summaryrefslogtreecommitdiff
path: root/gst
diff options
context:
space:
mode:
Diffstat (limited to 'gst')
-rw-r--r--gst/alpha/gstalpha.c2617
-rw-r--r--gst/alpha/gstalpha.h99
-rw-r--r--gst/alpha/gstalphacolor.c708
-rw-r--r--gst/alpha/gstalphacolor.h42
-rw-r--r--gst/alpha/meson.build19
-rw-r--r--gst/apetag/gstapedemux.c446
-rw-r--r--gst/apetag/gstapedemux.h54
-rw-r--r--gst/apetag/meson.build9
-rw-r--r--gst/audiofx/audioamplify.c480
-rw-r--r--gst/audiofx/audioamplify.h64
-rw-r--r--gst/audiofx/audiochebband.c662
-rw-r--r--gst/audiofx/audiochebband.h66
-rw-r--r--gst/audiofx/audiocheblimit.c576
-rw-r--r--gst/audiofx/audiocheblimit.h68
-rw-r--r--gst/audiofx/audiodynamic.c718
-rw-r--r--gst/audiofx/audiodynamic.h65
-rw-r--r--gst/audiofx/audioecho.c506
-rw-r--r--gst/audiofx/audioecho.h75
-rw-r--r--gst/audiofx/audiofirfilter.c265
-rw-r--r--gst/audiofx/audiofirfilter.h73
-rw-r--r--gst/audiofx/audiofx.c74
-rw-r--r--gst/audiofx/audiofxbasefirfilter.c1088
-rw-r--r--gst/audiofx/audiofxbasefirfilter.h102
-rw-r--r--gst/audiofx/audiofxbaseiirfilter.c418
-rw-r--r--gst/audiofx/audiofxbaseiirfilter.h78
-rw-r--r--gst/audiofx/audioiirfilter.c284
-rw-r--r--gst/audiofx/audioiirfilter.h72
-rw-r--r--gst/audiofx/audioinvert.c258
-rw-r--r--gst/audiofx/audioinvert.h62
-rw-r--r--gst/audiofx/audiokaraoke.c365
-rw-r--r--gst/audiofx/audiokaraoke.h69
-rw-r--r--gst/audiofx/audiopanorama.c537
-rw-r--r--gst/audiofx/audiopanorama.h70
-rw-r--r--gst/audiofx/audiopanoramaorc-dist.c3977
-rw-r--r--gst/audiofx/audiopanoramaorc-dist.h107
-rw-r--r--gst/audiofx/audiopanoramaorc.orc258
-rw-r--r--gst/audiofx/audiowsincband.c487
-rw-r--r--gst/audiofx/audiowsincband.h82
-rw-r--r--gst/audiofx/audiowsinclimit.c413
-rw-r--r--gst/audiofx/audiowsinclimit.h82
-rw-r--r--gst/audiofx/gstscaletempo.c944
-rw-r--r--gst/audiofx/gstscaletempo.h101
-rw-r--r--gst/audiofx/gstscaletempoplugin.c68
-rw-r--r--gst/audiofx/gststereo.c202
-rw-r--r--gst/audiofx/gststereo.h57
-rw-r--r--gst/audiofx/math_compat.h55
-rw-r--r--gst/audiofx/meson.build50
-rw-r--r--gst/audioparsers/gstaacparse.c1677
-rw-r--r--gst/audioparsers/gstaacparse.h105
-rw-r--r--gst/audioparsers/gstac3parse.c958
-rw-r--r--gst/audioparsers/gstac3parse.h83
-rw-r--r--gst/audioparsers/gstamrparse.c457
-rw-r--r--gst/audioparsers/gstamrparse.h78
-rw-r--r--gst/audioparsers/gstaudioparserselements.h39
-rw-r--r--gst/audioparsers/gstdcaparse.c622
-rw-r--r--gst/audioparsers/gstdcaparse.h82
-rw-r--r--gst/audioparsers/gstflacparse.c1914
-rw-r--r--gst/audioparsers/gstflacparse.h99
-rw-r--r--gst/audioparsers/gstmpegaudioparse.c1478
-rw-r--r--gst/audioparsers/gstmpegaudioparse.h114
-rw-r--r--gst/audioparsers/gstsbcparse.c539
-rw-r--r--gst/audioparsers/gstsbcparse.h76
-rw-r--r--gst/audioparsers/gstwavpackparse.c713
-rw-r--r--gst/audioparsers/gstwavpackparse.h133
-rw-r--r--gst/audioparsers/meson.build23
-rw-r--r--gst/audioparsers/plugin.c48
-rw-r--r--gst/auparse/gstauparse.c812
-rw-r--r--gst/auparse/gstauparse.h77
-rw-r--r--gst/auparse/meson.build10
-rw-r--r--gst/autodetect/gstautoaudiosink.c147
-rw-r--r--gst/autodetect/gstautoaudiosink.h55
-rw-r--r--gst/autodetect/gstautoaudiosrc.c99
-rw-r--r--gst/autodetect/gstautoaudiosrc.h54
-rw-r--r--gst/autodetect/gstautodetect.c482
-rw-r--r--gst/autodetect/gstautodetect.h72
-rw-r--r--gst/autodetect/gstautodetectelement.c42
-rw-r--r--gst/autodetect/gstautodetectelements.h41
-rw-r--r--gst/autodetect/gstautodetectplugin.c45
-rw-r--r--gst/autodetect/gstautovideosink.c147
-rw-r--r--gst/autodetect/gstautovideosink.h55
-rw-r--r--gst/autodetect/gstautovideosrc.c98
-rw-r--r--gst/autodetect/gstautovideosrc.h54
-rw-r--r--gst/autodetect/meson.build20
-rw-r--r--gst/avi/README72
-rw-r--r--gst/avi/avi-ids.h79
-rw-r--r--gst/avi/gstavi.c44
-rw-r--r--gst/avi/gstavidemux.c6051
-rw-r--r--gst/avi/gstavidemux.h221
-rw-r--r--gst/avi/gstavielement.c44
-rw-r--r--gst/avi/gstavielements.h40
-rw-r--r--gst/avi/gstavimux.c2428
-rw-r--r--gst/avi/gstavimux.h209
-rw-r--r--gst/avi/gstavisubtitle.c390
-rw-r--r--gst/avi/gstavisubtitle.h39
-rw-r--r--gst/avi/meson.build18
-rw-r--r--gst/cutter/README38
-rw-r--r--gst/cutter/filter.func16
-rw-r--r--gst/cutter/gstcutter.c498
-rw-r--r--gst/cutter/gstcutter.h85
-rw-r--r--gst/cutter/meson.build9
-rw-r--r--gst/debugutils/breakmydata.c297
-rw-r--r--gst/debugutils/cpureport.c149
-rw-r--r--gst/debugutils/cpureport.h58
-rw-r--r--gst/debugutils/gstcapsdebug.c261
-rw-r--r--gst/debugutils/gstcapsdebug.h55
-rw-r--r--gst/debugutils/gstcapssetter.c335
-rw-r--r--gst/debugutils/gstcapssetter.h63
-rw-r--r--gst/debugutils/gstdebug.c55
-rw-r--r--gst/debugutils/gstdebugutilselements.h47
-rw-r--r--gst/debugutils/gstnavigationtest.c278
-rw-r--r--gst/debugutils/gstnavigationtest.h67
-rw-r--r--gst/debugutils/gstnavseek.c432
-rw-r--r--gst/debugutils/gstnavseek.h62
-rw-r--r--gst/debugutils/gstpushfilesrc.c424
-rw-r--r--gst/debugutils/gstpushfilesrc.h64
-rw-r--r--gst/debugutils/gsttaginject.c206
-rw-r--r--gst/debugutils/gsttaginject.h66
-rw-r--r--gst/debugutils/meson.build31
-rw-r--r--gst/debugutils/progressreport.c522
-rw-r--r--gst/debugutils/progressreport.h68
-rw-r--r--gst/debugutils/rndbuffersize.c581
-rw-r--r--gst/debugutils/testplugin.c306
-rw-r--r--gst/debugutils/tests.c269
-rw-r--r--gst/debugutils/tests.h43
-rw-r--r--gst/deinterlace/gstdeinterlace.c3354
-rw-r--r--gst/deinterlace/gstdeinterlace.h213
-rw-r--r--gst/deinterlace/gstdeinterlacemethod.c857
-rw-r--r--gst/deinterlace/gstdeinterlacemethod.h222
-rw-r--r--gst/deinterlace/meson.build101
-rw-r--r--gst/deinterlace/tvtime-dist.c1119
-rw-r--r--gst/deinterlace/tvtime-dist.h93
-rw-r--r--gst/deinterlace/tvtime.orc109
-rw-r--r--gst/deinterlace/tvtime/greedy.c250
-rw-r--r--gst/deinterlace/tvtime/greedyh.asm472
-rw-r--r--gst/deinterlace/tvtime/greedyh.c1076
-rw-r--r--gst/deinterlace/tvtime/greedyhmacros.h83
-rw-r--r--gst/deinterlace/tvtime/linear.c125
-rw-r--r--gst/deinterlace/tvtime/linearblend.c217
-rw-r--r--gst/deinterlace/tvtime/mmx.h723
-rw-r--r--gst/deinterlace/tvtime/plugins.h54
-rw-r--r--gst/deinterlace/tvtime/scalerbob.c118
-rw-r--r--gst/deinterlace/tvtime/sse.h992
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp.c216
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoop0A.inc15
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopBottom.inc174
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA.inc11
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA8.inc12
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA.inc10
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA2.inc5
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA6.inc11
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH.inc10
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH2.inc5
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopTop.inc254
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopVA.inc6
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/SearchLoopVAH.inc6
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/StrangeBob.inc435
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll.inc266
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll2.inc243
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/WierdBob.inc286
-rw-r--r--gst/deinterlace/tvtime/tomsmocomp/tomsmocompmacros.h164
-rw-r--r--gst/deinterlace/tvtime/vfir.c314
-rw-r--r--gst/deinterlace/tvtime/weave.c173
-rw-r--r--gst/deinterlace/tvtime/weavebff.c174
-rw-r--r--gst/deinterlace/tvtime/weavetff.c175
-rw-r--r--gst/deinterlace/tvtime/x86-64_macros.inc82
-rw-r--r--gst/deinterlace/x86/x86inc.asm1701
-rw-r--r--gst/deinterlace/x86/yadif.asm410
-rw-r--r--gst/deinterlace/yadif.c486
-rw-r--r--gst/deinterlace/yadif.h48
-rw-r--r--gst/dtmf/gstdtmf.c42
-rw-r--r--gst/dtmf/gstdtmfcommon.h42
-rw-r--r--gst/dtmf/gstdtmfsrc.c953
-rw-r--r--gst/dtmf/gstdtmfsrc.h101
-rw-r--r--gst/dtmf/gstrtpdtmfdepay.c497
-rw-r--r--gst/dtmf/gstrtpdtmfdepay.h68
-rw-r--r--gst/dtmf/gstrtpdtmfsrc.c1144
-rw-r--r--gst/dtmf/gstrtpdtmfsrc.h114
-rw-r--r--gst/dtmf/meson.build18
-rw-r--r--gst/effectv/gstaging.c404
-rw-r--r--gst/effectv/gstaging.h89
-rw-r--r--gst/effectv/gstdice.c316
-rw-r--r--gst/effectv/gstdice.h75
-rw-r--r--gst/effectv/gstedge.c256
-rw-r--r--gst/effectv/gstedge.h70
-rw-r--r--gst/effectv/gsteffectv.c67
-rw-r--r--gst/effectv/gsteffectv.h46
-rw-r--r--gst/effectv/gstop.c421
-rw-r--r--gst/effectv/gstop.h74
-rw-r--r--gst/effectv/gstquark.c301
-rw-r--r--gst/effectv/gstquark.h71
-rw-r--r--gst/effectv/gstradioac.c627
-rw-r--r--gst/effectv/gstradioac.h87
-rw-r--r--gst/effectv/gstrev.c252
-rw-r--r--gst/effectv/gstrev.h87
-rw-r--r--gst/effectv/gstripple.c615
-rw-r--r--gst/effectv/gstripple.h86
-rw-r--r--gst/effectv/gstshagadelic.c258
-rw-r--r--gst/effectv/gstshagadelic.h72
-rw-r--r--gst/effectv/gststreak.c267
-rw-r--r--gst/effectv/gststreak.h74
-rw-r--r--gst/effectv/gstvertigo.c330
-rw-r--r--gst/effectv/gstvertigo.h71
-rw-r--r--gst/effectv/gstwarp.c268
-rw-r--r--gst/effectv/gstwarp.h67
-rw-r--r--gst/effectv/meson.build16
-rw-r--r--gst/equalizer/GstIirEqualizer10Bands.prs174
-rw-r--r--gst/equalizer/GstIirEqualizer3Bands.prs14
-rw-r--r--gst/equalizer/gstiirequalizer.c904
-rw-r--r--gst/equalizer/gstiirequalizer.h82
-rw-r--r--gst/equalizer/gstiirequalizer10bands.c232
-rw-r--r--gst/equalizer/gstiirequalizer10bands.h51
-rw-r--r--gst/equalizer/gstiirequalizer3bands.c145
-rw-r--r--gst/equalizer/gstiirequalizer3bands.h51
-rw-r--r--gst/equalizer/gstiirequalizernbands.c165
-rw-r--r--gst/equalizer/gstiirequalizernbands.h52
-rw-r--r--gst/equalizer/gstiirequalizerplugin.c45
-rw-r--r--gst/equalizer/meson.build22
-rw-r--r--gst/flv/amfdefs.h44
-rw-r--r--gst/flv/gstflvdemux.c3837
-rw-r--r--gst/flv/gstflvdemux.h170
-rw-r--r--gst/flv/gstflvelement.c43
-rw-r--r--gst/flv/gstflvelements.h43
-rw-r--r--gst/flv/gstflvmux.c2203
-rw-r--r--gst/flv/gstflvmux.h120
-rw-r--r--gst/flv/gstflvplugin.c39
-rw-r--r--gst/flv/gstindex.c1017
-rw-r--r--gst/flv/gstindex.h448
-rw-r--r--gst/flv/gstmemindex.c432
-rw-r--r--gst/flv/meson.build10
-rw-r--r--gst/flx/flx_color.c117
-rw-r--r--gst/flx/flx_color.h52
-rw-r--r--gst/flx/flx_fmt.h128
-rw-r--r--gst/flx/gstflxdec.c1000
-rw-r--r--gst/flx/gstflxdec.h89
-rw-r--r--gst/flx/meson.build10
-rw-r--r--gst/goom/README13
-rw-r--r--gst/goom/config_param.c142
-rw-r--r--gst/goom/convolve_fx.c368
-rw-r--r--gst/goom/drawmethods.c222
-rw-r--r--gst/goom/drawmethods.h27
-rw-r--r--gst/goom/filters.c861
-rw-r--r--gst/goom/filters_mmx.s216
-rw-r--r--gst/goom/flying_stars_fx.c361
-rw-r--r--gst/goom/goom.h42
-rw-r--r--gst/goom/goom_config.h45
-rw-r--r--gst/goom/goom_config_param.h134
-rw-r--r--gst/goom/goom_core.c856
-rw-r--r--gst/goom/goom_filters.h70
-rw-r--r--gst/goom/goom_fx.h30
-rw-r--r--gst/goom/goom_graphic.h92
-rw-r--r--gst/goom/goom_plugin_info.h181
-rw-r--r--gst/goom/goom_tools.c50
-rw-r--r--gst/goom/goom_tools.h53
-rw-r--r--gst/goom/goom_typedefs.h29
-rw-r--r--gst/goom/goom_visual_fx.h35
-rw-r--r--gst/goom/goomsl_lex.l94
-rw-r--r--gst/goom/goomsl_yacc.y1438
-rw-r--r--gst/goom/graphic.c28
-rw-r--r--gst/goom/gstgoom.c218
-rw-r--r--gst/goom/gstgoom.h68
-rw-r--r--gst/goom/ifs.c774
-rw-r--r--gst/goom/ifs.h54
-rw-r--r--gst/goom/lines.c257
-rw-r--r--gst/goom/lines.h94
-rw-r--r--gst/goom/mathtools.c106
-rw-r--r--gst/goom/mathtools.h58
-rw-r--r--gst/goom/meson.build31
-rw-r--r--gst/goom/mmx.c291
-rw-r--r--gst/goom/mmx.h741
-rw-r--r--gst/goom/motif_goom1.h1044
-rw-r--r--gst/goom/motif_goom2.h1044
-rw-r--r--gst/goom/plugin_info.c262
-rw-r--r--gst/goom/ppc_drawings.h28
-rw-r--r--gst/goom/ppc_drawings.s394
-rw-r--r--gst/goom/ppc_zoom_ultimate.h25
-rw-r--r--gst/goom/ppc_zoom_ultimate.s336
-rw-r--r--gst/goom/sound_tester.c161
-rw-r--r--gst/goom/sound_tester.h29
-rw-r--r--gst/goom/surf3d.c152
-rw-r--r--gst/goom/surf3d.h57
-rw-r--r--gst/goom/surf3d.s484
-rw-r--r--gst/goom/tentacle3d.c358
-rw-r--r--gst/goom/tentacle3d.h26
-rw-r--r--gst/goom/v3d.c38
-rw-r--r--gst/goom/v3d.h83
-rw-r--r--gst/goom/xmmx.c402
-rw-r--r--gst/goom/xmmx.h537
-rw-r--r--gst/goom2k1/README5
-rw-r--r--gst/goom2k1/filters.c531
-rw-r--r--gst/goom2k1/filters.h83
-rw-r--r--gst/goom2k1/filters_mmx.s130
-rw-r--r--gst/goom2k1/goom_core.c411
-rw-r--r--gst/goom2k1/goom_core.h43
-rw-r--r--gst/goom2k1/goom_tools.h24
-rw-r--r--gst/goom2k1/graphic.c14
-rw-r--r--gst/goom2k1/graphic.h23
-rw-r--r--gst/goom2k1/gstgoom.c193
-rw-r--r--gst/goom2k1/gstgoom.h68
-rw-r--r--gst/goom2k1/lines.c112
-rw-r--r--gst/goom2k1/lines.h16
-rw-r--r--gst/goom2k1/meson.build41
-rw-r--r--gst/icydemux/gsticydemux.c677
-rw-r--r--gst/icydemux/gsticydemux.h89
-rw-r--r--gst/icydemux/meson.build10
-rw-r--r--gst/id3demux/gstid3demux.c292
-rw-r--r--gst/id3demux/gstid3demux.h61
-rw-r--r--gst/id3demux/meson.build10
-rw-r--r--gst/imagefreeze/gstimagefreeze.c1251
-rw-r--r--gst/imagefreeze/gstimagefreeze.h87
-rw-r--r--gst/imagefreeze/meson.build10
-rw-r--r--gst/interleave/deinterleave.c1032
-rw-r--r--gst/interleave/deinterleave.h73
-rw-r--r--gst/interleave/gstinterleaveelements.h34
-rw-r--r--gst/interleave/interleave.c1341
-rw-r--r--gst/interleave/interleave.h90
-rw-r--r--gst/interleave/meson.build10
-rw-r--r--gst/interleave/plugin.c43
-rw-r--r--gst/isomp4/GstQTMux.prs6
-rw-r--r--gst/isomp4/LEGAL10
-rw-r--r--gst/isomp4/atoms.c5768
-rw-r--r--gst/isomp4/atoms.h1149
-rw-r--r--gst/isomp4/atomsrecovery.c1206
-rw-r--r--gst/isomp4/atomsrecovery.h162
-rw-r--r--gst/isomp4/descriptors.c457
-rw-r--r--gst/isomp4/descriptors.h151
-rw-r--r--gst/isomp4/fourcc.h413
-rw-r--r--gst/isomp4/gstisoff.c203
-rw-r--r--gst/isomp4/gstisoff.h100
-rw-r--r--gst/isomp4/gstisomp4element.c55
-rw-r--r--gst/isomp4/gstisomp4elements.h43
-rw-r--r--gst/isomp4/gstqtmoovrecover.c380
-rw-r--r--gst/isomp4/gstqtmoovrecover.h88
-rw-r--r--gst/isomp4/gstqtmux-doc.c318
-rw-r--r--gst/isomp4/gstqtmux-doc.h53
-rw-r--r--gst/isomp4/gstqtmux.c7509
-rw-r--r--gst/isomp4/gstqtmux.h390
-rw-r--r--gst/isomp4/gstqtmuxmap.c424
-rw-r--r--gst/isomp4/gstqtmuxmap.h86
-rw-r--r--gst/isomp4/gstrtpxqtdepay.c690
-rw-r--r--gst/isomp4/gstrtpxqtdepay.h64
-rw-r--r--gst/isomp4/isomp4-plugin.c47
-rw-r--r--gst/isomp4/meson.build34
-rw-r--r--gst/isomp4/properties.c210
-rw-r--r--gst/isomp4/properties.h87
-rw-r--r--gst/isomp4/qtatomparser.h139
-rw-r--r--gst/isomp4/qtdemux.c15031
-rw-r--r--gst/isomp4/qtdemux.h507
-rw-r--r--gst/isomp4/qtdemux_debug.h12
-rw-r--r--gst/isomp4/qtdemux_dump.c1097
-rw-r--r--gst/isomp4/qtdemux_dump.h98
-rw-r--r--gst/isomp4/qtdemux_lang.c207
-rw-r--r--gst/isomp4/qtdemux_lang.h31
-rw-r--r--gst/isomp4/qtdemux_tags.c1034
-rw-r--r--gst/isomp4/qtdemux_tags.h30
-rw-r--r--gst/isomp4/qtdemux_tree.c122
-rw-r--r--gst/isomp4/qtdemux_tree.h47
-rw-r--r--gst/isomp4/qtdemux_types.c250
-rw-r--r--gst/isomp4/qtdemux_types.h83
-rw-r--r--gst/isomp4/qtpalette.h137
-rw-r--r--gst/law/alaw-decode.c250
-rw-r--r--gst/law/alaw-decode.h57
-rw-r--r--gst/law/alaw-encode.c447
-rw-r--r--gst/law/alaw-encode.h59
-rw-r--r--gst/law/alaw.c44
-rw-r--r--gst/law/meson.build21
-rw-r--r--gst/law/mulaw-conversion.c122
-rw-r--r--gst/law/mulaw-conversion.h12
-rw-r--r--gst/law/mulaw-decode.c201
-rw-r--r--gst/law/mulaw-decode.h55
-rw-r--r--gst/law/mulaw-encode.c235
-rw-r--r--gst/law/mulaw-encode.h59
-rw-r--r--gst/law/mulaw.c46
-rw-r--r--gst/level/gstlevel.c832
-rw-r--r--gst/level/gstlevel.h94
-rw-r--r--gst/level/meson.build10
-rw-r--r--gst/matroska/ebml-ids.h56
-rw-r--r--gst/matroska/ebml-read.c682
-rw-r--r--gst/matroska/ebml-read.h171
-rw-r--r--gst/matroska/ebml-write.c941
-rw-r--r--gst/matroska/ebml-write.h154
-rw-r--r--gst/matroska/gstmatroskaelement.c43
-rw-r--r--gst/matroska/gstmatroskaelements.h41
-rw-r--r--gst/matroska/lzo.c292
-rw-r--r--gst/matroska/lzo.h35
-rw-r--r--gst/matroska/matroska-demux.c7503
-rw-r--r--gst/matroska/matroska-demux.h140
-rw-r--r--gst/matroska/matroska-ids.c438
-rw-r--r--gst/matroska/matroska-ids.h773
-rw-r--r--gst/matroska/matroska-mux.c4458
-rw-r--r--gst/matroska/matroska-mux.h160
-rw-r--r--gst/matroska/matroska-parse.c3272
-rw-r--r--gst/matroska/matroska-parse.h103
-rw-r--r--gst/matroska/matroska-read-common.c3411
-rw-r--r--gst/matroska/matroska-read-common.h175
-rw-r--r--gst/matroska/matroska.c45
-rw-r--r--gst/matroska/meson.build30
-rw-r--r--gst/matroska/webm-mux.c104
-rw-r--r--gst/matroska/webm-mux.h49
-rw-r--r--gst/meson.build13
-rw-r--r--gst/monoscope/README9
-rw-r--r--gst/monoscope/convolve.c363
-rw-r--r--gst/monoscope/convolve.h48
-rw-r--r--gst/monoscope/gstmonoscope.c598
-rw-r--r--gst/monoscope/gstmonoscope.h86
-rw-r--r--gst/monoscope/meson.build12
-rw-r--r--gst/monoscope/monoscope.c168
-rw-r--r--gst/monoscope/monoscope.h27
-rw-r--r--gst/multifile/gstimagesequencesrc.c659
-rw-r--r--gst/multifile/gstimagesequencesrc.h56
-rw-r--r--gst/multifile/gstmultifile.c56
-rw-r--r--gst/multifile/gstmultifilesink.c1082
-rw-r--r--gst/multifile/gstmultifilesink.h120
-rw-r--r--gst/multifile/gstmultifilesrc.c573
-rw-r--r--gst/multifile/gstmultifilesrc.h74
-rw-r--r--gst/multifile/gstsplitfilesrc.c600
-rw-r--r--gst/multifile/gstsplitfilesrc.h76
-rw-r--r--gst/multifile/gstsplitmuxpartreader.c1377
-rw-r--r--gst/multifile/gstsplitmuxpartreader.h122
-rw-r--r--gst/multifile/gstsplitmuxsink.c3819
-rw-r--r--gst/multifile/gstsplitmuxsink.h225
-rw-r--r--gst/multifile/gstsplitmuxsrc.c1544
-rw-r--r--gst/multifile/gstsplitmuxsrc.h123
-rw-r--r--gst/multifile/gstsplitutils.c105
-rw-r--r--gst/multifile/gstsplitutils.h40
-rw-r--r--gst/multifile/meson.build40
-rw-r--r--gst/multifile/patternspec.c334
-rw-r--r--gst/multifile/patternspec.h47
-rw-r--r--gst/multifile/test-splitmuxpartreader.c104
-rw-r--r--gst/multipart/meson.build12
-rw-r--r--gst/multipart/multipart.c44
-rw-r--r--gst/multipart/multipartdemux.c802
-rw-r--r--gst/multipart/multipartdemux.h110
-rw-r--r--gst/multipart/multipartmux.c689
-rw-r--r--gst/multipart/multipartmux.h98
-rw-r--r--gst/replaygain/gstrganalysis.c705
-rw-r--r--gst/replaygain/gstrganalysis.h86
-rw-r--r--gst/replaygain/gstrglimiter.c201
-rw-r--r--gst/replaygain/gstrglimiter.h66
-rw-r--r--gst/replaygain/gstrgvolume.c687
-rw-r--r--gst/replaygain/gstrgvolume.h90
-rw-r--r--gst/replaygain/meson.build19
-rw-r--r--gst/replaygain/replaygain.c47
-rw-r--r--gst/replaygain/replaygain.h36
-rw-r--r--gst/replaygain/rganalysis.c824
-rw-r--r--gst/replaygain/rganalysis.h65
-rw-r--r--gst/rtp/README398
-rw-r--r--gst/rtp/TODO15
-rw-r--r--gst/rtp/dboolhuff.LICENSE29
-rw-r--r--gst/rtp/dboolhuff.c73
-rw-r--r--gst/rtp/dboolhuff.h155
-rw-r--r--gst/rtp/fnv1hash.c63
-rw-r--r--gst/rtp/fnv1hash.h36
-rw-r--r--gst/rtp/gstasteriskh263.c226
-rw-r--r--gst/rtp/gstasteriskh263.h63
-rw-r--r--gst/rtp/gstbuffermemory.c120
-rw-r--r--gst/rtp/gstbuffermemory.h66
-rw-r--r--gst/rtp/gstrtp.c139
-rw-r--r--gst/rtp/gstrtpL16depay.c296
-rw-r--r--gst/rtp/gstrtpL16depay.h65
-rw-r--r--gst/rtp/gstrtpL16pay.c259
-rw-r--r--gst/rtp/gstrtpL16pay.h61
-rw-r--r--gst/rtp/gstrtpL24depay.c261
-rw-r--r--gst/rtp/gstrtpL24depay.h65
-rw-r--r--gst/rtp/gstrtpL24pay.c240
-rw-r--r--gst/rtp/gstrtpL24pay.h61
-rw-r--r--gst/rtp/gstrtpL8depay.c265
-rw-r--r--gst/rtp/gstrtpL8depay.h63
-rw-r--r--gst/rtp/gstrtpL8pay.c241
-rw-r--r--gst/rtp/gstrtpL8pay.h62
-rw-r--r--gst/rtp/gstrtpac3depay.c176
-rw-r--r--gst/rtp/gstrtpac3depay.h56
-rw-r--r--gst/rtp/gstrtpac3pay.c475
-rw-r--r--gst/rtp/gstrtpac3pay.h62
-rw-r--r--gst/rtp/gstrtpamrdepay.c474
-rw-r--r--gst/rtp/gstrtpamrdepay.h75
-rw-r--r--gst/rtp/gstrtpamrpay.c461
-rw-r--r--gst/rtp/gstrtpamrpay.h68
-rw-r--r--gst/rtp/gstrtpbvdepay.c187
-rw-r--r--gst/rtp/gstrtpbvdepay.h58
-rw-r--r--gst/rtp/gstrtpbvpay.c236
-rw-r--r--gst/rtp/gstrtpbvpay.h58
-rw-r--r--gst/rtp/gstrtpceltdepay.c271
-rw-r--r--gst/rtp/gstrtpceltdepay.h52
-rw-r--r--gst/rtp/gstrtpceltpay.c500
-rw-r--r--gst/rtp/gstrtpceltpay.h60
-rw-r--r--gst/rtp/gstrtpchannels.c310
-rw-r--r--gst/rtp/gstrtpchannels.h46
-rw-r--r--gst/rtp/gstrtpdvdepay.c421
-rw-r--r--gst/rtp/gstrtpdvdepay.h64
-rw-r--r--gst/rtp/gstrtpdvpay.c397
-rw-r--r--gst/rtp/gstrtpdvpay.h67
-rw-r--r--gst/rtp/gstrtpelement.c46
-rw-r--r--gst/rtp/gstrtpelements.h134
-rw-r--r--gst/rtp/gstrtpg722depay.c258
-rw-r--r--gst/rtp/gstrtpg722depay.h62
-rw-r--r--gst/rtp/gstrtpg722pay.c232
-rw-r--r--gst/rtp/gstrtpg722pay.h59
-rw-r--r--gst/rtp/gstrtpg723depay.c219
-rw-r--r--gst/rtp/gstrtpg723depay.h57
-rw-r--r--gst/rtp/gstrtpg723pay.c303
-rw-r--r--gst/rtp/gstrtpg723pay.h62
-rw-r--r--gst/rtp/gstrtpg726depay.c389
-rw-r--r--gst/rtp/gstrtpg726depay.h56
-rw-r--r--gst/rtp/gstrtpg726pay.c418
-rw-r--r--gst/rtp/gstrtpg726pay.h53
-rw-r--r--gst/rtp/gstrtpg729depay.c221
-rw-r--r--gst/rtp/gstrtpg729depay.h59
-rw-r--r--gst/rtp/gstrtpg729pay.c394
-rw-r--r--gst/rtp/gstrtpg729pay.h64
-rw-r--r--gst/rtp/gstrtpgsmdepay.c148
-rw-r--r--gst/rtp/gstrtpgsmdepay.h56
-rw-r--r--gst/rtp/gstrtpgsmpay.c177
-rw-r--r--gst/rtp/gstrtpgsmpay.h58
-rw-r--r--gst/rtp/gstrtpgstdepay.c607
-rw-r--r--gst/rtp/gstrtpgstdepay.h64
-rw-r--r--gst/rtp/gstrtpgstpay.c699
-rw-r--r--gst/rtp/gstrtpgstpay.h71
-rw-r--r--gst/rtp/gstrtph261depay.c289
-rw-r--r--gst/rtp/gstrtph261depay.h58
-rw-r--r--gst/rtp/gstrtph261pay.c1068
-rw-r--r--gst/rtp/gstrtph261pay.h98
-rw-r--r--gst/rtp/gstrtph263depay.c443
-rw-r--r--gst/rtp/gstrtph263depay.h64
-rw-r--r--gst/rtp/gstrtph263pay.c1870
-rw-r--r--gst/rtp/gstrtph263pay.h413
-rw-r--r--gst/rtp/gstrtph263pdepay.c493
-rw-r--r--gst/rtp/gstrtph263pdepay.h60
-rw-r--r--gst/rtp/gstrtph263ppay.c814
-rw-r--r--gst/rtp/gstrtph263ppay.h68
-rw-r--r--gst/rtp/gstrtph264depay.c1512
-rw-r--r--gst/rtp/gstrtph264depay.h92
-rw-r--r--gst/rtp/gstrtph264pay.c1814
-rw-r--r--gst/rtp/gstrtph264pay.h109
-rw-r--r--gst/rtp/gstrtph265depay.c1639
-rw-r--r--gst/rtp/gstrtph265depay.h115
-rw-r--r--gst/rtp/gstrtph265pay.c1826
-rw-r--r--gst/rtp/gstrtph265pay.h91
-rw-r--r--gst/rtp/gstrtph265types.h76
-rw-r--r--gst/rtp/gstrtphdrext-colorspace.c465
-rw-r--r--gst/rtp/gstrtphdrext-colorspace.h41
-rw-r--r--gst/rtp/gstrtpilbcdepay.c235
-rw-r--r--gst/rtp/gstrtpilbcdepay.h63
-rw-r--r--gst/rtp/gstrtpilbcpay.c224
-rw-r--r--gst/rtp/gstrtpilbcpay.h58
-rw-r--r--gst/rtp/gstrtpisacdepay.c147
-rw-r--r--gst/rtp/gstrtpisacdepay.h31
-rw-r--r--gst/rtp/gstrtpisacpay.c183
-rw-r--r--gst/rtp/gstrtpisacpay.h31
-rw-r--r--gst/rtp/gstrtpj2kcommon.h102
-rw-r--r--gst/rtp/gstrtpj2kdepay.c664
-rw-r--r--gst/rtp/gstrtpj2kdepay.h71
-rw-r--r--gst/rtp/gstrtpj2kpay.c567
-rw-r--r--gst/rtp/gstrtpj2kpay.h56
-rw-r--r--gst/rtp/gstrtpjpegdepay.c795
-rw-r--r--gst/rtp/gstrtpjpegdepay.h69
-rw-r--r--gst/rtp/gstrtpjpegpay.c1053
-rw-r--r--gst/rtp/gstrtpjpegpay.h61
-rw-r--r--gst/rtp/gstrtpklvdepay.c393
-rw-r--r--gst/rtp/gstrtpklvdepay.h63
-rw-r--r--gst/rtp/gstrtpklvpay.c200
-rw-r--r--gst/rtp/gstrtpklvpay.h58
-rw-r--r--gst/rtp/gstrtpldacpay.c171
-rw-r--r--gst/rtp/gstrtpldacpay.h55
-rw-r--r--gst/rtp/gstrtpmp1sdepay.c139
-rw-r--r--gst/rtp/gstrtpmp1sdepay.h56
-rw-r--r--gst/rtp/gstrtpmp2tdepay.c239
-rw-r--r--gst/rtp/gstrtpmp2tdepay.h58
-rw-r--r--gst/rtp/gstrtpmp2tpay.c235
-rw-r--r--gst/rtp/gstrtpmp2tpay.h62
-rw-r--r--gst/rtp/gstrtpmp4adepay.c462
-rw-r--r--gst/rtp/gstrtpmp4adepay.h62
-rw-r--r--gst/rtp/gstrtpmp4apay.c461
-rw-r--r--gst/rtp/gstrtpmp4apay.h63
-rw-r--r--gst/rtp/gstrtpmp4gdepay.c811
-rw-r--r--gst/rtp/gstrtpmp4gdepay.h87
-rw-r--r--gst/rtp/gstrtpmp4gpay.c637
-rw-r--r--gst/rtp/gstrtpmp4gpay.h70
-rw-r--r--gst/rtp/gstrtpmp4vdepay.c223
-rw-r--r--gst/rtp/gstrtpmp4vdepay.h59
-rw-r--r--gst/rtp/gstrtpmp4vpay.c640
-rw-r--r--gst/rtp/gstrtpmp4vpay.h72
-rw-r--r--gst/rtp/gstrtpmpadepay.c177
-rw-r--r--gst/rtp/gstrtpmpadepay.h56
-rw-r--r--gst/rtp/gstrtpmpapay.c341
-rw-r--r--gst/rtp/gstrtpmpapay.h61
-rw-r--r--gst/rtp/gstrtpmparobustdepay.c808
-rw-r--r--gst/rtp/gstrtpmparobustdepay.h76
-rw-r--r--gst/rtp/gstrtpmpvdepay.c194
-rw-r--r--gst/rtp/gstrtpmpvdepay.h56
-rw-r--r--gst/rtp/gstrtpmpvpay.c332
-rw-r--r--gst/rtp/gstrtpmpvpay.h62
-rw-r--r--gst/rtp/gstrtpopusdepay.c256
-rw-r--r--gst/rtp/gstrtpopusdepay.h57
-rw-r--r--gst/rtp/gstrtpopuspay.c421
-rw-r--r--gst/rtp/gstrtpopuspay.h63
-rw-r--r--gst/rtp/gstrtppcmadepay.c160
-rw-r--r--gst/rtp/gstrtppcmadepay.h51
-rw-r--r--gst/rtp/gstrtppcmapay.c112
-rw-r--r--gst/rtp/gstrtppcmapay.h52
-rw-r--r--gst/rtp/gstrtppcmudepay.c161
-rw-r--r--gst/rtp/gstrtppcmudepay.h51
-rw-r--r--gst/rtp/gstrtppcmupay.c112
-rw-r--r--gst/rtp/gstrtppcmupay.h52
-rw-r--r--gst/rtp/gstrtpqcelpdepay.c428
-rw-r--r--gst/rtp/gstrtpqcelpdepay.h60
-rw-r--r--gst/rtp/gstrtpqdmdepay.c411
-rw-r--r--gst/rtp/gstrtpqdmdepay.h83
-rw-r--r--gst/rtp/gstrtpreddec.c546
-rw-r--r--gst/rtp/gstrtpreddec.h61
-rw-r--r--gst/rtp/gstrtpredenc.c532
-rw-r--r--gst/rtp/gstrtpredenc.h65
-rw-r--r--gst/rtp/gstrtpsbcdepay.c391
-rw-r--r--gst/rtp/gstrtpsbcdepay.h68
-rw-r--r--gst/rtp/gstrtpsbcpay.c372
-rw-r--r--gst/rtp/gstrtpsbcpay.h63
-rw-r--r--gst/rtp/gstrtpsirendepay.c121
-rw-r--r--gst/rtp/gstrtpsirendepay.h57
-rw-r--r--gst/rtp/gstrtpsirenpay.c143
-rw-r--r--gst/rtp/gstrtpsirenpay.h55
-rw-r--r--gst/rtp/gstrtpspeexdepay.c222
-rw-r--r--gst/rtp/gstrtpspeexdepay.h51
-rw-r--r--gst/rtp/gstrtpspeexpay.c346
-rw-r--r--gst/rtp/gstrtpspeexpay.h54
-rw-r--r--gst/rtp/gstrtpstorage.c221
-rw-r--r--gst/rtp/gstrtpstorage.h59
-rw-r--r--gst/rtp/gstrtpstreamdepay.c228
-rw-r--r--gst/rtp/gstrtpstreamdepay.h56
-rw-r--r--gst/rtp/gstrtpstreampay.c282
-rw-r--r--gst/rtp/gstrtpstreampay.h52
-rw-r--r--gst/rtp/gstrtpsv3vdepay.c316
-rw-r--r--gst/rtp/gstrtpsv3vdepay.h65
-rw-r--r--gst/rtp/gstrtptheoradepay.c703
-rw-r--r--gst/rtp/gstrtptheoradepay.h70
-rw-r--r--gst/rtp/gstrtptheorapay.c981
-rw-r--r--gst/rtp/gstrtptheorapay.h84
-rw-r--r--gst/rtp/gstrtpulpfecdec.c721
-rw-r--r--gst/rtp/gstrtpulpfecdec.h84
-rw-r--r--gst/rtp/gstrtpulpfecenc.c714
-rw-r--r--gst/rtp/gstrtpulpfecenc.h99
-rw-r--r--gst/rtp/gstrtputils.c153
-rw-r--r--gst/rtp/gstrtputils.h54
-rw-r--r--gst/rtp/gstrtpvorbisdepay.c707
-rw-r--r--gst/rtp/gstrtpvorbisdepay.h68
-rw-r--r--gst/rtp/gstrtpvorbispay.c1002
-rw-r--r--gst/rtp/gstrtpvorbispay.h83
-rw-r--r--gst/rtp/gstrtpvp8depay.c563
-rw-r--r--gst/rtp/gstrtpvp8depay.h81
-rw-r--r--gst/rtp/gstrtpvp8pay.c727
-rw-r--r--gst/rtp/gstrtpvp8pay.h75
-rw-r--r--gst/rtp/gstrtpvp9depay.c528
-rw-r--r--gst/rtp/gstrtpvp9depay.h79
-rw-r--r--gst/rtp/gstrtpvp9pay.c563
-rw-r--r--gst/rtp/gstrtpvp9pay.h70
-rw-r--r--gst/rtp/gstrtpvrawdepay.c663
-rw-r--r--gst/rtp/gstrtpvrawdepay.h69
-rw-r--r--gst/rtp/gstrtpvrawpay.c661
-rw-r--r--gst/rtp/gstrtpvrawpay.h65
-rw-r--r--gst/rtp/meson.build128
-rw-r--r--gst/rtp/rtpredcommon.c90
-rw-r--r--gst/rtp/rtpredcommon.h80
-rw-r--r--gst/rtp/rtpstorage.c259
-rw-r--r--gst/rtp/rtpstorage.h69
-rw-r--r--gst/rtp/rtpstoragestream.c277
-rw-r--r--gst/rtp/rtpstoragestream.h62
-rw-r--r--gst/rtp/rtpulpfeccommon.c447
-rw-r--r--gst/rtp/rtpulpfeccommon.h162
-rw-r--r--gst/rtpmanager/gstrtpbin.c5273
-rw-r--r--gst/rtpmanager/gstrtpbin.h155
-rw-r--r--gst/rtpmanager/gstrtpdtmfmux.c228
-rw-r--r--gst/rtpmanager/gstrtpdtmfmux.h68
-rw-r--r--gst/rtpmanager/gstrtpfunnel.c690
-rw-r--r--gst/rtpmanager/gstrtpfunnel.h50
-rw-r--r--gst/rtpmanager/gstrtphdrext-rfc6464.c326
-rw-r--r--gst/rtpmanager/gstrtphdrext-rfc6464.h32
-rw-r--r--gst/rtpmanager/gstrtphdrext-twcc.c234
-rw-r--r--gst/rtpmanager/gstrtphdrext-twcc.h83
-rw-r--r--gst/rtpmanager/gstrtpjitterbuffer.c4926
-rw-r--r--gst/rtpmanager/gstrtpjitterbuffer.h89
-rw-r--r--gst/rtpmanager/gstrtpmanager.c66
-rw-r--r--gst/rtpmanager/gstrtpmux.c1016
-rw-r--r--gst/rtpmanager/gstrtpmux.h96
-rw-r--r--gst/rtpmanager/gstrtpptdemux.c757
-rw-r--r--gst/rtpmanager/gstrtpptdemux.h65
-rw-r--r--gst/rtpmanager/gstrtprtxqueue.c520
-rw-r--r--gst/rtpmanager/gstrtprtxqueue.h80
-rw-r--r--gst/rtpmanager/gstrtprtxreceive.c791
-rw-r--r--gst/rtpmanager/gstrtprtxreceive.h81
-rw-r--r--gst/rtpmanager/gstrtprtxsend.c1000
-rw-r--r--gst/rtpmanager/gstrtprtxsend.h89
-rw-r--r--gst/rtpmanager/gstrtpsession.c2902
-rw-r--r--gst/rtpmanager/gstrtpsession.h94
-rw-r--r--gst/rtpmanager/gstrtpssrcdemux.c1008
-rw-r--r--gst/rtpmanager/gstrtpssrcdemux.h62
-rw-r--r--gst/rtpmanager/gstrtpst2022-1-fecdec.c1012
-rw-r--r--gst/rtpmanager/gstrtpst2022-1-fecdec.h39
-rw-r--r--gst/rtpmanager/gstrtpst2022-1-fecenc.c796
-rw-r--r--gst/rtpmanager/gstrtpst2022-1-fecenc.h39
-rw-r--r--gst/rtpmanager/meson.build35
-rw-r--r--gst/rtpmanager/rtpjitterbuffer.c1559
-rw-r--r--gst/rtpmanager/rtpjitterbuffer.h221
-rw-r--r--gst/rtpmanager/rtpsession.c4882
-rw-r--r--gst/rtpmanager/rtpsession.h443
-rw-r--r--gst/rtpmanager/rtpsource.c2073
-rw-r--r--gst/rtpmanager/rtpsource.h311
-rw-r--r--gst/rtpmanager/rtpstats.c680
-rw-r--r--gst/rtpmanager/rtpstats.h303
-rw-r--r--gst/rtpmanager/rtptimerqueue.c742
-rw-r--r--gst/rtpmanager/rtptimerqueue.h125
-rw-r--r--gst/rtpmanager/rtptwcc.c1116
-rw-r--r--gst/rtpmanager/rtptwcc.h78
-rw-r--r--gst/rtsp/COPYING.MIT21
-rw-r--r--gst/rtsp/README377
-rw-r--r--gst/rtsp/URLS38
-rw-r--r--gst/rtsp/gstrtpdec.c895
-rw-r--r--gst/rtsp/gstrtpdec.h88
-rw-r--r--gst/rtsp/gstrtsp.c65
-rw-r--r--gst/rtsp/gstrtspelement.c65
-rw-r--r--gst/rtsp/gstrtspelements.h62
-rw-r--r--gst/rtsp/gstrtspext.c268
-rw-r--r--gst/rtsp/gstrtspext.h83
-rw-r--r--gst/rtsp/gstrtspsrc.c10030
-rw-r--r--gst/rtsp/gstrtspsrc.h347
-rw-r--r--gst/rtsp/meson.build18
-rw-r--r--gst/shapewipe/gstshapewipe.c1146
-rw-r--r--gst/shapewipe/gstshapewipe.h83
-rw-r--r--gst/shapewipe/meson.build10
-rw-r--r--gst/smpte/barboxwipes.c963
-rw-r--r--gst/smpte/gstmask.c121
-rw-r--r--gst/smpte/gstmask.h64
-rw-r--r--gst/smpte/gstsmpte.c667
-rw-r--r--gst/smpte/gstsmpte.h86
-rw-r--r--gst/smpte/gstsmptealpha.c805
-rw-r--r--gst/smpte/gstsmptealpha.h80
-rw-r--r--gst/smpte/meson.build19
-rw-r--r--gst/smpte/paint.c338
-rw-r--r--gst/smpte/paint.h47
-rw-r--r--gst/smpte/plugin.c42
-rw-r--r--gst/spectrum/gstspectrum.c966
-rw-r--r--gst/spectrum/gstspectrum.h97
-rw-r--r--gst/spectrum/meson.build10
-rw-r--r--gst/udp/README7
-rw-r--r--gst/udp/gstdynudpsink.c601
-rw-r--r--gst/udp/gstdynudpsink.h72
-rw-r--r--gst/udp/gstmultiudpsink.c1808
-rw-r--r--gst/udp/gstmultiudpsink.h130
-rw-r--r--gst/udp/gstudp.c44
-rw-r--r--gst/udp/gstudpelement.c47
-rw-r--r--gst/udp/gstudpelements.h41
-rw-r--r--gst/udp/gstudpnetutils.c108
-rw-r--r--gst/udp/gstudpnetutils.h29
-rw-r--r--gst/udp/gstudpsink.c262
-rw-r--r--gst/udp/gstudpsink.h57
-rw-r--r--gst/udp/gstudpsrc.c1995
-rw-r--r--gst/udp/gstudpsrc.h111
-rw-r--r--gst/udp/meson.build20
-rw-r--r--gst/videobox/README21
-rw-r--r--gst/videobox/gstvideobox.c3349
-rw-r--r--gst/videobox/gstvideobox.h94
-rw-r--r--gst/videobox/gstvideoboxorc-dist.c242
-rw-r--r--gst/videobox/gstvideoboxorc-dist.h90
-rw-r--r--gst/videobox/gstvideoboxorc.orc7
-rw-r--r--gst/videobox/meson.build30
-rw-r--r--gst/videocrop/gstaspectratiocrop.c516
-rw-r--r--gst/videocrop/gstaspectratiocrop.h71
-rw-r--r--gst/videocrop/gstvideocrop-private.h35
-rw-r--r--gst/videocrop/gstvideocrop.c990
-rw-r--r--gst/videocrop/gstvideocrop.h95
-rw-r--r--gst/videocrop/gstvideocropplugin.c43
-rw-r--r--gst/videocrop/meson.build10
-rw-r--r--gst/videofilter/gstgamma.c415
-rw-r--r--gst/videofilter/gstgamma.h77
-rw-r--r--gst/videofilter/gstvideobalance.c844
-rw-r--r--gst/videofilter/gstvideobalance.h81
-rw-r--r--gst/videofilter/gstvideoflip.c1427
-rw-r--r--gst/videofilter/gstvideoflip.h99
-rw-r--r--gst/videofilter/gstvideomedian.c327
-rw-r--r--gst/videofilter/gstvideomedian.h68
-rw-r--r--gst/videofilter/gstvideotemplate.c253
-rwxr-xr-xgst/videofilter/make_filter39
-rw-r--r--gst/videofilter/meson.build18
-rw-r--r--gst/videofilter/plugin.c48
-rw-r--r--gst/videomixer/README27
-rw-r--r--gst/videomixer/blend.c1061
-rw-r--r--gst/videomixer/blend.h103
-rw-r--r--gst/videomixer/meson.build35
-rw-r--r--gst/videomixer/videomixer2.c2300
-rw-r--r--gst/videomixer/videomixer2.h133
-rw-r--r--gst/videomixer/videomixer2pad.h83
-rw-r--r--gst/videomixer/videomixerorc-dist.c2414
-rw-r--r--gst/videomixer/videomixerorc-dist.h96
-rw-r--r--gst/videomixer/videomixerorc.orc221
-rw-r--r--gst/wavenc/gstwavenc.c1152
-rw-r--r--gst/wavenc/gstwavenc.h85
-rw-r--r--gst/wavenc/meson.build10
-rw-r--r--gst/wavparse/gstwavparse.c3002
-rw-r--r--gst/wavparse/gstwavparse.h140
-rw-r--r--gst/wavparse/meson.build11
-rw-r--r--gst/y4m/gsty4mencode.c312
-rw-r--r--gst/y4m/gsty4mencode.h66
-rw-r--r--gst/y4m/meson.build10
800 files changed, 318011 insertions, 0 deletions
diff --git a/gst/alpha/gstalpha.c b/gst/alpha/gstalpha.c
new file mode 100644
index 0000000000..a9a75dcd63
--- /dev/null
+++ b/gst/alpha/gstalpha.c
@@ -0,0 +1,2617 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@collabora.co.uk>
+ * Copyright (C) <2007> Edward Hervey <edward.hervey@collabora.co.uk>
+ * Copyright (C) <2007> Jan Schmidt <thaytan@noraisin.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-alpha
+ * @title: alpha
+ *
+ * The alpha element adds an alpha channel to a video stream. The values
+ * of the alpha channel can be either be set to a constant or can be
+ * dynamically calculated via chroma keying, e.g. blue can be set as
+ * the transparent color.
+ *
+ * Sample pipeline:
+ * |[
+ * gst-launch-1.0 videotestsrc pattern=snow ! mixer.sink_0 \
+ * videotestsrc pattern=smpte75 ! alpha method=green ! mixer.sink_1 \
+ * videomixer name=mixer sink_0::zorder=0 sink_1::zorder=1 ! \
+ * videoconvert ! autovideosink
+ * ]| This pipeline adds an alpha channel to the SMPTE color bars
+ * with green as the transparent color and overlays the output on
+ * top of a snow video stream.
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstalpha.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <math.h>
+
+#ifndef M_PI
+#define M_PI 3.14159265358979323846
+#endif
+
+/* Generated by -bad/ext/cog/generate_tables */
+/* 3x4 fixed-point colour-conversion matrices.  Each row is evaluated via
+ * APPLY_MATRIX() below, which shifts the accumulated result right by 8,
+ * i.e. the coefficients are scaled by 256 and the fourth column of each
+ * row is a constant offset term. */
+static const int cog_ycbcr_to_rgb_matrix_8bit_hdtv[] = {
+  298, 0, 459, -63514,
+  298, -55, -136, 19681,
+  298, 541, 0, -73988,
+};
+
+static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
+  298, 0, 409, -57068,
+  298, -100, -208, 34707,
+  298, 516, 0, -70870,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_hdtv[] = {
+  47, 157, 16, 4096,
+  -26, -87, 112, 32768,
+  112, -102, -10, 32768,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
+  66, 129, 25, 4096,
+  -38, -74, 112, 32768,
+  112, -94, -18, 32768,
+};
+
+/* Direct SDTV (BT.601) <-> HDTV (BT.709) YCbCr conversions, used when only
+ * the colorimetry differs between input and output. */
+static const gint cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit[] = {
+  256, -30, -53, 10600,
+  0, 261, 29, -4367,
+  0, 19, 262, -3289,
+};
+
+static const gint cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit[] = {
+  256, 25, 49, -9536,
+  0, 253, -28, 3958,
+  0, -19, 252, 2918,
+};
+
+/* Alpha signals and args */
+enum
+{
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+/* Defaults for the element properties installed in class_init. */
+#define DEFAULT_METHOD ALPHA_METHOD_SET
+#define DEFAULT_ALPHA 1.0
+#define DEFAULT_TARGET_R 0
+#define DEFAULT_TARGET_G 255
+#define DEFAULT_TARGET_B 0
+#define DEFAULT_ANGLE 20.0
+#define DEFAULT_NOISE_LEVEL 2.0
+#define DEFAULT_BLACK_SENSITIVITY 100
+#define DEFAULT_WHITE_SENSITIVITY 100
+#define DEFAULT_PREFER_PASSTHROUGH FALSE
+
+enum
+{
+  PROP_0,
+  PROP_METHOD,
+  PROP_ALPHA,
+  PROP_TARGET_R,
+  PROP_TARGET_G,
+  PROP_TARGET_B,
+  PROP_ANGLE,
+  PROP_NOISE_LEVEL,
+  PROP_BLACK_SENSITIVITY,
+  PROP_WHITE_SENSITIVITY,
+  PROP_PREFER_PASSTHROUGH
+};
+
+/* Both templates advertise the same format list; the actual restriction to
+ * alpha-capable output formats happens in gst_alpha_transform_caps(). */
+static GstStaticPadTemplate gst_alpha_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+        "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, BGRx, xBGR, "
+        "RGBx, RGB, BGR, Y42B, YUY2, YVYU, UYVY, I420, YV12, Y41B } "))
+    );
+
+static GstStaticPadTemplate gst_alpha_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+        "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, BGRx, xBGR, "
+        "RGBx, RGB, BGR, Y42B, YUY2, YVYU, UYVY, I420, YV12, " "Y41B } "))
+    );
+
+/* The subset of formats that actually carry an alpha channel; used to
+ * constrain the downstream caps in transform_caps(). */
+static GstStaticCaps gst_alpha_alpha_caps =
+GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }"));
+
+/* FIXME: why do we need our own lock for this? */
+#define GST_ALPHA_LOCK(alpha) G_STMT_START { \
+  GST_LOG_OBJECT (alpha, "Locking alpha from thread %p", g_thread_self ()); \
+  g_mutex_lock (&alpha->lock); \
+  GST_LOG_OBJECT (alpha, "Locked alpha from thread %p", g_thread_self ()); \
+} G_STMT_END
+
+#define GST_ALPHA_UNLOCK(alpha) G_STMT_START { \
+  GST_LOG_OBJECT (alpha, "Unlocking alpha from thread %p", g_thread_self ()); \
+  g_mutex_unlock (&alpha->lock); \
+} G_STMT_END
+
+/* Forward declarations for the GstBaseTransform/GstVideoFilter vfuncs and
+ * GObject plumbing implemented further down in this file. */
+static GstCaps *gst_alpha_transform_caps (GstBaseTransform * btrans,
+    GstPadDirection direction, GstCaps * caps, GstCaps * filter);
+static void gst_alpha_before_transform (GstBaseTransform * btrans,
+    GstBuffer * buf);
+
+static gboolean gst_alpha_set_info (GstVideoFilter * filter,
+    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+    GstVideoInfo * out_info);
+static GstFlowReturn gst_alpha_transform_frame (GstVideoFilter * filter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame);
+
+static void gst_alpha_init_params_full (GstAlpha * alpha,
+    const GstVideoFormatInfo * in_info, const GstVideoFormatInfo * out_info);
+static void gst_alpha_init_params (GstAlpha * alpha);
+static void gst_alpha_set_process_function (GstAlpha * alpha);
+static gboolean gst_alpha_set_process_function_full (GstAlpha * alpha,
+    GstVideoInfo * in_info, GstVideoInfo * out_info);
+
+static void gst_alpha_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_alpha_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+static void gst_alpha_finalize (GObject * object);
+
+#define gst_alpha_parent_class parent_class
+G_DEFINE_TYPE (GstAlpha, gst_alpha, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (alpha, "alpha", GST_RANK_NONE, GST_TYPE_ALPHA);
+
+#define GST_TYPE_ALPHA_METHOD (gst_alpha_method_get_type())
+/* Lazily registers and returns the GstAlphaMethod enum GType backing the
+ * "method" property.  NOTE(review): the lazy-init flag is not guarded by
+ * g_once_init_enter(); presumably this is only reached from class_init /
+ * a single thread -- confirm. */
+static GType
+gst_alpha_method_get_type (void)
+{
+  static GType alpha_method_type = 0;
+  static const GEnumValue alpha_method[] = {
+    {ALPHA_METHOD_SET, "Set/adjust alpha channel", "set"},
+    {ALPHA_METHOD_GREEN, "Chroma Key on pure green", "green"},
+    {ALPHA_METHOD_BLUE, "Chroma Key on pure blue", "blue"},
+    {ALPHA_METHOD_CUSTOM, "Chroma Key on custom RGB values", "custom"},
+    {0, NULL, NULL},
+  };
+
+  if (!alpha_method_type) {
+    alpha_method_type = g_enum_register_static ("GstAlphaMethod", alpha_method);
+  }
+  return alpha_method_type;
+}
+
+/* GObject class initialisation: installs the property accessors, the
+ * element properties (most marked GST_PARAM_CONTROLLABLE so they can be
+ * animated via GstController), the static pad templates, and the
+ * GstBaseTransform / GstVideoFilter virtual functions implemented in this
+ * file. */
+static void
+gst_alpha_class_init (GstAlphaClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (gst_alpha_debug, "alpha", 0,
+      "alpha - Element for adding alpha channel to streams");
+
+  gobject_class->set_property = gst_alpha_set_property;
+  gobject_class->get_property = gst_alpha_get_property;
+  gobject_class->finalize = gst_alpha_finalize;
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_METHOD,
+      g_param_spec_enum ("method", "Method",
+          "How the alpha channels should be created", GST_TYPE_ALPHA_METHOD,
+          DEFAULT_METHOD, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ALPHA,
+      g_param_spec_double ("alpha", "Alpha", "The value for the alpha channel",
+          0.0, 1.0, DEFAULT_ALPHA,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TARGET_R,
+      g_param_spec_uint ("target-r", "Target Red",
+          "The red color value for custom RGB chroma keying", 0, 255,
+          DEFAULT_TARGET_R,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TARGET_G,
+      g_param_spec_uint ("target-g", "Target Green",
+          "The green color value for custom RGB chroma keying", 0, 255,
+          DEFAULT_TARGET_G,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TARGET_B,
+      g_param_spec_uint ("target-b", "Target Blue",
+          "The blue color value for custom RGB chroma keying", 0, 255,
+          DEFAULT_TARGET_B,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ANGLE,
+      g_param_spec_float ("angle", "Angle", "Size of the colorcube to change",
+          0.0, 90.0, DEFAULT_ANGLE,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_NOISE_LEVEL,
+      g_param_spec_float ("noise-level", "Noise Level", "Size of noise radius",
+          0.0, 64.0, DEFAULT_NOISE_LEVEL,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_BLACK_SENSITIVITY, g_param_spec_uint ("black-sensitivity",
+          "Black Sensitivity", "Sensitivity to dark colors", 0, 128,
+          DEFAULT_BLACK_SENSITIVITY,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_WHITE_SENSITIVITY, g_param_spec_uint ("white-sensitivity",
+          "White Sensitivity", "Sensitivity to bright colors", 0, 128,
+          DEFAULT_WHITE_SENSITIVITY,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_PREFER_PASSTHROUGH, g_param_spec_boolean ("prefer-passthrough",
+          "Prefer Passthrough",
+          "Don't do any processing for alpha=1.0 if possible",
+          DEFAULT_PREFER_PASSTHROUGH,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_set_static_metadata (gstelement_class, "Alpha filter",
+      "Filter/Effect/Video",
+      "Adds an alpha channel to video - uniform or via chroma-keying",
+      "Wim Taymans <wim.taymans@gmail.com>\n"
+      "Edward Hervey <edward.hervey@collabora.co.uk>\n"
+      "Jan Schmidt <thaytan@noraisin.net>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_alpha_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_alpha_src_template);
+
+  btrans_class->before_transform =
+      GST_DEBUG_FUNCPTR (gst_alpha_before_transform);
+  btrans_class->transform_caps = GST_DEBUG_FUNCPTR (gst_alpha_transform_caps);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_alpha_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_alpha_transform_frame);
+
+  /* Expose the enum so bindings/documentation tools pick it up. */
+  gst_type_mark_as_plugin_api (GST_TYPE_ALPHA_METHOD, 0);
+}
+
+/* Instance initialisation: set all properties to their defaults and create
+ * the per-instance lock protecting property/parameter state. */
+static void
+gst_alpha_init (GstAlpha * alpha)
+{
+  alpha->alpha = DEFAULT_ALPHA;
+  alpha->method = DEFAULT_METHOD;
+  alpha->target_r = DEFAULT_TARGET_R;
+  alpha->target_g = DEFAULT_TARGET_G;
+  alpha->target_b = DEFAULT_TARGET_B;
+  alpha->angle = DEFAULT_ANGLE;
+  alpha->noise_level = DEFAULT_NOISE_LEVEL;
+  alpha->black_sensitivity = DEFAULT_BLACK_SENSITIVITY;
+  alpha->white_sensitivity = DEFAULT_WHITE_SENSITIVITY;
+
+  g_mutex_init (&alpha->lock);
+}
+
+/* GObject finalize: release the instance mutex and chain up. */
+static void
+gst_alpha_finalize (GObject * object)
+{
+  GstAlpha *alpha = GST_ALPHA (object);
+
+  g_mutex_clear (&alpha->lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Property setter.  Runs under the instance lock.  Properties that affect
+ * the chroma-key parameters re-run gst_alpha_init_params(); properties that
+ * can change passthrough eligibility (method/alpha/prefer-passthrough) set
+ * "reconfigure" so the source caps are renegotiated at the bottom. */
+static void
+gst_alpha_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAlpha *alpha = GST_ALPHA (object);
+  gboolean reconfigure = FALSE;
+
+  GST_ALPHA_LOCK (alpha);
+  switch (prop_id) {
+    case PROP_METHOD:{
+      gint method = g_value_get_enum (value);
+
+      /* renegotiate only when switching into or out of ALPHA_METHOD_SET
+       * while passthrough would otherwise be possible */
+      reconfigure = (method != alpha->method) && (method == ALPHA_METHOD_SET
+          || alpha->method == ALPHA_METHOD_SET) && (alpha->alpha == 1.0)
+          && (alpha->prefer_passthrough);
+      alpha->method = method;
+
+      gst_alpha_set_process_function (alpha);
+      gst_alpha_init_params (alpha);
+      break;
+    }
+    case PROP_ALPHA:{
+      gdouble a = g_value_get_double (value);
+
+      /* renegotiate when the alpha value crosses the 1.0 boundary in
+       * passthrough-preferring SET mode */
+      reconfigure = (a != alpha->alpha) && (a == 1.0 || alpha->alpha == 1.0)
+          && (alpha->method == ALPHA_METHOD_SET) && (alpha->prefer_passthrough);
+      alpha->alpha = a;
+      break;
+    }
+    case PROP_TARGET_R:
+      alpha->target_r = g_value_get_uint (value);
+      gst_alpha_init_params (alpha);
+      break;
+    case PROP_TARGET_G:
+      alpha->target_g = g_value_get_uint (value);
+      gst_alpha_init_params (alpha);
+      break;
+    case PROP_TARGET_B:
+      alpha->target_b = g_value_get_uint (value);
+      gst_alpha_init_params (alpha);
+      break;
+    case PROP_ANGLE:
+      alpha->angle = g_value_get_float (value);
+      gst_alpha_init_params (alpha);
+      break;
+    case PROP_NOISE_LEVEL:
+      alpha->noise_level = g_value_get_float (value);
+      gst_alpha_init_params (alpha);
+      break;
+    case PROP_BLACK_SENSITIVITY:
+      alpha->black_sensitivity = g_value_get_uint (value);
+      break;
+    case PROP_WHITE_SENSITIVITY:
+      alpha->white_sensitivity = g_value_get_uint (value);
+      break;
+    case PROP_PREFER_PASSTHROUGH:{
+      gboolean prefer_passthrough = g_value_get_boolean (value);
+
+      /* "!!" normalises both sides to 0/1 before comparing */
+      reconfigure = ((! !prefer_passthrough) != (! !alpha->prefer_passthrough))
+          && (alpha->method == ALPHA_METHOD_SET) && (alpha->alpha == 1.0);
+      alpha->prefer_passthrough = prefer_passthrough;
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+
+  if (reconfigure)
+    gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM_CAST (alpha));
+
+  GST_ALPHA_UNLOCK (alpha);
+}
+
+/* Property getter.  NOTE(review): reads are done without taking
+ * alpha->lock, unlike the setter -- presumably acceptable for these
+ * word-sized fields; confirm against the project's locking conventions. */
+static void
+gst_alpha_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstAlpha *alpha = GST_ALPHA (object);
+
+  switch (prop_id) {
+    case PROP_METHOD:
+      g_value_set_enum (value, alpha->method);
+      break;
+    case PROP_ALPHA:
+      g_value_set_double (value, alpha->alpha);
+      break;
+    case PROP_TARGET_R:
+      g_value_set_uint (value, alpha->target_r);
+      break;
+    case PROP_TARGET_G:
+      g_value_set_uint (value, alpha->target_g);
+      break;
+    case PROP_TARGET_B:
+      g_value_set_uint (value, alpha->target_b);
+      break;
+    case PROP_ANGLE:
+      g_value_set_float (value, alpha->angle);
+      break;
+    case PROP_NOISE_LEVEL:
+      g_value_set_float (value, alpha->noise_level);
+      break;
+    case PROP_BLACK_SENSITIVITY:
+      g_value_set_uint (value, alpha->black_sensitivity);
+      break;
+    case PROP_WHITE_SENSITIVITY:
+      g_value_set_uint (value, alpha->white_sensitivity);
+      break;
+    case PROP_PREFER_PASSTHROUGH:
+      g_value_set_boolean (value, alpha->prefer_passthrough);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstBaseTransform::transform_caps implementation.  Computes what the
+ * opposite pad can produce: the format/colorimetry/chroma-site fields are
+ * dropped (the element can convert those); in the sink->src direction the
+ * result is limited to alpha-capable formats, except that when passthrough
+ * is currently possible (prefer-passthrough, method=set, alpha=1.0) the
+ * unmodified input caps are offered first. */
+static GstCaps *
+gst_alpha_transform_caps (GstBaseTransform * btrans,
+    GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+{
+  GstAlpha *alpha = GST_ALPHA (btrans);
+  GstCaps *ret, *tmp, *tmp2;
+  GstStructure *structure;
+  gint i;
+
+  tmp = gst_caps_new_empty ();
+
+  GST_ALPHA_LOCK (alpha);
+  /* copy each structure with the convertible fields removed */
+  for (i = 0; i < gst_caps_get_size (caps); i++) {
+    structure = gst_structure_copy (gst_caps_get_structure (caps, i));
+
+    gst_structure_remove_field (structure, "format");
+    gst_structure_remove_field (structure, "colorimetry");
+    gst_structure_remove_field (structure, "chroma-site");
+
+    gst_caps_append_structure (tmp, structure);
+  }
+
+  if (direction == GST_PAD_SINK) {
+    /* downstream must get a format that can carry alpha */
+    tmp2 = gst_static_caps_get (&gst_alpha_alpha_caps);
+    ret = gst_caps_intersect (tmp, tmp2);
+    gst_caps_unref (tmp);
+    gst_caps_unref (tmp2);
+    tmp = ret;
+    ret = NULL;
+
+    if (alpha->prefer_passthrough && alpha->method == ALPHA_METHOD_SET
+        && alpha->alpha == 1.0) {
+      /* passthrough possible: prefer the untouched input caps */
+      ret = gst_caps_copy (caps);
+      gst_caps_append (ret, tmp);
+      tmp = NULL;
+    } else {
+      ret = tmp;
+      tmp = NULL;
+    }
+  } else {
+    ret = tmp;
+    tmp = NULL;
+  }
+
+  GST_DEBUG_OBJECT (alpha,
+      "Transformed %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT, caps, ret);
+
+  if (filter) {
+    GstCaps *intersection;
+
+    GST_DEBUG_OBJECT (alpha, "Using filter caps %" GST_PTR_FORMAT, filter);
+    intersection =
+        gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (ret);
+    ret = intersection;
+    GST_DEBUG_OBJECT (alpha, "Intersection %" GST_PTR_FORMAT, ret);
+  }
+
+
+  GST_ALPHA_UNLOCK (alpha);
+
+  return ret;
+}
+
+/* GstVideoFilter::set_info implementation.  Caches whether input/output use
+ * BT.601 (SDTV) colorimetry, decides whether passthrough can be enabled
+ * (same format, same colorimetry, method=set, alpha=1.0, prefer-passthrough)
+ * and selects the per-format processing function.  Fails only when no
+ * processing function exists for the caps AND passthrough is impossible. */
+static gboolean
+gst_alpha_set_info (GstVideoFilter * filter,
+    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+    GstVideoInfo * out_info)
+{
+  GstAlpha *alpha = GST_ALPHA (filter);
+  gboolean passthrough;
+
+  GST_ALPHA_LOCK (alpha);
+
+  /* SDTV here means BT.601 matrix coefficients */
+  alpha->in_sdtv = in_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
+  alpha->out_sdtv =
+      out_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
+
+  passthrough = alpha->prefer_passthrough &&
+      GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info)
+      && alpha->in_sdtv == alpha->out_sdtv && alpha->method == ALPHA_METHOD_SET
+      && alpha->alpha == 1.0;
+
+  GST_DEBUG_OBJECT (alpha,
+      "Setting caps %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT
+      " (passthrough: %d)", incaps, outcaps, passthrough);
+  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (filter),
+      passthrough);
+
+  if (!gst_alpha_set_process_function_full (alpha, in_info, out_info)
+      && !passthrough)
+    goto no_process;
+
+  gst_alpha_init_params_full (alpha, in_info->finfo, out_info->finfo);
+
+  GST_ALPHA_UNLOCK (alpha);
+
+  return TRUE;
+
+  /* ERRORS */
+no_process:
+  {
+    GST_WARNING_OBJECT (alpha,
+        "No processing function for this caps and no passthrough mode");
+    GST_ALPHA_UNLOCK (alpha);
+    return FALSE;
+  }
+}
+
+/* based on http://www.cs.utah.edu/~michael/chroma/
+ */
+/* Core chroma-key kernel shared by all chroma-key process functions.
+ *   a       - incoming alpha (0-255)
+ *   y, u, v - in/out: luma and signed chroma (-128..127) of the pixel;
+ *             the foreground (key) colour is suppressed in place
+ *   cr, cb  - direction of the key colour in CbCr space, presumably
+ *             precomputed in gst_alpha_init_params() -- confirm
+ *   smin, smax - luma range outside of which the pixel is kept as-is
+ * Remaining parameters are precomputed fixed-point key constants.
+ * Returns the new alpha value for the pixel. */
+static inline gint
+chroma_keying_yuv (gint a, gint * y, gint * u,
+    gint * v, gint cr, gint cb, gint smin, gint smax, guint8 accept_angle_tg,
+    guint8 accept_angle_ctg, guint8 one_over_kc, guint8 kfgy_scale, gint8 kg,
+    guint noise_level2)
+{
+  gint tmp, tmp1;
+  gint x1, y1;
+  gint x, z;
+  gint b_alpha;
+
+  /* too dark or too bright, keep alpha */
+  if (*y < smin || *y > smax)
+    return a;
+
+  /* Convert foreground to XZ coords where X direction is defined by
+     the key color */
+  tmp = ((*u) * cb + (*v) * cr) >> 7;
+  x = CLAMP (tmp, -128, 127);
+  tmp = ((*v) * cb - (*u) * cr) >> 7;
+  z = CLAMP (tmp, -128, 127);
+
+  /* WARNING: accept angle should never be set greater than "somewhat less
+     than 90 degrees" to avoid dealing with negative/infinite tg. In reality,
+     80 degrees should be enough if foreground is reasonable. If this seems
+     to be a problem, go to alternative ways of checking point position
+     (scalar product or line equations). This angle should not be too small
+     either to avoid infinite ctg (used to suppress foreground without use of
+     division) */
+
+  tmp = (x * accept_angle_tg) >> 4;
+  tmp = MIN (tmp, 127);
+
+  if (abs (z) > tmp) {
+    /* keep foreground Kfg = 0 */
+    return a;
+  }
+  /* Compute Kfg (implicitly) and Kbg, suppress foreground in XZ coord
+     according to Kfg */
+  tmp = (z * accept_angle_ctg) >> 4;
+  tmp = CLAMP (tmp, -128, 127);
+  x1 = abs (tmp);
+  y1 = z;
+
+  tmp1 = x - x1;
+  tmp1 = MAX (tmp1, 0);
+  b_alpha = (tmp1 * one_over_kc) / 2;
+  b_alpha = 255 - CLAMP (b_alpha, 0, 255);
+  /* scale the key-derived alpha by the incoming alpha */
+  b_alpha = (a * b_alpha) >> 8;
+
+  tmp = (tmp1 * kfgy_scale) >> 4;
+  tmp1 = MIN (tmp, 255);
+
+  *y = (*y < tmp1) ? 0 : *y - tmp1;
+
+  /* Convert suppressed foreground back to CbCr */
+  tmp = (x1 * cb - y1 * cr) >> 7;
+  *u = CLAMP (tmp, -128, 127);
+
+  tmp = (x1 * cr + y1 * cb) >> 7;
+  *v = CLAMP (tmp, -128, 127);
+
+  /* Deal with noise. For now, a circle around the key color with
+     radius of noise_level treated as exact key color. Introduces
+     sharp transitions.
+   */
+  tmp = z * z + (x - kg) * (x - kg);
+  tmp = MIN (tmp, 0xffff);
+
+  if (tmp < noise_level2)
+    b_alpha = 0;
+
+  return b_alpha;
+}
+
+/* Applies row "o" of a 3x4 fixed-point matrix "m" (coefficients scaled by
+ * 256, fourth column is a constant offset) to the three components. */
+#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
+
+/* Process function: ARGB-family input (any channel order, resolved via the
+ * per-component offsets) to AYUV, scaling the existing alpha channel by the
+ * "alpha" property (s_alpha is 0..256 fixed point, applied with >>8). */
+static void
+gst_alpha_set_argb_ayuv (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint i, j;
+  gint matrix[12];
+  gint y, u, v;
+  gint o[4];
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* o[0]=alpha, o[1..3]=R,G,B byte offsets within an input pixel */
+  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+  o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+  o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
+
+  /* RGB->YCbCr matrix chosen by the *output* colorimetry */
+  memcpy (matrix,
+      alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+      cog_rgb_to_ycbcr_matrix_8bit_hdtv, 12 * sizeof (gint));
+
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width; j++) {
+      dest[0] = (src[o[0]] * s_alpha) >> 8;
+
+      y = APPLY_MATRIX (matrix, 0, src[o[1]], src[o[2]], src[o[3]]);
+      u = APPLY_MATRIX (matrix, 1, src[o[1]], src[o[2]], src[o[3]]);
+      v = APPLY_MATRIX (matrix, 2, src[o[1]], src[o[2]], src[o[3]]);
+
+      dest[1] = y;
+      dest[2] = u;
+      dest[3] = v;
+
+      dest += 4;
+      src += 4;
+    }
+  }
+}
+
+/* Process function: ARGB-family input to AYUV with chroma keying.  Each
+ * pixel is converted to YCbCr and passed through chroma_keying_yuv(); the
+ * existing alpha is first scaled by the "alpha" property (pa, 0..256). */
+static void
+gst_alpha_chroma_key_argb_ayuv (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint i, j;
+  gint a, y, u, v;
+  gint r, g, b;
+  gint smin, smax;
+  gint pa = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  gint8 cb = alpha->cb, cr = alpha->cr;
+  gint8 kg = alpha->kg;
+  guint8 accept_angle_tg = alpha->accept_angle_tg;
+  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
+  guint8 one_over_kc = alpha->one_over_kc;
+  guint8 kfgy_scale = alpha->kfgy_scale;
+  guint noise_level2 = alpha->noise_level2;
+  gint matrix[12];
+  gint o[4];
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* o[0]=alpha, o[1..3]=R,G,B byte offsets within an input pixel */
+  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+  o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+  o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
+
+  smin = 128 - alpha->black_sensitivity;
+  smax = 128 + alpha->white_sensitivity;
+
+  memcpy (matrix,
+      alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+      cog_rgb_to_ycbcr_matrix_8bit_hdtv, 12 * sizeof (gint));
+
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width; j++) {
+      a = (src[o[0]] * pa) >> 8;
+      r = src[o[1]];
+      g = src[o[2]];
+      b = src[o[3]];
+
+      /* chroma_keying_yuv() works on signed chroma, hence the -128/+128 */
+      y = APPLY_MATRIX (matrix, 0, r, g, b);
+      u = APPLY_MATRIX (matrix, 1, r, g, b) - 128;
+      v = APPLY_MATRIX (matrix, 2, r, g, b) - 128;
+
+      a = chroma_keying_yuv (a, &y, &u, &v, cr, cb,
+          smin, smax, accept_angle_tg, accept_angle_ctg,
+          one_over_kc, kfgy_scale, kg, noise_level2);
+
+      u += 128;
+      v += 128;
+
+      dest[0] = a;
+      dest[1] = y;
+      dest[2] = u;
+      dest[3] = v;
+
+      src += 4;
+      dest += 4;
+    }
+  }
+}
+
+/* Process function: ARGB-family to ARGB-family (possibly different channel
+ * orders on input and output).  Copies R/G/B through unchanged and scales
+ * the alpha channel by the "alpha" property. */
+static void
+gst_alpha_set_argb_argb (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  gint i, j;
+  gint p[4], o[4];
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* p[] = output component offsets, o[] = input component offsets
+   * (index 0 is alpha, 1..3 are R, G, B) */
+  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
+
+  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+  o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+  o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
+
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width; j++) {
+      dest[p[0]] = (src[o[0]] * s_alpha) >> 8;
+
+      dest[p[1]] = src[o[1]];
+      dest[p[2]] = src[o[2]];
+      dest[p[3]] = src[o[3]];
+
+      dest += 4;
+      src += 4;
+    }
+  }
+}
+
+/* Process function: ARGB-family to ARGB-family with chroma keying.  Each
+ * pixel takes a round-trip RGB -> YCbCr -> chroma_keying_yuv() -> RGB.
+ * NOTE(review): the SDTV matrices are used unconditionally here; the YCbCr
+ * space is only an internal intermediate, so the in/out colorimetry flags
+ * presumably don't apply -- confirm this is intentional. */
+static void
+gst_alpha_chroma_key_argb_argb (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint i, j;
+  gint a, y, u, v;
+  gint r, g, b;
+  gint smin, smax;
+  gint pa = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  gint8 cb = alpha->cb, cr = alpha->cr;
+  gint8 kg = alpha->kg;
+  guint8 accept_angle_tg = alpha->accept_angle_tg;
+  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
+  guint8 one_over_kc = alpha->one_over_kc;
+  guint8 kfgy_scale = alpha->kfgy_scale;
+  guint noise_level2 = alpha->noise_level2;
+  gint matrix[12], matrix2[12];
+  gint p[4], o[4];
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* p[] = output component offsets, o[] = input component offsets */
+  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
+
+  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 3);
+  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+  o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+  o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
+
+  smin = 128 - alpha->black_sensitivity;
+  smax = 128 + alpha->white_sensitivity;
+
+  memcpy (matrix, cog_rgb_to_ycbcr_matrix_8bit_sdtv, 12 * sizeof (gint));
+  memcpy (matrix2, cog_ycbcr_to_rgb_matrix_8bit_sdtv, 12 * sizeof (gint));
+
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width; j++) {
+      a = (src[o[0]] * pa) >> 8;
+      r = src[o[1]];
+      g = src[o[2]];
+      b = src[o[3]];
+
+      y = APPLY_MATRIX (matrix, 0, r, g, b);
+      u = APPLY_MATRIX (matrix, 1, r, g, b) - 128;
+      v = APPLY_MATRIX (matrix, 2, r, g, b) - 128;
+
+      a = chroma_keying_yuv (a, &y, &u, &v, cr, cb,
+          smin, smax, accept_angle_tg, accept_angle_ctg,
+          one_over_kc, kfgy_scale, kg, noise_level2);
+
+      u += 128;
+      v += 128;
+
+      r = APPLY_MATRIX (matrix2, 0, y, u, v);
+      g = APPLY_MATRIX (matrix2, 1, y, u, v);
+      b = APPLY_MATRIX (matrix2, 2, y, u, v);
+
+      dest[p[0]] = a;
+      dest[p[1]] = CLAMP (r, 0, 255);
+      dest[p[2]] = CLAMP (g, 0, 255);
+      dest[p[3]] = CLAMP (b, 0, 255);
+
+      src += 4;
+      dest += 4;
+    }
+  }
+}
+
+/* Process function: AYUV input to ARGB-family output with alpha scaling.
+ * The YCbCr->RGB matrix is chosen from the *input* colorimetry; RGB results
+ * are clamped to 0..255 after the fixed-point matrix multiply. */
+static void
+gst_alpha_set_ayuv_argb (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  gint y, x;
+  gint matrix[12];
+  gint r, g, b;
+  gint p[4];
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* output byte offsets: p[0]=alpha, p[1..3]=R,G,B */
+  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
+
+  memcpy (matrix,
+      alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
+      cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint));
+
+  for (y = 0; y < height; y++) {
+    for (x = 0; x < width; x++) {
+      dest[p[0]] = (src[0] * s_alpha) >> 8;
+
+      r = APPLY_MATRIX (matrix, 0, src[1], src[2], src[3]);
+      g = APPLY_MATRIX (matrix, 1, src[1], src[2], src[3]);
+      b = APPLY_MATRIX (matrix, 2, src[1], src[2], src[3]);
+
+      dest[p[1]] = CLAMP (r, 0, 255);
+      dest[p[2]] = CLAMP (g, 0, 255);
+      dest[p[3]] = CLAMP (b, 0, 255);
+
+      dest += 4;
+      src += 4;
+    }
+  }
+}
+
+/* Process function: AYUV input to ARGB-family output with chroma keying.
+ * Keys in the native YUV space first, then converts to RGB using the
+ * matrix matching the input colorimetry. */
+static void
+gst_alpha_chroma_key_ayuv_argb (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint i, j;
+  gint a, y, u, v;
+  gint r, g, b;
+  gint smin, smax;
+  gint pa = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  gint8 cb = alpha->cb, cr = alpha->cr;
+  gint8 kg = alpha->kg;
+  guint8 accept_angle_tg = alpha->accept_angle_tg;
+  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
+  guint8 one_over_kc = alpha->one_over_kc;
+  guint8 kfgy_scale = alpha->kfgy_scale;
+  guint noise_level2 = alpha->noise_level2;
+  gint matrix[12];
+  gint p[4];
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* output byte offsets: p[0]=alpha, p[1..3]=R,G,B */
+  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
+
+  smin = 128 - alpha->black_sensitivity;
+  smax = 128 + alpha->white_sensitivity;
+
+  memcpy (matrix,
+      alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
+      cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint));
+
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width; j++) {
+      a = (src[0] * pa) >> 8;
+      y = src[1];
+      u = src[2] - 128;
+      v = src[3] - 128;
+
+      a = chroma_keying_yuv (a, &y, &u, &v, cr, cb,
+          smin, smax, accept_angle_tg, accept_angle_ctg,
+          one_over_kc, kfgy_scale, kg, noise_level2);
+
+      u += 128;
+      v += 128;
+
+      r = APPLY_MATRIX (matrix, 0, y, u, v);
+      g = APPLY_MATRIX (matrix, 1, y, u, v);
+      b = APPLY_MATRIX (matrix, 2, y, u, v);
+
+      dest[p[0]] = a;
+      dest[p[1]] = CLAMP (r, 0, 255);
+      dest[p[2]] = CLAMP (g, 0, 255);
+      dest[p[3]] = CLAMP (b, 0, 255);
+
+      src += 4;
+      dest += 4;
+    }
+  }
+}
+
+/* Process function: AYUV to AYUV with alpha scaling.  Fast path when input
+ * and output colorimetry match; otherwise every pixel is additionally run
+ * through the SDTV<->HDTV conversion matrix. */
+static void
+gst_alpha_set_ayuv_ayuv (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint s_alpha = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  gint y, x;
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  if (alpha->in_sdtv == alpha->out_sdtv) {
+    /* same colorimetry: only scale alpha, copy YUV through */
+    for (y = 0; y < height; y++) {
+      for (x = 0; x < width; x++) {
+        dest[0] = (src[0] * s_alpha) >> 8;
+        dest[1] = src[1];
+        dest[2] = src[2];
+        dest[3] = src[3];
+
+        dest += 4;
+        src += 4;
+      }
+    }
+  } else {
+    gint matrix[12];
+
+    memcpy (matrix,
+        alpha->out_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+
+    for (y = 0; y < height; y++) {
+      for (x = 0; x < width; x++) {
+        dest[0] = (src[0] * s_alpha) >> 8;
+        dest[1] = APPLY_MATRIX (matrix, 0, src[1], src[2], src[3]);
+        dest[2] = APPLY_MATRIX (matrix, 1, src[1], src[2], src[3]);
+        dest[3] = APPLY_MATRIX (matrix, 2, src[1], src[2], src[3]);
+
+        dest += 4;
+        src += 4;
+      }
+    }
+  }
+}
+
+/* Process function: AYUV to AYUV with chroma keying.  Fast path when input
+ * and output colorimetry match; otherwise the pixel is first converted
+ * between SDTV and HDTV YCbCr before keying. */
+static void
+gst_alpha_chroma_key_ayuv_ayuv (const GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+  const guint8 *src;
+  guint8 *dest;
+  gint width, height;
+  gint i, j;
+  gint a, y, u, v;
+  gint smin, smax;
+  gint pa = CLAMP ((gint) (alpha->alpha * 256), 0, 256);
+  gint8 cb = alpha->cb, cr = alpha->cr;
+  gint8 kg = alpha->kg;
+  guint8 accept_angle_tg = alpha->accept_angle_tg;
+  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
+  guint8 one_over_kc = alpha->one_over_kc;
+  guint8 kfgy_scale = alpha->kfgy_scale;
+  guint noise_level2 = alpha->noise_level2;
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  smin = 128 - alpha->black_sensitivity;
+  smax = 128 + alpha->white_sensitivity;
+
+  if (alpha->in_sdtv == alpha->out_sdtv) {
+    for (i = 0; i < height; i++) {
+      for (j = 0; j < width; j++) {
+        a = (src[0] * pa) >> 8;
+        y = src[1];
+        u = src[2] - 128;
+        v = src[3] - 128;
+
+        a = chroma_keying_yuv (a, &y, &u, &v, cr, cb,
+            smin, smax, accept_angle_tg, accept_angle_ctg,
+            one_over_kc, kfgy_scale, kg, noise_level2);
+
+        u += 128;
+        v += 128;
+
+        dest[0] = a;
+        dest[1] = y;
+        dest[2] = u;
+        dest[3] = v;
+
+        src += 4;
+        dest += 4;
+      }
+    }
+  } else {
+    gint matrix[12];
+
+    memcpy (matrix,
+        alpha->out_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+
+    for (i = 0; i < height; i++) {
+      for (j = 0; j < width; j++) {
+        a = (src[0] * pa) >> 8;
+        /* convert to the output colorimetry before keying */
+        y = APPLY_MATRIX (matrix, 0, src[1], src[2], src[3]);
+        u = APPLY_MATRIX (matrix, 1, src[1], src[2], src[3]) - 128;
+        v = APPLY_MATRIX (matrix, 2, src[1], src[2], src[3]) - 128;
+
+        a = chroma_keying_yuv (a, &y, &u, &v, cr, cb,
+            smin, smax, accept_angle_tg, accept_angle_ctg,
+            one_over_kc, kfgy_scale, kg, noise_level2);
+
+        u += 128;
+        v += 128;
+
+        dest[0] = a;
+        dest[1] = y;
+        dest[2] = u;
+        dest[3] = v;
+
+        src += 4;
+        dest += 4;
+      }
+    }
+  }
+}
+
+static void
+gst_alpha_set_rgb_ayuv (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
+ gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
+ gint i, j;
+ gint matrix[12];
+ gint y, u, v;
+ gint o[3];
+ gint bpp;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
+
+ memcpy (matrix,
+ alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv, 12 * sizeof (gint));
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ dest[0] = s_alpha;
+
+ y = APPLY_MATRIX (matrix, 0, src[o[0]], src[o[1]], src[o[2]]);
+ u = APPLY_MATRIX (matrix, 1, src[o[0]], src[o[1]], src[o[2]]);
+ v = APPLY_MATRIX (matrix, 2, src[o[0]], src[o[1]], src[o[2]]);
+
+ dest[1] = y;
+ dest[2] = u;
+ dest[3] = v;
+
+ dest += 4;
+ src += bpp;
+ }
+ }
+}
+
/* Packed RGB -> AYUV chroma keying.
 *
 * Converts each source pixel to Y'CbCr (matrix chosen by the output
 * colorimetry), keys it against the configured key colour with
 * chroma_keying_yuv(), and writes the result as packed AYUV. */
static void
gst_alpha_chroma_key_rgb_ayuv (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  const guint8 *src;
  guint8 *dest;
  gint width, height;
  gint i, j;
  gint a, y, u, v;
  gint r, g, b;
  gint smin, smax;
  /* base alpha derived from the "alpha" property */
  gint pa = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  gint8 cb = alpha->cb, cr = alpha->cr;
  gint8 kg = alpha->kg;
  guint8 accept_angle_tg = alpha->accept_angle_tg;
  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
  guint8 one_over_kc = alpha->one_over_kc;
  guint8 kfgy_scale = alpha->kfgy_scale;
  guint noise_level2 = alpha->noise_level2;
  gint matrix[12];
  gint o[3];
  gint bpp;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);

  /* per-pixel byte offsets of R, G and B in the source pixel */
  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
  o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);

  /* luma window outside of which pixels are never keyed */
  smin = 128 - alpha->black_sensitivity;
  smax = 128 + alpha->white_sensitivity;

  memcpy (matrix,
      alpha->out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
      cog_rgb_to_ycbcr_matrix_8bit_hdtv, 12 * sizeof (gint));

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      a = pa;
      r = src[o[0]];
      g = src[o[1]];
      b = src[o[2]];

      y = APPLY_MATRIX (matrix, 0, r, g, b);
      /* centred chroma in [-128, 127] as expected by chroma_keying_yuv() */
      u = APPLY_MATRIX (matrix, 1, r, g, b) - 128;
      v = APPLY_MATRIX (matrix, 2, r, g, b) - 128;

      a = chroma_keying_yuv (a, &y, &u, &v, cr, cb,
          smin, smax, accept_angle_tg, accept_angle_ctg,
          one_over_kc, kfgy_scale, kg, noise_level2);

      u += 128;
      v += 128;

      dest[0] = a;
      dest[1] = y;
      dest[2] = u;
      dest[3] = v;

      src += bpp;
      dest += 4;
    }
  }
}
+
+static void
+gst_alpha_set_rgb_argb (const GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame, GstAlpha * alpha)
+{
+ const guint8 *src;
+ guint8 *dest;
+ gint width, height;
+ gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
+ gint i, j;
+ gint p[4], o[3];
+ gint bpp;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
+ o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
+ o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);
+
+ p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ dest[p[0]] = s_alpha;
+
+ dest[p[1]] = src[o[0]];
+ dest[p[2]] = src[o[1]];
+ dest[p[3]] = src[o[2]];
+
+ dest += 4;
+ src += bpp;
+ }
+ }
+}
+
/* Packed RGB -> packed RGB-with-alpha chroma keying.
 *
 * RGB/RGB keying always goes through SDTV Y'CbCr: convert the pixel to
 * SDTV YUV, run chroma_keying_yuv(), then convert back to RGB (matches
 * the strategy comment in gst_alpha_init_params_full()). */
static void
gst_alpha_chroma_key_rgb_argb (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  const guint8 *src;
  guint8 *dest;
  gint width, height;
  gint i, j;
  gint a, y, u, v;
  gint r, g, b;
  gint smin, smax;
  gint pa = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  gint8 cb = alpha->cb, cr = alpha->cr;
  gint8 kg = alpha->kg;
  guint8 accept_angle_tg = alpha->accept_angle_tg;
  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
  guint8 one_over_kc = alpha->one_over_kc;
  guint8 kfgy_scale = alpha->kfgy_scale;
  guint noise_level2 = alpha->noise_level2;
  /* matrix: RGB -> SDTV YCbCr, matrix2: SDTV YCbCr -> RGB */
  gint matrix[12], matrix2[12];
  gint p[4], o[3];
  gint bpp;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);

  /* source byte offsets of R, G, B within one pixel */
  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
  o[2] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);

  /* destination byte offsets: alpha, R, G, B */
  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);

  /* luma window outside of which pixels are never keyed */
  smin = 128 - alpha->black_sensitivity;
  smax = 128 + alpha->white_sensitivity;

  memcpy (matrix, cog_rgb_to_ycbcr_matrix_8bit_sdtv, 12 * sizeof (gint));
  memcpy (matrix2, cog_ycbcr_to_rgb_matrix_8bit_sdtv, 12 * sizeof (gint));

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      a = pa;
      r = src[o[0]];
      g = src[o[1]];
      b = src[o[2]];

      y = APPLY_MATRIX (matrix, 0, r, g, b);
      /* centred chroma in [-128, 127] as expected by chroma_keying_yuv() */
      u = APPLY_MATRIX (matrix, 1, r, g, b) - 128;
      v = APPLY_MATRIX (matrix, 2, r, g, b) - 128;

      a = chroma_keying_yuv (a, &y, &u, &v, cr, cb,
          smin, smax, accept_angle_tg, accept_angle_ctg,
          one_over_kc, kfgy_scale, kg, noise_level2);

      u += 128;
      v += 128;

      /* back to RGB; the matrix output may overshoot, hence the CLAMPs */
      r = APPLY_MATRIX (matrix2, 0, y, u, v);
      g = APPLY_MATRIX (matrix2, 1, y, u, v);
      b = APPLY_MATRIX (matrix2, 2, y, u, v);

      dest[p[0]] = a;
      dest[p[1]] = CLAMP (r, 0, 255);
      dest[p[2]] = CLAMP (g, 0, 255);
      dest[p[3]] = CLAMP (b, 0, 255);

      src += bpp;
      dest += 4;
    }
  }
}
+
/* Planar YUV (I420/YV12/Y444/Y42B/Y41B) -> packed AYUV with a constant
 * alpha from the "alpha" property.
 *
 * h_subs/v_subs are the horizontal/vertical chroma subsampling factors;
 * the chroma pointers advance only every h_subs pixels within a row and
 * every v_subs rows, so each chroma sample is replicated over its whole
 * subsampling block. */
static void
gst_alpha_set_planar_yuv_ayuv (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  guint8 *dest;
  gint width, height;
  gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  /* *_tmp pointers remember the start of the current row for each plane */
  const guint8 *srcY, *srcY_tmp;
  const guint8 *srcU, *srcU_tmp;
  const guint8 *srcV, *srcV_tmp;
  gint i, j;
  gint y_stride, uv_stride;
  gint v_subs, h_subs;

  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
  uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);

  srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
  srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
  srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);

  /* chroma subsampling factors per supported input format */
  switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      v_subs = h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y444:
      v_subs = h_subs = 1;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      v_subs = 1;
      h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      v_subs = 1;
      h_subs = 4;
      break;
    default:
      g_assert_not_reached ();
      return;
  }

  if (alpha->in_sdtv == alpha->out_sdtv) {
    /* same colorimetry: plain repack */
    for (i = 0; i < height; i++) {
      for (j = 0; j < width; j++) {
        dest[0] = b_alpha;
        dest[1] = srcY[0];
        dest[2] = srcU[0];
        dest[3] = srcV[0];

        dest += 4;
        srcY++;
        /* advance chroma only once per h_subs luma samples */
        if ((j + 1) % h_subs == 0) {
          srcU++;
          srcV++;
        }
      }

      srcY_tmp = srcY = srcY_tmp + y_stride;
      /* advance chroma rows only once per v_subs luma rows */
      if ((i + 1) % v_subs == 0) {
        srcU_tmp = srcU = srcU_tmp + uv_stride;
        srcV_tmp = srcV = srcV_tmp + uv_stride;
      } else {
        srcU = srcU_tmp;
        srcV = srcV_tmp;
      }
    }
  } else {
    gint matrix[12];
    gint a, y, u, v;

    /* repack plus SDTV <-> HDTV Y'CbCr conversion */
    memcpy (matrix,
        alpha->out_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));

    for (i = 0; i < height; i++) {
      for (j = 0; j < width; j++) {
        a = b_alpha;
        y = srcY[0];
        u = srcU[0];
        v = srcV[0];

        dest[0] = a;
        dest[1] = APPLY_MATRIX (matrix, 0, y, u, v);
        dest[2] = APPLY_MATRIX (matrix, 1, y, u, v);
        dest[3] = APPLY_MATRIX (matrix, 2, y, u, v);

        dest += 4;
        srcY++;
        if ((j + 1) % h_subs == 0) {
          srcU++;
          srcV++;
        }
      }

      srcY_tmp = srcY = srcY_tmp + y_stride;
      if ((i + 1) % v_subs == 0) {
        srcU_tmp = srcU = srcU_tmp + uv_stride;
        srcV_tmp = srcV = srcV_tmp + uv_stride;
      } else {
        srcU = srcU_tmp;
        srcV = srcV_tmp;
      }
    }
  }
}
+
/* Planar YUV (I420/YV12/Y444/Y42B/Y41B) -> packed AYUV chroma keying.
 *
 * Same plane walking as gst_alpha_set_planar_yuv_ayuv() (chroma pointers
 * advance once per h_subs pixels / v_subs rows), but each output pixel is
 * keyed against the configured colour via chroma_keying_yuv(). */
static void
gst_alpha_chroma_key_planar_yuv_ayuv (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  guint8 *dest;
  gint width, height;
  gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  /* *_tmp pointers remember the start of the current row for each plane */
  const guint8 *srcY, *srcY_tmp;
  const guint8 *srcU, *srcU_tmp;
  const guint8 *srcV, *srcV_tmp;
  gint i, j;
  gint a, y, u, v;
  gint y_stride, uv_stride;
  gint v_subs, h_subs;
  /* luma window outside of which pixels are never keyed */
  gint smin = 128 - alpha->black_sensitivity;
  gint smax = 128 + alpha->white_sensitivity;
  gint8 cb = alpha->cb, cr = alpha->cr;
  gint8 kg = alpha->kg;
  guint8 accept_angle_tg = alpha->accept_angle_tg;
  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
  guint8 one_over_kc = alpha->one_over_kc;
  guint8 kfgy_scale = alpha->kfgy_scale;
  guint noise_level2 = alpha->noise_level2;

  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
  uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);

  srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
  srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
  srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);

  /* chroma subsampling factors per supported input format */
  switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      v_subs = h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y444:
      v_subs = h_subs = 1;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      v_subs = 1;
      h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      v_subs = 1;
      h_subs = 4;
      break;
    default:
      g_assert_not_reached ();
      return;
  }

  if (alpha->in_sdtv == alpha->out_sdtv) {
    /* same colorimetry: key on the source samples directly */
    for (i = 0; i < height; i++) {
      for (j = 0; j < width; j++) {
        a = b_alpha;
        y = srcY[0];
        /* chroma_keying_yuv() expects centred chroma in [-128, 127] */
        u = srcU[0] - 128;
        v = srcV[0] - 128;

        a = chroma_keying_yuv (a, &y, &u, &v, cr, cb, smin,
            smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        u += 128;
        v += 128;

        dest[0] = a;
        dest[1] = y;
        dest[2] = u;
        dest[3] = v;

        dest += 4;
        srcY++;
        /* advance chroma only once per h_subs luma samples */
        if ((j + 1) % h_subs == 0) {
          srcU++;
          srcV++;
        }
      }

      srcY_tmp = srcY = srcY_tmp + y_stride;
      /* advance chroma rows only once per v_subs luma rows */
      if ((i + 1) % v_subs == 0) {
        srcU_tmp = srcU = srcU_tmp + uv_stride;
        srcV_tmp = srcV = srcV_tmp + uv_stride;
      } else {
        srcU = srcU_tmp;
        srcV = srcV_tmp;
      }
    }
  } else {
    gint matrix[12];

    /* convert SDTV <-> HDTV Y'CbCr before keying */
    memcpy (matrix,
        alpha->out_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));

    for (i = 0; i < height; i++) {
      for (j = 0; j < width; j++) {
        a = b_alpha;
        y = APPLY_MATRIX (matrix, 0, srcY[0], srcU[0], srcV[0]);
        u = APPLY_MATRIX (matrix, 1, srcY[0], srcU[0], srcV[0]) - 128;
        v = APPLY_MATRIX (matrix, 2, srcY[0], srcU[0], srcV[0]) - 128;

        a = chroma_keying_yuv (a, &y, &u, &v, cr, cb, smin,
            smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        dest[0] = a;
        dest[1] = y;
        dest[2] = u + 128;
        dest[3] = v + 128;

        dest += 4;
        srcY++;
        if ((j + 1) % h_subs == 0) {
          srcU++;
          srcV++;
        }
      }

      srcY_tmp = srcY = srcY_tmp + y_stride;
      if ((i + 1) % v_subs == 0) {
        srcU_tmp = srcU = srcU_tmp + uv_stride;
        srcV_tmp = srcV = srcV_tmp + uv_stride;
      } else {
        srcU = srcU_tmp;
        srcV = srcV_tmp;
      }
    }
  }
}
+
/* Planar YUV (I420/YV12/Y444/Y42B/Y41B) -> packed RGB-with-alpha.
 *
 * Converts via the Y'CbCr -> RGB matrix matching the *input*
 * colorimetry and fills in a constant alpha from the "alpha" property.
 * Chroma pointer walking is the same subsampling scheme as in
 * gst_alpha_set_planar_yuv_ayuv(). */
static void
gst_alpha_set_planar_yuv_argb (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  guint8 *dest;
  gint width, height;
  gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  /* *_tmp pointers remember the start of the current row for each plane */
  const guint8 *srcY, *srcY_tmp;
  const guint8 *srcU, *srcU_tmp;
  const guint8 *srcV, *srcV_tmp;
  gint i, j;
  gint y_stride, uv_stride;
  gint v_subs, h_subs;
  gint matrix[12];
  gint a, y, u, v;
  gint r, g, b;
  gint p[4];

  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  /* destination byte offsets: alpha, R, G, B */
  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);

  y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
  uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);

  srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
  srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
  srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);

  /* chroma subsampling factors per supported input format */
  switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      v_subs = h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y444:
      v_subs = h_subs = 1;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      v_subs = 1;
      h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      v_subs = 1;
      h_subs = 4;
      break;
    default:
      g_assert_not_reached ();
      return;
  }

  memcpy (matrix,
      alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
      cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint));

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      a = b_alpha;
      y = srcY[0];
      u = srcU[0];
      v = srcV[0];

      dest[p[0]] = a;
      /* matrix output can overshoot the 8-bit range, hence the CLAMPs */
      r = APPLY_MATRIX (matrix, 0, y, u, v);
      g = APPLY_MATRIX (matrix, 1, y, u, v);
      b = APPLY_MATRIX (matrix, 2, y, u, v);
      dest[p[1]] = CLAMP (r, 0, 255);
      dest[p[2]] = CLAMP (g, 0, 255);
      dest[p[3]] = CLAMP (b, 0, 255);

      dest += 4;
      srcY++;
      /* advance chroma only once per h_subs luma samples */
      if ((j + 1) % h_subs == 0) {
        srcU++;
        srcV++;
      }
    }

    srcY_tmp = srcY = srcY_tmp + y_stride;
    /* advance chroma rows only once per v_subs luma rows */
    if ((i + 1) % v_subs == 0) {
      srcU_tmp = srcU = srcU_tmp + uv_stride;
      srcV_tmp = srcV = srcV_tmp + uv_stride;
    } else {
      srcU = srcU_tmp;
      srcV = srcV_tmp;
    }
  }
}
+
/* Planar YUV (I420/YV12/Y444/Y42B/Y41B) -> packed RGB-with-alpha
 * chroma keying.
 *
 * Keys in the input Y'CbCr space via chroma_keying_yuv(), then converts
 * the (possibly modified) pixel to RGB with the matrix matching the
 * input colorimetry. */
static void
gst_alpha_chroma_key_planar_yuv_argb (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  guint8 *dest;
  gint width, height;
  gint b_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  /* *_tmp pointers remember the start of the current row for each plane */
  const guint8 *srcY, *srcY_tmp;
  const guint8 *srcU, *srcU_tmp;
  const guint8 *srcV, *srcV_tmp;
  gint i, j;
  gint a, y, u, v;
  gint r, g, b;
  gint y_stride, uv_stride;
  gint v_subs, h_subs;
  /* luma window outside of which pixels are never keyed */
  gint smin = 128 - alpha->black_sensitivity;
  gint smax = 128 + alpha->white_sensitivity;
  gint8 cb = alpha->cb, cr = alpha->cr;
  gint8 kg = alpha->kg;
  guint8 accept_angle_tg = alpha->accept_angle_tg;
  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
  guint8 one_over_kc = alpha->one_over_kc;
  guint8 kfgy_scale = alpha->kfgy_scale;
  guint noise_level2 = alpha->noise_level2;
  gint matrix[12];
  gint p[4];

  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  /* destination byte offsets: alpha, R, G, B */
  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);

  y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
  uv_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);

  srcY_tmp = srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
  srcU_tmp = srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
  srcV_tmp = srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);

  /* chroma subsampling factors per supported input format */
  switch (GST_VIDEO_FRAME_FORMAT (in_frame)) {
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
      v_subs = h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y444:
      v_subs = h_subs = 1;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      v_subs = 1;
      h_subs = 2;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      v_subs = 1;
      h_subs = 4;
      break;
    default:
      g_assert_not_reached ();
      return;
  }

  memcpy (matrix,
      alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
      cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint));

  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      a = b_alpha;
      y = srcY[0];
      /* chroma_keying_yuv() expects centred chroma in [-128, 127] */
      u = srcU[0] - 128;
      v = srcV[0] - 128;

      a = chroma_keying_yuv (a, &y, &u, &v, cr, cb, smin,
          smax, accept_angle_tg, accept_angle_ctg,
          one_over_kc, kfgy_scale, kg, noise_level2);

      u += 128;
      v += 128;

      dest[p[0]] = a;
      /* matrix output can overshoot the 8-bit range, hence the CLAMPs */
      r = APPLY_MATRIX (matrix, 0, y, u, v);
      g = APPLY_MATRIX (matrix, 1, y, u, v);
      b = APPLY_MATRIX (matrix, 2, y, u, v);
      dest[p[1]] = CLAMP (r, 0, 255);
      dest[p[2]] = CLAMP (g, 0, 255);
      dest[p[3]] = CLAMP (b, 0, 255);

      dest += 4;
      srcY++;
      /* advance chroma only once per h_subs luma samples */
      if ((j + 1) % h_subs == 0) {
        srcU++;
        srcV++;
      }
    }

    srcY_tmp = srcY = srcY_tmp + y_stride;
    /* advance chroma rows only once per v_subs luma rows */
    if ((i + 1) % v_subs == 0) {
      srcU_tmp = srcU = srcU_tmp + uv_stride;
      srcV_tmp = srcV = srcV_tmp + uv_stride;
    } else {
      srcU = srcU_tmp;
      srcV = srcV_tmp;
    }
  }
}
+
/* Packed 4:2:2 YUV (YUY2/YVYU/UYVY) -> packed AYUV with a constant
 * alpha from the "alpha" property.
 *
 * One 4-byte input group carries two luma samples and one Cb/Cr pair,
 * producing two AYUV pixels that share their chroma.  The trailing
 * `j == width - 1` blocks handle the last pixel of odd-width frames. */
static void
gst_alpha_set_packed_422_ayuv (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  const guint8 *src;
  guint8 *dest;
  gint width, height;
  gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  gint i, j;
  gint y, u, v;
  /* p[0]/p[2]: first/second luma offset, p[1]: Cb, p[3]: Cr */
  gint p[4];                    /* Y U Y V */
  gint src_stride;
  const guint8 *src_tmp;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);

  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
  p[2] = p[0] + 2;              /* second luma, 2 bytes after the first */
  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);

  if (alpha->in_sdtv != alpha->out_sdtv) {
    gint matrix[12];

    /* repack plus SDTV <-> HDTV Y'CbCr conversion */
    memcpy (matrix,
        alpha->in_sdtv ? cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit :
        cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit, 12 * sizeof (gint));

    for (i = 0; i < height; i++) {
      src_tmp = src;

      for (j = 0; j < width - 1; j += 2) {
        dest[0] = s_alpha;
        dest[4] = s_alpha;

        y = APPLY_MATRIX (matrix, 0, src[p[0]], src[p[1]], src[p[3]]);
        u = APPLY_MATRIX (matrix, 1, src[p[0]], src[p[1]], src[p[3]]);
        v = APPLY_MATRIX (matrix, 2, src[p[0]], src[p[1]], src[p[3]]);

        dest[1] = y;
        dest[2] = u;
        dest[3] = v;

        /* second pixel of the pair: own luma, shared chroma */
        y = APPLY_MATRIX (matrix, 0, src[p[2]], src[p[1]], src[p[3]]);
        u = APPLY_MATRIX (matrix, 1, src[p[2]], src[p[1]], src[p[3]]);
        v = APPLY_MATRIX (matrix, 2, src[p[2]], src[p[1]], src[p[3]]);

        dest[5] = y;
        dest[6] = u;
        dest[7] = v;

        dest += 8;
        src += 4;
      }

      /* odd width: emit the final single pixel */
      if (j == width - 1) {
        dest[0] = s_alpha;

        y = APPLY_MATRIX (matrix, 0, src[p[0]], src[p[1]], src[p[3]]);
        u = APPLY_MATRIX (matrix, 1, src[p[0]], src[p[1]], src[p[3]]);
        v = APPLY_MATRIX (matrix, 2, src[p[0]], src[p[1]], src[p[3]]);

        dest[1] = y;
        dest[2] = u;
        dest[3] = v;

        dest += 4;
      }

      src = src_tmp + src_stride;
    }
  } else {
    /* same colorimetry: plain repack */
    for (i = 0; i < height; i++) {
      src_tmp = src;

      for (j = 0; j < width - 1; j += 2) {
        dest[0] = s_alpha;
        dest[4] = s_alpha;

        y = src[p[0]];
        u = src[p[1]];
        v = src[p[3]];

        dest[1] = y;
        dest[2] = u;
        dest[3] = v;

        /* second pixel of the pair: own luma, shared chroma */
        y = src[p[2]];

        dest[5] = y;
        dest[6] = u;
        dest[7] = v;

        dest += 8;
        src += 4;
      }

      /* odd width: emit the final single pixel */
      if (j == width - 1) {
        dest[0] = s_alpha;

        y = src[p[0]];
        u = src[p[1]];
        v = src[p[3]];

        dest[1] = y;
        dest[2] = u;
        dest[3] = v;

        dest += 4;
      }

      src = src_tmp + src_stride;
    }
  }
}
+
/* Packed 4:2:2 YUV (YUY2/YVYU/UYVY) -> packed AYUV chroma keying.
 *
 * Same pairwise layout as gst_alpha_set_packed_422_ayuv(): each 4-byte
 * input group yields two AYUV pixels sharing one Cb/Cr pair, each keyed
 * independently via chroma_keying_yuv(); the `j == width - 1` blocks
 * handle the final pixel of odd-width frames. */
static void
gst_alpha_chroma_key_packed_422_ayuv (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  const guint8 *src;
  guint8 *dest;
  gint width, height;
  gint i, j;
  gint a, y, u, v;
  gint smin, smax;
  gint pa = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  gint8 cb = alpha->cb, cr = alpha->cr;
  gint8 kg = alpha->kg;
  guint8 accept_angle_tg = alpha->accept_angle_tg;
  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
  guint8 one_over_kc = alpha->one_over_kc;
  guint8 kfgy_scale = alpha->kfgy_scale;
  guint noise_level2 = alpha->noise_level2;
  /* p[0]/p[2]: first/second luma offset, p[1]: Cb, p[3]: Cr */
  gint p[4];                    /* Y U Y V */
  gint src_stride;
  const guint8 *src_tmp;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);

  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
  p[2] = p[0] + 2;              /* second luma, 2 bytes after the first */
  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);

  /* luma window outside of which pixels are never keyed */
  smin = 128 - alpha->black_sensitivity;
  smax = 128 + alpha->white_sensitivity;

  if (alpha->in_sdtv != alpha->out_sdtv) {
    gint matrix[12];

    /* convert SDTV <-> HDTV Y'CbCr before keying */
    memcpy (matrix,
        alpha->in_sdtv ? cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit :
        cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit, 12 * sizeof (gint));

    for (i = 0; i < height; i++) {
      src_tmp = src;

      for (j = 0; j < width - 1; j += 2) {
        y = APPLY_MATRIX (matrix, 0, src[p[0]], src[p[1]], src[p[3]]);
        u = APPLY_MATRIX (matrix, 1, src[p[0]], src[p[1]], src[p[3]]) - 128;
        v = APPLY_MATRIX (matrix, 2, src[p[0]], src[p[1]], src[p[3]]) - 128;

        a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
            smin, smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        dest[0] = a;
        dest[1] = y;
        dest[2] = u + 128;
        dest[3] = v + 128;

        /* second pixel of the pair: own luma, shared chroma */
        y = APPLY_MATRIX (matrix, 0, src[p[2]], src[p[1]], src[p[3]]);
        u = APPLY_MATRIX (matrix, 1, src[p[2]], src[p[1]], src[p[3]]) - 128;
        v = APPLY_MATRIX (matrix, 2, src[p[2]], src[p[1]], src[p[3]]) - 128;

        a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
            smin, smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        dest[4] = a;
        dest[5] = y;
        dest[6] = u + 128;
        dest[7] = v + 128;

        dest += 8;
        src += 4;
      }

      /* odd width: key and emit the final single pixel */
      if (j == width - 1) {
        y = APPLY_MATRIX (matrix, 0, src[p[0]], src[p[1]], src[p[3]]);
        u = APPLY_MATRIX (matrix, 1, src[p[0]], src[p[1]], src[p[3]]) - 128;
        v = APPLY_MATRIX (matrix, 2, src[p[0]], src[p[1]], src[p[3]]) - 128;

        a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
            smin, smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        dest[0] = a;
        dest[1] = y;
        dest[2] = u + 128;
        dest[3] = v + 128;

        dest += 4;
      }

      src = src_tmp + src_stride;
    }
  } else {
    for (i = 0; i < height; i++) {
      src_tmp = src;

      for (j = 0; j < width - 1; j += 2) {
        y = src[p[0]];
        /* chroma_keying_yuv() expects centred chroma in [-128, 127] */
        u = src[p[1]] - 128;
        v = src[p[3]] - 128;

        a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
            smin, smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        dest[0] = a;
        dest[1] = y;
        dest[2] = u + 128;
        dest[3] = v + 128;

        /* second pixel of the pair: own luma, shared chroma */
        y = src[p[2]];
        u = src[p[1]] - 128;
        v = src[p[3]] - 128;

        a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
            smin, smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        dest[4] = a;
        dest[5] = y;
        dest[6] = u + 128;
        dest[7] = v + 128;

        dest += 8;
        src += 4;
      }

      /* odd width: key and emit the final single pixel */
      if (j == width - 1) {
        y = src[p[0]];
        u = src[p[1]] - 128;
        v = src[p[3]] - 128;

        a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
            smin, smax, accept_angle_tg, accept_angle_ctg,
            one_over_kc, kfgy_scale, kg, noise_level2);

        dest[0] = a;
        dest[1] = y;
        dest[2] = u + 128;
        dest[3] = v + 128;

        dest += 4;
      }

      src = src_tmp + src_stride;
    }
  }
}
+
/* Packed 4:2:2 YUV (YUY2/YVYU/UYVY) -> packed RGB-with-alpha.
 *
 * Each 4-byte input group (two luma samples sharing one Cb/Cr pair)
 * produces two RGB pixels via the Y'CbCr -> RGB matrix matching the
 * input colorimetry, with a constant alpha from the "alpha" property.
 * The `j == width - 1` block handles the last pixel of odd widths. */
static void
gst_alpha_set_packed_422_argb (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  const guint8 *src;
  guint8 *dest;
  gint width, height;
  gint s_alpha = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  gint i, j;
  /* o[0]/o[2]: first/second luma offset, o[1]: Cb, o[3]: Cr;
   * p[]: destination offsets of alpha, R, G, B */
  gint p[4], o[4];
  gint src_stride;
  const guint8 *src_tmp;
  gint matrix[12];
  gint r, g, b;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);

  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
  o[2] = o[0] + 2;              /* second luma, 2 bytes after the first */
  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
  o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);

  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);

  memcpy (matrix,
      alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
      cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint));

  for (i = 0; i < height; i++) {
    src_tmp = src;

    for (j = 0; j < width - 1; j += 2) {
      /* matrix output can overshoot the 8-bit range, hence the CLAMPs */
      r = APPLY_MATRIX (matrix, 0, src[o[0]], src[o[1]], src[o[3]]);
      g = APPLY_MATRIX (matrix, 1, src[o[0]], src[o[1]], src[o[3]]);
      b = APPLY_MATRIX (matrix, 2, src[o[0]], src[o[1]], src[o[3]]);

      dest[p[0]] = s_alpha;
      dest[p[1]] = CLAMP (r, 0, 255);
      dest[p[2]] = CLAMP (g, 0, 255);
      dest[p[3]] = CLAMP (b, 0, 255);

      /* second pixel of the pair: own luma, shared chroma */
      r = APPLY_MATRIX (matrix, 0, src[o[2]], src[o[1]], src[o[3]]);
      g = APPLY_MATRIX (matrix, 1, src[o[2]], src[o[1]], src[o[3]]);
      b = APPLY_MATRIX (matrix, 2, src[o[2]], src[o[1]], src[o[3]]);

      dest[4 + p[0]] = s_alpha;
      dest[4 + p[1]] = CLAMP (r, 0, 255);
      dest[4 + p[2]] = CLAMP (g, 0, 255);
      dest[4 + p[3]] = CLAMP (b, 0, 255);

      dest += 8;
      src += 4;
    }

    /* odd width: emit the final single pixel */
    if (j == width - 1) {
      r = APPLY_MATRIX (matrix, 0, src[o[0]], src[o[1]], src[o[3]]);
      g = APPLY_MATRIX (matrix, 1, src[o[0]], src[o[1]], src[o[3]]);
      b = APPLY_MATRIX (matrix, 2, src[o[0]], src[o[1]], src[o[3]]);

      dest[p[0]] = s_alpha;
      dest[p[1]] = CLAMP (r, 0, 255);
      dest[p[2]] = CLAMP (g, 0, 255);
      dest[p[3]] = CLAMP (b, 0, 255);

      dest += 4;
    }

    src = src_tmp + src_stride;
  }
}
+
/* Packed 4:2:2 YUV (YUY2/YVYU/UYVY) -> packed RGB-with-alpha chroma
 * keying.
 *
 * Keys each of the two pixels in a 4-byte input group (shared Cb/Cr)
 * in the input Y'CbCr space via chroma_keying_yuv(), then converts the
 * result to RGB.  The `j == width - 1` block handles the final pixel
 * of odd-width frames. */
static void
gst_alpha_chroma_key_packed_422_argb (const GstVideoFrame * in_frame,
    GstVideoFrame * out_frame, GstAlpha * alpha)
{
  const guint8 *src;
  guint8 *dest;
  gint width, height;
  gint i, j;
  gint a, y, u, v;
  gint r, g, b;
  gint smin, smax;
  gint pa = CLAMP ((gint) (alpha->alpha * 255), 0, 255);
  gint8 cb = alpha->cb, cr = alpha->cr;
  gint8 kg = alpha->kg;
  guint8 accept_angle_tg = alpha->accept_angle_tg;
  guint8 accept_angle_ctg = alpha->accept_angle_ctg;
  guint8 one_over_kc = alpha->one_over_kc;
  guint8 kfgy_scale = alpha->kfgy_scale;
  guint noise_level2 = alpha->noise_level2;
  /* o[0]/o[2]: first/second luma offset, o[1]: Cb, o[3]: Cr;
   * p[]: destination offsets of alpha, R, G, B */
  gint p[4], o[4];
  gint src_stride;
  const guint8 *src_tmp;
  gint matrix[12];

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);
  height = GST_VIDEO_FRAME_HEIGHT (in_frame);

  src_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);

  o[0] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 0);
  o[2] = o[0] + 2;              /* second luma, 2 bytes after the first */
  o[1] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 1);
  o[3] = GST_VIDEO_FRAME_COMP_POFFSET (in_frame, 2);

  p[0] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 3);
  p[1] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 0);
  p[2] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 1);
  p[3] = GST_VIDEO_FRAME_COMP_POFFSET (out_frame, 2);

  memcpy (matrix,
      alpha->in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
      cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint));

  /* luma window outside of which pixels are never keyed */
  smin = 128 - alpha->black_sensitivity;
  smax = 128 + alpha->white_sensitivity;

  for (i = 0; i < height; i++) {
    src_tmp = src;

    for (j = 0; j < width - 1; j += 2) {
      y = src[o[0]];
      /* chroma_keying_yuv() expects centred chroma in [-128, 127] */
      u = src[o[1]] - 128;
      v = src[o[3]] - 128;

      a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
          smin, smax, accept_angle_tg, accept_angle_ctg,
          one_over_kc, kfgy_scale, kg, noise_level2);
      u += 128;
      v += 128;

      /* matrix output can overshoot the 8-bit range, hence the CLAMPs */
      r = APPLY_MATRIX (matrix, 0, y, u, v);
      g = APPLY_MATRIX (matrix, 1, y, u, v);
      b = APPLY_MATRIX (matrix, 2, y, u, v);

      dest[p[0]] = a;
      dest[p[1]] = CLAMP (r, 0, 255);
      dest[p[2]] = CLAMP (g, 0, 255);
      dest[p[3]] = CLAMP (b, 0, 255);

      /* second pixel of the pair: own luma, shared chroma */
      y = src[o[2]];
      u = src[o[1]] - 128;
      v = src[o[3]] - 128;

      a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
          smin, smax, accept_angle_tg, accept_angle_ctg,
          one_over_kc, kfgy_scale, kg, noise_level2);
      u += 128;
      v += 128;

      r = APPLY_MATRIX (matrix, 0, y, u, v);
      g = APPLY_MATRIX (matrix, 1, y, u, v);
      b = APPLY_MATRIX (matrix, 2, y, u, v);

      dest[4 + p[0]] = a;
      dest[4 + p[1]] = CLAMP (r, 0, 255);
      dest[4 + p[2]] = CLAMP (g, 0, 255);
      dest[4 + p[3]] = CLAMP (b, 0, 255);

      dest += 8;
      src += 4;
    }

    /* odd width: key and emit the final single pixel */
    if (j == width - 1) {
      y = src[o[0]];
      u = src[o[1]] - 128;
      v = src[o[3]] - 128;

      a = chroma_keying_yuv (pa, &y, &u, &v, cr, cb,
          smin, smax, accept_angle_tg, accept_angle_ctg,
          one_over_kc, kfgy_scale, kg, noise_level2);
      u += 128;
      v += 128;

      r = APPLY_MATRIX (matrix, 0, y, u, v);
      g = APPLY_MATRIX (matrix, 1, y, u, v);
      b = APPLY_MATRIX (matrix, 2, y, u, v);

      dest[p[0]] = a;
      dest[p[1]] = CLAMP (r, 0, 255);
      dest[p[2]] = CLAMP (g, 0, 255);
      dest[p[3]] = CLAMP (b, 0, 255);

      dest += 4;
    }

    src = src_tmp + src_stride;
  }
}
+
+/* Protected with the alpha lock.
+ *
+ * Precomputes the chroma-keying parameters (key colour in YCbCr, accept
+ * angle tangents, scaling factors) from the current properties and the
+ * negotiated input/output pixel formats. Results are stored in the
+ * GstAlpha instance fields (cb/cr/kg/accept_angle_tg/...).
+ */
+static void
+gst_alpha_init_params_full (GstAlpha * alpha,
+ const GstVideoFormatInfo * in_info, const GstVideoFormatInfo * out_info)
+{
+ gfloat kgl;
+ gfloat tmp;
+ gfloat tmp1, tmp2;
+ gfloat y;
+ guint target_r = alpha->target_r;
+ guint target_g = alpha->target_g;
+ guint target_b = alpha->target_b;
+ const gint *matrix;
+
+ /* GREEN/BLUE methods force the key colour to pure green/blue;
+ * CUSTOM keeps the target-r/g/b properties as configured. */
+ switch (alpha->method) {
+ case ALPHA_METHOD_GREEN:
+ target_r = 0;
+ target_g = 255;
+ target_b = 0;
+ break;
+ case ALPHA_METHOD_BLUE:
+ target_r = 0;
+ target_g = 0;
+ target_b = 255;
+ break;
+ default:
+ break;
+ }
+
+ /* RGB->RGB: convert to SDTV YUV, chroma keying, convert back
+ * YUV->RGB: chroma keying, convert to RGB
+ * RGB->YUV: convert to YUV, chroma keying
+ * YUV->YUV: convert matrix, chroma keying
+ */
+ if (GST_VIDEO_FORMAT_INFO_IS_RGB (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_RGB (out_info))
+ matrix = cog_rgb_to_ycbcr_matrix_8bit_sdtv;
+ else if (GST_VIDEO_FORMAT_INFO_IS_YUV (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_RGB (out_info))
+ matrix =
+ (alpha->in_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv;
+ else if (GST_VIDEO_FORMAT_INFO_IS_RGB (in_info)
+ && GST_VIDEO_FORMAT_INFO_IS_YUV (out_info))
+ matrix =
+ (alpha->out_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv;
+ else /* yuv -> yuv */
+ matrix =
+ (alpha->out_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv;
+
+ /* The matrices are 8.8 fixed point (see the >> 8), so this yields the
+ * luma of the key colour in 8-bit range. */
+ y = (matrix[0] * ((gint) target_r) +
+ matrix[1] * ((gint) target_g) +
+ matrix[2] * ((gint) target_b) + matrix[3]) >> 8;
+ /* Cb,Cr without offset here because the chroma keying
+ * works with them being in range [-128,127]
+ */
+ tmp1 =
+ (matrix[4] * ((gint) target_r) +
+ matrix[5] * ((gint) target_g) + matrix[6] * ((gint) target_b)) >> 8;
+ tmp2 =
+ (matrix[8] * ((gint) target_r) +
+ matrix[9] * ((gint) target_g) + matrix[10] * ((gint) target_b)) >> 8;
+
+ /* kgl is the length of the key colour's chroma vector; cb/cr is that
+ * vector normalised and scaled to [-127,127].
+ * NOTE(review): an achromatic key colour (r==g==b) gives tmp1==tmp2==0
+ * and thus kgl==0, dividing by zero below — presumably such key colours
+ * are never useful for chroma keying, but confirm the property range
+ * cannot trigger this. */
+ kgl = sqrt (tmp1 * tmp1 + tmp2 * tmp2);
+ alpha->cb = 127 * (tmp1 / kgl);
+ alpha->cr = 127 * (tmp2 / kgl);
+
+ /* NOTE(review): angle == 0 makes tan() zero and the ctg computation
+ * divide by zero — presumably the "angle" property minimum prevents
+ * this; confirm against the property spec. */
+ tmp = 15 * tan (M_PI * alpha->angle / 180);
+ tmp = MIN (tmp, 255);
+ alpha->accept_angle_tg = tmp;
+ tmp = 15 / tan (M_PI * alpha->angle / 180);
+ tmp = MIN (tmp, 255);
+ alpha->accept_angle_ctg = tmp;
+ tmp = 1 / (kgl);
+ alpha->one_over_kc = (gint) (255 * 2 * tmp - 255);
+ tmp = 15 * y / kgl;
+ tmp = MIN (tmp, 255);
+ alpha->kfgy_scale = tmp;
+ alpha->kg = MIN (kgl, 127);
+
+ /* squared so the per-pixel test can avoid a sqrt */
+ alpha->noise_level2 = alpha->noise_level * alpha->noise_level;
+}
+
+/* Convenience wrapper around gst_alpha_init_params_full() that pulls the
+ * format infos from the negotiated video filter state; does nothing (with
+ * a debug message) if caps have not been negotiated yet. */
+static void
+gst_alpha_init_params (GstAlpha * alpha)
+{
+ const GstVideoFormatInfo *finfo_in, *finfo_out;
+
+ finfo_in = GST_VIDEO_FILTER (alpha)->in_info.finfo;
+ finfo_out = GST_VIDEO_FILTER (alpha)->out_info.finfo;
+
+ if (finfo_in != NULL && finfo_out != NULL) {
+ gst_alpha_init_params_full (alpha, finfo_in, finfo_out);
+ } else {
+ GST_DEBUG_OBJECT (alpha, "video formats not set yet");
+ }
+}
+
+/* Protected with the alpha lock.
+ *
+ * Selects the per-frame processing function for the current method and
+ * the negotiated input/output formats. The dispatch is a two-level
+ * switch: method (SET vs chroma-key) -> output format -> input format.
+ * Returns FALSE (alpha->process left NULL) if the combination is not
+ * supported. */
+static gboolean
+gst_alpha_set_process_function_full (GstAlpha * alpha, GstVideoInfo * in_info,
+ GstVideoInfo * out_info)
+{
+ alpha->process = NULL;
+
+ switch (alpha->method) {
+ case ALPHA_METHOD_SET:
+ /* Plain alpha set/scale, no keying */
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ alpha->process = gst_alpha_set_ayuv_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_Y444:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ alpha->process = gst_alpha_set_planar_yuv_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_YVYU:
+ case GST_VIDEO_FORMAT_UYVY:
+ alpha->process = gst_alpha_set_packed_422_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = gst_alpha_set_argb_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ alpha->process = gst_alpha_set_rgb_ayuv;
+ break;
+ default:
+ break;
+ }
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ alpha->process = gst_alpha_set_ayuv_argb;
+ break;
+ case GST_VIDEO_FORMAT_Y444:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ alpha->process = gst_alpha_set_planar_yuv_argb;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_YVYU:
+ case GST_VIDEO_FORMAT_UYVY:
+ alpha->process = gst_alpha_set_packed_422_argb;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = gst_alpha_set_argb_argb;
+ break;
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ alpha->process = gst_alpha_set_rgb_argb;
+ break;
+ default:
+ break;
+ }
+ break;
+ default:
+ break;
+ }
+ break;
+ case ALPHA_METHOD_GREEN:
+ case ALPHA_METHOD_BLUE:
+ case ALPHA_METHOD_CUSTOM:
+ /* All three keying methods share the chroma-key implementations;
+ * they differ only in the target colour set up by
+ * gst_alpha_init_params_full(). */
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ alpha->process = gst_alpha_chroma_key_ayuv_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_Y444:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ alpha->process = gst_alpha_chroma_key_planar_yuv_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_YVYU:
+ case GST_VIDEO_FORMAT_UYVY:
+ alpha->process = gst_alpha_chroma_key_packed_422_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = gst_alpha_chroma_key_argb_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ alpha->process = gst_alpha_chroma_key_rgb_ayuv;
+ break;
+ default:
+ break;
+ }
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ alpha->process = gst_alpha_chroma_key_ayuv_argb;
+ break;
+ case GST_VIDEO_FORMAT_Y444:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ alpha->process = gst_alpha_chroma_key_planar_yuv_argb;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_YVYU:
+ case GST_VIDEO_FORMAT_UYVY:
+ alpha->process = gst_alpha_chroma_key_packed_422_argb;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = gst_alpha_chroma_key_argb_argb;
+ break;
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ alpha->process = gst_alpha_chroma_key_rgb_argb;
+ break;
+ default:
+ break;
+ }
+ break;
+ default:
+ break;
+ }
+ break;
+ default:
+ break;
+ }
+ return alpha->process != NULL;
+}
+
+/* Convenience wrapper around gst_alpha_set_process_function_full() using
+ * the negotiated in/out video infos; no-op (with a debug message) before
+ * negotiation. The boolean result of the full variant is intentionally
+ * ignored here — transform_frame reports not-negotiated instead. */
+static void
+gst_alpha_set_process_function (GstAlpha * alpha)
+{
+ GstVideoInfo *info_in, *info_out;
+
+ info_in = &GST_VIDEO_FILTER (alpha)->in_info;
+ info_out = &GST_VIDEO_FILTER (alpha)->out_info;
+
+ if (info_in->finfo != NULL && info_out->finfo != NULL) {
+ gst_alpha_set_process_function_full (alpha, info_in, info_out);
+ } else {
+ GST_DEBUG_OBJECT (alpha, "video formats not set yet");
+ }
+}
+
+/* GstBaseTransform::before_transform — syncs GObject controller-bound
+ * property values to the buffer's stream time so animated properties
+ * (alpha, angle, ...) take effect per frame. */
+static void
+gst_alpha_before_transform (GstBaseTransform * btrans, GstBuffer * buf)
+{
+ GstAlpha *alpha = GST_ALPHA (btrans);
+ GstClockTime timestamp;
+
+ timestamp = gst_segment_to_stream_time (&btrans->segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (buf));
+ GST_LOG ("Got stream time of %" GST_TIME_FORMAT, GST_TIME_ARGS (timestamp));
+ if (GST_CLOCK_TIME_IS_VALID (timestamp))
+ gst_object_sync_values (GST_OBJECT (alpha), timestamp);
+}
+
+/* GstVideoFilter::transform_frame — runs the previously selected
+ * processing function on the frame pair under the alpha lock (the lock
+ * protects alpha->process and the precomputed keying parameters against
+ * concurrent property changes). Returns GST_FLOW_NOT_NEGOTIATED if no
+ * processing function was selected yet. */
+static GstFlowReturn
+gst_alpha_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
+{
+ GstAlpha *alpha = GST_ALPHA (filter);
+
+ GST_ALPHA_LOCK (alpha);
+
+ if (G_UNLIKELY (!alpha->process))
+ goto not_negotiated;
+
+ alpha->process (in_frame, out_frame, alpha);
+
+ GST_ALPHA_UNLOCK (alpha);
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (alpha, "Not negotiated yet");
+ GST_ALPHA_UNLOCK (alpha);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
+
+/* Plugin entry point: registers the single "alpha" element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ return GST_ELEMENT_REGISTER (alpha, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ alpha,
+ "adds an alpha channel to video - constant or via chroma-keying",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/alpha/gstalpha.h b/gst/alpha/gstalpha.h
new file mode 100644
index 0000000000..b0a61bc7c7
--- /dev/null
+++ b/gst/alpha/gstalpha.h
@@ -0,0 +1,99 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@collabora.co.uk>
+ * Copyright (C) <2007> Edward Hervey <edward.hervey@collabora.co.uk>
+ * Copyright (C) <2007> Jan Schmidt <thaytan@noraisin.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_ALPHA_H__
+#define __GST_ALPHA_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_ALPHA (gst_alpha_get_type ())
+
+G_DECLARE_FINAL_TYPE (GstAlpha, gst_alpha, GST, ALPHA, GstVideoFilter)
+
+/**
+ * GstAlphaMethod:
+ * @ALPHA_METHOD_SET: Set/adjust alpha channel
+ * @ALPHA_METHOD_GREEN: Chroma Key green
+ * @ALPHA_METHOD_BLUE: Chroma Key blue
+ * @ALPHA_METHOD_CUSTOM: Chroma Key on target_r/g/b
+ */
+typedef enum
+{
+ ALPHA_METHOD_SET,
+ ALPHA_METHOD_GREEN,
+ ALPHA_METHOD_BLUE,
+ ALPHA_METHOD_CUSTOM,
+}
+GstAlphaMethod;
+
+/* NOTE(review): declaring the debug category in the header gives every
+ * including translation unit its own static category — fine if only
+ * gstalpha.c includes this, but confirm. */
+GST_DEBUG_CATEGORY_STATIC (gst_alpha_debug);
+#define GST_CAT_DEFAULT gst_alpha_debug
+
+struct _GstAlpha
+{
+ GstVideoFilter parent;
+
+ /* <private> */
+
+ /* protects the process function pointer and the precalculated
+ * chroma-keying values below against concurrent property changes */
+ GMutex lock;
+
+ /* whether the negotiated in/out colorimetry is BT.601 (SDTV) */
+ gboolean in_sdtv, out_sdtv;
+
+ /* properties */
+ gdouble alpha;
+
+ /* chroma-key target colour (used by ALPHA_METHOD_CUSTOM) */
+ guint target_r;
+ guint target_g;
+ guint target_b;
+
+ GstAlphaMethod method;
+
+ gfloat angle;
+ gfloat noise_level;
+ guint black_sensitivity;
+ guint white_sensitivity;
+
+ gboolean prefer_passthrough;
+
+ /* processing function, selected from method + negotiated formats */
+ void (*process) (const GstVideoFrame *in_frame, GstVideoFrame *out_frame, GstAlpha *alpha);
+
+ /* precalculated values for chroma keying */
+ gint8 cb, cr;
+ gint8 kg;
+ guint8 accept_angle_tg;
+ guint8 accept_angle_ctg;
+ guint8 one_over_kc;
+ guint8 kfgy_scale;
+ guint noise_level2;
+};
+
+GST_ELEMENT_REGISTER_DECLARE (alpha);
+
+G_END_DECLS
+
+#endif /* __GST_ALPHA_H__ */
diff --git a/gst/alpha/gstalphacolor.c b/gst/alpha/gstalphacolor.c
new file mode 100644
index 0000000000..295ee9bc65
--- /dev/null
+++ b/gst/alpha/gstalphacolor.c
@@ -0,0 +1,708 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-alphacolor
+ * @title: alphacolor
+ *
+ * The alphacolor element does memory-efficient (in-place) colourspace
+ * conversion from RGBA to AYUV or AYUV to RGBA while preserving the
+ * alpha channel.
+ *
+ * Sample pipeline:
+ * |[
+ * gst-launch-1.0 videotestsrc ! "video/x-raw,format=(string)AYUV" ! \
+ * alphacolor ! videoconvert ! autovideosink
+ * ]|
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstalphacolor.h"
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY_STATIC (alpha_color_debug);
+#define GST_CAT_DEFAULT alpha_color_debug
+
+/* elementfactory information: both pads accept the same set of
+ * 4-byte-per-pixel formats with alpha (plus AYUV), since conversion is
+ * done in place. */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, BGRA, ARGB, ABGR, AYUV }"))
+ );
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBA, BGRA, ARGB, ABGR, AYUV }"))
+ );
+
+G_DEFINE_TYPE (GstAlphaColor, gst_alpha_color, GST_TYPE_VIDEO_FILTER);
+
+/* forward declarations of the vmethod implementations */
+static GstCaps *gst_alpha_color_transform_caps (GstBaseTransform * btrans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
+
+static gboolean gst_alpha_color_set_info (GstVideoFilter * filter,
+ GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
+ GstVideoInfo * out_info);
+static GstFlowReturn gst_alpha_color_transform_frame_ip (GstVideoFilter *
+ filter, GstVideoFrame * frame);
+
+/* Class init: installs pad templates, element metadata and the
+ * basetransform/videofilter vmethods. transform_ip_on_passthrough is
+ * disabled so the in-place transform is skipped entirely when
+ * passthrough was selected in set_info(). */
+static void
+gst_alpha_color_class_init (GstAlphaColorClass * klass)
+{
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *gstbasetransform_class =
+ (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *gstvideofilter_class = (GstVideoFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (alpha_color_debug, "alphacolor", 0,
+ "ARGB<->AYUV colorspace conversion preserving the alpha channels");
+
+ gst_element_class_set_static_metadata (gstelement_class, "Alpha color filter",
+ "Filter/Converter/Video",
+ "ARGB from/to AYUV colorspace conversion preserving the alpha channel",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+ gstbasetransform_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_alpha_color_transform_caps);
+ gstbasetransform_class->transform_ip_on_passthrough = FALSE;
+
+ gstvideofilter_class->set_info = GST_DEBUG_FUNCPTR (gst_alpha_color_set_info);
+ gstvideofilter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_alpha_color_transform_frame_ip);
+}
+
+/* Instance init: all conversions rewrite the buffer in place, so tell
+ * basetransform not to allocate an output buffer. */
+static void
+gst_alpha_color_init (GstAlphaColor * alpha)
+{
+ GstBaseTransform *btrans = GST_BASE_TRANSFORM (alpha);
+
+ gst_base_transform_set_in_place (btrans, TRUE);
+}
+
+/* GstBaseTransform::transform_caps — a given caps can be converted to
+ * anything with the same dimensions/framerate, so strip the
+ * format-specific fields, intersect with the opposite pad template and
+ * optionally with the filter caps. */
+static GstCaps *
+gst_alpha_color_transform_caps (GstBaseTransform * btrans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+{
+ GstCaps *tmpl_caps = NULL;
+ GstCaps *result = NULL, *local_caps = NULL;
+ guint i;
+
+ local_caps = gst_caps_new_empty ();
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ GstStructure *structure =
+ gst_structure_copy (gst_caps_get_structure (caps, i));
+
+ /* Remove any specific parameter from the structure */
+ gst_structure_remove_field (structure, "format");
+ gst_structure_remove_field (structure, "colorimetry");
+ gst_structure_remove_field (structure, "chroma-site");
+
+ gst_structure_set_name (structure, "video/x-raw");
+ gst_caps_append_structure (local_caps, structure);
+ }
+
+ /* Get the appropriate template.
+ * NOTE(review): tmpl_caps stays NULL for GST_PAD_UNKNOWN — presumably
+ * basetransform only ever passes SRC or SINK here; confirm. */
+ if (direction == GST_PAD_SINK) {
+ tmpl_caps = gst_static_pad_template_get_caps (&src_template);
+ } else if (direction == GST_PAD_SRC) {
+ tmpl_caps = gst_static_pad_template_get_caps (&sink_template);
+ }
+
+ /* Intersect with our template caps */
+ result = gst_caps_intersect (local_caps, tmpl_caps);
+ gst_caps_unref (tmpl_caps);
+ gst_caps_unref (local_caps);
+
+ /* gst_caps_simplify() takes ownership and returns the simplified caps */
+ result = gst_caps_simplify (result);
+
+ GST_LOG_OBJECT (btrans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
+ caps, result);
+
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (btrans, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (result);
+ result = intersection;
+ GST_DEBUG_OBJECT (btrans, "Intersection %" GST_PTR_FORMAT, result);
+ }
+
+
+ return result;
+}
+
+/* Generated by -bad/ext/cog/generate_tables.
+ *
+ * 3x4 colour conversion matrices in 8.8 fixed point: each output channel
+ * is (c0*in0 + c1*in1 + c2*in2 + offset) >> 8 (see the transform
+ * functions below). The fourth column is the constant offset, e.g.
+ * 4096 == 16 << 8 and 32768 == 128 << 8 for the YCbCr offsets. */
+static const int cog_ycbcr_to_rgb_matrix_8bit_hdtv[] = {
+ 298, 0, 459, -63514,
+ 298, -55, -136, 19681,
+ 298, 541, 0, -73988,
+};
+
+static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
+ 298, 0, 409, -57068,
+ 298, -100, -208, 34707,
+ 298, 516, 0, -70870,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_hdtv[] = {
+ 47, 157, 16, 4096,
+ -26, -87, 112, 32768,
+ 112, -102, -10, 32768,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
+ 66, 129, 25, 4096,
+ -38, -74, 112, 32768,
+ 112, -94, -18, 32768,
+};
+
+/* direct SDTV (BT.601) <-> HDTV (BT.709) YCbCr re-matrixing */
+static const gint cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit[] = {
+ 256, -30, -53, 10600,
+ 0, 261, 29, -4367,
+ 0, 19, 262, -3289,
+};
+
+static const gint cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit[] = {
+ 256, 25, 49, -9536,
+ 0, 253, -28, 3958,
+ 0, -19, 252, 2918,
+};
+
+/* Generates the two in-place converters for one RGB byte ordering:
+ * transform_<name>_ayuv (RGB+alpha -> AYUV) and transform_ayuv_<name>
+ * (AYUV -> RGB+alpha). A/R/G/B are the byte offsets of the respective
+ * channels within a 4-byte pixel. The matrix rows are copied to locals
+ * so the compiler can keep them in registers; the RGB->YUV direction
+ * relies on the matrix output already being in range, while YUV->RGB
+ * clamps. Alpha is carried over unchanged. */
+#define DEFINE_ARGB_AYUV_FUNCTIONS(name, A, R, G, B) \
+static void \
+transform_##name##_ayuv (GstVideoFrame * frame, const gint *matrix) \
+{ \
+ guint8 *data; \
+ gsize size; \
+ gint y, u, v; \
+ gint yc[4]; \
+ gint uc[4]; \
+ gint vc[4]; \
+ \
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);\
+ size = GST_VIDEO_FRAME_SIZE (frame);\
+ \
+ memcpy (yc, matrix, 4 * sizeof (gint)); \
+ memcpy (uc, matrix + 4, 4 * sizeof (gint)); \
+ memcpy (vc, matrix + 8, 4 * sizeof (gint)); \
+ \
+ while (size > 0) { \
+ y = (data[R] * yc[0] + data[G] * yc[1] + data[B] * yc[2] + yc[3]) >> 8; \
+ u = (data[R] * uc[0] + data[G] * uc[1] + data[B] * uc[2] + uc[3]) >> 8; \
+ v = (data[R] * vc[0] + data[G] * vc[1] + data[B] * vc[2] + vc[3]) >> 8; \
+ \
+ data[0] = data[A]; \
+ data[1] = y; \
+ data[2] = u; \
+ data[3] = v; \
+ \
+ data += 4; \
+ size -= 4; \
+ } \
+} \
+\
+static void \
+transform_ayuv_##name (GstVideoFrame * frame, const gint *matrix) \
+{ \
+ guint8 *data; \
+ gsize size; \
+ gint r, g, b; \
+ gint rc[4]; \
+ gint gc[4]; \
+ gint bc[4]; \
+ \
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);\
+ size = GST_VIDEO_FRAME_SIZE (frame);\
+ \
+ memcpy (rc, matrix, 4 * sizeof (gint)); \
+ memcpy (gc, matrix + 4, 4 * sizeof (gint)); \
+ memcpy (bc, matrix + 8, 4 * sizeof (gint)); \
+ \
+ while (size > 0) { \
+ r = (data[1] * rc[0] + data[2] * rc[1] + data[3] * rc[2] + rc[3]) >> 8; \
+ g = (data[1] * gc[0] + data[2] * gc[1] + data[3] * gc[2] + gc[3]) >> 8; \
+ b = (data[1] * bc[0] + data[2] * bc[1] + data[3] * bc[2] + bc[3]) >> 8; \
+ \
+ data[A] = data[0]; \
+ data[R] = CLAMP (r, 0, 255); \
+ data[G] = CLAMP (g, 0, 255); \
+ data[B] = CLAMP (b, 0, 255); \
+ \
+ data += 4; \
+ size -= 4; \
+ } \
+}
+
+/* instantiate converters for every supported RGB byte ordering */
+DEFINE_ARGB_AYUV_FUNCTIONS (rgba, 3, 0, 1, 2);
+DEFINE_ARGB_AYUV_FUNCTIONS (bgra, 3, 2, 1, 0);
+DEFINE_ARGB_AYUV_FUNCTIONS (argb, 0, 1, 2, 3);
+DEFINE_ARGB_AYUV_FUNCTIONS (abgr, 0, 3, 2, 1);
+
+/* In-place AYUV -> AYUV re-matrixing (SDTV <-> HDTV colorimetry).
+ * A NULL matrix means identity: the frame is left untouched. Alpha
+ * (byte 0) is never modified. */
+static void
+transform_ayuv_ayuv (GstVideoFrame * frame, const gint * matrix)
+{
+ guint8 *data;
+ gsize size;
+ gint y, u, v;
+ gint yc[4];
+ gint uc[4];
+ gint vc[4];
+
+ if (matrix == NULL)
+ return;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
+ /* copy matrix rows to locals (8.8 fixed point, hence the >> 8) */
+ memcpy (yc, matrix, 4 * sizeof (gint));
+ memcpy (uc, matrix + 4, 4 * sizeof (gint));
+ memcpy (vc, matrix + 8, 4 * sizeof (gint));
+
+ while (size > 0) {
+ y = (data[1] * yc[0] + data[2] * yc[1] + data[3] * yc[2] + yc[3]) >> 8;
+ u = (data[1] * uc[0] + data[2] * uc[1] + data[3] * uc[2] + uc[3]) >> 8;
+ v = (data[1] * vc[0] + data[2] * vc[1] + data[3] * vc[2] + vc[3]) >> 8;
+
+ data[1] = y;
+ data[2] = u;
+ data[3] = v;
+
+ data += 4;
+ size -= 4;
+ }
+}
+
+/* The following helpers reorder the four channel bytes of each pixel in
+ * place; no colour conversion is involved (the matrix argument is only
+ * present to match the process-function signature and is ignored).
+ * Each function is shared, via a #define alias, with the second
+ * byte-order pair that needs the same permutation. */
+
+/* ARGB -> BGRA (and ABGR -> RGBA): rotate alpha from byte 0 to byte 3
+ * and reverse the colour bytes */
+static void
+transform_argb_bgra (GstVideoFrame * frame, const gint * matrix)
+{
+ guint8 *data;
+ gsize size;
+ gint r, g, b;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
+ while (size > 0) {
+ r = data[1];
+ g = data[2];
+ b = data[3];
+
+ data[3] = data[0];
+ data[0] = b;
+ data[1] = g;
+ data[2] = r;
+
+ data += 4;
+ size -= 4;
+ }
+}
+
+#define transform_abgr_rgba transform_argb_bgra
+
+/* ARGB -> ABGR (and ABGR -> ARGB): alpha stays in byte 0, colour bytes
+ * are reversed */
+static void
+transform_argb_abgr (GstVideoFrame * frame, const gint * matrix)
+{
+ guint8 *data;
+ gsize size;
+ gint r, g, b;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
+ while (size > 0) {
+ r = data[1];
+ g = data[2];
+ b = data[3];
+
+ /* data[0] = data[0]; */
+ data[1] = b;
+ data[2] = g;
+ data[3] = r;
+
+ data += 4;
+ size -= 4;
+ }
+}
+
+#define transform_abgr_argb transform_argb_abgr
+
+/* RGBA -> BGRA (and BGRA -> RGBA): alpha stays in byte 3, colour bytes
+ * are reversed */
+static void
+transform_rgba_bgra (GstVideoFrame * frame, const gint * matrix)
+{
+ guint8 *data;
+ gsize size;
+ gint r, g, b;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
+ while (size > 0) {
+ r = data[0];
+ g = data[1];
+ b = data[2];
+
+ /* data[3] = data[3] */ ;
+ data[0] = b;
+ data[1] = g;
+ data[2] = r;
+
+ data += 4;
+ size -= 4;
+ }
+}
+
+#define transform_bgra_rgba transform_rgba_bgra
+
+/* ARGB -> RGBA (and ABGR -> BGRA): move alpha from byte 0 to byte 3,
+ * colour bytes shift down by one */
+static void
+transform_argb_rgba (GstVideoFrame * frame, const gint * matrix)
+{
+ guint8 *data;
+ gsize size;
+ gint r, g, b;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
+ while (size > 0) {
+ r = data[1];
+ g = data[2];
+ b = data[3];
+
+ data[3] = data[0];
+ data[0] = r;
+ data[1] = g;
+ data[2] = b;
+
+ data += 4;
+ size -= 4;
+ }
+}
+
+#define transform_abgr_bgra transform_argb_rgba
+
+/* BGRA -> ARGB (and RGBA -> ABGR): move alpha from byte 3 to byte 0 and
+ * reverse the colour bytes */
+static void
+transform_bgra_argb (GstVideoFrame * frame, const gint * matrix)
+{
+ guint8 *data;
+ gsize size;
+ gint r, g, b;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
+ while (size > 0) {
+ r = data[2];
+ g = data[1];
+ b = data[0];
+
+ data[0] = data[3];
+ data[1] = r;
+ data[2] = g;
+ data[3] = b;
+
+ data += 4;
+ size -= 4;
+ }
+}
+
+#define transform_rgba_abgr transform_bgra_argb
+
+/* RGBA -> ARGB (and BGRA -> ABGR): move alpha from byte 3 to byte 0,
+ * colour bytes shift up by one */
+static void
+transform_rgba_argb (GstVideoFrame * frame, const gint * matrix)
+{
+ guint8 *data;
+ gsize size;
+ gint r, g, b;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ size = GST_VIDEO_FRAME_SIZE (frame);
+
+ while (size > 0) {
+ r = data[0];
+ g = data[1];
+ b = data[2];
+
+ data[0] = data[3];
+ data[1] = r;
+ data[2] = g;
+ data[3] = b;
+
+ data += 4;
+ size -= 4;
+ }
+}
+
+#define transform_bgra_abgr transform_rgba_argb
+
+/* GstVideoFilter::set_info — selects the in-place process function and
+ * conversion matrix for the negotiated format pair. The matrix is only
+ * needed when YUV is involved (or when AYUV->AYUV crosses SDTV/HDTV
+ * colorimetry); pure byte-reorder conversions get a NULL matrix. If
+ * input and output are fully identical, passthrough is enabled instead.
+ * Returns FALSE for mismatched dimensions or unsupported combinations. */
+static gboolean
+gst_alpha_color_set_info (GstVideoFilter * filter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+ GstAlphaColor *alpha = GST_ALPHA_COLOR (filter);
+ gboolean in_sdtv, out_sdtv;
+
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+
+ /* in-place conversion requires identical frame dimensions */
+ if (GST_VIDEO_INFO_WIDTH (in_info) != GST_VIDEO_INFO_WIDTH (out_info) ||
+ GST_VIDEO_INFO_HEIGHT (in_info) != GST_VIDEO_INFO_HEIGHT (out_info))
+ goto invalid_caps;
+
+ in_sdtv = in_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
+ out_sdtv = out_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
+
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_ARGB:
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
+ case GST_VIDEO_FORMAT_ARGB:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = transform_argb_bgra;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_ABGR:
+ alpha->process = transform_argb_abgr;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_RGBA:
+ alpha->process = transform_argb_rgba;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ /* RGB->YUV conversion uses the *output* colorimetry */
+ alpha->process = transform_argb_ayuv;
+ alpha->matrix =
+ out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv;
+ break;
+ default:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ }
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ alpha->process = transform_bgra_argb;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_ABGR:
+ alpha->process = transform_bgra_abgr;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_RGBA:
+ alpha->process = transform_bgra_rgba;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ alpha->process = transform_bgra_ayuv;
+ alpha->matrix =
+ out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv;
+ break;
+ default:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ }
+ break;
+ case GST_VIDEO_FORMAT_ABGR:
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
+ case GST_VIDEO_FORMAT_ABGR:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_RGBA:
+ alpha->process = transform_abgr_rgba;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ alpha->process = transform_abgr_argb;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = transform_abgr_bgra;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ alpha->process = transform_abgr_ayuv;
+ alpha->matrix =
+ out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv;
+ break;
+ default:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ }
+ break;
+ case GST_VIDEO_FORMAT_RGBA:
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
+ case GST_VIDEO_FORMAT_RGBA:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ alpha->process = transform_rgba_argb;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_ABGR:
+ alpha->process = transform_rgba_abgr;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = transform_rgba_bgra;
+ alpha->matrix = NULL;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ alpha->process = transform_rgba_ayuv;
+ alpha->matrix =
+ out_sdtv ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
+ cog_rgb_to_ycbcr_matrix_8bit_hdtv;
+ break;
+ default:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ }
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ switch (GST_VIDEO_INFO_FORMAT (out_info)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ if (in_sdtv == out_sdtv) {
+ /* same colorimetry: identity (NULL matrix) — though passthrough
+ * is selected below in this case anyway */
+ alpha->process = transform_ayuv_ayuv;
+ alpha->matrix = NULL;
+ } else {
+ alpha->process = transform_ayuv_ayuv;
+ alpha->matrix =
+ out_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+ cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit;
+ }
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ /* YUV->RGB conversion uses the *input* colorimetry */
+ alpha->process = transform_ayuv_argb;
+ alpha->matrix =
+ in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
+ cog_ycbcr_to_rgb_matrix_8bit_hdtv;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ alpha->process = transform_ayuv_bgra;
+ alpha->matrix =
+ in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
+ cog_ycbcr_to_rgb_matrix_8bit_hdtv;
+ break;
+ case GST_VIDEO_FORMAT_ABGR:
+ alpha->process = transform_ayuv_abgr;
+ alpha->matrix =
+ in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
+ cog_ycbcr_to_rgb_matrix_8bit_hdtv;
+ break;
+ case GST_VIDEO_FORMAT_RGBA:
+ alpha->process = transform_ayuv_rgba;
+ alpha->matrix =
+ in_sdtv ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
+ cog_ycbcr_to_rgb_matrix_8bit_hdtv;
+ break;
+ default:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ }
+ break;
+ default:
+ alpha->process = NULL;
+ alpha->matrix = NULL;
+ break;
+ }
+
+ /* identical formats: let basetransform pass buffers through untouched
+ * (transform_ip_on_passthrough is FALSE, so process is never called) */
+ if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_INFO_FORMAT (out_info)
+ && in_sdtv == out_sdtv)
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter), TRUE);
+ else if (!alpha->process)
+ goto no_process;
+
+ return TRUE;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (alpha, "incomplete or invalid caps");
+ return FALSE;
+ }
+no_process:
+ {
+ GST_DEBUG_OBJECT (alpha, "could not find process function");
+ return FALSE;
+ }
+}
+
+/* GstVideoFilter::transform_frame_ip — applies the selected conversion
+ * to the frame in place. Returns GST_FLOW_NOT_NEGOTIATED if set_info()
+ * did not select a process function. */
+static GstFlowReturn
+gst_alpha_color_transform_frame_ip (GstVideoFilter * filter,
+ GstVideoFrame * frame)
+{
+ GstAlphaColor *alpha = GST_ALPHA_COLOR (filter);
+
+ if (G_UNLIKELY (!alpha->process))
+ goto not_negotiated;
+
+ /* Transform in place */
+ alpha->process (frame, alpha->matrix);
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (alpha, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
+
+/* Plugin entry point: registers the single "alphacolor" element.
+ * NOTE(review): gstalpha.c uses the GST_ELEMENT_REGISTER macro while
+ * this uses gst_element_register directly — presumably both work, but
+ * the styles could be unified. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ return gst_element_register (plugin, "alphacolor", GST_RANK_NONE,
+ GST_TYPE_ALPHA_COLOR);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ alphacolor,
+ "RGBA from/to AYUV colorspace conversion preserving the alpha channel",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/alpha/gstalphacolor.h b/gst/alpha/gstalphacolor.h
new file mode 100644
index 0000000000..bc397346ec
--- /dev/null
+++ b/gst/alpha/gstalphacolor.h
@@ -0,0 +1,42 @@
+/* GStreamer alphacolor element
+ * Copyright (C) 2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _GST_ALPHA_COLOR_H_
+#define _GST_ALPHA_COLOR_H_
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+/* C++ linkage guards, consistent with gstalpha.h */
+G_BEGIN_DECLS
+
+#define GST_TYPE_ALPHA_COLOR (gst_alpha_color_get_type ())
+
+G_DECLARE_FINAL_TYPE (GstAlphaColor, gst_alpha_color,
+ GST, ALPHA_COLOR,
+ GstVideoFilter)
+
+struct _GstAlphaColor
+{
+ GstVideoFilter parent;
+
+ /*< private >*/
+ /* in-place conversion selected in set_info(); NULL until negotiated */
+ void (*process) (GstVideoFrame * frame, const gint * matrix);
+
+ /* 8.8 fixed-point conversion matrix for process(); NULL for pure
+ * byte-reorder conversions */
+ const gint *matrix;
+};
+
+G_END_DECLS
+
+#endif /* _GST_ALPHA_COLOR_H_ */
diff --git a/gst/alpha/meson.build b/gst/alpha/meson.build
new file mode 100644
index 0000000000..3caf431e18
--- /dev/null
+++ b/gst/alpha/meson.build
@@ -0,0 +1,19 @@
+# alpha plugin: needs libm for sqrt()/tan() in the chroma-key math
+gstalpha = library('gstalpha', 'gstalpha.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstvideo_dep, gst_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstalpha, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstalpha]
+
+# alphacolor plugin: integer-only conversions, no libm needed
+gstalphacolor = library('gstalphacolor', 'gstalphacolor.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstvideo_dep, gst_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstalphacolor, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstalphacolor]
diff --git a/gst/apetag/gstapedemux.c b/gst/apetag/gstapedemux.c
new file mode 100644
index 0000000000..425bdb3f31
--- /dev/null
+++ b/gst/apetag/gstapedemux.c
@@ -0,0 +1,446 @@
+/* GStreamer APEv1/2 tag reader
+ * Copyright (C) 2004 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-apedemux
+ * @title: apedemux
+ *
+ * apedemux accepts data streams with APE tags at the start or at the end
+ * (or both). The mime type of the data between the tag blocks is detected
+ * using typefind functions, and the appropriate output mime type set on
+ * outgoing buffers.
+ *
+ * The element is only able to read APE tags at the end of a stream from
+ * a seekable stream, ie. when get_range mode is supported by the upstream
+ * elements. If get_range operation is available, apedemux makes it available
+ * downstream. This means that elements which require get_range mode, such as
+ * wavparse or musepackdec, can operate on files containing APE tag
+ * information.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -t filesrc location=file.mpc ! apedemux ! fakesink
+ * ]| This pipeline should read any available APE tag information and output it.
+ * The contents of the file inside the APE tag regions should be detected, and
+ * the appropriate mime type set on buffers produced from apedemux.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/gst-i18n-plugin.h>
+#include <gst/pbutils/pbutils.h>
+
+#include "gstapedemux.h"
+
+#include <stdio.h>
+#include <string.h>
+
+#define APE_VERSION_MAJOR(ver) ((ver)/1000)
+
+GST_DEBUG_CATEGORY_STATIC (apedemux_debug);
+#define GST_CAT_DEFAULT (apedemux_debug)
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-apetag")
+ );
+
+static gboolean gst_ape_demux_identify_tag (GstTagDemux * demux,
+ GstBuffer * buffer, gboolean start_tag, guint * tag_size);
+static GstTagDemuxResult gst_ape_demux_parse_tag (GstTagDemux * demux,
+ GstBuffer * buffer, gboolean start_tag, guint * tag_size,
+ GstTagList ** tags);
+
+G_DEFINE_TYPE (GstApeDemux, gst_ape_demux, GST_TYPE_TAG_DEMUX);
+GST_ELEMENT_REGISTER_DEFINE (apedemux, "apedemux", GST_RANK_PRIMARY,
+ GST_TYPE_APE_DEMUX);
+
+/* Class initialisation: registers metadata, the sink pad template and the
+ * GstTagDemux vfuncs (identify_tag / parse_tag). */
+static void
+gst_ape_demux_class_init (GstApeDemuxClass * klass)
+{
+  GstElementClass *element_class;
+  GstTagDemuxClass *tagdemux_class;
+
+  GST_DEBUG_CATEGORY_INIT (apedemux_debug, "apedemux", 0,
+      "GStreamer APE tag demuxer");
+
+  tagdemux_class = GST_TAG_DEMUX_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
+
+  gst_element_class_set_static_metadata (element_class, "APE tag demuxer",
+      "Codec/Demuxer/Metadata",
+      "Read and output APE tags while demuxing the contents",
+      "Tim-Philipp Müller <tim centricular net>");
+
+  gst_element_class_add_static_pad_template (element_class, &sink_factory);
+
+  tagdemux_class->identify_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_identify_tag);
+  tagdemux_class->parse_tag = GST_DEBUG_FUNCPTR (gst_ape_demux_parse_tag);
+
+  /* no need for a merge function, the default behaviour to prefer start
+   * tags (APEv2) over end tags (usually APEv1, but could theoretically also
+   * be APEv2) is fine */
+
+  /* 32 bytes: the size of an "APETAGEX" header/footer block, the minimum
+   * needed by identify_tag() to recognise a tag */
+  tagdemux_class->min_start_size = 32;
+  tagdemux_class->min_end_size = 32;
+}
+
+/* Instance initialisation: all state lives in the GstTagDemux base class. */
+static void
+gst_ape_demux_init (GstApeDemux * apedemux)
+{
+  /* nothing to do here */
+}
+
+/* Mapping from APE tag item keys (matched case-insensitively) to
+ * GStreamer tag names; several spellings map to the same tag. */
+static const struct _GstApeDemuxTagTableEntry
+{
+  const gchar *ape_tag;
+  const gchar *gst_tag;
+} tag_table[] = {
+  {"replaygain_track_gain", GST_TAG_TRACK_GAIN},
+  {"replaygain_track_peak", GST_TAG_TRACK_PEAK},
+  {"replaygain_album_gain", GST_TAG_ALBUM_GAIN},
+  {"replaygain_album_peak", GST_TAG_ALBUM_PEAK},
+  {"title", GST_TAG_TITLE},
+  {"artist", GST_TAG_ARTIST},
+  {"album", GST_TAG_ALBUM},
+  {"composer", GST_TAG_COMPOSER},
+  {"comment", GST_TAG_COMMENT},
+  {"comments", GST_TAG_COMMENT},
+  {"copyright", GST_TAG_COPYRIGHT},
+  {"genre", GST_TAG_GENRE},
+  {"isrc", GST_TAG_ISRC},
+  {"disc", GST_TAG_ALBUM_VOLUME_NUMBER},
+  {"disk", GST_TAG_ALBUM_VOLUME_NUMBER},
+  {"discnumber", GST_TAG_ALBUM_VOLUME_NUMBER},
+  {"disknumber", GST_TAG_ALBUM_VOLUME_NUMBER},
+  {"track", GST_TAG_TRACK_NUMBER},
+  {"tracknumber", GST_TAG_TRACK_NUMBER},
+  {"year", GST_TAG_DATE},
+  {"file", GST_TAG_LOCATION}
+};
+
+/* Looks up @ape_tag (case-insensitively) in tag_table.
+ * On success fills *gst_tag / *gst_tag_type and returns TRUE;
+ * returns FALSE (outputs untouched) for unknown keys. */
+static gboolean
+ape_demux_get_gst_tag_from_tag (const gchar * ape_tag,
+    const gchar ** gst_tag, GType * gst_tag_type)
+{
+  guint idx;
+
+  for (idx = 0; idx < G_N_ELEMENTS (tag_table); ++idx) {
+    if (g_ascii_strcasecmp (tag_table[idx].ape_tag, ape_tag) != 0)
+      continue;
+
+    *gst_tag = tag_table[idx].gst_tag;
+    *gst_tag_type = gst_tag_get_type (tag_table[idx].gst_tag);
+    GST_LOG ("Mapped APE tag '%s' to GStreamer tag '%s'", ape_tag, *gst_tag);
+    return TRUE;
+  }
+
+  GST_WARNING ("Could not map APE tag '%s' to a GStreamer tag", ape_tag);
+  return FALSE;
+}
+
+/* Parses the APE tag items in @data/@size into a new GstTagList.
+ *
+ * The area may still include the 32-byte "APETAGEX" header and/or footer;
+ * both are stripped first. Each item is laid out as: 32-bit LE value
+ * length, 32-bit flags (ignored here), NUL-terminated item key starting
+ * at offset 8, then the value bytes.
+ *
+ * Returns a new (possibly empty) taglist; the caller owns it.
+ */
+static GstTagList *
+ape_demux_parse_tags (const guint8 * data, gint size)
+{
+  GstTagList *taglist = gst_tag_list_new_empty ();
+
+  GST_LOG ("Reading tags from chunk of size %u bytes", size);
+
+  /* get rid of header/footer */
+  if (size >= 32 && memcmp (data, "APETAGEX", 8) == 0) {
+    data += 32;
+    size -= 32;
+  }
+  if (size > 32 && memcmp (data + size - 32, "APETAGEX", 8) == 0) {
+    size -= 32;
+  }
+
+  /* read actual tags - at least 10 bytes for tag header */
+  while (size >= 10) {
+    guint len, n = 8;
+    gchar *tag, *val;
+    const gchar *gst_tag;
+    GType gst_tag_type;
+
+    /* find tag type and size: value length at offset 0; advance n past
+     * the NUL-terminated key that starts at offset 8 (flags at bytes
+     * 4-7 are not used) */
+    len = GST_READ_UINT32_LE (data);
+    while (n < size && data[n] != 0x0)
+      n++;
+    if (n == size)
+      break;
+    g_assert (data[n] == 0x0);
+    n++;
+    /* bail out if the declared value length overruns the remaining data */
+    if (size - n < len)
+      break;
+
+    /* If the tag is empty, skip to the next one */
+    if (len == 0)
+      goto next_tag;
+
+    /* read: key is the n-9 bytes between offset 8 and the NUL */
+    tag = g_strndup ((gchar *) data + 8, n - 9);
+    val = g_strndup ((gchar *) data + n, len);
+
+    GST_LOG ("tag [%s], val[%s]", tag, val);
+
+    /* special-case 'media' tag, could be e.g. "CD 1/2" */
+    if (g_ascii_strcasecmp (tag, "media") == 0) {
+      gchar *sp, *sp2;
+
+      g_free (tag);
+      tag = g_strdup ("discnumber");
+      /* get rid of the medium in front: keep only the text after the
+       * last space, e.g. "CD 1/2" -> "1/2" */
+      sp = strchr (val, ' ');
+      while (sp != NULL && (sp2 = strchr (sp + 1, ' ')) != NULL)
+        sp = sp2;
+      if (sp) {
+        memmove (val, sp + 1, strlen (sp + 1) + 1);
+      }
+    }
+
+    if (ape_demux_get_gst_tag_from_tag (tag, &gst_tag, &gst_tag_type)) {
+      GValue v = { 0, };
+
+      /* convert the string value to the GType of the mapped tag; if
+       * conversion fails, v stays zero-typed and the item is dropped */
+      switch (gst_tag_type) {
+        case G_TYPE_INT:{
+          gint v_int;
+
+          if (sscanf (val, "%d", &v_int) == 1) {
+            g_value_init (&v, G_TYPE_INT);
+            g_value_set_int (&v, v_int);
+          }
+          break;
+        }
+        case G_TYPE_UINT:{
+          guint v_uint, count;
+
+          if (strcmp (gst_tag, GST_TAG_TRACK_NUMBER) == 0) {
+            gint dummy;
+
+            if (sscanf (val, "%u", &v_uint) == 1 && v_uint > 0) {
+              g_value_init (&v, G_TYPE_UINT);
+              g_value_set_uint (&v, v_uint);
+            }
+            GST_LOG ("checking for track count: %s", val);
+            /* might be 0/N or -1/N to specify that there is only a count */
+            if (sscanf (val, "%d/%u", &dummy, &count) == 2 && count > 0) {
+              gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND,
+                  GST_TAG_TRACK_COUNT, count, NULL);
+            }
+          } else if (strcmp (gst_tag, GST_TAG_ALBUM_VOLUME_NUMBER) == 0) {
+            gint dummy;
+
+            if (sscanf (val, "%u", &v_uint) == 1 && v_uint > 0) {
+              g_value_init (&v, G_TYPE_UINT);
+              g_value_set_uint (&v, v_uint);
+            }
+            GST_LOG ("checking for volume count: %s", val);
+            /* might be 0/N or -1/N to specify that there is only a count */
+            if (sscanf (val, "%d/%u", &dummy, &count) == 2 && count > 0) {
+              gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND,
+                  GST_TAG_ALBUM_VOLUME_COUNT, count, NULL);
+            }
+          } else if (sscanf (val, "%u", &v_uint) == 1) {
+            g_value_init (&v, G_TYPE_UINT);
+            g_value_set_uint (&v, v_uint);
+          }
+          break;
+        }
+        case G_TYPE_STRING:{
+          g_value_init (&v, G_TYPE_STRING);
+          g_value_set_string (&v, val);
+          break;
+        }
+        case G_TYPE_DOUBLE:{
+          gdouble v_double;
+          gchar *endptr;
+
+          /* floating point strings can be "4,123" or "4.123" depending on
+           * the locale. We need to be able to parse and read either version
+           * no matter what our current locale is */
+          g_strdelimit (val, ",", '.');
+          v_double = g_ascii_strtod (val, &endptr);
+          if (endptr != val) {
+            g_value_init (&v, G_TYPE_DOUBLE);
+            g_value_set_double (&v, v_double);
+          }
+
+          break;
+        }
+        default:{
+          if (gst_tag_type == G_TYPE_DATE) {
+            gint v_int;
+
+            /* APE stores only a year; use January 1st of that year */
+            if (sscanf (val, "%d", &v_int) == 1) {
+              GDate *date = g_date_new_dmy (1, 1, v_int);
+
+              g_value_init (&v, G_TYPE_DATE);
+              g_value_take_boxed (&v, date);
+            }
+          } else {
+            GST_WARNING ("Unhandled tag type '%s' for tag '%s'",
+                g_type_name (gst_tag_type), gst_tag);
+          }
+          break;
+        }
+      }
+      if (G_VALUE_TYPE (&v) != 0) {
+        gst_tag_list_add_values (taglist, GST_TAG_MERGE_APPEND,
+            gst_tag, &v, NULL);
+        g_value_unset (&v);
+      }
+    }
+    GST_DEBUG ("Read tag %s: %s", tag, val);
+    g_free (tag);
+    g_free (val);
+
+    /* move data pointer past this item (header + key + NUL + value) */
+  next_tag:
+    size -= len + n;
+    data += len + n;
+  }
+
+  GST_DEBUG ("Taglist: %" GST_PTR_FORMAT, taglist);
+  return taglist;
+}
+
+/* GstTagDemux::identify_tag: checks @buffer for an "APETAGEX" marker and,
+ * if found, stores the total tag size (incl. 32-byte header) in *tag_size. */
+static gboolean
+gst_ape_demux_identify_tag (GstTagDemux * demux, GstBuffer * buffer,
+    gboolean start_tag, guint * tag_size)
+{
+  GstMapInfo info;
+  gboolean found;
+
+  gst_buffer_map (buffer, &info, GST_MAP_READ);
+
+  found = (memcmp (info.data, "APETAGEX", 8) == 0);
+  if (found) {
+    /* the size field at offset 12 excludes the 32-byte header itself,
+     * so add it to get the full tag size */
+    *tag_size = GST_READ_UINT32_LE (info.data + 12) + 32;
+  } else {
+    GST_DEBUG_OBJECT (demux, "No APETAGEX marker at %s - not an APE file",
+        (start_tag) ? "start" : "end");
+  }
+
+  gst_buffer_unmap (buffer, &info);
+
+  return found;
+}
+
+/* GstTagDemux::parse_tag: parses an APEv1/v2 tag block from @buffer.
+ *
+ * Detects header/footer markers itself instead of trusting the tag's flag
+ * bits, adjusts *tag_size for header-less APEv1 end tags, extracts the tag
+ * items into *tags and adds a container-format tag.
+ *
+ * Fix: the original returned early on the broken-tag and unsupported-
+ * version paths without gst_buffer_unmap(), leaking the buffer mapping;
+ * all exit paths now funnel through a single unmap.
+ */
+static GstTagDemuxResult
+gst_ape_demux_parse_tag (GstTagDemux * demux, GstBuffer * buffer,
+    gboolean start_tag, guint * tag_size, GstTagList ** tags)
+{
+  guint8 *data;
+  guint8 *footer;
+  gboolean have_header;
+  gboolean end_tag = !start_tag;
+  GstCaps *sink_caps;
+  guint version, footer_size;
+  GstMapInfo map;
+  gsize size;
+  GstTagDemuxResult res = GST_TAG_DEMUX_RESULT_OK;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  data = map.data;
+  size = map.size;
+
+  GST_LOG_OBJECT (demux, "Parsing buffer of size %" G_GSIZE_FORMAT, size);
+
+  footer = data + size - 32;
+
+  GST_LOG_OBJECT (demux, "Checking for footer at offset 0x%04x",
+      (guint) (footer - data));
+  if (footer > data && memcmp (footer, "APETAGEX", 8) == 0) {
+    GST_DEBUG_OBJECT (demux, "Found footer");
+    footer_size = 32;
+  } else {
+    GST_DEBUG_OBJECT (demux, "No footer");
+    footer_size = 0;
+  }
+
+  /* APE tags at the end must have a footer */
+  if (end_tag && footer_size == 0) {
+    GST_WARNING_OBJECT (demux, "Tag at end of file without footer!");
+    res = GST_TAG_DEMUX_RESULT_BROKEN_TAG;
+    goto done;
+  }
+
+  /* don't trust the header/footer flags, better detect them ourselves */
+  have_header = (memcmp (data, "APETAGEX", 8) == 0);
+
+  if (start_tag && !have_header) {
+    GST_DEBUG_OBJECT (demux, "Tag at beginning of file without header!");
+    res = GST_TAG_DEMUX_RESULT_BROKEN_TAG;
+    goto done;
+  }
+
+  if (end_tag && !have_header) {
+    GST_DEBUG_OBJECT (demux, "Tag at end of file has no header (APEv1)");
+    *tag_size -= 32;            /* adjust tag size */
+  }
+
+  /* version field lives at offset 8 of whichever marker block we have */
+  if (have_header) {
+    version = GST_READ_UINT32_LE (data + 8);
+  } else {
+    version = GST_READ_UINT32_LE (footer + 8);
+  }
+
+  /* skip header */
+  if (have_header) {
+    data += 32;
+  }
+
+  GST_DEBUG_OBJECT (demux, "APE tag with version %u, size %u at offset 0x%08"
+      G_GINT64_MODIFIER "x", version, *tag_size,
+      GST_BUFFER_OFFSET (buffer) + ((have_header) ? 0 : 32));
+
+  if (APE_VERSION_MAJOR (version) != 1 && APE_VERSION_MAJOR (version) != 2) {
+    GST_WARNING ("APE tag is version %u.%03u, but decoder only supports "
+        "v1 or v2. Ignoring.", APE_VERSION_MAJOR (version), version % 1000);
+    goto done;
+  }
+
+  *tags = ape_demux_parse_tags (data, *tag_size - footer_size);
+
+  sink_caps = gst_static_pad_template_get_caps (&sink_factory);
+  gst_pb_utils_add_codec_description_to_tag_list (*tags,
+      GST_TAG_CONTAINER_FORMAT, sink_caps);
+  gst_caps_unref (sink_caps);
+
+done:
+  gst_buffer_unmap (buffer, &map);
+
+  return res;
+}
+
+/* Plugin entry point: registers the apedemux element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (apedemux, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    apetag,
+    "APEv1/2 tag reader",
+    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/apetag/gstapedemux.h b/gst/apetag/gstapedemux.h
new file mode 100644
index 0000000000..20fc98bf2a
--- /dev/null
+++ b/gst/apetag/gstapedemux.h
@@ -0,0 +1,54 @@
+/* GStreamer APEv1/2 tag reader
+ * Copyright (C) 2004 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_APE_DEMUX_H__
+#define __GST_APE_DEMUX_H__
+
+#include <gst/tag/gsttagdemux.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject boilerplate for the GstApeDemux type */
+#define GST_TYPE_APE_DEMUX              (gst_ape_demux_get_type())
+#define GST_APE_DEMUX(obj)              (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_APE_DEMUX,GstApeDemux))
+#define GST_APE_DEMUX_CLASS(klass)      (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_APE_DEMUX,GstApeDemuxClass))
+#define GST_IS_APE_DEMUX(obj)           (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_APE_DEMUX))
+#define GST_IS_APE_DEMUX_CLASS(klass)   (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_APE_DEMUX))
+
+typedef struct _GstApeDemux GstApeDemux;
+typedef struct _GstApeDemuxClass GstApeDemuxClass;
+
+/* Instance structure: no state of its own, everything is handled by
+ * the GstTagDemux base class. */
+struct _GstApeDemux
+{
+  GstTagDemux tagdemux;
+};
+
+struct _GstApeDemuxClass
+{
+  GstTagDemuxClass parent_class;
+};
+
+GType gst_ape_demux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (apedemux);
+
+G_END_DECLS
+
+#endif /* __GST_APE_DEMUX_H__ */
+
diff --git a/gst/apetag/meson.build b/gst/apetag/meson.build
new file mode 100644
index 0000000000..6b2885d52e
--- /dev/null
+++ b/gst/apetag/meson.build
@@ -0,0 +1,9 @@
+# apetag plugin: needs gsttag (GstTagDemux base class) and gstpbutils
+# (codec description helper)
+gstapetag = library('gstapetag', 'gstapedemux.c',
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc, libsinc],
+  dependencies : [gstpbutils_dep, gsttag_dep, gst_dep],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstapetag, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstapetag]
diff --git a/gst/audiofx/audioamplify.c b/gst/audiofx/audioamplify.c
new file mode 100644
index 0000000000..8228ebdbdc
--- /dev/null
+++ b/gst/audiofx/audioamplify.c
@@ -0,0 +1,480 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007 Sebastian Dröge <slomo@circular-chaos.org>
+ * Copyright (C) 2006 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-audioamplify
+ * @title: audioamplify
+ *
+ * Amplifies an audio stream by a given factor and allows the selection of different clipping modes.
+ * The difference between the clipping modes is best evaluated by testing.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc wave=saw ! audioamplify amplification=1.5 ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audioamplify amplification=1.5 clipping-method=wrap-negative ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=saw ! audioconvert ! audioamplify amplification=1.5 clipping-method=wrap-positive ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audioamplify.h"
+
+#define GST_CAT_DEFAULT gst_audio_amplify_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* Filter signals and args */
+enum
+{
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+/* Property IDs */
+enum
+{
+  PROP_0,
+  PROP_AMPLIFICATION,
+  PROP_CLIPPING_METHOD
+};
+
+/* Clipping methods; these values are registered as the
+ * GstAudioAmplifyClippingMethod GEnum below */
+enum
+{
+  METHOD_CLIP = 0,
+  METHOD_WRAP_NEGATIVE,
+  METHOD_WRAP_POSITIVE,
+  METHOD_NOCLIP,
+  NUM_METHODS
+};
+
+#define GST_TYPE_AUDIO_AMPLIFY_CLIPPING_METHOD (gst_audio_amplify_clipping_method_get_type ())
+/* Lazily registers the GstAudioAmplifyClippingMethod enum type.
+ *
+ * Fix: the original used an unsynchronised 'static GType gtype = 0;
+ * if (gtype == 0) ...' check, which races if two threads hit the first
+ * call concurrently (g_enum_register_static would run twice). Use the
+ * g_once_init_enter/leave pattern GLib recommends for one-shot init. */
+static GType
+gst_audio_amplify_clipping_method_get_type (void)
+{
+  static gsize gtype = 0;
+
+  if (g_once_init_enter (&gtype)) {
+    static const GEnumValue values[] = {
+      {METHOD_CLIP, "Normal clipping (default)", "clip"},
+      {METHOD_WRAP_NEGATIVE,
+          "Push overdriven values back from the opposite side",
+          "wrap-negative"},
+      {METHOD_WRAP_POSITIVE, "Push overdriven values back from the same side",
+          "wrap-positive"},
+      {METHOD_NOCLIP, "No clipping", "none"},
+      {0, NULL, NULL}
+    };
+    GType tmp =
+        g_enum_register_static ("GstAudioAmplifyClippingMethod", values);
+
+    g_once_init_leave (&gtype, tmp);
+  }
+  return (GType) gtype;
+}
+
+#define ALLOWED_CAPS \
+ "audio/x-raw," \
+ " format=(string) {S8,"GST_AUDIO_NE(S16)","GST_AUDIO_NE(S32)"," \
+ GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}," \
+ " rate=(int)[1,MAX]," \
+ " channels=(int)[1,MAX], " \
+ " layout=(string) {interleaved, non-interleaved}"
+
+G_DEFINE_TYPE (GstAudioAmplify, gst_audio_amplify, GST_TYPE_AUDIO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audioamplify, "audioamplify",
+ GST_RANK_NONE, GST_TYPE_AUDIO_AMPLIFY);
+
+static gboolean gst_audio_amplify_set_process_function (GstAudioAmplify *
+ filter, gint clipping, GstAudioFormat format);
+static void gst_audio_amplify_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_amplify_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_audio_amplify_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+static GstFlowReturn gst_audio_amplify_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+
+/* Per-type sample limits used by the generated clipping functions */
+#define MIN_gint8 G_MININT8
+#define MAX_gint8 G_MAXINT8
+#define MIN_gint16 G_MININT16
+#define MAX_gint16 G_MAXINT16
+#define MIN_gint32 G_MININT32
+#define MAX_gint32 G_MAXINT32
+
+/* Generates the four in-place processing functions (clip, wrap-negative,
+ * wrap-positive, no-clip) for an integer sample type. 'largetype' must be
+ * wide enough to hold the amplified value without overflow (e.g. gint64
+ * for gint32 samples). */
+#define MAKE_INT_FUNCS(type,largetype) \
+static void \
+gst_audio_amplify_transform_##type##_clip (GstAudioAmplify * filter, \
+    void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) { \
+    largetype val = *d * filter->amplification; \
+    *d++ = CLAMP (val, MIN_##type, MAX_##type); \
+  } \
+} \
+static void \
+gst_audio_amplify_transform_##type##_wrap_negative (GstAudioAmplify * filter, \
+    void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) { \
+    largetype val = *d * filter->amplification; \
+    if (val > MAX_##type) \
+      val = MIN_##type + (val - MIN_##type) % ((largetype) MAX_##type + 1 - \
+          MIN_##type); \
+    else if (val < MIN_##type) \
+      val = MAX_##type - (MAX_##type - val) % ((largetype) MAX_##type + 1 - \
+          MIN_##type); \
+    *d++ = val; \
+  } \
+} \
+static void \
+gst_audio_amplify_transform_##type##_wrap_positive (GstAudioAmplify * filter, \
+    void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) { \
+    largetype val = *d * filter->amplification; \
+    do { \
+      if (val > MAX_##type) \
+        val = MAX_##type - (val - MAX_##type); \
+      else if (val < MIN_##type) \
+        val = MIN_##type + (MIN_##type - val); \
+      else \
+        break; \
+    } while (1); \
+    *d++ = val; \
+  } \
+} \
+static void \
+gst_audio_amplify_transform_##type##_noclip (GstAudioAmplify * filter, \
+    void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) \
+    *d++ *= filter->amplification; \
+}
+
+/* Float counterpart of MAKE_INT_FUNCS: generates the four processing
+ * functions for a floating-point sample type; the nominal range is
+ * [-1.0, +1.0], so no 'largetype' is needed. */
+#define MAKE_FLOAT_FUNCS(type) \
+static void \
+gst_audio_amplify_transform_##type##_clip (GstAudioAmplify * filter, \
+    void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) { \
+    type val = *d* filter->amplification; \
+    *d++ = CLAMP (val, -1.0, +1.0); \
+  } \
+} \
+static void \
+gst_audio_amplify_transform_##type##_wrap_negative (GstAudioAmplify * \
+    filter, void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) { \
+    type val = *d * filter->amplification; \
+    do { \
+      if (val > 1.0) \
+        val = -1.0 + (val - 1.0); \
+      else if (val < -1.0) \
+        val = 1.0 - (1.0 - val); \
+      else \
+        break; \
+    } while (1); \
+    *d++ = val; \
+  } \
+} \
+static void \
+gst_audio_amplify_transform_##type##_wrap_positive (GstAudioAmplify * filter, \
+    void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) { \
+    type val = *d* filter->amplification; \
+    do { \
+      if (val > 1.0) \
+        val = 1.0 - (val - 1.0); \
+      else if (val < -1.0) \
+        val = -1.0 + (-1.0 - val); \
+      else \
+        break; \
+    } while (1); \
+    *d++ = val; \
+  } \
+} \
+static void \
+gst_audio_amplify_transform_##type##_noclip (GstAudioAmplify * filter, \
+    void * data, guint num_samples) \
+{ \
+  type *d = data; \
+  \
+  while (num_samples--) \
+    *d++ *= filter->amplification; \
+}
+
+/* Instantiate the processing functions for all supported sample formats */
+/* *INDENT-OFF* */
+MAKE_INT_FUNCS (gint8,gint)
+MAKE_INT_FUNCS (gint16,gint)
+MAKE_INT_FUNCS (gint32,gint64)
+MAKE_FLOAT_FUNCS (gfloat)
+MAKE_FLOAT_FUNCS (gdouble)
+/* *INDENT-ON* */
+
+/* GObject vmethod implementations */
+
+/* Class initialisation: installs the amplification and clipping-method
+ * properties, registers pad templates for all supported raw-audio formats
+ * and wires up the GstBaseTransform / GstAudioFilter vfuncs. */
+static void
+gst_audio_amplify_class_init (GstAudioAmplifyClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstCaps *caps;
+
+  GST_DEBUG_CATEGORY_INIT (gst_audio_amplify_debug, "audioamplify", 0,
+      "audioamplify element");
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->set_property = gst_audio_amplify_set_property;
+  gobject_class->get_property = gst_audio_amplify_get_property;
+
+  /* controllable so the gain can be animated from a GstController */
+  g_object_class_install_property (gobject_class, PROP_AMPLIFICATION,
+      g_param_spec_float ("amplification", "Amplification",
+          "Factor of amplification", -G_MAXFLOAT, G_MAXFLOAT,
+          1.0,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstAudioAmplify:clipping-method
+   *
+   * Clipping method: clip mode set values higher than the maximum to the
+   * maximum. The wrap-negative mode pushes those values back from the
+   * opposite side, wrap-positive pushes them back from the same side.
+   *
+   **/
+  g_object_class_install_property (gobject_class, PROP_CLIPPING_METHOD,
+      g_param_spec_enum ("clipping-method", "Clipping method",
+          "Selects how to handle values higher than the maximum",
+          GST_TYPE_AUDIO_AMPLIFY_CLIPPING_METHOD, METHOD_CLIP,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_set_static_metadata (gstelement_class, "Audio amplifier",
+      "Filter/Effect/Audio",
+      "Amplifies an audio stream by a given factor",
+      "Sebastian Dröge <slomo@circular-chaos.org>");
+
+  caps = gst_caps_from_string (ALLOWED_CAPS);
+  gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+      caps);
+  gst_caps_unref (caps);
+
+  /* transform_ip must still run on passthrough buffers so controller
+   * values stay in sync (see transform_ip) - hence the explicit FALSE */
+  GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
+      GST_DEBUG_FUNCPTR (gst_audio_amplify_transform_ip);
+  GST_BASE_TRANSFORM_CLASS (klass)->transform_ip_on_passthrough = FALSE;
+
+  GST_AUDIO_FILTER_CLASS (klass)->setup =
+      GST_DEBUG_FUNCPTR (gst_audio_amplify_setup);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_AMPLIFY_CLIPPING_METHOD, 0);
+}
+
+/* Instance initialisation: unity gain, clip mode, S16 as a safe default
+ * until caps are negotiated (setup() re-selects the process function). */
+static void
+gst_audio_amplify_init (GstAudioAmplify * filter)
+{
+  filter->amplification = 1.0;
+  gst_audio_amplify_set_process_function (filter, METHOD_CLIP,
+      GST_AUDIO_FORMAT_S16);
+  gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
+  gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
+}
+
+/* Returns the processing function for the given (clipping method, sample
+ * format) pair, or NULL if the combination is not supported. */
+static GstAudioAmplifyProcessFunc
+gst_audio_amplify_process_function (gint clipping, GstAudioFormat format)
+{
+  /* lookup table over all generated MAKE_*_FUNCS variants; terminated
+   * by a NULL func entry */
+  static const struct process
+  {
+    GstAudioFormat format;
+    gint clipping;
+    GstAudioAmplifyProcessFunc func;
+  } process[] = {
+    {GST_AUDIO_FORMAT_F32, METHOD_CLIP,
+        gst_audio_amplify_transform_gfloat_clip},
+    {GST_AUDIO_FORMAT_F32, METHOD_WRAP_NEGATIVE,
+        gst_audio_amplify_transform_gfloat_wrap_negative},
+    {GST_AUDIO_FORMAT_F32, METHOD_WRAP_POSITIVE,
+        gst_audio_amplify_transform_gfloat_wrap_positive},
+    {GST_AUDIO_FORMAT_F32, METHOD_NOCLIP,
+        gst_audio_amplify_transform_gfloat_noclip},
+    {GST_AUDIO_FORMAT_F64, METHOD_CLIP,
+        gst_audio_amplify_transform_gdouble_clip},
+    {GST_AUDIO_FORMAT_F64, METHOD_WRAP_NEGATIVE,
+        gst_audio_amplify_transform_gdouble_wrap_negative},
+    {GST_AUDIO_FORMAT_F64, METHOD_WRAP_POSITIVE,
+        gst_audio_amplify_transform_gdouble_wrap_positive},
+    {GST_AUDIO_FORMAT_F64, METHOD_NOCLIP,
+        gst_audio_amplify_transform_gdouble_noclip},
+    {GST_AUDIO_FORMAT_S8, METHOD_CLIP,
+        gst_audio_amplify_transform_gint8_clip},
+    {GST_AUDIO_FORMAT_S8, METHOD_WRAP_NEGATIVE,
+        gst_audio_amplify_transform_gint8_wrap_negative},
+    {GST_AUDIO_FORMAT_S8, METHOD_WRAP_POSITIVE,
+        gst_audio_amplify_transform_gint8_wrap_positive},
+    {GST_AUDIO_FORMAT_S8, METHOD_NOCLIP,
+        gst_audio_amplify_transform_gint8_noclip},
+    {GST_AUDIO_FORMAT_S16, METHOD_CLIP,
+        gst_audio_amplify_transform_gint16_clip},
+    {GST_AUDIO_FORMAT_S16, METHOD_WRAP_NEGATIVE,
+        gst_audio_amplify_transform_gint16_wrap_negative},
+    {GST_AUDIO_FORMAT_S16, METHOD_WRAP_POSITIVE,
+        gst_audio_amplify_transform_gint16_wrap_positive},
+    {GST_AUDIO_FORMAT_S16, METHOD_NOCLIP,
+        gst_audio_amplify_transform_gint16_noclip},
+    {GST_AUDIO_FORMAT_S32, METHOD_CLIP,
+        gst_audio_amplify_transform_gint32_clip},
+    {GST_AUDIO_FORMAT_S32, METHOD_WRAP_NEGATIVE,
+        gst_audio_amplify_transform_gint32_wrap_negative},
+    {GST_AUDIO_FORMAT_S32, METHOD_WRAP_POSITIVE,
+        gst_audio_amplify_transform_gint32_wrap_positive},
+    {GST_AUDIO_FORMAT_S32, METHOD_NOCLIP,
+        gst_audio_amplify_transform_gint32_noclip},
+    {0, 0, NULL}
+  };
+  const struct process *p;
+
+  for (p = process; p->func; p++)
+    if (p->format == format && p->clipping == clipping)
+      return p->func;
+  return NULL;
+}
+
+/* Selects and stores the processing function for the given clipping mode
+ * and sample format. Returns FALSE (state untouched) if the combination
+ * is unsupported. */
+static gboolean
+gst_audio_amplify_set_process_function (GstAudioAmplify * filter, gint
+    clipping_method, GstAudioFormat format)
+{
+  GstAudioAmplifyProcessFunc func;
+
+  /* set processing function */
+
+  func = gst_audio_amplify_process_function (clipping_method, format);
+  if (func == NULL) {
+    GST_DEBUG ("wrong format");
+    return FALSE;
+  }
+
+  filter->process = func;
+  filter->clipping_method = clipping_method;
+  filter->format = format;
+
+  return TRUE;
+}
+
+/* GObject property setter for amplification / clipping-method. */
+static void
+gst_audio_amplify_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAudioAmplify *self = GST_AUDIO_AMPLIFY (object);
+
+  switch (prop_id) {
+    case PROP_AMPLIFICATION:{
+      gfloat amplification = g_value_get_float (value);
+
+      self->amplification = amplification;
+      /* unity gain is a no-op: let basetransform pass buffers through */
+      gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (self),
+          amplification == 1.0);
+      break;
+    }
+    case PROP_CLIPPING_METHOD:
+      gst_audio_amplify_set_process_function (self,
+          g_value_get_enum (value), self->format);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter for amplification / clipping-method. */
+static void
+gst_audio_amplify_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAudioAmplify *self = GST_AUDIO_AMPLIFY (object);
+
+  switch (prop_id) {
+    case PROP_AMPLIFICATION:
+      g_value_set_float (value, self->amplification);
+      break;
+    case PROP_CLIPPING_METHOD:
+      g_value_set_enum (value, self->clipping_method);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* Called on caps negotiation: re-selects the processing function for the
+ * negotiated sample format, keeping the current clipping method. */
+static gboolean
+gst_audio_amplify_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+  GstAudioAmplify *self = GST_AUDIO_AMPLIFY (base);
+  GstAudioFormat format = GST_AUDIO_INFO_FORMAT (info);
+
+  return gst_audio_amplify_set_process_function (self, self->clipping_method,
+      format);
+}
+
+/* GstBaseTransform vmethod implementations */
+
+/* In-place transform: syncs controlled properties to the stream time and
+ * runs the selected process() function over the whole mapped buffer. */
+static GstFlowReturn
+gst_audio_amplify_transform_ip (GstBaseTransform * base, GstBuffer * buf)
+{
+  GstAudioAmplify *filter = GST_AUDIO_AMPLIFY (base);
+  guint num_samples;
+  GstClockTime timestamp, stream_time;
+  GstMapInfo map;
+
+  timestamp = GST_BUFFER_TIMESTAMP (buf);
+  stream_time =
+      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  /* apply any controller-driven property changes for this timestamp */
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+  /* gap buffers carry no real audio data - nothing to amplify */
+  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
+    return GST_FLOW_OK;
+
+  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+  /* num_samples counts individual (per-channel) samples, not frames */
+  num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+  filter->process (filter, map.data, num_samples);
+
+  gst_buffer_unmap (buf, &map);
+
+  return GST_FLOW_OK;
+}
diff --git a/gst/audiofx/audioamplify.h b/gst/audiofx/audioamplify.h
new file mode 100644
index 0000000000..24fc62fa87
--- /dev/null
+++ b/gst/audiofx/audioamplify.h
@@ -0,0 +1,64 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007 Sebastian Dröge <slomo@circular-chaos.org>
+ * Copyright (C) 2006 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIO_AMPLIFY_H__
+#define __GST_AUDIO_AMPLIFY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+G_BEGIN_DECLS
+/* Standard GObject boilerplate for the GstAudioAmplify type */
+#define GST_TYPE_AUDIO_AMPLIFY            (gst_audio_amplify_get_type())
+#define GST_AUDIO_AMPLIFY(obj)            (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_AMPLIFY,GstAudioAmplify))
+#define GST_IS_AUDIO_AMPLIFY(obj)         (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_AMPLIFY))
+#define GST_AUDIO_AMPLIFY_CLASS(klass)    (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_AMPLIFY,GstAudioAmplifyClass))
+#define GST_IS_AUDIO_AMPLIFY_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_AMPLIFY))
+#define GST_AUDIO_AMPLIFY_GET_CLASS(obj)  (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_AMPLIFY,GstAudioAmplifyClass))
+typedef struct _GstAudioAmplify GstAudioAmplify;
+typedef struct _GstAudioAmplifyClass GstAudioAmplifyClass;
+
+/* In-place per-sample processing callback (filter, sample data, count) */
+typedef void (*GstAudioAmplifyProcessFunc) (GstAudioAmplify *, void *, guint);
+
+struct _GstAudioAmplify
+{
+  GstAudioFilter audiofilter;
+
+  /* gain factor applied to every sample (1.0 = passthrough) */
+  gfloat amplification;
+
+  /* < private > */
+  /* processing function selected for (clipping_method, format) */
+  GstAudioAmplifyProcessFunc process;
+  /* one of the METHOD_* values from audioamplify.c */
+  gint clipping_method;
+  /* negotiated sample format the process function was chosen for */
+  GstAudioFormat format;
+};
+
+struct _GstAudioAmplifyClass
+{
+  GstAudioFilterClass parent;
+};
+
+GType gst_audio_amplify_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audioamplify);
+
+G_END_DECLS
+#endif /* __GST_AUDIO_AMPLIFY_H__ */
diff --git a/gst/audiofx/audiochebband.c b/gst/audiofx/audiochebband.c
new file mode 100644
index 0000000000..b318e557a8
--- /dev/null
+++ b/gst/audiofx/audiochebband.c
@@ -0,0 +1,662 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Chebyshev type 1 filter design based on
+ * "The Scientist and Engineer's Guide to DSP", Chapter 20.
+ * http://www.dspguide.com/
+ *
+ * For type 2 and Chebyshev filters in general read
+ * http://en.wikipedia.org/wiki/Chebyshev_filter
+ *
+ * Transformation from lowpass to bandpass/bandreject:
+ * http://docs.dewresearch.com/DspHelp/html/IDH_LinearSystems_LowpassToBandPassZ.htm
+ * http://docs.dewresearch.com/DspHelp/html/IDH_LinearSystems_LowpassToBandStopZ.htm
+ *
+ */
+
+/**
+ * SECTION:element-audiochebband
+ * @title: audiochebband
+ *
+ * Attenuates all frequencies outside (bandpass) or inside (bandreject) of a frequency
+ * band. The number of poles and the ripple parameter control the rolloff.
+ *
+ * This element has the advantage over the windowed sinc bandpass and bandreject filter that it is
+ * much faster and produces almost as good results. It's only disadvantages are the highly
+ * non-linear phase and the slower rolloff compared to a windowed sinc filter with a large kernel.
+ *
+ * For type 1 the ripple parameter specifies how much ripple in dB is allowed in the passband, i.e.
+ * some frequencies in the passband will be amplified by that value. A higher ripple value will allow
+ * a faster rolloff.
+ *
+ * For type 2 the ripple parameter specifies the stopband attenuation. In the stopband the gain will
+ * be at most this value. A lower ripple value will allow a faster rolloff.
+ *
+ * As a special case, a Chebyshev type 1 filter with no ripple is a Butterworth filter.
+ *
+ * > Be warned that a too large number of poles can produce noise. The most poles are possible with
+ * > a cutoff frequency at a quarter of the sampling rate.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc freq=1500 ! audioconvert ! audiochebband mode=band-pass lower-frequency=1000 upper-frequency=6000 poles=4 ! audioconvert ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audiochebband mode=band-reject lower-frequency=1000 upper-frequency=4000 ripple=0.2 ! audioconvert ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=white-noise ! audioconvert ! audiochebband mode=band-pass lower-frequency=1000 upper-frequency=4000 type=2 ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include <math.h>
+
+#include "math_compat.h"
+
+#include "audiochebband.h"
+
+#include "gst/glib-compat-private.h"
+
+#define GST_CAT_DEFAULT gst_audio_cheb_band_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* GObject property IDs for audiochebband (PROP_0 is the unused GObject
+ * placeholder). */
+enum
+{
+  PROP_0,
+  PROP_MODE,
+  PROP_TYPE,
+  PROP_LOWER_FREQUENCY,
+  PROP_UPPER_FREQUENCY,
+  PROP_RIPPLE,
+  PROP_POLES
+};
+
+#define gst_audio_cheb_band_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebBand, gst_audio_cheb_band,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audiochebband, "audiochebband",
+ GST_RANK_NONE, GST_TYPE_AUDIO_CHEB_BAND);
+
+static void gst_audio_cheb_band_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_audio_cheb_band_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_audio_cheb_band_finalize (GObject * object);
+
+static gboolean gst_audio_cheb_band_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+
+/* Values of the "mode" property: keep only the configured band
+ * (MODE_BAND_PASS) or attenuate it (MODE_BAND_REJECT). */
+enum
+{
+  MODE_BAND_PASS = 0,
+  MODE_BAND_REJECT
+};
+
+#define GST_TYPE_AUDIO_CHEBYSHEV_FREQ_BAND_MODE (gst_audio_cheb_band_mode_get_type ())
+static GType
+gst_audio_cheb_band_mode_get_type (void)
+{
+  /* Lazily registered enum type backing the "mode" property. */
+  static GType type = 0;
+
+  if (type != 0)
+    return type;
+
+  {
+    static const GEnumValue modes[] = {
+      {MODE_BAND_PASS, "Band pass (default)",
+          "band-pass"},
+      {MODE_BAND_REJECT, "Band reject",
+          "band-reject"},
+      {0, NULL, NULL}
+    };
+
+    type = g_enum_register_static ("GstAudioChebBandMode", modes);
+  }
+
+  return type;
+}
+
+/* GObject vmethod implementations */
+
+static void
+gst_audio_cheb_band_class_init (GstAudioChebBandClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_band_debug, "audiochebband", 0,
+      "audiochebband element");
+
+  /* GObject vfuncs: every property write regenerates the coefficients,
+   * finalize releases the property lock. */
+  gobject_class->set_property = gst_audio_cheb_band_set_property;
+  gobject_class->get_property = gst_audio_cheb_band_get_property;
+  gobject_class->finalize = gst_audio_cheb_band_finalize;
+
+  /* Fixed: the blurb previously read "Low pass or high pass mode",
+   * copy-pasted from audiocheblimit — this element selects between band
+   * pass and band reject. */
+  g_object_class_install_property (gobject_class, PROP_MODE,
+      g_param_spec_enum ("mode", "Mode",
+          "Band pass or band reject mode",
+          GST_TYPE_AUDIO_CHEBYSHEV_FREQ_BAND_MODE,
+          MODE_BAND_PASS,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_TYPE,
+      g_param_spec_int ("type", "Type", "Type of the chebychev filter", 1, 2, 1,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  /* FIXME: Don't use the complete possible range but restrict the upper
+   * boundary so automatically generated UIs can use a slider without
+   * problems */
+  g_object_class_install_property (gobject_class, PROP_LOWER_FREQUENCY,
+      g_param_spec_float ("lower-frequency", "Lower frequency",
+          "Start frequency of the band (Hz)", 0.0, 100000.0,
+          0.0,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_UPPER_FREQUENCY,
+      g_param_spec_float ("upper-frequency", "Upper frequency",
+          "Stop frequency of the band (Hz)", 0.0, 100000.0, 0.0,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_RIPPLE,
+      g_param_spec_float ("ripple", "Ripple", "Amount of ripple (dB)", 0.0,
+          200.0, 0.25,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  /* FIXME: What to do about this upper boundary? With a frequencies near
+   * rate/4 32 poles are completely possible, with frequencies very low
+   * or very high 16 poles already produces only noise */
+  g_object_class_install_property (gobject_class, PROP_POLES,
+      g_param_spec_int ("poles", "Poles",
+          "Number of poles to use, will be rounded up to the next multiple of four",
+          4, 32, 4,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "Band pass & band reject filter", "Filter/Effect/Audio",
+      "Chebyshev band pass and band reject filter",
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+  /* Recompute coefficients whenever the audio info (rate) changes. */
+  filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_band_setup);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_CHEBYSHEV_FREQ_BAND_MODE, 0);
+}
+
+static void
+gst_audio_cheb_band_init (GstAudioChebBand * filter)
+{
+  /* Defaults: type 1 band pass, four poles, 0.25 dB ripple, and an empty
+   * band until the frequency properties are set. */
+  filter->mode = MODE_BAND_PASS;
+  filter->type = 1;
+  filter->poles = 4;
+  filter->lower_frequency = 0.0;
+  filter->upper_frequency = 0.0;
+  filter->ripple = 0.25;
+
+  g_mutex_init (&filter->lock);
+}
+
+/* Compute the z-domain coefficients of one four-pole section of the band
+ * filter.
+ *
+ * p is the 1-based section index (the caller iterates p = 1 .. poles/4).
+ * A two-pole Chebyshev lowpass prototype section is designed at cutoff
+ * frequency 1 and then transformed to the configured band, which doubles
+ * its order to four poles.  Results are returned through the out
+ * parameters b0..b4 (numerator) and a1..a4 (denominator).
+ *
+ * Design follows "The Scientist and Engineer's Guide to DSP", chapter 20;
+ * see the URLs at the top of this file for the band transforms. */
+static void
+generate_biquad_coefficients (GstAudioChebBand * filter,
+    gint p, gint rate, gdouble * b0, gdouble * b1, gdouble * b2, gdouble * b3,
+    gdouble * b4, gdouble * a1, gdouble * a2, gdouble * a3, gdouble * a4)
+{
+  /* the lowpass prototype has half as many poles as the band filter */
+  gint np = filter->poles / 2;
+  gdouble ripple = filter->ripple;
+
+  /* pole location in s-plane */
+  gdouble rp, ip;
+
+  /* zero location in s-plane */
+  gdouble iz = 0.0;
+
+  /* transfer function coefficients for the z-plane */
+  gdouble x0, x1, x2, y1, y2;
+  gint type = filter->type;
+
+  /* Calculate pole location for lowpass at frequency 1 */
+  {
+    gdouble angle = (G_PI / 2.0) * (2.0 * p - 1) / np;
+
+    rp = -sin (angle);
+    ip = cos (angle);
+  }
+
+  /* If we allow ripple, move the pole from the unit
+   * circle to an ellipse and keep cutoff at frequency 1 */
+  if (ripple > 0 && type == 1) {
+    gdouble es, vx;
+
+    es = sqrt (pow (10.0, ripple / 10.0) - 1.0);
+
+    vx = (1.0 / np) * asinh (1.0 / es);
+    rp = rp * sinh (vx);
+    ip = ip * cosh (vx);
+  } else if (type == 2) {
+    gdouble es, vx;
+
+    es = sqrt (pow (10.0, ripple / 10.0) - 1.0);
+    vx = (1.0 / np) * asinh (es);
+    rp = rp * sinh (vx);
+    ip = ip * cosh (vx);
+  }
+
+  /* Calculate inverse of the pole location to move from
+   * type I to type II */
+  if (type == 2) {
+    gdouble mag2 = rp * rp + ip * ip;
+
+    rp /= mag2;
+    ip /= mag2;
+  }
+
+  /* Calculate zero location for frequency 1 on the
+   * unit circle for type 2 */
+  if (type == 2) {
+    gdouble angle = G_PI / (np * 2.0) + ((p - 1) * G_PI) / (np);
+    gdouble mag2;
+
+    iz = cos (angle);
+    mag2 = iz * iz;
+    iz /= mag2;
+  }
+
+  /* Convert from s-domain to z-domain by
+   * using the bilinear Z-transform, i.e.
+   * substitute s by (2/t)*((z-1)/(z+1))
+   * with t = 2 * tan(0.5).
+   */
+  if (type == 1) {
+    gdouble t, m, d;
+
+    t = 2.0 * tan (0.5);
+    m = rp * rp + ip * ip;
+    d = 4.0 - 4.0 * rp * t + m * t * t;
+
+    x0 = (t * t) / d;
+    x1 = 2.0 * x0;
+    x2 = x0;
+    y1 = (8.0 - 2.0 * m * t * t) / d;
+    y2 = (-4.0 - 4.0 * rp * t - m * t * t) / d;
+  } else {
+    gdouble t, m, d;
+
+    t = 2.0 * tan (0.5);
+    m = rp * rp + ip * ip;
+    d = 4.0 - 4.0 * rp * t + m * t * t;
+
+    x0 = (t * t * iz * iz + 4.0) / d;
+    x1 = (-8.0 + 2.0 * iz * iz * t * t) / d;
+    x2 = x0;
+    y1 = (8.0 - 2.0 * m * t * t) / d;
+    y2 = (-4.0 - 4.0 * rp * t - m * t * t) / d;
+  }
+
+  /* Convert from lowpass at frequency 1 to either bandpass
+   * or band reject.
+   *
+   * For bandpass substitute z^(-1) with:
+   *
+   *   -2              -1
+   * -z   + alpha * z   - beta
+   * ----------------------------
+   *          -2              -1
+   * beta * z   - alpha * z   + 1
+   *
+   * alpha = (2*a*b)/(1+b)
+   * beta = (b-1)/(b+1)
+   * a = cos((w1 + w0)/2) / cos((w1 - w0)/2)
+   * b = tan(1/2) * cot((w1 - w0)/2)
+   *
+   * For bandreject substitute z^(-1) with:
+   *
+   *  -2              -1
+   * z   - alpha * z   + beta
+   * ----------------------------
+   *          -2              -1
+   * beta * z   - alpha * z   + 1
+   *
+   * alpha = (2*a)/(1+b)
+   * beta = (1-b)/(1+b)
+   * a = cos((w1 + w0)/2) / cos((w1 - w0)/2)
+   * b = tan(1/2) * tan((w1 - w0)/2)
+   *
+   */
+  {
+    gdouble a, b, d;
+    gdouble alpha, beta;
+    /* w0/w1: lower/upper band edges as normalized angular frequencies */
+    gdouble w0 = 2.0 * G_PI * (filter->lower_frequency / rate);
+    gdouble w1 = 2.0 * G_PI * (filter->upper_frequency / rate);
+
+    if (filter->mode == MODE_BAND_PASS) {
+      a = cos ((w1 + w0) / 2.0) / cos ((w1 - w0) / 2.0);
+      b = tan (1.0 / 2.0) / tan ((w1 - w0) / 2.0);
+
+      alpha = (2.0 * a * b) / (1.0 + b);
+      beta = (b - 1.0) / (b + 1.0);
+
+      d = 1.0 + beta * (y1 - beta * y2);
+
+      *b0 = (x0 + beta * (-x1 + beta * x2)) / d;
+      *b1 = (alpha * (-2.0 * x0 + x1 + beta * x1 - 2.0 * beta * x2)) / d;
+      *b2 =
+          (-x1 - beta * beta * x1 + 2.0 * beta * (x0 + x2) +
+          alpha * alpha * (x0 - x1 + x2)) / d;
+      *b3 = (alpha * (x1 + beta * (-2.0 * x0 + x1) - 2.0 * x2)) / d;
+      *b4 = (beta * (beta * x0 - x1) + x2) / d;
+      *a1 = (alpha * (2.0 + y1 + beta * y1 - 2.0 * beta * y2)) / d;
+      *a2 =
+          (-y1 - beta * beta * y1 - alpha * alpha * (1.0 + y1 - y2) +
+          2.0 * beta * (-1.0 + y2)) / d;
+      *a3 = (alpha * (y1 + beta * (2.0 + y1) - 2.0 * y2)) / d;
+      *a4 = (-beta * beta - beta * y1 + y2) / d;
+    } else {
+      a = cos ((w1 + w0) / 2.0) / cos ((w1 - w0) / 2.0);
+      b = tan (1.0 / 2.0) * tan ((w1 - w0) / 2.0);
+
+      alpha = (2.0 * a) / (1.0 + b);
+      beta = (1.0 - b) / (1.0 + b);
+
+      d = -1.0 + beta * (beta * y2 + y1);
+
+      *b0 = (-x0 - beta * x1 - beta * beta * x2) / d;
+      *b1 = (alpha * (2.0 * x0 + x1 + beta * x1 + 2.0 * beta * x2)) / d;
+      *b2 =
+          (-x1 - beta * beta * x1 - 2.0 * beta * (x0 + x2) -
+          alpha * alpha * (x0 + x1 + x2)) / d;
+      *b3 = (alpha * (x1 + beta * (2.0 * x0 + x1) + 2.0 * x2)) / d;
+      *b4 = (-beta * beta * x0 - beta * x1 - x2) / d;
+      *a1 = (alpha * (-2.0 + y1 + beta * y1 + 2.0 * beta * y2)) / d;
+      *a2 =
+          -(y1 + beta * beta * y1 + 2.0 * beta * (-1.0 + y2) +
+          alpha * alpha * (-1.0 + y1 + y2)) / d;
+      *a3 = (alpha * (beta * (-2.0 + y1) + y1 + 2.0 * y2)) / d;
+      *a4 = -(-beta * beta + beta * y1 + y2) / d;
+    }
+  }
+}
+
+/* (Re)compute the full IIR coefficient set from the current property
+ * values and install it in the GstAudioFXBaseIIRFilter base class.
+ *
+ * info is the new audio info while called from setup(), or NULL when
+ * called from a property setter, in which case the currently configured
+ * rate is used.  Property setters call this with filter->lock held;
+ * setup() calls it directly. */
+static void
+generate_coefficients (GstAudioChebBand * filter, const GstAudioInfo * info)
+{
+  gint rate;
+
+  if (info) {
+    rate = GST_AUDIO_INFO_RATE (info);
+  } else {
+    rate = GST_AUDIO_FILTER_RATE (filter);
+  }
+
+  /* No sample rate known yet: install a unity (identity) filter. */
+  if (rate == 0) {
+    gdouble *a = g_new0 (gdouble, 1);
+    gdouble *b = g_new0 (gdouble, 1);
+
+    a[0] = 1.0;
+    b[0] = 1.0;
+    gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+        (filter), a, 1, b, 1);
+    GST_LOG_OBJECT (filter, "rate was not set yet");
+    return;
+  }
+
+  /* Empty or inverted band: constant zero output in pass mode, identity
+   * in reject mode. */
+  if (filter->upper_frequency <= filter->lower_frequency) {
+    gdouble *a = g_new0 (gdouble, 1);
+    gdouble *b = g_new0 (gdouble, 1);
+
+    a[0] = 1.0;
+    b[0] = (filter->mode == MODE_BAND_PASS) ? 0.0 : 1.0;
+    gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+        (filter), a, 1, b, 1);
+
+    GST_LOG_OBJECT (filter, "frequency band had no or negative dimension");
+    return;
+  }
+
+  if (filter->upper_frequency > rate / 2) {
+    filter->upper_frequency = rate / 2;
+    GST_LOG_OBJECT (filter, "clipped upper frequency to nyquist frequency");
+  }
+
+  if (filter->lower_frequency < 0.0) {
+    filter->lower_frequency = 0.0;
+    GST_LOG_OBJECT (filter, "clipped lower frequency to 0.0");
+  }
+
+  /* Calculate coefficients for the chebyshev filter */
+  {
+    gint np = filter->poles;
+    gdouble *a, *b;
+    gint i, p;
+
+    /* four extra leading slots so each four-pole section can look back
+     * four samples during the cascade multiplication below */
+    a = g_new0 (gdouble, np + 5);
+    b = g_new0 (gdouble, np + 5);
+
+    /* Calculate transfer function coefficients */
+    a[4] = 1.0;
+    b[4] = 1.0;
+
+    for (p = 1; p <= np / 4; p++) {
+      gdouble b0, b1, b2, b3, b4, a1, a2, a3, a4;
+      gdouble *ta = g_new0 (gdouble, np + 5);
+      gdouble *tb = g_new0 (gdouble, np + 5);
+
+      generate_biquad_coefficients (filter, p, rate,
+          &b0, &b1, &b2, &b3, &b4, &a1, &a2, &a3, &a4);
+
+      memcpy (ta, a, sizeof (gdouble) * (np + 5));
+      memcpy (tb, b, sizeof (gdouble) * (np + 5));
+
+      /* add the new coefficients for the new two poles
+       * to the cascade by multiplication of the transfer
+       * functions */
+      for (i = 4; i < np + 5; i++) {
+        b[i] =
+            b0 * tb[i] + b1 * tb[i - 1] + b2 * tb[i - 2] + b3 * tb[i - 3] +
+            b4 * tb[i - 4];
+        a[i] =
+            ta[i] - a1 * ta[i - 1] - a2 * ta[i - 2] - a3 * ta[i - 3] -
+            a4 * ta[i - 4];
+      }
+      g_free (ta);
+      g_free (tb);
+    }
+
+    /* Move coefficients to the beginning of the array to move from
+     * the transfer function's coefficients to the difference
+     * equation's coefficients */
+    for (i = 0; i <= np; i++) {
+      a[i] = a[i + 4];
+      b[i] = b[i + 4];
+    }
+
+    /* Normalize to unity gain at frequency 0 and frequency
+     * 0.5 for bandreject and unity gain at band center frequency
+     * for bandpass */
+    if (filter->mode == MODE_BAND_REJECT) {
+      /* gain is sqrt(H(0)*H(0.5)) */
+
+      gdouble gain1 =
+          gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1,
+          1.0, 0.0);
+      gdouble gain2 =
+          gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1,
+          -1.0, 0.0);
+
+      gain1 = sqrt (gain1 * gain2);
+
+      for (i = 0; i <= np; i++) {
+        b[i] /= gain1;
+      }
+    } else {
+      /* gain is H(wc), wc = center frequency */
+
+      gdouble w1 = 2.0 * G_PI * (filter->lower_frequency / rate);
+      gdouble w2 = 2.0 * G_PI * (filter->upper_frequency / rate);
+      gdouble w0 = (w2 + w1) / 2.0;
+      gdouble zr = cos (w0), zi = sin (w0);
+      gdouble gain =
+          gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1, zr,
+          zi);
+
+      for (i = 0; i <= np; i++) {
+        b[i] /= gain;
+      }
+    }
+
+    /* NOTE(review): a and b are not freed here — ownership is presumably
+     * transferred to the base class, matching the other call sites above;
+     * confirm in audiofxbaseiirfilter.c */
+    gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+        (filter), a, np + 1, b, np + 1);
+
+    GST_LOG_OBJECT (filter,
+        "Generated IIR coefficients for the Chebyshev filter");
+    GST_LOG_OBJECT (filter,
+        "mode: %s, type: %d, poles: %d, lower-frequency: %.2f Hz, upper-frequency: %.2f Hz, ripple: %.2f dB",
+        (filter->mode == MODE_BAND_PASS) ? "band-pass" : "band-reject",
+        filter->type, filter->poles, filter->lower_frequency,
+        filter->upper_frequency, filter->ripple);
+
+    GST_LOG_OBJECT (filter, "%.2f dB gain @ 0Hz",
+        20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
+                np + 1, 1.0, 0.0)));
+    {
+      gdouble w1 = 2.0 * G_PI * (filter->lower_frequency / rate);
+      gdouble w2 = 2.0 * G_PI * (filter->upper_frequency / rate);
+      gdouble w0 = (w2 + w1) / 2.0;
+      gdouble zr, zi;
+
+      zr = cos (w1);
+      zi = sin (w1);
+      GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
+          20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1,
+                  b, np + 1, zr, zi)), (int) filter->lower_frequency);
+      zr = cos (w0);
+      zi = sin (w0);
+      GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
+          20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1,
+                  b, np + 1, zr, zi)),
+          (int) ((filter->lower_frequency + filter->upper_frequency) / 2.0));
+      zr = cos (w2);
+      zi = sin (w2);
+      GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
+          20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1,
+                  b, np + 1, zr, zi)), (int) filter->upper_frequency);
+    }
+    GST_LOG_OBJECT (filter, "%.2f dB gain @ %dHz",
+        20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
+                np + 1, -1.0, 0.0)), rate / 2);
+  }
+}
+
+static void
+gst_audio_cheb_band_finalize (GObject * object)
+{
+  GstAudioChebBand *self = GST_AUDIO_CHEB_BAND (object);
+
+  /* Drop the property lock, then chain up. */
+  g_mutex_clear (&self->lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_audio_cheb_band_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAudioChebBand *self = GST_AUDIO_CHEB_BAND (object);
+
+  /* Every valid property write updates the field and regenerates the IIR
+   * coefficients while holding the property lock. */
+  if (prop_id == PROP_MODE) {
+    g_mutex_lock (&self->lock);
+    self->mode = g_value_get_enum (value);
+    generate_coefficients (self, NULL);
+    g_mutex_unlock (&self->lock);
+  } else if (prop_id == PROP_TYPE) {
+    g_mutex_lock (&self->lock);
+    self->type = g_value_get_int (value);
+    generate_coefficients (self, NULL);
+    g_mutex_unlock (&self->lock);
+  } else if (prop_id == PROP_LOWER_FREQUENCY) {
+    g_mutex_lock (&self->lock);
+    self->lower_frequency = g_value_get_float (value);
+    generate_coefficients (self, NULL);
+    g_mutex_unlock (&self->lock);
+  } else if (prop_id == PROP_UPPER_FREQUENCY) {
+    g_mutex_lock (&self->lock);
+    self->upper_frequency = g_value_get_float (value);
+    generate_coefficients (self, NULL);
+    g_mutex_unlock (&self->lock);
+  } else if (prop_id == PROP_RIPPLE) {
+    g_mutex_lock (&self->lock);
+    self->ripple = g_value_get_float (value);
+    generate_coefficients (self, NULL);
+    g_mutex_unlock (&self->lock);
+  } else if (prop_id == PROP_POLES) {
+    g_mutex_lock (&self->lock);
+    /* the band transform consumes poles in groups of four */
+    self->poles = GST_ROUND_UP_4 (g_value_get_int (value));
+    generate_coefficients (self, NULL);
+    g_mutex_unlock (&self->lock);
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+}
+
+static void
+gst_audio_cheb_band_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAudioChebBand *self = GST_AUDIO_CHEB_BAND (object);
+
+  /* Plain scalar reads; no locking, matching the original behaviour. */
+  if (prop_id == PROP_MODE)
+    g_value_set_enum (value, self->mode);
+  else if (prop_id == PROP_TYPE)
+    g_value_set_int (value, self->type);
+  else if (prop_id == PROP_LOWER_FREQUENCY)
+    g_value_set_float (value, self->lower_frequency);
+  else if (prop_id == PROP_UPPER_FREQUENCY)
+    g_value_set_float (value, self->upper_frequency);
+  else if (prop_id == PROP_RIPPLE)
+    g_value_set_float (value, self->ripple);
+  else if (prop_id == PROP_POLES)
+    g_value_set_int (value, self->poles);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GstAudioFilter vmethod implementations */
+
+static gboolean
+gst_audio_cheb_band_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+  GstAudioFilterClass *parent = GST_AUDIO_FILTER_CLASS (parent_class);
+
+  /* Recompute the coefficients for the new rate, then let the base class
+   * finish its own setup. */
+  generate_coefficients (GST_AUDIO_CHEB_BAND (base), info);
+
+  return parent->setup (base, info);
+}
diff --git a/gst/audiofx/audiochebband.h b/gst/audiofx/audiochebband.h
new file mode 100644
index 0000000000..c8c2187683
--- /dev/null
+++ b/gst/audiofx/audiochebband.h
@@ -0,0 +1,66 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIO_CHEB_BAND_H__
+#define __GST_AUDIO_CHEB_BAND_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiofxbaseiirfilter.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_AUDIO_CHEB_BAND (gst_audio_cheb_band_get_type())
+#define GST_AUDIO_CHEB_BAND(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_CHEB_BAND,GstAudioChebBand))
+#define GST_IS_AUDIO_CHEB_BAND(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_CHEB_BAND))
+#define GST_AUDIO_CHEB_BAND_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_CHEB_BAND,GstAudioChebBandClass))
+#define GST_IS_AUDIO_CHEB_BAND_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_CHEB_BAND))
+#define GST_AUDIO_CHEB_BAND_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_CHEB_BAND,GstAudioChebBandClass))
+typedef struct _GstAudioChebBand GstAudioChebBand;
+typedef struct _GstAudioChebBandClass GstAudioChebBandClass;
+
+/* Instance structure: a GstAudioFXBaseIIRFilter configured as a Chebyshev
+ * band pass / band reject filter. */
+struct _GstAudioChebBand
+{
+  GstAudioFXBaseIIRFilter parent;
+
+  gint mode;                    /* MODE_BAND_PASS or MODE_BAND_REJECT */
+  gint type;                    /* Chebyshev type, 1 or 2 */
+  gint poles;                   /* number of poles, multiple of four */
+  gfloat lower_frequency;       /* band start in Hz */
+  gfloat upper_frequency;       /* band stop in Hz */
+  gfloat ripple;                /* allowed ripple in dB */
+
+  /* < private > */
+  GMutex lock;                  /* guards the fields above while the
+                                 * coefficients are regenerated */
+};
+
+/* Class structure: adds no virtual methods over the IIR base class. */
+struct _GstAudioChebBandClass
+{
+  GstAudioFXBaseIIRFilterClass parent;
+};
+
+GType gst_audio_cheb_band_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audiochebband);
+
+G_END_DECLS
+#endif /* __GST_AUDIO_CHEB_BAND_H__ */
diff --git a/gst/audiofx/audiocheblimit.c b/gst/audiofx/audiocheblimit.c
new file mode 100644
index 0000000000..f9d2a4750a
--- /dev/null
+++ b/gst/audiofx/audiocheblimit.c
@@ -0,0 +1,576 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Chebyshev type 1 filter design based on
+ * "The Scientist and Engineer's Guide to DSP", Chapter 20.
+ * http://www.dspguide.com/
+ *
+ * For type 2 and Chebyshev filters in general read
+ * http://en.wikipedia.org/wiki/Chebyshev_filter
+ *
+ */
+
+/**
+ * SECTION:element-audiocheblimit
+ * @title: audiocheblimit
+ *
+ * Attenuates all frequencies above the cutoff frequency (low-pass) or all frequencies below the
+ * cutoff frequency (high-pass). The number of poles and the ripple parameter control the rolloff.
+ *
+ * This element has the advantage over the windowed sinc lowpass and highpass filter that it is
+ * much faster and produces almost as good results. It's only disadvantages are the highly
+ * non-linear phase and the slower rolloff compared to a windowed sinc filter with a large kernel.
+ *
+ * For type 1 the ripple parameter specifies how much ripple in dB is allowed in the passband, i.e.
+ * some frequencies in the passband will be amplified by that value. A higher ripple value will allow
+ * a faster rolloff.
+ *
+ * For type 2 the ripple parameter specifies the stopband attenuation. In the stopband the gain will
+ * be at most this value. A lower ripple value will allow a faster rolloff.
+ *
+ * As a special case, a Chebyshev type 1 filter with no ripple is a Butterworth filter.
+ *
+ * > Be warned that a too large number of poles can produce noise. The most poles are possible with
+ * > a cutoff frequency at a quarter of the sampling rate.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc freq=1500 ! audioconvert ! audiocheblimit mode=low-pass cutoff=1000 poles=4 ! audioconvert ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audiocheblimit mode=high-pass cutoff=400 ripple=0.2 ! audioconvert ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=white-noise ! audioconvert ! audiocheblimit mode=low-pass cutoff=800 type=2 ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include <math.h>
+
+#include "math_compat.h"
+
+#include "audiocheblimit.h"
+
+#include "gst/glib-compat-private.h"
+
+#define GST_CAT_DEFAULT gst_audio_cheb_limit_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* GObject property IDs for audiocheblimit (PROP_0 is the unused GObject
+ * placeholder). */
+enum
+{
+  PROP_0,
+  PROP_MODE,
+  PROP_TYPE,
+  PROP_CUTOFF,
+  PROP_RIPPLE,
+  PROP_POLES
+};
+
+#define gst_audio_cheb_limit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioChebLimit,
+ gst_audio_cheb_limit, GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audiocheblimit, "audiocheblimit",
+ GST_RANK_NONE, GST_TYPE_AUDIO_CHEB_LIMIT);
+
+static void gst_audio_cheb_limit_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_audio_cheb_limit_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_audio_cheb_limit_finalize (GObject * object);
+
+static gboolean gst_audio_cheb_limit_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+
+/* Values of the "mode" property: attenuate above (MODE_LOW_PASS) or below
+ * (MODE_HIGH_PASS) the cutoff frequency. */
+enum
+{
+  MODE_LOW_PASS = 0,
+  MODE_HIGH_PASS
+};
+
+#define GST_TYPE_AUDIO_CHEBYSHEV_FREQ_LIMIT_MODE (gst_audio_cheb_limit_mode_get_type ())
+static GType
+gst_audio_cheb_limit_mode_get_type (void)
+{
+  /* Lazily registered enum type backing the "mode" property. */
+  static GType type = 0;
+
+  if (type != 0)
+    return type;
+
+  {
+    static const GEnumValue modes[] = {
+      {MODE_LOW_PASS, "Low pass (default)",
+          "low-pass"},
+      {MODE_HIGH_PASS, "High pass",
+          "high-pass"},
+      {0, NULL, NULL}
+    };
+
+    type = g_enum_register_static ("GstAudioChebLimitMode", modes);
+  }
+
+  return type;
+}
+
+/* GObject vmethod implementations */
+
+static void
+gst_audio_cheb_limit_class_init (GstAudioChebLimitClass * klass)
+{
+  GObjectClass *object_class = (GObjectClass *) klass;
+  GstElementClass *element_class = (GstElementClass *) klass;
+  GstAudioFilterClass *audio_filter_class = (GstAudioFilterClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (gst_audio_cheb_limit_debug, "audiocheblimit", 0,
+      "audiocheblimit element");
+
+  /* Hook up GObject vfuncs and the rate-change callback. */
+  object_class->set_property = gst_audio_cheb_limit_set_property;
+  object_class->get_property = gst_audio_cheb_limit_get_property;
+  object_class->finalize = gst_audio_cheb_limit_finalize;
+  audio_filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_cheb_limit_setup);
+
+  /* Filter configuration properties; each write regenerates the
+   * coefficients. */
+  g_object_class_install_property (object_class, PROP_MODE,
+      g_param_spec_enum ("mode", "Mode",
+          "Low pass or high pass mode",
+          GST_TYPE_AUDIO_CHEBYSHEV_FREQ_LIMIT_MODE, MODE_LOW_PASS,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_TYPE,
+      g_param_spec_int ("type", "Type", "Type of the chebychev filter", 1, 2, 1,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  /* FIXME: Don't use the complete possible range but restrict the upper
+   * boundary so automatically generated UIs can use a slider without
+   * problems */
+  g_object_class_install_property (object_class, PROP_CUTOFF,
+      g_param_spec_float ("cutoff", "Cutoff", "Cut off frequency (Hz)", 0.0,
+          100000.0, 0.0,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (object_class, PROP_RIPPLE,
+      g_param_spec_float ("ripple", "Ripple", "Amount of ripple (dB)", 0.0,
+          200.0, 0.25,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  /* FIXME: What to do about this upper boundary? With a cutoff frequency of
+   * rate/4 32 poles are completely possible, with a cutoff frequency very low
+   * or very high 16 poles already produces only noise */
+  g_object_class_install_property (object_class, PROP_POLES,
+      g_param_spec_int ("poles", "Poles",
+          "Number of poles to use, will be rounded up to the next even number",
+          2, 32, 4,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_set_static_metadata (element_class,
+      "Low pass & high pass filter",
+      "Filter/Effect/Audio",
+      "Chebyshev low pass and high pass filter",
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+  gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_CHEBYSHEV_FREQ_LIMIT_MODE, 0);
+}
+
+static void
+gst_audio_cheb_limit_init (GstAudioChebLimit * filter)
+{
+  /* Defaults: type 1 low pass, four poles, 0.25 dB ripple, cutoff at
+   * 0 Hz until the properties are configured. */
+  filter->mode = MODE_LOW_PASS;
+  filter->type = 1;
+  filter->poles = 4;
+  filter->cutoff = 0.0;
+  filter->ripple = 0.25;
+
+  g_mutex_init (&filter->lock);
+}
+
+/* Compute the z-domain coefficients of one two-pole (biquad) section of
+ * the low/high pass filter.
+ *
+ * p is the 1-based section index (the caller iterates over the biquad
+ * stages).  A two-pole Chebyshev lowpass prototype at cutoff frequency 1
+ * is designed and then moved to the configured cutoff; high pass is
+ * obtained by sign flips at the end.  Results go to the out parameters
+ * b0..b2 (numerator) and a1..a2 (denominator).
+ *
+ * Design follows "The Scientist and Engineer's Guide to DSP", chapter 20. */
+static void
+generate_biquad_coefficients (GstAudioChebLimit * filter,
+    gint p, gint rate, gdouble * b0, gdouble * b1, gdouble * b2,
+    gdouble * a1, gdouble * a2)
+{
+  gint np = filter->poles;
+  gdouble ripple = filter->ripple;
+
+  /* pole location in s-plane */
+  gdouble rp, ip;
+
+  /* zero location in s-plane */
+  gdouble iz = 0.0;
+
+  /* transfer function coefficients for the z-plane */
+  gdouble x0, x1, x2, y1, y2;
+  gint type = filter->type;
+
+  /* Calculate pole location for lowpass at frequency 1 */
+  {
+    gdouble angle = (G_PI / 2.0) * (2.0 * p - 1) / np;
+
+    rp = -sin (angle);
+    ip = cos (angle);
+  }
+
+  /* If we allow ripple, move the pole from the unit
+   * circle to an ellipse and keep cutoff at frequency 1 */
+  if (ripple > 0 && type == 1) {
+    gdouble es, vx;
+
+    es = sqrt (pow (10.0, ripple / 10.0) - 1.0);
+
+    vx = (1.0 / np) * asinh (1.0 / es);
+    rp = rp * sinh (vx);
+    ip = ip * cosh (vx);
+  } else if (type == 2) {
+    gdouble es, vx;
+
+    es = sqrt (pow (10.0, ripple / 10.0) - 1.0);
+    vx = (1.0 / np) * asinh (es);
+    rp = rp * sinh (vx);
+    ip = ip * cosh (vx);
+  }
+
+  /* Calculate inverse of the pole location to convert from
+   * type I to type II */
+  if (type == 2) {
+    gdouble mag2 = rp * rp + ip * ip;
+
+    rp /= mag2;
+    ip /= mag2;
+  }
+
+  /* Calculate zero location for frequency 1 on the
+   * unit circle for type 2 */
+  if (type == 2) {
+    gdouble angle = G_PI / (np * 2.0) + ((p - 1) * G_PI) / (np);
+    gdouble mag2;
+
+    iz = cos (angle);
+    mag2 = iz * iz;
+    iz /= mag2;
+  }
+
+  /* Convert from s-domain to z-domain by
+   * using the bilinear Z-transform, i.e.
+   * substitute s by (2/t)*((z-1)/(z+1))
+   * with t = 2 * tan(0.5).
+   */
+  if (type == 1) {
+    gdouble t, m, d;
+
+    t = 2.0 * tan (0.5);
+    m = rp * rp + ip * ip;
+    d = 4.0 - 4.0 * rp * t + m * t * t;
+
+    x0 = (t * t) / d;
+    x1 = 2.0 * x0;
+    x2 = x0;
+    y1 = (8.0 - 2.0 * m * t * t) / d;
+    y2 = (-4.0 - 4.0 * rp * t - m * t * t) / d;
+  } else {
+    gdouble t, m, d;
+
+    t = 2.0 * tan (0.5);
+    m = rp * rp + ip * ip;
+    d = 4.0 - 4.0 * rp * t + m * t * t;
+
+    x0 = (t * t * iz * iz + 4.0) / d;
+    x1 = (-8.0 + 2.0 * iz * iz * t * t) / d;
+    x2 = x0;
+    y1 = (8.0 - 2.0 * m * t * t) / d;
+    y2 = (-4.0 - 4.0 * rp * t - m * t * t) / d;
+  }
+
+  /* Convert from lowpass at frequency 1 to either lowpass
+   * or highpass.
+   *
+   * For lowpass substitute z^(-1) with:
+   *      -1
+   *    z   - k
+   * ------------
+   *          -1
+   * 1 - k * z
+   *
+   * k = sin((1-w)/2) / sin((1+w)/2)
+   *
+   * For highpass substitute z^(-1) with:
+   *
+   *      -1
+   *   -z   - k
+   * ------------
+   *          -1
+   * 1 + k * z
+   *
+   * k = -cos((1+w)/2) / cos((1-w)/2)
+   *
+   */
+  {
+    gdouble k, d;
+    /* cutoff as normalized angular frequency */
+    gdouble omega = 2.0 * G_PI * (filter->cutoff / rate);
+
+    if (filter->mode == MODE_LOW_PASS)
+      k = sin ((1.0 - omega) / 2.0) / sin ((1.0 + omega) / 2.0);
+    else
+      k = -cos ((omega + 1.0) / 2.0) / cos ((omega - 1.0) / 2.0);
+
+    d = 1.0 + y1 * k - y2 * k * k;
+    *b0 = (x0 + k * (-x1 + k * x2)) / d;
+    *b1 = (x1 + k * k * x1 - 2.0 * k * (x0 + x2)) / d;
+    *b2 = (x0 * k * k - x1 * k + x2) / d;
+    *a1 = (2.0 * k + y1 + y1 * k * k - 2.0 * y2 * k) / d;
+    *a2 = (-k * k - y1 * k + y2) / d;
+
+    /* flip the signs of the odd coefficients for high pass */
+    if (filter->mode == MODE_HIGH_PASS) {
+      *a1 = -*a1;
+      *b1 = -*b1;
+    }
+  }
+}
+
+/* Regenerates the full IIR coefficient arrays for the current property
+ * values and installs them on the base IIR filter. Degenerate cases
+ * (rate not yet known, cutoff at/above Nyquist, cutoff <= 0) install a
+ * trivial pass-through or mute filter instead. The a/b arrays are
+ * handed to gst_audio_fx_base_iir_filter_set_coefficients — presumably
+ * it takes ownership, as they are never freed here (TODO confirm). */
+static void
+generate_coefficients (GstAudioChebLimit * filter, const GstAudioInfo * info)
+{
+ gint rate;
+
+ if (info) {
+ rate = GST_AUDIO_INFO_RATE (info);
+ } else {
+ rate = GST_AUDIO_FILTER_RATE (filter);
+ }
+
+ GST_LOG_OBJECT (filter, "cutoff %f", filter->cutoff);
+
+ if (rate == 0) {
+ /* identity filter until the rate is negotiated */
+ gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
+
+ a[0] = 1.0;
+ b[0] = 1.0;
+ gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+ (filter), a, 1, b, 1);
+
+ GST_LOG_OBJECT (filter, "rate was not set yet");
+ return;
+ }
+
+ if (filter->cutoff >= rate / 2.0) {
+ /* low-pass passes everything, high-pass mutes everything */
+ gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
+
+ a[0] = 1.0;
+ b[0] = (filter->mode == MODE_LOW_PASS) ? 1.0 : 0.0;
+ gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+ (filter), a, 1, b, 1);
+ GST_LOG_OBJECT (filter, "cutoff was higher than nyquist frequency");
+ return;
+ } else if (filter->cutoff <= 0.0) {
+ /* low-pass mutes everything, high-pass passes everything */
+ gdouble *a = g_new0 (gdouble, 1);
+ gdouble *b = g_new0 (gdouble, 1);
+
+ a[0] = 1.0;
+ b[0] = (filter->mode == MODE_LOW_PASS) ? 0.0 : 1.0;
+ gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+ (filter), a, 1, b, 1);
+ GST_LOG_OBJECT (filter, "cutoff is lower than zero");
+ return;
+ }
+
+ /* Calculate coefficients for the chebyshev filter */
+ {
+ gint np = filter->poles;
+ gdouble *a, *b;
+ gint i, p;
+
+ /* np + 3 entries so the i-1 / i-2 accesses below stay in
+ * bounds; indices 0 and 1 are zero-padding */
+ a = g_new0 (gdouble, np + 3);
+ b = g_new0 (gdouble, np + 3);
+
+ /* Calculate transfer function coefficients */
+ a[2] = 1.0;
+ b[2] = 1.0;
+
+ for (p = 1; p <= np / 2; p++) {
+ gdouble b0, b1, b2, a1, a2;
+ gdouble *ta = g_new0 (gdouble, np + 3);
+ gdouble *tb = g_new0 (gdouble, np + 3);
+
+ generate_biquad_coefficients (filter, p, rate, &b0, &b1, &b2, &a1, &a2);
+
+ memcpy (ta, a, sizeof (gdouble) * (np + 3));
+ memcpy (tb, b, sizeof (gdouble) * (np + 3));
+
+ /* add the new coefficients for the new two poles
+ * to the cascade by multiplication of the transfer
+ * functions */
+ for (i = 2; i < np + 3; i++) {
+ b[i] = b0 * tb[i] + b1 * tb[i - 1] + b2 * tb[i - 2];
+ a[i] = ta[i] - a1 * ta[i - 1] - a2 * ta[i - 2];
+ }
+ g_free (ta);
+ g_free (tb);
+ }
+
+ /* Move coefficients to the beginning of the array to move from
+ * the transfer function's coefficients to the difference
+ * equation's coefficients */
+ for (i = 0; i <= np; i++) {
+ a[i] = a[i + 2];
+ b[i] = b[i + 2];
+ }
+
+ /* Normalize to unity gain at frequency 0 for lowpass
+ * and frequency 0.5 for highpass */
+ {
+ gdouble gain;
+
+ if (filter->mode == MODE_LOW_PASS)
+ gain =
+ gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1,
+ 1.0, 0.0);
+ else
+ gain =
+ gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b, np + 1,
+ -1.0, 0.0);
+
+ for (i = 0; i <= np; i++) {
+ b[i] /= gain;
+ }
+ }
+
+ gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+ (filter), a, np + 1, b, np + 1);
+
+ GST_LOG_OBJECT (filter,
+ "Generated IIR coefficients for the Chebyshev filter");
+ GST_LOG_OBJECT (filter,
+ "mode: %s, type: %d, poles: %d, cutoff: %.2f Hz, ripple: %.2f dB",
+ (filter->mode == MODE_LOW_PASS) ? "low-pass" : "high-pass",
+ filter->type, filter->poles, filter->cutoff, filter->ripple);
+ GST_LOG_OBJECT (filter, "%.2f dB gain @ 0 Hz",
+ 20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
+ np + 1, 1.0, 0.0)));
+
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ /* evaluate the response on the unit circle at the cutoff */
+ gdouble wc = 2.0 * G_PI * (filter->cutoff / rate);
+ gdouble zr = cos (wc), zi = sin (wc);
+
+ GST_LOG_OBJECT (filter, "%.2f dB gain @ %d Hz",
+ 20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1,
+ b, np + 1, zr, zi)), (int) filter->cutoff);
+ }
+#endif
+
+ GST_LOG_OBJECT (filter, "%.2f dB gain @ %d Hz",
+ 20.0 * log10 (gst_audio_fx_base_iir_filter_calculate_gain (a, np + 1, b,
+ np + 1, -1.0, 0.0)), rate);
+ }
+}
+
+/* Releases the property lock and chains up to the parent finalize. */
+static void
+gst_audio_cheb_limit_finalize (GObject * object)
+{
+ GstAudioChebLimit *filter = GST_AUDIO_CHEB_LIMIT (object);
+
+ g_mutex_clear (&filter->lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Property setter. Every filter parameter change regenerates the IIR
+ * coefficients under filter->lock so a concurrent regeneration never
+ * sees a half-updated parameter set. */
+static void
+gst_audio_cheb_limit_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioChebLimit *filter = GST_AUDIO_CHEB_LIMIT (object);
+
+ switch (prop_id) {
+ case PROP_MODE:
+ g_mutex_lock (&filter->lock);
+ filter->mode = g_value_get_enum (value);
+ generate_coefficients (filter, NULL);
+ g_mutex_unlock (&filter->lock);
+ break;
+ case PROP_TYPE:
+ g_mutex_lock (&filter->lock);
+ filter->type = g_value_get_int (value);
+ generate_coefficients (filter, NULL);
+ g_mutex_unlock (&filter->lock);
+ break;
+ case PROP_CUTOFF:
+ g_mutex_lock (&filter->lock);
+ filter->cutoff = g_value_get_float (value);
+ generate_coefficients (filter, NULL);
+ g_mutex_unlock (&filter->lock);
+ break;
+ case PROP_RIPPLE:
+ g_mutex_lock (&filter->lock);
+ filter->ripple = g_value_get_float (value);
+ generate_coefficients (filter, NULL);
+ g_mutex_unlock (&filter->lock);
+ break;
+ case PROP_POLES:
+ /* the filter is built from 2-pole biquad sections, so the pole
+ * count is rounded up to the next even number */
+ g_mutex_lock (&filter->lock);
+ filter->poles = GST_ROUND_UP_2 (g_value_get_int (value));
+ generate_coefficients (filter, NULL);
+ g_mutex_unlock (&filter->lock);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Property getter. NOTE(review): reads are not taken under
+ * filter->lock, unlike the setter — each read is a single scalar so
+ * this appears intentional; confirm against the other audiofx
+ * elements' convention. */
+static void
+gst_audio_cheb_limit_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioChebLimit *filter = GST_AUDIO_CHEB_LIMIT (object);
+
+ switch (prop_id) {
+ case PROP_MODE:
+ g_value_set_enum (value, filter->mode);
+ break;
+ case PROP_TYPE:
+ g_value_set_int (value, filter->type);
+ break;
+ case PROP_CUTOFF:
+ g_value_set_float (value, filter->cutoff);
+ break;
+ case PROP_RIPPLE:
+ g_value_set_float (value, filter->ripple);
+ break;
+ case PROP_POLES:
+ g_value_set_int (value, filter->poles);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* Caps negotiation: regenerate the coefficients for the newly
+ * negotiated sample rate, then chain up. NOTE(review): unlike the
+ * property setters this path does not take filter->lock before
+ * calling generate_coefficients — confirm that setup cannot race
+ * with property changes. */
+static gboolean
+gst_audio_cheb_limit_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioChebLimit *filter = GST_AUDIO_CHEB_LIMIT (base);
+
+ generate_coefficients (filter, info);
+
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
+}
diff --git a/gst/audiofx/audiocheblimit.h b/gst/audiofx/audiocheblimit.h
new file mode 100644
index 0000000000..4c6392141c
--- /dev/null
+++ b/gst/audiofx/audiocheblimit.h
@@ -0,0 +1,68 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIO_CHEB_LIMIT_H__
+#define __GST_AUDIO_CHEB_LIMIT_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiofxbaseiirfilter.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_CHEB_LIMIT (gst_audio_cheb_limit_get_type())
+#define GST_AUDIO_CHEB_LIMIT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_CHEB_LIMIT,GstAudioChebLimit))
+#define GST_IS_AUDIO_CHEB_LIMIT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_CHEB_LIMIT))
+#define GST_AUDIO_CHEB_LIMIT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_CHEB_LIMIT,GstAudioChebLimitClass))
+#define GST_IS_AUDIO_CHEB_LIMIT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_CHEB_LIMIT))
+#define GST_AUDIO_CHEB_LIMIT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_CHEB_LIMIT,GstAudioChebLimitClass))
+
+typedef struct _GstAudioChebLimit GstAudioChebLimit;
+typedef struct _GstAudioChebLimitClass GstAudioChebLimitClass;
+
+struct _GstAudioChebLimit
+{
+ GstAudioFXBaseIIRFilter parent;
+
+ /* filter mode: low-pass or high-pass */
+ gint mode;
+ /* Chebyshev type: 1 (pass-band ripple) or 2 (stop-band ripple) */
+ gint type;
+ /* number of poles; always even */
+ gint poles;
+ /* cutoff frequency in Hz */
+ gfloat cutoff;
+ /* allowed ripple in dB */
+ gfloat ripple;
+
+ /* < private > */
+ /* protects the fields above against concurrent coefficient
+ * regeneration */
+ GMutex lock;
+};
+
+struct _GstAudioChebLimitClass
+{
+ GstAudioFXBaseIIRFilterClass parent;
+};
+
+GType gst_audio_cheb_limit_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audiocheblimit);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_CHEB_LIMIT_H__ */
diff --git a/gst/audiofx/audiodynamic.c b/gst/audiofx/audiodynamic.c
new file mode 100644
index 0000000000..a244268548
--- /dev/null
+++ b/gst/audiofx/audiodynamic.c
@@ -0,0 +1,718 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-audiodynamic
+ * @title: audiodynamic
+ *
+ * This element can act as a compressor or expander. A compressor changes the
+ * amplitude of all samples above a specific threshold with a specific ratio,
+ * an expander does the same for all samples below a specific threshold. If
+ * soft-knee mode is selected the ratio is applied smoothly.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc wave=saw ! audiodynamic characteristics=soft-knee mode=compressor threshold=0.5 ratio=0.5 ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audiodynamic characteristics=hard-knee mode=expander threshold=0.2 ratio=4.0 ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=saw ! audioconvert ! audiodynamic ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+/* TODO: Implement attack and release parameters */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiodynamic.h"
+
+#define GST_CAT_DEFAULT gst_audio_dynamic_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_CHARACTERISTICS,
+ PROP_MODE,
+ PROP_THRESHOLD,
+ PROP_RATIO
+};
+
+#define ALLOWED_CAPS \
+ "audio/x-raw," \
+ " format=(string) {"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"}," \
+ " rate=(int)[1,MAX]," \
+ " channels=(int)[1,MAX]," \
+ " layout=(string) {interleaved, non-interleaved}"
+
+G_DEFINE_TYPE (GstAudioDynamic, gst_audio_dynamic, GST_TYPE_AUDIO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audiodynamic, "audiodynamic",
+ GST_RANK_NONE, GST_TYPE_AUDIO_DYNAMIC);
+
+static void gst_audio_dynamic_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_dynamic_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_audio_dynamic_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+static GstFlowReturn gst_audio_dynamic_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+
+static void
+gst_audio_dynamic_transform_hard_knee_compressor_int (GstAudioDynamic * filter,
+ gint16 * data, guint num_samples);
+static void
+gst_audio_dynamic_transform_hard_knee_compressor_float (GstAudioDynamic *
+ filter, gfloat * data, guint num_samples);
+static void
+gst_audio_dynamic_transform_soft_knee_compressor_int (GstAudioDynamic * filter,
+ gint16 * data, guint num_samples);
+static void
+gst_audio_dynamic_transform_soft_knee_compressor_float (GstAudioDynamic *
+ filter, gfloat * data, guint num_samples);
+static void gst_audio_dynamic_transform_hard_knee_expander_int (GstAudioDynamic
+ * filter, gint16 * data, guint num_samples);
+static void
+gst_audio_dynamic_transform_hard_knee_expander_float (GstAudioDynamic * filter,
+ gfloat * data, guint num_samples);
+static void gst_audio_dynamic_transform_soft_knee_expander_int (GstAudioDynamic
+ * filter, gint16 * data, guint num_samples);
+static void
+gst_audio_dynamic_transform_soft_knee_expander_float (GstAudioDynamic * filter,
+ gfloat * data, guint num_samples);
+
+static const GstAudioDynamicProcessFunc process_functions[] = {
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_hard_knee_compressor_int,
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_hard_knee_compressor_float,
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_soft_knee_compressor_int,
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_soft_knee_compressor_float,
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_hard_knee_expander_int,
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_hard_knee_expander_float,
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_soft_knee_expander_int,
+ (GstAudioDynamicProcessFunc)
+ gst_audio_dynamic_transform_soft_knee_expander_float
+};
+
+enum
+{
+ CHARACTERISTICS_HARD_KNEE = 0,
+ CHARACTERISTICS_SOFT_KNEE
+};
+
+/* Lazily registers the GEnum for the "characteristics" property.
+ * NOTE(review): the registration is not guarded by g_once_init_*;
+ * this is safe only if the first call happens before any concurrent
+ * use — confirm against GLib type-registration guidance. */
+#define GST_TYPE_AUDIO_DYNAMIC_CHARACTERISTICS (gst_audio_dynamic_characteristics_get_type ())
+static GType
+gst_audio_dynamic_characteristics_get_type (void)
+{
+ static GType gtype = 0;
+
+ if (gtype == 0) {
+ static const GEnumValue values[] = {
+ {CHARACTERISTICS_HARD_KNEE, "Hard Knee (default)",
+ "hard-knee"},
+ {CHARACTERISTICS_SOFT_KNEE, "Soft Knee (smooth)",
+ "soft-knee"},
+ {0, NULL, NULL}
+ };
+
+ gtype = g_enum_register_static ("GstAudioDynamicCharacteristics", values);
+ }
+ return gtype;
+}
+
+enum
+{
+ MODE_COMPRESSOR = 0,
+ MODE_EXPANDER
+};
+
+/* Lazily registers the GEnum for the "mode" property (compressor
+ * acts on samples above the threshold, expander on samples below). */
+#define GST_TYPE_AUDIO_DYNAMIC_MODE (gst_audio_dynamic_mode_get_type ())
+static GType
+gst_audio_dynamic_mode_get_type (void)
+{
+ static GType gtype = 0;
+
+ if (gtype == 0) {
+ static const GEnumValue values[] = {
+ {MODE_COMPRESSOR, "Compressor (default)",
+ "compressor"},
+ {MODE_EXPANDER, "Expander", "expander"},
+ {0, NULL, NULL}
+ };
+
+ gtype = g_enum_register_static ("GstAudioDynamicMode", values);
+ }
+ return gtype;
+}
+
+/* Selects the processing function matching the current mode,
+ * characteristics and negotiated sample format.
+ *
+ * The process_functions table is indexed as:
+ *   [mode: compressor = 0, expander = +4]
+ * + [knee: hard = 0, soft = +2]
+ * + [format: S16 = 0, F32 = +1] */
+static void
+gst_audio_dynamic_set_process_function (GstAudioDynamic * filter,
+ const GstAudioInfo * info)
+{
+ gint idx = 0;
+
+ if (filter->mode != MODE_COMPRESSOR)
+ idx += 4;
+ if (filter->characteristics != CHARACTERISTICS_HARD_KNEE)
+ idx += 2;
+ if (GST_AUDIO_INFO_FORMAT (info) == GST_AUDIO_FORMAT_F32)
+ idx += 1;
+
+ g_assert (idx >= 0 && idx < G_N_ELEMENTS (process_functions));
+
+ filter->process = process_functions[idx];
+}
+
+/* GObject vmethod implementations */
+
+/* Class initialization: installs the four element properties, sets
+ * the element metadata and pad templates, and wires up the
+ * GstAudioFilter/GstBaseTransform vfuncs. */
+static void
+gst_audio_dynamic_class_init (GstAudioDynamicClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_dynamic_debug, "audiodynamic", 0,
+ "audiodynamic element");
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_audio_dynamic_set_property;
+ gobject_class->get_property = gst_audio_dynamic_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_CHARACTERISTICS,
+ g_param_spec_enum ("characteristics", "Characteristics",
+ "Selects whether the ratio should be applied smooth (soft-knee) "
+ "or hard (hard-knee).",
+ GST_TYPE_AUDIO_DYNAMIC_CHARACTERISTICS, CHARACTERISTICS_HARD_KNEE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MODE,
+ g_param_spec_enum ("mode", "Mode",
+ "Selects whether the filter should work on loud samples (compressor) or"
+ "quiet samples (expander).",
+ GST_TYPE_AUDIO_DYNAMIC_MODE, MODE_COMPRESSOR,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /* threshold and ratio are GST_PARAM_CONTROLLABLE: they may be
+ * animated per-buffer via gst_object_sync_values() in transform_ip */
+ g_object_class_install_property (gobject_class, PROP_THRESHOLD,
+ g_param_spec_float ("threshold", "Threshold",
+ "Threshold until the filter is activated", 0.0, 1.0,
+ 0.0,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RATIO,
+ g_param_spec_float ("ratio", "Ratio",
+ "Ratio that should be applied", 0.0, G_MAXFLOAT,
+ 1.0,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Dynamic range controller", "Filter/Effect/Audio",
+ "Compressor and Expander", "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
+ GST_AUDIO_FILTER_CLASS (klass)->setup =
+ GST_DEBUG_FUNCPTR (gst_audio_dynamic_setup);
+
+ GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_audio_dynamic_transform_ip);
+ GST_BASE_TRANSFORM_CLASS (klass)->transform_ip_on_passthrough = FALSE;
+
+ gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_DYNAMIC_CHARACTERISTICS, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_DYNAMIC_MODE, 0);
+}
+
+static void
+gst_audio_dynamic_init (GstAudioDynamic * filter)
+{
+ /* Defaults describe a hard-knee compressor that leaves the signal
+ * untouched (ratio 1.0, threshold 0.0). */
+ filter->characteristics = CHARACTERISTICS_HARD_KNEE;
+ filter->mode = MODE_COMPRESSOR;
+ filter->threshold = 0.0;
+ filter->ratio = 1.0;
+
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
+ gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
+}
+
+/* Property setter. Changing mode or characteristics re-selects the
+ * processing function from the current audio info.
+ * NOTE(review): if caps are not negotiated yet, GST_AUDIO_FILTER_INFO
+ * holds no valid format and the S16 variant is picked; setup()
+ * re-selects once real caps arrive — confirm no buffer can be
+ * processed before setup(). */
+static void
+gst_audio_dynamic_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (object);
+
+ switch (prop_id) {
+ case PROP_CHARACTERISTICS:
+ filter->characteristics = g_value_get_enum (value);
+ gst_audio_dynamic_set_process_function (filter,
+ GST_AUDIO_FILTER_INFO (filter));
+ break;
+ case PROP_MODE:
+ filter->mode = g_value_get_enum (value);
+ gst_audio_dynamic_set_process_function (filter,
+ GST_AUDIO_FILTER_INFO (filter));
+ break;
+ case PROP_THRESHOLD:
+ filter->threshold = g_value_get_float (value);
+ break;
+ case PROP_RATIO:
+ filter->ratio = g_value_get_float (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Property getter: plain reads of the corresponding struct fields. */
+static void
+gst_audio_dynamic_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (object);
+
+ switch (prop_id) {
+ case PROP_CHARACTERISTICS:
+ g_value_set_enum (value, filter->characteristics);
+ break;
+ case PROP_MODE:
+ g_value_set_enum (value, filter->mode);
+ break;
+ case PROP_THRESHOLD:
+ g_value_set_float (value, filter->threshold);
+ break;
+ case PROP_RATIO:
+ g_value_set_float (value, filter->ratio);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* Caps negotiation: re-select the processing function for the newly
+ * negotiated sample format. */
+static gboolean
+gst_audio_dynamic_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (base);
+
+ gst_audio_dynamic_set_process_function (filter, info);
+ return TRUE;
+}
+
+/* Hard-knee compressor for S16 samples: samples beyond +/- threshold
+ * are scaled towards the threshold by ratio; samples inside the
+ * threshold pass unchanged. */
+static void
+gst_audio_dynamic_transform_hard_knee_compressor_int (GstAudioDynamic * filter,
+ gint16 * data, guint num_samples)
+{
+ glong val;
+ glong thr_p = filter->threshold * G_MAXINT16;
+ glong thr_n = filter->threshold * G_MININT16;
+
+ /* Nothing to do for us if ratio is 1.0 or if the threshold
+ * equals 1.0. */
+ if (filter->threshold == 1.0 || filter->ratio == 1.0)
+ return;
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ if (val > thr_p) {
+ val = thr_p + (val - thr_p) * filter->ratio;
+ } else if (val < thr_n) {
+ val = thr_n + (val - thr_n) * filter->ratio;
+ }
+ /* ratio > 1.0 can push the result past the 16-bit range */
+ *data++ = (gint16) CLAMP (val, G_MININT16, G_MAXINT16);
+ }
+}
+
+/* Hard-knee compressor for F32 samples; same mapping as the S16
+ * variant but without clamping, since float samples may legitimately
+ * exceed [-1.0, 1.0]. */
+static void
+gst_audio_dynamic_transform_hard_knee_compressor_float (GstAudioDynamic *
+ filter, gfloat * data, guint num_samples)
+{
+ gdouble val, threshold = filter->threshold;
+
+ /* Nothing to do for us if ratio == 1.0.
+ * As float values can be above 1.0 we have to do something
+ * if threshold is greater than 1.0. */
+ if (filter->ratio == 1.0)
+ return;
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ if (val > threshold) {
+ val = threshold + (val - threshold) * filter->ratio;
+ } else if (val < -threshold) {
+ val = -threshold + (val + threshold) * filter->ratio;
+ }
+ *data++ = (gfloat) val;
+ }
+}
+
+/* Soft-knee compressor for S16 samples: instead of the hard kink at
+ * the threshold, samples beyond +/- threshold follow a 2nd-degree
+ * polynomial that is continuous and differentiable at the threshold
+ * and reaches slope `ratio` at full scale. */
+static void
+gst_audio_dynamic_transform_soft_knee_compressor_int (GstAudioDynamic * filter,
+ gint16 * data, guint num_samples)
+{
+ glong val;
+ glong thr_p = filter->threshold * G_MAXINT16;
+ glong thr_n = filter->threshold * G_MININT16;
+ gdouble a_p, b_p, c_p;
+ gdouble a_n, b_n, c_n;
+
+ /* Nothing to do for us if ratio is 1.0 or if the threshold
+ * equals 1.0. */
+ if (filter->threshold == 1.0 || filter->ratio == 1.0)
+ return;
+
+ /* We build a 2nd degree polynomial here for
+ * values greater than threshold or small than
+ * -threshold with:
+ * f(t) = t, f'(t) = 1, f'(m) = r
+ * =>
+ * a = (1-r)/(2*(t-m))
+ * b = (r*t - m)/(t-m)
+ * c = t * (1 - b - a*t)
+ * f(x) = ax^2 + bx + c
+ */
+
+ /* shouldn't happen because this would only be the case
+ * for threshold == 1.0 which we catch above.
+ * Fixed: the negative-side assert previously checked against
+ * G_MININT, which never matches the G_MININT16 divisor below. */
+ g_assert (thr_p - G_MAXINT16 != 0);
+ g_assert (thr_n - G_MININT16 != 0);
+
+ a_p = (1 - filter->ratio) / (2 * (thr_p - G_MAXINT16));
+ b_p = (filter->ratio * thr_p - G_MAXINT16) / (thr_p - G_MAXINT16);
+ c_p = thr_p * (1 - b_p - a_p * thr_p);
+ a_n = (1 - filter->ratio) / (2 * (thr_n - G_MININT16));
+ b_n = (filter->ratio * thr_n - G_MININT16) / (thr_n - G_MININT16);
+ c_n = thr_n * (1 - b_n - a_n * thr_n);
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ if (val > thr_p) {
+ val = a_p * val * val + b_p * val + c_p;
+ } else if (val < thr_n) {
+ val = a_n * val * val + b_n * val + c_n;
+ }
+ *data++ = (gint16) CLAMP (val, G_MININT16, G_MAXINT16);
+ }
+}
+
+/* Soft-knee compressor for F32 samples: polynomial knee between the
+ * threshold and +/- 1.0, then a hard-knee linear segment beyond
+ * +/- 1.0 (float samples may exceed full scale). */
+static void
+gst_audio_dynamic_transform_soft_knee_compressor_float (GstAudioDynamic *
+ filter, gfloat * data, guint num_samples)
+{
+ gdouble val;
+ gdouble threshold = filter->threshold;
+ gdouble a_p, b_p, c_p;
+ gdouble a_n, b_n, c_n;
+
+ /* Nothing to do for us if ratio == 1.0.
+ * As float values can be above 1.0 we have to do something
+ * if threshold is greater than 1.0. */
+ if (filter->ratio == 1.0)
+ return;
+
+ /* We build a 2nd degree polynomial here for
+ * values greater than threshold or small than
+ * -threshold with:
+ * f(t) = t, f'(t) = 1, f'(m) = r
+ * =>
+ * a = (1-r)/(2*(t-m))
+ * b = (r*t - m)/(t-m)
+ * c = t * (1 - b - a*t)
+ * f(x) = ax^2 + bx + c
+ */
+
+ /* FIXME: If threshold is the same as the maximum
+ * we need to raise it a bit to prevent
+ * division by zero. */
+ if (threshold == 1.0)
+ threshold = 1.0 + 0.00001;
+
+ a_p = (1.0 - filter->ratio) / (2.0 * (threshold - 1.0));
+ b_p = (filter->ratio * threshold - 1.0) / (threshold - 1.0);
+ c_p = threshold * (1.0 - b_p - a_p * threshold);
+ a_n = (1.0 - filter->ratio) / (2.0 * (-threshold + 1.0));
+ b_n = (-filter->ratio * threshold + 1.0) / (-threshold + 1.0);
+ c_n = -threshold * (1.0 - b_n + a_n * threshold);
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ /* beyond +/- 1.0 fall back to the hard-knee linear mapping */
+ if (val > 1.0) {
+ val = 1.0 + (val - 1.0) * filter->ratio;
+ } else if (val > threshold) {
+ val = a_p * val * val + b_p * val + c_p;
+ } else if (val < -1.0) {
+ val = -1.0 + (val + 1.0) * filter->ratio;
+ } else if (val < -threshold) {
+ val = a_n * val * val + b_n * val + c_n;
+ }
+ *data++ = (gfloat) val;
+ }
+}
+
+/* Hard-knee expander for S16 samples: samples between the zero
+ * crossing of the expansion line and the threshold are scaled by
+ * ratio; samples between 0 and the zero crossing are muted. */
+static void
+gst_audio_dynamic_transform_hard_knee_expander_int (GstAudioDynamic * filter,
+ gint16 * data, guint num_samples)
+{
+ glong val;
+ glong thr_p = filter->threshold * G_MAXINT16;
+ glong thr_n = filter->threshold * G_MININT16;
+ gdouble zero_p, zero_n;
+
+ /* Nothing to do for us here if threshold equals 0.0
+ * or ratio equals 1.0 */
+ if (filter->threshold == 0.0 || filter->ratio == 1.0)
+ return;
+
+ /* zero crossing of our function */
+ if (filter->ratio != 0.0) {
+ zero_p = thr_p - thr_p / filter->ratio;
+ zero_n = thr_n - thr_n / filter->ratio;
+ } else {
+ zero_p = zero_n = 0.0;
+ }
+
+ /* for ratio < 1 the crossing would lie past zero; clamp it so the
+ * mute band never extends across the origin */
+ if (zero_p < 0.0)
+ zero_p = 0.0;
+ if (zero_n > 0.0)
+ zero_n = 0.0;
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ if (val < thr_p && val > zero_p) {
+ val = filter->ratio * val + thr_p * (1 - filter->ratio);
+ } else if ((val <= zero_p && val > 0) || (val >= zero_n && val < 0)) {
+ val = 0;
+ } else if (val > thr_n && val < zero_n) {
+ val = filter->ratio * val + thr_n * (1 - filter->ratio);
+ }
+ *data++ = (gint16) CLAMP (val, G_MININT16, G_MAXINT16);
+ }
+}
+
+/* Hard-knee expander for F32 samples; mirrors the S16 variant with a
+ * single non-negative zero crossing (the negative side uses -zero). */
+static void
+gst_audio_dynamic_transform_hard_knee_expander_float (GstAudioDynamic * filter,
+ gfloat * data, guint num_samples)
+{
+ gdouble val, threshold = filter->threshold, zero;
+
+ /* Nothing to do for us here if threshold equals 0.0
+ * or ratio equals 1.0 */
+ if (filter->threshold == 0.0 || filter->ratio == 1.0)
+ return;
+
+ /* zero crossing of our function */
+ if (filter->ratio != 0.0)
+ zero = threshold - threshold / filter->ratio;
+ else
+ zero = 0.0;
+
+ if (zero < 0.0)
+ zero = 0.0;
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ if (val < threshold && val > zero) {
+ val = filter->ratio * val + threshold * (1.0 - filter->ratio);
+ } else if ((val <= zero && val > 0.0) || (val >= -zero && val < 0.0)) {
+ val = 0.0;
+ } else if (val > -threshold && val < -zero) {
+ val = filter->ratio * val - threshold * (1.0 - filter->ratio);
+ }
+ *data++ = (gfloat) val;
+ }
+}
+
+/* Soft-knee expander for S16 samples: between the zero crossing and
+ * the threshold the signal follows a 2nd-degree polynomial that meets
+ * the identity mapping smoothly at the threshold. */
+static void
+gst_audio_dynamic_transform_soft_knee_expander_int (GstAudioDynamic * filter,
+ gint16 * data, guint num_samples)
+{
+ glong val;
+ glong thr_p = filter->threshold * G_MAXINT16;
+ glong thr_n = filter->threshold * G_MININT16;
+ gdouble zero_p, zero_n;
+ gdouble a_p, b_p, c_p;
+ gdouble a_n, b_n, c_n;
+ gdouble r2;
+
+ /* Nothing to do for us here if threshold equals 0.0
+ * or ratio equals 1.0 */
+ if (filter->threshold == 0.0 || filter->ratio == 1.0)
+ return;
+
+ /* zero crossing of our function */
+ zero_p = (thr_p * (filter->ratio - 1.0)) / (1.0 + filter->ratio);
+ zero_n = (thr_n * (filter->ratio - 1.0)) / (1.0 + filter->ratio);
+
+ if (zero_p < 0.0)
+ zero_p = 0.0;
+ if (zero_n > 0.0)
+ zero_n = 0.0;
+
+ /* shouldn't happen as this would only happen
+ * with threshold == 0.0 */
+ g_assert (thr_p != 0);
+ g_assert (thr_n != 0);
+
+ /* We build a 2n degree polynomial here for values between
+ * 0 and threshold or 0 and -threshold with:
+ * f(t) = t, f'(t) = 1, f(z) = 0, f'(z) = r
+ * z between 0 and t
+ * =>
+ * a = (1 - r^2) / (4 * t)
+ * b = (1 + r^2) / 2
+ * c = t * (1.0 - b - a*t)
+ * f(x) = ax^2 + bx + c */
+ r2 = filter->ratio * filter->ratio;
+ a_p = (1.0 - r2) / (4.0 * thr_p);
+ b_p = (1.0 + r2) / 2.0;
+ c_p = thr_p * (1.0 - b_p - a_p * thr_p);
+ a_n = (1.0 - r2) / (4.0 * thr_n);
+ b_n = (1.0 + r2) / 2.0;
+ c_n = thr_n * (1.0 - b_n - a_n * thr_n);
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ if (val < thr_p && val > zero_p) {
+ val = a_p * val * val + b_p * val + c_p;
+ } else if ((val <= zero_p && val > 0) || (val >= zero_n && val < 0)) {
+ val = 0;
+ } else if (val > thr_n && val < zero_n) {
+ val = a_n * val * val + b_n * val + c_n;
+ }
+ *data++ = (gint16) CLAMP (val, G_MININT16, G_MAXINT16);
+ }
+}
+
+/* Soft-knee expander for F32 samples; same polynomial construction as
+ * the S16 variant, using -threshold for the negative branch. */
+static void
+gst_audio_dynamic_transform_soft_knee_expander_float (GstAudioDynamic * filter,
+ gfloat * data, guint num_samples)
+{
+ gdouble val;
+ gdouble threshold = filter->threshold;
+ gdouble zero;
+ gdouble a_p, b_p, c_p;
+ gdouble a_n, b_n, c_n;
+ gdouble r2;
+
+ /* Nothing to do for us here if threshold equals 0.0
+ * or ratio equals 1.0 */
+ if (filter->threshold == 0.0 || filter->ratio == 1.0)
+ return;
+
+ /* zero crossing of our function */
+ zero = (threshold * (filter->ratio - 1.0)) / (1.0 + filter->ratio);
+
+ if (zero < 0.0)
+ zero = 0.0;
+
+ /* shouldn't happen as this only happens with
+ * threshold == 0.0 */
+ g_assert (threshold != 0.0);
+
+ /* We build a 2n degree polynomial here for values between
+ * 0 and threshold or 0 and -threshold with:
+ * f(t) = t, f'(t) = 1, f(z) = 0, f'(z) = r
+ * z between 0 and t
+ * =>
+ * a = (1 - r^2) / (4 * t)
+ * b = (1 + r^2) / 2
+ * c = t * (1.0 - b - a*t)
+ * f(x) = ax^2 + bx + c */
+ r2 = filter->ratio * filter->ratio;
+ a_p = (1.0 - r2) / (4.0 * threshold);
+ b_p = (1.0 + r2) / 2.0;
+ c_p = threshold * (1.0 - b_p - a_p * threshold);
+ a_n = (1.0 - r2) / (-4.0 * threshold);
+ b_n = (1.0 + r2) / 2.0;
+ c_n = -threshold * (1.0 - b_n + a_n * threshold);
+
+ for (; num_samples; num_samples--) {
+ val = *data;
+
+ if (val < threshold && val > zero) {
+ val = a_p * val * val + b_p * val + c_p;
+ } else if ((val <= zero && val > 0.0) || (val >= -zero && val < 0.0)) {
+ val = 0.0;
+ } else if (val > -threshold && val < -zero) {
+ val = a_n * val * val + b_n * val + c_n;
+ }
+ *data++ = (gfloat) val;
+ }
+}
+
+/* GstBaseTransform vmethod implementations */
+static GstFlowReturn
+gst_audio_dynamic_transform_ip (GstBaseTransform * base, GstBuffer * buf)
+{
+ GstAudioDynamic *filter = GST_AUDIO_DYNAMIC (base);
+ guint num_samples;
+ GstClockTime timestamp, stream_time;
+ GstMapInfo map;
+
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+ stream_time =
+ gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+ if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
+ return GST_FLOW_OK;
+
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+ filter->process (filter, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
+
+ return GST_FLOW_OK;
+}
diff --git a/gst/audiofx/audiodynamic.h b/gst/audiofx/audiodynamic.h
new file mode 100644
index 0000000000..246a56496a
--- /dev/null
+++ b/gst/audiofx/audiodynamic.h
@@ -0,0 +1,65 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIO_DYNAMIC_H__
+#define __GST_AUDIO_DYNAMIC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_DYNAMIC (gst_audio_dynamic_get_type())
+#define GST_AUDIO_DYNAMIC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_DYNAMIC,GstAudioDynamic))
+#define GST_IS_AUDIO_DYNAMIC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_DYNAMIC))
+#define GST_AUDIO_DYNAMIC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_DYNAMIC,GstAudioDynamicClass))
+#define GST_IS_AUDIO_DYNAMIC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_DYNAMIC))
+#define GST_AUDIO_DYNAMIC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_DYNAMIC,GstAudioDynamicClass))
+typedef struct _GstAudioDynamic GstAudioDynamic;
+typedef struct _GstAudioDynamicClass GstAudioDynamicClass;
+
+typedef void (*GstAudioDynamicProcessFunc) (GstAudioDynamic *, guint8 *, guint);
+
+struct _GstAudioDynamic
+{
+ GstAudioFilter audiofilter;
+
+ /* < private > */
+ GstAudioDynamicProcessFunc process;
+ gint characteristics;
+ gint mode;
+ gfloat threshold;
+ gfloat ratio;
+};
+
+struct _GstAudioDynamicClass
+{
+ GstAudioFilterClass parent;
+};
+
+GType gst_audio_dynamic_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audiodynamic);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_DYNAMIC_H__ */
diff --git a/gst/audiofx/audioecho.c b/gst/audiofx/audioecho.c
new file mode 100644
index 0000000000..70af624a2d
--- /dev/null
+++ b/gst/audiofx/audioecho.c
@@ -0,0 +1,506 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-audioecho
+ * @title: audioecho
+ *
+ * audioecho adds an echo or (simple) reverb effect to an audio stream. The echo
+ * delay, intensity and the percentage of feedback can be configured.
+ *
+ * To get an echo effect, set the delay to a larger value,
+ * for example 200 ms or more. Anything below that results in a simple
+ * reverb effect, which gives a slightly metallic sound.
+ *
+ * Use the max-delay property to set the maximum amount of delay that
+ * will be used. This can only be set before going to the PAUSED or PLAYING
+ * state and will be set to the current delay by default.
+ *
+ * audioecho can also be used to apply a configurable delay to audio channels
+ * by setting surround-delay=true. In that mode, it just delays "surround
+ * channels" by the delay amount instead of performing an echo. The
+ * channels that are configured surround channels for the delay are
+ * selected using the surround-channels mask property.
+ *
+ * ## Example launch lines
+ * |[
+ * gst-launch-1.0 autoaudiosrc ! audioconvert ! audioecho delay=500000000 intensity=0.6 feedback=0.4 ! audioconvert ! autoaudiosink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! decodebin ! audioconvert ! audioecho delay=50000000 intensity=0.6 feedback=0.4 ! audioconvert ! autoaudiosink
+ * gst-launch-1.0 audiotestsrc ! audioconvert ! audio/x-raw,channels=4 ! audioecho surround-delay=true delay=500000000 ! audioconvert ! autoaudiosink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audioecho.h"
+
+#define GST_CAT_DEFAULT gst_audio_echo_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* Everything except the first 2 channels are considered surround */
+#define DEFAULT_SURROUND_MASK ~((guint64)(0x3))
+
+enum
+{
+ PROP_0,
+ PROP_DELAY,
+ PROP_MAX_DELAY,
+ PROP_INTENSITY,
+ PROP_FEEDBACK,
+ PROP_SUR_DELAY,
+ PROP_SUR_MASK
+};
+
+#define ALLOWED_CAPS \
+ "audio/x-raw," \
+ " format=(string) {"GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}, " \
+ " rate=(int)[1,MAX]," \
+ " channels=(int)[1,MAX]," \
+ " layout=(string) interleaved"
+
+#define gst_audio_echo_parent_class parent_class
+G_DEFINE_TYPE (GstAudioEcho, gst_audio_echo, GST_TYPE_AUDIO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audioecho, "audioecho",
+ GST_RANK_NONE, GST_TYPE_AUDIO_ECHO);
+
+static void gst_audio_echo_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_echo_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_audio_echo_finalize (GObject * object);
+
+static gboolean gst_audio_echo_setup (GstAudioFilter * self,
+ const GstAudioInfo * info);
+static gboolean gst_audio_echo_stop (GstBaseTransform * base);
+static GstFlowReturn gst_audio_echo_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+
+static void gst_audio_echo_transform_float (GstAudioEcho * self,
+ gfloat * data, guint num_samples);
+static void gst_audio_echo_transform_double (GstAudioEcho * self,
+ gdouble * data, guint num_samples);
+
+/* GObject vmethod implementations */
+
+static void
+gst_audio_echo_class_init (GstAudioEchoClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *basetransform_class = (GstBaseTransformClass *) klass;
+ GstAudioFilterClass *audioself_class = (GstAudioFilterClass *) klass;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_echo_debug, "audioecho", 0,
+ "audioecho element");
+
+ gobject_class->set_property = gst_audio_echo_set_property;
+ gobject_class->get_property = gst_audio_echo_get_property;
+ gobject_class->finalize = gst_audio_echo_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_DELAY,
+ g_param_spec_uint64 ("delay", "Delay",
+ "Delay of the echo in nanoseconds", 1, G_MAXUINT64,
+ 1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
+ | GST_PARAM_CONTROLLABLE));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_DELAY,
+ g_param_spec_uint64 ("max-delay", "Maximum Delay",
+ "Maximum delay of the echo in nanoseconds"
+ " (can't be changed in PLAYING or PAUSED state)",
+ 1, G_MAXUINT64, 1,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+
+ g_object_class_install_property (gobject_class, PROP_INTENSITY,
+ g_param_spec_float ("intensity", "Intensity",
+ "Intensity of the echo", 0.0, 1.0,
+ 0.0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
+ | GST_PARAM_CONTROLLABLE));
+
+ g_object_class_install_property (gobject_class, PROP_FEEDBACK,
+ g_param_spec_float ("feedback", "Feedback",
+ "Amount of feedback", 0.0, 1.0,
+ 0.0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
+ | GST_PARAM_CONTROLLABLE));
+
+ g_object_class_install_property (gobject_class, PROP_SUR_DELAY,
+ g_param_spec_boolean ("surround-delay", "Enable Surround Delay",
+ "Delay Surround Channels when TRUE instead of applying an echo effect",
+ FALSE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+ g_object_class_install_property (gobject_class, PROP_SUR_MASK,
+ g_param_spec_uint64 ("surround-mask", "Surround Mask",
+ "A bitmask of channels that are considered surround and delayed when surround-delay = TRUE",
+ 1, G_MAXUINT64, DEFAULT_SURROUND_MASK,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+
+ gst_element_class_set_static_metadata (gstelement_class, "Audio echo",
+ "Filter/Effect/Audio",
+ "Adds an echo or reverb effect to an audio stream",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
+ audioself_class->setup = GST_DEBUG_FUNCPTR (gst_audio_echo_setup);
+ basetransform_class->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_audio_echo_transform_ip);
+ basetransform_class->stop = GST_DEBUG_FUNCPTR (gst_audio_echo_stop);
+}
+
+static void
+gst_audio_echo_init (GstAudioEcho * self)
+{
+ self->delay = 1;
+ self->max_delay = 1;
+ self->intensity = 0.0;
+ self->feedback = 0.0;
+ self->surdelay = FALSE;
+ self->surround_mask = DEFAULT_SURROUND_MASK;
+
+ g_mutex_init (&self->lock);
+
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (self), TRUE);
+}
+
+static void
+gst_audio_echo_finalize (GObject * object)
+{
+ GstAudioEcho *self = GST_AUDIO_ECHO (object);
+
+ g_free (self->buffer);
+ self->buffer = NULL;
+
+ g_mutex_clear (&self->lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_audio_echo_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioEcho *self = GST_AUDIO_ECHO (object);
+
+ switch (prop_id) {
+ case PROP_DELAY:{
+ guint64 max_delay, delay;
+ guint rate;
+
+ g_mutex_lock (&self->lock);
+ delay = g_value_get_uint64 (value);
+ max_delay = self->max_delay;
+
+ if (delay > max_delay && GST_STATE (self) > GST_STATE_READY) {
+ GST_WARNING_OBJECT (self, "New delay (%" GST_TIME_FORMAT ") "
+ "is larger than maximum delay (%" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (delay), GST_TIME_ARGS (max_delay));
+ self->delay = max_delay;
+ } else {
+ self->delay = delay;
+ self->max_delay = MAX (delay, max_delay);
+ if (delay > max_delay) {
+ g_free (self->buffer);
+ self->buffer = NULL;
+ }
+ }
+ rate = GST_AUDIO_FILTER_RATE (self);
+ if (rate > 0)
+ self->delay_frames =
+ MAX (gst_util_uint64_scale (self->delay, rate, GST_SECOND), 1);
+
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ case PROP_MAX_DELAY:{
+ guint64 max_delay;
+
+ g_mutex_lock (&self->lock);
+ max_delay = g_value_get_uint64 (value);
+
+ if (GST_STATE (self) > GST_STATE_READY) {
+ GST_ERROR_OBJECT (self, "Can't change maximum delay in"
+ " PLAYING or PAUSED state");
+ } else {
+ self->max_delay = max_delay;
+ g_free (self->buffer);
+ self->buffer = NULL;
+ }
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ case PROP_INTENSITY:{
+ g_mutex_lock (&self->lock);
+ self->intensity = g_value_get_float (value);
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ case PROP_FEEDBACK:{
+ g_mutex_lock (&self->lock);
+ self->feedback = g_value_get_float (value);
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ case PROP_SUR_DELAY:{
+ g_mutex_lock (&self->lock);
+ self->surdelay = g_value_get_boolean (value);
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ case PROP_SUR_MASK:{
+ g_mutex_lock (&self->lock);
+ self->surround_mask = g_value_get_uint64 (value);
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_echo_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioEcho *self = GST_AUDIO_ECHO (object);
+
+ switch (prop_id) {
+ case PROP_DELAY:
+ g_mutex_lock (&self->lock);
+ g_value_set_uint64 (value, self->delay);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_MAX_DELAY:
+ g_mutex_lock (&self->lock);
+ g_value_set_uint64 (value, self->max_delay);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_INTENSITY:
+ g_mutex_lock (&self->lock);
+ g_value_set_float (value, self->intensity);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_FEEDBACK:
+ g_mutex_lock (&self->lock);
+ g_value_set_float (value, self->feedback);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_SUR_DELAY:
+ g_mutex_lock (&self->lock);
+ g_value_set_boolean (value, self->surdelay);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_SUR_MASK:{
+ g_mutex_lock (&self->lock);
+ g_value_set_uint64 (value, self->surround_mask);
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstAudioFilter vmethod implementations */
+
+static gboolean
+gst_audio_echo_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioEcho *self = GST_AUDIO_ECHO (base);
+ gboolean ret = TRUE;
+
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_F32:
+ self->process = (GstAudioEchoProcessFunc)
+ gst_audio_echo_transform_float;
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ self->process = (GstAudioEchoProcessFunc)
+ gst_audio_echo_transform_double;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+
+ g_free (self->buffer);
+ self->buffer = NULL;
+ self->buffer_pos = 0;
+ self->buffer_size = 0;
+ self->buffer_size_frames = 0;
+
+ return ret;
+}
+
+static gboolean
+gst_audio_echo_stop (GstBaseTransform * base)
+{
+ GstAudioEcho *self = GST_AUDIO_ECHO (base);
+
+ g_free (self->buffer);
+ self->buffer = NULL;
+ self->buffer_pos = 0;
+ self->buffer_size = 0;
+ self->buffer_size_frames = 0;
+
+ return TRUE;
+}
+
+#define TRANSFORM_FUNC(name, type) \
+static void \
+gst_audio_echo_transform_##name (GstAudioEcho * self, \
+ type * data, guint num_samples) \
+{ \
+ type *buffer = (type *) self->buffer; \
+ guint channels = GST_AUDIO_FILTER_CHANNELS (self); \
+ guint i, j; \
+ guint echo_offset = self->buffer_size_frames - self->delay_frames; \
+ gdouble intensity = self->intensity; \
+ gdouble feedback = self->feedback; \
+ guint buffer_pos = self->buffer_pos; \
+ guint buffer_size_frames = self->buffer_size_frames; \
+ \
+ if (self->surdelay == FALSE) { \
+ guint read_pos = ((echo_offset + buffer_pos) % buffer_size_frames) * channels; \
+ guint write_pos = (buffer_pos % buffer_size_frames) * channels; \
+ guint buffer_size = buffer_size_frames * channels; \
+ for (i = 0; i < num_samples; i++) { \
+ gdouble in = *data; \
+ gdouble echo = buffer[read_pos]; \
+ type out = in + intensity * echo; \
+ \
+ *data = out; \
+ \
+ buffer[write_pos] = in + feedback * echo; \
+ read_pos = (read_pos + 1) % buffer_size; \
+ write_pos = (write_pos + 1) % buffer_size; \
+ data++; \
+ } \
+ buffer_pos = write_pos / channels; \
+ } else { \
+ guint64 surround_mask = self->surround_mask; \
+ guint read_pos = ((echo_offset + buffer_pos) % buffer_size_frames) * channels; \
+ guint write_pos = (buffer_pos % buffer_size_frames) * channels; \
+ guint buffer_size = buffer_size_frames * channels; \
+ \
+ num_samples /= channels; \
+ \
+ for (i = 0; i < num_samples; i++) { \
+ guint64 channel_mask = 1; \
+ \
+ for (j = 0; j < channels; j++) { \
+ if (channel_mask & surround_mask) { \
+ gdouble in = data[j]; \
+ gdouble echo = buffer[read_pos + j]; \
+ type out = echo; \
+ \
+ data[j] = out; \
+ \
+ buffer[write_pos + j] = in; \
+ } else { \
+ gdouble in = data[j]; \
+ gdouble echo = buffer[read_pos + j]; \
+ type out = in + intensity * echo; \
+ \
+ data[j] = out; \
+ \
+ buffer[write_pos + j] = in + feedback * echo; \
+ } \
+ channel_mask <<= 1; \
+ } \
+ read_pos = (read_pos + channels) % buffer_size; \
+ write_pos = (write_pos + channels) % buffer_size; \
+ data += channels; \
+ } \
+ buffer_pos = write_pos / channels; \
+ } \
+ self->buffer_pos = buffer_pos; \
+}
+
+TRANSFORM_FUNC (float, gfloat);
+TRANSFORM_FUNC (double, gdouble);
+
+/* GstBaseTransform vmethod implementations */
+static GstFlowReturn
+gst_audio_echo_transform_ip (GstBaseTransform * base, GstBuffer * buf)
+{
+ GstAudioEcho *self = GST_AUDIO_ECHO (base);
+ guint num_samples;
+ GstClockTime timestamp, stream_time;
+ GstMapInfo map;
+
+ g_mutex_lock (&self->lock);
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+ stream_time =
+ gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (self), stream_time);
+
+ if (self->buffer == NULL) {
+ guint bpf, rate;
+
+ bpf = GST_AUDIO_FILTER_BPF (self);
+ rate = GST_AUDIO_FILTER_RATE (self);
+
+ self->delay_frames =
+ MAX (gst_util_uint64_scale (self->delay, rate, GST_SECOND), 1);
+ self->buffer_size_frames =
+ MAX (gst_util_uint64_scale (self->max_delay, rate, GST_SECOND), 1);
+
+ self->buffer_size = self->buffer_size_frames * bpf;
+ self->buffer = g_try_malloc0 (self->buffer_size);
+ self->buffer_pos = 0;
+
+ if (self->buffer == NULL) {
+ g_mutex_unlock (&self->lock);
+ GST_ERROR_OBJECT (self, "Failed to allocate %u bytes", self->buffer_size);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (self);
+
+ self->process (self, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
+ g_mutex_unlock (&self->lock);
+
+ return GST_FLOW_OK;
+}
diff --git a/gst/audiofx/audioecho.h b/gst/audiofx/audioecho.h
new file mode 100644
index 0000000000..b5eb5d6fe0
--- /dev/null
+++ b/gst/audiofx/audioecho.h
@@ -0,0 +1,75 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIO_ECHO_H__
+#define __GST_AUDIO_ECHO_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_ECHO (gst_audio_echo_get_type())
+#define GST_AUDIO_ECHO(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_ECHO,GstAudioEcho))
+#define GST_IS_AUDIO_ECHO(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_ECHO))
+#define GST_AUDIO_ECHO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_ECHO,GstAudioEchoClass))
+#define GST_IS_AUDIO_ECHO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_ECHO))
+#define GST_AUDIO_ECHO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_ECHO,GstAudioEchoClass))
+typedef struct _GstAudioEcho GstAudioEcho;
+typedef struct _GstAudioEchoClass GstAudioEchoClass;
+
+typedef void (*GstAudioEchoProcessFunc) (GstAudioEcho *, guint8 *, guint);
+
+struct _GstAudioEcho
+{
+ GstAudioFilter audiofilter;
+
+ guint64 delay;
+ guint64 max_delay;
+ gfloat intensity;
+ gfloat feedback;
+ gboolean surdelay;
+ guint64 surround_mask;
+
+ /* < private > */
+ GstAudioEchoProcessFunc process;
+ guint delay_frames;
+ guint8 *buffer;
+ guint buffer_pos;
+ guint buffer_size;
+ guint buffer_size_frames;
+
+ GMutex lock;
+};
+
+struct _GstAudioEchoClass
+{
+ GstAudioFilterClass parent;
+};
+
+GType gst_audio_echo_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audioecho);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_ECHO_H__ */
diff --git a/gst/audiofx/audiofirfilter.c b/gst/audiofx/audiofirfilter.c
new file mode 100644
index 0000000000..fa1df30353
--- /dev/null
+++ b/gst/audiofx/audiofirfilter.c
@@ -0,0 +1,265 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+/**
+ * SECTION:element-audiofirfilter
+ * @title: audiofirfilter
+ *
+ * audiofirfilter implements a generic audio
+ * [FIR filter](http://en.wikipedia.org/wiki/Finite_impulse_response). Before
+ * usage the "kernel" property has to be set to the filter kernel that should be
+ * used and the "latency" property has to be set to the latency (in samples)
+ * that is introduced by the filter kernel. Setting a latency of n samples
+ * will lead to the first n samples being dropped from the output and
+ * n samples added to the end.
+ *
+ * The filter kernel describes the impulse response of the filter. To
+ * calculate the frequency response of the filter you have to calculate
+ * the Fourier Transform of the impulse response.
+ *
+ * To change the filter kernel whenever the sampling rate changes the
+ * "rate-changed" signal can be used. This should be done for most
+ * FIR filters as they're depending on the sampling rate.
+ *
+ * ## Example application
+ * <programlisting language="C">
+ * <xi:include xmlns:xi="http://www.w3.org/2003/XInclude" parse="text" href="../../../../tests/examples/audiofx/firfilter-example.c" />
+ * </programlisting>
+ *
+ */
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiofirfilter.h"
+
+#include "gst/glib-compat-private.h"
+
+#define GST_CAT_DEFAULT gst_audio_fir_filter_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+enum
+{
+ SIGNAL_RATE_CHANGED,
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_KERNEL,
+ PROP_LATENCY
+};
+
+static guint gst_audio_fir_filter_signals[LAST_SIGNAL] = { 0, };
+
+#define gst_audio_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFIRFilter, gst_audio_fir_filter,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audiofirfilter, "audiofirfilter",
+ GST_RANK_NONE, GST_TYPE_AUDIO_FIR_FILTER);
+
+static void gst_audio_fir_filter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_fir_filter_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_audio_fir_filter_finalize (GObject * object);
+
+static gboolean gst_audio_fir_filter_setup (GstAudioFilter * base,
+ const GstAudioInfo * info);
+
+
+static void
+gst_audio_fir_filter_class_init (GstAudioFIRFilterClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_fir_filter_debug, "audiofirfilter", 0,
+ "Generic audio FIR filter plugin");
+
+ gobject_class->set_property = gst_audio_fir_filter_set_property;
+ gobject_class->get_property = gst_audio_fir_filter_get_property;
+ gobject_class->finalize = gst_audio_fir_filter_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_KERNEL,
+ g_param_spec_value_array ("kernel", "Filter Kernel",
+ "Filter kernel for the FIR filter",
+ g_param_spec_double ("Element", "Filter Kernel Element",
+ "Element of the filter kernel", -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_uint64 ("latency", "Latency",
+ "Filter latency in samples",
+ 0, G_MAXUINT64, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_fir_filter_setup);
+
+ /**
+ * GstAudioFIRFilter::rate-changed:
+ * @filter: the filter on which the signal is emitted
+ * @rate: the new sampling rate
+ *
+ * Will be emitted when the sampling rate changes. The callbacks
+ * will be called from the streaming thread and processing will
+ * stop until the event is handled.
+ */
+ gst_audio_fir_filter_signals[SIGNAL_RATE_CHANGED] =
+ g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioFIRFilterClass, rate_changed),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Audio FIR filter", "Filter/Effect/Audio",
+ "Generic audio FIR filter with custom filter kernel",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+}
+
+static void
+gst_audio_fir_filter_update_kernel (GstAudioFIRFilter * self, GValueArray * va)
+{
+ gdouble *kernel;
+ guint i;
+
+ if (va) {
+ if (self->kernel)
+ g_value_array_free (self->kernel);
+
+ self->kernel = va;
+ }
+
+ kernel = g_new (gdouble, self->kernel->n_values);
+
+ for (i = 0; i < self->kernel->n_values; i++) {
+ GValue *v = g_value_array_get_nth (self->kernel, i);
+ kernel[i] = g_value_get_double (v);
+ }
+
+ gst_audio_fx_base_fir_filter_set_kernel (GST_AUDIO_FX_BASE_FIR_FILTER (self),
+ kernel, self->kernel->n_values, self->latency, NULL);
+}
+
+static void
+gst_audio_fir_filter_init (GstAudioFIRFilter * self)
+{
+ GValue v = { 0, };
+ GValueArray *va;
+
+ self->latency = 0;
+ va = g_value_array_new (1);
+
+ g_value_init (&v, G_TYPE_DOUBLE);
+ g_value_set_double (&v, 1.0);
+ g_value_array_append (va, &v);
+ g_value_unset (&v);
+ gst_audio_fir_filter_update_kernel (self, va);
+
+ g_mutex_init (&self->lock);
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* get notified of caps and plug in the correct process function */
+static gboolean
+gst_audio_fir_filter_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioFIRFilter *self = GST_AUDIO_FIR_FILTER (base);
+ gint new_rate = GST_AUDIO_INFO_RATE (info);
+
+ if (GST_AUDIO_FILTER_RATE (self) != new_rate) {
+ g_signal_emit (G_OBJECT (self),
+ gst_audio_fir_filter_signals[SIGNAL_RATE_CHANGED], 0, new_rate);
+ }
+
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
+}
+
+static void
+gst_audio_fir_filter_finalize (GObject * object)
+{
+ GstAudioFIRFilter *self = GST_AUDIO_FIR_FILTER (object);
+
+ g_mutex_clear (&self->lock);
+
+ if (self->kernel)
+ g_value_array_free (self->kernel);
+ self->kernel = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_audio_fir_filter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioFIRFilter *self = GST_AUDIO_FIR_FILTER (object);
+
+ g_return_if_fail (GST_IS_AUDIO_FIR_FILTER (self));
+
+ switch (prop_id) {
+ case PROP_KERNEL:
+ g_mutex_lock (&self->lock);
+ /* update kernel already pushes residues */
+ gst_audio_fir_filter_update_kernel (self, g_value_dup_boxed (value));
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_LATENCY:
+ g_mutex_lock (&self->lock);
+ self->latency = g_value_get_uint64 (value);
+ gst_audio_fir_filter_update_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_fir_filter_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioFIRFilter *self = GST_AUDIO_FIR_FILTER (object);
+
+ switch (prop_id) {
+ case PROP_KERNEL:
+ g_value_set_boxed (value, self->kernel);
+ break;
+ case PROP_LATENCY:
+ g_value_set_uint64 (value, self->latency);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/audiofx/audiofirfilter.h b/gst/audiofx/audiofirfilter.h
new file mode 100644
index 0000000000..e6f18ce711
--- /dev/null
+++ b/gst/audiofx/audiofirfilter.h
@@ -0,0 +1,73 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef __GST_AUDIO_FIR_FILTER_H__
+#define __GST_AUDIO_FIR_FILTER_H__
+
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiofxbasefirfilter.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_FIR_FILTER \
+ (gst_audio_fir_filter_get_type())
+#define GST_AUDIO_FIR_FILTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_FIR_FILTER,GstAudioFIRFilter))
+#define GST_AUDIO_FIR_FILTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_FIR_FILTER,GstAudioFIRFilterClass))
+#define GST_IS_AUDIO_FIR_FILTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_FIR_FILTER))
+#define GST_IS_AUDIO_FIR_FILTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_FIR_FILTER))
+
+typedef struct _GstAudioFIRFilter GstAudioFIRFilter;
+typedef struct _GstAudioFIRFilterClass GstAudioFIRFilterClass;
+
+/**
+ * GstAudioFIRFilter:
+ *
+ * Opaque data structure.
+ */
+struct _GstAudioFIRFilter {
+ GstAudioFXBaseFIRFilter parent;
+
+ GValueArray *kernel;
+ guint64 latency;
+
+ /* < private > */
+ GMutex lock;
+};
+
+struct _GstAudioFIRFilterClass {
+ GstAudioFXBaseFIRFilterClass parent;
+
+ void (*rate_changed) (GstElement * element, gint rate);
+};
+
+GType gst_audio_fir_filter_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audiofirfilter);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_FIR_FILTER_H__ */
diff --git a/gst/audiofx/audiofx.c b/gst/audiofx/audiofx.c
new file mode 100644
index 0000000000..dae58ead60
--- /dev/null
+++ b/gst/audiofx/audiofx.c
@@ -0,0 +1,74 @@
+/*
+ * GStreamer
+ * Copyright (C) 2006 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "audiopanorama.h"
+#include "audioinvert.h"
+#include "audiokaraoke.h"
+#include "audioamplify.h"
+#include "audiodynamic.h"
+#include "audiocheblimit.h"
+#include "audiochebband.h"
+#include "audioiirfilter.h"
+#include "audiowsincband.h"
+#include "audiowsinclimit.h"
+#include "audiofirfilter.h"
+#include "audioecho.h"
+#include "gstscaletempo.h"
+#include "gststereo.h"
+
+/* entry point to initialize the plug-in
+ * initialize the plug-in itself
+ * register the element factories and pad templates
+ * register the features
+ */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  /* TRUE as soon as at least one element factory registered successfully */
+  gboolean ret = FALSE;
+
+  ret |= GST_ELEMENT_REGISTER (audiopanorama, plugin);
+  ret |= GST_ELEMENT_REGISTER (audioinvert, plugin);
+  ret |= GST_ELEMENT_REGISTER (audiokaraoke, plugin);
+  ret |= GST_ELEMENT_REGISTER (audioamplify, plugin);
+  ret |= GST_ELEMENT_REGISTER (audiodynamic, plugin);
+  ret |= GST_ELEMENT_REGISTER (audiocheblimit, plugin);
+  ret |= GST_ELEMENT_REGISTER (audiochebband, plugin);
+  ret |= GST_ELEMENT_REGISTER (audioiirfilter, plugin);
+  ret |= GST_ELEMENT_REGISTER (audiowsinclimit, plugin);
+  ret |= GST_ELEMENT_REGISTER (audiowsincband, plugin);
+  ret |= GST_ELEMENT_REGISTER (audiofirfilter, plugin);
+  ret |= GST_ELEMENT_REGISTER (audioecho, plugin);
+  ret |= GST_ELEMENT_REGISTER (scaletempo, plugin);
+  ret |= GST_ELEMENT_REGISTER (stereo, plugin);
+
+  return ret;
+}
+
+/* Plugin descriptor: exposes plugin_init as the "audiofx" plugin */
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    audiofx,
+    "Audio effects plugin",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/audiofx/audiofxbasefirfilter.c b/gst/audiofx/audiofxbasefirfilter.c
new file mode 100644
index 0000000000..e28cb64aca
--- /dev/null
+++ b/gst/audiofx/audiofxbasefirfilter.c
@@ -0,0 +1,1088 @@
+/* -*- c-basic-offset: 2 -*-
+ *
+ * GStreamer
+ * Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2006 Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>
+ * 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiofxbasefirfilter.h"
+
+#define GST_CAT_DEFAULT gst_audio_fx_base_fir_filter_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* Pad template caps: interleaved native-endian float/double raw audio,
+ * any sample rate and channel count */
+#define ALLOWED_CAPS \
+    "audio/x-raw, " \
+    " format=(string){"GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}, " \
+    " rate = (int) [ 1, MAX ], " \
+    " channels = (int) [ 1, MAX ], " \
+    " layout=(string) interleaved"
+
+/* Switch from time-domain to FFT convolution for kernels >= this */
+#define FFT_THRESHOLD 32
+
+/* Element properties */
+enum
+{
+  PROP_0 = 0,
+  PROP_LOW_LATENCY,
+  PROP_DRAIN_ON_CHANGES
+};
+
+#define DEFAULT_LOW_LATENCY FALSE
+#define DEFAULT_DRAIN_ON_CHANGES TRUE
+
+#define gst_audio_fx_base_fir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseFIRFilter, gst_audio_fx_base_fir_filter,
+    GST_TYPE_AUDIO_FILTER);
+
+/* Forward declarations of the GstBaseTransform/GstAudioFilter vfuncs
+ * implemented below */
+static GstFlowReturn gst_audio_fx_base_fir_filter_transform (GstBaseTransform *
+    base, GstBuffer * inbuf, GstBuffer * outbuf);
+static gboolean gst_audio_fx_base_fir_filter_start (GstBaseTransform * base);
+static gboolean gst_audio_fx_base_fir_filter_stop (GstBaseTransform * base);
+static gboolean gst_audio_fx_base_fir_filter_sink_event (GstBaseTransform *
+    base, GstEvent * event);
+static gboolean gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform *
+    base, GstPadDirection direction, GstCaps * caps, gsize size,
+    GstCaps * othercaps, gsize * othersize);
+static gboolean gst_audio_fx_base_fir_filter_setup (GstAudioFilter * base,
+    const GstAudioInfo * info);
+
+/* parameter name fixed from the "quer" typo to match the definition */
+static gboolean gst_audio_fx_base_fir_filter_query (GstBaseTransform * trans,
+    GstPadDirection direction, GstQuery * query);
+
+/*
+ * The code below calculates the linear convolution:
+ *
+ * y[t] = \sum_{u=0}^{M-1} x[t - u] * h[u]
+ *
+ * where y is the output, x is the input, M is the length
+ * of the filter kernel and h is the filter kernel. For x
+ * holds: x[t] == 0 \forall t < 0.
+ *
+ * The runtime complexity of this is O (M) per sample.
+ *
+ */
+/* Generator for time-domain process functions that read the channel
+ * count at run time; `width` is the bit width (32/64), `ctype` the
+ * matching GLib sample type (float/double) */
+#define DEFINE_PROCESS_FUNC(width,ctype) \
+static guint \
+process_##width (GstAudioFXBaseFIRFilter * self, const g##ctype * src, g##ctype * dst, guint input_samples) \
+{ \
+  gint channels = GST_AUDIO_FILTER_CHANNELS (self); \
+  TIME_DOMAIN_CONVOLUTION_BODY (channels); \
+}
+
+/* Same, but with a compile-time channel count (mono/stereo fast paths)
+ * so the compiler can fold the per-sample channel arithmetic */
+#define DEFINE_PROCESS_FUNC_FIXED_CHANNELS(width,channels,ctype) \
+static guint \
+process_##channels##_##width (GstAudioFXBaseFIRFilter * self, const g##ctype * src, g##ctype * dst, guint input_samples) \
+{ \
+  TIME_DOMAIN_CONVOLUTION_BODY (channels); \
+}
+
+/* Shared body: direct convolution of src with self->kernel into dst,
+ * using self->buffer as the residue of previous input (per-channel
+ * interleaved history of kernel_length frames).  Returns the number of
+ * frames written to dst (always equal to the input frame count here). */
+#define TIME_DOMAIN_CONVOLUTION_BODY(channels) G_STMT_START { \
+  gint kernel_length = self->kernel_length; \
+  gint i, j, k, l; \
+  gint res_start; \
+  gint from_input; \
+  gint off; \
+  gdouble *buffer = self->buffer; \
+  gdouble *kernel = self->kernel; \
+  \
+  /* lazily allocate the residue buffer on the first call */ \
+  if (!buffer) { \
+    self->buffer_length = kernel_length * channels; \
+    self->buffer = buffer = g_new0 (gdouble, self->buffer_length); \
+  } \
+  \
+  input_samples *= channels; \
+  /* convolution */ \
+  for (i = 0; i < input_samples; i++) { \
+    dst[i] = 0.0; \
+    k = i % channels; \
+    l = i / channels; \
+    from_input = MIN (l, kernel_length-1); \
+    off = l * channels + k; \
+    /* taps that fall inside the current input buffer */ \
+    for (j = 0; j <= from_input; j++) { \
+      dst[i] += src[off] * kernel[j]; \
+      off -= channels; \
+    } \
+    /* j == from_input && off == (l - j) * channels + k */ \
+    off += kernel_length * channels; \
+    /* remaining taps come from the residue of the previous buffer */ \
+    for (; j < kernel_length; j++) { \
+      dst[i] += buffer[off] * kernel[j]; \
+      off -= channels; \
+    } \
+  } \
+  \
+  /* copy the tail of the current input buffer to the residue, while \
+   * keeping parts of the residue if the input buffer is smaller than \
+   * the kernel length */ \
+  /* from now on take kernel length as length over all channels */ \
+  kernel_length *= channels; \
+  if (input_samples < kernel_length) \
+    res_start = kernel_length - input_samples; \
+  else \
+    res_start = 0; \
+  \
+  for (i = 0; i < res_start; i++) \
+    buffer[i] = buffer[i + input_samples]; \
+  /* i == res_start */ \
+  for (; i < kernel_length; i++) \
+    buffer[i] = src[input_samples - kernel_length + i]; \
+  \
+  self->buffer_fill += kernel_length - res_start; \
+  if (self->buffer_fill > kernel_length) \
+    self->buffer_fill = kernel_length; \
+  \
+  return input_samples / channels; \
+} G_STMT_END
+
+DEFINE_PROCESS_FUNC (32, float);
+DEFINE_PROCESS_FUNC (64, double);
+
+DEFINE_PROCESS_FUNC_FIXED_CHANNELS (32, 1, float);
+DEFINE_PROCESS_FUNC_FIXED_CHANNELS (64, 1, double);
+
+DEFINE_PROCESS_FUNC_FIXED_CHANNELS (32, 2, float);
+DEFINE_PROCESS_FUNC_FIXED_CHANNELS (64, 2, double);
+
+#undef TIME_DOMAIN_CONVOLUTION_BODY
+#undef DEFINE_PROCESS_FUNC
+#undef DEFINE_PROCESS_FUNC_FIXED_CHANNELS
+
+/* This implements FFT convolution and uses the overlap-save algorithm.
+ * See http://cnx.org/content/m12022/latest/ or your favorite
+ * digital signal processing book for details.
+ *
+ * In every pass the following is calculated:
+ *
+ * y = IFFT (FFT(x) * FFT(h))
+ *
+ * where y is the output in the time domain, x the
+ * input and h the filter kernel. * is the multiplication
+ * of complex numbers.
+ *
+ * Due to the circular convolution theorem this
+ * gives in the time domain:
+ *
+ * y[t] = \sum_{u=0}^{M-1} x[t - u] * h[u]
+ *
+ * where y is the output, M is the kernel length,
+ * x the periodically extended[0] input and h the
+ * filter kernel.
+ *
+ * ([0] Periodically extended means: )
+ * ( x[t] = x[t+kN] \forall k \in Z )
+ * ( where N is the length of x )
+ *
+ * This means:
+ * - Obviously x and h need to be of the same size for the FFT
+ * - The first M-1 output values are useless because they're
+ * built from 1 up to M-1 values from the end of the input
+ * (circular convolusion!).
+ * - The last M-1 input values are only used for 1 up to M-1
+ * output values, i.e. they need to be used again in the
+ * next pass for the first M-1 input values.
+ *
+ * => The first pass needs M-1 zeroes at the beginning of the
+ * input and the last M-1 input values of every pass need to
+ * be used as the first M-1 input values of the next pass.
+ *
+ * => x must be larger than h to give a useful number of output
+ * samples and h needs to be padded by zeroes at the end to give
+ * it virtually the same size as x (by M we denote the number of
+ * non-padding samples of h). If len(x)==len(h)==M only 1 output
+ * sample would be calculated per pass, len(x)==2*len(h) would
+ * give M+1 output samples, etc. Usually a factor between 4 and 8
+ * gives a low number of operations per output samples (see website
+ * given above).
+ *
+ * Overall this gives a runtime complexity per sample of
+ *
+ * ( N log N )
+ * O ( --------- ) compared to O (M) for the direct calculation.
+ * ( N - M + 1 )
+ */
+/* Generator for FFT (overlap-save) process functions with a run-time
+ * channel count */
+#define DEFINE_FFT_PROCESS_FUNC(width,ctype) \
+static guint \
+process_fft_##width (GstAudioFXBaseFIRFilter * self, const g##ctype * src, \
+    g##ctype * dst, guint input_samples) \
+{ \
+  gint channels = GST_AUDIO_FILTER_CHANNELS (self); \
+  FFT_CONVOLUTION_BODY (channels); \
+}
+
+/* Same, with a compile-time channel count (mono/stereo fast paths) */
+#define DEFINE_FFT_PROCESS_FUNC_FIXED_CHANNELS(width,channels,ctype) \
+static guint \
+process_fft_##channels##_##width (GstAudioFXBaseFIRFilter * self, const g##ctype * src, \
+    g##ctype * dst, guint input_samples) \
+{ \
+  FFT_CONVOLUTION_BODY (channels); \
+}
+
+/* Shared overlap-save body (see the long comment above for the math).
+ * Accumulates input into self->buffer until a full block is available,
+ * then produces block_length - kernel_length + 1 output frames per
+ * channel per pass.  Returns the number of frames written to dst,
+ * which may be 0 if no complete block was filled. */
+#define FFT_CONVOLUTION_BODY(channels) G_STMT_START { \
+  gint i, j; \
+  guint pass; \
+  guint kernel_length = self->kernel_length; \
+  guint block_length = self->block_length; \
+  guint buffer_length = self->buffer_length; \
+  guint real_buffer_length = buffer_length + kernel_length - 1; \
+  guint buffer_fill = self->buffer_fill; \
+  GstFFTF64 *fft = self->fft; \
+  GstFFTF64 *ifft = self->ifft; \
+  GstFFTF64Complex *frequency_response = self->frequency_response; \
+  GstFFTF64Complex *fft_buffer = self->fft_buffer; \
+  guint frequency_response_length = self->frequency_response_length; \
+  gdouble *buffer = self->buffer; \
+  guint generated = 0; \
+  gdouble re, im; \
+  \
+  if (!fft_buffer) \
+    self->fft_buffer = fft_buffer = \
+        g_new (GstFFTF64Complex, frequency_response_length); \
+  \
+  /* Buffer contains the time domain samples of input data for one chunk \
+   * plus some more space for the inverse FFT below. \
+   * \
+   * The samples are put at offset kernel_length, the inverse FFT \
+   * overwrites everything from offset 0 to length-kernel_length+1, keeping \
+   * the last kernel_length-1 samples for copying to the next processing \
+   * step. \
+   */ \
+  if (!buffer) { \
+    self->buffer_length = buffer_length = block_length; \
+    real_buffer_length = buffer_length + kernel_length - 1; \
+    \
+    self->buffer = buffer = g_new0 (gdouble, real_buffer_length * channels); \
+    \
+    /* Beginning has kernel_length-1 zeroes at the beginning */ \
+    self->buffer_fill = buffer_fill = kernel_length - 1; \
+  } \
+  \
+  g_assert (self->buffer_length == block_length); \
+  \
+  while (input_samples) { \
+    pass = MIN (buffer_length - buffer_fill, input_samples); \
+    \
+    /* Deinterleave channels */ \
+    for (i = 0; i < pass; i++) { \
+      for (j = 0; j < channels; j++) { \
+        buffer[real_buffer_length * j + buffer_fill + kernel_length - 1 + i] = \
+            src[i * channels + j]; \
+      } \
+    } \
+    buffer_fill += pass; \
+    src += channels * pass; \
+    input_samples -= pass; \
+    \
+    /* If we don't have a complete buffer go out */ \
+    if (buffer_fill < buffer_length) \
+      break; \
+    \
+    for (j = 0; j < channels; j++) { \
+      /* Calculate FFT of input block */ \
+      gst_fft_f64_fft (fft, \
+          buffer + real_buffer_length * j + kernel_length - 1, fft_buffer); \
+      \
+      /* Complex multiplication of input and filter spectrum */ \
+      for (i = 0; i < frequency_response_length; i++) { \
+        re = fft_buffer[i].r; \
+        im = fft_buffer[i].i; \
+        \
+        fft_buffer[i].r = \
+            re * frequency_response[i].r - \
+            im * frequency_response[i].i; \
+        fft_buffer[i].i = \
+            re * frequency_response[i].i + \
+            im * frequency_response[i].r; \
+      } \
+      \
+      /* Calculate inverse FFT of the result */ \
+      gst_fft_f64_inverse_fft (ifft, fft_buffer, \
+          buffer + real_buffer_length * j); \
+      \
+      /* Copy all except the first kernel_length-1 samples to the output */ \
+      for (i = 0; i < buffer_length - kernel_length + 1; i++) { \
+        dst[i * channels + j] = \
+            buffer[real_buffer_length * j + kernel_length - 1 + i]; \
+      } \
+      \
+      /* Copy the last kernel_length-1 samples to the beginning for the next block */ \
+      for (i = 0; i < kernel_length - 1; i++) { \
+        buffer[real_buffer_length * j + kernel_length - 1 + i] = \
+            buffer[real_buffer_length * j + buffer_length + i]; \
+      } \
+    } \
+    \
+    generated += buffer_length - kernel_length + 1; \
+    dst += channels * (buffer_length - kernel_length + 1); \
+    \
+    /* The first kernel_length-1 samples are there already */ \
+    buffer_fill = kernel_length - 1; \
+  } \
+  \
+  /* Write back cached buffer_fill value */ \
+  self->buffer_fill = buffer_fill; \
+  \
+  return generated; \
+} G_STMT_END
+
+DEFINE_FFT_PROCESS_FUNC (32, float);
+DEFINE_FFT_PROCESS_FUNC (64, double);
+
+DEFINE_FFT_PROCESS_FUNC_FIXED_CHANNELS (32, 1, float);
+DEFINE_FFT_PROCESS_FUNC_FIXED_CHANNELS (64, 1, double);
+
+DEFINE_FFT_PROCESS_FUNC_FIXED_CHANNELS (32, 2, float);
+DEFINE_FFT_PROCESS_FUNC_FIXED_CHANNELS (64, 2, double);
+
+#undef FFT_CONVOLUTION_BODY
+#undef DEFINE_FFT_PROCESS_FUNC
+#undef DEFINE_FFT_PROCESS_FUNC_FIXED_CHANNELS
+
+/* Element class */
+/* Recompute the kernel's frequency response and (re)create the FFT
+ * machinery.  FFT mode is only used for kernels of at least
+ * FFT_THRESHOLD taps and when low-latency mode is off; otherwise all
+ * FFT state is left released and time-domain convolution is used. */
+static void
+    gst_audio_fx_base_fir_filter_calculate_frequency_response
+    (GstAudioFXBaseFIRFilter * self)
+{
+  gst_fft_f64_free (self->fft);
+  self->fft = NULL;
+  gst_fft_f64_free (self->ifft);
+  self->ifft = NULL;
+  g_free (self->frequency_response);
+  /* BUGFIX: clear the stale pointer.  If FFT mode is not re-entered
+   * below, the dangling pointer would otherwise be freed a second
+   * time in finalize() (double free). */
+  self->frequency_response = NULL;
+  self->frequency_response_length = 0;
+  g_free (self->fft_buffer);
+  self->fft_buffer = NULL;
+
+  if (self->kernel && self->kernel_length >= FFT_THRESHOLD
+      && !self->low_latency) {
+    guint block_length, i;
+    gdouble *kernel_tmp, *kernel = self->kernel;
+
+    /* We process 4 * kernel_length samples per pass in FFT mode */
+    block_length = 4 * self->kernel_length;
+    block_length = gst_fft_next_fast_length (block_length);
+    self->block_length = block_length;
+
+    /* Zero-pad the kernel to the FFT block length */
+    kernel_tmp = g_new0 (gdouble, block_length);
+    memcpy (kernel_tmp, kernel, self->kernel_length * sizeof (gdouble));
+
+    self->fft = gst_fft_f64_new (block_length, FALSE);
+    self->ifft = gst_fft_f64_new (block_length, TRUE);
+    self->frequency_response_length = block_length / 2 + 1;
+    self->frequency_response =
+        g_new (GstFFTF64Complex, self->frequency_response_length);
+    gst_fft_f64_fft (self->fft, kernel_tmp, self->frequency_response);
+    g_free (kernel_tmp);
+
+    /* Normalize to make sure IFFT(FFT(x)) == x */
+    for (i = 0; i < self->frequency_response_length; i++) {
+      self->frequency_response[i].r /= block_length;
+      self->frequency_response[i].i /= block_length;
+    }
+  }
+}
+
+/* Must be called with base transform lock! */
+/* Pick the specialized process function for the configured sample
+ * format (F32/F64), channel count (mono/stereo fast paths, generic
+ * fallback) and convolution mode (FFT if self->fft exists and
+ * low-latency mode is off, time domain otherwise). */
+static void
+gst_audio_fx_base_fir_filter_select_process_function (GstAudioFXBaseFIRFilter *
+    self, GstAudioFormat format, gint channels)
+{
+  switch (format) {
+    case GST_AUDIO_FORMAT_F32:
+      if (self->fft && !self->low_latency) {
+        if (channels == 1)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_1_32;
+        else if (channels == 2)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_2_32;
+        else
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_32;
+      } else {
+        if (channels == 1)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_1_32;
+        else if (channels == 2)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_2_32;
+        else
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_32;
+      }
+      break;
+    case GST_AUDIO_FORMAT_F64:
+      if (self->fft && !self->low_latency) {
+        if (channels == 1)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_1_64;
+        else if (channels == 2)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_2_64;
+        else
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_fft_64;
+      } else {
+        if (channels == 1)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_1_64;
+        else if (channels == 2)
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_2_64;
+        else
+          self->process = (GstAudioFXBaseFIRFilterProcessFunc) process_64;
+      }
+      break;
+    default:
+      /* unsupported format: no process function, setup() will fail */
+      self->process = NULL;
+      break;
+  }
+}
+
+/* GObject finalize: release residue buffer, kernel, FFT state and the
+ * instance lock, then chain up */
+static void
+gst_audio_fx_base_fir_filter_finalize (GObject * object)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (object);
+
+  g_free (self->buffer);
+  g_free (self->kernel);
+  gst_fft_f64_free (self->fft);
+  gst_fft_f64_free (self->ifft);
+  g_free (self->frequency_response);
+  g_free (self->fft_buffer);
+  g_mutex_clear (&self->lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject property setter.  "low-latency" may only change in states
+ * below PAUSED because toggling it rebuilds the FFT machinery and
+ * swaps the process function. */
+static void
+gst_audio_fx_base_fir_filter_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (object);
+
+  switch (prop_id) {
+    case PROP_LOW_LATENCY:{
+      gboolean low_latency;
+
+      if (GST_STATE (self) >= GST_STATE_PAUSED) {
+        g_warning ("Changing the \"low-latency\" property "
+            "is only allowed in states < PAUSED");
+        return;
+      }
+
+
+      g_mutex_lock (&self->lock);
+      low_latency = g_value_get_boolean (value);
+
+      /* only rebuild FFT state / process function on an actual change */
+      if (self->low_latency != low_latency) {
+        self->low_latency = low_latency;
+        gst_audio_fx_base_fir_filter_calculate_frequency_response (self);
+        gst_audio_fx_base_fir_filter_select_process_function (self,
+            GST_AUDIO_FILTER_FORMAT (self), GST_AUDIO_FILTER_CHANNELS (self));
+      }
+      g_mutex_unlock (&self->lock);
+      break;
+    }
+    case PROP_DRAIN_ON_CHANGES:{
+      g_mutex_lock (&self->lock);
+      self->drain_on_changes = g_value_get_boolean (value);
+      g_mutex_unlock (&self->lock);
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter.
+ * NOTE(review): reads are done without taking self->lock; for a
+ * single gboolean this looks intentional, but confirm it matches the
+ * locking convention used elsewhere in this file. */
+static void
+gst_audio_fx_base_fir_filter_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (object);
+
+  switch (prop_id) {
+    case PROP_LOW_LATENCY:
+      g_value_set_boolean (value, self->low_latency);
+      break;
+    case PROP_DRAIN_ON_CHANGES:
+      g_value_set_boolean (value, self->drain_on_changes);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class init: install properties, add pad templates from ALLOWED_CAPS
+ * and hook up the GstBaseTransform/GstAudioFilter vfuncs */
+static void
+gst_audio_fx_base_fir_filter_class_init (GstAudioFXBaseFIRFilterClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+  GstCaps *caps;
+
+  GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_fir_filter_debug,
+      "audiofxbasefirfilter", 0, "FIR filter base class");
+
+  gobject_class->finalize = gst_audio_fx_base_fir_filter_finalize;
+  gobject_class->set_property = gst_audio_fx_base_fir_filter_set_property;
+  gobject_class->get_property = gst_audio_fx_base_fir_filter_get_property;
+
+  /**
+   * GstAudioFXBaseFIRFilter:low-latency:
+   *
+   * Work in low-latency mode. This mode is much slower for large filter sizes
+   * but the latency is always only the pre-latency of the filter.
+   */
+  g_object_class_install_property (gobject_class, PROP_LOW_LATENCY,
+      g_param_spec_boolean ("low-latency", "Low latency",
+          "Operate in low latency mode. This mode is slower but the "
+          "latency will only be the filter pre-latency. "
+          "Can only be changed in states < PAUSED!", DEFAULT_LOW_LATENCY,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstAudioFXBaseFIRFilter:drain-on-changes:
+   *
+   * Whether the filter should be drained when its coefficients change
+   *
+   * Note: Currently this only works if the kernel size is not changed!
+   * Support for drainless kernel size changes will be added in the future.
+   */
+  g_object_class_install_property (gobject_class, PROP_DRAIN_ON_CHANGES,
+      g_param_spec_boolean ("drain-on-changes", "Drain on changes",
+          "Drains the filter when its coefficients change",
+          DEFAULT_DRAIN_ON_CHANGES,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  caps = gst_caps_from_string (ALLOWED_CAPS);
+  gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+      caps);
+  gst_caps_unref (caps);
+
+  trans_class->transform =
+      GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_transform);
+  trans_class->start = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_start);
+  trans_class->stop = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_stop);
+  trans_class->sink_event =
+      GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_sink_event);
+  trans_class->query = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_query);
+  trans_class->transform_size =
+      GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_transform_size);
+  filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_fx_base_fir_filter_setup);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_FX_BASE_FIR_FILTER, 0);
+}
+
+/* Instance init: no kernel/residue yet, timestamp tracking reset,
+ * properties at their defaults */
+static void
+gst_audio_fx_base_fir_filter_init (GstAudioFXBaseFIRFilter * self)
+{
+  self->kernel = NULL;
+  self->buffer = NULL;
+  self->buffer_length = 0;
+
+  self->start_ts = GST_CLOCK_TIME_NONE;
+  self->start_off = GST_BUFFER_OFFSET_NONE;
+  self->nsamples_out = 0;
+  self->nsamples_in = 0;
+
+  self->low_latency = DEFAULT_LOW_LATENCY;
+  self->drain_on_changes = DEFAULT_DRAIN_ON_CHANGES;
+
+  g_mutex_init (&self->lock);
+}
+
+/* Drain the filter: convolve the buffered history with zero input and
+ * push the resulting samples downstream (used on EOS, caps changes and
+ * discontinuities).  Resets the residue afterwards. */
+void
+gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter * self)
+{
+  GstBuffer *outbuf;
+  GstFlowReturn res;
+  gint rate = GST_AUDIO_FILTER_RATE (self);
+  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
+  gint bps = GST_AUDIO_FILTER_BPS (self);
+  gint outsize, outsamples;
+  GstMapInfo map;
+  guint8 *in, *out;
+
+  /* nothing to drain if never configured or no input was seen */
+  if (channels == 0 || rate == 0 || self->nsamples_in == 0) {
+    self->buffer_fill = 0;
+    g_free (self->buffer);
+    self->buffer = NULL;
+    return;
+  }
+
+  /* Calculate the number of samples and their memory size that
+   * should be pushed from the residue */
+  outsamples = self->nsamples_in - (self->nsamples_out - self->latency);
+  if (outsamples <= 0) {
+    self->buffer_fill = 0;
+    g_free (self->buffer);
+    self->buffer = NULL;
+    return;
+  }
+  outsize = outsamples * channels * bps;
+
+  if (!self->fft || self->low_latency) {
+    gint64 diffsize, diffsamples;
+
+    /* Process the difference between latency and residue length samples
+     * to start at the actual data instead of starting at the zeros before
+     * when we only got one buffer smaller than latency */
+    diffsamples =
+        ((gint64) self->latency) - ((gint64) self->buffer_fill) / channels;
+    if (diffsamples > 0) {
+      diffsize = diffsamples * channels * bps;
+      in = g_new0 (guint8, diffsize);
+      out = g_new0 (guint8, diffsize);
+      self->nsamples_out += self->process (self, in, out, diffsamples);
+      g_free (in);
+      g_free (out);
+    }
+
+    outbuf = gst_buffer_new_and_alloc (outsize);
+
+    /* Convolve the residue with zeros to get the actual remaining data */
+    in = g_new0 (guint8, outsize);
+    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
+    self->nsamples_out += self->process (self, in, map.data, outsamples);
+    gst_buffer_unmap (outbuf, &map);
+
+    g_free (in);
+  } else {
+    guint gensamples = 0;
+
+    outbuf = gst_buffer_new_and_alloc (outsize);
+    gst_buffer_map (outbuf, &map, GST_MAP_READWRITE);
+
+    while (gensamples < outsamples) {
+      guint step_insamples = self->block_length - self->buffer_fill;
+      guint8 *zeroes = g_new0 (guint8, step_insamples * channels * bps);
+      guint8 *out = g_new (guint8, self->block_length * channels * bps);
+      guint step_gensamples;
+
+      step_gensamples = self->process (self, zeroes, out, step_insamples);
+      g_free (zeroes);
+
+      /* BUGFIX: step_gensamples/gensamples count frames, so the byte
+       * offset and copy length must be scaled by the full frame size
+       * (channels * bps).  The previous code multiplied by bps only,
+       * writing wrong offsets and too few bytes for multi-channel audio. */
+      memcpy (map.data + gensamples * channels * bps, out,
+          MIN (step_gensamples, outsamples - gensamples) * channels * bps);
+      gensamples += MIN (step_gensamples, outsamples - gensamples);
+
+      g_free (out);
+    }
+    self->nsamples_out += gensamples;
+
+    gst_buffer_unmap (outbuf, &map);
+  }
+
+  /* Set timestamp, offset, etc from the values we
+   * saved when processing the regular buffers */
+  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
+    GST_BUFFER_TIMESTAMP (outbuf) = self->start_ts;
+  else
+    GST_BUFFER_TIMESTAMP (outbuf) = 0;
+  GST_BUFFER_TIMESTAMP (outbuf) +=
+      gst_util_uint64_scale_int (self->nsamples_out - outsamples -
+      self->latency, GST_SECOND, rate);
+
+  GST_BUFFER_DURATION (outbuf) =
+      gst_util_uint64_scale_int (outsamples, GST_SECOND, rate);
+
+  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
+    GST_BUFFER_OFFSET (outbuf) =
+        self->start_off + self->nsamples_out - outsamples - self->latency;
+    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET (outbuf) + outsamples;
+  }
+
+  GST_DEBUG_OBJECT (self,
+      "Pushing residue buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
+      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
+      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
+      gst_buffer_get_size (outbuf),
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
+      GST_BUFFER_OFFSET_END (outbuf), outsamples);
+
+  res = gst_pad_push (GST_BASE_TRANSFORM_CAST (self)->srcpad, outbuf);
+
+  if (G_UNLIKELY (res != GST_FLOW_OK)) {
+    GST_WARNING_OBJECT (self, "failed to push residue");
+  }
+
+  self->buffer_fill = 0;
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* get notified of caps and plug in the correct process function */
+static gboolean
+gst_audio_fx_base_fir_filter_setup (GstAudioFilter * base,
+    const GstAudioInfo * info)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
+
+  g_mutex_lock (&self->lock);
+  /* On a format change, drain what is still buffered for the old
+   * format, then reset all residue/timestamp tracking */
+  if (self->buffer) {
+    gst_audio_fx_base_fir_filter_push_residue (self);
+    g_free (self->buffer);
+    self->buffer = NULL;
+    self->buffer_fill = 0;
+    self->buffer_length = 0;
+    self->start_ts = GST_CLOCK_TIME_NONE;
+    self->start_off = GST_BUFFER_OFFSET_NONE;
+    self->nsamples_out = 0;
+    self->nsamples_in = 0;
+  }
+
+  gst_audio_fx_base_fir_filter_select_process_function (self,
+      GST_AUDIO_INFO_FORMAT (info), GST_AUDIO_INFO_CHANNELS (info));
+  g_mutex_unlock (&self->lock);
+
+  /* fail caps negotiation if no process function matched the format */
+  return (self->process != NULL);
+}
+
+/* GstBaseTransform vmethod implementations */
+
+/* In FFT mode the filter emits whole blocks of
+ * block_length - kernel_length + 1 frames, so round the output buffer
+ * size up to a multiple of that; otherwise output size equals input
+ * size. */
+static gboolean
+gst_audio_fx_base_fir_filter_transform_size (GstBaseTransform * base,
+    GstPadDirection direction, GstCaps * caps, gsize size, GstCaps * othercaps,
+    gsize * othersize)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
+  guint blocklen;
+  GstAudioInfo info;
+  gint bpf;
+
+  if (!self->fft || self->low_latency || direction == GST_PAD_SRC) {
+    *othersize = size;
+    return TRUE;
+  }
+
+  if (!gst_audio_info_from_caps (&info, caps))
+    return FALSE;
+
+  bpf = GST_AUDIO_INFO_BPF (&info);
+
+  /* work in frames, round up to a whole number of FFT output blocks */
+  size /= bpf;
+  blocklen = self->block_length - self->kernel_length + 1;
+  *othersize = ((size + blocklen - 1) / blocklen) * blocklen;
+  *othersize *= bpf;
+
+  return TRUE;
+}
+
+/* Main processing: run the selected convolution over the input buffer,
+ * handle discontinuities (flush + residue push), trim the initial
+ * latency worth of samples from the output and stamp
+ * timestamps/offsets from the per-stream sample counters. */
+static GstFlowReturn
+gst_audio_fx_base_fir_filter_transform (GstBaseTransform * base,
+    GstBuffer * inbuf, GstBuffer * outbuf)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
+  GstClockTime timestamp, expected_timestamp;
+  gint channels = GST_AUDIO_FILTER_CHANNELS (self);
+  gint rate = GST_AUDIO_FILTER_RATE (self);
+  gint bps = GST_AUDIO_FILTER_BPS (self);
+  GstMapInfo inmap, outmap;
+  guint input_samples;
+  guint output_samples;
+  guint generated_samples;
+  guint64 output_offset;
+  gint64 diff = 0;
+  GstClockTime stream_time;
+
+  timestamp = GST_BUFFER_TIMESTAMP (outbuf);
+
+  if (!GST_CLOCK_TIME_IS_VALID (timestamp)
+      && !GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
+    GST_ERROR_OBJECT (self, "Invalid timestamp");
+    return GST_FLOW_ERROR;
+  }
+
+  g_mutex_lock (&self->lock);
+  stream_time =
+      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (self, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  /* sync controller-bound properties to the stream time */
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (self), stream_time);
+
+  g_return_val_if_fail (self->kernel != NULL, GST_FLOW_ERROR);
+  g_return_val_if_fail (channels != 0, GST_FLOW_ERROR);
+
+  /* where this buffer should start if the stream were contiguous */
+  if (GST_CLOCK_TIME_IS_VALID (self->start_ts))
+    expected_timestamp =
+        self->start_ts + gst_util_uint64_scale_int (self->nsamples_in,
+        GST_SECOND, rate);
+  else
+    expected_timestamp = GST_CLOCK_TIME_NONE;
+
+  /* Reset the residue if already existing on discont buffers */
+  if (GST_BUFFER_IS_DISCONT (inbuf)
+      || (GST_CLOCK_TIME_IS_VALID (expected_timestamp)
+          && (ABS (GST_CLOCK_DIFF (timestamp,
+                      expected_timestamp)) > 5 * GST_MSECOND))) {
+    GST_DEBUG_OBJECT (self, "Discontinuity detected - flushing");
+    if (GST_CLOCK_TIME_IS_VALID (expected_timestamp))
+      gst_audio_fx_base_fir_filter_push_residue (self);
+    self->buffer_fill = 0;
+    g_free (self->buffer);
+    self->buffer = NULL;
+    self->start_ts = timestamp;
+    self->start_off = GST_BUFFER_OFFSET (inbuf);
+    self->nsamples_out = 0;
+    self->nsamples_in = 0;
+  } else if (!GST_CLOCK_TIME_IS_VALID (self->start_ts)) {
+    self->start_ts = timestamp;
+    self->start_off = GST_BUFFER_OFFSET (inbuf);
+  }
+
+  gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
+  gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+
+  input_samples = (inmap.size / bps) / channels;
+  output_samples = (outmap.size / bps) / channels;
+
+  self->nsamples_in += input_samples;
+
+  generated_samples =
+      self->process (self, inmap.data, outmap.data, input_samples);
+
+  gst_buffer_unmap (inbuf, &inmap);
+  gst_buffer_unmap (outbuf, &outmap);
+
+  g_assert (generated_samples <= output_samples);
+  self->nsamples_out += generated_samples;
+  if (generated_samples == 0)
+    goto no_samples;
+
+  /* Calculate the number of samples we can push out now without outputting
+   * latency zeros in the beginning */
+  diff = ((gint64) self->nsamples_out) - ((gint64) self->latency);
+  if (diff < 0)
+    goto no_samples;
+
+  if (diff < generated_samples) {
+    gint64 tmp = diff;
+    diff = generated_samples - diff;
+    generated_samples = tmp;
+  } else {
+    diff = 0;
+  }
+
+  /* trim the leading `diff` latency samples from the output buffer */
+  gst_buffer_resize (outbuf, diff * bps * channels,
+      generated_samples * bps * channels);
+
+  output_offset = self->nsamples_out - self->latency - generated_samples;
+  GST_BUFFER_TIMESTAMP (outbuf) =
+      self->start_ts + gst_util_uint64_scale_int (output_offset, GST_SECOND,
+      rate);
+  /* NOTE(review): duration is computed from output_samples although the
+   * buffer was just resized to generated_samples -- looks suspicious,
+   * confirm against upstream before changing */
+  GST_BUFFER_DURATION (outbuf) =
+      gst_util_uint64_scale_int (output_samples, GST_SECOND, rate);
+  if (self->start_off != GST_BUFFER_OFFSET_NONE) {
+    GST_BUFFER_OFFSET (outbuf) = self->start_off + output_offset;
+    GST_BUFFER_OFFSET_END (outbuf) =
+        GST_BUFFER_OFFSET (outbuf) + generated_samples;
+  } else {
+    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
+    GST_BUFFER_OFFSET_END (outbuf) = GST_BUFFER_OFFSET_NONE;
+  }
+  g_mutex_unlock (&self->lock);
+
+  GST_DEBUG_OBJECT (self,
+      "Pushing buffer of size %" G_GSIZE_FORMAT " with timestamp: %"
+      GST_TIME_FORMAT ", duration: %" GST_TIME_FORMAT ", offset: %"
+      G_GUINT64_FORMAT ", offset_end: %" G_GUINT64_FORMAT ", nsamples_out: %d",
+      gst_buffer_get_size (outbuf),
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
+      GST_BUFFER_OFFSET_END (outbuf), generated_samples);
+
+  return GST_FLOW_OK;
+
+no_samples:
+  {
+    /* all output still inside the latency window: drop this buffer */
+    g_mutex_unlock (&self->lock);
+    return GST_BASE_TRANSFORM_FLOW_DROPPED;
+  }
+}
+
+/* GstBaseTransform::start vfunc.
+ * Drops any buffered sample history and resets the timestamp/offset
+ * tracking so a new streaming cycle begins from a clean state. */
+static gboolean
+gst_audio_fx_base_fir_filter_start (GstBaseTransform * base)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
+
+  /* g_free (NULL) is a no-op, so this is safe on a fresh instance too */
+  self->buffer_fill = 0;
+  g_free (self->buffer);
+  self->buffer = NULL;
+  self->start_ts = GST_CLOCK_TIME_NONE;
+  self->start_off = GST_BUFFER_OFFSET_NONE;
+  self->nsamples_out = 0;
+  self->nsamples_in = 0;
+
+  return TRUE;
+}
+
+/* GstBaseTransform::stop vfunc.
+ * Releases only the sample history buffer; the kernel itself is kept
+ * untouched here so it survives a stop/start cycle. */
+static gboolean
+gst_audio_fx_base_fir_filter_stop (GstBaseTransform * base)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
+
+  g_free (self->buffer);
+  self->buffer = NULL;
+  self->buffer_length = 0;
+
+  return TRUE;
+}
+
+/* GstBaseTransform::query vfunc.
+ * For LATENCY queries, the upstream peer is queried first and the
+ * filter's own pre-latency (converted from samples to time) is added on
+ * top; every other query is chained up to the parent class. */
+static gboolean
+gst_audio_fx_base_fir_filter_query (GstBaseTransform * trans,
+    GstPadDirection direction, GstQuery * query)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (trans);
+  gboolean res = TRUE;
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_LATENCY:
+    {
+      GstClockTime min, max;
+      gboolean live;
+      guint64 latency;
+      gint rate = GST_AUDIO_FILTER_RATE (self);
+
+      /* rate == 0 means no caps negotiated yet, so the latency in
+       * samples cannot be converted to time */
+      if (rate == 0) {
+        res = FALSE;
+      } else if ((res =
+              gst_pad_peer_query (GST_BASE_TRANSFORM (self)->sinkpad, query))) {
+        gst_query_parse_latency (query, &live, &min, &max);
+
+        GST_DEBUG_OBJECT (self, "Peer latency: min %"
+            GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (min), GST_TIME_ARGS (max));
+
+        /* In FFT mode the effective latency is
+         * block_length - kernel_length + 1 samples, otherwise it is the
+         * kernel's configured pre-latency */
+        if (self->fft && !self->low_latency)
+          latency = self->block_length - self->kernel_length + 1;
+        else
+          latency = self->latency;
+
+        /* add our own latency */
+        latency = gst_util_uint64_scale_round (latency, GST_SECOND, rate);
+
+        GST_DEBUG_OBJECT (self, "Our latency: %"
+            GST_TIME_FORMAT, GST_TIME_ARGS (latency));
+
+        min += latency;
+        if (max != GST_CLOCK_TIME_NONE)
+          max += latency;
+
+        GST_DEBUG_OBJECT (self, "Calculated total latency : min %"
+            GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (min), GST_TIME_ARGS (max));
+
+        gst_query_set_latency (query, live, min, max);
+      }
+      break;
+    }
+    default:
+      res =
+          GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
+          query);
+      break;
+  }
+  return res;
+}
+
+/* GstBaseTransform::sink_event vfunc.
+ * On EOS the remaining buffered samples are pushed out before the event
+ * is forwarded, and the timestamp/offset tracking is reset. */
+static gboolean
+gst_audio_fx_base_fir_filter_sink_event (GstBaseTransform * base,
+    GstEvent * event)
+{
+  GstAudioFXBaseFIRFilter *self = GST_AUDIO_FX_BASE_FIR_FILTER (base);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_EOS:
+      /* drain before the EOS event reaches downstream */
+      gst_audio_fx_base_fir_filter_push_residue (self);
+      self->start_ts = GST_CLOCK_TIME_NONE;
+      self->start_off = GST_BUFFER_OFFSET_NONE;
+      self->nsamples_out = 0;
+      self->nsamples_in = 0;
+      break;
+    default:
+      break;
+  }
+
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (base, event);
+}
+
+/* Installs a new filter kernel.
+ *
+ * Takes ownership of @kernel (an array of @kernel_length doubles) and
+ * stores @latency, the kernel's pre-latency in samples.  @info may be
+ * NULL, in which case the currently negotiated format and channel count
+ * are used to select the process function. */
+void
+gst_audio_fx_base_fir_filter_set_kernel (GstAudioFXBaseFIRFilter * self,
+    gdouble * kernel, guint kernel_length, guint64 latency,
+    const GstAudioInfo * info)
+{
+  gboolean latency_changed;
+  GstAudioFormat format;
+  gint channels;
+
+  g_return_if_fail (kernel != NULL);
+  g_return_if_fail (self != NULL);
+
+  g_mutex_lock (&self->lock);
+
+  /* The latency also counts as changed when the kernel length crosses
+   * FFT_THRESHOLD in either direction (unless in low-latency mode) */
+  latency_changed = (self->latency != latency
+      || (!self->low_latency && self->kernel_length < FFT_THRESHOLD
+          && kernel_length >= FFT_THRESHOLD)
+      || (!self->low_latency && self->kernel_length >= FFT_THRESHOLD
+          && kernel_length < FFT_THRESHOLD));
+
+  /* FIXME: If the latency changes, the buffer size changes too and we
+   * have to drain in any case until this is fixed in the future */
+  if (self->buffer && (!self->drain_on_changes || latency_changed)) {
+    gst_audio_fx_base_fir_filter_push_residue (self);
+    self->start_ts = GST_CLOCK_TIME_NONE;
+    self->start_off = GST_BUFFER_OFFSET_NONE;
+    self->nsamples_out = 0;
+    self->nsamples_in = 0;
+    self->buffer_fill = 0;
+  }
+
+  /* replace the old kernel; the caller's array becomes ours */
+  g_free (self->kernel);
+  if (!self->drain_on_changes || latency_changed) {
+    g_free (self->buffer);
+    self->buffer = NULL;
+    self->buffer_fill = 0;
+    self->buffer_length = 0;
+  }
+
+  self->kernel = kernel;
+  self->kernel_length = kernel_length;
+
+  if (info) {
+    format = GST_AUDIO_INFO_FORMAT (info);
+    channels = GST_AUDIO_INFO_CHANNELS (info);
+  } else {
+    format = GST_AUDIO_FILTER_FORMAT (self);
+    channels = GST_AUDIO_FILTER_CHANNELS (self);
+  }
+
+  gst_audio_fx_base_fir_filter_calculate_frequency_response (self);
+  gst_audio_fx_base_fir_filter_select_process_function (self, format, channels);
+
+  if (latency_changed) {
+    self->latency = latency;
+    /* tell the pipeline to redistribute latency */
+    gst_element_post_message (GST_ELEMENT (self),
+        gst_message_new_latency (GST_OBJECT (self)));
+  }
+
+  g_mutex_unlock (&self->lock);
+}
diff --git a/gst/audiofx/audiofxbasefirfilter.h b/gst/audiofx/audiofxbasefirfilter.h
new file mode 100644
index 0000000000..390ed8fd06
--- /dev/null
+++ b/gst/audiofx/audiofxbasefirfilter.h
@@ -0,0 +1,102 @@
+/* -*- c-basic-offset: 2 -*-
+ *
+ * GStreamer
+ * Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2006 Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>
+ * 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef __GST_AUDIO_FX_BASE_FIR_FILTER_H__
+#define __GST_AUDIO_FX_BASE_FIR_FILTER_H__
+
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+#include <gst/fft/gstfftf64.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_FX_BASE_FIR_FILTER \
+ (gst_audio_fx_base_fir_filter_get_type())
+#define GST_AUDIO_FX_BASE_FIR_FILTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_FX_BASE_FIR_FILTER,GstAudioFXBaseFIRFilter))
+#define GST_AUDIO_FX_BASE_FIR_FILTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_FX_BASE_FIR_FILTER,GstAudioFXBaseFIRFilterClass))
+#define GST_IS_AUDIO_FX_BASE_FIR_FILTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_FX_BASE_FIR_FILTER))
+#define GST_IS_AUDIO_FX_BASE_FIR_FILTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_FX_BASE_FIR_FILTER))
+
+typedef struct _GstAudioFXBaseFIRFilter GstAudioFXBaseFIRFilter;
+typedef struct _GstAudioFXBaseFIRFilterClass GstAudioFXBaseFIRFilterClass;
+
+typedef guint (*GstAudioFXBaseFIRFilterProcessFunc) (GstAudioFXBaseFIRFilter *, const guint8 *, guint8 *, guint);
+
+/**
+ * GstAudioFXBaseFIRFilter:
+ *
+ * Opaque data structure.
+ */
+struct _GstAudioFXBaseFIRFilter {
+  GstAudioFilter element;
+
+  /* properties */
+  gdouble *kernel; /* filter kernel -- time domain; owned by the filter */
+  guint kernel_length; /* length of the filter kernel -- time domain */
+
+  guint64 latency; /* pre-latency of the filter kernel, in samples */
+  gboolean low_latency; /* work in slower low latency mode */
+
+  gboolean drain_on_changes; /* If the filter should be drained when
+                              * coefficients change */
+
+  /* < private > */
+  GstAudioFXBaseFIRFilterProcessFunc process; /* per-format processing function */
+
+  gdouble *buffer; /* buffer for storing samples of previous buffers */
+  guint buffer_fill; /* fill level of buffer */
+  guint buffer_length; /* length of the buffer -- meaning depends on processing mode */
+
+  /* FFT convolution specific data */
+  GstFFTF64 *fft;
+  GstFFTF64 *ifft;
+  GstFFTF64Complex *frequency_response; /* filter kernel -- frequency domain */
+  guint frequency_response_length; /* length of filter kernel -- frequency domain */
+  GstFFTF64Complex *fft_buffer; /* FFT buffer, has the length of the frequency response */
+  guint block_length; /* Length of the processing blocks -- time domain */
+
+  GstClockTime start_ts; /* start timestamp after a discont */
+  guint64 start_off; /* start offset after a discont */
+  guint64 nsamples_out; /* number of output samples since last discont */
+  guint64 nsamples_in; /* number of input samples since last discont */
+
+  GMutex lock; /* protects kernel, buffer and timestamp state */
+};
+
+struct _GstAudioFXBaseFIRFilterClass {
+ GstAudioFilterClass parent_class;
+};
+
+GType gst_audio_fx_base_fir_filter_get_type (void);
+void gst_audio_fx_base_fir_filter_set_kernel (GstAudioFXBaseFIRFilter *filter, gdouble *kernel,
+ guint kernel_length, guint64 latency, const GstAudioInfo * info);
+void gst_audio_fx_base_fir_filter_push_residue (GstAudioFXBaseFIRFilter *filter);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_FX_BASE_FIR_FILTER_H__ */
diff --git a/gst/audiofx/audiofxbaseiirfilter.c b/gst/audiofx/audiofxbaseiirfilter.c
new file mode 100644
index 0000000000..72ee3e8431
--- /dev/null
+++ b/gst/audiofx/audiofxbaseiirfilter.c
@@ -0,0 +1,418 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include <math.h>
+
+#include "audiofxbaseiirfilter.h"
+
+#define GST_CAT_DEFAULT gst_audio_fx_base_iir_filter_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+#define ALLOWED_CAPS \
+ "audio/x-raw," \
+ " format=(string){"GST_AUDIO_NE(F32)","GST_AUDIO_NE(F64)"}," \
+ " rate = (int) [ 1, MAX ]," \
+ " channels = (int) [ 1, MAX ]," \
+ " layout=(string) interleaved"
+
+#define gst_audio_fx_base_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioFXBaseIIRFilter,
+ gst_audio_fx_base_iir_filter, GST_TYPE_AUDIO_FILTER);
+
+static gboolean gst_audio_fx_base_iir_filter_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+static GstFlowReturn
+gst_audio_fx_base_iir_filter_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+static gboolean gst_audio_fx_base_iir_filter_stop (GstBaseTransform * base);
+
+static void process_64 (GstAudioFXBaseIIRFilter * filter,
+ gdouble * data, guint num_samples);
+static void process_32 (GstAudioFXBaseIIRFilter * filter,
+ gfloat * data, guint num_samples);
+
+/* GObject vmethod implementations */
+
+/* GObject::finalize vfunc.
+ * Releases the coefficient arrays, the per-channel history and the
+ * state mutex before chaining up. */
+static void
+gst_audio_fx_base_iir_filter_finalize (GObject * object)
+{
+  GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (object);
+
+  /* g_free() is NULL-safe, no need to guard the coefficient arrays */
+  g_free (filter->a);
+  filter->a = NULL;
+  g_free (filter->b);
+  filter->b = NULL;
+
+  if (filter->channels) {
+    GstAudioFXBaseIIRFilterChannelCtx *ctx;
+    guint i;
+
+    /* free the input/output history of every channel context */
+    for (i = 0; i < filter->nchannels; i++) {
+      ctx = &filter->channels[i];
+      g_free (ctx->x);
+      g_free (ctx->y);
+    }
+
+    g_free (filter->channels);
+    filter->channels = NULL;
+  }
+  g_mutex_clear (&filter->lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class init: register pad templates from ALLOWED_CAPS and hook up the
+ * GstAudioFilter/GstBaseTransform vfuncs. */
+static void
+gst_audio_fx_base_iir_filter_class_init (GstAudioFXBaseIIRFilterClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+  GstCaps *caps;
+
+  GST_DEBUG_CATEGORY_INIT (gst_audio_fx_base_iir_filter_debug,
+      "audiofxbaseiirfilter", 0, "Audio IIR Filter Base Class");
+
+  gobject_class->finalize = gst_audio_fx_base_iir_filter_finalize;
+
+  caps = gst_caps_from_string (ALLOWED_CAPS);
+  gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+      caps);
+  gst_caps_unref (caps);
+
+  filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_fx_base_iir_filter_setup);
+
+  trans_class->transform_ip =
+      GST_DEBUG_FUNCPTR (gst_audio_fx_base_iir_filter_transform_ip);
+  /* don't run transform_ip on buffers while in passthrough mode */
+  trans_class->transform_ip_on_passthrough = FALSE;
+  trans_class->stop = GST_DEBUG_FUNCPTR (gst_audio_fx_base_iir_filter_stop);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_FX_BASE_IIR_FILTER, 0);
+}
+
+/* Instance init: request in-place processing and start out with no
+ * coefficients and no per-channel history. */
+static void
+gst_audio_fx_base_iir_filter_init (GstAudioFXBaseIIRFilter * filter)
+{
+  gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
+
+  /* no coefficients yet; they arrive via _set_coefficients() */
+  filter->a = NULL;
+  filter->b = NULL;
+  filter->na = 0;
+  filter->nb = 0;
+
+  /* channel contexts are allocated once the audio format is known */
+  filter->channels = NULL;
+  filter->nchannels = 0;
+
+  g_mutex_init (&filter->lock);
+}
+
+/* Evaluate the transfer function H(z) = B(z)/A(z) at the complex point
+ * z^-1 = zr + zi*i and return its magnitude, i.e. the gain of the IIR
+ * filter at that frequency.  Both polynomials are evaluated with
+ * Horner's scheme in complex arithmetic. */
+gdouble
+gst_audio_fx_base_iir_filter_calculate_gain (gdouble * a, guint na, gdouble * b,
+    guint nb, gdouble zr, gdouble zi)
+{
+  gdouble ar, ai, br, bi;
+  gdouble tr, ti;
+  gdouble denom, gain_r, gain_i;
+  gint k;
+
+  /* Horner evaluation of the denominator polynomial A at z^-1 */
+  ar = a[na - 1];
+  ai = 0.0;
+  for (k = na - 2; k >= 0; k--) {
+    tr = ar;
+    ti = ai;
+    ar = (tr * zr - ti * zi) + a[k];
+    ai = (tr * zi + ti * zr);
+  }
+
+  /* Horner evaluation of the numerator polynomial B at z^-1 */
+  br = b[nb - 1];
+  bi = 0.0;
+  for (k = nb - 2; k >= 0; k--) {
+    tr = br;
+    ti = bi;
+    br = (tr * zr - ti * zi) + b[k];
+    bi = (tr * zi + ti * zr);
+  }
+
+  /* complex division B/A, then the magnitude of the quotient */
+  denom = ar * ar + ai * ai;
+  gain_r = (br * ar + bi * ai) / denom;
+  gain_i = (bi * ar - br * ai) / denom;
+
+  return sqrt (gain_r * gain_r + gain_i * gain_i);
+}
+
+/* Installs new filter coefficients.
+ *
+ * Takes ownership of @a (@na denominator coefficients) and @b (@nb
+ * numerator coefficients).  Any existing per-channel history is
+ * cleared: the history arrays are reallocated when the filter order
+ * changed and zeroed in place otherwise. */
+void
+gst_audio_fx_base_iir_filter_set_coefficients (GstAudioFXBaseIIRFilter * filter,
+    gdouble * a, guint na, gdouble * b, guint nb)
+{
+  guint i;
+
+  g_return_if_fail (GST_IS_AUDIO_FX_BASE_IIR_FILTER (filter));
+
+  g_mutex_lock (&filter->lock);
+
+  g_free (filter->a);
+  g_free (filter->b);
+
+  filter->a = filter->b = NULL;
+
+  if (filter->channels) {
+    GstAudioFXBaseIIRFilterChannelCtx *ctx;
+    gboolean free = (na != filter->na || nb != filter->nb);
+
+    for (i = 0; i < filter->nchannels; i++) {
+      ctx = &filter->channels[i];
+
+      if (free) {
+        g_free (ctx->x);
+        g_free (ctx->y);
+      } else {
+        /* same filter order: just reset the history in place */
+        memset (ctx->x, 0, filter->nb * sizeof (gdouble));
+        memset (ctx->y, 0, filter->na * sizeof (gdouble));
+        ctx->x_pos = 0;
+        ctx->y_pos = 0;
+      }
+    }
+
+    /* Only drop the context array when the per-channel history arrays
+     * were freed above.  The previous code freed it unconditionally,
+     * which leaked every ctx->x/ctx->y whenever the filter order stayed
+     * the same (the zeroed arrays were never freed before the contexts
+     * were reallocated below). */
+    if (free) {
+      g_free (filter->channels);
+      filter->channels = NULL;
+    }
+  }
+
+  filter->na = na;
+  filter->nb = nb;
+
+  /* take ownership of the caller's arrays */
+  filter->a = a;
+  filter->b = b;
+
+  if (filter->nchannels && !filter->channels) {
+    GstAudioFXBaseIIRFilterChannelCtx *ctx;
+
+    filter->channels =
+        g_new0 (GstAudioFXBaseIIRFilterChannelCtx, filter->nchannels);
+    for (i = 0; i < filter->nchannels; i++) {
+      ctx = &filter->channels[i];
+
+      ctx->x = g_new0 (gdouble, filter->nb);
+      ctx->y = g_new0 (gdouble, filter->na);
+    }
+  }
+
+  g_mutex_unlock (&filter->lock);
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* GstAudioFilter::setup vfunc.
+ * Selects the sample-width specific process function for the negotiated
+ * format and (re)allocates the per-channel history when the channel
+ * count changes. */
+static gboolean
+gst_audio_fx_base_iir_filter_setup (GstAudioFilter * base,
+    const GstAudioInfo * info)
+{
+  GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
+  gboolean ret = TRUE;
+  gint channels;
+
+  g_mutex_lock (&filter->lock);
+  switch (GST_AUDIO_INFO_FORMAT (info)) {
+    case GST_AUDIO_FORMAT_F32:
+      filter->process = (GstAudioFXBaseIIRFilterProcessFunc)
+          process_32;
+      break;
+    case GST_AUDIO_FORMAT_F64:
+      filter->process = (GstAudioFXBaseIIRFilterProcessFunc)
+          process_64;
+      break;
+    default:
+      /* only interleaved F32/F64 are supported (see ALLOWED_CAPS) */
+      ret = FALSE;
+      break;
+  }
+
+  channels = GST_AUDIO_INFO_CHANNELS (info);
+
+  if (channels != filter->nchannels) {
+    guint i;
+    GstAudioFXBaseIIRFilterChannelCtx *ctx;
+
+    if (filter->channels) {
+      for (i = 0; i < filter->nchannels; i++) {
+        ctx = &filter->channels[i];
+
+        g_free (ctx->x);
+        g_free (ctx->y);
+      }
+      g_free (filter->channels);
+    }
+
+    /* Note: g_new0 (gdouble, 0) returns NULL while na/nb are still 0;
+     * process() checks for that before storing history */
+    filter->channels = g_new0 (GstAudioFXBaseIIRFilterChannelCtx, channels);
+    for (i = 0; i < channels; i++) {
+      ctx = &filter->channels[i];
+
+      ctx->x = g_new0 (gdouble, filter->nb);
+      ctx->y = g_new0 (gdouble, filter->na);
+    }
+    filter->nchannels = channels;
+  }
+  g_mutex_unlock (&filter->lock);
+
+  return ret;
+}
+
+/* Run one input sample through the direct-form IIR difference equation:
+ *   y[n] = (b[0]*x[n] + sum_i b[i]*x[n-i] - sum_i a[i]*y[n-i]) / a[0]
+ * ctx->x and ctx->y are ring buffers of past inputs/outputs with
+ * ctx->x_pos / ctx->y_pos pointing at the most recently stored entry. */
+static inline gdouble
+process (GstAudioFXBaseIIRFilter * filter,
+    GstAudioFXBaseIIRFilterChannelCtx * ctx, gdouble x0)
+{
+  gdouble val = filter->b[0] * x0;
+  gint i, j;
+
+  /* feed-forward part: walk the input history backwards from x_pos */
+  for (i = 1, j = ctx->x_pos; i < filter->nb; i++) {
+    val += filter->b[i] * ctx->x[j];
+    j--;
+    if (j < 0)
+      j = filter->nb - 1;
+  }
+
+  /* feedback part: walk the output history backwards from y_pos */
+  for (i = 1, j = ctx->y_pos; i < filter->na; i++) {
+    val -= filter->a[i] * ctx->y[j];
+    j--;
+    if (j < 0)
+      j = filter->na - 1;
+  }
+  val /= filter->a[0];
+
+  /* The history arrays may be NULL when they were allocated with a
+   * count of 0 (g_new0 returns NULL then), so guard before storing */
+  if (ctx->x) {
+    ctx->x_pos++;
+    if (ctx->x_pos >= filter->nb)
+      ctx->x_pos = 0;
+    ctx->x[ctx->x_pos] = x0;
+  }
+  if (ctx->y) {
+    ctx->y_pos++;
+    if (ctx->y_pos >= filter->na)
+      ctx->y_pos = 0;
+
+    ctx->y[ctx->y_pos] = val;
+  }
+
+  return val;
+}
+
+/* Expands to process_32()/process_64(): runs every interleaved sample
+ * of the buffer through process() in place.  num_samples counts single
+ * samples across all channels, not frames. */
+#define DEFINE_PROCESS_FUNC(width,ctype) \
+static void \
+process_##width (GstAudioFXBaseIIRFilter * filter, \
+    g##ctype * data, guint num_samples) \
+{ \
+  gint i, j, channels = filter->nchannels; \
+  gdouble val; \
+  \
+  for (i = 0; i < num_samples / channels; i++) { \
+    for (j = 0; j < channels; j++) { \
+      val = process (filter, &filter->channels[j], *data); \
+      *data++ = val; \
+    } \
+  } \
+}
+
+DEFINE_PROCESS_FUNC (32, float);
+DEFINE_PROCESS_FUNC (64, double);
+
+#undef DEFINE_PROCESS_FUNC
+
+/* GstBaseTransform vmethod implementations */
+/* GstBaseTransform::transform_ip vfunc.
+ * Syncs controlled properties to the buffer's stream time, then filters
+ * the buffer's samples in place under the coefficient lock. */
+static GstFlowReturn
+gst_audio_fx_base_iir_filter_transform_ip (GstBaseTransform * base,
+    GstBuffer * buf)
+{
+  GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
+  guint num_samples;
+  GstClockTime timestamp, stream_time;
+  GstMapInfo map;
+
+  timestamp = GST_BUFFER_TIMESTAMP (buf);
+  stream_time =
+      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+  /* total interleaved samples across all channels, not frames */
+  num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+  g_mutex_lock (&filter->lock);
+  /* processing without coefficients would crash in process(); error out */
+  if (filter->a == NULL || filter->b == NULL) {
+    g_warn_if_fail (filter->a != NULL && filter->b != NULL);
+    gst_buffer_unmap (buf, &map);
+    g_mutex_unlock (&filter->lock);
+    return GST_FLOW_ERROR;
+  }
+  filter->process (filter, map.data, num_samples);
+  g_mutex_unlock (&filter->lock);
+
+  gst_buffer_unmap (buf, &map);
+
+  return GST_FLOW_OK;
+}
+
+
+/* GstBaseTransform::stop vfunc: throw away the per-channel input/output
+ * history so a later start begins from silence. */
+static gboolean
+gst_audio_fx_base_iir_filter_stop (GstBaseTransform * base)
+{
+  GstAudioFXBaseIIRFilter *filter = GST_AUDIO_FX_BASE_IIR_FILTER (base);
+  guint c;
+
+  if (filter->channels != NULL && filter->nchannels > 0) {
+    for (c = 0; c < filter->nchannels; c++) {
+      GstAudioFXBaseIIRFilterChannelCtx *ctx = &filter->channels[c];
+
+      g_free (ctx->x);
+      g_free (ctx->y);
+    }
+    g_free (filter->channels);
+  }
+
+  filter->channels = NULL;
+  filter->nchannels = 0;
+
+  return TRUE;
+}
diff --git a/gst/audiofx/audiofxbaseiirfilter.h b/gst/audiofx/audiofxbaseiirfilter.h
new file mode 100644
index 0000000000..f78ae317e9
--- /dev/null
+++ b/gst/audiofx/audiofxbaseiirfilter.h
@@ -0,0 +1,78 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIO_FX_BASE_IIR_FILTER_H__
+#define __GST_AUDIO_FX_BASE_IIR_FILTER_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_FX_BASE_IIR_FILTER (gst_audio_fx_base_iir_filter_get_type())
+#define GST_AUDIO_FX_BASE_IIR_FILTER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_FX_BASE_IIR_FILTER,GstAudioFXBaseIIRFilter))
+#define GST_IS_AUDIO_FX_BASE_IIR_FILTER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_FX_BASE_IIR_FILTER))
+#define GST_AUDIO_FX_BASE_IIR_FILTER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_FX_BASE_IIR_FILTER,GstAudioFXBaseIIRFilterClass))
+#define GST_IS_AUDIO_FX_BASE_IIR_FILTER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_FX_BASE_IIR_FILTER))
+#define GST_AUDIO_FX_BASE_IIR_FILTER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_FX_BASE_IIR_FILTER,GstAudioFXBaseIIRFilterClass))
+typedef struct _GstAudioFXBaseIIRFilter GstAudioFXBaseIIRFilter;
+typedef struct _GstAudioFXBaseIIRFilterClass GstAudioFXBaseIIRFilterClass;
+
+typedef void (*GstAudioFXBaseIIRFilterProcessFunc) (GstAudioFXBaseIIRFilter *, guint8 *, guint);
+
+typedef struct
+{
+  gdouble *x; /* ring buffer of past input samples (nb entries) */
+  gint x_pos; /* index of the most recently stored input */
+  gdouble *y; /* ring buffer of past output samples (na entries) */
+  gint y_pos; /* index of the most recently stored output */
+} GstAudioFXBaseIIRFilterChannelCtx;
+
+struct _GstAudioFXBaseIIRFilter
+{
+  GstAudioFilter audiofilter;
+
+  /* < private > */
+  GstAudioFXBaseIIRFilterProcessFunc process; /* per-format process function */
+
+  gdouble *a; /* denominator coefficients (owned) */
+  guint na; /* number of denominator coefficients */
+  gdouble *b; /* numerator coefficients (owned) */
+  guint nb; /* number of numerator coefficients */
+  GstAudioFXBaseIIRFilterChannelCtx *channels; /* per-channel history */
+  guint nchannels;
+
+  GMutex lock; /* protects coefficients and channel state */
+};
+
+struct _GstAudioFXBaseIIRFilterClass
+{
+ GstAudioFilterClass parent;
+};
+
+GType gst_audio_fx_base_iir_filter_get_type (void);
+void gst_audio_fx_base_iir_filter_set_coefficients (GstAudioFXBaseIIRFilter *filter, gdouble *a, guint na, gdouble *b, guint nb);
+gdouble gst_audio_fx_base_iir_filter_calculate_gain (gdouble *a, guint na, gdouble *b, guint nb, gdouble zr, gdouble zi);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_FX_BASE_IIR_FILTER_H__ */
diff --git a/gst/audiofx/audioiirfilter.c b/gst/audiofx/audioiirfilter.c
new file mode 100644
index 0000000000..fae565c9c4
--- /dev/null
+++ b/gst/audiofx/audioiirfilter.c
@@ -0,0 +1,284 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+/**
+ * SECTION:element-audioiirfilter
+ * @title: audioiirfilter
+ *
+ * audioiirfilter implements a generic audio
+ * [IIR filter](http://en.wikipedia.org/wiki/Infinite_impulse_response).
+ * Before usage the "a" and "b" properties have to be set to the filter
+ * coefficients that should be used.
+ *
+ * The filter coefficients describe the numerator and denominator of the
+ * transfer function.
+ *
+ * To change the filter coefficients whenever the sampling rate changes the
+ * "rate-changed" signal can be used. This should be done for most
+ * IIR filters as they're depending on the sampling rate.
+ *
+ * ## Example application
+ * <programlisting language="C">
+ * <xi:include xmlns:xi="http://www.w3.org/2003/XInclude" parse="text" href="../../../../tests/examples/audiofx/iirfilter-example.c" />
+ * ]|
+ *
+ */
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audioiirfilter.h"
+
+#include "gst/glib-compat-private.h"
+
+#define GST_CAT_DEFAULT gst_audio_iir_filter_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+enum
+{
+ SIGNAL_RATE_CHANGED,
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_A,
+ PROP_B
+};
+
+static guint gst_audio_iir_filter_signals[LAST_SIGNAL] = { 0, };
+
+#define gst_audio_iir_filter_parent_class parent_class
+G_DEFINE_TYPE (GstAudioIIRFilter, gst_audio_iir_filter,
+ GST_TYPE_AUDIO_FX_BASE_IIR_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audioiirfilter, "audioiirfilter",
+ GST_RANK_NONE, GST_TYPE_AUDIO_IIR_FILTER);
+
+static void gst_audio_iir_filter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_iir_filter_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_audio_iir_filter_finalize (GObject * object);
+
+static gboolean gst_audio_iir_filter_setup (GstAudioFilter * base,
+ const GstAudioInfo * info);
+
+/* Class init: install the "a" (denominator) and "b" (numerator)
+ * coefficient properties, the rate-changed signal and the element
+ * metadata. */
+static void
+gst_audio_iir_filter_class_init (GstAudioIIRFilterClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (gst_audio_iir_filter_debug, "audioiirfilter", 0,
+      "Generic audio IIR filter plugin");
+
+  gobject_class->set_property = gst_audio_iir_filter_set_property;
+  gobject_class->get_property = gst_audio_iir_filter_get_property;
+  gobject_class->finalize = gst_audio_iir_filter_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_A,
+      g_param_spec_value_array ("a", "A",
+          "Filter coefficients (denominator of transfer function)",
+          g_param_spec_double ("Coefficient", "Filter Coefficient",
+              "Filter coefficient", -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
+              G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_B,
+      g_param_spec_value_array ("b", "B",
+          "Filter coefficients (numerator of transfer function)",
+          g_param_spec_double ("Coefficient", "Filter Coefficient",
+              "Filter coefficient", -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
+              G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_iir_filter_setup);
+
+  /**
+   * GstAudioIIRFilter::rate-changed:
+   * @filter: the filter on which the signal is emitted
+   * @rate: the new sampling rate
+   *
+   * Will be emitted when the sampling rate changes. The callbacks
+   * will be called from the streaming thread and processing will
+   * stop until the event is handled.
+   */
+  gst_audio_iir_filter_signals[SIGNAL_RATE_CHANGED] =
+      g_signal_new ("rate-changed", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstAudioIIRFilterClass, rate_changed),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_INT);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "Audio IIR filter", "Filter/Effect/Audio",
+      "Generic audio IIR filter with custom filter kernel",
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+}
+
+/* Replaces the stored GValueArray coefficient sets, taking ownership of
+ * @va and/or @vb when non-NULL, and pushes flattened gdouble copies
+ * down to the base class (which takes ownership of those arrays).
+ * Called with self->lock held from set_property(); _init() calls it
+ * once before the mutex is initialized. */
+static void
+gst_audio_iir_filter_update_coefficients (GstAudioIIRFilter * self,
+    GValueArray * va, GValueArray * vb)
+{
+  gdouble *a = NULL, *b = NULL;
+  guint i;
+
+  if (va) {
+    if (self->a)
+      g_value_array_free (self->a);
+
+    self->a = va;
+  }
+  if (vb) {
+    if (self->b)
+      g_value_array_free (self->b);
+
+    self->b = vb;
+  }
+
+  /* flatten the value arrays into plain double arrays */
+  if (self->a && self->a->n_values > 0) {
+    a = g_new (gdouble, self->a->n_values);
+
+    for (i = 0; i < self->a->n_values; i++) {
+      GValue *v = g_value_array_get_nth (self->a, i);
+      a[i] = g_value_get_double (v);
+    }
+  }
+
+  if (self->b && self->b->n_values > 0) {
+    b = g_new (gdouble, self->b->n_values);
+    for (i = 0; i < self->b->n_values; i++) {
+      GValue *v = g_value_array_get_nth (self->b, i);
+      b[i] = g_value_get_double (v);
+    }
+  }
+
+  gst_audio_fx_base_iir_filter_set_coefficients (GST_AUDIO_FX_BASE_IIR_FILTER
+      (self), a, (self->a) ? self->a->n_values : 0, b,
+      (self->b) ? self->b->n_values : 0);
+}
+
+/* Instance init: install the default coefficients a = b = { 1.0 } so
+ * the element has valid (unity) coefficients until real ones are set. */
+static void
+gst_audio_iir_filter_init (GstAudioIIRFilter * self)
+{
+  GValue v = { 0, };
+  GValueArray *a;
+
+  a = g_value_array_new (1);
+
+  g_value_init (&v, G_TYPE_DOUBLE);
+  g_value_set_double (&v, 1.0);
+  g_value_array_append (a, &v);
+  g_value_unset (&v);
+
+  /* update_coefficients takes ownership of both arrays */
+  gst_audio_iir_filter_update_coefficients (self, a, g_value_array_copy (a));
+
+  g_mutex_init (&self->lock);
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* get notified of caps and plug in the correct process function */
+/* GstAudioFilter::setup vfunc: emit "rate-changed" whenever the sample
+ * rate differs from the current one, then chain up to the base class. */
+static gboolean
+gst_audio_iir_filter_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+  GstAudioIIRFilter *self = GST_AUDIO_IIR_FILTER (base);
+  gint rate = GST_AUDIO_INFO_RATE (info);
+
+  /* give the application a chance to install rate-dependent
+   * coefficients before processing continues */
+  if (rate != GST_AUDIO_FILTER_RATE (self)) {
+    g_signal_emit (G_OBJECT (self),
+        gst_audio_iir_filter_signals[SIGNAL_RATE_CHANGED], 0, rate);
+  }
+
+  return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
+}
+
+/* GObject::finalize vfunc: drop the coefficient value arrays and the
+ * lock before chaining up. */
+static void
+gst_audio_iir_filter_finalize (GObject * object)
+{
+  GstAudioIIRFilter *self = GST_AUDIO_IIR_FILTER (object);
+
+  g_mutex_clear (&self->lock);
+
+  /* g_value_array_free() is not NULL-safe (unlike g_free), hence the
+   * guards */
+  if (self->a)
+    g_value_array_free (self->a);
+  self->a = NULL;
+  if (self->b)
+    g_value_array_free (self->b);
+  self->b = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject::set_property vfunc.
+ * Replaces the "a" (denominator) or "b" (numerator) coefficient array
+ * under the lock; the boxed value is duplicated and ownership of the
+ * copy passes to gst_audio_iir_filter_update_coefficients(). */
+static void
+gst_audio_iir_filter_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAudioIIRFilter *self = GST_AUDIO_IIR_FILTER (object);
+
+  g_return_if_fail (GST_IS_AUDIO_IIR_FILTER (self));
+
+  switch (prop_id) {
+    case PROP_A:
+      g_mutex_lock (&self->lock);
+      gst_audio_iir_filter_update_coefficients (self, g_value_dup_boxed (value),
+          NULL);
+      g_mutex_unlock (&self->lock);
+      break;
+    case PROP_B:
+      g_mutex_lock (&self->lock);
+      gst_audio_iir_filter_update_coefficients (self, NULL,
+          g_value_dup_boxed (value));
+      g_mutex_unlock (&self->lock);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property vfunc.
+ * Returns a boxed copy of the requested coefficient array.  The lock is
+ * taken so the copy cannot race with set_property() freeing and
+ * replacing self->a / self->b in another thread (update_coefficients
+ * runs under the same lock there). */
+static void
+gst_audio_iir_filter_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAudioIIRFilter *self = GST_AUDIO_IIR_FILTER (object);
+
+  switch (prop_id) {
+    case PROP_A:
+      g_mutex_lock (&self->lock);
+      g_value_set_boxed (value, self->a);
+      g_mutex_unlock (&self->lock);
+      break;
+    case PROP_B:
+      g_mutex_lock (&self->lock);
+      g_value_set_boxed (value, self->b);
+      g_mutex_unlock (&self->lock);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
new file mode 100644
index 0000000000..df3e51843e
--- /dev/null
+++ b/gst/audiofx/audioiirfilter.h
@@ -0,0 +1,72 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
#ifndef __GST_AUDIO_IIR_FILTER_H__
#define __GST_AUDIO_IIR_FILTER_H__

#include <gst/gst.h>
#include <gst/audio/gstaudiofilter.h>

#include "audiofxbaseiirfilter.h"

G_BEGIN_DECLS

/* Standard GObject type/cast boilerplate for the audioiirfilter element. */
#define GST_TYPE_AUDIO_IIR_FILTER \
  (gst_audio_iir_filter_get_type())
#define GST_AUDIO_IIR_FILTER(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_IIR_FILTER,GstAudioIIRFilter))
#define GST_AUDIO_IIR_FILTER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_IIR_FILTER,GstAudioIIRFilterClass))
#define GST_IS_AUDIO_IIR_FILTER(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_IIR_FILTER))
#define GST_IS_AUDIO_IIR_FILTER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_IIR_FILTER))

typedef struct _GstAudioIIRFilter GstAudioIIRFilter;
typedef struct _GstAudioIIRFilterClass GstAudioIIRFilterClass;

/**
 * GstAudioIIRFilter:
 *
 * Opaque data structure.
 */
struct _GstAudioIIRFilter {
  GstAudioFXBaseIIRFilter parent;

  /* Coefficient arrays exposed via the "a" and "b" properties; either
   * may be NULL when not yet set (see audioiirfilter.c). */
  GValueArray *a, *b;

  /* < private > */
  GMutex lock;                  /* guards coefficient updates */
};

struct _GstAudioIIRFilterClass {
  GstAudioFXBaseIIRFilterClass parent;

  /* Class slot for the "rate-changed" signal, emitted from setup()
   * whenever the negotiated sample rate changes. */
  void (*rate_changed) (GstElement * element, gint rate);
};

GType gst_audio_iir_filter_get_type (void);

GST_ELEMENT_REGISTER_DECLARE (audioiirfilter);

G_END_DECLS

#endif /* __GST_AUDIO_IIR_FILTER_H__ */
diff --git a/gst/audiofx/audioinvert.c b/gst/audiofx/audioinvert.c
new file mode 100644
index 0000000000..a51735304f
--- /dev/null
+++ b/gst/audiofx/audioinvert.c
@@ -0,0 +1,258 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007 Sebastian Dröge <slomo@circular-chaos.org>
+ * Copyright (C) 2006 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-audioinvert
+ * @title: audioinvert
+ *
+ * Swaps upper and lower half of audio samples. Mixing an inverted sample on top of
+ * the original with a slight delay can produce effects that sound like resonance.
+ * Creating a stereo sample from a mono source, with one channel inverted produces wide-stereo sounds.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc wave=saw ! audioinvert degree=0.4 ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audioinvert degree=0.4 ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=saw ! audioconvert ! audioinvert degree=0.4 ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audioinvert.h"
+
+#define GST_CAT_DEFAULT gst_audio_invert_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
/* Filter signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

enum
{
  PROP_0,
  PROP_DEGREE
};

/* Native-endian S16 and F32, any rate and channel count, both layouts:
 * inversion is applied per sample, so channel layout does not matter. */
#define ALLOWED_CAPS \
  "audio/x-raw," \
  " format=(string) {"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"}," \
  " rate=(int)[1,MAX]," \
  " channels=(int)[1,MAX]," \
  " layout=(string) {interleaved, non-interleaved}"

G_DEFINE_TYPE (GstAudioInvert, gst_audio_invert, GST_TYPE_AUDIO_FILTER);
GST_ELEMENT_REGISTER_DEFINE (audioinvert, "audioinvert",
    GST_RANK_NONE, GST_TYPE_AUDIO_INVERT);
+
+static void gst_audio_invert_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_invert_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_audio_invert_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+static GstFlowReturn gst_audio_invert_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+
+static void gst_audio_invert_transform_int (GstAudioInvert * filter,
+ gint16 * data, guint num_samples);
+static void gst_audio_invert_transform_float (GstAudioInvert * filter,
+ gfloat * data, guint num_samples);
+
+/* GObject vmethod implementations */
+
/* Class initializer: installs properties, metadata, pad templates and
 * the base-transform / audio-filter vmethods. */
static void
gst_audio_invert_class_init (GstAudioInvertClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstCaps *caps;

  GST_DEBUG_CATEGORY_INIT (gst_audio_invert_debug, "audioinvert", 0,
      "audioinvert element");

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;

  gobject_class->set_property = gst_audio_invert_set_property;
  gobject_class->get_property = gst_audio_invert_get_property;

  /* "degree" in [0.0, 1.0]; 0.0 (the default) enables passthrough in
   * set_property().  CONTROLLABLE: may be animated via GstController. */
  g_object_class_install_property (gobject_class, PROP_DEGREE,
      g_param_spec_float ("degree", "Degree",
          "Degree of inversion", 0.0, 1.0,
          0.0,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));

  gst_element_class_set_static_metadata (gstelement_class, "Audio inversion",
      "Filter/Effect/Audio",
      "Swaps upper and lower half of audio samples",
      "Sebastian Dröge <slomo@circular-chaos.org>");

  /* Src/sink pad templates are both generated from ALLOWED_CAPS. */
  caps = gst_caps_from_string (ALLOWED_CAPS);
  gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
      caps);
  gst_caps_unref (caps);

  GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
      GST_DEBUG_FUNCPTR (gst_audio_invert_transform_ip);
  /* Do not call transform_ip while in passthrough (degree == 0.0). */
  GST_BASE_TRANSFORM_CLASS (klass)->transform_ip_on_passthrough = FALSE;

  GST_AUDIO_FILTER_CLASS (klass)->setup =
      GST_DEBUG_FUNCPTR (gst_audio_invert_setup);
}
+
+static void
+gst_audio_invert_init (GstAudioInvert * filter)
+{
+ filter->degree = 0.0;
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
+ gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
+}
+
+static void
+gst_audio_invert_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioInvert *filter = GST_AUDIO_INVERT (object);
+
+ switch (prop_id) {
+ case PROP_DEGREE:
+ filter->degree = g_value_get_float (value);
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (filter),
+ filter->degree == 0.0);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_invert_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioInvert *filter = GST_AUDIO_INVERT (object);
+
+ switch (prop_id) {
+ case PROP_DEGREE:
+ g_value_set_float (value, filter->degree);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstAudioFilter vmethod implementations */
+
+static gboolean
+gst_audio_invert_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioInvert *filter = GST_AUDIO_INVERT (base);
+ gboolean ret = TRUE;
+
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_S16:
+ filter->process = (GstAudioInvertProcessFunc)
+ gst_audio_invert_transform_int;
+ break;
+ case GST_AUDIO_FORMAT_F32:
+ filter->process = (GstAudioInvertProcessFunc)
+ gst_audio_invert_transform_float;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+ return ret;
+}
+
+static void
+gst_audio_invert_transform_int (GstAudioInvert * filter,
+ gint16 * data, guint num_samples)
+{
+ gint i;
+ gfloat dry = 1.0 - filter->degree;
+ glong val;
+
+ for (i = 0; i < num_samples; i++) {
+ val = (*data) * dry + (-1 - (*data)) * filter->degree;
+ *data++ = (gint16) CLAMP (val, G_MININT16, G_MAXINT16);
+ }
+}
+
+static void
+gst_audio_invert_transform_float (GstAudioInvert * filter,
+ gfloat * data, guint num_samples)
+{
+ gint i;
+ gfloat dry = 1.0 - filter->degree;
+ glong val;
+
+ for (i = 0; i < num_samples; i++) {
+ val = (*data) * dry - (*data) * filter->degree;
+ *data++ = val;
+ }
+}
+
/* GstBaseTransform vmethod implementations */

/* In-place transform entry point: syncs controllable properties to the
 * buffer's stream time, then dispatches to the per-format routine. */
static GstFlowReturn
gst_audio_invert_transform_ip (GstBaseTransform * base, GstBuffer * buf)
{
  GstAudioInvert *filter = GST_AUDIO_INVERT (base);
  guint num_samples;
  GstClockTime timestamp, stream_time;
  GstMapInfo map;

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  stream_time =
      gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);

  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  /* Apply any GstController-driven changes to "degree" for this time. */
  if (GST_CLOCK_TIME_IS_VALID (stream_time))
    gst_object_sync_values (GST_OBJECT (filter), stream_time);

  /* GAP buffers carry silence; skip processing them (buffer not mapped
   * yet, so the early return leaks nothing). */
  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
    return GST_FLOW_OK;

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  /* Total samples across all channels: bytes / bytes-per-sample. */
  num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);

  filter->process (filter, map.data, num_samples);

  gst_buffer_unmap (buf, &map);

  return GST_FLOW_OK;
}
diff --git a/gst/audiofx/audioinvert.h b/gst/audiofx/audioinvert.h
new file mode 100644
index 0000000000..342d0df394
--- /dev/null
+++ b/gst/audiofx/audioinvert.h
@@ -0,0 +1,62 @@
+/*
+ * GStreamer
+ * Copyright (C) 2007 Sebastian Dröge <slomo@circular-chaos.org>
+ * Copyright (C) 2006 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_AUDIO_INVERT_H__
#define __GST_AUDIO_INVERT_H__

#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>

G_BEGIN_DECLS
/* Standard GObject type/cast boilerplate for the audioinvert element. */
#define GST_TYPE_AUDIO_INVERT (gst_audio_invert_get_type())
#define GST_AUDIO_INVERT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_INVERT,GstAudioInvert))
#define GST_IS_AUDIO_INVERT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_INVERT))
#define GST_AUDIO_INVERT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_INVERT,GstAudioInvertClass))
#define GST_IS_AUDIO_INVERT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_INVERT))
#define GST_AUDIO_INVERT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_INVERT,GstAudioInvertClass))
typedef struct _GstAudioInvert GstAudioInvert;
typedef struct _GstAudioInvertClass GstAudioInvertClass;

/* Per-format sample processing callback; data is a raw byte pointer
 * that the selected implementation casts to gint16* or gfloat*. */
typedef void (*GstAudioInvertProcessFunc) (GstAudioInvert *, guint8 *, guint);

struct _GstAudioInvert
{
  GstAudioFilter audiofilter;

  /* "degree" property: 0.0 = passthrough, 1.0 = fully inverted. */
  gfloat degree;

  /* < private > */
  GstAudioInvertProcessFunc process;    /* chosen in setup() per format */
};

struct _GstAudioInvertClass
{
  GstAudioFilterClass parent;
};

GType gst_audio_invert_get_type (void);

GST_ELEMENT_REGISTER_DECLARE (audioinvert);

G_END_DECLS
#endif /* __GST_AUDIO_INVERT_H__ */
diff --git a/gst/audiofx/audiokaraoke.c b/gst/audiofx/audiokaraoke.c
new file mode 100644
index 0000000000..2bd8dfdd6e
--- /dev/null
+++ b/gst/audiofx/audiokaraoke.c
@@ -0,0 +1,365 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-audiokaraoke
+ * @title: audiokaraoke
+ *
+ * Remove the voice from audio by filtering the center channel.
+ * This plugin is useful for karaoke applications.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=song.ogg ! oggdemux ! vorbisdec ! audiokaraoke ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiokaraoke.h"
+
+#define GST_CAT_DEFAULT gst_audio_karaoke_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
/* Filter signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

/* Property defaults. */
#define DEFAULT_LEVEL 1.0
#define DEFAULT_MONO_LEVEL 1.0
#define DEFAULT_FILTER_BAND 220.0
#define DEFAULT_FILTER_WIDTH 100.0

enum
{
  PROP_0,
  PROP_LEVEL,
  PROP_MONO_LEVEL,
  PROP_FILTER_BAND,
  PROP_FILTER_WIDTH
};

/* Native-endian S16/F32, stereo (front left/right) interleaved only:
 * the center-cut algorithm requires exactly two channels. */
#define ALLOWED_CAPS \
  "audio/x-raw," \
  " format=(string){"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"}," \
  " rate=(int)[1,MAX]," \
  " channels=(int)2," \
  " channel-mask=(bitmask)0x3," \
  " layout=(string) interleaved"

G_DEFINE_TYPE (GstAudioKaraoke, gst_audio_karaoke, GST_TYPE_AUDIO_FILTER);
GST_ELEMENT_REGISTER_DEFINE (audiokaraoke, "audiokaraoke",
    GST_RANK_NONE, GST_TYPE_AUDIO_KARAOKE);
+
+static void gst_audio_karaoke_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_karaoke_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_audio_karaoke_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+static GstFlowReturn gst_audio_karaoke_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+
+static void gst_audio_karaoke_transform_int (GstAudioKaraoke * filter,
+ gint16 * data, guint num_samples);
+static void gst_audio_karaoke_transform_float (GstAudioKaraoke * filter,
+ gfloat * data, guint num_samples);
+
+/* GObject vmethod implementations */
+
+static void
+gst_audio_karaoke_class_init (GstAudioKaraokeClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstCaps *caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_karaoke_debug, "audiokaraoke", 0,
+ "audiokaraoke element");
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_audio_karaoke_set_property;
+ gobject_class->get_property = gst_audio_karaoke_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_LEVEL,
+ g_param_spec_float ("level", "Level",
+ "Level of the effect (1.0 = full)", 0.0, 1.0, DEFAULT_LEVEL,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MONO_LEVEL,
+ g_param_spec_float ("mono-level", "Mono Level",
+ "Level of the mono channel (1.0 = full)", 0.0, 1.0, DEFAULT_LEVEL,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_FILTER_BAND,
+ g_param_spec_float ("filter-band", "Filter Band",
+ "The Frequency band of the filter", 0.0, 441.0, DEFAULT_FILTER_BAND,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_FILTER_WIDTH,
+ g_param_spec_float ("filter-width", "Filter Width",
+ "The Frequency width of the filter", 0.0, 100.0, DEFAULT_FILTER_WIDTH,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class, "AudioKaraoke",
+ "Filter/Effect/Audio",
+ "Removes voice from sound", "Wim Taymans <wim.taymans@gmail.com>");
+
+ caps = gst_caps_from_string (ALLOWED_CAPS);
+ gst_audio_filter_class_add_pad_templates (GST_AUDIO_FILTER_CLASS (klass),
+ caps);
+ gst_caps_unref (caps);
+
+ GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_audio_karaoke_transform_ip);
+ GST_BASE_TRANSFORM_CLASS (klass)->transform_ip_on_passthrough = FALSE;
+
+ GST_AUDIO_FILTER_CLASS (klass)->setup =
+ GST_DEBUG_FUNCPTR (gst_audio_karaoke_setup);
+}
+
+static void
+gst_audio_karaoke_init (GstAudioKaraoke * filter)
+{
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (filter), TRUE);
+ gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
+
+ filter->level = DEFAULT_LEVEL;
+ filter->mono_level = DEFAULT_MONO_LEVEL;
+ filter->filter_band = DEFAULT_FILTER_BAND;
+ filter->filter_width = DEFAULT_FILTER_WIDTH;
+}
+
/* Recompute the second-order IIR coefficients (A, B, C) from the
 * configured band center and width for the current sample rate, and
 * reset the filter history. */
static void
update_filter (GstAudioKaraoke * filter, const GstAudioInfo * info)
{
  gfloat A, B, C;
  gint rate;

  /* Prefer the rate from @info (passed during setup); otherwise fall
   * back to the rate already negotiated on the audio filter. */
  if (info) {
    rate = GST_AUDIO_INFO_RATE (info);
  } else {
    rate = GST_AUDIO_FILTER_RATE (filter);
  }

  /* No format negotiated yet; keep the previous coefficients. */
  if (rate == 0)
    return;

  /* NOTE(review): looks like the classic two-pole resonator design
   * (C = squared pole radius from the bandwidth, B from the center
   * frequency, A normalizes the gain) -- confirm against a DSP
   * reference before relying on these names. */
  C = exp (-2 * G_PI * filter->filter_width / rate);
  B = -4 * C / (1 + C) * cos (2 * G_PI * filter->filter_band / rate);
  A = sqrt (1 - B * B / (4 * C)) * (1 - C);

  filter->A = A;
  filter->B = B;
  filter->C = C;
  /* Clear filter state so stale history does not bleed into the new
   * coefficient set. */
  filter->y1 = 0.0;
  filter->y2 = 0.0;
}
+
+static void
+gst_audio_karaoke_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioKaraoke *filter;
+
+ filter = GST_AUDIO_KARAOKE (object);
+
+ switch (prop_id) {
+ case PROP_LEVEL:
+ filter->level = g_value_get_float (value);
+ break;
+ case PROP_MONO_LEVEL:
+ filter->mono_level = g_value_get_float (value);
+ break;
+ case PROP_FILTER_BAND:
+ filter->filter_band = g_value_get_float (value);
+ update_filter (filter, NULL);
+ break;
+ case PROP_FILTER_WIDTH:
+ filter->filter_width = g_value_get_float (value);
+ update_filter (filter, NULL);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_karaoke_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioKaraoke *filter;
+
+ filter = GST_AUDIO_KARAOKE (object);
+
+ switch (prop_id) {
+ case PROP_LEVEL:
+ g_value_set_float (value, filter->level);
+ break;
+ case PROP_MONO_LEVEL:
+ g_value_set_float (value, filter->mono_level);
+ break;
+ case PROP_FILTER_BAND:
+ g_value_set_float (value, filter->filter_band);
+ break;
+ case PROP_FILTER_WIDTH:
+ g_value_set_float (value, filter->filter_width);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstAudioFilter vmethod implementations */
+
+static gboolean
+gst_audio_karaoke_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioKaraoke *filter = GST_AUDIO_KARAOKE (base);
+ gboolean ret = TRUE;
+
+ switch (GST_AUDIO_INFO_FORMAT (info)) {
+ case GST_AUDIO_FORMAT_S16:
+ filter->process = (GstAudioKaraokeProcessFunc)
+ gst_audio_karaoke_transform_int;
+ break;
+ case GST_AUDIO_FORMAT_F32:
+ filter->process = (GstAudioKaraokeProcessFunc)
+ gst_audio_karaoke_transform_float;
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+ update_filter (filter, info);
+
+ return ret;
+}
+
/* In-place S16 center-cut.  The caps guarantee 2 interleaved channels
 * (see ALLOWED_CAPS), so data[i] / data[i + 1] are left / right.
 * num_samples is the total sample count (frames * 2). */
static void
gst_audio_karaoke_transform_int (GstAudioKaraoke * filter,
    gint16 * data, guint num_samples)
{
  gint i, l, r, o, x;
  gint channels;
  gdouble y;
  gint level;

  channels = GST_AUDIO_FILTER_CHANNELS (filter);
  /* Fixed-point effect level: 256 == 1.0, applied with >> 8 below. */
  level = filter->level * 256;

  for (i = 0; i < num_samples; i += channels) {
    /* get left and right inputs */
    l = data[i];
    r = data[i + 1];
    /* do filtering: 2nd-order IIR on the mono (L+R)/2 signal, with
     * y1/y2 as the recursive filter history */
    x = (l + r) / 2;
    y = (filter->A * x - filter->B * filter->y1) - filter->C * filter->y2;
    filter->y2 = filter->y1;
    filter->y1 = y;
    /* filter mono signal: scale, clamp to S16, then apply the
     * fixed-point effect level */
    o = (int) (y * filter->mono_level);
    o = CLAMP (o, G_MININT16, G_MAXINT16);
    o = (o * level) >> 8;
    /* now cut the center: subtract the opposite channel (scaled by
     * level) and re-add the filtered mono band */
    x = l - ((r * level) >> 8) + o;
    r = r - ((l * level) >> 8) + o;
    data[i] = CLAMP (x, G_MININT16, G_MAXINT16);
    data[i + 1] = CLAMP (r, G_MININT16, G_MAXINT16);
  }
}
+
/* In-place F32 center-cut.  The caps guarantee 2 interleaved channels
 * (see ALLOWED_CAPS), so data[i] / data[i + 1] are left / right.
 * num_samples is the total sample count (frames * 2). */
static void
gst_audio_karaoke_transform_float (GstAudioKaraoke * filter,
    gfloat * data, guint num_samples)
{
  gint i;
  gint channels;
  gdouble l, r, o;
  gdouble y;

  channels = GST_AUDIO_FILTER_CHANNELS (filter);

  for (i = 0; i < num_samples; i += channels) {
    /* get left and right inputs */
    l = data[i];
    r = data[i + 1];
    /* do filtering: 2nd-order IIR on the mono (L+R)/2 signal, with
     * y1/y2 as the recursive filter history */
    y = (filter->A * ((l + r) / 2.0) - filter->B * filter->y1) -
        filter->C * filter->y2;
    filter->y2 = filter->y1;
    filter->y1 = y;
    /* filter mono signal */
    o = y * filter->mono_level * filter->level;
    /* now cut the center: subtract the opposite channel (scaled by
     * level) and re-add the filtered mono band; no clamping here,
     * unlike the S16 path */
    data[i] = l - (r * filter->level) + o;
    data[i + 1] = r - (l * filter->level) + o;
  }
}
+
+/* GstBaseTransform vmethod implementations */
+static GstFlowReturn
+gst_audio_karaoke_transform_ip (GstBaseTransform * base, GstBuffer * buf)
+{
+ GstAudioKaraoke *filter = GST_AUDIO_KARAOKE (base);
+ guint num_samples;
+ GstClockTime timestamp, stream_time;
+ GstMapInfo map;
+
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+ stream_time =
+ gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+ if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)))
+ return GST_FLOW_OK;
+
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ num_samples = map.size / GST_AUDIO_FILTER_BPS (filter);
+
+ filter->process (filter, map.data, num_samples);
+
+ gst_buffer_unmap (buf, &map);
+
+ return GST_FLOW_OK;
+}
diff --git a/gst/audiofx/audiokaraoke.h b/gst/audiofx/audiokaraoke.h
new file mode 100644
index 0000000000..5fef3924d3
--- /dev/null
+++ b/gst/audiofx/audiokaraoke.h
@@ -0,0 +1,69 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_AUDIO_KARAOKE_H__
#define __GST_AUDIO_KARAOKE_H__

#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
#include <gst/audio/audio.h>
#include <gst/audio/gstaudiofilter.h>

G_BEGIN_DECLS
/* Standard GObject type/cast boilerplate for the audiokaraoke element. */
#define GST_TYPE_AUDIO_KARAOKE (gst_audio_karaoke_get_type())
#define GST_AUDIO_KARAOKE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_KARAOKE,GstAudioKaraoke))
#define GST_IS_AUDIO_KARAOKE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_KARAOKE))
#define GST_AUDIO_KARAOKE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_KARAOKE,GstAudioKaraokeClass))
#define GST_IS_AUDIO_KARAOKE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_KARAOKE))
#define GST_AUDIO_KARAOKE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_KARAOKE,GstAudioKaraokeClass))
typedef struct _GstAudioKaraoke GstAudioKaraoke;
typedef struct _GstAudioKaraokeClass GstAudioKaraokeClass;

/* Per-format sample processing callback; data is a raw byte pointer
 * that the selected implementation casts to gint16* or gfloat*. */
typedef void (*GstAudioKaraokeProcessFunc) (GstAudioKaraoke *, guint8 *, guint);

struct _GstAudioKaraoke
{
  GstAudioFilter audiofilter;

  /* properties */
  gfloat level;                 /* overall effect level (1.0 = full) */
  gfloat mono_level;            /* level of the re-added mono band */
  gfloat filter_band;           /* filter center frequency */
  gfloat filter_width;          /* filter bandwidth */

  /* filter coef */
  gfloat A, B, C;               /* IIR coefficients (see update_filter) */
  gfloat y1, y2;                /* filter history (last two outputs) */

  /* < private > */
  GstAudioKaraokeProcessFunc process;   /* chosen in setup() per format */
};

struct _GstAudioKaraokeClass
{
  GstAudioFilterClass parent;
};

GType gst_audio_karaoke_get_type (void);

GST_ELEMENT_REGISTER_DECLARE (audiokaraoke);

G_END_DECLS
#endif /* __GST_AUDIO_KARAOKE_H__ */
diff --git a/gst/audiofx/audiopanorama.c b/gst/audiofx/audiopanorama.c
new file mode 100644
index 0000000000..3856a951ce
--- /dev/null
+++ b/gst/audiofx/audiopanorama.c
@@ -0,0 +1,537 @@
+/*
+ * GStreamer
+ * Copyright (C) 2006 Stefan Kost <ensonic@users.sf.net>
+ * Copyright (C) 2006 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-audiopanorama
+ * @title: audiopanorama
+ *
+ * Stereo panorama effect with controllable pan position. One can choose between the default psychoacoustic panning method,
+ * which keeps the same perceived loudness, and a simple panning method that just controls the volume on one channel.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc wave=saw ! audiopanorama panorama=-1.00 ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audiopanorama panorama=-1.00 ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=saw ! audioconvert ! audiopanorama panorama=-1.00 ! audioconvert ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=saw ! audioconvert ! audiopanorama method=simple panorama=-0.50 ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+
+#ifdef HAVE_ORC
+#include <orc/orcfunctions.h>
+#else
+#define orc_memset memset
+#endif
+
+#include "audiopanorama.h"
+#include "audiopanoramaorc.h"
+
+#define GST_CAT_DEFAULT gst_audio_panorama_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* Filter signals and args */
+enum
+{
+ PROP_0,
+ PROP_PANORAMA,
+ PROP_METHOD
+};
+
+#define GST_TYPE_AUDIO_PANORAMA_METHOD (gst_audio_panorama_method_get_type ())
+static GType
+gst_audio_panorama_method_get_type (void)
+{
+ static GType gtype = 0;
+
+ if (gtype == 0) {
+ static const GEnumValue values[] = {
+ {METHOD_PSYCHOACOUSTIC, "Psychoacoustic Panning (default)",
+ "psychoacoustic"},
+ {METHOD_SIMPLE, "Simple Panning", "simple"},
+ {0, NULL, NULL}
+ };
+
+ gtype = g_enum_register_static ("GstAudioPanoramaMethod", values);
+ }
+ return gtype;
+}
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (S16) "}, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ], "
+ "layout = (string) interleaved")
+ );
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { " GST_AUDIO_NE (F32) ", " GST_AUDIO_NE (S16) "}, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) 2, "
+ "layout = (string) interleaved")
+ );
+
+G_DEFINE_TYPE (GstAudioPanorama, gst_audio_panorama, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (audiopanorama, "audiopanorama",
+ GST_RANK_NONE, GST_TYPE_AUDIO_PANORAMA);
+
+static void gst_audio_panorama_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_panorama_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_audio_panorama_get_unit_size (GstBaseTransform * base,
+ GstCaps * caps, gsize * size);
+static GstCaps *gst_audio_panorama_transform_caps (GstBaseTransform * base,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter);
+static gboolean gst_audio_panorama_set_caps (GstBaseTransform * base,
+ GstCaps * incaps, GstCaps * outcaps);
+
+static void gst_audio_panorama_m2s_int (gfloat pan,
+ gint16 * idata, gint16 * odata, guint num_samples);
+static void gst_audio_panorama_s2s_int (gfloat pan,
+ gint16 * idata, gint16 * odata, guint num_samples);
+static void gst_audio_panorama_m2s_float (gfloat pan,
+ gfloat * idata, gfloat * odata, guint num_samples);
+static void gst_audio_panorama_s2s_float (gfloat pan,
+ gfloat * idata, gfloat * odata, guint num_samples);
+
+static void gst_audio_panorama_m2s_int_simple (gfloat pan,
+ gint16 * idata, gint16 * odata, guint num_samples);
+static void gst_audio_panorama_s2s_int_simple (gfloat pan,
+ gint16 * idata, gint16 * odata, guint num_samples);
+static void gst_audio_panorama_m2s_float_simple (gfloat pan,
+ gfloat * idata, gfloat * odata, guint num_samples);
+static void gst_audio_panorama_s2s_float_simple (gfloat pan,
+ gfloat * idata, gfloat * odata, guint num_samples);
+
+static GstFlowReturn gst_audio_panorama_transform (GstBaseTransform * base,
+ GstBuffer * inbuf, GstBuffer * outbuf);
+
+
+/* Table with processing functions: [channels][format][method] */
+static const GstAudioPanoramaProcessFunc panorama_process_functions[2][2][2] = {
+ {
+ {
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_m2s_int,
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_m2s_int_simple},
+ {
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_m2s_float,
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_m2s_float_simple}
+ },
+ {
+ {
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_s2s_int,
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_s2s_int_simple},
+ {
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_s2s_float,
+ (GstAudioPanoramaProcessFunc) gst_audio_panorama_s2s_float_simple}
+ }
+};
+
+/* GObject vmethod implementations */
+
+static void
+gst_audio_panorama_class_init (GstAudioPanoramaClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_panorama_debug, "audiopanorama", 0,
+ "audiopanorama element");
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_audio_panorama_set_property;
+ gobject_class->get_property = gst_audio_panorama_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_PANORAMA,
+ g_param_spec_float ("panorama", "Panorama",
+ "Position in stereo panorama (-1.0 left -> 1.0 right)", -1.0, 1.0,
+ 0.0,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstAudioPanorama:method:
+ *
+ * Panning method: psychoacoustic mode keeps the same perceived loudness,
+ * while simple mode just controls the volume of one channel. It's merely
+ * a matter of taste which method should be chosen.
+ */
+ g_object_class_install_property (gobject_class, PROP_METHOD,
+ g_param_spec_enum ("method", "Panning method",
+ "Psychoacoustic mode keeps same perceived loudness, "
+ "simple mode just controls volume of one channel.",
+ GST_TYPE_AUDIO_PANORAMA_METHOD, METHOD_PSYCHOACOUSTIC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class, "Stereo positioning",
+ "Filter/Effect/Audio",
+ "Positions audio streams in the stereo panorama",
+ "Stefan Kost <ensonic@users.sf.net>");
+
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+
+ GST_BASE_TRANSFORM_CLASS (klass)->get_unit_size =
+ GST_DEBUG_FUNCPTR (gst_audio_panorama_get_unit_size);
+ GST_BASE_TRANSFORM_CLASS (klass)->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_audio_panorama_transform_caps);
+ GST_BASE_TRANSFORM_CLASS (klass)->set_caps =
+ GST_DEBUG_FUNCPTR (gst_audio_panorama_set_caps);
+ GST_BASE_TRANSFORM_CLASS (klass)->transform =
+ GST_DEBUG_FUNCPTR (gst_audio_panorama_transform);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_PANORAMA_METHOD, 0);
+}
+
+static void
+gst_audio_panorama_init (GstAudioPanorama * filter)
+{
+
+ filter->panorama = 0;
+ filter->method = METHOD_PSYCHOACOUSTIC;
+ gst_audio_info_init (&filter->info);
+ filter->process = NULL;
+
+ gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
+}
+
+static gboolean
+gst_audio_panorama_set_process_function (GstAudioPanorama * filter,
+ GstAudioInfo * info)
+{
+ gint channel_index, format_index, method_index;
+ const GstAudioFormatInfo *finfo = info->finfo;
+
+ /* set processing function */
+ channel_index = GST_AUDIO_INFO_CHANNELS (info) - 1;
+ if (channel_index > 1 || channel_index < 0) {
+ filter->process = NULL;
+ return FALSE;
+ }
+
+ format_index = GST_AUDIO_FORMAT_INFO_IS_FLOAT (finfo) ? 1 : 0;
+ method_index = filter->method;
+
+ filter->process =
+ panorama_process_functions[channel_index][format_index][method_index];
+ return TRUE;
+}
+
+static void
+gst_audio_panorama_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioPanorama *filter = GST_AUDIO_PANORAMA (object);
+
+ switch (prop_id) {
+ case PROP_PANORAMA:
+ filter->panorama = g_value_get_float (value);
+ break;
+ case PROP_METHOD:
+ filter->method = g_value_get_enum (value);
+ gst_audio_panorama_set_process_function (filter, &filter->info);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_panorama_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioPanorama *filter = GST_AUDIO_PANORAMA (object);
+
+ switch (prop_id) {
+ case PROP_PANORAMA:
+ g_value_set_float (value, filter->panorama);
+ break;
+ case PROP_METHOD:
+ g_value_set_enum (value, filter->method);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstBaseTransform vmethod implementations */
+
+static gboolean
+gst_audio_panorama_get_unit_size (GstBaseTransform * base, GstCaps * caps,
+ gsize * size)
+{
+ GstAudioInfo info;
+
+ g_assert (size);
+
+ if (!gst_audio_info_from_caps (&info, caps))
+ return FALSE;
+
+ *size = GST_AUDIO_INFO_BPF (&info);
+
+ return TRUE;
+}
+
+static GstCaps *
+gst_audio_panorama_transform_caps (GstBaseTransform * base,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+{
+ GstCaps *res;
+ GstStructure *structure;
+ gint i;
+
+ /* replace the channel property with our range. */
+ res = gst_caps_copy (caps);
+ for (i = 0; i < gst_caps_get_size (res); i++) {
+ structure = gst_caps_get_structure (res, i);
+ if (direction == GST_PAD_SRC) {
+ GST_INFO_OBJECT (base, "[%d] allow 1-2 channels", i);
+ gst_structure_set (structure, "channels", GST_TYPE_INT_RANGE, 1, 2, NULL);
+ } else {
+ GST_INFO_OBJECT (base, "[%d] allow 2 channels", i);
+ gst_structure_set (structure, "channels", G_TYPE_INT, 2, NULL);
+ }
+ gst_structure_remove_field (structure, "channel-mask");
+ }
+ GST_DEBUG_OBJECT (base, "transformed %" GST_PTR_FORMAT, res);
+
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (base, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = intersection;
+ GST_DEBUG_OBJECT (base, "Intersection %" GST_PTR_FORMAT, res);
+ }
+
+ return res;
+}
+
+static gboolean
+gst_audio_panorama_set_caps (GstBaseTransform * base, GstCaps * incaps,
+ GstCaps * outcaps)
+{
+ GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
+ GstAudioInfo info;
+
+ /*GST_INFO ("incaps are %" GST_PTR_FORMAT, incaps); */
+ if (!gst_audio_info_from_caps (&info, incaps))
+ goto no_format;
+
+ GST_DEBUG ("try to process %d input with %d channels",
+ GST_AUDIO_INFO_FORMAT (&info), GST_AUDIO_INFO_CHANNELS (&info));
+
+ if (!gst_audio_panorama_set_process_function (filter, &info))
+ goto no_format;
+
+ filter->info = info;
+
+ return TRUE;
+
+no_format:
+ {
+ GST_DEBUG ("invalid caps");
+ return FALSE;
+ }
+}
+
+/* psychoacoustic processing functions */
+
+/* mono to stereo panning
+ * pan: -1.0 0.0 1.0
+ * l: 1.0 0.5 0.0
+ * r: 0.0 0.5 1.0
+ *
+ * FIXME: we should use -3db (1/sqtr(2)) for 50:50
+ */
+static void
+gst_audio_panorama_m2s_int (gfloat pan, gint16 * idata, gint16 * odata, guint n)
+{
+ gfloat r = (pan + 1.0) / 2.0;
+ audiopanoramam_orc_process_s16_ch1_psy (odata, idata, 1.0 - r, r, n);
+}
+
+static void
+gst_audio_panorama_m2s_float (gfloat pan, gfloat * idata,
+ gfloat * odata, guint n)
+{
+ gfloat r = (pan + 1.0) / 2.0;
+ audiopanoramam_orc_process_f32_ch1_psy (odata, idata, 1.0 - r, r, n);
+}
+
+/* stereo balance
+ * pan: -1.0 0.0 1.0
+ * ll: 1.0 1.0 0.0
+ * lr: 1.0 0.0 0.0
+ * rr: 0.0 1.0 1.0
+ * rl: 0.0 0.0 1.0
+ */
+static void
+gst_audio_panorama_s2s_int (gfloat pan, gint16 * idata, gint16 * odata, guint n)
+{
+ if (pan == 0.0) {
+ audiopanoramam_orc_process_s16_ch2_none (odata, idata, n);
+ } else if (pan > 0.0) {
+ gfloat rl = pan;
+ gfloat ll = 1.0 - rl;
+ audiopanoramam_orc_process_s16_ch2_psy_right (odata, idata, ll, rl, n);
+ } else {
+ gfloat rr = 1.0 + pan;
+ gfloat lr = 1.0 - rr;
+ audiopanoramam_orc_process_s16_ch2_psy_left (odata, idata, lr, rr, n);
+ }
+}
+
+static void
+gst_audio_panorama_s2s_float (gfloat pan, gfloat * idata,
+ gfloat * odata, guint n)
+{
+ if (pan == 0.0) {
+ audiopanoramam_orc_process_f32_ch2_none (odata, idata, n);
+ } else if (pan > 0.0) {
+ gfloat rl = pan;
+ gfloat ll = 1.0 - rl;
+ audiopanoramam_orc_process_f32_ch2_psy_right (odata, idata, ll, rl, n);
+ } else {
+ gfloat rr = 1.0 + pan;
+ gfloat lr = 1.0 - rr;
+ audiopanoramam_orc_process_f32_ch2_psy_left (odata, idata, lr, rr, n);
+ }
+}
+
+/* simple processing functions */
+
+static void
+gst_audio_panorama_m2s_int_simple (gfloat pan, gint16 * idata,
+ gint16 * odata, guint n)
+{
+ if (pan == 0.0) {
+ audiopanoramam_orc_process_s16_ch1_none (odata, idata, n);
+ } else if (pan > 0.0) {
+ gfloat lpan = 1.0 - pan;
+ audiopanoramam_orc_process_s16_ch1_sim_left (odata, idata, lpan, n);
+ } else {
+ gfloat rpan = 1.0 + pan;
+ audiopanoramam_orc_process_s16_ch1_sim_right (odata, idata, rpan, n);
+ }
+}
+
+static void
+gst_audio_panorama_s2s_int_simple (gfloat pan, gint16 * idata,
+ gint16 * odata, guint n)
+{
+ if (pan == 0.0) {
+ audiopanoramam_orc_process_s16_ch2_none (odata, idata, n);
+ } else if (pan > 0.0) {
+ gfloat lpan = 1.0 - pan;
+ audiopanoramam_orc_process_s16_ch2_sim_left (odata, idata, lpan, n);
+ } else {
+ gfloat rpan = 1.0 + pan;
+ audiopanoramam_orc_process_s16_ch2_sim_right (odata, idata, rpan, n);
+ }
+}
+
+static void
+gst_audio_panorama_m2s_float_simple (gfloat pan, gfloat * idata,
+ gfloat * odata, guint n)
+{
+ if (pan == 0.0) {
+ audiopanoramam_orc_process_f32_ch1_none (odata, idata, n);
+ } else if (pan > 0.0) {
+ gfloat lpan = 1.0 - pan;
+ audiopanoramam_orc_process_f32_ch1_sim_left (odata, idata, lpan, n);
+ } else {
+ gfloat rpan = 1.0 + pan;
+ audiopanoramam_orc_process_f32_ch1_sim_right (odata, idata, rpan, n);
+ }
+}
+
+static void
+gst_audio_panorama_s2s_float_simple (gfloat pan, gfloat * idata,
+ gfloat * odata, guint n)
+{
+ if (pan == 0.0) {
+ audiopanoramam_orc_process_f32_ch2_none (odata, idata, n);
+ } else if (pan > 0.0) {
+ gfloat lpan = 1.0 - pan;
+ audiopanoramam_orc_process_f32_ch2_sim_left (odata, idata, lpan, n);
+ } else {
+ gfloat rpan = 1.0 + pan;
+ audiopanoramam_orc_process_f32_ch2_sim_right (odata, idata, rpan, n);
+ }
+}
+
+/* this function does the actual processing
+ */
+static GstFlowReturn
+gst_audio_panorama_transform (GstBaseTransform * base, GstBuffer * inbuf,
+ GstBuffer * outbuf)
+{
+ GstAudioPanorama *filter = GST_AUDIO_PANORAMA (base);
+ GstClockTime ts;
+ GstMapInfo inmap, outmap;
+
+ ts = gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (inbuf));
+
+ if (GST_CLOCK_TIME_IS_VALID (ts)) {
+ GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+ gst_object_sync_values (GST_OBJECT (filter), ts);
+ }
+
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+
+ if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
+ orc_memset (outmap.data, 0, outmap.size);
+ } else {
+ /* output is always stereo, input is mono or stereo,
+ * and info describes input format */
+ guint num_samples = outmap.size / (2 * GST_AUDIO_INFO_BPS (&filter->info));
+
+ gst_buffer_map (inbuf, &inmap, GST_MAP_READ);
+ filter->process (filter->panorama, inmap.data, outmap.data, num_samples);
+ gst_buffer_unmap (inbuf, &inmap);
+ }
+
+ gst_buffer_unmap (outbuf, &outmap);
+
+ return GST_FLOW_OK;
+}
diff --git a/gst/audiofx/audiopanorama.h b/gst/audiofx/audiopanorama.h
new file mode 100644
index 0000000000..36a0633313
--- /dev/null
+++ b/gst/audiofx/audiopanorama.h
@@ -0,0 +1,70 @@
+/*
+ * GStreamer
+ * Copyright (C) 2006 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIO_PANORAMA_H__
+#define __GST_AUDIO_PANORAMA_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_PANORAMA (gst_audio_panorama_get_type())
+#define GST_AUDIO_PANORAMA(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_PANORAMA,GstAudioPanorama))
+#define GST_IS_AUDIO_PANORAMA(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_PANORAMA))
+#define GST_AUDIO_PANORAMA_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_AUDIO_PANORAMA,GstAudioPanoramaClass))
+#define GST_IS_AUDIO_PANORAMA_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_AUDIO_PANORAMA))
+#define GST_AUDIO_PANORAMA_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_AUDIO_PANORAMA,GstAudioPanoramaClass))
+
+typedef struct _GstAudioPanorama GstAudioPanorama;
+typedef struct _GstAudioPanoramaClass GstAudioPanoramaClass;
+
+typedef void (*GstAudioPanoramaProcessFunc)(gfloat, guint8*, guint8*, guint);
+
+typedef enum
+{
+ METHOD_PSYCHOACOUSTIC = 0,
+ METHOD_SIMPLE
+} GstAudioPanoramaMethod;
+
+struct _GstAudioPanorama {
+ GstBaseTransform element;
+
+ /* properties */
+ gfloat panorama;
+ GstAudioPanoramaMethod method;
+
+ /* < private > */
+ GstAudioPanoramaProcessFunc process;
+ GstAudioInfo info;
+};
+
+struct _GstAudioPanoramaClass {
+ GstBaseTransformClass parent_class;
+};
+
+GType gst_audio_panorama_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audiopanorama);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_PANORAMA_H__ */
diff --git a/gst/audiofx/audiopanoramaorc-dist.c b/gst/audiofx/audiopanoramaorc-dist.c
new file mode 100644
index 0000000000..86d6861fb6
--- /dev/null
+++ b/gst/audiofx/audiopanoramaorc-dist.c
@@ -0,0 +1,3977 @@
+
+/* autogenerated from audiopanoramaorc.orc */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <glib.h>
+
+#ifndef _ORC_INTEGER_TYPEDEFS_
+#define _ORC_INTEGER_TYPEDEFS_
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+typedef int8_t orc_int8;
+typedef int16_t orc_int16;
+typedef int32_t orc_int32;
+typedef int64_t orc_int64;
+typedef uint8_t orc_uint8;
+typedef uint16_t orc_uint16;
+typedef uint32_t orc_uint32;
+typedef uint64_t orc_uint64;
+#define ORC_UINT64_C(x) UINT64_C(x)
+#elif defined(_MSC_VER)
+typedef signed __int8 orc_int8;
+typedef signed __int16 orc_int16;
+typedef signed __int32 orc_int32;
+typedef signed __int64 orc_int64;
+typedef unsigned __int8 orc_uint8;
+typedef unsigned __int16 orc_uint16;
+typedef unsigned __int32 orc_uint32;
+typedef unsigned __int64 orc_uint64;
+#define ORC_UINT64_C(x) (x##Ui64)
+#define inline __inline
+#else
+#include <limits.h>
+typedef signed char orc_int8;
+typedef short orc_int16;
+typedef int orc_int32;
+typedef unsigned char orc_uint8;
+typedef unsigned short orc_uint16;
+typedef unsigned int orc_uint32;
+#if INT_MAX == LONG_MAX
+typedef long long orc_int64;
+typedef unsigned long long orc_uint64;
+#define ORC_UINT64_C(x) (x##ULL)
+#else
+typedef long orc_int64;
+typedef unsigned long orc_uint64;
+#define ORC_UINT64_C(x) (x##UL)
+#endif
+#endif
+typedef union
+{
+ orc_int16 i;
+ orc_int8 x2[2];
+} orc_union16;
+typedef union
+{
+ orc_int32 i;
+ float f;
+ orc_int16 x2[2];
+ orc_int8 x4[4];
+} orc_union32;
+typedef union
+{
+ orc_int64 i;
+ double f;
+ orc_int32 x2[2];
+ float x2f[2];
+ orc_int16 x4[4];
+} orc_union64;
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#ifndef ORC_INTERNAL
+#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
+#define ORC_INTERNAL __hidden
+#elif defined (__GNUC__)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#else
+#define ORC_INTERNAL
+#endif
+#endif
+
+
+#ifndef DISABLE_ORC
+#include <orc/orc.h>
+#endif
+void audiopanoramam_orc_process_s16_ch1_none (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_f32_ch1_none (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_s16_ch2_none (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_f32_ch2_none (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_s16_ch1_psy (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_f32_ch1_psy (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_s16_ch2_psy_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_s16_ch2_psy_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_f32_ch2_psy_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_f32_ch2_psy_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_s16_ch1_sim_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_s16_ch1_sim_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_s16_ch2_sim_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_s16_ch2_sim_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch1_sim_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch1_sim_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch2_sim_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch2_sim_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n);
+
+
+/* begin Orc C target preamble */
+#define ORC_CLAMP(x,a,b) ((x)<(a) ? (a) : ((x)>(b) ? (b) : (x)))
+#define ORC_ABS(a) ((a)<0 ? -(a) : (a))
+#define ORC_MIN(a,b) ((a)<(b) ? (a) : (b))
+#define ORC_MAX(a,b) ((a)>(b) ? (a) : (b))
+#define ORC_SB_MAX 127
+#define ORC_SB_MIN (-1-ORC_SB_MAX)
+#define ORC_UB_MAX (orc_uint8) 255
+#define ORC_UB_MIN 0
+#define ORC_SW_MAX 32767
+#define ORC_SW_MIN (-1-ORC_SW_MAX)
+#define ORC_UW_MAX (orc_uint16)65535
+#define ORC_UW_MIN 0
+#define ORC_SL_MAX 2147483647
+#define ORC_SL_MIN (-1-ORC_SL_MAX)
+#define ORC_UL_MAX 4294967295U
+#define ORC_UL_MIN 0
+#define ORC_CLAMP_SB(x) ORC_CLAMP(x,ORC_SB_MIN,ORC_SB_MAX)
+#define ORC_CLAMP_UB(x) ORC_CLAMP(x,ORC_UB_MIN,ORC_UB_MAX)
+#define ORC_CLAMP_SW(x) ORC_CLAMP(x,ORC_SW_MIN,ORC_SW_MAX)
+#define ORC_CLAMP_UW(x) ORC_CLAMP(x,ORC_UW_MIN,ORC_UW_MAX)
+#define ORC_CLAMP_SL(x) ORC_CLAMP(x,ORC_SL_MIN,ORC_SL_MAX)
+#define ORC_CLAMP_UL(x) ORC_CLAMP(x,ORC_UL_MIN,ORC_UL_MAX)
+#define ORC_SWAP_W(x) ((((x)&0xffU)<<8) | (((x)&0xff00U)>>8))
+#define ORC_SWAP_L(x) ((((x)&0xffU)<<24) | (((x)&0xff00U)<<8) | (((x)&0xff0000U)>>8) | (((x)&0xff000000U)>>24))
+#define ORC_SWAP_Q(x) ((((x)&ORC_UINT64_C(0xff))<<56) | (((x)&ORC_UINT64_C(0xff00))<<40) | (((x)&ORC_UINT64_C(0xff0000))<<24) | (((x)&ORC_UINT64_C(0xff000000))<<8) | (((x)&ORC_UINT64_C(0xff00000000))>>8) | (((x)&ORC_UINT64_C(0xff0000000000))>>24) | (((x)&ORC_UINT64_C(0xff000000000000))>>40) | (((x)&ORC_UINT64_C(0xff00000000000000))>>56))
+#define ORC_PTR_OFFSET(ptr,offset) ((void *)(((unsigned char *)(ptr)) + (offset)))
+#define ORC_DENORMAL(x) ((x) & ((((x)&0x7f800000) == 0) ? 0xff800000 : 0xffffffff))
+#define ORC_ISNAN(x) ((((x)&0x7f800000) == 0x7f800000) && (((x)&0x007fffff) != 0))
+#define ORC_DENORMAL_DOUBLE(x) ((x) & ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == 0) ? ORC_UINT64_C(0xfff0000000000000) : ORC_UINT64_C(0xffffffffffffffff)))
+#define ORC_ISNAN_DOUBLE(x) ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == ORC_UINT64_C(0x7ff0000000000000)) && (((x)&ORC_UINT64_C(0x000fffffffffffff)) != 0))
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+/* end Orc C target preamble */
+
+
+
+/* audiopanoramam_orc_process_s16_ch1_none */
+#ifdef DISABLE_ORC
+void
+audiopanoramam_orc_process_s16_ch1_none (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var32;
+ orc_union16 var33;
+ orc_union32 var34;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union16 *) s1;
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var32 = ptr4[i];
+ /* 1: loadw */
+ var33 = ptr4[i];
+ /* 2: mergewl */
+ {
+ orc_union32 _dest;
+ _dest.x2[0] = var32.i;
+ _dest.x2[1] = var33.i;
+ var34.i = _dest.i;
+ }
+ /* 3: storel */
+ ptr0[i] = var34;
+ }
+
+}
+
+#else
+static void
+_backup_audiopanoramam_orc_process_s16_ch1_none (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var32;
+ orc_union16 var33;
+ orc_union32 var34;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union16 *) ex->arrays[4];
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var32 = ptr4[i];
+ /* 1: loadw */
+ var33 = ptr4[i];
+ /* 2: mergewl */
+ {
+ orc_union32 _dest;
+ _dest.x2[0] = var32.i;
+ _dest.x2[1] = var33.i;
+ var34.i = _dest.i;
+ }
+ /* 3: storel */
+ ptr0[i] = var34;
+ }
+
+}
+
+void
+audiopanoramam_orc_process_s16_ch1_none (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 39, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 49, 95, 110, 111, 110, 101, 11, 4, 4, 12, 2, 2,
+ 195, 0, 4, 4, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_none);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch1_none");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_none);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 2, "s1");
+
+ orc_program_append_2 (p, "mergewl", 0, ORC_VAR_D1, ORC_VAR_S1, ORC_VAR_S1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch1_none */
+#ifdef DISABLE_ORC
+void
+audiopanoramam_orc_process_f32_ch1_none (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var32;
+ orc_union32 var33;
+ orc_union64 var34;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var32 = ptr4[i];
+ /* 1: loadl */
+ var33 = ptr4[i];
+ /* 2: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var32.i;
+ _dest.x2[1] = var33.i;
+ var34.i = _dest.i;
+ }
+ /* 3: storeq */
+ ptr0[i] = var34;
+ }
+
+}
+
+#else
+static void
+_backup_audiopanoramam_orc_process_f32_ch1_none (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var32;
+ orc_union32 var33;
+ orc_union64 var34;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var32 = ptr4[i];
+ /* 1: loadl */
+ var33 = ptr4[i];
+ /* 2: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var32.i;
+ _dest.x2[1] = var33.i;
+ var34.i = _dest.i;
+ }
+ /* 3: storeq */
+ ptr0[i] = var34;
+ }
+
+}
+
+void
+audiopanoramam_orc_process_f32_ch1_none (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 39, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 49, 95, 110, 111, 110, 101, 11, 8, 8, 12, 4, 4,
+ 194, 0, 4, 4, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_none);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch1_none");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_none);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 4, "s1");
+
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_S1, ORC_VAR_S1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch2_none */
+#ifdef DISABLE_ORC
+/* Generated scalar fallback (built when DISABLE_ORC is defined): plain
+ * pass-through copy of n stereo s16 frames, one 32-bit frame (two 16-bit
+ * channels, copyw) per iteration, without the Orc runtime. */
+void
+audiopanoramam_orc_process_s16_ch2_none (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var32;
+ orc_union32 var33;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var32 = ptr4[i];
+ /* 1: copyw */
+ var33.x2[0] = var32.x2[0];
+ var33.x2[1] = var32.x2[1];
+ /* 2: storel */
+ ptr0[i] = var33;
+ }
+
+}
+
+#else
+/* Generated plain-C backup invoked by the Orc executor when native code
+ * generation is unavailable; one statement per bytecode opcode (stereo
+ * s16 pass-through copy). */
+static void
+_backup_audiopanoramam_orc_process_s16_ch2_none (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var32;
+ orc_union32 var33;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var32 = ptr4[i];
+ /* 1: copyw */
+ var33.x2[0] = var32.x2[0];
+ var33.x2[1] = var32.x2[1];
+ /* 2: storel */
+ ptr0[i] = var33;
+ }
+
+}
+
+/* Generated Orc dispatcher: compiles the bytecode program once under the
+ * Orc once-mutex (double-checked), then runs it — a stereo s16
+ * pass-through copy (copyw d1, s1). */
+void
+audiopanoramam_orc_process_s16_ch2_none (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 39, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 50, 95, 110, 111, 110, 101, 11, 4, 4, 12, 4, 4,
+ 21, 1, 79, 0, 4, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_none);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch2_none");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_none);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+
+ orc_program_append_2 (p, "copyw", 1, ORC_VAR_D1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch2_none */
+#ifdef DISABLE_ORC
+/* Generated scalar fallback (built when DISABLE_ORC is defined): plain
+ * pass-through copy of n stereo float frames, one 8-byte frame (two
+ * 32-bit channels, copyl) per iteration, without the Orc runtime. */
+void
+audiopanoramam_orc_process_f32_ch2_none (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var32;
+ orc_union64 var33;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union64 *) s1;
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var32 = ptr4[i];
+ /* 1: copyl */
+ var33.x2[0] = var32.x2[0];
+ var33.x2[1] = var32.x2[1];
+ /* 2: storeq */
+ ptr0[i] = var33;
+ }
+
+}
+
+#else
+/* Generated plain-C backup invoked by the Orc executor when native code
+ * generation is unavailable; one statement per bytecode opcode (stereo
+ * float pass-through copy). */
+static void
+_backup_audiopanoramam_orc_process_f32_ch2_none (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var32;
+ orc_union64 var33;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union64 *) ex->arrays[4];
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var32 = ptr4[i];
+ /* 1: copyl */
+ var33.x2[0] = var32.x2[0];
+ var33.x2[1] = var32.x2[1];
+ /* 2: storeq */
+ ptr0[i] = var33;
+ }
+
+}
+
+/* Generated Orc dispatcher: compiles the bytecode program once under the
+ * Orc once-mutex (double-checked), then runs it — a stereo float
+ * pass-through copy (copyl d1, s1). */
+void
+audiopanoramam_orc_process_f32_ch2_none (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 39, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 50, 95, 110, 111, 110, 101, 11, 8, 8, 12, 8, 8,
+ 21, 1, 112, 0, 4, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_none);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch2_none");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_none);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 8, "s1");
+
+ orc_program_append_2 (p, "copyl", 1, ORC_VAR_D1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch1_psy */
+#ifdef DISABLE_ORC
+/* Generated scalar fallback (built when DISABLE_ORC is defined): mono s16
+ * to stereo s16 panning.  Each sample is converted to float, scaled by p1
+ * (left) and p2 (right), merged to a stereo pair, then converted back to
+ * s16 with saturation (convfl + convssslw). */
+void
+audiopanoramam_orc_process_s16_ch1_psy (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union64 var43;
+ orc_union64 var44;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union16 *) s1;
+
+ /* 3: loadpl */
+ var36.f = p2;
+ /* 5: loadpl */
+ var37.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var39.i = var35.i;
+ /* 2: convlf */
+ var40.f = var39.i;
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var41.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var42.i;
+ _dest.x2[1] = var41.i;
+ var43.i = _dest.i;
+ }
+ /* 8: convfl */
+ {
+ int tmp;
+ tmp = (int) var43.x2f[0];
+ if (tmp == 0x80000000 && !(var43.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var43.x2f[1];
+ if (tmp == 0x80000000 && !(var43.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[1] = tmp;
+ }
+ /* 9: convssslw */
+ var38.x2[0] = ORC_CLAMP_SW (var44.x2[0]);
+ var38.x2[1] = ORC_CLAMP_SW (var44.x2[1]);
+ /* 10: storel */
+ ptr0[i] = var38;
+ }
+
+}
+
+#else
+/* Generated plain-C backup invoked by the Orc executor when native code
+ * generation is unavailable; mirrors the bytecode program one opcode per
+ * statement (mono s16 -> stereo s16 psy panning; params 24/25 carry the
+ * float gains p1/p2). */
+static void
+_backup_audiopanoramam_orc_process_s16_ch1_psy (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union64 var43;
+ orc_union64 var44;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union16 *) ex->arrays[4];
+
+ /* 3: loadpl */
+ var36.i = ex->params[25];
+ /* 5: loadpl */
+ var37.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var39.i = var35.i;
+ /* 2: convlf */
+ var40.f = var39.i;
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var41.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var42.i;
+ _dest.x2[1] = var41.i;
+ var43.i = _dest.i;
+ }
+ /* 8: convfl */
+ {
+ int tmp;
+ tmp = (int) var43.x2f[0];
+ if (tmp == 0x80000000 && !(var43.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var43.x2f[1];
+ if (tmp == 0x80000000 && !(var43.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[1] = tmp;
+ }
+ /* 9: convssslw */
+ var38.x2[0] = ORC_CLAMP_SW (var44.x2[0]);
+ var38.x2[1] = ORC_CLAMP_SW (var44.x2[1]);
+ /* 10: storel */
+ ptr0[i] = var38;
+ }
+
+}
+
+/* Generated Orc dispatcher: compiles the bytecode program once under the
+ * Orc once-mutex (double-checked), marshals the float gains p1/p2 into
+ * the executor's params, then runs the mono->stereo s16 psy panning. */
+void
+audiopanoramam_orc_process_s16_ch1_psy (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 38, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 49, 95, 112, 115, 121, 11, 4, 4, 12, 2, 2, 17,
+ 4, 17, 4, 20, 8, 20, 4, 20, 4, 153, 33, 4, 211, 33, 33, 202,
+ 34, 33, 25, 202, 33, 33, 24, 194, 32, 33, 34, 21, 1, 210, 32, 32,
+ 21, 1, 165, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_psy);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch1_psy");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_psy);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 2, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_parameter_float (p, 4, "p2");
+ orc_program_add_temporary (p, 8, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+
+ orc_program_append_2 (p, "convswl", 0, ORC_VAR_T2, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlf", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_P2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convfl", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convssslw", 1, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+ {
+ orc_union32 tmp;
+ tmp.f = p2;
+ ex->params[ORC_VAR_P2] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch1_psy */
+#ifdef DISABLE_ORC
+/* Generated scalar fallback (built when DISABLE_ORC is defined): mono
+ * float to stereo float panning — each sample is multiplied by p1 (left)
+ * and p2 (right) and the two products merged into one stereo frame. */
+void
+audiopanoramam_orc_process_f32_ch1_psy (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var34;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union64 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+ /* 1: loadpl */
+ var35.f = p2;
+ /* 4: loadpl */
+ var37.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var34 = ptr4[i];
+ /* 2: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var34.i);
+ _src2.i = ORC_DENORMAL (var35.i);
+ _dest1.f = _src1.f * _src2.f;
+ var39.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 3: loadl */
+ var36 = ptr4[i];
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var36.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var39.i;
+ var38.i = _dest.i;
+ }
+ /* 7: storeq */
+ ptr0[i] = var38;
+ }
+
+}
+
+#else
+/* Generated plain-C backup invoked by the Orc executor when native code
+ * generation is unavailable; mirrors the bytecode program one opcode per
+ * statement (mono float -> stereo float psy panning; params 24/25 carry
+ * the float gains p1/p2). */
+static void
+_backup_audiopanoramam_orc_process_f32_ch1_psy (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var34;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union64 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+ /* 1: loadpl */
+ var35.i = ex->params[25];
+ /* 4: loadpl */
+ var37.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var34 = ptr4[i];
+ /* 2: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var34.i);
+ _src2.i = ORC_DENORMAL (var35.i);
+ _dest1.f = _src1.f * _src2.f;
+ var39.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 3: loadl */
+ var36 = ptr4[i];
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var36.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var39.i;
+ var38.i = _dest.i;
+ }
+ /* 7: storeq */
+ ptr0[i] = var38;
+ }
+
+}
+
+/* Generated Orc dispatcher: compiles the bytecode program once under the
+ * Orc once-mutex (double-checked), marshals the float gains p1/p2 into
+ * the executor's params, then runs the mono->stereo float psy panning. */
+void
+audiopanoramam_orc_process_f32_ch1_psy (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 38, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 49, 95, 112, 115, 121, 11, 8, 8, 12, 4, 4, 17,
+ 4, 17, 4, 20, 4, 20, 4, 202, 33, 4, 25, 202, 32, 4, 24, 194,
+ 0, 32, 33, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_psy);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch1_psy");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_psy);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_parameter_float (p, 4, "p2");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_S1, ORC_VAR_P2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+ {
+ orc_union32 tmp;
+ tmp.f = p2;
+ ex->params[ORC_VAR_P2] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch2_psy_right */
+#ifdef DISABLE_ORC
+/* Generated scalar fallback (built when DISABLE_ORC is defined): stereo
+ * s16 psy panning toward the right.  The left channel is split out,
+ * scaled by p1/p2, part of it is mixed into the right channel (addf),
+ * and the result is converted back to s16 with saturation. */
+void
+audiopanoramam_orc_process_s16_ch2_psy_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union64 var40;
+ orc_union64 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+ orc_union32 var45;
+ orc_union32 var46;
+ orc_union64 var47;
+ orc_union64 var48;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+ /* 5: loadpl */
+ var37.f = p2;
+ /* 7: loadpl */
+ var38.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var36 = ptr4[i];
+ /* 1: convswl */
+ var40.x2[0] = var36.x2[0];
+ var40.x2[1] = var36.x2[1];
+ /* 2: convlf */
+ var41.x2f[0] = var40.x2[0];
+ var41.x2f[1] = var40.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var42.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var43.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var45.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var44.i);
+ _src2.i = ORC_DENORMAL (var43.i);
+ _dest1.f = _src1.f + _src2.f;
+ var46.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 10: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var45.i;
+ _dest.x2[1] = var46.i;
+ var47.i = _dest.i;
+ }
+ /* 11: convfl */
+ {
+ int tmp;
+ tmp = (int) var47.x2f[0];
+ if (tmp == 0x80000000 && !(var47.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var47.x2f[1];
+ if (tmp == 0x80000000 && !(var47.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[1] = tmp;
+ }
+ /* 12: convssslw */
+ var39.x2[0] = ORC_CLAMP_SW (var48.x2[0]);
+ var39.x2[1] = ORC_CLAMP_SW (var48.x2[1]);
+ /* 13: storel */
+ ptr0[i] = var39;
+ }
+
+}
+
+#else
+/* Generated plain-C backup invoked by the Orc executor when native code
+ * generation is unavailable; mirrors the bytecode program one opcode per
+ * statement (stereo s16 right-panning; params 24/25 carry the float
+ * gains p1/p2). */
+static void
+_backup_audiopanoramam_orc_process_s16_ch2_psy_right (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union64 var40;
+ orc_union64 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+ orc_union32 var45;
+ orc_union32 var46;
+ orc_union64 var47;
+ orc_union64 var48;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+ /* 5: loadpl */
+ var37.i = ex->params[25];
+ /* 7: loadpl */
+ var38.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var36 = ptr4[i];
+ /* 1: convswl */
+ var40.x2[0] = var36.x2[0];
+ var40.x2[1] = var36.x2[1];
+ /* 2: convlf */
+ var41.x2f[0] = var40.x2[0];
+ var41.x2f[1] = var40.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var42.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var43.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var45.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var44.i);
+ _src2.i = ORC_DENORMAL (var43.i);
+ _dest1.f = _src1.f + _src2.f;
+ var46.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 10: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var45.i;
+ _dest.x2[1] = var46.i;
+ var47.i = _dest.i;
+ }
+ /* 11: convfl */
+ {
+ int tmp;
+ tmp = (int) var47.x2f[0];
+ if (tmp == 0x80000000 && !(var47.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var47.x2f[1];
+ if (tmp == 0x80000000 && !(var47.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[1] = tmp;
+ }
+ /* 12: convssslw */
+ var39.x2[0] = ORC_CLAMP_SW (var48.x2[0]);
+ var39.x2[1] = ORC_CLAMP_SW (var48.x2[1]);
+ /* 13: storel */
+ ptr0[i] = var39;
+ }
+
+}
+
+/* Generated Orc dispatcher: compiles the bytecode program once under the
+ * Orc once-mutex (double-checked), marshals the float gains p1/p2 into
+ * the executor's params, then runs the stereo s16 right-panning. */
+void
+audiopanoramam_orc_process_s16_ch2_psy_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 44, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 50, 95, 112, 115, 121, 95, 114, 105, 103, 104, 116, 11,
+ 4, 4, 12, 4, 4, 17, 4, 17, 4, 20, 8, 20, 4, 20, 4, 20,
+ 4, 21, 1, 153, 32, 4, 21, 1, 211, 32, 32, 192, 33, 32, 193, 34,
+ 32, 202, 35, 33, 25, 202, 33, 33, 24, 200, 34, 35, 34, 194, 32, 33,
+ 34, 21, 1, 210, 32, 32, 21, 1, 165, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_psy_right);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch2_psy_right");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_psy_right);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_parameter_float (p, 4, "p2");
+ orc_program_add_temporary (p, 8, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+ orc_program_add_temporary (p, 4, "t4");
+
+ orc_program_append_2 (p, "convswl", 1, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlf", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T2, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T3, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T4, ORC_VAR_T2, ORC_VAR_P2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addf", 0, ORC_VAR_T3, ORC_VAR_T4, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convfl", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convssslw", 1, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+ {
+ orc_union32 tmp;
+ tmp.f = p2;
+ ex->params[ORC_VAR_P2] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch2_psy_left */
+#ifdef DISABLE_ORC
+/* Generated scalar fallback (built when DISABLE_ORC is defined): stereo
+ * s16 psy panning toward the left — mirror image of the _psy_right
+ * variant: the right channel is split out, scaled by p1/p2, part of it
+ * is mixed into the left channel (addf), then saturated back to s16. */
+void
+audiopanoramam_orc_process_s16_ch2_psy_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union64 var40;
+ orc_union64 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+ orc_union32 var45;
+ orc_union32 var46;
+ orc_union64 var47;
+ orc_union64 var48;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+ /* 5: loadpl */
+ var37.f = p1;
+ /* 7: loadpl */
+ var38.f = p2;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var36 = ptr4[i];
+ /* 1: convswl */
+ var40.x2[0] = var36.x2[0];
+ var40.x2[1] = var36.x2[1];
+ /* 2: convlf */
+ var41.x2f[0] = var40.x2[0];
+ var41.x2f[1] = var40.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var42.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var43.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var43.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var43.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var45.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var44.i);
+ _src2.i = ORC_DENORMAL (var42.i);
+ _dest1.f = _src1.f + _src2.f;
+ var46.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 10: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var46.i;
+ _dest.x2[1] = var45.i;
+ var47.i = _dest.i;
+ }
+ /* 11: convfl */
+ {
+ int tmp;
+ tmp = (int) var47.x2f[0];
+ if (tmp == 0x80000000 && !(var47.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var47.x2f[1];
+ if (tmp == 0x80000000 && !(var47.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[1] = tmp;
+ }
+ /* 12: convssslw */
+ var39.x2[0] = ORC_CLAMP_SW (var48.x2[0]);
+ var39.x2[1] = ORC_CLAMP_SW (var48.x2[1]);
+ /* 13: storel */
+ ptr0[i] = var39;
+ }
+
+}
+
+#else
+/* Generated plain-C backup invoked by the Orc executor when native code
+ * generation is unavailable; mirrors the bytecode program one opcode per
+ * statement (stereo s16 left-panning; params 24/25 carry the float
+ * gains p1/p2). */
+static void
+_backup_audiopanoramam_orc_process_s16_ch2_psy_left (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union64 var40;
+ orc_union64 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+ orc_union32 var45;
+ orc_union32 var46;
+ orc_union64 var47;
+ orc_union64 var48;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+ /* 5: loadpl */
+ var37.i = ex->params[24];
+ /* 7: loadpl */
+ var38.i = ex->params[25];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var36 = ptr4[i];
+ /* 1: convswl */
+ var40.x2[0] = var36.x2[0];
+ var40.x2[1] = var36.x2[1];
+ /* 2: convlf */
+ var41.x2f[0] = var40.x2[0];
+ var41.x2f[1] = var40.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var42.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var41.i;
+ var43.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var43.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var43.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var45.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var44.i);
+ _src2.i = ORC_DENORMAL (var42.i);
+ _dest1.f = _src1.f + _src2.f;
+ var46.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 10: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var46.i;
+ _dest.x2[1] = var45.i;
+ var47.i = _dest.i;
+ }
+ /* 11: convfl */
+ {
+ int tmp;
+ tmp = (int) var47.x2f[0];
+ if (tmp == 0x80000000 && !(var47.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var47.x2f[1];
+ if (tmp == 0x80000000 && !(var47.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var48.x2[1] = tmp;
+ }
+ /* 12: convssslw */
+ var39.x2[0] = ORC_CLAMP_SW (var48.x2[0]);
+ var39.x2[1] = ORC_CLAMP_SW (var48.x2[1]);
+ /* 13: storel */
+ ptr0[i] = var39;
+ }
+
+}
+
+/* Generated Orc dispatcher: compiles the bytecode program once under the
+ * Orc once-mutex (double-checked), marshals the float gains p1/p2 into
+ * the executor's params, then runs the stereo s16 left-panning. */
+void
+audiopanoramam_orc_process_s16_ch2_psy_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 43, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 50, 95, 112, 115, 121, 95, 108, 101, 102, 116, 11, 4,
+ 4, 12, 4, 4, 17, 4, 17, 4, 20, 8, 20, 4, 20, 4, 20, 4,
+ 21, 1, 153, 32, 4, 21, 1, 211, 32, 32, 192, 33, 32, 193, 35, 32,
+ 202, 34, 35, 24, 202, 35, 35, 25, 200, 33, 34, 33, 194, 32, 33, 35,
+ 21, 1, 210, 32, 32, 21, 1, 165, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_psy_left);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch2_psy_left");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_psy_left);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_parameter_float (p, 4, "p2");
+ orc_program_add_temporary (p, 8, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+ orc_program_add_temporary (p, 4, "t4");
+
+ orc_program_append_2 (p, "convswl", 1, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlf", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T2, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T4, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T3, ORC_VAR_T4, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T4, ORC_VAR_T4, ORC_VAR_P2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addf", 0, ORC_VAR_T2, ORC_VAR_T3, ORC_VAR_T2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convfl", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convssslw", 1, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+ {
+ orc_union32 tmp;
+ tmp.f = p2;
+ ex->params[ORC_VAR_P2] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch2_psy_right */
+#ifdef DISABLE_ORC
+/* Generated scalar fallback (built when DISABLE_ORC is defined): stereo
+ * float psy panning toward the right — the left channel is split out,
+ * scaled by p1/p2, part of it is mixed into the right channel (addf),
+ * and the pair is merged back into one stereo float frame. */
+void
+audiopanoramam_orc_process_f32_ch2_psy_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var35;
+ orc_union64 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union64 *) s1;
+
+ /* 4: loadpl */
+ var37.f = p2;
+ /* 6: loadpl */
+ var38.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var35 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var40.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var36 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var36.i;
+ var41.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var43.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var41.i);
+ _dest1.f = _src1.f + _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var43.i;
+ _dest.x2[1] = var44.i;
+ var39.i = _dest.i;
+ }
+ /* 10: storeq */
+ ptr0[i] = var39;
+ }
+
+}
+
+#else
+static void
+_backup_audiopanoramam_orc_process_f32_ch2_psy_right (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var35;
+ orc_union64 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union64 *) ex->arrays[4];
+
+ /* 4: loadpl */
+ var37.i = ex->params[25];
+ /* 6: loadpl */
+ var38.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var35 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var40.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var36 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var36.i;
+ var41.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var43.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var41.i);
+ _dest1.f = _src1.f + _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var43.i;
+ _dest.x2[1] = var44.i;
+ var39.i = _dest.i;
+ }
+ /* 10: storeq */
+ ptr0[i] = var39;
+ }
+
+}
+
+void
+audiopanoramam_orc_process_f32_ch2_psy_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 44, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 50, 95, 112, 115, 121, 95, 114, 105, 103, 104, 116, 11,
+ 8, 8, 12, 8, 8, 17, 4, 17, 4, 20, 4, 20, 4, 20, 4, 192,
+ 32, 4, 193, 33, 4, 202, 34, 32, 25, 202, 32, 32, 24, 200, 33, 34,
+ 33, 194, 0, 32, 33, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_psy_right);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch2_psy_right");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_psy_right);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 8, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_parameter_float (p, 4, "p2");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T1, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T2, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T3, ORC_VAR_T1, ORC_VAR_P2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addf", 0, ORC_VAR_T2, ORC_VAR_T3, ORC_VAR_T2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+ {
+ orc_union32 tmp;
+ tmp.f = p2;
+ ex->params[ORC_VAR_P2] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch2_psy_left */
+/* Generated by orcc from audiopanoramaorc.orc -- do not hand-edit;
+ * regenerate instead.  Per interleaved stereo float frame:
+ * new_left = left + right * p1, new_right = right * p2 (mirror image of
+ * the psy_right variant above). */
+#ifdef DISABLE_ORC
+/* Plain-C fallback built when Orc support is compiled out entirely. */
+void
+audiopanoramam_orc_process_f32_ch2_psy_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var35;
+ orc_union64 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union64 *) s1;
+
+ /* 4: loadpl */
+ var37.f = p1;
+ /* 6: loadpl */
+ var38.f = p2;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var35 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var40.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var36 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var36.i;
+ var41.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var41.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var41.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var43.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var40.i);
+ _dest1.f = _src1.f + _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var44.i;
+ _dest.x2[1] = var43.i;
+ var39.i = _dest.i;
+ }
+ /* 10: storeq */
+ ptr0[i] = var39;
+ }
+
+}
+
+#else
+/* Scalar backup run through the OrcExecutor when JIT compilation of the
+ * bytecode fails; mirrors the fallback above instruction-for-instruction. */
+static void
+_backup_audiopanoramam_orc_process_f32_ch2_psy_left (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var35;
+ orc_union64 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union32 var44;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union64 *) ex->arrays[4];
+
+ /* 4: loadpl */
+ var37.i = ex->params[24];
+ /* 6: loadpl */
+ var38.i = ex->params[25];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var35 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var40.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var36 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var36.i;
+ var41.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var41.i);
+ _src2.i = ORC_DENORMAL (var37.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var41.i);
+ _src2.i = ORC_DENORMAL (var38.i);
+ _dest1.f = _src1.f * _src2.f;
+ var43.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 8: addf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var42.i);
+ _src2.i = ORC_DENORMAL (var40.i);
+ _dest1.f = _src1.f + _src2.f;
+ var44.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 9: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var44.i;
+ _dest.x2[1] = var43.i;
+ var39.i = _dest.i;
+ }
+ /* 10: storeq */
+ ptr0[i] = var39;
+ }
+
+}
+
+/* Public entry point: lazily builds/compiles the Orc program once (guarded
+ * by the double-checked p_inited flag under the orc once-mutex), then fills
+ * an on-stack OrcExecutor and invokes the compiled (or backup) code. */
+void
+audiopanoramam_orc_process_f32_ch2_psy_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, float p2, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Pre-serialized bytecode for the program; the #else branch below is the
+  * equivalent builder-API construction kept for reference. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 43, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 50, 95, 112, 115, 121, 95, 108, 101, 102, 116, 11, 8,
+ 8, 12, 8, 8, 17, 4, 17, 4, 20, 4, 20, 4, 20, 4, 192, 32,
+ 4, 193, 34, 4, 202, 33, 34, 24, 202, 34, 34, 25, 200, 32, 33, 32,
+ 194, 0, 32, 34, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_psy_left);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch2_psy_left");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_psy_left);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 8, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_parameter_float (p, 4, "p2");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T1, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T3, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_T3, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T3, ORC_VAR_T3, ORC_VAR_P2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addf", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_T3,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+ {
+ orc_union32 tmp;
+ tmp.f = p2;
+ ex->params[ORC_VAR_P2] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch1_sim_right */
+/* Generated by orcc from audiopanoramaorc.orc -- do not hand-edit;
+ * regenerate instead.  Mono s16 in, interleaved stereo s16 out:
+ * left_out = in (unchanged), right_out = saturate_s16(in * p1); the
+ * scaling is done in float and converted back with saturation
+ * (convfl saturating variant + ORC_CLAMP_SW). */
+#ifdef DISABLE_ORC
+/* Plain-C fallback built when Orc support is compiled out entirely. */
+void
+audiopanoramam_orc_process_s16_ch1_sim_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+ orc_union64 var41;
+ orc_union64 var42;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union16 *) s1;
+
+ /* 3: loadpl */
+ var36.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.i = var35.i;
+ /* 2: convlf */
+ var39.f = var38.i;
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var39.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var39.i;
+ _dest.x2[1] = var40.i;
+ var41.i = _dest.i;
+ }
+ /* 6: convfl */
+ {
+ int tmp;
+ tmp = (int) var41.x2f[0];
+ if (tmp == 0x80000000 && !(var41.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var41.x2f[1];
+ if (tmp == 0x80000000 && !(var41.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[1] = tmp;
+ }
+ /* 7: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var42.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var42.x2[1]);
+ /* 8: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Scalar backup run through the OrcExecutor when JIT compilation of the
+ * bytecode fails; mirrors the fallback above instruction-for-instruction. */
+static void
+_backup_audiopanoramam_orc_process_s16_ch1_sim_right (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+ orc_union64 var41;
+ orc_union64 var42;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union16 *) ex->arrays[4];
+
+ /* 3: loadpl */
+ var36.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.i = var35.i;
+ /* 2: convlf */
+ var39.f = var38.i;
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var39.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var39.i;
+ _dest.x2[1] = var40.i;
+ var41.i = _dest.i;
+ }
+ /* 6: convfl */
+ {
+ int tmp;
+ tmp = (int) var41.x2f[0];
+ if (tmp == 0x80000000 && !(var41.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var41.x2f[1];
+ if (tmp == 0x80000000 && !(var41.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[1] = tmp;
+ }
+ /* 7: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var42.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var42.x2[1]);
+ /* 8: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry point: lazily builds/compiles the Orc program once (guarded
+ * by the double-checked p_inited flag under the orc once-mutex), then fills
+ * an on-stack OrcExecutor and invokes the compiled (or backup) code. */
+void
+audiopanoramam_orc_process_s16_ch1_sim_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Pre-serialized bytecode for the program; the #else branch below is the
+  * equivalent builder-API construction kept for reference. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 44, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 49, 95, 115, 105, 109, 95, 114, 105, 103, 104, 116, 11,
+ 4, 4, 12, 2, 2, 17, 4, 20, 8, 20, 4, 20, 4, 153, 33, 4,
+ 211, 33, 33, 202, 34, 33, 24, 194, 32, 33, 34, 21, 1, 210, 32, 32,
+ 21, 1, 165, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_sim_right);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch1_sim_right");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_sim_right);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 2, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 8, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+
+ orc_program_append_2 (p, "convswl", 0, ORC_VAR_T2, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlf", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convfl", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convssslw", 1, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch1_sim_left */
+/* Generated by orcc from audiopanoramaorc.orc -- do not hand-edit;
+ * regenerate instead.  Mono s16 in, interleaved stereo s16 out:
+ * left_out = saturate_s16(in * p1), right_out = in (mirror image of the
+ * ch1_sim_right variant above). */
+#ifdef DISABLE_ORC
+/* Plain-C fallback built when Orc support is compiled out entirely. */
+void
+audiopanoramam_orc_process_s16_ch1_sim_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+ orc_union64 var41;
+ orc_union64 var42;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union16 *) s1;
+
+ /* 3: loadpl */
+ var36.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.i = var35.i;
+ /* 2: convlf */
+ var39.f = var38.i;
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var39.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var39.i;
+ var41.i = _dest.i;
+ }
+ /* 6: convfl */
+ {
+ int tmp;
+ tmp = (int) var41.x2f[0];
+ if (tmp == 0x80000000 && !(var41.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var41.x2f[1];
+ if (tmp == 0x80000000 && !(var41.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[1] = tmp;
+ }
+ /* 7: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var42.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var42.x2[1]);
+ /* 8: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Scalar backup run through the OrcExecutor when JIT compilation of the
+ * bytecode fails; mirrors the fallback above instruction-for-instruction. */
+static void
+_backup_audiopanoramam_orc_process_s16_ch1_sim_left (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union16 *ORC_RESTRICT ptr4;
+ orc_union16 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+ orc_union64 var41;
+ orc_union64 var42;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union16 *) ex->arrays[4];
+
+ /* 3: loadpl */
+ var36.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadw */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.i = var35.i;
+ /* 2: convlf */
+ var39.f = var38.i;
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var39.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var39.i;
+ var41.i = _dest.i;
+ }
+ /* 6: convfl */
+ {
+ int tmp;
+ tmp = (int) var41.x2f[0];
+ if (tmp == 0x80000000 && !(var41.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var41.x2f[1];
+ if (tmp == 0x80000000 && !(var41.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var42.x2[1] = tmp;
+ }
+ /* 7: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var42.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var42.x2[1]);
+ /* 8: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry point: lazily builds/compiles the Orc program once (guarded
+ * by the double-checked p_inited flag under the orc once-mutex), then fills
+ * an on-stack OrcExecutor and invokes the compiled (or backup) code. */
+void
+audiopanoramam_orc_process_s16_ch1_sim_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Pre-serialized bytecode for the program; the #else branch below is the
+  * equivalent builder-API construction kept for reference. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 43, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 49, 95, 115, 105, 109, 95, 108, 101, 102, 116, 11, 4,
+ 4, 12, 2, 2, 17, 4, 20, 8, 20, 4, 20, 4, 153, 34, 4, 211,
+ 34, 34, 202, 33, 34, 24, 194, 32, 33, 34, 21, 1, 210, 32, 32, 21,
+ 1, 165, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_sim_left);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch1_sim_left");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch1_sim_left);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 2, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 8, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+
+ orc_program_append_2 (p, "convswl", 0, ORC_VAR_T3, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlf", 0, ORC_VAR_T3, ORC_VAR_T3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_T3, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convfl", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convssslw", 1, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch2_sim_right */
+/* Generated by orcc from audiopanoramaorc.orc -- do not hand-edit;
+ * regenerate instead.  Interleaved stereo s16 in/out:
+ * left_out = left (round-tripped through float), right_out =
+ * saturate_s16(right * p1); scaling happens in float with saturating
+ * conversion back (convfl saturating variant + ORC_CLAMP_SW). */
+#ifdef DISABLE_ORC
+/* Plain-C fallback built when Orc support is compiled out entirely. */
+void
+audiopanoramam_orc_process_s16_ch2_sim_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union64 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union64 var43;
+ orc_union64 var44;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+ /* 5: loadpl */
+ var36.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.x2[0] = var35.x2[0];
+ var38.x2[1] = var35.x2[1];
+ /* 2: convlf */
+ var39.x2f[0] = var38.x2[0];
+ var39.x2f[1] = var38.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var40.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var41.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var41.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var42.i;
+ var43.i = _dest.i;
+ }
+ /* 8: convfl */
+ {
+ int tmp;
+ tmp = (int) var43.x2f[0];
+ if (tmp == 0x80000000 && !(var43.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var43.x2f[1];
+ if (tmp == 0x80000000 && !(var43.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[1] = tmp;
+ }
+ /* 9: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var44.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var44.x2[1]);
+ /* 10: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Scalar backup run through the OrcExecutor when JIT compilation of the
+ * bytecode fails; mirrors the fallback above instruction-for-instruction. */
+static void
+_backup_audiopanoramam_orc_process_s16_ch2_sim_right (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union64 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union64 var43;
+ orc_union64 var44;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+ /* 5: loadpl */
+ var36.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.x2[0] = var35.x2[0];
+ var38.x2[1] = var35.x2[1];
+ /* 2: convlf */
+ var39.x2f[0] = var38.x2[0];
+ var39.x2f[1] = var38.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var40.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var41.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var41.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var42.i;
+ var43.i = _dest.i;
+ }
+ /* 8: convfl */
+ {
+ int tmp;
+ tmp = (int) var43.x2f[0];
+ if (tmp == 0x80000000 && !(var43.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var43.x2f[1];
+ if (tmp == 0x80000000 && !(var43.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[1] = tmp;
+ }
+ /* 9: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var44.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var44.x2[1]);
+ /* 10: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry point: lazily builds/compiles the Orc program once (guarded
+ * by the double-checked p_inited flag under the orc once-mutex), then fills
+ * an on-stack OrcExecutor and invokes the compiled (or backup) code. */
+void
+audiopanoramam_orc_process_s16_ch2_sim_right (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Pre-serialized bytecode for the program; the #else branch below is the
+  * equivalent builder-API construction kept for reference. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 44, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 50, 95, 115, 105, 109, 95, 114, 105, 103, 104, 116, 11,
+ 4, 4, 12, 4, 4, 17, 4, 20, 8, 20, 4, 20, 4, 21, 1, 153,
+ 32, 4, 21, 1, 211, 32, 32, 192, 33, 32, 193, 34, 32, 202, 34, 34,
+ 24, 194, 32, 33, 34, 21, 1, 210, 32, 32, 21, 1, 165, 0, 32, 2,
+ 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_sim_right);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch2_sim_right");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_sim_right);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 8, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+
+ orc_program_append_2 (p, "convswl", 1, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlf", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T2, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T3, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T3, ORC_VAR_T3, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convfl", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convssslw", 1, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_s16_ch2_sim_left */
+/* Generated by orcc from audiopanoramaorc.orc -- do not hand-edit;
+ * regenerate instead.  Interleaved stereo s16 in/out:
+ * left_out = saturate_s16(left * p1), right_out = right (round-tripped
+ * through float); mirror image of the ch2_sim_right variant above. */
+#ifdef DISABLE_ORC
+/* Plain-C fallback built when Orc support is compiled out entirely. */
+void
+audiopanoramam_orc_process_s16_ch2_sim_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union64 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union64 var43;
+ orc_union64 var44;
+
+ ptr0 = (orc_union32 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+ /* 5: loadpl */
+ var36.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.x2[0] = var35.x2[0];
+ var38.x2[1] = var35.x2[1];
+ /* 2: convlf */
+ var39.x2f[0] = var38.x2[0];
+ var39.x2f[1] = var38.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var40.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var41.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var42.i;
+ _dest.x2[1] = var41.i;
+ var43.i = _dest.i;
+ }
+ /* 8: convfl */
+ {
+ int tmp;
+ tmp = (int) var43.x2f[0];
+ if (tmp == 0x80000000 && !(var43.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var43.x2f[1];
+ if (tmp == 0x80000000 && !(var43.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[1] = tmp;
+ }
+ /* 9: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var44.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var44.x2[1]);
+ /* 10: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Scalar backup run through the OrcExecutor when JIT compilation of the
+ * bytecode fails; mirrors the fallback above instruction-for-instruction. */
+static void
+_backup_audiopanoramam_orc_process_s16_ch2_sim_left (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union32 var37;
+ orc_union64 var38;
+ orc_union64 var39;
+ orc_union32 var40;
+ orc_union32 var41;
+ orc_union32 var42;
+ orc_union64 var43;
+ orc_union64 var44;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+ /* 5: loadpl */
+ var36.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var35 = ptr4[i];
+ /* 1: convswl */
+ var38.x2[0] = var35.x2[0];
+ var38.x2[1] = var35.x2[1];
+ /* 2: convlf */
+ var39.x2f[0] = var38.x2[0];
+ var39.x2f[1] = var38.x2[1];
+ /* 3: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var40.i = _src.x2[0];
+ }
+ /* 4: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var39.i;
+ var41.i = _src.x2[1];
+ }
+ /* 6: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var40.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var42.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 7: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var42.i;
+ _dest.x2[1] = var41.i;
+ var43.i = _dest.i;
+ }
+ /* 8: convfl */
+ {
+ int tmp;
+ tmp = (int) var43.x2f[0];
+ if (tmp == 0x80000000 && !(var43.x2[0] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[0] = tmp;
+ }
+ {
+ int tmp;
+ tmp = (int) var43.x2f[1];
+ if (tmp == 0x80000000 && !(var43.x2[1] & 0x80000000))
+ tmp = 0x7fffffff;
+ var44.x2[1] = tmp;
+ }
+ /* 9: convssslw */
+ var37.x2[0] = ORC_CLAMP_SW (var44.x2[0]);
+ var37.x2[1] = ORC_CLAMP_SW (var44.x2[1]);
+ /* 10: storel */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry point: lazily builds/compiles the Orc program once (guarded
+ * by the double-checked p_inited flag under the orc once-mutex), then fills
+ * an on-stack OrcExecutor and invokes the compiled (or backup) code. */
+void
+audiopanoramam_orc_process_s16_ch2_sim_left (gint16 * ORC_RESTRICT d1,
+ const gint16 * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Pre-serialized bytecode for the program; the #else branch below is the
+  * equivalent builder-API construction kept for reference. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 43, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 115,
+ 49,
+ 54, 95, 99, 104, 50, 95, 115, 105, 109, 95, 108, 101, 102, 116, 11, 4,
+ 4, 12, 4, 4, 17, 4, 20, 8, 20, 4, 20, 4, 21, 1, 153, 32,
+ 4, 21, 1, 211, 32, 32, 192, 33, 32, 193, 34, 32, 202, 33, 33, 24,
+ 194, 32, 33, 34, 21, 1, 210, 32, 32, 21, 1, 165, 0, 32, 2, 0,
+
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_sim_left);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_s16_ch2_sim_left");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_s16_ch2_sim_left);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 8, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 4, "t3");
+
+ orc_program_append_2 (p, "convswl", 1, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlf", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T2, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T3, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_T1, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convfl", 1, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convssslw", 1, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch1_sim_right */
+#ifdef DISABLE_ORC
+/* Plain-C fallback (Orc disabled at build time): upmixes n mono f32
+ * samples to interleaved stereo, left = input, right = input * p1.
+ * Autogenerated by orcc from audiopanoramaorc.orc -- do not hand-edit. */
+void
+audiopanoramam_orc_process_f32_ch1_sim_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var34;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+ /* 3: loadpl */
+ var36.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var34 = ptr4[i];
+ /* 1: copyl */
+ var38.i = var34.i;
+ /* 2: loadl */
+ var35 = ptr4[i];
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var35.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var39.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var38.i;
+ _dest.x2[1] = var39.i;
+ var37.i = _dest.i;
+ }
+ /* 6: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Reference implementation registered below as this Orc program's backup
+ * function; mirrors the bytecode semantics in plain C. */
+static void
+_backup_audiopanoramam_orc_process_f32_ch1_sim_right (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var34;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+ /* 3: loadpl */
+ var36.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var34 = ptr4[i];
+ /* 1: copyl */
+ var38.i = var34.i;
+ /* 2: loadl */
+ var35 = ptr4[i];
+ /* 4: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var35.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var39.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var38.i;
+ _dest.x2[1] = var39.i;
+ var37.i = _dest.i;
+ }
+ /* 6: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry: lazily compile the Orc program once (mutex-protected),
+ * cache the OrcCode, then execute it over n samples. */
+void
+audiopanoramam_orc_process_f32_ch1_sim_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Serialized Orc bytecode: program name followed by opcodes. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 44, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 49, 95, 115, 105, 109, 95, 114, 105, 103, 104, 116, 11,
+ 8, 8, 12, 4, 4, 17, 4, 20, 4, 20, 4, 112, 32, 4, 202, 33,
+ 4, 24, 194, 0, 32, 33, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_sim_right);
+#else
+ /* Same program via the Orc builder API (reference copy). */
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch1_sim_right");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_sim_right);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+
+ orc_program_append_2 (p, "copyl", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_S1, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ /* Float parameter is handed over by bit pattern via the int param slot. */
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch1_sim_left */
+#ifdef DISABLE_ORC
+/* Plain-C fallback (Orc disabled at build time): upmixes n mono f32
+ * samples to interleaved stereo, left = input * p1, right = input.
+ * Autogenerated by orcc from audiopanoramaorc.orc -- do not hand-edit. */
+void
+audiopanoramam_orc_process_f32_ch1_sim_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var34;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union32 *) s1;
+
+ /* 1: loadpl */
+ var35.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var34 = ptr4[i];
+ /* 2: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var34.i);
+ _src2.i = ORC_DENORMAL (var35.i);
+ _dest1.f = _src1.f * _src2.f;
+ var38.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 3: loadl */
+ var36 = ptr4[i];
+ /* 4: copyl */
+ var39.i = var36.i;
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var38.i;
+ _dest.x2[1] = var39.i;
+ var37.i = _dest.i;
+ }
+ /* 6: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Reference implementation registered below as this Orc program's backup
+ * function; mirrors the bytecode semantics in plain C. */
+static void
+_backup_audiopanoramam_orc_process_f32_ch1_sim_left (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union32 var34;
+ orc_union32 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union32 *) ex->arrays[4];
+
+ /* 1: loadpl */
+ var35.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var34 = ptr4[i];
+ /* 2: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var34.i);
+ _src2.i = ORC_DENORMAL (var35.i);
+ _dest1.f = _src1.f * _src2.f;
+ var38.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 3: loadl */
+ var36 = ptr4[i];
+ /* 4: copyl */
+ var39.i = var36.i;
+ /* 5: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var38.i;
+ _dest.x2[1] = var39.i;
+ var37.i = _dest.i;
+ }
+ /* 6: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry: lazily compile the Orc program once (mutex-protected),
+ * cache the OrcCode, then execute it over n samples. */
+void
+audiopanoramam_orc_process_f32_ch1_sim_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Serialized Orc bytecode: program name followed by opcodes. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 43, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 49, 95, 115, 105, 109, 95, 108, 101, 102, 116, 11, 8,
+ 8, 12, 4, 4, 17, 4, 20, 4, 20, 4, 202, 32, 4, 24, 112, 33,
+ 4, 194, 0, 32, 33, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_sim_left);
+#else
+ /* Same program via the Orc builder API (reference copy). */
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch1_sim_left");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch1_sim_left);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "copyl", 0, ORC_VAR_T2, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ /* Float parameter is handed over by bit pattern via the int param slot. */
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch2_sim_right */
+#ifdef DISABLE_ORC
+/* Plain-C fallback (Orc disabled at build time): processes n interleaved
+ * f32 stereo frames, right channel scaled by p1, left passed through.
+ * Autogenerated by orcc from audiopanoramaorc.orc -- do not hand-edit. */
+void
+audiopanoramam_orc_process_f32_ch2_sim_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var34;
+ orc_union64 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union64 *) s1;
+
+ /* 4: loadpl */
+ var36.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var34 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var34.i;
+ var38.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var35 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var39.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var39.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var38.i;
+ _dest.x2[1] = var40.i;
+ var37.i = _dest.i;
+ }
+ /* 7: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Reference implementation registered below as this Orc program's backup
+ * function; mirrors the bytecode semantics in plain C. */
+static void
+_backup_audiopanoramam_orc_process_f32_ch2_sim_right (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var34;
+ orc_union64 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union64 *) ex->arrays[4];
+
+ /* 4: loadpl */
+ var36.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var34 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var34.i;
+ var38.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var35 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var39.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var39.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var38.i;
+ _dest.x2[1] = var40.i;
+ var37.i = _dest.i;
+ }
+ /* 7: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry: lazily compile the Orc program once (mutex-protected),
+ * cache the OrcCode, then execute it over n samples. */
+void
+audiopanoramam_orc_process_f32_ch2_sim_right (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Serialized Orc bytecode: program name followed by opcodes. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 44, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 50, 95, 115, 105, 109, 95, 114, 105, 103, 104, 116, 11,
+ 8, 8, 12, 8, 8, 17, 4, 20, 4, 20, 4, 192, 32, 4, 193, 33,
+ 4, 202, 33, 33, 24, 194, 0, 32, 33, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_sim_right);
+#else
+ /* Same program via the Orc builder API (reference copy). */
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch2_sim_right");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_sim_right);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 8, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T1, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T2, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ /* Float parameter is handed over by bit pattern via the int param slot. */
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* audiopanoramam_orc_process_f32_ch2_sim_left */
+#ifdef DISABLE_ORC
+/* Plain-C fallback (Orc disabled at build time): processes n interleaved
+ * f32 stereo frames, left channel scaled by p1, right passed through.
+ * Autogenerated by orcc from audiopanoramaorc.orc -- do not hand-edit. */
+void
+audiopanoramam_orc_process_f32_ch2_sim_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ int i;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var34;
+ orc_union64 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+
+ ptr0 = (orc_union64 *) d1;
+ ptr4 = (orc_union64 *) s1;
+
+ /* 4: loadpl */
+ var36.f = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var34 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var34.i;
+ var38.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var35 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var39.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var38.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var39.i;
+ var37.i = _dest.i;
+ }
+ /* 7: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+#else
+/* Reference implementation registered below as this Orc program's backup
+ * function; mirrors the bytecode semantics in plain C. */
+static void
+_backup_audiopanoramam_orc_process_f32_ch2_sim_left (OrcExecutor *
+ ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union64 *ORC_RESTRICT ptr0;
+ const orc_union64 *ORC_RESTRICT ptr4;
+ orc_union64 var34;
+ orc_union64 var35;
+ orc_union32 var36;
+ orc_union64 var37;
+ orc_union32 var38;
+ orc_union32 var39;
+ orc_union32 var40;
+
+ ptr0 = (orc_union64 *) ex->arrays[0];
+ ptr4 = (orc_union64 *) ex->arrays[4];
+
+ /* 4: loadpl */
+ var36.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadq */
+ var34 = ptr4[i];
+ /* 1: select0ql */
+ {
+ orc_union64 _src;
+ _src.i = var34.i;
+ var38.i = _src.x2[0];
+ }
+ /* 2: loadq */
+ var35 = ptr4[i];
+ /* 3: select1ql */
+ {
+ orc_union64 _src;
+ _src.i = var35.i;
+ var39.i = _src.x2[1];
+ }
+ /* 5: mulf */
+ {
+ orc_union32 _src1;
+ orc_union32 _src2;
+ orc_union32 _dest1;
+ _src1.i = ORC_DENORMAL (var38.i);
+ _src2.i = ORC_DENORMAL (var36.i);
+ _dest1.f = _src1.f * _src2.f;
+ var40.i = ORC_DENORMAL (_dest1.i);
+ }
+ /* 6: mergelq */
+ {
+ orc_union64 _dest;
+ _dest.x2[0] = var40.i;
+ _dest.x2[1] = var39.i;
+ var37.i = _dest.i;
+ }
+ /* 7: storeq */
+ ptr0[i] = var37;
+ }
+
+}
+
+/* Public entry: lazily compile the Orc program once (mutex-protected),
+ * cache the OrcCode, then execute it over n samples. */
+void
+audiopanoramam_orc_process_f32_ch2_sim_left (gfloat * ORC_RESTRICT d1,
+ const gfloat * ORC_RESTRICT s1, float p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ /* Serialized Orc bytecode: program name followed by opcodes. */
+ static const orc_uint8 bc[] = {
+ 1, 9, 43, 97, 117, 100, 105, 111, 112, 97, 110, 111, 114, 97, 109, 97,
+ 109, 95, 111, 114, 99, 95, 112, 114, 111, 99, 101, 115, 115, 95, 102,
+ 51,
+ 50, 95, 99, 104, 50, 95, 115, 105, 109, 95, 108, 101, 102, 116, 11, 8,
+ 8, 12, 8, 8, 17, 4, 20, 4, 20, 4, 192, 32, 4, 193, 33, 4,
+ 202, 32, 32, 24, 194, 0, 32, 33, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_sim_left);
+#else
+ /* Same program via the Orc builder API (reference copy). */
+ p = orc_program_new ();
+ orc_program_set_name (p, "audiopanoramam_orc_process_f32_ch2_sim_left");
+ orc_program_set_backup_function (p,
+ _backup_audiopanoramam_orc_process_f32_ch2_sim_left);
+ orc_program_add_destination (p, 8, "d1");
+ orc_program_add_source (p, 8, "s1");
+ orc_program_add_parameter_float (p, 4, "p1");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+
+ orc_program_append_2 (p, "select0ql", 0, ORC_VAR_T1, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "select1ql", 0, ORC_VAR_T2, ORC_VAR_S1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mulf", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mergelq", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ /* Float parameter is handed over by bit pattern via the int param slot. */
+ {
+ orc_union32 tmp;
+ tmp.f = p1;
+ ex->params[ORC_VAR_P1] = tmp.i;
+ }
+
+ func = c->exec;
+ func (ex);
+}
+#endif
diff --git a/gst/audiofx/audiopanoramaorc-dist.h b/gst/audiofx/audiopanoramaorc-dist.h
new file mode 100644
index 0000000000..7611fba774
--- /dev/null
+++ b/gst/audiofx/audiopanoramaorc-dist.h
@@ -0,0 +1,107 @@
+
+/* autogenerated from audiopanoramaorc.orc */
+
+#ifndef _AUDIOPANORAMAORC_H_
+#define _AUDIOPANORAMAORC_H_
+
+#include <glib.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#ifndef _ORC_INTEGER_TYPEDEFS_
+#define _ORC_INTEGER_TYPEDEFS_
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+typedef int8_t orc_int8;
+typedef int16_t orc_int16;
+typedef int32_t orc_int32;
+typedef int64_t orc_int64;
+typedef uint8_t orc_uint8;
+typedef uint16_t orc_uint16;
+typedef uint32_t orc_uint32;
+typedef uint64_t orc_uint64;
+#define ORC_UINT64_C(x) UINT64_C(x)
+#elif defined(_MSC_VER)
+typedef signed __int8 orc_int8;
+typedef signed __int16 orc_int16;
+typedef signed __int32 orc_int32;
+typedef signed __int64 orc_int64;
+typedef unsigned __int8 orc_uint8;
+typedef unsigned __int16 orc_uint16;
+typedef unsigned __int32 orc_uint32;
+typedef unsigned __int64 orc_uint64;
+#define ORC_UINT64_C(x) (x##Ui64)
+#define inline __inline
+#else
+#include <limits.h>
+typedef signed char orc_int8;
+typedef short orc_int16;
+typedef int orc_int32;
+typedef unsigned char orc_uint8;
+typedef unsigned short orc_uint16;
+typedef unsigned int orc_uint32;
+#if INT_MAX == LONG_MAX
+typedef long long orc_int64;
+typedef unsigned long long orc_uint64;
+#define ORC_UINT64_C(x) (x##ULL)
+#else
+typedef long orc_int64;
+typedef unsigned long orc_uint64;
+#define ORC_UINT64_C(x) (x##UL)
+#endif
+#endif
+typedef union { orc_int16 i; orc_int8 x2[2]; } orc_union16;
+typedef union { orc_int32 i; float f; orc_int16 x2[2]; orc_int8 x4[4]; } orc_union32;
+typedef union { orc_int64 i; double f; orc_int32 x2[2]; float x2f[2]; orc_int16 x4[4]; } orc_union64;
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#ifndef ORC_INTERNAL
+#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
+#define ORC_INTERNAL __hidden
+#elif defined (__GNUC__)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#else
+#define ORC_INTERNAL
+#endif
+#endif
+
+void audiopanoramam_orc_process_s16_ch1_none (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_f32_ch1_none (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_s16_ch2_none (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_f32_ch2_none (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, int n);
+void audiopanoramam_orc_process_s16_ch1_psy (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_f32_ch1_psy (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_s16_ch2_psy_right (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_s16_ch2_psy_left (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_f32_ch2_psy_right (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_f32_ch2_psy_left (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, float p1, float p2, int n);
+void audiopanoramam_orc_process_s16_ch1_sim_right (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_s16_ch1_sim_left (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_s16_ch2_sim_right (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_s16_ch2_sim_left (gint16 * ORC_RESTRICT d1, const gint16 * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch1_sim_right (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch1_sim_left (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch2_sim_right (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, float p1, int n);
+void audiopanoramam_orc_process_f32_ch2_sim_left (gfloat * ORC_RESTRICT d1, const gfloat * ORC_RESTRICT s1, float p1, int n);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+
diff --git a/gst/audiofx/audiopanoramaorc.orc b/gst/audiofx/audiopanoramaorc.orc
new file mode 100644
index 0000000000..46140111e7
--- /dev/null
+++ b/gst/audiofx/audiopanoramaorc.orc
@@ -0,0 +1,258 @@
+# pass through functions
+
+# mono s16 -> interleaved stereo s16: both channels get the input sample
+.function audiopanoramam_orc_process_s16_ch1_none
+.source 2 s1 gint16
+.dest 4 d1 gint16
+
+mergewl d1 s1 s1
+
+
+# mono f32 -> interleaved stereo f32: both channels get the input sample
+.function audiopanoramam_orc_process_f32_ch1_none
+.source 4 s1 gfloat
+.dest 8 d1 gfloat
+
+mergelq d1 s1 s1
+
+
+# stereo s16 -> stereo s16: plain copy of both channels
+.function audiopanoramam_orc_process_s16_ch2_none
+.source 4 s1 gint16
+.dest 4 d1 gint16
+
+x2 copyw d1 s1
+
+
+# stereo f32 -> stereo f32: plain copy of both channels
+.function audiopanoramam_orc_process_f32_ch2_none
+.source 8 s1 gfloat
+.dest 8 d1 gfloat
+
+x2 copyl d1 s1
+
+
+# psychoacoustic processing function
+
+# mono s16 -> stereo s16: left = in * lpan, right = in * rpan,
+# computed in float and converted back with signed saturation
+.function audiopanoramam_orc_process_s16_ch1_psy
+.source 2 s1 gint16
+.dest 4 d1 gint16
+.floatparam 4 lpan
+.floatparam 4 rpan
+.temp 8 t1
+.temp 4 left
+.temp 4 right
+
+convswl left s1
+convlf left left
+mulf right left rpan
+mulf left left lpan
+mergelq t1 left right
+x2 convfl t1 t1
+x2 convssslw d1 t1
+
+
+# mono f32 -> stereo f32: left = in * lpan, right = in * rpan
+.function audiopanoramam_orc_process_f32_ch1_psy
+.source 4 s1 gfloat
+.dest 8 d1 gfloat
+.floatparam 4 lpan
+.floatparam 4 rpan
+.temp 4 left
+.temp 4 right
+
+mulf right s1 rpan
+mulf left s1 lpan
+mergelq d1 left right
+
+
+# stereo s16, pan right: right += left * rlpan, left *= llpan
+.function audiopanoramam_orc_process_s16_ch2_psy_right
+.source 4 s1 gint16
+.dest 4 d1 gint16
+.floatparam 4 llpan
+.floatparam 4 rlpan
+.temp 8 t1
+.temp 4 left
+.temp 4 right
+.temp 4 right1
+
+x2 convswl t1 s1
+x2 convlf t1 t1
+select0ql left t1
+select1ql right t1
+mulf right1 left rlpan
+mulf left left llpan
+addf right right1 right
+mergelq t1 left right
+x2 convfl t1 t1
+x2 convssslw d1 t1
+
+
+# stereo s16, pan left: left += right * lrpan, right *= rrpan
+.function audiopanoramam_orc_process_s16_ch2_psy_left
+.source 4 s1 gint16
+.dest 4 d1 gint16
+.floatparam 4 lrpan
+.floatparam 4 rrpan
+.temp 8 t1
+.temp 4 left
+.temp 4 left1
+.temp 4 right
+
+x2 convswl t1 s1
+x2 convlf t1 t1
+select0ql left t1
+select1ql right t1
+mulf left1 right lrpan
+mulf right right rrpan
+addf left left1 left
+mergelq t1 left right
+x2 convfl t1 t1
+x2 convssslw d1 t1
+
+
+# stereo f32, pan right: right += left * rlpan, left *= llpan
+.function audiopanoramam_orc_process_f32_ch2_psy_right
+.source 8 s1 gfloat
+.dest 8 d1 gfloat
+.floatparam 4 llpan
+.floatparam 4 rlpan
+.temp 4 left
+.temp 4 right
+.temp 4 right1
+
+select0ql left s1
+select1ql right s1
+mulf right1 left rlpan
+mulf left left llpan
+addf right right1 right
+mergelq d1 left right
+
+
+# stereo f32, pan left: left += right * lrpan, right *= rrpan
+.function audiopanoramam_orc_process_f32_ch2_psy_left
+.source 8 s1 gfloat
+.dest 8 d1 gfloat
+.floatparam 4 lrpan
+.floatparam 4 rrpan
+.temp 4 left
+.temp 4 left1
+.temp 4 right
+
+select0ql left s1
+select1ql right s1
+mulf left1 right lrpan
+mulf right right rrpan
+addf left left1 left
+mergelq d1 left right
+
+# simple processing functions
+
+# mono s16 -> stereo s16: left = in, right = in * rpan (saturated)
+.function audiopanoramam_orc_process_s16_ch1_sim_right
+.source 2 s1 gint16
+.dest 4 d1 gint16
+.floatparam 4 rpan
+.temp 8 t1
+.temp 4 left
+.temp 4 right
+
+convswl left s1
+convlf left left
+mulf right left rpan
+mergelq t1 left right
+x2 convfl t1 t1
+x2 convssslw d1 t1
+
+
+# mono s16 -> stereo s16: left = in * lpan, right = in (saturated)
+.function audiopanoramam_orc_process_s16_ch1_sim_left
+.source 2 s1 gint16
+.dest 4 d1 gint16
+.floatparam 4 lpan
+.temp 8 t1
+.temp 4 left
+.temp 4 right
+
+convswl right s1
+convlf right right
+mulf left right lpan
+mergelq t1 left right
+x2 convfl t1 t1
+x2 convssslw d1 t1
+
+
+# stereo s16: right *= rpan, left passed through (saturated)
+.function audiopanoramam_orc_process_s16_ch2_sim_right
+.source 4 s1 gint16
+.dest 4 d1 gint16
+.floatparam 4 rpan
+.temp 8 t1
+.temp 4 left
+.temp 4 right
+
+x2 convswl t1 s1
+x2 convlf t1 t1
+select0ql left t1
+select1ql right t1
+mulf right right rpan
+mergelq t1 left right
+x2 convfl t1 t1
+x2 convssslw d1 t1
+
+
+# stereo s16: left *= lpan, right passed through (saturated)
+.function audiopanoramam_orc_process_s16_ch2_sim_left
+.source 4 s1 gint16
+.dest 4 d1 gint16
+.floatparam 4 lpan
+.temp 8 t1
+.temp 4 left
+.temp 4 right
+
+x2 convswl t1 s1
+x2 convlf t1 t1
+select0ql left t1
+select1ql right t1
+mulf left left lpan
+mergelq t1 left right
+x2 convfl t1 t1
+x2 convssslw d1 t1
+
+
+# mono f32 -> stereo f32: left = in, right = in * rpan
+.function audiopanoramam_orc_process_f32_ch1_sim_right
+.source 4 s1 gfloat
+.dest 8 d1 gfloat
+.floatparam 4 rpan
+.temp 4 left
+.temp 4 right
+
+copyl left s1
+mulf right s1 rpan
+mergelq d1 left right
+
+
+# mono f32 -> stereo f32: left = in * lpan, right = in
+.function audiopanoramam_orc_process_f32_ch1_sim_left
+.source 4 s1 gfloat
+.dest 8 d1 gfloat
+.floatparam 4 lpan
+.temp 4 left
+.temp 4 right
+
+mulf left s1 lpan
+copyl right s1
+mergelq d1 left right
+
+
+# stereo f32: right *= rpan, left passed through
+.function audiopanoramam_orc_process_f32_ch2_sim_right
+.source 8 s1 gfloat
+.dest 8 d1 gfloat
+.floatparam 4 rpan
+.temp 4 left
+.temp 4 right
+
+select0ql left s1
+select1ql right s1
+mulf right right rpan
+mergelq d1 left right
+
+# stereo f32: left *= lpan, right passed through
+.function audiopanoramam_orc_process_f32_ch2_sim_left
+.source 8 s1 gfloat
+.dest 8 d1 gfloat
+.floatparam 4 lpan
+.temp 4 left
+.temp 4 right
+
+select0ql left s1
+select1ql right s1
+mulf left left lpan
+mergelq d1 left right
+
diff --git a/gst/audiofx/audiowsincband.c b/gst/audiofx/audiowsincband.c
new file mode 100644
index 0000000000..8a512f7003
--- /dev/null
+++ b/gst/audiofx/audiowsincband.c
@@ -0,0 +1,487 @@
+/* -*- c-basic-offset: 2 -*-
+ *
+ * GStreamer
+ * Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2006 Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>
+ * 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ *
+ * this windowed sinc filter is taken from the freely downloadable DSP book,
+ * "The Scientist and Engineer's Guide to Digital Signal Processing",
+ * chapter 16
+ * available at http://www.dspguide.com/
+ *
+ * For the window functions see
+ * http://en.wikipedia.org/wiki/Window_function
+ */
+
+/**
+ * SECTION:element-audiowsincband
+ * @title: audiowsincband
+ *
+ * Attenuates all frequencies outside (bandpass) or inside (bandreject) of a frequency
+ * band. The length parameter controls the rolloff, the window parameter
+ * controls rolloff and stopband attenuation. The Hamming window provides a faster rolloff but a bit
+ * worse stopband attenuation, the other way around for the Blackman window.
+ *
+ * This element has the advantage over the Chebyshev bandpass and bandreject filter that it has
+ * a much better rolloff when using a larger kernel size and almost linear phase. The only
+ * disadvantage is the much slower execution time with larger kernels.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc freq=1500 ! audioconvert ! audiowsincband mode=band-pass lower-frequency=3000 upper-frequency=10000 length=501 window=blackman ! audioconvert ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audiowsincband mode=band-reject lower-frequency=59 upper-frequency=61 length=10001 window=hamming ! audioconvert ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=white-noise ! audioconvert ! audiowsincband mode=band-pass lower-frequency=1000 upper-frequency=2000 length=31 ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiowsincband.h"
+
+#include "gst/glib-compat-private.h"
+
+#define GST_CAT_DEFAULT gst_gst_audio_wsincband_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+enum
+{
+ PROP_0,
+ PROP_LENGTH,
+ PROP_LOWER_FREQUENCY,
+ PROP_UPPER_FREQUENCY,
+ PROP_MODE,
+ PROP_WINDOW
+};
+
+enum
+{
+ MODE_BAND_PASS = 0,
+ MODE_BAND_REJECT
+};
+
+#define GST_TYPE_AUDIO_WSINC_BAND_MODE (gst_gst_audio_wsincband_mode_get_type ())
+static GType
+gst_gst_audio_wsincband_mode_get_type (void)
+{
+ static GType gtype = 0;
+
+ if (gtype == 0) {
+ static const GEnumValue values[] = {
+ {MODE_BAND_PASS, "Band pass (default)",
+ "band-pass"},
+ {MODE_BAND_REJECT, "Band reject",
+ "band-reject"},
+ {0, NULL, NULL}
+ };
+
+ gtype = g_enum_register_static ("GstAudioWSincBandMode", values);
+ }
+ return gtype;
+}
+
+enum
+{
+ WINDOW_HAMMING = 0,
+ WINDOW_BLACKMAN,
+ WINDOW_GAUSSIAN,
+ WINDOW_COSINE,
+ WINDOW_HANN
+};
+
+#define GST_TYPE_AUDIO_WSINC_BAND_WINDOW (gst_gst_audio_wsincband_window_get_type ())
+static GType
+gst_gst_audio_wsincband_window_get_type (void)
+{
+ static GType gtype = 0;
+
+ if (gtype == 0) {
+ static const GEnumValue values[] = {
+ {WINDOW_HAMMING, "Hamming window (default)",
+ "hamming"},
+ {WINDOW_BLACKMAN, "Blackman window",
+ "blackman"},
+ {WINDOW_GAUSSIAN, "Gaussian window",
+ "gaussian"},
+ {WINDOW_COSINE, "Cosine window",
+ "cosine"},
+ {WINDOW_HANN, "Hann window",
+ "hann"},
+ {0, NULL, NULL}
+ };
+
+ gtype = g_enum_register_static ("GstAudioWSincBandWindow", values);
+ }
+ return gtype;
+}
+
+#define gst_audio_wsincband_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincBand, gst_audio_wsincband,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audiowsincband, "audiowsincband",
+ GST_RANK_NONE, GST_TYPE_AUDIO_WSINC_BAND);
+
+static void gst_audio_wsincband_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_wsincband_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_audio_wsincband_finalize (GObject * object);
+
+static gboolean gst_audio_wsincband_setup (GstAudioFilter * base,
+ const GstAudioInfo * info);
+
+#define POW2(x) (x)*(x)
+
+static void
+gst_audio_wsincband_class_init (GstAudioWSincBandClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (gst_gst_audio_wsincband_debug, "audiowsincband", 0,
+ "Band-pass and Band-reject Windowed sinc filter plugin");
+
+ gobject_class->set_property = gst_audio_wsincband_set_property;
+ gobject_class->get_property = gst_audio_wsincband_get_property;
+ gobject_class->finalize = gst_audio_wsincband_finalize;
+
+ /* FIXME: Don't use the complete possible range but restrict the upper boundary
+ * so automatically generated UIs can use a slider */
+ g_object_class_install_property (gobject_class, PROP_LOWER_FREQUENCY,
+ g_param_spec_float ("lower-frequency", "Lower Frequency",
+ "Cut-off lower frequency (Hz)", 0.0, 100000.0, 0,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_UPPER_FREQUENCY,
+ g_param_spec_float ("upper-frequency", "Upper Frequency",
+ "Cut-off upper frequency (Hz)", 0.0, 100000.0, 0,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_LENGTH,
+ g_param_spec_int ("length", "Length",
+ "Filter kernel length, will be rounded to the next odd number", 3,
+ 256000, 101,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MODE,
+ g_param_spec_enum ("mode", "Mode",
+ "Band pass or band reject mode", GST_TYPE_AUDIO_WSINC_BAND_MODE,
+ MODE_BAND_PASS,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_WINDOW,
+ g_param_spec_enum ("window", "Window",
+ "Window function to use", GST_TYPE_AUDIO_WSINC_BAND_WINDOW,
+ WINDOW_HAMMING,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Band pass & band reject filter", "Filter/Effect/Audio",
+ "Band pass and band reject windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsincband_setup);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_WSINC_BAND_MODE, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_WSINC_BAND_WINDOW, 0);
+}
+
+static void
+gst_audio_wsincband_init (GstAudioWSincBand * self)
+{
+ self->kernel_length = 101;
+ self->lower_frequency = 0.0;
+ self->upper_frequency = 0.0;
+ self->mode = MODE_BAND_PASS;
+ self->window = WINDOW_HAMMING;
+
+ g_mutex_init (&self->lock);
+}
+
+static void
+gst_audio_wsincband_build_kernel (GstAudioWSincBand * self,
+ const GstAudioInfo * info)
+{
+ gint i = 0;
+ gdouble sum = 0.0;
+ gint len = 0;
+ gdouble *kernel_lp, *kernel_hp;
+ gdouble w;
+ gdouble *kernel;
+ gint rate, channels;
+
+ len = self->kernel_length;
+
+ if (info) {
+ rate = GST_AUDIO_INFO_RATE (info);
+ channels = GST_AUDIO_INFO_CHANNELS (info);
+ } else {
+ rate = GST_AUDIO_FILTER_RATE (self);
+ channels = GST_AUDIO_FILTER_CHANNELS (self);
+ }
+
+ if (rate == 0) {
+ GST_DEBUG ("rate not set yet");
+ return;
+ }
+
+ if (channels == 0) {
+ GST_DEBUG ("channels not set yet");
+ return;
+ }
+
+ /* Clamp frequencies */
+ self->lower_frequency = CLAMP (self->lower_frequency, 0.0, rate / 2);
+ self->upper_frequency = CLAMP (self->upper_frequency, 0.0, rate / 2);
+
+ if (self->lower_frequency > self->upper_frequency) {
+ gint tmp = self->lower_frequency;
+
+ self->lower_frequency = self->upper_frequency;
+ self->upper_frequency = tmp;
+ }
+
+ GST_DEBUG ("gst_audio_wsincband: initializing filter kernel of length %d "
+ "with lower frequency %.2lf Hz "
+ ", upper frequency %.2lf Hz for mode %s",
+ len, self->lower_frequency, self->upper_frequency,
+ (self->mode == MODE_BAND_PASS) ? "band-pass" : "band-reject");
+
+ /* fill the lp kernel */
+ w = 2 * G_PI * (self->lower_frequency / rate);
+ kernel_lp = g_new (gdouble, len);
+ for (i = 0; i < len; ++i) {
+ if (i == (len - 1) / 2.0)
+ kernel_lp[i] = w;
+ else
+ kernel_lp[i] = sin (w * (i - (len - 1) / 2.0)) / (i - (len - 1) / 2.0);
+
+ /* windowing */
+ switch (self->window) {
+ case WINDOW_HAMMING:
+ kernel_lp[i] *= (0.54 - 0.46 * cos (2 * G_PI * i / (len - 1)));
+ break;
+ case WINDOW_BLACKMAN:
+ kernel_lp[i] *= (0.42 - 0.5 * cos (2 * G_PI * i / (len - 1)) +
+ 0.08 * cos (4 * G_PI * i / (len - 1)));
+ break;
+ case WINDOW_GAUSSIAN:
+ kernel_lp[i] *= exp (-0.5 * POW2 (3.0 / len * (2 * i - (len - 1))));
+ break;
+ case WINDOW_COSINE:
+ kernel_lp[i] *= cos (G_PI * i / (len - 1) - G_PI / 2);
+ break;
+ case WINDOW_HANN:
+ kernel_lp[i] *= 0.5 * (1 - cos (2 * G_PI * i / (len - 1)));
+ break;
+ }
+ }
+
+ /* normalize for unity gain at DC */
+ sum = 0.0;
+ for (i = 0; i < len; ++i)
+ sum += kernel_lp[i];
+ for (i = 0; i < len; ++i)
+ kernel_lp[i] /= sum;
+
+ /* fill the hp kernel */
+ w = 2 * G_PI * (self->upper_frequency / rate);
+ kernel_hp = g_new (gdouble, len);
+ for (i = 0; i < len; ++i) {
+ if (i == (len - 1) / 2.0)
+ kernel_hp[i] = w;
+ else
+ kernel_hp[i] = sin (w * (i - (len - 1) / 2.0)) / (i - (len - 1) / 2.0);
+
+ /* Windowing */
+ switch (self->window) {
+ case WINDOW_HAMMING:
+ kernel_hp[i] *= (0.54 - 0.46 * cos (2 * G_PI * i / (len - 1)));
+ break;
+ case WINDOW_BLACKMAN:
+ kernel_hp[i] *= (0.42 - 0.5 * cos (2 * G_PI * i / (len - 1)) +
+ 0.08 * cos (4 * G_PI * i / (len - 1)));
+ break;
+ case WINDOW_GAUSSIAN:
+ kernel_hp[i] *= exp (-0.5 * POW2 (3.0 / len * (2 * i - (len - 1))));
+ break;
+ case WINDOW_COSINE:
+ kernel_hp[i] *= cos (G_PI * i / (len - 1) - G_PI / 2);
+ break;
+ case WINDOW_HANN:
+ kernel_hp[i] *= 0.5 * (1 - cos (2 * G_PI * i / (len - 1)));
+ break;
+ }
+ }
+
+ /* normalize for unity gain at DC */
+ sum = 0.0;
+ for (i = 0; i < len; ++i)
+ sum += kernel_hp[i];
+ for (i = 0; i < len; ++i)
+ kernel_hp[i] /= sum;
+
+ /* do spectral inversion to go from lowpass to highpass */
+ for (i = 0; i < len; ++i)
+ kernel_hp[i] = -kernel_hp[i];
+ if (len % 2 == 1) {
+ kernel_hp[(len - 1) / 2] += 1.0;
+ } else {
+ kernel_hp[len / 2 - 1] += 0.5;
+ kernel_hp[len / 2] += 0.5;
+ }
+
+ /* combine the two kernels */
+ kernel = g_new (gdouble, len);
+
+ for (i = 0; i < len; ++i)
+ kernel[i] = kernel_lp[i] + kernel_hp[i];
+
+ /* free the helper kernels */
+ g_free (kernel_lp);
+ g_free (kernel_hp);
+
+ /* do spectral inversion to go from bandreject to bandpass
+ * if specified */
+ if (self->mode == MODE_BAND_PASS) {
+ for (i = 0; i < len; ++i)
+ kernel[i] = -kernel[i];
+ kernel[len / 2] += 1;
+ }
+
+ gst_audio_fx_base_fir_filter_set_kernel (GST_AUDIO_FX_BASE_FIR_FILTER (self),
+ kernel, self->kernel_length, (len - 1) / 2, info);
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* get notified of caps and plug in the correct process function */
+static gboolean
+gst_audio_wsincband_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioWSincBand *self = GST_AUDIO_WSINC_BAND (base);
+
+ gst_audio_wsincband_build_kernel (self, info);
+
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
+}
+
+static void
+gst_audio_wsincband_finalize (GObject * object)
+{
+ GstAudioWSincBand *self = GST_AUDIO_WSINC_BAND (object);
+
+ g_mutex_clear (&self->lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_audio_wsincband_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioWSincBand *self = GST_AUDIO_WSINC_BAND (object);
+
+ g_return_if_fail (GST_IS_AUDIO_WSINC_BAND (self));
+
+ switch (prop_id) {
+ case PROP_LENGTH:{
+ gint val;
+
+ g_mutex_lock (&self->lock);
+ val = g_value_get_int (value);
+ if (val % 2 == 0)
+ val++;
+
+ if (val != self->kernel_length) {
+ gst_audio_fx_base_fir_filter_push_residue (GST_AUDIO_FX_BASE_FIR_FILTER
+ (self));
+ self->kernel_length = val;
+ gst_audio_wsincband_build_kernel (self, NULL);
+ }
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ case PROP_LOWER_FREQUENCY:
+ g_mutex_lock (&self->lock);
+ self->lower_frequency = g_value_get_float (value);
+ gst_audio_wsincband_build_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_UPPER_FREQUENCY:
+ g_mutex_lock (&self->lock);
+ self->upper_frequency = g_value_get_float (value);
+ gst_audio_wsincband_build_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_MODE:
+ g_mutex_lock (&self->lock);
+ self->mode = g_value_get_enum (value);
+ gst_audio_wsincband_build_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_WINDOW:
+ g_mutex_lock (&self->lock);
+ self->window = g_value_get_enum (value);
+ gst_audio_wsincband_build_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_wsincband_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioWSincBand *self = GST_AUDIO_WSINC_BAND (object);
+
+ switch (prop_id) {
+ case PROP_LENGTH:
+ g_value_set_int (value, self->kernel_length);
+ break;
+ case PROP_LOWER_FREQUENCY:
+ g_value_set_float (value, self->lower_frequency);
+ break;
+ case PROP_UPPER_FREQUENCY:
+ g_value_set_float (value, self->upper_frequency);
+ break;
+ case PROP_MODE:
+ g_value_set_enum (value, self->mode);
+ break;
+ case PROP_WINDOW:
+ g_value_set_enum (value, self->window);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/audiofx/audiowsincband.h b/gst/audiofx/audiowsincband.h
new file mode 100644
index 0000000000..d7795f2f8c
--- /dev/null
+++ b/gst/audiofx/audiowsincband.h
@@ -0,0 +1,82 @@
+/* -*- c-basic-offset: 2 -*-
+ *
+ * GStreamer
+ * Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2006 Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>
+ * 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ *
+ * this windowed sinc filter is taken from the freely downloadable DSP book,
+ * "The Scientist and Engineer's Guide to Digital Signal Processing",
+ * chapter 16
+ * available at http://www.dspguide.com/
+ *
+ */
+
+#ifndef __GST_AUDIO_WSINC_BAND_H__
+#define __GST_AUDIO_WSINC_BAND_H__
+
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiofxbasefirfilter.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_WSINC_BAND \
+ (gst_audio_wsincband_get_type())
+#define GST_AUDIO_WSINC_BAND(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_WSINC_BAND,GstAudioWSincBand))
+#define GST_AUDIO_WSINC_BAND_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_WSINC_BAND,GstAudioWSincBandClass))
+#define GST_IS_AUDIO_WSINC_BAND(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_WSINC_BAND))
+#define GST_IS_AUDIO_WSINC_BAND_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_WSINC_BAND))
+
+typedef struct _GstAudioWSincBand GstAudioWSincBand;
+typedef struct _GstAudioWSincBandClass GstAudioWSincBandClass;
+
+/**
+ * GstAudioWSincBand:
+ *
+ * Opaque data structure.
+ */
+struct _GstAudioWSincBand {
+ GstAudioFXBaseFIRFilter parent;
+
+ gint mode;
+ gint window;
+ gfloat lower_frequency, upper_frequency;
+ gint kernel_length; /* length of the filter kernel */
+
+ /* < private > */
+ GMutex lock;
+};
+
+struct _GstAudioWSincBandClass {
+ GstAudioFilterClass parent;
+};
+
+GType gst_audio_wsincband_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audiowsincband);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_WSINC_BAND_H__ */
diff --git a/gst/audiofx/audiowsinclimit.c b/gst/audiofx/audiowsinclimit.c
new file mode 100644
index 0000000000..dd5a1dfbad
--- /dev/null
+++ b/gst/audiofx/audiowsinclimit.c
@@ -0,0 +1,413 @@
+/* -*- c-basic-offset: 2 -*-
+ *
+ * GStreamer
+ * Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2006 Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>
+ * 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ *
+ * this windowed sinc filter is taken from the freely downloadable DSP book,
+ * "The Scientist and Engineer's Guide to Digital Signal Processing",
+ * chapter 16
+ * available at http://www.dspguide.com/
+ *
+ * For the window functions see
+ * http://en.wikipedia.org/wiki/Window_function
+ */
+
+/**
+ * SECTION:element-audiowsinclimit
+ * @title: audiowsinclimit
+ *
+ * Attenuates all frequencies above the cutoff frequency (low-pass) or all frequencies below the
+ * cutoff frequency (high-pass). The length parameter controls the rolloff, the window parameter
+ * controls rolloff and stopband attenuation. The Hamming window provides a faster rolloff but a bit
+ * worse stopband attenuation, the other way around for the Blackman window.
+ *
+ * This element has the advantage over the Chebyshev lowpass and highpass filter that it has
+ * a much better rolloff when using a larger kernel size and almost linear phase. The only
+ * disadvantage is the much slower execution time with larger kernels.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc freq=1500 ! audioconvert ! audiowsinclimit mode=low-pass cutoff=1000 length=501 ! audioconvert ! alsasink
+ * gst-launch-1.0 filesrc location="melo1.ogg" ! oggdemux ! vorbisdec ! audioconvert ! audiowsinclimit mode=high-pass cutoff=15000 length=501 ! audioconvert ! alsasink
+ * gst-launch-1.0 audiotestsrc wave=white-noise ! audioconvert ! audiowsinclimit mode=low-pass cutoff=1000 length=10001 window=blackman ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiowsinclimit.h"
+
+#include "gst/glib-compat-private.h"
+
+#define GST_CAT_DEFAULT gst_audio_wsinclimit_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+enum
+{
+ PROP_0,
+ PROP_LENGTH,
+ PROP_FREQUENCY,
+ PROP_MODE,
+ PROP_WINDOW
+};
+
+enum
+{
+ MODE_LOW_PASS = 0,
+ MODE_HIGH_PASS
+};
+
+#define GST_TYPE_AUDIO_WSINC_LIMIT_MODE (gst_audio_wsinclimit_mode_get_type ())
+static GType
+gst_audio_wsinclimit_mode_get_type (void)
+{
+ static GType gtype = 0;
+
+ if (gtype == 0) {
+ static const GEnumValue values[] = {
+ {MODE_LOW_PASS, "Low pass (default)",
+ "low-pass"},
+ {MODE_HIGH_PASS, "High pass",
+ "high-pass"},
+ {0, NULL, NULL}
+ };
+
+ gtype = g_enum_register_static ("GstAudioWSincLimitMode", values);
+ }
+ return gtype;
+}
+
+enum
+{
+ WINDOW_HAMMING = 0,
+ WINDOW_BLACKMAN,
+ WINDOW_GAUSSIAN,
+ WINDOW_COSINE,
+ WINDOW_HANN
+};
+
+#define GST_TYPE_AUDIO_WSINC_LIMIT_WINDOW (gst_audio_wsinclimit_window_get_type ())
+static GType
+gst_audio_wsinclimit_window_get_type (void)
+{
+ static GType gtype = 0;
+
+ if (gtype == 0) {
+ static const GEnumValue values[] = {
+ {WINDOW_HAMMING, "Hamming window (default)",
+ "hamming"},
+ {WINDOW_BLACKMAN, "Blackman window",
+ "blackman"},
+ {WINDOW_GAUSSIAN, "Gaussian window",
+ "gaussian"},
+ {WINDOW_COSINE, "Cosine window",
+ "cosine"},
+ {WINDOW_HANN, "Hann window",
+ "hann"},
+ {0, NULL, NULL}
+ };
+
+ gtype = g_enum_register_static ("GstAudioWSincLimitWindow", values);
+ }
+ return gtype;
+}
+
+#define gst_audio_wsinclimit_parent_class parent_class
+G_DEFINE_TYPE (GstAudioWSincLimit, gst_audio_wsinclimit,
+ GST_TYPE_AUDIO_FX_BASE_FIR_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (audiowsinclimit, "audiowsinclimit",
+ GST_RANK_NONE, GST_TYPE_AUDIO_WSINC_LIMIT);
+
+static void gst_audio_wsinclimit_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_audio_wsinclimit_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_audio_wsinclimit_finalize (GObject * object);
+
+static gboolean gst_audio_wsinclimit_setup (GstAudioFilter * base,
+ const GstAudioInfo * info);
+
+
+#define POW2(x) (x)*(x)
+
+static void
+gst_audio_wsinclimit_class_init (GstAudioWSincLimitClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstAudioFilterClass *filter_class = (GstAudioFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (gst_audio_wsinclimit_debug, "audiowsinclimit", 0,
+ "Low-pass and High-pass Windowed sinc filter plugin");
+
+ gobject_class->set_property = gst_audio_wsinclimit_set_property;
+ gobject_class->get_property = gst_audio_wsinclimit_get_property;
+ gobject_class->finalize = gst_audio_wsinclimit_finalize;
+
+ /* FIXME: Don't use the complete possible range but restrict the upper boundary
+ * so automatically generated UIs can use a slider */
+ g_object_class_install_property (gobject_class, PROP_FREQUENCY,
+ g_param_spec_float ("cutoff", "Cutoff",
+ "Cut-off Frequency (Hz)", 0.0, 100000.0, 0.0,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_LENGTH,
+ g_param_spec_int ("length", "Length",
+ "Filter kernel length, will be rounded to the next odd number",
+ 3, 256000, 101,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MODE,
+ g_param_spec_enum ("mode", "Mode",
+ "Low pass or high pass mode", GST_TYPE_AUDIO_WSINC_LIMIT_MODE,
+ MODE_LOW_PASS,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_WINDOW,
+ g_param_spec_enum ("window", "Window",
+ "Window function to use", GST_TYPE_AUDIO_WSINC_LIMIT_WINDOW,
+ WINDOW_HAMMING,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Low pass & high pass filter", "Filter/Effect/Audio",
+ "Low pass and high pass windowed sinc filter",
+ "Thomas Vander Stichele <thomas at apestaart dot org>, "
+ "Steven W. Smith, "
+ "Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ filter_class->setup = GST_DEBUG_FUNCPTR (gst_audio_wsinclimit_setup);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_WSINC_LIMIT_MODE, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_AUDIO_WSINC_LIMIT_WINDOW, 0);
+}
+
+static void
+gst_audio_wsinclimit_init (GstAudioWSincLimit * self)
+{
+ self->mode = MODE_LOW_PASS;
+ self->window = WINDOW_HAMMING;
+ self->kernel_length = 101;
+ self->cutoff = 0.0;
+
+ g_mutex_init (&self->lock);
+}
+
+static void
+gst_audio_wsinclimit_build_kernel (GstAudioWSincLimit * self,
+ const GstAudioInfo * info)
+{
+ gint i = 0;
+ gdouble sum = 0.0;
+ gint len = 0;
+ gdouble w;
+ gdouble *kernel = NULL;
+ gint rate, channels;
+
+ len = self->kernel_length;
+
+ if (info) {
+ rate = GST_AUDIO_INFO_RATE (info);
+ channels = GST_AUDIO_INFO_CHANNELS (info);
+ } else {
+ rate = GST_AUDIO_FILTER_RATE (self);
+ channels = GST_AUDIO_FILTER_CHANNELS (self);
+ }
+
+ if (rate == 0) {
+ GST_DEBUG ("rate not set yet");
+ return;
+ }
+
+ if (channels == 0) {
+ GST_DEBUG ("channels not set yet");
+ return;
+ }
+
+ /* Clamp cutoff frequency between 0 and the nyquist frequency */
+ self->cutoff = CLAMP (self->cutoff, 0.0, rate / 2);
+
+ GST_DEBUG ("gst_audio_wsinclimit_: initializing filter kernel of length %d "
+ "with cutoff %.2lf Hz "
+ "for mode %s",
+ len, self->cutoff,
+ (self->mode == MODE_LOW_PASS) ? "low-pass" : "high-pass");
+
+ /* fill the kernel */
+ w = 2 * G_PI * (self->cutoff / rate);
+
+ kernel = g_new (gdouble, len);
+
+ for (i = 0; i < len; ++i) {
+ if (i == (len - 1) / 2.0)
+ kernel[i] = w;
+ else
+ kernel[i] = sin (w * (i - (len - 1) / 2)) / (i - (len - 1) / 2.0);
+
+ /* windowing */
+ switch (self->window) {
+ case WINDOW_HAMMING:
+ kernel[i] *= (0.54 - 0.46 * cos (2 * G_PI * i / (len - 1)));
+ break;
+ case WINDOW_BLACKMAN:
+ kernel[i] *= (0.42 - 0.5 * cos (2 * G_PI * i / (len - 1)) +
+ 0.08 * cos (4 * G_PI * i / (len - 1)));
+ break;
+ case WINDOW_GAUSSIAN:
+ kernel[i] *= exp (-0.5 * POW2 (3.0 / len * (2 * i - (len - 1))));
+ break;
+ case WINDOW_COSINE:
+ kernel[i] *= cos (G_PI * i / (len - 1) - G_PI / 2);
+ break;
+ case WINDOW_HANN:
+ kernel[i] *= 0.5 * (1 - cos (2 * G_PI * i / (len - 1)));
+ break;
+ }
+ }
+
+ /* normalize for unity gain at DC */
+ for (i = 0; i < len; ++i)
+ sum += kernel[i];
+ for (i = 0; i < len; ++i)
+ kernel[i] /= sum;
+
+ /* convert to highpass if specified */
+ if (self->mode == MODE_HIGH_PASS) {
+ for (i = 0; i < len; ++i)
+ kernel[i] = -kernel[i];
+
+ if (len % 2 == 1) {
+ kernel[(len - 1) / 2] += 1.0;
+ } else {
+ kernel[len / 2 - 1] += 0.5;
+ kernel[len / 2] += 0.5;
+ }
+ }
+
+ gst_audio_fx_base_fir_filter_set_kernel (GST_AUDIO_FX_BASE_FIR_FILTER (self),
+ kernel, self->kernel_length, (len - 1) / 2, info);
+}
+
+/* GstAudioFilter vmethod implementations */
+
+/* get notified of caps and plug in the correct process function */
+static gboolean
+gst_audio_wsinclimit_setup (GstAudioFilter * base, const GstAudioInfo * info)
+{
+ GstAudioWSincLimit *self = GST_AUDIO_WSINC_LIMIT (base);
+
+ gst_audio_wsinclimit_build_kernel (self, info);
+
+ return GST_AUDIO_FILTER_CLASS (parent_class)->setup (base, info);
+}
+
+static void
+gst_audio_wsinclimit_finalize (GObject * object)
+{
+ GstAudioWSincLimit *self = GST_AUDIO_WSINC_LIMIT (object);
+
+ g_mutex_clear (&self->lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_audio_wsinclimit_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAudioWSincLimit *self = GST_AUDIO_WSINC_LIMIT (object);
+
+ g_return_if_fail (GST_IS_AUDIO_WSINC_LIMIT (self));
+
+ switch (prop_id) {
+ case PROP_LENGTH:{
+ gint val;
+
+ g_mutex_lock (&self->lock);
+ val = g_value_get_int (value);
+ if (val % 2 == 0)
+ val++;
+
+ if (val != self->kernel_length) {
+ gst_audio_fx_base_fir_filter_push_residue (GST_AUDIO_FX_BASE_FIR_FILTER
+ (self));
+ self->kernel_length = val;
+ gst_audio_wsinclimit_build_kernel (self, NULL);
+ }
+ g_mutex_unlock (&self->lock);
+ break;
+ }
+ case PROP_FREQUENCY:
+ g_mutex_lock (&self->lock);
+ self->cutoff = g_value_get_float (value);
+ gst_audio_wsinclimit_build_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_MODE:
+ g_mutex_lock (&self->lock);
+ self->mode = g_value_get_enum (value);
+ gst_audio_wsinclimit_build_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ case PROP_WINDOW:
+ g_mutex_lock (&self->lock);
+ self->window = g_value_get_enum (value);
+ gst_audio_wsinclimit_build_kernel (self, NULL);
+ g_mutex_unlock (&self->lock);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_audio_wsinclimit_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAudioWSincLimit *self = GST_AUDIO_WSINC_LIMIT (object);
+
+ switch (prop_id) {
+ case PROP_LENGTH:
+ g_value_set_int (value, self->kernel_length);
+ break;
+ case PROP_FREQUENCY:
+ g_value_set_float (value, self->cutoff);
+ break;
+ case PROP_MODE:
+ g_value_set_enum (value, self->mode);
+ break;
+ case PROP_WINDOW:
+ g_value_set_enum (value, self->window);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/audiofx/audiowsinclimit.h b/gst/audiofx/audiowsinclimit.h
new file mode 100644
index 0000000000..ffd24b57a9
--- /dev/null
+++ b/gst/audiofx/audiowsinclimit.h
@@ -0,0 +1,82 @@
+/* -*- c-basic-offset: 2 -*-
+ *
+ * GStreamer
+ * Copyright (C) 1999-2001 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2006 Dreamlab Technologies Ltd. <mathis.hofer@dreamlab.net>
+ * 2007-2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ *
+ * this windowed sinc filter is taken from the freely downloadable DSP book,
+ * "The Scientist and Engineer's Guide to Digital Signal Processing",
+ * chapter 16
+ * available at http://www.dspguide.com/
+ *
+ */
+
+#ifndef __GST_AUDIO_WSINC_LIMIT_H__
+#define __GST_AUDIO_WSINC_LIMIT_H__
+
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#include "audiofxbasefirfilter.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUDIO_WSINC_LIMIT \
+ (gst_audio_wsinclimit_get_type())
+#define GST_AUDIO_WSINC_LIMIT(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AUDIO_WSINC_LIMIT,GstAudioWSincLimit))
+#define GST_AUDIO_WSINC_LIMIT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AUDIO_WSINC_LIMIT,GstAudioWSincLimitClass))
+#define GST_IS_AUDIO_WSINC_LIMIT(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AUDIO_WSINC_LIMIT))
+#define GST_IS_AUDIO_WSINC_LIMIT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AUDIO_WSINC_LIMIT))
+
+typedef struct _GstAudioWSincLimit GstAudioWSincLimit;
+typedef struct _GstAudioWSincLimitClass GstAudioWSincLimitClass;
+
+/**
+ * GstAudioWSincLimit:
+ *
+ * Opaque data structure.
+ */
+struct _GstAudioWSincLimit {
+ GstAudioFXBaseFIRFilter parent;
+
+ gint mode;
+ gint window;
+ gfloat cutoff;
+ gint kernel_length;
+
+ /* < private > */
+ GMutex lock;
+};
+
+struct _GstAudioWSincLimitClass {
+ GstAudioFXBaseFIRFilterClass parent;
+};
+
+GType gst_audio_wsinclimit_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (audiowsinclimit);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIO_WSINC_LIMIT_H__ */
diff --git a/gst/audiofx/gstscaletempo.c b/gst/audiofx/gstscaletempo.c
new file mode 100644
index 0000000000..646181fca8
--- /dev/null
+++ b/gst/audiofx/gstscaletempo.c
@@ -0,0 +1,944 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Rov Juvano <rovjuvano@users.sourceforge.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-scaletempo
+ * @title: scaletempo
+ *
+ * Scale tempo while maintaining pitch
+ * (WSOLA-like technique with cross correlation)
+ * Inspired by SoundTouch library by Olli Parviainen
+ *
+ * Use scaletempo to apply playback rates without the chipmunk effect.
+ *
+ * ## Example pipelines
+ *
+ * |[
+ * filesrc location=media.ext ! decodebin name=d \
+ * d. ! queue ! audioconvert ! audioresample ! scaletempo ! audioconvert ! audioresample ! autoaudiosink \
+ * d. ! queue ! videoconvert ! autovideosink
+ * ]|
+ * OR
+ * |[
+ * playbin uri=... audio_sink="scaletempo ! audioconvert ! audioresample ! autoaudiosink"
+ * ]|
+ * When an application sends a seek event with rate != 1.0, Scaletempo applies
+ * the rate change by scaling the tempo without scaling the pitch.
+ *
+ * Scaletempo works by producing audio in constant sized chunks
+ * (#GstScaletempo:stride) but consuming chunks proportional to the playback
+ * rate.
+ *
+ * Scaletempo then smooths the output by blending the end of one stride with
+ * the next (#GstScaletempo:overlap).
+ *
+ * Scaletempo smooths the overlap further by searching within the input buffer
+ * for the best overlap position. Scaletempo uses a statistical cross
+ * correlation (roughly a dot-product). Scaletempo consumes most of its CPU
+ * cycles here. One can use the #GstScaletempo:search property to tune how far
+ * the algorithm looks.
+ *
+ */
+
+/*
+ * Note: frame = audio key unit (i.e. one sample for each channel)
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <string.h> /* for memset */
+
+#include "gstscaletempo.h"
+
GST_DEBUG_CATEGORY_STATIC (gst_scaletempo_debug);
#define GST_CAT_DEFAULT gst_scaletempo_debug

/* Filter signals and args */
/* No signals are defined; LAST_SIGNAL only terminates the enum. */
enum
{
  LAST_SIGNAL
};

/* Property IDs; PROP_0 is the customary GObject placeholder. */
enum
{
  PROP_0,
  PROP_RATE,
  PROP_STRIDE,
  PROP_OVERLAP,
  PROP_SEARCH,
};

/* Both pads accept interleaved native-endian F32, F64 or S16 audio. */
#define SUPPORTED_CAPS \
GST_STATIC_CAPS ( \
    GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F32)) ", layout=(string)interleaved; " \
    GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F64)) ", layout=(string)interleaved; " \
    GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (S16)) ", layout=(string)interleaved" \
)

static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    SUPPORTED_CAPS);

static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    SUPPORTED_CAPS);

/* Registers the debug category as part of type registration. */
#define DEBUG_INIT(bla) GST_DEBUG_CATEGORY_INIT (gst_scaletempo_debug, "scaletempo", 0, "scaletempo element");

#define gst_scaletempo_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstScaletempo, gst_scaletempo,
    GST_TYPE_BASE_TRANSFORM, DEBUG_INIT (0));
GST_ELEMENT_REGISTER_DEFINE (scaletempo, "scaletempo",
    GST_RANK_NONE, GST_TYPE_SCALETEMPO);
+
/* Generates best_overlap_offset_float() / best_overlap_offset_double().
 *
 * Finds, among the first frames_search frame positions of buf_queue, the
 * start offset whose overlap region best matches the saved tail of the
 * previous stride (buf_overlap).  The window-weighted previous tail is
 * precomputed into buf_pre_corr, then a dot product against each candidate
 * position selects the maximum correlation.  Returns the winning offset in
 * bytes.  Frame 0 of the overlap is skipped (loops start at
 * samples_per_frame), matching the zero entry of the window table.
 *
 * NOTE(review): best_corr is seeded with G_MININT (an int constant) even in
 * the float/double variants - it converts correctly, but a type-appropriate
 * minimum (e.g. -G_MAXFLOAT) would be clearer.
 */
#define CREATE_BEST_OVERLAP_OFFSET_FLOAT_FUNC(type) \
static guint \
best_overlap_offset_##type (GstScaletempo * st) \
{ \
  g##type *pw, *po, *ppc, *search_start; \
  g##type best_corr = G_MININT; \
  guint best_off = 0; \
  gint i, off; \
  \
  pw = st->table_window; \
  po = st->buf_overlap; \
  po += st->samples_per_frame; \
  ppc = st->buf_pre_corr; \
  for (i = st->samples_per_frame; i < st->samples_overlap; i++) { \
    *ppc++ = *pw++ * *po++; \
  } \
  \
  search_start = (g##type *) st->buf_queue + st->samples_per_frame; \
  for (off = 0; off < st->frames_search; off++) { \
    g##type corr = 0; \
    g##type *ps = search_start; \
    ppc = st->buf_pre_corr; \
    for (i = st->samples_per_frame; i < st->samples_overlap; i++) { \
      corr += *ppc++ * *ps++; \
    } \
    if (corr > best_corr) { \
      best_corr = corr; \
      best_off = off; \
    } \
    search_start += st->samples_per_frame; \
  } \
  \
  return best_off * st->bytes_per_frame; \
}

CREATE_BEST_OVERLAP_OFFSET_FLOAT_FUNC (float);
CREATE_BEST_OVERLAP_OFFSET_FLOAT_FUNC (double);
+
/* buffer padding for loop optimization: sizeof(gint32) * (loop_size - 1) */
#define UNROLL_PADDING (4*3)
/* Integer variant of best_overlap_offset (see the float macro above):
 * same search, but the precomputed window products are stored >>15 in a
 * gint32 buffer and the correlation is accumulated in 64 bits.  The inner
 * dot product is unrolled by 4; the UNROLL_PADDING bytes of zeroed slack
 * at the end of buf_pre_corr (set up in reinit_buffers) keep the unrolled
 * reads in bounds when the sample count is not a multiple of 4. */
static guint
best_overlap_offset_s16 (GstScaletempo * st)
{
  gint32 *pw, *ppc;
  gint16 *po, *search_start;
  gint64 best_corr = G_MININT64;
  guint best_off = 0;
  guint off;
  glong i;

  /* precompute window * previous-tail, scaled down to fit 32 bits */
  pw = st->table_window;
  po = st->buf_overlap;
  po += st->samples_per_frame;
  ppc = st->buf_pre_corr;
  for (i = st->samples_per_frame; i < st->samples_overlap; i++) {
    *ppc++ = (*pw++ * *po++) >> 15;
  }

  search_start = (gint16 *) st->buf_queue + st->samples_per_frame;
  for (off = 0; off < st->frames_search; off++) {
    gint64 corr = 0;
    gint16 *ps = search_start;
    ppc = st->buf_pre_corr;
    /* index i runs from -(overlap - frame) up to 0, four taps at a time */
    ppc += st->samples_overlap - st->samples_per_frame;
    ps += st->samples_overlap - st->samples_per_frame;
    i = -((glong) st->samples_overlap - (glong) st->samples_per_frame);
    do {
      corr += ppc[i + 0] * ps[i + 0];
      corr += ppc[i + 1] * ps[i + 1];
      corr += ppc[i + 2] * ps[i + 2];
      corr += ppc[i + 3] * ps[i + 3];
      i += 4;
    } while (i < 0);
    if (corr > best_corr) {
      best_corr = corr;
      best_off = off;
    }
    search_start += st->samples_per_frame;
  }

  return best_off * st->bytes_per_frame;
}
+
/* Generates output_overlap_float() / output_overlap_double().
 *
 * Cross-fades the saved tail of the previous stride (buf_overlap) into the
 * queued input at byte offset bytes_off:
 *   out = po - blend * (po - pin)
 * with blend ramping 0..1 from table_blend, i.e. a linear fade from the old
 * tail to the new data. */
#define CREATE_OUTPUT_OVERLAP_FLOAT_FUNC(type) \
static void \
output_overlap_##type (GstScaletempo * st, gpointer buf_out, guint bytes_off) \
{ \
  g##type *pout = buf_out; \
  g##type *pb = st->table_blend; \
  g##type *po = st->buf_overlap; \
  g##type *pin = (g##type *) (st->buf_queue + bytes_off); \
  gint i; \
  for (i = 0; i < st->samples_overlap; i++) { \
    *pout++ = *po - *pb++ * (*po - *pin++); \
    po++; \
  } \
}

CREATE_OUTPUT_OVERLAP_FLOAT_FUNC (float);
CREATE_OUTPUT_OVERLAP_FLOAT_FUNC (double);
+
+static void
+output_overlap_s16 (GstScaletempo * st, gpointer buf_out, guint bytes_off)
+{
+ gint16 *pout = buf_out;
+ gint32 *pb = st->table_blend;
+ gint16 *po = st->buf_overlap;
+ gint16 *pin = (gint16 *) (st->buf_queue + bytes_off);
+ gint i;
+ for (i = 0; i < st->samples_overlap; i++) {
+ *pout++ = *po - ((*pb++ * (*po - *pin++)) >> 16);
+ po++;
+ }
+}
+
/* Appends data from buf_in (starting at offset) into st->buf_queue.
 *
 * Any pending st->bytes_to_slide is consumed first: already-queued bytes
 * are dropped by sliding the queue down with memmove(); if the slide
 * exceeds the queue, the remainder is skipped directly from the input.
 * Then as much input as fits below bytes_queue_max is copied in.
 * Returns the number of input bytes consumed. */
static guint
fill_queue (GstScaletempo * st, GstBuffer * buf_in, guint offset)
{
  guint bytes_in = gst_buffer_get_size (buf_in) - offset;
  guint offset_unchanged = offset;
  GstMapInfo map;

  gst_buffer_map (buf_in, &map, GST_MAP_READ);
  if (st->bytes_to_slide > 0) {
    if (st->bytes_to_slide < st->bytes_queued) {
      /* keep the unconsumed tail of the queue, moved to the front */
      guint bytes_in_move = st->bytes_queued - st->bytes_to_slide;
      memmove (st->buf_queue, st->buf_queue + st->bytes_to_slide,
          bytes_in_move);
      st->bytes_to_slide = 0;
      st->bytes_queued = bytes_in_move;
    } else {
      /* whole queue is stale; skip the rest straight from the input */
      guint bytes_in_skip;
      st->bytes_to_slide -= st->bytes_queued;
      bytes_in_skip = MIN (st->bytes_to_slide, bytes_in);
      st->bytes_queued = 0;
      st->bytes_to_slide -= bytes_in_skip;
      offset += bytes_in_skip;
      bytes_in -= bytes_in_skip;
    }
  }

  if (bytes_in > 0) {
    /* copy whatever fits in the remaining queue space */
    guint bytes_in_copy =
        MIN (st->bytes_queue_max - st->bytes_queued, bytes_in);
    memcpy (st->buf_queue + st->bytes_queued, map.data + offset, bytes_in_copy);
    st->bytes_queued += bytes_in_copy;
    offset += bytes_in_copy;
  }
  gst_buffer_unmap (buf_in, &map);

  return offset - offset_unchanged;
}
+
/* (Re)computes all derived sizes and lookup tables from the current
 * caps (format, rate, channels) and properties (ms_stride,
 * percent_overlap, ms_search):
 *   - stride/overlap/standing byte counts,
 *   - the blend ramp (table_blend) and the matching output_overlap fn,
 *   - the correlation window (table_window), buf_pre_corr and the
 *     matching best_overlap_offset fn,
 *   - the input queue (buf_queue) sized to search + stride + overlap.
 * Also re-posts a latency message when the queue-induced latency changed.
 * Called lazily from transform_size() whenever reinit_buffers is set. */
static void
reinit_buffers (GstScaletempo * st)
{
  gint i, j;
  guint frames_overlap;
  guint new_size;
  GstClockTime latency;

  guint frames_stride = st->ms_stride * st->sample_rate / 1000.0;
  st->bytes_stride = frames_stride * st->bytes_per_frame;

  /* overlap */
  frames_overlap = frames_stride * st->percent_overlap;
  if (frames_overlap < 1) {     /* if no overlap */
    st->bytes_overlap = 0;
    st->bytes_standing = st->bytes_stride;
    st->samples_standing = st->bytes_standing / st->bytes_per_sample;
    st->output_overlap = NULL;
  } else {
    guint prev_overlap = st->bytes_overlap;
    st->bytes_overlap = frames_overlap * st->bytes_per_frame;
    st->samples_overlap = frames_overlap * st->samples_per_frame;
    st->bytes_standing = st->bytes_stride - st->bytes_overlap;
    st->samples_standing = st->bytes_standing / st->bytes_per_sample;
    st->buf_overlap = g_realloc (st->buf_overlap, st->bytes_overlap);
    /* S16 uses gint32 blend table, floats/doubles use their respective type */
    st->table_blend =
        g_realloc (st->table_blend,
        st->samples_overlap * (st->format ==
            GST_AUDIO_FORMAT_S16 ? 4 : st->bytes_per_sample));
    /* zero only the newly grown part so the old tail keeps blending */
    if (st->bytes_overlap > prev_overlap) {
      memset ((guint8 *) st->buf_overlap + prev_overlap, 0,
          st->bytes_overlap - prev_overlap);
    }
    if (st->format == GST_AUDIO_FORMAT_S16) {
      /* Q16 fixed-point ramp 0..~1, one entry per sample of the overlap */
      gint32 *pb = st->table_blend;
      gint64 blend = 0;
      for (i = 0; i < frames_overlap; i++) {
        gint32 v = blend / frames_overlap;
        for (j = 0; j < st->samples_per_frame; j++) {
          *pb++ = v;
        }
        blend += 65535;         /* 2^16 */
      }
      st->output_overlap = output_overlap_s16;
    } else if (st->format == GST_AUDIO_FORMAT_F32) {
      /* linear ramp i/frames_overlap, replicated per channel */
      gfloat *pb = st->table_blend;
      gfloat t = (gfloat) frames_overlap;
      for (i = 0; i < frames_overlap; i++) {
        gfloat v = i / t;
        for (j = 0; j < st->samples_per_frame; j++) {
          *pb++ = v;
        }
      }
      st->output_overlap = output_overlap_float;
    } else {
      gdouble *pb = st->table_blend;
      gdouble t = (gdouble) frames_overlap;
      for (i = 0; i < frames_overlap; i++) {
        gdouble v = i / t;
        for (j = 0; j < st->samples_per_frame; j++) {
          *pb++ = v;
        }
      }
      st->output_overlap = output_overlap_double;
    }
  }

  /* best overlap */
  st->frames_search =
      (frames_overlap <= 1) ? 0 : st->ms_search * st->sample_rate / 1000.0;
  if (st->frames_search < 1) {  /* if no search */
    st->best_overlap_offset = NULL;
  } else {
    /* S16 uses gint32 buffer, floats/doubles use their respective type */
    guint bytes_pre_corr =
        (st->samples_overlap - st->samples_per_frame) * (st->format ==
        GST_AUDIO_FORMAT_S16 ? 4 : st->bytes_per_sample);
    st->buf_pre_corr =
        g_realloc (st->buf_pre_corr, bytes_pre_corr + UNROLL_PADDING);
    st->table_window = g_realloc (st->table_window, bytes_pre_corr);
    if (st->format == GST_AUDIO_FORMAT_S16) {
      /* parabolic window i*(t-i), scaled so the peak fits in 32 bits */
      gint64 t = frames_overlap;
      gint32 n = 8589934588LL / (t * t);        /* 4 * (2^31 - 1) / t^2 */
      gint32 *pw;

      /* zero the slack that the unrolled s16 loop may read past the end */
      memset ((guint8 *) st->buf_pre_corr + bytes_pre_corr, 0, UNROLL_PADDING);
      pw = st->table_window;
      for (i = 1; i < frames_overlap; i++) {
        gint32 v = (i * (t - i) * n) >> 15;
        for (j = 0; j < st->samples_per_frame; j++) {
          *pw++ = v;
        }
      }
      st->best_overlap_offset = best_overlap_offset_s16;
    } else if (st->format == GST_AUDIO_FORMAT_F32) {
      gfloat *pw = st->table_window;
      for (i = 1; i < frames_overlap; i++) {
        gfloat v = i * (frames_overlap - i);
        for (j = 0; j < st->samples_per_frame; j++) {
          *pw++ = v;
        }
      }
      st->best_overlap_offset = best_overlap_offset_float;
    } else {
      gdouble *pw = st->table_window;
      for (i = 1; i < frames_overlap; i++) {
        gdouble v = i * (frames_overlap - i);
        for (j = 0; j < st->samples_per_frame; j++) {
          *pw++ = v;
        }
      }
      st->best_overlap_offset = best_overlap_offset_double;
    }
  }

  /* resize the input queue, preserving as much queued data as possible */
  new_size =
      (st->frames_search + frames_stride +
      frames_overlap) * st->bytes_per_frame;
  if (st->bytes_queued > new_size) {
    if (st->bytes_to_slide > st->bytes_queued) {
      st->bytes_to_slide -= st->bytes_queued;
      st->bytes_queued = 0;
    } else {
      guint new_queued = MIN (st->bytes_queued - st->bytes_to_slide, new_size);
      memmove (st->buf_queue,
          st->buf_queue + st->bytes_queued - new_queued, new_queued);
      st->bytes_to_slide = 0;
      st->bytes_queued = new_queued;
    }
  }

  st->bytes_queue_max = new_size;
  st->buf_queue = g_realloc (st->buf_queue, st->bytes_queue_max);

  /* the full queue must be buffered before output: that is our latency */
  latency =
      gst_util_uint64_scale (st->bytes_queue_max, GST_SECOND,
      st->bytes_per_frame * st->sample_rate);
  if (st->latency != latency) {
    st->latency = latency;
    gst_element_post_message (GST_ELEMENT (st),
        gst_message_new_latency (GST_OBJECT (st)));
  }

  st->bytes_stride_scaled = st->bytes_stride * st->scale;
  st->frames_stride_scaled = st->bytes_stride_scaled / st->bytes_per_frame;

  GST_DEBUG
      ("%.3f scale, %.3f stride_in, %i stride_out, %i standing, %i overlap, %i search, %i queue, %s mode",
      st->scale, st->frames_stride_scaled,
      (gint) (st->bytes_stride / st->bytes_per_frame),
      (gint) (st->bytes_standing / st->bytes_per_frame),
      (gint) (st->bytes_overlap / st->bytes_per_frame), st->frames_search,
      (gint) (st->bytes_queue_max / st->bytes_per_frame),
      gst_audio_format_to_string (st->format));

  st->reinit_buffers = FALSE;
}
+
/* Returns a newly allocated copy of inbuf with the frame order reversed
 * (used for negative playback rates).  Frames are copied one at a time
 * from the front of the input to the back of the output, keeping the
 * sample order within each frame.
 *
 * F64 copies 8 bytes per sample via gint64; every other format goes
 * through the gint32 path.  NOTE(review): the gint32 path assumes
 * 4 bytes per sample, which matches F32 but not S16 (2 bytes/sample) -
 * verify whether reverse playback of S16 reorders frames correctly. */
static GstBuffer *
reverse_buffer (GstScaletempo * st, GstBuffer * inbuf)
{
  GstBuffer *outbuf;
  GstMapInfo imap, omap;

  gst_buffer_map (inbuf, &imap, GST_MAP_READ);
  outbuf = gst_buffer_new_and_alloc (imap.size);
  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);

  if (st->format == GST_AUDIO_FORMAT_F64) {
    const gint64 *ip = (const gint64 *) imap.data;
    gint64 *op = (gint64 *) (omap.data + omap.size - 8 * st->samples_per_frame);
    guint i, n = imap.size / (8 * st->samples_per_frame);
    guint j, c = st->samples_per_frame;

    for (i = 0; i < n; i++) {
      for (j = 0; j < c; j++)
        op[j] = ip[j];
      op -= c;
      ip += c;
    }
  } else {
    const gint32 *ip = (const gint32 *) imap.data;
    gint32 *op = (gint32 *) (omap.data + omap.size - 4 * st->samples_per_frame);
    guint i, n = imap.size / (4 * st->samples_per_frame);
    guint j, c = st->samples_per_frame;

    for (i = 0; i < n; i++) {
      for (j = 0; j < c; j++)
        op[j] = ip[j];
      op -= c;
      ip += c;
    }
  }

  gst_buffer_unmap (inbuf, &imap);
  gst_buffer_unmap (outbuf, &omap);

  return outbuf;
}
+
+/* GstBaseTransform vmethod implementations */
/* Core WSOLA loop: queues the input, and for every full queue emits one
 * fixed-size output stride (cross-faded at the best overlap position)
 * while sliding the input forward by scale * stride frames.  Output
 * timestamps are the input times divided by the scale, compensated for
 * the queue latency. */
static GstFlowReturn
gst_scaletempo_transform (GstBaseTransform * trans,
    GstBuffer * inbuf, GstBuffer * outbuf)
{
  GstScaletempo *st = GST_SCALETEMPO (trans);
  gint8 *pout;
  guint offset_in, bytes_out;
  GstMapInfo omap;
  GstClockTime timestamp;
  GstBuffer *tmpbuf = NULL;

  /* for negative rates, process a frame-reversed copy of the input */
  if (st->reverse)
    tmpbuf = reverse_buffer (st, inbuf);

  gst_buffer_map (outbuf, &omap, GST_MAP_WRITE);
  pout = (gint8 *) omap.data;
  bytes_out = omap.size;        /* NOTE(review): dead store - reset to 0 below */

  offset_in = fill_queue (st, tmpbuf ? tmpbuf : inbuf, 0);
  bytes_out = 0;
  while (st->bytes_queued >= st->bytes_queue_max) {
    guint bytes_off = 0;
    gdouble frames_to_slide;
    guint frames_to_stride_whole;

    /* output stride */
    if (st->output_overlap) {
      if (st->best_overlap_offset) {
        /* pick the queue position that best matches the previous tail */
        bytes_off = st->best_overlap_offset (st);
      }
      /* cross-fade the previous tail into the chosen position */
      st->output_overlap (st, pout, bytes_off);
    }
    /* copy the non-overlapped remainder of the stride verbatim */
    memcpy (pout + st->bytes_overlap,
        st->buf_queue + bytes_off + st->bytes_overlap, st->bytes_standing);
    pout += st->bytes_stride;
    bytes_out += st->bytes_stride;

    /* input stride */
    /* remember this stride's tail for blending with the next one */
    memcpy (st->buf_overlap,
        st->buf_queue + bytes_off + st->bytes_stride, st->bytes_overlap);
    /* advance input by scale * stride, carrying the fractional error */
    frames_to_slide = st->frames_stride_scaled + st->frames_stride_error;
    frames_to_stride_whole = (gint) frames_to_slide;
    st->bytes_to_slide = frames_to_stride_whole * st->bytes_per_frame;
    st->frames_stride_error = frames_to_slide - frames_to_stride_whole;

    offset_in += fill_queue (st, tmpbuf ? tmpbuf : inbuf, offset_in);
  }
  gst_buffer_unmap (outbuf, &omap);

  /* segment-relative input time, clamped after latency compensation */
  if (st->reverse) {
    timestamp = st->in_segment.stop - GST_BUFFER_TIMESTAMP (inbuf);
    if (timestamp < st->latency)
      timestamp = 0;
    else
      timestamp -= st->latency;
  } else {
    timestamp = GST_BUFFER_TIMESTAMP (inbuf) - st->in_segment.start;
    if (timestamp < st->latency)
      timestamp = 0;
    else
      timestamp -= st->latency;
  }
  /* map into output (rate-1.0) time */
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp / st->scale + st->in_segment.start;
  GST_BUFFER_DURATION (outbuf) =
      gst_util_uint64_scale (bytes_out, GST_SECOND,
      st->bytes_per_frame * st->sample_rate);
  gst_buffer_set_size (outbuf, bytes_out);

  if (tmpbuf)
    gst_buffer_unref (tmpbuf);

  return GST_FLOW_OK;
}
+
+static GstFlowReturn
+gst_scaletempo_submit_input_buffer (GstBaseTransform * trans,
+ gboolean is_discont, GstBuffer * input)
+{
+ GstScaletempo *scaletempo = GST_SCALETEMPO (trans);
+
+ if (scaletempo->in_segment.format == GST_FORMAT_TIME) {
+ input =
+ gst_audio_buffer_clip (input, &scaletempo->in_segment,
+ scaletempo->sample_rate, scaletempo->bytes_per_frame);
+ if (!input)
+ return GST_FLOW_OK;
+ }
+
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->submit_input_buffer (trans,
+ is_discont, input);
+}
+
/* Sink->src size conversion: from `size` input bytes plus what is already
 * queued, predicts how many whole output strides transform() will emit.
 * Also the lazy trigger for reinit_buffers(). */
static gboolean
gst_scaletempo_transform_size (GstBaseTransform * trans,
    GstPadDirection direction,
    GstCaps * caps, gsize size, GstCaps * othercaps, gsize * othersize)
{
  if (direction == GST_PAD_SINK) {
    GstScaletempo *scaletempo = GST_SCALETEMPO (trans);
    gint bytes_to_out;

    /* rebuild tables/queue before doing any size math */
    if (scaletempo->reinit_buffers)
      reinit_buffers (scaletempo);

    bytes_to_out = size + scaletempo->bytes_queued - scaletempo->bytes_to_slide;
    if (bytes_to_out < (gint) scaletempo->bytes_queue_max) {
      /* not enough data to fill the queue: no output yet */
      *othersize = 0;
    } else {
      /* while (total_buffered - stride_length * n >= queue_max) n++ */
      *othersize = scaletempo->bytes_stride * ((guint) (
              (bytes_to_out - scaletempo->bytes_queue_max +
                  /* rounding protection */ scaletempo->bytes_per_frame)
              / scaletempo->bytes_stride_scaled) + 1);
    }

    return TRUE;
  }
  return FALSE;
}
+
/* Handles SEGMENT (captures |rate|/direction, toggles passthrough,
 * rewrites the downstream segment to rate 1.0 with applied_rate set),
 * FLUSH_STOP (resets segments) and GAP (rescales position/duration).
 * Everything else is forwarded to the base class unchanged. */
static gboolean
gst_scaletempo_sink_event (GstBaseTransform * trans, GstEvent * event)
{
  GstScaletempo *scaletempo = GST_SCALETEMPO (trans);

  if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
    GstSegment segment;

    gst_event_copy_segment (event, &segment);

    /* reconfigure when the format, |rate| or direction changed;
     * "! !x" normalizes both sides to 0/1 before comparing directions */
    if (segment.format != GST_FORMAT_TIME
        || scaletempo->scale != ABS (segment.rate)
        || ! !scaletempo->reverse != ! !(segment.rate < 0.0)) {
      if (segment.format != GST_FORMAT_TIME || ABS (segment.rate - 1.0) < 1e-10) {
        /* effectively rate 1.0: nothing to do, become passthrough */
        scaletempo->scale = 1.0;
        gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (scaletempo),
            TRUE);
      } else {
        gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (scaletempo),
            FALSE);
        scaletempo->scale = ABS (segment.rate);
        scaletempo->reverse = segment.rate < 0.0;
        scaletempo->bytes_stride_scaled =
            scaletempo->bytes_stride * scaletempo->scale;
        scaletempo->frames_stride_scaled =
            scaletempo->bytes_stride_scaled / scaletempo->bytes_per_frame;
        GST_DEBUG ("%.3f scale, %.3f stride_in, %i stride_out",
            scaletempo->scale, scaletempo->frames_stride_scaled,
            (gint) (scaletempo->bytes_stride / scaletempo->bytes_per_frame));

        scaletempo->bytes_to_slide = 0;
      }
    }

    scaletempo->in_segment = segment;
    scaletempo->out_segment = segment;

    if (scaletempo->scale != 1.0 || scaletempo->reverse) {
      guint32 seqnum;

      /* downstream sees rate 1.0; the original rate moves to applied_rate */
      segment.applied_rate = segment.rate;
      segment.rate = 1.0;

      /* shrink/stretch the segment stop to scaled output time */
      if (segment.stop != -1) {
        segment.stop =
            (segment.stop - segment.start) / ABS (segment.applied_rate) +
            segment.start;
      }

      scaletempo->out_segment = segment;

      /* replace the event, preserving its seqnum */
      seqnum = gst_event_get_seqnum (event);
      gst_event_unref (event);

      event = gst_event_new_segment (&segment);
      gst_event_set_seqnum (event, seqnum);

      return gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (trans), event);
    }
  } else if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
    gst_segment_init (&scaletempo->in_segment, GST_FORMAT_UNDEFINED);
    gst_segment_init (&scaletempo->out_segment, GST_FORMAT_UNDEFINED);
  } else if (GST_EVENT_TYPE (event) == GST_EVENT_GAP) {
    if (scaletempo->scale != 1.0) {
      /* translate the gap position/duration into scaled output time */
      GstClockTime gap_ts, gap_duration;
      gst_event_parse_gap (event, &gap_ts, &gap_duration);
      if (scaletempo->reverse) {
        gap_ts = scaletempo->in_segment.stop - gap_ts;
      } else {
        gap_ts = gap_ts - scaletempo->in_segment.start;
      }
      gap_ts = gap_ts / scaletempo->scale + scaletempo->in_segment.start;
      if (GST_CLOCK_TIME_IS_VALID (gap_duration)) {
        gap_duration = gap_duration / ABS (scaletempo->scale);
      }
      gst_event_unref (event);
      event = gst_event_new_gap (gap_ts, gap_duration);
    }
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
}
+
+static gboolean
+gst_scaletempo_set_caps (GstBaseTransform * trans,
+ GstCaps * incaps, GstCaps * outcaps)
+{
+ GstScaletempo *scaletempo = GST_SCALETEMPO (trans);
+
+ gint width, bps, nch, rate;
+ GstAudioInfo info;
+ GstAudioFormat format;
+
+ if (!gst_audio_info_from_caps (&info, incaps))
+ return FALSE;
+
+ nch = GST_AUDIO_INFO_CHANNELS (&info);
+ rate = GST_AUDIO_INFO_RATE (&info);
+ width = GST_AUDIO_INFO_WIDTH (&info);
+ format = GST_AUDIO_INFO_FORMAT (&info);
+
+ bps = width / 8;
+
+ GST_DEBUG ("caps: %" GST_PTR_FORMAT ", %d bps", incaps, bps);
+
+ if (rate != scaletempo->sample_rate
+ || nch != scaletempo->samples_per_frame
+ || bps != scaletempo->bytes_per_sample || format != scaletempo->format) {
+ scaletempo->sample_rate = rate;
+ scaletempo->samples_per_frame = nch;
+ scaletempo->bytes_per_sample = bps;
+ scaletempo->bytes_per_frame = nch * bps;
+ scaletempo->format = format;
+ scaletempo->reinit_buffers = TRUE;
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_scaletempo_start (GstBaseTransform * trans)
+{
+ GstScaletempo *scaletempo = GST_SCALETEMPO (trans);
+
+ gst_segment_init (&scaletempo->in_segment, GST_FORMAT_UNDEFINED);
+ gst_segment_init (&scaletempo->out_segment, GST_FORMAT_UNDEFINED);
+ scaletempo->reinit_buffers = TRUE;
+
+ return TRUE;
+}
+
+static gboolean
+gst_scaletempo_stop (GstBaseTransform * trans)
+{
+ GstScaletempo *scaletempo = GST_SCALETEMPO (trans);
+
+ g_free (scaletempo->buf_queue);
+ scaletempo->buf_queue = NULL;
+ g_free (scaletempo->buf_overlap);
+ scaletempo->buf_overlap = NULL;
+ g_free (scaletempo->table_blend);
+ scaletempo->table_blend = NULL;
+ g_free (scaletempo->buf_pre_corr);
+ scaletempo->buf_pre_corr = NULL;
+ g_free (scaletempo->table_window);
+ scaletempo->table_window = NULL;
+ scaletempo->reinit_buffers = TRUE;
+
+ return TRUE;
+}
+
/* SRC-direction queries: answers SEGMENT from the rewritten out_segment
 * (rate 1.0 view) and adds the queue latency to upstream LATENCY
 * replies; everything else defers to the base class. */
static gboolean
gst_scaletempo_query (GstBaseTransform * trans, GstPadDirection direction,
    GstQuery * query)
{
  GstScaletempo *scaletempo = GST_SCALETEMPO (trans);

  if (direction == GST_PAD_SRC) {
    switch (GST_QUERY_TYPE (query)) {
      case GST_QUERY_SEGMENT:
      {
        GstFormat format;
        gint64 start, stop;

        format = scaletempo->out_segment.format;

        start =
            gst_segment_to_stream_time (&scaletempo->out_segment, format,
            scaletempo->out_segment.start);
        /* fall back to the duration when no explicit stop is set */
        if ((stop = scaletempo->out_segment.stop) == -1)
          stop = scaletempo->out_segment.duration;
        else
          stop =
              gst_segment_to_stream_time (&scaletempo->out_segment, format,
              stop);

        gst_query_set_segment (query, scaletempo->out_segment.rate, format,
            start, stop);
        return TRUE;
      }
      case GST_QUERY_LATENCY:{
        GstPad *peer;

        /* forward upstream, then add our own queue-induced latency */
        if ((peer = gst_pad_get_peer (GST_BASE_TRANSFORM_SINK_PAD (trans)))) {
          if ((gst_pad_query (peer, query))) {
            GstClockTime min, max;
            gboolean live;

            gst_query_parse_latency (query, &live, &min, &max);

            GST_DEBUG_OBJECT (scaletempo, "Peer latency: min %"
                GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
                GST_TIME_ARGS (min), GST_TIME_ARGS (max));

            /* add our own latency */
            GST_DEBUG_OBJECT (scaletempo, "Our latency: %" GST_TIME_FORMAT,
                GST_TIME_ARGS (scaletempo->latency));
            min += scaletempo->latency;
            if (max != GST_CLOCK_TIME_NONE)
              max += scaletempo->latency;

            GST_DEBUG_OBJECT (scaletempo, "Calculated total latency : min %"
                GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
                GST_TIME_ARGS (min), GST_TIME_ARGS (max));
            gst_query_set_latency (query, live, min, max);
          }
          gst_object_unref (peer);
        }

        return TRUE;
      }
      default:{
        return GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
            query);
      }
    }
  } else {
    return GST_BASE_TRANSFORM_CLASS (parent_class)->query (trans, direction,
        query);
  }
}
+
+/* GObject vmethod implementations */
+static void
+gst_scaletempo_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstScaletempo *scaletempo = GST_SCALETEMPO (object);
+
+ switch (prop_id) {
+ case PROP_RATE:
+ g_value_set_double (value, scaletempo->scale);
+ break;
+ case PROP_STRIDE:
+ g_value_set_uint (value, scaletempo->ms_stride);
+ break;
+ case PROP_OVERLAP:
+ g_value_set_double (value, scaletempo->percent_overlap);
+ break;
+ case PROP_SEARCH:
+ g_value_set_uint (value, scaletempo->ms_search);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_scaletempo_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstScaletempo *scaletempo = GST_SCALETEMPO (object);
+
+ switch (prop_id) {
+ case PROP_STRIDE:{
+ guint new_value = g_value_get_uint (value);
+ if (scaletempo->ms_stride != new_value) {
+ scaletempo->ms_stride = new_value;
+ scaletempo->reinit_buffers = TRUE;
+ }
+ break;
+ }
+ case PROP_OVERLAP:{
+ gdouble new_value = g_value_get_double (value);
+ if (scaletempo->percent_overlap != new_value) {
+ scaletempo->percent_overlap = new_value;
+ scaletempo->reinit_buffers = TRUE;
+ }
+ break;
+ }
+ case PROP_SEARCH:{
+ guint new_value = g_value_get_uint (value);
+ if (scaletempo->ms_search != new_value) {
+ scaletempo->ms_search = new_value;
+ scaletempo->reinit_buffers = TRUE;
+ }
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
/* Class init: installs the four properties, registers the static pad
 * templates/metadata, and wires up the GstBaseTransform vmethods. */
static void
gst_scaletempo_class_init (GstScaletempoClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *basetransform_class = GST_BASE_TRANSFORM_CLASS (klass);

  gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_scaletempo_get_property);
  gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_scaletempo_set_property);

  /* read-only: reflects the rate of the current segment */
  g_object_class_install_property (gobject_class, PROP_RATE,
      g_param_spec_double ("rate", "Playback Rate", "Current playback rate",
          G_MININT, G_MAXINT, 1.0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_STRIDE,
      g_param_spec_uint ("stride", "Stride Length",
          "Length in milliseconds to output each stride", 1, 5000, 30,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_OVERLAP,
      g_param_spec_double ("overlap", "Overlap Length",
          "Percentage of stride to overlap", 0, 1, .2,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_SEARCH,
      g_param_spec_uint ("search", "Search Length",
          "Length in milliseconds to search for best overlap position", 0, 500,
          14, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (gstelement_class, &src_template);
  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
  gst_element_class_set_static_metadata (gstelement_class, "Scaletempo",
      "Filter/Effect/Rate/Audio",
      "Sync audio tempo with playback rate",
      "Rov Juvano <rovjuvano@users.sourceforge.net>");

  basetransform_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_scaletempo_sink_event);
  basetransform_class->set_caps = GST_DEBUG_FUNCPTR (gst_scaletempo_set_caps);
  basetransform_class->transform_size =
      GST_DEBUG_FUNCPTR (gst_scaletempo_transform_size);
  basetransform_class->transform = GST_DEBUG_FUNCPTR (gst_scaletempo_transform);
  basetransform_class->query = GST_DEBUG_FUNCPTR (gst_scaletempo_query);
  basetransform_class->start = GST_DEBUG_FUNCPTR (gst_scaletempo_start);
  basetransform_class->stop = GST_DEBUG_FUNCPTR (gst_scaletempo_stop);
  basetransform_class->submit_input_buffer =
      GST_DEBUG_FUNCPTR (gst_scaletempo_submit_input_buffer);
}
+
/* Instance init: property defaults; the remaining state is filled in by
 * set_caps()/sink_event() and reinit_buffers(). */
static void
gst_scaletempo_init (GstScaletempo * scaletempo)
{
  /* defaults */
  scaletempo->ms_stride = 30;
  scaletempo->percent_overlap = .2;
  scaletempo->ms_search = 14;

  /* uninitialized */
  scaletempo->scale = 0;
  scaletempo->sample_rate = 0;
  scaletempo->frames_stride_error = 0;
  scaletempo->bytes_stride = 0;
  scaletempo->bytes_queued = 0;
  scaletempo->bytes_to_slide = 0;
  gst_segment_init (&scaletempo->in_segment, GST_FORMAT_UNDEFINED);
  gst_segment_init (&scaletempo->out_segment, GST_FORMAT_UNDEFINED);
}
diff --git a/gst/audiofx/gstscaletempo.h b/gst/audiofx/gstscaletempo.h
new file mode 100644
index 0000000000..02760fb6e3
--- /dev/null
+++ b/gst/audiofx/gstscaletempo.h
@@ -0,0 +1,101 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Rov Juvano <rovjuvano@users.sourceforge.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
#ifndef __GST_SCALETEMPO_H__
#define __GST_SCALETEMPO_H__

#include <gst/gst.h>
#include <gst/base/gstbasetransform.h>
/* NOTE(review): GstAudioFormat below requires <gst/audio/audio.h>; this
 * header relies on the including .c pulling it in first - confirm. */

G_BEGIN_DECLS

#define GST_TYPE_SCALETEMPO (gst_scaletempo_get_type())
#define GST_SCALETEMPO(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_SCALETEMPO, GstScaletempo))
#define GST_SCALETEMPO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_SCALETEMPO, GstScaletempoClass))
#define GST_IS_SCALETEMPO(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_SCALETEMPO))
#define GST_IS_SCALETEMPO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_SCALETEMPO))

typedef struct _GstScaletempo GstScaletempo;
typedef struct _GstScaletempoClass GstScaletempoClass;
typedef struct _GstScaletempoPrivate GstScaletempoPrivate;

struct _GstScaletempo
{
  GstBaseTransform element;

  gdouble scale;           /* absolute value of the segment rate */
  gboolean reverse;        /* TRUE for negative playback rates */

  /* parameters */
  guint ms_stride;         /* output stride length in ms */
  gdouble percent_overlap; /* fraction of the stride that is cross-faded */
  guint ms_search;         /* best-overlap search window in ms */

  /* caps */
  GstAudioFormat format;
  guint samples_per_frame; /* AKA number of channels */
  guint bytes_per_sample;
  guint bytes_per_frame;
  guint sample_rate;

  /* stride */
  gdouble frames_stride_scaled;  /* input frames consumed per stride */
  gdouble frames_stride_error;   /* fractional frame carry between strides */
  guint bytes_stride;
  gdouble bytes_stride_scaled;
  guint bytes_queue_max;
  guint bytes_queued;
  guint bytes_to_slide;    /* pending input bytes to discard before queueing */
  gint8 *buf_queue;        /* queued input awaiting processing */

  /* overlap */
  guint samples_overlap;
  guint samples_standing;
  guint bytes_overlap;
  guint bytes_standing;
  gpointer buf_overlap;    /* tail of the previous stride, for blending */
  gpointer table_blend;    /* cross-fade ramp (gint32 for S16) */
  void (*output_overlap) (GstScaletempo * scaletempo, gpointer out_buf, guint bytes_off);

  /* best overlap */
  guint frames_search;
  gpointer buf_pre_corr;   /* window-weighted previous tail, per search */
  gpointer table_window;   /* correlation weighting window */
  guint (*best_overlap_offset) (GstScaletempo * scaletempo);

  /* gstreamer */
  GstSegment in_segment, out_segment;
  GstClockTime latency;

  /* threads */
  gboolean reinit_buffers; /* set to trigger a lazy reinit_buffers() */
};

struct _GstScaletempoClass
{
  GstBaseTransformClass parent_class;
};

GType gst_scaletempo_get_type (void);

GST_ELEMENT_REGISTER_DECLARE (scaletempo);

G_END_DECLS
#endif /* __GST_SCALETEMPO_H__ */
diff --git a/gst/audiofx/gstscaletempoplugin.c b/gst/audiofx/gstscaletempoplugin.c
new file mode 100644
index 0000000000..3b45ef391e
--- /dev/null
+++ b/gst/audiofx/gstscaletempoplugin.c
@@ -0,0 +1,68 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Rov Juvano <rovjuvano@users.sourceforge.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include "gstscaletempo.h"
+
+/* entry point to initialize the plug-in
+ * initialize the plug-in itself
+ * register the element factories and pad templates
+ * register the features
+ *
+ * exchange the string 'plugin' with your element name
+ */
+
+/* Plugin entry point: register the scaletempo element.
+ *
+ * Uses GST_ELEMENT_REGISTER() so registration goes through the
+ * GST_ELEMENT_REGISTER_DEFINE machinery declared in gstscaletempo.h
+ * (GST_ELEMENT_REGISTER_DECLARE), consistent with how the other elements
+ * in this patch (e.g. stereo, aacparse) are registered.
+ */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (scaletempo, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR,
+    scaletempo, "Scale audio tempo in sync with playback rate",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/audiofx/gststereo.c b/gst/audiofx/gststereo.c
new file mode 100644
index 0000000000..fb299f1919
--- /dev/null
+++ b/gst/audiofx/gststereo.c
@@ -0,0 +1,202 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* This effect is borrowed from xmms-0.6.1, though I mangled it so badly in
+ * the process of copying it over that the xmms people probably won't want
+ * any credit for it ;-)
+ */
+/**
+ * SECTION:element-stereo
+ * @title: stereo
+ *
+ * Create a wide stereo effect.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v filesrc location=sine.ogg ! oggdemux ! vorbisdec ! audioconvert ! stereo ! audioconvert ! audioresample ! alsasink
+ * ]| Play an Ogg/Vorbis file.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include "gststereo.h"
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#define ALLOWED_CAPS \
+ "audio/x-raw," \
+ " format = "GST_AUDIO_NE (S16) "," \
+ " rate = (int) [ 1, MAX ]," \
+ " channels = (int) 2"
+
+/* Stereo signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_ACTIVE,
+ PROP_STEREO
+};
+
+static void gst_stereo_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_stereo_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstFlowReturn gst_stereo_transform_ip (GstBaseTransform * base,
+ GstBuffer * outbuf);
+
+G_DEFINE_TYPE (GstStereo, gst_stereo, GST_TYPE_AUDIO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (stereo, "stereo", GST_RANK_NONE, GST_TYPE_STEREO);
+
+/* Class initialiser: set element metadata, add the S16-stereo pad
+ * templates, install the "active" and "stereo" properties and hook up the
+ * in-place transform. */
+static void
+gst_stereo_class_init (GstStereoClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
+  GstAudioFilterClass *audiofilter_class = GST_AUDIO_FILTER_CLASS (klass);
+  GstCaps *caps;
+
+  gst_element_class_set_static_metadata (element_class, "Stereo effect",
+      "Filter/Effect/Audio",
+      "Muck with the stereo signal to enhance its 'stereo-ness'",
+      "Erik Walthinsen <omega@cse.ogi.edu>");
+
+  /* Only interleaved S16 native-endian stereo is accepted (ALLOWED_CAPS). */
+  caps = gst_caps_from_string (ALLOWED_CAPS);
+  gst_audio_filter_class_add_pad_templates (audiofilter_class, caps);
+  gst_caps_unref (caps);
+
+  gobject_class->set_property = gst_stereo_set_property;
+  gobject_class->get_property = gst_stereo_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_ACTIVE,
+      g_param_spec_boolean ("active", "active", "active",
+          TRUE,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  /* NOTE(review): the property default is 0.1, but the setter stores
+   * value * 10 while gst_stereo_init() initialises the raw factor to
+   * 0.1f — so a fresh element behaves as if "stereo" were 0.01 until the
+   * property is explicitly set.  Confirm whether this is intended. */
+  g_object_class_install_property (gobject_class, PROP_STEREO,
+      g_param_spec_float ("stereo", "stereo", "stereo",
+          0.0, 1.0, 0.1f,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+  trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_stereo_transform_ip);
+}
+
+/* Instance initialiser: start enabled with a mild widening factor. */
+static void
+gst_stereo_init (GstStereo * stereo)
+{
+  stereo->stereo = 0.1f;
+  stereo->active = TRUE;
+}
+
+/* In-place transform: widen the stereo image of interleaved S16 stereo.
+ *
+ * For every L/R frame the (integer) average is treated as the mono
+ * component and the per-channel deviations from it are scaled by the
+ * "stereo" factor, clamped to the 16-bit range.
+ *
+ * Returns: GST_FLOW_OK, or GST_FLOW_ERROR if the buffer cannot be mapped.
+ */
+static GstFlowReturn
+gst_stereo_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
+{
+  GstStereo *stereo = GST_STEREO (base);
+  gint samples;
+  gint i;
+  gdouble avg, ldiff, rdiff, tmp;
+  gdouble mul = stereo->stereo;
+  gint16 *data;
+  GstMapInfo info;
+
+  if (!gst_buffer_map (outbuf, &info, GST_MAP_READWRITE))
+    return GST_FLOW_ERROR;
+
+  data = (gint16 *) info.data;
+  samples = info.size / 2;      /* number of 16-bit samples, not frames */
+
+  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (outbuf)))
+    gst_object_sync_values (GST_OBJECT (stereo), GST_BUFFER_TIMESTAMP (outbuf));
+
+  if (stereo->active) {
+    /* Iterate over all complete L/R sample pairs.  The previous bound
+     * (i < samples / 2 with i += 2) only processed the first half of
+     * each buffer. */
+    for (i = 0; i + 1 < samples; i += 2) {
+      avg = (data[i] + data[i + 1]) / 2;
+      ldiff = data[i] - avg;
+      rdiff = data[i + 1] - avg;
+
+      tmp = avg + ldiff * mul;
+      if (tmp < -32768)
+        tmp = -32768;
+      if (tmp > 32767)
+        tmp = 32767;
+      data[i] = tmp;
+
+      tmp = avg + rdiff * mul;
+      if (tmp < -32768)
+        tmp = -32768;
+      if (tmp > 32767)
+        tmp = 32767;
+      data[i + 1] = tmp;
+    }
+  }
+
+  gst_buffer_unmap (outbuf, &info);
+
+  return GST_FLOW_OK;
+}
+
+/* GObject property setter for #GstStereo. */
+static void
+gst_stereo_set_property (GObject * object, guint prop_id, const GValue * value,
+    GParamSpec * pspec)
+{
+  GstStereo *self = GST_STEREO (object);
+
+  if (prop_id == PROP_ACTIVE) {
+    self->active = g_value_get_boolean (value);
+  } else if (prop_id == PROP_STEREO) {
+    /* Property range is [0, 1]; the internal factor is scaled by 10. */
+    self->stereo = g_value_get_float (value) * 10.0;
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+}
+
+/* GObject property getter for #GstStereo. */
+static void
+gst_stereo_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstStereo *self = GST_STEREO (object);
+
+  if (prop_id == PROP_ACTIVE) {
+    g_value_set_boolean (value, self->active);
+  } else if (prop_id == PROP_STEREO) {
+    /* Undo the *10 scaling applied by the setter. */
+    g_value_set_float (value, self->stereo / 10.0);
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+}
diff --git a/gst/audiofx/gststereo.h b/gst/audiofx/gststereo.h
new file mode 100644
index 0000000000..cf7757c5f3
--- /dev/null
+++ b/gst/audiofx/gststereo.h
@@ -0,0 +1,57 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_STEREO_H__
+#define __GST_STEREO_H__
+
+
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+
+#define GST_TYPE_STEREO \
+  (gst_stereo_get_type())
+#define GST_STEREO(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_STEREO,GstStereo))
+#define GST_STEREO_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_STEREO,GstStereoClass))
+#define GST_IS_STEREO(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_STEREO))
+#define GST_IS_STEREO_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_STEREO))
+
+typedef struct _GstStereo GstStereo;
+typedef struct _GstStereoClass GstStereoClass;
+
+/* Instance structure of the stereo element (a GstAudioFilter subclass). */
+struct _GstStereo {
+  GstAudioFilter element;
+
+  gboolean active;              /* "active" property: apply the effect? */
+  gfloat stereo;                /* widening factor ("stereo" property * 10) */
+};
+
+struct _GstStereoClass {
+  GstAudioFilterClass parent_class;
+};
+
+GType gst_stereo_get_type(void);
+
+GST_ELEMENT_REGISTER_DECLARE (stereo);
+
+#endif /* __GST_STEREO_H__ */
diff --git a/gst/audiofx/math_compat.h b/gst/audiofx/math_compat.h
new file mode 100644
index 0000000000..da2370c148
--- /dev/null
+++ b/gst/audiofx/math_compat.h
@@ -0,0 +1,55 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __MATH_COMPAT_H__
+#define __MATH_COMPAT_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <glib.h>
+#include <math.h>
+
+/* Fallback implementations of the hyperbolic functions for platforms whose
+ * libm lacks them; each is only compiled when configure did not detect the
+ * native version (HAVE_* undefined). */
+#ifndef HAVE_ASINH
+static inline gdouble
+asinh (gdouble x)
+{
+  /* asinh(x) = ln(x + sqrt(x^2 + 1)) */
+  return log(x + sqrt (x * x + 1));
+}
+#endif
+
+#ifndef HAVE_SINH
+static inline gdouble
+sinh (gdouble x)
+{
+  /* sinh(x) = (e^x - e^-x) / 2 */
+  return 0.5 * (exp (x) - exp (-x));
+}
+#endif
+
+#ifndef HAVE_COSH
+static inline gdouble
+cosh (gdouble x)
+{
+  /* cosh(x) = (e^x + e^-x) / 2 */
+  return 0.5 * (exp (x) + exp (-x));
+}
+#endif
+
+#endif /* __MATH_COMPAT_H__ */
diff --git a/gst/audiofx/meson.build b/gst/audiofx/meson.build
new file mode 100644
index 0000000000..1711cb6670
--- /dev/null
+++ b/gst/audiofx/meson.build
@@ -0,0 +1,50 @@
+# Sources for the audiofx plugin: all elements plus the plugin entry point.
+audiofx_sources = [
+  'audiofx.c',
+  'audiopanorama.c',
+  'audioinvert.c',
+  'audioamplify.c',
+  'audiodynamic.c',
+  'audiokaraoke.c',
+  'audiofxbaseiirfilter.c',
+  'audiocheblimit.c',
+  'audiochebband.c',
+  'audioiirfilter.c',
+  'audiofxbasefirfilter.c',
+  'audiowsincband.c',
+  'audiowsinclimit.c',
+  'audiofirfilter.c',
+  'audioecho.c',
+  'gstscaletempo.c',
+  'gststereo.c'
+]
+
+# Orc-accelerated helpers for audiopanorama: generate header/implementation
+# from the .orc source when the orc compiler is available, otherwise copy
+# the pre-generated -dist files shipped with the tarball.
+orcsrc = 'audiopanoramaorc'
+if have_orcc
+  orc_h = custom_target(orcsrc + '.h',
+    input : orcsrc + '.orc',
+    output : orcsrc + '.h',
+    command : orcc_args + ['--header', '-o', '@OUTPUT@', '@INPUT@'])
+  orc_c = custom_target(orcsrc + '.c',
+    input : orcsrc + '.orc',
+    output : orcsrc + '.c',
+    command : orcc_args + ['--implementation', '-o', '@OUTPUT@', '@INPUT@'])
+  orc_targets += {'name': orcsrc, 'orc-source': files(orcsrc + '.orc'), 'header': orc_h, 'source': orc_c}
+else
+  orc_h = configure_file(input : orcsrc + '-dist.h',
+    output : orcsrc + '.h',
+    copy : true)
+  orc_c = configure_file(input : orcsrc + '-dist.c',
+    output : orcsrc + '.c',
+    copy : true)
+endif
+
+# Build and install the plugin shared library.
+gstaudiofx = library('gstaudiofx',
+  audiofx_sources, orc_c, orc_h,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc, libsinc],
+  dependencies : [orc_dep, gstaudio_dep, gstfft_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstaudiofx, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstaudiofx]
diff --git a/gst/audioparsers/gstaacparse.c b/gst/audioparsers/gstaacparse.c
new file mode 100644
index 0000000000..b282873667
--- /dev/null
+++ b/gst/audioparsers/gstaacparse.c
@@ -0,0 +1,1677 @@
+/* GStreamer AAC parser plugin
+ * Copyright (C) 2008 Nokia Corporation. All rights reserved.
+ *
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-aacparse
+ * @title: aacparse
+ * @short_description: AAC parser
+ * @see_also: #GstAmrParse
+ *
+ * This is an AAC parser which handles both ADIF and ADTS stream formats.
+ *
+ * As ADIF format is not framed, it is not seekable and stream duration cannot
+ * be determined either. However, ADTS format AAC clips can be seeked, and parser
+ * can also estimate playback position and clip duration.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=abc.aac ! aacparse ! faad ! audioresample ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/base/gstbitreader.h>
+#include <gst/pbutils/pbutils.h>
+#include "gstaudioparserselements.h"
+#include "gstaacparse.h"
+
+
+/* Source pad: always outputs framed audio/mpeg; stream-format describes
+ * how the output is packaged (raw, ADTS, ADIF or LOAS). */
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("audio/mpeg, "
+        "framed = (boolean) true, " "mpegversion = (int) { 2, 4 }, "
+        "stream-format = (string) { raw, adts, adif, loas };"));
+
+/* Sink pad: accepts MPEG-2/MPEG-4 AAC, framed or not. */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("audio/mpeg, mpegversion = (int) { 2, 4 };"));
+
+GST_DEBUG_CATEGORY_STATIC (aacparse_debug);
+#define GST_CAT_DEFAULT aacparse_debug
+
+
+/* Minimum data needed to detect each stream format. */
+#define ADIF_MAX_SIZE 40 /* Should be enough */
+#define ADTS_MAX_SIZE 10 /* Should be enough */
+#define LOAS_MAX_SIZE 3 /* Should be enough */
+#define RAW_MAX_SIZE 1 /* Correct framing is required */
+
+#define ADTS_HEADERS_LENGTH 7UL /* Total byte-length of fixed and variable
+                                   headers prepended during raw to ADTS
+                                   conversion */
+
+/* Duration of one AAC frame in GstClockTime units.  The argument is
+ * parenthesized so the macro expands safely for any caller expression. */
+#define AAC_FRAME_DURATION(parse) (GST_SECOND / (parse)->frames_per_sec)
+
+/* Sampling rates indexed by sampling_frequency_index; 0 marks the
+ * reserved/escape entries (index 0xf signals an explicit 24-bit rate). */
+static const gint loas_sample_rate_table[16] = {
+  96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
+  16000, 12000, 11025, 8000, 7350, 0, 0, 0
+};
+
+/* Channel counts indexed by channel_configuration; 0 marks reserved
+ * values. */
+static const gint loas_channels_table[16] = {
+  0, 1, 2, 3, 4, 5, 6, 8,
+  0, 0, 0, 7, 8, 0, 8, 0
+};
+
+static gboolean gst_aac_parse_start (GstBaseParse * parse);
+static gboolean gst_aac_parse_stop (GstBaseParse * parse);
+
+static gboolean gst_aac_parse_sink_setcaps (GstBaseParse * parse,
+ GstCaps * caps);
+static GstCaps *gst_aac_parse_sink_getcaps (GstBaseParse * parse,
+ GstCaps * filter);
+
+static GstFlowReturn gst_aac_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstFlowReturn gst_aac_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+static gboolean gst_aac_parse_src_event (GstBaseParse * parse,
+ GstEvent * event);
+
+static gboolean gst_aac_parse_read_audio_specific_config (GstAacParse *
+ aacparse, GstBitReader * br, gint * object_type, gint * sample_rate,
+ gint * channels, gint * frame_samples);
+
+
+#define gst_aac_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAacParse, gst_aac_parse, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE (aacparse, "aacparse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_AAC_PARSE);
+
+/**
+ * gst_aac_parse_class_init:
+ * @klass: #GstAacParseClass.
+ *
+ */
+static void
+gst_aac_parse_class_init (GstAacParseClass * klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (aacparse_debug, "aacparse", 0,
+      "AAC audio stream parser");
+
+  gst_element_class_add_static_pad_template (element_class, &sink_template);
+  gst_element_class_add_static_pad_template (element_class, &src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "AAC audio stream parser", "Codec/Parser/Audio",
+      "Advanced Audio Coding parser", "Stefan Kost <stefan.kost@nokia.com>");
+
+  /* Wire up the GstBaseParse virtual methods implemented below. */
+  parse_class->start = GST_DEBUG_FUNCPTR (gst_aac_parse_start);
+  parse_class->stop = GST_DEBUG_FUNCPTR (gst_aac_parse_stop);
+  parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_setcaps);
+  parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_aac_parse_sink_getcaps);
+  parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_aac_parse_handle_frame);
+  parse_class->pre_push_frame =
+      GST_DEBUG_FUNCPTR (gst_aac_parse_pre_push_frame);
+  parse_class->src_event = GST_DEBUG_FUNCPTR (gst_aac_parse_src_event);
+}
+
+
+/**
+ * gst_aac_parse_init:
+ * @aacparse: #GstAacParse.
+ * @klass: #GstAacParseClass.
+ *
+ */
+static void
+gst_aac_parse_init (GstAacParse * aacparse)
+{
+  GstPad *sinkpad = GST_BASE_PARSE_SINK_PAD (aacparse);
+
+  GST_DEBUG ("initialized");
+
+  /* Accept caps that merely intersect the template instead of requiring
+   * a subset/fixed match. */
+  GST_PAD_SET_ACCEPT_INTERSECT (sinkpad);
+  GST_PAD_SET_ACCEPT_TEMPLATE (sinkpad);
+
+  aacparse->last_parsed_channels = 0;
+  aacparse->last_parsed_sample_rate = 0;
+}
+
+
+/**
+ * gst_aac_parse_set_src_caps:
+ * @aacparse: #GstAacParse.
+ * @sink_caps: (proposed) caps of sink pad
+ *
+ * Set source pad caps according to current knowledge about the
+ * audio stream.
+ *
+ * Returns: TRUE if caps were successfully set.
+ */
+static gboolean
+gst_aac_parse_set_src_caps (GstAacParse * aacparse, GstCaps * sink_caps)
+{
+  GstStructure *s;
+  GstCaps *src_caps = NULL, *peercaps;
+  gboolean res = FALSE;
+  const gchar *stream_format;
+  guint8 codec_data[2];
+  guint16 codec_data_data;
+  gint sample_rate_idx;
+
+  GST_DEBUG_OBJECT (aacparse, "sink caps: %" GST_PTR_FORMAT, sink_caps);
+  /* Start from the sink caps (if any) so fields like codec_data carry
+   * over, and mark the stream as framed. */
+  if (sink_caps)
+    src_caps = gst_caps_copy (sink_caps);
+  else
+    src_caps = gst_caps_new_empty_simple ("audio/mpeg");
+
+  gst_caps_set_simple (src_caps, "framed", G_TYPE_BOOLEAN, TRUE,
+      "mpegversion", G_TYPE_INT, aacparse->mpegversion, NULL);
+
+  /* By default output in the same framing the input arrived in; this may
+   * be overridden below after checking what the peer accepts. */
+  aacparse->output_header_type = aacparse->header_type;
+  switch (aacparse->header_type) {
+    case DSPAAC_HEADER_NONE:
+      stream_format = "raw";
+      break;
+    case DSPAAC_HEADER_ADTS:
+      stream_format = "adts";
+      break;
+    case DSPAAC_HEADER_ADIF:
+      stream_format = "adif";
+      break;
+    case DSPAAC_HEADER_LOAS:
+      stream_format = "loas";
+      break;
+    default:
+      stream_format = NULL;
+  }
+
+  /* Generate codec data to be able to set profile/level on the caps */
+  sample_rate_idx =
+      gst_codec_utils_aac_get_index_from_sample_rate (aacparse->sample_rate);
+  if (sample_rate_idx < 0)
+    goto not_a_known_rate;
+  /* 2-byte AudioSpecificConfig: 5 bits object type, 4 bits rate index,
+   * 4 bits channel configuration (ISO/IEC 14496-3, 1.6.2.1). */
+  codec_data_data =
+      (aacparse->object_type << 11) |
+      (sample_rate_idx << 7) | (aacparse->channels << 3);
+  GST_WRITE_UINT16_BE (codec_data, codec_data_data);
+  gst_codec_utils_aac_caps_set_level_and_profile (src_caps, codec_data, 2);
+
+  s = gst_caps_get_structure (src_caps, 0);
+  if (aacparse->sample_rate > 0)
+    gst_structure_set (s, "rate", G_TYPE_INT, aacparse->sample_rate, NULL);
+  if (aacparse->channels > 0)
+    gst_structure_set (s, "channels", G_TYPE_INT, aacparse->channels, NULL);
+  if (stream_format)
+    gst_structure_set (s, "stream-format", G_TYPE_STRING, stream_format, NULL);
+
+  /* If the downstream peer cannot handle the input framing, try to
+   * convert: drop the ADTS layer for raw-only peers, or prepend ADTS
+   * headers to a raw stream for adts-only peers. */
+  peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (aacparse), NULL);
+  if (peercaps && !gst_caps_can_intersect (src_caps, peercaps)) {
+    GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+        "Caps can not intersect");
+    if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
+      GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+          "Input is ADTS, trying raw");
+      gst_caps_set_simple (src_caps, "stream-format", G_TYPE_STRING, "raw",
+          NULL);
+      if (gst_caps_can_intersect (src_caps, peercaps)) {
+        GstBuffer *codec_data_buffer;
+
+        GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+            "Caps can intersect, we will drop the ADTS layer");
+        aacparse->output_header_type = DSPAAC_HEADER_NONE;
+
+        /* The codec_data data is according to AudioSpecificConfig,
+           ISO/IEC 14496-3, 1.6.2.1 */
+        codec_data_buffer = gst_buffer_new_and_alloc (2);
+        gst_buffer_fill (codec_data_buffer, 0, codec_data, 2);
+        gst_caps_set_simple (src_caps, "codec_data", GST_TYPE_BUFFER,
+            codec_data_buffer, NULL);
+        gst_buffer_unref (codec_data_buffer);
+      }
+    } else if (aacparse->header_type == DSPAAC_HEADER_NONE) {
+      GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+          "Input is raw, trying ADTS");
+      gst_caps_set_simple (src_caps, "stream-format", G_TYPE_STRING, "adts",
+          NULL);
+      if (gst_caps_can_intersect (src_caps, peercaps)) {
+        GST_DEBUG_OBJECT (GST_BASE_PARSE (aacparse)->srcpad,
+            "Caps can intersect, we will prepend ADTS headers");
+        aacparse->output_header_type = DSPAAC_HEADER_ADTS;
+      }
+    }
+  }
+  if (peercaps)
+    gst_caps_unref (peercaps);
+
+  /* New caps invalidate any per-frame rate/channel info cached so far. */
+  aacparse->last_parsed_channels = 0;
+  aacparse->last_parsed_sample_rate = 0;
+
+  GST_DEBUG_OBJECT (aacparse, "setting src caps: %" GST_PTR_FORMAT, src_caps);
+
+  res = gst_pad_set_caps (GST_BASE_PARSE (aacparse)->srcpad, src_caps);
+  gst_caps_unref (src_caps);
+  return res;
+
+not_a_known_rate:
+  GST_ERROR_OBJECT (aacparse, "Not a known sample rate: %d",
+      aacparse->sample_rate);
+  gst_caps_unref (src_caps);
+  return FALSE;
+}
+
+
+/**
+ * gst_aac_parse_sink_setcaps:
+ * @sinkpad: GstPad
+ * @caps: GstCaps
+ *
+ * Implementation of "set_sink_caps" vmethod in #GstBaseParse class.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_aac_parse_sink_setcaps (GstBaseParse * parse, GstCaps * caps)
+{
+  GstAacParse *aacparse;
+  GstStructure *structure;
+  gchar *caps_str;
+  const GValue *value;
+
+  aacparse = GST_AAC_PARSE (parse);
+  structure = gst_caps_get_structure (caps, 0);
+  caps_str = gst_caps_to_string (caps);
+
+  GST_DEBUG_OBJECT (aacparse, "setcaps: %s", caps_str);
+  g_free (caps_str);
+
+  /* This is needed at least in case of RTP
+   * Parses the codec_data information to get ObjectType,
+   * number of channels and samplerate */
+  value = gst_structure_get_value (structure, "codec_data");
+  if (value) {
+    GstBuffer *buf = gst_value_get_buffer (value);
+
+    if (buf && gst_buffer_get_size (buf) >= 2) {
+      GstMapInfo map;
+      GstBitReader br;
+
+      if (!gst_buffer_map (buf, &map, GST_MAP_READ))
+        return FALSE;
+      gst_bit_reader_init (&br, map.data, map.size);
+      gst_aac_parse_read_audio_specific_config (aacparse, &br,
+          &aacparse->object_type, &aacparse->sample_rate, &aacparse->channels,
+          &aacparse->frame_samples);
+
+      /* codec_data implies raw (unpacketized) MPEG-4 AAC payload. */
+      aacparse->header_type = DSPAAC_HEADER_NONE;
+      aacparse->mpegversion = 4;
+      gst_buffer_unmap (buf, &map);
+
+      GST_DEBUG ("codec_data: object_type=%d, sample_rate=%d, channels=%d, "
+          "samples=%d", aacparse->object_type, aacparse->sample_rate,
+          aacparse->channels, aacparse->frame_samples);
+
+      /* arrange for metadata and get out of the way */
+      gst_aac_parse_set_src_caps (aacparse, caps);
+      if (aacparse->header_type == aacparse->output_header_type)
+        gst_base_parse_set_passthrough (parse, TRUE);
+
+      /* input is already correctly framed */
+      gst_base_parse_set_min_frame_size (parse, RAW_MAX_SIZE);
+    } else {
+      /* A codec_data field that is absent-but-present-in-structure or too
+       * short to contain an AudioSpecificConfig is unusable. */
+      return FALSE;
+    }
+
+    /* caps info overrides */
+    gst_structure_get_int (structure, "rate", &aacparse->sample_rate);
+    gst_structure_get_int (structure, "channels", &aacparse->channels);
+  } else {
+    const gchar *stream_format =
+        gst_structure_get_string (structure, "stream-format");
+
+    if (g_strcmp0 (stream_format, "raw") == 0) {
+      /* Raw AAC has no sync markers, so without codec_data we cannot
+       * determine the stream configuration. */
+      GST_ERROR_OBJECT (parse, "Need codec_data for raw AAC");
+      return FALSE;
+    } else {
+      /* Self-describing formats (ADTS/ADIF/LOAS): detect everything from
+       * the stream itself in handle_frame. */
+      aacparse->sample_rate = 0;
+      aacparse->channels = 0;
+      aacparse->header_type = DSPAAC_HEADER_NOT_PARSED;
+      gst_base_parse_set_passthrough (parse, FALSE);
+    }
+  }
+  return TRUE;
+}
+
+
+/**
+ * gst_aac_parse_adts_get_frame_len:
+ * @data: block of data containing an ADTS header.
+ *
+ * This function calculates ADTS frame length from the given header.
+ *
+ * Returns: size of the ADTS frame.
+ */
+/* Extract the 13-bit aac_frame_length field, which spans the low bits of
+ * byte 3, all of byte 4 and the high 3 bits of byte 5 of an ADTS header. */
+static inline guint
+gst_aac_parse_adts_get_frame_len (const guint8 * data)
+{
+  guint len;
+
+  len = (guint) (data[3] & 0x03) << 11;
+  len |= (guint) data[4] << 3;
+  len |= (data[5] & 0xe0) >> 5;
+
+  return len;
+}
+
+
+/**
+ * gst_aac_parse_check_adts_frame:
+ * @aacparse: #GstAacParse.
+ * @data: Data to be checked.
+ * @avail: Amount of data passed.
+ * @framesize: If valid ADTS frame was found, this will be set to tell the
+ * found frame size in bytes.
+ * @needed_data: If frame was not found, this may be set to tell how much
+ * more data is needed in the next round to detect the frame
+ * reliably. This may happen when a frame header candidate
+ * is found but it cannot be guaranteed to be the header without
+ * peeking the following data.
+ *
+ * Check if the given data contains contains ADTS frame. The algorithm
+ * will examine ADTS frame header and calculate the frame size. Also, another
+ * consecutive ADTS frame header need to be present after the found frame.
+ * Otherwise the data is not considered as a valid ADTS frame. However, this
+ * "extra check" is omitted when EOS has been received. In this case it is
+ * enough when data[0] contains a valid ADTS header.
+ *
+ * This function may set the #needed_data to indicate that a possible frame
+ * candidate has been found, but more data (#needed_data bytes) is needed to
+ * be absolutely sure. When this situation occurs, FALSE will be returned.
+ *
+ * When a valid frame is detected, this function will use
+ * gst_base_parse_set_min_frame_size() function from #GstBaseParse class
+ * to set the needed bytes for next frame.This way next data chunk is already
+ * of correct size.
+ *
+ * Returns: TRUE if the given data contains a valid ADTS header.
+ */
+static gboolean
+gst_aac_parse_check_adts_frame (GstAacParse * aacparse,
+    const guint8 * data, const guint avail, gboolean drain,
+    guint * framesize, guint * needed_data)
+{
+  guint crc_size;
+
+  *needed_data = 0;
+
+  /* Absolute minimum to perform the ADTS syncword,
+     layer and sampling frequency tests */
+  if (G_UNLIKELY (avail < 3)) {
+    *needed_data = 3;
+    return FALSE;
+  }
+
+  /* Syncword and layer tests */
+  if ((data[0] == 0xff) && ((data[1] & 0xf6) == 0xf0)) {
+
+    /* Sampling frequency test: index 15 is reserved/escape, so a header
+       carrying it cannot be a valid ADTS frame. */
+    if (G_UNLIKELY ((data[2] & 0x3C) >> 2 == 15))
+      return FALSE;
+
+    /* This looks like an ADTS frame header but
+       we need at least 6 bytes to proceed */
+    if (G_UNLIKELY (avail < 6)) {
+      *needed_data = 6;
+      return FALSE;
+    }
+
+    *framesize = gst_aac_parse_adts_get_frame_len (data);
+
+    /* If frame has CRC, it needs 2 bytes
+       for it at the end of the header */
+    crc_size = (data[1] & 0x01) ? 0 : 2;
+
+    /* CRC size test: a frame length smaller than its own header is bogus. */
+    if (*framesize < 7 + crc_size) {
+      *needed_data = 7 + crc_size;
+      return FALSE;
+    }
+
+    /* In EOS mode this is enough. No need to examine the data further.
+       We also relax the check when we have sync, on the assumption that
+       if we're not looking at random data, we have a much higher chance
+       to get the correct sync, and this avoids losing two frames when
+       a single bit corruption happens. */
+    if (drain || !GST_BASE_PARSE_LOST_SYNC (aacparse)) {
+      return TRUE;
+    }
+
+    if (*framesize + ADTS_MAX_SIZE > avail) {
+      /* We have found a possible frame header candidate, but can't be
+         sure since we don't have enough data to check the next frame */
+      GST_DEBUG ("NEED MORE DATA: we need %d, available %d",
+          *framesize + ADTS_MAX_SIZE, avail);
+      *needed_data = *framesize + ADTS_MAX_SIZE;
+      gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+          *framesize + ADTS_MAX_SIZE);
+      return FALSE;
+    }
+
+    /* Confirm the candidate by requiring a second ADTS syncword right
+       after it, and pre-size the next round for that following frame. */
+    if ((data[*framesize] == 0xff) && ((data[*framesize + 1] & 0xf6) == 0xf0)) {
+      guint nextlen = gst_aac_parse_adts_get_frame_len (data + (*framesize));
+
+      GST_LOG ("ADTS frame found, len: %d bytes", *framesize);
+      gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+          nextlen + ADTS_MAX_SIZE);
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/* Read a LATM variable-length value: a 2-bit count of extra bytes followed
+ * by (count + 1) value bytes, big-endian.  Returns FALSE on bitstream
+ * underrun. */
+static gboolean
+gst_aac_parse_latm_get_value (GstAacParse * aacparse, GstBitReader * br,
+    guint32 * value)
+{
+  guint8 extra_bytes, octet, idx;
+
+  *value = 0;
+  if (!gst_bit_reader_get_bits_uint8 (br, &extra_bytes, 2))
+    return FALSE;
+
+  for (idx = 0; idx <= extra_bytes; idx++) {
+    *value <<= 8;
+    if (!gst_bit_reader_get_bits_uint8 (br, &octet, 8))
+      return FALSE;
+    *value += octet;
+  }
+
+  return TRUE;
+}
+
+/* Parse the possibly-escaped AudioObjectType field (ISO/IEC 14496-3
+ * GetAudioObjectType()): 5 bits, where the value 31 escapes to 6 further
+ * bits offset by 32. */
+static gboolean
+gst_aac_parse_get_audio_object_type (GstAacParse * aacparse, GstBitReader * br,
+    guint8 * audio_object_type)
+{
+  guint8 aot;
+
+  if (!gst_bit_reader_get_bits_uint8 (br, &aot, 5))
+    return FALSE;
+
+  if (aot == 31) {
+    if (!gst_bit_reader_get_bits_uint8 (br, &aot, 6))
+      return FALSE;
+    aot += 32;
+  }
+
+  *audio_object_type = aot;
+  GST_LOG_OBJECT (aacparse, "audio object type %u", *audio_object_type);
+  return TRUE;
+}
+
+/* Parse a samplingFrequencyIndex and resolve it to a rate in Hz, reading
+ * the 24-bit explicit rate when the escape index 0xf is used.  Also caches
+ * the result in last_parsed_sample_rate. */
+static gboolean
+gst_aac_parse_get_audio_sample_rate (GstAacParse * aacparse, GstBitReader * br,
+    gint * sample_rate)
+{
+  guint8 rate_idx;
+  gint rate;
+
+  if (!gst_bit_reader_get_bits_uint8 (br, &rate_idx, 4))
+    return FALSE;
+  GST_LOG_OBJECT (aacparse, "sampling_frequency_index: %u", rate_idx);
+
+  if (rate_idx == 0xf) {
+    guint32 explicit_rate;
+
+    if (!gst_bit_reader_get_bits_uint32 (br, &explicit_rate, 24))
+      return FALSE;
+    rate = explicit_rate;
+  } else {
+    rate = loas_sample_rate_table[rate_idx];
+    if (rate == 0)
+      return FALSE;
+  }
+
+  *sample_rate = rate;
+  aacparse->last_parsed_sample_rate = rate;
+  return TRUE;
+}
+
/* See table 1.13 in ISO/IEC 14496-3 */
/* Parse an AudioSpecificConfig from @br and extract the stream parameters.
 *
 * @object_type: (out) (optional): the (first) audio object type read.
 * @sample_rate: (out): sampling rate in Hz; reread when the object type is
 *   SBR (5) or PS (29), since those carry an extension sampling rate.
 * @channels: (out): channel count mapped through loas_channels_table.
 * @frame_samples: (out) (optional): 960 or 1024, only filled in for
 *   ER BSAC (object type 23) from the first GASpecificConfig bit.
 *
 * Only the fields needed for caps (rate/channels) are parsed; the rest of
 * the config is deliberately ignored. On success the channel count is also
 * cached in last_parsed_channels. Returns FALSE if bits run out or a
 * reserved channel configuration / sample rate is encountered. */
static gboolean
gst_aac_parse_read_audio_specific_config (GstAacParse * aacparse,
    GstBitReader * br, gint * object_type, gint * sample_rate, gint * channels,
    gint * frame_samples)
{
  guint8 audio_object_type;
  guint8 G_GNUC_UNUSED extension_audio_object_type;
  guint8 channel_configuration, extension_channel_configuration;
  gboolean G_GNUC_UNUSED sbr = FALSE, ps = FALSE;

  if (!gst_aac_parse_get_audio_object_type (aacparse, br, &audio_object_type))
    return FALSE;
  if (object_type)
    *object_type = audio_object_type;

  if (!gst_aac_parse_get_audio_sample_rate (aacparse, br, sample_rate))
    return FALSE;

  /* 4-bit channelConfiguration; a zero table entry means unsupported */
  if (!gst_bit_reader_get_bits_uint8 (br, &channel_configuration, 4))
    return FALSE;
  *channels = loas_channels_table[channel_configuration];
  GST_LOG_OBJECT (aacparse, "channel_configuration: %d", channel_configuration);
  if (!*channels)
    return FALSE;

  /* Object types 5 (SBR) and 29 (PS) signal an extension config: the
   * extension sampling rate and the actual object type follow */
  if (audio_object_type == 5 || audio_object_type == 29) {
    extension_audio_object_type = 5;
    sbr = TRUE;
    if (audio_object_type == 29) {
      ps = TRUE;
      /* Parametric stereo. If we have a one-channel configuration, we can
       * override it to stereo */
      if (*channels == 1)
        *channels = 2;
    }

    GST_LOG_OBJECT (aacparse,
        "Audio object type 5 or 29, so rereading sampling rate (was %d)...",
        *sample_rate);
    if (!gst_aac_parse_get_audio_sample_rate (aacparse, br, sample_rate))
      return FALSE;

    if (!gst_aac_parse_get_audio_object_type (aacparse, br, &audio_object_type))
      return FALSE;

    if (audio_object_type == 22) {
      /* extension channel configuration */
      if (!gst_bit_reader_get_bits_uint8 (br, &extension_channel_configuration,
              4))
        return FALSE;
      GST_LOG_OBJECT (aacparse, "extension channel_configuration: %d",
          extension_channel_configuration);
      *channels = loas_channels_table[extension_channel_configuration];
      if (!*channels)
        return FALSE;
    }
  } else {
    extension_audio_object_type = 0;
  }

  GST_INFO_OBJECT (aacparse, "Parsed AudioSpecificConfig: %d Hz, %d channels",
      *sample_rate, *channels);

  if (frame_samples && audio_object_type == 23) {
    guint8 frame_flag;
    /* Read the Decoder Configuration (GASpecificConfig) if present */
    /* We only care about the first bit to know what the number of samples
     * in a frame is */
    if (!gst_bit_reader_get_bits_uint8 (br, &frame_flag, 1))
      return FALSE;
    *frame_samples = frame_flag ? 960 : 1024;
  }

  /* There's LOTS of stuff next, but we ignore it for now as we have
     what we want (sample rate and number of channels */
  GST_DEBUG_OBJECT (aacparse,
      "Need more code to parse humongous LOAS data, currently ignored");
  aacparse->last_parsed_channels = *channels;
  return TRUE;
}
+
+
/* Parse the LATM mux layer of a LOAS frame to recover the stream config
 * (ISO/IEC 14496-3 AudioMuxElement / StreamMuxConfig).
 *
 * @data/@avail: a complete LOAS frame, starting at the sync word.
 * @sample_rate/@channels: (out): filled from the embedded
 *   AudioSpecificConfig(s), or from the previously-parsed config when the
 *   frame signals "use last config".
 * @version: (out) (optional): always set to 4 (LOAS lives in the MPEG-4
 *   part of the spec; the bitstream itself carries no version).
 *
 * Returns FALSE when no config can be determined: bits run out, the frame
 * reuses a config we have not seen yet, or audioMuxVersionA != 0 (which
 * the spec leaves as "TBD"). */
static gboolean
gst_aac_parse_read_loas_config (GstAacParse * aacparse, const guint8 * data,
    guint avail, gint * sample_rate, gint * channels, gint * version)
{
  GstBitReader br;
  guint8 u8, v, vA;

  /* No version in the bitstream, but the spec has LOAS in the MPEG-4 section */
  if (version)
    *version = 4;

  gst_bit_reader_init (&br, data, avail);

  /* skip sync word (11 bits) and size (13 bits) */
  if (!gst_bit_reader_skip (&br, 11 + 13))
    return FALSE;

  /* First bit is "use last config" */
  if (!gst_bit_reader_get_bits_uint8 (&br, &u8, 1))
    return FALSE;
  if (u8) {
    GST_LOG_OBJECT (aacparse, "Frame uses previous config");
    if (!aacparse->last_parsed_sample_rate || !aacparse->last_parsed_channels) {
      GST_DEBUG_OBJECT (aacparse,
          "No previous config to use. We'll look for more data.");
      return FALSE;
    }
    *sample_rate = aacparse->last_parsed_sample_rate;
    *channels = aacparse->last_parsed_channels;
    return TRUE;
  }

  GST_DEBUG_OBJECT (aacparse, "Frame contains new config");

  /* audioMuxVersion */
  if (!gst_bit_reader_get_bits_uint8 (&br, &v, 1))
    return FALSE;
  if (v) {
    /* audioMuxVersionA */
    if (!gst_bit_reader_get_bits_uint8 (&br, &vA, 1))
      return FALSE;
  } else
    vA = 0;

  GST_LOG_OBJECT (aacparse, "v %d, vA %d", v, vA);
  if (vA == 0) {
    guint8 same_time, subframes, num_program, prog;
    if (v == 1) {
      guint32 value;
      /* taraBufferFullness */
      if (!gst_aac_parse_latm_get_value (aacparse, &br, &value))
        return FALSE;
    }
    if (!gst_bit_reader_get_bits_uint8 (&br, &same_time, 1))
      return FALSE;
    if (!gst_bit_reader_get_bits_uint8 (&br, &subframes, 6))
      return FALSE;
    if (!gst_bit_reader_get_bits_uint8 (&br, &num_program, 4))
      return FALSE;
    GST_LOG_OBJECT (aacparse, "same_time %d, subframes %d, num_program %d",
        same_time, subframes, num_program);

    /* num_program/num_layer fields are "count minus one", hence <= */
    for (prog = 0; prog <= num_program; ++prog) {
      guint8 num_layer, layer;
      if (!gst_bit_reader_get_bits_uint8 (&br, &num_layer, 3))
        return FALSE;
      GST_LOG_OBJECT (aacparse, "Program %d: %d layers", prog, num_layer);

      for (layer = 0; layer <= num_layer; ++layer) {
        guint8 use_same_config;
        /* the very first program/layer always carries its own config */
        if (prog == 0 && layer == 0) {
          use_same_config = 0;
        } else {
          if (!gst_bit_reader_get_bits_uint8 (&br, &use_same_config, 1))
            return FALSE;
        }
        if (!use_same_config) {
          if (v == 0) {
            if (!gst_aac_parse_read_audio_specific_config (aacparse, &br, NULL,
                    sample_rate, channels, NULL))
              return FALSE;
          } else {
            /* v == 1: the config is length-prefixed (ascLen) */
            guint32 asc_len;
            if (!gst_aac_parse_latm_get_value (aacparse, &br, &asc_len))
              return FALSE;
            if (!gst_aac_parse_read_audio_specific_config (aacparse, &br, NULL,
                    sample_rate, channels, NULL))
              return FALSE;
            if (!gst_bit_reader_skip (&br, asc_len))
              return FALSE;
          }
        }
      }
    }
    GST_LOG_OBJECT (aacparse, "More data ignored");
  } else {
    GST_WARNING_OBJECT (aacparse, "Spec says \"TBD\"...");
    return FALSE;
  }
  return TRUE;
}
+
+/**
+ * gst_aac_parse_loas_get_frame_len:
+ * @data: block of data containing a LOAS header.
+ *
+ * This function calculates LOAS frame length from the given header.
+ *
+ * Returns: size of the LOAS frame.
+ */
+static inline guint
+gst_aac_parse_loas_get_frame_len (const guint8 * data)
+{
+ return (((data[1] & 0x1f) << 8) | data[2]) + 3;
+}
+
+
+/**
+ * gst_aac_parse_check_loas_frame:
+ * @aacparse: #GstAacParse.
+ * @data: Data to be checked.
+ * @avail: Amount of data passed.
+ * @framesize: If valid LOAS frame was found, this will be set to tell the
+ * found frame size in bytes.
+ * @needed_data: If frame was not found, this may be set to tell how much
+ * more data is needed in the next round to detect the frame
+ * reliably. This may happen when a frame header candidate
+ * is found but it cannot be guaranteed to be the header without
+ * peeking the following data.
+ *
+ * Check if the given data contains contains LOAS frame. The algorithm
+ * will examine LOAS frame header and calculate the frame size. Also, another
+ * consecutive LOAS frame header need to be present after the found frame.
+ * Otherwise the data is not considered as a valid LOAS frame. However, this
+ * "extra check" is omitted when EOS has been received. In this case it is
+ * enough when data[0] contains a valid LOAS header.
+ *
+ * This function may set the #needed_data to indicate that a possible frame
+ * candidate has been found, but more data (#needed_data bytes) is needed to
+ * be absolutely sure. When this situation occurs, FALSE will be returned.
+ *
+ * When a valid frame is detected, this function will use
+ * gst_base_parse_set_min_frame_size() function from #GstBaseParse class
+ * to set the needed bytes for next frame.This way next data chunk is already
+ * of correct size.
+ *
+ * LOAS can have three different formats, if I read the spec correctly. Only
+ * one of them is supported here, as the two samples I have use this one.
+ *
+ * Returns: TRUE if the given data contains a valid LOAS header.
+ */
+static gboolean
+gst_aac_parse_check_loas_frame (GstAacParse * aacparse,
+ const guint8 * data, const guint avail, gboolean drain,
+ guint * framesize, guint * needed_data)
+{
+ *needed_data = 0;
+
+ /* 3 byte header */
+ if (G_UNLIKELY (avail < 3)) {
+ *needed_data = 3;
+ return FALSE;
+ }
+
+ if ((data[0] == 0x56) && ((data[1] & 0xe0) == 0xe0)) {
+ *framesize = gst_aac_parse_loas_get_frame_len (data);
+ GST_DEBUG_OBJECT (aacparse, "Found possible %u byte LOAS frame",
+ *framesize);
+
+ /* In EOS mode this is enough. No need to examine the data further.
+ We also relax the check when we have sync, on the assumption that
+ if we're not looking at random data, we have a much higher chance
+ to get the correct sync, and this avoids losing two frames when
+ a single bit corruption happens. */
+ if (drain || !GST_BASE_PARSE_LOST_SYNC (aacparse)) {
+ return TRUE;
+ }
+
+ if (*framesize + LOAS_MAX_SIZE > avail) {
+ /* We have found a possible frame header candidate, but can't be
+ sure since we don't have enough data to check the next frame */
+ GST_DEBUG ("NEED MORE DATA: we need %d, available %d",
+ *framesize + LOAS_MAX_SIZE, avail);
+ *needed_data = *framesize + LOAS_MAX_SIZE;
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ *framesize + LOAS_MAX_SIZE);
+ return FALSE;
+ }
+
+ if ((data[*framesize] == 0x56) && ((data[*framesize + 1] & 0xe0) == 0xe0)) {
+ guint nextlen = gst_aac_parse_loas_get_frame_len (data + (*framesize));
+
+ GST_LOG ("LOAS frame found, len: %d bytes", *framesize);
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
+ nextlen + LOAS_MAX_SIZE);
+ return TRUE;
+ } else {
+ GST_DEBUG_OBJECT (aacparse, "That was a false positive");
+ }
+ }
+ return FALSE;
+}
+
+/* caller ensure sufficient data */
+static inline void
+gst_aac_parse_parse_adts_header (GstAacParse * aacparse, const guint8 * data,
+ gint * rate, gint * channels, gint * object, gint * version)
+{
+
+ if (rate) {
+ gint sr_idx = (data[2] & 0x3c) >> 2;
+
+ *rate = gst_codec_utils_aac_get_sample_rate_from_index (sr_idx);
+ }
+ if (channels) {
+ *channels = ((data[2] & 0x01) << 2) | ((data[3] & 0xc0) >> 6);
+ if (*channels == 7)
+ *channels = 8;
+ }
+
+ if (version)
+ *version = (data[1] & 0x08) ? 2 : 4;
+ if (object)
+ *object = ((data[2] & 0xc0) >> 6) + 1;
+}
+
/**
 * gst_aac_parse_detect_stream:
 * @aacparse: #GstAacParse.
 * @data: A block of data that needs to be examined for stream characteristics.
 * @avail: Size of the given datablock.
 * @framesize: If valid stream was found, this will be set to tell the
 *             first frame size in bytes.
 * @skipsize: If valid stream was found, this will be set to tell the first
 *            audio frame position within the given data.
 *
 * Examines the given piece of data and try to detect the format of it. It
 * checks for "ADIF" header (in the beginning of the clip) and ADTS frame
 * header. If the stream is detected, TRUE will be returned and #framesize
 * is set to indicate the found frame size. Additionally, #skipsize might
 * be set to indicate the number of bytes that need to be skipped, a.k.a. the
 * position of the frame inside given data chunk.
 *
 * Returns: TRUE on success.
 */
static gboolean
gst_aac_parse_detect_stream (GstAacParse * aacparse,
    const guint8 * data, const guint avail, gboolean drain,
    guint * framesize, gint * skipsize)
{
  gboolean found = FALSE;
  guint need_data_adts = 0, need_data_loas;
  guint i = 0;

  GST_DEBUG_OBJECT (aacparse, "Parsing header data");

  /* FIXME: No need to check for ADIF if we are not in the beginning of the
     stream */

  /* Can we even parse the header? */
  if (avail < MAX (ADTS_MAX_SIZE, LOAS_MAX_SIZE)) {
    GST_DEBUG_OBJECT (aacparse, "Not enough data to check");
    return FALSE;
  }

  /* Scan for the first ADTS sync (0xFFFx), LOAS sync (0x56Ex) or "ADIF"
   * signature in the available data */
  for (i = 0; i < avail - 4; i++) {
    if (((data[i] == 0xff) && ((data[i + 1] & 0xf6) == 0xf0)) ||
        ((data[i] == 0x56) && ((data[i + 1] & 0xe0) == 0xe0)) ||
        strncmp ((char *) data + i, "ADIF", 4) == 0) {
      GST_DEBUG_OBJECT (aacparse, "Found signature at offset %u", i);
      found = TRUE;

      if (i) {
        /* Trick: tell the parent class that we didn't find the frame yet,
           but make it skip 'i' amount of bytes. Next time we arrive
           here we have full frame in the beginning of the data. */
        *skipsize = i;
        return FALSE;
      }
      break;
    }
  }
  if (!found) {
    if (i)
      *skipsize = i;
    return FALSE;
  }

  /* Candidate at offset 0: try ADTS first */
  if (gst_aac_parse_check_adts_frame (aacparse, data, avail, drain,
          framesize, &need_data_adts)) {
    gint rate, channels;

    GST_INFO ("ADTS ID: %d, framesize: %d", (data[1] & 0x08) >> 3, *framesize);

    gst_aac_parse_parse_adts_header (aacparse, data, &rate, &channels,
        &aacparse->object_type, &aacparse->mpegversion);

    if (!channels || !framesize) {
      GST_DEBUG_OBJECT (aacparse, "impossible ADTS configuration");
      return FALSE;
    }

    aacparse->header_type = DSPAAC_HEADER_ADTS;
    gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse), rate,
        aacparse->frame_samples, 2, 2);

    GST_DEBUG ("ADTS: samplerate %d, channels %d, objtype %d, version %d",
        rate, channels, aacparse->object_type, aacparse->mpegversion);

    gst_base_parse_set_syncable (GST_BASE_PARSE (aacparse), TRUE);

    return TRUE;
  }

  /* Then LOAS */
  if (gst_aac_parse_check_loas_frame (aacparse, data, avail, drain,
          framesize, &need_data_loas)) {
    gint rate = 0, channels = 0;

    GST_INFO ("LOAS, framesize: %d", *framesize);

    aacparse->header_type = DSPAAC_HEADER_LOAS;

    if (!gst_aac_parse_read_loas_config (aacparse, data, avail, &rate,
            &channels, &aacparse->mpegversion)) {
      /* This is pretty normal when skipping data at the start of
       * random stream (MPEG-TS capture for example) */
      GST_LOG_OBJECT (aacparse, "Error reading LOAS config");
      return FALSE;
    }

    if (rate && channels) {
      gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse), rate,
          aacparse->frame_samples, 2, 2);

      /* Don't store the sample rate and channels yet -
       * this is just format detection. */
      GST_DEBUG ("LOAS: samplerate %d, channels %d, objtype %d, version %d",
          rate, channels, aacparse->object_type, aacparse->mpegversion);
    }

    gst_base_parse_set_syncable (GST_BASE_PARSE (aacparse), TRUE);

    return TRUE;
  }

  if (need_data_adts || need_data_loas) {
    /* This tells the parent class not to skip any data */
    *skipsize = 0;
    return FALSE;
  }

  if (avail < ADIF_MAX_SIZE)
    return FALSE;

  /* Finally ADIF: a single whole-stream header, parsed by hand below */
  if (memcmp (data + i, "ADIF", 4) == 0) {
    const guint8 *adif;
    int skip_size = 0;
    int bitstream_type;
    int sr_idx;
    GstCaps *sinkcaps;

    aacparse->header_type = DSPAAC_HEADER_ADIF;
    aacparse->mpegversion = 4;

    /* Skip the "ADIF" bytes */
    adif = data + i + 4;

    /* copyright string */
    if (adif[0] & 0x80)
      skip_size += 9;           /* skip 9 bytes */

    bitstream_type = adif[0 + skip_size] & 0x10;
    aacparse->bitrate =
        ((unsigned int) (adif[0 + skip_size] & 0x0f) << 19) |
        ((unsigned int) adif[1 + skip_size] << 11) |
        ((unsigned int) adif[2 + skip_size] << 3) |
        ((unsigned int) adif[3 + skip_size] & 0xe0);

    /* CBR */
    if (bitstream_type == 0) {
#if 0
      /* Buffer fullness parsing. Currently not needed... */
      guint num_elems = 0;
      guint fullness = 0;

      num_elems = (adif[3 + skip_size] & 0x1e);
      GST_INFO ("ADIF num_config_elems: %d", num_elems);

      fullness = ((unsigned int) (adif[3 + skip_size] & 0x01) << 19) |
          ((unsigned int) adif[4 + skip_size] << 11) |
          ((unsigned int) adif[5 + skip_size] << 3) |
          ((unsigned int) (adif[6 + skip_size] & 0xe0) >> 5);

      GST_INFO ("ADIF buffer fullness: %d", fullness);
#endif
      aacparse->object_type = ((adif[6 + skip_size] & 0x01) << 1) |
          ((adif[7 + skip_size] & 0x80) >> 7);
      sr_idx = (adif[7 + skip_size] & 0x78) >> 3;
    }
    /* VBR */
    else {
      aacparse->object_type = (adif[4 + skip_size] & 0x18) >> 3;
      sr_idx = ((adif[4 + skip_size] & 0x07) << 1) |
          ((adif[5 + skip_size] & 0x80) >> 7);
    }

    /* FIXME: This gives totally wrong results. Duration calculation cannot
       be based on this */
    aacparse->sample_rate =
        gst_codec_utils_aac_get_sample_rate_from_index (sr_idx);

    /* baseparse is not given any fps,
     * so it will give up on timestamps, seeking, etc */

    /* FIXME: Can we assume this? */
    aacparse->channels = 2;

    GST_INFO ("ADIF: br=%d, samplerate=%d, objtype=%d",
        aacparse->bitrate, aacparse->sample_rate, aacparse->object_type);

    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse), 512);

    /* arrange for metadata and get out of the way */
    sinkcaps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (aacparse));
    gst_aac_parse_set_src_caps (aacparse, sinkcaps);
    if (sinkcaps)
      gst_caps_unref (sinkcaps);

    /* not syncable, not easily seekable (unless we push data from start */
    gst_base_parse_set_syncable (GST_BASE_PARSE_CAST (aacparse), FALSE);
    gst_base_parse_set_passthrough (GST_BASE_PARSE_CAST (aacparse), TRUE);
    gst_base_parse_set_average_bitrate (GST_BASE_PARSE_CAST (aacparse), 0);

    *framesize = avail;
    return TRUE;
  }

  /* This should never happen */
  return FALSE;
}
+
+/**
+ * gst_aac_parse_get_audio_profile_object_type
+ * @aacparse: #GstAacParse.
+ *
+ * Gets the MPEG-2 profile or the MPEG-4 object type value corresponding to the
+ * mpegversion and profile of @aacparse's src pad caps, according to the
+ * values defined by table 1.A.11 in ISO/IEC 14496-3.
+ *
+ * Returns: the profile or object type value corresponding to @aacparse's src
+ * pad caps, if such a value exists; otherwise G_MAXUINT8.
+ */
+static guint8
+gst_aac_parse_get_audio_profile_object_type (GstAacParse * aacparse)
+{
+ GstCaps *srccaps;
+ GstStructure *srcstruct;
+ const gchar *profile;
+ guint8 ret;
+
+ srccaps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (aacparse));
+ if (G_UNLIKELY (srccaps == NULL)) {
+ return G_MAXUINT8;
+ }
+
+ srcstruct = gst_caps_get_structure (srccaps, 0);
+ profile = gst_structure_get_string (srcstruct, "profile");
+ if (G_UNLIKELY (profile == NULL)) {
+ gst_caps_unref (srccaps);
+ return G_MAXUINT8;
+ }
+
+ if (g_strcmp0 (profile, "main") == 0) {
+ ret = (guint8) 0U;
+ } else if (g_strcmp0 (profile, "lc") == 0) {
+ ret = (guint8) 1U;
+ } else if (g_strcmp0 (profile, "ssr") == 0) {
+ ret = (guint8) 2U;
+ } else if (g_strcmp0 (profile, "ltp") == 0) {
+ if (G_LIKELY (aacparse->mpegversion == 4))
+ ret = (guint8) 3U;
+ else
+ ret = G_MAXUINT8; /* LTP Object Type allowed only for MPEG-4 */
+ } else {
+ ret = G_MAXUINT8;
+ }
+
+ gst_caps_unref (srccaps);
+ return ret;
+}
+
+/**
+ * gst_aac_parse_get_audio_channel_configuration
+ * @num_channels: number of audio channels.
+ *
+ * Gets the Channel Configuration value, as defined by table 1.19 in ISO/IEC
+ * 14496-3, for a given number of audio channels.
+ *
+ * Returns: the Channel Configuration value corresponding to @num_channels, if
+ * such a value exists; otherwise G_MAXUINT8.
+ */
+static guint8
+gst_aac_parse_get_audio_channel_configuration (gint num_channels)
+{
+ if (num_channels >= 1 && num_channels <= 6) /* Mono up to & including 5.1 */
+ return (guint8) num_channels;
+ else if (num_channels == 8) /* 7.1 */
+ return (guint8) 7U;
+ else
+ return G_MAXUINT8;
+
+ /* FIXME: Add support for configurations 11, 12 and 14 from
+ * ISO/IEC 14496-3:2009/PDAM 4 based on the actual channel layout
+ */
+}
+
+/**
+ * gst_aac_parse_get_audio_sampling_frequency_index:
+ * @sample_rate: audio sampling rate.
+ *
+ * Gets the Sampling Frequency Index value, as defined by table 1.18 in ISO/IEC
+ * 14496-3, for a given sampling rate.
+ *
+ * Returns: the Sampling Frequency Index value corresponding to @sample_rate,
+ * if such a value exists; otherwise G_MAXUINT8.
+ */
+static guint8
+gst_aac_parse_get_audio_sampling_frequency_index (gint sample_rate)
+{
+ switch (sample_rate) {
+ case 96000:
+ return 0x0U;
+ case 88200:
+ return 0x1U;
+ case 64000:
+ return 0x2U;
+ case 48000:
+ return 0x3U;
+ case 44100:
+ return 0x4U;
+ case 32000:
+ return 0x5U;
+ case 24000:
+ return 0x6U;
+ case 22050:
+ return 0x7U;
+ case 16000:
+ return 0x8U;
+ case 12000:
+ return 0x9U;
+ case 11025:
+ return 0xAU;
+ case 8000:
+ return 0xBU;
+ case 7350:
+ return 0xCU;
+ default:
+ return G_MAXUINT8;
+ }
+}
+
+/**
+ * gst_aac_parse_prepend_adts_headers:
+ * @aacparse: #GstAacParse.
+ * @frame: raw AAC frame to which ADTS headers shall be prepended.
+ *
+ * Prepends ADTS headers to a raw AAC audio frame.
+ *
+ * Returns: TRUE if ADTS headers were successfully prepended; FALSE otherwise.
+ */
+static gboolean
+gst_aac_parse_prepend_adts_headers (GstAacParse * aacparse,
+ GstBaseParseFrame * frame)
+{
+ GstMemory *mem;
+ guint8 *adts_headers;
+ gsize buf_size;
+ gsize frame_size;
+ guint8 id, profile, channel_configuration, sampling_frequency_index;
+
+ id = (aacparse->mpegversion == 4) ? 0x0U : 0x1U;
+ profile = gst_aac_parse_get_audio_profile_object_type (aacparse);
+ if (profile == G_MAXUINT8) {
+ GST_ERROR_OBJECT (aacparse, "Unsupported audio profile or object type");
+ return FALSE;
+ }
+ channel_configuration =
+ gst_aac_parse_get_audio_channel_configuration (aacparse->channels);
+ if (channel_configuration == G_MAXUINT8) {
+ GST_ERROR_OBJECT (aacparse, "Unsupported number of channels");
+ return FALSE;
+ }
+ sampling_frequency_index =
+ gst_aac_parse_get_audio_sampling_frequency_index (aacparse->sample_rate);
+ if (sampling_frequency_index == G_MAXUINT8) {
+ GST_ERROR_OBJECT (aacparse, "Unsupported sampling frequency");
+ return FALSE;
+ }
+
+ frame->out_buffer = gst_buffer_copy (frame->buffer);
+ buf_size = gst_buffer_get_size (frame->out_buffer);
+ frame_size = buf_size + ADTS_HEADERS_LENGTH;
+
+ if (G_UNLIKELY (frame_size >= 0x4000)) {
+ GST_ERROR_OBJECT (aacparse, "Frame size is too big for ADTS");
+ return FALSE;
+ }
+
+ adts_headers = (guint8 *) g_malloc0 (ADTS_HEADERS_LENGTH);
+
+ /* Note: no error correction bits are added to the resulting ADTS frames */
+ adts_headers[0] = 0xFFU;
+ adts_headers[1] = 0xF0U | (id << 3) | 0x1U;
+ adts_headers[2] = (profile << 6) | (sampling_frequency_index << 2) | 0x2U |
+ ((channel_configuration & 0x4U) >> 2);
+ adts_headers[3] = ((channel_configuration & 0x3U) << 6) | 0x30U |
+ (guint8) (frame_size >> 11);
+ adts_headers[4] = (guint8) ((frame_size >> 3) & 0x00FF);
+ adts_headers[5] = (guint8) (((frame_size & 0x0007) << 5) + 0x1FU);
+ adts_headers[6] = 0xFCU;
+
+ mem = gst_memory_new_wrapped (0, adts_headers, ADTS_HEADERS_LENGTH, 0,
+ ADTS_HEADERS_LENGTH, adts_headers, g_free);
+ gst_buffer_prepend_memory (frame->out_buffer, mem);
+
+ return TRUE;
+}
+
/**
 * gst_aac_parse_check_valid_frame:
 * @parse: #GstBaseParse.
 * @frame: #GstBaseParseFrame.
 * @skipsize: How much data parent class should skip in order to find the
 *            frame header.
 *
 * Implementation of "handle_frame" vmethod in #GstBaseParse class.
 *
 * Also determines frame overhead.
 * ADTS streams have a 7 byte header in each frame. MP4 and ADIF streams don't have
 * a per-frame header. LOAS has 3 bytes.
 *
 * We're making a couple of simplifying assumptions:
 *
 * 1. We count Program Configuration Elements rather than searching for them
 *    in the streams to discount them - the overhead is negligible.
 *
 * 2. We ignore CRC. This has a worst-case impact of (num_raw_blocks + 1)*16
 *    bits, which should still not be significant enough to warrant the
 *    additional parsing through the headers
 *
 * Returns: a #GstFlowReturn.
 */
static GstFlowReturn
gst_aac_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstMapInfo map;
  GstAacParse *aacparse;
  gboolean ret = FALSE;
  gboolean lost_sync;
  GstBuffer *buffer;
  guint framesize;
  gint rate = 0, channels = 0;

  aacparse = GST_AAC_PARSE (parse);
  buffer = frame->buffer;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  /* -1 means "no skip decided yet"; resolved in the exit path below */
  *skipsize = -1;
  lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);

  /* Step 1: find the frame boundary according to the detected header type */
  if (aacparse->header_type == DSPAAC_HEADER_ADIF ||
      aacparse->header_type == DSPAAC_HEADER_NONE) {
    /* There is nothing to parse */
    framesize = map.size;
    ret = TRUE;

  } else if (aacparse->header_type == DSPAAC_HEADER_NOT_PARSED || lost_sync) {

    ret = gst_aac_parse_detect_stream (aacparse, map.data, map.size,
        GST_BASE_PARSE_DRAINING (parse), &framesize, skipsize);

  } else if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
    guint needed_data = 1024;

    ret = gst_aac_parse_check_adts_frame (aacparse, map.data, map.size,
        GST_BASE_PARSE_DRAINING (parse), &framesize, &needed_data);

    if (!ret && needed_data) {
      GST_DEBUG ("buffer didn't contain valid frame");
      *skipsize = 0;
      gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
          needed_data);
    }

  } else if (aacparse->header_type == DSPAAC_HEADER_LOAS) {
    guint needed_data = 1024;

    ret = gst_aac_parse_check_loas_frame (aacparse, map.data,
        map.size, GST_BASE_PARSE_DRAINING (parse), &framesize, &needed_data);

    if (!ret && needed_data) {
      GST_DEBUG ("buffer didn't contain valid frame");
      *skipsize = 0;
      gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
          needed_data);
    }

  } else {
    GST_DEBUG ("buffer didn't contain valid frame");
    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse),
        ADTS_MAX_SIZE);
  }

  if (G_UNLIKELY (!ret))
    goto exit;

  /* Step 2: track per-frame config changes and (re)negotiate caps */
  if (aacparse->header_type == DSPAAC_HEADER_ADTS) {
    /* see above */
    frame->overhead = 7;

    gst_aac_parse_parse_adts_header (aacparse, map.data,
        &rate, &channels, NULL, NULL);

    GST_LOG_OBJECT (aacparse, "rate: %d, chans: %d", rate, channels);

    if (G_UNLIKELY (rate != aacparse->sample_rate
            || channels != aacparse->channels)) {
      aacparse->sample_rate = rate;
      aacparse->channels = channels;

      if (!gst_aac_parse_set_src_caps (aacparse, NULL)) {
        /* If linking fails, we need to return appropriate error */
        ret = GST_FLOW_NOT_LINKED;
      }

      gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse),
          aacparse->sample_rate, aacparse->frame_samples, 2, 2);
    }
  } else if (aacparse->header_type == DSPAAC_HEADER_LOAS) {
    gboolean setcaps = FALSE;

    /* see above */
    frame->overhead = 3;

    if (!gst_aac_parse_read_loas_config (aacparse, map.data, map.size, &rate,
            &channels, NULL) || !rate || !channels) {
      /* This is pretty normal when skipping data at the start of
       * random stream (MPEG-TS capture for example) */
      GST_DEBUG_OBJECT (aacparse, "Error reading LOAS config. Skipping.");
      /* Since we don't fully parse the LOAS config, we don't know for sure
       * how much to skip. Just skip 1 to end up to the next marker and
       * resume parsing from there */
      *skipsize = 1;
      goto exit;
    }

    if (G_UNLIKELY (rate != aacparse->sample_rate
            || channels != aacparse->channels)) {
      aacparse->sample_rate = rate;
      aacparse->channels = channels;
      setcaps = TRUE;
      GST_INFO_OBJECT (aacparse, "New LOAS config: %d Hz, %d channels", rate,
          channels);
    }

    /* We want to set caps both at start, and when rate/channels change.
       Since only some LOAS frames have that info, we may receive frames
       before knowing about rate/channels. */
    if (setcaps
        || !gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (aacparse))) {
      if (!gst_aac_parse_set_src_caps (aacparse, NULL)) {
        /* If linking fails, we need to return appropriate error */
        ret = GST_FLOW_NOT_LINKED;
      }

      gst_base_parse_set_frame_rate (GST_BASE_PARSE (aacparse),
          aacparse->sample_rate, aacparse->frame_samples, 2, 2);
    }
  }

  /* Step 3: raw input being re-framed as ADTS gets headers prepended */
  if (aacparse->header_type == DSPAAC_HEADER_NONE
      && aacparse->output_header_type == DSPAAC_HEADER_ADTS) {
    if (!gst_aac_parse_prepend_adts_headers (aacparse, frame)) {
      GST_ERROR_OBJECT (aacparse, "Failed to prepend ADTS headers to frame");
      ret = GST_FLOW_ERROR;
    }
  }

exit:
  gst_buffer_unmap (buffer, &map);

  if (ret) {
    /* found, skip if needed */
    if (*skipsize > 0)
      return GST_FLOW_OK;
    *skipsize = 0;
  } else {
    if (*skipsize < 0)
      *skipsize = 1;
  }

  if (ret && framesize <= map.size) {
    return gst_base_parse_finish_frame (parse, frame, framesize);
  }

  return GST_FLOW_OK;
}
+
/* Implementation of the "pre_push_frame" vmethod in #GstBaseParse.
 *
 * On the first pushed frame, emits an audio-codec tag derived from the
 * negotiated src caps. For ADTS input being output as raw AAC, strips the
 * per-frame ADTS header (7 bytes, or 9 with CRC) from the outgoing buffer.
 * Always marks the frame for clipping against the segment. */
static GstFlowReturn
gst_aac_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
  GstAacParse *aacparse = GST_AAC_PARSE (parse);

  if (!aacparse->sent_codec_tag) {
    GstTagList *taglist;
    GstCaps *caps;

    /* codec tag */
    caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
    if (caps == NULL) {
      if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
        GST_INFO_OBJECT (parse, "Src pad is flushing");
        return GST_FLOW_FLUSHING;
      } else {
        GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
        return GST_FLOW_NOT_NEGOTIATED;
      }
    }

    taglist = gst_tag_list_new_empty ();
    gst_pb_utils_add_codec_description_to_tag_list (taglist,
        GST_TAG_AUDIO_CODEC, caps);
    gst_caps_unref (caps);

    gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
    gst_tag_list_unref (taglist);

    /* also signals the end of first-frame processing */
    aacparse->sent_codec_tag = TRUE;
  }

  /* As a special case, we can remove the ADTS framing and output raw AAC. */
  if (aacparse->header_type == DSPAAC_HEADER_ADTS
      && aacparse->output_header_type == DSPAAC_HEADER_NONE) {
    guint header_size;
    GstMapInfo map;
    /* take over the buffer into out_buffer so we may trim it in place */
    frame->out_buffer = gst_buffer_make_writable (frame->buffer);
    frame->buffer = NULL;
    gst_buffer_map (frame->out_buffer, &map, GST_MAP_READ);
    /* protection_absent bit: set means no CRC (7 byte header), else 9 */
    header_size = (map.data[1] & 1) ? 7 : 9;    /* optional CRC */
    gst_buffer_unmap (frame->out_buffer, &map);
    gst_buffer_resize (frame->out_buffer, header_size,
        gst_buffer_get_size (frame->out_buffer) - header_size);
  }

  frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;

  return GST_FLOW_OK;
}
+
+
+/**
+ * gst_aac_parse_start:
+ * @parse: #GstBaseParse.
+ *
+ * Implementation of "start" vmethod in #GstBaseParse class.
+ *
+ * Returns: TRUE if startup succeeded.
+ */
+static gboolean
+gst_aac_parse_start (GstBaseParse * parse)
+{
+ GstAacParse *aacparse;
+
+ aacparse = GST_AAC_PARSE (parse);
+ GST_DEBUG ("start");
+ aacparse->frame_samples = 1024;
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (aacparse), ADTS_MAX_SIZE);
+ aacparse->sent_codec_tag = FALSE;
+ aacparse->last_parsed_channels = 0;
+ aacparse->last_parsed_sample_rate = 0;
+ aacparse->object_type = 0;
+ aacparse->bitrate = 0;
+ aacparse->header_type = DSPAAC_HEADER_NOT_PARSED;
+ aacparse->output_header_type = DSPAAC_HEADER_NOT_PARSED;
+ aacparse->channels = 0;
+ aacparse->sample_rate = 0;
+ return TRUE;
+}
+
+
/**
 * gst_aac_parse_stop:
 * @parse: #GstBaseParse.
 *
 * Implementation of "stop" vmethod in #GstBaseParse class. No teardown is
 * needed; all state is reset in the "start" vmethod instead.
 *
 * Returns: TRUE if stopping succeeded.
 */
static gboolean
gst_aac_parse_stop (GstBaseParse * parse)
{
  GST_DEBUG ("stop");
  return TRUE;
}
+
+static void
+remove_fields (GstCaps * caps)
+{
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "framed");
+ }
+}
+
/* Widen the "stream-format" field in every structure of @caps: since this
 * parser can convert between "adts" and "raw", any structure that accepts
 * one of them is rewritten to accept both (as a GstValueList). Other
 * stream-format values (e.g. "adif", "loas") are left untouched.
 * @caps must be writable. */
static void
add_conversion_fields (GstCaps * caps)
{
  guint i, n;

  n = gst_caps_get_size (caps);
  for (i = 0; i < n; i++) {
    GstStructure *s = gst_caps_get_structure (caps, i);

    if (gst_structure_has_field (s, "stream-format")) {
      const GValue *v = gst_structure_get_value (s, "stream-format");

      if (G_VALUE_HOLDS_STRING (v)) {
        /* single fixed value: replace with the two-entry list */
        const gchar *str = g_value_get_string (v);

        if (strcmp (str, "adts") == 0 || strcmp (str, "raw") == 0) {
          GValue va = G_VALUE_INIT;
          GValue vs = G_VALUE_INIT;

          g_value_init (&va, GST_TYPE_LIST);
          g_value_init (&vs, G_TYPE_STRING);
          g_value_set_string (&vs, "adts");
          gst_value_list_append_value (&va, &vs);
          g_value_set_string (&vs, "raw");
          gst_value_list_append_value (&va, &vs);
          gst_structure_set_value (s, "stream-format", &va);
          g_value_unset (&va);
          g_value_unset (&vs);
        }
      } else if (GST_VALUE_HOLDS_LIST (v)) {
        /* already a list: append whichever of adts/raw is missing */
        gboolean contains_raw = FALSE;
        gboolean contains_adts = FALSE;
        guint m = gst_value_list_get_size (v), j;

        for (j = 0; j < m; j++) {
          const GValue *ve = gst_value_list_get_value (v, j);
          const gchar *str;

          if (G_VALUE_HOLDS_STRING (ve) && (str = g_value_get_string (ve))) {
            if (strcmp (str, "adts") == 0)
              contains_adts = TRUE;
            else if (strcmp (str, "raw") == 0)
              contains_raw = TRUE;
          }
        }

        if (contains_adts || contains_raw) {
          GValue va = G_VALUE_INIT;
          GValue vs = G_VALUE_INIT;

          g_value_init (&va, GST_TYPE_LIST);
          g_value_init (&vs, G_TYPE_STRING);
          /* copy the existing list, then extend it */
          g_value_copy (v, &va);

          if (!contains_raw) {
            g_value_set_string (&vs, "raw");
            gst_value_list_append_value (&va, &vs);
          }
          if (!contains_adts) {
            g_value_set_string (&vs, "adts");
            gst_value_list_append_value (&va, &vs);
          }

          gst_structure_set_value (s, "stream-format", &va);

          g_value_unset (&vs);
          g_value_unset (&va);
        }
      }
    }
  }
}
+
/* Implementation of the "get_sink_caps" vmethod in #GstBaseParse.
 *
 * Builds sink caps from the downstream peer's caps: fields this parser can
 * convert ("framed", and adts<->raw in "stream-format") are removed/widened
 * before intersecting with the sink template and the optional @filter.
 *
 * Returns: (transfer full): the resulting caps. */
static GstCaps *
gst_aac_parse_sink_getcaps (GstBaseParse * parse, GstCaps * filter)
{
  GstCaps *peercaps, *templ;
  GstCaps *res;

  templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));

  if (filter) {
    GstCaps *fcopy = gst_caps_copy (filter);
    /* Remove the fields we convert */
    remove_fields (fcopy);
    add_conversion_fields (fcopy);
    peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
    gst_caps_unref (fcopy);
  } else
    peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);

  if (peercaps) {
    peercaps = gst_caps_make_writable (peercaps);
    /* Remove the fields we convert */
    remove_fields (peercaps);
    add_conversion_fields (peercaps);

    res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (peercaps);
    gst_caps_unref (templ);
  } else {
    /* no peer: template ref is transferred to the result */
    res = templ;
  }

  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (res);
    res = intersection;
  }

  return res;
}
+
+static gboolean
+gst_aac_parse_src_event (GstBaseParse * parse, GstEvent * event)
+{
+ GstAacParse *aacparse = GST_AAC_PARSE (parse);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
+ aacparse->last_parsed_channels = 0;
+ aacparse->last_parsed_sample_rate = 0;
+ }
+
+ return GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+}
diff --git a/gst/audioparsers/gstaacparse.h b/gst/audioparsers/gstaacparse.h
new file mode 100644
index 0000000000..40c96ff1f8
--- /dev/null
+++ b/gst/audioparsers/gstaacparse.h
@@ -0,0 +1,105 @@
+/* GStreamer AAC parser
+ * Copyright (C) 2008 Nokia Corporation. All rights reserved.
+ *
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AAC_PARSE_H__
+#define __GST_AAC_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AAC_PARSE \
+ (gst_aac_parse_get_type())
+#define GST_AAC_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AAC_PARSE, GstAacParse))
+#define GST_AAC_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AAC_PARSE, GstAacParseClass))
+#define GST_IS_AAC_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AAC_PARSE))
+#define GST_IS_AAC_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AAC_PARSE))
+
+
+/**
+ * GstAacHeaderType:
+ * @DSPAAC_HEADER_NOT_PARSED: Header not parsed yet.
+ * @DSPAAC_HEADER_UNKNOWN: Unknown (not recognized) header.
+ * @DSPAAC_HEADER_ADIF: ADIF header found.
+ * @DSPAAC_HEADER_ADTS: ADTS header found.
+ * @DSPAAC_HEADER_LOAS: LOAS header found.
+ * @DSPAAC_HEADER_NONE: Raw stream, no header.
+ *
+ * Type header enumeration set in #header_type.
+ */
+typedef enum {
+ DSPAAC_HEADER_NOT_PARSED,
+ DSPAAC_HEADER_UNKNOWN,
+ DSPAAC_HEADER_ADIF,
+ DSPAAC_HEADER_ADTS,
+ DSPAAC_HEADER_LOAS,
+ DSPAAC_HEADER_NONE
+} GstAacHeaderType;
+
+
+typedef struct _GstAacParse GstAacParse;
+typedef struct _GstAacParseClass GstAacParseClass;
+
+/**
+ * GstAacParse:
+ *
+ * The opaque GstAacParse data structure.
+ */
+struct _GstAacParse {
+ GstBaseParse element;
+
+ /* Stream type -related info */
+ gint object_type; /* AAC audio object type -- TODO confirm exact semantics */
+ gint bitrate;
+ gint sample_rate;
+ gint channels;
+ gint mpegversion;
+ gint frame_samples;
+
+ GstAacHeaderType header_type; /* header type of the input stream */
+ GstAacHeaderType output_header_type; /* header type used on output --
+ * presumably may differ when converting */
+
+ gboolean sent_codec_tag; /* TRUE once the codec tag has been sent */
+
+ /* last negotiated config; cleared on FLUSH_STOP (see src_event) */
+ gint last_parsed_sample_rate;
+ gint last_parsed_channels;
+};
+
+/**
+ * GstAacParseClass:
+ * @parent_class: Element parent class.
+ *
+ * The opaque GstAacParseClass data structure.
+ */
+struct _GstAacParseClass {
+ GstBaseParseClass parent_class;
+};
+
+GType gst_aac_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AAC_PARSE_H__ */
diff --git a/gst/audioparsers/gstac3parse.c b/gst/audioparsers/gstac3parse.c
new file mode 100644
index 0000000000..64b3ca3c1e
--- /dev/null
+++ b/gst/audioparsers/gstac3parse.c
@@ -0,0 +1,958 @@
+/* GStreamer AC3 parser
+ * Copyright (C) 2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2009 Mark Nauwelaerts <mnauw users sf net>
+ * Copyright (C) 2009 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-ac3parse
+ * @title: ac3parse
+ * @short_description: AC3 parser
+ * @see_also: #GstAmrParse, #GstAACParse
+ *
+ * This is an AC3 parser.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=abc.ac3 ! ac3parse ! a52dec ! audioresample ! audioconvert ! autoaudiosink
+ * ]|
+ *
+ */
+
+/* TODO:
+ * - audio/ac3 to audio/x-private1-ac3 is not implemented (done in the muxer)
+ * - should accept framed and unframed input (needs decodebin fixes first)
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstaudioparserselements.h"
+#include "gstac3parse.h"
+#include <gst/base/base.h>
+#include <gst/pbutils/pbutils.h>
+
+GST_DEBUG_CATEGORY_STATIC (ac3_parse_debug);
+#define GST_CAT_DEFAULT ac3_parse_debug
+
+/* AC-3 frame-size table, indexed by the 6-bit frmsizcod field.  Each entry
+ * holds the nominal bit rate plus the syncframe length in 16-bit words for
+ * the three sample rates in fscod order (48 kHz, 44.1 kHz, 32 kHz) -- see
+ * the frame_size[fscod] lookup in gst_ac3_parse_frame_header_ac3(). */
+static const struct
+{
+ const guint bit_rate; /* nominal bit rate in kbit/s */
+ const guint frame_size[3]; /* 16-bit words per frame, indexed by fscod */
+} frmsizcod_table[38] = {
+ {32, {64, 69, 96}},
+ {32, {64, 70, 96}},
+ {40, {80, 87, 120}},
+ {40, {80, 88, 120}},
+ {48, {96, 104, 144}},
+ {48, {96, 105, 144}},
+ {56, {112, 121, 168}},
+ {56, {112, 122, 168}},
+ {64, {128, 139, 192}},
+ {64, {128, 140, 192}},
+ {80, {160, 174, 240}},
+ {80, {160, 175, 240}},
+ {96, {192, 208, 288}},
+ {96, {192, 209, 288}},
+ {112, {224, 243, 336}},
+ {112, {224, 244, 336}},
+ {128, {256, 278, 384}},
+ {128, {256, 279, 384}},
+ {160, {320, 348, 480}},
+ {160, {320, 349, 480}},
+ {192, {384, 417, 576}},
+ {192, {384, 418, 576}},
+ {224, {448, 487, 672}},
+ {224, {448, 488, 672}},
+ {256, {512, 557, 768}},
+ {256, {512, 558, 768}},
+ {320, {640, 696, 960}},
+ {320, {640, 697, 960}},
+ {384, {768, 835, 1152}},
+ {384, {768, 836, 1152}},
+ {448, {896, 975, 1344}},
+ {448, {896, 976, 1344}},
+ {512, {1024, 1114, 1536}},
+ {512, {1024, 1115, 1536}},
+ {576, {1152, 1253, 1728}},
+ {576, {1152, 1254, 1728}},
+ {640, {1280, 1393, 1920}},
+ {640, {1280, 1394, 1920}}
+};
+
+/* Sample rates indexed by the 2-bit fscod field; 3 is reserved (0 Hz) */
+static const guint fscod_rates[4] = { 48000, 44100, 32000, 0 };
+/* Channel count (excluding LFE) indexed by the 3-bit acmod field */
+static const guint acmod_chans[8] = { 2, 1, 2, 3, 3, 4, 4, 5 };
+/* Audio blocks per E-AC-3 frame, indexed by numblkscod */
+static const guint numblks[4] = { 1, 2, 3, 6 };
+
+/* Src pad: always pushes framed (E-)AC-3 with an explicit alignment */
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-ac3, framed = (boolean) true, "
+ " channels = (int) [ 1, 6 ], rate = (int) [ 8000, 48000 ], "
+ " alignment = (string) { iec61937, frame}; "
+ "audio/x-eac3, framed = (boolean) true, "
+ " channels = (int) [ 1, 6 ], rate = (int) [ 8000, 48000 ], "
+ " alignment = (string) { iec61937, frame}; "));
+
+/* Sink pad: accepts (E-)AC-3 plus MPEG-PS private1-wrapped AC-3 */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-ac3; " "audio/x-eac3; " "audio/ac3; "
+ "audio/x-private1-ac3"));
+
+static void gst_ac3_parse_finalize (GObject * object);
+
+static gboolean gst_ac3_parse_start (GstBaseParse * parse);
+static gboolean gst_ac3_parse_stop (GstBaseParse * parse);
+static GstFlowReturn gst_ac3_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstFlowReturn gst_ac3_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+static gboolean gst_ac3_parse_src_event (GstBaseParse * parse,
+ GstEvent * event);
+static GstCaps *gst_ac3_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+static gboolean gst_ac3_parse_set_sink_caps (GstBaseParse * parse,
+ GstCaps * caps);
+
+#define gst_ac3_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAc3Parse, gst_ac3_parse, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE (ac3parse, "ac3parse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_AC3_PARSE);
+
+/* GObject class init: registers pad templates, element metadata and the
+ * GstBaseParse virtual methods implemented by this element. */
+static void
+gst_ac3_parse_class_init (GstAc3ParseClass * klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (ac3_parse_debug, "ac3parse", 0,
+ "AC3 audio stream parser");
+
+ object_class->finalize = gst_ac3_parse_finalize;
+
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+
+ gst_element_class_set_static_metadata (element_class,
+ "AC3 audio stream parser", "Codec/Parser/Converter/Audio",
+ "AC3 parser", "Tim-Philipp Müller <tim centricular net>");
+
+ parse_class->start = GST_DEBUG_FUNCPTR (gst_ac3_parse_start);
+ parse_class->stop = GST_DEBUG_FUNCPTR (gst_ac3_parse_stop);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_ac3_parse_handle_frame);
+ parse_class->pre_push_frame =
+ GST_DEBUG_FUNCPTR (gst_ac3_parse_pre_push_frame);
+ parse_class->src_event = GST_DEBUG_FUNCPTR (gst_ac3_parse_src_event);
+ parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_ac3_parse_get_sink_caps);
+ parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_ac3_parse_set_sink_caps);
+}
+
+/* Reset per-stream state to "unknown" (-1 / FALSE / ALIGN_NONE) so the
+ * next parsed frame triggers fresh caps negotiation. */
+static void
+gst_ac3_parse_reset (GstAc3Parse * ac3parse)
+{
+ ac3parse->channels = -1;
+ ac3parse->sample_rate = -1;
+ ac3parse->blocks = -1;
+ ac3parse->eac = FALSE;
+ ac3parse->sent_codec_tag = FALSE;
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_NONE);
+}
+
+/* Instance init: minimum frame size of 8 bytes covers the sync word plus
+ * the header fields inspected before a full frame is available.  The
+ * original baseparse chain function is saved so set_sink_caps can switch
+ * between it and the private1-stripping chain function. */
+static void
+gst_ac3_parse_init (GstAc3Parse * ac3parse)
+{
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (ac3parse), 8);
+ gst_ac3_parse_reset (ac3parse);
+ ac3parse->baseparse_chainfunc =
+ GST_BASE_PARSE_SINK_PAD (GST_BASE_PARSE (ac3parse))->chainfunc;
+ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (ac3parse));
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (ac3parse));
+}
+
+/* GObject finalize: no instance resources to release, just chain up. */
+static void
+gst_ac3_parse_finalize (GObject * object)
+{
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GstBaseParse::start — reset stream state before streaming begins. */
+static gboolean
+gst_ac3_parse_start (GstBaseParse * parse)
+{
+ GstAc3Parse *self = GST_AC3_PARSE (parse);
+
+ GST_DEBUG_OBJECT (parse, "starting");
+ gst_ac3_parse_reset (self);
+
+ return TRUE;
+}
+
+/* GstBaseParse::stop — nothing to tear down; state is reset in start. */
+static gboolean
+gst_ac3_parse_stop (GstBaseParse * parse)
+{
+ GST_DEBUG_OBJECT (parse, "stopping");
+ return TRUE;
+}
+
+/* Choose the output alignment for E-AC-3 from what downstream advertises
+ * on audio/x-eac3 caps; plain AC-3 (and any undecided case) defaults to
+ * per-syncframe alignment. */
+static void
+gst_ac3_parse_set_alignment (GstAc3Parse * ac3parse, gboolean eac)
+{
+ GstCaps *caps;
+ GstStructure *st;
+ const gchar *str = NULL;
+ int i;
+
+ if (G_LIKELY (!eac))
+ goto done;
+
+ caps = gst_pad_get_allowed_caps (GST_BASE_PARSE_SRC_PAD (ac3parse));
+
+ if (!caps)
+ goto done;
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ st = gst_caps_get_structure (caps, i);
+
+ if (!g_str_equal (gst_structure_get_name (st), "audio/x-eac3"))
+ continue;
+
+ if ((str = gst_structure_get_string (st, "alignment"))) {
+ if (g_str_equal (str, "iec61937")) {
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_IEC61937);
+ GST_DEBUG_OBJECT (ac3parse, "picked iec61937 alignment");
+ } else if (g_str_equal (str, "frame")) {
+ /* fixed: previously compared g_str_equal() == 0, which inverted
+ * the test so "frame" was reported as an unknown alignment */
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_FRAME);
+ GST_DEBUG_OBJECT (ac3parse, "picked frame alignment");
+ } else {
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_FRAME);
+ GST_WARNING_OBJECT (ac3parse, "unknown alignment: %s", str);
+ }
+ break;
+ }
+ }
+
+ if (caps)
+ gst_caps_unref (caps);
+
+done:
+ /* default */
+ if (ac3parse->align == GST_AC3_PARSE_ALIGN_NONE) {
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_FRAME);
+ GST_DEBUG_OBJECT (ac3parse, "picked syncframe alignment");
+ }
+}
+
+/* Parse a plain AC-3 syncframe header located @skip bytes into @buf.
+ * Fills any non-NULL out parameters (frame size in bytes, sample rate,
+ * channel count incl. LFE, block count, substream id) and returns TRUE
+ * when fscod/frmsizcod are valid.  Uses unchecked bit-reader accessors,
+ * so the caller must guarantee enough mapped data (min frame size 8). */
+static gboolean
+gst_ac3_parse_frame_header_ac3 (GstAc3Parse * ac3parse, GstBuffer * buf,
+ gint skip, guint * frame_size, guint * rate, guint * chans, guint * blks,
+ guint * sid)
+{
+ GstBitReader bits;
+ GstMapInfo map;
+ guint8 fscod, frmsizcod, bsid, acmod, lfe_on, rate_scale;
+ gboolean ret = FALSE;
+
+ GST_LOG_OBJECT (ac3parse, "parsing ac3");
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_bit_reader_init (&bits, map.data, map.size);
+ gst_bit_reader_skip_unchecked (&bits, skip * 8);
+
+ /* skip syncword (16) and crc1 (16) */
+ gst_bit_reader_skip_unchecked (&bits, 16 + 16);
+ fscod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2);
+ frmsizcod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 6);
+
+ if (G_UNLIKELY (fscod == 3 || frmsizcod >= G_N_ELEMENTS (frmsizcod_table))) {
+ GST_DEBUG_OBJECT (ac3parse, "bad fscod=%d frmsizcod=%d", fscod, frmsizcod);
+ goto cleanup;
+ }
+
+ bsid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 5);
+ gst_bit_reader_skip_unchecked (&bits, 3); /* bsmod */
+ acmod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3);
+
+ /* spec not quite clear here: decoder should decode if less than 8,
+ * but seemingly only defines 6 and 8 cases */
+ /* Files with 9 and 10 happen, and seem to comply with the <= 8
+ format, so let them through. The spec says nothing about 9 and 10 */
+ if (bsid > 10) {
+ GST_DEBUG_OBJECT (ac3parse, "unexpected bsid=%d", bsid);
+ goto cleanup;
+ } else if (bsid != 8 && bsid != 6) {
+ GST_DEBUG_OBJECT (ac3parse, "undefined bsid=%d", bsid);
+ }
+
+ /* variable-length mix-level fields depend on the channel mode */
+ if ((acmod & 0x1) && (acmod != 0x1)) /* 3 front channels */
+ gst_bit_reader_skip_unchecked (&bits, 2);
+ if ((acmod & 0x4)) /* if a surround channel exists */
+ gst_bit_reader_skip_unchecked (&bits, 2);
+ if (acmod == 0x2) /* if in 2/0 mode */
+ gst_bit_reader_skip_unchecked (&bits, 2);
+
+ lfe_on = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1);
+
+ /* 6/8->0, 9->1, 10->2,
+ see http://matroska.org/technical/specs/codecid/index.html */
+ rate_scale = (CLAMP (bsid, 8, 10) - 8);
+
+ if (frame_size)
+ *frame_size = frmsizcod_table[frmsizcod].frame_size[fscod] * 2;
+ if (rate)
+ *rate = fscod_rates[fscod] >> rate_scale;
+ if (chans)
+ *chans = acmod_chans[acmod] + lfe_on;
+ if (blks)
+ *blks = 6;
+ if (sid)
+ *sid = 0;
+
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ return ret;
+}
+
+/* Parse an E-AC-3 syncframe header located @skip bytes into @buf.
+ * Fills any non-NULL out parameters and returns TRUE when strmtyp and
+ * fscod/fscod2 are valid.  The substream id (@sid) combines strmtyp bit 0
+ * and the 3-bit substreamid so dependent substreams are distinguishable. */
+static gboolean
+gst_ac3_parse_frame_header_eac3 (GstAc3Parse * ac3parse, GstBuffer * buf,
+ gint skip, guint * frame_size, guint * rate, guint * chans, guint * blks,
+ guint * sid)
+{
+ GstBitReader bits;
+ GstMapInfo map;
+ guint16 frmsiz, sample_rate, blocks;
+ guint8 strmtyp, fscod, fscod2, acmod, lfe_on, strmid, numblkscod;
+ gboolean ret = FALSE;
+
+ GST_LOG_OBJECT (ac3parse, "parsing e-ac3");
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_bit_reader_init (&bits, map.data, map.size);
+ gst_bit_reader_skip_unchecked (&bits, skip * 8);
+
+ gst_bit_reader_skip_unchecked (&bits, 16); /* syncword */
+ strmtyp = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* strmtyp */
+ if (G_UNLIKELY (strmtyp == 3)) {
+ GST_DEBUG_OBJECT (ac3parse, "bad strmtyp %d", strmtyp);
+ goto cleanup;
+ }
+
+ strmid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3); /* substreamid */
+ frmsiz = gst_bit_reader_get_bits_uint16_unchecked (&bits, 11); /* frmsiz */
+ fscod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* fscod */
+ if (fscod == 3) {
+ /* reduced sample rates: fscod2 selects half of the usual rate */
+ fscod2 = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* fscod2 */
+ if (G_UNLIKELY (fscod2 == 3)) {
+ GST_DEBUG_OBJECT (ac3parse, "invalid fscod2");
+ goto cleanup;
+ }
+ sample_rate = fscod_rates[fscod2] / 2;
+ blocks = 6;
+ } else {
+ numblkscod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2); /* numblkscod */
+ sample_rate = fscod_rates[fscod];
+ blocks = numblks[numblkscod];
+ }
+
+ acmod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3); /* acmod */
+ lfe_on = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1); /* lfeon */
+
+ gst_bit_reader_skip_unchecked (&bits, 5); /* bsid */
+
+ if (frame_size)
+ *frame_size = (frmsiz + 1) * 2; /* frmsiz counts 16-bit words minus one */
+ if (rate)
+ *rate = sample_rate;
+ if (chans)
+ *chans = acmod_chans[acmod] + lfe_on;
+ if (blks)
+ *blks = blocks;
+ if (sid)
+ *sid = (strmtyp & 0x1) << 3 | strmid;
+
+ ret = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ return ret;
+}
+
+/* Inspect the syncframe at @skip bytes into @buf and dispatch to the AC-3
+ * or E-AC-3 header parser based on the bsid field (<= 10 is AC-3,
+ * 11..16 is E-AC-3).  Returns TRUE and fills the non-NULL out parameters
+ * on success.  (The original had an unreachable duplicate debug statement
+ * after the if/else chain; removed.) */
+static gboolean
+gst_ac3_parse_frame_header (GstAc3Parse * parse, GstBuffer * buf, gint skip,
+ guint * framesize, guint * rate, guint * chans, guint * blocks,
+ guint * sid, gboolean * eac)
+{
+ GstBitReader bits;
+ guint16 sync;
+ guint8 bsid;
+ GstMapInfo map;
+ gboolean ret = FALSE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_bit_reader_init (&bits, map.data, map.size);
+
+ GST_MEMDUMP_OBJECT (parse, "AC3 frame sync", map.data, MIN (map.size, 16));
+
+ gst_bit_reader_skip_unchecked (&bits, skip * 8);
+
+ sync = gst_bit_reader_get_bits_uint16_unchecked (&bits, 16);
+ gst_bit_reader_skip_unchecked (&bits, 16 + 8);
+ bsid = gst_bit_reader_peek_bits_uint8_unchecked (&bits, 5);
+
+ if (G_UNLIKELY (sync != 0x0b77))
+ goto cleanup;
+
+ GST_LOG_OBJECT (parse, "bsid = %d", bsid);
+
+ if (bsid <= 10) {
+ /* plain AC-3 */
+ if (eac)
+ *eac = FALSE;
+ ret = gst_ac3_parse_frame_header_ac3 (parse, buf, skip, framesize, rate,
+ chans, blocks, sid);
+ } else if (bsid <= 16) {
+ /* E-AC-3 */
+ if (eac)
+ *eac = TRUE;
+ ret = gst_ac3_parse_frame_header_eac3 (parse, buf, skip, framesize, rate,
+ chans, blocks, sid);
+ } else {
+ GST_DEBUG_OBJECT (parse, "unexpected bsid %d", bsid);
+ }
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ return ret;
+}
+
+/* GstBaseParse::handle_frame — locate the 0x0b77 sync word, validate the
+ * header, gather 6 audio blocks per substream when IEC 61937 alignment is
+ * active, verify the following frame's sync word while resyncing, update
+ * src caps / frame rate on stream changes, and finish the frame. */
+static GstFlowReturn
+gst_ac3_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
+{
+ GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
+ GstBuffer *buf = frame->buffer;
+ GstByteReader reader;
+ gint off;
+ gboolean lost_sync, draining, eac, more = FALSE;
+ guint frmsiz, blocks, sid;
+ guint rate, chans;
+ gboolean update_rate = FALSE;
+ gint framesize = 0;
+ gint have_blocks = 0;
+ GstMapInfo map;
+ gboolean ret = FALSE;
+ GstFlowReturn res = GST_FLOW_OK;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ if (G_UNLIKELY (map.size < 8)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
+
+ gst_byte_reader_init (&reader, map.data, map.size);
+ off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000, 0x0b770000,
+ 0, map.size);
+
+ GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
+
+ /* didn't find anything that looks like a sync word, skip */
+ if (off < 0) {
+ /* keep the last 3 bytes: they may be the start of a split sync word */
+ *skipsize = map.size - 3;
+ goto cleanup;
+ }
+
+ /* possible frame header, but not at offset 0? skip bytes before sync */
+ if (off > 0) {
+ *skipsize = off;
+ goto cleanup;
+ }
+
+ /* make sure the values in the frame header look sane */
+ if (!gst_ac3_parse_frame_header (ac3parse, buf, 0, &frmsiz, &rate, &chans,
+ &blocks, &sid, &eac)) {
+ /* skip past this sync word (2 bytes) and search again */
+ *skipsize = off + 2;
+ goto cleanup;
+ }
+
+ GST_LOG_OBJECT (parse, "size: %u, blocks: %u, rate: %u, chans: %u", frmsiz,
+ blocks, rate, chans);
+
+ framesize = frmsiz;
+
+ if (G_UNLIKELY (g_atomic_int_get (&ac3parse->align) ==
+ GST_AC3_PARSE_ALIGN_NONE))
+ gst_ac3_parse_set_alignment (ac3parse, eac);
+
+ GST_LOG_OBJECT (parse, "got frame");
+
+ lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
+ draining = GST_BASE_PARSE_DRAINING (parse);
+
+ if (g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937) {
+ /* We need 6 audio blocks from each substream, so we keep going forwards
+ * till we have it */
+
+ g_assert (blocks > 0);
+ GST_LOG_OBJECT (ac3parse, "Need %d frames before pushing", 6 / blocks);
+
+ if (sid != 0) {
+ /* We need the first substream to be the one with id 0 */
+ GST_LOG_OBJECT (ac3parse, "Skipping till we find sid 0");
+ *skipsize = off + 2;
+ goto cleanup;
+ }
+
+ framesize = 0;
+
+ /* Loop till we have 6 blocks per substream */
+ for (have_blocks = 0; !more && have_blocks < 6; have_blocks += blocks) {
+ /* Loop till we get one frame from each substream */
+ do {
+ framesize += frmsiz;
+
+ if (!gst_byte_reader_skip (&reader, frmsiz)
+ || map.size < (framesize + 6)) {
+ /* ran out of data; ask for more below */
+ more = TRUE;
+ break;
+ }
+
+ if (!gst_ac3_parse_frame_header (ac3parse, buf, framesize, &frmsiz,
+ NULL, NULL, NULL, &sid, &eac)) {
+ *skipsize = off + 2;
+ goto cleanup;
+ }
+ } while (sid);
+ }
+
+ /* We're now at the next frame, so no need to skip if resyncing */
+ frmsiz = 0;
+ }
+
+ if (lost_sync && !draining) {
+ guint16 word = 0;
+
+ GST_DEBUG_OBJECT (ac3parse, "resyncing; checking next frame syncword");
+
+ if (more || !gst_byte_reader_skip (&reader, frmsiz) ||
+ !gst_byte_reader_get_uint16_be (&reader, &word)) {
+ GST_DEBUG_OBJECT (ac3parse, "... but not sufficient data");
+ gst_base_parse_set_min_frame_size (parse, framesize + 8);
+ *skipsize = 0;
+ goto cleanup;
+ } else {
+ if (word != 0x0b77) {
+ GST_DEBUG_OBJECT (ac3parse, "0x%x not OK", word);
+ *skipsize = off + 2;
+ goto cleanup;
+ } else {
+ /* ok, got sync now, let's assume constant frame size */
+ gst_base_parse_set_min_frame_size (parse, framesize);
+ }
+ }
+ }
+
+ /* expect to have found a frame here */
+ g_assert (framesize);
+ ret = TRUE;
+
+ /* arrange for metadata setup */
+ if (G_UNLIKELY (sid)) {
+ /* dependent frame, no need to (ac)count for or consider further */
+ GST_LOG_OBJECT (parse, "sid: %d", sid);
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_NO_FRAME;
+ /* TODO maybe also mark as DELTA_UNIT,
+ * if that does not surprise baseparse elsewhere */
+ /* occupies same time space as previous base frame */
+ if (G_LIKELY (GST_BUFFER_TIMESTAMP (buf) >= GST_BUFFER_DURATION (buf)))
+ GST_BUFFER_TIMESTAMP (buf) -= GST_BUFFER_DURATION (buf);
+ /* only shortcut if we already arranged for caps */
+ if (G_LIKELY (ac3parse->sample_rate > 0))
+ goto cleanup;
+ }
+
+ if (G_UNLIKELY (ac3parse->sample_rate != rate || ac3parse->channels != chans
+ || ac3parse->eac != eac)) {
+ GstCaps *caps = gst_caps_new_simple (eac ? "audio/x-eac3" : "audio/x-ac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, "rate", G_TYPE_INT, rate,
+ "channels", G_TYPE_INT, chans, NULL);
+ gst_caps_set_simple (caps, "alignment", G_TYPE_STRING,
+ g_atomic_int_get (&ac3parse->align) == GST_AC3_PARSE_ALIGN_IEC61937 ?
+ "iec61937" : "frame", NULL);
+ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
+ gst_caps_unref (caps);
+
+ ac3parse->sample_rate = rate;
+ ac3parse->channels = chans;
+ ac3parse->eac = eac;
+
+ update_rate = TRUE;
+ }
+
+ if (G_UNLIKELY (ac3parse->blocks != blocks)) {
+ ac3parse->blocks = blocks;
+
+ update_rate = TRUE;
+ }
+
+ if (G_UNLIKELY (update_rate))
+ gst_base_parse_set_frame_rate (parse, rate, 256 * blocks, 2, 2);
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ /* NOTE(review): map.size is read after gst_buffer_unmap(); relies on the
+ * stack GstMapInfo keeping its size field intact -- confirm this is
+ * guaranteed across GStreamer versions */
+ if (ret && framesize <= map.size) {
+ res = gst_base_parse_finish_frame (parse, frame, framesize);
+ }
+
+ return res;
+}
+
+
+/*
+ * MPEG-PS private1 streams add a 2 bytes "Audio Substream Headers" for each
+ * buffer (not each frame) with the offset of the next frame's start.
+ *
+ * Buffer 1:
+ * -------------------------------------------
+ * |firstAccUnit|AC3SyncWord|xxxxxxxxxxxxxxxxx
+ * -------------------------------------------
+ * Buffer 2:
+ * -------------------------------------------
+ * |firstAccUnit|xxxxxx|AC3SyncWord|xxxxxxxxxx
+ * -------------------------------------------
+ *
+ * These 2 bytes can be dropped safely as they do not include any timing
+ * information, only the offset to the start of the next frame.
+ *
+ * From http://stnsoft.com/DVD/ass-hdr.html:
+ * "FirstAccUnit offset to frame which corresponds to PTS value offset 0 is the
+ * last byte of FirstAccUnit, ie add the offset of byte 2 to get the AU's offset
+ * The value 0000 indicates there is no first access unit"
+ * */
+
+/* Chain function for audio/x-private1-ac3 input (MPEG-PS private streams):
+ * strips the 2-byte first-access-unit header from each buffer, splits the
+ * payload so the buffer timestamp lands on the frame the PTS belongs to,
+ * and feeds the pieces to baseparse's saved chain function.
+ * (Fixes a typo in the "Can't determine first_access" error message.) */
+static GstFlowReturn
+gst_ac3_parse_chain_priv (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+ GstAc3Parse *ac3parse = GST_AC3_PARSE (parent);
+ GstFlowReturn ret;
+ gsize size;
+ guint8 data[2];
+ gint offset;
+ gint len;
+ GstBuffer *subbuf;
+ gint first_access;
+
+ size = gst_buffer_get_size (buf);
+ if (size < 2)
+ goto not_enough_data;
+
+ gst_buffer_extract (buf, 0, data, 2);
+ first_access = (data[0] << 8) | data[1];
+
+ /* Skip the first_access header */
+ offset = 2;
+
+ if (first_access > 1) {
+ /* Length of data before first_access */
+ len = first_access - 1;
+
+ if (len <= 0 || offset + len > size)
+ goto bad_first_access_parameter;
+
+ /* part before the first access unit belongs to the previous PTS,
+ * so push it without timestamps */
+ subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len);
+ GST_BUFFER_DTS (subbuf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_PTS (subbuf) = GST_CLOCK_TIME_NONE;
+ ret = ac3parse->baseparse_chainfunc (pad, parent, subbuf);
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED) {
+ gst_buffer_unref (buf);
+ goto done;
+ }
+
+ offset += len;
+ len = size - offset;
+
+ if (len > 0) {
+ /* remainder starts at the first access unit: carries the timestamps */
+ subbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, len);
+ GST_BUFFER_PTS (subbuf) = GST_BUFFER_PTS (buf);
+ GST_BUFFER_DTS (subbuf) = GST_BUFFER_DTS (buf);
+
+ ret = ac3parse->baseparse_chainfunc (pad, parent, subbuf);
+ }
+ gst_buffer_unref (buf);
+ } else {
+ /* first_access = 0 or 1, so if there's a timestamp it applies to the first byte */
+ subbuf =
+ gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset,
+ size - offset);
+ GST_BUFFER_PTS (subbuf) = GST_BUFFER_PTS (buf);
+ GST_BUFFER_DTS (subbuf) = GST_BUFFER_DTS (buf);
+ gst_buffer_unref (buf);
+ ret = ac3parse->baseparse_chainfunc (pad, parent, subbuf);
+ }
+
+done:
+ return ret;
+
+/* ERRORS */
+not_enough_data:
+ {
+ GST_ELEMENT_ERROR (GST_ELEMENT (ac3parse), STREAM, FORMAT, (NULL),
+ ("Insufficient data in buffer. Can't determine first_access"));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+bad_first_access_parameter:
+ {
+ GST_ELEMENT_ERROR (GST_ELEMENT (ac3parse), STREAM, FORMAT, (NULL),
+ ("Bad first_access parameter (%d) in buffer", first_access));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+}
+
+/* GstBaseParse::pre_push_frame — post the audio codec tag once the src
+ * caps are negotiated, then mark every frame for segment clipping. */
+static GstFlowReturn
+gst_ac3_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+{
+ GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
+
+ if (!ac3parse->sent_codec_tag) {
+ GstTagList *taglist;
+ GstCaps *caps;
+
+ /* codec tag */
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+ if (G_UNLIKELY (caps == NULL)) {
+ if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+ GST_INFO_OBJECT (parse, "Src pad is flushing");
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ taglist = gst_tag_list_new_empty ();
+ gst_pb_utils_add_codec_description_to_tag_list (taglist,
+ GST_TAG_AUDIO_CODEC, caps);
+ gst_caps_unref (caps);
+
+ gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (taglist);
+
+ /* also signals the end of first-frame processing */
+ ac3parse->sent_codec_tag = TRUE;
+ }
+
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+ return GST_FLOW_OK;
+}
+
+/* GstBaseParse::src_event — handle the custom upstream
+ * "ac3parse-set-alignment" event (sent e.g. by sinks that want IEC 61937
+ * payloads); anything else chains up.  Now tolerates an event without an
+ * "alignment" field: gst_structure_get_string() returns NULL in that case
+ * and g_str_equal() would have dereferenced it. */
+static gboolean
+gst_ac3_parse_src_event (GstBaseParse * parse, GstEvent * event)
+{
+ GstAc3Parse *ac3parse = GST_AC3_PARSE (parse);
+
+ if (G_UNLIKELY (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_UPSTREAM) &&
+ gst_event_has_name (event, "ac3parse-set-alignment")) {
+ const GstStructure *st = gst_event_get_structure (event);
+ const gchar *align = gst_structure_get_string (st, "alignment");
+
+ if (align == NULL) {
+ /* missing field: fall back to frame alignment instead of crashing */
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_FRAME);
+ GST_WARNING_OBJECT (ac3parse, "Alignment request without alignment "
+ "field, reverting to frame alignment.");
+ } else if (g_str_equal (align, "iec61937")) {
+ GST_DEBUG_OBJECT (ac3parse, "Switching to iec61937 alignment");
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_IEC61937);
+ } else if (g_str_equal (align, "frame")) {
+ GST_DEBUG_OBJECT (ac3parse, "Switching to frame alignment");
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_FRAME);
+ } else {
+ g_atomic_int_set (&ac3parse->align, GST_AC3_PARSE_ALIGN_FRAME);
+ GST_WARNING_OBJECT (ac3parse, "Got unknown alignment request (%s) "
+ "reverting to frame alignment.", align);
+ }
+
+ gst_event_unref (event);
+ return TRUE;
+ }
+
+ return GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+}
+
+/* Drop the "framed" and "alignment" fields from every structure in @caps;
+ * this parser can produce any framing/alignment itself. */
+static void
+remove_fields (GstCaps * caps)
+{
+ guint idx;
+ guint count = gst_caps_get_size (caps);
+
+ for (idx = 0; idx < count; idx++) {
+ GstStructure *structure = gst_caps_get_structure (caps, idx);
+
+ gst_structure_remove_fields (structure, "framed", "alignment", NULL);
+ }
+}
+
+/* Extend @caps for sink-caps negotiation.  With @add_private TRUE, append
+ * an audio/x-private1-ac3 twin of every non-private structure (so wrapped
+ * MPEG-PS input is accepted too); with FALSE, replace each private1
+ * structure by x-ac3 and x-eac3 equivalents and keep the rest (downstream
+ * is never asked about private1).  Takes ownership of @caps and returns
+ * the resulting caps. */
+static GstCaps *
+extend_caps (GstCaps * caps, gboolean add_private)
+{
+ guint i, n;
+ GstCaps *ncaps = gst_caps_new_empty ();
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ if (add_private && !gst_structure_has_name (s, "audio/x-private1-ac3")) {
+ GstStructure *ns = gst_structure_copy (s);
+ gst_structure_set_name (ns, "audio/x-private1-ac3");
+ gst_caps_append_structure (ncaps, ns);
+ } else if (!add_private &&
+ gst_structure_has_name (s, "audio/x-private1-ac3")) {
+ GstStructure *ns = gst_structure_copy (s);
+ gst_structure_set_name (ns, "audio/x-ac3");
+ gst_caps_append_structure (ncaps, ns);
+ ns = gst_structure_copy (s);
+ gst_structure_set_name (ns, "audio/x-eac3");
+ gst_caps_append_structure (ncaps, ns);
+ } else if (!add_private) {
+ gst_caps_append_structure (ncaps, gst_structure_copy (s));
+ }
+ }
+
+ if (add_private) {
+ /* keep originals and add the private1 twins */
+ gst_caps_append (caps, ncaps);
+ } else {
+ /* replace input caps entirely by the rewritten set */
+ gst_caps_unref (caps);
+ caps = ncaps;
+ }
+
+ return caps;
+}
+
+/* Sink-pad getcaps: proxy downstream caps upstream, stripping the fields
+ * this parser can convert ("framed"/"alignment") and extending with the
+ * private1 variant, intersected with the template and @filter. */
+static GstCaps *
+gst_ac3_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+ GstCaps *peercaps, *templ;
+ GstCaps *res;
+
+ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+ if (filter) {
+ GstCaps *fcopy = gst_caps_copy (filter);
+ /* Remove the fields we convert */
+ remove_fields (fcopy);
+ /* we do not ask downstream to handle x-private1-ac3 */
+ fcopy = extend_caps (fcopy, FALSE);
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
+ gst_caps_unref (fcopy);
+ } else
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
+
+ if (peercaps) {
+ /* Remove the framed and alignment field. We can convert
+ * between different alignments. */
+ peercaps = gst_caps_make_writable (peercaps);
+ remove_fields (peercaps);
+ /* also allow for x-private1-ac3 input */
+ peercaps = extend_caps (peercaps, TRUE);
+
+ res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (peercaps);
+ gst_caps_unref (templ);
+ } else {
+ /* no peer: fall back to template caps (templ ownership moves to res) */
+ res = templ;
+ }
+
+ if (filter) {
+ GstCaps *intersection;
+
+ intersection =
+ gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = intersection;
+ }
+
+ return res;
+}
+
+/* GstBaseParse::set_sink_caps — pick the chain function: private1 input
+ * needs its 2-byte access-unit header stripped before baseparse sees it. */
+static gboolean
+gst_ac3_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps)
+{
+ GstAc3Parse *self = GST_AC3_PARSE (parse);
+ GstStructure *str = gst_caps_get_structure (caps, 0);
+ gboolean is_priv = gst_structure_has_name (str, "audio/x-private1-ac3");
+
+ gst_pad_set_chain_function (parse->sinkpad,
+ is_priv ? gst_ac3_parse_chain_priv : self->baseparse_chainfunc);
+
+ return TRUE;
+}
diff --git a/gst/audioparsers/gstac3parse.h b/gst/audioparsers/gstac3parse.h
new file mode 100644
index 0000000000..81e2104d5e
--- /dev/null
+++ b/gst/audioparsers/gstac3parse.h
@@ -0,0 +1,83 @@
+/* GStreamer AC3 parser
+ * Copyright (C) 2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2009 Mark Nauwelaerts <mnauw users sf net>
+ * Copyright (C) 2009 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AC3_PARSE_H__
+#define __GST_AC3_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AC3_PARSE \
+ (gst_ac3_parse_get_type())
+#define GST_AC3_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AC3_PARSE, GstAc3Parse))
+#define GST_AC3_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AC3_PARSE, GstAc3ParseClass))
+#define GST_IS_AC3_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AC3_PARSE))
+#define GST_IS_AC3_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AC3_PARSE))
+
+typedef struct _GstAc3Parse GstAc3Parse;
+typedef struct _GstAc3ParseClass GstAc3ParseClass;
+
+/* Output alignment modes: pass data unaligned, align output buffers to
+ * (E)AC-3 frame boundaries, or to IEC 61937 payload units. */
+enum {
+  GST_AC3_PARSE_ALIGN_NONE,
+  GST_AC3_PARSE_ALIGN_FRAME,
+  GST_AC3_PARSE_ALIGN_IEC61937,
+};
+
+/**
+ * GstAc3Parse:
+ *
+ * The opaque GstAc3Parse object
+ */
+struct _GstAc3Parse {
+  GstBaseParse baseparse;
+
+  /*< private >*/
+  gint sample_rate;             /* stream sample rate as last parsed */
+  gint channels;                /* stream channel count as last parsed */
+  gint blocks;                  /* audio blocks per frame as last parsed */
+  gboolean eac;                 /* TRUE if stream is Enhanced AC-3 (E-AC-3) */
+  gboolean sent_codec_tag;      /* codec tag already posted downstream */
+  gint align;                   /* selected GST_AC3_PARSE_ALIGN_* mode */
+  GstPadChainFunction baseparse_chainfunc;      /* saved GstBaseParse chain
+                                                 * function; restored by
+                                                 * set_sink_caps() when input
+                                                 * is not x-private1-ac3 */
+};
+
+/**
+ * GstAc3ParseClass:
+ * @parent_class: Element parent class.
+ *
+ * The opaque GstAc3ParseClass data structure.
+ */
+struct _GstAc3ParseClass {
+  GstBaseParseClass baseparse_class;
+};
+
+GType gst_ac3_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AC3_PARSE_H__ */
diff --git a/gst/audioparsers/gstamrparse.c b/gst/audioparsers/gstamrparse.c
new file mode 100644
index 0000000000..0ef612e1ad
--- /dev/null
+++ b/gst/audioparsers/gstamrparse.c
@@ -0,0 +1,457 @@
+/* GStreamer Adaptive Multi-Rate parser plugin
+ * Copyright (C) 2006 Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) 2008 Nokia Corporation. All rights reserved.
+ *
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-amrparse
+ * @title: amrparse
+ * @short_description: AMR parser
+ * @see_also: #GstAmrnbDec, #GstAmrnbEnc
+ *
+ * This is an AMR parser capable of handling both narrow-band and wideband
+ * formats.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=abc.amr ! amrparse ! amrdec ! audioresample ! audioconvert ! alsasink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstaudioparserselements.h"
+#include "gstamrparse.h"
+#include <gst/pbutils/pbutils.h>
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/AMR, " "rate = (int) 8000, " "channels = (int) 1;"
+ "audio/AMR-WB, " "rate = (int) 16000, " "channels = (int) 1;")
+ );
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-amr-nb-sh; audio/x-amr-wb-sh"));
+
+GST_DEBUG_CATEGORY_STATIC (amrparse_debug);
+#define GST_CAT_DEFAULT amrparse_debug
+
+static const gint block_size_nb[16] =
+ { 12, 13, 15, 17, 19, 20, 26, 31, 5, 0, 0, 0, 0, 0, 0, 0 };
+
+static const gint block_size_wb[16] =
+ { 17, 23, 32, 36, 40, 46, 50, 58, 60, 5, -1, -1, -1, -1, 0, 0 };
+
+/* AMR has a "hardcoded" framerate of 50fps */
+#define AMR_FRAMES_PER_SECOND 50
+#define AMR_FRAME_DURATION (GST_SECOND/AMR_FRAMES_PER_SECOND)
+#define AMR_MIME_HEADER_SIZE 9
+
+static gboolean gst_amr_parse_start (GstBaseParse * parse);
+static gboolean gst_amr_parse_stop (GstBaseParse * parse);
+
+static gboolean gst_amr_parse_sink_setcaps (GstBaseParse * parse,
+ GstCaps * caps);
+static GstCaps *gst_amr_parse_sink_getcaps (GstBaseParse * parse,
+ GstCaps * filter);
+
+static GstFlowReturn gst_amr_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstFlowReturn gst_amr_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+
+G_DEFINE_TYPE (GstAmrParse, gst_amr_parse, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE (amrparse, "amrparse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_AMR_PARSE);
+
+/**
+ * gst_amr_parse_class_init:
+ * @klass: GstAmrParseClass.
+ *
+ * Registers pad templates and element metadata, and hooks up the
+ * #GstBaseParse virtual methods implemented by this parser.
+ */
+static void
+gst_amr_parse_class_init (GstAmrParseClass * klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (amrparse_debug, "amrparse", 0,
+      "AMR-NB audio stream parser");
+
+  gst_element_class_add_static_pad_template (element_class, &sink_template);
+  gst_element_class_add_static_pad_template (element_class, &src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "AMR audio stream parser", "Codec/Parser/Audio",
+      "Adaptive Multi-Rate audio parser",
+      "Ronald Bultje <rbultje@ronald.bitfreak.net>");
+
+  /* GstBaseParse drives the parsing; we only implement the vfuncs */
+  parse_class->start = GST_DEBUG_FUNCPTR (gst_amr_parse_start);
+  parse_class->stop = GST_DEBUG_FUNCPTR (gst_amr_parse_stop);
+  parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_amr_parse_sink_setcaps);
+  parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_amr_parse_sink_getcaps);
+  parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_amr_parse_handle_frame);
+  parse_class->pre_push_frame =
+      GST_DEBUG_FUNCPTR (gst_amr_parse_pre_push_frame);
+}
+
+
+/**
+ * gst_amr_parse_init:
+ * @amrparse: #GstAmrParse
+ *
+ * Instance initialisation: set the initial minimum frame size and allow
+ * the sink pad to accept caps by template/intersection.
+ */
+static void
+gst_amr_parse_init (GstAmrParse * amrparse)
+{
+  /* init rest */
+  /* 62 bytes covers the largest wideband frame (60 + header byte) plus
+   * one byte of lookahead; narrowband lowers this to 32 later in
+   * gst_amr_parse_set_src_caps() */
+  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 62);
+  GST_DEBUG ("initialized");
+  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (amrparse));
+  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (amrparse));
+}
+
+
+/**
+ * gst_amr_parse_set_src_caps:
+ * @amrparse: #GstAmrParse.
+ *
+ * Set source pad caps according to current knowledge about the
+ * audio stream: audio/AMR-WB (16 kHz) when in wideband mode, otherwise
+ * audio/AMR (8 kHz); both are mono by definition.
+ *
+ * Returns: TRUE if caps were successfully set.
+ */
+static gboolean
+gst_amr_parse_set_src_caps (GstAmrParse * amrparse)
+{
+  GstCaps *src_caps = NULL;
+  gboolean res = FALSE;
+
+  if (amrparse->wide) {
+    GST_DEBUG_OBJECT (amrparse, "setting srcpad caps to AMR-WB");
+    src_caps = gst_caps_new_simple ("audio/AMR-WB",
+        "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 16000, NULL);
+  } else {
+    GST_DEBUG_OBJECT (amrparse, "setting srcpad caps to AMR-NB");
+    /* Max. size of NB frame is 31 bytes, so we can set the min. frame
+       size to 32 (+1 for next frame header) */
+    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (amrparse), 32);
+    src_caps = gst_caps_new_simple ("audio/AMR",
+        "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 8000, NULL);
+  }
+  gst_pad_use_fixed_caps (GST_BASE_PARSE (amrparse)->srcpad);
+  res = gst_pad_set_caps (GST_BASE_PARSE (amrparse)->srcpad, src_caps);
+  gst_caps_unref (src_caps);
+  return res;
+}
+
+
+/**
+ * gst_amr_parse_sink_setcaps:
+ * @parse: #GstBaseParse.
+ * @caps: #GstCaps with one of the "-sh" (storage-header-less) sink names.
+ *
+ * Select narrowband or wideband mode from the incoming caps name.  With
+ * these caps the "#!AMR..." MIME header is already consumed upstream, so
+ * header scanning is disabled and frame rate (fixed 50 fps) is set right
+ * away.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_amr_parse_sink_setcaps (GstBaseParse * parse, GstCaps * caps)
+{
+  GstAmrParse *amrparse;
+  GstStructure *structure;
+  const gchar *name;
+
+  amrparse = GST_AMR_PARSE (parse);
+  structure = gst_caps_get_structure (caps, 0);
+  name = gst_structure_get_name (structure);
+
+  GST_DEBUG_OBJECT (amrparse, "setcaps: %s", name);
+
+  if (!strncmp (name, "audio/x-amr-wb-sh", 17)) {
+    amrparse->block_size = block_size_wb;
+    amrparse->wide = 1;
+  } else if (!strncmp (name, "audio/x-amr-nb-sh", 17)) {
+    amrparse->block_size = block_size_nb;
+    amrparse->wide = 0;
+  } else {
+    GST_WARNING ("Unknown caps");
+    return FALSE;
+  }
+
+  /* MIME header was stripped upstream; don't look for it */
+  amrparse->need_header = FALSE;
+  gst_base_parse_set_frame_rate (GST_BASE_PARSE (amrparse), 50, 1, 2, 2);
+  gst_amr_parse_set_src_caps (amrparse);
+  return TRUE;
+}
+
+/**
+ * gst_amr_parse_parse_header:
+ * @amrparse: #GstAmrParse
+ * @data: Header data to be parsed (at least AMR_MIME_HEADER_SIZE bytes).
+ * @skipsize: Output argument where the header size (number of bytes to
+ *            skip before frame data starts) will be stored.
+ *
+ * Check if the given data contains an AMR mime header ("#!AMR-WB\n" for
+ * wideband, "#!AMR\n" for narrowband) and configure the parser and the
+ * source pad caps accordingly.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_amr_parse_parse_header (GstAmrParse * amrparse,
+    const guint8 * data, gint * skipsize)
+{
+  GST_DEBUG_OBJECT (amrparse, "Parsing header data");
+
+  if (!memcmp (data, "#!AMR-WB\n", 9)) {
+    GST_DEBUG_OBJECT (amrparse, "AMR-WB detected");
+    amrparse->block_size = block_size_wb;
+    amrparse->wide = TRUE;
+    *skipsize = amrparse->header = 9;
+  } else if (!memcmp (data, "#!AMR\n", 6)) {
+    GST_DEBUG_OBJECT (amrparse, "AMR-NB detected");
+    amrparse->block_size = block_size_nb;
+    amrparse->wide = FALSE;
+    *skipsize = amrparse->header = 6;
+  } else
+    return FALSE;
+
+  gst_amr_parse_set_src_caps (amrparse);
+  return TRUE;
+}
+
+
+/**
+ * gst_amr_parse_handle_frame:
+ * @parse: #GstBaseParse.
+ * @frame: #GstBaseParseFrame holding the input buffer.
+ * @skipsize: Output variable which tells how much data needs to be skipped
+ *            until a frame header is found.
+ *
+ * Implementation of "handle_frame" vmethod in #GstBaseParse class:
+ * optionally consume the MIME header first, then locate one AMR frame
+ * (1 header byte + mode-dependent payload from the block size table) and
+ * finish it.
+ *
+ * Returns: a #GstFlowReturn; GST_FLOW_OK with *skipsize set while (re)syncing.
+ */
+static GstFlowReturn
+gst_amr_parse_handle_frame (GstBaseParse * parse,
+    GstBaseParseFrame * frame, gint * skipsize)
+{
+  GstBuffer *buffer;
+  GstMapInfo map;
+  gint fsize = 0, mode, dsize;
+  GstAmrParse *amrparse;
+  GstFlowReturn ret = GST_FLOW_OK;
+  gboolean found = FALSE;
+
+  amrparse = GST_AMR_PARSE (parse);
+  buffer = frame->buffer;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  dsize = map.size;
+
+  GST_LOG ("buffer: %d bytes", dsize);
+
+  if (amrparse->need_header) {
+    /* raw file input: the "#!AMR..." MIME header must come first */
+    if (dsize >= AMR_MIME_HEADER_SIZE &&
+        gst_amr_parse_parse_header (amrparse, map.data, skipsize)) {
+      amrparse->need_header = FALSE;
+      gst_base_parse_set_frame_rate (GST_BASE_PARSE (amrparse), 50, 1, 2, 2);
+    } else {
+      GST_WARNING ("media doesn't look like a AMR format");
+    }
+    /* We return FALSE, so this frame won't get pushed forward. Instead,
+       the "skip" value is set, so next time we will receive a valid frame. */
+    goto done;
+  }
+
+  *skipsize = 1;
+  /* Does this look like a possible frame header candidate?
+   * (top bit and the two padding bits must be zero) */
+  if ((map.data[0] & 0x83) == 0) {
+    /* Yep. Retrieve the frame size */
+    mode = (map.data[0] >> 3) & 0x0F;
+    fsize = amrparse->block_size[mode] + 1;     /* +1 for the header byte */
+
+    /* We recognize this data as a valid frame when:
+     * - We are in sync. There is no need for extra checks then
+     * - We are in EOS. There might not be enough data to check next frame
+     * - Sync is lost, but the following data after this frame seem
+     * to contain a valid header as well (and there is enough data to
+     * perform this check)
+     */
+    if (fsize) {
+      *skipsize = 0;
+      /* in sync, no further check */
+      if (!GST_BASE_PARSE_LOST_SYNC (parse)) {
+        found = TRUE;
+      } else if (dsize > fsize) {
+        /* enough data, check for next sync */
+        if ((map.data[fsize] & 0x83) == 0)
+          found = TRUE;
+      } else if (GST_BASE_PARSE_DRAINING (parse)) {
+        /* not enough, but draining, so ok */
+        found = TRUE;
+      }
+    }
+  }
+
+done:
+  gst_buffer_unmap (buffer, &map);
+
+  /* map.size remains valid after unmap (it is a by-value struct field) */
+  if (found && fsize <= map.size) {
+    ret = gst_base_parse_finish_frame (parse, frame, fsize);
+  }
+
+  return ret;
+}
+
+/**
+ * gst_amr_parse_start:
+ * @parse: #GstBaseParse.
+ *
+ * Implementation of "start" vmethod in #GstBaseParse class: reset the
+ * per-stream state so a fresh MIME header scan and codec tag are done.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_amr_parse_start (GstBaseParse * parse)
+{
+  GstAmrParse *amrparse;
+
+  amrparse = GST_AMR_PARSE (parse);
+  GST_DEBUG ("start");
+  amrparse->need_header = TRUE;
+  amrparse->header = 0;
+  amrparse->sent_codec_tag = FALSE;
+  return TRUE;
+}
+
+
+/**
+ * gst_amr_parse_stop:
+ * @parse: #GstBaseParse.
+ *
+ * Implementation of "stop" vmethod in #GstBaseParse class: put the header
+ * state back so a restarted stream is scanned from scratch.
+ *
+ * Returns: TRUE on success.
+ */
+static gboolean
+gst_amr_parse_stop (GstBaseParse * parse)
+{
+  GstAmrParse *amrparse;
+
+  amrparse = GST_AMR_PARSE (parse);
+  GST_DEBUG ("stop");
+  amrparse->need_header = TRUE;
+  amrparse->header = 0;
+  return TRUE;
+}
+
+/* get_sink_caps vfunc: proxy downstream caps upstream, renaming the
+ * decoded names (audio/AMR, audio/AMR-WB) to the corresponding "-sh"
+ * sink names this parser accepts. */
+static GstCaps *
+gst_amr_parse_sink_getcaps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peercaps, *templ;
+  GstCaps *res;
+
+
+  templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+  peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), filter);
+
+  if (peercaps) {
+    guint i, n;
+
+    /* Rename structure names */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      GstStructure *s = gst_caps_get_structure (peercaps, i);
+
+      if (gst_structure_has_name (s, "audio/AMR"))
+        gst_structure_set_name (s, "audio/x-amr-nb-sh");
+      else
+        gst_structure_set_name (s, "audio/x-amr-wb-sh");
+    }
+
+    res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+    res = gst_caps_make_writable (res);
+    /* Append the template caps because we still want to accept
+     * caps without any fields in the case upstream does not
+     * know anything.
+     */
+    gst_caps_append (res, templ);
+  } else {
+    /* no peer: just offer the template caps (ownership moves to res) */
+    res = templ;
+  }
+
+  if (filter) {
+    GstCaps *intersection;
+
+    intersection =
+        gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (res);
+    res = intersection;
+  }
+
+  return res;
+}
+
+/* pre_push_frame vfunc: before the first buffer is pushed, derive an
+ * audio-codec tag from the negotiated src caps and post it; afterwards
+ * just flag every frame for segment clipping. */
+static GstFlowReturn
+gst_amr_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+{
+  GstAmrParse *amrparse = GST_AMR_PARSE (parse);
+
+  if (!amrparse->sent_codec_tag) {
+    GstTagList *taglist;
+    GstCaps *caps;
+
+    /* codec tag */
+    caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+    if (G_UNLIKELY (caps == NULL)) {
+      /* distinguish shutdown from a genuine negotiation failure */
+      if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+        GST_INFO_OBJECT (parse, "Src pad is flushing");
+        return GST_FLOW_FLUSHING;
+      } else {
+        GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+        return GST_FLOW_NOT_NEGOTIATED;
+      }
+    }
+
+    taglist = gst_tag_list_new_empty ();
+    gst_pb_utils_add_codec_description_to_tag_list (taglist,
+        GST_TAG_AUDIO_CODEC, caps);
+    gst_caps_unref (caps);
+
+    gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+    gst_tag_list_unref (taglist);
+
+    /* also signals the end of first-frame processing */
+    amrparse->sent_codec_tag = TRUE;
+  }
+
+  frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+  return GST_FLOW_OK;
+}
diff --git a/gst/audioparsers/gstamrparse.h b/gst/audioparsers/gstamrparse.h
new file mode 100644
index 0000000000..bba29c0e89
--- /dev/null
+++ b/gst/audioparsers/gstamrparse.h
@@ -0,0 +1,78 @@
+/* GStreamer Adaptive Multi-Rate parser
+ * Copyright (C) 2004 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2008 Nokia Corporation. All rights reserved.
+ *
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AMR_PARSE_H__
+#define __GST_AMR_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AMR_PARSE \
+ (gst_amr_parse_get_type())
+#define GST_AMR_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_AMR_PARSE, GstAmrParse))
+#define GST_AMR_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_AMR_PARSE, GstAmrParseClass))
+#define GST_IS_AMR_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_AMR_PARSE))
+#define GST_IS_AMR_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_AMR_PARSE))
+
+
+typedef struct _GstAmrParse GstAmrParse;
+typedef struct _GstAmrParseClass GstAmrParseClass;
+
+/**
+ * GstAmrParse:
+ * @element: the parent element.
+ * @block_size: Pointer to frame size lookup table.
+ * @need_header: Tells whether the MIME header should be read in the beginning.
+ * @wide: Wideband mode.
+ *
+ * The opaque GstAmrParse data structure.
+ */
+struct _GstAmrParse {
+  GstBaseParse element;
+  const gint *block_size;       /* block_size_nb or block_size_wb table */
+  gboolean need_header;
+  gboolean sent_codec_tag;      /* codec tag already posted downstream */
+  gint header;                  /* size of the "#!AMR..." header found (0 = none) */
+  gboolean wide;
+};
+
+/**
+ * GstAmrParseClass:
+ * @parent_class: Element parent class.
+ *
+ * The opaque GstAmrParseClass data structure.
+ */
+struct _GstAmrParseClass {
+  GstBaseParseClass parent_class;
+};
+
+GType gst_amr_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AMR_PARSE_H__ */
diff --git a/gst/audioparsers/gstaudioparserselements.h b/gst/audioparsers/gstaudioparserselements.h
new file mode 100644
index 0000000000..2cf1369739
--- /dev/null
+++ b/gst/audioparsers/gstaudioparserselements.h
@@ -0,0 +1,39 @@
+/* GStreamer audio parsers
+ * Copyright (C) 2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Julian Bouzas <julian.bouzas@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUDIOPARSERS_ELEMENTS_H__
+#define __GST_AUDIOPARSERS_ELEMENTS_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+GST_ELEMENT_REGISTER_DECLARE (aacparse);
+GST_ELEMENT_REGISTER_DECLARE (amrparse);
+GST_ELEMENT_REGISTER_DECLARE (ac3parse);
+GST_ELEMENT_REGISTER_DECLARE (dcaparse);
+GST_ELEMENT_REGISTER_DECLARE (flacparse);
+GST_ELEMENT_REGISTER_DECLARE (mpegaudioparse);
+GST_ELEMENT_REGISTER_DECLARE (sbcparse);
+GST_ELEMENT_REGISTER_DECLARE (wavpackparse);
+
+G_END_DECLS
+
+#endif /* __GST_AUDIOPARSERS_ELEMENTS_H__ */
diff --git a/gst/audioparsers/gstdcaparse.c b/gst/audioparsers/gstdcaparse.c
new file mode 100644
index 0000000000..e9c870250b
--- /dev/null
+++ b/gst/audioparsers/gstdcaparse.c
@@ -0,0 +1,622 @@
+/* GStreamer DCA parser
+ * Copyright (C) 2010 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-dcaparse
+ * @title: dcaparse
+ * @short_description: DCA (DTS Coherent Acoustics) parser
+ * @see_also: #GstAmrParse, #GstAACParse, #GstAc3Parse
+ *
+ * This is a DCA (DTS Coherent Acoustics) parser.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=abc.dts ! dcaparse ! dtsdec ! audioresample ! audioconvert ! autoaudiosink
+ * ]|
+ *
+ */
+
+/* TODO:
+ * - should accept framed and unframed input (needs decodebin fixes first)
+ * - seeking in raw .dts files doesn't seem to work, but duration estimate ok
+ *
+ * - if frames have 'odd' durations, the frame durations (plus timestamps)
+ * aren't adjusted up occasionally to make up for rounding error gaps.
+ * (e.g. if 512 samples per frame @ 48kHz = 10.666666667 ms/frame)
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstaudioparserselements.h"
+#include "gstdcaparse.h"
+#include <gst/base/base.h>
+#include <gst/pbutils/pbutils.h>
+
+GST_DEBUG_CATEGORY_STATIC (dca_parse_debug);
+#define GST_CAT_DEFAULT dca_parse_debug
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-dts,"
+ " framed = (boolean) true,"
+ " channels = (int) [ 1, 8 ],"
+ " rate = (int) [ 8000, 192000 ],"
+ " depth = (int) { 14, 16 },"
+ " endianness = (int) { LITTLE_ENDIAN, BIG_ENDIAN }, "
+ " block-size = (int) [ 1, MAX], " " frame-size = (int) [ 1, MAX]"));
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-dts; " "audio/x-private1-dts"));
+
+static void gst_dca_parse_finalize (GObject * object);
+
+static gboolean gst_dca_parse_start (GstBaseParse * parse);
+static gboolean gst_dca_parse_stop (GstBaseParse * parse);
+static GstFlowReturn gst_dca_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstFlowReturn gst_dca_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+static GstCaps *gst_dca_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+static gboolean gst_dca_parse_set_sink_caps (GstBaseParse * parse,
+ GstCaps * caps);
+
+#define gst_dca_parse_parent_class parent_class
+G_DEFINE_TYPE (GstDcaParse, gst_dca_parse, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE (dcaparse, "dcaparse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_DCA_PARSE);
+
+/* Class init: register the GstBaseParse vfuncs, pad templates and
+ * element metadata for the DCA (DTS) parser. */
+static void
+gst_dca_parse_class_init (GstDcaParseClass * klass)
+{
+  GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (dca_parse_debug, "dcaparse", 0,
+      "DCA audio stream parser");
+
+  object_class->finalize = gst_dca_parse_finalize;
+
+  parse_class->start = GST_DEBUG_FUNCPTR (gst_dca_parse_start);
+  parse_class->stop = GST_DEBUG_FUNCPTR (gst_dca_parse_stop);
+  parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_dca_parse_handle_frame);
+  parse_class->pre_push_frame =
+      GST_DEBUG_FUNCPTR (gst_dca_parse_pre_push_frame);
+  parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_dca_parse_get_sink_caps);
+  parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_dca_parse_set_sink_caps);
+
+  gst_element_class_add_static_pad_template (element_class, &sink_template);
+  gst_element_class_add_static_pad_template (element_class, &src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "DTS Coherent Acoustics audio stream parser", "Codec/Parser/Audio",
+      "DCA parser", "Tim-Philipp Müller <tim centricular net>");
+}
+
+/* Reset all stream-derived state to "unknown" so the next parsed frame
+ * re-triggers caps (re)negotiation and codec tagging; called from _init()
+ * and from start(). */
+static void
+gst_dca_parse_reset (GstDcaParse * dcaparse)
+{
+  dcaparse->channels = -1;
+  dcaparse->rate = -1;
+  dcaparse->depth = -1;
+  dcaparse->endianness = -1;
+  dcaparse->block_size = -1;
+  dcaparse->frame_size = -1;
+  dcaparse->last_sync = 0;      /* no known sync marker yet */
+  dcaparse->sent_codec_tag = FALSE;
+}
+
+/* Instance init: set the minimum frame size, reset stream state and
+ * remember the stock sink-pad chain function so set_sink_caps() can
+ * restore it (it is replaced for x-private1-dts input). */
+static void
+gst_dca_parse_init (GstDcaParse * dcaparse)
+{
+  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (dcaparse),
+      DCA_MIN_FRAMESIZE);
+  gst_dca_parse_reset (dcaparse);
+  dcaparse->baseparse_chainfunc =
+      GST_BASE_PARSE_SINK_PAD (GST_BASE_PARSE (dcaparse))->chainfunc;
+
+  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (dcaparse));
+  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (dcaparse));
+}
+
+/* Finalize: nothing of our own to free, just chain up to the parent. */
+static void
+gst_dca_parse_finalize (GObject * object)
+{
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* start vfunc: clear cached stream parameters before parsing begins. */
+static gboolean
+gst_dca_parse_start (GstBaseParse * parse)
+{
+  GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
+
+  GST_DEBUG_OBJECT (parse, "starting");
+
+  gst_dca_parse_reset (dcaparse);
+
+  return TRUE;
+}
+
+/* stop vfunc: no resources to release. */
+static gboolean
+gst_dca_parse_stop (GstBaseParse * parse)
+{
+  GST_DEBUG_OBJECT (parse, "stopping");
+
+  return TRUE;
+}
+
+/* Parse and sanity-check a DTS core frame header at the reader position.
+ * All four stream flavours are handled (raw / 14-bit-packed, each in big
+ * and little endian); in 14-bit mode the first 16 header bytes are first
+ * repacked into the raw bit layout so the field extraction below is
+ * shared.  Output pointers @depth and @endianness may be NULL; all others
+ * must be non-NULL.  Returns FALSE if there is not enough data or the
+ * header fields look bogus. */
+static gboolean
+gst_dca_parse_parse_header (GstDcaParse * dcaparse,
+    const GstByteReader * reader, guint * frame_size,
+    guint * sample_rate, guint * channels, guint * depth,
+    gint * endianness, guint * num_blocks, guint * samples_per_block,
+    gboolean * terminator)
+{
+  /* index 0 and other zero entries mark invalid sample-rate codes */
+  static const int sample_rates[16] = { 0, 8000, 16000, 32000, 0, 0, 11025,
+    22050, 44100, 0, 0, 12000, 24000, 48000, 96000, 192000
+  };
+  /* channel count for each AMODE code, LFE added separately below */
+  static const guint8 channels_table[16] = { 1, 2, 2, 2, 2, 3, 3, 4, 4, 5,
+    6, 6, 6, 7, 8, 8
+  };
+  GstByteReader r = *reader;    /* local copy; caller's reader is not advanced */
+  guint16 hdr[8];
+  guint32 marker;
+  guint chans, lfe, i;
+
+  if (gst_byte_reader_get_remaining (&r) < (4 + sizeof (hdr)))
+    return FALSE;
+
+  marker = gst_byte_reader_peek_uint32_be_unchecked (&r);
+
+  /* raw big endian or 14-bit big endian */
+  if (marker == 0x7FFE8001 || marker == 0x1FFFE800) {
+    for (i = 0; i < G_N_ELEMENTS (hdr); ++i)
+      hdr[i] = gst_byte_reader_get_uint16_be_unchecked (&r);
+  } else
+    /* raw little endian or 14-bit little endian */
+  if (marker == 0xFE7F0180 || marker == 0xFF1F00E8) {
+    for (i = 0; i < G_N_ELEMENTS (hdr); ++i)
+      hdr[i] = gst_byte_reader_get_uint16_le_unchecked (&r);
+  } else {
+    return FALSE;
+  }
+
+  GST_LOG_OBJECT (dcaparse, "dts sync marker 0x%08x at offset %u", marker,
+      gst_byte_reader_get_pos (reader));
+
+  /* 14-bit mode: each 16-bit word carries only 14 payload bits, so
+   * compact them into the raw header layout before extracting fields */
+  if (marker == 0x1FFFE800 || marker == 0xFF1F00E8) {
+    if ((hdr[2] & 0xFFF0) != 0x07F0)
+      return FALSE;
+    /* discard top 2 bits (2 void), shift in 2 */
+    hdr[0] = (hdr[0] << 2) | ((hdr[1] >> 12) & 0x0003);
+    /* discard top 4 bits (2 void, 2 shifted into hdr[0]), shift in 4 etc. */
+    hdr[1] = (hdr[1] << 4) | ((hdr[2] >> 10) & 0x000F);
+    hdr[2] = (hdr[2] << 6) | ((hdr[3] >> 8) & 0x003F);
+    hdr[3] = (hdr[3] << 8) | ((hdr[4] >> 6) & 0x00FF);
+    hdr[4] = (hdr[4] << 10) | ((hdr[5] >> 4) & 0x03FF);
+    hdr[5] = (hdr[5] << 12) | ((hdr[6] >> 2) & 0x0FFF);
+    hdr[6] = (hdr[6] << 14) | ((hdr[7] >> 0) & 0x3FFF);
+    g_assert (hdr[0] == 0x7FFE && hdr[1] == 0x8001);
+  }
+
+  GST_LOG_OBJECT (dcaparse, "frame header: %04x%04x%04x%04x",
+      hdr[2], hdr[3], hdr[4], hdr[5]);
+
+  /* extract the fixed header fields from the (now raw-layout) words */
+  *terminator = (hdr[2] & 0x80) ? FALSE : TRUE;
+  *samples_per_block = ((hdr[2] >> 10) & 0x1f) + 1;
+  *num_blocks = ((hdr[2] >> 2) & 0x7F) + 1;
+  *frame_size = (((hdr[2] & 0x03) << 12) | (hdr[3] >> 4)) + 1;
+  chans = ((hdr[3] & 0x0F) << 2) | (hdr[4] >> 14);
+  *sample_rate = sample_rates[(hdr[4] >> 10) & 0x0F];
+  lfe = (hdr[5] >> 9) & 0x03;
+
+  GST_TRACE_OBJECT (dcaparse, "frame size %u, num_blocks %u, rate %u, "
+      "samples per block %u", *frame_size, *num_blocks, *sample_rate,
+      *samples_per_block);
+
+  /* reject headers with clearly invalid values */
+  if (*num_blocks < 6 || *frame_size < 96 || *sample_rate == 0)
+    return FALSE;
+
+  /* 14-bit streams store 14 payload bits per 16-bit word, so the frame
+   * occupies 16/14 times as many bytes on the wire */
+  if (marker == 0x1FFFE800 || marker == 0xFF1F00E8)
+    *frame_size = (*frame_size * 16) / 14;      /* FIXME: round up? */
+
+  if (chans < G_N_ELEMENTS (channels_table))
+    *channels = channels_table[chans] + ((lfe) ? 1 : 0);
+  else
+    return FALSE;
+
+  if (depth)
+    *depth = (marker == 0x1FFFE800 || marker == 0xFF1F00E8) ? 14 : 16;
+  if (endianness)
+    *endianness = (marker == 0xFE7F0180 || marker == 0xFF1F00E8) ?
+        G_LITTLE_ENDIAN : G_BIG_ENDIAN;
+
+  GST_TRACE_OBJECT (dcaparse, "frame size %u, channels %u, rate %u, "
+      "num_blocks %u, samples_per_block %u", *frame_size, *channels,
+      *sample_rate, *num_blocks, *samples_per_block);
+
+  return TRUE;
+}
+
+/* Scan the reader for the earliest occurrence of any of the four possible
+ * DTS sync markers.  Returns the byte offset of the earliest match and
+ * stores the matching marker value in @sync, or -1 if none was found in
+ * the first @bufsize bytes. */
+static gint
+gst_dca_parse_find_sync (GstDcaParse * dcaparse, GstByteReader * reader,
+    gsize bufsize, guint32 * sync)
+{
+  guint32 best_sync = 0;
+  guint best_offset = G_MAXUINT;
+  gint off;
+
+  /* FIXME: verify syncs via _parse_header() here already */
+
+  /* Raw little endian */
+  off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0xfe7f0180,
+      0, bufsize);
+  if (off >= 0 && off < best_offset) {
+    best_offset = off;
+    best_sync = 0xfe7f0180;
+  }
+
+  /* Raw big endian */
+  off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x7ffe8001,
+      0, bufsize);
+  if (off >= 0 && off < best_offset) {
+    best_offset = off;
+    best_sync = 0x7ffe8001;
+  }
+
+  /* FIXME: check next 2 bytes as well for 14-bit formats (but then don't
+   * forget to adjust the *skipsize= in _check_valid_frame() */
+
+  /* 14-bit little endian */
+  off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0xff1f00e8,
+      0, bufsize);
+  if (off >= 0 && off < best_offset) {
+    best_offset = off;
+    best_sync = 0xff1f00e8;
+  }
+
+  /* 14-bit big endian */
+  off = gst_byte_reader_masked_scan_uint32 (reader, 0xffffffff, 0x1fffe800,
+      0, bufsize);
+  if (off >= 0 && off < best_offset) {
+    best_offset = off;
+    best_sync = 0x1fffe800;
+  }
+
+  /* none of the four markers matched */
+  if (best_offset == G_MAXUINT)
+    return -1;
+
+  *sync = best_sync;
+  return best_offset;
+}
+
/* GstBaseParse::handle_frame implementation.
 *
 * Looks for a DCA core sync word at the start of @frame's buffer, validates
 * the frame header, (re)negotiates src caps when the stream parameters
 * change, and finishes the frame — appending a trailing DTS-HD extension
 * substream when one directly follows the core frame.
 *
 * On failure to find/parse a frame it sets *skipsize so baseparse discards
 * bytes and calls again; returns GST_FLOW_EOS in that case (nothing output).
 */
static GstFlowReturn
gst_dca_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize)
{
  GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
  GstBuffer *buf = frame->buffer;
  GstByteReader r;
  gboolean parser_in_sync;
  gboolean terminator;
  guint32 sync = 0;
  guint size = 0, rate, chans, num_blocks, samples_per_block, depth;
  gint block_size;
  gint endianness;
  gint off = -1;
  GstMapInfo map;
  GstFlowReturn ret = GST_FLOW_EOS;     /* "no frame produced" until proven otherwise */
  gsize extra_size = 0;

  gst_buffer_map (buf, &map, GST_MAP_READ);

  /* need at least enough bytes for a core frame header */
  if (G_UNLIKELY (map.size < 16)) {
    *skipsize = 1;
    goto cleanup;
  }

  parser_in_sync = !GST_BASE_PARSE_LOST_SYNC (parse);

  gst_byte_reader_init (&r, map.data, map.size);

  /* fast path: when already in sync, only look for the sync word variant we
   * saw last time instead of scanning for all four variants */
  if (G_LIKELY (parser_in_sync && dcaparse->last_sync != 0)) {
    off = gst_byte_reader_masked_scan_uint32 (&r, 0xffffffff,
        dcaparse->last_sync, 0, map.size);
  }

  if (G_UNLIKELY (off < 0)) {
    off = gst_dca_parse_find_sync (dcaparse, &r, map.size, &sync);
  }

  /* didn't find anything that looks like a sync word, skip */
  if (off < 0) {
    /* keep the last 3 bytes: a sync word may straddle the buffer boundary */
    *skipsize = map.size - 3;
    GST_DEBUG_OBJECT (dcaparse, "no sync, skipping %d bytes", *skipsize);
    goto cleanup;
  }

  GST_LOG_OBJECT (parse, "possible sync %08x at buffer offset %d", sync, off);

  /* possible frame header, but not at offset 0? skip bytes before sync */
  if (off > 0) {
    *skipsize = off;
    goto cleanup;
  }

  /* make sure the values in the frame header look sane */
  if (!gst_dca_parse_parse_header (dcaparse, &r, &size, &rate, &chans, &depth,
          &endianness, &num_blocks, &samples_per_block, &terminator)) {
    *skipsize = 4;
    goto cleanup;
  }

  GST_LOG_OBJECT (parse, "got frame, sync %08x, size %u, rate %d, channels %d",
      sync, size, rate, chans);

  dcaparse->last_sync = sync;

  /* FIXME: Don't look for a second syncword, there are streams out there
   * that consistently contain garbage between every frame so we never ever
   * find a second consecutive syncword.
   * See https://bugzilla.gnome.org/show_bug.cgi?id=738237
   */
#if 0
  parser_draining = GST_BASE_PARSE_DRAINING (parse);

  if (!parser_in_sync && !parser_draining) {
    /* check for second frame to be sure */
    GST_DEBUG_OBJECT (dcaparse, "resyncing; checking next frame syncword");
    if (map.size >= (size + 16)) {
      guint s2, r2, c2, n2, s3;
      gboolean t;

      GST_MEMDUMP ("buf", map.data, size + 16);
      gst_byte_reader_init (&r, map.data, map.size);
      gst_byte_reader_skip_unchecked (&r, size);

      if (!gst_dca_parse_parse_header (dcaparse, &r, &s2, &r2, &c2, NULL, NULL,
              &n2, &s3, &t)) {
        GST_DEBUG_OBJECT (dcaparse, "didn't find second syncword");
        *skipsize = 4;
        goto cleanup;
      }

      /* ok, got sync now, let's assume constant frame size */
      gst_base_parse_set_min_frame_size (parse, size);
    } else {
      /* wait for some more data */
      GST_LOG_OBJECT (dcaparse,
          "next sync out of reach (%" G_GSIZE_FORMAT " < %u)", map.size,
          size + 16);
      goto cleanup;
    }
  }
#endif

  /* found frame */
  ret = GST_FLOW_OK;

  /* metadata handling: renegotiate src caps whenever any stream parameter
   * from the frame header differs from what we advertised last time */
  block_size = num_blocks * samples_per_block;

  if (G_UNLIKELY (dcaparse->rate != rate || dcaparse->channels != chans
          || dcaparse->depth != depth || dcaparse->endianness != endianness
          || (!terminator && dcaparse->block_size != block_size)
          || (size != dcaparse->frame_size))) {
    GstCaps *caps;

    caps = gst_caps_new_simple ("audio/x-dts",
        "framed", G_TYPE_BOOLEAN, TRUE,
        "rate", G_TYPE_INT, rate, "channels", G_TYPE_INT, chans,
        "endianness", G_TYPE_INT, endianness, "depth", G_TYPE_INT, depth,
        "block-size", G_TYPE_INT, block_size, "frame-size", G_TYPE_INT, size,
        NULL);
    gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
    gst_caps_unref (caps);

    dcaparse->rate = rate;
    dcaparse->channels = chans;
    dcaparse->depth = depth;
    dcaparse->endianness = endianness;
    dcaparse->block_size = block_size;
    dcaparse->frame_size = size;

    gst_base_parse_set_frame_rate (parse, rate, block_size, 0, 0);
  }

cleanup:
  /* it is possible that DTS HD substream after DTS core */
  if (parse->flags & GST_BASE_PARSE_FLAG_DRAINING || map.size >= size + 9) {
    extra_size = 0;
    if (map.size >= size + 9) {
      const guint8 *next = map.data + size;
      /* Check for DTS_SYNCWORD_SUBSTREAM */
      if (next[0] == 0x64 && next[1] == 0x58 && next[2] == 0x20
          && next[3] == 0x25) {
        /* 7.4.1 Extension Substream Header */
        GstBitReader reader;
        gst_bit_reader_init (&reader, next + 4, 5);
        gst_bit_reader_skip (&reader, 8 + 2);   /* skip UserDefinedBits and nExtSSIndex) */
        /* 1-bit header-size flag selects between short and long size fields */
        if (gst_bit_reader_get_bits_uint8_unchecked (&reader, 1) == 0) {
          gst_bit_reader_skip (&reader, 8);
          extra_size =
              gst_bit_reader_get_bits_uint32_unchecked (&reader, 16) + 1;
        } else {
          gst_bit_reader_skip (&reader, 12);
          extra_size =
              gst_bit_reader_get_bits_uint32_unchecked (&reader, 20) + 1;
        }
      }
    }
    /* NOTE(review): map.size is read again after gst_buffer_unmap() below;
     * the local GstMapInfo is presumably left intact by unmap — confirm */
    gst_buffer_unmap (buf, &map);
    if (ret == GST_FLOW_OK && size + extra_size <= map.size) {
      ret = gst_base_parse_finish_frame (parse, frame, size + extra_size);
    } else {
      ret = GST_FLOW_OK;
    }
  } else {
    gst_buffer_unmap (buf, &map);
  }

  return ret;
}
+
/*
 * MPEG-PS private1 streams prepend a 2-byte "Audio Substream Header" to each
 * buffer (not each frame) which carries the offset of the next frame's start.
 * These 2 bytes can be dropped safely as they do not include any timing
 * information, only the offset to the start of the next frame.
 * See gstac3parse.c for a more detailed description.
 */
+
+static GstFlowReturn
+gst_dca_parse_chain_priv (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+ GstDcaParse *dcaparse = GST_DCA_PARSE (parent);
+ GstFlowReturn ret;
+ GstBuffer *newbuf;
+ gsize size;
+
+ size = gst_buffer_get_size (buffer);
+ if (size >= 2) {
+ newbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, 2, size - 2);
+ gst_buffer_unref (buffer);
+ ret = dcaparse->baseparse_chainfunc (pad, parent, newbuf);
+ } else {
+ gst_buffer_unref (buffer);
+ ret = GST_FLOW_OK;
+ }
+
+ return ret;
+}
+
+static void
+remove_fields (GstCaps * caps)
+{
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "framed");
+ }
+}
+
+static GstCaps *
+gst_dca_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+ GstCaps *peercaps, *templ;
+ GstCaps *res;
+
+ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+ if (filter) {
+ GstCaps *fcopy = gst_caps_copy (filter);
+ /* Remove the fields we convert */
+ remove_fields (fcopy);
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
+ gst_caps_unref (fcopy);
+ } else
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
+
+ if (peercaps) {
+ /* Remove the framed field */
+ peercaps = gst_caps_make_writable (peercaps);
+ remove_fields (peercaps);
+
+ res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (peercaps);
+ gst_caps_unref (templ);
+ } else {
+ res = templ;
+ }
+
+ if (filter) {
+ GstCaps *intersection;
+
+ intersection =
+ gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = intersection;
+ }
+
+ return res;
+}
+
+static gboolean
+gst_dca_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps)
+{
+ GstStructure *s;
+ GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
+
+ s = gst_caps_get_structure (caps, 0);
+ if (gst_structure_has_name (s, "audio/x-private1-dts")) {
+ gst_pad_set_chain_function (parse->sinkpad, gst_dca_parse_chain_priv);
+ } else {
+ gst_pad_set_chain_function (parse->sinkpad, dcaparse->baseparse_chainfunc);
+ }
+ return TRUE;
+}
+
+static GstFlowReturn
+gst_dca_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+{
+ GstDcaParse *dcaparse = GST_DCA_PARSE (parse);
+
+ if (!dcaparse->sent_codec_tag) {
+ GstTagList *taglist;
+ GstCaps *caps;
+
+ /* codec tag */
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+ if (G_UNLIKELY (caps == NULL)) {
+ if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+ GST_INFO_OBJECT (parse, "Src pad is flushing");
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ taglist = gst_tag_list_new_empty ();
+ gst_pb_utils_add_codec_description_to_tag_list (taglist,
+ GST_TAG_AUDIO_CODEC, caps);
+ gst_caps_unref (caps);
+
+ gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (taglist);
+
+ /* also signals the end of first-frame processing */
+ dcaparse->sent_codec_tag = TRUE;
+ }
+
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+ return GST_FLOW_OK;
+}
diff --git a/gst/audioparsers/gstdcaparse.h b/gst/audioparsers/gstdcaparse.h
new file mode 100644
index 0000000000..9198a06b8e
--- /dev/null
+++ b/gst/audioparsers/gstdcaparse.h
@@ -0,0 +1,82 @@
/* GStreamer DCA parser
 * Copyright (C) 2010 Tim-Philipp Müller <tim centricular net>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef __GST_DCA_PARSE_H__
#define __GST_DCA_PARSE_H__

#include <gst/gst.h>
#include <gst/base/gstbaseparse.h>

G_BEGIN_DECLS

#define GST_TYPE_DCA_PARSE \
  (gst_dca_parse_get_type())
#define GST_DCA_PARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_DCA_PARSE, GstDcaParse))
#define GST_DCA_PARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_DCA_PARSE, GstDcaParseClass))
#define GST_IS_DCA_PARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_DCA_PARSE))
#define GST_IS_DCA_PARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_DCA_PARSE))

/* frame size bounds used when validating headers */
#define DCA_MIN_FRAMESIZE 96
#define DCA_MAX_FRAMESIZE 18725 /* 16384*16/14 */

typedef struct _GstDcaParse GstDcaParse;
typedef struct _GstDcaParseClass GstDcaParseClass;

/**
 * GstDcaParse:
 *
 * The opaque GstDcaParse object
 */
struct _GstDcaParse {
  GstBaseParse baseparse;

  /*< private >*/
  /* stream parameters of the most recently negotiated src caps;
   * a change in any of them triggers renegotiation */
  gint rate;
  gint channels;
  gint depth;
  gint endianness;
  gint block_size;
  gint frame_size;

  /* TRUE once the codec tag has been pushed downstream */
  gboolean sent_codec_tag;

  /* sync word of the previous frame (0 if none yet); used to rescan
   * quickly for the same sync variant while in sync */
  guint32 last_sync;

  /* baseparse's original chain function, restored for plain DTS input */
  GstPadChainFunction baseparse_chainfunc;
};

/**
 * GstDcaParseClass:
 * @parent_class: Element parent class.
 *
 * The opaque GstDcaParseClass data structure.
 */
struct _GstDcaParseClass {
  GstBaseParseClass baseparse_class;
};

GType gst_dca_parse_get_type (void);

G_END_DECLS

#endif /* __GST_DCA_PARSE_H__ */
diff --git a/gst/audioparsers/gstflacparse.c b/gst/audioparsers/gstflacparse.c
new file mode 100644
index 0000000000..8fca410899
--- /dev/null
+++ b/gst/audioparsers/gstflacparse.c
@@ -0,0 +1,1914 @@
+/* GStreamer
+ *
+ * Copyright (C) 2008 Sebastian Dröge <sebastian.droege@collabora.co.uk>.
+ * Copyright (C) 2009 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ * Copyright (C) 2009 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-flacparse
+ * @title: flacparse
+ * @see_also: flacdec, oggdemux, vorbisparse
+ *
+ * The flacparse element will parse the header packets of the FLAC
+ * stream and put them as the streamheader in the caps. This is used in the
+ * multifdsink case where you want to stream live FLAC streams to multiple
+ * clients, each client has to receive the streamheaders first before they can
+ * consume the FLAC packets.
+ *
+ * This element also makes sure that the buffers that it pushes out are properly
+ * timestamped and that their offset and offset_end are set. The buffers that
+ * flacparse outputs have all of the metadata that oggmux expects to receive,
+ * which allows you to (for example) remux an ogg/flac or convert a native FLAC
+ * format file to an ogg bitstream.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -v filesrc location=sine.flac ! flacparse ! identity \
+ * ! oggmux ! filesink location=sine-remuxed.ogg
+ * ]| This pipeline converts a native FLAC format file to an ogg bitstream.
+ * It also illustrates that the streamheader is set in the caps, and that each
+ * buffer has the timestamp, duration, offset, and offset_end set.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstaudioparserselements.h"
+#include "gstflacparse.h"
+
+#include <string.h>
+#include <gst/tag/tag.h>
+#include <gst/audio/audio.h>
+#include <gst/base/base.h>
+#include <gst/pbutils/pbutils.h>
+
+GST_DEBUG_CATEGORY_STATIC (flacparse_debug);
+#define GST_CAT_DEFAULT flacparse_debug
+
+/* CRC-8, poly = x^8 + x^2 + x^1 + x^0, init = 0 */
+static const guint8 crc8_table[256] = {
+ 0x00, 0x07, 0x0E, 0x09, 0x1C, 0x1B, 0x12, 0x15,
+ 0x38, 0x3F, 0x36, 0x31, 0x24, 0x23, 0x2A, 0x2D,
+ 0x70, 0x77, 0x7E, 0x79, 0x6C, 0x6B, 0x62, 0x65,
+ 0x48, 0x4F, 0x46, 0x41, 0x54, 0x53, 0x5A, 0x5D,
+ 0xE0, 0xE7, 0xEE, 0xE9, 0xFC, 0xFB, 0xF2, 0xF5,
+ 0xD8, 0xDF, 0xD6, 0xD1, 0xC4, 0xC3, 0xCA, 0xCD,
+ 0x90, 0x97, 0x9E, 0x99, 0x8C, 0x8B, 0x82, 0x85,
+ 0xA8, 0xAF, 0xA6, 0xA1, 0xB4, 0xB3, 0xBA, 0xBD,
+ 0xC7, 0xC0, 0xC9, 0xCE, 0xDB, 0xDC, 0xD5, 0xD2,
+ 0xFF, 0xF8, 0xF1, 0xF6, 0xE3, 0xE4, 0xED, 0xEA,
+ 0xB7, 0xB0, 0xB9, 0xBE, 0xAB, 0xAC, 0xA5, 0xA2,
+ 0x8F, 0x88, 0x81, 0x86, 0x93, 0x94, 0x9D, 0x9A,
+ 0x27, 0x20, 0x29, 0x2E, 0x3B, 0x3C, 0x35, 0x32,
+ 0x1F, 0x18, 0x11, 0x16, 0x03, 0x04, 0x0D, 0x0A,
+ 0x57, 0x50, 0x59, 0x5E, 0x4B, 0x4C, 0x45, 0x42,
+ 0x6F, 0x68, 0x61, 0x66, 0x73, 0x74, 0x7D, 0x7A,
+ 0x89, 0x8E, 0x87, 0x80, 0x95, 0x92, 0x9B, 0x9C,
+ 0xB1, 0xB6, 0xBF, 0xB8, 0xAD, 0xAA, 0xA3, 0xA4,
+ 0xF9, 0xFE, 0xF7, 0xF0, 0xE5, 0xE2, 0xEB, 0xEC,
+ 0xC1, 0xC6, 0xCF, 0xC8, 0xDD, 0xDA, 0xD3, 0xD4,
+ 0x69, 0x6E, 0x67, 0x60, 0x75, 0x72, 0x7B, 0x7C,
+ 0x51, 0x56, 0x5F, 0x58, 0x4D, 0x4A, 0x43, 0x44,
+ 0x19, 0x1E, 0x17, 0x10, 0x05, 0x02, 0x0B, 0x0C,
+ 0x21, 0x26, 0x2F, 0x28, 0x3D, 0x3A, 0x33, 0x34,
+ 0x4E, 0x49, 0x40, 0x47, 0x52, 0x55, 0x5C, 0x5B,
+ 0x76, 0x71, 0x78, 0x7F, 0x6A, 0x6D, 0x64, 0x63,
+ 0x3E, 0x39, 0x30, 0x37, 0x22, 0x25, 0x2C, 0x2B,
+ 0x06, 0x01, 0x08, 0x0F, 0x1A, 0x1D, 0x14, 0x13,
+ 0xAE, 0xA9, 0xA0, 0xA7, 0xB2, 0xB5, 0xBC, 0xBB,
+ 0x96, 0x91, 0x98, 0x9F, 0x8A, 0x8D, 0x84, 0x83,
+ 0xDE, 0xD9, 0xD0, 0xD7, 0xC2, 0xC5, 0xCC, 0xCB,
+ 0xE6, 0xE1, 0xE8, 0xEF, 0xFA, 0xFD, 0xF4, 0xF3
+};
+
+static guint8
+gst_flac_calculate_crc8 (const guint8 * data, guint length)
+{
+ guint8 crc = 0;
+
+ while (length--) {
+ crc = crc8_table[crc ^ *data];
+ ++data;
+ }
+
+ return crc;
+}
+
+/* CRC-16, poly = x^16 + x^15 + x^2 + x^0, init = 0 */
+static const guint16 crc16_table[256] = {
+ 0x0000, 0x8005, 0x800f, 0x000a, 0x801b, 0x001e, 0x0014, 0x8011,
+ 0x8033, 0x0036, 0x003c, 0x8039, 0x0028, 0x802d, 0x8027, 0x0022,
+ 0x8063, 0x0066, 0x006c, 0x8069, 0x0078, 0x807d, 0x8077, 0x0072,
+ 0x0050, 0x8055, 0x805f, 0x005a, 0x804b, 0x004e, 0x0044, 0x8041,
+ 0x80c3, 0x00c6, 0x00cc, 0x80c9, 0x00d8, 0x80dd, 0x80d7, 0x00d2,
+ 0x00f0, 0x80f5, 0x80ff, 0x00fa, 0x80eb, 0x00ee, 0x00e4, 0x80e1,
+ 0x00a0, 0x80a5, 0x80af, 0x00aa, 0x80bb, 0x00be, 0x00b4, 0x80b1,
+ 0x8093, 0x0096, 0x009c, 0x8099, 0x0088, 0x808d, 0x8087, 0x0082,
+ 0x8183, 0x0186, 0x018c, 0x8189, 0x0198, 0x819d, 0x8197, 0x0192,
+ 0x01b0, 0x81b5, 0x81bf, 0x01ba, 0x81ab, 0x01ae, 0x01a4, 0x81a1,
+ 0x01e0, 0x81e5, 0x81ef, 0x01ea, 0x81fb, 0x01fe, 0x01f4, 0x81f1,
+ 0x81d3, 0x01d6, 0x01dc, 0x81d9, 0x01c8, 0x81cd, 0x81c7, 0x01c2,
+ 0x0140, 0x8145, 0x814f, 0x014a, 0x815b, 0x015e, 0x0154, 0x8151,
+ 0x8173, 0x0176, 0x017c, 0x8179, 0x0168, 0x816d, 0x8167, 0x0162,
+ 0x8123, 0x0126, 0x012c, 0x8129, 0x0138, 0x813d, 0x8137, 0x0132,
+ 0x0110, 0x8115, 0x811f, 0x011a, 0x810b, 0x010e, 0x0104, 0x8101,
+ 0x8303, 0x0306, 0x030c, 0x8309, 0x0318, 0x831d, 0x8317, 0x0312,
+ 0x0330, 0x8335, 0x833f, 0x033a, 0x832b, 0x032e, 0x0324, 0x8321,
+ 0x0360, 0x8365, 0x836f, 0x036a, 0x837b, 0x037e, 0x0374, 0x8371,
+ 0x8353, 0x0356, 0x035c, 0x8359, 0x0348, 0x834d, 0x8347, 0x0342,
+ 0x03c0, 0x83c5, 0x83cf, 0x03ca, 0x83db, 0x03de, 0x03d4, 0x83d1,
+ 0x83f3, 0x03f6, 0x03fc, 0x83f9, 0x03e8, 0x83ed, 0x83e7, 0x03e2,
+ 0x83a3, 0x03a6, 0x03ac, 0x83a9, 0x03b8, 0x83bd, 0x83b7, 0x03b2,
+ 0x0390, 0x8395, 0x839f, 0x039a, 0x838b, 0x038e, 0x0384, 0x8381,
+ 0x0280, 0x8285, 0x828f, 0x028a, 0x829b, 0x029e, 0x0294, 0x8291,
+ 0x82b3, 0x02b6, 0x02bc, 0x82b9, 0x02a8, 0x82ad, 0x82a7, 0x02a2,
+ 0x82e3, 0x02e6, 0x02ec, 0x82e9, 0x02f8, 0x82fd, 0x82f7, 0x02f2,
+ 0x02d0, 0x82d5, 0x82df, 0x02da, 0x82cb, 0x02ce, 0x02c4, 0x82c1,
+ 0x8243, 0x0246, 0x024c, 0x8249, 0x0258, 0x825d, 0x8257, 0x0252,
+ 0x0270, 0x8275, 0x827f, 0x027a, 0x826b, 0x026e, 0x0264, 0x8261,
+ 0x0220, 0x8225, 0x822f, 0x022a, 0x823b, 0x023e, 0x0234, 0x8231,
+ 0x8213, 0x0216, 0x021c, 0x8219, 0x0208, 0x820d, 0x8207, 0x0202
+};
+
+static guint16
+gst_flac_calculate_crc16 (const guint8 * data, guint length)
+{
+ guint16 crc = 0;
+
+ while (length--) {
+ crc = ((crc << 8) ^ crc16_table[(crc >> 8) ^ *data]) & 0xffff;
+ data++;
+ }
+
+ return crc;
+}
+
/* GObject property IDs */
enum
{
  PROP_0,
  PROP_CHECK_FRAME_CHECKSUMS
};

/* frame checksum verification is off by default */
#define DEFAULT_CHECK_FRAME_CHECKSUMS FALSE
+
/* src pad: always outputs framed FLAC with resolved channels/rate */
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-flac, framed = (boolean) true, "
        "channels = (int) [ 1, 8 ], " "rate = (int) [ 1, 655350 ]")
    );

/* sink pad: accepts any audio/x-flac (no "framed" restriction) */
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-flac")
    );
+
/* forward declarations for GObject/GstBaseParse vfuncs and helpers */
static GstBuffer *gst_flac_parse_generate_vorbiscomment (GstFlacParse *
    flacparse, gboolean is_last);

static inline void gst_flac_parse_reset_buffer_time_and_offset (GstBuffer *
    buffer);
static void gst_flac_parse_reset (GstFlacParse * parser);
static gboolean gst_flac_parse_handle_block_type (GstFlacParse * flacparse,
    guint type, GstBuffer * sbuffer);
static void gst_flac_parse_finalize (GObject * object);
static void gst_flac_parse_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_flac_parse_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static gboolean gst_flac_parse_start (GstBaseParse * parse);
static gboolean gst_flac_parse_stop (GstBaseParse * parse);
static GstFlowReturn gst_flac_parse_handle_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint * skipsize);
static GstFlowReturn gst_flac_parse_parse_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame, gint size);
static GstFlowReturn gst_flac_parse_pre_push_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame);
static gboolean gst_flac_parse_convert (GstBaseParse * parse,
    GstFormat src_format, gint64 src_value, GstFormat dest_format,
    gint64 * dest_value);
static gboolean gst_flac_parse_src_event (GstBaseParse * parse,
    GstEvent * event);
static GstCaps *gst_flac_parse_get_sink_caps (GstBaseParse * parse,
    GstCaps * filter);
static gboolean gst_flac_parse_set_sink_caps (GstBaseParse * parse,
    GstCaps * caps);

/* GType boilerplate and element registration */
#define gst_flac_parse_parent_class parent_class
G_DEFINE_TYPE (GstFlacParse, gst_flac_parse, GST_TYPE_BASE_PARSE);
GST_ELEMENT_REGISTER_DEFINE (flacparse, "flacparse",
    GST_RANK_PRIMARY + 1, GST_TYPE_FLAC_PARSE);
+
+static void
+gst_flac_parse_class_init (GstFlacParseClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *baseparse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (flacparse_debug, "flacparse", 0,
+ "Flac parser element");
+
+ gobject_class->finalize = gst_flac_parse_finalize;
+ gobject_class->set_property = gst_flac_parse_set_property;
+ gobject_class->get_property = gst_flac_parse_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_CHECK_FRAME_CHECKSUMS,
+ g_param_spec_boolean ("check-frame-checksums", "Check Frame Checksums",
+ "Check the overall checksums of every frame",
+ DEFAULT_CHECK_FRAME_CHECKSUMS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ baseparse_class->start = GST_DEBUG_FUNCPTR (gst_flac_parse_start);
+ baseparse_class->stop = GST_DEBUG_FUNCPTR (gst_flac_parse_stop);
+ baseparse_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_flac_parse_handle_frame);
+ baseparse_class->pre_push_frame =
+ GST_DEBUG_FUNCPTR (gst_flac_parse_pre_push_frame);
+ baseparse_class->convert = GST_DEBUG_FUNCPTR (gst_flac_parse_convert);
+ baseparse_class->src_event = GST_DEBUG_FUNCPTR (gst_flac_parse_src_event);
+ baseparse_class->get_sink_caps =
+ GST_DEBUG_FUNCPTR (gst_flac_parse_get_sink_caps);
+ baseparse_class->set_sink_caps =
+ GST_DEBUG_FUNCPTR (gst_flac_parse_set_sink_caps);
+
+ gst_element_class_add_static_pad_template (element_class, &src_factory);
+ gst_element_class_add_static_pad_template (element_class, &sink_factory);
+
+ gst_element_class_set_static_metadata (element_class, "FLAC audio parser",
+ "Codec/Parser/Audio",
+ "Parses audio with the FLAC lossless audio codec",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+}
+
/* Instance initializer: set property defaults and configure the sink pad's
 * accept-caps behaviour (intersect with / match against the template). */
static void
gst_flac_parse_init (GstFlacParse * flacparse)
{
  flacparse->check_frame_checksums = DEFAULT_CHECK_FRAME_CHECKSUMS;
  GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (flacparse));
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (flacparse));
}
+
+static void
+gst_flac_parse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstFlacParse *flacparse = GST_FLAC_PARSE (object);
+
+ switch (prop_id) {
+ case PROP_CHECK_FRAME_CHECKSUMS:
+ flacparse->check_frame_checksums = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_flac_parse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstFlacParse *flacparse = GST_FLAC_PARSE (object);
+
+ switch (prop_id) {
+ case PROP_CHECK_FRAME_CHECKSUMS:
+ g_value_set_boolean (value, flacparse->check_frame_checksums);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_flac_parse_reset (GstFlacParse * parser)
+{
+ if (parser->tags) {
+ gst_tag_list_unref (parser->tags);
+ parser->tags = NULL;
+ }
+ if (parser->toc) {
+ gst_toc_unref (parser->toc);
+ parser->toc = NULL;
+ }
+ if (parser->seektable) {
+ gst_buffer_unref (parser->seektable);
+ parser->seektable = NULL;
+ }
+
+ g_list_foreach (parser->headers, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (parser->headers);
+ parser->headers = NULL;
+}
+
+static void
+gst_flac_parse_finalize (GObject * object)
+{
+ GstFlacParse *flacparse = GST_FLAC_PARSE (object);
+
+ gst_flac_parse_reset (flacparse);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
/* GstBaseParse::start — reset per-stream state before parsing begins. */
static gboolean
gst_flac_parse_start (GstBaseParse * parse)
{
  GstFlacParse *flacparse = GST_FLAC_PARSE (parse);

  flacparse->state = GST_FLAC_PARSE_STATE_INIT;
  /* stream limits; 0 means "unknown until STREAMINFO is parsed" */
  flacparse->min_blocksize = 0;
  flacparse->max_blocksize = 0;
  flacparse->min_framesize = 0;
  flacparse->max_framesize = 0;

  flacparse->upstream_length = -1;

  /* audio parameters; 0 means "unknown until STREAMINFO/frame header" */
  flacparse->samplerate = 0;
  flacparse->channels = 0;
  flacparse->bps = 0;
  flacparse->total_samples = 0;

  /* per-frame tracking state */
  flacparse->offset = GST_CLOCK_TIME_NONE;
  flacparse->blocking_strategy = 0;
  flacparse->block_size = 0;
  flacparse->sample_number = 0;
  flacparse->strategy_checked = FALSE;

  flacparse->sent_codec_tag = FALSE;

  /* "fLaC" marker */
  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), 4);

  /* inform baseclass we can come up with ts, based on counters in packets */
  gst_base_parse_set_has_timing_info (GST_BASE_PARSE_CAST (flacparse), TRUE);
  gst_base_parse_set_syncable (GST_BASE_PARSE_CAST (flacparse), TRUE);

  return TRUE;
}
+
+static gboolean
+gst_flac_parse_stop (GstBaseParse * parse)
+{
+ GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
+
+ gst_flac_parse_reset (flacparse);
+ return TRUE;
+}
+
/* Bits per sample, indexed by the 3-bit sample-size code from the frame
 * header. 0 entries are either reserved (codes 3 and 7 are rejected) or
 * "take from STREAMINFO" (code 0). */
static const guint8 sample_size_table[] = { 0, 8, 12, 0, 16, 20, 24, 0 };

/* Block size in samples, indexed by the 4-bit block-size code. 0 entries
 * are either invalid (code 0) or encoded explicitly in the header
 * (codes 6 and 7, handled separately when parsing). */
static const guint16 blocksize_table[16] = {
  0, 192, 576 << 0, 576 << 1, 576 << 2, 576 << 3, 0, 0,
  256 << 0, 256 << 1, 256 << 2, 256 << 3, 256 << 4, 256 << 5, 256 << 6,
  256 << 7,
};

/* Sample rate in Hz, indexed by the 4-bit sample-rate code. 0 entries are
 * "take from STREAMINFO" (code 0), header-encoded (codes 12-14) or
 * invalid (code 15, rejected when parsing). */
static const guint32 sample_rate_table[16] = {
  0,
  88200, 176400, 192000,
  8000, 16000, 22050, 24000, 32000, 44100, 48000, 96000,
  0, 0, 0, 0,
};
+
/* Result of validating a candidate FLAC frame header. */
typedef enum
{
  FRAME_HEADER_VALID,           /* header parsed and consistent with stream */
  FRAME_HEADER_INVALID,         /* definitely not a valid frame header */
  FRAME_HEADER_MORE_DATA        /* ran out of bytes; retry with more data */
} FrameHeaderCheckReturn;
+
/* Validates a candidate FLAC frame header at @data (@size bytes available).
 *
 * Checks the fixed header fields against the stream parameters cached in
 * @flacparse (channels, bps, samplerate) and verifies the header's CRC-8.
 * When @set is TRUE, the parsed values are stored back into @flacparse.
 * @block_size_ret (optional) receives the frame's block size; @suspect
 * (optional) is set TRUE when the header looks inconsistent with previous
 * frames (caller may then demand a CRC-16 match on the whole frame).
 *
 * Returns FRAME_HEADER_VALID, FRAME_HEADER_INVALID, or
 * FRAME_HEADER_MORE_DATA when @size was too small to finish parsing.
 */
static FrameHeaderCheckReturn
gst_flac_parse_frame_header_is_valid (GstFlacParse * flacparse,
    const guint8 * data, guint size, gboolean set, guint16 * block_size_ret,
    gboolean * suspect)
{
  GstBitReader reader = GST_BIT_READER_INIT (data, size);
  guint8 blocking_strategy;
  guint16 block_size;
  guint32 samplerate = 0;
  guint64 sample_number;
  guint8 channels, bps;
  guint8 tmp = 0;
  guint8 actual_crc, expected_crc = 0;

  /* Skip 14 bit sync code */
  gst_bit_reader_skip_unchecked (&reader, 14);

  /* Must be 0 */
  if (gst_bit_reader_get_bits_uint8_unchecked (&reader, 1) != 0)
    goto error;

  /* 0 == fixed block size, 1 == variable block size */
  blocking_strategy = gst_bit_reader_get_bits_uint8_unchecked (&reader, 1);
  if (flacparse->force_variable_block_size)
    blocking_strategy = 1;

  /* block size index, calculation of the real blocksize below */
  block_size = gst_bit_reader_get_bits_uint16_unchecked (&reader, 4);
  if (block_size == 0)
    goto error;

  /* sample rate index, calculation of the real samplerate below */
  samplerate = gst_bit_reader_get_bits_uint16_unchecked (&reader, 4);
  if (samplerate == 0x0f)
    goto error;

  /* channel assignment: 0-7 are plain counts (1-8 channels), 8-10 are the
   * stereo decorrelation modes (left/side, right/side, mid/side) */
  channels = gst_bit_reader_get_bits_uint8_unchecked (&reader, 4);
  if (channels < 8) {
    channels++;
  } else if (channels <= 10) {
    channels = 2;
  } else if (channels > 10) {
    goto error;
  }
  if (flacparse->channels && flacparse->channels != channels)
    goto error;

  /* bits per sample */
  bps = gst_bit_reader_get_bits_uint8_unchecked (&reader, 3);
  if (bps == 0x03 || bps == 0x07) {
    goto error;
  } else if (bps == 0 && flacparse->bps == 0) {
    goto need_streaminfo;
  }
  bps = sample_size_table[bps];
  if (flacparse->bps && bps != flacparse->bps)
    goto error;

  /* reserved, must be 0 */
  if (gst_bit_reader_get_bits_uint8_unchecked (&reader, 1) != 0)
    goto error;

  /* read "utf8" encoded sample/frame number */
  {
    gint len = 0;

    len = gst_bit_reader_get_bits_uint8_unchecked (&reader, 8);

    /* This is slightly faster than a loop */
    /* NOTE(review): these masks accept some byte patterns a strict UTF-8
     * decoder would reject (e.g. 0x80-0xBF first bytes); bogus headers are
     * presumably weeded out by the CRC-8 check below — confirm intent */
    if (!(len & 0x80)) {
      sample_number = len;
      len = 0;
    } else if ((len & 0xc0) && !(len & 0x20)) {
      sample_number = len & 0x1f;
      len = 1;
    } else if ((len & 0xe0) && !(len & 0x10)) {
      sample_number = len & 0x0f;
      len = 2;
    } else if ((len & 0xf0) && !(len & 0x08)) {
      sample_number = len & 0x07;
      len = 3;
    } else if ((len & 0xf8) && !(len & 0x04)) {
      sample_number = len & 0x03;
      len = 4;
    } else if ((len & 0xfc) && !(len & 0x02)) {
      sample_number = len & 0x01;
      len = 5;
    } else if ((len & 0xfe) && !(len & 0x01)) {
      sample_number = len & 0x0;
      len = 6;
    } else {
      goto error;
    }

    /* fixed block size encodes a frame number (max 6 bytes), variable
     * block size encodes a sample number (max 7 bytes) */
    if ((blocking_strategy == 0 && len > 5) ||
        (blocking_strategy == 1 && len > 6))
      goto error;

    while (len > 0) {
      if (!gst_bit_reader_get_bits_uint8 (&reader, &tmp, 8))
        goto need_more_data;

      /* continuation bytes must be 10xxxxxx */
      if ((tmp & 0xc0) != 0x80)
        goto error;

      sample_number <<= 6;
      sample_number |= (tmp & 0x3f);
      len--;
    }
  }

  /* calculate real blocksize from the blocksize index */
  if (block_size == 6) {
    /* 8-bit blocksize-1 follows the header */
    if (!gst_bit_reader_get_bits_uint16 (&reader, &block_size, 8))
      goto need_more_data;
    block_size++;
  } else if (block_size == 7) {
    /* 16-bit blocksize-1 follows the header */
    if (!gst_bit_reader_get_bits_uint16 (&reader, &block_size, 16))
      goto need_more_data;
    block_size++;
  } else {
    block_size = blocksize_table[block_size];
  }

  /* calculate the real samplerate from the samplerate index */
  if (samplerate == 0 && flacparse->samplerate == 0) {
    goto need_streaminfo;
  } else if (samplerate < 12) {
    samplerate = sample_rate_table[samplerate];
  } else if (samplerate == 12) {
    /* 8-bit rate in kHz follows the header */
    if (!gst_bit_reader_get_bits_uint32 (&reader, &samplerate, 8))
      goto need_more_data;
    samplerate *= 1000;
  } else if (samplerate == 13) {
    /* 16-bit rate in Hz follows the header */
    if (!gst_bit_reader_get_bits_uint32 (&reader, &samplerate, 16))
      goto need_more_data;
  } else if (samplerate == 14) {
    /* 16-bit rate in tens of Hz follows the header */
    if (!gst_bit_reader_get_bits_uint32 (&reader, &samplerate, 16))
      goto need_more_data;
    samplerate *= 10;
  }

  if (flacparse->samplerate && flacparse->samplerate != samplerate)
    goto error;

  /* check crc-8 for the header */
  if (!gst_bit_reader_get_bits_uint8 (&reader, &expected_crc, 8))
    goto need_more_data;

  actual_crc =
      gst_flac_calculate_crc8 (data,
      (gst_bit_reader_get_pos (&reader) / 8) - 1);
  if (actual_crc != expected_crc) {
    GST_DEBUG_OBJECT (flacparse,
        "Checksum mismatch. Header CRC was '%d' but frame has '%d'",
        expected_crc, actual_crc);
    goto error;
  }

  /* Sanity check sample number against blocking strategy, as it seems
     some files claim fixed block size but supply sample numbers,
     rather than block numbers. */
  if (blocking_strategy == 0 && flacparse->block_size != 0) {
    if (!flacparse->strategy_checked) {
      if (block_size == sample_number) {
        GST_WARNING_OBJECT (flacparse, "This file claims fixed block size, "
            "but seems to be lying: assuming variable block size");
        flacparse->force_variable_block_size = TRUE;
        blocking_strategy = 1;
      }
      flacparse->strategy_checked = TRUE;
    }
  }

  /* documentation says:
   * The "blocking strategy" bit must be the same throughout the entire stream. */
  if (flacparse->blocking_strategy != blocking_strategy) {
    if (flacparse->block_size != 0) {
      GST_WARNING_OBJECT (flacparse, "blocking strategy is not constant");
      if (suspect)
        *suspect = TRUE;
    }
  }

  /*
     The FLAC format documentation says:
     The "blocking strategy" bit determines how to calculate the sample number
     of the first sample in the frame. If the bit is 0 (fixed-blocksize), the
     frame header encodes the frame number as above, and the frame's starting
     sample number will be the frame number times the blocksize. If it is 1
     (variable-blocksize), the frame header encodes the frame's starting
     sample number itself. (In the case of a fixed-blocksize stream, only the
     last block may be shorter than the stream blocksize; its starting sample
     number will be calculated as the frame number times the previous frame's
     blocksize, or zero if it is the first frame).

     Therefore, when in fixed block size mode, we only update the block size
     the first time, then reuse that block size for subsequent calls.
     This will also fix a timestamp problem with the last block's timestamp
     being miscalculated by scaling the block number by a "wrong" block size.
   */
  if (blocking_strategy == 0) {
    if (flacparse->block_size != 0) {
      /* after first block */
      if (flacparse->block_size != block_size) {
        /* TODO: can we know we're on the last frame, to avoid warning ? */
        GST_WARNING_OBJECT (flacparse, "Block size is not constant");
        block_size = flacparse->block_size;
        if (suspect)
          *suspect = TRUE;
      }
    }
  }

  if (set) {
    /* persist the parsed parameters; unknown (0) fields are filled in once */
    flacparse->block_size = block_size;
    if (!flacparse->samplerate)
      flacparse->samplerate = samplerate;
    if (!flacparse->bps)
      flacparse->bps = bps;
    if (!flacparse->blocking_strategy)
      flacparse->blocking_strategy = blocking_strategy;
    if (!flacparse->channels)
      flacparse->channels = channels;
    if (!flacparse->sample_number)
      flacparse->sample_number = sample_number;

    GST_DEBUG_OBJECT (flacparse,
        "Parsed frame at offset %" G_GUINT64_FORMAT ":\n" "Block size: %u\n"
        "Sample/Frame number: %" G_GUINT64_FORMAT, flacparse->offset,
        flacparse->block_size, flacparse->sample_number);
  }

  if (block_size_ret)
    *block_size_ret = block_size;

  return FRAME_HEADER_VALID;

need_streaminfo:
  GST_ERROR_OBJECT (flacparse, "Need STREAMINFO metadata. Bits per sample "
      "or sample rate not in frame header");
error:
  return FRAME_HEADER_INVALID;

need_more_data:
  return FRAME_HEADER_MORE_DATA;
}
+
+/* Validate a candidate frame starting at data[0] and locate its end.
+ *
+ * First checks the frame header at data[0]; on success, scans forward for
+ * the next frame sync code (0xfff8/0xfff9) whose header also validates,
+ * which establishes this frame's length.
+ *
+ * Returns TRUE with *ret = frame length when a complete frame was found,
+ * TRUE with *ret = number of bytes to request when more data is needed,
+ * or FALSE with *ret = bytes to skip (0 or 1) on an invalid frame. */
+static gboolean
+gst_flac_parse_frame_is_valid (GstFlacParse * flacparse,
+    const guint8 * data, gsize size, guint * ret)
+{
+  guint max, remaining;
+  guint i, search_start, search_end;
+  FrameHeaderCheckReturn header_ret;
+  guint16 block_size;
+  gboolean suspect_start = FALSE, suspect_end = FALSE;
+
+  if (size < flacparse->min_framesize)
+    goto need_more;
+
+  /* Validate the header of the frame we are sitting on; this also yields
+   * its block size and whether the header looked "suspect" (forcing a CRC
+   * check below before the frame is accepted). */
+  header_ret =
+      gst_flac_parse_frame_header_is_valid (flacparse, data, size, TRUE,
+      &block_size, &suspect_start);
+  if (header_ret == FRAME_HEADER_INVALID) {
+    *ret = 0;
+    return FALSE;
+  }
+  if (header_ret == FRAME_HEADER_MORE_DATA)
+    goto need_more;
+
+  /* mind unknown framesize */
+  search_start = MAX (2, flacparse->min_framesize);
+  if (flacparse->max_framesize)
+    search_end = MIN (size, flacparse->max_framesize + 9 + 2);
+  else
+    search_end = size;
+  /* need 2 bytes for the 16-bit sync-code read below */
+  search_end -= 2;
+
+  remaining = size;
+
+  /* Scan for the start of the next frame, which delimits this one */
+  for (i = search_start; i < search_end; i++, remaining--) {
+
+    /* sync code is 14 bits: 0xfff8 or 0xfff9 */
+    if ((GST_READ_UINT16_BE (data + i) & 0xfffe) != 0xfff8)
+      continue;
+
+    GST_LOG_OBJECT (flacparse, "possible frame end at offset %d", i);
+    suspect_end = FALSE;
+    header_ret =
+        gst_flac_parse_frame_header_is_valid (flacparse, data + i,
+        remaining, FALSE, NULL, &suspect_end);
+    if (header_ret == FRAME_HEADER_VALID) {
+      /* When configured, or when either header looked suspect, verify the
+       * frame-footer CRC-16 (the last 2 bytes before the next frame) */
+      if (flacparse->check_frame_checksums || suspect_start || suspect_end) {
+        guint16 actual_crc = gst_flac_calculate_crc16 (data, i - 2);
+        guint16 expected_crc = GST_READ_UINT16_BE (data + i - 2);
+
+        GST_LOG_OBJECT (flacparse,
+            "Found possible frame (%d, %d). Checking for CRC match",
+            suspect_start, suspect_end);
+        if (actual_crc != expected_crc) {
+          GST_DEBUG_OBJECT (flacparse,
+              "Checksum mismatch. Header CRC was '%d' but frame has '%d'",
+              expected_crc, actual_crc);
+          continue;
+        }
+      }
+      *ret = i;
+      flacparse->block_size = block_size;
+      return TRUE;
+    } else if (header_ret == FRAME_HEADER_MORE_DATA) {
+      goto need_more;
+    }
+  }
+
+  /* For the last frame output everything to the end */
+  if (G_UNLIKELY (GST_BASE_PARSE_DRAINING (flacparse))) {
+    if (flacparse->check_frame_checksums) {
+      guint16 actual_crc = gst_flac_calculate_crc16 (data, size - 2);
+      guint16 expected_crc = GST_READ_UINT16_BE (data + size - 2);
+
+      if (actual_crc == expected_crc) {
+        *ret = size;
+        flacparse->block_size = block_size;
+        return TRUE;
+      }
+    } else {
+      *ret = size;
+      flacparse->block_size = block_size;
+      return TRUE;
+    }
+  }
+
+  /* so we searched to expected end and found nothing,
+   * give up on this frame (start) */
+  if (flacparse->max_framesize && i > 2 * flacparse->max_framesize) {
+    GST_LOG_OBJECT (flacparse,
+        "could not determine valid frame end, discarding frame (start)");
+    *ret = 1;
+    return FALSE;
+  }
+
+need_more:
+  /* Ask for more data, capped at max_framesize + 16 when known,
+   * otherwise at 16 MB */
+  max = flacparse->max_framesize + 16;
+  if (max == 16)
+    max = 1 << 24;
+  *ret = MIN (size + 4096, max);
+  return TRUE;
+}
+
+/* GstBaseParse::handle_frame implementation.
+ *
+ * Depending on the parser state this recognizes the "fLaC" stream marker,
+ * consumes metadata blocks, or searches for and validates audio frames.
+ * On success the frame is parsed and finished; otherwise *skipsize tells
+ * the base class how many bytes to discard before retrying. */
+static GstFlowReturn
+gst_flac_parse_handle_frame (GstBaseParse * parse,
+    GstBaseParseFrame * frame, gint * skipsize)
+{
+  GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
+  GstBuffer *buffer = frame->buffer;
+  GstMapInfo map;
+  gboolean result = TRUE;
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint framesize = 0;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+  /* default: skip one byte unless a frame is found (result == TRUE) */
+  *skipsize = 1;
+
+  if (G_UNLIKELY (map.size < 4)) {
+    result = FALSE;
+    goto cleanup;
+  }
+
+  if (flacparse->state == GST_FLAC_PARSE_STATE_INIT) {
+    if (memcmp (map.data, "fLaC", 4) == 0) {
+      GST_DEBUG_OBJECT (flacparse, "fLaC marker found");
+      framesize = 4;
+      goto cleanup;
+    }
+    /* sync code without a preceding "fLaC" marker: raw FLAC frames;
+     * headers will have to be generated from the frame headers */
+    if (map.data[0] == 0xff && (map.data[1] >> 2) == 0x3e) {
+      GST_DEBUG_OBJECT (flacparse, "Found headerless FLAC");
+      /* Minimal size of a frame header */
+      gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), 9);
+      flacparse->state = GST_FLAC_PARSE_STATE_GENERATE_HEADERS;
+      *skipsize = 0;
+      result = FALSE;
+      goto cleanup;
+    }
+    GST_DEBUG_OBJECT (flacparse, "fLaC marker not found");
+    result = FALSE;
+    goto cleanup;
+  }
+
+  if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
+    /* metadata block: 1 byte flags/type + 24-bit big-endian payload length */
+    guint size = 4 + ((map.data[1] << 16) | (map.data[2] << 8) | (map.data[3]));
+
+    GST_DEBUG_OBJECT (flacparse, "Found metadata block of size %u", size);
+    framesize = size;
+    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), framesize);
+    goto cleanup;
+  }
+
+  /* audio data: look for the 14-bit frame sync code */
+  if ((GST_READ_UINT16_BE (map.data) & 0xfffe) == 0xfff8) {
+    gboolean ret, is_first = !flacparse->strategy_checked;
+    guint next;
+
+    flacparse->offset = GST_BUFFER_OFFSET (buffer);
+    flacparse->blocking_strategy = 0;
+    flacparse->sample_number = 0;
+
+    GST_DEBUG_OBJECT (flacparse, "Found sync code");
+    ret = gst_flac_parse_frame_is_valid (flacparse, map.data, map.size, &next);
+    if (ret) {
+      if (is_first) {
+        GST_INFO_OBJECT (flacparse, "First sample number is %" G_GUINT64_FORMAT,
+            flacparse->sample_number);
+        flacparse->first_sample_number = flacparse->sample_number;
+      }
+      framesize = next;
+      goto cleanup;
+    }
+
+    /* If we're at EOS and the frame was not valid, drop it! */
+    if (G_UNLIKELY (GST_BASE_PARSE_DRAINING (flacparse))) {
+      GST_WARNING_OBJECT (flacparse, "EOS");
+      result = FALSE;
+      goto cleanup;
+    }
+
+    if (next == 0) {
+      /* invalid frame start: fall through and skip the default 1 byte */
+    } else if (next > map.size) {
+      /* frame_is_valid wants more data before it can decide */
+      GST_DEBUG_OBJECT (flacparse, "Requesting %u bytes", next);
+      *skipsize = 0;
+      gst_base_parse_set_min_frame_size (parse, next);
+      result = FALSE;
+      goto cleanup;
+    } else {
+      GST_ERROR_OBJECT (flacparse,
+          "Giving up on invalid frame (%" G_GSIZE_FORMAT " bytes)", map.size);
+      result = FALSE;
+      goto cleanup;
+    }
+  } else {
+    /* no sync code at the start: scan ahead for one and skip to it */
+    GstByteReader reader;
+    gint off;
+
+    gst_byte_reader_init (&reader, map.data, map.size);
+    off =
+        gst_byte_reader_masked_scan_uint32 (&reader, 0xfffc0000, 0xfff80000,
+        0, map.size);
+
+    if (off > 0) {
+      GST_DEBUG_OBJECT (parse, "Possible sync at buffer offset %d", off);
+      *skipsize = off;
+      result = FALSE;
+      goto cleanup;
+    }
+
+    /* keep the last 3 bytes in case they are the start of a sync code */
+    GST_DEBUG_OBJECT (flacparse, "Sync code not found");
+    *skipsize = map.size - 3;
+    result = FALSE;
+    goto cleanup;
+  }
+
+  result = FALSE;
+
+cleanup:
+  gst_buffer_unmap (buffer, &map);
+
+  if (result)
+    *skipsize = 0;
+
+  /* map.size stays valid after unmap (plain struct field) */
+  if (result && framesize <= map.size) {
+    ret = gst_flac_parse_parse_frame (parse, frame, framesize);
+    if (ret == GST_BASE_PARSE_FLOW_DROPPED) {
+      frame->flags |= GST_BASE_PARSE_FRAME_FLAG_DROP;
+      ret = GST_FLOW_OK;
+    }
+    if (ret == GST_FLOW_OK)
+      ret = gst_base_parse_finish_frame (parse, frame, framesize);
+  }
+
+  return ret;
+}
+
+/* Parse a STREAMINFO metadata block (4-byte block header + 34-byte payload)
+ * and store the stream parameters on the parser instance. Also sets the
+ * stream duration when a total-samples count is present.
+ * Returns FALSE on truncated or clearly invalid data. */
+static gboolean
+gst_flac_parse_handle_streaminfo (GstFlacParse * flacparse, GstBuffer * buffer)
+{
+  GstBitReader reader;
+  GstMapInfo map;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  gst_bit_reader_init (&reader, map.data, map.size);
+
+  /* STREAMINFO payload is always exactly 34 bytes */
+  if (map.size != 4 + 34) {
+    GST_ERROR_OBJECT (flacparse,
+        "Invalid metablock size for STREAMINFO: %" G_GSIZE_FORMAT "", map.size);
+    goto failure;
+  }
+
+  /* Skip metadata block header */
+  if (!gst_bit_reader_skip (&reader, 32))
+    goto error;
+
+  /* 16-bit min block size; < 16 is invalid but only warned about */
+  if (!gst_bit_reader_get_bits_uint16 (&reader, &flacparse->min_blocksize, 16))
+    goto error;
+  if (flacparse->min_blocksize < 16) {
+    GST_WARNING_OBJECT (flacparse, "Invalid minimum block size: %u",
+        flacparse->min_blocksize);
+  }
+
+  /* 16-bit max block size; < 16 is invalid but only warned about */
+  if (!gst_bit_reader_get_bits_uint16 (&reader, &flacparse->max_blocksize, 16))
+    goto error;
+  if (flacparse->max_blocksize < 16) {
+    GST_WARNING_OBJECT (flacparse, "Invalid maximum block size: %u",
+        flacparse->max_blocksize);
+  }
+
+  /* 24-bit min/max frame sizes (0 = unknown) */
+  if (!gst_bit_reader_get_bits_uint32 (&reader, &flacparse->min_framesize, 24))
+    goto error;
+  if (!gst_bit_reader_get_bits_uint32 (&reader, &flacparse->max_framesize, 24))
+    goto error;
+
+  /* 20-bit sample rate; 0 is invalid */
+  if (!gst_bit_reader_get_bits_uint32 (&reader, &flacparse->samplerate, 20))
+    goto error;
+  if (flacparse->samplerate == 0) {
+    GST_ERROR_OBJECT (flacparse, "Invalid sample rate 0");
+    goto failure;
+  }
+
+  /* 3 bits: channel count - 1 */
+  if (!gst_bit_reader_get_bits_uint8 (&reader, &flacparse->channels, 3))
+    goto error;
+  flacparse->channels++;
+  if (flacparse->channels > 8) {
+    GST_ERROR_OBJECT (flacparse, "Invalid number of channels %u",
+        flacparse->channels);
+    goto failure;
+  }
+
+  /* 5 bits: bits per sample - 1 */
+  if (!gst_bit_reader_get_bits_uint8 (&reader, &flacparse->bps, 5))
+    goto error;
+  flacparse->bps++;
+
+  /* 36 bits: total samples (0 = unknown); used as stream duration */
+  if (!gst_bit_reader_get_bits_uint64 (&reader, &flacparse->total_samples, 36))
+    goto error;
+  if (flacparse->total_samples) {
+    gst_base_parse_set_duration (GST_BASE_PARSE (flacparse),
+        GST_FORMAT_DEFAULT, flacparse->total_samples, 0);
+  }
+
+  gst_buffer_unmap (buffer, &map);
+
+  GST_DEBUG_OBJECT (flacparse, "STREAMINFO:\n"
+      "\tmin/max blocksize: %u/%u,\n"
+      "\tmin/max framesize: %u/%u,\n"
+      "\tsamplerate: %u,\n"
+      "\tchannels: %u,\n"
+      "\tbits per sample: %u,\n"
+      "\ttotal samples: %" G_GUINT64_FORMAT,
+      flacparse->min_blocksize, flacparse->max_blocksize,
+      flacparse->min_framesize, flacparse->max_framesize,
+      flacparse->samplerate,
+      flacparse->channels, flacparse->bps, flacparse->total_samples);
+
+  return TRUE;
+
+error:
+  GST_ERROR_OBJECT (flacparse, "Failed to read data");
+failure:
+  gst_buffer_unmap (buffer, &map);
+  return FALSE;
+}
+
+/* Parse a VORBIS_COMMENT metadata block (4-byte block header included in
+ * the buffer) and merge any tags it carries into flacparse->tags.
+ * Always returns TRUE; an unparsable block is only logged. */
+static gboolean
+gst_flac_parse_handle_vorbiscomment (GstFlacParse * flacparse,
+    GstBuffer * buffer)
+{
+  GstMapInfo info;
+  GstTagList *parsed;
+
+  gst_buffer_map (buffer, &info, GST_MAP_READ);
+  parsed =
+      gst_tag_list_from_vorbiscomment (info.data, info.size, info.data, 4,
+      NULL);
+  gst_buffer_unmap (buffer, &info);
+
+  if (parsed == NULL) {
+    GST_ERROR_OBJECT (flacparse, "Invalid vorbiscomment block");
+    return TRUE;
+  }
+
+  if (gst_tag_list_is_empty (parsed)) {
+    /* nothing useful in there */
+    gst_tag_list_unref (parsed);
+  } else if (flacparse->tags == NULL) {
+    /* first tag list: take ownership directly */
+    flacparse->tags = parsed;
+  } else {
+    gst_tag_list_insert (flacparse->tags, parsed, GST_TAG_MERGE_APPEND);
+    gst_tag_list_unref (parsed);
+  }
+
+  return TRUE;
+}
+
+/* Parse a METADATA_BLOCK_CUESHEET and expose it as a GstToc with one entry
+ * per track (ISRC attached as a tag when present). The TOC is stored on
+ * flacparse->toc unless one already exists.
+ *
+ * Fixes over the previous version:
+ *  - a malformed cuesheet whose first track is the lead-out (170/255) no
+ *    longer dereferences a NULL entry / uses an uninitialized start time;
+ *  - the freshly built GstToc is no longer leaked when flacparse->toc is
+ *    already set, or on the error path;
+ *  - a zero sample rate is rejected up front to avoid dividing by zero. */
+static gboolean
+gst_flac_parse_handle_cuesheet (GstFlacParse * flacparse, GstBuffer * buffer)
+{
+  GstByteReader reader;
+  GstMapInfo map;
+  guint i, j;
+  guint8 n_tracks, track_num, index;
+  guint64 offset;
+  gint64 start, stop;
+  gchar *id;
+  gchar isrc[13];
+  GstTagList *tags;
+  GstToc *toc;
+  GstTocEntry *cur_entry = NULL, *prev_entry = NULL;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  gst_byte_reader_init (&reader, map.data, map.size);
+
+  toc = gst_toc_new (GST_TOC_SCOPE_GLOBAL);
+
+  /* track offsets are converted with samplerate as denominator below */
+  if (flacparse->samplerate == 0)
+    goto error;
+
+  /* skip 4 bytes METADATA_BLOCK_HEADER */
+  /* https://xiph.org/flac/format.html#metadata_block_header */
+  if (!gst_byte_reader_skip (&reader, 4))
+    goto error;
+
+  /* skip 395 bytes from METADATA_BLOCK_CUESHEET */
+  /* https://xiph.org/flac/format.html#metadata_block_cuesheet */
+  if (!gst_byte_reader_skip (&reader, 395))
+    goto error;
+
+  if (!gst_byte_reader_get_uint8 (&reader, &n_tracks))
+    goto error;
+
+  /* CUESHEET_TRACK */
+  /* https://xiph.org/flac/format.html#cuesheet_track */
+  for (i = 0; i < n_tracks; i++) {
+    if (!gst_byte_reader_get_uint64_be (&reader, &offset))
+      goto error;
+    if (!gst_byte_reader_get_uint8 (&reader, &track_num))
+      goto error;
+
+    if (gst_byte_reader_get_remaining (&reader) < 12)
+      goto error;
+    memcpy (isrc, map.data + gst_byte_reader_get_pos (&reader), 12);
+    /* \0-terminate the string */
+    isrc[12] = '\0';
+    if (!gst_byte_reader_skip (&reader, 12))
+      goto error;
+
+    /* skip 14 bytes from CUESHEET_TRACK */
+    if (!gst_byte_reader_skip (&reader, 14))
+      goto error;
+    if (!gst_byte_reader_get_uint8 (&reader, &index))
+      goto error;
+    /* add tracks in TOC */
+    /* lead-out track has number 170 (CD-DA) or 255 */
+    if (track_num != 170 && track_num != 255) {
+      prev_entry = cur_entry;
+      /* previous track stop time = current track start time */
+      if (prev_entry != NULL) {
+        gst_toc_entry_get_start_stop_times (prev_entry, &start, NULL);
+        stop =
+            gst_util_uint64_scale_round (offset, GST_SECOND,
+            flacparse->samplerate);
+        gst_toc_entry_set_start_stop_times (prev_entry, start, stop);
+      }
+      id = g_strdup_printf ("%08x", track_num);
+      cur_entry = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_TRACK, id);
+      g_free (id);
+      start =
+          gst_util_uint64_scale_round (offset, GST_SECOND,
+          flacparse->samplerate);
+      gst_toc_entry_set_start_stop_times (cur_entry, start, -1);
+      /* add ISRC as tag in track */
+      if (strlen (isrc) != 0) {
+        tags = gst_tag_list_new_empty ();
+        gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_ISRC, isrc, NULL);
+        gst_toc_entry_set_tags (cur_entry, tags);
+      }
+      gst_toc_append_entry (toc, cur_entry);
+      /* CUESHEET_TRACK_INDEX */
+      /* https://xiph.org/flac/format.html#cuesheet_track_index */
+      for (j = 0; j < index; j++) {
+        if (!gst_byte_reader_skip (&reader, 12))
+          goto error;
+      }
+    } else if (cur_entry != NULL) {
+      /* lead-out: set stop time of the last real track. If no real track
+       * preceded it (malformed cuesheet), there is nothing to close. */
+      gst_toc_entry_get_start_stop_times (cur_entry, &start, NULL);
+      stop =
+          gst_util_uint64_scale_round (offset, GST_SECOND,
+          flacparse->samplerate);
+      gst_toc_entry_set_start_stop_times (cur_entry, start, stop);
+    }
+  }
+
+  /* send data as TOC; drop our reference if a TOC is already stored */
+  if (!flacparse->toc)
+    flacparse->toc = toc;
+  else
+    gst_toc_unref (toc);
+
+  gst_buffer_unmap (buffer, &map);
+  return TRUE;
+
+error:
+  GST_ERROR_OBJECT (flacparse, "Error reading data");
+  gst_toc_unref (toc);
+  gst_buffer_unmap (buffer, &map);
+  return FALSE;
+}
+
+/* Parse a PICTURE metadata block and attach the embedded image to
+ * flacparse->tags as an ID3v2-style image tag.
+ *
+ * Fix: the previous bounds check computed pos + img_len, which can wrap
+ * around for an attacker-controlled 32-bit img_len and pass the check,
+ * leading to an out-of-bounds read. The check is now written as a
+ * subtraction that cannot overflow (the reader position never exceeds
+ * map.size). */
+static gboolean
+gst_flac_parse_handle_picture (GstFlacParse * flacparse, GstBuffer * buffer)
+{
+  GstByteReader reader;
+  GstMapInfo map;
+  guint32 img_len = 0, img_type = 0;
+  guint32 img_mimetype_len = 0, img_description_len = 0;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  gst_byte_reader_init (&reader, map.data, map.size);
+
+  /* skip 4 bytes METADATA_BLOCK_HEADER */
+  if (!gst_byte_reader_skip (&reader, 4))
+    goto error;
+
+  if (!gst_byte_reader_get_uint32_be (&reader, &img_type))
+    goto error;
+
+  /* variable-length MIME type string */
+  if (!gst_byte_reader_get_uint32_be (&reader, &img_mimetype_len))
+    goto error;
+  if (!gst_byte_reader_skip (&reader, img_mimetype_len))
+    goto error;
+
+  /* variable-length description string */
+  if (!gst_byte_reader_get_uint32_be (&reader, &img_description_len))
+    goto error;
+  if (!gst_byte_reader_skip (&reader, img_description_len))
+    goto error;
+
+  /* width, height, color depth, number of colors: 4 x 32 bit */
+  if (!gst_byte_reader_skip (&reader, 4 * 4))
+    goto error;
+
+  if (!gst_byte_reader_get_uint32_be (&reader, &img_len))
+    goto error;
+
+  /* overflow-safe bounds check: pos <= map.size always holds */
+  if (img_len > map.size - gst_byte_reader_get_pos (&reader))
+    goto error;
+
+  GST_INFO_OBJECT (flacparse, "Got image of %d bytes", img_len);
+
+  if (img_len > 0) {
+    if (flacparse->tags == NULL)
+      flacparse->tags = gst_tag_list_new_empty ();
+
+    gst_tag_list_add_id3_image (flacparse->tags,
+        map.data + gst_byte_reader_get_pos (&reader), img_len, img_type);
+  }
+
+  gst_buffer_unmap (buffer, &map);
+  return TRUE;
+
+error:
+  GST_ERROR_OBJECT (flacparse, "Error reading data");
+  gst_buffer_unmap (buffer, &map);
+  return FALSE;
+}
+
+/* Take a reference on the SEEKTABLE metadata block for later processing.
+ * The seek points cannot be resolved yet: they are relative to the byte
+ * offset of the first audio frame, which is not known at this point. */
+static gboolean
+gst_flac_parse_handle_seektable (GstFlacParse * flacparse, GstBuffer * buffer)
+{
+  GstBuffer *old_table = flacparse->seektable;
+
+  GST_DEBUG_OBJECT (flacparse, "storing seektable");
+  flacparse->seektable = gst_buffer_ref (buffer);
+  if (old_table)
+    gst_buffer_unref (old_table);
+
+  return TRUE;
+}
+
+/* Convert the stored SEEKTABLE block into base-parse index entries.
+ * boffset is the byte offset of the first audio frame; the seek-point byte
+ * offsets in the table are relative to it. Consumes (unrefs and clears)
+ * flacparse->seektable in all cases. */
+static void
+gst_flac_parse_process_seektable (GstFlacParse * flacparse, gint64 boffset)
+{
+  GstByteReader br;
+  gint64 offset = 0, samples = 0;
+  GstMapInfo map;
+
+  GST_DEBUG_OBJECT (flacparse,
+      "parsing seektable; base offset %" G_GINT64_FORMAT, boffset);
+
+  /* without a valid base offset the table cannot be resolved */
+  if (boffset <= 0)
+    goto exit;
+
+  gst_buffer_map (flacparse->seektable, &map, GST_MAP_READ);
+  gst_byte_reader_init (&br, map.data, map.size);
+
+  /* skip header */
+  if (!gst_byte_reader_skip (&br, 4))
+    goto done;
+
+  /* seekpoints: 64-bit sample number, 64-bit byte offset, 16-bit span */
+  while (gst_byte_reader_get_remaining (&br)) {
+    if (!gst_byte_reader_get_int64_be (&br, &samples))
+      break;
+    if (!gst_byte_reader_get_int64_be (&br, &offset))
+      break;
+    if (!gst_byte_reader_skip (&br, 2))
+      break;
+
+    GST_LOG_OBJECT (flacparse, "samples %" G_GINT64_FORMAT " -> offset %"
+        G_GINT64_FORMAT, samples, offset);
+
+    /* sanity check; also skips placeholder seekpoints */
+    if (G_LIKELY (offset > 0 && samples > 0)) {
+      gst_base_parse_add_index_entry (GST_BASE_PARSE (flacparse),
+          boffset + offset, gst_util_uint64_scale (samples, GST_SECOND,
+              flacparse->samplerate), TRUE, FALSE);
+    }
+  }
+
+done:
+  gst_buffer_unmap (flacparse->seektable, &map);
+exit:
+  gst_buffer_unref (flacparse->seektable);
+  flacparse->seektable = NULL;
+}
+
+/* Append a copy of buf to a GST_TYPE_ARRAY value (used to build the
+ * "streamheader" caps field). The buffer is copied so the caps do not
+ * create circular refcounts with the buffers being pushed, and is flagged
+ * as a header for good measure. */
+static void
+_value_array_append_buffer (GValue * array_val, GstBuffer * buf)
+{
+  GstBuffer *copy;
+  GValue value = { 0, };
+
+  /* copy buffer to avoid problems with circular refcounts */
+  copy = gst_buffer_copy (buf);
+  GST_BUFFER_FLAG_SET (copy, GST_BUFFER_FLAG_HEADER);
+
+  g_value_init (&value, GST_TYPE_BUFFER);
+  gst_value_set_buffer (&value, copy);
+  gst_buffer_unref (copy);
+
+  gst_value_array_append_value (array_val, &value);
+  g_value_unset (&value);
+}
+
+/* Finish header processing: build the src caps (including a "streamheader"
+ * array when a complete header set is available), set them on the src pad,
+ * and push all collected header buffers downstream. Consumes and clears
+ * flacparse->headers. */
+static GstFlowReturn
+gst_flac_parse_handle_headers (GstFlacParse * flacparse)
+{
+  GstBuffer *vorbiscomment = NULL;
+  GstBuffer *streaminfo = NULL;
+  GstBuffer *marker = NULL;
+  GValue array = { 0, };
+  GstCaps *caps;
+  GList *l;
+  GstFlowReturn res = GST_FLOW_OK;
+  gboolean is_streaminfo_last = FALSE;
+
+  caps = gst_caps_new_simple ("audio/x-flac",
+      "channels", G_TYPE_INT, flacparse->channels,
+      "framed", G_TYPE_BOOLEAN, TRUE,
+      "rate", G_TYPE_INT, flacparse->samplerate, NULL);
+
+  if (!flacparse->headers)
+    goto push_headers;
+
+  /* Identify the marker, STREAMINFO and VORBISCOMMENT buffers among the
+   * collected headers; byte 0 of a metadata block is is-last flag (MSB)
+   * plus 7-bit block type */
+  for (l = flacparse->headers; l; l = l->next) {
+    GstBuffer *header = l->data;
+    GstMapInfo map;
+
+    gst_buffer_map (header, &map, GST_MAP_READ);
+
+    GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_HEADER);
+
+    if (map.size == 4 && memcmp (map.data, "fLaC", 4) == 0) {
+      marker = header;
+    } else if (map.size > 1 && (map.data[0] & 0x7f) == 0) {
+      streaminfo = header;
+      is_streaminfo_last = (map.data[0] & 0x80) != 0;
+    } else if (map.size > 1 && (map.data[0] & 0x7f) == 4) {
+      vorbiscomment = header;
+    }
+
+    gst_buffer_unmap (header, &map);
+  }
+
+  /* at least this one we can generate easily
+   * to provide full headers downstream */
+  if (vorbiscomment == NULL && streaminfo != NULL) {
+    GST_DEBUG_OBJECT (flacparse,
+        "missing vorbiscomment header; generating dummy");
+    /* this vorbiscomment header is inserted after streaminfo and inherits its last-metadata-block flag */
+    vorbiscomment =
+        gst_flac_parse_generate_vorbiscomment (flacparse, is_streaminfo_last);
+    flacparse->headers =
+        g_list_insert (flacparse->headers, vorbiscomment,
+        g_list_index (flacparse->headers, streaminfo) + 1);
+  }
+
+  /* without all three, no streamheader field can be built; plain caps
+   * are still set below */
+  if (marker == NULL || streaminfo == NULL || vorbiscomment == NULL) {
+    GST_WARNING_OBJECT (flacparse,
+        "missing header %p %p %p, muxing into container "
+        "formats may be broken", marker, streaminfo, vorbiscomment);
+    goto push_headers;
+  }
+
+  g_value_init (&array, GST_TYPE_ARRAY);
+
+  /* add marker including STREAMINFO header; the 13-byte prefix written
+   * here (0x7f "FLAC" major minor num-headers "fLaC") matches the Ogg
+   * FLAC mapping header layout */
+  {
+    GstBuffer *buf;
+    guint16 num;
+    GstMapInfo sinfomap, writemap;
+
+    gst_buffer_map (streaminfo, &sinfomap, GST_MAP_READ);
+
+    /* minus one for the marker that is merged with streaminfo here */
+    num = g_list_length (flacparse->headers) - 1;
+
+    buf = gst_buffer_new_and_alloc (13 + sinfomap.size);
+    gst_buffer_map (buf, &writemap, GST_MAP_WRITE);
+
+    writemap.data[0] = 0x7f;
+    memcpy (writemap.data + 1, "FLAC", 4);
+    writemap.data[5] = 0x01;    /* mapping version major */
+    writemap.data[6] = 0x00;    /* mapping version minor */
+    writemap.data[7] = (num & 0xFF00) >> 8;
+    writemap.data[8] = (num & 0x00FF) >> 0;
+    memcpy (writemap.data + 9, "fLaC", 4);
+    memcpy (writemap.data + 13, sinfomap.data, sinfomap.size);
+    /* clear the last-metadata-block flag because a VORBISCOMMENT always follows */
+    writemap.data[13] = 0x00;   /* is_last = 0; type = 0; */
+    _value_array_append_buffer (&array, buf);
+
+    gst_buffer_unmap (streaminfo, &sinfomap);
+    gst_buffer_unmap (buf, &writemap);
+    gst_buffer_unref (buf);
+  }
+
+  /* add other headers, including VORBISCOMMENT */
+  for (l = flacparse->headers; l; l = l->next) {
+    if (GST_BUFFER_CAST (l->data) != marker &&
+        GST_BUFFER_CAST (l->data) != streaminfo) {
+      _value_array_append_buffer (&array, GST_BUFFER_CAST (l->data));
+    }
+  }
+
+  gst_structure_set_value (gst_caps_get_structure (caps, 0),
+      "streamheader", &array);
+  g_value_unset (&array);
+
+push_headers:
+
+  gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (GST_BASE_PARSE (flacparse)), caps);
+  gst_caps_unref (caps);
+
+  /* push header buffers; update caps, so when we push the first buffer the
+   * negotiated caps will change to caps that include the streamheader field */
+  while (flacparse->headers) {
+    GstBuffer *buf = GST_BUFFER (flacparse->headers->data);
+    GstBaseParseFrame frame;
+
+    flacparse->headers =
+        g_list_delete_link (flacparse->headers, flacparse->headers);
+    buf = gst_buffer_make_writable (buf);
+
+    /* init, set and give away frame */
+    gst_base_parse_frame_init (&frame);
+    frame.buffer = buf;
+    frame.overhead = -1;
+    res = gst_base_parse_push_frame (GST_BASE_PARSE (flacparse), &frame);
+    gst_base_parse_frame_free (&frame);
+    if (res != GST_FLOW_OK)
+      break;
+  }
+  /* drop any headers left over after a flow error */
+  g_list_foreach (flacparse->headers, (GFunc) gst_mini_object_unref, NULL);
+  g_list_free (flacparse->headers);
+  flacparse->headers = NULL;
+
+  return res;
+}
+
+/* Generate an empty VORBIS_COMMENT metadata block.
+ * is_last controls the last-metadata-block flag in the block header.
+ * Returns a new buffer (caller owns the reference) with cleared
+ * timestamps/offsets so it can be pushed as a header. */
+static GstBuffer *
+gst_flac_parse_generate_vorbiscomment (GstFlacParse * flacparse,
+    gboolean is_last)
+{
+  GstTagList *taglist = gst_tag_list_new_empty ();
+  guchar header[4];
+  guint size;
+  GstBuffer *vorbiscomment;
+  GstMapInfo map;
+
+  header[0] = (is_last ? 0x80 : 0x00) | 0x04;   /* is_last may vary; type = 4; */
+
+  /* serialize the (empty) tag list with our 4-byte block header prefix;
+   * the 24-bit length in header[1..3] is filled in below */
+  vorbiscomment =
+      gst_tag_list_to_vorbiscomment_buffer (taglist, header,
+      sizeof (header), NULL);
+  gst_tag_list_unref (taglist);
+
+  gst_buffer_map (vorbiscomment, &map, GST_MAP_WRITE);
+
+  /* Get rid of framing bit (Ogg Vorbis trailing 0x01; not part of the
+   * FLAC VORBIS_COMMENT block) */
+  if (map.data[map.size - 1] == 1) {
+    GstBuffer *sub;
+
+    sub =
+        gst_buffer_copy_region (vorbiscomment, GST_BUFFER_COPY_ALL, 0,
+        map.size - 1);
+    gst_buffer_unmap (vorbiscomment, &map);
+    gst_buffer_unref (vorbiscomment);
+    vorbiscomment = sub;
+    gst_buffer_map (vorbiscomment, &map, GST_MAP_WRITE);
+  }
+
+  /* patch the 24-bit big-endian payload length into the block header */
+  size = map.size - 4;
+  map.data[1] = ((size & 0xFF0000) >> 16);
+  map.data[2] = ((size & 0x00FF00) >> 8);
+  map.data[3] = (size & 0x0000FF);
+  gst_buffer_unmap (vorbiscomment, &map);
+  gst_flac_parse_reset_buffer_time_and_offset (vorbiscomment);
+
+  return vorbiscomment;
+}
+
+/* Generate a minimal header set ("fLaC" marker, STREAMINFO, empty
+ * VORBIS_COMMENT) for headerless FLAC streams, from the parameters parsed
+ * out of the first frame header. Appends the buffers to flacparse->headers.
+ *
+ * Fix: the low 4 bits of the 20-bit sample rate were encoded with
+ * "(samplerate >> 0) & 0xf0", i.e. bits 7..4 instead of bits 3..0 shifted
+ * into the top nibble. Decoding such a STREAMINFO (see
+ * gst_flac_parse_handle_streaminfo) yields a wrong rate for e.g. 8000,
+ * 16000 and 48000 Hz (44100 happened to round-trip by coincidence).
+ * Also mask the top nibble of the 36-bit total-samples field so an
+ * oversized duration cannot clobber the bits-per-sample bits sharing
+ * byte 17. */
+static gboolean
+gst_flac_parse_generate_headers (GstFlacParse * flacparse)
+{
+  GstBuffer *marker, *streaminfo;
+  GstMapInfo map;
+
+  /* "fLaC" stream marker */
+  marker = gst_buffer_new_and_alloc (4);
+  gst_buffer_map (marker, &map, GST_MAP_WRITE);
+  memcpy (map.data, "fLaC", 4);
+  gst_buffer_unmap (marker, &map);
+  gst_flac_parse_reset_buffer_time_and_offset (marker);
+  flacparse->headers = g_list_append (flacparse->headers, marker);
+
+  streaminfo = gst_buffer_new_and_alloc (4 + 34);
+  gst_buffer_map (streaminfo, &map, GST_MAP_WRITE);
+  memset (map.data, 0, 4 + 34);
+
+  /* metadata block header */
+  map.data[0] = 0x00;           /* is_last = 0; type = 0; */
+  map.data[1] = 0x00;           /* length = 34; */
+  map.data[2] = 0x00;
+  map.data[3] = 0x22;
+
+  /* streaminfo */
+
+  map.data[4] = (flacparse->block_size >> 8) & 0xff;    /* min blocksize = blocksize; */
+  map.data[5] = (flacparse->block_size) & 0xff;
+  map.data[6] = (flacparse->block_size >> 8) & 0xff;    /* max blocksize = blocksize; */
+  map.data[7] = (flacparse->block_size) & 0xff;
+
+  map.data[8] = 0x00;           /* min framesize = 0; */
+  map.data[9] = 0x00;
+  map.data[10] = 0x00;
+  map.data[11] = 0x00;          /* max framesize = 0; */
+  map.data[12] = 0x00;
+  map.data[13] = 0x00;
+
+  /* 20-bit sample rate: bits 19..12, 11..4, then 3..0 in the top nibble */
+  map.data[14] = (flacparse->samplerate >> 12) & 0xff;
+  map.data[15] = (flacparse->samplerate >> 4) & 0xff;
+  map.data[16] = (flacparse->samplerate << 4) & 0xf0;
+
+  /* 3 bits: channels - 1 */
+  map.data[16] |= (flacparse->channels - 1) << 1;
+
+  /* 5 bits: bps - 1, split across bytes 16 and 17 */
+  map.data[16] |= ((flacparse->bps - 1) >> 4) & 0x01;
+  map.data[17] = (((flacparse->bps - 1)) & 0x0f) << 4;
+
+  {
+    gint64 duration;
+
+    /* 36-bit total samples, derived from the upstream duration if known */
+    if (gst_pad_peer_query_duration (GST_BASE_PARSE_SINK_PAD (flacparse),
+            GST_FORMAT_TIME, &duration) && duration != -1) {
+      duration = GST_CLOCK_TIME_TO_FRAMES (duration, flacparse->samplerate);
+
+      map.data[17] |= (duration >> 32) & 0x0f;
+      map.data[18] |= (duration >> 24) & 0xff;
+      map.data[19] |= (duration >> 16) & 0xff;
+      map.data[20] |= (duration >> 8) & 0xff;
+      map.data[21] |= (duration >> 0) & 0xff;
+    }
+  }
+  /* MD5 = 0; */
+
+  gst_buffer_unmap (streaminfo, &map);
+  gst_flac_parse_reset_buffer_time_and_offset (streaminfo);
+  flacparse->headers = g_list_append (flacparse->headers, streaminfo);
+
+  flacparse->headers = g_list_append (flacparse->headers,
+      gst_flac_parse_generate_vorbiscomment (flacparse, TRUE));
+
+  return TRUE;
+}
+
+/* Clear a header buffer's timing metadata: offsets go to zero and
+ * timestamp/duration to GST_CLOCK_TIME_NONE. */
+static inline void
+gst_flac_parse_reset_buffer_time_and_offset (GstBuffer * buffer)
+{
+  GST_BUFFER_OFFSET (buffer) = 0;
+  GST_BUFFER_OFFSET_END (buffer) = 0;
+  GST_BUFFER_TIMESTAMP (buffer) = GST_CLOCK_TIME_NONE;
+  GST_BUFFER_DURATION (buffer) = GST_CLOCK_TIME_NONE;
+}
+
+/* Dispatch a metadata block to its type-specific handler.
+ * Type 127 is invalid for a metadata block header & should be discarded
+ * _before_ calling this function. PADDING and APPLICATION blocks are
+ * accepted without further processing; unknown types are logged only.
+ * Returns FALSE when the type-specific handler rejected the block. */
+static gboolean
+gst_flac_parse_handle_block_type (GstFlacParse * flacparse, guint type,
+    GstBuffer * sbuffer)
+{
+  gboolean res = TRUE;
+
+  switch (type) {
+    case 0:                    /* STREAMINFO */
+      GST_INFO_OBJECT (flacparse, "STREAMINFO header");
+      res = gst_flac_parse_handle_streaminfo (flacparse, sbuffer);
+      break;
+    case 1:                    /* PADDING */
+      GST_INFO_OBJECT (flacparse, "PADDING header");
+      break;
+    case 2:                    /* APPLICATION */
+      GST_INFO_OBJECT (flacparse, "APPLICATION header");
+      break;
+    case 3:                    /* SEEKTABLE */
+      GST_INFO_OBJECT (flacparse, "SEEKTABLE header");
+      res = gst_flac_parse_handle_seektable (flacparse, sbuffer);
+      break;
+    case 4:                    /* VORBIS_COMMENT */
+      GST_INFO_OBJECT (flacparse, "VORBISCOMMENT header");
+      res = gst_flac_parse_handle_vorbiscomment (flacparse, sbuffer);
+      break;
+    case 5:                    /* CUESHEET */
+      GST_INFO_OBJECT (flacparse, "CUESHEET header");
+      res = gst_flac_parse_handle_cuesheet (flacparse, sbuffer);
+      break;
+    case 6:                    /* PICTURE */
+      GST_INFO_OBJECT (flacparse, "PICTURE header");
+      res = gst_flac_parse_handle_picture (flacparse, sbuffer);
+      break;
+    default:                   /* RESERVED */
+      GST_INFO_OBJECT (flacparse, "Unhandled metadata header type '%u'", type);
+      GST_FIXME_OBJECT (flacparse, "FLAC version might not be fully supported");
+      break;
+  }
+
+  return res;
+}
+
+/* Process a recognized unit of `size` bytes at the start of frame->buffer.
+ *
+ * INIT state: the unit is the "fLaC" marker; store it as a header.
+ * HEADERS state: the unit is a metadata block; dispatch it by type and, on
+ * the last block, emit caps/headers and switch to DATA.
+ * DATA / GENERATE_HEADERS: the unit is an audio frame; generate headers if
+ * needed, then stamp the buffer's timestamps, offsets and overhead.
+ *
+ * Returns GST_BASE_PARSE_FLOW_DROPPED for consumed header units (they are
+ * pushed separately), GST_FLOW_OK for audio, GST_FLOW_ERROR on failure. */
+static GstFlowReturn
+gst_flac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame,
+    gint size)
+{
+  GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
+  GstBuffer *buffer = frame->buffer, *sbuffer;
+  GstMapInfo map;
+  GstFlowReturn res = GST_FLOW_ERROR;
+  guint64 relative_sample_number;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+  if (flacparse->state == GST_FLAC_PARSE_STATE_INIT) {
+    /* "fLaC" marker: keep a copy as the first header buffer */
+    sbuffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, 0, size);
+    gst_flac_parse_reset_buffer_time_and_offset (sbuffer);
+
+    /* 32 bits metadata block */
+    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), 4);
+    flacparse->state = GST_FLAC_PARSE_STATE_HEADERS;
+
+    flacparse->headers = g_list_append (flacparse->headers, sbuffer);
+
+    res = GST_BASE_PARSE_FLOW_DROPPED;
+  } else if (flacparse->state == GST_FLAC_PARSE_STATE_HEADERS) {
+    /* byte 0: MSB = last-metadata-block flag, low 7 bits = block type */
+    gboolean is_last = map.data[0] >> 7;
+    guint type = (map.data[0] & 0x7F);
+
+    if (type == 127) {
+      GST_WARNING_OBJECT (flacparse, "Invalid metadata block type 127");
+      res = GST_BASE_PARSE_FLOW_DROPPED;
+      goto cleanup;
+    }
+
+    GST_DEBUG_OBJECT (flacparse, "Handling metadata block of type %u", type);
+
+    sbuffer = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, 0, size);
+
+    if (gst_flac_parse_handle_block_type (flacparse, type, sbuffer)) {
+      gst_flac_parse_reset_buffer_time_and_offset (sbuffer);
+      flacparse->headers = g_list_append (flacparse->headers, sbuffer);
+    } else {
+      GST_WARNING_OBJECT (parse, "failed to parse header of type %u", type);
+      GST_MEMDUMP_OBJECT (parse, "bad header data", map.data, size);
+
+      gst_buffer_unref (sbuffer);
+
+      /* error out unless we have a STREAMINFO header */
+      if (flacparse->samplerate == 0 || flacparse->bps == 0)
+        goto header_parsing_error;
+
+      /* .. in which case just stop header parsing and try to find audio */
+      is_last = TRUE;
+    }
+
+    if (is_last) {
+      res = gst_flac_parse_handle_headers (flacparse);
+
+      /* Minimal size of a frame header */
+      gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), MAX (9,
+              flacparse->min_framesize));
+      flacparse->state = GST_FLAC_PARSE_STATE_DATA;
+
+      if (res != GST_FLOW_OK)
+        goto cleanup;
+    } else {
+      /* Header length */
+      gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), 4);
+    }
+
+    /* DROPPED because we pushed already or will push all headers manually */
+    res = GST_BASE_PARSE_FLOW_DROPPED;
+  } else {
+    /* audio frame. Re-parse the header if this buffer was not the one the
+     * last gst_flac_parse_frame_is_valid() call examined. */
+    if (flacparse->offset != GST_BUFFER_OFFSET (buffer)) {
+      FrameHeaderCheckReturn ret;
+
+      flacparse->offset = GST_BUFFER_OFFSET (buffer);
+      ret =
+          gst_flac_parse_frame_header_is_valid (flacparse,
+          map.data, map.size, TRUE, NULL, NULL);
+      if (ret != FRAME_HEADER_VALID) {
+        GST_ERROR_OBJECT (flacparse,
+            "Baseclass didn't provide a complete frame");
+        goto cleanup;
+      }
+    }
+
+    if (flacparse->block_size == 0) {
+      GST_ERROR_OBJECT (flacparse, "Unparsed frame");
+      goto cleanup;
+    }
+
+    /* a stored seektable can now be resolved: the first audio frame's
+     * byte offset is known */
+    if (flacparse->seektable)
+      gst_flac_parse_process_seektable (flacparse, GST_BUFFER_OFFSET (buffer));
+
+    if (flacparse->state == GST_FLAC_PARSE_STATE_GENERATE_HEADERS) {
+      if (flacparse->blocking_strategy == 1) {
+        GST_WARNING_OBJECT (flacparse,
+            "Generating headers for variable blocksize streams not supported");
+
+        res = gst_flac_parse_handle_headers (flacparse);
+      } else {
+        GST_DEBUG_OBJECT (flacparse, "Generating headers");
+
+        if (!gst_flac_parse_generate_headers (flacparse))
+          goto cleanup;
+
+        res = gst_flac_parse_handle_headers (flacparse);
+      }
+      flacparse->state = GST_FLAC_PARSE_STATE_DATA;
+      if (res != GST_FLOW_OK)
+        goto cleanup;
+    }
+
+    /* also cater for oggmux metadata */
+    relative_sample_number =
+        flacparse->sample_number - flacparse->first_sample_number;
+    if (flacparse->blocking_strategy == 0) {
+      /* fixed blocksize: sample_number counts frames, not samples */
+      GST_BUFFER_PTS (buffer) =
+          gst_util_uint64_scale (relative_sample_number,
+          flacparse->block_size * GST_SECOND, flacparse->samplerate);
+      GST_BUFFER_OFFSET_END (buffer) =
+          relative_sample_number * flacparse->block_size +
+          flacparse->block_size;
+    } else {
+      /* variable blocksize: sample_number is the frame's first sample */
+      GST_BUFFER_PTS (buffer) =
+          gst_util_uint64_scale (relative_sample_number, GST_SECOND,
+          flacparse->samplerate);
+      GST_BUFFER_OFFSET_END (buffer) =
+          relative_sample_number + flacparse->block_size;
+    }
+
+    /* OFFSET is (ab)used to hold the end timestamp here so DURATION can be
+     * derived as end - start */
+    GST_BUFFER_DTS (buffer) = GST_BUFFER_PTS (buffer);
+    GST_BUFFER_OFFSET (buffer) =
+        gst_util_uint64_scale (GST_BUFFER_OFFSET_END (buffer), GST_SECOND,
+        flacparse->samplerate);
+    GST_BUFFER_DURATION (buffer) =
+        GST_BUFFER_OFFSET (buffer) - GST_BUFFER_PTS (buffer);
+
+    /* To simplify, we just assume that it's a fixed size header and ignore
+     * subframe headers. The first could lead us to be off by 88 bits and
+     * the second even less, so the total inaccuracy is negligible. */
+    frame->overhead = 7;
+
+    /* Minimal size of a frame header */
+    gst_base_parse_set_min_frame_size (GST_BASE_PARSE (flacparse), MAX (9,
+            flacparse->min_framesize));
+
+    /* reset per-frame parsing state */
+    flacparse->offset = -1;
+    flacparse->blocking_strategy = 0;
+    flacparse->sample_number = 0;
+    res = GST_FLOW_OK;
+  }
+
+cleanup:
+  gst_buffer_unmap (buffer, &map);
+  return res;
+
+header_parsing_error:
+  GST_ELEMENT_ERROR (flacparse, STREAM, DECODE, (NULL),
+      ("Failed to parse headers"));
+  goto cleanup;
+}
+
+/* GstBaseParse::pre_push_frame implementation.
+ * Before the first frame is pushed, adds the codec description (derived
+ * from the negotiated src caps) to the pending tags and announces them.
+ * Pushes a TOC event when a cuesheet was parsed, and marks every frame
+ * for clipping against the segment. */
+static GstFlowReturn
+gst_flac_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+{
+  GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
+
+  if (!flacparse->sent_codec_tag) {
+    GstCaps *caps;
+
+    if (flacparse->tags == NULL)
+      flacparse->tags = gst_tag_list_new_empty ();
+
+    /* codec tag */
+    caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+    if (G_UNLIKELY (caps == NULL)) {
+      if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+        GST_INFO_OBJECT (parse, "Src pad is flushing");
+        return GST_FLOW_FLUSHING;
+      }
+      GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+      return GST_FLOW_NOT_NEGOTIATED;
+    }
+    gst_pb_utils_add_codec_description_to_tag_list (flacparse->tags,
+        GST_TAG_AUDIO_CODEC, caps);
+    gst_caps_unref (caps);
+
+    /* Announce our pending tags */
+    gst_base_parse_merge_tags (parse, flacparse->tags, GST_TAG_MERGE_REPLACE);
+
+    /* also signals the end of first-frame processing */
+    flacparse->sent_codec_tag = TRUE;
+  }
+
+  /* Push toc */
+  /* NOTE(review): flacparse->toc is never cleared here, so this event is
+   * re-sent for every frame while a TOC is set — confirm intended */
+  if (flacparse->toc) {
+    gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (flacparse),
+        gst_event_new_toc (flacparse->toc, FALSE));
+  }
+
+  frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+  return GST_FLOW_OK;
+}
+
+/* GstBaseParse::convert vfunc: converts between sample counts
+ * (GST_FORMAT_DEFAULT) and time (GST_FORMAT_TIME) using the stream's
+ * sample rate.  -1 src values pass through as -1.  Anything this function
+ * cannot handle (or before the samplerate is known) is delegated to the
+ * base class implementation. */
+static gboolean
+gst_flac_parse_convert (GstBaseParse * parse,
+ GstFormat src_format, gint64 src_value, GstFormat dest_format,
+ gint64 * dest_value)
+{
+ GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
+
+ if (flacparse->samplerate > 0) {
+ if (src_format == GST_FORMAT_DEFAULT && dest_format == GST_FORMAT_TIME) {
+ if (src_value != -1)
+ *dest_value =
+ gst_util_uint64_scale (src_value, GST_SECOND,
+ flacparse->samplerate);
+ else
+ *dest_value = -1;
+ return TRUE;
+ } else if (src_format == GST_FORMAT_TIME &&
+ dest_format == GST_FORMAT_DEFAULT) {
+ if (src_value != -1)
+ *dest_value =
+ gst_util_uint64_scale (src_value, flacparse->samplerate,
+ GST_SECOND);
+ else
+ *dest_value = -1;
+ return TRUE;
+ }
+ }
+
+ /* unhandled format pair: let the base class try */
+ return GST_BASE_PARSE_CLASS (parent_class)->convert (parse, src_format,
+ src_value, dest_format, dest_value);
+}
+
+/* GstBaseParse::src_event vfunc.
+ *
+ * Handles GST_EVENT_TOC_SELECT by looking up the selected TOC entry's UID
+ * in our stored TOC and, if found, issuing a flushing time-format seek to
+ * the entry's start position via the base class.  The TOC_SELECT event is
+ * consumed (unreffed) in all cases; res stays FALSE when no TOC or no
+ * matching entry exists.  All other events go straight to the base class. */
+static gboolean
+gst_flac_parse_src_event (GstBaseParse * parse, GstEvent * event)
+{
+ GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
+ gboolean res = FALSE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TOC_SELECT:
+ {
+ GstTocEntry *entry = NULL;
+ GstEvent *seek_event;
+ GstToc *toc = NULL;
+ gint64 start_pos;
+ gchar *uid = NULL;
+
+ /* FIXME: some locking would be good */
+ if (flacparse->toc)
+ toc = gst_toc_ref (flacparse->toc);
+
+ if (toc != NULL) {
+ gst_event_parse_toc_select (event, &uid);
+ if (uid != NULL) {
+ entry = gst_toc_find_entry (toc, uid);
+ if (entry != NULL) {
+ /* only the start time is needed for the seek */
+ gst_toc_entry_get_start_stop_times (entry, &start_pos, NULL);
+
+ /* FIXME: use segment rate here instead? */
+ seek_event = gst_event_new_seek (1.0,
+ GST_FORMAT_TIME,
+ GST_SEEK_FLAG_FLUSH,
+ GST_SEEK_TYPE_SET, start_pos, GST_SEEK_TYPE_NONE, -1);
+
+ res =
+ GST_BASE_PARSE_CLASS (parent_class)->src_event (parse,
+ seek_event);
+
+ } else {
+ GST_WARNING_OBJECT (parse, "no TOC entry with given UID: %s", uid);
+ }
+ g_free (uid);
+ }
+ gst_toc_unref (toc);
+ } else {
+ GST_DEBUG_OBJECT (flacparse, "no TOC to select");
+ }
+ /* TOC_SELECT is fully handled here; drop the original event */
+ gst_event_unref (event);
+ break;
+ }
+ default:
+ res = GST_BASE_PARSE_CLASS (parent_class)->src_event (parse, event);
+ break;
+ }
+ return res;
+}
+
+/* Strips the "framed" field from every structure in @caps, in place.
+ * Used when proxying caps queries: framed-ness is what this parser
+ * provides, so it must not constrain upstream negotiation. */
+static void
+remove_fields (GstCaps * caps)
+{
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "framed");
+ }
+}
+
+/* GstBaseParse::get_sink_caps vfunc.
+ *
+ * Computes the caps acceptable on the sink pad by querying the downstream
+ * peer (with the "framed" field removed from both the filter copy and the
+ * peer's answer, since this element adds framing), intersecting with the
+ * sink pad template, and finally intersecting with @filter if given.
+ * Returns a new caps reference owned by the caller. */
+static GstCaps *
+gst_flac_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+ GstCaps *peercaps, *templ;
+ GstCaps *res;
+
+ templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+ if (filter) {
+ GstCaps *fcopy = gst_caps_copy (filter);
+ /* Remove the fields we convert */
+ remove_fields (fcopy);
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
+ gst_caps_unref (fcopy);
+ } else
+ peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
+
+ if (peercaps) {
+ /* Remove the framed field */
+ peercaps = gst_caps_make_writable (peercaps);
+ remove_fields (peercaps);
+
+ res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (peercaps);
+ gst_caps_unref (templ);
+ } else {
+ /* no peer: template ref is transferred to the result */
+ res = templ;
+ }
+
+ if (filter) {
+ GstCaps *intersection;
+
+ intersection =
+ gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (res);
+ res = intersection;
+ }
+
+ return res;
+}
+
+/* GstBaseParse::set_sink_caps vfunc: drains any pending data and, when the
+ * new caps actually differ from the current ones, resets the whole parser
+ * (stop + start) so a new stream starting with FLAC headers can be
+ * accepted.  Always returns TRUE (the caps themselves are accepted). */
+static gboolean
+gst_flac_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps)
+{
+ GstCaps *current_caps;
+ GstFlacParse *flacparse = GST_FLAC_PARSE (parse);
+
+ /* If caps are changing, drain any pending frames we have so that afterwards
+ * we can potentially accept a new stream that is starting with the FLAC
+ * headers again. If headers appear in the middle of the stream we can't
+ * detect them
+ */
+ gst_base_parse_drain (parse);
+
+ /* If the caps did really change we need to reset the parser */
+ current_caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (parse));
+ if (current_caps) {
+ if (!gst_caps_is_strictly_equal (caps, current_caps)) {
+ GST_DEBUG_OBJECT (flacparse, "Reset parser on sink pad caps change");
+ /* full reset: clears headers/tags/toc and restarts the state machine */
+ gst_flac_parse_stop (parse);
+ gst_flac_parse_start (parse);
+ }
+ gst_caps_unref (current_caps);
+ }
+
+ return TRUE;
+}
diff --git a/gst/audioparsers/gstflacparse.h b/gst/audioparsers/gstflacparse.h
new file mode 100644
index 0000000000..55418dfbbe
--- /dev/null
+++ b/gst/audioparsers/gstflacparse.h
@@ -0,0 +1,99 @@
+/* GStreamer
+ *
+ * Copyright (C) 2008 Sebastian Dröge <sebastian.droege@collabora.co.uk>.
+ * Copyright (C) 2009 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ * Copyright (C) 2009 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_FLAC_PARSE_H__
+#define __GST_FLAC_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_FLAC_PARSE (gst_flac_parse_get_type())
+#define GST_FLAC_PARSE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FLAC_PARSE,GstFlacParse))
+#define GST_FLAC_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FLAC_PARSE,GstFlacParseClass))
+#define GST_FLAC_PARSE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_FLAC_PARSE,GstFlacParseClass))
+#define GST_IS_FLAC_PARSE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FLAC_PARSE))
+#define GST_IS_FLAC_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FLAC_PARSE))
+#define GST_FLAC_PARSE_CAST(obj) ((GstFlacParse *)(obj))
+
+typedef struct _GstFlacParse GstFlacParse;
+typedef struct _GstFlacParseClass GstFlacParseClass;
+
+/* Parser state machine: INIT until the stream type is known, then either
+ * HEADERS (real metadata blocks present) or GENERATE_HEADERS (headers must
+ * be synthesized), and finally DATA while audio frames are produced. */
+typedef enum {
+ GST_FLAC_PARSE_STATE_INIT,
+ GST_FLAC_PARSE_STATE_HEADERS,
+ GST_FLAC_PARSE_STATE_GENERATE_HEADERS,
+ GST_FLAC_PARSE_STATE_DATA
+} GstFlacParseState;
+
+/* Per-subframe info; only the subframe type byte is stored. */
+typedef struct {
+ guint8 type;
+} GstFlacParseSubFrame;
+
+struct _GstFlacParse {
+ GstBaseParse parent;
+
+ /* Properties */
+ gboolean check_frame_checksums;
+
+ /* current position in the state machine above */
+ GstFlacParseState state;
+
+ /* total upstream size in bytes (as queried), if known */
+ gint64 upstream_length;
+
+ /* STREAMINFO content */
+ guint16 min_blocksize, max_blocksize;
+ guint32 min_framesize, max_framesize;
+ guint32 samplerate;
+ guint8 channels;
+ guint8 bps;
+ guint64 total_samples;
+
+ /* Current frame */
+ guint64 offset;
+ guint8 blocking_strategy;
+ guint16 block_size;
+ guint64 sample_number;
+ guint64 first_sample_number;
+ gboolean strategy_checked;
+
+ /* TRUE once the codec tag has been pushed; doubles as the marker for
+ * end of first-frame processing (see pre_push_frame) */
+ gboolean sent_codec_tag;
+
+ GstTagList *tags;
+ GstToc *toc;
+
+ /* metadata/header buffers collected for pushing before audio data */
+ GList *headers;
+ GstBuffer *seektable;
+
+ gboolean force_variable_block_size;
+};
+
+/* No class-level members beyond the base class */
+struct _GstFlacParseClass {
+ GstBaseParseClass parent_class;
+};
+
+GType gst_flac_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_FLAC_PARSE_H__ */
diff --git a/gst/audioparsers/gstmpegaudioparse.c b/gst/audioparsers/gstmpegaudioparse.c
new file mode 100644
index 0000000000..f5e0fc2290
--- /dev/null
+++ b/gst/audioparsers/gstmpegaudioparse.c
@@ -0,0 +1,1478 @@
+/* GStreamer MPEG audio parser
+ * Copyright (C) 2006-2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2010 Mark Nauwelaerts <mnauw users sf net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-mpegaudioparse
+ * @title: mpegaudioparse
+ * @short_description: MPEG audio parser
+ * @see_also: #GstAmrParse, #GstAACParse
+ *
+ * Parses and frames mpeg1 audio streams. Provides seeking.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=test.mp3 ! mpegaudioparse ! mpg123audiodec
+ * ! audioconvert ! audioresample ! autoaudiosink
+ * ]|
+ *
+ */
+
+/* FIXME: we should make the base class (GstBaseParse) aware of the
+ * XING seek table somehow, so it can use it properly for things like
+ * accurate seeks. Currently it can only do a lookup via the convert function,
+ * but then doesn't know what the result represents exactly. One could either
+ * add a vfunc for index lookup, or just make mpegaudioparse populate the
+ * base class's index via the API provided.
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstaudioparserselements.h"
+#include "gstmpegaudioparse.h"
+#include <gst/base/gstbytereader.h>
+#include <gst/pbutils/pbutils.h>
+
+GST_DEBUG_CATEGORY_STATIC (mpeg_audio_parse_debug);
+#define GST_CAT_DEFAULT mpeg_audio_parse_debug
+
+#define MPEG_AUDIO_CHANNEL_MODE_UNKNOWN -1
+#define MPEG_AUDIO_CHANNEL_MODE_STEREO 0
+#define MPEG_AUDIO_CHANNEL_MODE_JOINT_STEREO 1
+#define MPEG_AUDIO_CHANNEL_MODE_DUAL_CHANNEL 2
+#define MPEG_AUDIO_CHANNEL_MODE_MONO 3
+
+#define CRC_UNKNOWN -1
+#define CRC_PROTECTED 0
+#define CRC_NOT_PROTECTED 1
+
+#define XING_FRAMES_FLAG 0x0001
+#define XING_BYTES_FLAG 0x0002
+#define XING_TOC_FLAG 0x0004
+#define XING_VBR_SCALE_FLAG 0x0008
+
+#define MIN_FRAME_SIZE 6
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg, "
+ "mpegversion = (int) 1, "
+ "layer = (int) [ 1, 3 ], "
+ "mpegaudioversion = (int) [ 1, 3], "
+ "rate = (int) [ 8000, 48000 ], "
+ "channels = (int) [ 1, 2 ], " "parsed=(boolean) true")
+ );
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg, mpegversion = (int) 1")
+ );
+
+static void gst_mpeg_audio_parse_finalize (GObject * object);
+
+static gboolean gst_mpeg_audio_parse_start (GstBaseParse * parse);
+static gboolean gst_mpeg_audio_parse_stop (GstBaseParse * parse);
+static GstFlowReturn gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstFlowReturn gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+static gboolean gst_mpeg_audio_parse_convert (GstBaseParse * parse,
+ GstFormat src_format, gint64 src_value,
+ GstFormat dest_format, gint64 * dest_value);
+static GstCaps *gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+
+static void gst_mpeg_audio_parse_handle_first_frame (GstMpegAudioParse *
+ mp3parse, GstBuffer * buf);
+
+#define gst_mpeg_audio_parse_parent_class parent_class
+G_DEFINE_TYPE (GstMpegAudioParse, gst_mpeg_audio_parse, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE (mpegaudioparse, "mpegaudioparse",
+ GST_RANK_PRIMARY + 2, GST_TYPE_MPEG_AUDIO_PARSE);
+
+#define GST_TYPE_MPEG_AUDIO_CHANNEL_MODE \
+ (gst_mpeg_audio_channel_mode_get_type())
+
+/* GEnumValue table backing the channel-mode enum type; must stay
+ * NULL-terminated for g_enum_register_static().  Values mirror the
+ * MPEG_AUDIO_CHANNEL_MODE_* defines above. */
+static const GEnumValue mpeg_audio_channel_mode[] = {
+ {MPEG_AUDIO_CHANNEL_MODE_UNKNOWN, "Unknown", "unknown"},
+ {MPEG_AUDIO_CHANNEL_MODE_MONO, "Mono", "mono"},
+ {MPEG_AUDIO_CHANNEL_MODE_DUAL_CHANNEL, "Dual Channel", "dual-channel"},
+ {MPEG_AUDIO_CHANNEL_MODE_JOINT_STEREO, "Joint Stereo", "joint-stereo"},
+ {MPEG_AUDIO_CHANNEL_MODE_STEREO, "Stereo", "stereo"},
+ {0, NULL, NULL},
+};
+
+/* Lazily registers and returns the GType for the channel-mode enum.
+ * NOTE(review): the check-then-register is not guarded by g_once or a
+ * lock — presumably only reached from class_init / single-threaded type
+ * registration; confirm if callable from arbitrary threads. */
+static GType
+gst_mpeg_audio_channel_mode_get_type (void)
+{
+ static GType mpeg_audio_channel_mode_type = 0;
+
+ if (!mpeg_audio_channel_mode_type) {
+ mpeg_audio_channel_mode_type =
+ g_enum_register_static ("GstMpegAudioChannelMode",
+ mpeg_audio_channel_mode);
+ }
+ return mpeg_audio_channel_mode_type;
+}
+
+/* Maps a channel-mode value to its short nick string by linear search of
+ * the table above; returns NULL when @mode is not in the table (callers
+ * passing the result to printf-style "%s" should be aware of this). */
+static const gchar *
+gst_mpeg_audio_channel_mode_get_nick (gint mode)
+{
+ guint i;
+ for (i = 0; i < G_N_ELEMENTS (mpeg_audio_channel_mode); i++) {
+ if (mpeg_audio_channel_mode[i].value == mode)
+ return mpeg_audio_channel_mode[i].value_nick;
+ }
+ return NULL;
+}
+
+/* Class initializer: wires up the GstBaseParse vfuncs, registers the two
+ * custom tags ("has-crc", "channel-mode"), keeps the channel-mode enum
+ * class alive via g_type_class_ref, and installs pad templates and
+ * element metadata. */
+static void
+gst_mpeg_audio_parse_class_init (GstMpegAudioParseClass * klass)
+{
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (mpeg_audio_parse_debug, "mpegaudioparse", 0,
+ "MPEG1 audio stream parser");
+
+ object_class->finalize = gst_mpeg_audio_parse_finalize;
+
+ parse_class->start = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_start);
+ parse_class->stop = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_stop);
+ parse_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_handle_frame);
+ parse_class->pre_push_frame =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_pre_push_frame);
+ parse_class->convert = GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_convert);
+ parse_class->get_sink_caps =
+ GST_DEBUG_FUNCPTR (gst_mpeg_audio_parse_get_sink_caps);
+
+ /* register tags */
+#define GST_TAG_CRC "has-crc"
+#define GST_TAG_MODE "channel-mode"
+
+ gst_tag_register (GST_TAG_CRC, GST_TAG_FLAG_META, G_TYPE_BOOLEAN,
+ "has crc", "Using CRC", NULL);
+ gst_tag_register (GST_TAG_MODE, GST_TAG_FLAG_ENCODED, G_TYPE_STRING,
+ "channel mode", "MPEG audio channel mode", NULL);
+
+ /* deliberately leaked ref: keeps the enum class registered for the
+ * lifetime of the process */
+ g_type_class_ref (GST_TYPE_MPEG_AUDIO_CHANNEL_MODE);
+
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+
+ gst_element_class_set_static_metadata (element_class, "MPEG1 Audio Parser",
+ "Codec/Parser/Audio",
+ "Parses and frames mpeg1 audio streams (levels 1-3), provides seek",
+ "Jan Schmidt <thaytan@mad.scientist.com>,"
+ "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
+}
+
+/* Resets all per-stream parser state to its pristine values: stream
+ * format (-1 = unknown), CRC/channel-mode tag caches, free-format rate,
+ * bitrate tracking, and all Xing/VBRI header data.  The heap-allocated
+ * VBRI seek table is freed here, so this also serves as per-stream
+ * cleanup (called from start, stop and init). */
+static void
+gst_mpeg_audio_parse_reset (GstMpegAudioParse * mp3parse)
+{
+ mp3parse->channels = -1;
+ mp3parse->rate = -1;
+ mp3parse->sent_codec_tag = FALSE;
+ mp3parse->last_posted_crc = CRC_UNKNOWN;
+ mp3parse->last_posted_channel_mode = MPEG_AUDIO_CHANNEL_MODE_UNKNOWN;
+ mp3parse->freerate = 0;
+
+ mp3parse->hdr_bitrate = 0;
+ mp3parse->bitrate_is_constant = TRUE;
+
+ mp3parse->xing_flags = 0;
+ mp3parse->xing_bitrate = 0;
+ mp3parse->xing_frames = 0;
+ mp3parse->xing_total_time = 0;
+ mp3parse->xing_bytes = 0;
+ mp3parse->xing_vbr_scale = 0;
+ memset (mp3parse->xing_seek_table, 0, sizeof (mp3parse->xing_seek_table));
+ memset (mp3parse->xing_seek_table_inverse, 0,
+ sizeof (mp3parse->xing_seek_table_inverse));
+
+ mp3parse->vbri_bitrate = 0;
+ mp3parse->vbri_frames = 0;
+ mp3parse->vbri_total_time = 0;
+ mp3parse->vbri_bytes = 0;
+ mp3parse->vbri_seek_points = 0;
+ /* free + NULL so a later reset/free is safe */
+ g_free (mp3parse->vbri_seek_table);
+ mp3parse->vbri_seek_table = NULL;
+
+ mp3parse->encoder_delay = 0;
+ mp3parse->encoder_padding = 0;
+}
+
+/* Instance initializer: puts the parser into its reset state and relaxes
+ * sink-pad caps acceptance (intersect with template instead of full
+ * caps-query), as is usual for parsers that accept unparsed input. */
+static void
+gst_mpeg_audio_parse_init (GstMpegAudioParse * mp3parse)
+{
+ gst_mpeg_audio_parse_reset (mp3parse);
+ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (mp3parse));
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (mp3parse));
+}
+
+/* GObject finalize: nothing instance-specific to free here (per-stream
+ * allocations are released in _reset via stop); just chains up. */
+static void
+gst_mpeg_audio_parse_finalize (GObject * object)
+{
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GstBaseParse::start vfunc: sets the minimum amount of data needed to
+ * attempt a sync (MIN_FRAME_SIZE bytes) and resets all stream state. */
+static gboolean
+gst_mpeg_audio_parse_start (GstBaseParse * parse)
+{
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (mp3parse), MIN_FRAME_SIZE);
+ GST_DEBUG_OBJECT (parse, "starting");
+
+ gst_mpeg_audio_parse_reset (mp3parse);
+
+ return TRUE;
+}
+
+/* GstBaseParse::stop vfunc: releases per-stream state (including the VBRI
+ * seek table, freed inside _reset). */
+static gboolean
+gst_mpeg_audio_parse_stop (GstBaseParse * parse)
+{
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+
+ GST_DEBUG_OBJECT (parse, "stopping");
+
+ gst_mpeg_audio_parse_reset (mp3parse);
+
+ return TRUE;
+}
+
+/* Bitrate lookup table in kbit/s, indexed [lsf][layer - 1][bitrate bits].
+ * lsf=0 is MPEG-1, lsf=1 covers MPEG-2 and MPEG-2.5.  Index 0 denotes
+ * free-format (bitrate not signalled) and index 15 is reserved/invalid. */
+static const guint mp3types_bitrates[2][3][16] = {
+ {
+ {0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448,},
+ {0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384,},
+ {0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320,}
+ },
+ {
+ {0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256,},
+ {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,},
+ {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,}
+ },
+};
+
+/* Sample-rate lookup table in Hz, indexed [lsf + mpg25][samplerate bits]:
+ * row 0 = MPEG-1, row 1 = MPEG-2, row 2 = MPEG-2.5. */
+static const guint mp3types_freqs[3][3] = { {44100, 48000, 32000},
+{22050, 24000, 16000},
+{11025, 12000, 8000}
+};
+
+/* Decodes an MPEG audio 32-bit frame header and computes the frame length
+ * in bytes.  Optionally returns the decoded version (1/2/3 for
+ * MPEG-1/2/2.5), layer (1-3), channel count, bitrate (bits/s),
+ * samplerate (Hz), channel mode bits and CRC-protection bit through the
+ * put_* out-parameters (each may be NULL).
+ *
+ * A signalled bitrate of 0 means free format; in that case the previously
+ * determined mp3parse->freerate is substituted, and if that is also 0 the
+ * computed length is 0 — callers treat a 0 return as "length unknown". */
+static inline guint
+mp3_type_frame_length_from_header (GstMpegAudioParse * mp3parse, guint32 header,
+ guint * put_version, guint * put_layer, guint * put_channels,
+ guint * put_bitrate, guint * put_samplerate, guint * put_mode,
+ guint * put_crc)
+{
+ guint length;
+ gulong mode, samplerate, bitrate, layer, channels, padding, crc;
+ gulong version;
+ gint lsf, mpg25;
+
+ /* bit 20 set: MPEG-1 or MPEG-2 (bit 19 distinguishes); clear: MPEG-2.5 */
+ if (header & (1 << 20)) {
+ lsf = (header & (1 << 19)) ? 0 : 1;
+ mpg25 = 0;
+ } else {
+ lsf = 1;
+ mpg25 = 1;
+ }
+
+ version = 1 + lsf + mpg25;
+
+ layer = 4 - ((header >> 17) & 0x3);
+
+ crc = (header >> 16) & 0x1;
+
+ bitrate = (header >> 12) & 0xF;
+ bitrate = mp3types_bitrates[lsf][layer - 1][bitrate] * 1000;
+ if (!bitrate) {
+ GST_LOG_OBJECT (mp3parse, "using freeform bitrate");
+ bitrate = mp3parse->freerate;
+ }
+
+ samplerate = (header >> 10) & 0x3;
+ samplerate = mp3types_freqs[lsf + mpg25][samplerate];
+
+ /* force 0 length if 0 bitrate */
+ padding = (bitrate > 0) ? (header >> 9) & 0x1 : 0;
+
+ mode = (header >> 6) & 0x3;
+ channels = (mode == 3) ? 1 : 2;
+
+ /* standard per-layer frame length formulas (bytes) */
+ switch (layer) {
+ case 1:
+ length = 4 * ((bitrate * 12) / samplerate + padding);
+ break;
+ case 2:
+ length = (bitrate * 144) / samplerate + padding;
+ break;
+ default:
+ case 3:
+ length = (bitrate * 144) / (samplerate << lsf) + padding;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (mp3parse, "Calculated mp3 frame length of %u bytes",
+ length);
+ /* NOTE(review): _get_nick() can return NULL for an unknown mode; glib's
+ * printf handles NULL "%s" but confirm this is acceptable here. */
+ GST_DEBUG_OBJECT (mp3parse, "samplerate = %lu, bitrate = %lu, version = %lu, "
+ "layer = %lu, channels = %lu, mode = %s", samplerate, bitrate, version,
+ layer, channels, gst_mpeg_audio_channel_mode_get_nick (mode));
+
+ if (put_version)
+ *put_version = version;
+ if (put_layer)
+ *put_layer = layer;
+ if (put_channels)
+ *put_channels = channels;
+ if (put_bitrate)
+ *put_bitrate = bitrate;
+ if (put_samplerate)
+ *put_samplerate = samplerate;
+ if (put_mode)
+ *put_mode = mode;
+ if (put_crc)
+ *put_crc = crc;
+
+ return length;
+}
+
+/* Minimum number of consecutive, valid-looking frames to consider
+ * for resyncing */
+#define MIN_RESYNC_FRAMES 3
+
+/* Perform extended validation to check that subsequent headers match
+ * the first header given here in important characteristics, to avoid
+ * false sync. We look for a minimum of MIN_RESYNC_FRAMES consecutive
+ * frames to match their major characteristics.
+ *
+ * If at_eos is set to TRUE, we just check that we don't find any invalid
+ * frames in whatever data is available, rather than requiring a full
+ * MIN_RESYNC_FRAMES of data.
+ *
+ * Returns TRUE if we've seen enough data to validate or reject the frame.
+ * If TRUE is returned, then *valid contains TRUE if it validated, or false
+ * if we decided it was false sync.
+ * If FALSE is returned, then *valid contains minimum needed data.
+ */
+/* Walks the buffer frame-by-frame (using each header's computed length)
+ * and checks that MIN_RESYNC_FRAMES consecutive headers agree with
+ * @header in all the bits outside HDRMASK, to guard against false sync.
+ * See the contract in the comment block above the function: TRUE return
+ * means a verdict was reached (*valid = TRUE/FALSE); FALSE return means
+ * more data is needed and *valid holds the byte count required. */
+static gboolean
+gst_mp3parse_validate_extended (GstMpegAudioParse * mp3parse, GstBuffer * buf,
+ guint32 header, int bpf, gboolean at_eos, gint * valid)
+{
+ guint32 next_header;
+ GstMapInfo map;
+ gboolean res = TRUE;
+ int frames_found = 1;
+ int offset = bpf;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ while (frames_found < MIN_RESYNC_FRAMES) {
+ /* Check if we have enough data for all these frames, plus the next
+ frame header. */
+ /* (offset is always >= 0 here, so the int -> size_t promotion in the
+ comparison is safe) */
+ if (map.size < offset + 4) {
+ if (at_eos) {
+ /* Running out of data at EOS is fine; just accept it */
+ *valid = TRUE;
+ goto cleanup;
+ } else {
+ *valid = offset + 4;
+ res = FALSE;
+ goto cleanup;
+ }
+ }
+
+ next_header = GST_READ_UINT32_BE (map.data + offset);
+ GST_DEBUG_OBJECT (mp3parse, "At %d: header=%08X, header2=%08X, bpf=%d",
+ offset, (unsigned int) header, (unsigned int) next_header, bpf);
+
+/* mask the bits which are allowed to differ between frames */
+#define HDRMASK ~((0xF << 12) /* bitrate */ | \
+ (0x1 << 9) /* padding */ | \
+ (0xf << 4) /* mode|mode extension */ | \
+ (0xf)) /* copyright|emphasis */
+
+ if ((next_header & HDRMASK) != (header & HDRMASK)) {
+ /* If any of the unmasked bits don't match, then it's not valid */
+ GST_DEBUG_OBJECT (mp3parse, "next header doesn't match "
+ "(header=%08X (%08X), header2=%08X (%08X), bpf=%d)",
+ (guint) header, (guint) header & HDRMASK, (guint) next_header,
+ (guint) next_header & HDRMASK, bpf);
+ *valid = FALSE;
+ goto cleanup;
+ } else if (((next_header >> 12) & 0xf) == 0xf) {
+ /* The essential parts were the same, but the bitrate held an
+ invalid value - also reject */
+ GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate)");
+ *valid = FALSE;
+ goto cleanup;
+ }
+
+ bpf = mp3_type_frame_length_from_header (mp3parse, next_header,
+ NULL, NULL, NULL, NULL, NULL, NULL, NULL);
+
+ /* if no bitrate, and no freeform rate known, then fail */
+ if (G_UNLIKELY (!bpf)) {
+ GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate 0)");
+ *valid = FALSE;
+ goto cleanup;
+ }
+
+ offset += bpf;
+ frames_found++;
+ }
+
+ *valid = TRUE;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+ return res;
+}
+
+/* Sanity-checks a candidate 32-bit MPEG audio frame header: sync pattern,
+ * MPEG version bits, layer, bitrate index and samplerate index must all be
+ * valid.  An "invalid" emphasis value of 0x2 is only warned about, not
+ * rejected, since such files exist in the wild and play fine (BGO #537235). */
+static gboolean
+gst_mpeg_audio_parse_head_check (GstMpegAudioParse * mp3parse,
+ unsigned long head)
+{
+ GST_DEBUG_OBJECT (mp3parse, "checking mp3 header 0x%08lx", head);
+ /* if it's not a valid sync */
+ if ((head & 0xffe00000) != 0xffe00000) {
+ GST_WARNING_OBJECT (mp3parse, "invalid sync");
+ return FALSE;
+ }
+ /* if it's an invalid MPEG version */
+ if (((head >> 19) & 3) == 0x1) {
+ GST_WARNING_OBJECT (mp3parse, "invalid MPEG version: 0x%lx",
+ (head >> 19) & 3);
+ return FALSE;
+ }
+ /* if it's an invalid layer */
+ if (!((head >> 17) & 3)) {
+ GST_WARNING_OBJECT (mp3parse, "invalid layer: 0x%lx", (head >> 17) & 3);
+ return FALSE;
+ }
+ /* if it's an invalid bitrate */
+ if (((head >> 12) & 0xf) == 0xf) {
+ GST_WARNING_OBJECT (mp3parse, "invalid bitrate: 0x%lx", (head >> 12) & 0xf);
+ return FALSE;
+ }
+ /* if it's an invalid samplerate */
+ if (((head >> 10) & 0x3) == 0x3) {
+ GST_WARNING_OBJECT (mp3parse, "invalid samplerate: 0x%lx",
+ (head >> 10) & 0x3);
+ return FALSE;
+ }
+
+ if ((head & 0x3) == 0x2) {
+ /* Ignore this as there are some files with emphasis 0x2 that can
+ * be played fine. See BGO #537235 */
+ GST_WARNING_OBJECT (mp3parse, "invalid emphasis: 0x%lx", head & 0x3);
+ }
+
+ return TRUE;
+}
+
+/* Determines possible freeform frame rate/size by looking for next
+ * header with valid bitrate (0 or otherwise valid) (and sufficiently
+ * matching current header).
+ *
+ * Returns TRUE if we've found such one, and *rate then contains rate
+ * (or *rate contains 0 if decided no freeframe size could be determined).
+ * If not enough data, returns FALSE.
+ */
+/* Scans forward byte-by-byte from offset 4 looking for the next header
+ * that matches @header under HDRMASK; the distance to it determines the
+ * free-format frame size, from which a bitrate is back-computed using the
+ * layer-specific length formula (inverted).  See the contract in the
+ * comment block above: TRUE means a decision was made (*_rate is the rate
+ * in bit/s, or 0 if none could be determined); FALSE means more data is
+ * needed (unless at_eos). */
+static gboolean
+gst_mp3parse_find_freerate (GstMpegAudioParse * mp3parse, GstMapInfo * map,
+ guint32 header, gboolean at_eos, gint * _rate)
+{
+ guint32 next_header;
+ const guint8 *data;
+ guint available;
+ int offset = 4;
+ gulong samplerate, rate, layer, padding;
+ gboolean valid;
+ gint lsf, mpg25;
+
+ available = map->size;
+ data = map->data;
+
+ *_rate = 0;
+
+ /* pick apart header again partially */
+ if (header & (1 << 20)) {
+ lsf = (header & (1 << 19)) ? 0 : 1;
+ mpg25 = 0;
+ } else {
+ lsf = 1;
+ mpg25 = 1;
+ }
+ layer = 4 - ((header >> 17) & 0x3);
+ samplerate = (header >> 10) & 0x3;
+ samplerate = mp3types_freqs[lsf + mpg25][samplerate];
+ padding = (header >> 9) & 0x1;
+
+ for (; offset < available; ++offset) {
+ /* Check if we have enough data for all these frames, plus the next
+ frame header. */
+ if (available < offset + 4) {
+ if (at_eos) {
+ /* Running out of data; failed to determine size */
+ return TRUE;
+ } else {
+ return FALSE;
+ }
+ }
+
+ valid = FALSE;
+ next_header = GST_READ_UINT32_BE (data + offset);
+ if ((next_header & 0xFFE00000) != 0xFFE00000)
+ goto next;
+
+ GST_DEBUG_OBJECT (mp3parse, "At %d: header=%08X, header2=%08X",
+ offset, (unsigned int) header, (unsigned int) next_header);
+
+ if ((next_header & HDRMASK) != (header & HDRMASK)) {
+ /* If any of the unmasked bits don't match, then it's not valid */
+ GST_DEBUG_OBJECT (mp3parse, "next header doesn't match "
+ "(header=%08X (%08X), header2=%08X (%08X))",
+ (guint) header, (guint) header & HDRMASK, (guint) next_header,
+ (guint) next_header & HDRMASK);
+ goto next;
+ } else if (((next_header >> 12) & 0xf) == 0xf) {
+ /* The essential parts were the same, but the bitrate held an
+ invalid value - also reject */
+ GST_DEBUG_OBJECT (mp3parse, "next header invalid (bitrate)");
+ goto next;
+ }
+
+ valid = TRUE;
+
+ next:
+ /* almost accept as free frame */
+ /* the rate below is computed on every iteration (even a non-matching
+ * one) so the !valid branch can abort once the implied rate becomes
+ * absurd */
+ if (layer == 1) {
+ rate = samplerate * (offset - 4 * padding + 4) / 48000;
+ } else {
+ rate = samplerate * (offset - padding + 1) / (144 >> lsf) / 1000;
+ }
+
+ if (valid) {
+ GST_LOG_OBJECT (mp3parse, "calculated rate %lu", rate * 1000);
+ if (rate < 8 || (layer == 3 && rate > 640)) {
+ GST_DEBUG_OBJECT (mp3parse, "rate invalid");
+ if (rate < 8) {
+ /* maybe some hope */
+ continue;
+ } else {
+ GST_DEBUG_OBJECT (mp3parse, "aborting");
+ /* give up */
+ break;
+ }
+ }
+ *_rate = rate * 1000;
+ break;
+ } else {
+ /* avoid indefinite searching */
+ if (rate > 1000) {
+ GST_DEBUG_OBJECT (mp3parse, "exceeded sanity rate; aborting");
+ break;
+ }
+ }
+ }
+
+ return TRUE;
+}
+
+/* GstBaseParse::handle_frame vfunc — the core sync/validation loop.
+ *
+ * Finds an MPEG audio sync word in the buffer, sanity-checks the header,
+ * handles free-format bitrate detection and extended validation after
+ * lost sync or a caps change, updates src caps / frame rate on format
+ * change, and finally asks the base class to finish the frame of bpf
+ * bytes.  On any rejection path, *skipsize tells the base class how many
+ * bytes to discard before retrying. */
+static GstFlowReturn
+gst_mpeg_audio_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
+{
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+ GstBuffer *buf = frame->buffer;
+ GstByteReader reader;
+ gint off, bpf = 0;
+ gboolean lost_sync, draining, valid, caps_change;
+ guint32 header;
+ guint bitrate, layer, rate, channels, version, mode, crc;
+ GstMapInfo map;
+ gboolean res = FALSE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (G_UNLIKELY (map.size < 6)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
+
+ gst_byte_reader_init (&reader, map.data, map.size);
+
+ /* scan for the 11-bit sync pattern (0xffe) anywhere in the buffer */
+ off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffe00000, 0xffe00000,
+ 0, map.size);
+
+ GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
+
+ /* didn't find anything that looks like a sync word, skip */
+ if (off < 0) {
+ /* keep the last 3 bytes: a sync word could straddle the boundary */
+ *skipsize = map.size - 3;
+ goto cleanup;
+ }
+
+ /* possible frame header, but not at offset 0? skip bytes before sync */
+ if (off > 0) {
+ *skipsize = off;
+ goto cleanup;
+ }
+
+ /* make sure the values in the frame header look sane */
+ header = GST_READ_UINT32_BE (map.data);
+ if (!gst_mpeg_audio_parse_head_check (mp3parse, header)) {
+ *skipsize = 1;
+ goto cleanup;
+ }
+
+ GST_LOG_OBJECT (parse, "got frame");
+
+ lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
+ draining = GST_BASE_PARSE_DRAINING (parse);
+
+ /* a previously determined free-format rate is not trustworthy after
+ * losing sync */
+ if (G_UNLIKELY (lost_sync))
+ mp3parse->freerate = 0;
+
+ bpf = mp3_type_frame_length_from_header (mp3parse, header,
+ &version, &layer, &channels, &bitrate, &rate, &mode, &crc);
+
+ /* (the cached values start at -1, so the first frame always counts as a
+ * caps change) */
+ if (channels != mp3parse->channels || rate != mp3parse->rate ||
+ layer != mp3parse->layer || version != mp3parse->version)
+ caps_change = TRUE;
+ else
+ caps_change = FALSE;
+
+ /* maybe free format */
+ if (bpf == 0) {
+ GST_LOG_OBJECT (mp3parse, "possibly free format");
+ if (lost_sync || mp3parse->freerate == 0) {
+ GST_DEBUG_OBJECT (mp3parse, "finding free format rate");
+ if (!gst_mp3parse_find_freerate (mp3parse, &map, header, draining,
+ &valid)) {
+ /* not enough data */
+ gst_base_parse_set_min_frame_size (parse, valid);
+ *skipsize = 0;
+ goto cleanup;
+ } else {
+ GST_DEBUG_OBJECT (parse, "determined freeform size %d", valid);
+ mp3parse->freerate = valid;
+ }
+ }
+ /* try again */
+ bpf = mp3_type_frame_length_from_header (mp3parse, header,
+ &version, &layer, &channels, &bitrate, &rate, &mode, &crc);
+ if (!bpf) {
+ /* did not come up with valid freeform length, reject after all */
+ *skipsize = 1;
+ goto cleanup;
+ }
+ }
+
+ if (!draining && (lost_sync || caps_change)) {
+ if (!gst_mp3parse_validate_extended (mp3parse, buf, header, bpf, draining,
+ &valid)) {
+ /* not enough data */
+ gst_base_parse_set_min_frame_size (parse, valid);
+ *skipsize = 0;
+ goto cleanup;
+ } else {
+ if (!valid) {
+ *skipsize = off + 2;
+ goto cleanup;
+ }
+ }
+ } else if (draining && lost_sync && caps_change && mp3parse->rate > 0) {
+ /* avoid caps jitter that we can't be sure of */
+ *skipsize = off + 2;
+ goto cleanup;
+ }
+
+ /* restore default minimum */
+ gst_base_parse_set_min_frame_size (parse, MIN_FRAME_SIZE);
+
+ res = TRUE;
+
+ /* metadata handling */
+ if (G_UNLIKELY (caps_change)) {
+ GstCaps *caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1,
+ "mpegaudioversion", G_TYPE_INT, version,
+ "layer", G_TYPE_INT, layer,
+ "rate", G_TYPE_INT, rate,
+ "channels", G_TYPE_INT, channels, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
+ gst_caps_unref (caps);
+
+ mp3parse->rate = rate;
+ mp3parse->channels = channels;
+ mp3parse->layer = layer;
+ mp3parse->version = version;
+
+ /* see http://www.codeproject.com/audio/MPEGAudioInfo.asp */
+ if (mp3parse->layer == 1)
+ mp3parse->spf = 384;
+ else if (mp3parse->layer == 2)
+ mp3parse->spf = 1152;
+ else if (mp3parse->version == 1) {
+ mp3parse->spf = 1152;
+ } else {
+ /* MPEG-2 or "2.5" */
+ mp3parse->spf = 576;
+ }
+
+ /* lead_in:
+ * We start pushing 9 frames earlier (29 frames for MPEG2) than
+ * segment start to be able to decode the first frame we want.
+ * 9 (29) frames are the theoretical maximum of frames that contain
+ * data for the current frame (bit reservoir).
+ *
+ * lead_out:
+ * Some mp3 streams have an offset in the timestamps, for which we have to
+ * push the frame *after* the end position in order for the decoder to be
+ * able to decode everything up until the segment.stop position. */
+ gst_base_parse_set_frame_rate (parse, mp3parse->rate, mp3parse->spf,
+ (version == 1) ? 10 : 30, 2);
+ }
+
+ if (mp3parse->hdr_bitrate && mp3parse->hdr_bitrate != bitrate) {
+ mp3parse->bitrate_is_constant = FALSE;
+ }
+ mp3parse->hdr_bitrate = bitrate;
+
+ /* For first frame; check for seek tables and output a codec tag */
+ gst_mpeg_audio_parse_handle_first_frame (mp3parse, buf);
+
+ /* store some frame info for later processing */
+ mp3parse->last_crc = crc;
+ mp3parse->last_mode = mode;
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+
+ /* NOTE(review): map.size is read here after gst_buffer_unmap — map is a
+ * local GstMapInfo, presumably left untouched by unmap; confirm unmap
+ * does not clear the struct fields. */
+ if (res && bpf <= map.size) {
+ return gst_base_parse_finish_frame (parse, frame, bpf);
+ }
+
+ return GST_FLOW_OK;
+}
+
+/* Inspect the first MP3 frame for a Xing/Info, VBRI or LAME header and, when
+ * one is found, extract total duration, total byte size, seek tables and the
+ * LAME encoder delay/padding so duration queries and seeking can be answered
+ * accurately. Runs only once per stream (gated by sent_codec_tag, which
+ * pre_push_frame sets after the first frame). Reads only from @buf; never
+ * fails, it just leaves the Xing/VBRI state unset on malformed headers. */
+static void
+gst_mpeg_audio_parse_handle_first_frame (GstMpegAudioParse * mp3parse,
+ GstBuffer * buf)
+{
+ const guint32 xing_id = 0x58696e67; /* 'Xing' in hex */
+ const guint32 info_id = 0x496e666f; /* 'Info' in hex - found in LAME CBR files */
+ const guint32 vbri_id = 0x56425249; /* 'VBRI' in hex */
+ const guint32 lame_id = 0x4c414d45; /* 'LAME' in hex */
+ gint offset_xing, offset_vbri;
+ guint64 avail;
+ gint64 upstream_total_bytes = 0;
+ guint32 read_id_xing = 0, read_id_vbri = 0;
+ GstMapInfo map;
+ guint8 *data;
+ guint bitrate;
+
+ /* sent_codec_tag doubles as the "first frame already handled" flag */
+ if (mp3parse->sent_codec_tag)
+ return;
+
+ /* Check first frame for Xing info */
+ /* Xing offset depends on MPEG version and channel count because it sits
+ * right after the side-info block, whose size varies */
+ if (mp3parse->version == 1) { /* MPEG-1 file */
+ if (mp3parse->channels == 1)
+ offset_xing = 0x11;
+ else
+ offset_xing = 0x20;
+ } else { /* MPEG-2 header */
+ if (mp3parse->channels == 1)
+ offset_xing = 0x09;
+ else
+ offset_xing = 0x11;
+ }
+
+ /* The VBRI tag is always at offset 0x20 */
+ offset_vbri = 0x20;
+
+ /* Skip the 4 bytes of the MP3 header too */
+ offset_xing += 4;
+ offset_vbri += 4;
+
+ /* Check if we have enough data to read the Xing header */
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ avail = map.size;
+
+ if (avail >= offset_xing + 4) {
+ read_id_xing = GST_READ_UINT32_BE (data + offset_xing);
+ }
+ if (avail >= offset_vbri + 4) {
+ read_id_vbri = GST_READ_UINT32_BE (data + offset_vbri);
+ }
+
+ /* obtain real upstream total bytes */
+ if (!gst_pad_peer_query_duration (GST_BASE_PARSE_SINK_PAD (mp3parse),
+ GST_FORMAT_BYTES, &upstream_total_bytes))
+ upstream_total_bytes = 0;
+
+ if (read_id_xing == xing_id || read_id_xing == info_id) {
+ guint32 xing_flags;
+ guint bytes_needed = offset_xing + 8;
+ gint64 total_bytes;
+ GstClockTime total_time;
+
+ GST_DEBUG_OBJECT (mp3parse, "Found Xing header marker 0x%x", xing_id);
+
+ /* Move data after Xing header */
+ data += offset_xing + 4;
+
+ /* Read 4 base bytes of flags, big-endian */
+ xing_flags = GST_READ_UINT32_BE (data);
+ data += 4;
+ /* the flags determine which optional fields follow, hence how many
+ * bytes we need before parsing any of them */
+ if (xing_flags & XING_FRAMES_FLAG)
+ bytes_needed += 4;
+ if (xing_flags & XING_BYTES_FLAG)
+ bytes_needed += 4;
+ if (xing_flags & XING_TOC_FLAG)
+ bytes_needed += 100;
+ if (xing_flags & XING_VBR_SCALE_FLAG)
+ bytes_needed += 4;
+ if (avail < bytes_needed) {
+ GST_DEBUG_OBJECT (mp3parse,
+ "Not enough data to read Xing header (need %d)", bytes_needed);
+ goto cleanup;
+ }
+
+ GST_DEBUG_OBJECT (mp3parse, "Reading Xing header");
+ mp3parse->xing_flags = xing_flags;
+
+ if (xing_flags & XING_FRAMES_FLAG) {
+ mp3parse->xing_frames = GST_READ_UINT32_BE (data);
+ if (mp3parse->xing_frames == 0) {
+ GST_WARNING_OBJECT (mp3parse,
+ "Invalid number of frames in Xing header");
+ mp3parse->xing_flags &= ~XING_FRAMES_FLAG;
+ } else {
+ /* duration = frames * samples-per-frame / samplerate */
+ mp3parse->xing_total_time = gst_util_uint64_scale (GST_SECOND,
+ (guint64) (mp3parse->xing_frames) * (mp3parse->spf),
+ mp3parse->rate);
+ }
+
+ data += 4;
+ } else {
+ mp3parse->xing_frames = 0;
+ mp3parse->xing_total_time = 0;
+ }
+
+ if (xing_flags & XING_BYTES_FLAG) {
+ mp3parse->xing_bytes = GST_READ_UINT32_BE (data);
+ if (mp3parse->xing_bytes == 0) {
+ GST_WARNING_OBJECT (mp3parse, "Invalid number of bytes in Xing header");
+ mp3parse->xing_flags &= ~XING_BYTES_FLAG;
+ }
+ data += 4;
+ } else {
+ mp3parse->xing_bytes = 0;
+ }
+
+ /* If we know the upstream size and duration, compute the
+ * total bitrate, rounded up to the nearest kbit/sec */
+ if ((total_time = mp3parse->xing_total_time) &&
+ (total_bytes = mp3parse->xing_bytes)) {
+ mp3parse->xing_bitrate = gst_util_uint64_scale (total_bytes,
+ 8 * GST_SECOND, total_time);
+ mp3parse->xing_bitrate += 500;
+ mp3parse->xing_bitrate -= mp3parse->xing_bitrate % 1000;
+ }
+
+ if (xing_flags & XING_TOC_FLAG) {
+ int i, percent = 0;
+ guchar *table = mp3parse->xing_seek_table;
+ guchar old = 0, new;
+ guint first;
+
+ first = data[0];
+ GST_DEBUG_OBJECT (mp3parse,
+ "Subtracting initial offset of %d bytes from Xing TOC", first);
+
+ /* xing seek table: percent time -> 1/256 bytepos */
+ /* entries must be non-decreasing after removing the initial offset;
+ * otherwise the TOC is broken and dropped */
+ for (i = 0; i < 100; i++) {
+ new = data[i] - first;
+ if (old > new) {
+ GST_WARNING_OBJECT (mp3parse, "Skipping broken Xing TOC");
+ mp3parse->xing_flags &= ~XING_TOC_FLAG;
+ goto skip_toc;
+ }
+ mp3parse->xing_seek_table[i] = old = new;
+ }
+
+ /* build inverse table: 1/256 bytepos -> 1/100 percent time */
+ /* linear interpolation between neighbouring TOC entries, stored in
+ * units of 1/100 percent (0..10000) */
+ for (i = 0; i < 256; i++) {
+ while (percent < 99 && table[percent + 1] <= i)
+ percent++;
+
+ if (table[percent] == i) {
+ mp3parse->xing_seek_table_inverse[i] = percent * 100;
+ } else if (percent < 99 && table[percent]) {
+ gdouble fa, fb, fx;
+ gint a = percent, b = percent + 1;
+
+ fa = table[a];
+ fb = table[b];
+ fx = (b - a) / (fb - fa) * (i - fa) + a;
+ mp3parse->xing_seek_table_inverse[i] = (guint16) (fx * 100);
+ } else if (percent == 99) {
+ gdouble fa, fb, fx;
+ gint a = percent, b = 100;
+
+ fa = table[a];
+ fb = 256.0;
+ fx = (b - a) / (fb - fa) * (i - fa) + a;
+ mp3parse->xing_seek_table_inverse[i] = (guint16) (fx * 100);
+ }
+ }
+ skip_toc:
+ data += 100;
+ } else {
+ memset (mp3parse->xing_seek_table, 0, sizeof (mp3parse->xing_seek_table));
+ memset (mp3parse->xing_seek_table_inverse, 0,
+ sizeof (mp3parse->xing_seek_table_inverse));
+ }
+
+ if (xing_flags & XING_VBR_SCALE_FLAG) {
+ mp3parse->xing_vbr_scale = GST_READ_UINT32_BE (data);
+ data += 4;
+ } else
+ mp3parse->xing_vbr_scale = 0;
+
+ GST_DEBUG_OBJECT (mp3parse, "Xing header reported %u frames, time %"
+ GST_TIME_FORMAT ", %u bytes, vbr scale %u", mp3parse->xing_frames,
+ GST_TIME_ARGS (mp3parse->xing_total_time), mp3parse->xing_bytes,
+ mp3parse->xing_vbr_scale);
+
+ /* check for truncated file */
+ /* allow ~20% slack (the 0.8 factor) before declaring the header bogus */
+ if (upstream_total_bytes && mp3parse->xing_bytes &&
+ mp3parse->xing_bytes * 0.8 > upstream_total_bytes) {
+ GST_WARNING_OBJECT (mp3parse, "File appears to have been truncated; "
+ "invalidating Xing header duration and size");
+ mp3parse->xing_flags &= ~XING_BYTES_FLAG;
+ mp3parse->xing_flags &= ~XING_FRAMES_FLAG;
+ }
+
+ /* Optional LAME tag? */
+ if (avail - bytes_needed >= 36 && GST_READ_UINT32_BE (data) == lame_id) {
+ gchar lame_version[10] = { 0, };
+ guint tag_rev;
+ guint32 encoder_delay, encoder_padding;
+
+ memcpy (lame_version, data, 9);
+ data += 9;
+ tag_rev = data[0] >> 4;
+ GST_DEBUG_OBJECT (mp3parse, "Found LAME tag revision %d created by '%s'",
+ tag_rev, lame_version);
+
+ /* Skip all the information we're not interested in */
+ data += 12;
+ /* Encoder delay and end padding */
+ /* packed as two 12-bit fields in 3 bytes: delay in the top 12 bits,
+ * padding in the bottom 12 */
+ encoder_delay = GST_READ_UINT24_BE (data);
+ encoder_delay >>= 12;
+ encoder_padding = GST_READ_UINT24_BE (data);
+ encoder_padding &= 0x000fff;
+
+ mp3parse->encoder_delay = encoder_delay;
+ mp3parse->encoder_padding = encoder_padding;
+
+ GST_DEBUG_OBJECT (mp3parse, "Encoder delay %u, encoder padding %u",
+ encoder_delay, encoder_padding);
+ }
+ } else if (read_id_vbri == vbri_id) {
+ gint64 total_bytes, total_frames;
+ GstClockTime total_time;
+ guint16 nseek_points;
+
+ GST_DEBUG_OBJECT (mp3parse, "Found VBRI header marker 0x%x", vbri_id);
+
+ if (avail < offset_vbri + 26) {
+ GST_DEBUG_OBJECT (mp3parse,
+ "Not enough data to read VBRI header (need %d)", offset_vbri + 26);
+ goto cleanup;
+ }
+
+ GST_DEBUG_OBJECT (mp3parse, "Reading VBRI header");
+
+ /* Move data after VBRI header */
+ data += offset_vbri + 4;
+
+ if (GST_READ_UINT16_BE (data) != 0x0001) {
+ GST_WARNING_OBJECT (mp3parse,
+ "Unsupported VBRI version 0x%x", GST_READ_UINT16_BE (data));
+ goto cleanup;
+ }
+ data += 2;
+
+ /* Skip encoder delay */
+ data += 2;
+
+ /* Skip quality */
+ data += 2;
+
+ total_bytes = GST_READ_UINT32_BE (data);
+ if (total_bytes != 0)
+ mp3parse->vbri_bytes = total_bytes;
+ data += 4;
+
+ total_frames = GST_READ_UINT32_BE (data);
+ if (total_frames != 0) {
+ mp3parse->vbri_frames = total_frames;
+ mp3parse->vbri_total_time = gst_util_uint64_scale (GST_SECOND,
+ (guint64) (mp3parse->vbri_frames) * (mp3parse->spf), mp3parse->rate);
+ }
+ data += 4;
+
+ /* If we know the upstream size and duration, compute the
+ * total bitrate, rounded up to the nearest kbit/sec */
+ if ((total_time = mp3parse->vbri_total_time) &&
+ (total_bytes = mp3parse->vbri_bytes)) {
+ mp3parse->vbri_bitrate = gst_util_uint64_scale (total_bytes,
+ 8 * GST_SECOND, total_time);
+ mp3parse->vbri_bitrate += 500;
+ mp3parse->vbri_bitrate -= mp3parse->vbri_bitrate % 1000;
+ }
+
+ nseek_points = GST_READ_UINT16_BE (data);
+ data += 2;
+
+ if (nseek_points > 0) {
+ guint scale, seek_bytes, seek_frames;
+ gint i;
+
+ mp3parse->vbri_seek_points = nseek_points;
+
+ scale = GST_READ_UINT16_BE (data);
+ data += 2;
+
+ seek_bytes = GST_READ_UINT16_BE (data);
+ data += 2;
+
+ seek_frames = GST_READ_UINT16_BE (data);
+
+ if (scale == 0 || seek_bytes == 0 || seek_bytes > 4 || seek_frames == 0) {
+ GST_WARNING_OBJECT (mp3parse, "Unsupported VBRI seek table");
+ goto out_vbri;
+ }
+
+ if (avail < offset_vbri + 26 + nseek_points * seek_bytes) {
+ GST_WARNING_OBJECT (mp3parse,
+ "Not enough data to read VBRI seek table (need %d)",
+ offset_vbri + 26 + nseek_points * seek_bytes);
+ goto out_vbri;
+ }
+
+ /* sanity check: the table is expected to cover the whole file, i.e.
+ * seek_frames * nseek_points should be within one stride of
+ * total_frames */
+ if (seek_frames * nseek_points < total_frames - seek_frames ||
+ seek_frames * nseek_points > total_frames + seek_frames) {
+ GST_WARNING_OBJECT (mp3parse,
+ "VBRI seek table doesn't cover the complete file");
+ goto out_vbri;
+ }
+
+ /* rewind to the start of the table (fixed offset from frame start) */
+ data = map.data;
+ data += offset_vbri + 26;
+
+ /* VBRI seek table: frame/seek_frames -> byte */
+ /* each entry is seek_bytes wide and scaled by 'scale' */
+ mp3parse->vbri_seek_table = g_new (guint32, nseek_points);
+ if (seek_bytes == 4)
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT32_BE (data) * scale;
+ data += 4;
+ } else if (seek_bytes == 3)
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT24_BE (data) * scale;
+ data += 3;
+ } else if (seek_bytes == 2)
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT16_BE (data) * scale;
+ data += 2;
+ } else /* seek_bytes == 1 */
+ for (i = 0; i < nseek_points; i++) {
+ mp3parse->vbri_seek_table[i] = GST_READ_UINT8 (data) * scale;
+ data += 1;
+ }
+ }
+ out_vbri:
+
+ GST_DEBUG_OBJECT (mp3parse, "VBRI header reported %u frames, time %"
+ GST_TIME_FORMAT ", bytes %u", mp3parse->vbri_frames,
+ GST_TIME_ARGS (mp3parse->vbri_total_time), mp3parse->vbri_bytes);
+
+ /* check for truncated file */
+ if (upstream_total_bytes && mp3parse->vbri_bytes &&
+ mp3parse->vbri_bytes * 0.8 > upstream_total_bytes) {
+ GST_WARNING_OBJECT (mp3parse, "File appears to have been truncated; "
+ "invalidating VBRI header duration and size");
+ mp3parse->vbri_valid = FALSE;
+ } else {
+ mp3parse->vbri_valid = TRUE;
+ }
+ } else {
+ GST_DEBUG_OBJECT (mp3parse,
+ "Xing, LAME or VBRI header not found in first frame");
+ }
+
+ /* set duration if tables provided a valid one */
+ if (mp3parse->xing_flags & XING_FRAMES_FLAG) {
+ gst_base_parse_set_duration (GST_BASE_PARSE (mp3parse), GST_FORMAT_TIME,
+ mp3parse->xing_total_time, 0);
+ }
+ if (mp3parse->vbri_total_time != 0 && mp3parse->vbri_valid) {
+ gst_base_parse_set_duration (GST_BASE_PARSE (mp3parse), GST_FORMAT_TIME,
+ mp3parse->vbri_total_time, 0);
+ }
+
+ /* tell baseclass how nicely we can seek, and a bitrate if one found */
+ /* FIXME: fill index with seek table */
+#if 0
+ seekable = GST_BASE_PARSE_SEEK_DEFAULT;
+ if ((mp3parse->xing_flags & XING_TOC_FLAG) && mp3parse->xing_bytes &&
+ mp3parse->xing_total_time)
+ seekable = GST_BASE_PARSE_SEEK_TABLE;
+
+ if (mp3parse->vbri_seek_table && mp3parse->vbri_bytes &&
+ mp3parse->vbri_total_time)
+ seekable = GST_BASE_PARSE_SEEK_TABLE;
+#endif
+
+ if (mp3parse->xing_bitrate)
+ bitrate = mp3parse->xing_bitrate;
+ else if (mp3parse->vbri_bitrate)
+ bitrate = mp3parse->vbri_bitrate;
+ else
+ bitrate = 0;
+
+ gst_base_parse_set_average_bitrate (GST_BASE_PARSE (mp3parse), bitrate);
+
+cleanup:
+ gst_buffer_unmap (buf, &map);
+}
+
+/* Convert a timestamp to a byte position in the stream, trying the Xing TOC
+ * first, then the VBRI seek table, then a plain constant-bitrate estimate.
+ * Returns TRUE and fills @bytepos on success, FALSE if no method applies. */
+static gboolean
+gst_mpeg_audio_parse_time_to_bytepos (GstMpegAudioParse * mp3parse,
+ GstClockTime ts, gint64 * bytepos)
+{
+ /* Strategy 1: Xing TOC maps percent-of-duration -> 1/256-of-file */
+ if (mp3parse->xing_flags & XING_TOC_FLAG) {
+ gint64 file_bytes = mp3parse->xing_bytes;
+ GstClockTime duration = mp3parse->xing_total_time;
+
+ if (file_bytes != 0 && duration != 0) {
+ gdouble percent, lo, hi, interp;
+ gint seg;
+
+ percent = CLAMP ((100.0 * gst_util_guint64_to_gdouble (ts)) /
+ gst_util_guint64_to_gdouble (duration), 0.0, 100.0);
+ seg = CLAMP (percent, 0, 99);
+
+ /* linear interpolation between the two surrounding TOC entries */
+ lo = mp3parse->xing_seek_table[seg];
+ hi = (seg < 99) ? (gdouble) mp3parse->xing_seek_table[seg + 1] : 256.0;
+ interp = lo + (hi - lo) * (percent - seg);
+
+ *bytepos = (1.0 / 256.0) * interp * file_bytes;
+ return TRUE;
+ }
+ }
+
+ /* Strategy 2: VBRI table of per-slice byte sizes over equal time slices */
+ if (mp3parse->vbri_seek_table != NULL) {
+ gint64 file_bytes = mp3parse->vbri_bytes;
+ GstClockTime duration = mp3parse->vbri_total_time;
+
+ if (file_bytes != 0 && duration != 0) {
+ gdouble t0, t1, b0, b1;
+ gint seg, k;
+
+ seg = gst_util_uint64_scale (ts, mp3parse->vbri_seek_points - 1,
+ duration);
+ seg = CLAMP (seg, 0, mp3parse->vbri_seek_points - 1);
+
+ /* time at the start of slice 'seg' */
+ t0 = gst_guint64_to_gdouble (gst_util_uint64_scale (seg, duration,
+ mp3parse->vbri_seek_points));
+ /* byte offset at the start of slice 'seg' = cumulative sum */
+ b0 = 0.0;
+ for (k = 0; k <= seg; k++)
+ b0 += mp3parse->vbri_seek_table[k];
+
+ if (seg + 1 < mp3parse->vbri_seek_points) {
+ t1 = gst_guint64_to_gdouble (gst_util_uint64_scale (seg + 1, duration,
+ mp3parse->vbri_seek_points));
+ b1 = b0 + mp3parse->vbri_seek_table[seg + 1];
+ } else {
+ t1 = gst_guint64_to_gdouble (duration);
+ b1 = file_bytes;
+ }
+
+ *bytepos =
+ b0 + ((b1 - b0) / (t1 - t0)) * (gst_guint64_to_gdouble (ts) - t0);
+ return TRUE;
+ }
+ }
+
+ /* Strategy 3: constant bitrate (so far) allows a direct linear mapping,
+ * slightly more accurate than the base class estimate */
+ if (mp3parse->bitrate_is_constant && mp3parse->hdr_bitrate) {
+ *bytepos = gst_util_uint64_scale (ts, mp3parse->hdr_bitrate,
+ 8 * GST_SECOND);
+ return TRUE;
+ }
+
+ return FALSE;
+}
+
+/* Convert a byte position to a timestamp, the inverse of
+ * gst_mpeg_audio_parse_time_to_bytepos(): Xing inverse TOC first, then the
+ * VBRI seek table, then a constant-bitrate estimate. Returns TRUE and fills
+ * @ts on success, FALSE if no conversion method is available. */
+static gboolean
+gst_mpeg_audio_parse_bytepos_to_time (GstMpegAudioParse * mp3parse,
+ gint64 bytepos, GstClockTime * ts)
+{
+ gint64 total_bytes;
+ GstClockTime total_time;
+
+ /* If XING seek table exists use this for byte->time conversion */
+ /* xing_seek_table_inverse maps 1/256-of-file -> 1/100 percent of time */
+ if ((mp3parse->xing_flags & XING_TOC_FLAG) &&
+ (total_bytes = mp3parse->xing_bytes) &&
+ (total_time = mp3parse->xing_total_time)) {
+ gdouble fa, fb, fx;
+ gdouble pos;
+ gint index;
+
+ pos = CLAMP ((bytepos * 256.0) / total_bytes, 0.0, 256.0);
+ index = CLAMP (pos, 0, 255);
+ fa = mp3parse->xing_seek_table_inverse[index];
+ if (index < 255)
+ fb = mp3parse->xing_seek_table_inverse[index + 1];
+ else
+ fb = 10000.0; /* table values are in 1/100 percent, so 100% == 10000 */
+
+ /* interpolate between the two table entries */
+ fx = fa + (fb - fa) * (pos - index);
+
+ *ts = (1.0 / 10000.0) * fx * gst_util_guint64_to_gdouble (total_time);
+
+ return TRUE;
+ }
+
+ if (mp3parse->vbri_seek_table &&
+ (total_bytes = mp3parse->vbri_bytes) &&
+ (total_time = mp3parse->vbri_total_time)) {
+ gint i = 0;
+ guint64 sum = 0;
+ gdouble a, b, fa, fb;
+
+ /* walk the per-slice byte sizes until the cumulative sum passes
+ * bytepos; afterwards i is the slice containing bytepos */
+ do {
+ sum += mp3parse->vbri_seek_table[i];
+ i++;
+ } while (i + 1 < mp3parse->vbri_seek_points
+ && sum + mp3parse->vbri_seek_table[i] < bytepos);
+ i--;
+
+ /* a/b are byte offsets, fa/fb the corresponding times (note: swapped
+ * naming relative to time_to_bytepos) */
+ a = gst_guint64_to_gdouble (sum);
+ fa = gst_guint64_to_gdouble (gst_util_uint64_scale (i, total_time,
+ mp3parse->vbri_seek_points));
+
+ if (i + 1 < mp3parse->vbri_seek_points) {
+ b = a + mp3parse->vbri_seek_table[i + 1];
+ fb = gst_guint64_to_gdouble (gst_util_uint64_scale (i + 1, total_time,
+ mp3parse->vbri_seek_points));
+ } else {
+ b = total_bytes;
+ fb = gst_guint64_to_gdouble (total_time);
+ }
+
+ /* linear interpolation within the slice */
+ *ts = gst_gdouble_to_guint64 (fa + ((fb - fa) / (b - a)) * (bytepos - a));
+
+ return TRUE;
+ }
+
+ /* If we have had a constant bit rate (so far), use it directly, as it
+ * may give slightly more accurate results than the base class. */
+ if (mp3parse->bitrate_is_constant && mp3parse->hdr_bitrate) {
+ *ts = gst_util_uint64_scale (bytepos, 8 * GST_SECOND,
+ mp3parse->hdr_bitrate);
+ return TRUE;
+ }
+
+ return FALSE;
+}
+
+/* GstBaseParse::convert vfunc: use the header-derived seek tables for
+ * TIME<->BYTES conversions when possible, otherwise defer to the base
+ * class's estimated-bitrate conversion. */
+static gboolean
+gst_mpeg_audio_parse_convert (GstBaseParse * parse, GstFormat src_format,
+ gint64 src_value, GstFormat dest_format, gint64 * dest_value)
+{
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+
+ if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_BYTES) {
+ if (gst_mpeg_audio_parse_time_to_bytepos (mp3parse, src_value,
+ dest_value))
+ return TRUE;
+ } else if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_TIME) {
+ if (gst_mpeg_audio_parse_bytepos_to_time (mp3parse, src_value,
+ (GstClockTime *) dest_value))
+ return TRUE;
+ }
+
+ /* no tables (or unsupported direction): fall back to the default
+ * estimated-rate based conversion */
+ return gst_base_parse_convert_default (parse, src_format, src_value,
+ dest_format, dest_value);
+}
+
+/* GstBaseParse::pre_push_frame vfunc: just before a frame is pushed, post
+ * CRC and channel-mode tags whenever they changed, and (once) the codec tag
+ * plus nominal bitrate. Returns GST_FLOW_FLUSHING/NOT_NEGOTIATED if the src
+ * pad has no caps yet, GST_FLOW_OK otherwise. */
+static GstFlowReturn
+gst_mpeg_audio_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame)
+{
+ GstMpegAudioParse *mp3parse = GST_MPEG_AUDIO_PARSE (parse);
+ GstTagList *taglist = NULL;
+
+ /* we will create a taglist (if any of the parameters has changed)
+ * to add the tags that changed */
+ if (mp3parse->last_posted_crc != mp3parse->last_crc) {
+ gboolean using_crc;
+
+ if (!taglist)
+ taglist = gst_tag_list_new_empty ();
+
+ mp3parse->last_posted_crc = mp3parse->last_crc;
+ if (mp3parse->last_posted_crc == CRC_PROTECTED) {
+ using_crc = TRUE;
+ } else {
+ using_crc = FALSE;
+ }
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_CRC,
+ using_crc, NULL);
+ }
+
+ if (mp3parse->last_posted_channel_mode != mp3parse->last_mode) {
+ if (!taglist)
+ taglist = gst_tag_list_new_empty ();
+
+ mp3parse->last_posted_channel_mode = mp3parse->last_mode;
+
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_MODE,
+ gst_mpeg_audio_channel_mode_get_nick (mp3parse->last_mode), NULL);
+ }
+
+ /* tag sending done late enough in hook to ensure pending events
+ * have already been sent */
+ if (taglist != NULL || !mp3parse->sent_codec_tag) {
+ GstCaps *caps;
+
+ if (taglist == NULL)
+ taglist = gst_tag_list_new_empty ();
+
+ /* codec tag */
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+ if (G_UNLIKELY (caps == NULL)) {
+ /* drop our taglist reference before bailing out */
+ gst_tag_list_unref (taglist);
+
+ if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+ GST_INFO_OBJECT (parse, "Src pad is flushing");
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+ gst_pb_utils_add_codec_description_to_tag_list (taglist,
+ GST_TAG_AUDIO_CODEC, caps);
+ gst_caps_unref (caps);
+
+ if (mp3parse->hdr_bitrate > 0 && mp3parse->xing_bitrate == 0 &&
+ mp3parse->vbri_bitrate == 0) {
+ /* We don't have a VBR bitrate, so post the available bitrate as
+ * nominal and let baseparse calculate the real bitrate */
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_NOMINAL_BITRATE, mp3parse->hdr_bitrate, NULL);
+ }
+
+ /* also signals the end of first-frame processing */
+ mp3parse->sent_codec_tag = TRUE;
+ }
+
+ /* if the taglist exists, we need to update it so it gets sent out */
+ if (taglist) {
+ gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (taglist);
+ }
+
+ /* usual clipping applies */
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+ return GST_FLOW_OK;
+}
+
+/* Strip the "parsed" field from every structure in @caps, so that
+ * parsed and unparsed variants compare/intersect as equivalent. */
+static void
+remove_fields (GstCaps * caps)
+{
+ guint idx, count = gst_caps_get_size (caps);
+
+ for (idx = 0; idx < count; idx++) {
+ GstStructure *structure = gst_caps_get_structure (caps, idx);
+
+ gst_structure_remove_field (structure, "parsed");
+ }
+}
+
+/* GstBaseParse::get_sink_caps vfunc: compute the caps we can accept on the
+ * sink pad by intersecting downstream's caps (with the "parsed" field
+ * stripped, since we add it ourselves) with our sink template, then with
+ * @filter if one was supplied. Returns a new caps reference. */
+static GstCaps *
+gst_mpeg_audio_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+ GstCaps *templ_caps, *peer_caps, *result;
+
+ templ_caps = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+
+ if (filter != NULL) {
+ GstCaps *filter_copy;
+
+ /* strip the fields we convert before forwarding the filter downstream */
+ filter_copy = gst_caps_copy (filter);
+ remove_fields (filter_copy);
+ peer_caps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse),
+ filter_copy);
+ gst_caps_unref (filter_copy);
+ } else {
+ peer_caps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);
+ }
+
+ if (peer_caps == NULL) {
+ /* nothing downstream: anything matching the template is fine */
+ result = templ_caps;
+ } else {
+ /* Remove the parsed field */
+ peer_caps = gst_caps_make_writable (peer_caps);
+ remove_fields (peer_caps);
+
+ result = gst_caps_intersect_full (peer_caps, templ_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (peer_caps);
+ gst_caps_unref (templ_caps);
+ }
+
+ if (filter != NULL) {
+ GstCaps *narrowed;
+
+ narrowed = gst_caps_intersect_full (filter, result,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (result);
+ result = narrowed;
+ }
+
+ return result;
+}
diff --git a/gst/audioparsers/gstmpegaudioparse.h b/gst/audioparsers/gstmpegaudioparse.h
new file mode 100644
index 0000000000..e7fa8099fb
--- /dev/null
+++ b/gst/audioparsers/gstmpegaudioparse.h
@@ -0,0 +1,114 @@
+/* GStreamer MPEG audio parser
+ * Copyright (C) 2006-2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2010 Mark Nauwelaerts <mnauw users sf net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MPEG_AUDIO_PARSE_H__
+#define __GST_MPEG_AUDIO_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPEG_AUDIO_PARSE \
+ (gst_mpeg_audio_parse_get_type())
+#define GST_MPEG_AUDIO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_MPEG_AUDIO_PARSE, GstMpegAudioParse))
+#define GST_MPEG_AUDIO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_MPEG_AUDIO_PARSE, GstMpegAudioParseClass))
+#define GST_IS_MPEG_AUDIO_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_MPEG_AUDIO_PARSE))
+#define GST_IS_MPEG_AUDIO_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_MPEG_AUDIO_PARSE))
+
+typedef struct _GstMpegAudioParse GstMpegAudioParse;
+typedef struct _GstMpegAudioParseClass GstMpegAudioParseClass;
+
+/**
+ * GstMpegAudioParse:
+ *
+ * The opaque GstMpegAudioParse object
+ */
+struct _GstMpegAudioParse {
+ GstBaseParse baseparse;
+
+ /*< private >*/
+ /* stream properties taken from the most recent frame header */
+ gint rate;
+ gint channels;
+ gint layer;
+ gint version;
+
+ GstClockTime max_bitreservoir;
+ /* samples per frame */
+ gint spf;
+
+ /* presumably the bitrate of a free-format stream -- not set in the
+ * code visible here, confirm against the frame-header parser */
+ gint freerate;
+
+ /* TRUE once the codec tag has been sent; also marks the end of
+ * first-frame (Xing/VBRI/LAME) processing */
+ gboolean sent_codec_tag;
+ /* last values posted as tags vs. last values seen in the stream, so
+ * tags are only (re-)posted when something actually changed */
+ guint last_posted_bitrate;
+ gint last_posted_crc, last_crc;
+ guint last_posted_channel_mode, last_mode;
+
+ /* Bitrate from non-vbr headers */
+ guint32 hdr_bitrate;
+ gboolean bitrate_is_constant;
+
+ /* Xing info */
+ guint32 xing_flags;
+ guint32 xing_frames;
+ GstClockTime xing_total_time;
+ guint32 xing_bytes;
+ /* percent -> filepos mapping */
+ guchar xing_seek_table[100];
+ /* filepos -> percent mapping */
+ guint16 xing_seek_table_inverse[256];
+ guint32 xing_vbr_scale;
+ /* average bitrate derived from Xing size/duration, rounded to kbit/s */
+ guint xing_bitrate;
+
+ /* VBRI info */
+ guint32 vbri_frames;
+ GstClockTime vbri_total_time;
+ guint32 vbri_bytes;
+ guint vbri_bitrate;
+ /* heap-allocated table of per-slice byte sizes (vbri_seek_points long) */
+ guint vbri_seek_points;
+ guint32 *vbri_seek_table;
+ /* FALSE when the VBRI size looks bogus (e.g. truncated file) */
+ gboolean vbri_valid;
+
+ /* LAME info */
+ /* encoder delay and padding from the LAME tag, presumably in samples --
+ * confirm against the LAME tag spec */
+ guint32 encoder_delay;
+ guint32 encoder_padding;
+};
+
+/**
+ * GstMpegAudioParseClass:
+ * @parent_class: Element parent class.
+ *
+ * The opaque GstMpegAudioParseClass data structure.
+ */
+struct _GstMpegAudioParseClass {
+ GstBaseParseClass baseparse_class; /* no extra vmethods added */
+};
+
+GType gst_mpeg_audio_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_MPEG_AUDIO_PARSE_H__ */
diff --git a/gst/audioparsers/gstsbcparse.c b/gst/audioparsers/gstsbcparse.c
new file mode 100644
index 0000000000..3fa8bb44c5
--- /dev/null
+++ b/gst/audioparsers/gstsbcparse.c
@@ -0,0 +1,539 @@
+/* GStreamer SBC audio parser
+ * Copyright (C) 2012 Collabora Ltd. <tim.muller@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+/**
+ * SECTION:element-sbcparse
+ * @title: sbcparse
+ * @see_also: sbcdec, sbcenc
+ *
+ * The sbcparse element will parse a bluetooth SBC audio stream into
+ * frames and timestamp them properly.
+ *
+ * Since: 1.2.0
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstaudioparserselements.h"
+#include "gstsbcparse.h"
+
+#include <string.h>
+#include <gst/tag/tag.h>
+#include <gst/audio/audio.h>
+#include <gst/base/base.h>
+#include <gst/pbutils/pbutils.h>
+
+#define SBC_SYNCBYTE 0x9C
+
+GST_DEBUG_CATEGORY_STATIC (sbcparse_debug);
+#define GST_CAT_DEFAULT sbcparse_debug
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-sbc, parsed = (boolean) true, "
+ "channels = (int) [ 1, 2 ], "
+ "rate = (int) { 16000, 32000, 44100, 48000 }")
+ );
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-sbc")
+ );
+
+static gboolean gst_sbc_parse_start (GstBaseParse * parse);
+static gboolean gst_sbc_parse_stop (GstBaseParse * parse);
+static GstFlowReturn gst_sbc_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstFlowReturn gst_sbc_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+static GstCaps *gst_sbc_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+
+static guint8 gst_sbc_calculate_crc8 (const guint8 * data, gint bits_crc);
+static gsize gst_sbc_calc_framelen (guint subbands, GstSbcChannelMode ch_mode,
+ guint blocks, guint bitpool);
+static gsize gst_sbc_parse_header (const guint8 * data, guint * rate,
+ guint * n_blocks, GstSbcChannelMode * ch_mode,
+ GstSbcAllocationMethod * alloc_method, guint * n_subbands, guint * bitpool);
+
+#define parent_class gst_sbc_parse_parent_class
+G_DEFINE_TYPE (GstSbcParse, gst_sbc_parse, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE (sbcparse, "sbcparse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_SBC_PARSE);
+
+/* Class initialisation: hook up the GstBaseParse virtual methods, register
+ * the static pad templates and set the element metadata. */
+static void
+gst_sbc_parse_class_init (GstSbcParseClass * klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (sbcparse_debug, "sbcparse", 0, "SBC audio parser");
+
+ /* GstBaseParse vfuncs */
+ parse_class->start = GST_DEBUG_FUNCPTR (gst_sbc_parse_start);
+ parse_class->stop = GST_DEBUG_FUNCPTR (gst_sbc_parse_stop);
+ parse_class->handle_frame = GST_DEBUG_FUNCPTR (gst_sbc_parse_handle_frame);
+ parse_class->pre_push_frame =
+ GST_DEBUG_FUNCPTR (gst_sbc_parse_pre_push_frame);
+ parse_class->get_sink_caps =
+ GST_DEBUG_FUNCPTR (gst_sbc_parse_get_sink_caps);
+
+ gst_element_class_add_static_pad_template (element_class, &src_factory);
+ gst_element_class_add_static_pad_template (element_class, &sink_factory);
+
+ gst_element_class_set_static_metadata (element_class, "SBC audio parser",
+ "Codec/Parser/Audio", "Parses an SBC bluetooth audio stream",
+ "Tim-Philipp Müller <tim.muller@collabora.co.uk>");
+}
+
+/* Reset all cached stream configuration to "unknown", so the next frame
+ * parsed will trigger a caps (re)negotiation. */
+static void
+gst_sbc_parse_reset (GstSbcParse * sbcparse)
+{
+ sbcparse->rate = -1;
+ sbcparse->n_blocks = -1;
+ sbcparse->n_subbands = -1;
+ sbcparse->bitpool = -1;
+ sbcparse->ch_mode = GST_SBC_CHANNEL_MODE_INVALID;
+ sbcparse->alloc_method = GST_SBC_ALLOCATION_METHOD_INVALID;
+ sbcparse->sent_codec_tag = FALSE;
+}
+
+/* Instance initialisation: clear stream state and relax sink-pad caps
+ * acceptance to intersection/template matching. */
+static void
+gst_sbc_parse_init (GstSbcParse * sbcparse)
+{
+ GstPad *sinkpad = GST_BASE_PARSE_SINK_PAD (sbcparse);
+
+ gst_sbc_parse_reset (sbcparse);
+ GST_PAD_SET_ACCEPT_INTERSECT (sinkpad);
+ GST_PAD_SET_ACCEPT_TEMPLATE (sinkpad);
+}
+
+/* GstBaseParse::start vfunc: configure baseparse for SBC streams. */
+static gboolean
+gst_sbc_parse_start (GstBaseParse * parse)
+{
+ /* smallest possible SBC frame: 4 subbands, mono, 4 blocks, bitpool 2 */
+ gsize min_len = gst_sbc_calc_framelen (4, GST_SBC_CHANNEL_MODE_MONO, 4, 2);
+
+ gst_base_parse_set_min_frame_size (parse, min_len);
+ gst_base_parse_set_has_timing_info (parse, FALSE);
+ gst_base_parse_set_syncable (parse, TRUE);
+
+ return TRUE;
+}
+
+/* GstBaseParse::stop vfunc: forget the cached stream configuration. */
+static gboolean
+gst_sbc_parse_stop (GstBaseParse * parse)
+{
+ GstSbcParse *sbcparse = GST_SBC_PARSE (parse);
+
+ gst_sbc_parse_reset (sbcparse);
+ return TRUE;
+}
+
+/* Map an SBC channel mode to the nick used in caps; "invalid" for
+ * anything unrecognised. */
+static const gchar *
+gst_sbc_channel_mode_get_name (GstSbcChannelMode ch_mode)
+{
+ if (ch_mode == GST_SBC_CHANNEL_MODE_MONO)
+ return "mono";
+ if (ch_mode == GST_SBC_CHANNEL_MODE_DUAL)
+ return "dual";
+ if (ch_mode == GST_SBC_CHANNEL_MODE_STEREO)
+ return "stereo";
+ if (ch_mode == GST_SBC_CHANNEL_MODE_JOINT_STEREO)
+ return "joint";
+
+ return "invalid";
+}
+
+/* Map an SBC bit-allocation method to the nick used in caps; "invalid"
+ * for anything unrecognised. */
+static const gchar *
+gst_sbc_allocation_method_get_name (GstSbcAllocationMethod alloc_method)
+{
+ if (alloc_method == GST_SBC_ALLOCATION_METHOD_SNR)
+ return "snr";
+ if (alloc_method == GST_SBC_ALLOCATION_METHOD_LOUDNESS)
+ return "loudness";
+
+ return "invalid";
+}
+
+/* GstBaseParse::handle_frame vfunc: validate the SBC frame at the start of
+ * @frame's buffer, push new caps if the stream configuration changed, and
+ * pack as many consecutive same-configuration frames as available into one
+ * output buffer. On bad sync it sets *skipsize to resynchronize; on short
+ * data it raises the minimum frame size and waits for more. */
+static GstFlowReturn
+gst_sbc_parse_handle_frame (GstBaseParse * parse, GstBaseParseFrame * frame,
+ gint * skipsize)
+{
+ GstSbcParse *sbcparse = GST_SBC_PARSE (parse);
+ GstSbcAllocationMethod alloc_method = GST_SBC_ALLOCATION_METHOD_INVALID;
+ GstSbcChannelMode ch_mode = GST_SBC_CHANNEL_MODE_INVALID;
+ GstMapInfo map;
+ guint rate = 0, n_blocks = 0, n_subbands = 0, bitpool = 0;
+ gsize frame_len, next_len;
+ gint i, max_frames;
+
+ gst_buffer_map (frame->buffer, &map, GST_MAP_READ);
+
+ /* baseparse guarantees at least the minimum frame size set in start() */
+ g_assert (map.size >= 6);
+
+ frame_len = gst_sbc_parse_header (map.data, &rate, &n_blocks, &ch_mode,
+ &alloc_method, &n_subbands, &bitpool);
+
+ GST_LOG_OBJECT (parse, "frame_len: %u", (guint) frame_len);
+
+ /* 0 means no valid header at this position */
+ if (frame_len == 0)
+ goto resync;
+
+ if (sbcparse->alloc_method != alloc_method
+ || sbcparse->ch_mode != ch_mode
+ || sbcparse->rate != rate
+ || sbcparse->n_blocks != n_blocks
+ || sbcparse->n_subbands != n_subbands || sbcparse->bitpool != bitpool) {
+ guint avg_bitrate;
+ GstCaps *caps;
+
+ /* FIXME: do all of these need to be in the caps? */
+ caps = gst_caps_new_simple ("audio/x-sbc", "rate", G_TYPE_INT, rate,
+ "channels", G_TYPE_INT, (ch_mode == GST_SBC_CHANNEL_MODE_MONO) ? 1 : 2,
+ "channel-mode", G_TYPE_STRING, gst_sbc_channel_mode_get_name (ch_mode),
+ "blocks", G_TYPE_INT, n_blocks, "subbands", G_TYPE_INT, n_subbands,
+ "allocation-method", G_TYPE_STRING,
+ gst_sbc_allocation_method_get_name (alloc_method),
+ "bitpool", G_TYPE_INT, bitpool, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+
+ GST_INFO_OBJECT (sbcparse, "caps changed to %" GST_PTR_FORMAT, caps);
+
+ gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (sbcparse),
+ gst_event_new_caps (caps));
+
+ /* n_subbands * n_blocks is the number of PCM samples per frame */
+ avg_bitrate = (8 * frame_len * rate) / (n_subbands * n_blocks);
+ gst_base_parse_set_average_bitrate (parse, avg_bitrate);
+
+ gst_base_parse_set_frame_rate (parse, rate, n_subbands * n_blocks, 0, 0);
+
+ /* cache the new configuration for change detection */
+ sbcparse->alloc_method = alloc_method;
+ sbcparse->ch_mode = ch_mode;
+ sbcparse->rate = rate;
+ sbcparse->n_blocks = n_blocks;
+ sbcparse->n_subbands = n_subbands;
+ sbcparse->bitpool = bitpool;
+
+ gst_caps_unref (caps);
+ }
+
+ if (frame_len > map.size)
+ goto need_more_data;
+
+ GST_BUFFER_OFFSET (frame->buffer) = GST_BUFFER_OFFSET_NONE;
+ GST_BUFFER_OFFSET_END (frame->buffer) = GST_BUFFER_OFFSET_NONE;
+
+ /* completely arbitrary limit, we only process data we already have,
+ * so we aren't introducing latency here */
+ max_frames = MIN (map.size / frame_len, n_blocks * n_subbands * 5);
+ GST_LOG_OBJECT (sbcparse, "parsing up to %d frames", max_frames);
+
+ /* count how many back-to-back frames share this exact configuration */
+ for (i = 1; i < max_frames; ++i) {
+ next_len = gst_sbc_parse_header (map.data + (i * frame_len), &rate,
+ &n_blocks, &ch_mode, &alloc_method, &n_subbands, &bitpool);
+
+ if (next_len != frame_len || sbcparse->alloc_method != alloc_method ||
+ sbcparse->ch_mode != ch_mode || sbcparse->rate != rate ||
+ sbcparse->n_blocks != n_blocks || sbcparse->n_subbands != n_subbands ||
+ sbcparse->bitpool != bitpool) {
+ break;
+ }
+ }
+ GST_LOG_OBJECT (sbcparse, "packing %d SBC frames into next output buffer", i);
+
+ /* Note: local n_subbands and n_blocks variables might be tainted if we
+ * bailed out of the loop above because of a header configuration mismatch */
+ gst_base_parse_set_frame_rate (parse, rate,
+ sbcparse->n_subbands * sbcparse->n_blocks * i, 0, 0);
+
+ gst_buffer_unmap (frame->buffer, &map);
+ return gst_base_parse_finish_frame (parse, frame, i * frame_len);
+
+resync:
+ {
+ const guint8 *possible_sync;
+
+ GST_DEBUG_OBJECT (parse, "no sync, resyncing");
+
+ /* skip ahead to the next candidate syncbyte (or all of it if none) */
+ possible_sync = memchr (map.data, SBC_SYNCBYTE, map.size);
+
+ if (possible_sync != NULL)
+ *skipsize = (gint) (possible_sync - map.data);
+ else
+ *skipsize = map.size;
+
+ gst_buffer_unmap (frame->buffer, &map);
+
+ /* we could optimise things here by looping over the data and checking
+ * whether the sync is good or not instead of handing control back to
+ * the base class just to be called again */
+ return GST_FLOW_OK;
+ }
+need_more_data:
+ {
+ GST_LOG_OBJECT (parse,
+ "need %" G_GSIZE_FORMAT " bytes, but only have %" G_GSIZE_FORMAT,
+ frame_len, map.size);
+ gst_base_parse_set_min_frame_size (parse, frame_len);
+ gst_buffer_unmap (frame->buffer, &map);
+ return GST_FLOW_OK;
+ }
+}
+
+static void
+remove_fields (GstCaps * caps)
+{
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "parsed");
+ }
+}
+
/* GstBaseParse::get_sink_caps: compute what the sink pad can accept.
 * The template caps are narrowed by whatever the downstream peer supports,
 * after removing the "parsed" field that this element itself produces.
 *
 * Returns: (transfer full): the resulting caps */
static GstCaps *
gst_sbc_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
{
  GstCaps *peercaps, *templ;
  GstCaps *res;

  templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
  if (filter) {
    GstCaps *fcopy = gst_caps_copy (filter);
    /* Remove the fields we convert */
    remove_fields (fcopy);
    peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
    gst_caps_unref (fcopy);
  } else
    peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);

  if (peercaps) {
    /* Remove the parsed field */
    peercaps = gst_caps_make_writable (peercaps);
    remove_fields (peercaps);

    res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (peercaps);
    gst_caps_unref (templ);
  } else {
    /* no peer: the template caps are the answer; ownership moves to res */
    res = templ;
  }

  /* finally honour the caller's original (unmodified) filter, if any */
  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (res);
    res = intersection;
  }

  return res;
}
+
/* CRC-8 lookup table for the SBC header checksum: crc_table[x] is the CRC
 * of the single byte x with polynomial 0x1d (see the bit-wise fallback in
 * gst_sbc_calculate_crc8 below, which XORs in 0x1d per set bit). */
static const guint8 crc_table[256] = {
  0x00, 0x1D, 0x3A, 0x27, 0x74, 0x69, 0x4E, 0x53,
  0xE8, 0xF5, 0xD2, 0xCF, 0x9C, 0x81, 0xA6, 0xBB,
  0xCD, 0xD0, 0xF7, 0xEA, 0xB9, 0xA4, 0x83, 0x9E,
  0x25, 0x38, 0x1F, 0x02, 0x51, 0x4C, 0x6B, 0x76,
  0x87, 0x9A, 0xBD, 0xA0, 0xF3, 0xEE, 0xC9, 0xD4,
  0x6F, 0x72, 0x55, 0x48, 0x1B, 0x06, 0x21, 0x3C,
  0x4A, 0x57, 0x70, 0x6D, 0x3E, 0x23, 0x04, 0x19,
  0xA2, 0xBF, 0x98, 0x85, 0xD6, 0xCB, 0xEC, 0xF1,
  0x13, 0x0E, 0x29, 0x34, 0x67, 0x7A, 0x5D, 0x40,
  0xFB, 0xE6, 0xC1, 0xDC, 0x8F, 0x92, 0xB5, 0xA8,
  0xDE, 0xC3, 0xE4, 0xF9, 0xAA, 0xB7, 0x90, 0x8D,
  0x36, 0x2B, 0x0C, 0x11, 0x42, 0x5F, 0x78, 0x65,
  0x94, 0x89, 0xAE, 0xB3, 0xE0, 0xFD, 0xDA, 0xC7,
  0x7C, 0x61, 0x46, 0x5B, 0x08, 0x15, 0x32, 0x2F,
  0x59, 0x44, 0x63, 0x7E, 0x2D, 0x30, 0x17, 0x0A,
  0xB1, 0xAC, 0x8B, 0x96, 0xC5, 0xD8, 0xFF, 0xE2,
  0x26, 0x3B, 0x1C, 0x01, 0x52, 0x4F, 0x68, 0x75,
  0xCE, 0xD3, 0xF4, 0xE9, 0xBA, 0xA7, 0x80, 0x9D,
  0xEB, 0xF6, 0xD1, 0xCC, 0x9F, 0x82, 0xA5, 0xB8,
  0x03, 0x1E, 0x39, 0x24, 0x77, 0x6A, 0x4D, 0x50,
  0xA1, 0xBC, 0x9B, 0x86, 0xD5, 0xC8, 0xEF, 0xF2,
  0x49, 0x54, 0x73, 0x6E, 0x3D, 0x20, 0x07, 0x1A,
  0x6C, 0x71, 0x56, 0x4B, 0x18, 0x05, 0x22, 0x3F,
  0x84, 0x99, 0xBE, 0xA3, 0xF0, 0xED, 0xCA, 0xD7,
  0x35, 0x28, 0x0F, 0x12, 0x41, 0x5C, 0x7B, 0x66,
  0xDD, 0xC0, 0xE7, 0xFA, 0xA9, 0xB4, 0x93, 0x8E,
  0xF8, 0xE5, 0xC2, 0xDF, 0x8C, 0x91, 0xB6, 0xAB,
  0x10, 0x0D, 0x2A, 0x37, 0x64, 0x79, 0x5E, 0x43,
  0xB2, 0xAF, 0x88, 0x95, 0xC6, 0xDB, 0xFC, 0xE1,
  0x5A, 0x47, 0x60, 0x7D, 0x2E, 0x33, 0x14, 0x09,
  0x7F, 0x62, 0x45, 0x58, 0x0B, 0x16, 0x31, 0x2C,
  0x97, 0x8A, 0xAD, 0xB0, 0xE3, 0xFE, 0xD9, 0xC4
};
+
+static guint8
+gst_sbc_calculate_crc8 (const guint8 * data, gint crc_bits)
+{
+ guint8 crc = 0x0f;
+ guint8 octet;
+
+ while (crc_bits >= 8) {
+ crc = crc_table[crc ^ *data];
+ crc_bits -= 8;
+ ++data;
+ }
+
+ octet = *data;
+ while (crc_bits > 0) {
+ gchar bit = ((octet ^ crc) & 0x80) >> 7;
+
+ crc = ((crc & 0x7f) << 1) ^ (bit ? 0x1d : 0);
+
+ octet = octet << 1;
+ --crc_bits;
+ }
+
+ return crc;
+}
+
+static gsize
+gst_sbc_calc_framelen (guint subbands, GstSbcChannelMode ch_mode,
+ guint blocks, guint bitpool)
+{
+ switch (ch_mode) {
+ case GST_SBC_CHANNEL_MODE_MONO:
+ return 4 + (subbands * 1) / 2 + ((blocks * 1 * bitpool) + 7) / 8;
+ case GST_SBC_CHANNEL_MODE_DUAL:
+ return 4 + (subbands * 2) / 2 + ((blocks * 2 * bitpool) + 7) / 8;
+ case GST_SBC_CHANNEL_MODE_STEREO:
+ return 4 + (subbands * 2) / 2 + ((blocks * bitpool) + 7) / 8;
+ case GST_SBC_CHANNEL_MODE_JOINT_STEREO:
+ return 4 + (subbands * 2) / 2 + ((subbands + blocks * bitpool) + 7) / 8;
+ default:
+ break;
+ }
+
+ g_return_val_if_reached (0);
+}
+
/* Parse and validate one SBC frame header at @data (at least 8 readable
 * bytes). Fills in the out parameters and returns the total frame length
 * in bytes on success, or 0 if @data does not hold a valid header. */
static gsize
gst_sbc_parse_header (const guint8 * data, guint * rate, guint * n_blocks,
    GstSbcChannelMode * ch_mode, GstSbcAllocationMethod * alloc_method,
    guint * n_subbands, guint * bitpool)
{
  static const guint16 sbc_rates[4] = { 16000, 32000, 44100, 48000 };
  static const guint8 sbc_blocks[4] = { 4, 8, 12, 16 };
  /* 2 header bytes + up to 1 (joint flags) + up to 8 (scale factors) */
  guint8 crc_data[2 + 1 + 8], crc_bits, i;

  GST_MEMDUMP ("header", data, 8);

  /* every frame starts with the fixed sync byte */
  if (data[0] != SBC_SYNCBYTE)
    return 0;

  /* byte 1 packs rate (2 bits), block count (2), channel mode (2),
   * allocation method (1) and subband count (1); byte 2 is the bitpool */
  *rate = sbc_rates[(data[1] >> 6) & 0x03];
  *n_blocks = sbc_blocks[(data[1] >> 4) & 0x03];
  *ch_mode = (GstSbcChannelMode) ((data[1] >> 2) & 0x03);
  *alloc_method = (data[1] >> 1) & 0x01;
  *n_subbands = (data[1] & 0x01) ? 8 : 4;
  *bitpool = data[2];

  GST_TRACE ("rate=%u, n_blocks=%u, ch_mode=%u, alloc_method=%u, "
      "n_subbands=%u, bitpool=%u", *rate, *n_blocks, *ch_mode, *alloc_method,
      *n_subbands, *bitpool);

  if (*bitpool < 2)
    return 0;

  /* check CRC */
  crc_data[0] = data[1];
  crc_data[1] = data[2];
  crc_bits = 16;

  /* joint flags and RFA */
  if (*ch_mode == GST_SBC_CHANNEL_MODE_JOINT_STEREO)
    crc_bits += *n_subbands;

  /* scale factors */
  if (*ch_mode == GST_SBC_CHANNEL_MODE_MONO)
    crc_bits += *n_subbands * 1 * 4;
  else
    crc_bits += *n_subbands * 2 * 4;

  /* copy the CRC-covered payload bytes, which start at data[4]
   * (right after the 4-byte header) */
  for (i = 16; i < crc_bits; i += 8) {
    crc_data[i / 8] = data[1 + (i / 8) + 1];
  }

  /* if the CRC covers only part of the last byte, mask out the
   * uncovered low nibble */
  if (i > crc_bits) {
    crc_data[(i / 8) - 1] &= 0xF0;
  }

  GST_MEMDUMP ("crc bytes", crc_data, GST_ROUND_UP_8 (crc_bits) / 8);
  if (gst_sbc_calculate_crc8 (crc_data, crc_bits) != data[3]) {
    GST_LOG ("header CRC check failed, bits=%u, got 0x%02x, expected 0x%02x",
        crc_bits, gst_sbc_calculate_crc8 (crc_data, crc_bits), data[3]);
    return 0;
  }

  return gst_sbc_calc_framelen (*n_subbands, *ch_mode, *n_blocks, *bitpool);
}
+
+static GstFlowReturn
+gst_sbc_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
+{
+ GstSbcParse *sbcparse = GST_SBC_PARSE (parse);
+
+ if (!sbcparse->sent_codec_tag) {
+ GstTagList *taglist;
+ GstCaps *caps;
+
+ /* codec tag */
+ caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
+ if (G_UNLIKELY (caps == NULL)) {
+ if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
+ GST_INFO_OBJECT (parse, "Src pad is flushing");
+ return GST_FLOW_FLUSHING;
+ } else {
+ GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ taglist = gst_tag_list_new_empty ();
+ gst_pb_utils_add_codec_description_to_tag_list (taglist,
+ GST_TAG_AUDIO_CODEC, caps);
+ gst_caps_unref (caps);
+
+ gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (taglist);
+
+ /* also signals the end of first-frame processing */
+ sbcparse->sent_codec_tag = TRUE;
+ }
+
+ frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;
+
+ return GST_FLOW_OK;
+}
diff --git a/gst/audioparsers/gstsbcparse.h b/gst/audioparsers/gstsbcparse.h
new file mode 100644
index 0000000000..6d204bf14f
--- /dev/null
+++ b/gst/audioparsers/gstsbcparse.h
@@ -0,0 +1,76 @@
+/* GStreamer SBC audio parser
+ * Copyright (C) 2012 Collabora Ltd. <tim.muller@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_SBC_PARSE_H_INCLUDED__
+#define __GST_SBC_PARSE_H_INCLUDED__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_SBC_PARSE (gst_sbc_parse_get_type())
+#define GST_SBC_PARSE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SBC_PARSE,GstSbcParse))
+#define GST_SBC_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SBC_PARSE,GstSbcParseClass))
+#define GST_SBC_PARSE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_SBC_PARSE,GstSbcParseClass))
+#define GST_IS_SBC_PARSE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SBC_PARSE))
+#define GST_IS_SBC_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SBC_PARSE))
+#define GST_SBC_PARSE_CAST(obj) ((GstSbcParse *)(obj))
+
/* Channel mode as encoded in the 2-bit field of the SBC frame header;
 * INVALID is used before the first header has been parsed. */
typedef enum {
  GST_SBC_CHANNEL_MODE_INVALID = -1,
  GST_SBC_CHANNEL_MODE_MONO = 0,
  GST_SBC_CHANNEL_MODE_DUAL = 1,
  GST_SBC_CHANNEL_MODE_STEREO = 2,
  GST_SBC_CHANNEL_MODE_JOINT_STEREO = 3
} GstSbcChannelMode;
+
/* Bit-allocation method as encoded in the 1-bit field of the SBC frame
 * header; INVALID is used before the first header has been parsed. */
typedef enum {
  GST_SBC_ALLOCATION_METHOD_INVALID = -1,
  GST_SBC_ALLOCATION_METHOD_LOUDNESS = 0,
  GST_SBC_ALLOCATION_METHOD_SNR = 1
} GstSbcAllocationMethod;
+
typedef struct _GstSbcParse GstSbcParse;
typedef struct _GstSbcParseClass GstSbcParseClass;

struct _GstSbcParse {
  GstBaseParse baseparse;

  /* current output format: the parameters of the most recently
   * negotiated caps, compared against each parsed frame header to
   * detect configuration changes */
  GstSbcAllocationMethod alloc_method;
  GstSbcChannelMode ch_mode;
  gint rate;
  gint n_blocks;
  gint n_subbands;
  gint bitpool;

  /* TRUE once the GST_TAG_AUDIO_CODEC tag has been pushed
   * (set in pre_push_frame) */
  gboolean sent_codec_tag;
};

struct _GstSbcParseClass {
  GstBaseParseClass baseparse_class;
};
+
+GType gst_sbc_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_SBC_PARSE_H_INCLUDED__ */
diff --git a/gst/audioparsers/gstwavpackparse.c b/gst/audioparsers/gstwavpackparse.c
new file mode 100644
index 0000000000..16b9b4e58e
--- /dev/null
+++ b/gst/audioparsers/gstwavpackparse.c
@@ -0,0 +1,713 @@
+/* GStreamer Wavpack parser
+ * Copyright (C) 2012 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ * Copyright (C) 2012 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-wavpackparse
+ * @title: wavpackparse
+ * @short_description: Wavpack parser
+ * @see_also: #GstAmrParse, #GstAACParse
+ *
+ * This is an Wavpack parser.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=abc.wavpack ! wavpackparse ! wavpackdec ! audioresample ! audioconvert ! autoaudiosink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstaudioparserselements.h"
+#include "gstwavpackparse.h"
+
+#include <gst/base/base.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/audio/audio.h>
+
+GST_DEBUG_CATEGORY_STATIC (wavpack_parse_debug);
+#define GST_CAT_DEFAULT wavpack_parse_debug
+
/* src pad: always outputs framed wavpack (with negotiated depth, channel
 * count and rate) or framed wavpack correction data */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-wavpack, "
        "depth = (int) [ 1, 32 ], "
        "channels = (int) [ 1, 8 ], "
        "rate = (int) [ 6000, 192000 ], " "framed = (boolean) TRUE; "
        "audio/x-wavpack-correction, " "framed = (boolean) TRUE")
    );

/* sink pad: accepts any wavpack or wavpack-correction stream */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-wavpack; audio/x-wavpack-correction"));
+
+static void gst_wavpack_parse_finalize (GObject * object);
+
+static gboolean gst_wavpack_parse_start (GstBaseParse * parse);
+static gboolean gst_wavpack_parse_stop (GstBaseParse * parse);
+static GstFlowReturn gst_wavpack_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+static GstCaps *gst_wavpack_parse_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+static GstFlowReturn gst_wavpack_parse_pre_push_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame);
+
+#define gst_wavpack_parse_parent_class parent_class
+G_DEFINE_TYPE (GstWavpackParse, gst_wavpack_parse, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE (wavpackparse, "wavpackparse",
+ GST_RANK_PRIMARY + 1, GST_TYPE_WAVPACK_PARSE);
+
/* Class initialisation: hook up the GstBaseParse virtual methods and
 * install the static pad templates and element metadata. */
static void
gst_wavpack_parse_class_init (GstWavpackParseClass * klass)
{
  GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GObjectClass *object_class = G_OBJECT_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (wavpack_parse_debug, "wavpackparse", 0,
      "Wavpack audio stream parser");

  object_class->finalize = gst_wavpack_parse_finalize;

  /* GstBaseParse virtual methods */
  parse_class->start = GST_DEBUG_FUNCPTR (gst_wavpack_parse_start);
  parse_class->stop = GST_DEBUG_FUNCPTR (gst_wavpack_parse_stop);
  parse_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_wavpack_parse_handle_frame);
  parse_class->get_sink_caps =
      GST_DEBUG_FUNCPTR (gst_wavpack_parse_get_sink_caps);
  parse_class->pre_push_frame =
      GST_DEBUG_FUNCPTR (gst_wavpack_parse_pre_push_frame);

  gst_element_class_add_static_pad_template (element_class, &sink_template);
  gst_element_class_add_static_pad_template (element_class, &src_template);

  gst_element_class_set_static_metadata (element_class,
      "Wavpack audio stream parser", "Codec/Parser/Audio",
      "Wavpack parser", "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
}
+
+static void
+gst_wavpack_parse_reset (GstWavpackParse * wvparse)
+{
+ wvparse->channels = -1;
+ wvparse->channel_mask = 0;
+ wvparse->sample_rate = -1;
+ wvparse->width = -1;
+ wvparse->total_samples = 0;
+ wvparse->sent_codec_tag = FALSE;
+}
+
+static void
+gst_wavpack_parse_init (GstWavpackParse * wvparse)
+{
+ gst_wavpack_parse_reset (wvparse);
+ GST_PAD_SET_ACCEPT_INTERSECT (GST_BASE_PARSE_SINK_PAD (wvparse));
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_BASE_PARSE_SINK_PAD (wvparse));
+}
+
/* GObject finalize: nothing of our own to free, just chain up. */
static void
gst_wavpack_parse_finalize (GObject * object)
{
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
+static gboolean
+gst_wavpack_parse_start (GstBaseParse * parse)
+{
+ GstWavpackParse *wvparse = GST_WAVPACK_PARSE (parse);
+
+ GST_DEBUG_OBJECT (parse, "starting");
+
+ gst_wavpack_parse_reset (wvparse);
+
+ /* need header at least */
+ gst_base_parse_set_min_frame_size (GST_BASE_PARSE (wvparse),
+ sizeof (WavpackHeader));
+
+ /* inform baseclass we can come up with ts, based on counters in packets */
+ gst_base_parse_set_has_timing_info (GST_BASE_PARSE_CAST (wvparse), TRUE);
+ gst_base_parse_set_syncable (GST_BASE_PARSE_CAST (wvparse), TRUE);
+
+ return TRUE;
+}
+
/* GstBaseParse::stop: no per-stream resources to release, just log. */
static gboolean
gst_wavpack_parse_stop (GstBaseParse * parse)
{
  GST_DEBUG_OBJECT (parse, "stopping");

  return TRUE;
}
+
/* Build a default WAVE-style channel mask for streams that do not carry
 * an explicit one. Note: only the channel counts listed below get a mask;
 * counts without a case label (e.g. 5, 7, 10) yield 0. */
static gint
gst_wavpack_get_default_channel_mask (gint nchannels)
{
  gint channel_mask = 0;

  /* Set the default channel mask for the given number of channels.
   * It's the same as for WAVE_FORMAT_EXTENDED:
   * http://www.microsoft.com/whdc/device/audio/multichaud.mspx
   */
  switch (nchannels) {
    case 11:
      channel_mask |= 0x00400;
      channel_mask |= 0x00200;
      /* fall through */
    case 9:
      channel_mask |= 0x00100;
      /* fall through */
    case 8:
      channel_mask |= 0x00080;
      channel_mask |= 0x00040;
      /* fall through */
    case 6:
      channel_mask |= 0x00020;
      channel_mask |= 0x00010;
      /* fall through */
    case 4:
      channel_mask |= 0x00008;
      /* fall through */
    case 3:
      channel_mask |= 0x00004;
      /* fall through */
    case 2:
      channel_mask |= 0x00002;
      channel_mask |= 0x00001;
      break;
    case 1:
      /* For mono use front center */
      channel_mask |= 0x00004;
      break;
  }

  return channel_mask;
}
+
/* Mapping from WAVE-style channel mask bits (ascending bit order) to
 * GStreamer channel positions; iterated in order by
 * gst_wavpack_get_channel_positions() below. */
static const struct
{
  const guint32 ms_mask;
  const GstAudioChannelPosition gst_pos;
} layout_mapping[] = {
  {
  0x00001, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT}, {
  0x00002, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT}, {
  0x00004, GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER}, {
  0x00008, GST_AUDIO_CHANNEL_POSITION_LFE1}, {
  0x00010, GST_AUDIO_CHANNEL_POSITION_REAR_LEFT}, {
  0x00020, GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT}, {
  0x00040, GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER}, {
  0x00080, GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER}, {
  0x00100, GST_AUDIO_CHANNEL_POSITION_REAR_CENTER}, {
  0x00200, GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT}, {
  0x00400, GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT}, {
  0x00800, GST_AUDIO_CHANNEL_POSITION_TOP_CENTER}, {
  0x01000, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT}, {
  0x02000, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER}, {
  0x04000, GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT}, {
  0x08000, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT}, {
  0x10000, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER}, {
  0x20000, GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT}
};

/* number of entries in layout_mapping */
#define MAX_CHANNEL_POSITIONS G_N_ELEMENTS (layout_mapping)
+
+static gboolean
+gst_wavpack_get_channel_positions (gint num_channels, gint layout,
+ GstAudioChannelPosition * pos)
+{
+ gint i, p;
+
+ if (num_channels == 1 && layout == 0x00004) {
+ pos[0] = GST_AUDIO_CHANNEL_POSITION_MONO;
+ return TRUE;
+ }
+
+ p = 0;
+ for (i = 0; i < MAX_CHANNEL_POSITIONS; ++i) {
+ if ((layout & layout_mapping[i].ms_mask) != 0) {
+ if (p >= num_channels) {
+ GST_WARNING ("More bits set in the channel layout map than there "
+ "are channels! Broken file");
+ return FALSE;
+ }
+ if (layout_mapping[i].gst_pos == GST_AUDIO_CHANNEL_POSITION_INVALID) {
+ GST_WARNING ("Unsupported channel position (mask 0x%08x) in channel "
+ "layout map - ignoring those channels", layout_mapping[i].ms_mask);
+ /* what to do? just ignore it and let downstream deal with a channel
+ * layout that has INVALID positions in it for now ... */
+ }
+ pos[p] = layout_mapping[i].gst_pos;
+ ++p;
+ }
+ }
+
+ if (p != num_channels) {
+ GST_WARNING ("Only %d bits set in the channel layout map, but there are "
+ "supposed to be %d channels! Broken file", p, num_channels);
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
/* Wavpack sample rates indexed by the 4-bit rate code extracted from the
 * block header flags in gst_wavpack_parse_frame_metadata() */
static const guint32 sample_rates[] = {
  6000, 8000, 9600, 11025, 12000, 16000, 22050,
  24000, 32000, 44100, 48000, 64000, 88200, 96000, 192000
};
+
/* Evaluate a byte-reader call and bail out to the read_failed label on a
 * short read. Wrapped in do/while(0) so it behaves as a single statement
 * in all contexts (e.g. an unbraced if/else), and the argument is
 * parenthesised to avoid operator-precedence surprises. */
#define CHECK(call) do { \
  if (!(call)) \
    goto read_failed; \
} while (0)
+
+/* caller ensures properly sync'ed with enough data */
+static gboolean
+gst_wavpack_parse_frame_metadata (GstWavpackParse * parse, GstBuffer * buf,
+ gint skip, WavpackHeader * wph, WavpackInfo * wpi)
+{
+ GstByteReader br;
+ gint i;
+ GstMapInfo map;
+
+ g_return_val_if_fail (wph != NULL || wpi != NULL, FALSE);
+ g_return_val_if_fail (gst_buffer_get_size (buf) >=
+ skip + sizeof (WavpackHeader), FALSE);
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ gst_byte_reader_init (&br, map.data + skip, wph->ckSize + 8);
+ /* skip past header */
+ gst_byte_reader_skip_unchecked (&br, sizeof (WavpackHeader));
+
+ /* get some basics from header */
+ i = (wph->flags >> 23) & 0xF;
+ if (!wpi->rate)
+ wpi->rate = (i < G_N_ELEMENTS (sample_rates)) ? sample_rates[i] : 44100;
+ wpi->width = ((wph->flags & 0x3) + 1) * 8;
+ if (!wpi->channels)
+ wpi->channels = (wph->flags & 0x4) ? 1 : 2;
+ if (!wpi->channel_mask)
+ wpi->channel_mask = 5 - wpi->channels;
+
+ /* need to dig metadata blocks for some more */
+ while (gst_byte_reader_get_remaining (&br)) {
+ gint size = 0;
+ guint16 size2 = 0;
+ guint8 c, id;
+ const guint8 *data;
+ GstByteReader mbr;
+
+ CHECK (gst_byte_reader_get_uint8 (&br, &id));
+ CHECK (gst_byte_reader_get_uint8 (&br, &c));
+ if (id & ID_LARGE)
+ CHECK (gst_byte_reader_get_uint16_le (&br, &size2));
+ size = size2;
+ size <<= 8;
+ size += c;
+ size <<= 1;
+ if (id & ID_ODD_SIZE)
+ size--;
+
+ CHECK (gst_byte_reader_get_data (&br, size + (size & 1), &data));
+ gst_byte_reader_init (&mbr, data, size);
+
+ /* 0x1f is the metadata id mask and 0x20 flag is for later extensions
+ * that do not need to be handled by the decoder */
+ switch (id & 0x3f) {
+ case ID_WVC_BITSTREAM:
+ GST_LOG_OBJECT (parse, "correction bitstream");
+ wpi->correction = TRUE;
+ break;
+ case ID_WV_BITSTREAM:
+ case ID_WVX_BITSTREAM:
+ break;
+ case ID_SAMPLE_RATE:
+ if (size == 3) {
+ CHECK (gst_byte_reader_get_uint24_le (&mbr, &wpi->rate));
+ GST_LOG_OBJECT (parse, "updated with custom rate %d", wpi->rate);
+ } else {
+ GST_DEBUG_OBJECT (parse, "unexpected size for SAMPLE_RATE metadata");
+ }
+ break;
+ case ID_CHANNEL_INFO:
+ {
+ guint16 channels;
+ guint32 mask = 0;
+
+ if (size == 6) {
+ CHECK (gst_byte_reader_get_uint16_le (&mbr, &channels));
+ channels = channels & 0xFFF;
+ CHECK (gst_byte_reader_get_uint24_le (&mbr, &mask));
+ } else if (size) {
+ CHECK (gst_byte_reader_get_uint8 (&mbr, &c));
+ channels = c;
+ while (gst_byte_reader_get_uint8 (&mbr, &c))
+ mask |= (((guint32) c) << 8);
+ } else {
+ GST_DEBUG_OBJECT (parse, "unexpected size for CHANNEL_INFO metadata");
+ break;
+ }
+ wpi->channels = channels;
+ wpi->channel_mask = mask;
+ break;
+ }
+ default:
+ GST_LOG_OBJECT (parse, "unparsed ID 0x%x", id);
+ break;
+ }
+ }
+
+ gst_buffer_unmap (buf, &map);
+
+ return TRUE;
+
+ /* ERRORS */
+read_failed:
+ {
+ gst_buffer_unmap (buf, &map);
+ GST_DEBUG_OBJECT (parse, "short read while parsing metadata");
+ /* let's look the other way anyway */
+ return TRUE;
+ }
+}
+
+/* caller ensures properly sync'ed with enough data */
+static gboolean
+gst_wavpack_parse_frame_header (GstWavpackParse * parse, GstBuffer * buf,
+ gint skip, WavpackHeader * _wph)
+{
+ GstByteReader br;
+ WavpackHeader wph = { {0,}, 0, };
+ GstMapInfo map;
+ gboolean hdl = TRUE;
+
+ g_return_val_if_fail (gst_buffer_get_size (buf) >=
+ skip + sizeof (WavpackHeader), FALSE);
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_byte_reader_init (&br, map.data, map.size);
+
+ /* marker */
+ gst_byte_reader_skip_unchecked (&br, skip + 4);
+
+ /* read */
+ hdl &= gst_byte_reader_get_uint32_le (&br, &wph.ckSize);
+ hdl &= gst_byte_reader_get_uint16_le (&br, &wph.version);
+ hdl &= gst_byte_reader_get_uint8 (&br, &wph.track_no);
+ hdl &= gst_byte_reader_get_uint8 (&br, &wph.index_no);
+ hdl &= gst_byte_reader_get_uint32_le (&br, &wph.total_samples);
+ hdl &= gst_byte_reader_get_uint32_le (&br, &wph.block_index);
+ hdl &= gst_byte_reader_get_uint32_le (&br, &wph.block_samples);
+ hdl &= gst_byte_reader_get_uint32_le (&br, &wph.flags);
+ hdl &= gst_byte_reader_get_uint32_le (&br, &wph.crc);
+
+ if (!hdl)
+ GST_WARNING_OBJECT (parse, "Error reading header");
+
+ /* dump */
+ GST_LOG_OBJECT (parse, "size %d", wph.ckSize);
+ GST_LOG_OBJECT (parse, "version 0x%x", wph.version);
+ GST_LOG_OBJECT (parse, "total samples %d", wph.total_samples);
+ GST_LOG_OBJECT (parse, "block index %d", wph.block_index);
+ GST_LOG_OBJECT (parse, "block samples %d", wph.block_samples);
+ GST_LOG_OBJECT (parse, "flags 0x%x", wph.flags);
+ GST_LOG_OBJECT (parse, "crc 0x%x", wph.flags);
+
+ if (!parse->total_samples && wph.block_index == 0 && wph.total_samples != -1) {
+ GST_DEBUG_OBJECT (parse, "determined duration of %u samples",
+ wph.total_samples);
+ parse->total_samples = wph.total_samples;
+ }
+
+ if (_wph)
+ *_wph = wph;
+
+ gst_buffer_unmap (buf, &map);
+
+ return TRUE;
+}
+
+static GstFlowReturn
+gst_wavpack_parse_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize)
+{
+ GstWavpackParse *wvparse = GST_WAVPACK_PARSE (parse);
+ GstBuffer *buf = frame->buffer;
+ GstByteReader reader;
+ gint off;
+ guint rate, chans, width, mask;
+ gboolean lost_sync, draining, final;
+ guint frmsize = 0;
+ WavpackHeader wph;
+ WavpackInfo wpi = { 0, };
+ GstMapInfo map;
+
+ if (G_UNLIKELY (gst_buffer_get_size (buf) < sizeof (WavpackHeader)))
+ return FALSE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_byte_reader_init (&reader, map.data, map.size);
+
+ /* scan for 'wvpk' marker */
+ off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff, 0x7776706b,
+ 0, map.size);
+
+ GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off);
+
+ /* didn't find anything that looks like a sync word, skip */
+ if (off < 0) {
+ *skipsize = map.size - 3;
+ goto skip;
+ }
+
+ /* possible frame header, but not at offset 0? skip bytes before sync */
+ if (off > 0) {
+ *skipsize = off;
+ goto skip;
+ }
+
+ /* make sure the values in the frame header look sane */
+ gst_wavpack_parse_frame_header (wvparse, buf, 0, &wph);
+ frmsize = wph.ckSize + 8;
+
+ /* need the entire frame for parsing */
+ if (gst_byte_reader_get_remaining (&reader) < frmsize)
+ goto more;
+
+ /* got a frame, now we can dig for some more metadata */
+ GST_LOG_OBJECT (parse, "got frame");
+ gst_wavpack_parse_frame_metadata (wvparse, buf, 0, &wph, &wpi);
+
+ lost_sync = GST_BASE_PARSE_LOST_SYNC (parse);
+ draining = GST_BASE_PARSE_DRAINING (parse);
+
+ while (!(final = (wph.flags & FLAG_FINAL_BLOCK)) || (lost_sync && !draining)) {
+ guint32 word = 0;
+
+ GST_LOG_OBJECT (wvparse, "checking next frame syncword; "
+ "lost_sync: %d, draining: %d, final: %d", lost_sync, draining, final);
+
+ if (!gst_byte_reader_skip (&reader, wph.ckSize + 8) ||
+ !gst_byte_reader_peek_uint32_be (&reader, &word)) {
+ GST_DEBUG_OBJECT (wvparse, "... but not sufficient data");
+ frmsize += 4;
+ goto more;
+ } else {
+ if (word != 0x7776706b) {
+ GST_DEBUG_OBJECT (wvparse, "0x%x not OK", word);
+ *skipsize = off + 2;
+ goto skip;
+ }
+ /* need to parse each frame/block for metadata if several ones */
+ if (!final) {
+ gint av;
+
+ GST_LOG_OBJECT (wvparse, "checking frame at offset %d (0x%x)",
+ frmsize, frmsize);
+ av = gst_byte_reader_get_remaining (&reader);
+ if (av < sizeof (WavpackHeader)) {
+ frmsize += sizeof (WavpackHeader);
+ goto more;
+ }
+ gst_wavpack_parse_frame_header (wvparse, buf, frmsize, &wph);
+ off = frmsize;
+ frmsize += wph.ckSize + 8;
+ if (av < wph.ckSize + 8)
+ goto more;
+ gst_wavpack_parse_frame_metadata (wvparse, buf, off, &wph, &wpi);
+ /* could also check for matching block_index and block_samples ?? */
+ }
+ }
+
+ /* resynced if we make it here */
+ lost_sync = FALSE;
+ }
+
+ rate = wpi.rate;
+ width = wpi.width;
+ chans = wpi.channels;
+ mask = wpi.channel_mask;
+
+ GST_LOG_OBJECT (parse, "rate: %u, width: %u, chans: %u", rate, width, chans);
+
+ GST_BUFFER_PTS (buf) =
+ gst_util_uint64_scale_int (wph.block_index, GST_SECOND, rate);
+ GST_BUFFER_DTS (buf) = GST_BUFFER_PTS (buf);
+ GST_BUFFER_DURATION (buf) =
+ gst_util_uint64_scale_int (wph.block_index + wph.block_samples,
+ GST_SECOND, rate) - GST_BUFFER_PTS (buf);
+
+ if (G_UNLIKELY (wvparse->sample_rate != rate || wvparse->channels != chans
+ || wvparse->width != width || wvparse->channel_mask != mask)) {
+ GstCaps *caps;
+
+ if (wpi.correction) {
+ caps = gst_caps_new_simple ("audio/x-wavpack-correction",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ } else {
+ caps = gst_caps_new_simple ("audio/x-wavpack",
+ "channels", G_TYPE_INT, chans,
+ "rate", G_TYPE_INT, rate,
+ "depth", G_TYPE_INT, width, "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+
+ if (!mask)
+ mask = gst_wavpack_get_default_channel_mask (wvparse->channels);
+ if (mask != 0) {
+ GstAudioChannelPosition pos[64] =
+ { GST_AUDIO_CHANNEL_POSITION_INVALID, };
+ guint64 gmask;
+
+ if (!gst_wavpack_get_channel_positions (chans, mask, pos)) {
+ GST_WARNING_OBJECT (wvparse, "Failed to determine channel layout");
+ } else {
+ gst_audio_channel_positions_to_mask (pos, chans, FALSE, &gmask);
+ if (gmask)
+ gst_caps_set_simple (caps,
+ "channel-mask", GST_TYPE_BITMASK, gmask, NULL);
+ }
+ }
+ }
+
+ gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps);
+ gst_caps_unref (caps);
+
+ wvparse->sample_rate = rate;
+ wvparse->channels = chans;
+ wvparse->width = width;
+ wvparse->channel_mask = mask;
+
+ if (wvparse->total_samples) {
+ GST_DEBUG_OBJECT (wvparse, "setting duration");
+ gst_base_parse_set_duration (GST_BASE_PARSE (wvparse),
+ GST_FORMAT_TIME, gst_util_uint64_scale_int (wvparse->total_samples,
+ GST_SECOND, wvparse->sample_rate), 0);
+ }
+ }
+
+ /* return to normal size */
+ gst_base_parse_set_min_frame_size (parse, sizeof (WavpackHeader));
+ gst_buffer_unmap (buf, &map);
+
+ return gst_base_parse_finish_frame (parse, frame, frmsize);
+
+skip:
+ gst_buffer_unmap (buf, &map);
+ GST_LOG_OBJECT (wvparse, "skipping %d", *skipsize);
+ return GST_FLOW_OK;
+
+more:
+ gst_buffer_unmap (buf, &map);
+ GST_LOG_OBJECT (wvparse, "need at least %u", frmsize);
+ gst_base_parse_set_min_frame_size (parse, frmsize);
+ *skipsize = 0;
+ return GST_FLOW_OK;
+}
+
+static void
+remove_fields (GstCaps * caps)
+{
+ guint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "framed");
+ }
+}
+
/* GstBaseParse::get_sink_caps: compute what the sink pad can accept.
 * The template caps are narrowed by whatever the downstream peer supports,
 * after removing the "framed" field that this element itself produces.
 *
 * Returns: (transfer full): the resulting caps */
static GstCaps *
gst_wavpack_parse_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
{
  GstCaps *peercaps, *templ;
  GstCaps *res;

  templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
  if (filter) {
    GstCaps *fcopy = gst_caps_copy (filter);
    /* Remove the fields we convert */
    remove_fields (fcopy);
    peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), fcopy);
    gst_caps_unref (fcopy);
  } else
    peercaps = gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), NULL);

  if (peercaps) {
    /* Remove the framed field */
    peercaps = gst_caps_make_writable (peercaps);
    remove_fields (peercaps);

    res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (peercaps);
    gst_caps_unref (templ);
  } else {
    /* no peer: the template caps are the answer; ownership moves to res */
    res = templ;
  }

  /* finally honour the caller's original (unmodified) filter, if any */
  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (res);
    res = intersection;
  }

  return res;
}
+
/* GstBaseParse::pre_push_frame: before the first buffer is pushed, emit
 * a GST_TAG_AUDIO_CODEC tag derived from the negotiated src caps; always
 * request clipping of outgoing frames by the base class. */
static GstFlowReturn
gst_wavpack_parse_pre_push_frame (GstBaseParse * parse,
    GstBaseParseFrame * frame)
{
  GstWavpackParse *wavpackparse = GST_WAVPACK_PARSE (parse);

  if (!wavpackparse->sent_codec_tag) {
    GstTagList *taglist;
    GstCaps *caps;

    /* codec tag */
    caps = gst_pad_get_current_caps (GST_BASE_PARSE_SRC_PAD (parse));
    if (G_UNLIKELY (caps == NULL)) {
      /* no caps yet: either we're shutting down or not negotiated */
      if (GST_PAD_IS_FLUSHING (GST_BASE_PARSE_SRC_PAD (parse))) {
        GST_INFO_OBJECT (parse, "Src pad is flushing");
        return GST_FLOW_FLUSHING;
      } else {
        GST_INFO_OBJECT (parse, "Src pad is not negotiated!");
        return GST_FLOW_NOT_NEGOTIATED;
      }
    }

    taglist = gst_tag_list_new_empty ();
    gst_pb_utils_add_codec_description_to_tag_list (taglist,
        GST_TAG_AUDIO_CODEC, caps);
    gst_caps_unref (caps);

    gst_base_parse_merge_tags (parse, taglist, GST_TAG_MERGE_REPLACE);
    gst_tag_list_unref (taglist);

    /* also signals the end of first-frame processing */
    wavpackparse->sent_codec_tag = TRUE;
  }

  frame->flags |= GST_BASE_PARSE_FRAME_FLAG_CLIP;

  return GST_FLOW_OK;
}
diff --git a/gst/audioparsers/gstwavpackparse.h b/gst/audioparsers/gstwavpackparse.h
new file mode 100644
index 0000000000..30325aa26b
--- /dev/null
+++ b/gst/audioparsers/gstwavpackparse.h
@@ -0,0 +1,133 @@
+/* GStreamer Wavpack parser
+ * Copyright (C) 2012 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ * Copyright (C) 2012 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WAVPACK_PARSE_H__
+#define __GST_WAVPACK_PARSE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_WAVPACK_PARSE \
+ (gst_wavpack_parse_get_type())
+#define GST_WAVPACK_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_WAVPACK_PARSE, GstWavpackParse))
+#define GST_WAVPACK_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_WAVPACK_PARSE, GstWavpackParseClass))
+#define GST_IS_WAVPACK_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_WAVPACK_PARSE))
+#define GST_IS_WAVPACK_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_WAVPACK_PARSE))
+
+
+#define ID_UNIQUE 0x3f
+#define ID_OPTIONAL_DATA 0x20
+#define ID_ODD_SIZE 0x40
+#define ID_LARGE 0x80
+
+#define ID_DUMMY 0x0
+#define ID_ENCODER_INFO 0x1
+#define ID_DECORR_TERMS 0x2
+#define ID_DECORR_WEIGHTS 0x3
+#define ID_DECORR_SAMPLES 0x4
+#define ID_ENTROPY_VARS 0x5
+#define ID_HYBRID_PROFILE 0x6
+#define ID_SHAPING_WEIGHTS 0x7
+#define ID_FLOAT_INFO 0x8
+#define ID_INT32_INFO 0x9
+#define ID_WV_BITSTREAM 0xa
+#define ID_WVC_BITSTREAM 0xb
+#define ID_WVX_BITSTREAM 0xc
+#define ID_CHANNEL_INFO 0xd
+
+#define ID_RIFF_HEADER (ID_OPTIONAL_DATA | 0x1)
+#define ID_RIFF_TRAILER (ID_OPTIONAL_DATA | 0x2)
+#define ID_REPLAY_GAIN (ID_OPTIONAL_DATA | 0x3)
+#define ID_CUESHEET (ID_OPTIONAL_DATA | 0x4)
+#define ID_CONFIG_BLOCK (ID_OPTIONAL_DATA | 0x5)
+#define ID_MD5_CHECKSUM (ID_OPTIONAL_DATA | 0x6)
+#define ID_SAMPLE_RATE (ID_OPTIONAL_DATA | 0x7)
+
+#define FLAG_FINAL_BLOCK (1 << 12)
+
+typedef struct {
+ char ckID [4]; /* "wvpk" */
+ guint32 ckSize; /* size of entire block (minus 8, of course) */
+ guint16 version; /* 0x402 to 0x410 are currently valid for decode */
+ guchar track_no; /* track number (0 if not used, like now) */
+ guchar index_no; /* track sub-index (0 if not used, like now) */
+ guint32 total_samples; /* total samples for entire file, but this is
+ * only valid if block_index == 0 and a value of
+ * -1 indicates unknown length */
+ guint32 block_index; /* index of first sample in block relative to
+ * beginning of file (normally this would start
+ * at 0 for the first block) */
+ guint32 block_samples; /* number of samples in this block (0 = no audio) */
+ guint32 flags; /* various flags for id and decoding */
+ guint32 crc; /* crc for actual decoded data */
+} WavpackHeader;
+
+typedef struct {
+ gboolean correction;
+ guint rate;
+ guint width;
+ guint channels;
+ guint channel_mask;
+} WavpackInfo;
+
+typedef struct _GstWavpackParse GstWavpackParse;
+typedef struct _GstWavpackParseClass GstWavpackParseClass;
+
+/**
+ * GstWavpackParse:
+ *
+ * The opaque GstWavpackParse object
+ */
+struct _GstWavpackParse {
+ GstBaseParse baseparse;
+
+ /*< private >*/
+ gint sample_rate;
+ gint channels;
+ gint width;
+ gint channel_mask;
+
+ guint total_samples;
+
+ gboolean sent_codec_tag;
+};
+
+/**
+ * GstWavpackParseClass:
+ * @parent_class: Element parent class.
+ *
+ * The opaque GstWavpackParseClass data structure.
+ */
+struct _GstWavpackParseClass {
+ GstBaseParseClass baseparse_class;
+};
+
+GType gst_wavpack_parse_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_WAVPACK_PARSE_H__ */
diff --git a/gst/audioparsers/meson.build b/gst/audioparsers/meson.build
new file mode 100644
index 0000000000..2bd5d245c3
--- /dev/null
+++ b/gst/audioparsers/meson.build
@@ -0,0 +1,23 @@
+audioparsers_src = [
+ 'gstaacparse.c',
+ 'gstamrparse.c',
+ 'gstac3parse.c',
+ 'gstdcaparse.c',
+ 'gstflacparse.c',
+ 'gstmpegaudioparse.c',
+ 'gstsbcparse.c',
+ 'gstwavpackparse.c',
+ 'plugin.c',
+]
+
+gstaudioparsers = library('gstaudioparsers',
+ audioparsers_src,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gst_dep, gstbase_dep, gstpbutils_dep,
+ gstaudio_dep, gsttag_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstaudioparsers, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstaudioparsers]
diff --git a/gst/audioparsers/plugin.c b/gst/audioparsers/plugin.c
new file mode 100644
index 0000000000..0126af6fe3
--- /dev/null
+++ b/gst/audioparsers/plugin.c
@@ -0,0 +1,48 @@
+/* GStreamer audio parsers
+ * Copyright (C) 2009 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstaudioparserselements.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (aacparse, plugin);
+ ret |= GST_ELEMENT_REGISTER (amrparse, plugin);
+ ret |= GST_ELEMENT_REGISTER (ac3parse, plugin);
+ ret |= GST_ELEMENT_REGISTER (dcaparse, plugin);
+ ret |= GST_ELEMENT_REGISTER (flacparse, plugin);
+ ret |= GST_ELEMENT_REGISTER (mpegaudioparse, plugin);
+ ret |= GST_ELEMENT_REGISTER (sbcparse, plugin);
+ ret |= GST_ELEMENT_REGISTER (wavpackparse, plugin);
+
+ return ret;
+}
+
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ audioparsers,
+ "Parsers for various audio formats",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/auparse/gstauparse.c b/gst/auparse/gstauparse.c
new file mode 100644
index 0000000000..83547be4a4
--- /dev/null
+++ b/gst/auparse/gstauparse.c
@@ -0,0 +1,812 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-auparse
+ * @title: auparse
+ *
+ * Parses .au files, mostly originating from Sun OS-based computers.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "gstauparse.h"
+#include <gst/audio/audio.h>
+
+GST_DEBUG_CATEGORY_STATIC (auparse_debug);
+#define GST_CAT_DEFAULT (auparse_debug)
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-au")
+ );
+
+#define GST_AU_PARSE_RAW_PAD_TEMPLATE_CAPS \
+ "audio/x-raw, " \
+ "format= (string) { S8, S16LE, S16BE, S24LE, S24BE, " \
+ "S32LE, S32BE, F32LE, F32BE, " \
+ "F64LE, F64BE }, " \
+ "rate = (int) [ 8000, 192000 ], " \
+ "channels = (int) 1, " \
+ "layout = (string) interleaved;" \
+ "audio/x-raw, " \
+ "format= (string) { S8, S16LE, S16BE, S24LE, S24BE, " \
+ "S32LE, S32BE, F32LE, F32BE, " \
+ "F64LE, F64BE }, " \
+ "rate = (int) [ 8000, 192000 ], " \
+ "channels = (int) 2, " \
+ "channel-mask = (bitmask) 0x3," \
+ "layout = (string) interleaved"
+
+#define GST_AU_PARSE_ALAW_PAD_TEMPLATE_CAPS \
+ "audio/x-alaw, " \
+ "rate = (int) [ 8000, 192000 ], " \
+ "channels = (int) [ 1, 2 ]"
+
+#define GST_AU_PARSE_MULAW_PAD_TEMPLATE_CAPS \
+ "audio/x-mulaw, " \
+ "rate = (int) [ 8000, 192000 ], " \
+ "channels = (int) [ 1, 2 ]"
+
+/* Nothing to decode those ADPCM streams for now */
+#define GST_AU_PARSE_ADPCM_PAD_TEMPLATE_CAPS \
+ "audio/x-adpcm, " \
+ "layout = (string) { g721, g722, g723_3, g723_5 }"
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_AU_PARSE_RAW_PAD_TEMPLATE_CAPS "; "
+ GST_AU_PARSE_ALAW_PAD_TEMPLATE_CAPS ";"
+ GST_AU_PARSE_MULAW_PAD_TEMPLATE_CAPS ";"
+ GST_AU_PARSE_ADPCM_PAD_TEMPLATE_CAPS));
+
+
+static void gst_au_parse_dispose (GObject * object);
+static GstFlowReturn gst_au_parse_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static GstStateChangeReturn gst_au_parse_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_au_parse_reset (GstAuParse * auparse);
+static gboolean gst_au_parse_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_au_parse_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_au_parse_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_au_parse_src_convert (GstAuParse * auparse,
+ GstFormat src_format, gint64 srcval, GstFormat dest_format,
+ gint64 * destval);
+
+#define gst_au_parse_parent_class parent_class
+G_DEFINE_TYPE (GstAuParse, gst_au_parse, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (auparse, "auparse", GST_RANK_SECONDARY,
+ GST_TYPE_AU_PARSE);
+
+static void
+gst_au_parse_class_init (GstAuParseClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ GST_DEBUG_CATEGORY_INIT (auparse_debug, "auparse", 0, ".au parser");
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->dispose = gst_au_parse_dispose;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_au_parse_change_state);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+ gst_element_class_set_static_metadata (gstelement_class,
+ "AU audio demuxer",
+ "Codec/Demuxer/Audio",
+ "Parse an .au file into raw audio",
+ "Erik Walthinsen <omega@cse.ogi.edu>");
+}
+
+static void
+gst_au_parse_init (GstAuParse * auparse)
+{
+ auparse->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_chain_function (auparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_au_parse_chain));
+ gst_pad_set_event_function (auparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_au_parse_sink_event));
+ gst_element_add_pad (GST_ELEMENT (auparse), auparse->sinkpad);
+
+ auparse->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+ gst_pad_set_query_function (auparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_au_parse_src_query));
+ gst_pad_set_event_function (auparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_au_parse_src_event));
+ gst_pad_use_fixed_caps (auparse->srcpad);
+ gst_element_add_pad (GST_ELEMENT (auparse), auparse->srcpad);
+
+ auparse->adapter = gst_adapter_new ();
+ gst_au_parse_reset (auparse);
+}
+
+static void
+gst_au_parse_dispose (GObject * object)
+{
+ GstAuParse *au = GST_AU_PARSE (object);
+
+ if (au->adapter != NULL) {
+ g_object_unref (au->adapter);
+ au->adapter = NULL;
+ }
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+static void
+gst_au_parse_reset (GstAuParse * auparse)
+{
+ auparse->offset = 0;
+ auparse->buffer_offset = 0;
+ auparse->encoding = 0;
+ auparse->samplerate = 0;
+ auparse->channels = 0;
+
+ gst_adapter_clear (auparse->adapter);
+
+ gst_caps_replace (&auparse->src_caps, NULL);
+
+ /* gst_segment_init (&auparse->segment, GST_FORMAT_TIME); */
+}
+
+static void
+gst_au_parse_negotiate_srcpad (GstAuParse * auparse, GstCaps * new_caps)
+{
+ if (auparse->src_caps && gst_caps_is_equal (new_caps, auparse->src_caps)) {
+ GST_LOG_OBJECT (auparse, "same caps, nothing to do");
+ return;
+ }
+
+ gst_caps_replace (&auparse->src_caps, new_caps);
+ GST_DEBUG_OBJECT (auparse, "Changing src pad caps to %" GST_PTR_FORMAT,
+ auparse->src_caps);
+ gst_pad_set_caps (auparse->srcpad, auparse->src_caps);
+
+ return;
+}
+
+static GstFlowReturn
+gst_au_parse_parse_header (GstAuParse * auparse)
+{
+ GstCaps *tempcaps;
+ guint32 size;
+ guint8 *head;
+ gchar layout[7] = { 0, };
+ GstAudioFormat format = GST_AUDIO_FORMAT_UNKNOWN;
+ gint law = 0;
+ guint endianness;
+
+ head = (guint8 *) gst_adapter_map (auparse->adapter, 24);
+ g_assert (head != NULL);
+
+ GST_DEBUG_OBJECT (auparse, "[%c%c%c%c]", head[0], head[1], head[2], head[3]);
+
+ switch (GST_READ_UINT32_BE (head)) {
+ /* normal format is big endian (au is a Sparc format) */
+ case 0x2e736e64:{ /* ".snd" */
+ endianness = G_BIG_ENDIAN;
+ break;
+ }
+ /* and of course, someone had to invent a little endian
+ * version. Used by DEC systems. */
+ case 0x646e732e: /* dns. */
+ case 0x0064732e:{ /* other sources say it is "dns." */
+ endianness = G_LITTLE_ENDIAN;
+ break;
+ }
+ default:{
+ goto unknown_header;
+ }
+ }
+
+ auparse->offset = GST_READ_UINT32_BE (head + 4);
+ /* Do not trust size, could be set to -1 : unknown
+ * otherwise: filesize = size + auparse->offset
+ */
+ size = GST_READ_UINT32_BE (head + 8);
+ auparse->encoding = GST_READ_UINT32_BE (head + 12);
+ auparse->samplerate = GST_READ_UINT32_BE (head + 16);
+ auparse->channels = GST_READ_UINT32_BE (head + 20);
+
+ if (auparse->samplerate < 8000 || auparse->samplerate > 192000)
+ goto unsupported_sample_rate;
+
+ if (auparse->channels < 1 || auparse->channels > 2)
+ goto unsupported_number_of_channels;
+
+ GST_DEBUG_OBJECT (auparse, "offset %" G_GINT64_FORMAT ", size %u, "
+ "encoding %u, frequency %u, channels %u", auparse->offset, size,
+ auparse->encoding, auparse->samplerate, auparse->channels);
+
+ /* Docs:
+ * http://www.opengroup.org/public/pubs/external/auformat.html
+ * http://astronomy.swin.edu.au/~pbourke/dataformats/au/
+ * Solaris headers : /usr/include/audio/au.h
+ * libsndfile : src/au.c
+ *
+ * Samples :
+ * http://www.tsp.ece.mcgill.ca/MMSP/Documents/AudioFormats/AU/Samples.html
+ */
+
+ switch (auparse->encoding) {
+ case 1: /* 8-bit ISDN mu-law G.711 */
+ law = 1;
+ break;
+ case 27: /* 8-bit ISDN A-law G.711 */
+ law = 2;
+ break;
+
+ case 2: /* 8-bit linear PCM, FIXME signed? */
+ format = GST_AUDIO_FORMAT_S8;
+ auparse->sample_size = auparse->channels;
+ break;
+ case 3: /* 16-bit linear PCM */
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_S16LE;
+ else
+ format = GST_AUDIO_FORMAT_S16BE;
+ auparse->sample_size = auparse->channels * 2;
+ break;
+ case 4: /* 24-bit linear PCM */
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_S24LE;
+ else
+ format = GST_AUDIO_FORMAT_S24BE;
+ auparse->sample_size = auparse->channels * 3;
+ break;
+ case 5: /* 32-bit linear PCM */
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_S32LE;
+ else
+ format = GST_AUDIO_FORMAT_S32BE;
+ auparse->sample_size = auparse->channels * 4;
+ break;
+
+ case 6: /* 32-bit IEEE floating point */
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_F32LE;
+ else
+ format = GST_AUDIO_FORMAT_F32BE;
+ auparse->sample_size = auparse->channels * 4;
+ break;
+ case 7: /* 64-bit IEEE floating point */
+ if (endianness == G_LITTLE_ENDIAN)
+ format = GST_AUDIO_FORMAT_F64LE;
+ else
+ format = GST_AUDIO_FORMAT_F64BE;
+ auparse->sample_size = auparse->channels * 8;
+ break;
+
+ case 23: /* 4-bit CCITT G.721 ADPCM 32kbps -> modplug/libsndfile (compressed 8-bit mu-law) */
+ strcpy (layout, "g721");
+ break;
+ case 24: /* 8-bit CCITT G.722 ADPCM -> rtp */
+ strcpy (layout, "g722");
+ break;
+ case 25: /* 3-bit CCITT G.723.3 ADPCM 24kbps -> rtp/xine/modplug/libsndfile */
+ strcpy (layout, "g723_3");
+ break;
+ case 26: /* 5-bit CCITT G.723.5 ADPCM 40kbps -> rtp/xine/modplug/libsndfile */
+ strcpy (layout, "g723_5");
+ break;
+
+ case 8: /* Fragmented sample data */
+ case 9: /* AU_ENCODING_NESTED */
+
+ case 10: /* DSP program */
+ case 11: /* DSP 8-bit fixed point */
+ case 12: /* DSP 16-bit fixed point */
+ case 13: /* DSP 24-bit fixed point */
+ case 14: /* DSP 32-bit fixed point */
+
+ case 16: /* AU_ENCODING_DISPLAY : non-audio display data */
+ case 17: /* AU_ENCODING_MULAW_SQUELCH */
+
+ case 18: /* 16-bit linear with emphasis */
+ case 19: /* 16-bit linear compressed (NeXT) */
+ case 20: /* 16-bit linear with emphasis and compression */
+
+ case 21: /* Music kit DSP commands */
+ case 22: /* Music kit DSP commands samples */
+
+ default:
+ goto unknown_format;
+ }
+
+ if (law) {
+ tempcaps =
+ gst_caps_new_simple ((law == 1) ? "audio/x-mulaw" : "audio/x-alaw",
+ "rate", G_TYPE_INT, auparse->samplerate,
+ "channels", G_TYPE_INT, auparse->channels, NULL);
+ auparse->sample_size = auparse->channels;
+ } else if (format != GST_AUDIO_FORMAT_UNKNOWN) {
+ GstCaps *templ_caps = gst_pad_get_pad_template_caps (auparse->srcpad);
+ GstCaps *intersection;
+
+ tempcaps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "rate", G_TYPE_INT, auparse->samplerate,
+ "channels", G_TYPE_INT, auparse->channels, NULL);
+
+ intersection = gst_caps_intersect (tempcaps, templ_caps);
+ gst_caps_unref (tempcaps);
+ gst_caps_unref (templ_caps);
+ tempcaps = intersection;
+ } else if (layout[0]) {
+ tempcaps = gst_caps_new_simple ("audio/x-adpcm",
+ "layout", G_TYPE_STRING, layout, NULL);
+ auparse->sample_size = 0;
+ } else
+ goto unknown_format;
+
+ GST_DEBUG_OBJECT (auparse, "sample_size=%d", auparse->sample_size);
+
+ gst_au_parse_negotiate_srcpad (auparse, tempcaps);
+
+ GST_DEBUG_OBJECT (auparse, "offset=%" G_GINT64_FORMAT, auparse->offset);
+ gst_adapter_unmap (auparse->adapter);
+ gst_adapter_flush (auparse->adapter, auparse->offset);
+
+ gst_caps_unref (tempcaps);
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+unknown_header:
+ {
+ gst_adapter_unmap (auparse->adapter);
+ GST_ELEMENT_ERROR (auparse, STREAM, WRONG_TYPE, (NULL), (NULL));
+ return GST_FLOW_ERROR;
+ }
+unsupported_sample_rate:
+ {
+ gst_adapter_unmap (auparse->adapter);
+ GST_ELEMENT_ERROR (auparse, STREAM, FORMAT, (NULL),
+ ("Unsupported samplerate: %u", auparse->samplerate));
+ return GST_FLOW_ERROR;
+ }
+unsupported_number_of_channels:
+ {
+ gst_adapter_unmap (auparse->adapter);
+ GST_ELEMENT_ERROR (auparse, STREAM, FORMAT, (NULL),
+ ("Unsupported number of channels: %u", auparse->channels));
+ return GST_FLOW_ERROR;
+ }
+unknown_format:
+ {
+ gst_adapter_unmap (auparse->adapter);
+ GST_ELEMENT_ERROR (auparse, STREAM, FORMAT, (NULL),
+ ("Unsupported encoding: %u", auparse->encoding));
+ return GST_FLOW_ERROR;
+ }
+}
+
+#define AU_HEADER_SIZE 24
+
+static GstFlowReturn
+gst_au_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstAuParse *auparse;
+ gint avail, sendnow = 0;
+ gint64 timestamp = 0;
+ gint64 duration = 0;
+ gint64 offset = 0;
+
+ auparse = GST_AU_PARSE (parent);
+
+ GST_LOG_OBJECT (auparse, "got buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buf));
+
+ gst_adapter_push (auparse->adapter, buf);
+ buf = NULL;
+
+ /* if we haven't seen any data yet... */
+ if (!gst_pad_has_current_caps (auparse->srcpad)) {
+ if (gst_adapter_available (auparse->adapter) < AU_HEADER_SIZE) {
+ GST_DEBUG_OBJECT (auparse, "need more data to parse header");
+ ret = GST_FLOW_OK;
+ goto out;
+ }
+
+ ret = gst_au_parse_parse_header (auparse);
+ if (ret != GST_FLOW_OK)
+ goto out;
+
+ if (auparse->need_segment) {
+ gst_pad_push_event (auparse->srcpad,
+ gst_event_new_segment (&auparse->segment));
+ auparse->need_segment = FALSE;
+ }
+ }
+
+ avail = gst_adapter_available (auparse->adapter);
+
+ if (auparse->sample_size > 0) {
+ /* Ensure we push a buffer that's a multiple of the frame size downstream */
+ sendnow = avail - (avail % auparse->sample_size);
+ } else {
+ /* It's something non-trivial (such as ADPCM), we don't understand it, so
+ * just push downstream and assume it will know what to do with it */
+ sendnow = avail;
+ }
+
+ if (sendnow > 0) {
+ GstBuffer *outbuf;
+ gint64 pos;
+
+ outbuf = gst_adapter_take_buffer (auparse->adapter, sendnow);
+ outbuf = gst_buffer_make_writable (outbuf);
+
+ pos = auparse->buffer_offset - auparse->offset;
+ pos = MAX (pos, 0);
+
+ if (auparse->sample_size > 0 && auparse->samplerate > 0) {
+ gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, pos,
+ GST_FORMAT_DEFAULT, &offset);
+ gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, pos,
+ GST_FORMAT_TIME, &timestamp);
+ gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES,
+ sendnow, GST_FORMAT_TIME, &duration);
+
+ GST_BUFFER_OFFSET (outbuf) = offset;
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = duration;
+ }
+
+ auparse->buffer_offset += sendnow;
+
+ ret = gst_pad_push (auparse->srcpad, outbuf);
+ }
+
+out:
+
+ return ret;
+}
+
+static gboolean
+gst_au_parse_src_convert (GstAuParse * auparse, GstFormat src_format,
+ gint64 srcval, GstFormat dest_format, gint64 * destval)
+{
+ gboolean ret = TRUE;
+ guint samplesize, rate;
+
+ if (dest_format == src_format) {
+ *destval = srcval;
+ return TRUE;
+ }
+
+ GST_OBJECT_LOCK (auparse);
+ samplesize = auparse->sample_size;
+ rate = auparse->samplerate;
+ GST_OBJECT_UNLOCK (auparse);
+
+ if (samplesize == 0 || rate == 0) {
+ GST_LOG_OBJECT (auparse, "cannot convert, sample_size or rate unknown");
+ return FALSE;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_BYTES:
+ srcval /= samplesize;
+ /* fallthrough */
+ case GST_FORMAT_DEFAULT:{
+ switch (dest_format) {
+ case GST_FORMAT_DEFAULT:
+ *destval = srcval;
+ break;
+ case GST_FORMAT_BYTES:
+ *destval = srcval * samplesize;
+ break;
+ case GST_FORMAT_TIME:
+ *destval = gst_util_uint64_scale_int (srcval, GST_SECOND, rate);
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+ break;
+ }
+ case GST_FORMAT_TIME:{
+ switch (dest_format) {
+ case GST_FORMAT_BYTES:
+ *destval = samplesize *
+ gst_util_uint64_scale_int (srcval, rate, GST_SECOND);
+ break;
+ case GST_FORMAT_DEFAULT:
+ *destval = gst_util_uint64_scale_int (srcval, rate, GST_SECOND);
+ break;
+ default:
+ ret = FALSE;
+ break;
+ }
+ break;
+ }
+ default:{
+ ret = FALSE;
+ break;
+ }
+ }
+
+ if (!ret) {
+ GST_DEBUG_OBJECT (auparse, "could not convert from %s to %s format",
+ gst_format_get_name (src_format), gst_format_get_name (dest_format));
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_au_parse_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstAuParse *auparse;
+ gboolean ret = FALSE;
+
+ auparse = GST_AU_PARSE (parent);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:{
+ GstFormat format;
+ gint64 len, val;
+
+ gst_query_parse_duration (query, &format, NULL);
+ if (!gst_pad_peer_query_duration (auparse->sinkpad, GST_FORMAT_BYTES,
+ &len)) {
+ GST_DEBUG_OBJECT (auparse, "failed to query upstream length");
+ break;
+ }
+ GST_OBJECT_LOCK (auparse);
+ len -= auparse->offset;
+ GST_OBJECT_UNLOCK (auparse);
+
+ ret =
+ gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, len, format,
+ &val);
+
+ if (ret) {
+ gst_query_set_duration (query, format, val);
+ }
+ break;
+ }
+ case GST_QUERY_POSITION:{
+ GstFormat format;
+ gint64 pos, val;
+
+ gst_query_parse_position (query, &format, NULL);
+ if (!gst_pad_peer_query_position (auparse->sinkpad, GST_FORMAT_BYTES,
+ &pos)) {
+ GST_DEBUG_OBJECT (auparse, "failed to query upstream position");
+ break;
+ }
+ GST_OBJECT_LOCK (auparse);
+ pos -= auparse->offset;
+ GST_OBJECT_UNLOCK (auparse);
+
+ ret = gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, pos,
+ format, &val);
+
+ if (ret) {
+ gst_query_set_position (query, format, val);
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:{
+ GstFormat format;
+
+ gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
+ /* FIXME: query duration in 'format'
+ gst_query_set_seeking (query, format, TRUE, 0, duration);
+ */
+ gst_query_set_seeking (query, format, TRUE, 0, GST_CLOCK_TIME_NONE);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_au_parse_handle_seek (GstAuParse * auparse, GstEvent * event)
+{
+ GstSeekType start_type, stop_type;
+ GstSeekFlags flags;
+ GstFormat format;
+ gdouble rate;
+ gint64 start, stop;
+ gboolean res;
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (auparse, "only support seeks in TIME format");
+ return FALSE;
+ }
+
+ res = gst_au_parse_src_convert (auparse, GST_FORMAT_TIME, start,
+ GST_FORMAT_BYTES, &start);
+
+ if (stop > 0) {
+ res = gst_au_parse_src_convert (auparse, GST_FORMAT_TIME, stop,
+ GST_FORMAT_BYTES, &stop);
+ }
+
+ GST_INFO_OBJECT (auparse,
+ "seeking: %" G_GINT64_FORMAT " ... %" G_GINT64_FORMAT, start, stop);
+
+ event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, start_type, start,
+ stop_type, stop);
+ res = gst_pad_push_event (auparse->sinkpad, event);
+ return res;
+}
+
+static gboolean
+gst_au_parse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstAuParse *auparse;
+ gboolean ret = TRUE;
+
+ auparse = GST_AU_PARSE (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ /* discard, we'll come up with proper src caps */
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ gint64 start, stop, offset = 0;
+ GstSegment segment;
+
+ /* some debug output */
+ gst_event_copy_segment (event, &segment);
+ GST_DEBUG_OBJECT (auparse, "received newsegment %" GST_SEGMENT_FORMAT,
+ &segment);
+
+ start = segment.start;
+ stop = segment.stop;
+ if (auparse->sample_size > 0) {
+ if (start > 0) {
+ offset = start;
+ start -= auparse->offset;
+ start = MAX (start, 0);
+ }
+ if (stop > 0) {
+ stop -= auparse->offset;
+ stop = MAX (stop, 0);
+ }
+ gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, start,
+ GST_FORMAT_TIME, &start);
+ gst_au_parse_src_convert (auparse, GST_FORMAT_BYTES, stop,
+ GST_FORMAT_TIME, &stop);
+ }
+
+ GST_INFO_OBJECT (auparse,
+ "new segment: %" GST_TIME_FORMAT " ... %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ segment.start = segment.time = start;
+ segment.stop = stop;
+
+ gst_segment_copy_into (&segment, &auparse->segment);
+
+ if (!gst_pad_has_current_caps (auparse->srcpad)) {
+ auparse->need_segment = TRUE;
+ ret = TRUE;
+ } else {
+ auparse->need_segment = FALSE;
+ ret = gst_pad_push_event (auparse->srcpad,
+ gst_event_new_segment (&segment));
+ }
+
+ auparse->buffer_offset = offset;
+
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_EOS:
+ if (!auparse->srcpad) {
+ GST_ELEMENT_ERROR (auparse, STREAM, WRONG_TYPE,
+ ("No valid input found before end of stream"), (NULL));
+ }
+ /* fall-through */
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_au_parse_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstAuParse *auparse;
+ gboolean ret;
+
+ auparse = GST_AU_PARSE (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ ret = gst_au_parse_handle_seek (auparse, event);
+ gst_event_unref (event);
+ break;
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return ret;
+}
+
+static GstStateChangeReturn
+gst_au_parse_change_state (GstElement * element, GstStateChange transition)
+{
+ GstAuParse *auparse = GST_AU_PARSE (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_au_parse_reset (auparse);
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ if (!GST_ELEMENT_REGISTER (auparse, plugin))
+ return FALSE;
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ auparse,
+ "parses au streams", plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME,
+ GST_PACKAGE_ORIGIN)
diff --git a/gst/auparse/gstauparse.h b/gst/auparse/gstauparse.h
new file mode 100644
index 0000000000..7ac385352f
--- /dev/null
+++ b/gst/auparse/gstauparse.h
@@ -0,0 +1,77 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_AU_PARSE_H__
+#define __GST_AU_PARSE_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AU_PARSE \
+ (gst_au_parse_get_type())
+#define GST_AU_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AU_PARSE,GstAuParse))
+#define GST_AU_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AU_PARSE,GstAuParseClass))
+#define GST_IS_AU_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AU_PARSE))
+#define GST_IS_AU_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AU_PARSE))
+
+typedef struct _GstAuParse GstAuParse;
+typedef struct _GstAuParseClass GstAuParseClass;
+
+struct _GstAuParse {
+ GstElement element;
+
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ GstCaps *src_caps;
+
+ GstAdapter *adapter;
+
+ GstSegment segment;
+ gboolean need_segment;
+
+ gint64 offset; /* where sample data starts */
+ gint64 buffer_offset;
+ guint sample_size;
+ guint encoding;
+ guint samplerate;
+ guint channels;
+};
+
+struct _GstAuParseClass {
+ GstElementClass parent_class;
+};
+
+GType gst_au_parse_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (auparse);
+
+G_END_DECLS
+
+#endif /* __GST_AU_PARSE_H__ */
diff --git a/gst/auparse/meson.build b/gst/auparse/meson.build
new file mode 100644
index 0000000000..6dd335dfb5
--- /dev/null
+++ b/gst/auparse/meson.build
@@ -0,0 +1,10 @@
+# Sun/NeXT audio (.au) parser plugin
+gstauparse = library('gstauparse',
+  'gstauparse.c',
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc],
+  dependencies : [gstaudio_dep, gstbase_dep],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstauparse, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstauparse]
diff --git a/gst/autodetect/gstautoaudiosink.c b/gst/autodetect/gstautoaudiosink.c
new file mode 100644
index 0000000000..5c18972890
--- /dev/null
+++ b/gst/autodetect/gstautoaudiosink.c
@@ -0,0 +1,147 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Jan Schmidt <thaytan@noraisin.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-autoaudiosink
+ * @title: autoaudiosink
+ * @see_also: autovideosink, alsasink, osssink
+ *
+ * autoaudiosink is an audio sink that automatically detects an appropriate
+ * audio sink to use. It does so by scanning the registry for all elements
+ * that have "Sink" and "Audio" in the class field
+ * of their element information, and also have a non-zero autoplugging rank.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v -m audiotestsrc ! audioconvert ! audioresample ! autoaudiosink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstautodetectelements.h"
+#include "gstautodetect.h"
+#include "gstautoaudiosink.h"
+
+#define DEFAULT_TS_OFFSET 0
+
+/* Properties */
+enum
+{
+ PROP_0,
+ PROP_TS_OFFSET,
+};
+
+static void gst_auto_audio_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_auto_audio_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_auto_audio_sink_configure (GstAutoDetect * autodetect,
+ GstElement * kid);
+
+G_DEFINE_TYPE (GstAutoAudioSink, gst_auto_audio_sink, GST_TYPE_AUTO_DETECT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (autoaudiosink, "autoaudiosink",
+ GST_RANK_NONE, GST_TYPE_AUTO_AUDIO_SINK, autodetect_element_init (plugin));
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+/* Class setup: install the ts-offset property, wire up the configure
+ * vfunc of the GstAutoDetect base class, and register pad template and
+ * element metadata. */
+static void
+gst_auto_audio_sink_class_init (GstAutoAudioSinkClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
+  GstAutoDetectClass *aklass = GST_AUTO_DETECT_CLASS (klass);
+
+  gobject_class->set_property = gst_auto_audio_sink_set_property;
+  gobject_class->get_property = gst_auto_audio_sink_get_property;
+
+  /* called by the base class once the actual sink has been picked */
+  aklass->configure = gst_auto_audio_sink_configure;
+
+  g_object_class_install_property (gobject_class, PROP_TS_OFFSET,
+      g_param_spec_int64 ("ts-offset", "TS Offset",
+          "Timestamp offset in nanoseconds", G_MININT64, G_MAXINT64,
+          DEFAULT_TS_OFFSET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (eklass, &sink_template);
+  gst_element_class_set_static_metadata (eklass, "Auto audio sink",
+      "Sink/Audio",
+      "Wrapper audio sink for automatically detected audio sink",
+      "Jan Schmidt <thaytan@noraisin.net>");
+}
+
+/* Instance init: tell the GstAutoDetect base class we are looking for
+ * "Audio" + "Sink" elements, and set the property default. */
+static void
+gst_auto_audio_sink_init (GstAutoAudioSink * sink)
+{
+  GstAutoDetect *autodetect = GST_AUTO_DETECT (sink);
+
+  autodetect->media_klass = "Audio";
+  autodetect->flag = GST_ELEMENT_FLAG_SINK;
+
+  sink->ts_offset = DEFAULT_TS_OFFSET;
+}
+
+/* GstAutoDetect::configure vfunc: invoked by the base class right after
+ * the detected child sink (@kid) has been created, so the cached
+ * ts-offset is applied to it. */
+static void
+gst_auto_audio_sink_configure (GstAutoDetect * autodetect, GstElement * kid)
+{
+  GstAutoAudioSink *self = GST_AUTO_AUDIO_SINK (autodetect);
+
+  g_object_set (G_OBJECT (kid), "ts-offset", self->ts_offset, NULL);
+}
+
+/* Property setter: cache the value locally and, when a child element
+ * already exists, forward it straight to the child. */
+static void
+gst_auto_audio_sink_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAutoAudioSink *self = GST_AUTO_AUDIO_SINK (object);
+  GstAutoDetect *autodetect = (GstAutoDetect *) self;
+
+  if (prop_id == PROP_TS_OFFSET) {
+    self->ts_offset = g_value_get_int64 (value);
+    if (autodetect->kid != NULL)
+      g_object_set_property (G_OBJECT (autodetect->kid), pspec->name, value);
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+}
+
+/* Property getter: ts-offset is the only readable property here. */
+static void
+gst_auto_audio_sink_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAutoAudioSink *self = GST_AUTO_AUDIO_SINK (object);
+
+  if (prop_id == PROP_TS_OFFSET)
+    g_value_set_int64 (value, self->ts_offset);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/gst/autodetect/gstautoaudiosink.h b/gst/autodetect/gstautoaudiosink.h
new file mode 100644
index 0000000000..24a0b25edf
--- /dev/null
+++ b/gst/autodetect/gstautoaudiosink.h
@@ -0,0 +1,55 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUTO_AUDIO_SINK_H__
+#define __GST_AUTO_AUDIO_SINK_H__
+
+#include <gst/gst.h>
+#include "gstautodetect.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUTO_AUDIO_SINK \
+ (gst_auto_audio_sink_get_type ())
+#define GST_AUTO_AUDIO_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AUTO_AUDIO_SINK, \
+ GstAutoAudioSink))
+#define GST_AUTO_AUDIO_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AUTO_AUDIO_SINK, \
+ GstAutoAudioSinkClass))
+#define GST_IS_AUTO_AUDIO_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AUTO_AUDIO_SINK))
+#define GST_IS_AUTO_AUDIO_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AUTO_AUDIO_SINK))
+
+typedef struct _GstAutoAudioSink {
+  GstAutoDetect parent;
+
+  /* cached ts-offset value, applied to the detected child sink */
+  GstClockTimeDiff ts_offset;
+} GstAutoAudioSink;
+
+typedef struct _GstAutoAudioSinkClass {
+  GstAutoDetectClass parent_class;
+} GstAutoAudioSinkClass;
+
+GType gst_auto_audio_sink_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AUTO_AUDIO_SINK_H__ */
diff --git a/gst/autodetect/gstautoaudiosrc.c b/gst/autodetect/gstautoaudiosrc.c
new file mode 100644
index 0000000000..6c957cc0c0
--- /dev/null
+++ b/gst/autodetect/gstautoaudiosrc.c
@@ -0,0 +1,99 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Jan Schmidt <thaytan@noraisin.net>
+ * (c) 2008 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-autoaudiosrc
+ * @title: autoaudiosrc
+ * @see_also: autovideosrc, alsasrc, osssrc
+ *
+ * autoaudiosrc is an audio source that automatically detects an appropriate
+ * audio source to use. It does so by scanning the registry for all elements
+ * that have "Source" and "Audio" in the class field
+ * of their element information, and also have a non-zero autoplugging rank.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v -m autoaudiosrc ! audioconvert ! audioresample ! autoaudiosink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstautodetectelements.h"
+#include "gstautodetect.h"
+#include "gstautoaudiosrc.h"
+
+G_DEFINE_TYPE (GstAutoAudioSrc, gst_auto_audio_src, GST_TYPE_AUTO_DETECT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (autoaudiosrc, "autoaudiosrc",
+ GST_RANK_NONE, GST_TYPE_AUTO_AUDIO_SRC, autodetect_element_init (plugin));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+/* GstAutoDetect::create_fake_element vfunc: build a placeholder source.
+ * Prefer a silent, live audiotestsrc so downstream negotiation still
+ * works; fall back to fakesrc (which will fail negotiation later) only
+ * if audiotestsrc is unavailable. */
+static GstElement *
+gst_auto_audio_src_create_fake_element (GstAutoDetect * autodetect)
+{
+  GstElement *fake;
+
+  fake = gst_element_factory_make ("audiotestsrc", "fake-auto-audio-src");
+  if (fake != NULL) {
+    /* behave like a real capture source: live, but silent */
+    g_object_set (fake, "is-live", TRUE, NULL);
+    gst_util_set_object_arg (G_OBJECT (fake), "wave", "silence");
+  } else {
+    /* note: "audio" "testsrc" is deliberate literal concatenation,
+     * producing "audiotestsrc" in the message */
+    GST_ELEMENT_ERROR (autodetect, RESOURCE, NOT_FOUND,
+        ("Failed to find usable audio source element."),
+        ("Failed to find a usable audio source and couldn't create an audio"
+            "testsrc as fallback either, check your GStreamer installation."));
+    /* This will error out with not-negotiated.. */
+    fake = gst_element_factory_make ("fakesrc", "fake-auto-audio-src");
+  }
+  return fake;
+}
+
+/* Class setup: register pad template / metadata and install the custom
+ * fake-element factory used when no real source is found. */
+static void
+gst_auto_audio_src_class_init (GstAutoAudioSrcClass * klass)
+{
+  GstAutoDetectClass *autoclass = GST_AUTO_DETECT_CLASS (klass);
+  GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
+
+  gst_element_class_add_static_pad_template (eklass, &src_template);
+  gst_element_class_set_static_metadata (eklass, "Auto audio source",
+      "Source/Audio",
+      "Wrapper audio source for automatically detected audio source",
+      "Jan Schmidt <thaytan@noraisin.net>, "
+      "Stefan Kost <ensonic@users.sf.net>");
+
+  autoclass->create_fake_element = gst_auto_audio_src_create_fake_element;
+}
+
+/* Instance init: tell the GstAutoDetect base class we are looking for
+ * "Audio" + "Source" elements. */
+static void
+gst_auto_audio_src_init (GstAutoAudioSrc * src)
+{
+  GstAutoDetect *autodetect = GST_AUTO_DETECT (src);
+
+  autodetect->media_klass = "Audio";
+  autodetect->flag = GST_ELEMENT_FLAG_SOURCE;
+}
diff --git a/gst/autodetect/gstautoaudiosrc.h b/gst/autodetect/gstautoaudiosrc.h
new file mode 100644
index 0000000000..427cdaefbf
--- /dev/null
+++ b/gst/autodetect/gstautoaudiosrc.h
@@ -0,0 +1,54 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2008 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUTO_AUDIO_SRC_H__
+#define __GST_AUTO_AUDIO_SRC_H__
+
+#include <gst/gst.h>
+#include "gstautodetect.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUTO_AUDIO_SRC \
+ (gst_auto_audio_src_get_type ())
+#define GST_AUTO_AUDIO_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AUTO_AUDIO_SRC, \
+ GstAutoAudioSrc))
+#define GST_AUTO_AUDIO_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AUTO_AUDIO_SRC, \
+ GstAutoAudioSrcClass))
+#define GST_IS_AUTO_AUDIO_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AUTO_AUDIO_SRC))
+#define GST_IS_AUTO_AUDIO_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AUTO_AUDIO_SRC))
+
+typedef struct _GstAutoAudioSrc {
+ GstAutoDetect parent;
+} GstAutoAudioSrc;
+
+typedef struct _GstAutoAudioSrcClass {
+ GstAutoDetectClass parent_class;
+} GstAutoAudioSrcClass;
+
+GType gst_auto_audio_src_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AUTO_AUDIO_SRC_H__ */
diff --git a/gst/autodetect/gstautodetect.c b/gst/autodetect/gstautodetect.c
new file mode 100644
index 0000000000..864d465ee3
--- /dev/null
+++ b/gst/autodetect/gstautodetect.c
@@ -0,0 +1,482 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdio.h>
+#include <string.h>
+
+#include <gst/gst.h>
+
+#include "gstautodetect.h"
+
+#define DEFAULT_SYNC TRUE
+
+/* Properties */
+enum
+{
+ PROP_0,
+ PROP_CAPS,
+ PROP_SYNC,
+};
+
+static GstStateChangeReturn gst_auto_detect_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_auto_detect_constructed (GObject * object);
+static void gst_auto_detect_dispose (GObject * self);
+static void gst_auto_detect_clear_kid (GstAutoDetect * self);
+static void gst_auto_detect_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_auto_detect_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+#define gst_auto_detect_parent_class parent_class
+G_DEFINE_ABSTRACT_TYPE (GstAutoDetect, gst_auto_detect, GST_TYPE_BIN);
+
+/* Class setup for the abstract GstAutoDetect base: property handlers,
+ * the state-change hook that performs the actual detection, and the two
+ * common properties (filter-caps, sync). */
+static void
+gst_auto_detect_class_init (GstAutoDetectClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *eklass;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  eklass = GST_ELEMENT_CLASS (klass);
+
+  gobject_class->constructed = gst_auto_detect_constructed;
+  gobject_class->dispose = gst_auto_detect_dispose;
+  gobject_class->set_property = gst_auto_detect_set_property;
+  gobject_class->get_property = gst_auto_detect_get_property;
+
+  /* detection happens on the NULL->READY transition */
+  eklass->change_state = GST_DEBUG_FUNCPTR (gst_auto_detect_change_state);
+
+  /**
+   * GstAutoDetect:filter-caps:
+   *
+   * This property will filter out candidate sinks that can handle the specified
+   * caps. By default only elements that support uncompressed data are selected.
+   *
+   * This property can only be set before the element goes to the READY state.
+   */
+  g_object_class_install_property (gobject_class, PROP_CAPS,
+      g_param_spec_boxed ("filter-caps", "Filter caps",
+          "Filter sink candidates using these caps.", GST_TYPE_CAPS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+          GST_PARAM_DOC_SHOW_DEFAULT));
+
+  g_object_class_install_property (gobject_class, PROP_SYNC,
+      g_param_spec_boolean ("sync", "Sync",
+          "Sync on the clock", DEFAULT_SYNC,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /* abstract base type, expose it for documentation purposes */
+  gst_type_mark_as_plugin_api (GST_TYPE_AUTO_DETECT, 0);
+}
+
+/* GObject dispose: drop the child element and the caps filter. May run
+ * more than once, so everything is reset to NULL. */
+static void
+gst_auto_detect_dispose (GObject * object)
+{
+  GstAutoDetect *self = GST_AUTO_DETECT (object);
+
+  gst_auto_detect_clear_kid (self);
+
+  g_clear_pointer (&self->filter_caps, gst_caps_unref);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* Shut down and remove the current child element, if any. */
+static void
+gst_auto_detect_clear_kid (GstAutoDetect * self)
+{
+  GstElement *kid = self->kid;
+
+  if (kid == NULL)
+    return;
+
+  /* bring the child down before removal; gst_bin_remove drops the
+   * bin's reference to it */
+  gst_element_set_state (kid, GST_STATE_NULL);
+  gst_bin_remove (GST_BIN (self), kid);
+  self->kid = NULL;
+}
+
+/* Default placeholder factory: builds "fakesink"/"fakesrc" plus an
+ * instance name like "fake-audio-sink" from the lower-case class
+ * strings set up in constructed(). */
+static GstElement *
+gst_auto_detect_create_fake_element_default (GstAutoDetect * self)
+{
+  GstElement *fake;
+  gchar dummy_factory[10], dummy_name[20];
+
+  /* Use bounded g_snprintf instead of sprintf so a future, longer
+   * type/media class string cannot silently overflow the fixed-size
+   * buffers (today "fakesink" / "fake-video-sink" fit exactly). */
+  g_snprintf (dummy_factory, sizeof (dummy_factory), "fake%s",
+      self->type_klass_lc);
+  g_snprintf (dummy_name, sizeof (dummy_name), "fake-%s-%s",
+      self->media_klass_lc, self->type_klass_lc);
+  fake = gst_element_factory_make (dummy_factory, dummy_name);
+  /* fakesink/fakesrc both expose a "sync" property */
+  g_object_set (fake, "sync", self->sync, NULL);
+
+  return fake;
+}
+
+/* Create a placeholder element, preferring the subclass hook (e.g. a
+ * silent audiotestsrc) over the generic fakesink/fakesrc default. */
+static GstElement *
+gst_auto_detect_create_fake_element (GstAutoDetect * self)
+{
+  GstAutoDetectClass *klass = GST_AUTO_DETECT_GET_CLASS (self);
+
+  return (klass->create_fake_element != NULL)
+      ? klass->create_fake_element (self)
+      : gst_auto_detect_create_fake_element_default (self);
+}
+
+/* Point our ghost pad at the child's static pad (named "sink"/"src"
+ * after the direction). Returns FALSE if the pad is missing or the
+ * target could not be set. */
+static gboolean
+gst_auto_detect_attach_ghost_pad (GstAutoDetect * self)
+{
+  GstPad *target;
+  gboolean res;
+
+  target = gst_element_get_static_pad (self->kid, self->type_klass_lc);
+  /* guard: a kid without the expected static pad would otherwise make
+   * gst_object_unref (NULL) emit a critical warning */
+  if (target == NULL)
+    return FALSE;
+
+  res = gst_ghost_pad_set_target (GST_GHOST_PAD (self->pad), target);
+  gst_object_unref (target);
+
+  return res;
+}
+
+/* Hack to make initial linking work; ideally, this would work even when
+ * no target has been assigned to the ghostpad yet. */
+static void
+gst_auto_detect_reset (GstAutoDetect * self)
+{
+  /* drop whatever child we had (real or placeholder) */
+  gst_auto_detect_clear_kid (self);
+
+  /* placeholder element */
+  self->kid = gst_auto_detect_create_fake_element (self);
+  gst_bin_add (GST_BIN (self), self->kid);
+
+  /* retarget the ghost pad at the placeholder's pad */
+  gst_auto_detect_attach_ghost_pad (self);
+}
+
+static GstStaticCaps raw_audio_caps = GST_STATIC_CAPS ("audio/x-raw");
+static GstStaticCaps raw_video_caps = GST_STATIC_CAPS ("video/x-raw");
+
+/* Instance init: only the property default; the rest of the setup
+ * happens in constructed(), after subclasses have filled in
+ * media_klass and flag. */
+static void
+gst_auto_detect_init (GstAutoDetect * self)
+{
+  self->sync = DEFAULT_SYNC;
+}
+
+/* GObject constructed: runs after all instance-init functions, so the
+ * subclass has already set media_klass ("Audio"/"Video") and flag
+ * (SINK/SOURCE). Derives the lower-case helper strings, the default
+ * raw-caps filter, the ghost pad, and the initial placeholder child. */
+static void
+gst_auto_detect_constructed (GObject * object)
+{
+  GstAutoDetect *self = GST_AUTO_DETECT (object);
+  gboolean is_audio;
+
+  if (G_OBJECT_CLASS (parent_class)->constructed)
+    G_OBJECT_CLASS (parent_class)->constructed (object);
+
+  is_audio = !g_strcmp0 (self->media_klass, "Audio");
+  self->type_klass = (self->flag == GST_ELEMENT_FLAG_SINK) ? "Sink" : "Source";
+  self->type_klass_lc = (self->flag == GST_ELEMENT_FLAG_SINK) ? "sink" : "src";
+  self->media_klass_lc = is_audio ? "audio" : "video";
+  /* set the default raw caps */
+  self->filter_caps = gst_static_caps_get (is_audio ? &raw_audio_caps :
+      &raw_video_caps);
+
+  /* ghost pad starts targetless; gst_auto_detect_reset() attaches it to
+   * the placeholder child right below */
+  self->pad = gst_ghost_pad_new_no_target (self->type_klass_lc,
+      (self->flag == GST_ELEMENT_FLAG_SINK) ? GST_PAD_SINK : GST_PAD_SRC);
+  gst_element_add_pad (GST_ELEMENT (self), self->pad);
+
+  gst_auto_detect_reset (self);
+
+  /* mark element type */
+  GST_OBJECT_FLAG_SET (self, self->flag);
+  /* keep the bin from recomputing source/sink flags from its children */
+  gst_bin_set_suppressed_flags (GST_BIN (self),
+      GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK);
+}
+
+/* Registry filter: keep only element factories whose klass metadata
+ * mentions both the direction ("Sink"/"Source") and the media type
+ * ("Audio"/"Video"), and that opted into autoplugging via their rank. */
+static gboolean
+gst_auto_detect_factory_filter (GstPluginFeature * feature, gpointer data)
+{
+  GstAutoDetect *self = (GstAutoDetect *) data;
+  const gchar *klass;
+
+  /* only element factories are of interest */
+  if (!GST_IS_ELEMENT_FACTORY (feature))
+    return FALSE;
+
+  klass = gst_element_factory_get_metadata (GST_ELEMENT_FACTORY (feature),
+      GST_ELEMENT_METADATA_KLASS);
+  if (strstr (klass, self->type_klass) == NULL
+      || strstr (klass, self->media_klass) == NULL)
+    return FALSE;
+
+  /* only select elements with autoplugging rank */
+  return gst_plugin_feature_get_rank (feature) >= GST_RANK_MARGINAL;
+}
+
+/* Instantiate @factory under a descriptive name such as
+ * "autoaudiosink0-actual-sink-alsa", derived from the factory name with
+ * the "gst" prefix and "sink"/"src" suffix stripped. */
+static GstElement *
+create_element_with_pretty_name (GstAutoDetect * self,
+    GstElementFactory * factory)
+{
+  GstElement *element;
+  gchar *name, *marker;
+
+  marker = g_strdup (GST_OBJECT_NAME (factory));
+  if (g_str_has_suffix (marker, self->type_klass_lc))
+    /* Strip the suffix by its actual length: "src" is only 3 chars, so
+     * the previous hard-coded 4 chopped one extra character off source
+     * factory names (e.g. "pulsesrc" -> "puls" instead of "pulse"). */
+    marker[strlen (marker) - strlen (self->type_klass_lc)] = '\0';
+  if (g_str_has_prefix (marker, "gst"))
+    memmove (marker, marker + 3, strlen (marker + 3) + 1);
+  name = g_strdup_printf ("%s-actual-%s-%s", GST_OBJECT_NAME (self),
+      self->type_klass_lc, marker);
+  g_free (marker);
+
+  element = gst_element_factory_create (factory, name);
+  g_free (name);
+
+  return element;
+}
+
+/* Walk all candidate factories (highest rank first) and return the
+ * first element that matches the filter caps and reaches READY. If none
+ * works, post a warning (forwarding the first collected error, if any)
+ * and return a placeholder element instead, so pipelines keep running
+ * e.g. in test environments without hardware. The returned element is
+ * not yet in the bin; ownership passes to the caller. */
+static GstElement *
+gst_auto_detect_find_best (GstAutoDetect * self)
+{
+  GList *list, *item;
+  GstElement *choice = NULL;
+  GstMessage *message = NULL;
+  GSList *errors = NULL;
+  GstBus *bus = gst_bus_new ();
+  GstPad *el_pad = NULL;
+  GstCaps *el_caps = NULL;
+  gboolean no_match = TRUE;
+
+  /* We don't treat sound server sinks special. Our policy is that sound
+   * server sinks that have a rank must not auto-spawn a daemon under any
+   * circumstances, so there's nothing for us to worry about here */
+  list = gst_registry_feature_filter (gst_registry_get (),
+      (GstPluginFeatureFilter) gst_auto_detect_factory_filter, FALSE, self);
+  list =
+      g_list_sort (list, (GCompareFunc) gst_plugin_feature_rank_compare_func);
+
+  GST_LOG_OBJECT (self, "Trying to find usable %s elements ...",
+      self->media_klass_lc);
+
+  for (item = list; item != NULL; item = item->next) {
+    GstElementFactory *f = GST_ELEMENT_FACTORY (item->data);
+    GstElement *el;
+
+    if ((el = create_element_with_pretty_name (self, f))) {
+      GstStateChangeReturn ret;
+
+      GST_DEBUG_OBJECT (self, "Testing %s", GST_OBJECT_NAME (f));
+
+      /* If autodetect has been provided with filter caps,
+       * accept only elements that match with the filter caps */
+      if (self->filter_caps) {
+        el_pad = gst_element_get_static_pad (el, self->type_klass_lc);
+        el_caps = gst_pad_query_caps (el_pad, NULL);
+        gst_object_unref (el_pad);
+        GST_DEBUG_OBJECT (self,
+            "Checking caps: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
+            self->filter_caps, el_caps);
+        no_match = !gst_caps_can_intersect (self->filter_caps, el_caps);
+        gst_caps_unref (el_caps);
+
+        if (no_match) {
+          GST_DEBUG_OBJECT (self, "Incompatible caps");
+          gst_object_unref (el);
+          continue;
+        } else {
+          GST_DEBUG_OBJECT (self, "Found compatible caps");
+        }
+      }
+
+      /* the real test: can this element actually reach READY
+       * (i.e. open the device)? */
+      gst_element_set_bus (el, bus);
+      ret = gst_element_set_state (el, GST_STATE_READY);
+      if (ret == GST_STATE_CHANGE_SUCCESS) {
+        GST_DEBUG_OBJECT (self, "This worked!");
+        /* winner is handed back in NULL state; detect() brings it up */
+        gst_element_set_state (el, GST_STATE_NULL);
+        choice = el;
+        break;
+      }
+
+      /* collect all error messages */
+      while ((message = gst_bus_pop_filtered (bus, GST_MESSAGE_ERROR))) {
+        GST_DEBUG_OBJECT (self, "error message %" GST_PTR_FORMAT, message);
+        errors = g_slist_append (errors, message);
+      }
+
+      gst_element_set_state (el, GST_STATE_NULL);
+      gst_object_unref (el);
+    }
+  }
+
+  GST_DEBUG_OBJECT (self, "done trying");
+  if (!choice) {
+    /* We post a warning and plug a fake-element. This is convenient for running
+     * tests without requiring hardware src/sinks. */
+    if (errors) {
+      GError *err = NULL;
+      gchar *dbg = NULL;
+
+      /* FIXME: we forward the first message for now; but later on it might make
+       * sense to forward all so that apps can actually analyse them. */
+      gst_message_parse_error (GST_MESSAGE (errors->data), &err, &dbg);
+      gst_element_post_message (GST_ELEMENT_CAST (self),
+          gst_message_new_warning (GST_OBJECT_CAST (self), err, dbg));
+      g_error_free (err);
+      g_free (dbg);
+    } else {
+      /* send warning message to application and use a fakesrc */
+      GST_ELEMENT_WARNING (self, RESOURCE, NOT_FOUND, (NULL),
+          ("Failed to find a usable %s %s", self->media_klass_lc,
+              self->type_klass_lc));
+    }
+    choice = gst_auto_detect_create_fake_element (self);
+    gst_element_set_state (choice, GST_STATE_READY);
+  }
+  gst_object_unref (bus);
+  gst_plugin_feature_list_free (list);
+  g_slist_foreach (errors, (GFunc) gst_mini_object_unref, NULL);
+  g_slist_free (errors);
+
+  return choice;
+}
+
+/* Replace the placeholder child with the best detected element: find
+ * it, propagate the sync property (if the element has one), let the
+ * subclass configure it, add it to the bin, match its state to ours and
+ * retarget the ghost pad. Returns FALSE (with an element error posted)
+ * on failure. */
+static gboolean
+gst_auto_detect_detect (GstAutoDetect * self)
+{
+  GstElement *kid;
+  GstAutoDetectClass *klass = GST_AUTO_DETECT_GET_CLASS (self);
+
+  gst_auto_detect_clear_kid (self);
+
+  /* find element */
+  GST_DEBUG_OBJECT (self, "Creating new kid");
+  if (!(kid = gst_auto_detect_find_best (self)))
+    goto no_sink;
+
+  /* only forward "sync" if the chosen element actually has it */
+  self->has_sync =
+      g_object_class_find_property (G_OBJECT_GET_CLASS (kid), "sync") != NULL;
+  if (self->has_sync)
+    g_object_set (G_OBJECT (kid), "sync", self->sync, NULL);
+  /* subclass hook, e.g. autoaudiosink forwards ts-offset here */
+  if (klass->configure) {
+    klass->configure (self, kid);
+  }
+
+  self->kid = kid;
+
+  gst_bin_add (GST_BIN (self), kid);
+
+  /* Ensure the child is brought up to the right state to match the parent. */
+  if (GST_STATE (self->kid) < GST_STATE (self))
+    gst_element_set_state (self->kid, GST_STATE (self));
+
+  /* attach ghost pad */
+  GST_DEBUG_OBJECT (self, "Re-assigning ghostpad");
+  if (!gst_auto_detect_attach_ghost_pad (self))
+    goto target_failed;
+
+  GST_DEBUG_OBJECT (self, "done changing auto %s %s", self->media_klass_lc,
+      self->type_klass_lc);
+
+  return TRUE;
+
+  /* ERRORS */
+no_sink:
+  {
+    /* use the actual media/direction strings instead of hard-coding
+     * "audio sink", which was wrong for the video and source variants */
+    GST_ELEMENT_ERROR (self, LIBRARY, INIT, (NULL),
+        ("Failed to find a supported %s %s", self->media_klass_lc,
+            self->type_klass_lc));
+    return FALSE;
+  }
+target_failed:
+  {
+    GST_ELEMENT_ERROR (self, LIBRARY, INIT, (NULL),
+        ("Failed to set target pad"));
+    return FALSE;
+  }
+}
+
+/* State-change hook: pick the real child element before going to READY,
+ * and swap back to a placeholder when returning to NULL. */
+static GstStateChangeReturn
+gst_auto_detect_change_state (GstElement * element, GstStateChange transition)
+{
+  GstAutoDetect *self = GST_AUTO_DETECT (element);
+  GstStateChangeReturn result;
+
+  if (transition == GST_STATE_CHANGE_NULL_TO_READY
+      && !gst_auto_detect_detect (self))
+    return GST_STATE_CHANGE_FAILURE;
+
+  result =
+      GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (result == GST_STATE_CHANGE_FAILURE)
+    return result;
+
+  if (transition == GST_STATE_CHANGE_READY_TO_NULL)
+    gst_auto_detect_reset (self);
+
+  return result;
+}
+
+/* Property setter for filter-caps and sync; sync is forwarded to the
+ * child when it exists and exposes the property. */
+static void
+gst_auto_detect_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAutoDetect *self = GST_AUTO_DETECT (object);
+
+  switch (prop_id) {
+    case PROP_CAPS:{
+      GstCaps *old_caps = self->filter_caps;
+
+      self->filter_caps = gst_caps_copy (gst_value_get_caps (value));
+      if (old_caps != NULL)
+        gst_caps_unref (old_caps);
+      break;
+    }
+    case PROP_SYNC:
+      self->sync = g_value_get_boolean (value);
+      if (self->kid != NULL && self->has_sync)
+        g_object_set_property (G_OBJECT (self->kid), pspec->name, value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Property getter for filter-caps and sync. */
+static void
+gst_auto_detect_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAutoDetect *self = GST_AUTO_DETECT (object);
+
+  if (prop_id == PROP_CAPS)
+    gst_value_set_caps (value, self->filter_caps);
+  else if (prop_id == PROP_SYNC)
+    g_value_set_boolean (value, self->sync);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/gst/autodetect/gstautodetect.h b/gst/autodetect/gstautodetect.h
new file mode 100644
index 0000000000..09592ff564
--- /dev/null
+++ b/gst/autodetect/gstautodetect.h
@@ -0,0 +1,72 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUTO_DETECT_H__
+#define __GST_AUTO_DETECT_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUTO_DETECT (gst_auto_detect_get_type ())
+#define GST_AUTO_DETECT(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AUTO_DETECT, GstAutoDetect))
+#define GST_AUTO_DETECT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AUTO_DETECT, GstAutoDetectClass))
+#define GST_IS_AUTO_DETECT(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AUTO_DETECT))
+#define GST_IS_AUTO_DETECT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AUTO_DETECT))
+#define GST_AUTO_DETECT_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_AUTO_DETECT, GstAutoDetectClass))
+
+/* Abstract base for the auto{audio,video}{sink,src} wrapper bins. */
+typedef struct _GstAutoDetect {
+  GstBin parent;
+
+  /* configuration for subclasses (must be set in the subclass init) */
+  const gchar *media_klass; /* Audio/Video/... */
+  GstElementFlags flag; /* GST_ELEMENT_FLAG_{SINK/SOURCE} */
+
+  /* explicit pointers to stuff used */
+  GstPad *pad;              /* ghost pad proxying the child's pad */
+  GstCaps *filter_caps;     /* "filter-caps" property value */
+  gboolean sync;            /* "sync" property value */
+
+  /* < private > */
+  GstElement *kid;          /* current child: detected element or placeholder */
+  gboolean has_sync;        /* TRUE if the kid has a "sync" property */
+  const gchar *type_klass; /* Source/Sink */
+  const gchar *media_klass_lc, *type_klass_lc; /* lower case versions */
+
+} GstAutoDetect;
+
+typedef struct _GstAutoDetectClass {
+  GstBinClass parent_class;
+
+  /*< private >*/
+  /* virtual methods for subclasses */
+  void (*configure)(GstAutoDetect *self, GstElement *kid);
+  GstElement * (*create_fake_element) (GstAutoDetect * autodetect);
+} GstAutoDetectClass;
+
+GType gst_auto_detect_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AUTO_DETECT_H__ */
diff --git a/gst/autodetect/gstautodetectelement.c b/gst/autodetect/gstautodetectelement.c
new file mode 100644
index 0000000000..ad736e51eb
--- /dev/null
+++ b/gst/autodetect/gstautodetectelement.c
@@ -0,0 +1,42 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdio.h>
+#include <string.h>
+
+#include <gst/gst.h>
+
+#include "gstautodetectelements.h"
+
+GST_DEBUG_CATEGORY (autodetect_debug);
+
+/* Shared one-time initialisation for all autodetect elements: creates
+ * the common debug category exactly once, regardless of which element
+ * gets registered first (g_once_init guards against races). */
+void
+autodetect_element_init (GstPlugin * plugin)
+{
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+    GST_DEBUG_CATEGORY_INIT (autodetect_debug, "autodetect", 0,
+        "Autodetection audio/video output wrapper elements");
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/gst/autodetect/gstautodetectelements.h b/gst/autodetect/gstautodetectelements.h
new file mode 100644
index 0000000000..7d51d38a89
--- /dev/null
+++ b/gst/autodetect/gstautodetectelements.h
@@ -0,0 +1,41 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUTO_DETECT_ELEMENTS_H__
+#define __GST_AUTO_DETECT_ELEMENTS_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+void autodetect_element_init (GstPlugin * plugin);
+
+GST_ELEMENT_REGISTER_DECLARE (autovideosink);
+GST_ELEMENT_REGISTER_DECLARE (autovideosrc);
+GST_ELEMENT_REGISTER_DECLARE (autoaudiosink);
+GST_ELEMENT_REGISTER_DECLARE (autoaudiosrc);
+
+GST_DEBUG_CATEGORY_EXTERN (autodetect_debug);
+#define GST_CAT_DEFAULT autodetect_debug
+
+G_END_DECLS
+
+#endif /* __GST_AUTO_DETECT_ELEMENTS_H__ */
diff --git a/gst/autodetect/gstautodetectplugin.c b/gst/autodetect/gstautodetectplugin.c
new file mode 100644
index 0000000000..c925e5a289
--- /dev/null
+++ b/gst/autodetect/gstautodetectplugin.c
@@ -0,0 +1,45 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "gstautodetectelements.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (autovideosink, plugin);
+ ret |= GST_ELEMENT_REGISTER (autovideosrc, plugin);
+ ret |= GST_ELEMENT_REGISTER (autoaudiosink, plugin);
+ ret |= GST_ELEMENT_REGISTER (autoaudiosrc, plugin);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ autodetect,
+ "Plugin contains auto-detection plugins for video/audio in- and outputs",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/autodetect/gstautovideosink.c b/gst/autodetect/gstautovideosink.c
new file mode 100644
index 0000000000..b3eaf1f365
--- /dev/null
+++ b/gst/autodetect/gstautovideosink.c
@@ -0,0 +1,147 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Jan Schmidt <thaytan@noraisin.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-autovideosink
+ * @title: autovideosink
+ * @see_also: autoaudiosink, ximagesink, xvimagesink, sdlvideosink
+ *
+ * autovideosink is a video sink that automatically detects an appropriate
+ * video sink to use. It does so by scanning the registry for all elements
+ * that have "Sink" and "Video" in the class field
+ * of their element information, and also have a non-zero autoplugging rank.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v -m videotestsrc ! autovideosink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstautodetectelements.h"
+#include "gstautodetect.h"
+#include "gstautovideosink.h"
+
+#define DEFAULT_TS_OFFSET 0
+
+/* Properties */
+enum
+{
+ PROP_0,
+ PROP_TS_OFFSET,
+};
+
+static void gst_auto_video_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_auto_video_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_auto_video_sink_configure (GstAutoDetect * autodetect,
+ GstElement * kid);
+
+G_DEFINE_TYPE (GstAutoVideoSink, gst_auto_video_sink, GST_TYPE_AUTO_DETECT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (autovideosink, "autovideosink",
+ GST_RANK_NONE, GST_TYPE_AUTO_VIDEO_SINK, autodetect_element_init (plugin));
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static void
+gst_auto_video_sink_class_init (GstAutoVideoSinkClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
+ GstAutoDetectClass *aklass = GST_AUTO_DETECT_CLASS (klass);
+
+ gobject_class->set_property = gst_auto_video_sink_set_property;
+ gobject_class->get_property = gst_auto_video_sink_get_property;
+
+ aklass->configure = gst_auto_video_sink_configure;
+
+ g_object_class_install_property (gobject_class, PROP_TS_OFFSET,
+ g_param_spec_int64 ("ts-offset", "TS Offset",
+ "Timestamp offset in nanoseconds", G_MININT64, G_MAXINT64,
+ DEFAULT_TS_OFFSET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (eklass, &sink_template);
+ gst_element_class_set_static_metadata (eklass, "Auto video sink",
+ "Sink/Video",
+ "Wrapper video sink for automatically detected video sink",
+ "Jan Schmidt <thaytan@noraisin.net>");
+}
+
+static void
+gst_auto_video_sink_init (GstAutoVideoSink * sink)
+{
+ GstAutoDetect *autodetect = GST_AUTO_DETECT (sink);
+
+ autodetect->media_klass = "Video";
+ autodetect->flag = GST_ELEMENT_FLAG_SINK;
+
+ sink->ts_offset = DEFAULT_TS_OFFSET;
+}
+
+static void
+gst_auto_video_sink_configure (GstAutoDetect * autodetect, GstElement * kid)
+{
+ GstAutoVideoSink *self = GST_AUTO_VIDEO_SINK (autodetect);
+
+ g_object_set (G_OBJECT (kid), "ts-offset", self->ts_offset, NULL);
+}
+
+static void
+gst_auto_video_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAutoVideoSink *sink = GST_AUTO_VIDEO_SINK (object);
+ GstAutoDetect *autodetect = (GstAutoDetect *) sink;
+
+ switch (prop_id) {
+ case PROP_TS_OFFSET:
+ sink->ts_offset = g_value_get_int64 (value);
+ if (autodetect->kid)
+ g_object_set_property (G_OBJECT (autodetect->kid), pspec->name, value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_auto_video_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAutoVideoSink *sink = GST_AUTO_VIDEO_SINK (object);
+
+ switch (prop_id) {
+ case PROP_TS_OFFSET:
+ g_value_set_int64 (value, sink->ts_offset);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/autodetect/gstautovideosink.h b/gst/autodetect/gstautovideosink.h
new file mode 100644
index 0000000000..5b7db40fec
--- /dev/null
+++ b/gst/autodetect/gstautovideosink.h
@@ -0,0 +1,55 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUTO_VIDEO_SINK_H__
+#define __GST_AUTO_VIDEO_SINK_H__
+
+#include <gst/gst.h>
+#include "gstautodetect.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUTO_VIDEO_SINK \
+ (gst_auto_video_sink_get_type ())
+#define GST_AUTO_VIDEO_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AUTO_VIDEO_SINK, \
+ GstAutoVideoSink))
+#define GST_AUTO_VIDEO_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AUTO_VIDEO_SINK, \
+ GstAutoVideoSinkClass))
+#define GST_IS_AUTO_VIDEO_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AUTO_VIDEO_SINK))
+#define GST_IS_AUTO_VIDEO_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AUTO_VIDEO_SINK))
+
+typedef struct _GstAutoVideoSink {
+ GstAutoDetect parent;
+
+ GstClockTimeDiff ts_offset;
+} GstAutoVideoSink;
+
+typedef struct _GstAutoVideoSinkClass {
+ GstAutoDetectClass parent_class;
+} GstAutoVideoSinkClass;
+
+GType gst_auto_video_sink_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AUTO_VIDEO_SINK_H__ */
diff --git a/gst/autodetect/gstautovideosrc.c b/gst/autodetect/gstautovideosrc.c
new file mode 100644
index 0000000000..00d085567e
--- /dev/null
+++ b/gst/autodetect/gstautovideosrc.c
@@ -0,0 +1,98 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Jan Schmidt <thaytan@noraisin.net>
+ * (c) 2008 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-autovideosrc
+ * @title: autovideosrc
+ * @see_also: autoaudiosrc, v4l2src, v4lsrc
+ *
+ * autovideosrc is a video src that automatically detects an appropriate
+ * video source to use. It does so by scanning the registry for all elements
+ * that have "Source" and "Video" in the class field
+ * of their element information, and also have a non-zero autoplugging rank.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v -m autovideosrc ! xvimagesink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstautodetectelements.h"
+#include "gstautodetect.h"
+#include "gstautovideosrc.h"
+
+G_DEFINE_TYPE (GstAutoVideoSrc, gst_auto_video_src, GST_TYPE_AUTO_DETECT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (autovideosrc, "autovideosrc",
+ GST_RANK_NONE, GST_TYPE_AUTO_VIDEO_SRC, autodetect_element_init (plugin));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstElement *
+gst_auto_video_src_create_fake_element (GstAutoDetect * autodetect)
+{
+ GstElement *fake;
+
+ fake = gst_element_factory_make ("videotestsrc", "fake-auto-video-src");
+ if (fake != NULL) {
+ g_object_set (fake, "is-live", TRUE, NULL);
+ } else {
+ GST_ELEMENT_ERROR (autodetect, RESOURCE, NOT_FOUND,
+ ("Failed to find usable video source element."),
+ ("Failed to find a usable video source and couldn't create a video"
+ "testsrc as fallback either, check your GStreamer installation."));
+ /* This will error out with not-negotiated.. */
+ fake = gst_element_factory_make ("fakesrc", "fake-auto-video-src");
+ }
+ return fake;
+}
+
+static void
+gst_auto_video_src_class_init (GstAutoVideoSrcClass * klass)
+{
+ GstAutoDetectClass *autoclass = GST_AUTO_DETECT_CLASS (klass);
+ GstElementClass *eklass = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_add_static_pad_template (eklass, &src_template);
+ gst_element_class_set_static_metadata (eklass, "Auto video source",
+ "Source/Video",
+ "Wrapper video source for automatically detected video source",
+ "Jan Schmidt <thaytan@noraisin.net>, "
+ "Stefan Kost <ensonic@users.sf.net>");
+
+ autoclass->create_fake_element = gst_auto_video_src_create_fake_element;
+}
+
+static void
+gst_auto_video_src_init (GstAutoVideoSrc * src)
+{
+ GstAutoDetect *autodetect = GST_AUTO_DETECT (src);
+
+ autodetect->media_klass = "Video";
+ autodetect->flag = GST_ELEMENT_FLAG_SOURCE;
+}
diff --git a/gst/autodetect/gstautovideosrc.h b/gst/autodetect/gstautovideosrc.h
new file mode 100644
index 0000000000..e76cc7240f
--- /dev/null
+++ b/gst/autodetect/gstautovideosrc.h
@@ -0,0 +1,54 @@
+/* GStreamer
+ * (c) 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2008 Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AUTO_VIDEO_SRC_H__
+#define __GST_AUTO_VIDEO_SRC_H__
+
+#include <gst/gst.h>
+#include "gstautodetect.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AUTO_VIDEO_SRC \
+ (gst_auto_video_src_get_type ())
+#define GST_AUTO_VIDEO_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AUTO_VIDEO_SRC, \
+ GstAutoVideoSrc))
+#define GST_AUTO_VIDEO_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AUTO_VIDEO_SRC, \
+ GstAutoVideoSrcClass))
+#define GST_IS_AUTO_VIDEO_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AUTO_VIDEO_SRC))
+#define GST_IS_AUTO_VIDEO_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AUTO_VIDEO_SRC))
+
+typedef struct _GstAutoVideoSrc {
+ GstAutoDetect parent;
+} GstAutoVideoSrc;
+
+typedef struct _GstAutoVideoSrcClass {
+ GstAutoDetectClass parent_class;
+} GstAutoVideoSrcClass;
+
+GType gst_auto_video_src_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AUTO_VIDEO_SRC_H__ */
diff --git a/gst/autodetect/meson.build b/gst/autodetect/meson.build
new file mode 100644
index 0000000000..2cbe424678
--- /dev/null
+++ b/gst/autodetect/meson.build
@@ -0,0 +1,20 @@
+autodetect_sources = [
+ 'gstautoaudiosink.c',
+ 'gstautoaudiosrc.c',
+ 'gstautodetect.c',
+ 'gstautodetectplugin.c',
+ 'gstautodetectelement.c',
+ 'gstautovideosink.c',
+ 'gstautovideosrc.c',
+]
+
+gstautodetect = library('gstautodetect',
+ autodetect_sources,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gst_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstautodetect, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstautodetect]
diff --git a/gst/avi/README b/gst/avi/README
new file mode 100644
index 0000000000..69a08a9814
--- /dev/null
+++ b/gst/avi/README
@@ -0,0 +1,72 @@
+The avi decoder plugins
+-----------------------
+
+The avi decoder consists of a set of gstreamer plugins:
+
+ - demuxer (avidemux)
+ - avi to gstreamer type converter (avitypes)
+ - windows dlls wrappers.
+
+the avidecoder element uses the above plugins to perform the avi
+decoding. It is constructed as a custom bin which initially only has
+the demuxer element in it. The demuxer has a set of padtemplates for
+raw audio and video.
+
+ (------------------------------------)
+ ! avidecoder !
+ ! (video/raw)...
+ ! (----------) !
+ ! ! demuxer (video/x-msvideo, auds)..
+ ! ! ! !
+ ! -src ! !
+ ! / ! (video/x-msvideo, vids)..
+ - src ! ! !
+ ! (----------) (audio/raw)...
+ ! !
+ (------------------------------------)
+
+the demuxer has a set of padtemplates for the raw avi header properties.
+
+The avi decoder will act on the new_pad signal of the demuxer element
+and will attach an avitype plugin to the new pad. Caps negotiation will
+convert the raw avi caps to the gstreamer caps. If the src pad of the
+avitypes plugin is compatible with the avidecoder padtemplate, the
+avitype pad is ghosted to the avidecoder bin, this is the case where no
+codec is needed (for raw PCM samples, for example).
+
+When the avitypes caps are not compatible with one of the avidecoder
+templates, a static autoplugger is used the find an element to connect
+the demuxers pad to the decoders padtemplate.
+
+When no element could be found, a windec plugin is attached to the
+demuxers pad and the avitypes plugin is removed from the decoder.
+
+
+example:
+--------
+
+ An avidecoder that has a video pad (decoded with windows dlls) and an
+ audio pad (raw PCM).
+
+ (----------------------------------------------------------------)
+ ! avidecoder (--------) (------) !
+ ! !avitypes! !windec! /-- (video/raw)
+ ! (----------) /-sink src--sink src ----- !
+ ! !demuxer (video/x-msvideo, ! ! ! !
+ ! ! ! auds).. (--------) (------) !
+ ! -sink ! (--------) !
+ ! / ! (video/x-..,!avitypes! !
+ -sink ! ! vids).. ! ! !
+ ! (----------) \-sink src -------------------- (audio/raw)
+ ! (--------) !
+ (----------------------------------------------------------------)
+
+
+
+TODO
+----
+
+automatically generate the padtemplates from all possible avi types
+found in the registry.
+
+
diff --git a/gst/avi/avi-ids.h b/gst/avi/avi-ids.h
new file mode 100644
index 0000000000..9c09803342
--- /dev/null
+++ b/gst/avi/avi-ids.h
@@ -0,0 +1,79 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AVI_H__
+#define __GST_AVI_H__
+
+#include <gst/gst.h>
+
+typedef struct _gst_riff_avih {
+ guint32 us_frame; /* microsec per frame */
+ guint32 max_bps; /* byte/s overall */
+ guint32 pad_gran; /* pad_granularity */
+ guint32 flags;
+/* flags values */
+#define GST_RIFF_AVIH_HASINDEX 0x00000010 /* has idx1 chunk */
+#define GST_RIFF_AVIH_MUSTUSEINDEX 0x00000020 /* must use idx1 chunk to determine order */
+#define GST_RIFF_AVIH_ISINTERLEAVED 0x00000100 /* AVI file is interleaved */
+#define GST_RIFF_AVIH_TRUSTCKTYPE 0x00000800 /* Use CKType to find key frames */
+#define GST_RIFF_AVIH_WASCAPTUREFILE 0x00010000 /* specially allocated used for capturing real time video */
+#define GST_RIFF_AVIH_COPYRIGHTED 0x00020000 /* contains copyrighted data */
+ guint32 tot_frames; /* # of frames (all) */
+ guint32 init_frames; /* initial frames (???) */
+ guint32 streams;
+ guint32 bufsize; /* suggested buffer size */
+ guint32 width;
+ guint32 height;
+ guint32 scale;
+ guint32 rate;
+ guint32 start;
+ guint32 length;
+} gst_riff_avih;
+
+/* vprp (video properties) ODML header */
+/* see ODML spec for some/more explanation */
+#define GST_RIFF_TAG_vprp GST_MAKE_FOURCC ('v','p','r','p')
+#define GST_RIFF_DXSB GST_MAKE_FOURCC ('D','X','S','B')
+#define GST_RIFF_VPRP_VIDEO_FIELDS (2)
+
+typedef struct _gst_riff_vprp_video_field_desc {
+ guint32 compressed_bm_height;
+ guint32 compressed_bm_width;
+ guint32 valid_bm_height;
+ guint32 valid_bm_width;
+ guint32 valid_bm_x_offset;
+ guint32 valid_bm_y_offset;
+ guint32 video_x_t_offset;
+ guint32 video_y_start;
+} gst_riff_vprp_video_field_desc;
+
+typedef struct _gst_riff_vprp {
+ guint32 format_token; /* whether fields defined by standard */
+ guint32 standard; /* video display standard, UNKNOWN, PAL, etc */
+ guint32 vert_rate; /* vertical refresh rate */
+ guint32 hor_t_total; /* width */
+ guint32 vert_lines; /* height */
+ guint32 aspect; /* aspect ratio high word:low word */
+ guint32 width; /* active width */
+ guint32 height; /* active height */
+ guint32 fields; /* field count */
+ gst_riff_vprp_video_field_desc field_info[GST_RIFF_VPRP_VIDEO_FIELDS];
+} gst_riff_vprp;
+
+#endif /* __GST_AVI_H__ */
diff --git a/gst/avi/gstavi.c b/gst/avi/gstavi.c
new file mode 100644
index 0000000000..012f5c47bc
--- /dev/null
+++ b/gst/avi/gstavi.c
@@ -0,0 +1,44 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ *
+ * gstavi.c: plugin registering
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstavielements.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (avidemux, plugin);
+ ret |= GST_ELEMENT_REGISTER (avimux, plugin);
+ ret |= GST_ELEMENT_REGISTER (avisubtitle, plugin);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ avi,
+ "AVI stream handling",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/avi/gstavidemux.c b/gst/avi/gstavidemux.c
new file mode 100644
index 0000000000..e25085f7f6
--- /dev/null
+++ b/gst/avi/gstavidemux.c
@@ -0,0 +1,6051 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ * Copyright (C) <2006> Nokia Corporation (contact <stefan.kost@nokia.com>)
+ * Copyright (C) <2009-2010> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/* Element-Checklist-Version: 5 */
+
+/**
+ * SECTION:element-avidemux
+ * @title: avidemux
+ *
+ * Demuxes an .avi file into raw or compressed audio and/or video streams.
+ *
+ * This element supports both push and pull-based scheduling, depending on the
+ * capabilities of the upstream elements.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=test.avi ! avidemux name=demux demux.audio_00 ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_00 ! queue ! decodebin ! videoconvert ! videoscale ! autovideosink
+ * ]| Play (parse and decode) an .avi file and try to output it to
+ * an automatically detected soundcard and videosink. If the AVI file contains
+ * compressed audio or video data, this will only work if you have the
+ * right decoder elements/plugins installed.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <stdio.h>
+
+#include "gst/riff/riff-media.h"
+#include "gstavielements.h"
+#include "gstavidemux.h"
+#include "avi-ids.h"
+#include <gst/gst-i18n-plugin.h>
+#include <gst/base/gstadapter.h>
+#include <gst/tag/tag.h>
+
+#define DIV_ROUND_UP(s,v) (((s) + ((v)-1)) / (v))
+
+#define GST_AVI_KEYFRAME (1 << 0)
+#define ENTRY_IS_KEYFRAME(e) ((e)->flags == GST_AVI_KEYFRAME)
+#define ENTRY_SET_KEYFRAME(e) ((e)->flags = GST_AVI_KEYFRAME)
+#define ENTRY_UNSET_KEYFRAME(e) ((e)->flags = 0)
+
+
+GST_DEBUG_CATEGORY_STATIC (avidemux_debug);
+#define GST_CAT_DEFAULT avidemux_debug
+
+static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-msvideo")
+ );
+
+#ifndef GST_DISABLE_GST_DEBUG
+static const char *const snap_types[2][2] = {
+ {"any", "after"},
+ {"before", "nearest"},
+};
+#endif
+
+static void gst_avi_demux_finalize (GObject * object);
+
+static void gst_avi_demux_reset (GstAviDemux * avi);
+
+#if 0
+static const GstEventMask *gst_avi_demux_get_event_mask (GstPad * pad);
+#endif
+static gboolean gst_avi_demux_handle_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_avi_demux_handle_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event);
+
+#if 0
+static const GstFormat *gst_avi_demux_get_src_formats (GstPad * pad);
+#endif
+static gboolean gst_avi_demux_handle_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+static gboolean gst_avi_demux_src_convert (GstPad * pad, GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
+
+static gboolean gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
+ GstSeekFlags flags);
+static gboolean gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event);
+static gboolean gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event);
+static void gst_avi_demux_loop (GstPad * pad);
+static gboolean gst_avi_demux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+static gboolean gst_avi_demux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+static GstFlowReturn gst_avi_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+#if 0
+static void gst_avi_demux_set_index (GstElement * element, GstIndex * index);
+static GstIndex *gst_avi_demux_get_index (GstElement * element);
+#endif
+static GstStateChangeReturn gst_avi_demux_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi);
+static void gst_avi_demux_get_buffer_info (GstAviDemux * avi,
+ GstAviStream * stream, guint entry_n, GstClockTime * timestamp,
+ GstClockTime * ts_end, guint64 * offset, guint64 * offset_end);
+
+static void gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf);
+static void gst_avi_demux_parse_strd (GstAviDemux * avi, GstBuffer * buf);
+
+static void parse_tag_value (GstAviDemux * avi, GstTagList * taglist,
+ const gchar * type, guint8 * ptr, guint tsize);
+
+/* GObject methods */
+
+#define gst_avi_demux_parent_class parent_class
+G_DEFINE_TYPE (GstAviDemux, gst_avi_demux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (avidemux, "avidemux", GST_RANK_PRIMARY,
+ GST_TYPE_AVI_DEMUX, avi_element_init (plugin));
+
+/* Class initialisation: installs the finalize/change_state vfuncs and
+ * the four "sometimes" source pad templates (audio/video/subtitle/
+ * subpicture) built from the RIFF template caps, plus element metadata. */
+static void
+gst_avi_demux_class_init (GstAviDemuxClass * klass)
+{
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstPadTemplate *videosrctempl, *audiosrctempl, *subsrctempl, *subpicsrctempl;
+  GstCaps *audcaps, *vidcaps, *subcaps, *subpiccaps;
+
+  GST_DEBUG_CATEGORY_INIT (avidemux_debug, "avidemux",
+      0, "Demuxer for AVI streams");
+
+  gobject_class->finalize = gst_avi_demux_finalize;
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_avi_demux_change_state);
+#if 0
+  gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_avi_demux_set_index);
+  gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_avi_demux_get_index);
+#endif
+
+  /* audio template: all known RIFF audio formats plus a catch-all */
+  audcaps = gst_riff_create_audio_template_caps ();
+  gst_caps_append (audcaps, gst_caps_new_empty_simple ("audio/x-avi-unknown"));
+  audiosrctempl = gst_pad_template_new ("audio_%u",
+      GST_PAD_SRC, GST_PAD_SOMETIMES, audcaps);
+
+  /* video template: RIFF video + interleaved A/V (iavs) + catch-all */
+  vidcaps = gst_riff_create_video_template_caps ();
+  gst_caps_append (vidcaps, gst_riff_create_iavs_template_caps ());
+  gst_caps_append (vidcaps, gst_caps_new_empty_simple ("video/x-avi-unknown"));
+  videosrctempl = gst_pad_template_new ("video_%u",
+      GST_PAD_SRC, GST_PAD_SOMETIMES, vidcaps);
+
+  subcaps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
+  subsrctempl = gst_pad_template_new ("subtitle_%u",
+      GST_PAD_SRC, GST_PAD_SOMETIMES, subcaps);
+  subpiccaps = gst_caps_new_empty_simple ("subpicture/x-xsub");
+  subpicsrctempl = gst_pad_template_new ("subpicture_%u",
+      GST_PAD_SRC, GST_PAD_SOMETIMES, subpiccaps);
+  gst_element_class_add_pad_template (gstelement_class, audiosrctempl);
+  gst_element_class_add_pad_template (gstelement_class, videosrctempl);
+  gst_element_class_add_pad_template (gstelement_class, subsrctempl);
+  gst_element_class_add_pad_template (gstelement_class, subpicsrctempl);
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_templ);
+
+  /* the templates keep their own caps refs; drop the ones created above */
+  gst_caps_unref (audcaps);
+  gst_caps_unref (vidcaps);
+  gst_caps_unref (subcaps);
+  gst_caps_unref (subpiccaps);
+
+  gst_element_class_set_static_metadata (gstelement_class, "Avi demuxer",
+      "Codec/Demuxer",
+      "Demultiplex an avi file into audio and video",
+      "Erik Walthinsen <omega@cse.ogi.edu>, "
+      "Wim Taymans <wim.taymans@chello.be>, "
+      "Thijs Vermeir <thijsvermeir@gmail.com>");
+}
+
+/* Instance initialisation: wires up the sink pad functions (activation,
+ * chain, events), creates the adapter used for push-mode buffering and
+ * the flow combiner for the source pads, then resets all demuxing state. */
+static void
+gst_avi_demux_init (GstAviDemux * avi)
+{
+  avi->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
+  gst_pad_set_activate_function (avi->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate));
+  gst_pad_set_activatemode_function (avi->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_avi_demux_sink_activate_mode));
+  gst_pad_set_chain_function (avi->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_avi_demux_chain));
+  gst_pad_set_event_function (avi->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_avi_demux_handle_sink_event));
+  gst_element_add_pad (GST_ELEMENT_CAST (avi), avi->sinkpad);
+
+  avi->adapter = gst_adapter_new ();
+  avi->flowcombiner = gst_flow_combiner_new ();
+
+  gst_avi_demux_reset (avi);
+
+  GST_OBJECT_FLAG_SET (avi, GST_ELEMENT_FLAG_INDEXABLE);
+}
+
+/* GObject finalize: drop the adapter and flow combiner created in
+ * _init(), then chain up to the parent class. */
+static void
+gst_avi_demux_finalize (GObject * object)
+{
+  GstAviDemux *avi = GST_AVI_DEMUX (object);
+
+  GST_DEBUG ("AVI: finalize");
+
+  g_object_unref (avi->adapter);
+  gst_flow_combiner_free (avi->flowcombiner);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Free everything owned by @stream and zero the struct.  An exposed pad
+ * is deactivated and removed from the element (removal drops the ref and
+ * detaches it from the flow combiner); a never-exposed pad is unreffed
+ * directly. */
+static void
+gst_avi_demux_reset_stream (GstAviDemux * avi, GstAviStream * stream)
+{
+  g_free (stream->strh);
+  g_free (stream->strf.data);
+  g_free (stream->name);
+  g_free (stream->index);
+  g_free (stream->indexes);
+  if (stream->initdata)
+    gst_buffer_unref (stream->initdata);
+  if (stream->extradata)
+    gst_buffer_unref (stream->extradata);
+  if (stream->rgb8_palette)
+    gst_buffer_unref (stream->rgb8_palette);
+  if (stream->pad) {
+    if (stream->exposed) {
+      gst_pad_set_active (stream->pad, FALSE);
+      gst_element_remove_pad (GST_ELEMENT_CAST (avi), stream->pad);
+      gst_flow_combiner_remove_pad (avi->flowcombiner, stream->pad);
+    } else
+      gst_object_unref (stream->pad);
+  }
+  if (stream->taglist) {
+    gst_tag_list_unref (stream->taglist);
+    stream->taglist = NULL;
+  }
+  /* wipe all remaining fields so the slot can be reused */
+  memset (stream, 0, sizeof (GstAviStream));
+}
+
+/* Return the demuxer to its pristine state: free all per-stream data,
+ * clear header/index/tag/event state, empty the adapter and reinitialise
+ * the TIME segment.  Called at least from _init(). */
+static void
+gst_avi_demux_reset (GstAviDemux * avi)
+{
+  gint i;
+
+  GST_DEBUG ("AVI: reset");
+
+  for (i = 0; i < avi->num_streams; i++)
+    gst_avi_demux_reset_stream (avi, &avi->stream[i]);
+
+  avi->header_state = GST_AVI_DEMUX_HEADER_TAG_LIST;
+  avi->num_streams = 0;
+  avi->num_v_streams = 0;
+  avi->num_a_streams = 0;
+  avi->num_t_streams = 0;
+  avi->num_sp_streams = 0;
+  avi->main_stream = -1;
+
+  avi->have_group_id = FALSE;
+  avi->group_id = G_MAXUINT;
+
+  avi->state = GST_AVI_DEMUX_START;
+  avi->offset = 0;
+  avi->building_index = FALSE;
+
+  avi->index_offset = 0;
+  g_free (avi->avih);
+  avi->avih = NULL;
+
+#if 0
+  if (avi->element_index)
+    gst_object_unref (avi->element_index);
+  avi->element_index = NULL;
+#endif
+
+  if (avi->seg_event) {
+    gst_event_unref (avi->seg_event);
+    avi->seg_event = NULL;
+  }
+  if (avi->seek_event) {
+    gst_event_unref (avi->seek_event);
+    avi->seek_event = NULL;
+  }
+
+  if (avi->globaltags)
+    gst_tag_list_unref (avi->globaltags);
+  avi->globaltags = NULL;
+
+  avi->got_tags = TRUE;         /* we always want to push global tags */
+  avi->have_eos = FALSE;
+  avi->seekable = TRUE;
+
+  gst_adapter_clear (avi->adapter);
+
+  gst_segment_init (&avi->segment, GST_FORMAT_TIME);
+  avi->segment_seqnum = 0;
+}
+
+
+/* GstElement methods */
+
+#if 0
+static const GstFormat *
+gst_avi_demux_get_src_formats (GstPad * pad)
+{
+ GstAviStream *stream = gst_pad_get_element_private (pad);
+
+ static const GstFormat src_a_formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_BYTES,
+ GST_FORMAT_DEFAULT,
+ 0
+ };
+ static const GstFormat src_v_formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_DEFAULT,
+ 0
+ };
+
+ return (stream->strh->type == GST_RIFF_FCC_auds ?
+ src_a_formats : src_v_formats);
+}
+#endif
+
+/* Stream unit conversion helpers.  "unchecked" means the division is not
+ * guarded: callers must guarantee the documented fields are non-zero. */
+
+/* assumes stream->strf.auds->av_bps != 0 (divides by it) */
+static inline GstClockTime
+avi_stream_convert_bytes_to_time_unchecked (GstAviStream * stream,
+    guint64 bytes)
+{
+  return gst_util_uint64_scale_int (bytes, GST_SECOND,
+      stream->strf.auds->av_bps);
+}
+
+/* av_bps is only a multiplier here; av_bps == 0 simply yields 0 bytes */
+static inline guint64
+avi_stream_convert_time_to_bytes_unchecked (GstAviStream * stream,
+    GstClockTime time)
+{
+  return gst_util_uint64_scale_int (time, stream->strf.auds->av_bps,
+      GST_SECOND);
+}
+
+/* assumes stream->strh->rate != 0 (divides by it) */
+static inline GstClockTime
+avi_stream_convert_frames_to_time_unchecked (GstAviStream * stream,
+    guint64 frames)
+{
+  return gst_util_uint64_scale (frames, stream->strh->scale * GST_SECOND,
+      stream->strh->rate);
+}
+
+/* assumes stream->strh->scale != 0 (divides by scale * GST_SECOND) */
+static inline guint64
+avi_stream_convert_time_to_frames_unchecked (GstAviStream * stream,
+    GstClockTime time)
+{
+  return gst_util_uint64_scale (time, stream->strh->rate,
+      stream->strh->scale * GST_SECOND);
+}
+
+/* Convert @src_value in @src_format into *@dest_format for the stream
+ * attached to @pad, writing the result to *@dest_value.  Returns FALSE
+ * when the conversion is not possible (no stream header yet, byte
+ * positions on video streams, unknown format pair, zero bitrate). */
+static gboolean
+gst_avi_demux_src_convert (GstPad * pad,
+    GstFormat src_format,
+    gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
+{
+  GstAviStream *stream = gst_pad_get_element_private (pad);
+  gboolean res = TRUE;
+
+  /* src_value is signed: use G_GINT64_FORMAT so the varargs type matches */
+  GST_LOG_OBJECT (pad,
+      "Received src_format:%s, src_value:%" G_GINT64_FORMAT
+      ", dest_format:%s", gst_format_get_name (src_format), src_value,
+      gst_format_get_name (*dest_format));
+
+  if (G_UNLIKELY (src_format == *dest_format)) {
+    *dest_value = src_value;
+    goto done;
+  }
+  /* no stream header yet -> nothing to base a conversion on */
+  if (G_UNLIKELY (!stream->strh || !stream->strf.data)) {
+    res = FALSE;
+    goto done;
+  }
+  /* byte offsets are meaningless for video streams */
+  if (G_UNLIKELY (stream->strh->type == GST_RIFF_FCC_vids &&
+          (src_format == GST_FORMAT_BYTES
+              || *dest_format == GST_FORMAT_BYTES))) {
+    res = FALSE;
+    goto done;
+  }
+
+  switch (src_format) {
+    case GST_FORMAT_TIME:
+      switch (*dest_format) {
+        case GST_FORMAT_BYTES:
+          *dest_value = gst_util_uint64_scale_int (src_value,
+              stream->strf.auds->av_bps, GST_SECOND);
+          break;
+        case GST_FORMAT_DEFAULT:
+          *dest_value =
+              gst_util_uint64_scale_round (src_value, stream->strh->rate,
+              stream->strh->scale * GST_SECOND);
+          break;
+        default:
+          res = FALSE;
+          break;
+      }
+      break;
+    case GST_FORMAT_BYTES:
+      switch (*dest_format) {
+        case GST_FORMAT_TIME:
+          if (stream->strf.auds->av_bps != 0) {
+            *dest_value = avi_stream_convert_bytes_to_time_unchecked (stream,
+                src_value);
+          } else
+            res = FALSE;
+          break;
+        default:
+          res = FALSE;
+          break;
+      }
+      break;
+    case GST_FORMAT_DEFAULT:
+      switch (*dest_format) {
+        case GST_FORMAT_TIME:
+          *dest_value =
+              avi_stream_convert_frames_to_time_unchecked (stream, src_value);
+          break;
+        default:
+          res = FALSE;
+          break;
+      }
+      break;
+    default:
+      res = FALSE;
+  }
+
+done:
+  /* only read *dest_value when a conversion was actually written;
+   * on the failure paths above it may be uninitialized */
+  GST_LOG_OBJECT (pad,
+      "Returning res:%d dest_format:%s dest_value:%" G_GINT64_FORMAT, res,
+      gst_format_get_name (*dest_format), res ? *dest_value : (gint64) 0);
+  return res;
+}
+
+/* Query handler for the source pads.  Answers POSITION (per-stream,
+ * VBR/CBR aware), DURATION (stream -> header -> avih fallback), SEEKING,
+ * CONVERT (via gst_avi_demux_src_convert) and SEGMENT; everything else
+ * goes to the default handler. */
+static gboolean
+gst_avi_demux_handle_src_query (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  gboolean res = TRUE;
+  GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+  GstAviStream *stream = gst_pad_get_element_private (pad);
+
+  /* without stream headers we cannot answer anything stream-specific */
+  if (!stream->strh || !stream->strf.data)
+    return gst_pad_query_default (pad, parent, query);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:{
+      gint64 pos = 0;
+
+      /* NOTE(review): current_total is printed with %u here and below —
+       * confirm its width in gstavidemux.h (a 64-bit field would need
+       * G_GUINT64_FORMAT) */
+      GST_DEBUG ("pos query for stream %u: frames %u, bytes %u",
+          stream->num, stream->current_entry, stream->current_total);
+
+      /* FIXME, this looks clumsy */
+      if (stream->strh->type == GST_RIFF_FCC_auds) {
+        if (stream->is_vbr) {
+          /* VBR */
+          pos = avi_stream_convert_frames_to_time_unchecked (stream,
+              stream->current_entry);
+          GST_DEBUG_OBJECT (avi, "VBR convert frame %u, time %"
+              GST_TIME_FORMAT, stream->current_entry, GST_TIME_ARGS (pos));
+        } else if (stream->strf.auds->av_bps != 0) {
+          /* CBR */
+          pos = avi_stream_convert_bytes_to_time_unchecked (stream,
+              stream->current_total);
+          GST_DEBUG_OBJECT (avi,
+              "CBR convert bytes %u, time %" GST_TIME_FORMAT,
+              stream->current_total, GST_TIME_ARGS (pos));
+        } else if (stream->idx_n != 0 && stream->total_bytes != 0) {
+          /* calculate timestamps based on percentage of length */
+          guint64 xlen = avi->avih->us_frame *
+              avi->avih->tot_frames * GST_USECOND;
+
+          pos = gst_util_uint64_scale (xlen, stream->current_total,
+              stream->total_bytes);
+          GST_DEBUG_OBJECT (avi,
+              "CBR perc convert bytes %u, time %" GST_TIME_FORMAT,
+              stream->current_total, GST_TIME_ARGS (pos));
+        } else {
+          /* we don't know */
+          res = FALSE;
+        }
+      } else {
+        /* video: frame index scaled by the stream frame rate, falling
+         * back to the global us-per-frame from the avih header */
+        if (stream->strh->rate != 0) {
+          pos = gst_util_uint64_scale ((guint64) stream->current_entry *
+              stream->strh->scale, GST_SECOND, (guint64) stream->strh->rate);
+        } else {
+          pos = stream->current_entry * avi->avih->us_frame * GST_USECOND;
+        }
+      }
+      if (res) {
+        GST_DEBUG ("pos query : %" GST_TIME_FORMAT, GST_TIME_ARGS (pos));
+        gst_query_set_position (query, GST_FORMAT_TIME, pos);
+      } else
+        GST_WARNING ("pos query failed");
+      break;
+    }
+    case GST_QUERY_DURATION:
+    {
+      GstFormat fmt;
+      GstClockTime duration;
+
+      /* only act on audio or video streams */
+      if (stream->strh->type != GST_RIFF_FCC_auds &&
+          stream->strh->type != GST_RIFF_FCC_vids &&
+          stream->strh->type != GST_RIFF_FCC_iavs) {
+        res = FALSE;
+        break;
+      }
+
+      /* take stream duration, fall back to avih duration */
+      if ((duration = stream->duration) == -1)
+        if ((duration = stream->hdr_duration) == -1)
+          duration = avi->duration;
+
+      gst_query_parse_duration (query, &fmt, NULL);
+
+      switch (fmt) {
+        case GST_FORMAT_TIME:
+          gst_query_set_duration (query, fmt, duration);
+          break;
+        case GST_FORMAT_DEFAULT:
+        {
+          gint64 dur;
+          GST_DEBUG_OBJECT (query, "total frames is %" G_GUINT32_FORMAT,
+              stream->idx_n);
+
+          /* prefer the index frame count; otherwise convert the time
+           * duration to frames */
+          if (stream->idx_n > 0)
+            gst_query_set_duration (query, fmt, stream->idx_n);
+          else if (gst_pad_query_convert (pad, GST_FORMAT_TIME,
+                  duration, fmt, &dur))
+            gst_query_set_duration (query, fmt, dur);
+          break;
+        }
+        default:
+          res = FALSE;
+          break;
+      }
+      break;
+    }
+    case GST_QUERY_SEEKING:{
+      GstFormat fmt;
+
+      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+      if (fmt == GST_FORMAT_TIME) {
+        gboolean seekable = TRUE;
+
+        /* in push mode seekability depends on what was determined from
+         * upstream; pull mode is always seekable */
+        if (avi->streaming) {
+          seekable = avi->seekable;
+        }
+
+        gst_query_set_seeking (query, GST_FORMAT_TIME, seekable,
+            0, stream->duration);
+        res = TRUE;
+      }
+      break;
+    }
+    case GST_QUERY_CONVERT:{
+      GstFormat src_fmt, dest_fmt;
+      gint64 src_val, dest_val;
+
+      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+      if ((res = gst_avi_demux_src_convert (pad, src_fmt, src_val, &dest_fmt,
+                  &dest_val)))
+        gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+      else
+        res = gst_pad_query_default (pad, parent, query);
+      break;
+    }
+    case GST_QUERY_SEGMENT:
+    {
+      GstFormat format;
+      gint64 start, stop;
+
+      format = avi->segment.format;
+
+      start =
+          gst_segment_to_stream_time (&avi->segment, format,
+          avi->segment.start);
+      if ((stop = avi->segment.stop) == -1)
+        stop = avi->segment.duration;
+      else
+        stop = gst_segment_to_stream_time (&avi->segment, format, stop);
+
+      gst_query_set_segment (query, avi->segment.rate, format, start, stop);
+      res = TRUE;
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, parent, query);
+      break;
+  }
+
+  return res;
+}
+
+#if 0
+static const GstEventMask *
+gst_avi_demux_get_event_mask (GstPad * pad)
+{
+ static const GstEventMask masks[] = {
+ {GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT},
+ {0,}
+ };
+
+ return masks;
+}
+#endif
+
+#if 0
+static guint64
+gst_avi_demux_seek_streams (GstAviDemux * avi, guint64 offset, gboolean before)
+{
+ GstAviStream *stream;
+ GstIndexEntry *entry;
+ gint i;
+ gint64 val, min = offset;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ entry = gst_index_get_assoc_entry (avi->element_index, stream->index_id,
+ before ? GST_INDEX_LOOKUP_BEFORE : GST_INDEX_LOOKUP_AFTER,
+ GST_ASSOCIATION_FLAG_NONE, GST_FORMAT_BYTES, offset);
+
+ if (before) {
+ if (entry) {
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val);
+ GST_DEBUG_OBJECT (avi, "stream %d, previous entry at %"
+ G_GUINT64_FORMAT, i, val);
+ if (val < min)
+ min = val;
+ }
+ continue;
+ }
+
+ if (!entry) {
+ GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i);
+ stream->current_entry = 0;
+ stream->current_total = 0;
+ continue;
+ }
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &val);
+ GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT,
+ i, val);
+
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &val);
+ stream->current_total = val;
+ gst_index_entry_assoc_map (entry, GST_FORMAT_DEFAULT, &val);
+ stream->current_entry = val;
+ }
+
+ return min;
+}
+#endif
+
+/* Binary-search comparator: orders an index entry against the target
+ * byte *offset (negative = entry before target, positive = after). */
+static gint
+gst_avi_demux_index_entry_offset_search (GstAviIndexEntry * entry,
+    guint64 * offset)
+{
+  guint64 target = *offset;
+
+  if (entry->offset == target)
+    return 0;
+  return (entry->offset < target) ? -1 : 1;
+}
+
+/* Align every stream's position with a new byte @offset using the
+ * per-stream indexes.  With @before set, only the smallest entry offset
+ * preceding @offset is computed (streams are not touched); otherwise
+ * current_entry/current_total are updated to the next entry at or after
+ * @offset.  Returns the smallest relevant offset over all streams. */
+static guint64
+gst_avi_demux_seek_streams_index (GstAviDemux * avi, guint64 offset,
+    gboolean before)
+{
+  GstAviStream *stream;
+  GstAviIndexEntry *entry;
+  gint i;
+  gint64 val, min = offset;
+  guint index = 0;
+
+  for (i = 0; i < avi->num_streams; i++) {
+    stream = &avi->stream[i];
+
+    /* compensate for chunk header */
+    offset += 8;
+    entry =
+        gst_util_array_binary_search (stream->index, stream->idx_n,
+        sizeof (GstAviIndexEntry),
+        (GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
+        before ? GST_SEARCH_MODE_BEFORE : GST_SEARCH_MODE_AFTER, &offset, NULL);
+    offset -= 8;
+
+    if (entry)
+      index = entry - stream->index;
+
+    if (before) {
+      if (entry) {
+        val = stream->index[index].offset;
+        GST_DEBUG_OBJECT (avi,
+            "stream %d, previous entry at %" G_GUINT64_FORMAT, i, val);
+        if (val < min)
+          min = val;
+      }
+      continue;
+    }
+
+    if (!entry) {
+      GST_DEBUG_OBJECT (avi, "no position for stream %d, assuming at start", i);
+      stream->current_entry = 0;
+      stream->current_total = 0;
+      continue;
+    }
+
+    /* -8: index offsets point past the chunk header */
+    val = stream->index[index].offset - 8;
+    GST_DEBUG_OBJECT (avi, "stream %d, next entry at %" G_GUINT64_FORMAT, i,
+        val);
+
+    stream->current_total = stream->index[index].total;
+    stream->current_entry = index;
+  }
+
+  return min;
+}
+
+#define GST_AVI_SEEK_PUSH_DISPLACE (4 * GST_SECOND)
+
+/* Sink pad event handler.  The interesting case is a BYTES SEGMENT
+ * (typically the reply to a push-mode seek): the byte position is mapped
+ * back to a time position via the stream indexes, a TIME segment is
+ * pushed downstream and the streaming state (offset/todrop) is adjusted.
+ * EOS and FLUSH_STOP get custom handling; everything else is forwarded. */
+static gboolean
+gst_avi_demux_handle_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  gboolean res = TRUE;
+  GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+  GST_DEBUG_OBJECT (avi,
+      "have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEGMENT:
+    {
+      gint64 boffset, offset = 0;
+      GstSegment segment;
+      GstEvent *segment_event;
+
+      /* some debug output */
+      gst_event_copy_segment (event, &segment);
+      GST_DEBUG_OBJECT (avi, "received newsegment %" GST_SEGMENT_FORMAT,
+          &segment);
+
+      /* chain will send initial newsegment after pads have been added */
+      if (avi->state != GST_AVI_DEMUX_MOVI) {
+        GST_DEBUG_OBJECT (avi, "still starting, eating event");
+        goto exit;
+      }
+
+      /* we only expect a BYTE segment, e.g. following a seek */
+      if (segment.format != GST_FORMAT_BYTES) {
+        GST_DEBUG_OBJECT (avi, "unsupported segment format, ignoring");
+        goto exit;
+      }
+
+      if (avi->have_index) {
+        GstAviIndexEntry *entry;
+        guint i = 0, index = 0, k = 0;
+        GstAviStream *stream;
+
+        /* compensate chunk header, stored index offset points after header */
+        boffset = segment.start + 8;
+        /* find which stream we're on */
+        do {
+          stream = &avi->stream[i];
+
+          /* find the index for start bytes offset */
+          entry = gst_util_array_binary_search (stream->index,
+              stream->idx_n, sizeof (GstAviIndexEntry),
+              (GCompareDataFunc) gst_avi_demux_index_entry_offset_search,
+              GST_SEARCH_MODE_AFTER, &boffset, NULL);
+
+          if (entry == NULL)
+            continue;
+          index = entry - stream->index;
+
+          /* we are on the stream with a chunk start offset closest to start */
+          if (!offset || stream->index[index].offset < offset) {
+            offset = stream->index[index].offset;
+            k = i;
+          }
+          /* exact match needs no further searching */
+          if (stream->index[index].offset == boffset)
+            break;
+        } while (++i < avi->num_streams);
+        boffset -= 8;
+        offset -= 8;
+        stream = &avi->stream[k];
+
+        /* so we have no idea what is to come, or where we are */
+        /* NOTE(review): if no stream had an entry, offset is 0 - 8 = -8
+         * at this point, so this guard cannot trigger — verify intent */
+        if (!offset) {
+          GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS");
+          goto eos;
+        }
+
+        /* get the ts corresponding to start offset bytes for the stream */
+        gst_avi_demux_get_buffer_info (avi, stream, index,
+            (GstClockTime *) & segment.time, NULL, NULL, NULL);
+#if 0
+      } else if (avi->element_index) {
+        GstIndexEntry *entry;
+
+        /* Let's check if we have an index entry for this position */
+        entry = gst_index_get_assoc_entry (avi->element_index, avi->index_id,
+            GST_INDEX_LOOKUP_AFTER, GST_ASSOCIATION_FLAG_NONE,
+            GST_FORMAT_BYTES, segment.start);
+
+        /* we can not go where we have not yet been before ... */
+        if (!entry) {
+          GST_WARNING_OBJECT (avi, "insufficient index data, forcing EOS");
+          goto eos;
+        }
+
+        gst_index_entry_assoc_map (entry, GST_FORMAT_TIME,
+            (gint64 *) & segment.time);
+        gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &offset);
+#endif
+      } else {
+        GST_WARNING_OBJECT (avi, "no index data, forcing EOS");
+        goto eos;
+      }
+
+      /* turn the BYTES segment into the TIME segment we push downstream */
+      segment.format = GST_FORMAT_TIME;
+      segment.start = segment.time;
+      segment.stop = GST_CLOCK_TIME_NONE;
+      segment.position = segment.start;
+
+      /* rescue duration */
+      segment.duration = avi->segment.duration;
+
+      /* set up segment and send downstream */
+      gst_segment_copy_into (&segment, &avi->segment);
+
+      GST_DEBUG_OBJECT (avi, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);
+      avi->segment_seqnum = gst_event_get_seqnum (event);
+      segment_event = gst_event_new_segment (&segment);
+      gst_event_set_seqnum (segment_event, gst_event_get_seqnum (event));
+      gst_avi_demux_push_event (avi, segment_event);
+
+      GST_DEBUG_OBJECT (avi, "next chunk expected at %" G_GINT64_FORMAT,
+          boffset);
+
+      /* adjust state for streaming thread accordingly */
+      if (avi->have_index)
+        gst_avi_demux_seek_streams_index (avi, offset, FALSE);
+#if 0
+      else
+        gst_avi_demux_seek_streams (avi, offset, FALSE);
+#endif
+
+      /* set up streaming thread: resume at boffset, skip todrop bytes
+       * until the first indexed chunk */
+      g_assert (offset >= boffset);
+      avi->offset = boffset;
+      avi->todrop = offset - boffset;
+
+    exit:
+      gst_event_unref (event);
+      res = TRUE;
+      break;
+    eos:
+      /* set up for EOS */
+      avi->have_eos = TRUE;
+      goto exit;
+    }
+    case GST_EVENT_EOS:
+    {
+      /* EOS before the headers were complete is a hard error */
+      if (avi->state != GST_AVI_DEMUX_MOVI) {
+        gst_event_unref (event);
+        GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+            (NULL), ("got eos and didn't receive a complete header object"));
+      } else if (!gst_avi_demux_push_event (avi, event)) {
+        GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+            (NULL), ("got eos but no streams (yet)"));
+      }
+      break;
+    }
+    case GST_EVENT_FLUSH_STOP:
+    {
+      gint i;
+
+      gst_adapter_clear (avi->adapter);
+      avi->have_eos = FALSE;
+      for (i = 0; i < avi->num_streams; i++) {
+        avi->stream[i].discont = TRUE;
+      }
+      /* fall through to default case so that the event gets passed downstream */
+    }
+    default:
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+
+  return res;
+}
+
+/* Event handler for the source pads: SEEK is handled locally (directly
+ * in pull mode, via the push seek path otherwise); all other events go
+ * to the default handler. */
+static gboolean
+gst_avi_demux_handle_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstAviDemux *avi = GST_AVI_DEMUX (parent);
+  gboolean res;
+
+  GST_DEBUG_OBJECT (avi,
+      "have event type %s: %p on src pad", GST_EVENT_TYPE_NAME (event), event);
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+    if (avi->streaming)
+      res = gst_avi_demux_handle_seek_push (avi, pad, event);
+    else
+      res = gst_avi_demux_handle_seek (avi, pad, event);
+    gst_event_unref (event);
+  } else {
+    res = gst_pad_event_default (pad, parent, event);
+  }
+
+  return res;
+}
+
+/* streaming helper (push) */
+
+/*
+ * gst_avi_demux_peek_chunk_info:
+ * @avi: Avi object
+ * @tag: holder for tag
+ * @size: holder for tag size
+ *
+ * Peek the next chunk header (fourcc tag and payload size) without
+ * flushing anything from the adapter.
+ *
+ * Returns: TRUE when the 8 header bytes were available and parsed
+ */
+static gboolean
+gst_avi_demux_peek_chunk_info (GstAviDemux * avi, guint32 * tag, guint32 * size)
+{
+  const guint8 *header;
+
+  if (gst_adapter_available (avi->adapter) < 8)
+    return FALSE;
+
+  header = gst_adapter_map (avi->adapter, 8);
+  *tag = GST_READ_UINT32_LE (header);
+  *size = GST_READ_UINT32_LE (header + 4);
+  gst_adapter_unmap (avi->adapter);
+
+  return TRUE;
+}
+
+/*
+ * gst_avi_demux_peek_chunk:
+ * @avi: Avi object
+ * @tag: holder for tag
+ * @size: holder for tag size
+ *
+ * Peek enough data for one full chunk
+ *
+ * Returns: %TRUE when one chunk has been got
+ */
+static gboolean
+gst_avi_demux_peek_chunk (GstAviDemux * avi, guint32 * tag, guint32 * size)
+{
+  guint32 peek_size = 0;
+  gint available;
+
+  if (!gst_avi_demux_peek_chunk_info (avi, tag, size))
+    goto peek_failed;
+
+  /* size 0 -> empty data buffer would surprise most callers,
+   * large size -> do not bother trying to squeeze that into adapter,
+   * so we throw poor man's exception, which can be caught if caller really
+   * wants to handle 0 size chunk */
+  if (!(*size) || (*size) >= (1 << 30))
+    goto strange_size;
+
+  /* RIFF chunks are padded to even sizes */
+  peek_size = (*size + 1) & ~1;
+  available = gst_adapter_available (avi->adapter);
+
+  GST_DEBUG_OBJECT (avi,
+      "Need to peek chunk of %d bytes to read chunk %" GST_FOURCC_FORMAT
+      ", %d bytes available", *size, GST_FOURCC_ARGS (*tag), available);
+
+  /* need the 8 header bytes plus the (padded) payload in the adapter */
+  if (available < (8 + peek_size))
+    goto need_more;
+
+  return TRUE;
+
+  /* ERRORS */
+peek_failed:
+  {
+    GST_INFO_OBJECT (avi, "Failed to peek");
+    return FALSE;
+  }
+strange_size:
+  {
+    GST_INFO_OBJECT (avi,
+        "Invalid/unexpected chunk size %d for tag %" GST_FOURCC_FORMAT, *size,
+        GST_FOURCC_ARGS (*tag));
+    /* chain should give up */
+    avi->abort_buffering = TRUE;
+    return FALSE;
+  }
+need_more:
+  {
+    GST_INFO_OBJECT (avi, "need more %d < %" G_GUINT32_FORMAT,
+        available, 8 + peek_size);
+    return FALSE;
+  }
+}
+
+/* AVI init */
+
+/*
+ * gst_avi_demux_parse_file_header:
+ * @element: caller element (used for errors/debug).
+ * @buf: input data to be used for parsing.
+ *
+ * "Open" a RIFF/AVI file. The buffer should be at least 12
+ * bytes long. Takes ownership of @buf.
+ *
+ * Returns: TRUE if the file is a RIFF/AVI file, FALSE otherwise.
+ * Throws an error, caller should error out (fatal).
+ */
+static gboolean
+gst_avi_demux_parse_file_header (GstElement * element, GstBuffer * buf)
+{
+  guint32 doctype;
+  GstClockTime stamp;
+
+  /* stamp only measures how long header parsing took (debug output) */
+  stamp = gst_util_get_timestamp ();
+
+  /* riff_parse posts an error */
+  if (!gst_riff_parse_file_header (element, buf, &doctype))
+    return FALSE;
+
+  /* RIFF is fine, but the doctype must say AVI */
+  if (doctype != GST_RIFF_RIFF_AVI)
+    goto not_avi;
+
+  stamp = gst_util_get_timestamp () - stamp;
+  GST_DEBUG_OBJECT (element, "header parsing took %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (stamp));
+
+  return TRUE;
+
+  /* ERRORS */
+not_avi:
+  {
+    GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL),
+        ("File is not an AVI file: 0x%" G_GINT32_MODIFIER "x", doctype));
+    return FALSE;
+  }
+}
+
+/*
+ * Read and validate the 12-byte RIFF/AVI file header in push mode, once
+ * enough data has accumulated in the adapter; advances to HEADER state.
+ * Returns GST_FLOW_OK while waiting for more data.
+ */
+static GstFlowReturn
+gst_avi_demux_stream_init_push (GstAviDemux * avi)
+{
+  GstBuffer *header;
+
+  if (gst_adapter_available (avi->adapter) < 12)
+    return GST_FLOW_OK;
+
+  header = gst_adapter_take_buffer (avi->adapter, 12);
+
+  GST_DEBUG ("Parsing avi header");
+  if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), header))
+    return GST_FLOW_ERROR;
+
+  GST_DEBUG ("header ok");
+  avi->offset += 12;
+  avi->state = GST_AVI_DEMUX_HEADER;
+
+  return GST_FLOW_OK;
+}
+
+/*
+ * Read and validate the 12-byte RIFF/AVI file header in pull mode.
+ * On success avi->offset is advanced past the header.
+ */
+static GstFlowReturn
+gst_avi_demux_stream_init_pull (GstAviDemux * avi)
+{
+  GstFlowReturn res;
+  GstBuffer *buf = NULL;
+
+  /* parse_file_header takes ownership of buf in both outcomes */
+  res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf);
+  if (res != GST_FLOW_OK)
+    return res;
+  else if (!gst_avi_demux_parse_file_header (GST_ELEMENT_CAST (avi), buf))
+    goto wrong_header;
+
+  avi->offset += 12;
+
+  return GST_FLOW_OK;
+
+  /* ERRORS */
+wrong_header:
+  {
+    GST_DEBUG_OBJECT (avi, "error parsing file header");
+    return GST_FLOW_ERROR;
+  }
+}
+
/* AVI header handling */
/*
 * gst_avi_demux_parse_avih:
 * @avi: caller element (used for errors/debug).
 * @buf: input data to be used for parsing.
 * @avih: pointer to structure (filled in by function) containing
 *        stream information (such as flags, number of streams, etc.).
 *
 * Read 'avih' header. Discards buffer after use.
 *
 * Returns: TRUE on success, FALSE otherwise. Throws an error if
 *          the header is invalid. The caller should error out
 *          (fatal).
 */
static gboolean
gst_avi_demux_parse_avih (GstAviDemux * avi,
    GstBuffer * buf, gst_riff_avih ** _avih)
{
  gst_riff_avih *avih;
  gsize size;

  if (buf == NULL)
    goto no_buffer;

  size = gst_buffer_get_size (buf);
  if (size < sizeof (gst_riff_avih))
    goto avih_too_small;

  /* copy the chunk payload into heap storage; ownership of this struct
   * passes to the caller through *_avih on success */
  avih = g_malloc (size);
  gst_buffer_extract (buf, 0, avih, size);

#if (G_BYTE_ORDER == G_BIG_ENDIAN)
  /* on-disk layout is little-endian; swap every field in place on
   * big-endian hosts */
  avih->us_frame = GUINT32_FROM_LE (avih->us_frame);
  avih->max_bps = GUINT32_FROM_LE (avih->max_bps);
  avih->pad_gran = GUINT32_FROM_LE (avih->pad_gran);
  avih->flags = GUINT32_FROM_LE (avih->flags);
  avih->tot_frames = GUINT32_FROM_LE (avih->tot_frames);
  avih->init_frames = GUINT32_FROM_LE (avih->init_frames);
  avih->streams = GUINT32_FROM_LE (avih->streams);
  avih->bufsize = GUINT32_FROM_LE (avih->bufsize);
  avih->width = GUINT32_FROM_LE (avih->width);
  avih->height = GUINT32_FROM_LE (avih->height);
  avih->scale = GUINT32_FROM_LE (avih->scale);
  avih->rate = GUINT32_FROM_LE (avih->rate);
  avih->start = GUINT32_FROM_LE (avih->start);
  avih->length = GUINT32_FROM_LE (avih->length);
#endif

  /* debug stuff */
  GST_INFO_OBJECT (avi, "avih tag found:");
  GST_INFO_OBJECT (avi, " us_frame    %u", avih->us_frame);
  GST_INFO_OBJECT (avi, " max_bps     %u", avih->max_bps);
  GST_INFO_OBJECT (avi, " pad_gran    %u", avih->pad_gran);
  GST_INFO_OBJECT (avi, " flags       0x%08x", avih->flags);
  GST_INFO_OBJECT (avi, " tot_frames  %u", avih->tot_frames);
  GST_INFO_OBJECT (avi, " init_frames %u", avih->init_frames);
  GST_INFO_OBJECT (avi, " streams     %u", avih->streams);
  GST_INFO_OBJECT (avi, " bufsize     %u", avih->bufsize);
  GST_INFO_OBJECT (avi, " width       %u", avih->width);
  GST_INFO_OBJECT (avi, " height      %u", avih->height);
  GST_INFO_OBJECT (avi, " scale       %u", avih->scale);
  GST_INFO_OBJECT (avi, " rate        %u", avih->rate);
  GST_INFO_OBJECT (avi, " start       %u", avih->start);
  GST_INFO_OBJECT (avi, " length      %u", avih->length);

  *_avih = avih;
  gst_buffer_unref (buf);

  /* derive a coarse total duration from the header: us_frame is the
   * per-frame duration in microseconds, scaled *1000 to nanoseconds */
  if (avih->us_frame != 0 && avih->tot_frames != 0)
    avi->duration =
        (guint64) avih->us_frame * (guint64) avih->tot_frames * 1000;
  else
    avi->duration = GST_CLOCK_TIME_NONE;

  GST_INFO_OBJECT (avi, " header duration %" GST_TIME_FORMAT,
      GST_TIME_ARGS (avi->duration));

  return TRUE;

  /* ERRORS */
no_buffer:
  {
    GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No buffer"));
    return FALSE;
  }
avih_too_small:
  {
    GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
        ("Too small avih (%" G_GSIZE_FORMAT " available, %d needed)",
            size, (int) sizeof (gst_riff_avih)));
    gst_buffer_unref (buf);
    return FALSE;
  }
}
+
/*
 * gst_avi_demux_parse_superindex:
 * @avi: caller element (used for debugging/errors).
 * @buf: input data to use for parsing.
 * @locations: locations in the file (byte-offsets) that contain
 *             the actual indexes (see get_avi_demux_parse_subindex()).
 *             The array ends with GST_BUFFER_OFFSET_NONE.
 *
 * Reads superindex (openDML-2 spec stuff) from the provided data.
 * Takes ownership of @buf (unreffed on every path).
 *
 * Returns: TRUE on success, FALSE otherwise. Indexes should be skipped
 *          on error, but they are not fatal.
 */
static gboolean
gst_avi_demux_parse_superindex (GstAviDemux * avi,
    GstBuffer * buf, guint64 ** _indexes)
{
  GstMapInfo map;
  guint8 *data;
  guint16 bpe = 16;             /* bytes per superindex entry (spec default) */
  guint32 num, i;
  guint64 *indexes;
  gsize size;

  *_indexes = NULL;

  /* @buf may be NULL (empty chunk); treat that as zero-size data so the
   * too_small path below reports it */
  if (buf) {
    gst_buffer_map (buf, &map, GST_MAP_READ);
    data = map.data;
    size = map.size;
  } else {
    data = NULL;
    size = 0;
  }

  /* 24 bytes = fixed superindex chunk header before the entry array */
  if (size < 24)
    goto too_small;

  /* check type of index. The opendml2 specs state that
   * there should be 4 dwords per array entry. Type can be
   * either frame or field (and we don't care). */
  if (GST_READ_UINT16_LE (data) != 4 ||
      (data[2] & 0xfe) != 0x0 || data[3] != 0x0) {
    GST_WARNING_OBJECT (avi,
        "Superindex for stream has unexpected "
        "size_entry %d (bytes) or flags 0x%02x/0x%02x",
        GST_READ_UINT16_LE (data), data[2], data[3]);
    /* trust the file's own entry size instead of the spec default */
    bpe = GST_READ_UINT16_LE (data) * 4;
  }
  num = GST_READ_UINT32_LE (&data[4]);

  GST_DEBUG_OBJECT (avi, "got %d indexes", num);

  /* this can't work out well ... */
  if (num > G_MAXUINT32 >> 1 || bpe < 8) {
    goto invalid_params;
  }

  /* +1 for the GST_BUFFER_OFFSET_NONE terminator */
  indexes = g_new (guint64, num + 1);
  for (i = 0; i < num; i++) {
    /* stop early if the declared count exceeds the actual chunk size */
    if (size < 24 + bpe * (i + 1))
      break;
    indexes[i] = GST_READ_UINT64_LE (&data[24 + bpe * i]);
    GST_DEBUG_OBJECT (avi, "index %d at %" G_GUINT64_FORMAT, i, indexes[i]);
  }
  /* terminate at however many entries we actually read */
  indexes[i] = GST_BUFFER_OFFSET_NONE;
  *_indexes = indexes;

  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  return TRUE;

  /* ERRORS */
too_small:
  {
    GST_ERROR_OBJECT (avi,
        "Not enough data to parse superindex (%" G_GSIZE_FORMAT
        " available, 24 needed)", size);
    /* buf may be NULL here; only unmap/unref when it was mapped */
    if (buf) {
      gst_buffer_unmap (buf, &map);
      gst_buffer_unref (buf);
    }
    return FALSE;
  }
invalid_params:
  {
    GST_ERROR_OBJECT (avi, "invalid index parameters (num = %d, bpe = %d)",
        num, bpe);
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    return FALSE;
  }
}
+
/* add an entry to the index of a stream. @num should be an estimate of the
 * total amount of index entries for all streams and is used to dynamically
 * allocate memory for the index entries.
 *
 * Returns FALSE only on allocation failure; @entry is copied by value
 * after its @total field has been filled in here. */
static inline gboolean
gst_avi_demux_add_index (GstAviDemux * avi, GstAviStream * stream,
    guint num, GstAviIndexEntry * entry)
{
  /* ensure index memory */
  if (G_UNLIKELY (stream->idx_n >= stream->idx_max)) {
    guint idx_max = stream->idx_max;
    GstAviIndexEntry *new_idx;

    /* we need to make some more room */
    if (idx_max == 0) {
      /* initial size guess, assume each stream has an equal amount of entries,
       * overshoot with at least 8K */
      idx_max = (num / avi->num_streams) + (8192 / sizeof (GstAviIndexEntry));
    } else {
      /* grow linearly by ~8K worth of entries per reallocation */
      idx_max += 8192 / sizeof (GstAviIndexEntry);
      GST_DEBUG_OBJECT (avi, "expanded index from %u to %u",
          stream->idx_max, idx_max);
    }
    new_idx = g_try_renew (GstAviIndexEntry, stream->index, idx_max);
    /* out of memory, if this fails stream->index is untouched. */
    if (G_UNLIKELY (!new_idx))
      return FALSE;
    /* use new index */
    stream->index = new_idx;
    stream->idx_max = idx_max;
  }

  /* update entry total and stream stats. The entry total can be converted to
   * the timestamp of the entry easily. */
  if (stream->strh->type == GST_RIFF_FCC_auds) {
    gint blockalign;

    /* audio: total counts blocks for VBR, bytes for CBR */
    if (stream->is_vbr) {
      entry->total = stream->total_blocks;
    } else {
      entry->total = stream->total_bytes;
    }
    blockalign = stream->strf.auds->blockalign;
    if (blockalign > 0)
      stream->total_blocks += DIV_ROUND_UP (entry->size, blockalign);
    else
      stream->total_blocks++;
  } else {
    /* non-audio: total counts frames (entry index) for VBR, bytes for CBR */
    if (stream->is_vbr) {
      entry->total = stream->idx_n;
    } else {
      entry->total = stream->total_bytes;
    }
  }
  stream->total_bytes += entry->size;
  if (ENTRY_IS_KEYFRAME (entry))
    stream->n_keyframes++;

  /* and add */
  GST_LOG_OBJECT (avi,
      "Adding stream %u, index entry %d, kf %d, size %u "
      ", offset %" G_GUINT64_FORMAT ", total %" G_GUINT64_FORMAT, stream->num,
      stream->idx_n, ENTRY_IS_KEYFRAME (entry), entry->size, entry->offset,
      entry->total);
  stream->index[stream->idx_n++] = *entry;

  return TRUE;
}
+
/* given @entry_n in @stream, calculate info such as timestamps and
 * offsets for the entry.
 *
 * All output pointers are optional (may be NULL). Note that for a
 * non-audio CBR stream neither timestamp branch matches, so @timestamp
 * and @ts_end are left untouched in that case. */
static void
gst_avi_demux_get_buffer_info (GstAviDemux * avi, GstAviStream * stream,
    guint entry_n, GstClockTime * timestamp, GstClockTime * ts_end,
    guint64 * offset, guint64 * offset_end)
{
  GstAviIndexEntry *entry;

  entry = &stream->index[entry_n];

  if (stream->is_vbr) {
    /* VBR stream next timestamp */
    if (stream->strh->type == GST_RIFF_FCC_auds) {
      /* audio VBR: entry->total counts blocks (see add_index) */
      if (timestamp)
        *timestamp =
            avi_stream_convert_frames_to_time_unchecked (stream, entry->total);
      if (ts_end) {
        gint size = 1;
        /* block count of this entry = delta to the next entry's total;
         * assume 1 block for the very last entry */
        if (G_LIKELY (entry_n + 1 < stream->idx_n))
          size = stream->index[entry_n + 1].total - entry->total;
        *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
            entry->total + size);
      }
    } else {
      /* video/other VBR: one frame per index entry */
      if (timestamp)
        *timestamp =
            avi_stream_convert_frames_to_time_unchecked (stream, entry_n);
      if (ts_end)
        *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
            entry_n + 1);
    }
  } else if (stream->strh->type == GST_RIFF_FCC_auds) {
    /* constant rate stream */
    if (timestamp)
      *timestamp =
          avi_stream_convert_bytes_to_time_unchecked (stream, entry->total);
    if (ts_end)
      *ts_end = avi_stream_convert_bytes_to_time_unchecked (stream,
          entry->total + entry->size);
  }
  if (stream->strh->type == GST_RIFF_FCC_vids) {
    /* video offsets are the frame number */
    if (offset)
      *offset = entry_n;
    if (offset_end)
      *offset_end = entry_n + 1;
  } else {
    /* no offsets for audio */
    if (offset)
      *offset = -1;
    if (offset_end)
      *offset_end = -1;
  }
}
+
/* collect and debug stats about the indexes for all streams.
 * This method is also responsible for filling in the stream duration
 * as measured by the amount of index entries.
 *
 * Returns TRUE if the index is not empty, else FALSE */
static gboolean
gst_avi_demux_do_index_stats (GstAviDemux * avi)
{
  guint total_idx = 0;
  guint i;
#ifndef GST_DISABLE_GST_DEBUG
  /* only needed for the wasted-memory log line below; GST_INFO_OBJECT
   * compiles to nothing when debugging is disabled */
  guint total_max = 0;
#endif

  /* get stream stats now */
  for (i = 0; i < avi->num_streams; i++) {
    GstAviStream *stream;

    /* skip slots without a usable header or index */
    if (G_UNLIKELY (!(stream = &avi->stream[i])))
      continue;
    if (G_UNLIKELY (!stream->strh))
      continue;
    if (G_UNLIKELY (!stream->index || stream->idx_n == 0))
      continue;

    /* we interested in the end_ts of the last entry, which is the total
     * duration of this stream */
    gst_avi_demux_get_buffer_info (avi, stream, stream->idx_n - 1,
        NULL, &stream->idx_duration, NULL, NULL);

    total_idx += stream->idx_n;
#ifndef GST_DISABLE_GST_DEBUG
    total_max += stream->idx_max;
#endif
    GST_INFO_OBJECT (avi, "Stream %d, dur %" GST_TIME_FORMAT ", %6u entries, "
        "%5u keyframes, entry size = %2u, total size = %10u, allocated %10u",
        i, GST_TIME_ARGS (stream->idx_duration), stream->idx_n,
        stream->n_keyframes, (guint) sizeof (GstAviIndexEntry),
        (guint) (stream->idx_n * sizeof (GstAviIndexEntry)),
        (guint) (stream->idx_max * sizeof (GstAviIndexEntry)));

    /* knowing all that we do, that also includes avg bitrate */
    if (!stream->taglist) {
      stream->taglist = gst_tag_list_new_empty ();
    }
    if (stream->total_bytes && stream->idx_duration)
      gst_tag_list_add (stream->taglist, GST_TAG_MERGE_REPLACE,
          GST_TAG_BITRATE,
          (guint) gst_util_uint64_scale (stream->total_bytes * 8,
              GST_SECOND, stream->idx_duration), NULL);
  }
  /* convert entry counts to byte counts for the summary log */
  total_idx *= sizeof (GstAviIndexEntry);
#ifndef GST_DISABLE_GST_DEBUG
  total_max *= sizeof (GstAviIndexEntry);
#endif
  GST_INFO_OBJECT (avi, "%u bytes for index vs %u ideally, %u wasted",
      total_max, total_idx, total_max - total_idx);

  if (total_idx == 0) {
    GST_WARNING_OBJECT (avi, "Index is empty !");
    return FALSE;
  }
  return TRUE;
}
+
+/*
+ * gst_avi_demux_parse_subindex:
+ * @avi: Avi object
+ * @buf: input data to use for parsing.
+ * @stream: stream context.
+ * @entries_list: a list (returned by the function) containing all the
+ * indexes parsed in this specific subindex. The first
+ * entry is also a pointer to allocated memory that needs
+ * to be free´ed. May be NULL if no supported indexes were
+ * found.
+ *
+ * Reads superindex (openDML-2 spec stuff) from the provided data.
+ * The buffer should contain a GST_RIFF_TAG_ix?? chunk.
+ *
+ * Returns: TRUE on success, FALSE otherwise. Errors are fatal, we
+ * throw an error, caller should bail out asap.
+ */
+static gboolean
+gst_avi_demux_parse_subindex (GstAviDemux * avi, GstAviStream * stream,
+ GstBuffer * buf)
+{
+ GstMapInfo map;
+ guint8 *data;
+ guint16 bpe;
+ guint32 num, i;
+ guint64 baseoff;
+
+ if (buf == NULL)
+ return TRUE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+
+ /* check size */
+ if (map.size < 24)
+ goto too_small;
+
+ /* We don't support index-data yet */
+ if (data[3] & 0x80)
+ goto not_implemented;
+
+ /* check type of index. The opendml2 specs state that
+ * there should be 4 dwords per array entry. Type can be
+ * either frame or field (and we don't care). */
+ bpe = (data[2] & 0x01) ? 12 : 8;
+ if (GST_READ_UINT16_LE (data) != bpe / 4 ||
+ (data[2] & 0xfe) != 0x0 || data[3] != 0x1) {
+ GST_WARNING_OBJECT (avi,
+ "Superindex for stream %d has unexpected "
+ "size_entry %d (bytes) or flags 0x%02x/0x%02x",
+ stream->num, GST_READ_UINT16_LE (data), data[2], data[3]);
+ bpe = GST_READ_UINT16_LE (data) * 4;
+ }
+ num = GST_READ_UINT32_LE (&data[4]);
+ baseoff = GST_READ_UINT64_LE (&data[12]);
+
+ /* If there's nothing, just return ! */
+ if (num == 0)
+ goto empty_index;
+
+ GST_INFO_OBJECT (avi, "Parsing subindex, nr_entries = %6d", num);
+
+ for (i = 0; i < num; i++) {
+ GstAviIndexEntry entry;
+
+ if (map.size < 24 + bpe * (i + 1))
+ break;
+
+ /* fill in offset and size. offset contains the keyframe flag in the
+ * upper bit*/
+ entry.offset = baseoff + GST_READ_UINT32_LE (&data[24 + bpe * i]);
+ entry.size = GST_READ_UINT32_LE (&data[24 + bpe * i + 4]);
+ /* handle flags */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* all audio frames are keyframes */
+ ENTRY_SET_KEYFRAME (&entry);
+ } else {
+ /* else read flags */
+ entry.flags = (entry.size & 0x80000000) ? 0 : GST_AVI_KEYFRAME;
+ }
+ entry.size &= ~0x80000000;
+
+ /* and add */
+ if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
+ goto out_of_mem;
+ }
+done:
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ERROR_OBJECT (avi,
+ "Not enough data to parse subindex (%" G_GSIZE_FORMAT
+ " available, 24 needed)", map.size);
+ goto done; /* continue */
+ }
+not_implemented:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, NOT_IMPLEMENTED, (NULL),
+ ("Subindex-is-data is not implemented"));
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+empty_index:
+ {
+ GST_DEBUG_OBJECT (avi, "the index is empty");
+ goto done; /* continue */
+ }
+out_of_mem:
+ {
+ GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
+ ("Cannot allocate memory for %u*%u=%u bytes",
+ (guint) sizeof (GstAviIndexEntry), num,
+ (guint) sizeof (GstAviIndexEntry) * num));
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+
+/*
+ * Create and push a flushing seek event upstream
+ */
+static gboolean
+perform_seek_to_offset (GstAviDemux * demux, guint64 offset, guint32 seqnum)
+{
+ GstEvent *event;
+ gboolean res = 0;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+ gst_event_set_seqnum (event, seqnum);
+ res = gst_pad_push_event (demux->sinkpad, event);
+
+ if (res)
+ demux->offset = offset;
+ return res;
+}
+
/*
 * Read AVI index when streaming
 *
 * Incrementally walks all subindex chunks across all streams using the
 * state in avi->odml_stream/odml_subidx/odml_subidxs, seeking upstream
 * to each one in turn.
 *
 * Returns TRUE to keep going (chunk handled, waiting for data, or all
 * indexes done); FALSE when the data at the current offset is not the
 * expected ix## chunk, parsing failed, or the upstream seek failed —
 * NOTE(review): exact caller semantics of FALSE not visible here.
 */
static gboolean
gst_avi_demux_read_subindexes_push (GstAviDemux * avi)
{
  guint32 tag = 0, size;
  GstBuffer *buf = NULL;
  guint odml_stream;

  GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams);

  /* we must be positioned exactly at the expected subindex offset */
  if (avi->odml_subidxs[avi->odml_subidx] != avi->offset)
    return FALSE;

  /* not enough data buffered yet; try again later */
  if (!gst_avi_demux_peek_chunk (avi, &tag, &size))
    return TRUE;

  /* this is the ODML chunk we expect */
  odml_stream = avi->odml_stream;

  /* accept both the official 'ix##' fourcc and the buggy '##ix' variant */
  if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + odml_stream / 10,
              '0' + odml_stream % 10)) &&
      (tag != GST_MAKE_FOURCC ('0' + odml_stream / 10,
              '0' + odml_stream % 10, 'i', 'x'))) {
    GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")",
        GST_FOURCC_ARGS (tag));
    return FALSE;
  }

  /* chunk payloads are padded to even sizes */
  avi->offset += 8 + GST_ROUND_UP_2 (size);
  /* flush chunk header so we get just the 'size' payload data */
  gst_adapter_flush (avi->adapter, 8);
  buf = gst_adapter_take_buffer (avi->adapter, size);

  /* parse_subindex takes ownership of buf */
  if (!gst_avi_demux_parse_subindex (avi, &avi->stream[odml_stream], buf))
    return FALSE;

  /* we parsed the index, go to next subindex */
  avi->odml_subidx++;

  if (avi->odml_subidxs[avi->odml_subidx] == GST_BUFFER_OFFSET_NONE) {
    /* we reached the end of the indexes for this stream, move to the next
     * stream to handle the first index */
    avi->odml_stream++;
    avi->odml_subidx = 0;

    if (avi->odml_stream < avi->num_streams) {
      /* there are more indexes */
      avi->odml_subidxs = avi->stream[avi->odml_stream].indexes;
    } else {
      /* we're done, get stream stats now */
      avi->have_index = gst_avi_demux_do_index_stats (avi);

      return TRUE;
    }
  }

  /* seek to next index */
  return perform_seek_to_offset (avi, avi->odml_subidxs[avi->odml_subidx],
      avi->segment_seqnum);
}
+
/*
 * Read AVI index
 *
 * Pull-mode counterpart of gst_avi_demux_read_subindexes_push(): reads
 * and parses every subindex chunk of every stream directly via
 * gst_riff_read_chunk(), then frees the per-stream offset tables and
 * computes the final index stats. Individual chunk failures are skipped,
 * never fatal.
 */
static void
gst_avi_demux_read_subindexes_pull (GstAviDemux * avi)
{
  guint32 tag;
  GstBuffer *buf;
  gint i, n;

  GST_DEBUG_OBJECT (avi, "read subindexes for %d streams", avi->num_streams);

  for (n = 0; n < avi->num_streams; n++) {
    GstAviStream *stream = &avi->stream[n];

    /* stream without a superindex: nothing to read */
    if (stream->indexes == NULL)
      continue;

    /* stream->indexes is a GST_BUFFER_OFFSET_NONE-terminated offset array */
    for (i = 0; stream->indexes[i] != GST_BUFFER_OFFSET_NONE; i++) {
      if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi), avi->sinkpad,
              &stream->indexes[i], &tag, &buf) != GST_FLOW_OK)
        continue;
      else if ((tag != GST_MAKE_FOURCC ('i', 'x', '0' + stream->num / 10,
                  '0' + stream->num % 10)) &&
          (tag != GST_MAKE_FOURCC ('0' + stream->num / 10,
                  '0' + stream->num % 10, 'i', 'x'))) {
        /* Some ODML files (created by god knows what muxer) have a ##ix format
         * instead of the 'official' ix##. They are still valid though. */
        GST_WARNING_OBJECT (avi, "Not an ix## chunk (%" GST_FOURCC_FORMAT ")",
            GST_FOURCC_ARGS (tag));
        gst_buffer_unref (buf);
        continue;
      }

      /* parse_subindex takes ownership of buf */
      if (!gst_avi_demux_parse_subindex (avi, stream, buf))
        continue;
    }

    /* all offsets consumed; drop the table */
    g_free (stream->indexes);
    stream->indexes = NULL;
  }
  /* get stream stats now */
  avi->have_index = gst_avi_demux_do_index_stats (avi);
}
+
+/*
+ * gst_avi_demux_riff_parse_vprp:
+ * @element: caller element (used for debugging/error).
+ * @buf: input data to be used for parsing, stripped from header.
+ * @vprp: a pointer (returned by this function) to a filled-in vprp
+ * structure. Caller should free it.
+ *
+ * Parses a video stream´s vprp. This function takes ownership of @buf.
+ *
+ * Returns: TRUE if parsing succeeded, otherwise FALSE. The stream
+ * should be skipped on error, but it is not fatal.
+ */
+static gboolean
+gst_avi_demux_riff_parse_vprp (GstElement * element,
+ GstBuffer * buf, gst_riff_vprp ** _vprp)
+{
+ gst_riff_vprp *vprp;
+ gint k;
+ gsize size;
+
+ g_return_val_if_fail (buf != NULL, FALSE);
+ g_return_val_if_fail (_vprp != NULL, FALSE);
+
+ size = gst_buffer_get_size (buf);
+
+ if (size < G_STRUCT_OFFSET (gst_riff_vprp, field_info))
+ goto too_small;
+
+ vprp = g_malloc (size);
+ gst_buffer_extract (buf, 0, vprp, size);
+
+#if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ vprp->format_token = GUINT32_FROM_LE (vprp->format_token);
+ vprp->standard = GUINT32_FROM_LE (vprp->standard);
+ vprp->vert_rate = GUINT32_FROM_LE (vprp->vert_rate);
+ vprp->hor_t_total = GUINT32_FROM_LE (vprp->hor_t_total);
+ vprp->vert_lines = GUINT32_FROM_LE (vprp->vert_lines);
+ vprp->aspect = GUINT32_FROM_LE (vprp->aspect);
+ vprp->width = GUINT32_FROM_LE (vprp->width);
+ vprp->height = GUINT32_FROM_LE (vprp->height);
+ vprp->fields = GUINT32_FROM_LE (vprp->fields);
+#endif
+
+ /* size checking */
+ /* calculate fields based on size */
+ k = (size - G_STRUCT_OFFSET (gst_riff_vprp, field_info)) / vprp->fields;
+ if (vprp->fields > k) {
+ GST_WARNING_OBJECT (element,
+ "vprp header indicated %d fields, only %d available", vprp->fields, k);
+ vprp->fields = k;
+ }
+ if (vprp->fields > GST_RIFF_VPRP_VIDEO_FIELDS) {
+ GST_WARNING_OBJECT (element,
+ "vprp header indicated %d fields, at most %d supported", vprp->fields,
+ GST_RIFF_VPRP_VIDEO_FIELDS);
+ vprp->fields = GST_RIFF_VPRP_VIDEO_FIELDS;
+ }
+#if (G_BYTE_ORDER == G_BIG_ENDIAN)
+ for (k = 0; k < vprp->fields; k++) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &vprp->field_info[k];
+ fd->compressed_bm_height = GUINT32_FROM_LE (fd->compressed_bm_height);
+ fd->compressed_bm_width = GUINT32_FROM_LE (fd->compressed_bm_width);
+ fd->valid_bm_height = GUINT32_FROM_LE (fd->valid_bm_height);
+ fd->valid_bm_width = GUINT16_FROM_LE (fd->valid_bm_width);
+ fd->valid_bm_x_offset = GUINT16_FROM_LE (fd->valid_bm_x_offset);
+ fd->valid_bm_y_offset = GUINT32_FROM_LE (fd->valid_bm_y_offset);
+ fd->video_x_t_offset = GUINT32_FROM_LE (fd->video_x_t_offset);
+ fd->video_y_start = GUINT32_FROM_LE (fd->video_y_start);
+ }
+#endif
+
+ /* debug */
+ GST_INFO_OBJECT (element, "vprp tag found in context vids:");
+ GST_INFO_OBJECT (element, " format_token %d", vprp->format_token);
+ GST_INFO_OBJECT (element, " standard %d", vprp->standard);
+ GST_INFO_OBJECT (element, " vert_rate %d", vprp->vert_rate);
+ GST_INFO_OBJECT (element, " hor_t_total %d", vprp->hor_t_total);
+ GST_INFO_OBJECT (element, " vert_lines %d", vprp->vert_lines);
+ GST_INFO_OBJECT (element, " aspect %d:%d", vprp->aspect >> 16,
+ vprp->aspect & 0xffff);
+ GST_INFO_OBJECT (element, " width %d", vprp->width);
+ GST_INFO_OBJECT (element, " height %d", vprp->height);
+ GST_INFO_OBJECT (element, " fields %d", vprp->fields);
+ for (k = 0; k < vprp->fields; k++) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &(vprp->field_info[k]);
+ GST_INFO_OBJECT (element, " field %u description:", k);
+ GST_INFO_OBJECT (element, " compressed_bm_height %d",
+ fd->compressed_bm_height);
+ GST_INFO_OBJECT (element, " compressed_bm_width %d",
+ fd->compressed_bm_width);
+ GST_INFO_OBJECT (element, " valid_bm_height %d",
+ fd->valid_bm_height);
+ GST_INFO_OBJECT (element, " valid_bm_width %d", fd->valid_bm_width);
+ GST_INFO_OBJECT (element, " valid_bm_x_offset %d",
+ fd->valid_bm_x_offset);
+ GST_INFO_OBJECT (element, " valid_bm_y_offset %d",
+ fd->valid_bm_y_offset);
+ GST_INFO_OBJECT (element, " video_x_t_offset %d",
+ fd->video_x_t_offset);
+ GST_INFO_OBJECT (element, " video_y_start %d", fd->video_y_start);
+ }
+
+ gst_buffer_unref (buf);
+
+ *_vprp = vprp;
+
+ return TRUE;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ERROR_OBJECT (element,
+ "Too small vprp (%" G_GSIZE_FORMAT " available, at least %d needed)",
+ size, (int) G_STRUCT_OFFSET (gst_riff_vprp, field_info));
+ gst_buffer_unref (buf);
+ return FALSE;
+ }
+}
+
+static void
+gst_avi_demux_expose_streams (GstAviDemux * avi, gboolean force)
+{
+ guint i;
+
+ GST_DEBUG_OBJECT (avi, "force : %d", force);
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+
+ if (force || stream->idx_n != 0) {
+ GST_LOG_OBJECT (avi, "Adding pad %s", GST_PAD_NAME (stream->pad));
+ gst_element_add_pad ((GstElement *) avi, stream->pad);
+ gst_flow_combiner_add_pad (avi->flowcombiner, stream->pad);
+
+#if 0
+ if (avi->element_index)
+ gst_index_get_writer_id (avi->element_index,
+ GST_OBJECT_CAST (stream->pad), &stream->index_id);
+#endif
+
+ stream->exposed = TRUE;
+ if (avi->main_stream == -1)
+ avi->main_stream = i;
+ } else {
+ GST_WARNING_OBJECT (avi, "Stream #%d doesn't have any entry, removing it",
+ i);
+ gst_avi_demux_reset_stream (avi, stream);
+ }
+ }
+}
+
+/* buf contains LIST chunk data, and will be padded to even size,
+ * since some buggy files do not account for the padding of chunks
+ * within a LIST in the size of the LIST */
+static inline void
+gst_avi_demux_roundup_list (GstAviDemux * avi, GstBuffer ** buf)
+{
+ gsize size;
+
+ size = gst_buffer_get_size (*buf);
+
+ if (G_UNLIKELY (size & 1)) {
+ GstBuffer *obuf;
+ GstMapInfo map;
+
+ GST_DEBUG_OBJECT (avi, "rounding up dubious list size %" G_GSIZE_FORMAT,
+ size);
+ obuf = gst_buffer_new_and_alloc (size + 1);
+
+ gst_buffer_map (obuf, &map, GST_MAP_WRITE);
+ gst_buffer_extract (*buf, 0, map.data, size);
+ /* assume 0 padding, at least makes outcome deterministic */
+ map.data[size] = 0;
+ gst_buffer_unmap (obuf, &map);
+ gst_buffer_replace (buf, obuf);
+ }
+}
+
/* Sanitize the caps created for @stream before they are used:
 * - raw video: mark the stream raw/aligned, default the PAR to 1/1 and
 *   move any palette out of the caps into stream->rgb8_palette;
 * - h264: drop codec_data that actually contains byte-stream data (some
 *   muxers store invalid AVC extradata) and mark the caps byte-stream.
 * Takes ownership of @caps and returns a writable (possibly modified)
 * version of it. */
static GstCaps *
gst_avi_demux_check_caps (GstAviDemux * avi, GstAviStream * stream,
    GstCaps * caps)
{
  GstStructure *s;
  const GValue *val;
  GstBuffer *buf;

  caps = gst_caps_make_writable (caps);

  s = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (s, "video/x-raw")) {
    stream->is_raw = TRUE;
    stream->alignment = 32;
    /* default to square pixels when the caps don't say otherwise */
    if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
      gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION,
          1, 1, NULL);
    if (gst_structure_has_field_typed (s, "palette_data", GST_TYPE_BUFFER)) {
      /* keep the palette on the stream; downstream gets caps without it */
      gst_structure_get (s, "palette_data", GST_TYPE_BUFFER,
          &stream->rgb8_palette, NULL);
      gst_structure_remove_field (s, "palette_data");
      return caps;
    }
  } else if (gst_structure_has_name (s, "video/x-h264")) {
    GST_DEBUG_OBJECT (avi, "checking caps %" GST_PTR_FORMAT, caps);

    /* some muxers put invalid bytestream stuff in h264 extra data */
    val = gst_structure_get_value (s, "codec_data");
    if (val && (buf = gst_value_get_buffer (val))) {
      guint8 *data;
      gint size;
      GstMapInfo map;

      gst_buffer_map (buf, &map, GST_MAP_READ);
      data = map.data;
      size = map.size;
      if (size >= 4) {
        guint32 h = GST_READ_UINT32_BE (data);

        gst_buffer_unmap (buf, &map);
        /* a 00 00 00 01 / 00 00 01 start code cannot be avcC data */
        if (h == 0x01 || (h >> 8) == 0x01) {
          /* can hardly be valid AVC codec data */
          GST_DEBUG_OBJECT (avi,
              "discarding invalid codec_data containing byte-stream");
          /* so do not pretend to downstream that it is packetized avc */
          gst_structure_remove_field (s, "codec_data");
          /* ... but rather properly parsed bytestream */
          gst_structure_set (s, "stream-format", G_TYPE_STRING, "byte-stream",
              "alignment", G_TYPE_STRING, "au", NULL);
        }
      } else {
        gst_buffer_unmap (buf, &map);
      }
    }
  }

  return caps;
}
+
+/*
+ * gst_avi_demux_parse_stream:
+ * @avi: calling element (used for debugging/errors).
+ * @buf: input buffer used to parse the stream.
+ *
+ * Parses all subchunks in a strl chunk (which defines a single
+ * stream). Discards the buffer after use. This function will
+ * increment the stream counter internally.
+ *
+ * Returns: whether the stream was identified successfully.
+ * Errors are not fatal. It does indicate the stream
+ * was skipped.
+ */
+static gboolean
+gst_avi_demux_parse_stream (GstAviDemux * avi, GstBuffer * buf)
+{
+ GstAviStream *stream;
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ GstBuffer *sub = NULL;
+ guint offset = 4;
+ guint32 tag = 0;
+ gchar *codec_name = NULL, *padname = NULL;
+ const gchar *tag_name;
+ GstCaps *caps = NULL;
+ GstPad *pad;
+ GstElement *element;
+ gboolean got_strh = FALSE, got_strf = FALSE, got_vprp = FALSE;
+ gst_riff_vprp *vprp = NULL;
+ GstEvent *event;
+ gchar *stream_id;
+ GstMapInfo map;
+ gboolean sparse = FALSE;
+
+ element = GST_ELEMENT_CAST (avi);
+
+ GST_DEBUG_OBJECT (avi, "Parsing stream");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ if (avi->num_streams >= GST_AVI_DEMUX_MAX_STREAMS) {
+ GST_WARNING_OBJECT (avi,
+ "maximum no of streams (%d) exceeded, ignoring stream",
+ GST_AVI_DEMUX_MAX_STREAMS);
+ gst_buffer_unref (buf);
+ /* not a fatal error, let's say */
+ return TRUE;
+ }
+
+ stream = &avi->stream[avi->num_streams];
+
+ /* initial settings */
+ stream->idx_duration = GST_CLOCK_TIME_NONE;
+ stream->hdr_duration = GST_CLOCK_TIME_NONE;
+ stream->duration = GST_CLOCK_TIME_NONE;
+
+ while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ /* sub can be NULL if the chunk is empty */
+ if (sub == NULL) {
+ GST_DEBUG_OBJECT (avi, "ignoring empty chunk %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ continue;
+ }
+ switch (tag) {
+ case GST_RIFF_TAG_strh:
+ {
+ gst_riff_strh *strh;
+
+ if (got_strh) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional strh chunk");
+ break;
+ }
+ if (!gst_riff_parse_strh (element, sub, &stream->strh)) {
+ /* ownership given away */
+ sub = NULL;
+ GST_WARNING_OBJECT (avi, "Failed to parse strh chunk");
+ goto fail;
+ }
+ sub = NULL;
+ strh = stream->strh;
+ /* sanity check; stream header frame rate matches global header
+ * frame duration */
+ if (stream->strh->type == GST_RIFF_FCC_vids) {
+ GstClockTime s_dur;
+ GstClockTime h_dur = avi->avih->us_frame * GST_USECOND;
+
+ s_dur = gst_util_uint64_scale (GST_SECOND, strh->scale, strh->rate);
+ GST_DEBUG_OBJECT (avi, "verifying stream framerate %d/%d, "
+ "frame duration = %d ms", strh->rate, strh->scale,
+ (gint) (s_dur / GST_MSECOND));
+ if (h_dur > (10 * GST_MSECOND) && (s_dur > 10 * h_dur)) {
+ strh->rate = GST_SECOND / GST_USECOND;
+ strh->scale = h_dur / GST_USECOND;
+ GST_DEBUG_OBJECT (avi, "correcting stream framerate to %d/%d",
+ strh->rate, strh->scale);
+ }
+ }
+ /* determine duration as indicated by header */
+ stream->hdr_duration = gst_util_uint64_scale ((guint64) strh->length *
+ strh->scale, GST_SECOND, (guint64) strh->rate);
+ GST_INFO ("Stream duration according to header: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stream->hdr_duration));
+ if (stream->hdr_duration == 0)
+ stream->hdr_duration = GST_CLOCK_TIME_NONE;
+
+ got_strh = TRUE;
+ break;
+ }
+ case GST_RIFF_TAG_strf:
+ {
+ gboolean res = FALSE;
+
+ if (got_strf) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional strf chunk");
+ break;
+ }
+ if (!got_strh) {
+ GST_ERROR_OBJECT (avi, "Found strf chunk before strh chunk");
+ goto fail;
+ }
+ switch (stream->strh->type) {
+ case GST_RIFF_FCC_vids:
+ stream->is_vbr = TRUE;
+ res = gst_riff_parse_strf_vids (element, sub,
+ &stream->strf.vids, &stream->extradata);
+ sub = NULL;
+ GST_DEBUG_OBJECT (element, "marking video as VBR, res %d", res);
+ break;
+ case GST_RIFF_FCC_auds:
+ res =
+ gst_riff_parse_strf_auds (element, sub, &stream->strf.auds,
+ &stream->extradata);
+ sub = NULL;
+ if (!res)
+ break;
+ stream->is_vbr = (stream->strh->samplesize == 0)
+ && stream->strh->scale > 1
+ && stream->strf.auds->blockalign != 1;
+ GST_DEBUG_OBJECT (element, "marking audio as VBR:%d, res %d",
+ stream->is_vbr, res);
+ /* we need these or we have no way to come up with timestamps */
+ if ((!stream->is_vbr && !stream->strf.auds->av_bps) ||
+ (stream->is_vbr && (!stream->strh->scale ||
+ !stream->strh->rate))) {
+ GST_WARNING_OBJECT (element,
+ "invalid audio header, ignoring stream");
+ goto fail;
+ }
+ /* some more sanity checks */
+ if (stream->is_vbr) {
+ if (stream->strf.auds->blockalign <= 4) {
+ /* that would mean (too) many frames per chunk,
+ * so not likely set as expected */
+ GST_DEBUG_OBJECT (element,
+ "suspicious blockalign %d for VBR audio; "
+ "overriding to 1 frame per chunk",
+ stream->strf.auds->blockalign);
+ /* this should top any likely value */
+ stream->strf.auds->blockalign = (1 << 12);
+ }
+ }
+ break;
+ case GST_RIFF_FCC_iavs:
+ stream->is_vbr = TRUE;
+ res = gst_riff_parse_strf_iavs (element, sub,
+ &stream->strf.iavs, &stream->extradata);
+ sub = NULL;
+ GST_DEBUG_OBJECT (element, "marking iavs as VBR, res %d", res);
+ break;
+ case GST_RIFF_FCC_txts:
+ /* nothing to parse here */
+ stream->is_vbr = (stream->strh->samplesize == 0)
+ && (stream->strh->scale > 1);
+ res = TRUE;
+ break;
+ default:
+ GST_ERROR_OBJECT (avi,
+ "Don´t know how to handle stream type %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->strh->type));
+ break;
+ }
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ if (!res)
+ goto fail;
+ got_strf = TRUE;
+ break;
+ }
+ case GST_RIFF_TAG_vprp:
+ {
+ if (got_vprp) {
+ GST_WARNING_OBJECT (avi, "Ignoring additional vprp chunk");
+ break;
+ }
+ if (!got_strh) {
+ GST_ERROR_OBJECT (avi, "Found vprp chunk before strh chunk");
+ goto fail;
+ }
+ if (!got_strf) {
+ GST_ERROR_OBJECT (avi, "Found vprp chunk before strf chunk");
+ goto fail;
+ }
+
+ if (!gst_avi_demux_riff_parse_vprp (element, sub, &vprp)) {
+ GST_WARNING_OBJECT (avi, "Failed to parse vprp chunk");
+ /* not considered fatal */
+ g_free (vprp);
+ vprp = NULL;
+ } else
+ got_vprp = TRUE;
+ sub = NULL;
+ break;
+ }
+ case GST_RIFF_TAG_strd:
+ if (stream->initdata)
+ gst_buffer_unref (stream->initdata);
+ stream->initdata = sub;
+ if (sub != NULL) {
+ gst_avi_demux_parse_strd (avi, sub);
+ sub = NULL;
+ }
+ break;
+ case GST_RIFF_TAG_strn:
+ {
+ gchar *stream_name = NULL;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+
+ if (avi->globaltags == NULL)
+ avi->globaltags = gst_tag_list_new_empty ();
+ parse_tag_value (avi, avi->globaltags, GST_TAG_TITLE,
+ map.data, map.size);
+
+ if (gst_tag_list_get_string (avi->globaltags, GST_TAG_TITLE,
+ &stream_name)) {
+ GST_DEBUG_OBJECT (avi, "stream name: %s", stream_name);
+ g_free (stream->name);
+ stream->name = stream_name;
+ }
+
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ break;
+ default:
+ if (tag == GST_MAKE_FOURCC ('i', 'n', 'd', 'x') ||
+ tag == GST_MAKE_FOURCC ('i', 'x', '0' + avi->num_streams / 10,
+ '0' + avi->num_streams % 10)) {
+ g_free (stream->indexes);
+ gst_avi_demux_parse_superindex (avi, sub, &stream->indexes);
+ stream->superindex = TRUE;
+ sub = NULL;
+ break;
+ }
+ GST_WARNING_OBJECT (avi,
+ "Unknown stream header tag %" GST_FOURCC_FORMAT ", ignoring",
+ GST_FOURCC_ARGS (tag));
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ GstMapInfo map;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+ GST_MEMDUMP_OBJECT (avi, "Unknown stream header tag", map.data,
+ map.size);
+ gst_buffer_unmap (sub, &map);
+ }
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ break;
+ }
+ if (sub != NULL) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ }
+
+ if (!got_strh) {
+ GST_WARNING_OBJECT (avi, "Failed to find strh chunk");
+ goto fail;
+ }
+
+ if (!got_strf) {
+ GST_WARNING_OBJECT (avi, "Failed to find strf chunk");
+ goto fail;
+ }
+
+ /* get class to figure out the template */
+ klass = GST_ELEMENT_GET_CLASS (avi);
+
+ /* we now have all info, let´s set up a pad and a caps and be done */
+ /* create stream name + pad */
+ switch (stream->strh->type) {
+ case GST_RIFF_FCC_vids:{
+ guint32 fourcc;
+
+ fourcc = (stream->strf.vids->compression) ?
+ stream->strf.vids->compression : stream->strh->fcc_handler;
+ caps = gst_riff_create_video_caps (fourcc, stream->strh,
+ stream->strf.vids, stream->extradata, stream->initdata, &codec_name);
+
+ /* DXSB is XSUB, and it is placed inside a vids */
+ if (!caps || (fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'B') &&
+ fourcc != GST_MAKE_FOURCC ('D', 'X', 'S', 'A'))) {
+ padname = g_strdup_printf ("video_%u", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
+ if (!caps) {
+ caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
+ G_TYPE_INT, fourcc, NULL);
+ } else if (got_vprp && vprp) {
+ guint32 aspect_n, aspect_d;
+ gint n, d;
+
+ aspect_n = vprp->aspect >> 16;
+ aspect_d = vprp->aspect & 0xffff;
+ /* calculate the pixel aspect ratio using w/h and aspect ratio */
+ n = aspect_n * stream->strf.vids->height;
+ d = aspect_d * stream->strf.vids->width;
+ if (n && d)
+ gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ n, d, NULL);
+ }
+ caps = gst_avi_demux_check_caps (avi, stream, caps);
+ tag_name = GST_TAG_VIDEO_CODEC;
+ avi->num_v_streams++;
+ } else {
+ padname = g_strdup_printf ("subpicture_%u", avi->num_sp_streams);
+ templ = gst_element_class_get_pad_template (klass, "subpicture_%u");
+ tag_name = NULL;
+ avi->num_sp_streams++;
+ sparse = TRUE;
+ }
+ break;
+ }
+ case GST_RIFF_FCC_auds:{
+ /* FIXME: Do something with the channel reorder map */
+ padname = g_strdup_printf ("audio_%u", avi->num_a_streams);
+ templ = gst_element_class_get_pad_template (klass, "audio_%u");
+ caps = gst_riff_create_audio_caps (stream->strf.auds->format,
+ stream->strh, stream->strf.auds, stream->extradata,
+ stream->initdata, &codec_name, NULL);
+ if (!caps) {
+ caps = gst_caps_new_simple ("audio/x-avi-unknown", "codec_id",
+ G_TYPE_INT, stream->strf.auds->format, NULL);
+ }
+ tag_name = GST_TAG_AUDIO_CODEC;
+ avi->num_a_streams++;
+ break;
+ }
+ case GST_RIFF_FCC_iavs:{
+ guint32 fourcc = stream->strh->fcc_handler;
+
+ padname = g_strdup_printf ("video_%u", avi->num_v_streams);
+ templ = gst_element_class_get_pad_template (klass, "video_%u");
+ caps = gst_riff_create_iavs_caps (fourcc, stream->strh,
+ stream->strf.iavs, stream->extradata, stream->initdata, &codec_name);
+ if (!caps) {
+ caps = gst_caps_new_simple ("video/x-avi-unknown", "fourcc",
+ G_TYPE_INT, fourcc, NULL);
+ }
+ tag_name = GST_TAG_VIDEO_CODEC;
+ avi->num_v_streams++;
+ break;
+ }
+ case GST_RIFF_FCC_txts:{
+ padname = g_strdup_printf ("subtitle_%u", avi->num_t_streams);
+ templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-avi");
+ tag_name = NULL;
+ avi->num_t_streams++;
+ sparse = TRUE;
+ break;
+ }
+ default:
+ g_return_val_if_reached (FALSE);
+ }
+
+ /* no caps means no stream */
+ if (!caps) {
+ GST_ERROR_OBJECT (element, "Did not find caps for stream %s", padname);
+ goto fail;
+ }
+
+ GST_DEBUG_OBJECT (element, "codec-name=%s", codec_name ? codec_name : "NULL");
+ GST_DEBUG_OBJECT (element, "caps=%" GST_PTR_FORMAT, caps);
+
+ /* set proper settings and add it */
+ if (stream->pad)
+ gst_object_unref (stream->pad);
+ pad = stream->pad = gst_pad_new_from_template (templ, padname);
+ g_free (padname);
+
+ gst_pad_use_fixed_caps (pad);
+#if 0
+ gst_pad_set_formats_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_get_src_formats));
+ gst_pad_set_event_mask_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_get_event_mask));
+#endif
+ gst_pad_set_event_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_event));
+ gst_pad_set_query_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_handle_src_query));
+#if 0
+ gst_pad_set_convert_function (pad,
+ GST_DEBUG_FUNCPTR (gst_avi_demux_src_convert));
+#endif
+
+ stream->num = avi->num_streams;
+
+ stream->start_entry = 0;
+ stream->step_entry = 0;
+ stream->stop_entry = 0;
+
+ stream->current_entry = -1;
+ stream->current_total = 0;
+
+ stream->discont = TRUE;
+
+ stream->total_bytes = 0;
+ stream->total_blocks = 0;
+ stream->n_keyframes = 0;
+
+ stream->idx_n = 0;
+ stream->idx_max = 0;
+
+ gst_pad_set_element_private (pad, stream);
+ avi->num_streams++;
+
+ gst_pad_set_active (pad, TRUE);
+ stream_id =
+ gst_pad_create_stream_id_printf (pad, GST_ELEMENT_CAST (avi), "%03u",
+ avi->num_streams);
+
+ event = gst_pad_get_sticky_event (avi->sinkpad, GST_EVENT_STREAM_START, 0);
+ if (event) {
+ if (gst_event_parse_group_id (event, &avi->group_id))
+ avi->have_group_id = TRUE;
+ else
+ avi->have_group_id = FALSE;
+ gst_event_unref (event);
+ } else if (!avi->have_group_id) {
+ avi->have_group_id = TRUE;
+ avi->group_id = gst_util_group_id_next ();
+ }
+
+ event = gst_event_new_stream_start (stream_id);
+ if (avi->have_group_id)
+ gst_event_set_group_id (event, avi->group_id);
+ if (sparse)
+ gst_event_set_stream_flags (event, GST_STREAM_FLAG_SPARSE);
+
+ gst_pad_push_event (pad, event);
+ g_free (stream_id);
+ gst_pad_set_caps (pad, caps);
+ gst_caps_unref (caps);
+
+ /* make tags */
+ if (codec_name && tag_name) {
+ if (!stream->taglist)
+ stream->taglist = gst_tag_list_new_empty ();
+
+ avi->got_tags = TRUE;
+
+ gst_tag_list_add (stream->taglist, GST_TAG_MERGE_APPEND, tag_name,
+ codec_name, NULL);
+ }
+
+ g_free (vprp);
+ g_free (codec_name);
+ gst_buffer_unref (buf);
+
+ return TRUE;
+
+ /* ERRORS */
+fail:
+ {
+ /* unref any mem that may be in use */
+ if (buf)
+ gst_buffer_unref (buf);
+ if (sub)
+ gst_buffer_unref (sub);
+ g_free (vprp);
+ g_free (codec_name);
+ gst_avi_demux_reset_stream (avi, stream);
+ avi->num_streams++;
+ return FALSE;
+ }
+}
+
+/*
+ * gst_avi_demux_parse_odml:
+ * @avi: calling element (used for debug/error).
+ * @buf: input buffer to be used for parsing.
+ *
+ * Read an openDML-2.0 extension header. Fills in the frame number
+ * in the avi demuxer object when reading succeeds.
+ */
+static void
+gst_avi_demux_parse_odml (GstAviDemux * avi, GstBuffer * buf)
+{
+ guint32 tag = 0;
+ guint offset = 4;
+ GstBuffer *sub = NULL;
+
+ while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub)) {
+ switch (tag) {
+ case GST_RIFF_TAG_dmlh:{
+ gst_riff_dmlh dmlh, *_dmlh;
+ GstMapInfo map;
+
+ /* sub == NULL is possible and means an empty buffer */
+ if (sub == NULL)
+ goto next;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+
+ /* check size */
+ if (map.size < sizeof (gst_riff_dmlh)) {
+ GST_ERROR_OBJECT (avi,
+ "DMLH entry is too small (%" G_GSIZE_FORMAT " bytes, %d needed)",
+ map.size, (int) sizeof (gst_riff_dmlh));
+ gst_buffer_unmap (sub, &map);
+ goto next;
+ }
+ _dmlh = (gst_riff_dmlh *) map.data;
+ dmlh.totalframes = GST_READ_UINT32_LE (&_dmlh->totalframes);
+ gst_buffer_unmap (sub, &map);
+
+ GST_INFO_OBJECT (avi, "dmlh tag found: totalframes: %u",
+ dmlh.totalframes);
+
+ avi->avih->tot_frames = dmlh.totalframes;
+ goto next;
+ }
+
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in ODML header",
+ GST_FOURCC_ARGS (tag));
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ GstMapInfo map;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+ GST_MEMDUMP_OBJECT (avi, "Unknown ODML tag", map.data, map.size);
+ gst_buffer_unmap (sub, &map);
+ }
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ /* skip and move to next chunk */
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ break;
+ }
+ }
+ if (buf)
+ gst_buffer_unref (buf);
+}
+
/* Index helper: returns the number of entries in @stream's index
 * (serves as the exclusive upper bound for scans; @avi is unused). */
static guint
gst_avi_demux_index_last (GstAviDemux * avi, GstAviStream * stream)
{
  return stream->idx_n;
}
+
+/* find a previous entry in the index with the given flags */
+static guint
+gst_avi_demux_index_prev (GstAviDemux * avi, GstAviStream * stream,
+ guint last, gboolean keyframe)
+{
+ GstAviIndexEntry *entry;
+ guint i;
+
+ for (i = last; i > 0; i--) {
+ entry = &stream->index[i - 1];
+ if (!keyframe || ENTRY_IS_KEYFRAME (entry)) {
+ return i - 1;
+ }
+ }
+ return 0;
+}
+
+static guint
+gst_avi_demux_index_next (GstAviDemux * avi, GstAviStream * stream,
+ guint last, gboolean keyframe)
+{
+ GstAviIndexEntry *entry;
+ gint i;
+
+ for (i = last + 1; i < stream->idx_n; i++) {
+ entry = &stream->index[i];
+ if (!keyframe || ENTRY_IS_KEYFRAME (entry)) {
+ return i;
+ }
+ }
+ return stream->idx_n - 1;
+}
+
+static guint
+gst_avi_demux_index_entry_search (GstAviIndexEntry * entry, guint64 * total)
+{
+ if (entry->total < *total)
+ return -1;
+ else if (entry->total > *total)
+ return 1;
+ return 0;
+}
+
+/*
+ * gst_avi_demux_index_for_time:
+ * @avi: Avi object
+ * @stream: the stream
+ * @time: a time position
+ * @next: whether to look for entry before or after @time
+ *
+ * Finds the index entry which time is less/more or equal than the requested time.
+ * Try to avoid binary search when we can convert the time to an index
+ * position directly (for example for video frames with a fixed duration).
+ *
+ * Returns: the found position in the index.
+ */
+static guint
+gst_avi_demux_index_for_time (GstAviDemux * avi,
+ GstAviStream * stream, guint64 time, gboolean next)
+{
+ guint index = -1;
+ guint64 total;
+
+ GST_LOG_OBJECT (avi, "search time:%" GST_TIME_FORMAT, GST_TIME_ARGS (time));
+
+ /* easy (and common) cases */
+ if (time == 0 || stream->idx_n == 0)
+ return 0;
+ if (time >= stream->idx_duration)
+ return stream->idx_n - 1;
+
+ /* figure out where we need to go. For that we convert the time to an
+ * index entry or we convert it to a total and then do a binary search. */
+ if (stream->is_vbr) {
+ /* VBR stream next timestamp */
+ if (stream->strh->type == GST_RIFF_FCC_auds) {
+ total = avi_stream_convert_time_to_frames_unchecked (stream, time);
+ } else {
+ index = avi_stream_convert_time_to_frames_unchecked (stream, time);
+ /* this entry typically undershoots the target time,
+ * so check a bit more if next needed */
+ if (next && index != -1) {
+ GstClockTime itime =
+ avi_stream_convert_frames_to_time_unchecked (stream, index);
+ if (itime < time && index + 1 < stream->idx_n)
+ index++;
+ }
+ }
+ } else if (stream->strh->type == GST_RIFF_FCC_auds) {
+ /* constant rate stream */
+ total = avi_stream_convert_time_to_bytes_unchecked (stream, time);
+ } else
+ return -1;
+
+ if (index == -1) {
+ GstAviIndexEntry *entry;
+
+ /* no index, find index with binary search on total */
+ GST_LOG_OBJECT (avi, "binary search for entry with total %"
+ G_GUINT64_FORMAT, total);
+
+ entry = gst_util_array_binary_search (stream->index,
+ stream->idx_n, sizeof (GstAviIndexEntry),
+ (GCompareDataFunc) gst_avi_demux_index_entry_search,
+ next ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, &total, NULL);
+
+ if (entry == NULL) {
+ GST_LOG_OBJECT (avi, "not found, assume index 0");
+ index = 0;
+ } else {
+ index = entry - stream->index;
+ GST_LOG_OBJECT (avi, "found at %u", index);
+ }
+ } else {
+ GST_LOG_OBJECT (avi, "converted time to index %u", index);
+ }
+
+ return index;
+}
+
+static inline GstAviStream *
+gst_avi_demux_stream_for_id (GstAviDemux * avi, guint32 id)
+{
+ guint stream_nr;
+ GstAviStream *stream;
+
+ /* get the stream for this entry */
+ stream_nr = CHUNKID_TO_STREAMNR (id);
+ if (G_UNLIKELY (stream_nr >= avi->num_streams)) {
+ GST_WARNING_OBJECT (avi,
+ "invalid stream nr %d (0x%08x, %" GST_FOURCC_FORMAT ")", stream_nr, id,
+ GST_FOURCC_ARGS (id));
+ return NULL;
+ }
+ stream = &avi->stream[stream_nr];
+ if (G_UNLIKELY (!stream->strh)) {
+ GST_WARNING_OBJECT (avi, "Unhandled stream %d, skipping", stream_nr);
+ return NULL;
+ }
+ return stream;
+}
+
/*
 * gst_avi_demux_parse_index:
 * @avi: calling element (used for debugging/errors).
 * @buf: buffer containing the full index; ownership is taken, the buffer
 *     is unmapped and unreffed on every path out of this function.
 *
 * Read index entries from the provided buffer.
 * The buffer should contain a GST_RIFF_TAG_idx1 chunk.
 *
 * Returns: FALSE when @buf is NULL, the index is empty, or adding an
 * entry ran out of memory; TRUE otherwise.
 */
static gboolean
gst_avi_demux_parse_index (GstAviDemux * avi, GstBuffer * buf)
{
  GstMapInfo map;
  guint i, num, n;
  gst_riff_index_entry *index;
  GstClockTime stamp;
  GstAviStream *stream;
  GstAviIndexEntry entry;
  guint32 id;

  if (!buf)
    return FALSE;

  gst_buffer_map (buf, &map, GST_MAP_READ);

  /* timestamp only used to log how long parsing took, see below */
  stamp = gst_util_get_timestamp ();

  /* see how many items in the index */
  num = map.size / sizeof (gst_riff_index_entry);
  if (num == 0)
    goto empty_list;

  GST_INFO_OBJECT (avi, "Parsing index, nr_entries = %6d", num);

  index = (gst_riff_index_entry *) map.data;

  /* figure out if the index is 0 based or relative to the MOVI start */
  entry.offset = GST_READ_UINT32_LE (&index[0].offset);
  if (entry.offset < avi->offset) {
    avi->index_offset = avi->offset + 8;
    GST_DEBUG ("index_offset = %" G_GUINT64_FORMAT, avi->index_offset);
  } else {
    avi->index_offset = 0;
    GST_DEBUG ("index is 0 based");
  }

  /* i walks the raw entries, n counts the ones actually accepted */
  for (i = 0, n = 0; i < num; i++) {
    id = GST_READ_UINT32_LE (&index[i].id);
    entry.offset = GST_READ_UINT32_LE (&index[i].offset);

    /* some sanity checks */
    if (G_UNLIKELY (id == GST_RIFF_rec || id == 0 ||
            (entry.offset == 0 && n > 0)))
      continue;

    /* get the stream for this entry */
    stream = gst_avi_demux_stream_for_id (avi, id);
    if (G_UNLIKELY (!stream))
      continue;

    /* handle offset and size: make the offset absolute (past the 8-byte
     * chunk header) using the base computed above */
    entry.offset += avi->index_offset + 8;
    entry.size = GST_READ_UINT32_LE (&index[i].size);

    /* handle flags */
    if (stream->strh->type == GST_RIFF_FCC_auds) {
      /* all audio frames are keyframes */
      ENTRY_SET_KEYFRAME (&entry);
    } else if (stream->strh->type == GST_RIFF_FCC_vids &&
        stream->strf.vids->compression == GST_RIFF_DXSB) {
      /* all xsub frames are keyframes */
      ENTRY_SET_KEYFRAME (&entry);
    } else {
      guint32 flags;
      /* else read flags */
      flags = GST_READ_UINT32_LE (&index[i].flags);
      if (flags & GST_RIFF_IF_KEYFRAME) {
        ENTRY_SET_KEYFRAME (&entry);
      } else {
        ENTRY_UNSET_KEYFRAME (&entry);
      }
    }

    /* and add; num is passed as a size hint for the stream's index array */
    if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
      goto out_of_mem;

    n++;
  }
  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  /* get stream stats now */
  avi->have_index = gst_avi_demux_do_index_stats (avi);

  stamp = gst_util_get_timestamp () - stamp;
  GST_DEBUG_OBJECT (avi, "index parsing took %" GST_TIME_FORMAT,
      GST_TIME_ARGS (stamp));

  return TRUE;

  /* ERRORS */
empty_list:
  {
    GST_DEBUG_OBJECT (avi, "empty index");
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    return FALSE;
  }
out_of_mem:
  {
    GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
        ("Cannot allocate memory for %u*%u=%u bytes",
            (guint) sizeof (GstAviIndexEntry), num,
            (guint) sizeof (GstAviIndexEntry) * num));
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    return FALSE;
  }
}
+
/*
 * gst_avi_demux_stream_index:
 * @avi: avi demuxer object.
 *
 * Seeks to index and reads it. Pull-mode only: chunk headers are fetched
 * with gst_pad_pull_range(). All failures are non-fatal (logged and the
 * function simply returns without an index).
 */
static void
gst_avi_demux_stream_index (GstAviDemux * avi)
{
  GstFlowReturn res;
  guint64 offset = avi->offset;
  GstBuffer *buf = NULL;
  guint32 tag;
  guint32 size;
  GstMapInfo map;

  GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);

  /* get chunk information: 8 bytes = fourcc tag + 32-bit size */
  res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
  if (res != GST_FLOW_OK)
    goto pull_failed;

  gst_buffer_map (buf, &map, GST_MAP_READ);
  if (map.size < 8)
    goto too_small;

  /* check tag first before blindly trying to read 'size' bytes */
  tag = GST_READ_UINT32_LE (map.data);
  size = GST_READ_UINT32_LE (map.data + 4);
  if (tag == GST_RIFF_TAG_LIST) {
    /* this is the movi tag; skip past it (size is rounded up to even,
     * per RIFF padding rules) and re-peek the next chunk header */
    GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
        (8 + GST_ROUND_UP_2 (size)));
    offset += 8 + GST_ROUND_UP_2 (size);
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);

    buf = NULL;
    res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
    if (res != GST_FLOW_OK)
      goto pull_failed;

    gst_buffer_map (buf, &map, GST_MAP_READ);
    if (map.size < 8)
      goto too_small;

    tag = GST_READ_UINT32_LE (map.data);
    size = GST_READ_UINT32_LE (map.data + 4);
  }
  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);

  if (tag != GST_RIFF_TAG_idx1)
    goto no_index;
  if (!size)
    goto zero_index;

  GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);

  /* read chunk, advance offset */
  if (gst_riff_read_chunk (GST_ELEMENT_CAST (avi),
          avi->sinkpad, &offset, &tag, &buf) != GST_FLOW_OK)
    return;

  GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %"
      GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag));

  /* parse_index takes ownership of buf */
  gst_avi_demux_parse_index (avi, buf);

#ifndef GST_DISABLE_GST_DEBUG
  /* debug our indexes */
  {
    gint i;
    GstAviStream *stream;

    for (i = 0; i < avi->num_streams; i++) {
      stream = &avi->stream[i];
      GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes",
          i, stream->idx_n, stream->total_bytes);
    }
  }
#endif
  return;

  /* ERRORS */
pull_failed:
  {
    GST_DEBUG_OBJECT (avi,
        "pull range failed: pos=%" G_GUINT64_FORMAT " size=8", offset);
    return;
  }
too_small:
  {
    GST_DEBUG_OBJECT (avi, "Buffer is too small");
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    return;
  }
no_index:
  {
    GST_WARNING_OBJECT (avi,
        "No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (tag));
    return;
  }
zero_index:
  {
    GST_WARNING_OBJECT (avi, "Empty index data (idx1) after movi chunk");
    return;
  }
}
+
/*
 * gst_avi_demux_stream_index_push:
 * @avi: avi demuxer object.
 *
 * Read index. Push-mode variant of gst_avi_demux_stream_index(): data
 * comes from avi->adapter, and skipping the movi LIST is done by issuing
 * an upstream seek and returning so the chain function resumes here.
 */
static void
gst_avi_demux_stream_index_push (GstAviDemux * avi)
{
  guint64 offset = avi->idx1_offset;
  GstBuffer *buf;
  guint32 tag;
  guint32 size;

  GST_DEBUG ("demux stream index at offset %" G_GUINT64_FORMAT, offset);

  /* get chunk information; returns FALSE when not enough data is
   * buffered yet, in which case we simply come back later */
  if (!gst_avi_demux_peek_chunk (avi, &tag, &size))
    return;

  /* check tag first before blindly trying to read 'size' bytes */
  if (tag == GST_RIFF_TAG_LIST) {
    /* this is the movi tag */
    GST_DEBUG_OBJECT (avi, "skip LIST chunk, size %" G_GUINT32_FORMAT,
        (8 + GST_ROUND_UP_2 (size)));
    avi->idx1_offset = offset + 8 + GST_ROUND_UP_2 (size);
    /* issue seek to allow chain function to handle it and return! */
    perform_seek_to_offset (avi, avi->idx1_offset, avi->segment_seqnum);
    return;
  }

  if (tag != GST_RIFF_TAG_idx1)
    goto no_index;

  GST_DEBUG ("index found at offset %" G_GUINT64_FORMAT, offset);

  /* flush chunk header */
  gst_adapter_flush (avi->adapter, 8);
  /* read chunk payload */
  buf = gst_adapter_take_buffer (avi->adapter, size);
  if (!buf)
    goto pull_failed;
  /* advance offset (RIFF chunks are padded to even sizes) */
  offset += 8 + GST_ROUND_UP_2 (size);

  GST_DEBUG ("will parse index chunk size %" G_GSIZE_FORMAT " for tag %"
      GST_FOURCC_FORMAT, gst_buffer_get_size (buf), GST_FOURCC_ARGS (tag));

  /* rewind to the start of the movi data before handing over the index;
   * parse_index takes ownership of buf */
  avi->offset = avi->first_movi_offset;
  gst_avi_demux_parse_index (avi, buf);

#ifndef GST_DISABLE_GST_DEBUG
  /* debug our indexes */
  {
    gint i;
    GstAviStream *stream;

    for (i = 0; i < avi->num_streams; i++) {
      stream = &avi->stream[i];
      GST_DEBUG_OBJECT (avi, "stream %u: %u frames, %" G_GINT64_FORMAT " bytes",
          i, stream->idx_n, stream->total_bytes);
    }
  }
#endif
  return;

  /* ERRORS */
pull_failed:
  {
    GST_DEBUG_OBJECT (avi,
        "taking data from adapter failed: pos=%" G_GUINT64_FORMAT " size=%u",
        offset, size);
    return;
  }
no_index:
  {
    GST_WARNING_OBJECT (avi,
        "No index data (idx1) after movi chunk, but %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (tag));
    return;
  }
}
+
+/*
+ * gst_avi_demux_peek_tag:
+ *
+ * Returns the tag and size of the next chunk
+ */
+static GstFlowReturn
+gst_avi_demux_peek_tag (GstAviDemux * avi, guint64 offset, guint32 * tag,
+ guint * size)
+{
+ GstFlowReturn res;
+ GstBuffer *buf = NULL;
+ GstMapInfo map;
+
+ res = gst_pad_pull_range (avi->sinkpad, offset, 8, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_failed;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size != 8)
+ goto wrong_size;
+
+ *tag = GST_READ_UINT32_LE (map.data);
+ *size = GST_READ_UINT32_LE (map.data + 4);
+
+ GST_LOG_OBJECT (avi, "Tag[%" GST_FOURCC_FORMAT "] (size:%d) %"
+ G_GINT64_FORMAT " -- %" G_GINT64_FORMAT, GST_FOURCC_ARGS (*tag),
+ *size, offset + 8, offset + 8 + (gint64) * size);
+
+done:
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ return res;
+
+ /* ERRORS */
+pull_failed:
+ {
+ GST_DEBUG_OBJECT (avi, "pull_ranged returned %s", gst_flow_get_name (res));
+ return res;
+ }
+wrong_size:
+ {
+ GST_DEBUG_OBJECT (avi, "got %" G_GSIZE_FORMAT " bytes which is <> 8 bytes",
+ map.size);
+ res = GST_FLOW_ERROR;
+ goto done;
+ }
+}
+
+/*
+ * gst_avi_demux_next_data_buffer:
+ *
+ * Returns the offset and size of the next buffer
+ * Position is the position of the buffer (after tag and size)
+ */
+static GstFlowReturn
+gst_avi_demux_next_data_buffer (GstAviDemux * avi, guint64 * offset,
+ guint32 * tag, guint * size)
+{
+ guint64 off = *offset;
+ guint _size = 0;
+ GstFlowReturn res;
+
+ do {
+ res = gst_avi_demux_peek_tag (avi, off, tag, &_size);
+ if (res != GST_FLOW_OK)
+ break;
+ if (*tag == GST_RIFF_TAG_LIST || *tag == GST_RIFF_TAG_RIFF)
+ off += 8 + 4; /* skip tag + size + subtag */
+ else {
+ *offset = off + 8;
+ *size = _size;
+ break;
+ }
+ } while (TRUE);
+
+ return res;
+}
+
/*
 * gst_avi_demux_stream_scan:
 * @avi: calling element (used for debugging/errors).
 *
 * Scan the file for all chunks to "create" a new index.
 * pull-range based
 *
 * Returns: FALSE when the upstream size cannot be queried or an index
 * entry cannot be allocated; TRUE otherwise.
 */
static gboolean
gst_avi_demux_stream_scan (GstAviDemux * avi)
{
  GstFlowReturn res;
  GstAviStream *stream;
  guint64 pos = 0;
  guint64 length;
  gint64 tmplength;
  guint32 tag = 0;
  guint num;

  /* FIXME:
   * - implement non-seekable source support.
   */
  GST_DEBUG_OBJECT (avi, "Creating index");

  /* get the size of the file */
  if (!gst_pad_peer_query_duration (avi->sinkpad, GST_FORMAT_BYTES, &tmplength))
    return FALSE;
  length = tmplength;

  /* guess the total amount of entries we expect; passed to
   * gst_avi_demux_add_index as a size hint */
  num = 16000;

  while (TRUE) {
    GstAviIndexEntry entry;
    guint size = 0;

    /* start reading data buffers to find the id and offset; this skips
     * over RIFF/LIST containers and updates pos to the chunk payload */
    res = gst_avi_demux_next_data_buffer (avi, &pos, &tag, &size);
    if (G_UNLIKELY (res != GST_FLOW_OK))
      break;

    /* get stream; chunks for unknown streams are simply skipped */
    stream = gst_avi_demux_stream_for_id (avi, tag);
    if (G_UNLIKELY (!stream))
      goto next;

    /* we can't figure out the keyframes, assume they all are */
    entry.flags = GST_AVI_KEYFRAME;
    entry.offset = pos;
    entry.size = size;

    /* and add to the index of this stream */
    if (G_UNLIKELY (!gst_avi_demux_add_index (avi, stream, num, &entry)))
      goto out_of_mem;

  next:
    /* update position (chunks are padded to even sizes) */
    pos += GST_ROUND_UP_2 (size);
    if (G_UNLIKELY (pos > length)) {
      GST_WARNING_OBJECT (avi,
          "Stopping index lookup since we are further than EOF");
      break;
    }
  }

  /* collect stats */
  avi->have_index = gst_avi_demux_do_index_stats (avi);

  return TRUE;

  /* ERRORS */
out_of_mem:
  {
    GST_ELEMENT_ERROR (avi, RESOURCE, NO_SPACE_LEFT, (NULL),
        ("Cannot allocate memory for %u*%u=%u bytes",
            (guint) sizeof (GstAviIndexEntry), num,
            (guint) sizeof (GstAviIndexEntry) * num));
    return FALSE;
  }
}
+
+static void
+gst_avi_demux_calculate_durations_from_index (GstAviDemux * avi)
+{
+ guint i;
+ GstClockTime total;
+ GstAviStream *stream;
+
+ total = GST_CLOCK_TIME_NONE;
+
+ /* all streams start at a timestamp 0 */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstClockTime duration, hduration;
+ gst_riff_strh *strh;
+
+ stream = &avi->stream[i];
+ if (G_UNLIKELY (!stream || !stream->idx_n || !(strh = stream->strh)))
+ continue;
+
+ /* get header duration for the stream */
+ hduration = stream->hdr_duration;
+ /* index duration calculated during parsing */
+ duration = stream->idx_duration;
+
+ /* now pick a good duration */
+ if (GST_CLOCK_TIME_IS_VALID (duration)) {
+ /* index gave valid duration, use that */
+ GST_INFO ("Stream %p duration according to index: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (duration));
+ } else {
+ /* fall back to header info to calculate a duration */
+ duration = hduration;
+ }
+ GST_INFO ("Setting duration of stream #%d to %" GST_TIME_FORMAT,
+ i, GST_TIME_ARGS (duration));
+ /* set duration for the stream */
+ stream->duration = duration;
+
+ /* find total duration */
+ if (total == GST_CLOCK_TIME_NONE ||
+ (GST_CLOCK_TIME_IS_VALID (duration) && duration > total))
+ total = duration;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (total) && (total > 0)) {
+ /* now update the duration for those streams where we had none */
+ for (i = 0; i < avi->num_streams; i++) {
+ stream = &avi->stream[i];
+
+ if (!GST_CLOCK_TIME_IS_VALID (stream->duration)
+ || stream->duration == 0) {
+ stream->duration = total;
+
+ GST_INFO ("Stream %p duration according to total: %" GST_TIME_FORMAT,
+ stream, GST_TIME_ARGS (total));
+ }
+ }
+ }
+
+ /* and set the total duration in the segment. */
+ GST_INFO ("Setting total duration to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (total));
+
+ avi->segment.duration = total;
+}
+
+/* returns FALSE if there are no pads to deliver event to,
+ * otherwise TRUE (whatever the outcome of event sending),
+ * takes ownership of the event. */
+static gboolean
+gst_avi_demux_push_event (GstAviDemux * avi, GstEvent * event)
+{
+ gboolean result = FALSE;
+ gint i;
+
+ GST_DEBUG_OBJECT (avi, "sending %s event to %d streams",
+ GST_EVENT_TYPE_NAME (event), avi->num_streams);
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+
+ if (stream->pad) {
+ result = TRUE;
+ gst_pad_push_event (stream->pad, gst_event_ref (event));
+ }
+ }
+ gst_event_unref (event);
+ return result;
+}
+
+static void
+gst_avi_demux_check_seekability (GstAviDemux * avi)
+{
+ GstQuery *query;
+ gboolean seekable = FALSE;
+ gint64 start = -1, stop = -1;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (avi->sinkpad, query)) {
+ GST_DEBUG_OBJECT (avi, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GST_DEBUG_OBJECT (avi, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (avi->sinkpad, GST_FORMAT_BYTES, &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (avi, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ }
+
+done:
+ GST_INFO_OBJECT (avi, "seekable: %d (%" G_GUINT64_FORMAT " - %"
+ G_GUINT64_FORMAT ")", seekable, start, stop);
+ avi->seekable = seekable;
+
+ gst_query_unref (query);
+}
+
+/*
+ * Read AVI headers when streaming
+ */
+static GstFlowReturn
+gst_avi_demux_stream_header_push (GstAviDemux * avi)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 tag = 0;
+ guint32 ltag = 0;
+ guint32 size = 0;
+ const guint8 *data;
+ GstBuffer *buf = NULL, *sub = NULL;
+ guint offset = 4;
+ gint i;
+ GstTagList *tags = NULL;
+ guint8 fourcc[4];
+
+ GST_DEBUG ("Reading and parsing avi headers: %d", avi->header_state);
+
+ switch (avi->header_state) {
+ case GST_AVI_DEMUX_HEADER_TAG_LIST:
+ again:
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ if (tag != GST_RIFF_TAG_LIST)
+ goto header_no_list;
+
+ gst_adapter_flush (avi->adapter, 8);
+ /* Find the 'hdrl' LIST tag */
+ GST_DEBUG ("Reading %d bytes", size);
+ buf = gst_adapter_take_buffer (avi->adapter, size);
+
+ gst_buffer_extract (buf, 0, fourcc, 4);
+
+ if (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl) {
+ GST_WARNING_OBJECT (avi, "Invalid AVI header (no hdrl at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag));
+ gst_buffer_unref (buf);
+ goto again;
+ }
+
+ /* mind padding */
+ if (size & 1)
+ gst_adapter_flush (avi->adapter, 1);
+
+ GST_DEBUG ("'hdrl' LIST tag found. Parsing next chunk");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ /* the hdrl starts with a 'avih' header */
+ if (!gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub))
+ goto header_no_avih;
+
+ if (tag != GST_RIFF_TAG_avih)
+ goto header_no_avih;
+
+ if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
+ goto header_wrong_avih;
+
+ GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
+
+ /* now, read the elements from the header until the end */
+ while (gst_riff_parse_chunk (GST_ELEMENT_CAST (avi), buf, &offset, &tag,
+ &sub)) {
+ /* sub can be NULL on empty tags */
+ if (!sub)
+ continue;
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:
+ if (gst_buffer_get_size (sub) < 4)
+ goto next;
+
+ gst_buffer_extract (sub, 0, fourcc, 4);
+
+ switch (GST_READ_UINT32_LE (fourcc)) {
+ case GST_RIFF_LIST_strl:
+ if (!(gst_avi_demux_parse_stream (avi, sub))) {
+ sub = NULL;
+ GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
+ ("failed to parse stream, ignoring"));
+ goto next;
+ }
+ sub = NULL;
+ goto next;
+ case GST_RIFF_LIST_odml:
+ gst_avi_demux_parse_odml (avi, sub);
+ sub = NULL;
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown list %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ goto next;
+ }
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ goto next;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (tag));
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ GstMapInfo map;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", map.data, map.size);
+ gst_buffer_unmap (sub, &map);
+ }
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ /* move to next chunk */
+ if (sub)
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ }
+ }
+ gst_buffer_unref (buf);
+ GST_DEBUG ("elements parsed");
+
+ /* check parsed streams */
+ if (avi->num_streams == 0) {
+ goto no_streams;
+ } else if (avi->num_streams != avi->avih->streams) {
+ GST_WARNING_OBJECT (avi,
+ "Stream header mentioned %d streams, but %d available",
+ avi->avih->streams, avi->num_streams);
+ }
+ GST_DEBUG ("Get junk and info next");
+ avi->header_state = GST_AVI_DEMUX_HEADER_INFO;
+ } else {
+ /* Need more data */
+ return ret;
+ }
+ /* fall-though */
+ case GST_AVI_DEMUX_HEADER_INFO:
+ GST_DEBUG_OBJECT (avi, "skipping junk between header and data ...");
+ while (TRUE) {
+ if (gst_adapter_available (avi->adapter) < 12)
+ return GST_FLOW_OK;
+
+ data = gst_adapter_map (avi->adapter, 12);
+ tag = GST_READ_UINT32_LE (data);
+ size = GST_READ_UINT32_LE (data + 4);
+ ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (avi->adapter);
+
+ if (tag == GST_RIFF_TAG_LIST) {
+ switch (ltag) {
+ case GST_RIFF_LIST_movi:
+ gst_adapter_flush (avi->adapter, 12);
+ if (!avi->first_movi_offset)
+ avi->first_movi_offset = avi->offset;
+ avi->offset += 12;
+ avi->idx1_offset = avi->offset + size - 4;
+ goto skipping_done;
+ case GST_RIFF_LIST_INFO:
+ GST_DEBUG ("Found INFO chunk");
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size)) {
+ GST_DEBUG ("got size %d", size);
+ avi->offset += 12;
+ gst_adapter_flush (avi->adapter, 12);
+ if (size > 4) {
+ buf = gst_adapter_take_buffer (avi->adapter, size - 4);
+ /* mind padding */
+ if (size & 1)
+ gst_adapter_flush (avi->adapter, 1);
+ gst_riff_parse_info (GST_ELEMENT_CAST (avi), buf, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ gst_buffer_unref (buf);
+
+ avi->offset += GST_ROUND_UP_2 (size) - 4;
+ } else {
+ GST_DEBUG ("skipping INFO LIST prefix");
+ }
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ break;
+ default:
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ break;
+ }
+ } else {
+ if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
+ /* accept 0 size buffer here */
+ avi->abort_buffering = FALSE;
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
+ } else {
+ /* Need more data */
+ return GST_FLOW_OK;
+ }
+ }
+ }
+ break;
+ default:
+ GST_WARNING ("unhandled header state: %d", avi->header_state);
+ break;
+ }
+skipping_done:
+
+ GST_DEBUG_OBJECT (avi, "skipping done ... (streams=%u, stream[0].indexes=%p)",
+ avi->num_streams, avi->stream[0].indexes);
+
+ GST_DEBUG ("Found movi chunk. Starting to stream data");
+ avi->state = GST_AVI_DEMUX_MOVI;
+
+ /* no indexes in push mode, but it still sets some variables */
+ gst_avi_demux_calculate_durations_from_index (avi);
+
+ gst_avi_demux_expose_streams (avi, TRUE);
+
+ /* prepare all streams for index 0 */
+ for (i = 0; i < avi->num_streams; i++)
+ avi->stream[i].current_entry = 0;
+
+ /* create initial NEWSEGMENT event */
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (avi->seg_event, avi->segment_seqnum);
+
+ gst_avi_demux_check_seekability (avi);
+
+ /* at this point we know all the streams and we can signal the no more
+ * pads signal */
+ GST_DEBUG_OBJECT (avi, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (avi));
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+no_streams:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No streams found"));
+ return GST_FLOW_ERROR;
+ }
+header_no_list:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no LIST at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ return GST_FLOW_ERROR;
+ }
+header_no_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no avih at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ if (sub)
+ gst_buffer_unref (sub);
+
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+header_wrong_avih:
+ {
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+}
+
+static void
+gst_avi_demux_add_date_tag (GstAviDemux * avi, gint y, gint m, gint d,
+ gint h, gint min, gint s)
+{
+ GDate *date;
+ GstDateTime *dt;
+
+ date = g_date_new_dmy (d, m, y);
+ if (!g_date_valid (date)) {
+ /* bogus date */
+ GST_WARNING_OBJECT (avi, "Refusing to add invalid date %d-%d-%d", y, m, d);
+ g_date_free (date);
+ return;
+ }
+
+ dt = gst_date_time_new_local_time (y, m, d, h, min, s);
+
+ if (avi->globaltags == NULL)
+ avi->globaltags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE, GST_TAG_DATE, date,
+ NULL);
+ g_date_free (date);
+ if (dt) {
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_REPLACE, GST_TAG_DATE_TIME,
+ dt, NULL);
+ gst_date_time_unref (dt);
+ }
+}
+
+static void
+gst_avi_demux_parse_idit_nums_only (GstAviDemux * avi, gchar * data)
+{
+ gint y, m, d;
+ gint hr = 0, min = 0, sec = 0;
+ gint ret;
+
+ GST_DEBUG ("data : '%s'", data);
+
+ ret = sscanf (data, "%d:%d:%d %d:%d:%d", &y, &m, &d, &hr, &min, &sec);
+ if (ret < 3) {
+ /* Attempt YYYY/MM/DD/ HH:MM variant (found in CASIO cameras) */
+ ret = sscanf (data, "%04d/%02d/%02d/ %d:%d", &y, &m, &d, &hr, &min);
+ if (ret < 3) {
+ GST_WARNING_OBJECT (avi, "Failed to parse IDIT tag");
+ return;
+ }
+ }
+ gst_avi_demux_add_date_tag (avi, y, m, d, hr, min, sec);
+}
+
+static gint
+get_month_num (gchar * data, guint size)
+{
+ if (g_ascii_strncasecmp (data, "jan", 3) == 0) {
+ return 1;
+ } else if (g_ascii_strncasecmp (data, "feb", 3) == 0) {
+ return 2;
+ } else if (g_ascii_strncasecmp (data, "mar", 3) == 0) {
+ return 3;
+ } else if (g_ascii_strncasecmp (data, "apr", 3) == 0) {
+ return 4;
+ } else if (g_ascii_strncasecmp (data, "may", 3) == 0) {
+ return 5;
+ } else if (g_ascii_strncasecmp (data, "jun", 3) == 0) {
+ return 6;
+ } else if (g_ascii_strncasecmp (data, "jul", 3) == 0) {
+ return 7;
+ } else if (g_ascii_strncasecmp (data, "aug", 3) == 0) {
+ return 8;
+ } else if (g_ascii_strncasecmp (data, "sep", 3) == 0) {
+ return 9;
+ } else if (g_ascii_strncasecmp (data, "oct", 3) == 0) {
+ return 10;
+ } else if (g_ascii_strncasecmp (data, "nov", 3) == 0) {
+ return 11;
+ } else if (g_ascii_strncasecmp (data, "dec", 3) == 0) {
+ return 12;
+ }
+
+ return 0;
+}
+
+static void
+gst_avi_demux_parse_idit_text (GstAviDemux * avi, gchar * data)
+{
+ gint year, month, day;
+ gint hour, min, sec;
+ gint ret;
+ gchar weekday[4];
+ gchar monthstr[4];
+
+ ret = sscanf (data, "%3s %3s %d %d:%d:%d %d", weekday, monthstr, &day, &hour,
+ &min, &sec, &year);
+ if (ret != 7) {
+ GST_WARNING_OBJECT (avi, "Failed to parse IDIT tag");
+ return;
+ }
+ month = get_month_num (monthstr, strlen (monthstr));
+ gst_avi_demux_add_date_tag (avi, year, month, day, hour, min, sec);
+}
+
+static void
+gst_avi_demux_parse_idit (GstAviDemux * avi, GstBuffer * buf)
+{
+ GstMapInfo map;
+ gchar *ptr;
+ gsize left;
+ gchar *safedata = NULL;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ /*
+ * According to:
+ * http://www.eden-foundation.org/products/code/film_date_stamp/index.html
+ *
+ * This tag could be in one of the below formats
+ * 2005:08:17 11:42:43
+ * THU OCT 26 16:46:04 2006
+ * Mon Mar 3 09:44:56 2008
+ *
+ * FIXME: Our date tag doesn't include hours
+ */
+
+ /* skip eventual initial whitespace */
+ ptr = (gchar *) map.data;
+ left = map.size;
+
+ while (left > 0 && g_ascii_isspace (ptr[0])) {
+ ptr++;
+ left--;
+ }
+
+ if (left == 0) {
+ goto non_parsable;
+ }
+
+ /* make a safe copy to add a \0 to the end of the string */
+ safedata = g_strndup (ptr, left);
+
+ /* test if the first char is a alpha or a number */
+ if (g_ascii_isdigit (ptr[0])) {
+ gst_avi_demux_parse_idit_nums_only (avi, safedata);
+ g_free (safedata);
+ gst_buffer_unmap (buf, &map);
+ return;
+ } else if (g_ascii_isalpha (ptr[0])) {
+ gst_avi_demux_parse_idit_text (avi, safedata);
+ g_free (safedata);
+ gst_buffer_unmap (buf, &map);
+ return;
+ }
+
+ g_free (safedata);
+
+non_parsable:
+ GST_WARNING_OBJECT (avi, "IDIT tag has no parsable info");
+ gst_buffer_unmap (buf, &map);
+}
+
+static void
+parse_tag_value (GstAviDemux * avi, GstTagList * taglist, const gchar * type,
+ guint8 * ptr, guint tsize)
+{
+ static const gchar *env_vars[] = { "GST_AVI_TAG_ENCODING",
+ "GST_RIFF_TAG_ENCODING", "GST_TAG_ENCODING", NULL
+ };
+ GType tag_type;
+ gchar *val;
+
+ tag_type = gst_tag_get_type (type);
+ val = gst_tag_freeform_string_to_utf8 ((gchar *) ptr, tsize, env_vars);
+
+ if (val != NULL) {
+ if (tag_type == G_TYPE_STRING) {
+ gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, type, val, NULL);
+ } else {
+ GValue tag_val = { 0, };
+
+ g_value_init (&tag_val, tag_type);
+ if (gst_value_deserialize (&tag_val, val)) {
+ gst_tag_list_add_value (taglist, GST_TAG_MERGE_APPEND, type, &tag_val);
+ } else {
+ GST_WARNING_OBJECT (avi, "could not deserialize '%s' into a "
+ "tag %s of type %s", val, type, g_type_name (tag_type));
+ }
+ g_value_unset (&tag_val);
+ }
+ g_free (val);
+ } else {
+ GST_WARNING_OBJECT (avi, "could not extract %s tag", type);
+ }
+}
+
+static void
+gst_avi_demux_parse_strd (GstAviDemux * avi, GstBuffer * buf)
+{
+ GstMapInfo map;
+ guint32 tag;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (map.size > 4) {
+ guint8 *ptr = map.data;
+ gsize left = map.size;
+
+ /* parsing based on
+ * http://www.eden-foundation.org/products/code/film_date_stamp/index.html
+ */
+ tag = GST_READ_UINT32_LE (ptr);
+ if ((tag == GST_MAKE_FOURCC ('A', 'V', 'I', 'F')) && (map.size > 98)) {
+ gsize sub_size;
+
+ ptr += 98;
+ left -= 98;
+ if (!memcmp (ptr, "FUJIFILM", 8)) {
+ GST_MEMDUMP_OBJECT (avi, "fujifim tag", ptr, 48);
+
+ ptr += 10;
+ left -= 10;
+ sub_size = 0;
+ while (ptr[sub_size] && sub_size < left)
+ sub_size++;
+
+ if (avi->globaltags == NULL)
+ avi->globaltags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (avi->globaltags, GST_TAG_MERGE_APPEND,
+ GST_TAG_DEVICE_MANUFACTURER, "FUJIFILM", NULL);
+ parse_tag_value (avi, avi->globaltags, GST_TAG_DEVICE_MODEL, ptr,
+ sub_size);
+
+ while (ptr[sub_size] == '\0' && sub_size < left)
+ sub_size++;
+
+ ptr += sub_size;
+ left -= sub_size;
+ sub_size = 0;
+ while (ptr[sub_size] && sub_size < left)
+ sub_size++;
+ if (ptr[4] == ':')
+ ptr[4] = '-';
+ if (ptr[7] == ':')
+ ptr[7] = '-';
+
+ parse_tag_value (avi, avi->globaltags, GST_TAG_DATE_TIME, ptr,
+ sub_size);
+ }
+ }
+ }
+ gst_buffer_unmap (buf, &map);
+}
+
+/*
+ * gst_avi_demux_parse_ncdt:
+ * @element: caller element (used for debugging/error).
+ * @buf: input data to be used for parsing, stripped from header.
+ * @taglist: a pointer to a taglist (returned by this function)
+ * containing information about this stream. May be
+ * NULL if no supported tags were found.
+ *
+ * Parses Nikon metadata from input data.
+ */
+static void
+gst_avi_demux_parse_ncdt (GstAviDemux * avi, GstBuffer * buf,
+ GstTagList ** _taglist)
+{
+ GstMapInfo info;
+ guint8 *ptr;
+ gsize left;
+ guint tsize;
+ guint32 tag;
+ const gchar *type;
+ GstTagList *taglist;
+
+ g_return_if_fail (_taglist != NULL);
+
+ if (!buf) {
+ *_taglist = NULL;
+ return;
+ }
+ gst_buffer_map (buf, &info, GST_MAP_READ);
+
+ taglist = gst_tag_list_new_empty ();
+
+ ptr = info.data;
+ left = info.size;
+
+ while (left > 8) {
+ tag = GST_READ_UINT32_LE (ptr);
+ tsize = GST_READ_UINT32_LE (ptr + 4);
+
+ GST_MEMDUMP_OBJECT (avi, "tag chunk", ptr, MIN (tsize + 8, left));
+
+ left -= 8;
+ ptr += 8;
+
+ GST_DEBUG_OBJECT (avi, "tag %" GST_FOURCC_FORMAT ", size %u",
+ GST_FOURCC_ARGS (tag), tsize);
+
+ if (tsize > left) {
+ GST_WARNING_OBJECT (avi,
+ "Tagsize %d is larger than available data %" G_GSIZE_FORMAT,
+ tsize, left);
+ tsize = left;
+ }
+
+ /* find out the type of metadata */
+ switch (tag) {
+ case GST_RIFF_LIST_nctg:
+ while (tsize > 4) {
+ guint16 sub_tag = GST_READ_UINT16_LE (ptr);
+ guint16 sub_size = GST_READ_UINT16_LE (ptr + 2);
+
+ tsize -= 4;
+ ptr += 4;
+ left -= 4;
+
+ if (sub_size > tsize)
+ break;
+
+ GST_DEBUG_OBJECT (avi, "sub-tag %u, size %u", sub_tag, sub_size);
+ /* http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/Nikon.html#NCTG
+ * for some reason the sub_tag has a +2 offset
+ */
+ switch (sub_tag) {
+ case 0x03: /* Make */
+ type = GST_TAG_DEVICE_MANUFACTURER;
+ break;
+ case 0x04: /* Model */
+ type = GST_TAG_DEVICE_MODEL;
+ break;
+ /* TODO: 0x05: is software version, like V1.0 */
+ case 0x06: /* Software */
+ type = GST_TAG_ENCODER;
+ break;
+ case 0x13: /* CreationDate */
+ type = GST_TAG_DATE_TIME;
+ if (left > 7) {
+ if (ptr[4] == ':')
+ ptr[4] = '-';
+ if (ptr[7] == ':')
+ ptr[7] = '-';
+ }
+ break;
+ default:
+ type = NULL;
+ break;
+ }
+ if (type != NULL && ptr[0] != '\0') {
+ GST_DEBUG_OBJECT (avi, "mapped tag %u to tag %s", sub_tag, type);
+
+ parse_tag_value (avi, taglist, type, ptr, sub_size);
+ }
+
+ ptr += sub_size;
+ tsize -= sub_size;
+ left -= sub_size;
+ }
+ break;
+ default:
+ type = NULL;
+ GST_WARNING_OBJECT (avi,
+ "Unknown ncdt (metadata) tag entry %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ GST_MEMDUMP_OBJECT (avi, "Unknown ncdt", ptr, tsize);
+ break;
+ }
+
+ if (tsize & 1) {
+ tsize++;
+ if (tsize > left)
+ tsize = left;
+ }
+
+ ptr += tsize;
+ left -= tsize;
+ }
+
+ if (!gst_tag_list_is_empty (taglist)) {
+ GST_INFO_OBJECT (avi, "extracted tags: %" GST_PTR_FORMAT, taglist);
+ *_taglist = taglist;
+ } else {
+ *_taglist = NULL;
+ gst_tag_list_unref (taglist);
+ }
+ gst_buffer_unmap (buf, &info);
+
+ return;
+}
+
+/*
+ * Read full AVI headers.
+ */
+static GstFlowReturn
+gst_avi_demux_stream_header_pull (GstAviDemux * avi)
+{
+ GstFlowReturn res;
+ GstBuffer *buf, *sub = NULL;
+ guint32 tag;
+ guint offset = 4;
+ GstElement *element = GST_ELEMENT_CAST (avi);
+ GstClockTime stamp;
+ GstTagList *tags = NULL;
+ guint8 fourcc[4];
+
+ stamp = gst_util_get_timestamp ();
+
+ /* the header consists of a 'hdrl' LIST tag */
+ res = gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_range_failed;
+ else if (tag != GST_RIFF_TAG_LIST)
+ goto no_list;
+ else if (gst_buffer_get_size (buf) < 4)
+ goto no_header;
+
+ GST_DEBUG_OBJECT (avi, "parsing headers");
+
+ /* Find the 'hdrl' LIST tag */
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ while (GST_READ_UINT32_LE (fourcc) != GST_RIFF_LIST_hdrl) {
+ GST_LOG_OBJECT (avi, "buffer contains %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (fourcc)));
+
+ /* Eat up */
+ gst_buffer_unref (buf);
+
+ /* read new chunk */
+ res = gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag, &buf);
+ if (res != GST_FLOW_OK)
+ goto pull_range_failed;
+ else if (tag != GST_RIFF_TAG_LIST)
+ goto no_list;
+ else if (gst_buffer_get_size (buf) < 4)
+ goto no_header;
+ gst_buffer_extract (buf, 0, fourcc, 4);
+ }
+
+ GST_DEBUG_OBJECT (avi, "hdrl LIST tag found");
+
+ gst_avi_demux_roundup_list (avi, &buf);
+
+ /* the hdrl starts with a 'avih' header */
+ if (!gst_riff_parse_chunk (element, buf, &offset, &tag, &sub))
+ goto no_avih;
+ else if (tag != GST_RIFF_TAG_avih)
+ goto no_avih;
+ else if (!gst_avi_demux_parse_avih (avi, sub, &avi->avih))
+ goto invalid_avih;
+
+ GST_DEBUG_OBJECT (avi, "AVI header ok, reading elements from header");
+
+ /* now, read the elements from the header until the end */
+ while (gst_riff_parse_chunk (element, buf, &offset, &tag, &sub)) {
+ GstMapInfo map;
+
+ /* sub can be NULL on empty tags */
+ if (!sub)
+ continue;
+
+ gst_buffer_map (sub, &map, GST_MAP_READ);
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:
+ if (map.size < 4)
+ goto next;
+
+ switch (GST_READ_UINT32_LE (map.data)) {
+ case GST_RIFF_LIST_strl:
+ gst_buffer_unmap (sub, &map);
+ if (!(gst_avi_demux_parse_stream (avi, sub))) {
+ GST_ELEMENT_WARNING (avi, STREAM, DEMUX, (NULL),
+ ("failed to parse stream, ignoring"));
+ sub = NULL;
+ }
+ sub = NULL;
+ goto next;
+ case GST_RIFF_LIST_odml:
+ gst_buffer_unmap (sub, &map);
+ gst_avi_demux_parse_odml (avi, sub);
+ sub = NULL;
+ break;
+ case GST_RIFF_LIST_INFO:
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_resize (sub, 4, -1);
+ gst_riff_parse_info (element, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ case GST_RIFF_LIST_ncdt:
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_resize (sub, 4, -1);
+ gst_avi_demux_parse_ncdt (avi, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ gst_buffer_unref (sub);
+ sub = NULL;
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown list %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (GST_READ_UINT32_LE (map.data)));
+ GST_MEMDUMP_OBJECT (avi, "Unknown list", map.data, map.size);
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ goto next;
+ }
+ break;
+ case GST_RIFF_IDIT:
+ gst_avi_demux_parse_idit (avi, sub);
+ goto next;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Unknown tag %" GST_FOURCC_FORMAT " in AVI header",
+ GST_FOURCC_ARGS (tag));
+ GST_MEMDUMP_OBJECT (avi, "Unknown tag", map.data, map.size);
+ /* fall-through */
+ case GST_RIFF_TAG_JUNQ:
+ case GST_RIFF_TAG_JUNK:
+ next:
+ if (sub) {
+ gst_buffer_unmap (sub, &map);
+ gst_buffer_unref (sub);
+ }
+ sub = NULL;
+ break;
+ }
+ }
+ gst_buffer_unref (buf);
+ GST_DEBUG ("elements parsed");
+
+ /* check parsed streams */
+ if (avi->num_streams == 0)
+ goto no_streams;
+ else if (avi->num_streams != avi->avih->streams) {
+ GST_WARNING_OBJECT (avi,
+ "Stream header mentioned %d streams, but %d available",
+ avi->avih->streams, avi->num_streams);
+ }
+
+ GST_DEBUG_OBJECT (avi, "skipping junk between header and data, offset=%"
+ G_GUINT64_FORMAT, avi->offset);
+
+ /* Now, find the data (i.e. skip all junk between header and data) */
+ do {
+ GstMapInfo map;
+ guint size;
+ guint32 tag, ltag;
+
+ buf = NULL;
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, 12, &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "pull_range failure while looking for tags");
+ goto pull_range_failed;
+ } else if (gst_buffer_get_size (buf) < 12) {
+ GST_DEBUG_OBJECT (avi,
+ "got %" G_GSIZE_FORMAT " bytes which is less than 12 bytes",
+ gst_buffer_get_size (buf));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ ltag = GST_READ_UINT32_LE (map.data + 8);
+
+ GST_DEBUG ("tag %" GST_FOURCC_FORMAT ", size %u",
+ GST_FOURCC_ARGS (tag), size);
+ GST_MEMDUMP ("Tag content", map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ switch (tag) {
+ case GST_RIFF_TAG_LIST:{
+ switch (ltag) {
+ case GST_RIFF_LIST_movi:
+ GST_DEBUG_OBJECT (avi,
+ "Reached the 'movi' tag, we're done with skipping");
+ goto skipping_done;
+ case GST_RIFF_LIST_INFO:
+ res =
+ gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag,
+ &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
+ goto pull_range_failed;
+ }
+ GST_DEBUG ("got size %" G_GSIZE_FORMAT, gst_buffer_get_size (buf));
+ if (size < 4) {
+ GST_DEBUG ("skipping INFO LIST prefix");
+ avi->offset += (4 - GST_ROUND_UP_2 (size));
+ gst_buffer_unref (buf);
+ continue;
+ }
+
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, 4, -1);
+ gst_riff_parse_info (element, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ gst_buffer_unref (buf);
+ /* gst_riff_read_chunk() has already advanced avi->offset */
+ break;
+ case GST_RIFF_LIST_ncdt:
+ res =
+ gst_riff_read_chunk (element, avi->sinkpad, &avi->offset, &tag,
+ &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read ncdt chunk");
+ goto pull_range_failed;
+ }
+ GST_DEBUG ("got size %" G_GSIZE_FORMAT, gst_buffer_get_size (buf));
+ if (size < 4) {
+ GST_DEBUG ("skipping ncdt LIST prefix");
+ avi->offset += (4 - GST_ROUND_UP_2 (size));
+ gst_buffer_unref (buf);
+ continue;
+ }
+
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, 4, -1);
+ gst_avi_demux_parse_ncdt (avi, sub, &tags);
+ if (tags) {
+ if (avi->globaltags) {
+ gst_tag_list_insert (avi->globaltags, tags,
+ GST_TAG_MERGE_REPLACE);
+ gst_tag_list_unref (tags);
+ } else {
+ avi->globaltags = tags;
+ }
+ }
+ tags = NULL;
+ if (sub) {
+ gst_buffer_unref (sub);
+ sub = NULL;
+ }
+ gst_buffer_unref (buf);
+ /* gst_riff_read_chunk() has already advanced avi->offset */
+ break;
+ default:
+ GST_WARNING_OBJECT (avi,
+ "Skipping unknown list tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ltag));
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ }
+ }
+ break;
+ default:
+ GST_WARNING_OBJECT (avi, "Skipping unknown tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ /* Fall-through */
+ case GST_MAKE_FOURCC ('J', 'U', 'N', 'Q'):
+ case GST_MAKE_FOURCC ('J', 'U', 'N', 'K'):
+ /* Only get buffer for debugging if the memdump is needed */
+ if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= 9) {
+ buf = NULL;
+ res = gst_pad_pull_range (avi->sinkpad, avi->offset, size, &buf);
+ if (res != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (avi, "couldn't read INFO chunk");
+ goto pull_range_failed;
+ }
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ GST_MEMDUMP ("Junk", map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ }
+ avi->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ }
+ } while (1);
+skipping_done:
+
+ GST_DEBUG_OBJECT (avi, "skipping done ... (streams=%u, stream[0].indexes=%p)",
+ avi->num_streams, avi->stream[0].indexes);
+
+ /* create or read stream index (for seeking) */
+ if (avi->stream[0].indexes != NULL) {
+ /* we read a super index already (gst_avi_demux_parse_superindex() ) */
+ gst_avi_demux_read_subindexes_pull (avi);
+ }
+ if (!avi->have_index) {
+ if (avi->avih->flags & GST_RIFF_AVIH_HASINDEX)
+ gst_avi_demux_stream_index (avi);
+
+ /* still no index, scan */
+ if (!avi->have_index) {
+ gst_avi_demux_stream_scan (avi);
+
+ /* still no index.. this is a fatal error for now.
+ * FIXME, we should switch to plain push mode without seeking
+ * instead of failing. */
+ if (!avi->have_index)
+ goto no_index;
+ }
+ }
+ /* use the indexes now to construct nice durations */
+ gst_avi_demux_calculate_durations_from_index (avi);
+
+ gst_avi_demux_expose_streams (avi, FALSE);
+
+ /* do initial seek to the default segment values */
+ gst_avi_demux_do_seek (avi, &avi->segment, 0);
+
+ /* create initial NEWSEGMENT event */
+ if (avi->seg_event)
+ gst_event_unref (avi->seg_event);
+ avi->seg_event = gst_event_new_segment (&avi->segment);
+ if (avi->segment_seqnum)
+ gst_event_set_seqnum (avi->seg_event, avi->segment_seqnum);
+
+ stamp = gst_util_get_timestamp () - stamp;
+ GST_DEBUG_OBJECT (avi, "pulling header took %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (stamp));
+
+ /* at this point we know all the streams and we can signal the no more
+ * pads signal */
+ GST_DEBUG_OBJECT (avi, "signaling no more pads");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (avi));
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+no_list:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no LIST at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+no_header:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no hdrl at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+no_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (no avih at start): %"
+ GST_FOURCC_FORMAT, GST_FOURCC_ARGS (tag)));
+ if (sub)
+ gst_buffer_unref (sub);
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+invalid_avih:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Invalid AVI header (cannot parse avih at start)"));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+no_streams:
+ {
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("No streams found"));
+ return GST_FLOW_ERROR;
+ }
+no_index:
+ {
+ GST_WARNING ("file without or too big index");
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("Could not get/create index"));
+ return GST_FLOW_ERROR;
+ }
+pull_range_failed:
+ {
+ if (res == GST_FLOW_FLUSHING)
+ return res;
+ GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL),
+ ("pull_range flow reading header: %s", gst_flow_get_name (res)));
+ return res;
+ }
+}
+
+/* move a stream to @index */
+static void
+gst_avi_demux_move_stream (GstAviDemux * avi, GstAviStream * stream,
+ GstSegment * segment, guint index)
+{
+ GST_DEBUG_OBJECT (avi, "Move stream %d to %u", stream->num, index);
+
+ if (segment->rate < 0.0) {
+ guint next_key;
+ /* Because we don't know the frame order we need to push from the prev keyframe
+ * to the next keyframe. If there is a smart decoder downstream he will notice
+ * that there are too many encoded frames send and return EOS when there
+ * are enough decoded frames to fill the segment. */
+ next_key = gst_avi_demux_index_next (avi, stream, index, TRUE);
+
+ /* FIXME, we go back to 0, we should look at segment.start. We will however
+ * stop earlier when the see the timestamp < segment.start */
+ stream->start_entry = 0;
+ stream->step_entry = index;
+ stream->current_entry = index;
+ stream->stop_entry = next_key;
+
+ GST_DEBUG_OBJECT (avi, "reverse seek: start %u, step %u, stop %u",
+ stream->start_entry, stream->step_entry, stream->stop_entry);
+ } else {
+ stream->start_entry = index;
+ stream->step_entry = index;
+ stream->stop_entry = gst_avi_demux_index_last (avi, stream);
+ }
+ if (stream->current_entry != index) {
+ GST_DEBUG_OBJECT (avi, "Move DISCONT from %u to %u",
+ stream->current_entry, index);
+ stream->current_entry = index;
+ stream->discont = TRUE;
+ }
+
+ /* update the buffer info */
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+
+ GST_DEBUG_OBJECT (avi, "Moved to %u, ts %" GST_TIME_FORMAT
+ ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, index,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+
+ GST_DEBUG_OBJECT (avi, "Seeking to offset %" G_GUINT64_FORMAT,
+ stream->index[index].offset);
+}
+
+/*
+ * Do the actual seeking.
+ */
+static gboolean
+gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
+ GstSeekFlags flags)
+{
+ GstClockTime seek_time;
+ gboolean keyframe, before, after;
+ guint i, index;
+ GstAviStream *stream;
+ gboolean next;
+
+ seek_time = segment->position;
+ keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+ after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+
+ GST_DEBUG_OBJECT (avi, "seek to: %" GST_TIME_FORMAT
+ " keyframe seeking:%d, %s", GST_TIME_ARGS (seek_time), keyframe,
+ snap_types[before ? 1 : 0][after ? 1 : 0]);
+
+ /* FIXME, this code assumes the main stream with keyframes is stream 0,
+ * which is mostly correct... */
+ stream = &avi->stream[avi->main_stream];
+
+ next = after && !before;
+ if (segment->rate < 0)
+ next = !next;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, stream, seek_time, next);
+ GST_DEBUG_OBJECT (avi, "Got entry %u", index);
+ if (index == -1)
+ return FALSE;
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
+ if (next) {
+ GST_DEBUG_OBJECT (avi, "not keyframe, searching forward");
+ /* now go to the next keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_next (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "next keyframe at %u", index);
+ } else {
+ GST_DEBUG_OBJECT (avi, "not keyframe, searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_prev (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "previous keyframe at %u", index);
+ }
+ }
+
+ /* move the main stream to this position */
+ gst_avi_demux_move_stream (avi, stream, segment, index);
+
+ if (keyframe) {
+ /* when seeking to a keyframe, we update the result seek time
+ * to the time of the keyframe. */
+ seek_time = stream->current_timestamp;
+ GST_DEBUG_OBJECT (avi, "keyframe adjusted to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seek_time));
+ /* the seek time is always the position ... */
+ segment->position = seek_time;
+ /* ... and start and stream time when going forwards,
+ * otherwise only stop time */
+ if (segment->rate > 0.0)
+ segment->start = segment->time = seek_time;
+ else
+ segment->stop = seek_time;
+ }
+
+ /* now set DISCONT and align the other streams */
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *ostream;
+
+ ostream = &avi->stream[i];
+ if ((ostream == stream) || (ostream->index == NULL))
+ continue;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, ostream, seek_time, FALSE);
+ if (index == -1)
+ continue;
+
+ /* move to previous keyframe */
+ if (!ENTRY_IS_KEYFRAME (&ostream->index[index]))
+ index = gst_avi_demux_index_prev (avi, ostream, index, TRUE);
+
+ gst_avi_demux_move_stream (avi, ostream, segment, index);
+ }
+ GST_DEBUG_OBJECT (avi, "done seek to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seek_time));
+
+ return TRUE;
+}
+
+/*
+ * Handle seek event in pull mode.
+ */
+static gboolean
gst_avi_demux_handle_seek (GstAviDemux * avi, GstPad * pad, GstEvent * event)
{
  /* Pull-mode seek handler: normalises the seek to TIME format, stops the
   * streaming task (flushing or pausing), reconfigures avi->segment under
   * the STREAM_LOCK and restarts streaming. Returns FALSE only when the
   * requested format cannot be converted to TIME. */
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
  gint64 cur, stop;
  gboolean flush;
  gboolean update;
  GstSegment seeksegment = { 0, };
  gint i;
  guint32 seqnum = 0;

  if (event) {
    GST_DEBUG_OBJECT (avi, "doing seek with event");

    gst_event_parse_seek (event, &rate, &format, &flags,
        &cur_type, &cur, &stop_type, &stop);
    /* propagate the seek seqnum to every event/message we emit below */
    seqnum = gst_event_get_seqnum (event);

    /* we have to have a format as the segment format. Try to convert
     * if not. */
    if (format != GST_FORMAT_TIME) {
      gboolean res = TRUE;

      if (cur_type != GST_SEEK_TYPE_NONE)
        res = gst_pad_query_convert (pad, format, cur, GST_FORMAT_TIME, &cur);
      if (res && stop_type != GST_SEEK_TYPE_NONE)
        res = gst_pad_query_convert (pad, format, stop, GST_FORMAT_TIME, &stop);
      if (!res)
        goto no_format;

      format = GST_FORMAT_TIME;
    }
    GST_DEBUG_OBJECT (avi,
        "seek requested: rate %g cur %" GST_TIME_FORMAT " stop %"
        GST_TIME_FORMAT, rate, GST_TIME_ARGS (cur), GST_TIME_ARGS (stop));
    /* FIXME: can we do anything with rate!=1.0 */
  } else {
    /* eventless seek: just re-run the current segment */
    GST_DEBUG_OBJECT (avi, "doing seek without event");
    flags = 0;
    rate = 1.0;
  }

  /* save flush flag */
  flush = flags & GST_SEEK_FLAG_FLUSH;

  if (flush) {
    GstEvent *fevent = gst_event_new_flush_start ();

    if (seqnum)
      gst_event_set_seqnum (fevent, seqnum);
    /* for a flushing seek, we send a flush_start on all pads. This will
     * eventually stop streaming with a WRONG_STATE. We can thus eventually
     * take the STREAM_LOCK. */
    GST_DEBUG_OBJECT (avi, "sending flush start");
    gst_avi_demux_push_event (avi, gst_event_ref (fevent));
    gst_pad_push_event (avi->sinkpad, fevent);
  } else {
    /* a non-flushing seek, we PAUSE the task so that we can take the
     * STREAM_LOCK */
    GST_DEBUG_OBJECT (avi, "non flushing seek, pausing task");
    gst_pad_pause_task (avi->sinkpad);
  }

  /* wait for streaming to stop */
  GST_DEBUG_OBJECT (avi, "wait for streaming to stop");
  GST_PAD_STREAM_LOCK (avi->sinkpad);

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));

  if (event) {
    GST_DEBUG_OBJECT (avi, "configuring seek");
    gst_segment_do_seek (&seeksegment, rate, format, flags,
        cur_type, cur, stop_type, stop, &update);
  }
  /* do the seek, seeksegment.position contains the new position, this
   * actually never fails. */
  gst_avi_demux_do_seek (avi, &seeksegment, flags);

  if (flush) {
    GstEvent *fevent = gst_event_new_flush_stop (TRUE);

    if (seqnum)
      gst_event_set_seqnum (fevent, seqnum);

    GST_DEBUG_OBJECT (avi, "sending flush stop");
    gst_avi_demux_push_event (avi, gst_event_ref (fevent));
    gst_pad_push_event (avi->sinkpad, fevent);
  }

  /* now update the real segment info */
  memcpy (&avi->segment, &seeksegment, sizeof (GstSegment));

  /* post the SEGMENT_START message when we do segmented playback */
  if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
    GstMessage *segment_start_msg =
        gst_message_new_segment_start (GST_OBJECT_CAST (avi),
        avi->segment.format, avi->segment.position);
    if (seqnum)
      gst_message_set_seqnum (segment_start_msg, seqnum);
    gst_element_post_message (GST_ELEMENT_CAST (avi), segment_start_msg);
  }

  /* queue the segment event for the streaming thread; it is pushed from
   * gst_avi_demux_loop() before the next data buffer */
  if (avi->seg_event)
    gst_event_unref (avi->seg_event);
  avi->seg_event = gst_event_new_segment (&avi->segment);
  if (seqnum)
    gst_event_set_seqnum (avi->seg_event, seqnum);
  avi->segment_seqnum = seqnum;

  if (!avi->streaming) {
    gst_pad_start_task (avi->sinkpad, (GstTaskFunction) gst_avi_demux_loop,
        avi->sinkpad, NULL);
  }
  /* reset the last flow and mark discont, seek is always DISCONT */
  for (i = 0; i < avi->num_streams; i++) {
    GST_DEBUG_OBJECT (avi, "marking DISCONT");
    avi->stream[i].discont = TRUE;
  }
  /* likewise for the whole new segment */
  gst_flow_combiner_reset (avi->flowcombiner);
  GST_PAD_STREAM_UNLOCK (avi->sinkpad);

  return TRUE;

  /* ERRORS */
no_format:
  {
    GST_DEBUG_OBJECT (avi, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
+
+/*
+ * Handle seek event in push mode.
+ */
+static gboolean
+avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
+{
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
+ gint64 cur, stop;
+ gboolean keyframe, before, after, next;
+ GstAviStream *stream;
+ guint index;
+ guint n, str_num;
+ guint64 min_offset;
+ GstSegment seeksegment;
+ gboolean update;
+
+ /* check we have the index */
+ if (!avi->have_index) {
+ GST_DEBUG_OBJECT (avi, "no seek index built, seek aborted.");
+ return FALSE;
+ } else {
+ GST_DEBUG_OBJECT (avi, "doing push-based seek with event");
+ }
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+
+ if (format != GST_FORMAT_TIME) {
+ gboolean res = TRUE;
+
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, cur, GST_FORMAT_TIME, &cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, format, stop, GST_FORMAT_TIME, &stop);
+ if (!res) {
+ GST_DEBUG_OBJECT (avi, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+
+ format = GST_FORMAT_TIME;
+ }
+
+ /* let gst_segment handle any tricky stuff */
+ GST_DEBUG_OBJECT (avi, "configuring seek");
+ memcpy (&seeksegment, &avi->segment, sizeof (GstSegment));
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+
+ keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+ cur = seeksegment.position;
+ before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+ after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek requested: ts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
+ ", kf %u, %s, rate %lf", GST_TIME_ARGS (cur), GST_TIME_ARGS (stop),
+ keyframe, snap_types[before ? 1 : 0][after ? 1 : 0], rate);
+
+ if (rate < 0) {
+ GST_DEBUG_OBJECT (avi, "negative rate seek not supported in push mode");
+ return FALSE;
+ }
+
+ /* FIXME, this code assumes the main stream with keyframes is stream 0,
+ * which is mostly correct... */
+ str_num = avi->main_stream;
+ stream = &avi->stream[str_num];
+
+ next = after && !before;
+ if (seeksegment.rate < 0)
+ next = !next;
+
+ /* get the entry index for the requested position */
+ index = gst_avi_demux_index_for_time (avi, stream, cur, next);
+ GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT,
+ str_num, index, GST_TIME_ARGS (cur));
+ if (index == -1)
+ return -1;
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
+ if (next) {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
+ /* now go to the next keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_next (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found next keyframe at %u", index);
+ } else {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ index = gst_avi_demux_index_prev (avi, stream, index, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found previous keyframe at %u", index);
+ }
+ }
+
+ gst_avi_demux_get_buffer_info (avi, stream, index,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+
+ /* re-use cur to be the timestamp of the seek as it _will_ be */
+ cur = stream->current_timestamp;
+
+ min_offset = stream->index[index].offset;
+ avi->seek_kf_offset = min_offset - 8;
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek to: ts %" GST_TIME_FORMAT " (on str %u, idx %u, offset %"
+ G_GUINT64_FORMAT ")", GST_TIME_ARGS (stream->current_timestamp), str_num,
+ index, min_offset);
+
+ for (n = 0; n < avi->num_streams; n++) {
+ GstAviStream *str = &avi->stream[n];
+ guint idx;
+
+ if (n == avi->main_stream)
+ continue;
+
+ /* get the entry index for the requested position */
+ idx = gst_avi_demux_index_for_time (avi, str, cur, FALSE);
+ GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT, n,
+ idx, GST_TIME_ARGS (cur));
+ if (idx == -1)
+ continue;
+
+ /* check if we are already on a keyframe */
+ if (!ENTRY_IS_KEYFRAME (&str->index[idx])) {
+ if (next) {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
+ /* now go to the next keyframe, this is where we should start
+ * decoding from. */
+ idx = gst_avi_demux_index_next (avi, str, idx, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found next keyframe at %u", idx);
+ } else {
+ GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching back");
+ /* now go to the previous keyframe, this is where we should start
+ * decoding from. */
+ idx = gst_avi_demux_index_prev (avi, str, idx, TRUE);
+ GST_DEBUG_OBJECT (avi, "Found previous keyframe at %u", idx);
+ }
+ }
+
+ gst_avi_demux_get_buffer_info (avi, str, idx,
+ &str->current_timestamp, &str->current_ts_end,
+ &str->current_offset, &str->current_offset_end);
+
+ if (str->index[idx].offset < min_offset) {
+ min_offset = str->index[idx].offset;
+ GST_DEBUG_OBJECT (avi,
+ "Found an earlier offset at %" G_GUINT64_FORMAT ", str %u",
+ min_offset, n);
+ str_num = n;
+ stream = str;
+ index = idx;
+ }
+ }
+
+ GST_DEBUG_OBJECT (avi,
+ "Seek performed: str %u, offset %" G_GUINT64_FORMAT ", idx %u, ts %"
+ GST_TIME_FORMAT ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, str_num, min_offset, index,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+
+ /* index data refers to data, not chunk header (for pull mode convenience) */
+ min_offset -= 8;
+ GST_DEBUG_OBJECT (avi, "seeking to chunk at offset %" G_GUINT64_FORMAT,
+ min_offset);
+
+ if (!perform_seek_to_offset (avi, min_offset, gst_event_get_seqnum (event))) {
+ GST_DEBUG_OBJECT (avi, "seek event failed!");
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/*
+ * Handle whether we can perform the seek event or if we have to let the chain
+ * function handle seeks to build the seek indexes first.
+ */
+static gboolean
+gst_avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad,
+ GstEvent * event)
+{
+ /* check for having parsed index already */
+ if (!avi->have_index) {
+ guint64 offset = 0;
+ gboolean building_index;
+
+ GST_OBJECT_LOCK (avi);
+ /* handle the seek event in the chain function */
+ avi->state = GST_AVI_DEMUX_SEEK;
+
+ /* copy the event */
+ if (avi->seek_event)
+ gst_event_unref (avi->seek_event);
+ avi->seek_event = gst_event_ref (event);
+
+ /* set the building_index flag so that only one thread can setup the
+ * structures for index seeking. */
+ building_index = avi->building_index;
+ if (!building_index) {
+ avi->building_index = TRUE;
+ if (avi->stream[0].indexes) {
+ avi->odml_stream = 0;
+ avi->odml_subidxs = avi->stream[avi->odml_stream].indexes;
+ offset = avi->odml_subidxs[0];
+ } else {
+ offset = avi->idx1_offset;
+ }
+ }
+ GST_OBJECT_UNLOCK (avi);
+
+ if (!building_index) {
+ /* seek to the first subindex or legacy index */
+ GST_INFO_OBJECT (avi,
+ "Seeking to legacy index/first subindex at %" G_GUINT64_FORMAT,
+ offset);
+ return perform_seek_to_offset (avi, offset, gst_event_get_seqnum (event));
+ }
+
+ /* FIXME: we have to always return true so that we don't block the seek
+ * thread.
+ * Note: maybe it is OK to return true if we're still building the index */
+ return TRUE;
+ }
+
+ return avi_demux_handle_seek_push (avi, pad, event);
+}
+
+/*
+ * Helper for gst_avi_demux_invert()
+ */
+static inline void
+swap_line (guint8 * d1, guint8 * d2, guint8 * tmp, gint bytes)
+{
+ memcpy (tmp, d1, bytes);
+ memcpy (d1, d2, bytes);
+ memcpy (d2, tmp, bytes);
+}
+
+
/* TRUE when @fourcc denotes raw/uncompressed RGB video stored as a DIB */
#define gst_avi_demux_is_uncompressed(fourcc) \
  (fourcc == GST_RIFF_DIB || \
   fourcc == GST_RIFF_rgb || \
   fourcc == GST_RIFF_RGB || fourcc == GST_RIFF_RAW)
+
+/*
+ * Invert DIB buffers... Takes existing buffer and
+ * returns either the buffer or a new one (with old
+ * one dereferenced).
+ * FIXME: can't we preallocate tmp? and remember stride, bpp?
+ */
+static GstBuffer *
+gst_avi_demux_invert (GstAviStream * stream, GstBuffer * buf)
+{
+ gint y, w, h;
+ gint bpp, stride;
+ guint8 *tmp = NULL;
+ GstMapInfo map;
+ guint32 fourcc;
+
+ if (stream->strh->type != GST_RIFF_FCC_vids)
+ return buf;
+
+ if (stream->strf.vids == NULL) {
+ GST_WARNING ("Failed to retrieve vids for stream");
+ return buf;
+ }
+
+ fourcc = (stream->strf.vids->compression) ?
+ stream->strf.vids->compression : stream->strh->fcc_handler;
+ if (!gst_avi_demux_is_uncompressed (fourcc)) {
+ return buf; /* Ignore non DIB buffers */
+ }
+
+ /* raw rgb data is stored topdown, but instead of inverting the buffer, */
+ /* some tools just negate the height field in the header (e.g. ffmpeg) */
+ if (((gint32) stream->strf.vids->height) < 0)
+ return buf;
+
+ h = stream->strf.vids->height;
+ w = stream->strf.vids->width;
+ bpp = stream->strf.vids->bit_cnt ? stream->strf.vids->bit_cnt : 8;
+ stride = GST_ROUND_UP_4 (w * (bpp / 8));
+
+ buf = gst_buffer_make_writable (buf);
+
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+ if (map.size < (stride * h)) {
+ GST_WARNING ("Buffer is smaller than reported Width x Height x Depth");
+ gst_buffer_unmap (buf, &map);
+ return buf;
+ }
+
+ tmp = g_malloc (stride);
+
+ for (y = 0; y < h / 2; y++) {
+ swap_line (map.data + stride * y, map.data + stride * (h - 1 - y), tmp,
+ stride);
+ }
+
+ g_free (tmp);
+
+ gst_buffer_unmap (buf, &map);
+
+ /* append palette to paletted RGB8 buffer data */
+ if (stream->rgb8_palette != NULL)
+ buf = gst_buffer_append (buf, gst_buffer_ref (stream->rgb8_palette));
+
+ return buf;
+}
+
/* NOTE(review): disabled GstIndex-based time<->offset association code,
 * kept for reference only; it is never compiled. */
#if 0
static void
gst_avi_demux_add_assoc (GstAviDemux * avi, GstAviStream * stream,
    GstClockTime timestamp, guint64 offset, gboolean keyframe)
{
  /* do not add indefinitely for open-ended streaming */
  if (G_UNLIKELY (avi->element_index && avi->seekable)) {
    GST_LOG_OBJECT (avi, "adding association %" GST_TIME_FORMAT "-> %"
        G_GUINT64_FORMAT, GST_TIME_ARGS (timestamp), offset);
    gst_index_add_association (avi->element_index, avi->index_id,
        keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
        GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, timestamp,
        GST_FORMAT_BYTES, offset, NULL);
    /* current_entry is DEFAULT (frame #) */
    gst_index_add_association (avi->element_index, stream->index_id,
        keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
        GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, timestamp,
        GST_FORMAT_BYTES, offset, GST_FORMAT_DEFAULT, stream->current_entry,
        NULL);
  }
}
#endif
+
+/*
+ * Returns the aggregated GstFlowReturn.
+ */
+static GstFlowReturn
+gst_avi_demux_combine_flows (GstAviDemux * avi, GstAviStream * stream,
+ GstFlowReturn ret)
+{
+ GST_LOG_OBJECT (avi, "Stream %s:%s flow return: %s",
+ GST_DEBUG_PAD_NAME (stream->pad), gst_flow_get_name (ret));
+ ret = gst_flow_combiner_update_pad_flow (avi->flowcombiner, stream->pad, ret);
+ GST_LOG_OBJECT (avi, "combined to return %s", gst_flow_get_name (ret));
+
+ return ret;
+}
+
+/* move @stream to the next position in its index */
+static GstFlowReturn
+gst_avi_demux_advance (GstAviDemux * avi, GstAviStream * stream,
+ GstFlowReturn ret)
+{
+ guint old_entry, new_entry;
+
+ old_entry = stream->current_entry;
+ /* move forwards */
+ new_entry = old_entry + 1;
+
+ /* see if we reached the end */
+ if (new_entry >= stream->stop_entry) {
+ if (avi->segment.rate < 0.0) {
+ if (stream->step_entry == stream->start_entry) {
+ /* we stepped all the way to the start, eos */
+ GST_DEBUG_OBJECT (avi, "reverse reached start %u", stream->start_entry);
+ goto eos;
+ }
+ /* backwards, stop becomes step, find a new step */
+ stream->stop_entry = stream->step_entry;
+ stream->step_entry = gst_avi_demux_index_prev (avi, stream,
+ stream->stop_entry, TRUE);
+
+ GST_DEBUG_OBJECT (avi,
+ "reverse playback jump: start %u, step %u, stop %u",
+ stream->start_entry, stream->step_entry, stream->stop_entry);
+
+ /* and start from the previous keyframe now */
+ new_entry = stream->step_entry;
+ } else {
+ /* EOS */
+ GST_DEBUG_OBJECT (avi, "forward reached stop %u", stream->stop_entry);
+ goto eos;
+ }
+ }
+
+ if (new_entry != old_entry) {
+ stream->current_entry = new_entry;
+ stream->current_total = stream->index[new_entry].total;
+
+ if (new_entry == old_entry + 1) {
+ GST_DEBUG_OBJECT (avi, "moved forwards from %u to %u",
+ old_entry, new_entry);
+ /* we simply moved one step forwards, reuse current info */
+ stream->current_timestamp = stream->current_ts_end;
+ stream->current_offset = stream->current_offset_end;
+ gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+ NULL, &stream->current_ts_end, NULL, &stream->current_offset_end);
+ } else {
+ /* we moved DISCONT, full update */
+ gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+ &stream->current_timestamp, &stream->current_ts_end,
+ &stream->current_offset, &stream->current_offset_end);
+ /* and MARK discont for this stream */
+ stream->discont = TRUE;
+ GST_DEBUG_OBJECT (avi, "Moved from %u to %u, ts %" GST_TIME_FORMAT
+ ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+ ", off_end %" G_GUINT64_FORMAT, old_entry, new_entry,
+ GST_TIME_ARGS (stream->current_timestamp),
+ GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+ stream->current_offset_end);
+ }
+ }
+ return ret;
+
+ /* ERROR */
+eos:
+ {
+ GST_DEBUG_OBJECT (avi, "we are EOS");
+ /* setting current_timestamp to -1 marks EOS */
+ stream->current_timestamp = -1;
+ return GST_FLOW_EOS;
+ }
+}
+
+/* find the stream with the lowest current position when going forwards or with
+ * the highest position when going backwards, this is the stream
+ * we should push from next */
+static gint
+gst_avi_demux_find_next (GstAviDemux * avi, gfloat rate)
+{
+ guint64 min_time, max_time;
+ guint stream_num, i;
+
+ max_time = 0;
+ min_time = G_MAXUINT64;
+ stream_num = -1;
+
+ for (i = 0; i < avi->num_streams; i++) {
+ guint64 position;
+ GstAviStream *stream;
+
+ stream = &avi->stream[i];
+
+ /* ignore streams that finished */
+ if (stream->pad && GST_PAD_LAST_FLOW_RETURN (stream->pad) == GST_FLOW_EOS)
+ continue;
+
+ position = stream->current_timestamp;
+
+ /* position of -1 is EOS */
+ if (position != -1) {
+ if (rate > 0.0 && position < min_time) {
+ min_time = position;
+ stream_num = i;
+ } else if (rate < 0.0 && position >= max_time) {
+ max_time = position;
+ stream_num = i;
+ }
+ }
+ }
+ return stream_num;
+}
+
+static GstBuffer *
+gst_avi_demux_align_buffer (GstAviDemux * demux,
+ GstBuffer * buffer, gsize alignment)
+{
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (map.size < sizeof (guintptr)) {
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
+ if (((guintptr) map.data) & (alignment - 1)) {
+ GstBuffer *new_buffer;
+ GstAllocationParams params = { 0, alignment - 1, 0, 0, };
+
+ new_buffer = gst_buffer_new_allocate (NULL,
+ gst_buffer_get_size (buffer), &params);
+
+ /* Copy data "by hand", so ensure alignment is kept: */
+ gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+ gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ GST_DEBUG_OBJECT (demux,
+ "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+ alignment);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return new_buffer;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+}
+
static GstFlowReturn
gst_avi_demux_loop_data (GstAviDemux * avi)
{
  /* Pull-mode streaming core: picks the next stream to service, pulls one
   * index entry's worth of data from the sink pad, timestamps and pushes
   * the buffer, then advances that stream's index position. Loops until
   * one buffer was actually pushed (or an error/EOS occurs) and returns
   * the combined flow. */
  GstFlowReturn ret = GST_FLOW_OK;
  guint stream_num;
  GstAviStream *stream;
  gboolean processed = FALSE;
  GstBuffer *buf;
  guint64 offset, size;
  GstClockTime timestamp, duration;
  guint64 out_offset, out_offset_end;
  gboolean keyframe;
  GstAviIndexEntry *entry;

  do {
    stream_num = gst_avi_demux_find_next (avi, avi->segment.rate);

    /* all are EOS */
    if (G_UNLIKELY (stream_num == -1)) {
      GST_DEBUG_OBJECT (avi, "all streams are EOS");
      goto eos;
    }

    /* we have the stream now */
    stream = &avi->stream[stream_num];

    /* skip streams without pads */
    if (!stream->pad) {
      GST_DEBUG_OBJECT (avi, "skipping entry from stream %d without pad",
          stream_num);
      goto next;
    }

    /* get the timing info for the entry */
    timestamp = stream->current_timestamp;
    duration = stream->current_ts_end - timestamp;
    out_offset = stream->current_offset;
    out_offset_end = stream->current_offset_end;

    /* get the entry data info */
    entry = &stream->index[stream->current_entry];
    offset = entry->offset;
    size = entry->size;
    keyframe = ENTRY_IS_KEYFRAME (entry);

    /* skip empty entries */
    if (size == 0) {
      GST_DEBUG_OBJECT (avi, "Skipping entry %u (%" G_GUINT64_FORMAT ", %p)",
          stream->current_entry, size, stream->pad);
      goto next;
    }

    /* a keyframe past the segment boundary means this stream is done */
    if (avi->segment.rate > 0.0) {
      /* only check this for forwards playback for now */
      if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
          && (timestamp > avi->segment.stop)) {
        goto eos_stop;
      }
    } else {
      if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.start)
          && (timestamp < avi->segment.start))
        goto eos_stop;
    }

    GST_LOG ("reading buffer (size=%" G_GUINT64_FORMAT "), stream %d, pos %"
        G_GUINT64_FORMAT " (0x%" G_GINT64_MODIFIER "x), kf %d", size,
        stream_num, offset, offset, keyframe);

    /* FIXME, check large chunks and cut them up */

    /* pull in the data */
    buf = NULL;
    ret = gst_pad_pull_range (avi->sinkpad, offset, size, &buf);
    if (ret != GST_FLOW_OK)
      goto pull_failed;

    /* check for short buffers, this is EOS as well */
    if (gst_buffer_get_size (buf) < size)
      goto short_buffer;

    /* invert the picture if needed, and append palette for RGB8P */
    buf = gst_avi_demux_invert (stream, buf);

    /* mark non-keyframes; only keyframes (and raw streams) carry a PTS */
    if (keyframe || stream->is_raw) {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      GST_BUFFER_PTS (buf) = timestamp;
    } else {
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
      GST_BUFFER_PTS (buf) = GST_CLOCK_TIME_NONE;
    }

    GST_BUFFER_DTS (buf) = timestamp;

    GST_BUFFER_DURATION (buf) = duration;
    GST_BUFFER_OFFSET (buf) = out_offset;
    GST_BUFFER_OFFSET_END (buf) = out_offset_end;

    /* mark discont when pending */
    if (stream->discont) {
      GST_DEBUG_OBJECT (avi, "setting DISCONT flag");
      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
      stream->discont = FALSE;
    } else {
      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
    }
#if 0
    gst_avi_demux_add_assoc (avi, stream, timestamp, offset, keyframe);
#endif

    /* update current position in the segment */
    avi->segment.position = timestamp;

    GST_DEBUG_OBJECT (avi, "Pushing buffer of size %" G_GSIZE_FORMAT ", ts %"
        GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
        ", off_end %" G_GUINT64_FORMAT,
        gst_buffer_get_size (buf), GST_TIME_ARGS (timestamp),
        GST_TIME_ARGS (duration), out_offset, out_offset_end);

    if (stream->alignment > 1)
      buf = gst_avi_demux_align_buffer (avi, buf, stream->alignment);
    ret = gst_pad_push (stream->pad, buf);

    /* mark as processed, we increment the frame and byte counters then
     * leave the while loop and return the GstFlowReturn */
    processed = TRUE;

    if (avi->segment.rate < 0) {
      if (timestamp > avi->segment.stop && ret == GST_FLOW_EOS) {
        /* In reverse playback we can get a GST_FLOW_EOS when
         * we are at the end of the segment, so we just need to jump
         * back to the previous section. */
        GST_DEBUG_OBJECT (avi, "downstream has reached end of segment");
        ret = GST_FLOW_OK;
      }
    }
  next:
    /* move to next item */
    ret = gst_avi_demux_advance (avi, stream, ret);

    /* combine flows */
    ret = gst_avi_demux_combine_flows (avi, stream, ret);
  } while (!processed);

beach:
  return ret;

  /* special cases */
eos:
  {
    GST_DEBUG_OBJECT (avi, "No samples left for any streams - EOS");
    ret = GST_FLOW_EOS;
    goto beach;
  }
eos_stop:
  {
    GST_LOG_OBJECT (avi, "Found keyframe after segment,"
        " setting EOS (%" GST_TIME_FORMAT " > %" GST_TIME_FORMAT ")",
        GST_TIME_ARGS (timestamp), GST_TIME_ARGS (avi->segment.stop));
    ret = GST_FLOW_EOS;
    /* move to next stream */
    goto next;
  }
pull_failed:
  {
    GST_DEBUG_OBJECT (avi, "pull range failed: pos=%" G_GUINT64_FORMAT
        " size=%" G_GUINT64_FORMAT, offset, size);
    goto beach;
  }
short_buffer:
  {
    GST_WARNING_OBJECT (avi, "Short read at offset %" G_GUINT64_FORMAT
        ", only got %" G_GSIZE_FORMAT "/%" G_GUINT64_FORMAT
        " bytes (truncated file?)", offset, gst_buffer_get_size (buf), size);
    gst_buffer_unref (buf);
    ret = GST_FLOW_EOS;
    goto beach;
  }
}
+
+/*
+ * Read data. If we have an index it delegates to
+ * gst_avi_demux_process_next_entry().
+ */
static GstFlowReturn
gst_avi_demux_stream_data (GstAviDemux * avi)
{
  /* Push-mode streaming core: consumes complete chunks from the adapter,
   * skipping container-level tags (RIFF/LIST/JUNK/idx1/sub-index) and
   * pushing stream chunks out on the matching pad. Returns GST_FLOW_OK
   * when more input data is needed, GST_FLOW_EOS at end of stream. */
  guint32 tag = 0;
  guint32 size = 0;
  gint stream_nr = 0;
  GstFlowReturn res = GST_FLOW_OK;

  if (G_UNLIKELY (avi->have_eos)) {
    /* Clean adapter, we're done */
    gst_adapter_clear (avi->adapter);
    return GST_FLOW_EOS;
  }

  /* honour any pending byte-drop request (e.g. after a seek) */
  if (G_UNLIKELY (avi->todrop)) {
    guint drop;

    if ((drop = gst_adapter_available (avi->adapter))) {
      if (drop > avi->todrop)
        drop = avi->todrop;
      GST_DEBUG_OBJECT (avi, "Dropping %d bytes", drop);
      gst_adapter_flush (avi->adapter, drop);
      avi->todrop -= drop;
      avi->offset += drop;
    }
  }

  /* Iterate until need more data, so adapter won't grow too much */
  while (1) {
    if (G_UNLIKELY (!gst_avi_demux_peek_chunk_info (avi, &tag, &size))) {
      return GST_FLOW_OK;
    }

    GST_DEBUG ("Trying chunk (%" GST_FOURCC_FORMAT "), size %d",
        GST_FOURCC_ARGS (tag), size);

    /* stream chunk ids start with two decimal digits ("00dc", "01wb", ...) */
    if (G_LIKELY ((tag & 0xff) >= '0' && (tag & 0xff) <= '9' &&
            ((tag >> 8) & 0xff) >= '0' && ((tag >> 8) & 0xff) <= '9')) {
      GST_LOG ("Chunk ok");
    } else if ((tag & 0xffff) == (('x' << 8) | 'i')) {
      GST_DEBUG ("Found sub-index tag");
      if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
        /* accept 0 size buffer here */
        avi->abort_buffering = FALSE;
        GST_DEBUG ("  skipping %d bytes for now", size);
        gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
      }
      return GST_FLOW_OK;
    } else if (tag == GST_RIFF_TAG_RIFF) {
      /* RIFF tags can appear in ODML files, just jump over them */
      if (gst_adapter_available (avi->adapter) >= 12) {
        GST_DEBUG ("Found RIFF tag, skipping RIFF header");
        gst_adapter_flush (avi->adapter, 12);
        continue;
      }
      return GST_FLOW_OK;
    } else if (tag == GST_RIFF_TAG_idx1) {
      GST_DEBUG ("Found index tag");
      if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
        /* accept 0 size buffer here */
        avi->abort_buffering = FALSE;
        GST_DEBUG ("  skipping %d bytes for now", size);
        gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
      }
      return GST_FLOW_OK;
    } else if (tag == GST_RIFF_TAG_LIST) {
      /* movi chunks might be grouped in rec list */
      if (gst_adapter_available (avi->adapter) >= 12) {
        GST_DEBUG ("Found LIST tag, skipping LIST header");
        gst_adapter_flush (avi->adapter, 12);
        continue;
      }
      return GST_FLOW_OK;
    } else if (tag == GST_RIFF_TAG_JUNK || tag == GST_RIFF_TAG_JUNQ) {
      /* rec list might contain JUNK chunks */
      GST_DEBUG ("Found JUNK tag");
      if (gst_avi_demux_peek_chunk (avi, &tag, &size) || size == 0) {
        /* accept 0 size buffer here */
        avi->abort_buffering = FALSE;
        GST_DEBUG ("  skipping %d bytes for now", size);
        gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
      }
      return GST_FLOW_OK;
    } else {
      GST_DEBUG ("No more stream chunks, send EOS");
      avi->have_eos = TRUE;
      return GST_FLOW_EOS;
    }

    /* need the whole chunk (header + payload) in the adapter now */
    if (G_UNLIKELY (!gst_avi_demux_peek_chunk (avi, &tag, &size))) {
      /* supposedly one hopes to catch a nicer chunk later on ... */
      /* FIXME ?? give up here rather than possibly ending up going
       * through the whole file */
      if (avi->abort_buffering) {
        avi->abort_buffering = FALSE;
        if (size) {
          gst_adapter_flush (avi->adapter, 8);
          return GST_FLOW_OK;
        }
      } else {
        return GST_FLOW_OK;
      }
    }
    GST_DEBUG ("chunk ID %" GST_FOURCC_FORMAT ", size %u",
        GST_FOURCC_ARGS (tag), size);

    /* the two leading digits of the chunk id select the stream */
    stream_nr = CHUNKID_TO_STREAMNR (tag);

    if (G_UNLIKELY (stream_nr < 0 || stream_nr >= avi->num_streams)) {
      /* recoverable */
      GST_WARNING ("Invalid stream ID %d (%" GST_FOURCC_FORMAT ")",
          stream_nr, GST_FOURCC_ARGS (tag));
      avi->offset += 8 + GST_ROUND_UP_2 (size);
      gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
    } else {
      GstAviStream *stream;
      GstClockTime next_ts = 0;
      GstBuffer *buf = NULL;
#if 0
      guint64 offset;
#endif
      /* after a seek, main-stream chunks before the target keyframe offset
       * are discarded; other streams always pass */
      gboolean saw_desired_kf = stream_nr != avi->main_stream
          || avi->offset >= avi->seek_kf_offset;

      if (stream_nr == avi->main_stream && avi->offset == avi->seek_kf_offset) {
        GST_DEBUG_OBJECT (avi, "Desired keyframe reached");
        avi->seek_kf_offset = 0;
      }

      if (saw_desired_kf) {
        gst_adapter_flush (avi->adapter, 8);
        /* get buffer */
        if (size) {
          buf = gst_adapter_take_buffer (avi->adapter, GST_ROUND_UP_2 (size));
          /* patch the size */
          gst_buffer_resize (buf, 0, size);
        } else {
          buf = NULL;
        }
      } else {
        GST_DEBUG_OBJECT (avi,
            "Desired keyframe not yet reached, flushing chunk");
        gst_adapter_flush (avi->adapter, 8 + GST_ROUND_UP_2 (size));
      }

#if 0
      offset = avi->offset;
#endif
      avi->offset += 8 + GST_ROUND_UP_2 (size);

      stream = &avi->stream[stream_nr];

      /* set delay (if any)
         if (stream->strh->init_frames == stream->current_frame &&
         stream->delay == 0)
         stream->delay = next_ts;
       */

      /* parsing of corresponding header may have failed */
      if (G_UNLIKELY (!stream->pad)) {
        GST_WARNING_OBJECT (avi, "no pad for stream ID %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (tag));
        if (buf)
          gst_buffer_unref (buf);
      } else {
        /* get time of this buffer */
        gst_pad_query_position (stream->pad, GST_FORMAT_TIME,
            (gint64 *) & next_ts);

#if 0
        gst_avi_demux_add_assoc (avi, stream, next_ts, offset, FALSE);
#endif

        /* increment our positions */
        stream->current_entry++;
        /* as in pull mode, 'total' is either bytes (CBR) or frames (VBR) */
        if (stream->strh->type == GST_RIFF_FCC_auds && stream->is_vbr) {
          gint blockalign = stream->strf.auds->blockalign;
          if (blockalign > 0)
            stream->current_total += DIV_ROUND_UP (size, blockalign);
          else
            stream->current_total++;
        } else {
          stream->current_total += size;
        }
        GST_LOG_OBJECT (avi, "current entry %u, total %u",
            stream->current_entry, stream->current_total);

        /* update current position in the segment */
        avi->segment.position = next_ts;

        if (saw_desired_kf && buf) {
          GstClockTime dur_ts = 0;

          /* invert the picture if needed, and append palette for RGB8P */
          buf = gst_avi_demux_invert (stream, buf);

          /* position after the counter increments gives the end timestamp,
           * hence the buffer duration */
          gst_pad_query_position (stream->pad, GST_FORMAT_TIME,
              (gint64 *) & dur_ts);

          GST_BUFFER_DTS (buf) = next_ts;
          GST_BUFFER_PTS (buf) = GST_CLOCK_TIME_NONE;
          GST_BUFFER_DURATION (buf) = dur_ts - next_ts;
          if (stream->strh->type == GST_RIFF_FCC_vids) {
            GST_BUFFER_OFFSET (buf) = stream->current_entry - 1;
            GST_BUFFER_OFFSET_END (buf) = stream->current_entry;
          } else {
            GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
            GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
          }

          GST_DEBUG_OBJECT (avi,
              "Pushing buffer with time=%" GST_TIME_FORMAT ", duration %"
              GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
              " and size %d over pad %s", GST_TIME_ARGS (next_ts),
              GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
              GST_BUFFER_OFFSET (buf), size, GST_PAD_NAME (stream->pad));

          /* mark discont when pending */
          if (G_UNLIKELY (stream->discont)) {
            GST_DEBUG_OBJECT (avi, "Setting DISCONT");
            GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
            stream->discont = FALSE;
          } else {
            GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
          }

          if (stream->alignment > 1)
            buf = gst_avi_demux_align_buffer (avi, buf, stream->alignment);
          res = gst_pad_push (stream->pad, buf);
          buf = NULL;

          /* combine flows */
          res = gst_avi_demux_combine_flows (avi, stream, res);
          if (G_UNLIKELY (res != GST_FLOW_OK)) {
            GST_DEBUG ("Push failed; %s", gst_flow_get_name (res));
            return res;
          }
        }
      }
    }
  }

  return res;
}
+
+/*
+ * Send pending tags.
+ */
+static void
+push_tag_lists (GstAviDemux * avi)
+{
+ guint i;
+ GstTagList *tags;
+
+ if (!avi->got_tags)
+ return;
+
+ GST_DEBUG_OBJECT (avi, "Pushing pending tag lists");
+
+ for (i = 0; i < avi->num_streams; i++) {
+ GstAviStream *stream = &avi->stream[i];
+ GstPad *pad = stream->pad;
+
+ tags = stream->taglist;
+
+ if (pad && tags) {
+ GST_DEBUG_OBJECT (pad, "Tags: %" GST_PTR_FORMAT, tags);
+
+ gst_pad_push_event (pad, gst_event_new_tag (tags));
+ stream->taglist = NULL;
+ }
+ }
+
+ if (!(tags = avi->globaltags))
+ tags = gst_tag_list_new_empty ();
+
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_CONTAINER_FORMAT, "AVI", NULL);
+
+ GST_DEBUG_OBJECT (avi, "Global tags: %" GST_PTR_FORMAT, tags);
+ gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
+ gst_avi_demux_push_event (avi, gst_event_new_tag (tags));
+ avi->globaltags = NULL;
+ avi->got_tags = FALSE;
+}
+
+/* Pull-mode streaming task scheduled on the sink pad: advances the
+ * demuxer state machine (START -> HEADER -> MOVI) and, on any non-OK
+ * flow, pauses the task and emits EOS / segment-done / error as
+ * appropriate. */
+static void
+gst_avi_demux_loop (GstPad * pad)
+{
+  GstFlowReturn res;
+  GstAviDemux *avi = GST_AVI_DEMUX (GST_PAD_PARENT (pad));
+
+  switch (avi->state) {
+    case GST_AVI_DEMUX_START:
+      res = gst_avi_demux_stream_init_pull (avi);
+      if (G_UNLIKELY (res != GST_FLOW_OK)) {
+        GST_WARNING ("stream_init flow: %s", gst_flow_get_name (res));
+        goto pause;
+      }
+      avi->state = GST_AVI_DEMUX_HEADER;
+      /* fall-through */
+    case GST_AVI_DEMUX_HEADER:
+      res = gst_avi_demux_stream_header_pull (avi);
+      if (G_UNLIKELY (res != GST_FLOW_OK)) {
+        GST_WARNING ("stream_header flow: %s", gst_flow_get_name (res));
+        goto pause;
+      }
+      avi->state = GST_AVI_DEMUX_MOVI;
+      break;
+    case GST_AVI_DEMUX_MOVI:
+      /* send a pending segment event (e.g. after a seek) before any data */
+      if (G_UNLIKELY (avi->seg_event)) {
+        gst_avi_demux_push_event (avi, avi->seg_event);
+        avi->seg_event = NULL;
+      }
+      if (G_UNLIKELY (avi->got_tags)) {
+        push_tag_lists (avi);
+      }
+      /* process each index entry in turn */
+      res = gst_avi_demux_loop_data (avi);
+
+      /* pause when error */
+      if (G_UNLIKELY (res != GST_FLOW_OK)) {
+        GST_INFO ("stream_movi flow: %s", gst_flow_get_name (res));
+        goto pause;
+      }
+      break;
+    default:
+      GST_ERROR_OBJECT (avi, "unknown state %d", avi->state);
+      res = GST_FLOW_ERROR;
+      goto pause;
+  }
+
+  return;
+
+  /* ERRORS */
+pause:{
+
+    gboolean push_eos = FALSE;
+    GST_LOG_OBJECT (avi, "pausing task, reason %s", gst_flow_get_name (res));
+    gst_pad_pause_task (avi->sinkpad);
+
+    if (res == GST_FLOW_EOS) {
+      /* handle end-of-stream/segment */
+      /* so align our position with the end of it, if there is one
+       * this ensures a subsequent will arrive at correct base/acc time */
+      if (avi->segment.rate > 0.0 &&
+          GST_CLOCK_TIME_IS_VALID (avi->segment.stop))
+        avi->segment.position = avi->segment.stop;
+      else if (avi->segment.rate < 0.0)
+        avi->segment.position = avi->segment.start;
+      if (avi->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+        gint64 stop;
+        GstEvent *event;
+        GstMessage *msg;
+
+        if ((stop = avi->segment.stop) == -1)
+          stop = avi->segment.duration;
+
+        GST_INFO_OBJECT (avi, "sending segment_done");
+
+        msg =
+            gst_message_new_segment_done (GST_OBJECT_CAST (avi),
+            GST_FORMAT_TIME, stop);
+        /* carry the seqnum of the seek that created this segment */
+        if (avi->segment_seqnum)
+          gst_message_set_seqnum (msg, avi->segment_seqnum);
+        gst_element_post_message (GST_ELEMENT_CAST (avi), msg);
+
+        event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+        if (avi->segment_seqnum)
+          gst_event_set_seqnum (event, avi->segment_seqnum);
+        gst_avi_demux_push_event (avi, event);
+      } else {
+        push_eos = TRUE;
+      }
+    } else if (res == GST_FLOW_NOT_LINKED || res < GST_FLOW_EOS) {
+      /* for fatal errors we post an error message, wrong-state is
+       * not fatal because it happens due to flushes and only means
+       * that we should stop now. */
+      GST_ELEMENT_FLOW_ERROR (avi, res);
+      push_eos = TRUE;
+    }
+    if (push_eos) {
+      GstEvent *event;
+
+      GST_INFO_OBJECT (avi, "sending eos");
+      event = gst_event_new_eos ();
+      if (avi->segment_seqnum)
+        gst_event_set_seqnum (event, avi->segment_seqnum);
+      /* EOS could not be delivered anywhere: report an error so the
+       * application is not left waiting forever */
+      if (!gst_avi_demux_push_event (avi, event) && (res == GST_FLOW_EOS)) {
+        GST_ELEMENT_ERROR (avi, STREAM, DEMUX,
+            (NULL), ("got eos but no streams (yet)"));
+      }
+    }
+  }
+}
+
+
+/* Push-mode chain function: incoming buffers are accumulated in the
+ * adapter and parsed according to the current demuxer state.  Returns
+ * the flow result of the state handler, or GST_FLOW_ERROR for the
+ * documented failure paths below. */
+static GstFlowReturn
+gst_avi_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstFlowReturn res;
+  GstAviDemux *avi = GST_AVI_DEMUX (parent);
+  gint i;
+
+  if (GST_BUFFER_IS_DISCONT (buf)) {
+    GST_DEBUG_OBJECT (avi, "got DISCONT");
+    /* drop partially accumulated data; it no longer forms a
+     * contiguous byte stream */
+    gst_adapter_clear (avi->adapter);
+    /* mark all streams DISCONT */
+    for (i = 0; i < avi->num_streams; i++)
+      avi->stream[i].discont = TRUE;
+  }
+
+  GST_DEBUG ("Store %" G_GSIZE_FORMAT " bytes in adapter",
+      gst_buffer_get_size (buf));
+  /* the adapter takes ownership of the buffer */
+  gst_adapter_push (avi->adapter, buf);
+
+  switch (avi->state) {
+    case GST_AVI_DEMUX_START:
+      if ((res = gst_avi_demux_stream_init_push (avi)) != GST_FLOW_OK) {
+        GST_WARNING ("stream_init flow: %s", gst_flow_get_name (res));
+        break;
+      }
+      break;
+    case GST_AVI_DEMUX_HEADER:
+      if ((res = gst_avi_demux_stream_header_push (avi)) != GST_FLOW_OK) {
+        GST_WARNING ("stream_header flow: %s", gst_flow_get_name (res));
+        break;
+      }
+      break;
+    case GST_AVI_DEMUX_MOVI:
+      /* flush a pending segment event before pushing data */
+      if (G_UNLIKELY (avi->seg_event)) {
+        gst_avi_demux_push_event (avi, avi->seg_event);
+        avi->seg_event = NULL;
+      }
+      if (G_UNLIKELY (avi->got_tags)) {
+        push_tag_lists (avi);
+      }
+      res = gst_avi_demux_stream_data (avi);
+      break;
+    case GST_AVI_DEMUX_SEEK:
+    {
+      GstEvent *event;
+
+      res = GST_FLOW_OK;
+
+      /* obtain and parse indexes */
+      if (avi->stream[0].indexes && !gst_avi_demux_read_subindexes_push (avi))
+        /* seek in subindex read function failed */
+        goto index_failed;
+
+      if (!avi->stream[0].indexes && !avi->have_index
+          && avi->avih->flags & GST_RIFF_AVIH_HASINDEX)
+        gst_avi_demux_stream_index_push (avi);
+
+      if (avi->have_index) {
+        /* use the indexes now to construct nice durations */
+        gst_avi_demux_calculate_durations_from_index (avi);
+      } else {
+        /* still parsing indexes */
+        break;
+      }
+
+      /* take the stored seek event under the object lock */
+      GST_OBJECT_LOCK (avi);
+      event = avi->seek_event;
+      avi->seek_event = NULL;
+      GST_OBJECT_UNLOCK (avi);
+
+      /* calculate and perform seek */
+      if (!avi_demux_handle_seek_push (avi, avi->sinkpad, event)) {
+        gst_event_unref (event);
+        goto seek_failed;
+      }
+
+      gst_event_unref (event);
+      avi->state = GST_AVI_DEMUX_MOVI;
+      break;
+    }
+    default:
+      GST_ELEMENT_ERROR (avi, STREAM, FAILED, (NULL),
+          ("Illegal internal state"));
+      res = GST_FLOW_ERROR;
+      break;
+  }
+
+  GST_DEBUG_OBJECT (avi, "state: %d res:%s", avi->state,
+      gst_flow_get_name (res));
+
+  if (G_UNLIKELY (avi->abort_buffering))
+    goto abort_buffering;
+
+  return res;
+
+  /* ERRORS */
+index_failed:
+  {
+    GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("failed to read indexes"));
+    return GST_FLOW_ERROR;
+  }
+seek_failed:
+  {
+    GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("push mode seek failed"));
+    return GST_FLOW_ERROR;
+  }
+abort_buffering:
+  {
+    avi->abort_buffering = FALSE;
+    GST_ELEMENT_ERROR (avi, STREAM, DEMUX, (NULL), ("unhandled buffer size"));
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* Decide the sink pad's scheduling mode: prefer pull mode when the
+ * peer advertises seekable pull scheduling, otherwise fall back to
+ * push/chain mode. */
+static gboolean
+gst_avi_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+{
+  GstQuery *query;
+  gboolean pull_mode;
+
+  query = gst_query_new_scheduling ();
+
+  /* peer can't answer the scheduling query: assume push only */
+  if (!gst_pad_peer_query (sinkpad, query)) {
+    gst_query_unref (query);
+    goto activate_push;
+  }
+
+  pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+      GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+  gst_query_unref (query);
+
+  if (!pull_mode)
+    goto activate_push;
+
+  GST_DEBUG_OBJECT (sinkpad, "activating pull");
+  return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+activate_push:
+  {
+    GST_DEBUG_OBJECT (sinkpad, "activating push");
+    return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+  }
+}
+
+/* Start/stop the streaming task for pull mode, or record that we are
+ * operating in push (chain-driven) mode. */
+static gboolean
+gst_avi_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+    GstPadMode mode, gboolean active)
+{
+  gboolean res;
+  GstAviDemux *avi = GST_AVI_DEMUX (parent);
+
+  switch (mode) {
+    case GST_PAD_MODE_PULL:
+      if (active) {
+        avi->streaming = FALSE;
+        /* the task drives gst_avi_demux_loop() on the sink pad */
+        res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_avi_demux_loop,
+            sinkpad, NULL);
+      } else {
+        res = gst_pad_stop_task (sinkpad);
+      }
+      break;
+    case GST_PAD_MODE_PUSH:
+      if (active) {
+        GST_DEBUG ("avi: activating push/chain function");
+        avi->streaming = TRUE;
+      } else {
+        GST_DEBUG ("avi: deactivating push/chain function");
+      }
+      res = TRUE;
+      break;
+    default:
+      res = FALSE;
+      break;
+  }
+  return res;
+}
+
+/* Legacy GstIndex (element index) integration, compiled out; kept for
+ * reference only. */
+#if 0
+static void
+gst_avi_demux_set_index (GstElement * element, GstIndex * index)
+{
+  GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+  GST_OBJECT_LOCK (avi);
+  if (avi->element_index)
+    gst_object_unref (avi->element_index);
+  if (index) {
+    avi->element_index = gst_object_ref (index);
+  } else {
+    avi->element_index = NULL;
+  }
+  GST_OBJECT_UNLOCK (avi);
+  /* object lock might be taken again */
+  if (index)
+    gst_index_get_writer_id (index, GST_OBJECT_CAST (element), &avi->index_id);
+  GST_DEBUG_OBJECT (avi, "Set index %" GST_PTR_FORMAT, avi->element_index);
+}
+
+static GstIndex *
+gst_avi_demux_get_index (GstElement * element)
+{
+  GstIndex *result = NULL;
+  GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+  GST_OBJECT_LOCK (avi);
+  if (avi->element_index)
+    result = gst_object_ref (avi->element_index);
+  GST_OBJECT_UNLOCK (avi);
+
+  GST_DEBUG_OBJECT (avi, "Returning index %" GST_PTR_FORMAT, result);
+
+  return result;
+}
+#endif
+
+/* GstElement state change handler: initialise the segment going up,
+ * drop all parsed state coming back down. */
+static GstStateChangeReturn
+gst_avi_demux_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstAviDemux *avi = GST_AVI_DEMUX (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* fresh run: mode is decided later during pad activation */
+      avi->streaming = FALSE;
+      gst_segment_init (&avi->segment, GST_FORMAT_TIME);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (ret == GST_STATE_CHANGE_FAILURE)
+    goto done;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      /* forget the index so a restart re-parses the file */
+      avi->have_index = FALSE;
+      gst_avi_demux_reset (avi);
+      break;
+    default:
+      break;
+  }
+
+done:
+  return ret;
+}
diff --git a/gst/avi/gstavidemux.h b/gst/avi/gstavidemux.h
new file mode 100644
index 0000000000..22e46a2edc
--- /dev/null
+++ b/gst/avi/gstavidemux.h
@@ -0,0 +1,221 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Nokia Corporation (contact <stefan.kost@nokia.com>)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AVI_DEMUX_H__
+#define __GST_AVI_DEMUX_H__
+
+#include <gst/gst.h>
+
+#include "avi-ids.h"
+#include "gst/riff/riff-ids.h"
+#include "gst/riff/riff-read.h"
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstflowcombiner.h>
+
+G_BEGIN_DECLS
+
+/* standard GObject type-cast/type-check boilerplate */
+#define GST_TYPE_AVI_DEMUX \
+  (gst_avi_demux_get_type ())
+#define GST_AVI_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AVI_DEMUX, GstAviDemux))
+#define GST_AVI_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVI_DEMUX, GstAviDemuxClass))
+#define GST_IS_AVI_DEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVI_DEMUX))
+#define GST_IS_AVI_DEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AVI_DEMUX))
+
+/* hard upper bound on the number of streams we track (size of the
+ * fixed stream array in GstAviDemux) */
+#define GST_AVI_DEMUX_MAX_STREAMS 16
+
+/* decode the two leading ASCII digits of an AVI chunk id ('NNxx')
+ * into the stream number they address */
+#define CHUNKID_TO_STREAMNR(chunkid) \
+    ((((chunkid) & 0xff) - '0') * 10 + \
+    (((chunkid) >> 8) & 0xff) - '0')
+
+
+/* new index entries 24 bytes */
+typedef struct {
+  guint32 flags;
+  guint32 size;                 /* bytes of the data */
+  guint64 offset;               /* data offset in file */
+  guint64 total;                /* total bytes before */
+} GstAviIndexEntry;
+
+/* Per-stream context: pad, parsed stream headers, index and the
+ * current playback position within that index. */
+typedef struct {
+  /* index of this streamcontext */
+  guint num;
+
+  /* pad */
+  GstPad *pad;
+  gboolean exposed;
+
+  /* stream info and headers */
+  gst_riff_strh *strh;
+  union {
+    gst_riff_strf_vids *vids;
+    gst_riff_strf_auds *auds;
+    gst_riff_strf_iavs *iavs;
+    gpointer data;
+  } strf;
+  GstBuffer *extradata, *initdata;
+  GstBuffer *rgb8_palette;
+  gchar *name;
+
+  /* the start/step/stop entries */
+  guint start_entry;
+  guint step_entry;
+  guint stop_entry;
+
+  /* current index entry */
+  guint current_entry;
+  /* position (byte, frame, time) for current_entry */
+  guint current_total;
+  GstClockTime current_timestamp;
+  GstClockTime current_ts_end;
+  guint64 current_offset;
+  guint64 current_offset_end;
+
+  /* set when upstream signalled a discontinuity (see the chain function) */
+  gboolean discont;
+
+  /* stream length */
+  guint64 total_bytes;
+  guint32 total_blocks;
+  guint n_keyframes;
+  /* stream length according to index */
+  GstClockTime idx_duration;
+  /* stream length according to header */
+  GstClockTime hdr_duration;
+  /* stream length based on header/index */
+  GstClockTime duration;
+
+  /* VBR indicator */
+  gboolean is_vbr;
+
+  /* openDML support (for files >4GB) */
+  gboolean superindex;
+  guint64 *indexes;
+
+  /* new indexes */
+  GstAviIndexEntry *index;      /* array with index entries */
+  guint idx_n;                  /* number of entries */
+  guint idx_max;                /* max allocated size of entries */
+
+  /* pending per-stream tags; ownership is handed to the tag event in
+   * push_tag_lists() */
+  GstTagList *taglist;
+
+  gint index_id;
+  gboolean is_raw;
+  gsize alignment;
+} GstAviStream;
+
+/* top-level demuxing state */
+typedef enum {
+  GST_AVI_DEMUX_START,          /* nothing parsed yet */
+  GST_AVI_DEMUX_HEADER,         /* parsing the AVI header chunks */
+  GST_AVI_DEMUX_MOVI,           /* streaming data from the movi chunk */
+  GST_AVI_DEMUX_SEEK,           /* gathering indexes to satisfy a seek (push mode) */
+} GstAviDemuxState;
+
+/* sub-states while parsing the AVI header chunks */
+typedef enum {
+  GST_AVI_DEMUX_HEADER_TAG_LIST,
+  GST_AVI_DEMUX_HEADER_AVIH,
+  GST_AVI_DEMUX_HEADER_ELEMENTS,
+  GST_AVI_DEMUX_HEADER_INFO,
+  GST_AVI_DEMUX_HEADER_JUNK,
+  GST_AVI_DEMUX_HEADER_DATA
+} GstAviDemuxHeaderState;
+
+/* Instance structure: holds the demuxer's state machine, the fixed
+ * stream array, and the bookkeeping for both pull and push operation. */
+typedef struct _GstAviDemux {
+  GstElement parent;
+
+  /* pads */
+  GstPad *sinkpad;
+
+  /* AVI decoding state */
+  GstAviDemuxState state;
+  GstAviDemuxHeaderState header_state;
+  guint64 offset;
+  gboolean abort_buffering;
+
+  /* when we loaded the indexes */
+  gboolean have_index;
+  /* index offset in the file */
+  guint64 index_offset;
+
+  /* streams */
+  GstAviStream stream[GST_AVI_DEMUX_MAX_STREAMS];
+  guint num_streams;
+  guint num_v_streams;
+  guint num_a_streams;
+  guint num_t_streams;          /* subtitle text streams */
+  guint num_sp_streams;         /* subpicture streams */
+
+  guint main_stream;            /* used for seeking */
+
+  GstFlowCombiner *flowcombiner;
+
+  gboolean have_group_id;
+  guint group_id;
+
+  /* for streaming mode */
+  gboolean streaming;
+  gboolean have_eos;
+  GstAdapter *adapter;
+  guint todrop;
+
+  /* some stream info for length */
+  gst_riff_avih *avih;
+  GstClockTime duration;
+
+  /* segment in TIME */
+  GstSegment segment;
+  guint32 segment_seqnum;
+
+  /* pending tags/events, flushed at the start of the MOVI state */
+  GstEvent *seg_event;
+  GstTagList *globaltags;
+  gboolean got_tags;
+
+#if 0
+  /* gst index support */
+  GstIndex *element_index;
+  gint index_id;
+#endif
+
+  gboolean seekable;
+
+  guint64 first_movi_offset;
+  guint64 idx1_offset;          /* offset in file of list/chunk after movi */
+  GstEvent *seek_event;
+
+  gboolean building_index;
+  guint odml_stream;
+  guint odml_subidx;
+  guint64 *odml_subidxs;
+
+  guint64 seek_kf_offset;       /* offset of the keyframe to which we want to seek */
+} GstAviDemux;
+
+/* class structure: no additions over GstElementClass */
+typedef struct _GstAviDemuxClass {
+  GstElementClass parent_class;
+} GstAviDemuxClass;
+
+GType gst_avi_demux_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AVI_DEMUX_H__ */
diff --git a/gst/avi/gstavielement.c b/gst/avi/gstavielement.c
new file mode 100644
index 0000000000..37ff7c3380
--- /dev/null
+++ b/gst/avi/gstavielement.c
@@ -0,0 +1,44 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gst/gst-i18n-plugin.h"
+#include "gst/riff/riff-read.h"
+
+#include "gstavielements.h"
+
+/* One-time shared initialisation for all avi elements: sets up the
+ * RIFF helper library and, when NLS is enabled, gettext translations.
+ * Safe to call from every element's register function thanks to the
+ * g_once guard. */
+void
+avi_element_init (GstPlugin * plugin)
+{
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+    gst_riff_init ();
+
+#ifdef ENABLE_NLS
+    bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
+    bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
+#endif /* ENABLE_NLS */
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/gst/avi/gstavielements.h b/gst/avi/gstavielements.h
new file mode 100644
index 0000000000..4be022dfcb
--- /dev/null
+++ b/gst/avi/gstavielements.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AVI_ELEMENTS_H__
+#define __GST_AVI_ELEMENTS_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+void avi_element_init (GstPlugin * plugin);
+
+GST_ELEMENT_REGISTER_DECLARE (avidemux);
+GST_ELEMENT_REGISTER_DECLARE (avimux);
+GST_ELEMENT_REGISTER_DECLARE (avisubtitle);
+
+G_END_DECLS
+
+#endif /* __GST_AVI_ELEMENTS_H__ */
diff --git a/gst/avi/gstavimux.c b/gst/avi/gstavimux.c
new file mode 100644
index 0000000000..a71c243695
--- /dev/null
+++ b/gst/avi/gstavimux.c
@@ -0,0 +1,2428 @@
+/* AVI muxer plugin for GStreamer
+ * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (C) 2006 Mark Nauwelaerts <manauw@skynet.be>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* based on:
+ * - the old avimuxer (by Wim Taymans)
+ * - xawtv's aviwriter (by Gerd Knorr)
+ * - mjpegtools' avilib (by Rainer Johanni)
+ * - openDML large-AVI docs
+ */
+
+/**
+ * SECTION:element-avimux
+ * @title: avimux
+ *
+ * Muxes raw or compressed audio and/or video streams into an AVI file.
+ *
+ * ## Example launch lines
+ * (write everything in one line, without the backslash characters)
+ * |[
+ * gst-launch-1.0 videotestsrc num-buffers=250 \
+ * ! 'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! queue ! mux. \
+ * audiotestsrc num-buffers=440 ! audioconvert \
+ * ! 'audio/x-raw,rate=44100,channels=2' ! queue ! mux. \
+ * avimux name=mux ! filesink location=test.avi
+ * ]| This will create an .AVI file containing an uncompressed video stream
+ * with a test picture and an uncompressed audio stream containing a
+ * test sound.
+ * |[
+ * gst-launch-1.0 videotestsrc num-buffers=250 \
+ * ! 'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! xvidenc ! queue ! mux. \
+ * audiotestsrc num-buffers=440 ! audioconvert ! 'audio/x-raw,rate=44100,channels=2' \
+ * ! lame ! queue ! mux. \
+ * avimux name=mux ! filesink location=test.avi
+ * ]| This will create an .AVI file containing the same test video and sound
+ * as above, only that both streams will be compressed this time. This will
+ * only work if you have the necessary encoder elements installed of course.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gst/gst-i18n-plugin.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/video/video.h>
+#include <gst/audio/audio.h>
+#include <gst/base/gstbytewriter.h>
+
+#include "gstavielements.h"
+#include "gstavimux.h"
+
+GST_DEBUG_CATEGORY_STATIC (avimux_debug);
+#define GST_CAT_DEFAULT avimux_debug
+
+/* element properties */
+enum
+{
+  PROP_0,
+  PROP_BIGFILE
+};
+
+/* default: allow writing openDML-2.0 (>2GB capable) AVI files */
+#define DEFAULT_BIGFILE TRUE
+
+/* always-present source pad carrying the muxed AVI byte stream */
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/x-msvideo")
+    );
+
+/* request pads for video input; lists every raw and compressed video
+ * format the muxer knows how to wrap */
+static GstStaticPadTemplate video_sink_factory =
+    GST_STATIC_PAD_TEMPLATE ("video_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("video/x-raw, "
+        "format = (string) { YUY2, I420, BGR, BGRx, BGRA, GRAY8, UYVY, v210 }, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], "
+        "framerate = (fraction) [ 0, MAX ]; "
+        "image/jpeg, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], "
+        "framerate = (fraction) [ 0, MAX ]; "
+        "video/x-divx, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], "
+        "framerate = (fraction) [ 0, MAX ], "
+        "divxversion = (int) [ 3, 5 ]; "
+        "video/x-msmpeg, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], "
+        "framerate = (fraction) [ 0, MAX ], "
+        "msmpegversion = (int) [ 41, 43 ]; "
+        "video/mpeg, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], "
+        "framerate = (fraction) [ 0, MAX ], "
+        "mpegversion = (int) { 1, 2, 4}, "
+        "systemstream = (boolean) FALSE; "
+        "video/x-h263, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], "
+        "framerate = (fraction) [ 0, MAX ]; "
+        "video/x-h264, "
+        "stream-format = (string) byte-stream, "
+        "alignment = (string) au, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], "
+        "framerate = (fraction) [ 0, MAX ]; "
+        "video/x-dv, "
+        "width = (int) 720, "
+        "height = (int) { 576, 480 }, "
+        "framerate = (fraction) [ 0, MAX ], "
+        "systemstream = (boolean) FALSE; "
+        "video/x-huffyuv, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], " "framerate = (fraction) [ 0, MAX ];"
+        "video/x-wmv, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], " "framerate = (fraction) [ 0, MAX ], "
+        "wmvversion = (int) [ 1, 3];"
+        "image/x-jpc, "
+        "width = (int) [ 1, 2147483647 ], "
+        "height = (int) [ 1, 2147483647 ], "
+        "framerate = (fraction) [ 0, MAX ];"
+        "video/x-vp8, "
+        "width = (int) [ 1, 2147483647 ], "
+        "height = (int) [ 1, 2147483647 ], "
+        "framerate = (fraction) [ 0, MAX ];"
+        "image/png, "
+        "width = (int) [ 16, 4096 ], "
+        "height = (int) [ 16, 4096 ], framerate = (fraction) [ 0, MAX ]")
+    );
+
+/* request pads for audio input; lists every raw and compressed audio
+ * format the muxer knows how to wrap */
+static GstStaticPadTemplate audio_sink_factory =
+    GST_STATIC_PAD_TEMPLATE ("audio_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("audio/x-raw, "
+        "format = (string) { U8, S16LE, S24LE, S32LE }, "
+        "rate = (int) [ 1000, 96000 ], "
+        "channels = (int) [ 1, 65535 ]; "
+        "audio/mpeg, "
+        "mpegversion = (int) 1, "
+        "layer = (int) [ 1, 3 ], "
+        "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ]; "
+        "audio/mpeg, "
+        "mpegversion = (int) 4, "
+        "stream-format = (string) raw, "
+        "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ]; "
+/*#if 0 VC6 doesn't support #if here ...
+        "audio/x-vorbis, "
+        "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ]; "
+#endif*/
+        "audio/x-ac3, "
+        "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 6 ]; "
+        "audio/x-alaw, "
+        "rate = (int) [ 1000, 48000 ], " "channels = (int) [ 1, 2 ]; "
+        "audio/x-mulaw, "
+        "rate = (int) [ 1000, 48000 ], " "channels = (int) [ 1, 2 ]; "
+        "audio/x-wma, "
+        "rate = (int) [ 1000, 96000 ], " "channels = (int) [ 1, 2 ], "
+        "wmaversion = (int) [ 1, 2 ] ")
+    );
+
+static void gst_avi_mux_pad_reset (GstAviPad * avipad, gboolean free);
+
+static GstFlowReturn gst_avi_mux_collect_pads (GstCollectPads * pads,
+ GstAviMux * avimux);
+static gboolean gst_avi_mux_handle_event (GstCollectPads * pad,
+ GstCollectData * data, GstEvent * event, gpointer user_data);
+static GstPad *gst_avi_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_avi_mux_release_pad (GstElement * element, GstPad * pad);
+static void gst_avi_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_avi_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static GstStateChangeReturn gst_avi_mux_change_state (GstElement * element,
+ GstStateChange transition);
+
+/* GType definition (implements GstTagSetter) and element registration;
+ * registration runs the shared avi_element_init() first */
+#define gst_avi_mux_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstAviMux, gst_avi_mux, GST_TYPE_ELEMENT,
+    G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (avimux, "avimux", GST_RANK_PRIMARY,
+    GST_TYPE_AVI_MUX, avi_element_init (plugin));
+
+/* GObject finalize: release everything the muxer still owns — per-pad
+ * bookkeeping, the index scratch buffer and the collectpads object. */
+static void
+gst_avi_mux_finalize (GObject * object)
+{
+  GstAviMux *mux = GST_AVI_MUX (object);
+  GSList *node;
+
+  /* completely free each sinkpad */
+  node = mux->sinkpads;
+  while (node) {
+    GstAviPad *avipad = (GstAviPad *) node->data;
+
+    /* advance before freeing the node's payload */
+    node = node->next;
+
+    gst_avi_mux_pad_reset (avipad, TRUE);
+    g_free (avipad);
+  }
+  g_slist_free (mux->sinkpads);
+  mux->sinkpads = NULL;
+
+  g_free (mux->idx);
+  mux->idx = NULL;
+
+  gst_object_unref (mux->collect);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class initialisation: installs properties, wires up the pad
+ * request/release and state-change vfuncs, and registers the static
+ * pad templates and element metadata. */
+static void
+gst_avi_mux_class_init (GstAviMuxClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (avimux_debug, "avimux", 0, "Muxer for AVI streams");
+
+  gobject_class->get_property = gst_avi_mux_get_property;
+  gobject_class->set_property = gst_avi_mux_set_property;
+  gobject_class->finalize = gst_avi_mux_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_BIGFILE,
+      g_param_spec_boolean ("bigfile", "Bigfile Support (>2GB)",
+          "Support for openDML-2.0 (big) AVI files", DEFAULT_BIGFILE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_avi_mux_request_new_pad);
+  gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_avi_mux_release_pad);
+  gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_avi_mux_change_state);
+
+  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &audio_sink_factory);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &video_sink_factory);
+
+  gst_element_class_set_static_metadata (gstelement_class, "Avi muxer",
+      "Codec/Muxer",
+      "Muxes audio and video into an avi stream",
+      "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+}
+
+/* reset pad to initial state
+ * free - if true, release all, not only stream related, data */
+static void
+gst_avi_mux_pad_reset (GstAviPad * avipad, gboolean free)
+{
+  /* generic part */
+  memset (&(avipad->hdr), 0, sizeof (gst_riff_strh));
+
+  memset (&(avipad->idx[0]), 0, sizeof (avipad->idx));
+
+  if (free) {
+    g_free (avipad->tag);
+    avipad->tag = NULL;
+    g_free (avipad->idx_tag);
+    avipad->idx_tag = NULL;
+  }
+
+  /* media-type specific part: restore the fourcc stream type and drop
+   * any cached codec data / headers */
+  if (avipad->is_video) {
+    GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;
+
+    avipad->hdr.type = GST_MAKE_FOURCC ('v', 'i', 'd', 's');
+    if (vidpad->vids_codec_data) {
+      gst_buffer_unref (vidpad->vids_codec_data);
+      vidpad->vids_codec_data = NULL;
+    }
+
+    if (vidpad->prepend_buffer) {
+      gst_buffer_unref (vidpad->prepend_buffer);
+      vidpad->prepend_buffer = NULL;
+    }
+
+    memset (&(vidpad->vids), 0, sizeof (gst_riff_strf_vids));
+    memset (&(vidpad->vprp), 0, sizeof (gst_riff_vprp));
+  } else {
+    GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
+
+    audpad->samples = 0;
+
+    avipad->hdr.type = GST_MAKE_FOURCC ('a', 'u', 'd', 's');
+    if (audpad->auds_codec_data) {
+      gst_buffer_unref (audpad->auds_codec_data);
+      audpad->auds_codec_data = NULL;
+    }
+
+    memset (&(audpad->auds), 0, sizeof (gst_riff_strf_auds));
+
+    audpad->audio_size = 0;
+    audpad->audio_time = 0;
+    audpad->max_audio_chunk = 0;
+  }
+}
+
+/* Reset the muxer to its initial state, keeping only sinkpads that are
+ * still actively collecting; everything else is freed. */
+static void
+gst_avi_mux_reset (GstAviMux * avimux)
+{
+  GSList *node, *newlist = NULL;
+
+  /* free and reset each sinkpad */
+  node = avimux->sinkpads;
+  while (node) {
+    GstAviPad *avipad = (GstAviPad *) node->data;
+
+    node = node->next;
+
+    gst_avi_mux_pad_reset (avipad, FALSE);
+    /* if this pad has collectdata, keep it, otherwise dump it completely */
+    if (avipad->collect)
+      newlist = g_slist_append (newlist, avipad);
+    else {
+      gst_avi_mux_pad_reset (avipad, TRUE);
+      g_free (avipad);
+    }
+  }
+
+  /* free the old list of sinkpads, only keep the real collecting ones */
+  g_slist_free (avimux->sinkpads);
+  avimux->sinkpads = newlist;
+
+  /* avi data */
+  avimux->num_frames = 0;
+  memset (&(avimux->avi_hdr), 0, sizeof (gst_riff_avih));
+  avimux->avi_hdr.max_bps = 10000000;
+  avimux->codec_data_size = 0;
+
+  if (avimux->tags_snap) {
+    gst_tag_list_unref (avimux->tags_snap);
+    avimux->tags_snap = NULL;
+  }
+
+  g_free (avimux->idx);
+  avimux->idx = NULL;
+
+  /* state info */
+  avimux->write_header = TRUE;
+
+  /* tags */
+  gst_tag_setter_reset_tags (GST_TAG_SETTER (avimux));
+}
+
+/* Instance initialisation: create the fixed source pad, set property
+ * defaults, set up collectpads for the request sink pads, and put the
+ * muxer in a clean state. */
+static void
+gst_avi_mux_init (GstAviMux * avimux)
+{
+  avimux->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+  gst_pad_use_fixed_caps (avimux->srcpad);
+  gst_element_add_pad (GST_ELEMENT (avimux), avimux->srcpad);
+
+  /* property */
+  avimux->enable_large_avi = DEFAULT_BIGFILE;
+
+  avimux->collect = gst_collect_pads_new ();
+  gst_collect_pads_set_function (avimux->collect,
+      (GstCollectPadsFunction) (GST_DEBUG_FUNCPTR (gst_avi_mux_collect_pads)),
+      avimux);
+  gst_collect_pads_set_event_function (avimux->collect,
+      (GstCollectPadsEventFunction) (GST_DEBUG_FUNCPTR
+          (gst_avi_mux_handle_event)), avimux);
+
+  /* set to clean state */
+  gst_avi_mux_reset (avimux);
+}
+
+static gboolean
+gst_avi_mux_vidsink_set_caps (GstPad * pad, GstCaps * vscaps)
+{
+ GstAviMux *avimux;
+ GstAviVideoPad *avipad;
+ GstAviCollectData *collect_pad;
+ GstStructure *structure;
+ const gchar *mimetype;
+ const GValue *fps, *par;
+ const GValue *codec_data;
+ gint width, height;
+ gint par_n, par_d;
+ gboolean codec_data_in_headers = TRUE;
+ gboolean valid_caps = TRUE;
+
+ avimux = GST_AVI_MUX (gst_pad_get_parent (pad));
+
+ /* find stream data */
+ collect_pad = (GstAviCollectData *) gst_pad_get_element_private (pad);
+ g_assert (collect_pad);
+ avipad = (GstAviVideoPad *) collect_pad->avipad;
+ g_assert (avipad);
+ g_assert (avipad->parent.is_video);
+ g_assert (avipad->parent.hdr.type == GST_MAKE_FOURCC ('v', 'i', 'd', 's'));
+
+ GST_DEBUG_OBJECT (avimux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), vscaps);
+
+ structure = gst_caps_get_structure (vscaps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* global */
+ avipad->vids.size = sizeof (gst_riff_strf_vids);
+ avipad->vids.planes = 1;
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height)) {
+ goto refuse_caps;
+ }
+
+ avipad->vids.width = width;
+ avipad->vids.height = height;
+
+ fps = gst_structure_get_value (structure, "framerate");
+ if (fps == NULL || !GST_VALUE_HOLDS_FRACTION (fps))
+ goto refuse_caps;
+
+ avipad->parent.hdr.rate = gst_value_get_fraction_numerator (fps);
+ avipad->parent.hdr.scale = gst_value_get_fraction_denominator (fps);
+ if (avipad->parent.hdr.rate <= 0 || avipad->parent.hdr.scale <= 0)
+ goto refuse_caps;
+
+ /* (pixel) aspect ratio data, if any */
+ par = gst_structure_get_value (structure, "pixel-aspect-ratio");
+ /* only use video properties header if there is non-trivial aspect info */
+ if (par && GST_VALUE_HOLDS_FRACTION (par) &&
+ ((par_n = gst_value_get_fraction_numerator (par)) !=
+ (par_d = gst_value_get_fraction_denominator (par)))) {
+ GValue to_ratio = { 0, };
+ guint ratio_n, ratio_d;
+
+ /* some fraction voodoo to obtain simplest possible ratio */
+ g_value_init (&to_ratio, GST_TYPE_FRACTION);
+ gst_value_set_fraction (&to_ratio, width * par_n, height * par_d);
+ ratio_n = gst_value_get_fraction_numerator (&to_ratio);
+ ratio_d = gst_value_get_fraction_denominator (&to_ratio);
+ GST_DEBUG_OBJECT (avimux, "generating vprp data with aspect ratio %d/%d",
+ ratio_n, ratio_d);
+ /* simply fill in */
+ avipad->vprp.vert_rate = avipad->parent.hdr.rate / avipad->parent.hdr.scale;
+ avipad->vprp.hor_t_total = width;
+ avipad->vprp.vert_lines = height;
+ avipad->vprp.aspect = (ratio_n) << 16 | (ratio_d & 0xffff);
+ avipad->vprp.width = width;
+ avipad->vprp.height = height;
+ avipad->vprp.fields = 1;
+ avipad->vprp.field_info[0].compressed_bm_height = height;
+ avipad->vprp.field_info[0].compressed_bm_width = width;
+ avipad->vprp.field_info[0].valid_bm_height = height;
+ avipad->vprp.field_info[0].valid_bm_width = width;
+ }
+
+ if (!strcmp (mimetype, "video/x-raw")) {
+ const gchar *format;
+ GstVideoFormat fmt;
+
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_video_format_from_string (format);
+
+ switch (fmt) {
+ case GST_VIDEO_FORMAT_YUY2:
+ avipad->vids.compression = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
+ avipad->vids.bit_cnt = 16;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ avipad->vids.compression = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
+ avipad->vids.bit_cnt = 16;
+ break;
+ case GST_VIDEO_FORMAT_I420:
+ avipad->vids.compression = GST_MAKE_FOURCC ('I', '4', '2', '0');
+ avipad->vids.bit_cnt = 12;
+ break;
+ case GST_VIDEO_FORMAT_GRAY8:
+ avipad->vids.compression = GST_MAKE_FOURCC ('Y', '8', '0', '0');
+ avipad->vids.bit_cnt = 8;
+ break;
+ case GST_VIDEO_FORMAT_v210:
+ avipad->vids.compression = GST_MAKE_FOURCC ('v', '2', '1', '0');
+ avipad->vids.bit_cnt = 20;
+ break;
+ case GST_VIDEO_FORMAT_BGR:
+ avipad->vids.compression = GST_MAKE_FOURCC (0x00, 0x00, 0x00, 0x00);
+ avipad->vids.bit_cnt = 24;
+ break;
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_BGRA:
+ avipad->vids.compression = GST_MAKE_FOURCC (0x00, 0x00, 0x00, 0x00);
+ avipad->vids.bit_cnt = 32;
+ break;
+ default:
+ valid_caps = FALSE;
+ break;
+ }
+ } else {
+ avipad->vids.bit_cnt = 24;
+ avipad->vids.compression = 0;
+
+ /* find format */
+ if (!strcmp (mimetype, "video/x-huffyuv")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('H', 'F', 'Y', 'U');
+ } else if (!strcmp (mimetype, "image/jpeg")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'J', 'P', 'G');
+ } else if (!strcmp (mimetype, "video/x-divx")) {
+ gint divxversion;
+
+ gst_structure_get_int (structure, "divxversion", &divxversion);
+ switch (divxversion) {
+ case 3:
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'I', 'V', '3');
+ break;
+ case 4:
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'I', 'V', 'X');
+ break;
+ case 5:
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'X', '5', '0');
+ break;
+ default:
+ valid_caps = FALSE;
+ }
+ } else if (gst_structure_has_name (structure, "video/x-msmpeg")) {
+ gint msmpegversion;
+
+ gst_structure_get_int (structure, "msmpegversion", &msmpegversion);
+ switch (msmpegversion) {
+ case 41:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'G', '4');
+ break;
+ case 42:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', '4', '2');
+ break;
+ case 43:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', '4', '3');
+ break;
+ default:
+ GST_INFO ("unhandled msmpegversion : %d, fall back to fourcc=MPEG",
+ msmpegversion);
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'E', 'G');
+ break;
+ }
+ } else if (!strcmp (mimetype, "video/x-dv")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'V', 'S', 'D');
+ } else if (!strcmp (mimetype, "video/x-h263")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('H', '2', '6', '3');
+ } else if (!strcmp (mimetype, "video/x-h264")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('H', '2', '6', '4');
+ } else if (!strcmp (mimetype, "video/mpeg")) {
+ gint mpegversion;
+
+ gst_structure_get_int (structure, "mpegversion", &mpegversion);
+
+ switch (mpegversion) {
+ case 2:
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'G', '2');
+ break;
+ case 4:
+ /* mplayer/ffmpeg might not work with DIVX, but with FMP4 */
+ avipad->vids.compression = GST_MAKE_FOURCC ('D', 'I', 'V', 'X');
+
+ /* DIVX/XVID in AVI store the codec_data chunk as part of the
+ first data buffer. So for this case, we prepend the codec_data
+ blob (if any) to that first buffer */
+ codec_data_in_headers = FALSE;
+ break;
+ default:
+ GST_INFO ("unhandled mpegversion : %d, fall back to fourcc=MPEG",
+ mpegversion);
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'P', 'E', 'G');
+ break;
+ }
+ } else if (!strcmp (mimetype, "video/x-wmv")) {
+ gint wmvversion;
+
+ if (gst_structure_get_int (structure, "wmvversion", &wmvversion)) {
+ switch (wmvversion) {
+ case 1:
+ avipad->vids.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '1');
+ break;
+ case 2:
+ avipad->vids.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '2');
+ break;
+ case 3:
+ avipad->vids.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '3');
+ break;
+ default:
+ valid_caps = FALSE;
+ break;
+ }
+ }
+ } else if (!strcmp (mimetype, "image/x-jpc")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('M', 'J', '2', 'C');
+ } else if (!strcmp (mimetype, "video/x-vp8")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('V', 'P', '8', '0');
+ } else if (!strcmp (mimetype, "image/png")) {
+ avipad->vids.compression = GST_MAKE_FOURCC ('p', 'n', 'g', ' ');
+ } else {
+ valid_caps = FALSE;
+ }
+
+ if (!valid_caps)
+ goto refuse_caps;
+ }
+
+ /* codec initialization data, if any */
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data) {
+ if (codec_data_in_headers) {
+ avipad->vids_codec_data = gst_value_get_buffer (codec_data);
+ gst_buffer_ref (avipad->vids_codec_data);
+ /* keep global track of size */
+ avimux->codec_data_size += gst_buffer_get_size (avipad->vids_codec_data);
+ } else {
+ avipad->prepend_buffer =
+ gst_buffer_ref (gst_value_get_buffer (codec_data));
+ }
+ }
+
+ avipad->parent.hdr.fcc_handler = avipad->vids.compression;
+ avipad->vids.image_size = avipad->vids.height * avipad->vids.width;
+ /* hm, maybe why avi only handles one stream well ... */
+ avimux->avi_hdr.width = avipad->vids.width;
+ avimux->avi_hdr.height = avipad->vids.height;
+ avimux->avi_hdr.us_frame = 1000000. * avipad->parent.hdr.scale /
+ avipad->parent.hdr.rate;
+
+ gst_object_unref (avimux);
+ return TRUE;
+
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (avimux, "refused caps %" GST_PTR_FORMAT, vscaps);
+ gst_object_unref (avimux);
+ return FALSE;
+ }
+}
+
+static void gst_avi_mux_audsink_set_fields (GstAviMux * avimux,
+ GstAviAudioPad * avipad);
+
+/* Per-buffer hook installed for parsed MPEG-1 audio input.
+ * Peeks at the 4-byte MPEG frame header of each buffer to derive the
+ * samples-per-frame (spf) and uses it as the stream's VBR chunk duration
+ * (hdr.scale).  If the input turns out not to be parsed, or the frame
+ * size varies between buffers, it falls back to CBR muxing and removes
+ * itself as hook.  Always returns GST_FLOW_OK. */
+static GstFlowReturn
+gst_avi_mux_audsink_scan_mpeg_audio (GstAviMux * avimux, GstAviPad * avipad,
+ GstBuffer * buffer)
+{
+ GstMapInfo map;
+ guint spf;
+ guint32 header;
+ gulong layer;
+ gulong version;
+ gint lsf, mpg25;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ if (map.size < 4)
+ goto not_parsed;
+
+ header = GST_READ_UINT32_BE (map.data);
+
+ /* every MPEG audio frame starts with an 11-bit sync pattern */
+ if ((header & 0xffe00000) != 0xffe00000)
+ goto not_parsed;
+
+ /* thanks go to mp3parse */
+ /* bit 20/19 encode the MPEG version: lsf = low sampling frequency
+ * (MPEG-2), mpg25 = the unofficial MPEG "2.5" extension */
+ if (header & (1 << 20)) {
+ lsf = (header & (1 << 19)) ? 0 : 1;
+ mpg25 = 0;
+ } else {
+ lsf = 1;
+ mpg25 = 1;
+ }
+
+ version = 1 + lsf + mpg25;
+ layer = 4 - ((header >> 17) & 0x3);
+
+ /* see http://www.codeproject.com/audio/MPEGAudioInfo.asp */
+ if (layer == 1)
+ spf = 384;
+ else if (layer == 2)
+ spf = 1152;
+ else if (version == 1) {
+ spf = 1152;
+ } else {
+ /* MPEG-2 or "2.5" */
+ spf = 576;
+ }
+
+ /* first frame seen: lock the chunk duration to this spf;
+ * afterwards only verify it stays constant */
+ if (G_UNLIKELY (avipad->hdr.scale <= 1)) {
+ avipad->hdr.scale = spf;
+ gst_avi_mux_audsink_set_fields (avimux, (GstAviAudioPad *) avipad);
+ } else if (G_UNLIKELY (avipad->hdr.scale != spf)) {
+ GST_WARNING_OBJECT (avimux, "input mpeg audio has varying frame size");
+ goto cbr_fallback;
+ }
+done:
+ gst_buffer_unmap (buffer, &map);
+
+ return GST_FLOW_OK;
+
+ /* EXITS */
+not_parsed:
+ {
+ GST_WARNING_OBJECT (avimux, "input mpeg audio is not parsed");
+ /* fall-through */
+ }
+cbr_fallback:
+ {
+ GST_WARNING_OBJECT (avimux, "falling back to CBR muxing");
+ avipad->hdr.scale = 1;
+ gst_avi_mux_audsink_set_fields (avimux, (GstAviAudioPad *) avipad);
+ /* no need to check further */
+ avipad->hook = NULL;
+ goto done;
+ }
+}
+
+/* Derive the strh rate/scale/samplesize fields for an audio stream from
+ * its auds format info.  hdr.scale > 1 marks the VBR case (fixed number
+ * of samples per chunk); otherwise the stream is muxed as CBR. */
+static void
+gst_avi_mux_audsink_set_fields (GstAviMux * avimux, GstAviAudioPad * avipad)
+{
+  gboolean vbr = avipad->parent.hdr.scale > 1;
+
+  if (!vbr) {
+    /* by spec, hdr.rate is av_bps related, is calculated that way in
+     * stop_file, and reduces to sample rate in PCM like cases */
+    avipad->parent.hdr.scale = 1;
+    avipad->parent.hdr.samplesize = avipad->auds.blockalign;
+    avipad->parent.hdr.rate = avipad->auds.av_bps / avipad->auds.blockalign;
+  } else {
+    /* vbr case: fixed duration per frame/chunk */
+    avipad->parent.hdr.rate = avipad->auds.rate;
+    avipad->parent.hdr.samplesize = 0;
+    avipad->auds.blockalign = avipad->parent.hdr.scale;
+    /* this triggers determining largest audio chunk size to write at end */
+    avipad->max_audio_chunk = avipad->auds.blockalign;
+  }
+}
+
+/* Taken from wavenc */
+/* Map a GStreamer channel-mask (bit N set <=> position N present) to a
+ * WAVEFORMATEXTENSIBLE dwChannelMask, and fill @pos with the positions
+ * in canonical WAV channel order.  Returns 0 if the mask is empty or
+ * contains a position WAV cannot express. */
+static guint64
+gstmask_to_wavmask (guint64 gstmask, GstAudioChannelPosition * pos)
+{
+  /* Bitmask of every position expressible in a WAV channel mask.
+   * These must be bits (1 << position), not the raw enum values:
+   * gstmask is itself a bitmask of positions, so OR-ing the enum
+   * values (0,1,2,...) would wrongly reject e.g. standard 5.1. */
+  const guint64 valid_pos =
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_LFE1) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_REAR_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT) |
+      (G_GUINT64_CONSTANT (1) <<
+      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER) |
+      (G_GUINT64_CONSTANT (1) <<
+      GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_REAR_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT);
+
+  /* WAV channel ordering: dwChannelMask bit 0 = front left, and so on */
+  const GstAudioChannelPosition wav_pos[] = {
+    GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_LFE1,
+    GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_REAR_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT,
+  };
+  int k;
+  int chan = 0;
+  guint64 ret = 0;
+  guint64 mask = 1;
+
+  /* refuse empty masks and masks containing inexpressible positions */
+  if (gstmask == 0 || ((gstmask & ~valid_pos) != 0))
+    return 0;
+
+  for (k = 0; k < G_N_ELEMENTS (wav_pos); ++k) {
+    if (gstmask & (G_GUINT64_CONSTANT (1) << wav_pos[k])) {
+      ret |= mask;
+      pos[chan++] = wav_pos[k];
+    }
+    mask <<= 1;
+  }
+
+  return ret;
+}
+
+/* Caps handler for audio sink pads: fills the stream's auds (strf) and
+ * strh fields from the negotiated caps.  Supports raw PCM plus a fixed
+ * list of encoded formats; any caps that cannot be mapped to a RIFF
+ * WAVE format id are refused.  Returns TRUE on success. */
+static gboolean
+gst_avi_mux_audsink_set_caps (GstPad * pad, GstCaps * vscaps)
+{
+ GstAviMux *avimux;
+ GstAviAudioPad *avipad;
+ GstAviCollectData *collect_pad;
+ GstStructure *structure;
+ const gchar *mimetype;
+ const GValue *codec_data;
+ gint channels, rate;
+
+ avimux = GST_AVI_MUX (gst_pad_get_parent (pad));
+
+ /* find stream data */
+ collect_pad = (GstAviCollectData *) gst_pad_get_element_private (pad);
+ g_assert (collect_pad);
+ avipad = (GstAviAudioPad *) collect_pad->avipad;
+ g_assert (avipad);
+ g_assert (!avipad->parent.is_video);
+ g_assert (avipad->parent.hdr.type == GST_MAKE_FOURCC ('a', 'u', 'd', 's'));
+
+ GST_DEBUG_OBJECT (avimux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), vscaps);
+
+ structure = gst_caps_get_structure (vscaps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* we want these for all */
+ if (!gst_structure_get_int (structure, "channels", &channels) ||
+ !gst_structure_get_int (structure, "rate", &rate)) {
+ goto refuse_caps;
+ }
+
+ avipad->auds.channels = channels;
+ avipad->auds.rate = rate;
+
+ /* codec initialization data, if any; kept (with a ref) for writing
+ * into the strf chunk, and counted towards the overall header size */
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data) {
+ avipad->auds_codec_data = gst_value_get_buffer (codec_data);
+ gst_buffer_ref (avipad->auds_codec_data);
+ /* keep global track of size */
+ avimux->codec_data_size += gst_buffer_get_size (avipad->auds_codec_data);
+ }
+
+ if (!strcmp (mimetype, "audio/x-raw")) {
+ const gchar *format;
+ GstAudioFormat fmt;
+ guint64 channel_mask;
+
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_audio_format_from_string (format);
+
+ if (!gst_structure_get (structure, "channel-mask", GST_TYPE_BITMASK,
+ &channel_mask, NULL))
+ channel_mask = gst_audio_channel_get_fallback_mask (channels);
+
+ /* blockalign temporarily holds bits per sample here;
+ * it is converted to bytes per frame a few lines below */
+ switch (fmt) {
+ case GST_AUDIO_FORMAT_U8:
+ avipad->auds.blockalign = 8;
+ avipad->auds.bits_per_sample = 8;
+ break;
+ case GST_AUDIO_FORMAT_S16LE:
+ avipad->auds.blockalign = 16;
+ avipad->auds.bits_per_sample = 16;
+ break;
+ case GST_AUDIO_FORMAT_S24LE:
+ avipad->auds.blockalign = 24;
+ avipad->auds.bits_per_sample = 24;
+ break;
+ case GST_AUDIO_FORMAT_S32LE:
+ avipad->auds.blockalign = 32;
+ avipad->auds.bits_per_sample = 32;
+ break;
+ default:
+ goto refuse_caps;
+ }
+
+ avipad->audio_format = fmt;
+
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_PCM;
+ /* set some more info straight */
+ avipad->auds.blockalign /= 8;
+ avipad->auds.blockalign *= avipad->auds.channels;
+ avipad->auds.av_bps = avipad->auds.blockalign * avipad->auds.rate;
+
+ /* anything beyond plain mono/stereo layouts needs the extended
+ * WAVEFORMATEXTENSIBLE header with an explicit channel mask */
+ if (channels > 2 || (channels == 1 && channel_mask != 0x0)
+ || (channels == 2 && channel_mask != 0x3)) {
+ avipad->write_waveformatex = TRUE;
+ /* The same for now as we don't support e.g. S24_32 */
+ avipad->valid_bits_per_sample = avipad->auds.bits_per_sample;
+ avipad->channel_mask =
+ gstmask_to_wavmask (channel_mask, avipad->wav_positions);
+
+ /* reorder samples later if GStreamer's channel order differs
+ * from the canonical WAV order */
+ gst_audio_channel_positions_from_mask (channels, channel_mask,
+ avipad->gst_positions);
+ avipad->needs_reorder =
+ memcmp (avipad->gst_positions, avipad->wav_positions,
+ channels * sizeof (*avipad->gst_positions)) != 0;
+ }
+ } else {
+ /* encoded formats: auds.format == 0 after this branch means the
+ * mimetype (or its version field) was not recognized */
+ avipad->auds.format = 0;
+ /* set some defaults */
+ avipad->auds.blockalign = 1;
+ avipad->auds.av_bps = 0;
+ avipad->auds.bits_per_sample = 16;
+
+ if (!strcmp (mimetype, "audio/mpeg")) {
+ gint mpegversion;
+
+ gst_structure_get_int (structure, "mpegversion", &mpegversion);
+ switch (mpegversion) {
+ case 1:{
+ gint layer = 3;
+ gboolean parsed = FALSE;
+
+ gst_structure_get_int (structure, "layer", &layer);
+ gst_structure_get_boolean (structure, "parsed", &parsed);
+ switch (layer) {
+ case 3:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_MPEGL3;
+ break;
+ case 1:
+ case 2:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_MPEGL12;
+ break;
+ }
+ if (parsed) {
+ /* treat as VBR, should also cover CBR case;
+ * setup hook to parse frame header and determine spf */
+ avipad->parent.hook = gst_avi_mux_audsink_scan_mpeg_audio;
+ } else {
+ GST_WARNING_OBJECT (avimux, "unparsed MPEG audio input (?), "
+ "doing CBR muxing");
+ }
+ break;
+ }
+ case 4:
+ {
+ GstBuffer *codec_data_buf = avipad->auds_codec_data;
+ const gchar *stream_format;
+ guint codec;
+ guint8 data[2];
+
+ stream_format = gst_structure_get_string (structure, "stream-format");
+ if (stream_format) {
+ if (strcmp (stream_format, "raw") != 0) {
+ GST_WARNING_OBJECT (avimux, "AAC's stream format '%s' is not "
+ "supported, please use 'raw'", stream_format);
+ break;
+ }
+ } else {
+ GST_WARNING_OBJECT (avimux, "AAC's stream-format not specified, "
+ "assuming 'raw'");
+ }
+
+ /* vbr case needs some special handling */
+ if (!codec_data_buf || gst_buffer_get_size (codec_data_buf) < 2) {
+ GST_WARNING_OBJECT (avimux, "no (valid) codec_data for AAC audio");
+ break;
+ }
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_AAC;
+ /* need to determine frame length: read the AudioSpecificConfig
+ * from codec_data (frame-length flag selects 960 vs 1024) */
+ gst_buffer_extract (codec_data_buf, 0, data, 2);
+ codec = GST_READ_UINT16_BE (data);
+ avipad->parent.hdr.scale = (codec & 0x4) ? 960 : 1024;
+ break;
+ }
+ }
+ } else if (!strcmp (mimetype, "audio/x-vorbis")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_VORBIS3;
+ } else if (!strcmp (mimetype, "audio/x-ac3")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_A52;
+ } else if (!strcmp (mimetype, "audio/x-alaw")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_ALAW;
+ avipad->auds.bits_per_sample = 8;
+ avipad->auds.blockalign = avipad->auds.channels;
+ avipad->auds.av_bps = avipad->auds.blockalign * avipad->auds.rate;
+ } else if (!strcmp (mimetype, "audio/x-mulaw")) {
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_MULAW;
+ avipad->auds.bits_per_sample = 8;
+ avipad->auds.blockalign = avipad->auds.channels;
+ avipad->auds.av_bps = avipad->auds.blockalign * avipad->auds.rate;
+ } else if (!strcmp (mimetype, "audio/x-wma")) {
+ gint version;
+ gint bitrate;
+ gint block_align;
+
+ if (gst_structure_get_int (structure, "wmaversion", &version)) {
+ switch (version) {
+ case 1:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_WMAV1;
+ break;
+ case 2:
+ avipad->auds.format = GST_RIFF_WAVE_FORMAT_WMAV2;
+ break;
+ default:
+ break;
+ }
+ }
+
+ if (avipad->auds.format != 0) {
+ if (gst_structure_get_int (structure, "block_align", &block_align)) {
+ avipad->auds.blockalign = block_align;
+ }
+ if (gst_structure_get_int (structure, "bitrate", &bitrate)) {
+ avipad->auds.av_bps = bitrate / 8;
+ }
+ }
+ }
+ }
+
+ if (!avipad->auds.format)
+ goto refuse_caps;
+
+ avipad->parent.hdr.fcc_handler = avipad->auds.format;
+ gst_avi_mux_audsink_set_fields (avimux, avipad);
+
+ gst_object_unref (avimux);
+ return TRUE;
+
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (avimux, "refused caps %" GST_PTR_FORMAT, vscaps);
+ gst_object_unref (avimux);
+ return FALSE;
+ }
+}
+
+
+/* Request-pad factory: creates an audio_%u or video_%u sink pad,
+ * allocates the matching GstAviPad bookkeeping struct, registers it
+ * with the collect-pads helper and adds it to the element.  Only one
+ * video pad is allowed, and no pads may be added once the header has
+ * been written.  Returns the new pad or NULL on failure. */
+static GstPad *
+gst_avi_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+ GstAviMux *avimux;
+ GstPad *newpad;
+ GstAviPad *avipad;
+ GstElementClass *klass;
+ gchar *name = NULL;
+ const gchar *pad_name = NULL;
+ gint pad_id;
+
+ g_return_val_if_fail (templ != NULL, NULL);
+
+ if (templ->direction != GST_PAD_SINK)
+ goto wrong_direction;
+
+ g_return_val_if_fail (GST_IS_AVI_MUX (element), NULL);
+ avimux = GST_AVI_MUX (element);
+
+ /* write_header still TRUE means streaming has not started yet */
+ if (!avimux->write_header)
+ goto too_late;
+
+ klass = GST_ELEMENT_GET_CLASS (element);
+
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
+ /* don't mix named and unnamed pads, if the pad already exists we fail when
+ * trying to add it */
+ /* sscanf only validates that req_name matches the template pattern;
+ * pad_id itself is not used further */
+ if (req_name != NULL && sscanf (req_name, "audio_%u", &pad_id) == 1) {
+ pad_name = req_name;
+ } else {
+ name = g_strdup_printf ("audio_%u", avimux->audio_pads++);
+ pad_name = name;
+ }
+
+ /* init pad specific data */
+ avipad = g_malloc0 (sizeof (GstAviAudioPad));
+ avipad->is_video = FALSE;
+ avipad->hdr.type = GST_MAKE_FOURCC ('a', 'u', 'd', 's');
+ /* audio goes last */
+ avimux->sinkpads = g_slist_append (avimux->sinkpads, avipad);
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
+ /* though streams are pretty generic and relatively self-contained,
+ * some video info goes in a single avi header -and therefore mux struct-
+ * so video restricted to one stream */
+ if (avimux->video_pads > 0)
+ goto too_many_video_pads;
+
+ /* setup pad */
+ pad_name = "video_0";
+ avimux->video_pads++;
+
+ /* init pad specific data */
+ avipad = g_malloc0 (sizeof (GstAviVideoPad));
+ avipad->is_video = TRUE;
+ avipad->hdr.type = GST_MAKE_FOURCC ('v', 'i', 'd', 's');
+ /* video goes first */
+ avimux->sinkpads = g_slist_prepend (avimux->sinkpads, avipad);
+ } else
+ goto wrong_template;
+
+ newpad = gst_pad_new_from_template (templ, pad_name);
+
+ /* collect-pads owns the GstAviCollectData; link it back to avipad */
+ avipad->collect = gst_collect_pads_add_pad (avimux->collect,
+ newpad, sizeof (GstAviCollectData), NULL, TRUE);
+ ((GstAviCollectData *) (avipad->collect))->avipad = avipad;
+
+ if (!gst_element_add_pad (element, newpad))
+ goto pad_add_failed;
+
+ g_free (name);
+
+ GST_DEBUG_OBJECT (newpad, "Added new request pad");
+
+ return newpad;
+
+ /* ERRORS */
+wrong_direction:
+ {
+ g_warning ("avimux: request pad that is not a SINK pad\n");
+ return NULL;
+ }
+too_late:
+ {
+ g_warning ("avimux: request pad cannot be added after streaming started\n");
+ return NULL;
+ }
+wrong_template:
+ {
+ g_warning ("avimux: this is not our template!\n");
+ return NULL;
+ }
+too_many_video_pads:
+ {
+ GST_WARNING_OBJECT (avimux, "Can only have one video stream");
+ return NULL;
+ }
+pad_add_failed:
+ {
+ GST_WARNING_OBJECT (avimux, "Adding the new pad '%s' failed", pad_name);
+ g_free (name);
+ gst_object_unref (newpad);
+ return NULL;
+ }
+}
+
+/* Release a previously requested sink pad: unregister it from the
+ * collect-pads helper and the element.  The GstAviPad bookkeeping is
+ * only freed if the header has not been written yet; afterwards it
+ * must survive because it still represents a stream in the file. */
+static void
+gst_avi_mux_release_pad (GstElement * element, GstPad * pad)
+{
+  GstAviMux *avimux = GST_AVI_MUX (element);
+  GSList *walk;
+
+  for (walk = avimux->sinkpads; walk != NULL; walk = walk->next) {
+    GstAviPad *avipad = (GstAviPad *) walk->data;
+
+    if (avipad->collect->pad != pad)
+      continue;
+
+    /* pad count should not be adjusted,
+     * as it also represent number of streams present */
+    avipad->collect = NULL;
+    GST_DEBUG_OBJECT (avimux, "removed pad '%s'", GST_PAD_NAME (pad));
+    gst_collect_pads_remove_pad (avimux->collect, pad);
+    gst_element_remove_pad (element, pad);
+    /* if not started yet, we can remove any sign this pad ever existed;
+     * in this case _start will take care of the real pad count */
+    if (avimux->write_header) {
+      avimux->sinkpads = g_slist_remove (avimux->sinkpads, avipad);
+      gst_avi_mux_pad_reset (avipad, TRUE);
+      g_free (avipad);
+    }
+    return;
+  }
+
+  g_warning ("Unknown pad %s", GST_PAD_NAME (pad));
+}
+
+/* Open a RIFF chunk: write the chunk id (4-char @tag string, or the
+ * little-endian @fourcc when @tag is NULL) followed by a placeholder
+ * size field.  Returns the byte-writer position of that size field,
+ * to be handed to gst_avi_mux_end_chunk() later. */
+static inline guint
+gst_avi_mux_start_chunk (GstByteWriter * bw, const gchar * tag, guint32 fourcc)
+{
+  guint size_pos;
+
+  if (tag != NULL)
+    gst_byte_writer_put_data (bw, (const guint8 *) tag, 4);
+  else
+    gst_byte_writer_put_uint32_le (bw, fourcc);
+
+  /* real chunk size comes later */
+  size_pos = gst_byte_writer_get_pos (bw);
+  gst_byte_writer_put_uint32_le (bw, 0);
+
+  return size_pos;
+}
+
+/* Close a RIFF chunk opened with gst_avi_mux_start_chunk(): patch the
+ * size field at @chunk_offset with the payload length written since,
+ * and pad with a zero byte if the chunk ends on an odd offset. */
+static inline void
+gst_avi_mux_end_chunk (GstByteWriter * bw, guint chunk_offset)
+{
+  guint end_pos = gst_byte_writer_get_pos (bw);
+
+  gst_byte_writer_set_pos (bw, chunk_offset);
+  gst_byte_writer_put_uint32_le (bw, end_pos - chunk_offset - 4);
+  gst_byte_writer_set_pos (bw, end_pos);
+
+  /* arrange for even padding */
+  if ((end_pos & 1) != 0)
+    gst_byte_writer_put_uint8 (bw, 0);
+}
+
+/* maybe some of these functions should be moved to riff.h? */
+
+/* GstTagForeachFunc: if @tag maps to a RIFF INFO fourcc, serialize its
+ * string value as an INFO sub-chunk into the GstByteWriter passed via
+ * @data.  Dates get a special "%04d:%02d:%02d" rendering; unmapped or
+ * valueless tags are silently skipped. */
+static void
+gst_avi_mux_write_tag (const GstTagList * list, const gchar * tag,
+    gpointer data)
+{
+  /* RIFF INFO fourcc <-> GStreamer tag name mapping, 0-terminated */
+  const struct
+  {
+    guint32 fcc;
+    const gchar *tag;
+  } rifftags[] = {
+    {GST_RIFF_INFO_IARL, GST_TAG_LOCATION},
+    {GST_RIFF_INFO_IART, GST_TAG_ARTIST},
+    {GST_RIFF_INFO_ICMT, GST_TAG_COMMENT},
+    {GST_RIFF_INFO_ICOP, GST_TAG_COPYRIGHT},
+    {GST_RIFF_INFO_ICRD, GST_TAG_DATE},
+    {GST_RIFF_INFO_IGNR, GST_TAG_GENRE},
+    {GST_RIFF_INFO_IKEY, GST_TAG_KEYWORDS},
+    {GST_RIFF_INFO_INAM, GST_TAG_TITLE},
+    {GST_RIFF_INFO_ISFT, GST_TAG_ENCODER},
+    {GST_RIFF_INFO_ISRC, GST_TAG_ISRC},
+    {0, NULL}
+  };
+  GstByteWriter *bw = data;
+  gchar *value = NULL;
+  gint i;
+
+  for (i = 0; rifftags[i].fcc != 0; i++) {
+    guint chunk;
+
+    if (strcmp (rifftags[i].tag, tag) != 0)
+      continue;
+
+    if (rifftags[i].fcc == GST_RIFF_INFO_ICRD) {
+      GDate *date;
+
+      /* special case for the date tag */
+      if (gst_tag_list_get_date (list, tag, &date)) {
+        value = g_strdup_printf ("%04d:%02d:%02d", g_date_get_year (date),
+            g_date_get_month (date), g_date_get_day (date));
+        g_date_free (date);
+      }
+    } else {
+      gst_tag_list_get_string (list, tag, &value);
+    }
+
+    if (value != NULL) {
+      chunk = gst_avi_mux_start_chunk (bw, NULL, rifftags[i].fcc);
+      gst_byte_writer_put_string (bw, value);
+      gst_avi_mux_end_chunk (bw, chunk);
+      g_free (value);
+      value = NULL;
+      break;
+    }
+  }
+}
+
+static GstBuffer *
+gst_avi_mux_riff_get_avi_header (GstAviMux * avimux)
+{
+ const GstTagList *tags;
+ GstBuffer *buffer = NULL;
+ gint size = 0;
+ GstByteWriter bw;
+ GSList *node;
+ guint avih, riff, hdrl;
+ GstMapInfo map;
+ gboolean hdl = TRUE;
+
+ GST_DEBUG_OBJECT (avimux, "creating avi header, data_size %u, idx_size %u",
+ avimux->data_size, avimux->idx_size);
+
+ if (avimux->tags_snap)
+ tags = avimux->tags_snap;
+ else {
+ /* need to make snapshot of current state of tags to ensure the same set
+ * is used next time around during header rewrite at the end */
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (avimux));
+ if (tags)
+ tags = avimux->tags_snap = gst_tag_list_copy (tags);
+ }
+
+ gst_byte_writer_init_with_size (&bw, 1024, FALSE);
+
+ /* avi header metadata */
+ riff = gst_avi_mux_start_chunk (&bw, "RIFF", 0);
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) "AVI ", 4);
+ hdrl = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) "hdrl", 4);
+
+ avih = gst_avi_mux_start_chunk (&bw, "avih", 0);
+ /* the AVI header itself */
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.us_frame);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.max_bps);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.pad_gran);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.flags);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.tot_frames);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.init_frames);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.streams);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.bufsize);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.width);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.height);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.scale);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.rate);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.start);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->avi_hdr.length);
+ gst_avi_mux_end_chunk (&bw, avih);
+
+ /* stream data */
+ node = avimux->sinkpads;
+ while (node) {
+ GstAviPad *avipad = (GstAviPad *) node->data;
+ GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;
+ GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;
+ gint codec_size = 0;
+ guint strh, strl, strf, indx;
+
+ /* stream list metadata */
+ strl = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) "strl", 4);
+
+ /* generic header */
+ strh = gst_avi_mux_start_chunk (&bw, "strh", 0);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.type);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.fcc_handler);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.flags);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.priority);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.init_frames);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.scale);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.rate);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.start);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.length);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.bufsize);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.quality);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->hdr.samplesize);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, 0);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, 0);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, 0);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, 0);
+ gst_avi_mux_end_chunk (&bw, strh);
+
+ if (avipad->is_video) {
+ codec_size = vidpad->vids_codec_data ?
+ gst_buffer_get_size (vidpad->vids_codec_data) : 0;
+ /* the video header */
+ strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
+ /* the actual header */
+ hdl &=
+ gst_byte_writer_put_uint32_le (&bw, vidpad->vids.size + codec_size);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.width);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.height);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, vidpad->vids.planes);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, vidpad->vids.bit_cnt);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.compression);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.image_size);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.xpels_meter);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.ypels_meter);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.num_colors);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vids.imp_colors);
+ if (vidpad->vids_codec_data) {
+ gst_buffer_map (vidpad->vids_codec_data, &map, GST_MAP_READ);
+ hdl &= gst_byte_writer_put_data (&bw, map.data, map.size);
+ gst_buffer_unmap (vidpad->vids_codec_data, &map);
+ }
+ gst_avi_mux_end_chunk (&bw, strf);
+
+ /* add video property data, mainly for aspect ratio, if any */
+ if (vidpad->vprp.aspect) {
+ gint f;
+ guint vprp;
+
+ /* let's be on the safe side */
+ vidpad->vprp.fields = MIN (vidpad->vprp.fields,
+ GST_RIFF_VPRP_VIDEO_FIELDS);
+ /* the vprp header */
+ vprp = gst_avi_mux_start_chunk (&bw, "vprp", 0);
+ /* the actual data */
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.format_token);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.standard);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.vert_rate);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.hor_t_total);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.vert_lines);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.aspect);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.width);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.height);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, vidpad->vprp.fields);
+
+ for (f = 0; f < vidpad->vprp.fields; ++f) {
+ gst_riff_vprp_video_field_desc *fd;
+
+ fd = &(vidpad->vprp.field_info[f]);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->compressed_bm_height);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->compressed_bm_width);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_height);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_width);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_x_offset);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->valid_bm_y_offset);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->video_x_t_offset);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, fd->video_y_start);
+ }
+ gst_avi_mux_end_chunk (&bw, vprp);
+ }
+ } else {
+ codec_size = audpad->auds_codec_data ?
+ gst_buffer_get_size (audpad->auds_codec_data) : 0;
+ /* the audio header */
+ strf = gst_avi_mux_start_chunk (&bw, "strf", 0);
+ /* the actual header */
+ if (audpad->write_waveformatex)
+ hdl &= gst_byte_writer_put_uint16_le (&bw, 0xfffe);
+ else
+ hdl &= gst_byte_writer_put_uint16_le (&bw, audpad->auds.format);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, audpad->auds.channels);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, audpad->auds.rate);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, audpad->auds.av_bps);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, audpad->auds.blockalign);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, audpad->auds.bits_per_sample);
+ if (audpad->write_waveformatex) {
+ hdl &= gst_byte_writer_put_uint16_le (&bw, codec_size + 22);
+ hdl &=
+ gst_byte_writer_put_uint16_le (&bw, audpad->valid_bits_per_sample);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, audpad->channel_mask);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, audpad->auds.format);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, 0x00100000);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, 0xAA000080);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, 0x719B3800);
+ } else {
+ hdl &= gst_byte_writer_put_uint16_le (&bw, codec_size);
+ }
+ if (audpad->auds_codec_data) {
+ gst_buffer_map (audpad->auds_codec_data, &map, GST_MAP_READ);
+ hdl &= gst_byte_writer_put_data (&bw, map.data, map.size);
+ gst_buffer_unmap (audpad->auds_codec_data, &map);
+ }
+ gst_avi_mux_end_chunk (&bw, strf);
+ }
+
+ /* odml superindex chunk */
+ if (avipad->idx_index > 0)
+ indx = gst_avi_mux_start_chunk (&bw, "indx", 0);
+ else
+ indx = gst_avi_mux_start_chunk (&bw, "JUNK", 0);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, 4); /* bytes per entry */
+ hdl &= gst_byte_writer_put_uint8 (&bw, 0); /* index subtype */
+ hdl &= gst_byte_writer_put_uint8 (&bw, GST_AVI_INDEX_OF_INDEXES); /* index type */
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avipad->idx_index); /* entries in use */
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) avipad->tag, 4); /* stream id */
+ hdl &= gst_byte_writer_put_uint32_le (&bw, 0); /* reserved */
+ hdl &= gst_byte_writer_put_uint32_le (&bw, 0); /* reserved */
+ hdl &= gst_byte_writer_put_uint32_le (&bw, 0); /* reserved */
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) avipad->idx,
+ GST_AVI_SUPERINDEX_COUNT * sizeof (gst_avi_superindex_entry));
+ gst_avi_mux_end_chunk (&bw, indx);
+
+ /* end strl for this stream */
+ gst_avi_mux_end_chunk (&bw, strl);
+
+ node = node->next;
+ }
+
+ if (avimux->video_pads > 0) {
+ guint odml, dmlh;
+ /* odml header */
+ odml = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) "odml", 4);
+ dmlh = gst_avi_mux_start_chunk (&bw, "dmlh", 0);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->total_frames);
+ gst_avi_mux_end_chunk (&bw, dmlh);
+ gst_avi_mux_end_chunk (&bw, odml);
+ }
+
+ /* end hdrl */
+ gst_avi_mux_end_chunk (&bw, hdrl);
+
+ /* tags */
+ if (tags) {
+ guint info;
+
+ info = gst_avi_mux_start_chunk (&bw, "LIST", 0);
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) "INFO", 4);
+
+ gst_tag_list_foreach (tags, gst_avi_mux_write_tag, &bw);
+ if (info + 8 == gst_byte_writer_get_pos (&bw)) {
+ /* no tags written, remove the empty INFO LIST as it is useless
+ * and prevents playback in vlc */
+ gst_byte_writer_set_pos (&bw, info - 4);
+ } else {
+ gst_avi_mux_end_chunk (&bw, info);
+ }
+ }
+
+ /* pop RIFF */
+ gst_avi_mux_end_chunk (&bw, riff);
+
+ /* avi data header */
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) "LIST", 4);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, avimux->data_size);
+ hdl &= gst_byte_writer_put_data (&bw, (guint8 *) "movi", 4);
+
+ if (!hdl)
+ goto beach;
+
+ /* now get the data */
+ buffer = gst_byte_writer_reset_and_get_buffer (&bw);
+
+ /* ... but RIFF includes more than just header */
+ gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ size += 8 + avimux->data_size + avimux->idx_size;
+ GST_WRITE_UINT32_LE (map.data + 4, size);
+
+ GST_MEMDUMP_OBJECT (avimux, "avi header", map.data, map.size);
+ gst_buffer_unmap (buffer, &map);
+
+beach:
+ return buffer;
+}
+
+static GstBuffer *
+gst_avi_mux_riff_get_avix_header (guint32 datax_size)
+{
+ GstBuffer *buffer;
+ GstMapInfo map;
+
+ buffer = gst_buffer_new_and_alloc (24);
+
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ memcpy (map.data + 0, "RIFF", 4);
+ GST_WRITE_UINT32_LE (map.data + 4, datax_size + 3 * 4);
+ memcpy (map.data + 8, "AVIX", 4);
+ memcpy (map.data + 12, "LIST", 4);
+ GST_WRITE_UINT32_LE (map.data + 16, datax_size);
+ memcpy (map.data + 20, "movi", 4);
+ gst_buffer_unmap (buffer, &map);
+
+ return buffer;
+}
+
+static inline GstBuffer *
+gst_avi_mux_riff_get_header (GstAviPad * avipad, guint32 video_frame_size)
+{
+ GstBuffer *buffer;
+ GstMapInfo map;
+
+ buffer = gst_buffer_new_and_alloc (8);
+
+ gst_buffer_map (buffer, &map, GST_MAP_WRITE);
+ memcpy (map.data + 0, avipad->tag, 4);
+ GST_WRITE_UINT32_LE (map.data + 4, video_frame_size);
+ gst_buffer_unmap (buffer, &map);
+
+ return buffer;
+}
+
/* write an odml index chunk in the movi list */
/* Writes one OpenDML 'ix##' standard index chunk for the stream identified
 * by @code (the 4-char stream tag), pushing it downstream, and records its
 * location/size/duration in the stream's @super_index so the 'indx'
 * superindex written with the header can point at it.
 * Only entries of avimux->idx matching @code are emitted; offsets are
 * relative to avimux->avix_start. */
static GstFlowReturn
gst_avi_mux_write_avix_index (GstAviMux * avimux, GstAviPad * avipad,
    gchar * code, gchar * chunk, gst_avi_superindex_entry * super_index,
    gint * super_index_count)
{
  GstFlowReturn res;
  GstBuffer *buffer;
  guint8 *data;
  gst_riff_index_entry *entry;
  gint i;
  guint32 size, entry_count;
  gboolean is_pcm = FALSE;
  guint32 pcm_samples = 0;
  GstMapInfo map;

  /* check if it is pcm; for PCM audio the superindex duration is expressed
   * in samples rather than in chunk count (see below) */
  if (avipad && !avipad->is_video) {
    GstAviAudioPad *audiopad = (GstAviAudioPad *) avipad;
    if (audiopad->auds.format == GST_RIFF_WAVE_FORMAT_PCM) {
      pcm_samples = audiopad->samples;
      is_pcm = TRUE;
    }
  }

  /* allocate the maximum possible: 32-byte header + 8 bytes per entry;
   * trimmed down to the actual size with gst_buffer_resize() below */
  buffer = gst_buffer_new_and_alloc (32 + 8 * avimux->idx_index);

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  data = map.data;

  /* general index chunk info */
  memcpy (map.data + 0, chunk, 4); /* chunk id */
  GST_WRITE_UINT32_LE (map.data + 4, 0); /* chunk size; fill later */
  GST_WRITE_UINT16_LE (map.data + 8, 2); /* index entry is 2 words */
  map.data[10] = 0; /* index subtype */
  map.data[11] = GST_AVI_INDEX_OF_CHUNKS; /* index type: AVI_INDEX_OF_CHUNKS */
  GST_WRITE_UINT32_LE (map.data + 12, 0); /* entries in use; fill later */
  memcpy (map.data + 16, code, 4); /* stream to which index refers */
  GST_WRITE_UINT64_LE (map.data + 20, avimux->avix_start); /* base offset */
  GST_WRITE_UINT32_LE (map.data + 28, 0); /* reserved */
  map.data += 32;

  /* now the actual index entries */
  i = avimux->idx_index;
  entry = avimux->idx;
  while (i > 0) {
    if (memcmp (&entry->id, code, 4) == 0) {
      /* enter relative offset to the data (!) */
      GST_WRITE_UINT32_LE (map.data, GUINT32_FROM_LE (entry->offset) + 8);
      /* msb is set if not (!) keyframe */
      GST_WRITE_UINT32_LE (map.data + 4, GUINT32_FROM_LE (entry->size)
          | (GUINT32_FROM_LE (entry->flags)
              & GST_RIFF_IF_KEYFRAME ? 0 : 1U << 31));
      map.data += 8;
    }
    i--;
    entry++;
  }

  /* ok, now we know the size and no of entries, fill in where needed */
  size = map.data - data;
  GST_WRITE_UINT32_LE (data + 4, size - 8);
  entry_count = (size - 32) / 8;
  GST_WRITE_UINT32_LE (data + 12, entry_count);
  gst_buffer_unmap (buffer, &map);
  gst_buffer_resize (buffer, 0, size);

  /* send; gst_pad_push takes ownership of buffer */
  if ((res = gst_pad_push (avimux->srcpad, buffer)) != GST_FLOW_OK)
    return res;

  /* keep track of this in superindex (if room) ... */
  if (*super_index_count < GST_AVI_SUPERINDEX_COUNT) {
    i = *super_index_count;
    super_index[i].offset = GUINT64_TO_LE (avimux->total_data);
    super_index[i].size = GUINT32_TO_LE (size);
    if (is_pcm) {
      super_index[i].duration = GUINT32_TO_LE (pcm_samples);
    } else {
      super_index[i].duration = GUINT32_TO_LE (entry_count);
    }
    (*super_index_count)++;
  } else
    GST_WARNING_OBJECT (avimux, "No more room in superindex of stream %s",
        code);

  /* ... and in size */
  avimux->total_data += size;
  if (avimux->is_bigfile)
    avimux->datax_size += size;
  else
    avimux->data_size += size;

  return GST_FLOW_OK;
}
+
/* some other usable functions (thankyou xawtv ;-) ) */

/* Appends one entry (tag, flags, current chunk offset, size) to the
 * in-memory index avimux->idx, growing the array in steps of 256 entries.
 * For PCM audio it additionally accumulates the per-riff sample count used
 * by the odml 'indx' superindex durations. */
static void
gst_avi_mux_add_index (GstAviMux * avimux, GstAviPad * avipad, guint32 flags,
    guint32 size)
{
  gchar *code = avipad->tag;
  if (avimux->idx_index == avimux->idx_count) {
    /* grow in batches to avoid a realloc per chunk */
    avimux->idx_count += 256;
    avimux->idx =
        g_realloc (avimux->idx,
        avimux->idx_count * sizeof (gst_riff_index_entry));
  }

  /* in case of pcm audio, we need to count the number of samples for
   * putting in the indx entries */
  if (!avipad->is_video) {
    GstAviAudioPad *audiopad = (GstAviAudioPad *) avipad;
    if (audiopad->auds.format == GST_RIFF_WAVE_FORMAT_PCM) {
      audiopad->samples += size / audiopad->auds.blockalign;
    }
  }

  /* index entries are stored little-endian on disk */
  memcpy (&(avimux->idx[avimux->idx_index].id), code, 4);
  avimux->idx[avimux->idx_index].flags = GUINT32_TO_LE (flags);
  avimux->idx[avimux->idx_index].offset = GUINT32_TO_LE (avimux->idx_offset);
  avimux->idx[avimux->idx_index].size = GUINT32_TO_LE (size);
  avimux->idx_index++;
}
+
/* Writes the legacy 'idx1' index chunk (header + the raw in-memory index
 * array) downstream, updates size accounting, and flags the AVI header
 * with GST_RIFF_AVIH_HASINDEX.  Ownership of avimux->idx is transferred
 * into the pushed buffer, which frees it; avimux->idx is NULLed here. */
static GstFlowReturn
gst_avi_mux_write_index (GstAviMux * avimux)
{
  GstFlowReturn res;
  GstBuffer *buffer;
  GstMapInfo map;
  guint8 *data;
  gsize size;

  /* 8-byte chunk header: 'idx1' + payload size */
  buffer = gst_buffer_new_and_alloc (8);

  gst_buffer_map (buffer, &map, GST_MAP_WRITE);
  memcpy (map.data + 0, "idx1", 4);
  GST_WRITE_UINT32_LE (map.data + 4,
      avimux->idx_index * sizeof (gst_riff_index_entry));
  gst_buffer_unmap (buffer, &map);

  res = gst_pad_push (avimux->srcpad, buffer);
  if (res != GST_FLOW_OK)
    return res;

  buffer = gst_buffer_new ();

  size = avimux->idx_index * sizeof (gst_riff_index_entry);
  data = (guint8 *) avimux->idx;
  avimux->idx = NULL;           /* will be free()'ed by gst_buffer_unref() */

  /* wrap the index array directly as buffer memory; g_free is the
   * destroy-notify so no copy is made */
  gst_buffer_append_memory (buffer,
      gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));

  avimux->total_data += size + 8;

  res = gst_pad_push (avimux->srcpad, buffer);
  if (res != GST_FLOW_OK)
    return res;

  avimux->idx_size += avimux->idx_index * sizeof (gst_riff_index_entry) + 8;

  /* update header */
  avimux->avi_hdr.flags |= GST_RIFF_AVIH_HASINDEX;
  return GST_FLOW_OK;
}
+
/* Closes the current RIFF segment (writing per-stream odml indexes, and
 * either seeking back to rewrite the AVIX header or emitting the legacy
 * idx1 index for the first RIFF), then — unless @last — opens a fresh
 * AVIX extension RIFF so muxing can continue past GST_AVI_MAX_SIZE.
 * Requires a seekable downstream (uses BYTES segments to reposition). */
static GstFlowReturn
gst_avi_mux_bigfile (GstAviMux * avimux, gboolean last)
{
  GstFlowReturn res = GST_FLOW_OK;
  GstBuffer *header;
  GSList *node;

  /* first some odml standard index chunks in the movi list */
  node = avimux->sinkpads;
  while (node) {
    GstAviPad *avipad = (GstAviPad *) node->data;

    node = node->next;

    res = gst_avi_mux_write_avix_index (avimux, avipad, avipad->tag,
        avipad->idx_tag, avipad->idx, &avipad->idx_index);
    if (res != GST_FLOW_OK)
      return res;
  }

  if (avimux->is_bigfile) {
    GstSegment segment;

    gst_segment_init (&segment, GST_FORMAT_BYTES);

    /* search back */
    segment.start = avimux->avix_start;
    segment.time = avimux->avix_start;
    gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));

    /* rewrite AVIX header, now that its final datax_size is known */
    header = gst_avi_mux_riff_get_avix_header (avimux->datax_size);
    res = gst_pad_push (avimux->srcpad, header);

    /* go back to current location, at least try */
    segment.start = avimux->total_data;
    segment.time = avimux->total_data;
    gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));

    if (res != GST_FLOW_OK)
      return res;
  } else {                      /* write a standard index in the first riff chunk */
    res = gst_avi_mux_write_index (avimux);
    /* the index data/buffer is freed by pushing it */
    avimux->idx_count = 0;
    if (res != GST_FLOW_OK)
      return res;
  }

  avimux->avix_start = avimux->total_data;

  if (last)
    return res;

  /* start the next AVIX segment; reset per-segment counters */
  avimux->is_bigfile = TRUE;
  avimux->numx_frames = 0;
  avimux->datax_size = 4;       /* movi tag */
  avimux->idx_index = 0;
  node = avimux->sinkpads;
  while (node) {
    GstAviPad *avipad = (GstAviPad *) node->data;
    node = node->next;
    if (!avipad->is_video) {
      GstAviAudioPad *audiopad = (GstAviAudioPad *) avipad;
      /* per-segment PCM sample counter restarts with each RIFF */
      audiopad->samples = 0;
    }
  }

  /* placeholder AVIX header; final size rewritten on next segment close */
  header = gst_avi_mux_riff_get_avix_header (0);
  avimux->total_data += gst_buffer_get_size (header);
  /* avix_start is used as base offset for the odml index chunk */
  avimux->idx_offset = avimux->total_data - avimux->avix_start;

  return gst_pad_push (avimux->srcpad, header);
}
+
/* enough header blabla now, let's go on to actually writing the headers */

/* Resets all per-file muxing state, assigns AVI stream tags ("00db",
 * "01wb", ...) to the sink pads, emits stream-start/caps/segment events
 * downstream and pushes the initial AVI header (which is rewritten with
 * final values in gst_avi_mux_stop_file()). */
static GstFlowReturn
gst_avi_mux_start_file (GstAviMux * avimux)
{
  GstFlowReturn res;
  GstBuffer *header;
  GSList *node;
  GstCaps *caps;
  GstSegment segment;

  avimux->total_data = 0;
  avimux->total_frames = 0;
  avimux->data_size = 4;        /* movi tag */
  avimux->datax_size = 0;
  avimux->num_frames = 0;
  avimux->numx_frames = 0;
  avimux->avix_start = 0;

  avimux->idx_index = 0;
  avimux->idx_offset = 0;       /* see 10 lines below */
  avimux->idx_size = 0;
  avimux->idx_count = 0;
  avimux->idx = NULL;

  /* state */
  avimux->write_header = FALSE;
  avimux->restart = FALSE;

  /* init streams, see what we've got */
  node = avimux->sinkpads;
  avimux->audio_pads = avimux->video_pads = 0;
  while (node) {
    GstAviPad *avipad = (GstAviPad *) node->data;

    node = node->next;

    if (!avipad->is_video) {
      /* audio stream numbers must start at 1 iff there is a video stream 0;
       * request_pad inserts video pad at head of list, so this test suffices */
      if (avimux->video_pads)
        avimux->audio_pads++;
      avipad->tag = g_strdup_printf ("%02uwb", avimux->audio_pads);
      avipad->idx_tag = g_strdup_printf ("ix%02u", avimux->audio_pads);
      if (!avimux->video_pads)
        avimux->audio_pads++;
    } else {
      avipad->tag = g_strdup_printf ("%02udb", avimux->video_pads);
      avipad->idx_tag = g_strdup_printf ("ix%02u", avimux->video_pads++);
    }
  }

  /* stream-start (FIXME: create id based on input ids) */
  {
    gchar s_id[32];

    g_snprintf (s_id, sizeof (s_id), "avimux-%08x", g_random_int ());
    gst_pad_push_event (avimux->srcpad, gst_event_new_stream_start (s_id));
  }

  caps = gst_pad_get_pad_template_caps (avimux->srcpad);
  gst_pad_set_caps (avimux->srcpad, caps);
  gst_caps_unref (caps);

  /* let downstream know we think in BYTES and expect to do seeking later on */
  gst_segment_init (&segment, GST_FORMAT_BYTES);
  gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));

  /* header */
  avimux->avi_hdr.streams = g_slist_length (avimux->sinkpads);
  avimux->is_bigfile = FALSE;

  header = gst_avi_mux_riff_get_avi_header (avimux);
  avimux->total_data += gst_buffer_get_size (header);

  res = gst_pad_push (avimux->srcpad, header);

  /* index offsets are relative to the end of the header */
  avimux->idx_offset = avimux->total_data;

  return res;
}
+
/* Finalizes the file: writes the pending index (idx1 or odml, depending
 * on bigfile mode), fills in the statistics-dependent header fields
 * (bitrates, stream lengths, frame counts), seeks back to the start and
 * rewrites the AVI header with the final values. */
static GstFlowReturn
gst_avi_mux_stop_file (GstAviMux * avimux)
{
  GstFlowReturn res = GST_FLOW_OK;
  GstBuffer *header;
  GSList *node;
  GstSegment segment;

  /* Do not write index and header, if the index has no data */
  if (avimux->idx == NULL)
    return GST_FLOW_OK;

  /* if bigfile, rewrite header, else write indexes */
  /* don't bail out at once if error, still try to re-write header */
  if (avimux->video_pads > 0) {
    if (avimux->is_bigfile) {
      res = gst_avi_mux_bigfile (avimux, TRUE);
    } else {
      res = gst_avi_mux_write_index (avimux);
    }
  }

  /* we do our best to make it interleaved at least ... */
  if (avimux->audio_pads > 0 && avimux->video_pads > 0)
    avimux->avi_hdr.flags |= GST_RIFF_AVIH_ISINTERLEAVED;

  /* set rate and everything having to do with that */
  avimux->avi_hdr.max_bps = 0;
  node = avimux->sinkpads;
  while (node) {
    GstAviPad *avipad = (GstAviPad *) node->data;

    node = node->next;

    if (!avipad->is_video) {
      GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;

      /* calculate bps if needed */
      if (!audpad->auds.av_bps) {
        if (audpad->audio_time) {
          audpad->auds.av_bps =
              (GST_SECOND * audpad->audio_size) / audpad->audio_time;
          /* round bps to nearest multiple of 8;
           * which is much more likely to be the (cbr) bitrate in use;
           * which in turn results in better timestamp calculation on playback */
          audpad->auds.av_bps = GST_ROUND_UP_8 (audpad->auds.av_bps - 4);
        } else {
          GST_ELEMENT_WARNING (avimux, STREAM, MUX,
              (_("No or invalid input audio, AVI stream will be corrupt.")),
              (NULL));
          audpad->auds.av_bps = 0;
        }
      }
      /* housekeeping for vbr case */
      if (audpad->max_audio_chunk)
        audpad->auds.blockalign = audpad->max_audio_chunk;
      if (audpad->auds.blockalign == 0)
        audpad->auds.blockalign = 1;
      /* note that hdr.rate is actually used by demux in cbr case */
      if (avipad->hdr.scale <= 1)
        avipad->hdr.rate = audpad->auds.av_bps / audpad->auds.blockalign;
      avimux->avi_hdr.max_bps += audpad->auds.av_bps;
      avipad->hdr.length = gst_util_uint64_scale (audpad->audio_time,
          avipad->hdr.rate, avipad->hdr.scale * GST_SECOND);
    } else {
      GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;

      avimux->avi_hdr.max_bps += ((vidpad->vids.bit_cnt + 7) / 8) *
          (1000000. / avimux->avi_hdr.us_frame) * vidpad->vids.image_size;
      avipad->hdr.length = avimux->total_frames;
    }
  }

  /* statistics/total_frames/... */
  avimux->avi_hdr.tot_frames = avimux->num_frames;

  /* seek and rewrite the header */
  gst_segment_init (&segment, GST_FORMAT_BYTES);
  gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));

  /* the first error survives */
  header = gst_avi_mux_riff_get_avi_header (avimux);
  if (res == GST_FLOW_OK)
    res = gst_pad_push (avimux->srcpad, header);
  else
    gst_pad_push (avimux->srcpad, header);

  /* seek back to end-of-data so any subsequent writes append */
  segment.start = avimux->total_data;
  segment.time = avimux->total_data;
  gst_pad_push_event (avimux->srcpad, gst_event_new_segment (&segment));

  avimux->write_header = TRUE;

  return res;
}
+
+static GstFlowReturn
+gst_avi_mux_restart_file (GstAviMux * avimux)
+{
+ GstFlowReturn res;
+
+ if ((res = gst_avi_mux_stop_file (avimux)) != GST_FLOW_OK)
+ return res;
+
+ gst_pad_push_event (avimux->srcpad, gst_event_new_eos ());
+
+ return gst_avi_mux_start_file (avimux);
+}
+
/* handle events (search) */
/* GstCollectPads event callback: intercepts CAPS (to configure the
 * per-stream headers via the vid/aud set_caps helpers) and TAG events
 * (merged into the tag setter); everything else is forwarded to the
 * default collectpads handling.  Consumed events are unreffed here. */
static gboolean
gst_avi_mux_handle_event (GstCollectPads * pads, GstCollectData * data,
    GstEvent * event, gpointer user_data)
{
  GstAviMux *avimux;
  gboolean ret = TRUE;

  avimux = GST_AVI_MUX (user_data);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;
      GstAviCollectData *collect_pad;
      GstAviVideoPad *avipad;

      gst_event_parse_caps (event, &caps);

      /* find stream data */
      collect_pad = (GstAviCollectData *) data;
      g_assert (collect_pad);
      avipad = (GstAviVideoPad *) collect_pad->avipad;
      g_assert (avipad);

      /* cast above is only safe for the common 'parent' member; dispatch
       * on the actual pad type */
      if (avipad->parent.is_video) {
        ret = gst_avi_mux_vidsink_set_caps (data->pad, caps);
      } else {
        ret = gst_avi_mux_audsink_set_caps (data->pad, caps);
      }
      gst_event_unref (event);
      event = NULL;             /* mark as consumed */
      break;
    }
    case GST_EVENT_TAG:{
      GstTagList *list;
      GstTagSetter *setter = GST_TAG_SETTER (avimux);
      const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter);

      gst_event_parse_tag (event, &list);
      gst_tag_setter_merge_tags (setter, list, mode);
      gst_event_unref (event);
      event = NULL;             /* mark as consumed */
      break;
    }
    default:
      break;
  }

  /* unconsumed events go through the default handler, which takes
   * ownership of the event */
  if (event != NULL)
    return gst_collect_pads_event_default (pads, data, event, FALSE);

  return ret;
}
+
+/* send extra 'padding' data */
+static GstFlowReturn
+gst_avi_mux_send_pad_data (GstAviMux * avimux, gulong num_bytes)
+{
+ GstBuffer *buffer;
+
+ buffer = gst_buffer_new_and_alloc (num_bytes);
+ gst_buffer_memset (buffer, 0, 0, num_bytes);
+
+ return gst_pad_push (avimux->srcpad, buffer);
+}
+
/* TRUE if @fourcc denotes raw/uncompressed RGB video (DIB and friends),
 * which is stored bottom-up in AVI and thus needs line inversion */
#define gst_avi_mux_is_uncompressed(fourcc) \
  (fourcc == GST_RIFF_DIB || \
   fourcc == GST_RIFF_rgb || \
   fourcc == GST_RIFF_RGB || fourcc == GST_RIFF_RAW)
+
/*
 * Helper for gst_avi_demux_invert()
 */
/* Exchanges the @bytes-long lines at @d1 and @d2 using caller-provided
 * scratch space @tmp (must be at least @bytes long; lines must not overlap
 * since memcpy is used). */
static inline void
swap_line (guint8 * d1, guint8 * d2, guint8 * tmp, gint bytes)
{
  memcpy (tmp, d1, bytes);
  memcpy (d1, d2, bytes);
  memcpy (d2, tmp, bytes);
}
+
/*
 * Invert DIB buffers... Takes existing buffer and
 * returns either the buffer or a new one (with old
 * one dereferenced).
 * FFMPEG does this by simply negating the height in the header. Should we?
 * FIXME: can't we preallocate tmp? and remember stride, bpp?
 * this could be done in do_one_buffer() I suppose
 */
static GstBuffer *
gst_avi_mux_invert (GstAviPad * avipad, GstBuffer * buf)
{
  gint y, w, h;
  gint bpp, stride;
  guint8 *tmp = NULL;
  GstMapInfo map;

  GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;

  h = vidpad->vids.height;
  w = vidpad->vids.width;
  /* bit_cnt == 0 presumably means "unspecified"; fall back to 8 bpp —
   * TODO confirm against the strf parsing in set_caps */
  bpp = vidpad->vids.bit_cnt ? vidpad->vids.bit_cnt : 8;
  /* DIB rows are padded to 4-byte boundaries */
  stride = GST_ROUND_UP_4 (w * (bpp / 8));

  buf = gst_buffer_make_writable (buf);

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  if (map.size < (stride * h)) {
    /* refuse to read past the end of a short buffer; return it unmodified */
    GST_WARNING ("Buffer is smaller than reported Width x Height x Depth");
    gst_buffer_unmap (buf, &map);
    return buf;
  }

  tmp = g_malloc (stride);

  /* swap row y with row (h-1-y), meeting in the middle */
  for (y = 0; y < h / 2; y++) {
    swap_line (map.data + stride * y, map.data + stride * (h - 1 - y), tmp,
        stride);
  }

  g_free (tmp);

  gst_buffer_unmap (buf, &map);

  return buf;
}
+
/* do buffer */
/* Pops the queued buffer from @avipad, applies per-stream fixups
 * (timestamp to running time, codec-data prepend, DIB inversion, channel
 * reorder), handles restart / RIFF-segment rollover, then pushes chunk
 * header + data (+ optional pad byte) downstream while updating the index
 * and all size/frame statistics. */
static GstFlowReturn
gst_avi_mux_do_buffer (GstAviMux * avimux, GstAviPad * avipad)
{
  GstFlowReturn res;
  GstBuffer *data, *header;
  gulong total_size, pad_bytes = 0;
  guint flags;
  gsize datasize;
  GstClockTime time;

  data = gst_collect_pads_pop (avimux->collect, avipad->collect);
  /* arrange downstream running time */
  time = gst_segment_to_running_time (&avipad->collect->segment,
      GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (data));
  if (time != GST_BUFFER_TIMESTAMP (data)) {
    data = gst_buffer_make_writable (data);
    GST_BUFFER_TIMESTAMP (data) = time;
  }

  /* Prepend a special buffer to the first one for some formats */
  if (avipad->is_video) {
    GstAviVideoPad *vidpad = (GstAviVideoPad *) avipad;

    if (vidpad->prepend_buffer) {
      /* Keep a reference to data until we copy the timestamps, then release it */
      GstBuffer *newdata =
          gst_buffer_append (vidpad->prepend_buffer, gst_buffer_ref (data));
      gst_buffer_copy_into (newdata, data, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
      gst_buffer_unref (data);

      data = newdata;
      vidpad->prepend_buffer = NULL;    /* ownership moved into newdata */
    }

    /* DIB buffers are stored topdown (I don't know why) */
    if (gst_avi_mux_is_uncompressed (avipad->hdr.fcc_handler)) {
      data = gst_avi_mux_invert (avipad, data);
    }
  } else {
    GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;

    if (audpad->needs_reorder) {
      /* raw audio may need GStreamer -> WAV channel layout conversion */
      data = gst_buffer_make_writable (data);
      if (!gst_audio_buffer_reorder_channels (data, audpad->audio_format,
              audpad->auds.channels, audpad->gst_positions,
              audpad->wav_positions)) {
        GST_WARNING_OBJECT (avimux, "Could not reorder channels");
      }
    }
  }

  if (avimux->restart) {
    if ((res = gst_avi_mux_restart_file (avimux)) != GST_FLOW_OK)
      goto done;
  }

  datasize = gst_buffer_get_size (data);

  /* need to restart or start a next avix chunk ? */
  if ((avimux->is_bigfile ? avimux->datax_size : avimux->data_size) +
      datasize > GST_AVI_MAX_SIZE) {
    if (avimux->enable_large_avi) {
      if ((res = gst_avi_mux_bigfile (avimux, FALSE)) != GST_FLOW_OK)
        goto done;
    } else {
      if ((res = gst_avi_mux_restart_file (avimux)) != GST_FLOW_OK)
        goto done;
    }
  }

  /* get header and record some stats; RIFF chunks are padded to even size */
  if (datasize & 1) {
    pad_bytes = 2 - (datasize & 1);
  }
  header = gst_avi_mux_riff_get_header (avipad, datasize);
  total_size = gst_buffer_get_size (header) + datasize + pad_bytes;

  if (avimux->is_bigfile) {
    avimux->datax_size += total_size;
  } else {
    avimux->data_size += total_size;
  }

  if (G_UNLIKELY (avipad->hook)) {
    /* hook consumes the extra reference */
    gst_buffer_ref (data);
    avipad->hook (avimux, avipad, data);
  }

  /* the suggested buffer size is the max frame size */
  if (avipad->hdr.bufsize < datasize)
    avipad->hdr.bufsize = datasize;

  if (avipad->is_video) {
    avimux->total_frames++;

    if (avimux->is_bigfile) {
      avimux->numx_frames++;
    } else {
      avimux->num_frames++;
    }

    /* 0x02: AVIIF_LIST-style flags used by add_index; 0x10 marks keyframes */
    flags = 0x02;
    if (!GST_BUFFER_FLAG_IS_SET (data, GST_BUFFER_FLAG_DELTA_UNIT))
      flags |= 0x10;
  } else {
    GstAviAudioPad *audpad = (GstAviAudioPad *) avipad;

    flags = 0;
    audpad->audio_size += datasize;
    audpad->audio_time += GST_BUFFER_DURATION (data);
    /* max_audio_chunk != 0 means vbr tracking is enabled for this stream */
    if (audpad->max_audio_chunk && datasize > audpad->max_audio_chunk)
      audpad->max_audio_chunk = datasize;
  }

  gst_avi_mux_add_index (avimux, avipad, flags, datasize);

  /* send buffers */
  GST_LOG_OBJECT (avimux, "pushing buffers: head, data");

  if ((res = gst_pad_push (avimux->srcpad, header)) != GST_FLOW_OK)
    goto done;

  /* keep our own ref; push consumes one and 'done' unrefs the other */
  gst_buffer_ref (data);
  if ((res = gst_pad_push (avimux->srcpad, data)) != GST_FLOW_OK)
    goto done;

  if (pad_bytes) {
    if ((res = gst_avi_mux_send_pad_data (avimux, pad_bytes)) != GST_FLOW_OK)
      goto done;
  }

  /* if any push above fails, we're in trouble with file consistency anyway */
  avimux->total_data += total_size;
  avimux->idx_offset += total_size;

done:
  gst_buffer_unref (data);
  return res;
}
+
/* pick the oldest buffer from the pads and push it */
/* Scans all sink pads, picks the pad whose queued buffer has the earliest
 * running time (video gets a half-second grace so audio interleaves ahead
 * of it; invalid timestamps go first), and muxes that buffer.  When no pad
 * has data left, finalizes the file and returns GST_FLOW_EOS. */
static GstFlowReturn
gst_avi_mux_do_one_buffer (GstAviMux * avimux)
{
  GstAviPad *avipad, *best_pad;
  GSList *node;
  GstBuffer *buffer;
  GstClockTime time, best_time, delay;

  node = avimux->sinkpads;
  best_pad = NULL;
  best_time = GST_CLOCK_TIME_NONE;
  for (; node; node = node->next) {
    avipad = (GstAviPad *) node->data;

    /* removed request pads have a NULL collect */
    if (!avipad->collect)
      continue;

    buffer = gst_collect_pads_peek (avimux->collect, avipad->collect);
    if (!buffer)
      continue;
    time = GST_BUFFER_TIMESTAMP (buffer);
    gst_buffer_unref (buffer);

    /* invalid should pass */
    if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (time))) {
      time = gst_segment_to_running_time (&avipad->collect->segment,
          GST_FORMAT_TIME, time);
      if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (time))) {
        /* buffer is outside the segment: drop it and try again next round */
        GST_DEBUG_OBJECT (avimux, "clipping buffer on pad %s outside segment",
            GST_PAD_NAME (avipad->collect->pad));
        buffer = gst_collect_pads_pop (avimux->collect, avipad->collect);
        gst_buffer_unref (buffer);
        return GST_FLOW_OK;
      }
    }

    delay = avipad->is_video ? GST_SECOND / 2 : 0;

    /* invalid timestamp buffers pass first,
     * these are probably initialization buffers */
    if (best_pad == NULL || !GST_CLOCK_TIME_IS_VALID (time)
        || (GST_CLOCK_TIME_IS_VALID (best_time) && time + delay < best_time)) {
      best_pad = avipad;
      best_time = time + delay;
    }
  }

  if (best_pad) {
    GST_LOG_OBJECT (avimux, "selected pad %s with time %" GST_TIME_FORMAT,
        GST_PAD_NAME (best_pad->collect->pad), GST_TIME_ARGS (best_time));

    return gst_avi_mux_do_buffer (avimux, best_pad);
  } else {
    /* simply finish off the file and send EOS */
    gst_avi_mux_stop_file (avimux);
    gst_pad_push_event (avimux->srcpad, gst_event_new_eos ());
    return GST_FLOW_EOS;
  }

}
+
+static GstFlowReturn
+gst_avi_mux_collect_pads (GstCollectPads * pads, GstAviMux * avimux)
+{
+ GstFlowReturn res;
+
+ if (G_UNLIKELY (avimux->write_header)) {
+ if ((res = gst_avi_mux_start_file (avimux)) != GST_FLOW_OK)
+ return res;
+ }
+
+ return gst_avi_mux_do_one_buffer (avimux);
+}
+
+
+static void
+gst_avi_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstAviMux *avimux;
+
+ avimux = GST_AVI_MUX (object);
+
+ switch (prop_id) {
+ case PROP_BIGFILE:
+ g_value_set_boolean (value, avimux->enable_large_avi);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_avi_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstAviMux *avimux;
+
+ avimux = GST_AVI_MUX (object);
+
+ switch (prop_id) {
+ case PROP_BIGFILE:
+ avimux->enable_large_avi = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
/* GstElement state-change handler: starts/stops the collectpads around
 * the READY<->PAUSED transitions and resets the muxer state when going
 * back to READY.  Collectpads must be stopped before chaining up on
 * PAUSED->READY (upward work before chain-up, downward work after). */
static GstStateChangeReturn
gst_avi_mux_change_state (GstElement * element, GstStateChange transition)
{
  GstAviMux *avimux;
  GstStateChangeReturn ret;

  avimux = GST_AVI_MUX (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_collect_pads_start (avimux->collect);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* stop collecting before the parent tears the pads down */
      gst_collect_pads_stop (avimux->collect);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE)
    goto done;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* free indexes, tags, pad state etc. */
      gst_avi_mux_reset (avimux);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

done:
  return ret;
}
diff --git a/gst/avi/gstavimux.h b/gst/avi/gstavimux.h
new file mode 100644
index 0000000000..05db3cdc15
--- /dev/null
+++ b/gst/avi/gstavimux.h
@@ -0,0 +1,209 @@
+/* AVI muxer plugin for GStreamer
+ * Copyright (C) 2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_AVI_MUX_H__
+#define __GST_AVI_MUX_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstcollectpads.h>
+#include <gst/riff/riff-ids.h>
+#include <gst/audio/audio.h>
+#include "avi-ids.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AVI_MUX \
+ (gst_avi_mux_get_type())
+#define GST_AVI_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AVI_MUX,GstAviMux))
+#define GST_AVI_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AVI_MUX,GstAviMuxClass))
+#define GST_IS_AVI_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AVI_MUX))
+#define GST_IS_AVI_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AVI_MUX))
+
+#define GST_AVI_INDEX_OF_INDEXES 0
+#define GST_AVI_INDEX_OF_CHUNKS 1
+
+/* this allows indexing up to 64GB avi file */
+#define GST_AVI_SUPERINDEX_COUNT 32
+
+/* max size */
+#define GST_AVI_MAX_SIZE 0x40000000
+
/* one entry of the OpenDML 'indx' superindex: where an 'ix##' standard
 * index chunk lives and how much media it covers; stored little-endian */
typedef struct _gst_avi_superindex_entry {
  guint64 offset;    /* absolute file offset of the ix## chunk */
  guint32 size;      /* size of that chunk in bytes */
  guint32 duration;  /* chunks covered (PCM: samples covered) */
} gst_avi_superindex_entry;

/* gst_riff_strh with the trailing rcFrame RECT appended, as written to
 * the 'strh' chunk on disk */
typedef struct _gst_riff_strh_full {
  gst_riff_strh parent;
  /* rcFrame, RECT structure (struct of 4 shorts) */
  gint16 left;
  gint16 top;
  gint16 right;
  gint16 bottom;
} gst_riff_strh_full;
+
typedef struct _GstAviPad GstAviPad;
typedef struct _GstAviMux GstAviMux;
typedef struct _GstAviMuxClass GstAviMuxClass;

/* optional per-stream callback invoked with each outgoing data buffer
 * (receives its own reference to the buffer) */
typedef GstFlowReturn (*GstAviPadHook) (GstAviMux * avi, GstAviPad * avipad,
    GstBuffer * buffer);

/* common base for audio/video sink pad state */
struct _GstAviPad {
  /* do not extend, link to it */
  /* is NULL if original sink request pad has been removed */
  GstCollectData *collect;

  /* type */
  gboolean is_video;
  gboolean connected;

  /* chunk tag, e.g. "00db" / "01wb" */
  gchar *tag;

  /* stream header */
  gst_riff_strh hdr;

  /* odml super indexes */
  gst_avi_superindex_entry idx[GST_AVI_SUPERINDEX_COUNT];
  gint idx_index;
  gchar *idx_tag;

  /* stream specific hook */
  GstAviPadHook hook;
};
+
/* video sink pad state; extends GstAviPad */
typedef struct _GstAviVideoPad {
  GstAviPad parent;

  /* stream format ('strf' contents) */
  gst_riff_strf_vids vids;
  /* extra data appended to strf */
  GstBuffer *vids_codec_data;
  /* ODML video properties ('vprp' chunk) */
  gst_riff_vprp vprp;

  /* buffer to prepend to the next pushed frame (e.g. codec setup data);
   * consumed and cleared in gst_avi_mux_do_buffer() */
  GstBuffer *prepend_buffer;

} GstAviVideoPad;
+
/* audio sink pad state; extends GstAviPad */
typedef struct _GstAviAudioPad {
  GstAviPad parent;

  /* stream format ('strf' contents) */
  gst_riff_strf_auds auds;
  /* additional fields for WAVEFORMATEX */
  gboolean write_waveformatex;
  guint16 valid_bits_per_sample;
  guint32 channel_mask;

  /* for raw audio: GStreamer -> WAV channel-order conversion */
  gboolean needs_reorder;
  GstAudioFormat audio_format;
  GstAudioChannelPosition gst_positions[64], wav_positions[64];

  /* audio info for bps calculation */
  guint32 audio_size;
  guint64 audio_time;
  /* max audio chunk size for vbr; 0 disables vbr tracking */
  guint32 max_audio_chunk;

  /* counts the number of samples to put in indx chunk
   * useful for raw audio where usually there are more than
   * 1 sample in each GstBuffer */
  gint samples;

  /* extra data appended to strf */
  GstBuffer *auds_codec_data;
} GstAviAudioPad;
+
/* per-pad data handed to GstCollectPads; links back to our pad state */
typedef struct _GstAviCollectData {
  /* extend the CollectData */
  GstCollectData collect;

  GstAviPad *avipad;
} GstAviCollectData;
+
/* the AVI muxer element instance */
struct _GstAviMux {
  GstElement element;

  /* pads */
  GstPad *srcpad;
  /* sinkpads, video first */
  GSList *sinkpads;
  /* video restricted to 1 pad */
  guint video_pads, audio_pads;
  GstCollectPads *collect;

  /* the AVI header */
  /* still some single stream video data in mux struct */
  gst_riff_avih avi_hdr;
  /* total number of (video) frames */
  guint32 total_frames;
  /* amount of total data (bytes) */
  guint64 total_data;
  /* amount of data (bytes) in the AVI/AVIX block;
   * actually the movi list, so counted from and including the movi tag */
  guint32 data_size, datax_size;
  /* num (video) frames in the AVI/AVIX block */
  guint32 num_frames, numx_frames;
  /* size of hdrl list, including tag as usual */

  /* total size of extra codec data */
  guint32 codec_data_size;
  /* state info: write_header forces a fresh header on next collect,
   * restart forces closing and reopening the file */
  gboolean write_header;
  gboolean restart;

  /* tags */
  GstTagList *tags_snap;

  /* information about the AVI index ('idx'); grown in batches of 256 */
  gst_riff_index_entry *idx;
  gint idx_index, idx_count;
  /* offset of *chunk* (relative to a base offset); entered in the index */
  guint32 idx_offset;
  /* size of idx1 chunk (including! chunk header and size bytes) */
  guint32 idx_size;

  /* are we a big file (in an AVIX extension RIFF) already? */
  gboolean is_bigfile;
  guint64 avix_start;

  /* whether to use "large AVI files" or just stick to small indexed files */
  gboolean enable_large_avi;
};
+
/* class structure; no virtual methods of its own */
struct _GstAviMuxClass {
  GstElementClass parent_class;
};

GType gst_avi_mux_get_type(void);
+
+G_END_DECLS
+
+
+#endif /* __GST_AVI_MUX_H__ */
diff --git a/gst/avi/gstavisubtitle.c b/gst/avi/gstavisubtitle.c
new file mode 100644
index 0000000000..efc5f04051
--- /dev/null
+++ b/gst/avi/gstavisubtitle.c
@@ -0,0 +1,390 @@
+/* GStreamer AVI GAB2 subtitle parser
+ * Copyright (C) <2007> Thijs Vermeir <thijsvermeir@gmail.com>
+ * Copyright (C) <2007> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-avisubtitle
+ * @title: avisubtitle
+ *
+ * Parses the subtitle stream from an avi file.
+ *
+ * ## Example launch line
+ *
+ * |[
+ * gst-launch-1.0 filesrc location=subtitle.avi ! avidemux name=demux ! queue ! avisubtitle ! subparse ! textoverlay name=overlay ! videoconvert ! autovideosink demux. ! queue ! decodebin ! overlay.
+ * ]|
+ * This plays an avi file with a video and subtitle stream.
+ *
+ */
+
+/* example of a subtitle chunk in an avi file
+ * 00000000: 47 41 42 32 00 02 00 10 00 00 00 45 00 6e 00 67 GAB2.......E.n.g
+ * 00000010: 00 6c 00 69 00 73 00 68 00 00 00 04 00 8e 00 00 .l.i.s.h........
+ * 00000020: 00 ef bb bf 31 0d 0a 30 30 3a 30 30 3a 30 30 2c ....1..00:00:00,
+ * 00000030: 31 30 30 20 2d 2d 3e 20 30 30 3a 30 30 3a 30 32 100 --> 00:00:02
+ * 00000040: 2c 30 30 30 0d 0a 3c 62 3e 41 6e 20 55 54 46 38 ,000..<b>An UTF8
+ * 00000050: 20 53 75 62 74 69 74 6c 65 20 77 69 74 68 20 42 Subtitle with B
+ * 00000060: 4f 4d 3c 2f 62 3e 0d 0a 0d 0a 32 0d 0a 30 30 3a OM</b>....2..00:
+ * 00000070: 30 30 3a 30 32 2c 31 30 30 20 2d 2d 3e 20 30 30 00:02,100 --> 00
+ * 00000080: 3a 30 30 3a 30 34 2c 30 30 30 0d 0a 53 6f 6d 65 :00:04,000..Some
+ * 00000090: 74 68 69 6e 67 20 6e 6f 6e 41 53 43 49 49 20 2d thing nonASCII -
+ * 000000a0: 20 c2 b5 c3 b6 c3 a4 c3 bc c3 9f 0d 0a 0d 0a ..............
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstavielements.h"
+#include "gstavisubtitle.h"
+
+GST_DEBUG_CATEGORY_STATIC (avisubtitle_debug);
+#define GST_CAT_DEFAULT avisubtitle_debug
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-subtitle-avi")
+ );
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-subtitle")
+ );
+
+static void gst_avi_subtitle_title_tag (GstAviSubtitle * sub, gchar * title);
+static GstFlowReturn gst_avi_subtitle_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstStateChangeReturn gst_avi_subtitle_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_avi_subtitle_send_event (GstElement * element,
+ GstEvent * event);
+
+#define gst_avi_subtitle_parent_class parent_class
+G_DEFINE_TYPE (GstAviSubtitle, gst_avi_subtitle, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (avisubtitle, "avisubtitle",
+ GST_RANK_PRIMARY, GST_TYPE_AVI_SUBTITLE, avi_element_init (plugin));
+
+#define IS_BOM_UTF8(data) ((GST_READ_UINT32_BE(data) >> 8) == 0xEFBBBF)
+#define IS_BOM_UTF16_BE(data) (GST_READ_UINT16_BE(data) == 0xFEFF)
+#define IS_BOM_UTF16_LE(data) (GST_READ_UINT16_LE(data) == 0xFEFF)
+#define IS_BOM_UTF32_BE(data) (GST_READ_UINT32_BE(data) == 0xFEFF)
+#define IS_BOM_UTF32_LE(data) (GST_READ_UINT32_LE(data) == 0xFEFF)
+
+/* Extract the embedded subtitle file at @offset/@len inside @buffer and
+ * return it as a new buffer holding UTF-8 text, or NULL on failure.
+ * A leading BOM selects the source encoding; BOM-less data is accepted
+ * only if it already validates as UTF-8. */
+static GstBuffer *
+gst_avi_subtitle_extract_file (GstAviSubtitle * sub, GstBuffer * buffer,
+    guint offset, guint len)
+{
+  const gchar *input_enc = NULL;
+  GstBuffer *ret = NULL;
+  gchar *data;
+  GstMapInfo map;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  data = (gchar *) (map.data + offset);
+
+  /* UTF-8 BOM: strip the 3 BOM bytes and pass the data through unchanged */
+  if (len >= (3 + 1) && IS_BOM_UTF8 (data) &&
+      g_utf8_validate (data + 3, len - 3, NULL)) {
+    ret =
+        gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset + 3,
+        len - 3);
+  } else if (len >= 2 && IS_BOM_UTF16_BE (data)) {
+    input_enc = "UTF-16BE";
+    data += 2;
+    len -= 2;
+  } else if (len >= 2 && IS_BOM_UTF16_LE (data)) {
+    /* NOTE(review): a UTF-32LE BOM (FF FE 00 00) also satisfies this
+     * UTF-16LE test, so the UTF-32LE branch below is unreachable for
+     * little-endian input -- confirm whether this ordering is intended */
+    input_enc = "UTF-16LE";
+    data += 2;
+    len -= 2;
+  } else if (len >= 4 && IS_BOM_UTF32_BE (data)) {
+    input_enc = "UTF-32BE";
+    data += 4;
+    len -= 4;
+  } else if (len >= 4 && IS_BOM_UTF32_LE (data)) {
+    input_enc = "UTF-32LE";
+    data += 4;
+    len -= 4;
+  } else if (g_utf8_validate (data, len, NULL)) {
+    /* not specified, check if it's UTF-8 */
+    ret = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, len);
+  } else {
+    /* we could fall back to gst_tag_freeform_to_utf8() here */
+    GST_WARNING_OBJECT (sub, "unspecified encoding, and not UTF-8");
+    ret = NULL;
+    goto done;
+  }
+
+  /* defensive: every branch above either set ret or input_enc */
+  g_return_val_if_fail (ret != NULL || input_enc != NULL, NULL);
+
+  if (input_enc) {
+    GError *err = NULL;
+    gchar *utf8;
+    gsize slen;
+
+    GST_DEBUG_OBJECT (sub, "converting subtitles from %s to UTF-8", input_enc);
+    utf8 = g_convert (data, len, "UTF-8", input_enc, NULL, NULL, &err);
+
+    if (err != NULL) {
+      GST_WARNING_OBJECT (sub, "conversion to UTF-8 failed : %s", err->message);
+      g_error_free (err);
+      ret = NULL;
+      goto done;
+    }
+
+    /* wrap the converted string; g_free is the destroy notify, so the new
+     * buffer takes ownership of utf8 */
+    ret = gst_buffer_new ();
+    slen = strlen (utf8);
+    gst_buffer_append_memory (ret,
+        gst_memory_new_wrapped (0, utf8, slen, 0, slen, utf8, g_free));
+
+    GST_BUFFER_OFFSET (ret) = 0;
+  }
+
+done:
+  gst_buffer_unmap (buffer, &map);
+
+  return ret;
+}
+
+/**
+ * gst_avi_subtitle_title_tag:
+ * @sub: subtitle element
+ * @title: the title of this subtitle stream
+ *
+ * Pushes a tag event carrying @title as GST_TAG_TITLE on the src pad of
+ * @sub, so downstream elements learn the stream's title.
+ */
+static void
+gst_avi_subtitle_title_tag (GstAviSubtitle * sub, gchar * title)
+{
+  gst_pad_push_event (sub->src,
+      gst_event_new_tag (gst_tag_list_new (GST_TAG_TITLE, title, NULL)));
+}
+
+/* Parse a GAB2 chunk:
+ *
+ *    0   5 bytes  "GAB2\0"
+ *    5   2 bytes  0x0002 (fixed, LE)
+ *    7   4 bytes  name_length (LE)
+ *   11   n bytes  stream name (UTF-16LE)
+ *  11+n  2 bytes  0x0004 (fixed, LE)
+ *  13+n  4 bytes  file_length (LE)
+ *  17+n  m bytes  srt/ssa file data (possibly followed by padding)
+ *
+ * Posts the stream name as a title tag and stores the extracted subtitle
+ * file in sub->subfile.  name_length and file_length come from the
+ * (untrusted) stream, so all bounds checks are done in 64-bit arithmetic:
+ * previously "17 + name_length" was computed in 32-bit unsigned and could
+ * wrap around for hostile values, defeating the check and allowing
+ * out-of-bounds reads. */
+static GstFlowReturn
+gst_avi_subtitle_parse_gab2_chunk (GstAviSubtitle * sub, GstBuffer * buf)
+{
+  gchar *name_utf8;
+  guint name_length;
+  guint file_length;
+  GstMapInfo map;
+
+  gst_buffer_map (buf, &map, GST_MAP_READ);
+
+  /* check the magic word "GAB2\0", and the next word must be 2 */
+  if (map.size < 12 || memcmp (map.data, "GAB2\0\2\0", 5 + 2) != 0)
+    goto wrong_magic_word;
+
+  /* read 'name' of subtitle */
+  name_length = GST_READ_UINT32_LE (map.data + 5 + 2);
+  GST_LOG_OBJECT (sub, "length of name: %u", name_length);
+
+  /* widen to 64 bit so a bogus name_length cannot wrap the sum */
+  if ((guint64) map.size <= (guint64) name_length + 17)
+    goto wrong_name_length;
+
+  name_utf8 =
+      g_convert ((gchar *) map.data + 11, name_length, "UTF-8", "UTF-16LE",
+      NULL, NULL, NULL);
+
+  if (name_utf8) {
+    GST_LOG_OBJECT (sub, "subtitle name: %s", name_utf8);
+    gst_avi_subtitle_title_tag (sub, name_utf8);
+    g_free (name_utf8);
+  }
+
+  /* next word must be 4 */
+  if (GST_READ_UINT16_LE (map.data + 11 + name_length) != 0x4)
+    goto wrong_fixed_word_2;
+
+  file_length = GST_READ_UINT32_LE (map.data + 13 + name_length);
+  GST_LOG_OBJECT (sub, "length srt/ssa file: %u", file_length);
+
+  /* again in 64 bit so name_length + file_length cannot wrap */
+  if ((guint64) map.size < (guint64) name_length + file_length + 17)
+    goto wrong_total_length;
+
+  /* store this, so we can send it again after a seek; note that we shouldn't
+   * assume all the remaining data in the chunk is subtitle data, there may
+   * be padding at the end for some reason, so only parse file_length bytes */
+  sub->subfile =
+      gst_avi_subtitle_extract_file (sub, buf, 17 + name_length, file_length);
+
+  if (sub->subfile == NULL)
+    goto extract_failed;
+
+  gst_buffer_unmap (buf, &map);
+
+  return GST_FLOW_OK;
+
+  /* ERRORS */
+wrong_magic_word:
+  {
+    GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL), ("Wrong magic word"));
+    gst_buffer_unmap (buf, &map);
+    return GST_FLOW_ERROR;
+  }
+wrong_name_length:
+  {
+    GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+        ("name doesn't fit in buffer (%" G_GSIZE_FORMAT " < %"
+            G_GUINT64_FORMAT ")", map.size, (guint64) name_length + 17));
+    gst_buffer_unmap (buf, &map);
+    return GST_FLOW_ERROR;
+  }
+wrong_fixed_word_2:
+  {
+    GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+        ("wrong fixed word: expected %u, got %u", 4,
+            GST_READ_UINT16_LE (map.data + 11 + name_length)));
+    gst_buffer_unmap (buf, &map);
+    return GST_FLOW_ERROR;
+  }
+wrong_total_length:
+  {
+    GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+        ("buffer size is wrong: need %" G_GUINT64_FORMAT " bytes, have %"
+            G_GSIZE_FORMAT " bytes",
+            (guint64) name_length + file_length + 17, map.size));
+    gst_buffer_unmap (buf, &map);
+    return GST_FLOW_ERROR;
+  }
+extract_failed:
+  {
+    GST_ELEMENT_ERROR (sub, STREAM, DECODE, (NULL),
+        ("could not extract subtitles"));
+    gst_buffer_unmap (buf, &map);
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* Sink pad chain function: the whole GAB2 chunk is expected in a single
+ * buffer.  Parse it, cache the extracted subtitle file in sub->subfile
+ * (so it can be re-sent after a seek) and push it downstream.  Any
+ * further buffers are dropped with EOS. */
+static GstFlowReturn
+gst_avi_subtitle_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstAviSubtitle *sub = GST_AVI_SUBTITLE (parent);
+  GstFlowReturn ret;
+
+  if (sub->subfile != NULL) {
+    /* we already have the file; EOS tells upstream to stop sending */
+    GST_WARNING_OBJECT (sub, "Got more buffers than expected, dropping");
+    ret = GST_FLOW_EOS;
+    goto done;
+  }
+
+  /* we expect exactly one buffer with the whole srt/ssa file in it */
+  ret = gst_avi_subtitle_parse_gab2_chunk (sub, buffer);
+  if (ret != GST_FLOW_OK)
+    goto done;
+
+  /* now push the subtitle data downstream; keep our own ref in subfile */
+  ret = gst_pad_push (sub->src, gst_buffer_ref (sub->subfile));
+
+done:
+
+  gst_buffer_unref (buffer);
+  return ret;
+}
+
+/* Element send_event handler: a SEEK event is answered by re-pushing the
+ * cached subtitle file on the src pad (if we have one).  Every event is
+ * consumed here; none are forwarded. */
+static gboolean
+gst_avi_subtitle_send_event (GstElement * element, GstEvent * event)
+{
+  GstAviSubtitle *avisubtitle = GST_AVI_SUBTITLE (element);
+  gboolean ret = FALSE;
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+    if (avisubtitle->subfile) {
+      /* a seek simply means: deliver the whole file again */
+      if (gst_pad_push (avisubtitle->src,
+              gst_buffer_ref (avisubtitle->subfile)) == GST_FLOW_OK)
+        ret = TRUE;
+    }
+  }
+  gst_event_unref (event);
+  return ret;
+}
+
+/* Class init: hook up the state-change and send-event vfuncs, register
+ * the static pad templates and set the element metadata. */
+static void
+gst_avi_subtitle_class_init (GstAviSubtitleClass * klass)
+{
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (avisubtitle_debug, "avisubtitle", 0,
+      "parse avi subtitle stream");
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_avi_subtitle_change_state);
+  gstelement_class->send_event =
+      GST_DEBUG_FUNCPTR (gst_avi_subtitle_send_event);
+
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "Avi subtitle parser", "Codec/Parser/Subtitle",
+      "Parse avi subtitle stream", "Thijs Vermeir <thijsvermeir@gmail.com>");
+}
+
+/* Instance init: create the always src/sink pads, install the chain
+ * function and mark the src pad's caps as fixed. */
+static void
+gst_avi_subtitle_init (GstAviSubtitle * self)
+{
+  GstCaps *caps;
+
+  self->src = gst_pad_new_from_static_template (&src_template, "src");
+  gst_element_add_pad (GST_ELEMENT (self), self->src);
+
+  self->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+  gst_pad_set_chain_function (self->sink,
+      GST_DEBUG_FUNCPTR (gst_avi_subtitle_chain));
+
+  /* NOTE(review): setting caps on a pad that is not yet active looks
+   * ineffective in 1.x -- confirm this call still does anything useful */
+  caps = gst_static_pad_template_get_caps (&src_template);
+  gst_pad_set_caps (self->src, caps);
+  gst_caps_unref (caps);
+
+  gst_pad_use_fixed_caps (self->src);
+  gst_element_add_pad (GST_ELEMENT (self), self->sink);
+
+  self->subfile = NULL;
+}
+
+/* State change: release the cached subtitle file when going back from
+ * PAUSED to READY.  The first switch (before chaining up) is an empty
+ * placeholder and performs no work. */
+static GstStateChangeReturn
+gst_avi_subtitle_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstAviSubtitle *sub = GST_AVI_SUBTITLE (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (ret == GST_STATE_CHANGE_FAILURE)
+    return ret;
+
+  /* downward cleanup happens after the parent class handled the change */
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      if (sub->subfile) {
+        gst_buffer_unref (sub->subfile);
+        sub->subfile = NULL;
+      }
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
diff --git a/gst/avi/gstavisubtitle.h b/gst/avi/gstavisubtitle.h
new file mode 100644
index 0000000000..0f8048ee2d
--- /dev/null
+++ b/gst/avi/gstavisubtitle.h
@@ -0,0 +1,39 @@
+
+#ifndef __GSTAVISUBTITLE_H__
+#define __GSTAVISUBTITLE_H__
+
+#include <glib.h>
+#include <glib-object.h>
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstAviSubtitle GstAviSubtitle;
+typedef struct _GstAviSubtitleClass GstAviSubtitleClass;
+
+/* standard GObject boilerplate for the avisubtitle element */
+#define GST_TYPE_AVI_SUBTITLE (gst_avi_subtitle_get_type ())
+#define GST_AVI_SUBTITLE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_AVI_SUBTITLE, GstAviSubtitle))
+#define GST_AVI_SUBTITLE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_AVI_SUBTITLE, GstAviSubtitleClass))
+#define GST_IS_AVI_SUBTITLE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_AVI_SUBTITLE))
+#define GST_IS_AVI_SUBTITLE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_AVI_SUBTITLE))
+#define GST_AVI_SUBTITLE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_AVI_SUBTITLE, GstAviSubtitleClass))
+
+GType gst_avi_subtitle_get_type (void);
+
+/* Instance state: one always src pad, one always sink pad, plus the
+ * extracted subtitle file cached for re-sending after seeks. */
+struct _GstAviSubtitle
+{
+  GstElement parent;
+
+  GstPad *src;
+  GstPad *sink;
+
+  GstBuffer *subfile; /* the complete subtitle file in one buffer */
+};
+
+/* Class struct: no class-level data beyond the parent class. */
+struct _GstAviSubtitleClass
+{
+  GstElementClass parent;
+};
+
+G_END_DECLS
+#endif
diff --git a/gst/avi/meson.build b/gst/avi/meson.build
new file mode 100644
index 0000000000..cd9d40fde6
--- /dev/null
+++ b/gst/avi/meson.build
@@ -0,0 +1,18 @@
+avi_sources = [
+ 'gstavielement.c',
+ 'gstavi.c',
+ 'gstavimux.c',
+ 'gstavidemux.c',
+ 'gstavisubtitle.c'
+]
+
+gstavi = library('gstavi',
+ avi_sources,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc, libsinc],
+ dependencies : [gst_dep, gstriff_dep, gstaudio_dep, gstvideo_dep, gsttag_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstavi, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstavi]
diff --git a/gst/cutter/README b/gst/cutter/README
new file mode 100644
index 0000000000..fc0975e3d0
--- /dev/null
+++ b/gst/cutter/README
@@ -0,0 +1,38 @@
+cutter plugin by thomas <thomas@apestaart.org>
+
+SYNOPSIS
+
+This plugin emits signals when RMS level of audio signal crosses a
+threshold for a given amount of time.
+
+As soon as the buffer's RMS is greater than the threshold value, the plugin fires a CUT_START signal.
+
+When the buffer's RMS level drops below the threshold value for a consecutive run length longer than the given runlength, it sends a CUT_STOP signal.
+
+When a pre-recording buffer is used, the plugin will delay throughput of data when it's in "silent" mode for a maximum length equal to the pre-recording buffer length. As soon as the input level crosses the threshold level, this pre-recorded buffer is flushed to the src pad (so you can actually record the audio just before the threshold crossing) after sending the signal.
+
+ARGUMENTS
+
+GstCutter::threshold
+ level (between 0 and 1) of the threshold
+GstCutter::threshold-dB
+ level of the threshold in dB (between -inf and 0)
+GstCutter::run-length
+ minimum length (in nanoseconds) of silence before the plugin sends the cut_stop signal
+GstCutter::pre-length
+ length (in nanoseconds) of the pre-recording buffer
+
+SIGNALS
+
+ CUT_START
+ gets sent when the level of the signal goes above threshold level
+ CUT_STOP
+ gets sent when the level of the signal has been below the
+ threshold level for a number of consecutive iterations of which
+ the cumulative length is more than the runlength
+
+LIMITATIONS
+
+ * RMS value is calculated over the whole data buffer, so
+ the time resolution is limited to the buffer length
+ * RMS value is calculated over all of the channels combined
diff --git a/gst/cutter/filter.func b/gst/cutter/filter.func
new file mode 100644
index 0000000000..bdbe5663fa
--- /dev/null
+++ b/gst/cutter/filter.func
@@ -0,0 +1,16 @@
+{
+  guint j;
+  register double squaresum = 0.0;
+
+  /*
+   * process data here:
+   * input samples are 8 or 16 bit integers; samples for all channels are
+   * interleaved.  `data` and `num_samples` must be in scope in the file
+   * that includes this body.
+   * NOTE(review): an older comment said the samples arrive in *in_data,
+   * which does not match the code -- it reads `data`.
+   */
+
+  /* plain sum of squares over every interleaved sample */
+  for (j = 0; j < num_samples; j++)
+    squaresum += data[j] * data[j];
+
+  /* mean square: sum of squares divided by the number of samples */
+  return (squaresum / (float) num_samples);
+}
+
diff --git a/gst/cutter/gstcutter.c b/gst/cutter/gstcutter.c
new file mode 100644
index 0000000000..aa1424f5af
--- /dev/null
+++ b/gst/cutter/gstcutter.c
@@ -0,0 +1,498 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2002,2003,2005
+ * Thomas Vander Stichele <thomas at apestaart dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-cutter
+ * @title: cutter
+ *
+ * Analyses the audio signal for periods of silence. The start and end of
+ * silence is signalled by bus messages named
+ * `cutter`.
+ *
+ * The message's structure contains two fields:
+ *
+ * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
+ * * gboolean `above`: %TRUE for begin of silence and %FALSE for end of silence.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -m filesrc location=foo.ogg ! decodebin ! audioconvert ! cutter ! autoaudiosink
+ * ]| Show cut messages.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include "gstcutter.h"
+#include "math.h"
+
+GST_DEBUG_CATEGORY_STATIC (cutter_debug);
+#define GST_CAT_DEFAULT cutter_debug
+
+#define CUTTER_DEFAULT_THRESHOLD_LEVEL 0.1
+#define CUTTER_DEFAULT_THRESHOLD_LENGTH (500 * GST_MSECOND)
+#define CUTTER_DEFAULT_PRE_LENGTH (200 * GST_MSECOND)
+
+static GstStaticPadTemplate cutter_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { S8," GST_AUDIO_NE (S16) " }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ], "
+ "layout = (string) interleaved")
+ );
+
+static GstStaticPadTemplate cutter_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { S8," GST_AUDIO_NE (S16) " }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ], "
+ "layout = (string) interleaved")
+ );
+
+/* property IDs */
+enum
+{
+  PROP_0,
+  PROP_THRESHOLD,
+  PROP_THRESHOLD_DB,
+  PROP_RUN_LENGTH,
+  PROP_PRE_LENGTH,
+  PROP_LEAKY
+};
+
+#define gst_cutter_parent_class parent_class
+G_DEFINE_TYPE (GstCutter, gst_cutter, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (cutter, "cutter", GST_RANK_NONE, GST_TYPE_CUTTER);
+
+static GstStateChangeReturn
+gst_cutter_change_state (GstElement * element, GstStateChange transition);
+
+static void gst_cutter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_cutter_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_cutter_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_cutter_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+
+/* Class init: install properties, pad templates and element metadata.
+ * NOTE(review): the pspec defaults declared here (0.0 / 0 / FALSE) do not
+ * match the defaults actually applied in gst_cutter_init()
+ * (CUTTER_DEFAULT_*); confirm and align them so introspection reports
+ * the real defaults. */
+static void
+gst_cutter_class_init (GstCutterClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+
+  gobject_class = (GObjectClass *) klass;
+  element_class = (GstElementClass *) klass;
+
+  gobject_class->set_property = gst_cutter_set_property;
+  gobject_class->get_property = gst_cutter_get_property;
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_THRESHOLD,
+      g_param_spec_double ("threshold", "Threshold",
+          "Volume threshold before trigger",
+          -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_THRESHOLD_DB,
+      g_param_spec_double ("threshold-dB", "Threshold (dB)",
+          "Volume threshold before trigger (in dB)",
+          -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_RUN_LENGTH,
+      g_param_spec_uint64 ("run-length", "Run length",
+          "Length of drop below threshold before cut_stop (in nanoseconds)",
+          0, G_MAXUINT64, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_PRE_LENGTH,
+      g_param_spec_uint64 ("pre-length", "Pre-recording buffer length",
+          "Length of pre-recording buffer (in nanoseconds)",
+          0, G_MAXUINT64, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_LEAKY,
+      g_param_spec_boolean ("leaky", "Leaky",
+          "do we leak buffers when below threshold ?",
+          FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  GST_DEBUG_CATEGORY_INIT (cutter_debug, "cutter", 0, "Audio cutting");
+
+  gst_element_class_add_static_pad_template (element_class,
+      &cutter_src_factory);
+  gst_element_class_add_static_pad_template (element_class,
+      &cutter_sink_factory);
+  gst_element_class_set_static_metadata (element_class, "Audio cutter",
+      "Filter/Editor/Audio", "Audio Cutter to split audio into non-silent bits",
+      "Thomas Vander Stichele <thomas at apestaart dot org>");
+  element_class->change_state = gst_cutter_change_state;
+}
+
+/* Instance init: create the pads, install the chain/event handlers and
+ * apply the element's actual default property values. */
+static void
+gst_cutter_init (GstCutter * filter)
+{
+  filter->sinkpad =
+      gst_pad_new_from_static_template (&cutter_sink_factory, "sink");
+  gst_pad_set_chain_function (filter->sinkpad, gst_cutter_chain);
+  gst_pad_set_event_function (filter->sinkpad, gst_cutter_event);
+  gst_pad_use_fixed_caps (filter->sinkpad);
+  gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
+
+  filter->srcpad =
+      gst_pad_new_from_static_template (&cutter_src_factory, "src");
+  gst_pad_use_fixed_caps (filter->srcpad);
+  gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
+
+  /* actual defaults (the installed pspecs declare different ones) */
+  filter->threshold_level = CUTTER_DEFAULT_THRESHOLD_LEVEL;
+  filter->threshold_length = CUTTER_DEFAULT_THRESHOLD_LENGTH;
+  filter->silent_run_length = 0 * GST_SECOND;
+  filter->silent = TRUE;
+  filter->silent_prev = FALSE;  /* previous value of silent */
+
+  filter->pre_length = CUTTER_DEFAULT_PRE_LENGTH;
+  filter->pre_run_length = 0 * GST_SECOND;
+  filter->pre_buffer = NULL;
+  filter->leaky = FALSE;
+}
+
+/* Build the "cutter" element message posted on silence transitions:
+ * 'above' is TRUE when the signal rises above the threshold, and
+ * 'timestamp' is the timestamp of the buffer that caused the change. */
+static GstMessage *
+gst_cutter_message_new (GstCutter * c, gboolean above, GstClockTime timestamp)
+{
+  GstStructure *s;
+
+  s = gst_structure_new ("cutter",
+      "above", G_TYPE_BOOLEAN, above,
+      "timestamp", GST_TYPE_CLOCK_TIME, timestamp, NULL);
+
+  return gst_message_new_element (GST_OBJECT (c), s);
+}
+
+/* Calculate the Normalized Cumulative Square over a buffer of the given type
+ * and over all channels combined.  RESOLUTION is the number of value bits of
+ * TYPE, so 1 << (RESOLUTION * 2) normalizes each squared sample into
+ * [-1.0, 1.0] range. */
+
+#define DEFINE_CUTTER_CALCULATOR(TYPE, RESOLUTION) \
+static void inline \
+gst_cutter_calculate_##TYPE (TYPE * in, guint num, \
+    double *NCS) \
+{ \
+  register int j; \
+  double squaresum = 0.0; /* square sum of the integer samples */ \
+  register double square = 0.0; /* Square */ \
+  gdouble normalizer; /* divisor to get a [-1.0, 1.0] range */ \
+ \
+  *NCS = 0.0; /* Normalized Cumulative Square */ \
+ \
+  normalizer = (double) (1 << (RESOLUTION * 2)); \
+ \
+  for (j = 0; j < num; j++) \
+  { \
+    square = ((double) in[j]) * in[j]; \
+    squaresum += square; \
+  } \
+ \
+ \
+  *NCS = squaresum / normalizer; \
+}
+
+/* NOTE(review): the trailing semicolons below are stray (the macro expands
+ * to full function definitions) but harmless with common compilers */
+DEFINE_CUTTER_CALCULATOR (gint16, 15);
+DEFINE_CUTTER_CALCULATOR (gint8, 7);
+
+/* Store the negotiated audio info from @caps and propagate the caps to
+ * the source pad.  Returns FALSE if @caps is not valid raw audio. */
+static gboolean
+gst_cutter_setcaps (GstCutter * filter, GstCaps * caps)
+{
+  GstAudioInfo info;
+
+  if (!gst_audio_info_from_caps (&info, caps))
+    return FALSE;
+
+  filter->info = info;
+
+  return gst_pad_set_caps (filter->srcpad, caps);
+}
+
+/* State change: after the parent class has handled the transition, drop
+ * every buffer still held in the pre-record list on PAUSED->READY. */
+static GstStateChangeReturn
+gst_cutter_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstCutter *filter = GST_CUTTER (element);
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      g_list_free_full (filter->pre_buffer, (GDestroyNotify) gst_buffer_unref);
+      filter->pre_buffer = NULL;
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
+
+/* Sink pad event handler: CAPS events are consumed here (the new caps are
+ * forwarded to the src pad by gst_cutter_setcaps()); all other events go
+ * through the default handler. */
+static gboolean
+gst_cutter_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  gboolean ret;
+  GstCutter *filter;
+
+  filter = GST_CUTTER (parent);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_event_parse_caps (event, &caps);
+      ret = gst_cutter_setcaps (filter, caps);
+      gst_event_unref (event);
+      break;
+    }
+    default:
+      ret = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+  return ret;
+}
+
+/* Sink pad chain function: computes the RMS of each incoming buffer,
+ * tracks the silent/non-silent state, posts "cutter" element messages on
+ * transitions, and either forwards buffers downstream or parks them in
+ * the pre-record list while silent.
+ *
+ * Fixes over the previous version:
+ *  - the not_negotiated error path now unrefs the buffer (chain functions
+ *    own the incoming buffer; it used to leak);
+ *  - zero-length buffers no longer divide by zero when computing the mean
+ *    square (an empty buffer now counts as silence, NMS = 0). */
+static GstFlowReturn
+gst_cutter_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstCutter *filter;
+  GstMapInfo map;
+  gint16 *in_data;
+  gint bpf, rate;
+  gsize in_size;
+  guint num_samples;
+  gdouble NCS = 0.0;            /* Normalized Cumulative Square of buffer */
+  gdouble RMS = 0.0;            /* RMS of signal in buffer */
+  gdouble NMS = 0.0;            /* Normalized Mean Square of buffer */
+  GstBuffer *prebuf;            /* pointer to a prebuffer element */
+  GstClockTime duration;
+
+  filter = GST_CUTTER (parent);
+
+  if (GST_AUDIO_INFO_FORMAT (&filter->info) == GST_AUDIO_FORMAT_UNKNOWN)
+    goto not_negotiated;
+
+  bpf = GST_AUDIO_INFO_BPF (&filter->info);
+  rate = GST_AUDIO_INFO_RATE (&filter->info);
+
+  gst_buffer_map (buf, &map, GST_MAP_READ);
+  in_data = (gint16 *) map.data;
+  in_size = map.size;
+
+  GST_LOG_OBJECT (filter, "length of prerec buffer: %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (filter->pre_run_length));
+
+  /* calculate mean square value on buffer; guard against empty buffers so
+   * we never divide by zero (an empty buffer counts as silence) */
+  switch (GST_AUDIO_INFO_FORMAT (&filter->info)) {
+    case GST_AUDIO_FORMAT_S16:
+      num_samples = in_size / 2;
+      if (num_samples > 0) {
+        gst_cutter_calculate_gint16 (in_data, num_samples, &NCS);
+        NMS = NCS / num_samples;
+      }
+      break;
+    case GST_AUDIO_FORMAT_S8:
+      num_samples = in_size;
+      if (num_samples > 0) {
+        gst_cutter_calculate_gint8 ((gint8 *) in_data, num_samples, &NCS);
+        NMS = NCS / num_samples;
+      }
+      break;
+    default:
+      /* this shouldn't happen: the pad template only allows S8/S16 */
+      g_warning ("no mean square function for format");
+      break;
+  }
+
+  gst_buffer_unmap (buf, &map);
+
+  filter->silent_prev = filter->silent;
+
+  duration = gst_util_uint64_scale (in_size / bpf, GST_SECOND, rate);
+
+  RMS = sqrt (NMS);
+  /* if RMS below threshold, add buffer length to silent run length count
+   * if not, reset
+   */
+  GST_LOG_OBJECT (filter, "buffer stats: NMS %f, RMS %f, audio length %f", NMS,
+      RMS, gst_guint64_to_gdouble (duration));
+
+  if (RMS < filter->threshold_level)
+    filter->silent_run_length += gst_guint64_to_gdouble (duration);
+  else {
+    filter->silent_run_length = 0 * GST_SECOND;
+    filter->silent = FALSE;
+  }
+
+  if (filter->silent_run_length > filter->threshold_length)
+    /* it has been silent long enough, flag it */
+    filter->silent = TRUE;
+
+  /* has the silent status changed ? if so, send right signal
+   * and, if from silent -> not silent, flush pre_record buffer
+   */
+  if (filter->silent != filter->silent_prev) {
+    if (filter->silent) {
+      GstMessage *m =
+          gst_cutter_message_new (filter, FALSE, GST_BUFFER_TIMESTAMP (buf));
+      GST_DEBUG_OBJECT (filter, "signaling CUT_STOP");
+      gst_element_post_message (GST_ELEMENT (filter), m);
+    } else {
+      gint count = 0;
+      GstMessage *m =
+          gst_cutter_message_new (filter, TRUE, GST_BUFFER_TIMESTAMP (buf));
+
+      GST_DEBUG_OBJECT (filter, "signaling CUT_START");
+      gst_element_post_message (GST_ELEMENT (filter), m);
+      /* first of all, flush current buffer */
+      GST_DEBUG_OBJECT (filter, "flushing buffer of length %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (filter->pre_run_length));
+
+      while (filter->pre_buffer) {
+        prebuf = (g_list_first (filter->pre_buffer))->data;
+        filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf);
+        gst_pad_push (filter->srcpad, prebuf);
+        ++count;
+      }
+      GST_DEBUG_OBJECT (filter, "flushed %d buffers", count);
+      filter->pre_run_length = 0 * GST_SECOND;
+    }
+  }
+  /* now check if we have to send the new buffer to the internal buffer cache
+   * or to the srcpad */
+  if (filter->silent) {
+    filter->pre_buffer = g_list_append (filter->pre_buffer, buf);
+    filter->pre_run_length += gst_guint64_to_gdouble (duration);
+
+    /* if the pre-record list has grown too long, drain its head */
+    while (filter->pre_run_length > filter->pre_length) {
+      GstClockTime pduration;
+      gsize psize;
+
+      prebuf = (g_list_first (filter->pre_buffer))->data;
+      g_assert (GST_IS_BUFFER (prebuf));
+
+      psize = gst_buffer_get_size (prebuf);
+      pduration = gst_util_uint64_scale (psize / bpf, GST_SECOND, rate);
+
+      filter->pre_buffer = g_list_remove (filter->pre_buffer, prebuf);
+      filter->pre_run_length -= gst_guint64_to_gdouble (pduration);
+
+      /* only pass buffers if we don't leak */
+      if (!filter->leaky)
+        ret = gst_pad_push (filter->srcpad, prebuf);
+      else
+        gst_buffer_unref (prebuf);
+    }
+  } else
+    ret = gst_pad_push (filter->srcpad, buf);
+
+  return ret;
+
+  /* ERRORS */
+not_negotiated:
+  {
+    /* we own the incoming buffer; drop the ref so it is not leaked */
+    gst_buffer_unref (buf);
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+}
+
+/* Standard GObject property setter for GstCutter. */
+static void
+gst_cutter_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstCutter *filter;
+
+  g_return_if_fail (GST_IS_CUTTER (object));
+  filter = GST_CUTTER (object);
+
+  switch (prop_id) {
+    case PROP_THRESHOLD:
+      filter->threshold_level = g_value_get_double (value);
+      GST_DEBUG ("DEBUG: set threshold level to %f", filter->threshold_level);
+      break;
+    case PROP_THRESHOLD_DB:
+      /* set the level given in dB:
+       * dB = 20 * log10 (level), so level = 10^(dB / 20);
+       * values in dB < 0 result in levels between 0 and 1
+       */
+      filter->threshold_level = pow (10, g_value_get_double (value) / 20);
+      GST_DEBUG_OBJECT (filter, "set threshold level to %f",
+          filter->threshold_level);
+      break;
+    case PROP_RUN_LENGTH:
+      /* set the minimum length of the silent run required */
+      filter->threshold_length =
+          gst_guint64_to_gdouble (g_value_get_uint64 (value));
+      break;
+    case PROP_PRE_LENGTH:
+      /* set the length of the pre-record block */
+      filter->pre_length = gst_guint64_to_gdouble (g_value_get_uint64 (value));
+      break;
+    case PROP_LEAKY:
+      /* set if the pre-record buffer is leaky or not */
+      filter->leaky = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Standard GObject property getter for GstCutter. */
+static void
+gst_cutter_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstCutter *filter;
+
+  g_return_if_fail (GST_IS_CUTTER (object));
+  filter = GST_CUTTER (object);
+
+  switch (prop_id) {
+    case PROP_RUN_LENGTH:
+      g_value_set_uint64 (value, filter->threshold_length);
+      break;
+    case PROP_THRESHOLD:
+      g_value_set_double (value, filter->threshold_level);
+      break;
+    case PROP_THRESHOLD_DB:
+      /* dB = 20 * log10 (level): the exact inverse of the setter's
+       * pow (10, dB / 20).  The previous natural log() returned values
+       * off by a factor of ln(10) on round-trips. */
+      g_value_set_double (value, 20 * log10 (filter->threshold_level));
+      break;
+    case PROP_PRE_LENGTH:
+      g_value_set_uint64 (value, filter->pre_length);
+      break;
+    case PROP_LEAKY:
+      g_value_set_boolean (value, filter->leaky);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Register the cutter element and define the plugin entry point. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (cutter, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    cutter,
+    "Audio Cutter to split audio into non-silent bits",
+    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/cutter/gstcutter.h b/gst/cutter/gstcutter.h
new file mode 100644
index 0000000000..31c135befa
--- /dev/null
+++ b/gst/cutter/gstcutter.h
@@ -0,0 +1,85 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_CUTTER_H__
+#define __GST_CUTTER_H__
+
+
+#include <gst/gst.h>
+/* #include <gst/meta/audioraw.h> */
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif /* __cplusplus */
+
+
+#define GST_TYPE_CUTTER \
+ (gst_cutter_get_type())
+#define GST_CUTTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CUTTER,GstCutter))
+#define GST_CUTTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CUTTER,GstCutterClass))
+#define GST_IS_CUTTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CUTTER))
+#define GST_IS_CUTTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CUTTER))
+
+typedef struct _GstCutter GstCutter;
+typedef struct _GstCutterClass GstCutterClass;
+
+struct _GstCutter
+{
+ GstElement element;
+
+ GstPad *sinkpad, *srcpad;
+
+ double threshold_level; /* level below which to cut */
+ double threshold_length; /* how long signal has to remain
+ * below this level before cutting */
+ double silent_run_length; /* how long has it been below threshold ? */
+ gboolean silent;
+ gboolean silent_prev;
+
+ double pre_length; /* how long can the pre-record buffer be ? */
+ double pre_run_length; /* how long is it currently ? */
+ GList *pre_buffer; /* list of GstBuffers in pre-record buffer */
+ gboolean leaky; /* do we leak an overflowing prebuffer ? */
+
+ GstAudioInfo info;
+};
+
+struct _GstCutterClass
+{
+ GstElementClass parent_class;
+ void (*cut_start) (GstCutter* filter);
+ void (*cut_stop) (GstCutter* filter);
+};
+
+GType gst_cutter_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (cutter);
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+
+#endif /* __GST_CUTTER_H__ */
diff --git a/gst/cutter/meson.build b/gst/cutter/meson.build
new file mode 100644
index 0000000000..2a493c3c8e
--- /dev/null
+++ b/gst/cutter/meson.build
@@ -0,0 +1,9 @@
+gstcutter = library('gstcutter', 'gstcutter.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstaudio_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstcutter, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstcutter]
diff --git a/gst/debugutils/breakmydata.c b/gst/debugutils/breakmydata.c
new file mode 100644
index 0000000000..558d25f85c
--- /dev/null
+++ b/gst/debugutils/breakmydata.c
@@ -0,0 +1,297 @@
+/* GStreamer
+ * Copyright (C) 2004 Benjamin Otte <otte@gnome.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-breakmydata
+ * @title: breakmydata
+ *
+ * This element modifies the contents of the buffer it is passed randomly
+ * according to the parameters set.
+ * It otherwise acts as an identity.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+
+#include "gstdebugutilselements.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_break_my_data_debug);
+#define GST_CAT_DEFAULT gst_break_my_data_debug
+
+#define GST_TYPE_BREAK_MY_DATA \
+ (gst_break_my_data_get_type())
+#define GST_BREAK_MY_DATA(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BREAK_MY_DATA,GstBreakMyData))
+#define GST_BREAK_MY_DATA_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BREAK_MY_DATA,GstBreakMyDataClass))
+#define GST_IS_BREAK_MY_DATA(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BREAK_MY_DATA))
+#define GST_IS_BREAK_MY_DATA_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BREAK_MY_DATA))
+
+GType gst_break_my_data_get_type (void);
+
+enum
+{
+ PROP_0,
+ PROP_SEED,
+ PROP_SET_TO,
+ PROP_SKIP,
+ PROP_PROBABILITY
+};
+
+typedef struct _GstBreakMyData GstBreakMyData;
+typedef struct _GstBreakMyDataClass GstBreakMyDataClass;
+
+struct _GstBreakMyData
+{
+ GstBaseTransform basetransform;
+
+ GRand *rand;
+ guint skipped;
+
+ guint32 seed;
+ gint set;
+ guint skip;
+ gdouble probability;
+};
+
+struct _GstBreakMyDataClass
+{
+ GstBaseTransformClass parent_class;
+};
+
+static void gst_break_my_data_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_break_my_data_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+static GstFlowReturn gst_break_my_data_transform_ip (GstBaseTransform * trans,
+ GstBuffer * buf);
+static gboolean gst_break_my_data_stop (GstBaseTransform * trans);
+static gboolean gst_break_my_data_start (GstBaseTransform * trans);
+
+GstStaticPadTemplate bmd_src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GstStaticPadTemplate bmd_sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+#define gst_break_my_data_parent_class parent_class
+G_DEFINE_TYPE (GstBreakMyData, gst_break_my_data, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (breakmydata, "breakmydata",
+ GST_RANK_NONE, gst_break_my_data_get_type ());
+
+static void
+gst_break_my_data_class_init (GstBreakMyDataClass * klass)
+{
+ GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *gstelement_class;
+ GObjectClass *gobject_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+ gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_break_my_data_debug, "breakmydata", 0,
+ "debugging category for breakmydata element");
+
+ gobject_class->set_property = gst_break_my_data_set_property;
+ gobject_class->get_property = gst_break_my_data_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_SEED,
+ g_param_spec_uint ("seed", "seed",
+ "seed for randomness (initialized when going from READY to PAUSED)",
+ 0, G_MAXUINT32, 0,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SET_TO,
+ g_param_spec_int ("set-to", "set-to",
+ "set changed bytes to this value (-1 means random value)",
+ -1, G_MAXUINT8, -1,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SKIP,
+ g_param_spec_uint ("skip", "skip",
+ "amount of bytes skipped at the beginning of stream",
+ 0, G_MAXUINT, 0,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PROBABILITY,
+ g_param_spec_double ("probability", "probability",
+ "probability for each byte in the buffer to be changed", 0.0, 1.0,
+ 0.0, G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &bmd_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &bmd_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "Break my data",
+ "Testing",
+ "randomly change data in the stream", "Benjamin Otte <otte@gnome>");
+
+ gstbasetrans_class->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_break_my_data_transform_ip);
+ gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_break_my_data_start);
+ gstbasetrans_class->stop = GST_DEBUG_FUNCPTR (gst_break_my_data_stop);
+}
+
+static void
+gst_break_my_data_init (GstBreakMyData * bmd)
+{
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (bmd), TRUE);
+}
+
+static void
+gst_break_my_data_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstBreakMyData *bmd = GST_BREAK_MY_DATA (object);
+
+ GST_OBJECT_LOCK (bmd);
+
+ switch (prop_id) {
+ case PROP_SEED:
+ bmd->seed = g_value_get_uint (value);
+ break;
+ case PROP_SET_TO:
+ bmd->set = g_value_get_int (value);
+ break;
+ case PROP_SKIP:
+ bmd->skip = g_value_get_uint (value);
+ break;
+ case PROP_PROBABILITY:
+ bmd->probability = g_value_get_double (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ GST_OBJECT_UNLOCK (bmd);
+}
+
+static void
+gst_break_my_data_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstBreakMyData *bmd = GST_BREAK_MY_DATA (object);
+
+ GST_OBJECT_LOCK (bmd);
+
+ switch (prop_id) {
+ case PROP_SEED:
+ g_value_set_uint (value, bmd->seed);
+ break;
+ case PROP_SET_TO:
+ g_value_set_int (value, bmd->set);
+ break;
+ case PROP_SKIP:
+ g_value_set_uint (value, bmd->skip);
+ break;
+ case PROP_PROBABILITY:
+ g_value_set_double (value, bmd->probability);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ GST_OBJECT_UNLOCK (bmd);
+}
+
+static GstFlowReturn
+gst_break_my_data_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
+{
+ GstBreakMyData *bmd = GST_BREAK_MY_DATA (trans);
+ GstMapInfo map;
+ gsize i;
+
+ g_return_val_if_fail (gst_buffer_is_writable (buf), GST_FLOW_ERROR);
+
+ GST_OBJECT_LOCK (bmd);
+
+ if (bmd->skipped < bmd->skip) {
+ i = bmd->skip - bmd->skipped;
+ } else {
+ i = 0;
+ }
+
+ gst_buffer_map (buf, &map, GST_MAP_READWRITE);
+
+ GST_LOG_OBJECT (bmd,
+ "got buffer %p (size %" G_GSIZE_FORMAT ", timestamp %" G_GUINT64_FORMAT
+ ", offset %" G_GUINT64_FORMAT ")", buf, map.size,
+ GST_BUFFER_TIMESTAMP (buf), GST_BUFFER_OFFSET (buf));
+
+ for (; i < map.size; i++) {
+ if (g_rand_double_range (bmd->rand, 0, 1.0) <= bmd->probability) {
+ guint8 new;
+
+ if (bmd->set < 0) {
+ new = g_rand_int_range (bmd->rand, 0, 256);
+ } else {
+ new = bmd->set;
+ }
+ GST_INFO_OBJECT (bmd,
+ "changing byte %" G_GSIZE_FORMAT " from 0x%02X to 0x%02X", i,
+ (guint) GST_READ_UINT8 (map.data + i), (guint) ((guint8) new));
+ map.data[i] = new;
+ }
+ }
+ /* don't overflow */
+ bmd->skipped += MIN (G_MAXUINT - bmd->skipped, map.size);
+
+ gst_buffer_unmap (buf, &map);
+
+ GST_OBJECT_UNLOCK (bmd);
+
+ return GST_FLOW_OK;
+}
+
+static gboolean
+gst_break_my_data_start (GstBaseTransform * trans)
+{
+ GstBreakMyData *bmd = GST_BREAK_MY_DATA (trans);
+
+ GST_OBJECT_LOCK (bmd);
+ bmd->rand = g_rand_new_with_seed (bmd->seed);
+ bmd->skipped = 0;
+ GST_OBJECT_UNLOCK (bmd);
+
+ return TRUE;
+}
+
+static gboolean
+gst_break_my_data_stop (GstBaseTransform * trans)
+{
+ GstBreakMyData *bmd = GST_BREAK_MY_DATA (trans);
+
+ GST_OBJECT_LOCK (bmd);
+ g_rand_free (bmd->rand);
+ bmd->rand = NULL;
+ GST_OBJECT_UNLOCK (bmd);
+
+ return TRUE;
+}
diff --git a/gst/debugutils/cpureport.c b/gst/debugutils/cpureport.c
new file mode 100644
index 0000000000..058fc7c5e6
--- /dev/null
+++ b/gst/debugutils/cpureport.c
@@ -0,0 +1,149 @@
+/* GStreamer Cpu Report Element
+ * Copyright (C) <2010> Zaheer Abbas Merali <zaheerabbas merali org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <string.h>
+#include <math.h>
+#include <time.h>
+
+#include "gstdebugutilselements.h"
+#include "cpureport.h"
+
+
+enum
+{
+ PROP_0,
+};
+
+GstStaticPadTemplate cpu_report_src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GstStaticPadTemplate cpu_report_sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstFlowReturn gst_cpu_report_transform_ip (GstBaseTransform * trans,
+ GstBuffer * buf);
+
+static gboolean gst_cpu_report_start (GstBaseTransform * trans);
+static gboolean gst_cpu_report_stop (GstBaseTransform * trans);
+
+#define gst_cpu_report_parent_class parent_class
+G_DEFINE_TYPE (GstCpuReport, gst_cpu_report, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (cpureport, "cpureport",
+ GST_RANK_NONE, gst_cpu_report_get_type ());
+
+static void
+gst_cpu_report_finalize (GObject * obj)
+{
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+}
+
+static void
+gst_cpu_report_class_init (GstCpuReportClass * g_class)
+{
+ GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
+ GObjectClass *gobject_class;
+
+ gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
+ gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (g_class);
+
+ gobject_class->finalize = gst_cpu_report_finalize;
+
+ gst_element_class_add_static_pad_template (element_class,
+ &cpu_report_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &cpu_report_src_template);
+
+ gst_element_class_set_static_metadata (element_class, "CPU report",
+ "Testing",
+ "Post cpu usage information every buffer",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
+
+ gstbasetrans_class->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_cpu_report_transform_ip);
+ gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_cpu_report_start);
+ gstbasetrans_class->stop = GST_DEBUG_FUNCPTR (gst_cpu_report_stop);
+}
+
+static void
+gst_cpu_report_init (GstCpuReport * report)
+{
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (report), TRUE);
+
+}
+
+static GstFlowReturn
+gst_cpu_report_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
+{
+ GstCpuReport *filter;
+ GstClockTime cur_time;
+ clock_t cur_cpu_time;
+ GstMessage *msg;
+ GstStructure *s;
+ GstClockTimeDiff time_taken;
+
+ cur_time = g_get_real_time () * GST_USECOND;
+ cur_cpu_time = clock ();
+
+ filter = GST_CPU_REPORT (trans);
+
+
+ time_taken = cur_time - filter->last_time;
+
+ s = gst_structure_new ("cpu-report", "cpu-time", G_TYPE_DOUBLE,
+ ((gdouble) (cur_cpu_time - filter->last_cpu_time)),
+ "actual-time", G_TYPE_INT64, time_taken, "buffer-time", G_TYPE_INT64,
+ GST_BUFFER_TIMESTAMP (buf), NULL);
+ msg = gst_message_new_element (GST_OBJECT_CAST (filter), s);
+ gst_element_post_message (GST_ELEMENT_CAST (filter), msg);
+ filter->last_time = cur_time;
+ filter->last_cpu_time = cur_cpu_time;
+
+
+ return GST_FLOW_OK;
+}
+
+static gboolean
+gst_cpu_report_start (GstBaseTransform * trans)
+{
+ GstCpuReport *filter;
+
+ filter = GST_CPU_REPORT (trans);
+
+ filter->start_time = filter->last_time = g_get_real_time () * GST_USECOND;
+ filter->last_cpu_time = clock ();
+ return TRUE;
+}
+
+static gboolean
+gst_cpu_report_stop (GstBaseTransform * trans)
+{
+ /* anything we should be doing here? */
+ return TRUE;
+}
diff --git a/gst/debugutils/cpureport.h b/gst/debugutils/cpureport.h
new file mode 100644
index 0000000000..1735e733cc
--- /dev/null
+++ b/gst/debugutils/cpureport.h
@@ -0,0 +1,58 @@
+/* GStreamer CPU Report Element
+ * Copyright (C) <2010> Zaheer Abbas Merali <zaheerabbas merali org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_CPU_REPORT_H__
+#define __GST_CPU_REPORT_H__
+
+#include <time.h>
+
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_CPU_REPORT \
+ (gst_cpu_report_get_type())
+#define GST_CPU_REPORT(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CPU_REPORT,GstCpuReport))
+#define GST_CPU_REPORT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CPU_REPORT,GstCpuReportClass))
+#define GST_IS_CPU_REPORT(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CPU_REPORT))
+#define GST_IS_CPU_REPORT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CPU_REPORT))
+typedef struct _GstCpuReport GstCpuReport;
+typedef struct _GstCpuReportClass GstCpuReportClass;
+
+struct _GstCpuReport
+{
+ GstBaseTransform basetransform;
+
+ GstClockTime start_time;
+ GstClockTime last_time;
+ clock_t last_cpu_time;
+};
+
+struct _GstCpuReportClass
+{
+ GstBaseTransformClass parent_class;
+};
+
+GType gst_cpu_report_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_CPU_REPORT_H__ */
diff --git a/gst/debugutils/gstcapsdebug.c b/gst/debugutils/gstcapsdebug.c
new file mode 100644
index 0000000000..84d75caf6a
--- /dev/null
+++ b/gst/debugutils/gstcapsdebug.c
@@ -0,0 +1,260 @@
+/* GStreamer
+ * Copyright (C) 2010 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include "gstdebugutilselements.h"
+#include "gstcapsdebug.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_caps_debug_debug);
+#define GST_CAT_DEFAULT gst_caps_debug_debug
+
+/* prototypes */
+
+
+static void gst_caps_debug_dispose (GObject * object);
+static void gst_caps_debug_finalize (GObject * object);
+
+static GstFlowReturn gst_caps_debug_sink_chain (GstPad * pad,
+ GstBuffer * buffer);
+static GstCaps *gst_caps_debug_getcaps (GstPad * pad);
+static gboolean gst_caps_debug_acceptcaps (GstPad * pad, GstCaps * caps);
+static GstFlowReturn gst_caps_debug_bufferalloc (GstPad * pad,
+ guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
+
+static GstStateChangeReturn
+gst_caps_debug_change_state (GstElement * element, GstStateChange transition);
+
+/* pad templates */
+
+static GstStaticPadTemplate gst_caps_debug_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_caps_debug_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+/* class initialization */
+
+#define gst_caps_debug_parent_class parent_class
+G_DEFINE_TYPE (GstCapsDebug, gst_caps_debug, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (capsdebug, "capsdebug",
+ GST_RANK_PRIMARY, gst_caps_debug_get_type ());
+
+static void
+gst_caps_debug_class_init (GstCapsDebugClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->dispose = gst_caps_debug_dispose;
+ gobject_class->finalize = gst_caps_debug_finalize;
+ element_class->change_state = GST_DEBUG_FUNCPTR (gst_caps_debug_change_state);
+
+ GST_DEBUG_CATEGORY_INIT (gst_caps_debug_debug, "capsdebug", 0,
+ "debug category for capsdebug element");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_caps_debug_src_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_caps_debug_sink_template);
+
+ gst_element_class_set_static_metadata (element_class, "Caps debug",
+ "Generic", "Debug caps negotiation", "David Schleef <ds@schleef.org>");
+}
+
+static void
+gst_caps_debug_init (GstCapsDebug * capsdebug)
+{
+
+ capsdebug->srcpad =
+ gst_pad_new_from_static_template (&gst_caps_debug_src_template, "src");
+ gst_pad_set_getcaps_function (capsdebug->srcpad,
+ GST_DEBUG_FUNCPTR (gst_caps_debug_getcaps));
+ gst_pad_set_acceptcaps_function (capsdebug->srcpad,
+ GST_DEBUG_FUNCPTR (gst_caps_debug_acceptcaps));
+ gst_element_add_pad (GST_ELEMENT (capsdebug), capsdebug->srcpad);
+
+ capsdebug->sinkpad =
+ gst_pad_new_from_static_template (&gst_caps_debug_sink_template, "sink");
+ gst_pad_set_chain_function (capsdebug->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_caps_debug_sink_chain));
+ gst_pad_set_bufferalloc_function (capsdebug->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_caps_debug_bufferalloc));
+ gst_pad_set_getcaps_function (capsdebug->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_caps_debug_getcaps));
+ gst_pad_set_acceptcaps_function (capsdebug->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_caps_debug_acceptcaps));
+ gst_element_add_pad (GST_ELEMENT (capsdebug), capsdebug->sinkpad);
+
+}
+
+void
+gst_caps_debug_dispose (GObject * object)
+{
+ /* clean up as possible. may be called multiple times */
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+void
+gst_caps_debug_finalize (GObject * object)
+{
+ /* clean up object here */
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+
+static GstStateChangeReturn
+gst_caps_debug_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ return ret;
+}
+
+
+static GstFlowReturn
+gst_caps_debug_sink_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstFlowReturn ret;
+ GstCapsDebug *capsdebug;
+
+ capsdebug = GST_CAPS_DEBUG (gst_pad_get_parent (pad));
+
+ ret = gst_pad_push (capsdebug->srcpad, buffer);
+
+ gst_object_unref (capsdebug);
+
+ return ret;
+}
+
+#define THISPAD ((pad == capsdebug->srcpad) ? "downstream" : "upstream")
+#define OTHERPAD ((pad == capsdebug->srcpad) ? "upstream" : "downstream")
+
+static GstCaps *
+gst_caps_debug_getcaps (GstPad * pad)
+{
+ GstCaps *caps;
+ GstCapsDebug *capsdebug;
+ gchar *s;
+ GstPad *otherpad;
+
+ capsdebug = GST_CAPS_DEBUG (gst_pad_get_parent (pad));
+ otherpad =
+ (pad == capsdebug->srcpad) ? capsdebug->sinkpad : capsdebug->srcpad;
+
+ GST_INFO ("%s called getcaps", THISPAD);
+
+ caps = gst_pad_peer_get_caps (otherpad);
+
+ s = gst_caps_to_string (caps);
+ GST_INFO ("%s returned %s", OTHERPAD, s);
+ g_free (s);
+
+ if (caps == NULL)
+ caps = gst_caps_new_any ();
+
+ gst_object_unref (capsdebug);
+
+ return caps;
+}
+
+
+static gboolean
+gst_caps_debug_acceptcaps (GstPad * pad, GstCaps * caps)
+{
+ GstCapsDebug *capsdebug;
+ gchar *s;
+ gboolean ret;
+ GstPad *otherpad;
+
+ capsdebug = GST_CAPS_DEBUG (gst_pad_get_parent (pad));
+ otherpad =
+ (pad == capsdebug->srcpad) ? capsdebug->sinkpad : capsdebug->srcpad;
+
+ s = gst_caps_to_string (caps);
+ GST_INFO ("%s called acceptcaps with %s", THISPAD, s);
+ g_free (s);
+
+ ret = gst_pad_peer_accept_caps (otherpad, caps);
+
+ GST_INFO ("%s returned %s", OTHERPAD, ret ? "TRUE" : "FALSE");
+
+ gst_object_unref (capsdebug);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_caps_debug_bufferalloc (GstPad * pad, guint64 offset, guint size,
+ GstCaps * caps, GstBuffer ** buf)
+{
+ GstCapsDebug *capsdebug;
+ gchar *s;
+ gchar *t;
+ GstFlowReturn ret;
+ GstPad *otherpad;
+ gboolean newcaps;
+
+ capsdebug = GST_CAPS_DEBUG (gst_pad_get_parent (pad));
+ otherpad =
+ (pad == capsdebug->srcpad) ? capsdebug->sinkpad : capsdebug->srcpad;
+
+ newcaps = (caps != GST_PAD_CAPS (pad));
+
+ if (newcaps) {
+ s = gst_caps_to_string (caps);
+ GST_INFO ("%s called bufferalloc with new caps, offset=%" G_GUINT64_FORMAT
+ " size=%d caps=%s", THISPAD, offset, size, s);
+ g_free (s);
+ }
+
+ ret = gst_pad_alloc_buffer_and_set_caps (otherpad, offset, size, caps, buf);
+
+ if (newcaps) {
+ GST_INFO ("%s returned %s", OTHERPAD, gst_flow_get_name (ret));
+ }
+ if (caps != GST_BUFFER_CAPS (*buf)) {
+ s = gst_caps_to_string (caps);
+ t = gst_caps_to_string (GST_BUFFER_CAPS (*buf));
+ GST_INFO
+ ("%s returned from bufferalloc with different caps, requested=%s returned=%s",
+ OTHERPAD, s, t);
+ g_free (s);
+ g_free (t);
+ }
+
+ gst_object_unref (capsdebug);
+
+ return ret;
+}
diff --git a/gst/debugutils/gstcapsdebug.h b/gst/debugutils/gstcapsdebug.h
new file mode 100644
index 0000000000..9d0930dcc6
--- /dev/null
+++ b/gst/debugutils/gstcapsdebug.h
@@ -0,0 +1,54 @@
+/* GStreamer
+ * Copyright (C) 2010 FIXME <fixme@example.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _GST_CAPS_DEBUG_H_
+#define _GST_CAPS_DEBUG_H_
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_CAPS_DEBUG (gst_caps_debug_get_type())
+#define GST_CAPS_DEBUG(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAPS_DEBUG,GstCapsDebug))
+#define GST_CAPS_DEBUG_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAPS_DEBUG,GstCapsDebugClass))
+#define GST_IS_CAPS_DEBUG(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAPS_DEBUG))
+#define GST_IS_CAPS_DEBUG_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAPS_DEBUG))
+
+typedef struct _GstCapsDebug GstCapsDebug;
+typedef struct _GstCapsDebugClass GstCapsDebugClass;
+
+struct _GstCapsDebug
+{
+ GstElement base_capsdebug;
+
+ GstPad *srcpad;
+ GstPad *sinkpad;
+
+};
+
+struct _GstCapsDebugClass
+{
+ GstElementClass base_capsdebug_class;
+};
+
+GType gst_caps_debug_get_type (void);
+
+G_END_DECLS
+
+#endif
diff --git a/gst/debugutils/gstcapssetter.c b/gst/debugutils/gstcapssetter.c
new file mode 100644
index 0000000000..f6dad048d8
--- /dev/null
+++ b/gst/debugutils/gstcapssetter.c
@@ -0,0 +1,335 @@
+/* GStreamer Element
+ * Copyright (C) 2006-2009 Mark Nauwelaerts <mnauw@users.sourceforge.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-capssetter
+ * @title: capssetter
+ *
+ * Sets or merges caps on a stream's buffers. That is, a buffer's caps are
+ * updated using (fields of) #GstCapsSetter:caps. Note that this may contain
+ * multiple structures (though not likely recommended), but each of these must
+ * be fixed (or will otherwise be rejected).
+ *
+ * If #GstCapsSetter:join is %TRUE, then the incoming caps' mime-type is
+ * compared to the mime-type(s) of provided caps and only matching structure(s)
+ * are considered for updating.
+ *
+ * If #GstCapsSetter:replace is %TRUE, then any caps update is preceded by
+ * clearing existing fields, making provided fields (as a whole) replace
+ * incoming ones. Otherwise, no clearing is performed, in which case provided
+ * fields are added/merged onto incoming caps
+ *
+ * Although this element might mainly serve as debug helper,
+ * it can also practically be used to correct a faulty pixel-aspect-ratio,
+ * or to modify a yuv fourcc value to effectively swap chroma components or such
+ * alike.
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdebugutilselements.h"
+#include "gstcapssetter.h"
+
+#include <string.h>
+
+
+GST_DEBUG_CATEGORY_STATIC (caps_setter_debug);
+#define GST_CAT_DEFAULT caps_setter_debug
+
+
+/* signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_CAPS,
+ PROP_JOIN,
+ PROP_REPLACE
+ /* FILL ME */
+};
+
+#define DEFAULT_JOIN TRUE
+#define DEFAULT_REPLACE FALSE
+
+static GstStaticPadTemplate gst_caps_setter_src_template =
+GST_STATIC_PAD_TEMPLATE (GST_BASE_TRANSFORM_SRC_NAME,
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_caps_setter_sink_template =
+GST_STATIC_PAD_TEMPLATE (GST_BASE_TRANSFORM_SINK_NAME,
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+
+static gboolean gst_caps_setter_transform_size (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize);
+static GstCaps *gst_caps_setter_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * cfilter);
+static GstFlowReturn gst_caps_setter_transform_ip (GstBaseTransform * btrans,
+ GstBuffer * in);
+
+static void gst_caps_setter_finalize (GObject * object);
+
+static void gst_caps_setter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_caps_setter_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+#define gst_caps_setter_parent_class parent_class
+G_DEFINE_TYPE (GstCapsSetter, gst_caps_setter, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (capssetter, "capssetter",
+ GST_RANK_NONE, gst_caps_setter_get_type ());
+
+static void
+gst_caps_setter_class_init (GstCapsSetterClass * g_class)
+{
+ GObjectClass *gobject_class = (GObjectClass *) g_class;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
+
+ GST_DEBUG_CATEGORY_INIT (caps_setter_debug, "capssetter", 0, "capssetter");
+
+ gobject_class->set_property = gst_caps_setter_set_property;
+ gobject_class->get_property = gst_caps_setter_get_property;
+
+ gobject_class->finalize = gst_caps_setter_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_CAPS,
+ g_param_spec_boxed ("caps", "Merge caps",
+ "Merge these caps (thereby overwriting) in the stream",
+ GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_JOIN,
+ g_param_spec_boolean ("join", "Join",
+ "Match incoming caps' mime-type to mime-type of provided caps",
+ DEFAULT_JOIN, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_REPLACE,
+ g_param_spec_boolean ("replace", "Replace",
+ "Drop fields of incoming caps", DEFAULT_REPLACE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (element_class, "CapsSetter",
+ "Generic",
+ "Set/merge caps on stream",
+ "Mark Nauwelaerts <mnauw@users.sourceforge.net>");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_caps_setter_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_caps_setter_src_template);
+
+ trans_class->transform_size =
+ GST_DEBUG_FUNCPTR (gst_caps_setter_transform_size);
+ trans_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_caps_setter_transform_caps);
+ /* dummy seems needed */
+ trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_caps_setter_transform_ip);
+}
+
+static void
+gst_caps_setter_init (GstCapsSetter * filter)
+{
+ filter->caps = gst_caps_new_any ();
+ filter->join = DEFAULT_JOIN;
+ filter->replace = DEFAULT_REPLACE;
+}
+
+static void
+gst_caps_setter_finalize (GObject * object)
+{
+ GstCapsSetter *filter = GST_CAPS_SETTER (object);
+
+ gst_caps_replace (&filter->caps, NULL);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_caps_setter_transform_size (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, gsize size,
+ GstCaps * othercaps, gsize * othersize)
+{
+ *othersize = size;
+
+ return TRUE;
+}
+
+static GstCaps *
+gst_caps_setter_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * cfilter)
+{
+ GstCapsSetter *filter = GST_CAPS_SETTER (trans);
+ GstCaps *ret = NULL, *filter_caps = NULL;
+ GstStructure *structure, *merge;
+ const gchar *name;
+ gint i, j, k;
+
+ GST_DEBUG_OBJECT (trans,
+ "receiving caps: %" GST_PTR_FORMAT ", with filter: %" GST_PTR_FORMAT,
+ caps, cfilter);
+
+ /* pass filter caps upstream, or any if no filter */
+ if (direction != GST_PAD_SINK) {
+ if (!cfilter || gst_caps_is_empty (cfilter)) {
+ return gst_caps_ref (GST_CAPS_ANY);
+ } else {
+ return gst_caps_ref (cfilter);
+ }
+ }
+
+ ret = gst_caps_copy (caps);
+
+ GST_OBJECT_LOCK (filter);
+ filter_caps = gst_caps_ref (filter->caps);
+ GST_OBJECT_UNLOCK (filter);
+
+ for (k = 0; k < gst_caps_get_size (ret); k++) {
+ structure = gst_caps_get_structure (ret, k);
+ name = gst_structure_get_name (structure);
+
+ for (i = 0; i < gst_caps_get_size (filter_caps); ++i) {
+ merge = gst_caps_get_structure (filter_caps, i);
+ if (gst_structure_has_name (merge, name) || !filter->join) {
+
+ if (!filter->join)
+ gst_structure_set_name (structure, gst_structure_get_name (merge));
+
+ if (filter->replace)
+ gst_structure_remove_all_fields (structure);
+
+ for (j = 0; j < gst_structure_n_fields (merge); ++j) {
+ const gchar *fname;
+
+ fname = gst_structure_nth_field_name (merge, j);
+ gst_structure_set_value (structure, fname,
+ gst_structure_get_value (merge, fname));
+ }
+ }
+ }
+ }
+
+ GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);
+
+ gst_caps_unref (filter_caps);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_caps_setter_transform_ip (GstBaseTransform * btrans, GstBuffer * in)
+{
+ return GST_FLOW_OK;
+}
+
+static gboolean
+gst_caps_is_fixed_foreach (GQuark field_id, const GValue * value,
+ gpointer unused)
+{
+ return gst_value_is_fixed (value);
+}
+
+static void
+gst_caps_setter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstCapsSetter *filter = GST_CAPS_SETTER (object);
+
+ switch (prop_id) {
+ case PROP_CAPS:{
+ GstCaps *new_caps;
+ const GstCaps *new_caps_val = gst_value_get_caps (value);
+ gint i;
+
+ if (new_caps_val == NULL) {
+ new_caps = gst_caps_new_any ();
+ } else {
+ new_caps = gst_caps_copy (new_caps_val);
+ }
+
+ for (i = 0; new_caps && (i < gst_caps_get_size (new_caps)); ++i) {
+ GstStructure *s;
+
+ s = gst_caps_get_structure (new_caps, i);
+ if (!gst_structure_foreach (s, gst_caps_is_fixed_foreach, NULL)) {
+ GST_ERROR_OBJECT (filter, "rejected unfixed caps: %" GST_PTR_FORMAT,
+ new_caps);
+ gst_caps_unref (new_caps);
+ new_caps = NULL;
+ break;
+ }
+ }
+
+ if (new_caps) {
+ GST_OBJECT_LOCK (filter);
+ gst_caps_replace (&filter->caps, new_caps);
+ /* drop extra ref */
+ gst_caps_unref (new_caps);
+ GST_OBJECT_UNLOCK (filter);
+
+ GST_DEBUG_OBJECT (filter, "set new caps %" GST_PTR_FORMAT, new_caps);
+ }
+
+ /* try to activate these new caps next time around */
+ gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (filter));
+ break;
+ }
+ case PROP_JOIN:
+ filter->join = g_value_get_boolean (value);
+ break;
+ case PROP_REPLACE:
+ filter->replace = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_caps_setter_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstCapsSetter *filter = GST_CAPS_SETTER (object);
+
+ switch (prop_id) {
+ case PROP_CAPS:
+ gst_value_set_caps (value, filter->caps);
+ break;
+ case PROP_JOIN:
+ g_value_set_boolean (value, filter->join);
+ break;
+ case PROP_REPLACE:
+ g_value_set_boolean (value, filter->replace);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/debugutils/gstcapssetter.h b/gst/debugutils/gstcapssetter.h
new file mode 100644
index 0000000000..246bf19b5f
--- /dev/null
+++ b/gst/debugutils/gstcapssetter.h
@@ -0,0 +1,63 @@
+/* GStreamer Element
+ * Copyright (C) 2006-2009 Mark Nauwelaerts <mnauw@users.sourceforge.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_CAPS_SETTER_H__
+#define __GST_CAPS_SETTER_H__
+
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_CAPS_SETTER \
+ (gst_caps_setter_get_type())
+#define GST_CAPS_SETTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAPS_SETTER,GstCapsSetter))
+#define GST_CAPS_SETTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAPS_SETTER,GstCapsSetterClass))
+#define GST_IS_CAPS_SETTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAPS_SETTER))
+#define GST_IS_CAPS_SETTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAPS_SETTER))
+
+GType gst_caps_setter_get_type (void);
+
+typedef struct _GstCapsSetter GstCapsSetter;
+typedef struct _GstCapsSetterClass GstCapsSetterClass;
+
+struct _GstCapsSetter
+{
+ GstBaseTransform parent;
+
+ /* < private > */
+ /* properties */
+ GstCaps *caps;
+ gboolean join;
+ gboolean replace;
+};
+
+
+struct _GstCapsSetterClass
+{
+ GstBaseTransformClass parent_class;
+};
+
+G_END_DECLS
+
+#endif /* __GST_CAPS_SETTER_H__ */
diff --git a/gst/debugutils/gstdebug.c b/gst/debugutils/gstdebug.c
new file mode 100644
index 0000000000..7e2446d508
--- /dev/null
+++ b/gst/debugutils/gstdebug.c
@@ -0,0 +1,55 @@
+/* GStreamer
+ * Copyright (C) 2004 Benjamin Otte <otte@gnome.org>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "gstdebugutilselements.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (breakmydata, plugin);
+ ret |= GST_ELEMENT_REGISTER (capssetter, plugin);
+ ret |= GST_ELEMENT_REGISTER (rndbuffersize, plugin);
+ ret |= GST_ELEMENT_REGISTER (navseek, plugin);
+ ret |= GST_ELEMENT_REGISTER (pushfilesrc, plugin);
+ ret |= GST_ELEMENT_REGISTER (progressreport, plugin);
+ ret |= GST_ELEMENT_REGISTER (taginject, plugin);
+ ret |= GST_ELEMENT_REGISTER (testsink, plugin);
+#if 0
+ ret |= GST_ELEMENT_REGISTER (capsdebug, plugin);
+#endif
+ ret |= GST_ELEMENT_REGISTER (cpureport, plugin);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ debug,
+ "elements for testing and debugging",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/debugutils/gstdebugutilselements.h b/gst/debugutils/gstdebugutilselements.h
new file mode 100644
index 0000000000..ad6522cc6f
--- /dev/null
+++ b/gst/debugutils/gstdebugutilselements.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2004 Benjamin Otte <otte@gnome.org>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_DEBUGUTILS_ELEMENTS_H__
+#define __GST_DEBUGUTILS_ELEMENTS_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+GST_ELEMENT_REGISTER_DECLARE (breakmydata);
+GST_ELEMENT_REGISTER_DECLARE (capssetter);
+GST_ELEMENT_REGISTER_DECLARE (rndbuffersize);
+GST_ELEMENT_REGISTER_DECLARE (navseek);
+GST_ELEMENT_REGISTER_DECLARE (pushfilesrc);
+GST_ELEMENT_REGISTER_DECLARE (progressreport);
+GST_ELEMENT_REGISTER_DECLARE (taginject);
+GST_ELEMENT_REGISTER_DECLARE (testsink);
+#if 0
+GST_ELEMENT_REGISTER_DECLARE (capsdebug);
+#endif
+GST_ELEMENT_REGISTER_DECLARE (cpureport);
+
+G_END_DECLS
+
+#endif /* __GST_DEBUGUTILS_ELEMENTS_H__ */
diff --git a/gst/debugutils/gstnavigationtest.c b/gst/debugutils/gstnavigationtest.c
new file mode 100644
index 0000000000..eb2361fb6e
--- /dev/null
+++ b/gst/debugutils/gstnavigationtest.c
@@ -0,0 +1,278 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstnavigationtest.h"
+#include <string.h>
+#include <math.h>
+
+#include <gst/video/video.h>
+
+#ifdef _MSC_VER
+#define rint(x) (floor((x)+0.5))
+#endif
+
+GST_DEBUG_CATEGORY_STATIC (navigationtest_debug);
+#define GST_CAT_DEFAULT navigationtest_debug
+
+static GstStaticPadTemplate gst_navigationtest_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
+ );
+
+static GstStaticPadTemplate gst_navigationtest_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
+ );
+
+#define gst_navigationtest_parent_class parent_class
+G_DEFINE_TYPE (GstNavigationtest, gst_navigationtest, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (navigationtest, "navigationtest", GST_RANK_NONE,
+ GST_TYPE_NAVIGATIONTEST);
+
+static gboolean
+gst_navigationtest_src_event (GstBaseTransform * trans, GstEvent * event)
+{
+ GstVideoInfo *info;
+ GstNavigationtest *navtest;
+ const gchar *type;
+
+ navtest = GST_NAVIGATIONTEST (trans);
+
+ info = &GST_VIDEO_FILTER (trans)->in_info;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NAVIGATION:
+ {
+ const GstStructure *s = gst_event_get_structure (event);
+ gint fps_n, fps_d;
+
+ fps_n = GST_VIDEO_INFO_FPS_N (info);
+ fps_d = GST_VIDEO_INFO_FPS_D (info);
+
+ type = gst_structure_get_string (s, "event");
+      if (type && g_str_equal (type, "mouse-move")) {
+        gst_structure_get_double (s, "pointer_x", &navtest->x);
+        gst_structure_get_double (s, "pointer_y", &navtest->y);
+      } else if (type && g_str_equal (type, "mouse-button-press")) {
+        ButtonClick *click = g_new (ButtonClick, 1);
+
+        gst_structure_get_double (s, "pointer_x", &click->x);
+        gst_structure_get_double (s, "pointer_y", &click->y);
+        click->images_left = (fps_n + fps_d - 1) / fps_d;
+        /* green */
+        click->cy = 150;
+        click->cu = 46;
+        click->cv = 21;
+        navtest->clicks = g_slist_prepend (navtest->clicks, click);
+      } else if (type && g_str_equal (type, "mouse-button-release")) {
+ ButtonClick *click = g_new (ButtonClick, 1);
+
+ gst_structure_get_double (s, "pointer_x", &click->x);
+ gst_structure_get_double (s, "pointer_y", &click->y);
+ click->images_left = (fps_n + fps_d - 1) / fps_d;
+ /* red */
+ click->cy = 76;
+ click->cu = 85;
+ click->cv = 255;
+ navtest->clicks = g_slist_prepend (navtest->clicks, click);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
+}
+
+/* Useful macros */
+#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
+#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
+#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
+
+#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
+#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
+#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+
+#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+
+static void
+draw_box_planar411 (GstVideoFrame * frame, int x, int y,
+ guint8 colory, guint8 coloru, guint8 colorv)
+{
+ gint width, height;
+ int x1, x2, y1, y2;
+ guint8 *d;
+ gint stride;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ if (x < 0 || y < 0 || x >= width || y >= height)
+ return;
+
+ x1 = MAX (x - 5, 0);
+ x2 = MIN (x + 5, width);
+ y1 = MAX (y - 5, 0);
+ y2 = MIN (y + 5, height);
+
+ d = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ for (y = y1; y < y2; y++) {
+ for (x = x1; x < x2; x++) {
+ d[y * stride + x] = colory;
+ }
+ }
+
+ d = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+
+ x1 /= 2;
+ x2 /= 2;
+ y1 /= 2;
+ y2 /= 2;
+ for (y = y1; y < y2; y++) {
+ for (x = x1; x < x2; x++) {
+ d[y * stride + x] = coloru;
+ }
+ }
+
+ d = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
+
+ for (y = y1; y < y2; y++) {
+ for (x = x1; x < x2; x++) {
+ d[y * stride + x] = colorv;
+ }
+ }
+}
+
+static GstFlowReturn
+gst_navigationtest_transform_frame (GstVideoFilter * filter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+ GstNavigationtest *navtest = GST_NAVIGATIONTEST (filter);
+ GSList *walk;
+
+ gst_video_frame_copy (out_frame, in_frame);
+
+ walk = navtest->clicks;
+ while (walk) {
+ ButtonClick *click = walk->data;
+
+ walk = g_slist_next (walk);
+ draw_box_planar411 (out_frame,
+ rint (click->x), rint (click->y), click->cy, click->cu, click->cv);
+ if (--click->images_left < 1) {
+ navtest->clicks = g_slist_remove (navtest->clicks, click);
+ g_free (click);
+ }
+ }
+ draw_box_planar411 (out_frame,
+ rint (navtest->x), rint (navtest->y), 0, 128, 128);
+
+ return GST_FLOW_OK;
+}
+
+static GstStateChangeReturn
+gst_navigationtest_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+ GstNavigationtest *navtest = GST_NAVIGATIONTEST (element);
+
+ if (GST_ELEMENT_CLASS (parent_class)->change_state)
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* downwards state changes */
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ {
+ g_slist_foreach (navtest->clicks, (GFunc) g_free, NULL);
+ g_slist_free (navtest->clicks);
+ navtest->clicks = NULL;
+ break;
+ }
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+static void
+gst_navigationtest_class_init (GstNavigationtestClass * klass)
+{
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+ GstVideoFilterClass *vfilter_class;
+
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+ vfilter_class = (GstVideoFilterClass *) klass;
+
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_navigationtest_change_state);
+
+ gst_element_class_set_static_metadata (element_class, "Video navigation test",
+ "Filter/Effect/Video",
+ "Handle navigation events showing a black square following mouse pointer",
+ "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_navigationtest_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_navigationtest_src_template);
+
+ trans_class->src_event = GST_DEBUG_FUNCPTR (gst_navigationtest_src_event);
+
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_navigationtest_transform_frame);
+}
+
+static void
+gst_navigationtest_init (GstNavigationtest * navtest)
+{
+ navtest->x = -1;
+ navtest->y = -1;
+}
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ GST_DEBUG_CATEGORY_INIT (navigationtest_debug, "navigationtest", 0,
+ "navigationtest");
+
+ return GST_ELEMENT_REGISTER (navigationtest, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ navigationtest,
+ "Template for a video filter",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/debugutils/gstnavigationtest.h b/gst/debugutils/gstnavigationtest.h
new file mode 100644
index 0000000000..c412359329
--- /dev/null
+++ b/gst/debugutils/gstnavigationtest.h
@@ -0,0 +1,67 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_NAVIGATIONTEST_H__
+#define __GST_NAVIGATIONTEST_H__
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_NAVIGATIONTEST \
+ (gst_navigationtest_get_type())
+#define GST_NAVIGATIONTEST(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_NAVIGATIONTEST,GstNavigationtest))
+#define GST_NAVIGATIONTEST_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_NAVIGATIONTEST,GstNavigationtestClass))
+#define GST_IS_NAVIGATIONTEST(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_NAVIGATIONTEST))
+#define GST_IS_NAVIGATIONTEST_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_NAVIGATIONTEST))
+typedef struct _GstNavigationtest GstNavigationtest;
+typedef struct _GstNavigationtestClass GstNavigationtestClass;
+
+typedef struct
+{
+ gdouble x;
+ gdouble y;
+ gint images_left;
+ guint8 cy, cu, cv;
+} ButtonClick;
+
+struct _GstNavigationtest
+{
+ GstVideoFilter videofilter;
+
+ gdouble x, y;
+ GSList *clicks;
+};
+
+struct _GstNavigationtestClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_navigationtest_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (navigationtest);
+
+G_END_DECLS
+#endif /* __GST_NAVIGATIONTEST_H__ */
diff --git a/gst/debugutils/gstnavseek.c b/gst/debugutils/gstnavseek.c
new file mode 100644
index 0000000000..cfd854f386
--- /dev/null
+++ b/gst/debugutils/gstnavseek.c
@@ -0,0 +1,432 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This file was (probably) generated from gstnavseek.c,
+ * gstnavseek.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdebugutilselements.h"
+#include "gstnavseek.h"
+#include <string.h>
+#include <math.h>
+
+/* Property IDs for the navseek element */
+enum
+{
+  PROP_0,
+  PROP_SEEKOFFSET,
+  PROP_HOLD_EOS,
+};
+
+/* navseek is a pass-through filter: both pads accept any caps */
+GstStaticPadTemplate navseek_src_template = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS_ANY);
+
+GstStaticPadTemplate navseek_sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS_ANY);
+
+/* GstBaseTransform virtual method implementations */
+static gboolean gst_navseek_sink_event (GstBaseTransform * trans,
+    GstEvent * event);
+static GstFlowReturn gst_navseek_transform_ip (GstBaseTransform * basetrans,
+    GstBuffer * buf);
+static gboolean gst_navseek_src_event (GstBaseTransform * trans,
+    GstEvent * event);
+static gboolean gst_navseek_stop (GstBaseTransform * trans);
+static gboolean gst_navseek_start (GstBaseTransform * trans);
+
+static void gst_navseek_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_navseek_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+GType gst_navseek_get_type (void);
+#define gst_navseek_parent_class parent_class
+G_DEFINE_TYPE (GstNavSeek, gst_navseek, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (navseek, "navseek",
+    GST_RANK_NONE, gst_navseek_get_type ());
+
+/* Class init: install properties, pad templates, metadata and the
+ * GstBaseTransform virtual methods. */
+static void
+gst_navseek_class_init (GstNavSeekClass * klass)
+{
+  GstBaseTransformClass *gstbasetrans_class;
+  GstElementClass *element_class;
+  GObjectClass *gobject_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
+  gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
+
+  gobject_class->set_property = gst_navseek_set_property;
+  gobject_class->get_property = gst_navseek_get_property;
+
+  /* seek step used by the Left/Right key handlers, default 5 seconds */
+  g_object_class_install_property (gobject_class,
+      PROP_SEEKOFFSET, g_param_spec_double ("seek-offset", "Seek Offset",
+          "Time in seconds to seek by", 0.0, G_MAXDOUBLE, 5.0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * navseek:hold-eos:
+   *
+   * Hold eos until the next 'Return' keystroke.
+   *
+   * Since: 1.20
+   */
+  g_object_class_install_property (gobject_class,
+      PROP_HOLD_EOS, g_param_spec_boolean ("hold-eos", "Hold EOS",
+          "Hold eos until the next 'Return' keystroke", FALSE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (element_class,
+      &navseek_sink_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &navseek_src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "Seek based on left-right arrows", "Filter/Video",
+      "Seek based on navigation keys left-right",
+      "Jan Schmidt <thaytan@mad.scientist.com>");
+
+  /* src_event intercepts navigation key presses; sink_event catches EOS */
+  gstbasetrans_class->src_event = GST_DEBUG_FUNCPTR (gst_navseek_src_event);
+  gstbasetrans_class->sink_event = GST_DEBUG_FUNCPTR (gst_navseek_sink_event);
+  gstbasetrans_class->transform_ip =
+      GST_DEBUG_FUNCPTR (gst_navseek_transform_ip);
+  gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_navseek_start);
+  gstbasetrans_class->stop = GST_DEBUG_FUNCPTR (gst_navseek_stop);
+}
+
+/* Instance init: the element never touches buffer data, so run in
+ * passthrough mode, and start with no segment marks and no held EOS. */
+static void
+gst_navseek_init (GstNavSeek * navseek)
+{
+  /* property defaults */
+  navseek->seek_offset = 5.0;
+  navseek->hold_eos = FALSE;
+
+  /* runtime state */
+  navseek->loop = FALSE;
+  navseek->eos = NULL;
+  navseek->grab_seg_start = FALSE;
+  navseek->grab_seg_end = FALSE;
+  navseek->segment_start = GST_CLOCK_TIME_NONE;
+  navseek->segment_end = GST_CLOCK_TIME_NONE;
+
+  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (navseek), TRUE);
+}
+
+/* Seek relative to the current position by @offset nanoseconds.
+ * Queries the upstream peer for the current time, clamps the target to 0,
+ * and sends an accurate flushing seek upstream. Does nothing if the sink
+ * pad is not linked or the position query fails. */
+static void
+gst_navseek_seek (GstNavSeek * navseek, gint64 offset)
+{
+  gboolean ret;
+  GstPad *peer_pad;
+  gint64 peer_value;
+
+  /* Query for the current time then attempt to set to time + offset */
+  peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
+  if (peer_pad == NULL)
+    return;                     /* sink pad not linked, nowhere to seek */
+
+  ret = gst_pad_query_position (peer_pad, GST_FORMAT_TIME, &peer_value);
+
+  if (ret) {
+    GstEvent *event;
+
+    peer_value += offset;
+    if (peer_value < 0)
+      peer_value = 0;
+
+    event = gst_event_new_seek (1.0, GST_FORMAT_TIME,
+        GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH,
+        GST_SEEK_TYPE_SET, peer_value, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
+
+    gst_pad_send_event (peer_pad, event);
+  }
+
+  gst_object_unref (peer_pad);
+}
+
+/* Change the playback rate via a flushing seek from the current position.
+ * Positive rates play from the current position to the end; negative rates
+ * play backwards from the current position to the start. Does nothing if
+ * the sink pad is not linked or the position query fails. */
+static void
+gst_navseek_change_playback_rate (GstNavSeek * navseek, gdouble rate)
+{
+  gboolean ret;
+  GstPad *peer_pad;
+  gint64 current_position;
+
+  peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
+  if (peer_pad == NULL)
+    return;                     /* sink pad not linked, nothing to do */
+
+  ret = gst_pad_query_position (peer_pad, GST_FORMAT_TIME, &current_position);
+
+  if (ret) {
+    GstEvent *event;
+    gint64 start;
+    gint64 stop;
+
+    if (rate > 0.0) {
+      start = current_position;
+      stop = -1;
+    } else {
+      /* negative rate: we play from stop to start */
+      start = 0;
+      stop = current_position;
+    }
+
+    /* SKIP allows decoders to drop frames during trick-mode playback */
+    event = gst_event_new_seek (rate, GST_FORMAT_TIME,
+        GST_SEEK_FLAG_ACCURATE | GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SKIP,
+        GST_SEEK_TYPE_SET, start, GST_SEEK_TYPE_SET, stop);
+
+    gst_pad_send_event (peer_pad, event);
+  }
+  gst_object_unref (peer_pad);
+}
+
+/* Send a seek spanning [segment_start, segment_end] upstream. When looping
+ * is enabled the SEGMENT flag is added so playback wraps back to the start.
+ * Requires both segment marks to be set and the sink pad to be linked. */
+static void
+gst_navseek_segseek (GstNavSeek * navseek)
+{
+  GstSeekFlags flags;
+  GstEvent *event;
+  GstPad *peer_pad;
+
+  if ((navseek->segment_start == GST_CLOCK_TIME_NONE) ||
+      (navseek->segment_end == GST_CLOCK_TIME_NONE) ||
+      (!GST_PAD_IS_LINKED (GST_BASE_TRANSFORM (navseek)->sinkpad))) {
+    return;
+  }
+
+  flags = GST_SEEK_FLAG_ACCURATE;
+  if (navseek->loop)
+    flags |= GST_SEEK_FLAG_SEGMENT;
+
+  event = gst_event_new_seek (1.0, GST_FORMAT_TIME, flags,
+      GST_SEEK_TYPE_SET, navseek->segment_start, GST_SEEK_TYPE_SET,
+      navseek->segment_end);
+
+  peer_pad = gst_pad_get_peer (GST_BASE_TRANSFORM (navseek)->sinkpad);
+  gst_pad_send_event (peer_pad, event);
+  gst_object_unref (peer_pad);
+}
+
+/* Toggle between PLAYING and PAUSED. An element cannot change its own
+ * state, so post a request-state message for the application to act on. */
+static void
+gst_navseek_toggle_play_pause (GstNavSeek * navseek)
+{
+  GstState cur, pend, effective, target;
+
+  /* Non-blocking state query (0 timeout) */
+  if (gst_element_get_state (GST_ELEMENT (navseek), &cur, &pend, 0) ==
+      GST_STATE_CHANGE_FAILURE)
+    return;
+
+  /* A pending state change takes precedence over the current state */
+  effective = (pend == GST_STATE_VOID_PENDING) ? cur : pend;
+  target = (effective == GST_STATE_PLAYING) ? GST_STATE_PAUSED :
+      GST_STATE_PLAYING;
+
+  gst_element_post_message (GST_ELEMENT (navseek),
+      gst_message_new_request_state (GST_OBJECT (navseek), target));
+}
+
+/* Intercept navigation key-press events travelling upstream and map them
+ * to actions:
+ *   Left/Right - seek backward/forward by seek-offset seconds
+ *   s / e      - mark segment start / end on the next timestamped buffer
+ *   l          - toggle segment looping (and re-seek if marks are set)
+ *   f / r / n  - fast-forward (2x) / rewind (-2x) / normal speed (1x)
+ *   space      - toggle play/pause via a request-state message
+ *   Return     - release a held EOS event (see hold-eos property)
+ * Handled events are consumed; everything else is passed to the parent
+ * class. NOTE(review): the g_return_val_if_fail paths below return without
+ * unreffing @event — looks like a leak on malformed events; confirm. */
+static gboolean
+gst_navseek_src_event (GstBaseTransform * trans, GstEvent * event)
+{
+  GstNavSeek *navseek;
+  gboolean ret = TRUE;
+
+  navseek = GST_NAVSEEK (trans);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_NAVIGATION:
+    {
+      /* Check for a keyup and convert left/right to a seek event */
+      const GstStructure *structure;
+      const gchar *event_type;
+
+      structure = gst_event_get_structure (event);
+      g_return_val_if_fail (structure != NULL, FALSE);
+
+      event_type = gst_structure_get_string (structure, "event");
+      g_return_val_if_fail (event_type != NULL, FALSE);
+
+      if (strcmp (event_type, "key-press") == 0) {
+        const gchar *key;
+
+        key = gst_structure_get_string (structure, "key");
+        g_return_val_if_fail (key != NULL, FALSE);
+
+        if (strcmp (key, "Left") == 0) {
+          /* Seek backward by 5 secs */
+          gst_navseek_seek (navseek, -1.0 * navseek->seek_offset * GST_SECOND);
+        } else if (strcmp (key, "Right") == 0) {
+          /* Seek forward */
+          gst_navseek_seek (navseek, navseek->seek_offset * GST_SECOND);
+        } else if (strcmp (key, "s") == 0) {
+          /* Grab the next frame as the start frame of a segment */
+          navseek->grab_seg_start = TRUE;
+        } else if (strcmp (key, "e") == 0) {
+          /* Grab the next frame as the end frame of a segment */
+          navseek->grab_seg_end = TRUE;
+        } else if (strcmp (key, "l") == 0) {
+          /* Toggle the loop flag. If we have both start and end segment times send a seek */
+          navseek->loop = !navseek->loop;
+          gst_navseek_segseek (navseek);
+        } else if (strcmp (key, "f") == 0) {
+          /* fast forward */
+          gst_navseek_change_playback_rate (navseek, 2.0);
+        } else if (strcmp (key, "r") == 0) {
+          /* rewind */
+          gst_navseek_change_playback_rate (navseek, -2.0);
+        } else if (strcmp (key, "n") == 0) {
+          /* normal speed */
+          gst_navseek_change_playback_rate (navseek, 1.0);
+        } else if (strcmp (key, "space") == 0) {
+          gst_navseek_toggle_play_pause (navseek);
+        } else if (strcmp (key, "Return") == 0) {
+          /* push the EOS that sink_event held back; ownership transfers
+           * to the pad, so clear our pointer */
+          if (navseek->eos) {
+            gst_pad_push_event (GST_BASE_TRANSFORM (navseek)->srcpad,
+                navseek->eos);
+            navseek->eos = NULL;
+          }
+        }
+      } else {
+        /* not a key-press: forward the event unchanged (see below) */
+        break;
+      }
+      /* key-press consumed: drop the event so it doesn't travel further */
+      gst_event_unref (event);
+      event = NULL;
+      break;
+    }
+    default:
+      break;
+  }
+
+  if (event)
+    ret = GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
+
+  return ret;
+}
+
+/* GObject set_property: both properties are plain fields, written under
+ * the object lock because the streaming thread reads them. */
+static void
+gst_navseek_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstNavSeek *self = GST_NAVSEEK (object);
+
+  switch (prop_id) {
+    case PROP_HOLD_EOS:
+      GST_OBJECT_LOCK (self);
+      self->hold_eos = g_value_get_boolean (value);
+      GST_OBJECT_UNLOCK (self);
+      break;
+    case PROP_SEEKOFFSET:
+      GST_OBJECT_LOCK (self);
+      self->seek_offset = g_value_get_double (value);
+      GST_OBJECT_UNLOCK (self);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject get_property: mirror of set_property, reads under the object
+ * lock for consistency with the writers. */
+static void
+gst_navseek_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstNavSeek *self = GST_NAVSEEK (object);
+
+  switch (prop_id) {
+    case PROP_HOLD_EOS:
+      GST_OBJECT_LOCK (self);
+      g_value_set_boolean (value, self->hold_eos);
+      GST_OBJECT_UNLOCK (self);
+      break;
+    case PROP_SEEKOFFSET:
+      GST_OBJECT_LOCK (self);
+      g_value_set_double (value, self->seek_offset);
+      GST_OBJECT_UNLOCK (self);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Downstream event handler. On EOS: re-issue the segment seek when looping,
+ * and when hold-eos is set, keep the EOS event (taking over the caller's
+ * reference) instead of forwarding it; it is released later by the 'Return'
+ * key handler or by stop(). Everything else goes to the parent class.
+ * NOTE(review): navseek->eos is re-read after GST_OBJECT_UNLOCK; a racing
+ * 'Return' keypress could clear it in between — confirm intended. */
+static gboolean
+gst_navseek_sink_event (GstBaseTransform * trans, GstEvent * event)
+{
+  GstNavSeek *navseek = GST_NAVSEEK (trans);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_EOS:
+      GST_OBJECT_LOCK (navseek);
+      if (navseek->loop)
+        gst_navseek_segseek (navseek);
+      if (navseek->hold_eos)
+        navseek->eos = event;   /* take ownership, do not forward */
+      GST_OBJECT_UNLOCK (navseek);
+      if (navseek->eos)
+        return TRUE;            /* swallowed: EOS is held back */
+      break;
+    default:
+      break;
+  }
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
+}
+
+/* In-place transform: buffers pass through untouched; this is only used to
+ * capture the timestamp of the next buffer after the 's'/'e' keys flagged
+ * a segment-start or segment-end grab. */
+static GstFlowReturn
+gst_navseek_transform_ip (GstBaseTransform * basetrans, GstBuffer * buf)
+{
+  GstNavSeek *self = GST_NAVSEEK (basetrans);
+
+  GST_OBJECT_LOCK (self);
+
+  /* only timestamped buffers can mark segment boundaries */
+  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
+    GstClockTime ts = GST_BUFFER_TIMESTAMP (buf);
+
+    if (self->grab_seg_start) {
+      /* 's' pressed: this buffer opens a new segment */
+      self->grab_seg_start = FALSE;
+      self->segment_start = ts;
+      self->segment_end = GST_CLOCK_TIME_NONE;
+    }
+
+    if (self->grab_seg_end) {
+      /* 'e' pressed: this buffer closes the segment; seek to play it */
+      self->grab_seg_end = FALSE;
+      self->segment_end = ts;
+      gst_navseek_segseek (self);
+    }
+  }
+
+  GST_OBJECT_UNLOCK (self);
+
+  return GST_FLOW_OK;
+}
+
+/* start vfunc: no per-stream resources to allocate; all state is reset in
+ * instance init and torn down in stop(). */
+static gboolean
+gst_navseek_start (GstBaseTransform * trans)
+{
+  return TRUE;
+}
+
+/* stop vfunc: drop any EOS event still held back by the hold-eos logic so
+ * it is not leaked across a state change. */
+static gboolean
+gst_navseek_stop (GstBaseTransform * trans)
+{
+  GstNavSeek *self = GST_NAVSEEK (trans);
+
+  /* unrefs the old event (if any) and stores NULL */
+  gst_event_replace (&self->eos, NULL);
+
+  return TRUE;
+}
diff --git a/gst/debugutils/gstnavseek.h b/gst/debugutils/gstnavseek.h
new file mode 100644
index 0000000000..6b596b21e0
--- /dev/null
+++ b/gst/debugutils/gstnavseek.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_NAVSEEK_H__
+#define __GST_NAVSEEK_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+/* Standard GObject type-cast/type-check boilerplate for GstNavSeek */
+#define GST_TYPE_NAVSEEK \
+  (gst_navseek_get_type())
+#define GST_NAVSEEK(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_NAVSEEK,GstNavSeek))
+#define GST_NAVSEEK_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_NAVSEEK,GstNavSeekClass))
+#define GST_IS_NAVSEEK(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_NAVSEEK))
+#define GST_IS_NAVSEEK_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_NAVSEEK))
+typedef struct _GstNavSeek GstNavSeek;
+typedef struct _GstNavSeekClass GstNavSeekClass;
+
+struct _GstNavSeek
+{
+  GstBaseTransform basetransform;
+
+  gdouble seek_offset;          /* "seek-offset" property, seconds */
+  gboolean loop;                /* segment looping toggled by 'l' key */
+  gboolean hold_eos;            /* "hold-eos" property */
+  GstEvent* eos;                /* EOS event held back while hold_eos is set */
+  gboolean grab_seg_start;      /* next timestamped buffer marks segment start */
+  gboolean grab_seg_end;        /* next timestamped buffer marks segment end */
+  GstClockTime segment_start;   /* grabbed segment start, or CLOCK_TIME_NONE */
+  GstClockTime segment_end;     /* grabbed segment end, or CLOCK_TIME_NONE */
+};
+
+struct _GstNavSeekClass
+{
+  GstBaseTransformClass parent_class;
+};
+
+G_END_DECLS
+#endif /* __GST_NAVSEEK_H__ */
diff --git a/gst/debugutils/gstpushfilesrc.c b/gst/debugutils/gstpushfilesrc.c
new file mode 100644
index 0000000000..d9f3b36060
--- /dev/null
+++ b/gst/debugutils/gstpushfilesrc.c
@@ -0,0 +1,424 @@
+/* GStreamer Push File Source
+ * Copyright (C) <2007> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-pushfilesrc
+ * @title: pushfilesrc
+ * @see_also: filesrc
+ *
+ * This element is only useful for debugging purposes. It implements an URI
+ * protocol handler for the 'pushfile' protocol and behaves like a file source
+ * element that cannot be activated in pull-mode. This makes it very easy to
+ * debug demuxers or decoders that can operate both pull and push-based in
+ * connection with the playbin element (which creates a source based on the
+ * URI passed).
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -m playbin uri=pushfile:///home/you/some/file.ogg
+ * ]| This plays back the given file using playbin, with the demuxer operating
+ * push-based.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdebugutilselements.h"
+#include "gstpushfilesrc.h"
+
+#include <gst/gst.h>
+
+GST_DEBUG_CATEGORY_STATIC (pushfilesrc_debug);
+#define GST_CAT_DEFAULT pushfilesrc_debug
+
+/* Property IDs for the pushfilesrc element */
+enum
+{
+  PROP_0,
+  PROP_LOCATION,
+  PROP_TIME_SEGMENT,
+  PROP_STREAM_TIME,
+  PROP_START_TIME,
+  PROP_INITIAL_TIMESTAMP,
+  PROP_RATE,
+  PROP_APPLIED_RATE
+};
+
+/* Property defaults */
+#define DEFAULT_TIME_SEGMENT FALSE
+#define DEFAULT_STREAM_TIME 0
+#define DEFAULT_START_TIME 0
+#define DEFAULT_INITIAL_TIMESTAMP GST_CLOCK_TIME_NONE
+#define DEFAULT_RATE 1.0
+#define DEFAULT_APPLIED_RATE 1.0
+
+static void gst_push_file_src_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_push_file_src_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec);
+
+/* Single always-source pad; caps are whatever the wrapped filesrc emits */
+static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS_ANY);
+
+static void gst_push_file_src_uri_handler_init (gpointer g_iface,
+    gpointer iface_data);
+
+/* GstBin subclass implementing the GstURIHandler interface for pushfile:// */
+#define gst_push_file_src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstPushFileSrc, gst_push_file_src, GST_TYPE_BIN,
+    G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+        gst_push_file_src_uri_handler_init));
+GST_ELEMENT_REGISTER_DEFINE (pushfilesrc, "pushfilesrc",
+    GST_RANK_NONE, gst_push_file_src_get_type ());
+
+/* GObject dispose: release the ghost pad and the wrapped filesrc.
+ * gst_element_remove_pad / gst_bin_remove drop the references the element
+ * and bin held; pointers are cleared so a repeated dispose is safe. */
+static void
+gst_push_file_src_dispose (GObject * obj)
+{
+  GstPushFileSrc *src = GST_PUSH_FILE_SRC (obj);
+
+  if (src->srcpad) {
+    gst_element_remove_pad (GST_ELEMENT (src), src->srcpad);
+    src->srcpad = NULL;
+  }
+  if (src->filesrc) {
+    gst_bin_remove (GST_BIN (src), src->filesrc);
+    src->filesrc = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->dispose (obj);
+}
+
+/* Class init: install dispose, the property accessors, all properties,
+ * the src pad template and element metadata. */
+static void
+gst_push_file_src_class_init (GstPushFileSrcClass * g_class)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+
+  gobject_class = G_OBJECT_CLASS (g_class);
+  element_class = GST_ELEMENT_CLASS (g_class);
+
+  GST_DEBUG_CATEGORY_INIT (pushfilesrc_debug, "pushfilesrc", 0,
+      "pushfilesrc element");
+
+  gobject_class->dispose = gst_push_file_src_dispose;
+  gobject_class->set_property = gst_push_file_src_set_property;
+  gobject_class->get_property = gst_push_file_src_get_property;
+
+  /* "location" is proxied to the wrapped filesrc element */
+  g_object_class_install_property (gobject_class, PROP_LOCATION,
+      g_param_spec_string ("location", "File Location",
+          "Location of the file to read", NULL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+          GST_PARAM_MUTABLE_READY));
+
+  /* when TRUE, outgoing segments are rewritten to TIME format using the
+   * stream-time/start-time/rate/applied-rate properties below */
+  g_object_class_install_property (gobject_class, PROP_TIME_SEGMENT,
+      g_param_spec_boolean ("time-segment", "Time Segment",
+          "Emit TIME SEGMENTS", DEFAULT_TIME_SEGMENT, G_PARAM_READWRITE));
+
+  g_object_class_install_property (gobject_class, PROP_STREAM_TIME,
+      g_param_spec_int64 ("stream-time", "Stream Time",
+          "Initial Stream Time (if time-segment TRUE)", 0, G_MAXINT64,
+          DEFAULT_STREAM_TIME, G_PARAM_READWRITE));
+
+  g_object_class_install_property (gobject_class, PROP_START_TIME,
+      g_param_spec_int64 ("start-time", "Start Time",
+          "Initial Start Time (if time-segment TRUE)", 0, G_MAXINT64,
+          DEFAULT_START_TIME, G_PARAM_READWRITE));
+
+  g_object_class_install_property (gobject_class, PROP_INITIAL_TIMESTAMP,
+      g_param_spec_uint64 ("initial-timestamp", "Initial Timestamp",
+          "Initial Buffer Timestamp (if time-segment TRUE)", 0, G_MAXUINT64,
+          DEFAULT_INITIAL_TIMESTAMP, G_PARAM_READWRITE));
+
+  g_object_class_install_property (gobject_class, PROP_RATE,
+      g_param_spec_double ("rate", "Rate", "Rate to use in TIME SEGMENT",
+          G_MINDOUBLE, G_MAXDOUBLE, DEFAULT_RATE, G_PARAM_READWRITE));
+
+  g_object_class_install_property (gobject_class, PROP_APPLIED_RATE,
+      g_param_spec_double ("applied-rate", "Applied Rate",
+          "Applied rate to use in TIME SEGMENT", G_MINDOUBLE, G_MAXDOUBLE,
+          DEFAULT_APPLIED_RATE, G_PARAM_READWRITE));
+
+  gst_element_class_add_static_pad_template (element_class, &srctemplate);
+
+  gst_element_class_set_static_metadata (element_class, "Push File Source",
+      "Testing",
+      "Implements pushfile:// URI-handler for push-based file access",
+      "Tim-Philipp Müller <tim centricular net>");
+}
+
+/* GObject set_property. "location" is delegated to the wrapped filesrc;
+ * all other properties are plain fields on the instance. Guards against
+ * src->filesrc being NULL, which init() tolerates when the filesrc
+ * factory is unavailable. */
+static void
+gst_push_file_src_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstPushFileSrc *src = (GstPushFileSrc *) object;
+
+  switch (prop_id) {
+    case PROP_LOCATION:
+      if (src->filesrc != NULL)
+        g_object_set_property (G_OBJECT (src->filesrc), "location", value);
+      else
+        GST_WARNING_OBJECT (src, "no filesrc element, ignoring location");
+      break;
+    case PROP_TIME_SEGMENT:
+      src->time_segment = g_value_get_boolean (value);
+      break;
+    case PROP_STREAM_TIME:
+      src->stream_time = g_value_get_int64 (value);
+      break;
+    case PROP_START_TIME:
+      src->start_time = g_value_get_int64 (value);
+      break;
+    case PROP_INITIAL_TIMESTAMP:
+      src->initial_timestamp = g_value_get_uint64 (value);
+      break;
+    case PROP_RATE:
+      src->rate = g_value_get_double (value);
+      break;
+    case PROP_APPLIED_RATE:
+      src->applied_rate = g_value_get_double (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject get_property, mirror of set_property. "location" is read from
+ * the wrapped filesrc when it exists; otherwise the GValue keeps its
+ * default (NULL string). */
+static void
+gst_push_file_src_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstPushFileSrc *src = (GstPushFileSrc *) object;
+
+  switch (prop_id) {
+    case PROP_LOCATION:
+      if (src->filesrc != NULL)
+        g_object_get_property (G_OBJECT (src->filesrc), "location", value);
+      break;
+    case PROP_TIME_SEGMENT:
+      g_value_set_boolean (value, src->time_segment);
+      break;
+    case PROP_STREAM_TIME:
+      g_value_set_int64 (value, src->stream_time);
+      break;
+    case PROP_START_TIME:
+      g_value_set_int64 (value, src->start_time);
+      break;
+    case PROP_INITIAL_TIMESTAMP:
+      g_value_set_uint64 (value, src->initial_timestamp);
+      break;
+    case PROP_RATE:
+      g_value_set_double (value, src->rate);
+      break;
+    case PROP_APPLIED_RATE:
+      g_value_set_double (value, src->applied_rate);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Buffer probe on the ghost src pad: in time-segment mode, stamp only the
+ * very first buffer with the configured initial timestamp; all later
+ * buffers pass through untouched. */
+static GstPadProbeReturn
+gst_push_file_src_ghostpad_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
+    GstPushFileSrc * src)
+{
+  if (src->time_segment && !src->seen_first_buffer) {
+    GstBuffer *outbuf = GST_PAD_PROBE_INFO_BUFFER (info);
+
+    GST_BUFFER_TIMESTAMP (outbuf) = src->initial_timestamp;
+    src->seen_first_buffer = TRUE;
+  }
+
+  return GST_PAD_PROBE_OK;
+}
+
+/* Event probe on the ghost src pad: in time-segment mode, replace the
+ * outgoing (BYTES) segment from filesrc with a TIME segment built from
+ * the start-time/stream-time/rate/applied-rate properties. The original
+ * event's reference is dropped and the replacement stored in the probe
+ * info so downstream sees only the new event. */
+static GstPadProbeReturn
+gst_push_file_src_ghostpad_event_probe (GstPad * pad, GstPadProbeInfo * info,
+    GstPushFileSrc * src)
+{
+  GstEvent *event = GST_PAD_PROBE_INFO_EVENT (info);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEGMENT:
+    {
+      if (src->time_segment) {
+        GstSegment segment;
+        GstEvent *replacement;
+        GST_DEBUG_OBJECT (src, "Replacing outgoing segment with TIME SEGMENT");
+        gst_segment_init (&segment, GST_FORMAT_TIME);
+        segment.start = src->start_time;
+        segment.time = src->stream_time;
+        segment.rate = src->rate;
+        segment.applied_rate = src->applied_rate;
+        replacement = gst_event_new_segment (&segment);
+        gst_event_unref (event);
+        GST_PAD_PROBE_INFO_DATA (info) = replacement;
+      }
+      /* explicit break: the original fell through into default, which was
+       * harmless but easy to misread */
+      break;
+    }
+    default:
+      break;
+  }
+  return GST_PAD_PROBE_OK;
+}
+
+/* Upstream event handler on the ghost src pad: refuse SEEK events while in
+ * time-segment mode (the element pretends to be a live-ish push source);
+ * everything else gets default handling. */
+static gboolean
+gst_push_file_src_ghostpad_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstPushFileSrc *src = (GstPushFileSrc *) parent;
+  gboolean ret;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+      if (src->time_segment) {
+        /* When working in time we don't allow seeks */
+        GST_DEBUG_OBJECT (src, "Refusing seek event in TIME mode");
+        gst_event_unref (event);
+        ret = FALSE;
+        break;
+      }
+      /* not in time mode: fall through to default handling */
+    default:
+      ret = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+
+  return ret;
+}
+
+/* Query handler on the ghost src pad. For SCHEDULING queries, advertise
+ * push mode only; in time-segment mode report SEQUENTIAL (not seekable),
+ * otherwise SEEKABLE. All other queries get default handling. */
+static gboolean
+gst_push_file_src_ghostpad_query (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  GstPushFileSrc *src = (GstPushFileSrc *) parent;
+  GstSchedulingFlags flags;
+
+  if (GST_QUERY_TYPE (query) != GST_QUERY_SCHEDULING)
+    return gst_pad_query_default (pad, parent, query);
+
+  /* When working in time we don't allow seeks */
+  flags = src->time_segment ?
+      GST_SCHEDULING_FLAG_SEQUENTIAL : GST_SCHEDULING_FLAG_SEEKABLE;
+  gst_query_set_scheduling (query, flags, 1, -1, 0);
+  gst_query_add_scheduling_mode (query, GST_PAD_MODE_PUSH);
+
+  return TRUE;
+}
+
+/* Instance init: set property defaults, create the wrapped filesrc and
+ * expose its src pad as a ghost pad with custom query/event handlers and
+ * probes. If the "filesrc" factory is missing, src->filesrc stays NULL and
+ * the element has no pads. */
+static void
+gst_push_file_src_init (GstPushFileSrc * src)
+{
+  src->time_segment = DEFAULT_TIME_SEGMENT;
+  src->stream_time = DEFAULT_STREAM_TIME;
+  src->start_time = DEFAULT_START_TIME;
+  src->initial_timestamp = DEFAULT_INITIAL_TIMESTAMP;
+  src->rate = DEFAULT_RATE;
+  src->applied_rate = DEFAULT_APPLIED_RATE;
+  src->seen_first_buffer = FALSE;
+
+  src->filesrc = gst_element_factory_make ("filesrc", "real-filesrc");
+  if (src->filesrc) {
+    GstPad *pad;
+
+    gst_bin_add (GST_BIN (src), src->filesrc);
+    pad = gst_element_get_static_pad (src->filesrc, "src");
+    g_assert (pad != NULL);
+    src->srcpad = gst_ghost_pad_new ("src", pad);
+    /* FIXME^H^HCORE: try pushfile:///foo/bar.ext ! typefind ! fakesink without
+     * this and watch core bugginess (some pad stays in flushing state) */
+    gst_pad_set_query_function (src->srcpad,
+        GST_DEBUG_FUNCPTR (gst_push_file_src_ghostpad_query));
+    gst_pad_set_event_function (src->srcpad,
+        GST_DEBUG_FUNCPTR (gst_push_file_src_ghostpad_event));
+    /* Add outgoing event probe to replace segment and buffer timestamp */
+    gst_pad_add_probe (src->srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
+        (GstPadProbeCallback) gst_push_file_src_ghostpad_event_probe,
+        src, NULL);
+    gst_pad_add_probe (src->srcpad, GST_PAD_PROBE_TYPE_BUFFER,
+        (GstPadProbeCallback) gst_push_file_src_ghostpad_buffer_probe,
+        src, NULL);
+    gst_element_add_pad (GST_ELEMENT (src), src->srcpad);
+    gst_object_unref (pad);
+  }
+}
+
+/*** GSTURIHANDLER INTERFACE *************************************************/
+
+/* GstURIHandler vfunc: pushfile:// URIs always name a source element */
+static GstURIType
+gst_push_file_src_uri_get_type (GType type)
+{
+  return GST_URI_SRC;
+}
+
+/* GstURIHandler vfunc: the single protocol this element handles */
+static const gchar *const *
+gst_push_file_src_uri_get_protocols (GType type)
+{
+  static const gchar *protocols[] = { "pushfile", NULL };
+
+  return protocols;
+}
+
+/* GstURIHandler vfunc: build "pushfile://..." by prefixing "push" onto the
+ * wrapped filesrc's "file://..." URI. Returns NULL (no URI set) when the
+ * filesrc is missing or has no URI; caller owns the returned string. */
+static gchar *
+gst_push_file_src_uri_get_uri (GstURIHandler * handler)
+{
+  GstPushFileSrc *src = GST_PUSH_FILE_SRC (handler);
+  gchar *inner_uri, *result;
+
+  if (src->filesrc == NULL)
+    return NULL;
+
+  inner_uri = gst_uri_handler_get_uri (GST_URI_HANDLER (src->filesrc));
+  if (inner_uri == NULL)
+    return NULL;
+
+  /* "file://..." -> "pushfile://..." */
+  result = g_strconcat ("push", inner_uri, NULL);
+  g_free (inner_uri);
+
+  return result;
+}
+
+/* GstURIHandler vfunc: delegate the URI to the wrapped filesrc after
+ * stripping the "push" prefix ("pushfile://..." -> "file://..."). The
+ * GstURIHandler framework only calls this for our registered "pushfile"
+ * protocol, so the prefix is guaranteed to be present. */
+static gboolean
+gst_push_file_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+    GError ** error)
+{
+  GstPushFileSrc *src = GST_PUSH_FILE_SRC (handler);
+
+  if (src->filesrc == NULL) {
+    g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_STATE,
+        "Could not create file source element");
+    return FALSE;
+  }
+
+  /* skip 'push' bit: uri + 4 turns "pushfile://..." into "file://..." */
+  return gst_uri_handler_set_uri (GST_URI_HANDLER (src->filesrc), uri + 4,
+      error);
+}
+
+/* Interface init: wire up the GstURIHandler vtable for pushfile:// */
+static void
+gst_push_file_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+  GstURIHandlerInterface *iface;
+
+  iface = (GstURIHandlerInterface *) g_iface;
+  iface->set_uri = gst_push_file_src_uri_set_uri;
+  iface->get_uri = gst_push_file_src_uri_get_uri;
+  iface->get_protocols = gst_push_file_src_uri_get_protocols;
+  iface->get_type = gst_push_file_src_uri_get_type;
+}
diff --git a/gst/debugutils/gstpushfilesrc.h b/gst/debugutils/gstpushfilesrc.h
new file mode 100644
index 0000000000..482ae13cf0
--- /dev/null
+++ b/gst/debugutils/gstpushfilesrc.h
@@ -0,0 +1,64 @@
+/* GStreamer Push File Source
+ * Copyright (C) <2007> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_PUSH_FILE_SRC_H__
+#define __GST_PUSH_FILE_SRC_H__
+
+#include <gst/gstbin.h>
+
+G_BEGIN_DECLS
+/* Standard GObject type-cast/type-check boilerplate for GstPushFileSrc */
+#define GST_TYPE_PUSH_FILE_SRC \
+  (gst_push_file_src_get_type())
+#define GST_PUSH_FILE_SRC(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_PUSH_FILE_SRC,GstPushFileSrc))
+#define GST_PUSH_FILE_SRC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_PUSH_FILE_SRC,GstPushFileSrcClass))
+#define GST_IS_PUSH_FILE_SRC(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_PUSH_FILE_SRC))
+#define GST_IS_PUSH_FILE_SRC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_PUSH_FILE_SRC))
+typedef struct _GstPushFileSrc GstPushFileSrc;
+typedef struct _GstPushFileSrcClass GstPushFileSrcClass;
+
+struct _GstPushFileSrc
+{
+  GstBin parent;
+
+  /*< private > */
+  GstElement *filesrc;          /* wrapped "filesrc" element, may be NULL */
+  GstPad *srcpad;               /* ghost pad proxying filesrc's src pad */
+
+  gboolean time_segment;        /* "time-segment" property */
+  gboolean seen_first_buffer;   /* first buffer already timestamped? */
+  gint64 stream_time;           /* "stream-time" property */
+  gint64 start_time;            /* "start-time" property */
+  guint64 initial_timestamp;    /* "initial-timestamp" property */
+  gdouble rate;                 /* "rate" property */
+  gdouble applied_rate;         /* "applied-rate" property */
+};
+
+struct _GstPushFileSrcClass
+{
+  GstBinClass parent_class;
+};
+
+GType gst_push_file_src_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_PUSH_FILE_SRC_H__ */
diff --git a/gst/debugutils/gsttaginject.c b/gst/debugutils/gsttaginject.c
new file mode 100644
index 0000000000..223a0da388
--- /dev/null
+++ b/gst/debugutils/gsttaginject.c
@@ -0,0 +1,206 @@
+/* GStreamer
+ * Copyright (C) 2008 Stefan Kost <ensonic@users.sf.net>
+ *
+ * gsttaginject.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-taginject
+ * @title: taginject
+ *
+ * Element that injects new metadata tags, but passes incoming data through
+ * unmodified.
+ *
+ * ## Example launch lines
+ * |[
+ * gst-launch-1.0 audiotestsrc num-buffers=100 ! taginject tags="title=testsrc,artist=gstreamer" ! vorbisenc ! oggmux ! filesink location=test.ogg
+ * ]| set title and artist
+ * |[
+ * gst-launch-1.0 audiotestsrc num-buffers=100 ! taginject tags="keywords=\{\"testone\",\"audio\"\},title=\"audio\ testtone\"" ! vorbisenc ! oggmux ! filesink location=test.ogg
+ * ]| set keywords and title demonstrating quoting of special chars and handling lists
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+
+#include "gstdebugutilselements.h"
+#include "gsttaginject.h"
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GST_DEBUG_CATEGORY_STATIC (gst_tag_inject_debug);
+#define GST_CAT_DEFAULT gst_tag_inject_debug
+
+enum
+{
+ PROP_TAGS = 1
+};
+
+
+#define gst_tag_inject_parent_class parent_class
+G_DEFINE_TYPE (GstTagInject, gst_tag_inject, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (taginject, "taginject",
+ GST_RANK_NONE, gst_tag_inject_get_type ());
+
+static void gst_tag_inject_finalize (GObject * object);
+static void gst_tag_inject_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_tag_inject_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstFlowReturn gst_tag_inject_transform_ip (GstBaseTransform * trans,
+ GstBuffer * buf);
+static gboolean gst_tag_inject_start (GstBaseTransform * trans);
+
+
+static void
+gst_tag_inject_finalize (GObject * object)
+{
+ GstTagInject *self = GST_TAG_INJECT (object);
+
+ if (self->tags) {
+ gst_tag_list_unref (self->tags);
+ self->tags = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_tag_inject_class_init (GstTagInjectClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBaseTransformClass *gstbasetrans_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+ gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_tag_inject_debug, "taginject", 0,
+ "tag inject element");
+
+ gobject_class->set_property = gst_tag_inject_set_property;
+ gobject_class->get_property = gst_tag_inject_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_TAGS,
+ g_param_spec_string ("tags", "taglist",
+ "List of tags to inject into the target file",
+ NULL, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
+
+ gobject_class->finalize = gst_tag_inject_finalize;
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "TagInject",
+ "Generic", "inject metadata tags", "Stefan Kost <ensonic@users.sf.net>");
+ gst_element_class_add_static_pad_template (gstelement_class, &srctemplate);
+ gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+
+ gstbasetrans_class->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_tag_inject_transform_ip);
+
+ gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_tag_inject_start);
+}
+
+static void
+gst_tag_inject_init (GstTagInject * self)
+{
+ GstBaseTransform *trans = GST_BASE_TRANSFORM (self);
+
+ gst_base_transform_set_gap_aware (trans, TRUE);
+
+ self->tags = NULL;
+}
+
+static GstFlowReturn
+gst_tag_inject_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
+{
+ GstTagInject *self = GST_TAG_INJECT (trans);
+
+ if (G_UNLIKELY (!self->tags_sent)) {
+ self->tags_sent = TRUE;
+ /* send tags */
+ if (self->tags && !gst_tag_list_is_empty (self->tags)) {
+ GST_DEBUG ("tag event :%" GST_PTR_FORMAT, self->tags);
+ gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (trans),
+ gst_event_new_tag (gst_tag_list_ref (self->tags)));
+ }
+ }
+
+ return GST_FLOW_OK;
+}
+
+static void
+gst_tag_inject_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstTagInject *self = GST_TAG_INJECT (object);
+
+ switch (prop_id) {
+ case PROP_TAGS:{
+ gchar *structure =
+ g_strdup_printf ("taglist,%s", g_value_get_string (value));
+ if (!(self->tags = gst_tag_list_new_from_string (structure))) {
+ GST_WARNING ("unparsable taglist = '%s'", structure);
+ }
+
+      /* make sure that tags will be sent */
+ self->tags_sent = FALSE;
+ g_free (structure);
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_tag_inject_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ /*GstTagInject *self = GST_TAG_INJECT (object); */
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gboolean
+gst_tag_inject_start (GstBaseTransform * trans)
+{
+ GstTagInject *self = GST_TAG_INJECT (trans);
+
+  /* we need to send tags in _transform_ip() once */
+ self->tags_sent = FALSE;
+
+ return TRUE;
+}
diff --git a/gst/debugutils/gsttaginject.h b/gst/debugutils/gsttaginject.h
new file mode 100644
index 0000000000..a5459275f0
--- /dev/null
+++ b/gst/debugutils/gsttaginject.h
@@ -0,0 +1,66 @@
+/* GStreamer
+ * Copyright (C) 2008 Stefan Kost <ensonic@users.sf.net>
+ *
+ * gsttaginject.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_TAG_INJECT_H__
+#define __GST_TAG_INJECT_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_TAG_INJECT \
+ (gst_tag_inject_get_type())
+#define GST_TAG_INJECT(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_TAG_INJECT,GstTagInject))
+#define GST_TAG_INJECT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_TAG_INJECT,GstTagInjectClass))
+#define GST_IS_TAG_INJECT(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_TAG_INJECT))
+#define GST_IS_TAG_INJECT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_TAG_INJECT))
+typedef struct _GstTagInject GstTagInject;
+typedef struct _GstTagInjectClass GstTagInjectClass;
+
+/**
+ * GstTagInject:
+ *
+ * Opaque #GstTagInject data structure
+ */
+struct _GstTagInject
+{
+ GstBaseTransform element;
+
+ /*< private > */
+ GstTagList *tags;
+ gboolean tags_sent;
+};
+
+struct _GstTagInjectClass
+{
+ GstBaseTransformClass parent_class;
+};
+
+GType gst_tag_inject_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_TAG_INJECT_H__ */
diff --git a/gst/debugutils/meson.build b/gst/debugutils/meson.build
new file mode 100644
index 0000000000..1342d5334a
--- /dev/null
+++ b/gst/debugutils/meson.build
@@ -0,0 +1,31 @@
+gstnavigationtest = library('gstnavigationtest',
+ 'gstnavigationtest.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstvideo_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstnavigationtest, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstnavigationtest]
+
+gstdebug = library('gstdebug',
+ 'gstdebug.c',
+ 'breakmydata.c',
+ 'gstcapssetter.c',
+ 'gstnavseek.c',
+ 'gstpushfilesrc.c',
+ 'gsttaginject.c',
+ 'rndbuffersize.c',
+ 'progressreport.c',
+ 'tests.c',
+ 'cpureport.c',
+ 'testplugin.c',
+ c_args: gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gst_dep, gstbase_dep, gstvideo_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstdebug, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstdebug]
diff --git a/gst/debugutils/progressreport.c b/gst/debugutils/progressreport.c
new file mode 100644
index 0000000000..8b2d2cfbc5
--- /dev/null
+++ b/gst/debugutils/progressreport.c
@@ -0,0 +1,522 @@
+/* GStreamer Progress Report Element
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) <2004> Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-progressreport
+ * @title: progressreport
+ *
+ * The progressreport element can be put into a pipeline to report progress,
+ * which is done by doing upstream duration and position queries in regular
+ * (real-time) intervals. Both the interval and the preferred query format
+ * can be specified via the #GstProgressReport:update-freq and the
+ * #GstProgressReport:format property.
+ *
+ * Element messages containing a "progress" structure are posted on the bus
+ * whenever progress has been queried (since gst-plugins-good 0.10.6 only).
+ *
+ * Since the element was originally designed for debugging purposes, it will
+ * by default also print information about the current progress to the
+ * terminal. This can be prevented by setting the #GstProgressReport:silent
+ * property to %TRUE.
+ *
+ * This element is most useful in transcoding pipelines or other situations
+ * where just querying the pipeline might not lead to the wanted result. For
+ * progress in TIME format, the element is best placed in a 'raw stream'
+ * section of the pipeline (or after any demuxers/decoders/parsers).
+ *
+ * Three more things should be pointed out: firstly, the element will only
+ * query progress when data flow happens. If data flow is stalled for some
+ * reason, no progress messages will be posted. Secondly, there are other
+ * elements (like qtdemux, for example) that may also post "progress" element
+ * messages on the bus. Applications should check the source of any element
+ * messages they receive, if needed. Finally, applications should not take
+ * action on receiving notification of progress being 100%, they should only
+ * take action when they receive an EOS message (since the progress reported
+ * is in reference to an internal point of a pipeline and not the pipeline as
+ * a whole).
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -m filesrc location=foo.ogg ! decodebin ! progressreport update-freq=1 ! audioconvert ! audioresample ! autoaudiosink
+ * ]| This shows a progress query where a duration is available.
+ * |[
+ * gst-launch-1.0 -m audiotestsrc ! progressreport update-freq=1 ! audioconvert ! autoaudiosink
+ * ]| This shows a progress query where no duration is available.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <string.h>
+#include <math.h>
+#include <time.h>
+
+#include "gstdebugutilselements.h"
+#include "progressreport.h"
+
+
+enum
+{
+ PROP_0,
+ PROP_UPDATE_FREQ,
+ PROP_SILENT,
+ PROP_DO_QUERY,
+ PROP_FORMAT
+};
+
+GstStaticPadTemplate progress_report_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GstStaticPadTemplate progress_report_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+#define DEFAULT_UPDATE_FREQ 5
+#define DEFAULT_SILENT FALSE
+#define DEFAULT_DO_QUERY TRUE
+#define DEFAULT_FORMAT "auto"
+
+static void gst_progress_report_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_progress_report_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_progress_report_sink_event (GstBaseTransform * trans,
+ GstEvent * event);
+static GstFlowReturn gst_progress_report_transform_ip (GstBaseTransform * trans,
+ GstBuffer * buf);
+
+static gboolean gst_progress_report_start (GstBaseTransform * trans);
+static gboolean gst_progress_report_stop (GstBaseTransform * trans);
+
+#define gst_progress_report_parent_class parent_class
+G_DEFINE_TYPE (GstProgressReport, gst_progress_report, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (progressreport, "progressreport",
+ GST_RANK_NONE, gst_progress_report_get_type ());
+
+static void
+gst_progress_report_finalize (GObject * obj)
+{
+ GstProgressReport *filter = GST_PROGRESS_REPORT (obj);
+
+ g_free (filter->format);
+ filter->format = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+}
+
+static void
+gst_progress_report_class_init (GstProgressReportClass * g_class)
+{
+ GstBaseTransformClass *gstbasetrans_class;
+ GstElementClass *element_class;
+ GObjectClass *gobject_class;
+
+ gobject_class = G_OBJECT_CLASS (g_class);
+ element_class = GST_ELEMENT_CLASS (g_class);
+ gstbasetrans_class = GST_BASE_TRANSFORM_CLASS (g_class);
+
+ gobject_class->finalize = gst_progress_report_finalize;
+ gobject_class->set_property = gst_progress_report_set_property;
+ gobject_class->get_property = gst_progress_report_get_property;
+
+ g_object_class_install_property (gobject_class,
+ PROP_UPDATE_FREQ, g_param_spec_int ("update-freq", "Update Frequency",
+ "Number of seconds between reports when data is flowing", 1, G_MAXINT,
+ DEFAULT_UPDATE_FREQ, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_SILENT, g_param_spec_boolean ("silent",
+ "Do not print output to stdout", "Do not print output to stdout",
+ DEFAULT_SILENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_DO_QUERY, g_param_spec_boolean ("do-query",
+ "Use a query instead of buffer metadata to determine stream position",
+ "Use a query instead of buffer metadata to determine stream position",
+ DEFAULT_DO_QUERY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_FORMAT, g_param_spec_string ("format", "format",
+ "Format to use for the querying", DEFAULT_FORMAT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (element_class,
+ &progress_report_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &progress_report_src_template);
+
+ gst_element_class_set_static_metadata (element_class, "Progress report",
+ "Testing",
+ "Periodically query and report on processing progress",
+ "Jan Schmidt <thaytan@mad.scientist.com>");
+
+ gstbasetrans_class->sink_event =
+ GST_DEBUG_FUNCPTR (gst_progress_report_sink_event);
+ gstbasetrans_class->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_progress_report_transform_ip);
+ gstbasetrans_class->start = GST_DEBUG_FUNCPTR (gst_progress_report_start);
+ gstbasetrans_class->stop = GST_DEBUG_FUNCPTR (gst_progress_report_stop);
+}
+
+static void
+gst_progress_report_init (GstProgressReport * report)
+{
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (report), TRUE);
+
+ report->update_freq = DEFAULT_UPDATE_FREQ;
+ report->silent = DEFAULT_SILENT;
+ report->do_query = DEFAULT_DO_QUERY;
+ report->format = g_strdup (DEFAULT_FORMAT);
+}
+
+static void
+gst_progress_report_post_progress (GstProgressReport * filter,
+ GstFormat format, gint64 current, gint64 total)
+{
+ GstStructure *s = NULL;
+
+ if (current >= 0 && total > 0) {
+ gdouble perc;
+
+ perc = gst_util_guint64_to_gdouble (current) * 100.0 /
+ gst_util_guint64_to_gdouble (total);
+ perc = CLAMP (perc, 0.0, 100.0);
+
+ /* we provide a "percent" field of integer type to stay compatible
+ * with qtdemux, but add a second "percent-double" field for those who
+ * want more precision and are too lazy to calculate it themselves */
+ s = gst_structure_new ("progress", "percent", G_TYPE_INT, (gint) perc,
+ "percent-double", G_TYPE_DOUBLE, perc, "current", G_TYPE_INT64, current,
+ "total", G_TYPE_INT64, total, NULL);
+ } else if (current >= 0) {
+ s = gst_structure_new ("progress", "current", G_TYPE_INT64, current, NULL);
+ }
+
+ if (s) {
+ GST_LOG_OBJECT (filter, "posting progress message: %" GST_PTR_FORMAT, s);
+ gst_structure_set (s, "format", GST_TYPE_FORMAT, format, NULL);
+ /* can't post it right here because we're holding the object lock */
+ filter->pending_msg = gst_message_new_element (GST_OBJECT_CAST (filter), s);
+ }
+}
+
+static gboolean
+gst_progress_report_do_query (GstProgressReport * filter, GstFormat format,
+ gint hh, gint mm, gint ss, GstBuffer * buf)
+{
+ const gchar *format_name = NULL;
+ GstPad *sink_pad;
+ gint64 cur, total;
+
+ sink_pad = GST_BASE_TRANSFORM (filter)->sinkpad;
+
+ GST_LOG_OBJECT (filter, "querying using format %d (%s)", format,
+ gst_format_get_name (format));
+
+ if (filter->do_query || !buf) {
+ GST_LOG_OBJECT (filter, "using upstream query");
+ if (!gst_pad_peer_query_position (sink_pad, format, &cur) ||
+ !gst_pad_peer_query_duration (sink_pad, format, &total)) {
+ return FALSE;
+ }
+ } else {
+ GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
+
+ GST_LOG_OBJECT (filter, "using buffer metadata");
+ if (format == GST_FORMAT_TIME && base->segment.format == GST_FORMAT_TIME) {
+ cur = gst_segment_to_stream_time (&base->segment, format,
+ GST_BUFFER_TIMESTAMP (buf));
+ total = base->segment.duration;
+ } else if (format == GST_FORMAT_BUFFERS) {
+ cur = filter->buffer_count;
+ total = -1;
+ } else {
+ return FALSE;
+ }
+ }
+
+ switch (format) {
+ case GST_FORMAT_BYTES:
+ format_name = "bytes";
+ break;
+ case GST_FORMAT_BUFFERS:
+ format_name = "buffers";
+ break;
+ case GST_FORMAT_PERCENT:
+ format_name = "percent";
+ break;
+ case GST_FORMAT_TIME:
+ format_name = "seconds";
+ cur /= GST_SECOND;
+ total /= GST_SECOND;
+ break;
+ case GST_FORMAT_DEFAULT:{
+ GstCaps *caps;
+
+ format_name = "bogounits";
+ caps = gst_pad_get_current_caps (GST_BASE_TRANSFORM (filter)->sinkpad);
+ if (caps) {
+ if (gst_caps_is_fixed (caps) && !gst_caps_is_any (caps)) {
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ const gchar *mime_type = gst_structure_get_name (s);
+
+ if (g_str_has_prefix (mime_type, "video/") ||
+ g_str_has_prefix (mime_type, "image/")) {
+ format_name = "frames";
+ } else if (g_str_has_prefix (mime_type, "audio/")) {
+ format_name = "samples";
+ }
+ }
+ gst_caps_unref (caps);
+ }
+ break;
+ }
+ default:{
+ const GstFormatDefinition *details;
+
+ details = gst_format_get_details (format);
+ if (details) {
+ format_name = details->nick;
+ } else {
+ format_name = "unknown";
+ }
+ break;
+ }
+ }
+
+ if (!filter->silent) {
+ if (total > 0) {
+ g_print ("%s (%02d:%02d:%02d): %" G_GINT64_FORMAT " / %"
+ G_GINT64_FORMAT " %s (%4.1f %%)\n", GST_OBJECT_NAME (filter), hh,
+ mm, ss, cur, total, format_name, (gdouble) cur / total * 100.0);
+ } else {
+ g_print ("%s (%02d:%02d:%02d): %" G_GINT64_FORMAT " %s\n",
+ GST_OBJECT_NAME (filter), hh, mm, ss, cur, format_name);
+ }
+ }
+
+ gst_progress_report_post_progress (filter, format, cur, total);
+ return TRUE;
+}
+
+static void
+gst_progress_report_report (GstProgressReport * filter, gint64 cur_time_s,
+ GstBuffer * buf)
+{
+ GstFormat try_formats[] = { GST_FORMAT_TIME, GST_FORMAT_BYTES,
+ GST_FORMAT_PERCENT, GST_FORMAT_BUFFERS,
+ GST_FORMAT_DEFAULT
+ };
+ GstMessage *msg;
+ GstFormat format = GST_FORMAT_UNDEFINED;
+ gboolean done = FALSE;
+ glong run_time;
+ gint hh, mm, ss;
+
+ run_time = cur_time_s - filter->start_time_s;
+
+ hh = (run_time / 3600) % 100;
+ mm = (run_time / 60) % 60;
+ ss = (run_time % 60);
+
+ GST_OBJECT_LOCK (filter);
+
+ if (filter->format != NULL && strcmp (filter->format, "auto") != 0) {
+ format = gst_format_get_by_nick (filter->format);
+ }
+
+ if (format != GST_FORMAT_UNDEFINED) {
+ done = gst_progress_report_do_query (filter, format, hh, mm, ss, buf);
+ } else {
+ gint i;
+
+ for (i = 0; i < G_N_ELEMENTS (try_formats); ++i) {
+ done = gst_progress_report_do_query (filter, try_formats[i], hh, mm, ss,
+ buf);
+ if (done)
+ break;
+ }
+ }
+
+ if (!done && !filter->silent) {
+ g_print ("%s (%2d:%2d:%2d): Could not query position and/or duration\n",
+ GST_OBJECT_NAME (filter), hh, mm, ss);
+ }
+
+ msg = filter->pending_msg;
+ filter->pending_msg = NULL;
+ GST_OBJECT_UNLOCK (filter);
+
+ if (msg) {
+ gst_element_post_message (GST_ELEMENT_CAST (filter), msg);
+ }
+}
+
+static gboolean
+gst_progress_report_sink_event (GstBaseTransform * trans, GstEvent * event)
+{
+ GstProgressReport *filter;
+
+ filter = GST_PROGRESS_REPORT (trans);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ {
+ gint64 cur_time_s = g_get_real_time () / G_USEC_PER_SEC;
+
+ gst_progress_report_report (filter, cur_time_s, NULL);
+ break;
+ }
+ default:
+ break;
+ }
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
+}
+
+static GstFlowReturn
+gst_progress_report_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
+{
+ GstProgressReport *filter;
+ gboolean need_update;
+ gint64 cur_time;
+
+ cur_time = g_get_real_time () / G_USEC_PER_SEC;
+
+ filter = GST_PROGRESS_REPORT (trans);
+
+ /* Check if update_freq seconds have passed since the last update */
+ GST_OBJECT_LOCK (filter);
+ need_update = (cur_time - filter->last_report_s) >= filter->update_freq;
+ filter->buffer_count++;
+ GST_OBJECT_UNLOCK (filter);
+
+ if (need_update) {
+ gst_progress_report_report (filter, cur_time, buf);
+ GST_OBJECT_LOCK (filter);
+ filter->last_report_s = cur_time;
+ GST_OBJECT_UNLOCK (filter);
+ }
+
+ return GST_FLOW_OK;
+}
+
+static gboolean
+gst_progress_report_start (GstBaseTransform * trans)
+{
+ GstProgressReport *filter;
+
+ filter = GST_PROGRESS_REPORT (trans);
+
+ filter->start_time_s = filter->last_report_s =
+ g_get_real_time () / G_USEC_PER_SEC;
+ filter->buffer_count = 0;
+
+ return TRUE;
+}
+
+static gboolean
+gst_progress_report_stop (GstBaseTransform * trans)
+{
+ /* anything we should be doing here? */
+ return TRUE;
+}
+
+static void
+gst_progress_report_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstProgressReport *filter;
+
+ filter = GST_PROGRESS_REPORT (object);
+
+ switch (prop_id) {
+ case PROP_UPDATE_FREQ:
+ GST_OBJECT_LOCK (filter);
+ filter->update_freq = g_value_get_int (value);
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ case PROP_SILENT:
+ GST_OBJECT_LOCK (filter);
+ filter->silent = g_value_get_boolean (value);
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ case PROP_DO_QUERY:
+ GST_OBJECT_LOCK (filter);
+ filter->do_query = g_value_get_boolean (value);
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ case PROP_FORMAT:
+ GST_OBJECT_LOCK (filter);
+ g_free (filter->format);
+ filter->format = g_value_dup_string (value);
+ if (filter->format == NULL)
+ filter->format = g_strdup ("auto");
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ default:
+ break;
+ }
+}
+
+static void
+gst_progress_report_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstProgressReport *filter;
+
+ filter = GST_PROGRESS_REPORT (object);
+
+ switch (prop_id) {
+ case PROP_UPDATE_FREQ:
+ GST_OBJECT_LOCK (filter);
+ g_value_set_int (value, filter->update_freq);
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ case PROP_SILENT:
+ GST_OBJECT_LOCK (filter);
+ g_value_set_boolean (value, filter->silent);
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ case PROP_DO_QUERY:
+ GST_OBJECT_LOCK (filter);
+ g_value_set_boolean (value, filter->do_query);
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ case PROP_FORMAT:
+ GST_OBJECT_LOCK (filter);
+ g_value_set_string (value, filter->format);
+ GST_OBJECT_UNLOCK (filter);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/debugutils/progressreport.h b/gst/debugutils/progressreport.h
new file mode 100644
index 0000000000..904ba38699
--- /dev/null
+++ b/gst/debugutils/progressreport.h
@@ -0,0 +1,68 @@
+/* GStreamer Progress Report Element
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) <2004> Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_PROGRESS_REPORT_H__
+#define __GST_PROGRESS_REPORT_H__
+
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_PROGRESS_REPORT \
+ (gst_progress_report_get_type())
+#define GST_PROGRESS_REPORT(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_PROGRESS_REPORT,GstProgressReport))
+#define GST_PROGRESS_REPORT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_PROGRESS_REPORT,GstProgressReportClass))
+#define GST_IS_PROGRESS_REPORT(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_PROGRESS_REPORT))
+#define GST_IS_PROGRESS_REPORT_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_PROGRESS_REPORT))
+typedef struct _GstProgressReport GstProgressReport;
+typedef struct _GstProgressReportClass GstProgressReportClass;
+
+struct _GstProgressReport
+{
+ GstBaseTransform basetransform;
+
+ GstMessage *pending_msg;
+
+ gint update_freq;
+ gboolean silent;
+ gboolean do_query;
+ gint64 start_time_s;
+ gint64 last_report_s;
+ gint64 buffer_count;
+
+ /* Format used for querying. Using a string here because the
+ * format might not be registered yet when the property is set */
+ gchar *format;
+};
+
+struct _GstProgressReportClass
+{
+ GstBaseTransformClass parent_class;
+};
+
+GType gst_progress_report_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_PROGRESS_REPORT_H__ */
diff --git a/gst/debugutils/rndbuffersize.c b/gst/debugutils/rndbuffersize.c
new file mode 100644
index 0000000000..1fee416fbb
--- /dev/null
+++ b/gst/debugutils/rndbuffersize.c
@@ -0,0 +1,581 @@
+/* GStreamer
+ * Copyright (C) 2007 Nokia Corporation (contact <stefan.kost@nokia.com>)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-rndbuffersize
+ * @title: rndbuffersize
+ *
+ * This element pulls buffers with random sizes from the source.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#include "gstdebugutilselements.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rnd_buffer_size_debug);
+#define GST_CAT_DEFAULT gst_rnd_buffer_size_debug
+
+#define GST_TYPE_RND_BUFFER_SIZE (gst_rnd_buffer_size_get_type())
+#define GST_RND_BUFFER_SIZE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RND_BUFFER_SIZE,GstRndBufferSize))
+#define GST_RND_BUFFER_SIZE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RND_BUFFER_SIZE,GstRndBufferSizeClass))
+#define GST_IS_RND_BUFFER_SIZE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RND_BUFFER_SIZE))
+#define GST_IS_RND_BUFFER_SIZE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RND_BUFFER_SIZE))
+
+typedef struct _GstRndBufferSize GstRndBufferSize;
+typedef struct _GstRndBufferSizeClass GstRndBufferSizeClass;
+
+/* Element that re-chunks the stream into buffers of random size
+ * between min and max bytes (pull or push scheduling). */
+struct _GstRndBufferSize
+{
+  GstElement parent;
+
+  /*< private > */
+  GRand *rand;                  /* PRNG, created from 'seed' on READY->PAUSED */
+  guint seed;
+  gint min, max;                /* requested buffer size range, in bytes */
+
+  GstPad *sinkpad, *srcpad;
+  guint64 offset;               /* current byte offset for pull-mode reads */
+
+  gboolean need_newsegment;     /* TRUE until a BYTES segment has been pushed */
+
+  GstAdapter *adapter;          /* push-mode accumulator, created lazily in chain() */
+};
+
+struct _GstRndBufferSizeClass
+{
+  GstElementClass parent_class;
+};
+
+/* property IDs (0 is reserved by GObject) */
+enum
+{
+  PROP_SEED = 1,
+  PROP_MINIMUM,
+  PROP_MAXIMUM
+};
+
+#define DEFAULT_SEED 0
+#define DEFAULT_MIN 1
+#define DEFAULT_MAX (8*1024)
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS_ANY);
+
+/* GObject / pad-function forward declarations and type registration. */
+static void gst_rnd_buffer_size_finalize (GObject * object);
+static void gst_rnd_buffer_size_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_rnd_buffer_size_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+static gboolean gst_rnd_buffer_size_activate (GstPad * pad, GstObject * parent);
+static gboolean gst_rnd_buffer_size_activate_mode (GstPad * pad,
+    GstObject * parent, GstPadMode mode, gboolean active);
+static void gst_rnd_buffer_size_loop (GstRndBufferSize * self);
+static GstStateChangeReturn gst_rnd_buffer_size_change_state (GstElement *
+    element, GstStateChange transition);
+static gboolean gst_rnd_buffer_size_src_event (GstPad * pad,
+    GstObject * parent, GstEvent * event);
+static gboolean gst_rnd_buffer_size_sink_event (GstPad * pad,
+    GstObject * parent, GstEvent * event);
+static GstFlowReturn gst_rnd_buffer_size_chain (GstPad * pad,
+    GstObject * parent, GstBuffer * buffer);
+
+GType gst_rnd_buffer_size_get_type (void);
+#define gst_rnd_buffer_size_parent_class parent_class
+G_DEFINE_TYPE (GstRndBufferSize, gst_rnd_buffer_size, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (rndbuffersize, "rndbuffersize",
+    GST_RANK_NONE, gst_rnd_buffer_size_get_type ());
+
+/* Class init: wire up GObject vfuncs, pad templates, element metadata,
+ * state-change handling and the seed/min/max properties. */
+static void
+gst_rnd_buffer_size_class_init (GstRndBufferSizeClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (gst_rnd_buffer_size_debug, "rndbuffersize", 0,
+      "rndbuffersize element");
+
+  gobject_class->set_property = gst_rnd_buffer_size_set_property;
+  gobject_class->get_property = gst_rnd_buffer_size_get_property;
+  gobject_class->finalize = gst_rnd_buffer_size_finalize;
+
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "Random buffer size",
+      "Testing", "pull random sized buffers",
+      "Stefan Kost <stefan.kost@nokia.com>");
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_change_state);
+
+  g_object_class_install_property (gobject_class, PROP_SEED,
+      g_param_spec_uint ("seed", "random number seed",
+          "seed for randomness (initialized when going from READY to PAUSED)",
+          0, G_MAXUINT32, DEFAULT_SEED,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  /* NOTE(review): min=0 is allowed, which lets g_rand_int_range() yield
+   * zero-byte requests in the streaming code — confirm that is intended */
+  g_object_class_install_property (gobject_class, PROP_MINIMUM,
+      g_param_spec_int ("min", "minimum", "minimum buffer size",
+          0, G_MAXINT32, DEFAULT_MIN,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_MAXIMUM,
+      g_param_spec_int ("max", "maximum", "maximum buffer size",
+          1, G_MAXINT32, DEFAULT_MAX,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+}
+
+/* Instance init: create the always pads. The sink pad carries the
+ * activation, event and chain functions; both pads proxy caps,
+ * allocation and scheduling queries to their peers. */
+static void
+gst_rnd_buffer_size_init (GstRndBufferSize * self)
+{
+  self->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
+  gst_pad_set_activate_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_activate));
+  gst_pad_set_activatemode_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_activate_mode));
+  gst_pad_set_event_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_sink_event));
+  gst_pad_set_chain_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_chain));
+  GST_OBJECT_FLAG_SET (self->sinkpad, GST_PAD_FLAG_PROXY_CAPS);
+  GST_OBJECT_FLAG_SET (self->sinkpad, GST_PAD_FLAG_PROXY_ALLOCATION);
+  GST_OBJECT_FLAG_SET (self->sinkpad, GST_PAD_FLAG_PROXY_SCHEDULING);
+  gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+  self->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+  gst_pad_set_event_function (self->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rnd_buffer_size_src_event));
+  GST_OBJECT_FLAG_SET (self->srcpad, GST_PAD_FLAG_PROXY_CAPS);
+  GST_OBJECT_FLAG_SET (self->srcpad, GST_PAD_FLAG_PROXY_ALLOCATION);
+  GST_OBJECT_FLAG_SET (self->srcpad, GST_PAD_FLAG_PROXY_SCHEDULING);
+  gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
+}
+
+
+/* Free the PRNG in case the element is disposed without having gone
+ * through PAUSED->READY (which also frees it). */
+static void
+gst_rnd_buffer_size_finalize (GObject * object)
+{
+  GstRndBufferSize *self = GST_RND_BUFFER_SIZE (object);
+
+  if (self->rand) {
+    g_rand_free (self->rand);
+    self->rand = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+/* Property setter. NOTE(review): fields are written without taking the
+ * object lock; presumably properties are set before streaming — confirm. */
+static void
+gst_rnd_buffer_size_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRndBufferSize *self = GST_RND_BUFFER_SIZE (object);
+
+  switch (prop_id) {
+    case PROP_SEED:
+      self->seed = g_value_get_uint (value);
+      break;
+    case PROP_MINIMUM:
+      self->min = g_value_get_int (value);
+      break;
+    case PROP_MAXIMUM:
+      self->max = g_value_get_int (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+
+/* Property getter, mirroring the setter above. */
+static void
+gst_rnd_buffer_size_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRndBufferSize *self = GST_RND_BUFFER_SIZE (object);
+
+  switch (prop_id) {
+    case PROP_SEED:
+      g_value_set_uint (value, self->seed);
+      break;
+    case PROP_MINIMUM:
+      g_value_set_int (value, self->min);
+      break;
+    case PROP_MAXIMUM:
+      g_value_set_int (value, self->max);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+
+/* Sink-pad activation: prefer pull mode when upstream supports
+ * seekable pull scheduling, otherwise fall back to push mode. */
+static gboolean
+gst_rnd_buffer_size_activate (GstPad * pad, GstObject * parent)
+{
+  GstQuery *query;
+  gboolean pull_mode;
+
+  query = gst_query_new_scheduling ();
+
+  if (gst_pad_peer_query (pad, query))
+    pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+        GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+  else
+    pull_mode = FALSE;
+
+  gst_query_unref (query);
+
+  if (pull_mode) {
+    GST_DEBUG_OBJECT (pad, "activating pull");
+    return gst_pad_activate_mode (pad, GST_PAD_MODE_PULL, TRUE);
+  } else {
+    GST_DEBUG_OBJECT (pad, "activating push");
+    return gst_pad_activate_mode (pad, GST_PAD_MODE_PUSH, TRUE);
+  }
+}
+
+
+/* Start/stop the streaming task for pull mode; push mode needs no task
+ * since data arrives through the chain function. */
+static gboolean
+gst_rnd_buffer_size_activate_mode (GstPad * pad, GstObject * parent,
+    GstPadMode mode, gboolean active)
+{
+  gboolean res;
+  GstRndBufferSize *self = GST_RND_BUFFER_SIZE (parent);
+
+  switch (mode) {
+    case GST_PAD_MODE_PULL:
+      if (active) {
+        GST_INFO_OBJECT (self, "starting pull");
+        res =
+            gst_pad_start_task (pad, (GstTaskFunction) gst_rnd_buffer_size_loop,
+            self, NULL);
+        self->need_newsegment = TRUE;
+      } else {
+        GST_INFO_OBJECT (self, "stopping pull");
+        res = gst_pad_stop_task (pad);
+      }
+      break;
+    case GST_PAD_MODE_PUSH:
+      GST_INFO_OBJECT (self, "%sactivating in push mode", (active) ? "" : "de");
+      res = TRUE;
+      break;
+    default:
+      res = FALSE;
+      break;
+  }
+  return res;
+}
+
+/* Source-pad event handler: only SEEK is handled here, everything else
+ * goes to the default handler. Seeks must be BYTES format with
+ * GST_SEEK_TYPE_SET; flushing seeks flush both pads, non-flushing ones
+ * just pause the task before the offset is updated.
+ *
+ * Fix: pad event functions take ownership of @event; the original code
+ * returned without unreffing the SEEK event on all paths, leaking it. */
+static gboolean
+gst_rnd_buffer_size_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRndBufferSize *self;
+  GstSeekType start_type;
+  GstSeekFlags flags;
+  GstFormat format;
+  gint64 start;
+
+  if (GST_EVENT_TYPE (event) != GST_EVENT_SEEK) {
+    return gst_pad_event_default (pad, parent, event);
+  }
+
+  self = GST_RND_BUFFER_SIZE (parent);
+  gst_event_parse_seek (event, NULL, &format, &flags, &start_type, &start,
+      NULL, NULL);
+
+  if (format != GST_FORMAT_BYTES) {
+    GST_WARNING_OBJECT (pad, "only BYTE format supported");
+    gst_event_unref (event);
+    return FALSE;
+  }
+  if (start_type != GST_SEEK_TYPE_SET) {
+    GST_WARNING_OBJECT (pad, "only SEEK_TYPE_SET supported");
+    gst_event_unref (event);
+    return FALSE;
+  }
+
+  /* we own the event and have extracted everything we need from it */
+  gst_event_unref (event);
+
+  if ((flags & GST_SEEK_FLAG_FLUSH)) {
+    gst_pad_push_event (self->srcpad, gst_event_new_flush_start ());
+    gst_pad_push_event (self->sinkpad, gst_event_new_flush_start ());
+  } else {
+    gst_pad_pause_task (self->sinkpad);
+  }
+
+  /* the flush (or pause) above makes the loop function stop, so taking
+   * the stream lock here cannot deadlock */
+  GST_PAD_STREAM_LOCK (self->sinkpad);
+
+  if ((flags & GST_SEEK_FLAG_FLUSH)) {
+    gst_pad_push_event (self->srcpad, gst_event_new_flush_stop (TRUE));
+    gst_pad_push_event (self->sinkpad, gst_event_new_flush_stop (TRUE));
+  }
+
+  GST_INFO_OBJECT (pad, "seeking to offset %" G_GINT64_FORMAT, start);
+
+  self->offset = start;
+  self->need_newsegment = TRUE;
+
+  gst_pad_start_task (self->sinkpad, (GstTaskFunction) gst_rnd_buffer_size_loop,
+      self, NULL);
+
+  GST_PAD_STREAM_UNLOCK (self->sinkpad);
+  return TRUE;
+}
+
+/* Push buffers of random size (min..max) out of the adapter. If @eos is
+ * TRUE, drain the remainder: push a final short buffer when at least
+ * @min bytes are left, otherwise discard the tail.
+ * Returns GST_FLOW_OK, the first non-OK flow from downstream, or
+ * GST_FLOW_ERROR for bogus min/max settings.
+ *
+ * Fix: the bogus_minmax message claimed min was *smaller* than max
+ * (the guard is min > max) and printed min/max under swapped labels. */
+static GstFlowReturn
+gst_rnd_buffer_size_drain_adapter (GstRndBufferSize * self, gboolean eos)
+{
+  GstFlowReturn flow;
+  GstBuffer *buf;
+  guint num_bytes, avail;
+
+  flow = GST_FLOW_OK;
+
+  if (G_UNLIKELY (self->min > self->max))
+    goto bogus_minmax;
+
+  do {
+    if (self->min != self->max) {
+      /* NOTE: g_rand_int_range() excludes the upper bound, so sizes are
+       * drawn from [min, max) */
+      num_bytes = g_rand_int_range (self->rand, self->min, self->max);
+    } else {
+      num_bytes = self->min;
+    }
+
+    GST_LOG_OBJECT (self, "pulling %u bytes out of adapter", num_bytes);
+
+    buf = gst_adapter_take_buffer (self->adapter, num_bytes);
+
+    if (buf == NULL) {
+      if (!eos) {
+        GST_LOG_OBJECT (self, "not enough bytes in adapter");
+        break;
+      }
+
+      /* on EOS, flush whatever is left over */
+      avail = gst_adapter_available (self->adapter);
+
+      if (avail == 0)
+        break;
+
+      if (avail < (guint) self->min) {
+        GST_WARNING_OBJECT (self, "discarding %u bytes at end (min=%d)",
+            avail, self->min);
+        gst_adapter_clear (self->adapter);
+        break;
+      }
+      buf = gst_adapter_take_buffer (self->adapter, avail);
+      g_assert (buf != NULL);
+    }
+
+    flow = gst_pad_push (self->srcpad, buf);
+  }
+  while (flow == GST_FLOW_OK);
+
+  return flow;
+
+/* ERRORS */
+bogus_minmax:
+  {
+    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS,
+        ("The minimum buffer size is greater than the maximum buffer size."),
+        ("buffer sizes: min=%d, max=%d", self->min, self->max));
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* Sink-pad event handler (push mode): drain everything buffered on EOS
+ * and drop buffered data on FLUSH_STOP; all events are then forwarded
+ * with the default handler. */
+static gboolean
+gst_rnd_buffer_size_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRndBufferSize *rnd = GST_RND_BUFFER_SIZE (parent);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_EOS:
+      gst_rnd_buffer_size_drain_adapter (rnd, TRUE);
+      break;
+    case GST_EVENT_FLUSH_STOP:
+      if (rnd->adapter != NULL)
+        gst_adapter_clear (rnd->adapter);
+      break;
+    default:
+      break;
+  }
+
+  return gst_pad_event_default (pad, parent, event);
+}
+
+/* Push-mode data path: collect input in the adapter, then push
+ * random-sized chunks downstream. */
+static GstFlowReturn
+gst_rnd_buffer_size_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstRndBufferSize *rnd = GST_RND_BUFFER_SIZE (parent);
+  GstFlowReturn flow;
+
+  if (rnd->adapter == NULL)
+    rnd->adapter = gst_adapter_new ();
+
+  gst_adapter_push (rnd->adapter, buf);
+
+  flow = gst_rnd_buffer_size_drain_adapter (rnd, FALSE);
+
+  if (flow != GST_FLOW_OK)
+    GST_INFO_OBJECT (rnd, "flow: %s", gst_flow_get_name (flow));
+
+  return flow;
+}
+
+/* Pull-mode streaming task: pull a random-sized chunk at the current
+ * offset, push a BYTES segment first if needed, then push the buffer
+ * downstream. Pauses the task on EOS or any fatal flow return.
+ *
+ * Fix: the bogus_minmax message claimed min was *smaller* than max
+ * (the guard is min > max) and printed min/max under swapped labels. */
+static void
+gst_rnd_buffer_size_loop (GstRndBufferSize * self)
+{
+  GstBuffer *buf = NULL;
+  GstFlowReturn ret;
+  guint num_bytes, size;
+
+  if (G_UNLIKELY (self->min > self->max))
+    goto bogus_minmax;
+
+  if (G_UNLIKELY (self->min != self->max)) {
+    /* NOTE: g_rand_int_range() excludes the upper bound */
+    num_bytes = g_rand_int_range (self->rand, self->min, self->max);
+  } else {
+    num_bytes = self->min;
+  }
+
+  GST_LOG_OBJECT (self, "pulling %u bytes at offset %" G_GUINT64_FORMAT,
+      num_bytes, self->offset);
+
+  ret = gst_pad_pull_range (self->sinkpad, self->offset, num_bytes, &buf);
+
+  if (ret != GST_FLOW_OK)
+    goto pull_failed;
+
+  size = gst_buffer_get_size (buf);
+
+  /* a short read happens near the end of the stream */
+  if (size < num_bytes) {
+    GST_WARNING_OBJECT (self, "short buffer: %u bytes", size);
+  }
+
+  if (self->need_newsegment) {
+    GstSegment segment;
+
+    gst_segment_init (&segment, GST_FORMAT_BYTES);
+    segment.start = self->offset;
+    gst_pad_push_event (self->srcpad, gst_event_new_segment (&segment));
+    self->need_newsegment = FALSE;
+  }
+
+  self->offset += size;
+
+  ret = gst_pad_push (self->srcpad, buf);
+
+  if (ret != GST_FLOW_OK)
+    goto push_failed;
+
+  return;
+
+pause_task:
+  {
+    GST_DEBUG_OBJECT (self, "pausing task");
+    gst_pad_pause_task (self->sinkpad);
+    return;
+  }
+
+pull_failed:
+  {
+    if (ret == GST_FLOW_EOS) {
+      GST_DEBUG_OBJECT (self, "eos");
+      gst_pad_push_event (self->srcpad, gst_event_new_eos ());
+    } else {
+      GST_WARNING_OBJECT (self, "pull_range flow: %s", gst_flow_get_name (ret));
+    }
+    goto pause_task;
+  }
+
+push_failed:
+  {
+    GST_DEBUG_OBJECT (self, "push flow: %s", gst_flow_get_name (ret));
+    if (ret == GST_FLOW_EOS) {
+      GST_DEBUG_OBJECT (self, "eos");
+      gst_pad_push_event (self->srcpad, gst_event_new_eos ());
+    } else if (ret < GST_FLOW_EOS || ret == GST_FLOW_NOT_LINKED) {
+      /* fatal flow (worse than EOS) or not-linked: post an error */
+      GST_ELEMENT_FLOW_ERROR (self, ret);
+    }
+    goto pause_task;
+  }
+
+bogus_minmax:
+  {
+    GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS,
+        ("The minimum buffer size is greater than the maximum buffer size."),
+        ("buffer sizes: min=%d, max=%d", self->min, self->max));
+    goto pause_task;
+  }
+}
+
+/* Create the seeded PRNG and reset the pull offset going READY->PAUSED,
+ * drop the PRNG going PAUSED->READY, and release the push-mode adapter
+ * on READY->NULL. */
+static GstStateChangeReturn
+gst_rnd_buffer_size_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRndBufferSize *self = GST_RND_BUFFER_SIZE (element);
+  GstStateChangeReturn ret;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      self->offset = 0;
+      if (!self->rand) {
+        self->rand = g_rand_new_with_seed (self->seed);
+      }
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      if (self->rand) {
+        g_rand_free (self->rand);
+        self->rand = NULL;
+      }
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      if (self->adapter) {
+        g_object_unref (self->adapter);
+        self->adapter = NULL;
+      }
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
diff --git a/gst/debugutils/testplugin.c b/gst/debugutils/testplugin.c
new file mode 100644
index 0000000000..8fdb4f9aa4
--- /dev/null
+++ b/gst/debugutils/testplugin.c
@@ -0,0 +1,306 @@
+/* GStreamer
+ * Copyright (C) 2004 Benjamin Otte <otte@gnome.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasesink.h>
+#include "gstdebugutilselements.h"
+#include "tests.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_test_debug);
+#define GST_CAT_DEFAULT gst_test_debug
+
+/* This plugin does all the tests registered in the tests.h file
+ */
+
+#define GST_TYPE_TEST \
+  (gst_test_get_type())
+#define GST_TEST(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_TEST,GstTest))
+#define GST_TEST_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_TEST,GstTestClass))
+#define GST_TEST_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_TEST,GstTestClass))
+#define GST_IS_TEST(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_TEST))
+#define GST_IS_TEST_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_TEST))
+
+typedef struct _GstTest GstTest;
+typedef struct _GstTestClass GstTestClass;
+
+/* Sink element that runs every test from tests.h over the stream. */
+struct _GstTest
+{
+  GstBaseSink basesink;
+
+  gpointer tests[TESTS_COUNT];  /* per-test opaque state, NULL when inactive */
+  GValue values[TESTS_COUNT];   /* user-supplied expected values */
+};
+
+struct _GstTestClass
+{
+  GstBaseSinkClass parent_class;
+
+  /* cached property names: [2*i] = actual value, [2*i+1] = expected value */
+  gchar *param_names[2 * TESTS_COUNT];
+};
+
+static void gst_test_finalize (GstTest * test);
+
+static gboolean gst_test_start (GstBaseSink * trans);
+static gboolean gst_test_stop (GstBaseSink * trans);
+static gboolean gst_test_sink_event (GstBaseSink * basesink, GstEvent * event);
+static GstFlowReturn gst_test_render_buffer (GstBaseSink * basesink,
+    GstBuffer * buf);
+
+static void gst_test_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_test_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS_ANY);
+
+GType gst_test_get_type (void);
+#define gst_test_parent_class parent_class
+G_DEFINE_TYPE (GstTest, gst_test, GST_TYPE_BASE_SINK);
+GST_ELEMENT_REGISTER_DEFINE (testsink, "testsink",
+    GST_RANK_NONE, gst_test_get_type ());
+
+/* Class init. Installs two properties per registered test: property id
+ * 2*i+1 exposes the actual value (read-only spec), id 2*i+2 the expected
+ * value (read/write spec); the names are cached in klass->param_names. */
+static void
+gst_test_class_init (GstTestClass * klass)
+{
+  GstBaseSinkClass *basesink_class = GST_BASE_SINK_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  guint i;
+
+  GST_DEBUG_CATEGORY_INIT (gst_test_debug, "testsink", 0,
+      "debugging category for testsink element");
+
+  object_class->set_property = gst_test_set_property;
+  object_class->get_property = gst_test_get_property;
+
+  object_class->finalize = (GObjectFinalizeFunc) gst_test_finalize;
+
+  for (i = 0; i < TESTS_COUNT; i++) {
+    GParamSpec *spec;
+
+    spec = tests[i].get_spec (&tests[i], FALSE);
+    klass->param_names[2 * i] = g_strdup (g_param_spec_get_name (spec));
+    g_object_class_install_property (object_class, 2 * i + 1, spec);
+    spec = tests[i].get_spec (&tests[i], TRUE);
+    klass->param_names[2 * i + 1] = g_strdup (g_param_spec_get_name (spec));
+    g_object_class_install_property (object_class, 2 * i + 2, spec);
+  }
+
+  gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+
+  gst_element_class_set_static_metadata (gstelement_class, "Test plugin",
+      "Testing", "perform a number of tests", "Benjamin Otte <otte@gnome>");
+
+  basesink_class->render = GST_DEBUG_FUNCPTR (gst_test_render_buffer);
+  basesink_class->event = GST_DEBUG_FUNCPTR (gst_test_sink_event);
+  basesink_class->start = GST_DEBUG_FUNCPTR (gst_test_start);
+  basesink_class->stop = GST_DEBUG_FUNCPTR (gst_test_stop);
+}
+
+/* Instance init: initialise each expected-value GValue with the type of
+ * its corresponding expected-value property (param_names[2*i+1]). */
+static void
+gst_test_init (GstTest * test)
+{
+  GstTestClass *klass;
+  guint i;
+
+  klass = GST_TEST_GET_CLASS (test);
+  for (i = 0; i < TESTS_COUNT; i++) {
+    GParamSpec *spec = g_object_class_find_property (G_OBJECT_CLASS (klass),
+        klass->param_names[2 * i + 1]);
+
+    g_value_init (&test->values[i], G_PARAM_SPEC_VALUE_TYPE (spec));
+  }
+}
+
+/* Release the expected-value GValues. Per-test state is freed in stop()
+ * via tests_unset(). */
+static void
+gst_test_finalize (GstTest * test)
+{
+  guint i;
+
+  for (i = 0; i < TESTS_COUNT; i++) {
+    g_value_unset (&test->values[i]);
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize ((GObject *) test);
+}
+
+/* Free and clear all per-test state. */
+static void
+tests_unset (GstTest * test)
+{
+  guint i;
+
+  for (i = 0; i < TESTS_COUNT; i++) {
+    if (test->tests[i]) {
+      tests[i].free (test->tests[i]);
+      test->tests[i] = NULL;
+    }
+  }
+}
+
+/* Allocate fresh per-test state; state must not already exist. */
+static void
+tests_set (GstTest * test)
+{
+  guint i;
+
+  for (i = 0; i < TESTS_COUNT; i++) {
+    g_assert (test->tests[i] == NULL);
+    test->tests[i] = tests[i].new (&tests[i]);
+  }
+}
+
+/* On EOS, finish every active test: compare the computed result with the
+ * user-set expected value, post a STREAM/FORMAT element error for each
+ * mismatch, and notify the actual-value properties. The event is then
+ * chained up to GstBaseSink. */
+static gboolean
+gst_test_sink_event (GstBaseSink * basesink, GstEvent * event)
+{
+  GstTestClass *klass = GST_TEST_GET_CLASS (basesink);
+  GstTest *test = GST_TEST (basesink);
+
+  switch (GST_EVENT_TYPE (event)) {
+/*
+    case GST_EVENT_NEWSEGMENT:
+      if (GST_EVENT_DISCONT_NEW_MEDIA (event)) {
+        tests_unset (test);
+        tests_set (test);
+      }
+      break;
+*/
+    case GST_EVENT_EOS:{
+      gint i;
+
+      g_object_freeze_notify (G_OBJECT (test));
+      for (i = 0; i < TESTS_COUNT; i++) {
+        if (test->tests[i]) {
+          if (!tests[i].finish (test->tests[i], &test->values[i])) {
+            GValue v = { 0, };
+            gchar *real, *expected;
+
+            /* serialize both sides for the error message; reading the
+             * actual-value property queries the live test state */
+            expected = gst_value_serialize (&test->values[i]);
+            g_value_init (&v, G_VALUE_TYPE (&test->values[i]));
+            g_object_get_property (G_OBJECT (test), klass->param_names[2 * i],
+                &v);
+            real = gst_value_serialize (&v);
+            g_value_unset (&v);
+            GST_ELEMENT_ERROR (test, STREAM, FORMAT, (NULL),
+                ("test %s returned value \"%s\" and not expected value \"%s\"",
+                    klass->param_names[2 * i], real, expected));
+            g_free (real);
+            g_free (expected);
+          }
+          g_object_notify (G_OBJECT (test), klass->param_names[2 * i]);
+        }
+      }
+      g_object_thaw_notify (G_OBJECT (test));
+      break;
+    }
+    default:
+      break;
+  }
+
+  return GST_BASE_SINK_CLASS (parent_class)->event (basesink, event);
+}
+
+/* Feed each rendered buffer to every active test. */
+static GstFlowReturn
+gst_test_render_buffer (GstBaseSink * basesink, GstBuffer * buf)
+{
+  GstTest *test = GST_TEST (basesink);
+  guint i;
+
+  for (i = 0; i < TESTS_COUNT; i++) {
+    if (test->tests[i]) {
+      tests[i].add (test->tests[i], buf);
+    }
+  }
+  return GST_FLOW_OK;
+}
+
+/* Allocate per-test state when streaming starts. */
+static gboolean
+gst_test_start (GstBaseSink * sink)
+{
+  GstTest *test = GST_TEST (sink);
+
+  tests_set (test);
+  return TRUE;
+}
+
+/* Drop per-test state when streaming stops. */
+static gboolean
+gst_test_stop (GstBaseSink * sink)
+{
+  GstTest *test = GST_TEST (sink);
+
+  tests_unset (test);
+  return TRUE;
+}
+
+/* Property setter. Odd property ids are the read-only actual values and
+ * cannot be set; even ids store the expected value into
+ * values[prop_id / 2 - 1] under the object lock. */
+static void
+gst_test_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstTest *test = GST_TEST (object);
+
+  if (prop_id == 0 || prop_id > 2 * TESTS_COUNT) {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+    return;
+  }
+
+  if (prop_id % 2) {
+    /* real values can't be set */
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  } else {
+    /* expected values */
+    GST_OBJECT_LOCK (test);
+    g_value_copy (value, &test->values[prop_id / 2 - 1]);
+    GST_OBJECT_UNLOCK (test);
+  }
+}
+
+/* Property getter. Odd ids report the live test value via get_value()
+ * (each get_value handles NULL state); even ids return the stored
+ * expected value. */
+static void
+gst_test_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstTest *test = GST_TEST (object);
+  guint id = (prop_id - 1) / 2;
+
+  if (prop_id == 0 || prop_id > 2 * TESTS_COUNT) {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+    return;
+  }
+
+  GST_OBJECT_LOCK (test);
+
+  if (prop_id % 2) {
+    /* real values */
+    tests[id].get_value (test->tests[id], value);
+  } else {
+    /* expected values */
+    g_value_copy (&test->values[id], value);
+  }
+
+  GST_OBJECT_UNLOCK (test);
+}
diff --git a/gst/debugutils/tests.c b/gst/debugutils/tests.c
new file mode 100644
index 0000000000..9ca2af90c9
--- /dev/null
+++ b/gst/debugutils/tests.c
@@ -0,0 +1,269 @@
+/* GStreamer
+ * Copyright (C) 2004 Benjamin Otte <otte@gnome.org>
+ *
+ * includes code based on glibc 2.2.3's crypt/md5.c,
+ * Copyright (C) 1995, 1996, 1997, 1999, 2000 Free Software Foundation, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "tests.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+
+/*
+ *** LENGTH ***
+ */
+
+/* State for the length test: running byte count of the stream. */
+typedef struct
+{
+  gint64 value;
+}
+LengthTest;
+
+/* Param spec for either the writable expected value or the read-only
+ * actual value. */
+static GParamSpec *
+length_get_spec (const GstTestInfo * info, gboolean compare_value)
+{
+  if (compare_value) {
+    return g_param_spec_int64 ("expected-length", "expected length",
+        "expected length of stream", -1, G_MAXINT64, -1,
+        G_PARAM_READWRITE | G_PARAM_CONSTRUCT);
+  } else {
+    return g_param_spec_int64 ("length", "length", "length of stream",
+        -1, G_MAXINT64, -1, G_PARAM_READABLE);
+  }
+}
+
+static gpointer
+length_new (const GstTestInfo * info)
+{
+  return g_new0 (LengthTest, 1);
+}
+
+/* Accumulate the size of every buffer. */
+static void
+length_add (gpointer test, GstBuffer * buffer)
+{
+  LengthTest *t = test;
+
+  t->value += gst_buffer_get_size (buffer);
+}
+
+/* Compare against the expected value; -1 means "don't care". */
+static gboolean
+length_finish (gpointer test, GValue * value)
+{
+  LengthTest *t = test;
+
+  if (g_value_get_int64 (value) == -1)
+    return TRUE;
+
+  return t->value == g_value_get_int64 (value);
+}
+
+static void
+length_get_value (gpointer test, GValue * value)
+{
+  LengthTest *t = test;
+
+  /* -1 when the test is not active */
+  g_value_set_int64 (value, t ? t->value : -1);
+}
+
+/*
+ *** BUFFER COUNT ***
+ */
+
+/* Buffer-count test: reuses LengthTest, length_new, length_finish and
+ * length_get_value (see the tests[] table); only the add step differs. */
+static GParamSpec *
+buffer_count_get_spec (const GstTestInfo * info, gboolean compare_value)
+{
+  if (compare_value) {
+    return g_param_spec_int64 ("expected-buffer-count", "expected buffer count",
+        "expected number of buffers in stream",
+        -1, G_MAXINT64, -1, G_PARAM_READWRITE | G_PARAM_CONSTRUCT);
+  } else {
+    return g_param_spec_int64 ("buffer-count", "buffer count",
+        "number of buffers in stream", -1, G_MAXINT64, -1, G_PARAM_READABLE);
+  }
+}
+
+/* Count buffers instead of bytes. */
+static void
+buffer_count_add (gpointer test, GstBuffer * buffer)
+{
+  LengthTest *t = test;
+
+  t->value++;
+}
+
+/*
+ *** TIMESTAMP / DURATION MATCHING ***
+ */
+
+/* State for the timestamp/duration continuity test. */
+typedef struct
+{
+  guint64 diff;                 /* summed |expected - actual| timestamp deviations */
+  guint count;                  /* number of buffers that contributed to diff */
+  GstClockTime expected;        /* next expected timestamp, or NONE */
+}
+TimeDurTest;
+
+/* NOTE(review): the blurbs say "usec", but the accumulator below sums
+ * GST_CLOCK_DIFF() results (nanoseconds) without conversion — confirm
+ * the intended unit. */
+static GParamSpec *
+timedur_get_spec (const GstTestInfo * info, gboolean compare_value)
+{
+  if (compare_value) {
+    return g_param_spec_int64 ("allowed-timestamp-deviation",
+        "allowed timestamp deviation",
+        "allowed average difference in usec between timestamp of next buffer "
+        "and expected timestamp from analyzing last buffer",
+        -1, G_MAXINT64, -1, G_PARAM_READWRITE | G_PARAM_CONSTRUCT);
+  } else {
+    return g_param_spec_int64 ("timestamp-deviation",
+        "timestamp deviation",
+        "average difference in usec between timestamp of next buffer "
+        "and expected timestamp from analyzing last buffer",
+        -1, G_MAXINT64, -1, G_PARAM_READABLE);
+  }
+}
+
+static gpointer
+timedur_new (const GstTestInfo * info)
+{
+  TimeDurTest *ret = g_new0 (TimeDurTest, 1);
+
+  ret->expected = GST_CLOCK_TIME_NONE;
+
+  return ret;
+}
+
+/* Accumulate the deviation between this buffer's timestamp and the end
+ * of the previous buffer, then predict the next timestamp from this
+ * buffer's timestamp + duration (NONE if either is invalid). */
+static void
+timedur_add (gpointer test, GstBuffer * buffer)
+{
+  TimeDurTest *t = test;
+
+  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
+      GST_CLOCK_TIME_IS_VALID (t->expected)) {
+    t->diff +=
+        ABS (GST_CLOCK_DIFF (t->expected, GST_BUFFER_TIMESTAMP (buffer)));
+    t->count++;
+  }
+  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
+      GST_BUFFER_DURATION_IS_VALID (buffer)) {
+    t->expected = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
+  } else {
+    t->expected = GST_CLOCK_TIME_NONE;
+  }
+}
+
+/* Compare the average deviation against the allowed value; -1 means
+ * "don't care". */
+static gboolean
+timedur_finish (gpointer test, GValue * value)
+{
+  TimeDurTest *t = test;
+
+  if (g_value_get_int64 (value) == -1)
+    return TRUE;
+
+  return (t->diff / MAX (1, t->count)) <= g_value_get_int64 (value);
+}
+
+static void
+timedur_get_value (gpointer test, GValue * value)
+{
+  TimeDurTest *t = test;
+
+  /* average deviation, or -1 when the test is not active */
+  g_value_set_int64 (value, t ? (t->diff / MAX (1, t->count)) : -1);
+}
+
+/*
+ *** MD5 ***
+ */
+
+/* MD5 test: the opaque test state is a GChecksum. The sentinel string
+ * "---" means "no expectation" / "not active". */
+static GParamSpec *
+md5_get_spec (const GstTestInfo * info, gboolean compare_value)
+{
+  if (compare_value) {
+    return g_param_spec_string ("expected-md5", "expected md5",
+        "expected md5 of processing the whole data",
+        "---", G_PARAM_READWRITE | G_PARAM_CONSTRUCT);
+  } else {
+    return g_param_spec_string ("md5", "md5",
+        "md5 of processing the whole data", "---", G_PARAM_READABLE);
+  }
+}
+
+static gpointer
+md5_new (const GstTestInfo * info)
+{
+  return g_checksum_new (G_CHECKSUM_MD5);
+}
+
+/* Feed the buffer contents into the checksum. */
+static void
+md5_add (gpointer checksum, GstBuffer * buffer)
+{
+  GstMapInfo map;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  g_checksum_update (checksum, map.data, map.size);
+  gst_buffer_unmap (buffer, &map);
+}
+
+/* Compare the final digest with the expected string; "---" matches
+ * anything. */
+static gboolean
+md5_finish (gpointer checksum, GValue * value)
+{
+  const gchar *expected, *result;
+
+  expected = g_value_get_string (value);
+  result = g_checksum_get_string (checksum);
+
+  if (g_str_equal (expected, "---"))
+    return TRUE;
+  if (g_str_equal (expected, result))
+    return TRUE;
+  return FALSE;
+}
+
+static void
+md5_get_value (gpointer checksum, GValue * value)
+{
+  if (!checksum) {
+    g_value_set_string (value, "---");
+  } else {
+    g_value_set_string (value, g_checksum_get_string (checksum));
+  }
+}
+
+static void
+md5_free (gpointer checksum)
+{
+  g_checksum_free (checksum);
+}
+
+/*
+ *** TESTINFO ***
+ */
+
+/* Test registry; order defines the property ids in testplugin.c and the
+ * array length must match TESTS_COUNT in tests.h. The buffer-count row
+ * reuses the length test's state and finish/get_value helpers. */
+const GstTestInfo tests[] = {
+  {length_get_spec, length_new, length_add,
+      length_finish, length_get_value, g_free},
+  {buffer_count_get_spec, length_new, buffer_count_add,
+      length_finish, length_get_value, g_free},
+  {timedur_get_spec, timedur_new, timedur_add,
+      timedur_finish, timedur_get_value, g_free},
+  {md5_get_spec, md5_new, md5_add,
+      md5_finish, md5_get_value, md5_free}
+};
diff --git a/gst/debugutils/tests.h b/gst/debugutils/tests.h
new file mode 100644
index 0000000000..c8fa088cbd
--- /dev/null
+++ b/gst/debugutils/tests.h
@@ -0,0 +1,43 @@
+/* GStreamer
+ * Copyright (C) 2004 Benjamin Otte <otte@gnome.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <gst/gst.h>
+
+#ifndef __GST_TESTS_H__
+#define __GST_TESTS_H__
+
+
+typedef struct _GstTestInfo GstTestInfo;
+
+/* Virtual table describing one stream test.  Each entry in the tests[]
+ * table (tests.c) provides these six hooks. */
+struct _GstTestInfo
+{
+  /* create the GParamSpec for this test; the "expected" (writable)
+   * variant when @compare_value is TRUE, else the read-only result */
+  GParamSpec *(*get_spec) (const GstTestInfo * info, gboolean compare_value);
+  /* allocate per-run state; released with free below */
+  gpointer (*new) (const GstTestInfo * info);
+  /* accumulate one buffer into the test state */
+  void (*add) (gpointer test, GstBuffer * buffer);
+  /* compare state against the expected value; TRUE on success */
+  gboolean (*finish) (gpointer test, GValue * value);
+  /* store the current result into @value */
+  void (*get_value) (gpointer test, GValue * value);
+  /* release the state created by new */
+  void (*free) (gpointer test);
+};
+
+extern const GstTestInfo tests[];
+/* keep up to date with G_N_ELEMENTS (tests) in tests.c! */
+#define TESTS_COUNT (4)
+
+
+#endif /* __GST_TESTS_H__ */
diff --git a/gst/deinterlace/gstdeinterlace.c b/gst/deinterlace/gstdeinterlace.c
new file mode 100644
index 0000000000..e3fc59499b
--- /dev/null
+++ b/gst/deinterlace/gstdeinterlace.c
@@ -0,0 +1,3354 @@
+/*
+ * GStreamer
+ * Copyright (C) 2005 Martin Eikermann <meiker@upb.de>
+ * Copyright (C) 2008-2010 Sebastian Dröge <slomo@collabora.co.uk>
+ * Copyright (C) 2011 Robert Swain <robert.swain@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-deinterlace
+ * @title: deinterlace
+ *
+ * deinterlace deinterlaces interlaced video frames to progressive video frames.
+ * For this different algorithms can be selected which will be described later.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/file ! decodebin ! videoconvert ! deinterlace ! videoconvert ! autovideosink
+ * ]| This pipeline deinterlaces a video file with the default deinterlacing options.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdeinterlace.h"
+#include "tvtime/plugins.h"
+#include "yadif.h"
+
+#include <string.h>
+
+#if HAVE_ORC
+#include <orc/orc.h>
+#endif
+
+GST_DEBUG_CATEGORY_STATIC (deinterlace_debug);
+#define GST_CAT_DEFAULT (deinterlace_debug)
+
+/* Properties */
+
+#define DEFAULT_MODE GST_DEINTERLACE_MODE_AUTO
+#define DEFAULT_METHOD GST_DEINTERLACE_LINEAR
+#define DEFAULT_FIELDS GST_DEINTERLACE_ALL
+#define DEFAULT_FIELD_LAYOUT GST_DEINTERLACE_LAYOUT_AUTO
+#define DEFAULT_LOCKING GST_DEINTERLACE_LOCKING_NONE
+#define DEFAULT_IGNORE_OBSCURE TRUE
+#define DEFAULT_DROP_ORPHANS TRUE
+
+enum
+{
+ PROP_0,
+ PROP_MODE,
+ PROP_METHOD,
+ PROP_FIELDS,
+ PROP_FIELD_LAYOUT,
+ PROP_LOCKING,
+ PROP_IGNORE_OBSCURE,
+ PROP_DROP_ORPHANS
+};
+
+/* P is progressive, meaning the top and bottom fields belong to
+ * the same frame, i.e. they were sampled at the same time */
+#define GST_DEINTERLACE_BUFFER_STATE_P (1<<0)
+/* I is interlaced meaning that the two fields were sampled at
+ * different times, usually equidistant in time so one at 1/60,
+ * the other at 2/60 */
+#define GST_DEINTERLACE_BUFFER_STATE_I (1<<1)
+/* TC is telecine, B means bottom, T means top */
+#define GST_DEINTERLACE_BUFFER_STATE_TC_B (1<<2)
+#define GST_DEINTERLACE_BUFFER_STATE_TC_T (1<<3)
+/* TC_P means telecine progressive meaning that the two fields
+ * in the frame were sampled at the same time */
+#define GST_DEINTERLACE_BUFFER_STATE_TC_P (1<<4)
+/* TC_M is presumed to mean telecine mixed (TODO confirm naming):
+ * the two fields are sampled at different times, so the matching
+ * field has to be found in the previous or next frame */
+#define GST_DEINTERLACE_BUFFER_STATE_TC_M (1<<5)
+/* RFF means repeat field flag and indicates a field that has
+ * previously been seen */
+#define GST_DEINTERLACE_BUFFER_STATE_RFF (1<<6)
+
+/* Shorthands used by the telecine_patterns[] table below */
+#define GST_ONE \
+  (GST_DEINTERLACE_BUFFER_STATE_TC_T | GST_DEINTERLACE_BUFFER_STATE_TC_B)
+#define GST_PRG \
+  (GST_DEINTERLACE_BUFFER_STATE_P | GST_DEINTERLACE_BUFFER_STATE_TC_P)
+#define GST_INT \
+  (GST_DEINTERLACE_BUFFER_STATE_I | GST_DEINTERLACE_BUFFER_STATE_TC_M)
+#define GST_RFF (GST_DEINTERLACE_BUFFER_STATE_RFF)
+
+/* Index into telecine_patterns[] where the obscure patterns begin;
+ * keep in sync with the table below (see its comment). */
+#define GST_DEINTERLACE_OBSCURE_THRESHOLD 5
+
+static const TelecinePattern telecine_patterns[] = {
+ /* 60i -> 60p or 50i -> 50p (NOTE THE WEIRD RATIOS) */
+ {"1:1", 1, 2, 1, {GST_ONE,}},
+ /* 60i -> 30p or 50i -> 25p */
+ {"2:2", 1, 1, 1, {GST_INT,}},
+ /* 60i telecine -> 24p */
+ {"2:3-RFF", 4, 4, 5, {GST_PRG, GST_RFF, GST_PRG, GST_RFF,}},
+ {"2:3", 5, 4, 5, {GST_PRG, GST_PRG, GST_ONE, GST_ONE, GST_PRG,}},
+ {"3:2:2:3-RFF", 4, 4, 5, {GST_RFF, GST_PRG, GST_PRG, GST_RFF,}},
+ {"3:2:2:3", 5, 4, 5, {GST_PRG, GST_ONE, GST_INT, GST_ONE, GST_PRG,}},
+ /* fieldanalysis should indicate this using RFF on the second and fourth
+ * buffers and not send the third buffer at all. it will be identified as
+ * 3:2:2:3-RFF */
+ /* {"2:3:3:2", 5, 4, 5, {GST_PRG, GST_PRG, GST_DRP, GST_PRG, GST_PRG,}}, */
+
+ /* The following patterns are obscure and are ignored if ignore-obscure is
+ * set to true. If any patterns are added above this line, check and edit
+ * GST_DEINTERLACE_OBSCURE_THRESHOLD */
+
+ /* 50i Euro pulldown -> 24p */
+ {"2-11:3", 25, 24, 25, {GST_PRG, GST_PRG, GST_PRG, GST_PRG, GST_PRG,
+ GST_PRG, GST_PRG, GST_PRG, GST_PRG, GST_PRG,
+ GST_PRG, GST_PRG, GST_ONE, GST_INT, GST_INT,
+ GST_INT, GST_INT, GST_INT, GST_INT, GST_INT,
+ GST_INT, GST_INT, GST_INT, GST_ONE, GST_PRG,}},
+#if 0
+ /* haven't figured out how fieldanalysis should handle these yet */
+ /* 60i (NTSC 30000/1001) -> 16p (16000/1001) */
+ {"3:4-3", 15, 8, 15, {GST_PRG, GST_DRP, GST_PRG, GST_DRP, GST_PRG,
+ GST_DRP, GST_PRG, GST_DRP, GST_PRG, GST_DRP,
+ GST_PRG, GST_DRP, GST_PRG, GST_DRP, GST_PRG,}},
+ /* 50i (PAL) -> 16p */
+ {"3-7:4", 25, 16, 25, {GST_PRG, GST_DRP, GST_PRG, GST_PRG, GST_DRP,
+ GST_PRG, GST_PRG, GST_DRP, GST_PRG, GST_PRG,
+ GST_DRP, GST_PRG, GST_DRP, GST_PRG, GST_PRG,
+ GST_DRP, GST_PRG, GST_PRG, GST_DRP, GST_PRG,
+ GST_PRG, GST_DRP, GST_PRG, GST_PRG, GST_DRP,}},
+ /* NTSC 60i -> 18p */
+ {"3:3:4", 5, 3, 5, {GST_PRG, GST_DRP, GST_PRG, GST_DRP, GST_PRG,}},
+ /* NTSC 60i -> 20p */
+ {"3:3", 3, 2, 3, {GST_PRG, GST_DRP, GST_PRG,}},
+#endif
+ /* NTSC 60i -> 27.5 */
+ {"3:2-4", 11, 10, 11, {GST_PRG, GST_PRG, GST_PRG, GST_PRG, GST_PRG,
+ GST_PRG, GST_ONE, GST_INT, GST_INT, GST_INT,
+ GST_ONE,}},
+ /* PAL 50i -> 27.5 */
+ {"1:2-4", 9, 9, 10, {GST_PRG, GST_PRG, GST_PRG, GST_PRG, GST_INT,
+ GST_INT, GST_INT, GST_INT, GST_INT,}},
+};
+
+static const GEnumValue methods_types[] = {
+ {GST_DEINTERLACE_TOMSMOCOMP, "Motion Adaptive: Motion Search",
+ "tomsmocomp"},
+ {GST_DEINTERLACE_GREEDY_H, "Motion Adaptive: Advanced Detection",
+ "greedyh"},
+ {GST_DEINTERLACE_GREEDY_L, "Motion Adaptive: Simple Detection", "greedyl"},
+ {GST_DEINTERLACE_VFIR, "Blur Vertical", "vfir"},
+ {GST_DEINTERLACE_LINEAR, "Linear", "linear"},
+ {GST_DEINTERLACE_LINEAR_BLEND, "Blur: Temporal (Do Not Use)",
+ "linearblend"},
+ {GST_DEINTERLACE_SCALER_BOB, "Double lines", "scalerbob"},
+ {GST_DEINTERLACE_WEAVE, "Weave (Do Not Use)", "weave"},
+ {GST_DEINTERLACE_WEAVE_TFF, "Progressive: Top Field First (Do Not Use)",
+ "weavetff"},
+ {GST_DEINTERLACE_WEAVE_BFF, "Progressive: Bottom Field First (Do Not Use)",
+ "weavebff"},
+ {GST_DEINTERLACE_YADIF, "YADIF Adaptive Deinterlacer", "yadif"},
+ {0, NULL, NULL},
+};
+
+static const GEnumValue locking_types[] = {
+ {GST_DEINTERLACE_LOCKING_NONE,
+ "No pattern locking", "none"},
+ {GST_DEINTERLACE_LOCKING_AUTO,
+ "Choose passive/active locking depending on whether upstream is live",
+ "auto"},
+ {GST_DEINTERLACE_LOCKING_ACTIVE,
+ "Block until pattern-locked. Use accurate timestamp interpolation within a pattern repeat.",
+ "active"},
+ {GST_DEINTERLACE_LOCKING_PASSIVE,
+ "Do not block. Use naïve timestamp adjustment until pattern-locked based on state history.",
+ "passive"},
+ {0, NULL, NULL},
+};
+
+
+#define GST_TYPE_DEINTERLACE_METHODS (gst_deinterlace_methods_get_type ())
+/* Lazily register the GstDeinterlaceMethods enum GType.
+ *
+ * Registration is guarded with g_once_init_enter()/leave(): the previous
+ * bare "if (!type)" check could race when two threads instantiate the
+ * element concurrently and register the enum twice. */
+static GType
+gst_deinterlace_methods_get_type (void)
+{
+  static gsize deinterlace_methods_type = 0;
+
+  if (g_once_init_enter (&deinterlace_methods_type)) {
+    GType tmp =
+        g_enum_register_static ("GstDeinterlaceMethods", methods_types);
+    g_once_init_leave (&deinterlace_methods_type, tmp);
+  }
+  return (GType) deinterlace_methods_type;
+}
+
+#define GST_TYPE_DEINTERLACE_FIELDS (gst_deinterlace_fields_get_type ())
+/* Lazily register the GstDeinterlaceFields enum GType (thread-safe via
+ * g_once_init_enter()/leave(); the bare static check could race). */
+static GType
+gst_deinterlace_fields_get_type (void)
+{
+  static gsize deinterlace_fields_type = 0;
+
+  static const GEnumValue fields_types[] = {
+    {GST_DEINTERLACE_ALL, "All fields", "all"},
+    {GST_DEINTERLACE_TF, "Top fields only", "top"},
+    {GST_DEINTERLACE_BF, "Bottom fields only", "bottom"},
+    {GST_DEINTERLACE_FIELDS_AUTO, "Automatically detect", "auto"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&deinterlace_fields_type)) {
+    GType tmp = g_enum_register_static ("GstDeinterlaceFields", fields_types);
+    g_once_init_leave (&deinterlace_fields_type, tmp);
+  }
+  return (GType) deinterlace_fields_type;
+}
+
+#define GST_TYPE_DEINTERLACE_FIELD_LAYOUT (gst_deinterlace_field_layout_get_type ())
+/* Lazily register the GstDeinterlaceFieldLayout enum GType (thread-safe
+ * via g_once_init_enter()/leave(); the bare static check could race). */
+static GType
+gst_deinterlace_field_layout_get_type (void)
+{
+  static gsize deinterlace_field_layout_type = 0;
+
+  static const GEnumValue field_layout_types[] = {
+    {GST_DEINTERLACE_LAYOUT_AUTO, "Auto detection", "auto"},
+    {GST_DEINTERLACE_LAYOUT_TFF, "Top field first", "tff"},
+    {GST_DEINTERLACE_LAYOUT_BFF, "Bottom field first", "bff"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&deinterlace_field_layout_type)) {
+    GType tmp = g_enum_register_static ("GstDeinterlaceFieldLayout",
+        field_layout_types);
+    g_once_init_leave (&deinterlace_field_layout_type, tmp);
+  }
+  return (GType) deinterlace_field_layout_type;
+}
+
+#define GST_TYPE_DEINTERLACE_MODES (gst_deinterlace_modes_get_type ())
+/* Lazily register the GstDeinterlaceModes enum GType (thread-safe via
+ * g_once_init_enter()/leave(); the bare static check could race). */
+static GType
+gst_deinterlace_modes_get_type (void)
+{
+  static gsize deinterlace_modes_type = 0;
+
+  static const GEnumValue modes_types[] = {
+    {GST_DEINTERLACE_MODE_AUTO, "Auto detection (best effort)", "auto"},
+    {GST_DEINTERLACE_MODE_INTERLACED, "Force deinterlacing", "interlaced"},
+    {GST_DEINTERLACE_MODE_DISABLED, "Run in passthrough mode", "disabled"},
+    {GST_DEINTERLACE_MODE_AUTO_STRICT, "Auto detection (strict)",
+        "auto-strict"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&deinterlace_modes_type)) {
+    GType tmp = g_enum_register_static ("GstDeinterlaceModes", modes_types);
+    g_once_init_leave (&deinterlace_modes_type, tmp);
+  }
+  return (GType) deinterlace_modes_type;
+}
+
+#define GST_TYPE_DEINTERLACE_LOCKING (gst_deinterlace_locking_get_type ())
+/* Lazily register the GstDeinterlaceLocking enum GType (thread-safe via
+ * g_once_init_enter()/leave(); the bare static check could race). */
+static GType
+gst_deinterlace_locking_get_type (void)
+{
+  static gsize deinterlace_locking_type = 0;
+
+  if (g_once_init_enter (&deinterlace_locking_type)) {
+    GType tmp =
+        g_enum_register_static ("GstDeinterlaceLocking", locking_types);
+    g_once_init_leave (&deinterlace_locking_type, tmp);
+  }
+
+  return (GType) deinterlace_locking_type;
+}
+
+#define DEINTERLACE_VIDEO_FORMATS \
+ "{ AYUV, ARGB, ABGR, RGBA, BGRA, Y444, xRGB, xBGR, RGBx, BGRx, RGB, " \
+ "BGR, YUY2, YVYU, UYVY, Y42B, I420, YV12, Y41B, NV12, NV21 }"
+
+#define DEINTERLACE_CAPS GST_VIDEO_CAPS_MAKE(DEINTERLACE_VIDEO_FORMATS)
+
+#define DEINTERLACE_ALL_CAPS DEINTERLACE_CAPS ";" \
+ GST_VIDEO_CAPS_MAKE_WITH_FEATURES ("ANY", GST_VIDEO_FORMATS_ALL)
+
+static GstStaticCaps progressive_caps =
+GST_STATIC_CAPS ("video/x-raw(ANY),interlace-mode=(string)progressive");
+static GstStaticCaps deinterlace_caps = GST_STATIC_CAPS (DEINTERLACE_CAPS);
+
+static GstStaticPadTemplate src_templ = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (DEINTERLACE_ALL_CAPS)
+ );
+
+static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (DEINTERLACE_ALL_CAPS)
+ );
+
+static void gst_deinterlace_finalize (GObject * self);
+static void gst_deinterlace_set_property (GObject * self, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_deinterlace_get_property (GObject * self, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstCaps *gst_deinterlace_getcaps (GstDeinterlace * self, GstPad * pad,
+ GstCaps * filter);
+static gboolean gst_deinterlace_setcaps (GstDeinterlace * self, GstPad * pad,
+ GstCaps * caps, gboolean force);
+static gboolean gst_deinterlace_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_deinterlace_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static GstFlowReturn gst_deinterlace_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstStateChangeReturn gst_deinterlace_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_deinterlace_set_allocation (GstDeinterlace * self,
+ GstBufferPool * pool, GstAllocator * allocator,
+ GstAllocationParams * params);
+
+static gboolean gst_deinterlace_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_deinterlace_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+static GstFlowReturn gst_deinterlace_output_frame (GstDeinterlace * self,
+ gboolean flushing);
+static void gst_deinterlace_reset (GstDeinterlace * self);
+static void gst_deinterlace_update_qos (GstDeinterlace * self,
+ gdouble proportion, GstClockTimeDiff diff, GstClockTime time);
+static void gst_deinterlace_reset_qos (GstDeinterlace * self);
+static void gst_deinterlace_read_qos (GstDeinterlace * self,
+ gdouble * proportion, GstClockTime * time);
+static gboolean deinterlace_element_init (GstPlugin * plugin);
+
+#define IS_TELECINE(m) ((m) == GST_VIDEO_INTERLACE_MODE_MIXED && self->pattern > 1)
+
+/* FIXME: what's the point of the childproxy interface here? What can you
+ * actually do with it? The method objects seem to have no properties */
+#if 0
+static void gst_deinterlace_child_proxy_interface_init (gpointer g_iface,
+ gpointer iface_data);
+
+static void
+_do_init (GType object_type)
+{
+ const GInterfaceInfo child_proxy_interface_info = {
+ (GInterfaceInitFunc) gst_deinterlace_child_proxy_interface_init,
+ NULL, /* interface_finalize */
+ NULL /* interface_data */
+ };
+
+ g_type_add_interface_static (object_type, GST_TYPE_CHILD_PROXY,
+ &child_proxy_interface_info);
+}
+#endif
+
+#define parent_class gst_deinterlace_parent_class
+G_DEFINE_TYPE (GstDeinterlace, gst_deinterlace, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_CUSTOM (deinterlace, deinterlace_element_init);
+
+static const struct
+{
+ GType (*get_type) (void);
+} _method_types[] = {
+ {
+ gst_deinterlace_method_tomsmocomp_get_type}, {
+ gst_deinterlace_method_greedy_h_get_type}, {
+ gst_deinterlace_method_greedy_l_get_type}, {
+ gst_deinterlace_method_vfir_get_type}, {
+ gst_deinterlace_method_linear_get_type}, {
+ gst_deinterlace_method_linear_blend_get_type}, {
+ gst_deinterlace_method_scaler_bob_get_type}, {
+ gst_deinterlace_method_weave_get_type}, {
+ gst_deinterlace_method_weave_tff_get_type}, {
+ gst_deinterlace_method_weave_bff_get_type}, {
+ gst_deinterlace_method_yadif_get_type}
+};
+
+/* Select and instantiate the deinterlacing method implementation.
+ *
+ * Reuses the current method object when it already matches @method and
+ * supports the current format/size; otherwise the old method is
+ * unparented and a new one is created.  If the requested method cannot
+ * handle the current format, falls back to the first entry in
+ * _method_types[] that can. */
+static void
+gst_deinterlace_set_method (GstDeinterlace * self, GstDeinterlaceMethods method)
+{
+  GType method_type;
+  gint width, height;
+  GstVideoFormat format;
+
+  GST_DEBUG_OBJECT (self, "Setting new method %d", method);
+
+  width = GST_VIDEO_INFO_WIDTH (&self->vinfo);
+  height = GST_VIDEO_INFO_HEIGHT (&self->vinfo);
+  format = GST_VIDEO_INFO_FORMAT (&self->vinfo);
+
+  if (self->method) {
+    /* Early-out: keep the existing method object if it still fits */
+    if (self->method_id == method &&
+        gst_deinterlace_method_supported (G_TYPE_FROM_INSTANCE (self->method),
+            format, width, height)) {
+      GST_DEBUG_OBJECT (self, "Reusing current method");
+      return;
+    }
+#if 0
+    gst_child_proxy_child_removed (GST_OBJECT (self),
+        GST_OBJECT (self->method));
+#endif
+    gst_object_unparent (GST_OBJECT (self->method));
+    self->method = NULL;
+  }
+
+  method_type =
+      _method_types[method].get_type !=
+      NULL ? _method_types[method].get_type () : G_TYPE_INVALID;
+  if (method_type == G_TYPE_INVALID
+      || !gst_deinterlace_method_supported (method_type, format,
+          width, height)) {
+    GType tmp;
+    gint i;
+
+    method_type = G_TYPE_INVALID;
+
+    /* Requested method can't handle this format: pick the first one
+     * that can.  @method is updated so self->method_id stays accurate. */
+    GST_WARNING_OBJECT (self, "Method doesn't support requested format");
+    for (i = 0; i < G_N_ELEMENTS (_method_types); i++) {
+      if (_method_types[i].get_type == NULL)
+        continue;
+      tmp = _method_types[i].get_type ();
+      if (gst_deinterlace_method_supported (tmp, format, width, height)) {
+        GST_DEBUG_OBJECT (self, "Using method %d", i);
+        method_type = tmp;
+        method = i;
+        break;
+      }
+    }
+    /* If we get here we must have invalid caps! */
+    g_assert (method_type != G_TYPE_INVALID);
+  }
+
+  self->method = g_object_new (method_type, "name", "method", NULL);
+  self->method_id = method;
+
+  /* Parent the method object to the element for lifetime management */
+  gst_object_set_parent (GST_OBJECT (self->method), GST_OBJECT (self));
+#if 0
+  gst_child_proxy_child_added (GST_OBJECT (self), GST_OBJECT (self->method));
+#endif
+
+  if (self->method)
+    gst_deinterlace_method_setup (self->method, &self->vinfo);
+}
+
+/* Clip @buffer's timestamp/duration against the configured segment.
+ *
+ * Returns FALSE when the buffer lies completely outside the segment and
+ * should be dropped.  Buffers without a valid timestamp, or segments not
+ * in TIME format, pass through untouched (TRUE). */
+static gboolean
+gst_deinterlace_clip_buffer (GstDeinterlace * self, GstBuffer * buffer)
+{
+  gboolean ret = TRUE;
+  GstClockTime start, stop;
+  guint64 cstart, cstop;
+
+  GST_DEBUG_OBJECT (self,
+      "Clipping buffer to the current segment: %" GST_TIME_FORMAT " -- %"
+      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
+  GST_DEBUG_OBJECT (self, "Current segment: %" GST_SEGMENT_FORMAT,
+      &self->segment);
+
+  if (G_UNLIKELY (self->segment.format != GST_FORMAT_TIME))
+    goto beach;
+  if (G_UNLIKELY (!GST_BUFFER_TIMESTAMP_IS_VALID (buffer)))
+    goto beach;
+
+  start = GST_BUFFER_TIMESTAMP (buffer);
+  /* NOTE(review): if the buffer carries no duration this computes
+   * start + GST_CLOCK_TIME_NONE and wraps -- presumably buffers always
+   * have a duration by this point; confirm against callers. */
+  stop = start + GST_BUFFER_DURATION (buffer);
+
+  if (!(ret = gst_segment_clip (&self->segment, GST_FORMAT_TIME,
+              start, stop, &cstart, &cstop)))
+    goto beach;
+
+  GST_BUFFER_TIMESTAMP (buffer) = cstart;
+  if (GST_CLOCK_TIME_IS_VALID (cstop))
+    GST_BUFFER_DURATION (buffer) = cstop - cstart;
+
+beach:
+  if (ret)
+    GST_DEBUG_OBJECT (self,
+        "Clipped buffer to the current segment: %" GST_TIME_FORMAT " -- %"
+        GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+        GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)));
+  else
+    GST_DEBUG_OBJECT (self, "Buffer outside the current segment -- dropping");
+
+  return ret;
+}
+
+static void
+gst_deinterlace_class_init (GstDeinterlaceClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ GstElementClass *element_class = (GstElementClass *) klass;
+
+ gst_element_class_add_static_pad_template (element_class, &src_templ);
+ gst_element_class_add_static_pad_template (element_class, &sink_templ);
+
+ gst_element_class_set_static_metadata (element_class,
+ "Deinterlacer",
+ "Filter/Effect/Video/Deinterlace",
+ "Deinterlace Methods ported from DScaler/TvTime",
+ "Martin Eikermann <meiker@upb.de>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gobject_class->set_property = gst_deinterlace_set_property;
+ gobject_class->get_property = gst_deinterlace_get_property;
+ gobject_class->finalize = gst_deinterlace_finalize;
+
+ /**
+ * GstDeinterlace:mode:
+ *
+ * This selects whether the deinterlacing methods should
+ * always be applied or if they should only be applied
+ * on content that has the "interlaced" flag on the caps.
+ */
+ g_object_class_install_property (gobject_class, PROP_MODE,
+ g_param_spec_enum ("mode",
+ "Mode",
+ "Deinterlace Mode",
+ GST_TYPE_DEINTERLACE_MODES,
+ DEFAULT_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ /**
+ * GstDeinterlace:method:
+ *
+ * Selects the different deinterlacing algorithms that can be used.
+ * These provide different quality and CPU usage.
+ *
+ * Some methods provide parameters which can be set by getting
+ * the "method" child via the #GstChildProxy interface and
+ * setting the appropriate properties on it.
+ *
+ * * tomsmocomp Motion Adaptive: Motion Search
+ * * greedyh Motion Adaptive: Advanced Detection
+ * * greedyl Motion Adaptive: Simple Detection
+ * * vfir Blur vertical
+ * * linear Linear interpolation
+ * * linearblend Linear interpolation in time domain.
+ * Any motion causes significant ghosting, so this
+ * method should not be used.
+ * * scalerbob Double lines
+ * * weave Weave. Bad quality, do not use.
+ * * weavetff Progressive: Top Field First. Bad quality, do not use.
+ * * weavebff Progressive: Bottom Field First. Bad quality, do not use.
+ * * yadif YADIF Adaptive.
+ */
+ g_object_class_install_property (gobject_class, PROP_METHOD,
+ g_param_spec_enum ("method",
+ "Method",
+ "Deinterlace Method",
+ GST_TYPE_DEINTERLACE_METHODS,
+ DEFAULT_METHOD, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ /**
+ * GstDeinterlace:fields:
+ *
+ * This selects which fields should be output. If "all" is selected
+ * the output framerate will be double.
+ */
+ g_object_class_install_property (gobject_class, PROP_FIELDS,
+ g_param_spec_enum ("fields",
+ "fields",
+ "Fields to use for deinterlacing",
+ GST_TYPE_DEINTERLACE_FIELDS,
+ DEFAULT_FIELDS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ /**
+ * GstDeinterlace:layout:
+ *
+ * This selects which fields is the first in time.
+ *
+ */
+ g_object_class_install_property (gobject_class, PROP_FIELD_LAYOUT,
+ g_param_spec_enum ("tff",
+ "tff",
+ "Deinterlace top field first",
+ GST_TYPE_DEINTERLACE_FIELD_LAYOUT,
+ DEFAULT_FIELD_LAYOUT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ /**
+ * GstDeinterlace:locking:
+ *
+ * This selects which approach to pattern locking is used which affects
+ * processing latency and accuracy of timestamp adjustment for telecine
+ * streams.
+ */
+ g_object_class_install_property (gobject_class, PROP_LOCKING,
+ g_param_spec_enum ("locking", "locking", "Pattern locking mode",
+ GST_TYPE_DEINTERLACE_LOCKING, DEFAULT_LOCKING,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDeinterlace:ignore-obscure:
+ *
+ * This selects whether to ignore obscure/rare telecine patterns.
+ * NTSC 2:3 pulldown variants are the only really common patterns.
+ */
+ g_object_class_install_property (gobject_class, PROP_IGNORE_OBSCURE,
+ g_param_spec_boolean ("ignore-obscure", "ignore-obscure",
+ "Ignore obscure telecine patterns (only consider P, I and 2:3 "
+ "variants).", DEFAULT_IGNORE_OBSCURE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstDeinterlace:drop-orphans:
+ *
+ * This selects whether to drop orphan fields at the beginning of telecine
+ * patterns in active locking mode.
+ */
+ g_object_class_install_property (gobject_class, PROP_DROP_ORPHANS,
+ g_param_spec_boolean ("drop-orphans", "drop-orphans",
+ "Drop orphan fields at the beginning of telecine patterns in "
+ "active locking mode.", DEFAULT_DROP_ORPHANS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_deinterlace_change_state);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_DEINTERLACE_METHODS, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_DEINTERLACE_FIELDS, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_DEINTERLACE_FIELD_LAYOUT, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_DEINTERLACE_MODES, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_DEINTERLACE_LOCKING, 0);
+}
+
+#if 0
+static GstObject *
+gst_deinterlace_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
+ guint index)
+{
+ GstDeinterlace *self = GST_DEINTERLACE (child_proxy);
+
+ g_return_val_if_fail (index == 0, NULL);
+
+ return gst_object_ref (self->method);
+}
+
+static guint
+gst_deinterlace_child_proxy_get_children_count (GstChildProxy * child_proxy)
+{
+ GstDeinterlace *self = GST_DEINTERLACE (child_proxy);
+
+ return ((self->method) ? 1 : 0);
+}
+
+static void
+gst_deinterlace_child_proxy_interface_init (gpointer g_iface,
+ gpointer iface_data)
+{
+ GstChildProxyInterface *iface = g_iface;
+
+ iface->get_child_by_index = gst_deinterlace_child_proxy_get_child_by_index;
+ iface->get_children_count = gst_deinterlace_child_proxy_get_children_count;
+}
+#endif
+
+static void
+gst_deinterlace_init (GstDeinterlace * self)
+{
+ self->sinkpad = gst_pad_new_from_static_template (&sink_templ, "sink");
+ gst_pad_set_chain_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_deinterlace_chain));
+ gst_pad_set_event_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_deinterlace_sink_event));
+ gst_pad_set_query_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_deinterlace_sink_query));
+ gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+ self->srcpad = gst_pad_new_from_static_template (&src_templ, "src");
+ gst_pad_set_event_function (self->srcpad,
+ GST_DEBUG_FUNCPTR (gst_deinterlace_src_event));
+ gst_pad_set_query_function (self->srcpad,
+ GST_DEBUG_FUNCPTR (gst_deinterlace_src_query));
+ gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
+
+ self->mode = DEFAULT_MODE;
+ self->user_set_method_id = DEFAULT_METHOD;
+ gst_video_info_init (&self->vinfo);
+ gst_video_info_init (&self->vinfo_out);
+ gst_deinterlace_set_method (self, self->user_set_method_id);
+ self->fields = DEFAULT_FIELDS;
+ self->user_set_fields = DEFAULT_FIELDS;
+ self->field_layout = DEFAULT_FIELD_LAYOUT;
+ self->locking = DEFAULT_LOCKING;
+ self->ignore_obscure = DEFAULT_IGNORE_OBSCURE;
+ self->drop_orphans = DEFAULT_DROP_ORPHANS;
+
+ self->low_latency = -1;
+ self->pattern = -1;
+ self->pattern_phase = -1;
+ self->pattern_count = 0;
+ self->output_count = 0;
+ self->pattern_base_ts = GST_CLOCK_TIME_NONE;
+ self->pattern_buf_dur = GST_CLOCK_TIME_NONE;
+ self->still_frame_mode = FALSE;
+ self->telecine_tc_warned = FALSE;
+
+ gst_deinterlace_reset (self);
+}
+
+/* Heap-allocate a GstVideoFrame and map @buffer into it.
+ * Returns NULL (after logging a critical via g_return_val_if_reached)
+ * when mapping fails.  Pair with gst_video_frame_unmap_and_free(). */
+static GstVideoFrame *
+gst_video_frame_new_and_map (GstVideoInfo * vinfo, GstBuffer * buffer,
+    GstMapFlags flags)
+{
+  GstVideoFrame *frame = g_malloc0 (sizeof (GstVideoFrame));
+  if (!gst_video_frame_map (frame, vinfo, buffer, flags)) {
+    g_free (frame);
+    g_return_val_if_reached (NULL);
+    /* not reached: g_return_val_if_reached() already returned NULL */
+    return NULL;
+  }
+  return frame;
+}
+
+/* Unmap and release a frame created by gst_video_frame_new_and_map(). */
+static void
+gst_video_frame_unmap_and_free (GstVideoFrame * frame)
+{
+  gst_video_frame_unmap (frame);
+  g_free (frame);
+}
+
+/* Remove and return the oldest field from the history (stored at the
+ * highest index).  Caller takes ownership of the returned frame; its
+ * associated metadata must be released separately (see pop_and_clear).
+ *
+ * When pattern locking is enabled, also advances the buffer-state and
+ * telecine-pattern bookkeeping -- but only when the popped field belongs
+ * to a different buffer than the new last entry (two fields of the same
+ * buffer share the same plane data pointer). */
+static GstVideoFrame *
+gst_deinterlace_pop_history (GstDeinterlace * self)
+{
+  GstVideoFrame *frame;
+
+  g_return_val_if_fail (self->history_count > 0, NULL);
+
+  GST_DEBUG_OBJECT (self, "Pop last history frame -- current history size %d",
+      self->history_count);
+
+  frame = self->field_history[self->history_count - 1].frame;
+
+  self->history_count--;
+  if (self->locking != GST_DEINTERLACE_LOCKING_NONE && (!self->history_count
+          || GST_VIDEO_FRAME_PLANE_DATA (frame, 0) !=
+          GST_VIDEO_FRAME_PLANE_DATA (self->field_history[self->history_count -
+                  1].frame, 0))) {
+    if (!self->low_latency)
+      self->state_count--;
+    if (self->pattern_lock) {
+      /* wrap the phase counters at the end of a full pattern repeat */
+      self->pattern_count++;
+      if (self->pattern != -1
+          && self->pattern_count >= telecine_patterns[self->pattern].length) {
+        self->pattern_count = 0;
+        self->output_count = 0;
+      }
+    }
+  }
+
+  GST_DEBUG_OBJECT (self, "Returning frame: %p %" GST_TIME_FORMAT
+      " with duration %" GST_TIME_FORMAT " and size %" G_GSIZE_FORMAT, frame,
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (frame->buffer)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (frame->buffer)),
+      GST_VIDEO_FRAME_SIZE (frame));
+
+  return frame;
+}
+
+/* Release the timecode and caption metadata attached to history slot
+ * @idx.  Only slots that still hold a mapped frame are touched; the
+ * frame itself is not freed here. */
+static void
+gst_deinterlace_delete_meta_at (GstDeinterlace * self, gint idx)
+{
+  GstDeinterlaceField *field = &self->field_history[idx];
+
+  if (field->frame == NULL)
+    return;
+
+  if (field->tc != NULL) {
+    gst_video_time_code_free (field->tc);
+    field->tc = NULL;
+  }
+
+  if (field->caption != NULL) {
+    g_free (field->caption->data);
+    g_free (field->caption);
+    field->caption = NULL;
+  }
+}
+
+/* Drop the oldest history field entirely: release its metadata, then
+ * unmap and free the frame.  No-op when the history is empty. */
+static void
+gst_deinterlace_pop_and_clear (GstDeinterlace * self)
+{
+  gint idx;
+
+  if (self->history_count <= 0)
+    return;
+
+  /* meta must go first -- delete_meta_at() only acts while the slot
+   * still holds its frame */
+  idx = self->history_count - 1;
+  gst_deinterlace_delete_meta_at (self, idx);
+
+  /* FIXME: pop_history should return a structure with the frame and its meta.
+   * Currently we're just doing guesswork with the indices. Maybe just
+   * refactor the history functionality to make something clearer */
+  gst_video_frame_unmap_and_free (gst_deinterlace_pop_history (self));
+}
+
+/* Flush or drop all fields from the history and clear the pattern state.
+ *
+ * When @drop_all is FALSE the history is first drained through
+ * gst_deinterlace_output_frame() so pending fields still get output; on
+ * error or flushing it falls back to dropping everything.
+ *
+ * Fix: release each slot's metadata *before* clearing the frame pointer.
+ * gst_deinterlace_delete_meta_at() is a no-op for slots whose frame is
+ * NULL, so the previous order (NULL the frame, then delete meta) leaked
+ * the timecode and caption of every dropped field. */
+static void
+gst_deinterlace_reset_history (GstDeinterlace * self, gboolean drop_all)
+{
+  gint i;
+
+  if (!drop_all) {
+    GST_DEBUG_OBJECT (self, "Flushing history (count %d)", self->history_count);
+    while (self->history_count > 0) {
+      if (gst_deinterlace_output_frame (self, TRUE) != GST_FLOW_OK) {
+        /* Encountered error, or flushing -> skip and drop all remaining */
+        drop_all = TRUE;
+        break;
+      }
+    }
+  }
+  if (drop_all) {
+    GST_DEBUG_OBJECT (self, "Resetting history (count %d)",
+        self->history_count);
+
+    for (i = 0; i < self->history_count; i++) {
+      if (self->field_history[i].frame) {
+        /* free tc/caption while the frame pointer is still set */
+        gst_deinterlace_delete_meta_at (self, i);
+        gst_video_frame_unmap_and_free (self->field_history[i].frame);
+        self->field_history[i].frame = NULL;
+      }
+    }
+  }
+  memset (self->field_history, 0,
+      GST_DEINTERLACE_MAX_FIELD_HISTORY * sizeof (GstDeinterlaceField));
+  self->history_count = 0;
+  memset (self->buf_states, 0,
+      GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY *
+      sizeof (GstDeinterlaceBufferState));
+  self->state_count = 0;
+  self->pattern_lock = FALSE;
+  self->pattern_refresh = TRUE;
+  self->cur_field_idx = -1;
+
+  if (!self->still_frame_mode && self->last_buffer) {
+    gst_buffer_unref (self->last_buffer);
+    self->last_buffer = NULL;
+  }
+}
+
+/* Reset the element's internal state: video info, segment, history, QoS
+ * and allocation.  Also applies any mode/fields change that was deferred
+ * by set_property while caps were configured (new_mode/new_fields). */
+static void
+gst_deinterlace_reset (GstDeinterlace * self)
+{
+  GST_DEBUG_OBJECT (self, "Resetting internal state");
+
+  gst_video_info_init (&self->vinfo);
+  gst_video_info_init (&self->vinfo_out);
+
+  self->passthrough = FALSE;
+
+  /* -1 marks "no pending change"; see gst_deinterlace_set_property() */
+  self->reconfigure = FALSE;
+  if ((gint) self->new_mode != -1)
+    self->mode = self->new_mode;
+  if ((gint) self->new_fields != -1)
+    self->user_set_fields = self->new_fields;
+  self->new_mode = -1;
+  self->new_fields = -1;
+
+  gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
+
+  if (self->request_caps)
+    gst_caps_unref (self->request_caps);
+  self->request_caps = NULL;
+
+  gst_deinterlace_reset_history (self, TRUE);
+
+  gst_deinterlace_reset_qos (self);
+
+  self->need_more = FALSE;
+  self->have_eos = FALSE;
+
+  self->discont = TRUE;
+  self->telecine_tc_warned = FALSE;
+
+  gst_deinterlace_set_allocation (self, NULL, NULL, NULL);
+}
+
+/* GObject::set_property.
+ *
+ * "mode" and "fields" take effect immediately when no caps are
+ * negotiated yet; otherwise the new value is stored in new_mode /
+ * new_fields (under the object lock) and reconfigure is set so the
+ * change is applied at the next renegotiation. */
+static void
+gst_deinterlace_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstDeinterlace *self;
+
+  self = GST_DEINTERLACE (object);
+
+  switch (prop_id) {
+    case PROP_MODE:{
+      gint new_mode;
+
+      GST_OBJECT_LOCK (self);
+      new_mode = g_value_get_enum (value);
+      /* defer the change while caps are live */
+      if (self->mode != new_mode && gst_pad_has_current_caps (self->srcpad)) {
+        self->reconfigure = TRUE;
+        self->new_mode = new_mode;
+      } else {
+        self->mode = new_mode;
+      }
+      GST_OBJECT_UNLOCK (self);
+      break;
+    }
+    case PROP_METHOD:
+      self->user_set_method_id = g_value_get_enum (value);
+      gst_deinterlace_set_method (self, self->user_set_method_id);
+      break;
+    case PROP_FIELDS:{
+      gint new_fields;
+
+      GST_OBJECT_LOCK (self);
+      new_fields = g_value_get_enum (value);
+      /* defer the change while caps are live */
+      if (self->user_set_fields != new_fields
+          && gst_pad_has_current_caps (self->srcpad)) {
+        self->reconfigure = TRUE;
+        self->new_fields = new_fields;
+      } else {
+        self->user_set_fields = new_fields;
+      }
+      GST_OBJECT_UNLOCK (self);
+      break;
+    }
+    case PROP_FIELD_LAYOUT:
+      self->field_layout = g_value_get_enum (value);
+      break;
+    case PROP_LOCKING:
+      self->locking = g_value_get_enum (value);
+      break;
+    case PROP_IGNORE_OBSCURE:
+      self->ignore_obscure = g_value_get_boolean (value);
+      break;
+    case PROP_DROP_ORPHANS:
+      self->drop_orphans = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+  }
+
+}
+
+/* GObject::get_property implementation.  Reports the currently applied
+ * values; a pending new_mode/new_fields from a deferred reconfigure (see
+ * set_property above) is not reflected here. */
+static void
+gst_deinterlace_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstDeinterlace *self;
+
+  self = GST_DEINTERLACE (object);
+
+  switch (prop_id) {
+    case PROP_MODE:
+      g_value_set_enum (value, self->mode);
+      break;
+    case PROP_METHOD:
+      g_value_set_enum (value, self->user_set_method_id);
+      break;
+    case PROP_FIELDS:
+      g_value_set_enum (value, self->user_set_fields);
+      break;
+    case PROP_FIELD_LAYOUT:
+      g_value_set_enum (value, self->field_layout);
+      break;
+    case PROP_LOCKING:
+      g_value_set_enum (value, self->locking);
+      break;
+    case PROP_IGNORE_OBSCURE:
+      g_value_set_boolean (value, self->ignore_obscure);
+      break;
+    case PROP_DROP_ORPHANS:
+      g_value_set_boolean (value, self->drop_orphans);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
+  }
+}
+
+/* GObject finalize: release everything gst_deinterlace_reset() covers,
+ * then unparent the deinterlacing method object and chain up. */
+static void
+gst_deinterlace_finalize (GObject * object)
+{
+  GstDeinterlace *self = GST_DEINTERLACE (object);
+
+  gst_deinterlace_reset (self);
+
+  if (self->method) {
+    /* the method is parented to us (a GstObject), so unparent rather
+     * than plain unref */
+    gst_object_unparent (GST_OBJECT (self->method));
+    self->method = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Start a new repeat of the locked telecine pattern: record the base
+ * timestamp of the current buffer and derive the per-output-buffer
+ * duration from the pattern's ratio. */
+static void
+gst_deinterlace_update_pattern_timestamps (GstDeinterlace * self)
+{
+  gint state_idx;
+  if (self->low_latency) {
+    /* in low-latency mode the buffer state history contains old buffer
+     * states as well as the current one and perhaps some future ones.
+     * the current buffer's state is given by the number of field pairs
+     * rounded up, minus 1. the below is equivalent */
+    state_idx = (self->history_count - 1) >> 1;
+  } else {
+    /* in high-latency mode state_count - 1 is the current buffer's state */
+    state_idx = self->state_count - 1;
+  }
+
+  self->pattern_base_ts = self->buf_states[state_idx].timestamp;
+  if (self->buf_states[state_idx].state != GST_RFF) {
+    /* scale the input buffer duration by the pattern's ratio_d/ratio_n */
+    self->pattern_buf_dur =
+        (self->buf_states[state_idx].duration *
+        telecine_patterns[self->pattern].ratio_d) /
+        telecine_patterns[self->pattern].ratio_n;
+  } else {
+    /* NOTE(review): the extra 2/3 factor presumably compensates for an
+     * RFF buffer carrying three fields instead of two — confirm */
+    self->pattern_buf_dur =
+        (self->buf_states[state_idx].duration *
+        telecine_patterns[self->pattern].ratio_d * 2) /
+        (telecine_patterns[self->pattern].ratio_n * 3);
+  }
+  GST_DEBUG_OBJECT (self,
+      "Starting a new pattern repeat with base ts %" GST_TIME_FORMAT
+      " and dur %" GST_TIME_FORMAT, GST_TIME_ARGS (self->pattern_base_ts),
+      GST_TIME_ARGS (self->pattern_buf_dur));
+}
+
+/* Classify @frame into a GST_DEINTERLACE_BUFFER_STATE_* value and report
+ * the effective interlace mode.  Either output pointer may be NULL.  The
+ * "interlaced" element mode forces INTERLEAVED regardless of the mode
+ * signalled by the frame's info. */
+static void
+gst_deinterlace_get_buffer_state (GstDeinterlace * self, GstVideoFrame * frame,
+    guint8 * state, GstVideoInterlaceMode * i_mode)
+{
+  GstVideoInterlaceMode interlacing_mode;
+
+  /* nothing requested, nothing to do */
+  if (!(i_mode || state))
+    return;
+
+  interlacing_mode = GST_VIDEO_INFO_INTERLACE_MODE (&frame->info);
+  if (self->mode == GST_DEINTERLACE_MODE_INTERLACED)
+    interlacing_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+
+  if (state) {
+    if (interlacing_mode == GST_VIDEO_INTERLACE_MODE_MIXED ||
+        interlacing_mode == GST_VIDEO_INTERLACE_MODE_ALTERNATE) {
+      /* telecine-capable modes: distinguish RFF, single-field, mixed
+       * (interlaced) and progressive buffers via the frame flags */
+      if (GST_VIDEO_FRAME_IS_RFF (frame)) {
+        *state = GST_DEINTERLACE_BUFFER_STATE_RFF;
+      } else if (GST_VIDEO_FRAME_IS_ONEFIELD (frame)) {
+        /* tc top if tff, tc bottom otherwise */
+        if (GST_VIDEO_FRAME_IS_TFF (frame)) {
+          *state = GST_DEINTERLACE_BUFFER_STATE_TC_T;
+        } else {
+          *state = GST_DEINTERLACE_BUFFER_STATE_TC_B;
+        }
+      } else if (GST_VIDEO_FRAME_IS_INTERLACED (frame)) {
+        *state = GST_DEINTERLACE_BUFFER_STATE_TC_M;
+      } else {
+        *state = GST_DEINTERLACE_BUFFER_STATE_TC_P;
+      }
+    } else {
+      /* plain interlaced or progressive buffer */
+      if (interlacing_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) {
+        *state = GST_DEINTERLACE_BUFFER_STATE_I;
+      } else {
+        *state = GST_DEINTERLACE_BUFFER_STATE_P;
+      }
+    }
+  }
+
+  if (i_mode)
+    *i_mode = interlacing_mode;
+}
+
+/* Short debug-string helpers for buffer states and interlace modes,
+ * used in the GST_DEBUG/GST_LOG statements below. */
+#define STATE_TO_STRING(s) ((s) == GST_DEINTERLACE_BUFFER_STATE_P ? "P" : \
+    (s) == GST_DEINTERLACE_BUFFER_STATE_I ? "I" : \
+    (s) == GST_DEINTERLACE_BUFFER_STATE_TC_B ? "B" : \
+    (s) == GST_DEINTERLACE_BUFFER_STATE_TC_T ? "T" : \
+    (s) == GST_DEINTERLACE_BUFFER_STATE_TC_P ? "TCP" : \
+    (s) == GST_DEINTERLACE_BUFFER_STATE_TC_M ? "TCM" : "RFF")
+
+#define MODE_TO_STRING(m) ((m) == GST_VIDEO_INTERLACE_MODE_MIXED ? "MIXED" : \
+    (m) == GST_VIDEO_INTERLACE_MODE_INTERLEAVED ? "I" : \
+    (m) == GST_VIDEO_INTERLACE_MODE_ALTERNATE ? "A" : \
+    (m) == GST_VIDEO_INTERLACE_MODE_FIELDS ? "FIELDS" : "P")
+
+/* Split @buffer (ownership transferred to us) into one or two read-mapped
+ * field views and prepend them to the field history; record the buffer's
+ * state for telecine pattern locking; copy time-code and closed-caption
+ * metas onto the new field entries; and remember the buffer in
+ * last_buffer.
+ *
+ * Fixes vs. previous revision:
+ *  - the early return on history overflow dropped our reference on
+ *    @buffer without unreffing it (leak);
+ *  - last_buffer now takes over our reference directly instead of the
+ *    unref-then-ref sequence that only stayed safe because the field
+ *    maps hold their own references on the buffer;
+ *  - the obscure XOR swap of the field flags is a plain temp swap. */
+static void
+gst_deinterlace_push_history (GstDeinterlace * self, GstBuffer * buffer)
+{
+  int i = 1;
+  GstDeinterlaceFieldLayout field_layout = self->field_layout;
+  gboolean tff;
+  gboolean onefield;
+  GstVideoFrame *frame = NULL;
+  GstVideoFrame *field1, *field2 = NULL;
+  guint fields_to_push;
+  guint field1_flags, field2_flags;
+  GstVideoInterlaceMode interlacing_mode;
+  guint8 buf_state;
+
+  /* we will only read from this buffer and write into fresh output buffers
+   * if this is not the case, change the map flags as appropriate
+   */
+  frame = gst_video_frame_new_and_map (&self->vinfo, buffer, GST_MAP_READ);
+
+  tff = GST_VIDEO_FRAME_IS_TFF (frame);
+  onefield = GST_VIDEO_FRAME_IS_ONEFIELD (frame);
+  fields_to_push = (onefield) ? 1 : 2;
+
+  if (G_UNLIKELY (self->history_count >=
+          GST_DEINTERLACE_MAX_FIELD_HISTORY - fields_to_push)) {
+    GST_WARNING_OBJECT (self, "history count exceeded limit");
+    gst_video_frame_unmap_and_free (frame);
+    /* we own @buffer: returning without this unref leaked the buffer */
+    gst_buffer_unref (buffer);
+    return;
+  }
+
+  gst_deinterlace_get_buffer_state (self, frame, &buf_state, &interlacing_mode);
+
+  GST_DEBUG_OBJECT (self,
+      "Pushing new frame as %d fields to the history (count before %d): ptr %p at %"
+      GST_TIME_FORMAT " with duration %" GST_TIME_FORMAT
+      ", size %" G_GSIZE_FORMAT ", state %s, interlacing mode %s",
+      fields_to_push, self->history_count, frame,
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)),
+      gst_buffer_get_size (buffer),
+      STATE_TO_STRING (buf_state), MODE_TO_STRING (interlacing_mode));
+
+  /* move up for new state */
+  memmove (&self->buf_states[1], &self->buf_states[0],
+      (GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY - 1) *
+      sizeof (GstDeinterlaceBufferState));
+  self->buf_states[0].state = buf_state;
+  self->buf_states[0].timestamp = GST_BUFFER_TIMESTAMP (buffer);
+  self->buf_states[0].duration = GST_BUFFER_DURATION (buffer);
+  if (self->state_count < GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY)
+    self->state_count++;
+
+  /* shift the existing field history up to make room at index 0/1 */
+  for (i = GST_DEINTERLACE_MAX_FIELD_HISTORY - 1; i >= fields_to_push; i--) {
+    self->field_history[i].frame =
+        self->field_history[i - fields_to_push].frame;
+    self->field_history[i].flags =
+        self->field_history[i - fields_to_push].flags;
+    self->field_history[i].tc = self->field_history[i - fields_to_push].tc;
+    self->field_history[i].caption =
+        self->field_history[i - fields_to_push].caption;
+    self->field_history[i - fields_to_push].frame = NULL;
+    self->field_history[i - fields_to_push].tc = NULL;
+    self->field_history[i - fields_to_push].caption = NULL;
+  }
+
+  if (field_layout == GST_DEINTERLACE_LAYOUT_AUTO) {
+    if (!GST_VIDEO_INFO_IS_INTERLACED (&self->vinfo)) {
+      GST_WARNING_OBJECT (self, "Can't detect field layout -- assuming TFF");
+      field_layout = GST_DEINTERLACE_LAYOUT_TFF;
+    } else if (tff) {
+      field_layout = GST_DEINTERLACE_LAYOUT_TFF;
+    } else {
+      field_layout = GST_DEINTERLACE_LAYOUT_BFF;
+    }
+  }
+
+  /* two independent read maps of the same buffer; one per field */
+  field1 = frame;
+  field2 = gst_video_frame_new_and_map (&self->vinfo, buffer, GST_MAP_READ);
+  if (field_layout == GST_DEINTERLACE_LAYOUT_TFF) {
+    GST_DEBUG_OBJECT (self, "Top field first");
+    field1_flags = PICTURE_INTERLACED_TOP;
+    field2_flags = PICTURE_INTERLACED_BOTTOM;
+  } else {
+    GST_DEBUG_OBJECT (self, "Bottom field first");
+    field1_flags = PICTURE_INTERLACED_BOTTOM;
+    field2_flags = PICTURE_INTERLACED_TOP;
+  }
+
+  /* Swap for reverse playback */
+  if (self->segment.rate < 0) {
+    guint tmp_flags = field1_flags;
+
+    field1_flags = field2_flags;
+    field2_flags = tmp_flags;
+  }
+
+  if (!onefield) {
+    GstVideoTimeCodeMeta *meta = gst_buffer_get_video_time_code_meta (buffer);
+    GstVideoCaptionMeta *cc_meta = gst_buffer_get_video_caption_meta (buffer);
+
+    GST_DEBUG_OBJECT (self, "Two fields");
+    self->field_history[1].frame = field1;
+    self->field_history[1].flags = field1_flags;
+
+    self->field_history[0].frame = field2;
+    self->field_history[0].flags = field2_flags;
+
+    /* each field gets its own copy of the time code, with the interlaced
+     * flag cleared since a single field is not interlaced */
+    if (meta) {
+      self->field_history[0].tc = gst_video_time_code_copy (&meta->tc);
+      self->field_history[0].tc->config.flags &=
+          ~GST_VIDEO_TIME_CODE_FLAGS_INTERLACED;
+      self->field_history[1].tc = gst_video_time_code_copy (&meta->tc);
+      self->field_history[1].tc->config.flags &=
+          ~GST_VIDEO_TIME_CODE_FLAGS_INTERLACED;
+    }
+
+    /* likewise, duplicate any closed caption data onto both fields */
+    if (cc_meta) {
+      self->field_history[0].caption = g_new (GstVideoCaptionMeta, 1);
+      self->field_history[0].caption->data = g_malloc (cc_meta->size);
+      self->field_history[0].caption->caption_type = cc_meta->caption_type;
+      self->field_history[0].caption->size = cc_meta->size;
+      memcpy (self->field_history[0].caption->data, cc_meta->data,
+          cc_meta->size);
+      self->field_history[1].caption = g_new (GstVideoCaptionMeta, 1);
+      self->field_history[1].caption->data = g_malloc (cc_meta->size);
+      self->field_history[1].caption->caption_type = cc_meta->caption_type;
+      self->field_history[1].caption->size = cc_meta->size;
+      memcpy (self->field_history[1].caption->data, cc_meta->data,
+          cc_meta->size);
+    }
+  } else {                      /* onefield */
+    GstVideoTimeCodeMeta *meta = gst_buffer_get_video_time_code_meta (buffer);
+    GstVideoCaptionMeta *cc_meta = gst_buffer_get_video_caption_meta (buffer);
+
+    GST_DEBUG_OBJECT (self, "One field");
+    self->field_history[0].frame = field1;
+    self->field_history[0].flags = field1_flags;
+    if (meta) {
+      self->field_history[0].tc = gst_video_time_code_copy (&meta->tc);
+      self->field_history[0].tc->config.flags &=
+          ~GST_VIDEO_TIME_CODE_FLAGS_INTERLACED;
+    }
+
+    if (cc_meta) {
+      self->field_history[0].caption = g_new (GstVideoCaptionMeta, 1);
+      self->field_history[0].caption->data = g_malloc (cc_meta->size);
+      self->field_history[0].caption->caption_type = cc_meta->caption_type;
+      self->field_history[0].caption->size = cc_meta->size;
+      memcpy (self->field_history[0].caption->data, cc_meta->data,
+          cc_meta->size);
+    }
+    /* only one field needed: drop the second map */
+    gst_video_frame_unmap_and_free (field2);
+  }
+
+  self->history_count += fields_to_push;
+  self->cur_field_idx += fields_to_push;
+
+  GST_DEBUG_OBJECT (self, "Pushed buffer -- current history size %d, index %d",
+      self->history_count, self->cur_field_idx);
+
+  if (self->last_buffer)
+    gst_buffer_unref (self->last_buffer);
+  /* hand our reference on @buffer over to last_buffer; net refcount is
+   * the same as the old unref-then-ref, without the window in which the
+   * buffer was only kept alive by the field maps */
+  self->last_buffer = buffer;
+}
+
+/* Store a new QoS observation under the object lock (pairs with
+ * gst_deinterlace_read_qos() below).  When we are running late
+ * (diff > 0) the earliest time is pushed further into the future by
+ * 2 * diff plus one output frame duration so enough frames get dropped
+ * to catch up; otherwise it tracks timestamp + diff. */
+static void
+gst_deinterlace_update_qos (GstDeinterlace * self, gdouble proportion,
+    GstClockTimeDiff diff, GstClockTime timestamp)
+{
+  GST_DEBUG_OBJECT (self,
+      "Updating QoS: proportion %lf, diff %" GST_STIME_FORMAT ", timestamp %"
+      GST_TIME_FORMAT, proportion, GST_STIME_ARGS (diff),
+      GST_TIME_ARGS (timestamp));
+
+  GST_OBJECT_LOCK (self);
+  self->proportion = proportion;
+  if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
+    if (G_UNLIKELY (diff > 0))
+      /* one output frame is one field duration in ALL mode, two
+       * otherwise */
+      self->earliest_time =
+          timestamp + 2 * diff + ((self->fields ==
+              GST_DEINTERLACE_ALL) ? self->field_duration : 2 *
+          self->field_duration);
+    else
+      self->earliest_time = timestamp + diff;
+  } else {
+    self->earliest_time = GST_CLOCK_TIME_NONE;
+  }
+  GST_OBJECT_UNLOCK (self);
+}
+
+/* Forget all QoS information: zero the processed/dropped statistics and
+ * install a neutral observation (proportion 0.5, no earliest time). */
+static void
+gst_deinterlace_reset_qos (GstDeinterlace * self)
+{
+  self->processed = 0;
+  self->dropped = 0;
+  gst_deinterlace_update_qos (self, 0.5, 0, GST_CLOCK_TIME_NONE);
+}
+
+/* Fetch the most recent QoS observation.  Taking the object lock keeps
+ * the proportion/earliest-time pair consistent with a concurrent
+ * gst_deinterlace_update_qos(). */
+static void
+gst_deinterlace_read_qos (GstDeinterlace * self, gdouble * proportion,
+    GstClockTime * time)
+{
+  GST_OBJECT_LOCK (self);
+  *time = self->earliest_time;
+  *proportion = self->proportion;
+  GST_OBJECT_UNLOCK (self);
+}
+
+/* Perform qos calculations before processing the next frame. Returns TRUE if
+ * the frame should be processed, FALSE if the frame can be dropped entirely */
+static gboolean
+gst_deinterlace_do_qos (GstDeinterlace * self, const GstBuffer * buffer)
+{
+  GstClockTime qostime, earliest_time;
+  GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
+  gdouble proportion;
+
+  /* no timestamp, can't do QoS => process frame */
+  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
+    GST_LOG_OBJECT (self, "invalid timestamp, can't do QoS, process frame");
+    goto keep_frame;
+  }
+
+  /* get latest QoS observation values */
+  gst_deinterlace_read_qos (self, &proportion, &earliest_time);
+
+  /* skip qos if we have no observation (yet) => process frame */
+  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
+    GST_LOG_OBJECT (self, "no observation yet, process frame");
+    goto keep_frame;
+  }
+
+  /* qos is done on running time */
+  qostime = gst_segment_to_running_time (&self->segment, GST_FORMAT_TIME,
+      timestamp);
+
+  /* see how our next timestamp relates to the latest qos timestamp */
+  GST_LOG_OBJECT (self, "qostime %" GST_TIME_FORMAT ", earliest %"
+      GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
+
+  if (qostime != GST_CLOCK_TIME_NONE && qostime <= earliest_time) {
+    GstClockTime stream_time, jitter;
+    GstMessage *qos_msg;
+
+    GST_DEBUG_OBJECT (self, "we are late, drop frame");
+    self->dropped++;
+    stream_time =
+        gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);
+    jitter = GST_CLOCK_DIFF (qostime, earliest_time);
+    /* post a QoS message so the application can see the drop and our
+     * processed/dropped statistics */
+    qos_msg =
+        gst_message_new_qos (GST_OBJECT (self), FALSE, qostime, stream_time,
+        timestamp, GST_BUFFER_DURATION (buffer));
+    gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
+    gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
+        self->processed, self->dropped);
+    gst_element_post_message (GST_ELEMENT (self), qos_msg);
+    return FALSE;
+  }
+
+  GST_LOG_OBJECT (self, "process frame");
+keep_frame:
+  self->processed++;
+  return TRUE;
+}
+
+/* Adjust the timestamp/duration of the buffer backing @field1 (and
+ * possibly @field2) so output buffers carry sensible times.  With a
+ * pattern lock the times are synthesized from pattern_base_ts and
+ * pattern_buf_dur; otherwise they are interpolated from neighbouring
+ * fields in the history.  Returns FALSE when more fields are needed
+ * before the times can be fixed. */
+static gboolean
+gst_deinterlace_fix_timestamps (GstDeinterlace * self,
+    GstVideoFrame * field1, GstVideoFrame * field2)
+{
+  GstVideoFrame *field3, *field4;
+  GstVideoInterlaceMode interlacing_mode;
+
+  /* FIXME: This is broken for rate < 0 */
+  if (self->pattern_lock && self->pattern > -1) {
+    /* accurate pattern-locked timestamp adjustment */
+    if (!self->pattern_count)
+      gst_deinterlace_update_pattern_timestamps (self);
+
+    GST_BUFFER_TIMESTAMP (field1->buffer) =
+        self->pattern_base_ts + self->output_count * self->pattern_buf_dur;
+    GST_BUFFER_DURATION (field1->buffer) = self->pattern_buf_dur;
+    self->output_count++;
+  } else {
+    /* naive (but low-latency) timestamp adjustment based on subsequent
+     * fields/buffers */
+    /* the plane-data comparison detects whether the two fields live in
+     * different buffers (same pointer == same buffer) */
+    if (field2
+        && GST_VIDEO_FRAME_PLANE_DATA (field1,
+            0) != GST_VIDEO_FRAME_PLANE_DATA (field2, 0)) {
+      if (GST_BUFFER_TIMESTAMP (field1->buffer) +
+          GST_BUFFER_DURATION (field1->buffer) ==
+          GST_BUFFER_TIMESTAMP (field2->buffer)) {
+        /* contiguous buffers: both get the midpoint timestamp */
+        GST_BUFFER_TIMESTAMP (field1->buffer) =
+            GST_BUFFER_TIMESTAMP (field2->buffer) =
+            (GST_BUFFER_TIMESTAMP (field1->buffer) +
+            GST_BUFFER_TIMESTAMP (field2->buffer)) / 2;
+      } else {
+        GST_BUFFER_TIMESTAMP (field2->buffer) =
+            GST_BUFFER_TIMESTAMP (field1->buffer);
+      }
+    }
+
+    if (self->history_count < 3) {
+      GST_DEBUG_OBJECT (self, "Need more fields (have %d, need 3)",
+          self->history_count);
+      return FALSE;
+    }
+
+    field3 = self->field_history[self->history_count - 3].frame;
+    interlacing_mode = GST_VIDEO_INFO_INTERLACE_MODE (&field3->info);
+    if (IS_TELECINE (interlacing_mode)) {
+      if (self->history_count < 4) {
+        GST_DEBUG_OBJECT (self, "Need more fields (have %d, need 4)",
+            self->history_count);
+        return FALSE;
+      }
+
+      field4 = self->field_history[self->history_count - 4].frame;
+      if (GST_VIDEO_FRAME_PLANE_DATA (field3,
+              0) != GST_VIDEO_FRAME_PLANE_DATA (field4, 0)) {
+        /* telecine fields in separate buffers */
+        GST_BUFFER_TIMESTAMP (field3->buffer) =
+            (GST_BUFFER_TIMESTAMP (field3->buffer) +
+            GST_BUFFER_TIMESTAMP (field4->buffer)) / 2;
+      }
+    }
+
+    /* duration = gap between this field and the one two positions later */
+    GST_BUFFER_DURATION (field1->buffer) =
+        GST_BUFFER_TIMESTAMP (field3->buffer) -
+        GST_BUFFER_TIMESTAMP (field1->buffer);
+  }
+
+  GST_DEBUG_OBJECT (self,
+      "Field 1 adjusted to ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (field1->buffer)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (field1->buffer)));
+  return TRUE;
+}
+
+/* Try to (re)acquire a telecine/interlace pattern lock from the buffer
+ * state history.  On success sets pattern, pattern_phase, pattern_count,
+ * output_count and pattern_lock; on failure pattern stays -1.
+ * *flush_one is written only when the matched pattern begins with an
+ * orphan field, so the caller must have initialised it beforehand. */
+static void
+gst_deinterlace_get_pattern_lock (GstDeinterlace * self, gboolean * flush_one)
+{
+  /* loop over all possible patterns and all possible phases
+   * giving each a score. the highest score gets the lock */
+  /* the score is calculated as the number of matched buffers in the
+   * sequence starting at the phase offset with those from the history
+   * then the longest duration pattern match is taken. if there is more than
+   * one pattern matching all buffers, we take the longest pattern of those.
+   * matches to complete patterns are preferred. if no non-trivial pattern is
+   * matched, trivial patterns are tested. */
+  gint i, j, k, score, pattern, phase;
+  const gint state_count = self->state_count;
+  const gint n_required = self->ignore_obscure ?
+      GST_DEINTERLACE_OBSCURE_THRESHOLD :
+      GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY;
+
+  /* set unknown pattern as this is used in logic outside this function */
+  self->pattern = -1;
+
+  /* wait for more buffers */
+  if (!self->have_eos && state_count < n_required) {
+    GST_DEBUG_OBJECT (self, "Need more buffers in state history - %d/%d",
+        state_count, n_required);
+    return;
+  }
+
+  score = pattern = phase = -1;
+
+  /* loop over all patterns */
+  for (i = 0; i < G_N_ELEMENTS (telecine_patterns); i++) {
+    const guint8 length = telecine_patterns[i].length;
+
+    /* obscure patterns sit at the end of the table; skip them entirely
+     * when ignore_obscure is set */
+    if (self->ignore_obscure && i >= GST_DEINTERLACE_OBSCURE_THRESHOLD)
+      break;
+
+    if (state_count < length)
+      continue;
+
+    /* loop over all phases */
+    for (j = 0; j < length; j++) {
+      /* low-latency mode looks at past buffers, high latency at future buffers */
+      const gint state_idx =
+          self->low_latency ? (self->history_count - 1) >> 1 : state_count - 1;
+      /* loop over history, breaking on differing buffer states */
+      for (k = 0; k < length && k < state_count; k++) {
+        const guint8 hist = self->buf_states[state_idx - k].state;
+        const guint8 patt = telecine_patterns[i].states[(j + k) % length];
+        if (!(hist & patt))
+          break;
+      }
+
+      /* make complete matches more significant */
+      if (k == length)
+        k += GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY;
+
+      /* take as new best pattern if the number of matched buffers is more than
+       * for other patterns */
+      if (k > score) {
+        score = k;
+        pattern = i;
+        phase = j;
+      }
+    }
+  }
+
+  if (pattern < 0) {
+    GST_WARNING_OBJECT (self, "Failed to select a pattern");
+    return;
+  }
+
+  GST_DEBUG_OBJECT (self,
+      "Final pattern match result: pa %d, ph %d, l %d, s %d", pattern, phase,
+      telecine_patterns[pattern].length, score);
+  self->pattern = pattern;
+  self->pattern_phase = phase;
+  self->pattern_count = 0;
+  self->output_count = 0;
+  self->pattern_lock = TRUE;
+
+  /* dump the buffer states that produced the lock, newest first */
+  for (i = 0; i < telecine_patterns[pattern].length; i++) {
+    gint state_idx =
+        self->low_latency ? (self->history_count - 1) >> 1 : self->state_count -
+        1;
+    state_idx -= i;
+    GST_LOG_OBJECT (self, "buf[%d] %s", i,
+        STATE_TO_STRING (self->buf_states[state_idx].state));
+  }
+
+  /* check for the case that the first field of the pattern is an orphan */
+  if (pattern > 1
+      && telecine_patterns[pattern].states[phase] & (GST_ONE | GST_INT)) {
+    /* NOTE: this inner i shadows the outer loop variable above */
+    gint i = phase, field_count = 0;
+    guint8 state = telecine_patterns[pattern].states[i];
+
+    /* walk the pattern until the next progressive entry, counting fields */
+    do {
+      if (state & GST_ONE) {
+        field_count++;
+#if 0
+      } else if (!(state & GST_DRP)) {
+#endif
+      } else {
+        field_count += 2;
+      }
+      i++;
+      i %= telecine_patterns[pattern].length;
+      state = telecine_patterns[pattern].states[i];
+    } while (!(state & GST_PRG));
+
+    /* if field_count is odd, we have an orphan field at the beginning of the
+     * sequence
+     * note - don't do this in low-latency mode as we are somewhere within the
+     * pattern already */
+    if (!self->low_latency && (*flush_one = field_count & 1)) {
+      GST_DEBUG_OBJECT (self, "Orphan field detected at the beginning of the "
+          "pattern - it will be deinterlaced.");
+    }
+  }
+}
+
+static GstFlowReturn
+gst_deinterlace_output_frame (GstDeinterlace * self, gboolean flushing)
+{
+ GstClockTime timestamp;
+ GstFlowReturn ret;
+ gint fields_required;
+ GstBuffer *buf, *outbuf;
+ GstVideoFrame *outframe = NULL;
+ GstDeinterlaceField *field1, *field2;
+ GstVideoInterlaceMode interlacing_mode;
+ guint8 buf_state;
+ gboolean hl_no_lock; /* indicates high latency timestamp adjustment but no pattern lock (could be ONEF or I) */
+ gboolean same_buffer; /* are field1 and field2 in the same buffer? */
+ gboolean flush_one; /* used for flushing one field when in high latency mode and not locked */
+ TelecinePattern pattern;
+ guint8 phase, count;
+ const GstDeinterlaceLocking locking = self->locking;
+ gboolean cc_added = FALSE;
+
+ memset (&pattern, 0, sizeof (pattern));
+
+restart:
+ ret = GST_FLOW_OK;
+ hl_no_lock = FALSE;
+ flush_one = FALSE;
+ self->need_more = FALSE;
+ phase = self->pattern_phase;
+ count = self->pattern_count;
+
+ if (!self->history_count) {
+ GST_DEBUG_OBJECT (self, "History is empty, waiting for more buffers!");
+ goto need_more;
+ }
+
+ field1 = &self->field_history[self->history_count - 1];
+
+ if (locking != GST_DEINTERLACE_LOCKING_NONE) {
+ GstCaps *sinkcaps;
+
+ if (!self->state_count) {
+ GST_ERROR_OBJECT (self,
+ "BROKEN! Fields in history + no states should not happen!");
+ return GST_FLOW_ERROR;
+ }
+
+ gst_deinterlace_get_buffer_state (self, field1->frame, &buf_state,
+ &interlacing_mode);
+
+ if (self->pattern != -1)
+ pattern = telecine_patterns[self->pattern];
+
+ /* patterns 0 and 1 are interlaced, the rest are telecine */
+ if (self->pattern > 1)
+ interlacing_mode = GST_VIDEO_INTERLACE_MODE_MIXED;
+
+ if (self->pattern == -1 || self->pattern_refresh
+ || !(buf_state & pattern.states[(phase + count) % pattern.length])) {
+ if (self->pattern == -1) {
+ GST_DEBUG_OBJECT (self, "No pattern lock - refresh lock");
+ } else if (self->pattern_refresh) {
+ GST_DEBUG_OBJECT (self, "Pattern refresh - refresh lock");
+ } else {
+ GST_DEBUG_OBJECT (self, "Unexpected buffer state - refresh lock");
+ }
+ /* no pattern, pattern refresh set or unexpected buffer state */
+ self->pattern_lock = FALSE;
+ self->pattern_refresh = TRUE;
+
+ /* refresh pattern lock */
+ gst_deinterlace_get_pattern_lock (self, &flush_one);
+
+ if (self->pattern != -1) {
+ /* locked onto a valid pattern so refresh complete */
+ GST_DEBUG_OBJECT (self, "Pattern locked! %s starting at %d",
+ telecine_patterns[self->pattern].nick, self->pattern_phase);
+ self->pattern_refresh = FALSE;
+ } else if (!self->low_latency) {
+ if (!self->pattern_lock) {
+ goto need_more;
+ } else {
+ hl_no_lock = TRUE;
+ }
+ }
+
+ /* setcaps on sink and src pads */
+ sinkcaps = gst_pad_get_current_caps (self->sinkpad);
+ if (!sinkcaps
+ || !gst_deinterlace_setcaps (self, self->sinkpad, sinkcaps, FALSE)) {
+ if (sinkcaps)
+ gst_caps_unref (sinkcaps);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+
+ gst_caps_unref (sinkcaps);
+
+ if (flush_one && self->drop_orphans) {
+ GST_DEBUG_OBJECT (self, "Dropping orphan first field");
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+ goto restart;
+ }
+ }
+ } else {
+ gst_deinterlace_get_buffer_state (self, field1->frame, NULL,
+ &interlacing_mode);
+ }
+
+ same_buffer = self->history_count >= 2
+ && (GST_VIDEO_FRAME_PLANE_DATA (field1->frame, 0) ==
+ GST_VIDEO_FRAME_PLANE_DATA (self->field_history[self->history_count -
+ 2].frame, 0));
+
+ if ((flushing && self->history_count == 1) || (flush_one
+ && !self->drop_orphans) || (hl_no_lock && (self->history_count == 1
+ || !same_buffer))) {
+ /* This case is for flushing a single field:
+ * - flushing and 1 field in the history
+ * - flush one (due to orphans in the pattern) and do not drop orphans
+ * - high-latency pattern locking with no possible lock given the current
+ * state and either only one field in the history or the tip two fields
+ * are in separate buffers */
+ GST_DEBUG_OBJECT (self, "Flushing one field using linear method");
+ gst_deinterlace_set_method (self, GST_DEINTERLACE_LINEAR);
+ fields_required = gst_deinterlace_method_get_fields_required (self->method);
+ } else if (interlacing_mode == GST_VIDEO_INTERLACE_MODE_PROGRESSIVE ||
+ (interlacing_mode == GST_VIDEO_INTERLACE_MODE_MIXED &&
+ !GST_VIDEO_FRAME_IS_INTERLACED (field1->frame))) {
+ /* This case is for processing progressive buffers, telecine or plain
+ * progressive */
+ GstVideoFrame *field1_frame;
+ GstBuffer *field1_buffer;
+
+ /* progressive */
+ fields_required = 2;
+
+ /* Not enough fields in the history */
+ if (!flushing && self->history_count < fields_required) {
+ GST_DEBUG_OBJECT (self, "Need more fields (have %d, need %d)",
+ self->history_count, self->cur_field_idx + fields_required);
+ goto need_more;
+ }
+
+ field2 = &self->field_history[self->history_count - 2];
+ if (GST_VIDEO_FRAME_PLANE_DATA (field1->frame,
+ 0) != GST_VIDEO_FRAME_PLANE_DATA (field2->frame, 0)) {
+ /* ERROR - next two fields in field history are not one progressive buffer - weave? */
+ GST_ERROR_OBJECT (self,
+ "Progressive buffer but two fields at tip aren't in the same buffer!");
+ }
+
+ if (IS_TELECINE (interlacing_mode)
+ && !gst_deinterlace_fix_timestamps (self, field1->frame, field2->frame)
+ && !flushing)
+ goto need_more;
+
+ GST_DEBUG_OBJECT (self,
+ "Frame type: Progressive; pushing buffer as a frame");
+ /* pop and push */
+ gst_deinterlace_delete_meta_at (self, self->history_count - 1);
+ self->cur_field_idx--;
+ field1_frame = gst_deinterlace_pop_history (self);
+ field1_buffer = field1_frame->buffer;
+ gst_buffer_ref (field1_buffer);
+ gst_video_frame_unmap_and_free (field1_frame);
+
+ /* field2 is the same buffer as field1, but we need to remove it from the
+ * history anyway */
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+ GST_DEBUG_OBJECT (self,
+ "[OUT] ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", end %"
+ GST_TIME_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (field1_buffer)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (field1_buffer)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (field1_buffer) +
+ GST_BUFFER_DURATION (field1_buffer)));
+ if (self->discont) {
+ GST_BUFFER_FLAG_SET (field1_buffer, GST_BUFFER_FLAG_DISCONT);
+ self->discont = FALSE;
+ }
+ return gst_pad_push (self->srcpad, field1_buffer);
+ } else if (IS_TELECINE (interlacing_mode)
+ && GST_VIDEO_FRAME_IS_INTERLACED (field1->frame) && !same_buffer) {
+ /* This case needs to identify telecine mixed buffers that require weaving
+ * of two fields in different buffers.
+ * - interlacing mode is mixed
+ * - locked on to a telecine pattern
+ * - frame is interlaced
+ * - fields are in separate buffers
+ * If we don't yet have a pattern lock, we will have to deinterlace as we
+ * don't explicitly know we have a telecine sequence and so we drop through
+ * to the plain deinterlace case */
+ fields_required = 2;
+ if (!flushing && self->history_count < fields_required) {
+ GST_DEBUG_OBJECT (self, "Need more fields (have %d, need %d)",
+ self->history_count, self->cur_field_idx + fields_required);
+ goto need_more;
+ }
+
+ field2 = &self->field_history[self->history_count - 2];
+ if (!gst_deinterlace_fix_timestamps (self, field1->frame, field2->frame)
+ && !flushing)
+ goto need_more;
+
+ /* check field1 and field2 buffer caps and flags are corresponding */
+ if (field1->flags == field2->flags) {
+ /* ERROR - fields are of same parity - what should be done here?
+ * perhaps deinterlace the tip field and start again? */
+ GST_ERROR_OBJECT (self, "Telecine mixed with fields of same parity!");
+ }
+ GST_DEBUG_OBJECT (self,
+ "Frame type: Telecine Mixed; weaving tip two fields into a frame");
+ /* set method to WEAVE */
+ gst_deinterlace_set_method (self, GST_DEINTERLACE_WEAVE);
+ } else {
+ /* This is the final catch-all case that applies the selected deinterlacing
+ * method. At this point the fields to be processed are either definitely
+ * interlaced or we do not yet know that we have a telecine pattern lock
+ * and so the best we can do is to deinterlace the fields. */
+ gst_deinterlace_set_method (self, self->user_set_method_id);
+ fields_required = gst_deinterlace_method_get_fields_required (self->method);
+ if (flushing && self->history_count < fields_required) {
+ /* note: we already checked for flushing with history count == 1 above
+ * so we must have 2 or more fields in here */
+ gst_deinterlace_set_method (self, GST_DEINTERLACE_VFIR);
+ fields_required =
+ gst_deinterlace_method_get_fields_required (self->method);
+ GST_DEBUG_OBJECT (self, "Flushing field(s) using %s method",
+ methods_types[self->method_id].value_nick);
+ }
+
+ /* Not enough fields in the history */
+ if (!flushing && self->history_count < fields_required) {
+ GST_DEBUG_OBJECT (self, "Need more fields (have %d, need %d)",
+ self->history_count, self->cur_field_idx + fields_required);
+ goto need_more;
+ }
+
+ GST_DEBUG_OBJECT (self,
+ "Frame type: Interlaced; deinterlacing using %s method",
+ methods_types[self->method_id].value_nick);
+ }
+
+ if (!flushing && self->cur_field_idx < 1) {
+ goto need_more;
+ } else if (self->cur_field_idx < 0 && flushing) {
+ self->cur_field_idx++;
+ }
+
+ if (self->fields == GST_DEINTERLACE_ALL || IS_TELECINE (interlacing_mode))
+ GST_DEBUG_OBJECT (self, "All fields");
+ else if (self->fields == GST_DEINTERLACE_TF)
+ GST_DEBUG_OBJECT (self, "Top fields");
+ else if (self->fields == GST_DEINTERLACE_BF)
+ GST_DEBUG_OBJECT (self, "Bottom fields");
+
+ if ((self->field_history[self->cur_field_idx].flags == PICTURE_INTERLACED_TOP
+ && (self->fields == GST_DEINTERLACE_TF
+ || IS_TELECINE (interlacing_mode)))
+ || (self->fields == GST_DEINTERLACE_ALL
+ && !IS_TELECINE (interlacing_mode))) {
+ gint index;
+
+ GST_DEBUG_OBJECT (self, "deinterlacing top field");
+
+ /* create new buffer */
+ ret = gst_buffer_pool_acquire_buffer (self->pool, &outbuf, NULL);
+ if (ret != GST_FLOW_OK)
+ goto no_buffer;
+
+ g_return_val_if_fail (self->history_count >=
+ 1 + gst_deinterlace_method_get_latency (self->method), GST_FLOW_ERROR);
+
+ index =
+ self->history_count - 1 -
+ gst_deinterlace_method_get_latency (self->method);
+ buf = self->field_history[index].frame->buffer;
+
+ if (self->field_history[index].tc) {
+ gst_buffer_add_video_time_code_meta (outbuf,
+ self->field_history[index].tc);
+ }
+ if (self->field_history[index].caption) {
+ g_assert (self->field_history[index].caption->data != NULL);
+ g_assert (!cc_added);
+ gst_buffer_add_video_caption_meta (outbuf,
+ self->field_history[index].caption->caption_type,
+ self->field_history[index].caption->data,
+ self->field_history[index].caption->size);
+ cc_added = TRUE;
+ }
+ if (IS_TELECINE (interlacing_mode) && !self->telecine_tc_warned) {
+ self->telecine_tc_warned = TRUE;
+ GST_FIXME_OBJECT (self,
+ "Detected telecine timecodes when deinterlacing. This is not "
+ "supported yet. Resulting timecode may be wrong");
+ }
+ if (self->fields == GST_DEINTERLACE_ALL) {
+ GstVideoTimeCodeMeta *meta = gst_buffer_get_video_time_code_meta (outbuf);
+ if (meta) {
+ meta->tc.config.fps_n = 2 * meta->tc.config.fps_n;
+ meta->tc.frames = 2 * meta->tc.frames;
+ }
+ }
+ if (!IS_TELECINE (interlacing_mode)) {
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+
+ if (self->fields == GST_DEINTERLACE_ALL) {
+ if (self->segment.rate < 0)
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp + self->field_duration;
+ else
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = self->field_duration;
+ } else {
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = 2 * self->field_duration;
+ }
+ GST_DEBUG_OBJECT (self,
+ "[ADJUST] ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", end %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf) +
+ GST_BUFFER_DURATION (outbuf)));
+ } else {
+ GST_BUFFER_TIMESTAMP (outbuf) =
+ GST_BUFFER_TIMESTAMP (field1->frame->buffer);
+ GST_BUFFER_DURATION (outbuf) =
+ GST_BUFFER_DURATION (field1->frame->buffer);
+ }
+
+ /* Check if we need to drop the frame because of QoS */
+ if (!gst_deinterlace_do_qos (self, buf)) {
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+ gst_buffer_unref (outbuf);
+ outbuf = NULL;
+ ret = GST_FLOW_OK;
+ } else {
+ if (self->cur_field_idx < 0 && flushing) {
+ if (self->history_count == 1) {
+ gst_deinterlace_pop_and_clear (self);
+ goto need_more;
+ }
+ self->cur_field_idx++;
+ }
+ if (self->cur_field_idx < 0) {
+ goto need_more;
+ }
+ if (!flushing && self->cur_field_idx < 1) {
+ goto need_more;
+ }
+
+ /* map the frame so the deinterlace methods can write the data to the
+ * correct memory locations */
+ outframe =
+ gst_video_frame_new_and_map (&self->vinfo_out, outbuf, GST_MAP_WRITE);
+
+ /* do magic calculus */
+ gst_deinterlace_method_deinterlace_frame (self->method,
+ self->field_history, self->history_count, outframe,
+ self->cur_field_idx);
+
+ gst_video_frame_unmap_and_free (outframe);
+
+ self->cur_field_idx--;
+ /* need to remove the field in the telecine weaving case */
+ if ((IS_TELECINE (interlacing_mode)
+ && self->method_id == GST_DEINTERLACE_WEAVE)
+ || self->cur_field_idx + 1 +
+ gst_deinterlace_method_get_latency (self->method) <
+ self->history_count || flushing) {
+ gst_deinterlace_pop_and_clear (self);
+ }
+
+ if (gst_deinterlace_clip_buffer (self, outbuf)) {
+ GST_DEBUG_OBJECT (self,
+ "[OUT] ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", end %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf) +
+ GST_BUFFER_DURATION (outbuf)));
+ if (self->discont) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ self->discont = FALSE;
+ }
+ ret = gst_pad_push (self->srcpad, outbuf);
+ } else {
+ ret = GST_FLOW_OK;
+ gst_buffer_unref (outbuf);
+ }
+
+ outbuf = NULL;
+ if (ret != GST_FLOW_OK)
+ return ret;
+ if (IS_TELECINE (interlacing_mode)
+ && self->method_id == GST_DEINTERLACE_WEAVE) {
+ /* pop off the second field */
+ GST_DEBUG_OBJECT (self, "Removing unused field (count: %d)",
+ self->history_count);
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+ interlacing_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+ return ret;
+ }
+ }
+
+ if (flush_one && !self->drop_orphans) {
+ GST_DEBUG_OBJECT (self, "Orphan field deinterlaced - reconfiguring");
+ goto restart;
+ }
+ }
+ /* no calculation done: remove excess field */
+ else if (self->field_history[self->cur_field_idx].flags ==
+ PICTURE_INTERLACED_TOP && (self->fields == GST_DEINTERLACE_BF
+ && !IS_TELECINE (interlacing_mode))) {
+ GST_DEBUG_OBJECT (self, "Removing unused top field");
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+
+ if (flush_one && !self->drop_orphans) {
+ GST_DEBUG_OBJECT (self, "Orphan field deinterlaced - reconfiguring");
+ goto restart;
+ }
+ }
+
+ if (self->history_count < fields_required)
+ return ret;
+
+ if (self->cur_field_idx < 0)
+ return ret;
+
+ /* deinterlace bottom_field */
+ if ((self->field_history[self->cur_field_idx].flags ==
+ PICTURE_INTERLACED_BOTTOM && (self->fields == GST_DEINTERLACE_BF
+ || IS_TELECINE (interlacing_mode)))
+ || (self->fields == GST_DEINTERLACE_ALL
+ && !IS_TELECINE (interlacing_mode))) {
+ gint index;
+
+ GST_DEBUG_OBJECT (self, "deinterlacing bottom field");
+
+ /* create new buffer */
+ ret = gst_buffer_pool_acquire_buffer (self->pool, &outbuf, NULL);
+ if (ret != GST_FLOW_OK)
+ goto no_buffer;
+
+ g_return_val_if_fail (self->history_count >=
+ gst_deinterlace_method_get_latency (self->method) + 1, GST_FLOW_ERROR);
+
+ index =
+ self->history_count - 1 -
+ gst_deinterlace_method_get_latency (self->method);
+ buf = self->field_history[index].frame->buffer;
+
+ if (self->field_history[index].tc) {
+ gst_buffer_add_video_time_code_meta (outbuf,
+ self->field_history[index].tc);
+ }
+ if (self->field_history[index].caption && !cc_added) {
+ g_assert (self->field_history[index].caption->data != NULL);
+ gst_buffer_add_video_caption_meta (outbuf,
+ self->field_history[index].caption->caption_type,
+ self->field_history[index].caption->data,
+ self->field_history[index].caption->size);
+ cc_added = TRUE;
+ }
+ if (IS_TELECINE (interlacing_mode) && !self->telecine_tc_warned) {
+ self->telecine_tc_warned = TRUE;
+ GST_FIXME_OBJECT (self,
+ "Detected telecine timecodes when deinterlacing. This is not "
+ "supported yet. Resulting timecode may be wrong");
+ }
+ if (self->fields == GST_DEINTERLACE_ALL) {
+ GstVideoTimeCodeMeta *meta = gst_buffer_get_video_time_code_meta (outbuf);
+ if (meta) {
+ meta->tc.config.fps_n = 2 * meta->tc.config.fps_n;
+ meta->tc.frames = 2 * meta->tc.frames + 1;
+ }
+ }
+ if (!IS_TELECINE (interlacing_mode)) {
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+
+ if (self->fields == GST_DEINTERLACE_ALL) {
+ if (self->segment.rate < 0)
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ else
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp + self->field_duration;
+ GST_BUFFER_DURATION (outbuf) = self->field_duration;
+ } else {
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = 2 * self->field_duration;
+ }
+ GST_DEBUG_OBJECT (self,
+ "[ADJUST] ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", end %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf) +
+ GST_BUFFER_DURATION (outbuf)));
+ } else {
+ GST_BUFFER_TIMESTAMP (outbuf) =
+ GST_BUFFER_TIMESTAMP (field1->frame->buffer);
+ GST_BUFFER_DURATION (outbuf) =
+ GST_BUFFER_DURATION (field1->frame->buffer);
+ }
+
+ /* Check if we need to drop the frame because of QoS */
+ if (!gst_deinterlace_do_qos (self, buf)) {
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+ gst_buffer_unref (outbuf);
+ outbuf = NULL;
+ ret = GST_FLOW_OK;
+ } else {
+ /* map the frame so the deinterlace methods can write the data to the
+ * correct memory locations */
+ outframe =
+ gst_video_frame_new_and_map (&self->vinfo_out, outbuf, GST_MAP_WRITE);
+
+ /* do magic calculus */
+ gst_deinterlace_method_deinterlace_frame (self->method,
+ self->field_history, self->history_count, outframe,
+ self->cur_field_idx);
+
+ gst_video_frame_unmap_and_free (outframe);
+
+ self->cur_field_idx--;
+ /* need to remove the field in the telecine weaving case */
+ if ((IS_TELECINE (interlacing_mode)
+ && self->method_id == GST_DEINTERLACE_WEAVE)
+ || self->cur_field_idx + 1 +
+ gst_deinterlace_method_get_latency (self->method) <
+ self->history_count) {
+ gst_deinterlace_pop_and_clear (self);
+ }
+
+ if (gst_deinterlace_clip_buffer (self, outbuf)) {
+ GST_DEBUG_OBJECT (self,
+ "[OUT] ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", end %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf) +
+ GST_BUFFER_DURATION (outbuf)));
+ ret = gst_pad_push (self->srcpad, outbuf);
+ } else {
+ ret = GST_FLOW_OK;
+ gst_buffer_unref (outbuf);
+ }
+
+ outbuf = NULL;
+ if (ret != GST_FLOW_OK)
+ return ret;
+ if (IS_TELECINE (interlacing_mode)
+ && self->method_id == GST_DEINTERLACE_WEAVE) {
+ /* pop off the second field */
+ GST_DEBUG_OBJECT (self, "Removing unused field (count: %d)",
+ self->history_count);
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+ interlacing_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
+ return ret;
+ }
+ }
+
+ if (flush_one && !self->drop_orphans) {
+ GST_DEBUG_OBJECT (self, "Orphan field deinterlaced - reconfiguring");
+ goto restart;
+ }
+ }
+ /* no calculation done: remove excess field */
+ else if (self->field_history[self->cur_field_idx].flags ==
+ PICTURE_INTERLACED_BOTTOM && (self->fields == GST_DEINTERLACE_TF
+ && !IS_TELECINE (interlacing_mode))) {
+ GST_DEBUG_OBJECT (self, "Removing unused bottom field");
+ self->cur_field_idx--;
+ gst_deinterlace_pop_and_clear (self);
+
+ if (flush_one && !self->drop_orphans) {
+ GST_DEBUG_OBJECT (self, "Orphan field deinterlaced - reconfiguring");
+ goto restart;
+ }
+ }
+
+ return ret;
+
+need_more:
+ {
+ self->need_more = TRUE;
+ return ret;
+ }
+no_buffer:
+ {
+ GST_DEBUG_OBJECT (self, "could not allocate buffer");
+ return ret;
+ }
+}
+
+/* Decide the pattern-locking latency mode.  Returns TRUE when passive
+ * (low-latency) locking should be used, FALSE for active (high-latency)
+ * locking.  In AUTO mode the decision is taken from a latency query on
+ * the sink pad peer: live upstreams get passive locking; if the query
+ * fails we conservatively fall back to passive locking too. */
+static gboolean
+gst_deinterlace_get_latency (GstDeinterlace * self)
+{
+  if (self->locking == GST_DEINTERLACE_LOCKING_AUTO) {
+    GstQuery *query;
+
+    query = gst_query_new_latency ();
+    if ((gst_pad_peer_query (self->sinkpad, query))) {
+      gboolean is_live;
+      /* if upstream is live, we use low-latency passive locking mode
+       * else high-latency active locking mode */
+      gst_query_parse_latency (query, &is_live, NULL, NULL);
+      GST_DEBUG_OBJECT (self, "Latency query indicates stream is %s",
+          is_live ? "live - using passive locking" :
+          "not live - using active locking");
+      gst_query_unref (query);
+      return is_live;
+    } else {
+      /* conservatively use passive locking if the query fails */
+      GST_WARNING_OBJECT (self,
+          "Latency query failed - fall back to using passive locking");
+      gst_query_unref (query);
+      return TRUE;
+    }
+  } else {
+    /* NOTE(review): relies on the GstDeinterlaceLocking enum layout so
+     * that subtracting 2 maps ACTIVE to FALSE and PASSIVE to TRUE —
+     * confirm against the enum definition; an explicit comparison would
+     * be more robust. */
+    return self->locking - 2;
+  }
+}
+
+/* Sink pad chain function.  Takes ownership of @buf, applies any pending
+ * deferred property changes (mode/fields) and renegotiates if needed,
+ * then either forwards the buffer (passthrough / still-frame mode) or
+ * appends its fields to the history and pushes out as many deinterlaced
+ * frames as currently possible. */
+static GstFlowReturn
+gst_deinterlace_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstDeinterlace *self = GST_DEINTERLACE (parent);
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  GST_OBJECT_LOCK (self);
+  if (self->reconfigure || gst_pad_check_reconfigure (self->srcpad)) {
+    GstCaps *caps;
+    gboolean force_reconfigure = FALSE;
+
+    /* apply property changes that were deferred to the streaming thread;
+     * -1 means "no pending change" */
+    if ((gint) self->new_fields != -1) {
+      force_reconfigure |= (self->user_set_fields != self->new_fields);
+      self->user_set_fields = self->new_fields;
+    }
+    if ((gint) self->new_mode != -1) {
+      force_reconfigure |= (self->mode != self->new_mode);
+      self->mode = self->new_mode;
+    }
+    self->new_mode = -1;
+    self->new_fields = -1;
+
+    self->reconfigure = FALSE;
+    /* drop the object lock before renegotiating caps */
+    GST_OBJECT_UNLOCK (self);
+    caps = gst_pad_get_current_caps (self->sinkpad);
+    if (caps != NULL) {
+      if (!gst_deinterlace_setcaps (self, self->sinkpad, caps,
+              force_reconfigure)) {
+        gst_pad_mark_reconfigure (self->srcpad);
+        gst_caps_unref (caps);
+        /* we own @buf; don't leak it on the error return */
+        gst_buffer_unref (buf);
+        if (GST_PAD_IS_FLUSHING (self->srcpad))
+          return GST_FLOW_FLUSHING;
+        else
+          return GST_FLOW_NOT_NEGOTIATED;
+      }
+      gst_caps_unref (caps);
+    } else {
+      /* no caps on the sink pad yet, retry on the next buffer */
+      gst_pad_mark_reconfigure (self->srcpad);
+      /* we own @buf; don't leak it on the error return */
+      gst_buffer_unref (buf);
+      return GST_FLOW_FLUSHING;
+    }
+  } else {
+    GST_OBJECT_UNLOCK (self);
+  }
+
+  GST_DEBUG_OBJECT (self,
+      "[IN] ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", end %"
+      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)));
+
+  if (self->still_frame_mode || self->passthrough) {
+    /* nothing to deinterlace: forward the input buffer unchanged,
+     * transferring our ownership of it downstream */
+    GST_DEBUG_OBJECT (self,
+        "Frame type: Progressive?; pushing buffer using pass-through");
+    GST_DEBUG_OBJECT (self,
+        "[OUT] ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", end %"
+        GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
+        GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
+        GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)));
+
+    return gst_pad_push (self->srcpad, buf);
+  }
+
+  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
+    /* a discontinuity invalidates the accumulated field history */
+    GST_DEBUG_OBJECT (self, "DISCONT buffer, resetting history");
+    gst_deinterlace_reset_history (self, FALSE);
+    self->discont = TRUE;
+  }
+
+  /* push_history takes ownership of @buf */
+  gst_deinterlace_push_history (self, buf);
+  buf = NULL;
+
+  /* output all frames that can be produced from the current history */
+  do {
+    ret = gst_deinterlace_output_frame (self, FALSE);
+  } while (!self->need_more && self->history_count > 0 && ret == GST_FLOW_OK);
+
+  return ret;
+}
+
+/* accept-caps handling.  In AUTO/DISABLED mode anything compatible with
+ * the pad template is acceptable.  In INTERLACED mode deinterlacing is
+ * forced, so only caps we can actually deinterlace pass.  In AUTO_STRICT
+ * mode all progressive formats are accepted, plus only those interlaced
+ * formats that we can really deinterlace. */
+static gboolean
+gst_deinterlace_acceptcaps (GstDeinterlace * self, GstPad * pad, GstCaps * caps)
+{
+  GstCaps *allowed;
+  gboolean result;
+
+  switch (self->mode) {
+    case GST_DEINTERLACE_MODE_DISABLED:
+    case GST_DEINTERLACE_MODE_AUTO:
+      allowed = gst_pad_get_pad_template_caps (pad);
+      result = gst_caps_is_subset (caps, allowed);
+      gst_caps_unref (allowed);
+      break;
+    case GST_DEINTERLACE_MODE_INTERLACED:
+      allowed = gst_static_caps_get (&deinterlace_caps);
+      result = gst_caps_is_subset (caps, allowed);
+      gst_caps_unref (allowed);
+      break;
+    case GST_DEINTERLACE_MODE_AUTO_STRICT:
+      /* progressive is always fine ... */
+      allowed = gst_static_caps_get (&progressive_caps);
+      result = gst_caps_is_subset (caps, allowed);
+      gst_caps_unref (allowed);
+      /* ... otherwise it must be something we can deinterlace */
+      if (!result) {
+        allowed = gst_static_caps_get (&deinterlace_caps);
+        result = gst_caps_is_subset (caps, allowed);
+        gst_caps_unref (allowed);
+      }
+      break;
+    default:
+      g_assert_not_reached ();
+      result = FALSE;
+      break;
+  }
+
+  GST_DEBUG_OBJECT (pad, "accept-caps result:%d for caps %" GST_PTR_FORMAT,
+      result, caps);
+
+  return result;
+}
+
+/* Scale the fraction *n_out / *d_out by two (@half == FALSE) or by one
+ * half (@half == TRUE), reducing it first and clamping near the gint
+ * range instead of overflowing.  Returns FALSE only for an invalid
+ * fraction with a zero denominator; a zero numerator is left untouched. */
+static gboolean
+gst_deinterlace_fraction_double (gint * n_out, gint * d_out, gboolean half)
+{
+  gint num, den, common;
+
+  num = *n_out;
+  den = *d_out;
+
+  if (den == 0)
+    return FALSE;
+  if (num == 0)
+    return TRUE;
+
+  /* reduce before scaling to maximize headroom */
+  common = gst_util_greatest_common_divisor (num, den);
+  num /= common;
+  den /= common;
+
+  if (half) {
+    /* halving: prefer doubling the denominator; if that would overflow,
+     * halve the numerator instead; otherwise clamp the denominator */
+    if (ABS (den) <= G_MAXINT / 2)
+      den *= 2;
+    else if (num >= 2 && num != G_MAXINT)
+      num /= 2;
+    else
+      den = G_MAXINT;
+  } else {
+    /* doubling: mirror image of the halving logic */
+    if (ABS (num) <= G_MAXINT / 2)
+      num *= 2;
+    else if (den >= 2 && den != G_MAXINT)
+      den /= 2;
+    else
+      num = G_MAXINT;
+  }
+
+  *n_out = num;
+  *d_out = den;
+
+  return TRUE;
+}
+
+/* Double (or halve, when @half is TRUE) every "framerate" field in @caps
+ * in place via gst_deinterlace_fraction_double().  Structures whose
+ * framerate cannot be scaled are removed from @caps.  Handles plain
+ * fractions, fraction ranges and lists of fractions.  Returns @caps
+ * (same object, ownership unchanged). */
+static GstCaps *
+gst_deinterlace_caps_double_framerate (GstCaps * caps, gboolean half)
+{
+  guint len;
+
+  /* iterate backwards so removing a structure does not disturb the
+   * indices still to be visited */
+  for (len = gst_caps_get_size (caps); len > 0; len--) {
+    GstStructure *s = gst_caps_get_structure (caps, len - 1);
+    const GValue *val;
+
+    val = gst_structure_get_value (s, "framerate");
+    if (!val)
+      continue;
+
+    if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION) {
+      /* single fraction: scale it, or drop the structure on failure */
+      gint n, d;
+
+      n = gst_value_get_fraction_numerator (val);
+      d = gst_value_get_fraction_denominator (val);
+
+      if (!gst_deinterlace_fraction_double (&n, &d, half)) {
+        gst_caps_remove_structure (caps, len - 1);
+        continue;
+      }
+
+      gst_structure_set (s, "framerate", GST_TYPE_FRACTION, n, d, NULL);
+    } else if (G_VALUE_TYPE (val) == GST_TYPE_FRACTION_RANGE) {
+      /* fraction range: scale both endpoints; if either fails, unset the
+       * temporary GValues and drop the structure */
+      const GValue *min, *max;
+      GValue nrange = { 0, }, nmin = {
+        0,}, nmax = {
+        0,};
+      gint n, d;
+
+      g_value_init (&nrange, GST_TYPE_FRACTION_RANGE);
+      g_value_init (&nmin, GST_TYPE_FRACTION);
+      g_value_init (&nmax, GST_TYPE_FRACTION);
+
+      min = gst_value_get_fraction_range_min (val);
+      max = gst_value_get_fraction_range_max (val);
+
+      n = gst_value_get_fraction_numerator (min);
+      d = gst_value_get_fraction_denominator (min);
+
+      if (!gst_deinterlace_fraction_double (&n, &d, half)) {
+        g_value_unset (&nrange);
+        g_value_unset (&nmax);
+        g_value_unset (&nmin);
+        gst_caps_remove_structure (caps, len - 1);
+        continue;
+      }
+
+      gst_value_set_fraction (&nmin, n, d);
+
+      n = gst_value_get_fraction_numerator (max);
+      d = gst_value_get_fraction_denominator (max);
+
+      if (!gst_deinterlace_fraction_double (&n, &d, half)) {
+        g_value_unset (&nrange);
+        g_value_unset (&nmax);
+        g_value_unset (&nmin);
+        gst_caps_remove_structure (caps, len - 1);
+        continue;
+      }
+
+      gst_value_set_fraction (&nmax, n, d);
+      gst_value_set_fraction_range (&nrange, &nmin, &nmax);
+
+      /* take_value transfers ownership of nrange to the structure */
+      gst_structure_take_value (s, "framerate", &nrange);
+
+      g_value_unset (&nmin);
+      g_value_unset (&nmax);
+    } else if (G_VALUE_TYPE (val) == GST_TYPE_LIST) {
+      /* list of fractions: rebuild the list with scaled entries */
+      const GValue *lval;
+      GValue nlist = { 0, };
+      GValue nval = { 0, };
+      gint i;
+
+      g_value_init (&nlist, GST_TYPE_LIST);
+      for (i = gst_value_list_get_size (val); i > 0; i--) {
+        gint n, d;
+
+        lval = gst_value_list_get_value (val, i - 1);
+
+        if (G_VALUE_TYPE (lval) != GST_TYPE_FRACTION)
+          continue;
+
+        n = gst_value_get_fraction_numerator (lval);
+        d = gst_value_get_fraction_denominator (lval);
+
+        /* Double/Half the framerate but if this fails simply
+         * skip this value from the list */
+        if (!gst_deinterlace_fraction_double (&n, &d, half)) {
+          continue;
+        }
+
+        g_value_init (&nval, GST_TYPE_FRACTION);
+
+        gst_value_set_fraction (&nval, n, d);
+        gst_value_list_append_and_take_value (&nlist, &nval);
+      }
+      gst_structure_take_value (s, "framerate", &nlist);
+    }
+  }
+
+  return caps;
+}
+
+/* Return a copy of @caps restricted to interlace-mode=alternate with the
+ * Interlaced caps feature attached.  The caller owns the returned caps;
+ * @caps itself is not modified. */
+static GstCaps *
+dup_caps_with_alternate (GstCaps * caps)
+{
+  GstCaps *result;
+
+  result = gst_caps_copy (caps);
+  gst_caps_set_features_simple (result,
+      gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL));
+  gst_caps_set_simple (result, "interlace-mode", G_TYPE_STRING,
+      "alternate", NULL);
+
+  return result;
+}
+
+/* Query-caps handling for both pads.  Starts from the intersection of
+ * our template caps with the peer caps of the opposite pad, then builds
+ * the result set depending on the configured mode: pure passthrough for
+ * DISABLED, only deinterlaceable caps for INTERLACED, and for AUTO /
+ * AUTO_STRICT a merge of progressive caps, deinterlaceable caps (with
+ * framerate doubled/halved as needed) and — in AUTO only — the original
+ * caps as a passthrough fallback.  The caller owns the returned caps. */
+static GstCaps *
+gst_deinterlace_getcaps (GstDeinterlace * self, GstPad * pad, GstCaps * filter)
+{
+  GstCaps *ret, *caps;
+  GstPad *otherpad;
+  gint len;
+  GstCaps *ourcaps;
+  GstCaps *peercaps;
+  GstCaps *tmp, *tmp2;
+
+  otherpad = (pad == self->srcpad) ? self->sinkpad : self->srcpad;
+
+  ourcaps = gst_pad_get_pad_template_caps (pad);
+  peercaps = gst_pad_peer_query_caps (otherpad, NULL);
+
+  /* Filter any peercaps that are available with our template
+   * to get started with the subset of caps we actually support */
+  if (peercaps) {
+    GST_DEBUG_OBJECT (pad, "Peer has caps %" GST_PTR_FORMAT, peercaps);
+    caps = gst_caps_make_writable (gst_caps_intersect (ourcaps, peercaps));
+    gst_caps_unref (peercaps);
+    gst_caps_unref (ourcaps);
+    peercaps = ourcaps = NULL;
+  } else {
+    caps = gst_caps_make_writable (ourcaps);
+    ourcaps = NULL;
+  }
+
+  GST_DEBUG_OBJECT (pad,
+      "Transforming caps %" GST_PTR_FORMAT " with filter %" GST_PTR_FORMAT,
+      caps, filter);
+
+  /* If deinterlacing is disabled, we just passthrough the
+   * caps and everything */
+  if (self->mode == GST_DEINTERLACE_MODE_DISABLED) {
+    ret = caps;
+    caps = NULL;
+    goto done;
+  }
+
+  /* If deinterlacing is enforced, we can only accept the
+   * caps for which we can actually do deinterlacing */
+  if (self->mode == GST_DEINTERLACE_MODE_INTERLACED) {
+    tmp = gst_static_caps_get (&deinterlace_caps);
+    ret = gst_caps_intersect_full (caps, tmp, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (tmp);
+    tmp = NULL;
+    gst_caps_unref (caps);
+    caps = NULL;
+    goto done;
+  }
+
+  g_assert (self->mode == GST_DEINTERLACE_MODE_AUTO
+      || self->mode == GST_DEINTERLACE_MODE_AUTO_STRICT);
+
+  /* For the auto mode we have to do a bit more than that */
+  ret = gst_caps_new_empty ();
+
+  /* We can accept any structure if
+   * - they are progressive already
+   *
+   */
+  tmp = gst_static_caps_get (&progressive_caps);
+  tmp2 = gst_caps_intersect_full (caps, tmp, GST_CAPS_INTERSECT_FIRST);
+  gst_caps_unref (tmp);
+  tmp = NULL;
+  /* gst_caps_merge takes ownership of tmp2 */
+  ret = gst_caps_merge (ret, tmp2);
+  tmp2 = NULL;
+
+  /* or
+   * - they have sysmem caps features and a format for which we support
+   *   deinterlacing
+   * or
+   * - they have ANY caps features, in which case we support it for
+   *   sysmem caps features for formats we support
+   *
+   * NOTE: These are the caps where we actually would do deinterlacing
+   * ourselves. If fields == ALL we would double the framerate so would
+   * have to half the framerate constraints from downstream here
+   */
+  tmp = gst_static_caps_get (&deinterlace_caps);
+  tmp2 = gst_caps_intersect_full (caps, tmp, GST_CAPS_INTERSECT_FIRST);
+  gst_caps_unref (tmp);
+  tmp = NULL;
+
+  for (len = gst_caps_get_size (tmp2); len > 0; len--) {
+    GstStructure *s = gst_caps_get_structure (tmp2, len - 1);
+
+    /* Drop fields which can be converted by us.
+     * Specifically "field-order" here.
+     * "field-order" with "progressive" and/or
+     * unspecified "interlace-mode" would cause negotiation issue */
+    gst_structure_remove_field (s, "field-order");
+
+    if (pad == self->sinkpad) {
+      gst_structure_remove_field (s, "interlace-mode");
+    } else {
+      gst_structure_set (s, "interlace-mode", G_TYPE_STRING, "progressive",
+          NULL);
+    }
+  }
+
+  /* ALL doubles the output framerate (sink side: halves the constraint);
+   * FIELDS_AUTO additionally offers the doubled variant as an option */
+  if (self->user_set_fields == GST_DEINTERLACE_ALL) {
+    tmp2 = gst_deinterlace_caps_double_framerate (tmp2, (pad == self->sinkpad));
+  }
+  if (self->user_set_fields == GST_DEINTERLACE_FIELDS_AUTO) {
+    tmp = gst_caps_copy (tmp2);
+    tmp = gst_deinterlace_caps_double_framerate (tmp, (pad == self->sinkpad));
+  }
+
+  ret = gst_caps_merge (ret, tmp2);
+  tmp2 = NULL;
+  if (tmp != NULL) {
+    ret = gst_caps_merge (ret, tmp);
+    tmp = NULL;
+  }
+
+  /* or
+   * - anything else in which case we would just passthrough again if we're
+   *   only in AUTO and not AUTO_STRICT mode
+   */
+  if (self->mode == GST_DEINTERLACE_MODE_AUTO)
+    ret = gst_caps_merge (ret, gst_caps_copy (caps));
+
+  gst_caps_unref (caps);
+  caps = NULL;
+
+  /* on the sink pad also offer the alternate-interlaced variant of
+   * everything we can deinterlace */
+  if (pad == self->sinkpad) {
+    GstCaps *can_deinterlace;
+
+    tmp = gst_static_caps_get (&deinterlace_caps);
+    can_deinterlace = gst_caps_intersect (ret, tmp);
+    gst_caps_unref (tmp);
+
+    ret = gst_caps_merge (ret, dup_caps_with_alternate (can_deinterlace));
+    gst_caps_unref (can_deinterlace);
+  }
+
+done:
+
+  if (filter) {
+    GstCaps *tmp;
+
+    GST_LOG_OBJECT (pad, "intersecting with %" GST_PTR_FORMAT, filter);
+    tmp = gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (ret);
+    ret = tmp;
+  }
+
+  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, ret);
+
+  return ret;
+}
+
+/* Takes ownership of @pool and @allocator: the previously stored pool is
+ * deactivated and unreffed, the old allocator is unreffed, and the new
+ * pool (if any) is activated immediately.  @params is copied (or reset
+ * to defaults when NULL).  Always returns TRUE. */
+static gboolean
+gst_deinterlace_set_allocation (GstDeinterlace * self,
+    GstBufferPool * pool, GstAllocator * allocator,
+    GstAllocationParams * params)
+{
+  GstBufferPool *prev_pool;
+  GstAllocator *prev_alloc;
+
+  /* swap in the new allocation state under the object lock */
+  GST_OBJECT_LOCK (self);
+  prev_pool = self->pool;
+  self->pool = pool;
+
+  prev_alloc = self->allocator;
+  self->allocator = allocator;
+
+  if (params == NULL)
+    gst_allocation_params_init (&self->params);
+  else
+    self->params = *params;
+  GST_OBJECT_UNLOCK (self);
+
+  /* release the old state outside the lock */
+  if (prev_pool != NULL) {
+    GST_DEBUG_OBJECT (self, "deactivating old pool %p", prev_pool);
+    gst_buffer_pool_set_active (prev_pool, FALSE);
+    gst_object_unref (prev_pool);
+  }
+  if (prev_alloc != NULL)
+    gst_object_unref (prev_alloc);
+  if (pool != NULL) {
+    GST_DEBUG_OBJECT (self, "activating new pool %p", pool);
+    gst_buffer_pool_set_active (pool, TRUE);
+  }
+
+  return TRUE;
+}
+
+/* Negotiate and install a buffer pool / allocator for @outcaps on the
+ * source pad.  In passthrough mode no output buffers are ever allocated,
+ * so any previous allocation is simply released.  Otherwise an ALLOCATION
+ * query is sent downstream and the resulting (or a freshly created) pool
+ * is configured and stored.  Returns FALSE if @outcaps cannot be parsed
+ * or storing the allocation fails. */
+static gboolean
+gst_deinterlace_do_bufferpool (GstDeinterlace * self, GstCaps * outcaps)
+{
+  GstQuery *query;
+  gboolean result = TRUE;
+  GstBufferPool *pool;
+  GstAllocator *allocator;
+  GstAllocationParams params;
+  GstStructure *config;
+  guint size, min, max;
+
+  if (self->passthrough) {
+    /* we are in passthrough, the input buffer is never copied and always passed
+     * along. We never allocate an output buffer on the srcpad. What we do is
+     * let the upstream element decide if it wants to use a bufferpool and
+     * then we will proxy the downstream pool */
+    GST_DEBUG_OBJECT (self, "we're passthough, delay bufferpool");
+    gst_deinterlace_set_allocation (self, NULL, NULL, NULL);
+    return TRUE;
+  }
+
+  /* not passthrough, we need to allocate */
+  /* find a pool for the negotiated caps now */
+  GST_DEBUG_OBJECT (self, "doing allocation query");
+  query = gst_query_new_allocation (outcaps, TRUE);
+  if (!gst_pad_peer_query (self->srcpad, query)) {
+    /* not a problem, just debug a little */
+    GST_DEBUG_OBJECT (self, "peer ALLOCATION query failed");
+  }
+
+  GST_DEBUG_OBJECT (self, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, result,
+      query);
+
+  /* we got configuration from our peer or the decide_allocation method,
+   * parse them */
+  if (gst_query_get_n_allocation_params (query) > 0) {
+    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+  } else {
+    allocator = NULL;
+    gst_allocation_params_init (&params);
+  }
+
+  if (gst_query_get_n_allocation_pools (query) > 0)
+    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
+  else {
+    GstVideoInfo out_info;
+
+    /* derive a sane default pool configuration from the output caps;
+     * previously the parse result was unchecked, leaving @size garbage
+     * on invalid caps */
+    if (!gst_video_info_from_caps (&out_info, outcaps)) {
+      GST_ERROR_OBJECT (self, "Failed to parse output caps %" GST_PTR_FORMAT,
+          outcaps);
+      if (allocator)
+        gst_object_unref (allocator);
+      gst_query_unref (query);
+      return FALSE;
+    }
+
+    pool = NULL;
+    size = GST_VIDEO_INFO_SIZE (&out_info);
+    /* keep enough buffers for the field history required by the
+     * deinterlace method, with a floor of 4 */
+    min =
+        MAX ((gst_deinterlace_method_get_fields_required (self->method) +
+            1) / 2 + 1, 4);
+    max = 0;
+  }
+
+  if (pool == NULL) {
+    /* no pool, we can make our own */
+    GST_DEBUG_OBJECT (self, "no pool, making new pool");
+    pool = gst_video_buffer_pool_new ();
+  }
+
+  /* now configure */
+  config = gst_buffer_pool_get_config (pool);
+  gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
+  gst_buffer_pool_config_set_allocator (config, allocator, &params);
+  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+  gst_buffer_pool_set_config (pool, config);
+
+  /* now store; this takes ownership of pool and allocator */
+  result = gst_deinterlace_set_allocation (self, pool, allocator, &params);
+
+  gst_query_unref (query);
+
+  return result;
+}
+
+
+static gboolean
+gst_deinterlace_setcaps (GstDeinterlace * self, GstPad * pad, GstCaps * caps,
+ gboolean force)
+{
+ GstCaps *srccaps = NULL, *caps_no_feat = NULL;
+ GstVideoInterlaceMode interlacing_mode;
+ gint fps_n, fps_d;
+ GstCaps *peercaps, *current_caps;
+
+ gst_pad_check_reconfigure (self->srcpad);
+
+ /* If the force flag is set, always re-check the downstream caps,
+ * and reconfigure as the deinterlace mode has changed */
+ if (!force && (current_caps = gst_pad_get_current_caps (pad))) {
+ if (gst_caps_is_equal (caps, current_caps)) {
+ GST_DEBUG_OBJECT (pad, "Got same caps again, returning");
+ gst_caps_unref (current_caps);
+ return TRUE;
+ }
+ gst_deinterlace_reset_history (self, FALSE);
+ gst_caps_unref (current_caps);
+ }
+ peercaps = gst_pad_peer_query_caps (self->srcpad, NULL);
+
+ /* Make sure the peer caps are compatible with the template caps */
+ if (peercaps) {
+ GstCaps *tmp = gst_pad_get_pad_template_caps (self->srcpad);
+ GstCaps *tmp2 = gst_caps_intersect (peercaps, tmp);
+
+ gst_caps_unref (peercaps);
+ peercaps = NULL;
+ gst_caps_unref (tmp);
+
+ if (gst_caps_is_empty (tmp2)) {
+ gst_caps_unref (tmp2);
+ GST_ERROR_OBJECT (self, "Peer caps not compatible with template caps");
+ goto invalid_caps;
+ }
+ peercaps = tmp2;
+ }
+
+ if (self->locking != GST_DEINTERLACE_LOCKING_NONE) {
+ if (self->low_latency == -1)
+ self->low_latency = gst_deinterlace_get_latency (self);
+
+ if (self->pattern_lock) {
+ /* refresh has been successful - we have a lock now */
+ self->pattern_refresh = FALSE;
+ } else {
+ /* if we were not refreshing (!pattern_refresh) the caps have changed
+ * so we need to refresh and we don't have a lock anymore
+ * otherwise we have pattern_fresh and !pattern_lock anyway */
+ self->pattern_refresh = TRUE;
+ self->pattern_lock = FALSE;
+ }
+ }
+
+ if (!gst_video_info_from_caps (&self->vinfo, caps))
+ goto invalid_caps;
+
+ gst_video_info_set_interlaced_format (&self->vinfo_out,
+ GST_VIDEO_INFO_FORMAT (&self->vinfo),
+ GST_VIDEO_INTERLACE_MODE_PROGRESSIVE,
+ GST_VIDEO_INFO_WIDTH (&self->vinfo),
+ GST_VIDEO_INFO_HEIGHT (&self->vinfo));
+
+ if (GST_VIDEO_INFO_INTERLACE_MODE (&self->vinfo) ==
+ GST_VIDEO_INTERLACE_MODE_ALTERNATE) {
+ /* alternate interlace mode uses a caps feature, remove it when interesecting caps
+ * and setting the src pad caps. */
+ GstCapsFeatures *features;
+
+ caps_no_feat = gst_caps_copy (caps);
+
+ features = gst_caps_get_features (caps_no_feat, 0);
+ gst_caps_features_remove (features, GST_CAPS_FEATURE_FORMAT_INTERLACED);
+ } else {
+ caps_no_feat = gst_caps_ref (caps);
+ }
+
+ fps_n = GST_VIDEO_INFO_FPS_N (&self->vinfo);
+ fps_d = GST_VIDEO_INFO_FPS_D (&self->vinfo);
+
+ /* Update passthrough information */
+ if (self->mode == GST_DEINTERLACE_MODE_DISABLED) {
+ self->passthrough = TRUE;
+ GST_DEBUG_OBJECT (self, "Passthrough because mode=disabled");
+ } else if (self->mode == GST_DEINTERLACE_MODE_INTERLACED) {
+ GstCaps *tmp = gst_static_caps_get (&deinterlace_caps);
+
+ if (!gst_caps_can_intersect (caps_no_feat, tmp)) {
+ gst_caps_unref (tmp);
+ GST_ERROR_OBJECT (self, "Unsupported caps for mode=interlaced");
+ goto invalid_caps;
+ }
+
+ self->passthrough = FALSE;
+ GST_DEBUG_OBJECT (self, "Not passthrough because mode=interlaced");
+ } else if (self->mode == GST_DEINTERLACE_MODE_AUTO
+ || self->mode == GST_DEINTERLACE_MODE_AUTO_STRICT) {
+ GstCaps *tmp = gst_static_caps_get (&deinterlace_caps);
+
+ /* Already progressive? Passthrough */
+ if (!GST_VIDEO_INFO_IS_INTERLACED (&self->vinfo)) {
+ GST_DEBUG_OBJECT (self,
+ "Passthrough because mode=auto and progressive caps");
+ self->passthrough = TRUE;
+ } else if (gst_caps_can_intersect (caps_no_feat, tmp)) {
+ if (peercaps) {
+ GstCaps *allowed_caps;
+ GstCaps *tmp2;
+ GstStructure *s;
+
+ allowed_caps = gst_caps_intersect (peercaps, tmp);
+
+ tmp2 = gst_caps_copy (caps);
+ s = gst_caps_get_structure (tmp2, 0);
+ gst_structure_set (s, "interlace-mode", G_TYPE_STRING, "progressive",
+ NULL);
+ gst_structure_remove_field (s, "framerate");
+
+ /* Downstream does not support progressive caps but supports
+ * the upstream caps, go passthrough.
+ * TODO: We might want to check the framerate compatibility
+ * of the caps too here
+ */
+ if (gst_caps_can_intersect (allowed_caps, caps)
+ && !gst_caps_can_intersect (allowed_caps, tmp2)) {
+ GST_DEBUG_OBJECT (self,
+ "Passthrough because mode=auto, "
+ "downstream does not support progressive caps and interlaced caps");
+ self->passthrough = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (self, "Not passthrough because mode=auto, "
+ "downstream supports progressive caps and interlaced caps");
+ self->passthrough = FALSE;
+ }
+
+ gst_caps_unref (allowed_caps);
+ gst_caps_unref (tmp2);
+ } else {
+ GST_DEBUG_OBJECT (self,
+ "Not passthrough because mode=auto and interlaced caps");
+ self->passthrough = FALSE;
+ }
+ } else {
+ if (self->mode == GST_DEINTERLACE_MODE_AUTO) {
+ GST_WARNING_OBJECT (self,
+ "Passthrough because mode=auto and unsupported interlaced caps");
+ self->passthrough = TRUE;
+ } else {
+ gst_caps_unref (tmp);
+ GST_ERROR_OBJECT (self,
+ "Unsupported interlaced caps in mode=auto-strict");
+ goto invalid_caps;
+ }
+ }
+
+ gst_caps_unref (tmp);
+ } else {
+ g_assert_not_reached ();
+ }
+
+ interlacing_mode = GST_VIDEO_INFO_INTERLACE_MODE (&self->vinfo);
+
+ if (!self->passthrough) {
+ if (self->pattern_lock) {
+ srccaps = gst_caps_copy (caps_no_feat);
+ if (self->pattern != -1
+ && G_UNLIKELY (!gst_util_fraction_multiply (fps_n, fps_d,
+ telecine_patterns[self->pattern].ratio_n,
+ telecine_patterns[self->pattern].ratio_d, &fps_n, &fps_d)))
+ GST_ERROR_OBJECT (self,
+ "Multiplying the framerate by the telecine pattern ratio overflowed!");
+ gst_caps_set_simple (srccaps, "framerate", GST_TYPE_FRACTION, fps_n,
+ fps_d, NULL);
+ } else if (self->locking == GST_DEINTERLACE_LOCKING_ACTIVE
+ || self->low_latency == 0) {
+ /* in high latency pattern locking mode if we don't have a pattern lock,
+ * the sink pad caps are the best we know */
+ srccaps = gst_caps_copy (caps_no_feat);
+ } else if (self->low_latency > 0
+ && interlacing_mode == GST_VIDEO_INTERLACE_MODE_MIXED
+ && self->pattern == -1) {
+ /* for initial buffers of a telecine pattern, until there is a lock we
+ * we output naïvely adjusted timestamps in low-latency pattern locking
+ * mode */
+ srccaps = gst_caps_copy (caps_no_feat);
+ gst_caps_set_simple (srccaps, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
+ } else if (self->user_set_fields == GST_DEINTERLACE_FIELDS_AUTO) {
+ srccaps = gst_caps_copy (caps_no_feat);
+ if (peercaps) {
+ gboolean can_be_tf = FALSE;
+
+ /* We already know that we are not passthrough: interlace-mode will
+ * be progressive */
+ gst_caps_set_simple (srccaps, "interlace-mode", G_TYPE_STRING,
+ "progressive", NULL);
+
+ if (gst_caps_can_intersect (peercaps, srccaps)) {
+ GST_DEBUG_OBJECT (self, "Can deinterlace top fields");
+ can_be_tf = TRUE;
+ }
+ srccaps = gst_deinterlace_caps_double_framerate (srccaps, FALSE);
+ if (!gst_caps_can_intersect (peercaps, srccaps)) {
+ if (can_be_tf) {
+ GST_DEBUG_OBJECT (self, "Will deinterlace top fields");
+ gst_caps_set_simple (srccaps, "framerate", GST_TYPE_FRACTION, fps_n,
+ fps_d, NULL);
+ self->fields = GST_DEINTERLACE_TF;
+ } else {
+ GST_DEBUG_OBJECT (self,
+ "Can't negotiate upstream and downstream caps");
+ gst_caps_unref (srccaps);
+ goto invalid_caps;
+ }
+ } else {
+ GST_DEBUG_OBJECT (self, "Deinterlacing all fields");
+ self->fields = GST_DEINTERLACE_ALL;
+ }
+ } else {
+ GST_DEBUG_OBJECT (self,
+ "No peer caps yet, falling back to deinterlacing all fields");
+ self->fields = GST_DEINTERLACE_ALL;
+ srccaps = gst_deinterlace_caps_double_framerate (srccaps, FALSE);
+ }
+ } else {
+ self->fields = self->user_set_fields;
+ srccaps = gst_caps_copy (caps_no_feat);
+ if (self->fields == GST_DEINTERLACE_ALL)
+ srccaps = gst_deinterlace_caps_double_framerate (srccaps, FALSE);
+ }
+
+ /* If not passthrough, we are going to output progressive content */
+ gst_caps_set_simple (srccaps, "interlace-mode", G_TYPE_STRING,
+ "progressive", NULL);
+
+ {
+ GstStructure *s = gst_caps_get_structure (srccaps, 0);
+ gst_structure_remove_field (s, "field-order");
+ }
+
+ gst_deinterlace_set_method (self, self->method_id);
+ gst_deinterlace_method_setup (self->method, &self->vinfo);
+ } else {
+ srccaps = gst_caps_ref (caps_no_feat);
+ }
+
+ if (fps_n != 0) {
+ self->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, 2 * fps_n);
+ } else {
+ self->field_duration = 0;
+ }
+
+ GST_DEBUG_OBJECT (pad, "Sink caps: %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (pad, "Src caps: %" GST_PTR_FORMAT, srccaps);
+
+ if (!gst_pad_set_caps (self->srcpad, srccaps))
+ goto set_caps_failed;
+
+ if (!gst_deinterlace_do_bufferpool (self, srccaps))
+ goto no_bufferpool;
+
+ if (peercaps)
+ gst_caps_unref (peercaps);
+ gst_caps_unref (srccaps);
+ g_clear_pointer (&caps_no_feat, gst_caps_unref);
+
+ return TRUE;
+
+invalid_caps:
+ {
+ if (peercaps)
+ gst_caps_unref (peercaps);
+ g_clear_pointer (&caps_no_feat, gst_caps_unref);
+ GST_ERROR_OBJECT (pad, "Invalid caps: %" GST_PTR_FORMAT, caps);
+ gst_pad_mark_reconfigure (self->srcpad);
+ return FALSE;
+ }
+set_caps_failed:
+ {
+ GST_INFO_OBJECT (pad, "Failed to set caps: %" GST_PTR_FORMAT, srccaps);
+ if (peercaps)
+ gst_caps_unref (peercaps);
+ gst_caps_unref (srccaps);
+ g_clear_pointer (&caps_no_feat, gst_caps_unref);
+ gst_pad_mark_reconfigure (self->srcpad);
+ return FALSE;
+ }
+no_bufferpool:
+ {
+ GST_ERROR_OBJECT (pad, "could not negotiate bufferpool");
+ if (peercaps)
+ gst_caps_unref (peercaps);
+ gst_caps_unref (srccaps);
+ g_clear_pointer (&caps_no_feat, gst_caps_unref);
+ gst_pad_mark_reconfigure (self->srcpad);
+ return FALSE;
+ }
+}
+
/* Sink pad event handler.  Updates the deinterlacer's internal state from
 * upstream events (caps, segment, still-frame, EOS, flush) and forwards
 * them downstream where appropriate. */
static gboolean
gst_deinterlace_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  gboolean res = TRUE;
  GstDeinterlace *self = GST_DEINTERLACE (parent);

  GST_LOG_OBJECT (pad, "received %s event: %" GST_PTR_FORMAT,
      GST_EVENT_TYPE_NAME (event), event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps = NULL;

      gst_event_parse_caps (event, &caps);
      res = gst_deinterlace_setcaps (self, pad, caps, FALSE);
      /* setcaps negotiates and sets the src pad caps itself, so the
       * incoming caps event is consumed here rather than forwarded. */
      gst_event_unref (event);
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      const GstSegment *segment;

      gst_event_parse_segment (event, &segment);

      /* A new segment invalidates QoS feedback and the field history. */
      gst_deinterlace_reset_qos (self);
      gst_deinterlace_reset_history (self, FALSE);

      if (segment->format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (pad,
            "Got SEGMENT event in TIME format, passing on (%"
            GST_TIME_FORMAT " - %" GST_TIME_FORMAT ")",
            GST_TIME_ARGS (segment->start), GST_TIME_ARGS (segment->stop));
        gst_segment_copy_into (segment, &self->segment);
      } else {
        /* Non-TIME segments cannot be used for timestamp arithmetic;
         * record that by resetting our segment to UNDEFINED. */
        GST_WARNING_OBJECT (pad, "Got SEGMENT event in %s format",
            gst_format_get_name (segment->format));
        gst_segment_init (&self->segment, GST_FORMAT_UNDEFINED);
      }

      res = gst_pad_push_event (self->srcpad, event);
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:{
      gboolean still_state;

      /* Still-frame custom events toggle still_frame_mode. */
      if (gst_video_event_parse_still_frame (event, &still_state)) {
        GST_DEBUG_OBJECT (self, "Received still frame event, state %d",
            still_state);

        if (still_state) {
          GstFlowReturn ret;

          GST_DEBUG_OBJECT (self, "Handling still frame");
          self->still_frame_mode = TRUE;
          gst_deinterlace_reset_history (self, FALSE);
          if (self->last_buffer) {
            /* Re-push the last buffer so downstream has a frame to show
             * during the still period. */
            ret =
                gst_pad_push (self->srcpad, gst_buffer_ref (self->last_buffer));
            GST_DEBUG_OBJECT (self, "Pushed still frame, result: %s",
                gst_flow_get_name (ret));
          } else {
            GST_WARNING_OBJECT (self, "No pending buffer!");
          }
        } else {
          GST_DEBUG_OBJECT (self, "Ending still frames");
          self->still_frame_mode = FALSE;
        }
      }

      res = gst_pad_push_event (self->srcpad, event);
      break;
    }
    case GST_EVENT_EOS:
      self->have_eos = TRUE;
      gst_deinterlace_reset_history (self, FALSE);
      res = gst_pad_push_event (self->srcpad, event);
      break;

    case GST_EVENT_FLUSH_STOP:
      if (self->still_frame_mode) {
        GST_DEBUG_OBJECT (self, "Ending still frames");
        self->still_frame_mode = FALSE;
      }
      self->telecine_tc_warned = FALSE;
      gst_deinterlace_reset_qos (self);
      /* Forward the flush first, then drop the now-stale field history. */
      res = gst_pad_push_event (self->srcpad, event);
      gst_deinterlace_reset_history (self, TRUE);
      break;

    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

  return res;
}
+
/* Answer an ALLOCATION query on the sink pad by proposing a video buffer
 * pool sized for the queried caps.
 * Returns FALSE when the query carries no caps or the caps cannot be
 * parsed into a GstVideoInfo. */
static gboolean
gst_deinterlace_propose_allocation (GstDeinterlace * self, GstQuery * query)
{
  GstBufferPool *pool;
  GstCaps *caps;
  GstVideoInfo info;
  guint size;
  GstStructure *config;

  gst_query_parse_allocation (query, &caps, NULL);

  if (caps == NULL)
    return FALSE;

  if (!gst_video_info_from_caps (&info, caps))
    return FALSE;

  size = GST_VIDEO_INFO_SIZE (&info);

  pool = gst_video_buffer_pool_new ();

  gst_query_add_allocation_pool (query, pool, size, 0, 0);

  /* Minimum buffers: one frame per two fields of required history, plus
   * one frame of headroom. */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size,
      (gst_deinterlace_method_get_fields_required (self->method) + 1) / 2 + 1,
      0);
  /* NOTE(review): the return value of gst_buffer_pool_set_config() is
   * ignored; the pool may reject or adjust this configuration. */
  gst_buffer_pool_set_config (pool, config);

  gst_object_unref (pool);
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  return TRUE;
}
+
+static gboolean
+gst_deinterlace_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstDeinterlace *self = GST_DEINTERLACE (parent);
+ gboolean res = FALSE;
+
+ GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_deinterlace_getcaps (self, pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_ACCEPT_CAPS:
+ {
+ GstCaps *caps;
+ gboolean ret;
+
+ gst_query_parse_accept_caps (query, &caps);
+ ret = gst_deinterlace_acceptcaps (self, pad, caps);
+ gst_query_set_accept_caps_result (query, ret);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_ALLOCATION:
+ if (self->passthrough)
+ res = gst_pad_peer_query (self->srcpad, query);
+ else
+ res = gst_deinterlace_propose_allocation (self, query);
+ break;
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
+static GstStateChangeReturn
+gst_deinterlace_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstDeinterlace *self = GST_DEINTERLACE (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret != GST_STATE_CHANGE_SUCCESS)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_deinterlace_reset (self);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_deinterlace_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstDeinterlace *self = GST_DEINTERLACE (parent);
+ gboolean res;
+
+ GST_DEBUG_OBJECT (pad, "received %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_QOS:{
+ GstClockTimeDiff diff;
+ GstClockTime timestamp;
+ GstQOSType type;
+ gdouble proportion;
+
+ gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
+
+ gst_deinterlace_update_qos (self, proportion, diff, timestamp);
+ }
+ /* fall through */
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return res;
+}
+
/* Src pad query handler.  Only LATENCY gets special treatment: when
 * actively deinterlacing, the latency implied by the required field
 * history and the method's own latency is added to the upstream peer's
 * answer. */
static gboolean
gst_deinterlace_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstDeinterlace *self = GST_DEINTERLACE (parent);
  gboolean res = FALSE;

  GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:
      if (!self->passthrough) {
        GstClockTime min, max;
        gboolean live;
        GstPad *peer;

        if ((peer = gst_pad_get_peer (self->sinkpad))) {
          if ((res = gst_pad_query (peer, query))) {
            GstClockTime latency;
            gint fields_required = 0;
            gint method_latency = 0;

            if (self->method) {
              fields_required =
                  gst_deinterlace_method_get_fields_required (self->method);
              method_latency =
                  gst_deinterlace_method_get_latency (self->method);
            }

            gst_query_parse_latency (query, &live, &min, &max);

            GST_DEBUG_OBJECT (self, "Peer latency: min %"
                GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
                GST_TIME_ARGS (min), GST_TIME_ARGS (max));

            /* add our own latency */
            latency = (fields_required + method_latency) * self->field_duration;

            GST_DEBUG_OBJECT (self, "Our latency: min %" GST_TIME_FORMAT
                ", max %" GST_TIME_FORMAT,
                GST_TIME_ARGS (latency), GST_TIME_ARGS (latency));

            min += latency;
            if (max != GST_CLOCK_TIME_NONE)
              max += latency;

            GST_DEBUG_OBJECT (self, "Calculated total latency : min %"
                GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
                GST_TIME_ARGS (min), GST_TIME_ARGS (max));

            gst_query_set_latency (query, live, min, max);
          }
          gst_object_unref (peer);
        } else {
          /* No upstream peer: we cannot answer the query. */
          res = FALSE;
        }
        break;
      }
      /* Passthrough adds no latency: fall through to default handling. */
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }

  return res;
}
+
+
/* Register the "deinterlace" element; also initializes the debug category
 * and, when built with Orc support, the Orc runtime. */
static gboolean
deinterlace_element_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (deinterlace_debug, "deinterlace", 0, "Deinterlacer");

#if HAVE_ORC
  orc_init ();
#endif

  return gst_element_register (plugin, "deinterlace", GST_RANK_NONE,
      GST_TYPE_DEINTERLACE);
}
+
/* Plugin entry point: the plugin provides a single element. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  return GST_ELEMENT_REGISTER (deinterlace, plugin);
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    deinterlace,
    "Deinterlacer", plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME,
    GST_PACKAGE_ORIGIN);
diff --git a/gst/deinterlace/gstdeinterlace.h b/gst/deinterlace/gstdeinterlace.h
new file mode 100644
index 0000000000..e3cd4b2088
--- /dev/null
+++ b/gst/deinterlace/gstdeinterlace.h
@@ -0,0 +1,213 @@
+/*
+ * GStreamer
+ * Copyright (C) 2005 Martin Eikermann <meiker@upb.de>
+ * Copyright (C) 2008-2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_DEINTERLACE_H__
+#define __GST_DEINTERLACE_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideopool.h>
+#include <gst/video/gstvideometa.h>
+
+#include "gstdeinterlacemethod.h"
+
+G_BEGIN_DECLS
+
/* Standard GObject boilerplate for GstDeinterlace. */
#define GST_TYPE_DEINTERLACE \
  (gst_deinterlace_get_type())
#define GST_DEINTERLACE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DEINTERLACE,GstDeinterlace))
/* Cast to the class structure — previously this cast to the instance
 * type (GstDeinterlace), which yields a wrongly-typed pointer. */
#define GST_DEINTERLACE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DEINTERLACE,GstDeinterlaceClass))
#define GST_IS_DEINTERLACE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DEINTERLACE))
/* The parameter was previously named `obj` while the expansion used
 * `klass`, so any use of this macro referenced an undefined identifier. */
#define GST_IS_DEINTERLACE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DEINTERLACE))
+
typedef struct _GstDeinterlace GstDeinterlace;
typedef struct _GstDeinterlaceClass GstDeinterlaceClass;

/* Deinterlacing algorithm to use. */
typedef enum
{
  GST_DEINTERLACE_TOMSMOCOMP,
  GST_DEINTERLACE_GREEDY_H,
  GST_DEINTERLACE_GREEDY_L,
  GST_DEINTERLACE_VFIR,
  GST_DEINTERLACE_LINEAR,
  GST_DEINTERLACE_LINEAR_BLEND,
  GST_DEINTERLACE_SCALER_BOB,
  GST_DEINTERLACE_WEAVE,
  GST_DEINTERLACE_WEAVE_TFF,
  GST_DEINTERLACE_WEAVE_BFF,
  GST_DEINTERLACE_YADIF
} GstDeinterlaceMethods;

/* Which fields are turned into output frames. */
typedef enum
{
  GST_DEINTERLACE_ALL,          /* All (missing data is interp.) */
  GST_DEINTERLACE_TF,           /* Top Fields Only */
  GST_DEINTERLACE_BF,           /* Bottom Fields Only */
  GST_DEINTERLACE_FIELDS_AUTO   /* Automatically detect */
} GstDeinterlaceFields;

/* Assumed field order when the stream does not signal one. */
typedef enum
{
  GST_DEINTERLACE_LAYOUT_AUTO,
  GST_DEINTERLACE_LAYOUT_TFF,
  GST_DEINTERLACE_LAYOUT_BFF
} GstDeinterlaceFieldLayout;

/* When deinterlacing is applied: driven by the caps (auto), always,
 * never, or caps-driven with an error on unsupported interlaced caps
 * (auto-strict). */
typedef enum {
  GST_DEINTERLACE_MODE_AUTO,
  GST_DEINTERLACE_MODE_INTERLACED,
  GST_DEINTERLACE_MODE_DISABLED,
  GST_DEINTERLACE_MODE_AUTO_STRICT
} GstDeinterlaceMode;

/* Telecine pattern locking strategy. */
typedef enum
{
  GST_DEINTERLACE_LOCKING_NONE,
  GST_DEINTERLACE_LOCKING_AUTO,
  GST_DEINTERLACE_LOCKING_ACTIVE,
  GST_DEINTERLACE_LOCKING_PASSIVE,
} GstDeinterlaceLocking;
+
#define GST_DEINTERLACE_MAX_FIELD_HISTORY 10
#define GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY 50
/* check max field history is large enough */
#if GST_DEINTERLACE_MAX_FIELD_HISTORY < GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY * 3
#undef GST_DEINTERLACE_MAX_FIELD_HISTORY
#define GST_DEINTERLACE_MAX_FIELD_HISTORY (GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY * 3)
#endif

/* One entry of the telecine pattern table: a human-readable nick, the
 * pattern length in buffer states, the output/input frame-rate ratio
 * (applied to the negotiated framerate in setcaps) and the expected
 * sequence of buffer states. */
typedef struct _TelecinePattern TelecinePattern;
struct _TelecinePattern
{
  const gchar *nick;
  guint8 length;
  guint8 ratio_n, ratio_d;
  guint8 states[GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY];
};

/* Timestamp, duration and interlacing state recorded per input buffer,
 * used for telecine pattern matching. */
typedef struct _GstDeinterlaceBufferState GstDeinterlaceBufferState;
struct _GstDeinterlaceBufferState
{
  GstClockTime timestamp;
  GstClockTime duration;
  guint8 state;
};
+
/* Instance structure: negotiation state, the field history used for
 * deinterlacing proper, QoS bookkeeping and telecine pattern-locking
 * state. */
struct _GstDeinterlace
{
  GstElement parent;

  GstPad *srcpad, *sinkpad;

  /* <private> */
  /* Operating mode (auto / interlaced / disabled / auto-strict). */
  GstDeinterlaceMode mode;

  /* Assumed field order when not signalled in the stream. */
  GstDeinterlaceFieldLayout field_layout;

  /* Fields currently being output; may be adjusted during caps
   * negotiation (see gst_deinterlace_setcaps). */
  GstDeinterlaceFields fields;

  /* Fields selection as configured by the user (presumably the "fields"
   * property — set outside this file). */
  GstDeinterlaceFields user_set_fields;

  /* current state (differs when flushing/inverse telecine using weave) */
  GstDeinterlaceMethods method_id;
  /* property value */
  GstDeinterlaceMethods user_set_method_id;
  GstDeinterlaceMethod *method;

  /* Input video info; vinfo_out is presumably the output side — confirm
   * in the .c file. */
  GstVideoInfo vinfo;
  GstVideoInfo vinfo_out;
  /* Downstream allocation objects (negotiated outside this header). */
  GstBufferPool *pool;
  GstAllocator *allocator;
  GstAllocationParams params;

  /* TRUE when buffers are forwarded without deinterlacing. */
  gboolean passthrough;
  gboolean discont;

  GstClockTime field_duration; /* Duration of one field */

  /* The most recent pictures
     PictureHistory[0] is always the most recent.
     Pointers are NULL if the picture in question isn't valid, e.g. because
     the program just started or a picture was skipped.
   */
  GstDeinterlaceField field_history[GST_DEINTERLACE_MAX_FIELD_HISTORY];
  guint history_count;
  int cur_field_idx;

  /* Set to TRUE if we're in still frame mode,
     i.e. just forward all buffers
   */
  gboolean still_frame_mode;

  /* Last buffer that was pushed in */
  GstBuffer *last_buffer;

  /* Current segment */
  GstSegment segment;

  /* QoS stuff */
  gdouble proportion;
  GstClockTime earliest_time;
  gint64 processed;
  gint64 dropped;

  GstCaps *request_caps;

  /* Deferred reconfiguration values (NOTE(review): where they are applied
   * is outside this chunk — confirm in the property setter). */
  gboolean reconfigure;
  GstDeinterlaceMode new_mode;
  GstDeinterlaceFields new_fields;

  /* Telecine pattern locking state. */
  GstDeinterlaceLocking locking;
  gint low_latency;
  gboolean drop_orphans;
  gboolean ignore_obscure;
  gboolean pattern_lock;
  gboolean pattern_refresh;
  GstDeinterlaceBufferState buf_states[GST_DEINTERLACE_MAX_BUFFER_STATE_HISTORY];
  gint state_count;
  /* Index into the telecine pattern table, or -1 when no pattern is
   * locked. */
  gint pattern;
  guint8 pattern_phase;
  guint8 pattern_count;
  guint8 output_count;
  GstClockTime pattern_base_ts;
  GstClockTime pattern_buf_dur;

  gboolean need_more;
  gboolean have_eos;
  gboolean telecine_tc_warned;
};
+
/* Class structure: no virtual methods beyond GstElement's. */
struct _GstDeinterlaceClass
{
  GstElementClass parent_class;
};

GType gst_deinterlace_get_type (void);

GST_ELEMENT_REGISTER_DECLARE (deinterlace);

G_END_DECLS

#endif /* __GST_DEINTERLACE_H__ */
diff --git a/gst/deinterlace/gstdeinterlacemethod.c b/gst/deinterlace/gstdeinterlacemethod.c
new file mode 100644
index 0000000000..24893f8dfa
--- /dev/null
+++ b/gst/deinterlace/gstdeinterlacemethod.c
@@ -0,0 +1,857 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008-2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstdeinterlacemethod.h"
+
G_DEFINE_ABSTRACT_TYPE (GstDeinterlaceMethod, gst_deinterlace_method,
    GST_TYPE_OBJECT);

/* Check whether the method class registered as @type can handle @format
 * at @width x @height.  GST_VIDEO_FORMAT_UNKNOWN is always reported as
 * supported. */
gboolean
gst_deinterlace_method_supported (GType type, GstVideoFormat format, gint width,
    gint height)
{
  GstDeinterlaceMethodClass *klass =
      GST_DEINTERLACE_METHOD_CLASS (g_type_class_ref (type));
  gboolean ret;

  if (format == GST_VIDEO_FORMAT_UNKNOWN)
    ret = TRUE;
  else
    ret = klass->supported (klass, format, width, height);
  /* Balance the g_type_class_ref() above. */
  g_type_class_unref (klass);

  return ret;
}
+
/* Default implementation of the supported() vfunc: a format is supported
 * iff the subclass provided a deinterlace function for it.  The x/A
 * variants of each RGB layout share one function pointer (presumably the
 * fourth byte is handled identically — confirm in the methods). */
static gboolean
gst_deinterlace_method_supported_impl (GstDeinterlaceMethodClass * klass,
    GstVideoFormat format, gint width, gint height)
{
  switch (format) {
    case GST_VIDEO_FORMAT_YUY2:
      return (klass->deinterlace_frame_yuy2 != NULL);
    case GST_VIDEO_FORMAT_YVYU:
      return (klass->deinterlace_frame_yvyu != NULL);
    case GST_VIDEO_FORMAT_UYVY:
      return (klass->deinterlace_frame_uyvy != NULL);
    case GST_VIDEO_FORMAT_I420:
      return (klass->deinterlace_frame_i420 != NULL);
    case GST_VIDEO_FORMAT_YV12:
      return (klass->deinterlace_frame_yv12 != NULL);
    case GST_VIDEO_FORMAT_Y444:
      return (klass->deinterlace_frame_y444 != NULL);
    case GST_VIDEO_FORMAT_Y42B:
      return (klass->deinterlace_frame_y42b != NULL);
    case GST_VIDEO_FORMAT_Y41B:
      return (klass->deinterlace_frame_y41b != NULL);
    case GST_VIDEO_FORMAT_AYUV:
      return (klass->deinterlace_frame_ayuv != NULL);
    case GST_VIDEO_FORMAT_NV12:
      return (klass->deinterlace_frame_nv12 != NULL);
    case GST_VIDEO_FORMAT_NV21:
      return (klass->deinterlace_frame_nv21 != NULL);
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xRGB:
      return (klass->deinterlace_frame_argb != NULL);
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_xBGR:
      return (klass->deinterlace_frame_abgr != NULL);
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_RGBx:
      return (klass->deinterlace_frame_rgba != NULL);
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_BGRx:
      return (klass->deinterlace_frame_bgra != NULL);
    case GST_VIDEO_FORMAT_RGB:
      return (klass->deinterlace_frame_rgb != NULL);
    case GST_VIDEO_FORMAT_BGR:
      return (klass->deinterlace_frame_bgr != NULL);
    default:
      return FALSE;
  }
}
+
+void
+gst_deinterlace_method_setup (GstDeinterlaceMethod * self, GstVideoInfo * vinfo)
+{
+ GstDeinterlaceMethodClass *klass = GST_DEINTERLACE_METHOD_GET_CLASS (self);
+
+ klass->setup (self, vinfo);
+}
+
/* Default setup() vfunc: remember @vinfo and select the subclass'
 * deinterlace function matching the video format.  deinterlace_frame
 * stays NULL for unknown or unsupported formats. */
static void
gst_deinterlace_method_setup_impl (GstDeinterlaceMethod * self,
    GstVideoInfo * vinfo)
{
  GstDeinterlaceMethodClass *klass = GST_DEINTERLACE_METHOD_GET_CLASS (self);

  self->vinfo = vinfo;

  self->deinterlace_frame = NULL;

  /* Early exit for UNKNOWN (the switch default would leave the function
   * pointer NULL anyway). */
  if (GST_VIDEO_INFO_FORMAT (self->vinfo) == GST_VIDEO_FORMAT_UNKNOWN)
    return;

  switch (GST_VIDEO_INFO_FORMAT (self->vinfo)) {
    case GST_VIDEO_FORMAT_YUY2:
      self->deinterlace_frame = klass->deinterlace_frame_yuy2;
      break;
    case GST_VIDEO_FORMAT_YVYU:
      self->deinterlace_frame = klass->deinterlace_frame_yvyu;
      break;
    case GST_VIDEO_FORMAT_UYVY:
      self->deinterlace_frame = klass->deinterlace_frame_uyvy;
      break;
    case GST_VIDEO_FORMAT_I420:
      self->deinterlace_frame = klass->deinterlace_frame_i420;
      break;
    case GST_VIDEO_FORMAT_YV12:
      self->deinterlace_frame = klass->deinterlace_frame_yv12;
      break;
    case GST_VIDEO_FORMAT_Y444:
      self->deinterlace_frame = klass->deinterlace_frame_y444;
      break;
    case GST_VIDEO_FORMAT_Y42B:
      self->deinterlace_frame = klass->deinterlace_frame_y42b;
      break;
    case GST_VIDEO_FORMAT_Y41B:
      self->deinterlace_frame = klass->deinterlace_frame_y41b;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      self->deinterlace_frame = klass->deinterlace_frame_ayuv;
      break;
    case GST_VIDEO_FORMAT_NV12:
      self->deinterlace_frame = klass->deinterlace_frame_nv12;
      break;
    case GST_VIDEO_FORMAT_NV21:
      self->deinterlace_frame = klass->deinterlace_frame_nv21;
      break;
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xRGB:
      self->deinterlace_frame = klass->deinterlace_frame_argb;
      break;
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_xBGR:
      self->deinterlace_frame = klass->deinterlace_frame_abgr;
      break;
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_RGBx:
      self->deinterlace_frame = klass->deinterlace_frame_rgba;
      break;
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_BGRx:
      self->deinterlace_frame = klass->deinterlace_frame_bgra;
      break;
    case GST_VIDEO_FORMAT_RGB:
      self->deinterlace_frame = klass->deinterlace_frame_rgb;
      break;
    case GST_VIDEO_FORMAT_BGR:
      self->deinterlace_frame = klass->deinterlace_frame_bgr;
      break;
    default:
      self->deinterlace_frame = NULL;
      break;
  }
}
+
static void
gst_deinterlace_method_class_init (GstDeinterlaceMethodClass * klass)
{
  /* Defaults dispatch on the per-format function pointers subclasses
   * fill in; subclasses may also override both vfuncs entirely. */
  klass->setup = gst_deinterlace_method_setup_impl;
  klass->supported = gst_deinterlace_method_supported_impl;
}

static void
gst_deinterlace_method_init (GstDeinterlaceMethod * self)
{
  /* No video info until setup() is called. */
  self->vinfo = NULL;
}
+
+void
+gst_deinterlace_method_deinterlace_frame (GstDeinterlaceMethod * self,
+ const GstDeinterlaceField * history, guint history_count,
+ GstVideoFrame * outframe, int cur_field_idx)
+{
+ g_assert (self->deinterlace_frame != NULL);
+ self->deinterlace_frame (self, history, history_count, outframe,
+ cur_field_idx);
+}
+
+gint
+gst_deinterlace_method_get_fields_required (GstDeinterlaceMethod * self)
+{
+ GstDeinterlaceMethodClass *klass = GST_DEINTERLACE_METHOD_GET_CLASS (self);
+
+ return klass->fields_required;
+}
+
+gint
+gst_deinterlace_method_get_latency (GstDeinterlaceMethod * self)
+{
+ GstDeinterlaceMethodClass *klass = GST_DEINTERLACE_METHOD_GET_CLASS (self);
+
+ return klass->latency;
+}
+
G_DEFINE_ABSTRACT_TYPE (GstDeinterlaceSimpleMethod,
    gst_deinterlace_simple_method, GST_TYPE_DEINTERLACE_METHOD);

/* supported() for scanline-based methods: on top of the base-class check,
 * the subclass must provide both an interpolate and a copy scanline
 * function for the format.  Semi-planar NV12/NV21 additionally need the
 * planar Y functions; fully planar YUV needs the planar Y, U and V
 * functions. */
static gboolean
gst_deinterlace_simple_method_supported (GstDeinterlaceMethodClass * mklass,
    GstVideoFormat format, gint width, gint height)
{
  GstDeinterlaceSimpleMethodClass *klass =
      GST_DEINTERLACE_SIMPLE_METHOD_CLASS (mklass);

  if (!GST_DEINTERLACE_METHOD_CLASS
      (gst_deinterlace_simple_method_parent_class)->supported (mklass, format,
          width, height))
    return FALSE;

  switch (format) {
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_xRGB:
      return (klass->interpolate_scanline_argb != NULL
          && klass->copy_scanline_argb != NULL);
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_RGBx:
      return (klass->interpolate_scanline_rgba != NULL
          && klass->copy_scanline_rgba != NULL);
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_xBGR:
      return (klass->interpolate_scanline_abgr != NULL
          && klass->copy_scanline_abgr != NULL);
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_BGRx:
      return (klass->interpolate_scanline_bgra != NULL
          && klass->copy_scanline_bgra != NULL);
    case GST_VIDEO_FORMAT_RGB:
      return (klass->interpolate_scanline_rgb != NULL
          && klass->copy_scanline_rgb != NULL);
    case GST_VIDEO_FORMAT_BGR:
      return (klass->interpolate_scanline_bgr != NULL
          && klass->copy_scanline_bgr != NULL);
    case GST_VIDEO_FORMAT_YUY2:
      return (klass->interpolate_scanline_yuy2 != NULL
          && klass->copy_scanline_yuy2 != NULL);
    case GST_VIDEO_FORMAT_YVYU:
      return (klass->interpolate_scanline_yvyu != NULL
          && klass->copy_scanline_yvyu != NULL);
    case GST_VIDEO_FORMAT_UYVY:
      return (klass->interpolate_scanline_uyvy != NULL
          && klass->copy_scanline_uyvy != NULL);
    case GST_VIDEO_FORMAT_AYUV:
      return (klass->interpolate_scanline_ayuv != NULL
          && klass->copy_scanline_ayuv != NULL);
    case GST_VIDEO_FORMAT_NV12:
      return (klass->interpolate_scanline_nv12 != NULL
          && klass->copy_scanline_nv12 != NULL
          && klass->interpolate_scanline_planar_y != NULL
          && klass->copy_scanline_planar_y != NULL);
    case GST_VIDEO_FORMAT_NV21:
      return (klass->interpolate_scanline_nv21 != NULL
          && klass->copy_scanline_nv21 != NULL
          && klass->interpolate_scanline_planar_y != NULL
          && klass->copy_scanline_planar_y != NULL);
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_Y41B:
      return (klass->interpolate_scanline_planar_y != NULL
          && klass->copy_scanline_planar_y != NULL &&
          klass->interpolate_scanline_planar_u != NULL
          && klass->copy_scanline_planar_u != NULL &&
          klass->interpolate_scanline_planar_v != NULL
          && klass->copy_scanline_planar_v != NULL);
    default:
      return FALSE;
  }
}
+
/* Default packed-format scanline functions (plain weave).  The scanline
 * pointers follow the naming used where they are filled in
 * gst_deinterlace_simple_method_deinterlace_frame_packed(). */

/* Interpolate a missing line: take the same line from the field at
 * history offset +1 (scanlines->m1). */
static void
    gst_deinterlace_simple_method_interpolate_scanline_packed
    (GstDeinterlaceSimpleMethod * self, guint8 * out,
    const GstDeinterlaceScanlineData * scanlines, guint stride)
{
  memcpy (out, scanlines->m1, stride);
}

/* Copy a line belonging to the current field unchanged (scanlines->m0). */
static void
gst_deinterlace_simple_method_copy_scanline_packed (GstDeinterlaceSimpleMethod *
    self, guint8 * out, const GstDeinterlaceScanlineData * scanlines,
    guint stride)
{
  memcpy (out, scanlines->m0, stride);
}
+
+typedef struct
+{
+ const GstDeinterlaceField *history;
+ guint history_count;
+ gint cur_field_idx;
+} LinesGetter;
+
+#define CLAMP_LOW(i) (((i)<0) ? (i+2) : (i))
+#define CLAMP_HI(i) (((i)>=(frame_height)) ? (i-2) : (i))
+
/* Return a pointer to the start of line (@line + @line_offset) of @plane
 * in the history frame at @field_offset relative to the current field, or
 * NULL when that history entry does not exist.  The final line index is
 * clamped into the frame by CLAMP_LOW/CLAMP_HI (reflection by two lines,
 * preserving field parity). */
static guint8 *
get_line (LinesGetter * lg, gint field_offset, guint plane, gint line,
    gint line_offset)
{
  const GstVideoFrame *frame;
  gint idx, frame_height;
  guint8 *data;

  idx = lg->cur_field_idx + field_offset;
  if (idx < 0 || idx >= lg->history_count)
    return NULL;

  frame = lg->history[idx].frame;
  g_assert (frame);

  /* Now frame already refers to the field we want, the correct one is taken
   * from the history */
  if (GST_VIDEO_INFO_INTERLACE_MODE (&frame->info) ==
      GST_VIDEO_INTERLACE_MODE_ALTERNATE) {
    /* Alternate frame containing a single field, adjust the line index */
    line /= 2;
    switch (line_offset) {
      case -2:
      case 2:
        /* +/-2 full-frame lines is +/-1 line within the single field. */
        line_offset /= 2;
        break;
      case 1:
        /* the "next" line of a top field line is the same line of a bottom
         * field */
        if (!GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF))
          line_offset = 0;
        break;
      case -1:
        /* the "previous" line of a bottom field line is the same line of a
         * top field */
        if (GST_VIDEO_FRAME_FLAG_IS_SET (frame, GST_VIDEO_FRAME_FLAG_TFF))
          line_offset = 0;
        break;
      case 0:
        break;
      default:
        g_assert_not_reached ();
        break;
    }
  }

  /* frame_height is read textually by the CLAMP_HI macro below. */
  frame_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, plane);
  line += line_offset;

  data = GST_VIDEO_FRAME_PLANE_DATA ((frame), plane);
  data += CLAMP_HI (CLAMP_LOW (line)) * GST_VIDEO_FRAME_PLANE_STRIDE ((frame),
      plane);

  return data;
}
+
+static void
+gst_deinterlace_simple_method_deinterlace_frame_packed (GstDeinterlaceMethod *
+ method, const GstDeinterlaceField * history, guint history_count,
+ GstVideoFrame * outframe, gint cur_field_idx)
+{
+ GstDeinterlaceSimpleMethod *self = GST_DEINTERLACE_SIMPLE_METHOD (method);
+#ifndef G_DISABLE_ASSERT
+ GstDeinterlaceMethodClass *dm_class = GST_DEINTERLACE_METHOD_GET_CLASS (self);
+#endif
+ GstDeinterlaceScanlineData scanlines;
+ guint cur_field_flags;
+ gint i;
+ gint frame_height, frame_width;
+ LinesGetter lg = { history, history_count, cur_field_idx };
+ GstVideoFrame *framep, *frame0, *frame1, *frame2;
+
+ g_assert (self->interpolate_scanline_packed != NULL);
+ g_assert (self->copy_scanline_packed != NULL);
+
+ frame_height = GST_VIDEO_FRAME_HEIGHT (outframe);
+ frame_width = GST_VIDEO_FRAME_PLANE_STRIDE (outframe, 0);
+
+ frame0 = history[cur_field_idx].frame;
+ frame_width = MIN (frame_width, GST_VIDEO_FRAME_PLANE_STRIDE (frame0, 0));
+ cur_field_flags = history[cur_field_idx].flags;
+
+ framep = (cur_field_idx > 0 ? history[cur_field_idx - 1].frame : NULL);
+ if (framep)
+ frame_width = MIN (frame_width, GST_VIDEO_FRAME_PLANE_STRIDE (framep, 0));
+
+ g_assert (dm_class->fields_required <= 5);
+
+ frame1 =
+ (cur_field_idx + 1 <
+ history_count ? history[cur_field_idx + 1].frame : NULL);
+ if (frame1)
+ frame_width = MIN (frame_width, GST_VIDEO_FRAME_PLANE_STRIDE (frame1, 0));
+
+ frame2 =
+ (cur_field_idx + 2 <
+ history_count ? history[cur_field_idx + 2].frame : NULL);
+ if (frame2)
+ frame_width = MIN (frame_width, GST_VIDEO_FRAME_PLANE_STRIDE (frame2, 0));
+
+#define LINE(x,i) (((guint8*)GST_VIDEO_FRAME_PLANE_DATA((x),0)) + i * \
+ GST_VIDEO_FRAME_PLANE_STRIDE((x),0))
+
+ for (i = 0; i < frame_height; i++) {
+ memset (&scanlines, 0, sizeof (scanlines));
+ scanlines.bottom_field = (cur_field_flags == PICTURE_INTERLACED_BOTTOM);
+
+ if (!((i & 1) ^ scanlines.bottom_field)) {
+ /* copying */
+ scanlines.tp = get_line (&lg, -1, 0, i, -1);
+ scanlines.bp = get_line (&lg, -1, 0, i, 1);
+
+ scanlines.tt0 = get_line (&lg, 0, 0, i, -2);
+ scanlines.m0 = get_line (&lg, 0, 0, i, 0);
+ scanlines.bb0 = get_line (&lg, 0, 0, i, 2);
+
+ scanlines.t1 = get_line (&lg, 1, 0, i, -1);
+ scanlines.b1 = get_line (&lg, 1, 0, i, 1);
+
+ scanlines.tt2 = get_line (&lg, 2, 0, i, -2);
+ scanlines.m2 = get_line (&lg, 2, 0, i, 0);
+ scanlines.bb2 = get_line (&lg, 2, 0, i, 2);
+
+ self->copy_scanline_packed (self, LINE (outframe, i), &scanlines,
+ frame_width);
+ } else {
+ /* interpolating */
+ scanlines.tp2 = get_line (&lg, -2, 0, i, -1);
+ scanlines.bp2 = get_line (&lg, -2, 0, i, 1);
+
+ scanlines.ttp = get_line (&lg, -1, 0, i, -2);
+ scanlines.mp = get_line (&lg, -1, 0, i, 0);
+ scanlines.bbp = get_line (&lg, -1, 0, i, 2);
+
+ scanlines.t0 = get_line (&lg, 0, 0, i, -1);
+ scanlines.b0 = get_line (&lg, 0, 0, i, 1);
+
+ scanlines.tt1 = get_line (&lg, 1, 0, i, -2);
+ scanlines.m1 = get_line (&lg, 1, 0, i, 0);
+ scanlines.bb1 = get_line (&lg, 1, 0, i, 2);
+
+ scanlines.t2 = get_line (&lg, 2, 0, i, -1);
+ scanlines.b2 = get_line (&lg, 2, 0, i, 1);
+
+ self->interpolate_scanline_packed (self, LINE (outframe, i), &scanlines,
+ frame_width);
+ }
+#undef LINE
+ }
+}
+
+/* Default planar-Y "interpolate" implementation: writes the matching
+ * line (m1) of the closest available field, i.e. plain line doubling. */
+static void
+ gst_deinterlace_simple_method_interpolate_scanline_planar_y
+ (GstDeinterlaceSimpleMethod * self, guint8 * out,
+ const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ memcpy (out, scanlines->m1, size);
+}
+
+/* Default planar-Y "copy" implementation: the output line belongs to the
+ * current field, so its own data (m0) is copied through unchanged. */
+static void
+gst_deinterlace_simple_method_copy_scanline_planar_y (GstDeinterlaceSimpleMethod
+ * self, guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint
+ size)
+{
+ memcpy (out, scanlines->m0, size);
+}
+
+/* Default planar-U "interpolate" implementation: line doubling from the
+ * closest field (m1), identical to the Y-plane default. */
+static void
+ gst_deinterlace_simple_method_interpolate_scanline_planar_u
+ (GstDeinterlaceSimpleMethod * self, guint8 * out,
+ const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ memcpy (out, scanlines->m1, size);
+}
+
+/* Default planar-U "copy" implementation: pass the current field's own
+ * line (m0) through unchanged. */
+static void
+gst_deinterlace_simple_method_copy_scanline_planar_u (GstDeinterlaceSimpleMethod
+ * self, guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint
+ size)
+{
+ memcpy (out, scanlines->m0, size);
+}
+
+/* Default planar-V "interpolate" implementation: line doubling from the
+ * closest field (m1), identical to the Y-plane default. */
+static void
+ gst_deinterlace_simple_method_interpolate_scanline_planar_v
+ (GstDeinterlaceSimpleMethod * self, guint8 * out,
+ const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ memcpy (out, scanlines->m1, size);
+}
+
+/* Default planar-V "copy" implementation: pass the current field's own
+ * line (m0) through unchanged. */
+static void
+gst_deinterlace_simple_method_copy_scanline_planar_v (GstDeinterlaceSimpleMethod
+ * self, guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint
+ size)
+{
+ memcpy (out, scanlines->m0, size);
+}
+
+/* Deinterlace one plane of @dest.  For each output row: if the row's
+ * parity matches the current field, the line already exists and is
+ * handed to @copy_scanline; otherwise it belongs to the missing field
+ * and is produced by @interpolate_scanline.  Both callbacks receive the
+ * neighbouring lines of up to five history fields via the scanlines
+ * struct (see the slot diagrams in gstdeinterlacemethod.h). */
+static void
+ gst_deinterlace_simple_method_deinterlace_frame_planar_plane
+ (GstDeinterlaceSimpleMethod * self, GstVideoFrame * dest,
+ LinesGetter * lg,
+ guint cur_field_flags, gint plane,
+ GstDeinterlaceSimpleMethodFunction copy_scanline,
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline)
+{
+ GstDeinterlaceScanlineData scanlines;
+ gint i;
+ gint frame_height, frame_width;
+
+ frame_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, plane);
+ /* row width of this component in bytes (pixels * pixel stride) */
+ frame_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, plane) *
+ GST_VIDEO_FRAME_COMP_PSTRIDE (dest, plane);
+
+ g_assert (interpolate_scanline != NULL);
+ g_assert (copy_scanline != NULL);
+
+/* address of line i of this plane in frame x */
+#define LINE(x,i) (((guint8*)GST_VIDEO_FRAME_PLANE_DATA((x),plane)) + i * \
+ GST_VIDEO_FRAME_PLANE_STRIDE((x),plane))
+
+ for (i = 0; i < frame_height; i++) {
+ memset (&scanlines, 0, sizeof (scanlines));
+ scanlines.bottom_field = (cur_field_flags == PICTURE_INTERLACED_BOTTOM);
+
+ /* row parity matches the current field -> data exists, just copy */
+ if (!((i & 1) ^ scanlines.bottom_field)) {
+ /* copying */
+ scanlines.tp = get_line (lg, -1, plane, i, -1);
+ scanlines.bp = get_line (lg, -1, plane, i, 1);
+
+ scanlines.tt0 = get_line (lg, 0, plane, i, -2);
+ scanlines.m0 = get_line (lg, 0, plane, i, 0);
+ scanlines.bb0 = get_line (lg, 0, plane, i, 2);
+
+ scanlines.t1 = get_line (lg, 1, plane, i, -1);
+ scanlines.b1 = get_line (lg, 1, plane, i, 1);
+
+ scanlines.tt2 = get_line (lg, 2, plane, i, -2);
+ scanlines.m2 = get_line (lg, 2, plane, i, 0);
+ scanlines.bb2 = get_line (lg, 2, plane, i, 2);
+
+ copy_scanline (self, LINE (dest, i), &scanlines, frame_width);
+ } else {
+ /* interpolating */
+ scanlines.tp2 = get_line (lg, -2, plane, i, -1);
+ scanlines.bp2 = get_line (lg, -2, plane, i, 1);
+
+ scanlines.ttp = get_line (lg, -1, plane, i, -2);
+ scanlines.mp = get_line (lg, -1, plane, i, 0);
+ scanlines.bbp = get_line (lg, -1, plane, i, 2);
+
+ scanlines.t0 = get_line (lg, 0, plane, i, -1);
+ scanlines.b0 = get_line (lg, 0, plane, i, 1);
+
+ scanlines.tt1 = get_line (lg, 1, plane, i, -2);
+ scanlines.m1 = get_line (lg, 1, plane, i, 0);
+ scanlines.bb1 = get_line (lg, 1, plane, i, 2);
+
+ scanlines.t2 = get_line (lg, 2, plane, i, -1);
+ scanlines.b2 = get_line (lg, 2, plane, i, 1);
+
+ interpolate_scanline (self, LINE (dest, i), &scanlines, frame_width);
+ }
+#undef LINE
+ }
+}
+
+/* GstDeinterlaceMethod frame handler for fully planar formats
+ * (I420, YV12, Y444, Y42B, Y41B): runs the per-plane simple
+ * deinterlacer over the three video planes with the callbacks
+ * selected in setup(). */
+static void
+gst_deinterlace_simple_method_deinterlace_frame_planar (GstDeinterlaceMethod *
+ method, const GstDeinterlaceField * history, guint history_count,
+ GstVideoFrame * outframe, gint cur_field_idx)
+{
+ GstDeinterlaceSimpleMethod *self = GST_DEINTERLACE_SIMPLE_METHOD (method);
+#ifndef G_DISABLE_ASSERT
+ /* only fetched for the fields_required assertion below */
+ GstDeinterlaceMethodClass *dm_class = GST_DEINTERLACE_METHOD_GET_CLASS (self);
+#endif
+ guint cur_field_flags = history[cur_field_idx].flags;
+ gint i;
+ GstDeinterlaceSimpleMethodFunction copy_scanline;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline;
+ LinesGetter lg = { history, history_count, cur_field_idx };
+
+ g_assert (self->interpolate_scanline_planar[0] != NULL);
+ g_assert (self->interpolate_scanline_planar[1] != NULL);
+ g_assert (self->interpolate_scanline_planar[2] != NULL);
+ g_assert (self->copy_scanline_planar[0] != NULL);
+ g_assert (self->copy_scanline_planar[1] != NULL);
+ g_assert (self->copy_scanline_planar[2] != NULL);
+ /* the scanline data only carries lines for up to 5 fields of history */
+ g_assert (dm_class->fields_required <= 5);
+
+ for (i = 0; i < 3; i++) {
+ copy_scanline = self->copy_scanline_planar[i];
+ interpolate_scanline = self->interpolate_scanline_planar[i];
+
+ gst_deinterlace_simple_method_deinterlace_frame_planar_plane (self,
+ outframe, &lg, cur_field_flags, i, copy_scanline, interpolate_scanline);
+ }
+}
+
+/* GstDeinterlaceMethod frame handler for semi-planar NV12/NV21:
+ * the Y plane is processed with the planar-Y callbacks, the
+ * interleaved chroma plane with the packed callbacks. */
+static void
+gst_deinterlace_simple_method_deinterlace_frame_nv12 (GstDeinterlaceMethod *
+ method, const GstDeinterlaceField * history, guint history_count,
+ GstVideoFrame * outframe, gint cur_field_idx)
+{
+ GstDeinterlaceSimpleMethod *self = GST_DEINTERLACE_SIMPLE_METHOD (method);
+#ifndef G_DISABLE_ASSERT
+ /* only fetched for the fields_required assertion below */
+ GstDeinterlaceMethodClass *dm_class = GST_DEINTERLACE_METHOD_GET_CLASS (self);
+#endif
+ guint cur_field_flags = history[cur_field_idx].flags;
+ LinesGetter lg = { history, history_count, cur_field_idx, };
+
+ /* Y plane is at position 0 */
+ g_assert (self->interpolate_scanline_packed != NULL);
+ g_assert (self->copy_scanline_packed != NULL);
+ g_assert (self->interpolate_scanline_planar[0] != NULL);
+ g_assert (self->copy_scanline_planar[0] != NULL);
+ g_assert (dm_class->fields_required <= 5);
+
+ /* Y plane first, then UV/VU plane */
+ gst_deinterlace_simple_method_deinterlace_frame_planar_plane (self,
+ outframe, &lg, cur_field_flags, 0,
+ self->copy_scanline_planar[0], self->interpolate_scanline_planar[0]);
+ gst_deinterlace_simple_method_deinterlace_frame_planar_plane (self,
+ outframe, &lg, cur_field_flags, 1,
+ self->copy_scanline_packed, self->interpolate_scanline_packed);
+}
+
+/* GstDeinterlaceMethod::setup implementation: chain up to the parent,
+ * reset all scanline function pointers, then select the packed and/or
+ * planar callbacks from the class vtable that match the negotiated
+ * video format.  Formats not listed below leave all pointers NULL. */
+static void
+gst_deinterlace_simple_method_setup (GstDeinterlaceMethod * method,
+ GstVideoInfo * vinfo)
+{
+ GstDeinterlaceSimpleMethod *self = GST_DEINTERLACE_SIMPLE_METHOD (method);
+ GstDeinterlaceSimpleMethodClass *klass =
+ GST_DEINTERLACE_SIMPLE_METHOD_GET_CLASS (self);
+
+ GST_DEINTERLACE_METHOD_CLASS
+ (gst_deinterlace_simple_method_parent_class)->setup (method, vinfo);
+
+ self->interpolate_scanline_packed = NULL;
+ self->copy_scanline_packed = NULL;
+
+ self->interpolate_scanline_planar[0] = NULL;
+ self->interpolate_scanline_planar[1] = NULL;
+ self->interpolate_scanline_planar[2] = NULL;
+ self->copy_scanline_planar[0] = NULL;
+ self->copy_scanline_planar[1] = NULL;
+ self->copy_scanline_planar[2] = NULL;
+
+ if (GST_VIDEO_INFO_FORMAT (vinfo) == GST_VIDEO_FORMAT_UNKNOWN)
+ return;
+
+ switch (GST_VIDEO_INFO_FORMAT (vinfo)) {
+ case GST_VIDEO_FORMAT_YUY2:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_yuy2;
+ self->copy_scanline_packed = klass->copy_scanline_yuy2;
+ break;
+ case GST_VIDEO_FORMAT_YVYU:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_yvyu;
+ self->copy_scanline_packed = klass->copy_scanline_yvyu;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_uyvy;
+ self->copy_scanline_packed = klass->copy_scanline_uyvy;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_ayuv;
+ self->copy_scanline_packed = klass->copy_scanline_ayuv;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_xRGB:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_argb;
+ self->copy_scanline_packed = klass->copy_scanline_argb;
+ break;
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_xBGR:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_abgr;
+ self->copy_scanline_packed = klass->copy_scanline_abgr;
+ break;
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_RGBx:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_rgba;
+ self->copy_scanline_packed = klass->copy_scanline_rgba;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ case GST_VIDEO_FORMAT_BGRx:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_bgra;
+ self->copy_scanline_packed = klass->copy_scanline_bgra;
+ break;
+ case GST_VIDEO_FORMAT_RGB:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_rgb;
+ self->copy_scanline_packed = klass->copy_scanline_rgb;
+ break;
+ case GST_VIDEO_FORMAT_BGR:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_bgr;
+ self->copy_scanline_packed = klass->copy_scanline_bgr;
+ break;
+ /* semi-planar: planar callbacks for Y, packed for the chroma plane */
+ case GST_VIDEO_FORMAT_NV12:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_nv12;
+ self->copy_scanline_packed = klass->copy_scanline_nv12;
+ self->interpolate_scanline_planar[0] =
+ klass->interpolate_scanline_planar_y;
+ self->copy_scanline_planar[0] = klass->copy_scanline_planar_y;
+ break;
+ case GST_VIDEO_FORMAT_NV21:
+ self->interpolate_scanline_packed = klass->interpolate_scanline_nv21;
+ self->copy_scanline_packed = klass->copy_scanline_nv21;
+ self->interpolate_scanline_planar[0] =
+ klass->interpolate_scanline_planar_y;
+ self->copy_scanline_planar[0] = klass->copy_scanline_planar_y;
+ break;
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y444:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_Y41B:
+ self->interpolate_scanline_planar[0] =
+ klass->interpolate_scanline_planar_y;
+ self->copy_scanline_planar[0] = klass->copy_scanline_planar_y;
+ self->interpolate_scanline_planar[1] =
+ klass->interpolate_scanline_planar_u;
+ self->copy_scanline_planar[1] = klass->copy_scanline_planar_u;
+ self->interpolate_scanline_planar[2] =
+ klass->interpolate_scanline_planar_v;
+ self->copy_scanline_planar[2] = klass->copy_scanline_planar_v;
+ break;
+ default:
+ break;
+ }
+}
+
+/* Class init: wire every supported format's frame handler to the
+ * generic packed/planar/nv12 implementations, and install the default
+ * scanline callbacks (direct copy / line doubling) that subclasses may
+ * override per format. */
+static void
+gst_deinterlace_simple_method_class_init (GstDeinterlaceSimpleMethodClass
+ * klass)
+{
+ GstDeinterlaceMethodClass *dm_class = (GstDeinterlaceMethodClass *) klass;
+
+ dm_class->deinterlace_frame_ayuv =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_yuy2 =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_yvyu =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_uyvy =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_argb =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_abgr =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_rgba =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_bgra =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_rgb =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_bgr =
+ gst_deinterlace_simple_method_deinterlace_frame_packed;
+ dm_class->deinterlace_frame_i420 =
+ gst_deinterlace_simple_method_deinterlace_frame_planar;
+ dm_class->deinterlace_frame_yv12 =
+ gst_deinterlace_simple_method_deinterlace_frame_planar;
+ dm_class->deinterlace_frame_y444 =
+ gst_deinterlace_simple_method_deinterlace_frame_planar;
+ dm_class->deinterlace_frame_y42b =
+ gst_deinterlace_simple_method_deinterlace_frame_planar;
+ dm_class->deinterlace_frame_y41b =
+ gst_deinterlace_simple_method_deinterlace_frame_planar;
+ dm_class->deinterlace_frame_nv12 =
+ gst_deinterlace_simple_method_deinterlace_frame_nv12;
+ dm_class->deinterlace_frame_nv21 =
+ gst_deinterlace_simple_method_deinterlace_frame_nv12;
+ dm_class->fields_required = 2;
+ dm_class->setup = gst_deinterlace_simple_method_setup;
+ dm_class->supported = gst_deinterlace_simple_method_supported;
+
+ /* default packed-format callbacks */
+ klass->interpolate_scanline_yuy2 =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_yuy2 =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_yvyu =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_yvyu =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_ayuv =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_ayuv =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_uyvy =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_uyvy =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_nv12 =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_nv12 =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+
+ klass->interpolate_scanline_argb =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_argb =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_abgr =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_abgr =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+
+ klass->interpolate_scanline_rgba =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_rgba =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_bgra =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_bgra =
+ gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_rgb =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_rgb = gst_deinterlace_simple_method_copy_scanline_packed;
+ klass->interpolate_scanline_bgr =
+ gst_deinterlace_simple_method_interpolate_scanline_packed;
+ klass->copy_scanline_bgr = gst_deinterlace_simple_method_copy_scanline_packed;
+
+ /* default planar callbacks */
+ klass->interpolate_scanline_planar_y =
+ gst_deinterlace_simple_method_interpolate_scanline_planar_y;
+ klass->copy_scanline_planar_y =
+ gst_deinterlace_simple_method_copy_scanline_planar_y;
+ klass->interpolate_scanline_planar_u =
+ gst_deinterlace_simple_method_interpolate_scanline_planar_u;
+ klass->copy_scanline_planar_u =
+ gst_deinterlace_simple_method_copy_scanline_planar_u;
+ klass->interpolate_scanline_planar_v =
+ gst_deinterlace_simple_method_interpolate_scanline_planar_v;
+ klass->copy_scanline_planar_v =
+ gst_deinterlace_simple_method_copy_scanline_planar_v;
+}
+
+/* Instance init: nothing to do here, per-format state is configured
+ * later in gst_deinterlace_simple_method_setup(). */
+static void
+gst_deinterlace_simple_method_init (GstDeinterlaceSimpleMethod * self)
+{
+}
diff --git a/gst/deinterlace/gstdeinterlacemethod.h b/gst/deinterlace/gstdeinterlacemethod.h
new file mode 100644
index 0000000000..6c421c1e7b
--- /dev/null
+++ b/gst/deinterlace/gstdeinterlacemethod.h
@@ -0,0 +1,222 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008-2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_DEINTERLACE_METHOD_H__
+#define __GST_DEINTERLACE_METHOD_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#if defined(HAVE_GCC_ASM) && defined(HAVE_ORC)
+#if defined(HAVE_CPU_I386) || defined(HAVE_CPU_X86_64)
+#define BUILD_X86_ASM
+#endif
+#endif
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_DEINTERLACE_METHOD (gst_deinterlace_method_get_type ())
+#define GST_IS_DEINTERLACE_METHOD(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD))
+#define GST_IS_DEINTERLACE_METHOD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD))
+#define GST_DEINTERLACE_METHOD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD, GstDeinterlaceMethodClass))
+#define GST_DEINTERLACE_METHOD(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD, GstDeinterlaceMethod))
+#define GST_DEINTERLACE_METHOD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD, GstDeinterlaceMethodClass))
+#define GST_DEINTERLACE_METHOD_CAST(obj) ((GstDeinterlaceMethod*)(obj))
+
+typedef struct _GstDeinterlaceMethod GstDeinterlaceMethod;
+typedef struct _GstDeinterlaceMethodClass GstDeinterlaceMethodClass;
+
+
+#define PICTURE_PROGRESSIVE 0
+#define PICTURE_INTERLACED_BOTTOM 1
+#define PICTURE_INTERLACED_TOP 2
+#define PICTURE_INTERLACED_MASK (PICTURE_INTERLACED_BOTTOM | PICTURE_INTERLACED_TOP)
+
+/* One field of video history handed to the deinterlace methods. */
+typedef struct
+{
+ GstVideoFrame *frame;
+ /* see PICTURE_ flags in *.c */
+ guint flags;
+ /* timecode meta carried with this field, if any */
+ GstVideoTimeCode *tc;
+ /* closed-caption meta carried with this field, if any */
+ GstVideoCaptionMeta *caption;
+} GstDeinterlaceField;
+
+/*
+ * This structure defines the deinterlacer plugin.
+ */
+
+typedef void (*GstDeinterlaceMethodDeinterlaceFunction) (
+ GstDeinterlaceMethod *self, const GstDeinterlaceField *history,
+ guint history_count, GstVideoFrame *outframe, int cur_field_idx);
+
+/* Base method instance: holds the negotiated video info and the frame
+ * handler selected for the current format. */
+struct _GstDeinterlaceMethod {
+ GstObject parent;
+
+ GstVideoInfo *vinfo;
+
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame;
+};
+
+/* Base method class: per-format frame handlers plus method metadata. */
+struct _GstDeinterlaceMethodClass {
+ GstObjectClass parent_class;
+ /* number of history fields the method needs to produce output */
+ guint fields_required;
+ /* latency introduced by the method (presumably counted in fields —
+  * TODO confirm against gst_deinterlace_method_get_latency() callers) */
+ guint latency;
+
+ gboolean (*supported) (GstDeinterlaceMethodClass *klass, GstVideoFormat format, gint width, gint height);
+
+ void (*setup) (GstDeinterlaceMethod *self, GstVideoInfo * vinfo);
+
+ /* one frame handler per supported video format */
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_yuy2;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_yvyu;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_uyvy;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_i420;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_yv12;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_y444;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_y42b;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_y41b;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_ayuv;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_nv12;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_nv21;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_argb;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_abgr;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_rgba;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_bgra;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_rgb;
+ GstDeinterlaceMethodDeinterlaceFunction deinterlace_frame_bgr;
+
+ /* human-readable name and short nick of the method */
+ const gchar *name;
+ const gchar *nick;
+};
+
+GType gst_deinterlace_method_get_type (void);
+
+gboolean gst_deinterlace_method_supported (GType type, GstVideoFormat format, gint width, gint height);
+void gst_deinterlace_method_setup (GstDeinterlaceMethod * self, GstVideoInfo * vinfo);
+void gst_deinterlace_method_deinterlace_frame (GstDeinterlaceMethod * self, const GstDeinterlaceField * history, guint history_count, GstVideoFrame * outframe,
+ int cur_field_idx);
+gint gst_deinterlace_method_get_fields_required (GstDeinterlaceMethod * self);
+gint gst_deinterlace_method_get_latency (GstDeinterlaceMethod * self);
+
+#define GST_TYPE_DEINTERLACE_SIMPLE_METHOD (gst_deinterlace_simple_method_get_type ())
+#define GST_IS_DEINTERLACE_SIMPLE_METHOD(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_SIMPLE_METHOD))
+#define GST_IS_DEINTERLACE_SIMPLE_METHOD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_SIMPLE_METHOD))
+#define GST_DEINTERLACE_SIMPLE_METHOD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_SIMPLE_METHOD, GstDeinterlaceSimpleMethodClass))
+#define GST_DEINTERLACE_SIMPLE_METHOD(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_SIMPLE_METHOD, GstDeinterlaceSimpleMethod))
+#define GST_DEINTERLACE_SIMPLE_METHOD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_SIMPLE_METHOD, GstDeinterlaceSimpleMethodClass))
+#define GST_DEINTERLACE_SIMPLE_METHOD_CAST(obj) ((GstDeinterlaceSimpleMethod*)(obj))
+
+typedef struct _GstDeinterlaceSimpleMethod GstDeinterlaceSimpleMethod;
+typedef struct _GstDeinterlaceSimpleMethodClass GstDeinterlaceSimpleMethodClass;
+typedef struct _GstDeinterlaceScanlineData GstDeinterlaceScanlineData;
+
+/*
+ * This structure defines the simple deinterlacer plugin.
+ */
+
+/* Scanline pointers handed to the simple-method callbacks.  Naming:
+ * t/m/b = top/middle/bottom neighbour line (doubled letters are two
+ * lines away), the digit is the history field index and 'p' denotes
+ * the field(s) after the current one — see the diagrams below.
+ * Slots not applicable to the current operation are NULL. */
+struct _GstDeinterlaceScanlineData {
+ const guint8 *ttp, *tp, *mp, *bp, *bbp;
+ const guint8 *tt0, *t0, *m0, *b0, *bb0;
+ const guint8 *tt1, *t1, *m1, *b1, *bb1;
+ const guint8 *tt2, *t2, *m2, *b2, *bb2;
+ const guint8 *tp2, *bp2;
+ /* TRUE when the current field is a bottom field */
+ gboolean bottom_field;
+};
+
+/*
+ * For interpolate_scanline the input is:
+ *
+ * | t-3 t-2 t-1 t t+1 t+2
+ * | Field 3 | Field 2 | Field 1 | Field 0 | Field -1 | Field -2
+ * | TT3 | | TT1 | | TTp |
+ * | | T2 | | T0 | | Tp2
+ * | M3 | | M1 | | Mp |
+ * | | B2 | | B0 | | Bp2
+ * | BB3 | | BB1 | | BBp |
+ *
+ * For copy_scanline the input is:
+ *
+ * | t-3 t-2 t-1 t t+1
+ * | Field 3 | Field 2 | Field 1 | Field 0 | Field -1
+ * | | TT2 | | TT0 |
+ * | T3 | | T1 | | Tp
+ * | | M2 | | M0 |
+ * | B3 | | B1 | | Bp
+ * | | BB2 | | BB0 |
+ *
+ * All other values are NULL.
+ */
+
+typedef void (*GstDeinterlaceSimpleMethodFunction) (GstDeinterlaceSimpleMethod *self, guint8 *out, const GstDeinterlaceScanlineData *scanlines, guint size);
+
+/* Simple-method instance: the scanline callbacks selected by setup()
+ * for the negotiated format (left NULL when the format does not use
+ * the packed or planar path respectively). */
+struct _GstDeinterlaceSimpleMethod {
+ GstDeinterlaceMethod parent;
+
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_packed;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_packed;
+
+ /* indexed by plane: 0 = Y, 1 = U, 2 = V */
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_planar[3];
+ GstDeinterlaceSimpleMethodFunction copy_scanline_planar[3];
+};
+
+/* Simple-method class vtable: per-format scanline callbacks that
+ * subclasses override; defaults are installed in class_init. */
+struct _GstDeinterlaceSimpleMethodClass {
+ GstDeinterlaceMethodClass parent_class;
+
+ /* Packed formats */
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_yuy2;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_yuy2;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_yvyu;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_yvyu;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_uyvy;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_uyvy;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_ayuv;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_ayuv;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_argb;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_argb;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_abgr;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_abgr;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_rgba;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_rgba;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_bgra;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_bgra;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_rgb;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_rgb;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_bgr;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_bgr;
+
+ /* Semi-planar formats */
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_nv12;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_nv12;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_nv21;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_nv21;
+
+ /* Planar formats */
+ GstDeinterlaceSimpleMethodFunction copy_scanline_planar_y;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_planar_y;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_planar_u;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_planar_u;
+ GstDeinterlaceSimpleMethodFunction copy_scanline_planar_v;
+ GstDeinterlaceSimpleMethodFunction interpolate_scanline_planar_v;
+};
+
+GType gst_deinterlace_simple_method_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_DEINTERLACE_METHOD_H__ */
diff --git a/gst/deinterlace/meson.build b/gst/deinterlace/meson.build
new file mode 100644
index 0000000000..e86bebe613
--- /dev/null
+++ b/gst/deinterlace/meson.build
@@ -0,0 +1,101 @@
+# Sources of the deinterlace plugin: the element and method base classes
+# plus the tvtime and yadif method implementations.
+interlace_sources = [
+ 'gstdeinterlace.c',
+ 'gstdeinterlacemethod.c',
+ 'tvtime/tomsmocomp.c',
+ 'tvtime/greedy.c',
+ 'tvtime/greedyh.c',
+ 'tvtime/vfir.c',
+ 'tvtime/weavetff.c',
+ 'tvtime/weavebff.c',
+ 'tvtime/weave.c',
+ 'tvtime/linear.c',
+ 'tvtime/linearblend.c',
+ 'tvtime/scalerbob.c',
+ 'yadif.c'
+]
+
+# Generate the Orc code from tvtime.orc when the orcc compiler is
+# available; otherwise fall back to the pre-generated tvtime-dist.[ch]
+# copies shipped in the tree.
+orcsrc = 'tvtime'
+if have_orcc
+ orc_h = custom_target(orcsrc + '.h',
+ input : orcsrc + '.orc',
+ output : orcsrc + '.h',
+ command : orcc_args + ['--header', '-o', '@OUTPUT@', '@INPUT@'])
+ orc_c = custom_target(orcsrc + '.c',
+ input : orcsrc + '.orc',
+ output : orcsrc + '.c',
+ command : orcc_args + ['--implementation', '-o', '@OUTPUT@', '@INPUT@'])
+ orc_targets += {'name': orcsrc, 'orc-source': files(orcsrc + '.orc'), 'header': orc_h, 'source': orc_c}
+else
+ orc_h = configure_file(input : orcsrc + '-dist.h',
+ output : orcsrc + '.h',
+ copy : true)
+ orc_c = configure_file(input : orcsrc + '-dist.c',
+ output : orcsrc + '.c',
+ copy : true)
+endif
+
+# Optional hand-written x86-64 assembly (x86/yadif.asm) built with nasm;
+# when nasm is missing or the host is not x86-64 no objects are added.
+asm_gen_objs = []
+if have_nasm and host_cpu == 'x86_64'
+ if host_system == 'windows'
+ outputname = '@PLAINNAME@.obj'
+ else
+ outputname = '@PLAINNAME@.o'
+ endif
+
+ if get_option('b_staticpic')
+ asm_pic_def = '-DPIC'
+ else
+ asm_pic_def = '-UPIC'
+ endif
+
+ # Assembly has to be told when the symbols have to be prefixed with _
+ # Note that symbols_have_underscore_prefix does not work properly on macos
+ # if the compiler -g flag is used. See:
+ # https://github.com/mesonbuild/meson/issues/5482
+ if ['darwin', 'ios'].contains(host_system)
+ asm_prefix_def = '-DPREFIX'
+ elif cc.symbols_have_underscore_prefix()
+ asm_prefix_def = '-DPREFIX'
+ else
+ asm_prefix_def = '-UPREFIX'
+ endif
+
+ asm_arch_def = '-DARCH_X86_64=1'
+ # object format depends on the target OS
+ if host_system == 'windows'
+ asm_outformat = 'win64'
+ elif ['darwin', 'ios'].contains(host_system)
+ asm_outformat = 'macho64'
+ else
+ asm_outformat = 'elf64'
+ endif
+ asm_x = files('x86/yadif.asm')
+
+ asm_stackalign_def = '-DSTACK_ALIGNMENT=64'
+ asm_incdir = 'x86'
+
+ message('Nasm configured on x86-64')
+ asm_gen = generator(nasm,
+ output: outputname,
+ arguments: ['-I@CURRENT_SOURCE_DIR@',
+ '-I@CURRENT_SOURCE_DIR@/@0@/'.format(asm_incdir),
+ asm_arch_def,
+ asm_stackalign_def,
+ asm_pic_def,
+ asm_prefix_def,
+ '-f', asm_outformat,
+ '-o', '@OUTPUT@',
+ '@INPUT@'])
+ asm_gen_objs = asm_gen.process(asm_x)
+endif
+
+# Build the plugin library from the C, generated-orc and assembly pieces,
+# install it, generate its pkg-config file and register it in the global
+# plugins list.
+gstdeinterlace = library('gstdeinterlace',
+ interlace_sources, asm_gen_objs, orc_c, orc_h,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [orc_dep, gstbase_dep, gstvideo_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstdeinterlace, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstdeinterlace]
+
diff --git a/gst/deinterlace/tvtime-dist.c b/gst/deinterlace/tvtime-dist.c
new file mode 100644
index 0000000000..47c1056e37
--- /dev/null
+++ b/gst/deinterlace/tvtime-dist.c
@@ -0,0 +1,1119 @@
+
+/* autogenerated from tvtime.orc */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <glib.h>
+
+#ifndef _ORC_INTEGER_TYPEDEFS_
+#define _ORC_INTEGER_TYPEDEFS_
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+typedef int8_t orc_int8;
+typedef int16_t orc_int16;
+typedef int32_t orc_int32;
+typedef int64_t orc_int64;
+typedef uint8_t orc_uint8;
+typedef uint16_t orc_uint16;
+typedef uint32_t orc_uint32;
+typedef uint64_t orc_uint64;
+#define ORC_UINT64_C(x) UINT64_C(x)
+#elif defined(_MSC_VER)
+typedef signed __int8 orc_int8;
+typedef signed __int16 orc_int16;
+typedef signed __int32 orc_int32;
+typedef signed __int64 orc_int64;
+typedef unsigned __int8 orc_uint8;
+typedef unsigned __int16 orc_uint16;
+typedef unsigned __int32 orc_uint32;
+typedef unsigned __int64 orc_uint64;
+#define ORC_UINT64_C(x) (x##Ui64)
+#define inline __inline
+#else
+#include <limits.h>
+typedef signed char orc_int8;
+typedef short orc_int16;
+typedef int orc_int32;
+typedef unsigned char orc_uint8;
+typedef unsigned short orc_uint16;
+typedef unsigned int orc_uint32;
+#if INT_MAX == LONG_MAX
+typedef long long orc_int64;
+typedef unsigned long long orc_uint64;
+#define ORC_UINT64_C(x) (x##ULL)
+#else
+typedef long orc_int64;
+typedef unsigned long orc_uint64;
+#define ORC_UINT64_C(x) (x##UL)
+#endif
+#endif
+typedef union
+{
+ orc_int16 i;
+ orc_int8 x2[2];
+} orc_union16;
+typedef union
+{
+ orc_int32 i;
+ float f;
+ orc_int16 x2[2];
+ orc_int8 x4[4];
+} orc_union32;
+typedef union
+{
+ orc_int64 i;
+ double f;
+ orc_int32 x2[2];
+ float x2f[2];
+ orc_int16 x4[4];
+} orc_union64;
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#ifndef ORC_INTERNAL
+#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
+#define ORC_INTERNAL __hidden
+#elif defined (__GNUC__)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#else
+#define ORC_INTERNAL
+#endif
+#endif
+
+
+#ifndef DISABLE_ORC
+#include <orc/orc.h>
+#endif
+void deinterlace_line_vfir (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2,
+ const guint8 * ORC_RESTRICT s3, const guint8 * ORC_RESTRICT s4,
+ const guint8 * ORC_RESTRICT s5, int n);
+void deinterlace_line_linear (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2, int n);
+void deinterlace_line_linear_blend (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2,
+ const guint8 * ORC_RESTRICT s3, int n);
+void deinterlace_line_greedy (orc_uint8 * ORC_RESTRICT d1,
+ const orc_uint8 * ORC_RESTRICT s1, const orc_uint8 * ORC_RESTRICT s2,
+ const orc_uint8 * ORC_RESTRICT s3, const orc_uint8 * ORC_RESTRICT s4,
+ int p1, int n);
+
+
+/* begin Orc C target preamble */
+#define ORC_CLAMP(x,a,b) ((x)<(a) ? (a) : ((x)>(b) ? (b) : (x)))
+#define ORC_ABS(a) ((a)<0 ? -(a) : (a))
+#define ORC_MIN(a,b) ((a)<(b) ? (a) : (b))
+#define ORC_MAX(a,b) ((a)>(b) ? (a) : (b))
+#define ORC_SB_MAX 127
+#define ORC_SB_MIN (-1-ORC_SB_MAX)
+#define ORC_UB_MAX (orc_uint8) 255
+#define ORC_UB_MIN 0
+#define ORC_SW_MAX 32767
+#define ORC_SW_MIN (-1-ORC_SW_MAX)
+#define ORC_UW_MAX (orc_uint16)65535
+#define ORC_UW_MIN 0
+#define ORC_SL_MAX 2147483647
+#define ORC_SL_MIN (-1-ORC_SL_MAX)
+#define ORC_UL_MAX 4294967295U
+#define ORC_UL_MIN 0
+#define ORC_CLAMP_SB(x) ORC_CLAMP(x,ORC_SB_MIN,ORC_SB_MAX)
+#define ORC_CLAMP_UB(x) ORC_CLAMP(x,ORC_UB_MIN,ORC_UB_MAX)
+#define ORC_CLAMP_SW(x) ORC_CLAMP(x,ORC_SW_MIN,ORC_SW_MAX)
+#define ORC_CLAMP_UW(x) ORC_CLAMP(x,ORC_UW_MIN,ORC_UW_MAX)
+#define ORC_CLAMP_SL(x) ORC_CLAMP(x,ORC_SL_MIN,ORC_SL_MAX)
+#define ORC_CLAMP_UL(x) ORC_CLAMP(x,ORC_UL_MIN,ORC_UL_MAX)
+#define ORC_SWAP_W(x) ((((x)&0xffU)<<8) | (((x)&0xff00U)>>8))
+#define ORC_SWAP_L(x) ((((x)&0xffU)<<24) | (((x)&0xff00U)<<8) | (((x)&0xff0000U)>>8) | (((x)&0xff000000U)>>24))
+#define ORC_SWAP_Q(x) ((((x)&ORC_UINT64_C(0xff))<<56) | (((x)&ORC_UINT64_C(0xff00))<<40) | (((x)&ORC_UINT64_C(0xff0000))<<24) | (((x)&ORC_UINT64_C(0xff000000))<<8) | (((x)&ORC_UINT64_C(0xff00000000))>>8) | (((x)&ORC_UINT64_C(0xff0000000000))>>24) | (((x)&ORC_UINT64_C(0xff000000000000))>>40) | (((x)&ORC_UINT64_C(0xff00000000000000))>>56))
+#define ORC_PTR_OFFSET(ptr,offset) ((void *)(((unsigned char *)(ptr)) + (offset)))
+#define ORC_DENORMAL(x) ((x) & ((((x)&0x7f800000) == 0) ? 0xff800000 : 0xffffffff))
+#define ORC_ISNAN(x) ((((x)&0x7f800000) == 0x7f800000) && (((x)&0x007fffff) != 0))
+#define ORC_DENORMAL_DOUBLE(x) ((x) & ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == 0) ? ORC_UINT64_C(0xfff0000000000000) : ORC_UINT64_C(0xffffffffffffffff)))
+#define ORC_ISNAN_DOUBLE(x) ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == ORC_UINT64_C(0x7ff0000000000000)) && (((x)&ORC_UINT64_C(0x000fffffffffffff)) != 0))
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+/* end Orc C target preamble */
+
+
+
+/* deinterlace_line_vfir */
+#ifdef DISABLE_ORC
+void
+deinterlace_line_vfir (guint8 * ORC_RESTRICT d1, const guint8 * ORC_RESTRICT s1,
+ const guint8 * ORC_RESTRICT s2, const guint8 * ORC_RESTRICT s3,
+ const guint8 * ORC_RESTRICT s4, const guint8 * ORC_RESTRICT s5, int n)
+{
+ int i;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ const orc_int8 *ORC_RESTRICT ptr6;
+ const orc_int8 *ORC_RESTRICT ptr7;
+ const orc_int8 *ORC_RESTRICT ptr8;
+ orc_int8 var35;
+ orc_int8 var36;
+ orc_int8 var37;
+ orc_int8 var38;
+ orc_int8 var39;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union16 var40;
+#else
+ orc_union16 var40;
+#endif
+ orc_int8 var41;
+ orc_union16 var42;
+ orc_union16 var43;
+ orc_union16 var44;
+ orc_union16 var45;
+ orc_union16 var46;
+ orc_union16 var47;
+ orc_union16 var48;
+ orc_union16 var49;
+ orc_union16 var50;
+ orc_union16 var51;
+ orc_union16 var52;
+ orc_union16 var53;
+ orc_union16 var54;
+
+ ptr0 = (orc_int8 *) d1;
+ ptr4 = (orc_int8 *) s1;
+ ptr5 = (orc_int8 *) s2;
+ ptr6 = (orc_int8 *) s3;
+ ptr7 = (orc_int8 *) s4;
+ ptr8 = (orc_int8 *) s5;
+
+ /* 16: loadpw */
+ var40.i = 0x00000004; /* 4 or 1.97626e-323f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var35 = ptr4[i];
+ /* 1: convubw */
+ var42.i = (orc_uint8) var35;
+ /* 2: loadb */
+ var36 = ptr8[i];
+ /* 3: convubw */
+ var43.i = (orc_uint8) var36;
+ /* 4: addw */
+ var44.i = var42.i + var43.i;
+ /* 5: loadb */
+ var37 = ptr5[i];
+ /* 6: convubw */
+ var45.i = (orc_uint8) var37;
+ /* 7: loadb */
+ var38 = ptr7[i];
+ /* 8: convubw */
+ var46.i = (orc_uint8) var38;
+ /* 9: addw */
+ var47.i = var45.i + var46.i;
+ /* 10: shlw */
+ var48.i = ((orc_uint16) var47.i) << 2;
+ /* 11: loadb */
+ var39 = ptr6[i];
+ /* 12: convubw */
+ var49.i = (orc_uint8) var39;
+ /* 13: shlw */
+ var50.i = ((orc_uint16) var49.i) << 1;
+ /* 14: subw */
+ var51.i = var48.i - var44.i;
+ /* 15: addw */
+ var52.i = var51.i + var50.i;
+ /* 17: addw */
+ var53.i = var52.i + var40.i;
+ /* 18: shrsw */
+ var54.i = var53.i >> 3;
+ /* 19: convsuswb */
+ var41 = ORC_CLAMP_UB (var54.i);
+ /* 20: storeb */
+ ptr0[i] = var41;
+ }
+
+}
+
+#else
+static void
+_backup_deinterlace_line_vfir (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ const orc_int8 *ORC_RESTRICT ptr6;
+ const orc_int8 *ORC_RESTRICT ptr7;
+ const orc_int8 *ORC_RESTRICT ptr8;
+ orc_int8 var35;
+ orc_int8 var36;
+ orc_int8 var37;
+ orc_int8 var38;
+ orc_int8 var39;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union16 var40;
+#else
+ orc_union16 var40;
+#endif
+ orc_int8 var41;
+ orc_union16 var42;
+ orc_union16 var43;
+ orc_union16 var44;
+ orc_union16 var45;
+ orc_union16 var46;
+ orc_union16 var47;
+ orc_union16 var48;
+ orc_union16 var49;
+ orc_union16 var50;
+ orc_union16 var51;
+ orc_union16 var52;
+ orc_union16 var53;
+ orc_union16 var54;
+
+ ptr0 = (orc_int8 *) ex->arrays[0];
+ ptr4 = (orc_int8 *) ex->arrays[4];
+ ptr5 = (orc_int8 *) ex->arrays[5];
+ ptr6 = (orc_int8 *) ex->arrays[6];
+ ptr7 = (orc_int8 *) ex->arrays[7];
+ ptr8 = (orc_int8 *) ex->arrays[8];
+
+ /* 16: loadpw */
+ var40.i = 0x00000004; /* 4 or 1.97626e-323f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var35 = ptr4[i];
+ /* 1: convubw */
+ var42.i = (orc_uint8) var35;
+ /* 2: loadb */
+ var36 = ptr8[i];
+ /* 3: convubw */
+ var43.i = (orc_uint8) var36;
+ /* 4: addw */
+ var44.i = var42.i + var43.i;
+ /* 5: loadb */
+ var37 = ptr5[i];
+ /* 6: convubw */
+ var45.i = (orc_uint8) var37;
+ /* 7: loadb */
+ var38 = ptr7[i];
+ /* 8: convubw */
+ var46.i = (orc_uint8) var38;
+ /* 9: addw */
+ var47.i = var45.i + var46.i;
+ /* 10: shlw */
+ var48.i = ((orc_uint16) var47.i) << 2;
+ /* 11: loadb */
+ var39 = ptr6[i];
+ /* 12: convubw */
+ var49.i = (orc_uint8) var39;
+ /* 13: shlw */
+ var50.i = ((orc_uint16) var49.i) << 1;
+ /* 14: subw */
+ var51.i = var48.i - var44.i;
+ /* 15: addw */
+ var52.i = var51.i + var50.i;
+ /* 17: addw */
+ var53.i = var52.i + var40.i;
+ /* 18: shrsw */
+ var54.i = var53.i >> 3;
+ /* 19: convsuswb */
+ var41 = ORC_CLAMP_UB (var54.i);
+ /* 20: storeb */
+ ptr0[i] = var41;
+ }
+
+}
+
+void
+deinterlace_line_vfir (guint8 * ORC_RESTRICT d1, const guint8 * ORC_RESTRICT s1,
+ const guint8 * ORC_RESTRICT s2, const guint8 * ORC_RESTRICT s3,
+ const guint8 * ORC_RESTRICT s4, const guint8 * ORC_RESTRICT s5, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 21, 100, 101, 105, 110, 116, 101, 114, 108, 97, 99, 101, 95, 108,
+ 105, 110, 101, 95, 118, 102, 105, 114, 11, 1, 1, 12, 1, 1, 12, 1,
+ 1, 12, 1, 1, 12, 1, 1, 12, 1, 1, 14, 2, 2, 0, 0, 0,
+ 14, 2, 1, 0, 0, 0, 14, 2, 4, 0, 0, 0, 14, 2, 3, 0,
+ 0, 0, 20, 2, 20, 2, 20, 2, 150, 32, 4, 150, 33, 8, 70, 32,
+ 32, 33, 150, 33, 5, 150, 34, 7, 70, 33, 33, 34, 93, 33, 33, 16,
+ 150, 34, 6, 93, 34, 34, 17, 98, 33, 33, 32, 70, 33, 33, 34, 70,
+ 33, 33, 18, 94, 33, 33, 19, 160, 0, 33, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p, _backup_deinterlace_line_vfir);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "deinterlace_line_vfir");
+ orc_program_set_backup_function (p, _backup_deinterlace_line_vfir);
+ orc_program_add_destination (p, 1, "d1");
+ orc_program_add_source (p, 1, "s1");
+ orc_program_add_source (p, 1, "s2");
+ orc_program_add_source (p, 1, "s3");
+ orc_program_add_source (p, 1, "s4");
+ orc_program_add_source (p, 1, "s5");
+ orc_program_add_constant (p, 2, 0x00000002, "c1");
+ orc_program_add_constant (p, 2, 0x00000001, "c2");
+ orc_program_add_constant (p, 2, 0x00000004, "c3");
+ orc_program_add_constant (p, 2, 0x00000003, "c4");
+ orc_program_add_temporary (p, 2, "t1");
+ orc_program_add_temporary (p, 2, "t2");
+ orc_program_add_temporary (p, 2, "t3");
+
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T2, ORC_VAR_S5, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T2, ORC_VAR_S2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T3, ORC_VAR_S4, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shlw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_C1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T3, ORC_VAR_S3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shlw", 0, ORC_VAR_T3, ORC_VAR_T3, ORC_VAR_C2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "subw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_T1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_C3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shrsw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_C4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convsuswb", 0, ORC_VAR_D1, ORC_VAR_T2,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ ex->arrays[ORC_VAR_S2] = (void *) s2;
+ ex->arrays[ORC_VAR_S3] = (void *) s3;
+ ex->arrays[ORC_VAR_S4] = (void *) s4;
+ ex->arrays[ORC_VAR_S5] = (void *) s5;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* deinterlace_line_linear */
+#ifdef DISABLE_ORC
+void
+deinterlace_line_linear (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2, int n)
+{
+ int i;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ orc_int8 var32;
+ orc_int8 var33;
+ orc_int8 var34;
+
+ ptr0 = (orc_int8 *) d1;
+ ptr4 = (orc_int8 *) s1;
+ ptr5 = (orc_int8 *) s2;
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var32 = ptr4[i];
+ /* 1: loadb */
+ var33 = ptr5[i];
+ /* 2: avgub */
+ var34 = ((orc_uint8) var32 + (orc_uint8) var33 + 1) >> 1;
+ /* 3: storeb */
+ ptr0[i] = var34;
+ }
+
+}
+
+#else
+static void
+_backup_deinterlace_line_linear (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ orc_int8 var32;
+ orc_int8 var33;
+ orc_int8 var34;
+
+ ptr0 = (orc_int8 *) ex->arrays[0];
+ ptr4 = (orc_int8 *) ex->arrays[4];
+ ptr5 = (orc_int8 *) ex->arrays[5];
+
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var32 = ptr4[i];
+ /* 1: loadb */
+ var33 = ptr5[i];
+ /* 2: avgub */
+ var34 = ((orc_uint8) var32 + (orc_uint8) var33 + 1) >> 1;
+ /* 3: storeb */
+ ptr0[i] = var34;
+ }
+
+}
+
+void
+deinterlace_line_linear (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 23, 100, 101, 105, 110, 116, 101, 114, 108, 97, 99, 101, 95, 108,
+ 105, 110, 101, 95, 108, 105, 110, 101, 97, 114, 11, 1, 1, 12, 1, 1,
+ 12, 1, 1, 39, 0, 4, 5, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p, _backup_deinterlace_line_linear);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "deinterlace_line_linear");
+ orc_program_set_backup_function (p, _backup_deinterlace_line_linear);
+ orc_program_add_destination (p, 1, "d1");
+ orc_program_add_source (p, 1, "s1");
+ orc_program_add_source (p, 1, "s2");
+
+ orc_program_append_2 (p, "avgub", 0, ORC_VAR_D1, ORC_VAR_S1, ORC_VAR_S2,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ ex->arrays[ORC_VAR_S2] = (void *) s2;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* deinterlace_line_linear_blend */
+#ifdef DISABLE_ORC
+void
+deinterlace_line_linear_blend (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2,
+ const guint8 * ORC_RESTRICT s3, int n)
+{
+ int i;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ const orc_int8 *ORC_RESTRICT ptr6;
+ orc_int8 var35;
+ orc_int8 var36;
+ orc_int8 var37;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union16 var38;
+#else
+ orc_union16 var38;
+#endif
+ orc_int8 var39;
+ orc_union16 var40;
+ orc_union16 var41;
+ orc_union16 var42;
+ orc_union16 var43;
+ orc_union16 var44;
+ orc_union16 var45;
+ orc_union16 var46;
+ orc_union16 var47;
+
+ ptr0 = (orc_int8 *) d1;
+ ptr4 = (orc_int8 *) s1;
+ ptr5 = (orc_int8 *) s2;
+ ptr6 = (orc_int8 *) s3;
+
+ /* 9: loadpw */
+ var38.i = 0x00000002; /* 2 or 9.88131e-324f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var35 = ptr4[i];
+ /* 1: convubw */
+ var40.i = (orc_uint8) var35;
+ /* 2: loadb */
+ var36 = ptr5[i];
+ /* 3: convubw */
+ var41.i = (orc_uint8) var36;
+ /* 4: loadb */
+ var37 = ptr6[i];
+ /* 5: convubw */
+ var42.i = (orc_uint8) var37;
+ /* 6: addw */
+ var43.i = var40.i + var41.i;
+ /* 7: addw */
+ var44.i = var42.i + var42.i;
+ /* 8: addw */
+ var45.i = var43.i + var44.i;
+ /* 10: addw */
+ var46.i = var45.i + var38.i;
+ /* 11: shrsw */
+ var47.i = var46.i >> 2;
+ /* 12: convsuswb */
+ var39 = ORC_CLAMP_UB (var47.i);
+ /* 13: storeb */
+ ptr0[i] = var39;
+ }
+
+}
+
+#else
+static void
+_backup_deinterlace_line_linear_blend (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ const orc_int8 *ORC_RESTRICT ptr6;
+ orc_int8 var35;
+ orc_int8 var36;
+ orc_int8 var37;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union16 var38;
+#else
+ orc_union16 var38;
+#endif
+ orc_int8 var39;
+ orc_union16 var40;
+ orc_union16 var41;
+ orc_union16 var42;
+ orc_union16 var43;
+ orc_union16 var44;
+ orc_union16 var45;
+ orc_union16 var46;
+ orc_union16 var47;
+
+ ptr0 = (orc_int8 *) ex->arrays[0];
+ ptr4 = (orc_int8 *) ex->arrays[4];
+ ptr5 = (orc_int8 *) ex->arrays[5];
+ ptr6 = (orc_int8 *) ex->arrays[6];
+
+ /* 9: loadpw */
+ var38.i = 0x00000002; /* 2 or 9.88131e-324f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var35 = ptr4[i];
+ /* 1: convubw */
+ var40.i = (orc_uint8) var35;
+ /* 2: loadb */
+ var36 = ptr5[i];
+ /* 3: convubw */
+ var41.i = (orc_uint8) var36;
+ /* 4: loadb */
+ var37 = ptr6[i];
+ /* 5: convubw */
+ var42.i = (orc_uint8) var37;
+ /* 6: addw */
+ var43.i = var40.i + var41.i;
+ /* 7: addw */
+ var44.i = var42.i + var42.i;
+ /* 8: addw */
+ var45.i = var43.i + var44.i;
+ /* 10: addw */
+ var46.i = var45.i + var38.i;
+ /* 11: shrsw */
+ var47.i = var46.i >> 2;
+ /* 12: convsuswb */
+ var39 = ORC_CLAMP_UB (var47.i);
+ /* 13: storeb */
+ ptr0[i] = var39;
+ }
+
+}
+
+void
+deinterlace_line_linear_blend (guint8 * ORC_RESTRICT d1,
+ const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2,
+ const guint8 * ORC_RESTRICT s3, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 29, 100, 101, 105, 110, 116, 101, 114, 108, 97, 99, 101, 95, 108,
+ 105, 110, 101, 95, 108, 105, 110, 101, 97, 114, 95, 98, 108, 101, 110,
+ 100,
+ 11, 1, 1, 12, 1, 1, 12, 1, 1, 12, 1, 1, 14, 2, 2, 0,
+ 0, 0, 20, 2, 20, 2, 20, 2, 150, 32, 4, 150, 33, 5, 150, 34,
+ 6, 70, 32, 32, 33, 70, 34, 34, 34, 70, 32, 32, 34, 70, 32, 32,
+ 16, 94, 32, 32, 16, 160, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p,
+ _backup_deinterlace_line_linear_blend);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "deinterlace_line_linear_blend");
+ orc_program_set_backup_function (p,
+ _backup_deinterlace_line_linear_blend);
+ orc_program_add_destination (p, 1, "d1");
+ orc_program_add_source (p, 1, "s1");
+ orc_program_add_source (p, 1, "s2");
+ orc_program_add_source (p, 1, "s3");
+ orc_program_add_constant (p, 2, 0x00000002, "c1");
+ orc_program_add_temporary (p, 2, "t1");
+ orc_program_add_temporary (p, 2, "t2");
+ orc_program_add_temporary (p, 2, "t3");
+
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T2, ORC_VAR_S2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 0, ORC_VAR_T3, ORC_VAR_S3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_T2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T3, ORC_VAR_T3, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_C1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shrsw", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_C1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convsuswb", 0, ORC_VAR_D1, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ ex->arrays[ORC_VAR_S2] = (void *) s2;
+ ex->arrays[ORC_VAR_S3] = (void *) s3;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* deinterlace_line_greedy */
+#ifdef DISABLE_ORC
+void
+deinterlace_line_greedy (orc_uint8 * ORC_RESTRICT d1,
+ const orc_uint8 * ORC_RESTRICT s1, const orc_uint8 * ORC_RESTRICT s2,
+ const orc_uint8 * ORC_RESTRICT s3, const orc_uint8 * ORC_RESTRICT s4,
+ int p1, int n)
+{
+ int i;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ const orc_int8 *ORC_RESTRICT ptr6;
+ const orc_int8 *ORC_RESTRICT ptr7;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_int8 var44;
+#else
+ orc_int8 var44;
+#endif
+ orc_int8 var45;
+ orc_int8 var46;
+ orc_int8 var47;
+ orc_int8 var48;
+ orc_int8 var49;
+ orc_int8 var50;
+ orc_int8 var51;
+ orc_int8 var52;
+ orc_int8 var53;
+ orc_int8 var54;
+ orc_int8 var55;
+ orc_int8 var56;
+ orc_int8 var57;
+ orc_int8 var58;
+ orc_int8 var59;
+ orc_int8 var60;
+ orc_int8 var61;
+ orc_int8 var62;
+ orc_int8 var63;
+ orc_int8 var64;
+ orc_int8 var65;
+ orc_int8 var66;
+ orc_int8 var67;
+ orc_int8 var68;
+
+ ptr0 = (orc_int8 *) d1;
+ ptr4 = (orc_int8 *) s1;
+ ptr5 = (orc_int8 *) s2;
+ ptr6 = (orc_int8 *) s3;
+ ptr7 = (orc_int8 *) s4;
+
+ /* 11: loadpb */
+ var44 = 0x00000080; /* 128 or 6.32404e-322f */
+ /* 20: loadpb */
+ var45 = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var47 = ptr4[i];
+ /* 1: loadb */
+ var48 = ptr7[i];
+ /* 2: loadb */
+ var49 = ptr6[i];
+ /* 3: loadb */
+ var50 = ptr5[i];
+ /* 4: avgub */
+ var51 = ((orc_uint8) var50 + (orc_uint8) var49 + 1) >> 1;
+ /* 5: maxub */
+ var52 = ORC_MAX ((orc_uint8) var47, (orc_uint8) var51);
+ /* 6: minub */
+ var53 = ORC_MIN ((orc_uint8) var47, (orc_uint8) var51);
+ /* 7: subb */
+ var54 = var52 - var53;
+ /* 8: maxub */
+ var55 = ORC_MAX ((orc_uint8) var48, (orc_uint8) var51);
+ /* 9: minub */
+ var56 = ORC_MIN ((orc_uint8) var48, (orc_uint8) var51);
+ /* 10: subb */
+ var57 = var55 - var56;
+ /* 12: xorb */
+ var58 = var54 ^ var44;
+ /* 13: xorb */
+ var59 = var57 ^ var44;
+ /* 14: cmpgtsb */
+ var60 = (var58 > var59) ? (~0) : 0;
+ /* 15: andb */
+ var61 = var48 & var60;
+ /* 16: andnb */
+ var62 = (~var60) & var47;
+ /* 17: orb */
+ var63 = var61 | var62;
+ /* 18: maxub */
+ var64 = ORC_MAX ((orc_uint8) var50, (orc_uint8) var49);
+ /* 19: minub */
+ var65 = ORC_MIN ((orc_uint8) var50, (orc_uint8) var49);
+ /* 21: addusb */
+ var66 = ORC_CLAMP_UB ((orc_uint8) var64 + (orc_uint8) var45);
+ /* 22: subusb */
+ var67 = ORC_CLAMP_UB ((orc_uint8) var65 - (orc_uint8) var45);
+ /* 23: minub */
+ var68 = ORC_MIN ((orc_uint8) var63, (orc_uint8) var66);
+ /* 24: maxub */
+ var46 = ORC_MAX ((orc_uint8) var68, (orc_uint8) var67);
+ /* 25: storeb */
+ ptr0[i] = var46;
+ }
+
+}
+
+#else
+static void
+_backup_deinterlace_line_greedy (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_int8 *ORC_RESTRICT ptr0;
+ const orc_int8 *ORC_RESTRICT ptr4;
+ const orc_int8 *ORC_RESTRICT ptr5;
+ const orc_int8 *ORC_RESTRICT ptr6;
+ const orc_int8 *ORC_RESTRICT ptr7;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_int8 var44;
+#else
+ orc_int8 var44;
+#endif
+ orc_int8 var45;
+ orc_int8 var46;
+ orc_int8 var47;
+ orc_int8 var48;
+ orc_int8 var49;
+ orc_int8 var50;
+ orc_int8 var51;
+ orc_int8 var52;
+ orc_int8 var53;
+ orc_int8 var54;
+ orc_int8 var55;
+ orc_int8 var56;
+ orc_int8 var57;
+ orc_int8 var58;
+ orc_int8 var59;
+ orc_int8 var60;
+ orc_int8 var61;
+ orc_int8 var62;
+ orc_int8 var63;
+ orc_int8 var64;
+ orc_int8 var65;
+ orc_int8 var66;
+ orc_int8 var67;
+ orc_int8 var68;
+
+ ptr0 = (orc_int8 *) ex->arrays[0];
+ ptr4 = (orc_int8 *) ex->arrays[4];
+ ptr5 = (orc_int8 *) ex->arrays[5];
+ ptr6 = (orc_int8 *) ex->arrays[6];
+ ptr7 = (orc_int8 *) ex->arrays[7];
+
+ /* 11: loadpb */
+ var44 = 0x00000080; /* 128 or 6.32404e-322f */
+ /* 20: loadpb */
+ var45 = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadb */
+ var47 = ptr4[i];
+ /* 1: loadb */
+ var48 = ptr7[i];
+ /* 2: loadb */
+ var49 = ptr6[i];
+ /* 3: loadb */
+ var50 = ptr5[i];
+ /* 4: avgub */
+ var51 = ((orc_uint8) var50 + (orc_uint8) var49 + 1) >> 1;
+ /* 5: maxub */
+ var52 = ORC_MAX ((orc_uint8) var47, (orc_uint8) var51);
+ /* 6: minub */
+ var53 = ORC_MIN ((orc_uint8) var47, (orc_uint8) var51);
+ /* 7: subb */
+ var54 = var52 - var53;
+ /* 8: maxub */
+ var55 = ORC_MAX ((orc_uint8) var48, (orc_uint8) var51);
+ /* 9: minub */
+ var56 = ORC_MIN ((orc_uint8) var48, (orc_uint8) var51);
+ /* 10: subb */
+ var57 = var55 - var56;
+ /* 12: xorb */
+ var58 = var54 ^ var44;
+ /* 13: xorb */
+ var59 = var57 ^ var44;
+ /* 14: cmpgtsb */
+ var60 = (var58 > var59) ? (~0) : 0;
+ /* 15: andb */
+ var61 = var48 & var60;
+ /* 16: andnb */
+ var62 = (~var60) & var47;
+ /* 17: orb */
+ var63 = var61 | var62;
+ /* 18: maxub */
+ var64 = ORC_MAX ((orc_uint8) var50, (orc_uint8) var49);
+ /* 19: minub */
+ var65 = ORC_MIN ((orc_uint8) var50, (orc_uint8) var49);
+ /* 21: addusb */
+ var66 = ORC_CLAMP_UB ((orc_uint8) var64 + (orc_uint8) var45);
+ /* 22: subusb */
+ var67 = ORC_CLAMP_UB ((orc_uint8) var65 - (orc_uint8) var45);
+ /* 23: minub */
+ var68 = ORC_MIN ((orc_uint8) var63, (orc_uint8) var66);
+ /* 24: maxub */
+ var46 = ORC_MAX ((orc_uint8) var68, (orc_uint8) var67);
+ /* 25: storeb */
+ ptr0[i] = var46;
+ }
+
+}
+
+void
+deinterlace_line_greedy (orc_uint8 * ORC_RESTRICT d1,
+ const orc_uint8 * ORC_RESTRICT s1, const orc_uint8 * ORC_RESTRICT s2,
+ const orc_uint8 * ORC_RESTRICT s3, const orc_uint8 * ORC_RESTRICT s4,
+ int p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 23, 100, 101, 105, 110, 116, 101, 114, 108, 97, 99, 101, 95, 108,
+ 105, 110, 101, 95, 103, 114, 101, 101, 100, 121, 11, 1, 1, 12, 1, 1,
+ 12, 1, 1, 12, 1, 1, 12, 1, 1, 14, 1, 128, 0, 0, 0, 16,
+ 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20,
+ 1, 20, 1, 20, 1, 20, 1, 20, 1, 43, 32, 4, 43, 33, 7, 43,
+ 34, 6, 43, 35, 5, 39, 36, 35, 34, 53, 39, 32, 36, 55, 40, 32,
+ 36, 65, 37, 39, 40, 53, 39, 33, 36, 55, 40, 33, 36, 65, 38, 39,
+ 40, 68, 37, 37, 16, 68, 38, 38, 16, 41, 40, 37, 38, 36, 39, 33,
+ 40, 37, 40, 40, 32, 59, 41, 39, 40, 53, 43, 35, 34, 55, 42, 35,
+ 34, 35, 43, 43, 24, 67, 42, 42, 24, 55, 41, 41, 43, 53, 0, 41,
+ 42, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p, _backup_deinterlace_line_greedy);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "deinterlace_line_greedy");
+ orc_program_set_backup_function (p, _backup_deinterlace_line_greedy);
+ orc_program_add_destination (p, 1, "d1");
+ orc_program_add_source (p, 1, "s1");
+ orc_program_add_source (p, 1, "s2");
+ orc_program_add_source (p, 1, "s3");
+ orc_program_add_source (p, 1, "s4");
+ orc_program_add_constant (p, 1, 0x00000080, "c1");
+ orc_program_add_parameter (p, 1, "p1");
+ orc_program_add_temporary (p, 1, "t1");
+ orc_program_add_temporary (p, 1, "t2");
+ orc_program_add_temporary (p, 1, "t3");
+ orc_program_add_temporary (p, 1, "t4");
+ orc_program_add_temporary (p, 1, "t5");
+ orc_program_add_temporary (p, 1, "t6");
+ orc_program_add_temporary (p, 1, "t7");
+ orc_program_add_temporary (p, 1, "t8");
+ orc_program_add_temporary (p, 1, "t9");
+ orc_program_add_temporary (p, 1, "t10");
+ orc_program_add_temporary (p, 1, "t11");
+ orc_program_add_temporary (p, 1, "t12");
+
+ orc_program_append_2 (p, "loadb", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadb", 0, ORC_VAR_T2, ORC_VAR_S4, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadb", 0, ORC_VAR_T3, ORC_VAR_S3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadb", 0, ORC_VAR_T4, ORC_VAR_S2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "avgub", 0, ORC_VAR_T5, ORC_VAR_T4, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "maxub", 0, ORC_VAR_T8, ORC_VAR_T1, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "minub", 0, ORC_VAR_T9, ORC_VAR_T1, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "subb", 0, ORC_VAR_T6, ORC_VAR_T8, ORC_VAR_T9,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "maxub", 0, ORC_VAR_T8, ORC_VAR_T2, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "minub", 0, ORC_VAR_T9, ORC_VAR_T2, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "subb", 0, ORC_VAR_T7, ORC_VAR_T8, ORC_VAR_T9,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "xorb", 0, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_C1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "xorb", 0, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_C1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "cmpgtsb", 0, ORC_VAR_T9, ORC_VAR_T6, ORC_VAR_T7,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "andb", 0, ORC_VAR_T8, ORC_VAR_T2, ORC_VAR_T9,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "andnb", 0, ORC_VAR_T9, ORC_VAR_T9, ORC_VAR_T1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "orb", 0, ORC_VAR_T10, ORC_VAR_T8, ORC_VAR_T9,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "maxub", 0, ORC_VAR_T12, ORC_VAR_T4, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "minub", 0, ORC_VAR_T11, ORC_VAR_T4, ORC_VAR_T3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addusb", 0, ORC_VAR_T12, ORC_VAR_T12,
+ ORC_VAR_P1, ORC_VAR_D1);
+ orc_program_append_2 (p, "subusb", 0, ORC_VAR_T11, ORC_VAR_T11,
+ ORC_VAR_P1, ORC_VAR_D1);
+ orc_program_append_2 (p, "minub", 0, ORC_VAR_T10, ORC_VAR_T10,
+ ORC_VAR_T12, ORC_VAR_D1);
+ orc_program_append_2 (p, "maxub", 0, ORC_VAR_D1, ORC_VAR_T10, ORC_VAR_T11,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ ex->arrays[ORC_VAR_S2] = (void *) s2;
+ ex->arrays[ORC_VAR_S3] = (void *) s3;
+ ex->arrays[ORC_VAR_S4] = (void *) s4;
+ ex->params[ORC_VAR_P1] = p1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
diff --git a/gst/deinterlace/tvtime-dist.h b/gst/deinterlace/tvtime-dist.h
new file mode 100644
index 0000000000..adbc9014af
--- /dev/null
+++ b/gst/deinterlace/tvtime-dist.h
@@ -0,0 +1,93 @@
+
+/* autogenerated from tvtime.orc */
+
+#ifndef _TVTIME_H_
+#define _TVTIME_H_
+
+#include <glib.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#ifndef _ORC_INTEGER_TYPEDEFS_
+#define _ORC_INTEGER_TYPEDEFS_
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+typedef int8_t orc_int8;
+typedef int16_t orc_int16;
+typedef int32_t orc_int32;
+typedef int64_t orc_int64;
+typedef uint8_t orc_uint8;
+typedef uint16_t orc_uint16;
+typedef uint32_t orc_uint32;
+typedef uint64_t orc_uint64;
+#define ORC_UINT64_C(x) UINT64_C(x)
+#elif defined(_MSC_VER)
+typedef signed __int8 orc_int8;
+typedef signed __int16 orc_int16;
+typedef signed __int32 orc_int32;
+typedef signed __int64 orc_int64;
+typedef unsigned __int8 orc_uint8;
+typedef unsigned __int16 orc_uint16;
+typedef unsigned __int32 orc_uint32;
+typedef unsigned __int64 orc_uint64;
+#define ORC_UINT64_C(x) (x##Ui64)
+#define inline __inline
+#else
+#include <limits.h>
+typedef signed char orc_int8;
+typedef short orc_int16;
+typedef int orc_int32;
+typedef unsigned char orc_uint8;
+typedef unsigned short orc_uint16;
+typedef unsigned int orc_uint32;
+#if INT_MAX == LONG_MAX
+typedef long long orc_int64;
+typedef unsigned long long orc_uint64;
+#define ORC_UINT64_C(x) (x##ULL)
+#else
+typedef long orc_int64;
+typedef unsigned long orc_uint64;
+#define ORC_UINT64_C(x) (x##UL)
+#endif
+#endif
+typedef union { orc_int16 i; orc_int8 x2[2]; } orc_union16;
+typedef union { orc_int32 i; float f; orc_int16 x2[2]; orc_int8 x4[4]; } orc_union32;
+typedef union { orc_int64 i; double f; orc_int32 x2[2]; float x2f[2]; orc_int16 x4[4]; } orc_union64;
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#ifndef ORC_INTERNAL
+#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
+#define ORC_INTERNAL __hidden
+#elif defined (__GNUC__)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#else
+#define ORC_INTERNAL
+#endif
+#endif
+
+void deinterlace_line_vfir (guint8 * ORC_RESTRICT d1, const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2, const guint8 * ORC_RESTRICT s3, const guint8 * ORC_RESTRICT s4, const guint8 * ORC_RESTRICT s5, int n);
+void deinterlace_line_linear (guint8 * ORC_RESTRICT d1, const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2, int n);
+void deinterlace_line_linear_blend (guint8 * ORC_RESTRICT d1, const guint8 * ORC_RESTRICT s1, const guint8 * ORC_RESTRICT s2, const guint8 * ORC_RESTRICT s3, int n);
+void deinterlace_line_greedy (orc_uint8 * ORC_RESTRICT d1, const orc_uint8 * ORC_RESTRICT s1, const orc_uint8 * ORC_RESTRICT s2, const orc_uint8 * ORC_RESTRICT s3, const orc_uint8 * ORC_RESTRICT s4, int p1, int n);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+
diff --git a/gst/deinterlace/tvtime.orc b/gst/deinterlace/tvtime.orc
new file mode 100644
index 0000000000..44217e74db
--- /dev/null
+++ b/gst/deinterlace/tvtime.orc
@@ -0,0 +1,109 @@
+
+.function deinterlace_line_vfir
+.dest 1 d1 guint8
+.source 1 s1 guint8
+.source 1 s2 guint8
+.source 1 s3 guint8
+.source 1 s4 guint8
+.source 1 s5 guint8
+.temp 2 t1
+.temp 2 t2
+.temp 2 t3
+
+convubw t1, s1
+convubw t2, s5
+addw t1, t1, t2
+convubw t2, s2
+convubw t3, s4
+addw t2, t2, t3
+shlw t2, t2, 2
+convubw t3, s3
+shlw t3, t3, 1
+subw t2, t2, t1
+addw t2, t2, t3
+addw t2, t2, 4
+shrsw t2, t2, 3
+convsuswb d1, t2
+
+
+.function deinterlace_line_linear
+.dest 1 d1 guint8
+.source 1 s1 guint8
+.source 1 s2 guint8
+
+avgub d1, s1, s2
+
+
+.function deinterlace_line_linear_blend
+.dest 1 d1 guint8
+.source 1 s1 guint8
+.source 1 s2 guint8
+.source 1 s3 guint8
+.temp 2 t1
+.temp 2 t2
+.temp 2 t3
+
+convubw t1, s1
+convubw t2, s2
+convubw t3, s3
+addw t1, t1, t2
+addw t3, t3, t3
+addw t1, t1, t3
+addw t1, t1, 2
+shrsw t1, t1, 2
+convsuswb d1, t1
+
+
+.function deinterlace_line_greedy
+.dest 1 d1
+.source 1 m0
+.source 1 t1
+.source 1 b1
+.source 1 m2
+.param 1 max_comb
+.temp 1 tm0
+.temp 1 tm2
+.temp 1 tb1
+.temp 1 tt1
+.temp 1 avg
+.temp 1 l2_diff
+.temp 1 lp2_diff
+.temp 1 t2
+.temp 1 t3
+.temp 1 best
+.temp 1 min
+.temp 1 max
+
+
+loadb tm0, m0
+loadb tm2, m2
+
+loadb tb1, b1
+loadb tt1, t1
+avgub avg, tt1, tb1
+
+maxub t2, tm0, avg
+minub t3, tm0, avg
+subb l2_diff, t2, t3
+
+maxub t2, tm2, avg
+minub t3, tm2, avg
+subb lp2_diff, t2, t3
+
+xorb l2_diff, l2_diff, 0x80
+xorb lp2_diff, lp2_diff, 0x80
+cmpgtsb t3, l2_diff, lp2_diff
+
+andb t2, tm2, t3
+andnb t3, t3, tm0
+orb best, t2, t3
+
+maxub max, tt1, tb1
+minub min, tt1, tb1
+addusb max, max, max_comb
+subusb min, min, max_comb
+minub best, best, max
+maxub d1, best, min
+
+
+
diff --git a/gst/deinterlace/tvtime/greedy.c b/gst/deinterlace/tvtime/greedy.c
new file mode 100644
index 0000000000..5a01cc6f3c
--- /dev/null
+++ b/gst/deinterlace/tvtime/greedy.c
@@ -0,0 +1,250 @@
+/*
+ *
+ * GStreamer
+ * Copyright (c) 2000 Tom Barry All rights reserved.
+ * mmx.h port copyright (c) 2002 Billy Biggs <vektor@dumbterm.net>.
+ *
+ * Copyright (C) 2008,2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Tom Barry
+ * and Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+#include "tvtime.h"
+
+
+#define GST_TYPE_DEINTERLACE_METHOD_GREEDY_L (gst_deinterlace_method_greedy_l_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_GREEDY_L(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_GREEDY_L))
+#define GST_IS_DEINTERLACE_METHOD_GREEDY_L_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_GREEDY_L))
+#define GST_DEINTERLACE_METHOD_GREEDY_L_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_GREEDY_L, GstDeinterlaceMethodGreedyLClass))
+#define GST_DEINTERLACE_METHOD_GREEDY_L(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_GREEDY_L, GstDeinterlaceMethodGreedyL))
+#define GST_DEINTERLACE_METHOD_GREEDY_L_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_GREEDY_L, GstDeinterlaceMethodGreedyLClass))
+#define GST_DEINTERLACE_METHOD_GREEDY_L_CAST(obj) ((GstDeinterlaceMethodGreedyL*)(obj))
+
+GType gst_deinterlace_method_greedy_l_get_type (void);
+
+typedef struct
+{
+ GstDeinterlaceSimpleMethod parent;
+
+ guint max_comb;
+} GstDeinterlaceMethodGreedyL;
+
+typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodGreedyLClass;
+
+// This is a simple lightweight DeInterlace method that uses little CPU time
+// but gives very good results for low or intermediate motion.
+// It defers frames by one field, but that does not seem to produce noticeable
+// lip sync problems.
+//
+// The method used is to take either the older or newer weave pixel depending
+// upon which give the smaller comb factor, and then clip to avoid large damage
+// when wrong.
+//
+// I'd intended this to be part of a larger more elaborate method added to
+// Blended Clip but this gives too good results for the CPU to ignore here.
+
+static inline void
+deinterlace_greedy_interpolate_scanline_orc (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ guint max_comb = GST_DEINTERLACE_METHOD_GREEDY_L (self)->max_comb;
+
+ if (scanlines->m1 == NULL || scanlines->mp == NULL) {
+ deinterlace_line_linear (out, scanlines->t0, scanlines->b0, size);
+ } else {
+ deinterlace_line_greedy (out, scanlines->m1, scanlines->t0, scanlines->b0,
+ scanlines->mp ? scanlines->mp : scanlines->m1, max_comb, size);
+ }
+}
+
+static inline void
+deinterlace_greedy_interpolate_scanline_orc_planar_u (GstDeinterlaceSimpleMethod
+ * self, guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint
+ size)
+{
+ guint max_comb = GST_DEINTERLACE_METHOD_GREEDY_L (self)->max_comb;
+
+ if (scanlines->m1 == NULL || scanlines->mp == NULL) {
+ deinterlace_line_linear (out, scanlines->t0, scanlines->b0, size);
+ } else {
+ deinterlace_line_greedy (out, scanlines->m1, scanlines->t0, scanlines->b0,
+ scanlines->mp ? scanlines->mp : scanlines->m1, max_comb, size);
+ }
+}
+
+static inline void
+deinterlace_greedy_interpolate_scanline_orc_planar_v (GstDeinterlaceSimpleMethod
+ * self, guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint
+ size)
+{
+ guint max_comb = GST_DEINTERLACE_METHOD_GREEDY_L (self)->max_comb;
+
+ if (scanlines->m1 == NULL || scanlines->mp == NULL) {
+ deinterlace_line_linear (out, scanlines->t0, scanlines->b0, size);
+ } else {
+ deinterlace_line_greedy (out, scanlines->m1, scanlines->t0, scanlines->b0,
+ scanlines->mp ? scanlines->mp : scanlines->m1, max_comb, size);
+ }
+}
+
+static void
+deinterlace_greedy_copy_scanline (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ memcpy (out, scanlines->m0, size);
+}
+
+static void
+deinterlace_greedy_copy_scanline_planar_u (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ memcpy (out, scanlines->m0, size);
+}
+
+static void
+deinterlace_greedy_copy_scanline_planar_v (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ memcpy (out, scanlines->m0, size);
+}
+
+G_DEFINE_TYPE (GstDeinterlaceMethodGreedyL, gst_deinterlace_method_greedy_l,
+ GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
+enum
+{
+ PROP_0,
+ PROP_MAX_COMB
+};
+
+static void
+gst_deinterlace_method_greedy_l_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstDeinterlaceMethodGreedyL *self = GST_DEINTERLACE_METHOD_GREEDY_L (object);
+
+ switch (prop_id) {
+ case PROP_MAX_COMB:
+ self->max_comb = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+gst_deinterlace_method_greedy_l_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstDeinterlaceMethodGreedyL *self = GST_DEINTERLACE_METHOD_GREEDY_L (object);
+
+ switch (prop_id) {
+ case PROP_MAX_COMB:
+ g_value_set_uint (value, self->max_comb);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+gst_deinterlace_method_greedy_l_class_init (GstDeinterlaceMethodGreedyLClass *
+ klass)
+{
+ GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+ GstDeinterlaceSimpleMethodClass *dism_class =
+ (GstDeinterlaceSimpleMethodClass *) klass;
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ gobject_class->set_property = gst_deinterlace_method_greedy_l_set_property;
+ gobject_class->get_property = gst_deinterlace_method_greedy_l_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_MAX_COMB,
+ g_param_spec_uint ("max-comb",
+ "Max comb",
+ "Max Comb", 0, 255, 15, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ dim_class->fields_required = 2;
+ dim_class->name = "Motion Adaptive: Simple Detection";
+ dim_class->nick = "greedyl";
+ dim_class->latency = 1;
+
+ dism_class->interpolate_scanline_ayuv =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_yuy2 =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_yvyu =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_uyvy =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_nv12 =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_nv21 =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_argb =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_abgr =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_rgba =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_bgra =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_rgb =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_bgr =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_planar_y =
+ deinterlace_greedy_interpolate_scanline_orc;
+ dism_class->interpolate_scanline_planar_u =
+ deinterlace_greedy_interpolate_scanline_orc_planar_u;
+ dism_class->interpolate_scanline_planar_v =
+ deinterlace_greedy_interpolate_scanline_orc_planar_v;
+
+ dism_class->copy_scanline_ayuv = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_yuy2 = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_yvyu = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_uyvy = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_argb = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_abgr = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_rgba = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_bgra = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_rgb = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_bgr = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_planar_y = deinterlace_greedy_copy_scanline;
+ dism_class->copy_scanline_planar_u =
+ deinterlace_greedy_copy_scanline_planar_u;
+ dism_class->copy_scanline_planar_v =
+ deinterlace_greedy_copy_scanline_planar_v;
+}
+
+static void
+gst_deinterlace_method_greedy_l_init (GstDeinterlaceMethodGreedyL * self)
+{
+ self->max_comb = 15;
+}
diff --git a/gst/deinterlace/tvtime/greedyh.asm b/gst/deinterlace/tvtime/greedyh.asm
new file mode 100644
index 0000000000..71a03dfc15
--- /dev/null
+++ b/gst/deinterlace/tvtime/greedyh.asm
@@ -0,0 +1,472 @@
+/*
+ *
+ * GStreamer
+ * Copyright (c) 2001 Tom Barry. All rights reserved.
+ * Copyright (C) 2008,2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Tom Barry.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+
+#include "x86-64_macros.inc"
+
+static void
+FUNCT_NAME_YUY2 (GstDeinterlaceMethodGreedyH *self, const guint8 * L1, const guint8 * L2, const guint8 * L3, const guint8 * L2P, guint8 * Dest, gint width)
+{
+
+ // in tight loop some vars are accessed faster in local storage
+ gint64 YMask = 0x00ff00ff00ff00ffull; // to keep only luma
+ gint64 UVMask = 0xff00ff00ff00ff00ull; // to keep only chroma
+ gint64 ShiftMask = 0xfefefefefefefefeull; // to avoid shifting chroma to luma
+ gint64 QW256 = 0x0100010001000100ull; // 4 256's
+ gint64 MaxComb;
+ gint64 MotionThreshold;
+ gint64 MotionSense;
+ gint64 i;
+ glong LoopCtr;
+ glong oldbx = 0;
+
+ gint64 QW256B;
+ gint64 LastAvg = 0; //interp value from left qword
+
+ // FIXME: Use C implementation if the width is not a multiple of 4
+ // Do something more optimal later
+ if (width % 4 != 0)
+ C_FUNCT_YUY2 (self, L1, L2, L3, L2P, Dest, width);
+
+ // Set up our two parms that are actually evaluated for each pixel
+ i = self->max_comb;
+ MaxComb =
+ i << 56 | i << 48 | i << 40 | i << 32 | i << 24 | i << 16 | i << 8 | i;
+
+ i = self->motion_threshold; // scale to range of 0-257
+ MotionThreshold = i << 48 | i << 32 | i << 16 | i | UVMask;
+
+ i = self->motion_sense; // scale to range of 0-257
+ MotionSense = i << 48 | i << 32 | i << 16 | i;
+
+ i = 0xffffffff - 256;
+ QW256B = i << 48 | i << 32 | i << 16 | i; // save a couple instr on PMINSW instruct.
+
+ LoopCtr = width / 8 - 1; // there are LineLength / 4 qwords per line but do 1 less, adj at end of loop
+
+ // For ease of reading, the comments below assume that we're operating on an odd
+ // field (i.e., that InfoIsOdd is true). Assume the obvious for even lines..
+ __asm__ __volatile__ (
+ // save ebx (-fPIC)
+ MOVX " %%" XBX ", %[oldbx]\n\t"
+ MOVX " %[L1], %%" XAX "\n\t"
+ LEAX " 8(%%" XAX "), %%" XBX "\n\t" // next qword needed by DJR
+ MOVX " %[L3], %%" XCX "\n\t"
+ SUBX " %%" XAX ", %%" XCX "\n\t" // carry L3 addr as an offset
+ MOVX " %[L2P], %%" XDX "\n\t"
+ MOVX " %[L2], %%" XSI "\n\t"
+ MOVX " %[Dest], %%" XDI "\n\t" // DL1 if Odd or DL2 if Even
+
+ ".align 8\n\t"
+ "1:\n\t"
+ "movq (%%" XSI "), %%mm0\n\t" // L2 - the newest weave pixel value
+ "movq (%%" XAX "), %%mm1\n\t" // L1 - the top pixel
+ "movq (%%" XDX "), %%mm2\n\t" // L2P - the prev weave pixel
+ "movq (%%" XAX ", %%" XCX "), %%mm3\n\t" // L3, next odd row
+ "movq %%mm1, %%mm6\n\t" // L1 - get simple single pixel interp
+
+ // pavgb mm6, mm3 // use macro below
+ V_PAVGB ("%%mm6", "%%mm3", "%%mm4", "%[ShiftMask]")
+
+ // DJR - Diagonal Jaggie Reduction
+ // In the event that we are going to use an average (Bob) pixel we do not want a jagged
+ // stair step effect. To combat this we avg in the 2 horizontally adjacent pixels into the
+ // interpolated Bob mix. This will do horizontal smoothing for only the Bob'd pixels.
+
+ "movq %[LastAvg], %%mm4\n\t" // the bob value from prev qword in row
+ "movq %%mm6, %[LastAvg]\n\t" // save for next pass
+ "psrlq $48, %%mm4\n\t" // right justify 1 pixel
+ "movq %%mm6, %%mm7\n\t" // copy of simple bob pixel
+ "psllq $16, %%mm7\n\t" // left justify 3 pixels
+ "por %%mm7, %%mm4\n\t" // and combine
+ "movq (%%" XBX "), %%mm5\n\t" // next horiz qword from L1
+ // pavgb mm5, qword ptr[ebx+ecx] // next horiz qword from L3, use macro below
+
+ V_PAVGB ("%%mm5", "(%%" XBX ",%%" XCX ")", "%%mm7", "%[ShiftMask]")
+ "psllq $48, %%mm5\n\t" // left just 1 pixel
+ "movq %%mm6, %%mm7\n\t" // another copy of simple bob pixel
+ "psrlq $16, %%mm7\n\t" // right just 3 pixels
+ "por %%mm7, %%mm5\n\t" // combine
+ // pavgb mm4, mm5 // avg of forward and prev by 1 pixel, use macro
+ V_PAVGB ("%%mm4", "%%mm5", "%%mm5", "%[ShiftMask]") // mm5 gets modified if MMX
+ // pavgb mm6, mm4 // avg of center and surround interp vals, use macro
+ V_PAVGB ("%%mm6", "%%mm4", "%%mm7", "%[ShiftMask]")
+
+ // Don't do any more averaging than needed for mmx. It hurts performance and causes rounding errors.
+#ifndef IS_MMX
+ // pavgb mm4, mm6 // 1/4 center, 3/4 adjacent
+ V_PAVGB ("%%mm4", "%%mm6", "%%mm7", "%[ShiftMask]")
+ // pavgb mm6, mm4 // 3/8 center, 5/8 adjacent
+ V_PAVGB ("%%mm6", "%%mm4", "%%mm7", "%[ShiftMask]")
+#endif
+
+ // get abs value of possible L2 comb
+ "movq %%mm6, %%mm4\n\t" // work copy of interp val
+ "movq %%mm2, %%mm7\n\t" // L2
+ "psubusb %%mm4, %%mm7\n\t" // L2 - avg
+ "movq %%mm4, %%mm5\n\t" // avg
+ "psubusb %%mm2, %%mm5\n\t" // avg - L2
+ "por %%mm7, %%mm5\n\t" // abs(avg-L2)
+
+ // get abs value of possible L2P comb
+ "movq %%mm0, %%mm7\n\t" // L2P
+ "psubusb %%mm4, %%mm7\n\t" // L2P - avg
+ "psubusb %%mm0, %%mm4\n\t" // avg - L2P
+ "por %%mm7, %%mm4\n\t" // abs(avg-L2P)
+
+ // use L2 or L2P depending upon which makes smaller comb
+ "psubusb %%mm5, %%mm4\n\t" // see if it goes to zero
+ "psubusb %%mm5, %%mm5\n\t" // 0
+ "pcmpeqb %%mm5, %%mm4\n\t" // if (mm4=0) then FF else 0
+ "pcmpeqb %%mm4, %%mm5\n\t" // opposite of mm4
+
+ // if Comb(L2P) <= Comb(L2) then mm4=ff, mm5=0 else mm4=0, mm5 = 55
+ "pand %%mm2, %%mm5\n\t" // use L2 if mm5 == ff, else 0
+ "pand %%mm0, %%mm4\n\t" // use L2P if mm4 = ff, else 0
+ "por %%mm5, %%mm4\n\t" // may the best win
+
+ // Inventory: at this point we have the following values:
+ // mm0 = L2P (or L2)
+ // mm1 = L1
+ // mm2 = L2 (or L2P)
+ // mm3 = L3
+ // mm4 = the best of L2,L2P weave pixel, based upon comb
+ // mm6 = the avg interpolated value, if we need to use it
+ // Let's measure movement, as how much the weave pixel has changed
+
+ "movq %%mm2, %%mm7\n\t"
+ "psubusb %%mm0, %%mm2\n\t"
+ "psubusb %%mm7, %%mm0\n\t"
+ "por %%mm2, %%mm0\n\t" // abs value of change, used later
+
+ // Now lets clip our chosen value to be not outside of the range
+ // of the high/low range L1-L3 by more than MaxComb.
+ // This allows some comb but limits the damages and also allows more
+ // detail than a boring oversmoothed clip.
+
+ "movq %%mm1, %%mm2\n\t" // copy L1
+ // pmaxub mm2, mm3 // use macro
+ V_PMAXUB ("%%mm2", "%%mm3") // now = Max(L1,L3)
+ "movq %%mm1, %%mm5\n\t" // copy L1
+ // pminub mm5, mm3 // now = Min(L1,L3), use macro
+ V_PMINUB ("%%mm5", "%%mm3", "%%mm7")
+
+ // allow the value to be above the high or below the low by amt of MaxComb
+ "psubusb %[MaxComb], %%mm5\n\t" // lower min by diff
+ "paddusb %[MaxComb], %%mm2\n\t" // increase max by diff
+ // pmaxub mm4, mm5 // now = Max(best,Min(L1,L3) use macro
+ V_PMAXUB ("%%mm4", "%%mm5")
+ // pminub mm4, mm2 // now = Min( Max(best, Min(L1,L3), L2 )=L2 clipped
+ V_PMINUB ("%%mm4", "%%mm2", "%%mm7")
+
+ // Blend weave pixel with bob pixel, depending on motion val in mm0
+ "psubusb %[MotionThreshold], %%mm0\n\t" // test Threshold, clear chroma change
+ "pmullw %[MotionSense], %%mm0\n\t" // mul by user factor, keep low 16 bits
+ "movq %[QW256], %%mm7\n\t"
+#ifdef IS_MMXEXT
+ "pminsw %%mm7, %%mm0\n\t" // max = 256
+#else
+ "paddusw %[QW256B], %%mm0\n\t" // add, may sat at fff..
+ "psubusw %[QW256B], %%mm0\n\t" // now = Min(L1,256)
+#endif
+ "psubusw %%mm0, %%mm7\n\t" // so the 2 sum to 256, weighted avg
+ "movq %%mm4, %%mm2\n\t" // save weave chroma info before trashing
+ "pand %[YMask], %%mm4\n\t" // keep only luma from calc'd value
+ "pmullw %%mm7, %%mm4\n\t" // use more weave for less motion
+ "pand %[YMask], %%mm6\n\t" // keep only luma from calc'd value
+ "pmullw %%mm0, %%mm6\n\t" // use more bob for large motion
+ "paddusw %%mm6, %%mm4\n\t" // combine
+ "psrlw $8, %%mm4\n\t" // div by 256 to get weighted avg
+ // chroma comes from weave pixel
+ "pand %[UVMask], %%mm2\n\t" // keep chroma
+ "por %%mm4, %%mm2\n\t" // and combine
+ V_MOVNTQ ("(%%" XDI ")", "%%mm2") // move in our clipped best, use macro
+ // bump ptrs and loop
+ LEAX " 8(%%" XAX "), %%" XAX "\n\t"
+ LEAX " 8(%%" XBX "), %%" XBX "\n\t"
+ LEAX " 8(%%" XDX "), %%" XDX "\n\t"
+ LEAX " 8(%%" XDI "), %%" XDI "\n\t"
+ LEAX " 8(%%" XSI "), %%" XSI "\n\t"
+ DECX " %[LoopCtr]\n\t"
+
+ "jg 1b\n\t" // loop if not to last line
+ // note P-III default assumes backward branches taken
+ "jl 1f\n\t" // done
+ MOVX " %%" XAX ", %%" XBX "\n\t" // sharpness lookahead 1 byte only, be wrong on 1
+ "jmp 1b\n\t"
+
+ "1:\n\t"
+ MOVX " %[oldbx], %%" XBX "\n\t"
+ "emms\n\t": /* no outputs */
+
+ :[LastAvg] "m" (LastAvg),
+ [L1] "m" (L1),
+ [L3] "m" (L3),
+ [L2P] "m" (L2P),
+ [L2] "m" (L2),
+ [Dest] "m" (Dest),
+ [ShiftMask] "m" (ShiftMask),
+ [MaxComb] "m" (MaxComb),
+ [MotionThreshold] "m" (MotionThreshold),
+ [MotionSense] "m" (MotionSense),
+ [QW256B] "m" (QW256B),
+ [YMask] "m" (YMask),
+ [UVMask] "m" (UVMask),
+ [LoopCtr] "m" (LoopCtr),
+ [QW256] "m" (QW256),
+ [oldbx] "m" (oldbx)
+ : XAX, XCX, XDX, XSI, XDI,
+ "st", "st(1)", "st(2)", "st(3)", "st(4)", "st(5)", "st(6)", "st(7)",
+#ifdef __MMX__
+ "mm0", "mm1", "mm2", "mm3", "mm4", "mm5", "mm6", "mm7",
+#endif
+ "memory", "cc");
+}
+
+static void
+FUNCT_NAME_UYVY (GstDeinterlaceMethodGreedyH *self, const guint8 * L1, const guint8 * L2, const guint8 * L3, const guint8 * L2P, guint8 * Dest, gint width)
+{
+
+ // in tight loop some vars are accessed faster in local storage
+ gint64 YMask = 0xff00ff00ff00ff00ull; // to keep only luma
+ gint64 UVMask = 0x00ff00ff00ff00ffull; // to keep only chroma
+ gint64 ShiftMask = 0xfefefefefefefefeull; // to avoid shifting chroma to luma
+ gint64 QW256 = 0x0100010001000100ull; // 4 256's
+ gint64 MaxComb;
+ gint64 MotionThreshold;
+ gint64 MotionSense;
+ gint64 i;
+ glong LoopCtr;
+ glong oldbx = 0;
+
+ gint64 QW256B;
+ gint64 LastAvg = 0; //interp value from left qword
+
+ // FIXME: Use C implementation if the width is not a multiple of 4
+ // Do something more optimal later
+ if (width % 4 != 0)
+ C_FUNCT_UYVY (self, L1, L2, L3, L2P, Dest, width);
+
+ // Set up our two parms that are actually evaluated for each pixel
+ i = self->max_comb;
+ MaxComb =
+ i << 56 | i << 48 | i << 40 | i << 32 | i << 24 | i << 16 | i << 8 | i;
+
+ i = self->motion_threshold; // scale to range of 0-257
+ MotionThreshold = i << 48 | i << 32 | i << 16 | i | UVMask;
+
+ i = self->motion_sense; // scale to range of 0-257
+ MotionSense = i << 48 | i << 32 | i << 16 | i;
+
+ i = 0xffffffff - 256;
+ QW256B = i << 48 | i << 32 | i << 16 | i; // save a couple instr on PMINSW instruct.
+
+ LoopCtr = width / 8 - 1; // there are LineLength / 4 qwords per line but do 1 less, adj at end of loop
+
+ // For ease of reading, the comments below assume that we're operating on an odd
+ // field (i.e., that InfoIsOdd is true). Assume the obvious for even lines..
+ __asm__ __volatile__ (
+ // save ebx (-fPIC)
+ MOVX " %%" XBX ", %[oldbx]\n\t"
+ MOVX " %[L1], %%" XAX "\n\t"
+ LEAX " 8(%%" XAX "), %%" XBX "\n\t" // next qword needed by DJR
+ MOVX " %[L3], %%" XCX "\n\t"
+ SUBX " %%" XAX ", %%" XCX "\n\t" // carry L3 addr as an offset
+ MOVX " %[L2P], %%" XDX "\n\t"
+ MOVX " %[L2], %%" XSI "\n\t"
+ MOVX " %[Dest], %%" XDI "\n\t" // DL1 if Odd or DL2 if Even
+
+ ".align 8\n\t"
+ "1:\n\t"
+ "movq (%%" XSI "), %%mm0\n\t" // L2 - the newest weave pixel value
+ "movq (%%" XAX "), %%mm1\n\t" // L1 - the top pixel
+ "movq (%%" XDX "), %%mm2\n\t" // L2P - the prev weave pixel
+ "movq (%%" XAX ", %%" XCX "), %%mm3\n\t" // L3, next odd row
+ "movq %%mm1, %%mm6\n\t" // L1 - get simple single pixel interp
+
+ // pavgb mm6, mm3 // use macro below
+ V_PAVGB ("%%mm6", "%%mm3", "%%mm4", "%[ShiftMask]")
+
+ // DJR - Diagonal Jaggie Reduction
+ // In the event that we are going to use an average (Bob) pixel we do not want a jagged
+ // stair step effect. To combat this we avg in the 2 horizontally adjacent pixels into the
+ // interpolated Bob mix. This will do horizontal smoothing for only the Bob'd pixels.
+
+ "movq %[LastAvg], %%mm4\n\t" // the bob value from prev qword in row
+ "movq %%mm6, %[LastAvg]\n\t" // save for next pass
+ "psrlq $48, %%mm4\n\t" // right justify 1 pixel
+ "movq %%mm6, %%mm7\n\t" // copy of simple bob pixel
+ "psllq $16, %%mm7\n\t" // left justify 3 pixels
+ "por %%mm7, %%mm4\n\t" // and combine
+ "movq (%%" XBX "), %%mm5\n\t" // next horiz qword from L1
+ // pavgb mm5, qword ptr[ebx+ecx] // next horiz qword from L3, use macro below
+
+ V_PAVGB ("%%mm5", "(%%" XBX ",%%" XCX ")", "%%mm7", "%[ShiftMask]")
+ "psllq $48, %%mm5\n\t" // left just 1 pixel
+ "movq %%mm6, %%mm7\n\t" // another copy of simple bob pixel
+ "psrlq $16, %%mm7\n\t" // right just 3 pixels
+ "por %%mm7, %%mm5\n\t" // combine
+ // pavgb mm4, mm5 // avg of forward and prev by 1 pixel, use macro
+ V_PAVGB ("%%mm4", "%%mm5", "%%mm5", "%[ShiftMask]") // mm5 gets modified if MMX
+ // pavgb mm6, mm4 // avg of center and surround interp vals, use macro
+ V_PAVGB ("%%mm6", "%%mm4", "%%mm7", "%[ShiftMask]")
+
+ // Don't do any more averaging than needed for mmx. It hurts performance and causes rounding errors.
+#ifndef IS_MMX
+ // pavgb mm4, mm6 // 1/4 center, 3/4 adjacent
+ V_PAVGB ("%%mm4", "%%mm6", "%%mm7", "%[ShiftMask]")
+ // pavgb mm6, mm4 // 3/8 center, 5/8 adjacent
+ V_PAVGB ("%%mm6", "%%mm4", "%%mm7", "%[ShiftMask]")
+#endif
+
+ // get abs value of possible L2 comb
+ "movq %%mm6, %%mm4\n\t" // work copy of interp val
+ "movq %%mm2, %%mm7\n\t" // L2
+ "psubusb %%mm4, %%mm7\n\t" // L2 - avg
+ "movq %%mm4, %%mm5\n\t" // avg
+ "psubusb %%mm2, %%mm5\n\t" // avg - L2
+ "por %%mm7, %%mm5\n\t" // abs(avg-L2)
+
+ // get abs value of possible L2P comb
+ "movq %%mm0, %%mm7\n\t" // L2P
+ "psubusb %%mm4, %%mm7\n\t" // L2P - avg
+ "psubusb %%mm0, %%mm4\n\t" // avg - L2P
+ "por %%mm7, %%mm4\n\t" // abs(avg-L2P)
+
+ // use L2 or L2P depending upon which makes smaller comb
+ "psubusb %%mm5, %%mm4\n\t" // see if it goes to zero
+ "psubusb %%mm5, %%mm5\n\t" // 0
+ "pcmpeqb %%mm5, %%mm4\n\t" // if (mm4=0) then FF else 0
+ "pcmpeqb %%mm4, %%mm5\n\t" // opposite of mm4
+
+ // if Comb(L2P) <= Comb(L2) then mm4=ff, mm5=0 else mm4=0, mm5 = 55
+ "pand %%mm2, %%mm5\n\t" // use L2 if mm5 == ff, else 0
+ "pand %%mm0, %%mm4\n\t" // use L2P if mm4 = ff, else 0
+ "por %%mm5, %%mm4\n\t" // may the best win
+
+ // Inventory: at this point we have the following values:
+ // mm0 = L2P (or L2)
+ // mm1 = L1
+ // mm2 = L2 (or L2P)
+ // mm3 = L3
+ // mm4 = the best of L2,L2P weave pixel, based upon comb
+ // mm6 = the avg interpolated value, if we need to use it
+ // Let's measure movement, as how much the weave pixel has changed
+
+ "movq %%mm2, %%mm7\n\t"
+ "psubusb %%mm0, %%mm2\n\t"
+ "psubusb %%mm7, %%mm0\n\t"
+ "por %%mm2, %%mm0\n\t" // abs value of change, used later
+
+ // Now lets clip our chosen value to be not outside of the range
+ // of the high/low range L1-L3 by more than MaxComb.
+ // This allows some comb but limits the damages and also allows more
+ // detail than a boring oversmoothed clip.
+
+ "movq %%mm1, %%mm2\n\t" // copy L1
+ // pmaxub mm2, mm3 // use macro
+ V_PMAXUB ("%%mm2", "%%mm3") // now = Max(L1,L3)
+ "movq %%mm1, %%mm5\n\t" // copy L1
+ // pminub mm5, mm3 // now = Min(L1,L3), use macro
+ V_PMINUB ("%%mm5", "%%mm3", "%%mm7")
+
+ // allow the value to be above the high or below the low by amt of MaxComb
+ "psubusb %[MaxComb], %%mm5\n\t" // lower min by diff
+ "paddusb %[MaxComb], %%mm2\n\t" // increase max by diff
+ // pmaxub mm4, mm5 // now = Max(best,Min(L1,L3) use macro
+ V_PMAXUB ("%%mm4", "%%mm5")
+ // pminub mm4, mm2 // now = Min( Max(best, Min(L1,L3), L2 )=L2 clipped
+ V_PMINUB ("%%mm4", "%%mm2", "%%mm7")
+
+ // Blend weave pixel with bob pixel, depending on motion val in mm0
+ "psubusb %[MotionThreshold], %%mm0\n\t" // test Threshold, clear chroma change
+ "psrlw $8, %%mm0\n\t" // div by 256 to get weighted avg
+ "pmullw %[MotionSense], %%mm0\n\t" // mul by user factor, keep low 16 bits
+ "movq %[QW256], %%mm7\n\t"
+#ifdef IS_MMXEXT
+ "pminsw %%mm7, %%mm0\n\t" // max = 256
+#else
+ "paddusw %[QW256B], %%mm0\n\t" // add, may sat at fff..
+ "psubusw %[QW256B], %%mm0\n\t" // now = Min(L1,256)
+#endif
+ "psubusw %%mm0, %%mm7\n\t" // so the 2 sum to 256, weighted avg
+ "movq %%mm4, %%mm2\n\t" // save weave chroma info before trashing
+ "pand %[YMask], %%mm4\n\t" // keep only luma from calc'd value
+ "psrlw $8, %%mm4\n\t" // div by 256 to get weighted avg
+ "pmullw %%mm7, %%mm4\n\t" // use more weave for less motion
+ "pand %[YMask], %%mm6\n\t" // keep only luma from calc'd value
+ "psrlw $8, %%mm6\n\t" // div by 256 to get weighted avg
+ "pmullw %%mm0, %%mm6\n\t" // use more bob for large motion
+ "paddusw %%mm6, %%mm4\n\t" // combine
+ "pand %[YMask], %%mm4\n\t" // keep only luma from calc'd value
+ // chroma comes from weave pixel
+ "pand %[UVMask], %%mm2\n\t" // keep chroma
+ "por %%mm4, %%mm2\n\t" // and combine
+ V_MOVNTQ ("(%%" XDI ")", "%%mm2") // move in our clipped best, use macro
+ // bump ptrs and loop
+ LEAX " 8(%%" XAX "), %%" XAX "\n\t"
+ LEAX " 8(%%" XBX "), %%" XBX "\n\t"
+ LEAX " 8(%%" XDX "), %%" XDX "\n\t"
+ LEAX " 8(%%" XDI "), %%" XDI "\n\t"
+ LEAX " 8(%%" XSI "), %%" XSI "\n\t"
+ DECX " %[LoopCtr]\n\t"
+
+ "jg 1b\n\t" // loop if not to last line
+ // note P-III default assumes backward branches taken
+ "jl 1f\n\t" // done
+ MOVX " %%" XAX ", %%" XBX "\n\t" // sharpness lookahead 1 byte only, be wrong on 1
+ "jmp 1b\n\t"
+
+ "1:\n\t"
+ MOVX " %[oldbx], %%" XBX "\n\t"
+ "emms\n\t": /* no outputs */
+
+ :[LastAvg] "m" (LastAvg),
+ [L1] "m" (L1),
+ [L3] "m" (L3),
+ [L2P] "m" (L2P),
+ [L2] "m" (L2),
+ [Dest] "m" (Dest),
+ [ShiftMask] "m" (ShiftMask),
+ [MaxComb] "m" (MaxComb),
+ [MotionThreshold] "m" (MotionThreshold),
+ [MotionSense] "m" (MotionSense),
+ [QW256B] "m" (QW256B),
+ [YMask] "m" (YMask),
+ [UVMask] "m" (UVMask),
+ [LoopCtr] "m" (LoopCtr),
+ [QW256] "m" (QW256),
+ [oldbx] "m" (oldbx)
+ : XAX, XCX, XDX, XSI, XDI,
+ "st", "st(1)", "st(2)", "st(3)", "st(4)", "st(5)", "st(6)", "st(7)",
+#ifdef __MMX__
+ "mm0", "mm1", "mm2", "mm3", "mm4", "mm5", "mm6", "mm7",
+#endif
+ "memory", "cc");
+}
+
diff --git a/gst/deinterlace/tvtime/greedyh.c b/gst/deinterlace/tvtime/greedyh.c
new file mode 100644
index 0000000000..69f75ee2dd
--- /dev/null
+++ b/gst/deinterlace/tvtime/greedyh.c
@@ -0,0 +1,1076 @@
+/*
+ *
+ * GStreamer
+ * Copyright (C) 2004 Billy Biggs <vektor@dumbterm.net>
+ * Copyright (C) 2008,2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "greedyhmacros.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/gst.h>
+#include "plugins.h"
+#include "gstdeinterlacemethod.h"
+#ifdef HAVE_ORC
+#include <orc/orc.h>
+#endif
+
+#define GST_TYPE_DEINTERLACE_METHOD_GREEDY_H (gst_deinterlace_method_greedy_h_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_GREEDY_H(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_GREEDY_H))
+#define GST_IS_DEINTERLACE_METHOD_GREEDY_H_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_GREEDY_H))
+#define GST_DEINTERLACE_METHOD_GREEDY_H_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_GREEDY_H, GstDeinterlaceMethodGreedyHClass))
+#define GST_DEINTERLACE_METHOD_GREEDY_H(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_GREEDY_H, GstDeinterlaceMethodGreedyH))
+#define GST_DEINTERLACE_METHOD_GREEDY_H_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_GREEDY_H, GstDeinterlaceMethodGreedyHClass))
+#define GST_DEINTERLACE_METHOD_GREEDY_H_CAST(obj) ((GstDeinterlaceMethodGreedyH*)(obj))
+
+typedef struct
+{
+ GstDeinterlaceMethod parent;
+
+ guint max_comb, motion_threshold, motion_sense;
+} GstDeinterlaceMethodGreedyH;
+
+typedef void (*ScanlineFunction) (GstDeinterlaceMethodGreedyH * self,
+ const guint8 * L2, const guint8 * L1, const guint8 * L3, const guint8 * L2P,
+ guint8 * Dest, gint width);
+
+typedef struct
+{
+ GstDeinterlaceMethodClass parent_class;
+ ScanlineFunction scanline_yuy2; /* This is for YVYU too */
+ ScanlineFunction scanline_uyvy;
+ ScanlineFunction scanline_ayuv;
+ ScanlineFunction scanline_planar_y;
+ ScanlineFunction scanline_planar_uv;
+} GstDeinterlaceMethodGreedyHClass;
+
/* GreedyH interpolation for one AYUV scanline (4 bytes per pixel,
 * width is in bytes).
 *
 * L1/L3 are the lines above/below the missing line, L2 is the
 * co-located line of the current field (the weave candidate) and L2P
 * the co-located line of the previous field of the same parity. */
static void
greedyh_scanline_C_ayuv (GstDeinterlaceMethodGreedyH * self, const guint8 * L1,
    const guint8 * L2, const guint8 * L3, const guint8 * L2P, guint8 * Dest,
    gint width)
{
  gint Pos, Comp;
  guint8 l1, l1_1, l3, l3_1;
  guint8 avg, avg_1;
  guint8 avg__1[4] = { 0, };    /* previous pixel's avg, one slot per component */
  guint8 avg_s;
  guint8 avg_sc;
  guint8 best;
  guint16 mov;
  guint8 out;
  guint8 l2, lp2;
  guint8 l2_diff, lp2_diff;
  guint8 min, max;
  guint max_comb = self->max_comb;
  guint motion_sense = self->motion_sense;
  guint motion_threshold = self->motion_threshold;

  width /= 4;                   /* bytes -> pixels */
  for (Pos = 0; Pos < width; Pos++) {
    for (Comp = 0; Comp < 4; Comp++) {
      l1 = L1[0];
      l3 = L3[0];

      /* Right neighbour of the same component; reuse the current
       * samples at the end of the line */
      if (Pos == width - 1) {
        l1_1 = l1;
        l3_1 = l3;
      } else {
        l1_1 = L1[4];
        l3_1 = L3[4];
      }

      /* Average of L1 and L3 */
      avg = (l1 + l3) / 2;

      if (Pos == 0) {
        avg__1[Comp] = avg;
      }

      /* Average of next L1 and next L3 */
      avg_1 = (l1_1 + l3_1) / 2;

      /* Calculate average of one pixel forward and previous */
      avg_s = (avg__1[Comp] + avg_1) / 2;

      /* Calculate average of center and surrounding pixels */
      avg_sc = (avg + avg_s) / 2;

      /* move forward */
      avg__1[Comp] = avg;

      /* Get best L2/L2P, i.e. least diff from above average */
      l2 = L2[0];
      lp2 = L2P[0];

      l2_diff = ABS (l2 - avg_sc);

      lp2_diff = ABS (lp2 - avg_sc);

      if (l2_diff > lp2_diff)
        best = lp2;
      else
        best = l2;

      /* Clip this best L2/L2P by L1/L3 and allow to differ by GreedyMaxComb */
      max = MAX (l1, l3);
      min = MIN (l1, l3);

      if (max < 256 - max_comb)
        max += max_comb;
      else
        max = 255;

      if (min > max_comb)
        min -= max_comb;
      else
        min = 0;

      out = CLAMP (best, min, max);

      /* Components 0 and 1 — presumably alpha and luma in AYUV byte
       * order — get motion compensation; U/V keep the clipped weave
       * value.  NOTE(review): applying this to the alpha component as
       * well as luma looks intentional (alpha tracks luma), but
       * confirm against upstream. */
      if (Comp < 2) {
        /* Do motion compensation for luma, i.e. how much
         * the weave pixel differs */
        mov = ABS (l2 - lp2);
        if (mov > motion_threshold)
          mov -= motion_threshold;
        else
          mov = 0;

        mov = mov * motion_sense;
        if (mov > 256)
          mov = 256;

        /* Weighted sum on clipped weave pixel and average */
        out = (out * (256 - mov) + avg_sc * mov) / 256;
      }

      Dest[0] = out;

      Dest += 1;
      L1 += 1;
      L2 += 1;
      L3 += 1;
      L2P += 1;
    }
  }
}
+
/* GreedyH interpolation for one YUY2/YVYU scanline (width in bytes).
 *
 * Each loop iteration handles one luma byte (even offset) and one
 * chroma byte (odd offset; U and V alternate across pixels).  L1/L3
 * are the lines above/below the missing line, L2 the weave candidate
 * of the current field, L2P the weave line of the previous field. */
static void
greedyh_scanline_C_yuy2 (GstDeinterlaceMethodGreedyH * self, const guint8 * L1,
    const guint8 * L2, const guint8 * L3, const guint8 * L2P, guint8 * Dest,
    gint width)
{
  gint Pos;
  guint8 l1_l, l1_1_l, l3_l, l3_1_l;
  guint8 l1_c, l1_1_c, l3_c, l3_1_c;
  guint8 avg_l, avg_c, avg_l_1, avg_c_1;
  guint8 avg_l__1 = 0, avg_c__1 = 0;    /* previous column's averages */
  guint8 avg_s_l, avg_s_c;
  guint8 avg_sc_l, avg_sc_c;
  guint8 best_l, best_c;
  guint16 mov_l;
  guint8 out_l, out_c;
  guint8 l2_l, l2_c, lp2_l, lp2_c;
  guint8 l2_l_diff, l2_c_diff, lp2_l_diff, lp2_c_diff;
  guint8 min_l, min_c, max_l, max_c;
  guint max_comb = self->max_comb;
  guint motion_sense = self->motion_sense;
  guint motion_threshold = self->motion_threshold;

  width /= 2;                   /* bytes -> luma/chroma sample pairs */
  for (Pos = 0; Pos < width; Pos++) {
    l1_l = L1[0];
    l1_c = L1[1];
    l3_l = L3[0];
    l3_c = L3[1];

    /* Right neighbour; reuse current samples at the line end */
    if (Pos == width - 1) {
      l1_1_l = l1_l;
      l1_1_c = l1_c;
      l3_1_l = l3_l;
      l3_1_c = l3_c;
    } else {
      l1_1_l = L1[2];
      l1_1_c = L1[3];
      l3_1_l = L3[2];
      l3_1_c = L3[3];
    }

    /* Average of L1 and L3 */
    avg_l = (l1_l + l3_l) / 2;
    avg_c = (l1_c + l3_c) / 2;

    if (Pos == 0) {
      avg_l__1 = avg_l;
      avg_c__1 = avg_c;
    }

    /* Average of next L1 and next L3 */
    avg_l_1 = (l1_1_l + l3_1_l) / 2;
    avg_c_1 = (l1_1_c + l3_1_c) / 2;

    /* Calculate average of one pixel forward and previous */
    avg_s_l = (avg_l__1 + avg_l_1) / 2;
    avg_s_c = (avg_c__1 + avg_c_1) / 2;

    /* Calculate average of center and surrounding pixels */
    avg_sc_l = (avg_l + avg_s_l) / 2;
    avg_sc_c = (avg_c + avg_s_c) / 2;

    /* move forward */
    avg_l__1 = avg_l;
    avg_c__1 = avg_c;

    /* Get best L2/L2P, i.e. least diff from above average */
    l2_l = L2[0];
    l2_c = L2[1];
    lp2_l = L2P[0];
    lp2_c = L2P[1];

    l2_l_diff = ABS (l2_l - avg_sc_l);
    l2_c_diff = ABS (l2_c - avg_sc_c);

    lp2_l_diff = ABS (lp2_l - avg_sc_l);
    lp2_c_diff = ABS (lp2_c - avg_sc_c);

    if (l2_l_diff > lp2_l_diff)
      best_l = lp2_l;
    else
      best_l = l2_l;

    if (l2_c_diff > lp2_c_diff)
      best_c = lp2_c;
    else
      best_c = l2_c;

    /* Clip this best L2/L2P by L1/L3 and allow to differ by GreedyMaxComb */
    max_l = MAX (l1_l, l3_l);
    min_l = MIN (l1_l, l3_l);

    if (max_l < 256 - max_comb)
      max_l += max_comb;
    else
      max_l = 255;

    if (min_l > max_comb)
      min_l -= max_comb;
    else
      min_l = 0;

    max_c = MAX (l1_c, l3_c);
    min_c = MIN (l1_c, l3_c);

    if (max_c < 256 - max_comb)
      max_c += max_comb;
    else
      max_c = 255;

    if (min_c > max_comb)
      min_c -= max_comb;
    else
      min_c = 0;

    out_l = CLAMP (best_l, min_l, max_l);
    out_c = CLAMP (best_c, min_c, max_c);

    /* Do motion compensation for luma, i.e. how much
     * the weave pixel differs.  Chroma keeps the clipped weave value. */
    mov_l = ABS (l2_l - lp2_l);
    if (mov_l > motion_threshold)
      mov_l -= motion_threshold;
    else
      mov_l = 0;

    mov_l = mov_l * motion_sense;
    if (mov_l > 256)
      mov_l = 256;

    /* Weighted sum on clipped weave pixel and average */
    out_l = (out_l * (256 - mov_l) + avg_sc_l * mov_l) / 256;

    Dest[0] = out_l;
    Dest[1] = out_c;

    Dest += 2;
    L1 += 2;
    L2 += 2;
    L3 += 2;
    L2P += 2;
  }
}
+
+static void
+greedyh_scanline_C_uyvy (GstDeinterlaceMethodGreedyH * self, const guint8 * L1,
+ const guint8 * L2, const guint8 * L3, const guint8 * L2P, guint8 * Dest,
+ gint width)
+{
+ gint Pos;
+ guint8 l1_l, l1_1_l, l3_l, l3_1_l;
+ guint8 l1_c, l1_1_c, l3_c, l3_1_c;
+ guint8 avg_l, avg_c, avg_l_1, avg_c_1;
+ guint8 avg_l__1 = 0, avg_c__1 = 0;
+ guint8 avg_s_l, avg_s_c;
+ guint8 avg_sc_l, avg_sc_c;
+ guint8 best_l, best_c;
+ guint16 mov_l;
+ guint8 out_l, out_c;
+ guint8 l2_l, l2_c, lp2_l, lp2_c;
+ guint8 l2_l_diff, l2_c_diff, lp2_l_diff, lp2_c_diff;
+ guint8 min_l, min_c, max_l, max_c;
+ guint max_comb = self->max_comb;
+ guint motion_sense = self->motion_sense;
+ guint motion_threshold = self->motion_threshold;
+
+ width /= 2;
+ for (Pos = 0; Pos < width; Pos++) {
+ l1_l = L1[1];
+ l1_c = L1[0];
+ l3_l = L3[1];
+ l3_c = L3[0];
+
+ if (Pos == width - 1) {
+ l1_1_l = l1_l;
+ l1_1_c = l1_c;
+ l3_1_l = l3_l;
+ l3_1_c = l3_c;
+ } else {
+ l1_1_l = L1[3];
+ l1_1_c = L1[2];
+ l3_1_l = L3[3];
+ l3_1_c = L3[2];
+ }
+
+ /* Average of L1 and L3 */
+ avg_l = (l1_l + l3_l) / 2;
+ avg_c = (l1_c + l3_c) / 2;
+
+ if (Pos == 0) {
+ avg_l__1 = avg_l;
+ avg_c__1 = avg_c;
+ }
+
+ /* Average of next L1 and next L3 */
+ avg_l_1 = (l1_1_l + l3_1_l) / 2;
+ avg_c_1 = (l1_1_c + l3_1_c) / 2;
+
+ /* Calculate average of one pixel forward and previous */
+ avg_s_l = (avg_l__1 + avg_l_1) / 2;
+ avg_s_c = (avg_c__1 + avg_c_1) / 2;
+
+ /* Calculate average of center and surrounding pixels */
+ avg_sc_l = (avg_l + avg_s_l) / 2;
+ avg_sc_c = (avg_c + avg_s_c) / 2;
+
+ /* move forward */
+ avg_l__1 = avg_l;
+ avg_c__1 = avg_c;
+
+ /* Get best L2/L2P, i.e. least diff from above average */
+ l2_l = L2[1];
+ l2_c = L2[0];
+ lp2_l = L2P[1];
+ lp2_c = L2P[0];
+
+ l2_l_diff = ABS (l2_l - avg_sc_l);
+ l2_c_diff = ABS (l2_c - avg_sc_c);
+
+ lp2_l_diff = ABS (lp2_l - avg_sc_l);
+ lp2_c_diff = ABS (lp2_c - avg_sc_c);
+
+ if (l2_l_diff > lp2_l_diff)
+ best_l = lp2_l;
+ else
+ best_l = l2_l;
+
+ if (l2_c_diff > lp2_c_diff)
+ best_c = lp2_c;
+ else
+ best_c = l2_c;
+
+ /* Clip this best L2/L2P by L1/L3 and allow to differ by GreedyMaxComb */
+ max_l = MAX (l1_l, l3_l);
+ min_l = MIN (l1_l, l3_l);
+
+ if (max_l < 256 - max_comb)
+ max_l += max_comb;
+ else
+ max_l = 255;
+
+ if (min_l > max_comb)
+ min_l -= max_comb;
+ else
+ min_l = 0;
+
+ max_c = MAX (l1_c, l3_c);
+ min_c = MIN (l1_c, l3_c);
+
+ if (max_c < 256 - max_comb)
+ max_c += max_comb;
+ else
+ max_c = 255;
+
+ if (min_c > max_comb)
+ min_c -= max_comb;
+ else
+ min_c = 0;
+
+ out_l = CLAMP (best_l, min_l, max_l);
+ out_c = CLAMP (best_c, min_c, max_c);
+
+ /* Do motion compensation for luma, i.e. how much
+ * the weave pixel differs */
+ mov_l = ABS (l2_l - lp2_l);
+ if (mov_l > motion_threshold)
+ mov_l -= motion_threshold;
+ else
+ mov_l = 0;
+
+ mov_l = mov_l * motion_sense;
+ if (mov_l > 256)
+ mov_l = 256;
+
+ /* Weighted sum on clipped weave pixel and average */
+ out_l = (out_l * (256 - mov_l) + avg_sc_l * mov_l) / 256;
+
+ Dest[1] = out_l;
+ Dest[0] = out_c;
+
+ Dest += 2;
+ L1 += 2;
+ L2 += 2;
+ L3 += 2;
+ L2P += 2;
+ }
+}
+
+static void
+greedyh_scanline_C_planar_y (GstDeinterlaceMethodGreedyH * self,
+ const guint8 * L1, const guint8 * L2, const guint8 * L3, const guint8 * L2P,
+ guint8 * Dest, gint width)
+{
+ gint Pos;
+ guint8 l1, l1_1, l3, l3_1;
+ guint8 avg, avg_1;
+ guint8 avg__1 = 0;
+ guint8 avg_s;
+ guint8 avg_sc;
+ guint8 best;
+ guint16 mov;
+ guint8 out;
+ guint8 l2, lp2;
+ guint8 l2_diff, lp2_diff;
+ guint8 min, max;
+ guint max_comb = self->max_comb;
+ guint motion_sense = self->motion_sense;
+ guint motion_threshold = self->motion_threshold;
+
+ for (Pos = 0; Pos < width; Pos++) {
+ l1 = L1[0];
+ l3 = L3[0];
+
+ if (Pos == width - 1) {
+ l1_1 = l1;
+ l3_1 = l3;
+ } else {
+ l1_1 = L1[1];
+ l3_1 = L3[1];
+ }
+
+ /* Average of L1 and L3 */
+ avg = (l1 + l3) / 2;
+
+ if (Pos == 0) {
+ avg__1 = avg;
+ }
+
+ /* Average of next L1 and next L3 */
+ avg_1 = (l1_1 + l3_1) / 2;
+
+ /* Calculate average of one pixel forward and previous */
+ avg_s = (avg__1 + avg_1) / 2;
+
+ /* Calculate average of center and surrounding pixels */
+ avg_sc = (avg + avg_s) / 2;
+
+ /* move forward */
+ avg__1 = avg;
+
+ /* Get best L2/L2P, i.e. least diff from above average */
+ l2 = L2[0];
+ lp2 = L2P[0];
+
+ l2_diff = ABS (l2 - avg_sc);
+
+ lp2_diff = ABS (lp2 - avg_sc);
+
+ if (l2_diff > lp2_diff)
+ best = lp2;
+ else
+ best = l2;
+
+ /* Clip this best L2/L2P by L1/L3 and allow to differ by GreedyMaxComb */
+ max = MAX (l1, l3);
+ min = MIN (l1, l3);
+
+ if (max < 256 - max_comb)
+ max += max_comb;
+ else
+ max = 255;
+
+ if (min > max_comb)
+ min -= max_comb;
+ else
+ min = 0;
+
+ out = CLAMP (best, min, max);
+
+ /* Do motion compensation for luma, i.e. how much
+ * the weave pixel differs */
+ mov = ABS (l2 - lp2);
+ if (mov > motion_threshold)
+ mov -= motion_threshold;
+ else
+ mov = 0;
+
+ mov = mov * motion_sense;
+ if (mov > 256)
+ mov = 256;
+
+ /* Weighted sum on clipped weave pixel and average */
+ out = (out * (256 - mov) + avg_sc * mov) / 256;
+
+ Dest[0] = out;
+
+ Dest += 1;
+ L1 += 1;
+ L2 += 1;
+ L3 += 1;
+ L2P += 1;
+ }
+}
+
+static void
+greedyh_scanline_C_planar_uv (GstDeinterlaceMethodGreedyH * self,
+ const guint8 * L1, const guint8 * L2, const guint8 * L3, const guint8 * L2P,
+ guint8 * Dest, gint width)
+{
+ gint Pos;
+ guint8 l1, l1_1, l3, l3_1;
+ guint8 avg, avg_1;
+ guint8 avg__1 = 0;
+ guint8 avg_s;
+ guint8 avg_sc;
+ guint8 best;
+ guint8 out;
+ guint8 l2, lp2;
+ guint8 l2_diff, lp2_diff;
+ guint8 min, max;
+ guint max_comb = self->max_comb;
+
+ for (Pos = 0; Pos < width; Pos++) {
+ l1 = L1[0];
+ l3 = L3[0];
+
+ if (Pos == width - 1) {
+ l1_1 = l1;
+ l3_1 = l3;
+ } else {
+ l1_1 = L1[1];
+ l3_1 = L3[1];
+ }
+
+ /* Average of L1 and L3 */
+ avg = (l1 + l3) / 2;
+
+ if (Pos == 0) {
+ avg__1 = avg;
+ }
+
+ /* Average of next L1 and next L3 */
+ avg_1 = (l1_1 + l3_1) / 2;
+
+ /* Calculate average of one pixel forward and previous */
+ avg_s = (avg__1 + avg_1) / 2;
+
+ /* Calculate average of center and surrounding pixels */
+ avg_sc = (avg + avg_s) / 2;
+
+ /* move forward */
+ avg__1 = avg;
+
+ /* Get best L2/L2P, i.e. least diff from above average */
+ l2 = L2[0];
+ lp2 = L2P[0];
+
+ l2_diff = ABS (l2 - avg_sc);
+
+ lp2_diff = ABS (lp2 - avg_sc);
+
+ if (l2_diff > lp2_diff)
+ best = lp2;
+ else
+ best = l2;
+
+ /* Clip this best L2/L2P by L1/L3 and allow to differ by GreedyMaxComb */
+ max = MAX (l1, l3);
+ min = MIN (l1, l3);
+
+ if (max < 256 - max_comb)
+ max += max_comb;
+ else
+ max = 255;
+
+ if (min > max_comb)
+ min -= max_comb;
+ else
+ min = 0;
+
+ out = CLAMP (best, min, max);
+
+ Dest[0] = out;
+
+ Dest += 1;
+ L1 += 1;
+ L2 += 1;
+ L3 += 1;
+ L2P += 1;
+ }
+}
+
#ifdef BUILD_X86_ASM

/* Instantiate the SIMD scanline kernels by including greedyh.asm three
 * times, once per instruction set.  The C_FUNCT_* names point at the C
 * fallbacks above and only need to be defined for the first pass. */
#define IS_MMXEXT
#define SIMD_TYPE MMXEXT
#define C_FUNCT_YUY2 greedyh_scanline_C_yuy2
#define C_FUNCT_UYVY greedyh_scanline_C_uyvy
#define C_FUNCT_PLANAR_Y greedyh_scanline_C_planar_y
#define C_FUNCT_PLANAR_UV greedyh_scanline_C_planar_uv
#define FUNCT_NAME_YUY2 greedyh_scanline_MMXEXT_yuy2
#define FUNCT_NAME_UYVY greedyh_scanline_MMXEXT_uyvy
#define FUNCT_NAME_PLANAR_Y greedyh_scanline_MMXEXT_planar_y
#define FUNCT_NAME_PLANAR_UV greedyh_scanline_MMXEXT_planar_uv
#include "greedyh.asm"
#undef SIMD_TYPE
#undef IS_MMXEXT
#undef FUNCT_NAME_YUY2
#undef FUNCT_NAME_UYVY
#undef FUNCT_NAME_PLANAR_Y
#undef FUNCT_NAME_PLANAR_UV

#define IS_3DNOW
#define SIMD_TYPE 3DNOW
#define FUNCT_NAME_YUY2 greedyh_scanline_3DNOW_yuy2
#define FUNCT_NAME_UYVY greedyh_scanline_3DNOW_uyvy
#define FUNCT_NAME_PLANAR_Y greedyh_scanline_3DNOW_planar_y
#define FUNCT_NAME_PLANAR_UV greedyh_scanline_3DNOW_planar_uv
#include "greedyh.asm"
#undef SIMD_TYPE
#undef IS_3DNOW
#undef FUNCT_NAME_YUY2
#undef FUNCT_NAME_UYVY
#undef FUNCT_NAME_PLANAR_Y
#undef FUNCT_NAME_PLANAR_UV

#define IS_MMX
#define SIMD_TYPE MMX
#define FUNCT_NAME_YUY2 greedyh_scanline_MMX_yuy2
#define FUNCT_NAME_UYVY greedyh_scanline_MMX_uyvy
#define FUNCT_NAME_PLANAR_Y greedyh_scanline_MMX_planar_y
#define FUNCT_NAME_PLANAR_UV greedyh_scanline_MMX_planar_uv
#include "greedyh.asm"
#undef SIMD_TYPE
#undef IS_MMX
#undef FUNCT_NAME_YUY2
#undef FUNCT_NAME_UYVY
#undef FUNCT_NAME_PLANAR_Y
#undef FUNCT_NAME_PLANAR_UV
/* Undefine all four C fallback names; C_FUNCT_UYVY was previously
 * leaked past this stanza while the other three were undefined. */
#undef C_FUNCT_YUY2
#undef C_FUNCT_UYVY
#undef C_FUNCT_PLANAR_Y
#undef C_FUNCT_PLANAR_UV

#endif
+
/* Deinterlace one frame of a packed format (YUY2/YVYU/UYVY/AYUV).
 *
 * history holds the most recent fields (newest last); cur_field_idx is
 * the index of the field to interpolate.  Output is written line by
 * line into outframe: original field lines are copied, missing lines
 * are produced by the per-format scanline function. */
static void
deinterlace_frame_di_greedyh_packed (GstDeinterlaceMethod * method,
    const GstDeinterlaceField * history, guint history_count,
    GstVideoFrame * outframe, int cur_field_idx)
{
  GstDeinterlaceMethodGreedyH *self = GST_DEINTERLACE_METHOD_GREEDY_H (method);
  GstDeinterlaceMethodGreedyHClass *klass =
      GST_DEINTERLACE_METHOD_GREEDY_H_GET_CLASS (self);
  gint InfoIsOdd = 0;
  gint Line;
  gint RowStride = GST_VIDEO_FRAME_COMP_STRIDE (outframe, 0);
  gint FieldHeight = GST_VIDEO_FRAME_HEIGHT (outframe) / 2;
  gint Pitch = RowStride * 2;   /* one field line to the next (skips a line) */
  const guint8 *L1;             // ptr to Line1, of 3
  const guint8 *L2;             // ptr to Line2, the weave line
  const guint8 *L3;             // ptr to Line3
  const guint8 *L2P;            // ptr to prev Line2
  guint8 *Dest = GST_VIDEO_FRAME_COMP_DATA (outframe, 0);
  ScanlineFunction scanline;

  /* GreedyH needs enough field history; fall back to the linear
   * method while the history is still filling up. */
  if (cur_field_idx + 2 > history_count || cur_field_idx < 1) {
    GstDeinterlaceMethod *backup_method;

    backup_method = g_object_new (gst_deinterlace_method_linear_get_type (),
        NULL);

    gst_deinterlace_method_setup (backup_method, method->vinfo);
    gst_deinterlace_method_deinterlace_frame (backup_method,
        history, history_count, outframe, cur_field_idx);

    g_object_unref (backup_method);
    return;
  }

  cur_field_idx += 2;

  switch (GST_VIDEO_INFO_FORMAT (method->vinfo)) {
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_YVYU:
      scanline = klass->scanline_yuy2;
      break;
    case GST_VIDEO_FORMAT_UYVY:
      scanline = klass->scanline_uyvy;
      break;
    case GST_VIDEO_FORMAT_AYUV:
      scanline = klass->scanline_ayuv;
      break;
    default:
      g_assert_not_reached ();
      return;
  }

  // copy first even line no matter what, and the first odd line if we're
  // processing an EVEN field. (note diff from other deint rtns.)

  /* NOTE(review): this parity test uses '==' while the per-field
   * offset checks below use '& PICTURE_INTERLACED_BOTTOM'; looks
   * intentional (selecting a pure bottom field) but worth confirming. */
  if (history[cur_field_idx - 1].flags == PICTURE_INTERLACED_BOTTOM) {
    InfoIsOdd = 1;

    L1 = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 2].frame, 0);
    if (history[cur_field_idx - 2].flags & PICTURE_INTERLACED_BOTTOM)
      L1 += RowStride;

    L2 = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 1].frame, 0);
    if (history[cur_field_idx - 1].flags & PICTURE_INTERLACED_BOTTOM)
      L2 += RowStride;

    L3 = L1 + Pitch;
    L2P = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 3].frame, 0);
    if (history[cur_field_idx - 3].flags & PICTURE_INTERLACED_BOTTOM)
      L2P += RowStride;

    // copy first even line
    memcpy (Dest, L1, RowStride);
    Dest += RowStride;
  } else {
    InfoIsOdd = 0;
    L1 = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 2].frame, 0);
    if (history[cur_field_idx - 2].flags & PICTURE_INTERLACED_BOTTOM)
      L1 += RowStride;

    L2 = (guint8 *) GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx -
            1].frame, 0) + Pitch;
    if (history[cur_field_idx - 1].flags & PICTURE_INTERLACED_BOTTOM)
      L2 += RowStride;

    L3 = L1 + Pitch;
    L2P =
        (guint8 *) GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 3].frame,
        0) + Pitch;
    if (history[cur_field_idx - 3].flags & PICTURE_INTERLACED_BOTTOM)
      L2P += RowStride;

    // copy first even line
    memcpy (Dest, L1, RowStride);
    Dest += RowStride;
    // then first odd line (a duplicate of the first even line: L1 again)
    memcpy (Dest, L1, RowStride);
    Dest += RowStride;
  }

  /* Per field line: one interpolated line, then the next source line */
  for (Line = 0; Line < (FieldHeight - 1); ++Line) {
    scanline (self, L1, L2, L3, L2P, Dest, RowStride);
    Dest += RowStride;
    memcpy (Dest, L3, RowStride);
    Dest += RowStride;

    L1 += Pitch;
    L2 += Pitch;
    L3 += Pitch;
    L2P += Pitch;
  }

  /* Odd field: the last output line comes straight from the weave line */
  if (InfoIsOdd) {
    memcpy (Dest, L2, RowStride);
  }
}
+
+static void
+deinterlace_frame_di_greedyh_planar_plane (GstDeinterlaceMethodGreedyH * self,
+ const guint8 * L1, const guint8 * L2, const guint8 * L3, const guint8 * L2P,
+ guint8 * Dest, gint RowStride, gint FieldHeight, gint Pitch, gint InfoIsOdd,
+ ScanlineFunction scanline)
+{
+ gint Line;
+
+ // copy first even line no matter what, and the first odd line if we're
+ // processing an EVEN field. (note diff from other deint rtns.)
+
+ if (InfoIsOdd) {
+ // copy first even line
+ memcpy (Dest, L1, RowStride);
+ Dest += RowStride;
+ } else {
+ // copy first even line
+ memcpy (Dest, L1, RowStride);
+ Dest += RowStride;
+ // then first odd line
+ memcpy (Dest, L1, RowStride);
+ Dest += RowStride;
+ }
+
+ for (Line = 0; Line < (FieldHeight - 1); ++Line) {
+ scanline (self, L1, L2, L3, L2P, Dest, RowStride);
+ Dest += RowStride;
+ memcpy (Dest, L3, RowStride);
+ Dest += RowStride;
+
+ L1 += Pitch;
+ L2 += Pitch;
+ L3 += Pitch;
+ L2P += Pitch;
+ }
+
+ if (InfoIsOdd) {
+ memcpy (Dest, L2, RowStride);
+ }
+}
+
/* Deinterlace one planar frame (Y444/I420/YV12/Y42B/Y41B): runs the
 * plane helper once per plane, with the luma scanline function for
 * plane 0 and the chroma one for planes 1 and 2. */
static void
deinterlace_frame_di_greedyh_planar (GstDeinterlaceMethod * method,
    const GstDeinterlaceField * history, guint history_count,
    GstVideoFrame * outframe, int cur_field_idx)
{
  GstDeinterlaceMethodGreedyH *self = GST_DEINTERLACE_METHOD_GREEDY_H (method);
  GstDeinterlaceMethodGreedyHClass *klass =
      GST_DEINTERLACE_METHOD_GREEDY_H_GET_CLASS (self);
  gint InfoIsOdd;
  gint RowStride;
  gint FieldHeight;
  gint Pitch;
  const guint8 *L1;             // ptr to Line1, of 3
  const guint8 *L2;             // ptr to Line2, the weave line
  const guint8 *L3;             // ptr to Line3
  const guint8 *L2P;            // ptr to prev Line2
  guint8 *Dest;
  gint i;
  ScanlineFunction scanline;

  /* GreedyH needs enough field history; fall back to the linear
   * method while the history is still filling up. */
  if (cur_field_idx + 2 > history_count || cur_field_idx < 1) {
    GstDeinterlaceMethod *backup_method;

    backup_method = g_object_new (gst_deinterlace_method_linear_get_type (),
        NULL);

    gst_deinterlace_method_setup (backup_method, method->vinfo);
    gst_deinterlace_method_deinterlace_frame (backup_method,
        history, history_count, outframe, cur_field_idx);

    g_object_unref (backup_method);
    return;
  }

  cur_field_idx += 2;

  for (i = 0; i < 3; i++) {
    InfoIsOdd = (history[cur_field_idx - 1].flags == PICTURE_INTERLACED_BOTTOM);
    RowStride = GST_VIDEO_FRAME_COMP_STRIDE (outframe, i);
    FieldHeight = GST_VIDEO_FRAME_COMP_HEIGHT (outframe, i) / 2;
    Pitch = RowStride * 2;      /* one field line to the next */

    if (i == 0)
      scanline = klass->scanline_planar_y;
    else
      scanline = klass->scanline_planar_uv;

    Dest = GST_VIDEO_FRAME_COMP_DATA (outframe, i);

    /* Bottom fields start one row lower in each source frame */
    L1 = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 2].frame, i);
    if (history[cur_field_idx - 2].flags & PICTURE_INTERLACED_BOTTOM)
      L1 += RowStride;

    L2 = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 1].frame, i);
    if (history[cur_field_idx - 1].flags & PICTURE_INTERLACED_BOTTOM)
      L2 += RowStride;

    L3 = L1 + Pitch;
    L2P = GST_VIDEO_FRAME_COMP_DATA (history[cur_field_idx - 3].frame, i);
    if (history[cur_field_idx - 3].flags & PICTURE_INTERLACED_BOTTOM)
      L2P += RowStride;

    deinterlace_frame_di_greedyh_planar_plane (self, L1, L2, L3, L2P, Dest,
        RowStride, FieldHeight, Pitch, InfoIsOdd, scanline);
  }
}
+
/* Register the GObject type, derived from the deinterlace method base
 * class. */
G_DEFINE_TYPE (GstDeinterlaceMethodGreedyH, gst_deinterlace_method_greedy_h,
    GST_TYPE_DEINTERLACE_METHOD);

/* Property IDs; PROP_0 is the conventional reserved placeholder. */
enum
{
  PROP_0,
  PROP_MAX_COMB,
  PROP_MOTION_THRESHOLD,
  PROP_MOTION_SENSE
};
+
+static void
+gst_deinterlace_method_greedy_h_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstDeinterlaceMethodGreedyH *self = GST_DEINTERLACE_METHOD_GREEDY_H (object);
+
+ switch (prop_id) {
+ case PROP_MAX_COMB:
+ self->max_comb = g_value_get_uint (value);
+ break;
+ case PROP_MOTION_THRESHOLD:
+ self->motion_threshold = g_value_get_uint (value);
+ break;
+ case PROP_MOTION_SENSE:
+ self->motion_sense = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+gst_deinterlace_method_greedy_h_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstDeinterlaceMethodGreedyH *self = GST_DEINTERLACE_METHOD_GREEDY_H (object);
+
+ switch (prop_id) {
+ case PROP_MAX_COMB:
+ g_value_set_uint (value, self->max_comb);
+ break;
+ case PROP_MOTION_THRESHOLD:
+ g_value_set_uint (value, self->motion_threshold);
+ break;
+ case PROP_MOTION_SENSE:
+ g_value_set_uint (value, self->motion_sense);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
/* Class init: installs the three tuning properties, describes the
 * method to the deinterlace framework and picks the fastest available
 * scanline implementations. */
static void
gst_deinterlace_method_greedy_h_class_init (GstDeinterlaceMethodGreedyHClass *
    klass)
{
  GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
  GObjectClass *gobject_class = (GObjectClass *) klass;
#ifdef BUILD_X86_ASM
  /* Query Orc for the host's MMX-family capabilities */
  guint cpu_flags =
      orc_target_get_default_flags (orc_target_get_by_name ("mmx"));
#endif

  gobject_class->set_property = gst_deinterlace_method_greedy_h_set_property;
  gobject_class->get_property = gst_deinterlace_method_greedy_h_get_property;

  /* Property defaults (5/25/30) match gst_deinterlace_method_greedy_h_init */
  g_object_class_install_property (gobject_class, PROP_MAX_COMB,
      g_param_spec_uint ("max-comb",
          "Max comb",
          "Max Comb", 0, 255, 5, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
      );

  g_object_class_install_property (gobject_class, PROP_MOTION_THRESHOLD,
      g_param_spec_uint ("motion-threshold",
          "Motion Threshold",
          "Motion Threshold",
          0, 255, 25, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
      );

  g_object_class_install_property (gobject_class, PROP_MOTION_SENSE,
      g_param_spec_uint ("motion-sense",
          "Motion Sense",
          "Motion Sense",
          0, 255, 30, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
      );

  /* GreedyH works on 4 fields of history and adds one field of latency */
  dim_class->fields_required = 4;
  dim_class->name = "Motion Adaptive: Advanced Detection";
  dim_class->nick = "greedyh";
  dim_class->latency = 1;

  dim_class->deinterlace_frame_yuy2 = deinterlace_frame_di_greedyh_packed;
  dim_class->deinterlace_frame_yvyu = deinterlace_frame_di_greedyh_packed;
  dim_class->deinterlace_frame_uyvy = deinterlace_frame_di_greedyh_packed;
  dim_class->deinterlace_frame_ayuv = deinterlace_frame_di_greedyh_packed;
  dim_class->deinterlace_frame_y444 = deinterlace_frame_di_greedyh_planar;
  dim_class->deinterlace_frame_i420 = deinterlace_frame_di_greedyh_planar;
  dim_class->deinterlace_frame_yv12 = deinterlace_frame_di_greedyh_planar;
  dim_class->deinterlace_frame_y42b = deinterlace_frame_di_greedyh_planar;
  dim_class->deinterlace_frame_y41b = deinterlace_frame_di_greedyh_planar;

#ifdef BUILD_X86_ASM
  /* Prefer MMXEXT over 3DNOW over plain MMX, falling back to C */
  if (cpu_flags & ORC_TARGET_MMX_MMXEXT) {
    klass->scanline_yuy2 = greedyh_scanline_MMXEXT_yuy2;
    klass->scanline_uyvy = greedyh_scanline_MMXEXT_uyvy;
  } else if (cpu_flags & ORC_TARGET_MMX_3DNOW) {
    klass->scanline_yuy2 = greedyh_scanline_3DNOW_yuy2;
    klass->scanline_uyvy = greedyh_scanline_3DNOW_uyvy;
  } else if (cpu_flags & ORC_TARGET_MMX_MMX) {
    klass->scanline_yuy2 = greedyh_scanline_MMX_yuy2;
    klass->scanline_uyvy = greedyh_scanline_MMX_uyvy;
  } else {
    klass->scanline_yuy2 = greedyh_scanline_C_yuy2;
    klass->scanline_uyvy = greedyh_scanline_C_uyvy;
  }
#else
  klass->scanline_yuy2 = greedyh_scanline_C_yuy2;
  klass->scanline_uyvy = greedyh_scanline_C_uyvy;
#endif
  /* TODO: no SIMD implementations of these yet; always use C */
  klass->scanline_ayuv = greedyh_scanline_C_ayuv;
  klass->scanline_planar_y = greedyh_scanline_C_planar_y;
  klass->scanline_planar_uv = greedyh_scanline_C_planar_uv;
}
+
+static void
+gst_deinterlace_method_greedy_h_init (GstDeinterlaceMethodGreedyH * self)
+{
+ self->max_comb = 5;
+ self->motion_threshold = 25;
+ self->motion_sense = 30;
+}
diff --git a/gst/deinterlace/tvtime/greedyhmacros.h b/gst/deinterlace/tvtime/greedyhmacros.h
new file mode 100644
index 0000000000..cfeaff53b1
--- /dev/null
+++ b/gst/deinterlace/tvtime/greedyhmacros.h
@@ -0,0 +1,83 @@
+/*
+ * GStreamer
+ * Copyright (c) 2001 Tom Barry All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Tom Barry.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
// Define a few macros for CPU dependent instructions.
// I suspect I don't really understand how the C macro preprocessor works but
// this seems to get the job done. // TRB 7/01

// BEFORE USING THESE YOU MUST SET:

// #define SIMD_TYPE MMXEXT (or MMX or 3DNOW)

// Each V_XXX macro expands to inline-asm text for the instruction set
// selected by SIMD_TYPE; the two-level V_XXX2/V_XXX3 indirection exists
// to force SIMD_TYPE to be expanded before token pasting.

// some macros for pavgb instruction
// V_PAVGB(mmr1, mmr2, mmr work register, smask) mmr2 may = mmrw if you can trash it

// Plain-MMX emulation: mask off the low bit of each byte and halve both
// operands before adding, so the result can be 1 less than a true
// rounded pavgb.
#define V_PAVGB_MMX(mmr1, mmr2, mmrw, smask) \
 "movq "mmr2", "mmrw"\n\t" \
 "pand "smask", "mmrw"\n\t" \
 "psrlw $1, "mmrw"\n\t" \
 "pand "smask", "mmr1"\n\t" \
 "psrlw $1, "mmr1"\n\t" \
 "paddusb "mmrw", "mmr1"\n\t"
#define V_PAVGB_MMXEXT(mmr1, mmr2, mmrw, smask) "pavgb "mmr2", "mmr1"\n\t"
#define V_PAVGB_3DNOW(mmr1, mmr2, mmrw, smask) "pavgusb "mmr2", "mmr1"\n\t"
#define V_PAVGB(mmr1, mmr2, mmrw, smask) V_PAVGB2(mmr1, mmr2, mmrw, smask, SIMD_TYPE)
#define V_PAVGB2(mmr1, mmr2, mmrw, smask, simd_type) V_PAVGB3(mmr1, mmr2, mmrw, smask, simd_type)
#define V_PAVGB3(mmr1, mmr2, mmrw, smask, simd_type) V_PAVGB_##simd_type(mmr1, mmr2, mmrw, smask)

// some macros for pmaxub instruction
// Plain-MMX emulation via saturating subtract then add:
// max(a,b) = (a -sat b) +sat b
#define V_PMAXUB_MMX(mmr1, mmr2) \
 "psubusb "mmr2", "mmr1"\n\t" \
 "paddusb "mmr2", "mmr1"\n\t"
#define V_PMAXUB_MMXEXT(mmr1, mmr2) "pmaxub "mmr2", "mmr1"\n\t"
#define V_PMAXUB_3DNOW(mmr1, mmr2) V_PMAXUB_MMX(mmr1, mmr2) // use MMX version
#define V_PMAXUB(mmr1, mmr2) V_PMAXUB2(mmr1, mmr2, SIMD_TYPE)
#define V_PMAXUB2(mmr1, mmr2, simd_type) V_PMAXUB3(mmr1, mmr2, simd_type)
#define V_PMAXUB3(mmr1, mmr2, simd_type) V_PMAXUB_##simd_type(mmr1, mmr2)

// some macros for pminub instruction
// V_PMINUB(mmr1, mmr2, mmr work register) mmr2 may NOT = mmrw
// Plain-MMX emulation: build all-ones with pcmpeqb, derive ~b via
// saturating subtract, then min(a,b) = (a +sat ~b) -sat ~b
#define V_PMINUB_MMX(mmr1, mmr2, mmrw) \
 "pcmpeqb "mmrw", "mmrw"\n\t" \
 "psubusb "mmr2", "mmrw"\n\t" \
 "paddusb "mmrw", "mmr1"\n\t" \
 "psubusb "mmrw", "mmr1"\n\t"
#define V_PMINUB_MMXEXT(mmr1, mmr2, mmrw) "pminub "mmr2", "mmr1"\n\t"
#define V_PMINUB_3DNOW(mmr1, mmr2, mmrw) V_PMINUB_MMX(mmr1, mmr2, mmrw) // use MMX version
#define V_PMINUB(mmr1, mmr2, mmrw) V_PMINUB2(mmr1, mmr2, mmrw, SIMD_TYPE)
#define V_PMINUB2(mmr1, mmr2, mmrw, simd_type) V_PMINUB3(mmr1, mmr2, mmrw, simd_type)
#define V_PMINUB3(mmr1, mmr2, mmrw, simd_type) V_PMINUB_##simd_type(mmr1, mmr2, mmrw)

// some macros for movntq instruction
// V_MOVNTQ(mmr1, mmr2)
// Only MMXEXT has the non-temporal store; the others use a plain movq.
#define V_MOVNTQ_MMX(mmr1, mmr2) "movq "mmr2", "mmr1"\n\t"
#define V_MOVNTQ_3DNOW(mmr1, mmr2) "movq "mmr2", "mmr1"\n\t"
#define V_MOVNTQ_MMXEXT(mmr1, mmr2) "movntq "mmr2", "mmr1"\n\t"
#define V_MOVNTQ(mmr1, mmr2) V_MOVNTQ2(mmr1, mmr2, SIMD_TYPE)
#define V_MOVNTQ2(mmr1, mmr2, simd_type) V_MOVNTQ3(mmr1, mmr2, simd_type)
#define V_MOVNTQ3(mmr1, mmr2, simd_type) V_MOVNTQ_##simd_type(mmr1, mmr2)

// end of macros
+
diff --git a/gst/deinterlace/tvtime/linear.c b/gst/deinterlace/tvtime/linear.c
new file mode 100644
index 0000000000..9c45353e0b
--- /dev/null
+++ b/gst/deinterlace/tvtime/linear.c
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2002 Billy Biggs <vektor@dumbterm.net>.
+ * Copyright (C) 2008,2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+#ifdef HAVE_ORC
+#include <orc/orc.h>
+#endif
+#include "tvtime.h"
+
/* Standard GObject type boilerplate for the "linear" deinterlace method.
 * The method carries no state of its own, so the instance and class
 * structures are plain aliases of the simple-method base types. */
#define GST_TYPE_DEINTERLACE_METHOD_LINEAR	(gst_deinterlace_method_linear_get_type ())
#define GST_IS_DEINTERLACE_METHOD_LINEAR(obj)	(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_LINEAR))
#define GST_IS_DEINTERLACE_METHOD_LINEAR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_LINEAR))
#define GST_DEINTERLACE_METHOD_LINEAR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_LINEAR, GstDeinterlaceMethodLinearClass))
#define GST_DEINTERLACE_METHOD_LINEAR(obj)	(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_LINEAR, GstDeinterlaceMethodLinear))
#define GST_DEINTERLACE_METHOD_LINEAR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_LINEAR, GstDeinterlaceMethodLinearClass))
#define GST_DEINTERLACE_METHOD_LINEAR_CAST(obj)	((GstDeinterlaceMethodLinear*)(obj))

GType gst_deinterlace_method_linear_get_type (void);

typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodLinear;
typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodLinearClass;
+
/* Synthesize one missing scanline from the two adjacent field lines s1 and
 * s2 by delegating to deinterlace_line_linear().  `size` is the line length
 * in bytes.  (Presumably a 50/50 blend — confirm in the tvtime line
 * primitives.) */
static void
deinterlace_scanline_linear_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const guint8 * s1, const guint8 * s2, gint size)
{
  deinterlace_line_linear (out, s1, s2, size);
}
+
/* Interpolation hook for packed formats: blend the field lines t0 and b0
 * from the scanline bundle into `out`. */
static void
deinterlace_scanline_linear_packed_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_c (self, out, scanlines->t0, scanlines->b0, size);
}
+
/* Interpolation hook for the planar Y plane; same t0/b0 byte-wise blend as
 * the packed variant. */
static void
deinterlace_scanline_linear_planar_y_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_c (self, out, scanlines->t0, scanlines->b0, size);
}
+
/* Interpolation hook for the planar U plane; same t0/b0 byte-wise blend as
 * the packed variant. */
static void
deinterlace_scanline_linear_planar_u_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_c (self, out, scanlines->t0, scanlines->b0, size);
}
+
/* Interpolation hook for the planar V plane; same t0/b0 byte-wise blend as
 * the packed variant. */
static void
deinterlace_scanline_linear_planar_v_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_c (self, out, scanlines->t0, scanlines->b0, size);
}
+
+G_DEFINE_TYPE (GstDeinterlaceMethodLinear, gst_deinterlace_method_linear,
+ GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
/* Class setup: register the linear-interpolation scanline hooks for every
 * supported packed and planar format.  Only interpolate hooks are set;
 * copy hooks are left at the base-class defaults.  Needs a single field
 * (fields_required = 1) and adds no latency. */
static void
gst_deinterlace_method_linear_class_init (GstDeinterlaceMethodLinearClass *
    klass)
{
  GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
  GstDeinterlaceSimpleMethodClass *dism_class =
      (GstDeinterlaceSimpleMethodClass *) klass;

  dim_class->fields_required = 1;
  dim_class->name = "Television: Full resolution";
  dim_class->nick = "linear";
  dim_class->latency = 0;

  /* All packed formats share one byte-wise blend implementation. */
  dism_class->interpolate_scanline_yuy2 = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_yvyu = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_uyvy = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_ayuv = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_argb = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_abgr = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_rgba = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_bgra = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_rgb = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_bgr = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_nv12 = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_nv21 = deinterlace_scanline_linear_packed_c;
  dism_class->interpolate_scanline_planar_y =
      deinterlace_scanline_linear_planar_y_c;
  dism_class->interpolate_scanline_planar_u =
      deinterlace_scanline_linear_planar_u_c;
  dism_class->interpolate_scanline_planar_v =
      deinterlace_scanline_linear_planar_v_c;

}
+
/* Instance init: nothing to do — the method is stateless. */
static void
gst_deinterlace_method_linear_init (GstDeinterlaceMethodLinear * self)
{
}
diff --git a/gst/deinterlace/tvtime/linearblend.c b/gst/deinterlace/tvtime/linearblend.c
new file mode 100644
index 0000000000..c0e4a0b535
--- /dev/null
+++ b/gst/deinterlace/tvtime/linearblend.c
@@ -0,0 +1,217 @@
+/*
+ * Linear blend deinterlacing plugin. The idea for this algorithm came
+ * from the linear blend deinterlacer which originated in the mplayer
+ * sources.
+ *
+ * Copyright (C) 2002 Billy Biggs <vektor@dumbterm.net>.
+ * Copyright (C) 2008,2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+#ifdef HAVE_ORC
+#include <orc/orc.h>
+#endif
+#include "tvtime.h"
+
/* Standard GObject type boilerplate for the "linearblend" deinterlace
 * method.  Stateless, so instance/class structs alias the simple-method
 * base types. */
#define GST_TYPE_DEINTERLACE_METHOD_LINEAR_BLEND	(gst_deinterlace_method_linear_blend_get_type ())
#define GST_IS_DEINTERLACE_METHOD_LINEAR_BLEND(obj)	(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_LINEAR_BLEND))
#define GST_IS_DEINTERLACE_METHOD_LINEAR_BLEND_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_LINEAR_BLEND))
#define GST_DEINTERLACE_METHOD_LINEAR_BLEND_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_LINEAR_BLEND, GstDeinterlaceMethodLinearBlendClass))
#define GST_DEINTERLACE_METHOD_LINEAR_BLEND(obj)	(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_LINEAR_BLEND, GstDeinterlaceMethodLinearBlend))
#define GST_DEINTERLACE_METHOD_LINEAR_BLEND_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_LINEAR_BLEND, GstDeinterlaceMethodLinearBlendClass))
#define GST_DEINTERLACE_METHOD_LINEAR_BLEND_CAST(obj)	((GstDeinterlaceMethodLinearBlend*)(obj))

GType gst_deinterlace_method_linear_blend_get_type (void);

typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodLinearBlend;
typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodLinearBlendClass;
+
/* Interpolate one missing scanline: a three-way blend of the adjacent field
 * lines t0/b0 with the temporally previous line m1.  When no previous field
 * is available (m1 == NULL, e.g. at the start of the stream) fall back to a
 * plain spatial blend of t0 and b0.  `size` is in bytes. */
static inline void
deinterlace_scanline_linear_blend_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const guint8 * t0, const guint8 * b0, const guint8 * m1,
    gint size)
{
  if (m1 == NULL) {
    deinterlace_line_linear (out, t0, b0, size);
  } else {
    deinterlace_line_linear_blend (out, t0, b0, m1, size);
  }
}
+
/* Packed-format interpolation hook: temporal blend of t0/b0/m1. */
static void
deinterlace_scanline_linear_blend_packed_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_blend_c (self, out, scanlines->t0, scanlines->b0,
      scanlines->m1, size);
}
+
/* Planar-Y interpolation hook: temporal blend of t0/b0/m1. */
static void
deinterlace_scanline_linear_blend_planar_y_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_blend_c (self, out, scanlines->t0, scanlines->b0,
      scanlines->m1, size);
}
+
/* Planar-U interpolation hook: temporal blend of t0/b0/m1. */
static void
deinterlace_scanline_linear_blend_planar_u_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_blend_c (self, out, scanlines->t0, scanlines->b0,
      scanlines->m1, size);
}
+
/* Planar-V interpolation hook: temporal blend of t0/b0/m1. */
static void
deinterlace_scanline_linear_blend_planar_v_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_blend_c (self, out, scanlines->t0, scanlines->b0,
      scanlines->m1, size);
}
+
/* "Copy" an existing scanline: blend the current line m0 with the adjacent
 * lines t1/b1 of the following field.  When no following field is available
 * (t1 == NULL) the line is copied through unchanged. */
static inline void
deinterlace_scanline_linear_blend2_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const guint8 * m0, const guint8 * t1, const guint8 * b1,
    gint size)
{
  if (t1 == NULL) {
    memcpy (out, m0, size);
  } else {
    deinterlace_line_linear_blend (out, t1, b1, m0, size);
  }
}
+
/* Packed-format copy hook: temporal blend of m0/t1/b1. */
static void
deinterlace_scanline_linear_blend2_packed_c (GstDeinterlaceSimpleMethod * self,
    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
{
  deinterlace_scanline_linear_blend2_c (self, out, scanlines->m0, scanlines->t1,
      scanlines->b1, size);
}
+
/* Planar-Y copy hook: temporal blend of m0/t1/b1. */
static void
deinterlace_scanline_linear_blend2_planar_y_c (GstDeinterlaceSimpleMethod *
    self, guint8 * out, const GstDeinterlaceScanlineData * scanlines,
    guint size)
{
  deinterlace_scanline_linear_blend2_c (self, out, scanlines->m0, scanlines->t1,
      scanlines->b1, size);
}
+
/* Planar-U copy hook: temporal blend of m0/t1/b1. */
static void
deinterlace_scanline_linear_blend2_planar_u_c (GstDeinterlaceSimpleMethod *
    self, guint8 * out, const GstDeinterlaceScanlineData * scanlines,
    guint size)
{
  deinterlace_scanline_linear_blend2_c (self, out, scanlines->m0, scanlines->t1,
      scanlines->b1, size);
}
+
/* Planar-V copy hook: temporal blend of m0/t1/b1. */
static void
deinterlace_scanline_linear_blend2_planar_v_c (GstDeinterlaceSimpleMethod *
    self, guint8 * out, const GstDeinterlaceScanlineData * scanlines,
    guint size)
{
  deinterlace_scanline_linear_blend2_c (self, out, scanlines->m0, scanlines->t1,
      scanlines->b1, size);
}
+
+G_DEFINE_TYPE (GstDeinterlaceMethodLinearBlend,
+ gst_deinterlace_method_linear_blend, GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
/* Class setup: register the temporal-blend interpolate and copy hooks for
 * every supported format.  Requires two fields (current + previous), so the
 * method adds one field of latency. */
static void
    gst_deinterlace_method_linear_blend_class_init
    (GstDeinterlaceMethodLinearBlendClass * klass)
{
  GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
  GstDeinterlaceSimpleMethodClass *dism_class =
      (GstDeinterlaceSimpleMethodClass *) klass;

  dim_class->fields_required = 2;
  dim_class->name = "Blur: Temporal";
  dim_class->nick = "linearblend";
  dim_class->latency = 1;

  dism_class->interpolate_scanline_yuy2 =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_yvyu =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_uyvy =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_ayuv =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_argb =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_rgba =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_abgr =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_bgra =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_rgb =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_bgr =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_nv12 =
      deinterlace_scanline_linear_blend_packed_c;
  dism_class->interpolate_scanline_nv21 =
      deinterlace_scanline_linear_blend_packed_c;

  dism_class->interpolate_scanline_planar_y =
      deinterlace_scanline_linear_blend_planar_y_c;
  dism_class->interpolate_scanline_planar_u =
      deinterlace_scanline_linear_blend_planar_u_c;
  dism_class->interpolate_scanline_planar_v =
      deinterlace_scanline_linear_blend_planar_v_c;

  /* NOTE(review): unlike the interpolate list above, the copy list below
   * does not set copy_scanline_nv12/nv21 — NV12/NV21 copy falls back to the
   * base-class default.  Confirm this asymmetry is intentional. */
  dism_class->copy_scanline_yuy2 = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_yvyu = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_uyvy = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_ayuv = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_argb = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_abgr = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_rgba = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_bgra = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_rgb = deinterlace_scanline_linear_blend2_packed_c;
  dism_class->copy_scanline_bgr = deinterlace_scanline_linear_blend2_packed_c;

  dism_class->copy_scanline_planar_y =
      deinterlace_scanline_linear_blend2_planar_y_c;
  dism_class->copy_scanline_planar_u =
      deinterlace_scanline_linear_blend2_planar_u_c;
  dism_class->copy_scanline_planar_v =
      deinterlace_scanline_linear_blend2_planar_v_c;

}
+
/* Instance init: nothing to do — the method is stateless. */
static void
gst_deinterlace_method_linear_blend_init (GstDeinterlaceMethodLinearBlend *
    self)
{
}
diff --git a/gst/deinterlace/tvtime/mmx.h b/gst/deinterlace/tvtime/mmx.h
new file mode 100644
index 0000000000..b0e582b7cc
--- /dev/null
+++ b/gst/deinterlace/tvtime/mmx.h
@@ -0,0 +1,723 @@
+/* mmx.h
+
+ MultiMedia eXtensions GCC interface library for IA32.
+
+ To use this library, simply include this header file
+ and compile with GCC. You MUST have inlining enabled
+ in order for mmx_ok() to work; this can be done by
+ simply using -O on the GCC command line.
+
+ Compiling with -DMMX_TRACE will cause detailed trace
+ output to be sent to stderr for each mmx operation.
+ This adds lots of code, and obviously slows execution to
+ a crawl, but can be very useful for debugging.
+
+ THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT
+ LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS FOR ANY PARTICULAR PURPOSE.
+
+ 1997-98 by H. Dietz and R. Fisher
+
+ History:
+ 97-98* R.Fisher Early versions
+ 980501 R.Fisher Original Release
+ 980611* H.Dietz Rewrite, correctly implementing inlines, and
+ R.Fisher including direct register accesses.
+ 980616 R.Fisher Release of 980611 as 980616.
+ 980714 R.Fisher Minor corrections to Makefile, etc.
+ 980715 R.Fisher mmx_ok() now prevents optimizer from using
+ clobbered values.
+ mmx_ok() now checks if cpuid instruction is
+ available before trying to use it.
+ 980726* R.Fisher mm_support() searches for AMD 3DNow, Cyrix
+ Extended MMX, and standard MMX. It returns a
+ value which is positive if any of these are
+ supported, and can be masked with constants to
+ see which. mmx_ok() is now a call to this
+ 980726* R.Fisher Added i2r support for shift functions
+ 980919 R.Fisher Fixed AMD extended feature recognition bug.
+ 980921 R.Fisher Added definition/check for _MMX_H.
+ Added "float s[2]" to mmx_t for use with
+ 3DNow and EMMX. So same mmx_t can be used.
+ 981013 R.Fisher Fixed cpuid function 1 bug (looked at wrong reg)
+ Fixed psllq_i2r error in mmxtest.c
+
+ * Unreleased (internal or interim) versions
+
+ Notes:
+ It appears that the latest gas has the pand problem fixed, therefore
+ I'll undefine BROKEN_PAND by default.
+ String compares may be quicker than the multiple test/jumps in vendor
+ test sequence in mmx_ok(), but I'm not concerned with that right now.
+
+ Acknowledgments:
+ Jussi Laako for pointing out the errors ultimately found to be
+ connected to the failure to notify the optimizer of clobbered values.
+ Roger Hardiman for reminding us that CPUID isn't everywhere, and that
+ someone may actually try to use this on a machine without CPUID.
+ Also for suggesting code for checking this.
+ Robert Dale for pointing out the AMD recognition bug.
+ Jimmy Mayfield and Carl Witty for pointing out the Intel recognition
+ bug.
+ Carl Witty for pointing out the psllq_i2r test bug.
+*/
+
+#ifndef _MMX_H
+#define _MMX_H
+
+/*#define MMX_TRACE */
+
+/* Warning: at this writing, the version of GAS packaged
+ with most Linux distributions does not handle the
+ parallel AND operation mnemonic correctly. If the
+ symbol BROKEN_PAND is defined, a slower alternative
+ coding will be used. If execution of mmxtest results
+ in an illegal instruction fault, define this symbol.
+*/
+#undef BROKEN_PAND
+
+
+/* The type of an value that fits in an MMX register
+ (note that long long constant values MUST be suffixed
+ by LL and unsigned long long values by ULL, lest
+ they be truncated by the compiler)
+*/
/* One 64-bit MMX register value, viewable at every packed element width
 * the MMX/3DNow! instructions operate on. */
typedef union {
	long long		q;	/* Quadword (64-bit) value */
	unsigned long long	uq;	/* Unsigned Quadword */
	int			d[2];	/* 2 Doubleword (32-bit) values */
	unsigned int		ud[2];	/* 2 Unsigned Doubleword */
	short			w[4];	/* 4 Word (16-bit) values */
	unsigned short		uw[4];	/* 4 Unsigned Word */
	char			b[8];	/* 8 Byte (8-bit) values */
	unsigned char		ub[8];	/* 8 Unsigned Byte */
	float			s[2];	/* Single-precision (32-bit) value (3DNow!) */
} mmx_t;
+
+
+/* Function to test if multimedia instructions are supported...
+*/
/* Probe the CPU for multimedia instruction support via CPUID.
 *
 * Returns: 1 if plain MMX is supported,
 *          3 if Cyrix Extended MMX and MMX are supported,
 *          5 if AMD 3DNow! and MMX are supported,
 *          0 if none of these are available.
 *
 * 32-bit x86 only (uses pushf/popl and 32-bit register mnemonics).
 *
 * Fix: "eax" was previously listed in the clobber list even though it is
 * also bound to the "=a" output operand; GCC forbids clobbering a register
 * that is an input or output operand and modern compilers reject it as a
 * hard error.  Removing it is behavior-neutral — the compiler already
 * knows %eax is written because of the "=a" constraint.
 *
 * NOTE(review): the asm uses global labels ("Return:", "TryAMD:", ...);
 * if this inline function is emitted more than once in a translation unit
 * the labels will collide.  Clobbering %ebx is also problematic under
 * 32-bit PIC.  Both are pre-existing limitations, left as-is here. */
static inline int
mm_support(void)
{
	register int rval = 0;

	__asm__ __volatile__ (
		/* See if CPUID instruction is supported ... */
		/* ... Get copies of EFLAGS into eax and ecx */
		"pushf\n\t"
		"popl %%eax\n\t"
		"movl %%eax, %%ecx\n\t"

		/* ... Toggle the ID bit in one copy and store */
		/*     to the EFLAGS reg */
		"xorl $0x200000, %%eax\n\t"
		"push %%eax\n\t"
		"popf\n\t"

		/* ... Get the (hopefully modified) EFLAGS */
		"pushf\n\t"
		"popl %%eax\n\t"

		/* ... Compare and test result */
		"xorl %%eax, %%ecx\n\t"
		"testl $0x200000, %%ecx\n\t"
		"jz NotSupported1\n\t"		/* CPUID absent: nothing supported */


		/* Get standard CPUID information, and
		       go to a specific vendor section */
		"movl $0, %%eax\n\t"
		"cpuid\n\t"

		/* Check for Intel ("GenuineIntel" in ebx:edx:ecx) */
		"cmpl $0x756e6547, %%ebx\n\t"
		"jne TryAMD\n\t"
		"cmpl $0x49656e69, %%edx\n\t"
		"jne TryAMD\n\t"
		"cmpl $0x6c65746e, %%ecx\n"
		"jne TryAMD\n\t"
		"jmp Intel\n\t"

		/* Check for AMD ("AuthenticAMD") */
		"\nTryAMD:\n\t"
		"cmpl $0x68747541, %%ebx\n\t"
		"jne TryCyrix\n\t"
		"cmpl $0x69746e65, %%edx\n\t"
		"jne TryCyrix\n\t"
		"cmpl $0x444d4163, %%ecx\n"
		"jne TryCyrix\n\t"
		"jmp AMD\n\t"

		/* Check for Cyrix ("CyrixInstead") */
		"\nTryCyrix:\n\t"
		"cmpl $0x69727943, %%ebx\n\t"
		"jne NotSupported2\n\t"
		"cmpl $0x736e4978, %%edx\n\t"
		"jne NotSupported3\n\t"
		"cmpl $0x64616574, %%ecx\n\t"
		"jne NotSupported4\n\t"
		/* Drop through to Cyrix... */


		/* Cyrix Section */
		/* See if extended CPUID is supported */
		"movl $0x80000000, %%eax\n\t"
		"cpuid\n\t"
		"cmpl $0x80000000, %%eax\n\t"
		"jl MMXtest\n\t"	/* Try standard CPUID instead */

		/* Extended CPUID supported, so get extended features */
		"movl $0x80000001, %%eax\n\t"
		"cpuid\n\t"
		"testl $0x00800000, %%eax\n\t"	/* Test for MMX */
		"jz NotSupported5\n\t"		/* MMX not supported */
		"testl $0x01000000, %%eax\n\t"	/* Test for Ext'd MMX */
		"jnz EMMXSupported\n\t"
		"movl $1, %0\n\n\t"		/* MMX Supported */
		"jmp Return\n\n"
		"EMMXSupported:\n\t"
		"movl $3, %0\n\n\t"		/* EMMX and MMX Supported */
		"jmp Return\n\t"


		/* AMD Section */
		"AMD:\n\t"

		/* See if extended CPUID is supported */
		"movl $0x80000000, %%eax\n\t"
		"cpuid\n\t"
		"cmpl $0x80000000, %%eax\n\t"
		"jl MMXtest\n\t"	/* Try standard CPUID instead */

		/* Extended CPUID supported, so get extended features */
		"movl $0x80000001, %%eax\n\t"
		"cpuid\n\t"
		"testl $0x00800000, %%edx\n\t"	/* Test for MMX */
		"jz NotSupported6\n\t"		/* MMX not supported */
		"testl $0x80000000, %%edx\n\t"	/* Test for 3DNow! */
		"jnz ThreeDNowSupported\n\t"
		"movl $1, %0\n\n\t"		/* MMX Supported */
		"jmp Return\n\n"
		"ThreeDNowSupported:\n\t"
		"movl $5, %0\n\n\t"		/* 3DNow! and MMX Supported */
		"jmp Return\n\t"


		/* Intel Section */
		"Intel:\n\t"

		/* Check for MMX via standard CPUID function 1 */
		"MMXtest:\n\t"
		"movl $1, %%eax\n\t"
		"cpuid\n\t"
		"testl $0x00800000, %%edx\n\t"	/* Test for MMX */
		"jz NotSupported7\n\t"		/* MMX Not supported */
		"movl $1, %0\n\n\t"		/* MMX Supported */
		"jmp Return\n\t"

		/* Nothing supported */
		"\nNotSupported1:\n\t"
		"#movl $101, %0\n\n\t"
		"\nNotSupported2:\n\t"
		"#movl $102, %0\n\n\t"
		"\nNotSupported3:\n\t"
		"#movl $103, %0\n\n\t"
		"\nNotSupported4:\n\t"
		"#movl $104, %0\n\n\t"
		"\nNotSupported5:\n\t"
		"#movl $105, %0\n\n\t"
		"\nNotSupported6:\n\t"
		"#movl $106, %0\n\n\t"
		"\nNotSupported7:\n\t"
		"#movl $107, %0\n\n\t"
		"movl $0, %0\n\n\t"

		"Return:\n\t"
		: "=a" (rval)
		: /* no input */
		: "ebx", "ecx", "edx"	/* eax removed: it is the "=a" output
					   and must not also be a clobber */
	);

	/* Return */
	return(rval);
}
+
+/* Function to test if mmx instructions are supported...
+*/
/* Convenience predicate: returns 1 when plain MMX instructions are usable,
 * 0 otherwise (masks the MMX bit out of the mm_support() feature word). */
static inline int
mmx_ok(void)
{
	int features = mm_support();

	return features & 0x1;
}
+
+
+/* Helper functions for the instruction macros that follow...
+ (note that memory-to-register, m2r, instructions are nearly
+ as efficient as register-to-register, r2r, instructions;
+ however, memory-to-memory instructions are really simulated
+ as a convenience, and are only 1/3 as efficient)
+*/
#ifdef MMX_TRACE

/* Tracing variants: each helper dumps the operands before and after the
   instruction to stderr via a scratch mmx_t.  NOTE(review): these use the
   historical "X"/"=X" constraints, which modern GCC may no longer accept
   for memory operands — verify before enabling MMX_TRACE.
*/

#include <stdio.h>

/* immediate -> register, with trace */
#define	mmx_i2r(op, imm, reg) \
	{ \
		mmx_t mmx_trace; \
		mmx_trace = (imm); \
		fprintf(stderr, #op "_i2r(" #imm "=0x%016llx, ", mmx_trace.q); \
		__asm__ __volatile__ ("movq %%" #reg ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #reg "=0x%016llx) => ", mmx_trace.q); \
		__asm__ __volatile__ (#op " %0, %%" #reg \
				      : /* nothing */ \
				      : "X" (imm)); \
		__asm__ __volatile__ ("movq %%" #reg ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #reg "=0x%016llx\n", mmx_trace.q); \
	}

/* memory -> register, with trace */
#define	mmx_m2r(op, mem, reg) \
	{ \
		mmx_t mmx_trace; \
		mmx_trace = (mem); \
		fprintf(stderr, #op "_m2r(" #mem "=0x%016llx, ", mmx_trace.q); \
		__asm__ __volatile__ ("movq %%" #reg ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #reg "=0x%016llx) => ", mmx_trace.q); \
		__asm__ __volatile__ (#op " %0, %%" #reg \
				      : /* nothing */ \
				      : "X" (mem)); \
		__asm__ __volatile__ ("movq %%" #reg ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #reg "=0x%016llx\n", mmx_trace.q); \
	}

/* register -> memory, with trace */
#define	mmx_r2m(op, reg, mem) \
	{ \
		mmx_t mmx_trace; \
		__asm__ __volatile__ ("movq %%" #reg ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #op "_r2m(" #reg "=0x%016llx, ", mmx_trace.q); \
		mmx_trace = (mem); \
		fprintf(stderr, #mem "=0x%016llx) => ", mmx_trace.q); \
		__asm__ __volatile__ (#op " %%" #reg ", %0" \
				      : "=X" (mem) \
				      : /* nothing */ ); \
		mmx_trace = (mem); \
		fprintf(stderr, #mem "=0x%016llx\n", mmx_trace.q); \
	}

/* register -> register, with trace */
#define	mmx_r2r(op, regs, regd) \
	{ \
		mmx_t mmx_trace; \
		__asm__ __volatile__ ("movq %%" #regs ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #op "_r2r(" #regs "=0x%016llx, ", mmx_trace.q); \
		__asm__ __volatile__ ("movq %%" #regd ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #regd "=0x%016llx) => ", mmx_trace.q); \
		__asm__ __volatile__ (#op " %" #regs ", %" #regd); \
		__asm__ __volatile__ ("movq %%" #regd ", %0" \
				      : "=X" (mmx_trace) \
				      : /* nothing */ ); \
		fprintf(stderr, #regd "=0x%016llx\n", mmx_trace.q); \
	}

/* memory -> memory (staged through mm0, which it clobbers), with trace */
#define	mmx_m2m(op, mems, memd) \
	{ \
		mmx_t mmx_trace; \
		mmx_trace = (mems); \
		fprintf(stderr, #op "_m2m(" #mems "=0x%016llx, ", mmx_trace.q); \
		mmx_trace = (memd); \
		fprintf(stderr, #memd "=0x%016llx) => ", mmx_trace.q); \
		__asm__ __volatile__ ("movq %0, %%mm0\n\t" \
				      #op " %1, %%mm0\n\t" \
				      "movq %%mm0, %0" \
				      : "=X" (memd) \
				      : "X" (mems)); \
		mmx_trace = (memd); \
		fprintf(stderr, #memd "=0x%016llx\n", mmx_trace.q); \
	}

#else

/* These macros are a lot simpler without the tracing...
   _i2r = immediate -> register, _m2r = memory -> register,
   _r2m = register -> memory, _r2r = register -> register,
   _m2m = memory -> memory (staged through mm0, clobbering it).
*/

#define	mmx_i2r(op, imm, reg) \
	__asm__ __volatile__ (#op " $" #imm ", %%" #reg \
			      : /* nothing */ \
			      : /* nothing */);

#define	mmx_m2r(op, mem, reg) \
	__asm__ __volatile__ (#op " %0, %%" #reg \
			      : /* nothing */ \
			      : "m" (mem))

#define	mmx_r2m(op, reg, mem) \
	__asm__ __volatile__ (#op " %%" #reg ", %0" \
			      : "=m" (mem) \
			      : /* nothing */ )

#define	mmx_r2r(op, regs, regd) \
	__asm__ __volatile__ (#op " %" #regs ", %" #regd)

#define	mmx_m2m(op, mems, memd) \
	__asm__ __volatile__ ("movq %0, %%mm0\n\t" \
			      #op " %1, %%mm0\n\t" \
			      "movq %%mm0, %0" \
			      : "=m" (memd) \
			      : "m" (mems))

#endif
+
+
/* Per-instruction wrappers.  Naming: <insn>_m2r loads memory into an MMX
   register, <insn>_r2m stores a register to memory, <insn>_r2r is
   register-to-register, and the bare <insn>(vars, vard) form is
   memory-to-memory staged through %mm0 (and therefore clobbers mm0).
*/

/* 1x64 MOVe Quadword
   (this is both a load and a store...
   in fact, it is the only way to store)
*/
#define	movq_m2r(var, reg)	mmx_m2r(movq, var, reg)
#define	movq_r2m(reg, var)	mmx_r2m(movq, reg, var)
#define	movq_r2r(regs, regd)	mmx_r2r(movq, regs, regd)
#define	movq(vars, vard) \
	__asm__ __volatile__ ("movq %1, %%mm0\n\t" \
			      "movq %%mm0, %0" \
			      : "=X" (vard) \
			      : "X" (vars))


/* 1x32 MOVe Doubleword
   (like movq, this is both load and store...
   but is most useful for moving things between
   mmx registers and ordinary registers)
*/
#define	movd_m2r(var, reg)	mmx_m2r(movd, var, reg)
#define	movd_r2m(reg, var)	mmx_r2m(movd, reg, var)
#define	movd_r2r(regs, regd)	mmx_r2r(movd, regs, regd)
#define	movd(vars, vard) \
	__asm__ __volatile__ ("movd %1, %%mm0\n\t" \
			      "movd %%mm0, %0" \
			      : "=X" (vard) \
			      : "X" (vars))


/* 2x32, 4x16, and 8x8 Parallel ADDs
*/
#define	paddd_m2r(var, reg)	mmx_m2r(paddd, var, reg)
#define	paddd_r2r(regs, regd)	mmx_r2r(paddd, regs, regd)
#define	paddd(vars, vard)	mmx_m2m(paddd, vars, vard)

#define	paddw_m2r(var, reg)	mmx_m2r(paddw, var, reg)
#define	paddw_r2r(regs, regd)	mmx_r2r(paddw, regs, regd)
#define	paddw(vars, vard)	mmx_m2m(paddw, vars, vard)

#define	paddb_m2r(var, reg)	mmx_m2r(paddb, var, reg)
#define	paddb_r2r(regs, regd)	mmx_r2r(paddb, regs, regd)
#define	paddb(vars, vard)	mmx_m2m(paddb, vars, vard)


/* 4x16 and 8x8 Parallel ADDs using Saturation arithmetic
*/
#define	paddsw_m2r(var, reg)	mmx_m2r(paddsw, var, reg)
#define	paddsw_r2r(regs, regd)	mmx_r2r(paddsw, regs, regd)
#define	paddsw(vars, vard)	mmx_m2m(paddsw, vars, vard)

#define	paddsb_m2r(var, reg)	mmx_m2r(paddsb, var, reg)
#define	paddsb_r2r(regs, regd)	mmx_r2r(paddsb, regs, regd)
#define	paddsb(vars, vard)	mmx_m2m(paddsb, vars, vard)


/* 4x16 and 8x8 Parallel ADDs using Unsigned Saturation arithmetic
*/
#define	paddusw_m2r(var, reg)	mmx_m2r(paddusw, var, reg)
#define	paddusw_r2r(regs, regd)	mmx_r2r(paddusw, regs, regd)
#define	paddusw(vars, vard)	mmx_m2m(paddusw, vars, vard)

#define	paddusb_m2r(var, reg)	mmx_m2r(paddusb, var, reg)
#define	paddusb_r2r(regs, regd)	mmx_r2r(paddusb, regs, regd)
#define	paddusb(vars, vard)	mmx_m2m(paddusb, vars, vard)


/* 2x32, 4x16, and 8x8 Parallel SUBs
*/
#define	psubd_m2r(var, reg)	mmx_m2r(psubd, var, reg)
#define	psubd_r2r(regs, regd)	mmx_r2r(psubd, regs, regd)
#define	psubd(vars, vard)	mmx_m2m(psubd, vars, vard)

#define	psubw_m2r(var, reg)	mmx_m2r(psubw, var, reg)
#define	psubw_r2r(regs, regd)	mmx_r2r(psubw, regs, regd)
#define	psubw(vars, vard)	mmx_m2m(psubw, vars, vard)

#define	psubb_m2r(var, reg)	mmx_m2r(psubb, var, reg)
#define	psubb_r2r(regs, regd)	mmx_r2r(psubb, regs, regd)
#define	psubb(vars, vard)	mmx_m2m(psubb, vars, vard)


/* 4x16 and 8x8 Parallel SUBs using Saturation arithmetic
*/
#define	psubsw_m2r(var, reg)	mmx_m2r(psubsw, var, reg)
#define	psubsw_r2r(regs, regd)	mmx_r2r(psubsw, regs, regd)
#define	psubsw(vars, vard)	mmx_m2m(psubsw, vars, vard)

#define	psubsb_m2r(var, reg)	mmx_m2r(psubsb, var, reg)
#define	psubsb_r2r(regs, regd)	mmx_r2r(psubsb, regs, regd)
#define	psubsb(vars, vard)	mmx_m2m(psubsb, vars, vard)


/* 4x16 and 8x8 Parallel SUBs using Unsigned Saturation arithmetic
*/
#define	psubusw_m2r(var, reg)	mmx_m2r(psubusw, var, reg)
#define	psubusw_r2r(regs, regd)	mmx_r2r(psubusw, regs, regd)
#define	psubusw(vars, vard)	mmx_m2m(psubusw, vars, vard)

#define	psubusb_m2r(var, reg)	mmx_m2r(psubusb, var, reg)
#define	psubusb_r2r(regs, regd)	mmx_r2r(psubusb, regs, regd)
#define	psubusb(vars, vard)	mmx_m2m(psubusb, vars, vard)


/* 4x16 Parallel MULs giving Low 4x16 portions of results
*/
#define	pmullw_m2r(var, reg)	mmx_m2r(pmullw, var, reg)
#define	pmullw_r2r(regs, regd)	mmx_r2r(pmullw, regs, regd)
#define	pmullw(vars, vard)	mmx_m2m(pmullw, vars, vard)


/* 4x16 Parallel MULs giving High 4x16 portions of results
*/
#define	pmulhw_m2r(var, reg)	mmx_m2r(pmulhw, var, reg)
#define	pmulhw_r2r(regs, regd)	mmx_r2r(pmulhw, regs, regd)
#define	pmulhw(vars, vard)	mmx_m2m(pmulhw, vars, vard)


/* 4x16->2x32 Parallel Mul-ADD
   (muls like pmullw, then adds adjacent 16-bit fields
    in the multiply result to make the final 2x32 result)
*/
#define	pmaddwd_m2r(var, reg)	mmx_m2r(pmaddwd, var, reg)
#define	pmaddwd_r2r(regs, regd)	mmx_r2r(pmaddwd, regs, regd)
#define	pmaddwd(vars, vard)	mmx_m2m(pmaddwd, vars, vard)
+
+
/* 1x64 bitwise AND
*/
#ifdef	BROKEN_PAND
/* Workaround for assemblers that mis-handle the pand mnemonic: synthesize
   AND from two pandn operations.  pandn computes dst = ~dst & src, so
   pandn with all-ones first inverts dst, and a second pandn with the real
   operand yields (orig_dst & src).
   NOTE(review): `(mmx_t) -1LL` casts a scalar to a union, which is a GCC
   extension — confirm acceptable for the supported compilers. */
#define	pand_m2r(var, reg) \
	{ \
		mmx_m2r(pandn, (mmx_t) -1LL, reg); \
		mmx_m2r(pandn, var, reg); \
	}
#define	pand_r2r(regs, regd) \
	{ \
		mmx_m2r(pandn, (mmx_t) -1LL, regd); \
		mmx_r2r(pandn, regs, regd); \
	}
#define	pand(vars, vard) \
	{ \
		movq_m2r(vard, mm0); \
		mmx_m2r(pandn, (mmx_t) -1LL, mm0); \
		mmx_m2r(pandn, vars, mm0); \
		movq_r2m(mm0, vard); \
	}
#else
#define	pand_m2r(var, reg)	mmx_m2r(pand, var, reg)
#define	pand_r2r(regs, regd)	mmx_r2r(pand, regs, regd)
#define	pand(vars, vard)	mmx_m2m(pand, vars, vard)
#endif


/* 1x64 bitwise AND with Not the destination
*/
#define	pandn_m2r(var, reg)	mmx_m2r(pandn, var, reg)
#define	pandn_r2r(regs, regd)	mmx_r2r(pandn, regs, regd)
#define	pandn(vars, vard)	mmx_m2m(pandn, vars, vard)


/* 1x64 bitwise OR
*/
#define	por_m2r(var, reg)	mmx_m2r(por, var, reg)
#define	por_r2r(regs, regd)	mmx_r2r(por, regs, regd)
#define	por(vars, vard)	mmx_m2m(por, vars, vard)


/* 1x64 bitwise eXclusive OR
*/
#define	pxor_m2r(var, reg)	mmx_m2r(pxor, var, reg)
#define	pxor_r2r(regs, regd)	mmx_r2r(pxor, regs, regd)
#define	pxor(vars, vard)	mmx_m2m(pxor, vars, vard)


/* 2x32, 4x16, and 8x8 Parallel CoMPare for EQuality
   (resulting fields are either 0 or -1)
*/
#define	pcmpeqd_m2r(var, reg)	mmx_m2r(pcmpeqd, var, reg)
#define	pcmpeqd_r2r(regs, regd)	mmx_r2r(pcmpeqd, regs, regd)
#define	pcmpeqd(vars, vard)	mmx_m2m(pcmpeqd, vars, vard)

#define	pcmpeqw_m2r(var, reg)	mmx_m2r(pcmpeqw, var, reg)
#define	pcmpeqw_r2r(regs, regd)	mmx_r2r(pcmpeqw, regs, regd)
#define	pcmpeqw(vars, vard)	mmx_m2m(pcmpeqw, vars, vard)

#define	pcmpeqb_m2r(var, reg)	mmx_m2r(pcmpeqb, var, reg)
#define	pcmpeqb_r2r(regs, regd)	mmx_r2r(pcmpeqb, regs, regd)
#define	pcmpeqb(vars, vard)	mmx_m2m(pcmpeqb, vars, vard)


/* 2x32, 4x16, and 8x8 Parallel CoMPare for Greater Than
   (resulting fields are either 0 or -1)
*/
#define	pcmpgtd_m2r(var, reg)	mmx_m2r(pcmpgtd, var, reg)
#define	pcmpgtd_r2r(regs, regd)	mmx_r2r(pcmpgtd, regs, regd)
#define	pcmpgtd(vars, vard)	mmx_m2m(pcmpgtd, vars, vard)

#define	pcmpgtw_m2r(var, reg)	mmx_m2r(pcmpgtw, var, reg)
#define	pcmpgtw_r2r(regs, regd)	mmx_r2r(pcmpgtw, regs, regd)
#define	pcmpgtw(vars, vard)	mmx_m2m(pcmpgtw, vars, vard)

#define	pcmpgtb_m2r(var, reg)	mmx_m2r(pcmpgtb, var, reg)
#define	pcmpgtb_r2r(regs, regd)	mmx_r2r(pcmpgtb, regs, regd)
#define	pcmpgtb(vars, vard)	mmx_m2m(pcmpgtb, vars, vard)
+
+
+/* 1x64, 2x32, and 4x16 Parallel Shift Left Logical
+*/
+#define psllq_i2r(imm, reg) mmx_i2r(psllq, imm, reg)
+#define psllq_m2r(var, reg) mmx_m2r(psllq, var, reg)
+#define psllq_r2r(regs, regd) mmx_r2r(psllq, regs, regd)
+#define psllq(vars, vard) mmx_m2m(psllq, vars, vard)
+
+#define pslld_i2r(imm, reg) mmx_i2r(pslld, imm, reg)
+#define pslld_m2r(var, reg) mmx_m2r(pslld, var, reg)
+#define pslld_r2r(regs, regd) mmx_r2r(pslld, regs, regd)
+#define pslld(vars, vard) mmx_m2m(pslld, vars, vard)
+
+#define psllw_i2r(imm, reg) mmx_i2r(psllw, imm, reg)
+#define psllw_m2r(var, reg) mmx_m2r(psllw, var, reg)
+#define psllw_r2r(regs, regd) mmx_r2r(psllw, regs, regd)
+#define psllw(vars, vard) mmx_m2m(psllw, vars, vard)
+
+
+/* 1x64, 2x32, and 4x16 Parallel Shift Right Logical
+*/
+#define psrlq_i2r(imm, reg) mmx_i2r(psrlq, imm, reg)
+#define psrlq_m2r(var, reg) mmx_m2r(psrlq, var, reg)
+#define psrlq_r2r(regs, regd) mmx_r2r(psrlq, regs, regd)
+#define psrlq(vars, vard) mmx_m2m(psrlq, vars, vard)
+
+#define psrld_i2r(imm, reg) mmx_i2r(psrld, imm, reg)
+#define psrld_m2r(var, reg) mmx_m2r(psrld, var, reg)
+#define psrld_r2r(regs, regd) mmx_r2r(psrld, regs, regd)
+#define psrld(vars, vard) mmx_m2m(psrld, vars, vard)
+
+#define psrlw_i2r(imm, reg) mmx_i2r(psrlw, imm, reg)
+#define psrlw_m2r(var, reg) mmx_m2r(psrlw, var, reg)
+#define psrlw_r2r(regs, regd) mmx_r2r(psrlw, regs, regd)
+#define psrlw(vars, vard) mmx_m2m(psrlw, vars, vard)
+
+
+/* 2x32 and 4x16 Parallel Shift Right Arithmetic
+*/
+#define psrad_i2r(imm, reg) mmx_i2r(psrad, imm, reg)
+#define psrad_m2r(var, reg) mmx_m2r(psrad, var, reg)
+#define psrad_r2r(regs, regd) mmx_r2r(psrad, regs, regd)
+#define psrad(vars, vard) mmx_m2m(psrad, vars, vard)
+
+#define psraw_i2r(imm, reg) mmx_i2r(psraw, imm, reg)
+#define psraw_m2r(var, reg) mmx_m2r(psraw, var, reg)
+#define psraw_r2r(regs, regd) mmx_r2r(psraw, regs, regd)
+#define psraw(vars, vard) mmx_m2m(psraw, vars, vard)
+
+
+/* 2x32->4x16 and 4x16->8x8 PACK and Signed Saturate
+ (packs source and dest fields into dest in that order)
+*/
+#define packssdw_m2r(var, reg) mmx_m2r(packssdw, var, reg)
+#define packssdw_r2r(regs, regd) mmx_r2r(packssdw, regs, regd)
+#define packssdw(vars, vard) mmx_m2m(packssdw, vars, vard)
+
+#define packsswb_m2r(var, reg) mmx_m2r(packsswb, var, reg)
+#define packsswb_r2r(regs, regd) mmx_r2r(packsswb, regs, regd)
+#define packsswb(vars, vard) mmx_m2m(packsswb, vars, vard)
+
+
+/* 4x16->8x8 PACK and Unsigned Saturate
+ (packs source and dest fields into dest in that order)
+*/
+#define packuswb_m2r(var, reg) mmx_m2r(packuswb, var, reg)
+#define packuswb_r2r(regs, regd) mmx_r2r(packuswb, regs, regd)
+#define packuswb(vars, vard) mmx_m2m(packuswb, vars, vard)
+
+
+/* 2x32->1x64, 4x16->2x32, and 8x8->4x16 UNPaCK Low
+ (interleaves low half of dest with low half of source
+ as padding in each result field)
+*/
+#define punpckldq_m2r(var, reg) mmx_m2r(punpckldq, var, reg)
+#define punpckldq_r2r(regs, regd) mmx_r2r(punpckldq, regs, regd)
+#define punpckldq(vars, vard) mmx_m2m(punpckldq, vars, vard)
+
+#define punpcklwd_m2r(var, reg) mmx_m2r(punpcklwd, var, reg)
+#define punpcklwd_r2r(regs, regd) mmx_r2r(punpcklwd, regs, regd)
+#define punpcklwd(vars, vard) mmx_m2m(punpcklwd, vars, vard)
+
+#define punpcklbw_m2r(var, reg) mmx_m2r(punpcklbw, var, reg)
+#define punpcklbw_r2r(regs, regd) mmx_r2r(punpcklbw, regs, regd)
+#define punpcklbw(vars, vard) mmx_m2m(punpcklbw, vars, vard)
+
+
+/* 2x32->1x64, 4x16->2x32, and 8x8->4x16 UNPaCK High
+ (interleaves high half of dest with high half of source
+ as padding in each result field)
+*/
+#define punpckhdq_m2r(var, reg) mmx_m2r(punpckhdq, var, reg)
+#define punpckhdq_r2r(regs, regd) mmx_r2r(punpckhdq, regs, regd)
+#define punpckhdq(vars, vard) mmx_m2m(punpckhdq, vars, vard)
+
+#define punpckhwd_m2r(var, reg) mmx_m2r(punpckhwd, var, reg)
+#define punpckhwd_r2r(regs, regd) mmx_r2r(punpckhwd, regs, regd)
+#define punpckhwd(vars, vard) mmx_m2m(punpckhwd, vars, vard)
+
+#define punpckhbw_m2r(var, reg) mmx_m2r(punpckhbw, var, reg)
+#define punpckhbw_r2r(regs, regd) mmx_r2r(punpckhbw, regs, regd)
+#define punpckhbw(vars, vard) mmx_m2m(punpckhbw, vars, vard)
+
+
+/* Empty MMx State
+ (used to clean-up when going from mmx to float use
+ of the registers that are shared by both; note that
+ there is no float-to-mmx operation needed, because
+ only the float tag word info is corruptible)
+*/
+#ifdef MMX_TRACE
+
+#define emms() \
+ { \
+ fprintf(stderr, "emms()\n"); \
+ __asm__ __volatile__ ("emms"); \
+ }
+
+#else
+
+#define emms() __asm__ __volatile__ ("emms")
+
+#endif
+
+#endif
diff --git a/gst/deinterlace/tvtime/plugins.h b/gst/deinterlace/tvtime/plugins.h
new file mode 100644
index 0000000000..58085b620f
--- /dev/null
+++ b/gst/deinterlace/tvtime/plugins.h
@@ -0,0 +1,54 @@
+/*
+ *
+ * GStreamer
+ * Copyright (C) 2004 Billy Biggs <vektor@dumbterm.net>
+ * Copyright (C) 2008 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifndef TVTIME_PLUGINS_H_INCLUDED
+#define TVTIME_PLUGINS_H_INCLUDED
+
+#define GST_TYPE_DEINTERLACE_TOMSMOCOMP (gst_deinterlace_method_tomsmocomp_get_type ())
+#define GST_TYPE_DEINTERLACE_GREEDY_H (gst_deinterlace_method_greedy_h_get_type ())
+#define GST_TYPE_DEINTERLACE_GREEDY_L (gst_deinterlace_method_greedy_l_get_type ())
+#define GST_TYPE_DEINTERLACE_VFIR (gst_deinterlace_method_vfir_get_type ())
+#define GST_TYPE_DEINTERLACE_LINEAR (gst_deinterlace_method_linear_get_type ())
+#define GST_TYPE_DEINTERLACE_LINEAR_BLEND (gst_deinterlace_method_linear_blend_get_type ())
+#define GST_TYPE_DEINTERLACE_SCALER_BOB (gst_deinterlace_method_scaler_bob_get_type ())
+#define GST_TYPE_DEINTERLACE_WEAVE (gst_deinterlace_method_weave_get_type ())
+#define GST_TYPE_DEINTERLACE_WEAVE_TFF (gst_deinterlace_method_weave_tff_get_type ())
+#define GST_TYPE_DEINTERLACE_WEAVE_BFF (gst_deinterlace_method_weave_bff_get_type ())
+
+GType gst_deinterlace_method_tomsmocomp_get_type (void);
+GType gst_deinterlace_method_greedy_h_get_type (void);
+GType gst_deinterlace_method_greedy_l_get_type (void);
+GType gst_deinterlace_method_vfir_get_type (void);
+
+GType gst_deinterlace_method_linear_get_type (void);
+GType gst_deinterlace_method_linear_blend_get_type (void);
+GType gst_deinterlace_method_scaler_bob_get_type (void);
+GType gst_deinterlace_method_weave_get_type (void);
+GType gst_deinterlace_method_weave_tff_get_type (void);
+GType gst_deinterlace_method_weave_bff_get_type (void);
+
+#endif /* TVTIME_PLUGINS_H_INCLUDED */
diff --git a/gst/deinterlace/tvtime/scalerbob.c b/gst/deinterlace/tvtime/scalerbob.c
new file mode 100644
index 0000000000..ed20bd4c36
--- /dev/null
+++ b/gst/deinterlace/tvtime/scalerbob.c
@@ -0,0 +1,118 @@
+/*
+ * Double lines
+ * Copyright (C) 2008,2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+
+#define GST_TYPE_DEINTERLACE_METHOD_SCALER_BOB (gst_deinterlace_method_scaler_bob_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_SCALER_BOB(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_SCALER_BOB))
+#define GST_IS_DEINTERLACE_METHOD_SCALER_BOB_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_SCALER_BOB))
+#define GST_DEINTERLACE_METHOD_SCALER_BOB_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_SCALER_BOB, GstDeinterlaceMethodScalerBobClass))
+#define GST_DEINTERLACE_METHOD_SCALER_BOB(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_SCALER_BOB, GstDeinterlaceMethodScalerBob))
+#define GST_DEINTERLACE_METHOD_SCALER_BOB_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_SCALER_BOB, GstDeinterlaceMethodScalerBobClass))
+#define GST_DEINTERLACE_METHOD_SCALER_BOB_CAST(obj) ((GstDeinterlaceMethodScalerBob*)(obj))
+
+GType gst_deinterlace_method_scaler_bob_get_type (void);
+
+typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodScalerBob;
+typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodScalerBobClass;
+
+/* Interpolate a missing packed-format scanline by copying scanlines->t0
+   verbatim, i.e. plain line doubling ("bob").  t0 is presumably the
+   neighbouring line of the current field -- confirm against
+   GstDeinterlaceScanlineData in gstdeinterlacemethod.h. */
+static void
+deinterlace_scanline_scaler_bob_packed (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->t0, size);
+}
+
+/* Planar Y plane: same line-doubling copy of scanlines->t0 as the packed
+   variant; 'size' is the scanline length in bytes for this plane. */
+static void
+deinterlace_scanline_scaler_bob_planar_y (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->t0, size);
+}
+
+/* Planar U (chroma) plane: line-doubling copy of scanlines->t0. */
+static void
+deinterlace_scanline_scaler_bob_planar_u (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->t0, size);
+}
+
+/* Planar V (chroma) plane: line-doubling copy of scanlines->t0. */
+static void
+deinterlace_scanline_scaler_bob_planar_v (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->t0, size);
+}
+
+G_DEFINE_TYPE (GstDeinterlaceMethodScalerBob, gst_deinterlace_method_scaler_bob,
+ GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
+/* Class init: describe the method and wire every packed- and planar-format
+   interpolation callback to the same memcpy-based line-doubling routines.
+   All formats behave identically here since the method only copies bytes. */
+static void
+gst_deinterlace_method_scaler_bob_class_init (GstDeinterlaceMethodScalerBobClass
+    * klass)
+{
+  GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+  GstDeinterlaceSimpleMethodClass *dism_class =
+      (GstDeinterlaceSimpleMethodClass *) klass;
+
+  /* Method metadata; fields_required/latency semantics are defined by
+     GstDeinterlaceMethodClass -- see gstdeinterlacemethod.h. */
+  dim_class->fields_required = 2;
+  dim_class->name = "Double lines";
+  dim_class->nick = "scalerbob";
+  dim_class->latency = 1;
+
+  /* Every packed format uses the single generic byte-copy routine. */
+  dism_class->interpolate_scanline_ayuv =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_yuy2 =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_yvyu =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_uyvy =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_nv12 =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_nv21 =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_argb =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_abgr =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_rgba =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_bgra =
+      deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_rgb = deinterlace_scanline_scaler_bob_packed;
+  dism_class->interpolate_scanline_bgr = deinterlace_scanline_scaler_bob_packed;
+  /* Planar formats get one callback per plane. */
+  dism_class->interpolate_scanline_planar_y =
+      deinterlace_scanline_scaler_bob_planar_y;
+  dism_class->interpolate_scanline_planar_u =
+      deinterlace_scanline_scaler_bob_planar_u;
+  dism_class->interpolate_scanline_planar_v =
+      deinterlace_scanline_scaler_bob_planar_v;
+}
+
+/* No per-instance state: everything is configured in class_init. */
+static void
+gst_deinterlace_method_scaler_bob_init (GstDeinterlaceMethodScalerBob * self)
+{
+}
diff --git a/gst/deinterlace/tvtime/sse.h b/gst/deinterlace/tvtime/sse.h
new file mode 100644
index 0000000000..b3eaf61407
--- /dev/null
+++ b/gst/deinterlace/tvtime/sse.h
@@ -0,0 +1,992 @@
+/* sse.h
+
+ Streaming SIMD Extensions (a.k.a. Katmai New Instructions)
+ GCC interface library for IA32.
+
+ To use this library, simply include this header file
+ and compile with GCC. You MUST have inlining enabled
+ in order for sse_ok() to work; this can be done by
+ simply using -O on the GCC command line.
+
+ Compiling with -DSSE_TRACE will cause detailed trace
+ output to be sent to stderr for each sse operation.
+ This adds lots of code, and obviously slows execution to
+ a crawl, but can be very useful for debugging.
+
+ THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT
+ LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS FOR ANY PARTICULAR PURPOSE.
+
+ 1999 by R. Fisher
+ Based on libmmx by H. Dietz and R. Fisher
+
+ Notes:
+ This is still extremely alpha.
+ Because this library depends on an assembler which understands the
+ SSE opcodes, you probably won't be able to use this yet.
+ For now, do not use TRACE versions. These both make use
+ of the MMX registers, not the SSE registers. This will be resolved
+ at a later date.
+ ToDo:
+ Rewrite TRACE macros
+ Major Debugging Work
+*/
+
+#ifndef _SSE_H
+#define _SSE_H
+
+
+
+/* The type of a value that fits in an SSE register
+   (note that long long constant values MUST be suffixed
+    by LL and unsigned long long values by ULL, lest
+    they be truncated by the compiler)
+*/
+typedef union {
+	float			sf[4];	/* Single-precision (32-bit) value */
+	unsigned long long	uq;	/* Unsigned Quadword view -- required by the SSE_TRACE macros (.uq) */
+	unsigned int		d[4];	/* Unsigned Doubleword view -- required by the SSE_TRACE macros (.d[]) */
+} __attribute__ ((aligned (16))) sse_t;	/* On a 16 byte (128-bit) boundary */
+
+
+#if 0
+/* Function to test if multimedia instructions are supported...
+*/
+inline extern int
+mm_support(void)
+{
+ /* Returns 1 if MMX instructions are supported,
+ 3 if Cyrix MMX and Extended MMX instructions are supported
+ 5 if AMD MMX and 3DNow! instructions are supported
+ 9 if MMX and SSE instructions are supported
+ 0 if hardware does not support any of these
+ */
+ register int rval = 0;
+
+ __asm__ __volatile__ (
+ /* See if CPUID instruction is supported ... */
+ /* ... Get copies of EFLAGS into eax and ecx */
+ "pushf\n\t"
+ "popl %%eax\n\t"
+ "movl %%eax, %%ecx\n\t"
+
+ /* ... Toggle the ID bit in one copy and store */
+ /* to the EFLAGS reg */
+ "xorl $0x200000, %%eax\n\t"
+ "push %%eax\n\t"
+ "popf\n\t"
+
+ /* ... Get the (hopefully modified) EFLAGS */
+ "pushf\n\t"
+ "popl %%eax\n\t"
+
+ /* ... Compare and test result */
+ "xorl %%eax, %%ecx\n\t"
+ "testl $0x200000, %%ecx\n\t"
+ "jz NotSupported1\n\t" /* CPUID not supported */
+
+
+ /* Get standard CPUID information, and
+ go to a specific vendor section */
+ "movl $0, %%eax\n\t"
+ "cpuid\n\t"
+
+ /* Check for Intel */
+ "cmpl $0x756e6547, %%ebx\n\t"
+ "jne TryAMD\n\t"
+ "cmpl $0x49656e69, %%edx\n\t"
+ "jne TryAMD\n\t"
+ "cmpl $0x6c65746e, %%ecx\n"
+ "jne TryAMD\n\t"
+ "jmp Intel\n\t"
+
+ /* Check for AMD */
+ "\nTryAMD:\n\t"
+ "cmpl $0x68747541, %%ebx\n\t"
+ "jne TryCyrix\n\t"
+ "cmpl $0x69746e65, %%edx\n\t"
+ "jne TryCyrix\n\t"
+ "cmpl $0x444d4163, %%ecx\n"
+ "jne TryCyrix\n\t"
+ "jmp AMD\n\t"
+
+ /* Check for Cyrix */
+ "\nTryCyrix:\n\t"
+ "cmpl $0x69727943, %%ebx\n\t"
+ "jne NotSupported2\n\t"
+ "cmpl $0x736e4978, %%edx\n\t"
+ "jne NotSupported3\n\t"
+ "cmpl $0x64616574, %%ecx\n\t"
+ "jne NotSupported4\n\t"
+ /* Drop through to Cyrix... */
+
+
+ /* Cyrix Section */
+ /* See if extended CPUID level 80000001 is supported */
+ /* The value of CPUID/80000001 for the 6x86MX is undefined
+ according to the Cyrix CPU Detection Guide (Preliminary
+ Rev. 1.01 table 1), so we'll check the value of eax for
+ CPUID/0 to see if standard CPUID level 2 is supported.
+ According to the table, the only CPU which supports level
+ 2 is also the only one which supports extended CPUID levels.
+ */
+ "cmpl $0x2, %%eax\n\t"
+ "jne MMXtest\n\t" /* Use standard CPUID instead */
+
+ /* Extended CPUID supported (in theory), so get extended
+ features */
+ "movl $0x80000001, %%eax\n\t"
+ "cpuid\n\t"
+ "testl $0x00800000, %%eax\n\t" /* Test for MMX */
+ "jz NotSupported5\n\t" /* MMX not supported */
+ "testl $0x01000000, %%eax\n\t" /* Test for Ext'd MMX */
+ "jnz EMMXSupported\n\t"
+ "movl $1, %0:\n\n\t" /* MMX Supported */
+ "jmp Return\n\n"
+ "EMMXSupported:\n\t"
+ "movl $3, %0:\n\n\t" /* EMMX and MMX Supported */
+ "jmp Return\n\t"
+
+
+ /* AMD Section */
+ "AMD:\n\t"
+
+ /* See if extended CPUID is supported */
+ "movl $0x80000000, %%eax\n\t"
+ "cpuid\n\t"
+ "cmpl $0x80000000, %%eax\n\t"
+ "jl MMXtest\n\t" /* Use standard CPUID instead */
+
+ /* Extended CPUID supported, so get extended features */
+ "movl $0x80000001, %%eax\n\t"
+ "cpuid\n\t"
+ "testl $0x00800000, %%edx\n\t" /* Test for MMX */
+ "jz NotSupported6\n\t" /* MMX not supported */
+ "testl $0x80000000, %%edx\n\t" /* Test for 3DNow! */
+ "jnz ThreeDNowSupported\n\t"
+ "movl $1, %0:\n\n\t" /* MMX Supported */
+ "jmp Return\n\n"
+ "ThreeDNowSupported:\n\t"
+ "movl $5, %0:\n\n\t" /* 3DNow! and MMX Supported */
+ "jmp Return\n\t"
+
+
+ /* Intel Section */
+ "Intel:\n\t"
+
+ /* Check for SSE */
+ "SSEtest:\n\t"
+ "movl $1, %%eax\n\t"
+ "cpuid\n\t"
+ "testl $0x02000000, %%edx\n\t" /* Test for SSE */
+ "jz MMXtest\n\t" /* SSE Not supported */
+ "movl $9, %0:\n\n\t" /* SSE Supported */
+ "jmp Return\n\t"
+
+ /* Check for MMX */
+ "MMXtest:\n\t"
+ "movl $1, %%eax\n\t"
+ "cpuid\n\t"
+ "testl $0x00800000, %%edx\n\t" /* Test for MMX */
+ "jz NotSupported7\n\t" /* MMX Not supported */
+ "movl $1, %0:\n\n\t" /* MMX Supported */
+ "jmp Return\n\t"
+
+ /* Nothing supported */
+ "\nNotSupported1:\n\t"
+ "#movl $101, %0:\n\n\t"
+ "\nNotSupported2:\n\t"
+ "#movl $102, %0:\n\n\t"
+ "\nNotSupported3:\n\t"
+ "#movl $103, %0:\n\n\t"
+ "\nNotSupported4:\n\t"
+ "#movl $104, %0:\n\n\t"
+ "\nNotSupported5:\n\t"
+ "#movl $105, %0:\n\n\t"
+ "\nNotSupported6:\n\t"
+ "#movl $106, %0:\n\n\t"
+ "\nNotSupported7:\n\t"
+ "#movl $107, %0:\n\n\t"
+ "movl $0, %0:\n\n\t"
+
+ "Return:\n\t"
+ : "=a" (rval)
+ : /* no input */
+ : "eax", "ebx", "ecx", "edx"
+ );
+
+ /* Return */
+ return(rval);
+}
+
+/* Function to test if sse instructions are supported...
+*/
+inline extern int
+sse_ok(void)
+{
+ /* Returns 1 if SSE instructions are supported, 0 otherwise */
+ return ( (mm_support() & 0x8) >> 3 );
+}
+#endif
+
+
+
+/* Helper functions for the instruction macros that follow...
+ (note that memory-to-register, m2r, instructions are nearly
+ as efficient as register-to-register, r2r, instructions;
+ however, memory-to-memory instructions are really simulated
+ as a convenience, and are only 1/3 as efficient)
+*/
+#ifdef SSE_TRACE
+
+/* Include the stuff for printing a trace to stderr...
+*/
+
+#include <stdio.h>
+
+#define sse_i2r(op, imm, reg) \
+ { \
+ sse_t sse_trace; \
+ sse_trace.uq = (imm); \
+ fprintf(stderr, #op "_i2r(" #imm "=0x%08x%08x, ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x) => ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (imm)); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x\n", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ }
+
+#define sse_m2r(op, mem, reg) \
+ { \
+ sse_t sse_trace; \
+ sse_trace = (mem); \
+ fprintf(stderr, #op "_m2r(" #mem "=0x%08x%08x, ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x) => ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (mem)); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x\n", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ }
+
+#define sse_r2m(op, reg, mem) \
+ { \
+ sse_t sse_trace; \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #op "_r2m(" #reg "=0x%08x%08x, ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ sse_trace = (mem); \
+ fprintf(stderr, #mem "=0x%08x%08x) => ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ (#op " %%" #reg ", %0" \
+ : "=X" (mem) \
+ : /* nothing */ ); \
+ sse_trace = (mem); \
+ fprintf(stderr, #mem "=0x%08x%08x\n", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ }
+
+#define sse_r2r(op, regs, regd) \
+ { \
+ sse_t sse_trace; \
+ __asm__ __volatile__ ("movq %%" #regs ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #op "_r2r(" #regs "=0x%08x%08x, ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #regd ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #regd "=0x%08x%08x) => ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ (#op " %" #regs ", %" #regd); \
+ __asm__ __volatile__ ("movq %%" #regd ", %0" \
+ : "=X" (sse_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #regd "=0x%08x%08x\n", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ }
+
+#define sse_m2m(op, mems, memd) \
+ { \
+ sse_t sse_trace; \
+ sse_trace = (mems); \
+ fprintf(stderr, #op "_m2m(" #mems "=0x%08x%08x, ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ sse_trace = (memd); \
+ fprintf(stderr, #memd "=0x%08x%08x) => ", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ __asm__ __volatile__ ("movq %0, %%mm0\n\t" \
+ #op " %1, %%mm0\n\t" \
+ "movq %%mm0, %0" \
+ : "=X" (memd) \
+ : "X" (mems)); \
+ sse_trace = (memd); \
+ fprintf(stderr, #memd "=0x%08x%08x\n", \
+ sse_trace.d[1], sse_trace.d[0]); \
+ }
+
+#else
+
+/* These macros are a lot simpler without the tracing...
+*/
+
+#define sse_i2r(op, imm, reg) \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (imm) )
+
+#define sse_m2r(op, mem, reg) \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (mem))
+
+#define sse_r2m(op, reg, mem) \
+ __asm__ __volatile__ (#op " %%" #reg ", %0" \
+ : "=X" (mem) \
+ : /* nothing */ )
+
+#define sse_r2r(op, regs, regd) \
+ __asm__ __volatile__ (#op " %" #regs ", %" #regd)
+
+#define sse_r2ri(op, regs, regd, imm) \
+ __asm__ __volatile__ (#op " %0, %%" #regs ", %%" #regd \
+ : /* nothing */ \
+ : "X" (imm) )
+
+/* Load data from mems to xmm0, operate on xmm0, and store data to memd.
+   (The xmmreg argument is retained for interface compatibility, but xmm0
+   is always used as the scratch register.)
+   BUGFIX: the store previously used %%mm0 (an MMX register), so the
+   computed result in xmm0 was never written back -- movups requires an
+   xmm register operand. */
+#define sse_m2m(op, mems, memd, xmmreg) \
+	__asm__ __volatile__ ("movups %0, %%xmm0\n\t" \
+			      #op " %1, %%xmm0\n\t" \
+			      "movups %%xmm0, %0" \
+			      : "=X" (memd) \
+			      : "X" (mems))
+
+/* Immediate-taking ops: GAS AT&T syntax requires the immediate FIRST
+   ("op $imm, src, dst"); the previous expansion emitted it last and
+   without the '$' prefix, which does not assemble. */
+#define sse_m2ri(op, mem, reg, subop) \
+	__asm__ __volatile__ (#op " $" #subop ", %0, %%" #reg \
+			      : /* nothing */ \
+			      : "X" (mem))
+
+#define sse_m2mi(op, mems, memd, xmmreg, subop) \
+	__asm__ __volatile__ ("movups %0, %%xmm0\n\t" \
+			      #op " $" #subop ", %1, %%xmm0\n\t" \
+			      "movups %%xmm0, %0" \
+			      : "=X" (memd) \
+			      : "X" (mems))
+#endif
+
+
+
+
+/* 1x128 MOVe Aligned four Packed Single-fp
+   BUGFIX: the m2m scratch register must be an xmm register; movaps
+   cannot take %%mm0 (MMX), which failed to assemble.
+*/
+#define movaps_m2r(var, reg)	sse_m2r(movaps, var, reg)
+#define movaps_r2m(reg, var)	sse_r2m(movaps, reg, var)
+#define movaps_r2r(regs, regd)	sse_r2r(movaps, regs, regd)
+#define movaps(vars, vard) \
+	__asm__ __volatile__ ("movaps %1, %%xmm0\n\t" \
+			      "movaps %%xmm0, %0" \
+			      : "=X" (vard) \
+			      : "X" (vars))
+
+
+/* 1x128 MOVe aligned Non-Temporal four Packed Single-fp
+*/
+#define movntps_r2m(xmmreg, var) sse_r2m(movntps, xmmreg, var)
+
+
+/* 1x64 MOVe Non-Temporal Quadword
+*/
+#define movntq_r2m(mmreg, var) sse_r2m(movntq, mmreg, var)
+
+
+/* 1x128 MOVe Unaligned four Packed Single-fp
+   BUGFIX: scratch register changed from %%mm0 (MMX, invalid operand for
+   movups) to %%xmm0.
+*/
+#define movups_m2r(var, reg)	sse_m2r(movups, var, reg)
+#define movups_r2m(reg, var)	sse_r2m(movups, reg, var)
+#define movups_r2r(regs, regd)	sse_r2r(movups, regs, regd)
+#define movups(vars, vard) \
+	__asm__ __volatile__ ("movups %1, %%xmm0\n\t" \
+			      "movups %%xmm0, %0" \
+			      : "=X" (vard) \
+			      : "X" (vars))
+
+
+/* MOVe High to Low Packed Single-fp
+ high half of 4x32f (x) -> low half of 4x32f (y)
+*/
+#define movhlps_r2r(regs, regd) sse_r2r(movhlps, regs, regd)
+
+
+/* MOVe Low to High Packed Single-fp
+ low half of 4x32f (x) -> high half of 4x32f (y)
+*/
+#define movlhps_r2r(regs, regd) sse_r2r(movlhps, regs, regd)
+
+
+/* MOVe High Packed Single-fp
+   2x32f -> high half of 4x32f
+   BUGFIX: movhps operates on xmm registers only; scratch changed from
+   %%mm0 to %%xmm0.
+*/
+#define movhps_m2r(var, reg)	sse_m2r(movhps, var, reg)
+#define movhps_r2m(reg, var)	sse_r2m(movhps, reg, var)
+#define movhps(vars, vard) \
+	__asm__ __volatile__ ("movhps %1, %%xmm0\n\t" \
+			      "movhps %%xmm0, %0" \
+			      : "=X" (vard) \
+			      : "X" (vars))
+
+
+/* MOVe Low Packed Single-fp
+   2x32f -> low half of 4x32f
+   BUGFIX: movlps operates on xmm registers only; scratch changed from
+   %%mm0 to %%xmm0.
+*/
+#define movlps_m2r(var, reg)	sse_m2r(movlps, var, reg)
+#define movlps_r2m(reg, var)	sse_r2m(movlps, reg, var)
+#define movlps(vars, vard) \
+	__asm__ __volatile__ ("movlps %1, %%xmm0\n\t" \
+			      "movlps %%xmm0, %0" \
+			      : "=X" (vard) \
+			      : "X" (vars))
+
+
+/* MOVe Scalar Single-fp
+   lowest field of 4x32f (x) -> lowest field of 4x32f (y)
+   BUGFIX: movss operates on xmm registers only; scratch changed from
+   %%mm0 to %%xmm0.
+*/
+#define movss_m2r(var, reg)	sse_m2r(movss, var, reg)
+#define movss_r2m(reg, var)	sse_r2m(movss, reg, var)
+#define movss_r2r(regs, regd)	sse_r2r(movss, regs, regd)
+#define movss(vars, vard) \
+	__asm__ __volatile__ ("movss %1, %%xmm0\n\t" \
+			      "movss %%xmm0, %0" \
+			      : "=X" (vard) \
+			      : "X" (vars))
+
+
+/* 4x16 Packed SHUFfle Word
+*/
+#define pshufw_m2r(var, reg, index) sse_m2ri(pshufw, var, reg, index)
+#define pshufw_r2r(regs, regd, index) sse_r2ri(pshufw, regs, regd, index)
+
+
+/* 1x128 SHUFfle Packed Single-fp
+*/
+#define shufps_m2r(var, reg, index) sse_m2ri(shufps, var, reg, index)
+#define shufps_r2r(regs, regd, index) sse_r2ri(shufps, regs, regd, index)
+
+
+/* ConVerT Packed signed Int32 to(2) Packed Single-fp
+*/
+#define cvtpi2ps_m2r(var, xmmreg) sse_m2r(cvtpi2ps, var, xmmreg)
+#define cvtpi2ps_r2r(mmreg, xmmreg) sse_r2r(cvtpi2ps, mmreg, xmmreg)
+
+
+/* ConVerT Packed Single-fp to(2) Packed signed Int32
+   BUGFIX: cvtps2pi converts xmm -> mm, so the xmm register is the SOURCE
+   and the mm register the DESTINATION; the operands were swapped.
+*/
+#define cvtps2pi_m2r(var, mmreg)	sse_m2r(cvtps2pi, var, mmreg)
+#define cvtps2pi_r2r(xmmreg, mmreg)	sse_r2r(cvtps2pi, xmmreg, mmreg)
+
+
+/* ConVerT with Truncate Packed Single-fp to(2) Packed Int32
+   BUGFIX: cvttps2pi converts xmm -> mm; source/destination were swapped.
+*/
+#define cvttps2pi_m2r(var, mmreg)	sse_m2r(cvttps2pi, var, mmreg)
+#define cvttps2pi_r2r(xmmreg, mmreg)	sse_r2r(cvttps2pi, xmmreg, mmreg)
+
+
+/* ConVerT Signed Int32 to(2) Single-fp (Scalar)
+*/
+#define cvtsi2ss_m2r(var, xmmreg) sse_m2r(cvtsi2ss, var, xmmreg)
+#define cvtsi2ss_r2r(reg, xmmreg) sse_r2r(cvtsi2ss, reg, xmmreg)
+
+
+/* ConVerT Scalar Single-fp to(2) Signed Int32
+*/
+#define cvtss2si_m2r(var, reg) sse_m2r(cvtss2si, var, reg)
+#define cvtss2si_r2r(xmmreg, reg) sse_r2r(cvtss2si, xmmreg, reg)
+
+
+/* ConVerT with Truncate Scalar Single-fp to(2) Signed Int32
+   BUGFIX: both macros previously emitted cvtss2si (round to nearest per
+   MXCSR) instead of cvttss2si (truncate toward zero), silently changing
+   numeric results.
+*/
+#define cvttss2si_m2r(var, reg)		sse_m2r(cvttss2si, var, reg)
+#define cvttss2si_r2r(xmmreg, reg)	sse_r2r(cvttss2si, xmmreg, reg)
+
+
+/* Parallel EXTRact Word from 4x16
+*/
+#define pextrw_r2r(mmreg, reg, field) sse_r2ri(pextrw, mmreg, reg, field)
+
+
+/* Parallel INSeRt Word from 4x16
+*/
+#define pinsrw_r2r(reg, mmreg, field) sse_r2ri(pinsrw, reg, mmreg, field)
+
+
+
+/* MOVe MaSK from Packed Single-fp
+*/
+#ifdef SSE_TRACE
+ #define movmskps(xmmreg, reg) \
+ { \
+ fprintf(stderr, "movmskps()\n"); \
+ __asm__ __volatile__ ("movmskps %" #xmmreg ", %" #reg) \
+ }
+#else
+ #define movmskps(xmmreg, reg) \
+ __asm__ __volatile__ ("movmskps %" #xmmreg ", %" #reg)
+#endif
+
+
+/* Parallel MOVe MaSK from mmx reg to 32-bit reg
+   BUGFIX: this macro emitted the movmskps opcode (packed-single sign
+   mask of an xmm reg) instead of pmovmskb (byte sign mask of an mm reg);
+   the trace message was likewise wrong.
+*/
+#ifdef SSE_TRACE
+	#define pmovmskb(mmreg, reg) \
+	{ \
+		fprintf(stderr, "pmovmskb()\n"); \
+		__asm__ __volatile__ ("pmovmskb %" #mmreg ", %" #reg) \
+	}
+#else
+	#define pmovmskb(mmreg, reg) \
+	__asm__ __volatile__ ("pmovmskb %" #mmreg ", %" #reg)
+#endif
+
+
+/* MASKed MOVe from 8x8 to memory pointed to by (e)di register
+   BUGFIX: maskmovq takes exactly two register operands, but the macro
+   expanded sse_r2ri (which requires FOUR arguments) with three -- a hard
+   preprocessor error on any use.  sse_r2r is the correct helper.
+*/
+#define maskmovq(mmregs, fieldreg)	sse_r2r(maskmovq, mmregs, fieldreg)
+
+
+
+
+/* 4x32f Parallel ADDs
+*/
+#define addps_m2r(var, reg) sse_m2r(addps, var, reg)
+#define addps_r2r(regs, regd) sse_r2r(addps, regs, regd)
+#define addps(vars, vard, xmmreg) sse_m2m(addps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel ADDs
+*/
+#define addss_m2r(var, reg) sse_m2r(addss, var, reg)
+#define addss_r2r(regs, regd) sse_r2r(addss, regs, regd)
+#define addss(vars, vard, xmmreg) sse_m2m(addss, vars, vard, xmmreg)
+
+
+/* 4x32f Parallel SUBs
+*/
+#define subps_m2r(var, reg) sse_m2r(subps, var, reg)
+#define subps_r2r(regs, regd) sse_r2r(subps, regs, regd)
+#define subps(vars, vard, xmmreg) sse_m2m(subps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel SUBs
+*/
+#define subss_m2r(var, reg) sse_m2r(subss, var, reg)
+#define subss_r2r(regs, regd) sse_r2r(subss, regs, regd)
+#define subss(vars, vard, xmmreg) sse_m2m(subss, vars, vard, xmmreg)
+
+
+/* 8x8u -> 4x16u Packed Sum of Absolute Differences
+*/
+#define psadbw_m2r(var, reg) sse_m2r(psadbw, var, reg)
+#define psadbw_r2r(regs, regd) sse_r2r(psadbw, regs, regd)
+#define psadbw(vars, vard, mmreg) sse_m2m(psadbw, vars, vard, mmreg)
+
+
+/* 4x16u Parallel MUL High Unsigned
+*/
+#define pmulhuw_m2r(var, reg) sse_m2r(pmulhuw, var, reg)
+#define pmulhuw_r2r(regs, regd) sse_r2r(pmulhuw, regs, regd)
+#define pmulhuw(vars, vard, mmreg) sse_m2m(pmulhuw, vars, vard, mmreg)
+
+
+/* 4x32f Parallel MULs
+*/
+#define mulps_m2r(var, reg) sse_m2r(mulps, var, reg)
+#define mulps_r2r(regs, regd) sse_r2r(mulps, regs, regd)
+#define mulps(vars, vard, xmmreg) sse_m2m(mulps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel MULs
+*/
+#define mulss_m2r(var, reg) sse_m2r(mulss, var, reg)
+#define mulss_r2r(regs, regd) sse_r2r(mulss, regs, regd)
+#define mulss(vars, vard, xmmreg) sse_m2m(mulss, vars, vard, xmmreg)
+
+
+/* 4x32f Parallel DIVs
+*/
+#define divps_m2r(var, reg) sse_m2r(divps, var, reg)
+#define divps_r2r(regs, regd) sse_r2r(divps, regs, regd)
+#define divps(vars, vard, xmmreg) sse_m2m(divps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel DIVs
+*/
+#define divss_m2r(var, reg) sse_m2r(divss, var, reg)
+#define divss_r2r(regs, regd) sse_r2r(divss, regs, regd)
+#define divss(vars, vard, xmmreg) sse_m2m(divss, vars, vard, xmmreg)
+
+
+/* 4x32f Parallel Reciprocals
+*/
+#define rcpps_m2r(var, reg) sse_m2r(rcpps, var, reg)
+#define rcpps_r2r(regs, regd) sse_r2r(rcpps, regs, regd)
+#define rcpps(vars, vard, xmmreg) sse_m2m(rcpps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel Reciprocals
+*/
+#define rcpss_m2r(var, reg) sse_m2r(rcpss, var, reg)
+#define rcpss_r2r(regs, regd) sse_r2r(rcpss, regs, regd)
+#define rcpss(vars, vard, xmmreg) sse_m2m(rcpss, vars, vard, xmmreg)
+
+
+/* 4x32f Parallel Square Root of Reciprocals
+*/
+#define rsqrtps_m2r(var, reg) sse_m2r(rsqrtps, var, reg)
+#define rsqrtps_r2r(regs, regd) sse_r2r(rsqrtps, regs, regd)
+#define rsqrtps(vars, vard, xmmreg) sse_m2m(rsqrtps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel Square Root of Reciprocals
+*/
+#define rsqrtss_m2r(var, reg) sse_m2r(rsqrtss, var, reg)
+#define rsqrtss_r2r(regs, regd) sse_r2r(rsqrtss, regs, regd)
+#define rsqrtss(vars, vard, xmmreg) sse_m2m(rsqrtss, vars, vard, xmmreg)
+
+
+/* 4x32f Parallel Square Roots
+*/
+#define sqrtps_m2r(var, reg) sse_m2r(sqrtps, var, reg)
+#define sqrtps_r2r(regs, regd) sse_r2r(sqrtps, regs, regd)
+#define sqrtps(vars, vard, xmmreg) sse_m2m(sqrtps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel Square Roots
+*/
+#define sqrtss_m2r(var, reg) sse_m2r(sqrtss, var, reg)
+#define sqrtss_r2r(regs, regd) sse_r2r(sqrtss, regs, regd)
+#define sqrtss(vars, vard, xmmreg) sse_m2m(sqrtss, vars, vard, xmmreg)
+
+
+/* 8x8u and 4x16u Parallel AVeraGe
+*/
+#define pavgb_m2r(var, reg) sse_m2r(pavgb, var, reg)
+#define pavgb_r2r(regs, regd) sse_r2r(pavgb, regs, regd)
+#define pavgb(vars, vard, mmreg) sse_m2m(pavgb, vars, vard, mmreg)
+
+#define pavgw_m2r(var, reg) sse_m2r(pavgw, var, reg)
+#define pavgw_r2r(regs, regd) sse_r2r(pavgw, regs, regd)
+#define pavgw(vars, vard, mmreg) sse_m2m(pavgw, vars, vard, mmreg)
+
+
+/* 1x128 bitwise AND
+*/
+#define andps_m2r(var, reg) sse_m2r(andps, var, reg)
+#define andps_r2r(regs, regd) sse_r2r(andps, regs, regd)
+#define andps(vars, vard, xmmreg) sse_m2m(andps, vars, vard, xmmreg)
+
+
+/* 1x128 bitwise AND with Not the destination
+*/
+#define andnps_m2r(var, reg) sse_m2r(andnps, var, reg)
+#define andnps_r2r(regs, regd) sse_r2r(andnps, regs, regd)
+#define andnps(vars, vard, xmmreg) sse_m2m(andnps, vars, vard, xmmreg)
+
+
+/* 1x128 bitwise OR
+*/
+#define orps_m2r(var, reg) sse_m2r(orps, var, reg)
+#define orps_r2r(regs, regd) sse_r2r(orps, regs, regd)
+#define orps(vars, vard, xmmreg) sse_m2m(orps, vars, vard, xmmreg)
+
+
+/* 1x128 bitwise eXclusive OR
+*/
+#define xorps_m2r(var, reg) sse_m2r(xorps, var, reg)
+#define xorps_r2r(regs, regd) sse_r2r(xorps, regs, regd)
+#define xorps(vars, vard, xmmreg) sse_m2m(xorps, vars, vard, xmmreg)
+
+
+/* 8x8u, 4x16, and 4x32f Parallel Maximum
+*/
+#define pmaxub_m2r(var, reg) sse_m2r(pmaxub, var, reg)
+#define pmaxub_r2r(regs, regd) sse_r2r(pmaxub, regs, regd)
+#define pmaxub(vars, vard, mmreg) sse_m2m(pmaxub, vars, vard, mmreg)
+
+#define pmaxsw_m2r(var, reg) sse_m2r(pmaxsw, var, reg)
+#define pmaxsw_r2r(regs, regd) sse_r2r(pmaxsw, regs, regd)
+#define pmaxsw(vars, vard, mmreg) sse_m2m(pmaxsw, vars, vard, mmreg)
+
+#define maxps_m2r(var, reg) sse_m2r(maxps, var, reg)
+#define maxps_r2r(regs, regd) sse_r2r(maxps, regs, regd)
+#define maxps(vars, vard, xmmreg) sse_m2m(maxps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel Maximum
+*/
+#define maxss_m2r(var, reg) sse_m2r(maxss, var, reg)
+#define maxss_r2r(regs, regd) sse_r2r(maxss, regs, regd)
+#define maxss(vars, vard, xmmreg) sse_m2m(maxss, vars, vard, xmmreg)
+
+
+/* 8x8u, 4x16, and 4x32f Parallel Minimum
+*/
+#define pminub_m2r(var, reg) sse_m2r(pminub, var, reg)
+#define pminub_r2r(regs, regd) sse_r2r(pminub, regs, regd)
+#define pminub(vars, vard, mmreg) sse_m2m(pminub, vars, vard, mmreg)
+
+#define pminsw_m2r(var, reg) sse_m2r(pminsw, var, reg)
+#define pminsw_r2r(regs, regd) sse_r2r(pminsw, regs, regd)
+#define pminsw(vars, vard, mmreg) sse_m2m(pminsw, vars, vard, mmreg)
+
+#define minps_m2r(var, reg) sse_m2r(minps, var, reg)
+#define minps_r2r(regs, regd) sse_r2r(minps, regs, regd)
+#define minps(vars, vard, xmmreg) sse_m2m(minps, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Parallel Minimum
+*/
+#define minss_m2r(var, reg) sse_m2r(minss, var, reg)
+#define minss_r2r(regs, regd) sse_r2r(minss, regs, regd)
+#define minss(vars, vard, xmmreg) sse_m2m(minss, vars, vard, xmmreg)
+
+
+/* 4x32f Parallel CoMPares
+ (resulting fields are either 0 or -1)
+*/
+#define cmpps_m2r(var, reg, op) sse_m2ri(cmpps, var, reg, op)
+#define cmpps_r2r(regs, regd, op) sse_r2ri(cmpps, regs, regd, op)
+#define cmpps(vars, vard, op, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, op)
+
+#define cmpeqps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 0)
+#define cmpeqps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 0)
+#define cmpeqps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 0)
+
+#define cmpltps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 1)
+#define cmpltps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 1)
+#define cmpltps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 1)
+
+#define cmpleps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 2)
+#define cmpleps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 2)
+#define cmpleps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 2)
+
+#define cmpunordps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 3)
+#define cmpunordps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 3)
+#define cmpunordps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 3)
+
+#define cmpneqps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 4)
+#define cmpneqps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 4)
+#define cmpneqps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 4)
+
+#define cmpnltps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 5)
+#define cmpnltps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 5)
+#define cmpnltps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 5)
+
+#define cmpnleps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 6)
+#define cmpnleps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 6)
+#define cmpnleps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 6)
+
+#define cmpordps_m2r(var, reg) sse_m2ri(cmpps, var, reg, 7)
+#define cmpordps_r2r(regs, regd) sse_r2ri(cmpps, regs, regd, 7)
+#define cmpordps(vars, vard, xmmreg) sse_m2mi(cmpps, vars, vard, xmmreg, 7)
+
+
+/* Lowest Field of 4x32f Parallel CoMPares
+ (resulting fields are either 0 or -1)
+*/
+#define cmpss_m2r(var, reg, op) sse_m2ri(cmpss, var, reg, op)
+#define cmpss_r2r(regs, regd, op) sse_r2ri(cmpss, regs, regd, op)
+#define cmpss(vars, vard, op, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, op)
+
+#define cmpeqss_m2r(var, reg) sse_m2ri(cmpss, var, reg, 0)
+#define cmpeqss_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 0)
+#define cmpeqss(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 0)
+
+#define cmpltss_m2r(var, reg) sse_m2ri(cmpss, var, reg, 1)
+#define cmpltss_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 1)
+#define cmpltss(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 1)
+
+#define cmpless_m2r(var, reg) sse_m2ri(cmpss, var, reg, 2)
+#define cmpless_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 2)
+#define cmpless(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 2)
+
+#define cmpunordss_m2r(var, reg) sse_m2ri(cmpss, var, reg, 3)
+#define cmpunordss_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 3)
+#define cmpunordss(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 3)
+
+#define cmpneqss_m2r(var, reg) sse_m2ri(cmpss, var, reg, 4)
+#define cmpneqss_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 4)
+#define cmpneqss(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 4)
+
+#define cmpnltss_m2r(var, reg) sse_m2ri(cmpss, var, reg, 5)
+#define cmpnltss_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 5)
+#define cmpnltss(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 5)
+
+#define cmpnless_m2r(var, reg) sse_m2ri(cmpss, var, reg, 6)
+#define cmpnless_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 6)
+#define cmpnless(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 6)
+
+#define cmpordss_m2r(var, reg) sse_m2ri(cmpss, var, reg, 7)
+#define cmpordss_r2r(regs, regd) sse_r2ri(cmpss, regs, regd, 7)
+#define cmpordss(vars, vard, xmmreg) sse_m2mi(cmpss, vars, vard, xmmreg, 7)
+
+
+/* Lowest Field of 4x32f Parallel CoMPares to set EFLAGS
+   (sets ZF/PF/CF in EFLAGS; no register fields are written)
+*/
+#define comiss_m2r(var, reg) sse_m2r(comiss, var, reg)
+#define comiss_r2r(regs, regd) sse_r2r(comiss, regs, regd)
+#define comiss(vars, vard, xmmreg) sse_m2m(comiss, vars, vard, xmmreg)
+
+
+/* Lowest Field of 4x32f Unordered Parallel CoMPares to set EFLAGS
+   (sets ZF/PF/CF in EFLAGS; no register fields are written)
+*/
+#define ucomiss_m2r(var, reg) sse_m2r(ucomiss, var, reg)
+#define ucomiss_r2r(regs, regd) sse_r2r(ucomiss, regs, regd)
+#define ucomiss(vars, vard, xmmreg) sse_m2m(ucomiss, vars, vard, xmmreg)
+
+
+/* 2-(4x32f) -> 4x32f UNPaCK Low Packed Single-fp
+ (interleaves low half of dest with low half of source
+ as padding in each result field)
+*/
+#define unpcklps_m2r(var, reg) sse_m2r(unpcklps, var, reg)
+#define unpcklps_r2r(regs, regd) sse_r2r(unpcklps, regs, regd)
+
+
+/* 2-(4x32f) -> 4x32f UNPaCK High Packed Single-fp
+ (interleaves high half of dest with high half of source
+ as padding in each result field)
+*/
+#define unpckhps_m2r(var, reg) sse_m2r(unpckhps, var, reg)
+#define unpckhps_r2r(regs, regd) sse_r2r(unpckhps, regs, regd)
+
+
+
+/* Fp and mmX ReSTORe state
+*/
+#ifdef SSE_TRACE
+ #define fxrstor(mem) \
+ { \
+ fprintf(stderr, "fxrstor()\n"); \
+ __asm__ __volatile__ ("fxrstor %0" \
+ : /* nothing */ \
+ : "X" (mem)) \
+ }
+#else
+ #define fxrstor(mem) \
+ __asm__ __volatile__ ("fxrstor %0" \
+ : /* nothing */ \
+ : "X" (mem))
+#endif
+
+
+/* Fp and mmX SAVE state
+*/
+#ifdef SSE_TRACE
+ #define fxsave(mem) \
+ { \
+ fprintf(stderr, "fxsave()\n"); \
+ __asm__ __volatile__ ("fxsave %0" \
+ : /* nothing */ \
+ : "X" (mem)) \
+ }
+#else
+ #define fxsave(mem) \
+ __asm__ __volatile__ ("fxsave %0" \
+ : /* nothing */ \
+ : "X" (mem))
+#endif
+
+
+/* STore streaMing simd eXtensions Control/Status Register
+*/
+#ifdef SSE_TRACE
+ #define stmxcsr(mem) \
+ { \
+ fprintf(stderr, "stmxcsr()\n"); \
+ __asm__ __volatile__ ("stmxcsr %0" \
+ : /* nothing */ \
+ : "X" (mem)) \
+ }
+#else
+ #define stmxcsr(mem) \
+ __asm__ __volatile__ ("stmxcsr %0" \
+ : /* nothing */ \
+ : "X" (mem))
+#endif
+
+
+/* LoaD streaMing simd eXtensions Control/Status Register
+*/
+#ifdef SSE_TRACE
+ #define ldmxcsr(mem) \
+ { \
+ fprintf(stderr, "ldmxcsr()\n"); \
+ __asm__ __volatile__ ("ldmxcsr %0" \
+ : /* nothing */ \
+ : "X" (mem)) \
+ }
+#else
+ #define ldmxcsr(mem) \
+ __asm__ __volatile__ ("ldmxcsr %0" \
+ : /* nothing */ \
+ : "X" (mem))
+#endif
+
+
+/* Store FENCE - enforce ordering of stores before fence vs. stores
+ occurring after fence in source code.
+*/
+#ifdef SSE_TRACE
+ #define sfence() \
+ { \
+ fprintf(stderr, "sfence()\n"); \
+ __asm__ __volatile__ ("sfence\n\t") \
+ }
+#else
+ #define sfence() \
+ __asm__ __volatile__ ("sfence\n\t")
+#endif
+
+
+/* PREFETCH data using T0, T1, T2, or NTA hint
+ T0 = Prefetch into all cache levels
+ T1 = Prefetch into all cache levels except 0th level
+ T2 = Prefetch into all cache levels except 0th and 1st levels
+ NTA = Prefetch data into non-temporal cache structure
+*/
+#ifdef SSE_TRACE
+#else
+ #define prefetch(mem, hint) \
+ __asm__ __volatile__ ("prefetch" #hint " %0" \
+ : /* nothing */ \
+ : "X" (mem))
+
+ #define prefetcht0(mem) prefetch(mem, t0)
+ #define prefetcht1(mem) prefetch(mem, t1)
+ #define prefetcht2(mem) prefetch(mem, t2)
+ #define prefetchnta(mem) prefetch(mem, nta)
+#endif
+
+
+
+#endif
diff --git a/gst/deinterlace/tvtime/tomsmocomp.c b/gst/deinterlace/tvtime/tomsmocomp.c
new file mode 100644
index 0000000000..003e72b06b
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp.c
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2004 Billy Biggs <vektor@dumbterm.net>
+ * Copyright (C) 2008,2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Tom Barry.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/gst.h>
+#ifdef HAVE_ORC
+#include <orc/orc.h>
+#endif
+#include "gstdeinterlacemethod.h"
+#include "plugins.h"
+
+#define GST_TYPE_DEINTERLACE_METHOD_TOMSMOCOMP (gst_deinterlace_method_tomsmocomp_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_TOMSMOCOMP(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_TOMSMOCOMP))
+#define GST_IS_DEINTERLACE_METHOD_TOMSMOCOMP_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_TOMSMOCOMP))
+#define GST_DEINTERLACE_METHOD_TOMSMOCOMP_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_TOMSMOCOMP, GstDeinterlaceMethodTomsMoCompClass))
+#define GST_DEINTERLACE_METHOD_TOMSMOCOMP(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_TOMSMOCOMP, GstDeinterlaceMethodTomsMoComp))
+#define GST_DEINTERLACE_METHOD_TOMSMOCOMP_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_TOMSMOCOMP, GstDeinterlaceMethodTomsMoCompClass))
+#define GST_DEINTERLACE_METHOD_TOMSMOCOMP_CAST(obj) ((GstDeinterlaceMethodTomsMoComp*)(obj))
+
+typedef struct
+{
+ GstDeinterlaceMethod parent;
+
+ guint search_effort;
+ gboolean strange_bob;
+} GstDeinterlaceMethodTomsMoComp;
+
+typedef GstDeinterlaceMethodClass GstDeinterlaceMethodTomsMoCompClass;
+
+static void
+Fieldcopy (guint8 * dest, const guint8 * src, gint count,
+ gint rows, gint dst_pitch, gint src_pitch)
+{
+ gint i;
+
+ for (i = 0; i < rows; i++) {
+ memcpy (dest, src, count);
+ src += src_pitch;
+ dest += dst_pitch;
+ }
+}
+
+#define USE_FOR_DSCALER
+
+#define IS_C
+#define SIMD_TYPE C
+#define FUNCT_NAME tomsmocompDScaler_C
+#include "tomsmocomp/TomsMoCompAll.inc"
+#undef IS_C
+#undef SIMD_TYPE
+#undef FUNCT_NAME
+
+#ifdef BUILD_X86_ASM
+
+#include "tomsmocomp/tomsmocompmacros.h"
+#include "x86-64_macros.inc"
+
+#define IS_MMX
+#define SIMD_TYPE MMX
+#define FUNCT_NAME tomsmocompDScaler_MMX
+#include "tomsmocomp/TomsMoCompAll.inc"
+#undef IS_MMX
+#undef SIMD_TYPE
+#undef FUNCT_NAME
+
+#define IS_3DNOW
+#define SIMD_TYPE 3DNOW
+#define FUNCT_NAME tomsmocompDScaler_3DNOW
+#include "tomsmocomp/TomsMoCompAll.inc"
+#undef IS_3DNOW
+#undef SIMD_TYPE
+#undef FUNCT_NAME
+
+#define IS_MMXEXT
+#define SIMD_TYPE MMXEXT
+#define FUNCT_NAME tomsmocompDScaler_MMXEXT
+#include "tomsmocomp/TomsMoCompAll.inc"
+#undef IS_MMXEXT
+#undef SIMD_TYPE
+#undef FUNCT_NAME
+
+#endif
+
+G_DEFINE_TYPE (GstDeinterlaceMethodTomsMoComp,
+ gst_deinterlace_method_tomsmocomp, GST_TYPE_DEINTERLACE_METHOD);
+
+enum
+{
+ PROP_0,
+ PROP_SEARCH_EFFORT,
+ PROP_STRANGE_BOB
+};
+
+static void
+gst_deinterlace_method_tomsmocomp_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstDeinterlaceMethodTomsMoComp *self =
+ GST_DEINTERLACE_METHOD_TOMSMOCOMP (object);
+
+ switch (prop_id) {
+ case PROP_SEARCH_EFFORT:
+ self->search_effort = g_value_get_uint (value);
+ break;
+ case PROP_STRANGE_BOB:
+ self->strange_bob = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+gst_deinterlace_method_tomsmocomp_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstDeinterlaceMethodTomsMoComp *self =
+ GST_DEINTERLACE_METHOD_TOMSMOCOMP (object);
+
+ switch (prop_id) {
+ case PROP_SEARCH_EFFORT:
+ g_value_set_uint (value, self->search_effort);
+ break;
+ case PROP_STRANGE_BOB:
+ g_value_set_boolean (value, self->strange_bob);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+ gst_deinterlace_method_tomsmocomp_class_init
+ (GstDeinterlaceMethodTomsMoCompClass * klass)
+{
+ GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+#ifdef BUILD_X86_ASM
+ guint cpu_flags =
+ orc_target_get_default_flags (orc_target_get_by_name ("mmx"));
+#endif
+
+ gobject_class->set_property = gst_deinterlace_method_tomsmocomp_set_property;
+ gobject_class->get_property = gst_deinterlace_method_tomsmocomp_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_SEARCH_EFFORT,
+ g_param_spec_uint ("search-effort",
+ "Search Effort",
+ "Search Effort", 0, 27, 5, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ g_object_class_install_property (gobject_class, PROP_STRANGE_BOB,
+ g_param_spec_boolean ("strange-bob",
+ "Strange Bob",
+ "Use strange bob", FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ dim_class->fields_required = 4;
+ dim_class->name = "Motion Adaptive: Motion Search";
+ dim_class->nick = "tomsmocomp";
+ dim_class->latency = 1;
+
+#ifdef BUILD_X86_ASM
+ if (cpu_flags & ORC_TARGET_MMX_MMXEXT) {
+ dim_class->deinterlace_frame_yuy2 = tomsmocompDScaler_MMXEXT;
+ dim_class->deinterlace_frame_yvyu = tomsmocompDScaler_MMXEXT;
+ } else if (cpu_flags & ORC_TARGET_MMX_3DNOW) {
+ dim_class->deinterlace_frame_yuy2 = tomsmocompDScaler_3DNOW;
+ dim_class->deinterlace_frame_yvyu = tomsmocompDScaler_3DNOW;
+ } else if (cpu_flags & ORC_TARGET_MMX_MMX) {
+ dim_class->deinterlace_frame_yuy2 = tomsmocompDScaler_MMX;
+ dim_class->deinterlace_frame_yvyu = tomsmocompDScaler_MMX;
+ } else {
+ dim_class->deinterlace_frame_yuy2 = tomsmocompDScaler_C;
+ dim_class->deinterlace_frame_yvyu = tomsmocompDScaler_C;
+ }
+#else
+ dim_class->deinterlace_frame_yuy2 = tomsmocompDScaler_C;
+ dim_class->deinterlace_frame_yvyu = tomsmocompDScaler_C;
+#endif
+}
+
+static void
+gst_deinterlace_method_tomsmocomp_init (GstDeinterlaceMethodTomsMoComp * self)
+{
+ self->search_effort = 5;
+ self->strange_bob = FALSE;
+}
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoop0A.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoop0A.inc
new file mode 100644
index 0000000000..b1d9aeca79
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoop0A.inc
@@ -0,0 +1,15 @@
+// -*- c++ -*-
+
+// Searches just the center pixel, in both the old
+// and new fields, but takes averages. This is an even
+// pixel address. Any chroma match will be used. (YUY2)
+// We best like finding 0 motion so we will bias everything we found previously
+// up by a little, and adjust later
+
+#ifdef IS_SSE2
+ "paddusb "_ONES", %%xmm7\n\t" // bias toward no motion
+#else
+ "paddusb "_ONES", %%mm7\n\t" // bias toward no motion
+#endif
+
+ MERGE4PIXavg("(%%"XDI", %%"XCX")", "(%%"XSI", %%"XCX")") // center, in old and new
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopBottom.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopBottom.inc
new file mode 100644
index 0000000000..1f4155ed6e
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopBottom.inc
@@ -0,0 +1,174 @@
+// -*- c++ -*-
+
+// Version for non-SSE2
+
+#ifndef IS_C
+
+#ifdef SKIP_SEARCH
+ "movq %%mm6, %%mm0\n\t" // just use the results of our weird bob
+#else
+
+
+ // JA 9/Dec/2002
+ // failed experiment
+ // but leave in placeholder for me to play about
+#ifdef DONT_USE_STRANGE_BOB
+ // Use the best weave if diffs less than 10 as that
+ // means the image is still or moving cleanly
+ // if there is motion we will clip which will catch anything
+ "psubusb "_FOURS", %%mm7\n\t" // sets bits to zero if weave diff < 4
+ "pxor %%mm0, %%mm0\n\t"
+ "pcmpeqb %%mm0, %%mm7\n\t" // all ff where weave better, else 00
+ "pcmpeqb %%mm7, %%mm0\n\t" // all ff where bob better, else 00
+ "pand %%mm6, %%mm0\n\t" // use bob for these pixel values
+ "pand %%mm5, %%mm7\n\t" // use weave for these
+ "por %%mm7, %%mm0\n\t" // combine both
+#else
+ // Use the better of bob or weave
+ // pminub mm4, TENS // the most we care about
+ V_PMINUB ("%%mm4", _TENS, "%%mm0") // the most we care about
+
+ "psubusb %%mm4, %%mm7\n\t" // foregive that much from weave est?
+ "psubusb "_FOURS", %%mm7\n\t" // bias it a bit toward weave
+ "pxor %%mm0, %%mm0\n\t"
+ "pcmpeqb %%mm0, %%mm7\n\t" // all ff where weave better, else 00
+ "pcmpeqb %%mm7, %%mm0\n\t" // all ff where bob better, else 00
+ "pand %%mm6, %%mm0\n\t" // use bob for these pixel values
+ "pand %%mm5, %%mm7\n\t" // use weave for these
+ "por %%mm7, %%mm0\n\t" // combine both
+#endif
+
+
+ // pminub mm0, Max_Vals // but clip to catch the stray error
+ V_PMINUB ("%%mm0", _Max_Vals, "%%mm1") // but clip to catch the stray error
+ // pmaxub mm0, Min_Vals
+ V_PMAXUB ("%%mm0", _Min_Vals)
+
+#endif
+
+
+ MOVX" "_pDest", %%"XAX"\n\t"
+
+#ifdef USE_VERTICAL_FILTER
+ "movq %%mm0, %%mm1\n\t"
+ // pavgb mm0, qword ptr["XBX"]
+ V_PAVGB ("%%mm0", "(%%"XBX")", "%%mm2", _ShiftMask)
+ // movntq qword ptr["XAX"+"XDX"], mm0
+ V_MOVNTQ ("(%%"XAX", %%"XDX")", "%%mm0") // was "(%"XAX",...)": single % emits a bad AT&T operand
+ // pavgb mm1, qword ptr["XBX"+"XCX"]
+ V_PAVGB ("%%mm1", "(%%"XBX", %%"XCX")", "%%mm2", _ShiftMask)
+ //FIXME: XDX or XAX!!
+ "addq "_dst_pitchw", %%"XBX"\n\t" // closing "\n\t" was missing, fusing this with the next insn
+ // movntq qword ptr["XAX"+"XDX"], mm1
+ V_MOVNTQ ("(%%"XAX", %%"XDX")", "%%mm1")
+#else
+
+ // movntq qword ptr["XAX"+"XDX"], mm0
+ V_MOVNTQ ("(%%"XAX", %%"XDX")", "%%mm0")
+#endif
+
+ LEAX" 8(%%"XDX"), %%"XDX"\n\t" // bump offset pointer
+ CMPX" "_Last8", %%"XDX"\n\t" // done with line?
+ "jb 1b\n\t" // y
+
+ MOVX" "_oldbx", %%"XBX"\n\t"
+
+ : /* no outputs */
+
+ : "m"(pBob),
+ "m"(src_pitch2),
+ "m"(ShiftMask),
+ "m"(pDest),
+ "m"(dst_pitchw),
+ "m"(Last8),
+ "m"(pSrc),
+ "m"(pSrcP),
+ "m"(pBobP),
+ "m"(DiffThres),
+ "m"(Min_Vals),
+ "m"(Max_Vals),
+ "m"(FOURS),
+ "m"(TENS),
+ "m"(ONES),
+ "m"(UVMask),
+ "m"(Max_Mov),
+ "m"(YMask),
+ "m"(oldbx)
+
+ : XAX, XCX, XDX, XSI, XDI,
+ "st", "st(1)", "st(2)", "st(3)", "st(4)", "st(5)", "st(6)", "st(7)",
+#ifdef __MMX__
+ "mm0", "mm1", "mm2", "mm3", "mm4", "mm5", "mm6", "mm7",
+#endif
+ "memory", "cc"
+ );
+
+ // adjust for next line
+ pSrc += src_pitch2;
+ pSrcP += src_pitch2;
+ pDest += dst_pitch2;
+ pBob += src_pitch2;
+ pBobP += src_pitch2;
+ }
+
+ return 0;
+#else
+#ifdef SKIP_SEARCH
+ out[0] = best[0]; // just use the results of our weird bob
+ out[1] = best[1];
+#else
+ diff[0] = diff[0] - MIN (diff[0], 10) - 4;
+ diff[1] = diff[1] - MIN (diff[1], 10) - 4; // was MIN(diff[1] - 10): comma typo'd to minus, 1-arg MIN
+ if (diff[0] < 0)
+ out[0] = weave[0];
+ else
+ out[0] = best[0];
+
+ if (diff[1] < 0)
+ out[1] = weave[1];
+ else
+ out[1] = best[1];
+
+
+ out[0] = CLAMP (out[0], MinVals[0], MaxVals[0]);
+ out[1] = CLAMP (out[1], MinVals[1], MaxVals[1]);
+#endif
+
+#ifdef USE_VERTICAL_FILTER
+ pDest[x] = (out[0] + pBob[0]) / 2;
+ pDest[x + dst_pitchw] = (pBob[src_pitch2] + out[0]) / 2;
+ pDest[x + 1] = (out[1] + pBob[1]) / 2;
+ pDest[x + 1 + dst_pitchw] = (pBob[src_pitch2 + 1] + out[1]) / 2;
+#else
+ pDest[x] = out[0];
+ pDest[x+1] = out[1];
+#endif
+ pBob += 2;
+ pBobP += 2;
+ pSrc += 2;
+ pSrcP += 2;
+ }
+ // adjust for next line
+ pSrc = src_pitch2 * (y+1) + pWeaveSrc;
+ pSrcP = src_pitch2 * (y+1) + pWeaveSrcP;
+ pDest = dst_pitch2 * (y+1) + pWeaveDest + dst_pitch2;
+
+
+ if (TopFirst)
+ {
+ pBob = pCopySrc + src_pitch2;
+ pBobP = pCopySrcP + src_pitch2;
+ }
+ else
+ {
+ pBob = pCopySrc;
+ pBobP = pCopySrcP;
+ }
+
+ pBob += src_pitch2 * (y+1);
+ pBobP += src_pitch2 * (y+1);
+ }
+
+ return 0;
+
+#endif
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA.inc
new file mode 100644
index 0000000000..6208fe8c24
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA.inc
@@ -0,0 +1,11 @@
+// -*- c++ -*-
+
+// Searches 2 pixel to the left and right, in both the old
+// and new fields, but takes averages. These are even
+// pixel addresses. Chroma match will be used. (YUY2)
+ MERGE4PIXavg("-4(%%"XDI")", "4(%%"XSI", %%"XCX", 2)") // up left, down right
+ MERGE4PIXavg("4(%%"XDI")", "-4(%%"XSI", %%"XCX", 2)") // up right, down left
+ MERGE4PIXavg("-4(%%"XDI", %%"XCX")", "4(%%"XSI", %%"XCX")") // left, right
+ MERGE4PIXavg("4(%%"XDI", %%"XCX")", "-4(%%"XSI", %%"XCX")") // right, left
+ MERGE4PIXavg("-4(%%"XDI", %%"XCX", 2)", "4(%%"XSI")") // down left, up right
+ MERGE4PIXavg("4(%%"XDI", %%"XCX", 2)", "-4(%%"XSI")") // down right, up left
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA8.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA8.inc
new file mode 100644
index 0000000000..2841c3f6dd
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopEdgeA8.inc
@@ -0,0 +1,12 @@
+// -*- c++ -*-
+
+// Searches 4 pixel to the left and right, in both the old
+// and new fields, but takes averages. These are even
+// pixel addresses. Chroma match will be used. (YUY2)
+ MERGE4PIXavg("-8(%%"XDI")", "8(%%"XSI", %%"XCX", 2)") // up left, down right
+ MERGE4PIXavg("8(%%"XDI")", "-8(%%"XSI", %%"XCX", 2)") // up right, down left
+ MERGE4PIXavg("-8(%%"XDI", %%"XCX")", "8(%%"XSI", %%"XCX")") // left, right
+ MERGE4PIXavg("8(%%"XDI", %%"XCX")", "-8(%%"XSI", %%"XCX")") // right, left
+ MERGE4PIXavg("-8(%%"XDI", %%"XCX", 2)", "8(%%"XSI")") // down left, up right
+ MERGE4PIXavg("8(%%"XDI", %%"XCX", 2)", "-8(%%"XSI")") // down right, up left
+
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA.inc
new file mode 100644
index 0000000000..ab5375f484
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA.inc
@@ -0,0 +1,10 @@
+// -*- c++ -*-
+
+// Searches 1 pixel to the left and right, in both the old
+// and new fields, but takes averages. These are odd
+// pixel addresses. Any chroma match will not be used. (YUY2)
+ MERGE4PIXavg("-2(%%"XDI")", "2(%%"XSI", %%"XCX", 2)") // up left, down right
+ MERGE4PIXavg("2(%%"XDI")", "-2(%%"XSI", %%"XCX", 2)") // up right, down left
+ MERGE4PIXavg("-2(%%"XDI", %%"XCX", 2)", "2(%%"XSI")") // down left, up right
+ MERGE4PIXavg("2(%%"XDI", %%"XCX", 2)", "-2(%%"XSI")") // down right, up left
+#include "SearchLoopOddA2.inc"
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA2.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA2.inc
new file mode 100644
index 0000000000..fd3f6fb0b8
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA2.inc
@@ -0,0 +1,5 @@
+// Searches 1 pixel to the left and right, in both the old
+// and new fields, but takes averages. These are odd
+// pixel addresses. Any chroma match will not be used. (YUY2)
+ MERGE4PIXavg("-2(%%"XDI", %%"XCX")", "2(%%"XSI", %%"XCX")") // left, right
+ MERGE4PIXavg("2(%%"XDI", %%"XCX")", "-2(%%"XSI", %%"XCX")") // right, left
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA6.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA6.inc
new file mode 100644
index 0000000000..cbae014eb4
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddA6.inc
@@ -0,0 +1,11 @@
+// -*- c++ -*-
+
+// Searches 3 pixels to the left and right, in both the old
+// and new fields, but takes averages. These are odd
+// pixel addresses. Any chroma match will not be used. (YUY2)
+ MERGE4PIXavg("-6(%%"XDI")", "6(%%"XSI", %%"XCX", 2)") // up left, down right
+ MERGE4PIXavg("6(%%"XDI")", "-6(%%"XSI", %%"XCX", 2)") // up right, down left
+ MERGE4PIXavg("-6(%%"XDI", %%"XCX")", "6(%%"XSI", %%"XCX")") // left, right
+ MERGE4PIXavg("6(%%"XDI", %%"XCX")", "-6(%%"XSI", %%"XCX")") // right, left
+ MERGE4PIXavg("-6(%%"XDI", %%"XCX", 2)", "6(%%"XSI")") // down left, up right
+ MERGE4PIXavg("6(%%"XDI", %%"XCX", 2)", "-6(%%"XSI")") // down right, up left
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH.inc
new file mode 100644
index 0000000000..e59e3c7e81
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH.inc
@@ -0,0 +1,10 @@
+// Searches 1 pixel to the left and right, in both the old
+// and new fields, but takes v-half pel averages. These are odd
+// pixel addresses. Any chroma match will not be used. (YUY2)
+ __asm
+ {
+ MERGE4PIXavgH("XDI"-2, "XDI"+"XCX"-2, "XSI"+"XCX"+2, "XSI"+2*"XCX"+2) // up left, down right
+ MERGE4PIXavgH("XDI"+2, "XDI"+"XCX"+2, "XSI"+"XCX"-2, "XSI"+2*"XCX"-2) // up right, down left
+ MERGE4PIXavgH("XDI"+2*"XCX"-2, "XDI"+"XCX"-2, "XSI"+"XCX"+2, "XSI"+2) // down left, up right
+ MERGE4PIXavgH("XDI"+2*"XCX"+2, "XDI"+"XCX"+2, "XSI"+"XCX"-2, "XSI"-2) // down right, up left
+ }
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH2.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH2.inc
new file mode 100644
index 0000000000..cd7d812a11
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopOddAH2.inc
@@ -0,0 +1,5 @@
+// Searches 1 pixel to the left and right, in both the old
+// and new fields, but takes vertical averages. These are odd
+// pixel addresses. Any chroma match will not be used. (YUY2)
+ MERGE4PIXavgH("-2(%%"XDI", %%"XCX")", "(%%"XDI", %%"XCX")", "(%%"XSI", %%"XCX")", "2(%%"XSI", %%"XCX")") // left, right
+ MERGE4PIXavgH("2(%%"XDI", %%"XCX")", "(%%"XDI", %%"XCX")", "(%%"XSI", %%"XCX")", "-2(%%"XSI", %%"XCX")") // right, left
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopTop.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopTop.inc
new file mode 100644
index 0000000000..275c7dd982
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopTop.inc
@@ -0,0 +1,254 @@
+// -*- c++ -*-
+
+unsigned char* pDest;
+const unsigned char* pSrcP;
+const unsigned char* pSrc;
+const unsigned char* pBob;
+const unsigned char* pBobP;
+
+// long is int32 on ARCH_368, int64 on ARCH_AMD64. Declaring it this way
+// saves a lot of xor's to delete 64bit garbage.
+
+#if defined(DBL_RESIZE) || defined(USE_FOR_DSCALER)
+long src_pitch2 = src_pitch; // even & odd lines are not interleaved in DScaler
+#else
+long src_pitch2 = 2 * src_pitch; // even & odd lines are interleaved in Avisynth
+#endif
+
+
+long dst_pitch2 = 2 * dst_pitch;
+long y;
+
+long Last8;
+
+ pSrc = pWeaveSrc; // points 1 weave line above
+ pSrcP = pWeaveSrcP; // "
+
+#ifdef DBL_RESIZE
+
+#ifdef USE_VERTICAL_FILTER
+ pDest = pWeaveDest + dst_pitch2;
+#else
+ pDest = pWeaveDest + 3*dst_pitch;
+#endif
+
+#else
+
+#ifdef USE_VERTICAL_FILTER
+ pDest = pWeaveDest + dst_pitch;
+#else
+ pDest = pWeaveDest + dst_pitch2;
+#endif
+
+#endif
+
+ if (TopFirst)
+ {
+ pBob = pCopySrc + src_pitch2; // remember one weave line just copied previously
+ pBobP = pCopySrcP + src_pitch2;
+ }
+ else
+ {
+ pBob = pCopySrc;
+ pBobP = pCopySrcP;
+ }
+
+#ifndef IS_C
+
+#ifndef _pBob
+#define _pBob "%0"
+#define _src_pitch2 "%1"
+#define _ShiftMask "%2"
+#define _pDest "%3"
+#define _dst_pitchw "%4"
+#define _Last8 "%5"
+#define _pSrc "%6"
+#define _pSrcP "%7"
+#define _pBobP "%8"
+#define _DiffThres "%9"
+#define _Min_Vals "%10"
+#define _Max_Vals "%11"
+#define _FOURS "%12"
+#define _TENS "%13"
+#define _ONES "%14"
+#define _UVMask "%15"
+#define _Max_Mov "%16"
+#define _YMask "%17"
+#define _oldbx "%18"
+#endif
+ Last8 = (rowsize-8);
+
+ for (y=1; y < FldHeight-1; y++)
+ {
+ long dst_pitchw = dst_pitch; // local stor so asm can ref
+ int64_t Max_Mov = 0x0404040404040404ull;
+ int64_t DiffThres = 0x0f0f0f0f0f0f0f0full;
+ int64_t YMask = 0x00ff00ff00ff00ffull; // keeps only luma
+ int64_t UVMask = 0xff00ff00ff00ff00ull; // keeps only chroma
+ int64_t TENS = 0x0a0a0a0a0a0a0a0aull;
+ int64_t FOURS = 0x0404040404040404ull;
+ int64_t ONES = 0x0101010101010101ull;
+ int64_t Min_Vals = 0x0000000000000000ull;
+ int64_t Max_Vals = 0x0000000000000000ull;
+ int64_t ShiftMask = 0xfefffefffefffeffull;
+
+ long oldbx = 0;
+
+ // pretend it's indented -->>
+ __asm__ __volatile__
+ (
+ // Loop general reg usage
+ //
+ // XAX - pBobP, then pDest
+ // XBX - pBob
+ // XCX - src_pitch2
+ // XDX - current offset
+ // XDI - prev weave pixels, 1 line up
+ // XSI - next weave pixels, 1 line up
+
+ // Save "XBX" (-fPIC)
+ MOVX" %%"XBX", "_oldbx"\n\t"
+
+ // simple bob first 8 bytes
+ MOVX" "_pBob", %%"XBX"\n\t"
+ MOVX" "_src_pitch2", %%"XCX"\n\t"
+
+#ifdef USE_VERTICAL_FILTER
+ "movq (%%"XBX"), %%mm0\n\t"
+ "movq (%%"XBX", %%"XCX"), %%mm1\n\t" //, qword ptr["XBX"+"XCX"]
+ "movq %%mm0, %%mm2\n\t"
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // halfway between
+ V_PAVGB ("%%mm0", "%%mm2", "%%mm3", _ShiftMask) // 1/4 way
+ V_PAVGB ("%%mm1", "%%mm2", "%%mm3", _ShiftMask) // 3/4 way
+ MOVX" "_pDest", %%"XDI"\n\t"
+ MOVX" "_dst_pitchw", %%"XAX"\n\t"
+ V_MOVNTQ ("(%%"XDI")", "%%mm0")
+ V_MOVNTQ ("(%%"XDI", %%"XAX")", "%%mm1") // qword ptr["XDI"+"XAX"], mm1
+
+ // simple bob last 8 bytes
+ MOVX" "_Last8", %%"XDX"\n\t"
+ LEAX" (%%"XBX", %%"XDX"), %%"XSI"\n\t" // ["XBX"+"XDX"]
+ "movq (%%"XSI"), %%mm0\n\t"
+ "movq (%%"XSI", %%"XCX"), %%mm1\n\t" // qword ptr["XSI"+"XCX"]
+ "movq %%mm0, %%mm2\n\t"
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // halfway between
+ V_PAVGB ("%%mm0", "%%mm2", "%%mm3", _ShiftMask) // 1/4 way
+ V_PAVGB ("%%mm1", "%%mm2", "%%mm3", _ShiftMask) // 3/4 way
+ ADDX" %%"XDX", %%"XDI"\n\t" // last 8 bytes of dest
+ V_MOVNTQ ("%%"XDI"", "%%mm0")
+ V_MOVNTQ ("(%%"XDI", %%"XAX")", "%%mm1") // qword ptr["XDI"+"XAX"], mm1)
+
+#else
+ "movq (%%"XBX"), %%mm0\n\t"
+ // pavgb mm0, qword ptr["XBX"+"XCX"]
+ V_PAVGB ("%%mm0", "(%%"XBX", %%"XCX")", "%%mm2", _ShiftMask) // qword ptr["XBX"+"XCX"], mm2, ShiftMask)
+ MOVX" "_pDest", %%"XDI"\n\t"
+ V_MOVNTQ ("(%%"XDI")", "%%mm0")
+
+ // simple bob last 8 bytes
+ MOVX" "_Last8", %%"XDX"\n\t"
+ LEAX" (%%"XBX", %%"XDX"), %%"XSI"\n\t" //"XSI", ["XBX"+"XDX"]
+ "movq (%%"XSI"), %%mm0\n\t"
+ // pavgb mm0, qword ptr["XSI"+"XCX"]
+ V_PAVGB ("%%mm0", "(%%"XSI", %%"XCX")", "%%mm2", _ShiftMask) // qword ptr["XSI"+"XCX"], mm2, ShiftMask)
+ V_MOVNTQ ("(%%"XDI", %%"XDX")", "%%mm0") // qword ptr["XDI"+"XDX"], mm0)
+#endif
+ // now loop and get the middle qwords
+ MOVX" "_pSrc", %%"XSI"\n\t"
+ MOVX" "_pSrcP", %%"XDI"\n\t"
+ MOVX" $8, %%"XDX"\n\t" // curr offset longo all lines
+
+ "1:\n\t"
+ MOVX" "_pBobP", %%"XAX"\n\t"
+ ADDX" $8, %%"XDI"\n\t"
+ ADDX" $8, %%"XSI"\n\t"
+ ADDX" $8, %%"XBX"\n\t"
+ ADDX" %%"XDX", %%"XAX"\n\t"
+
+#ifdef USE_STRANGE_BOB
+#include "StrangeBob.inc"
+#else
+#include "WierdBob.inc"
+#endif
+
+ // For non-SSE2:
+ // through out most of the rest of this loop we will maintain
+ // mm4 our min bob value
+ // mm5 best weave pixels so far
+ // mm6 our max Bob value
+ // mm7 best weighted pixel ratings so far
+
+ // We will keep a slight bias to using the weave pixels
+ // from the current location, by rating them by the min distance
+ // from the Bob value instead of the avg distance from that value.
+ // our best and only rating so far
+ "pcmpeqb %%mm7, %%mm7\n\t" // ffff, say we didn't find anything good yet
+
+#else
+ Last8 = (rowsize - 4);
+
+ for (y=1; y < FldHeight-1; y++)
+ {
+ #ifdef USE_STRANGE_BOB
+ long DiffThres = 0x0f;
+ #endif
+
+ #ifndef SKIP_SEARCH
+ long weave[2], MaxVals[2], MinVals[2];
+ #endif
+
+ long diff[2], best[2], avg[2], diff2[2], out[2], x;
+
+#ifdef USE_VERTICAL_FILTER
+ pDest[0] = (3 * pBob[0] + pBob[src_pitch2]) / 4;
+ pDest[1] = (3 * pBob[1] + pBob[src_pitch2 + 1]) / 4;
+ pDest[2] = (3 * pBob[2] + pBob[src_pitch2 + 2]) / 4;
+ pDest[3] = (3 * pBob[3] + pBob[src_pitch2 + 3]) / 4;
+ pDest[dst_pitchw] = (pBob[0] + 3 * pBob[src_pitch2]) / 4;
+ pDest[dst_pitchw + 1] = (pBob[1] + 3 * pBob[src_pitch2 + 1]) / 4;
+ pDest[dst_pitchw + 2] = (pBob[2] + 3 * pBob[src_pitch2 + 2]) / 4;
+ pDest[dst_pitchw + 3] = (pBob[3] + 3 * pBob[src_pitch2 + 3]) / 4;
+
+ // simple bob last byte
+ pDest[Last8] = (3 * pBob[Last8] + pBob[Last8 + src_pitch2]) / 4;
+ pDest[Last8 + 1] = (3 * pBob[Last8 + 1] + pBob[Last8 + src_pitch2 + 1]) / 4;
+ pDest[Last8 + 2] = (3 * pBob[Last8 + 2] + pBob[Last8 + src_pitch2 + 2]) / 4;
+ pDest[Last8 + 3] = (3 * pBob[Last8 + 3] + pBob[Last8 + src_pitch2 + 3]) / 4;
+ pDest[Last8 + src_pitch2] = (pBob[Last8] + 3 * pBob[Last8 + src_pitch2]) / 4;
+ pDest[Last8 + src_pitch2 + 1] = (pBob[Last8 + 1] + 3 * pBob[Last8 + src_pitch2 + 1]) / 4;
+ pDest[Last8 + src_pitch2 + 2] = (pBob[Last8 + 2] + 3 * pBob[Last8 + src_pitch2 + 2]) / 4;
+ pDest[Last8 + src_pitch2 + 3] = (pBob[Last8 + 3] + 3 * pBob[Last8 + src_pitch2 + 3]) / 4;
+#else
+ pDest[0] = (pBob[0] + pBob[src_pitch2 + 1]) / 2;
+ pDest[1] = (pBob[1] + pBob[src_pitch2 + 1]) / 2;
+ pDest[2] = (pBob[2] + pBob[src_pitch2 + 2]) / 2;
+ pDest[3] = (pBob[3] + pBob[src_pitch2 + 3]) / 2;
+
+ // simple bob last byte
+ pDest[Last8] = (pBob[Last8] + pBob[Last8 + src_pitch2]) / 2;
+ pDest[Last8 + 1] = (pBob[Last8 + 1] + pBob[Last8 + src_pitch2 + 1]) / 2;
+ pDest[Last8 + 2] = (pBob[Last8 + 2] + pBob[Last8 + src_pitch2 + 2]) / 2;
+ pDest[Last8 + 3] = (pBob[Last8 + 3] + pBob[Last8 + src_pitch2 + 3]) / 2;
+#endif
+
+ pBob += 4;
+ pBobP += 4;
+ pSrc += 4;
+ pSrcP += 4;
+
+ for (x=4; x < Last8; x += 2) {
+
+#ifdef USE_STRANGE_BOB
+#include "StrangeBob.inc"
+#else
+#include "WierdBob.inc"
+#endif
+
+ // We will keep a slight bias to using the weave pixels
+ // from the current location, by rating them by the min distance
+ // from the Bob value instead of the avg distance from that value.
+ // our best and only rating so far
+ diff[0] = diff[1] = 255;
+
+
+#endif
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopVA.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopVA.inc
new file mode 100644
index 0000000000..3e3d19b5c9
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopVA.inc
@@ -0,0 +1,6 @@
+// -*- c++ -*-
+
+// Searches the center vertical line above center and below, in both the old
+// and new fields, but takes averages. These are even pixel addresses.
+ MERGE4PIXavg("(%%"XDI", %%"XCX", 2)", "(%%"XSI")") // down, up
+ MERGE4PIXavg("(%%"XDI")", "(%%"XSI", %%"XCX", 2)") // up, down
diff --git a/gst/deinterlace/tvtime/tomsmocomp/SearchLoopVAH.inc b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopVAH.inc
new file mode 100644
index 0000000000..33155bc1df
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/SearchLoopVAH.inc
@@ -0,0 +1,6 @@
+// -*- c++ -*-
+
+// Searches the center vertical line above center and below, in both the old
+// and new fields, but takes averages. These are even pixel addresses.
+ MERGE4PIXavgH("(%%"XDI", %%"XCX", 2)", "(%%"XDI", %%"XCX")", "(%%"XSI", %%"XCX")", "(%%"XSI")") // down, up
+ MERGE4PIXavgH("(%%"XDI")", "(%%"XDI", %%"XCX")", "(%%"XSI", %%"XCX")", "(%%"XSI", %%"XCX", 2)") // up, down
diff --git a/gst/deinterlace/tvtime/tomsmocomp/StrangeBob.inc b/gst/deinterlace/tvtime/tomsmocomp/StrangeBob.inc
new file mode 100644
index 0000000000..a61a0cd9e9
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/StrangeBob.inc
@@ -0,0 +1,435 @@
+// -*- c++ -*-
+
+ // First, get and save our possible Bob values
+ // Assume our pixels are laid out as follows with x the calc'd bob value
+ // and the other pixels are from the current field
+ //
+ // j a b c k current field
+ // x calculated line
+ // m d e f n current field
+ //
+ // we calc the bob value luma value as:
+ // if |j - n| < Thres && |a - m| > Thres
+ // avg(j,n)
+ // end if
+ // if |k - m| < Thres && |c - n| > Thres
+ // avg(k,m)
+ // end if
+ // if |c - d| < Thres && |b - f| > Thres
+ // avg(c,d)
+ // end if
+ // if |a - f| < Thres && |b - d| > Thres
+ // avg(a,f)
+ // end if
+ // if |b - e| < Thres
+ // avg(b,e)
+ // end if
+ // pickup any thing not yet set with avg(b,e)
+
+#ifndef IS_C
+
+ // j, n
+ "pxor %%mm5, %%mm5\n\t"
+ "pxor %%mm6, %%mm6\n\t"
+ "pxor %%mm7, %%mm7\n\t"
+
+ "movq -2(%%"XBX"), %%mm0\n\t" // value a from top left
+ "movq -4(%%"XBX", %%"XCX"), %%mm1\n\t" // value m from bottom right
+
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(a,m)
+
+ "psubusb "_DiffThres", %%mm3\n\t" // nonzero where abs(a,m) > Thres else 0
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where abs(a,m) < Thres, else 00
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where abs(a,m) > Thres, else 00
+
+
+ "movq -4(%%"XBX"), %%mm0\n\t" // value j
+ "movq 4(%%"XBX", %%"XCX"), %%mm1\n\t" // value n
+ "movq %%mm0, %%mm2\n\t"
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(j,n)
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm0\n\t"
+ "psubusb %%mm3, %%mm1\n\t"
+ "por %%mm1, %%mm0\n\t" // abs(j,n)
+
+ "movq %%mm0, %%mm1\n\t"
+ "psubusb "_DiffThres", %%mm1\n\t" // nonzero where abs(j,n) > Thres else 0
+ "pxor %%mm3, %%mm3\n\t"
+ "pcmpeqb %%mm3, %%mm1\n\t" // now ff where abs(j,n) < Thres, else 00
+
+ "pand %%mm4, %%mm1\n\t"
+ "pand %%mm1, %%mm2\n\t"
+ "pand %%mm1, %%mm0\n\t"
+
+ "movq %%mm1, %%mm3\n\t"
+ "pxor %%mm5, %%mm3\n\t"
+ "pand %%mm3, %%mm6\n\t"
+ "pand %%mm3, %%mm7\n\t"
+ "pand %%mm3, %%mm5\n\t"
+
+ "por %%mm1, %%mm5\n\t"
+ "por %%mm2, %%mm6\n\t"
+ "por %%mm0, %%mm7\n\t"
+
+ // k & m
+ "movq 2(%%"XBX"), %%mm0\n\t" // value c from top left
+ "movq 4(%%"XBX", %%"XCX"), %%mm1\n\t" // value n from bottom right
+
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(c,n)
+
+ "psubusb "_DiffThres", %%mm3\n\t" // nonzero where abs(c,n) > Thres else 0
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where abs(c,n) < Thres, else 00
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where abs(c,n) > Thres, else 00
+
+
+ "movq 4(%%"XBX"), %%mm0\n\t" // value k
+ "movq -4(%%"XBX", %%"XCX"), %%mm1\n\t" // value m
+ "movq %%mm0, %%mm2\n\t"
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(k,m)
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm0\n\t"
+ "psubusb %%mm3, %%mm1\n\t"
+ "por %%mm1, %%mm0\n\t" // abs(k,m)
+
+ "movq %%mm0, %%mm1\n\t"
+ "psubusb "_DiffThres", %%mm1\n\t" // nonzero where abs(k,m) > Thres else 0
+ "pxor %%mm3, %%mm3\n\t"
+ "pcmpeqb %%mm3, %%mm1\n\t" // now ff where abs(k,m) < Thres, else 00
+
+ "pand %%mm4, %%mm1\n\t"
+
+ "pand %%mm1, %%mm2\n\t"
+ "pand %%mm1, %%mm0\n\t"
+
+ "movq %%mm1, %%mm3\n\t"
+ "pxor %%mm5, %%mm3\n\t"
+ "pand %%mm3, %%mm6\n\t"
+ "pand %%mm3, %%mm7\n\t"
+ "pand %%mm3, %%mm5\n\t"
+
+ "por %%mm1, %%mm5\n\t"
+ "por %%mm2, %%mm6\n\t"
+ "por %%mm0, %%mm7\n\t"
+
+
+ // c & d
+ "movq (%%"XBX"), %%mm0\n\t" // value b from top left
+ "movq 2(%%"XBX", %%"XCX"), %%mm1\n\t" // value f from bottom right
+
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(b,f)
+
+ "psubusb "_DiffThres", %%mm3\n\t" // nonzero where abs(b,f) > Thres else 0
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where abs(b,f) < Thres, else 00
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where abs(b,f) > Thres, else 00
+
+ "movq 2(%%"XBX"), %%mm0\n\t" // value c
+ "movq -2(%%"XBX", %%"XCX"), %%mm1\n\t" // value d
+ "movq %%mm0, %%mm2\n\t"
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(c,d)
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm0\n\t"
+ "psubusb %%mm3, %%mm1\n\t"
+ "por %%mm1, %%mm0\n\t" // abs(c,d)
+
+ "movq %%mm0, %%mm1\n\t"
+ "psubusb "_DiffThres", %%mm1\n\t" // nonzero where abs(c,d) > Thres else 0
+ "pxor %%mm3, %%mm3\n\t"
+ "pcmpeqb %%mm3, %%mm1\n\t" // now ff where abs(c,d) < Thres, else 00
+
+ "pand %%mm4, %%mm1\n\t"
+
+ "pand %%mm1, %%mm2\n\t"
+ "pand %%mm1, %%mm0\n\t"
+
+ "movq %%mm1, %%mm3\n\t"
+ "pxor %%mm5, %%mm3\n\t"
+ "pand %%mm3, %%mm6\n\t"
+ "pand %%mm3, %%mm7\n\t"
+ "pand %%mm3, %%mm5\n\t"
+
+ "por %%mm1, %%mm5\n\t"
+ "por %%mm2, %%mm6\n\t"
+ "por %%mm0, %%mm7\n\t"
+
+ // a & f
+ "movq (%%"XBX"), %%mm0\n\t" // value b from top left
+ "movq -2(%%"XBX", %%"XCX"), %%mm1\n\t" // value d from bottom right
+
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(b,d)
+
+ "psubusb "_DiffThres", %%mm3\n\t" // nonzero where abs(b,d) > Thres else 0
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where abs(b,d) < Thres, else 00
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where abs(b,d) > Thres, else 00
+
+ "movq -2(%%"XBX"), %%mm0\n\t" // value a
+ "movq 2(%%"XBX", %%"XCX"), %%mm1\n\t" // value f
+ "movq %%mm0, %%mm2\n\t"
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(a,f)
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm0\n\t"
+ "psubusb %%mm3, %%mm1\n\t"
+ "por %%mm1, %%mm0\n\t" // abs(a,f)
+
+ "movq %%mm0, %%mm1\n\t"
+ "psubusb "_DiffThres", %%mm1\n\t" // nonzero where abs(a,f) > Thres else 0
+ "pxor %%mm3, %%mm3\n\t"
+ "pcmpeqb %%mm3, %%mm1\n\t" // now ff where abs(a,f) < Thres, else 00
+
+ "pand %%mm4, %%mm1\n\t"
+
+ "pand %%mm1, %%mm2\n\t"
+ "pand %%mm1, %%mm0\n\t"
+
+ "movq %%mm1, %%mm3\n\t"
+ "pxor %%mm5, %%mm3\n\t"
+ "pand %%mm3, %%mm6\n\t"
+ "pand %%mm3, %%mm7\n\t"
+ "pand %%mm3, %%mm5\n\t"
+
+ "por %%mm1, %%mm5\n\t"
+ "por %%mm2, %%mm6\n\t"
+ "por %%mm0, %%mm7\n\t"
+
+ "pand "_YMask", %%mm5\n\t" // mask out chroma from here
+ "pand "_YMask", %%mm6\n\t" // mask out chroma from here
+ "pand "_YMask", %%mm7\n\t" // mask out chroma from here
+
+ // b,e
+ "movq (%%"XBX"), %%mm0\n\t" // value b from top
+ "movq (%%"XBX", %%"XCX"), %%mm1\n\t" // value e from bottom
+ "movq %%mm0, %%mm2\n\t"
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(b,e)
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm0\n\t"
+ "psubusb %%mm3, %%mm1\n\t"
+ "por %%mm1, %%mm0\n\t" // abs(b,e)
+
+ "movq %%mm0, %%mm1\n\t"
+ "psubusb "_DiffThres", %%mm1\n\t" // nonzero where abs(b,e) > Thres else 0
+ "pxor %%mm3, %%mm3\n\t"
+ "pcmpeqb %%mm3, %%mm1\n\t" // now ff where abs(b,e) < Thres, else 00
+
+ "pand %%mm1, %%mm2\n\t"
+ "pand %%mm1, %%mm0\n\t"
+
+ "movq %%mm1, %%mm3\n\t"
+ "pxor %%mm5, %%mm3\n\t"
+ "pand %%mm3, %%mm6\n\t"
+ "pand %%mm3, %%mm7\n\t"
+ "pand %%mm3, %%mm5\n\t"
+
+ "por %%mm1, %%mm5\n\t"
+ "por %%mm2, %%mm6\n\t"
+ "por %%mm0, %%mm7\n\t"
+
+ // bob in any leftovers
+ "movq (%%"XBX"), %%mm0\n\t" // value b from top
+ "movq (%%"XBX", %%"XCX"), %%mm1\n\t" // value e from bottom
+
+
+// We will also calc here the max/min values to later limit comb
+// so the max excursion will not exceed the Max_Comb constant
+
+#ifdef SKIP_SEARCH
+ "movq %%mm0, %%mm2\n\t"
+// pminub %%mm2, %%mm1
+ V_PMINUB ("%%mm2", "%%mm1", "%%mm4")
+
+// pmaxub %%mm6, %%mm2 // clip our current results so far to be above this
+ V_PMAXUB ("%%mm6", "%%mm2")
+ "movq %%mm0, %%mm2\n\t"
+ V_PMAXUB ("%%mm2", "%%mm1")
+// pminub %%mm6, %%mm2 // clip our current results so far to be below this
+ V_PMINUB ("%%mm6", "%%mm2", "%%mm4")
+
+#else
+ "movq %%mm0, %%mm2\n\t"
+ "movq (%%"XAX"), %%mm4\n\t"
+ "psubusb %%mm4, %%mm2\n\t"
+ "psubusb %%mm0, %%mm4\n\t"
+ "por %%mm2, %%mm4\n\t" // abs diff
+
+ "movq %%mm1, %%mm2\n\t"
+ "movq (%%"XAX", %%"XCX"), %%mm3\n\t"
+ "psubusb %%mm3, %%mm2\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "por %%mm2, %%mm3\n\t" // abs diff
+// pmaxub %%mm3, %%mm4 // top or bottom pixel moved most
+ V_PMAXUB ("%%mm3", "%%mm4") // top or bottom pixel moved most
+ "psubusb "_DiffThres", %%mm3\n\t" // moved more than allowed? or goes to 0?
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where low motion, else high motion
+
+ "movq %%mm0, %%mm2\n\t"
+// pminub %%mm2, %%mm1
+ V_PMINUB ("%%mm2", "%%mm1", "%%mm4")
+
+// pmaxub %%mm6, %%mm2 // clip our current results so far to be above this
+ V_PMAXUB ("%%mm6", "%%mm2")
+
+ "psubusb %%mm3, %%mm2\n\t" // maybe decrease it to 0000.. if no surround motion
+ "movq %%mm2, "_Min_Vals"\n\t"
+
+ "movq %%mm0, %%mm2\n\t"
+ V_PMAXUB ("%%mm2", "%%mm1")
+// pminub %%mm6, %%mm2 // clip our current results so far to be below this
+ V_PMINUB ("%%mm6", "%%mm2", "%%mm4")
+ "paddusb %%mm3, %%mm2\n\t" // maybe increase it to ffffff if no surround motion
+ "movq %%mm2, "_Max_Vals"\n\t"
+#endif
+
+ "movq %%mm0, %%mm2\n\t"
+// pavgb %%mm2, %%mm1 // avg(b,e)
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(b,e)
+
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(b,e)
+ "movq %%mm3, %%mm1\n\t" // keep copy of diffs
+
+ "pxor %%mm4, %%mm4\n\t"
+ "psubusb %%mm7, %%mm3\n\t" // nonzero where new weights bigger, else 0
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where new better, else 00
+ "pcmpeqb %%mm0, %%mm0\n\t"
+ "pandn %%mm0, %%mm5\n\t"
+ "por %%mm5, %%mm3\n\t"
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where old better, else 00
+
+ "pand %%mm3, %%mm1\n\t"
+ "pand %%mm3, %%mm2\n\t"
+
+ "pand %%mm4, %%mm6\n\t"
+ "pand %%mm4, %%mm7\n\t"
+
+ "por %%mm2, %%mm6\n\t" // our x2 value
+ "por %%mm1, %%mm7\n\t" // our x2 diffs
+ "movq %%mm7, %%mm4\n\t" // save as bob uncertainty indicator
+
+#else
+
+ diff[0] = -1;
+ diff[1] = -1;
+ best[0] = 0;
+ best[1] = 0;
+ // j, n
+ if (ABS (pBob[-2] - pBob[src_pitch2 - 4]) < DiffThres &&
+ ABS (pBob[-4] - pBob[src_pitch2 + 4]) > DiffThres) {
+ best[0] = (pBob[-2] + pBob[src_pitch2 - 4]) / 2;
+ diff[0] = ABS (pBob[-2] - pBob[src_pitch2 - 4]);
+ }
+ if (ABS (pBob[-1] - pBob[src_pitch2 - 3]) < DiffThres &&
+ ABS (pBob[-3] - pBob[src_pitch2 + 5]) > DiffThres) {
+ best[1] = (pBob[-1] + pBob[src_pitch2 - 3]) / 2;
+ diff[1] = ABS (pBob[-1] - pBob[src_pitch2 - 3]);
+ }
+
+ // k & m
+ if (ABS (pBob[2] - pBob[src_pitch2 + 4]) < DiffThres &&
+ ABS (pBob[4] - pBob[src_pitch2 - 4]) > DiffThres) {
+ best[0] = (pBob[4] + pBob[src_pitch2 - 4]) / 2;
+ diff[0] = ABS (pBob[4] - pBob[src_pitch2 - 4]);
+ }
+
+ if (ABS (pBob[3] - pBob[src_pitch2 + 5]) < DiffThres &&
+ ABS (pBob[5] - pBob[src_pitch2 - 3]) > DiffThres) {
+ best[1] = (pBob[5] + pBob[src_pitch2 - 3]) / 2;
+ diff[1] = ABS (pBob[5] - pBob[src_pitch2 - 3]);
+ }
+
+ // c & d
+ if (ABS (pBob[0] - pBob[src_pitch2 + 2]) < DiffThres &&
+ ABS (pBob[2] - pBob[src_pitch2 - 2]) > DiffThres) {
+ best[0] = (pBob[2] + pBob[src_pitch2 - 2]) / 2;
+ diff[0] = ABS (pBob[2] - pBob[src_pitch2 - 2]);
+ }
+
+ if (ABS (pBob[1] - pBob[src_pitch2 + 3]) < DiffThres &&
+ ABS (pBob[3] - pBob[src_pitch2 - 1]) > DiffThres) {
+ best[1] = (pBob[3] + pBob[src_pitch2 - 1]) / 2;
+ diff[1] = ABS (pBob[3] - pBob[src_pitch2 - 1]);
+ }
+
+ // a & f
+ if (ABS (pBob[0] - pBob[src_pitch2 - 2]) < DiffThres &&
+ ABS (pBob[-2] - pBob[src_pitch2 + 2]) > DiffThres) {
+ best[0] = (pBob[-2] + pBob[src_pitch2 + 2]) / 2;
+ diff[0] = ABS (pBob[-2] - pBob[src_pitch2 + 2]);
+ }
+
+ if (ABS (pBob[1] - pBob[src_pitch2 - 1]) < DiffThres &&
+ ABS (pBob[-1] - pBob[src_pitch2 + 3]) > DiffThres) {
+ best[1] = (pBob[-1] + pBob[src_pitch2 + 3]) / 2;
+ diff[1] = ABS (pBob[-1] - pBob[src_pitch2 + 3]);
+ }
+
+ // b,e
+ if (ABS (pBob[0] - pBob[src_pitch2]) < DiffThres) {
+ best[0] = (pBob[0] + pBob[src_pitch2]) / 2;
+ diff[0] = ABS (pBob[0] - pBob[src_pitch2]);
+ }
+
+ if (ABS (pBob[1] - pBob[src_pitch2 + 1]) < DiffThres) {
+ best[1] = (pBob[1] + pBob[src_pitch2 + 1]) / 2;
+ diff[1] = ABS (pBob[1] - pBob[src_pitch2 + 1]);
+ }
+
+
+// We will also calc here the max/min values to later limit comb
+// so the max excursion will not exceed the Max_Comb constant
+
+#ifdef SKIP_SEARCH
+ best[0] = CLAMP (best[0], MIN (pBob[src_pitch2], pBob[0]), MAX (pBob[src_pitch2], pBob[0]));
+ best[1] = CLAMP (best[1], MIN (pBob[src_pitch2 + 1], pBob[1]), MAX (pBob[src_pitch2 + 1], pBob[1]));
+#else
+ mov[0] = MAX (ABS (pBob[0] - pBobP[0]), ABS (pBob[src_pitch2] - pBobP[src_pitch2]));
+ mov[1] = MAX (ABS (pBob[1] - pBobP[1]), ABS (pBob[src_pitch2 + 1] - pBobP[src_pitch2 + 1]));
+
+ MinVals[0] = 0;
+ MinVals[1] = 0;
+ MaxVals[0] = 255;
+ MaxVals[1] = 255;
+ if (mov[0] > DiffThres) {
+ MinVals[0] = MAX (MIN (pBob[0], pBob[src_pitch2]), best[0]);
+ MaxVals[0] = MIN (MAX (pBob[0], pBob[src_pitch2]), best[0]);
+ }
+
+ if (mov[1] > DiffThres) {
+ MinVals[1] = MAX (MIN (pBob[1], pBob[src_pitch2+1]), best[1]);
+ MaxVals[1] = MIN (MAX (pBob[1], pBob[src_pitch2+1]), best[1]);
+ }
+
+ best[0] = CLAMP (best[0], MIN (pBob[src_pitch2], pBob[0]), MAX (pBob[src_pitch2], pBob[0]));
+ best[1] = CLAMP (best[1], MIN (pBob[src_pitch2 + 1], pBob[1]), MAX (pBob[src_pitch2 + 1], pBob[1]));
+#endif
+ avg[0] = (pBob[src_pitch2] + pBob[0]) / 2;
+ avg[1] = (pBob[src_pitch2 + 1] + pBob[1]) / 2;
+ diff2[0] = ABS (pBob[src_pitch2 + 1] - pBob[1]);
+ diff2[1] = ABS (pBob[src_pitch2 + 1] - pBob[1]);
+
+ if (diff[0] == -1 || diff2[0] < diff[0]) {
+ best[0] = avg[0];
+ diff[0] = diff2[0];
+ }
+
+ if (diff[1] == -1 || diff2[1] < diff[1]) {
+ best[1] = avg[1];
+ diff[1] = diff2[1];
+ }
+#endif
diff --git a/gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll.inc b/gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll.inc
new file mode 100644
index 0000000000..be58bba9f5
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll.inc
@@ -0,0 +1,266 @@
+/*
+ * GStreamer
+ * Copyright (c) 2002 Tom Barry All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Tom Barry.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+
+#ifndef TopFirst
+#define TopFirst IsOdd
+#endif
+
+#ifdef SEFUNC
+#undef SEFUNC
+#endif
+
+#if defined(IS_MMXEXT)
+#define SEFUNC(x) Search_Effort_MMXEXT_##x(int src_pitch, int dst_pitch, int rowsize, const unsigned char *pWeaveSrc, const unsigned char *pWeaveSrcP, unsigned char *pWeaveDest, int IsOdd, const unsigned char *pCopySrc, const unsigned char *pCopySrcP, int FldHeight)
+#elif defined(IS_3DNOW)
+#define SEFUNC(x) Search_Effort_3DNOW_##x(int src_pitch, int dst_pitch, int rowsize, const unsigned char *pWeaveSrc, const unsigned char *pWeaveSrcP, unsigned char *pWeaveDest, int IsOdd, const unsigned char *pCopySrc, const unsigned char *pCopySrcP, int FldHeight)
+#elif defined(IS_MMX)
+#define SEFUNC(x) Search_Effort_MMX_##x(int src_pitch, int dst_pitch, int rowsize, const unsigned char *pWeaveSrc, const unsigned char *pWeaveSrcP, unsigned char *pWeaveDest, int IsOdd, const unsigned char *pCopySrc, const unsigned char *pCopySrcP, int FldHeight)
+#else
+#define SEFUNC(x) Search_Effort_C_##x(int src_pitch, int dst_pitch, int rowsize, const unsigned char *pWeaveSrc, const unsigned char *pWeaveSrcP, unsigned char *pWeaveDest, int IsOdd, const unsigned char *pCopySrc, const unsigned char *pCopySrcP, int FldHeight)
+#endif
+
+#include "TomsMoCompAll2.inc"
+
+#define USE_STRANGE_BOB
+
+#include "TomsMoCompAll2.inc"
+
+#undef USE_STRANGE_BOB
+
+#undef SEFUNC
+#if defined(IS_MMXEXT)
+#define SEFUNC(x) Search_Effort_MMXEXT_##x(src_pitch, dst_pitch, rowsize, pWeaveSrc, pWeaveSrcP, pWeaveDest, IsOdd, pCopySrc, pCopySrcP, FldHeight)
+#elif defined(IS_3DNOW)
+#define SEFUNC(x) Search_Effort_3DNOW_##x(src_pitch, dst_pitch, rowsize, pWeaveSrc, pWeaveSrcP, pWeaveDest, IsOdd, pCopySrc, pCopySrcP, FldHeight)
+#elif defined(IS_MMX)
+#define SEFUNC(x) Search_Effort_MMX_##x(src_pitch, dst_pitch, rowsize, pWeaveSrc, pWeaveSrcP, pWeaveDest, IsOdd, pCopySrc, pCopySrcP, FldHeight)
+#else
+#define SEFUNC(x) Search_Effort_C_##x(src_pitch, dst_pitch, rowsize, pWeaveSrc, pWeaveSrcP, pWeaveDest, IsOdd, pCopySrc, pCopySrcP, FldHeight)
+#endif
+
+static void FUNCT_NAME(GstDeinterlaceMethod *d_method,
+ const GstDeinterlaceField* history, guint history_count,
+ GstVideoFrame *outframe, int cur_field_idx)
+{
+ GstDeinterlaceMethodTomsMoComp *self = GST_DEINTERLACE_METHOD_TOMSMOCOMP (d_method);
+ glong SearchEffort = self->search_effort;
+ gint UseStrangeBob = self->strange_bob;
+ gint IsOdd;
+ const guint8 *pWeaveSrc;
+ const guint8 *pWeaveSrcP;
+ guint8 *pWeaveDest;
+ const guint8 *pCopySrc;
+ const guint8 *pCopySrcP;
+ guint8 *pCopyDest;
+ gint src_pitch;
+ gint dst_pitch;
+ gint rowsize;
+ gint FldHeight;
+
+ if (cur_field_idx + 2 > history_count || cur_field_idx < 1) {
+ GstDeinterlaceMethod *backup_method;
+
+ backup_method = g_object_new (gst_deinterlace_method_linear_get_type(),
+ NULL);
+
+ gst_deinterlace_method_setup (backup_method, d_method->vinfo);
+ gst_deinterlace_method_deinterlace_frame (backup_method,
+ history, history_count, outframe, cur_field_idx);
+
+ g_object_unref (backup_method);
+ return;
+ }
+
+ /* double stride do address just every odd/even scanline */
+ src_pitch = GST_VIDEO_FRAME_PLANE_STRIDE (outframe, 0) * 2;
+ dst_pitch = GST_VIDEO_FRAME_PLANE_STRIDE (outframe, 0);
+ rowsize = GST_VIDEO_FRAME_PLANE_STRIDE (outframe, 0);
+
+ FldHeight = GST_VIDEO_INFO_HEIGHT (self->parent.vinfo) / 2;
+
+ pCopySrc = GST_VIDEO_FRAME_PLANE_DATA (history[history_count-1].frame, 0);
+ if (history[history_count - 1].flags & PICTURE_INTERLACED_BOTTOM)
+ pCopySrc += GST_VIDEO_FRAME_PLANE_STRIDE (history[history_count-1].frame, 0);
+ pCopySrcP = GST_VIDEO_FRAME_PLANE_DATA (history[history_count-3].frame, 0);
+ if (history[history_count - 3].flags & PICTURE_INTERLACED_BOTTOM)
+ pCopySrcP += GST_VIDEO_FRAME_PLANE_STRIDE (history[history_count-3].frame, 0);
+ pWeaveSrc = GST_VIDEO_FRAME_PLANE_DATA (history[history_count-2].frame, 0);
+ if (history[history_count - 2].flags & PICTURE_INTERLACED_BOTTOM)
+ pWeaveSrc += GST_VIDEO_FRAME_PLANE_STRIDE (history[history_count-2].frame, 0);
+ pWeaveSrcP = GST_VIDEO_FRAME_PLANE_DATA (history[history_count-4].frame, 0);
+ if (history[history_count - 4].flags & PICTURE_INTERLACED_BOTTOM)
+ pWeaveSrcP += GST_VIDEO_FRAME_PLANE_STRIDE (history[history_count-4].frame, 0);
+
+ /* use bottom field and interlace top field */
+ if (history[history_count-2].flags == PICTURE_INTERLACED_BOTTOM) {
+ IsOdd = 1;
+
+ // if we have an odd field we copy an even field and weave an odd field
+ pCopyDest = GST_VIDEO_FRAME_PLANE_DATA (outframe, 0);
+ pWeaveDest = pCopyDest + dst_pitch;
+ }
+ /* do it vice verca */
+ else {
+
+ IsOdd = 0;
+ // if we have an even field we copy an odd field and weave an even field
+ pCopyDest = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (outframe, 0) + dst_pitch;
+ pWeaveDest = GST_VIDEO_FRAME_PLANE_DATA (outframe, 0);
+ }
+
+
+ // copy 1st and last weave lines
+ Fieldcopy(pWeaveDest, pCopySrc, rowsize,
+ 1, dst_pitch*2, src_pitch);
+ Fieldcopy(pWeaveDest+(FldHeight-1)*dst_pitch*2,
+ pCopySrc+(FldHeight-1)*src_pitch, rowsize,
+ 1, dst_pitch*2, src_pitch);
+
+#ifdef USE_VERTICAL_FILTER
+ // Vertical Filter currently not implemented for DScaler !!
+ // copy 1st and last lines the copy field
+ Fieldcopy(pCopyDest, pCopySrc, rowsize,
+ 1, dst_pitch*2, src_pitch);
+ Fieldcopy(pCopyDest+(FldHeight-1)*dst_pitch*2,
+ pCopySrc+(FldHeight-1)*src_pitch, rowsize,
+ 1, dst_pitch*2, src_pitch);
+#else
+
+ // copy all of the copy field
+ Fieldcopy(pCopyDest, pCopySrc, rowsize,
+ FldHeight, dst_pitch*2, src_pitch);
+#endif
+ // then go fill in the hard part, being variously lazy depending upon
+ // SearchEffort
+
+ if(!UseStrangeBob) {
+ if (SearchEffort == 0)
+ {
+ SEFUNC(0);
+ }
+ else if (SearchEffort <= 1)
+ {
+ SEFUNC(1);
+ }
+ /* else if (SearchEffort <= 2)
+ {
+ SEFUNC(2);
+ }
+ */
+ else if (SearchEffort <= 3)
+ {
+ SEFUNC(3);
+ }
+ else if (SearchEffort <= 5)
+ {
+ SEFUNC(5);
+ }
+ else if (SearchEffort <= 9)
+ {
+ SEFUNC(9);
+ }
+ else if (SearchEffort <= 11)
+ {
+ SEFUNC(11);
+ }
+ else if (SearchEffort <= 13)
+ {
+ SEFUNC(13);
+ }
+ else if (SearchEffort <= 15)
+ {
+ SEFUNC(15);
+ }
+ else if (SearchEffort <= 19)
+ {
+ SEFUNC(19);
+ }
+ else if (SearchEffort <= 21)
+ {
+ SEFUNC(21);
+ }
+ else
+ {
+ SEFUNC(Max);
+ }
+ }
+ else
+ {
+ if (SearchEffort == 0)
+ {
+ SEFUNC(0SB);
+ }
+ else if (SearchEffort <= 1)
+ {
+ SEFUNC(1SB);
+ }
+ /* else if (SearchEffort <= 2)
+ {
+ SEFUNC(2SB);
+ }
+ */
+ else if (SearchEffort <= 3)
+ {
+ SEFUNC(3SB);
+ }
+ else if (SearchEffort <= 5)
+ {
+ SEFUNC(5SB);
+ }
+ else if (SearchEffort <= 9)
+ {
+ SEFUNC(9SB);
+ }
+ else if (SearchEffort <= 11)
+ {
+ SEFUNC(11SB);
+ }
+ else if (SearchEffort <= 13)
+ {
+ SEFUNC(13SB);
+ }
+ else if (SearchEffort <= 15)
+ {
+ SEFUNC(15SB);
+ }
+ else if (SearchEffort <= 19)
+ {
+ SEFUNC(19SB);
+ }
+ else if (SearchEffort <= 21)
+ {
+ SEFUNC(21SB);
+ }
+ else
+ {
+ SEFUNC(MaxSB);
+ }
+ }
+
+#if defined(BUILD_X86_ASM) && !defined(IS_C)
+ __asm__ __volatile__("emms");
+#endif
+}
diff --git a/gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll2.inc b/gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll2.inc
new file mode 100644
index 0000000000..f6344eabb3
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/TomsMoCompAll2.inc
@@ -0,0 +1,243 @@
+// -*- c++ -*-
+
+// SEARCH_EFFORT_FUNC names one search-effort routine per effort level.
+// The #undef guard shows this header is included more than once; in the
+// USE_STRANGE_BOB pass the macro pastes an "SB" suffix onto the SEFUNC
+// name, which matches the SEFUNC(nSB) dispatch seen in TomsMoCompAll.inc.
+#ifdef SEARCH_EFFORT_FUNC
+#undef SEARCH_EFFORT_FUNC
+#endif
+
+#ifdef USE_STRANGE_BOB
+#define SEARCH_EFFORT_FUNC(n) SEFUNC(n##SB)
+#else
+#define SEARCH_EFFORT_FUNC(n) SEFUNC(n)
+#endif
+
+// Search effort 0: no motion search at all.  SKIP_SEARCH makes the loop
+// body emit the plain bob value prepared in SearchLoopTop.inc.
+static inline int SEARCH_EFFORT_FUNC(0) // we don't try at all ;-)
+{
+    //see Search_Effort_Max() for comments
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+}
+
+// Search effort 1: only the centre position (SearchLoop0A.inc) is tested,
+// after RESET_CHROMA discards the chroma part of the running diff.  The
+// plain C build (IS_C) has no search loops and falls back to SKIP_SEARCH,
+// i.e. the same plain-bob output as effort 0.
+static inline int SEARCH_EFFORT_FUNC(1)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+    //see Search_Effort_Max() for comments
+#include "SearchLoopTop.inc"
+    RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+static inline int SEARCH_EFFORT_FUNC(3)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see Search_Effort_Max() for comments
+#include "SearchLoopTop.inc"
+#include "SearchLoopOddA2.inc"
+ RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+static inline int SEARCH_EFFORT_FUNC(5)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see Search_Effort_Max() for comments
+#include "SearchLoopTop.inc"
+#include "SearchLoopOddA2.inc"
+#include "SearchLoopOddAH2.inc"
+ RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+// 3x3 search
+static inline int SEARCH_EFFORT_FUNC(9)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see SearchEffortMax() for comments
+#include "SearchLoopTop.inc"
+#include "SearchLoopOddA.inc"
+ RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoopVA.inc"
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+// Search 9 with 2 H-half pels added
+static inline int SEARCH_EFFORT_FUNC(11)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see SearchEffortMax() for comments
+#include "SearchLoopTop.inc"
+#include "SearchLoopOddA.inc"
+#include "SearchLoopOddAH2.inc"
+ RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoopVA.inc"
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+// Search 11 with 2 V-half pels added
+static inline int SEARCH_EFFORT_FUNC(13)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see SearchEffortMax() for comments
+#include "SearchLoopTop.inc"
+#include "SearchLoopOddA.inc"
+#include "SearchLoopOddAH2.inc"
+ RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoopVAH.inc"
+#include "SearchLoopVA.inc"
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+// 5x3
+static inline int SEARCH_EFFORT_FUNC(15)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see SearchEffortMax() for comments
+#include "SearchLoopTop.inc"
+#include "SearchLoopOddA.inc"
+ RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoopEdgeA.inc"
+#include "SearchLoopVA.inc"
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+// 5x3 + 4 half pels
+static inline int SEARCH_EFFORT_FUNC(19)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see SearchEffortMax() for comments
+#include "SearchLoopTop.inc"
+#include "SearchLoopOddA.inc"
+#include "SearchLoopOddAH2.inc"
+ RESET_CHROMA // pretend chroma diffs was 255 each
+#include "SearchLoopEdgeA.inc"
+#include "SearchLoopVAH.inc"
+#include "SearchLoopVA.inc"
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+// Handle one 4x1 block of pixels
+// Search a 7x3 area, no half pels
+
+static inline int SEARCH_EFFORT_FUNC(21)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see SearchLoopTop.inc for comments
+#include "SearchLoopTop.inc"
+
+ // odd addresses -- the pixels at odd address wouldn't generate
+ // good luma values but we will mask those off
+
+#include "SearchLoopOddA6.inc" // 4 odd v half pels, 3 to left & right
+#include "SearchLoopOddA.inc" // 6 odd pels, 1 to left & right
+
+ RESET_CHROMA // pretend chroma diffs was 255 each
+
+ // even addresses -- use both luma and chroma from these
+ // search averages of 2 pixels left and right
+#include "SearchLoopEdgeA.inc"
+ // search vertical line and averages, -1,0,+1
+#include "SearchLoopVA.inc"
+ // blend our results and loop
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+// Handle one 4x1 block of pixels
+// Search a 9x3 area, no half pels
+static inline int SEARCH_EFFORT_FUNC(Max)
+{
+#ifdef IS_C
+#define SKIP_SEARCH
+#include "SearchLoopTop.inc"
+#include "SearchLoopBottom.inc"
+#undef SKIP_SEARCH
+#else
+ //see SearchLoopTop.inc for comments
+#include "SearchLoopTop.inc"
+
+ // odd addresses -- the pixels at odd address wouldn't generate
+ // good luma values but we will mask those off
+
+#include "SearchLoopOddA6.inc" // 4 odd v half pels, 3 to left & right
+#include "SearchLoopOddA.inc" // 6 odd pels, 1 to left & right
+
+ RESET_CHROMA // pretend chroma diffs was 255 each
+
+ // even addresses -- use both luma and chroma from these
+ // search averages of 4 pixels left and right
+#include "SearchLoopEdgeA8.inc"
+ // search averages of 2 pixels left and right
+#include "SearchLoopEdgeA.inc"
+ // search vertical line and averages, -1,0,+1
+#include "SearchLoopVA.inc"
+ // blend our results and loop
+#include "SearchLoop0A.inc"
+#include "SearchLoopBottom.inc"
+#endif
+}
+
+#undef SEARCH_EFFORT_FUNC
+
diff --git a/gst/deinterlace/tvtime/tomsmocomp/WierdBob.inc b/gst/deinterlace/tvtime/tomsmocomp/WierdBob.inc
new file mode 100644
index 0000000000..0c71db8124
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/WierdBob.inc
@@ -0,0 +1,286 @@
+// -*- c++ -*-
+
+ // First, get and save our possible Bob values
+ // Assume our pixels are laid out as follows with x the calc'd bob value
+ // and the other pixels are from the current field
+ //
+ // j a b c k current field
+ // x calculated line
+ // m d e f n current field
+ //
+ // we calc the bob value as:
+ // x2 = either avg(a,f), avg(c,d), avg(b,e), avg(j,n), or avg(k,m)
+
+ // selected for the smallest of abs(a,f), abs(c,d), or abs(b,e), etc.
+
+#ifndef IS_C
+ // a,f
+ "movq -2(%%"XBX"), %%mm0\n\t" // value a from top left
+ "movq 2(%%"XBX", %%"XCX"), %%mm1\n\t" // value f from bottom right
+ "movq %%mm0, %%mm6\n\t"
+// pavgb %%mm6, %%mm1 // avg(a,f), also best so far
+ V_PAVGB ("%%mm6", "%%mm1", "%%mm7", _ShiftMask) // avg(a,f), also best so far
+ "movq %%mm0, %%mm7\n\t"
+ "psubusb %%mm1, %%mm7\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm7\n\t" // abs diff, also best so far
+
+ // c,d
+ "movq 2(%%"XBX"), %%mm0\n\t" // value a from top left
+ "movq -2(%%"XBX", %%"XCX"), %%mm1\n\t" // value f from bottom right
+ "movq %%mm0, %%mm2\n\t"
+// pavgb %%mm2, %%mm1 // avg(c,d)
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(c,d)
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(c,d)
+ "movq %%mm3, %%mm1\n\t" // keep copy
+
+ "psubusb %%mm7, %%mm3\n\t" // nonzero where new weights bigger, else 0
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where new better, else 00
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where old better, else 00
+
+ "pand %%mm3, %%mm1\n\t" // keep only better new avg and abs
+ "pand %%mm3, %%mm2\n\t"
+
+ "pand %%mm4, %%mm6\n\t"
+ "pand %%mm4, %%mm7\n\t"
+
+ "por %%mm2, %%mm6\n\t" // and merge new & old vals keeping best
+ "por %%mm1, %%mm7\n\t"
+ "por "_UVMask", %%mm7\n\t" // but we know chroma is worthless so far
+ "pand "_YMask", %%mm5\n\t" // mask out chroma from here also
+
+ // j,n
+ "movq -4(%%"XBX"), %%mm0\n\t" // value j from top left
+ "movq 4(%%"XBX", %%"XCX"), %%mm1\n\t" // value n from bottom right
+ "movq %%mm0, %%mm2\n\t"
+// pavgb %%mm2, %%mm1 // avg(j,n)
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(j,n)
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(j-n)
+ "movq %%mm3, %%mm1\n\t" // keep copy
+
+ "psubusb %%mm7, %%mm3\n\t" // nonzero where new weights bigger, else 0
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where new better, else 00
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where old better, else 00
+
+ "pand %%mm3, %%mm1\n\t" // keep only better new avg and abs
+ "pand %%mm2, %%mm3\n\t"
+
+ "pand %%mm4, %%mm6\n\t"
+ "pand %%mm4, %%mm7\n\t"
+
+ "por %%mm3, %%mm6\n\t" // and merge new & old vals keeping best
+ "por %%mm1, %%mm7\n\t" // "
+
+ // k, m
+ "movq 4(%%"XBX"), %%mm0\n\t" // value k from top right
+ "movq -4(%%"XBX", %%"XCX"), %%mm1\n\t" // value n from bottom left
+ "movq %%mm0, %%mm4\n\t"
+// pavgb %%mm4, %%mm1 // avg(k,m)
+ V_PAVGB ("%%mm4", "%%mm1", "%%mm3", _ShiftMask) // avg(k,m)
+
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(k,m)
+ "movq %%mm3, %%mm1\n\t" // keep copy
+
+ "movq %%mm4, %%mm2\n\t" // avg(k,m)
+
+ "psubusb %%mm7, %%mm3\n\t" // nonzero where new weights bigger, else 0
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where new better, else 00
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where old better, else 00
+
+ "pand %%mm3, %%mm1\n\t" // keep only better new avg and abs
+ "pand %%mm2, %%mm3\n\t"
+
+ "pand %%mm4, %%mm6\n\t"
+ "pand %%mm4, %%mm7\n\t"
+
+ "por %%mm3, %%mm6\n\t" // and merge new & old vals keeping best
+ "por %%mm1, %%mm7\n\t" // "
+
+ // b,e
+ "movq (%%"XBX"), %%mm0\n\t" // value b from top
+ "movq (%%"XBX", %%"XCX"), %%mm1\n\t" // value e from bottom
+
+// We will also calc here the max/min values to later limit comb
+// so the max excursion will not exceed the Max_Comb constant
+
+#ifdef SKIP_SEARCH
+ "movq %%mm0, %%mm2\n\t"
+// pminub %%mm2, %%mm1
+ V_PMINUB ("%%mm2", "%%mm1", "%%mm4")
+
+// pmaxub %%mm6, %%mm2 // clip our current results so far to be above this
+ V_PMAXUB ("%%mm6", "%%mm2")
+ "movq %%mm0, %%mm2\n\t"
+ V_PMAXUB ("%%mm2", "%%mm1")
+// pminub %%mm6, %%mm2 // clip our current results so far to be below this
+ V_PMINUB ("%%mm6", "%%mm2", "%%mm4")
+
+#else
+ "movq %%mm0, %%mm2\n\t"
+ "movq (%%"XAX"), %%mm4\n\t"
+ "psubusb %%mm4, %%mm2\n\t"
+ "psubusb %%mm0, %%mm4\n\t"
+ "por %%mm2, %%mm4\n\t" // abs diff
+
+ "movq %%mm1, %%mm2\n\t"
+ "movq (%%"XAX", %%"XCX"), %%mm3\n\t"
+ "psubusb %%mm3, %%mm2\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "por %%mm2, %%mm3\n\t" // abs diff
+// pmaxub %%mm3, %%mm4 // top or bottom pixel moved most
+ V_PMAXUB ("%%mm3", "%%mm4") // top or bottom pixel moved most
+ "psubusb "_Max_Mov", %%mm3\n\t" // moved more than allowed? or goes to 0?
+ "pxor %%mm4, %%mm4\n\t"
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where low motion, else high motion
+
+ "movq %%mm0, %%mm2\n\t"
+// pminub %%mm2, %%mm1
+ V_PMINUB ("%%mm2", "%%mm1", "%%mm4")
+
+// pmaxub %%mm6, %%mm2 // clip our current results so far to be above this
+ V_PMAXUB ("%%mm6", "%%mm2")
+
+ "psubusb %%mm3, %%mm2\n\t" // maybe decrease it to 0000.. if no surround motion
+ "movq %%mm2, "_Min_Vals"\n\t"
+
+ "movq %%mm0, %%mm2\n\t"
+ V_PMAXUB ("%%mm2", "%%mm1")
+// pminub %%mm6, %%mm2 // clip our current results so far to be below this
+ V_PMINUB ("%%mm6", "%%mm2", "%%mm4")
+ "paddusb %%mm3, %%mm2\n\t" // maybe increase it to ffffff if no surround motion
+ "movq %%mm2, "_Max_Vals"\n\t"
+#endif
+
+ "movq %%mm0, %%mm2\n\t"
+// pavgb %%mm2, %%mm1 // avg(b,e)
+ V_PAVGB ("%%mm2", "%%mm1", "%%mm3", _ShiftMask) // avg(b,e)
+
+ "movq %%mm0, %%mm3\n\t"
+ "psubusb %%mm1, %%mm3\n\t"
+ "psubusb %%mm0, %%mm1\n\t"
+ "por %%mm1, %%mm3\n\t" // abs(c,d)
+ "movq %%mm3, %%mm1\n\t" // keep copy of diffs
+
+ "pxor %%mm4, %%mm4\n\t"
+ "psubusb %%mm7, %%mm3\n\t" // nonzero where new weights bigger, else 0
+ "pcmpeqb %%mm4, %%mm3\n\t" // now ff where new better, else 00
+
+ "pcmpeqb %%mm3, %%mm4\n\t" // here ff where old better, else 00
+
+ "pand %%mm3, %%mm1\n\t"
+ "pand %%mm3, %%mm2\n\t"
+
+ "pand %%mm4, %%mm6\n\t"
+ "pand %%mm4, %%mm7\n\t"
+
+ "por %%mm2, %%mm6\n\t" // our x2 value
+ "por %%mm1, %%mm7\n\t" // our x2 diffs
+ "movq %%mm7, %%mm4\n\t" // save as bob uncertainty indicator
+
+#else
+
+  // a,f
+  // Seed best/diff with the first diagonal pair: a (above, left) averaged
+  // with f (below, right) -- see the pixel layout comment at the top of
+  // this file.  Lanes [0]/[1] are two adjacent bytes; the offsets stepping
+  // by 2 suggest packed 4:2:2 data (luma on even bytes) -- TODO confirm.
+  best[0] = (pBob[-2] + pBob[src_pitch2 + 2]) / 2;
+  diff[0] = ABS (pBob[-2] - pBob[src_pitch2 + 2]);
+  best[1] = (pBob[-1] + pBob[src_pitch2 + 3]) / 2;
+  diff[1] = ABS (pBob[-1] - pBob[src_pitch2 + 3]);
+
+  // c,d
+  // Opposite diagonal (c above-right, d below-left); adopt it only when its
+  // absolute difference is strictly smaller than the current best.
+  if (ABS (pBob[2] - pBob[src_pitch2 - 2]) < diff[0]) {
+    best[0] = (pBob[2] + pBob[src_pitch2 - 2]) / 2;
+    diff[0] = ABS (pBob[2] - pBob[src_pitch2 - 2]);
+  }
+
+  if (ABS (pBob[3] - pBob[src_pitch2 - 1]) < diff[1]) {
+    best[1] = (pBob[3] + pBob[src_pitch2 - 1]) / 2;
+    diff[1] = ABS (pBob[3] - pBob[src_pitch2 - 1]);
+  }
+
+ // j,n
+ if (ABS (pBob[-4] - pBob[src_pitch2 + 4]) < diff[0]) {
+ best[0] = (pBob[-4] + pBob[src_pitch2 + 4]) / 2;
+ diff[0] = ABS (pBob[-4] - pBob[src_pitch2 + 4]);
+ }
+
+ if (ABS (pBob[-3] - pBob[src_pitch2 + 5]) < diff[1]) {
+ best[1] = (pBob[-3] + pBob[src_pitch2 + 5]) / 2;
+ diff[1] = ABS (pBob[-3] - pBob[src_pitch2 + 5]);
+ }
+
+  // k,m
+  // Wide diagonal pair: k (above, +4) with m (below, -4).
+  // Fix: the stored diff must use the same pixels as the comparison above
+  // it; the original stored ABS(pBob[-4] - ...) / ABS(pBob[-3] - ...) --
+  // the j,n pixels -- by copy-paste error, so later candidates were
+  // compared against the wrong reference.
+  if (ABS (pBob[4] - pBob[src_pitch2 - 4]) < diff[0]) {
+    best[0] = (pBob[4] + pBob[src_pitch2 - 4]) / 2;
+    diff[0] = ABS (pBob[4] - pBob[src_pitch2 - 4]);
+  }
+
+  if (ABS (pBob[5] - pBob[src_pitch2 - 3]) < diff[1]) {
+    best[1] = (pBob[5] + pBob[src_pitch2 - 3]) / 2;
+    diff[1] = ABS (pBob[5] - pBob[src_pitch2 - 3]);
+  }
+  // NOTE(review): the original repeated this entire k,m test a second
+  // time.  With the diffs stored correctly the repeat can never fire (the
+  // strict '<' against the just-stored value is false), so the duplicate
+  // block has been removed.
+
+// We will also calc here the max/min values to later limit comb
+// so the max excursion will not exceed the Max_Comb constant
+
+#ifdef SKIP_SEARCH
+ best[0] = CLAMP (best[0], MIN (pBob[src_pitch2], pBob[0]), MAX (pBob[src_pitch2], pBob[0]));
+ best[1] = CLAMP (best[1], MIN (pBob[src_pitch2 + 1], pBob[1]), MAX (pBob[src_pitch2 + 1], pBob[1]));
+#else
+ mov[0] = MAX (ABS (pBob[0] - pBobP[0]), ABS (pBob[src_pitch2] - pBobP[src_pitch2]));
+ mov[1] = MAX (ABS (pBob[1] - pBobP[1]), ABS (pBob[src_pitch2 + 1] - pBobP[src_pitch2 + 1]));
+
+ MinVals[0] = 0;
+ MinVals[1] = 0;
+ MaxVals[0] = 255;
+ MaxVals[1] = 255;
+
+ if (mov[0] > Max_Mov[0]) {
+ MinVals[0] = MAX (MIN (pBob[0], pBob[src_pitch2]), best[0]);
+ MaxVals[0] = MIN (MAX (pBob[0], pBob[src_pitch2]), best[0]);
+ }
+
+ if (mov[1] > Max_Mov[1]) {
+ MinVals[1] = MAX (MIN (pBob[1], pBob[src_pitch2 + 1]), best[1]);
+ MaxVals[1] = MIN (MAX (pBob[1], pBob[src_pitch2 + 1]), best[1]);
+ }
+
+ best[0] = CLAMP (best[0], MIN (pBob[src_pitch2], pBob[0]), MAX (pBob[src_pitch2], pBob[0]));
+ best[1] = CLAMP (best[1], MIN (pBob[src_pitch2 + 1], pBob[1]), MAX (pBob[src_pitch2 + 1], pBob[1]));
+#endif
+
+ avg[0] = (pBob[src_pitch2] + pBob[0]) / 2;
+ avg[1] = (pBob[src_pitch2 + 1] + pBob[1]) / 2;
+ diff2[0] = ABS (pBob[src_pitch2] - pBob[0]);
+ diff2[1] = ABS (pBob[src_pitch2 + 1] - pBob[1]);
+
+ if (diff2[0] < diff[0]) {
+ best[0] = avg[0];
+ diff[0] = diff2[0];
+ }
+
+ if (diff2[1] < diff[1]) {
+ best[1] = avg[1];
+ diff[1] = diff2[1];
+ }
+#endif
diff --git a/gst/deinterlace/tvtime/tomsmocomp/tomsmocompmacros.h b/gst/deinterlace/tvtime/tomsmocomp/tomsmocompmacros.h
new file mode 100644
index 0000000000..7e8147ec96
--- /dev/null
+++ b/gst/deinterlace/tvtime/tomsmocomp/tomsmocompmacros.h
@@ -0,0 +1,164 @@
+#include <string.h>
+#include <math.h>
+
+// Define a few macros for CPU dependent instructions.
+// I suspect I don't really understand how the C macro preprocessor works but
+// this seems to get the job done. // TRB 7/01
+
+// BEFORE USING THESE YOU MUST SET:
+
+// #define SIMD_TYPE MMXEXT (or MMX or 3DNOW)
+
+// some macros for pavgb instruction
+// V_PAVGB(mmr1, mmr2, mmr work register, smask) mmr2 may = mmrw if you can trash it
+
+// Approximate byte-wise average for plain MMX (no pavgb): mask each byte's
+// low bit with smask, shift both operands right by one word-wise, then add.
+// NOTE(review): this drops the rounding/carry bit, so the result can be 1
+// less than a true pavgb -- presumably acceptable for this filter; confirm.
+#define V_PAVGB_MMX(mmr1, mmr2, mmrw, smask) \
+ "movq "mmr2", "mmrw"\n\t" \
+ "pand "smask", "mmrw"\n\t" \
+ "psrlw $1, "mmrw"\n\t" \
+ "pand "smask", "mmr1"\n\t" \
+ "psrlw $1, "mmr1"\n\t" \
+ "paddusb "mmrw", "mmr1"\n\t"
+#define V_PAVGB_MMXEXT(mmr1, mmr2, mmrw, smask) "pavgb "mmr2", "mmr1"\n\t"
+#define V_PAVGB_3DNOW(mmr1, mmr2, mmrw, smask) "pavgusb "mmr2", "mmr1"\n\t"
+// The V_PAVGB2/V_PAVGB3 double indirection forces the preprocessor to
+// expand SIMD_TYPE before the ## paste selects one implementation above.
+#define V_PAVGB(mmr1, mmr2, mmrw, smask) V_PAVGB2(mmr1, mmr2, mmrw, smask, SIMD_TYPE)
+#define V_PAVGB2(mmr1, mmr2, mmrw, smask, simd_type) V_PAVGB3(mmr1, mmr2, mmrw, smask, simd_type)
+#define V_PAVGB3(mmr1, mmr2, mmrw, smask, simd_type) V_PAVGB_##simd_type(mmr1, mmr2, mmrw, smask)
+
+// some macros for pmaxub instruction
+#define V_PMAXUB_MMX(mmr1, mmr2) \
+ "psubusb "mmr2", "mmr1"\n\t" \
+ "paddusb "mmr2", "mmr1"\n\t"
+#define V_PMAXUB_MMXEXT(mmr1, mmr2) "pmaxub "mmr2", "mmr1"\n\t"
+#define V_PMAXUB_3DNOW(mmr1, mmr2) V_PMAXUB_MMX(mmr1, mmr2) // use MMX version
+#define V_PMAXUB(mmr1, mmr2) V_PMAXUB2(mmr1, mmr2, SIMD_TYPE)
+#define V_PMAXUB2(mmr1, mmr2, simd_type) V_PMAXUB3(mmr1, mmr2, simd_type)
+#define V_PMAXUB3(mmr1, mmr2, simd_type) V_PMAXUB_##simd_type(mmr1, mmr2)
+
+// some macros for pminub instruction
+// V_PMINUB(mmr1, mmr2, mmr work register) mmr2 may NOT = mmrw
+#define V_PMINUB_MMX(mmr1, mmr2, mmrw) \
+ "pcmpeqb "mmrw", "mmrw"\n\t" \
+ "psubusb "mmr2", "mmrw"\n\t" \
+ "paddusb "mmrw", "mmr1"\n\t" \
+ "psubusb "mmrw", "mmr1"\n\t"
+#define V_PMINUB_MMXEXT(mmr1, mmr2, mmrw) "pminub "mmr2", "mmr1"\n\t"
+#define V_PMINUB_3DNOW(mmr1, mmr2, mmrw) V_PMINUB_MMX(mmr1, mmr2, mmrw) // use MMX version
+#define V_PMINUB(mmr1, mmr2, mmrw) V_PMINUB2(mmr1, mmr2, mmrw, SIMD_TYPE)
+#define V_PMINUB2(mmr1, mmr2, mmrw, simd_type) V_PMINUB3(mmr1, mmr2, mmrw, simd_type)
+#define V_PMINUB3(mmr1, mmr2, mmrw, simd_type) V_PMINUB_##simd_type(mmr1, mmr2, mmrw)
+
+// some macros for movntq instruction
+// V_MOVNTQ(mmr1, mmr2)
+#define V_MOVNTQ_MMX(mmr1, mmr2) "movq "mmr2", "mmr1"\n\t"
+#define V_MOVNTQ_3DNOW(mmr1, mmr2) "movq "mmr2", "mmr1"\n\t"
+#define V_MOVNTQ_MMXEXT(mmr1, mmr2) "movntq "mmr2", "mmr1"\n\t"
+#define V_MOVNTQ(mmr1, mmr2) V_MOVNTQ2(mmr1, mmr2, SIMD_TYPE)
+#define V_MOVNTQ2(mmr1, mmr2, simd_type) V_MOVNTQ3(mmr1, mmr2, simd_type)
+#define V_MOVNTQ3(mmr1, mmr2, simd_type) V_MOVNTQ_##simd_type(mmr1, mmr2)
+
+// end of macros
+
+#ifdef IS_SSE2
+
+#define MERGE4PIXavg(PADDR1, PADDR2) \
+ "movdqu "PADDR1", %%xmm0\n\t" /* our 4 pixels */ \
+ "movdqu "PADDR2", %%xmm1\n\t" /* our pixel2 value */ \
+ "movdqa %%xmm0, %%xmm2\n\t" /* another copy of our pixel1 value */ \
+ "movdqa %%xmm1, %%xmm3\n\t" /* another copy of our pixel1 value */ \
+ "psubusb %%xmm1, %%xmm2\n\t" \
+ "psubusb %%xmm0, %%xmm3\n\t" \
+ "por %%xmm3, %%xmm2\n\t" \
+ "pavgb %%xmm1, %%xmm0\n\t" /* avg of 2 pixels */ \
+ "movdqa %%xmm2, %%xmm3\n\t" /* another copy of our our weights */ \
+ "pxor %%xmm1, %%xmm1\n\t" \
+ "psubusb %%xmm7, %%xmm3\n\t" /* nonzero where old weights lower, else 0 */ \
+ "pcmpeqb %%xmm1, %%xmm3\n\t" /* now ff where new better, else 00 */ \
+ "pcmpeqb %%xmm3, %%xmm1\n\t" /* here ff where old better, else 00 */ \
+ "pand %%xmm3, %%xmm0\n\t" /* keep only better new pixels */ \
+ "pand %%xmm3, %%xmm2\n\t" /* and weights */ \
+ "pand %%xmm1, %%xmm5\n\t" /* keep only better old pixels */ \
+ "pand %%xmm1, %%xmm7\n\t" \
+ "por %%xmm0, %%xmm5\n\t" /* and merge new & old vals */ \
+ "por %%xmm2, %%xmm7\n\t"
+
+#define MERGE4PIXavgH(PADDR1A, PADDR1B, PADDR2A, PADDR2B) \
+ "movdqu "PADDR1A", %%xmm0\n\t" /* our 4 pixels */ \
+ "movdqu "PADDR2A", %%xmm1\n\t" /* our pixel2 value */ \
+ "movdqu "PADDR1B", %%xmm2\n\t" /* our 4 pixels */ \
+ "movdqu "PADDR2B", %%xmm3\n\t" /* our pixel2 value */ \
+ "pavgb %%xmm2, %%xmm0\n\t" \
+ "pavgb %%xmm3, %%xmm1\n\t" \
+ "movdqa %%xmm0, %%xmm2\n\t" /* another copy of our pixel1 value */ \
+ "movdqa %%xmm1, %%xmm3\n\t" /* another copy of our pixel1 value */ \
+ "psubusb %%xmm1, %%xmm2\n\t" \
+ "psubusb %%xmm0, %%xmm3\n\t" \
+ "por %%xmm3, %%xmm2\n\t" \
+ "pavgb %%xmm1, %%xmm0\n\t" /* avg of 2 pixels */ \
+ "movdqa %%xmm2, %%xmm3\n\t" /* another copy of our our weights */ \
+ "pxor %%xmm1, %%xmm1\n\t" \
+ "psubusb %%xmm7, %%xmm3\n\t" /* nonzero where old weights lower, else 0 */ \
+ "pcmpeqb %%xmm1, %%xmm3\n\t" /* now ff where new better, else 00 */ \
+ "pcmpeqb %%xmm3, %%xmm1\n\t" /* here ff where old better, else 00 */ \
+ "pand %%xmm3, %%xmm0\n\t" /* keep only better new pixels */ \
+ "pand %%xmm3, %%xmm2\n\t" /* and weights */ \
+ "pand %%xmm1, %%xmm5\n\t" /* keep only better old pixels */ \
+ "pand %%xmm1, %%xmm7\n\t" \
+ "por %%xmm0, %%xmm5\n\t" /* and merge new & old vals */ \
+ "por %%xmm2, %%xmm7\n\t"
+
+#define RESET_CHROMA "por "_UVMask", %%xmm7\n\t"
+
+#else // ifdef IS_SSE2
+
+#define MERGE4PIXavg(PADDR1, PADDR2) \
+ "movq "PADDR1", %%mm0\n\t" /* our 4 pixels */ \
+ "movq "PADDR2", %%mm1\n\t" /* our pixel2 value */ \
+ "movq %%mm0, %%mm2\n\t" /* another copy of our pixel1 value */ \
+ "movq %%mm1, %%mm3\n\t" /* another copy of our pixel1 value */ \
+ "psubusb %%mm1, %%mm2\n\t" \
+ "psubusb %%mm0, %%mm3\n\t" \
+ "por %%mm3, %%mm2\n\t" \
+ V_PAVGB ("%%mm0", "%%mm1", "%%mm3", _ShiftMask) /* avg of 2 pixels */ \
+ "movq %%mm2, %%mm3\n\t" /* another copy of our our weights */ \
+ "pxor %%mm1, %%mm1\n\t" \
+ "psubusb %%mm7, %%mm3\n\t" /* nonzero where old weights lower, else 0 */ \
+ "pcmpeqb %%mm1, %%mm3\n\t" /* now ff where new better, else 00 */ \
+ "pcmpeqb %%mm3, %%mm1\n\t" /* here ff where old better, else 00 */ \
+ "pand %%mm3, %%mm0\n\t" /* keep only better new pixels */ \
+ "pand %%mm3, %%mm2\n\t" /* and weights */ \
+ "pand %%mm1, %%mm5\n\t" /* keep only better old pixels */ \
+ "pand %%mm1, %%mm7\n\t" \
+ "por %%mm0, %%mm5\n\t" /* and merge new & old vals */ \
+ "por %%mm2, %%mm7\n\t"
+
+#define MERGE4PIXavgH(PADDR1A, PADDR1B, PADDR2A, PADDR2B) \
+ "movq "PADDR1A", %%mm0\n\t" /* our 4 pixels */ \
+ "movq "PADDR2A", %%mm1\n\t" /* our pixel2 value */ \
+ "movq "PADDR1B", %%mm2\n\t" /* our 4 pixels */ \
+ "movq "PADDR2B", %%mm3\n\t" /* our pixel2 value */ \
+ V_PAVGB("%%mm0", "%%mm2", "%%mm2", _ShiftMask) \
+ V_PAVGB("%%mm1", "%%mm3", "%%mm3", _ShiftMask) \
+ "movq %%mm0, %%mm2\n\t" /* another copy of our pixel1 value */ \
+ "movq %%mm1, %%mm3\n\t" /* another copy of our pixel1 value */ \
+ "psubusb %%mm1, %%mm2\n\t" \
+ "psubusb %%mm0, %%mm3\n\t" \
+ "por %%mm3, %%mm2\n\t" \
+ V_PAVGB("%%mm0", "%%mm1", "%%mm3", _ShiftMask) /* avg of 2 pixels */ \
+ "movq %%mm2, %%mm3\n\t" /* another copy of our our weights */ \
+ "pxor %%mm1, %%mm1\n\t" \
+ "psubusb %%mm7, %%mm3\n\t" /* nonzero where old weights lower, else 0 */ \
+ "pcmpeqb %%mm1, %%mm3\n\t" /* now ff where new better, else 00 */ \
+ "pcmpeqb %%mm3, %%mm1\n\t" /* here ff where old better, else 00 */ \
+ "pand %%mm3, %%mm0\n\t" /* keep only better new pixels */ \
+ "pand %%mm3, %%mm2\n\t" /* and weights */ \
+ "pand %%mm1, %%mm5\n\t" /* keep only better old pixels */ \
+ "pand %%mm1, %%mm7\n\t" \
+ "por %%mm0, %%mm5\n\t" /* and merge new & old vals */ \
+ "por %%mm2, %%mm7\n\t"
+
+#define RESET_CHROMA "por "_UVMask", %%mm7\n\t"
+
+#endif
+
+
diff --git a/gst/deinterlace/tvtime/vfir.c b/gst/deinterlace/tvtime/vfir.c
new file mode 100644
index 0000000000..f77178c861
--- /dev/null
+++ b/gst/deinterlace/tvtime/vfir.c
@@ -0,0 +1,314 @@
+/*
+ *
+ * GStreamer
+ * Copyright (C) 2004 Billy Biggs <vektor@dumbterm.net>
+ * Copyright (c) 2001, 2002, 2003 Fabrice Bellard.
+ * Copyright (C) 2008,2010 Sebastian Dröge <slomo@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This file contains code from ffmpeg, see http://ffmpeg.org/ (LGPL)
+ * and modifications by Billy Biggs.
+ *
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+#ifdef HAVE_ORC
+#include <orc/orc.h>
+#endif
+#include "tvtime.h"
+
+#define GST_TYPE_DEINTERLACE_METHOD_VFIR (gst_deinterlace_method_vfir_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_VFIR(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_VFIR))
+#define GST_IS_DEINTERLACE_METHOD_VFIR_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_VFIR))
+#define GST_DEINTERLACE_METHOD_VFIR_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_VFIR, GstDeinterlaceMethodVFIRClass))
+#define GST_DEINTERLACE_METHOD_VFIR(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_VFIR, GstDeinterlaceMethodVFIR))
+#define GST_DEINTERLACE_METHOD_VFIR_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_VFIR, GstDeinterlaceMethodVFIRClass))
+#define GST_DEINTERLACE_METHOD_VFIR_CAST(obj) ((GstDeinterlaceMethodVFIR*)(obj))
+
+GType gst_deinterlace_method_vfir_get_type (void);
+
+typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodVFIR;
+
+typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodVFIRClass;
+
+/*
+ * The MPEG2 spec uses a slightly harsher filter, they specify
+ * [-1 8 2 8 -1]. ffmpeg uses a similar filter but with more of
+ * a tendency to blur than to use the local information. The
+ * filter taps here are: [-1 4 2 4 -1].
+ */
+
+ /*
+ * C implementation.
+ */
+/* Dispatch one output line.  When the middle line (lum_m2) is available,
+ * run the 5-tap [-1 4 2 4 -1] vertical FIR over the five field lines;
+ * otherwise fall back to a linear blend of the two neighbouring lines. */
+static inline void
+deinterlace_c (guint8 * dst, const guint8 * lum_m4, const guint8 * lum_m3,
+    const guint8 * lum_m2, const guint8 * lum_m1, const guint8 * lum, gint size)
+{
+  if (lum_m2 != NULL) {
+    deinterlace_line_vfir (dst, lum_m4, lum_m3, lum_m2, lum_m1, lum, size);
+  } else {
+    deinterlace_line_linear (dst, lum_m1, lum_m3, size);
+  }
+}
+
+/* Packed-format C entry point: feed the five scanlines (tt1, t0, m1, b0,
+ * bb1) straight into the common dispatcher. */
+static void
+deinterlace_line_packed_c (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  deinterlace_c (dst, scanlines->tt1, scanlines->t0, scanlines->m1,
+      scanlines->b0, scanlines->bb1, size);
+}
+
+/* Planar Y-plane C entry point: forwards the scanline bundle to the common
+ * dispatcher. */
+static void
+deinterlace_line_planar_y_c (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  deinterlace_c (dst, scanlines->tt1, scanlines->t0, scanlines->m1,
+      scanlines->b0, scanlines->bb1, size);
+}
+
+/* Planar U-plane C entry point: forwards the scanline bundle to the common
+ * dispatcher. */
+static void
+deinterlace_line_planar_u_c (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  deinterlace_c (dst, scanlines->tt1, scanlines->t0, scanlines->m1,
+      scanlines->b0, scanlines->bb1, size);
+}
+
+/* Planar V-plane C entry point: forwards the scanline bundle to the common
+ * dispatcher. */
+static void
+deinterlace_line_planar_v_c (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  deinterlace_c (dst, scanlines->tt1, scanlines->t0, scanlines->m1,
+      scanlines->b0, scanlines->bb1, size);
+}
+
+#undef BUILD_X86_ASM
+
+#ifdef BUILD_X86_ASM
+#include "mmx.h"
+static void
+deinterlace_mmx (guint8 * dst, const guint8 * lum_m4, const guint8 * lum_m3,
+ const guint8 * lum_m2, const guint8 * lum_m1, const guint8 * lum, gint size)
+{
+ mmx_t rounder;
+
+ rounder.uw[0] = 4;
+ rounder.uw[1] = 4;
+ rounder.uw[2] = 4;
+ rounder.uw[3] = 4;
+ pxor_r2r (mm7, mm7);
+ movq_m2r (rounder, mm6);
+
+ for (; size > 3; size -= 4) {
+ movd_m2r (*lum_m4, mm0);
+ movd_m2r (*lum_m3, mm1);
+ movd_m2r (*lum_m2, mm2);
+ movd_m2r (*lum_m1, mm3);
+ movd_m2r (*lum, mm4);
+ punpcklbw_r2r (mm7, mm0);
+ punpcklbw_r2r (mm7, mm1);
+ punpcklbw_r2r (mm7, mm2);
+ punpcklbw_r2r (mm7, mm3);
+ punpcklbw_r2r (mm7, mm4);
+ paddw_r2r (mm3, mm1);
+ psllw_i2r (1, mm2);
+ paddw_r2r (mm4, mm0);
+ psllw_i2r (2, mm1); // 2
+ paddw_r2r (mm6, mm2);
+ paddw_r2r (mm2, mm1);
+ psubusw_r2r (mm0, mm1);
+ psrlw_i2r (3, mm1); // 3
+ packuswb_r2r (mm7, mm1);
+ movd_r2m (mm1, *dst);
+ lum_m4 += 4;
+ lum_m3 += 4;
+ lum_m2 += 4;
+ lum_m1 += 4;
+ lum += 4;
+ dst += 4;
+ }
+ emms ();
+
+ /* Handle odd widths */
+ if (size > 0)
+ deinterlace_c (dst, lum_m4, lum_m3, lum_m2, lum_m1, lum, size);
+}
+
+static void
+deinterlace_line_packed_mmx (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+ const GstDeinterlaceScanlineData * scanlines)
+{
+ const guint8 *lum_m4 = scanlines->tt1;
+ const guint8 *lum_m3 = scanlines->t0;
+ const guint8 *lum_m2 = scanlines->m1;
+ const guint8 *lum_m1 = scanlines->b0;
+ const guint8 *lum = scanlines->bb1;
+ gint size = self->parent.row_stride[0];
+
+ deinterlace_mmx (dst, lum_m4, lum_m3, lum_m2, lum_m1, lum, size);
+}
+
+static void
+deinterlace_line_planar_y_mmx (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+ const GstDeinterlaceScanlineData * scanlines)
+{
+ const guint8 *lum_m4 = scanlines->tt1;
+ const guint8 *lum_m3 = scanlines->t0;
+ const guint8 *lum_m2 = scanlines->m1;
+ const guint8 *lum_m1 = scanlines->b0;
+ const guint8 *lum = scanlines->bb1;
+ gint size = self->parent.row_stride[0];
+
+ deinterlace_mmx (dst, lum_m4, lum_m3, lum_m2, lum_m1, lum, size);
+}
+
+static void
+deinterlace_line_planar_u_mmx (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+ const GstDeinterlaceScanlineData * scanlines)
+{
+ const guint8 *lum_m4 = scanlines->tt1;
+ const guint8 *lum_m3 = scanlines->t0;
+ const guint8 *lum_m2 = scanlines->m1;
+ const guint8 *lum_m1 = scanlines->b0;
+ const guint8 *lum = scanlines->bb1;
+ gint size = self->parent.row_stride[1];
+
+ deinterlace_mmx (dst, lum_m4, lum_m3, lum_m2, lum_m1, lum, size);
+}
+
+static void
+deinterlace_line_planar_v_mmx (GstDeinterlaceSimpleMethod * self, guint8 * dst,
+ const GstDeinterlaceScanlineData * scanlines)
+{
+ const guint8 *lum_m4 = scanlines->tt1;
+ const guint8 *lum_m3 = scanlines->t0;
+ const guint8 *lum_m2 = scanlines->m1;
+ const guint8 *lum_m1 = scanlines->b0;
+ const guint8 *lum = scanlines->bb1;
+ gint size = self->parent.row_stride[2];
+
+ deinterlace_mmx (dst, lum_m4, lum_m3, lum_m2, lum_m1, lum, size);
+}
+#endif
+
+G_DEFINE_TYPE (GstDeinterlaceMethodVFIR, gst_deinterlace_method_vfir,
+ GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
+static void
+gst_deinterlace_method_vfir_class_init (GstDeinterlaceMethodVFIRClass * klass)
+{
+ GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+ GstDeinterlaceSimpleMethodClass *dism_class =
+ (GstDeinterlaceSimpleMethodClass *) klass;
+#ifdef BUILD_X86_ASM
+ guint cpu_flags =
+ orc_target_get_default_flags (orc_target_get_by_name ("mmx"));
+#endif
+
+ dim_class->fields_required = 2;
+ dim_class->name = "Blur Vertical";
+ dim_class->nick = "vfir";
+ dim_class->latency = 1;
+
+#ifdef BUILD_X86_ASM
+ if (cpu_flags & ORC_TARGET_MMX_MMX) {
+ dism_class->interpolate_scanline_ayuv = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_yuy2 = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_yvyu = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_uyvy = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_nv12 = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_nv21 = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_argb = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_abgr = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_rgba = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_bgra = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_rgb = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_bgr = deinterlace_line_packed_mmx;
+ dism_class->interpolate_scanline_planar_y = deinterlace_line_planar_y_mmx;
+ dism_class->interpolate_scanline_planar_u = deinterlace_line_planar_u_mmx;
+ dism_class->interpolate_scanline_planar_v = deinterlace_line_planar_v_mmx;
+ } else {
+ dism_class->interpolate_scanline_yuy2 = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_yvyu = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_uyvy = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_ayuv = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_nv12 = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_nv21 = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_argb = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_abgr = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_rgba = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_bgra = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_rgb = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_bgr = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_planar_y = deinterlace_line_planar_y_c;
+ dism_class->interpolate_scanline_planar_u = deinterlace_line_planar_u_c;
+ dism_class->interpolate_scanline_planar_v = deinterlace_line_planar_v_c;
+ }
+#else
+ dism_class->interpolate_scanline_ayuv = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_yuy2 = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_yvyu = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_uyvy = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_nv12 = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_nv21 = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_argb = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_abgr = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_rgba = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_bgra = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_rgb = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_bgr = deinterlace_line_packed_c;
+ dism_class->interpolate_scanline_planar_y = deinterlace_line_planar_y_c;
+ dism_class->interpolate_scanline_planar_u = deinterlace_line_planar_u_c;
+ dism_class->interpolate_scanline_planar_v = deinterlace_line_planar_v_c;
+#endif
+}
+
+static void
+gst_deinterlace_method_vfir_init (GstDeinterlaceMethodVFIR * self)
+{
+  /* No per-instance state: all configuration happens in class_init. */
+}
diff --git a/gst/deinterlace/tvtime/weave.c b/gst/deinterlace/tvtime/weave.c
new file mode 100644
index 0000000000..804c889f07
--- /dev/null
+++ b/gst/deinterlace/tvtime/weave.c
@@ -0,0 +1,173 @@
+/*
+ * Weave frames
+ * Copyright (C) 2002 Billy Biggs <vektor@dumbterm.net>.
+ * Copyright (C) 2008,2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+
+#define GST_TYPE_DEINTERLACE_METHOD_WEAVE (gst_deinterlace_method_weave_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_WEAVE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE))
+#define GST_IS_DEINTERLACE_METHOD_WEAVE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_WEAVE))
+#define GST_DEINTERLACE_METHOD_WEAVE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE, GstDeinterlaceMethodWeaveClass))
+#define GST_DEINTERLACE_METHOD_WEAVE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE, GstDeinterlaceMethodWeave))
+#define GST_DEINTERLACE_METHOD_WEAVE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_WEAVE, GstDeinterlaceMethodWeaveClass))
+#define GST_DEINTERLACE_METHOD_WEAVE_CAST(obj) ((GstDeinterlaceMethodWeave*)(obj))
+
+GType gst_deinterlace_method_weave_get_type (void);
+
+typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodWeave;
+typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodWeaveClass;
+
+static void /* Weave interpolation for packed formats: reuse the history line when present. */
+deinterlace_scanline_weave_packed (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) { /* no history scanline yet (e.g. first field) */
+    memcpy (out, scanlines->t0, size); /* fall back to the neighbouring line t0 */
+  } else {
+    memcpy (out, scanlines->m1, size); /* m1: presumably same line from the previous field — confirm in gstdeinterlacemethod.h */
+  }
+}
+
+static void /* Same weave logic for the planar Y plane (one vfunc per plane). */
+deinterlace_scanline_weave_planar_y (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->t0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Same weave logic for the planar U plane. */
+deinterlace_scanline_weave_planar_u (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->t0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Same weave logic for the planar V plane. */
+deinterlace_scanline_weave_planar_v (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->t0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Pass an existing packed scanline through unchanged (m0 = current line). */
+copy_scanline_packed (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing Y-plane scanline through unchanged. */
+copy_scanline_planar_y (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing U-plane scanline through unchanged. */
+copy_scanline_planar_u (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing V-plane scanline through unchanged. */
+copy_scanline_planar_v (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+G_DEFINE_TYPE (GstDeinterlaceMethodWeave, gst_deinterlace_method_weave,
+    GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
+static void /* Register method metadata and per-format scanline callbacks. */
+gst_deinterlace_method_weave_class_init (GstDeinterlaceMethodWeaveClass * klass)
+{
+  GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+  GstDeinterlaceSimpleMethodClass *dism_class =
+      (GstDeinterlaceSimpleMethodClass *) klass;
+
+  dim_class->fields_required = 2; /* needs a history field (m1) besides the current one */
+  dim_class->name = "Weave";
+  dim_class->nick = "weave";
+  dim_class->latency = 1; /* declared latency: 1 */
+
+  dism_class->interpolate_scanline_ayuv = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_yuy2 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_yvyu = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_uyvy = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_nv12 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_nv21 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_argb = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_abgr = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_rgba = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_bgra = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_rgb = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_bgr = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_planar_y =
+      deinterlace_scanline_weave_planar_y;
+  dism_class->interpolate_scanline_planar_u =
+      deinterlace_scanline_weave_planar_u;
+  dism_class->interpolate_scanline_planar_v =
+      deinterlace_scanline_weave_planar_v;
+
+  dism_class->copy_scanline_ayuv = copy_scanline_packed; /* existing lines are copied verbatim */
+  dism_class->copy_scanline_yuy2 = copy_scanline_packed;
+  dism_class->copy_scanline_yvyu = copy_scanline_packed;
+  dism_class->copy_scanline_uyvy = copy_scanline_packed;
+  dism_class->copy_scanline_nv12 = copy_scanline_packed;
+  dism_class->copy_scanline_nv21 = copy_scanline_packed;
+  dism_class->copy_scanline_argb = copy_scanline_packed;
+  dism_class->copy_scanline_abgr = copy_scanline_packed;
+  dism_class->copy_scanline_rgba = copy_scanline_packed;
+  dism_class->copy_scanline_bgra = copy_scanline_packed;
+  dism_class->copy_scanline_rgb = copy_scanline_packed;
+  dism_class->copy_scanline_bgr = copy_scanline_packed;
+  dism_class->copy_scanline_planar_y = copy_scanline_planar_y;
+  dism_class->copy_scanline_planar_u = copy_scanline_planar_u;
+  dism_class->copy_scanline_planar_v = copy_scanline_planar_v;
+}
+
+static void /* GObject instance init: no per-instance state. */
+gst_deinterlace_method_weave_init (GstDeinterlaceMethodWeave * self)
+{
+}
diff --git a/gst/deinterlace/tvtime/weavebff.c b/gst/deinterlace/tvtime/weavebff.c
new file mode 100644
index 0000000000..7424e96fed
--- /dev/null
+++ b/gst/deinterlace/tvtime/weavebff.c
@@ -0,0 +1,174 @@
+/*
+ * Weave frames, bottom-field-first.
+ * Copyright (C) 2003 Billy Biggs <vektor@dumbterm.net>.
+ * Copyright (C) 2008,2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+
+#define GST_TYPE_DEINTERLACE_METHOD_WEAVE_BFF (gst_deinterlace_method_weave_bff_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_WEAVE_BFF(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE_BFF))
+#define GST_IS_DEINTERLACE_METHOD_WEAVE_BFF_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_WEAVE_BFF))
+#define GST_DEINTERLACE_METHOD_WEAVE_BFF_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE_BFF, GstDeinterlaceMethodWeaveBFFClass))
+#define GST_DEINTERLACE_METHOD_WEAVE_BFF(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE_BFF, GstDeinterlaceMethodWeaveBFF))
+#define GST_DEINTERLACE_METHOD_WEAVE_BFF_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_WEAVE_BFF, GstDeinterlaceMethodWeaveBFFClass))
+#define GST_DEINTERLACE_METHOD_WEAVE_BFF_CAST(obj) ((GstDeinterlaceMethodWeaveBFF*)(obj))
+
+GType gst_deinterlace_method_weave_bff_get_type (void);
+
+typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodWeaveBFF;
+typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodWeaveBFFClass;
+
+static void /* BFF weave for packed formats: history line when present, else b0 (line below). */
+deinterlace_scanline_weave_packed (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) { /* no history scanline yet */
+    memcpy (out, scanlines->b0, size); /* b0 fallback — the only difference from weave.c, which uses t0 */
+  } else {
+    memcpy (out, scanlines->m1, size); /* m1: presumably same line from the previous field — confirm in gstdeinterlacemethod.h */
+  }
+}
+
+static void /* Same BFF weave logic for the planar Y plane. */
+deinterlace_scanline_weave_planar_y (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->b0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Same BFF weave logic for the planar U plane. */
+deinterlace_scanline_weave_planar_u (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->b0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Same BFF weave logic for the planar V plane. */
+deinterlace_scanline_weave_planar_v (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->b0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Pass an existing packed scanline through unchanged (m0 = current line). */
+copy_scanline_packed (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing Y-plane scanline through unchanged. */
+copy_scanline_planar_y (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing U-plane scanline through unchanged. */
+copy_scanline_planar_u (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing V-plane scanline through unchanged. */
+copy_scanline_planar_v (GstDeinterlaceSimpleMethod * self, guint8 * out,
+    const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+G_DEFINE_TYPE (GstDeinterlaceMethodWeaveBFF, gst_deinterlace_method_weave_bff,
+    GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
+static void /* Register method metadata and per-format scanline callbacks. */
+gst_deinterlace_method_weave_bff_class_init (GstDeinterlaceMethodWeaveBFFClass *
+    klass)
+{
+  GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+  GstDeinterlaceSimpleMethodClass *dism_class =
+      (GstDeinterlaceSimpleMethodClass *) klass;
+
+  dim_class->fields_required = 2; /* needs a history field (m1) besides the current one */
+  dim_class->name = "Progressive: Bottom Field First";
+  dim_class->nick = "weavebff";
+  dim_class->latency = 1; /* declared latency: 1 */
+
+  dism_class->interpolate_scanline_ayuv = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_yuy2 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_yvyu = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_uyvy = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_nv12 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_nv21 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_argb = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_abgr = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_rgba = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_bgra = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_rgb = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_bgr = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_planar_y =
+      deinterlace_scanline_weave_planar_y;
+  dism_class->interpolate_scanline_planar_u =
+      deinterlace_scanline_weave_planar_u;
+  dism_class->interpolate_scanline_planar_v =
+      deinterlace_scanline_weave_planar_v;
+
+  dism_class->copy_scanline_ayuv = copy_scanline_packed; /* existing lines are copied verbatim */
+  dism_class->copy_scanline_yuy2 = copy_scanline_packed;
+  dism_class->copy_scanline_yvyu = copy_scanline_packed;
+  dism_class->copy_scanline_uyvy = copy_scanline_packed;
+  dism_class->copy_scanline_nv12 = copy_scanline_packed;
+  dism_class->copy_scanline_nv21 = copy_scanline_packed;
+  dism_class->copy_scanline_argb = copy_scanline_packed;
+  dism_class->copy_scanline_abgr = copy_scanline_packed;
+  dism_class->copy_scanline_rgba = copy_scanline_packed;
+  dism_class->copy_scanline_bgra = copy_scanline_packed;
+  dism_class->copy_scanline_rgb = copy_scanline_packed;
+  dism_class->copy_scanline_bgr = copy_scanline_packed;
+  dism_class->copy_scanline_planar_y = copy_scanline_planar_y;
+  dism_class->copy_scanline_planar_u = copy_scanline_planar_u;
+  dism_class->copy_scanline_planar_v = copy_scanline_planar_v;
+}
+
+static void /* GObject instance init: no per-instance state. */
+gst_deinterlace_method_weave_bff_init (GstDeinterlaceMethodWeaveBFF * self)
+{
+}
diff --git a/gst/deinterlace/tvtime/weavetff.c b/gst/deinterlace/tvtime/weavetff.c
new file mode 100644
index 0000000000..f33bb2e551
--- /dev/null
+++ b/gst/deinterlace/tvtime/weavetff.c
@@ -0,0 +1,175 @@
+/*
+ * Weave frames, top-field-first.
+ * Copyright (C) 2003 Billy Biggs <vektor@dumbterm.net>.
+ * Copyright (C) 2008,2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * Relicensed for GStreamer from GPL to LGPL with permit from Billy Biggs.
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstdeinterlacemethod.h"
+#include <string.h>
+
+#define GST_TYPE_DEINTERLACE_METHOD_WEAVE_TFF (gst_deinterlace_method_weave_tff_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_WEAVE_TFF(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE_TFF))
+#define GST_IS_DEINTERLACE_METHOD_WEAVE_TFF_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_WEAVE_TFF))
+#define GST_DEINTERLACE_METHOD_WEAVE_TFF_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE_TFF, GstDeinterlaceMethodWeaveTFFClass))
+#define GST_DEINTERLACE_METHOD_WEAVE_TFF(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_WEAVE_TFF, GstDeinterlaceMethodWeaveTFF))
+#define GST_DEINTERLACE_METHOD_WEAVE_TFF_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_WEAVE_TFF, GstDeinterlaceMethodWeaveTFFClass))
+#define GST_DEINTERLACE_METHOD_WEAVE_TFF_CAST(obj) ((GstDeinterlaceMethodWeaveTFF*)(obj))
+
+GType gst_deinterlace_method_weave_tff_get_type (void);
+
+typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodWeaveTFF;
+typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodWeaveTFFClass;
+
+static void /* TFF weave for packed formats: history line when present, else t0 (line above). */
+deinterlace_scanline_weave_packed (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) { /* no history scanline yet */
+    memcpy (out, scanlines->t0, size); /* t0 fallback (weavebff.c uses b0 here) */
+  } else {
+    memcpy (out, scanlines->m1, size); /* m1: presumably same line from the previous field — confirm in gstdeinterlacemethod.h */
+  }
+}
+
+static void /* Same TFF weave logic for the planar Y plane. */
+deinterlace_scanline_weave_planar_y (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->t0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Same TFF weave logic for the planar U plane. */
+deinterlace_scanline_weave_planar_u (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->t0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Same TFF weave logic for the planar V plane. */
+deinterlace_scanline_weave_planar_v (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  if (scanlines->m1 == NULL) {
+    memcpy (out, scanlines->t0, size);
+  } else {
+    memcpy (out, scanlines->m1, size);
+  }
+}
+
+static void /* Pass an existing packed scanline through unchanged (m0 = current line). */
+copy_scanline_packed (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing Y-plane scanline through unchanged. */
+copy_scanline_planar_y (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing U-plane scanline through unchanged. */
+copy_scanline_planar_u (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+static void /* Pass an existing V-plane scanline through unchanged. */
+copy_scanline_planar_v (GstDeinterlaceSimpleMethod * self,
+    guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+  memcpy (out, scanlines->m0, size);
+}
+
+G_DEFINE_TYPE (GstDeinterlaceMethodWeaveTFF, gst_deinterlace_method_weave_tff,
+    GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
+static void /* Register method metadata and per-format scanline callbacks. */
+gst_deinterlace_method_weave_tff_class_init (GstDeinterlaceMethodWeaveTFFClass *
+    klass)
+{
+  GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+  GstDeinterlaceSimpleMethodClass *dism_class =
+      (GstDeinterlaceSimpleMethodClass *) klass;
+
+  dim_class->fields_required = 2; /* needs a history field (m1) besides the current one */
+  dim_class->name = "Progressive: Top Field First";
+  dim_class->nick = "weavetff";
+  dim_class->latency = 1; /* declared latency: 1 */
+
+  dism_class->interpolate_scanline_ayuv = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_yuy2 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_yvyu = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_uyvy = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_nv12 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_nv21 = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_argb = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_abgr = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_rgba = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_bgra = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_rgb = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_bgr = deinterlace_scanline_weave_packed;
+  dism_class->interpolate_scanline_planar_y =
+      deinterlace_scanline_weave_planar_y;
+  dism_class->interpolate_scanline_planar_u =
+      deinterlace_scanline_weave_planar_u;
+  dism_class->interpolate_scanline_planar_v =
+      deinterlace_scanline_weave_planar_v;
+
+  dism_class->copy_scanline_ayuv = copy_scanline_packed; /* existing lines are copied verbatim */
+  dism_class->copy_scanline_yuy2 = copy_scanline_packed;
+  dism_class->copy_scanline_yvyu = copy_scanline_packed;
+  dism_class->copy_scanline_uyvy = copy_scanline_packed;
+  dism_class->copy_scanline_nv12 = copy_scanline_packed;
+  dism_class->copy_scanline_nv21 = copy_scanline_packed;
+  dism_class->copy_scanline_argb = copy_scanline_packed;
+  dism_class->copy_scanline_abgr = copy_scanline_packed;
+  dism_class->copy_scanline_rgba = copy_scanline_packed;
+  dism_class->copy_scanline_bgra = copy_scanline_packed;
+  dism_class->copy_scanline_rgb = copy_scanline_packed;
+  dism_class->copy_scanline_bgr = copy_scanline_packed;
+  dism_class->copy_scanline_planar_y = copy_scanline_planar_y;
+  dism_class->copy_scanline_planar_u = copy_scanline_planar_u;
+  dism_class->copy_scanline_planar_v = copy_scanline_planar_v;
+}
+
+static void /* GObject instance init: no per-instance state. */
+gst_deinterlace_method_weave_tff_init (GstDeinterlaceMethodWeaveTFF * self)
+{
+}
diff --git a/gst/deinterlace/tvtime/x86-64_macros.inc b/gst/deinterlace/tvtime/x86-64_macros.inc
new file mode 100644
index 0000000000..312e3c7490
--- /dev/null
+++ b/gst/deinterlace/tvtime/x86-64_macros.inc
@@ -0,0 +1,82 @@
+/*
+ *
+ * GStreamer
+ * Copyright (C) 2004 Dirk Ziegelmeier <dziegel@gmx.de>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ *
+ * See: http://bugzilla.gnome.org/show_bug.cgi?id=163578
+ */
+
+/*
+ * This file is copied from TVTIME's sources.
+ * Original author: Achim Schneider <batchall@mordor.ch>
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#ifndef XAX /* size-generic register/opcode names for inline asm; defined once */
+
+#if defined (HAVE_CPU_I386) && !defined(HAVE_CPU_X86_64)
+
+#define XAX "eax" /* 32-bit build: e?? registers, "l" instruction suffix */
+#define XBX "ebx"
+#define XCX "ecx"
+#define XDX "edx"
+#define XSI "esi"
+#define XDI "edi"
+#define XSP "esp"
+#define MOVX "movl"
+#define LEAX "leal"
+#define DECX "decl"
+#define PUSHX "pushl"
+#define POPX "popl"
+#define CMPX "cmpl"
+#define ADDX "addl"
+#define SHLX "shll"
+#define SHRX "shrl"
+#define SUBX "subl"
+
+#elif defined (HAVE_CPU_X86_64)
+
+#define XAX "rax" /* 64-bit build: r?? registers, "q" instruction suffix */
+#define XBX "rbx"
+#define XCX "rcx"
+#define XDX "rdx"
+#define XSI "rsi"
+#define XDI "rdi"
+#define XSP "rsp"
+#define MOVX "movq"
+#define LEAX "leaq"
+#define DECX "decq"
+#define PUSHX "pushq"
+#define POPX "popq"
+#define CMPX "cmpq"
+#define ADDX "addq"
+#define SHLX "shlq"
+#define SHRX "shrq"
+#define SUBX "subq"
+
+#else
+#error Undefined architecture. Define either ARCH_X86 or ARCH_X86_64.
+#endif
+
+#endif
diff --git a/gst/deinterlace/x86/x86inc.asm b/gst/deinterlace/x86/x86inc.asm
new file mode 100644
index 0000000000..7404bfed36
--- /dev/null
+++ b/gst/deinterlace/x86/x86inc.asm
@@ -0,0 +1,1701 @@
+;*****************************************************************************
+;* x86inc.asm: x264asm abstraction layer
+;*****************************************************************************
+;* Copyright (C) 2005-2018 x264 project
+;*
+;* Authors: Loren Merritt <lorenm@u.washington.edu>
+;* Henrik Gramner <henrik@gramner.com>
+;* Anton Mitrofanov <BugMaster@narod.ru>
+;* Fiona Glaser <fiona@x264.com>
+;*
+;* Permission to use, copy, modify, and/or distribute this software for any
+;* purpose with or without fee is hereby granted, provided that the above
+;* copyright notice and this permission notice appear in all copies.
+;*
+;* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+;* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+;* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+;* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+;* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+;* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+;* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+;*****************************************************************************
+
+; This is a header file for the x264ASM assembly language, which uses
+; NASM/YASM syntax combined with a large number of macros to provide easy
+; abstraction between different calling conventions (x86_32, win64, linux64).
+; It also has various other useful features to simplify writing the kind of
+; DSP functions that are most often used in x264.
+
+; Unlike the rest of x264, this file is available under an ISC license, as it
+; has significant usefulness outside of x264 and we want it to be available
+; to the largest audience possible. Of course, if you modify it for your own
+; purposes to add a new feature, we strongly encourage contributing a patch
+; as this feature might be useful for others as well. Send patches or ideas
+; to x264-devel@videolan.org .
+
+%ifndef private_prefix
+    %define private_prefix gst ; symbol prefix for this project's asm functions
+%endif
+
+%ifndef public_prefix
+    %define public_prefix private_prefix
+%endif
+
+%ifndef STACK_ALIGNMENT
+    %if ARCH_X86_64
+        %define STACK_ALIGNMENT 16 ; x86-64 ABIs guarantee 16-byte stack alignment
+    %else
+        %define STACK_ALIGNMENT 4
+    %endif
+%endif
+
+%define WIN64 0
+%define UNIX64 0
+%if ARCH_X86_64
+    %ifidn __OUTPUT_FORMAT__,win32
+        %define WIN64 1
+    %elifidn __OUTPUT_FORMAT__,win64
+        %define WIN64 1
+    %elifidn __OUTPUT_FORMAT__,x64
+        %define WIN64 1
+    %else
+        %define UNIX64 1 ; any non-Windows 64-bit target uses the SysV convention
+    %endif
+%endif
+
+; Only 1 for yasm. Workaround here.
+%define HAVE_CPUNOP 0
+
+%define FORMAT_ELF 0
+%ifidn __OUTPUT_FORMAT__,elf
+    %define FORMAT_ELF 1
+%elifidn __OUTPUT_FORMAT__,elf32
+    %define FORMAT_ELF 1
+%elifidn __OUTPUT_FORMAT__,elf64
+    %define FORMAT_ELF 1
+%endif
+
+%ifdef PREFIX
+    %define mangle(x) _ %+ x ; targets that prepend an underscore to C symbols
+%else
+    %define mangle(x) x
+%endif
+
+; aout does not support align=
+; NOTE: This section is out of sync with x264, in order to
+; keep supporting OS/2.
+%macro SECTION_RODATA 0-1 16 ; emit a read-only data section; %1 = alignment (default 16)
+    %ifidn __OUTPUT_FORMAT__,aout
+        SECTION .text ; aout has no rodata section; fall back to .text
+    %elifidn __OUTPUT_FORMAT__,coff
+        SECTION .text
+    %elifidn __OUTPUT_FORMAT__,win32
+        SECTION .rdata align=%1
+    %elif WIN64
+        SECTION .rdata align=%1
+    %else
+        SECTION .rodata align=%1
+    %endif
+%endmacro
+
+%if WIN64
+    %define PIC
+%elif ARCH_X86_64 == 0
+; x86_32 doesn't require PIC.
+; Some distros prefer shared objects to be PIC, but nothing breaks if
+; the code contains a few textrels, so we'll skip that complexity.
+    %undef PIC
+%endif
+%ifdef PIC
+    default rel ; make memory operands RIP-relative by default
+%endif
+
+%macro CPUNOP 1 ; emit a CPU directive only when the assembler supports it
+    %if HAVE_CPUNOP
+        CPU %1
+    %endif
+%endmacro
+
+; Macros to eliminate most code duplication between x86_32 and x86_64:
+; Currently this works only for leaf functions which load all their arguments
+; into registers at the start, and make no other use of the stack. Luckily that
+; covers most of x264's asm.
+
+; PROLOGUE:
+; %1 = number of arguments. loads them from stack if needed.
+; %2 = number of registers used. pushes callee-saved regs if needed.
+; %3 = number of xmm registers used. pushes callee-saved xmm regs if needed.
+; %4 = (optional) stack size to be allocated. The stack will be aligned before
+; allocating the specified stack size. If the required stack alignment is
+; larger than the known stack alignment the stack will be manually aligned
+; and an extra register will be allocated to hold the original stack
+; pointer (to not invalidate r0m etc.). To prevent the use of an extra
+; register as stack pointer, request a negative stack size.
+; %4+/%5+ = list of names to define to registers
+; PROLOGUE can also be invoked by adding the same options to cglobal
+
+; e.g.
+; cglobal foo, 2,3,7,0x40, dst, src, tmp
+; declares a function (foo) that automatically loads two arguments (dst and
+; src) into registers, uses one additional register (tmp) plus 7 vector
+; registers (m0-m6) and allocates 0x40 bytes of stack space.
+
+; TODO Some functions can use some args directly from the stack. If they're the
+; last args then you can just not declare them, but if they're in the middle
+; we need more flexible macro.
+
+; RET:
+; Pops anything that was pushed by PROLOGUE, and returns.
+
+; REP_RET:
+; Use this instead of RET if it's a branch target.
+
+; registers:
+; rN and rNq are the native-size register holding function argument N
+; rNd, rNw, rNb are dword, word, and byte size
+; rNh is the high 8 bits of the word size
+; rNm is the original location of arg N (a register or on the stack), dword
+; rNmp is native size
+
+%macro DECLARE_REG 2-3 ; arg index, native register, [stack offset when spilled]
+    %define r%1q %2
+    %define r%1d %2d
+    %define r%1w %2w
+    %define r%1b %2b
+    %define r%1h %2h
+    %define %2q %2
+    %if %0 == 2 ; register-resident argument: rNm aliases the register itself
+        %define r%1m %2d
+        %define r%1mp %2
+    %elif ARCH_X86_64 ; memory-resident argument: rNm addresses the stack slot
+        %define r%1m [rstk + stack_offset + %3]
+        %define r%1mp qword r %+ %1 %+ m
+    %else
+        %define r%1m [rstk + stack_offset + %3]
+        %define r%1mp dword r %+ %1 %+ m
+    %endif
+    %define r%1 %2
+%endmacro
+
+%macro DECLARE_REG_SIZE 3 ; base name, byte-low name, byte-high name
+    %define r%1q r%1
+    %define e%1q r%1
+    %define r%1d e%1
+    %define e%1d e%1
+    %define r%1w %1
+    %define e%1w %1
+    %define r%1h %3
+    %define e%1h %3
+    %define r%1b %2
+    %define e%1b %2
+    %if ARCH_X86_64 == 0
+        %define r%1 e%1 ; on 32-bit, the "native" size is the 32-bit register
+    %endif
+%endmacro
+
+DECLARE_REG_SIZE ax, al, ah
+DECLARE_REG_SIZE bx, bl, bh
+DECLARE_REG_SIZE cx, cl, ch
+DECLARE_REG_SIZE dx, dl, dh
+DECLARE_REG_SIZE si, sil, null ; si/di/bp have no high-byte register
+DECLARE_REG_SIZE di, dil, null
+DECLARE_REG_SIZE bp, bpl, null
+
+; t# defines for when per-arch register allocation is more complex than just function arguments
+
+%macro DECLARE_REG_TMP 1-* ; map t0,t1,... onto the given rN argument registers
+    %assign %%i 0
+    %rep %0
+        CAT_XDEFINE t, %%i, r%1
+        %assign %%i %%i+1
+        %rotate 1
+    %endrep
+%endmacro
+
+%macro DECLARE_REG_TMP_SIZE 0-* ; size-suffixed aliases (t0q/t0d/...) for each tN
+    %rep %0
+        %define t%1q t%1 %+ q
+        %define t%1d t%1 %+ d
+        %define t%1w t%1 %+ w
+        %define t%1h t%1 %+ h
+        %define t%1b t%1 %+ b
+        %rotate 1
+    %endrep
+%endmacro
+
+DECLARE_REG_TMP_SIZE 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14
+
+%if ARCH_X86_64
+    %define gprsize 8 ; size of a general-purpose register / stack slot
+%else
+    %define gprsize 4
+%endif
+
+%macro PUSH 1 ; push that also tracks stack_offset (only while rsp is the stack ptr)
+    push %1
+    %ifidn rstk, rsp
+        %assign stack_offset stack_offset+gprsize
+    %endif
+%endmacro
+
+%macro POP 1 ; pop counterpart: undoes the stack_offset adjustment
+    pop %1
+    %ifidn rstk, rsp
+        %assign stack_offset stack_offset-gprsize
+    %endif
+%endmacro
+
+%macro PUSH_IF_USED 1-* ; save each listed register iff its index < regs_used
+    %rep %0
+        %if %1 < regs_used
+            PUSH r%1
+        %endif
+        %rotate 1
+    %endrep
+%endmacro
+
+%macro POP_IF_USED 1-* ; restore counterpart (plain pop: no offset tracking needed in epilogue)
+    %rep %0
+        %if %1 < regs_used
+            pop r%1
+        %endif
+        %rotate 1
+    %endrep
+%endmacro
+
+%macro LOAD_IF_USED 1-* ; load each declared argument from its home location
+    %rep %0
+        %if %1 < num_args
+            mov r%1, r %+ %1 %+ mp
+        %endif
+        %rotate 1
+    %endrep
+%endmacro
+
+%macro SUB 2 ; sub that keeps stack_offset in sync when adjusting the stack pointer
+    sub %1, %2
+    %ifidn %1, rstk
+        %assign stack_offset stack_offset+(%2)
+    %endif
+%endmacro
+
+%macro ADD 2 ; add counterpart of SUB above
+    add %1, %2
+    %ifidn %1, rstk
+        %assign stack_offset stack_offset-(%2)
+    %endif
+%endmacro
+
+%macro movifnidn 2 ; mov only when src and dst are not the same identifier
+    %ifnidn %1, %2
+        mov %1, %2
+    %endif
+%endmacro
+
+%macro movsxdifnidn 2 ; sign-extending mov, skipped when operands are identical
+    %ifnidn %1, %2
+        movsxd %1, %2
+    %endif
+%endmacro
+
+%macro ASSERT 1 ; assembly-time assertion: fail the build when %1 evaluates to 0
+    %if (%1) == 0
+        %error assertion ``%1'' failed
+    %endif
+%endmacro
+
+; Give human-readable names to the function arguments: DEFINE_ARGS src, dst, len
+; creates srcq/srcd/... aliases for r0*, dstq/... for r1*, etc. Any names from a
+; previous invocation are undefined first.
+%macro DEFINE_ARGS 0-*
+ %ifdef n_arg_names
+ %assign %%i 0
+ %rep n_arg_names
+ CAT_UNDEF arg_name %+ %%i, q
+ CAT_UNDEF arg_name %+ %%i, d
+ CAT_UNDEF arg_name %+ %%i, w
+ CAT_UNDEF arg_name %+ %%i, h
+ CAT_UNDEF arg_name %+ %%i, b
+ CAT_UNDEF arg_name %+ %%i, m
+ CAT_UNDEF arg_name %+ %%i, mp
+ CAT_UNDEF arg_name, %%i
+ %assign %%i %%i+1
+ %endrep
+ %endif
+
+ %xdefine %%stack_offset stack_offset
+ %undef stack_offset ; so that the current value of stack_offset doesn't get baked in by xdefine
+ %assign %%i 0
+ %rep %0
+ %xdefine %1q r %+ %%i %+ q
+ %xdefine %1d r %+ %%i %+ d
+ %xdefine %1w r %+ %%i %+ w
+ %xdefine %1h r %+ %%i %+ h
+ %xdefine %1b r %+ %%i %+ b
+ %xdefine %1m r %+ %%i %+ m
+ %xdefine %1mp r %+ %%i %+ mp
+ CAT_XDEFINE arg_name, %%i, %1
+ %assign %%i %%i+1
+ %rotate 1
+ %endrep
+ %xdefine stack_offset %%stack_offset
+ %assign n_arg_names %0
+%endmacro
+
+%define required_stack_alignment ((mmsize + 15) & ~15)
+%define vzeroupper_required (mmsize > 16 && (ARCH_X86_64 == 0 || xmm_regs_used > 16 || notcpuflag(avx512)))
+%define high_mm_regs (16*cpuflag(avx512))
+
+; Reserve stack_size bytes of scratch stack. If the ABI-guaranteed alignment is
+; insufficient for mmsize, the stack is re-aligned and the original rsp is kept
+; in rstk (or spilled to [rsp+...] when stack_size is negative).
+%macro ALLOC_STACK 1-2 0 ; stack_size, n_xmm_regs (for win64 only)
+ %ifnum %1
+ %if %1 != 0
+ %assign %%pad 0
+ %assign stack_size %1
+ %if stack_size < 0
+ %assign stack_size -stack_size
+ %endif
+ %if WIN64
+ %assign %%pad %%pad + 32 ; shadow space
+ %if mmsize != 8
+ %assign xmm_regs_used %2
+ %if xmm_regs_used > 8
+ %assign %%pad %%pad + (xmm_regs_used-8)*16 ; callee-saved xmm registers
+ %endif
+ %endif
+ %endif
+ %if required_stack_alignment <= STACK_ALIGNMENT
+ ; maintain the current stack alignment
+ %assign stack_size_padded stack_size + %%pad + ((-%%pad-stack_offset-gprsize) & (STACK_ALIGNMENT-1))
+ SUB rsp, stack_size_padded
+ %else
+ %assign %%reg_num (regs_used - 1)
+ %xdefine rstk r %+ %%reg_num
+ ; align stack, and save original stack location directly above
+ ; it, i.e. in [rsp+stack_size_padded], so we can restore the
+ ; stack in a single instruction (i.e. mov rsp, rstk or mov
+ ; rsp, [rsp+stack_size_padded])
+ %if %1 < 0 ; need to store rsp on stack
+ %xdefine rstkm [rsp + stack_size + %%pad]
+ %assign %%pad %%pad + gprsize
+ %else ; can keep rsp in rstk during whole function
+ %xdefine rstkm rstk
+ %endif
+ %assign stack_size_padded stack_size + ((%%pad + required_stack_alignment-1) & ~(required_stack_alignment-1))
+ mov rstk, rsp
+ and rsp, ~(required_stack_alignment-1)
+ sub rsp, stack_size_padded
+ movifnidn rstkm, rstk
+ %endif
+ WIN64_PUSH_XMM
+ %endif
+ %endif
+%endmacro
+
+; Bump regs_used when stack re-alignment will need a spare register to hold
+; the original rsp (see ALLOC_STACK's rstk).
+%macro SETUP_STACK_POINTER 1
+ %ifnum %1
+ %if %1 != 0 && required_stack_alignment > STACK_ALIGNMENT
+ %if %1 > 0
+ ; Reserve an additional register for storing the original stack pointer, but avoid using
+ ; eax/rax for this purpose since it can potentially get overwritten as a return value.
+ %assign regs_used (regs_used + 1)
+ %if ARCH_X86_64 && regs_used == 7
+ %assign regs_used 8
+ %elif ARCH_X86_64 == 0 && regs_used == 1
+ %assign regs_used 2
+ %endif
+ %endif
+ %if ARCH_X86_64 && regs_used < 5 + UNIX64 * 3
+ ; Ensure that we don't clobber any registers containing arguments. For UNIX64 we also preserve r6 (rax)
+ ; since it's used as a hidden argument in vararg functions to specify the number of vector registers used.
+ %assign regs_used 5 + UNIX64 * 3
+ %endif
+ %endif
+ %endif
+%endmacro
+
+; Dispatch PROLOGUE's trailing parameters to DEFINE_ARGS, accounting for the
+; optional stack_size parameter (%2 numeric => it was a stack size, not a name).
+%macro DEFINE_ARGS_INTERNAL 3+
+ %ifnum %2
+ DEFINE_ARGS %3
+ %elif %1 == 4
+ DEFINE_ARGS %2
+ %elif %1 > 4
+ DEFINE_ARGS %2, %3
+ %endif
+%endmacro
+
+%if WIN64 ; Windows x64 ;=================================================
+
+; Argument registers first (rcx/rdx/r8/r9 per the Microsoft x64 ABI), then
+; volatile registers, then callee-saved ones; third column is the home-space
+; offset for spilled arguments.
+DECLARE_REG 0, rcx
+DECLARE_REG 1, rdx
+DECLARE_REG 2, R8
+DECLARE_REG 3, R9
+DECLARE_REG 4, R10, 40
+DECLARE_REG 5, R11, 48
+DECLARE_REG 6, rax, 56
+DECLARE_REG 7, rdi, 64
+DECLARE_REG 8, rsi, 72
+DECLARE_REG 9, rbx, 80
+DECLARE_REG 10, rbp, 88
+DECLARE_REG 11, R14, 96
+DECLARE_REG 12, R15, 104
+DECLARE_REG 13, R12, 112
+DECLARE_REG 14, R13, 120
+
+%macro PROLOGUE 2-5+ 0 ; #args, #regs, #xmm_regs, [stack_size,] arg_names...
+ %assign num_args %1
+ %assign regs_used %2
+ ASSERT regs_used >= num_args
+ SETUP_STACK_POINTER %4
+ ASSERT regs_used <= 15
+ PUSH_IF_USED 7, 8, 9, 10, 11, 12, 13, 14
+ ALLOC_STACK %4, %3
+ %if mmsize != 8 && stack_size == 0
+ WIN64_SPILL_XMM %3
+ %endif
+ LOAD_IF_USED 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14
+ DEFINE_ARGS_INTERNAL %0, %4, %5
+%endmacro
+
+%macro WIN64_PUSH_XMM 0
+ ; Use the shadow space to store XMM6 and XMM7, the rest needs stack space allocated.
+ %if xmm_regs_used > 6 + high_mm_regs
+ movaps [rstk + stack_offset + 8], xmm6
+ %endif
+ %if xmm_regs_used > 7 + high_mm_regs
+ movaps [rstk + stack_offset + 24], xmm7
+ %endif
+ %assign %%xmm_regs_on_stack xmm_regs_used - high_mm_regs - 8
+ %if %%xmm_regs_on_stack > 0
+ %assign %%i 8
+ %rep %%xmm_regs_on_stack
+ movaps [rsp + (%%i-8)*16 + stack_size + 32], xmm %+ %%i
+ %assign %%i %%i+1
+ %endrep
+ %endif
+%endmacro
+
+%macro WIN64_SPILL_XMM 1
+ %assign xmm_regs_used %1
+ ASSERT xmm_regs_used <= 16 + high_mm_regs
+ %assign %%xmm_regs_on_stack xmm_regs_used - high_mm_regs - 8
+ %if %%xmm_regs_on_stack > 0
+ ; Allocate stack space for callee-saved xmm registers plus shadow space and align the stack.
+ %assign %%pad %%xmm_regs_on_stack*16 + 32
+ %assign stack_size_padded %%pad + ((-%%pad-stack_offset-gprsize) & (STACK_ALIGNMENT-1))
+ SUB rsp, stack_size_padded
+ %endif
+ WIN64_PUSH_XMM
+%endmacro
+
+%macro WIN64_RESTORE_XMM_INTERNAL 0
+ %assign %%pad_size 0
+ %assign %%xmm_regs_on_stack xmm_regs_used - high_mm_regs - 8
+ %if %%xmm_regs_on_stack > 0
+ %assign %%i xmm_regs_used - high_mm_regs
+ %rep %%xmm_regs_on_stack
+ %assign %%i %%i-1
+ movaps xmm %+ %%i, [rsp + (%%i-8)*16 + stack_size + 32]
+ %endrep
+ %endif
+ %if stack_size_padded > 0
+ %if stack_size > 0 && required_stack_alignment > STACK_ALIGNMENT
+ mov rsp, rstkm
+ %else
+ add rsp, stack_size_padded
+ %assign %%pad_size stack_size_padded
+ %endif
+ %endif
+ %if xmm_regs_used > 7 + high_mm_regs
+ movaps xmm7, [rsp + stack_offset - %%pad_size + 24]
+ %endif
+ %if xmm_regs_used > 6 + high_mm_regs
+ movaps xmm6, [rsp + stack_offset - %%pad_size + 8]
+ %endif
+%endmacro
+
+%macro WIN64_RESTORE_XMM 0
+ WIN64_RESTORE_XMM_INTERNAL
+ %assign stack_offset (stack_offset-stack_size_padded)
+ %assign stack_size_padded 0
+ %assign xmm_regs_used 0
+%endmacro
+
+%define has_epilogue regs_used > 7 || stack_size > 0 || vzeroupper_required || xmm_regs_used > 6+high_mm_regs
+
+%macro RET 0
+ WIN64_RESTORE_XMM_INTERNAL
+ POP_IF_USED 14, 13, 12, 11, 10, 9, 8, 7
+ %if vzeroupper_required
+ vzeroupper
+ %endif
+ AUTO_REP_RET
+%endmacro
+
+%elif ARCH_X86_64 ; *nix x64 ;=============================================
+
+; System V AMD64 ABI: rdi/rsi/rdx/rcx/r8/r9 carry the first six arguments.
+DECLARE_REG 0, rdi
+DECLARE_REG 1, rsi
+DECLARE_REG 2, rdx
+DECLARE_REG 3, rcx
+DECLARE_REG 4, R8
+DECLARE_REG 5, R9
+DECLARE_REG 6, rax, 8
+DECLARE_REG 7, R10, 16
+DECLARE_REG 8, R11, 24
+DECLARE_REG 9, rbx, 32
+DECLARE_REG 10, rbp, 40
+DECLARE_REG 11, R14, 48
+DECLARE_REG 12, R15, 56
+DECLARE_REG 13, R12, 64
+DECLARE_REG 14, R13, 72
+
+%macro PROLOGUE 2-5+ 0 ; #args, #regs, #xmm_regs, [stack_size,] arg_names...
+ %assign num_args %1
+ %assign regs_used %2
+ %assign xmm_regs_used %3
+ ASSERT regs_used >= num_args
+ SETUP_STACK_POINTER %4
+ ASSERT regs_used <= 15
+ PUSH_IF_USED 9, 10, 11, 12, 13, 14
+ ALLOC_STACK %4
+ LOAD_IF_USED 6, 7, 8, 9, 10, 11, 12, 13, 14
+ DEFINE_ARGS_INTERNAL %0, %4, %5
+%endmacro
+
+%define has_epilogue regs_used > 9 || stack_size > 0 || vzeroupper_required
+
+%macro RET 0
+ %if stack_size_padded > 0
+ %if required_stack_alignment > STACK_ALIGNMENT
+ mov rsp, rstkm
+ %else
+ add rsp, stack_size_padded
+ %endif
+ %endif
+ POP_IF_USED 14, 13, 12, 11, 10, 9
+ %if vzeroupper_required
+ vzeroupper
+ %endif
+ AUTO_REP_RET
+%endmacro
+
+%else ; X86_32 ;==============================================================
+
+; 32-bit: all arguments live on the stack; third column is the stack offset.
+DECLARE_REG 0, eax, 4
+DECLARE_REG 1, ecx, 8
+DECLARE_REG 2, edx, 12
+DECLARE_REG 3, ebx, 16
+DECLARE_REG 4, esi, 20
+DECLARE_REG 5, edi, 24
+DECLARE_REG 6, ebp, 28
+%define rsp esp
+
+%macro DECLARE_ARG 1-*
+ %rep %0
+ %define r%1m [rstk + stack_offset + 4*%1 + 4]
+ %define r%1mp dword r%1m
+ %rotate 1
+ %endrep
+%endmacro
+
+DECLARE_ARG 7, 8, 9, 10, 11, 12, 13, 14
+
+%macro PROLOGUE 2-5+ ; #args, #regs, #xmm_regs, [stack_size,] arg_names...
+ %assign num_args %1
+ %assign regs_used %2
+ ASSERT regs_used >= num_args
+ %if num_args > 7
+ %assign num_args 7
+ %endif
+ %if regs_used > 7
+ %assign regs_used 7
+ %endif
+ SETUP_STACK_POINTER %4
+ ASSERT regs_used <= 7
+ PUSH_IF_USED 3, 4, 5, 6
+ ALLOC_STACK %4
+ LOAD_IF_USED 0, 1, 2, 3, 4, 5, 6
+ DEFINE_ARGS_INTERNAL %0, %4, %5
+%endmacro
+
+%define has_epilogue regs_used > 3 || stack_size > 0 || vzeroupper_required
+
+%macro RET 0
+ %if stack_size_padded > 0
+ %if required_stack_alignment > STACK_ALIGNMENT
+ mov rsp, rstkm
+ %else
+ add rsp, stack_size_padded
+ %endif
+ %endif
+ POP_IF_USED 6, 5, 4, 3
+ %if vzeroupper_required
+ vzeroupper
+ %endif
+ AUTO_REP_RET
+%endmacro
+
+%endif ;======================================================================
+
+; No-op stubs so non-WIN64 code can call the WIN64 xmm helpers unconditionally.
+%if WIN64 == 0
+ %macro WIN64_SPILL_XMM 1
+ %endmacro
+ %macro WIN64_RESTORE_XMM 0
+ %endmacro
+ %macro WIN64_PUSH_XMM 0
+ %endmacro
+%endif
+
+; On AMD cpus <=K10, an ordinary ret is slow if it immediately follows either
+; a branch or a branch target. So switch to a 2-byte form of ret in that case.
+; We can automatically detect "follows a branch", but not a branch target.
+; (SSSE3 is a sufficient condition to know that your cpu doesn't have this problem.)
+%macro REP_RET 0
+ %if has_epilogue || cpuflag(ssse3)
+ RET
+ %else
+ rep ret
+ %endif
+ annotate_function_size
+%endmacro
+
+%define last_branch_adr $$
+%macro AUTO_REP_RET 0
+ %if notcpuflag(ssse3)
+ times ((last_branch_adr-$)>>31)+1 rep ; times 1 iff $ == last_branch_adr.
+ %endif
+ ret
+ annotate_function_size
+%endmacro
+
+; Redefine each branch mnemonic so that emitting one records its address,
+; letting AUTO_REP_RET detect a ret that immediately follows a branch.
+%macro BRANCH_INSTR 0-*
+ %rep %0
+ %macro %1 1-2 %1
+ %2 %1
+ %if notcpuflag(ssse3)
+ %%branch_instr equ $
+ %xdefine last_branch_adr %%branch_instr
+ %endif
+ %endmacro
+ %rotate 1
+ %endrep
+%endmacro
+
+BRANCH_INSTR jz, je, jnz, jne, jl, jle, jnl, jnle, jg, jge, jng, jnge, ja, jae, jna, jnae, jb, jbe, jnb, jnbe, jc, jnc, js, jns, jo, jno, jp, jnp
+
+%macro TAIL_CALL 2 ; callee, is_nonadjacent
+ %if has_epilogue
+ call %1
+ RET
+ %elif %2
+ jmp %1
+ %endif
+ annotate_function_size
+%endmacro
+
+;=============================================================================
+; arch-independent part
+;=============================================================================
+
+%assign function_align 16
+
+; Begin a function.
+; Applies any symbol mangling needed for C linkage, and sets up a define such that
+; subsequent uses of the function name automatically refer to the mangled version.
+; Appends cpuflags to the function name if cpuflags has been specified.
+; The "" empty default parameter is a workaround for nasm, which fails if SUFFIX
+; is empty and we call cglobal_internal with just %1 %+ SUFFIX (without %2).
+%macro cglobal 1-2+ "" ; name, [PROLOGUE args]
+ cglobal_internal 1, %1 %+ SUFFIX, %2
+%endmacro
+%macro cvisible 1-2+ "" ; name, [PROLOGUE args]
+ cglobal_internal 0, %1 %+ SUFFIX, %2
+%endmacro
+; %1 selects visibility: 1 => hidden with private_prefix, 0 => public_prefix.
+%macro cglobal_internal 2-3+
+ annotate_function_size
+ %if %1
+ %xdefine %%FUNCTION_PREFIX private_prefix
+ %xdefine %%VISIBILITY hidden
+ %else
+ %xdefine %%FUNCTION_PREFIX public_prefix
+ %xdefine %%VISIBILITY
+ %endif
+ %ifndef cglobaled_%2
+ %xdefine %2 mangle(%%FUNCTION_PREFIX %+ _ %+ %2)
+ %xdefine %2.skip_prologue %2 %+ .skip_prologue
+ CAT_XDEFINE cglobaled_, %2, 1
+ %endif
+ %xdefine current_function %2
+ %xdefine current_function_section __SECT__
+ %if FORMAT_ELF
+ global %2:function %%VISIBILITY
+ %else
+ global %2
+ %endif
+ align function_align
+ %2:
+ RESET_MM_PERMUTATION ; needed for x86-64, also makes disassembly somewhat nicer
+ %xdefine rstk rsp ; copy of the original stack pointer, used when greater alignment than the known stack alignment is required
+ %assign stack_offset 0 ; stack pointer offset relative to the return address
+ %assign stack_size 0 ; amount of stack space that can be freely used inside a function
+ %assign stack_size_padded 0 ; total amount of allocated stack space, including space for callee-saved xmm registers on WIN64 and alignment padding
+ %assign xmm_regs_used 0 ; number of XMM registers requested, used for dealing with callee-saved registers on WIN64 and vzeroupper
+ %ifnidn %3, ""
+ PROLOGUE %3
+ %endif
+%endmacro
+
+; Create a global symbol from a local label with the correct name mangling and type
+%macro cglobal_label 1
+ %if FORMAT_ELF
+ global current_function %+ %1:function hidden
+ %else
+ global current_function %+ %1
+ %endif
+ %1:
+%endmacro
+
+%macro cextern 1
+ %xdefine %1 mangle(private_prefix %+ _ %+ %1)
+ CAT_XDEFINE cglobaled_, %1, 1
+ extern %1
+%endmacro
+
+; like cextern, but without the prefix
+%macro cextern_naked 1
+ %ifdef PREFIX
+ %xdefine %1 mangle(%1)
+ %endif
+ CAT_XDEFINE cglobaled_, %1, 1
+ extern %1
+%endmacro
+
+%macro const 1-2+
+ %xdefine %1 mangle(private_prefix %+ _ %+ %1)
+ %if FORMAT_ELF
+ global %1:data hidden
+ %else
+ global %1
+ %endif
+ %1: %2
+%endmacro
+
+; This is needed for ELF, otherwise the GNU linker assumes the stack is executable by default.
+%if FORMAT_ELF
+ [SECTION .note.GNU-stack noalloc noexec nowrite progbits]
+%endif
+
+; Tell debuggers how large the function was.
+; This may be invoked multiple times per function; we rely on later instances overriding earlier ones.
+; This is invoked by RET and similar macros, and also cglobal does it for the previous function,
+; but if the last function in a source file doesn't use any of the standard macros for its epilogue,
+; then its size might be unspecified.
+%macro annotate_function_size 0
+ %ifdef __YASM_VER__
+ %ifdef current_function
+ %if FORMAT_ELF
+ current_function_section
+ %%ecf equ $
+ size current_function %%ecf - current_function
+ __SECT__
+ %endif
+ %endif
+ %endif
+%endmacro
+
+; cpuflags
+
+; Each flag implies the flags it is OR'd with, so cpuflag(sse) is true in an
+; sse2 function, etc.
+%assign cpuflags_mmx (1<<0)
+%assign cpuflags_mmx2 (1<<1) | cpuflags_mmx
+%assign cpuflags_3dnow (1<<2) | cpuflags_mmx
+%assign cpuflags_3dnowext (1<<3) | cpuflags_3dnow
+%assign cpuflags_sse (1<<4) | cpuflags_mmx2
+%assign cpuflags_sse2 (1<<5) | cpuflags_sse
+%assign cpuflags_sse2slow (1<<6) | cpuflags_sse2
+%assign cpuflags_lzcnt (1<<7) | cpuflags_sse2
+%assign cpuflags_sse3 (1<<8) | cpuflags_sse2
+%assign cpuflags_ssse3 (1<<9) | cpuflags_sse3
+%assign cpuflags_sse4 (1<<10)| cpuflags_ssse3
+%assign cpuflags_sse42 (1<<11)| cpuflags_sse4
+%assign cpuflags_aesni (1<<12)| cpuflags_sse42
+%assign cpuflags_avx (1<<13)| cpuflags_sse42
+%assign cpuflags_xop (1<<14)| cpuflags_avx
+%assign cpuflags_fma4 (1<<15)| cpuflags_avx
+%assign cpuflags_fma3 (1<<16)| cpuflags_avx
+%assign cpuflags_bmi1 (1<<17)| cpuflags_avx|cpuflags_lzcnt
+%assign cpuflags_bmi2 (1<<18)| cpuflags_bmi1
+%assign cpuflags_avx2 (1<<19)| cpuflags_fma3|cpuflags_bmi2
+%assign cpuflags_avx512 (1<<20)| cpuflags_avx2 ; F, CD, BW, DQ, VL
+
+%assign cpuflags_cache32 (1<<21)
+%assign cpuflags_cache64 (1<<22)
+%assign cpuflags_aligned (1<<23) ; not a cpu feature, but a function variant
+%assign cpuflags_atom (1<<24)
+
+; Returns a boolean value expressing whether or not the specified cpuflag is enabled.
+%define cpuflag(x) (((((cpuflags & (cpuflags_ %+ x)) ^ (cpuflags_ %+ x)) - 1) >> 31) & 1)
+%define notcpuflag(x) (cpuflag(x) ^ 1)
+
+; Takes an arbitrary number of cpuflags from the above list.
+; All subsequent functions (up to the next INIT_CPUFLAGS) is built for the specified cpu.
+; You shouldn't need to invoke this macro directly, it's a subroutine for INIT_MMX &co.
+%macro INIT_CPUFLAGS 0-*
+ %xdefine SUFFIX
+ %undef cpuname
+ %assign cpuflags 0
+
+ %if %0 >= 1
+ %rep %0
+ %ifdef cpuname
+ %xdefine cpuname cpuname %+ _%1
+ %else
+ %xdefine cpuname %1
+ %endif
+ %assign cpuflags cpuflags | cpuflags_%1
+ %rotate 1
+ %endrep
+ %xdefine SUFFIX _ %+ cpuname
+
+ %if cpuflag(avx)
+ %assign avx_enabled 1
+ %endif
+ %if (mmsize == 16 && notcpuflag(sse2)) || (mmsize == 32 && notcpuflag(avx2))
+ %define mova movaps
+ %define movu movups
+ %define movnta movntps
+ %endif
+ %if cpuflag(aligned)
+ %define movu mova
+ %elif cpuflag(sse3) && notcpuflag(ssse3)
+ %define movu lddqu
+ %endif
+ %endif
+
+ %if ARCH_X86_64 || cpuflag(sse2)
+ CPUNOP amdnop
+ %else
+ CPUNOP basicnop
+ %endif
+%endmacro
+
+; Merge mmx, sse*, and avx*
+; m# is a simd register of the currently selected size
+; xm# is the corresponding xmm register if mmsize >= 16, otherwise the same as m#
+; ym# is the corresponding ymm register if mmsize >= 32, otherwise the same as m#
+; zm# is the corresponding zmm register if mmsize >= 64, otherwise the same as m#
+; (All 4 remain in sync through SWAP.)
+
+; Token-pasting helpers: define/undefine the symbol named by concatenating %1%2.
+%macro CAT_XDEFINE 3
+ %xdefine %1%2 %3
+%endmacro
+
+%macro CAT_UNDEF 2
+ %undef %1%2
+%endmacro
+
+; (Re)bind m0..m(num_mmregs-1) to the given register class (mm/xmm/ymm/zmm)
+; and drop any leftover bindings from a previously larger register set.
+%macro DEFINE_MMREGS 1 ; mmtype
+ %assign %%prev_mmregs 0
+ %ifdef num_mmregs
+ %assign %%prev_mmregs num_mmregs
+ %endif
+
+ %assign num_mmregs 8
+ %if ARCH_X86_64 && mmsize >= 16
+ %assign num_mmregs 16
+ %if cpuflag(avx512) || mmsize == 64
+ %assign num_mmregs 32
+ %endif
+ %endif
+
+ %assign %%i 0
+ %rep num_mmregs
+ CAT_XDEFINE m, %%i, %1 %+ %%i
+ CAT_XDEFINE nn%1, %%i, %%i
+ %assign %%i %%i+1
+ %endrep
+ %if %%prev_mmregs > num_mmregs
+ %rep %%prev_mmregs - num_mmregs
+ CAT_UNDEF m, %%i
+ CAT_UNDEF nn %+ mmtype, %%i
+ %assign %%i %%i+1
+ %endrep
+ %endif
+ %xdefine mmtype %1
+%endmacro
+
+; Prefer registers 16-31 over 0-15 to avoid having to use vzeroupper
+%macro AVX512_MM_PERMUTATION 0-1 0 ; start_reg
+ %if ARCH_X86_64 && cpuflag(avx512)
+ %assign %%i %1
+ %rep 16-%1
+ %assign %%i_high %%i+16
+ SWAP %%i, %%i_high
+ %assign %%i %%i+1
+ %endrep
+ %endif
+%endmacro
+
+; INIT_MMX/XMM/YMM/ZMM: select the SIMD register class for subsequent code,
+; set mmsize and the generic mova/movu/movh/movnta aliases, and apply cpuflags.
+%macro INIT_MMX 0-1+
+ %assign avx_enabled 0
+ %define RESET_MM_PERMUTATION INIT_MMX %1
+ %define mmsize 8
+ %define mova movq
+ %define movu movq
+ %define movh movd
+ %define movnta movntq
+ INIT_CPUFLAGS %1
+ DEFINE_MMREGS mm
+%endmacro
+
+%macro INIT_XMM 0-1+
+ %assign avx_enabled 0
+ %define RESET_MM_PERMUTATION INIT_XMM %1
+ %define mmsize 16
+ %define mova movdqa
+ %define movu movdqu
+ %define movh movq
+ %define movnta movntdq
+ INIT_CPUFLAGS %1
+ DEFINE_MMREGS xmm
+ %if WIN64
+ AVX512_MM_PERMUTATION 6 ; Swap callee-saved registers with volatile registers
+ %endif
+%endmacro
+
+%macro INIT_YMM 0-1+
+ %assign avx_enabled 1
+ %define RESET_MM_PERMUTATION INIT_YMM %1
+ %define mmsize 32
+ %define mova movdqa
+ %define movu movdqu
+ %undef movh
+ %define movnta movntdq
+ INIT_CPUFLAGS %1
+ DEFINE_MMREGS ymm
+ AVX512_MM_PERMUTATION
+%endmacro
+
+%macro INIT_ZMM 0-1+
+ %assign avx_enabled 1
+ %define RESET_MM_PERMUTATION INIT_ZMM %1
+ %define mmsize 64
+ %define mova movdqa
+ %define movu movdqu
+ %undef movh
+ %define movnta movntdq
+ INIT_CPUFLAGS %1
+ DEFINE_MMREGS zmm
+ AVX512_MM_PERMUTATION
+%endmacro
+
+INIT_XMM
+
+; Cross-size register casts: e.g. xm3 resolves to the xmm register that shares
+; a number with whatever m3 currently is, regardless of the active size.
+%macro DECLARE_MMCAST 1
+ %define mmmm%1 mm%1
+ %define mmxmm%1 mm%1
+ %define mmymm%1 mm%1
+ %define mmzmm%1 mm%1
+ %define xmmmm%1 mm%1
+ %define xmmxmm%1 xmm%1
+ %define xmmymm%1 xmm%1
+ %define xmmzmm%1 xmm%1
+ %define ymmmm%1 mm%1
+ %define ymmxmm%1 xmm%1
+ %define ymmymm%1 ymm%1
+ %define ymmzmm%1 ymm%1
+ %define zmmmm%1 mm%1
+ %define zmmxmm%1 xmm%1
+ %define zmmymm%1 ymm%1
+ %define zmmzmm%1 zmm%1
+ %define xm%1 xmm %+ m%1
+ %define ym%1 ymm %+ m%1
+ %define zm%1 zmm %+ m%1
+%endmacro
+
+%assign i 0
+%rep 32
+ DECLARE_MMCAST i
+ %assign i i+1
+%endrep
+
+; I often want to use macros that permute their arguments. e.g. there's no
+; efficient way to implement butterfly or transpose or dct without swapping some
+; arguments.
+;
+; I would like to not have to manually keep track of the permutations:
+; If I insert a permutation in the middle of a function, it should automatically
+; change everything that follows. For more complex macros I may also have multiple
+; implementations, e.g. the SSE2 and SSSE3 versions may have different permutations.
+;
+; Hence these macros. Insert a PERMUTE or some SWAPs at the end of a macro that
+; permutes its arguments. It's equivalent to exchanging the contents of the
+; registers, except that this way you exchange the register names instead, so it
+; doesn't cost any cycles.
+
+%macro PERMUTE 2-* ; takes a list of pairs to swap
+ %rep %0/2
+ %xdefine %%tmp%2 m%2
+ %rotate 2
+ %endrep
+ %rep %0/2
+ %xdefine m%1 %%tmp%2
+ CAT_XDEFINE nn, m%1, %1
+ %rotate 2
+ %endrep
+%endmacro
+
+%macro SWAP 2+ ; swaps a single chain (sometimes more concise than pairs)
+ %ifnum %1 ; SWAP 0, 1, ...
+ SWAP_INTERNAL_NUM %1, %2
+ %else ; SWAP m0, m1, ...
+ SWAP_INTERNAL_NAME %1, %2
+ %endif
+%endmacro
+
+%macro SWAP_INTERNAL_NUM 2-*
+ %rep %0-1
+ %xdefine %%tmp m%1
+ %xdefine m%1 m%2
+ %xdefine m%2 %%tmp
+ CAT_XDEFINE nn, m%1, %1
+ CAT_XDEFINE nn, m%2, %2
+ %rotate 1
+ %endrep
+%endmacro
+
+; Translate register names to their nn# numbers, then defer to the numeric swap.
+%macro SWAP_INTERNAL_NAME 2-*
+ %xdefine %%args nn %+ %1
+ %rep %0-1
+ %xdefine %%args %%args, nn %+ %2
+ %rotate 1
+ %endrep
+ SWAP_INTERNAL_NUM %%args
+%endmacro
+
+; If SAVE_MM_PERMUTATION is placed at the end of a function, then any later
+; calls to that function will automatically load the permutation, so values can
+; be returned in mmregs.
+%macro SAVE_MM_PERMUTATION 0-1
+ %if %0
+ %xdefine %%f %1_m
+ %else
+ %xdefine %%f current_function %+ _m
+ %endif
+ %assign %%i 0
+ %rep num_mmregs
+ CAT_XDEFINE %%f, %%i, m %+ %%i
+ %assign %%i %%i+1
+ %endrep
+%endmacro
+
+%macro LOAD_MM_PERMUTATION 1 ; name to load from
+ %ifdef %1_m0
+ %assign %%i 0
+ %rep num_mmregs
+ CAT_XDEFINE m, %%i, %1_m %+ %%i
+ CAT_XDEFINE nn, m %+ %%i, %%i
+ %assign %%i %%i+1
+ %endrep
+ %endif
+%endmacro
+
+; Append cpuflags to the callee's name iff the appended name is known and the plain name isn't
+%macro call 1
+ %ifid %1
+ call_internal %1 %+ SUFFIX, %1
+ %else
+ call %1
+ %endif
+%endmacro
+%macro call_internal 2
+ %xdefine %%i %2
+ %ifndef cglobaled_%2
+ %ifdef cglobaled_%1
+ %xdefine %%i %1
+ %endif
+ %endif
+ call %%i
+ LOAD_MM_PERMUTATION %%i
+%endmacro
+
+; Substitutions that reduce instruction size but are functionally equivalent
+; (add/sub of 128 encodes as the opposite op with an imm8 of -128)
+%macro add 2
+ %ifnum %2
+ %if %2==128
+ sub %1, -128
+ %else
+ add %1, %2
+ %endif
+ %else
+ add %1, %2
+ %endif
+%endmacro
+
+%macro sub 2
+ %ifnum %2
+ %if %2==128
+ add %1, -128
+ %else
+ sub %1, %2
+ %endif
+ %else
+ sub %1, %2
+ %endif
+%endmacro
+
+;=============================================================================
+; AVX abstraction layer
+;=============================================================================
+
+; Per-register lookup tables: sizeof<reg> and regnumof<reg> for every mm/xmm/ymm/zmm name.
+%assign i 0
+%rep 32
+ %if i < 8
+ CAT_XDEFINE sizeofmm, i, 8
+ CAT_XDEFINE regnumofmm, i, i
+ %endif
+ CAT_XDEFINE sizeofxmm, i, 16
+ CAT_XDEFINE sizeofymm, i, 32
+ CAT_XDEFINE sizeofzmm, i, 64
+ CAT_XDEFINE regnumofxmm, i, i
+ CAT_XDEFINE regnumofymm, i, i
+ CAT_XDEFINE regnumofzmm, i, i
+ %assign i i+1
+%endrep
+%undef i
+
+; Error out if SSE emulation of an AVX op would clobber a source operand (%2 == any of %3+).
+%macro CHECK_AVX_INSTR_EMU 3-*
+ %xdefine %%opcode %1
+ %xdefine %%dst %2
+ %rep %0-2
+ %ifidn %%dst, %3
+ %error non-avx emulation of ``%%opcode'' is not supported
+ %endif
+ %rotate 1
+ %endrep
+%endmacro
+
+;%1 == instruction
+;%2 == minimal instruction set
+;%3 == 1 if float, 0 if int
+;%4 == 1 if 4-operand emulation, 0 if 3-operand emulation, 255 otherwise (no emulation)
+;%5 == 1 if commutative (i.e. doesn't matter which src arg is which), 0 if not
+;%6+: operands
+%macro RUN_AVX_INSTR 6-9+
+ %ifnum sizeof%7
+ %assign __sizeofreg sizeof%7
+ %elifnum sizeof%6
+ %assign __sizeofreg sizeof%6
+ %else
+ %assign __sizeofreg mmsize
+ %endif
+ %assign __emulate_avx 0
+ %if avx_enabled && __sizeofreg >= 16
+ %xdefine __instr v%1
+ %else
+ %xdefine __instr %1
+ %if %0 >= 8+%4
+ %assign __emulate_avx 1
+ %endif
+ %endif
+ ; Diagnose use of an instruction above the function's declared cpu level.
+ %ifnidn %2, fnord
+ %ifdef cpuname
+ %if notcpuflag(%2)
+ %error use of ``%1'' %2 instruction in cpuname function: current_function
+ %elif cpuflags_%2 < cpuflags_sse && notcpuflag(sse2) && __sizeofreg > 8
+ %error use of ``%1'' sse2 instruction in cpuname function: current_function
+ %endif
+ %endif
+ %endif
+
+ %if __emulate_avx
+ %xdefine __src1 %7
+ %xdefine __src2 %8
+ %if %5 && %4 == 0
+ %ifnidn %6, %7
+ %ifidn %6, %8
+ %xdefine __src1 %8
+ %xdefine __src2 %7
+ %elifnnum sizeof%8
+ ; 3-operand AVX instructions with a memory arg can only have it in src2,
+ ; whereas SSE emulation prefers to have it in src1 (i.e. the mov).
+ ; So, if the instruction is commutative with a memory arg, swap them.
+ %xdefine __src1 %8
+ %xdefine __src2 %7
+ %endif
+ %endif
+ %endif
+ %ifnidn %6, __src1
+ %if %0 >= 9
+ CHECK_AVX_INSTR_EMU {%1 %6, %7, %8, %9}, %6, __src2, %9
+ %else
+ CHECK_AVX_INSTR_EMU {%1 %6, %7, %8}, %6, __src2
+ %endif
+ %if __sizeofreg == 8
+ MOVQ %6, __src1
+ %elif %3
+ MOVAPS %6, __src1
+ %else
+ MOVDQA %6, __src1
+ %endif
+ %endif
+ %if %0 >= 9
+ %1 %6, __src2, %9
+ %else
+ %1 %6, __src2
+ %endif
+ %elif %0 >= 9
+ __instr %6, %7, %8, %9
+ %elif %0 == 8
+ __instr %6, %7, %8
+ %elif %0 == 7
+ __instr %6, %7
+ %else
+ __instr %6
+ %endif
+%endmacro
+
+;%1 == instruction
+;%2 == minimal instruction set
+;%3 == 1 if float, 0 if int
+;%4 == 1 if 4-operand emulation, 0 if 3-operand emulation, 255 otherwise (no emulation)
+;%5 == 1 if commutative (i.e. doesn't matter which src arg is which), 0 if not
+%macro AVX_INSTR 1-5 fnord, 0, 255, 0
+ %macro %1 1-10 fnord, fnord, fnord, fnord, %1, %2, %3, %4, %5
+ %ifidn %2, fnord
+ RUN_AVX_INSTR %6, %7, %8, %9, %10, %1
+ %elifidn %3, fnord
+ RUN_AVX_INSTR %6, %7, %8, %9, %10, %1, %2
+ %elifidn %4, fnord
+ RUN_AVX_INSTR %6, %7, %8, %9, %10, %1, %2, %3
+ %elifidn %5, fnord
+ RUN_AVX_INSTR %6, %7, %8, %9, %10, %1, %2, %3, %4
+ %else
+ RUN_AVX_INSTR %6, %7, %8, %9, %10, %1, %2, %3, %4, %5
+ %endif
+ %endmacro
+%endmacro
+
+; Instructions with both VEX/EVEX and legacy encodings
+; Non-destructive instructions are written without parameters
+; Columns: name, minimal cpu flag set, float?, 4-operand-emulation?, commutative?
+AVX_INSTR addpd, sse2, 1, 0, 1
+AVX_INSTR addps, sse, 1, 0, 1
+AVX_INSTR addsd, sse2, 1, 0, 0
+AVX_INSTR addss, sse, 1, 0, 0
+AVX_INSTR addsubpd, sse3, 1, 0, 0
+AVX_INSTR addsubps, sse3, 1, 0, 0
+AVX_INSTR aesdec, aesni, 0, 0, 0
+AVX_INSTR aesdeclast, aesni, 0, 0, 0
+AVX_INSTR aesenc, aesni, 0, 0, 0
+AVX_INSTR aesenclast, aesni, 0, 0, 0
+AVX_INSTR aesimc, aesni
+AVX_INSTR aeskeygenassist, aesni
+AVX_INSTR andnpd, sse2, 1, 0, 0
+AVX_INSTR andnps, sse, 1, 0, 0
+AVX_INSTR andpd, sse2, 1, 0, 1
+AVX_INSTR andps, sse, 1, 0, 1
+AVX_INSTR blendpd, sse4, 1, 1, 0
+AVX_INSTR blendps, sse4, 1, 1, 0
+AVX_INSTR blendvpd, sse4 ; can't be emulated
+AVX_INSTR blendvps, sse4 ; can't be emulated
+AVX_INSTR cmpeqpd, sse2, 1, 0, 1
+AVX_INSTR cmpeqps, sse, 1, 0, 1
+AVX_INSTR cmpeqsd, sse2, 1, 0, 0
+AVX_INSTR cmpeqss, sse, 1, 0, 0
+AVX_INSTR cmplepd, sse2, 1, 0, 0
+AVX_INSTR cmpleps, sse, 1, 0, 0
+AVX_INSTR cmplesd, sse2, 1, 0, 0
+AVX_INSTR cmpless, sse, 1, 0, 0
+AVX_INSTR cmpltpd, sse2, 1, 0, 0
+AVX_INSTR cmpltps, sse, 1, 0, 0
+AVX_INSTR cmpltsd, sse2, 1, 0, 0
+AVX_INSTR cmpltss, sse, 1, 0, 0
+AVX_INSTR cmpneqpd, sse2, 1, 0, 1
+AVX_INSTR cmpneqps, sse, 1, 0, 1
+AVX_INSTR cmpneqsd, sse2, 1, 0, 0
+AVX_INSTR cmpneqss, sse, 1, 0, 0
+AVX_INSTR cmpnlepd, sse2, 1, 0, 0
+AVX_INSTR cmpnleps, sse, 1, 0, 0
+AVX_INSTR cmpnlesd, sse2, 1, 0, 0
+AVX_INSTR cmpnless, sse, 1, 0, 0
+AVX_INSTR cmpnltpd, sse2, 1, 0, 0
+AVX_INSTR cmpnltps, sse, 1, 0, 0
+AVX_INSTR cmpnltsd, sse2, 1, 0, 0
+AVX_INSTR cmpnltss, sse, 1, 0, 0
+; Fix: the comma after the cpuflag was missing on these four lines, making NASM
+; parse "sse2 1" as a single macro parameter. That shifted every flag one slot
+; left (float? became 0, 4-op-emulation? became 1, commutative? defaulted to 0)
+; and broke the cpuflag level check. Flags now match the sibling cmpunord* lines:
+; packed forms are commutative, scalar forms are not, all are float ops.
+AVX_INSTR cmpordpd, sse2, 1, 0, 1
+AVX_INSTR cmpordps, sse, 1, 0, 1
+AVX_INSTR cmpordsd, sse2, 1, 0, 0
+AVX_INSTR cmpordss, sse, 1, 0, 0
+; Columns: name, minimal cpu flag set, float?, 4-operand-emulation?, commutative?
+AVX_INSTR cmppd, sse2, 1, 1, 0
+AVX_INSTR cmpps, sse, 1, 1, 0
+AVX_INSTR cmpsd, sse2, 1, 1, 0
+AVX_INSTR cmpss, sse, 1, 1, 0
+AVX_INSTR cmpunordpd, sse2, 1, 0, 1
+AVX_INSTR cmpunordps, sse, 1, 0, 1
+AVX_INSTR cmpunordsd, sse2, 1, 0, 0
+AVX_INSTR cmpunordss, sse, 1, 0, 0
+AVX_INSTR comisd, sse2
+AVX_INSTR comiss, sse
+AVX_INSTR cvtdq2pd, sse2
+AVX_INSTR cvtdq2ps, sse2
+AVX_INSTR cvtpd2dq, sse2
+AVX_INSTR cvtpd2ps, sse2
+AVX_INSTR cvtps2dq, sse2
+AVX_INSTR cvtps2pd, sse2
+AVX_INSTR cvtsd2si, sse2
+AVX_INSTR cvtsd2ss, sse2, 1, 0, 0
+AVX_INSTR cvtsi2sd, sse2, 1, 0, 0
+AVX_INSTR cvtsi2ss, sse, 1, 0, 0
+AVX_INSTR cvtss2sd, sse2, 1, 0, 0
+AVX_INSTR cvtss2si, sse
+AVX_INSTR cvttpd2dq, sse2
+AVX_INSTR cvttps2dq, sse2
+AVX_INSTR cvttsd2si, sse2
+AVX_INSTR cvttss2si, sse
+AVX_INSTR divpd, sse2, 1, 0, 0
+AVX_INSTR divps, sse, 1, 0, 0
+AVX_INSTR divsd, sse2, 1, 0, 0
+AVX_INSTR divss, sse, 1, 0, 0
+AVX_INSTR dppd, sse4, 1, 1, 0
+AVX_INSTR dpps, sse4, 1, 1, 0
+AVX_INSTR extractps, sse4
+AVX_INSTR haddpd, sse3, 1, 0, 0
+AVX_INSTR haddps, sse3, 1, 0, 0
+AVX_INSTR hsubpd, sse3, 1, 0, 0
+AVX_INSTR hsubps, sse3, 1, 0, 0
+AVX_INSTR insertps, sse4, 1, 1, 0
+AVX_INSTR lddqu, sse3
+AVX_INSTR ldmxcsr, sse
+AVX_INSTR maskmovdqu, sse2
+AVX_INSTR maxpd, sse2, 1, 0, 1
+AVX_INSTR maxps, sse, 1, 0, 1
+AVX_INSTR maxsd, sse2, 1, 0, 0
+AVX_INSTR maxss, sse, 1, 0, 0
+AVX_INSTR minpd, sse2, 1, 0, 1
+AVX_INSTR minps, sse, 1, 0, 1
+AVX_INSTR minsd, sse2, 1, 0, 0
+AVX_INSTR minss, sse, 1, 0, 0
+AVX_INSTR movapd, sse2
+AVX_INSTR movaps, sse
+AVX_INSTR movd, mmx
+AVX_INSTR movddup, sse3
+AVX_INSTR movdqa, sse2
+AVX_INSTR movdqu, sse2
+AVX_INSTR movhlps, sse, 1, 0, 0
+AVX_INSTR movhpd, sse2, 1, 0, 0
+AVX_INSTR movhps, sse, 1, 0, 0
+AVX_INSTR movlhps, sse, 1, 0, 0
+AVX_INSTR movlpd, sse2, 1, 0, 0
+AVX_INSTR movlps, sse, 1, 0, 0
+AVX_INSTR movmskpd, sse2
+AVX_INSTR movmskps, sse
+AVX_INSTR movntdq, sse2
+AVX_INSTR movntdqa, sse4
+AVX_INSTR movntpd, sse2
+AVX_INSTR movntps, sse
+AVX_INSTR movq, mmx
+AVX_INSTR movsd, sse2, 1, 0, 0
+AVX_INSTR movshdup, sse3
+AVX_INSTR movsldup, sse3
+AVX_INSTR movss, sse, 1, 0, 0
+AVX_INSTR movupd, sse2
+AVX_INSTR movups, sse
+AVX_INSTR mpsadbw, sse4, 0, 1, 0
+AVX_INSTR mulpd, sse2, 1, 0, 1
+AVX_INSTR mulps, sse, 1, 0, 1
+AVX_INSTR mulsd, sse2, 1, 0, 0
+AVX_INSTR mulss, sse, 1, 0, 0
+AVX_INSTR orpd, sse2, 1, 0, 1
+AVX_INSTR orps, sse, 1, 0, 1
+AVX_INSTR pabsb, ssse3
+AVX_INSTR pabsd, ssse3
+AVX_INSTR pabsw, ssse3
+AVX_INSTR packsswb, mmx, 0, 0, 0
+AVX_INSTR packssdw, mmx, 0, 0, 0
+AVX_INSTR packuswb, mmx, 0, 0, 0
+AVX_INSTR packusdw, sse4, 0, 0, 0
+AVX_INSTR paddb, mmx, 0, 0, 1
+AVX_INSTR paddw, mmx, 0, 0, 1
+AVX_INSTR paddd, mmx, 0, 0, 1
+AVX_INSTR paddq, sse2, 0, 0, 1
+AVX_INSTR paddsb, mmx, 0, 0, 1
+AVX_INSTR paddsw, mmx, 0, 0, 1
+AVX_INSTR paddusb, mmx, 0, 0, 1
+AVX_INSTR paddusw, mmx, 0, 0, 1
+AVX_INSTR palignr, ssse3, 0, 1, 0
+AVX_INSTR pand, mmx, 0, 0, 1
+AVX_INSTR pandn, mmx, 0, 0, 0
+AVX_INSTR pavgb, mmx2, 0, 0, 1
+AVX_INSTR pavgw, mmx2, 0, 0, 1
+AVX_INSTR pblendvb, sse4 ; can't be emulated
+AVX_INSTR pblendw, sse4, 0, 1, 0
+AVX_INSTR pclmulqdq, fnord, 0, 1, 0
+AVX_INSTR pclmulhqhqdq, fnord, 0, 0, 0
+AVX_INSTR pclmulhqlqdq, fnord, 0, 0, 0
+AVX_INSTR pclmullqhqdq, fnord, 0, 0, 0
+AVX_INSTR pclmullqlqdq, fnord, 0, 0, 0
+AVX_INSTR pcmpestri, sse42
+AVX_INSTR pcmpestrm, sse42
+AVX_INSTR pcmpistri, sse42
+AVX_INSTR pcmpistrm, sse42
+AVX_INSTR pcmpeqb, mmx, 0, 0, 1
+AVX_INSTR pcmpeqw, mmx, 0, 0, 1
+AVX_INSTR pcmpeqd, mmx, 0, 0, 1
+AVX_INSTR pcmpeqq, sse4, 0, 0, 1
+AVX_INSTR pcmpgtb, mmx, 0, 0, 0
+AVX_INSTR pcmpgtw, mmx, 0, 0, 0
+AVX_INSTR pcmpgtd, mmx, 0, 0, 0
+AVX_INSTR pcmpgtq, sse42, 0, 0, 0
+AVX_INSTR pextrb, sse4
+AVX_INSTR pextrd, sse4
+AVX_INSTR pextrq, sse4
+AVX_INSTR pextrw, mmx2
+AVX_INSTR phaddw, ssse3, 0, 0, 0
+AVX_INSTR phaddd, ssse3, 0, 0, 0
+AVX_INSTR phaddsw, ssse3, 0, 0, 0
+AVX_INSTR phminposuw, sse4
+AVX_INSTR phsubw, ssse3, 0, 0, 0
+AVX_INSTR phsubd, ssse3, 0, 0, 0
+AVX_INSTR phsubsw, ssse3, 0, 0, 0
+AVX_INSTR pinsrb, sse4, 0, 1, 0
+AVX_INSTR pinsrd, sse4, 0, 1, 0
+AVX_INSTR pinsrq, sse4, 0, 1, 0
+AVX_INSTR pinsrw, mmx2, 0, 1, 0
+AVX_INSTR pmaddwd, mmx, 0, 0, 1
+AVX_INSTR pmaddubsw, ssse3, 0, 0, 0
+AVX_INSTR pmaxsb, sse4, 0, 0, 1
+AVX_INSTR pmaxsw, mmx2, 0, 0, 1
+AVX_INSTR pmaxsd, sse4, 0, 0, 1
+AVX_INSTR pmaxub, mmx2, 0, 0, 1
+AVX_INSTR pmaxuw, sse4, 0, 0, 1
+AVX_INSTR pmaxud, sse4, 0, 0, 1
+AVX_INSTR pminsb, sse4, 0, 0, 1
+AVX_INSTR pminsw, mmx2, 0, 0, 1
+AVX_INSTR pminsd, sse4, 0, 0, 1
+AVX_INSTR pminub, mmx2, 0, 0, 1
+AVX_INSTR pminuw, sse4, 0, 0, 1
+AVX_INSTR pminud, sse4, 0, 0, 1
+AVX_INSTR pmovmskb, mmx2
+AVX_INSTR pmovsxbw, sse4
+AVX_INSTR pmovsxbd, sse4
+AVX_INSTR pmovsxbq, sse4
+AVX_INSTR pmovsxwd, sse4
+AVX_INSTR pmovsxwq, sse4
+AVX_INSTR pmovsxdq, sse4
+AVX_INSTR pmovzxbw, sse4
+AVX_INSTR pmovzxbd, sse4
+AVX_INSTR pmovzxbq, sse4
+AVX_INSTR pmovzxwd, sse4
+AVX_INSTR pmovzxwq, sse4
+AVX_INSTR pmovzxdq, sse4
+AVX_INSTR pmuldq, sse4, 0, 0, 1
+AVX_INSTR pmulhrsw, ssse3, 0, 0, 1
+AVX_INSTR pmulhuw, mmx2, 0, 0, 1
+AVX_INSTR pmulhw, mmx, 0, 0, 1
+AVX_INSTR pmullw, mmx, 0, 0, 1
+AVX_INSTR pmulld, sse4, 0, 0, 1
+AVX_INSTR pmuludq, sse2, 0, 0, 1
+AVX_INSTR por, mmx, 0, 0, 1
+AVX_INSTR psadbw, mmx2, 0, 0, 1
+AVX_INSTR pshufb, ssse3, 0, 0, 0
+AVX_INSTR pshufd, sse2
+AVX_INSTR pshufhw, sse2
+AVX_INSTR pshuflw, sse2
+AVX_INSTR psignb, ssse3, 0, 0, 0
+AVX_INSTR psignw, ssse3, 0, 0, 0
+AVX_INSTR psignd, ssse3, 0, 0, 0
+AVX_INSTR psllw, mmx, 0, 0, 0
+AVX_INSTR pslld, mmx, 0, 0, 0
+AVX_INSTR psllq, mmx, 0, 0, 0
+AVX_INSTR pslldq, sse2, 0, 0, 0
+AVX_INSTR psraw, mmx, 0, 0, 0
+AVX_INSTR psrad, mmx, 0, 0, 0
+AVX_INSTR psrlw, mmx, 0, 0, 0
+AVX_INSTR psrld, mmx, 0, 0, 0
+AVX_INSTR psrlq, mmx, 0, 0, 0
+AVX_INSTR psrldq, sse2, 0, 0, 0
+AVX_INSTR psubb, mmx, 0, 0, 0
+AVX_INSTR psubw, mmx, 0, 0, 0
+AVX_INSTR psubd, mmx, 0, 0, 0
+AVX_INSTR psubq, sse2, 0, 0, 0
+AVX_INSTR psubsb, mmx, 0, 0, 0
+AVX_INSTR psubsw, mmx, 0, 0, 0
+AVX_INSTR psubusb, mmx, 0, 0, 0
+AVX_INSTR psubusw, mmx, 0, 0, 0
+AVX_INSTR ptest, sse4
+AVX_INSTR punpckhbw, mmx, 0, 0, 0
+AVX_INSTR punpckhwd, mmx, 0, 0, 0
+AVX_INSTR punpckhdq, mmx, 0, 0, 0
+AVX_INSTR punpckhqdq, sse2, 0, 0, 0
+AVX_INSTR punpcklbw, mmx, 0, 0, 0
+AVX_INSTR punpcklwd, mmx, 0, 0, 0
+AVX_INSTR punpckldq, mmx, 0, 0, 0
+AVX_INSTR punpcklqdq, sse2, 0, 0, 0
+AVX_INSTR pxor, mmx, 0, 0, 1
+AVX_INSTR rcpps, sse
+AVX_INSTR rcpss, sse, 1, 0, 0
+AVX_INSTR roundpd, sse4
+AVX_INSTR roundps, sse4
+AVX_INSTR roundsd, sse4, 1, 1, 0
+AVX_INSTR roundss, sse4, 1, 1, 0
+AVX_INSTR rsqrtps, sse
+AVX_INSTR rsqrtss, sse, 1, 0, 0
+AVX_INSTR shufpd, sse2, 1, 1, 0
+AVX_INSTR shufps, sse, 1, 1, 0
+AVX_INSTR sqrtpd, sse2
+AVX_INSTR sqrtps, sse
+AVX_INSTR sqrtsd, sse2, 1, 0, 0
+AVX_INSTR sqrtss, sse, 1, 0, 0
+AVX_INSTR stmxcsr, sse
+AVX_INSTR subpd, sse2, 1, 0, 0
+AVX_INSTR subps, sse, 1, 0, 0
+AVX_INSTR subsd, sse2, 1, 0, 0
+AVX_INSTR subss, sse, 1, 0, 0
+AVX_INSTR ucomisd, sse2
+AVX_INSTR ucomiss, sse
+AVX_INSTR unpckhpd, sse2, 1, 0, 0
+AVX_INSTR unpckhps, sse, 1, 0, 0
+AVX_INSTR unpcklpd, sse2, 1, 0, 0
+AVX_INSTR unpcklps, sse, 1, 0, 0
+AVX_INSTR xorpd, sse2, 1, 0, 1
+AVX_INSTR xorps, sse, 1, 0, 1
+
+; 3DNow! instructions, for sharing code between AVX, SSE and 3DNow!
+AVX_INSTR pfadd, 3dnow, 1, 0, 1
+AVX_INSTR pfsub, 3dnow, 1, 0, 0
+AVX_INSTR pfmul, 3dnow, 1, 0, 1
+
+; base-4 constants for shuffles
+%assign i 0
+%rep 256
+ %assign j ((i>>6)&3)*1000 + ((i>>4)&3)*100 + ((i>>2)&3)*10 + (i&3)
+ %if j < 10
+ CAT_XDEFINE q000, j, i
+ %elif j < 100
+ CAT_XDEFINE q00, j, i
+ %elif j < 1000
+ CAT_XDEFINE q0, j, i
+ %else
+ CAT_XDEFINE q, j, i
+ %endif
+ %assign i i+1
+%endrep
+%undef i
+%undef j
+
+%macro FMA_INSTR 3
+ %macro %1 4-7 %1, %2, %3
+ %if cpuflag(xop)
+ v%5 %1, %2, %3, %4
+ %elifnidn %1, %4
+ %6 %1, %2, %3
+ %7 %1, %4
+ %else
+ %error non-xop emulation of ``%5 %1, %2, %3, %4'' is not supported
+ %endif
+ %endmacro
+%endmacro
+
+FMA_INSTR pmacsww, pmullw, paddw
+FMA_INSTR pmacsdd, pmulld, paddd ; sse4 emulation
+FMA_INSTR pmacsdql, pmuldq, paddq ; sse4 emulation
+FMA_INSTR pmadcswd, pmaddwd, paddd
+
+; tzcnt is equivalent to "rep bsf" and is backwards-compatible with bsf.
+; This lets us use tzcnt without bumping the yasm version requirement yet.
+%define tzcnt rep bsf
+
+; Macros for consolidating FMA3 and FMA4 using 4-operand (dst, src1, src2, src3) syntax.
+; FMA3 is only possible if dst is the same as one of the src registers.
+; Either src2 or src3 can be a memory operand.
+%macro FMA4_INSTR 2-*
+ %push fma4_instr
+ %xdefine %$prefix %1
+ %rep %0 - 1
+ %macro %$prefix%2 4-6 %$prefix, %2
+ %if notcpuflag(fma3) && notcpuflag(fma4)
+ %error use of ``%5%6'' fma instruction in cpuname function: current_function
+ %elif cpuflag(fma4)
+ v%5%6 %1, %2, %3, %4
+ %elifidn %1, %2
+ ; If %3 or %4 is a memory operand it needs to be encoded as the last operand.
+ %ifnum sizeof%3
+ v%{5}213%6 %2, %3, %4
+ %else
+ v%{5}132%6 %2, %4, %3
+ %endif
+ %elifidn %1, %3
+ v%{5}213%6 %3, %2, %4
+ %elifidn %1, %4
+ v%{5}231%6 %4, %2, %3
+ %else
+ %error fma3 emulation of ``%5%6 %1, %2, %3, %4'' is not supported
+ %endif
+ %endmacro
+ %rotate 1
+ %endrep
+ %pop
+%endmacro
+
+FMA4_INSTR fmadd, pd, ps, sd, ss
+FMA4_INSTR fmaddsub, pd, ps
+FMA4_INSTR fmsub, pd, ps, sd, ss
+FMA4_INSTR fmsubadd, pd, ps
+FMA4_INSTR fnmadd, pd, ps, sd, ss
+FMA4_INSTR fnmsub, pd, ps, sd, ss
+
+; Macros for converting VEX instructions to equivalent EVEX ones.
+%macro EVEX_INSTR 2-3 0 ; vex, evex, prefer_evex
+ %macro %1 2-7 fnord, fnord, %1, %2, %3
+ %ifidn %3, fnord
+ %define %%args %1, %2
+ %elifidn %4, fnord
+ %define %%args %1, %2, %3
+ %else
+ %define %%args %1, %2, %3, %4
+ %endif
+ %assign %%evex_required cpuflag(avx512) & %7
+ %ifnum regnumof%1
+ %if regnumof%1 >= 16 || sizeof%1 > 32
+ %assign %%evex_required 1
+ %endif
+ %endif
+ %ifnum regnumof%2
+ %if regnumof%2 >= 16 || sizeof%2 > 32
+ %assign %%evex_required 1
+ %endif
+ %endif
+ %if %%evex_required
+ %6 %%args
+ %else
+ %5 %%args ; Prefer VEX over EVEX due to shorter instruction length
+ %endif
+ %endmacro
+%endmacro
+
+EVEX_INSTR vbroadcastf128, vbroadcastf32x4
+EVEX_INSTR vbroadcasti128, vbroadcasti32x4
+EVEX_INSTR vextractf128, vextractf32x4
+EVEX_INSTR vextracti128, vextracti32x4
+EVEX_INSTR vinsertf128, vinsertf32x4
+EVEX_INSTR vinserti128, vinserti32x4
+EVEX_INSTR vmovdqa, vmovdqa32
+EVEX_INSTR vmovdqu, vmovdqu32
+EVEX_INSTR vpand, vpandd
+EVEX_INSTR vpandn, vpandnd
+EVEX_INSTR vpor, vpord
+EVEX_INSTR vpxor, vpxord
+EVEX_INSTR vrcpps, vrcp14ps, 1 ; EVEX versions have higher precision
+EVEX_INSTR vrcpss, vrcp14ss, 1
+EVEX_INSTR vrsqrtps, vrsqrt14ps, 1
+EVEX_INSTR vrsqrtss, vrsqrt14ss, 1
+
+; workaround: vpbroadcastq is broken in x86_32 due to a yasm bug (fixed in 1.3.0)
+%ifdef __YASM_VER__
+ %if __YASM_VERSION_ID__ < 0x01030000 && ARCH_X86_64 == 0
+ %macro vpbroadcastq 2
+ %if sizeof%1 == 16
+ movddup %1, %2
+ %else
+ vbroadcastsd %1, %2
+ %endif
+ %endmacro
+ %endif
+%endif
diff --git a/gst/deinterlace/x86/yadif.asm b/gst/deinterlace/x86/yadif.asm
new file mode 100644
index 0000000000..08734bd395
--- /dev/null
+++ b/gst/deinterlace/x86/yadif.asm
@@ -0,0 +1,410 @@
+;*****************************************************************************
+;* x86-optimized functions for yadif filter
+;* Copyright (C) 2020 Vivia Nikolaidou <vivia.nikolaidou@ltnglobal.com>
+;*
+;* Based on libav's vf_yadif.asm file
+;* Copyright (C) 2006 Michael Niedermayer <michaelni@gmx.at>
+;* Copyright (c) 2013 Daniel Kang <daniel.d.kang@gmail.com>
+;*
+;* This file is part of FFmpeg.
+;*
+;* FFmpeg is free software; you can redistribute it and/or
+;* modify it under the terms of the GNU Lesser General Public
+;* License as published by the Free Software Foundation; either
+;* version 2.1 of the License, or (at your option) any later version.
+;*
+;* FFmpeg is distributed in the hope that it will be useful,
+;* but WITHOUT ANY WARRANTY; without even the implied warranty of
+;* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+;* Lesser General Public License for more details.
+;*
+;* You should have received a copy of the GNU Lesser General Public
+;* License along with FFmpeg; if not, write to the Free Software
+;* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+;******************************************************************************
+
+%include "x86inc.asm"
+
+SECTION_RODATA
+
+; 16 bytes of value 1
+pb_1: times 16 db 1
+; 8 words of value 1
+pw_1: times 8 dw 1
+
+SECTION .text
+
+%macro ABS1 2
+%if cpuflag(ssse3)
+ pabsw %1, %1
+%elif cpuflag(mmxext) ; a, tmp
+ pxor %2, %2
+ psubw %2, %1
+ pmaxsw %1, %2
+%else ; a, tmp
+ pxor %2, %2
+ pcmpgtw %2, %1
+ pxor %1, %2
+ psubw %1, %2
+%endif
+%endmacro
+
+%macro CHECK 2
+; %1 = 1+j, %2 = 1-j
+ ; m2 = t0[x+1+j]
+ movu m2, [tzeroq+%1]
+ ; m3 = b0[x+1-j]
+ movu m3, [bzeroq+%2]
+ ; m4 = t0[x+1+j]
+ mova m4, m2
+ ; m5 = t0[x+1+j]
+ mova m5, m2
+    ; m4 = xor(t0[x+1+j], b0[x+1-j])
+ pxor m4, m3
+ pavgb m5, m3
+ ; round down to 0
+ pand m4, [pb_1]
+ ; m5 = rounded down average of the whole thing
+ psubusb m5, m4
+    ; shift right by 1 byte to prepare for spatial_pred
+ psrldq m5, 1
+ ; m7 = 0
+ ; Interleave low-order bytes with 0
+ ; so one pixel doesn't spill into the next one
+ punpcklbw m5, m7
+ ; m4 = t0[x+1+j] (reset)
+ mova m4, m2
+ ; m2 = t0[x+1+j] - b0[x+1-j]
+ psubusb m2, m3
+ ; m3 = -m2
+ psubusb m3, m4
+ ; m2 = FFABS(t0[x+1+j] - b0[x+1-j]);
+ pmaxub m2, m3
+ ; m3 = FFABS(t0[x+1+j] - b0[x+1-j]);
+ mova m3, m2
+    ; m4 = FFABS(t0[x+1+j] - b0[x+1-j])
+ mova m4, m2
+ ; m3 = FFABS(t0[x+j] - b0[x-j])
+ psrldq m3, 1
+ ; m4 = FFABS(t0[x-1+j] - b0[x-1-j])
+ psrldq m4, 2
+ ; prevent pixel spilling for all of them
+ punpcklbw m2, m7
+ punpcklbw m3, m7
+ punpcklbw m4, m7
+ paddw m2, m3
+ ; m2 = score
+ paddw m2, m4
+%endmacro
+
+%macro CHECK1 0
+; m0 was spatial_score
+; m1 was spatial_pred
+ mova m3, m0
+ ; compare for greater than
+ ; each word will be 1111 or 0000
+ pcmpgtw m3, m2
+ ; if (score < spatial_score) spatial_score = score;
+ pminsw m0, m2
+ ; m6 = the mask
+ mova m6, m3
+ ; m5 = becomes 0 if it should change
+ pand m5, m3
+ ; nand: m3 = becomes 0 if it should not change
+ pandn m3, m1
+ ; m3 = put them together in an OR
+ por m3, m5
+ ; and put it in spatial_pred
+ mova m1, m3
+%endmacro
+
+%macro CHECK2 0
+; m6 was the mask from CHECK1 (we don't change it)
+ paddw m6, [pw_1]
+ ; shift words left while shifting in 14 0s (16 - j)
+ ; essentially to not recalculate the mask!
+ psllw m6, 14
+ ; add it to score
+ paddsw m2, m6
+ ; same as CHECK1
+ mova m3, m0
+ pcmpgtw m3, m2
+ pminsw m0, m2
+ pand m5, m3
+ pandn m3, m1
+ por m3, m5
+ mova m1, m3
+%endmacro
+
+%macro LOAD 2
+ movh %1, %2
+ punpcklbw %1, m7
+%endmacro
+
+%macro FILTER_HEAD 0
+ ; m7 = 0
+ pxor m7, m7
+ ; m0 = c
+ LOAD m0, [tzeroq]
+ ; m1 = e
+ LOAD m1, [bzeroq]
+ ; m3 = mp
+ LOAD m3, [mpq]
+ ; m2 = m1
+ LOAD m2, [moneq]
+ ; m4 = mp
+ mova m4, m3
+ ; m3 = m1 + mp
+ paddw m3, m2
+ ; m3 = d
+ psraw m3, 1
+ ; rsp + 0 = d
+ mova [rsp+ 0], m3
+ ; m2 = m1 - mp
+ psubw m2, m4
+ ; m2 = temporal_diff0 (m4 is temporary)
+ ABS1 m2, m4
+ ; m3 = t2
+ LOAD m3, [ttwoq]
+ ; m4 = b2
+ LOAD m4, [btwoq]
+ ; m3 = t2 - c
+ psubw m3, m0
+ ; m4 = b2 - e
+ psubw m4, m1
+ ; m3 = ABS(t2 - c)
+ ABS1 m3, m5
+ ; m4 = ABS(b2 - e)
+ ABS1 m4, m5
+ paddw m3, m4
+ psrlw m2, 1
+ ; m3 = temporal_diff1
+ psrlw m3, 1
+ ; m2 = left part of diff
+ pmaxsw m2, m3
+ ; m3 = tp2
+ LOAD m3, [tptwoq]
+ ; m4 = bp2
+ LOAD m4, [bptwoq]
+ psubw m3, m0
+ psubw m4, m1
+ ABS1 m3, m5
+ ABS1 m4, m5
+ paddw m3, m4
+ ; m3 = temporal_diff2
+ psrlw m3, 1
+ ; m2 = diff (for real)
+ pmaxsw m2, m3
+ ; rsp + 16 = diff
+ mova [rsp+16], m2
+
+ ; m1 = e + c
+ paddw m1, m0
+ ; m0 = 2c
+ paddw m0, m0
+ ; m0 = c - e
+ psubw m0, m1
+ ; m1 = spatial_pred
+ psrlw m1, 1
+ ; m0 = FFABS(c-e)
+ ABS1 m0, m2
+
+ ; m2 = t0[x-1]
+ ; if it's unpacked it should contain 4 bytes
+ movu m2, [tzeroq-1]
+ ; m3 = b0[x-1]
+ movu m3, [bzeroq-1]
+ ; m4 = t0[x-1]
+ mova m4, m2
+ ; m2 = t0[x-1]-b0[x-1] unsigned packed
+ psubusb m2, m3
+ ; m3 = m3 - m4 = b0[x-1]-t0[x-1] = -m2 unsigned packed
+ psubusb m3, m4
+ ; m2 = max(m2, -m2) = abs(t0[x-1]-b0[x-1])
+ pmaxub m2, m3
+%if mmsize == 16
+    ; m3 = m2 shifted right by 2 bytes
+ ; pixel jump: go from x-1 to x+1
+ mova m3, m2
+ psrldq m3, 2
+%else
+ pshufw m3, m2, q0021
+%endif
+ ; m7 = 0
+ ; unpack and interleave low-order bytes
+ ; to prevent pixel spilling when adding
+ punpcklbw m2, m7
+ punpcklbw m3, m7
+ paddw m0, m2
+ paddw m0, m3
+ ; m0 = spatial_score
+ psubw m0, [pw_1]
+
+ CHECK -2, 0
+ CHECK1
+ CHECK -3, 1
+ CHECK2
+ CHECK 0, -2
+ CHECK1
+ CHECK 1, -3
+ CHECK2
+ ; now m0 = spatial_score, m1 = spatial_pred
+
+ ; m6 = diff
+ mova m6, [rsp+16]
+%endmacro
+
+%macro FILTER_TAIL 0
+ ; m2 = d
+ mova m2, [rsp]
+ ; m3 = d
+ mova m3, m2
+ ; m2 = d - diff
+ psubw m2, m6
+ ; m3 = d + diff
+ paddw m3, m6
+ ; m1 = max(spatial_pred, d-diff)
+ pmaxsw m1, m2
+ ; m1 = min(d + diff, max(spatial_pred, d-diff))
+ ; m1 = spatial_pred
+ pminsw m1, m3
+ ; Converts 8 signed word integers into 16 unsigned byte integers with saturation
+ packuswb m1, m1
+
+ ; dst = spatial_pred
+ movh [dstq], m1
+ ; half the register size
+ add dstq, mmsize/2
+ add tzeroq, mmsize/2
+ add bzeroq, mmsize/2
+ add moneq, mmsize/2
+ add mpq, mmsize/2
+ add ttwoq, mmsize/2
+ add btwoq, mmsize/2
+ add tptwoq, mmsize/2
+ add bptwoq, mmsize/2
+ add ttoneq, mmsize/2
+ add ttpq, mmsize/2
+ add bboneq, mmsize/2
+ add bbpq, mmsize/2
+%endmacro
+
+%macro FILTER_MODE0 0
+.loop0:
+ FILTER_HEAD
+ ; m2 = tt1
+ LOAD m2, [ttoneq]
+ ; m4 = ttp
+ LOAD m4, [ttpq]
+ ; m3 = bb1
+ LOAD m3, [bboneq]
+ ; m5 = bbp
+ LOAD m5, [bbpq]
+ paddw m2, m4
+ paddw m3, m5
+ ; m2 = b
+ psrlw m2, 1
+ ; m3 = f
+ psrlw m3, 1
+ ; m4 = c
+ LOAD m4, [tzeroq]
+ ; m5 = d
+ mova m5, [rsp]
+ ; m7 = e
+ LOAD m7, [bzeroq]
+ ; m2 = b - c
+ psubw m2, m4
+ ; m3 = f - e
+ psubw m3, m7
+ ; m0 = d
+ mova m0, m5
+ ; m5 = d - c
+ psubw m5, m4
+ ; m0 = d - e
+ psubw m0, m7
+ ; m4 = b - c
+ mova m4, m2
+ ; m2 = FFMIN(b-c, f-e)
+ pminsw m2, m3
+ ; m3 = FFMAX(f-e, b-c)
+ pmaxsw m3, m4
+ ; m2 = FFMAX(d-c, FFMIN(b-c, f-e))
+ pmaxsw m2, m5
+ ; m3 = FFMIN(d-c, FFMAX(f-e, b-c))
+ pminsw m3, m5
+ ; m2 = max
+ pmaxsw m2, m0
+ ; m3 = min
+ pminsw m3, m0
+ ; m4 = 0
+ pxor m4, m4
+ ; m6 = MAX(diff, min)
+ pmaxsw m6, m3
+ ; m4 = -max
+ psubw m4, m2
+ ; m6 = diff
+ pmaxsw m6, m4
+
+ FILTER_TAIL
+ ; r13m = w
+ sub DWORD r13m, mmsize/2
+ jg .loop0
+%endmacro
+
+%macro FILTER_MODE2 0
+.loop2:
+ FILTER_HEAD
+ FILTER_TAIL
+ ; r13m = w
+ sub DWORD r13m, mmsize/2
+ jg .loop2
+%endmacro
+
+%macro YADIF_ADD3 0
+ ; start 3 pixels later
+ add dstq, 3
+ add tzeroq, 3
+ add bzeroq, 3
+ add moneq, 3
+ add mpq, 3
+ add ttwoq, 3
+ add btwoq, 3
+ add tptwoq, 3
+ add bptwoq, 3
+ add ttoneq, 3
+ add ttpq, 3
+ add bboneq, 3
+ add bbpq, 3
+%endmacro
+
+; cglobal foo, 2,3,7,0x40, dst, src, tmp
+; declares a function (foo) that automatically loads two arguments (dst and
+; src) into registers, uses one additional register (tmp) plus 7 vector
+; registers (m0-m6) and allocates 0x40 bytes of stack space.
+%macro YADIF_MODE0 0
+cglobal yadif_filter_line_mode0, 13, 14, 8, 80, dst, tzero, bzero, mone, mp, \
+ ttwo, btwo, tptwo, bptwo, ttone, \
+ ttp, bbone, bbp, w
+
+ YADIF_ADD3
+ FILTER_MODE0
+ RET
+%endmacro
+
+%macro YADIF_MODE2 0
+cglobal yadif_filter_line_mode2, 13, 14, 8, 80, dst, tzero, bzero, mone, mp, \
+ ttwo, btwo, tptwo, bptwo, ttone, \
+ ttp, bbone, bbp, w
+
+ YADIF_ADD3
+ FILTER_MODE2
+ RET
+%endmacro
+
+; declares two functions for ssse3, and two for sse2
+INIT_XMM ssse3
+YADIF_MODE0
+YADIF_MODE2
+INIT_XMM sse2
+YADIF_MODE0
+YADIF_MODE2
diff --git a/gst/deinterlace/yadif.c b/gst/deinterlace/yadif.c
new file mode 100644
index 0000000000..3692e157a3
--- /dev/null
+++ b/gst/deinterlace/yadif.c
@@ -0,0 +1,486 @@
+/*
+ * GStreamer
+ * Copyright (C) 2019 Jan Schmidt <jan@centricular.com>
+ *
+ * Portions of this file extracted from libav
+ * Copyright (C) 2006 Michael Niedermayer <michaelni@gmx.at>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/gst.h>
+#ifdef HAVE_ORC
+#include <orc/orc.h>
+#include <orc/orcsse.h>
+#endif
+#include "gstdeinterlacemethod.h"
+#include "yadif.h"
+
+#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 96) || defined(__clang__)
+#define ALWAYS_INLINE __attribute__((always_inline)) inline
+#elif defined(_MSC_VER)
+#define ALWAYS_INLINE __forceinline
+#else
+#define ALWAYS_INLINE inline
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#define GST_TYPE_DEINTERLACE_METHOD_YADIF (gst_deinterlace_method_yadif_get_type ())
+#define GST_IS_DEINTERLACE_METHOD_YADIF(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_DEINTERLACE_METHOD_YADIF))
+#define GST_IS_DEINTERLACE_METHOD_YADIF_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_DEINTERLACE_METHOD_YADIF))
+#define GST_DEINTERLACE_METHOD_YADIF_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DEINTERLACE_METHOD_YADIF, GstDeinterlaceMethodYadifClass))
+#define GST_DEINTERLACE_METHOD_YADIF(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_DEINTERLACE_METHOD_YADIF, GstDeinterlaceMethodYadif))
+#define GST_DEINTERLACE_METHOD_YADIF_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_DEINTERLACE_METHOD_YADIF, GstDeinterlaceMethodYadifClass))
+#define GST_DEINTERLACE_METHOD_YADIF_CAST(obj) ((GstDeinterlaceMethodYadif*)(obj))
+
+typedef GstDeinterlaceSimpleMethod GstDeinterlaceMethodYadif;
+typedef GstDeinterlaceSimpleMethodClass GstDeinterlaceMethodYadifClass;
+
+G_DEFINE_TYPE (GstDeinterlaceMethodYadif,
+ gst_deinterlace_method_yadif, GST_TYPE_DEINTERLACE_SIMPLE_METHOD);
+
+static void
+filter_scanline_yadif (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size,
+ int colors, int y_alternates_every);
+
+static void
+filter_scanline_yadif_planar (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size);
+
+static void
+filter_scanline_yadif_semiplanar (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size);
+
+static void
+filter_scanline_yadif_packed_4 (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size);
+
+static void
+filter_scanline_yadif_packed_yvyu (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size);
+
+static void
+filter_scanline_yadif_packed_uyvy (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size);
+
+static void
+filter_scanline_yadif_packed_3 (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * scanlines, guint size);
+
+static void
+filter_line_c_planar_mode0 (void *ORC_RESTRICT dst,
+ const void *ORC_RESTRICT tzero, const void *ORC_RESTRICT bzero,
+ const void *ORC_RESTRICT mone, const void *ORC_RESTRICT mp,
+ const void *ORC_RESTRICT ttwo, const void *ORC_RESTRICT btwo,
+ const void *ORC_RESTRICT tptwo, const void *ORC_RESTRICT bptwo,
+ const void *ORC_RESTRICT ttone, const void *ORC_RESTRICT ttp,
+ const void *ORC_RESTRICT bbone, const void *ORC_RESTRICT bbp, int w);
+
+static void
+filter_line_c_planar_mode2 (void *ORC_RESTRICT dst,
+ const void *ORC_RESTRICT tzero, const void *ORC_RESTRICT bzero,
+ const void *ORC_RESTRICT mone, const void *ORC_RESTRICT mp,
+ const void *ORC_RESTRICT ttwo, const void *ORC_RESTRICT btwo,
+ const void *ORC_RESTRICT tptwo, const void *ORC_RESTRICT bptwo,
+ const void *ORC_RESTRICT ttone, const void *ORC_RESTRICT ttp,
+ const void *ORC_RESTRICT bbone, const void *ORC_RESTRICT bbp, int w);
+
+static void (*filter_mode2) (void *ORC_RESTRICT dst,
+ const void *ORC_RESTRICT tzero, const void *ORC_RESTRICT bzero,
+ const void *ORC_RESTRICT mone, const void *ORC_RESTRICT mp,
+ const void *ORC_RESTRICT ttwo, const void *ORC_RESTRICT btwo,
+ const void *ORC_RESTRICT tptwo, const void *ORC_RESTRICT bptwo,
+ const void *ORC_RESTRICT ttone, const void *ORC_RESTRICT ttp,
+ const void *ORC_RESTRICT bbone, const void *ORC_RESTRICT bbp, int w);
+
+static void (*filter_mode0) (void *ORC_RESTRICT dst,
+ const void *ORC_RESTRICT tzero, const void *ORC_RESTRICT bzero,
+ const void *ORC_RESTRICT mone, const void *ORC_RESTRICT mp,
+ const void *ORC_RESTRICT ttwo, const void *ORC_RESTRICT btwo,
+ const void *ORC_RESTRICT tptwo, const void *ORC_RESTRICT bptwo,
+ const void *ORC_RESTRICT ttone, const void *ORC_RESTRICT ttp,
+ const void *ORC_RESTRICT bbone, const void *ORC_RESTRICT bbp, int w);
+
+
+static void
+copy_scanline (GstDeinterlaceSimpleMethod * self, guint8 * out,
+ const GstDeinterlaceScanlineData * scanlines, guint size)
+{
+ memcpy (out, scanlines->m0, size);
+}
+
+static void
+ gst_deinterlace_method_yadif_class_init
+ (GstDeinterlaceMethodYadifClass * klass)
+{
+ GstDeinterlaceMethodClass *dim_class = (GstDeinterlaceMethodClass *) klass;
+ GstDeinterlaceSimpleMethodClass *dism_class =
+ (GstDeinterlaceSimpleMethodClass *) klass;
+
+ dim_class->name = "YADIF Adaptive Deinterlacer";
+ dim_class->nick = "yadif";
+ dim_class->fields_required = 5;
+ dim_class->latency = 2;
+
+ dism_class->copy_scanline_planar_y = copy_scanline;
+ dism_class->copy_scanline_planar_u = copy_scanline;
+ dism_class->copy_scanline_planar_v = copy_scanline;
+ dism_class->copy_scanline_yuy2 = copy_scanline;
+ dism_class->copy_scanline_yvyu = copy_scanline;
+ dism_class->copy_scanline_uyvy = copy_scanline;
+ dism_class->copy_scanline_ayuv = copy_scanline;
+ dism_class->copy_scanline_argb = copy_scanline;
+ dism_class->copy_scanline_abgr = copy_scanline;
+ dism_class->copy_scanline_rgba = copy_scanline;
+ dism_class->copy_scanline_bgra = copy_scanline;
+ dism_class->copy_scanline_rgb = copy_scanline;
+ dism_class->copy_scanline_bgr = copy_scanline;
+ dism_class->copy_scanline_nv12 = copy_scanline;
+ dism_class->copy_scanline_nv21 = copy_scanline;
+
+ dism_class->interpolate_scanline_planar_y = filter_scanline_yadif_planar;
+ dism_class->interpolate_scanline_planar_u = filter_scanline_yadif_planar;
+ dism_class->interpolate_scanline_planar_v = filter_scanline_yadif_planar;
+ dism_class->interpolate_scanline_yuy2 = filter_scanline_yadif_packed_yvyu;
+ dism_class->interpolate_scanline_yvyu = filter_scanline_yadif_packed_yvyu;
+ dism_class->interpolate_scanline_uyvy = filter_scanline_yadif_packed_uyvy;
+ dism_class->interpolate_scanline_ayuv = filter_scanline_yadif_packed_4;
+ dism_class->interpolate_scanline_argb = filter_scanline_yadif_packed_4;
+ dism_class->interpolate_scanline_abgr = filter_scanline_yadif_packed_4;
+ dism_class->interpolate_scanline_rgba = filter_scanline_yadif_packed_4;
+ dism_class->interpolate_scanline_bgra = filter_scanline_yadif_packed_4;
+ dism_class->interpolate_scanline_rgb = filter_scanline_yadif_packed_3;
+ dism_class->interpolate_scanline_bgr = filter_scanline_yadif_packed_3;
+ dism_class->interpolate_scanline_nv12 = filter_scanline_yadif_semiplanar;
+ dism_class->interpolate_scanline_nv21 = filter_scanline_yadif_semiplanar;
+}
+
+#define FFABS(a) ABS(a)
+#define FFMIN(a,b) MIN(a,b)
+#define FFMAX(a,b) MAX(a,b)
+#define FFMAX3(a,b,c) FFMAX(FFMAX(a,b),c)
+#define FFMIN3(a,b,c) FFMIN(FFMIN(a,b),c)
+
+#define CHECK(j1, j2, j3)\
+ { int score = FFABS(stzero[x - j1] - sbzero[x - j2])\
+ + FFABS(stzero[x + j3] - sbzero[x - j3])\
+ + FFABS(stzero[x + j1] - sbzero[x + j2]);\
+ if (score < spatial_score) {\
+ spatial_score= score;\
+ spatial_pred= (stzero[x + j3] + sbzero[x - j3])>>1;\
+
+/* The is_not_edge argument here controls when the code will enter a branch
+ * which reads up to and including x-3 and x+3. */
+
+#define FILTER(start, end, is_not_edge) \
+ for (x = start; x < end; x++) { \
+ int c = stzero[x]; \
+ int d = (smone[x] + smp[x])>>1; \
+ int e = sbzero[x]; \
+ int temporal_diff0 = FFABS(smone[x] - smp[x]); \
+ int temporal_diff1 =(FFABS(sttwo[x] - c) + FFABS(sbtwo[x] - e) )>>1; \
+ int temporal_diff2 =(FFABS(stptwo[x] - c) + FFABS(sbptwo[x] - e) )>>1; \
+ int diff = FFMAX3(temporal_diff0 >> 1, temporal_diff1, temporal_diff2); \
+ int spatial_pred = (c+e) >> 1; \
+ int colors2 = colors; \
+ if ((y_alternates_every == 1 && (x%2 == 0)) || \
+ (y_alternates_every == 2 && (x%2 == 1))) \
+ colors2 = 2; \
+ \
+ if (is_not_edge) {\
+ int spatial_score = FFABS(stzero[x-colors2] - sbzero[x-colors2]) + FFABS(c-e) \
+ + FFABS(stzero[x+colors2] - sbzero[x+colors2]); \
+ int twice_colors2 = colors2 << 1; \
+ int minus_colors2 = -colors2; \
+ int thrice_colors2 = colors2 * 3; \
+ int minus2_colors2 = colors2 * -2; \
+ CHECK(0, twice_colors2, minus_colors2) \
+ CHECK(-colors2, thrice_colors2, minus2_colors2) }} }} \
+ CHECK(twice_colors2, 0, colors2) \
+ CHECK(thrice_colors2, minus_colors2, twice_colors2) }} }} \
+ }\
+ \
+ if (!(mode&2)) { \
+ int b = (sttone[x] + sttp[x])>>1; \
+ int f = (sbbone[x] + sbbp[x])>>1; \
+ int max = FFMAX3(d - e, d - c, FFMIN(b - c, f - e)); \
+ int min = FFMIN3(d - e, d - c, FFMAX(b - c, f - e)); \
+ \
+ diff = FFMAX3(diff, min, -max); \
+ } \
+ \
+ if (spatial_pred > d + diff) \
+ spatial_pred = d + diff; \
+ else if (spatial_pred < d - diff) \
+ spatial_pred = d - diff; \
+ \
+ sdst[x] = spatial_pred; \
+ \
+ }
+
+ALWAYS_INLINE static void
+filter_line_c (guint8 * sdst, const guint8 * stzero, const guint8 * sbzero,
+ const guint8 * smone, const guint8 * smp, const guint8 * sttwo,
+ const guint8 * sbtwo, const guint8 * stptwo, const guint8 * sbptwo,
+ const guint8 * sttone, const guint8 * sttp, const guint8 * sbbone,
+ const guint8 * sbbp, int w, int colors, int y_alternates_every, int start,
+ int end, int mode)
+{
+ int x;
+
+ /* The function is called for processing the middle
+ * pixels of each line, excluding 3 at each end.
+ * This allows the FILTER macro to be
+ * called so that it processes all the pixels normally. A constant value of
+ * true for is_not_edge lets the compiler ignore the if statement. */
+ FILTER (start, end, 1)
+}
+
+#define MAX_ALIGN 8
+
+ALWAYS_INLINE static void
+filter_line_c_planar (void *ORC_RESTRICT dst, const void *ORC_RESTRICT tzero,
+ const void *ORC_RESTRICT bzero, const void *ORC_RESTRICT mone,
+ const void *ORC_RESTRICT mp, const void *ORC_RESTRICT ttwo,
+ const void *ORC_RESTRICT btwo, const void *ORC_RESTRICT tptwo,
+ const void *ORC_RESTRICT bptwo, const void *ORC_RESTRICT ttone,
+ const void *ORC_RESTRICT ttp, const void *ORC_RESTRICT bbone,
+ const void *ORC_RESTRICT bbp, int w, int mode)
+{
+ int x;
+ const int start = 0;
+ const int colors = 1;
+ const int y_alternates_every = 0;
+ /* hardcode colors = 1, bpp = 1 */
+ const int end = w;
+ guint8 *sdst = (guint8 *) dst + 3;
+ guint8 *stzero = (guint8 *) tzero + 3;
+ guint8 *sbzero = (guint8 *) bzero + 3;
+ guint8 *smone = (guint8 *) mone + 3;
+ guint8 *smp = (guint8 *) mp + 3;
+ guint8 *sttwo = (guint8 *) ttwo + 3;
+ guint8 *sbtwo = (guint8 *) btwo + 3;
+ guint8 *stptwo = (guint8 *) tptwo + 3;
+ guint8 *sbptwo = (guint8 *) bptwo + 3;
+ guint8 *sttone = (guint8 *) ttone + 3;
+ guint8 *sttp = (guint8 *) ttp + 3;
+ guint8 *sbbone = (guint8 *) bbone + 3;
+ guint8 *sbbp = (guint8 *) bbp + 3;
+ /* The function is called for processing the middle
+ * pixels of each line, excluding 3 at each end.
+ * This allows the FILTER macro to be
+ * called so that it processes all the pixels normally. A constant value of
+ * true for is_not_edge lets the compiler ignore the if statement. */
+ FILTER (start, end, 1)
+}
+
+ALWAYS_INLINE G_GNUC_UNUSED static void
+filter_line_c_planar_mode0 (void *ORC_RESTRICT dst,
+ const void *ORC_RESTRICT tzero, const void *ORC_RESTRICT bzero,
+ const void *ORC_RESTRICT mone, const void *ORC_RESTRICT mp,
+ const void *ORC_RESTRICT ttwo, const void *ORC_RESTRICT btwo,
+ const void *ORC_RESTRICT tptwo, const void *ORC_RESTRICT bptwo,
+ const void *ORC_RESTRICT ttone, const void *ORC_RESTRICT ttp,
+ const void *ORC_RESTRICT bbone, const void *ORC_RESTRICT bbp, int w)
+{
+ filter_line_c_planar (dst, tzero, bzero, mone, mp, ttwo, btwo, tptwo, bptwo,
+ ttone, ttp, bbone, bbp, w, 0);
+}
+
+ALWAYS_INLINE G_GNUC_UNUSED static void
+filter_line_c_planar_mode2 (void *ORC_RESTRICT dst,
+ const void *ORC_RESTRICT tzero, const void *ORC_RESTRICT bzero,
+ const void *ORC_RESTRICT mone, const void *ORC_RESTRICT mp,
+ const void *ORC_RESTRICT ttwo, const void *ORC_RESTRICT btwo,
+ const void *ORC_RESTRICT tptwo, const void *ORC_RESTRICT bptwo,
+ const void *ORC_RESTRICT ttone, const void *ORC_RESTRICT ttp,
+ const void *ORC_RESTRICT bbone, const void *ORC_RESTRICT bbp, int w)
+{
+ filter_line_c_planar (dst, tzero, bzero, mone, mp, ttwo, btwo, tptwo, bptwo,
+ ttone, ttp, bbone, bbp, w, 2);
+}
+
+ALWAYS_INLINE static void
+filter_edges (guint8 * sdst, const guint8 * stzero, const guint8 * sbzero,
+ const guint8 * smone, const guint8 * smp, const guint8 * sttwo,
+ const guint8 * sbtwo, const guint8 * stptwo, const guint8 * sbptwo,
+ const guint8 * sttone, const guint8 * sttp, const guint8 * sbbone,
+ const guint8 * sbbp, int w, int colors, int y_alternates_every,
+ int mode, const int bpp)
+{
+ int x;
+ const int edge = colors * (MAX_ALIGN / bpp);
+ const int border = 3 * colors;
+
+ /* Only edge pixels need to be processed here. A constant value of false
+ * for is_not_edge should let the compiler ignore the whole branch. */
+ FILTER (0, border, 0)
+ FILTER (w - edge, w - border, 1)
+ FILTER (w - border, w, 0)
+}
+
+static void
+filter_scanline_yadif_semiplanar (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * s_orig, guint size)
+{
+ filter_scanline_yadif (self, out, s_orig, size, 2, 0);
+}
+
+static void
+filter_scanline_yadif_packed_3 (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * s_orig, guint size)
+{
+ filter_scanline_yadif (self, out, s_orig, size, 3, 0);
+}
+
+static void
+filter_scanline_yadif_packed_4 (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * s_orig, guint size)
+{
+ filter_scanline_yadif (self, out, s_orig, size, 4, 0);
+}
+
+static void
+filter_scanline_yadif_packed_yvyu (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * s_orig, guint size)
+{
+ filter_scanline_yadif (self, out, s_orig, size, 4, 1);
+}
+
+static void
+filter_scanline_yadif_packed_uyvy (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * s_orig, guint size)
+{
+ filter_scanline_yadif (self, out, s_orig, size, 4, 2);
+}
+
+ALWAYS_INLINE static void
+filter_scanline_yadif (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * s_orig, guint size,
+ int colors, int y_alternates_every)
+{
+ guint8 *dst = out;
+ const int bpp = 1; // Hard code 8-bit atm
+ int w = size / bpp;
+ int edge = colors * MAX_ALIGN / bpp;
+ GstDeinterlaceScanlineData s = *s_orig;
+
+ int mode = (s.tt1 == NULL || s.bb1 == NULL || s.ttp == NULL
+ || s.bbp == NULL) ? 2 : 0;
+
+ /* When starting up, some data might not yet be available, so use the current frame */
+ if (s.m1 == NULL)
+ s.m1 = s.mp;
+ if (s.tt1 == NULL)
+ s.tt1 = s.ttp;
+ if (s.bb1 == NULL)
+ s.bb1 = s.bbp;
+ if (s.t2 == NULL)
+ s.t2 = s.tp2;
+ if (s.b2 == NULL)
+ s.b2 = s.bp2;
+
+ filter_edges (dst, s.t0, s.b0, s.m1, s.mp, s.t2, s.b2, s.tp2, s.bp2, s.tt1,
+ s.ttp, s.bb1, s.bbp, w, colors, y_alternates_every, mode, bpp);
+ filter_line_c (dst, s.t0, s.b0, s.m1, s.mp, s.t2, s.b2, s.tp2, s.bp2, s.tt1,
+ s.ttp, s.bb1, s.bbp, w, colors, y_alternates_every, colors * 3, w - edge,
+ mode);
+}
+
+ALWAYS_INLINE static void
+filter_scanline_yadif_planar (GstDeinterlaceSimpleMethod * self,
+ guint8 * out, const GstDeinterlaceScanlineData * s_orig, guint size)
+{
+ guint8 *dst = out;
+ const int bpp = 1; // Hard code 8-bit atm
+ int w = size / bpp;
+ int edge = MAX_ALIGN / bpp;
+ GstDeinterlaceScanlineData s = *s_orig;
+
+ int mode = (s.tt1 == NULL || s.bb1 == NULL || s.ttp == NULL
+ || s.bbp == NULL) ? 2 : 0;
+
+ /* When starting up, some data might not yet be available, so use the current frame */
+ if (s.m1 == NULL)
+ s.m1 = s.mp;
+ if (s.tt1 == NULL)
+ s.tt1 = s.ttp;
+ if (s.bb1 == NULL)
+ s.bb1 = s.bbp;
+ if (s.t2 == NULL)
+ s.t2 = s.tp2;
+ if (s.b2 == NULL)
+ s.b2 = s.bp2;
+
+ filter_edges (dst, s.t0, s.b0, s.m1, s.mp, s.t2, s.b2, s.tp2, s.bp2, s.tt1,
+ s.ttp, s.bb1, s.bbp, w, 1, 0, mode, bpp);
+ if (mode == 0)
+ filter_mode0 (dst, (void *) s.t0, (void *) s.b0, (void *) s.m1,
+ (void *) s.mp, (void *) s.t2, (void *) s.b2, (void *) s.tp2,
+ (void *) s.bp2, (void *) s.tt1, (void *) s.ttp, (void *) s.bb1,
+ (void *) s.bbp, w - edge);
+ else
+ filter_mode2 (dst, (void *) s.t0, (void *) s.b0, (void *) s.m1,
+ (void *) s.mp, (void *) s.t2, (void *) s.b2, (void *) s.tp2,
+ (void *) s.bp2, (void *) s.tt1, (void *) s.ttp, (void *) s.bb1,
+ (void *) s.bbp, w - edge);
+}
+
+static void
+gst_deinterlace_method_yadif_init (GstDeinterlaceMethodYadif * self)
+{
+#if (defined __x86_64__ || defined _M_X64) && defined HAVE_NASM
+ if (
+# if defined HAVE_ORC
+ orc_sse_get_cpu_flags () & ORC_TARGET_SSE_SSSE3
+# elif defined __SSSE3__
+ TRUE
+# else
+ FALSE
+# endif
+ ) {
+ GST_DEBUG ("SSSE3 optimization enabled");
+ filter_mode0 = gst_yadif_filter_line_mode0_ssse3;
+ filter_mode2 = gst_yadif_filter_line_mode2_ssse3;
+ } else {
+ GST_DEBUG ("SSE2 optimization enabled");
+ filter_mode0 = gst_yadif_filter_line_mode0_sse2;
+ filter_mode2 = gst_yadif_filter_line_mode2_sse2;
+ }
+#else
+ {
+ GST_DEBUG ("SSE optimization disabled");
+ filter_mode0 = filter_line_c_planar_mode0;
+ filter_mode2 = filter_line_c_planar_mode2;
+ }
+#endif
+}
diff --git a/gst/deinterlace/yadif.h b/gst/deinterlace/yadif.h
new file mode 100644
index 0000000000..444c1d00ef
--- /dev/null
+++ b/gst/deinterlace/yadif.h
@@ -0,0 +1,48 @@
+/*
+ * GStreamer
+ * Copyright (C) 2019 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __YADIF_H__
+#define __YADIF_H__
+
+#define GST_TYPE_DEINTERLACE_YADIF (gst_deinterlace_method_yadif_get_type ())
+
+GType gst_deinterlace_method_yadif_get_type (void);
+
+/* Assembly implementations of the YADIF line filters (built with NASM).
+ * All four share the same signature: dst is the output scanline; the
+ * remaining pointers are the neighbouring scanlines of the current and
+ * previous/next fields that the caller passes in; w is the number of
+ * pixels to process.  mode0 and mode2 correspond to the two YADIF
+ * spatial/temporal check modes selected in
+ * gst_deinterlace_method_yadif_init(). */
+void
+gst_yadif_filter_line_mode0_sse2 (void *dst, const void *tzero, const void *bzero,
+ const void *mone, const void *mp, const void *ttwo, const void *btwo, const void *tptwo, const void *bptwo,
+ const void *ttone, const void *ttp, const void *bbone, const void *bbp, int w);
+
+void
+gst_yadif_filter_line_mode2_sse2 (void *dst, const void *tzero, const void *bzero,
+ const void *mone, const void *mp, const void *ttwo, const void *btwo, const void *tptwo, const void *bptwo,
+ const void *ttone, const void *ttp, const void *bbone, const void *bbp, int w);
+
+void
+gst_yadif_filter_line_mode0_ssse3 (void *dst, const void *tzero, const void *bzero,
+ const void *mone, const void *mp, const void *ttwo, const void *btwo, const void *tptwo, const void *bptwo,
+ const void *ttone, const void *ttp, const void *bbone, const void *bbp, int w);
+
+void
+gst_yadif_filter_line_mode2_ssse3 (void *dst, const void *tzero, const void *bzero,
+ const void *mone, const void *mp, const void *ttwo, const void *btwo, const void *tptwo, const void *bptwo,
+ const void *ttone, const void *ttp, const void *bbone, const void *bbp, int w);
+
+#endif
diff --git a/gst/dtmf/gstdtmf.c b/gst/dtmf/gstdtmf.c
new file mode 100644
index 0000000000..bde66ae46a
--- /dev/null
+++ b/gst/dtmf/gstdtmf.c
@@ -0,0 +1,42 @@
+/* GStreamer DTMF plugin
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdtmfsrc.h"
+#include "gstrtpdtmfsrc.h"
+#include "gstrtpdtmfdepay.h"
+
+/* Plugin entry point: registers the three DTMF elements.  Returns TRUE if
+ * at least one element registered successfully (|= accumulates results). */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (dtmfsrc, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpdtmfsrc, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpdtmfdepay, plugin);
+
+ return ret;
+}
+
+/* Standard GStreamer plugin descriptor for the "dtmf" plugin */
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ dtmf, "DTMF plugins",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/dtmf/gstdtmfcommon.h b/gst/dtmf/gstdtmfcommon.h
new file mode 100644
index 0000000000..5459523f61
--- /dev/null
+++ b/gst/dtmf/gstdtmfcommon.h
@@ -0,0 +1,42 @@
+/* GStreamer DTMF plugin
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_DTMF_COMMON_H__
+#define __GST_RTP_DTMF_COMMON_H__
+
+#include <gst/math-compat.h>
+
+/* Minimum gap between two digits and minimum length of one digit, both in
+ * milliseconds; shared by the tone and RTP DTMF sources */
+#define MIN_INTER_DIGIT_INTERVAL 100 /* ms */
+#define MIN_PULSE_DURATION 250 /* ms */
+
+/* Valid "volume" range accepted from dtmf-event structures (dBm0, sign
+ * dropped) */
+#define MIN_VOLUME 0
+#define MAX_VOLUME 36
+
+/* Valid named-event numbers (0-9, *, #, A-D) */
+#define MIN_EVENT 0
+#define MAX_EVENT 15
+#define MIN_EVENT_STRING "0"
+#define MAX_EVENT_STRING "15"
+
+/* In-memory DTMF event description shared between the RTP DTMF elements.
+ * NOTE(review): field order mirrors the RFC 4733 telephone-event payload
+ * (minus the E/R bits) — confirm any wire (de)serialisation handles byte
+ * order and packing explicitly rather than relying on this layout. */
+typedef struct
+{
+ guint8 event; /* Current DTMF event */
+ guint8 volume; /* power level of the tone, in dBm0 */
+ guint16 duration; /* Duration of digit, in timestamp units */
+} GstRTPDTMFPayload;
+
+#endif /* __GST_RTP_DTMF_COMMON_H__ */
diff --git a/gst/dtmf/gstdtmfsrc.c b/gst/dtmf/gstdtmfsrc.c
new file mode 100644
index 0000000000..1945c0f795
--- /dev/null
+++ b/gst/dtmf/gstdtmfsrc.c
@@ -0,0 +1,953 @@
+/* GStreamer DTMF source
+ *
+ * gstdtmfsrc.c:
+ *
+ * Copyright (C) <2007> Collabora.
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ * Copyright (C) <2007> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000,2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-dtmfsrc
+ * @title: dtmfsrc
+ * @see_also: rtpdtmfsrc, rtpdtmfmux
+ *
+ * The DTMFSrc element generates DTMF (ITU-T Q.23 Specification) tone packets on request
+ * from the application. The application communicates the beginning and end of a
+ * DTMF event using custom upstream gstreamer events. To report a DTMF event, an
+ * application must send an event of type GST_EVENT_CUSTOM_UPSTREAM, having a
+ * structure of name "dtmf-event" with fields set according to the following
+ * table:
+ *
+ * * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
+ * specified in RFC 2833 to use. The value should be 0 for tones and 1 for
+ * named events. Tones are specified by their frequencies and events are specified
+ * by their number. This element can only take events as input. Do not confuse
+ * with "method", which specifies the output.
+ *
+ * * `number` (G_TYPE_INT, 0-15): The event number.
+ *
+ * * `volume` (G_TYPE_INT, 0-36): This field describes the power level of the tone, expressed in dBm0
+ * after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
+ * valid DTMF is from 0 to -36 dBm0. Can be omitted if start is set to FALSE.
+ *
+ * * `start` (G_TYPE_BOOLEAN, True or False): Whether the event is starting or ending.
+ *
+ * * `method` (G_TYPE_INT, 2): The method used for sending event, this element will react if this
+ * field is absent or 2.
+ *
+ * For example, the following code informs the pipeline (and in turn, the
+ * DTMFSrc element inside the pipeline) about the start of a DTMF named
+ * event '1' of volume -25 dBm0:
+ *
+ * |[
+ * structure = gst_structure_new ("dtmf-event",
+ * "type", G_TYPE_INT, 1,
+ * "number", G_TYPE_INT, 1,
+ * "volume", G_TYPE_INT, 25,
+ * "start", G_TYPE_BOOLEAN, TRUE, NULL);
+ *
+ * event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, structure);
+ * gst_element_send_event (pipeline, event);
+ * ]|
+ *
+ * When a DTMF tone actually starts or stops, a "dtmf-event-processed"
+ * element #GstMessage is posted on the #GstBus with the same fields as
+ * the "dtmf-event" #GstEvent that was used to request the event. Also,
+ * if any events have not been processed when the element goes from the
+ * PAUSED to the READY state, then "dtmf-event-dropped" messages are
+ * posted on the #GstBus in the order that the events were received.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <math.h>
+
+#include <glib.h>
+
+#include "gstdtmfcommon.h"
+
+#include "gstdtmfsrc.h"
+
+#include <gst/audio/audio.h>
+
+/* "type" field value meaning a named event (as opposed to raw tones) */
+#define GST_TONE_DTMF_TYPE_EVENT 1
+/* Duration of each generated audio buffer, bounded by the interval prop */
+#define DEFAULT_PACKET_INTERVAL 50 /* ms */
+#define MIN_PACKET_INTERVAL 10 /* ms */
+#define MAX_PACKET_INTERVAL 50 /* ms */
+#define DEFAULT_SAMPLE_RATE 8000
+/* Output is S16 mono */
+#define SAMPLE_SIZE 16
+#define CHANNELS 1
+/* Shortest total on+off time for one digit, in ms */
+#define MIN_DUTY_CYCLE (MIN_INTER_DIGIT_INTERVAL + MIN_PULSE_DURATION)
+
+
+/* One DTMF key: its RFC 4733 event number and the two sine frequencies
+ * (Hz) that are mixed to produce the tone (ITU-T Q.23 matrix) */
+typedef struct st_dtmf_key
+{
+ const char *event_name;
+ int event_encoding;
+ float low_frequency;
+ float high_frequency;
+} DTMF_KEY;
+
+/* Frequency table indexed directly by event number 0-15
+ * (0-9, * = 10, # = 11, A-D = 12-15) */
+static const DTMF_KEY DTMF_KEYS[] = {
+ {"DTMF_KEY_EVENT_0", 0, 941, 1336},
+ {"DTMF_KEY_EVENT_1", 1, 697, 1209},
+ {"DTMF_KEY_EVENT_2", 2, 697, 1336},
+ {"DTMF_KEY_EVENT_3", 3, 697, 1477},
+ {"DTMF_KEY_EVENT_4", 4, 770, 1209},
+ {"DTMF_KEY_EVENT_5", 5, 770, 1336},
+ {"DTMF_KEY_EVENT_6", 6, 770, 1477},
+ {"DTMF_KEY_EVENT_7", 7, 852, 1209},
+ {"DTMF_KEY_EVENT_8", 8, 852, 1336},
+ {"DTMF_KEY_EVENT_9", 9, 852, 1477},
+ {"DTMF_KEY_EVENT_S", 10, 941, 1209},
+ {"DTMF_KEY_EVENT_P", 11, 941, 1477},
+ {"DTMF_KEY_EVENT_A", 12, 697, 1633},
+ {"DTMF_KEY_EVENT_B", 13, 770, 1633},
+ {"DTMF_KEY_EVENT_C", 14, 852, 1633},
+ {"DTMF_KEY_EVENT_D", 15, 941, 1633},
+};
+
+#define MAX_DTMF_EVENTS 16
+
+/* Symbolic names for the 16 event numbers; values match the
+ * event_encoding column of DTMF_KEYS above.  NOTE(review): nothing in
+ * this file references these constants — possibly kept for documentation
+ * or external use; confirm before removing. */
+enum
+{
+ DTMF_KEY_EVENT_1 = 1,
+ DTMF_KEY_EVENT_2 = 2,
+ DTMF_KEY_EVENT_3 = 3,
+ DTMF_KEY_EVENT_4 = 4,
+ DTMF_KEY_EVENT_5 = 5,
+ DTMF_KEY_EVENT_6 = 6,
+ DTMF_KEY_EVENT_7 = 7,
+ DTMF_KEY_EVENT_8 = 8,
+ DTMF_KEY_EVENT_9 = 9,
+ DTMF_KEY_EVENT_0 = 0,
+ DTMF_KEY_EVENT_STAR = 10,
+ DTMF_KEY_EVENT_POUND = 11,
+ DTMF_KEY_EVENT_A = 12,
+ DTMF_KEY_EVENT_B = 13,
+ DTMF_KEY_EVENT_C = 14,
+ DTMF_KEY_EVENT_D = 15,
+};
+
+GST_DEBUG_CATEGORY_STATIC (gst_dtmf_src_debug);
+#define GST_CAT_DEFAULT gst_dtmf_src_debug
+
+/* GObject property IDs */
+enum
+{
+ PROP_0,
+ PROP_INTERVAL,
+};
+
+/* Always-present src pad: native-endian S16, mono, interleaved raw audio
+ * at any rate (the actual rate is fixated in negotiate()) */
+static GstStaticPadTemplate gst_dtmf_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) \"" GST_AUDIO_NE (S16) "\", "
+ "rate = " GST_AUDIO_RATE_RANGE ", " "channels = (int) 1, "
+ "layout = (string)interleaved")
+ );
+
+#define parent_class gst_dtmf_src_parent_class
+G_DEFINE_TYPE (GstDTMFSrc, gst_dtmf_src, GST_TYPE_BASE_SRC);
+GST_ELEMENT_REGISTER_DEFINE (dtmfsrc, "dtmfsrc", GST_RANK_NONE,
+ GST_TYPE_DTMF_SRC);
+
+static void gst_dtmf_src_finalize (GObject * object);
+
+static void gst_dtmf_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_dtmf_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean gst_dtmf_src_handle_event (GstBaseSrc * src, GstEvent * event);
+static gboolean gst_dtmf_src_send_event (GstElement * src, GstEvent * event);
+static GstStateChangeReturn gst_dtmf_src_change_state (GstElement * element,
+ GstStateChange transition);
+static GstFlowReturn gst_dtmf_src_create (GstBaseSrc * basesrc,
+ guint64 offset, guint length, GstBuffer ** buffer);
+static void gst_dtmf_src_add_start_event (GstDTMFSrc * dtmfsrc,
+ gint event_number, gint event_volume);
+static void gst_dtmf_src_add_stop_event (GstDTMFSrc * dtmfsrc);
+
+static gboolean gst_dtmf_src_unlock (GstBaseSrc * src);
+
+static gboolean gst_dtmf_src_unlock_stop (GstBaseSrc * src);
+static gboolean gst_dtmf_src_negotiate (GstBaseSrc * basesrc);
+static gboolean gst_dtmf_src_query (GstBaseSrc * basesrc, GstQuery * query);
+
+
+/* Class init: wires up the GObject property machinery, pad template,
+ * element metadata and the GstBaseSrc/GstElement virtual methods */
+static void
+gst_dtmf_src_class_init (GstDTMFSrcClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstBaseSrcClass *gstbasesrc_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+
+
+ GST_DEBUG_CATEGORY_INIT (gst_dtmf_src_debug, "dtmfsrc", 0, "dtmfsrc element");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_dtmf_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "DTMF tone generator", "Source/Audio", "Generates DTMF tones",
+ "Youness Alaoui <youness.alaoui@collabora.co.uk>");
+
+
+ gobject_class->finalize = gst_dtmf_src_finalize;
+ gobject_class->set_property = gst_dtmf_src_set_property;
+ gobject_class->get_property = gst_dtmf_src_get_property;
+
+ /* interval: milliseconds of audio per generated buffer (10-50 ms) */
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_INTERVAL,
+ g_param_spec_uint ("interval", "Interval between tone packets",
+ "Interval in ms between two tone packets", MIN_PACKET_INTERVAL,
+ MAX_PACKET_INTERVAL, DEFAULT_PACKET_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_dtmf_src_change_state);
+ gstelement_class->send_event = GST_DEBUG_FUNCPTR (gst_dtmf_src_send_event);
+ gstbasesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_dtmf_src_unlock);
+ gstbasesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_dtmf_src_unlock_stop);
+
+ gstbasesrc_class->event = GST_DEBUG_FUNCPTR (gst_dtmf_src_handle_event);
+ gstbasesrc_class->create = GST_DEBUG_FUNCPTR (gst_dtmf_src_create);
+ gstbasesrc_class->negotiate = GST_DEBUG_FUNCPTR (gst_dtmf_src_negotiate);
+ gstbasesrc_class->query = GST_DEBUG_FUNCPTR (gst_dtmf_src_query);
+}
+
+/* GDestroyNotify for the event queue: frees one queued GstDTMFSrcEvent
+ * (the NULL check is redundant for g_slice_free but harmless) */
+static void
+event_free (GstDTMFSrcEvent * event)
+{
+ if (event)
+ g_slice_free (GstDTMFSrcEvent, event);
+}
+
+/* Instance init: configures the base class as a live time-based source
+ * and creates the async queue through which dtmf-event requests are
+ * handed from the application thread to the streaming thread */
+static void
+gst_dtmf_src_init (GstDTMFSrc * dtmfsrc)
+{
+ /* we operate in time */
+ gst_base_src_set_format (GST_BASE_SRC (dtmfsrc), GST_FORMAT_TIME);
+ gst_base_src_set_live (GST_BASE_SRC (dtmfsrc), TRUE);
+
+ dtmfsrc->interval = DEFAULT_PACKET_INTERVAL;
+
+ /* queue owns its elements; event_free releases leftovers on unref */
+ dtmfsrc->event_queue = g_async_queue_new_full ((GDestroyNotify) event_free);
+ dtmfsrc->last_event = NULL;
+
+ dtmfsrc->sample_rate = DEFAULT_SAMPLE_RATE;
+
+ GST_DEBUG_OBJECT (dtmfsrc, "init done");
+}
+
+/* Finalize: drops the event queue (which frees any queued events via its
+ * GDestroyNotify), then chains up */
+static void
+gst_dtmf_src_finalize (GObject * object)
+{
+ GstDTMFSrc *dtmfsrc;
+
+ dtmfsrc = GST_DTMF_SRC (object);
+
+ if (dtmfsrc->event_queue) {
+ g_async_queue_unref (dtmfsrc->event_queue);
+ dtmfsrc->event_queue = NULL;
+ }
+
+ G_OBJECT_CLASS (gst_dtmf_src_parent_class)->finalize (object);
+}
+
+/* Validates an application "dtmf-event" custom event and queues the
+ * corresponding start/stop request for the streaming thread.
+ * Rejects the event (returns FALSE) unless the element is PLAYING, the
+ * structure is well-formed (type==1 named event for starts, optional
+ * method must be 2), and starts/stops strictly alternate. */
+static gboolean
+gst_dtmf_src_handle_dtmf_event (GstDTMFSrc * dtmfsrc, GstEvent * event)
+{
+ const GstStructure *event_structure;
+ GstStateChangeReturn sret;
+ GstState state;
+ gint event_type;
+ gboolean start;
+ gint method;
+ GstClockTime last_stop;
+ gint event_number;
+ gint event_volume;
+ gboolean correct_order;
+
+ /* Only accept events while PLAYING (non-blocking state query) */
+ sret = gst_element_get_state (GST_ELEMENT (dtmfsrc), &state, NULL, 0);
+ if (sret != GST_STATE_CHANGE_SUCCESS || state != GST_STATE_PLAYING) {
+ GST_DEBUG_OBJECT (dtmfsrc, "dtmf-event, but not in PLAYING state");
+ goto failure;
+ }
+
+ event_structure = gst_event_get_structure (event);
+
+ if (!gst_structure_get_int (event_structure, "type", &event_type) ||
+ !gst_structure_get_boolean (event_structure, "start", &start) ||
+ (start == TRUE && event_type != GST_TONE_DTMF_TYPE_EVENT))
+ goto failure;
+
+ /* "method" is optional, but if present it must select this element (2) */
+ if (gst_structure_get_int (event_structure, "method", &method)) {
+ if (method != 2) {
+ goto failure;
+ }
+ }
+
+ /* number and volume are only required (and only read) for starts */
+ if (start)
+ if (!gst_structure_get_int (event_structure, "number", &event_number) ||
+ !gst_structure_get_int (event_structure, "volume", &event_volume))
+ goto failure;
+
+
+ GST_OBJECT_LOCK (dtmfsrc);
+ /* Optional "last-stop" lets the app pin the tone's start timestamp */
+ if (gst_structure_get_clock_time (event_structure, "last-stop", &last_stop))
+ dtmfsrc->last_stop = last_stop;
+ else
+ dtmfsrc->last_stop = GST_CLOCK_TIME_NONE;
+ /* Enforce start/stop/start/stop ordering */
+ correct_order = (start != dtmfsrc->last_event_was_start);
+ dtmfsrc->last_event_was_start = start;
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+ if (!correct_order)
+ goto failure;
+
+ if (start) {
+ GST_DEBUG_OBJECT (dtmfsrc, "Received start event %d with volume %d",
+ event_number, event_volume);
+ gst_dtmf_src_add_start_event (dtmfsrc, event_number, event_volume);
+ }
+
+ else {
+ GST_DEBUG_OBJECT (dtmfsrc, "Received stop event");
+ gst_dtmf_src_add_stop_event (dtmfsrc);
+ }
+
+ return TRUE;
+failure:
+ return FALSE;
+}
+
+/* GstBaseSrc::event vfunc: intercepts upstream custom "dtmf-event"
+ * events arriving on the src pad; everything else chains to the base
+ * class */
+static gboolean
+gst_dtmf_src_handle_event (GstBaseSrc * src, GstEvent * event)
+{
+ GstDTMFSrc *dtmfsrc;
+ gboolean result = FALSE;
+
+ dtmfsrc = GST_DTMF_SRC (src);
+
+ GST_LOG_OBJECT (dtmfsrc, "Received an %s event on the src pad",
+ GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ if (gst_event_has_name (event, "dtmf-event")) {
+ result = gst_dtmf_src_handle_dtmf_event (dtmfsrc, event);
+ break;
+ }
+ /* fall through */
+ default:
+ result = GST_BASE_SRC_CLASS (parent_class)->event (src, event);
+ break;
+ }
+
+ return result;
+}
+
+
+/* GstElement::send_event vfunc: lets applications deliver "dtmf-event"
+ * directly to the element (any custom direction is accepted); other
+ * events chain to the parent implementation */
+static gboolean
+gst_dtmf_src_send_event (GstElement * element, GstEvent * event)
+{
+ GstDTMFSrc *dtmfsrc = GST_DTMF_SRC (element);
+ gboolean ret;
+
+ GST_LOG_OBJECT (dtmfsrc, "Received an %s event via send_event",
+ GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_BOTH:
+ case GST_EVENT_CUSTOM_BOTH_OOB:
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ case GST_EVENT_CUSTOM_DOWNSTREAM_OOB:
+ if (gst_event_has_name (event, "dtmf-event")) {
+ ret = gst_dtmf_src_handle_dtmf_event (dtmfsrc, event);
+ break;
+ }
+ /* fall through */
+ default:
+ ret = GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
+ break;
+ }
+
+ return ret;
+}
+
+/* GObject::set_property — only "interval" is writable */
+static void
+gst_dtmf_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstDTMFSrc *dtmfsrc;
+
+ dtmfsrc = GST_DTMF_SRC (object);
+
+ switch (prop_id) {
+ case PROP_INTERVAL:
+ dtmfsrc->interval = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject::get_property — only "interval" is readable */
+static void
+gst_dtmf_src_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstDTMFSrc *dtmfsrc;
+
+ dtmfsrc = GST_DTMF_SRC (object);
+
+ switch (prop_id) {
+ case PROP_INTERVAL:
+ g_value_set_uint (value, dtmfsrc->interval);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Decides the running-time at which the next tone starts: the
+ * app-supplied "last-stop" if one was given, otherwise the current
+ * clock time minus base time.  The stored timestamp only ever moves
+ * forward.  Without a clock, timestamp is invalidated and buffers will
+ * carry GST_CLOCK_TIME_NONE. */
+static void
+gst_dtmf_prepare_timestamps (GstDTMFSrc * dtmfsrc)
+{
+ GstClockTime last_stop;
+ GstClockTime timestamp;
+
+ GST_OBJECT_LOCK (dtmfsrc);
+ last_stop = dtmfsrc->last_stop;
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+ if (GST_CLOCK_TIME_IS_VALID (last_stop)) {
+ timestamp = last_stop;
+ } else {
+ GstClock *clock;
+
+ /* If there is no valid start time, lets use now as the start time */
+
+ clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
+ if (clock != NULL) {
+ timestamp = gst_clock_get_time (clock)
+ - gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
+ gst_object_unref (clock);
+ } else {
+ gchar *dtmf_name = gst_element_get_name (dtmfsrc);
+ GST_ERROR_OBJECT (dtmfsrc, "No clock set for element %s", dtmf_name);
+ dtmfsrc->timestamp = GST_CLOCK_TIME_NONE;
+ g_free (dtmf_name);
+ return;
+ }
+ }
+
+ /* Make sure the timestamp always goes forward */
+ if (timestamp > dtmfsrc->timestamp)
+ dtmfsrc->timestamp = timestamp;
+}
+
+/* Queues a START event for the streaming thread; number and volume are
+ * clamped into their valid ranges (0-15, 0-36 dBm0) */
+static void
+gst_dtmf_src_add_start_event (GstDTMFSrc * dtmfsrc, gint event_number,
+ gint event_volume)
+{
+
+ GstDTMFSrcEvent *event = g_slice_new0 (GstDTMFSrcEvent);
+ event->event_type = DTMF_EVENT_TYPE_START;
+ event->sample = 0;
+ event->event_number = CLAMP (event_number, MIN_EVENT, MAX_EVENT);
+ event->volume = CLAMP (event_volume, MIN_VOLUME, MAX_VOLUME);
+
+ g_async_queue_push (dtmfsrc->event_queue, event);
+}
+
+/* Queues a STOP event for the streaming thread */
+static void
+gst_dtmf_src_add_stop_event (GstDTMFSrc * dtmfsrc)
+{
+
+ GstDTMFSrcEvent *event = g_slice_new0 (GstDTMFSrcEvent);
+ event->event_type = DTMF_EVENT_TYPE_STOP;
+ event->sample = 0;
+ event->event_number = 0;
+ event->volume = 0;
+
+ g_async_queue_push (dtmfsrc->event_queue, event);
+}
+
+/* Returns a zero-filled (silent) S16 mono buffer covering 'duration' ms
+ * at 'sample_rate'.  Caller owns the returned buffer. */
+static GstBuffer *
+gst_dtmf_src_generate_silence (float duration, gint sample_rate)
+{
+ gint buf_size;
+
+ /* Create a buffer with data set to 0 */
+ buf_size = ((duration / 1000) * sample_rate * SAMPLE_SIZE * CHANNELS) / 8;
+
+ return gst_buffer_new_wrapped (g_malloc0 (buf_size), buf_size);
+}
+
+/* Synthesises 'duration' ms of the dual-sine DTMF tone for 'key' into a
+ * new S16 buffer, attenuated by event->volume dBm0.  event->sample is
+ * the running sample counter so phase continues across buffers.
+ * Caller owns the returned buffer. */
+static GstBuffer *
+gst_dtmf_src_generate_tone (GstDTMFSrcEvent * event, DTMF_KEY key,
+ float duration, gint sample_rate)
+{
+ GstBuffer *buffer;
+ GstMapInfo map;
+ gint16 *p;
+ gint tone_size;
+ double i = 0;
+ double amplitude, f1, f2;
+ double volume_factor;
+ static GstAllocationParams params = { 0, 1, 0, 0, };
+
+ /* Create a buffer for the tone */
+ tone_size = ((duration / 1000) * sample_rate * SAMPLE_SIZE * CHANNELS) / 8;
+
+ buffer = gst_buffer_new_allocate (NULL, tone_size, &params);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
+ p = (gint16 *) map.data;
+
+ /* dBm0 -> linear gain.  NOTE(review): if event->volume is an integer
+ * type, (-event->volume) / 20 truncates toward zero (e.g. -25/20 == -1),
+ * so the attenuation is quantised to 20 dB steps — confirm the field's
+ * type in GstDTMFSrcEvent and whether / 20.0 was intended. */
+ volume_factor = pow (10, (-event->volume) / 20);
+
+ /*
+ * For each sample point we calculate 'x' as the
+ * the amplitude value.
+ */
+ for (i = 0; i < (tone_size / (SAMPLE_SIZE / 8)); i++) {
+ /*
+ * We add the fundamental frequencies together.
+ */
+ /* NOTE(review): event->sample / sample_rate is only a fractional
+ * phase if 'sample' is a floating-point field; if it is integral
+ * this divides as integers — confirm against the struct declaration. */
+ f1 = sin (2 * M_PI * key.low_frequency * (event->sample / sample_rate));
+ f2 = sin (2 * M_PI * key.high_frequency * (event->sample / sample_rate));
+
+ amplitude = (f1 + f2) / 2;
+
+ /* Adjust the volume */
+ amplitude *= volume_factor;
+
+ /* Make the [-1:1] interval into a [-32767:32767] interval */
+ amplitude *= 32767;
+
+ /* Store it in the data buffer */
+ *(p++) = (gint16) amplitude;
+
+ (event->sample)++;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+
+ return buffer;
+}
+
+
+
+/* Produces the next 'interval'-ms buffer for the active event: silence
+ * while still inside the minimum inter-digit gap, the actual tone
+ * afterwards.  Stamps the buffer and advances dtmfsrc->timestamp. */
+static GstBuffer *
+gst_dtmf_src_create_next_tone_packet (GstDTMFSrc * dtmfsrc,
+ GstDTMFSrcEvent * event)
+{
+ GstBuffer *buf = NULL;
+ gboolean send_silence = FALSE;
+
+ GST_LOG_OBJECT (dtmfsrc, "Creating buffer for tone %s",
+ DTMF_KEYS[event->event_number].event_name);
+
+ /* Lead with silence until MIN_INTER_DIGIT_INTERVAL ms have elapsed */
+ if (event->packet_count * dtmfsrc->interval < MIN_INTER_DIGIT_INTERVAL) {
+ send_silence = TRUE;
+ }
+
+ if (send_silence) {
+ GST_LOG_OBJECT (dtmfsrc, "Generating silence");
+ buf = gst_dtmf_src_generate_silence (dtmfsrc->interval,
+ dtmfsrc->sample_rate);
+ } else {
+ GST_LOG_OBJECT (dtmfsrc, "Generating tone");
+ buf = gst_dtmf_src_generate_tone (event, DTMF_KEYS[event->event_number],
+ dtmfsrc->interval, dtmfsrc->sample_rate);
+ }
+ event->packet_count++;
+
+
+ /* timestamp and duration of GstBuffer */
+ GST_BUFFER_DURATION (buf) = dtmfsrc->interval * GST_MSECOND;
+ GST_BUFFER_TIMESTAMP (buf) = dtmfsrc->timestamp;
+
+ GST_LOG_OBJECT (dtmfsrc, "Creating new buffer with event %u duration "
+ " gst: %" GST_TIME_FORMAT " at %" GST_TIME_FORMAT,
+ event->event_number, GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
+
+ dtmfsrc->timestamp += GST_BUFFER_DURATION (buf);
+
+ return buf;
+}
+
+/* Posts a "dtmf-event-processed"/"dtmf-event-dropped" element message on
+ * the bus, mirroring the fields of the original dtmf-event request.
+ * PAUSE_TASK sentinels are internal and never reported. */
+static void
+gst_dtmf_src_post_message (GstDTMFSrc * dtmfsrc, const gchar * message_name,
+ GstDTMFSrcEvent * event)
+{
+ GstStructure *s = NULL;
+
+ switch (event->event_type) {
+ case DTMF_EVENT_TYPE_START:
+ s = gst_structure_new (message_name,
+ "type", G_TYPE_INT, 1,
+ "method", G_TYPE_INT, 2,
+ "start", G_TYPE_BOOLEAN, TRUE,
+ "number", G_TYPE_INT, event->event_number,
+ "volume", G_TYPE_INT, event->volume, NULL);
+ break;
+ case DTMF_EVENT_TYPE_STOP:
+ s = gst_structure_new (message_name,
+ "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 2,
+ "start", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case DTMF_EVENT_TYPE_PAUSE_TASK:
+ return;
+ }
+
+ if (s)
+ gst_element_post_message (GST_ELEMENT (dtmfsrc),
+ gst_message_new_element (GST_OBJECT (dtmfsrc), s));
+}
+
+/* GstBaseSrc::create vfunc — the streaming-thread main loop step.
+ * Blocks on the event queue until a START arrives, then emits one
+ * interval-sized buffer per call, paced against the pipeline clock.
+ * A queued STOP ends the current event once MIN_DUTY_CYCLE ms have been
+ * produced; a PAUSE_TASK sentinel (pushed by unlock()) makes the call
+ * return FLUSHING.  offset/length are unused for a live tone source. */
+static GstFlowReturn
+gst_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset,
+ guint length, GstBuffer ** buffer)
+{
+ GstBuffer *buf = NULL;
+ GstDTMFSrcEvent *event;
+ GstDTMFSrc *dtmfsrc;
+ GstClock *clock;
+ /* NOTE(review): GstClockID is already a pointer typedef, so the extra
+ * '*' here is suspicious (it works only because both convert via
+ * gpointer) — confirm 'GstClockID clockid;' was intended. */
+ GstClockID *clockid;
+ GstClockReturn clockret;
+
+ dtmfsrc = GST_DTMF_SRC (basesrc);
+
+ /* Loop until we hold an active (last_event != NULL) START event */
+ do {
+
+ if (dtmfsrc->last_event == NULL) {
+ /* Idle: block until the application queues something */
+ GST_DEBUG_OBJECT (dtmfsrc, "popping");
+ event = g_async_queue_pop (dtmfsrc->event_queue);
+
+ GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type);
+
+ switch (event->event_type) {
+ case DTMF_EVENT_TYPE_STOP:
+ GST_WARNING_OBJECT (dtmfsrc,
+ "Received a DTMF stop event when already stopped");
+ gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+ break;
+ case DTMF_EVENT_TYPE_START:
+ gst_dtmf_prepare_timestamps (dtmfsrc);
+
+ /* ownership moves to last_event; NULL-ing avoids the free below */
+ event->packet_count = 0;
+ dtmfsrc->last_event = event;
+ event = NULL;
+ gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed",
+ dtmfsrc->last_event);
+ break;
+ case DTMF_EVENT_TYPE_PAUSE_TASK:
+ /*
+ * We're pushing it back because it has to stay in there until
+ * the task is really paused (and the queue will then be flushed)
+ */
+ GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
+ GST_OBJECT_LOCK (dtmfsrc);
+ if (dtmfsrc->paused) {
+ g_async_queue_push (dtmfsrc->event_queue, event);
+ goto paused_locked;
+ }
+ GST_OBJECT_UNLOCK (dtmfsrc);
+ break;
+ }
+ if (event)
+ g_slice_free (GstDTMFSrcEvent, event);
+ } else if (dtmfsrc->last_event->packet_count * dtmfsrc->interval >=
+ MIN_DUTY_CYCLE) {
+ /* Active digit has met its minimum duration: a queued STOP may now
+ * take effect (non-blocking pop) */
+ event = g_async_queue_try_pop (dtmfsrc->event_queue);
+
+ if (event != NULL) {
+
+ switch (event->event_type) {
+ case DTMF_EVENT_TYPE_START:
+ GST_WARNING_OBJECT (dtmfsrc,
+ "Received two consecutive DTMF start events");
+ gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+ break;
+ case DTMF_EVENT_TYPE_STOP:
+ g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event);
+ dtmfsrc->last_event = NULL;
+ gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-processed", event);
+ break;
+ case DTMF_EVENT_TYPE_PAUSE_TASK:
+ /*
+ * We're pushing it back because it has to stay in there until
+ * the task is really paused (and the queue will then be flushed)
+ */
+ GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
+
+ GST_OBJECT_LOCK (dtmfsrc);
+ if (dtmfsrc->paused) {
+ g_async_queue_push (dtmfsrc->event_queue, event);
+ goto paused_locked;
+ }
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+ break;
+ }
+ g_slice_free (GstDTMFSrcEvent, event);
+ }
+ }
+ } while (dtmfsrc->last_event == NULL);
+
+ GST_LOG_OBJECT (dtmfsrc, "end event check, now wait for the proper time");
+
+ /* Pace output: sleep until the next buffer's timestamp in clock time.
+ * unlock() sets 'paused' and unschedules clockid to break this wait. */
+ clock = gst_element_get_clock (GST_ELEMENT (basesrc));
+
+ clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp +
+ gst_element_get_base_time (GST_ELEMENT (dtmfsrc)));
+ gst_object_unref (clock);
+
+ GST_OBJECT_LOCK (dtmfsrc);
+ if (!dtmfsrc->paused) {
+ dtmfsrc->clockid = clockid;
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+ clockret = gst_clock_id_wait (clockid, NULL);
+
+ GST_OBJECT_LOCK (dtmfsrc);
+ if (dtmfsrc->paused)
+ clockret = GST_CLOCK_UNSCHEDULED;
+ } else {
+ clockret = GST_CLOCK_UNSCHEDULED;
+ }
+ gst_clock_id_unref (clockid);
+ dtmfsrc->clockid = NULL;
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+ if (clockret == GST_CLOCK_UNSCHEDULED) {
+ goto paused;
+ }
+
+ buf = gst_dtmf_src_create_next_tone_packet (dtmfsrc, dtmfsrc->last_event);
+
+ GST_LOG_OBJECT (dtmfsrc, "Created buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buf));
+ *buffer = buf;
+
+ return GST_FLOW_OK;
+
+paused_locked:
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+paused:
+
+ /* Drop the in-flight event on pause; the app will get event-dropped
+ * messages for anything still queued when the state changes */
+ if (dtmfsrc->last_event) {
+ GST_DEBUG_OBJECT (dtmfsrc, "Stopping current event");
+ /* Don't forget to release the stream lock */
+ g_slice_free (GstDTMFSrcEvent, dtmfsrc->last_event);
+ dtmfsrc->last_event = NULL;
+ }
+
+ return GST_FLOW_FLUSHING;
+
+}
+
+/* GstBaseSrc::unlock vfunc: makes create() return ASAP by flagging
+ * 'paused', unscheduling any pending clock wait, and pushing a
+ * PAUSE_TASK sentinel to wake a blocking queue pop */
+static gboolean
+gst_dtmf_src_unlock (GstBaseSrc * src)
+{
+ GstDTMFSrc *dtmfsrc = GST_DTMF_SRC (src);
+ GstDTMFSrcEvent *event = NULL;
+
+ GST_DEBUG_OBJECT (dtmfsrc, "Called unlock");
+
+ GST_OBJECT_LOCK (dtmfsrc);
+ dtmfsrc->paused = TRUE;
+ if (dtmfsrc->clockid) {
+ gst_clock_id_unschedule (dtmfsrc->clockid);
+ }
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+ GST_DEBUG_OBJECT (dtmfsrc, "Pushing the PAUSE_TASK event on unlock request");
+ event = g_slice_new0 (GstDTMFSrcEvent);
+ event->event_type = DTMF_EVENT_TYPE_PAUSE_TASK;
+ g_async_queue_push (dtmfsrc->event_queue, event);
+
+ return TRUE;
+}
+
+
+/* GstBaseSrc::unlock_stop vfunc: clears the pause flag so create() can
+ * block again */
+static gboolean
+gst_dtmf_src_unlock_stop (GstBaseSrc * src)
+{
+ GstDTMFSrc *dtmfsrc = GST_DTMF_SRC (src);
+
+ GST_DEBUG_OBJECT (dtmfsrc, "Unlock stopped");
+
+ GST_OBJECT_LOCK (dtmfsrc);
+ dtmfsrc->paused = FALSE;
+ GST_OBJECT_UNLOCK (dtmfsrc);
+
+ return TRUE;
+}
+
+
+/* GstBaseSrc::negotiate vfunc: intersects with downstream caps (or the
+ * pad template if none), fixates the sample rate as close to 8000 Hz as
+ * allowed, records it in dtmfsrc->sample_rate and sets the caps */
+static gboolean
+gst_dtmf_src_negotiate (GstBaseSrc * basesrc)
+{
+ GstDTMFSrc *dtmfsrc = GST_DTMF_SRC (basesrc);
+ GstCaps *caps;
+ GstStructure *s;
+ gboolean ret;
+
+ caps = gst_pad_get_allowed_caps (GST_BASE_SRC_PAD (basesrc));
+
+ if (!caps)
+ caps = gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (basesrc));
+
+ if (gst_caps_is_empty (caps)) {
+ gst_caps_unref (caps);
+ return FALSE;
+ }
+
+ caps = gst_caps_truncate (caps);
+
+ caps = gst_caps_make_writable (caps);
+ s = gst_caps_get_structure (caps, 0);
+
+ gst_structure_fixate_field_nearest_int (s, "rate", DEFAULT_SAMPLE_RATE);
+
+ if (!gst_structure_get_int (s, "rate", &dtmfsrc->sample_rate)) {
+ GST_ERROR_OBJECT (dtmfsrc, "Could not get rate");
+ gst_caps_unref (caps);
+ return FALSE;
+ }
+
+ ret = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps);
+
+ gst_caps_unref (caps);
+
+ return ret;
+}
+
+/* GstBaseSrc::query vfunc: answers LATENCY with one buffer interval as
+ * minimum latency (live source); everything else chains up */
+static gboolean
+gst_dtmf_src_query (GstBaseSrc * basesrc, GstQuery * query)
+{
+ GstDTMFSrc *dtmfsrc = GST_DTMF_SRC (basesrc);
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_LATENCY:
+ {
+ GstClockTime latency;
+
+ latency = dtmfsrc->interval * GST_MSECOND;
+ gst_query_set_latency (query, gst_base_src_is_live (basesrc), latency,
+ GST_CLOCK_TIME_NONE);
+ GST_DEBUG_OBJECT (dtmfsrc, "Reporting latency of %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (latency));
+ res = TRUE;
+ }
+ break;
+ default:
+ res = GST_BASE_SRC_CLASS (parent_class)->query (basesrc, query);
+ break;
+ }
+
+ return res;
+}
+
+/* GstElement::change_state vfunc.  Flushes the event queue (posting a
+ * "dtmf-event-dropped" message per leftover event) on READY->PAUSED and
+ * PAUSED->READY, resets ordering/timestamp state, and reports
+ * NO_PREROLL in paused states since this is a live source. */
+static GstStateChangeReturn
+gst_dtmf_src_change_state (GstElement * element, GstStateChange transition)
+{
+ GstDTMFSrc *dtmfsrc;
+ GstStateChangeReturn result;
+ gboolean no_preroll = FALSE;
+ GstDTMFSrcEvent *event = NULL;
+
+ dtmfsrc = GST_DTMF_SRC (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* Flushing the event queue */
+ event = g_async_queue_try_pop (dtmfsrc->event_queue);
+
+ while (event != NULL) {
+ gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+ g_slice_free (GstDTMFSrcEvent, event);
+ event = g_async_queue_try_pop (dtmfsrc->event_queue);
+ }
+ dtmfsrc->last_event_was_start = FALSE;
+ dtmfsrc->timestamp = 0;
+ no_preroll = TRUE;
+ break;
+ default:
+ break;
+ }
+
+ if ((result =
+ GST_ELEMENT_CLASS (gst_dtmf_src_parent_class)->change_state (element,
+ transition)) == GST_STATE_CHANGE_FAILURE)
+ goto failure;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ no_preroll = TRUE;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ GST_DEBUG_OBJECT (dtmfsrc, "Flushing event queue");
+ /* Flushing the event queue */
+ event = g_async_queue_try_pop (dtmfsrc->event_queue);
+
+ while (event != NULL) {
+ gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+ g_slice_free (GstDTMFSrcEvent, event);
+ event = g_async_queue_try_pop (dtmfsrc->event_queue);
+ }
+ dtmfsrc->last_event_was_start = FALSE;
+
+ break;
+ default:
+ break;
+ }
+
+ /* Live source: a successful transition to a paused state cannot preroll */
+ if (no_preroll && result == GST_STATE_CHANGE_SUCCESS)
+ result = GST_STATE_CHANGE_NO_PREROLL;
+
+ return result;
+
+ /* ERRORS */
+failure:
+ {
+ GST_ERROR_OBJECT (dtmfsrc, "parent failed state change");
+ return result;
+ }
+}
diff --git a/gst/dtmf/gstdtmfsrc.h b/gst/dtmf/gstdtmfsrc.h
new file mode 100644
index 0000000000..996d9268b8
--- /dev/null
+++ b/gst/dtmf/gstdtmfsrc.h
@@ -0,0 +1,101 @@
+/* GStreamer DTMF source
+ *
+ * gstdtmfsrc.h:
+ *
+ * Copyright (C) <2007> Collabora.
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ * Copyright (C) <2007> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_DTMF_SRC_H__
+#define __GST_DTMF_SRC_H__
+
+#include <gst/gst.h>
+#include <gst/gstbuffer.h>
+#include <gst/base/gstbasesrc.h>
+
+G_BEGIN_DECLS
/* Standard GObject type-cast/check boilerplate for the DTMF source */
#define GST_TYPE_DTMF_SRC (gst_dtmf_src_get_type())
#define GST_DTMF_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DTMF_SRC,GstDTMFSrc))
#define GST_DTMF_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DTMF_SRC,GstDTMFSrcClass))
#define GST_DTMF_SRC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_DTMF_SRC, GstDTMFSrcClass))
#define GST_IS_DTMF_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DTMF_SRC))
#define GST_IS_DTMF_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DTMF_SRC))
#define GST_DTMF_SRC_CAST(obj) ((GstDTMFSrc *)(obj))
typedef struct _GstDTMFSrc GstDTMFSrc;
typedef struct _GstDTMFSrcClass GstDTMFSrcClass;

/* Kind of entry placed on the element's async event queue */
enum _GstDTMFEventType
{
  DTMF_EVENT_TYPE_START,        /* start generating a tone */
  DTMF_EVENT_TYPE_STOP,         /* stop the current tone */
  DTMF_EVENT_TYPE_PAUSE_TASK    /* NOTE(review): presumably pauses the src
                                 * streaming task — confirm against users
                                 * of this value in gstdtmfsrc.c */
};

typedef enum _GstDTMFEventType GstDTMFEventType;

/* One queued DTMF request, produced from a "dtmf-event" custom event */
struct _GstDTMFSrcEvent
{
  GstDTMFEventType event_type;  /* start / stop / pause-task */
  double sample;                /* synthesis position — assumed to be the
                                 * running sample counter; confirm in .c */
  guint16 event_number;         /* telephone-event number of the key */
  guint16 volume;               /* power level in dBm0, sign dropped */
  guint32 packet_count;         /* NOTE(review): meaning not visible here;
                                 * verify against gstdtmfsrc.c usage */
};

typedef struct _GstDTMFSrcEvent GstDTMFSrcEvent;
+
/**
 * GstDTMFSrc:
 * @element: the parent element.
 *
 * The opaque #GstDTMFSrc data structure.
 */
struct _GstDTMFSrc
{
  /*< private >*/
  GstBaseSrc parent;
  GAsyncQueue *event_queue;     /* pending GstDTMFSrcEvent, flushed on
                                 * READY<->PAUSED transitions */
  GstDTMFSrcEvent *last_event;
  gboolean last_event_was_start;        /* enforces start/stop alternation */

  guint16 interval;
  GstClockTime timestamp;       /* running output timestamp; reset to 0 on
                                 * READY_TO_PAUSED */

  gboolean paused;
  GstClockID clockid;

  GstClockTime last_stop;

  gint sample_rate;
};


struct _GstDTMFSrcClass
{
  GstBaseSrcClass parent_class; /* no virtual methods added */
};
+
+GType gst_dtmf_src_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (dtmfsrc);
+
+G_END_DECLS
+#endif /* __GST_DTMF_SRC_H__ */
diff --git a/gst/dtmf/gstrtpdtmfdepay.c b/gst/dtmf/gstrtpdtmfdepay.c
new file mode 100644
index 0000000000..a819467a68
--- /dev/null
+++ b/gst/dtmf/gstrtpdtmfdepay.c
@@ -0,0 +1,497 @@
+/* GstRtpDtmfDepay
+ *
+ * Copyright (C) 2008 Collabora Limited
+ * Copyright (C) 2008 Nokia Corporation
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-rtpdtmfdepay
+ * @title: rtpdtmfdepay
+ * @see_also: rtpdtmfsrc, rtpdtmfmux
+ *
+ * This element takes RTP DTMF packets and produces sound. It also emits a
+ * message on the #GstBus.
+ *
+ * The message is called "dtmf-event" and has the following fields:
+ *
+ * * `type` (G_TYPE_INT, 0-1): Which of the two methods
+ * specified in RFC 2833 to use. The value should be 0 for tones and 1 for
+ * named events. Tones are specified by their frequencies and events are specified
+ * by their number. This element currently only recognizes events.
+ * Do not confuse with "method" which specified the output.
+ *
 * * `number` (G_TYPE_INT, 0-15): The event number.
+ *
+ * * `volume` (G_TYPE_INT, 0-36): This field describes the power level of the tone, expressed in dBm0
+ * after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
+ * valid DTMF is from 0 to -36 dBm0.
+ *
 * * `method` (G_TYPE_INT, 1): This field will always be 1 (i.e. RTP event) from this element.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtpdtmfdepay.h"
+
+#include <string.h>
+#include <math.h>
+
+#include <gst/audio/audio.h>
+#include <gst/base/gstbitreader.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
#define DEFAULT_PACKET_INTERVAL 50 /* ms */
#define MIN_PACKET_INTERVAL 10 /* ms */
#define MAX_PACKET_INTERVAL 50 /* ms */
#define SAMPLE_RATE 8000
#define SAMPLE_SIZE 16          /* bits per sample (S16 output) */
#define CHANNELS 1
/* NOTE(review): MIN_INTER_DIGIT_INTERVAL and MIN_PULSE_DURATION are not
 * defined in this file — presumably provided by gstdtmfcommon.h; confirm. */
#define MIN_DUTY_CYCLE (MIN_INTER_DIGIT_INTERVAL + MIN_PULSE_DURATION)

/* Bounds/default for the "unit-time" property (ms, 0 disables) */
#define MIN_UNIT_TIME 0
#define MAX_UNIT_TIME 1000
#define DEFAULT_UNIT_TIME 0

/* Default for the "max-duration" property (0 = no limit) */
#define DEFAULT_MAX_DURATION 0

/* Low/high frequency pair (Hz) making up one dual-tone key */
typedef struct st_dtmf_key
{
  float low_frequency;
  float high_frequency;
} DTMF_KEY;

/* Frequency pairs indexed by telephone-event number; the mapping of
 * event numbers to keys is given by the DTMF_KEY_EVENT_* enum below
 * (0 = digit '0', 1-9 = digits, 10 = '*', 11 = '#', 12-15 = A-D). */
static const DTMF_KEY DTMF_KEYS[] = {
  {941, 1336},
  {697, 1209},
  {697, 1336},
  {697, 1477},
  {770, 1209},
  {770, 1336},
  {770, 1477},
  {852, 1209},
  {852, 1336},
  {852, 1477},
  {941, 1209},
  {941, 1477},
  {697, 1633},
  {770, 1633},
  {852, 1633},
  {941, 1633},
};
+
#define MAX_DTMF_EVENTS 16      /* number of entries in DTMF_KEYS */

/* Telephone-event numbers of the 16 DTMF keys (indices into DTMF_KEYS) */
enum
{
  DTMF_KEY_EVENT_1 = 1,
  DTMF_KEY_EVENT_2 = 2,
  DTMF_KEY_EVENT_3 = 3,
  DTMF_KEY_EVENT_4 = 4,
  DTMF_KEY_EVENT_5 = 5,
  DTMF_KEY_EVENT_6 = 6,
  DTMF_KEY_EVENT_7 = 7,
  DTMF_KEY_EVENT_8 = 8,
  DTMF_KEY_EVENT_9 = 9,
  DTMF_KEY_EVENT_0 = 0,
  DTMF_KEY_EVENT_STAR = 10,
  DTMF_KEY_EVENT_POUND = 11,
  DTMF_KEY_EVENT_A = 12,
  DTMF_KEY_EVENT_B = 13,
  DTMF_KEY_EVENT_C = 14,
  DTMF_KEY_EVENT_D = 15,
};

GST_DEBUG_CATEGORY_STATIC (gst_rtp_dtmf_depay_debug);
#define GST_CAT_DEFAULT gst_rtp_dtmf_depay_debug

/* Signals (none defined yet) */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

/* Property identifiers */
enum
{
  PROP_0,
  PROP_UNIT_TIME,
  PROP_MAX_DURATION
};
+
/* Source pad: native-endian S16 mono raw audio; the rate is fixed to the
 * negotiated RTP clock-rate in gst_rtp_dtmf_depay_setcaps() */
static GstStaticPadTemplate gst_rtp_dtmf_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) \"" GST_AUDIO_NE (S16) "\", "
        "rate = " GST_AUDIO_RATE_RANGE ", " "channels = (int) 1")
    );

/* Sink pad: RTP telephone-event packets with a dynamic payload type */
static GstStaticPadTemplate gst_rtp_dtmf_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) [ 0, MAX ], "
        "encoding-name = (string) \"TELEPHONE-EVENT\"")
    );

G_DEFINE_TYPE (GstRtpDTMFDepay, gst_rtp_dtmf_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
GST_ELEMENT_REGISTER_DEFINE (rtpdtmfdepay, "rtpdtmfdepay", GST_RANK_MARGINAL,
    GST_TYPE_RTP_DTMF_DEPAY);
+
+static void gst_rtp_dtmf_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_dtmf_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static GstBuffer *gst_rtp_dtmf_depay_process (GstRTPBaseDepayload * depayload,
+ GstBuffer * buf);
+gboolean gst_rtp_dtmf_depay_setcaps (GstRTPBaseDepayload * filter,
+ GstCaps * caps);
+
+static void
+gst_rtp_dtmf_depay_class_init (GstRtpDTMFDepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+ gstrtpbasedepayload_class = GST_RTP_BASE_DEPAYLOAD_CLASS (klass);
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_dtmf_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_dtmf_depay_sink_template);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_dtmf_depay_debug,
+ "rtpdtmfdepay", 0, "rtpdtmfdepay element");
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP DTMF packet depayloader", "Codec/Depayloader/Network",
+ "Generates DTMF Sound from telephone-event RTP packets",
+ "Youness Alaoui <youness.alaoui@collabora.co.uk>");
+
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_depay_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_depay_get_property);
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_UNIT_TIME,
+ g_param_spec_uint ("unit-time", "Duration unittime",
+ "The smallest unit (ms) the duration must be a multiple of (0 disables it)",
+ MIN_UNIT_TIME, MAX_UNIT_TIME, DEFAULT_UNIT_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MAX_DURATION,
+ g_param_spec_uint ("max-duration", "Maximum duration",
+ "The maxumimum duration (ms) of the outgoing soundpacket. "
+ "(0 = no limit)", 0, G_MAXUINT, DEFAULT_MAX_DURATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstrtpbasedepayload_class->process =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_depay_process);
+ gstrtpbasedepayload_class->set_caps =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_depay_setcaps);
+
+}
+
+static void
+gst_rtp_dtmf_depay_init (GstRtpDTMFDepay * rtpdtmfdepay)
+{
+ rtpdtmfdepay->unit_time = DEFAULT_UNIT_TIME;
+}
+
+static void
+gst_rtp_dtmf_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpDTMFDepay *rtpdtmfdepay;
+
+ rtpdtmfdepay = GST_RTP_DTMF_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_UNIT_TIME:
+ rtpdtmfdepay->unit_time = g_value_get_uint (value);
+ break;
+ case PROP_MAX_DURATION:
+ rtpdtmfdepay->max_duration = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_dtmf_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpDTMFDepay *rtpdtmfdepay;
+
+ rtpdtmfdepay = GST_RTP_DTMF_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_UNIT_TIME:
+ g_value_set_uint (value, rtpdtmfdepay->unit_time);
+ break;
+ case PROP_MAX_DURATION:
+ g_value_set_uint (value, rtpdtmfdepay->max_duration);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+gboolean
+gst_rtp_dtmf_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
+{
+ GstCaps *filtercaps, *srccaps;
+ GstStructure *structure = gst_caps_get_structure (caps, 0);
+ gint clock_rate = 8000; /* default */
+
+ gst_structure_get_int (structure, "clock-rate", &clock_rate);
+ filter->clock_rate = clock_rate;
+
+ filtercaps =
+ gst_pad_get_pad_template_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter));
+
+ filtercaps = gst_caps_make_writable (filtercaps);
+ gst_caps_set_simple (filtercaps, "rate", G_TYPE_INT, clock_rate, NULL);
+
+ srccaps = gst_pad_peer_query_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter),
+ filtercaps);
+ gst_caps_unref (filtercaps);
+
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter), srccaps);
+ gst_caps_unref (srccaps);
+
+ return TRUE;
+}
+
+static GstBuffer *
+gst_dtmf_src_generate_tone (GstRtpDTMFDepay * rtpdtmfdepay,
+ GstRTPDTMFPayload payload)
+{
+ GstBuffer *buf;
+ GstMapInfo map;
+ gint16 *p;
+ gint tone_size;
+ double i = 0;
+ double amplitude, f1, f2;
+ double volume_factor;
+ DTMF_KEY key = DTMF_KEYS[payload.event];
+ guint32 clock_rate;
+ GstRTPBaseDepayload *depayload = GST_RTP_BASE_DEPAYLOAD (rtpdtmfdepay);
+ gint volume;
+ static GstAllocationParams params = { 0, 1, 0, 0, };
+
+ clock_rate = depayload->clock_rate;
+
+ /* Create a buffer for the tone */
+ tone_size = (payload.duration * SAMPLE_SIZE * CHANNELS) / 8;
+ buf = gst_buffer_new_allocate (NULL, tone_size, &params);
+ GST_BUFFER_DURATION (buf) = payload.duration * GST_SECOND / clock_rate;
+ volume = payload.volume;
+
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ p = (gint16 *) map.data;
+
+ volume_factor = pow (10, (-volume) / 20);
+
+ /*
+ * For each sample point we calculate 'x' as the
+ * the amplitude value.
+ */
+ for (i = 0; i < (tone_size / (SAMPLE_SIZE / 8)); i++) {
+ /*
+ * We add the fundamental frequencies together.
+ */
+ f1 = sin (2 * M_PI * key.low_frequency * (rtpdtmfdepay->sample /
+ clock_rate));
+ f2 = sin (2 * M_PI * key.high_frequency * (rtpdtmfdepay->sample /
+ clock_rate));
+
+ amplitude = (f1 + f2) / 2;
+
+ /* Adjust the volume */
+ amplitude *= volume_factor;
+
+ /* Make the [-1:1] interval into a [-32767:32767] interval */
+ amplitude *= 32767;
+
+ /* Store it in the data buffer */
+ *(p++) = (gint16) amplitude;
+
+ (rtpdtmfdepay->sample)++;
+ }
+
+ gst_buffer_unmap (buf, &map);
+
+ return buf;
+}
+
+
/* Depayload one RTP telephone-event packet (RFC 2833). Validates the
 * 4-byte payload, posts a "dtmf-event" bus message on the first packet of
 * an event, then synthesizes only the audio not yet rendered for this
 * event (packets carry the event's cumulative duration). Returns the
 * generated audio buffer, or NULL for bad/late/duplicate packets. */
static GstBuffer *
gst_rtp_dtmf_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{

  GstRtpDTMFDepay *rtpdtmfdepay = NULL;
  GstBuffer *outbuf = NULL;
  guint payload_len;
  guint8 *payload = NULL;
  guint32 timestamp;
  GstRTPDTMFPayload dtmf_payload;
  gboolean marker;
  GstStructure *structure = NULL;
  GstMessage *dtmf_message = NULL;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;
  GstBitReader bitreader;

  rtpdtmfdepay = GST_RTP_DTMF_DEPAY (depayload);

  gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuffer);

  payload_len = gst_rtp_buffer_get_payload_len (&rtpbuffer);
  payload = gst_rtp_buffer_get_payload (&rtpbuffer);

  /* A telephone-event payload is exactly 4 bytes */
  if (payload_len != 4)
    goto bad_packet;

  /* Layout: event(8) | E,R bits(2, skipped) | volume(6) | duration(16) */
  gst_bit_reader_init (&bitreader, payload, payload_len);
  gst_bit_reader_get_bits_uint8 (&bitreader, &dtmf_payload.event, 8);
  gst_bit_reader_skip (&bitreader, 2);
  gst_bit_reader_get_bits_uint8 (&bitreader, &dtmf_payload.volume, 6);
  gst_bit_reader_get_bits_uint16 (&bitreader, &dtmf_payload.duration, 16);

  if (dtmf_payload.event > MAX_EVENT)
    goto bad_packet;

  marker = gst_rtp_buffer_get_marker (&rtpbuffer);

  timestamp = gst_rtp_buffer_get_timestamp (&rtpbuffer);

  /* clip to whole units of unit_time */
  if (rtpdtmfdepay->unit_time) {
    guint unit_time_clock =
        (rtpdtmfdepay->unit_time * depayload->clock_rate) / 1000;
    if (dtmf_payload.duration % unit_time_clock) {
      /* Make sure we don't overflow the duration */
      if (dtmf_payload.duration < G_MAXUINT16 - unit_time_clock)
        dtmf_payload.duration += unit_time_clock -
            (dtmf_payload.duration % unit_time_clock);
      else
        dtmf_payload.duration -= dtmf_payload.duration % unit_time_clock;
    }
  }

  /* clip to max duration */
  if (rtpdtmfdepay->max_duration) {
    guint max_duration_clock =
        (rtpdtmfdepay->max_duration * depayload->clock_rate) / 1000;

    if (max_duration_clock < G_MAXUINT16 &&
        dtmf_payload.duration > max_duration_clock)
      dtmf_payload.duration = max_duration_clock;
  }

  GST_DEBUG_OBJECT (depayload, "Received new RTP DTMF packet : "
      "marker=%d - timestamp=%u - event=%d - duration=%d",
      marker, timestamp, dtmf_payload.event, dtmf_payload.duration);

  GST_DEBUG_OBJECT (depayload,
      "Previous information : timestamp=%u - duration=%d",
      rtpdtmfdepay->previous_ts, rtpdtmfdepay->previous_duration);

  /* First packet of an event: marker bit set, or a new RTP timestamp.
   * Reset synthesis state and announce the event on the bus. */
  if (marker || rtpdtmfdepay->previous_ts != timestamp) {
    rtpdtmfdepay->sample = 0;
    rtpdtmfdepay->previous_ts = timestamp;
    rtpdtmfdepay->previous_duration = dtmf_payload.duration;
    rtpdtmfdepay->first_gst_ts = GST_BUFFER_PTS (buf);

    structure = gst_structure_new ("dtmf-event",
        "number", G_TYPE_INT, dtmf_payload.event,
        "volume", G_TYPE_INT, dtmf_payload.volume,
        "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 1, NULL);
    if (structure) {
      dtmf_message =
          gst_message_new_element (GST_OBJECT (depayload), structure);
      if (dtmf_message) {
        if (!gst_element_post_message (GST_ELEMENT (depayload), dtmf_message)) {
          GST_ERROR_OBJECT (depayload,
              "Unable to send dtmf-event message to bus");
        }
      } else {
        GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event message");
      }
    } else {
      GST_ERROR_OBJECT (depayload, "Unable to create dtmf-event structure");
    }
  } else {
    /* Continuation packet: only the part beyond previous_duration is new.
     * NOTE(review): if a late packet arrives with duration strictly less
     * than previous_duration, the guint16 subtraction below wraps around;
     * presumably such packets never reach here in practice — verify. */
    guint16 duration = dtmf_payload.duration;
    dtmf_payload.duration -= rtpdtmfdepay->previous_duration;
    /* If late buffer, ignore */
    if (duration > rtpdtmfdepay->previous_duration)
      rtpdtmfdepay->previous_duration = duration;
  }

  GST_DEBUG_OBJECT (depayload, "new previous duration : %d - new duration : %d"
      " - diff : %d - clock rate : %d - timestamp : %" G_GUINT64_FORMAT,
      rtpdtmfdepay->previous_duration, dtmf_payload.duration,
      (rtpdtmfdepay->previous_duration - dtmf_payload.duration),
      depayload->clock_rate, GST_BUFFER_TIMESTAMP (buf));

  /* If late or duplicate packet (like the redundant end packet). Ignore */
  if (dtmf_payload.duration > 0) {
    outbuf = gst_dtmf_src_generate_tone (rtpdtmfdepay, dtmf_payload);


    /* Timestamp/offsets are relative to the start of the whole event */
    GST_BUFFER_PTS (outbuf) = rtpdtmfdepay->first_gst_ts +
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET (outbuf) =
        (rtpdtmfdepay->previous_duration - dtmf_payload.duration) *
        GST_SECOND / depayload->clock_rate;
    GST_BUFFER_OFFSET_END (outbuf) = rtpdtmfdepay->previous_duration *
        GST_SECOND / depayload->clock_rate;

    GST_DEBUG_OBJECT (depayload,
        "timestamp : %" G_GUINT64_FORMAT " - time %" GST_TIME_FORMAT,
        GST_BUFFER_TIMESTAMP (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  }

  gst_rtp_buffer_unmap (&rtpbuffer);

  return outbuf;

bad_packet:
  GST_ELEMENT_WARNING (rtpdtmfdepay, STREAM, DECODE,
      ("Packet did not validate"), (NULL));

  if (rtpbuffer.buffer != NULL)
    gst_rtp_buffer_unmap (&rtpbuffer);

  return NULL;
}
diff --git a/gst/dtmf/gstrtpdtmfdepay.h b/gst/dtmf/gstrtpdtmfdepay.h
new file mode 100644
index 0000000000..8b7aec6a2d
--- /dev/null
+++ b/gst/dtmf/gstrtpdtmfdepay.h
@@ -0,0 +1,68 @@
+/* GstRtpDtmfDepay
+ *
+ * Copyright (C) 2008 Collabora Limited
+ * Copyright (C) 2008 Nokia Corporation
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_DTMF_DEPAY_H__
+#define __GST_RTP_DTMF_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+#include "gstdtmfcommon.h"
+
+G_BEGIN_DECLS
/* Standard GObject type-cast/check boilerplate for the depayloader */
#define GST_TYPE_RTP_DTMF_DEPAY \
  (gst_rtp_dtmf_depay_get_type())
#define GST_RTP_DTMF_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DTMF_DEPAY,GstRtpDTMFDepay))
#define GST_RTP_DTMF_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DTMF_DEPAY,GstRtpDTMFDepayClass))
#define GST_IS_RTP_DTMF_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DTMF_DEPAY))
#define GST_IS_RTP_DTMF_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DTMF_DEPAY))
typedef struct _GstRtpDTMFDepay GstRtpDTMFDepay;
typedef struct _GstRtpDTMFDepayClass GstRtpDTMFDepayClass;

struct _GstRtpDTMFDepay
{
  /*< private >*/
  GstRTPBaseDepayload depayload;
  double sample;                /* synthesis position within the current event */
  guint32 previous_ts;          /* RTP timestamp of the event in progress */
  guint16 previous_duration;    /* clock ticks already rendered for the event */
  GstClockTime first_gst_ts;    /* PTS of the event's first packet */
  guint unit_time;              /* "unit-time" property (ms, 0 = disabled) */
  guint max_duration;           /* "max-duration" property (ms, 0 = no limit) */
};

struct _GstRtpDTMFDepayClass
{
  GstRTPBaseDepayloadClass parent_class;        /* no new virtuals */
};
+
+GType gst_rtp_dtmf_depay_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpdtmfdepay);
+
+G_END_DECLS
+#endif /* __GST_RTP_DTMF_DEPAY_H__ */
diff --git a/gst/dtmf/gstrtpdtmfsrc.c b/gst/dtmf/gstrtpdtmfsrc.c
new file mode 100644
index 0000000000..eceb9b5f4d
--- /dev/null
+++ b/gst/dtmf/gstrtpdtmfsrc.c
@@ -0,0 +1,1144 @@
+/* GStreamer RTP DTMF source
+ *
+ * gstrtpdtmfsrc.c:
+ *
+ * Copyright (C) <2007> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000,2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpdtmfsrc
+ * @title: rtpdtmfsrc
+ * @see_also: dtmfsrc, rtpdtmfdepay, rtpdtmfmux
+ *
+ * The RTPDTMFSrc element generates RTP DTMF (RFC 2833) event packets on request
+ * from application. The application communicates the beginning and end of a
+ * DTMF event using custom upstream gstreamer events. To report a DTMF event, an
+ * application must send an event of type GST_EVENT_CUSTOM_UPSTREAM, having a
+ * structure of name "dtmf-event" with fields set according to the following
+ * table:
+ *
+ * * `type` (G_TYPE_INT, 0-1): The application uses this field to specify which of the two methods
+ * specified in RFC 2833 to use. The value should be 0 for tones and 1 for
+ * named events. Tones are specified by their frequencies and events are specified
+ * by their number. This element can only take events as input. Do not confuse
+ * with "method" which specified the output.
+ *
+ * * `number` (G_TYPE_INT, 0-15): The event number.
+ *
+ * * `volume` (G_TYPE_INT, 0-36): This field describes the power level of the tone, expressed in dBm0
+ * after dropping the sign. Power levels range from 0 to -63 dBm0. The range of
+ * valid DTMF is from 0 to -36 dBm0. Can be omitted if start is set to FALSE.
+ *
+ * * `start` (G_TYPE_BOOLEAN, True or False): Whether the event is starting or ending.
+ *
+ * * `method` (G_TYPE_INT, 1): The method used for sending event, this element will react if this
+ * field is absent or 1.
+ *
+ * For example, the following code informs the pipeline (and in turn, the
+ * RTPDTMFSrc element inside the pipeline) about the start of an RTP DTMF named
+ * event '1' of volume -25 dBm0:
+ *
+ * |[
+ * structure = gst_structure_new ("dtmf-event",
+ * "type", G_TYPE_INT, 1,
+ * "number", G_TYPE_INT, 1,
+ * "volume", G_TYPE_INT, 25,
+ * "start", G_TYPE_BOOLEAN, TRUE, NULL);
+ *
+ * event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, structure);
+ * gst_element_send_event (pipeline, event);
+ * ]|
+ *
 * When a DTMF tone actually starts or stops, a "dtmf-event-processed"
 * element #GstMessage is posted on the #GstBus with the same fields as
 * the "dtmf-event" #GstEvent that was used to request the event. Also,
 * if any events have not been processed when the element goes from the
 * PAUSED to the READY state, a "dtmf-event-dropped" message is posted
 * on the #GstBus for each of them, in the order that they were received.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <glib.h>
+
+#include "gstrtpdtmfsrc.h"
+#include <gst/base/gstbitwriter.h>
+
#define GST_RTP_DTMF_TYPE_EVENT 1       /* "type" field value for named events */
#define DEFAULT_PTIME 40        /* ms */
#define DEFAULT_SSRC -1         /* -1 = random SSRC (see "ssrc" property) */
#define DEFAULT_PT 96
#define DEFAULT_TIMESTAMP_OFFSET -1     /* -1 = random (see property blurb) */
#define DEFAULT_SEQNUM_OFFSET -1        /* -1 = random (see property blurb) */
#define DEFAULT_CLOCK_RATE 8000

/* Bounds/default for "packet-redundancy": number of packets sent to
 * indicate start and stop of a DTMF event */
#define DEFAULT_PACKET_REDUNDANCY 1
#define MIN_PACKET_REDUNDANCY 1
#define MAX_PACKET_REDUNDANCY 5

GST_DEBUG_CATEGORY_STATIC (gst_rtp_dtmf_src_debug);
#define GST_CAT_DEFAULT gst_rtp_dtmf_src_debug

/* signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

/* Property identifiers */
enum
{
  PROP_0,
  PROP_SSRC,
  PROP_TIMESTAMP_OFFSET,
  PROP_SEQNUM_OFFSET,
  PROP_PT,
  PROP_CLOCK_RATE,
  PROP_TIMESTAMP,
  PROP_SEQNUM,
  PROP_REDUNDANCY
};

/* Source pad: RTP telephone-event packets on a dynamic payload type */
static GstStaticPadTemplate gst_rtp_dtmf_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "payload = (int) [ 96, 127 ], "
        "clock-rate = (int) [ 0, MAX ], "
        "encoding-name = (string) \"TELEPHONE-EVENT\"")
    /* "events = (string) \"0-15\" */
    );


G_DEFINE_TYPE (GstRTPDTMFSrc, gst_rtp_dtmf_src, GST_TYPE_BASE_SRC);
GST_ELEMENT_REGISTER_DEFINE (rtpdtmfsrc, "rtpdtmfsrc", GST_RANK_NONE,
    GST_TYPE_RTP_DTMF_SRC);
+
+static void gst_rtp_dtmf_src_finalize (GObject * object);
+
+static void gst_rtp_dtmf_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_dtmf_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean gst_rtp_dtmf_src_handle_event (GstBaseSrc * basesrc,
+ GstEvent * event);
+static GstStateChangeReturn gst_rtp_dtmf_src_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_rtp_dtmf_src_add_start_event (GstRTPDTMFSrc * dtmfsrc,
+ gint event_number, gint event_volume);
+static void gst_rtp_dtmf_src_add_stop_event (GstRTPDTMFSrc * dtmfsrc);
+
+static gboolean gst_rtp_dtmf_src_unlock (GstBaseSrc * src);
+static gboolean gst_rtp_dtmf_src_unlock_stop (GstBaseSrc * src);
+static GstFlowReturn gst_rtp_dtmf_src_create (GstBaseSrc * basesrc,
+ guint64 offset, guint length, GstBuffer ** buffer);
+static gboolean gst_rtp_dtmf_src_negotiate (GstBaseSrc * basesrc);
+static gboolean gst_rtp_dtmf_src_query (GstBaseSrc * basesrc, GstQuery * query);
+
+
+static void
+gst_rtp_dtmf_src_class_init (GstRTPDTMFSrcClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstBaseSrcClass *gstbasesrc_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_dtmf_src_debug,
+ "rtpdtmfsrc", 0, "rtpdtmfsrc element");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_dtmf_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP DTMF packet generator", "Source/Network",
+ "Generates RTP DTMF packets", "Zeeshan Ali <zeeshan.ali@nokia.com>");
+
+ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_finalize);
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_get_property);
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TIMESTAMP,
+ g_param_spec_uint ("timestamp", "Timestamp",
+ "The RTP timestamp of the last processed packet",
+ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SEQNUM,
+ g_param_spec_uint ("seqnum", "Sequence number",
+ "The RTP sequence number of the last processed packet",
+ 0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_TIMESTAMP_OFFSET, g_param_spec_int ("timestamp-offset",
+ "Timestamp Offset",
+ "Offset to add to all outgoing timestamps (-1 = random)", -1,
+ G_MAXINT, DEFAULT_TIMESTAMP_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SEQNUM_OFFSET,
+ g_param_spec_int ("seqnum-offset", "Sequence number Offset",
+ "Offset to add to all outgoing seqnum (-1 = random)", -1, G_MAXINT,
+ DEFAULT_SEQNUM_OFFSET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_CLOCK_RATE,
+ g_param_spec_uint ("clock-rate", "clockrate",
+ "The clock-rate at which to generate the dtmf packets",
+ 0, G_MAXUINT, DEFAULT_CLOCK_RATE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SSRC,
+ g_param_spec_uint ("ssrc", "SSRC",
+ "The SSRC of the packets (-1 == random)",
+ 0, G_MAXUINT, DEFAULT_SSRC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_PT,
+ g_param_spec_uint ("pt", "payload type",
+ "The payload type of the packets",
+ 0, 0x80, DEFAULT_PT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_REDUNDANCY,
+ g_param_spec_uint ("packet-redundancy", "Packet Redundancy",
+ "Number of packets to send to indicate start and stop dtmf events",
+ MIN_PACKET_REDUNDANCY, MAX_PACKET_REDUNDANCY,
+ DEFAULT_PACKET_REDUNDANCY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_change_state);
+
+ gstbasesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_unlock);
+ gstbasesrc_class->unlock_stop =
+ GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_unlock_stop);
+
+ gstbasesrc_class->event = GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_handle_event);
+ gstbasesrc_class->create = GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_create);
+ gstbasesrc_class->negotiate = GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_negotiate);
+ gstbasesrc_class->query = GST_DEBUG_FUNCPTR (gst_rtp_dtmf_src_query);
+}
+
+static void
+gst_rtp_dtmf_src_event_free (GstRTPDTMFSrcEvent * event)
+{
+ if (event) {
+ if (event->payload)
+ g_slice_free (GstRTPDTMFPayload, event->payload);
+ g_slice_free (GstRTPDTMFSrcEvent, event);
+ }
+}
+
+static void
+gst_rtp_dtmf_src_init (GstRTPDTMFSrc * object)
+{
+ gst_base_src_set_format (GST_BASE_SRC (object), GST_FORMAT_TIME);
+ gst_base_src_set_live (GST_BASE_SRC (object), TRUE);
+
+ object->ssrc = DEFAULT_SSRC;
+ object->seqnum_offset = DEFAULT_SEQNUM_OFFSET;
+ object->ts_offset = DEFAULT_TIMESTAMP_OFFSET;
+ object->pt = DEFAULT_PT;
+ object->clock_rate = DEFAULT_CLOCK_RATE;
+ object->ptime = DEFAULT_PTIME;
+ object->packet_redundancy = DEFAULT_PACKET_REDUNDANCY;
+
+ object->event_queue =
+ g_async_queue_new_full ((GDestroyNotify) gst_rtp_dtmf_src_event_free);
+ object->payload = NULL;
+
+ GST_DEBUG_OBJECT (object, "init done");
+}
+
+static void
+gst_rtp_dtmf_src_finalize (GObject * object)
+{
+ GstRTPDTMFSrc *dtmfsrc;
+
+ dtmfsrc = GST_RTP_DTMF_SRC (object);
+
+ if (dtmfsrc->event_queue) {
+ g_async_queue_unref (dtmfsrc->event_queue);
+ dtmfsrc->event_queue = NULL;
+ }
+
+
+ G_OBJECT_CLASS (gst_rtp_dtmf_src_parent_class)->finalize (object);
+}
+
+/* Parse a "dtmf-event" structure and queue the corresponding start or
+ * stop event.  Returns FALSE on malformed input or on out-of-order
+ * start/start or stop/stop sequences. */
+static gboolean
+gst_rtp_dtmf_src_handle_dtmf_event (GstRTPDTMFSrc * dtmfsrc,
+    const GstStructure * event_structure)
+{
+  gint event_type;
+  gboolean start;
+  gint method;
+  GstClockTime last_stop;
+  gint event_number = 0;
+  gint event_volume = 0;
+  gboolean correct_order;
+
+  /* Validate the structure before mutating any state. */
+  if (!gst_structure_get_int (event_structure, "type", &event_type) ||
+      !gst_structure_get_boolean (event_structure, "start", &start) ||
+      event_type != GST_RTP_DTMF_TYPE_EVENT)
+    goto failure;
+
+  /* If a method is specified, this element only handles method 1 (RTP). */
+  if (gst_structure_get_int (event_structure, "method", &method)) {
+    if (method != 1) {
+      goto failure;
+    }
+  }
+
+  /* A start event must carry the tone number and volume. */
+  if (start)
+    if (!gst_structure_get_int (event_structure, "number", &event_number) ||
+        !gst_structure_get_int (event_structure, "volume", &event_volume))
+      goto failure;
+
+  GST_OBJECT_LOCK (dtmfsrc);
+  if (gst_structure_get_clock_time (event_structure, "last-stop", &last_stop))
+    dtmfsrc->last_stop = last_stop;
+  else
+    dtmfsrc->last_stop = GST_CLOCK_TIME_NONE;
+  /* start and stop events must strictly alternate */
+  correct_order = (start != dtmfsrc->last_event_was_start);
+  dtmfsrc->last_event_was_start = start;
+  GST_OBJECT_UNLOCK (dtmfsrc);
+
+  if (!correct_order)
+    goto failure;
+
+  if (start) {
+    /* number/volume were already validated and fetched above; the
+     * previous code re-read them from the structure a second time. */
+    GST_DEBUG_OBJECT (dtmfsrc, "Received start event %d with volume %d",
+        event_number, event_volume);
+    gst_rtp_dtmf_src_add_start_event (dtmfsrc, event_number, event_volume);
+  }
+
+  else {
+    GST_DEBUG_OBJECT (dtmfsrc, "Received stop event");
+    gst_rtp_dtmf_src_add_stop_event (dtmfsrc);
+  }
+
+  return TRUE;
+failure:
+  return FALSE;
+}
+
+/* Handle a custom upstream event; dispatches "dtmf-event" structures
+ * to gst_rtp_dtmf_src_handle_dtmf_event() while PLAYING. */
+static gboolean
+gst_rtp_dtmf_src_handle_custom_upstream (GstRTPDTMFSrc * dtmfsrc,
+    GstEvent * event)
+{
+  gboolean result = FALSE;
+  gchar *struct_str;
+  const GstStructure *structure;
+
+  GstState state;
+  GstStateChangeReturn ret;
+
+  /* DTMF events are only meaningful while streaming. */
+  ret = gst_element_get_state (GST_ELEMENT (dtmfsrc), &state, NULL, 0);
+  if (ret != GST_STATE_CHANGE_SUCCESS || state != GST_STATE_PLAYING) {
+    GST_DEBUG_OBJECT (dtmfsrc, "Received event while not in PLAYING state");
+    goto ret;
+  }
+
+  GST_DEBUG_OBJECT (dtmfsrc, "Received event is of our interest");
+
+  /* A custom event may carry no structure at all; the previous code
+   * handed a NULL structure to gst_structure_to_string(), which is
+   * invalid, so check for NULL first. */
+  structure = gst_event_get_structure (event);
+  if (structure) {
+    struct_str = gst_structure_to_string (structure);
+    GST_DEBUG_OBJECT (dtmfsrc, "Event has structure %s", struct_str);
+    g_free (struct_str);
+    if (gst_structure_has_name (structure, "dtmf-event"))
+      result = gst_rtp_dtmf_src_handle_dtmf_event (dtmfsrc, structure);
+  }
+
+ret:
+  return result;
+}
+
+static gboolean
+gst_rtp_dtmf_src_handle_event (GstBaseSrc * basesrc, GstEvent * event)
+{
+  GstRTPDTMFSrc *self = GST_RTP_DTMF_SRC (basesrc);
+
+  GST_DEBUG_OBJECT (self, "Received an event on the src pad");
+
+  /* Only custom upstream events (carrying dtmf-event structures) are
+   * of interest to this element; everything else is unhandled. */
+  if (GST_EVENT_TYPE (event) != GST_EVENT_CUSTOM_UPSTREAM)
+    return FALSE;
+
+  return gst_rtp_dtmf_src_handle_custom_upstream (self, event);
+}
+
+static void
+gst_rtp_dtmf_src_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRTPDTMFSrc *self = GST_RTP_DTMF_SRC (object);
+
+  switch (prop_id) {
+    case PROP_TIMESTAMP_OFFSET:
+      self->ts_offset = g_value_get_int (value);
+      break;
+    case PROP_SEQNUM_OFFSET:
+      self->seqnum_offset = g_value_get_int (value);
+      break;
+    case PROP_CLOCK_RATE:
+      /* a new clock-rate invalidates the negotiated caps */
+      self->clock_rate = g_value_get_uint (value);
+      self->dirty = TRUE;
+      break;
+    case PROP_SSRC:
+      self->ssrc = g_value_get_uint (value);
+      break;
+    case PROP_PT:
+      /* changing the payload type also requires renegotiation */
+      self->pt = g_value_get_uint (value);
+      self->dirty = TRUE;
+      break;
+    case PROP_REDUNDANCY:
+      self->packet_redundancy = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_rtp_dtmf_src_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstRTPDTMFSrc *self = GST_RTP_DTMF_SRC (object);
+
+  switch (prop_id) {
+    case PROP_TIMESTAMP_OFFSET:
+      g_value_set_int (value, self->ts_offset);
+      break;
+    case PROP_SEQNUM_OFFSET:
+      g_value_set_int (value, self->seqnum_offset);
+      break;
+    case PROP_CLOCK_RATE:
+      g_value_set_uint (value, self->clock_rate);
+      break;
+    case PROP_SSRC:
+      g_value_set_uint (value, self->ssrc);
+      break;
+    case PROP_PT:
+      g_value_set_uint (value, self->pt);
+      break;
+    case PROP_TIMESTAMP:
+      /* read-only: RTP timestamp of the current/last event */
+      g_value_set_uint (value, self->rtp_timestamp);
+      break;
+    case PROP_SEQNUM:
+      /* read-only: sequence number of the last packet sent */
+      g_value_set_uint (value, self->seqnum);
+      break;
+    case PROP_REDUNDANCY:
+      g_value_set_uint (value, self->packet_redundancy);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Compute the GStreamer and RTP timestamps for a new DTMF event.
+ * Returns FALSE when no clock is available to derive "now". */
+static gboolean
+gst_rtp_dtmf_prepare_timestamps (GstRTPDTMFSrc * dtmfsrc)
+{
+  GstClockTime last_stop;
+
+  GST_OBJECT_LOCK (dtmfsrc);
+  last_stop = dtmfsrc->last_stop;
+  GST_OBJECT_UNLOCK (dtmfsrc);
+
+  /* Start the event where the caller said the stream stopped, if the
+   * dtmf-event carried a "last-stop"; otherwise use the pipeline
+   * clock's current running time. */
+  if (GST_CLOCK_TIME_IS_VALID (last_stop)) {
+    dtmfsrc->start_timestamp = last_stop;
+  } else {
+    GstClock *clock = gst_element_get_clock (GST_ELEMENT (dtmfsrc));
+
+    /* no clock, no way to timestamp the tone */
+    if (clock == NULL)
+      return FALSE;
+
+    dtmfsrc->start_timestamp = gst_clock_get_time (clock)
+        - gst_element_get_base_time (GST_ELEMENT (dtmfsrc));
+    gst_object_unref (clock);
+  }
+
+  /* If the last stop was in the past, then lets add the buffers together */
+  if (dtmfsrc->start_timestamp < dtmfsrc->timestamp)
+    dtmfsrc->start_timestamp = dtmfsrc->timestamp;
+
+  dtmfsrc->timestamp = dtmfsrc->start_timestamp;
+
+  /* RTP timestamp = negotiated offset base + running time scaled to
+   * the RTP clock rate */
+  dtmfsrc->rtp_timestamp = dtmfsrc->ts_base +
+      gst_util_uint64_scale_int (gst_segment_to_running_time (&GST_BASE_SRC
+          (dtmfsrc)->segment, GST_FORMAT_TIME, dtmfsrc->timestamp),
+      dtmfsrc->clock_rate, GST_SECOND);
+
+  return TRUE;
+}
+
+
+static void
+gst_rtp_dtmf_src_add_start_event (GstRTPDTMFSrc * dtmfsrc, gint event_number,
+    gint event_volume)
+{
+  GstRTPDTMFSrcEvent *ev;
+  GstRTPDTMFPayload *pay;
+
+  /* Queue a START event carrying the (range-clamped) tone number and
+   * volume for the streaming thread to pick up. */
+  pay = g_slice_new0 (GstRTPDTMFPayload);
+  pay->event = CLAMP (event_number, MIN_EVENT, MAX_EVENT);
+  pay->volume = CLAMP (event_volume, MIN_VOLUME, MAX_VOLUME);
+
+  ev = g_slice_new0 (GstRTPDTMFSrcEvent);
+  ev->event_type = RTP_DTMF_EVENT_TYPE_START;
+  ev->payload = pay;
+
+  g_async_queue_push (dtmfsrc->event_queue, ev);
+}
+
+static void
+gst_rtp_dtmf_src_add_stop_event (GstRTPDTMFSrc * dtmfsrc)
+{
+  /* Queue a STOP event; it carries no payload. */
+  GstRTPDTMFSrcEvent *ev = g_slice_new0 (GstRTPDTMFSrcEvent);
+
+  ev->event_type = RTP_DTMF_EVENT_TYPE_STOP;
+  g_async_queue_push (dtmfsrc->event_queue, ev);
+}
+
+
+static void
+gst_rtp_dtmf_prepare_rtp_headers (GstRTPDTMFSrc * dtmfsrc,
+    GstRTPBuffer * rtpbuf)
+{
+  /* Fill in the RTP header fields of the outgoing packet. */
+  gst_rtp_buffer_set_ssrc (rtpbuf, dtmfsrc->current_ssrc);
+  gst_rtp_buffer_set_payload_type (rtpbuf, dtmfsrc->pt);
+
+  /* Only the very first packet of an event carries the marker bit. */
+  if (dtmfsrc->first_packet)
+    gst_rtp_buffer_set_marker (rtpbuf, TRUE);
+
+  /* bump and apply the sequence number */
+  dtmfsrc->seqnum++;
+  gst_rtp_buffer_set_seq (rtpbuf, dtmfsrc->seqnum);
+
+  /* the RTP timestamp stays fixed for the whole event */
+  gst_rtp_buffer_set_timestamp (rtpbuf, dtmfsrc->rtp_timestamp);
+}
+
+/* Build the next outgoing RTP telephone-event packet from the payload
+ * of the event currently playing, advancing timestamps and the
+ * reported event duration as it goes. */
+static GstBuffer *
+gst_rtp_dtmf_src_create_next_rtp_packet (GstRTPDTMFSrc * dtmfsrc)
+{
+  GstBuffer *buf;
+  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;
+  GstBitWriter bitwriter;
+  guint8 *payload;
+  /* end flag: the 2 bits written below are non-zero only on the
+   * final packet(s) of the event */
+  guint8 end = dtmfsrc->last_packet ? 0x02 : 0;
+
+  /* 4-byte payload: event(8) | end/reserved(2) | volume(6) | duration(16) */
+  buf = gst_rtp_buffer_new_allocate (4, 0, 0);
+
+  gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtpbuffer);
+
+  gst_rtp_dtmf_prepare_rtp_headers (dtmfsrc, &rtpbuffer);
+
+  /* timestamp and duration of GstBuffer */
+  /* Redundant buffer have no duration ... */
+  if (dtmfsrc->redundancy_count > 1)
+    GST_BUFFER_DURATION (buf) = 0;
+  else
+    GST_BUFFER_DURATION (buf) = dtmfsrc->ptime * GST_MSECOND;
+  GST_BUFFER_PTS (buf) = dtmfsrc->timestamp;
+
+  payload = gst_rtp_buffer_get_payload (&rtpbuffer);
+
+  memset (payload, 0, 4);
+  gst_bit_writer_init_with_data (&bitwriter, payload, 4, FALSE);
+  gst_bit_writer_put_bits_uint8 (&bitwriter, dtmfsrc->payload->event, 8);
+  gst_bit_writer_put_bits_uint8 (&bitwriter, end, 2);
+  gst_bit_writer_put_bits_uint8 (&bitwriter, dtmfsrc->payload->volume, 6);
+  gst_bit_writer_put_bits_uint16 (&bitwriter, dtmfsrc->payload->duration, 16);
+
+  /* Stretch the last non-redundant packet so a minimum inter-digit
+   * gap follows, rounded up to a whole number of ptime intervals. */
+  if (dtmfsrc->redundancy_count <= 1 && dtmfsrc->last_packet) {
+    GstClockTime inter_digit_interval = MIN_INTER_DIGIT_INTERVAL;
+
+    if (inter_digit_interval % dtmfsrc->ptime != 0)
+      inter_digit_interval += dtmfsrc->ptime -
+          (MIN_INTER_DIGIT_INTERVAL % dtmfsrc->ptime);
+
+    GST_BUFFER_DURATION (buf) += inter_digit_interval * GST_MSECOND;
+  }
+
+  GST_LOG_OBJECT (dtmfsrc, "Creating new buffer with event %u duration "
+      " gst: %" GST_TIME_FORMAT " at %" GST_TIME_FORMAT "(rtp ts:%u dur:%u)",
+      dtmfsrc->payload->event, GST_TIME_ARGS (GST_BUFFER_DURATION (buf)),
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), dtmfsrc->rtp_timestamp,
+      dtmfsrc->payload->duration);
+
+  /* duration of DTMF payload for the NEXT packet */
+  /* not updated for redundant packets */
+  if (dtmfsrc->redundancy_count <= 1)
+    dtmfsrc->payload->duration += dtmfsrc->ptime * dtmfsrc->clock_rate / 1000;
+
+  if (GST_CLOCK_TIME_IS_VALID (dtmfsrc->timestamp))
+    dtmfsrc->timestamp += GST_BUFFER_DURATION (buf);
+
+  gst_rtp_buffer_unmap (&rtpbuffer);
+
+  return buf;
+}
+
+static GstMessage *
+gst_dtmf_src_prepare_message (GstRTPDTMFSrc * dtmfsrc,
+    const gchar * message_name, GstRTPDTMFSrcEvent * event)
+{
+  GstStructure *s = NULL;
+
+  /* Only start and stop events produce a bus message; PAUSE_TASK and
+   * any unknown type yield nothing. */
+  if (event->event_type == RTP_DTMF_EVENT_TYPE_START) {
+    s = gst_structure_new (message_name,
+        "type", G_TYPE_INT, 1,
+        "method", G_TYPE_INT, 1,
+        "start", G_TYPE_BOOLEAN, TRUE,
+        "number", G_TYPE_INT, event->payload->event,
+        "volume", G_TYPE_INT, event->payload->volume, NULL);
+  } else if (event->event_type == RTP_DTMF_EVENT_TYPE_STOP) {
+    s = gst_structure_new (message_name,
+        "type", G_TYPE_INT, 1, "method", G_TYPE_INT, 1,
+        "start", G_TYPE_BOOLEAN, FALSE, NULL);
+  } else {
+    return NULL;
+  }
+
+  return gst_message_new_element (GST_OBJECT (dtmfsrc), s);
+}
+
+static void
+gst_dtmf_src_post_message (GstRTPDTMFSrc * dtmfsrc, const gchar * message_name,
+    GstRTPDTMFSrcEvent * event)
+{
+  /* Build and post the element message, if the event maps to one. */
+  GstMessage *msg;
+
+  msg = gst_dtmf_src_prepare_message (dtmfsrc, message_name, event);
+  if (msg != NULL)
+    gst_element_post_message (GST_ELEMENT (dtmfsrc), msg);
+}
+
+
+/* GstBaseSrc::create vfunc.  Drains the event queue until a START
+ * installs a payload, waits on the clock to pace output at ptime
+ * intervals, then emits one RTP packet (with redundancy on the first
+ * and last packets of an event). */
+static GstFlowReturn
+gst_rtp_dtmf_src_create (GstBaseSrc * basesrc, guint64 offset,
+    guint length, GstBuffer ** buffer)
+{
+  GstRTPDTMFSrcEvent *event;
+  GstRTPDTMFSrc *dtmfsrc;
+  GstClock *clock;
+  GstClockID *clockid;          /* NOTE(review): GstClockID is itself a
+                                 * gpointer, so this pointer-to declaration
+                                 * only works because both sides are void
+                                 * pointers -- confirm intended type */
+  GstClockReturn clockret;
+  GstMessage *message;
+  GQueue messages = G_QUEUE_INIT;
+
+  dtmfsrc = GST_RTP_DTMF_SRC (basesrc);
+
+  /* Event loop: block on the queue when idle; while a tone is playing,
+   * poll it once the minimum pulse duration has elapsed. */
+  do {
+
+    if (dtmfsrc->payload == NULL) {
+      GST_DEBUG_OBJECT (dtmfsrc, "popping");
+      event = g_async_queue_pop (dtmfsrc->event_queue);
+
+      GST_DEBUG_OBJECT (dtmfsrc, "popped %d", event->event_type);
+
+      switch (event->event_type) {
+        case RTP_DTMF_EVENT_TYPE_STOP:
+          GST_WARNING_OBJECT (dtmfsrc,
+              "Received a DTMF stop event when already stopped");
+          gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+          break;
+
+        case RTP_DTMF_EVENT_TYPE_START:
+          dtmfsrc->first_packet = TRUE;
+          dtmfsrc->last_packet = FALSE;
+          /* Set the redundancy on the first packet */
+          dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
+          if (!gst_rtp_dtmf_prepare_timestamps (dtmfsrc))
+            goto no_clock;
+
+          /* messages are only posted after the clock wait succeeds */
+          g_queue_push_tail (&messages,
+              gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
+                  event));
+          /* take ownership of the payload; event_free below must not
+           * release it, hence the NULL assignment */
+          dtmfsrc->payload = event->payload;
+          dtmfsrc->payload->duration =
+              dtmfsrc->ptime * dtmfsrc->clock_rate / 1000;
+          event->payload = NULL;
+          break;
+
+        case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
+          /*
+           * We're pushing it back because it has to stay in there until
+           * the task is really paused (and the queue will then be flushed)
+           */
+          GST_OBJECT_LOCK (dtmfsrc);
+          if (dtmfsrc->paused) {
+            g_async_queue_push (dtmfsrc->event_queue, event);
+            goto paused_locked;
+          }
+          GST_OBJECT_UNLOCK (dtmfsrc);
+          break;
+      }
+
+      gst_rtp_dtmf_src_event_free (event);
+    } else if (!dtmfsrc->first_packet && !dtmfsrc->last_packet &&
+        (dtmfsrc->timestamp - dtmfsrc->start_timestamp) / GST_MSECOND >=
+        MIN_PULSE_DURATION) {
+      GST_DEBUG_OBJECT (dtmfsrc, "try popping");
+      event = g_async_queue_try_pop (dtmfsrc->event_queue);
+
+      if (event != NULL) {
+        GST_DEBUG_OBJECT (dtmfsrc, "try popped %d", event->event_type);
+
+        switch (event->event_type) {
+          case RTP_DTMF_EVENT_TYPE_START:
+            GST_WARNING_OBJECT (dtmfsrc,
+                "Received two consecutive DTMF start events");
+            gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+            break;
+
+          case RTP_DTMF_EVENT_TYPE_STOP:
+            dtmfsrc->first_packet = FALSE;
+            dtmfsrc->last_packet = TRUE;
+            /* Set the redundancy on the last packet */
+            dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
+            g_queue_push_tail (&messages,
+                gst_dtmf_src_prepare_message (dtmfsrc, "dtmf-event-processed",
+                    event));
+            break;
+
+          case RTP_DTMF_EVENT_TYPE_PAUSE_TASK:
+            /*
+             * We're pushing it back because it has to stay in there until
+             * the task is really paused (and the queue will then be flushed)
+             */
+            GST_DEBUG_OBJECT (dtmfsrc, "pushing pause_task...");
+            GST_OBJECT_LOCK (dtmfsrc);
+            if (dtmfsrc->paused) {
+              g_async_queue_push (dtmfsrc->event_queue, event);
+              goto paused_locked;
+            }
+            GST_OBJECT_UNLOCK (dtmfsrc);
+            break;
+        }
+        gst_rtp_dtmf_src_event_free (event);
+      }
+    }
+  } while (dtmfsrc->payload == NULL);
+
+
+  GST_DEBUG_OBJECT (dtmfsrc, "Processed events, now lets wait on the clock");
+
+  /* Wait until the buffer's timestamp becomes current so packets go
+   * out paced at ptime intervals. */
+  clock = gst_element_get_clock (GST_ELEMENT (basesrc));
+  if (!clock)
+    goto no_clock;
+  clockid = gst_clock_new_single_shot_id (clock, dtmfsrc->timestamp +
+      gst_element_get_base_time (GST_ELEMENT (dtmfsrc)));
+  gst_object_unref (clock);
+
+  GST_OBJECT_LOCK (dtmfsrc);
+  if (!dtmfsrc->paused) {
+    /* publish the id so unlock() can unschedule this wait */
+    dtmfsrc->clockid = clockid;
+    GST_OBJECT_UNLOCK (dtmfsrc);
+
+    clockret = gst_clock_id_wait (clockid, NULL);
+
+    GST_OBJECT_LOCK (dtmfsrc);
+    if (dtmfsrc->paused)
+      clockret = GST_CLOCK_UNSCHEDULED;
+  } else {
+    clockret = GST_CLOCK_UNSCHEDULED;
+  }
+  gst_clock_id_unref (clockid);
+  dtmfsrc->clockid = NULL;
+  GST_OBJECT_UNLOCK (dtmfsrc);
+
+  while ((message = g_queue_pop_head (&messages)) != NULL)
+    gst_element_post_message (GST_ELEMENT (dtmfsrc), message);
+
+  if (clockret == GST_CLOCK_UNSCHEDULED) {
+    goto paused;
+  }
+
+send_last:
+
+  /* renegotiate if pt or clock-rate changed via properties */
+  if (dtmfsrc->dirty)
+    if (!gst_rtp_dtmf_src_negotiate (basesrc))
+      return GST_FLOW_NOT_NEGOTIATED;
+
+  /* create buffer to hold the payload */
+  *buffer = gst_rtp_dtmf_src_create_next_rtp_packet (dtmfsrc);
+
+  if (dtmfsrc->redundancy_count)
+    dtmfsrc->redundancy_count--;
+
+  /* Only the very first one has a marker */
+  dtmfsrc->first_packet = FALSE;
+
+  /* This is the end of the event */
+  if (dtmfsrc->last_packet == TRUE && dtmfsrc->redundancy_count == 0) {
+
+    g_slice_free (GstRTPDTMFPayload, dtmfsrc->payload);
+    dtmfsrc->payload = NULL;
+
+    dtmfsrc->last_packet = FALSE;
+  }
+
+  return GST_FLOW_OK;
+
+paused_locked:
+
+  GST_OBJECT_UNLOCK (dtmfsrc);
+
+paused:
+
+  /* If a tone is in flight, still emit its final (end-marked) packets
+   * before flushing; otherwise just bail out. */
+  if (dtmfsrc->payload) {
+    dtmfsrc->first_packet = FALSE;
+    dtmfsrc->last_packet = TRUE;
+    /* Set the redundancy on the last packet */
+    dtmfsrc->redundancy_count = dtmfsrc->packet_redundancy;
+    goto send_last;
+  } else {
+    return GST_FLOW_FLUSHING;
+  }
+
+no_clock:
+  GST_ELEMENT_ERROR (dtmfsrc, STREAM, MUX, ("No available clock"),
+      ("No available clock"));
+  gst_pad_pause_task (GST_BASE_SRC_PAD (dtmfsrc));
+  return GST_FLOW_ERROR;
+}
+
+
+/* GstBaseSrc::negotiate vfunc.  Intersects our defaults with the peer
+ * caps, fixates the result, and adopts pt / clock-rate / ssrc /
+ * offsets / ptime from the peer where provided. */
+static gboolean
+gst_rtp_dtmf_src_negotiate (GstBaseSrc * basesrc)
+{
+  GstCaps *srccaps, *peercaps;
+  GstRTPDTMFSrc *dtmfsrc = GST_RTP_DTMF_SRC (basesrc);
+  gboolean ret;
+
+  /* fill in the defaults, these properties cannot be negotiated. */
+  srccaps = gst_caps_new_simple ("application/x-rtp",
+      "media", G_TYPE_STRING, "audio",
+      "encoding-name", G_TYPE_STRING, "TELEPHONE-EVENT", NULL);
+
+  /* the peer caps can override some of the defaults */
+  peercaps = gst_pad_peer_query_caps (GST_BASE_SRC_PAD (basesrc), NULL);
+  if (peercaps == NULL) {
+    /* no peer caps, just add the other properties */
+    gst_caps_set_simple (srccaps,
+        "payload", G_TYPE_INT, dtmfsrc->pt,
+        "ssrc", G_TYPE_UINT, dtmfsrc->current_ssrc,
+        "timestamp-offset", G_TYPE_UINT, dtmfsrc->ts_base,
+        "clock-rate", G_TYPE_INT, dtmfsrc->clock_rate,
+        "seqnum-offset", G_TYPE_UINT, dtmfsrc->seqnum_base, NULL);
+
+    GST_DEBUG_OBJECT (dtmfsrc, "no peer caps: %" GST_PTR_FORMAT, srccaps);
+  } else {
+    GstCaps *temp;
+    GstStructure *s;
+    const GValue *value;
+    gint pt;
+    gint clock_rate;
+
+    /* peer provides caps we can use to fixate, intersect. This always returns a
+     * writable caps. */
+    temp = gst_caps_intersect (srccaps, peercaps);
+    gst_caps_unref (srccaps);
+    gst_caps_unref (peercaps);
+
+    if (!temp) {
+      GST_DEBUG_OBJECT (dtmfsrc, "Could not get intersection with peer caps");
+      return FALSE;
+    }
+
+    if (gst_caps_is_empty (temp)) {
+      GST_DEBUG_OBJECT (dtmfsrc, "Intersection with peer caps is empty");
+      gst_caps_unref (temp);
+      return FALSE;
+    }
+
+    /* now fixate, start by taking the first caps */
+    temp = gst_caps_truncate (temp);
+    temp = gst_caps_make_writable (temp);
+    srccaps = temp;
+
+    /* get first structure */
+    s = gst_caps_get_structure (srccaps, 0);
+
+    if (gst_structure_get_int (s, "payload", &pt)) {
+      /* use peer pt */
+      dtmfsrc->pt = pt;
+      GST_LOG_OBJECT (dtmfsrc, "using peer pt %d", pt);
+    } else {
+      if (gst_structure_has_field (s, "payload")) {
+        /* can only fixate if there is a field */
+        gst_structure_fixate_field_nearest_int (s, "payload", dtmfsrc->pt);
+        gst_structure_get_int (s, "payload", &pt);
+        GST_LOG_OBJECT (dtmfsrc, "using peer pt %d", pt);
+      } else {
+        /* no pt field, use the internal pt */
+        pt = dtmfsrc->pt;
+        gst_structure_set (s, "payload", G_TYPE_INT, pt, NULL);
+        GST_LOG_OBJECT (dtmfsrc, "using internal pt %d", pt);
+      }
+    }
+
+    /* adopt the peer's clock-rate if it fixated one, else keep ours */
+    if (gst_structure_get_int (s, "clock-rate", &clock_rate)) {
+      dtmfsrc->clock_rate = clock_rate;
+      GST_LOG_OBJECT (dtmfsrc, "using clock-rate from caps %d",
+          dtmfsrc->clock_rate);
+    } else {
+      GST_LOG_OBJECT (dtmfsrc, "using existing clock-rate %d",
+          dtmfsrc->clock_rate);
+    }
+    gst_structure_set (s, "clock-rate", G_TYPE_INT, dtmfsrc->clock_rate, NULL);
+
+    /* ssrc / timestamp-offset / seqnum-offset: peer value wins when it
+     * is already a fixed uint, otherwise our value is written back */
+    if (gst_structure_has_field_typed (s, "ssrc", G_TYPE_UINT)) {
+      value = gst_structure_get_value (s, "ssrc");
+      dtmfsrc->current_ssrc = g_value_get_uint (value);
+      GST_LOG_OBJECT (dtmfsrc, "using peer ssrc %08x", dtmfsrc->current_ssrc);
+    } else {
+      /* FIXME, fixate_nearest_uint would be even better */
+      gst_structure_set (s, "ssrc", G_TYPE_UINT, dtmfsrc->current_ssrc, NULL);
+      GST_LOG_OBJECT (dtmfsrc, "using internal ssrc %08x",
+          dtmfsrc->current_ssrc);
+    }
+
+    if (gst_structure_has_field_typed (s, "timestamp-offset", G_TYPE_UINT)) {
+      value = gst_structure_get_value (s, "timestamp-offset");
+      dtmfsrc->ts_base = g_value_get_uint (value);
+      GST_LOG_OBJECT (dtmfsrc, "using peer timestamp-offset %u",
+          dtmfsrc->ts_base);
+    } else {
+      /* FIXME, fixate_nearest_uint would be even better */
+      gst_structure_set (s, "timestamp-offset", G_TYPE_UINT, dtmfsrc->ts_base,
+          NULL);
+      GST_LOG_OBJECT (dtmfsrc, "using internal timestamp-offset %u",
+          dtmfsrc->ts_base);
+    }
+    if (gst_structure_has_field_typed (s, "seqnum-offset", G_TYPE_UINT)) {
+      value = gst_structure_get_value (s, "seqnum-offset");
+      dtmfsrc->seqnum_base = g_value_get_uint (value);
+      GST_LOG_OBJECT (dtmfsrc, "using peer seqnum-offset %u",
+          dtmfsrc->seqnum_base);
+    } else {
+      /* FIXME, fixate_nearest_uint would be even better */
+      gst_structure_set (s, "seqnum-offset", G_TYPE_UINT, dtmfsrc->seqnum_base,
+          NULL);
+      GST_LOG_OBJECT (dtmfsrc, "using internal seqnum-offset %u",
+          dtmfsrc->seqnum_base);
+    }
+
+    /* packet interval: prefer peer "ptime", then "maxptime", then ours */
+    if (gst_structure_has_field_typed (s, "ptime", G_TYPE_UINT)) {
+      value = gst_structure_get_value (s, "ptime");
+      dtmfsrc->ptime = g_value_get_uint (value);
+      GST_LOG_OBJECT (dtmfsrc, "using peer ptime %u", dtmfsrc->ptime);
+    } else if (gst_structure_has_field_typed (s, "maxptime", G_TYPE_UINT)) {
+      value = gst_structure_get_value (s, "maxptime");
+      dtmfsrc->ptime = g_value_get_uint (value);
+      GST_LOG_OBJECT (dtmfsrc, "using peer maxptime as ptime %u",
+          dtmfsrc->ptime);
+    } else {
+      /* FIXME, fixate_nearest_uint would be even better */
+      gst_structure_set (s, "ptime", G_TYPE_UINT, dtmfsrc->ptime, NULL);
+      GST_LOG_OBJECT (dtmfsrc, "using internal ptime %u", dtmfsrc->ptime);
+    }
+
+    GST_DEBUG_OBJECT (dtmfsrc, "with peer caps: %" GST_PTR_FORMAT, srccaps);
+  }
+
+  ret = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), srccaps);
+  gst_caps_unref (srccaps);
+
+  /* caps now reflect the current pt/clock-rate */
+  dtmfsrc->dirty = FALSE;
+
+  return ret;
+
+}
+
+static gboolean
+gst_rtp_dtmf_src_query (GstBaseSrc * basesrc, GstQuery * query)
+{
+  GstRTPDTMFSrc *self = GST_RTP_DTMF_SRC (basesrc);
+
+  /* We introduce one packet interval (ptime) of latency; everything
+   * else is deferred to the base class. */
+  if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
+    GstClockTime latency = self->ptime * GST_MSECOND;
+
+    gst_query_set_latency (query, gst_base_src_is_live (basesrc), latency,
+        GST_CLOCK_TIME_NONE);
+    GST_DEBUG_OBJECT (self, "Reporting latency of %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (latency));
+    return TRUE;
+  }
+
+  return GST_BASE_SRC_CLASS (gst_rtp_dtmf_src_parent_class)->query (basesrc,
+      query);
+}
+
+static void
+gst_rtp_dtmf_src_ready_to_paused (GstRTPDTMFSrc * dtmfsrc)
+{
+  /* Resolve the -1 "pick a random value" sentinels into the actual
+   * ssrc / seqnum / timestamp bases for this streaming session. */
+  dtmfsrc->current_ssrc = (dtmfsrc->ssrc == -1) ?
+      g_random_int () : dtmfsrc->ssrc;
+
+  dtmfsrc->seqnum_base = (dtmfsrc->seqnum_offset == -1) ?
+      g_random_int_range (0, G_MAXUINT16) : dtmfsrc->seqnum_offset;
+  dtmfsrc->seqnum = dtmfsrc->seqnum_base;
+
+  dtmfsrc->ts_base = (dtmfsrc->ts_offset == -1) ?
+      g_random_int () : dtmfsrc->ts_offset;
+
+  dtmfsrc->timestamp = 0;
+}
+
+/* GstElement::change_state vfunc.  Flushes pending DTMF events on
+ * READY->PAUSED and PAUSED->READY, and reports NO_PREROLL since this
+ * is a live source. */
+static GstStateChangeReturn
+gst_rtp_dtmf_src_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRTPDTMFSrc *dtmfsrc;
+  GstStateChangeReturn result;
+  gboolean no_preroll = FALSE;
+  GstRTPDTMFSrcEvent *event = NULL;
+
+  dtmfsrc = GST_RTP_DTMF_SRC (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* pick this session's ssrc/seqnum/timestamp bases */
+      gst_rtp_dtmf_src_ready_to_paused (dtmfsrc);
+
+      /* Flushing the event queue */
+      while ((event = g_async_queue_try_pop (dtmfsrc->event_queue)) != NULL) {
+        gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+        gst_rtp_dtmf_src_event_free (event);
+      }
+      dtmfsrc->last_event_was_start = FALSE;
+
+      /* live source: never prerolls */
+      no_preroll = TRUE;
+      break;
+    default:
+      break;
+  }
+
+  if ((result =
+          GST_ELEMENT_CLASS (gst_rtp_dtmf_src_parent_class)->change_state
+          (element, transition)) == GST_STATE_CHANGE_FAILURE)
+    goto failure;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      no_preroll = TRUE;
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+
+      /* Flushing the event queue */
+      while ((event = g_async_queue_try_pop (dtmfsrc->event_queue)) != NULL) {
+        gst_dtmf_src_post_message (dtmfsrc, "dtmf-event-dropped", event);
+        gst_rtp_dtmf_src_event_free (event);
+      }
+      dtmfsrc->last_event_was_start = FALSE;
+
+      /* Indicate that we don't do PRE_ROLL */
+      break;
+
+    default:
+      break;
+  }
+
+  if (no_preroll && result == GST_STATE_CHANGE_SUCCESS)
+    result = GST_STATE_CHANGE_NO_PREROLL;
+
+  return result;
+
+  /* ERRORS */
+failure:
+  {
+    GST_ERROR_OBJECT (dtmfsrc, "parent failed state change");
+    return result;
+  }
+}
+
+
+static gboolean
+gst_rtp_dtmf_src_unlock (GstBaseSrc * src)
+{
+  GstRTPDTMFSrc *self = GST_RTP_DTMF_SRC (src);
+  GstRTPDTMFSrcEvent *pause_event;
+
+  GST_DEBUG_OBJECT (self, "Called unlock");
+
+  /* Mark ourselves paused and kick any pending clock wait so create()
+   * returns promptly. */
+  GST_OBJECT_LOCK (self);
+  self->paused = TRUE;
+  if (self->clockid)
+    gst_clock_id_unschedule (self->clockid);
+  GST_OBJECT_UNLOCK (self);
+
+  /* Also wake up a create() that is blocked on the empty event queue. */
+  GST_DEBUG_OBJECT (self, "Pushing the PAUSE_TASK event on unlock request");
+  pause_event = g_slice_new0 (GstRTPDTMFSrcEvent);
+  pause_event->event_type = RTP_DTMF_EVENT_TYPE_PAUSE_TASK;
+  g_async_queue_push (self->event_queue, pause_event);
+
+  return TRUE;
+}
+
+
+static gboolean
+gst_rtp_dtmf_src_unlock_stop (GstBaseSrc * src)
+{
+  GstRTPDTMFSrc *self = GST_RTP_DTMF_SRC (src);
+
+  GST_DEBUG_OBJECT (self, "Unlock stopped");
+
+  /* Clear the paused flag set by unlock() so create() may run again. */
+  GST_OBJECT_LOCK (self);
+  self->paused = FALSE;
+  GST_OBJECT_UNLOCK (self);
+
+  return TRUE;
+}
diff --git a/gst/dtmf/gstrtpdtmfsrc.h b/gst/dtmf/gstrtpdtmfsrc.h
new file mode 100644
index 0000000000..b694b0112d
--- /dev/null
+++ b/gst/dtmf/gstrtpdtmfsrc.h
@@ -0,0 +1,114 @@
+/* GStreamer RTP DTMF source
+ *
+ * gstrtpdtmfsrc.h:
+ *
+ * Copyright (C) <2007> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_DTMF_SRC_H__
+#define __GST_RTP_DTMF_SRC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasesrc.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstdtmfcommon.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_DTMF_SRC (gst_rtp_dtmf_src_get_type())
+#define GST_RTP_DTMF_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DTMF_SRC,GstRTPDTMFSrc))
+#define GST_RTP_DTMF_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DTMF_SRC,GstRTPDTMFSrcClass))
+#define GST_RTP_DTMF_SRC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_DTMF_SRC, GstRTPDTMFSrcClass))
+#define GST_IS_RTP_DTMF_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DTMF_SRC))
+#define GST_IS_RTP_DTMF_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DTMF_SRC))
+#define GST_RTP_DTMF_SRC_CAST(obj) ((GstRTPDTMFSrc *)(obj))
+typedef struct _GstRTPDTMFSrc GstRTPDTMFSrc;
+typedef struct _GstRTPDTMFSrcClass GstRTPDTMFSrcClass;
+
+
+
+/* Kinds of events exchanged through the source's async queue. */
+enum _GstRTPDTMFEventType
+{
+  RTP_DTMF_EVENT_TYPE_START,      /* begin sending a tone */
+  RTP_DTMF_EVENT_TYPE_STOP,       /* stop the current tone */
+  RTP_DTMF_EVENT_TYPE_PAUSE_TASK  /* wake the create() loop on unlock */
+};
+
+typedef enum _GstRTPDTMFEventType GstRTPDTMFEventType;
+
+/* One queued event; payload is non-NULL only for START events. */
+struct _GstRTPDTMFSrcEvent
+{
+  GstRTPDTMFEventType event_type;
+  GstRTPDTMFPayload *payload;
+};
+
+typedef struct _GstRTPDTMFSrcEvent GstRTPDTMFSrcEvent;
+
+/**
+ * GstRTPDTMFSrc:
+ * @element: the parent element.
+ *
+ * The opaque #GstRTPDTMFSrc data structure.
+ */
+struct _GstRTPDTMFSrc
+{
+  /*< private >*/
+  GstBaseSrc basesrc;
+
+  GAsyncQueue *event_queue;     /* start/stop/pause events for create() */
+  GstClockID clockid;           /* pending clock wait, for unlock() */
+  gboolean paused;              /* set by unlock(), cleared by unlock_stop() */
+  GstRTPDTMFPayload *payload;   /* payload of the event currently playing */
+
+  GstClockTime timestamp;       /* PTS of the next outgoing buffer */
+  GstClockTime start_timestamp; /* PTS at which the current event started */
+  gboolean first_packet;
+  gboolean last_packet;
+  guint32 ts_base;              /* negotiated timestamp-offset */
+  guint16 seqnum_base;          /* negotiated seqnum-offset */
+  gint16 seqnum_offset;         /* property; -1 means pick a random value */
+  guint16 seqnum;
+  gint32 ts_offset;             /* property; -1 means pick a random value */
+  guint32 rtp_timestamp;
+  guint pt;
+  guint ssrc;                   /* property; -1 means pick a random value */
+  guint current_ssrc;
+  guint16 ptime;                /* packet interval in milliseconds */
+  guint16 packet_redundancy;
+  guint32 clock_rate;
+  gboolean last_event_was_start;
+
+  GstClockTime last_stop;       /* "last-stop" from the dtmf-event, if any */
+
+  gboolean dirty;               /* TRUE when caps must be renegotiated */
+  guint16 redundancy_count;
+};
+
+struct _GstRTPDTMFSrcClass
+{
+ GstBaseSrcClass parent_class;
+};
+
+GType gst_rtp_dtmf_src_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpdtmfsrc);
+
+G_END_DECLS
+#endif /* __GST_RTP_DTMF_SRC_H__ */
diff --git a/gst/dtmf/meson.build b/gst/dtmf/meson.build
new file mode 100644
index 0000000000..02efb81ba5
--- /dev/null
+++ b/gst/dtmf/meson.build
@@ -0,0 +1,18 @@
+dtmf_sources = [
+  'gstdtmfsrc.c',
+  'gstrtpdtmfsrc.c',
+  'gstrtpdtmfdepay.c',
+  'gstdtmf.c'
+]
+
+# DTMF tone generation and RTP DTMF (telephone-event) plugin
+gstdtmf = library('gstdtmf',
+  dtmf_sources,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc],
+  dependencies : [gstbase_dep, gstrtp_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstdtmf, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstdtmf]
+
diff --git a/gst/effectv/gstaging.c b/gst/effectv/gstaging.c
new file mode 100644
index 0000000000..2b36d68197
--- /dev/null
+++ b/gst/effectv/gstaging.c
@@ -0,0 +1,404 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2002 FUKUCHI Kentarou
+ *
+ * AgingTV - film-aging effect.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-agingtv
+ * @title: agingtv
+ *
+ * AgingTV ages a video stream in realtime, changes the colors and adds
+ * scratches and dust.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! agingtv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of agingtv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+
+#include "gstaging.h"
+#include "gsteffectv.h"
+
+/* 8-neighbour x/y offsets used by the random walk in dusts(). */
+static const gint dx[8] = { 1, 1, 0, -1, -1, -1, 0, 1 };
+static const gint dy[8] = { 0, -1, -1, -1, 0, 1, 1, 1 };
+
+/* Property ids. */
+enum
+{
+  PROP_0 = 0,
+  PROP_SCRATCH_LINES,
+  PROP_COLOR_AGING,
+  PROP_PITS,
+  PROP_DUSTS
+};
+
+#define DEFAULT_SCRATCH_LINES 7
+#define DEFAULT_COLOR_AGING TRUE
+#define DEFAULT_PITS TRUE
+#define DEFAULT_DUSTS TRUE
+
+/* The effect works on packed 32-bit RGB with the padding byte last in
+ * memory, so the accepted formats depend on host endianness. */
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR }")
+#endif
+
+static GstStaticPadTemplate gst_agingtv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (CAPS_STR)
+    );
+
+static GstStaticPadTemplate gst_agingtv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (CAPS_STR)
+    );
+
+G_DEFINE_TYPE (GstAgingTV, gst_agingtv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (agingtv, "agingtv", GST_RANK_NONE,
+    GST_TYPE_AGINGTV);
+
+/* Colour-fade pass of the aging effect: darken every pixel by roughly a
+ * quarter, then add back a slowly random-walking grey level plus per-pixel
+ * noise.  @src/@dest are packed 32-bit RGB frames of @video_area pixels;
+ * @c is the persistent fade level (clamped to 0..0x18), updated in place. */
+static void
+coloraging (guint32 * src, guint32 * dest, gint video_area, gint * c)
+{
+  guint32 a, b;
+  gint i;
+  gint c_tmp = *c;
+
+  /* Random-walk the fade level downwards, then clamp. */
+  c_tmp -= (gint) (fastrand ()) >> 28;
+  if (c_tmp < 0)
+    c_tmp = 0;
+  if (c_tmp > 0x18)
+    c_tmp = 0x18;
+
+  for (i = 0; i < video_area; i++) {
+    a = *src++;
+    b = (a & 0xfcfcfc) >> 2;    /* a quarter of each channel */
+    *dest++ =
+        a - b + (c_tmp | (c_tmp << 8) | (c_tmp << 16)) +
+        ((fastrand () >> 8) & 0x101010);
+  }
+  *c = c_tmp;
+}
+
+
+/* Draw up to @scratch_lines drifting vertical film scratches into @dest
+ * (packed 32-bit RGB, @width x @height).  Each scratch lives a few frames;
+ * dead slots are randomly (re)spawned.  x/dx are 24.8 fixed point. */
+static void
+scratching (scratch * scratches, gint scratch_lines, guint32 * dest, gint width,
+    gint height)
+{
+  gint i, y, y1, y2;
+  guint32 *p, a, b;
+  scratch *scratch;
+
+  for (i = 0; i < scratch_lines; i++) {
+    scratch = &scratches[i];
+
+    if (scratch->life) {
+      scratch->x = scratch->x + scratch->dx;
+
+      /* Drifted off either edge: kill it and stop processing this frame. */
+      if (scratch->x < 0 || scratch->x > width * 256) {
+        scratch->life = 0;
+        break;
+      }
+      p = dest + (scratch->x >> 8);
+      /* On its first frame a scratch starts partway down the image. */
+      if (scratch->init) {
+        y1 = scratch->init;
+        scratch->init = 0;
+      } else {
+        y1 = 0;
+      }
+      scratch->life--;
+      /* On its last frame the scratch ends at a random row. */
+      if (scratch->life) {
+        y2 = height;
+      } else {
+        y2 = fastrand () % height;
+      }
+      /* Brighten the column with saturated addition. */
+      for (y = y1; y < y2; y++) {
+        a = *p & 0xfefeff;
+        a += 0x202020;
+        b = a & 0x1010100;      /* carry bits of each channel */
+        *p = a | (b - (b >> 8));
+        p += width;
+      }
+    } else {
+      /* Occasionally resurrect this slot with random position/velocity. */
+      if ((fastrand () & 0xf0000000) == 0) {
+        scratch->life = 2 + (fastrand () >> 27);
+        scratch->x = fastrand () % (width * 256);
+        scratch->dx = ((int) fastrand ()) >> 23;
+        scratch->init = (fastrand () % (height - 1)) + 1;
+      }
+    }
+  }
+}
+
+/* Scatter short dark "dust" trails over @dest during burst periods.
+ * @dust_interval counts down the remaining burst frames; between bursts it
+ * is occasionally re-armed at random.  Each trail is a random walk over the
+ * 8-neighbour offsets in dx[]/dy[]. */
+static void
+dusts (guint32 * dest, gint width, gint height, gint * dust_interval,
+    gint area_scale)
+{
+  gint i, j;
+  gint dnum;
+  gint d, len;
+  guint x, y;
+
+  if (*dust_interval == 0) {
+    /* Not currently bursting: rarely start a new burst. */
+    if ((fastrand () & 0xf0000000) == 0) {
+      *dust_interval = fastrand () >> 29;
+    }
+    return;
+  }
+  dnum = area_scale * 4 + (fastrand () >> 27);
+
+  for (i = 0; i < dnum; i++) {
+    x = fastrand () % width;
+    y = fastrand () % height;
+    d = fastrand () >> 29;      /* initial walk direction, 0..7 */
+    len = fastrand () % area_scale + 5;
+    for (j = 0; j < len; j++) {
+      dest[y * width + x] = 0x101010;
+      y += dy[d];
+      x += dx[d];
+
+      /* x/y are unsigned, so this also catches wraparound below 0. */
+      if (y >= height || x >= width)
+        break;
+
+      d = (d + fastrand () % 3 - 1) & 7;        /* jitter the direction */
+    }
+  }
+  *dust_interval = *dust_interval - 1;
+}
+
+/* Draw bright "pit" blobs on @dest.  During a burst (@pits_interval > 0)
+ * many pits are drawn each frame; otherwise only a few, with bursts being
+ * re-armed at random.  Each pit is a short random walk of light pixels. */
+static void
+pits (guint32 * dest, gint width, gint height, gint area_scale,
+    gint * pits_interval)
+{
+  gint i, j;
+  gint pnum, size, pnumscale;
+  guint x, y;
+
+  pnumscale = area_scale * 2;
+  if (*pits_interval) {
+    pnum = pnumscale + (fastrand () % pnumscale);
+
+    *pits_interval = *pits_interval - 1;
+  } else {
+    pnum = fastrand () % pnumscale;
+
+    /* Rarely start a burst lasting ~20-35 frames. */
+    if ((fastrand () & 0xf8000000) == 0) {
+      *pits_interval = (fastrand () >> 28) + 20;
+    }
+  }
+  for (i = 0; i < pnum; i++) {
+    x = fastrand () % (width - 1);
+    y = fastrand () % (height - 1);
+
+    size = fastrand () >> 28;
+
+    for (j = 0; j < size; j++) {
+      x = x + fastrand () % 3 - 1;
+      y = y + fastrand () % 3 - 1;
+
+      /* x/y are unsigned, so this also catches wraparound below 0. */
+      if (y >= height || x >= width)
+        break;
+
+      dest[y * width + x] = 0xc0c0c0;
+    }
+  }
+}
+
+/* GObject get_property vfunc: read property values under the object lock. */
+static void
+gst_agingtv_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstAgingTV *agingtv = GST_AGINGTV (object);
+
+  GST_OBJECT_LOCK (agingtv);
+  switch (prop_id) {
+    case PROP_SCRATCH_LINES:
+      g_value_set_uint (value, agingtv->scratch_lines);
+      break;
+    case PROP_COLOR_AGING:
+      g_value_set_boolean (value, agingtv->color_aging);
+      break;
+    case PROP_PITS:
+      g_value_set_boolean (value, agingtv->pits);
+      break;
+    case PROP_DUSTS:
+      g_value_set_boolean (value, agingtv->dusts);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+  GST_OBJECT_UNLOCK (agingtv);
+}
+
+/* GObject set_property vfunc.
+ *
+ * Writes are taken under the object lock for consistency with
+ * gst_agingtv_get_property(), which reads these fields under the same
+ * lock; the original code wrote them unlocked. */
+static void
+gst_agingtv_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstAgingTV *agingtv = GST_AGINGTV (object);
+
+  GST_OBJECT_LOCK (agingtv);
+  switch (prop_id) {
+    case PROP_SCRATCH_LINES:
+      agingtv->scratch_lines = g_value_get_uint (value);
+      break;
+    case PROP_COLOR_AGING:
+      agingtv->color_aging = g_value_get_boolean (value);
+      break;
+    case PROP_PITS:
+      agingtv->pits = g_value_get_boolean (value);
+      break;
+    case PROP_DUSTS:
+      agingtv->dusts = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+  GST_OBJECT_UNLOCK (agingtv);
+}
+
+/* GstBaseTransform start vfunc: reset all per-stream effect state. */
+static gboolean
+gst_agingtv_start (GstBaseTransform * trans)
+{
+  GstAgingTV *agingtv = GST_AGINGTV (trans);
+
+  agingtv->coloraging_state = 0x18;     /* start fully faded */
+  agingtv->dust_interval = 0;
+  agingtv->pits_interval = 0;
+
+  memset (agingtv->scratches, 0, sizeof (agingtv->scratches));
+
+  return TRUE;
+}
+
+/* GstVideoFilter transform_frame vfunc: sync controlled properties to the
+ * stream time, then apply the enabled aging passes (colour fade, scratches,
+ * pits, dusts) from @in_frame into @out_frame. */
+static GstFlowReturn
+gst_agingtv_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
+{
+  GstAgingTV *agingtv = GST_AGINGTV (filter);
+  gint area_scale;
+  GstClockTime timestamp, stream_time;
+  gint width, height, stride, video_size;
+  guint32 *src, *dest;
+
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+  stream_time =
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
+      GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (agingtv, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (agingtv), stream_time);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+  stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+  /* Frame size in 32-bit pixels (stride is in bytes). */
+  video_size = stride * height / 4;
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  /* Scale the effect intensity with the frame area (floor of 1). */
+  area_scale = width * height / 64 / 480;
+  if (area_scale <= 0)
+    area_scale = 1;
+
+  /* Colour fade writes every pixel; otherwise copy the input through. */
+  if (agingtv->color_aging)
+    coloraging (src, dest, video_size, &agingtv->coloraging_state);
+  else
+    memcpy (dest, src, stride * height);
+
+  scratching (agingtv->scratches, agingtv->scratch_lines, dest, width, height);
+  if (agingtv->pits)
+    pits (dest, width, height, area_scale, &agingtv->pits_interval);
+  if (area_scale > 1 && agingtv->dusts)
+    dusts (dest, width, height, &agingtv->dust_interval, area_scale);
+
+  return GST_FLOW_OK;
+}
+
+/* Class init: install properties, element metadata, pad templates and
+ * override the base-transform/video-filter vfuncs. */
+static void
+gst_agingtv_class_init (GstAgingTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_agingtv_set_property;
+  gobject_class->get_property = gst_agingtv_get_property;
+
+  /* All properties are GST_PARAM_CONTROLLABLE: they can be driven by a
+   * GstControlSource and are synced in transform_frame. */
+  g_object_class_install_property (gobject_class, PROP_SCRATCH_LINES,
+      g_param_spec_uint ("scratch-lines", "Scratch Lines",
+          "Number of scratch lines", 0, SCRATCH_MAX, DEFAULT_SCRATCH_LINES,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  g_object_class_install_property (gobject_class, PROP_COLOR_AGING,
+      g_param_spec_boolean ("color-aging", "Color Aging",
+          "Color Aging", DEFAULT_COLOR_AGING,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  g_object_class_install_property (gobject_class, PROP_PITS,
+      g_param_spec_boolean ("pits", "Pits",
+          "Pits", DEFAULT_PITS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  g_object_class_install_property (gobject_class, PROP_DUSTS,
+      g_param_spec_boolean ("dusts", "Dusts",
+          "Dusts", DEFAULT_DUSTS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  gst_element_class_set_static_metadata (gstelement_class, "AgingTV effect",
+      "Filter/Effect/Video",
+      "AgingTV adds age to video input using scratches and dust",
+      "Sam Lantinga <slouken@devolution.com>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_agingtv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_agingtv_src_template);
+
+  trans_class->start = GST_DEBUG_FUNCPTR (gst_agingtv_start);
+
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_agingtv_transform_frame);
+}
+
+/* Instance init: set property defaults; stream state is reset in start(). */
+static void
+gst_agingtv_init (GstAgingTV * agingtv)
+{
+  agingtv->scratch_lines = DEFAULT_SCRATCH_LINES;
+  agingtv->color_aging = DEFAULT_COLOR_AGING;
+  agingtv->pits = DEFAULT_PITS;
+  agingtv->dusts = DEFAULT_DUSTS;
+}
diff --git a/gst/effectv/gstaging.h b/gst/effectv/gstaging.h
new file mode 100644
index 0000000000..52466d15e3
--- /dev/null
+++ b/gst/effectv/gstaging.h
@@ -0,0 +1,89 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2002 FUKUCHI Kentarou
+ *
+ * AgingTV - film-aging effect.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_AGING_H__
+#define __GST_AGING_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_AGINGTV \
+ (gst_agingtv_get_type())
+#define GST_AGINGTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_AGINGTV,GstAgingTV))
+#define GST_AGINGTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_AGINGTV,GstAgingTVClass))
+#define GST_IS_AGINGTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_AGINGTV))
+#define GST_IS_AGINGTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_AGINGTV))
+
+/* State of one film scratch: a vertical line that drifts horizontally
+ * across the frame for a few frames before dying. */
+typedef struct _scratch
+{
+  gint life;                    /* frames left to live; 0 = slot inactive */
+  gint x;                       /* horizontal position, 24.8 fixed point */
+  gint dx;                      /* drift per frame, 24.8 fixed point */
+  gint init;                    /* starting row on the first frame, then 0 */
+} scratch;
+#define SCRATCH_MAX 20
+
+typedef struct _GstAgingTV GstAgingTV;
+typedef struct _GstAgingTVClass GstAgingTVClass;
+
+struct _GstAgingTV
+{
+  GstVideoFilter videofilter;
+
+  /* < private > */
+  /* Effect toggles, exposed as controllable properties. */
+  gboolean color_aging;
+  gboolean pits;
+  gboolean dusts;
+
+  gint coloraging_state;        /* random-walked fade level, 0..0x18 */
+
+  scratch scratches[SCRATCH_MAX];
+  gint scratch_lines;           /* how many scratch slots are used */
+
+  gint dust_interval;           /* dust burst countdown */
+  gint pits_interval;           /* pit burst countdown */
+
+};
+
+struct _GstAgingTVClass
+{
+  GstVideoFilterClass parent_class;
+};
+
+GType gst_agingtv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_AGING_H__ */
+
diff --git a/gst/effectv/gstdice.c b/gst/effectv/gstdice.c
new file mode 100644
index 0000000000..e7929fbeae
--- /dev/null
+++ b/gst/effectv/gstdice.c
@@ -0,0 +1,316 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * dice.c: a 'dicing' effect
+ * copyright (c) 2001 Sam Mertens. This code is subject to the provisions of
+ * the GNU Library Public License.
+ *
+ * I suppose this looks similar to PuzzleTV, but it's not. The screen is
+ * divided into small squares, each of which is rotated either 0, 90, 180 or
+ * 270 degrees. The amount of rotation for each square is chosen at random.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-dicetv
+ * @title: dicetv
+ *
+ * DiceTV 'dices' the screen up into many small squares, each defaulting
+ * to a size of 16 pixels by 16 pixels.. Each square is rotated randomly
+ * in one of four directions: up (no change), down (180 degrees, or
+ * upside down), right (90 degrees clockwise), or left (90 degrees
+ * counterclockwise). The direction of each square normally remains
+ * consistent between each frame.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! dicetv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of dicetv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstdice.h"
+#include "gsteffectv.h"
+
+/* Square edge length is (1 << cube_bits) pixels: default 16, max 32. */
+#define DEFAULT_CUBE_BITS 4
+#define MAX_CUBE_BITS 5
+#define MIN_CUBE_BITS 0
+
+/* Rotation applied to one square of the dice map. */
+typedef enum _dice_dir
+{
+  DICE_UP = 0,                  /* no rotation */
+  DICE_RIGHT = 1,               /* 90 degrees clockwise */
+  DICE_DOWN = 2,                /* 180 degrees */
+  DICE_LEFT = 3                 /* 90 degrees counterclockwise */
+} DiceDir;
+
+#define gst_dicetv_parent_class parent_class
+G_DEFINE_TYPE (GstDiceTV, gst_dicetv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (dicetv, "dicetv", GST_RANK_NONE, GST_TYPE_DICETV);
+
+static void gst_dicetv_create_map (GstDiceTV * filter, GstVideoInfo * info);
+
+/* Any packed 32-bit RGB layout works: squares are moved wholesale, so the
+ * channel order never matters. */
+static GstStaticPadTemplate gst_dicetv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
+    );
+
+static GstStaticPadTemplate gst_dicetv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
+    );
+
+/* Property ids. */
+enum
+{
+  PROP_0,
+  PROP_CUBE_BITS
+};
+
+/* GstVideoFilter set_info vfunc: (re)allocate the per-square rotation map
+ * for the new frame size and fill it with random directions.
+ *
+ * One byte per pixel is a generous upper bound; the map itself only needs
+ * (width >> cube_bits) * (height >> cube_bits) entries. */
+static gboolean
+gst_dicetv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstDiceTV *filter = GST_DICETV (vfilter);
+
+  g_free (filter->dicemap);
+  /* Fix: the previous code allocated width * width bytes, which
+   * under-allocates (heap overflow in gst_dicetv_create_map) whenever the
+   * frame is taller than it is wide; use width * height instead. */
+  filter->dicemap =
+      (guint8 *) g_malloc (GST_VIDEO_INFO_WIDTH (in_info) *
+      GST_VIDEO_INFO_HEIGHT (in_info));
+  gst_dicetv_create_map (filter, in_info);
+
+  return TRUE;
+}
+
+/* GstVideoFilter transform_frame vfunc: copy each g_cube_size-sided square
+ * from @in_frame to @out_frame, rotating it according to the dicemap entry
+ * for that square.  All indices below are in 32-bit pixels, not bytes. */
+static GstFlowReturn
+gst_dicetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
+{
+  GstDiceTV *filter = GST_DICETV (vfilter);
+  guint32 *src, *dest;
+  gint i, map_x, map_y, map_i, base, dx, dy, di;
+  gint video_stride, g_cube_bits, g_cube_size;
+  gint g_map_height, g_map_width;
+  GstClockTime timestamp, stream_time;
+  const guint8 *dicemap;
+
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+  stream_time =
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+      GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+  src = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = (guint32 *) GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+  video_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+
+  /* Snapshot map state under the lock; set_property may rebuild the map
+   * concurrently. */
+  GST_OBJECT_LOCK (filter);
+  g_cube_bits = filter->g_cube_bits;
+  g_cube_size = filter->g_cube_size;
+  g_map_height = filter->g_map_height;
+  g_map_width = filter->g_map_width;
+
+  dicemap = filter->dicemap;
+  video_stride /= 4;            /* bytes -> pixels */
+
+  map_i = 0;
+  for (map_y = 0; map_y < g_map_height; map_y++) {
+    for (map_x = 0; map_x < g_map_width; map_x++) {
+      /* Pixel index of this square's top-left corner. */
+      base = (map_y << g_cube_bits) * video_stride + (map_x << g_cube_bits);
+
+      switch (dicemap[map_i]) {
+        case DICE_UP:
+          /* Straight copy, no rotation. */
+          for (dy = 0; dy < g_cube_size; dy++) {
+            i = base + dy * video_stride;
+            for (dx = 0; dx < g_cube_size; dx++) {
+              dest[i] = src[i];
+              i++;
+            }
+          }
+          break;
+        case DICE_LEFT:
+          /* Source row dy becomes destination column (size - dy - 1). */
+          for (dy = 0; dy < g_cube_size; dy++) {
+            i = base + dy * video_stride;
+
+            for (dx = 0; dx < g_cube_size; dx++) {
+              di = base + (dx * video_stride) + (g_cube_size - dy - 1);
+              dest[di] = src[i];
+              i++;
+            }
+          }
+          break;
+        case DICE_DOWN:
+          /* Read the square bottom-up, right-to-left: 180 degrees. */
+          for (dy = 0; dy < g_cube_size; dy++) {
+            di = base + dy * video_stride;
+            i = base + (g_cube_size - dy - 1) * video_stride + g_cube_size;
+            for (dx = 0; dx < g_cube_size; dx++) {
+              i--;
+              dest[di] = src[i];
+              di++;
+            }
+          }
+          break;
+        case DICE_RIGHT:
+          /* Source row dy becomes destination column dy, bottom-up. */
+          for (dy = 0; dy < g_cube_size; dy++) {
+            i = base + (dy * video_stride);
+            for (dx = 0; dx < g_cube_size; dx++) {
+              di = base + dy + (g_cube_size - dx - 1) * video_stride;
+              dest[di] = src[i];
+              i++;
+            }
+          }
+          break;
+        default:
+          /* The map only ever holds values 0..3. */
+          g_assert_not_reached ();
+          break;
+      }
+      map_i++;
+    }
+  }
+  GST_OBJECT_UNLOCK (filter);
+
+  return GST_FLOW_OK;
+}
+
+/* Recompute the map geometry from @info and the current cube_bits, and
+ * fill the dicemap with random rotation directions (one byte per square).
+ * Callers must hold whatever lock protects the map (see set_property). */
+static void
+gst_dicetv_create_map (GstDiceTV * filter, GstVideoInfo * info)
+{
+  gint x, y, i;
+  gint width, height;
+
+  width = GST_VIDEO_INFO_WIDTH (info);
+  height = GST_VIDEO_INFO_HEIGHT (info);
+
+  /* Caps not negotiated yet: nothing to build. */
+  if (width <= 0 || height <= 0)
+    return;
+
+  filter->g_map_height = height >> filter->g_cube_bits;
+  filter->g_map_width = width >> filter->g_cube_bits;
+  filter->g_cube_size = 1 << filter->g_cube_bits;
+
+  i = 0;
+
+  for (y = 0; y < filter->g_map_height; y++) {
+    for (x = 0; x < filter->g_map_width; x++) {
+      /* dicemap[i] = ((i + y) & 0x3); */
+      /* Pick one of the four DiceDir rotations at random. */
+      filter->dicemap[i] = (fastrand () >> 24) & 0x03;
+      i++;
+    }
+  }
+}
+
+/* GObject set_property vfunc: changing square-bits rebuilds the whole map
+ * under the object lock so transform_frame never sees a stale geometry. */
+static void
+gst_dicetv_set_property (GObject * object, guint prop_id, const GValue * value,
+    GParamSpec * pspec)
+{
+  GstDiceTV *filter = GST_DICETV (object);
+
+  switch (prop_id) {
+    case PROP_CUBE_BITS:
+      GST_OBJECT_LOCK (filter);
+      filter->g_cube_bits = g_value_get_int (value);
+      gst_dicetv_create_map (filter, &GST_VIDEO_FILTER (filter)->in_info);
+      GST_OBJECT_UNLOCK (filter);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject get_property vfunc. */
+static void
+gst_dicetv_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstDiceTV *filter = GST_DICETV (object);
+
+  switch (prop_id) {
+    case PROP_CUBE_BITS:
+      g_value_set_int (value, filter->g_cube_bits);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject finalize vfunc: release the dicemap, then chain up. */
+static void
+gst_dicetv_finalize (GObject * object)
+{
+  GstDiceTV *filter = GST_DICETV (object);
+
+  g_free (filter->dicemap);
+  filter->dicemap = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class init: install the square-bits property, element metadata, pad
+ * templates and the video-filter vfuncs. */
+static void
+gst_dicetv_class_init (GstDiceTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_dicetv_set_property;
+  gobject_class->get_property = gst_dicetv_get_property;
+  gobject_class->finalize = gst_dicetv_finalize;
+
+  /* Controllable: may be driven by a GstControlSource. */
+  g_object_class_install_property (gobject_class, PROP_CUBE_BITS,
+      g_param_spec_int ("square-bits", "Square Bits", "The size of the Squares",
+          MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  gst_element_class_set_static_metadata (gstelement_class, "DiceTV effect",
+      "Filter/Effect/Video",
+      "'Dices' the screen up into many small squares",
+      "Wim Taymans <wim.taymans@gmail.be>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_dicetv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_dicetv_src_template);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_dicetv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_dicetv_transform_frame);
+}
+
+/* Instance init: map geometry is computed later in set_info/create_map. */
+static void
+gst_dicetv_init (GstDiceTV * filter)
+{
+  filter->dicemap = NULL;
+  filter->g_cube_bits = DEFAULT_CUBE_BITS;
+  filter->g_cube_size = 0;
+  filter->g_map_height = 0;
+  filter->g_map_width = 0;
+}
diff --git a/gst/effectv/gstdice.h b/gst/effectv/gstdice.h
new file mode 100644
index 0000000000..4c14c976e6
--- /dev/null
+++ b/gst/effectv/gstdice.h
@@ -0,0 +1,75 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * dice.c: a 'dicing' effect
+ * copyright (c) 2001 Sam Mertens. This code is subject to the provisions of
+ * the GNU Library Public License.
+ *
+ * I suppose this looks similar to PuzzleTV, but it's not. The screen is
+ * divided into small squares, each of which is rotated either 0, 90, 180 or
+ * 270 degrees. The amount of rotation for each square is chosen at random.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_DICE_H__
+#define __GST_DICE_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_DICETV \
+ (gst_dicetv_get_type())
+#define GST_DICETV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DICETV,GstDiceTV))
+#define GST_DICETV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DICETV,GstDiceTVClass))
+#define GST_IS_DICETV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DICETV))
+#define GST_IS_DICETV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DICETV))
+
+typedef struct _GstDiceTV GstDiceTV;
+typedef struct _GstDiceTVClass GstDiceTVClass;
+
+struct _GstDiceTV
+{
+  GstVideoFilter videofilter;
+
+  /* < private > */
+  guint8 *dicemap;              /* one DiceDir rotation byte per square */
+
+  gint g_cube_bits;             /* log2 of the square edge length */
+  gint g_cube_size;             /* 1 << g_cube_bits, in pixels */
+  gint g_map_height;            /* squares per column */
+  gint g_map_width;             /* squares per row */
+};
+
+struct _GstDiceTVClass
+{
+  GstVideoFilterClass parent_class;
+};
+
+GType gst_dicetv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_DICE_H__ */
diff --git a/gst/effectv/gstedge.c b/gst/effectv/gstedge.c
new file mode 100644
index 0000000000..3e6a9de8b6
--- /dev/null
+++ b/gst/effectv/gstedge.c
@@ -0,0 +1,256 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV:
+ * Copyright (C) 2001-2002 FUKUCHI Kentarou
+ *
+ * EdgeTV - detects edge and display it in good old computer way
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-edgetv
+ * @title: edgetv
+ *
+ * EdgeTV detects edges and display it in good old low resolution
+ * computer way.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! edgetv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of edgetv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstedge.h"
+#include "gsteffectv.h"
+
+#define gst_edgetv_parent_class parent_class
+G_DEFINE_TYPE (GstEdgeTV, gst_edgetv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (edgetv, "edgetv", GST_RANK_NONE, GST_TYPE_EDGETV);
+
+/* The per-channel bit arithmetic assumes the padding byte comes last in
+ * memory, so accepted formats depend on host endianness. */
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
+#endif
+
+static GstStaticPadTemplate gst_edgetv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (CAPS_STR)
+    );
+
+static GstStaticPadTemplate gst_edgetv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (CAPS_STR)
+    );
+
+/* GstVideoFilter set_info vfunc: the effect works on 4x4 cells, so derive
+ * the cell-map geometry from the new frame size and (re)allocate the map
+ * (two guint32 edge values per cell, zero-initialised). */
+static gboolean
+gst_edgetv_set_info (GstVideoFilter * filter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstEdgeTV *edgetv = GST_EDGETV (filter);
+  guint map_size;
+  gint width, height;
+
+  width = GST_VIDEO_INFO_WIDTH (in_info);
+  height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+  edgetv->map_width = width / 4;
+  edgetv->map_height = height / 4;
+  /* Leftover pixels when the width is not a multiple of 4; skipped when
+   * advancing to the next cell row in transform_frame. */
+  edgetv->video_width_margin = width % 4;
+
+  map_size = edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2;
+
+  g_free (edgetv->map);
+  edgetv->map = (guint32 *) g_malloc0 (map_size);
+
+  return TRUE;
+}
+
+/* GstVideoFilter transform_frame vfunc: compute squared colour differences
+ * between each sampled pixel and its left/upper neighbours, cache them in
+ * the cell map, and paint each 4x4 output cell from the cached edge values
+ * in a blocky, low-resolution style. */
+static GstFlowReturn
+gst_edgetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
+{
+  GstEdgeTV *filter = GST_EDGETV (vfilter);
+  gint x, y, r, g, b;
+  guint32 *src, *dest;
+  guint32 p, q;
+  guint32 v0, v1, v2, v3;
+  gint width, map_height, map_width;
+  gint video_width_margin;
+  guint32 *map;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  map = filter->map;
+  map_height = filter->map_height;
+  map_width = filter->map_width;
+  video_width_margin = filter->video_width_margin;
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+
+  /* Skip the first cell row and column: each cell needs a left and an
+   * upper neighbour.  Pointers advance one 4x4 cell at a time. */
+  src += width * 4 + 4;
+  dest += width * 4 + 4;
+
+  for (y = 1; y < map_height - 1; y++) {
+    for (x = 1; x < map_width - 1; x++) {
+      p = *src;
+      q = *(src - 4);
+
+      /* difference between the current pixel and left neighbor. */
+      r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
+      g = ((p & 0xff00) - (q & 0xff00)) >> 8;
+      b = (p & 0xff) - (q & 0xff);
+      r *= r;
+      g *= g;
+      b *= b;
+      r = r >> 5;               /* To lack the lower bit for saturated addition, */
+      g = g >> 5;               /* divide the value with 32, instead of 16. It is */
+      b = b >> 4;               /* same as `v2 &= 0xfefeff' */
+      if (r > 127)
+        r = 127;
+      if (g > 127)
+        g = 127;
+      if (b > 255)
+        b = 255;
+      v2 = (r << 17) | (g << 9) | b;
+
+      /* difference between the current pixel and upper neighbor. */
+      q = *(src - width * 4);
+      r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
+      g = ((p & 0xff00) - (q & 0xff00)) >> 8;
+      b = (p & 0xff) - (q & 0xff);
+      r *= r;
+      g *= g;
+      b *= b;
+      r = r >> 5;
+      g = g >> 5;
+      b = b >> 4;
+      if (r > 127)
+        r = 127;
+      if (g > 127)
+        g = 127;
+      if (b > 255)
+        b = 255;
+      v3 = (r << 17) | (g << 9) | b;
+
+      /* Neighbouring cells' cached edge values from previous iterations. */
+      v0 = map[(y - 1) * map_width * 2 + x * 2];
+      v1 = map[y * map_width * 2 + (x - 1) * 2 + 1];
+      map[y * map_width * 2 + x * 2] = v2;
+      map[y * map_width * 2 + x * 2 + 1] = v3;
+      /* Paint the 4x4 cell; `r | (g - (g >> 8))` is saturated addition. */
+      r = v0 + v1;
+      g = r & 0x01010100;
+      dest[0] = r | (g - (g >> 8));
+      r = v0 + v3;
+      g = r & 0x01010100;
+      dest[1] = r | (g - (g >> 8));
+      dest[2] = v3;
+      dest[3] = v3;
+      r = v2 + v1;
+      g = r & 0x01010100;
+      dest[width] = r | (g - (g >> 8));
+      r = v2 + v3;
+      g = r & 0x01010100;
+      dest[width + 1] = r | (g - (g >> 8));
+      dest[width + 2] = v3;
+      dest[width + 3] = v3;
+      dest[width * 2] = v2;
+      dest[width * 2 + 1] = v2;
+      dest[width * 2 + 2] = 0;
+      dest[width * 2 + 3] = 0;
+      dest[width * 3] = v2;
+      dest[width * 3 + 1] = v2;
+      dest[width * 3 + 2] = 0;
+      dest[width * 3 + 3] = 0;
+
+      src += 4;
+      dest += 4;
+    }
+    /* Jump to the start of the next cell row (3 remaining pixel rows of
+     * this cell, the skipped border cells, and the width margin). */
+    src += width * 3 + 8 + video_width_margin;
+    dest += width * 3 + 8 + video_width_margin;
+  }
+
+  return ret;
+}
+
+/* GstBaseTransform start vfunc: clear any cached edge values from a
+ * previous run (the map may not exist yet if caps were never set). */
+static gboolean
+gst_edgetv_start (GstBaseTransform * trans)
+{
+  GstEdgeTV *edgetv = GST_EDGETV (trans);
+
+  if (edgetv->map)
+    memset (edgetv->map, 0,
+        edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
+  return TRUE;
+}
+
+/* GObject finalize vfunc: release the edge map, then chain up. */
+static void
+gst_edgetv_finalize (GObject * object)
+{
+  GstEdgeTV *edgetv = GST_EDGETV (object);
+
+  g_free (edgetv->map);
+  edgetv->map = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class init: element metadata, pad templates and vfunc overrides.
+ * EdgeTV exposes no properties. */
+static void
+gst_edgetv_class_init (GstEdgeTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->finalize = gst_edgetv_finalize;
+
+  gst_element_class_set_static_metadata (gstelement_class, "EdgeTV effect",
+      "Filter/Effect/Video",
+      "Apply edge detect on video", "Wim Taymans <wim.taymans@chello.be>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_edgetv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_edgetv_src_template);
+
+  trans_class->start = GST_DEBUG_FUNCPTR (gst_edgetv_start);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_edgetv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_edgetv_transform_frame);
+}
+
+/* Instance init: nothing to do; all state is set up in set_info/start. */
+static void
+gst_edgetv_init (GstEdgeTV * edgetv)
+{
+}
diff --git a/gst/effectv/gstedge.h b/gst/effectv/gstedge.h
new file mode 100644
index 0000000000..290c312838
--- /dev/null
+++ b/gst/effectv/gstedge.h
@@ -0,0 +1,70 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV:
+ * Copyright (C) 2001-2002 FUKUCHI Kentarou
+ *
+ * EdgeTV - detects edge and display it in good old computer way
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_EDGE_H__
+#define __GST_EDGE_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_EDGETV \
+ (gst_edgetv_get_type())
+#define GST_EDGETV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_EDGETV,GstEdgeTV))
+#define GST_EDGETV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_EDGETV,GstEdgeTVClass))
+#define GST_IS_EDGETV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_EDGETV))
+#define GST_IS_EDGETV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_EDGETV))
+
+typedef struct _GstEdgeTV GstEdgeTV;
+typedef struct _GstEdgeTVClass GstEdgeTVClass;
+
+struct _GstEdgeTV
+{
+ GstVideoFilter videofilter;
+
+ /* < private > */
+ gint map_width, map_height;   /* dimensions of the edge map, in map cells */
+ guint32 *map;                 /* double-buffered edge map (2 planes, see start/finalize) */
+ gint video_width_margin;      /* per-row skip used when walking the frame */
+};
+
+struct _GstEdgeTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_edgetv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_EDGE_H__ */
diff --git a/gst/effectv/gsteffectv.c b/gst/effectv/gsteffectv.c
new file mode 100644
index 0000000000..57f1a755ce
--- /dev/null
+++ b/gst/effectv/gsteffectv.c
@@ -0,0 +1,67 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gsteffectv.h"
+#include "gstaging.h"
+#include "gstdice.h"
+#include "gstedge.h"
+#include "gstquark.h"
+#include "gstrev.h"
+#include "gstshagadelic.h"
+#include "gstvertigo.h"
+#include "gstwarp.h"
+#include "gstop.h"
+#include "gstradioac.h"
+#include "gststreak.h"
+#include "gstripple.h"
+
+/* Plugin entry point: register every effectv element with the plugin.
+ * ret is OR-accumulated so the plugin loads if at least one element
+ * registered successfully. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (edgetv, plugin);
+ ret |= GST_ELEMENT_REGISTER (agingtv, plugin);
+ ret |= GST_ELEMENT_REGISTER (dicetv, plugin);
+ ret |= GST_ELEMENT_REGISTER (warptv, plugin);
+ ret |= GST_ELEMENT_REGISTER (shagadelictv, plugin);
+ ret |= GST_ELEMENT_REGISTER (vertigotv, plugin);
+ ret |= GST_ELEMENT_REGISTER (revtv, plugin);
+ ret |= GST_ELEMENT_REGISTER (quarktv, plugin);
+ ret |= GST_ELEMENT_REGISTER (optv, plugin);
+ ret |= GST_ELEMENT_REGISTER (radioactv, plugin);
+ ret |= GST_ELEMENT_REGISTER (streaktv, plugin);
+ ret |= GST_ELEMENT_REGISTER (rippletv, plugin);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ effectv,
+ "effect plugins from the effectv project",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/effectv/gsteffectv.h b/gst/effectv/gsteffectv.h
new file mode 100644
index 0000000000..311e4a156b
--- /dev/null
+++ b/gst/effectv/gsteffectv.h
@@ -0,0 +1,46 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <gst/gst.h>
+
+GST_ELEMENT_REGISTER_DECLARE (edgetv);
+GST_ELEMENT_REGISTER_DECLARE (agingtv);
+GST_ELEMENT_REGISTER_DECLARE (dicetv);
+GST_ELEMENT_REGISTER_DECLARE (warptv);
+GST_ELEMENT_REGISTER_DECLARE (shagadelictv);
+GST_ELEMENT_REGISTER_DECLARE (vertigotv);
+GST_ELEMENT_REGISTER_DECLARE (revtv);
+GST_ELEMENT_REGISTER_DECLARE (quarktv);
+GST_ELEMENT_REGISTER_DECLARE (optv);
+GST_ELEMENT_REGISTER_DECLARE (radioactv);
+GST_ELEMENT_REGISTER_DECLARE (streaktv);
+GST_ELEMENT_REGISTER_DECLARE (rippletv);
+
+/* Cheap LCG pseudo-random generator (glibc-style constants 1103515245/12345),
+ * favoring speed over quality for per-pixel effects.
+ * NOTE(review): the static seed makes this non-thread-safe, and because this
+ * is a static inline in a header each translation unit gets its own
+ * independent seed — presumably acceptable for visual noise; confirm. */
+static inline guint
+fastrand (void)
+{
+ static guint fastrand_val;
+
+ return (fastrand_val = fastrand_val * 1103515245 + 12345);
+}
+
diff --git a/gst/effectv/gstop.c b/gst/effectv/gstop.c
new file mode 100644
index 0000000000..b40c893c7a
--- /dev/null
+++ b/gst/effectv/gstop.c
@@ -0,0 +1,421 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * OpTV - Optical art meets real-time video effect.
+ * Copyright (C) 2004-2005 FUKUCHI Kentaro
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-optv
+ * @title: optv
+ *
+ * Traditional black-white optical animation is now resurrected as a
+ * real-time video effect. Input images are binarized and combined with
+ * various optical pattern.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! optv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of optv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gstop.h"
+#include "gsteffectv.h"
+
+#include <gst/video/video.h>
+
+enum
+{
+ OP_SPIRAL1 = 0,
+ OP_SPIRAL2,
+ OP_PARABOLA,
+ OP_HSTRIPE
+};
+
+#define GST_TYPE_OPTV_MODE (gst_optv_mode_get_type())
+/* Lazily registers the GEnum type for the "mode" property.
+ * NOTE(review): the display names look mismatched against the enum values —
+ * OP_PARABOLA is labelled "Horizontal Stripes" and OP_HSTRIPE "Vertical
+ * Stripes" while OP_SPIRAL2 gets "Radiation"; verify against upstream
+ * effectv before changing, since nicks are ABI for applications. */
+static GType
+gst_optv_mode_get_type (void)
+{
+ static GType type = 0;
+
+ static const GEnumValue enumvalue[] = {
+ {OP_SPIRAL1, "Maelstrom", "maelstrom"},
+ {OP_SPIRAL2, "Radiation", "radiation"},
+ {OP_PARABOLA, "Horizontal Stripes",
+ "horizontal-stripes"},
+ {OP_HSTRIPE, "Vertical Stripes", "vertical-stripes"},
+ {0, NULL, NULL},
+ };
+
+ if (!type) {
+ type = g_enum_register_static ("GstOpTVMode", enumvalue);
+ }
+ return type;
+}
+
+#define DEFAULT_MODE OP_SPIRAL1
+#define DEFAULT_SPEED 16
+#define DEFAULT_THRESHOLD 60
+
+enum
+{
+ PROP_0,
+ PROP_MODE,
+ PROP_SPEED,
+ PROP_THRESHOLD
+};
+
+static guint32 palette[256];
+
+#define gst_optv_parent_class parent_class
+G_DEFINE_TYPE (GstOpTV, gst_optv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (optv, "optv", GST_RANK_NONE, gst_optv_get_type ());
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
+#endif
+
+static GstStaticPadTemplate gst_optv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+static GstStaticPadTemplate gst_optv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+/* Build the 256-entry grayscale palette used to render the optical pattern:
+ * indices 0-111 are black, 112-127 ramp up to white, 128-239 are white and
+ * 240-255 ramp back down — giving a smoothed two-level (black/white) wave. */
+static void
+initPalette (void)
+{
+ gint i;
+ guint8 v;
+
+ for (i = 0; i < 112; i++) {
+ palette[i] = 0;
+ palette[i + 128] = 0xffffff;
+ }
+ for (i = 0; i < 16; i++) {
+ v = 16 * (i + 1) - 1;
+ palette[i + 112] = (v << 16) | (v << 8) | v;
+ v = 255 - v;
+ palette[i + 240] = (v << 16) | (v << 8) | v;
+ }
+}
+
+/* Precompute the four per-pixel phase lookup maps (one per mode), each value
+ * a 0-255 phase into the palette: two spirals from polar coordinates, a
+ * parabola, and horizontal stripes scaled by sci.
+ * NOTE(review): sci = 640 / width is integer division, so for width > 640 it
+ * becomes 0 and the OP_HSTRIPE map degenerates to all zeros — presumably a
+ * leftover from effectv's fixed 640-wide assumption; confirm upstream. */
+static void
+setOpmap (gint8 * opmap[4], gint width, gint height)
+{
+ gint i, j, x, y;
+#ifndef PS2
+ gdouble xx, yy, r, at, rr;
+#else
+ gfloat xx, yy, r, at, rr;
+#endif
+ gint sci;
+
+ sci = 640 / width;
+ i = 0;
+ for (y = 0; y < height; y++) {
+ /* normalized coordinates centered on the frame, scaled by width */
+ yy = (gdouble) (y - height / 2) / width;
+ for (x = 0; x < width; x++) {
+ xx = (gdouble) x / width - 0.5;
+#ifndef PS2
+ r = sqrt (xx * xx + yy * yy);
+ at = atan2 (xx, yy);
+#else
+ r = sqrtf (xx * xx + yy * yy);
+ at = atan2f (xx, yy);
+#endif
+
+ opmap[OP_SPIRAL1][i] = ((guint)
+ ((at / G_PI * 256) + (r * 4000))) & 255;
+
+ /* second spiral adds a sawtooth modulation on the radius */
+ j = r * 300 / 32;
+ rr = r * 300 - j * 32;
+ j *= 64;
+ j += (rr > 28) ? (rr - 28) * 16 : 0;
+ opmap[OP_SPIRAL2][i] = ((guint)
+ ((at / G_PI * 4096) + (r * 1600) - j)) & 255;
+
+ opmap[OP_PARABOLA][i] =
+ ((guint) (yy / (xx * xx * 0.3 + 0.1) * 400)) & 255;
+ opmap[OP_HSTRIPE][i] = x * 8 * sci;
+ i++;
+ }
+ }
+}
+
+/* Taken from effectv/image.c */
+/* Y value filters */
+/* Binarize each pixel against a luma threshold: writes 0xff to diff[] where
+ * the pixel's approximate luma exceeds y_threshold, 0 otherwise.
+ * The shifts weight the channels 2R + 4G + 1B (a cheap integer luma), and
+ * the threshold is scaled by 7 to match the summed weights.  v >> 24 then
+ * smears the sign bit of the gint into 0x00 / 0xff.
+ * NOTE(review): right-shifting a negative int is implementation-defined in C;
+ * this relies on arithmetic shift, which holds on GCC/Clang targets. */
+static void
+image_y_over (guint32 * src, guint8 * diff, gint y_threshold, gint video_area)
+{
+ gint i;
+ gint R, G, B, v;
+ guint8 *p = diff;
+
+ for (i = video_area; i > 0; i--) {
+ R = ((*src) & 0xff0000) >> (16 - 1);
+ G = ((*src) & 0xff00) >> (8 - 2);
+ B = (*src) & 0xff;
+ v = y_threshold * 7 - (R + G + B);
+ *p = (guint8) (v >> 24);
+ src++;
+ p++;
+ }
+}
+
+/* GstVideoFilter::transform_frame vfunc: render one optical-art frame.
+ * Binarizes the input (image_y_over), then for each pixel XORs the
+ * thresholded bit with the mode's precomputed phase map offset by the
+ * animated phase, and looks the result up in the black/white palette.
+ * phase is a guint8 so the animation wraps naturally modulo 256. */
+static GstFlowReturn
+gst_optv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
+{
+ GstOpTV *filter = GST_OPTV (vfilter);
+ guint32 *src, *dest;
+ gint8 *p;
+ guint8 *diff;
+ gint x, y, width, height;
+ GstClockTime timestamp, stream_time;
+ guint8 phase;
+
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+ stream_time =
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+ GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ /* sync controller-bound properties (speed, threshold, mode) to this frame */
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+ /* opmap is only allocated once caps are negotiated in set_info */
+ if (G_UNLIKELY (filter->opmap[0] == NULL))
+ return GST_FLOW_NOT_NEGOTIATED;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ /* lock guards mode/speed/threshold against concurrent set_property */
+ GST_OBJECT_LOCK (filter);
+ switch (filter->mode) {
+ default:
+ case 0:
+ p = filter->opmap[OP_SPIRAL1];
+ break;
+ case 1:
+ p = filter->opmap[OP_SPIRAL2];
+ break;
+ case 2:
+ p = filter->opmap[OP_PARABOLA];
+ break;
+ case 3:
+ p = filter->opmap[OP_HSTRIPE];
+ break;
+ }
+
+ filter->phase -= filter->speed;
+
+ diff = filter->diff;
+ image_y_over (src, diff, filter->threshold, width * height);
+ phase = filter->phase;
+
+ for (y = 0; y < height; y++) {
+ for (x = 0; x < width; x++) {
+ *dest++ = palette[(((guint8) (*p + phase)) ^ *diff++) & 255];
+ p++;
+ }
+ }
+ GST_OBJECT_UNLOCK (filter);
+
+ return GST_FLOW_OK;
+}
+
+/* GstVideoFilter::set_info vfunc: (re)allocate the four phase maps and the
+ * per-pixel diff buffer for the negotiated frame size, then precompute the
+ * maps.  Old buffers are freed first so renegotiation does not leak. */
+static gboolean
+gst_optv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+ GstOpTV *filter = GST_OPTV (vfilter);
+ gint i, width, height;
+
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+ for (i = 0; i < 4; i++) {
+ g_free (filter->opmap[i]);
+ filter->opmap[i] = g_new (gint8, width * height);
+ }
+ setOpmap (filter->opmap, width, height);
+
+ g_free (filter->diff);
+ filter->diff = g_new (guint8, width * height);
+
+ return TRUE;
+}
+
+/* GstBaseTransform::start vfunc: restart the phase animation from zero. */
+static gboolean
+gst_optv_start (GstBaseTransform * trans)
+{
+ GstOpTV *filter = GST_OPTV (trans);
+
+ filter->phase = 0;
+
+ return TRUE;
+}
+
+/* GObject::finalize: free the phase maps and diff buffer, then chain up.
+ * The opmap[0] check mirrors set_info's all-or-nothing allocation. */
+static void
+gst_optv_finalize (GObject * object)
+{
+ GstOpTV *filter = GST_OPTV (object);
+
+ if (filter->opmap[0]) {
+ gint i;
+
+ for (i = 0; i < 4; i++) {
+ g_free (filter->opmap[i]);
+ filter->opmap[i] = NULL;
+ }
+ }
+
+ g_free (filter->diff);
+ filter->diff = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject::set_property: store mode/speed/threshold under the object lock,
+ * which transform_frame also takes before reading them. */
+static void
+gst_optv_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+{
+ GstOpTV *filter = GST_OPTV (object);
+
+ GST_OBJECT_LOCK (filter);
+ switch (prop_id) {
+ case PROP_MODE:
+ filter->mode = g_value_get_enum (value);
+ break;
+ case PROP_SPEED:
+ filter->speed = g_value_get_int (value);
+ break;
+ case PROP_THRESHOLD:
+ filter->threshold = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (filter);
+}
+
+/* GObject::get_property: read back mode/speed/threshold.
+ * NOTE(review): unlike the setter this takes no OBJECT_LOCK; the reads are
+ * plain gint/guint loads so tearing is unlikely in practice, but it is
+ * inconsistent with set_property — confirm whether that is intentional. */
+static void
+gst_optv_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstOpTV *filter = GST_OPTV (object);
+
+ switch (prop_id) {
+ case PROP_MODE:
+ g_value_set_enum (value, filter->mode);
+ break;
+ case PROP_SPEED:
+ g_value_set_int (value, filter->speed);
+ break;
+ case PROP_THRESHOLD:
+ g_value_set_uint (value, filter->threshold);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Class init: install properties (mode/speed/threshold — speed and threshold
+ * are controllable), metadata, pad templates and vfuncs.  The shared palette
+ * is built here, once per class initialization. */
+static void
+gst_optv_class_init (GstOpTVClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->set_property = gst_optv_set_property;
+ gobject_class->get_property = gst_optv_get_property;
+
+ gobject_class->finalize = gst_optv_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_MODE,
+ g_param_spec_enum ("mode", "Mode",
+ "Mode", GST_TYPE_OPTV_MODE, DEFAULT_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_SPEED,
+ g_param_spec_int ("speed", "Speed",
+ "Effect speed", G_MININT, G_MAXINT, DEFAULT_SPEED,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /* NOTE(review): max is G_MAXINT for a uint property — presumably meant
+ * G_MAXUINT or a tighter luma bound; harmless but worth confirming */
+ g_object_class_install_property (gobject_class, PROP_THRESHOLD,
+ g_param_spec_uint ("threshold", "Threshold",
+ "Luma threshold", 0, G_MAXINT, DEFAULT_THRESHOLD,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class, "OpTV effect",
+ "Filter/Effect/Video",
+ "Optical art meets real-time video effect",
+ "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_optv_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_optv_src_template);
+
+ trans_class->start = GST_DEBUG_FUNCPTR (gst_optv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_optv_set_info);
+ vfilter_class->transform_frame = GST_DEBUG_FUNCPTR (gst_optv_transform_frame);
+
+ initPalette ();
+
+ gst_type_mark_as_plugin_api (GST_TYPE_OPTV_MODE, 0);
+}
+
+/* Instance init: set property defaults; buffers are allocated in set_info. */
+static void
+gst_optv_init (GstOpTV * filter)
+{
+ filter->speed = DEFAULT_SPEED;
+ filter->mode = DEFAULT_MODE;
+ filter->threshold = DEFAULT_THRESHOLD;
+}
diff --git a/gst/effectv/gstop.h b/gst/effectv/gstop.h
new file mode 100644
index 0000000000..5572402321
--- /dev/null
+++ b/gst/effectv/gstop.h
@@ -0,0 +1,74 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * OpTV - Optical art meets real-time video effect.
+ * Copyright (C) 2004-2005 FUKUCHI Kentaro
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_OP_H__
+#define __GST_OP_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_OPTV \
+ (gst_optv_get_type())
+#define GST_OPTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPTV,GstOpTV))
+#define GST_OPTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPTV,GstOpTVClass))
+#define GST_IS_OPTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPTV))
+#define GST_IS_OPTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPTV))
+
+typedef struct _GstOpTV GstOpTV;
+typedef struct _GstOpTVClass GstOpTVClass;
+
+struct _GstOpTV
+{
+ GstVideoFilter element;
+
+ /* < private > */
+ gint mode;        /* which of the 4 pattern maps to use (GstOpTVMode) */
+ gint speed;       /* phase increment per frame (controllable) */
+ guint threshold;  /* luma binarization threshold (controllable) */
+
+ gint8 *opmap[4];  /* precomputed per-pixel phase maps, one per mode */
+ guint8 *diff;     /* per-pixel binarized luma scratch buffer */
+ guint8 phase;     /* animation phase, wraps mod 256 */
+};
+
+struct _GstOpTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_optv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_OP_H__ */
diff --git a/gst/effectv/gstquark.c b/gst/effectv/gstquark.c
new file mode 100644
index 0000000000..5d0536867e
--- /dev/null
+++ b/gst/effectv/gstquark.c
@@ -0,0 +1,301 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV:
+ * Copyright (C) 2001-2002 FUKUCHI Kentarou
+ *
+ * QuarkTV - motion disolver.
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-quarktv
+ * @title: quarktv
+ *
+ * QuarkTV disolves moving objects. It picks up pixels from
+ * the last frames randomly.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! quarktv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of quarktv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gstquark.h"
+#include "gsteffectv.h"
+
+/* number of frames of time-buffer. It should be as a configurable parameter */
+/* This number also must be 2^n just for the speed. */
+#define PLANES 16
+
+enum
+{
+ PROP_0,
+ PROP_PLANES
+};
+
+#define gst_quarktv_parent_class parent_class
+G_DEFINE_TYPE (GstQuarkTV, gst_quarktv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (quarktv, "quarktv", GST_RANK_NONE,
+ GST_TYPE_QUARKTV);
+
+static void gst_quarktv_planetable_clear (GstQuarkTV * filter);
+
+static GstStaticPadTemplate gst_quarktv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR, BGRx, RGBx }"))
+ );
+
+static GstStaticPadTemplate gst_quarktv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR, BGRx, RGBx }"))
+ );
+
+/* GstVideoFilter::set_info vfunc: drop all cached history frames (their size
+ * no longer matches) and remember the new frame area in pixels. */
+static gboolean
+gst_quarktv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+ GstQuarkTV *filter = GST_QUARKTV (vfilter);
+ gint width, height;
+
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+ gst_quarktv_planetable_clear (filter);
+ filter->area = width * height;
+
+ return TRUE;
+}
+
+/* GstVideoFilter::transform_frame vfunc: "motion dissolve" — keep a ring of
+ * the last N input buffers and, for every output pixel, copy the pixel from
+ * a randomly chosen buffer in that history.
+ * NOTE(review): `while (--area)` decrements before the test, so pixel index 0
+ * is never written and dest[0] is left uninitialized — looks like an
+ * off-by-one inherited from effectv; confirm before changing.
+ * NOTE(review): the local `rand` shadows stdlib rand(); harmless here but
+ * easy to misread. */
+static GstFlowReturn
+gst_quarktv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
+{
+ GstQuarkTV *filter = GST_QUARKTV (vfilter);
+ gint area;
+ guint32 *src, *dest;
+ GstClockTime timestamp;
+ GstBuffer **planetable;
+ gint planes, current_plane;
+
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+ timestamp =
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+ GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ /* sync the controllable "planes" property to this frame's stream time */
+ if (GST_CLOCK_TIME_IS_VALID (timestamp))
+ gst_object_sync_values (GST_OBJECT (filter), timestamp);
+
+ /* planetable is allocated in start(); not streaming yet otherwise */
+ if (G_UNLIKELY (filter->planetable == NULL))
+ return GST_FLOW_FLUSHING;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ GST_OBJECT_LOCK (filter);
+ area = filter->area;
+ planetable = filter->planetable;
+ planes = filter->planes;
+ current_plane = filter->current_plane;
+
+ /* replace the oldest slot with a ref to the current input frame */
+ if (planetable[current_plane])
+ gst_buffer_unref (planetable[current_plane]);
+ planetable[current_plane] = gst_buffer_ref (in_frame->buffer);
+
+ /* For each pixel */
+ while (--area) {
+ GstBuffer *rand;
+
+ /* pick a random buffer */
+ rand = planetable[(current_plane + (fastrand () >> 24)) % planes];
+
+ /* Copy the pixel from the random buffer to dest, FIXME, slow */
+ if (rand)
+ gst_buffer_extract (rand, area * 4, &dest[area], 4);
+ else
+ dest[area] = src[area];
+ }
+
+ /* advance the ring backwards, wrapping at 0 */
+ filter->current_plane--;
+ if (filter->current_plane < 0)
+ filter->current_plane = planes - 1;
+ GST_OBJECT_UNLOCK (filter);
+
+ return GST_FLOW_OK;
+}
+
+/* Drop every cached history frame, leaving the table itself allocated.
+ * Safe to call before the table exists (returns early on NULL). */
+static void
+gst_quarktv_planetable_clear (GstQuarkTV * filter)
+{
+ gint i;
+
+ if (filter->planetable == NULL)
+ return;
+
+ for (i = 0; i < filter->planes; i++) {
+ if (GST_IS_BUFFER (filter->planetable[i])) {
+ gst_buffer_unref (filter->planetable[i]);
+ }
+ filter->planetable[i] = NULL;
+ }
+}
+
+/* GstBaseTransform::start vfunc: (re)allocate the frame-history table sized
+ * to the current "planes" property, discarding any previous contents. */
+static gboolean
+gst_quarktv_start (GstBaseTransform * trans)
+{
+ GstQuarkTV *filter = GST_QUARKTV (trans);
+
+ if (filter->planetable) {
+ gst_quarktv_planetable_clear (filter);
+ g_free (filter->planetable);
+ }
+ filter->planetable =
+ (GstBuffer **) g_malloc0 (filter->planes * sizeof (GstBuffer *));
+
+ return TRUE;
+}
+
+/* GObject::finalize: release cached frames and the table, then chain up. */
+static void
+gst_quarktv_finalize (GObject * object)
+{
+ GstQuarkTV *filter = GST_QUARKTV (object);
+
+ if (filter->planetable) {
+ gst_quarktv_planetable_clear (filter);
+ g_free (filter->planetable);
+ filter->planetable = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject::set_property: resize the history table when "planes" changes.
+ * Existing buffers that still fit are carried over; surplus buffers are
+ * unreffed.  Done under the object lock so transform_frame never sees a
+ * half-migrated table. */
+static void
+gst_quarktv_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+{
+ GstQuarkTV *filter = GST_QUARKTV (object);
+
+ GST_OBJECT_LOCK (filter);
+ switch (prop_id) {
+ case PROP_PLANES:
+ {
+ gint new_n_planes = g_value_get_int (value);
+ GstBuffer **new_planetable;
+ gint i;
+
+ /* If the number of planes changed, copy across any existing planes */
+ if (new_n_planes != filter->planes) {
+ new_planetable =
+ (GstBuffer **) g_malloc0 (new_n_planes * sizeof (GstBuffer *));
+
+ if (filter->planetable) {
+ for (i = 0; (i < new_n_planes) && (i < filter->planes); i++) {
+ new_planetable[i] = filter->planetable[i];
+ }
+ for (; i < filter->planes; i++) {
+ if (filter->planetable[i])
+ gst_buffer_unref (filter->planetable[i]);
+ }
+ g_free (filter->planetable);
+ }
+
+ filter->planetable = new_planetable;
+ filter->planes = new_n_planes;
+ /* restart the ring from the last slot of the new table */
+ filter->current_plane = filter->planes - 1;
+ }
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (filter);
+}
+
+/* GObject::get_property: read back "planes".
+ * NOTE(review): no OBJECT_LOCK here although the setter takes it; a single
+ * gint read is benign in practice, but inconsistent — confirm intent. */
+static void
+gst_quarktv_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstQuarkTV *filter = GST_QUARKTV (object);
+
+ switch (prop_id) {
+ case PROP_PLANES:
+ g_value_set_int (value, filter->planes);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Class init: install the controllable "planes" property, element metadata,
+ * pad templates and the start/set_info/transform_frame vfuncs. */
+static void
+gst_quarktv_class_init (GstQuarkTVClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->set_property = gst_quarktv_set_property;
+ gobject_class->get_property = gst_quarktv_get_property;
+
+ gobject_class->finalize = gst_quarktv_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_PLANES,
+ g_param_spec_int ("planes", "Planes",
+ "Number of planes", 1, 64, PLANES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+ gst_element_class_set_static_metadata (gstelement_class, "QuarkTV effect",
+ "Filter/Effect/Video",
+ "Motion dissolver", "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_quarktv_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_quarktv_src_template);
+
+ trans_class->start = GST_DEBUG_FUNCPTR (gst_quarktv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_quarktv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_quarktv_transform_frame);
+}
+
+/* Instance init: default plane count; the table itself is allocated in
+ * start(), and the ring cursor begins at the last slot. */
+static void
+gst_quarktv_init (GstQuarkTV * filter)
+{
+ filter->planes = PLANES;
+ filter->current_plane = filter->planes - 1;
+}
diff --git a/gst/effectv/gstquark.h b/gst/effectv/gstquark.h
new file mode 100644
index 0000000000..5557756830
--- /dev/null
+++ b/gst/effectv/gstquark.h
@@ -0,0 +1,71 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV:
+ * Copyright (C) 2001-2002 FUKUCHI Kentarou
+ *
+ * QuarkTV - motion disolver.
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_QUARK_H__
+#define __GST_QUARK_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_QUARKTV \
+ (gst_quarktv_get_type())
+#define GST_QUARKTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_QUARKTV,GstQuarkTV))
+#define GST_QUARKTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_QUARKTV,GstQuarkTVClass))
+#define GST_IS_QUARKTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_QUARKTV))
+#define GST_IS_QUARKTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QUARKTV))
+
+typedef struct _GstQuarkTV GstQuarkTV;
+typedef struct _GstQuarkTVClass GstQuarkTVClass;
+
+struct _GstQuarkTV
+{
+ GstVideoFilter element;
+
+ /* < private > */
+ gint area;               /* frame size in pixels (width * height) */
+ gint planes;             /* number of history frames kept ("planes" property) */
+ gint current_plane;      /* ring-buffer cursor into planetable */
+ GstBuffer **planetable;  /* ring of reffed past input buffers (may hold NULLs) */
+};
+
+struct _GstQuarkTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_quarktv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_QUARK_H__ */
diff --git a/gst/effectv/gstradioac.c b/gst/effectv/gstradioac.c
new file mode 100644
index 0000000000..34496aa12e
--- /dev/null
+++ b/gst/effectv/gstradioac.c
@@ -0,0 +1,627 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * RadioacTV - motion-enlightment effect.
+ * Copyright (C) 2001-2002 FUKUCHI Kentaro
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-radioactv
+ * @title: radioactv
+ *
+ * RadioacTV does *NOT* detect a radioactivity. It detects a difference
+ * from previous frame and blurs it.
+ *
+ * RadioacTV has 4 mode, normal, strobe1, strobe2 and trigger.
+ * In trigger mode, effect appears only when the trigger property is %TRUE.
+ *
+ * strobe1 and strobe2 mode drops some frames. strobe1 mode uses the difference between
+ * current frame and previous frame dropped, while strobe2 mode uses the difference from
+ * previous frame displayed. The effect of strobe2 is stronger than strobe1.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! radioactv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of radioactv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gstradioac.h"
+#include "gsteffectv.h"
+
+enum
+{
+ RADIOAC_NORMAL = 0,
+ RADIOAC_STROBE,
+ RADIOAC_STROBE2,
+ RADIOAC_TRIGGER
+};
+
+enum
+{
+ COLOR_RED = 0,
+ COLOR_GREEN,
+ COLOR_BLUE,
+ COLOR_WHITE
+};
+
+#define GST_TYPE_RADIOACTV_MODE (gst_radioactv_mode_get_type())
+/* Lazily register the GstRadioacTVMode enum (normal, strobe1, strobe2,
+ * trigger) used by the "mode" property.
+ * NOTE(review): registration is not guarded by g_once_init_*; assumed
+ * to be first reached from class_init — confirm. */
+static GType
+gst_radioactv_mode_get_type (void)
+{
+  static GType type = 0;
+
+  static const GEnumValue enumvalue[] = {
+    {RADIOAC_NORMAL, "Normal", "normal"},
+    {RADIOAC_STROBE, "Strobe 1", "strobe1"},
+    {RADIOAC_STROBE2, "Strobe 2", "strobe2"},
+    {RADIOAC_TRIGGER, "Trigger", "trigger"},
+    {0, NULL, NULL},
+  };
+
+  if (!type) {
+    type = g_enum_register_static ("GstRadioacTVMode", enumvalue);
+  }
+  return type;
+}
+
+#define GST_TYPE_RADIOACTV_COLOR (gst_radioactv_color_get_type())
+/* Lazily register the GstRadioacTVColor enum (red, green, blue, white)
+ * used by the "color" property; same registration pattern as the mode
+ * enum above. */
+static GType
+gst_radioactv_color_get_type (void)
+{
+  static GType type = 0;
+
+  static const GEnumValue enumvalue[] = {
+    {COLOR_RED, "Red", "red"},
+    {COLOR_GREEN, "Green", "green"},
+    {COLOR_BLUE, "Blue", "blue"},
+    {COLOR_WHITE, "White", "white"},
+    {0, NULL, NULL},
+  };
+
+  if (!type) {
+    type = g_enum_register_static ("GstRadioacTVColor", enumvalue);
+  }
+  return type;
+}
+
+#define DEFAULT_MODE RADIOAC_NORMAL
+#define DEFAULT_COLOR COLOR_WHITE
+#define DEFAULT_INTERVAL 3
+#define DEFAULT_TRIGGER FALSE
+
+enum
+{
+ PROP_0,
+ PROP_MODE,
+ PROP_COLOR,
+ PROP_INTERVAL,
+ PROP_TRIGGER
+};
+
+#define COLORS 32
+#define PATTERN 4
+#define MAGIC_THRESHOLD 40
+#define RATIO 0.95
+
+static guint32 palettes[COLORS * PATTERN];
+static const gint swap_tab[] = { 2, 1, 0, 3 };
+
+#define gst_radioactv_parent_class parent_class
+G_DEFINE_TYPE (GstRadioacTV, gst_radioactv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (radioactv, "radioactv", GST_RANK_NONE,
+ GST_TYPE_RADIOACTV);
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ RGBx, BGRx }")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
+#endif
+
+static GstStaticPadTemplate gst_radioactv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+static GstStaticPadTemplate gst_radioactv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+/* Build the global palette table: PATTERN (4) palettes of COLORS (32)
+ * entries — ramps for the three colour channels plus a grey "white"
+ * ramp.  The final pass masks the value with 0xfefeff so palette
+ * entries can be added to pixel values with the saturating-add trick
+ * used in transform_frame without carrying between components. */
+static void
+makePalette (void)
+{
+  gint i;
+
+#define DELTA (255/(COLORS/2-1))
+
+  /* red, green, blue */
+  for (i = 0; i < COLORS / 2; i++) {
+    palettes[i] = i * DELTA;
+    palettes[COLORS + i] = (i * DELTA) << 8;
+    palettes[COLORS * 2 + i] = (i * DELTA) << 16;
+  }
+  /* upper half of each ramp: mix towards the saturated channel */
+  for (i = 0; i < COLORS / 2; i++) {
+    palettes[i + COLORS / 2] = 255 | (i * DELTA) << 16 | (i * DELTA) << 8;
+    palettes[COLORS + i + COLORS / 2] =
+        (255 << 8) | (i * DELTA) << 16 | i * DELTA;
+    palettes[COLORS * 2 + i + COLORS / 2] =
+        (255 << 16) | (i * DELTA) << 8 | i * DELTA;
+  }
+  /* white */
+  for (i = 0; i < COLORS; i++) {
+    palettes[COLORS * 3 + i] = (255 * i / COLORS) * 0x10101;
+  }
+  for (i = 0; i < COLORS * PATTERN; i++) {
+    palettes[i] = palettes[i] & 0xfefeff;
+  }
+#undef DELTA
+}
+
+#define VIDEO_HWIDTH (filter->buf_width/2)
+#define VIDEO_HHEIGHT (filter->buf_height/2)
+
+/* this table assumes that video_width is times of 32 */
+/* Precompute the zoom mapping used by zoom():
+ * - blurzoomx[b] is a 32-bit mask per 32-pixel block; a set bit means
+ *   the source pointer advances by one for that destination pixel.
+ * - blurzoomy[y] is the pointer delta applied at the start of row y.
+ * Together they remap the buffer towards its centre by factor RATIO. */
+static void
+setTable (GstRadioacTV * filter)
+{
+  guint bits;
+  gint x, y, tx, ty, xx;
+  gint ptr, prevptr;
+
+  prevptr = (gint) (0.5 + RATIO * (-VIDEO_HWIDTH) + VIDEO_HWIDTH);
+  for (xx = 0; xx < (filter->buf_width_blocks); xx++) {
+    bits = 0;
+    for (x = 0; x < 32; x++) {
+      ptr = (gint) (0.5 + RATIO * (xx * 32 + x - VIDEO_HWIDTH) + VIDEO_HWIDTH);
+      bits = bits >> 1;
+      /* set the top bit whenever the scaled source position advanced */
+      if (ptr != prevptr)
+        bits |= 0x80000000;
+      prevptr = ptr;
+    }
+    filter->blurzoomx[xx] = bits;
+  }
+
+  ty = (gint) (0.5 + RATIO * (-VIDEO_HHEIGHT) + VIDEO_HHEIGHT);
+  tx = (gint) (0.5 + RATIO * (-VIDEO_HWIDTH) + VIDEO_HWIDTH);
+  xx = (gint) (0.5 + RATIO * (filter->buf_width - 1 - VIDEO_HWIDTH) +
+      VIDEO_HWIDTH);
+  filter->blurzoomy[0] = ty * filter->buf_width + tx;
+  /* each subsequent entry is relative to where the previous row ended */
+  prevptr = ty * filter->buf_width + xx;
+  for (y = 1; y < filter->buf_height; y++) {
+    ty = (gint) (0.5 + RATIO * (y - VIDEO_HHEIGHT) + VIDEO_HHEIGHT);
+    filter->blurzoomy[y] = ty * filter->buf_width + tx - prevptr;
+    prevptr = ty * filter->buf_width + xx;
+  }
+}
+
+#undef VIDEO_HWIDTH
+#undef VIDEO_HHEIGHT
+
+/* One decay/diffusion pass: each cell of the back half of blurzoombuf
+ * becomes the average of its four neighbours in the front half, minus
+ * one so the glow fades; the `v == 255` check clamps the 0 - 1
+ * underflow back to 0.  The outermost one-pixel border is skipped.
+ * NOTE(review): the initial offset uses the full frame width from
+ * in_info while blurzoombuf rows are buf_width wide — these differ
+ * when the frame width is not a multiple of 32; confirm intended. */
+static void
+blur (GstRadioacTV * filter)
+{
+  gint x, y;
+  gint width;
+  guint8 *p, *q;
+  guint8 v;
+  GstVideoInfo *info;
+
+  info = &GST_VIDEO_FILTER (filter)->in_info;
+
+  width = filter->buf_width;
+  p = filter->blurzoombuf + GST_VIDEO_INFO_WIDTH (info) + 1;
+  q = p + filter->buf_area;
+
+  for (y = filter->buf_height - 2; y > 0; y--) {
+    for (x = width - 2; x > 0; x--) {
+      /* neighbour average with a decay of one */
+      v = (*(p - width) + *(p - 1) + *(p + 1) + *(p + width)) / 4 - 1;
+      if (v == 255)
+        v = 0;
+      *q = v;
+      p++;
+      q++;
+    }
+    /* skip the border column of this row and the next */
+    p += 2;
+    q += 2;
+  }
+}
+
+/* Map the blurred back half of blurzoombuf onto the front half with a
+ * slight zoom towards the centre, using the tables built by setTable():
+ * blurzoomy[] repositions the source pointer at each row start, and the
+ * bits of blurzoomx[] advance it (or not) for each output pixel. */
+static void
+zoom (GstRadioacTV * filter)
+{
+  gint b, x, y;
+  guint8 *p, *q;
+  gint blocks, height;
+  gint dx;
+
+  p = filter->blurzoombuf + filter->buf_area;   /* source: blurred half */
+  q = filter->blurzoombuf;      /* destination: front half */
+  height = filter->buf_height;
+  blocks = filter->buf_width_blocks;
+
+  for (y = 0; y < height; y++) {
+    p += filter->blurzoomy[y];
+    for (b = 0; b < blocks; b++) {
+      dx = filter->blurzoomx[b];
+      for (x = 0; x < 32; x++) {
+        /* advance the source pointer when the table bit is set */
+        p += (dx & 1);
+        *q++ = *p;
+        dx = dx >> 1;
+      }
+    }
+  }
+}
+
+/* One iteration of the radiation-buffer update: decay-blur, then zoom. */
+static void
+blurzoomcore (GstRadioacTV * filter)
+{
+  blur (filter);
+  zoom (filter);
+}
+
+/* Background image is refreshed every frame */
+/* For each pixel compute a weighted luma (2*R + 4*G + B), diff it
+ * against the stored background value, store the new luma, and write
+ * the result mask: the two arithmetic shifts by 24 smear the sign bits
+ * of (v + threshold) and (threshold - v), so *r becomes 0xff when
+ * |v| > y_threshold and 0x00 otherwise — a branch-free threshold. */
+static void
+image_bgsubtract_update_y (guint32 * src, gint16 * background, guint8 * diff,
+    gint video_area, gint y_threshold)
+{
+  gint i;
+  gint R, G, B;
+  guint32 *p;
+  gint16 *q;
+  guint8 *r;
+  gint v;
+
+  p = src;
+  q = background;
+  r = diff;
+  for (i = 0; i < video_area; i++) {
+    /* channel extraction with built-in weighting: R*2, G*4, B*1 */
+    R = ((*p) & 0xff0000) >> (16 - 1);
+    G = ((*p) & 0xff00) >> (8 - 2);
+    B = (*p) & 0xff;
+    v = (R + G + B) - (gint) (*q);
+    *q = (gint16) (R + G + B);
+    *r = ((v + y_threshold) >> 24) | ((y_threshold - v) >> 24);
+
+    p++;
+    q++;
+    r++;
+  }
+}
+
+/* Process one frame: update the motion mask against the background
+ * model, feed detected motion into the radiation buffer, run one
+ * blur+zoom pass and composite the palette-mapped glow over the source
+ * (or over the last snapshot in the strobe modes).
+ * Uses the RADIOAC_* enum constants instead of bare 0/1/2/3. */
+static GstFlowReturn
+gst_radioactv_transform_frame (GstVideoFilter * vfilter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GstRadioacTV *filter = GST_RADIOACTV (vfilter);
+  guint32 *src, *dest;
+  GstClockTime timestamp, stream_time;
+  gint x, y, width, height;
+  guint32 a, b;
+  guint8 *diff, *p;
+  guint32 *palette;
+
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+  stream_time =
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
+      GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  GST_OBJECT_LOCK (filter);
+  /* Select the palette for the configured colour; swap_tab compensates
+   * for the red/blue swap of the alternate pixel format. */
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+  if (GST_VIDEO_FRAME_FORMAT (in_frame) == GST_VIDEO_FORMAT_RGBx) {
+    palette = &palettes[COLORS * filter->color];
+  } else {
+    palette = &palettes[COLORS * swap_tab[filter->color]];
+  }
+#else
+  if (GST_VIDEO_FRAME_FORMAT (in_frame) == GST_VIDEO_FORMAT_xBGR) {
+    palette = &palettes[COLORS * filter->color];
+  } else {
+    palette = &palettes[COLORS * swap_tab[filter->color]];
+  }
+#endif
+  diff = filter->diff;
+
+  /* In trigger mode the effect runs only while "trigger" is TRUE */
+  if (filter->mode == RADIOAC_TRIGGER && filter->trigger)
+    filter->snaptime = 0;
+  else if (filter->mode == RADIOAC_TRIGGER && !filter->trigger)
+    filter->snaptime = 1;
+
+  if (filter->mode != RADIOAC_STROBE2 || filter->snaptime <= 0) {
+    image_bgsubtract_update_y (src, filter->background, diff,
+        width * height, MAGIC_THRESHOLD * 7);
+    if (filter->mode == RADIOAC_NORMAL || filter->snaptime <= 0) {
+      /* inject detected motion into the radiation buffer */
+      diff += filter->buf_margin_left;
+      p = filter->blurzoombuf;
+      for (y = 0; y < filter->buf_height; y++) {
+        for (x = 0; x < filter->buf_width; x++) {
+          p[x] |= diff[x] >> 3;
+        }
+        diff += width;
+        p += filter->buf_width;
+      }
+      if (filter->mode == RADIOAC_STROBE || filter->mode == RADIOAC_STROBE2) {
+        memcpy (filter->snapframe, src, width * height * 4);
+      }
+    }
+  }
+  blurzoomcore (filter);
+
+  /* strobe modes composite over the frozen snapshot, not live input */
+  if (filter->mode == RADIOAC_STROBE || filter->mode == RADIOAC_STROBE2) {
+    src = filter->snapframe;
+  }
+  p = filter->blurzoombuf;
+  for (y = 0; y < height; y++) {
+    for (x = 0; x < filter->buf_margin_left; x++) {
+      *dest++ = *src++;
+    }
+    for (x = 0; x < filter->buf_width; x++) {
+      /* saturating per-component add of the palette colour: b catches
+       * the carry bits, (b - (b >> 8)) turns each carry into 0xff */
+      a = *src++ & 0xfefeff;
+      b = palette[*p++];
+      a += b;
+      b = a & 0x1010100;
+      *dest++ = a | (b - (b >> 8));
+    }
+    for (x = 0; x < filter->buf_margin_right; x++) {
+      *dest++ = *src++;
+    }
+  }
+
+  if (filter->mode == RADIOAC_STROBE || filter->mode == RADIOAC_STROBE2) {
+    filter->snaptime--;
+    if (filter->snaptime < 0) {
+      filter->snaptime = filter->interval;
+    }
+  }
+  GST_OBJECT_UNLOCK (filter);
+
+  return GST_FLOW_OK;
+}
+
+/* Caps negotiated: compute the centred effect area (width rounded down
+ * to a multiple of 32) and (re)allocate every working buffer for the
+ * new frame size, then rebuild the zoom tables.
+ * Returns FALSE if the frame is too wide for the block tables. */
+static gboolean
+gst_radioactv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstRadioacTV *filter = GST_RADIOACTV (vfilter);
+  gint width, height;
+
+  width = GST_VIDEO_INFO_WIDTH (in_info);
+  height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+  filter->buf_width_blocks = width / 32;
+  if (filter->buf_width_blocks > 255)
+    goto too_wide;
+
+  filter->buf_width = filter->buf_width_blocks * 32;
+  filter->buf_height = height;
+  filter->buf_area = filter->buf_height * filter->buf_width;
+  filter->buf_margin_left = (width - filter->buf_width) / 2;
+  /* FIX: the right margin is what remains of the frame *width*; the
+   * previous code used height here, yielding a wrong (possibly
+   * negative) margin and corrupting the copy loops in transform_frame */
+  filter->buf_margin_right =
+      width - filter->buf_width - filter->buf_margin_left;
+
+  g_free (filter->blurzoombuf);
+  /* two halves: blur() writes the second half, zoom() reads it back */
+  filter->blurzoombuf = g_new0 (guint8, filter->buf_area * 2);
+
+  g_free (filter->blurzoomx);
+  /* NOTE(review): only buf_width_blocks entries are ever used */
+  filter->blurzoomx = g_new0 (gint, filter->buf_width);
+
+  g_free (filter->blurzoomy);
+  filter->blurzoomy = g_new0 (gint, filter->buf_height);
+
+  g_free (filter->snapframe);
+  filter->snapframe = g_new (guint32, width * height);
+
+  g_free (filter->diff);
+  filter->diff = g_new (guint8, width * height);
+
+  g_free (filter->background);
+  filter->background = g_new0 (gint16, width * height);
+
+  setTable (filter);
+
+  return TRUE;
+
+  /* ERRORS */
+too_wide:
+  {
+    GST_DEBUG_OBJECT (filter, "frame too wide");
+    return FALSE;
+  }
+}
+
+/* GstBaseTransform::start — reset the strobe/trigger countdown before
+ * streaming begins. */
+static gboolean
+gst_radioactv_start (GstBaseTransform * trans)
+{
+  GstRadioacTV *filter = GST_RADIOACTV (trans);
+
+  filter->snaptime = 0;
+
+  return TRUE;
+}
+
+/* GObject::finalize — free all per-instance working buffers (allocated
+ * in set_info), then chain up to the parent class. */
+static void
+gst_radioactv_finalize (GObject * object)
+{
+  GstRadioacTV *filter = GST_RADIOACTV (object);
+
+  g_free (filter->snapframe);
+  filter->snapframe = NULL;
+
+  g_free (filter->blurzoombuf);
+  filter->blurzoombuf = NULL;
+
+  g_free (filter->diff);
+  filter->diff = NULL;
+
+  g_free (filter->background);
+  filter->background = NULL;
+
+  g_free (filter->blurzoomx);
+  filter->blurzoomx = NULL;
+
+  g_free (filter->blurzoomy);
+  filter->blurzoomy = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject::set_property — fields are written under the object lock
+ * because transform_frame reads them from the streaming thread. */
+static void
+gst_radioactv_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRadioacTV *filter = GST_RADIOACTV (object);
+
+  GST_OBJECT_LOCK (filter);
+  switch (prop_id) {
+    case PROP_MODE:
+      filter->mode = g_value_get_enum (value);
+      /* 3 == RADIOAC_TRIGGER: arm the countdown immediately */
+      if (filter->mode == 3)
+        filter->snaptime = 1;
+      break;
+    case PROP_COLOR:
+      filter->color = g_value_get_enum (value);
+      break;
+    case PROP_INTERVAL:
+      filter->interval = g_value_get_uint (value);
+      break;
+    case PROP_TRIGGER:
+      filter->trigger = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (filter);
+}
+
+/* GObject::get_property.
+ * NOTE(review): reads are not taken under GST_OBJECT_LOCK, unlike
+ * set_property — each read is an independent scalar snapshot, but
+ * confirm this asymmetry is intentional. */
+static void
+gst_radioactv_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRadioacTV *filter = GST_RADIOACTV (object);
+
+  switch (prop_id) {
+    case PROP_MODE:
+      g_value_set_enum (value, filter->mode);
+      break;
+    case PROP_COLOR:
+      g_value_set_enum (value, filter->color);
+      break;
+    case PROP_INTERVAL:
+      g_value_set_uint (value, filter->interval);
+      break;
+    case PROP_TRIGGER:
+      g_value_set_boolean (value, filter->trigger);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class init: install the four properties, element metadata, pad
+ * templates and vfuncs.  Also builds the shared palette table here so
+ * it exists before any instance processes a frame. */
+static void
+gst_radioactv_class_init (GstRadioacTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_radioactv_set_property;
+  gobject_class->get_property = gst_radioactv_get_property;
+
+  gobject_class->finalize = gst_radioactv_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_MODE,
+      g_param_spec_enum ("mode", "Mode",
+          "Mode", GST_TYPE_RADIOACTV_MODE, DEFAULT_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_COLOR,
+      g_param_spec_enum ("color", "Color",
+          "Color", GST_TYPE_RADIOACTV_COLOR, DEFAULT_COLOR,
+          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_INTERVAL,
+      g_param_spec_uint ("interval", "Interval",
+          "Snapshot interval (in strobe mode)", 0, G_MAXINT, DEFAULT_INTERVAL,
+          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_TRIGGER,
+      g_param_spec_boolean ("trigger", "Trigger",
+          "Trigger (in trigger mode)", DEFAULT_TRIGGER,
+          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_set_static_metadata (gstelement_class, "RadioacTV effect",
+      "Filter/Effect/Video",
+      "motion-enlightment effect",
+      "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_radioactv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_radioactv_src_template);
+
+  trans_class->start = GST_DEBUG_FUNCPTR (gst_radioactv_start);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_radioactv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_radioactv_transform_frame);
+
+  /* one-time construction of the shared colour palettes */
+  makePalette ();
+
+  gst_type_mark_as_plugin_api (GST_TYPE_RADIOACTV_MODE, 0);
+  gst_type_mark_as_plugin_api (GST_TYPE_RADIOACTV_COLOR, 0);
+}
+
+/* Instance init — apply the property defaults. */
+static void
+gst_radioactv_init (GstRadioacTV * filter)
+{
+  filter->mode = DEFAULT_MODE;
+  filter->color = DEFAULT_COLOR;
+  filter->interval = DEFAULT_INTERVAL;
+  filter->trigger = DEFAULT_TRIGGER;
+}
diff --git a/gst/effectv/gstradioac.h b/gst/effectv/gstradioac.h
new file mode 100644
index 0000000000..c8d7c99e5b
--- /dev/null
+++ b/gst/effectv/gstradioac.h
@@ -0,0 +1,87 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * RadioacTV - motion-enlightment effect.
+ * Copyright (C) 2001-2002 FUKUCHI Kentaro
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RADIOAC_H__
+#define __GST_RADIOAC_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RADIOACTV \
+ (gst_radioactv_get_type())
+#define GST_RADIOACTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RADIOACTV,GstRadioacTV))
+#define GST_RADIOACTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RADIOACTV,GstRadioacTVClass))
+#define GST_IS_RADIOACTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RADIOACTV))
+#define GST_IS_RADIOACTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RADIOACTV))
+
+typedef struct _GstRadioacTV GstRadioacTV;
+typedef struct _GstRadioacTVClass GstRadioacTVClass;
+
+struct _GstRadioacTV
+{
+  GstVideoFilter element;
+
+  /* < private > */
+  gint mode;                    /* "mode" property (GstRadioacTVMode) */
+  gint color;                   /* "color" property (GstRadioacTVColor) */
+  guint interval;               /* "interval" property: strobe period */
+  gboolean trigger;             /* "trigger" property (trigger mode) */
+
+  gint snaptime;                /* frames left until the next snapshot */
+
+  guint32 *snapframe;           /* frozen input frame (strobe modes) */
+  guint8 *blurzoombuf;          /* 2 * buf_area bytes: blur src + dst */
+  guint8 *diff;                 /* per-pixel motion mask */
+  gint16 *background;           /* running background luma per pixel */
+  gint *blurzoomx;              /* per-block zoom bitmasks (setTable) */
+  gint *blurzoomy;              /* per-row zoom pointer deltas */
+
+  gint buf_width_blocks;        /* effect width / 32 */
+  gint buf_width;               /* effect area width, multiple of 32 */
+  gint buf_height;              /* effect area height (= frame height) */
+  gint buf_area;                /* buf_width * buf_height */
+  gint buf_margin_right;        /* untouched columns right of the area */
+  gint buf_margin_left;         /* untouched columns left of the area */
+};
+
+struct _GstRadioacTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_radioactv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RADIOAC_H__ */
diff --git a/gst/effectv/gstrev.c b/gst/effectv/gstrev.c
new file mode 100644
index 0000000000..3ea55cf789
--- /dev/null
+++ b/gst/effectv/gstrev.c
@@ -0,0 +1,252 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * revTV based on Rutt-Etra Video Synthesizer 1974?
+
+ * (c)2002 Ed Tannenbaum
+ *
+ * This effect acts like a waveform monitor on each line.
+ * It was originally done by deflecting the electron beam on a monitor using
+ * additional electromagnets on the yoke of a b/w CRT.
+ * Here it is emulated digitally.
+
+ * Experimental tapes were made with this system by Bill and
+ * Louise Etra and Woody and Steina Vasulka
+
+ * The line spacing can be controlled using the 1 and 2 Keys.
+ * The gain is controlled using the 3 and 4 keys.
+ * The update rate is controlled using the 0 and - keys.
+
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-revtv
+ * @title: revtv
+ *
+ * RevTV acts like a video waveform monitor for each line of video
+ * processed. This creates a pseudo 3D effect based on the brightness
+ * of the video along each line.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! revtv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of revtv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gsteffectv.h"
+#include "gstrev.h"
+
+#define THE_COLOR 0xffffffff
+
+enum
+{
+ PROP_0,
+ PROP_DELAY,
+ PROP_LINESPACE,
+ PROP_GAIN
+};
+
+#define gst_revtv_parent_class parent_class
+G_DEFINE_TYPE (GstRevTV, gst_revtv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (revtv, "revtv", GST_RANK_NONE,
+ gst_revtv_get_type ());
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
+#endif
+
+static GstStaticPadTemplate gst_revtv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+static GstStaticPadTemplate gst_revtv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+/* Render each sampled input line as a luma-deflected waveform on a
+ * black background: the brighter a pixel, the further above its source
+ * line it is drawn. */
+static GstFlowReturn
+gst_revtv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
+    GstVideoFrame * out_frame)
+{
+  GstRevTV *filter = GST_REVTV (vfilter);
+  guint32 *src, *dest;
+  gint width, height, sstride, dstride;
+  guint32 *nsrc;
+  gint y, x, R, G, B, yval;
+  gint linespace, vscale;
+  GstClockTime timestamp, stream_time;
+
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+  stream_time =
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+      GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+  dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* Clear everything to black */
+  memset (dest, 0, dstride * height);
+
+  GST_OBJECT_LOCK (filter);
+  linespace = filter->linespace;
+  vscale = filter->vscale;
+
+  /* draw the offset lines */
+  for (y = 0; y < height; y += linespace) {
+    /* FIX: was "x <= width", which read one pixel past the end of each
+     * source line and could write past the destination line as well */
+    for (x = 0; x < width; x++) {
+      nsrc = src + (y * sstride / 4) + x;
+
+      /* Weighted luma for the current pixel (2*R + 4*G + B) */
+      R = ((*nsrc) & 0xff0000) >> (16 - 1);
+      G = ((*nsrc) & 0xff00) >> (8 - 2);
+      B = (*nsrc) & 0xff;
+
+      /* deflect upwards proportionally to brightness / gain */
+      yval = y - ((short) (R + G + B) / vscale);
+
+      if (yval > 0) {
+        dest[x + (yval * dstride / 4)] = THE_COLOR;
+      }
+    }
+  }
+  GST_OBJECT_UNLOCK (filter);
+
+  return GST_FLOW_OK;
+}
+
+/* GObject::set_property — written under the object lock because
+ * transform_frame reads these fields from the streaming thread.
+ * "delay" -> vgrabtime, "linespace" -> linespace, "gain" -> vscale. */
+static void
+gst_revtv_set_property (GObject * object, guint prop_id, const GValue * value,
+    GParamSpec * pspec)
+{
+  GstRevTV *filter = GST_REVTV (object);
+
+  GST_OBJECT_LOCK (filter);
+  switch (prop_id) {
+    case PROP_DELAY:
+      filter->vgrabtime = g_value_get_int (value);
+      break;
+    case PROP_LINESPACE:
+      filter->linespace = g_value_get_int (value);
+      break;
+    case PROP_GAIN:
+      filter->vscale = g_value_get_int (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (filter);
+}
+
+/* GObject::get_property.
+ * NOTE(review): not taken under GST_OBJECT_LOCK, unlike set_property —
+ * scalar snapshot reads only; confirm the asymmetry is intentional. */
+static void
+gst_revtv_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstRevTV *filter = GST_REVTV (object);
+
+  switch (prop_id) {
+    case PROP_DELAY:
+      g_value_set_int (value, filter->vgrabtime);
+      break;
+    case PROP_LINESPACE:
+      g_value_set_int (value, filter->linespace);
+      break;
+    case PROP_GAIN:
+      g_value_set_int (value, filter->vscale);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class init: install the delay/linespace/gain properties, element
+ * metadata, pad templates and the transform_frame vfunc. */
+static void
+gst_revtv_class_init (GstRevTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_revtv_set_property;
+  gobject_class->get_property = gst_revtv_get_property;
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DELAY,
+      g_param_spec_int ("delay", "Delay", "Delay in frames between updates",
+          1, 100, 1,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_LINESPACE,
+      g_param_spec_int ("linespace", "Linespace", "Control line spacing", 1,
+          100, 6,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_GAIN,
+      g_param_spec_int ("gain", "Gain", "Control gain", 1, 200, 50,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  gst_element_class_set_static_metadata (gstelement_class, "RevTV effect",
+      "Filter/Effect/Video",
+      "A video waveform monitor for each line of video processed",
+      "Wim Taymans <wim.taymans@gmail.be>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_revtv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_revtv_src_template);
+
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_revtv_transform_frame);
+}
+
+/* Instance init — defaults matching the property specs.
+ * NOTE(review): vgrabtime/vgrab are set (and "delay" is settable) but
+ * not used by the visible transform function — possibly vestigial from
+ * the EffecTV original; confirm. */
+static void
+gst_revtv_init (GstRevTV * restv)
+{
+  restv->vgrabtime = 1;
+  restv->vgrab = 0;
+  restv->linespace = 6;
+  restv->vscale = 50;
+}
diff --git a/gst/effectv/gstrev.h b/gst/effectv/gstrev.h
new file mode 100644
index 0000000000..198311b99b
--- /dev/null
+++ b/gst/effectv/gstrev.h
@@ -0,0 +1,87 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * revTV based on Rutt-Etra Video Synthesizer 1974?
+
+ * (c)2002 Ed Tannenbaum
+ *
+ * This effect acts like a waveform monitor on each line.
+ * It was originally done by deflecting the electron beam on a monitor using
+ * additional electromagnets on the yoke of a b/w CRT.
+ * Here it is emulated digitally.
+
+ * Experimental tapes were made with this system by Bill and
+ * Louise Etra and Woody and Steina Vasulka
+
+ * The line spacing can be controlled using the 1 and 2 Keys.
+ * The gain is controlled using the 3 and 4 keys.
+ * The update rate is controlled using the 0 and - keys.
+
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_REV_H__
+#define __GST_REV_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_REVTV \
+ (gst_revtv_get_type())
+#define GST_REVTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_REVTV,GstRevTV))
+#define GST_REVTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_REVTV,GstRevTVClass))
+#define GST_IS_REVTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_REVTV))
+#define GST_IS_REVTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_REVTV))
+
+typedef struct _GstRevTV GstRevTV;
+typedef struct _GstRevTVClass GstRevTVClass;
+
+struct _GstRevTV
+{
+  GstVideoFilter videofilter;
+
+  /* < private > */
+  gint vgrabtime;               /* "delay" property: frames between updates */
+  gint vgrab;                   /* frame counter — unused in the visible code */
+  gint linespace;               /* "linespace" property: gap between drawn lines */
+  gint vscale;                  /* "gain" property: luma-to-deflection divisor */
+};
+
+struct _GstRevTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_revtv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_REV_H__ */
diff --git a/gst/effectv/gstripple.c b/gst/effectv/gstripple.c
new file mode 100644
index 0000000000..1425b2b360
--- /dev/null
+++ b/gst/effectv/gstripple.c
@@ -0,0 +1,615 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * RippleTV - Water ripple effect.
+ * Copyright (C) 2001-2002 FUKUCHI Kentaro
+ *
+ * This combines the RippleTV and BaltanTV effects, which are
+ * very similar. BaltanTV is used if the feedback property is set
+ * to TRUE, otherwise RippleTV is used.
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rippletv
+ * @title: rippletv
+ *
+ * RippleTV does ripple mark effect on the video input. The ripple is caused
+ * by motion or random rain drops.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! rippletv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of rippletv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gstripple.h"
+#include "gsteffectv.h"
+
+#define DEFAULT_MODE 0
+
+enum
+{
+ PROP_0,
+ PROP_RESET,
+ PROP_MODE
+};
+
+static gint sqrtable[256];
+
+#define GST_TYPE_RIPPLETV_MODE (gst_rippletv_mode_get_type())
+/* Enum type for the "mode" property (motion detection vs. rain).
+ * Registration is guarded with g_once_init_enter()/leave() so that
+ * concurrent first calls cannot race and register the type twice
+ * (the original bare "if (!type)" check was not thread-safe). */
+static GType
+gst_rippletv_mode_get_type (void)
+{
+  static gsize type = 0;
+
+  static const GEnumValue enumvalue[] = {
+    {0, "Motion Detection", "motion-detection"},
+    {1, "Rain", "rain"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&type)) {
+    GType tmp = g_enum_register_static ("GstRippleTVMode", enumvalue);
+    g_once_init_leave (&type, tmp);
+  }
+  return (GType) type;
+}
+
+#define gst_rippletv_parent_class parent_class
+G_DEFINE_TYPE (GstRippleTV, gst_rippletv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (rippletv, "rippletv", GST_RANK_NONE,
+ GST_TYPE_RIPPLETV);
+
+static GstStaticPadTemplate gst_rippletv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
+ );
+
+static GstStaticPadTemplate gst_rippletv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
+ );
+
+static const gint point = 16;
+static const gint impact = 2;
+static const gint decay = 8;
+static const gint loopnum = 2;
+
+/* Fill the signed-square lookup table: indices 0..127 map to +i*i,
+ * indices 255..128 (bytes holding two's-complement -1..-128) map to
+ * -i*i. Used in gst_rippletv_transform_frame() to amplify wave-height
+ * differences nonlinearly. */
+static void
+setTable (void)
+{
+  gint i;
+
+  for (i = 0; i < 128; i++) {
+    sqrtable[i] = i * i;
+  }
+  for (i = 1; i <= 128; i++) {
+    sqrtable[256 - i] = -i * i;
+  }
+}
+
+/* Store a per-pixel brightness snapshot of the frame into 'background'.
+ * The value is a crude weighted luma, R*2 + G*4 + B: the reduced shift
+ * amounts extract each channel and scale it in one operation. */
+static void
+image_bgset_y (guint32 * src, gint16 * background, gint video_area)
+{
+  gint i;
+  gint R, G, B;
+  guint32 *p;
+  gint16 *q;
+
+  p = src;
+  q = background;
+  for (i = 0; i < video_area; i++) {
+    R = ((*p) & 0xff0000) >> (16 - 1);
+    G = ((*p) & 0xff00) >> (8 - 2);
+    B = (*p) & 0xff;
+    *q = (gint16) (R + G + B);
+    p++;
+    q++;
+  }
+}
+
+/* Capture the current frame's brightness as the motion-detection
+ * reference image and mark it as valid. Always returns 0. */
+static gint
+setBackground (GstRippleTV * filter, guint32 * src)
+{
+  GstVideoInfo *info = &GST_VIDEO_FILTER (filter)->in_info;
+  gint area = GST_VIDEO_INFO_WIDTH (info) * GST_VIDEO_INFO_HEIGHT (info);
+
+  image_bgset_y (src, filter->background, area);
+  filter->bg_is_set = TRUE;
+
+  return 0;
+}
+
+/* Compare each pixel's brightness against the stored background, replace
+ * the background with the new value, and write a binary motion mask to
+ * 'diff': 0xff where the brightness changed by more than 70*7, else 0.
+ * The branchless threshold relies on arithmetic right shift: (v + 490)
+ * or (490 - v) only goes negative -- shifting in one-bits -- when
+ * |v| > 490. */
+static void
+image_bgsubtract_update_y (guint32 * src, gint16 * background, guint8 * diff,
+    gint video_area)
+{
+  gint i;
+  gint R, G, B;
+  guint32 *p;
+  gint16 *q;
+  guint8 *r;
+  gint v;
+
+  p = src;
+  q = background;
+  r = diff;
+  for (i = 0; i < video_area; i++) {
+    /* same crude luma as image_bgset_y(): R*2 + G*4 + B */
+    R = ((*p) & 0xff0000) >> (16 - 1);
+    G = ((*p) & 0xff00) >> (8 - 2);
+    B = (*p) & 0xff;
+    v = (R + G + B) - (gint) (*q);
+    *q = (gint16) (R + G + B);
+    *r = ((v + 70 * 7) >> 24) | ((70 * 7 - v) >> 24);
+
+    p++;
+    q++;
+    r++;
+  }
+}
+
+/* Derive wave impacts from inter-frame motion. The full-resolution motion
+ * mask from image_bgsubtract_update_y() is downsampled in 2x2 blocks onto
+ * the half-resolution wave maps; wherever motion is found, an identical
+ * height impulse is written into both map1 and map2 (no initial velocity). */
+static void
+motiondetect (GstRippleTV * filter, guint32 * src)
+{
+  guint8 *diff = filter->diff;
+  gint width, height;
+  gint *p, *q;
+  gint x, y, h;
+  GstVideoInfo *info;
+
+  info = &GST_VIDEO_FILTER (filter)->in_info;
+
+  width = GST_VIDEO_INFO_WIDTH (info);
+  height = GST_VIDEO_INFO_HEIGHT (info);
+
+  /* first frame: only capture the reference image */
+  if (!filter->bg_is_set)
+    setBackground (filter, src);
+
+  image_bgsubtract_update_y (src, filter->background, filter->diff,
+      width * height);
+  /* skip the one-cell border of the maps and the matching mask offset */
+  p = filter->map1 + filter->map_w + 1;
+  q = filter->map2 + filter->map_w + 1;
+  diff += width + 2;
+
+  for (y = filter->map_h - 2; y > 0; y--) {
+    for (x = filter->map_w - 2; x > 0; x--) {
+      /* sum a 2x2 block of the mask (each entry is 0 or 0xff) */
+      h = (gint) * diff + (gint) * (diff + 1) + (gint) * (diff + width) +
+          (gint) * (diff + width + 1);
+      if (h > 0) {
+        /* scale into the fixed-point height representation */
+        *p = h << (point + impact - 8);
+        *q = *p;
+      }
+      p++;
+      q++;
+      diff += 2;
+    }
+    /* advance past the second scanline of the 2x2 blocks and the border */
+    diff += width + 2;
+    p += 2;
+    q += 2;
+  }
+}
+
+/* Stamp one rain drop into both height maps at a random position kept
+ * 2 cells clear of the border: full 'power' at the centre, half on the
+ * 4-connected neighbours, a quarter on the diagonals. */
+static inline void
+drop (gint power, gint * map1, gint * map2, gint map_w, gint map_h)
+{
+  gint x, y;
+  gint *p, *q;
+
+  x = fastrand () % (map_w - 4) + 2;
+  y = fastrand () % (map_h - 4) + 2;
+  p = map1 + y * map_w + x;
+  q = map2 + y * map_w + x;
+  *p = power;
+  *q = power;
+  *(p - map_w) = *(p - 1) = *(p + 1) = *(p + map_w) = power / 2;
+  *(p - map_w - 1) = *(p - map_w + 1) = *(p + map_w - 1) = *(p + map_w + 1) =
+      power / 4;
+  *(q - map_w) = *(q - 1) = *(q + 1) = *(q + map_w) = power / 2;
+  /* fixed: the last corner previously wrote through 'p' instead of 'q',
+   * so map2's bottom-right diagonal was never set */
+  *(q - map_w - 1) = *(q - map_w + 1) = *(q + map_w - 1) = *(q + map_w + 1) =
+      power / 4;
+}
+
+/* Advance the rain "weather" state machine by one frame and inject drops.
+ * 'period' counts the frames remaining in the current state. States 1 and
+ * 5 drop probabilistically (drizzle building up / fading out); states 2-4
+ * drop a steady, ramping number of drops per frame (rain). */
+static void
+raindrop (GstRippleTV * filter)
+{
+  gint i;
+
+  if (filter->period == 0) {
+    switch (filter->rain_stat) {
+      case 0:
+        /* prepare state 1: drizzle with probability ramping 0 -> ~1 */
+        filter->period = (fastrand () >> 23) + 100;
+        filter->drop_prob = 0;
+        filter->drop_prob_increment = 0x00ffffff / filter->period;
+        filter->drop_power = (-(fastrand () >> 28) - 2) << point;
+        filter->drops_per_frame_max = 2 << (fastrand () >> 30); // 2,4,8 or 16
+        filter->rain_stat = 1;
+        break;
+      case 1:
+        /* prepare state 2: fixed-rate rain ramping up to the maximum */
+        filter->drop_prob = 0x00ffffff;
+        filter->drops_per_frame = 1;
+        filter->drop_prob_increment = 1;
+        filter->period = (filter->drops_per_frame_max - 1) * 16;
+        filter->rain_stat = 2;
+        break;
+      case 2:
+        /* prepare state 3: steady rain at the maximum rate */
+        filter->period = (fastrand () >> 22) + 1000;
+        filter->drop_prob_increment = 0;
+        filter->rain_stat = 3;
+        break;
+      case 3:
+        /* prepare state 4: rain ramping back down */
+        filter->period = (filter->drops_per_frame_max - 1) * 16;
+        filter->drop_prob_increment = -1;
+        filter->rain_stat = 4;
+        break;
+      case 4:
+        /* prepare state 5: drizzle fading out over the period */
+        filter->period = (fastrand () >> 24) + 60;
+        filter->drop_prob_increment = -(filter->drop_prob / filter->period);
+        filter->rain_stat = 5;
+        break;
+      case 5:
+      default:
+        /* prepare state 0: calm for a while */
+        filter->period = (fastrand () >> 23) + 500;
+        filter->drop_prob = 0;
+        filter->rain_stat = 0;
+        break;
+    }
+  }
+  switch (filter->rain_stat) {
+    default:
+    case 0:
+      break;
+    case 1:
+    case 5:
+      /* probabilistic single drop */
+      if ((fastrand () >> 8) < filter->drop_prob) {
+        drop (filter->drop_power, filter->map1, filter->map2, filter->map_w,
+            filter->map_h);
+      }
+      filter->drop_prob += filter->drop_prob_increment;
+      break;
+    case 2:
+    case 3:
+    case 4:
+      /* fixed drop count per frame; drops_per_frame counts in 1/16ths */
+      for (i = filter->drops_per_frame / 16; i > 0; i--) {
+        drop (filter->drop_power, filter->map1, filter->map2, filter->map_w,
+            filter->map_h);
+      }
+      filter->drops_per_frame += filter->drop_prob_increment;
+      break;
+  }
+  filter->period--;
+}
+
+/* Per-frame processing: inject impacts (rain or motion), run the wave
+ * simulation on the half-resolution height maps, build a refraction
+ * vector table, and render the input displaced by those vectors. */
+static GstFlowReturn
+gst_rippletv_transform_frame (GstVideoFilter * vfilter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GstRippleTV *filter = GST_RIPPLETV (vfilter);
+  guint32 *src, *dest;
+  gint x, y, i;
+  gint dx, dy, o_dx;
+  gint h, v;
+  gint m_w, m_h, v_w, v_h;
+  gint *p, *q, *r;
+  gint8 *vp;
+  GstClockTime timestamp, stream_time;
+
+  /* sync controllable properties (mode, reset) to the stream time */
+  timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+  stream_time =
+      gst_segment_to_stream_time (&GST_BASE_TRANSFORM (vfilter)->segment,
+      GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  GST_OBJECT_LOCK (filter);
+  /* impact from the motion or rain drop */
+  if (filter->mode)
+    raindrop (filter);
+  else
+    motiondetect (filter, src);
+
+  m_w = filter->map_w;
+  m_h = filter->map_h;
+  v_w = GST_VIDEO_FRAME_WIDTH (in_frame);
+  v_h = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  /* simulate surface wave */
+
+  /* This function is called only 30 times per second. To increase a speed
+   * of wave, iterates this loop several times. */
+  for (i = loopnum; i > 0; i--) {
+    /* wave simulation: map1 is the current surface, map2 the previous
+     * one; the new surface is written to map3 */
+    p = filter->map1 + m_w + 1;
+    q = filter->map2 + m_w + 1;
+    r = filter->map3 + m_w + 1;
+    for (y = m_h - 2; y > 0; y--) {
+      for (x = m_w - 2; x > 0; x--) {
+        /* h: 8-neighbour Laplacian-like curvature term */
+        h = *(p - m_w - 1) + *(p - m_w + 1) + *(p + m_w - 1) + *(p + m_w + 1)
+            + *(p - m_w) + *(p - 1) + *(p + 1) + *(p + m_w) - (*p) * 9;
+        h = h >> 3;
+        /* v: vertical velocity (current minus previous height),
+         * accelerated by curvature and damped by the decay term */
+        v = *p - *q;
+        v += h - (v >> decay);
+        *r = v + *p;
+        p++;
+        q++;
+        r++;
+      }
+      p += 2;
+      q += 2;
+      r += 2;
+    }
+
+    /* low pass filter: smooth map3 into map2 */
+    p = filter->map3 + m_w + 1;
+    q = filter->map2 + m_w + 1;
+    for (y = m_h - 2; y > 0; y--) {
+      for (x = m_w - 2; x > 0; x--) {
+        h = *(p - m_w) + *(p - 1) + *(p + 1) + *(p + m_w) + (*p) * 60;
+        *q = h >> 6;
+        p++;
+        q++;
+      }
+      p += 2;
+      q += 2;
+    }
+
+    /* swap: map2 (the smoothed new surface) becomes the current map1 */
+    p = filter->map1;
+    filter->map1 = filter->map2;
+    filter->map2 = p;
+  }
+
+  /* build per-cell refraction vectors from the height gradient */
+  vp = filter->vtable;
+  p = filter->map1;
+  for (y = m_h - 1; y > 0; y--) {
+    for (x = m_w - 1; x > 0; x--) {
+      /* difference of the height between two voxel. They are twiced to
+       * emphasise the wave. */
+      vp[0] = sqrtable[((p[0] - p[1]) >> (point - 1)) & 0xff];
+      vp[1] = sqrtable[((p[0] - p[m_w]) >> (point - 1)) & 0xff];
+      p++;
+      vp += 2;
+    }
+    p++;
+    vp += 2;
+  }
+
+  vp = filter->vtable;
+
+  /* draw refracted image. The vector table is stretched. */
+  /* each map cell covers a 2x2 pixel block; the odd row/column vectors
+   * are interpolated and all displaced lookups clamped to the frame */
+  for (y = 0; y < v_h; y += 2) {
+    for (x = 0; x < v_w; x += 2) {
+      h = (gint) vp[0];
+      v = (gint) vp[1];
+      dx = x + h;
+      dy = y + v;
+      dx = CLAMP (dx, 0, (v_w - 2));
+      dy = CLAMP (dy, 0, (v_h - 2));
+      dest[0] = src[dy * v_w + dx];
+
+      o_dx = dx;
+
+      dx = x + 1 + (h + (gint) vp[2]) / 2;
+      dx = CLAMP (dx, 0, (v_w - 2));
+      dest[1] = src[dy * v_w + dx];
+
+      dy = y + 1 + (v + (gint) vp[m_w * 2 + 1]) / 2;
+      dy = CLAMP (dy, 0, (v_h - 2));
+      dest[v_w] = src[dy * v_w + o_dx];
+
+      dest[v_w + 1] = src[dy * v_w + dx];
+      dest += 2;
+      vp += 2;
+    }
+    dest += v_w;
+    vp += 2;
+  }
+  GST_OBJECT_UNLOCK (filter);
+
+  return GST_FLOW_OK;
+}
+
+/* Caps negotiated: (re)allocate all simulation buffers for the new size. */
+static gboolean
+gst_rippletv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstRippleTV *filter = GST_RIPPLETV (vfilter);
+  gint width, height;
+
+  width = GST_VIDEO_INFO_WIDTH (in_info);
+  height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+  GST_OBJECT_LOCK (filter);
+  /* the wave simulation runs at half the video resolution plus a border */
+  filter->map_h = height / 2 + 1;
+  filter->map_w = width / 2 + 1;
+
+  /* we over allocate the buffers, as the render code does not handle clipping
+   * very well */
+  /* one allocation backs all three height maps */
+  g_free (filter->map);
+  filter->map = g_new0 (gint, (1 + filter->map_h) * filter->map_w * 3);
+
+  filter->map1 = filter->map;
+  filter->map2 = filter->map + filter->map_w * filter->map_h;
+  filter->map3 = filter->map + filter->map_w * filter->map_h * 2;
+
+  /* 2 refraction-vector bytes per map cell */
+  g_free (filter->vtable);
+  filter->vtable = g_new0 (gint8, (1 + filter->map_h) * filter->map_w * 2);
+
+  g_free (filter->background);
+  filter->background = g_new0 (gint16, width * (height + 1));
+
+  g_free (filter->diff);
+  filter->diff = g_new0 (guint8, width * (height + 1));
+  GST_OBJECT_UNLOCK (filter);
+
+  return TRUE;
+}
+
+/* Reset the background capture and the rain state machine on (re)start. */
+static gboolean
+gst_rippletv_start (GstBaseTransform * trans)
+{
+  GstRippleTV *filter = GST_RIPPLETV (trans);
+
+  filter->bg_is_set = FALSE;
+
+  filter->period = 0;
+  filter->rain_stat = 0;
+  filter->drop_prob = 0;
+  filter->drop_prob_increment = 0;
+  filter->drops_per_frame_max = 0;
+  filter->drops_per_frame = 0;
+  filter->drop_power = 0;
+
+  return TRUE;
+}
+
+/* Release all simulation buffers before chaining up. */
+static void
+gst_rippletv_finalize (GObject * object)
+{
+  GstRippleTV *filter = GST_RIPPLETV (object);
+
+  g_free (filter->map);
+  g_free (filter->vtable);
+  g_free (filter->background);
+  g_free (filter->diff);
+
+  filter->map = NULL;
+  filter->vtable = NULL;
+  filter->background = NULL;
+  filter->diff = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Property setter; "reset" clears the active wave maps, "mode" selects
+ * motion detection vs. rain. Both run under the object lock because
+ * transform_frame() reads the same state. */
+static void
+gst_rippletv_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRippleTV *filter = GST_RIPPLETV (object);
+
+  GST_OBJECT_LOCK (filter);
+  switch (prop_id) {
+    case PROP_RESET:{
+      /* map is only allocated in set_info(); guard against a reset
+       * request arriving before caps have been negotiated */
+      if (filter->map)
+        memset (filter->map, 0,
+            filter->map_h * filter->map_w * 2 * sizeof (gint));
+      break;
+    }
+    case PROP_MODE:
+      filter->mode = g_value_get_enum (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (filter);
+}
+
+/* Property getter; takes the object lock when reading "mode" for
+ * consistency with set_property() and transform_frame(). */
+static void
+gst_rippletv_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstRippleTV *filter = GST_RIPPLETV (object);
+
+  switch (prop_id) {
+    case PROP_MODE:
+      GST_OBJECT_LOCK (filter);
+      g_value_set_enum (value, filter->mode);
+      GST_OBJECT_UNLOCK (filter);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class initialisation: properties, metadata, pad templates, vmethods. */
+static void
+gst_rippletv_class_init (GstRippleTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_rippletv_set_property;
+  gobject_class->get_property = gst_rippletv_get_property;
+
+  gobject_class->finalize = gst_rippletv_finalize;
+
+  /* both properties are controllable (synced to stream time in
+   * transform_frame via gst_object_sync_values) */
+  g_object_class_install_property (gobject_class, PROP_RESET,
+      g_param_spec_boolean ("reset", "Reset",
+          "Reset all current ripples", FALSE,
+          G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  g_object_class_install_property (gobject_class, PROP_MODE,
+      g_param_spec_enum ("mode", "Mode",
+          "Mode", GST_TYPE_RIPPLETV_MODE, DEFAULT_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  gst_element_class_set_static_metadata (gstelement_class, "RippleTV effect",
+      "Filter/Effect/Video",
+      "RippleTV does ripple mark effect on the video input",
+      "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rippletv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rippletv_src_template);
+
+  trans_class->start = GST_DEBUG_FUNCPTR (gst_rippletv_start);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_rippletv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_rippletv_transform_frame);
+
+  /* build the shared signed-square lookup table once per class init;
+   * the table contents are identical on every call */
+  setTable ();
+
+  gst_type_mark_as_plugin_api (GST_TYPE_RIPPLETV_MODE, 0);
+}
+
+/* Instance initialisation; buffers are allocated later in set_info(). */
+static void
+gst_rippletv_init (GstRippleTV * filter)
+{
+  filter->mode = DEFAULT_MODE;
+
+  /* FIXME: remove this when memory corruption after resizes are fixed */
+  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (filter));
+  gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (filter));
+}
diff --git a/gst/effectv/gstripple.h b/gst/effectv/gstripple.h
new file mode 100644
index 0000000000..329e54308d
--- /dev/null
+++ b/gst/effectv/gstripple.h
@@ -0,0 +1,86 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * RippleTV - Water ripple effect.
+ * Copyright (C) 2001-2002 FUKUCHI Kentaro
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RIPPLE_H__
+#define __GST_RIPPLE_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RIPPLETV \
+ (gst_rippletv_get_type())
+#define GST_RIPPLETV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RIPPLETV,GstRippleTV))
+#define GST_RIPPLETV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RIPPLETV,GstRippleTVClass))
+#define GST_IS_RIPPLETV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RIPPLETV))
+#define GST_IS_RIPPLETV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RIPPLETV))
+
+typedef struct _GstRippleTV GstRippleTV;
+typedef struct _GstRippleTVClass GstRippleTVClass;
+
+struct _GstRippleTV
+{
+  GstVideoFilter element;
+
+  /* < private > */
+  gint mode;                    /* 0 = motion detection, 1 = rain */
+
+  gint16 *background;           /* brightness reference for motion detection */
+  guint8 *diff;                 /* per-pixel motion mask (0 or 0xff) */
+
+  /* wave height maps at half video resolution; 'map' is the single
+   * backing allocation, map1/map2/map3 point into it */
+  gint *map, *map1, *map2, *map3;
+  gint map_h, map_w;
+
+  gint8 *vtable;                /* refraction vectors, 2 bytes per map cell */
+
+  gboolean bg_is_set;           /* TRUE once the background was captured */
+
+  /* rain state machine, see raindrop() in gstripple.c */
+  gint period;
+  gint rain_stat;
+  guint drop_prob;
+  gint drop_prob_increment;
+  gint drops_per_frame_max;
+  gint drops_per_frame;
+  gint drop_power;
+};
+
+struct _GstRippleTVClass
+{
+  GstVideoFilterClass parent_class;
+};
+
+GType gst_rippletv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RIPPLE_H__ */
diff --git a/gst/effectv/gstshagadelic.c b/gst/effectv/gstshagadelic.c
new file mode 100644
index 0000000000..767aa64958
--- /dev/null
+++ b/gst/effectv/gstshagadelic.c
@@ -0,0 +1,258 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * Inspired by Adrian Likin's script for the GIMP.
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-shagadelictv
+ * @title: shagadelictv
+ *
+ * Oh behave, ShagedelicTV makes images shagadelic!
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! shagadelictv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of shagadelictv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gstshagadelic.h"
+#include "gsteffectv.h"
+
+#ifndef M_PI
+#define M_PI 3.14159265358979323846
+#endif
+
+#define gst_shagadelictv_parent_class parent_class
+G_DEFINE_TYPE (GstShagadelicTV, gst_shagadelictv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (shagadelictv, "shagadelictv",
+ GST_RANK_NONE, GST_TYPE_SHAGADELICTV);
+
+static void gst_shagadelic_initialize (GstShagadelicTV * filter,
+ GstVideoInfo * in_info);
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("BGRx")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("xRGB")
+#endif
+
+static GstStaticPadTemplate gst_shagadelictv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+static GstStaticPadTemplate gst_shagadelictv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+/* Caps negotiated: rebuild the lookup tables for the new frame size. */
+static gboolean
+gst_shagadelictv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
+  gint area;
+
+  area = GST_VIDEO_INFO_WIDTH (in_info) * GST_VIDEO_INFO_HEIGHT (in_info);
+
+  /* drop tables from any previous negotiation */
+  g_free (filter->ripple);
+  g_free (filter->spiral);
+
+  /* the ripple table covers a doubled (2w x 2h) grid, hence area * 4 */
+  filter->ripple = (guint8 *) g_malloc (area * 4);
+  filter->spiral = (guint8 *) g_malloc (area);
+
+  gst_shagadelic_initialize (filter, in_info);
+
+  return TRUE;
+}
+
+/* Precompute the ripple table (concentric rings over a doubled grid) and
+ * the spiral table (angle plus radius phase), then randomize the initial
+ * table offsets and velocities used for the red and blue channels. */
+static void
+gst_shagadelic_initialize (GstShagadelicTV * filter, GstVideoInfo * info)
+{
+  int i, x, y;
+#ifdef PS2
+  float xx, yy;
+#else
+  double xx, yy;
+#endif
+  gint width, height;
+
+  width = GST_VIDEO_INFO_WIDTH (info);
+  height = GST_VIDEO_INFO_HEIGHT (info);
+
+  i = 0;
+  /* ripple: distance from the centre of the doubled grid, wrapped to a
+   * byte (yy already holds the squared y offset) */
+  for (y = 0; y < height * 2; y++) {
+    yy = y - height;
+    yy *= yy;
+
+    for (x = 0; x < width * 2; x++) {
+      xx = x - width;
+#ifdef PS2
+      filter->ripple[i++] = ((unsigned int) (sqrtf (xx * xx + yy) * 8)) & 255;
+#else
+      filter->ripple[i++] = ((unsigned int) (sqrt (xx * xx + yy) * 8)) & 255;
+#endif
+    }
+  }
+
+  i = 0;
+  /* spiral: angle around the frame centre combined with the radius */
+  for (y = 0; y < height; y++) {
+    yy = y - height / 2;
+
+    for (x = 0; x < width; x++) {
+      xx = x - width / 2;
+#ifdef PS2
+      filter->spiral[i++] = ((unsigned int)
+          ((atan2f (xx,
+                      yy) / ((float) M_PI) * 256 * 9) + (sqrtf (xx * xx +
+                      yy * yy) * 5))) & 255;
+#else
+      filter->spiral[i++] = ((unsigned int)
+          ((atan2 (xx, yy) / M_PI * 256 * 9) + (sqrt (xx * xx +
+                      yy * yy) * 5))) & 255;
+#endif
+/* Here is another Swinger!
+ * ((atan2(xx, yy)/M_PI*256) + (sqrt(xx*xx+yy*yy)*10))&255;
+ */
+    }
+  }
+  /* random start offsets and fixed velocities for the moving tables */
+  filter->rx = fastrand () % width;
+  filter->ry = fastrand () % height;
+  filter->bx = fastrand () % width;
+  filter->by = fastrand () % height;
+  filter->rvx = -2;
+  filter->rvy = -2;
+  filter->bvx = 2;
+  filter->bvy = 2;
+  filter->phase = 0;
+}
+
+/* Render one frame: threshold each colour channel of the input and gate
+ * it with square waves read from the phase-shifted ripple/spiral tables,
+ * then advance the animation phase and bounce the table offsets off the
+ * frame borders. */
+static GstFlowReturn
+gst_shagadelictv_transform_frame (GstVideoFilter * vfilter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GstShagadelicTV *filter = GST_SHAGADELICTV (vfilter);
+  guint32 *src, *dest;
+  gint x, y;
+  guint32 v;
+  guint8 r, g, b;
+  gint width, height;
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  for (y = 0; y < height; y++) {
+    for (x = 0; x < width; x++) {
+      /* branchless per-channel threshold: the guard bit above each
+       * channel survives the biased subtract only if the channel exceeds
+       * the bias; 'v -= v >> 8' expands it to a 0x00/0xff channel mask */
+      v = *src++ | 0x1010100;
+      v = (v - 0x707060) & 0x1010100;
+      v -= v >> 8;
+/* Try another Babe!
+ * v = *src++;
+ * *dest++ = v & ((r<<16)|(g<<8)|b);
+ */
+      /* r/g/b: sign bit of the phase-shifted table byte, spread to a full
+       * 0x00/0xff mask by the arithmetic shift of the gint8 cast */
+      r = ((gint8) (filter->ripple[(filter->ry + y) * width * 2 + filter->rx +
+                  x] + filter->phase * 2)) >> 7;
+      g = ((gint8) (filter->spiral[y * width + x] + filter->phase * 3)) >> 7;
+      b = ((gint8) (filter->ripple[(filter->by + y) * width * 2 + filter->bx +
+                  x] - filter->phase)) >> 7;
+      *dest++ = v & ((r << 16) | (g << 8) | b);
+    }
+  }
+
+  /* advance the animation and bounce table offsets at the borders */
+  filter->phase -= 8;
+  if ((filter->rx + filter->rvx) < 0 || (filter->rx + filter->rvx) >= width)
+    filter->rvx = -filter->rvx;
+  if ((filter->ry + filter->rvy) < 0 || (filter->ry + filter->rvy) >= height)
+    filter->rvy = -filter->rvy;
+  if ((filter->bx + filter->bvx) < 0 || (filter->bx + filter->bvx) >= width)
+    filter->bvx = -filter->bvx;
+  if ((filter->by + filter->bvy) < 0 || (filter->by + filter->bvy) >= height)
+    filter->bvy = -filter->bvy;
+  filter->rx += filter->rvx;
+  filter->ry += filter->rvy;
+  filter->bx += filter->bvx;
+  filter->by += filter->bvy;
+
+  return GST_FLOW_OK;
+}
+
+/* Free the lookup tables before chaining up (g_free is NULL-safe). */
+static void
+gst_shagadelictv_finalize (GObject * object)
+{
+  GstShagadelicTV *filter = GST_SHAGADELICTV (object);
+
+  g_free (filter->ripple);
+  g_free (filter->spiral);
+  filter->ripple = NULL;
+  filter->spiral = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class initialisation: metadata, pad templates and vmethods. */
+static void
+gst_shagadelictv_class_init (GstShagadelicTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->finalize = gst_shagadelictv_finalize;
+
+  gst_element_class_set_static_metadata (gstelement_class, "ShagadelicTV",
+      "Filter/Effect/Video",
+      "Oh behave, ShagedelicTV makes images shagadelic!",
+      "Wim Taymans <wim.taymans@chello.be>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_shagadelictv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_shagadelictv_src_template);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_shagadelictv_transform_frame);
+}
+
+/* Instance initialisation; tables are allocated in set_info() once the
+ * video size is known. */
+static void
+gst_shagadelictv_init (GstShagadelicTV * filter)
+{
+  filter->ripple = NULL;
+  filter->spiral = NULL;
+}
diff --git a/gst/effectv/gstshagadelic.h b/gst/effectv/gstshagadelic.h
new file mode 100644
index 0000000000..a89cbf43c4
--- /dev/null
+++ b/gst/effectv/gstshagadelic.h
@@ -0,0 +1,72 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * Inspired by Adrian Likin's script for the GIMP.
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_SHAGADELIC_H__
+#define __GST_SHAGADELIC_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_SHAGADELICTV \
+ (gst_shagadelictv_get_type())
+#define GST_SHAGADELICTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SHAGADELICTV,GstShagadelicTV))
+#define GST_SHAGADELICTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SHAGADELICTV,GstShagadelicTVClass))
+#define GST_IS_SHAGADELICTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SHAGADELICTV))
+#define GST_IS_SHAGADELICTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SHAGADELICTV))
+
+typedef struct _GstShagadelicTV GstShagadelicTV;
+typedef struct _GstShagadelicTVClass GstShagadelicTVClass;
+
+struct _GstShagadelicTV
+{
+  GstVideoFilter videofilter;
+
+  /* < private > */
+  guint8 *ripple;               /* ring phase table, (2w x 2h) bytes */
+  guint8 *spiral;               /* spiral phase table, (w x h) bytes */
+  guint8 phase;                 /* animation phase, decremented per frame */
+  gint rx, ry;                  /* red channel table offset */
+  gint bx, by;                  /* blue channel table offset */
+  gint rvx, rvy;                /* red offset velocity */
+  gint bvx, bvy;                /* blue offset velocity */
+};
+
+struct _GstShagadelicTVClass
+{
+  GstVideoFilterClass parent_class;
+};
+
+GType gst_shagadelictv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_SHAGADELIC_H__ */
diff --git a/gst/effectv/gststreak.c b/gst/effectv/gststreak.c
new file mode 100644
index 0000000000..e34eb20632
--- /dev/null
+++ b/gst/effectv/gststreak.c
@@ -0,0 +1,267 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * StreakTV - afterimage effector.
+ * Copyright (C) 2001-2002 FUKUCHI Kentaro
+ *
+ * This combines the StreakTV and BaltanTV effects, which are
+ * very similar. BaltanTV is used if the feedback property is set
+ * to TRUE, otherwise StreakTV is used.
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-streaktv
+ * @title: streaktv
+ *
+ * StreakTV makes after images of moving objects.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! streaktv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of streaktv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gststreak.h"
+#include "gsteffectv.h"
+
+#define DEFAULT_FEEDBACK FALSE
+
+enum
+{
+ PROP_0,
+ PROP_FEEDBACK
+};
+
+#define gst_streaktv_parent_class parent_class
+G_DEFINE_TYPE (GstStreakTV, gst_streaktv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (streaktv, "streaktv", GST_RANK_NONE,
+ GST_TYPE_STREAKTV);
+
+static GstStaticPadTemplate gst_streaktv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
+ );
+
+static GstStaticPadTemplate gst_streaktv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx, xBGR, xRGB }"))
+ );
+
+
+/* Sum the current frame with previously stored frames to leave after-
+ * images. Each stored plane keeps pixels with reduced per-channel depth
+ * (low bits masked then shifted away) so summing 4 planes (feedback /
+ * BaltanTV) or 8 planes (StreakTV) cannot carry between colour channels.
+ * Note: 'stride' here is the spacing between summed planes inside the
+ * PLANES-deep ring buffer, not a byte stride. */
+static GstFlowReturn
+gst_streaktv_transform_frame (GstVideoFilter * vfilter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GstStreakTV *filter = GST_STREAKTV (vfilter);
+  guint32 *src, *dest;
+  gint i, cf;
+  gint video_area, width, height;
+  guint32 **planetable = filter->planetable;
+  gint plane = filter->plane;
+  guint stride_mask, stride_shift, stride;
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+  width = GST_VIDEO_FRAME_WIDTH (in_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+  video_area = width * height;
+
+  GST_OBJECT_LOCK (filter);
+  /* feedback: 6 bits/channel, sum 4 planes; normal: 5 bits, sum 8 */
+  if (filter->feedback) {
+    stride_mask = 0xfcfcfcfc;
+    stride = 8;
+    stride_shift = 2;
+  } else {
+    stride_mask = 0xf8f8f8f8;
+    stride = 4;
+    stride_shift = 3;
+  }
+
+  /* store the reduced-depth current frame into the ring buffer */
+  for (i = 0; i < video_area; i++) {
+    planetable[plane][i] = (src[i] & stride_mask) >> stride_shift;
+  }
+
+  /* select which interleaved set of planes to sum for this frame */
+  cf = plane & (stride - 1);
+  if (filter->feedback) {
+    for (i = 0; i < video_area; i++) {
+      dest[i] = planetable[cf][i]
+          + planetable[cf + stride][i]
+          + planetable[cf + stride * 2][i]
+          + planetable[cf + stride * 3][i];
+      /* feedback: re-store the summed output as the current plane */
+      planetable[plane][i] = (dest[i] & stride_mask) >> stride_shift;
+    }
+  } else {
+    for (i = 0; i < video_area; i++) {
+      dest[i] = planetable[cf][i]
+          + planetable[cf + stride][i]
+          + planetable[cf + stride * 2][i]
+          + planetable[cf + stride * 3][i]
+          + planetable[cf + stride * 4][i]
+          + planetable[cf + stride * 5][i]
+          + planetable[cf + stride * 6][i]
+          + planetable[cf + stride * 7][i];
+    }
+  }
+
+  /* advance the ring buffer write position */
+  plane++;
+  filter->plane = plane & (PLANES - 1);
+  GST_OBJECT_UNLOCK (filter);
+
+  return GST_FLOW_OK;
+}
+
+/* Caps negotiated: (re)allocate the plane ring buffer for the new size. */
+static gboolean
+gst_streaktv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstStreakTV *filter = GST_STREAKTV (vfilter);
+  gint i, width, height;
+
+  width = GST_VIDEO_INFO_WIDTH (in_info);
+  height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+  g_free (filter->planebuffer);
+
+  /* NOTE(review): this allocates 4x what planetable[] below addresses
+   * (width * height * PLANES guint32) -- presumably defensive headroom;
+   * verify before shrinking */
+  filter->planebuffer = g_new0 (guint32, width * height * 4 * PLANES);
+
+  for (i = 0; i < PLANES; i++)
+    filter->planetable[i] = &filter->planebuffer[width * height * i];
+
+  return TRUE;
+}
+
+/* Restart the plane ring buffer at the first slot on (re)start. */
+static gboolean
+gst_streaktv_start (GstBaseTransform * trans)
+{
+  GstStreakTV *filter = GST_STREAKTV (trans);
+
+  filter->plane = 0;
+
+  return TRUE;
+}
+
+/* Release the plane ring buffer before chaining up. */
+static void
+gst_streaktv_finalize (GObject * object)
+{
+  GstStreakTV *filter = GST_STREAKTV (object);
+
+  /* g_free() is NULL-safe, no guard needed */
+  g_free (filter->planebuffer);
+  filter->planebuffer = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Property setter; "feedback" selects the BaltanTV variant. It may only
+ * change while not running, since the layout of the stored plane data
+ * (mask/shift/stride) depends on it. */
+static void
+gst_streaktv_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstStreakTV *filter = GST_STREAKTV (object);
+
+  switch (prop_id) {
+    case PROP_FEEDBACK:
+      if (G_UNLIKELY (GST_STATE (filter) >= GST_STATE_PAUSED)) {
+        g_warning ("Changing the \"feedback\" property only allowed "
+            "in state < PLAYING");
+        return;
+      }
+
+      filter->feedback = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Property getter for "feedback". */
+static void
+gst_streaktv_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstStreakTV *filter = GST_STREAKTV (object);
+
+  switch (prop_id) {
+    case PROP_FEEDBACK:
+      g_value_set_boolean (value, filter->feedback);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class initialisation: property, metadata, pad templates, vmethods. */
+static void
+gst_streaktv_class_init (GstStreakTVClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_streaktv_set_property;
+  gobject_class->get_property = gst_streaktv_get_property;
+
+  gobject_class->finalize = gst_streaktv_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_FEEDBACK,
+      g_param_spec_boolean ("feedback", "Feedback",
+          "Feedback", DEFAULT_FEEDBACK,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_set_static_metadata (gstelement_class, "StreakTV effect",
+      "Filter/Effect/Video",
+      "StreakTV makes after images of moving objects",
+      "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>, "
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_streaktv_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_streaktv_src_template);
+
+  trans_class->start = GST_DEBUG_FUNCPTR (gst_streaktv_start);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_streaktv_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_streaktv_transform_frame);
+}
+
+/* Instance initialisation; the plane buffer is allocated in set_info(). */
+static void
+gst_streaktv_init (GstStreakTV * filter)
+{
+  filter->feedback = DEFAULT_FEEDBACK;
+}
diff --git a/gst/effectv/gststreak.h b/gst/effectv/gststreak.h
new file mode 100644
index 0000000000..5d9362de0e
--- /dev/null
+++ b/gst/effectv/gststreak.h
@@ -0,0 +1,74 @@
+/* GStreamer
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001-2006 FUKUCHI Kentaro
+ *
+ * StreakTV - afterimage effector.
+ * Copyright (C) 2001-2002 FUKUCHI Kentaro
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_STREAK_H__
+#define __GST_STREAK_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_STREAKTV \
+ (gst_streaktv_get_type())
+#define GST_STREAKTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_STREAKTV,GstStreakTV))
+#define GST_STREAKTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_STREAKTV,GstStreakTVClass))
+#define GST_IS_STREAKTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_STREAKTV))
+#define GST_IS_STREAKTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_STREAKTV))
+
+typedef struct _GstStreakTV GstStreakTV;
+typedef struct _GstStreakTVClass GstStreakTVClass;
+
+#define PLANES 32
+
+struct _GstStreakTV
+{
+ GstVideoFilter element;
+
+ /* < private > */
+ gboolean feedback; /* "feedback" property value */
+
+ guint32 *planebuffer; /* backing allocation; presumably holds all PLANES
+ * frames contiguously — confirm against set_info */
+ guint32 *planetable[PLANES]; /* per-plane pointers into planebuffer */
+ gint plane; /* index of the plane to write next */
+};
+
+struct _GstStreakTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_streaktv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_STREAK_H__ */
diff --git a/gst/effectv/gstvertigo.c b/gst/effectv/gstvertigo.c
new file mode 100644
index 0000000000..2be7fd58e2
--- /dev/null
+++ b/gst/effectv/gstvertigo.c
@@ -0,0 +1,330 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-vertigotv
+ * @title: vertigotv
+ *
+ * VertigoTV is a loopback alpha blending effector with rotating and scaling.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! vertigotv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of vertigotv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include "gsteffectv.h"
+#include "gstvertigo.h"
+
+#define gst_vertigotv_parent_class parent_class
+G_DEFINE_TYPE (GstVertigoTV, gst_vertigotv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (vertigotv, "vertigotv", GST_RANK_NONE,
+ GST_TYPE_VERTIGOTV);
+
+/* Filter signals and args */
+enum
+{
+ PROP_0,
+ PROP_SPEED,
+ PROP_ZOOM_SPEED
+};
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ RGBx, BGRx }")
+#else
+#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xRGB, xBGR }")
+#endif
+
+static GstStaticPadTemplate gst_vertigotv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+static GstStaticPadTemplate gst_vertigotv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (CAPS_STR)
+ );
+
+/* GstVideoFilter::set_info vfunc: (re)allocates the double feedback buffer
+ * (2 * width * height pixels, zero-initialised) on caps changes and resets
+ * the animation phase. The two halves are used as current/alternate
+ * loopback buffers by transform_frame. */
+static gboolean
+gst_vertigotv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+ GstVertigoTV *filter = GST_VERTIGOTV (vfilter);
+ gint area, width, height;
+
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+ area = width * height;
+
+ /* free any buffer from a previous negotiation before reallocating */
+ g_free (filter->buffer);
+ filter->buffer = (guint32 *) g_malloc0 (area * 2 * sizeof (guint32));
+
+ filter->current_buffer = filter->buffer;
+ filter->alt_buffer = filter->buffer + area;
+ filter->phase = 0;
+
+ return TRUE;
+}
+
+/* Recomputes the per-frame sampling transform for the feedback buffer:
+ * dx/dy are the per-pixel steps and sx/sy the row origin, all in 16.16
+ * fixed point, derived from the current phase. "dizz" wobbles the
+ * rotation centre over time; zoomrate controls the spiral zoom. */
+static void
+gst_vertigotv_set_parms (GstVertigoTV * filter)
+{
+ double vx, vy;
+ double t;
+ double x, y;
+ double dizz;
+ gint width, height;
+ GstVideoInfo *info;
+
+ dizz = sin (filter->phase) * 10 + sin (filter->phase * 1.9 + 5) * 5;
+
+ info = &GST_VIDEO_FILTER (filter)->in_info;
+
+ width = GST_VIDEO_INFO_WIDTH (info);
+ height = GST_VIDEO_INFO_HEIGHT (info);
+
+ /* frame centre */
+ x = width / 2;
+ y = height / 2;
+
+ t = (x * x + y * y) * filter->zoomrate;
+
+ /* clamp the wobble to the shorter half-dimension and derive the
+ * normalised transform vector (vx, vy) from it */
+ if (width > height) {
+ if (dizz >= 0) {
+ if (dizz > x)
+ dizz = x;
+ vx = (x * (x - dizz) + y * y) / t;
+ } else {
+ if (dizz < -x)
+ dizz = -x;
+ vx = (x * (x + dizz) + y * y) / t;
+ }
+ vy = (dizz * y) / t;
+ } else {
+ if (dizz >= 0) {
+ if (dizz > y)
+ dizz = y;
+ vx = (x * x + y * (y - dizz)) / t;
+ } else {
+ if (dizz < -y)
+ dizz = -y;
+ vx = (x * x + y * (y + dizz)) / t;
+ }
+ vy = (dizz * x) / t;
+ }
+ /* convert to 16.16 fixed point; sx/sy get an extra small circular
+ * jitter from the phase so the centre drifts */
+ filter->dx = vx * 65536;
+ filter->dy = vy * 65536;
+ filter->sx = (-vx * x + vy * y + x + cos (filter->phase * 5) * 2) * 65536;
+ filter->sy = (-vx * y - vy * x + y + sin (filter->phase * 6) * 2) * 65536;
+
+ filter->phase += filter->phase_increment;
+ if (filter->phase > 5700000)
+ filter->phase = 0;
+}
+
+/* Per-frame transform: blends the incoming frame 1:3 with a rotated and
+ * scaled sampling of the previous output (loopback feedback). The result
+ * is written both to the output frame and to the alternate feedback
+ * buffer, which is then swapped in for the next frame. */
+static GstFlowReturn
+gst_vertigotv_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+ GstVertigoTV *filter = GST_VERTIGOTV (vfilter);
+ guint32 *src, *dest, *p;
+ guint32 v;
+ gint x, y, ox, oy, i, width, height, area, sstride, dstride;
+ GstClockTime timestamp, stream_time;
+
+ /* sync controller-bound properties to the buffer's stream time */
+ timestamp = GST_BUFFER_TIMESTAMP (in_frame->buffer);
+ stream_time =
+ gst_segment_to_stream_time (&GST_BASE_TRANSFORM (filter)->segment,
+ GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (filter, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (filter), stream_time);
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+ dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ area = width * height;
+
+ /* strides are in bytes; convert to 32-bit-pixel units */
+ sstride /= 4;
+ dstride /= 4;
+
+ gst_vertigotv_set_parms (filter);
+ p = filter->alt_buffer;
+
+ for (y = 0; y < height; y++) {
+ /* 16.16 fixed-point source coordinate for this row */
+ ox = filter->sx;
+ oy = filter->sy;
+
+ for (x = 0; x < width; x++) {
+ i = (oy >> 16) * width + (ox >> 16);
+ if (i < 0)
+ i = 0;
+ /* clamp to the last valid pixel; the original clamped to 'area',
+ * i.e. one element past the end of the feedback frame */
+ if (i >= area)
+ i = area - 1;
+
+ /* 3:1 blend of feedback and source pixel; the 0xfcfcff mask drops
+ * low bits so the per-channel sums stay (mostly) free of
+ * cross-channel carry before the >> 2 divide */
+ v = filter->current_buffer[i] & 0xfcfcff;
+ v = (v * 3) + (src[x] & 0xfcfcff);
+
+ *p++ = dest[x] = (v >> 2);
+ ox += filter->dx;
+ oy += filter->dy;
+ }
+ /* rotate the row origin for the next line */
+ filter->sx -= filter->dy;
+ filter->sy += filter->dx;
+
+ src += sstride;
+ dest += dstride;
+ }
+
+ /* swap feedback buffers so the next frame samples this frame's output */
+ p = filter->current_buffer;
+ filter->current_buffer = filter->alt_buffer;
+ filter->alt_buffer = p;
+
+ return GST_FLOW_OK;
+}
+
+/* GstBaseTransform::start vfunc: resets the animation phase at the start
+ * of streaming. */
+static gboolean
+gst_vertigotv_start (GstBaseTransform * trans)
+{
+ GstVertigoTV *filter = GST_VERTIGOTV (trans);
+
+ filter->phase = 0.0;
+
+ return TRUE;
+}
+
+/* GObject::set_property vfunc: updates animation parameters under the
+ * object lock (they are read by the streaming thread). */
+static void
+gst_vertigotv_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVertigoTV *filter = GST_VERTIGOTV (object);
+
+ GST_OBJECT_LOCK (filter);
+ switch (prop_id) {
+ case PROP_SPEED:
+ filter->phase_increment = g_value_get_float (value);
+ break;
+ case PROP_ZOOM_SPEED:
+ filter->zoomrate = g_value_get_float (value);
+ break;
+ default:
+ /* warn on unknown ids, consistent with get_property */
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (filter);
+}
+
+/* GObject::get_property vfunc: reads animation parameters.
+ * NOTE(review): reads are not taken under GST_OBJECT_LOCK although
+ * set_property writes under it — plain float reads, presumably deemed
+ * harmless; confirm if torn reads matter here. */
+static void
+gst_vertigotv_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstVertigoTV *filter = GST_VERTIGOTV (object);
+
+ switch (prop_id) {
+ case PROP_SPEED:
+ g_value_set_float (value, filter->phase_increment);
+ break;
+ case PROP_ZOOM_SPEED:
+ g_value_set_float (value, filter->zoomrate);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject::finalize vfunc: releases the feedback buffer and chains up. */
+static void
+gst_vertigotv_finalize (GObject * object)
+{
+ GstVertigoTV *filter = GST_VERTIGOTV (object);
+
+ g_free (filter->buffer);
+ filter->buffer = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class initialisation: installs the "speed" and "zoom-speed" properties,
+ * registers metadata and pad templates, and wires the transform vfuncs. */
+static void
+gst_vertigotv_class_init (GstVertigoTVClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->set_property = gst_vertigotv_set_property;
+ gobject_class->get_property = gst_vertigotv_get_property;
+ gobject_class->finalize = gst_vertigotv_finalize;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SPEED,
+ g_param_spec_float ("speed", "Speed", "Control the speed of movement",
+ 0.01, 100.0, 0.02, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ZOOM_SPEED,
+ g_param_spec_float ("zoom-speed", "Zoom Speed",
+ "Control the rate of zooming", 1.01, 1.1, 1.01,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class, "VertigoTV effect",
+ "Filter/Effect/Video",
+ "A loopback alpha blending effector with rotating and scaling",
+ "Wim Taymans <wim.taymans@gmail.be>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_vertigotv_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_vertigotv_src_template);
+
+ trans_class->start = GST_DEBUG_FUNCPTR (gst_vertigotv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_vertigotv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_vertigotv_transform_frame);
+}
+
+/* Instance initialisation: defaults matching the installed pspecs;
+ * the feedback buffer is allocated in set_info. */
+static void
+gst_vertigotv_init (GstVertigoTV * filter)
+{
+ filter->buffer = NULL;
+ filter->phase = 0.0;
+ filter->phase_increment = 0.02;
+ filter->zoomrate = 1.01;
+}
diff --git a/gst/effectv/gstvertigo.h b/gst/effectv/gstvertigo.h
new file mode 100644
index 0000000000..39c225b355
--- /dev/null
+++ b/gst/effectv/gstvertigo.h
@@ -0,0 +1,71 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * EffecTV:
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * EffecTV is free software. This library is free software;
+ * you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VERTIGO_H__
+#define __GST_VERTIGO_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VERTIGOTV \
+ (gst_vertigotv_get_type())
+#define GST_VERTIGOTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VERTIGOTV,GstVertigoTV))
+#define GST_VERTIGOTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VERTIGOTV,GstVertigoTVClass))
+#define GST_IS_VERTIGOTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VERTIGOTV))
+#define GST_IS_VERTIGOTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VERTIGOTV))
+
+typedef struct _GstVertigoTV GstVertigoTV;
+typedef struct _GstVertigoTVClass GstVertigoTVClass;
+
+struct _GstVertigoTV
+{
+ GstVideoFilter videofilter;
+
+ /* < private > */
+ guint32 *buffer; /* backing allocation: 2 frames of feedback pixels */
+ guint32 *current_buffer, *alt_buffer; /* halves of buffer, swapped per frame */
+ gint dx, dy; /* per-pixel sampling step, 16.16 fixed point */
+ gint sx, sy; /* row sampling origin, 16.16 fixed point */
+ gdouble phase; /* animation phase */
+ gdouble phase_increment; /* "speed" property */
+ gdouble zoomrate; /* "zoom-speed" property */
+};
+
+struct _GstVertigoTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_vertigotv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_VERTIGO_H__ */
diff --git a/gst/effectv/gstwarp.c b/gst/effectv/gstwarp.c
new file mode 100644
index 0000000000..2915b4270d
--- /dev/null
+++ b/gst/effectv/gstwarp.c
@@ -0,0 +1,268 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This file was (probably) generated from gstvideotemplate.c,
+ * gstvideotemplate.c,v 1.11 2004/01/07 08:56:45 ds Exp
+ */
+
+/* From main.c of warp-1.1:
+ *
+ * Simple DirectMedia Layer demo
+ * Realtime picture 'gooing'
+ * by sam lantinga slouken@devolution.com
+ */
+
+/**
+ * SECTION:element-warptv
+ * @title: warptv
+ *
+ * WarpTV does realtime goo'ing of the video input.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! warptv ! videoconvert ! autovideosink
+ * ]| This pipeline shows the effect of warptv on a test stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gsteffectv.h"
+#include <string.h>
+#include <math.h>
+
+#include "gstwarp.h"
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
+
+#ifndef M_PI
+#define M_PI 3.14159265358979323846
+#endif
+
+#define gst_warptv_parent_class parent_class
+G_DEFINE_TYPE (GstWarpTV, gst_warptv, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (warptv, "warptv", GST_RANK_NONE, GST_TYPE_WARPTV);
+
+static void initSinTable ();
+static void initDistTable (GstWarpTV * filter, gint width, gint height);
+
+static GstStaticPadTemplate gst_warptv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
+ );
+
+static GstStaticPadTemplate gst_warptv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ RGBx, xRGB, BGRx, xBGR }"))
+ );
+
+/* GstVideoFilter::set_info vfunc: (re)builds the per-pixel distance lookup
+ * table on caps changes. The table is fully written by initDistTable, so
+ * plain g_malloc (no zeroing) is sufficient. */
+static gboolean
+gst_warptv_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+ GstWarpTV *filter = GST_WARPTV (vfilter);
+ gint width, height;
+
+ width = GST_VIDEO_INFO_WIDTH (in_info);
+ height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+ g_free (filter->disttable);
+ filter->disttable = g_malloc (width * height * sizeof (guint32));
+ initDistTable (filter, width, height);
+
+ return TRUE;
+}
+
+static gint32 sintable[1024 + 256];
+
+/* Fills the shared sine lookup table: 1024 entries covering one full
+ * period scaled to 15-bit magnitude, plus the first 256 entries repeated
+ * at the end so that lookups at i + 256 (quarter-period phase offset,
+ * i.e. cosine) need no wrap-around check. */
+static void
+initSinTable (void)
+{
+ gint32 *tptr, *tsinptr;
+ gint i;
+
+ tsinptr = tptr = sintable;
+
+ for (i = 0; i < 1024; i++)
+ *tptr++ = (int) (sin (i * M_PI / 512) * 32767);
+
+ for (i = 0; i < 256; i++)
+ *tptr++ = *tsinptr++;
+}
+
+/* Builds the per-pixel distance table: for each pixel, the distance from
+ * the frame centre normalised to 0..511 and doubled (<< 1) so it directly
+ * indexes the (dy, dx) pairs in ctable. */
+static void
+initDistTable (GstWarpTV * filter, gint width, gint height)
+{
+ gint32 halfw, halfh, *distptr;
+ gint x, y;
+ float m;
+
+ halfw = width >> 1;
+ halfh = height >> 1;
+
+ distptr = filter->disttable;
+
+ /* maximum distance (frame corner), used as normalisation factor */
+ m = sqrt ((double) (halfw * halfw + halfh * halfh));
+
+ for (y = -halfh; y < halfh; y++)
+ for (x = -halfw; x < halfw; x++)
+#ifdef PS2
+ *distptr++ = ((int) ((sqrtf (x * x + y * y) * 511.9999) / m)) << 1;
+#else
+ *distptr++ = ((int) ((sqrt (x * x + y * y) * 511.9999) / m)) << 1;
+#endif
+}
+
+/* Per-frame transform: displaces each output pixel by a time-varying,
+ * distance-dependent (dx, dy) offset looked up via disttable/ctable,
+ * producing the "goo" warp. Runs under the object lock because it both
+ * rebuilds ctable and advances tval. */
+static GstFlowReturn
+gst_warptv_transform_frame (GstVideoFilter * filter, GstVideoFrame * in_frame,
+ GstVideoFrame * out_frame)
+{
+ GstWarpTV *warptv = GST_WARPTV (filter);
+ gint width, height;
+ gint xw, yw, cw;
+ gint32 c, i, x, y, dx, dy, maxx, maxy;
+ gint32 *ctptr, *distptr;
+ gint32 *ctable;
+ guint32 *src, *dest;
+ gint sstride, dstride;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ sstride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+ dstride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
+
+ width = GST_VIDEO_FRAME_WIDTH (in_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (in_frame);
+
+ GST_OBJECT_LOCK (warptv);
+ /* wave amplitudes for this frame, derived from the frame counter */
+ xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30);
+ yw = (gint) (sin ((warptv->tval) * M_PI / 256) * -35);
+ cw = (gint) (sin ((warptv->tval - 70) * M_PI / 64) * 50);
+ xw += (gint) (sin ((warptv->tval - 10) * M_PI / 512) * 40);
+ yw += (gint) (sin ((warptv->tval + 30) * M_PI / 512) * 40);
+
+ ctptr = warptv->ctable;
+ distptr = warptv->disttable;
+ ctable = warptv->ctable;
+
+ c = 0;
+
+ /* rebuild ctable: 512 (dy, dx) offset pairs indexed by distance */
+ for (x = 0; x < 512; x++) {
+ i = (c >> 3) & 0x3FE;
+ *ctptr++ = ((sintable[i] * yw) >> 15);
+ *ctptr++ = ((sintable[i + 256] * xw) >> 15);
+ c += cw;
+ }
+ maxx = width - 2;
+ maxy = height - 2;
+
+ /* NOTE(review): the last output row (y == height - 1) is never written,
+ * matching the upstream EffecTV implementation */
+ for (y = 0; y < height - 1; y++) {
+ for (x = 0; x < width; x++) {
+ i = *distptr++;
+ dx = ctable[i + 1] + x;
+ dy = ctable[i] + y;
+
+ /* clamp the sampling position inside the frame */
+ if (dx < 0)
+ dx = 0;
+ else if (dx > maxx)
+ dx = maxx;
+
+ if (dy < 0)
+ dy = 0;
+ else if (dy > maxy)
+ dy = maxy;
+
+ /* src is indexed absolutely (dy rows from the frame start) */
+ dest[x] = src[dy * sstride / 4 + dx];
+ }
+ dest += dstride / 4;
+ }
+
+ warptv->tval = (warptv->tval + 1) & 511;
+ GST_OBJECT_UNLOCK (warptv);
+
+ return GST_FLOW_OK;
+}
+
+/* GstBaseTransform::start vfunc: resets the frame counter that drives
+ * the warp animation. */
+static gboolean
+gst_warptv_start (GstBaseTransform * trans)
+{
+ GstWarpTV *warptv = GST_WARPTV (trans);
+
+ warptv->tval = 0;
+
+ return TRUE;
+}
+
+/* GObject::finalize vfunc: releases the distance table and chains up. */
+static void
+gst_warptv_finalize (GObject * object)
+{
+ GstWarpTV *warptv = GST_WARPTV (object);
+
+ g_free (warptv->disttable);
+ warptv->disttable = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class initialisation: registers metadata, pad templates and vfuncs.
+ * The shared sine table is built here since class_init runs exactly once
+ * before any instance exists. */
+static void
+gst_warptv_class_init (GstWarpTVClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->finalize = gst_warptv_finalize;
+
+ gst_element_class_set_static_metadata (gstelement_class, "WarpTV effect",
+ "Filter/Effect/Video",
+ "WarpTV does realtime goo'ing of the video input",
+ "Sam Lantinga <slouken@devolution.com>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_warptv_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_warptv_src_template);
+
+ trans_class->start = GST_DEBUG_FUNCPTR (gst_warptv_start);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_warptv_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_warptv_transform_frame);
+
+ initSinTable ();
+}
+
+/* Instance initialisation: all state is set up in set_info/start. */
+static void
+gst_warptv_init (GstWarpTV * warptv)
+{
+ /* nothing to do */
+}
diff --git a/gst/effectv/gstwarp.h b/gst/effectv/gstwarp.h
new file mode 100644
index 0000000000..73eeebc0de
--- /dev/null
+++ b/gst/effectv/gstwarp.h
@@ -0,0 +1,67 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ *
+ * EffecTV - Realtime Digital Video Effector
+ * Copyright (C) 2001 FUKUCHI Kentarou
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WARP_H__
+#define __GST_WARP_H__
+
+#include <gst/gst.h>
+
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_WARPTV \
+ (gst_warptv_get_type())
+#define GST_WARPTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WARPTV,GstWarpTV))
+#define GST_WARPTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_WARPTV,GstWarpTVClass))
+#define GST_IS_WARPTV(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WARPTV))
+#define GST_IS_WARPTV_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_WARPTV))
+
+typedef struct _GstWarpTV GstWarpTV;
+typedef struct _GstWarpTVClass GstWarpTVClass;
+
+struct _GstWarpTV
+{
+ GstVideoFilter videofilter;
+
+ /* < private > */
+ gint32 *disttable; /* per-pixel centre distance, doubled, built in set_info */
+ gint32 ctable[1024]; /* 512 (dy, dx) offset pairs, rebuilt every frame */
+ gint tval; /* frame counter, wraps at 512 */
+};
+
+struct _GstWarpTVClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_warptv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_WARP_H__ */
diff --git a/gst/effectv/meson.build b/gst/effectv/meson.build
new file mode 100644
index 0000000000..17610e222a
--- /dev/null
+++ b/gst/effectv/meson.build
@@ -0,0 +1,16 @@
+# Sources for the effectv plugin (one file per TV effect plus the
+# registration entry point gsteffectv.c).
+effect_sources = [
+ 'gsteffectv.c', 'gstedge.c', 'gstaging.c', 'gstdice.c', 'gstwarp.c',
+ 'gstshagadelic.c', 'gstvertigo.c', 'gstrev.c', 'gstquark.c', 'gstop.c',
+ 'gstradioac.c', 'gststreak.c', 'gstripple.c'
+]
+
+# libm is required for the sin()/sqrt() calls in several effects.
+gsteffectv = library('gsteffectv',
+ effect_sources,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gst_dep, gstbase_dep, gstvideo_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gsteffectv, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gsteffectv]
new file mode 100644
index 0000000000..9b261b8499
--- /dev/null
+++ b/gst/equalizer/GstIirEqualizer10Bands.prs
@@ -0,0 +1,174 @@
+[_presets_]
+version=0.10
+element-name=GstIirEqualizer10Bands
+
+[ballad]
+band0=4
+band1=3.75
+band2=2.5
+band3=0
+band4=-4
+band5=-6
+band6=-3
+band7=0
+band8=2.5
+band9=9
+
+[classic]
+band0=0
+band1=0
+band2=0
+band3=0
+band4=0
+band5=0
+band6=-6
+band7=-7
+band8=-7
+band9=-9.5
+
+[club]
+band0=0
+band1=0
+band2=8
+band3=6
+band4=5.5
+band5=5
+band6=3
+band7=0
+band8=0
+band9=0
+
+[dance]
+band0=9.6
+band1=7
+band2=2.5
+band3=0
+band4=0
+band5=-5.6
+band6=-7
+band7=-7
+band8=0
+band9=0
+
+[pop]
+band0=-1.6
+band1=4.5
+band2=7
+band3=8
+band4=5.6
+band5=0
+band6=-2.5
+band7=-2
+band8=-1.6
+band9=-1.5
+
+[reggae]
+band0=0
+band1=0
+band2=0
+band3=-5.5
+band4=0
+band5=6.5
+band6=6.5
+band7=0
+band8=0
+band9=0
+
+[rock]
+band0=8
+band1=5
+band2=-5.5
+band3=-8
+band4=-3
+band5=4
+band6=8
+band7=11
+band8=11
+band9=11.5
+
+[ska]
+band0=-2.5
+band1=-5
+band2=-4
+band3=0
+band4=4
+band5=5.5
+band6=8
+band7=9
+band8=11
+band9=9
+
+[soft]
+band0=5
+band1=1.5
+band2=0
+band3=-2.5
+band4=0
+band5=4
+band6=8
+band7=9
+band8=11
+band9=12
+
+[techno]
+band0=8
+band1=5.5
+band2=0
+band3=-5.5
+band4=-5
+band5=0
+band6=8
+band7=10
+band8=10
+band9=9
+
+
+[party]
+band0=7
+band1=7
+band2=0
+band3=0
+band4=0
+band5=0
+band6=0
+band7=0
+band8=7
+band9=7
+
+
+[more bass]
+band0=-8
+band1=10
+band2=10
+band3=5.5
+band4=1.5
+band5=-4
+band6=-8
+band7=-10
+band8=-11
+band9=-11
+
+[more bass and treble]
+band0=8
+band1=5.5
+band2=0
+band3=-7
+band4=-5
+band5=1.5
+band6=8
+band7=11.2
+band8=12
+band9=12
+
+[more treble]
+band0=-10
+band1=-10
+band2=-10
+band3=-4
+band4=2.5
+band5=11
+band6=16
+band7=16
+band8=16
+band9=18
+
diff --git a/gst/equalizer/GstIirEqualizer3Bands.prs b/gst/equalizer/GstIirEqualizer3Bands.prs
new file mode 100644
index 0000000000..fb8ef5a403
--- /dev/null
+++ b/gst/equalizer/GstIirEqualizer3Bands.prs
@@ -0,0 +1,14 @@
+[_presets_]
+version=0.10
+element-name=GstIirEqualizer3Bands
+
+[more bass]
+band0=7.75
+band1=0
+band2=0
+
+[more trebble]
+band0=0
+band1=0
+band2=6.0
+
diff --git a/gst/equalizer/gstiirequalizer.c b/gst/equalizer/gstiirequalizer.c
new file mode 100644
index 0000000000..5ebd9d26de
--- /dev/null
+++ b/gst/equalizer/gstiirequalizer.c
@@ -0,0 +1,904 @@
+/* GStreamer
+ * Copyright (C) <2004> Benjamin Otte <otte@gnome.org>
+ * <2007> Stefan Kost <ensonic@users.sf.net>
+ * <2007> Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "gstiirequalizer.h"
+#include "gstiirequalizernbands.h"
+#include "gstiirequalizer3bands.h"
+#include "gstiirequalizer10bands.h"
+
+#include "gst/glib-compat-private.h"
+
+GST_DEBUG_CATEGORY (equalizer_debug);
+#define GST_CAT_DEFAULT equalizer_debug
+
+#define BANDS_LOCK(equ) g_mutex_lock(&equ->bands_lock)
+#define BANDS_UNLOCK(equ) g_mutex_unlock(&equ->bands_lock)
+
+static void gst_iir_equalizer_child_proxy_interface_init (gpointer g_iface,
+ gpointer iface_data);
+
+static void gst_iir_equalizer_finalize (GObject * object);
+
+static gboolean gst_iir_equalizer_setup (GstAudioFilter * filter,
+ const GstAudioInfo * info);
+static GstFlowReturn gst_iir_equalizer_transform_ip (GstBaseTransform * btrans,
+ GstBuffer * buf);
+static void set_passthrough (GstIirEqualizer * equ);
+
+#define ALLOWED_CAPS \
+ "audio/x-raw," \
+ " format=(string) {"GST_AUDIO_NE(S16)","GST_AUDIO_NE(F32)"," \
+ GST_AUDIO_NE(F64)" }, " \
+ " rate=(int)[1000,MAX]," \
+ " channels=(int)[1,MAX]," \
+ " layout=(string)interleaved"
+
+#define gst_iir_equalizer_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstIirEqualizer, gst_iir_equalizer,
+ GST_TYPE_AUDIO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
+ gst_iir_equalizer_child_proxy_interface_init)
+ G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));
+
+
+/* child object */
+
+enum
+{
+ PROP_GAIN = 1,
+ PROP_FREQ,
+ PROP_BANDWIDTH,
+ PROP_TYPE
+};
+
+typedef enum
+{
+ BAND_TYPE_PEAK = 0,
+ BAND_TYPE_LOW_SHELF,
+ BAND_TYPE_HIGH_SHELF
+} GstIirEqualizerBandType;
+
+#define GST_TYPE_IIR_EQUALIZER_BAND_TYPE (gst_iir_equalizer_band_type_get_type ())
+/* Registers (once, thread-safely) the GEnum type for the per-band "type"
+ * property (peak / low-shelf / high-shelf). The original used a plain
+ * static flag, which races if two threads hit the first call together;
+ * g_once_init_enter/leave is the GLib idiom for one-shot type
+ * registration. */
+static GType
+gst_iir_equalizer_band_type_get_type (void)
+{
+  static gsize gtype = 0;
+
+  if (g_once_init_enter (&gtype)) {
+    static const GEnumValue values[] = {
+      {BAND_TYPE_PEAK, "Peak filter (default for inner bands)", "peak"},
+      {BAND_TYPE_LOW_SHELF, "Low shelf filter (default for first band)",
+          "low-shelf"},
+      {BAND_TYPE_HIGH_SHELF, "High shelf filter (default for last band)",
+          "high-shelf"},
+      {0, NULL, NULL}
+    };
+    GType tmp = g_enum_register_static ("GstIirEqualizerBandType", values);
+
+    g_once_init_leave (&gtype, tmp);
+  }
+  return (GType) gtype;
+}
+
+
+typedef struct _GstIirEqualizerBandClass GstIirEqualizerBandClass;
+
+#define GST_TYPE_IIR_EQUALIZER_BAND \
+ (gst_iir_equalizer_band_get_type())
+#define GST_IIR_EQUALIZER_BAND(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_IIR_EQUALIZER_BAND,GstIirEqualizerBand))
+#define GST_IIR_EQUALIZER_BAND_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_IIR_EQUALIZER_BAND,GstIirEqualizerBandClass))
+#define GST_IS_IIR_EQUALIZER_BAND(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_IIR_EQUALIZER_BAND))
+#define GST_IS_IIR_EQUALIZER_BAND_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IIR_EQUALIZER_BAND))
+
+/* One equalizer band: a second-order IIR section exposed as a GstObject
+ * child of the equalizer (via GstChildProxy). */
+struct _GstIirEqualizerBand
+{
+ GstObject object;
+
+ /*< private > */
+ /* center frequency and gain */
+ gdouble freq; /* "freq" property, Hz */
+ gdouble gain; /* "gain" property, dB */
+ gdouble width; /* "bandwidth" property, Hz between band edges */
+ GstIirEqualizerBandType type; /* "type" property: peak/low-shelf/high-shelf */
+
+ /* second order iir filter */
+ gdouble b1, b2; /* IIR coefficients for outputs */
+ gdouble a0, a1, a2; /* IIR coefficients for inputs */
+};
+
+struct _GstIirEqualizerBandClass
+{
+ GstObjectClass parent_class;
+};
+
+static GType gst_iir_equalizer_band_get_type (void);
+
+/* GObject::set_property vfunc for a band. Each change marks the parent
+ * equalizer's coefficients stale under the bands lock so they are
+ * recomputed on the next processing pass; gain changes also update the
+ * parent's passthrough state.
+ * NOTE(review): assumes the band is already parented to the equalizer —
+ * gst_object_get_parent() would return NULL otherwise; confirm bands are
+ * never configured before being parented. */
+static void
+gst_iir_equalizer_band_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstIirEqualizerBand *band = GST_IIR_EQUALIZER_BAND (object);
+ GstIirEqualizer *equ =
+ GST_IIR_EQUALIZER (gst_object_get_parent (GST_OBJECT (band)));
+
+ switch (prop_id) {
+ case PROP_GAIN:{
+ gdouble gain;
+
+ gain = g_value_get_double (value);
+ GST_DEBUG_OBJECT (band, "gain = %lf -> %lf", band->gain, gain);
+ if (gain != band->gain) {
+ BANDS_LOCK (equ);
+ equ->need_new_coefficients = TRUE;
+ band->gain = gain;
+ /* all-zero gains allow passthrough mode */
+ set_passthrough (equ);
+ BANDS_UNLOCK (equ);
+ GST_DEBUG_OBJECT (band, "changed gain = %lf ", band->gain);
+ }
+ break;
+ }
+ case PROP_FREQ:{
+ gdouble freq;
+
+ freq = g_value_get_double (value);
+ GST_DEBUG_OBJECT (band, "freq = %lf -> %lf", band->freq, freq);
+ if (freq != band->freq) {
+ BANDS_LOCK (equ);
+ equ->need_new_coefficients = TRUE;
+ band->freq = freq;
+ BANDS_UNLOCK (equ);
+ GST_DEBUG_OBJECT (band, "changed freq = %lf ", band->freq);
+ }
+ break;
+ }
+ case PROP_BANDWIDTH:{
+ gdouble width;
+
+ width = g_value_get_double (value);
+ GST_DEBUG_OBJECT (band, "width = %lf -> %lf", band->width, width);
+ if (width != band->width) {
+ BANDS_LOCK (equ);
+ equ->need_new_coefficients = TRUE;
+ band->width = width;
+ BANDS_UNLOCK (equ);
+ GST_DEBUG_OBJECT (band, "changed width = %lf ", band->width);
+ }
+ break;
+ }
+ case PROP_TYPE:{
+ GstIirEqualizerBandType type;
+
+ type = g_value_get_enum (value);
+ GST_DEBUG_OBJECT (band, "type = %d -> %d", band->type, type);
+ if (type != band->type) {
+ BANDS_LOCK (equ);
+ equ->need_new_coefficients = TRUE;
+ band->type = type;
+ BANDS_UNLOCK (equ);
+ GST_DEBUG_OBJECT (band, "changed type = %d ", band->type);
+ }
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ /* drop the ref taken by gst_object_get_parent() */
+ gst_object_unref (equ);
+}
+
+/* GObject::get_property vfunc for a band: plain reads of the band's
+ * parameter fields. */
+static void
+gst_iir_equalizer_band_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstIirEqualizerBand *band = GST_IIR_EQUALIZER_BAND (object);
+
+ switch (prop_id) {
+ case PROP_GAIN:
+ g_value_set_double (value, band->gain);
+ break;
+ case PROP_FREQ:
+ g_value_set_double (value, band->freq);
+ break;
+ case PROP_BANDWIDTH:
+ g_value_set_double (value, band->width);
+ break;
+ case PROP_TYPE:
+ g_value_set_enum (value, band->type);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
/* Class init for the band child object: installs the four controllable
 * properties (gain/freq/bandwidth/type) that the parent equalizer reads
 * when it recomputes the biquad coefficients. */
static void
gst_iir_equalizer_band_class_init (GstIirEqualizerBandClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);

  gobject_class->set_property = gst_iir_equalizer_band_set_property;
  gobject_class->get_property = gst_iir_equalizer_band_get_property;

  g_object_class_install_property (gobject_class, PROP_GAIN,
      g_param_spec_double ("gain", "gain",
          "gain for the frequency band ranging from -24.0 dB to +12.0 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));

  g_object_class_install_property (gobject_class, PROP_FREQ,
      g_param_spec_double ("freq", "freq",
          "center frequency of the band",
          0.0, 100000.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));

  g_object_class_install_property (gobject_class, PROP_BANDWIDTH,
      g_param_spec_double ("bandwidth", "bandwidth",
          "difference between bandedges in Hz",
          0.0, 100000.0, 1.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));

  g_object_class_install_property (gobject_class, PROP_TYPE,
      g_param_spec_enum ("type", "Type",
          "Filter type", GST_TYPE_IIR_EQUALIZER_BAND_TYPE,
          BAND_TYPE_PEAK,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));

  /* NOTE(review): this marks the *equalizer* type as plugin API from the
   * *band* class init — confirm whether GST_TYPE_IIR_EQUALIZER_BAND was
   * intended here instead. */
  gst_type_mark_as_plugin_api (GST_TYPE_IIR_EQUALIZER, 0);
}
+
+static void
+gst_iir_equalizer_band_init (GstIirEqualizerBand * band,
+ GstIirEqualizerBandClass * klass)
+{
+ band->freq = 0.0;
+ band->gain = 0.0;
+ band->width = 1.0;
+ band->type = BAND_TYPE_PEAK;
+}
+
+static GType
+gst_iir_equalizer_band_get_type (void)
+{
+ static GType type = 0;
+
+ if (G_UNLIKELY (!type)) {
+ const GTypeInfo type_info = {
+ sizeof (GstIirEqualizerBandClass),
+ NULL,
+ NULL,
+ (GClassInitFunc) gst_iir_equalizer_band_class_init,
+ NULL,
+ NULL,
+ sizeof (GstIirEqualizerBand),
+ 0,
+ (GInstanceInitFunc) gst_iir_equalizer_band_init,
+ };
+ type =
+ g_type_register_static (GST_TYPE_OBJECT, "GstIirEqualizerBand",
+ &type_info, 0);
+ }
+ return (type);
+}
+
+
/* child proxy iface */

/* Returns a new reference to band @index, or NULL (with a warning) when
 * the index is out of range.  bands_lock is held across the array access
 * so the band list cannot change underneath us. */
static GObject *
gst_iir_equalizer_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
    guint index)
{
  GstIirEqualizer *equ = GST_IIR_EQUALIZER (child_proxy);
  GObject *ret;

  BANDS_LOCK (equ);
  if (G_UNLIKELY (index >= equ->freq_band_count)) {
    BANDS_UNLOCK (equ);
    /* emits the range warning and returns NULL (condition is false here) */
    g_return_val_if_fail (index < equ->freq_band_count, NULL);
  }

  ret = g_object_ref (G_OBJECT (equ->bands[index]));
  BANDS_UNLOCK (equ);

  GST_LOG_OBJECT (equ, "return child[%d] %" GST_PTR_FORMAT, index, ret);
  return ret;
}
+
+static guint
+gst_iir_equalizer_child_proxy_get_children_count (GstChildProxy * child_proxy)
+{
+ GstIirEqualizer *equ = GST_IIR_EQUALIZER (child_proxy);
+
+ GST_LOG ("we have %d children", equ->freq_band_count);
+ return equ->freq_band_count;
+}
+
/* Wires up the GstChildProxy interface so applications can enumerate and
 * address the per-band child objects (named "band0" .. "bandN-1" by
 * gst_iir_equalizer_compute_frequencies()). */
static void
gst_iir_equalizer_child_proxy_interface_init (gpointer g_iface,
    gpointer iface_data)
{
  GstChildProxyInterface *iface = g_iface;

  GST_DEBUG ("initializing iface");

  iface->get_child_by_index = gst_iir_equalizer_child_proxy_get_child_by_index;
  iface->get_children_count = gst_iir_equalizer_child_proxy_get_children_count;
}
+
/* equalizer implementation */

/* Base-class wiring: finalize, audio-filter setup (format negotiation) and
 * the in-place transform, plus the pad templates built from ALLOWED_CAPS. */
static void
gst_iir_equalizer_class_init (GstIirEqualizerClass * klass)
{
  GstAudioFilterClass *audio_filter_class = (GstAudioFilterClass *) klass;
  GstBaseTransformClass *btrans_class = (GstBaseTransformClass *) klass;
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstCaps *caps;

  gobject_class->finalize = gst_iir_equalizer_finalize;
  audio_filter_class->setup = gst_iir_equalizer_setup;
  btrans_class->transform_ip = gst_iir_equalizer_transform_ip;
  /* we manage passthrough ourselves (see set_passthrough), so skip
   * transform_ip while passthrough is active */
  btrans_class->transform_ip_on_passthrough = FALSE;

  caps = gst_caps_from_string (ALLOWED_CAPS);
  gst_audio_filter_class_add_pad_templates (audio_filter_class, caps);
  gst_caps_unref (caps);
}
+
/* Instance init: create the lock protecting the band array/coefficients
 * and start in passthrough (all gains default to 0 dB). */
static void
gst_iir_equalizer_init (GstIirEqualizer * eq)
{
  g_mutex_init (&eq->bands_lock);
  /* Band gains are 0 by default, passthrough until they are changed */
  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (eq), TRUE);
}
+
+static void
+gst_iir_equalizer_finalize (GObject * object)
+{
+ GstIirEqualizer *equ = GST_IIR_EQUALIZER (object);
+ gint i;
+
+ for (i = 0; i < equ->freq_band_count; i++) {
+ if (equ->bands[i])
+ gst_object_unparent (GST_OBJECT (equ->bands[i]));
+ equ->bands[i] = NULL;
+ }
+ equ->freq_band_count = 0;
+
+ g_free (equ->bands);
+ g_free (equ->history);
+
+ g_mutex_clear (&equ->bands_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Filter taken from
+ *
+ * The Equivalence of Various Methods of Computing
+ * Biquad Coefficients for Audio Parametric Equalizers
+ *
+ * by Robert Bristow-Johnson
+ *
+ * http://www.aes.org/e-lib/browse.cfm?elib=6326
+ * http://www.musicdsp.org/files/EQ-Coefficients.pdf
+ * http://www.musicdsp.org/files/Audio-EQ-Cookbook.txt
+ *
+ * The bandwidth method that we use here is the preferred
+ * one from this article transformed from octaves to frequency
+ * in Hz.
+ */
+static inline gdouble
+arg_to_scale (gdouble arg)
+{
+ return (pow (10.0, arg / 40.0));
+}
+
+static gdouble
+calculate_omega (gdouble freq, gint rate)
+{
+ gdouble omega;
+
+ if (freq / rate >= 0.5)
+ omega = G_PI;
+ else if (freq <= 0.0)
+ omega = 0.0;
+ else
+ omega = 2.0 * G_PI * (freq / rate);
+
+ return omega;
+}
+
/* Normalized bandwidth (radians) for @band at sample rate @rate.
 * Side effect: for a zero/negative width this writes identity
 * (pass-through) coefficients into the band and returns 0.0, which the
 * setup_* callers treat as "coefficients already set, skip the math". */
static gdouble
calculate_bw (GstIirEqualizerBand * band, gint rate)
{
  gdouble bw = 0.0;

  if (band->width / rate >= 0.5) {
    /* If bandwidth == 0.5 the calculation below fails as tan(G_PI/2)
     * is undefined. So set the bandwidth to a slightly smaller value.
     */
    bw = G_PI - 0.00000001;
  } else if (band->width <= 0.0) {
    /* If bandwidth == 0 this band won't change anything so set
     * the coefficients accordingly. The coefficient calculation
     * below would create coefficients that for some reason amplify
     * the band.
     */
    band->a0 = 1.0;
    band->a1 = 0.0;
    band->a2 = 0.0;
    band->b1 = 0.0;
    band->b2 = 0.0;
  } else {
    bw = 2.0 * G_PI * (band->width / rate);
  }
  return bw;
}
+
/* Computes the peaking-EQ biquad coefficients for @band (RBJ cookbook
 * formulas, see references above arg_to_scale) and stores the
 * b0-normalized a0..a2/b1/b2 in the band.  Requires a negotiated
 * sample rate. */
static void
setup_peak_filter (GstIirEqualizer * equ, GstIirEqualizerBand * band)
{
  gint rate = GST_AUDIO_FILTER_RATE (equ);

  /* no sample rate yet -> cannot compute coefficients */
  g_return_if_fail (rate);

  {
    gdouble gain, omega, bw;
    gdouble alpha, alpha1, alpha2, b0;

    gain = arg_to_scale (band->gain);
    omega = calculate_omega (band->freq, rate);
    bw = calculate_bw (band, rate);
    /* bw == 0.0: calculate_bw already wrote identity coefficients */
    if (bw == 0.0)
      goto out;

    alpha = tan (bw / 2.0);

    alpha1 = alpha * gain;
    alpha2 = alpha / gain;

    b0 = (1.0 + alpha2);

    band->a0 = (1.0 + alpha1) / b0;
    band->a1 = (-2.0 * cos (omega)) / b0;
    band->a2 = (1.0 - alpha1) / b0;
    band->b1 = (2.0 * cos (omega)) / b0;
    band->b2 = -(1.0 - alpha2) / b0;

  out:
    GST_INFO
        ("gain = %5.1f, width= %7.2f, freq = %7.2f, a0 = %7.5g, a1 = %7.5g, a2=%7.5g b1 = %7.5g, b2 = %7.5g",
        band->gain, band->width, band->freq, band->a0, band->a1, band->a2,
        band->b1, band->b2);
  }
}
+
/* Computes low-shelf biquad coefficients for @band (RBJ cookbook style)
 * and stores the b0-normalized result.  Requires a negotiated rate. */
static void
setup_low_shelf_filter (GstIirEqualizer * equ, GstIirEqualizerBand * band)
{
  gint rate = GST_AUDIO_FILTER_RATE (equ);

  /* no sample rate yet -> cannot compute coefficients */
  g_return_if_fail (rate);

  {
    gdouble gain, omega, bw;
    gdouble alpha, delta, b0;
    gdouble egp, egm;

    gain = arg_to_scale (band->gain);
    omega = calculate_omega (band->freq, rate);
    bw = calculate_bw (band, rate);
    /* bw == 0.0: calculate_bw already wrote identity coefficients */
    if (bw == 0.0)
      goto out;

    egm = gain - 1.0;
    egp = gain + 1.0;
    alpha = tan (bw / 2.0);

    delta = 2.0 * sqrt (gain) * alpha;
    b0 = egp + egm * cos (omega) + delta;

    band->a0 = ((egp - egm * cos (omega) + delta) * gain) / b0;
    band->a1 = ((egm - egp * cos (omega)) * 2.0 * gain) / b0;
    band->a2 = ((egp - egm * cos (omega) - delta) * gain) / b0;
    band->b1 = ((egm + egp * cos (omega)) * 2.0) / b0;
    band->b2 = -((egp + egm * cos (omega) - delta)) / b0;


  out:
    GST_INFO
        ("gain = %5.1f, width= %7.2f, freq = %7.2f, a0 = %7.5g, a1 = %7.5g, a2=%7.5g b1 = %7.5g, b2 = %7.5g",
        band->gain, band->width, band->freq, band->a0, band->a1, band->a2,
        band->b1, band->b2);
  }
}
+
/* Computes high-shelf biquad coefficients for @band (RBJ cookbook style,
 * mirror of the low-shelf case with the cos(omega) signs flipped) and
 * stores the b0-normalized result.  Requires a negotiated rate. */
static void
setup_high_shelf_filter (GstIirEqualizer * equ, GstIirEqualizerBand * band)
{
  gint rate = GST_AUDIO_FILTER_RATE (equ);

  /* no sample rate yet -> cannot compute coefficients */
  g_return_if_fail (rate);

  {
    gdouble gain, omega, bw;
    gdouble alpha, delta, b0;
    gdouble egp, egm;

    gain = arg_to_scale (band->gain);
    omega = calculate_omega (band->freq, rate);
    bw = calculate_bw (band, rate);
    /* bw == 0.0: calculate_bw already wrote identity coefficients */
    if (bw == 0.0)
      goto out;

    egm = gain - 1.0;
    egp = gain + 1.0;
    alpha = tan (bw / 2.0);

    delta = 2.0 * sqrt (gain) * alpha;
    b0 = egp - egm * cos (omega) + delta;

    band->a0 = ((egp + egm * cos (omega) + delta) * gain) / b0;
    band->a1 = ((egm + egp * cos (omega)) * -2.0 * gain) / b0;
    band->a2 = ((egp + egm * cos (omega) - delta) * gain) / b0;
    band->b1 = ((egm - egp * cos (omega)) * -2.0) / b0;
    band->b2 = -((egp - egm * cos (omega) - delta)) / b0;


  out:
    GST_INFO
        ("gain = %5.1f, width= %7.2f, freq = %7.2f, a0 = %7.5g, a1 = %7.5g, a2=%7.5g b1 = %7.5g, b2 = %7.5g",
        band->gain, band->width, band->freq, band->a0, band->a1, band->a2,
        band->b1, band->b2);
  }
}
+
+/* Must be called with bands_lock and transform lock! */
+static void
+set_passthrough (GstIirEqualizer * equ)
+{
+ gint i;
+ gboolean passthrough = TRUE;
+
+ for (i = 0; i < equ->freq_band_count; i++) {
+ passthrough = passthrough && (equ->bands[i]->gain == 0.0);
+ }
+
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (equ), passthrough);
+ GST_DEBUG ("Passthrough mode: %d\n", passthrough);
+}
+
/* Must be called with bands_lock and transform lock! */
/* Recompute the biquad coefficients of every band according to its
 * current type, then clear the need_new_coefficients flag. */
static void
update_coefficients (GstIirEqualizer * equ)
{
  gint i, n = equ->freq_band_count;

  for (i = 0; i < n; i++) {
    if (equ->bands[i]->type == BAND_TYPE_PEAK)
      setup_peak_filter (equ, equ->bands[i]);
    else if (equ->bands[i]->type == BAND_TYPE_LOW_SHELF)
      setup_low_shelf_filter (equ, equ->bands[i]);
    else
      setup_high_shelf_filter (equ, equ->bands[i]);
  }

  equ->need_new_coefficients = FALSE;
}
+
/* Must be called with transform lock! */
/* (Re)allocate the filter history: one history_size-byte element per
 * band per channel, zero-filled (silence) so the filters start clean. */
static void
alloc_history (GstIirEqualizer * equ, const GstAudioInfo * info)
{
  /* free + alloc = no memcpy */
  g_free (equ->history);
  equ->history =
      g_malloc0 (equ->history_size * GST_AUDIO_INFO_CHANNELS (info) *
      equ->freq_band_count);
}
+
+void
+gst_iir_equalizer_compute_frequencies (GstIirEqualizer * equ, guint new_count)
+{
+ guint old_count, i;
+ gdouble freq0, freq1, step;
+ gchar name[20];
+
+ if (equ->freq_band_count == new_count)
+ return;
+
+ BANDS_LOCK (equ);
+
+ if (G_UNLIKELY (equ->freq_band_count == new_count)) {
+ BANDS_UNLOCK (equ);
+ return;
+ }
+
+ old_count = equ->freq_band_count;
+ equ->freq_band_count = new_count;
+ GST_DEBUG ("bands %u -> %u", old_count, new_count);
+
+ if (old_count < new_count) {
+ /* add new bands */
+ equ->bands = g_realloc (equ->bands, sizeof (GstObject *) * new_count);
+ for (i = old_count; i < new_count; i++) {
+ /* otherwise they get names like 'iirequalizerband5' */
+ sprintf (name, "band%u", i);
+ equ->bands[i] = g_object_new (GST_TYPE_IIR_EQUALIZER_BAND,
+ "name", name, NULL);
+ GST_DEBUG ("adding band[%d]=%p", i, equ->bands[i]);
+
+ gst_object_set_parent (GST_OBJECT (equ->bands[i]), GST_OBJECT (equ));
+ gst_child_proxy_child_added (GST_CHILD_PROXY (equ),
+ G_OBJECT (equ->bands[i]), name);
+ }
+ } else {
+ /* free unused bands */
+ for (i = new_count; i < old_count; i++) {
+ GST_DEBUG ("removing band[%d]=%p", i, equ->bands[i]);
+ gst_child_proxy_child_removed (GST_CHILD_PROXY (equ),
+ G_OBJECT (equ->bands[i]), GST_OBJECT_NAME (equ->bands[i]));
+ gst_object_unparent (GST_OBJECT (equ->bands[i]));
+ equ->bands[i] = NULL;
+ }
+ }
+
+ alloc_history (equ, GST_AUDIO_FILTER_INFO (equ));
+
+ /* set center frequencies and name band objects
+ * FIXME: arg! we can't change the name of parented objects :(
+ * application should read band->freq to get the name
+ */
+
+ step = pow (HIGHEST_FREQ / LOWEST_FREQ, 1.0 / new_count);
+ freq0 = LOWEST_FREQ;
+ for (i = 0; i < new_count; i++) {
+ freq1 = freq0 * step;
+
+ if (i == 0)
+ equ->bands[i]->type = BAND_TYPE_LOW_SHELF;
+ else if (i == new_count - 1)
+ equ->bands[i]->type = BAND_TYPE_HIGH_SHELF;
+ else
+ equ->bands[i]->type = BAND_TYPE_PEAK;
+
+ equ->bands[i]->freq = freq0 + ((freq1 - freq0) / 2.0);
+ equ->bands[i]->width = freq1 - freq0;
+ GST_DEBUG ("band[%2d] = '%lf'", i, equ->bands[i]->freq);
+
+ g_object_notify (G_OBJECT (equ->bands[i]), "bandwidth");
+ g_object_notify (G_OBJECT (equ->bands[i]), "freq");
+ g_object_notify (G_OBJECT (equ->bands[i]), "type");
+
+ /*
+ if(equ->bands[i]->freq<10000.0)
+ sprintf (name,"%dHz",(gint)equ->bands[i]->freq);
+ else
+ sprintf (name,"%dkHz",(gint)(equ->bands[i]->freq/1000.0));
+ gst_object_set_name( GST_OBJECT (equ->bands[i]), name);
+ GST_DEBUG ("band[%2d] = '%s'",i,name);
+ */
+ freq0 = freq1;
+ }
+
+ equ->need_new_coefficients = TRUE;
+ BANDS_UNLOCK (equ);
+}
+
+/* start of code that is type specific */
+
/* Expands to the per-sample-type processing code for an *integer* sample
 * format: a 2nd-order (biquad) history struct, a one-sample cascade step
 * computed in the wider BIG_TYPE, the history element size constant, and
 * the frame loop that runs every band over every channel and clamps the
 * result back into [MIN_VAL, MAX_VAL] before truncating with floor(). */
#define CREATE_OPTIMIZED_FUNCTIONS_INT(TYPE,BIG_TYPE,MIN_VAL,MAX_VAL) \
typedef struct { \
  BIG_TYPE x1, x2; /* history of input values for a filter */ \
  BIG_TYPE y1, y2; /* history of output values for a filter */ \
} SecondOrderHistory ## TYPE; \
 \
static inline BIG_TYPE \
one_step_ ## TYPE (GstIirEqualizerBand *filter, \
    SecondOrderHistory ## TYPE *history, BIG_TYPE input) \
{ \
  /* calculate output */ \
  BIG_TYPE output = filter->a0 * input + \
      filter->a1 * history->x1 + filter->a2 * history->x2 + \
      filter->b1 * history->y1 + filter->b2 * history->y2; \
  /* update history */ \
  history->y2 = history->y1; \
  history->y1 = output; \
  history->x2 = history->x1; \
  history->x1 = input; \
 \
  return output; \
} \
 \
static const guint \
history_size_ ## TYPE = sizeof (SecondOrderHistory ## TYPE); \
 \
static void \
gst_iir_equ_process_ ## TYPE (GstIirEqualizer *equ, guint8 *data, \
    guint size, guint channels) \
{ \
  guint frames = size / channels / sizeof (TYPE); \
  guint i, c, f, nf = equ->freq_band_count; \
  BIG_TYPE cur; \
  GstIirEqualizerBand **filters = equ->bands; \
 \
  for (i = 0; i < frames; i++) { \
    SecondOrderHistory ## TYPE *history = equ->history; \
    for (c = 0; c < channels; c++) { \
      cur = *((TYPE *) data); \
      for (f = 0; f < nf; f++) { \
        cur = one_step_ ## TYPE (filters[f], history, cur); \
        history++; \
      } \
      cur = CLAMP (cur, MIN_VAL, MAX_VAL); \
      *((TYPE *) data) = (TYPE) floor (cur); \
      data += sizeof (TYPE); \
    } \
  } \
}
+
/* Same as CREATE_OPTIMIZED_FUNCTIONS_INT but for floating-point sample
 * formats: computation happens directly in TYPE and the result is stored
 * back without clamping or flooring. */
#define CREATE_OPTIMIZED_FUNCTIONS(TYPE) \
typedef struct { \
  TYPE x1, x2; /* history of input values for a filter */ \
  TYPE y1, y2; /* history of output values for a filter */ \
} SecondOrderHistory ## TYPE; \
 \
static inline TYPE \
one_step_ ## TYPE (GstIirEqualizerBand *filter, \
    SecondOrderHistory ## TYPE *history, TYPE input) \
{ \
  /* calculate output */ \
  TYPE output = filter->a0 * input + filter->a1 * history->x1 + \
      filter->a2 * history->x2 + filter->b1 * history->y1 + \
      filter->b2 * history->y2; \
  /* update history */ \
  history->y2 = history->y1; \
  history->y1 = output; \
  history->x2 = history->x1; \
  history->x1 = input; \
 \
  return output; \
} \
 \
static const guint \
history_size_ ## TYPE = sizeof (SecondOrderHistory ## TYPE); \
 \
static void \
gst_iir_equ_process_ ## TYPE (GstIirEqualizer *equ, guint8 *data, \
    guint size, guint channels) \
{ \
  guint frames = size / channels / sizeof (TYPE); \
  guint i, c, f, nf = equ->freq_band_count; \
  TYPE cur; \
  GstIirEqualizerBand **filters = equ->bands; \
 \
  for (i = 0; i < frames; i++) { \
    SecondOrderHistory ## TYPE *history = equ->history; \
    for (c = 0; c < channels; c++) { \
      cur = *((TYPE *) data); \
      for (f = 0; f < nf; f++) { \
        cur = one_step_ ## TYPE (filters[f], history, cur); \
        history++; \
      } \
      *((TYPE *) data) = (TYPE) cur; \
      data += sizeof (TYPE); \
    } \
  } \
}
+
/* Instantiate the processing code for the three supported formats:
 * S16 (filtered in float, clamped back to 16-bit range), F32 and F64. */
CREATE_OPTIMIZED_FUNCTIONS_INT (gint16, gfloat, -32768.0, 32767.0);
CREATE_OPTIMIZED_FUNCTIONS (gfloat);
CREATE_OPTIMIZED_FUNCTIONS (gdouble);
+
/* In-place transform: sync controller-driven property values for the
 * equalizer and its bands, recompute the biquad coefficients if any
 * property changed, then run the format-specific process function over
 * the mapped buffer.  Fails with NOT_NEGOTIATED before caps are set. */
static GstFlowReturn
gst_iir_equalizer_transform_ip (GstBaseTransform * btrans, GstBuffer * buf)
{
  GstAudioFilter *filter = GST_AUDIO_FILTER (btrans);
  GstIirEqualizer *equ = GST_IIR_EQUALIZER (btrans);
  GstClockTime timestamp;
  GstMapInfo map;
  gint channels = GST_AUDIO_FILTER_CHANNELS (filter);
  gboolean need_new_coefficients;

  /* no negotiated format yet -> no process function either */
  if (G_UNLIKELY (channels < 1 || equ->process == NULL))
    return GST_FLOW_NOT_NEGOTIATED;

  /* snapshot the flag under the lock; syncing values below may set it
   * again, which the second locked section picks up */
  BANDS_LOCK (equ);
  need_new_coefficients = equ->need_new_coefficients;
  BANDS_UNLOCK (equ);

  timestamp = GST_BUFFER_TIMESTAMP (buf);
  timestamp =
      gst_segment_to_stream_time (&btrans->segment, GST_FORMAT_TIME, timestamp);

  if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
    GstIirEqualizerBand **filters = equ->bands;
    guint f, nf = equ->freq_band_count;

    gst_object_sync_values (GST_OBJECT (equ), timestamp);

    /* sync values for bands too */
    /* FIXME: iterating equ->bands is not thread-safe here */
    for (f = 0; f < nf; f++) {
      gst_object_sync_values (GST_OBJECT (filters[f]), timestamp);
    }
  }

  BANDS_LOCK (equ);
  if (need_new_coefficients) {
    update_coefficients (equ);
  }
  BANDS_UNLOCK (equ);

  gst_buffer_map (buf, &map, GST_MAP_READWRITE);
  equ->process (equ, map.data, map.size, channels);
  gst_buffer_unmap (buf, &map);

  return GST_FLOW_OK;
}
+
/* GstAudioFilter setup vfunc: pick the process function and history
 * element size for the negotiated sample format, then (re)allocate the
 * filter history for the new channel count.  Returns FALSE for formats
 * outside ALLOWED_CAPS. */
static gboolean
gst_iir_equalizer_setup (GstAudioFilter * audio, const GstAudioInfo * info)
{
  GstIirEqualizer *equ = GST_IIR_EQUALIZER (audio);

  switch (GST_AUDIO_INFO_FORMAT (info)) {
    case GST_AUDIO_FORMAT_S16:
      equ->history_size = history_size_gint16;
      equ->process = gst_iir_equ_process_gint16;
      break;
    case GST_AUDIO_FORMAT_F32:
      equ->history_size = history_size_gfloat;
      equ->process = gst_iir_equ_process_gfloat;
      break;
    case GST_AUDIO_FORMAT_F64:
      equ->history_size = history_size_gdouble;
      equ->process = gst_iir_equ_process_gdouble;
      break;
    default:
      return FALSE;
  }

  alloc_history (equ, info);
  return TRUE;
}
+
/* One-shot shared initialization for all equalizer elements: registers
 * the debug category.  g_once_init_enter/leave makes it safe against
 * concurrent first callers. */
void
equalizer_element_init (GstPlugin * plugin)
{
  static gsize res = FALSE;
  if (g_once_init_enter (&res)) {
    GST_DEBUG_CATEGORY_INIT (equalizer_debug, "equalizer", 0, "equalizer");
    g_once_init_leave (&res, TRUE);
  }
}
diff --git a/gst/equalizer/gstiirequalizer.h b/gst/equalizer/gstiirequalizer.h
new file mode 100644
index 0000000000..6d8a21415a
--- /dev/null
+++ b/gst/equalizer/gstiirequalizer.h
@@ -0,0 +1,82 @@
+/* GStreamer IIR equalizer
+ * Copyright (C) <2004> Benjamin Otte <otte@gnome.org>
+ * <2007> Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_IIR_EQUALIZER__
+#define __GST_IIR_EQUALIZER__
+
+#include <gst/audio/gstaudiofilter.h>
+
+void equalizer_element_init (GstPlugin * plugin);
+
+GST_ELEMENT_REGISTER_DECLARE (equalizer_nbands);
+GST_ELEMENT_REGISTER_DECLARE (equalizer_3bands);
+GST_ELEMENT_REGISTER_DECLARE (equalizer_10bands);
+
+typedef struct _GstIirEqualizer GstIirEqualizer;
+typedef struct _GstIirEqualizerClass GstIirEqualizerClass;
+typedef struct _GstIirEqualizerBand GstIirEqualizerBand;
+
+#define GST_TYPE_IIR_EQUALIZER \
+ (gst_iir_equalizer_get_type())
+#define GST_IIR_EQUALIZER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_IIR_EQUALIZER,GstIirEqualizer))
+#define GST_IIR_EQUALIZER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_IIR_EQUALIZER,GstIirEqualizerClass))
+#define GST_IS_IIR_EQUALIZER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_IIR_EQUALIZER))
+#define GST_IS_IIR_EQUALIZER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IIR_EQUALIZER))
+
+#define LOWEST_FREQ (20.0)
+#define HIGHEST_FREQ (20000.0)
+
/* Sample-format specific in-place processing function (selected in
 * gst_iir_equalizer_setup()). */
typedef void (*ProcessFunc) (GstIirEqualizer * eq, guint8 * data, guint size,
    guint channels);

struct _GstIirEqualizer
{
  GstAudioFilter audiofilter;

  /*< private >*/

  /* protects the band array and the coefficient/passthrough state */
  GMutex bands_lock;
  /* freq_band_count band child objects, exposed via GstChildProxy */
  GstIirEqualizerBand **bands;

  /* properties */
  guint freq_band_count;
  /* for each band and channel */
  gpointer history;
  /* size in bytes of one per-band/per-channel history element */
  guint history_size;

  /* set whenever a band property changes; consumed in transform_ip */
  gboolean need_new_coefficients;

  ProcessFunc process;
};

struct _GstIirEqualizerClass
{
  GstAudioFilterClass audiofilter_class;
};
+
+extern void gst_iir_equalizer_compute_frequencies (GstIirEqualizer * equ, guint new_count);
+
+extern GType gst_iir_equalizer_get_type(void);
+
+#endif /* __GST_IIR_EQUALIZER__ */
diff --git a/gst/equalizer/gstiirequalizer10bands.c b/gst/equalizer/gstiirequalizer10bands.c
new file mode 100644
index 0000000000..97c60c6b9c
--- /dev/null
+++ b/gst/equalizer/gstiirequalizer10bands.c
@@ -0,0 +1,232 @@
+/* GStreamer
+ * Copyright (C) <2007> Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-equalizer-10bands
+ * @title: equalizer-10bands
+ *
+ * The 10 band equalizer element allows changing the gain of 10 equally
+ * distributed frequency bands between 30 Hz and 15 kHz.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=song.ogg ! oggdemux ! vorbisdec ! audioconvert ! equalizer-10bands band2=3.0 ! alsasink
+ * ]| This raises the volume of the 3rd band which is at 119 Hz by 3 db.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstiirequalizer.h"
+#include "gstiirequalizer10bands.h"
+
+
+enum
+{
+ PROP_BAND0 = 1,
+ PROP_BAND1,
+ PROP_BAND2,
+ PROP_BAND3,
+ PROP_BAND4,
+ PROP_BAND5,
+ PROP_BAND6,
+ PROP_BAND7,
+ PROP_BAND8,
+ PROP_BAND9,
+};
+
+static void gst_iir_equalizer_10bands_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_iir_equalizer_10bands_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+GST_DEBUG_CATEGORY_EXTERN (equalizer_debug);
+#define GST_CAT_DEFAULT equalizer_debug
+
+
+#define gst_iir_equalizer_10bands_parent_class parent_class
+G_DEFINE_TYPE (GstIirEqualizer10Bands, gst_iir_equalizer_10bands,
+ GST_TYPE_IIR_EQUALIZER);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (equalizer_10bands, "equalizer-10bands",
+ GST_RANK_NONE, GST_TYPE_IIR_EQUALIZER_10BANDS,
+ equalizer_element_init (plugin));
+
+/* equalizer implementation */
+
/* Class init: installs the ten fixed "bandN" gain properties (each maps
 * onto the corresponding band child's "gain" via GstChildProxy in
 * set/get_property below) and the element metadata. */
static void
gst_iir_equalizer_10bands_class_init (GstIirEqualizer10BandsClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;

  gobject_class->set_property = gst_iir_equalizer_10bands_set_property;
  gobject_class->get_property = gst_iir_equalizer_10bands_get_property;

  g_object_class_install_property (gobject_class, PROP_BAND0,
      g_param_spec_double ("band0", "29 Hz",
          "gain for the frequency band 29 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND1,
      g_param_spec_double ("band1", "59 Hz",
          "gain for the frequency band 59 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND2,
      g_param_spec_double ("band2", "119 Hz",
          "gain for the frequency band 119 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND3,
      g_param_spec_double ("band3", "237 Hz",
          "gain for the frequency band 237 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND4,
      g_param_spec_double ("band4", "474 Hz",
          "gain for the frequency band 474 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND5,
      g_param_spec_double ("band5", "947 Hz",
          "gain for the frequency band 947 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND6,
      g_param_spec_double ("band6", "1889 Hz",
          "gain for the frequency band 1889 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND7,
      g_param_spec_double ("band7", "3770 Hz",
          "gain for the frequency band 3770 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND8,
      g_param_spec_double ("band8", "7523 Hz",
          "gain for the frequency band 7523 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BAND9,
      g_param_spec_double ("band9", "15011 Hz",
          "gain for the frequency band 15011 Hz, ranging from -24 dB to +12 dB",
          -24.0, 12.0, 0.0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));

  gst_element_class_set_static_metadata (gstelement_class, "10 Band Equalizer",
      "Filter/Effect/Audio",
      "Direct Form 10 band IIR equalizer",
      "Stefan Kost <ensonic@users.sf.net>");
}
+
/* Instance init: immediately configure the base equalizer with its ten
 * fixed bands. */
static void
gst_iir_equalizer_10bands_init (GstIirEqualizer10Bands * equ_n)
{
  GstIirEqualizer *equ = GST_IIR_EQUALIZER (equ_n);

  gst_iir_equalizer_compute_frequencies (equ, 10);
}
+
/* set_property: forwards each "bandN" gain onto the matching band child
 * object's "gain" property through the GstChildProxy interface. */
static void
gst_iir_equalizer_10bands_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstChildProxy *equ = GST_CHILD_PROXY (object);

  switch (prop_id) {
    case PROP_BAND0:
      gst_child_proxy_set_property (equ, "band0::gain", value);
      break;
    case PROP_BAND1:
      gst_child_proxy_set_property (equ, "band1::gain", value);
      break;
    case PROP_BAND2:
      gst_child_proxy_set_property (equ, "band2::gain", value);
      break;
    case PROP_BAND3:
      gst_child_proxy_set_property (equ, "band3::gain", value);
      break;
    case PROP_BAND4:
      gst_child_proxy_set_property (equ, "band4::gain", value);
      break;
    case PROP_BAND5:
      gst_child_proxy_set_property (equ, "band5::gain", value);
      break;
    case PROP_BAND6:
      gst_child_proxy_set_property (equ, "band6::gain", value);
      break;
    case PROP_BAND7:
      gst_child_proxy_set_property (equ, "band7::gain", value);
      break;
    case PROP_BAND8:
      gst_child_proxy_set_property (equ, "band8::gain", value);
      break;
    case PROP_BAND9:
      gst_child_proxy_set_property (equ, "band9::gain", value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* get_property: reads each "bandN" gain back from the matching band
 * child object through the GstChildProxy interface. */
static void
gst_iir_equalizer_10bands_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstChildProxy *equ = GST_CHILD_PROXY (object);

  switch (prop_id) {
    case PROP_BAND0:
      gst_child_proxy_get_property (equ, "band0::gain", value);
      break;
    case PROP_BAND1:
      gst_child_proxy_get_property (equ, "band1::gain", value);
      break;
    case PROP_BAND2:
      gst_child_proxy_get_property (equ, "band2::gain", value);
      break;
    case PROP_BAND3:
      gst_child_proxy_get_property (equ, "band3::gain", value);
      break;
    case PROP_BAND4:
      gst_child_proxy_get_property (equ, "band4::gain", value);
      break;
    case PROP_BAND5:
      gst_child_proxy_get_property (equ, "band5::gain", value);
      break;
    case PROP_BAND6:
      gst_child_proxy_get_property (equ, "band6::gain", value);
      break;
    case PROP_BAND7:
      gst_child_proxy_get_property (equ, "band7::gain", value);
      break;
    case PROP_BAND8:
      gst_child_proxy_get_property (equ, "band8::gain", value);
      break;
    case PROP_BAND9:
      gst_child_proxy_get_property (equ, "band9::gain", value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
diff --git a/gst/equalizer/gstiirequalizer10bands.h b/gst/equalizer/gstiirequalizer10bands.h
new file mode 100644
index 0000000000..7066721fa7
--- /dev/null
+++ b/gst/equalizer/gstiirequalizer10bands.h
@@ -0,0 +1,51 @@
+/* GStreamer
+ * Copyright (C) <2007> Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_IIR_EQUALIZER_10BANDS__
+#define __GST_IIR_EQUALIZER_10BANDS__
+
+#include "gstiirequalizer.h"
+
+typedef struct _GstIirEqualizer10Bands GstIirEqualizer10Bands;
+typedef struct _GstIirEqualizer10BandsClass GstIirEqualizer10BandsClass;
+
+#define GST_TYPE_IIR_EQUALIZER_10BANDS \
+ (gst_iir_equalizer_10bands_get_type())
+#define GST_IIR_EQUALIZER_10BANDS(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_IIR_EQUALIZER_10BANDS,GstIirEqualizer10Bands))
+#define GST_IIR_EQUALIZER_10BANDS_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_IIR_EQUALIZER_10BANDS,GstIirEqualizer10BandsClass))
+#define GST_IS_IIR_EQUALIZER_10BANDS(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_IIR_EQUALIZER_10BANDS))
+#define GST_IS_IIR_EQUALIZER_10BANDS_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IIR_EQUALIZER_10BANDS))
+
+struct _GstIirEqualizer10Bands
+{
+ GstIirEqualizer equalizer;
+};
+
+struct _GstIirEqualizer10BandsClass
+{
+ GstIirEqualizerClass equalizer_class;
+};
+
+extern GType gst_iir_equalizer_10bands_get_type(void);
+
+#endif /* __GST_IIR_EQUALIZER_10BANDS__ */
diff --git a/gst/equalizer/gstiirequalizer3bands.c b/gst/equalizer/gstiirequalizer3bands.c
new file mode 100644
index 0000000000..3ea1667196
--- /dev/null
+++ b/gst/equalizer/gstiirequalizer3bands.c
@@ -0,0 +1,145 @@
+/* GStreamer
+ * Copyright (C) <2007> Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-equalizer-3bands
+ * @title: equalizer-3bands
+ *
+ * The 3-band equalizer element allows changing the gain of a low-frequency,
+ * medium-frequency and high-frequency band.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=song.ogg ! oggdemux ! vorbisdec ! audioconvert ! equalizer-3bands band1=6.0 ! alsasink
+ * ]| This raises the volume of the 2nd band, which is at 1110 Hz, by 6 dB.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstiirequalizer.h"
+#include "gstiirequalizer3bands.h"
+
+/* GObject property IDs: one gain property per fixed band. */
+enum
+{
+  PROP_BAND0 = 1,
+  PROP_BAND1,
+  PROP_BAND2,
+};
+
+static void gst_iir_equalizer_3bands_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_iir_equalizer_3bands_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+GST_DEBUG_CATEGORY_EXTERN (equalizer_debug);
+#define GST_CAT_DEFAULT equalizer_debug
+
+#define gst_iir_equalizer_3bands_parent_class parent_class
+G_DEFINE_TYPE (GstIirEqualizer3Bands, gst_iir_equalizer_3bands,
+ GST_TYPE_IIR_EQUALIZER);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (equalizer_3bands, "equalizer-3bands",
+ GST_RANK_NONE, GST_TYPE_IIR_EQUALIZER_3BANDS,
+ equalizer_element_init (plugin));
+
+/* equalizer implementation */
+
+static void
+gst_iir_equalizer_3bands_class_init (GstIirEqualizer3BandsClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->set_property = gst_iir_equalizer_3bands_set_property;
+  gobject_class->get_property = gst_iir_equalizer_3bands_get_property;
+
+  /* Per-band gain properties; each proxies the "gain" property of the
+   * corresponding child band object (see set/get_property below).
+   * NOTE: blurb frequency fixed to match the "110 Hz" nick and the
+   * element documentation (previously said "100 Hz"). */
+  g_object_class_install_property (gobject_class, PROP_BAND0,
+      g_param_spec_double ("band0", "110 Hz",
+          "gain for the frequency band 110 Hz, ranging from -24.0 to +12.0",
+          -24.0, 12.0, 0.0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+  g_object_class_install_property (gobject_class, PROP_BAND1,
+      g_param_spec_double ("band1", "1100 Hz",
+          "gain for the frequency band 1100 Hz, ranging from -24.0 to +12.0",
+          -24.0, 12.0, 0.0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+  g_object_class_install_property (gobject_class, PROP_BAND2,
+      g_param_spec_double ("band2", "11 kHz",
+          "gain for the frequency band 11 kHz, ranging from -24.0 to +12.0",
+          -24.0, 12.0, 0.0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+
+  gst_element_class_set_static_metadata (gstelement_class, "3 Band Equalizer",
+      "Filter/Effect/Audio",
+      "Direct Form 3 band IIR equalizer", "Stefan Kost <ensonic@users.sf.net>");
+}
+
+/* Instance init: create the three fixed frequency bands up front. */
+static void
+gst_iir_equalizer_3bands_init (GstIirEqualizer3Bands * self)
+{
+  gst_iir_equalizer_compute_frequencies (GST_IIR_EQUALIZER (self), 3);
+}
+
+/* Forward a band gain property write to the matching child band object
+ * via the GstChildProxy interface. */
+static void
+gst_iir_equalizer_3bands_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstChildProxy *proxy = GST_CHILD_PROXY (object);
+  const gchar *child_prop;
+
+  switch (prop_id) {
+    case PROP_BAND0:
+      child_prop = "band0::gain";
+      break;
+    case PROP_BAND1:
+      child_prop = "band1::gain";
+      break;
+    case PROP_BAND2:
+      child_prop = "band2::gain";
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      return;
+  }
+
+  gst_child_proxy_set_property (proxy, child_prop, value);
+}
+
+/* Read a band gain property back from the matching child band object
+ * via the GstChildProxy interface. */
+static void
+gst_iir_equalizer_3bands_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstChildProxy *proxy = GST_CHILD_PROXY (object);
+  const gchar *child_prop;
+
+  switch (prop_id) {
+    case PROP_BAND0:
+      child_prop = "band0::gain";
+      break;
+    case PROP_BAND1:
+      child_prop = "band1::gain";
+      break;
+    case PROP_BAND2:
+      child_prop = "band2::gain";
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      return;
+  }
+
+  gst_child_proxy_get_property (proxy, child_prop, value);
+}
diff --git a/gst/equalizer/gstiirequalizer3bands.h b/gst/equalizer/gstiirequalizer3bands.h
new file mode 100644
index 0000000000..3790fca99e
--- /dev/null
+++ b/gst/equalizer/gstiirequalizer3bands.h
@@ -0,0 +1,51 @@
+/* GStreamer
+ * Copyright (C) <2007> Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_IIR_EQUALIZER_3BANDS__
+#define __GST_IIR_EQUALIZER_3BANDS__
+
+#include "gstiirequalizer.h"
+
+/* Fixed 3-band variant of the IIR equalizer.  The instance and class
+ * structures add no members of their own; all state lives in the base
+ * GstIirEqualizer type. */
+typedef struct _GstIirEqualizer3Bands GstIirEqualizer3Bands;
+typedef struct _GstIirEqualizer3BandsClass GstIirEqualizer3BandsClass;
+
+/* Standard GObject cast and type-check macros */
+#define GST_TYPE_IIR_EQUALIZER_3BANDS \
+  (gst_iir_equalizer_3bands_get_type())
+#define GST_IIR_EQUALIZER_3BANDS(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_IIR_EQUALIZER_3BANDS,GstIirEqualizer3Bands))
+#define GST_IIR_EQUALIZER_3BANDS_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_IIR_EQUALIZER_3BANDS,GstIirEqualizer3BandsClass))
+#define GST_IS_IIR_EQUALIZER_3BANDS(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_IIR_EQUALIZER_3BANDS))
+#define GST_IS_IIR_EQUALIZER_3BANDS_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IIR_EQUALIZER_3BANDS))
+
+struct _GstIirEqualizer3Bands
+{
+  GstIirEqualizer equalizer;    /* parent instance; no extra fields */
+};
+
+struct _GstIirEqualizer3BandsClass
+{
+  GstIirEqualizerClass equalizer_class; /* parent class; no extra fields */
+};
+
+extern GType gst_iir_equalizer_3bands_get_type(void);
+
+#endif /* __GST_IIR_EQUALIZER_3BANDS__ */
diff --git a/gst/equalizer/gstiirequalizernbands.c b/gst/equalizer/gstiirequalizernbands.c
new file mode 100644
index 0000000000..2f9f9a4040
--- /dev/null
+++ b/gst/equalizer/gstiirequalizernbands.c
@@ -0,0 +1,165 @@
+/* GStreamer
+ * Copyright (C) <2004> Benjamin Otte <otte@gnome.org>
+ * <2007> Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-equalizer-nbands
+ * @title: equalizer-nbands
+ *
+ * The n-band equalizer element is a fully parametric equalizer. It allows
+ * selecting between 1 and 64 bands and has properties on each band to change
+ * the center frequency, band width and gain.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=song.ogg ! oggdemux ! vorbisdec ! audioconvert ! equalizer-nbands num-bands=15 band5::gain=6.0 ! alsasink
+ * ]| This makes the equalizer use 15 bands and raises the volume of the 5th band by 6 dB.
+ *
+ * ## Example code
+ * |[
+ * #include &lt;gst/gst.h&gt;
+ *
+ * ...
+ * typedef struct {
+ * gfloat freq;
+ * gfloat width;
+ * gfloat gain;
+ * } GstEqualizerBandState;
+ *
+ * ...
+ *
+ * GstElement *equalizer;
+ * GObject *band;
+ * gint i;
+ * GstEqualizerBandState state[] = {
+ * { 120.0, 50.0, - 3.0},
+ * { 500.0, 20.0, 12.0},
+ * {1503.0, 2.0, -20.0},
+ * {6000.0, 1000.0, 6.0},
+ * {3000.0, 120.0, 2.0}
+ * };
+ *
+ * ...
+ *
+ * equalizer = gst_element_factory_make ("equalizer-nbands", "equalizer");
+ * g_object_set (G_OBJECT (equalizer), "num-bands", 5, NULL);
+ *
+ * ...
+ *
+ * for (i = 0; i &lt; 5; i++) {
+ * band = gst_child_proxy_get_child_by_index (GST_CHILD_PROXY (equalizer), i);
+ * g_object_set (G_OBJECT (band), "freq", state[i].freq,
+ * "bandwidth", state[i].width,
+ * "gain", state[i].gain);
+ * g_object_unref (G_OBJECT (band));
+ * }
+ *
+ * ...
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstiirequalizer.h"
+#include "gstiirequalizernbands.h"
+
+
+/* GObject property IDs */
+enum
+{
+  PROP_NUM_BANDS = 1
+};
+
+static void gst_iir_equalizer_nbands_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_iir_equalizer_nbands_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+GST_DEBUG_CATEGORY_EXTERN (equalizer_debug);
+#define GST_CAT_DEFAULT equalizer_debug
+
+#define gst_iir_equalizer_nbands_parent_class parent_class
+G_DEFINE_TYPE (GstIirEqualizerNBands, gst_iir_equalizer_nbands,
+ GST_TYPE_IIR_EQUALIZER);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (equalizer_nbands, "equalizer-nbands",
+ GST_RANK_NONE, GST_TYPE_IIR_EQUALIZER_NBANDS,
+ equalizer_element_init (plugin));
+/* equalizer implementation */
+
+/* Class init: wire up property accessors, register the num-bands
+ * property and set the element metadata. */
+static void
+gst_iir_equalizer_nbands_class_init (GstIirEqualizerNBandsClass * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  object_class->set_property = gst_iir_equalizer_nbands_set_property;
+  object_class->get_property = gst_iir_equalizer_nbands_get_property;
+
+  gst_element_class_set_static_metadata (element_class, "N Band Equalizer",
+      "Filter/Effect/Audio",
+      "Direct Form IIR equalizer",
+      "Benjamin Otte <otte@gnome.org>," " Stefan Kost <ensonic@users.sf.net>");
+
+  /* num-bands is G_PARAM_CONSTRUCT, so the default (10) is pushed through
+   * set_property at construction time. */
+  g_object_class_install_property (object_class, PROP_NUM_BANDS,
+      g_param_spec_uint ("num-bands", "num-bands",
+          "number of different bands to use", 1, 64, 10,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_CONSTRUCT));
+}
+
+/* Instance init: start with the default band count; the construct-time
+ * num-bands property assignment may recompute this right afterwards. */
+static void
+gst_iir_equalizer_nbands_init (GstIirEqualizerNBands * self)
+{
+  gst_iir_equalizer_compute_frequencies (GST_IIR_EQUALIZER (self), 10);
+}
+
+/* Property setter: changing num-bands recomputes the band frequencies. */
+static void
+gst_iir_equalizer_nbands_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstIirEqualizer *eq = GST_IIR_EQUALIZER (object);
+
+  if (prop_id == PROP_NUM_BANDS)
+    gst_iir_equalizer_compute_frequencies (eq, g_value_get_uint (value));
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* Property getter: num-bands reflects the equalizer's current band count. */
+static void
+gst_iir_equalizer_nbands_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstIirEqualizer *eq = GST_IIR_EQUALIZER (object);
+
+  if (prop_id == PROP_NUM_BANDS)
+    g_value_set_uint (value, eq->freq_band_count);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/gst/equalizer/gstiirequalizernbands.h b/gst/equalizer/gstiirequalizernbands.h
new file mode 100644
index 0000000000..c119a1ff1a
--- /dev/null
+++ b/gst/equalizer/gstiirequalizernbands.h
@@ -0,0 +1,52 @@
+/* GStreamer
+ * Copyright (C) <2004> Benjamin Otte <otte@gnome.org>
+ * <2007> Stefan Kost <ensonic@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_IIR_EQUALIZER_NBANDS__
+#define __GST_IIR_EQUALIZER_NBANDS__
+
+#include "gstiirequalizer.h"
+
+/* Configurable n-band variant of the IIR equalizer.  The instance and
+ * class structures add no members of their own; all state lives in the
+ * base GstIirEqualizer type. */
+typedef struct _GstIirEqualizerNBands GstIirEqualizerNBands;
+typedef struct _GstIirEqualizerNBandsClass GstIirEqualizerNBandsClass;
+
+/* Standard GObject cast and type-check macros */
+#define GST_TYPE_IIR_EQUALIZER_NBANDS \
+  (gst_iir_equalizer_nbands_get_type())
+#define GST_IIR_EQUALIZER_NBANDS(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_IIR_EQUALIZER_NBANDS,GstIirEqualizerNBands))
+#define GST_IIR_EQUALIZER_NBANDS_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_IIR_EQUALIZER_NBANDS,GstIirEqualizerNBandsClass))
+#define GST_IS_IIR_EQUALIZER_NBANDS(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_IIR_EQUALIZER_NBANDS))
+#define GST_IS_IIR_EQUALIZER_NBANDS_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IIR_EQUALIZER_NBANDS))
+
+struct _GstIirEqualizerNBands
+{
+  GstIirEqualizer equalizer;    /* parent instance; no extra fields */
+};
+
+struct _GstIirEqualizerNBandsClass
+{
+  GstIirEqualizerClass equalizer_class; /* parent class; no extra fields */
+};
+
+extern GType gst_iir_equalizer_nbands_get_type(void);
+
+#endif /* __GST_IIR_EQUALIZER_NBANDS__ */
diff --git a/gst/equalizer/gstiirequalizerplugin.c b/gst/equalizer/gstiirequalizerplugin.c
new file mode 100644
index 0000000000..74af309620
--- /dev/null
+++ b/gst/equalizer/gstiirequalizerplugin.c
@@ -0,0 +1,45 @@
+/* GStreamer
+ * Copyright (C) <2004> Benjamin Otte <otte@gnome.org>
+ * <2007> Stefan Kost <ensonic@users.sf.net>
+ * <2007> Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstiirequalizer.h"
+
+
+/* Register all equalizer elements; succeed if at least one registered. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean registered;
+
+  registered = GST_ELEMENT_REGISTER (equalizer_nbands, plugin);
+  registered = GST_ELEMENT_REGISTER (equalizer_3bands, plugin) || registered;
+  registered = GST_ELEMENT_REGISTER (equalizer_10bands, plugin) || registered;
+
+  return registered;
+}
+
+/* Plugin descriptor for the "equalizer" plugin; plugin_init above
+ * registers the individual elements. */
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    equalizer,
+    "GStreamer audio equalizers",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/equalizer/meson.build b/gst/equalizer/meson.build
new file mode 100644
index 0000000000..ef796f830e
--- /dev/null
+++ b/gst/equalizer/meson.build
@@ -0,0 +1,22 @@
+# Each source must be listed exactly once; 'gstiirequalizerplugin.c' was
+# previously duplicated, which would compile/link it twice.
+eq_sources = [
+  'gstiirequalizer.c',
+  'gstiirequalizerplugin.c',
+  'gstiirequalizernbands.c',
+  'gstiirequalizer3bands.c',
+  'gstiirequalizer10bands.c',
+]
+
+gstequalizer = library('gstequalizer',
+  eq_sources,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc, libsinc],
+  dependencies : [gstbase_dep, gstaudio_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstequalizer, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstequalizer]
+
+install_data(sources: ['GstIirEqualizer3Bands.prs',
+    'GstIirEqualizer10Bands.prs'], install_dir: presetdir)
diff --git a/gst/flv/amfdefs.h b/gst/flv/amfdefs.h
new file mode 100644
index 0000000000..d1e772539c
--- /dev/null
+++ b/gst/flv/amfdefs.h
@@ -0,0 +1,44 @@
+/* GStreamer
+ *
+ * Copyright (c) 2011 Jan Schmidt <thaytan@noraisin.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __AMFDEFS_H__
+#define __AMFDEFS_H__
+
+#include <glib.h>
+
+/* AMF0 type marker constants (one-byte type tags). */
+#define AMF0_NUMBER_MARKER 0x0
+#define AMF0_BOOLEAN_MARKER 0x1
+#define AMF0_STRING_MARKER 0x2
+#define AMF0_OBJECT_MARKER 0x3
+#define AMF0_MOVIECLIP_MARKER 0x4       /* Reserved, not supported */
+#define AMF0_NULL_MARKER 0x5
+#define AMF0_UNDEFINED_MARKER 0x6
+#define AMF0_REFERENCE_MARKER 0x7
+#define AMF0_ECMA_ARRAY_MARKER 0x8
+#define AMF0_OBJECT_END_MARKER 0x9
+#define AMF0_STRICT_ARRAY_MARKER 0xA
+#define AMF0_DATE_MARKER 0xB
+#define AMF0_LONG_STRING_MARKER 0xC
+#define AMF0_UNSUPPORTED_MARKER 0xD
+#define AMF0_RECORDSET_MARKER 0xE       /* Reserved, not supported */
+#define AMF0_XML_DOCUMENT_MARKER 0xF
+#define AMF0_TYPED_OBJECT_MARKER 0x10
+
+#endif
diff --git a/gst/flv/gstflvdemux.c b/gst/flv/gstflvdemux.c
new file mode 100644
index 0000000000..51dee816dc
--- /dev/null
+++ b/gst/flv/gstflvdemux.c
@@ -0,0 +1,3837 @@
+/* GStreamer
+ * Copyright (C) <2007> Julien Moutte <julien@moutte.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-flvdemux
+ * @title: flvdemux
+ *
+ * flvdemux demuxes an FLV file into the different contained streams.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/flv ! flvdemux ! audioconvert ! autoaudiosink
+ * ]| This pipeline demuxes an FLV file and outputs the contained raw audio streams.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstflvelements.h"
+#include "gstflvdemux.h"
+#include "gstflvmux.h"
+
+#include <string.h>
+#include <stdio.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/base/gstbytewriter.h>
+#include <gst/pbutils/descriptions.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/audio/audio.h>
+#include <gst/video/video.h>
+#include <gst/tag/tag.h>
+
+/* FIXME: don't rely on own GstIndex */
+#include "gstindex.c"
+#include "gstmemindex.c"
+#define GST_ASSOCIATION_FLAG_NONE GST_INDEX_ASSOCIATION_FLAG_NONE
+#define GST_ASSOCIATION_FLAG_KEY_UNIT GST_INDEX_ASSOCIATION_FLAG_KEY_UNIT
+#define GST_ASSOCIATION_FLAG_DELTA_UNIT GST_INDEX_ASSOCIATION_FLAG_DELTA_UNIT
+
+static GstStaticPadTemplate flv_sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-flv")
+ );
+
+static GstStaticPadTemplate audio_src_template =
+ GST_STATIC_PAD_TEMPLATE ("audio",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS
+ ("audio/x-adpcm, layout = (string) swf, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
+ "audio/mpeg, mpegversion = (int) 1, layer = (int) 3, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 22050, 44100 }, parsed = (boolean) TRUE; "
+ "audio/mpeg, mpegversion = (int) 4, stream-format = (string) raw, framed = (boolean) TRUE; "
+ "audio/x-nellymoser, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 16000, 22050, 44100 }; "
+ "audio/x-raw, format = (string) { U8, S16LE }, layout = (string) interleaved, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
+ "audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
+ "audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
+ "audio/x-speex, channels = (int) 1, rate = (int) 16000;")
+ );
+
+static GstStaticPadTemplate video_src_template =
+ GST_STATIC_PAD_TEMPLATE ("video",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("video/x-flash-video, flvversion=(int) 1; "
+ "video/x-flash-screen; "
+ "video/x-vp6-flash; " "video/x-vp6-alpha; "
+ "video/x-h264, stream-format=avc;")
+ );
+
+#define gst_flv_demux_parent_class parent_class
+G_DEFINE_TYPE (GstFlvDemux, gst_flv_demux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (flvdemux, "flvdemux",
+ GST_RANK_PRIMARY, GST_TYPE_FLV_DEMUX, flv_element_init (plugin));
+
+/* 9 bytes of header + 4 bytes of first previous tag size */
+#define FLV_HEADER_SIZE 13
+/* 1 byte of tag type + 3 bytes of tag data size */
+#define FLV_TAG_TYPE_SIZE 4
+
+/* two seconds - consider dts are resynced to another base if this different */
+#define RESYNC_THRESHOLD 2000
+
+/* how much stream time to wait for audio tags to appear after we have video, or vice versa */
+#define NO_MORE_PADS_THRESHOLD (6 * GST_SECOND)
+
+static gboolean flv_demux_handle_seek_push (GstFlvDemux * demux,
+ GstEvent * event);
+static gboolean gst_flv_demux_handle_seek_pull (GstFlvDemux * demux,
+ GstEvent * event, gboolean seeking);
+
+static gboolean gst_flv_demux_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_flv_demux_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_flv_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+static GstIndex *gst_flv_demux_get_index (GstElement * element);
+
+static void gst_flv_demux_push_tags (GstFlvDemux * demux);
+
+/* Record a TIME<->BYTES association (@ts, @pos) in the element's index.
+ * Does nothing when upstream is not seekable, when no index is available,
+ * or when this byte position has already been indexed.  Also tracks the
+ * highest indexed byte position and timestamp on the demuxer. */
+static void
+gst_flv_demux_parse_and_add_index_entry (GstFlvDemux * demux, GstClockTime ts,
+    guint64 pos, gboolean keyframe)
+{
+  GstIndexAssociation associations[2];
+  GstIndex *index;
+  GstIndexEntry *entry;
+
+  GST_LOG_OBJECT (demux,
+      "adding key=%d association %" GST_TIME_FORMAT "-> %" G_GUINT64_FORMAT,
+      keyframe, GST_TIME_ARGS (ts), pos);
+
+  /* if upstream is not seekable there is no point in building an index */
+  if (!demux->upstream_seekable)
+    return;
+
+  index = gst_flv_demux_get_index (GST_ELEMENT (demux));
+
+  if (!index)
+    return;
+
+  /* entry may already have been added before, avoid adding indefinitely */
+  entry = gst_index_get_assoc_entry (index, demux->index_id,
+      GST_INDEX_LOOKUP_EXACT, GST_ASSOCIATION_FLAG_NONE, GST_FORMAT_BYTES, pos);
+
+  if (entry) {
+#ifndef GST_DISABLE_GST_DEBUG
+    gint64 time = 0;
+    gboolean key;
+
+    gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
+    /* "! !" normalizes the flag test to 0/1 for comparison below */
+    key = ! !(GST_INDEX_ASSOC_FLAGS (entry) & GST_ASSOCIATION_FLAG_KEY_UNIT);
+    GST_LOG_OBJECT (demux, "position already mapped to time %" GST_TIME_FORMAT
+        ", keyframe %d", GST_TIME_ARGS (time), key);
+    /* there is not really a way to delete the existing one */
+    if (time != ts || key != ! !keyframe)
+      GST_DEBUG_OBJECT (demux, "metadata mismatch");
+#endif
+    gst_object_unref (index);
+    return;
+  }
+
+  /* one association entry carrying both the TIME and BYTES values */
+  associations[0].format = GST_FORMAT_TIME;
+  associations[0].value = ts;
+  associations[1].format = GST_FORMAT_BYTES;
+  associations[1].value = pos;
+
+  gst_index_add_associationv (index, demux->index_id,
+      (keyframe) ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+      GST_ASSOCIATION_FLAG_DELTA_UNIT, 2,
+      (const GstIndexAssociation *) &associations);
+
+  /* remember how far the index reaches, in bytes and in time */
+  if (pos > demux->index_max_pos)
+    demux->index_max_pos = pos;
+  if (ts > demux->index_max_time)
+    demux->index_max_time = ts;
+
+  gst_object_unref (index);
+}
+
+/* Read an AMF0-style string from @reader: a 16-bit big-endian length
+ * followed by that many bytes of character data.  Returns a newly
+ * allocated NUL-terminated copy owned by the caller, or NULL on short
+ * data, allocation failure, or (for non-empty strings) invalid UTF-8. */
+static gchar *
+FLV_GET_STRING (GstByteReader * reader)
+{
+  guint16 string_size = 0;
+  gchar *string = NULL;
+  const guint8 *str = NULL;
+
+  g_return_val_if_fail (reader != NULL, NULL);
+
+  if (G_UNLIKELY (!gst_byte_reader_get_uint16_be (reader, &string_size)))
+    return NULL;
+
+  /* the declared length must not exceed the remaining payload */
+  if (G_UNLIKELY (string_size > gst_byte_reader_get_remaining (reader)))
+    return NULL;
+
+  /* +1 for the NUL terminator; zero-filled so it is always terminated */
+  string = g_try_malloc0 (string_size + 1);
+  if (G_UNLIKELY (!string)) {
+    return NULL;
+  }
+
+  if (G_UNLIKELY (!gst_byte_reader_get_data (reader, string_size, &str))) {
+    g_free (string);
+    return NULL;
+  }
+
+  memcpy (string, str, string_size);
+  /* Check utf-8 validity if it's not an empty string */
+  if (string[0] && !g_utf8_validate (string, string_size, NULL)) {
+    g_free (string);
+    return NULL;
+  }
+
+  return string;
+}
+
+/* Probe upstream with a SEEKING query in BYTES format (plus a fall-back
+ * DURATION query for an unknown stop) and cache the verdict in
+ * demux->upstream_seekable.  Upstream is treated as non-seekable when the
+ * query fails or when the reported range looks bogus (non-zero start, or
+ * stop not beyond start). */
+static void
+gst_flv_demux_check_seekability (GstFlvDemux * demux)
+{
+  GstQuery *query;
+  gint64 start = -1, stop = -1;
+
+  demux->upstream_seekable = FALSE;
+
+  query = gst_query_new_seeking (GST_FORMAT_BYTES);
+  if (!gst_pad_peer_query (demux->sinkpad, query)) {
+    GST_DEBUG_OBJECT (demux, "seeking query failed");
+    gst_query_unref (query);
+    return;
+  }
+
+  gst_query_parse_seeking (query, NULL, &demux->upstream_seekable,
+      &start, &stop);
+
+  gst_query_unref (query);
+
+  /* try harder to query upstream size if we didn't get it the first time */
+  if (demux->upstream_seekable && stop == -1) {
+    GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
+    gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &stop);
+  }
+
+  /* if upstream doesn't know the size, it's likely that it's not seekable in
+   * practice even if it technically may be seekable */
+  if (demux->upstream_seekable && (start != 0 || stop <= start)) {
+    GST_DEBUG_OBJECT (demux, "seekable but unknown start/stop -> disable");
+    demux->upstream_seekable = FALSE;
+  }
+
+  GST_DEBUG_OBJECT (demux, "upstream seekable: %d", demux->upstream_seekable);
+}
+
+/* Parse a ctime-style date string such as "Fri Oct 15 15:13:16 2004" into
+ * a GstDateTime.  Returns NULL unless the string splits into exactly five
+ * space-separated tokens with a parsable year.  Time-of-day fields are
+ * only taken over when within valid ranges; otherwise they stay at -1
+ * (unset) while the date is kept.  Caller owns the returned GstDateTime. */
+static GstDateTime *
+parse_flv_demux_parse_date_string (const gchar * s)
+{
+  static const gchar months[12][4] = {
+    "Jan", "Feb", "Mar", "Apr",
+    "May", "Jun", "Jul", "Aug",
+    "Sep", "Oct", "Nov", "Dec"
+  };
+  GstDateTime *dt = NULL;
+  gchar **tokens;
+  guint64 d;
+  gchar *endptr, *stripped;
+  gint i, hh, mm, ss;
+  gint year = -1, month = -1, day = -1;
+  gint hour = -1, minute = -1, seconds = -1;
+
+  stripped = g_strstrip (g_strdup (s));
+
+  /* "Fri Oct 15 15:13:16 2004" needs to be parsed */
+  tokens = g_strsplit (stripped, " ", -1);
+
+  g_free (stripped);
+
+  if (g_strv_length (tokens) != 5)
+    goto out;
+
+  /* year */
+  d = g_ascii_strtoull (tokens[4], &endptr, 10);
+  if (d == 0 && *endptr != '\0')
+    goto out;
+
+  year = d;
+
+  /* month: match against the three-letter English abbreviation */
+  if (strlen (tokens[1]) != 3)
+    goto out;
+  for (i = 0; i < 12; i++) {
+    if (!strcmp (tokens[1], months[i])) {
+      break;
+    }
+  }
+  if (i == 12)
+    goto out;
+
+  month = i + 1;
+
+  /* day */
+  d = g_ascii_strtoull (tokens[2], &endptr, 10);
+  if (d == 0 && *endptr != '\0')
+    goto out;
+
+  day = d;
+
+  /* time: at least hours and minutes must parse */
+  hh = mm = ss = 0;
+  if (sscanf (tokens[3], "%d:%d:%d", &hh, &mm, &ss) < 2)
+    goto out;
+  if (hh >= 0 && hh < 24 && mm >= 0 && mm < 60 && ss >= 0 && ss < 60) {
+    hour = hh;
+    minute = mm;
+    seconds = ss;
+  }
+
+out:
+
+  if (tokens)
+    g_strfreev (tokens);
+
+  /* only build a GstDateTime when a plausible year was found */
+  if (year > 0)
+    dt = gst_date_time_new (0.0, year, month, day, hour, minute, seconds);
+
+  return dt;
+}
+
+static gboolean
+gst_flv_demux_parse_metadata_item (GstFlvDemux * demux, GstByteReader * reader,
+ gboolean * end_marker)
+{
+ gchar *tag_name = NULL;
+ guint8 tag_type = 0;
+
+ /* Initialize the end_marker flag to FALSE */
+ *end_marker = FALSE;
+
+ /* Name of the tag */
+ tag_name = FLV_GET_STRING (reader);
+ if (G_UNLIKELY (!tag_name)) {
+ GST_WARNING_OBJECT (demux, "failed reading tag name");
+ return FALSE;
+ }
+
+ /* What kind of object is that */
+ if (!gst_byte_reader_get_uint8 (reader, &tag_type))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "tag name %s, tag type %d", tag_name, tag_type);
+
+ switch (tag_type) {
+ case 0: /* Double */
+ { /* Use a union to read the uint64 and then as a double */
+ gdouble d = 0;
+
+ if (!gst_byte_reader_get_float64_be (reader, &d))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "%s => (double) %f", tag_name, d);
+
+ if (!strcmp (tag_name, "duration")) {
+ demux->duration = d * GST_SECOND;
+
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_DURATION, demux->duration, NULL);
+ } else if (!strcmp (tag_name, "AspectRatioX")) {
+ demux->par_x = d;
+ demux->got_par = TRUE;
+ } else if (!strcmp (tag_name, "AspectRatioY")) {
+ demux->par_y = d;
+ demux->got_par = TRUE;
+ } else if (!strcmp (tag_name, "width")) {
+ demux->w = d;
+ } else if (!strcmp (tag_name, "height")) {
+ demux->h = d;
+ } else if (!strcmp (tag_name, "framerate")) {
+ demux->framerate = d;
+ } else if (!strcmp (tag_name, "audiodatarate")) {
+ demux->audio_bitrate = (guint) (d * 1024);
+ gst_tag_list_add (demux->audio_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_NOMINAL_BITRATE, demux->audio_bitrate, NULL);
+ } else if (!strcmp (tag_name, "videodatarate")) {
+ demux->video_bitrate = (guint) (d * 1024);
+ gst_tag_list_add (demux->video_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_NOMINAL_BITRATE, demux->video_bitrate, NULL);
+ } else {
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+ }
+
+ break;
+ }
+ case 1: /* Boolean */
+ {
+ guint8 b = 0;
+
+ if (!gst_byte_reader_get_uint8 (reader, &b))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "%s => (boolean) %d", tag_name, b);
+
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+
+ break;
+ }
+ case 2: /* String */
+ {
+ gchar *s = NULL;
+
+ s = FLV_GET_STRING (reader);
+ if (s == NULL)
+ goto error;
+ if (!strcmp (s, "")) {
+ /* Not strictly an error, just an empty string */
+ g_free (s);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "%s => (string) %s", tag_name, s);
+
+ if (!strcmp (tag_name, "creationdate")) {
+ GstDateTime *dt;
+
+ dt = parse_flv_demux_parse_date_string (s);
+ if (dt == NULL) {
+ GST_DEBUG_OBJECT (demux, "Failed to parse '%s' into datetime", s);
+ } else {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_DATE_TIME, dt, NULL);
+ gst_date_time_unref (dt);
+ }
+ } else if (!strcmp (tag_name, "creator")) {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_ARTIST, s, NULL);
+ } else if (!strcmp (tag_name, "title")) {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_TITLE, s, NULL);
+ } else if (!strcmp (tag_name, "metadatacreator")
+ || !strcmp (tag_name, "encoder")) {
+ gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_ENCODER, s, NULL);
+ } else {
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+ }
+
+ g_free (s);
+
+ break;
+ }
+ case 3: /* Object */
+ {
+ gboolean end_of_object_marker = FALSE;
+
+ while (!end_of_object_marker) {
+ gboolean ok = gst_flv_demux_parse_metadata_item (demux, reader,
+ &end_of_object_marker);
+
+ if (G_UNLIKELY (!ok)) {
+ GST_WARNING_OBJECT (demux, "failed reading a tag, skipping");
+ goto error;
+ }
+ }
+
+ break;
+ }
+ case 8: /* ECMA array */
+ {
+ guint32 nb_elems = 0;
+ gboolean end_of_object_marker = FALSE;
+
+ if (!gst_byte_reader_get_uint32_be (reader, &nb_elems))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "there are approx. %d elements in the array",
+ nb_elems);
+
+ while (!end_of_object_marker) {
+ gboolean ok = gst_flv_demux_parse_metadata_item (demux, reader,
+ &end_of_object_marker);
+
+ if (G_UNLIKELY (!ok)) {
+ GST_WARNING_OBJECT (demux, "failed reading a tag, skipping");
+ goto error;
+ }
+ }
+
+ break;
+ }
+ case 9: /* End marker */
+ {
+ GST_DEBUG_OBJECT (demux, "end marker ?");
+ if (tag_name[0] == '\0') {
+
+ GST_DEBUG_OBJECT (demux, "end marker detected");
+
+ *end_marker = TRUE;
+ }
+
+ break;
+ }
+ case 10: /* Array */
+ {
+ guint32 nb_elems = 0;
+
+ if (!gst_byte_reader_get_uint32_be (reader, &nb_elems))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "array has %d elements", nb_elems);
+
+ if (!strcmp (tag_name, "times")) {
+ if (demux->times) {
+ g_array_free (demux->times, TRUE);
+ }
+ demux->times = g_array_new (FALSE, TRUE, sizeof (gdouble));
+ } else if (!strcmp (tag_name, "filepositions")) {
+ if (demux->filepositions) {
+ g_array_free (demux->filepositions, TRUE);
+ }
+ demux->filepositions = g_array_new (FALSE, TRUE, sizeof (gdouble));
+ }
+
+ while (nb_elems--) {
+ guint8 elem_type = 0;
+
+ if (!gst_byte_reader_get_uint8 (reader, &elem_type))
+ goto error;
+
+ switch (elem_type) {
+ case 0:
+ {
+ gdouble d;
+
+ if (!gst_byte_reader_get_float64_be (reader, &d))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux, "element is a double %f", d);
+
+ if (!strcmp (tag_name, "times") && demux->times) {
+ g_array_append_val (demux->times, d);
+ } else if (!strcmp (tag_name, "filepositions") &&
+ demux->filepositions) {
+ g_array_append_val (demux->filepositions, d);
+ }
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported array element type %d",
+ elem_type);
+ }
+ }
+
+ break;
+ }
+ case 11: /* Date */
+ {
+ gdouble d = 0;
+ gint16 i = 0;
+
+ if (!gst_byte_reader_get_float64_be (reader, &d))
+ goto error;
+
+ if (!gst_byte_reader_get_int16_be (reader, &i))
+ goto error;
+
+ GST_DEBUG_OBJECT (demux,
+ "%s => (date as a double) %f, timezone offset %d", tag_name, d, i);
+
+ GST_INFO_OBJECT (demux, "Tag \'%s\' not handled", tag_name);
+
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported tag type %d", tag_type);
+ }
+
+ g_free (tag_name);
+
+ return TRUE;
+
+error:
+ g_free (tag_name);
+
+ return FALSE;
+}
+
/* Reset the global, audio and video tag lists to fresh empty lists.
 * Called before (re)parsing an onMetaData script object so that tags
 * from a previous metadata block are not merged with the new ones. */
static void
gst_flv_demux_clear_tags (GstFlvDemux * demux)
{
  GST_DEBUG_OBJECT (demux, "clearing taglist");

  if (demux->taglist) {
    gst_tag_list_unref (demux->taglist);
  }
  demux->taglist = gst_tag_list_new_empty ();
  /* the main taglist describes the whole stream, not a single pad */
  gst_tag_list_set_scope (demux->taglist, GST_TAG_SCOPE_GLOBAL);

  if (demux->audio_tags) {
    gst_tag_list_unref (demux->audio_tags);
  }
  demux->audio_tags = gst_tag_list_new_empty ();

  if (demux->video_tags) {
    gst_tag_list_unref (demux->video_tags);
  }
  demux->video_tags = gst_tag_list_new_empty ();
}
+
/* Parse an FLV script tag (AMF0 encoded), typically the onMetaData object.
 * @buffer holds the complete tag; the 7 header bytes are skipped before
 * reading the AMF payload.  Metadata items are collected into the demuxer
 * tag lists and pushed downstream; if "times"/"filepositions" arrays were
 * found, they are converted into a seek index.
 * Always returns GST_FLOW_OK: malformed metadata is not treated as fatal. */
static GstFlowReturn
gst_flv_demux_parse_tag_script (GstFlvDemux * demux, GstBuffer * buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstByteReader reader;
  guint8 type = 0;
  GstMapInfo map;

  g_return_val_if_fail (gst_buffer_get_size (buffer) >= 7, GST_FLOW_ERROR);

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  gst_byte_reader_init (&reader, map.data, map.size);

  /* skip the 7-byte tag header; size was checked above */
  gst_byte_reader_skip_unchecked (&reader, 7);

  GST_LOG_OBJECT (demux, "parsing a script tag");

  if (!gst_byte_reader_get_uint8 (&reader, &type))
    goto cleanup;

  /* Must be string */
  if (type == 2) {
    gchar *function_name;
    guint i;

    function_name = FLV_GET_STRING (&reader);

    GST_LOG_OBJECT (demux, "function name is %s", GST_STR_NULL (function_name));

    if (function_name != NULL && strcmp (function_name, "onMetaData") == 0) {
      gboolean end_marker = FALSE;
      GST_DEBUG_OBJECT (demux, "we have a metadata script object");

      /* drop any tags collected from a previous metadata object */
      gst_flv_demux_clear_tags (demux);

      if (!gst_byte_reader_get_uint8 (&reader, &type)) {
        g_free (function_name);
        goto cleanup;
      }

      switch (type) {
        case 8:
        {
          guint32 nb_elems = 0;

          /* ECMA array */
          if (!gst_byte_reader_get_uint32_be (&reader, &nb_elems)) {
            g_free (function_name);
            goto cleanup;
          }

          /* The number of elements is just a hint, some files have
             nb_elements == 0 and actually contain items. */
          GST_DEBUG_OBJECT (demux, "there are approx. %d elements in the array",
              nb_elems);
        }
          /* fallthrough to read data */
        case 3:
        {
          /* Object: read key/value items until the end-of-object marker */
          while (!end_marker) {
            gboolean ok =
                gst_flv_demux_parse_metadata_item (demux, &reader, &end_marker);

            if (G_UNLIKELY (!ok)) {
              GST_WARNING_OBJECT (demux, "failed reading a tag, skipping");
              break;
            }
          }
        }
          break;
        default:
          GST_DEBUG_OBJECT (demux, "Unhandled script data type : %d", type);
          g_free (function_name);
          goto cleanup;
      }

      gst_flv_demux_push_tags (demux);
    }

    g_free (function_name);

    if (demux->times && demux->filepositions) {
      guint num;

      /* If an index was found, insert associations */
      num = MIN (demux->times->len, demux->filepositions->len);
      for (i = 0; i < num; i++) {
        guint64 time, fileposition;

        /* times are stored in seconds (as doubles), convert to GstClockTime */
        time = g_array_index (demux->times, gdouble, i) * GST_SECOND;
        fileposition = g_array_index (demux->filepositions, gdouble, i);
        gst_flv_demux_parse_and_add_index_entry (demux, time, fileposition,
            TRUE);
      }
      demux->indexed = TRUE;
    }
  }

cleanup:
  gst_buffer_unmap (buffer, &map);

  return ret;
}
+
/* Determine the group id to put on outgoing stream-start events.
 * Prefer the group id carried by the upstream STREAM_START sticky event;
 * if upstream provided none and we have not allocated one yet, generate
 * a fresh one.  Returns TRUE when demux->group_id is usable. */
static gboolean
have_group_id (GstFlvDemux * demux)
{
  GstEvent *event;

  event = gst_pad_get_sticky_event (demux->sinkpad, GST_EVENT_STREAM_START, 0);
  if (event) {
    if (gst_event_parse_group_id (event, &demux->group_id))
      demux->have_group_id = TRUE;
    else
      demux->have_group_id = FALSE;
    gst_event_unref (event);
  } else if (!demux->have_group_id) {
    /* no upstream stream-start yet: make up our own group id once */
    demux->have_group_id = TRUE;
    demux->group_id = gst_util_group_id_next ();
  }

  return demux->have_group_id;
}
+
+static gboolean
+gst_flv_demux_audio_negotiate (GstFlvDemux * demux, guint32 codec_tag,
+ guint32 rate, guint32 channels, guint32 width)
+{
+ GstCaps *caps = NULL, *old_caps;
+ gboolean ret = FALSE;
+ guint adjusted_rate = rate;
+ guint adjusted_channels = channels;
+ GstEvent *event;
+ gchar *stream_id;
+
+ switch (codec_tag) {
+ case 1:
+ caps = gst_caps_new_simple ("audio/x-adpcm", "layout", G_TYPE_STRING,
+ "swf", NULL);
+ break;
+ case 2:
+ case 14:
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, 3,
+ "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case 0:
+ case 3:
+ {
+ GstAudioFormat format;
+
+ /* Assuming little endian for 0 (aka endianness of the
+ * system on which the file was created) as most people
+ * are probably using little endian machines */
+ format = gst_audio_format_build_integer ((width == 8) ? FALSE : TRUE,
+ G_LITTLE_ENDIAN, width, width);
+
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved", NULL);
+ break;
+ }
+ case 4:
+ case 5:
+ case 6:
+ caps = gst_caps_new_empty_simple ("audio/x-nellymoser");
+ break;
+ case 10:
+ {
+ GstMapInfo map;
+ if (!demux->audio_codec_data) {
+ GST_DEBUG_OBJECT (demux, "don't have AAC codec data yet");
+ ret = TRUE;
+ goto done;
+ }
+
+ gst_buffer_map (demux->audio_codec_data, &map, GST_MAP_READ);
+
+ /* use codec-data to extract and verify samplerate */
+ if (map.size >= 2) {
+ gint freq_index;
+
+ freq_index = GST_READ_UINT16_BE (map.data);
+ freq_index = (freq_index & 0x0780) >> 7;
+ adjusted_rate =
+ gst_codec_utils_aac_get_sample_rate_from_index (freq_index);
+
+ if (adjusted_rate && (rate != adjusted_rate)) {
+ GST_LOG_OBJECT (demux, "Ajusting AAC sample rate %d -> %d", rate,
+ adjusted_rate);
+ } else {
+ adjusted_rate = rate;
+ }
+
+ adjusted_channels =
+ gst_codec_utils_aac_get_channels (map.data, map.size);
+
+ if (adjusted_channels && (channels != adjusted_channels)) {
+ GST_LOG_OBJECT (demux, "Ajusting AAC channels %d -> %d", channels,
+ adjusted_channels);
+ } else {
+ adjusted_channels = channels;
+ }
+ }
+ gst_buffer_unmap (demux->audio_codec_data, &map);
+
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 4, "framed", G_TYPE_BOOLEAN, TRUE,
+ "stream-format", G_TYPE_STRING, "raw", NULL);
+ break;
+ }
+ case 7:
+ caps = gst_caps_new_empty_simple ("audio/x-alaw");
+ break;
+ case 8:
+ caps = gst_caps_new_empty_simple ("audio/x-mulaw");
+ break;
+ case 11:
+ {
+ GValue streamheader = G_VALUE_INIT;
+ GValue value = G_VALUE_INIT;
+ GstByteWriter w;
+ GstStructure *structure;
+ GstBuffer *buf;
+ GstTagList *tags;
+
+ caps = gst_caps_new_empty_simple ("audio/x-speex");
+ structure = gst_caps_get_structure (caps, 0);
+
+ GST_DEBUG_OBJECT (demux, "generating speex header");
+
+ /* Speex decoder expects streamheader to be { [header], [comment] } */
+ g_value_init (&streamheader, GST_TYPE_ARRAY);
+
+ /* header part */
+ gst_byte_writer_init_with_size (&w, 80, TRUE);
+ gst_byte_writer_put_data (&w, (guint8 *) "Speex ", 8);
+ gst_byte_writer_put_data (&w, (guint8 *) "1.1.12", 7);
+ gst_byte_writer_fill (&w, 0, 13);
+ gst_byte_writer_put_uint32_le (&w, 1); /* version */
+ gst_byte_writer_put_uint32_le (&w, 80); /* header_size */
+ gst_byte_writer_put_uint32_le (&w, 16000); /* rate */
+ gst_byte_writer_put_uint32_le (&w, 1); /* mode: Wideband */
+ gst_byte_writer_put_uint32_le (&w, 4); /* mode_bitstream_version */
+ gst_byte_writer_put_uint32_le (&w, 1); /* nb_channels: 1 */
+ gst_byte_writer_put_uint32_le (&w, -1); /* bitrate */
+ gst_byte_writer_put_uint32_le (&w, 0x50); /* frame_size */
+ gst_byte_writer_put_uint32_le (&w, 0); /* VBR */
+ gst_byte_writer_put_uint32_le (&w, 1); /* frames_per_packet */
+ gst_byte_writer_put_uint32_le (&w, 0); /* extra_headers */
+ gst_byte_writer_put_uint32_le (&w, 0); /* reserved1 */
+ gst_byte_writer_put_uint32_le (&w, 0); /* reserved2 */
+ g_assert (gst_byte_writer_get_size (&w) == 80);
+
+ g_value_init (&value, GST_TYPE_BUFFER);
+ g_value_take_boxed (&value, gst_byte_writer_reset_and_get_buffer (&w));
+ gst_value_array_append_value (&streamheader, &value);
+ g_value_unset (&value);
+
+ /* comment part */
+ g_value_init (&value, GST_TYPE_BUFFER);
+ tags = gst_tag_list_new_empty ();
+ buf = gst_tag_list_to_vorbiscomment_buffer (tags, NULL, 0, "No comments");
+ gst_tag_list_unref (tags);
+ g_value_take_boxed (&value, buf);
+ gst_value_array_append_value (&streamheader, &value);
+ g_value_unset (&value);
+
+ gst_structure_take_value (structure, "streamheader", &streamheader);
+
+ channels = 1;
+ adjusted_rate = 16000;
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported audio codec tag %u", codec_tag);
+ break;
+ }
+
+ if (G_UNLIKELY (!caps)) {
+ GST_WARNING_OBJECT (demux, "failed creating caps for audio pad");
+ goto beach;
+ }
+
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, adjusted_rate,
+ "channels", G_TYPE_INT, adjusted_channels, NULL);
+
+ if (demux->audio_codec_data) {
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER,
+ demux->audio_codec_data, NULL);
+ }
+
+ old_caps = gst_pad_get_current_caps (demux->audio_pad);
+ if (!old_caps) {
+ stream_id =
+ gst_pad_create_stream_id (demux->audio_pad, GST_ELEMENT_CAST (demux),
+ "audio");
+
+ event = gst_event_new_stream_start (stream_id);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ if (have_group_id (demux))
+ gst_event_set_group_id (event, demux->group_id);
+ gst_pad_push_event (demux->audio_pad, event);
+ g_free (stream_id);
+ }
+ if (!old_caps || !gst_caps_is_equal (old_caps, caps))
+ ret = gst_pad_set_caps (demux->audio_pad, caps);
+ else
+ ret = TRUE;
+
+ if (old_caps)
+ gst_caps_unref (old_caps);
+
+done:
+ if (G_LIKELY (ret)) {
+ /* Store the caps we got from tags */
+ demux->audio_codec_tag = codec_tag;
+ demux->rate = rate;
+ demux->channels = channels;
+ demux->width = width;
+
+ if (caps) {
+ GST_DEBUG_OBJECT (demux->audio_pad, "successfully negotiated caps %"
+ GST_PTR_FORMAT, caps);
+
+ gst_flv_demux_push_tags (demux);
+ } else {
+ GST_DEBUG_OBJECT (demux->audio_pad, "delayed setting caps");
+ }
+ } else {
+ GST_WARNING_OBJECT (demux->audio_pad, "failed negotiating caps %"
+ GST_PTR_FORMAT, caps);
+ }
+
+ if (caps)
+ gst_caps_unref (caps);
+
+beach:
+ return ret;
+}
+
/* Push @event on both source pads (taking an extra ref for each push)
 * and drop the caller's reference.
 * NOTE(review): ret starts at TRUE and is only OR-ed with the push
 * results, so this always returns TRUE no matter what the pads did --
 * this looks like it was meant to aggregate with AND (or start FALSE);
 * confirm the intended semantics before relying on the return value. */
static gboolean
gst_flv_demux_push_src_event (GstFlvDemux * demux, GstEvent * event)
{
  gboolean ret = TRUE;

  if (demux->audio_pad)
    ret |= gst_pad_push_event (demux->audio_pad, gst_event_ref (event));

  if (demux->video_pad)
    ret |= gst_pad_push_event (demux->video_pad, gst_event_ref (event));

  /* consume the reference passed in by the caller */
  gst_event_unref (event);

  return ret;
}
+
/* Add a codec-description tag (@tag, e.g. GST_TAG_AUDIO_CODEC) to the
 * global taglist, with the human-readable codec name derived from the
 * pad's current caps.  Silently does nothing when @pad is NULL, has no
 * caps yet, or no description is known for the caps. */
static void
gst_flv_demux_add_codec_tag (GstFlvDemux * demux, const gchar * tag,
    GstPad * pad)
{
  if (pad) {
    GstCaps *caps = gst_pad_get_current_caps (pad);

    if (caps) {
      gchar *codec_name = gst_pb_utils_get_codec_description (caps);

      if (codec_name) {
        gst_tag_list_add (demux->taglist, GST_TAG_MERGE_REPLACE,
            tag, codec_name, NULL);
        g_free (codec_name);
      }

      gst_caps_unref (caps);
    }
  }
}
+
/* Push the collected tag lists downstream: the global taglist goes to
 * both pads, then the per-stream audio/video tag lists go to their
 * respective pads.  Codec-description tags are refreshed from the pads'
 * current caps first.  Each tag event inherits the segment seqnum when
 * one is set. */
static void
gst_flv_demux_push_tags (GstFlvDemux * demux)
{
  GstEvent *tag_event;

  gst_flv_demux_add_codec_tag (demux, GST_TAG_AUDIO_CODEC, demux->audio_pad);
  gst_flv_demux_add_codec_tag (demux, GST_TAG_VIDEO_CODEC, demux->video_pad);

  GST_DEBUG_OBJECT (demux, "pushing %" GST_PTR_FORMAT, demux->taglist);

  /* the event takes ownership, so push a copy and keep our lists */
  tag_event = gst_event_new_tag (gst_tag_list_copy (demux->taglist));
  if (demux->segment_seqnum != GST_SEQNUM_INVALID)
    gst_event_set_seqnum (tag_event, demux->segment_seqnum);
  gst_flv_demux_push_src_event (demux, tag_event);

  if (demux->audio_pad) {
    GST_DEBUG_OBJECT (demux->audio_pad, "pushing audio %" GST_PTR_FORMAT,
        demux->audio_tags);
    tag_event = gst_event_new_tag (gst_tag_list_copy (demux->audio_tags));
    if (demux->segment_seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (tag_event, demux->segment_seqnum);
    gst_pad_push_event (demux->audio_pad, tag_event);
  }

  if (demux->video_pad) {
    GST_DEBUG_OBJECT (demux->video_pad, "pushing video %" GST_PTR_FORMAT,
        demux->video_tags);
    tag_event = gst_event_new_tag (gst_tag_list_copy (demux->video_tags));
    if (demux->segment_seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (tag_event, demux->segment_seqnum);
    gst_pad_push_event (demux->video_pad, tag_event);
  }
}
+
/* Detect large backwards jumps in the 32-bit millisecond timestamps and
 * treat them as a stream resync.
 * @dts: current timestamp (ms); @last: in/out, previous timestamp (ms);
 * @offset: in/out, running GstClockTime correction applied to outgoing
 * buffers.  The delta is computed in signed 32-bit so timestamp
 * wraparound folds into a small signed difference.  When the timestamp
 * jumps back by at least RESYNC_THRESHOLD ms (and no discont explains
 * it), the (negative) delta is folded into *offset so output timestamps
 * stay monotonic.  Returns TRUE when a resync was detected. */
static gboolean
gst_flv_demux_update_resync (GstFlvDemux * demux, guint32 dts, gboolean discont,
    guint32 * last, GstClockTime * offset)
{
  gboolean ret = FALSE;
  gint32 ddts = dts - *last;
  if (!discont && ddts <= -RESYNC_THRESHOLD) {
    /* Theoretically, we should use subtract the duration of the last buffer,
       but this demuxer sends no durations on buffers, not sure if it cannot
       know, or just does not care to calculate. */
    /* ddts is negative here, so this increases *offset */
    *offset -= ddts * GST_MSECOND;
    GST_WARNING_OBJECT (demux,
        "Large dts gap (%" G_GINT32_FORMAT " ms), assuming resync, offset now %"
        GST_TIME_FORMAT "", ddts, GST_TIME_ARGS (*offset));

    ret = TRUE;
  }
  *last = dts;

  return ret;
}
+
/* Keep the audio and video streams roughly in sync.
 * If either stream has fallen more than 3 seconds behind the current
 * segment position (i.e. behind the other stream), advance it by
 * pushing a GAP event covering the missing interval (up to 3 seconds
 * before the segment position) and update its last-timestamp bookkeeping
 * accordingly. */
static void
gst_flv_demux_sync_streams (GstFlvDemux * demux)
{
  /* Check if the audio or video stream are more than 3s behind the other
   * stream, and if so send a gap event accordingly */

  if (demux->audio_pad && GST_CLOCK_TIME_IS_VALID (demux->segment.position) &&
      demux->last_audio_pts * GST_MSECOND + demux->audio_time_offset +
      3 * GST_SECOND < demux->segment.position) {
    GstEvent *event;
    guint64 start =
        demux->last_audio_pts * GST_MSECOND + demux->audio_time_offset;
    guint64 stop = demux->segment.position - 3 * GST_SECOND;

    GST_DEBUG_OBJECT (demux,
        "Synchronizing audio stream with video stream by advancing time from %"
        GST_TIME_FORMAT " to %" GST_TIME_FORMAT, GST_TIME_ARGS (start),
        GST_TIME_ARGS (stop));

    /* record the advanced position back in milliseconds */
    demux->last_audio_pts = (stop - demux->audio_time_offset) / GST_MSECOND;

    event = gst_event_new_gap (start, stop - start);
    if (demux->segment_seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (event, demux->segment_seqnum);
    gst_pad_push_event (demux->audio_pad, event);
  }

  if (demux->video_pad && GST_CLOCK_TIME_IS_VALID (demux->segment.position) &&
      demux->last_video_dts * GST_MSECOND + demux->video_time_offset +
      3 * GST_SECOND < demux->segment.position) {
    GstEvent *event;
    guint64 start =
        demux->last_video_dts * GST_MSECOND + demux->video_time_offset;
    guint64 stop = demux->segment.position - 3 * GST_SECOND;

    GST_DEBUG_OBJECT (demux,
        "Synchronizing video stream with audio stream by advancing time from %"
        GST_TIME_FORMAT " to %" GST_TIME_FORMAT, GST_TIME_ARGS (start),
        GST_TIME_ARGS (stop));

    demux->last_video_dts = (stop - demux->video_time_offset) / GST_MSECOND;

    event = gst_event_new_gap (start, stop - start);
    if (demux->segment_seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (event, demux->segment_seqnum);
    gst_pad_push_event (demux->video_pad, event);
  }
}
+
/* Parse one FLV audio tag from @buffer (which must be exactly
 * demux->tag_size bytes) and push the contained audio frame downstream.
 * Responsibilities, in order: decode the tag header (pts, flags),
 * derive rate/width/channels/codec from the flags, handle AAC codec-data
 * packets, lazily create and negotiate the audio pad, renegotiate on
 * format change, then timestamp and push the payload.  Returns a flow
 * return combined through the flow combiner. */
static GstFlowReturn
gst_flv_demux_parse_tag_audio (GstFlvDemux * demux, GstBuffer * buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  /* defaults per the FLV spec flag encoding: 5512 Hz, 8 bit, mono */
  guint32 pts = 0, codec_tag = 0, rate = 5512, width = 8, channels = 1;
  guint32 codec_data = 0, pts_ext = 0;
  guint8 flags = 0;
  GstMapInfo map;
  GstBuffer *outbuf;
  guint8 *data;

  GST_LOG_OBJECT (demux, "parsing an audio tag");

  /* once no-more-pads was signalled, a late audio stream cannot be added */
  if (G_UNLIKELY (!demux->audio_pad && demux->no_more_pads)) {
#ifndef GST_DISABLE_DEBUG
    /* NOTE(review): this guard uses GST_DISABLE_DEBUG while the block
     * further down uses GST_DISABLE_GST_DEBUG -- confirm which macro is
     * intended; they are not the same symbol. */
    if (G_UNLIKELY (!demux->no_audio_warned)) {
      GST_WARNING_OBJECT (demux,
          "Signaled no-more-pads already but had no audio pad -- ignoring");
      demux->no_audio_warned = TRUE;
    }
#endif
    return GST_FLOW_OK;
  }

  g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
      GST_FLOW_ERROR);

  /* Error out on tags with too small headers */
  if (gst_buffer_get_size (buffer) < 11) {
    GST_ERROR_OBJECT (demux, "Too small tag size (%" G_GSIZE_FORMAT ")",
        gst_buffer_get_size (buffer));
    return GST_FLOW_ERROR;
  }

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  data = map.data;

  /* Grab information about audio tag */
  pts = GST_READ_UINT24_BE (data);
  /* read the pts extension to 32 bits integer */
  pts_ext = GST_READ_UINT8 (data + 3);
  /* Combine them */
  pts |= pts_ext << 24;

  GST_LOG_OBJECT (demux, "pts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
      data[2], data[3], pts);

  /* Skip the stream id and go directly to the flags */
  flags = GST_READ_UINT8 (data + 7);

  /* Silently skip buffers with no data */
  if (map.size == 11)
    goto beach;

  /* Channels */
  if (flags & 0x01) {
    channels = 2;
  }
  /* Width */
  if (flags & 0x02) {
    width = 16;
  }
  /* Sampling rate */
  if ((flags & 0x0C) == 0x0C) {
    rate = 44100;
  } else if ((flags & 0x0C) == 0x08) {
    rate = 22050;
  } else if ((flags & 0x0C) == 0x04) {
    rate = 11025;
  }
  /* Codec tag */
  codec_tag = flags >> 4;
  /* codec_data is the number of codec header bytes preceding the payload */
  if (codec_tag == 10) {        /* AAC has an extra byte for packet type */
    codec_data = 2;
  } else {
    codec_data = 1;
  }

  /* codec tags with special rates */
  if (codec_tag == 5 || codec_tag == 14 || codec_tag == 7 || codec_tag == 8)
    rate = 8000;
  else if ((codec_tag == 4) || (codec_tag == 11))
    rate = 16000;

  GST_LOG_OBJECT (demux, "audio tag with %d channels, %dHz sampling rate, "
      "%d bits width, codec tag %u (flags %02X)", channels, rate, width,
      codec_tag, flags);

  if (codec_tag == 10) {
    guint8 aac_packet_type = GST_READ_UINT8 (data + 8);

    switch (aac_packet_type) {
      case 0:
      {
        /* AudioSpecificConfig data */
        GST_LOG_OBJECT (demux, "got an AAC codec data packet");
        if (demux->audio_codec_data) {
          gst_buffer_unref (demux->audio_codec_data);
        }
        demux->audio_codec_data =
            gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
            7 + codec_data, demux->tag_data_size - codec_data);

        /* Use that buffer data in the caps */
        if (demux->audio_pad)
          gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
              width);
        /* codec-data packets carry no audio payload, nothing to push */
        goto beach;
      }
      case 1:
        if (!demux->audio_codec_data) {
          GST_ERROR_OBJECT (demux, "got AAC audio packet before codec data");
          ret = GST_FLOW_OK;
          goto beach;
        }
        /* AAC raw packet */
        GST_LOG_OBJECT (demux, "got a raw AAC audio packet");
        break;
      default:
        GST_WARNING_OBJECT (demux, "invalid AAC packet type %u",
            aac_packet_type);
    }
  }

  /* If we don't have our audio pad created, then create it. */
  if (G_UNLIKELY (!demux->audio_pad)) {
    demux->audio_pad =
        gst_pad_new_from_template (gst_element_class_get_pad_template
        (GST_ELEMENT_GET_CLASS (demux), "audio"), "audio");
    if (G_UNLIKELY (!demux->audio_pad)) {
      GST_WARNING_OBJECT (demux, "failed creating audio pad");
      ret = GST_FLOW_ERROR;
      goto beach;
    }

    /* Set functions on the pad */
    gst_pad_set_query_function (demux->audio_pad,
        GST_DEBUG_FUNCPTR (gst_flv_demux_query));
    gst_pad_set_event_function (demux->audio_pad,
        GST_DEBUG_FUNCPTR (gst_flv_demux_src_event));

    gst_pad_use_fixed_caps (demux->audio_pad);

    /* Make it active */
    gst_pad_set_active (demux->audio_pad, TRUE);

    /* Negotiate caps */
    if (!gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
            width)) {
      gst_object_unref (demux->audio_pad);
      demux->audio_pad = NULL;
      ret = GST_FLOW_ERROR;
      goto beach;
    }
#ifndef GST_DISABLE_GST_DEBUG
    {
      GstCaps *caps;

      caps = gst_pad_get_current_caps (demux->audio_pad);
      GST_DEBUG_OBJECT (demux, "created audio pad with caps %" GST_PTR_FORMAT,
          caps);
      if (caps)
        gst_caps_unref (caps);
    }
#endif

    /* We need to set caps before adding */
    gst_element_add_pad (GST_ELEMENT (demux),
        gst_object_ref (demux->audio_pad));
    gst_flow_combiner_add_pad (demux->flowcombiner, demux->audio_pad);

    /* We only emit no more pads when we have audio and video. Indeed we can
     * not trust the FLV header to tell us if there will be only audio or
     * only video and we would just break discovery of some files */
    if (demux->audio_pad && demux->video_pad) {
      GST_DEBUG_OBJECT (demux, "emitting no more pads");
      gst_element_no_more_pads (GST_ELEMENT (demux));
      demux->no_more_pads = TRUE;
    }
  }

  /* Check if caps have changed */
  if (G_UNLIKELY (rate != demux->rate || channels != demux->channels ||
          codec_tag != demux->audio_codec_tag || width != demux->width)) {
    GST_DEBUG_OBJECT (demux, "audio settings have changed, changing caps");

    /* old codec data no longer matches the new format */
    gst_buffer_replace (&demux->audio_codec_data, NULL);

    /* Negotiate caps */
    if (!gst_flv_demux_audio_negotiate (demux, codec_tag, rate, channels,
            width)) {
      ret = GST_FLOW_ERROR;
      goto beach;
    }
  }

  /* Check if we have anything to push */
  if (demux->tag_data_size <= codec_data) {
    GST_LOG_OBJECT (demux, "Nothing left in this tag, returning");
    goto beach;
  }

  /* Create buffer from pad */
  outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
      7 + codec_data, demux->tag_data_size - codec_data);

  /* detect (and deem to be resyncs)  large pts gaps */
  if (gst_flv_demux_update_resync (demux, pts, demux->audio_need_discont,
          &demux->last_audio_pts, &demux->audio_time_offset)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
  }

  /* Fill buffer with data */
  GST_BUFFER_PTS (outbuf) = pts * GST_MSECOND + demux->audio_time_offset;
  GST_BUFFER_DTS (outbuf) = GST_BUFFER_PTS (outbuf);
  GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_OFFSET (outbuf) = demux->audio_offset++;
  GST_BUFFER_OFFSET_END (outbuf) = demux->audio_offset;

  /* track the highest timestamp seen as the stream duration */
  if (demux->duration == GST_CLOCK_TIME_NONE ||
      demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
    demux->duration = GST_BUFFER_TIMESTAMP (outbuf);

  /* Only add audio frames to the index if we have no video,
   * and if the index is not yet complete */
  if (!demux->has_video && !demux->indexed) {
    gst_flv_demux_parse_and_add_index_entry (demux,
        GST_BUFFER_TIMESTAMP (outbuf), demux->cur_tag_offset, TRUE);
  }

  if (G_UNLIKELY (demux->audio_need_discont)) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    demux->audio_need_discont = FALSE;
  }

  demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);

  /* Do we need a newsegment event ? */
  if (G_UNLIKELY (demux->audio_need_segment)) {
    if (!demux->new_seg_event) {
      GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
          GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
          GST_TIME_ARGS (demux->segment.position),
          GST_TIME_ARGS (demux->segment.stop));
      demux->segment.start = demux->segment.time = demux->segment.position;
      demux->new_seg_event = gst_event_new_segment (&demux->segment);
      gst_event_set_seqnum (demux->new_seg_event, demux->segment_seqnum);
    } else {
      GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
    }

    gst_pad_push_event (demux->audio_pad, gst_event_ref (demux->new_seg_event));

    demux->audio_need_segment = FALSE;
  }

  GST_LOG_OBJECT (demux,
      "pushing %" G_GSIZE_FORMAT " bytes buffer at pts %" GST_TIME_FORMAT
      " with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT,
      gst_buffer_get_size (outbuf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf));

  if (!GST_CLOCK_TIME_IS_VALID (demux->audio_start)) {
    demux->audio_start = GST_BUFFER_TIMESTAMP (outbuf);
  }
  if (!GST_CLOCK_TIME_IS_VALID (demux->audio_first_ts)) {
    demux->audio_first_ts = GST_BUFFER_TIMESTAMP (outbuf);
  }

  /* give up waiting for a video stream after NO_MORE_PADS_THRESHOLD of
   * audio-only data */
  if (G_UNLIKELY (!demux->no_more_pads
          && (GST_CLOCK_DIFF (demux->audio_start,
                  GST_BUFFER_TIMESTAMP (outbuf)) > NO_MORE_PADS_THRESHOLD))) {
    GST_DEBUG_OBJECT (demux,
        "Signalling no-more-pads because no video stream was found"
        " after 6 seconds of audio");
    gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
    demux->no_more_pads = TRUE;
  }

  /* Push downstream */
  ret = gst_pad_push (demux->audio_pad, outbuf);

  if (G_UNLIKELY (ret != GST_FLOW_OK) &&
      demux->segment.rate < 0.0 && ret == GST_FLOW_EOS &&
      demux->segment.position > demux->segment.stop) {
    /* In reverse playback we can get a GST_FLOW_EOS when
     * we are at the end of the segment, so we just need to jump
     * back to the previous section. */
    GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
    demux->audio_done = TRUE;
    ret = GST_FLOW_OK;
    goto beach;
  }

  ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner,
      demux->audio_pad, ret);

  if (ret == GST_FLOW_OK) {
    gst_flv_demux_sync_streams (demux);
  }

beach:
  gst_buffer_unmap (buffer, &map);

  return ret;
}
+
/* Build caps for the video pad from the FLV codec tag, push stream-start
 * if this is the first negotiation, and set the caps on the pad.
 * Width/height, pixel-aspect-ratio, framerate and codec data gleaned
 * from the metadata are added to the caps when known.  For H.264
 * (tag 7) negotiation is delayed (returning TRUE) until codec data has
 * been received.  On success the codec tag is stored so later tags can
 * detect a codec change. */
static gboolean
gst_flv_demux_video_negotiate (GstFlvDemux * demux, guint32 codec_tag)
{
  gboolean ret = FALSE;
  GstCaps *caps = NULL, *old_caps;
  GstEvent *event;
  gchar *stream_id;

  /* Generate caps for that pad */
  switch (codec_tag) {
    case 2:
      caps =
          gst_caps_new_simple ("video/x-flash-video", "flvversion", G_TYPE_INT,
          1, NULL);
      break;
    case 3:
      caps = gst_caps_new_empty_simple ("video/x-flash-screen");
      break;
    case 4:
      caps = gst_caps_new_empty_simple ("video/x-vp6-flash");
      break;
    case 5:
      caps = gst_caps_new_empty_simple ("video/x-vp6-alpha");
      break;
    case 7:
      if (!demux->video_codec_data) {
        /* caps need the AVCDecoderConfigurationRecord; wait for it and
         * report success so the caller keeps the pad */
        GST_DEBUG_OBJECT (demux, "don't have h264 codec data yet");
        ret = TRUE;
        goto done;
      }
      caps =
          gst_caps_new_simple ("video/x-h264", "stream-format", G_TYPE_STRING,
          "avc", NULL);
      break;
      /* The following two are non-standard but apparently used, see in ffmpeg
       * https://git.videolan.org/?p=ffmpeg.git;a=blob;f=libavformat/flvdec.c;h=2bf1e059e1cbeeb79e4af9542da23f4560e1cf59;hb=b18d6c58000beed872d6bb1fe7d0fbe75ae26aef#l254
       * https://git.videolan.org/?p=ffmpeg.git;a=blob;f=libavformat/flvdec.c;h=2bf1e059e1cbeeb79e4af9542da23f4560e1cf59;hb=b18d6c58000beed872d6bb1fe7d0fbe75ae26aef#l282
       */
    case 8:
      caps = gst_caps_new_empty_simple ("video/x-h263");
      break;
    case 9:
      caps =
          gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
          "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
      break;
    default:
      GST_WARNING_OBJECT (demux, "unsupported video codec tag %u", codec_tag);
  }

  if (G_UNLIKELY (!caps)) {
    GST_WARNING_OBJECT (demux, "failed creating caps for video pad");
    goto beach;
  }

  if (demux->got_par) {
    gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
        demux->par_x, demux->par_y, NULL);
  }

  if (G_LIKELY (demux->w)) {
    gst_caps_set_simple (caps, "width", G_TYPE_INT, demux->w, NULL);
  }

  if (G_LIKELY (demux->h)) {
    gst_caps_set_simple (caps, "height", G_TYPE_INT, demux->h, NULL);
  }

  if (G_LIKELY (demux->framerate)) {
    gint num = 0, den = 0;

    /* metadata stores the framerate as a double; convert to a fraction */
    gst_video_guess_framerate (GST_SECOND / demux->framerate, &num, &den);
    GST_DEBUG_OBJECT (demux->video_pad,
        "fps to be used on caps %f (as a fraction = %d/%d)", demux->framerate,
        num, den);

    gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, num, den, NULL);
  }

  if (demux->video_codec_data) {
    gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER,
        demux->video_codec_data, NULL);
  }

  old_caps = gst_pad_get_current_caps (demux->video_pad);
  if (!old_caps) {
    /* first negotiation on this pad: send stream-start before caps */
    stream_id =
        gst_pad_create_stream_id (demux->video_pad, GST_ELEMENT_CAST (demux),
        "video");
    event = gst_event_new_stream_start (stream_id);
    if (demux->segment_seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (event, demux->segment_seqnum);
    g_free (stream_id);

    if (have_group_id (demux))
      gst_event_set_group_id (event, demux->group_id);
    gst_pad_push_event (demux->video_pad, event);
  }

  /* only renegotiate when the caps actually changed */
  if (!old_caps || !gst_caps_is_equal (old_caps, caps))
    ret = gst_pad_set_caps (demux->video_pad, caps);
  else
    ret = TRUE;

  if (old_caps)
    gst_caps_unref (old_caps);

done:
  if (G_LIKELY (ret)) {
    /* Store the caps we have set */
    demux->video_codec_tag = codec_tag;

    if (caps) {
      GST_DEBUG_OBJECT (demux->video_pad, "successfully negotiated caps %"
          GST_PTR_FORMAT, caps);

      gst_flv_demux_push_tags (demux);
    } else {
      GST_DEBUG_OBJECT (demux->video_pad, "delayed setting caps");
    }
  } else {
    GST_WARNING_OBJECT (demux->video_pad, "failed negotiating caps %"
        GST_PTR_FORMAT, caps);
  }

  if (caps)
    gst_caps_unref (caps);

beach:
  return ret;
}
+
+static GstFlowReturn
+gst_flv_demux_parse_tag_video (GstFlvDemux * demux, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 dts = 0, codec_data = 1, dts_ext = 0;
+ gint32 cts = 0;
+ gboolean keyframe = FALSE;
+ guint8 flags = 0, codec_tag = 0;
+ GstBuffer *outbuf;
+ GstMapInfo map;
+ guint8 *data;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) == demux->tag_size,
+ GST_FLOW_ERROR);
+
+ GST_LOG_OBJECT (demux, "parsing a video tag");
+
+ if G_UNLIKELY
+ (!demux->video_pad && demux->no_more_pads) {
+#ifndef GST_DISABLE_DEBUG
+ if G_UNLIKELY
+ (!demux->no_video_warned) {
+ GST_WARNING_OBJECT (demux,
+ "Signaled no-more-pads already but had no video pad -- ignoring");
+ demux->no_video_warned = TRUE;
+ }
+#endif
+ return GST_FLOW_OK;
+ }
+
+ if (gst_buffer_get_size (buffer) < 12) {
+ GST_ERROR_OBJECT (demux, "Too small tag size");
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+
+ /* Grab information about video tag */
+ dts = GST_READ_UINT24_BE (data);
+ /* read the dts extension to 32 bits integer */
+ dts_ext = GST_READ_UINT8 (data + 3);
+ /* Combine them */
+ dts |= dts_ext << 24;
+
+ GST_LOG_OBJECT (demux, "dts bytes %02X %02X %02X %02X (%d)", data[0], data[1],
+ data[2], data[3], dts);
+
+ /* Skip the stream id and go directly to the flags */
+ flags = GST_READ_UINT8 (data + 7);
+
+ /* Keyframe */
+ if ((flags >> 4) == 1) {
+ keyframe = TRUE;
+ }
+ /* Codec tag */
+ codec_tag = flags & 0x0F;
+ if (codec_tag == 4 || codec_tag == 5) {
+ codec_data = 2;
+ } else if (codec_tag == 7) {
+ codec_data = 5;
+
+ cts = GST_READ_UINT24_BE (data + 9);
+ cts = (cts + 0xff800000) ^ 0xff800000;
+
+ if (cts < 0 && ABS (cts) > dts) {
+ GST_ERROR_OBJECT (demux, "Detected a negative composition time offset "
+ "'%d' that would lead to negative PTS, fixing", cts);
+ cts += ABS (cts) - dts;
+ }
+
+ GST_LOG_OBJECT (demux, "got cts %d", cts);
+ }
+
+ GST_LOG_OBJECT (demux, "video tag with codec tag %u, keyframe (%d) "
+ "(flags %02X)", codec_tag, keyframe, flags);
+
+ if (codec_tag == 7) {
+ guint8 avc_packet_type = GST_READ_UINT8 (data + 8);
+
+ switch (avc_packet_type) {
+ case 0:
+ {
+ if (demux->tag_data_size < codec_data) {
+ GST_ERROR_OBJECT (demux, "Got invalid H.264 codec, ignoring.");
+ break;
+ }
+
+ /* AVCDecoderConfigurationRecord data */
+ GST_LOG_OBJECT (demux, "got an H.264 codec data packet");
+ if (demux->video_codec_data) {
+ gst_buffer_unref (demux->video_codec_data);
+ }
+ demux->video_codec_data = gst_buffer_copy_region (buffer,
+ GST_BUFFER_COPY_MEMORY, 7 + codec_data,
+ demux->tag_data_size - codec_data);;
+ /* Use that buffer data in the caps */
+ if (demux->video_pad)
+ gst_flv_demux_video_negotiate (demux, codec_tag);
+ goto beach;
+ }
+ case 1:
+ /* H.264 NALU packet */
+ if (!demux->video_codec_data) {
+ GST_ERROR_OBJECT (demux, "got H.264 video packet before codec data");
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+ GST_LOG_OBJECT (demux, "got a H.264 NALU video packet");
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "invalid video packet type %u",
+ avc_packet_type);
+ }
+ }
+
+ /* If we don't have our video pad created, then create it. */
+ if (G_UNLIKELY (!demux->video_pad)) {
+ demux->video_pad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (GST_ELEMENT_GET_CLASS (demux), "video"), "video");
+ if (G_UNLIKELY (!demux->video_pad)) {
+ GST_WARNING_OBJECT (demux, "failed creating video pad");
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* Set functions on the pad */
+ gst_pad_set_query_function (demux->video_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_query));
+ gst_pad_set_event_function (demux->video_pad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_src_event));
+
+ gst_pad_use_fixed_caps (demux->video_pad);
+
+ /* Make it active */
+ gst_pad_set_active (demux->video_pad, TRUE);
+
+ /* Needs to be active before setting caps */
+ if (!gst_flv_demux_video_negotiate (demux, codec_tag)) {
+ gst_object_unref (demux->video_pad);
+ demux->video_pad = NULL;
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* When we ve set pixel-aspect-ratio we use that boolean to detect a
+ * metadata tag that would come later and trigger a caps change */
+ demux->got_par = FALSE;
+
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstCaps *caps;
+
+ caps = gst_pad_get_current_caps (demux->video_pad);
+ GST_DEBUG_OBJECT (demux, "created video pad with caps %" GST_PTR_FORMAT,
+ caps);
+ if (caps)
+ gst_caps_unref (caps);
+ }
+#endif
+
+ /* We need to set caps before adding */
+ gst_element_add_pad (GST_ELEMENT (demux),
+ gst_object_ref (demux->video_pad));
+ gst_flow_combiner_add_pad (demux->flowcombiner, demux->video_pad);
+
+ /* We only emit no more pads when we have audio and video. Indeed we can
+ * not trust the FLV header to tell us if there will be only audio or
+ * only video and we would just break discovery of some files */
+ if (demux->audio_pad && demux->video_pad) {
+ GST_DEBUG_OBJECT (demux, "emitting no more pads");
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->no_more_pads = TRUE;
+ }
+ }
+
+ /* Check if caps have changed */
+ if (G_UNLIKELY (codec_tag != demux->video_codec_tag || demux->got_par)) {
+ GST_DEBUG_OBJECT (demux, "video settings have changed, changing caps");
+ if (codec_tag != demux->video_codec_tag)
+ gst_buffer_replace (&demux->video_codec_data, NULL);
+
+ if (!gst_flv_demux_video_negotiate (demux, codec_tag)) {
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+
+ /* When we ve set pixel-aspect-ratio we use that boolean to detect a
+ * metadata tag that would come later and trigger a caps change */
+ demux->got_par = FALSE;
+ }
+
+ /* Check if we have anything to push */
+ if (demux->tag_data_size <= codec_data) {
+ GST_LOG_OBJECT (demux, "Nothing left in this tag, returning");
+ goto beach;
+ }
+
+ /* Create buffer from pad */
+ outbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_MEMORY,
+ 7 + codec_data, demux->tag_data_size - codec_data);
+
+ /* detect (and deem to be resyncs) large dts gaps */
+ if (gst_flv_demux_update_resync (demux, dts, demux->video_need_discont,
+ &demux->last_video_dts, &demux->video_time_offset)) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ /* Fill buffer with data */
+ GST_LOG_OBJECT (demux, "dts %u pts %u cts %d", dts, dts + cts, cts);
+
+ GST_BUFFER_PTS (outbuf) =
+ (dts + cts) * GST_MSECOND + demux->video_time_offset;
+ GST_BUFFER_DTS (outbuf) = dts * GST_MSECOND + demux->video_time_offset;
+ GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_OFFSET (outbuf) = demux->video_offset++;
+ GST_BUFFER_OFFSET_END (outbuf) = demux->video_offset;
+
+ if (demux->duration == GST_CLOCK_TIME_NONE ||
+ demux->duration < GST_BUFFER_TIMESTAMP (outbuf))
+ demux->duration = GST_BUFFER_TIMESTAMP (outbuf);
+
+ if (!keyframe)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (!demux->indexed) {
+ gst_flv_demux_parse_and_add_index_entry (demux,
+ GST_BUFFER_TIMESTAMP (outbuf), demux->cur_tag_offset, keyframe);
+ }
+
+ if (G_UNLIKELY (demux->video_need_discont)) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ demux->video_need_discont = FALSE;
+ }
+
+ demux->segment.position = GST_BUFFER_TIMESTAMP (outbuf);
+
+ /* Do we need a newsegment event ? */
+ if (G_UNLIKELY (demux->video_need_segment)) {
+ if (!demux->new_seg_event) {
+ GST_DEBUG_OBJECT (demux, "pushing newsegment from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.position),
+ GST_TIME_ARGS (demux->segment.stop));
+ demux->segment.start = demux->segment.time = demux->segment.position;
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (demux->new_seg_event, demux->segment_seqnum);
+ } else {
+ GST_DEBUG_OBJECT (demux, "pushing pre-generated newsegment event");
+ }
+
+ gst_pad_push_event (demux->video_pad, gst_event_ref (demux->new_seg_event));
+
+ demux->video_need_segment = FALSE;
+ }
+
+ GST_LOG_OBJECT (demux,
+ "pushing %" G_GSIZE_FORMAT " bytes buffer at dts %" GST_TIME_FORMAT
+ " with duration %" GST_TIME_FORMAT ", offset %" G_GUINT64_FORMAT
+ ", keyframe (%d)", gst_buffer_get_size (outbuf),
+ GST_TIME_ARGS (GST_BUFFER_DTS (outbuf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)), GST_BUFFER_OFFSET (outbuf),
+ keyframe);
+
+ if (!GST_CLOCK_TIME_IS_VALID (demux->video_start)) {
+ demux->video_start = GST_BUFFER_TIMESTAMP (outbuf);
+ }
+ if (!GST_CLOCK_TIME_IS_VALID (demux->audio_first_ts)) {
+ demux->video_first_ts = GST_BUFFER_TIMESTAMP (outbuf);
+ }
+
+ if (G_UNLIKELY (!demux->no_more_pads
+ && (GST_CLOCK_DIFF (demux->video_start,
+ GST_BUFFER_TIMESTAMP (outbuf)) > NO_MORE_PADS_THRESHOLD))) {
+ GST_DEBUG_OBJECT (demux,
+ "Signalling no-more-pads because no audio stream was found"
+ " after 6 seconds of video");
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ /* Push downstream */
+ ret = gst_pad_push (demux->video_pad, outbuf);
+
+ if (G_UNLIKELY (ret != GST_FLOW_OK) &&
+ demux->segment.rate < 0.0 && ret == GST_FLOW_EOS &&
+ demux->segment.position > demux->segment.stop) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
+ * we are at the end of the segment, so we just need to jump
+ * back to the previous section. */
+ GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
+ demux->video_done = TRUE;
+ ret = GST_FLOW_OK;
+ goto beach;
+ }
+
+ ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner,
+ demux->video_pad, ret);
+
+ if (ret == GST_FLOW_OK) {
+ gst_flv_demux_sync_streams (demux);
+ }
+
+beach:
+ gst_buffer_unmap (buffer, &map);
+ return ret;
+}
+
/* Peek the timestamp (and optionally total on-disk size) of the FLV tag in
 * @buffer without consuming it.
 *
 * @buffer must start at the tag type byte and hold at least 12 bytes.  When
 * @index is TRUE and no complete index exists yet, an index entry is added
 * for video tags (or for audio tags when the file has no video stream).
 * @tag_size, when non-NULL, receives the full tag size: 11-byte header +
 * payload + trailing 4-byte previous-tag-size field.
 *
 * Returns the tag timestamp as a GstClockTime, or GST_CLOCK_TIME_NONE for
 * unsupported tag types.  Also bumps demux->duration when this timestamp
 * lies beyond the currently known duration. */
static GstClockTime
gst_flv_demux_parse_tag_timestamp (GstFlvDemux * demux, gboolean index,
    GstBuffer * buffer, size_t * tag_size)
{
  guint32 dts = 0, dts_ext = 0;
  guint32 tag_data_size;
  guint8 type;
  gboolean keyframe = TRUE;
  GstClockTime ret = GST_CLOCK_TIME_NONE;
  GstMapInfo map;
  guint8 *data;
  gsize size;

  g_return_val_if_fail (gst_buffer_get_size (buffer) >= 12,
      GST_CLOCK_TIME_NONE);

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

  type = data[0];

  /* 8 = audio, 9 = video, 18 = script data */
  if (type != 9 && type != 8 && type != 18) {
    GST_WARNING_OBJECT (demux, "Unsupported tag type %u", data[0]);
    goto exit;
  }

  if (type == 9)
    demux->has_video = TRUE;
  else if (type == 8)
    demux->has_audio = TRUE;

  tag_data_size = GST_READ_UINT24_BE (data + 1);

  /* sanity check against the trailing previous-tag-size field, when the
   * whole tag happens to be mapped */
  if (size >= tag_data_size + 11 + 4) {
    if (GST_READ_UINT32_BE (data + tag_data_size + 11) != tag_data_size + 11) {
      GST_WARNING_OBJECT (demux, "Invalid tag size");
      goto exit;
    }
  }

  if (tag_size)
    *tag_size = tag_data_size + 11 + 4;

  /* skip type byte + 24-bit size, point at the timestamp bytes */
  data += 4;

  GST_LOG_OBJECT (demux, "dts bytes %02X %02X %02X %02X", data[0], data[1],
      data[2], data[3]);

  /* Grab timestamp of tag tag */
  dts = GST_READ_UINT24_BE (data);
  /* read the dts extension to 32 bits integer */
  dts_ext = GST_READ_UINT8 (data + 3);
  /* Combine them */
  dts |= dts_ext << 24;

  if (type == 9) {
    /* video: frame type is the high nibble of the first payload byte,
     * 1 == keyframe */
    data += 7;

    keyframe = ((data[0] >> 4) == 1);
  }

  ret = dts * GST_MSECOND;
  GST_LOG_OBJECT (demux, "dts: %" GST_TIME_FORMAT, GST_TIME_ARGS (ret));

  if (index && !demux->indexed && (type == 9 || (type == 8
              && !demux->has_video))) {
    gst_flv_demux_parse_and_add_index_entry (demux, ret, demux->offset,
        keyframe);
  }

  if (demux->duration == GST_CLOCK_TIME_NONE || demux->duration < ret)
    demux->duration = ret;

exit:
  gst_buffer_unmap (buffer, &map);
  return ret;
}
+
+static GstFlowReturn
+gst_flv_demux_parse_tag_type (GstFlvDemux * demux, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint8 tag_type = 0;
+ GstMapInfo map;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 4, GST_FLOW_ERROR);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ tag_type = map.data[0];
+
+ /* Tag size is 1 byte of type + 3 bytes of size + 7 bytes + tag data size +
+ * 4 bytes of previous tag size */
+ demux->tag_data_size = GST_READ_UINT24_BE (map.data + 1);
+ demux->tag_size = demux->tag_data_size + 11;
+
+ GST_LOG_OBJECT (demux, "tag data size is %" G_GUINT64_FORMAT,
+ demux->tag_data_size);
+
+ gst_buffer_unmap (buffer, &map);
+
+ switch (tag_type) {
+ case 9:
+ demux->state = FLV_STATE_TAG_VIDEO;
+ demux->has_video = TRUE;
+ break;
+ case 8:
+ demux->state = FLV_STATE_TAG_AUDIO;
+ demux->has_audio = TRUE;
+ break;
+ case 18:
+ demux->state = FLV_STATE_TAG_SCRIPT;
+ break;
+ default:
+ GST_WARNING_OBJECT (demux, "unsupported tag type %u", tag_type);
+ demux->state = FLV_STATE_SKIP;
+ }
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_flv_demux_parse_header (GstFlvDemux * demux, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstMapInfo map;
+
+ g_return_val_if_fail (gst_buffer_get_size (buffer) >= 9, GST_FLOW_ERROR);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ /* Check for the FLV tag */
+ if (map.data[0] == 'F' && map.data[1] == 'L' && map.data[2] == 'V') {
+ GST_DEBUG_OBJECT (demux, "FLV header detected");
+ } else {
+ if (G_UNLIKELY (demux->strict)) {
+ GST_WARNING_OBJECT (demux, "invalid header tag detected");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+ }
+
+ if (map.data[3] == '1') {
+ GST_DEBUG_OBJECT (demux, "FLV version 1 detected");
+ } else {
+ if (G_UNLIKELY (demux->strict)) {
+ GST_WARNING_OBJECT (demux, "invalid header version detected");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+
+ }
+
+ /* Now look at audio/video flags */
+ {
+ guint8 flags = map.data[4];
+
+ demux->has_video = demux->has_audio = FALSE;
+
+ if (flags & 1) {
+ GST_DEBUG_OBJECT (demux, "there is a video stream");
+ demux->has_video = TRUE;
+ }
+ if (flags & 4) {
+ GST_DEBUG_OBJECT (demux, "there is an audio stream");
+ demux->has_audio = TRUE;
+ }
+ }
+
+ /* do a one-time seekability check */
+ gst_flv_demux_check_seekability (demux);
+
+ /* We don't care about the rest */
+ demux->need_header = FALSE;
+
+beach:
+ gst_buffer_unmap (buffer, &map);
+ return ret;
+}
+
+
+static void
+gst_flv_demux_flush (GstFlvDemux * demux, gboolean discont)
+{
+ GST_DEBUG_OBJECT (demux, "flushing queued data in the FLV demuxer");
+
+ gst_adapter_clear (demux->adapter);
+
+ demux->audio_need_discont = TRUE;
+ demux->video_need_discont = TRUE;
+
+ demux->flushing = FALSE;
+
+ /* Only in push mode and if we're not during a seek */
+ if (!demux->random_access && demux->state != FLV_STATE_SEEK) {
+ /* After a flush we expect a tag_type */
+ demux->state = FLV_STATE_TAG_TYPE;
+ /* We reset the offset and will get one from first push */
+ demux->offset = 0;
+ }
+}
+
/* Reset the demuxer to its pristine state: release pads, codec data, index
 * bookkeeping, metadata tags and any buffered input, and rearm the parser
 * to expect a fresh FLV header. */
static void
gst_flv_demux_cleanup (GstFlvDemux * demux)
{
  GST_DEBUG_OBJECT (demux, "cleaning up FLV demuxer");

  demux->state = FLV_STATE_HEADER;

  demux->have_group_id = FALSE;
  demux->group_id = G_MAXUINT;

  /* parsing flags back to "fresh stream" defaults */
  demux->flushing = FALSE;
  demux->need_header = TRUE;
  demux->audio_need_segment = TRUE;
  demux->video_need_segment = TRUE;
  demux->audio_need_discont = TRUE;
  demux->video_need_discont = TRUE;

  demux->has_audio = FALSE;
  demux->has_video = FALSE;
  demux->got_par = FALSE;

  demux->indexed = FALSE;
  demux->upstream_seekable = FALSE;
  demux->file_size = 0;
  demux->segment_seqnum = 0;

  /* lazy index-scan bookkeeping */
  demux->index_max_pos = 0;
  demux->index_max_time = 0;

  /* per-stream timestamp tracking */
  demux->audio_start = demux->video_start = GST_CLOCK_TIME_NONE;
  demux->last_audio_pts = demux->last_video_dts = 0;
  demux->audio_time_offset = demux->video_time_offset = 0;

  demux->no_more_pads = FALSE;

#ifndef GST_DISABLE_DEBUG
  demux->no_audio_warned = FALSE;
  demux->no_video_warned = FALSE;
#endif

  gst_segment_init (&demux->segment, GST_FORMAT_TIME);

  /* stream properties gathered from metadata/tags */
  demux->w = demux->h = 0;
  demux->framerate = 0.0;
  demux->par_x = demux->par_y = 1;
  demux->video_offset = 0;
  demux->audio_offset = 0;
  demux->offset = demux->cur_tag_offset = 0;
  demux->tag_size = demux->tag_data_size = 0;
  demux->duration = GST_CLOCK_TIME_NONE;

  if (demux->new_seg_event) {
    gst_event_unref (demux->new_seg_event);
    demux->new_seg_event = NULL;
  }

  gst_adapter_clear (demux->adapter);

  /* release codec configuration blobs */
  if (demux->audio_codec_data) {
    gst_buffer_unref (demux->audio_codec_data);
    demux->audio_codec_data = NULL;
  }

  if (demux->video_codec_data) {
    gst_buffer_unref (demux->video_codec_data);
    demux->video_codec_data = NULL;
  }

  /* remove source pads from the flow combiner and the element */
  if (demux->audio_pad) {
    gst_flow_combiner_remove_pad (demux->flowcombiner, demux->audio_pad);
    gst_element_remove_pad (GST_ELEMENT (demux), demux->audio_pad);
    gst_object_unref (demux->audio_pad);
    demux->audio_pad = NULL;
  }

  if (demux->video_pad) {
    gst_flow_combiner_remove_pad (demux->flowcombiner, demux->video_pad);
    gst_element_remove_pad (GST_ELEMENT (demux), demux->video_pad);
    gst_object_unref (demux->video_pad);
    demux->video_pad = NULL;
  }

  /* keyframe time/position arrays parsed from script metadata */
  if (demux->times) {
    g_array_free (demux->times, TRUE);
    demux->times = NULL;
  }

  if (demux->filepositions) {
    g_array_free (demux->filepositions, TRUE);
    demux->filepositions = NULL;
  }

  demux->video_bitrate = 0;
  demux->audio_bitrate = 0;

  gst_flv_demux_clear_tags (demux);
}
+
+/*
+ * Create and push a flushing seek event upstream
+ */
+static gboolean
+flv_demux_seek_to_offset (GstFlvDemux * demux, guint64 offset)
+{
+ GstEvent *event;
+ gboolean res = 0;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+
+ res = gst_pad_push_event (demux->sinkpad, event);
+
+ if (res)
+ demux->offset = offset;
+ return res;
+}
+
/* Push-mode sink chain function: accumulate incoming buffers in the adapter
 * and run the tag state machine over them.
 *
 * States cycle HEADER -> TAG_TYPE -> (TAG_VIDEO | TAG_AUDIO | TAG_SCRIPT |
 * SKIP) -> TAG_TYPE; SEEK is entered from a deferred push-mode seek and
 * resolved via the script-tag index or, failing that, a rewind to the
 * file start. */
static GstFlowReturn
gst_flv_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstFlvDemux *demux = NULL;

  demux = GST_FLV_DEMUX (parent);

  GST_LOG_OBJECT (demux,
      "received buffer of %" G_GSIZE_FORMAT " bytes at offset %"
      G_GUINT64_FORMAT, gst_buffer_get_size (buffer),
      GST_BUFFER_OFFSET (buffer));

  /* buffer at byte 0 means upstream restarted from the file start */
  if (G_UNLIKELY (GST_BUFFER_OFFSET (buffer) == 0)) {
    GST_DEBUG_OBJECT (demux, "beginning of file, expect header");
    demux->state = FLV_STATE_HEADER;
    demux->offset = 0;
  }

  if (G_UNLIKELY (demux->offset == 0 && GST_BUFFER_OFFSET (buffer) != 0)) {
    GST_DEBUG_OBJECT (demux, "offset was zero, synchronizing with buffer's");
    demux->offset = GST_BUFFER_OFFSET (buffer);
  }

  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
    GST_DEBUG_OBJECT (demux, "Discontinuity");
    gst_adapter_clear (demux->adapter);
  }

  gst_adapter_push (demux->adapter, buffer);

  /* a seek was requested while data was in flight: handle it now */
  if (demux->seeking) {
    demux->state = FLV_STATE_SEEK;
    GST_OBJECT_LOCK (demux);
    demux->seeking = FALSE;
    GST_OBJECT_UNLOCK (demux);
  }

parse:
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    GST_DEBUG_OBJECT (demux, "got flow return %s", gst_flow_get_name (ret));
    goto beach;
  }

  if (G_UNLIKELY (demux->flushing)) {
    GST_DEBUG_OBJECT (demux, "we are now flushing, exiting parser loop");
    ret = GST_FLOW_FLUSHING;
    goto beach;
  }

  switch (demux->state) {
    case FLV_STATE_HEADER:
    {
      if (gst_adapter_available (demux->adapter) >= FLV_HEADER_SIZE) {
        GstBuffer *buffer;

        buffer = gst_adapter_take_buffer (demux->adapter, FLV_HEADER_SIZE);

        ret = gst_flv_demux_parse_header (demux, buffer);

        gst_buffer_unref (buffer);
        demux->offset += FLV_HEADER_SIZE;

        demux->state = FLV_STATE_TAG_TYPE;
        goto parse;
      } else {
        goto beach;
      }
    }
    case FLV_STATE_TAG_TYPE:
    {
      if (gst_adapter_available (demux->adapter) >= FLV_TAG_TYPE_SIZE) {
        GstBuffer *buffer;

        /* Remember the tag offset in bytes */
        demux->cur_tag_offset = demux->offset;

        buffer = gst_adapter_take_buffer (demux->adapter, FLV_TAG_TYPE_SIZE);

        ret = gst_flv_demux_parse_tag_type (demux, buffer);

        gst_buffer_unref (buffer);
        demux->offset += FLV_TAG_TYPE_SIZE;

        /* last tag is not an index => no index/don't know where the index is
         * seek back to the beginning */
        if (demux->seek_event && demux->state != FLV_STATE_TAG_SCRIPT)
          goto no_index;

        goto parse;
      } else {
        goto beach;
      }
    }
    case FLV_STATE_TAG_VIDEO:
    {
      if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
        GstBuffer *buffer;

        buffer = gst_adapter_take_buffer (demux->adapter, demux->tag_size);

        ret = gst_flv_demux_parse_tag_video (demux, buffer);

        gst_buffer_unref (buffer);
        demux->offset += demux->tag_size;

        demux->state = FLV_STATE_TAG_TYPE;
        goto parse;
      } else {
        goto beach;
      }
    }
    case FLV_STATE_TAG_AUDIO:
    {
      if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
        GstBuffer *buffer;

        buffer = gst_adapter_take_buffer (demux->adapter, demux->tag_size);

        ret = gst_flv_demux_parse_tag_audio (demux, buffer);

        gst_buffer_unref (buffer);
        demux->offset += demux->tag_size;

        demux->state = FLV_STATE_TAG_TYPE;
        goto parse;
      } else {
        goto beach;
      }
    }
    case FLV_STATE_TAG_SCRIPT:
    {
      if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
        GstBuffer *buffer;

        buffer = gst_adapter_take_buffer (demux->adapter, demux->tag_size);

        ret = gst_flv_demux_parse_tag_script (demux, buffer);

        gst_buffer_unref (buffer);
        demux->offset += demux->tag_size;

        demux->state = FLV_STATE_TAG_TYPE;

        /* if there's a seek event we're here for the index so if we don't have it
         * we seek back to the beginning */
        if (demux->seek_event) {
          if (demux->indexed)
            demux->state = FLV_STATE_SEEK;
          else
            goto no_index;
        }

        goto parse;
      } else {
        goto beach;
      }
    }
    case FLV_STATE_SEEK:
    {
      GstEvent *event;

      ret = GST_FLOW_OK;

      if (!demux->indexed) {
        /* no usable index yet: if we are positioned on the trailing
         * previous-tag-size field, follow it backwards to the last tag in
         * the hope that it is the metadata/index script tag */
        if (demux->offset == demux->file_size - sizeof (guint32)) {
          guint64 seek_offset;
          guint8 *data;

          data = gst_adapter_take (demux->adapter, 4);
          if (!data)
            goto no_index;

          seek_offset = demux->file_size - sizeof (guint32) -
              GST_READ_UINT32_BE (data);
          g_free (data);

          GST_INFO_OBJECT (demux,
              "Seeking to beginning of last tag at %" G_GUINT64_FORMAT,
              seek_offset);
          demux->state = FLV_STATE_TAG_TYPE;
          flv_demux_seek_to_offset (demux, seek_offset);
          goto beach;
        } else
          goto no_index;
      }

      GST_OBJECT_LOCK (demux);
      event = demux->seek_event;
      demux->seek_event = NULL;
      GST_OBJECT_UNLOCK (demux);

      /* calculate and perform seek */
      if (!flv_demux_handle_seek_push (demux, event))
        goto seek_failed;

      gst_event_unref (event);
      demux->state = FLV_STATE_TAG_TYPE;
      goto beach;
    }
    case FLV_STATE_SKIP:
      /* Skip unknown tags (set in _parse_tag_type()) */
      if (gst_adapter_available (demux->adapter) >= demux->tag_size) {
        gst_adapter_flush (demux->adapter, demux->tag_size);
        demux->offset += demux->tag_size;
        demux->state = FLV_STATE_TAG_TYPE;
        goto parse;
      } else {
        goto beach;
      }
    default:
      GST_DEBUG_OBJECT (demux, "unexpected demuxer state");
  }

beach:
  return ret;

/* ERRORS */
no_index:
  {
    /* NOTE(review): all paths here are reached with a pending seek, so
     * seek_event is presumably non-NULL when unreffed -- verify */
    GST_OBJECT_LOCK (demux);
    demux->seeking = FALSE;
    gst_event_unref (demux->seek_event);
    demux->seek_event = NULL;
    GST_OBJECT_UNLOCK (demux);
    GST_WARNING_OBJECT (demux,
        "failed to find an index, seeking back to beginning");
    flv_demux_seek_to_offset (demux, 0);
    return GST_FLOW_OK;
  }
seek_failed:
  {
    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("seek failed"));
    return GST_FLOW_ERROR;
  }

}
+
+static GstFlowReturn
+gst_flv_demux_pull_range (GstFlvDemux * demux, GstPad * pad, guint64 offset,
+ guint size, GstBuffer ** buffer)
+{
+ GstFlowReturn ret;
+
+ ret = gst_pad_pull_range (pad, offset, size, buffer);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ GST_WARNING_OBJECT (demux,
+ "failed when pulling %d bytes from offset %" G_GUINT64_FORMAT ": %s",
+ size, offset, gst_flow_get_name (ret));
+ *buffer = NULL;
+ return ret;
+ }
+
+ if (G_UNLIKELY (*buffer && gst_buffer_get_size (*buffer) != size)) {
+ GST_WARNING_OBJECT (demux,
+ "partial pull got %" G_GSIZE_FORMAT " when expecting %d from offset %"
+ G_GUINT64_FORMAT, gst_buffer_get_size (*buffer), size, offset);
+ gst_buffer_unref (*buffer);
+ ret = GST_FLOW_EOS;
+ *buffer = NULL;
+ return ret;
+ }
+
+ return ret;
+}
+
/* Pull-mode: read and dispatch one complete FLV tag at demux->offset.
 *
 * First pulls the 4-byte type/size prologue to learn the tag type and size,
 * then pulls the remaining tag_size bytes and hands them to the matching
 * parser.  Advances demux->offset past the tag and rearms the state machine
 * for the next tag.  Note that FLV_STATE_SKIP (unknown tag type) lands in
 * the default branch here, which still skips the tag because the offset is
 * advanced unconditionally. */
static GstFlowReturn
gst_flv_demux_pull_tag (GstPad * pad, GstFlvDemux * demux)
{
  GstBuffer *buffer = NULL;
  GstFlowReturn ret = GST_FLOW_OK;

  /* Store tag offset */
  demux->cur_tag_offset = demux->offset;

  /* Get the first 4 bytes to identify tag type and size */
  if (G_UNLIKELY ((ret = gst_flv_demux_pull_range (demux, pad, demux->offset,
                  FLV_TAG_TYPE_SIZE, &buffer)) != GST_FLOW_OK))
    goto beach;

  /* Identify tag type */
  ret = gst_flv_demux_parse_tag_type (demux, buffer);

  gst_buffer_unref (buffer);

  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto beach;

  /* Jump over tag type + size */
  demux->offset += FLV_TAG_TYPE_SIZE;

  /* Pull the whole tag */
  buffer = NULL;
  if (G_UNLIKELY ((ret = gst_flv_demux_pull_range (demux, pad, demux->offset,
                  demux->tag_size, &buffer)) != GST_FLOW_OK))
    goto beach;

  switch (demux->state) {
    case FLV_STATE_TAG_VIDEO:
      ret = gst_flv_demux_parse_tag_video (demux, buffer);
      break;
    case FLV_STATE_TAG_AUDIO:
      ret = gst_flv_demux_parse_tag_audio (demux, buffer);
      break;
    case FLV_STATE_TAG_SCRIPT:
      ret = gst_flv_demux_parse_tag_script (demux, buffer);
      break;
    default:
      GST_WARNING_OBJECT (demux, "unexpected state %d", demux->state);
  }

  gst_buffer_unref (buffer);

  /* Jump over that part we've just parsed */
  demux->offset += demux->tag_size;

  /* Make sure we reinitialize the tag size */
  demux->tag_size = 0;

  /* Ready for the next tag */
  demux->state = FLV_STATE_TAG_TYPE;

  if (G_UNLIKELY (ret == GST_FLOW_NOT_LINKED)) {
    GST_WARNING_OBJECT (demux, "parsing this tag returned not-linked and "
        "neither video nor audio are linked");
  }

beach:
  return ret;
}
+
+static GstFlowReturn
+gst_flv_demux_pull_header (GstPad * pad, GstFlvDemux * demux)
+{
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* Get the first 9 bytes */
+ if (G_UNLIKELY ((ret = gst_flv_demux_pull_range (demux, pad, demux->offset,
+ FLV_HEADER_SIZE, &buffer)) != GST_FLOW_OK))
+ goto beach;
+
+ ret = gst_flv_demux_parse_header (demux, buffer);
+
+ gst_buffer_unref (buffer);
+
+ /* Jump over the header now */
+ demux->offset += FLV_HEADER_SIZE;
+ demux->state = FLV_STATE_TAG_TYPE;
+
+beach:
+ return ret;
+}
+
+static void
+gst_flv_demux_move_to_offset (GstFlvDemux * demux, gint64 offset,
+ gboolean reset)
+{
+ demux->offset = offset;
+
+ /* Tell all the stream we moved to a different position (discont) */
+ demux->audio_need_discont = TRUE;
+ demux->video_need_discont = TRUE;
+
+ /* next section setup */
+ demux->from_offset = -1;
+ demux->audio_done = demux->video_done = FALSE;
+ demux->audio_first_ts = demux->video_first_ts = GST_CLOCK_TIME_NONE;
+
+ if (reset) {
+ demux->from_offset = -1;
+ demux->to_offset = G_MAXINT64;
+ }
+
+ /* If we seeked at the beginning of the file parse the header again */
+ if (G_UNLIKELY (!demux->offset)) {
+ demux->state = FLV_STATE_HEADER;
+ } else { /* or parse a tag */
+ demux->state = FLV_STATE_TAG_TYPE;
+ }
+}
+
/* Reverse playback: the current section is finished, so look up the index
 * entry (keyframe) preceding the section start and jump there.
 *
 * Returns GST_FLOW_OK when a previous keyframe was found and the read
 * position was moved, GST_FLOW_EOS when playback reached the segment start
 * (or no earlier entry exists). */
static GstFlowReturn
gst_flv_demux_seek_to_prev_keyframe (GstFlvDemux * demux)
{
  GstFlowReturn ret = GST_FLOW_EOS;
  GstIndex *index;
  GstIndexEntry *entry = NULL;

  GST_DEBUG_OBJECT (demux,
      "terminated section started at offset %" G_GINT64_FORMAT,
      demux->from_offset);

  /* we are done if we got all audio and video */
  if ((!GST_CLOCK_TIME_IS_VALID (demux->audio_first_ts) ||
          demux->audio_first_ts < demux->segment.start) &&
      (!GST_CLOCK_TIME_IS_VALID (demux->video_first_ts) ||
          demux->video_first_ts < demux->segment.start))
    goto done;

  if (demux->from_offset <= 0)
    goto done;

  GST_DEBUG_OBJECT (demux, "locating previous position");

  index = gst_flv_demux_get_index (GST_ELEMENT (demux));

  /* locate index entry before previous start position */
  if (index) {
    entry = gst_index_get_assoc_entry (index, demux->index_id,
        GST_INDEX_LOOKUP_BEFORE, GST_ASSOCIATION_FLAG_KEY_UNIT,
        GST_FORMAT_BYTES, demux->from_offset - 1);

    if (entry) {
      gint64 bytes = 0, time = 0;

      gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &bytes);
      gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);

      /* NOTE(review): the message prints demux->offset - 1 although the
       * lookup above used demux->from_offset - 1 (log-only inconsistency) */
      GST_DEBUG_OBJECT (demux, "found index entry for %" G_GINT64_FORMAT
          " at %" GST_TIME_FORMAT ", seeking to %" G_GINT64_FORMAT,
          demux->offset - 1, GST_TIME_ARGS (time), bytes);

      /* setup for next section */
      demux->to_offset = demux->from_offset;
      gst_flv_demux_move_to_offset (demux, bytes, FALSE);
      ret = GST_FLOW_OK;
    }

    gst_object_unref (index);
  }

done:
  return ret;
}
+
/* Lazily extend the index: walk tag headers forward from byte position
 * @pos, adding index entries (via the timestamp peek helper), until a tag
 * timestamp exceeds @ts, a tag cannot be parsed, or the file ends.
 *
 * Reaching EOS marks the index as complete (demux->indexed = TRUE).  The
 * caller's demux->offset is preserved across the scan. */
static GstFlowReturn
gst_flv_demux_create_index (GstFlvDemux * demux, gint64 pos, GstClockTime ts)
{
  gint64 size;
  size_t tag_size;
  guint64 old_offset;
  GstBuffer *buffer;
  GstClockTime tag_time;
  GstFlowReturn ret = GST_FLOW_OK;

  /* cannot scan without knowing the upstream size */
  if (!gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &size))
    return GST_FLOW_OK;

  GST_DEBUG_OBJECT (demux, "building index at %" G_GINT64_FORMAT
      " looking for time %" GST_TIME_FORMAT, pos, GST_TIME_ARGS (ts));

  old_offset = demux->offset;
  demux->offset = pos;

  buffer = NULL;
  /* 12 bytes are enough for the timestamp peek helper */
  while ((ret = gst_flv_demux_pull_range (demux, demux->sinkpad, demux->offset,
              12, &buffer)) == GST_FLOW_OK) {
    tag_time =
        gst_flv_demux_parse_tag_timestamp (demux, TRUE, buffer, &tag_size);

    gst_buffer_unref (buffer);
    buffer = NULL;

    if (G_UNLIKELY (tag_time == GST_CLOCK_TIME_NONE || tag_time > ts))
      goto exit;

    demux->offset += tag_size;
  }

  if (ret == GST_FLOW_EOS) {
    /* file ran out, so mark we have complete index */
    demux->indexed = TRUE;
    ret = GST_FLOW_OK;
  }

exit:
  demux->offset = old_offset;

  return ret;
}
+
/* Probe the end of the file for metadata: read the trailing
 * previous-tag-size field, jump back to the last tag, use its timestamp to
 * update the duration and, when it turns out to be a script tag, pull and
 * parse it fully (it may hold the keyframe index).
 *
 * Returns the upstream size in bytes (0 when unknown); the caller caches
 * this as demux->file_size. */
static gint64
gst_flv_demux_get_metadata (GstFlvDemux * demux)
{
  gint64 ret = 0, offset;
  size_t tag_size, size;
  GstBuffer *buffer = NULL;
  GstMapInfo map;

  if (!gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &offset))
    goto exit;

  ret = offset;
  GST_DEBUG_OBJECT (demux, "upstream size: %" G_GINT64_FORMAT, offset);
  if (G_UNLIKELY (offset < 4))
    goto exit;

  /* the last 4 bytes of the file hold the size of the final tag */
  offset -= 4;
  if (GST_FLOW_OK != gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
          4, &buffer))
    goto exit;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  tag_size = GST_READ_UINT32_BE (map.data);
  gst_buffer_unmap (buffer, &map);
  GST_DEBUG_OBJECT (demux, "last tag size: %" G_GSIZE_FORMAT, tag_size);
  gst_buffer_unref (buffer);
  buffer = NULL;

  if (G_UNLIKELY (offset < tag_size))
    goto exit;

  /* jump back to the start of the last tag, pull its 12-byte prologue */
  offset -= tag_size;
  if (GST_FLOW_OK != gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
          12, &buffer))
    goto exit;

  /* a consistency check */
  gst_buffer_map (buffer, &map, GST_MAP_READ);
  size = GST_READ_UINT24_BE (map.data + 1);
  if (size != tag_size - 11) {
    gst_buffer_unmap (buffer, &map);
    GST_DEBUG_OBJECT (demux,
        "tag size %" G_GSIZE_FORMAT ", expected %" G_GSIZE_FORMAT
        ", corrupt or truncated file", size, tag_size - 11);
    goto exit;
  }

  /* try to update duration with timestamp in any case */
  gst_flv_demux_parse_tag_timestamp (demux, FALSE, buffer, &size);

  /* maybe get some more metadata */
  if (map.data[0] == 18) {
    gst_buffer_unmap (buffer, &map);
    gst_buffer_unref (buffer);
    buffer = NULL;
    GST_DEBUG_OBJECT (demux, "script tag, pulling it to parse");
    /* skip the 4-byte type/size prologue; the script parser expects the
     * rest of the tag */
    offset += 4;
    if (GST_FLOW_OK == gst_flv_demux_pull_range (demux, demux->sinkpad, offset,
            tag_size, &buffer))
      gst_flv_demux_parse_tag_script (demux, buffer);
  } else {
    gst_buffer_unmap (buffer, &map);
  }

exit:
  if (buffer)
    gst_buffer_unref (buffer);

  return ret;
}
+
+/* Streaming task running in pull mode.  Drives the demuxer state machine:
+ * pulls the FLV header first, then tags one by one, performs deferred
+ * index scans for pending seeks, and checks end-of-section (reverse
+ * playback) / end-of-segment conditions.  On any non-OK flow return the
+ * task pauses itself and posts EOS or segment-done as appropriate. */
+static void
+gst_flv_demux_loop (GstPad * pad)
+{
+ GstFlvDemux *demux = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ demux = GST_FLV_DEMUX (gst_pad_get_parent (pad));
+
+ /* pull in data */
+ switch (demux->state) {
+ case FLV_STATE_TAG_TYPE:
+ if (demux->from_offset == -1)
+ demux->from_offset = demux->offset;
+ ret = gst_flv_demux_pull_tag (pad, demux);
+ /* if we have seen real data, we probably passed a possible metadata
+ * header located at start. So if we do not yet have an index,
+ * try to pick up metadata (index, duration) at the end */
+ if (G_UNLIKELY (!demux->file_size && !demux->indexed &&
+ (demux->has_video || demux->has_audio)))
+ demux->file_size = gst_flv_demux_get_metadata (demux);
+ break;
+ case FLV_STATE_DONE:
+ ret = GST_FLOW_EOS;
+ break;
+ case FLV_STATE_SEEK:
+ /* seek issued with insufficient index;
+ * scan for index in task thread from current maximum offset to
+ * desired time and then perform seek */
+ /* TODO maybe some buffering message or so to indicate scan progress */
+ ret = gst_flv_demux_create_index (demux, demux->index_max_pos,
+ demux->seek_time);
+ if (ret != GST_FLOW_OK)
+ goto pause;
+ /* position and state arranged by seek,
+ * also unrefs event */
+ gst_flv_demux_handle_seek_pull (demux, demux->seek_event, FALSE);
+ demux->seek_event = NULL;
+ break;
+ default:
+ /* FLV_STATE_HEADER (and anything else): parse the file header first */
+ ret = gst_flv_demux_pull_header (pad, demux);
+ /* index scans start after header */
+ demux->index_max_pos = demux->offset;
+ break;
+ }
+
+ if (demux->segment.rate < 0.0) {
+ /* check end of section */
+ if ((gint64) demux->offset >= demux->to_offset ||
+ demux->segment.position >= demux->segment.stop + 2 * GST_SECOND ||
+ (demux->audio_done && demux->video_done))
+ ret = gst_flv_demux_seek_to_prev_keyframe (demux);
+ } else {
+ /* check EOS condition */
+ if ((demux->segment.stop != -1) &&
+ (demux->segment.position >= demux->segment.stop)) {
+ ret = GST_FLOW_EOS;
+ }
+ }
+
+ /* pause if something went wrong or at end; NOT_LINKED is tolerated while
+ * pads may still appear (no_more_pads not yet signalled) */
+ if (G_UNLIKELY (ret != GST_FLOW_OK) && !(ret == GST_FLOW_NOT_LINKED
+ && !demux->no_more_pads))
+ goto pause;
+
+ gst_object_unref (demux);
+
+ return;
+
+pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+ GstMessage *message;
+ GstEvent *event;
+
+ GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
+ gst_pad_pause_task (pad);
+
+ if (ret == GST_FLOW_EOS) {
+ /* handle end-of-stream/segment */
+ /* so align our position with the end of it, if there is one
+ * this ensures a subsequent will arrive at correct base/acc time */
+ if (demux->segment.rate > 0.0 &&
+ GST_CLOCK_TIME_IS_VALID (demux->segment.stop))
+ demux->segment.position = demux->segment.stop;
+ else if (demux->segment.rate < 0.0)
+ demux->segment.position = demux->segment.start;
+
+ /* perform EOS logic */
+ if (!demux->no_more_pads) {
+ gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ if (demux->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
+ gint64 stop;
+
+ /* for segment playback we need to post when (in stream time)
+ * we stopped, this is either stop (when set) or the duration. */
+ if ((stop = demux->segment.stop) == -1)
+ stop = demux->segment.duration;
+
+ if (demux->segment.rate >= 0) {
+ GST_LOG_OBJECT (demux, "Sending segment done, at end of segment");
+ message = gst_message_new_segment_done (GST_OBJECT_CAST (demux),
+ GST_FORMAT_TIME, stop);
+ gst_message_set_seqnum (message, demux->segment_seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (demux), message);
+ event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_flv_demux_push_src_event (demux, event);
+ } else { /* Reverse playback */
+ GST_LOG_OBJECT (demux, "Sending segment done, at beginning of "
+ "segment");
+ message = gst_message_new_segment_done (GST_OBJECT_CAST (demux),
+ GST_FORMAT_TIME, demux->segment.start);
+ gst_message_set_seqnum (message, demux->segment_seqnum);
+ gst_element_post_message (GST_ELEMENT_CAST (demux), message);
+ event = gst_event_new_segment_done (GST_FORMAT_TIME,
+ demux->segment.start);
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_flv_demux_push_src_event (demux, event);
+ }
+ } else {
+ /* normal playback, send EOS to all linked pads */
+ if (!demux->no_more_pads) {
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
+ if (!demux->audio_pad && !demux->video_pad) {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+ ("Internal data stream error."), ("Got EOS before any data"));
+ } else {
+ event = gst_event_new_eos ();
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ if (!gst_flv_demux_push_src_event (demux, event))
+ GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
+ }
+ }
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ GST_ELEMENT_FLOW_ERROR (demux, ret);
+ event = gst_event_new_eos ();
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_flv_demux_push_src_event (demux, event);
+ }
+ gst_object_unref (demux);
+ return;
+ }
+}
+
+/* Map @segment's TIME position to a byte offset using the index.
+ * With GST_SEEK_FLAG_SNAP_AFTER the lookup snaps to the entry after the
+ * target time, otherwise to the one before; with GST_SEEK_FLAG_KEY_UNIT
+ * the segment start/time/position are adjusted onto the keyframe found.
+ * Returns 0 when no index or no usable entry exists. */
+static guint64
+gst_flv_demux_find_offset (GstFlvDemux * demux, GstSegment * segment,
+ GstSeekFlags seek_flags)
+{
+ gint64 bytes = 0;
+ gint64 time = 0;
+ GstIndex *index;
+ GstIndexEntry *entry;
+
+ g_return_val_if_fail (segment != NULL, 0);
+
+ time = segment->position;
+
+ index = gst_flv_demux_get_index (GST_ELEMENT (demux));
+
+ if (index) {
+ /* Let's check if we have an index entry for that seek time */
+ entry = gst_index_get_assoc_entry (index, demux->index_id,
+ seek_flags & GST_SEEK_FLAG_SNAP_AFTER ?
+ GST_INDEX_LOOKUP_AFTER : GST_INDEX_LOOKUP_BEFORE,
+ GST_ASSOCIATION_FLAG_KEY_UNIT, GST_FORMAT_TIME, time);
+
+ if (entry) {
+ gst_index_entry_assoc_map (entry, GST_FORMAT_BYTES, &bytes);
+ gst_index_entry_assoc_map (entry, GST_FORMAT_TIME, &time);
+
+ GST_DEBUG_OBJECT (demux, "found index entry for %" GST_TIME_FORMAT
+ " at %" GST_TIME_FORMAT ", seeking to %" G_GINT64_FORMAT,
+ GST_TIME_ARGS (segment->position), GST_TIME_ARGS (time), bytes);
+
+ /* Key frame seeking */
+ if (seek_flags & GST_SEEK_FLAG_KEY_UNIT) {
+ /* Adjust the segment so that the keyframe fits in */
+ segment->start = segment->time = time;
+ segment->position = time;
+ }
+ } else {
+ GST_DEBUG_OBJECT (demux, "no index entry found for %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->start));
+ }
+
+ /* get_index returned a new reference */
+ gst_object_unref (index);
+ }
+
+ return bytes;
+}
+
+/* Handle a TIME seek in push mode by translating it into an upstream
+ * BYTES seek through the index (gst_flv_demux_find_offset).  The incoming
+ * @event is consumed.  Returns TRUE if the seek was handled.
+ *
+ * Fixes over the previous revision:
+ *  - the original event's seqnum is captured and applied to the newly
+ *    created byte seek (the old code set the new event's seqnum to its
+ *    own value, a no-op, so the seqnum was never propagated);
+ *  - the byte seek is a separate event, so the caller's event is neither
+ *    leaked on the seek path nor unreffed/pushed a second time after
+ *    gst_pad_push_event() already took ownership of it. */
+static gboolean
+flv_demux_handle_seek_push (GstFlvDemux * demux, GstEvent * event)
+{
+  GstFormat format;
+  GstSeekFlags flags;
+  GstSeekType start_type, stop_type;
+  gint64 start, stop;
+  gdouble rate;
+  guint32 seqnum;
+  gboolean update, flush, ret;
+  GstSegment seeksegment;
+
+  gst_event_parse_seek (event, &rate, &format, &flags,
+      &start_type, &start, &stop_type, &stop);
+  /* remember the seqnum so the upstream byte seek can carry it */
+  seqnum = gst_event_get_seqnum (event);
+
+  if (format != GST_FORMAT_TIME)
+    goto wrong_format;
+
+  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+
+  /* Work on a copy until we are sure the seek succeeded. */
+  memcpy (&seeksegment, &demux->segment, sizeof (GstSegment));
+
+  GST_DEBUG_OBJECT (demux, "segment before configure %" GST_SEGMENT_FORMAT,
+      &demux->segment);
+
+  /* Apply the seek to our segment */
+  gst_segment_do_seek (&seeksegment, rate, format, flags,
+      start_type, start, stop_type, stop, &update);
+
+  GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
+      &seeksegment);
+
+  if (flush || seeksegment.position != demux->segment.position) {
+    /* Do the actual seeking */
+    guint64 offset = gst_flv_demux_find_offset (demux, &seeksegment, flags);
+    GstEvent *byte_event;
+
+    GST_DEBUG_OBJECT (demux, "generating an upstream seek at position %"
+        G_GUINT64_FORMAT, offset);
+    byte_event = gst_event_new_seek (seeksegment.rate, GST_FORMAT_BYTES,
+        flags | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET,
+        offset, GST_SEEK_TYPE_NONE, 0);
+    gst_event_set_seqnum (byte_event, seqnum);
+    /* push_event takes ownership of byte_event */
+    ret = gst_pad_push_event (demux->sinkpad, byte_event);
+    if (G_UNLIKELY (!ret)) {
+      GST_WARNING_OBJECT (demux, "upstream seek failed");
+    }
+
+    gst_flow_combiner_reset (demux->flowcombiner);
+    /* Tell all the stream we moved to a different position (discont) */
+    demux->audio_need_discont = TRUE;
+    demux->video_need_discont = TRUE;
+  } else {
+    /* nothing to do: position unchanged and no flush requested */
+    ret = TRUE;
+  }
+
+  if (ret) {
+    /* Ok seek succeeded, take the newly configured segment */
+    memcpy (&demux->segment, &seeksegment, sizeof (GstSegment));
+
+    /* Tell all the stream a new segment is needed */
+    demux->audio_need_segment = TRUE;
+    demux->video_need_segment = TRUE;
+    /* Clean any potential newsegment event kept for the streams. The first
+     * stream needing a new segment will create a new one. */
+    if (G_UNLIKELY (demux->new_seg_event)) {
+      gst_event_unref (demux->new_seg_event);
+      demux->new_seg_event = NULL;
+    }
+    GST_DEBUG_OBJECT (demux, "preparing newsegment from %"
+        GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (demux->segment.start),
+        GST_TIME_ARGS (demux->segment.stop));
+    demux->new_seg_event = gst_event_new_segment (&demux->segment);
+    if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+      gst_event_set_seqnum (demux->new_seg_event, demux->segment_seqnum);
+    gst_event_unref (event);
+  } else {
+    /* byte seek failed: give upstream a last chance with the original
+     * TIME seek (consumes event) */
+    ret = gst_pad_push_event (demux->sinkpad, event);
+  }
+
+  return ret;
+
+/* ERRORS */
+wrong_format:
+  {
+    GST_WARNING_OBJECT (demux, "we only support seeking in TIME format");
+    gst_event_unref (event);
+    return FALSE;
+  }
+}
+
+/* Entry point for TIME seeks in push mode.  First offers the seek to
+ * upstream; if upstream refuses and no complete index exists yet, puts
+ * the chain function into FLV_STATE_SEEK and starts index building by
+ * seeking to the end of the file, where the script tag holding the index
+ * usually lives.  Consumes @event.
+ *
+ * Fix: the early-return paths (file-size query failure, index-building
+ * shortcut, concurrent-build case) previously leaked the caller's event
+ * reference; gst_event_replace() stores its own copy in demux->seek_event,
+ * so our reference must be dropped on every exit. */
+static gboolean
+gst_flv_demux_handle_seek_push (GstFlvDemux * demux, GstEvent * event)
+{
+  GstFormat format;
+
+  gst_event_parse_seek (event, NULL, &format, NULL, NULL, NULL, NULL, NULL);
+
+  if (format != GST_FORMAT_TIME) {
+    GST_WARNING_OBJECT (demux, "we only support seeking in TIME format");
+    gst_event_unref (event);
+    return FALSE;
+  }
+
+  /* First try upstream */
+  if (gst_pad_push_event (demux->sinkpad, gst_event_ref (event))) {
+    GST_DEBUG_OBJECT (demux, "Upstream successfully seeked");
+    gst_event_unref (event);
+    return TRUE;
+  }
+
+  if (!demux->indexed) {
+    guint64 seek_offset = 0;
+    gboolean building_index;
+
+    GST_OBJECT_LOCK (demux);
+    /* handle the seek in the chain function */
+    demux->seeking = TRUE;
+    demux->state = FLV_STATE_SEEK;
+
+    /* copy the event (replace takes its own reference) */
+    gst_event_replace (&demux->seek_event, event);
+
+    /* set the building_index flag so that only one thread can setup the
+     * structures for index seeking. */
+    building_index = demux->building_index;
+    if (!building_index) {
+      demux->building_index = TRUE;
+      if (!demux->file_size
+          && !gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES,
+              &demux->file_size)) {
+        GST_WARNING_OBJECT (demux, "Failed to query upstream file size");
+        GST_OBJECT_UNLOCK (demux);
+        gst_event_unref (event);
+        return FALSE;
+      }
+
+      /* we hope the last tag is a scriptdataobject containing an index
+       * the size of the last tag is given in the last guint32 bits
+       * then we seek to the beginning of the tag, parse it and hopefully obtain an index */
+      seek_offset = demux->file_size - sizeof (guint32);
+      GST_DEBUG_OBJECT (demux,
+          "File size obtained, seeking to %" G_GUINT64_FORMAT, seek_offset);
+    }
+    GST_OBJECT_UNLOCK (demux);
+
+    /* we own the seek event; a copy now lives in demux->seek_event */
+    gst_event_unref (event);
+
+    if (!building_index) {
+      GST_INFO_OBJECT (demux, "Seeking to last 4 bytes at %" G_GUINT64_FORMAT,
+          seek_offset);
+      return flv_demux_seek_to_offset (demux, seek_offset);
+    }
+
+    /* FIXME: we have to always return true so that we don't block the seek
+     * thread.
+     * Note: maybe it is OK to return true if we're still building the index */
+    return TRUE;
+  }
+
+  /* index is complete: perform the push-mode seek directly (consumes event) */
+  return flv_demux_handle_seek_push (demux, event);
+}
+
+/* Handle a TIME seek in pull mode.  @seeking is TRUE when called from the
+ * application seek path and FALSE when called from the task thread after
+ * an index scan (FLV_STATE_SEEK).  Flushes or pauses the streaming task,
+ * applies the seek to a working copy of the segment, and either performs
+ * the byte-offset jump via the index or (if the index does not yet cover
+ * the target and we came from the app thread) defers to the task thread
+ * to scan/build the index first.  Consumes @event and restarts the task
+ * unless a newer external seek has meanwhile taken over. */
+static gboolean
+gst_flv_demux_handle_seek_pull (GstFlvDemux * demux, GstEvent * event,
+ gboolean seeking)
+{
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ gdouble rate;
+ gboolean update, flush, ret = FALSE;
+ GstSegment seeksegment;
+ GstEvent *flush_event;
+ GstMessage *message;
+ guint32 seqnum;
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &start_type, &start, &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ if (format != GST_FORMAT_TIME)
+ goto wrong_format;
+
+ /* mark seeking thread entering flushing/pausing */
+ GST_OBJECT_LOCK (demux);
+ if (seeking)
+ demux->seeking = seeking;
+ GST_OBJECT_UNLOCK (demux);
+
+ flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+
+ if (flush) {
+ /* Flush start up and downstream to make sure data flow and loops are
+ idle */
+ flush_event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_flv_demux_push_src_event (demux, flush_event);
+
+ flush_event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_pad_push_event (demux->sinkpad, flush_event);
+ } else {
+ /* Pause the pulling task */
+ gst_pad_pause_task (demux->sinkpad);
+ }
+
+ /* Take the stream lock */
+ GST_PAD_STREAM_LOCK (demux->sinkpad);
+ demux->segment_seqnum = seqnum;
+
+ if (flush) {
+ /* Stop flushing upstream we need to pull */
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_pad_push_event (demux->sinkpad, flush_event);
+ }
+
+ /* Work on a copy until we are sure the seek succeeded. */
+ memcpy (&seeksegment, &demux->segment, sizeof (GstSegment));
+
+ GST_DEBUG_OBJECT (demux, "segment before configure %" GST_SEGMENT_FORMAT,
+ &demux->segment);
+
+ /* Apply the seek to our segment */
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ start_type, start, stop_type, stop, &update);
+
+ GST_DEBUG_OBJECT (demux, "segment configured %" GST_SEGMENT_FORMAT,
+ &seeksegment);
+
+ if (flush || seeksegment.position != demux->segment.position) {
+ /* Do the actual seeking */
+ /* index is reliable if it is complete or we do not go to far ahead */
+ if (seeking && !demux->indexed &&
+ seeksegment.position > demux->index_max_time + 10 * GST_SECOND) {
+ GST_DEBUG_OBJECT (demux, "delaying seek to post-scan; "
+ " index only up to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->index_max_time));
+ /* stop flushing for now */
+ if (flush) {
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_flv_demux_push_src_event (demux, flush_event);
+ }
+ /* delegate scanning and index building to task thread to avoid
+ * occupying main (UI) loop */
+ if (demux->seek_event)
+ gst_event_unref (demux->seek_event);
+ demux->seek_event = gst_event_ref (event);
+ demux->seek_time = seeksegment.position;
+ demux->state = FLV_STATE_SEEK;
+ /* do not know about success yet, but we did care and handled it */
+ ret = TRUE;
+ goto exit;
+ }
+
+ /* now index should be as reliable as it can be for current purpose */
+ gst_flv_demux_move_to_offset (demux,
+ gst_flv_demux_find_offset (demux, &seeksegment, flags), TRUE);
+ ret = TRUE;
+ } else {
+ ret = TRUE;
+ }
+
+ if (flush) {
+ /* Stop flushing, the sinks are at time 0 now */
+ flush_event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (flush_event, seqnum);
+ gst_flv_demux_push_src_event (demux, flush_event);
+ }
+
+ if (ret) {
+ /* Ok seek succeeded, take the newly configured segment */
+ memcpy (&demux->segment, &seeksegment, sizeof (GstSegment));
+
+ /* Notify about the start of a new segment */
+ if (demux->segment.flags & GST_SEGMENT_FLAG_SEGMENT) {
+ message = gst_message_new_segment_start (GST_OBJECT (demux),
+ demux->segment.format, demux->segment.position);
+ gst_message_set_seqnum (message, seqnum);
+ gst_element_post_message (GST_ELEMENT (demux), message);
+ }
+
+ gst_flow_combiner_reset (demux->flowcombiner);
+ /* Tell all the stream a new segment is needed */
+ demux->audio_need_segment = TRUE;
+ demux->video_need_segment = TRUE;
+ /* Clean any potential newsegment event kept for the streams. The first
+ * stream needing a new segment will create a new one. */
+ if (G_UNLIKELY (demux->new_seg_event)) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+ GST_DEBUG_OBJECT (demux, "preparing newsegment from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.start),
+ GST_TIME_ARGS (demux->segment.stop));
+ demux->new_seg_event = gst_event_new_segment (&demux->segment);
+ gst_event_set_seqnum (demux->new_seg_event, seqnum);
+ }
+
+exit:
+ /* detect whether a newer external seek started while we were busy:
+ * demux->seeking set by another thread, while we were the task-thread
+ * (seeking == FALSE) caller */
+ GST_OBJECT_LOCK (demux);
+ seeking = demux->seeking && !seeking;
+ demux->seeking = FALSE;
+ GST_OBJECT_UNLOCK (demux);
+
+ /* if we detect an external seek having started (and possibly already having
+ * flushed), do not restart task to give it a chance.
+ * Otherwise external one's flushing will take care to pause task */
+ if (seeking) {
+ gst_pad_pause_task (demux->sinkpad);
+ } else {
+ gst_pad_start_task (demux->sinkpad,
+ (GstTaskFunction) gst_flv_demux_loop, demux->sinkpad, NULL);
+ }
+
+ GST_PAD_STREAM_UNLOCK (demux->sinkpad);
+
+ gst_event_unref (event);
+ return ret;
+
+ /* ERRORS */
+wrong_format:
+ {
+ GST_WARNING_OBJECT (demux, "we only support seeking in TIME format");
+ gst_event_unref (event);
+ return ret;
+ }
+}
+
+/* Decide the sink pad's scheduling mode.  Pull mode (preferred, enables
+ * random access) is chosen only when the peer reports a seekable pull
+ * scheduling mode; otherwise we fall back to push. */
+static gboolean
+gst_flv_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+{
+  GstQuery *query = gst_query_new_scheduling ();
+  gboolean pull_mode = FALSE;
+
+  if (gst_pad_peer_query (sinkpad, query)) {
+    pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+        GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+  }
+  gst_query_unref (query);
+
+  if (pull_mode) {
+    GST_DEBUG_OBJECT (sinkpad, "activating pull");
+    return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+  }
+
+  GST_DEBUG_OBJECT (sinkpad, "activating push");
+  return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+}
+
+/* Activate or deactivate the sink pad in the given scheduling mode.
+ * Pull mode flips on random access and runs/stops the streaming task;
+ * push mode just records that random access is unavailable. */
+static gboolean
+gst_flv_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+    GstPadMode mode, gboolean active)
+{
+  GstFlvDemux *demux = GST_FLV_DEMUX (parent);
+  gboolean result;
+
+  if (mode == GST_PAD_MODE_PUSH) {
+    /* data will arrive through the chain function */
+    demux->random_access = FALSE;
+    result = TRUE;
+  } else if (mode == GST_PAD_MODE_PULL) {
+    if (active) {
+      demux->random_access = TRUE;
+      demux->segment_seqnum = gst_util_seqnum_next ();
+      result = gst_pad_start_task (sinkpad,
+          (GstTaskFunction) gst_flv_demux_loop, sinkpad, NULL);
+    } else {
+      demux->random_access = FALSE;
+      result = gst_pad_stop_task (sinkpad);
+    }
+  } else {
+    /* unknown scheduling mode */
+    result = FALSE;
+  }
+
+  return result;
+}
+
+/* Sink pad event handler (push mode).  Forwards flush events downstream,
+ * commits the index and fans out EOS on stream end, and converts/forwards
+ * incoming segments (TIME segments are adopted directly, other formats
+ * trigger regeneration of our own segment later). */
+static gboolean
+gst_flv_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstFlvDemux *demux;
+ gboolean ret = FALSE;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ GST_DEBUG_OBJECT (demux, "handling event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_START:
+ GST_DEBUG_OBJECT (demux, "trying to force chain function to exit");
+ demux->flushing = TRUE;
+ ret = gst_flv_demux_push_src_event (demux, event);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ GST_DEBUG_OBJECT (demux, "flushing FLV demuxer");
+ gst_flv_demux_flush (demux, TRUE);
+ ret = gst_flv_demux_push_src_event (demux, event);
+ break;
+ case GST_EVENT_EOS:
+ {
+ GstIndex *index;
+
+ GST_DEBUG_OBJECT (demux, "received EOS");
+
+ index = gst_flv_demux_get_index (GST_ELEMENT (demux));
+
+ if (index) {
+ GST_DEBUG_OBJECT (demux, "committing index");
+ gst_index_commit (index, demux->index_id);
+ gst_object_unref (index);
+ }
+
+ /* EOS before any pad was created means we never saw usable data */
+ if (!demux->audio_pad && !demux->video_pad) {
+ GST_ELEMENT_ERROR (demux, STREAM, FAILED,
+ ("Internal data stream error."), ("Got EOS before any data"));
+ gst_event_unref (event);
+ } else {
+ if (!demux->no_more_pads) {
+ gst_element_no_more_pads (GST_ELEMENT (demux));
+ demux->no_more_pads = TRUE;
+ }
+
+ if (!gst_flv_demux_push_src_event (demux, event))
+ GST_WARNING_OBJECT (demux, "failed pushing EOS on streams");
+ }
+ ret = TRUE;
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ GstSegment in_segment;
+
+ GST_DEBUG_OBJECT (demux, "received new segment");
+
+ gst_event_copy_segment (event, &in_segment);
+ demux->segment_seqnum = gst_event_get_seqnum (event);
+
+ if (in_segment.format == GST_FORMAT_TIME) {
+ /* time segment, this is perfect, copy over the values. */
+ memcpy (&demux->segment, &in_segment, sizeof (in_segment));
+
+ GST_DEBUG_OBJECT (demux, "NEWSEGMENT: %" GST_SEGMENT_FORMAT,
+ &demux->segment);
+
+ /* and forward */
+ ret = gst_flv_demux_push_src_event (demux, event);
+ } else {
+ /* non-time format: drop it; each stream will generate a fresh
+ * TIME segment when it next needs one */
+ demux->audio_need_segment = TRUE;
+ demux->video_need_segment = TRUE;
+ ret = TRUE;
+ gst_event_unref (event);
+ if (demux->new_seg_event) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+ }
+ gst_flow_combiner_reset (demux->flowcombiner);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return ret;
+}
+
+/* Sink pad query handler.  Answers BITRATE with the sum of the known
+ * audio and video bitrates plus a small container overhead; the query
+ * stays unanswered (ret FALSE) if any present stream lacks a bitrate. */
+static gboolean
+gst_flv_demux_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstFlvDemux *demux;
+ gboolean ret = FALSE;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:
+ {
+ guint total_bitrate = 0;
+
+ if (demux->audio_pad) {
+ if (!demux->audio_bitrate) {
+ GST_DEBUG_OBJECT (demux,
+ "Have audio pad but no audio bitrate, can't answer BITRATE query");
+ break;
+ }
+ total_bitrate = demux->audio_bitrate;
+ }
+ if (demux->video_pad) {
+ if (!demux->video_bitrate) {
+ GST_DEBUG_OBJECT (demux,
+ "Have video pad but no video bitrate, can't answer BITRATE query");
+ break;
+ }
+ total_bitrate += demux->video_bitrate;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "bitrate query. total_bitrate:%" G_GUINT32_FORMAT, total_bitrate);
+
+ if (total_bitrate) {
+ /* Padding of 2kbit/s for container overhead */
+ gst_query_set_bitrate (query, total_bitrate + 2048);
+ ret = TRUE;
+ }
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+}
+
+/* Event handler for the audio/video source pads.  Only SEEK is handled
+ * here: it is first offered to upstream and, failing that, executed
+ * locally in pull or push mode.  All other events are forwarded to the
+ * sink pad's peer. */
+static gboolean
+gst_flv_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstFlvDemux *demux = GST_FLV_DEMUX (parent);
+  gboolean ret;
+
+  GST_DEBUG_OBJECT (demux, "handling event %s", GST_EVENT_TYPE_NAME (event));
+
+  if (GST_EVENT_TYPE (event) != GST_EVENT_SEEK)
+    return gst_pad_push_event (demux->sinkpad, event);
+
+  /* Try to push upstream first */
+  gst_event_ref (event);
+  ret = gst_pad_push_event (demux->sinkpad, event);
+  if (ret) {
+    gst_event_unref (event);
+    return ret;
+  }
+
+  /* upstream could not seek: handle it ourselves */
+  if (demux->random_access)
+    ret = gst_flv_demux_handle_seek_pull (demux, event, TRUE);
+  else
+    ret = gst_flv_demux_handle_seek_push (demux, event);
+
+  return ret;
+}
+
+/* Source pad query handler: DURATION and POSITION (TIME only), SEEKING
+ * (upstream first, then index/byte-seekability based), and SEGMENT.
+ * Everything else goes to the default handler. */
+static gboolean
+gst_flv_demux_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ gboolean res = TRUE;
+ GstFlvDemux *demux;
+
+ demux = GST_FLV_DEMUX (parent);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ /* duration is time only */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "duration query only supported for time "
+ "format");
+ res = FALSE;
+ goto beach;
+ }
+
+ /* Try to push upstream first */
+ res = gst_pad_peer_query (demux->sinkpad, query);
+ if (res)
+ goto beach;
+
+ GST_DEBUG_OBJECT (pad, "duration query, replying %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->duration));
+
+ gst_query_set_duration (query, GST_FORMAT_TIME, demux->duration);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_POSITION:
+ {
+ GstFormat format;
+
+ gst_query_parse_position (query, &format, NULL);
+
+ /* position is time only */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (demux, "position query only supported for time "
+ "format");
+ res = FALSE;
+ goto beach;
+ }
+
+ GST_DEBUG_OBJECT (pad, "position query, replying %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.position));
+
+ gst_query_set_position (query, GST_FORMAT_TIME, demux->segment.position);
+
+ break;
+ }
+
+ case GST_QUERY_SEEKING:{
+ GstFormat fmt;
+
+ gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+
+ /* First ask upstream */
+ if (fmt == GST_FORMAT_TIME && gst_pad_peer_query (demux->sinkpad, query)) {
+ gboolean seekable;
+
+ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
+ if (seekable) {
+ res = TRUE;
+ break;
+ }
+ }
+ res = TRUE;
+ /* FIXME, check index this way is not thread safe */
+ if (fmt != GST_FORMAT_TIME || !demux->index) {
+ gst_query_set_seeking (query, fmt, FALSE, -1, -1);
+ } else if (demux->random_access) {
+ /* pull mode: we can seek freely within the known duration */
+ gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0,
+ demux->duration);
+ } else {
+ /* push mode: we are only as seekable as upstream is in bytes */
+ GstQuery *peerquery = gst_query_new_seeking (GST_FORMAT_BYTES);
+ gboolean seekable = gst_pad_peer_query (demux->sinkpad, peerquery);
+
+ if (seekable)
+ gst_query_parse_seeking (peerquery, NULL, &seekable, NULL, NULL);
+ gst_query_unref (peerquery);
+
+ if (seekable)
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, 0,
+ demux->duration);
+ else
+ gst_query_set_seeking (query, GST_FORMAT_TIME, FALSE, -1, -1);
+ }
+ break;
+ }
+ case GST_QUERY_SEGMENT:
+ {
+ GstFormat format;
+ gint64 start, stop;
+
+ format = demux->segment.format;
+
+ start =
+ gst_segment_to_stream_time (&demux->segment, format,
+ demux->segment.start);
+ if ((stop = demux->segment.stop) == -1)
+ stop = demux->segment.duration;
+ else
+ stop = gst_segment_to_stream_time (&demux->segment, format, stop);
+
+ gst_query_set_segment (query, demux->segment.rate, format, start, stop);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+beach:
+
+ return res;
+}
+
+/* Element state change: (re)creates our own in-memory index when going
+ * READY->PAUSED (stale entries from a previous stream would be wrong)
+ * and resets the demuxer state on both that transition and the
+ * PAUSED->READY teardown. */
+static GstStateChangeReturn
+gst_flv_demux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstFlvDemux *demux;
+ GstStateChangeReturn ret;
+
+ demux = GST_FLV_DEMUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* If this is our own index destroy it as the
+ * old entries might be wrong for the new stream */
+ if (demux->own_index) {
+ gst_object_unref (demux->index);
+ demux->index = NULL;
+ demux->own_index = FALSE;
+ }
+
+ /* If no index was created, generate one */
+ if (G_UNLIKELY (!demux->index)) {
+ GST_DEBUG_OBJECT (demux, "no index provided creating our own");
+
+ demux->index = g_object_new (gst_mem_index_get_type (), NULL);
+
+ gst_index_get_writer_id (demux->index, GST_OBJECT (demux),
+ &demux->index_id);
+ demux->own_index = TRUE;
+ }
+ gst_flv_demux_cleanup (demux);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_flv_demux_cleanup (demux);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+#if 0
+/* Replace the demuxer's index with @index (or drop it when NULL).  Takes
+ * its own reference on the new index and releases the reference held on
+ * the previous one.
+ *
+ * Fixes over the previous revision: the old code unreffed demux->index
+ * (the NEW index) instead of old_index — leaking the old index and
+ * leaving demux->index stored without an owned reference — and paired an
+ * extra gst_object_ref (index) with a trailing unref, which additionally
+ * crashed when @index was NULL. */
+static void
+gst_flv_demux_set_index (GstElement * element, GstIndex * index)
+{
+  GstFlvDemux *demux = GST_FLV_DEMUX (element);
+  GstIndex *old_index;
+
+  GST_OBJECT_LOCK (demux);
+
+  old_index = demux->index;
+
+  if (index) {
+    demux->index = gst_object_ref (index);
+    demux->own_index = FALSE;
+  } else
+    demux->index = NULL;
+
+  GST_OBJECT_UNLOCK (demux);
+
+  /* drop the reference held on the previous index, outside the lock */
+  if (old_index)
+    gst_object_unref (old_index);
+
+  /* object lock might be taken again */
+  if (index)
+    gst_index_get_writer_id (index, GST_OBJECT (element), &demux->index_id);
+
+  GST_DEBUG_OBJECT (demux, "Set index %" GST_PTR_FORMAT, demux->index);
+}
+#endif
+
+/* Return a new reference to the demuxer's index, or NULL when none is
+ * set.  The caller owns the returned reference. */
+static GstIndex *
+gst_flv_demux_get_index (GstElement * element)
+{
+  GstFlvDemux *demux = GST_FLV_DEMUX (element);
+  GstIndex *index = NULL;
+
+  GST_OBJECT_LOCK (demux);
+  if (demux->index != NULL)
+    index = gst_object_ref (demux->index);
+  GST_OBJECT_UNLOCK (demux);
+
+  return index;
+}
+
+/* GObject dispose: drop every owned resource (adapter, tag lists, flow
+ * combiner, cached events/codec-data buffers, pads, index, metadata
+ * arrays).  Each pointer is cleared so a repeated dispose is safe. */
+static void
+gst_flv_demux_dispose (GObject * object)
+{
+ GstFlvDemux *demux = GST_FLV_DEMUX (object);
+
+ GST_DEBUG_OBJECT (demux, "disposing FLV demuxer");
+
+ if (demux->adapter) {
+ gst_adapter_clear (demux->adapter);
+ g_object_unref (demux->adapter);
+ demux->adapter = NULL;
+ }
+
+ if (demux->taglist) {
+ gst_tag_list_unref (demux->taglist);
+ demux->taglist = NULL;
+ }
+
+ if (demux->audio_tags) {
+ gst_tag_list_unref (demux->audio_tags);
+ demux->audio_tags = NULL;
+ }
+
+ if (demux->video_tags) {
+ gst_tag_list_unref (demux->video_tags);
+ demux->video_tags = NULL;
+ }
+
+ if (demux->flowcombiner) {
+ gst_flow_combiner_free (demux->flowcombiner);
+ demux->flowcombiner = NULL;
+ }
+
+ if (demux->new_seg_event) {
+ gst_event_unref (demux->new_seg_event);
+ demux->new_seg_event = NULL;
+ }
+
+ if (demux->audio_codec_data) {
+ gst_buffer_unref (demux->audio_codec_data);
+ demux->audio_codec_data = NULL;
+ }
+
+ if (demux->video_codec_data) {
+ gst_buffer_unref (demux->video_codec_data);
+ demux->video_codec_data = NULL;
+ }
+
+ if (demux->audio_pad) {
+ gst_object_unref (demux->audio_pad);
+ demux->audio_pad = NULL;
+ }
+
+ if (demux->video_pad) {
+ gst_object_unref (demux->video_pad);
+ demux->video_pad = NULL;
+ }
+
+ if (demux->index) {
+ gst_object_unref (demux->index);
+ demux->index = NULL;
+ }
+
+ if (demux->times) {
+ g_array_free (demux->times, TRUE);
+ demux->times = NULL;
+ }
+
+ if (demux->filepositions) {
+ g_array_free (demux->filepositions, TRUE);
+ demux->filepositions = NULL;
+ }
+
+ /* chain up to the parent class */
+ GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
+}
+
+/* Class initialisation: wires up dispose and change_state, registers the
+ * sink and (sometimes) audio/video source pad templates, and sets the
+ * element metadata.  The set_index/get_index vmethods are disabled along
+ * with the GstIndex element API (#if 0). */
+static void
+gst_flv_demux_class_init (GstFlvDemuxClass * klass)
+{
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+ gobject_class->dispose = gst_flv_demux_dispose;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_flv_demux_change_state);
+
+#if 0
+ gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_flv_demux_set_index);
+ gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_flv_demux_get_index);
+#endif
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &flv_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &audio_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_src_template);
+ gst_element_class_set_static_metadata (gstelement_class, "FLV Demuxer",
+ "Codec/Demuxer", "Demux FLV feeds into digital streams",
+ "Julien Moutte <julien@moutte.net>");
+}
+
+/* Instance initialisation: creates the sink pad with its event/chain/
+ * activate/query functions, allocates the adapter and flow combiner, and
+ * resets all per-stream state via gst_flv_demux_cleanup(). */
+static void
+gst_flv_demux_init (GstFlvDemux * demux)
+{
+ demux->sinkpad =
+ gst_pad_new_from_static_template (&flv_sink_template, "sink");
+ GST_PAD_SET_ACCEPT_TEMPLATE (demux->sinkpad);
+ gst_pad_set_event_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_event));
+ gst_pad_set_chain_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_chain));
+ gst_pad_set_activate_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate));
+ gst_pad_set_activatemode_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_activate_mode));
+ gst_pad_set_query_function (demux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_flv_demux_sink_query));
+
+ gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
+
+ demux->adapter = gst_adapter_new ();
+ demux->flowcombiner = gst_flow_combiner_new ();
+
+ demux->own_index = FALSE;
+
+ GST_OBJECT_FLAG_SET (demux, GST_ELEMENT_FLAG_INDEXABLE);
+
+ gst_flv_demux_cleanup (demux);
+}
diff --git a/gst/flv/gstflvdemux.h b/gst/flv/gstflvdemux.h
new file mode 100644
index 0000000000..6669dc4aeb
--- /dev/null
+++ b/gst/flv/gstflvdemux.h
@@ -0,0 +1,170 @@
+/* GStreamer
+ * Copyright (C) <2007> Julien Moutte <julien@moutte.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __FLV_DEMUX_H__
+#define __FLV_DEMUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstflowcombiner.h>
+#include "gstindex.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_FLV_DEMUX \
+ (gst_flv_demux_get_type())
+#define GST_FLV_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FLV_DEMUX,GstFlvDemux))
+#define GST_FLV_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FLV_DEMUX,GstFlvDemuxClass))
+#define GST_IS_FLV_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FLV_DEMUX))
+#define GST_IS_FLV_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FLV_DEMUX))
+typedef struct _GstFlvDemux GstFlvDemux;
+typedef struct _GstFlvDemuxClass GstFlvDemuxClass;
+
+/* Parsing state of the demuxer's state machine (see the loop/chain
+ * functions in gstflvdemux.c). */
+typedef enum
+{
+ FLV_STATE_HEADER, /* default/initial: the FLV file header is pulled next */
+ FLV_STATE_TAG_TYPE, /* positioned at the start of a tag; pull/parse it */
+ FLV_STATE_TAG_VIDEO,
+ FLV_STATE_TAG_AUDIO,
+ FLV_STATE_TAG_SCRIPT,
+ FLV_STATE_SEEK, /* a seek is pending; scan/build index, then seek */
+ FLV_STATE_DONE, /* nothing left to do: the task loop reports EOS */
+ FLV_STATE_SKIP,
+ FLV_STATE_NONE
+} GstFlvDemuxState;
+
+struct _GstFlvDemux
+{
+ GstElement element;
+
+ GstPad *sinkpad;
+
+ /* created on demand when the first audio/video tag is seen */
+ GstPad *audio_pad;
+ GstPad *video_pad;
+
+ gboolean have_group_id;
+ guint group_id;
+
+ /* <private> */
+
+ GstIndex *index;
+ gint index_id;
+ gboolean own_index; /* TRUE if we created index ourselves */
+
+ GArray * times;
+ GArray * filepositions;
+
+ GstAdapter *adapter;
+
+ GstFlowCombiner *flowcombiner;
+
+ GstSegment segment;
+
+ /* pending segment event, pushed lazily by the first stream needing it */
+ GstEvent *new_seg_event;
+
+ GstTagList *taglist;
+ GstTagList *audio_tags;
+ GstTagList *video_tags;
+
+ GstFlvDemuxState state;
+
+ guint64 offset; /* current read position in the stream (bytes) */
+ guint64 cur_tag_offset;
+ GstClockTime duration;
+ guint64 tag_size;
+ guint64 tag_data_size;
+
+ /* Audio infos */
+ guint16 rate;
+ guint16 channels;
+ guint16 width;
+ guint16 audio_codec_tag;
+ guint64 audio_offset;
+ gboolean audio_need_discont;
+ gboolean audio_need_segment;
+ GstBuffer * audio_codec_data;
+ GstClockTime audio_start;
+ guint32 last_audio_pts;
+ GstClockTime audio_time_offset;
+ guint32 audio_bitrate;
+
+ /* Video infos */
+ guint32 w;
+ guint32 h;
+ guint32 par_x;
+ guint32 par_y;
+ guint16 video_codec_tag;
+ guint64 video_offset;
+ gboolean video_need_discont;
+ gboolean video_need_segment;
+ gboolean got_par;
+ GstBuffer * video_codec_data;
+ GstClockTime video_start;
+ guint32 last_video_dts;
+ GstClockTime video_time_offset;
+ gdouble framerate;
+ guint32 video_bitrate;
+
+ gboolean random_access; /* TRUE in pull mode (task-driven) */
+ gboolean need_header;
+ gboolean has_audio;
+ gboolean has_video;
+ gboolean strict;
+ gboolean flushing;
+
+ gboolean no_more_pads;
+
+#ifndef GST_DISABLE_DEBUG
+ gboolean no_audio_warned;
+ gboolean no_video_warned;
+#endif
+
+ gboolean seeking;
+ gboolean building_index;
+ gboolean indexed; /* TRUE if index is completely built */
+ gboolean upstream_seekable; /* TRUE if upstream is seekable */
+ gint64 file_size; /* upstream size in bytes, 0 if unknown */
+ GstEvent *seek_event; /* seek deferred to the task thread */
+ gint64 seek_time;
+ guint32 segment_seqnum; /* seqnum applied to forwarded seg/EOS events */
+
+ /* how far (in time/bytes) the index scan has progressed */
+ GstClockTime index_max_time;
+ gint64 index_max_pos;
+
+ /* reverse playback */
+ GstClockTime video_first_ts;
+ GstClockTime audio_first_ts;
+ gboolean video_done;
+ gboolean audio_done;
+ gint64 from_offset;
+ gint64 to_offset;
+};
+
+/* Plain class structure: no virtual methods are added. */
+struct _GstFlvDemuxClass
+{
+ GstElementClass parent_class;
+};
+
+GType gst_flv_demux_get_type (void);
+
+G_END_DECLS
+#endif /* __FLV_DEMUX_H__ */
diff --git a/gst/flv/gstflvelement.c b/gst/flv/gstflvelement.c
new file mode 100644
index 0000000000..3028a5e982
--- /dev/null
+++ b/gst/flv/gstflvelement.c
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) <2007> Julien Moutte <julien@moutte.net>
+ * Copyright (c) 2008,2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (c) 2008-2017 Collabora Ltd
+ * @author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * @author: Vincent Penquerc'h <vincent.penquerch@collabora.com>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstflvelements.h"
+
+GST_DEBUG_CATEGORY_STATIC (flvdemux_debug);
+#define GST_CAT_DEFAULT flvdemux_debug
+
+/* One-time shared initialisation for the FLV elements: registers the
+ * "flvdemux" debug category.  Guarded with g_once_init_enter/leave so it
+ * is safe to call from multiple element registration paths.
+ * (The @plugin argument is currently unused.) */
+void
+flv_element_init (GstPlugin * plugin)
+{
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+    GST_DEBUG_CATEGORY_INIT (flvdemux_debug, "flvdemux", 0, "FLV demuxer");
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/gst/flv/gstflvelements.h b/gst/flv/gstflvelements.h
new file mode 100644
index 0000000000..5be73d3f2e
--- /dev/null
+++ b/gst/flv/gstflvelements.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) <2007> Julien Moutte <julien@moutte.net>
+ * Copyright (c) 2008,2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (c) 2008-2017 Collabora Ltd
+ * @author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * @author: Vincent Penquerc'h <vincent.penquerch@collabora.com>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_FLV_ELEMENTS_H__
+#define __GST_FLV_ELEMENTS_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+void flv_element_init (GstPlugin * plugin);
+
+GST_ELEMENT_REGISTER_DECLARE (flvdemux);
+GST_ELEMENT_REGISTER_DECLARE (flvmux);
+
+G_END_DECLS
+
+#endif /* __GST_FLV_ELEMENTS_H__ */
diff --git a/gst/flv/gstflvmux.c b/gst/flv/gstflvmux.c
new file mode 100644
index 0000000000..6ce6413e5b
--- /dev/null
+++ b/gst/flv/gstflvmux.c
@@ -0,0 +1,2203 @@
+/* GStreamer
+ *
+ * Copyright (c) 2008,2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (c) 2008-2017 Collabora Ltd
+ * @author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * @author: Vincent Penquerc'h <vincent.penquerch@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-flvmux
+ * @title: flvmux
+ *
+ * flvmux muxes different streams into an FLV file.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v flvmux name=mux ! filesink location=test.flv audiotestsrc samplesperbuffer=44100 num-buffers=10 ! faac ! mux. videotestsrc num-buffers=250 ! video/x-raw,framerate=25/1 ! x264enc ! mux.
+ * ]| This pipeline encodes a test audio and video stream and muxes both into an FLV file.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+
+#include <gst/audio/audio.h>
+
+#include "gstflvelements.h"
+#include "gstflvmux.h"
+#include "amfdefs.h"
+
+GST_DEBUG_CATEGORY_STATIC (flvmux_debug);
+#define GST_CAT_DEFAULT flvmux_debug
+
+/* GObject property IDs for GstFlvMux. */
+enum
+{
+  PROP_0,
+  PROP_STREAMABLE,
+  PROP_METADATACREATOR,
+  PROP_ENCODER,
+  PROP_SKIP_BACKWARDS_STREAMS,
+};
+
+/* Property defaults. */
+#define DEFAULT_STREAMABLE FALSE
+/* Number of seek-index entries preallocated in the script tag (see
+ * gst_flv_mux_preallocate_index). */
+#define MAX_INDEX_ENTRIES 128
+#define DEFAULT_METADATACREATOR "GStreamer " PACKAGE_VERSION " FLV muxer"
+#define DEFAULT_SKIP_BACKWARDS_STREAMS FALSE
+
+/* Always-present source pad producing the muxed FLV byte stream. */
+static GstStaticPadTemplate src_templ = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/x-flv")
+    );
+
+/* Request video sink pad: the video codecs the FLV container can carry. */
+static GstStaticPadTemplate videosink_templ = GST_STATIC_PAD_TEMPLATE ("video",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("video/x-flash-video; "
+        "video/x-flash-screen; "
+        "video/x-vp6-flash; " "video/x-vp6-alpha; "
+        "video/x-h264, stream-format=avc;")
+    );
+
+/* Request audio sink pad: the audio codecs/rates the FLV container can
+ * carry. */
+static GstStaticPadTemplate audiosink_templ = GST_STATIC_PAD_TEMPLATE ("audio",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS
+    ("audio/x-adpcm, layout = (string) swf, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
+        "audio/mpeg, mpegversion = (int) 1, layer = (int) 3, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 22050, 44100 }, parsed = (boolean) TRUE; "
+        "audio/mpeg, mpegversion = (int) { 4, 2 }, stream-format = (string) raw; "
+        "audio/x-nellymoser, channels = (int) { 1, 2 }, rate = (int) { 5512, 8000, 11025, 16000, 22050, 44100 }; "
+        "audio/x-raw, format = (string) { U8, S16LE}, layout = (string) interleaved, channels = (int) { 1, 2 }, rate = (int) { 5512, 11025, 22050, 44100 }; "
+        "audio/x-alaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
+        "audio/x-mulaw, channels = (int) { 1, 2 }, rate = (int) 8000; "
+        "audio/x-speex, channels = (int) 1, rate = (int) 16000;")
+    );
+
+/* GstFlvMuxPad is a plain GstAggregatorPad subclass; GstFlvMux is a
+ * GstAggregator that also implements the GstTagSetter interface.  The
+ * element registers under the name "flvmux" at primary rank. */
+G_DEFINE_TYPE (GstFlvMuxPad, gst_flv_mux_pad, GST_TYPE_AGGREGATOR_PAD);
+
+#define gst_flv_mux_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstFlvMux, gst_flv_mux, GST_TYPE_AGGREGATOR,
+    G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL));
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (flvmux, "flvmux",
+    GST_RANK_PRIMARY, GST_TYPE_FLV_MUX, flv_element_init (plugin));
+
+/* Forward declarations for the vfuncs and helpers implemented below. */
+static GstFlowReturn
+gst_flv_mux_aggregate (GstAggregator * aggregator, gboolean timeout);
+static gboolean
+gst_flv_mux_sink_event (GstAggregator * aggregator, GstAggregatorPad * pad,
+    GstEvent * event);
+
+static GstAggregatorPad *gst_flv_mux_create_new_pad (GstAggregator * agg,
+    GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps);
+static void gst_flv_mux_release_pad (GstElement * element, GstPad * pad);
+
+static gboolean gst_flv_mux_video_pad_setcaps (GstFlvMuxPad * pad,
+    GstCaps * caps);
+static gboolean gst_flv_mux_audio_pad_setcaps (GstFlvMuxPad * pad,
+    GstCaps * caps);
+
+static void gst_flv_mux_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_flv_mux_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_flv_mux_finalize (GObject * object);
+
+static void gst_flv_mux_reset (GstElement * element);
+static void gst_flv_mux_reset_pad (GstFlvMuxPad * pad);
+
+static void gst_flv_mux_pad_finalize (GObject * object);
+
+static gboolean gst_flv_mux_start (GstAggregator * aggregator);
+static GstFlowReturn gst_flv_mux_flush (GstAggregator * aggregator);
+static GstClockTime gst_flv_mux_get_next_time (GstAggregator * aggregator);
+static GstFlowReturn gst_flv_mux_write_eos (GstFlvMux * mux);
+static GstFlowReturn gst_flv_mux_write_header (GstFlvMux * mux);
+static GstFlowReturn gst_flv_mux_rewrite_header (GstFlvMux * mux);
+static gboolean gst_flv_mux_are_all_pads_eos (GstFlvMux * mux);
+static GstFlowReturn gst_flv_mux_update_src_caps (GstAggregator * aggregator,
+    GstCaps * caps, GstCaps ** ret);
+static GstClockTime gst_flv_mux_query_upstream_duration (GstFlvMux * mux);
+static GstClockTime gst_flv_mux_segment_to_running_time (const GstSegment *
+    segment, GstClockTime t);
+
+/* GstAggregatorPad::flush vfunc: invalidate the pad's cached timestamps so
+ * the first buffer after a flush is treated as a fresh start. */
+static GstFlowReturn
+gst_flv_mux_pad_flush (GstAggregatorPad * pad, GstAggregator * aggregator)
+{
+  GstFlvMuxPad *flvpad = GST_FLV_MUX_PAD (pad);
+
+  flvpad->last_timestamp = GST_CLOCK_TIME_NONE;
+  flvpad->pts = GST_CLOCK_TIME_NONE;
+  flvpad->dts = GST_CLOCK_TIME_NONE;
+
+  return GST_FLOW_OK;
+}
+
+/* GstAggregatorPad::skip_buffer vfunc, active only when the
+ * skip-backwards-streams property is set.  Returns TRUE to drop @buffer:
+ * either because we are still waiting for a keyframe after a previous drop
+ * (drop_deltas), or because the buffer's running time is behind the last
+ * DTS already muxed (mux->last_dts is kept in milliseconds, hence the
+ * GST_MSECOND scaling). */
+static gboolean
+gst_flv_mux_skip_buffer (GstAggregatorPad * apad, GstAggregator * aggregator,
+    GstBuffer * buffer)
+{
+  GstFlvMuxPad *fpad = GST_FLV_MUX_PAD_CAST (apad);
+  GstFlvMux *mux = GST_FLV_MUX_CAST (aggregator);
+  GstClockTime t;
+
+  if (!mux->skip_backwards_streams)
+    return FALSE;
+
+  if (fpad->drop_deltas) {
+    if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
+      GST_INFO_OBJECT (fpad, "Waiting for keyframe, dropping %" GST_PTR_FORMAT,
+          buffer);
+      return TRUE;
+    } else {
+      /* drop-deltas is set and the buffer isn't delta, drop flag */
+      fpad->drop_deltas = FALSE;
+    }
+  }
+
+  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS_OR_PTS (buffer))) {
+    t = gst_flv_mux_segment_to_running_time (&apad->segment,
+        GST_BUFFER_DTS_OR_PTS (buffer));
+
+    if (t < (GST_MSECOND * mux->last_dts)) {
+      GST_WARNING_OBJECT (fpad,
+          "Timestamp %" GST_TIME_FORMAT " going backwards from last used %"
+          GST_TIME_FORMAT ", dropping %" GST_PTR_FORMAT,
+          GST_TIME_ARGS (t), GST_TIME_ARGS (GST_MSECOND * mux->last_dts),
+          buffer);
+      /* Look for non-delta buffer */
+      fpad->drop_deltas = TRUE;
+      return TRUE;
+    }
+  }
+
+  return FALSE;
+}
+
+/* Wire up the pad subclass: finalize plus the flush and skip_buffer
+ * aggregator-pad vfuncs. */
+static void
+gst_flv_mux_pad_class_init (GstFlvMuxPadClass * klass)
+{
+  GstAggregatorPadClass *aggregatorpad_class = (GstAggregatorPadClass *) klass;
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+
+  gobject_class->finalize = gst_flv_mux_pad_finalize;
+
+  aggregatorpad_class->flush = GST_DEBUG_FUNCPTR (gst_flv_mux_pad_flush);
+  aggregatorpad_class->skip_buffer =
+      GST_DEBUG_FUNCPTR (gst_flv_mux_skip_buffer);
+}
+
+/* Instance init: start every pad in its pristine (reset) state. */
+static void
+gst_flv_mux_pad_init (GstFlvMuxPad * pad)
+{
+  gst_flv_mux_reset_pad (pad);
+}
+
+/* One entry of the seek index: byte position and time, stored as doubles
+ * because they are serialised as AMF0 numbers in the script tag. */
+typedef struct
+{
+  gdouble position;
+  gdouble time;
+} GstFlvMuxIndexEntry;
+
+/* GFunc-compatible destructor for GstFlvMuxIndexEntry slices. */
+static void
+gst_flv_mux_index_entry_free (GstFlvMuxIndexEntry * entry)
+{
+  g_slice_free (GstFlvMuxIndexEntry, entry);
+}
+
+/* Wrap @mem of @size bytes in a new GstBuffer.  When @free_func is given
+ * the buffer takes ownership of @mem (freed with @free_func); otherwise
+ * the memory is wrapped read-only and the caller keeps ownership. */
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+  GstBuffer *buf;
+
+  buf = gst_buffer_new ();
+  gst_buffer_append_memory (buf,
+      gst_memory_new_wrapped (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+          mem, size, 0, size, mem, free_func));
+
+  return buf;
+}
+
+/* Allocate @size bytes and return both the owning buffer (*buffer) and a
+ * writable pointer to the payload (*data); the buffer frees the memory. */
+static void
+_gst_buffer_new_and_alloc (gsize size, GstBuffer ** buffer, guint8 ** data)
+{
+  g_return_if_fail (data != NULL);
+  g_return_if_fail (buffer != NULL);
+
+  *data = g_malloc (size);
+  *buffer = _gst_buffer_new_wrapped (*data, size, g_free);
+}
+
+/* Class init: install properties, override the aggregator vfuncs, add the
+ * static pad templates and element metadata.  Also registers the flvmux
+ * debug category (once). */
+static void
+gst_flv_mux_class_init (GstFlvMuxClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstAggregatorClass *gstaggregator_class;
+
+  GST_DEBUG_CATEGORY_INIT (flvmux_debug, "flvmux", 0, "FLV muxer");
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstaggregator_class = (GstAggregatorClass *) klass;
+
+  gobject_class->get_property = gst_flv_mux_get_property;
+  gobject_class->set_property = gst_flv_mux_set_property;
+  gobject_class->finalize = gst_flv_mux_finalize;
+
+  /* FIXME: ideally the right mode of operation should be detected
+   * automatically using queries when parameter not specified. */
+  /**
+   * GstFlvMux:streamable
+   *
+   * If True, the output will be streaming friendly. (ie without indexes and
+   * duration)
+   */
+  g_object_class_install_property (gobject_class, PROP_STREAMABLE,
+      g_param_spec_boolean ("streamable", "streamable",
+          "If set to true, the output should be as if it is to be streamed "
+          "and hence no indexes written or duration written.",
+          DEFAULT_STREAMABLE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_METADATACREATOR,
+      g_param_spec_string ("metadatacreator", "metadatacreator",
+          "The value of metadatacreator in the meta packet.",
+          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_ENCODER,
+      g_param_spec_string ("encoder", "encoder",
+          "The value of encoder in the meta packet.",
+          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_SKIP_BACKWARDS_STREAMS,
+      g_param_spec_boolean ("skip-backwards-streams", "Skip backwards streams",
+          "If set to true, streams that go backwards related to the other stream "
+          "will have buffers dropped until they reach the correct timestamp",
+          DEFAULT_SKIP_BACKWARDS_STREAMS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gstaggregator_class->create_new_pad =
+      GST_DEBUG_FUNCPTR (gst_flv_mux_create_new_pad);
+  gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_flv_mux_release_pad);
+
+  gstaggregator_class->start = GST_DEBUG_FUNCPTR (gst_flv_mux_start);
+  gstaggregator_class->aggregate = GST_DEBUG_FUNCPTR (gst_flv_mux_aggregate);
+  gstaggregator_class->sink_event = GST_DEBUG_FUNCPTR (gst_flv_mux_sink_event);
+  gstaggregator_class->flush = GST_DEBUG_FUNCPTR (gst_flv_mux_flush);
+  gstaggregator_class->get_next_time =
+      GST_DEBUG_FUNCPTR (gst_flv_mux_get_next_time);
+  gstaggregator_class->update_src_caps =
+      GST_DEBUG_FUNCPTR (gst_flv_mux_update_src_caps);
+
+  gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
+      &videosink_templ, GST_TYPE_FLV_MUX_PAD);
+  gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
+      &audiosink_templ, GST_TYPE_FLV_MUX_PAD);
+  gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
+      &src_templ, GST_TYPE_AGGREGATOR_PAD);
+  gst_element_class_set_static_metadata (gstelement_class, "FLV muxer",
+      "Codec/Muxer",
+      "Muxes video/audio streams into a FLV stream",
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+  /* NOTE: a second, redundant GST_DEBUG_CATEGORY_INIT for the same
+   * category was removed here; it is already initialised above. */
+
+  gst_type_mark_as_plugin_api (GST_TYPE_FLV_MUX_PAD, 0);
+}
+
+/* Instance init: cache the aggregator's src pad, set property defaults
+ * (encoder string defaults to the metadatacreator string) and reset all
+ * muxing state. */
+static void
+gst_flv_mux_init (GstFlvMux * mux)
+{
+  mux->srcpad = GST_AGGREGATOR_CAST (mux)->srcpad;
+
+  /* property */
+  mux->streamable = DEFAULT_STREAMABLE;
+  mux->metadatacreator = g_strdup (DEFAULT_METADATACREATOR);
+  mux->encoder = g_strdup (DEFAULT_METADATACREATOR);
+
+  mux->new_tags = FALSE;
+
+  gst_flv_mux_reset (GST_ELEMENT (mux));
+}
+
+/* GObject finalize: release muxing state (index, tags) and the property
+ * strings, then chain up. */
+static void
+gst_flv_mux_finalize (GObject * object)
+{
+  GstFlvMux *mux = GST_FLV_MUX (object);
+
+  gst_flv_mux_reset (GST_ELEMENT (object));
+  g_free (mux->metadatacreator);
+  g_free (mux->encoder);
+
+  G_OBJECT_CLASS (gst_flv_mux_parent_class)->finalize (object);
+}
+
+/* Pad finalize: reset_pad drops the codec_data ref, then chain up. */
+static void
+gst_flv_mux_pad_finalize (GObject * object)
+{
+  GstFlvMuxPad *pad = GST_FLV_MUX_PAD (object);
+
+  gst_flv_mux_reset_pad (pad);
+
+  G_OBJECT_CLASS (gst_flv_mux_pad_parent_class)->finalize (object);
+}
+
+/* GstAggregator::flush vfunc: currently just resets all muxing state. */
+static GstFlowReturn
+gst_flv_mux_flush (GstAggregator * aggregator)
+{
+  /* TODO: What is the right behaviour on flush? Should we just ignore it ?
+   * This still needs to be defined. */
+
+  gst_flv_mux_reset (GST_ELEMENT (aggregator));
+  return GST_FLOW_OK;
+}
+
+/* GstAggregator::start vfunc: begin every run from a clean state. */
+static gboolean
+gst_flv_mux_start (GstAggregator * aggregator)
+{
+  gst_flv_mux_reset (GST_ELEMENT (aggregator));
+  return TRUE;
+}
+
+/* Reset all per-run muxing state: free the seek index, clear counters and
+ * timestamps, return to the HEADER state and drop any collected tags. */
+static void
+gst_flv_mux_reset (GstElement * element)
+{
+  GstFlvMux *mux = GST_FLV_MUX (element);
+
+  g_list_foreach (mux->index, (GFunc) gst_flv_mux_index_entry_free, NULL);
+  g_list_free (mux->index);
+  mux->index = NULL;
+  mux->byte_count = 0;
+
+  mux->duration = GST_CLOCK_TIME_NONE;
+  mux->new_tags = FALSE;
+  mux->first_timestamp = GST_CLOCK_TIME_NONE;
+  mux->last_dts = 0;
+
+  mux->state = GST_FLV_MUX_STATE_HEADER;
+  mux->sent_header = FALSE;
+
+  /* tags */
+  gst_tag_setter_reset_tags (GST_TAG_SETTER (mux));
+}
+
+/* Extract per-codec relevant tags for
+ * insertion into the metadata later - ie bitrate,
+ * but maybe others in the future.
+ * Nominal bitrate takes precedence over the measured bitrate tag. */
+static void
+gst_flv_mux_store_codec_tags (GstFlvMux * mux,
+    GstFlvMuxPad * flvpad, GstTagList * list)
+{
+  /* Look for a bitrate as either nominal or actual bitrate tag */
+  if (gst_tag_list_get_uint (list, GST_TAG_NOMINAL_BITRATE, &flvpad->bitrate)
+      || gst_tag_list_get_uint (list, GST_TAG_BITRATE, &flvpad->bitrate)) {
+    GST_DEBUG_OBJECT (mux, "Stored bitrate for pad %" GST_PTR_FORMAT " = %u",
+        flvpad, flvpad->bitrate);
+  }
+}
+
+/* GstAggregator::sink_event vfunc.
+ * CAPS events are dispatched to the audio/video setcaps helpers; TAG
+ * events are merged into the tag setter and mined for per-codec info
+ * (bitrate).  All events are then chained up to the parent class. */
+static gboolean
+gst_flv_mux_sink_event (GstAggregator * aggregator, GstAggregatorPad * pad,
+    GstEvent * event)
+{
+  GstFlvMux *mux = GST_FLV_MUX (aggregator);
+  GstFlvMuxPad *flvpad = (GstFlvMuxPad *) pad;
+  gboolean ret = TRUE;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_event_parse_caps (event, &caps);
+
+      /* Only the audio and video request pads can exist, so any other pad
+       * here is a programming error. */
+      if (mux->video_pad == flvpad) {
+        ret = gst_flv_mux_video_pad_setcaps (flvpad, caps);
+      } else if (mux->audio_pad == flvpad) {
+        ret = gst_flv_mux_audio_pad_setcaps (flvpad, caps);
+      } else {
+        g_assert_not_reached ();
+      }
+      break;
+    }
+    case GST_EVENT_TAG:{
+      GstTagList *list;
+      GstTagSetter *setter = GST_TAG_SETTER (mux);
+      const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter);
+
+      gst_event_parse_tag (event, &list);
+      gst_tag_setter_merge_tags (setter, list, mode);
+      gst_flv_mux_store_codec_tags (mux, flvpad, list);
+      /* Request a metadata rewrite on the next aggregation cycle. */
+      mux->new_tags = TRUE;
+      ret = TRUE;
+      break;
+    }
+    default:
+      break;
+  }
+
+  if (!ret)
+    return FALSE;
+
+  /* fixed: stray double semicolon after the chained-up call */
+  return GST_AGGREGATOR_CLASS (parent_class)->sink_event (aggregator, pad,
+      event);
+}
+
+/* Map the negotiated video caps onto the FLV codec id (2 = Sorenson H.263,
+ * 3 = Screen video, 4 = VP6, 5 = VP6 alpha, 7 = AVC/H.264) and capture
+ * codec_data.  In streamable mode, a mid-stream codec or codec_data change
+ * flags the pad and forces the header state so a new header is emitted.
+ * Returns FALSE on unsupported caps. */
+static gboolean
+gst_flv_mux_video_pad_setcaps (GstFlvMuxPad * pad, GstCaps * caps)
+{
+  GstFlvMux *mux = GST_FLV_MUX (gst_pad_get_parent (pad));
+  gboolean ret = TRUE;
+  GstStructure *s;
+  guint old_codec;
+  GstBuffer *old_codec_data = NULL;
+
+  /* Remember the previous configuration to detect mid-stream changes. */
+  old_codec = pad->codec;
+  if (pad->codec_data)
+    old_codec_data = gst_buffer_ref (pad->codec_data);
+
+  s = gst_caps_get_structure (caps, 0);
+
+  if (strcmp (gst_structure_get_name (s), "video/x-flash-video") == 0) {
+    pad->codec = 2;
+  } else if (strcmp (gst_structure_get_name (s), "video/x-flash-screen") == 0) {
+    pad->codec = 3;
+  } else if (strcmp (gst_structure_get_name (s), "video/x-vp6-flash") == 0) {
+    pad->codec = 4;
+  } else if (strcmp (gst_structure_get_name (s), "video/x-vp6-alpha") == 0) {
+    pad->codec = 5;
+  } else if (strcmp (gst_structure_get_name (s), "video/x-h264") == 0) {
+    pad->codec = 7;
+  } else {
+    ret = FALSE;
+  }
+
+  if (ret && gst_structure_has_field (s, "codec_data")) {
+    const GValue *val = gst_structure_get_value (s, "codec_data");
+
+    if (val)
+      gst_buffer_replace (&pad->codec_data, gst_value_get_buffer (val));
+    else if (pad->codec_data)
+      /* Fix: the old code unreffed without clearing the pointer, leaving
+       * pad->codec_data dangling (double-unref on reset).  Replace with
+       * NULL to unref and clear atomically. */
+      gst_buffer_replace (&pad->codec_data, NULL);
+  }
+
+  if (ret && mux->streamable && mux->state != GST_FLV_MUX_STATE_HEADER) {
+    if (old_codec != pad->codec) {
+      pad->info_changed = TRUE;
+    }
+
+    if (old_codec_data && pad->codec_data) {
+      GstMapInfo map;
+
+      gst_buffer_map (old_codec_data, &map, GST_MAP_READ);
+      if (map.size != gst_buffer_get_size (pad->codec_data) ||
+          gst_buffer_memcmp (pad->codec_data, 0, map.data, map.size))
+        pad->info_changed = TRUE;
+
+      gst_buffer_unmap (old_codec_data, &map);
+    } else if (!old_codec_data && pad->codec_data) {
+      pad->info_changed = TRUE;
+    }
+
+    /* A changed stream configuration requires re-emitting the header. */
+    if (pad->info_changed)
+      mux->state = GST_FLV_MUX_STATE_HEADER;
+  }
+
+  if (old_codec_data)
+    gst_buffer_unref (old_codec_data);
+
+  gst_object_unref (mux);
+
+  return ret;
+}
+
+/* Map the negotiated audio caps onto the FLV SOUNDFORMAT codec id plus the
+ * 2-bit rate index, sample width and channel flag of the FLV audio tag
+ * header, and capture codec_data.  In streamable mode a mid-stream change
+ * flags the pad and forces the header state.  Returns FALSE on caps FLV
+ * cannot represent. */
+static gboolean
+gst_flv_mux_audio_pad_setcaps (GstFlvMuxPad * pad, GstCaps * caps)
+{
+  GstFlvMux *mux = GST_FLV_MUX (gst_pad_get_parent (pad));
+  gboolean ret = TRUE;
+  GstStructure *s;
+  guint old_codec, old_rate, old_width, old_channels;
+  GstBuffer *old_codec_data = NULL;
+
+  /* Remember the previous configuration to detect mid-stream changes. */
+  old_codec = pad->codec;
+  old_rate = pad->rate;
+  old_width = pad->width;
+  old_channels = pad->channels;
+  if (pad->codec_data)
+    old_codec_data = gst_buffer_ref (pad->codec_data);
+
+  s = gst_caps_get_structure (caps, 0);
+
+  /* SOUNDFORMAT ids: 1 = SWF ADPCM, 2 = MP3, 3 = PCM, 4/5/6 = Nellymoser
+   * (16kHz/8kHz/other), 7 = A-law, 8 = mu-law, 10 = AAC, 11 = Speex,
+   * 14 = MP3 8kHz. */
+  if (strcmp (gst_structure_get_name (s), "audio/x-adpcm") == 0) {
+    const gchar *layout = gst_structure_get_string (s, "layout");
+    if (layout && strcmp (layout, "swf") == 0) {
+      pad->codec = 1;
+    } else {
+      ret = FALSE;
+    }
+  } else if (strcmp (gst_structure_get_name (s), "audio/mpeg") == 0) {
+    gint mpegversion;
+
+    if (gst_structure_get_int (s, "mpegversion", &mpegversion)) {
+      if (mpegversion == 1) {
+        gint layer;
+
+        if (gst_structure_get_int (s, "layer", &layer) && layer == 3) {
+          gint rate;
+
+          if (gst_structure_get_int (s, "rate", &rate) && rate == 8000)
+            pad->codec = 14;
+          else
+            pad->codec = 2;
+        } else {
+          ret = FALSE;
+        }
+      } else if (mpegversion == 4 || mpegversion == 2) {
+        pad->codec = 10;
+      } else {
+        ret = FALSE;
+      }
+    } else {
+      ret = FALSE;
+    }
+  } else if (strcmp (gst_structure_get_name (s), "audio/x-nellymoser") == 0) {
+    gint rate, channels;
+
+    if (gst_structure_get_int (s, "rate", &rate)
+        && gst_structure_get_int (s, "channels", &channels)) {
+      if (channels == 1 && rate == 16000)
+        pad->codec = 4;
+      else if (channels == 1 && rate == 8000)
+        pad->codec = 5;
+      else
+        pad->codec = 6;
+    } else {
+      pad->codec = 6;
+    }
+  } else if (strcmp (gst_structure_get_name (s), "audio/x-raw") == 0) {
+    GstAudioInfo info;
+
+    if (gst_audio_info_from_caps (&info, caps)) {
+      pad->codec = 3;
+
+      /* width flag: 0 = 8-bit, 1 = 16-bit samples */
+      if (GST_AUDIO_INFO_WIDTH (&info) == 8)
+        pad->width = 0;
+      else if (GST_AUDIO_INFO_WIDTH (&info) == 16)
+        pad->width = 1;
+      else
+        ret = FALSE;
+    } else
+      ret = FALSE;
+  } else if (strcmp (gst_structure_get_name (s), "audio/x-alaw") == 0) {
+    pad->codec = 7;
+  } else if (strcmp (gst_structure_get_name (s), "audio/x-mulaw") == 0) {
+    pad->codec = 8;
+  } else if (strcmp (gst_structure_get_name (s), "audio/x-speex") == 0) {
+    pad->codec = 11;
+  } else {
+    ret = FALSE;
+  }
+
+  if (ret) {
+    gint rate, channels;
+
+    /* rate index: 0 = 5.5kHz, 1 = 11kHz, 2 = 22kHz, 3 = 44kHz; AAC is
+     * always signalled as 44kHz, and some codecs reuse index 0 for their
+     * fixed 8/16kHz rates. */
+    if (gst_structure_get_int (s, "rate", &rate)) {
+      if (pad->codec == 10)
+        pad->rate = 3;
+      else if (rate == 5512)
+        pad->rate = 0;
+      else if (rate == 11025)
+        pad->rate = 1;
+      else if (rate == 22050)
+        pad->rate = 2;
+      else if (rate == 44100)
+        pad->rate = 3;
+      else if (rate == 8000 && (pad->codec == 5 || pad->codec == 14
+              || pad->codec == 7 || pad->codec == 8))
+        pad->rate = 0;
+      else if (rate == 16000 && (pad->codec == 4 || pad->codec == 11))
+        pad->rate = 0;
+      else
+        ret = FALSE;
+    } else if (pad->codec == 10) {
+      pad->rate = 3;
+    } else {
+      ret = FALSE;
+    }
+
+    /* channel flag: 0 = mono, 1 = stereo; Nellymoser/Speex are always
+     * signalled mono, AAC always stereo. */
+    if (gst_structure_get_int (s, "channels", &channels)) {
+      if (pad->codec == 4 || pad->codec == 5
+          || pad->codec == 6 || pad->codec == 11)
+        pad->channels = 0;
+      else if (pad->codec == 10)
+        pad->channels = 1;
+      else if (channels == 1)
+        pad->channels = 0;
+      else if (channels == 2)
+        pad->channels = 1;
+      else
+        ret = FALSE;
+    } else if (pad->codec == 4 || pad->codec == 5 || pad->codec == 6) {
+      pad->channels = 0;
+    } else if (pad->codec == 10) {
+      pad->channels = 1;
+    } else {
+      ret = FALSE;
+    }
+
+    /* All compressed formats are signalled as 16-bit. */
+    if (pad->codec != 3)
+      pad->width = 1;
+  }
+
+  if (ret && gst_structure_has_field (s, "codec_data")) {
+    const GValue *val = gst_structure_get_value (s, "codec_data");
+
+    if (val)
+      gst_buffer_replace (&pad->codec_data, gst_value_get_buffer (val));
+    else if (pad->codec_data)
+      /* Fix: the old code unreffed without clearing the pointer, leaving
+       * pad->codec_data dangling (double-unref on reset).  Replace with
+       * NULL to unref and clear atomically. */
+      gst_buffer_replace (&pad->codec_data, NULL);
+  }
+
+  if (ret && mux->streamable && mux->state != GST_FLV_MUX_STATE_HEADER) {
+    if (old_codec != pad->codec || old_rate != pad->rate ||
+        old_width != pad->width || old_channels != pad->channels) {
+      pad->info_changed = TRUE;
+    }
+
+    if (old_codec_data && pad->codec_data) {
+      GstMapInfo map;
+
+      gst_buffer_map (old_codec_data, &map, GST_MAP_READ);
+      if (map.size != gst_buffer_get_size (pad->codec_data) ||
+          gst_buffer_memcmp (pad->codec_data, 0, map.data, map.size))
+        pad->info_changed = TRUE;
+
+      gst_buffer_unmap (old_codec_data, &map);
+    } else if (!old_codec_data && pad->codec_data) {
+      pad->info_changed = TRUE;
+    }
+
+    /* A changed stream configuration requires re-emitting the header. */
+    if (pad->info_changed)
+      mux->state = GST_FLV_MUX_STATE_HEADER;
+  }
+
+  if (old_codec_data)
+    gst_buffer_unref (old_codec_data);
+
+  gst_object_unref (mux);
+
+  return ret;
+}
+
+/* Return a pad to its pristine state: drop codec_data, mark codec/rate/
+ * width/channels as unset (G_MAXUINT) and clear the change/drop flags. */
+static void
+gst_flv_mux_reset_pad (GstFlvMuxPad * pad)
+{
+  GST_DEBUG_OBJECT (pad, "resetting pad");
+
+  if (pad->codec_data)
+    gst_buffer_unref (pad->codec_data);
+  pad->codec_data = NULL;
+  pad->codec = G_MAXUINT;
+  pad->rate = G_MAXUINT;
+  pad->width = G_MAXUINT;
+  pad->channels = G_MAXUINT;
+  pad->info_changed = FALSE;
+  pad->drop_deltas = FALSE;
+
+  gst_flv_mux_pad_flush (GST_AGGREGATOR_PAD_CAST (pad), NULL);
+}
+
+/* GstAggregator::create_new_pad vfunc.  Accepts at most one audio and one
+ * video request pad; refuses a new pad once muxing has started in
+ * non-streamable mode (the stream would not be playable).  Returns NULL
+ * on refusal. */
+static GstAggregatorPad *
+gst_flv_mux_create_new_pad (GstAggregator * agg,
+    GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+  GstElementClass *klass = GST_ELEMENT_GET_CLASS (agg);
+  GstAggregatorPad *aggpad;
+  GstFlvMux *mux = GST_FLV_MUX (agg);
+  GstFlvMuxPad *pad = NULL;
+  const gchar *name = NULL;
+  gboolean video;
+
+  if (mux->state != GST_FLV_MUX_STATE_HEADER && !mux->streamable) {
+    GST_ELEMENT_WARNING (mux, STREAM, MUX,
+        ("Requested a late stream in a non-streamable file"),
+        ("Stream added after file started and therefore won't be playable"));
+    return NULL;
+  }
+
+  if (templ == gst_element_class_get_pad_template (klass, "audio")) {
+    if (mux->audio_pad) {
+      GST_WARNING_OBJECT (mux, "Already have an audio pad");
+      return NULL;
+    }
+    name = "audio";
+    video = FALSE;
+  } else if (templ == gst_element_class_get_pad_template (klass, "video")) {
+    if (mux->video_pad) {
+      GST_WARNING_OBJECT (mux, "Already have a video pad");
+      return NULL;
+    }
+    name = "video";
+    video = TRUE;
+  } else {
+    GST_WARNING_OBJECT (mux, "Invalid template");
+    return NULL;
+  }
+
+  aggpad =
+      GST_AGGREGATOR_CLASS (gst_flv_mux_parent_class)->create_new_pad (agg,
+      templ, name, caps);
+  if (aggpad == NULL)
+    return NULL;
+
+  pad = GST_FLV_MUX_PAD (aggpad);
+
+  gst_flv_mux_reset_pad (pad);
+
+  if (video)
+    mux->video_pad = pad;
+  else
+    mux->audio_pad = pad;
+
+  return aggpad;
+}
+
+/* GstElement::release_pad vfunc: keep a temporary ref across the parent's
+ * release (which may drop the last ref), reset the pad and clear the
+ * matching audio/video shortcut pointer. */
+static void
+gst_flv_mux_release_pad (GstElement * element, GstPad * pad)
+{
+  GstFlvMux *mux = GST_FLV_MUX (element);
+  GstFlvMuxPad *flvpad = GST_FLV_MUX_PAD (gst_object_ref (pad));
+
+  GST_ELEMENT_CLASS (gst_flv_mux_parent_class)->release_pad (element, pad);
+
+  gst_flv_mux_reset_pad (flvpad);
+
+  if (flvpad == mux->video_pad) {
+    mux->video_pad = NULL;
+  } else if (flvpad == mux->audio_pad) {
+    mux->audio_pad = NULL;
+  } else {
+    GST_WARNING_OBJECT (pad, "Pad is not known audio or video pad");
+  }
+
+  gst_object_unref (flvpad);
+}
+
+/* Push @buffer downstream via the aggregator, updating the src segment
+ * position and the running byte count (used for index/filesize rewriting). */
+static GstFlowReturn
+gst_flv_mux_push (GstFlvMux * mux, GstBuffer * buffer)
+{
+  GstAggregator *agg = GST_AGGREGATOR (mux);
+  GstAggregatorPad *srcpad = GST_AGGREGATOR_PAD (agg->srcpad);
+
+  if (GST_BUFFER_PTS_IS_VALID (buffer))
+    srcpad->segment.position = GST_BUFFER_PTS (buffer);
+
+  /* pushing the buffer that rewrites the header will make it no longer be the
+   * total output size in bytes, but it doesn't matter at that point */
+  mux->byte_count += gst_buffer_get_size (buffer);
+
+  return gst_aggregator_finish_buffer (GST_AGGREGATOR_CAST (mux), buffer);
+}
+
+/* Build the 9-byte FLV file header ("FLV", version 1, audio/video flag
+ * bits, data offset) followed by the initial 4-byte PreviousTagSize (0).
+ * A stream counts as present only once its pad has negotiated a codec. */
+static GstBuffer *
+gst_flv_mux_create_header (GstFlvMux * mux)
+{
+  GstBuffer *header;
+  guint8 *data;
+  gboolean have_audio;
+  gboolean have_video;
+
+  _gst_buffer_new_and_alloc (9 + 4, &header, &data);
+
+  data[0] = 'F';
+  data[1] = 'L';
+  data[2] = 'V';
+  data[3] = 0x01;               /* Version */
+
+  have_audio = (mux->audio_pad && mux->audio_pad->codec != G_MAXUINT);
+  have_video = (mux->video_pad && mux->video_pad->codec != G_MAXUINT);
+
+  /* flags: bit 2 = audio present, bit 0 = video present */
+  data[4] = (have_audio << 2) | have_video;     /* flags */
+  GST_WRITE_UINT32_BE (data + 5, 9);    /* data offset */
+  GST_WRITE_UINT32_BE (data + 9, 0);    /* previous tag size */
+
+  return header;
+}
+
+/* Reserve space inside the onMetaData script tag for the seek index that
+ * is written at EOS; until then the region is a dummy "gstfiller" string
+ * variable padded with spaces so the file stays valid. */
+static GstBuffer *
+gst_flv_mux_preallocate_index (GstFlvMux * mux)
+{
+  GstBuffer *tmp;
+  guint8 *data;
+  gint preallocate_size;
+
+  /* preallocate index of size:
+   * - 'keyframes' ECMA array key: 2 + 9 = 11 bytes
+   * - nested ECMA array header, length and end marker: 8 bytes
+   * - 'times' and 'filepositions' keys: 22 bytes
+   * - two strict arrays headers and lengths: 10 bytes
+   * - each index entry: 18 bytes
+   */
+  preallocate_size = 11 + 8 + 22 + 10 + MAX_INDEX_ENTRIES * 18;
+  GST_DEBUG_OBJECT (mux, "preallocating %d bytes for the index",
+      preallocate_size);
+
+  _gst_buffer_new_and_alloc (preallocate_size, &tmp, &data);
+
+  /* prefill the space with a gstfiller: <spaces> script tag variable */
+  GST_WRITE_UINT16_BE (data, 9);        /* 9 characters */
+  memcpy (data + 2, "gstfiller", 9);
+  GST_WRITE_UINT8 (data + 11, AMF0_STRING_MARKER);      /* a string value */
+  GST_WRITE_UINT16_BE (data + 12, preallocate_size - 14);
+  memset (data + 14, ' ', preallocate_size - 14);       /* the rest is spaces */
+  return tmp;
+}
+
+/* Serialise one AMF0 object property holding a number: 16-bit BE name
+ * length, the name bytes, the NUMBER marker and an 8-byte BE double. */
+static GstBuffer *
+gst_flv_mux_create_number_script_value (const gchar * name, gdouble value)
+{
+  GstBuffer *tmp;
+  guint8 *data;
+  gsize len = strlen (name);
+
+  _gst_buffer_new_and_alloc (2 + len + 1 + 8, &tmp, &data);
+
+  GST_WRITE_UINT16_BE (data, len);
+  data += 2;                    /* name length */
+  memcpy (data, name, len);
+  data += len;
+  *data++ = AMF0_NUMBER_MARKER; /* double type */
+  GST_WRITE_DOUBLE_BE (data, value);
+
+  return tmp;
+}
+
+/* Build the FLV "onMetaData" script tag from the element's tag list and the
+ * current pad state.  Returns a new buffer holding the complete script tag
+ * (11-byte tag header, AMF0 payload, trailing 4-byte previous-tag size),
+ * or NULL if no metadata entry was written.  In non-streamable mode
+ * placeholder duration/filesize values and index space are emitted so that
+ * gst_flv_mux_rewrite_header() can patch them in at EOS. */
+static GstBuffer *
+gst_flv_mux_create_metadata (GstFlvMux * mux)
+{
+ const GstTagList *tags;
+ GstBuffer *script_tag, *tmp;
+ GstMapInfo map;
+ guint64 dts;
+ guint8 *data;
+ gint i, n_tags, tags_written = 0;
+
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (mux));
+
+ dts = mux->last_dts;
+
+ /* Timestamp must start at zero */
+ if (GST_CLOCK_TIME_IS_VALID (mux->first_timestamp)) {
+ dts -= mux->first_timestamp / GST_MSECOND;
+ }
+
+ GST_DEBUG_OBJECT (mux,
+ "Creating metadata, dts %" G_GUINT64_FORMAT ", tags = %" GST_PTR_FORMAT,
+ dts, tags);
+
+ if (dts > G_MAXUINT32) {
+ GST_LOG_OBJECT (mux,
+ "Detected rollover, timestamp will be truncated (previous:%"
+ G_GUINT64_FORMAT ", new:%u)", dts, (guint32) dts);
+ }
+
+ /* FIXME perhaps some bytewriter'ing here ... */
+
+ _gst_buffer_new_and_alloc (11, &script_tag, &data);
+
+ /* FLV tag type 18: script data */
+ data[0] = 18;
+
+ /* Data size, unknown for now */
+ data[1] = 0;
+ data[2] = 0;
+ data[3] = 0;
+
+ /* Timestamp: lower 24 bits, then the extended (upper 8) bits byte */
+ GST_WRITE_UINT24_BE (data + 4, dts);
+ data[7] = (((guint) dts) >> 24) & 0xff;
+
+ /* Stream ID */
+ data[8] = data[9] = data[10] = 0;
+
+ /* AMF0 payload starts with the string "onMetaData" */
+ _gst_buffer_new_and_alloc (13, &tmp, &data);
+ data[0] = AMF0_STRING_MARKER; /* string */
+ data[1] = 0;
+ data[2] = 10; /* length 10 */
+ memcpy (&data[3], "onMetaData", 10);
+
+ script_tag = gst_buffer_append (script_tag, tmp);
+
+ /* ECMA array header; the element count written here is only an initial
+ * estimate, the real count (tags_written) is patched in at the end */
+ n_tags = (tags) ? gst_tag_list_n_tags (tags) : 0;
+ _gst_buffer_new_and_alloc (5, &tmp, &data);
+ data[0] = 8; /* ECMA array */
+ GST_WRITE_UINT32_BE (data + 1, n_tags);
+ script_tag = gst_buffer_append (script_tag, tmp);
+
+ /* Some players expect the 'duration' to be always set. Fill it out later,
+ after querying the pads or after getting EOS */
+ if (!mux->streamable) {
+ tmp = gst_flv_mux_create_number_script_value ("duration", 86400);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+
+ /* Sometimes the information about the total file size is useful for the
+ player. It will be filled later, after getting EOS */
+ tmp = gst_flv_mux_create_number_script_value ("filesize", 0);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+
+ /* Preallocate space for the index to be written at EOS */
+ tmp = gst_flv_mux_preallocate_index (mux);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ } else {
+ GST_DEBUG_OBJECT (mux, "not preallocating index, streamable mode");
+ }
+
+ /* Map the supported GStreamer tags (duration, artist, title) onto FLV
+ * metadata entries; other tags are ignored */
+ for (i = 0; tags && i < n_tags; i++) {
+ const gchar *tag_name = gst_tag_list_nth_tag_name (tags, i);
+ if (!strcmp (tag_name, GST_TAG_DURATION)) {
+ GstClockTime dur;
+
+ if (!gst_tag_list_get_uint64 (tags, GST_TAG_DURATION, &dur))
+ continue;
+ mux->duration = dur;
+ } else if (!strcmp (tag_name, GST_TAG_ARTIST) ||
+ !strcmp (tag_name, GST_TAG_TITLE)) {
+ gchar *s;
+ const gchar *t = NULL;
+
+ if (!strcmp (tag_name, GST_TAG_ARTIST))
+ t = "creator";
+ else if (!strcmp (tag_name, GST_TAG_TITLE))
+ t = "title";
+
+ if (!gst_tag_list_get_string (tags, tag_name, &s))
+ continue;
+
+ /* key: 2-byte length + name; value: 1-byte string marker +
+ * 2-byte length + string data */
+ _gst_buffer_new_and_alloc (2 + strlen (t) + 1 + 2 + strlen (s),
+ &tmp, &data);
+ data[0] = 0; /* tag name length */
+ data[1] = strlen (t);
+ memcpy (&data[2], t, strlen (t));
+ data[2 + strlen (t)] = 2; /* string */
+ data[3 + strlen (t)] = (strlen (s) >> 8) & 0xff;
+ data[4 + strlen (t)] = (strlen (s)) & 0xff;
+ memcpy (&data[5 + strlen (t)], s, strlen (s));
+ script_tag = gst_buffer_append (script_tag, tmp);
+
+ g_free (s);
+ tags_written++;
+ }
+ }
+
+ /* No duration tag seen: ask upstream (non-streamable mode only) */
+ if (!mux->streamable && mux->duration == GST_CLOCK_TIME_NONE) {
+ mux->duration = gst_flv_mux_query_upstream_duration (mux);
+ }
+
+ if (!mux->streamable && mux->duration != GST_CLOCK_TIME_NONE) {
+ gdouble d;
+ GstMapInfo map;
+
+ d = gst_guint64_to_gdouble (mux->duration);
+ d /= (gdouble) GST_SECOND;
+
+ GST_DEBUG_OBJECT (mux, "determined the duration to be %f", d);
+ gst_buffer_map (script_tag, &map, GST_MAP_WRITE);
+ /* 29 = 11 tag header + 13 onMetaData string + 5 array header; then skip
+ * the 'duration' key (2-byte length + 8 chars) and 1 number marker */
+ GST_WRITE_DOUBLE_BE (map.data + 29 + 2 + 8 + 1, d);
+ gst_buffer_unmap (script_tag, &map);
+ }
+
+ if (mux->video_pad && mux->video_pad->codec != G_MAXUINT) {
+ GstCaps *caps = NULL;
+
+ if (mux->video_pad)
+ caps = gst_pad_get_current_caps (GST_PAD (mux->video_pad));
+
+ if (caps != NULL) {
+ GstStructure *s;
+ gint size;
+ gint num, den;
+
+ GST_DEBUG_OBJECT (mux, "putting videocodecid %d in the metadata",
+ mux->video_pad->codec);
+
+ tmp = gst_flv_mux_create_number_script_value ("videocodecid",
+ mux->video_pad->codec);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+
+ s = gst_caps_get_structure (caps, 0);
+ gst_caps_unref (caps);
+
+ if (gst_structure_get_int (s, "width", &size)) {
+ GST_DEBUG_OBJECT (mux, "putting width %d in the metadata", size);
+
+ tmp = gst_flv_mux_create_number_script_value ("width", size);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+ }
+
+ if (gst_structure_get_int (s, "height", &size)) {
+ GST_DEBUG_OBJECT (mux, "putting height %d in the metadata", size);
+
+ tmp = gst_flv_mux_create_number_script_value ("height", size);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+ }
+
+ if (gst_structure_get_fraction (s, "pixel-aspect-ratio", &num, &den)) {
+ gdouble d;
+
+ d = num;
+ GST_DEBUG_OBJECT (mux, "putting AspectRatioX %f in the metadata", d);
+
+ tmp = gst_flv_mux_create_number_script_value ("AspectRatioX", d);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+
+ d = den;
+ GST_DEBUG_OBJECT (mux, "putting AspectRatioY %f in the metadata", d);
+
+ tmp = gst_flv_mux_create_number_script_value ("AspectRatioY", d);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+ }
+
+ if (gst_structure_get_fraction (s, "framerate", &num, &den)) {
+ gdouble d;
+
+ gst_util_fraction_to_double (num, den, &d);
+ GST_DEBUG_OBJECT (mux, "putting framerate %f in the metadata", d);
+
+ tmp = gst_flv_mux_create_number_script_value ("framerate", d);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+ }
+
+ GST_DEBUG_OBJECT (mux, "putting videodatarate %u KB/s in the metadata",
+ mux->video_pad->bitrate / 1024);
+ tmp = gst_flv_mux_create_number_script_value ("videodatarate",
+ mux->video_pad->bitrate / 1024);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+ }
+ }
+
+ if (mux->audio_pad && mux->audio_pad->codec != G_MAXUINT) {
+ GST_DEBUG_OBJECT (mux, "putting audiocodecid %d in the metadata",
+ mux->audio_pad->codec);
+
+ tmp = gst_flv_mux_create_number_script_value ("audiocodecid",
+ mux->audio_pad->codec);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+
+ GST_DEBUG_OBJECT (mux, "putting audiodatarate %u KB/s in the metadata",
+ mux->audio_pad->bitrate / 1024);
+ tmp = gst_flv_mux_create_number_script_value ("audiodatarate",
+ mux->audio_pad->bitrate / 1024);
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+ }
+
+ _gst_buffer_new_and_alloc (2 + 15 + 1 + 2 + strlen (mux->metadatacreator),
+ &tmp, &data);
+ data[0] = 0; /* 15 bytes name */
+ data[1] = 15;
+ memcpy (&data[2], "metadatacreator", 15);
+ data[17] = 2; /* string */
+ data[18] = (strlen (mux->metadatacreator) >> 8) & 0xff;
+ data[19] = (strlen (mux->metadatacreator)) & 0xff;
+ memcpy (&data[20], mux->metadatacreator, strlen (mux->metadatacreator));
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+
+ _gst_buffer_new_and_alloc (2 + 7 + 1 + 2 + strlen (mux->encoder),
+ &tmp, &data);
+ data[0] = 0; /* 7 bytes name */
+ data[1] = 7;
+ memcpy (&data[2], "encoder", 7);
+ data[9] = 2; /* string */
+ data[10] = (strlen (mux->encoder) >> 8) & 0xff;
+ data[11] = (strlen (mux->encoder)) & 0xff;
+ memcpy (&data[12], mux->encoder, strlen (mux->encoder));
+ script_tag = gst_buffer_append (script_tag, tmp);
+ tags_written++;
+
+ /* Write a 'creationdate' entry in asctime()-like format, UTC */
+ {
+ time_t secs;
+ struct tm tm;
+ gchar *s;
+ static const gchar *weekdays[] = {
+ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"
+ };
+ static const gchar *months[] = {
+ "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul",
+ "Aug", "Sep", "Oct", "Nov", "Dec"
+ };
+
+ secs = g_get_real_time () / G_USEC_PER_SEC;
+#ifdef HAVE_GMTIME_R
+ gmtime_r (&secs, &tm);
+#else
+ tm = *gmtime (&secs);
+#endif
+
+ s = g_strdup_printf ("%s %s %d %02d:%02d:%02d %d", weekdays[tm.tm_wday],
+ months[tm.tm_mon], tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec,
+ tm.tm_year + 1900);
+
+ _gst_buffer_new_and_alloc (2 + 12 + 1 + 2 + strlen (s), &tmp, &data);
+ data[0] = 0; /* 12 bytes name */
+ data[1] = 12;
+ memcpy (&data[2], "creationdate", 12);
+ data[14] = 2; /* string */
+ data[15] = (strlen (s) >> 8) & 0xff;
+ data[16] = (strlen (s)) & 0xff;
+ memcpy (&data[17], s, strlen (s));
+ script_tag = gst_buffer_append (script_tag, tmp);
+
+ g_free (s);
+ tags_written++;
+ }
+
+ if (!tags_written) {
+ gst_buffer_unref (script_tag);
+ script_tag = NULL;
+ goto exit;
+ }
+
+ /* Close the ECMA array: empty key name + end marker (9) */
+ _gst_buffer_new_and_alloc (2 + 0 + 1, &tmp, &data);
+ data[0] = 0; /* 0 byte size */
+ data[1] = 0;
+ data[2] = 9; /* end marker */
+ script_tag = gst_buffer_append (script_tag, tmp);
+
+ /* Trailing previous-tag size field */
+ _gst_buffer_new_and_alloc (4, &tmp, &data);
+ GST_WRITE_UINT32_BE (data, gst_buffer_get_size (script_tag));
+ script_tag = gst_buffer_append (script_tag, tmp);
+
+ /* Patch the 24-bit tag data size now that the payload is complete
+ * (excludes the 11-byte tag header and the 4-byte previous-tag size) */
+ gst_buffer_map (script_tag, &map, GST_MAP_WRITE);
+ map.data[1] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 16) & 0xff;
+ map.data[2] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 8) & 0xff;
+ map.data[3] = ((gst_buffer_get_size (script_tag) - 11 - 4) >> 0) & 0xff;
+
+ /* Patch the real ECMA array element count (11 tag header + 13 onMetaData
+ * string + 1 array marker bytes into the tag) */
+ GST_WRITE_UINT32_BE (map.data + 11 + 13 + 1, tags_written);
+ gst_buffer_unmap (script_tag, &map);
+
+exit:
+ return script_tag;
+}
+
+/* Serialize one buffer into a complete FLV tag: 11-byte tag header,
+ * codec-specific payload header, payload bytes and the trailing 4-byte
+ * previous-tag size.  'buffer' may be codec data (is_codec_data) or NULL
+ * (video end-of-sequence marker).  Also clamps/offsets timestamps and
+ * updates mux->last_dts. */
+static GstBuffer *
+gst_flv_mux_buffer_to_tag_internal (GstFlvMux * mux, GstBuffer * buffer,
+ GstFlvMuxPad * pad, gboolean is_codec_data)
+{
+ GstBuffer *tag;
+ GstMapInfo map;
+ guint size;
+ guint64 pts, dts, cts;
+ guint8 *data, *bdata = NULL;
+ gsize bsize = 0;
+
+ /* Derive millisecond pts/dts: prefer the pad's current timestamps, then
+ * its last timestamp, finally the mux-wide last dts */
+ if (GST_CLOCK_TIME_IS_VALID (pad->dts)) {
+ pts = pad->pts / GST_MSECOND;
+ dts = pad->dts / GST_MSECOND;
+ GST_LOG_OBJECT (mux,
+ "Pad %s: Created dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT
+ " from rounding %" GST_TIME_FORMAT ", %" GST_TIME_FORMAT,
+ GST_PAD_NAME (pad), GST_TIME_ARGS (dts * GST_MSECOND),
+ GST_TIME_ARGS (pts * GST_MSECOND), GST_TIME_ARGS (pad->dts),
+ GST_TIME_ARGS (pad->pts));
+ } else if (GST_CLOCK_TIME_IS_VALID (pad->last_timestamp)) {
+ pts = dts = pad->last_timestamp / GST_MSECOND;
+ GST_DEBUG_OBJECT (mux,
+ "Pad %s: Created dts and pts %" GST_TIME_FORMAT
+ " from rounding last pad timestamp %" GST_TIME_FORMAT,
+ GST_PAD_NAME (pad), GST_TIME_ARGS (pts * GST_MSECOND),
+ GST_TIME_ARGS (pad->last_timestamp));
+ } else {
+ pts = dts = mux->last_dts;
+ GST_DEBUG_OBJECT (mux,
+ "Pad %s: Created dts and pts %" GST_TIME_FORMAT
+ " from last mux timestamp",
+ GST_PAD_NAME (pad), GST_TIME_ARGS (pts * GST_MSECOND));
+ }
+
+ /* We prevent backwards timestamps because they confuse librtmp,
+ * it expects timestamps to go forward not only inside one stream, but
+ * also between the audio & video streams.
+ */
+ if (dts < mux->last_dts) {
+ GST_WARNING_OBJECT (pad, "Got backwards dts! (%" GST_TIME_FORMAT
+ " < %" GST_TIME_FORMAT ")", GST_TIME_ARGS (dts * GST_MSECOND),
+ GST_TIME_ARGS (mux->last_dts * GST_MSECOND));
+ dts = mux->last_dts;
+ }
+ mux->last_dts = dts;
+
+ /* Be safe in case TS are buggy */
+ if (pts > dts)
+ cts = pts - dts;
+ else
+ cts = 0;
+
+ /* Timestamp must start at zero */
+ if (GST_CLOCK_TIME_IS_VALID (mux->first_timestamp)) {
+ dts -= mux->first_timestamp / GST_MSECOND;
+ pts = dts + cts;
+ }
+
+ GST_LOG_OBJECT (mux,
+ "got pts %" G_GUINT64_FORMAT " dts %" G_GUINT64_FORMAT " cts %"
+ G_GUINT64_FORMAT, pts, dts, cts);
+
+ if (dts > G_MAXUINT32) {
+ GST_LOG_OBJECT (mux,
+ "Detected rollover, timestamp will be truncated (previous:%"
+ G_GUINT64_FORMAT ", new:%u)", dts, (guint32) dts);
+ }
+
+ if (buffer != NULL) {
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ bdata = map.data;
+ bsize = map.size;
+ }
+
+ /* Compute total tag size: 11-byte header + payload header + payload.
+ * Video codec 7 (H.264/AVC) needs 1 packet-type byte + 3-byte composition
+ * time; audio codec 10 (AAC) needs 1 packet-type byte. */
+ size = 11;
+ if (mux->video_pad == pad) {
+ size += 1;
+ if (pad->codec == 7)
+ size += 4 + bsize;
+ else
+ size += bsize;
+ } else {
+ size += 1;
+ if (pad->codec == 10)
+ size += 1 + bsize;
+ else
+ size += bsize;
+ }
+ /* trailing previous-tag size field */
+ size += 4;
+
+ _gst_buffer_new_and_alloc (size, &tag, &data);
+ memset (data, 0, size);
+
+ /* tag type: 9 = video, 8 = audio */
+ data[0] = (mux->video_pad == pad) ? 9 : 8;
+
+ /* 24-bit data size (excludes 11-byte header and 4-byte trailer) */
+ data[1] = ((size - 11 - 4) >> 16) & 0xff;
+ data[2] = ((size - 11 - 4) >> 8) & 0xff;
+ data[3] = ((size - 11 - 4) >> 0) & 0xff;
+
+ /* 24-bit timestamp plus extended (upper 8 bits) byte */
+ GST_WRITE_UINT24_BE (data + 4, dts);
+ data[7] = (((guint) dts) >> 24) & 0xff;
+
+ /* stream ID, always 0 */
+ data[8] = data[9] = data[10] = 0;
+
+ if (mux->video_pad == pad) {
+ /* frame type in the high nibble: 2 = inter frame, 1 = key frame */
+ if (buffer && GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
+ data[11] |= 2 << 4;
+ else
+ data[11] |= 1 << 4;
+
+ data[11] |= pad->codec & 0x0f;
+
+ if (pad->codec == 7) {
+ if (is_codec_data) {
+ data[12] = 0;
+ GST_WRITE_UINT24_BE (data + 13, 0);
+ } else if (bsize == 0) {
+ /* AVC end of sequence */
+ data[12] = 2;
+ GST_WRITE_UINT24_BE (data + 13, 0);
+ } else {
+ /* AVC NALU */
+ data[12] = 1;
+ GST_WRITE_UINT24_BE (data + 13, cts);
+ }
+ memcpy (data + 11 + 1 + 4, bdata, bsize);
+ } else {
+ memcpy (data + 11 + 1, bdata, bsize);
+ }
+ } else {
+ /* audio header byte: codec, sample rate, sample width, channels */
+ data[11] |= (pad->codec << 4) & 0xf0;
+ data[11] |= (pad->rate << 2) & 0x0c;
+ data[11] |= (pad->width << 1) & 0x02;
+ data[11] |= (pad->channels << 0) & 0x01;
+
+ GST_LOG_OBJECT (mux, "Creating byte %02x with "
+ "codec:%d, rate:%d, width:%d, channels:%d",
+ data[11], pad->codec, pad->rate, pad->width, pad->channels);
+
+ if (pad->codec == 10) {
+ /* AAC packet type: 0 = sequence header, 1 = raw data */
+ data[12] = is_codec_data ? 0 : 1;
+
+ memcpy (data + 11 + 1 + 1, bdata, bsize);
+ } else {
+ memcpy (data + 11 + 1, bdata, bsize);
+ }
+ }
+
+ if (buffer)
+ gst_buffer_unmap (buffer, &map);
+
+ /* previous-tag size at the very end */
+ GST_WRITE_UINT32_BE (data + size - 4, size - 4);
+
+ GST_BUFFER_PTS (tag) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DTS (tag) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DURATION (tag) = GST_CLOCK_TIME_NONE;
+
+ if (buffer) {
+ /* if we are streamable we copy over timestamps and offsets,
+ if not just copy the offsets */
+ if (mux->streamable) {
+ GstClockTime timestamp = GST_CLOCK_TIME_NONE;
+
+ if (gst_segment_to_running_time_full (&GST_AGGREGATOR_PAD (pad)->segment,
+ GST_FORMAT_TIME, GST_BUFFER_DTS_OR_PTS (buffer),
+ &timestamp) == 1) {
+ GST_BUFFER_PTS (tag) = timestamp;
+ GST_BUFFER_DURATION (tag) = GST_BUFFER_DURATION (buffer);
+ }
+ GST_BUFFER_OFFSET (tag) = GST_BUFFER_OFFSET_NONE;
+ GST_BUFFER_OFFSET_END (tag) = GST_BUFFER_OFFSET_NONE;
+ } else {
+ GST_BUFFER_OFFSET (tag) = GST_BUFFER_OFFSET (buffer);
+ GST_BUFFER_OFFSET_END (tag) = GST_BUFFER_OFFSET_END (buffer);
+ }
+
+ /* mark the buffer if it's an audio buffer and there's also video being muxed
+ * or it's a video interframe */
+ if (mux->video_pad == pad &&
+ GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
+ GST_BUFFER_FLAG_SET (tag, GST_BUFFER_FLAG_DELTA_UNIT);
+ } else {
+ GST_BUFFER_FLAG_SET (tag, GST_BUFFER_FLAG_DELTA_UNIT);
+ GST_BUFFER_OFFSET (tag) = GST_BUFFER_OFFSET_END (tag) =
+ GST_BUFFER_OFFSET_NONE;
+ }
+
+ return tag;
+}
+
+/* Convenience wrapper: mux a regular media buffer (not codec data). */
+static inline GstBuffer *
+gst_flv_mux_buffer_to_tag (GstFlvMux * mux, GstBuffer * buffer,
+    GstFlvMuxPad * pad)
+{
+  GstBuffer *tag;
+
+  tag = gst_flv_mux_buffer_to_tag_internal (mux, buffer, pad, FALSE);
+  return tag;
+}
+
+/* Convenience wrapper: mux a codec-data (sequence header) buffer. */
+static inline GstBuffer *
+gst_flv_mux_codec_data_buffer_to_tag (GstFlvMux * mux, GstBuffer * buffer,
+    GstFlvMuxPad * pad)
+{
+  GstBuffer *tag;
+
+  tag = gst_flv_mux_buffer_to_tag_internal (mux, buffer, pad, TRUE);
+  return tag;
+}
+
+/* Convenience wrapper: build the end-of-stream tag (NULL payload). */
+static inline GstBuffer *
+gst_flv_mux_eos_to_tag (GstFlvMux * mux, GstFlvMuxPad * pad)
+{
+  GstBuffer *tag;
+
+  tag = gst_flv_mux_buffer_to_tag_internal (mux, NULL, pad, FALSE);
+  return tag;
+}
+
+/* Append a copy of 'buffer' to the streamheader GstValueArray. */
+static void
+gst_flv_mux_put_buffer_in_streamheader (GValue * streamheader,
+    GstBuffer * buffer)
+{
+  GstBuffer *copy;
+  GValue bufval = { 0 };
+
+  copy = gst_buffer_copy (buffer);
+  g_value_init (&bufval, GST_TYPE_BUFFER);
+  gst_value_set_buffer (&bufval, copy);
+  /* gst_value_set_buffer takes its own ref, drop ours */
+  gst_buffer_unref (copy);
+  gst_value_array_append_value (streamheader, &bufval);
+  g_value_unset (&bufval);
+}
+
+/* Build the src caps (video/x-flv with a "streamheader" buffer array).
+ * Header/metadata/codec-data buffers are created here; ownership of each is
+ * transferred to its non-NULL out parameter, otherwise the buffer is
+ * unreffed before returning.  Codec-data tags are only produced for H.264
+ * (video codec 7) and AAC (audio codec 10). */
+static GstCaps *
+gst_flv_mux_prepare_src_caps (GstFlvMux * mux, GstBuffer ** header_buf,
+ GstBuffer ** metadata_buf, GstBuffer ** video_codec_data_buf,
+ GstBuffer ** audio_codec_data_buf)
+{
+ GstBuffer *header, *metadata;
+ GstBuffer *video_codec_data, *audio_codec_data;
+ GstCaps *caps;
+ GstStructure *structure;
+ GValue streamheader = { 0 };
+ GList *l;
+
+ header = gst_flv_mux_create_header (mux);
+ metadata = gst_flv_mux_create_metadata (mux);
+ video_codec_data = NULL;
+ audio_codec_data = NULL;
+
+ GST_OBJECT_LOCK (mux);
+ for (l = GST_ELEMENT_CAST (mux)->sinkpads; l != NULL; l = l->next) {
+ GstFlvMuxPad *pad = l->data;
+
+ /* Get H.264 and AAC codec data, if present */
+ if (pad && mux->video_pad == pad && pad->codec == 7) {
+ if (pad->codec_data == NULL)
+ GST_WARNING_OBJECT (mux, "Codec data for video stream not found, "
+ "output might not be playable");
+ else
+ video_codec_data =
+ gst_flv_mux_codec_data_buffer_to_tag (mux, pad->codec_data, pad);
+ } else if (pad && mux->audio_pad == pad && pad->codec == 10) {
+ if (pad->codec_data == NULL)
+ GST_WARNING_OBJECT (mux, "Codec data for audio stream not found, "
+ "output might not be playable");
+ else
+ audio_codec_data =
+ gst_flv_mux_codec_data_buffer_to_tag (mux, pad->codec_data, pad);
+ }
+ }
+ GST_OBJECT_UNLOCK (mux);
+
+ /* mark buffers that will go in the streamheader */
+ GST_BUFFER_FLAG_SET (header, GST_BUFFER_FLAG_HEADER);
+ GST_BUFFER_FLAG_SET (metadata, GST_BUFFER_FLAG_HEADER);
+ if (video_codec_data != NULL) {
+ GST_BUFFER_FLAG_SET (video_codec_data, GST_BUFFER_FLAG_HEADER);
+ /* mark as a delta unit, so downstream will not try to synchronize on that
+ * buffer - to actually start playback you need a real video keyframe */
+ GST_BUFFER_FLAG_SET (video_codec_data, GST_BUFFER_FLAG_DELTA_UNIT);
+ }
+ if (audio_codec_data != NULL) {
+ GST_BUFFER_FLAG_SET (audio_codec_data, GST_BUFFER_FLAG_HEADER);
+ }
+
+ /* put buffers in streamheader */
+ g_value_init (&streamheader, GST_TYPE_ARRAY);
+ gst_flv_mux_put_buffer_in_streamheader (&streamheader, header);
+ gst_flv_mux_put_buffer_in_streamheader (&streamheader, metadata);
+ if (video_codec_data != NULL)
+ gst_flv_mux_put_buffer_in_streamheader (&streamheader, video_codec_data);
+ if (audio_codec_data != NULL)
+ gst_flv_mux_put_buffer_in_streamheader (&streamheader, audio_codec_data);
+
+ /* create the caps and put the streamheader in them */
+ caps = gst_caps_new_empty_simple ("video/x-flv");
+ structure = gst_caps_get_structure (caps, 0);
+ gst_structure_set_value (structure, "streamheader", &streamheader);
+ g_value_unset (&streamheader);
+
+ /* Hand each buffer to the caller if requested, drop it otherwise */
+ if (header_buf) {
+ *header_buf = header;
+ } else {
+ gst_buffer_unref (header);
+ }
+
+ if (metadata_buf) {
+ *metadata_buf = metadata;
+ } else {
+ gst_buffer_unref (metadata);
+ }
+
+ if (video_codec_data_buf) {
+ *video_codec_data_buf = video_codec_data;
+ } else if (video_codec_data) {
+ gst_buffer_unref (video_codec_data);
+ }
+
+ if (audio_codec_data_buf) {
+ *audio_codec_data_buf = audio_codec_data;
+ } else if (audio_codec_data) {
+ gst_buffer_unref (audio_codec_data);
+ }
+
+ return caps;
+}
+
+/* Set the src caps and push the FLV file header, metadata and codec-data
+ * tags downstream.  In streamable mode after the header was already sent,
+ * only changed codec data is re-pushed.  Downstream seekability is probed
+ * first; if seeking is unsupported, the muxer forces streamable mode.
+ * On push failure, the not-yet-pushed buffers are released via the
+ * goto-ladder below. */
+static GstFlowReturn
+gst_flv_mux_write_header (GstFlvMux * mux)
+{
+ GstBuffer *header, *metadata;
+ GstBuffer *video_codec_data, *audio_codec_data;
+ GstCaps *caps;
+ GstFlowReturn ret;
+
+ header = metadata = video_codec_data = audio_codec_data = NULL;
+
+ /* if not streaming, check if downstream is seekable */
+ if (!mux->streamable) {
+ gboolean seekable;
+ GstQuery *query;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (gst_pad_peer_query (mux->srcpad, query)) {
+ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
+ GST_INFO_OBJECT (mux, "downstream is %sseekable", seekable ? "" : "not ");
+ } else {
+ /* have to assume seeking is supported if query not handled downstream */
+ GST_WARNING_OBJECT (mux, "downstream did not handle seeking query");
+ seekable = FALSE;
+ }
+ if (!seekable) {
+ mux->streamable = TRUE;
+ g_object_notify (G_OBJECT (mux), "streamable");
+ GST_WARNING_OBJECT (mux, "downstream is not seekable, but "
+ "streamable=false. Will ignore that and create streamable output "
+ "instead");
+ }
+ gst_query_unref (query);
+ }
+
+ if (mux->streamable && mux->sent_header) {
+ GstBuffer **video_codec_data_p = NULL, **audio_codec_data_p = NULL;
+
+ /* header already sent: only refresh codec data for pads whose caps
+ * changed since then */
+ if (mux->video_pad && mux->video_pad->info_changed)
+ video_codec_data_p = &video_codec_data;
+ if (mux->audio_pad && mux->audio_pad->info_changed)
+ audio_codec_data_p = &audio_codec_data;
+
+ caps = gst_flv_mux_prepare_src_caps (mux,
+ NULL, NULL, video_codec_data_p, audio_codec_data_p);
+ } else {
+ caps = gst_flv_mux_prepare_src_caps (mux,
+ &header, &metadata, &video_codec_data, &audio_codec_data);
+ }
+
+ gst_aggregator_set_src_caps (GST_AGGREGATOR_CAST (mux), caps);
+
+ gst_caps_unref (caps);
+
+ /* push the header buffer, the metadata and the codec info, if any */
+ if (header != NULL) {
+ ret = gst_flv_mux_push (mux, header);
+ if (ret != GST_FLOW_OK)
+ goto failure_header;
+ mux->sent_header = TRUE;
+ }
+ if (metadata != NULL) {
+ ret = gst_flv_mux_push (mux, metadata);
+ if (ret != GST_FLOW_OK)
+ goto failure_metadata;
+ mux->new_tags = FALSE;
+ }
+ if (video_codec_data != NULL) {
+ ret = gst_flv_mux_push (mux, video_codec_data);
+ if (ret != GST_FLOW_OK)
+ goto failure_video_codec_data;
+ mux->video_pad->info_changed = FALSE;
+ }
+ if (audio_codec_data != NULL) {
+ ret = gst_flv_mux_push (mux, audio_codec_data);
+ if (ret != GST_FLOW_OK)
+ goto failure_audio_codec_data;
+ mux->audio_pad->info_changed = FALSE;
+ }
+ return GST_FLOW_OK;
+
+failure_header:
+ /* gst_flv_mux_create_metadata() may return NULL; guard the unref here
+ * for consistency with the other cleanup labels */
+ if (metadata != NULL)
+ gst_buffer_unref (metadata);
+
+failure_metadata:
+ if (video_codec_data != NULL)
+ gst_buffer_unref (video_codec_data);
+
+failure_video_codec_data:
+ if (audio_codec_data != NULL)
+ gst_buffer_unref (audio_codec_data);
+
+failure_audio_codec_data:
+ return ret;
+}
+
+/* Convert a segment-time timestamp to running time, clamping timestamps
+ * that precede the segment to 0 (a dts may be earlier than the segment
+ * start when dts < pts and pts lies inside the segment). */
+static GstClockTime
+gst_flv_mux_segment_to_running_time (const GstSegment * segment, GstClockTime t)
+{
+  if (t >= segment->start)
+    return gst_segment_to_running_time (segment, GST_FORMAT_TIME, t);
+
+  return 0;
+}
+
+/* Record the current byte offset in the seek index for this buffer.
+ * Only valid seek points are recorded: a video keyframe, or - when there
+ * is no video pad - any FLV tag. */
+static void
+gst_flv_mux_update_index (GstFlvMux * mux, GstBuffer * buffer,
+    GstFlvMuxPad * pad)
+{
+  GstFlvMuxIndexEntry *idx_entry;
+  GstClockTime running_pts;
+
+  /* video delta units are not seekable */
+  if (mux->video_pad == pad &&
+      GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
+    return;
+
+  if (!GST_BUFFER_PTS_IS_VALID (buffer))
+    return;
+
+  running_pts = gst_flv_mux_segment_to_running_time (&GST_AGGREGATOR_PAD
+      (pad)->segment, GST_BUFFER_PTS (buffer));
+
+  idx_entry = g_slice_new (GstFlvMuxIndexEntry);
+  idx_entry->position = mux->byte_count;
+  idx_entry->time = gst_guint64_to_gdouble (running_pts) / GST_SECOND;
+  /* prepend; the list is reversed in gst_flv_mux_rewrite_header() */
+  mux->index = g_list_prepend (mux->index, idx_entry);
+}
+
+/* Wrap one media buffer in an FLV tag and push it downstream.  Consumes
+ * the buffer reference and tracks the pad's last written running dts. */
+static GstFlowReturn
+gst_flv_mux_write_buffer (GstFlvMux * mux, GstFlvMuxPad * pad,
+    GstBuffer * buffer)
+{
+  GstBuffer *flv_tag;
+  GstFlowReturn flow_ret;
+  GstClockTime running_dts;
+
+  /* clipping function arranged for running_time */
+  running_dts =
+      gst_flv_mux_segment_to_running_time (&GST_AGGREGATOR_PAD (pad)->segment,
+      GST_BUFFER_DTS (buffer));
+
+  if (!mux->streamable)
+    gst_flv_mux_update_index (mux, buffer, pad);
+
+  flv_tag = gst_flv_mux_buffer_to_tag (mux, buffer, pad);
+  gst_buffer_unref (buffer);
+
+  flow_ret = gst_flv_mux_push (mux, flv_tag);
+  if (flow_ret == GST_FLOW_OK && GST_CLOCK_TIME_IS_VALID (running_dts))
+    pad->last_timestamp = running_dts;
+
+  return flow_ret;
+}
+
+/* Estimate the stream duration as the maximum last_timestamp across all
+ * sink pads, or GST_CLOCK_TIME_NONE when no pad has a timestamp yet. */
+static guint64
+gst_flv_mux_determine_duration (GstFlvMux * mux)
+{
+  GstClockTime max_ts = GST_CLOCK_TIME_NONE;
+  GList *walk;
+
+  GST_DEBUG_OBJECT (mux, "trying to determine the duration "
+      "from pad timestamps");
+
+  GST_OBJECT_LOCK (mux);
+  for (walk = GST_ELEMENT_CAST (mux)->sinkpads; walk != NULL;
+      walk = walk->next) {
+    GstFlvMuxPad *pad = GST_FLV_MUX_PAD (walk->data);
+
+    if (!pad || pad->last_timestamp == GST_CLOCK_TIME_NONE)
+      continue;
+
+    if (max_ts == GST_CLOCK_TIME_NONE || pad->last_timestamp > max_ts)
+      max_ts = pad->last_timestamp;
+  }
+  GST_OBJECT_UNLOCK (mux);
+
+  return max_ts;
+}
+
+/* Accumulator for duration_query_cb(): holds the maximum duration reported
+ * by any upstream peer, or GST_CLOCK_TIME_NONE. */
+struct DurationData
+{
+ GstClockTime duration;
+};
+
+/* Per-pad callback for gst_element_foreach_sink_pad(): query the peer's
+ * duration and keep the maximum in 'data'.  Always returns TRUE so the
+ * iteration continues over every pad. */
+static gboolean
+duration_query_cb (GstElement * element, GstPad * pad,
+    struct DurationData *data)
+{
+  GstClockTime pad_dur;
+
+  if (gst_pad_peer_query_duration (GST_PAD (pad), GST_FORMAT_TIME,
+          (gint64 *) & pad_dur) && pad_dur != GST_CLOCK_TIME_NONE) {
+    if (data->duration != GST_CLOCK_TIME_NONE)
+      data->duration = MAX (pad_dur, data->duration);
+    else
+      data->duration = pad_dur;
+  }
+
+  return TRUE;
+}
+
+/* Ask every sink pad's upstream peer for its duration and return the
+ * maximum, or GST_CLOCK_TIME_NONE if none answered. */
+static GstClockTime
+gst_flv_mux_query_upstream_duration (GstFlvMux * mux)
+{
+  struct DurationData result = { GST_CLOCK_TIME_NONE };
+
+  gst_element_foreach_sink_pad (GST_ELEMENT (mux),
+      (GstElementForeachPadFunc) (duration_query_cb), &result);
+
+  return result.duration;
+}
+
+/* TRUE iff every sink pad has reached EOS. */
+static gboolean
+gst_flv_mux_are_all_pads_eos (GstFlvMux * mux)
+{
+  gboolean all_eos = TRUE;
+  GList *walk;
+
+  GST_OBJECT_LOCK (mux);
+  for (walk = GST_ELEMENT_CAST (mux)->sinkpads; walk; walk = walk->next) {
+    GstFlvMuxPad *pad = GST_FLV_MUX_PAD (walk->data);
+
+    if (!gst_aggregator_pad_is_eos (GST_AGGREGATOR_PAD (pad))) {
+      all_eos = FALSE;
+      break;
+    }
+  }
+  GST_OBJECT_UNLOCK (mux);
+
+  return all_eos;
+}
+
+/* Push an end-of-stream tag for the video stream; a no-op (GST_FLOW_OK)
+ * when there is no video pad. */
+static GstFlowReturn
+gst_flv_mux_write_eos (GstFlvMux * mux)
+{
+  GstBuffer *eos_tag;
+
+  if (mux->video_pad == NULL)
+    return GST_FLOW_OK;
+
+  eos_tag = gst_flv_mux_eos_to_tag (mux, mux->video_pad);
+  return gst_flv_mux_push (mux, eos_tag);
+}
+
+/* At EOS, in non-streamable mode only: seek back into the preallocated
+ * metadata area and overwrite the placeholder 'duration' and 'filesize'
+ * values, then write the keyframe index ('times' and 'filepositions'
+ * strict arrays inside a nested 'keyframes' ECMA array).  Any leftover
+ * preallocated space is re-covered with a 'gstfiller' string so the
+ * script tag keeps its original size. */
+static GstFlowReturn
+gst_flv_mux_rewrite_header (GstFlvMux * mux)
+{
+ GstBuffer *rewrite, *index, *tmp;
+ GstEvent *event;
+ guint8 *data;
+ gdouble d;
+ GList *l;
+ guint32 index_len, allocate_size;
+ guint32 i, index_skip;
+ GstSegment segment;
+ GstClockTime dur;
+
+ if (mux->streamable)
+ return GST_FLOW_OK;
+
+ /* seek back to the preallocated index space */
+ /* 13 + 29: offset of the first metadata value inside the file; see the
+ * corresponding offsets used in gst_flv_mux_create_metadata() */
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = segment.time = 13 + 29;
+ event = gst_event_new_segment (&segment);
+ if (!gst_pad_push_event (mux->srcpad, event)) {
+ GST_WARNING_OBJECT (mux, "Seek to rewrite header failed");
+ return GST_FLOW_OK;
+ }
+
+ /* determine duration now based on our own timestamping,
+ * so that it is likely many times better and consistent
+ * than whatever obtained by some query */
+ dur = gst_flv_mux_determine_duration (mux);
+ if (dur != GST_CLOCK_TIME_NONE)
+ mux->duration = dur;
+
+ /* rewrite the duration tag */
+ d = gst_guint64_to_gdouble (mux->duration);
+ d /= (gdouble) GST_SECOND;
+
+ GST_DEBUG_OBJECT (mux, "determined the final duration to be %f", d);
+
+ rewrite = gst_flv_mux_create_number_script_value ("duration", d);
+
+ /* rewrite the filesize tag */
+ d = gst_guint64_to_gdouble (mux->byte_count);
+
+ GST_DEBUG_OBJECT (mux, "putting total filesize %f in the metadata", d);
+
+ tmp = gst_flv_mux_create_number_script_value ("filesize", d);
+ rewrite = gst_buffer_append (rewrite, tmp);
+
+ if (!mux->index) {
+ /* no index, so push buffer and return */
+ return gst_flv_mux_push (mux, rewrite);
+ }
+
+ /* rewrite the index */
+ /* entries were prepended in gst_flv_mux_update_index(): restore order */
+ mux->index = g_list_reverse (mux->index);
+ index_len = g_list_length (mux->index);
+
+ /* We write at most MAX_INDEX_ENTRIES elements */
+ if (index_len > MAX_INDEX_ENTRIES) {
+ index_skip = 1 + index_len / MAX_INDEX_ENTRIES;
+ index_len = (index_len + index_skip - 1) / index_skip;
+ } else {
+ index_skip = 1;
+ }
+
+ GST_DEBUG_OBJECT (mux, "Index length is %d", index_len);
+ /* see size calculation in gst_flv_mux_preallocate_index */
+ allocate_size = 11 + 8 + 22 + 10 + index_len * 18;
+ GST_DEBUG_OBJECT (mux, "Allocating %d bytes for index", allocate_size);
+ _gst_buffer_new_and_alloc (allocate_size, &index, &data);
+
+ GST_WRITE_UINT16_BE (data, 9); /* the 'keyframes' key */
+ memcpy (data + 2, "keyframes", 9);
+ GST_WRITE_UINT8 (data + 11, 8); /* nested ECMA array */
+ GST_WRITE_UINT32_BE (data + 12, 2); /* two elements */
+ GST_WRITE_UINT16_BE (data + 16, 5); /* first string key: 'times' */
+ memcpy (data + 18, "times", 5);
+ GST_WRITE_UINT8 (data + 23, 10); /* strict array */
+ GST_WRITE_UINT32_BE (data + 24, index_len);
+ data += 28;
+
+ /* the keyframes' times */
+ for (i = 0, l = mux->index; l; l = l->next, i++) {
+ GstFlvMuxIndexEntry *entry = l->data;
+
+ if (i % index_skip != 0)
+ continue;
+ GST_WRITE_UINT8 (data, 0); /* numeric (aka double) */
+ GST_WRITE_DOUBLE_BE (data + 1, entry->time);
+ data += 9;
+ }
+
+ GST_WRITE_UINT16_BE (data, 13); /* second string key: 'filepositions' */
+ memcpy (data + 2, "filepositions", 13);
+ GST_WRITE_UINT8 (data + 15, 10); /* strict array */
+ GST_WRITE_UINT32_BE (data + 16, index_len);
+ data += 20;
+
+ /* the keyframes' file positions */
+ for (i = 0, l = mux->index; l; l = l->next, i++) {
+ GstFlvMuxIndexEntry *entry = l->data;
+
+ if (i % index_skip != 0)
+ continue;
+ GST_WRITE_UINT8 (data, 0);
+ GST_WRITE_DOUBLE_BE (data + 1, entry->position);
+ data += 9;
+ }
+
+ GST_WRITE_UINT24_BE (data, 9); /* finish the ECMA array */
+
+ /* If there is space left in the prefilled area, reinsert the filler.
+ There is at least 18 bytes free, so it will always fit. */
+ if (index_len < MAX_INDEX_ENTRIES) {
+ GstBuffer *tmp;
+ guint8 *data;
+ guint32 remaining_filler_size;
+
+ _gst_buffer_new_and_alloc (14, &tmp, &data);
+ GST_WRITE_UINT16_BE (data, 9);
+ memcpy (data + 2, "gstfiller", 9);
+ GST_WRITE_UINT8 (data + 11, 2); /* string */
+
+ /* There is 18 bytes per remaining index entry minus what is used for
+ * the 'gstfiller' key. The rest is already filled with spaces, so just
+ * need to update length. */
+ remaining_filler_size = (MAX_INDEX_ENTRIES - index_len) * 18 - 14;
+ GST_DEBUG_OBJECT (mux, "Remaining filler size is %d bytes",
+ remaining_filler_size);
+ GST_WRITE_UINT16_BE (data + 12, remaining_filler_size);
+ index = gst_buffer_append (index, tmp);
+ }
+
+ rewrite = gst_buffer_append (rewrite, index);
+
+ return gst_flv_mux_push (mux, rewrite);
+}
+
+/* Returns NULL, or a reference to the pad with the
+ * buffer with lowest running time.
+ *
+ * If a non-EOS pad currently has no queued buffer and 'timeout' is FALSE,
+ * NULL is returned so the aggregator waits for data on all pads before
+ * picking one.  '*ts' receives the selected buffer's running timestamp
+ * (or GST_CLOCK_TIME_NONE). */
+static GstFlvMuxPad *
+gst_flv_mux_find_best_pad (GstAggregator * aggregator, GstClockTime * ts,
+ gboolean timeout)
+{
+ GstFlvMuxPad *best = NULL;
+ GstClockTime best_ts = GST_CLOCK_TIME_NONE;
+ GstIterator *pads;
+ GValue padptr = { 0, };
+ gboolean done = FALSE;
+
+ pads = gst_element_iterate_sink_pads (GST_ELEMENT (aggregator));
+
+ while (!done) {
+ switch (gst_iterator_next (pads, &padptr)) {
+ case GST_ITERATOR_OK:{
+ GstAggregatorPad *apad = g_value_get_object (&padptr);
+ GstClockTime t = GST_CLOCK_TIME_NONE;
+ GstBuffer *buffer;
+
+ buffer = gst_aggregator_pad_peek_buffer (apad);
+ if (!buffer) {
+ /* a live pad has no buffer yet: abort the selection and wait */
+ if (!timeout && !GST_PAD_IS_EOS (apad)) {
+ gst_object_replace ((GstObject **) & best, NULL);
+ best_ts = GST_CLOCK_TIME_NONE;
+ done = TRUE;
+ }
+ break;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS_OR_PTS (buffer))) {
+ t = gst_flv_mux_segment_to_running_time (&apad->segment,
+ GST_BUFFER_DTS_OR_PTS (buffer));
+ }
+
+ /* keep the pad whose next buffer has the lowest running time */
+ if (!GST_CLOCK_TIME_IS_VALID (best_ts) ||
+ (GST_CLOCK_TIME_IS_VALID (t) && t < best_ts)) {
+ gst_object_replace ((GstObject **) & best, GST_OBJECT (apad));
+ best_ts = t;
+ }
+ gst_buffer_unref (buffer);
+ break;
+ }
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync (pads);
+ /* Clear the best pad and start again. It might have disappeared */
+ gst_object_replace ((GstObject **) & best, NULL);
+ best_ts = GST_CLOCK_TIME_NONE;
+ break;
+ case GST_ITERATOR_ERROR:
+ /* This can't happen if the parameters to gst_iterator_next() are valid */
+ g_assert_not_reached ();
+ break;
+ }
+ g_value_reset (&padptr);
+ }
+ g_value_unset (&padptr);
+ gst_iterator_free (pads);
+
+ if (best) {
+ GST_DEBUG_OBJECT (aggregator,
+ "Best pad found with TS %" GST_TIME_FORMAT ": %" GST_PTR_FORMAT,
+ GST_TIME_ARGS (best_ts), best);
+ } else {
+ GST_DEBUG_OBJECT (aggregator, "Best pad not found");
+ }
+
+ if (ts)
+ *ts = best_ts;
+ return best;
+}
+
+/* GstAggregator::aggregate(): write the FLV header on the first call, then
+ * pop and mux the buffer with the lowest running time.  Returns
+ * GST_AGGREGATOR_FLOW_NEED_DATA while waiting for pads, GST_FLOW_EOS once
+ * all pads are EOS (after writing the EOS tag and rewriting the header). */
+static GstFlowReturn
+gst_flv_mux_aggregate (GstAggregator * aggregator, gboolean timeout)
+{
+ GstFlvMux *mux = GST_FLV_MUX (aggregator);
+ GstFlvMuxPad *best;
+ GstClockTime best_time = GST_CLOCK_TIME_NONE;
+ GstFlowReturn ret;
+ GstClockTime ts;
+ GstBuffer *buffer = NULL;
+
+ if (mux->state == GST_FLV_MUX_STATE_HEADER) {
+ if (GST_ELEMENT_CAST (mux)->sinkpads == NULL) {
+ GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+ ("No input streams configured"));
+ return GST_FLOW_ERROR;
+ }
+
+ best = gst_flv_mux_find_best_pad (aggregator, &ts, timeout);
+ if (!best) {
+ if (!gst_flv_mux_are_all_pads_eos (mux))
+ return GST_AGGREGATOR_FLOW_NEED_DATA;
+ else
+ return GST_FLOW_OK;
+ }
+
+ ret = gst_flv_mux_write_header (mux);
+ if (ret != GST_FLOW_OK) {
+ gst_object_unref (best);
+ return ret;
+ }
+
+ mux->state = GST_FLV_MUX_STATE_DATA;
+
+ if (!mux->streamable || mux->first_timestamp == GST_CLOCK_TIME_NONE) {
+ if (best && GST_CLOCK_TIME_IS_VALID (ts))
+ mux->first_timestamp = ts;
+ else
+ mux->first_timestamp = 0;
+ }
+ } else {
+ best = gst_flv_mux_find_best_pad (aggregator, &ts, timeout);
+ }
+
+ if (best) {
+ buffer = gst_aggregator_pad_pop_buffer (GST_AGGREGATOR_PAD (best));
+ if (!buffer) {
+ /* We might have gotten a flush event after we picked the pad */
+ gst_object_unref (best);
+ return GST_AGGREGATOR_FLOW_NEED_DATA;
+ }
+ }
+
+ /* In streamable mode, new tags are emitted as a fresh metadata tag */
+ if (mux->new_tags && mux->streamable) {
+ GstBuffer *buf = gst_flv_mux_create_metadata (mux);
+ if (buf)
+ gst_flv_mux_push (mux, buf);
+ mux->new_tags = FALSE;
+ }
+
+ if (best) {
+ best->dts =
+ gst_flv_mux_segment_to_running_time (&GST_AGGREGATOR_PAD
+ (best)->segment, GST_BUFFER_DTS_OR_PTS (buffer));
+
+ if (GST_CLOCK_TIME_IS_VALID (best->dts))
+ best_time = best->dts - mux->first_timestamp;
+
+ if (GST_BUFFER_PTS_IS_VALID (buffer))
+ best->pts =
+ gst_flv_mux_segment_to_running_time (&GST_AGGREGATOR_PAD
+ (best)->segment, GST_BUFFER_PTS (buffer));
+ else
+ best->pts = best->dts;
+
+ GST_LOG_OBJECT (best,
+ "got buffer PTS %" GST_TIME_FORMAT " DTS %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (best->pts), GST_TIME_ARGS (best->dts));
+ } else {
+ if (!gst_flv_mux_are_all_pads_eos (mux))
+ return GST_AGGREGATOR_FLOW_NEED_DATA;
+ /* best_time is a GstClockTime (unsigned): use GST_CLOCK_TIME_NONE
+ * here. GST_CLOCK_STIME_NONE (G_MININT64) converts to a huge *valid*
+ * GstClockTime and made the overflow check below fire spuriously,
+ * unreffing a NULL 'best'. */
+ best_time = GST_CLOCK_TIME_NONE;
+ }
+
+ /* The FLV timestamp is an int32 field. For non-live streams error out if a
+ bigger timestamp is seen, for live the timestamp will get wrapped in
+ gst_flv_mux_buffer_to_tag */
+ if (!mux->streamable && (GST_CLOCK_TIME_IS_VALID (best_time))
+ && best_time / GST_MSECOND > G_MAXINT32) {
+ GST_WARNING_OBJECT (mux, "Timestamp larger than FLV supports - EOS");
+ if (buffer) {
+ gst_buffer_unref (buffer);
+ buffer = NULL;
+ }
+ if (best) {
+ gst_object_unref (best);
+ best = NULL;
+ }
+ }
+
+ if (best) {
+ GstFlowReturn ret = gst_flv_mux_write_buffer (mux, best, buffer);
+ gst_object_unref (best);
+ return ret;
+ } else {
+ if (gst_flv_mux_are_all_pads_eos (mux)) {
+ gst_flv_mux_write_eos (mux);
+ gst_flv_mux_rewrite_header (mux);
+ return GST_FLOW_EOS;
+ }
+ return GST_FLOW_OK;
+ }
+}
+
+/* GObject getter: report the current value of a GstFlvMux property. */
+static void
+gst_flv_mux_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstFlvMux *mux = GST_FLV_MUX (object);
+
+  if (prop_id == PROP_STREAMABLE) {
+    g_value_set_boolean (value, mux->streamable);
+  } else if (prop_id == PROP_METADATACREATOR) {
+    g_value_set_string (value, mux->metadatacreator);
+  } else if (prop_id == PROP_ENCODER) {
+    g_value_set_string (value, mux->encoder);
+  } else if (prop_id == PROP_SKIP_BACKWARDS_STREAMS) {
+    g_value_set_boolean (value, mux->skip_backwards_streams);
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+}
+
+/* GObject setter for GstFlvMux properties.
+ *
+ * streamable also flips the tag-merge mode: streamable (live) pipelines
+ * replace tags, non-streamable ones keep the existing ones.  The two
+ * string properties refuse NULL and fall back to the built-in default. */
+static void
+gst_flv_mux_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstFlvMux *mux = GST_FLV_MUX (object);
+
+  switch (prop_id) {
+    case PROP_STREAMABLE:
+      mux->streamable = g_value_get_boolean (value);
+      if (mux->streamable)
+        gst_tag_setter_set_tag_merge_mode (GST_TAG_SETTER (mux),
+            GST_TAG_MERGE_REPLACE);
+      else
+        gst_tag_setter_set_tag_merge_mode (GST_TAG_SETTER (mux),
+            GST_TAG_MERGE_KEEP);
+      break;
+    case PROP_METADATACREATOR:
+      g_free (mux->metadatacreator);
+      if (!g_value_get_string (value)) {
+        GST_WARNING_OBJECT (mux, "metadatacreator property can not be NULL");
+        mux->metadatacreator = g_strdup (DEFAULT_METADATACREATOR);
+      } else {
+        mux->metadatacreator = g_value_dup_string (value);
+      }
+      break;
+    case PROP_ENCODER:
+      g_free (mux->encoder);
+      if (!g_value_get_string (value)) {
+        GST_WARNING_OBJECT (mux, "encoder property can not be NULL");
+        /* NOTE(review): falls back to DEFAULT_METADATACREATOR — looks
+         * intentional (both defaults share one string), but confirm no
+         * separate DEFAULT_ENCODER exists that should be used here. */
+        mux->encoder = g_strdup (DEFAULT_METADATACREATOR);
+      } else {
+        mux->encoder = g_value_dup_string (value);
+      }
+      break;
+    case PROP_SKIP_BACKWARDS_STREAMS:
+      mux->skip_backwards_streams = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstAggregator::get_next_time implementation.
+ *
+ * Returns GST_CLOCK_TIME_NONE (keep waiting for data) while the header has
+ * not been written and a known pad still lacks its codec configuration
+ * (codec == G_MAXUINT), or while neither pad has a buffer queued.
+ * Otherwise defers to the default aggregator timing. */
+static GstClockTime
+gst_flv_mux_get_next_time (GstAggregator * aggregator)
+{
+  GstFlvMux *mux = GST_FLV_MUX (aggregator);
+  GstAggregatorPad *agg_audio_pad = GST_AGGREGATOR_PAD_CAST (mux->audio_pad);
+  GstAggregatorPad *agg_video_pad = GST_AGGREGATOR_PAD_CAST (mux->video_pad);
+
+  /* hold the object lock while inspecting pad/codec state */
+  GST_OBJECT_LOCK (aggregator);
+  if (mux->state == GST_FLV_MUX_STATE_HEADER &&
+      ((mux->audio_pad && mux->audio_pad->codec == G_MAXUINT) ||
+          (mux->video_pad && mux->video_pad->codec == G_MAXUINT)))
+    goto wait_for_data;
+
+  if (!((agg_audio_pad && gst_aggregator_pad_has_buffer (agg_audio_pad)) ||
+          (agg_video_pad && gst_aggregator_pad_has_buffer (agg_video_pad))))
+    goto wait_for_data;
+  GST_OBJECT_UNLOCK (aggregator);
+
+  return gst_aggregator_simple_get_next_time (aggregator);
+
+wait_for_data:
+  GST_OBJECT_UNLOCK (aggregator);
+  return GST_CLOCK_TIME_NONE;
+}
+
+/* GstAggregator::update_src_caps: the FLV source caps do not depend on the
+ * caps we are offered, so simply rebuild them from the muxer state. */
+static GstFlowReturn
+gst_flv_mux_update_src_caps (GstAggregator * aggregator,
+    GstCaps * caps, GstCaps ** ret)
+{
+  GstFlvMux *mux = GST_FLV_MUX (aggregator);
+  GstCaps *src_caps;
+
+  src_caps = gst_flv_mux_prepare_src_caps (mux, NULL, NULL, NULL, NULL);
+  *ret = src_caps;
+
+  return GST_FLOW_OK;
+}
diff --git a/gst/flv/gstflvmux.h b/gst/flv/gstflvmux.h
new file mode 100644
index 0000000000..40d87562c2
--- /dev/null
+++ b/gst/flv/gstflvmux.h
@@ -0,0 +1,120 @@
+/* GStreamer
+ *
+ * Copyright (c) 2008,2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (c) 2008-2017 Collabora Ltd
+ * @author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * @author: Vincent Penquerc'h <vincent.penquerch@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_FLV_MUX_H__
+#define __GST_FLV_MUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstaggregator.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_FLV_MUX_PAD (gst_flv_mux_pad_get_type())
+#define GST_FLV_MUX_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FLV_MUX_PAD, GstFlvMuxPad))
+#define GST_FLV_MUX_PAD_CAST(obj) ((GstFlvMuxPad *)(obj))
+#define GST_FLV_MUX_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FLV_MUX_PAD, GstFlvMuxPad))
+#define GST_IS_FLV_MUX_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FLV_MUX_PAD))
+#define GST_IS_FLV_MUX_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FLV_MUX_PAD))
+
+typedef struct _GstFlvMuxPad GstFlvMuxPad;
+typedef struct _GstFlvMuxPadClass GstFlvMuxPadClass;
+typedef struct _GstFlvMux GstFlvMux;
+typedef struct _GstFlvMuxClass GstFlvMuxClass;
+
+#define GST_TYPE_FLV_MUX \
+ (gst_flv_mux_get_type ())
+#define GST_FLV_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_FLV_MUX, GstFlvMux))
+#define GST_FLV_MUX_CAST(obj) ((GstFlvMux *)obj)
+#define GST_FLV_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_FLV_MUX, GstFlvMuxClass))
+#define GST_IS_FLV_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_FLV_MUX))
+#define GST_IS_FLV_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_FLV_MUX))
+
+struct _GstFlvMuxPad
+{
+  GstAggregatorPad aggregator_pad;
+
+  /* FLV codec id for this stream; G_MAXUINT until the pad was configured
+   * from caps (checked in gst_flv_mux_get_next_time). */
+  guint codec;
+  /* Stream parameters — presumably sample rate, sample width and channel
+   * count for audio; confirm against the setcaps handler. */
+  guint rate;
+  guint width;
+  guint channels;
+  /* Codec initialisation data, emitted with the stream setup. */
+  GstBuffer *codec_data;
+
+  guint bitrate;
+
+  /* Per-pad timestamps in running time (see gst_flv_mux_aggregate). */
+  GstClockTime last_timestamp;
+  GstClockTime pts;
+  GstClockTime dts;
+
+  /* NOTE(review): flags inferred from their names — info_changed marks
+   * stream info needing re-announcement, drop_deltas requests dropping
+   * delta frames; confirm against their users elsewhere in the file. */
+  gboolean info_changed;
+  gboolean drop_deltas;
+};
+
+struct _GstFlvMuxPadClass {
+ GstAggregatorPadClass parent;
+};
+
+typedef enum
+{
+ GST_FLV_MUX_STATE_HEADER,
+ GST_FLV_MUX_STATE_DATA
+} GstFlvMuxState;
+
+struct _GstFlvMux {
+  GstAggregator aggregator;
+
+  GstPad *srcpad;
+
+  /* <private> */
+  GstFlvMuxState state;         /* HEADER until the FLV header was written */
+  GstFlvMuxPad *audio_pad;      /* at most one audio and one video pad */
+  GstFlvMuxPad *video_pad;
+  gboolean streamable;          /* live mode: tags muxed in-band, ts wrap */
+  gchar *metadatacreator;       /* owned; setter guarantees non-NULL */
+  gchar *encoder;               /* owned; setter guarantees non-NULL */
+  gboolean skip_backwards_streams;
+
+  GstTagList *tags;
+  gboolean new_tags;            /* pending tag update to emit as metadata */
+  GList *index;                 /* presumably seek index entries — confirm */
+  guint64 byte_count;
+  GstClockTime duration;
+  GstClockTime first_timestamp; /* zero point for 32-bit FLV timestamps */
+  guint64 last_dts;
+
+  gboolean sent_header;
+};
+
+struct _GstFlvMuxClass {
+ GstAggregatorClass parent;
+};
+
+GType gst_flv_mux_pad_get_type(void);
+GType gst_flv_mux_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_FLV_MUX_H__ */
diff --git a/gst/flv/gstflvplugin.c b/gst/flv/gstflvplugin.c
new file mode 100644
index 0000000000..315e43082e
--- /dev/null
+++ b/gst/flv/gstflvplugin.c
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 Julian Bouzas <julian.bouzas@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstflvelements.h"
+
+/* Register both FLV elements; succeed when at least one registered. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean registered_demux;
+  gboolean registered_mux;
+
+  registered_demux = GST_ELEMENT_REGISTER (flvdemux, plugin);
+  registered_mux = GST_ELEMENT_REGISTER (flvmux, plugin);
+
+  return registered_demux || registered_mux;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR,
+ flv, "FLV muxing and demuxing plugin",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/flv/gstindex.c b/gst/flv/gstindex.c
new file mode 100644
index 0000000000..4b11bb2198
--- /dev/null
+++ b/gst/flv/gstindex.c
@@ -0,0 +1,1017 @@
+/* GStreamer
+ * Copyright (C) 2001 RidgeRun (http://www.ridgerun.com/)
+ * Written by Erik Walthinsen <omega@ridgerun.com>
+ *
+ * gstindex.c: Index for mappings and other data
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:gstindex
+ * @title: GstIndex
+ * @short_description: Generate indexes on objects
+ * @see_also: #GstIndexFactory
+ *
+ * GstIndex is used to generate a stream index of one or more elements
+ * in a pipeline.
+ *
+ * Elements will overload the set_index and get_index virtual methods in
+ * #GstElement. When streaming data, the element will add index entries if it
+ * has an index set.
+ *
+ * Each element that adds to the index will do that using a writer_id. The
+ * writer_id is obtained from gst_index_get_writer_id().
+ *
+ * The application that wants to index the stream will create a new index object
+ * using gst_index_new() or gst_index_factory_make(). The index is assigned to a
+ * specific element, a bin or the whole pipeline. This will cause indexable
+ * elements to add entries to the index while playing.
+ */
+
+/* FIXME: complete gobject annotations */
+/* FIXME-0.11: cleanup API
+ * - no one seems to use GstIndexGroup, GstIndexCertainty
+ *
+ * - the API for application to use the index is mostly missing
+ * - apps need to get a list of writers
+ * - apps need to be able to iterate over each writers index entry collection
+ * - gst_index_get_assoc_entry() should pass ownership
+ * - the GstIndexEntry structure is large and contains repetitive information
+ * - we want to allow Indexers to implement a saner storage and create
+ * GstIndexEntries on demand (the app has to free them), might even make
+ * sense to ask the app to provide a ptr and fill it.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+/* Index signals and args */
+enum
+{
+ ENTRY_ADDED,
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_RESOLVER
+ /* FILL ME */
+};
+
+#if 0
+GST_DEBUG_CATEGORY_STATIC (index_debug);
+#define GST_CAT_DEFAULT index_debug
+#endif
+
+static void gst_index_finalize (GObject * object);
+
+static void gst_index_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_index_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstIndexGroup *gst_index_group_new (guint groupnum);
+static void gst_index_group_free (GstIndexGroup * group);
+
+static gboolean gst_index_path_resolver (GstIndex * index, GstObject * writer,
+ gchar ** writer_string, gpointer data);
+static gboolean gst_index_gtype_resolver (GstIndex * index, GstObject * writer,
+ gchar ** writer_string, gpointer data);
+static void gst_index_add_entry (GstIndex * index, GstIndexEntry * entry);
+
+static guint gst_index_signals[LAST_SIGNAL] = { 0 };
+
+typedef struct
+{
+ GstIndexResolverMethod method;
+ GstIndexResolver resolver;
+ gpointer user_data;
+}
+ResolverEntry;
+
+static const ResolverEntry resolvers[] = {
+ {GST_INDEX_RESOLVER_CUSTOM, NULL, NULL},
+ {GST_INDEX_RESOLVER_GTYPE, gst_index_gtype_resolver, NULL},
+ {GST_INDEX_RESOLVER_PATH, gst_index_path_resolver, NULL},
+};
+
+#define GST_TYPE_INDEX_RESOLVER (gst_index_resolver_get_type())
+/* Return the GEnum GType for the resolver-method enumeration, registering
+ * it on first use.  Uses g_once_init_enter()/g_once_init_leave() so that
+ * concurrent first calls cannot race and register the enum twice (the
+ * plain static-flag check was not thread safe). */
+static GType
+gst_index_resolver_get_type (void)
+{
+  static gsize index_resolver_type = 0;
+  static const GEnumValue index_resolver[] = {
+    {GST_INDEX_RESOLVER_CUSTOM, "GST_INDEX_RESOLVER_CUSTOM", "custom"},
+    {GST_INDEX_RESOLVER_GTYPE, "GST_INDEX_RESOLVER_GTYPE", "gtype"},
+    {GST_INDEX_RESOLVER_PATH, "GST_INDEX_RESOLVER_PATH", "path"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&index_resolver_type)) {
+    GType tmp =
+        g_enum_register_static ("GstFlvDemuxIndexResolver", index_resolver);
+    g_once_init_leave (&index_resolver_type, tmp);
+  }
+  return (GType) index_resolver_type;
+}
+
+/* Return the boxed GType for GstIndexEntry (copy/free via
+ * gst_index_entry_copy()/gst_index_entry_free()), registering it on first
+ * use.  g_once_init_enter()/leave make the lazy registration thread safe;
+ * the plain static-flag check could race on concurrent first calls. */
+GType
+gst_index_entry_get_type (void)
+{
+  static gsize index_entry_type = 0;
+
+  if (g_once_init_enter (&index_entry_type)) {
+    GType tmp = g_boxed_type_register_static ("GstFlvDemuxIndexEntry",
+        (GBoxedCopyFunc) gst_index_entry_copy,
+        (GBoxedFreeFunc) gst_index_entry_free);
+    g_once_init_leave (&index_entry_type, tmp);
+  }
+  return (GType) index_entry_type;
+}
+
+#if 0
+#define _do_init \
+{ \
+ GST_DEBUG_CATEGORY_INIT (index_debug, "GST_INDEX", GST_DEBUG_BOLD, \
+ "Generic indexing support"); \
+}
+#endif
+
+typedef GstIndex GstFlvDemuxIndex;
+typedef GstIndexClass GstFlvDemuxIndexClass;
+//typedef GstIndexEntry GstFlvDemuxIndexEntry;
+G_DEFINE_TYPE (GstFlvDemuxIndex, gst_index, GST_TYPE_OBJECT);
+
+/* Class init: wire GObject vfuncs, the ::entry-added signal and the
+ * "resolver" property. */
+static void
+gst_index_class_init (GstIndexClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+  /**
+   * GstIndex::entry-added
+   * @gstindex: the object which received the signal.
+   * @arg1: The entry added to the index.
+   *
+   * Is emitted when a new entry is added to the index.
+   */
+  gst_index_signals[ENTRY_ADDED] =
+      g_signal_new ("entry-added", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
+      G_STRUCT_OFFSET (GstIndexClass, entry_added), NULL, NULL,
+      NULL, G_TYPE_NONE, 1, GST_TYPE_INDEX_ENTRY);
+
+  gobject_class->set_property = gst_index_set_property;
+  gobject_class->get_property = gst_index_get_property;
+  gobject_class->finalize = gst_index_finalize;
+
+  /* "resolver" selects how writer objects are mapped to id strings */
+  g_object_class_install_property (gobject_class, PROP_RESOLVER,
+      g_param_spec_enum ("resolver", "Resolver",
+          "Select a predefined object to string mapper",
+          GST_TYPE_INDEX_RESOLVER, GST_INDEX_RESOLVER_PATH,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+/* Instance init: start with one empty group 0, an empty writer cache and
+ * the path-based resolver; the index is marked readable and writable. */
+static void
+gst_index_init (GstIndex * index)
+{
+  index->curgroup = gst_index_group_new (0);
+  index->maxgroup = 0;
+  index->groups = g_list_prepend (NULL, index->curgroup);
+
+  /* keys are writer object pointers, values cached GstIndexEntry ids */
+  index->writers = g_hash_table_new (NULL, NULL);
+  index->last_id = 0;
+
+  index->method = GST_INDEX_RESOLVER_PATH;
+  index->resolver = resolvers[index->method].resolver;
+  index->resolver_user_data = resolvers[index->method].user_data;
+
+  GST_OBJECT_FLAG_SET (index, GST_INDEX_WRITABLE);
+  GST_OBJECT_FLAG_SET (index, GST_INDEX_READABLE);
+
+  GST_DEBUG ("created new index");
+}
+
+/* GHFunc for the writers hash table: release the cached id entry. */
+static void
+gst_index_free_writer (gpointer key, gpointer value, gpointer user_data)
+{
+  if (value != NULL)
+    gst_index_entry_free ((GstIndexEntry *) value);
+}
+
+/* Finalize: release groups, the cached writer entries and any filter or
+ * resolver user data, then chain up. */
+static void
+gst_index_finalize (GObject * object)
+{
+  GstIndex *index = GST_INDEX (object);
+
+  if (index->groups) {
+    g_list_foreach (index->groups, (GFunc) gst_index_group_free, NULL);
+    g_list_free (index->groups);
+    index->groups = NULL;
+  }
+
+  if (index->writers) {
+    /* frees the GstIndexEntry values; keys are unowned writer pointers */
+    g_hash_table_foreach (index->writers, gst_index_free_writer, NULL);
+    g_hash_table_destroy (index->writers);
+    index->writers = NULL;
+  }
+
+  if (index->filter_user_data && index->filter_user_data_destroy)
+    index->filter_user_data_destroy (index->filter_user_data);
+
+  if (index->resolver_user_data && index->resolver_user_data_destroy)
+    index->resolver_user_data_destroy (index->resolver_user_data);
+
+  G_OBJECT_CLASS (gst_index_parent_class)->finalize (object);
+}
+
+/* GObject setter: switching the resolver method swaps in the matching
+ * function/user-data pair from the static resolvers table. */
+static void
+gst_index_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstIndex *index = GST_INDEX (object);
+
+  switch (prop_id) {
+    case PROP_RESOLVER:{
+      GstIndexResolverMethod method =
+          (GstIndexResolverMethod) g_value_get_enum (value);
+
+      index->method = method;
+      index->resolver = resolvers[method].resolver;
+      index->resolver_user_data = resolvers[method].user_data;
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject getter for the single "resolver" property. */
+static void
+gst_index_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstIndex *index = GST_INDEX (object);
+
+  if (prop_id == PROP_RESOLVER)
+    g_value_set_enum (value, index->method);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* Allocate a new, empty index group with the given group number. */
+static GstIndexGroup *
+gst_index_group_new (guint groupnum)
+{
+  GstIndexGroup *group;
+
+  group = g_slice_new (GstIndexGroup);
+  group->groupnum = groupnum;
+  group->entries = NULL;
+  group->certainty = GST_INDEX_UNKNOWN;
+  group->peergroup = -1;
+
+  GST_DEBUG ("created new index group %d", groupnum);
+
+  return group;
+}
+
+/* Release a group structure itself. */
+static void
+gst_index_group_free (GstIndexGroup * group)
+{
+  /* NOTE(review): group->entries is not freed here — in this base class it
+   * appears to stay NULL (gst_index_add_entry never appends to it); confirm
+   * before reusing groups as entry storage. */
+  g_slice_free (GstIndexGroup, group);
+}
+
+/* do not resurrect this, add a derived dummy index class instead */
+#if 0
+/**
+ * gst_index_new:
+ *
+ * Create a new dummy index object. Use gst_element_set_index() to assign that
+ * to an element or pipeline. This index is not storing anything, but will
+ * still emit e.g. the #GstIndex::entry-added signal.
+ *
+ * Returns: (transfer full): a new index object
+ */
+GstIndex *
+gst_index_new (void)
+{
+ GstIndex *index;
+
+ index = g_object_newv (gst_index_get_type (), 0, NULL);
+
+ return index;
+}
+#endif
+
+/**
+ * gst_index_commit:
+ * @index: the index to commit
+ * @id: the writer that committed the index
+ *
+ * Tell the index that the writer with the given id is done
+ * with this index and is not going to write any more entries
+ * to it.
+ */
+void
+gst_index_commit (GstIndex * index, gint id)
+{
+  GstIndexClass *iclass;
+
+  iclass = GST_INDEX_GET_CLASS (index);
+
+  /* optional subclass vfunc; the base class has no storage to commit */
+  if (iclass->commit)
+    iclass->commit (index, id);
+}
+
+#if 0
+/**
+ * gst_index_get_group:
+ * @index: the index to get the current group from
+ *
+ * Get the id of the current group.
+ *
+ * Returns: the id of the current group.
+ */
+gint
+gst_index_get_group (GstIndex * index)
+{
+ return index->curgroup->groupnum;
+}
+
+/**
+ * gst_index_new_group:
+ * @index: the index to create the new group in
+ *
+ * Create a new group for the given index. It will be
+ * set as the current group.
+ *
+ * Returns: the id of the newly created group.
+ */
+gint
+gst_index_new_group (GstIndex * index)
+{
+ index->curgroup = gst_index_group_new (++index->maxgroup);
+ index->groups = g_list_append (index->groups, index->curgroup);
+ GST_DEBUG ("created new group %d in index", index->maxgroup);
+ return index->maxgroup;
+}
+
+/**
+ * gst_index_set_group:
+ * @index: the index to set the new group in
+ * @groupnum: the groupnumber to set
+ *
+ * Set the current groupnumber to the given argument.
+ *
+ * Returns: TRUE if the operation succeeded, FALSE if the group
+ * did not exist.
+ */
+gboolean
+gst_index_set_group (GstIndex * index, gint groupnum)
+{
+ GList *list;
+ GstIndexGroup *indexgroup;
+
+ /* first check for null change */
+ if (groupnum == index->curgroup->groupnum)
+ return TRUE;
+
+ /* else search for the proper group */
+ list = index->groups;
+ while (list) {
+ indexgroup = (GstIndexGroup *) (list->data);
+ list = g_list_next (list);
+ if (indexgroup->groupnum == groupnum) {
+ index->curgroup = indexgroup;
+ GST_DEBUG ("switched to index group %d", indexgroup->groupnum);
+ return TRUE;
+ }
+ }
+
+ /* couldn't find the group in question */
+ GST_DEBUG ("couldn't find index group %d", groupnum);
+ return FALSE;
+}
+#endif
+
+#if 0
+/**
+ * gst_index_set_certainty:
+ * @index: the index to set the certainty on
+ * @certainty: the certainty to set
+ *
+ * Set the certainty of the given index.
+ */
+void
+gst_index_set_certainty (GstIndex * index, GstIndexCertainty certainty)
+{
+ index->curgroup->certainty = certainty;
+}
+
+/**
+ * gst_index_get_certainty:
+ * @index: the index to get the certainty of
+ *
+ * Get the certainty of the given index.
+ *
+ * Returns: the certainty of the index.
+ */
+GstIndexCertainty
+gst_index_get_certainty (GstIndex * index)
+{
+ return index->curgroup->certainty;
+}
+#endif
+
+#if 0
+/**
+ * gst_index_set_filter:
+ * @index: the index to register the filter on
+ * @filter: the filter to register
+ * @user_data: data passed to the filter function
+ *
+ * Lets the app register a custom filter function so that
+ * it can select what entries should be stored in the index.
+ */
+void
+gst_index_set_filter (GstIndex * index,
+ GstIndexFilter filter, gpointer user_data)
+{
+ g_return_if_fail (GST_IS_INDEX (index));
+
+ gst_index_set_filter_full (index, filter, user_data, NULL);
+}
+
+/**
+ * gst_index_set_filter_full:
+ * @index: the index to register the filter on
+ * @filter: the filter to register
+ * @user_data: data passed to the filter function
+ * @user_data_destroy: function to call when @user_data is unset
+ *
+ * Lets the app register a custom filter function so that
+ * it can select what entries should be stored in the index.
+ */
+void
+gst_index_set_filter_full (GstIndex * index,
+ GstIndexFilter filter, gpointer user_data, GDestroyNotify user_data_destroy)
+{
+ g_return_if_fail (GST_IS_INDEX (index));
+
+ if (index->filter_user_data && index->filter_user_data_destroy)
+ index->filter_user_data_destroy (index->filter_user_data);
+
+ index->filter = filter;
+ index->filter_user_data = user_data;
+ index->filter_user_data_destroy = user_data_destroy;
+}
+
+/**
+ * gst_index_set_resolver:
+ * @index: the index to register the resolver on
+ * @resolver: the resolver to register
+ * @user_data: data passed to the resolver function
+ *
+ * Lets the app register a custom function to map index
+ * ids to writer descriptions.
+ */
+void
+gst_index_set_resolver (GstIndex * index,
+ GstIndexResolver resolver, gpointer user_data)
+{
+ gst_index_set_resolver_full (index, resolver, user_data, NULL);
+}
+
+/**
+ * gst_index_set_resolver_full:
+ * @index: the index to register the resolver on
+ * @resolver: the resolver to register
+ * @user_data: data passed to the resolver function
+ * @user_data_destroy: destroy function for @user_data
+ *
+ * Lets the app register a custom function to map index
+ * ids to writer descriptions.
+ *
+ */
+void
+gst_index_set_resolver_full (GstIndex * index, GstIndexResolver resolver,
+ gpointer user_data, GDestroyNotify user_data_destroy)
+{
+ g_return_if_fail (GST_IS_INDEX (index));
+
+ if (index->resolver_user_data && index->resolver_user_data_destroy)
+ index->resolver_user_data_destroy (index->resolver_user_data);
+
+ index->resolver = resolver;
+ index->resolver_user_data = user_data;
+ index->resolver_user_data_destroy = user_data_destroy;
+ index->method = GST_INDEX_RESOLVER_CUSTOM;
+}
+#endif
+
+/**
+ * gst_index_entry_copy:
+ * @entry: the entry to copy
+ *
+ * Copies an entry and returns the result.
+ *
+ * Free-function: gst_index_entry_free
+ *
+ * Returns: (transfer full): a newly allocated #GstIndexEntry.
+ */
+GstIndexEntry *
+gst_index_entry_copy (GstIndexEntry * entry)
+{
+  GstIndexEntry *new_entry = g_slice_new (GstIndexEntry);
+
+  memcpy (new_entry, entry, sizeof (GstIndexEntry));
+
+  /* Deep-copy the per-type payload: a plain memcpy would leave both
+   * entries pointing at the same description/assocs allocation, and
+   * gst_index_entry_free() (the boxed free func) frees that payload,
+   * so copy + free of both entries would double-free. */
+  switch (entry->type) {
+    case GST_INDEX_ENTRY_ID:
+      new_entry->data.id.description = g_strdup (entry->data.id.description);
+      break;
+    case GST_INDEX_ENTRY_ASSOCIATION:
+      new_entry->data.assoc.assocs = g_memdup2 (entry->data.assoc.assocs,
+          sizeof (GstIndexAssociation) * entry->data.assoc.nassocs);
+      break;
+    default:
+      /* OBJECT/FORMAT entries carry no owned heap payload here */
+      break;
+  }
+
+  return new_entry;
+}
+
+/**
+ * gst_index_entry_free:
+ * @entry: (transfer full): the entry to free
+ *
+ * Free the memory used by the given entry.
+ */
+/* Free an entry and its per-type payload.  g_free() is NULL-safe, so the
+ * previous `if (ptr) g_free (ptr)` guards were redundant; a default case
+ * also makes the switch robust against future entry types. */
+void
+gst_index_entry_free (GstIndexEntry * entry)
+{
+  switch (entry->type) {
+    case GST_INDEX_ENTRY_ID:
+      g_free (entry->data.id.description);
+      entry->data.id.description = NULL;
+      break;
+    case GST_INDEX_ENTRY_ASSOCIATION:
+      g_free (entry->data.assoc.assocs);
+      entry->data.assoc.assocs = NULL;
+      break;
+    case GST_INDEX_ENTRY_OBJECT:
+    case GST_INDEX_ENTRY_FORMAT:
+      /* no owned heap payload */
+      break;
+    default:
+      break;
+  }
+
+  g_slice_free (GstIndexEntry, entry);
+}
+
+#if 0
+/**
+ * gst_index_add_format:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @format: the format to add to the index
+ *
+ * Adds a format entry into the index. This function is
+ * used to map dynamic GstFormat ids to their original
+ * format key.
+ *
+ * Free-function: gst_index_entry_free
+ *
+ * Returns: (transfer full): a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_format (GstIndex * index, gint id, GstFormat format)
+{
+ GstIndexEntry *entry;
+ const GstFormatDefinition *def;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+ g_return_val_if_fail (format != 0, NULL);
+
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ entry = g_slice_new (GstIndexEntry);
+ entry->type = GST_INDEX_ENTRY_FORMAT;
+ entry->id = id;
+ entry->data.format.format = format;
+
+ def = gst_format_get_details (format);
+ entry->data.format.key = def->nick;
+
+ gst_index_add_entry (index, entry);
+
+ return entry;
+}
+#endif
+
+/**
+ * gst_index_add_id:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @description: the description of the index writer
+ *
+ * Add an id entry into the index.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_id (GstIndex * index, gint id, gchar * description)
+{
+  GstIndexEntry *entry;
+
+  g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+  g_return_val_if_fail (description != NULL, NULL);
+
+  /* silently refuse when the index is read-only or the writer id invalid */
+  if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+    return NULL;
+
+  entry = g_slice_new (GstIndexEntry);
+  entry->type = GST_INDEX_ENTRY_ID;
+  entry->id = id;
+  /* takes ownership of @description: gst_index_entry_free() g_free()s it */
+  entry->data.id.description = description;
+
+  gst_index_add_entry (index, entry);
+
+  return entry;
+}
+
+/* Default resolver: map a writer to its object path string.  The returned
+ * string is newly allocated and owned by the caller. */
+static gboolean
+gst_index_path_resolver (GstIndex * index, GstObject * writer,
+    gchar ** writer_string, gpointer data)
+{
+  *writer_string = gst_object_get_path_string (writer);
+
+  return TRUE;
+}
+
+/* GType-based resolver: pads map to "ParentTypeName.padname" (or just the
+ * pad name when unparented), other objects map to their type name.  The
+ * returned string is newly allocated and owned by the caller. */
+static gboolean
+gst_index_gtype_resolver (GstIndex * index, GstObject * writer,
+    gchar ** writer_string, gpointer data)
+{
+  g_return_val_if_fail (writer != NULL, FALSE);
+
+  if (GST_IS_PAD (writer)) {
+    GstObject *parent = gst_object_get_parent (GST_OBJECT (writer));
+    gchar *name = gst_object_get_name (writer);
+
+    if (parent != NULL) {
+      *writer_string = g_strdup_printf ("%s.%s",
+          G_OBJECT_TYPE_NAME (parent), name);
+      g_free (name);
+      gst_object_unref (parent);
+    } else {
+      /* hand the name string itself over to the caller */
+      *writer_string = name;
+    }
+  } else {
+    *writer_string = g_strdup (G_OBJECT_TYPE_NAME (writer));
+  }
+
+  return TRUE;
+}
+
+/**
+ * gst_index_get_writer_id:
+ * @index: the index to get a unique write id for
+ * @writer: the GstObject to allocate an id for
+ * @id: a pointer to a gint to hold the id
+ *
+ * Before entries can be added to the index, a writer
+ * should obtain a unique id. The methods to add new entries
+ * to the index require this id as an argument.
+ *
+ * The application can implement a custom function to map the writer object
+ * to a string. That string will be used to register or look up an id
+ * in the index.
+ *
+ * > The caller must not hold @writer's #GST_OBJECT_LOCK, as the default
+ * > resolver may call functions that take the object lock as well, and
+ * > the lock is not recursive.
+ *
+ * Returns: TRUE if the writer would be mapped to an id.
+ */
+gboolean
+gst_index_get_writer_id (GstIndex * index, GstObject * writer, gint * id)
+{
+  gchar *writer_string = NULL;
+  GstIndexEntry *entry;
+  GstIndexClass *iclass;
+  gboolean success = FALSE;
+
+  g_return_val_if_fail (GST_IS_INDEX (index), FALSE);
+  g_return_val_if_fail (GST_IS_OBJECT (writer), FALSE);
+  g_return_val_if_fail (id, FALSE);
+
+  *id = -1;
+
+  /* first try to get a previously cached id */
+  entry = g_hash_table_lookup (index->writers, writer);
+  if (entry == NULL) {
+
+    iclass = GST_INDEX_GET_CLASS (index);
+
+    /* let the app make a string */
+    if (index->resolver) {
+      gboolean res;
+
+      res =
+          index->resolver (index, writer, &writer_string,
+          index->resolver_user_data);
+      if (!res)
+        return FALSE;
+    } else {
+      g_warning ("no resolver found");
+      return FALSE;
+    }
+
+    /* if the index has a resolver, make it map this string to an id */
+    if (iclass->get_writer_id) {
+      success = iclass->get_writer_id (index, id, writer_string);
+    }
+    /* if the index could not resolve, we allocate one ourselves */
+    if (!success) {
+      *id = ++index->last_id;
+    }
+
+    /* cache the mapping; the entry takes ownership of writer_string and
+     * is freed from the cache by gst_index_free_writer() on finalize */
+    entry = gst_index_add_id (index, *id, writer_string);
+    if (!entry) {
+      /* index is probably not writable, make an entry anyway
+       * to keep it in our cache */
+      entry = g_slice_new (GstIndexEntry);
+      entry->type = GST_INDEX_ENTRY_ID;
+      entry->id = *id;
+      entry->data.id.description = writer_string;
+    }
+    g_hash_table_insert (index->writers, writer, entry);
+  } else {
+    *id = entry->id;
+  }
+
+  return TRUE;
+}
+
+/* Hand @entry to the subclass storage vfunc (if any) and notify listeners
+ * via the ::entry-added signal; @entry ownership is not taken here. */
+static void
+gst_index_add_entry (GstIndex * index, GstIndexEntry * entry)
+{
+  GstIndexClass *iclass;
+
+  iclass = GST_INDEX_GET_CLASS (index);
+
+  if (iclass->add_entry) {
+    iclass->add_entry (index, entry);
+  }
+
+  g_signal_emit (index, gst_index_signals[ENTRY_ADDED], 0, entry);
+}
+
+/**
+ * gst_index_add_associationv:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @flags: optional flags for this entry
+ * @n: number of associations
+ * @list: list of associations
+ *
+ * Associate given format/value pairs with each other.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_associationv (GstIndex * index, gint id,
+    GstIndexAssociationFlags flags, gint n, const GstIndexAssociation * list)
+{
+  GstIndexEntry *entry;
+
+  g_return_val_if_fail (n > 0, NULL);
+  g_return_val_if_fail (list != NULL, NULL);
+  g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+
+  /* silently refuse when the index is read-only or the writer id invalid */
+  if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+    return NULL;
+
+  entry = g_slice_new (GstIndexEntry);
+
+  entry->type = GST_INDEX_ENTRY_ASSOCIATION;
+  entry->id = id;
+  entry->data.assoc.flags = flags;
+  /* g_memdup2 takes a gsize, avoiding the guint overflow of old g_memdup */
+  entry->data.assoc.assocs = g_memdup2 (list, sizeof (GstIndexAssociation) * n);
+  entry->data.assoc.nassocs = n;
+
+  gst_index_add_entry (index, entry);
+
+  return entry;
+}
+
+#if 0
+/* NOTE: compiled out in this private copy -- only the varargs-free
+ * gst_index_add_associationv() above is used by the FLV demuxer. */
+/**
+ * gst_index_add_association:
+ * @index: the index to add the entry to
+ * @id: the id of the index writer
+ * @flags: optional flags for this entry
+ * @format: the format of the value
+ * @value: the value
+ * @...: other format/value pairs or 0 to end the list
+ *
+ * Associate given format/value pairs with each other.
+ * Be sure to pass gint64 values to this function's varargs,
+ * you might want to use a gint64 cast to be sure.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_association (GstIndex * index, gint id,
+ GstIndexAssociationFlags flags, GstFormat format, gint64 value, ...)
+{
+ va_list args;
+ GstIndexEntry *entry;
+ GstIndexAssociation *list;
+ gint n_assocs = 0;
+ GstFormat cur_format;
+ GArray *array;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+ g_return_val_if_fail (format != 0, NULL);
+
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ array = g_array_new (FALSE, FALSE, sizeof (GstIndexAssociation));
+
+ /* the first pair comes from the named parameters */
+ {
+ GstIndexAssociation a;
+
+ a.format = format;
+ a.value = value;
+ n_assocs = 1;
+ g_array_append_val (array, a);
+ }
+
+ va_start (args, value);
+
+ /* remaining pairs from the varargs, terminated by a 0 format */
+ while ((cur_format = va_arg (args, GstFormat))) {
+ GstIndexAssociation a;
+
+ a.format = cur_format;
+ a.value = va_arg (args, gint64);
+ n_assocs++;
+ g_array_append_val (array, a);
+ }
+
+ va_end (args);
+
+ /* take ownership of the raw element array; associationv copies it */
+ list = (GstIndexAssociation *) g_array_free (array, FALSE);
+
+ entry = gst_index_add_associationv (index, id, flags, n_assocs, list);
+ g_free (list);
+
+ return entry;
+}
+
+/**
+ * gst_index_add_object:
+ * @index: the index to add the object to
+ * @id: the id of the index writer
+ * @key: a key for the object
+ * @type: the GType of the object
+ * @object: a pointer to the object to add
+ *
+ * Add the given object to the index with the given key.
+ *
+ * This function is not yet implemented.
+ *
+ * Returns: a pointer to the newly added entry in the index.
+ */
+GstIndexEntry *
+gst_index_add_object (GstIndex * index, gint id, gchar * key,
+ GType type, gpointer object)
+{
+ if (!GST_INDEX_IS_WRITABLE (index) || id == -1)
+ return NULL;
+
+ /* unimplemented stub: object entries are never stored */
+ return NULL;
+}
+#endif
+
+/* Default GCompareDataFunc handed to gst_index_get_assoc_entry_full().
+ * NOTE(review): this orders the two gconstpointer arguments by raw
+ * pointer value; the memindex backend below ignores @func entirely, so
+ * this default is effectively unused there -- confirm before relying
+ * on it with other backends. */
+static gint
+gst_index_compare_func (gconstpointer a, gconstpointer b, gpointer user_data)
+{
+ if (a < b)
+ return -1;
+ if (a > b)
+ return 1;
+ return 0;
+}
+
+/**
+ * gst_index_get_assoc_entry:
+ * @index: the index to search
+ * @id: the id of the index writer
+ * @method: The lookup method to use
+ * @flags: Flags for the entry
+ * @format: the format of the value
+ * @value: the value to find
+ *
+ * Finds the given format/value in the index.
+ *
+ * Convenience wrapper around gst_index_get_assoc_entry_full() using the
+ * default compare function.
+ *
+ * Returns: the entry associated with the value or NULL if the
+ * value was not found or @id is -1.
+ */
+GstIndexEntry *
+gst_index_get_assoc_entry (GstIndex * index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value)
+{
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+
+ if (id == -1)
+ return NULL;
+
+ return gst_index_get_assoc_entry_full (index, id, method, flags, format,
+ value, gst_index_compare_func, NULL);
+}
+
+/**
+ * gst_index_get_assoc_entry_full:
+ * @index: the index to search
+ * @id: the id of the index writer
+ * @method: The lookup method to use
+ * @flags: Flags for the entry
+ * @format: the format of the value
+ * @value: the value to find
+ * @func: the function used to compare entries
+ * @user_data: user data passed to the compare function
+ *
+ * Finds the given format/value in the index with the given
+ * compare function and user_data.
+ *
+ * The lookup itself is delegated to the subclass' get_assoc_entry
+ * vmethod; a backend that does not implement it always yields %NULL.
+ *
+ * Returns: the entry associated with the value or NULL if the
+ * value was not found.
+ */
+GstIndexEntry *
+gst_index_get_assoc_entry_full (GstIndex * index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value, GCompareDataFunc func, gpointer user_data)
+{
+ GstIndexClass *iclass;
+
+ g_return_val_if_fail (GST_IS_INDEX (index), NULL);
+
+ if (id == -1)
+ return NULL;
+
+ iclass = GST_INDEX_GET_CLASS (index);
+
+ if (iclass->get_assoc_entry)
+ return iclass->get_assoc_entry (index, id, method, flags, format, value,
+ func, user_data);
+
+ return NULL;
+}
+
+/**
+ * gst_index_entry_assoc_map:
+ * @entry: the entry to query
+ * @format: the format of the value to find
+ * @value: a pointer to store the value
+ *
+ * Gets alternative formats associated with the indexentry: scans the
+ * entry's associations for @format and stores the matching value.
+ *
+ * Returns: TRUE if there was a value associated with the given
+ * format.
+ */
+gboolean
+gst_index_entry_assoc_map (GstIndexEntry * entry,
+ GstFormat format, gint64 * value)
+{
+ gint i;
+
+ g_return_val_if_fail (entry != NULL, FALSE);
+ g_return_val_if_fail (value != NULL, FALSE);
+
+ /* linear scan; entries typically carry only a handful of formats */
+ for (i = 0; i < GST_INDEX_NASSOCS (entry); i++) {
+ if (GST_INDEX_ASSOC_FORMAT (entry, i) == format) {
+ *value = GST_INDEX_ASSOC_VALUE (entry, i);
+ return TRUE;
+ }
+ }
+ return FALSE;
+}
diff --git a/gst/flv/gstindex.h b/gst/flv/gstindex.h
new file mode 100644
index 0000000000..0a51f9c646
--- /dev/null
+++ b/gst/flv/gstindex.h
@@ -0,0 +1,448 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wim.taymans@chello.be>
+ *
+ * gstindex.h: Header for GstIndex, base class to handle efficient
+ * storage or caching of seeking information.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_INDEX_H__
+#define __GST_INDEX_H__
+
+#include <gst/gstobject.h>
+#include <gst/gstformat.h>
+#include <gst/gstpluginfeature.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_INDEX (gst_index_get_type ())
+#define GST_INDEX(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_INDEX, GstIndex))
+#define GST_IS_INDEX(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_INDEX))
+#define GST_INDEX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_INDEX, GstIndexClass))
+#define GST_IS_INDEX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_INDEX))
+#define GST_INDEX_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_INDEX, GstIndexClass))
+
+#define GST_TYPE_INDEX_ENTRY (gst_index_entry_get_type())
+
+typedef struct _GstIndexEntry GstIndexEntry;
+typedef struct _GstIndexGroup GstIndexGroup;
+typedef struct _GstIndex GstIndex;
+typedef struct _GstIndexClass GstIndexClass;
+
+/**
+ * GstIndexCertainty:
+ * @GST_INDEX_UNKNOWN: accuracy is not known
+ * @GST_INDEX_CERTAIN: accuracy is perfect
+ * @GST_INDEX_FUZZY: accuracy is fuzzy
+ *
+ * The certainty of a group in the index.
+ */
+typedef enum {
+ GST_INDEX_UNKNOWN,
+ GST_INDEX_CERTAIN,
+ GST_INDEX_FUZZY
+} GstIndexCertainty;
+
+/**
+ * GstIndexEntryType:
+ * @GST_INDEX_ENTRY_ID: This entry is an id that maps an index id to its owner object
+ * @GST_INDEX_ENTRY_ASSOCIATION: This entry is an association between formats
+ * @GST_INDEX_ENTRY_OBJECT: An object
+ * @GST_INDEX_ENTRY_FORMAT: A format definition
+ *
+ * The different types of entries in the index.
+ */
+typedef enum {
+ GST_INDEX_ENTRY_ID,
+ GST_INDEX_ENTRY_ASSOCIATION,
+ GST_INDEX_ENTRY_OBJECT,
+ GST_INDEX_ENTRY_FORMAT
+} GstIndexEntryType;
+
+/**
+ * GstIndexLookupMethod:
+ * @GST_INDEX_LOOKUP_EXACT: There has to be an exact indexentry with the given format/value
+ * @GST_INDEX_LOOKUP_BEFORE: The exact entry or the one before it
+ * @GST_INDEX_LOOKUP_AFTER: The exact entry or the one after it
+ *
+ * Specify the method to find an index entry in the index.
+ */
+typedef enum {
+ GST_INDEX_LOOKUP_EXACT,
+ GST_INDEX_LOOKUP_BEFORE,
+ GST_INDEX_LOOKUP_AFTER
+} GstIndexLookupMethod;
+
+/**
+ * GST_INDEX_NASSOCS:
+ * @entry: The entry to query
+ *
+ * Get the number of associations in the entry.
+ */
+#define GST_INDEX_NASSOCS(entry) ((entry)->data.assoc.nassocs)
+
+/**
+ * GST_INDEX_ASSOC_FLAGS:
+ * @entry: The entry to query
+ *
+ * Get the flags for this entry.
+ */
+#define GST_INDEX_ASSOC_FLAGS(entry) ((entry)->data.assoc.flags)
+
+/**
+ * GST_INDEX_ASSOC_FORMAT:
+ * @entry: The entry to query
+ * @i: The format index
+ *
+ * Get the i-th format of the entry.
+ */
+#define GST_INDEX_ASSOC_FORMAT(entry,i) ((entry)->data.assoc.assocs[(i)].format)
+
+/**
+ * GST_INDEX_ASSOC_VALUE:
+ * @entry: The entry to query
+ * @i: The value index
+ *
+ * Get the i-th value of the entry.
+ */
+#define GST_INDEX_ASSOC_VALUE(entry,i) ((entry)->data.assoc.assocs[(i)].value)
+
+typedef struct _GstIndexAssociation GstIndexAssociation;
+
+/**
+ * GstIndexAssociation:
+ * @format: the format of the association
+ * @value: the value of the association
+ *
+ * An association in an entry.
+ */
+struct _GstIndexAssociation {
+ GstFormat format;
+ gint64 value;
+};
+
+/**
+ * GstIndexAssociationFlags:
+ * @GST_INDEX_ASSOCIATION_FLAG_NONE: no extra flags
+ * @GST_INDEX_ASSOCIATION_FLAG_KEY_UNIT: the entry marks a key unit, a key unit is one
+ * that marks a place where one can randomly seek to.
+ * @GST_INDEX_ASSOCIATION_FLAG_DELTA_UNIT: the entry marks a delta unit, a delta unit
+ * is one that marks a place where one can relatively seek to.
+ * @GST_INDEX_ASSOCIATION_FLAG_LAST: extra user defined flags should start here.
+ *
+ * Flags for an association entry.
+ */
+typedef enum {
+ GST_INDEX_ASSOCIATION_FLAG_NONE = 0,
+ GST_INDEX_ASSOCIATION_FLAG_KEY_UNIT = (1 << 0),
+ GST_INDEX_ASSOCIATION_FLAG_DELTA_UNIT = (1 << 1),
+
+ /* new flags should start here */
+ GST_INDEX_ASSOCIATION_FLAG_LAST = (1 << 8)
+} GstIndexAssociationFlags;
+
+/**
+ * GST_INDEX_FORMAT_FORMAT:
+ * @entry: The entry to query
+ *
+ * Get the format of the format entry
+ */
+#define GST_INDEX_FORMAT_FORMAT(entry) ((entry)->data.format.format)
+
+/**
+ * GST_INDEX_FORMAT_KEY:
+ * @entry: The entry to query
+ *
+ * Get the key of the format entry
+ */
+#define GST_INDEX_FORMAT_KEY(entry) ((entry)->data.format.key)
+
+/**
+ * GST_INDEX_ID_INVALID:
+ *
+ * Constant for an invalid index id
+ */
+#define GST_INDEX_ID_INVALID (-1)
+
+/**
+ * GST_INDEX_ID_DESCRIPTION:
+ * @entry: The entry to query
+ *
+ * Get the description of the id entry
+ */
+#define GST_INDEX_ID_DESCRIPTION(entry) ((entry)->data.id.description)
+
+/**
+ * GstIndexEntry:
+ *
+ * The basic element of an index.
+ */
+struct _GstIndexEntry {
+ /*< private >*/
+ GstIndexEntryType type;
+ gint id;
+
+ union {
+ struct {
+ gchar *description;
+ } id;
+ struct {
+ gint nassocs;
+ GstIndexAssociation
+ *assocs;
+ GstIndexAssociationFlags flags;
+ } assoc;
+ struct {
+ gchar *key;
+ GType type;
+ gpointer object;
+ } object;
+ struct {
+ GstFormat format;
+ const gchar *key;
+ } format;
+ } data;
+};
+
+/**
+ * GstIndexGroup:
+ *
+ * A group of related entries in an index.
+ */
+
+struct _GstIndexGroup {
+ /*< private >*/
+ /* unique ID of group in index */
+ gint groupnum;
+
+ /* list of entries */
+ GList *entries;
+
+ /* the certainty level of the group */
+ GstIndexCertainty certainty;
+
+ /* peer group that contains more certain entries */
+ gint peergroup;
+};
+
+/**
+ * GstIndexFilter:
+ * @index: The index being queried
+ * @entry: The entry to be added.
+ * @user_data: User data passed to the function.
+ *
+ * Function to filter out entries in the index.
+ *
+ * Returns: This function should return %TRUE if the entry is to be added
+ * to the index, %FALSE otherwise.
+ *
+ */
+typedef gboolean (*GstIndexFilter) (GstIndex *index,
+ GstIndexEntry *entry,
+ gpointer user_data);
+/**
+ * GstIndexResolverMethod:
+ * @GST_INDEX_RESOLVER_CUSTOM: Use a custom resolver
+ * @GST_INDEX_RESOLVER_GTYPE: Resolve based on the GType of the object
+ * @GST_INDEX_RESOLVER_PATH: Resolve on the path in graph
+ *
+ * The method used to resolve index writers
+ */
+typedef enum {
+ GST_INDEX_RESOLVER_CUSTOM,
+ GST_INDEX_RESOLVER_GTYPE,
+ GST_INDEX_RESOLVER_PATH
+} GstIndexResolverMethod;
+
+/**
+ * GstIndexResolver:
+ * @index: the index being queried.
+ * @writer: The object that wants to write
+ * @writer_string: A description of the writer.
+ * @user_data: user_data as registered
+ *
+ * Function to resolve ids to writer descriptions.
+ *
+ * Returns: %TRUE if an id could be assigned to the writer.
+ */
+typedef gboolean (*GstIndexResolver) (GstIndex *index,
+ GstObject *writer,
+ gchar **writer_string,
+ gpointer user_data);
+
+/**
+ * GstIndexFlags:
+ * @GST_INDEX_WRITABLE: The index is writable
+ * @GST_INDEX_READABLE: The index is readable
+ * @GST_INDEX_FLAG_LAST: First flag that can be used by subclasses
+ *
+ * Flags for this index
+ */
+typedef enum {
+ GST_INDEX_WRITABLE = (GST_OBJECT_FLAG_LAST << 0),
+ GST_INDEX_READABLE = (GST_OBJECT_FLAG_LAST << 1),
+
+ GST_INDEX_FLAG_LAST = (GST_OBJECT_FLAG_LAST << 8)
+} GstIndexFlags;
+
+/**
+ * GST_INDEX_IS_READABLE:
+ * @obj: The index to check
+ *
+ * Check if the index can be read from
+ */
+#define GST_INDEX_IS_READABLE(obj) (GST_OBJECT_FLAG_IS_SET (obj, GST_INDEX_READABLE))
+
+/**
+ * GST_INDEX_IS_WRITABLE:
+ * @obj: The index to check
+ *
+ * Check if the index can be written to
+ */
+#define GST_INDEX_IS_WRITABLE(obj) (GST_OBJECT_FLAG_IS_SET (obj, GST_INDEX_WRITABLE))
+
+/**
+ * GstIndex:
+ *
+ * Opaque #GstIndex structure.
+ */
+struct _GstIndex {
+ GstObject object;
+
+ /*< private >*/
+ GList *groups;
+ GstIndexGroup *curgroup;
+ gint maxgroup;
+
+ GstIndexResolverMethod method;
+ GstIndexResolver resolver;
+ gpointer resolver_user_data;
+ GDestroyNotify resolver_user_data_destroy;
+
+ GstIndexFilter filter;
+ gpointer filter_user_data;
+ GDestroyNotify filter_user_data_destroy;
+
+ GHashTable *writers;
+ gint last_id;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+struct _GstIndexClass {
+ GstObjectClass parent_class;
+
+ /*< protected >*/
+ gboolean (*get_writer_id) (GstIndex *index, gint *id, gchar *writer);
+
+ void (*commit) (GstIndex *index, gint id);
+
+ /* abstract methods */
+ void (*add_entry) (GstIndex *index, GstIndexEntry *entry);
+
+ GstIndexEntry* (*get_assoc_entry) (GstIndex *index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value,
+ GCompareDataFunc func,
+ gpointer user_data);
+ /* signals */
+ void (*entry_added) (GstIndex *index, GstIndexEntry *entry);
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+/* NOTE(review): this header appears to be a private copy of the GstIndex
+ * API (cf. the GstFlvDemux* typedefs in gstmemindex.c); the declarations
+ * below are marked static, which only works because the header is meant
+ * to be included into a single translation unit -- confirm against the
+ * flv plugin's build setup. */
+static
+GType gst_index_get_type (void);
+
+#if 0
+GstIndex* gst_index_new (void);
+#endif
+void gst_index_commit (GstIndex *index, gint id);
+
+#if 0
+gint gst_index_get_group (GstIndex *index);
+gint gst_index_new_group (GstIndex *index);
+gboolean gst_index_set_group (GstIndex *index, gint groupnum);
+
+void gst_index_set_certainty (GstIndex *index,
+ GstIndexCertainty certainty);
+GstIndexCertainty gst_index_get_certainty (GstIndex *index);
+
+static
+void gst_index_set_filter (GstIndex *index,
+ GstIndexFilter filter, gpointer user_data);
+static
+void gst_index_set_filter_full (GstIndex *index,
+ GstIndexFilter filter, gpointer user_data,
+ GDestroyNotify user_data_destroy);
+
+void gst_index_set_resolver (GstIndex *index,
+ GstIndexResolver resolver, gpointer user_data);
+void gst_index_set_resolver_full (GstIndex *index, GstIndexResolver resolver,
+ gpointer user_data,
+ GDestroyNotify user_data_destroy);
+#endif
+
+static
+gboolean gst_index_get_writer_id (GstIndex *index, GstObject *writer, gint *id);
+
+#if 0
+GstIndexEntry* gst_index_add_format (GstIndex *index, gint id, GstFormat format);
+#endif
+
+static
+GstIndexEntry* gst_index_add_associationv (GstIndex * index, gint id, GstIndexAssociationFlags flags,
+ gint n, const GstIndexAssociation * list);
+#if 0
+/* compiled-out prototypes, kept for reference; note the terminating
+ * semicolon on gst_index_add_association() so this block compiles if
+ * ever re-enabled */
+GstIndexEntry* gst_index_add_association (GstIndex *index, gint id, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value, ...);
+GstIndexEntry* gst_index_add_object (GstIndex *index, gint id, gchar *key,
+ GType type, gpointer object);
+#endif
+
+static
+GstIndexEntry* gst_index_add_id (GstIndex *index, gint id,
+ gchar *description);
+
+static
+GstIndexEntry* gst_index_get_assoc_entry (GstIndex *index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value);
+static
+GstIndexEntry* gst_index_get_assoc_entry_full (GstIndex *index, gint id,
+ GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value,
+ GCompareDataFunc func,
+ gpointer user_data);
+
+/* working with index entries */
+static
+GType gst_index_entry_get_type (void);
+static
+GstIndexEntry * gst_index_entry_copy (GstIndexEntry *entry);
+static
+void gst_index_entry_free (GstIndexEntry *entry);
+static
+gboolean gst_index_entry_assoc_map (GstIndexEntry *entry,
+ GstFormat format, gint64 *value);
+
+G_END_DECLS
+
+#endif /* __GST_INDEX_H__ */
diff --git a/gst/flv/gstmemindex.c b/gst/flv/gstmemindex.c
new file mode 100644
index 0000000000..eef99c2c58
--- /dev/null
+++ b/gst/flv/gstmemindex.c
@@ -0,0 +1,432 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <gst/gst.h>
+
+/* NOTE(review): GST_TYPE_MEM_INDEX expands to the parent's
+ * gst_index_get_type(), so the cast/check macros validate against
+ * GstIndex rather than the derived type -- presumably deliberate since
+ * gst_mem_index_get_type() is only declared further down; confirm.
+ * GST_IS_MEM_INDEX_CLASS previously used the nonexistent identifier
+ * GST_TYPE_CHECK_CLASS_TYPE; corrected to GLib's G_TYPE_CHECK_CLASS_TYPE. */
+#define GST_TYPE_MEM_INDEX \
+ (gst_index_get_type ())
+#define GST_MEM_INDEX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MEM_INDEX, GstMemIndex))
+#define GST_MEM_INDEX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MEM_INDEX, GstMemIndexClass))
+#define GST_IS_MEM_INDEX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MEM_INDEX))
+#define GST_IS_MEM_INDEX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MEM_INDEX))
+
+/*
+ * Object model:
+ *
+ * All entries are simply added to a GList first. Then we build
+ * an index to each entry for each id/format
+ *
+ *
+ * memindex
+ * -----------------------------...
+ * ! !
+ * id1 id2
+ * ------------
+ * ! !
+ * format1 format2
+ * ! !
+ * GTree GTree
+ *
+ *
+ * The memindex creates a MemIndexId object for each writer id, a
+ * Hashtable is kept to map the id to the MemIndexId
+ *
+ * The MemIndexId keeps a MemIndexFormatIndex for each format the
+ * specific writer wants indexed.
+ *
+ * The MemIndexFormatIndex keeps all the values of the particular
+ * format in a GTree, The values of the GTree point back to the entry.
+ *
+ * Finding a value for an id/format requires locating the correct GTree,
+ * then do a lookup in the Tree to get the required value.
+ */
+
+/* one GTree per (writer id, format): all entries of that writer sorted
+ * by the value they carry for that format */
+typedef struct
+{
+ GstFormat format;
+ /* which association slot of an entry holds this format's value */
+ gint offset;
+ GTree *tree;
+}
+GstMemIndexFormatIndex;
+
+/* per-writer-id record: maps GstFormat -> GstMemIndexFormatIndex */
+typedef struct
+{
+ gint id;
+ GHashTable *format_index;
+}
+GstMemIndexId;
+
+typedef struct _GstMemIndex GstMemIndex;
+typedef struct _GstMemIndexClass GstMemIndexClass;
+
+struct _GstMemIndex
+{
+ GstIndex parent;
+
+ /* flat list of all association entries, newest first (prepend);
+ * this list owns the entries -- the trees only reference them */
+ GList *associations;
+
+ /* gint id -> GstMemIndexId */
+ GHashTable *id_index;
+};
+
+struct _GstMemIndexClass
+{
+ GstIndexClass parent_class;
+};
+
+/* implementations of the GstIndex vmethods provided by this backend */
+static void gst_mem_index_finalize (GObject * object);
+
+static void gst_mem_index_add_entry (GstIndex * index, GstIndexEntry * entry);
+static GstIndexEntry *gst_mem_index_get_assoc_entry (GstIndex * index, gint id,
+    GstIndexLookupMethod method, GstIndexAssociationFlags flags,
+    GstFormat format, gint64 value, GCompareDataFunc func, gpointer user_data);
+
+/* NOTE(review): this CLASS() shortcut is not referenced below -- verify
+ * before removing */
+#define CLASS(mem_index) GST_MEM_INDEX_CLASS (G_OBJECT_GET_CLASS (mem_index))
+
+static GType gst_mem_index_get_type (void);
+
+/* alias the type under a plugin-unique name so this private copy does
+ * not clash with other plugins carrying the same code */
+typedef GstMemIndex GstFlvDemuxMemIndex;
+typedef GstMemIndexClass GstFlvDemuxMemIndexClass;
+G_DEFINE_TYPE (GstFlvDemuxMemIndex, gst_mem_index, GST_TYPE_INDEX);
+
+/* hook up finalize and the two GstIndex vmethods this backend implements */
+static void
+gst_mem_index_class_init (GstMemIndexClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstIndexClass *gstindex_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstindex_class = (GstIndexClass *) klass;
+
+ gobject_class->finalize = gst_mem_index_finalize;
+
+ gstindex_class->add_entry = GST_DEBUG_FUNCPTR (gst_mem_index_add_entry);
+ gstindex_class->get_assoc_entry =
+ GST_DEBUG_FUNCPTR (gst_mem_index_get_assoc_entry);
+}
+
+/* instance init: empty association list, empty id table (gint keys) */
+static void
+gst_mem_index_init (GstMemIndex * index)
+{
+ GST_DEBUG ("created new mem index");
+
+ index->associations = NULL;
+ index->id_index = g_hash_table_new (g_int_hash, g_int_equal);
+}
+
+/* GHFunc: free one per-format index. Destroys the GTree (nodes only --
+ * the entries it references are owned by memindex->associations and are
+ * freed later in finalize) and then the struct itself. */
+static void
+gst_mem_index_free_format (gpointer key, gpointer value, gpointer user_data)
+{
+ GstMemIndexFormatIndex *index = (GstMemIndexFormatIndex *) value;
+
+ if (index->tree) {
+ g_tree_destroy (index->tree);
+ }
+
+ g_slice_free (GstMemIndexFormatIndex, index);
+}
+
+/* GHFunc: free one per-id record, including all of its format indexes */
+static void
+gst_mem_index_free_id (gpointer key, gpointer value, gpointer user_data)
+{
+ GstMemIndexId *id_index = (GstMemIndexId *) value;
+
+ if (id_index->format_index) {
+ g_hash_table_foreach (id_index->format_index, gst_mem_index_free_format,
+ NULL);
+ g_hash_table_destroy (id_index->format_index);
+ id_index->format_index = NULL;
+ }
+
+ g_slice_free (GstMemIndexId, id_index);
+}
+
+/* dispose of all index state; tree/hash structures must go before the
+ * association entries they point into */
+static void
+gst_mem_index_finalize (GObject * object)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (object);
+
+ /* Delete the trees referencing the associations first */
+ if (memindex->id_index) {
+ g_hash_table_foreach (memindex->id_index, gst_mem_index_free_id, NULL);
+ g_hash_table_destroy (memindex->id_index);
+ memindex->id_index = NULL;
+ }
+
+ /* Then delete the associations themselves */
+ if (memindex->associations) {
+ g_list_foreach (memindex->associations, (GFunc) gst_index_entry_free, NULL);
+ g_list_free (memindex->associations);
+ memindex->associations = NULL;
+ }
+
+ G_OBJECT_CLASS (gst_mem_index_parent_class)->finalize (object);
+}
+
+/* register a writer id on first sight: create its (initially empty)
+ * format table. The hash key is a pointer to the id field inside the
+ * record itself, so it stays valid for the record's lifetime. */
+static void
+gst_mem_index_add_id (GstIndex * index, GstIndexEntry * entry)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (index);
+ GstMemIndexId *id_index;
+
+ id_index = g_hash_table_lookup (memindex->id_index, &entry->id);
+
+ if (!id_index) {
+ id_index = g_slice_new0 (GstMemIndexId);
+
+ id_index->id = entry->id;
+ id_index->format_index = g_hash_table_new (g_int_hash, g_int_equal);
+ g_hash_table_insert (memindex->id_index, &id_index->id, id_index);
+ }
+}
+
+/* GCompareDataFunc for the per-format GTree: compares two entries by
+ * the value in the format's association slot (index->offset).
+ * NOTE(review): the subtraction is val2 - val1, so an entry with the
+ * smaller value compares as "greater" -- i.e. the tree is ordered by
+ * descending value; mem_index_search below follows the same convention. */
+static gint
+mem_index_compare (gconstpointer a, gconstpointer b, gpointer user_data)
+{
+ GstMemIndexFormatIndex *index = user_data;
+ gint64 val1, val2;
+ gint64 diff;
+
+ val1 = GST_INDEX_ASSOC_VALUE (((GstIndexEntry *) a), index->offset);
+ val2 = GST_INDEX_ASSOC_VALUE (((GstIndexEntry *) b), index->offset);
+
+ diff = (val2 - val1);
+
+ return (diff == 0 ? 0 : (diff > 0 ? 1 : -1));
+}
+
+/* insert @entry into the per-format tree of @id_index for the format in
+ * association slot @assoc, creating the format index (and its GTree)
+ * the first time that format is seen. The hash key is the format field
+ * inside the new struct, valid for the struct's lifetime. */
+static void
+gst_mem_index_index_format (GstMemIndexId * id_index, GstIndexEntry * entry,
+ gint assoc)
+{
+ GstMemIndexFormatIndex *index;
+ GstFormat *format;
+
+ format = &GST_INDEX_ASSOC_FORMAT (entry, assoc);
+
+ index = g_hash_table_lookup (id_index->format_index, format);
+
+ if (!index) {
+ index = g_slice_new0 (GstMemIndexFormatIndex);
+
+ index->format = *format;
+ index->offset = assoc;
+ index->tree = g_tree_new_with_data (mem_index_compare, index);
+
+ g_hash_table_insert (id_index->format_index, &index->format, index);
+ }
+
+ /* the entry is both key and value: lookups compare by assoc value */
+ g_tree_insert (index->tree, entry, entry);
+}
+
+/* store an association entry: prepend to the master list (newest first)
+ * and, if the writer id was registered via an ID entry, index the entry
+ * under every format it carries. Entries of unknown ids stay in the
+ * list but are not searchable. */
+static void
+gst_mem_index_add_association (GstIndex * index, GstIndexEntry * entry)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (index);
+ GstMemIndexId *id_index;
+
+ memindex->associations = g_list_prepend (memindex->associations, entry);
+
+ id_index = g_hash_table_lookup (memindex->id_index, &entry->id);
+ if (id_index) {
+ gint i;
+
+ for (i = 0; i < GST_INDEX_NASSOCS (entry); i++) {
+ gst_mem_index_index_format (id_index, entry, i);
+ }
+ }
+}
+
+/* object entries are not stored by this backend; stub keeps the
+ * add_entry dispatcher total */
+static void
+gst_mem_index_add_object (GstIndex * index, GstIndexEntry * entry)
+{
+}
+
+/* format-definition entries are likewise ignored */
+static void
+gst_mem_index_add_format (GstIndex * index, GstIndexEntry * entry)
+{
+}
+
+/* GstIndex add_entry vmethod: dispatch on the entry type; ownership of
+ * @entry passes to the index (associations keep it, other types are
+ * registered or dropped) */
+static void
+gst_mem_index_add_entry (GstIndex * index, GstIndexEntry * entry)
+{
+ GST_LOG_OBJECT (index, "added this entry");
+
+ switch (entry->type) {
+ case GST_INDEX_ENTRY_ID:
+ gst_mem_index_add_id (index, entry);
+ break;
+ case GST_INDEX_ENTRY_ASSOCIATION:
+ gst_mem_index_add_association (index, entry);
+ break;
+ case GST_INDEX_ENTRY_OBJECT:
+ gst_mem_index_add_object (index, entry);
+ break;
+ case GST_INDEX_ENTRY_FORMAT:
+ gst_mem_index_add_format (index, entry);
+ break;
+ default:
+ break;
+ }
+}
+
+/* scratch state for a g_tree_search() pass: the target value plus the
+ * closest neighbours seen so far on either side (only tracked when not
+ * doing an exact lookup) */
+typedef struct
+{
+ gint64 value;
+ GstMemIndexFormatIndex *index;
+ gboolean exact;
+ /* best entry with value below the target; low_diff is its (negative)
+ * distance, high_diff the (positive) distance of "higher" */
+ GstIndexEntry *lower;
+ gint64 low_diff;
+ GstIndexEntry *higher;
+ gint64 high_diff;
+}
+GstMemIndexSearchData;
+
+/* GCompareFunc for g_tree_search(): @a is the tree node (an entry), @b
+ * the search data. Returns 0 on exact hit; otherwise steers the
+ * traversal while recording nearest lower/higher candidates so BEFORE/
+ * AFTER lookups can fall back on them. */
+static gint
+mem_index_search (gconstpointer a, gconstpointer b)
+{
+ GstMemIndexSearchData *data = (GstMemIndexSearchData *) b;
+ GstMemIndexFormatIndex *index = data->index;
+ gint64 val1, val2;
+ gint64 diff;
+
+ val1 = GST_INDEX_ASSOC_VALUE (((GstIndexEntry *) a), index->offset);
+ val2 = data->value;
+
+ diff = (val1 - val2);
+ if (diff == 0)
+ return 0;
+
+ /* exact matching, don't update low/high */
+ if (data->exact)
+ return (diff > 0 ? 1 : -1);
+
+ if (diff < 0) {
+ if (diff > data->low_diff) {
+ data->low_diff = diff;
+ data->lower = (GstIndexEntry *) a;
+ }
+ diff = -1;
+ } else {
+ if (diff < data->high_diff) {
+ data->high_diff = diff;
+ data->higher = (GstIndexEntry *) a;
+ }
+ diff = 1;
+ }
+
+ return diff;
+}
+
+/* GstIndex get_assoc_entry vmethod: locate the entry for @value of
+ * @format written by @id, honouring the lookup @method and required
+ * @flags. NOTE(review): the @func/@user_data compare callback supplied
+ * by the caller is ignored by this backend -- the trees' own ordering
+ * is used instead. */
+static GstIndexEntry *
+gst_mem_index_get_assoc_entry (GstIndex * index, gint id,
+ GstIndexLookupMethod method,
+ GstIndexAssociationFlags flags,
+ GstFormat format, gint64 value, GCompareDataFunc func, gpointer user_data)
+{
+ GstMemIndex *memindex = GST_MEM_INDEX (index);
+ GstMemIndexId *id_index;
+ GstMemIndexFormatIndex *format_index;
+ GstIndexEntry *entry;
+ GstMemIndexSearchData data;
+
+ /* no entries indexed for this writer id */
+ id_index = g_hash_table_lookup (memindex->id_index, &id);
+ if (!id_index)
+ return NULL;
+
+ /* no entries carrying this format for that writer */
+ format_index = g_hash_table_lookup (id_index->format_index, &format);
+ if (!format_index)
+ return NULL;
+
+ data.value = value;
+ data.index = format_index;
+ data.exact = (method == GST_INDEX_LOOKUP_EXACT);
+
+ /* setup data for low/high checks if we are not looking
+ * for an exact match */
+ if (!data.exact) {
+ data.low_diff = G_MININT64;
+ data.lower = NULL;
+ data.high_diff = G_MAXINT64;
+ data.higher = NULL;
+ }
+
+ entry = g_tree_search (format_index->tree, mem_index_search, &data);
+
+ /* get the low/high values if we're not exact */
+ if (entry == NULL && !data.exact) {
+ if (method == GST_INDEX_LOOKUP_BEFORE)
+ entry = data.lower;
+ else if (method == GST_INDEX_LOOKUP_AFTER) {
+ entry = data.higher;
+ }
+ }
+
+ /* candidate lacks a required flag: walk the (newest-first) association
+ * list from it until an entry of the same id with all @flags is found;
+ * exact lookups cannot be relaxed and just fail */
+ if (entry && ((GST_INDEX_ASSOC_FLAGS (entry) & flags) != flags)) {
+ if (method != GST_INDEX_LOOKUP_EXACT) {
+ GList *l_entry = g_list_find (memindex->associations, entry);
+
+ entry = NULL;
+
+ while (l_entry) {
+ entry = (GstIndexEntry *) l_entry->data;
+
+ if (entry->id == id && (GST_INDEX_ASSOC_FLAGS (entry) & flags) == flags)
+ break;
+
+ if (method == GST_INDEX_LOOKUP_BEFORE)
+ l_entry = g_list_next (l_entry);
+ else if (method == GST_INDEX_LOOKUP_AFTER) {
+ l_entry = g_list_previous (l_entry);
+ }
+ }
+ }
+ } else {
+ entry = NULL;
+ }
+ }
+
+ return entry;
+}
+
+#if 0
+/* NOTE: compiled out -- relies on the GstIndexFactory/registry API that
+ * no longer exists; the flv plugin instantiates the type directly */
+gboolean
+gst_mem_index_plugin_init (GstPlugin * plugin)
+{
+ GstIndexFactory *factory;
+
+ factory = gst_index_factory_new ("memindex",
+ "A index that stores entries in memory", gst_mem_index_get_type ());
+
+ if (factory == NULL) {
+ g_warning ("failed to create memindex factory");
+ return FALSE;
+ }
+
+ GST_PLUGIN_FEATURE (factory)->plugin_name = plugin->desc.name;
+ GST_PLUGIN_FEATURE (factory)->loaded = TRUE;
+
+ gst_registry_add_feature (gst_registry_get_default (),
+ GST_PLUGIN_FEATURE (factory));
+
+ return TRUE;
+}
+#endif
diff --git a/gst/flv/meson.build b/gst/flv/meson.build
new file mode 100644
index 0000000000..f05ad4f6ba
--- /dev/null
+++ b/gst/flv/meson.build
@@ -0,0 +1,10 @@
+# Build the FLV plugin (demuxer, muxer, element/plugin registration glue)
+gstflv = library('gstflv',
+ 'gstflvdemux.c', 'gstflvmux.c', 'gstflvplugin.c','gstflvelement.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc, libsinc],
+ dependencies : [gstpbutils_dep, gstvideo_dep, gsttag_dep, gstaudio_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstflv, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstflv]
diff --git a/gst/flx/flx_color.c b/gst/flx/flx_color.c
new file mode 100644
index 0000000000..3a581356a4
--- /dev/null
+++ b/gst/flx/flx_color.c
@@ -0,0 +1,117 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/gst.h>
+
+#include "flx_color.h"
+
+/* Allocate a converter for a width x height frame. The palette starts
+ * out all zeros (everything renders black) and is filled in later via
+ * flx_set_palette_vector() / flx_set_color(). Free with
+ * flx_colorspace_converter_destroy(). Local renamed from "new": a C++
+ * reserved word that breaks -Wc++-compat / C++ inclusion. */
+FlxColorSpaceConverter *
+flx_colorspace_converter_new (gint width, gint height)
+{
+ FlxColorSpaceConverter *conv = g_malloc (sizeof (FlxColorSpaceConverter));
+
+ conv->width = width;
+ conv->height = height;
+
+ memset (conv->palvec, 0, sizeof (conv->palvec));
+ return conv;
+}
+
+/* Release a converter allocated by flx_colorspace_converter_new(). */
+void
+flx_colorspace_converter_destroy (FlxColorSpaceConverter * flxpal)
+{
+ g_return_if_fail (flxpal != NULL);
+
+ g_free (flxpal);
+}
+
+/* Expand one 8-bit paletted frame in @src to 4 bytes per pixel in
+ * @dest using the converter's palette. Byte order is x,R,G,B on big
+ * endian and B,G,R,x on little endian, i.e. a native-endian 32-bit
+ * xRGB pixel. @src and @dest must not alias; @dest must hold
+ * width * height * 4 bytes. */
+void
+flx_colorspace_convert (FlxColorSpaceConverter * flxpal, guchar * src,
+ guchar * dest)
+{
+ guint size, col;
+
+ g_return_if_fail (flxpal != NULL);
+ g_return_if_fail (src != dest);
+
+
+ size = flxpal->width * flxpal->height;
+
+ while (size--) {
+ /* each palette slot holds 3 consecutive bytes: R, G, B */
+ col = (*src++ * 3);
+
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+ *dest++ = 0;
+ *dest++ = flxpal->palvec[col];
+ *dest++ = flxpal->palvec[col + 1];
+ *dest++ = flxpal->palvec[col + 2];
+#else
+ *dest++ = flxpal->palvec[col + 2];
+ *dest++ = flxpal->palvec[col + 1];
+ *dest++ = flxpal->palvec[col];
+ *dest++ = 0;
+#endif
+ }
+
+}
+
+
+/* Copy @num palette entries (3 bytes each) from @newpal into the table
+ * starting at slot @start; entries past slot 255 are clipped. A
+ * non-zero @scale left-shifts every component by that many bits --
+ * presumably to widen 6-bit FLC palette values to 8 bits (scale 2);
+ * TODO(review) confirm against the FLC palette-chunk parser. */
+void
+flx_set_palette_vector (FlxColorSpaceConverter * flxpal, guint start, guint num,
+ guchar * newpal, gint scale)
+{
+ guint grab;
+
+ g_return_if_fail (flxpal != NULL);
+ g_return_if_fail (start < 0x100);
+
+ /* clip the copy so it never runs past the 256-entry table */
+ grab = ((start + num) > 0x100 ? 0x100 - start : num);
+
+ if (scale) {
+ gint i = 0;
+
+ start *= 3;
+ while (grab) {
+ flxpal->palvec[start++] = newpal[i++] << scale;
+ flxpal->palvec[start++] = newpal[i++] << scale;
+ flxpal->palvec[start++] = newpal[i++] << scale;
+ grab--;
+ }
+ } else {
+ memcpy (&flxpal->palvec[start * 3], newpal, grab * 3);
+ }
+}
+
+/* Set a single palette slot @colr (0..255) to the given R/G/B
+ * components, each left-shifted by @scale bits (see
+ * flx_set_palette_vector for the scale convention). */
+void
+flx_set_color (FlxColorSpaceConverter * flxpal, guint colr, guint red,
+ guint green, guint blue, gint scale)
+{
+
+ g_return_if_fail (flxpal != NULL);
+ g_return_if_fail (colr < 0x100);
+
+ flxpal->palvec[(colr * 3)] = red << scale;
+ flxpal->palvec[(colr * 3) + 1] = green << scale;
+ flxpal->palvec[(colr * 3) + 2] = blue << scale;
+}
diff --git a/gst/flx/flx_color.h b/gst/flx/flx_color.h
new file mode 100644
index 0000000000..fd36ab7dc2
--- /dev/null
+++ b/gst/flx/flx_color.h
@@ -0,0 +1,52 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __FLX_COLOR_H__
+#define __FLX_COLOR_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+/* Colorspace identifiers (8-bit paletted vs. 32-bit RGB). */
+typedef enum {
+  FLX_COLORSPACE_RGB8,
+  FLX_COLORSPACE_RGB32,
+} FlxColorSpaceType;
+
+
+typedef struct _FlxColorSpaceConverter FlxColorSpaceConverter;
+
+/* State for converting 8-bit paletted frames to 32-bit RGB pixels. */
+struct _FlxColorSpaceConverter {
+  guint width;          /* frame width in pixels */
+  guint height;         /* frame height in pixels */
+  guchar palvec[768];   /* palette: 256 entries x 3 components (RGB) */
+};
+
+/* Free a converter created with flx_colorspace_converter_new(). */
+void flx_colorspace_converter_destroy(FlxColorSpaceConverter *flxpal);
+/* Convert one 8-bit paletted frame in src to 32-bit pixels in dest
+ * (dest must hold width * height * 4 bytes; src must not alias dest). */
+void flx_colorspace_convert(FlxColorSpaceConverter *flxpal, guchar *src, guchar *dest);
+/* Allocate a converter for width x height frames; palette starts zeroed. */
+FlxColorSpaceConverter * flx_colorspace_converter_new(gint width, gint height);
+
+/* Load num RGB triplets from newpal into the palette starting at entry
+ * start; components are left-shifted by scale bits when scale != 0. */
+void flx_set_palette_vector(FlxColorSpaceConverter *flxpal, guint start, guint num,
+                            guchar *newpal, gint scale);
+/* Set a single palette entry; components left-shifted by scale bits. */
+void flx_set_color(FlxColorSpaceConverter *flxpal, guint colr, guint red, guint green,
+                   guint blue, gint scale);
+
+G_END_DECLS
+
+#endif /* __FLX_COLOR_H__ */
diff --git a/gst/flx/flx_fmt.h b/gst/flx/flx_fmt.h
new file mode 100644
index 0000000000..abff200d4f
--- /dev/null
+++ b/gst/flx/flx_fmt.h
@@ -0,0 +1,128 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_FLX_FMT_H__
+#define __GST_FLX_FMT_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+/* Chunk type identifiers as they appear in FLI/FLC/FLX streams. */
+enum Flx_TypeChunk
+{
+  /* frame chunks */
+  FLX_PREFIX_TYPE = 0xf100,
+  FLX_SCRIPT_CHUNK = 0xf1e0,
+  FLX_FRAME_TYPE = 0xf1fa,
+  FLX_SEGMENT_TABLE = 0xf1fb,
+  FLX_HUFFMAN_TABLE = 0xf1fc,
+
+  /* sub chunks */
+  FLX_CEL_DATA = 3,
+  FLX_COLOR256 = 4,       /* 256-entry palette, 8-bit components */
+  FLX_SS2 = 7,            /* FLC word-oriented delta (decoded by flx_decode_delta_flc) */
+  FLX_COLOR64 = 11,       /* 64-entry palette, 6-bit components (scaled up) */
+  FLX_LC = 12,            /* FLI byte-oriented delta (decoded by flx_decode_delta_fli) */
+  FLX_BLACK = 13,         /* clear the whole frame to color 0 */
+  FLX_BRUN = 15,          /* byte-run-length full frame */
+  FLX_COPY = 16,
+  FLX_MINI = 18,
+  FLX_DTA_RUN = 25,
+  FLX_DTA_COPY = 26,
+  FLX_DTA_LC = 27,
+  FLX_LABEL = 31,
+  FLX_BMP_MASK = 32,
+  FLX_MLEV_MASK = 33,
+  FLX_SEGMENT = 34,
+  FLX_KEY_IMAGE = 35,
+  FLX_KEY_PAL = 36,
+  FLX_REGION = 37,
+  FLX_WAVE = 38,
+  FLX_USERSTRING = 39,
+  FLX_RGN_MASK = 40
+
+};
+
+/* Magic values of the 'type' field in the main file header. */
+enum Flx_MagicHdr
+{
+  FLX_MAGICHDR_FLI = 0xaf11,
+  FLX_MAGICHDR_FLC = 0xaf12,
+  FLX_MAGICHDR_FLX = 0xaf44,
+  FLX_MAGICHDR_HUFFBWT = 0xaf30
+};
+
+/* Main file header; fields are stored little-endian in the stream and
+ * are read field-by-field in _read_flx_header(). */
+typedef struct _FlxHeader
+{
+  guint32 size;
+  guint16 type;
+  guint16 frames;
+  guint16 width,height,depth,flags;
+  guint32 speed;          /* frame delay: jiffies (1/70 s) for FLI, ms for FLC */
+  guint16 reserved1;
+  /* FLC */
+  guint32 created,creator,updated,updater;
+  guint16 aspect_dx, aspect_dy;
+  /* EGI */
+  guint16 ext_flags,keyframes,totalframes;
+  guint32 req_memory;
+  guint16 max_regions,transp_num;
+  guchar reserved2[24];
+  /* FLC */
+  guint32 oframe1,oframe2;
+  guchar reserved3[40];
+} FlxHeader;
+/* number of bytes the header occupies in the stream (the C struct may
+ * be padded differently, hence the explicit constant) */
+#define FlxHeaderSize 128
+
+/* Common header of every chunk: 32-bit size followed by 16-bit id. */
+typedef struct _FlxFrameChunk
+{
+  guint32 size;
+  guint16 id;
+} FlxFrameChunk;
+/* on-stream size of the chunk header */
+#define FlxFrameChunkSize 6
+
+typedef struct _FlxPrefixChunk
+{
+  guint16 chunks;
+  guchar reserved[8];
+} FlxPrefixChunk;
+
+typedef struct _FlxSegmentTable
+{
+  guint16 segments;
+} FlxSegmentTable;
+
+typedef struct _FlxHuffmanTable
+{
+  guint16 codelength;
+  guint16 numcodes;
+  guchar reserved[6];
+} FlxHuffmanTable;
+
+/* Payload header of a FLX_FRAME_TYPE chunk. */
+typedef struct _FlxFrameType
+{
+  guint16 chunks;
+  guint16 delay;
+  guchar reserved[6];
+} FlxFrameType;
+#define FlxFrameTypeSize 10
+
+G_END_DECLS
+
+#endif /* __GST_FLX_FMT_H__ */
diff --git a/gst/flx/gstflxdec.c b/gst/flx/gstflxdec.c
new file mode 100644
index 0000000000..1016876885
--- /dev/null
+++ b/gst/flx/gstflxdec.c
@@ -0,0 +1,1000 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@temple-baptist.com>
+ * Copyright (C) <2016> Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-flxdec
+ * @title: flxdec
+ *
+ * This element decodes fli/flc/flx-video into raw video
+ */
+/*
+ * http://www.coolutils.com/Formats/FLI
+ * http://woodshole.er.usgs.gov/operations/modeling/flc.html
+ * http://www.compuphase.com/flic.htm
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <string.h>
+
+#include "flx_fmt.h"
+#include "gstflxdec.h"
+#include <gst/video/video.h>
+
+#define JIFFIE (GST_SECOND/70)
+
+GST_DEBUG_CATEGORY_STATIC (flxdec_debug);
+#define GST_CAT_DEFAULT flxdec_debug
+
+/* input */
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-fli")
+ );
+
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+#define RGB_ORDER "xRGB"
+#else
+#define RGB_ORDER "BGRx"
+#endif
+
+/* output */
+static GstStaticPadTemplate src_video_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (RGB_ORDER))
+ );
+
+static void gst_flxdec_dispose (GstFlxDec * flxdec);
+
+static GstFlowReturn gst_flxdec_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_flxdec_sink_event_handler (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+
+static GstStateChangeReturn gst_flxdec_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_flxdec_src_query_handler (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+static gboolean flx_decode_color (GstFlxDec * flxdec, GstByteReader * reader,
+ GstByteWriter * writer, gint scale);
+static gboolean flx_decode_brun (GstFlxDec * flxdec,
+ GstByteReader * reader, GstByteWriter * writer);
+static gboolean flx_decode_delta_fli (GstFlxDec * flxdec,
+ GstByteReader * reader, GstByteWriter * writer);
+static gboolean flx_decode_delta_flc (GstFlxDec * flxdec,
+ GstByteReader * reader, GstByteWriter * writer);
+
+#define rndalign(off) ((off) + ((off) & 1))
+
+#define gst_flxdec_parent_class parent_class
+G_DEFINE_TYPE (GstFlxDec, gst_flxdec, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (flxdec, "flxdec",
+ GST_RANK_PRIMARY, GST_TYPE_FLXDEC);
+
+static void
+gst_flxdec_class_init (GstFlxDecClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  /* NOTE(review): G_DEFINE_TYPE already initializes parent_class (it is
+   * aliased to gst_flxdec_parent_class above), so this assignment looks
+   * redundant — confirm before removing. */
+  parent_class = g_type_class_peek_parent (klass);
+
+  gobject_class->dispose = (GObjectFinalizeFunc) gst_flxdec_dispose;
+
+  GST_DEBUG_CATEGORY_INIT (flxdec_debug, "flxdec", 0, "FLX video decoder");
+
+  gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_flxdec_change_state);
+
+  gst_element_class_set_static_metadata (gstelement_class, "FLX video decoder",
+      "Codec/Decoder/Video",
+      "FLC/FLI/FLX video decoder",
+      "Sepp Wijnands <mrrazz@garbage-coderz.net>, Zeeshan Ali <zeenix@gmail.com>");
+  gst_element_class_add_pad_template (gstelement_class,
+      gst_static_pad_template_get (&sink_factory));
+  gst_element_class_add_pad_template (gstelement_class,
+      gst_static_pad_template_get (&src_video_factory));
+}
+
+static void
+gst_flxdec_init (GstFlxDec * flxdec)
+{
+  /* sink pad: receives the raw fli/flc byte stream */
+  flxdec->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
+  gst_element_add_pad (GST_ELEMENT (flxdec), flxdec->sinkpad);
+  gst_pad_set_chain_function (flxdec->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_flxdec_chain));
+  gst_pad_set_event_function (flxdec->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_flxdec_sink_event_handler));
+
+  /* src pad: pushes decoded 32-bit RGB frames */
+  flxdec->srcpad = gst_pad_new_from_static_template (&src_video_factory, "src");
+  gst_element_add_pad (GST_ELEMENT (flxdec), flxdec->srcpad);
+  gst_pad_set_query_function (flxdec->srcpad,
+      GST_DEBUG_FUNCPTR (gst_flxdec_src_query_handler));
+
+  gst_pad_use_fixed_caps (flxdec->srcpad);
+
+  /* accumulates input until a complete header/frame is available */
+  flxdec->adapter = gst_adapter_new ();
+}
+
+static void
+gst_flxdec_dispose (GstFlxDec * flxdec)
+{
+  /* Release the input adapter.  g_clear_object() unrefs and NULLs the
+   * pointer in one step and is a no-op when already NULL, which keeps
+   * dispose safe to run more than once (the GObject contract). */
+  g_clear_object (&flxdec->adapter);
+
+  G_OBJECT_CLASS (parent_class)->dispose ((GObject *) flxdec);
+}
+
+/* Answer downstream queries on the src pad.  Only TIME-format duration
+ * queries are handled here (using the duration computed from the file
+ * header); everything else is delegated to the default handler. */
+static gboolean
+gst_flxdec_src_query_handler (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  GstFlxDec *flxdec = (GstFlxDec *) parent;
+  gboolean ret = FALSE;
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_DURATION:
+    {
+      GstFormat format;
+
+      gst_query_parse_duration (query, &format, NULL);
+
+      /* duration is only known in time units */
+      if (format != GST_FORMAT_TIME)
+        goto done;
+
+      gst_query_set_duration (query, format, flxdec->duration);
+
+      ret = TRUE;
+      /* explicit break: the original fell through into default, which
+       * happened to be harmless but trips -Wimplicit-fallthrough */
+      break;
+    }
+    default:
+      break;
+  }
+done:
+  if (!ret)
+    ret = gst_pad_query_default (pad, parent, query);
+
+  return ret;
+}
+
+static gboolean
+gst_flxdec_sink_event_handler (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstFlxDec *flxdec;
+  gboolean ret;
+
+  flxdec = GST_FLXDEC (parent);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEGMENT:
+    {
+      /* store the segment; non-TIME segments are rewritten as TIME
+       * because output buffers are timestamped in time units */
+      gst_event_copy_segment (event, &flxdec->segment);
+      if (flxdec->segment.format != GST_FORMAT_TIME) {
+        GST_DEBUG_OBJECT (flxdec, "generating TIME segment");
+        gst_segment_init (&flxdec->segment, GST_FORMAT_TIME);
+        gst_event_unref (event);
+        event = gst_event_new_segment (&flxdec->segment);
+      }
+
+      if (gst_pad_has_current_caps (flxdec->srcpad)) {
+        ret = gst_pad_event_default (pad, parent, event);
+      } else {
+        /* no caps yet: hold the segment back until the file header has
+         * been parsed and caps are set (see gst_flxdec_chain) */
+        flxdec->need_segment = TRUE;
+        gst_event_unref (event);
+        ret = TRUE;
+      }
+      break;
+    }
+    case GST_EVENT_FLUSH_STOP:
+      gst_segment_init (&flxdec->segment, GST_FORMAT_UNDEFINED);
+      ret = gst_pad_event_default (pad, parent, event);
+      break;
+    default:
+      ret = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+
+  return ret;
+}
+
+/* Decode @n_chunks sub-chunks of a FLX_FRAME_TYPE chunk from @reader,
+ * applying each one to the frame image assembled in @writer.
+ * Returns FALSE on malformed input. */
+static gboolean
+flx_decode_chunks (GstFlxDec * flxdec, gulong n_chunks, GstByteReader * reader,
+    GstByteWriter * writer)
+{
+  gboolean ret = TRUE;
+
+  while (n_chunks--) {
+    GstByteReader chunk;
+    guint32 size;
+    guint16 type;
+
+    if (!gst_byte_reader_get_uint32_le (reader, &size))
+      goto parse_error;
+    if (!gst_byte_reader_get_uint16_le (reader, &type))
+      goto parse_error;
+    GST_LOG_OBJECT (flxdec, "chunk has type 0x%02x size %d", type, size);
+
+    /* guard the unsigned subtraction below: a declared size smaller
+     * than the chunk header itself would underflow to a huge value */
+    if (size < FlxFrameChunkSize) {
+      GST_ERROR_OBJECT (flxdec, "Incorrect size in the chunk header");
+      goto error;
+    }
+
+    if (!gst_byte_reader_get_sub_reader (reader, &chunk,
+            size - FlxFrameChunkSize)) {
+      GST_ERROR_OBJECT (flxdec, "Incorrect size in the chunk header");
+      goto error;
+    }
+
+    switch (type) {
+      case FLX_COLOR64:
+        /* 6-bit palette components: shift left by 2 to widen to 8 bit */
+        ret = flx_decode_color (flxdec, &chunk, writer, 2);
+        break;
+
+      case FLX_COLOR256:
+        ret = flx_decode_color (flxdec, &chunk, writer, 0);
+        break;
+
+      case FLX_BRUN:
+        ret = flx_decode_brun (flxdec, &chunk, writer);
+        break;
+
+      case FLX_LC:
+        ret = flx_decode_delta_fli (flxdec, &chunk, writer);
+        break;
+
+      case FLX_SS2:
+        ret = flx_decode_delta_flc (flxdec, &chunk, writer);
+        break;
+
+      case FLX_BLACK:
+        /* clear the whole frame to palette index 0 */
+        ret = gst_byte_writer_fill (writer, 0, flxdec->size);
+        break;
+
+      case FLX_MINI:
+        break;
+
+      default:
+        GST_WARNING ("Unimplemented chunk type: 0x%02x size: %d - skipping",
+            type, size);
+        break;
+    }
+
+    if (!ret)
+      break;
+  }
+
+  return ret;
+
+parse_error:
+  GST_ERROR_OBJECT (flxdec, "Failed to decode chunk");
+error:
+  return FALSE;
+}
+
+
+static gboolean
+flx_decode_color (GstFlxDec * flxdec, GstByteReader * reader,
+ GstByteWriter * writer, gint scale)
+{
+ guint8 count, indx;
+ guint16 packs;
+
+ if (!gst_byte_reader_get_uint16_le (reader, &packs))
+ goto error;
+ indx = 0;
+
+ GST_LOG ("GstFlxDec: cmap packs: %d", (guint) packs);
+ while (packs--) {
+ const guint8 *data;
+ guint16 actual_count;
+
+ /* color map index + skip count */
+ if (!gst_byte_reader_get_uint8 (reader, &indx))
+ goto error;
+
+ /* number of rgb triplets */
+ if (!gst_byte_reader_get_uint8 (reader, &count))
+ goto error;
+
+ actual_count = count == 0 ? 256 : count;
+
+ if (!gst_byte_reader_get_data (reader, count * 3, &data))
+ goto error;
+
+ GST_LOG_OBJECT (flxdec, "cmap count: %d (indx: %d)", actual_count, indx);
+ flx_set_palette_vector (flxdec->converter, indx, actual_count,
+ (guchar *) data, scale);
+ }
+
+ return TRUE;
+
+error:
+ GST_ERROR_OBJECT (flxdec, "Error decoding color palette");
+ return FALSE;
+}
+
+/* Decode a FLX_BRUN (byte run length) chunk: a full key frame encoded
+ * line by line as RLE packets.  A non-positive count byte introduces a
+ * literal run of -count bytes, a positive one replicates the next byte
+ * count times. */
+static gboolean
+flx_decode_brun (GstFlxDec * flxdec, GstByteReader * reader,
+    GstByteWriter * writer)
+{
+  gulong lines, row;
+
+  g_return_val_if_fail (flxdec != NULL, FALSE);
+
+  lines = flxdec->hdr.height;
+  while (lines--) {
+    /* packet count.
+     * should not be used anymore, since the flc format can
+     * contain more then 255 RLE packets. we use the frame
+     * width instead.
+     */
+    if (!gst_byte_reader_skip (reader, 1))
+      goto error;
+
+    row = flxdec->hdr.width;
+    while (row) {
+      gint8 count8;
+      guint count;
+
+      if (!gst_byte_reader_get_int8 (reader, &count8))
+        goto error;
+
+      if (count8 <= 0) {
+        const guint8 *data;
+
+        /* literal run.  Negate in int width: the old ABS() on a gint8
+         * overflowed for -128, turning a valid 128-byte literal run
+         * into a bogus negative count. */
+        count = (guint) (-(gint) count8);
+
+        GST_LOG_OBJECT (flxdec, "have literal run of size %u", count);
+
+        if (count > row) {
+          GST_ERROR_OBJECT (flxdec, "Invalid BRUN line detected. "
+              "bytes to write exceeds the end of the row");
+          return FALSE;
+        }
+        row -= count;
+
+        if (!gst_byte_reader_get_data (reader, count, &data))
+          goto error;
+        if (!gst_byte_writer_put_data (writer, data, count))
+          goto error;
+      } else {
+        guint8 x;
+
+        count = (guint) count8;
+
+        GST_LOG_OBJECT (flxdec, "have replicate run of size %u", count);
+
+        if (count > row) {
+          GST_ERROR_OBJECT (flxdec, "Invalid BRUN packet detected."
+              "bytes to write exceeds the end of the row");
+          return FALSE;
+        }
+
+        /* replicate run */
+        row -= count;
+
+        if (!gst_byte_reader_get_uint8 (reader, &x))
+          goto error;
+        if (!gst_byte_writer_fill (writer, x, count))
+          goto error;
+      }
+    }
+  }
+
+  return TRUE;
+
+error:
+  GST_ERROR_OBJECT (flxdec, "Failed to decode BRUN packet");
+  return FALSE;
+}
+
+/* Decode a FLX_LC (FLI line compressed) delta chunk on top of the
+ * previous frame.  Within a line packet, a negative RLE count means
+ * "replicate one byte -count times", a non-negative one means "copy
+ * count literal bytes" — the original comments and log messages had
+ * the two labels swapped (the code itself was correct). */
+static gboolean
+flx_decode_delta_fli (GstFlxDec * flxdec, GstByteReader * reader,
+    GstByteWriter * writer)
+{
+  guint16 start_line, lines;
+  guint line_start_i;
+
+  g_return_val_if_fail (flxdec != NULL, FALSE);
+  g_return_val_if_fail (flxdec->delta_data != NULL, FALSE);
+
+  /* use last frame for delta */
+  if (!gst_byte_writer_put_data (writer, flxdec->delta_data, flxdec->size))
+    goto error;
+
+  if (!gst_byte_reader_get_uint16_le (reader, &start_line))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &lines))
+    goto error;
+  GST_LOG_OBJECT (flxdec, "height %d start line %d line count %d",
+      flxdec->hdr.height, start_line, lines);
+
+  if (start_line + lines > flxdec->hdr.height) {
+    GST_ERROR_OBJECT (flxdec, "Invalid FLI packet detected. too many lines.");
+    return FALSE;
+  }
+
+  line_start_i = flxdec->hdr.width * start_line;
+  if (!gst_byte_writer_set_pos (writer, line_start_i))
+    goto error;
+
+  while (lines--) {
+    guint8 packets;
+
+    /* packet count */
+    if (!gst_byte_reader_get_uint8 (reader, &packets))
+      goto error;
+    GST_LOG_OBJECT (flxdec, "have %d packets", packets);
+
+    while (packets--) {
+      /* skip count */
+      guint8 skip;
+      gint8 count;
+      if (!gst_byte_reader_get_uint8 (reader, &skip))
+        goto error;
+
+      /* skip bytes */
+      if (!gst_byte_writer_set_pos (writer,
+              gst_byte_writer_get_pos (writer) + skip))
+        goto error;
+
+      /* RLE count */
+      if (!gst_byte_reader_get_int8 (reader, &count))
+        goto error;
+
+      if (count < 0) {
+        guint8 x;
+
+        /* replicate run */
+        count = ABS (count);
+        GST_LOG_OBJECT (flxdec, "have replicate run of size %d at offset %d",
+            count, skip);
+
+        if (skip + count > flxdec->hdr.width) {
+          GST_ERROR_OBJECT (flxdec, "Invalid FLI packet detected. "
+              "line too long.");
+          return FALSE;
+        }
+
+        if (!gst_byte_reader_get_uint8 (reader, &x))
+          goto error;
+        if (!gst_byte_writer_fill (writer, x, count))
+          goto error;
+      } else {
+        const guint8 *data;
+
+        /* literal run */
+        GST_LOG_OBJECT (flxdec, "have literal run of size %d at offset %d",
+            count, skip);
+
+        if (skip + count > flxdec->hdr.width) {
+          GST_ERROR_OBJECT (flxdec, "Invalid FLI packet detected. "
+              "line too long.");
+          return FALSE;
+        }
+
+        if (!gst_byte_reader_get_data (reader, count, &data))
+          goto error;
+        if (!gst_byte_writer_put_data (writer, data, count))
+          goto error;
+      }
+    }
+    line_start_i += flxdec->hdr.width;
+    if (!gst_byte_writer_set_pos (writer, line_start_i))
+      goto error;
+  }
+
+  return TRUE;
+
+error:
+  GST_ERROR_OBJECT (flxdec, "Failed to decode FLI packet");
+  return FALSE;
+}
+
+/* Decode a FLX_SS2 (FLC word-oriented) delta chunk on top of the
+ * previous frame.  Opcodes with the two top bits set skip lines, those
+ * with only the top bit set store the last pixel of a line, and plain
+ * opcodes give the packet count for the current line. */
+static gboolean
+flx_decode_delta_flc (GstFlxDec * flxdec, GstByteReader * reader,
+    GstByteWriter * writer)
+{
+  guint16 lines, start_l;
+
+  g_return_val_if_fail (flxdec != NULL, FALSE);
+  g_return_val_if_fail (flxdec->delta_data != NULL, FALSE);
+
+  /* use last frame for delta */
+  if (!gst_byte_writer_put_data (writer, flxdec->delta_data, flxdec->size))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &lines))
+    goto error;
+
+  if (lines > flxdec->hdr.height) {
+    GST_ERROR_OBJECT (flxdec, "Invalid FLC packet detected. too many lines.");
+    return FALSE;
+  }
+
+  start_l = lines;
+
+  while (lines) {
+    guint16 opcode;
+
+    /* seek to the start of the current line */
+    if (!gst_byte_writer_set_pos (writer,
+            flxdec->hdr.width * (start_l - lines)))
+      goto error;
+
+    /* process opcode(s) */
+    while (TRUE) {
+      if (!gst_byte_reader_get_uint16_le (reader, &opcode))
+        goto error;
+      if ((opcode & 0xc000) == 0)
+        break;
+
+      if ((opcode & 0xc000) == 0xc000) {
+        /* line skip count */
+        gulong skip = (0x10000 - opcode);
+        if (skip > flxdec->hdr.height) {
+          GST_ERROR_OBJECT (flxdec, "Invalid FLC packet detected. "
+              "skip line count too big.");
+          return FALSE;
+        }
+        start_l += skip;
+        if (!gst_byte_writer_set_pos (writer,
+                gst_byte_writer_get_pos (writer) + flxdec->hdr.width * skip))
+          goto error;
+      } else {
+        /* last pixel */
+        if (!gst_byte_writer_set_pos (writer,
+                gst_byte_writer_get_pos (writer) + flxdec->hdr.width))
+          goto error;
+        if (!gst_byte_writer_put_uint8 (writer, opcode & 0xff))
+          goto error;
+      }
+    }
+
+    /* last opcode is the packet count */
+    GST_LOG_OBJECT (flxdec, "have %d packets", opcode);
+    while (opcode--) {
+      /* skip count */
+      guint8 skip;
+      gint8 count;
+
+      if (!gst_byte_reader_get_uint8 (reader, &skip))
+        goto error;
+      if (!gst_byte_writer_set_pos (writer,
+              gst_byte_writer_get_pos (writer) + skip))
+        goto error;
+
+      /* RLE count */
+      if (!gst_byte_reader_get_int8 (reader, &count))
+        goto error;
+
+      if (count < 0) {
+        guint16 x;
+
+        /* replicate word run */
+        count = ABS (count);
+
+        GST_LOG_OBJECT (flxdec, "have replicate run of size %d at offset %d",
+            count, skip);
+
+        if (skip + count > flxdec->hdr.width) {
+          GST_ERROR_OBJECT (flxdec, "Invalid FLC packet detected. "
+              "line too long.");
+          return FALSE;
+        }
+
+        if (!gst_byte_reader_get_uint16_le (reader, &x))
+          goto error;
+
+        while (count--) {
+          if (!gst_byte_writer_put_uint16_le (writer, x)) {
+            goto error;
+          }
+        }
+      } else {
+        GST_LOG_OBJECT (flxdec, "have literal run of size %d at offset %d",
+            count, skip);
+
+        if (skip + count > flxdec->hdr.width) {
+          GST_ERROR_OBJECT (flxdec, "Invalid FLC packet detected. "
+              "line too long.");
+          return FALSE;
+        }
+
+        while (count--) {
+          guint16 x;
+
+          if (!gst_byte_reader_get_uint16_le (reader, &x))
+            goto error;
+          if (!gst_byte_writer_put_uint16_le (writer, x))
+            goto error;
+        }
+      }
+    }
+    lines--;
+  }
+
+  return TRUE;
+
+error:
+  /* fixed copy-paste: this is the FLC decoder, not FLI */
+  GST_ERROR_OBJECT (flxdec, "Failed to decode FLC packet");
+  return FALSE;
+}
+
+/* Parse the 128-byte little-endian file header from @reader into
+ * @flxh, field by field (the on-disk layout is packed; reading
+ * per-field avoids struct padding/endianness issues).  Returns FALSE
+ * if the stream is truncated or the declared file size is too small. */
+static gboolean
+_read_flx_header (GstFlxDec * flxdec, GstByteReader * reader, FlxHeader * flxh)
+{
+  memset (flxh, 0, sizeof (*flxh));
+
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->size))
+    goto error;
+  if (flxh->size < FlxHeaderSize) {
+    GST_ERROR_OBJECT (flxdec, "Invalid file size in the header");
+    return FALSE;
+  }
+
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->type))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->frames))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->width))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->height))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->depth))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->flags))
+    goto error;
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->speed))
+    goto error;
+  if (!gst_byte_reader_skip (reader, 2))        /* reserved */
+    goto error;
+  /* FLC */
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->created))
+    goto error;
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->creator))
+    goto error;
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->updated))
+    goto error;
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->updater))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->aspect_dx))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->aspect_dy))
+    goto error;
+  /* EGI */
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->ext_flags))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->keyframes))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->totalframes))
+    goto error;
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->req_memory))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->max_regions))
+    goto error;
+  if (!gst_byte_reader_get_uint16_le (reader, &flxh->transp_num))
+    goto error;
+  if (!gst_byte_reader_skip (reader, 24))       /* reserved */
+    goto error;
+  /* FLC */
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->oframe1))
+    goto error;
+  if (!gst_byte_reader_get_uint32_le (reader, &flxh->oframe2))
+    goto error;
+  if (!gst_byte_reader_skip (reader, 40))       /* reserved */
+    goto error;
+
+  return TRUE;
+
+error:
+  GST_ERROR_OBJECT (flxdec, "Error reading file header");
+  return FALSE;
+}
+
+/* Sink pad chain function: accumulates input in the adapter, parses
+ * the 128-byte file header first (setting caps and timing), then
+ * decodes complete FLX_FRAME_TYPE chunks into 32-bit RGB buffers and
+ * pushes them downstream.  The header and frame branches are mutually
+ * exclusive, so frame data arriving together with the header is
+ * decoded on the following chain call. */
+static GstFlowReturn
+gst_flxdec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstByteReader reader;
+  GstBuffer *input;
+  GstMapInfo map_info;
+  GstCaps *caps;
+  guint available;
+  GstFlowReturn res = GST_FLOW_OK;
+
+  GstFlxDec *flxdec;
+  FlxHeader *flxh;
+
+  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
+  flxdec = (GstFlxDec *) parent;
+  g_return_val_if_fail (flxdec != NULL, GST_FLOW_ERROR);
+
+  /* map everything gathered so far as one contiguous buffer */
+  gst_adapter_push (flxdec->adapter, buf);
+  available = gst_adapter_available (flxdec->adapter);
+  input = gst_adapter_get_buffer (flxdec->adapter, available);
+  if (!gst_buffer_map (input, &map_info, GST_MAP_READ)) {
+    GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
+        ("%s", "Failed to map buffer"), (NULL));
+    goto error;
+  }
+  gst_byte_reader_init (&reader, map_info.data, map_info.size);
+
+  if (flxdec->state == GST_FLXDEC_READ_HEADER) {
+    if (available >= FlxHeaderSize) {
+      GstByteReader header;
+      GstCaps *templ;
+
+      if (!gst_byte_reader_get_sub_reader (&reader, &header, FlxHeaderSize)) {
+        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
+            ("%s", "Could not read header"), (NULL));
+        goto unmap_input_error;
+      }
+      gst_adapter_flush (flxdec->adapter, FlxHeaderSize);
+      available -= FlxHeaderSize;
+
+      if (!_read_flx_header (flxdec, &header, &flxdec->hdr)) {
+        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
+            ("%s", "Failed to parse header"), (NULL));
+        goto unmap_input_error;
+      }
+
+      flxh = &flxdec->hdr;
+
+      /* check header */
+      if (flxh->type != FLX_MAGICHDR_FLI &&
+          flxh->type != FLX_MAGICHDR_FLC && flxh->type != FLX_MAGICHDR_FLX) {
+        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE, (NULL),
+            ("not a flx file (type %x)", flxh->type));
+        goto unmap_input_error;
+      }
+
+      GST_INFO_OBJECT (flxdec, "size      :  %d", flxh->size);
+      GST_INFO_OBJECT (flxdec, "frames    :  %d", flxh->frames);
+      GST_INFO_OBJECT (flxdec, "width     :  %d", flxh->width);
+      GST_INFO_OBJECT (flxdec, "height    :  %d", flxh->height);
+      GST_INFO_OBJECT (flxdec, "depth     :  %d", flxh->depth);
+      GST_INFO_OBJECT (flxdec, "speed     :  %d", flxh->speed);
+
+      flxdec->next_time = 0;
+
+      /* FLI stores speed in 1/70 s jiffies, FLC in milliseconds
+       * (0 meaning the 70 Hz default) */
+      if (flxh->type == FLX_MAGICHDR_FLI) {
+        flxdec->frame_time = JIFFIE * flxh->speed;
+      } else if (flxh->speed == 0) {
+        flxdec->frame_time = GST_SECOND / 70;
+      } else {
+        flxdec->frame_time = flxh->speed * GST_MSECOND;
+      }
+
+      flxdec->duration = flxh->frames * flxdec->frame_time;
+      GST_LOG ("duration   :  %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (flxdec->duration));
+
+      templ = gst_pad_get_pad_template_caps (flxdec->srcpad);
+      caps = gst_caps_copy (templ);
+      gst_caps_unref (templ);
+      /* NOTE(review): the cast binds tighter than the division, so
+       * (gint) flxdec->frame_time truncates the 64-bit nanosecond
+       * value before the / 1000; frame times above ~2.1 s would
+       * overflow — consider (gint) (flxdec->frame_time / 1000).
+       * TODO confirm. */
+      gst_caps_set_simple (caps,
+          "width", G_TYPE_INT, flxh->width,
+          "height", G_TYPE_INT, flxh->height,
+          "framerate", GST_TYPE_FRACTION, (gint) GST_MSECOND,
+          (gint) flxdec->frame_time / 1000, NULL);
+
+      gst_pad_set_caps (flxdec->srcpad, caps);
+      gst_caps_unref (caps);
+
+      /* push the segment that was held back until caps were ready */
+      if (flxdec->need_segment) {
+        gst_pad_push_event (flxdec->srcpad,
+            gst_event_new_segment (&flxdec->segment));
+        flxdec->need_segment = FALSE;
+      }
+
+      /* zero means 8 */
+      if (flxh->depth == 0)
+        flxh->depth = 8;
+
+      if (flxh->depth != 8) {
+        GST_ELEMENT_ERROR (flxdec, STREAM, WRONG_TYPE,
+            ("%s", "Don't know how to decode non 8 bit depth streams"), (NULL));
+        goto unmap_input_error;
+      }
+
+      flxdec->converter =
+          flx_colorspace_converter_new (flxh->width, flxh->height);
+
+      if (flxh->type == FLX_MAGICHDR_FLC || flxh->type == FLX_MAGICHDR_FLX) {
+        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dx :  %d", flxh->aspect_dx);
+        GST_INFO_OBJECT (flxdec, "(FLC) aspect_dy :  %d", flxh->aspect_dy);
+        GST_INFO_OBJECT (flxdec, "(FLC) oframe1   :  0x%08x", flxh->oframe1);
+        GST_INFO_OBJECT (flxdec, "(FLC) oframe2   :  0x%08x", flxh->oframe2);
+      }
+
+      /* size of one 8-bit frame; guard the *4 expansion below */
+      flxdec->size = ((guint) flxh->width * (guint) flxh->height);
+      if (flxdec->size >= G_MAXSIZE / 4) {
+        GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
+            ("%s", "Cannot allocate required memory"), (NULL));
+        goto unmap_input_error;
+      }
+
+      /* create delta and output frame */
+      flxdec->frame_data = g_malloc0 (flxdec->size);
+      flxdec->delta_data = g_malloc0 (flxdec->size);
+
+      flxdec->state = GST_FLXDEC_PLAYING;
+    }
+  } else if (flxdec->state == GST_FLXDEC_PLAYING) {
+    GstBuffer *out;
+
+    /* while we have enough data in the adapter */
+    while (available >= FlxFrameChunkSize && res == GST_FLOW_OK) {
+      guint32 size;
+      guint16 type;
+
+      if (!gst_byte_reader_get_uint32_le (&reader, &size))
+        goto parse_error;
+      if (available < size)
+        goto need_more_data;
+
+      available -= size;
+      gst_adapter_flush (flxdec->adapter, size);
+
+      if (!gst_byte_reader_get_uint16_le (&reader, &type))
+        goto parse_error;
+
+      switch (type) {
+        case FLX_FRAME_TYPE:{
+          GstByteReader chunks;
+          GstByteWriter writer;
+          guint16 n_chunks;
+          GstMapInfo map;
+
+          GST_LOG_OBJECT (flxdec, "Have frame type 0x%02x of size %d", type,
+              size);
+
+          if (!gst_byte_reader_get_sub_reader (&reader, &chunks,
+                  size - FlxFrameChunkSize))
+            goto parse_error;
+
+          if (!gst_byte_reader_get_uint16_le (&chunks, &n_chunks))
+            goto parse_error;
+          GST_LOG_OBJECT (flxdec, "Have %d chunks", n_chunks);
+
+          if (n_chunks == 0)
+            break;
+          if (!gst_byte_reader_skip (&chunks, 8))       /* reserved */
+            goto parse_error;
+
+          /* decode into the persistent 8-bit frame buffer */
+          gst_byte_writer_init_with_data (&writer, flxdec->frame_data,
+              flxdec->size, TRUE);
+
+          /* decode chunks */
+          if (!flx_decode_chunks (flxdec, n_chunks, &chunks, &writer)) {
+            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
+                ("%s", "Could not decode chunk"), NULL);
+            goto unmap_input_error;
+          }
+          gst_byte_writer_reset (&writer);
+
+          /* save copy of the current frame for possible delta. */
+          memcpy (flxdec->delta_data, flxdec->frame_data, flxdec->size);
+
+          /* 4 bytes per output pixel */
+          out = gst_buffer_new_and_alloc (flxdec->size * 4);
+          if (!gst_buffer_map (out, &map, GST_MAP_WRITE)) {
+            GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
+                ("%s", "Could not map output buffer"), NULL);
+            gst_buffer_unref (out);
+            goto unmap_input_error;
+          }
+
+          /* convert current frame. */
+          flx_colorspace_convert (flxdec->converter, flxdec->frame_data,
+              map.data);
+          gst_buffer_unmap (out, &map);
+
+          GST_BUFFER_TIMESTAMP (out) = flxdec->next_time;
+          flxdec->next_time += flxdec->frame_time;
+
+          res = gst_pad_push (flxdec->srcpad, out);
+          break;
+        }
+        default:
+          GST_DEBUG_OBJECT (flxdec, "Unknown frame type 0x%02x, skipping %d",
+              type, size);
+          if (!gst_byte_reader_skip (&reader, size - FlxFrameChunkSize))
+            goto parse_error;
+          break;
+      }
+    }
+  }
+
+need_more_data:
+  gst_buffer_unmap (input, &map_info);
+  gst_buffer_unref (input);
+  return res;
+
+  /* ERRORS */
+parse_error:
+  GST_ELEMENT_ERROR (flxdec, STREAM, DECODE,
+      ("%s", "Failed to parse stream"), (NULL));
+unmap_input_error:
+  gst_buffer_unmap (input, &map_info);
+error:
+  gst_buffer_unref (input);
+  return GST_FLOW_ERROR;
+}
+
+/* Standard element state handling: reset the parsing state when going
+ * READY->PAUSED, free per-stream resources when going PAUSED->READY. */
+static GstStateChangeReturn
+gst_flxdec_change_state (GstElement * element, GstStateChange transition)
+{
+  GstFlxDec *flxdec;
+  GstStateChangeReturn ret;
+
+  flxdec = GST_FLXDEC (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* start a fresh stream: drop pending input, re-read the header */
+      gst_adapter_clear (flxdec->adapter);
+      flxdec->state = GST_FLXDEC_READ_HEADER;
+      gst_segment_init (&flxdec->segment, GST_FORMAT_UNDEFINED);
+      flxdec->need_segment = TRUE;
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      /* g_clear_pointer() frees and NULLs in one step, replacing the
+       * manual if/free/NULL triplets; it is a no-op on NULL */
+      g_clear_pointer (&flxdec->frame_data, g_free);
+      g_clear_pointer (&flxdec->delta_data, g_free);
+      g_clear_pointer (&flxdec->converter,
+          (GDestroyNotify) flx_colorspace_converter_destroy);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
+
+/* Register the single "flxdec" element provided by this plugin. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (flxdec, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    flxdec,
+    "FLC/FLI/FLX video decoder",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/flx/gstflxdec.h b/gst/flx/gstflxdec.h
new file mode 100644
index 0000000000..7fa87dfd42
--- /dev/null
+++ b/gst/flx/gstflxdec.h
@@ -0,0 +1,89 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_FLX_DECODER_H__
+#define __GST_FLX_DECODER_H__
+
+#include <gst/gst.h>
+
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/base/gstbytewriter.h>
+#include "flx_color.h"
+
+G_BEGIN_DECLS
+
+/* Parser phase: first the fixed file header, then frame chunks. */
+typedef enum {
+ GST_FLXDEC_READ_HEADER,
+ GST_FLXDEC_PLAYING,
+} GstFlxDecState;
+
+
+/* Definition of structure storing data for this element. */
+typedef struct _GstFlxDec GstFlxDec;
+
+struct _GstFlxDec {
+ GstElement element;
+
+ GstPad *sinkpad, *srcpad;
+
+ /* output segment; need_segment stays TRUE until one has been pushed */
+ GstSegment segment;
+ gboolean need_segment;
+
+ gboolean active, new_meta;
+
+ /* presumably previous-frame (delta source) and current-frame buffers;
+ * allocated during decoding, freed on PAUSED->READY -- confirm in .c */
+ guint8 *delta_data, *frame_data;
+ GstAdapter *adapter;
+ gsize size;
+ GstFlxDecState state;
+ gint64 frame_time;
+ gint64 next_time;
+ gint64 duration;
+
+ /* palette-to-RGB converter, see flx_color.h */
+ FlxColorSpaceConverter *converter;
+
+ /* parsed FLX file header */
+ FlxHeader hdr;
+};
+
+/* Standard definition defining a class for this element. */
+typedef struct _GstFlxDecClass GstFlxDecClass;
+struct _GstFlxDecClass {
+ GstElementClass parent_class;
+};
+
+/* Standard macros for defining types for this element. */
+#define GST_TYPE_FLXDEC \
+ (gst_flxdec_get_type())
+#define GST_FLXDEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FLXDEC,GstFlxDec))
+#define GST_FLXDEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FLXDEC,GstFlxDecClass))
+#define GST_IS_FLXDEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FLXDEC))
+#define GST_IS_FLXDEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FLXDEC))
+
+/* Standard function returning type information. */
+GType gst_flxdec_get_type(void);
+
+GST_ELEMENT_REGISTER_DECLARE (flxdec);
+
+G_END_DECLS
+
+#endif /* __GST_FLX_DECODER_H__ */
diff --git a/gst/flx/meson.build b/gst/flx/meson.build
new file mode 100644
index 0000000000..9994f1c22d
--- /dev/null
+++ b/gst/flx/meson.build
@@ -0,0 +1,10 @@
+# Build the FLC/FLI/FLX decoder plugin from its two sources, install it
+# into the plugin directory, and add it to the global plugins list.
+flxdec = library('gstflxdec',
+ 'gstflxdec.c', 'flx_color.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstvideo_dep, gst_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(flxdec, install_dir : plugins_pkgconfig_install_dir)
+plugins += [flxdec]
diff --git a/gst/goom/README b/gst/goom/README
new file mode 100644
index 0000000000..08af2baae8
--- /dev/null
+++ b/gst/goom/README
@@ -0,0 +1,13 @@
+The Goom plugin is based on the Goom visualization code from
+the Goom homepage found at:
+http://ios.free.fr/?page=projet&quoi=1
+
+Like the original library, the Goom plugin is available under the LGPL license.
+
+This is based on goom2k4, with changes to plugin_info.c and mmx.h to use Orc for CPU
+detection and GStreamer-specific ifdefs for architecture detection.
+
+These files are not in use right now:
+filters_mmx.s
+goomsl*
+surf3d.s
diff --git a/gst/goom/config_param.c b/gst/goom/config_param.c
new file mode 100644
index 0000000000..3a1277f62a
--- /dev/null
+++ b/gst/goom/config_param.c
@@ -0,0 +1,142 @@
+/* Goom Project
+ * Copyright (C) <2003> Jean-Christophe Hoelt <jeko@free.fr>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "goom_config_param.h"
+#include <string.h>
+
+/* No-op callback installed as the safe default for PluginParam hooks. */
+static void
+empty_fct (PluginParam * dummy)
+{
+}
+
+/* Initialize the fields common to every PluginParam: no-op callbacks,
+ * no user data, no name/description, read-write by default. */
+void
+goom_secure_param (PluginParam * p)
+{
+ p->changed = empty_fct;
+ p->change_listener = empty_fct;
+ p->user_data = 0;
+ p->name = p->desc = 0;
+ p->rw = 1;
+}
+
+/* Initialize @p as a named float parameter: value 0.5 in [0.0, 1.0],
+ * step 0.01. (secure_param is presumably a shorthand for
+ * goom_secure_param from goom_config_param.h -- TODO confirm.) */
+void
+goom_secure_f_param (PluginParam * p, const char *name)
+{
+ secure_param (p);
+
+ p->name = name;
+ p->type = PARAM_FLOATVAL;
+ FVAL (*p) = 0.5f;
+ FMIN (*p) = 0.0f;
+ FMAX (*p) = 1.0f;
+ FSTEP (*p) = 0.01f;
+}
+
+/* Like goom_secure_f_param() but read-only: a float value the effect
+ * reports back rather than a user-tunable setting. */
+void
+goom_secure_f_feedback (PluginParam * p, const char *name)
+{
+ secure_f_param (p, name);
+
+ p->rw = 0;
+}
+
+/* Initialize @p as a named string parameter with a NULL value. */
+void
+goom_secure_s_param (PluginParam * p, const char *name)
+{
+ secure_param (p);
+
+ p->name = name;
+ p->type = PARAM_STRVAL;
+ SVAL (*p) = 0;
+}
+
+/* Initialize @p as a named boolean parameter with initial @value. */
+void
+goom_secure_b_param (PluginParam * p, const char *name, int value)
+{
+ secure_param (p);
+
+ p->name = name;
+ p->type = PARAM_BOOLVAL;
+ BVAL (*p) = value;
+}
+
+/* Initialize @p as a named integer parameter: value 50 in [0, 100],
+ * step 1. */
+void
+goom_secure_i_param (PluginParam * p, const char *name)
+{
+ secure_param (p);
+
+ p->name = name;
+ p->type = PARAM_INTVAL;
+ IVAL (*p) = 50;
+ IMIN (*p) = 0;
+ IMAX (*p) = 100;
+ ISTEP (*p) = 1;
+}
+
+/* Like goom_secure_i_param() but read-only (feedback value). */
+void
+goom_secure_i_feedback (PluginParam * p, const char *name)
+{
+ secure_i_param (p, name);
+
+ p->rw = 0;
+}
+
+/* Initialize a named parameter group with room for @nb PluginParam
+ * pointers; the caller fills p->params[]. Freed with
+ * goom_plugin_parameters_free().
+ * NOTE(review): malloc result is not checked; a failed allocation would
+ * crash on first use of p->params. */
+void
+goom_plugin_parameters (PluginParameters * p, const char *name, int nb)
+{
+ p->name = name;
+ p->desc = "";
+ p->nbParams = nb;
+ p->params = malloc (nb * sizeof (PluginParam *));
+}
+
+/* Release the pointer array of a parameter group (not the params
+ * themselves, which are owned by the caller). */
+void
+goom_plugin_parameters_free (PluginParameters * p)
+{
+ free (p->params);
+}
+
+/*---------------------------------------------------------------------------*/
+
+/* Copy @str (including its NUL terminator) into the string value of @p,
+ * (re)allocating the stored buffer to fit.
+ * NOTE(review): realloc/malloc results are unchecked, and the
+ * `p = realloc(p, ...)` pattern would leak the old buffer on failure. */
+void
+goom_set_str_param_value (PluginParam * p, const char *str)
+{
+ int len = strlen (str);
+
+ if (SVAL (*p))
+ SVAL (*p) = (char *) realloc (SVAL (*p), len + 1);
+ else
+ SVAL (*p) = (char *) malloc (len + 1);
+ memcpy (SVAL (*p), str, len + 1);
+}
+
+/* Copy @str into the list value of @p, (re)allocating to fit; same
+ * unchecked-allocation caveat as goom_set_str_param_value(). */
+void
+goom_set_list_param_value (PluginParam * p, const char *str)
+{
+ int len = strlen (str);
+
+#ifdef VERBOSE
+ printf ("%s: %d\n", str, len);
+#endif
+ if (LVAL (*p))
+ LVAL (*p) = (char *) realloc (LVAL (*p), len + 1);
+ else
+ LVAL (*p) = (char *) malloc (len + 1);
+ memcpy (LVAL (*p), str, len + 1);
+}
diff --git a/gst/goom/convolve_fx.c b/gst/goom/convolve_fx.c
new file mode 100644
index 0000000000..980e113a5b
--- /dev/null
+++ b/gst/goom/convolve_fx.c
@@ -0,0 +1,368 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "goom_fx.h"
+#include "goom_plugin_info.h"
+#include "goom_config.h"
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+//#define CONV_MOTIF_W 32
+//#define CONV_MOTIF_WMASK 0x1f
+
+/* Define if you like the wacky GOOM logo: */
+#undef DRAW_MOTIF
+
+#define CONV_MOTIF_W 128
+#define CONV_MOTIF_WMASK 0x7f
+
+typedef char Motif[CONV_MOTIF_W][CONV_MOTIF_W];
+
+#include "motif_goom1.h"
+#include "motif_goom2.h"
+
+#define NB_THETA 512
+
+typedef struct _CONV_DATA
+{
+ PluginParam light;
+ PluginParam factor_adj_p;
+ PluginParam factor_p;
+ PluginParameters params;
+
+ /* rotozoom */
+ int theta;
+ float ftheta;
+ int h_sin[NB_THETA];
+ int h_cos[NB_THETA];
+ int h_height;
+ float visibility;
+ Motif conv_motif;
+ int inverse_motif;
+
+} ConvData;
+
+/* init rotozoom tables */
+/* (Re)build the rotozoom h_sin/h_cos lookup tables, scaled to the current
+ * screen height; entries are 16.16 fixed point (the 0x10000 factor).
+ * Cheap no-op when the height has not changed since the last call. */
+static void
+compute_tables (VisualFX * _this, PluginInfo * info)
+{
+ ConvData *data = (ConvData *) _this->fx_data;
+ double screen_coef;
+ int i;
+ double h;
+ double radian;
+
+ if (data->h_height == info->screen.height)
+ return;
+
+ screen_coef = 2.0 * 300.0 / (double) info->screen.height;
+ data->h_height = info->screen.height;
+
+ for (i = 0; i < NB_THETA; i++) {
+ radian = 2 * i * G_PI / NB_THETA;
+ h = (0.2 + cos (radian) / 15.0 * sin (radian * 2.0 + 12.123)) * screen_coef;
+ data->h_cos[i] = 0x10000 * (-h * cos (radian) * cos (radian));
+ data->h_sin[i] = 0x10000 * (h * sin (radian + 1.57) * sin (radian));
+ }
+}
+
+/* Copy @motif into data->conv_motif, flipping it in both axes
+ * (i.e. rotated 180 degrees). */
+static void
+set_motif (ConvData * data, Motif motif)
+{
+ int i, j;
+
+ for (i = 0; i < CONV_MOTIF_W; ++i)
+ for (j = 0; j < CONV_MOTIF_W; ++j)
+ data->conv_motif[i][j] =
+ motif[CONV_MOTIF_W - i - 1][CONV_MOTIF_W - j - 1];
+}
+
+/* VisualFX init hook: allocate the ConvData state, declare the brightness
+ * and flash-intensity parameters, and precompute the rotozoom tables.
+ * NOTE(review): malloc result is unchecked; params[2] and params[4] are
+ * set to 0 -- presumably NULL separators in the parameter list, confirm
+ * against the parameter-widget code. */
+static void
+convolve_init (VisualFX * _this, PluginInfo * info)
+{
+ ConvData *data;
+
+ data = (ConvData *) malloc (sizeof (ConvData));
+ _this->fx_data = (void *) data;
+
+ secure_f_param (&data->light, "Screen Brightness");
+ data->light.param.fval.max = 300.0f;
+ data->light.param.fval.step = 1.0f;
+ data->light.param.fval.value = 100.0f;
+
+ secure_f_param (&data->factor_adj_p, "Flash Intensity");
+ data->factor_adj_p.param.fval.max = 200.0f;
+ data->factor_adj_p.param.fval.step = 1.0f;
+ data->factor_adj_p.param.fval.value = 70.0f;
+
+ secure_f_feedback (&data->factor_p, "Factor");
+
+ plugin_parameters (&data->params, "Bright Flash", 5);
+ data->params.params[0] = &data->light;
+ data->params.params[1] = &data->factor_adj_p;
+ data->params.params[2] = 0;
+ data->params.params[3] = &data->factor_p;
+ data->params.params[4] = 0;
+
+ /* force compute_tables() to build the tables on first call */
+ data->h_height = 0;
+
+ /* init rotozoom tables */
+ compute_tables (_this, info);
+ data->theta = 0;
+ data->ftheta = 0.0;
+ data->visibility = 1.0;
+ set_motif (data, CONV_MOTIF2);
+ data->inverse_motif = 0;
+
+ _this->params = &data->params;
+}
+
+/* VisualFX teardown hook: release the parameter array then the state. */
+static void
+convolve_free (VisualFX * _this)
+{
+ ConvData *data = (ConvData *) _this->fx_data;
+
+ goom_plugin_parameters_free (&data->params);
+
+ free (_this->fx_data);
+}
+
+#ifdef DRAW_MOTIF
+/* Copy @src to @dest while modulating per-pixel brightness by the rotated
+ * GOOM-logo motif. For each pixel the motif texel (sampled with a 16.16
+ * fixed-point rotation given by h_cos/h_sin[theta]) selects one of 16
+ * precomputed intensity factors (ifftab); the pixel's R/G/B channels are
+ * scaled by that factor, with saturation at 0xFF. Two implementations:
+ * hand-written MMX assembly and a portable C fallback. */
+static void
+create_output_with_brightness (VisualFX * _this, Pixel * src, Pixel * dest,
+ PluginInfo * info, int iff)
+{
+ ConvData *data = (ConvData *) _this->fx_data;
+
+ int x, y;
+ int i = 0; //info->screen.height * info->screen.width - 1;
+
+ const int c = data->h_cos[data->theta];
+ const int s = data->h_sin[data->theta];
+
+ const int xi = -(info->screen.width / 2) * c;
+ const int yi = (info->screen.width / 2) * s;
+
+ const int xj = -(info->screen.height / 2) * s;
+ const int yj = -(info->screen.height / 2) * c;
+
+ int xprime = xj;
+ int yprime = yj;
+
+ /* 16 intensity levels; inverse_motif selects brighten vs darken */
+ int ifftab[16];
+
+ if (data->inverse_motif) {
+ int i;
+
+ for (i = 0; i < 16; ++i)
+ ifftab[i] = (double) iff *(1.0 + data->visibility * (15.0 - i) / 15.0);
+ } else {
+ int i;
+
+ for (i = 0; i < 16; ++i)
+ ifftab[i] = (double) iff / (1.0 + data->visibility * (15.0 - i) / 15.0);
+ }
+
+ for (y = info->screen.height; y--;) {
+ int xtex, ytex;
+
+ xtex = xprime + xi + CONV_MOTIF_W * 0x10000 / 2;
+ xprime += s;
+
+ ytex = yprime + yi + CONV_MOTIF_W * 0x10000 / 2;
+ yprime += c;
+
+#ifdef HAVE_MMX
+ __asm__ __volatile__ ("\n\t pxor %%mm7, %%mm7" /* mm7 = 0 */
+ "\n\t movd %[xtex], %%mm2" "\n\t movd %[ytex], %%mm3" "\n\t punpckldq %%mm3, %%mm2" /* mm2 = [ ytex | xtex ] */
+ "\n\t movd %[c], %%mm4" "\n\t movd %[s], %%mm6" "\n\t pxor %%mm5, %%mm5" "\n\t psubd %%mm6, %%mm5" "\n\t punpckldq %%mm5, %%mm4" /* mm4 = [ -s | c ] */
+ "\n\t movd %[motif], %%mm6" /* mm6 = motif */
+ ::[xtex] "g" (xtex),[ytex] "g" (ytex)
+ ,[c] "g" (c),[s] "g" (s)
+ ,[motif] "g" (&data->conv_motif[0][0]));
+
+ for (x = info->screen.width; x--;) {
+ __asm__ __volatile__ ("\n\t movd %[src], %%mm0" /* mm0 = src */
+ "\n\t paddd %%mm4, %%mm2" /* [ ytex | xtex ] += [ -s | s ] */
+ "\n\t movd %%esi, %%mm5" /* save esi into mm5 */
+ "\n\t movq %%mm2, %%mm3" "\n\t psrld $16, %%mm3" /* mm3 = [ (ytex>>16) | (xtex>>16) ] */
+ "\n\t movd %%mm3, %%eax" /* eax = xtex' */
+ "\n\t psrlq $25, %%mm3" "\n\t movd %%mm3, %%ecx" /* ecx = ytex' << 7 */
+ "\n\t andl $127, %%eax" "\n\t andl $16256, %%ecx" "\n\t addl %%ecx, %%eax" "\n\t movd %%mm6, %%esi" /* esi = motif */
+ "\n\t xorl %%ecx, %%ecx" "\n\t movb (%%eax,%%esi), %%cl" "\n\t movl %[ifftab], %%eax" "\n\t movd %%mm5, %%esi" /* restore esi from mm5 */
+ "\n\t movd (%%eax,%%ecx,4), %%mm1" /* mm1 = [0|0|0|iff2] */
+ "\n\t punpcklwd %%mm1, %%mm1"
+ "\n\t punpcklbw %%mm7, %%mm0"
+ "\n\t punpckldq %%mm1, %%mm1"
+ "\n\t psrlw $1, %%mm0"
+ "\n\t psrlw $2, %%mm1"
+ "\n\t pmullw %%mm1, %%mm0"
+ "\n\t psrlw $5, %%mm0"
+ "\n\t packuswb %%mm7, %%mm0"
+ "\n\t movd %%mm0, %[dest]":[dest] "=g" (dest[i].val)
+ :[src] "g" (src[i].val)
+ ,[ifftab] "g" (&ifftab[0])
+ :"eax", "ecx");
+
+ i++;
+ }
+#else
+ for (x = info->screen.width; x--;) {
+
+ int iff2;
+ unsigned int f0, f1, f2, f3;
+
+ xtex += c;
+ ytex -= s;
+
+ iff2 =
+ ifftab[(int) data->conv_motif[(ytex >> 16) & CONV_MOTIF_WMASK][(xtex
+ >> 16) & CONV_MOTIF_WMASK]];
+
+#define sat(a) ((a)>0xFF?0xFF:(a))
+ f0 = src[i].val;
+ f1 = ((f0 >> R_OFFSET) & 0xFF) * iff2 >> 8;
+ f2 = ((f0 >> G_OFFSET) & 0xFF) * iff2 >> 8;
+ f3 = ((f0 >> B_OFFSET) & 0xFF) * iff2 >> 8;
+ dest[i].val =
+ (sat (f1) << R_OFFSET) | (sat (f2) << G_OFFSET) | (sat (f3) <<
+ B_OFFSET);
+/*
+ f0 = (src[i].cop[0] * iff2) >> 8;
+ f1 = (src[i].cop[1] * iff2) >> 8;
+ f2 = (src[i].cop[2] * iff2) >> 8;
+ f3 = (src[i].cop[3] * iff2) >> 8;
+
+ dest[i].cop[0] = (f0 & 0xffffff00) ? 0xff : (unsigned char)f0;
+ dest[i].cop[1] = (f1 & 0xffffff00) ? 0xff : (unsigned char)f1;
+ dest[i].cop[2] = (f2 & 0xffffff00) ? 0xff : (unsigned char)f2;
+ dest[i].cop[3] = (f3 & 0xffffff00) ? 0xff : (unsigned char)f3;
+*/
+ i++;
+ }
+#endif
+ }
+#ifdef HAVE_MMX
+ __asm__ __volatile__ ("\n\t emms");
+#endif
+
+ compute_tables (_this, info);
+}
+#endif
+
+/*#include <stdint.h>
+
+static uint64_t GetTick()
+{
+ uint64_t x;
+ asm volatile ("RDTSC" : "=A" (x));
+ return x;
+}*/
+
+
+/* VisualFX apply hook. Updates the flash factor (boosted on strong
+ * "goom" events, decayed each frame), the motif rotation angle and its
+ * visibility; occasionally swaps the motif while it is invisible. With
+ * DRAW_MOTIF defined it renders via create_output_with_brightness();
+ * otherwise it is a plain frame copy. */
+static void
+convolve_apply (VisualFX * _this, Pixel * src, Pixel * dest, PluginInfo * info)
+{
+
+ ConvData *data = (ConvData *) _this->fx_data;
+#ifdef DRAW_MOTIF
+ float ff;
+ int iff;
+
+ ff = (FVAL (data->factor_p) * FVAL (data->factor_adj_p) +
+ FVAL (data->light)) / 100.0f;
+ iff = (unsigned int) (ff * 256);
+#endif
+
+ {
+ double fcycle = (double) info->cycle;
+ double rotate_param, rotate_coef;
+ float INCREASE_RATE = 1.5;
+ float DECAY_RATE = 0.955;
+
+ /* flash brightens on loud events, then decays exponentially */
+ if (FVAL (info->sound.last_goom_p) > 0.8)
+ FVAL (data->factor_p) += FVAL (info->sound.goom_power_p) * INCREASE_RATE;
+ FVAL (data->factor_p) *= DECAY_RATE;
+
+ rotate_param = FVAL (info->sound.last_goom_p);
+ if (rotate_param < 0.0)
+ rotate_param = 0.0;
+ rotate_param += FVAL (info->sound.goom_power_p);
+
+ /* rotation speed follows the music's energy */
+ rotate_coef = 4.0 + FVAL (info->sound.goom_power_p) * 6.0;
+ data->ftheta = (data->ftheta + rotate_coef * sin (rotate_param * 6.3));
+ data->theta = ((unsigned int) data->ftheta) % NB_THETA;
+ data->visibility =
+ (cos (fcycle * 0.001 + 1.5) * sin (fcycle * 0.008) +
+ cos (fcycle * 0.011 + 5.0) - 0.8 + info->sound.speedvar) * 1.5;
+ if (data->visibility < 0.0)
+ data->visibility = 0.0;
+ data->factor_p.change_listener (&data->factor_p);
+ }
+
+ /* motif may only change while it is (nearly) invisible */
+ if (data->visibility < 0.01) {
+ switch (goom_irand (info->gRandom, 300)) {
+ case 1:
+ set_motif (data, CONV_MOTIF1);
+ data->inverse_motif = 1;
+ break;
+ case 2:
+ set_motif (data, CONV_MOTIF2);
+ data->inverse_motif = 0;
+ break;
+ }
+ }
+#ifdef DRAW_MOTIF
+ /* near-unity brightness: skip the expensive modulated render */
+ if ((ff > 0.98f) && (ff < 1.02f))
+ memcpy (dest, src, info->screen.size * sizeof (Pixel));
+ else
+ create_output_with_brightness (_this, src, dest, info, iff);
+#else
+ memcpy (dest, src, info->screen.size * sizeof (Pixel));
+#endif
+
+/*
+// Benching suite...
+ {
+ uint64_t before, after;
+ double timed;
+ static double stimed = 10000.0;
+ before = GetTick();
+ data->visibility = 1.0;
+ create_output_with_brightness(_this,src,dest,info,iff);
+ after = GetTick();
+ timed = (double)((after-before) / info->screen.size);
+ if (timed < stimed) {
+ stimed = timed;
+ printf ("CLK = %3.0f CPP\n", stimed);
+ }
+ }
+*/
+}
+
+/* Fill in the VisualFX vtable for the convolve (bright-flash) effect. */
+void
+convolve_create (VisualFX * vfx)
+{
+ vfx->init = convolve_init;
+ vfx->free = convolve_free;
+ vfx->apply = convolve_apply;
+ vfx->fx_data = NULL;
+ vfx->params = NULL;
+}
diff --git a/gst/goom/drawmethods.c b/gst/goom/drawmethods.c
new file mode 100644
index 0000000000..97f0cf1396
--- /dev/null
+++ b/gst/goom/drawmethods.c
@@ -0,0 +1,222 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "drawmethods.h"
+
+/* Per-byte saturating add: for each of the 4 channel bytes of _backbuf
+ * and _col, store min(backbuf + col, 255) into _out. The operands are
+ * pixel words reinterpreted as byte arrays. */
+#define DRAWMETHOD_PLUS(_out,_backbuf,_col) \
+{\
+ int tra=0,i=0;\
+ unsigned char *bra = (unsigned char*)&(_backbuf);\
+ unsigned char *dra = (unsigned char*)&(_out);\
+ unsigned char *cra = (unsigned char*)&(_col);\
+ for (;i<4;i++) {\
+ tra = *cra;\
+ tra += *bra;\
+ if (tra>255) tra=255;\
+ *dra = tra;\
+ ++dra;++cra;++bra;\
+ }\
+}
+
+/* Additive blend of col into the pixel at *p. */
+#define DRAWMETHOD DRAWMETHOD_PLUS(*p,*p,col)
+
+/* Draw an additively-blended line from (x1,y1) to (x2,y2) into @data (a
+ * screenx*screeny pixel buffer) using fixed-point Bresenham-style
+ * stepping. Lines that are not entirely on screen are rejected outright;
+ * the proper clipping code below is commented out. Endpoints are swapped
+ * so x1 <= x2 before the general cases. */
+void
+draw_line (Pixel * data, int x1, int y1, int x2, int y2, int col, int screenx,
+ int screeny)
+{
+ int x, y, dx, dy, yy, xx;
+ Pixel *p;
+
+ if ((y1 < 0) || (y2 < 0) || (x1 < 0) || (x2 < 0) || (y1 >= screeny)
+ || (y2 >= screeny) || (x1 >= screenx) || (x2 >= screenx))
+ return;
+
+ /* clip to top edge
+ if ((y1 < 0) && (y2 < 0))
+ return;
+
+ if (y1 < 0) {
+ x1 += (y1 * (x1 - x2)) / (y2 - y1);
+ y1 = 0;
+ }
+ if (y2 < 0) {
+ x2 += (y2 * (x1 - x2)) / (y2 - y1);
+ y2 = 0;
+ }
+
+ clip to bottom edge
+ if ((y1 >= screeny) && (y2 >= screeny))
+ return;
+ if (y1 >= screeny) {
+ x1 -= ((screeny - y1) * (x1 - x2)) / (y2 - y1);
+ y1 = screeny - 1;
+ }
+ if (y2 >= screeny) {
+ x2 -= ((screeny - y2) * (x1 - x2)) / (y2 - y1);
+ y2 = screeny - 1;
+ }
+ clip to left edge
+ if ((x1 < 0) && (x2 < 0))
+ return;
+ if (x1 < 0) {
+ y1 += (x1 * (y1 - y2)) / (x2 - x1);
+ x1 = 0;
+ }
+ if (x2 < 0) {
+ y2 += (x2 * (y1 - y2)) / (x2 - x1);
+ x2 = 0;
+ }
+ clip to right edge
+ if ((x1 >= screenx) && (x2 >= screenx))
+ return;
+ if (x1 >= screenx) {
+ y1 -= ((screenx - x1) * (y1 - y2)) / (x2 - x1);
+ x1 = screenx - 1;
+ }
+ if (x2 >= screenx) {
+ y2 -= ((screenx - x2) * (y1 - y2)) / (x2 - x1);
+ x2 = screenx - 1;
+ }
+ */
+
+ dx = x2 - x1;
+ dy = y2 - y1;
+ if (x1 > x2) {
+ int tmp;
+
+ tmp = x1;
+ x1 = x2;
+ x2 = tmp;
+ tmp = y1;
+ y1 = y2;
+ y2 = tmp;
+ dx = x2 - x1;
+ dy = y2 - y1;
+ }
+
+ /* vertical line */
+ if (dx == 0) {
+ if (y1 < y2) {
+ p = &(data[(screenx * y1) + x1]);
+ for (y = y1; y <= y2; y++) {
+ DRAWMETHOD;
+ p += screenx;
+ }
+ } else {
+ p = &(data[(screenx * y2) + x1]);
+ for (y = y2; y <= y1; y++) {
+ DRAWMETHOD;
+ p += screenx;
+ }
+ }
+ return;
+ }
+ /* horizontal line */
+ if (dy == 0) {
+ if (x1 < x2) {
+ p = &(data[(screenx * y1) + x1]);
+ for (x = x1; x <= x2; x++) {
+ DRAWMETHOD;
+ p++;
+ }
+ return;
+ } else {
+ p = &(data[(screenx * y1) + x2]);
+ for (x = x2; x <= x1; x++) {
+ DRAWMETHOD;
+ p++;
+ }
+ return;
+ }
+ }
+ /* 1 */
+ /* \ */
+ /* \ */
+ /* 2 */
+ if (y2 > y1) {
+ /* steep */
+ if (dy > dx) {
+ dx = ((dx << 16) / dy);
+ x = x1 << 16;
+ for (y = y1; y <= y2; y++) {
+ xx = x >> 16;
+ p = &(data[(screenx * y) + xx]);
+ DRAWMETHOD;
+ if (xx < (screenx - 1)) {
+ p++;
+ /* DRAWMETHOD; */
+ }
+ x += dx;
+ }
+ return;
+ }
+ /* shallow */
+ else {
+ dy = ((dy << 16) / dx);
+ y = y1 << 16;
+ for (x = x1; x <= x2; x++) {
+ yy = y >> 16;
+ p = &(data[(screenx * yy) + x]);
+ DRAWMETHOD;
+ if (yy < (screeny - 1)) {
+ /* NOTE(review): stride looks wrong -- rows are screenx pixels
+ * apart, so this should presumably be `p += screenx`. Harmless
+ * today because the second DRAWMETHOD is commented out. */
+ p += screeny;
+ /* DRAWMETHOD; */
+ }
+ y += dy;
+ }
+ }
+ }
+ /* 2 */
+ /* / */
+ /* / */
+ /* 1 */
+ else {
+ /* steep */
+ if (-dy > dx) {
+ dx = ((dx << 16) / -dy);
+ x = (x1 + 1) << 16;
+ for (y = y1; y >= y2; y--) {
+ xx = x >> 16;
+ p = &(data[(screenx * y) + xx]);
+ DRAWMETHOD;
+ if (xx < (screenx - 1)) {
+ p--;
+ /* DRAWMETHOD; */
+ }
+ x += dx;
+ }
+ return;
+ }
+ /* shallow */
+ else {
+ dy = ((dy << 16) / dx);
+ y = y1 << 16;
+ for (x = x1; x <= x2; x++) {
+ yy = y >> 16;
+ p = &(data[(screenx * yy) + x]);
+ DRAWMETHOD;
+ if (yy < (screeny - 1)) {
+ /* NOTE(review): same suspect stride as above (`screeny` vs
+ * `screenx`); dead today since the plot is commented out. */
+ p += screeny;
+ /* DRAWMETHOD; */
+ }
+ y += dy;
+ }
+ return;
+ }
+ }
+}
diff --git a/gst/goom/drawmethods.h b/gst/goom/drawmethods.h
new file mode 100644
index 0000000000..3f86274241
--- /dev/null
+++ b/gst/goom/drawmethods.h
@@ -0,0 +1,27 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _DRAWMETHODS_H
+#define _DRAWMETHODS_H
+
+#include "goom_config.h"
+#include "goom_graphic.h"
+
+/* Additively blend a line from (x1,y1) to (x2,y2) into the
+ * screenx*screeny pixel buffer; see drawmethods.c. */
+void draw_line (Pixel *data, int x1, int y1, int x2, int y2, int col, int screenx, int screeny);
+
+#endif /* _DRAWMETHODS_H */
diff --git a/gst/goom/filters.c b/gst/goom/filters.c
new file mode 100644
index 0000000000..fc930747f7
--- /dev/null
+++ b/gst/goom/filters.c
@@ -0,0 +1,861 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+// --- CHUI EN TRAIN DE SUPPRIMER LES EXTERN RESOLX ET C_RESOLY ---
+
+/* filter.c version 0.7
+* contient les filtres applicable a un buffer
+* creation : 01/10/2000
+* -ajout de sinFilter()
+* -ajout de zoomFilter()
+* -copie de zoomFilter() en zoomFilterRGB(), gerant les 3 couleurs
+* -optimisation de sinFilter (utilisant une table de sin)
+* -asm
+* -optimisation de la procedure de generation du buffer de transformation
+* la vitesse est maintenant comprise dans [0..128] au lieu de [0..100]
+*/
+
+/* #define _DEBUG_PIXEL */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <math.h>
+#include <stdio.h>
+
+#ifdef HAVE_INTTYPES_H
+#include <inttypes.h>
+#endif
+
+#include "goom_filters.h"
+#include "goom_graphic.h"
+#include "goom_tools.h"
+#include "goom_plugin_info.h"
+#include "goom_fx.h"
+#include "v3d.h"
+
+/* TODO : MOVE THIS AWAY !!! */
+/* jeko: j'ai essayer de le virer, mais si on veut les laisser inline c'est un peu lourdo... */
+/* Store color @c at (x, y) in @buffer (row stride = screen width).
+ * Note: the Color struct names its green field `v` (c.v). */
+static inline void
+setPixelRGB (PluginInfo * goomInfo, Pixel * buffer, Uint x, Uint y, Color c)
+{
+ Pixel i;
+
+ i.channels.b = c.b;
+ i.channels.g = c.v;
+ i.channels.r = c.r;
+ *(buffer + (x + y * goomInfo->screen.width)) = i;
+}
+
+/* Store color @c at linear offset @x in @buffer. */
+static inline void
+setPixelRGB_ (Pixel * buffer, Uint x, Color c)
+{
+ buffer[x].channels.r = c.r;
+ buffer[x].channels.g = c.v;
+ buffer[x].channels.b = c.b;
+}
+
+/* Read the pixel at linear offset @x in @buffer into *@c. */
+static inline void
+getPixelRGB_ (Pixel * buffer, Uint x, Color * c)
+{
+ Pixel i = *(buffer + x);
+
+ c->b = i.channels.b;
+ c->v = i.channels.g;
+ c->r = i.channels.r;
+}
+
+/* END TODO */
+
+
+/* DEPRECATED */
+// retourne x>>s , en testant le signe de x
+//#define ShiftRight(_x,_s) (((_x)<0) ? -(-(_x)>>(_s)) : ((_x)>>(_s)))
+//#define EFFECT_DISTORS 4
+//#define EFFECT_DISTORS_SL 2
+//#define INTERLACE_ADD 9
+//#define INTERLACE_AND 0xf
+/* END DEPRECATED */
+
+#define BUFFPOINTNB 16
+#define BUFFPOINTNBF 16.0f
+#define BUFFPOINTMASK 0xffff
+
+#define sqrtperte 16
+/* faire : a % sqrtperte <=> a & pertemask */
+#define PERTEMASK 0xf
+/* faire : a / sqrtperte <=> a >> PERTEDEC */
+#define PERTEDEC 4
+
+/* pure c version of the zoom filter */
+static void c_zoom (Pixel * expix1, Pixel * expix2, unsigned int prevX,
+ unsigned int prevY, signed int *brutS, signed int *brutD, int buffratio,
+ int precalCoef[BUFFPOINTNB][BUFFPOINTNB]);
+
+/* simple wrapper to give it the same proto than the others */
+/* Adapts the common (sizeX, sizeY, src, dest, ...) signature to c_zoom's
+ * (src, dest, prevX, prevY, ...) argument order. */
+void
+zoom_filter_c (int sizeX, int sizeY, Pixel * src, Pixel * dest, int *brutS,
+ int *brutD, int buffratio, int precalCoef[16][16])
+{
+ c_zoom (src, dest, sizeX, sizeY, brutS, brutD, buffratio, precalCoef);
+}
+
+static void generatePrecalCoef (int precalCoef[BUFFPOINTNB][BUFFPOINTNB]);
+
+
+/* Private state of the zoom-filter effect wrapper: the source/dest
+ * transform buffers (brutS/brutD, interpolated via buffratio), the buffer
+ * being generated stripe by stripe (brutT / interlace_start), and the
+ * knobs controlling which zoom mode / sub-effects are active. */
+typedef struct _ZOOM_FILTER_FX_WRAPPER_DATA
+{
+
+ PluginParam enabled_bp;
+ PluginParameters params;
+
+ unsigned int *coeffs, *freecoeffs;
+
+ signed int *brutS, *freebrutS; /* source */
+ signed int *brutD, *freebrutD; /* dest */
+ signed int *brutT, *freebrutT; /* temp (en cours de generation) */
+
+ guint32 zoom_width;
+
+ /* dimensions the buffers were generated for */
+ unsigned int prevX, prevY;
+
+ float general_speed;
+ int reverse; /* reverse the speed */
+ char theMode;
+ int waveEffect;
+ int hypercosEffect;
+ int vPlaneEffect;
+ int hPlaneEffect;
+ char noisify;
+ int middleX, middleY; /* zoom center in pixmap coordinates */
+
+ int mustInitBuffers;
+ int interlace_start; /* next stripe row to generate; -1 = done */
+
+ /* modif by jeko : fixedpoint : buffration = (16:16) (donc 0<=buffration<=2^16) */
+ int buffratio;
+ int *firedec;
+
+ /* modif d'optim by Jeko : precalcul des 4 coefs resultant des 2 pos */
+ int precalCoef[BUFFPOINTNB][BUFFPOINTNB];
+
+ /* calculatePXandPY statics */
+ int wave;
+ int wavesp;
+
+} ZoomFilterFXWrapperData;
+
+
+
+
+/* Compute the displacement vector for the point (X, Y) in normalized
+ * coordinates: a mode-dependent radial speed coefficient (clamped to
+ * [-2.01, 2.01]) scaled by the position, plus optional noise, hypercos,
+ * and horizontal/vertical plane add-ons. Result goes into *vecteur. */
+static inline void
+zoomVector (v2g * vecteur, ZoomFilterFXWrapperData * data, float X, float Y)
+{
+ float vx, vy;
+ float sq_dist = X * X + Y * Y;
+
+ /* sx = (X < 0.0f) ? -1.0f : 1.0f;
+ sy = (Y < 0.0f) ? -1.0f : 1.0f;
+ */
+ float coefVitesse = (1.0f + data->general_speed) / 50.0f;
+
+ // Effects
+
+ /* Centralized FX */
+
+ switch (data->theMode) {
+ case CRYSTAL_BALL_MODE:
+ coefVitesse -= (sq_dist - 0.3f) / 15.0f;
+ break;
+ case AMULETTE_MODE:
+ coefVitesse += sq_dist * 3.5f;
+ break;
+ case WAVE_MODE:
+ coefVitesse += sin (sq_dist * 20.0f) / 100.0f;
+ break;
+ case SCRUNCH_MODE:
+ coefVitesse += sq_dist / 10.0f;
+ break;
+ /* NOTE(review): the following bare `break;` statements are left over
+ * from the commented-out case labels; they are dead but harmless. */
+ //case HYPERCOS1_MODE:
+ break;
+ //case HYPERCOS2_MODE:
+ break;
+ //case YONLY_MODE:
+ break;
+ case SPEEDWAY_MODE:
+ coefVitesse *= 4.0f * Y;
+ break;
+ default:
+ break;
+ }
+
+ if (coefVitesse < -2.01f)
+ coefVitesse = -2.01f;
+ if (coefVitesse > 2.01f)
+ coefVitesse = 2.01f;
+
+ vx = coefVitesse * X;
+ vy = coefVitesse * Y;
+
+ /* Amulette 2 */
+ // vx = X * tan(dist);
+ // vy = Y * tan(dist);
+
+ /* Rotate */
+ //vx = (X+Y)*0.1;
+ //vy = (Y-X)*0.1;
+
+
+ // Effects adds-on
+
+ /* Noise */
+ if (data->noisify) {
+ vx += (((float) rand ()) / ((float) RAND_MAX) - 0.5f) / 50.0f;
+ vy += (((float) rand ()) / ((float) RAND_MAX) - 0.5f) / 50.0f;
+ }
+
+ /* Hypercos */
+ if (data->hypercosEffect) {
+ vx += sin (Y * 10.0f) / 120.0f;
+ vy += sin (X * 10.0f) / 120.0f;
+ }
+
+ /* H Plane */
+ if (data->hPlaneEffect)
+ vx += Y * 0.0025f * data->hPlaneEffect;
+
+ /* V Plane */
+ if (data->vPlaneEffect)
+ vy += X * 0.0025f * data->vPlaneEffect;
+
+ /* TODO : Water Mode */
+ // if (data->waveEffect)
+
+ vecteur->x = vx;
+ vecteur->y = vy;
+}
+
+
+/*
+ * Makes a stripe of a transform buffer (brutT)
+ *
+ * The transform is (in order) :
+ * Translation (-data->middleX, -data->middleY)
+ * Homothetie (Center : 0,0 Coeff : 2/data->prevX)
+ */
+/* Generate up to INTERLACE_INCR rows of the transform buffer brutT,
+ * starting at data->interlace_start. For each pixel, zoomVector() gives a
+ * normalized displacement, which is converted back to virtual pixmap
+ * coordinates (BUFFPOINTNB sub-pixel units) and stored as an (x, y) pair.
+ * interlace_start advances by INTERLACE_INCR and is set to -1 once the
+ * whole buffer has been generated. */
+static void
+makeZoomBufferStripe (ZoomFilterFXWrapperData * data, int INTERLACE_INCR)
+{
+ // Position of the pixel to compute in pixmap coordinates
+ Uint x, y;
+
+ // Where (verticaly) to stop generating the buffer stripe
+ int maxEnd;
+
+ // Ratio from pixmap to normalized coordinates
+ float ratio = 2.0f / ((float) data->prevX);
+
+ // Ratio from normalized to virtual pixmap coordinates
+ float inv_ratio = BUFFPOINTNBF / ratio;
+ float min = ratio / BUFFPOINTNBF;
+
+ // Y position of the pixel to compute in normalized coordinates
+ float Y = ((float) (data->interlace_start - data->middleY)) * ratio;
+
+ maxEnd = data->prevY;
+ if (maxEnd > (data->interlace_start + INTERLACE_INCR))
+ maxEnd = (data->interlace_start + INTERLACE_INCR);
+
+ for (y = data->interlace_start;
+ (y < data->prevY) && ((signed int) y < maxEnd); y++) {
+ Uint premul_y_prevX = y * data->prevX * 2;
+ float X = -((float) data->middleX) * ratio;
+
+ for (x = 0; x < data->prevX; x++) {
+ v2g vector;
+
+ zoomVector (&vector, data, X, Y);
+
+ /* Finish and avoid null displacement */
+ if (fabs (vector.x) < min)
+ vector.x = (vector.x < 0.0f) ? -min : min;
+ if (fabs (vector.y) < min)
+ vector.y = (vector.y < 0.0f) ? -min : min;
+
+ data->brutT[premul_y_prevX] =
+ ((int) ((X - vector.x) * inv_ratio) +
+ ((int) (data->middleX * BUFFPOINTNB)));
+ data->brutT[premul_y_prevX + 1] =
+ ((int) ((Y - vector.y) * inv_ratio) +
+ ((int) (data->middleY * BUFFPOINTNB)));
+ premul_y_prevX += 2;
+ X += ratio;
+ }
+ Y += ratio;
+ }
+ data->interlace_start += INTERLACE_INCR;
+ /* y retains its value from the loop: last row reached => generation done */
+ if (y >= data->prevY - 1)
+ data->interlace_start = -1;
+}
+
+
+/*
+ * calculer px et py en fonction de x,y,middleX,middleY et theMode
+ * px et py indique la nouvelle position (en sqrtperte ieme de pixel)
+ * (valeur * 16)
+
+ inline void calculatePXandPY (PluginInfo *goomInfo, ZoomFilterFXWrapperData *data, int x, int y, int *px, int *py)
+ {
+ if (data->theMode == WATER_MODE) {
+ int yy;
+
+ yy = y + goom_irand(goomInfo->gRandom, 4) - goom_irand(goomInfo->gRandom, 4) + data->wave / 10;
+ if (yy < 0)
+ yy = 0;
+ if (yy >= (signed int)goomInfo->screen.height)
+ yy = goomInfo->screen.height - 1;
+
+ *px = (x << 4) + data->firedec[yy] + (data->wave / 10);
+ *py = (y << 4) + 132 - ((data->vitesse < 131) ? data->vitesse : 130);
+
+ data->wavesp += goom_irand(goomInfo->gRandom, 3) - goom_irand(goomInfo->gRandom, 3);
+ if (data->wave < -10)
+ data->wavesp += 2;
+ if (data->wave > 10)
+ data->wavesp -= 2;
+ data->wave += (data->wavesp / 10) + goom_irand(goomInfo->gRandom, 3) - goom_irand(goomInfo->gRandom, 3);
+ if (data->wavesp > 100)
+ data->wavesp = (data->wavesp * 9) / 10;
+ }
+ else {
+ int dist = 0, vx9, vy9;
+ int vx, vy;
+ int ppx, ppy;
+ int fvitesse = data->vitesse << 4;
+
+ if (data->noisify) {
+ x += goom_irand(goomInfo->gRandom, data->noisify) - goom_irand(goomInfo->gRandom, data->noisify);
+ y += goom_irand(goomInfo->gRandom, data->noisify) - goom_irand(goomInfo->gRandom, data->noisify);
+ }
+ vx = (x - data->middleX) << 9;
+ vy = (y - data->middleY) << 9;
+
+ if (data->hPlaneEffect)
+ vx += data->hPlaneEffect * (y - data->middleY);
+
+ if (data->vPlaneEffect)
+ vy += data->vPlaneEffect * (x - data->middleX);
+
+ if (data->waveEffect) {
+ fvitesse *=
+ 1024 +
+ ShiftRight (goomInfo->sintable
+ [(unsigned short) (dist * 0xffff + EFFECT_DISTORS)], 6);
+ fvitesse /= 1024;
+ }
+
+ if (data->hypercosEffect) {
+ vx += ShiftRight (goomInfo->sintable[(-vy + dist) & 0xffff], 1);
+ vy += ShiftRight (goomInfo->sintable[(vx + dist) & 0xffff], 1);
+ }
+
+ vx9 = ShiftRight (vx, 9);
+ vy9 = ShiftRight (vy, 9);
+ dist = vx9 * vx9 + vy9 * vy9;
+
+ switch (data->theMode) {
+ case WAVE_MODE:
+ fvitesse *=
+ 1024 +
+ ShiftRight (goomInfo->sintable
+ [(unsigned short) (dist * 0xffff * EFFECT_DISTORS)], 6);
+ fvitesse>>=10;
+ break;
+ case CRYSTAL_BALL_MODE:
+ fvitesse += (dist >> (10-EFFECT_DISTORS_SL));
+ break;
+ case AMULETTE_MODE:
+ fvitesse -= (dist >> (4 - EFFECT_DISTORS_SL));
+ break;
+ case SCRUNCH_MODE:
+ fvitesse -= (dist >> (10 - EFFECT_DISTORS_SL));
+ break;
+ case HYPERCOS1_MODE:
+ vx = vx + ShiftRight (goomInfo->sintable[(-vy + dist) & 0xffff], 1);
+ vy = vy + ShiftRight (goomInfo->sintable[(vx + dist) & 0xffff], 1);
+ break;
+ case HYPERCOS2_MODE:
+ vx =
+ vx + ShiftRight (goomInfo->sintable[(-ShiftRight (vy, 1) + dist) & 0xffff], 0);
+ vy =
+ vy + ShiftRight (goomInfo->sintable[(ShiftRight (vx, 1) + dist) & 0xffff], 0);
+ fvitesse = 128 << 4;
+ break;
+ case YONLY_MODE:
+ fvitesse *= 1024 + ShiftRight (goomInfo->sintable[vy & 0xffff], 6);
+ fvitesse >>= 10;
+ break;
+ case SPEEDWAY_MODE:
+ fvitesse -= (ShiftRight(vy,10-EFFECT_DISTORS_SL));
+ break;
+ }
+
+ if (fvitesse < -3024)
+ fvitesse = -3024;
+
+ if (vx < 0) // pb avec decalage sur nb negatif
+ ppx = -(-(vx * fvitesse) >> 16);
+ // 16 = 9 + 7 (7 = nb chiffre virgule de vitesse * (v = 128 => immobile)
+ // * * * * * 9 = nb chiffre virgule de vx)
+ else
+ ppx = ((vx * fvitesse) >> 16);
+
+ if (vy < 0)
+ ppy = -(-(vy * fvitesse) >> 16);
+ else
+ ppy = ((vy * fvitesse) >> 16);
+
+ *px = (data->middleX << 4) + ppx;
+ *py = (data->middleY << 4) + ppy;
+ }
+ }
+ */
+
+
+
/*
 * C implementation of the zoom/displacement pass.
 *
 * For every destination pixel, interpolates a source position between the
 * brutS (source) and brutD (destination) displacement buffers according to
 * buffratio (16:16 fixed point), then bilinearly blends the 2x2 source
 * neighbourhood using the packed coefficients from precalCoef.
 *
 * Reads pixels from expix1, writes the result to expix2.
 */
static void
c_zoom (Pixel * expix1, Pixel * expix2, unsigned int prevX, unsigned int prevY,
    signed int *brutS, signed int *brutD, int buffratio, int precalCoef[16][16])
{
  int myPos, myPos2;
  Color couleur;

  /* Largest valid sub-pixel coordinates (in 1/16th-of-pixel units). */
  unsigned int ax = (prevX - 1) << PERTEDEC, ay = (prevY - 1) << PERTEDEC;

  int bufsize = prevX * prevY * 2;      /* two ints (x, y) per pixel */
  int bufwidth = prevX;

  /* Clear the corner pixels: out-of-range positions are clamped to pos 0,
   * so these act as the "black" fallback source. */
  expix1[0].val = expix1[prevX - 1].val = expix1[prevX * prevY - 1].val =
      expix1[prevX * prevY - prevX].val = 0;

  for (myPos = 0; myPos < bufsize; myPos += 2) {
    Color col1, col2, col3, col4;
    int c1, c2, c3, c4, px, py;
    int pos;
    int coeffs;

    int brutSmypos = brutS[myPos];

    myPos2 = myPos + 1;

    /* Interpolate the x source coordinate between brutS and brutD. */
    px = brutSmypos + (((brutD[myPos] -
                brutSmypos) * buffratio) >> BUFFPOINTNB);
    brutSmypos = brutS[myPos2];
    /* Same interpolation for the y source coordinate. */
    py = brutSmypos + (((brutD[myPos2] -
                brutSmypos) * buffratio) >> BUFFPOINTNB);

    /* NOTE(review): px/py are signed but ax/ay are unsigned, so a negative
     * px/py wraps to a huge unsigned value and also takes this branch —
     * presumably relied upon as the out-of-bounds guard; confirm. */
    if ((py >= ay) || (px >= ax)) {
      pos = coeffs = 0;
    } else {
      pos = ((px >> PERTEDEC) + prevX * (py >> PERTEDEC));
      /* coefficients from the modulo-15 sub-pixel position */
      coeffs = precalCoef[px & PERTEMASK][py & PERTEMASK];
    }
    /* Fetch the 2x2 neighbourhood around the source position. */
    getPixelRGB_ (expix1, pos, &col1);
    getPixelRGB_ (expix1, pos + 1, &col2);
    getPixelRGB_ (expix1, pos + bufwidth, &col3);
    getPixelRGB_ (expix1, pos + bufwidth + 1, &col4);

    /* Unpack the four 8-bit blend coefficients (one byte each). */
    c1 = coeffs;
    c2 = (c1 >> 8) & 0xFF;
    c3 = (c1 >> 16) & 0xFF;
    c4 = (c1 >> 24) & 0xFF;
    c1 = c1 & 0xff;

    /* Weighted sum per channel, then scale back down by 256. */
    couleur.r = col1.r * c1 + col2.r * c2 + col3.r * c3 + col4.r * c4;
    if (couleur.r > 5)
      couleur.r -= 5;
    couleur.r >>= 8;

    couleur.v = col1.v * c1 + col2.v * c2 + col3.v * c3 + col4.v * c4;
    if (couleur.v > 5)
      couleur.v -= 5;
    couleur.v >>= 8;

    couleur.b = col1.b * c1 + col2.b * c2 + col3.b * c3 + col4.b * c4;
    if (couleur.b > 5)
      couleur.b -= 5;
    couleur.b >>= 8;

    setPixelRGB_ (expix2, myPos >> 1, couleur);
  }
}
+
/* generate the water fx horizontal direction buffer */
static void
generateTheWaterFXHorizontalDirectionBuffer (PluginInfo * goomInfo,
    ZoomFilterFXWrapperData * data)
{

  int loopv;
  /* Random-walk state: decc = displacement, spdc = its speed, accel = a
   * slowly drifting acceleration term. */
  int decc = goom_irand (goomInfo->gRandom, 8) - 4;
  int spdc = goom_irand (goomInfo->gRandom, 8) - 4;
  int accel = goom_irand (goomInfo->gRandom, 8) - 4;

  /* Fill firedec[] from the last row down to row 0 with a bounded random
   * walk.  The goom_irand() call order below is part of the visual result —
   * keep the statement order as-is. */
  for (loopv = data->prevY; loopv != 0;) {

    loopv--;
    data->firedec[loopv] = decc;
    decc += spdc / 10;
    spdc +=
        goom_irand (goomInfo->gRandom, 3) - goom_irand (goomInfo->gRandom, 3);

    /* Pull the speed back when the displacement strays too far... */
    if (decc > 4)
      spdc -= 1;
    if (decc < -4)
      spdc += 1;

    /* ...and damp runaway speeds. */
    if (spdc > 30)
      spdc = spdc - goom_irand (goomInfo->gRandom, 3) + accel / 10;
    if (spdc < -30)
      spdc = spdc + goom_irand (goomInfo->gRandom, 3) + accel / 10;

    if (decc > 8 && spdc > 1)
      spdc -= goom_irand (goomInfo->gRandom, 3) - 2;

    if (decc < -8 && spdc < -1)
      spdc += goom_irand (goomInfo->gRandom, 3) + 2;

    /* Shrink large displacements towards zero. */
    if (decc > 8 || decc < -8)
      decc = decc * 8 / 9;

    /* Drift the acceleration, bounded to roughly [-20, 20]. */
    accel +=
        goom_irand (goomInfo->gRandom, 2) - goom_irand (goomInfo->gRandom, 2);
    if (accel > 20)
      accel -= 2;
    if (accel < -20)
      accel += 2;
  }
}
+
+
+
+/*
+ * Main work for the dynamic displacement map.
+ *
+ * Reads data from pix1, write to pix2.
+ *
 * Useful data for this FX is stored in ZoomFilterData.
 * If you think this is a strange function name, let me say that a long time ago
 * there was a slow version and a gray-level-only one. Then came this function,
 * fast and working in RGB colorspace! Nice, but it only applied a zoom to the
 * image. So that is why it has this name, out of nostalgia for the first days
 * of goom, when it was just a tiny program written in Turbo Pascal on my i486...
+ */
/*
 * Per-frame entry point for the zoom FX (see the comment above).
 *
 * pix1/pix2: source and destination images; zf: optional new settings;
 * resx/resy: current resolution; switchIncr/switchMult: blend-factor control.
 *
 * interlace_start is a small state machine:  >= 0 means "building the next
 * destination buffer (brutT) stripe by stripe", -1 means "ready to commit
 * and swap", -2 means "idle".
 */
void
zoomFilterFastRGB (PluginInfo * goomInfo, Pixel * pix1, Pixel * pix2,
    ZoomFilterData * zf, Uint resx, Uint resy, int switchIncr, float switchMult)
{
  Uint x, y;

  ZoomFilterFXWrapperData *data =
      (ZoomFilterFXWrapperData *) goomInfo->zoomFilter_fx.fx_data;

  if (!BVAL (data->enabled_bp))
    return;

  /* Resolution change: drop every buffer and schedule reallocation. */
  if ((data->prevX != resx) || (data->prevY != resy)) {
    data->prevX = resx;
    data->prevY = resy;

    /* Free the unaligned base pointers; brut* are aligned views into them. */
    if (data->brutS)
      free (data->freebrutS);
    data->brutS = 0;
    if (data->brutD)
      free (data->freebrutD);
    data->brutD = 0;
    if (data->brutT)
      free (data->freebrutT);
    data->brutT = 0;

    data->middleX = resx / 2;
    data->middleY = resy / 2;
    data->mustInitBuffers = 1;
    if (data->firedec)
      free (data->firedec);
    data->firedec = 0;
  }

  /* Ignore new settings while a transition is still in progress. */
  if (data->interlace_start != -2)
    zf = NULL;

  /* Configuration change: copy the new settings and start building a new
   * destination buffer, beginning at row 0. */
  if (zf) {
    data->reverse = zf->reverse;
    data->general_speed = (float) (zf->vitesse - 128) / 128.0f;
    if (data->reverse)
      data->general_speed = -data->general_speed;
    data->middleX = zf->middleX;
    data->middleY = zf->middleY;
    data->theMode = zf->mode;
    data->hPlaneEffect = zf->hPlaneEffect;
    data->vPlaneEffect = zf->vPlaneEffect;
    data->waveEffect = zf->waveEffect;
    data->hypercosEffect = zf->hypercosEffect;
    data->noisify = zf->noisify;
    data->interlace_start = 0;
  }


  if (data->mustInitBuffers) {

    data->mustInitBuffers = 0;
    /* Over-allocate by 128 ints and round the pointer up so the brut*
     * views are 128-byte aligned (for the MMX implementation).
     * NOTE(review): the calloc()/malloc() results below are used
     * unchecked — an OOM here would crash. */
    data->freebrutS =
        (signed int *) calloc (resx * resy * 2 + 128, sizeof (unsigned int));
    data->brutS =
        (gint32 *) ((1 + ((uintptr_t) (data->freebrutS)) / 128) * 128);

    data->freebrutD =
        (signed int *) calloc (resx * resy * 2 + 128, sizeof (unsigned int));
    data->brutD =
        (gint32 *) ((1 + ((uintptr_t) (data->freebrutD)) / 128) * 128);

    data->freebrutT =
        (signed int *) calloc (resx * resy * 2 + 128, sizeof (unsigned int));
    data->brutT =
        (gint32 *) ((1 + ((uintptr_t) (data->freebrutT)) / 128) * 128);

    data->buffratio = 0;

    data->firedec = (int *) malloc (data->prevY * sizeof (int));
    generateTheWaterFXHorizontalDirectionBuffer (goomInfo, data);

    data->interlace_start = 0;
    makeZoomBufferStripe (data, resy);

    /* Copy the data from temp to dest and source */
    memcpy (data->brutS, data->brutT, resx * resy * 2 * sizeof (int));
    memcpy (data->brutD, data->brutT, resx * resy * 2 * sizeof (int));
  }

  /* The temp buffer is complete: freeze the currently interpolated state
   * into the source buffer and reset the blend factor.
   * TODO: write that in MMX (has been done in a previous version, but it
   * did not follow some newer functionality) */
  if (data->interlace_start == -1) {

    y = data->prevX * data->prevY * 2;
    for (x = 0; x < y; x += 2) {
      int brutSmypos = data->brutS[x];
      int x2 = x + 1;

      data->brutS[x] =
          brutSmypos + (((data->brutD[x] -
                  brutSmypos) * data->buffratio) >> BUFFPOINTNB);
      brutSmypos = data->brutS[x2];
      data->brutS[x2] =
          brutSmypos + (((data->brutD[x2] -
                  brutSmypos) * data->buffratio) >> BUFFPOINTNB);
    }
    data->buffratio = 0;
  }

  /* ...then swap the temp buffer in as the new destination and go idle. */
  if (data->interlace_start == -1) {
    signed int *tmp;

    tmp = data->brutD;
    data->brutD = data->brutT;
    data->brutT = tmp;
    tmp = data->freebrutD;
    data->freebrutD = data->freebrutT;
    data->freebrutT = tmp;
    data->interlace_start = -2;
  }

  if (data->interlace_start >= 0) {
    /* Still building: compute the next stripe (1/16th of the height) of
     * the new destination buffer. */
    makeZoomBufferStripe (data, resy / 16);
  }

  /* Advance the source->destination blend factor, clamped to the 16:16
   * fixed-point maximum. */
  if (switchIncr != 0) {
    data->buffratio += switchIncr;
    if (data->buffratio > BUFFPOINTMASK)
      data->buffratio = BUFFPOINTMASK;
  }

  if (switchMult != 1.0f) {
    data->buffratio = (int) ((float) BUFFPOINTMASK * (1.0f - switchMult) +
        (float) data->buffratio * switchMult);
  }

  data->zoom_width = data->prevX;

  /* Run the actual zoom pass (C or MMX implementation). */
  goomInfo->methods.zoom_filter (data->prevX, data->prevY, pix1, pix2,
      data->brutS, data->brutD, data->buffratio, data->precalCoef);
}
+
+static void
+generatePrecalCoef (int precalCoef[16][16])
+{
+ int coefh, coefv;
+
+ for (coefh = 0; coefh < 16; coefh++) {
+ for (coefv = 0; coefv < 16; coefv++) {
+
+ int i;
+ int diffcoeffh;
+ int diffcoeffv;
+
+ diffcoeffh = sqrtperte - coefh;
+ diffcoeffv = sqrtperte - coefv;
+
+ if (!(coefh || coefv)) {
+ i = 255;
+ } else {
+ Uint i1, i2, i3, i4;
+
+ i1 = diffcoeffh * diffcoeffv;
+ i2 = coefh * diffcoeffv;
+ i3 = diffcoeffh * coefv;
+ i4 = coefh * coefv;
+
+ // TODO: faire mieux...
+ if (i1)
+ i1--;
+ if (i2)
+ i2--;
+ if (i3)
+ i3--;
+ if (i4)
+ i4--;
+
+ i = (i1) | (i2 << 8) | (i3 << 16) | (i4 << 24);
+ }
+ precalCoef[coefh][coefv] = i;
+ }
+ }
+}
+
+/* VisualFX Wrapper */
+
/*
 * VisualFX init callback: allocate and reset the private wrapper state.
 *
 * All pixel/displacement buffers are left unallocated here; prevX/prevY == 0
 * together with mustInitBuffers forces zoomFilterFastRGB() to allocate them
 * on its first call.
 *
 * NOTE(review): the malloc() result is used unchecked — confirm whether the
 * surrounding plugin code tolerates/handles OOM.
 */
static void
zoomFilterVisualFXWrapper_init (struct _VISUAL_FX *_this, PluginInfo * info)
{
  ZoomFilterFXWrapperData *data =
      (ZoomFilterFXWrapperData *) malloc (sizeof (ZoomFilterFXWrapperData));

  data->coeffs = 0;
  data->freecoeffs = 0;
  data->brutS = 0;
  data->freebrutS = 0;
  data->brutD = 0;
  data->freebrutD = 0;
  data->brutT = 0;
  data->freebrutT = 0;
  data->prevX = 0;
  data->prevY = 0;

  data->mustInitBuffers = 1;
  /* -2 = transition state machine idle (see zoomFilterFastRGB). */
  data->interlace_start = -2;

  data->general_speed = 0.0f;
  data->reverse = 0;
  data->theMode = AMULETTE_MODE;
  data->waveEffect = 0;
  data->hypercosEffect = 0;
  data->vPlaneEffect = 0;
  data->hPlaneEffect = 0;
  data->noisify = 2;

  /* modif by jeko: fixed point: buffratio is 16:16 (so 0 <= buffratio <= 2^16) */
  data->buffratio = 0;
  data->firedec = 0;

  data->wave = data->wavesp = 0;

  secure_b_param (&data->enabled_bp, "Enabled", 1);

  plugin_parameters (&data->params, "Zoom Filter", 1);
  data->params.params[0] = &data->enabled_bp;

  _this->params = &data->params;
  _this->fx_data = (void *) data;

  /* Optimisation by Jeko: precompute the 4 blend coefficients resulting
   * from the 2 sub-pixel positions. */
  generatePrecalCoef (data->precalCoef);
}
+
+static void
+zoomFilterVisualFXWrapper_free (struct _VISUAL_FX *_this)
+{
+ ZoomFilterFXWrapperData *data = (ZoomFilterFXWrapperData *) _this->fx_data;
+
+ if (data->freebrutT)
+ free (data->freebrutT);
+ if (data->freebrutS)
+ free (data->freebrutS);
+ if (data->freebrutD)
+ free (data->freebrutD);
+ if (data->firedec)
+ free (data->firedec);
+
+ goom_plugin_parameters_free (_this->params);
+
+ free (_this->fx_data);
+}
+
/* VisualFX apply callback: intentionally a no-op.  The zoom pass is not
 * driven through the generic apply() mechanism; callers invoke
 * zoomFilterFastRGB() directly instead (NOTE(review): presumably because
 * it needs the extra switchIncr/switchMult arguments — confirm against
 * the caller). */
static void
zoomFilterVisualFXWrapper_apply (struct _VISUAL_FX *_this, Pixel * src,
    Pixel * dest, PluginInfo * info)
{
}
+
+void
+zoomFilterVisualFXWrapper_create (VisualFX * fx)
+{
+ fx->init = zoomFilterVisualFXWrapper_init;
+ fx->free = zoomFilterVisualFXWrapper_free;
+ fx->apply = zoomFilterVisualFXWrapper_apply;
+ fx->params = NULL;
+ fx->fx_data = NULL;
+}
+
+
+/* TODO : MOVE THIS AWAY */
+
/*
 * Draw a small plus-shaped "point" orbiting the screen centre on a
 * Lissajous-like path: x is driven by t1*cos(cycle/t3), y by t2*sin(cycle/t4).
 * The centre pixel (x+1, y+1) is drawn WHITE, its four neighbours in colour c.
 *
 * NOTE(review): x and y are unsigned, so a position left of / above the
 * screen wraps to a huge value and is rejected by the bounds check —
 * presumably relied upon as the off-screen guard; confirm.
 */
void
pointFilter (PluginInfo * goomInfo, Pixel * pix1, Color c, float t1, float t2,
    float t3, float t4, Uint cycle)
{
  Uint x = (Uint) ((int) (goomInfo->screen.width / 2)
      + (int) (t1 * cos ((float) cycle / t3)));
  Uint y = (Uint) ((int) (goomInfo->screen.height / 2)
      + (int) (t2 * sin ((float) cycle / t4)));

  /* Only draw when the whole 3x3 neighbourhood fits on the screen. */
  if ((x > 1) && (y > 1) && (x < goomInfo->screen.width - 2)
      && (y < goomInfo->screen.height - 2)) {
    setPixelRGB (goomInfo, pix1, x + 1, y, c);
    setPixelRGB (goomInfo, pix1, x, y + 1, c);
    setPixelRGB (goomInfo, pix1, x + 1, y + 1, WHITE);
    setPixelRGB (goomInfo, pix1, x + 2, y + 1, c);
    setPixelRGB (goomInfo, pix1, x + 1, y + 2, c);
  }
}
diff --git a/gst/goom/filters_mmx.s b/gst/goom/filters_mmx.s
new file mode 100644
index 0000000000..9320d05ec7
--- /dev/null
+++ b/gst/goom/filters_mmx.s
@@ -0,0 +1,216 @@
+; Goom Project
+; Copyright (C) <2001> Jean-Christophe Hoelt <jeko@free.fr>
+;
+; This library is free software; you can redistribute it and/or
+; modify it under the terms of the GNU Library General Public
+; License as published by the Free Software Foundation; either
+; version 2 of the License, or (at your option) any later version.
+;
+; This library is distributed in the hope that it will be useful,
+; but WITHOUT ANY WARRANTY; without even the implied warranty of
+; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+; Library General Public License for more details.
+;
+; You should have received a copy of the GNU Library General Public
+; License along with this library; if not, write to the
+; Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+; Boston, MA 02110-1301, USA.
+;
+;//
+;// history
+;// 07/01/2001 : Changing FEMMS to EMMS : slower... but run on intel machines
+;// 03/01/2001 : WIDTH and HEIGHT are now variable
+;// 28/12/2000 : adding comments to the code, suppress some useless lines
+;// 27/12/2000 : reducing memory access... improving performance by 20%
+;// coefficients are now on 1 byte
+;// 22/12/2000 : Changing data structure
+;// 16/12/2000 : AT&T version
+;// 14/12/2000 : unrolling loop
+;// 12/12/2000 : 64 bits memory access
+
+
+.data
+
+chaine:
+ .string "pos = %d\n\0"
+ .long 0x0
+
+thezero:
+ .long 0x00000000
+ .long 0x00000000
+
+.text
+
+.globl mmx_zoom ;// name of the function to call by C program
+/* .extern coeffs ;// the transformation buffer */
+.extern expix1,expix2 ;// the source and destination buffer
+.extern mmx_zoom_size, zoom_width ;// size of the buffers
+
+.extern brutS,brutD,buffratio,precalCoef,prevX,prevY
+
+#define PERTEMASK 15
+/* faire : a / sqrtperte <=> a >> PERTEDEC*/
+#define PERTEDEC 4
+
.align 16
mmx_zoom:

;// NOTE(review): every load/store of actual pixel data (the movq/movd lines
;// left commented out below) is disabled, so as written the loop computes
;// from uninitialised MMX registers and never stores a result.  The routine
;// appears unfinished/disabled — confirm before wiring it in.

    pushl %ebp
    movl %esp,%ebp
    subl $12,%esp

;// -4(%ebp) = (prevX - 1) >> 4
    movl prevX,%eax
    decl %eax
    sarl $4,%eax
    movl %eax,-4(%ebp)

;// -8(%ebp) = (prevY - 1) >> 4
    movl prevY,%eax
    decl %eax
    sarl $4,%eax
    movl %eax,-8(%ebp)

;// initialise mm7 to zero
    movq (thezero), %mm7

movl mmx_zoom_size, %ecx
decl %ecx

.while:
    ;// esi <- new position
    movl brutS, %eax
    leal (%eax, %ecx, 8),%eax

    movl (%eax),%edx /* = brutS.px (brutSmypos) */
    movl 4(%eax),%eax /* = brutS.py */

    ;// px = brutS.px + (((brutD.px - brutS.px) * buffratio) >> 16)
    movl brutD,%ebx
    leal (%ebx, %ecx, 8),%ebx
    movl (%ebx),%esi
    subl %edx, %esi
    imull buffratio,%esi
    sarl $16,%esi
    addl %edx,%esi /* esi = px */

    /* eax already holds brutS.py = the new brutSmypos */
    /* ebx points to brutD[myPos] */
    movl 4(%ebx),%edi
    subl %eax,%edi
    imull buffratio,%edi
    sarl $16,%edi
    addl %eax,%edi /* edi = py */

/* pushl %eax
    pushl %ebx*/
/* popl %ebx
    popl %eax*/

    movl %esi,%eax
    andl $15,%eax /* eax = coefh */
    movl %edi,%ebx
    andl $15,%ebx /* ebx = coefv */

    ;// look up the packed bilinear coefficients: precalCoef[coefh][coefv]
    leal 0(,%ebx,4),%ebx
    sall $6,%eax
    addl %ebx,%eax
    movl $precalCoef,%ebx
/* movd (%eax,%ebx),%mm6*/ /* mm6 = coeffs */

    ;// bounds check: out-of-range positions fall back to pos 0
    cmpl -8(%ebp),%edi
    jge .then1
    cmpl -4(%ebp),%esi
    jge .then1

    ;// pos = (px >> 4) + (py >> 4) * zoom_width
    sarl $4,%esi
    sarl $4,%edi
    imull zoom_width,%edi
    leal (%esi,%edi),%esi
    jmp .finsi1

.then1:
    movl $0,%esi
.finsi1:

    /** after this computation, %esi = pos, %mm6 = coeffs **/
/* pushl %esi
    pushl $chaine
    call printf
    addl $8,%esp*/

    movl expix1,%eax

    ;// fetch the first two pixels into mm0 and mm1
/* movq (%eax,%esi,4), %mm0 -- b1-v1-r1-a1-b2-v2-r2-a2 */
    movq %mm0, %mm1 /* b1-v1-r1-a1-b2-v2-r2-a2 */

    ;// unpack the first pixel
    punpcklbw %mm7, %mm0 /* 00-b2-00-v2-00-r2-00-a2 */

    movq %mm6, %mm5 /* ??-??-??-??-c4-c3-c2-c1 */
    ;// unpack the second pixel
    punpckhbw %mm7, %mm1 /* 00-b1-00-v1-00-r1-00-a1 */

    ;// extract the coefficients...
    punpcklbw %mm5, %mm6 /* c4-c4-c3-c3-c2-c2-c1-c1 */
    movq %mm6, %mm4 /* c4-c4-c3-c3-c2-c2-c1-c1 */
    movq %mm6, %mm5 /* c4-c4-c3-c3-c2-c2-c1-c1 */

    punpcklbw %mm5, %mm6 /* c2-c2-c2-c2-c1-c1-c1-c1 */
    punpckhbw %mm5, %mm4 /* c4-c4-c4-c4-c3-c3-c3-c3 */

    movq %mm6, %mm3 /* c2-c2-c2-c2-c1-c1-c1-c1 */
    punpcklbw %mm7, %mm6 /* 00-c1-00-c1-00-c1-00-c1 */
    punpckhbw %mm7, %mm3 /* 00-c2-00-c2-00-c2-00-c2 */

    ;// multiply the pixels by the coefficients
    pmullw %mm6, %mm0 /* c1*b2-c1*v2-c1*r2-c1*a2 */
    pmullw %mm3, %mm1 /* c2*b1-c2*v1-c2*r1-c2*a1 */
    paddw %mm1, %mm0

    ;// ...extract the last 2 coefficients
    movq %mm4, %mm5 /* c4-c4-c4-c4-c3-c3-c3-c3 */
    punpcklbw %mm7, %mm4 /* 00-c3-00-c3-00-c3-00-c3 */
    punpckhbw %mm7, %mm5 /* 00-c4-00-c4-00-c4-00-c4 */

    /* add the line length to esi (move to the next scanline) */
    addl prevX,%esi

    ;// fetch the last 2 pixels
/* movq (%eax,%esi,4), %mm1*/
    movq %mm1, %mm2

    ;// unpack the pixels
    punpcklbw %mm7, %mm1
    punpckhbw %mm7, %mm2

    ;// multiply by the coeffs
    pmullw %mm4, %mm1
    pmullw %mm5, %mm2

    ;// add the obtained values to the final value
    paddw %mm1, %mm0
    paddw %mm2, %mm0

    ;// divide by 256 = 16+16+16+16, then repack the final pixel
    psrlw $8, %mm0
    packuswb %mm7, %mm0

    ;// move on to the next pixel

    ;// store the result
    movl expix2,%eax
/* movd %mm0,(%eax,%ecx,4)*/

    decl %ecx
    ;// end-of-loop test
    cmpl $0, %ecx ;// 400x300

    jz .fin_while
    jmp .while

.fin_while:
    emms

    movl %ebp,%esp
    popl %ebp

    ret ;//The End
diff --git a/gst/goom/flying_stars_fx.c b/gst/goom/flying_stars_fx.c
new file mode 100644
index 0000000000..4f97be9c38
--- /dev/null
+++ b/gst/goom/flying_stars_fx.c
@@ -0,0 +1,361 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "goom_fx.h"
+#include "goom_plugin_info.h"
+#include "goom_tools.h"
+
+#include "mathtools.h"
+
+/* TODO:-- FAIRE PROPREMENT... BOAH... */
+#define NCOL 15
+
+/*static const int colval[] = {
+0xfdf6f5,
+0xfae4e4,
+0xf7d1d1,
+0xf3b6b5,
+0xefa2a2,
+0xec9190,
+0xea8282,
+0xe87575,
+0xe46060,
+0xe14b4c,
+0xde3b3b,
+0xdc2d2f,
+0xd92726,
+0xd81619,
+0xd50c09,
+0
+};
+*/
/* Colour ramp indexed by the integer part of a star's age (0 .. NCOL-1);
 * the final entry is 0, i.e. all channels off. */
static const int colval[] = {
  0x1416181a,
  0x1419181a,
  0x141f181a,
  0x1426181a,
  0x142a181a,
  0x142f181a,
  0x1436181a,
  0x142f1819,
  0x14261615,
  0x13201411,
  0x111a100a,
  0x0c180508,
  0x08100304,
  0x00050101,
  0x0
};
+
+
+/* The different modes of the visual FX.
+ * Put this values on fx_mode */
+#define FIREWORKS_FX 0
+#define RAIN_FX 1
+#define FOUNTAIN_FX 2
+#define LAST_FX 3
+
+typedef struct _FS_STAR
+{
+ float x, y;
+ float vx, vy;
+ float ax, ay;
+ float age, vage;
+} Star;
+
+typedef struct _FS_DATA
+{
+
+ int fx_mode;
+ int nbStars;
+
+ int maxStars;
+ Star *stars;
+
+ float min_age;
+ float max_age;
+
+ PluginParam min_age_p;
+ PluginParam max_age_p;
+ PluginParam nbStars_p;
+ PluginParam nbStars_limit_p;
+ PluginParam fx_mode_p;
+
+ PluginParameters params;
+} FSData;
+
/*
 * VisualFX init callback: allocate the particle pool and register the
 * user-tweakable parameters.
 *
 * NOTE(review): both malloc() results are used unchecked.
 */
static void
fs_init (VisualFX * _this, PluginInfo * info)
{

  FSData *data;

  data = (FSData *) malloc (sizeof (FSData));

  data->fx_mode = FIREWORKS_FX;
  data->maxStars = 4096;
  data->stars = (Star *) malloc (data->maxStars * sizeof (Star));
  data->nbStars = 0;

  /* The two "age" parameters are percentages; fs_apply() converts them with
   * 1.0 - value/100, so a larger percentage means slower ageing. */
  secure_i_param (&data->max_age_p, "Fireworks Smallest Bombs");
  IVAL (data->max_age_p) = 80;
  IMIN (data->max_age_p) = 0;
  IMAX (data->max_age_p) = 100;
  ISTEP (data->max_age_p) = 1;

  secure_i_param (&data->min_age_p, "Fireworks Largest Bombs");
  IVAL (data->min_age_p) = 99;
  IMIN (data->min_age_p) = 0;
  IMAX (data->min_age_p) = 100;
  ISTEP (data->min_age_p) = 1;

  /* Upper bound is the allocated pool size: fs_apply() copies this value
   * into maxStars and stars[] is never reallocated. */
  secure_i_param (&data->nbStars_limit_p, "Max Number of Particules");
  IVAL (data->nbStars_limit_p) = 512;
  IMIN (data->nbStars_limit_p) = 0;
  IMAX (data->nbStars_limit_p) = data->maxStars;
  ISTEP (data->nbStars_limit_p) = 64;

  secure_i_param (&data->fx_mode_p, "FX Mode");
  IVAL (data->fx_mode_p) = data->fx_mode;
  IMIN (data->fx_mode_p) = 1;
  IMAX (data->fx_mode_p) = 3;
  ISTEP (data->fx_mode_p) = 1;

  /* Read-only feedback value reporting the current pool usage. */
  secure_f_feedback (&data->nbStars_p, "Number of Particules (% of Max)");

  /* Entries 2 and 5 are left NULL — presumably rendered as separators by
   * the GUI; confirm against the parameter-rendering code. */
  plugin_parameters (&data->params, "Particule System", 7);
  data->params.params[0] = &data->fx_mode_p;
  data->params.params[1] = &data->nbStars_limit_p;
  data->params.params[2] = 0;
  data->params.params[3] = &data->min_age_p;
  data->params.params[4] = &data->max_age_p;
  data->params.params[5] = 0;
  data->params.params[6] = &data->nbStars_p;

  _this->params = &data->params;
  _this->fx_data = (void *) data;
}
+
+static void
+fs_free (VisualFX * _this)
+{
+ FSData *data = (FSData *) _this->fx_data;
+
+ goom_plugin_parameters_free (&data->params);
+
+ free (data->stars);
+ free (_this->fx_data);
+}
+
+
/*
 * Creates a new 'bomb', i.e. a particle belonging to a firework rocket.
 */
+static void
+addABomb (FSData * fs, int mx, int my, float radius, float vage, float gravity,
+ PluginInfo * info)
+{
+
+ int i = fs->nbStars;
+ float ro;
+ int theta;
+
+ if (fs->nbStars >= fs->maxStars)
+ return;
+ fs->nbStars++;
+
+ fs->stars[i].x = mx;
+ fs->stars[i].y = my;
+
+ ro = radius * (float) goom_irand (info->gRandom, 100) / 100.0f;
+ ro *= (float) goom_irand (info->gRandom, 100) / 100.0f + 1.0f;
+ theta = goom_irand (info->gRandom, 256);
+
+ fs->stars[i].vx = ro * cos256[theta];
+ fs->stars[i].vy = -0.2f + ro * sin256[theta];
+
+ fs->stars[i].ax = 0;
+ fs->stars[i].ay = gravity;
+
+ fs->stars[i].age = 0;
+ if (vage < fs->min_age)
+ vage = fs->min_age;
+ fs->stars[i].vage = vage;
+}
+
+
/*
 * Updates the position and velocity of a particle.
 */
+static void
+updateStar (Star * s)
+{
+ s->x += s->vx;
+ s->y += s->vy;
+ s->vx += s->ax;
+ s->vy += s->ay;
+ s->age += s->vage;
+}
+
+
/*
 * Adds new particles when a sound event occurs.
 */
/*
 * Spawn a burst of particles in reaction to a sound event.  Burst size and
 * radius grow with the current sound power; the spawn point and ageing speed
 * depend on the active fx_mode.
 */
static void
fs_sound_event_occured (VisualFX * _this, PluginInfo * info)
{

  FSData *data = (FSData *) _this->fx_data;
  int i;

  int max = (int) ((1.0f + info->sound.goomPower) * goom_irand (info->gRandom,
          150)) + 100;
  float radius =
      (1.0f + info->sound.goomPower) * (float) (goom_irand (info->gRandom,
          150) + 50) / 300;
  int mx;
  int my;
  float vage, gravity = 0.02f;

  switch (data->fx_mode) {
    case FIREWORKS_FX:
    {
      double dx, dy;

      /* Pick a random burst centre OUTSIDE the central circle of radius
       * screen.height/2 (the loop retries while the point is inside it). */
      do {
        mx = goom_irand (info->gRandom, info->screen.width);
        my = goom_irand (info->gRandom, info->screen.height);
        dx = (mx - info->screen.width / 2);
        dy = (my - info->screen.height / 2);
      } while (dx * dx + dy * dy <
          (info->screen.height / 2) * (info->screen.height / 2));
      vage = data->max_age * (1.0f - info->sound.goomPower);
    }
      break;
    case RAIN_FX:
      /* Spawn above the screen, snapped to the left or right edge. */
      mx = goom_irand (info->gRandom, info->screen.width);
      if (mx > info->screen.width / 2)
        mx = info->screen.width;
      else
        mx = 0;
      my = -(info->screen.height / 3) - goom_irand (info->gRandom,
          info->screen.width / 3);
      radius *= 1.5;
      vage = 0.002f;
      break;
    case FOUNTAIN_FX:
      /* Spawn just below the bottom centre, with stronger gravity. */
      my = info->screen.height + 2;
      vage = 0.001f;
      radius += 1.0f;
      mx = info->screen.width / 2;
      gravity = 0.04f;
      break;
    default:
      /* Unknown mode (fs_apply may randomise fx_mode past LAST_FX):
       * spawn nothing. */
      return;
      /* my = i R A N D (info->screen.height); vage = 0.01f; */
  }

  radius *= info->screen.height / 200.0f;       /* why 200? because the FX was developed on 320x200 */
  max *= info->screen.height / 200.0f;

  /* Right after a big goom, make the burst even larger. */
  if (info->sound.timeSinceLastBigGoom < 1) {
    radius *= 1.5;
    max *= 2;
  }
  for (i = 0; i < max; ++i)
    addABomb (data, mx, my, radius, vage, gravity, info);
}
+
+
/*
 * Main method of the FX.
 */
/*
 * Per-frame update: refresh the parameters, spawn bursts on sound events,
 * then move, draw and reap every particle.
 */
static void
fs_apply (VisualFX * _this, Pixel * src, Pixel * dest, PluginInfo * info)
{

  int i;
  int col;
  FSData *data = (FSData *) _this->fx_data;

  /* Get the new parameter values. */
  data->min_age = 1.0f - (float) IVAL (data->min_age_p) / 100.0f;
  data->max_age = 1.0f - (float) IVAL (data->max_age_p) / 100.0f;
  /* Publish the pool usage back to the GUI. */
  FVAL (data->nbStars_p) = (float) data->nbStars / (float) data->maxStars;
  data->nbStars_p.change_listener (&data->nbStars_p);
  data->maxStars = IVAL (data->nbStars_limit_p);
  data->fx_mode = IVAL (data->fx_mode_p);

  /* Look for sound events. */
  if (info->sound.timeSinceLastGoom < 1) {
    fs_sound_event_occured (_this, info);
    /* Roughly once per 20 gooms, pick a random mode.  The range is
     * LAST_FX * 3, so two times out of three the value lies past LAST_FX
     * and bursts are suppressed (fs_sound_event_occured's default case) —
     * presumably intentional; confirm. */
    if (goom_irand (info->gRandom, 20) == 1) {
      IVAL (data->fx_mode_p) = goom_irand (info->gRandom, (LAST_FX * 3));
      data->fx_mode_p.change_listener (&data->fx_mode_p);
    }
  }

  /* Update and draw the particles. */
  for (i = 0; i < data->nbStars; ++i) {
    updateStar (&data->stars[i]);

    /* Dead particle: skip drawing (removal happens below). */
    if (data->stars[i].age >= NCOL)
      continue;

    /* Choose the colour of the particle from its age. */
    col = colval[(int) data->stars[i].age];

    /* Draw the particle: two trails along the negated velocity vector. */
    info->methods.draw_line (dest, (int) data->stars[i].x,
        (int) data->stars[i].y,
        (int) (data->stars[i].x - data->stars[i].vx * 6),
        (int) (data->stars[i].y - data->stars[i].vy * 6), col,
        (int) info->screen.width, (int) info->screen.height);
    info->methods.draw_line (dest, (int) data->stars[i].x,
        (int) data->stars[i].y,
        (int) (data->stars[i].x - data->stars[i].vx * 2),
        (int) (data->stars[i].y - data->stars[i].vy * 2), col,
        (int) info->screen.width, (int) info->screen.height);
  }

  /* Reap dead particles: off-screen or fully aged ones are removed by
   * swapping the last live particle into their slot (no increment then). */
  for (i = 0; i < data->nbStars;) {

    if ((data->stars[i].x > info->screen.width + 64)
        || ((data->stars[i].vy >= 0)
            && (data->stars[i].y - 16 * data->stars[i].vy >
                info->screen.height))
        || (data->stars[i].x < -64)
        || (data->stars[i].age >= NCOL)) {
      data->stars[i] = data->stars[data->nbStars - 1];
      data->nbStars--;
    } else
      ++i;
  }
}
+
+void
+flying_star_create (VisualFX * vfx)
+{
+ vfx->init = fs_init;
+ vfx->free = fs_free;
+ vfx->apply = fs_apply;
+ vfx->fx_data = NULL;
+ vfx->params = NULL;
+}
diff --git a/gst/goom/goom.h b/gst/goom/goom.h
new file mode 100644
index 0000000000..02a0936e7d
--- /dev/null
+++ b/gst/goom/goom.h
@@ -0,0 +1,42 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _GOOMCORE_H
+#define _GOOMCORE_H
+
+#include "goom_config.h"
+#include "goom_plugin_info.h"
+
+#define NB_FX 10
+
+PluginInfo *goom_init (guint32 resx, guint32 resy);
+void goom_set_resolution (PluginInfo *goomInfo, guint32 resx, guint32 resy);
+
+/*
+ * forceMode == 0 : do nothing
+ * forceMode == -1 : lock the FX
+ * forceMode == 1..NB_FX : force a switch to FX n# forceMode
+ */
+guint32 *goom_update (PluginInfo *goomInfo, gint16 data[2][512], int forceMode, float fps);
+
+/* returns 0 if the buffer wasn't accepted */
+int goom_set_screenbuffer(PluginInfo *goomInfo, void *buffer);
+
+void goom_close (PluginInfo *goomInfo);
+
+#endif
diff --git a/gst/goom/goom_config.h b/gst/goom/goom_config.h
new file mode 100644
index 0000000000..39b95a0f75
--- /dev/null
+++ b/gst/goom/goom_config.h
@@ -0,0 +1,45 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include <glib.h>
+
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+#define COLOR_ARGB
+#else
+#define COLOR_BGRA
+#endif
+
+#if 1
+/* ndef COLOR_BGRA */
+/* position des composantes */
+ #define BLEU 0
+ #define VERT 1
+ #define ROUGE 2
+ #define ALPHA 3
+#else
+ #define ROUGE 1
+ #define BLEU 3
+ #define VERT 2
+ #define ALPHA 0
+#endif
+
+#if defined (BUILD_MMX) && defined (HAVE_GCC_ASM)
+
+#define HAVE_MMX
+#endif
+
diff --git a/gst/goom/goom_config_param.h b/gst/goom/goom_config_param.h
new file mode 100644
index 0000000000..ba08da95c5
--- /dev/null
+++ b/gst/goom/goom_config_param.h
@@ -0,0 +1,134 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _CONFIG_PARAM_H
+#define _CONFIG_PARAM_H
+
+#include <stdlib.h>
+
+/*
+ * File created on 2003-05-24 by Jeko.
+ * (c)2003, JC Hoelt for iOS-software.
+ *
+ * LGPL Licence.
+ */
+
+typedef enum {
+  PARAM_INTVAL,
+  PARAM_FLOATVAL,
+  PARAM_BOOLVAL,
+  PARAM_STRVAL,
+  PARAM_LISTVAL,
+} ParamType;                    /* discriminator for the PluginParam union below */
+
+struct IntVal {
+  int value;
+  int min;
+  int max;
+  int step;
+};
+struct FloatVal {
+  float value;
+  float min;
+  float max;
+  float step;
+};
+struct StrVal {
+  char *value;
+};
+struct ListVal {
+  char *value;                  /* presumably the currently selected choice — confirm */
+  int nbChoices;                /* number of entries in choices */
+  char **choices;
+};
+struct BoolVal {
+  int value;
+};
+
+
+/* A single named, typed plugin parameter exposed to a GUI. */
+typedef struct _PARAM {
+  const char *name;
+  const char *desc;
+  char rw;                      /* presumably a read/write flag — confirm against callers */
+  ParamType type;               /* selects which union member is valid */
+  union {
+    struct IntVal ival;
+    struct FloatVal fval;
+    struct StrVal sval;
+    struct ListVal slist;
+    struct BoolVal bval;
+  } param;
+
+  /* used by the core to inform the GUI of a change */
+  void (*change_listener)(struct _PARAM *_this);
+
+  /* used by the GUI to inform the core of a change */
+  void (*changed)(struct _PARAM *_this);
+
+  void *user_data; /* can be used by the GUI */
+} PluginParam;
+
+#define IVAL(p) ((p).param.ival.value)
+#define SVAL(p) ((p).param.sval.value)
+#define FVAL(p) ((p).param.fval.value)
+#define BVAL(p) ((p).param.bval.value)
+#define LVAL(p) ((p).param.slist.value)
+
+#define FMIN(p) ((p).param.fval.min)
+#define FMAX(p) ((p).param.fval.max)
+#define FSTEP(p) ((p).param.fval.step)
+
+#define IMIN(p) ((p).param.ival.min)
+#define IMAX(p) ((p).param.ival.max)
+#define ISTEP(p) ((p).param.ival.step)
+
+void goom_secure_param(PluginParam *p);
+
+void goom_secure_f_param(PluginParam *p, const char *name);
+void goom_secure_i_param(PluginParam *p, const char *name);
+void goom_secure_b_param(PluginParam *p, const char *name, int value);
+void goom_secure_s_param(PluginParam *p, const char *name);
+
+void goom_secure_f_feedback(PluginParam *p, const char *name);
+void goom_secure_i_feedback(PluginParam *p, const char *name);
+
+void goom_set_str_param_value(PluginParam *p, const char *str);
+void goom_set_list_param_value(PluginParam *p, const char *str);
+
+typedef struct _PARAMETERS {
+ const char *name;
+ const char *desc;
+ int nbParams;
+ PluginParam **params;
+} PluginParameters;
+
+void goom_plugin_parameters(PluginParameters *p, const char *name, int nb);
+void goom_plugin_parameters_free(PluginParameters *p);
+
+#define secure_param goom_secure_param
+#define secure_f_param goom_secure_f_param
+#define secure_i_param goom_secure_i_param
+#define secure_b_param goom_secure_b_param
+#define secure_s_param goom_secure_s_param
+#define secure_f_feedback goom_secure_f_feedback
+#define secure_i_feedback goom_secure_i_feedback
+#define set_list_param_value goom_set_list_param_value
+#define set_str_param_value goom_set_str_param_value
+#define plugin_parameters goom_plugin_parameters
+
+#endif
diff --git a/gst/goom/goom_core.c b/gst/goom/goom_core.c
new file mode 100644
index 0000000000..5b1d4f76c8
--- /dev/null
+++ b/gst/goom/goom_core.c
@@ -0,0 +1,856 @@
+/* Goom Project
+ * Copyright (C) <2003> Jean-Christophe Hoelt <jeko@free.fr>
+ *
+ * goom_core.c:Contains the core of goom's work.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef HAVE_INTTYPES_H
+#include <inttypes.h>
+#endif
+
+#include "goom.h"
+#include "goom_tools.h"
+#include "goom_filters.h"
+#include "lines.h"
+#include "ifs.h"
+#include "tentacle3d.h"
+
+#include "sound_tester.h"
+#include "goom_plugin_info.h"
+#include "goom_fx.h"
+
+/* #define VERBOSE */
+
+#define STOP_SPEED 128
+/* TODO: put that as variable in PluginInfo */
+#define TIME_BTW_CHG 300
+
+static void choose_a_goom_line (PluginInfo * goomInfo, float *param1,
+ float *param2, int *couleur, int *mode, float *amplitude, int far);
+
+static void
+init_buffers (PluginInfo * goomInfo, int buffsize)
+{
+  /* allocate 128 spare bytes so p1/p2 can be aligned to a 128-byte boundary below;
+   * NOTE(review): malloc results are not checked */
+  goomInfo->pixel = (guint32 *) malloc (buffsize * sizeof (guint32) + 128);
+  memset (goomInfo->pixel, 0, buffsize * sizeof (guint32) + 128);
+  goomInfo->back = (guint32 *) malloc (buffsize * sizeof (guint32) + 128);
+  memset (goomInfo->back, 0, buffsize * sizeof (guint32) + 128);
+  goomInfo->conv = (Pixel *) malloc (buffsize * sizeof (guint32) + 128);
+  memset (goomInfo->conv, 0, buffsize * sizeof (guint32) + 128);
+
+  goomInfo->outputBuf = goomInfo->conv;
+
+  /* round up to the next 128-byte boundary inside the over-allocated buffers */
+  goomInfo->p1 = (Pixel *) ((1 + ((uintptr_t) (goomInfo->pixel)) / 128) * 128);
+  goomInfo->p2 = (Pixel *) ((1 + ((uintptr_t) (goomInfo->back)) / 128) * 128);
+}
+
+/**************************
+* INIT *
+**************************/
+/* Allocate and fully initialise a PluginInfo for a resx x resy screen. */
+PluginInfo *
+goom_init (guint32 resx, guint32 resy)
+{
+  PluginInfo *goomInfo = (PluginInfo *) malloc (sizeof (PluginInfo)); /* NOTE(review): unchecked */
+
+#ifdef VERBOSE
+  printf ("GOOM: init (%d, %d);\n", resx, resy);
+#endif
+
+  plugin_info_init (goomInfo, 4);       /* 4 visual FX slots, filled below */
+
+  goomInfo->screen.width = resx;
+  goomInfo->screen.height = resy;
+  goomInfo->screen.size = resx * resy;
+
+  init_buffers (goomInfo, goomInfo->screen.size);
+  /* RNG seeded from the pixel buffer's address — effectively arbitrary */
+  goomInfo->gRandom = goom_random_init ((uintptr_t) goomInfo->pixel);
+
+  goomInfo->cycle = 0;
+
+  flying_star_create (&goomInfo->star_fx);
+  goomInfo->star_fx.init (&goomInfo->star_fx, goomInfo);
+
+  zoomFilterVisualFXWrapper_create (&goomInfo->zoomFilter_fx);
+  goomInfo->zoomFilter_fx.init (&goomInfo->zoomFilter_fx, goomInfo);
+
+  tentacle_fx_create (&goomInfo->tentacles_fx);
+  goomInfo->tentacles_fx.init (&goomInfo->tentacles_fx, goomInfo);
+
+  convolve_create (&goomInfo->convolve_fx);
+  goomInfo->convolve_fx.init (&goomInfo->convolve_fx, goomInfo);
+
+  plugin_info_add_visual (goomInfo, 0, &goomInfo->zoomFilter_fx);
+  plugin_info_add_visual (goomInfo, 1, &goomInfo->tentacles_fx);
+  plugin_info_add_visual (goomInfo, 2, &goomInfo->star_fx);
+  plugin_info_add_visual (goomInfo, 3, &goomInfo->convolve_fx);
+
+  /* the IFS fx is created but not registered as a visual slot */
+  ifs_visualfx_create (&goomInfo->ifs_fx);
+  goomInfo->ifs_fx.init (&goomInfo->ifs_fx, goomInfo);
+
+  goomInfo->gmline1 = goom_lines_init (goomInfo, resx, goomInfo->screen.height,
+      GML_HLINE, goomInfo->screen.height, GML_BLACK,
+      GML_CIRCLE, 0.4f * (float) goomInfo->screen.height, GML_VERT);
+  goomInfo->gmline2 = goom_lines_init (goomInfo, resx, goomInfo->screen.height,
+      GML_HLINE, 0, GML_BLACK,
+      GML_CIRCLE, 0.2f * (float) goomInfo->screen.height, GML_RED);
+
+  /* goom_set_main_script(goomInfo, goomInfo->main_script_str); */
+
+  return goomInfo;
+}
+
+
+
+/* Change the output resolution: reallocate the buffers and re-init the
+ * resolution-dependent effects (IFS, scope lines). */
+void
+goom_set_resolution (PluginInfo * goomInfo, guint32 resx, guint32 resy)
+{
+  /* drop the old buffers before reallocating at the new size */
+  free (goomInfo->pixel);
+  free (goomInfo->back);
+  free (goomInfo->conv);
+
+  goomInfo->screen.width = resx;
+  goomInfo->screen.height = resy;
+  goomInfo->screen.size = resx * resy;
+
+  init_buffers (goomInfo, goomInfo->screen.size);
+
+  /* init_ifs (goomInfo, resx, goomInfo->screen.height); */
+  goomInfo->ifs_fx.free (&goomInfo->ifs_fx);
+  goomInfo->ifs_fx.init (&goomInfo->ifs_fx, goomInfo);
+
+  goom_lines_set_res (goomInfo->gmline1, resx, goomInfo->screen.height);
+  goom_lines_set_res (goomInfo->gmline2, resx, goomInfo->screen.height);
+}
+
+/* Point the renderer at an externally-owned output buffer.
+ * Always returns 1 (accepted); the header reserves 0 for "not accepted". */
+int
+goom_set_screenbuffer (PluginInfo * goomInfo, void *buffer)
+{
+  goomInfo->outputBuf = (Pixel *) buffer;
+  return 1;
+}
+
+/********************************************
+* UPDATE *
+********************************************
+
+* WARNING: this is a 600 lines function ! (21-11-2003)
+*/
+guint32 *
+goom_update (PluginInfo * goomInfo, gint16 data[2][512], int forceMode,
+    float fps)                  /* NOTE(review): fps is unused here */
+{
+  Pixel *return_val;
+  guint32 pointWidth;
+  guint32 pointHeight;
+  int i;
+  float largfactor;             /* widening of the points' evolution interval */
+  Pixel *tmp;
+
+  ZoomFilterData *pzfd;
+
+  /* test if the config has changed, update it if so */
+  pointWidth = (goomInfo->screen.width * 2) / 5;
+  pointHeight = ((goomInfo->screen.height) * 2) / 5;
+
+  /* analyse the sound signal */
+  evaluate_sound (data, &(goomInfo->sound));
+
+  /* goom_execute_main_script(goomInfo); */
+
+  /* compute the motion of the small points */
+  largfactor =
+      goomInfo->sound.speedvar / 150.0f + goomInfo->sound.volume / 1.5f;
+
+  if (largfactor > 1.5f)
+    largfactor = 1.5f;
+
+  goomInfo->update.decay_ifs--;
+  if (goomInfo->update.decay_ifs > 0)
+    goomInfo->update.ifs_incr += 2;
+  if (goomInfo->update.decay_ifs == 0)
+    goomInfo->update.ifs_incr = 0;
+
+  if (goomInfo->update.recay_ifs) {
+    goomInfo->update.ifs_incr -= 2;
+    goomInfo->update.recay_ifs--;
+    if ((goomInfo->update.recay_ifs == 0) && (goomInfo->update.ifs_incr <= 0))
+      goomInfo->update.ifs_incr = 1;
+  }
+
+  if (goomInfo->update.ifs_incr > 0)
+    goomInfo->ifs_fx.apply (&goomInfo->ifs_fx, goomInfo->p2, goomInfo->p1,
+        goomInfo);
+
+  if (goomInfo->curGState->drawPoints) {
+    for (i = 1; i * 15 <= goomInfo->sound.speedvar * 80.0f + 15; i++) {
+      goomInfo->update.loopvar += goomInfo->sound.speedvar * 50 + 1;
+
+      pointFilter (goomInfo, goomInfo->p1,
+          YELLOW,
+          ((pointWidth - 6.0f) * largfactor + 5.0f),
+          ((pointHeight - 6.0f) * largfactor + 5.0f),
+          i * 152.0f, 128.0f, goomInfo->update.loopvar + i * 2032);
+      pointFilter (goomInfo, goomInfo->p1, ORANGE,
+          ((pointWidth / 2) * largfactor) / i + 10.0f * i,
+          ((pointHeight / 2) * largfactor) / i + 10.0f * i,
+          96.0f, i * 80.0f, goomInfo->update.loopvar / i);
+      pointFilter (goomInfo, goomInfo->p1, VIOLET,
+          ((pointHeight / 3 + 5.0f) * largfactor) / i + 10.0f * i,
+          ((pointHeight / 3 + 5.0f) * largfactor) / i + 10.0f * i,
+          i + 122.0f, 134.0f, goomInfo->update.loopvar / i);
+      pointFilter (goomInfo, goomInfo->p1, BLACK,
+          ((pointHeight / 3) * largfactor + 20.0f),
+          ((pointHeight / 3) * largfactor + 20.0f),
+          58.0f, i * 66.0f, goomInfo->update.loopvar / i);
+      pointFilter (goomInfo, goomInfo->p1, WHITE,
+          (pointHeight * largfactor + 10.0f * i) / i,
+          (pointHeight * largfactor + 10.0f * i) / i,
+          66.0f, 74.0f, goomInfo->update.loopvar + i * 500);
+    }
+  }
+
+  /* by default: no zoom change */
+  pzfd = NULL;
+
+  /*
+   * Test forceMode
+   */
+#ifdef VERBOSE
+  if (forceMode != 0) {
+    printf ("forcemode = %d\n", forceMode);
+  }
+#endif
+
+
+  /* decrement the lock time by 1 */
+  /* note for those not following along: lockvar prevents a plugin state */
+  /* change from happening right after another state change. ok */
+  if (--goomInfo->update.lockvar < 0)
+    goomInfo->update.lockvar = 0;
+
+  /* check whether something interesting is happening with the sound */
+  if ((goomInfo->sound.timeSinceLastGoom == 0)
+      || (forceMode > 0)
+      || (goomInfo->update.cyclesSinceLastChange > TIME_BTW_CHG)) {
+
+    /* possibly change the mode */
+    if (goom_irand (goomInfo->gRandom, 16) == 0)
+      switch (goom_irand (goomInfo->gRandom, 34)) {
+        case 0:
+        case 10:
+          goomInfo->update.zoomFilterData.hypercosEffect =
+              goom_irand (goomInfo->gRandom, 2); /* no break: falls through to WAVE_MODE — presumably intentional */
+        case 13:
+        case 20:
+        case 21:
+          goomInfo->update.zoomFilterData.mode = WAVE_MODE;
+          goomInfo->update.zoomFilterData.reverse = 0;
+          goomInfo->update.zoomFilterData.waveEffect =
+              (goom_irand (goomInfo->gRandom, 3) == 0);
+          if (goom_irand (goomInfo->gRandom, 2))
+            goomInfo->update.zoomFilterData.vitesse =
+                (goomInfo->update.zoomFilterData.vitesse + 127) >> 1;
+          break;
+        case 1:
+        case 11:
+          goomInfo->update.zoomFilterData.mode = CRYSTAL_BALL_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 0;
+          goomInfo->update.zoomFilterData.hypercosEffect = 0;
+          break;
+        case 2:
+        case 12:
+          goomInfo->update.zoomFilterData.mode = AMULETTE_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 0;
+          goomInfo->update.zoomFilterData.hypercosEffect = 0;
+          break;
+        case 3:
+          goomInfo->update.zoomFilterData.mode = WATER_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 0;
+          goomInfo->update.zoomFilterData.hypercosEffect = 0;
+          break;
+        case 4:
+        case 14:
+          goomInfo->update.zoomFilterData.mode = SCRUNCH_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 0;
+          goomInfo->update.zoomFilterData.hypercosEffect = 0;
+          break;
+        case 5:
+        case 15:
+        case 22:
+          goomInfo->update.zoomFilterData.mode = HYPERCOS1_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 0;
+          goomInfo->update.zoomFilterData.hypercosEffect =
+              (goom_irand (goomInfo->gRandom, 3) == 0);
+          break;
+        case 6:
+        case 16:
+          goomInfo->update.zoomFilterData.mode = HYPERCOS2_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 0;
+          goomInfo->update.zoomFilterData.hypercosEffect = 0;
+          break;
+        case 7:
+        case 17:
+          goomInfo->update.zoomFilterData.mode = CRYSTAL_BALL_MODE;
+          goomInfo->update.zoomFilterData.waveEffect =
+              (goom_irand (goomInfo->gRandom, 4) == 0);
+          goomInfo->update.zoomFilterData.hypercosEffect =
+              goom_irand (goomInfo->gRandom, 2);
+          break;
+        case 8:
+        case 18:
+        case 19:
+          goomInfo->update.zoomFilterData.mode = SCRUNCH_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 1;
+          goomInfo->update.zoomFilterData.hypercosEffect = 1;
+          break;
+        case 29:
+        case 30:
+          goomInfo->update.zoomFilterData.mode = YONLY_MODE;
+          break;
+        case 31:
+        case 32:
+        case 33:
+          goomInfo->update.zoomFilterData.mode = SPEEDWAY_MODE;
+          break;
+        default:
+          goomInfo->update.zoomFilterData.mode = NORMAL_MODE;
+          goomInfo->update.zoomFilterData.waveEffect = 0;
+          goomInfo->update.zoomFilterData.hypercosEffect = 0;
+      }
+  }
+
+  /* everything below is only done when not locked */
+  if (goomInfo->update.lockvar == 0) {
+    /* a goom was detected (strong acceleration of the volume's acceleration) */
+    /* -> boost the speed if needed.. */
+    if (goomInfo->sound.timeSinceLastGoom == 0) {
+
+      int i;
+
+      goomInfo->update.goomvar++;
+
+      /* SELECTION OF THE GOOM STATE */
+      if ((!goomInfo->update.stateSelectionBlocker)
+          && (goom_irand (goomInfo->gRandom, 3))) {
+        goomInfo->update.stateSelectionRnd =
+            goom_irand (goomInfo->gRandom, goomInfo->statesRangeMax);
+        goomInfo->update.stateSelectionBlocker = 3;
+      } else if (goomInfo->update.stateSelectionBlocker)
+        goomInfo->update.stateSelectionBlocker--;
+
+      for (i = 0; i < goomInfo->statesNumber; i++)
+        if ((goomInfo->update.stateSelectionRnd >= goomInfo->states[i].rangemin)
+            && (goomInfo->update.stateSelectionRnd <=
+                goomInfo->states[i].rangemax))
+          goomInfo->curGState = &(goomInfo->states[i]);
+
+      if ((goomInfo->curGState->drawIFS) && (goomInfo->update.ifs_incr <= 0)) {
+        goomInfo->update.recay_ifs = 5;
+        goomInfo->update.ifs_incr = 11;
+      }
+
+      if ((!goomInfo->curGState->drawIFS) && (goomInfo->update.ifs_incr > 0)
+          && (goomInfo->update.decay_ifs <= 0))
+        goomInfo->update.decay_ifs = 100;
+
+      if (!goomInfo->curGState->drawScope)
+        goomInfo->update.stop_lines = 0xf000 & 5; /* NOTE(review): 0xf000 & 5 == 0, and overwritten just below — looks dead */
+
+      if (!goomInfo->curGState->drawScope) {
+        goomInfo->update.stop_lines = 0;
+        goomInfo->update.lineMode = goomInfo->update.drawLinesDuration;
+      }
+
+      /* if (goomInfo->update.goomvar % 1 == 0) */
+      {
+        guint32 vtmp;
+        guint32 newvit;
+
+        goomInfo->update.lockvar = 50;
+        newvit =
+            STOP_SPEED + 1 -
+            ((float) 3.5f * log10 (goomInfo->sound.speedvar * 60 + 1));
+        /* re-establish forward zoom.. */
+        if ((goomInfo->update.zoomFilterData.reverse)
+            && (!(goomInfo->cycle % 13)) && (rand () % 5 == 0)) {
+          goomInfo->update.zoomFilterData.reverse = 0;
+          goomInfo->update.zoomFilterData.vitesse = STOP_SPEED - 2;
+          goomInfo->update.lockvar = 75;
+        }
+        if (goom_irand (goomInfo->gRandom, 10) == 0) {
+          goomInfo->update.zoomFilterData.reverse = 1;
+          goomInfo->update.lockvar = 100;
+        }
+
+        if (goom_irand (goomInfo->gRandom, 10) == 0)
+          goomInfo->update.zoomFilterData.vitesse = STOP_SPEED - 1;
+        if (goom_irand (goomInfo->gRandom, 12) == 0)
+          goomInfo->update.zoomFilterData.vitesse = STOP_SPEED + 1;
+
+        /* change the zoom center.. */
+        switch (goom_irand (goomInfo->gRandom, 25)) {
+          case 0:
+          case 3:
+          case 6:
+            goomInfo->update.zoomFilterData.middleY =
+                goomInfo->screen.height - 1;
+            goomInfo->update.zoomFilterData.middleX =
+                goomInfo->screen.width / 2;
+            break;
+          case 1:
+          case 4:
+            goomInfo->update.zoomFilterData.middleX =
+                goomInfo->screen.width - 1;
+            break;
+          case 2:
+          case 5:
+            goomInfo->update.zoomFilterData.middleX = 1;
+            break;
+          default:
+            goomInfo->update.zoomFilterData.middleY =
+                goomInfo->screen.height / 2;
+            goomInfo->update.zoomFilterData.middleX =
+                goomInfo->screen.width / 2;
+        }
+
+        if ((goomInfo->update.zoomFilterData.mode == WATER_MODE)
+            || (goomInfo->update.zoomFilterData.mode == YONLY_MODE)
+            || (goomInfo->update.zoomFilterData.mode == AMULETTE_MODE)) {
+          goomInfo->update.zoomFilterData.middleX = goomInfo->screen.width / 2;
+          goomInfo->update.zoomFilterData.middleY = goomInfo->screen.height / 2;
+        }
+
+        switch (vtmp = (goom_irand (goomInfo->gRandom, 15))) {
+          case 0:
+            goomInfo->update.zoomFilterData.vPlaneEffect =
+                goom_irand (goomInfo->gRandom, 3)
+                - goom_irand (goomInfo->gRandom, 3);
+            goomInfo->update.zoomFilterData.hPlaneEffect =
+                goom_irand (goomInfo->gRandom, 3)
+                - goom_irand (goomInfo->gRandom, 3);
+            break;
+          case 3:
+            goomInfo->update.zoomFilterData.vPlaneEffect = 0;
+            goomInfo->update.zoomFilterData.hPlaneEffect =
+                goom_irand (goomInfo->gRandom, 8)
+                - goom_irand (goomInfo->gRandom, 8);
+            break;
+          case 4:
+          case 5:
+          case 6:
+          case 7:
+            goomInfo->update.zoomFilterData.vPlaneEffect =
+                goom_irand (goomInfo->gRandom, 5)
+                - goom_irand (goomInfo->gRandom, 5);
+            goomInfo->update.zoomFilterData.hPlaneEffect =
+                -goomInfo->update.zoomFilterData.vPlaneEffect;
+            break;
+          case 8:
+            goomInfo->update.zoomFilterData.hPlaneEffect =
+                5 + goom_irand (goomInfo->gRandom, 8);
+            goomInfo->update.zoomFilterData.vPlaneEffect =
+                -goomInfo->update.zoomFilterData.hPlaneEffect;
+            break;
+          case 9:
+            goomInfo->update.zoomFilterData.vPlaneEffect =
+                5 + goom_irand (goomInfo->gRandom, 8);
+            goomInfo->update.zoomFilterData.hPlaneEffect =
+                -goomInfo->update.zoomFilterData.hPlaneEffect;
+            break;
+          case 13:
+            goomInfo->update.zoomFilterData.hPlaneEffect = 0;
+            goomInfo->update.zoomFilterData.vPlaneEffect =
+                goom_irand (goomInfo->gRandom, 10)
+                - goom_irand (goomInfo->gRandom, 10);
+            break;
+          case 14:
+            goomInfo->update.zoomFilterData.hPlaneEffect =
+                goom_irand (goomInfo->gRandom, 10)
+                - goom_irand (goomInfo->gRandom, 10);
+            goomInfo->update.zoomFilterData.vPlaneEffect =
+                goom_irand (goomInfo->gRandom, 10)
+                - goom_irand (goomInfo->gRandom, 10);
+            break;
+          default:
+            if (vtmp < 10) {
+              goomInfo->update.zoomFilterData.vPlaneEffect = 0;
+              goomInfo->update.zoomFilterData.hPlaneEffect = 0;
+            }
+        }
+
+        if (goom_irand (goomInfo->gRandom, 5) != 0)
+          goomInfo->update.zoomFilterData.noisify = 0;
+        else {
+          goomInfo->update.zoomFilterData.noisify =
+              goom_irand (goomInfo->gRandom, 2) + 1;
+          goomInfo->update.lockvar *= 2;
+        }
+
+        if (goomInfo->update.zoomFilterData.mode == AMULETTE_MODE) {
+          goomInfo->update.zoomFilterData.vPlaneEffect = 0;
+          goomInfo->update.zoomFilterData.hPlaneEffect = 0;
+          goomInfo->update.zoomFilterData.noisify = 0;
+        }
+
+        if ((goomInfo->update.zoomFilterData.middleX == 1)
+            || (goomInfo->update.zoomFilterData.middleX ==
+                (signed int) goomInfo->screen.width - 1)) {
+          goomInfo->update.zoomFilterData.vPlaneEffect = 0;
+          if (goom_irand (goomInfo->gRandom, 2))
+            goomInfo->update.zoomFilterData.hPlaneEffect = 0;
+        }
+
+        if ((signed int) newvit < goomInfo->update.zoomFilterData.vitesse) { /* we are accelerating */
+          pzfd = &goomInfo->update.zoomFilterData;
+          if (((newvit < STOP_SPEED - 7) &&
+                  (goomInfo->update.zoomFilterData.vitesse < STOP_SPEED - 6) &&
+                  (goomInfo->cycle % 3 == 0))
+              || (goom_irand (goomInfo->gRandom, 40) == 0)) {
+            goomInfo->update.zoomFilterData.vitesse =
+                STOP_SPEED - goom_irand (goomInfo->gRandom, 2)
+                + goom_irand (goomInfo->gRandom, 2);
+            goomInfo->update.zoomFilterData.reverse =
+                !goomInfo->update.zoomFilterData.reverse;
+          } else {
+            goomInfo->update.zoomFilterData.vitesse =
+                (newvit + goomInfo->update.zoomFilterData.vitesse * 7) / 8;
+          }
+          goomInfo->update.lockvar += 50;
+        }
+      }
+
+      if (goomInfo->update.lockvar > 150) {
+        goomInfo->update.switchIncr = goomInfo->update.switchIncrAmount;
+        goomInfo->update.switchMult = 1.0f;
+      }
+    }
+    /* mega-slow mode */
+    if (goom_irand (goomInfo->gRandom, 700) == 0) {
+      /*
+       * printf ("coup du sort...\n") ;
+       */
+      pzfd = &goomInfo->update.zoomFilterData;
+      goomInfo->update.zoomFilterData.vitesse = STOP_SPEED - 1;
+      goomInfo->update.zoomFilterData.pertedec = 8;
+      goomInfo->update.zoomFilterData.sqrtperte = 16;
+      goomInfo->update.goomvar = 1;
+      goomInfo->update.lockvar += 50;
+      goomInfo->update.switchIncr = goomInfo->update.switchIncrAmount;
+      goomInfo->update.switchMult = 1.0f;
+    }
+  }
+
+  /*
+   * hard brake if the music is calm
+   */
+  if ((goomInfo->sound.speedvar < 0.01f)
+      && (goomInfo->update.zoomFilterData.vitesse < STOP_SPEED - 4)
+      && (goomInfo->cycle % 16 == 0)) {
+    pzfd = &goomInfo->update.zoomFilterData;
+    goomInfo->update.zoomFilterData.vitesse += 3;
+    goomInfo->update.zoomFilterData.pertedec = 8;
+    goomInfo->update.zoomFilterData.sqrtperte = 16;
+    goomInfo->update.goomvar = 0;
+  }
+
+  /*
+   * regularly lower the speed...
+   */
+  if ((goomInfo->cycle % 73 == 0)
+      && (goomInfo->update.zoomFilterData.vitesse < STOP_SPEED - 5)) {
+    pzfd = &goomInfo->update.zoomFilterData;
+    goomInfo->update.zoomFilterData.vitesse++;
+  }
+
+  /*
+   * stop decrementing after a while
+   */
+  if ((goomInfo->cycle % 101 == 0)
+      && (goomInfo->update.zoomFilterData.pertedec == 7)) {
+    pzfd = &goomInfo->update.zoomFilterData;
+    goomInfo->update.zoomFilterData.pertedec = 8;
+    goomInfo->update.zoomFilterData.sqrtperte = 16;
+  }
+
+  /*
+   * Allows forcing an effect.
+   */
+  if ((forceMode > 0) && (forceMode <= NB_FX)) {
+    pzfd = &goomInfo->update.zoomFilterData;
+    pzfd->mode = forceMode - 1;
+  }
+
+  if (forceMode == -1) {
+    pzfd = NULL;
+  }
+
+  /*
+   * Zoom effect change!
+   */
+  if (pzfd != NULL) {
+    int dif;
+
+    goomInfo->update.cyclesSinceLastChange = 0;
+
+    goomInfo->update.switchIncr = goomInfo->update.switchIncrAmount;
+
+    dif =
+        goomInfo->update.zoomFilterData.vitesse -
+        goomInfo->update.previousZoomSpeed;
+    if (dif < 0)
+      dif = -dif;
+
+    if (dif > 2) {
+      goomInfo->update.switchIncr *= (dif + 2) / 2;
+    }
+    goomInfo->update.previousZoomSpeed =
+        goomInfo->update.zoomFilterData.vitesse;
+    goomInfo->update.switchMult = 1.0f;
+
+    if (((goomInfo->sound.timeSinceLastGoom == 0)
+            && (goomInfo->sound.totalgoom < 2)) || (forceMode > 0)) {
+      goomInfo->update.switchIncr = 0;
+      goomInfo->update.switchMult = goomInfo->update.switchMultAmount;
+    }
+  } else {
+    if (goomInfo->update.cyclesSinceLastChange > TIME_BTW_CHG) {
+      pzfd = &goomInfo->update.zoomFilterData;
+      goomInfo->update.cyclesSinceLastChange = 0;
+    } else
+      goomInfo->update.cyclesSinceLastChange++;
+  }
+
+#ifdef VERBOSE
+  if (pzfd) {
+    printf ("GOOM: pzfd->mode = %d\n", pzfd->mode);
+  }
+#endif
+
+  /* Zoom here ! */
+  zoomFilterFastRGB (goomInfo, goomInfo->p1, goomInfo->p2, pzfd,
+      goomInfo->screen.width, goomInfo->screen.height,
+      goomInfo->update.switchIncr, goomInfo->update.switchMult);
+
+  /*
+   * Tentacle display
+   */
+
+  goomInfo->tentacles_fx.apply (&goomInfo->tentacles_fx, goomInfo->p1,
+      goomInfo->p2, goomInfo);
+  goomInfo->star_fx.apply (&goomInfo->star_fx, goomInfo->p2, goomInfo->p1,
+      goomInfo);
+
+  /*
+   * Scope handling
+   */
+
+  /*
+   * stop requested
+   */
+  if ((goomInfo->update.stop_lines & 0xf000)
+      || (!goomInfo->curGState->drawScope)) {
+    float param1 = 0, param2 = 0, amplitude;
+    int couleur;
+    int mode;
+
+    choose_a_goom_line (goomInfo, &param1, &param2, &couleur, &mode, &amplitude,
+        1);
+    couleur = GML_BLACK;
+
+    goom_lines_switch_to (goomInfo->gmline1, mode, param1, amplitude, couleur);
+    goom_lines_switch_to (goomInfo->gmline2, mode, param2, amplitude, couleur);
+    goomInfo->update.stop_lines &= 0x0fff;
+  }
+
+  /*
+   * random stop.. line mode change..
+   */
+  if (goomInfo->update.lineMode != goomInfo->update.drawLinesDuration) {
+    goomInfo->update.lineMode--;
+    if (goomInfo->update.lineMode == -1)
+      goomInfo->update.lineMode = 0;
+  } else if ((goomInfo->cycle % 80 == 0)
+      && (goom_irand (goomInfo->gRandom, 5) == 0) && goomInfo->update.lineMode)
+    goomInfo->update.lineMode--;
+
+  if ((goomInfo->cycle % 120 == 0)
+      && (goom_irand (goomInfo->gRandom, 4) == 0)
+      && (goomInfo->curGState->drawScope)) {
+    if (goomInfo->update.lineMode == 0)
+      goomInfo->update.lineMode = goomInfo->update.drawLinesDuration;
+    else if (goomInfo->update.lineMode == goomInfo->update.drawLinesDuration) {
+      float param1, param2, amplitude;
+      int couleur1, couleur2;
+      int mode;
+
+      goomInfo->update.lineMode--;
+      choose_a_goom_line (goomInfo, &param1, &param2, &couleur1,
+          &mode, &amplitude, goomInfo->update.stop_lines);
+
+      couleur2 = 5 - couleur1;
+      if (goomInfo->update.stop_lines) {
+        goomInfo->update.stop_lines--;
+        if (goom_irand (goomInfo->gRandom, 2))
+          couleur2 = couleur1 = GML_BLACK;
+      }
+
+      goom_lines_switch_to (goomInfo->gmline1, mode, param1, amplitude,
+          couleur1);
+      goom_lines_switch_to (goomInfo->gmline2, mode, param2, amplitude,
+          couleur2);
+    }
+  }
+
+  /*
+   * if we are inside a goom: display the lines...
+   */
+  if ((goomInfo->update.lineMode != 0)
+      || (goomInfo->sound.timeSinceLastGoom < 5)) {
+    goomInfo->gmline2->power = goomInfo->gmline1->power;
+
+    goom_lines_draw (goomInfo, goomInfo->gmline1, data[0], goomInfo->p2);
+    goom_lines_draw (goomInfo, goomInfo->gmline2, data[1], goomInfo->p2);
+
+    if (((goomInfo->cycle % 121) == 9)
+        && (goom_irand (goomInfo->gRandom, 3) == 1)
+        && ((goomInfo->update.lineMode == 0)
+            || (goomInfo->update.lineMode ==
+                goomInfo->update.drawLinesDuration))) {
+      float param1, param2, amplitude;
+      int couleur1, couleur2;
+      int mode;
+
+      choose_a_goom_line (goomInfo, &param1, &param2, &couleur1,
+          &mode, &amplitude, goomInfo->update.stop_lines);
+      couleur2 = 5 - couleur1;
+
+      if (goomInfo->update.stop_lines) {
+        goomInfo->update.stop_lines--;
+        if (goom_irand (goomInfo->gRandom, 2))
+          couleur2 = couleur1 = GML_BLACK;
+      }
+      goom_lines_switch_to (goomInfo->gmline1, mode, param1, amplitude,
+          couleur1);
+      goom_lines_switch_to (goomInfo->gmline2, mode, param2, amplitude,
+          couleur2);
+    }
+  }
+
+  return_val = goomInfo->p1;
+  tmp = goomInfo->p1;
+  goomInfo->p1 = goomInfo->p2;
+  goomInfo->p2 = tmp;
+
+  /* display and swap the buffers.. */
+  goomInfo->cycle++;
+
+  goomInfo->convolve_fx.apply (&goomInfo->convolve_fx, return_val,
+      goomInfo->outputBuf, goomInfo);
+
+  return (guint32 *) goomInfo->outputBuf;
+}
+
+/****************************************
+* CLOSE *
+****************************************/
+/* Free all buffers, effects and finally the PluginInfo itself. */
+void
+goom_close (PluginInfo * goomInfo)
+{
+  /* NOTE(review): free(NULL) is a no-op, so these guards are redundant */
+  if (goomInfo->pixel != NULL)
+    free (goomInfo->pixel);
+  if (goomInfo->back != NULL)
+    free (goomInfo->back);
+  if (goomInfo->conv != NULL)
+    free (goomInfo->conv);
+
+  goomInfo->pixel = goomInfo->back = NULL;
+  goomInfo->conv = NULL;
+  goom_random_free (goomInfo->gRandom);
+  goom_lines_free (&goomInfo->gmline1);
+  goom_lines_free (&goomInfo->gmline2);
+
+  /* release_ifs (); */
+  goomInfo->ifs_fx.free (&goomInfo->ifs_fx);
+  goomInfo->convolve_fx.free (&goomInfo->convolve_fx);
+  goomInfo->star_fx.free (&goomInfo->star_fx);
+  goomInfo->tentacles_fx.free (&goomInfo->tentacles_fx);
+  goomInfo->zoomFilter_fx.free (&goomInfo->zoomFilter_fx);
+
+  plugin_info_free (goomInfo);
+  free (goomInfo);
+}
+
+
+/* Pick a random geometry (mode, params, amplitude) and colour for the scope
+ * lines; "far" biases the choice towards distant/centered variants.
+ * Marked static to match the forward declaration at the top of this file. */
+static void
+choose_a_goom_line (PluginInfo * goomInfo, float *param1, float *param2,
+    int *couleur, int *mode, float *amplitude, int far)
+{
+  *mode = goom_irand (goomInfo->gRandom, 3);
+  *amplitude = 1.0f;
+  switch (*mode) {
+    case GML_CIRCLE:
+      if (far) {
+        *param1 = *param2 = 0.47f;
+        *amplitude = 0.8f;
+        break;
+      }
+      if (goom_irand (goomInfo->gRandom, 3) == 0) {
+        *param1 = *param2 = 0;
+        *amplitude = 3.0f;
+      } else if (goom_irand (goomInfo->gRandom, 2)) {
+        *param1 = 0.40f * goomInfo->screen.height;
+        *param2 = 0.22f * goomInfo->screen.height;
+      } else {
+        *param1 = *param2 = goomInfo->screen.height * 0.35;
+      }
+      break;
+    case GML_HLINE:
+      if (goom_irand (goomInfo->gRandom, 4) || far) {
+        *param1 = goomInfo->screen.height / 7;
+        *param2 = 6.0f * goomInfo->screen.height / 7.0f;
+      } else {
+        *param1 = *param2 = goomInfo->screen.height / 2.0f;
+        *amplitude = 2.0f;
+      }
+      break;
+    case GML_VLINE:
+      if (goom_irand (goomInfo->gRandom, 3) || far) {
+        *param1 = goomInfo->screen.width / 7.0f;
+        *param2 = 6.0f * goomInfo->screen.width / 7.0f;
+      } else {
+        *param1 = *param2 = goomInfo->screen.width / 2.0f;
+        *amplitude = 1.5f;
+      }
+      break;
+    default:
+      *param1 = *param2 = 0;
+      break;
+  }
+
+  *couleur = goom_irand (goomInfo->gRandom, 6);
+}
diff --git a/gst/goom/goom_filters.h b/gst/goom/goom_filters.h
new file mode 100644
index 0000000000..e4cfaeb0ce
--- /dev/null
+++ b/gst/goom/goom_filters.h
@@ -0,0 +1,70 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef FILTERS_H
+#define FILTERS_H
+
+#include "goom_config.h"
+#include "goom_typedefs.h"
+#include "goom_visual_fx.h"
+#include "goom_graphic.h"
+
+void zoomFilterVisualFXWrapper_create(VisualFX *fx);
+
+struct _ZOOM_FILTER_DATA
+{
+  int     vitesse;            /* 128 = no speed... * * 256 = backwards
+                               * very fast.. * * 0 = forwards very fast. */
+  unsigned char pertedec;
+  unsigned char sqrtperte;
+  int     middleX, middleY;   /* center of the effect */
+  char    reverse;            /* reverses the speed */
+  char    mode;               /* type of effect to apply (see the #defines) */
+  /** @since June 2001 */
+  int     hPlaneEffect;       /* horizontal deviation */
+  int     vPlaneEffect;       /* vertical deviation */
+  /** @since April 2002 */
+  int     waveEffect;         /* applies a wave-effect overlay */
+  int     hypercosEffect;     /* applies a hypercos-effect overlay */
+
+  char    noisify;            /* adds noise to the transformation */
+};
+
+#define NORMAL_MODE 0
+#define WAVE_MODE 1
+#define CRYSTAL_BALL_MODE 2
+#define SCRUNCH_MODE 3
+#define AMULETTE_MODE 4
+#define WATER_MODE 5
+#define HYPERCOS1_MODE 6
+#define HYPERCOS2_MODE 7
+#define YONLY_MODE 8
+#define SPEEDWAY_MODE 9
+
+void pointFilter (PluginInfo *goomInfo, Pixel * pix1, Color c,
+                  float t1, float t2, float t3, float t4, guint32 cycle);
+
+/* zoom filter:
+ * the content of pix1 is copied into pix2.
+ * zf: if non-NULL, configures the effect.
+ * resx,resy: size of the buffers.
+ */
+void zoomFilterFastRGB (PluginInfo *goomInfo, Pixel * pix1, Pixel * pix2, ZoomFilterData * zf, guint32 resx,
+                        guint32 resy, int switchIncr, float switchMult);
+
+#endif
diff --git a/gst/goom/goom_fx.h b/gst/goom/goom_fx.h
new file mode 100644
index 0000000000..7c82d6cfe2
--- /dev/null
+++ b/gst/goom/goom_fx.h
@@ -0,0 +1,30 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
#ifndef _GOOM_FX_H
#define _GOOM_FX_H

#include "goom_visual_fx.h"
#include "goom_plugin_info.h"

/* Constructors filling in the VisualFX vtables of the built-in effects. */
void convolve_create (VisualFX *vfx);
void flying_star_create (VisualFX *vfx);

/* Plain-C implementation of the zoom transform; same signature as the
 * PluginInfo.methods.zoom_filter function pointer, so it can be installed
 * there directly. */
void zoom_filter_c(int sizeX, int sizeY, Pixel *src, Pixel *dest, int *brutS, int *brutD, int buffratio, int precalCoef[16][16]);

#endif
diff --git a/gst/goom/goom_graphic.h b/gst/goom/goom_graphic.h
new file mode 100644
index 0000000000..54dde3765d
--- /dev/null
+++ b/gst/goom/goom_graphic.h
@@ -0,0 +1,92 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
#ifndef GRAPHIC_H
#define GRAPHIC_H

typedef unsigned int Uint;

/* 16-bit-per-channel color; field names follow the French initials:
 * r = rouge (red), v = vert (green), b = bleu (blue). */
typedef struct
{
  unsigned short r, v, b;
}
Color;

extern const Color BLACK;
extern const Color WHITE;
extern const Color RED;
extern const Color BLUE;
extern const Color GREEN;
extern const Color YELLOW;
extern const Color ORANGE;
extern const Color VIOLET;


/* 32-bit pixel layout.  COLOR_BGRA selects the byte order; the *_CHANNEL
 * masks and *_OFFSET shifts must stay consistent with the field order of
 * the union defined in each branch. */
#ifdef COLOR_BGRA

#define B_CHANNEL 0xFF000000
#define G_CHANNEL 0x00FF0000
#define R_CHANNEL 0x0000FF00
#define A_CHANNEL 0x000000FF
#define B_OFFSET 24
#define G_OFFSET 16
#define R_OFFSET 8
#define A_OFFSET 0

typedef union _PIXEL {
  struct {
    unsigned char b;
    unsigned char g;
    unsigned char r;
    unsigned char a;
  } channels;
  unsigned int val;   /* whole pixel as one machine word */
  unsigned char cop[4]; /* byte-wise access */
} Pixel;

#else

#define A_CHANNEL 0xFF000000
#define R_CHANNEL 0x00FF0000
#define G_CHANNEL 0x0000FF00
#define B_CHANNEL 0x000000FF
#define A_OFFSET 24
#define R_OFFSET 16
#define G_OFFSET 8
#define B_OFFSET 0

typedef union _PIXEL {
  struct {
    unsigned char a;
    unsigned char r;
    unsigned char g;
    unsigned char b;
  } channels;
  unsigned int val;   /* whole pixel as one machine word */
  unsigned char cop[4]; /* byte-wise access */
} Pixel;

#endif /* COLOR_BGRA */

/*
inline void setPixelRGB (Pixel * buffer, Uint x, Uint y, Color c);
inline void getPixelRGB (Pixel * buffer, Uint x, Uint y, Color * c);
*/


#endif /* GRAPHIC_H */
diff --git a/gst/goom/goom_plugin_info.h b/gst/goom/goom_plugin_info.h
new file mode 100644
index 0000000000..907d780c23
--- /dev/null
+++ b/gst/goom/goom_plugin_info.h
@@ -0,0 +1,181 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _PLUGIN_INFO_H
+#define _PLUGIN_INFO_H
+
+#include "goom_typedefs.h"
+
+#include "goom_config.h"
+
+#include "goom_graphic.h"
+#include "goom_config_param.h"
+#include "goom_visual_fx.h"
+#include "goom_filters.h"
+#include "goom_tools.h"
+
/* One global state of the plugin: which effects are enabled.
 * rangemin/rangemax presumably bound the random draw that selects this
 * state (cf. statesRangeMax in PluginInfo) — TODO confirm in goom_core. */
typedef struct {
  char drawIFS;       /* draw the IFS fractal effect */
  char drawPoints;
  char drawTentacle;

  char drawScope;
  int farScope;

  int rangemin;
  int rangemax;
} GoomState;

/* Capacity of PluginInfo.states */
#define STATES_MAX_NB 128
+
/*
 * Gives information about the sound.
 */
struct _SOUND_INFO {

  /* note: a "Goom" is just a sound event... */

  int timeSinceLastGoom;   /* >= 0 */
  float goomPower;         /* power of the last Goom [0..1] */

  int timeSinceLastBigGoom;  /* >= 0 */

  float volume;            /* [0..1] */
  short samples[2][512];   /* 2 channels of 512 samples */

  /* other "internal" data for the sound tester */
  float goom_limit;        /* auto-updated threshold of goom detection */
  float bigGoomLimit;
  float accelvar;          /* acceleration of the sound - [0..1] */
  float speedvar;          /* speed of the sound - [0..100] */
  int allTimesMax;
  int totalgoom;           /* number of gooms since the last reset
                            * (a reset happens every 64 cycles) */

  float prov_max;          /* max acceleration since the last reset */

  int cycle;

  /* private: exported tuning parameters */
  PluginParam volume_p;
  PluginParam speed_p;
  PluginParam accel_p;
  PluginParam goom_limit_p;
  PluginParam goom_power_p;
  PluginParam last_goom_p;
  PluginParam last_biggoom_p;
  PluginParam biggoom_speed_limit_p;
  PluginParam biggoom_factor_p;

  PluginParameters params; /* contains the previously defined parameters. */
};
+
+
/*
 * Allows FXs to know the current state of the plugin.
 * (Field comments translated from the original French.)
 */
struct _PLUGIN_INFO {

  /* public data */

  int nbParams;
  PluginParameters *params;

  /* private data */

  struct _SIZE_TYPE {
    int width;
    int height;
    int size;   /* == screen.height * screen.width. */
  } screen;

  SoundInfo sound;

  int nbVisuals;
  VisualFX **visuals; /* pointers to all the visual fx */

  /* The known FX */
  VisualFX convolve_fx;
  VisualFX star_fx;
  VisualFX zoomFilter_fx;
  VisualFX tentacles_fx;
  VisualFX ifs_fx;

  /* image buffers */
  guint32 *pixel;
  guint32 *back;
  Pixel *p1, *p2;
  Pixel *conv;
  Pixel *outputBuf;

  /* state of goom */
  guint32 cycle;
  GoomState states[STATES_MAX_NB];
  int statesNumber;
  int statesRangeMax;

  GoomState *curGState;   /* currently active state */

  /* line effect */
  GMLine *gmline1;
  GMLine *gmline2;

  /* sine table */
  int sintable[0x10000];

  /* INTERNALS */

  /* goom_update internals.
   * All static variables of goom_update were moved here... for the moment.
   */
  struct {
    int lockvar;               /* prevents new changes while set */
    int goomvar;               /* goom loop counter */
    int loopvar;               /* movement of the points */
    int stop_lines;
    int ifs_incr;              /* draw the ifs (0 = no; > 0 = increment) */
    int decay_ifs;             /* fading out of the ifs */
    int recay_ifs;             /* fading the ifs back in */
    int cyclesSinceLastChange; /* number of cycles since the last change */
    int drawLinesDuration;     /* duration of the transition between drawing the lines or not */
    int lineMode;              /* the line effect to draw */
    float switchMultAmount;    /* SWITCHMULT (29.0f/30.0f) */
    int switchIncrAmount;      /* 0x7f */
    float switchMult;          /* 1.0f */
    int switchIncr;            /* = SWITCHINCR; */
    int stateSelectionRnd;
    int stateSelectionBlocker;
    int previousZoomSpeed;
    ZoomFilterData zoomFilterData;
  } update;

  /* pluggable low-level implementations (e.g. zoom_filter_c has the
   * zoom_filter signature) */
  struct {
    void (*draw_line) (Pixel *data, int x1, int y1, int x2, int y2, int col, int screenx, int screeny);
    void (*zoom_filter) (int sizeX, int sizeY, Pixel *src, Pixel *dest, int *brutS, int *brutD, int buffratio, int precalCoef[16][16]);
  } methods;

  GoomRandom *gRandom;  /* fast random-number source (see goom_tools.h) */
};
+
/* Initializes 'p' for nbVisual visual effects; pair with plugin_info_free. */
void plugin_info_init(PluginInfo *p, int nbVisual);
void plugin_info_free(PluginInfo *p);

/* Registers 'visual' at slot i; i = [0..p->nbVisual-1] */
void plugin_info_add_visual(PluginInfo *p, int i, VisualFX *visual);
+
+#endif
diff --git a/gst/goom/goom_tools.c b/gst/goom/goom_tools.c
new file mode 100644
index 0000000000..01758fd987
--- /dev/null
+++ b/gst/goom/goom_tools.c
@@ -0,0 +1,50 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "goom_tools.h"
+#include <stdlib.h>
+
+GoomRandom *
+goom_random_init (int i)
+{
+ GoomRandom *grandom = (GoomRandom *) malloc (sizeof (GoomRandom));
+
+ srand (i);
+ grandom->pos = 1;
+ goom_random_update_array (grandom, GOOM_NB_RAND);
+ return grandom;
+}
+
+void
+goom_random_free (GoomRandom * grandom)
+{
+ free (grandom);
+}
+
+void
+goom_random_update_array (GoomRandom * grandom, int numberOfValuesToChange)
+{
+ while (numberOfValuesToChange > 0) {
+#if RAND_MAX < 0x10000
+ grandom->array[grandom->pos++] = ((rand () << 16) + rand ()) / 127;
+#else
+ grandom->array[grandom->pos++] = rand () / 127;
+#endif
+ numberOfValuesToChange--;
+ }
+}
diff --git a/gst/goom/goom_tools.h b/gst/goom/goom_tools.h
new file mode 100644
index 0000000000..db5c407fe0
--- /dev/null
+++ b/gst/goom/goom_tools.h
@@ -0,0 +1,53 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
#ifndef _GOOMTOOLS_H
#define _GOOMTOOLS_H

#include "goom_config.h"

/*
 * Random number generator wrapper for faster random numbers.
 */

/* Table size; must stay 0x10000 so the unsigned short cursor below wraps
 * exactly at the end of the array (no bounds check is ever done). */
#define GOOM_NB_RAND 0x10000

typedef struct _GOOM_RANDOM {
  int array[GOOM_NB_RAND];  /* pre-computed random values */
  unsigned short pos;       /* cursor; wraps modulo 0x10000 */
} GoomRandom;

GoomRandom *goom_random_init(int i);
void goom_random_free(GoomRandom *grandom);

/* Returns the next pre-computed value. */
inline static int goom_random(GoomRandom *grandom) {

  grandom->pos++; /* works because pos is an unsigned short: wraps to 0 */
  return grandom->array[grandom->pos];
}

/* Returns the next pre-computed value modulo i (i must be non-zero). */
inline static int goom_irand(GoomRandom *grandom, int i) {

  grandom->pos++;
  return grandom->array[grandom->pos] % i;
}

/* called to change the specified number of values in the array, so that the array does not remain the same*/
void goom_random_update_array(GoomRandom *grandom, int numberOfValuesToChange);

#endif
diff --git a/gst/goom/goom_typedefs.h b/gst/goom/goom_typedefs.h
new file mode 100644
index 0000000000..d3a33a999b
--- /dev/null
+++ b/gst/goom/goom_typedefs.h
@@ -0,0 +1,29 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
#ifndef _GOOM_TYPEDEFS_H
#define _GOOM_TYPEDEFS_H

/* Forward typedefs shared by all goom headers (break include cycles). */
typedef struct _PLUGIN_INFO PluginInfo;          /* goom_plugin_info.h */
typedef struct _SOUND_INFO SoundInfo;            /* goom_plugin_info.h */
typedef struct _GMLINE GMLine;                   /* line effect */
typedef struct _GMUNITPOINTER GMUnitPointer;
typedef struct _ZOOM_FILTER_DATA ZoomFilterData; /* goom_filters.h */
typedef struct _VISUAL_FX VisualFX;              /* goom_visual_fx.h */

#endif
diff --git a/gst/goom/goom_visual_fx.h b/gst/goom/goom_visual_fx.h
new file mode 100644
index 0000000000..0bb5a49c29
--- /dev/null
+++ b/gst/goom/goom_visual_fx.h
@@ -0,0 +1,35 @@
+/* Goom Project
+ * Copyright (C) <2003> Jean-Christophe Hoelt <jeko@free.fr>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
#ifndef _VISUAL_FX_H
#define _VISUAL_FX_H

#include "goom_config_param.h"
#include "goom_graphic.h"
#include "goom_typedefs.h"

/* Interface (vtable) every visual effect implements. */
struct _VISUAL_FX {
  void (*init) (struct _VISUAL_FX *_this, PluginInfo *info);
  void (*free) (struct _VISUAL_FX *_this);
  /* renders one frame: reads src, writes dest */
  void (*apply) (struct _VISUAL_FX *_this, Pixel *src, Pixel *dest, PluginInfo *info);
  void *fx_data;            /* effect-private state */

  PluginParameters *params; /* user-tunable parameters of this effect */
};

#endif
diff --git a/gst/goom/goomsl_lex.l b/gst/goom/goomsl_lex.l
new file mode 100644
index 0000000000..7a7de29069
--- /dev/null
+++ b/gst/goom/goomsl_lex.l
@@ -0,0 +1,94 @@
%{
/* GoomSL lexer (flex).  Tokenizes GoomSL scripts for the parser in
 * goomsl_yacc.y; scalar token text travels through yylval.strValue. */

#include <math.h>
#include <stdlib.h>
#include <string.h>
#include "goomsl.h"
#include "goomsl_private.h"
#include "goomsl_yacc.h"
void yyerror(char *);
void yyparse(void);

GoomSL *currentGoomSL;
/* Accumulator for string literals.
 * NOTE(review): the writes into 'string' below are not bounds-checked;
 * a literal longer than 1023 bytes overflows it. */
static int string_size;
static char string[1024];
%}

DIGIT [0-9]
XDIGIT [0-9a-f]
ID [a-zA-Z_@&][a-zA-Z0-9_\.]*

%S C_COMMENT
%S LINE_COMMENT
%S STRING

%%

<LINE_COMMENT,C_COMMENT,INITIAL>^[ \t]*\n { ++currentGoomSL->num_lines; /* Ignore empty lines */ }
<LINE_COMMENT,C_COMMENT,INITIAL>^[ \t]*"//"[^\n]*\n { ++currentGoomSL->num_lines; /* Ignore comment-only lines */ }

<LINE_COMMENT>\n { ++currentGoomSL->num_lines; yylval.charValue=*yytext; BEGIN INITIAL; return '\n'; }
<INITIAL>\n { ++currentGoomSL->num_lines; yylval.charValue=*yytext; return '\n'; }

<C_COMMENT>"*/" { BEGIN INITIAL; }
<C_COMMENT>\n { ++currentGoomSL->num_lines; }
<C_COMMENT,LINE_COMMENT>. { /* eat up comment */ }

<INITIAL>"#RST_LINE#" { currentGoomSL->num_lines = 0; }
<INITIAL>"#FILE ".*"#" { currentGoomSL->num_lines = 0; /* printf("%s\n", yytext); */ }
<INITIAL>"#"[^\n]* { /* ignore preprocessor lines */ }

<INITIAL>"/*" { BEGIN C_COMMENT; }
<INITIAL>"//" { BEGIN LINE_COMMENT; }
<INITIAL>\" { BEGIN STRING; string_size=0; }

<STRING>"\\n" { string[string_size++] = '\n'; }
<STRING>"\\\"" { string[string_size++] = '\"'; }
<STRING>\" { /* end of string: store it on the GSL heap and emit its handle
              * as a pointer token (handle printed as hex, cf. LTYPE_PTR) */
             unsigned int tmp;
             BEGIN INITIAL;
             string[string_size]=0;
             tmp = gsl_malloc(currentGoomSL, string_size+1);
             strcpy((char*)currentGoomSL->ptrArray[tmp],string);
             sprintf(yylval.strValue, "0x%08x", tmp);
             return LTYPE_PTR;
           }
<STRING>. { string[string_size++] = *yytext; /* NOTE(review): unchecked write */ }

<INITIAL>"float" { return FLOAT_TK; }
<INITIAL>"int" { return INT_TK; }
<INITIAL>"boolean" { return INT_TK; }
<INITIAL>"ptr" { return PTR_TK; }
<INITIAL>"string" { return PTR_TK; }
<INITIAL>"declare" { return DECLARE; }
<INITIAL>"external" { return EXTERNAL; }
<INITIAL>"struct" { return STRUCT; }
<INITIAL>"not" { return NOT; }
<INITIAL>"while" { return WHILE; }
<INITIAL>"do" { return DO; }
<INITIAL>"for" { return FOR; }
<INITIAL>"in" { return IN; }
<INITIAL>"true" { strncpy(yylval.strValue, "1", 2047); return LTYPE_INTEGER; }
<INITIAL>"false" { strncpy(yylval.strValue, "0", 2047); return LTYPE_INTEGER; }
<INITIAL>{ID} { strncpy(yylval.strValue, yytext, 2047); return LTYPE_VAR; }
<INITIAL>{DIGIT}+ { strncpy(yylval.strValue, yytext, 2047); return LTYPE_INTEGER; }
<INITIAL>\'.\' { sprintf(yylval.strValue, "%d", (int)yytext[1]); return LTYPE_INTEGER; }
<INITIAL>"0x"{XDIGIT}+ { strncpy(yylval.strValue, yytext, 2047); return LTYPE_INTEGER; }
<INITIAL>{DIGIT}+"."{DIGIT}* { strncpy(yylval.strValue, yytext, 2047); return LTYPE_FLOAT; }
<INITIAL>{DIGIT}+"%" { sprintf(yylval.strValue, "%3.2f", atof(yytext)/100.0f); return LTYPE_FLOAT; }
<INITIAL>"+=" { return PLUS_EQ; }
<INITIAL>"*=" { return MUL_EQ; }
<INITIAL>"-=" { return SUB_EQ; }
<INITIAL>"/=" { return DIV_EQ; }
<INITIAL>"<=" { return LOW_EQ; }
<INITIAL>">=" { return SUP_EQ; }
<INITIAL>"!=" { return NOT_EQ; }
<INITIAL>"<>" { return NOT_EQ; }
<INITIAL>[ \t]+ /* eat up whitespace */
<INITIAL>. { yylval.charValue = *yytext; return *yytext; }

%%


int yywrap(void) { return 1; yyunput(0,0); /* unreachable: only references yyunput to silence the 'defined but not used' warning */ }
diff --git a/gst/goom/goomsl_yacc.y b/gst/goom/goomsl_yacc.y
new file mode 100644
index 0000000000..078933c278
--- /dev/null
+++ b/gst/goom/goomsl_yacc.y
@@ -0,0 +1,1438 @@
+/**
+ * copyright 2004, Jean-Christophe Hoelt <jeko@ios-software.com>
+ *
+ * This program is released under the terms of the GNU Lesser General Public Licence.
+ */
+%{
+ #include <stdio.h>
+ #include <stdlib.h>
+ #include <string.h>
+ #include <glib.h>
+ #include "goomsl.h"
+ #include "goomsl_private.h"
+
+#define STRUCT_ALIGNMENT 16
+/* #define VERBOSE */
+
+ int yylex(void);
+ void yyerror(char *);
+ extern GoomSL *currentGoomSL;
+
+ static NodeType *nodeNew(const char *str, int type, int line_number);
+ static NodeType *nodeClone(NodeType *node);
+ static void nodeFreeInternals(NodeType *node);
+ static void nodeFree(NodeType *node);
+
+ static void commit_node(NodeType *node, int releaseIfTemp);
+ static void precommit_node(NodeType *node);
+
+ static NodeType *new_constInt(const char *str, int line_number);
+ static NodeType *new_constFloat(const char *str, int line_number);
+ static NodeType *new_constPtr(const char *str, int line_number);
+ static NodeType *new_var(const char *str, int line_number);
+ static NodeType *new_nop(const char *str);
+ static NodeType *new_op(const char *str, int type, int nbOp);
+
+ static int allocateLabel();
+ static int allocateTemp();
+ static void releaseTemp(int n);
+ static void releaseAllTemps();
+
  /* Recognizes the names of compiler-generated temporaries (created by the
   * sprintf calls in precommit_expr).
   * NOTE(review): "_p_tmp" is compared over 7 bytes, which includes the
   * literal's terminating NUL, so only the exact string "_p_tmp" matches —
   * never the generated "_p_tmp_<n>" names.  Latent upstream inconsistency;
   * left untouched here. */
  static int is_tmp_expr(NodeType *node) {
    if (node->str) {
      return (!strncmp(node->str,"_i_tmp_",7))
        || (!strncmp(node->str,"_f_tmp_",7))
        || (!strncmp(node->str,"_p_tmp",7));
    }
    return 0;
  }
  /* pre: is_tmp_expr(node); */
  /* NOTE(review): for names like "_i_tmp_<n>" the digits start at offset 7,
   * so atoi(str+5) parses "p_<n>" and yields 0 — temp ids are not recovered
   * correctly.  Latent upstream bug; left untouched. */
  static int get_tmp_id(NodeType *node) { return atoi((node->str)+5); }

  /* True for operators whose operands may be swapped; used by
   * precommit_expr to reuse a temporary sitting on either side. */
  static int is_commutative_expr(int itype)
  { /* {{{ */
    return (itype == INSTR_ADD)
        || (itype == INSTR_MUL)
        || (itype == INSTR_ISEQUAL);
  } /* }}} */
+
  /* Emits a label pseudo-instruction into the program being compiled. */
  static void GSL_PUT_LABEL(char *name, int line_number)
  { /* {{{ */
#ifdef VERBOSE
    printf("label %s\n", name);
#endif
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, "label", INSTR_LABEL, 1, line_number);
    gsl_instr_add_param(currentGoomSL->instr, name, TYPE_LABEL);
  } /* }}} */
  /* Emits an unconditional jump to label 'name'. */
  static void GSL_PUT_JUMP(char *name, int line_number)
  { /* {{{ */
#ifdef VERBOSE
    printf("jump %s\n", name);
#endif
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, "jump", INSTR_JUMP, 1, line_number);
    gsl_instr_add_param(currentGoomSL->instr, name, TYPE_LABEL);
  } /* }}} */

  /* Generic conditional-jump emitter: instruction 'iname'/'instr_id'
   * targeting label 'name'. */
  static void GSL_PUT_JXXX(char *name, char *iname, int instr_id, int line_number)
  { /* {{{ */
#ifdef VERBOSE
    printf("%s %s\n", iname, name);
#endif
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, iname, instr_id, 1, line_number);
    gsl_instr_add_param(currentGoomSL->instr, name, TYPE_LABEL);
  } /* }}} */
  /* "jzero.i": jump to 'name' when the tested value is zero. */
  static void GSL_PUT_JZERO(char *name,int line_number)
  { /* {{{ */
    GSL_PUT_JXXX(name,"jzero.i",INSTR_JZERO,line_number);
  } /* }}} */
  /* "jnzero.i": jump to 'name' when the tested value is non-zero. */
  static void GSL_PUT_JNZERO(char *name, int line_number)
  { /* {{{ */
    GSL_PUT_JXXX(name,"jnzero.i",INSTR_JNZERO,line_number);
  } /* }}} */
+
+ /* Structures Management */
+
/* Rounds _addr up to the next multiple of _align (no-op when _align <= 1).
 * Wrapped in do { } while (0) so the macro expands to a single statement:
 * the original bare-brace form followed by ';' breaks inside an unbraced
 * if/else.  Arguments are parenthesized against operator-precedence
 * surprises; _addr is still evaluated more than once, as before. */
#define ALIGN_ADDR(_addr,_align) do {\
    if ((_align) > 1) {\
      int _dec = ((_addr) % (_align));\
      if (_dec != 0) (_addr) += (_align) - _dec;\
    }} while (0)
+
+ /* */
  /* Computes the internal storage layout of struct 's': assigns
   * offsetInStruct to every field and fills the iBlock/fBlock run tables
   * (contiguous runs of ints/floats).  Layout order: sub-structs first
   * (each preceded by an int-sized type prefix), then ints, then floats,
   * then pointers; 'consumed' is the running byte offset and becomes the
   * final struct size.
   * NOTE(review): no overflow check against the declared capacities of
   * iBlock/fBlock (see goomsl_private.h) — confirm they are large enough. */
  void gsl_prepare_struct(GSL_Struct *s, int s_align, int i_align, int f_align)
  {
    int i;
    int consumed = 0;
    int iblk=0, fblk=0;

    s->iBlock[0].size = 0;
    s->iBlock[0].data = 0;
    s->fBlock[0].size = 0;
    s->fBlock[0].data = 0;

    /* Prepare sub-structs and calculate space needed for their storage */
    for (i = 0; i < s->nbFields; ++i)
    {
      if (s->fields[i]->type < FIRST_RESERVED)  /* field is itself a struct */
      {
        int j=0;
        GSL_Struct *substruct = currentGoomSL->gsl_struct[s->fields[i]->type];
        consumed += sizeof(int); /* stores the type prefix */
        ALIGN_ADDR(consumed, s_align);
        s->fields[i]->offsetInStruct = consumed;
        gsl_prepare_struct(substruct, s_align, i_align, f_align);
        /* import the sub-struct's int/float runs, shifted to our offset */
        for(j=0;substruct->iBlock[j].size>0;++j) {
          s->iBlock[iblk].data = consumed + substruct->iBlock[j].data;
          s->iBlock[iblk].size = substruct->iBlock[j].size;
          iblk++;
        }
        for(j=0;substruct->fBlock[j].size>0;++j) {
          s->fBlock[fblk].data = consumed + substruct->fBlock[j].data;
          s->fBlock[fblk].size = substruct->fBlock[j].size;
          fblk++;
        }
        consumed += substruct->size;
      }
    }

    /* Then prepare integers */
    ALIGN_ADDR(consumed, i_align);
    for (i = 0; i < s->nbFields; ++i)
    {
      if (s->fields[i]->type == INSTR_INT)
      {
        if (s->iBlock[iblk].size == 0) {
          /* start a new run of consecutive ints */
          s->iBlock[iblk].size = 1;
          s->iBlock[iblk].data = consumed;
        } else {
          s->iBlock[iblk].size += 1;
        }
        s->fields[i]->offsetInStruct = consumed;
        consumed += sizeof(int);
      }
    }

    /* terminate the int-run table */
    iblk++;
    s->iBlock[iblk].size = 0;
    s->iBlock[iblk].data = 0;

    /* Then prepare floats */
    ALIGN_ADDR(consumed, f_align);
    for (i = 0; i < s->nbFields; ++i)
    {
      if (s->fields[i]->type == INSTR_FLOAT)
      {
        if (s->fBlock[fblk].size == 0) {
          s->fBlock[fblk].size = 1;
          s->fBlock[fblk].data = consumed;
        } else {
          s->fBlock[fblk].size += 1;
        }
        s->fields[i]->offsetInStruct = consumed;
        consumed += sizeof(int); /* NOTE(review): assumes sizeof(float) == sizeof(int) — confirm */
      }
    }

    /* terminate the float-run table */
    fblk++;
    s->fBlock[fblk].size = 0;
    s->fBlock[fblk].data = 0;

    /* Finally prepare pointers */
    ALIGN_ADDR(consumed, i_align);
    for (i = 0; i < s->nbFields; ++i)
    {
      if (s->fields[i]->type == INSTR_PTR)
      {
        s->fields[i]->offsetInStruct = consumed;
        consumed += sizeof(int); /* GSL pointers are int-sized handles (indices into ptrArray) */
      }
    }
    s->size = consumed;
  }
+
+ /* Returns the ID of a struct from its name */
+ int gsl_get_struct_id(const char *name) /* {{{ */
+ {
+ HashValue *ret = goom_hash_get(currentGoomSL->structIDS, name);
+ if (ret != NULL) return ret->i;
+ return -1;
+ } /* }}} */
+
  /* Adds the definition of a struct */
  void gsl_add_struct(const char *name, GSL_Struct *gsl_struct) /* {{{ */
  {
    /* Prepare the struct: i.e. calculate its internal storage format */
    gsl_prepare_struct(gsl_struct, STRUCT_ALIGNMENT, STRUCT_ALIGNMENT, STRUCT_ALIGNMENT);

    /* If the struct does not already exist */
    if (gsl_get_struct_id(name) < 0)
    {
      /* add it */
      int id = currentGoomSL->nbStructID++;
      goom_hash_put_int(currentGoomSL->structIDS, name, id);
      if (currentGoomSL->gsl_struct_size <= id) {
        /* grow the struct table (doubling).
         * NOTE(review): realloc result is unchecked and overwrites the
         * original pointer — on OOM this leaks and then crashes. */
        currentGoomSL->gsl_struct_size *= 2;
        currentGoomSL->gsl_struct = (GSL_Struct**)realloc(currentGoomSL->gsl_struct,
                                                          sizeof(GSL_Struct*) * currentGoomSL->gsl_struct_size);
      }
      currentGoomSL->gsl_struct[id] = gsl_struct;
    }
  } /* }}} */
+
  /* Creates a field for a struct */
  GSL_StructField *gsl_new_struct_field(const char *name, int type)
  {
    GSL_StructField *field = (GSL_StructField*)malloc(sizeof(GSL_StructField));
    /* NOTE(review): malloc result unchecked; strcpy is unbounded — assumes
     * 'name' fits in field->name (capacity declared in goomsl_private.h,
     * confirm). */
    strcpy(field->name, name);
    field->type = type;
    return field;
  }
+
  /* Creates a field for a struct which will itself be a struct; 'type' is
   * the name of a previously registered struct. */
  GSL_StructField *gsl_new_struct_field_struct(const char *name, const char *type)
  {
    GSL_StructField *field = gsl_new_struct_field(name, gsl_get_struct_id(type));
    if (field->type < 0) {
      /* unknown struct name: hard failure (original fprintf/exit kept
       * below, disabled) */
      g_assert_not_reached ();
#if 0
      fprintf(stderr, "ERROR: Line %d, Unknown structure: '%s'\n",
              currentGoomSL->num_lines, type);
      exit(1);
#endif
    }
    return field;
  }
+
  /* Creates a Struct holding a single initial field.
   * NOTE(review): malloc result unchecked. */
  GSL_Struct *gsl_new_struct(GSL_StructField *field)
  {
    GSL_Struct *s = (GSL_Struct*)malloc(sizeof(GSL_Struct));
    s->nbFields = 1;
    s->fields[0] = field;
    return s;
  }

  /* Adds a field to a struct.
   * NOTE(review): no bounds check against the capacity of s->fields
   * (declared in goomsl_private.h) — confirm callers cannot overflow it. */
  void gsl_add_struct_field(GSL_Struct *s, GSL_StructField *field)
  {
    s->fields[s->nbFields++] = field;
  }
+
+ int gsl_type_of_var(GoomHash *ns, const char *name)
+ {
+ char type_of[256];
+ HashValue *hv;
+ sprintf(type_of, "__type_of_%s", name);
+ hv = goom_hash_get(ns, type_of);
+ if (hv != NULL)
+ return hv->i;
+ fprintf(stderr, "ERROR: Unknown variable type: '%s'\n", name);
+ return -1;
+ }
+
  /* Declares variable 'name' of 'type' in namespace 'ns', binding it to
   * 'space' (freshly allocated on the GSL data heap when NULL).  For struct
   * types, every field is recursively registered as a pseudo-variable named
   * "name.field". */
  static void gsl_declare_var(GoomHash *ns, const char *name, int type, void *space)
  {
    char type_of[256];
    /* names starting with '@' always live in the global namespace */
    if (name[0] == '@') { ns = currentGoomSL->vars; }

    if (space == NULL) {
      switch (type) {
        case INSTR_INT:
        case INSTR_FLOAT:
        case INSTR_PTR:
          space = goom_heap_malloc_with_alignment(currentGoomSL->data_heap,
                                                  sizeof(int), sizeof(int));
          break;
#if 0
        case -1:
          fprintf(stderr, "What the fuck!\n");
          exit(1);
#endif
        default: /* we have a struct_id */
          space = goom_heap_malloc_with_alignment_prefixed(currentGoomSL->data_heap,
                                                           currentGoomSL->gsl_struct[type]->size, STRUCT_ALIGNMENT, sizeof(int));
      }
    }
    goom_hash_put_ptr(ns, name, (void*)space);
    /* NOTE(review): sprintf into 256-byte buffers here and below — very
     * long variable names would overflow them. */
    sprintf(type_of, "__type_of_%s", name);
    goom_hash_put_int(ns, type_of, type);

    /* Then the hack: register each struct field as a variable of its own. */
    if (type < FIRST_RESERVED)
    {
      int i;
      GSL_Struct *gsl_struct = currentGoomSL->gsl_struct[type];
      ((int*)space)[-1] = type; /* store the type in the structure's prefix */
      for (i = 0; i < gsl_struct->nbFields; ++i)
      {
        char full_name[256];
        char *cspace = (char*)space + gsl_struct->fields[i]->offsetInStruct;
        sprintf(full_name, "%s.%s", name, gsl_struct->fields[i]->name);
        gsl_declare_var(ns, full_name, gsl_struct->fields[i]->type, cspace);
      }
    }
  }
+
  /* Declare a variable which will be a struct */
  static void gsl_struct_decl(GoomHash *namespace, const char *struct_name, const char *name)
  {
    int struct_id = gsl_get_struct_id(struct_name);
    gsl_declare_var(namespace, name, struct_id, NULL);
  }

  /* Global-namespace declaration helpers, one per base type. */
  static void gsl_float_decl_global(const char *name)
  {
    gsl_declare_var(currentGoomSL->vars, name, INSTR_FLOAT, NULL);
  }
  static void gsl_int_decl_global(const char *name)
  {
    gsl_declare_var(currentGoomSL->vars, name, INSTR_INT, NULL);
  }
  static void gsl_ptr_decl_global(const char *name)
  {
    gsl_declare_var(currentGoomSL->vars, name, INSTR_PTR, NULL);
  }
  static void gsl_struct_decl_global_from_id(const char *name, int id)
  {
    gsl_declare_var(currentGoomSL->vars, name, id, NULL);
  }
+
  /* Current-namespace (local) declaration helpers, one per base type. */
  /* FLOAT */
  static void gsl_float_decl_local(const char *name)
  {
    gsl_declare_var(currentGoomSL->namespaces[currentGoomSL->currentNS], name, INSTR_FLOAT, NULL);
  }
  /* INT */
  static void gsl_int_decl_local(const char *name)
  {
    gsl_declare_var(currentGoomSL->namespaces[currentGoomSL->currentNS], name, INSTR_INT, NULL);
  }
  /* PTR */
  static void gsl_ptr_decl_local(const char *name)
  {
    gsl_declare_var(currentGoomSL->namespaces[currentGoomSL->currentNS], name, INSTR_PTR, NULL);
  }
  /* STRUCT */
  static void gsl_struct_decl_local(const char *struct_name, const char *name)
  {
    gsl_struct_decl(currentGoomSL->namespaces[currentGoomSL->currentNS],struct_name,name);
  }
+
+
  static void commit_test2(NodeType *set,const char *type, int instr);
  static NodeType *new_call(const char *name, NodeType *affect_list);

  /* SETTER */
  /* Builds the AST node for "lvalue = expression". */
  static NodeType *new_set(NodeType *lvalue, NodeType *expression)
  { /* {{{ */
    NodeType *set = new_op("set", OPR_SET, 2);
    set->unode.opr.op[0] = lvalue;
    set->unode.opr.op[1] = expression;
    return set;
  } /* }}} */
  /* Emits the INSTR_SET instruction for a previously built set-node. */
  static void commit_set(NodeType *set)
  { /* {{{ */
    commit_test2(set,"set",INSTR_SET);
  } /* }}} */
+
  /* PLUS_EQ */
  /* Builds the AST node for "lvalue += expression". */
  static NodeType *new_plus_eq(NodeType *lvalue, NodeType *expression) /* {{{ */
  {
    NodeType *set = new_op("plus_eq", OPR_PLUS_EQ, 2);
    set->unode.opr.op[0] = lvalue;
    set->unode.opr.op[1] = expression;
    return set;
  }
  /* Emits INSTR_ADD: op[0] += op[1] (op[1] reduced to a simple operand
   * first by precommit_node). */
  static void commit_plus_eq(NodeType *set)
  {
    precommit_node(set->unode.opr.op[1]);
#ifdef VERBOSE
    printf("add %s %s\n", set->unode.opr.op[0]->str, set->unode.opr.op[1]->str);
#endif
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, "add", INSTR_ADD, 2, set->line_number);
    commit_node(set->unode.opr.op[0],0);
    commit_node(set->unode.opr.op[1],1);
  } /* }}} */

  /* SUB_EQ */
  /* Builds the AST node for "lvalue -= expression". */
  static NodeType *new_sub_eq(NodeType *lvalue, NodeType *expression) /* {{{ */
  {
    NodeType *set = new_op("sub_eq", OPR_SUB_EQ, 2);
    set->unode.opr.op[0] = lvalue;
    set->unode.opr.op[1] = expression;
    return set;
  }
  /* Emits INSTR_SUB: op[0] -= op[1]. */
  static void commit_sub_eq(NodeType *set)
  {
    precommit_node(set->unode.opr.op[1]);
#ifdef VERBOSE
    printf("sub %s %s\n", set->unode.opr.op[0]->str, set->unode.opr.op[1]->str);
#endif
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, "sub", INSTR_SUB, 2, set->line_number);
    commit_node(set->unode.opr.op[0],0);
    commit_node(set->unode.opr.op[1],1);
  } /* }}} */

  /* MUL_EQ */
  /* Builds the AST node for "lvalue *= expression". */
  static NodeType *new_mul_eq(NodeType *lvalue, NodeType *expression) /* {{{ */
  {
    NodeType *set = new_op("mul_eq", OPR_MUL_EQ, 2);
    set->unode.opr.op[0] = lvalue;
    set->unode.opr.op[1] = expression;
    return set;
  }
  /* Emits INSTR_MUL: op[0] *= op[1]. */
  static void commit_mul_eq(NodeType *set)
  {
    precommit_node(set->unode.opr.op[1]);
#ifdef VERBOSE
    printf("mul %s %s\n", set->unode.opr.op[0]->str, set->unode.opr.op[1]->str);
#endif
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, "mul", INSTR_MUL, 2, set->line_number);
    commit_node(set->unode.opr.op[0],0);
    commit_node(set->unode.opr.op[1],1);
  } /* }}} */

  /* DIV_EQ */
  /* Builds the AST node for "lvalue /= expression". */
  static NodeType *new_div_eq(NodeType *lvalue, NodeType *expression) /* {{{ */
  {
    NodeType *set = new_op("div_eq", OPR_DIV_EQ, 2);
    set->unode.opr.op[0] = lvalue;
    set->unode.opr.op[1] = expression;
    return set;
  }
  /* Emits INSTR_DIV: op[0] /= op[1]. */
  static void commit_div_eq(NodeType *set)
  {
    precommit_node(set->unode.opr.op[1]);
#ifdef VERBOSE
    printf("div %s %s\n", set->unode.opr.op[0]->str, set->unode.opr.op[1]->str);
#endif
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, "div", INSTR_DIV, 2, set->line_number);
    commit_node(set->unode.opr.op[0],0);
    commit_node(set->unode.opr.op[1],1);
  } /* }}} */
+
/* commodity method for add, mult, ... */

/* Reduce an arithmetic expression node (add/sub/mul/div) to a plain
 * variable: compute the result into a temporary — reusing an operand
 * that is already a temporary when possible — then rewrite `expr` in
 * place so callers see it as a simple variable reference.
 * `type` is the mnemonic ("add", ...), `instr_id` the opcode. */
static void precommit_expr(NodeType *expr, const char *type, int instr_id)
{ /* {{{ */
  NodeType *tmp, *tmpcpy;
  int toAdd;

  /* compute "left" and "right" */
  switch (expr->unode.opr.nbOp) {
    case 2:
      precommit_node(expr->unode.opr.op[1]);
      /* fallthrough: a binary op also precommits its first operand */
    case 1:
      precommit_node(expr->unode.opr.op[0]);
  }

  if (is_tmp_expr(expr->unode.opr.op[0])) {
    /* left operand is already a temp: accumulate into it directly */
    tmp = expr->unode.opr.op[0];
    toAdd = 1;
  }
  else if (is_commutative_expr(instr_id) && (expr->unode.opr.nbOp==2) && is_tmp_expr(expr->unode.opr.op[1])) {
    /* commutative op with a temp right operand: swap operand roles */
    tmp = expr->unode.opr.op[1];
    toAdd = 0;
  }
  else {
    char stmp[256];
    /* declare a temporary variable to store the result */
    if (expr->unode.opr.op[0]->type == CONST_INT_NODE) {
      sprintf(stmp,"_i_tmp_%i",allocateTemp());
      gsl_int_decl_global(stmp);
    }
    else if (expr->unode.opr.op[0]->type == CONST_FLOAT_NODE) {
      /* NOTE(review): "_f_tmp%i" / "_p_tmp%i" below lack the trailing
       * underscore used everywhere else ("_f_tmp_%i") — apparently
       * harmless since ids are unique, but inconsistent. */
      sprintf(stmp,"_f_tmp%i",allocateTemp());
      gsl_float_decl_global(stmp);
    }
    else if (expr->unode.opr.op[0]->type == CONST_PTR_NODE) {
      sprintf(stmp,"_p_tmp%i",allocateTemp());
      gsl_ptr_decl_global(stmp);
    }
    else {
      /* not a constant: pick the temp type from the variable's type */
      int type = gsl_type_of_var(expr->unode.opr.op[0]->vnamespace, expr->unode.opr.op[0]->str);
      if (type == INSTR_FLOAT) {
        sprintf(stmp,"_f_tmp_%i",allocateTemp());
        gsl_float_decl_global(stmp);
      }
      else if (type == INSTR_PTR) {
        sprintf(stmp,"_p_tmp_%i",allocateTemp());
        gsl_ptr_decl_global(stmp);
      }
      else if (type == INSTR_INT) {
        sprintf(stmp,"_i_tmp_%i",allocateTemp());
        gsl_int_decl_global(stmp);
      }
      else if (type == -1) {
        /* unknown variable: formerly a fatal parse error (see #if 0) */
        g_assert_not_reached ();
#if 0
        fprintf(stderr, "ERROR: Line %d, Could not find variable '%s'\n",
                expr->line_number, expr->unode.opr.op[0]->str);
        exit(1);
#endif
      }
      else { /* type is a struct_id */
        sprintf(stmp,"_s_tmp_%i",allocateTemp());
        gsl_struct_decl_global_from_id(stmp,type);
      }
    }
    tmp = new_var(stmp,expr->line_number);

    /* set the tmp to the value of "op1" */
    tmpcpy = nodeClone(tmp);
    commit_node(new_set(tmp,expr->unode.opr.op[0]),0);
    toAdd = 1;

    tmp = tmpcpy;
  }

  /* add op2 to tmp */
#ifdef VERBOSE
  if (expr->unode.opr.nbOp == 2)
    printf("%s %s %s\n", type, tmp->str, expr->unode.opr.op[toAdd]->str);
  else
    printf("%s %s\n", type, tmp->str);
#endif
  currentGoomSL->instr = gsl_instr_init(currentGoomSL, type, instr_id, expr->unode.opr.nbOp, expr->line_number);
  tmpcpy = nodeClone(tmp);
  commit_node(tmp,0);
  if (expr->unode.opr.nbOp == 2) {
    commit_node(expr->unode.opr.op[toAdd],1);
  }

  /* redefine the ADD node now as the computed variable */
  nodeFreeInternals(expr);
  *expr = *tmpcpy;
  free(tmpcpy);
} /* }}} */
+
+ static NodeType *new_expr1(const char *name, int id, NodeType *expr1)
+ { /* {{{ */
+ NodeType *add = new_op(name, id, 1);
+ add->unode.opr.op[0] = expr1;
+ return add;
+ } /* }}} */
+
+ static NodeType *new_expr2(const char *name, int id, NodeType *expr1, NodeType *expr2)
+ { /* {{{ */
+ NodeType *add = new_op(name, id, 2);
+ add->unode.opr.op[0] = expr1;
+ add->unode.opr.op[1] = expr2;
+ return add;
+ } /* }}} */
+
/* ADD */
/* Build an AST node for an addition expression. */
static NodeType *new_add(NodeType *expr1, NodeType *expr2) { /* {{{ */
  return new_expr2("add", OPR_ADD, expr1, expr2);
}
/* Reduce an addition node to a temp variable (see precommit_expr). */
static void precommit_add(NodeType *add) {
  precommit_expr(add,"add",INSTR_ADD);
} /* }}} */

/* SUB */
/* Build an AST node for a subtraction expression. */
static NodeType *new_sub(NodeType *expr1, NodeType *expr2) { /* {{{ */
  return new_expr2("sub", OPR_SUB, expr1, expr2);
}
/* Reduce a subtraction node to a temp variable (see precommit_expr). */
static void precommit_sub(NodeType *sub) {
  precommit_expr(sub,"sub",INSTR_SUB);
} /* }}} */
+
/* NEG */
/* Unary minus: implemented as "0 - expr".  The zero constant must match
 * the operand's type (int or float); negating pointers or structs is
 * unsupported and trips an assertion (formerly a fatal error, #if 0). */
static NodeType *new_neg(NodeType *expr) { /* {{{ */
  NodeType *zeroConst = NULL;
  if (expr->type == CONST_INT_NODE)
    zeroConst = new_constInt("0", currentGoomSL->num_lines);
  else if (expr->type == CONST_FLOAT_NODE)
    zeroConst = new_constFloat("0.0", currentGoomSL->num_lines);
  else if (expr->type == CONST_PTR_NODE) {
    g_assert_not_reached ();
#if 0
    fprintf(stderr, "ERROR: Line %d, Could not negate const pointer.\n",
            currentGoomSL->num_lines);
    exit(1);
#endif
  }
  else {
    /* variable operand: look up its declared type */
    int type = gsl_type_of_var(expr->vnamespace, expr->str);
    if (type == INSTR_FLOAT)
      zeroConst = new_constFloat("0.0", currentGoomSL->num_lines);
    else if (type == INSTR_PTR) {
      g_assert_not_reached ();
#if 0
      fprintf(stderr, "ERROR: Line %d, Could not negate pointer.\n",
              currentGoomSL->num_lines);
      exit(1);
#endif
    }
    else if (type == INSTR_INT)
      zeroConst = new_constInt("0", currentGoomSL->num_lines);
    else if (type == -1) {
      /* unknown variable */
      g_assert_not_reached ();
#if 0
      fprintf(stderr, "ERROR: Line %d, Could not find variable '%s'\n",
              expr->line_number, expr->unode.opr.op[0]->str);
      exit(1);
#endif
    }
    else { /* type is a struct_id */
      g_assert_not_reached ();
#if 0
      fprintf(stderr, "ERROR: Line %d, Could not negate struct '%s'\n",
              expr->line_number, expr->str);
      exit(1);
#endif
    }
  }
  return new_expr2("sub", OPR_SUB, zeroConst, expr);
}
/* }}} */
+
/* MUL */
/* Build an AST node for a multiplication expression. */
static NodeType *new_mul(NodeType *expr1, NodeType *expr2) { /* {{{ */
  return new_expr2("mul", OPR_MUL, expr1, expr2);
}
/* Reduce a multiplication node to a temp variable (see precommit_expr). */
static void precommit_mul(NodeType *mul) {
  precommit_expr(mul,"mul",INSTR_MUL);
} /* }}} */
+
+ /* DIV */
+ static NodeType *new_div(NodeType *expr1, NodeType *expr2) { /* {{{ */
+ return new_expr2("div", OPR_DIV, expr1, expr2);
+ }
+ static void precommit_div(NodeType *mul) {
+ precommit_expr(mul,"div",INSTR_DIV);
+ } /* }}} */
+
+ /* CALL EXPRESSION */
+ static NodeType *new_call_expr(const char *name, NodeType *affect_list) { /* {{{ */
+ NodeType *call = new_call(name,affect_list);
+ NodeType *node = new_expr1(name, OPR_CALL_EXPR, call);
+ node->vnamespace = gsl_find_namespace(name);
+ if (node->vnamespace == NULL)
+ /* fprintf(stderr, "ERROR: Line %d, No return type for: '%s'\n", currentGoomSL->num_lines, name); */
+ return node;
+ }
/* Reduce a call-expression to a plain variable: declare a temp matching
 * the callee's return type, emit the call, copy the callee's return
 * slot into the temp, then rewrite `call` in place as that temp. */
static void precommit_call_expr(NodeType *call) {
  char stmp[256];
  NodeType *tmp,*tmpcpy;
  int type = gsl_type_of_var(call->vnamespace, call->str);
  if (type == INSTR_FLOAT) {
    sprintf(stmp,"_f_tmp_%i",allocateTemp());
    gsl_float_decl_global(stmp);
  }
  else if (type == INSTR_PTR) {
    sprintf(stmp,"_p_tmp_%i",allocateTemp());
    gsl_ptr_decl_global(stmp);
  }
  else if (type == INSTR_INT) {
    sprintf(stmp,"_i_tmp_%i",allocateTemp());
    gsl_int_decl_global(stmp);
  }
  else if (type == -1) {
    /* unknown variable: formerly a fatal parse error (see #if 0) */
    g_assert_not_reached ();
#if 0
    fprintf(stderr, "ERROR: Line %d, Could not find variable '%s'\n",
            call->line_number, call->str);
    exit(1);
#endif
  }
  else { /* type is a struct_id */
    sprintf(stmp,"_s_tmp_%i",allocateTemp());
    gsl_struct_decl_global_from_id(stmp,type);
  }
  tmp = new_var(stmp,call->line_number);
  commit_node(call->unode.opr.op[0],0);  /* emits the call itself */
  tmpcpy = nodeClone(tmp);
  commit_node(new_set(tmp,new_var(call->str,call->line_number)),0);

  nodeFreeInternals(call);
  *call = *tmpcpy;  /* the call node now denotes the temp variable */
  free(tmpcpy);
} /* }}} */
+
/* Emit a two-operand test instruction (isequal/islower).  A constant
 * left operand is first copied into a freshly declared temp because
 * the instruction's first parameter must be a variable. */
static void commit_test2(NodeType *set,const char *type, int instr)
{ /* {{{ */
  NodeType *tmp;
  char stmp[256];
  precommit_node(set->unode.opr.op[0]);
  precommit_node(set->unode.opr.op[1]);
  tmp = set->unode.opr.op[0];

  stmp[0] = 0;  /* non-empty afterwards iff op[0] is a constant */
  if (set->unode.opr.op[0]->type == CONST_INT_NODE) {
    sprintf(stmp,"_i_tmp_%i",allocateTemp());
    gsl_int_decl_global(stmp);
  }
  else if (set->unode.opr.op[0]->type == CONST_FLOAT_NODE) {
    /* NOTE(review): "_f_tmp%i" / "_p_tmp%i" drop the trailing
     * underscore used elsewhere — harmless but inconsistent. */
    sprintf(stmp,"_f_tmp%i",allocateTemp());
    gsl_float_decl_global(stmp);
  }
  else if (set->unode.opr.op[0]->type == CONST_PTR_NODE) {
    sprintf(stmp,"_p_tmp%i",allocateTemp());
    gsl_ptr_decl_global(stmp);
  }
  if (stmp[0]) {
    NodeType *tmpcpy;
    tmp = new_var(stmp, set->line_number);
    tmpcpy = nodeClone(tmp);
    commit_node(new_set(tmp,set->unode.opr.op[0]),0);
    tmp = tmpcpy;
  }

#ifdef VERBOSE
  printf("%s %s %s\n", type, tmp->str, set->unode.opr.op[1]->str);
#endif
  currentGoomSL->instr = gsl_instr_init(currentGoomSL, type, instr, 2, set->line_number);
  commit_node(tmp,instr!=INSTR_SET);  /* release the temp unless plain set */
  commit_node(set->unode.opr.op[1],1);
} /* }}} */
+
/* NOT */
/* Build a logical-negation node over a test expression. */
static NodeType *new_not(NodeType *expr1) { /* {{{ */
  return new_expr1("not", OPR_NOT, expr1);
}
/* Emit: evaluate the operand, then a NOT instruction (its single
 * parameter is an unused dummy label). */
static void commit_not(NodeType *set)
{
  commit_node(set->unode.opr.op[0],0);
#ifdef VERBOSE
  printf("not\n");
#endif
  currentGoomSL->instr = gsl_instr_init(currentGoomSL, "not", INSTR_NOT, 1, set->line_number);
  gsl_instr_add_param(currentGoomSL->instr, "|dummy|", TYPE_LABEL);
} /* }}} */
+
/* EQU */
/* Build an equality test node ("a = b" in the script language). */
static NodeType *new_equ(NodeType *expr1, NodeType *expr2) { /* {{{ */
  return new_expr2("isequal", OPR_EQU, expr1, expr2);
}
static void commit_equ(NodeType *mul) {
  commit_test2(mul,"isequal",INSTR_ISEQUAL);
} /* }}} */

/* INF */
/* Build a less-than test node; "a > b" is parsed as new_low(b, a). */
static NodeType *new_low(NodeType *expr1, NodeType *expr2) { /* {{{ */
  return new_expr2("islower", OPR_LOW, expr1, expr2);
}
static void commit_low(NodeType *mul) {
  commit_test2(mul,"islower",INSTR_ISLOWER);
} /* }}} */
+
+ /* WHILE */
+ static NodeType *new_while(NodeType *expression, NodeType *instr) { /* {{{ */
+ NodeType *node = new_op("while", OPR_WHILE, 2);
+ node->unode.opr.op[0] = expression;
+ node->unode.opr.op[1] = instr;
+ return node;
+ }
+
/* Emit a while loop with the test at the bottom:
 *   jump test ; start: <body> ; test: <cond> ; jnzero start */
static void commit_while(NodeType *node)
{
  int lbl = allocateLabel();
  char start_while[1024], test_while[1024];
  sprintf(start_while, "|start_while_%d|", lbl);
  sprintf(test_while, "|test_while_%d|", lbl);

  GSL_PUT_JUMP(test_while,node->line_number);
  GSL_PUT_LABEL(start_while,node->line_number);

  /* code */
  commit_node(node->unode.opr.op[1],0);

  GSL_PUT_LABEL(test_while,node->line_number);
  commit_node(node->unode.opr.op[0],0);
  GSL_PUT_JNZERO(start_while,node->line_number);
} /* }}} */
+
+ /* FOR EACH */
+ static NodeType *new_static_foreach(NodeType *var, NodeType *var_list, NodeType *instr) { /* {{{ */
+ NodeType *node = new_op("for", OPR_FOREACH, 3);
+ node->unode.opr.op[0] = var;
+ node->unode.opr.op[1] = var_list;
+ node->unode.opr.op[2] = instr;
+ node->line_number = currentGoomSL->num_lines;
+ return node;
+ }
/* Emit a static foreach: the body is compiled once as an out-of-line
 * mini-function; then, for each variable in the list, the loop variable
 * is loaded, the mini-function is called, and the (possibly modified)
 * value is written back to the list variable. */
static void commit_foreach(NodeType *node)
{
  NodeType *cur = node->unode.opr.op[1];
  char tmp_func[256], tmp_loop[256];
  int lbl = allocateLabel();
  sprintf(tmp_func, "|foreach_func_%d|", lbl);
  sprintf(tmp_loop, "|foreach_loop_%d|", lbl);

  /* skip over the body function to the unrolled call sequence */
  GSL_PUT_JUMP(tmp_loop, node->line_number);
  GSL_PUT_LABEL(tmp_func, node->line_number);

  precommit_node(node->unode.opr.op[2]);
  commit_node(node->unode.opr.op[2], 0);

  currentGoomSL->instr = gsl_instr_init(currentGoomSL, "ret", INSTR_RET, 1, node->line_number);
  gsl_instr_add_param(currentGoomSL->instr, "|dummy|", TYPE_LABEL);
#ifdef VERBOSE
  printf("ret\n");
#endif

  GSL_PUT_LABEL(tmp_loop, node->line_number);

  while (cur != NULL)
  {
    NodeType *x, *var;

    /* 1: x=var */
    x = nodeClone(node->unode.opr.op[0]);
    var = nodeClone(cur->unode.opr.op[0]);
    commit_node(new_set(x, var),0);

    /* 2: instr */
    currentGoomSL->instr = gsl_instr_init(currentGoomSL, "call", INSTR_CALL, 1, node->line_number);
    gsl_instr_add_param(currentGoomSL->instr, tmp_func, TYPE_LABEL);
#ifdef VERBOSE
    printf("call %s\n", tmp_func);
#endif

    /* 3: var=x (write back; cur->op[0] is consumed, not cloned) */
    x = nodeClone(node->unode.opr.op[0]);
    var = cur->unode.opr.op[0];
    commit_node(new_set(var, x),0);
    cur = cur->unode.opr.op[1];
  }
  nodeFree(node->unode.opr.op[0]);
} /* }}} */
+
/* IF */
/* Build an "(test)? instr" node; op[0]=condition, op[1]=body. */
static NodeType *new_if(NodeType *expression, NodeType *instr) { /* {{{ */
  NodeType *node = new_op("if", OPR_IF, 2);
  node->unode.opr.op[0] = expression;
  node->unode.opr.op[1] = instr;
  return node;
}
/* Emit: evaluate the condition, jump past the body when it is zero. */
static void commit_if(NodeType *node) {

  char slab[1024];
  sprintf(slab, "|eif%d|", allocateLabel());
  commit_node(node->unode.opr.op[0],0);
  GSL_PUT_JZERO(slab,node->line_number);
  /* code */
  commit_node(node->unode.opr.op[1],0);
  GSL_PUT_LABEL(slab,node->line_number);
} /* }}} */
+
/* BLOCK */
/* Build a { ... } block: op[0] is a start-marker nop that statements
 * are chained onto during parsing, op[1] remembers the node that was
 * `lastNode` when the block opened (restored by the grammar action). */
static NodeType *new_block(NodeType *lastNode) { /* {{{ */
  NodeType *blk = new_op("block", OPR_BLOCK, 2);
  blk->unode.opr.op[0] = new_nop("start_of_block");
  blk->unode.opr.op[1] = lastNode;
  return blk;
}
/* Emit the statements chained after the start marker. */
static void commit_block(NodeType *node) {
  commit_node(node->unode.opr.op[0]->unode.opr.next,0);
} /* }}} */
+
+ /* FUNCTION INTRO */
+ static NodeType *new_function_intro(const char *name) { /* {{{ */
+ char stmp[256];
+ if (strlen(name) < 200) {
+ sprintf(stmp, "|__func_%s|", name);
+ }
+ return new_op(stmp, OPR_FUNC_INTRO, 0);
+ }
/* Emit the LABEL instruction that begins a function's code. */
static void commit_function_intro(NodeType *node) {
  currentGoomSL->instr = gsl_instr_init(currentGoomSL, "label", INSTR_LABEL, 1, node->line_number);
  gsl_instr_add_param(currentGoomSL->instr, node->str, TYPE_LABEL);
#ifdef VERBOSE
  printf("label %s\n", node->str);
#endif
} /* }}} */
+
/* FUNCTION OUTRO */
/* Build the node terminating a function body. */
static NodeType *new_function_outro() { /* {{{ */
  return new_op("ret", OPR_FUNC_OUTRO, 0);
}
/* Emit RET (dummy label parameter) and recycle all temp variables —
 * temps are only unique within one function. */
static void commit_function_outro(NodeType *node) {
  currentGoomSL->instr = gsl_instr_init(currentGoomSL, "ret", INSTR_RET, 1, node->line_number);
  gsl_instr_add_param(currentGoomSL->instr, "|dummy|", TYPE_LABEL);
  releaseAllTemps();
#ifdef VERBOSE
  printf("ret\n");
#endif
} /* }}} */
+
+ /* AFFECTATION LIST */
+ static NodeType *new_affec_list(NodeType *set, NodeType *next) /* {{{ */
+ {
+ NodeType *node = new_op("affect_list", OPR_AFFECT_LIST, 2);
+ node->unode.opr.op[0] = set;
+ node->unode.opr.op[1] = next;
+ return node;
+ }
/* Build the write-back list for out-parameters: for every affectation
 * whose lvalue name starts with '&' and whose rvalue is a plain
 * variable, create the reversed assignment (variable = &param) to run
 * AFTER the call returns.  Returns NULL when no out-parameters exist. */
static NodeType *new_affect_list_after(NodeType *affect_list)
{
  NodeType *ret = NULL;
  NodeType *cur = affect_list;
  while(cur != NULL) {
    NodeType *set = cur->unode.opr.op[0];
    NodeType *next = cur->unode.opr.op[1];
    NodeType *lvalue = set->unode.opr.op[0];
    NodeType *expression = set->unode.opr.op[1];
    if ((lvalue->str[0] == '&') && (expression->type == VAR_NODE)) {
      /* operands are cloned: the original list is committed separately */
      NodeType *nset = new_set(nodeClone(expression), nodeClone(lvalue));
      ret = new_affec_list(nset, ret);
    }
    cur = next;
  }
  return ret;
}
+ static void commit_affect_list(NodeType *node)
+ {
+ NodeType *cur = node;
+ while(cur != NULL) {
+ NodeType *set = cur->unode.opr.op[0];
+ precommit_node(set->unode.opr.op[0]);
+ precommit_node(set->unode.opr.op[1]);
+ cur = cur->unode.opr.op[1];
+ }
+ cur = node;
+ while(cur != NULL) {
+ NodeType *set = cur->unode.opr.op[0];
+ commit_node(set,0);
+ cur = cur->unode.opr.op[1];
+ }
+ } /* }}} */
+
/* VAR LIST */
/* Prepend a variable to a singly linked variable list (op[0]=variable,
 * op[1]=rest); consumed by the static foreach construct. */
static NodeType *new_var_list(NodeType *var, NodeType *next) /* {{{ */
{
  NodeType *node = new_op("var_list", OPR_VAR_LIST, 2);
  node->unode.opr.op[0] = var;
  node->unode.opr.op[1] = next;
  return node;
}
/* Variable lists emit no code of their own (handled by commit_foreach). */
static void commit_var_list(NodeType *node)
{
} /* }}} */
+
+ /* FUNCTION CALL */
+ static NodeType *new_call(const char *name, NodeType *affect_list) { /* {{{ */
+ HashValue *fval;
+ fval = goom_hash_get(currentGoomSL->functions, name);
+ if (!fval) {
+ gsl_declare_task(name);
+ fval = goom_hash_get(currentGoomSL->functions, name);
+ }
+ if (!fval) {
+ g_assert_not_reached ();
+#if 0
+ fprintf(stderr, "ERROR: Line %d, Could not find function %s\n", currentGoomSL->num_lines, name);
+ exit(1);
+ return NULL;
+#endif
+ }
+ else {
+ ExternalFunctionStruct *gef = (ExternalFunctionStruct*)fval->ptr;
+ if (gef->is_extern) {
+ NodeType *node = new_op(name, OPR_EXT_CALL, 1);
+ node->unode.opr.op[0] = affect_list;
+ return node;
+ }
+ else {
+ NodeType *node;
+ char stmp[256];
+ if (strlen(name) < 200) {
+ sprintf(stmp, "|__func_%s|", name);
+ }
+ node = new_op(stmp, OPR_CALL, 1);
+ node->unode.opr.op[0] = affect_list;
+ return node;
+ }
+ }
+ }
/* Emit an external-function call: set in-parameters, EXTCALL, then the
 * write-back assignments for '&' out-parameters. */
static void commit_ext_call(NodeType *node) {
  NodeType *alafter = new_affect_list_after(node->unode.opr.op[0]);
  commit_node(node->unode.opr.op[0],0);
  currentGoomSL->instr = gsl_instr_init(currentGoomSL, "extcall", INSTR_EXT_CALL, 1, node->line_number);
  gsl_instr_add_param(currentGoomSL->instr, node->str, TYPE_VAR);
#ifdef VERBOSE
  printf("extcall %s\n", node->str);
#endif
  commit_node(alafter,0);
}
/* Emit a script-function call; identical shape but CALL targets the
 * function's label instead of a variable. */
static void commit_call(NodeType *node) {
  NodeType *alafter = new_affect_list_after(node->unode.opr.op[0]);
  commit_node(node->unode.opr.op[0],0);
  currentGoomSL->instr = gsl_instr_init(currentGoomSL, "call", INSTR_CALL, 1, node->line_number);
  gsl_instr_add_param(currentGoomSL->instr, node->str, TYPE_LABEL);
#ifdef VERBOSE
  printf("call %s\n", node->str);
#endif
  commit_node(alafter,0);
} /* }}} */
+
/** **/

/* Head and tail of the top-level statement list being parsed.
 * TODO (original note): reset for each compilation. */
static NodeType *rootNode = 0; /* TODO: reinitialiser a chaque compilation. */
static NodeType *lastNode = 0;
/* Append curNode (and any already-chained successors) to the program
 * list; returns curNode unchanged, or 0 for a NULL input. */
static NodeType *gsl_append(NodeType *curNode) {
  if (curNode == 0) return 0; /* {{{ */
  if (lastNode)
    lastNode->unode.opr.next = curNode;
  lastNode = curNode;
  /* advance to the true tail in case curNode carries a chain */
  while(lastNode->unode.opr.next) lastNode = lastNode->unode.opr.next;
  if (rootNode == 0)
    rootNode = curNode;
  return curNode;
} /* }}} */
+
#if 1
/* Active temp allocator: ids are never recycled (they share the label
 * counter), so release operations are no-ops. */
int allocateTemp() {
  return allocateLabel();
}
void releaseAllTemps() {}
void releaseTemp(int n) {}
#else
/* Disabled alternative that recycles released temp ids via a grow-on-
 * demand array of ids currently in use. */
static int nbTemp = 0;
static int *tempArray = 0;
static int tempArraySize = 0;
int allocateTemp() { /* TODO: allocateITemp, allocateFTemp */
  int i = 0; /* {{{ */
  if (tempArray == 0) {
    tempArraySize = 256;
    tempArray = (int*)malloc(tempArraySize * sizeof(int));
  }
  /* return the smallest id not present in tempArray */
  while (1) {
    int j;
    for (j=0;j<nbTemp;++j) {
      if (tempArray[j] == i) break;
    }
    if (j == nbTemp) {
      if (nbTemp == tempArraySize) {
        tempArraySize *= 2;
        tempArray = (int*)realloc(tempArray,tempArraySize * sizeof(int));
      }
      tempArray[nbTemp++] = i;
      return i;
    }
    i++;
  }
} /* }}} */
void releaseAllTemps() {
  nbTemp = 0; /* {{{ */
} /* }}} */
void releaseTemp(int n) {
  int j; /* {{{ */
  for (j=0;j<nbTemp;++j) {
    if (tempArray[j] == n) {
      /* swap-remove: order of live ids does not matter */
      tempArray[j] = tempArray[--nbTemp];
      break;
    }
  }
} /* }}} */
#endif
+
/* Monotonic counter providing unique ids for jump labels (and temps). */
static int lastLabel = 0;
int allocateLabel() {
  return ++lastLabel; /* {{{ */
} /* }}} */
+
/* Emit code for the whole parsed program, then reset the statement
 * list for the next compilation. */
void gsl_commit_compilation()
{ /* {{{ */
  commit_node(rootNode,0);
  rootNode = 0;
  lastNode = 0;
} /* }}} */
+
/* Pre-pass over one node: arithmetic operators and call expressions
 * are reduced in place to plain variables before commit_node runs;
 * every other node kind needs no preparation. */
void precommit_node(NodeType *node)
{ /* {{{ */
  /* do here stuff for expression.. for exemple */
  if (node->type == OPR_NODE)
    switch(node->unode.opr.type) {
      case OPR_ADD: precommit_add(node); break;
      case OPR_SUB: precommit_sub(node); break;
      case OPR_MUL: precommit_mul(node); break;
      case OPR_DIV: precommit_div(node); break;
      case OPR_CALL_EXPR: precommit_call_expr(node); break;
    }
} /* }}} */
+
/* Emit code for `node`.  Operator nodes dispatch to their commit
 * handler and then recurse on the chained successor; leaf nodes attach
 * themselves as a parameter of the instruction under construction.
 * `releaseIfTmp` recycles the node's temp slot after use.  NOTE: the
 * node is FREED before returning — callers must not touch it after. */
void commit_node(NodeType *node, int releaseIfTmp)
{ /* {{{ */
  if (node == 0) return;

  switch(node->type) {
    case OPR_NODE:
      switch(node->unode.opr.type) {
        case OPR_SET: commit_set(node); break;
        case OPR_PLUS_EQ: commit_plus_eq(node); break;
        case OPR_SUB_EQ: commit_sub_eq(node); break;
        case OPR_MUL_EQ: commit_mul_eq(node); break;
        case OPR_DIV_EQ: commit_div_eq(node); break;
        case OPR_IF: commit_if(node); break;
        case OPR_WHILE: commit_while(node); break;
        case OPR_BLOCK: commit_block(node); break;
        case OPR_FUNC_INTRO: commit_function_intro(node); break;
        case OPR_FUNC_OUTRO: commit_function_outro(node); break;
        case OPR_CALL: commit_call(node); break;
        case OPR_EXT_CALL: commit_ext_call(node); break;
        case OPR_EQU: commit_equ(node); break;
        case OPR_LOW: commit_low(node); break;
        case OPR_NOT: commit_not(node); break;
        case OPR_AFFECT_LIST: commit_affect_list(node); break;
        case OPR_FOREACH: commit_foreach(node); break;
        case OPR_VAR_LIST: commit_var_list(node); break;
#ifdef VERBOSE
        case EMPTY_NODE: printf("NOP\n"); break;
#endif
      }

      commit_node(node->unode.opr.next,0); /* recursive for the moment, maybe better to do something iterative? */
      break;

    case VAR_NODE: gsl_instr_set_namespace(currentGoomSL->instr, node->vnamespace);
                   gsl_instr_add_param(currentGoomSL->instr, node->str, TYPE_VAR); break;
    case CONST_INT_NODE: gsl_instr_add_param(currentGoomSL->instr, node->str, TYPE_INTEGER); break;
    case CONST_FLOAT_NODE: gsl_instr_add_param(currentGoomSL->instr, node->str, TYPE_FLOAT); break;
    case CONST_PTR_NODE: gsl_instr_add_param(currentGoomSL->instr, node->str, TYPE_PTR); break;
  }
  if (releaseIfTmp && is_tmp_expr(node))
    releaseTemp(get_tmp_id(node));

  nodeFree(node);
} /* }}} */
+
+ NodeType *nodeNew(const char *str, int type, int line_number) {
+ NodeType *node = (NodeType*)malloc(sizeof(NodeType)); /* {{{ */
+ node->type = type;
+ node->str = (char*)malloc(strlen(str)+1);
+ node->vnamespace = NULL;
+ node->line_number = line_number;
+ strcpy(node->str, str);
+ return node;
+ } /* }}} */
/* Shallow copy: the string is duplicated via nodeNew, but the operand
 * union is copied by value, so operand POINTERS are shared. */
static NodeType *nodeClone(NodeType *node) {
  NodeType *ret = nodeNew(node->str, node->type, node->line_number); /* {{{ */
  ret->vnamespace = node->vnamespace;
  ret->unode = node->unode;
  return ret;
} /* }}} */
+
/* Release only the node's owned string (operands are NOT freed). */
void nodeFreeInternals(NodeType *node) {
  free(node->str); /* {{{ */
} /* }}} */

/* Release the node and its string; operands are NOT freed. */
void nodeFree(NodeType *node) {
  nodeFreeInternals(node); /* {{{ */
  free(node);
} /* }}} */
+
/* Integer literal node; the numeric value is cached via atoi. */
NodeType *new_constInt(const char *str, int line_number) {
  NodeType *node = nodeNew(str, CONST_INT_NODE, line_number); /* {{{ */
  node->unode.constInt.val = atoi(str);
  return node;
} /* }}} */

/* Pointer literal node; base 0 strtol accepts decimal/hex/octal ids. */
NodeType *new_constPtr(const char *str, int line_number) {
  NodeType *node = nodeNew(str, CONST_PTR_NODE, line_number); /* {{{ */
  node->unode.constPtr.id = strtol(str,NULL,0);
  return node;
} /* }}} */

/* Float literal node; the numeric value is cached via atof. */
NodeType *new_constFloat(const char *str, int line_number) {
  NodeType *node = nodeNew(str, CONST_FLOAT_NODE, line_number); /* {{{ */
  node->unode.constFloat.val = atof(str);
  return node;
} /* }}} */
+
/* Variable reference node; the enclosing namespace is resolved at
 * construction time.  An unknown variable was formerly a fatal error
 * (see #if 0) and now trips an assertion. */
NodeType *new_var(const char *str, int line_number) {
  NodeType *node = nodeNew(str, VAR_NODE, line_number); /* {{{ */
  node->vnamespace = gsl_find_namespace(str);
  if (node->vnamespace == 0) {
    g_assert_not_reached ();
#if 0
    fprintf(stderr, "ERROR: Line %d, Variable not found: '%s'\n", line_number, str);
    exit(1);
#endif
  }
  return node;
} /* }}} */
+
/* Operator node that performs nothing; used as a placeholder (e.g. the
 * start-of-block marker). */
NodeType *new_nop(const char *str) {
  NodeType *node = new_op(str, EMPTY_NODE, 0); /* {{{ */
  return node;
} /* }}} */

/* Generic operator node with `nbOp` operand slots, all cleared; the
 * line number is taken from the parser's current position. */
NodeType *new_op(const char *str, int type, int nbOp) {
  int i; /* {{{ */
  NodeType *node = nodeNew(str, OPR_NODE, currentGoomSL->num_lines);
  node->unode.opr.next = 0;
  node->unode.opr.type = type;
  node->unode.opr.nbOp = nbOp;
  for (i=0;i<nbOp;++i) node->unode.opr.op[i] = 0;
  return node;
} /* }}} */
+
+
/* Declare a global of the given parser-level type: -1 means "no return
 * type" (nothing to declare); FLOAT_TK/INT_TK/PTR_TK map to the basic
 * kinds; any other value encodes a struct id offset by 1000 (matching
 * the return_type grammar rule below). */
void gsl_declare_global_variable(int type, char *name) {
  switch(type){
    case -1: break;
    case FLOAT_TK:gsl_float_decl_global(name);break;
    case INT_TK: gsl_int_decl_global(name);break;
    case PTR_TK: gsl_ptr_decl_global(name);break;
    default:
    {
      int id = type - 1000;
      gsl_struct_decl_global_from_id(name,id);
    }
  }
}
+
+%}
+
+%union {
+ int intValue;
+ float floatValue;
+ char charValue;
+ char strValue[2048];
+ NodeType *nPtr;
+ GoomHash *namespace;
+ GSL_Struct *gsl_struct;
+ GSL_StructField *gsl_struct_field;
+ };
+
+%token <strValue> LTYPE_INTEGER
+%token <strValue> LTYPE_FLOAT
+%token <strValue> LTYPE_VAR
+%token <strValue> LTYPE_PTR
+
+%token PTR_TK INT_TK FLOAT_TK DECLARE EXTERNAL WHILE DO NOT PLUS_EQ SUB_EQ DIV_EQ MUL_EQ SUP_EQ LOW_EQ NOT_EQ STRUCT FOR IN
+
+%type <intValue> return_type
+%type <nPtr> expression constValue instruction test func_call func_call_expression
+%type <nPtr> start_block affectation_list affectation_in_list affectation declaration
+%type <nPtr> var_list_content var_list
+%type <strValue> task_name ext_task_name
+%type <namespace> leave_namespace
+%type <gsl_struct> struct_members
+%type <gsl_struct_field> struct_member
+%left '\n'
+%left PLUS_EQ SUB_EQ MUL_EQ DIV_EQ
+%left NOT
+%left '=' '<' '>'
+%left '+' '-'
+%left '/' '*'
+
+%%
+
+/* -------------- Global architechture of a GSL program ------------*/
+
+gsl: gsl_code function_outro gsl_def_functions ;
+
+gsl_code: gsl_code instruction { gsl_append($2); }
+ | gsl_code EXTERNAL '<' ext_task_name '>' return_type '\n' leave_namespace { gsl_declare_global_variable($6,$4); }
+ | gsl_code EXTERNAL '<' ext_task_name ':' arglist '>' return_type '\n' leave_namespace { gsl_declare_global_variable($8,$4); }
+ | gsl_code DECLARE '<' task_name '>' return_type '\n' leave_namespace { gsl_declare_global_variable($6,$4); }
+ | gsl_code DECLARE '<' task_name ':' arglist '>' return_type '\n' leave_namespace { gsl_declare_global_variable($8,$4); }
+ | gsl_code struct_declaration
+ | gsl_code '\n'
+ |
+ ;
+
+/* ------------- Declaration of a structure ------------ */
+
+struct_declaration: STRUCT '<' LTYPE_VAR ':' struct_members '>' '\n' { gsl_add_struct($3, $5); }
+ ;
+
+struct_members: opt_nl struct_member { $$ = gsl_new_struct($2); }
+ | struct_members ',' opt_nl struct_member { $$ = $1; gsl_add_struct_field($1, $4); }
+ ;
+
+struct_member: INT_TK LTYPE_VAR { $$ = gsl_new_struct_field($2, INSTR_INT); }
+ | FLOAT_TK LTYPE_VAR { $$ = gsl_new_struct_field($2, INSTR_FLOAT); }
+ | PTR_TK LTYPE_VAR { $$ = gsl_new_struct_field($2, INSTR_PTR); }
+ | LTYPE_VAR LTYPE_VAR { $$ = gsl_new_struct_field_struct($2, $1); }
+ ;
+
+/* ------------- Fonction declarations -------------- */
+
+ext_task_name: LTYPE_VAR { gsl_declare_external_task($1); gsl_enternamespace($1); strcpy($$,$1); }
+ ;
+task_name: LTYPE_VAR { gsl_declare_task($1); gsl_enternamespace($1); strcpy($$,$1); strcpy($$,$1); }
+ ;
+
+return_type: { $$=-1; }
+ | ':' INT_TK { $$=INT_TK; }
+ | ':' FLOAT_TK { $$=FLOAT_TK; }
+ | ':' PTR_TK { $$=PTR_TK; }
+ | ':' LTYPE_VAR { $$= 1000 + gsl_get_struct_id($2); }
+ ;
+
+arglist: empty_declaration
+ | empty_declaration ',' arglist
+ ;
+
+/* ------------- Fonction definition -------------- */
+
+gsl_def_functions: gsl_def_functions function
+ |
+ ;
+
+function: function_intro gsl_code function_outro { gsl_leavenamespace(); }
+
+function_intro: '<' task_name '>' return_type '\n' { gsl_append(new_function_intro($2));
+ gsl_declare_global_variable($4,$2); }
+ | '<' task_name ':' arglist '>' return_type '\n' { gsl_append(new_function_intro($2));
+ gsl_declare_global_variable($6,$2); }
+ ;
+function_outro: { gsl_append(new_function_outro()); } ;
+
+leave_namespace: { $$ = gsl_leavenamespace(); };
+
+/* ------------ Variable declaration ---------------- */
+
+declaration: FLOAT_TK LTYPE_VAR '=' expression { gsl_float_decl_local($2); $$ = new_set(new_var($2,currentGoomSL->num_lines), $4); }
+ | INT_TK LTYPE_VAR '=' expression { gsl_int_decl_local($2); $$ = new_set(new_var($2,currentGoomSL->num_lines), $4); }
+ | PTR_TK LTYPE_VAR '=' expression { gsl_ptr_decl_local($2); $$ = new_set(new_var($2,currentGoomSL->num_lines), $4); }
+ | LTYPE_VAR LTYPE_VAR '=' expression { gsl_struct_decl_local($1,$2); $$ = new_set(new_var($2,currentGoomSL->num_lines), $4); }
+ | empty_declaration { $$ = 0; }
+ ;
+
+empty_declaration: FLOAT_TK LTYPE_VAR { gsl_float_decl_local($2); }
+ | INT_TK LTYPE_VAR { gsl_int_decl_local($2); }
+ | PTR_TK LTYPE_VAR { gsl_ptr_decl_local($2); }
+ | LTYPE_VAR LTYPE_VAR { gsl_struct_decl_local($1,$2); }
+ ;
+
+/* -------------- Instructions and Expressions ------------------ */
+
+instruction: affectation '\n' { $$ = $1; }
+ | declaration '\n' { $$ = $1; }
+ | '(' test ')' '?' opt_nl instruction { $$ = new_if($2,$6); }
+ | WHILE test opt_nl DO opt_nl instruction { $$ = new_while($2,$6); }
+ | '{' '\n' start_block gsl_code '}' '\n' { lastNode = $3->unode.opr.op[1]; $$=$3; }
+ | func_call { $$ = $1; }
+ | LTYPE_VAR PLUS_EQ expression { $$ = new_plus_eq(new_var($1,currentGoomSL->num_lines),$3); }
+ | LTYPE_VAR SUB_EQ expression { $$ = new_sub_eq(new_var($1,currentGoomSL->num_lines),$3); }
+ | LTYPE_VAR MUL_EQ expression { $$ = new_mul_eq(new_var($1,currentGoomSL->num_lines),$3); }
+ | LTYPE_VAR DIV_EQ expression { $$ = new_div_eq(new_var($1,currentGoomSL->num_lines),$3); }
+ | FOR LTYPE_VAR IN var_list DO instruction { $$ = new_static_foreach(new_var($2, currentGoomSL->num_lines), $4, $6); }
+ ;
+
+var_list: '(' var_list_content ')' { $$ = $2; }
+ ;
+var_list_content: LTYPE_VAR { $$ = new_var_list(new_var($1,currentGoomSL->num_lines), NULL); }
+ | LTYPE_VAR var_list_content { $$ = new_var_list(new_var($1,currentGoomSL->num_lines), $2); }
+ ;
+
+affectation: LTYPE_VAR '=' expression { $$ = new_set(new_var($1,currentGoomSL->num_lines),$3); } ;
+
+start_block: { $$ = new_block(lastNode); lastNode = $$->unode.opr.op[0]; }
+ ;
+
+expression: LTYPE_VAR { $$ = new_var($1,currentGoomSL->num_lines); }
+ | constValue { $$ = $1; }
+ | expression '*' expression { $$ = new_mul($1,$3); }
+ | expression '/' expression { $$ = new_div($1,$3); }
+ | expression '+' expression { $$ = new_add($1,$3); }
+ | expression '-' expression { $$ = new_sub($1,$3); }
+ | '-' expression { $$ = new_neg($2); }
+ | '(' expression ')' { $$ = $2; }
+ | func_call_expression { $$ = $1; }
+ ;
+
+test: expression '=' expression { $$ = new_equ($1,$3); }
+ | expression '<' expression { $$ = new_low($1,$3); }
+ | expression '>' expression { $$ = new_low($3,$1); }
+ | expression SUP_EQ expression { $$ = new_not(new_low($1,$3)); }
+ | expression LOW_EQ expression { $$ = new_not(new_low($3,$1)); }
+ | expression NOT_EQ expression { $$ = new_not(new_equ($1,$3)); }
+ | NOT test { $$ = new_not($2); }
+ ;
+
+constValue: LTYPE_FLOAT { $$ = new_constFloat($1,currentGoomSL->num_lines); }
+ | LTYPE_INTEGER { $$ = new_constInt($1,currentGoomSL->num_lines); }
+ | LTYPE_PTR { $$ = new_constPtr($1,currentGoomSL->num_lines); }
+ ;
+
+/* ---------------- Function Calls ------------------ */
+
+func_call: task_name '\n' leave_namespace { $$ = new_call($1,NULL); }
+ | task_name ':' affectation_list '\n' leave_namespace { $$ = new_call($1,$3); }
+ | '[' task_name ']' '\n' leave_namespace { $$ = new_call($2,NULL); }
+ | '[' task_name ':' affectation_list ']' '\n' leave_namespace { $$ = new_call($2,$4); }
+ ;
+
+func_call_expression:
+ '[' task_name leave_namespace ']' { $$ = new_call_expr($2,NULL); }
+ | '[' task_name ':' affectation_list ']' leave_namespace { $$ = new_call_expr($2,$4); }
+ ;
+
+affectation_list: affectation_in_list affectation_list { $$ = new_affec_list($1,$2); }
+ | affectation_in_list { $$ = new_affec_list($1,NULL); }
+
+affectation_in_list: LTYPE_VAR '=' leave_namespace expression {
+ gsl_reenternamespace($3);
+ $$ = new_set(new_var($1,currentGoomSL->num_lines),$4);
+ }
+ | ':' leave_namespace expression {
+ gsl_reenternamespace($2);
+ $$ = new_set(new_var("&this", currentGoomSL->num_lines),$3);
+ }
+ ;
+
+
+/* ------------ Misc ---------- */
+
+opt_nl: '\n' | ;
+
+
+%%
+
+
/* Bison parse-error hook.  Errors were formerly reported to stderr and
 * fatal (see #if 0); they now trip an assertion instead. */
void yyerror(char *str)
{ /* {{{ */
  g_assert_not_reached ();
#if 0
  fprintf(stderr, "ERROR: Line %d, %s\n", currentGoomSL->num_lines, str);
  currentGoomSL->compilationOK = 0;
  exit(1);
#endif
} /* }}} */
+
diff --git a/gst/goom/graphic.c b/gst/goom/graphic.c
new file mode 100644
index 0000000000..9f3044829d
--- /dev/null
+++ b/gst/goom/graphic.c
@@ -0,0 +1,28 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "goom_graphic.h"
+
/* Shared palette constants; initializer order follows the Color struct
 * declared in goom_graphic.h (presumably r, g, b — confirm against the
 * header). */
const Color BLACK = { 0, 0, 0 };
const Color WHITE = { 0xff, 0xff, 0xff };
const Color RED = { 0xff, 0x05, 0x05 };
const Color GREEN = { 0x05, 0xff, 0x05 };
const Color BLUE = { 0x05, 0x05, 0xff };
const Color YELLOW = { 0xff, 0xff, 0x33 };
const Color ORANGE = { 0xff, 0xcc, 0x05 };
const Color VIOLET = { 0x55, 0x05, 0xff };
diff --git a/gst/goom/gstgoom.c b/gst/goom/gstgoom.c
new file mode 100644
index 0000000000..e40bfa7a97
--- /dev/null
+++ b/gst/goom/gstgoom.c
@@ -0,0 +1,218 @@
+/* gstgoom.c: implementation of goom drawing element
+ * Copyright (C) <2001> Richard Boulton <richard@tartarus.org>
+ * (C) <2006> Wim Taymans <wim at fluendo dot com>
+ * (C) <2011> Wim Taymans <wim.taymans at gmail dot com>
+ * (C) <2015> Luis de Bethencourt <luis@debethencourt.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-goom
+ * @title: goom
+ * @see_also: synaesthesia
+ *
+ * Goom is an audio visualisation element. It creates warping structures
+ * based on the incoming audio signal.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! goom ! videoconvert ! xvimagesink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include "gstgoom.h"
+#include "goom.h"
+
+#if HAVE_ORC
+#include <orc/orc.h>
+#endif
+
+GST_DEBUG_CATEGORY (goom_debug);
+#define GST_CAT_DEFAULT goom_debug
+
+#define DEFAULT_WIDTH 320
+#define DEFAULT_HEIGHT 240
+
+/* signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ ARG_0
+ /* FILL ME */
+};
+
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+#define RGB_ORDER "xRGB"
+#else
+#define RGB_ORDER "BGRx"
+#endif
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (RGB_ORDER))
+ );
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", /* the name of the pads */
+ GST_PAD_SINK, /* type of the pad */
+ GST_PAD_ALWAYS, /* ALWAYS/SOMETIMES */
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 8000, 96000 ], "
+ "channels = (int) 1, "
+ "layout = (string) interleaved; "
+ "audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 8000, 96000 ], "
+ "channels = (int) 2, "
+ "channel-mask = (bitmask) 0x3, " "layout = (string) interleaved")
+ );
+
+
+static void gst_goom_finalize (GObject * object);
+
+static gboolean gst_goom_setup (GstAudioVisualizer * base);
+static gboolean gst_goom_render (GstAudioVisualizer * base, GstBuffer * audio,
+ GstVideoFrame * video);
+static gboolean goom_element_init (GstPlugin * plugin);
+
+G_DEFINE_TYPE (GstGoom, gst_goom, GST_TYPE_AUDIO_VISUALIZER);
+GST_ELEMENT_REGISTER_DEFINE_CUSTOM (goom, goom_element_init);
+
+/* Class initialisation: installs the finalize handler, registers the element
+ * metadata and the two static pad templates, and hooks the two
+ * GstAudioVisualizer virtual methods that do the real work (setup on caps
+ * negotiation, render once per output frame). */
+static void
+gst_goom_class_init (GstGoomClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstAudioVisualizerClass *visualizer_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ visualizer_class = (GstAudioVisualizerClass *) klass;
+
+ gobject_class->finalize = gst_goom_finalize;
+
+ gst_element_class_set_static_metadata (gstelement_class, "GOOM: what a GOOM!",
+ "Visualization",
+ "Takes frames of data and outputs video frames using the GOOM filter",
+ "Wim Taymans <wim@fluendo.com>");
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+ visualizer_class->setup = GST_DEBUG_FUNCPTR (gst_goom_setup);
+ visualizer_class->render = GST_DEBUG_FUNCPTR (gst_goom_render);
+}
+
+/* Instance initialisation: seed the default video size, mark the channel
+ * count as unknown (0) until caps are negotiated, and create the goom
+ * engine (released again in gst_goom_finalize). */
+static void
+gst_goom_init (GstGoom * goom)
+{
+ goom->width = DEFAULT_WIDTH;
+ goom->height = DEFAULT_HEIGHT;
+ goom->channels = 0;
+
+ goom->plugin = goom_init (goom->width, goom->height);
+}
+
+/* GObject finalize: release the goom engine created in gst_goom_init and
+ * chain up to the parent class. */
+static void
+gst_goom_finalize (GObject * object)
+{
+ GstGoom *goom = GST_GOOM (object);
+
+ goom_close (goom->plugin);
+ goom->plugin = NULL;
+
+ G_OBJECT_CLASS (gst_goom_parent_class)->finalize (object);
+}
+
+/* GstAudioVisualizer::setup vfunc, called after caps negotiation.
+ * Caches the negotiated video size and pushes it into the goom engine.
+ * Always returns TRUE. */
+static gboolean
+gst_goom_setup (GstAudioVisualizer * base)
+{
+ GstGoom *goom = GST_GOOM (base);
+
+ goom->width = GST_VIDEO_INFO_WIDTH (&base->vinfo);
+ goom->height = GST_VIDEO_INFO_HEIGHT (&base->vinfo);
+ /* Fix: also track the negotiated channel count. It was left at its init
+ * value (0), so gst_goom_render() always took the mono path and
+ * mis-deinterleaved stereo input (L and R samples treated as consecutive
+ * mono samples). */
+ goom->channels = GST_AUDIO_INFO_CHANNELS (&base->ainfo);
+ goom_set_resolution (goom->plugin, goom->width, goom->height);
+
+ return TRUE;
+}
+
+/* GstAudioVisualizer::render vfunc: converts GOOM_SAMPLES audio samples into
+ * one video frame.  Stereo input is de-interleaved into datain[0]/datain[1];
+ * mono input duplicates each sample into both rows (goom_update() always
+ * expects two channel rows).
+ * NOTE(review): goom->channels is set to 0 in gst_goom_init() and never
+ * updated in gst_goom_setup() as shown here, so the stereo branch looks
+ * unreachable — verify against upstream, which fills it from base->ainfo.
+ * NOTE(review): the gst_buffer_map() return value is not checked — confirm
+ * the base class guarantees a mappable buffer here. */
+static gboolean
+gst_goom_render (GstAudioVisualizer * base, GstBuffer * audio,
+ GstVideoFrame * video)
+{
+ GstGoom *goom = GST_GOOM (base);
+ GstMapInfo amap;
+ gint16 datain[2][GOOM_SAMPLES];
+ gint16 *adata;
+ gint i;
+
+ /* get next GOOM_SAMPLES, we have at least this amount of samples */
+ gst_buffer_map (audio, &amap, GST_MAP_READ);
+ adata = (gint16 *) amap.data;
+
+ if (goom->channels == 2) {
+ for (i = 0; i < GOOM_SAMPLES; i++) {
+ datain[0][i] = *adata++;
+ datain[1][i] = *adata++;
+ }
+ } else {
+ /* mono: copy the same sample into both rows, then advance */
+ for (i = 0; i < GOOM_SAMPLES; i++) {
+ datain[0][i] = *adata;
+ datain[1][i] = *adata++;
+ }
+ }
+
+ video->data[0] = goom_update (goom->plugin, datain, 0, 0);
+ gst_buffer_unmap (audio, &amap);
+
+ return TRUE;
+}
+
+/* One-time element registration helper: sets up the debug category,
+ * initialises Orc when it was available at build time, and registers the
+ * "goom" element with the plugin. */
+static gboolean
+goom_element_init (GstPlugin * plugin)
+{
+ GST_DEBUG_CATEGORY_INIT (goom_debug, "goom", 0, "goom visualisation element");
+
+#if HAVE_ORC
+ orc_init ();
+#endif
+
+ return gst_element_register (plugin, "goom", GST_RANK_NONE, GST_TYPE_GOOM);
+}
+
+/* Plugin entry point used by GST_PLUGIN_DEFINE below; defers to the
+ * GST_ELEMENT_REGISTER machinery so static registration also works. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ return GST_ELEMENT_REGISTER (goom, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ goom,
+ "GOOM visualization filter",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/goom/gstgoom.h b/gst/goom/gstgoom.h
new file mode 100644
index 0000000000..d19246096b
--- /dev/null
+++ b/gst/goom/gstgoom.h
@@ -0,0 +1,68 @@
+/* gstgoom.c: implementation of goom drawing element
+ * Copyright (C) <2001> Richard Boulton <richard@tartarus.org>
+ * Copyright (C) <2015> Luis de Bethencourt <luis@debethencourt.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_GOOM_H__
+#define __GST_GOOM_H__
+
+#include <gst/pbutils/gstaudiovisualizer.h>
+
+#include "goom.h"
+
+G_BEGIN_DECLS
+
+#define GOOM_SAMPLES 512
+
+#define GST_TYPE_GOOM (gst_goom_get_type())
+#define GST_GOOM(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_GOOM,GstGoom))
+#define GST_GOOM_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_GOOM,GstGoomClass))
+#define GST_IS_GOOM(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_GOOM))
+#define GST_IS_GOOM_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_GOOM))
+
+typedef struct _GstGoom GstGoom;
+typedef struct _GstGoomClass GstGoomClass;
+
+/* Instance structure: caches the negotiated audio/video parameters and owns
+ * the goom engine instance (created in init, destroyed in finalize). */
+struct _GstGoom
+{
+ GstAudioVisualizer parent;
+
+ /* input tracking: negotiated channel count (0 = not negotiated yet) */
+ gint channels;
+
+ /* video state: negotiated output frame size */
+ gint width;
+ gint height;
+
+ /* goom stuff: the engine instance, owned by this element */
+ PluginInfo *plugin;
+};
+
+struct _GstGoomClass
+{
+ GstAudioVisualizerClass parent_class;
+};
+
+GType gst_goom_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (goom);
+
+G_END_DECLS
+
+#endif /* __GST_GOOM_H__ */
+
diff --git a/gst/goom/ifs.c b/gst/goom/ifs.c
new file mode 100644
index 0000000000..5af721fb6e
--- /dev/null
+++ b/gst/goom/ifs.c
@@ -0,0 +1,774 @@
+/*-
+ * Copyright (c) 1997 by Massimino Pascal <Pascal.Massimon@ens.fr>
+ *
+ * ifs.c: modified iterated functions system for goom.
+ *
+ * Permission to use, copy, modify, and distribute this software and its
+ * documentation for any purpose and without fee is hereby granted,
+ * provided that the above copyright notice appear in all copies and that
+ * both that copyright notice and this permission notice appear in
+ * supporting documentation.
+ *
+ * This file is provided AS IS with no warranties of any kind. The author
+ * shall have no liability with respect to the infringement of copyrights,
+ * trade secrets or any patents by this file or any part thereof. In no
+ * event will the author be liable for any lost revenue or profits or
+ * other special, indirect and consequential damages.
+ *
+ * If this mode is weird and you have an old MetroX server, it is buggy.
+ * There is a free SuSE-enhanced MetroX X server that is fine.
+ *
+ * When shown ifs, Diana Rose (4 years old) said, "It looks like dancing."
+ *
+ * Revision History:
+ * 13-Dec-2003: Added some goom-specific stuff (to make ifs a VisualFX).
+ * 11-Apr-2002: jeko@ios-software.com: Make ifs.c system-independent. (ifs.h added)
+ * 01-Nov-2000: Allocation checks
+ * 10-May-1997: jwz@jwz.org: turned into a standalone program.
+ * Made it render into an offscreen bitmap and then copy
+ * that onto the screen, to reduce flicker.
+ */
+
+/* #ifdef STANDALONE */
+
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+
+#include "goom_config.h"
+
+#ifdef HAVE_MMX
+#include "mmx.h"
+#endif
+
+#include "goom_graphic.h"
+#include "ifs.h"
+#include "goom_tools.h"
+
+typedef struct _ifsPoint
+{
+ gint32 x, y;
+}
+IFSPoint;
+
+
+#define MODE_ifs
+
+#define PROGCLASS "IFS"
+
+#define HACK_INIT init_ifs
+#define HACK_DRAW draw_ifs
+
+#define ifs_opts xlockmore_opts
+
+#define DEFAULTS "*delay: 20000 \n" \
+"*ncolors: 100 \n"
+
+#define SMOOTH_COLORS
+
+#define LRAND() ((long) (goom_random(goomInfo->gRandom) & 0x7fffffff))
+#define NRAND(n) ((int) (LRAND() % (n)))
+
+#if RAND_MAX < 0x10000
+#define MAXRAND (((float)(RAND_MAX<16)+((float)RAND_MAX)+1.0f)/127.0f)
+#else
+#define MAXRAND (2147483648.0/127.0) /* unsigned 1<<31 / 127.0 (cf goom_tools) as a float */
+#endif
+
+/*****************************************************/
+
+typedef float DBL;
+typedef int F_PT;
+
+/* typedef float F_PT; */
+
+/*****************************************************/
+
+#define FIX 12
+#define UNIT ( 1<<FIX )
+#define MAX_SIMI 6
+
+#define MAX_DEPTH_2 10
+#define MAX_DEPTH_3 6
+#define MAX_DEPTH_4 4
+#define MAX_DEPTH_5 2
+
+/* PREVIOUS VALUE
+#define MAX_SIMI 6
+
+ * settings for a PC 120Mhz... *
+#define MAX_DEPTH_2 10
+#define MAX_DEPTH_3 6
+#define MAX_DEPTH_4 4
+#define MAX_DEPTH_5 3
+*/
+
+#define DBL_To_F_PT(x) (F_PT)( (DBL)(UNIT)*(x) )
+
+typedef struct Similitude_Struct SIMI;
+typedef struct Fractal_Struct FRACTAL;
+
+struct Similitude_Struct
+{
+
+ DBL c_x, c_y;
+ DBL r, r2, A, A2;
+ F_PT Ct, St, Ct2, St2;
+ F_PT Cx, Cy;
+ F_PT R, R2;
+};
+
+
+struct Fractal_Struct
+{
+
+ int Nb_Simi;
+ SIMI Components[5 * MAX_SIMI];
+ int Depth, Col;
+ int Count, Speed;
+ int Width, Height, Lx, Ly;
+ DBL r_mean, dr_mean, dr2_mean;
+ int Cur_Pt, Max_Pt;
+
+ IFSPoint *Buffer1, *Buffer2;
+};
+
+typedef struct _IFS_DATA
+{
+ FRACTAL *Root;
+ FRACTAL *Cur_F;
+
+ /* Used by the Trace recursive method */
+ IFSPoint *Buf;
+ int Cur_Pt;
+ int initalized;
+} IfsData;
+
+
+/*****************************************************/
+
+/* Random value centred on c with maximum offset A; S controls how strongly
+ * results concentrate near c (larger S = tighter).  The sign of the offset
+ * is chosen at random, so results lie in [c - A, c + A]. */
+static DBL
+Gauss_Rand (PluginInfo * goomInfo, DBL c, DBL A, DBL S)
+{
+ DBL y;
+
+ y = (DBL) LRAND () / MAXRAND;
+ y = A * (1.0 - exp (-y * y * S)) / (1.0 - exp (-S));
+ if (NRAND (2))
+ return (c + y);
+ return (c - y);
+}
+
+/* One-sided variant of Gauss_Rand: same distribution but the offset is
+ * always added, so results lie in [c, c + A]. */
+static DBL
+Half_Gauss_Rand (PluginInfo * goomInfo, DBL c, DBL A, DBL S)
+{
+ DBL y;
+
+ y = (DBL) LRAND () / MAXRAND;
+ y = A * (1.0 - exp (-y * y * S)) / (1.0 - exp (-S));
+ return (c + y);
+}
+
+/* Fill i consecutive similitudes starting at Cur with random parameters:
+ * centre in roughly [-0.8, 0.8], radii drawn around the fractal's mean
+ * values, and both rotation angles uniform over [0, 2*pi). */
+static void
+Random_Simis (PluginInfo * goomInfo, FRACTAL * F, SIMI * Cur, int i)
+{
+ while (i--) {
+ Cur->c_x = Gauss_Rand (goomInfo, 0.0, .8, 4.0);
+ Cur->c_y = Gauss_Rand (goomInfo, 0.0, .8, 4.0);
+ Cur->r = Gauss_Rand (goomInfo, F->r_mean, F->dr_mean, 3.0);
+ Cur->r2 = Half_Gauss_Rand (goomInfo, 0.0, F->dr2_mean, 2.0);
+ Cur->A = Gauss_Rand (goomInfo, 0.0, 360.0, 4.0) * (G_PI / 180.0);
+ Cur->A2 = Gauss_Rand (goomInfo, 0.0, 360.0, 4.0) * (G_PI / 180.0);
+ Cur++;
+ }
+}
+
+/* Release the two point buffers of a fractal and NULL the pointers so the
+ * function is safe to call repeatedly (init_ifs calls it before
+ * reallocating). */
+static void
+free_ifs_buffers (FRACTAL * Fractal)
+{
+ if (Fractal->Buffer1 != NULL) {
+ (void) free ((void *) Fractal->Buffer1);
+ Fractal->Buffer1 = (IFSPoint *) NULL;
+ }
+ if (Fractal->Buffer2 != NULL) {
+ (void) free ((void *) Fractal->Buffer2);
+ Fractal->Buffer2 = (IFSPoint *) NULL;
+ }
+}
+
+
+/* Free everything a FRACTAL owns (currently just its point buffers; the
+ * FRACTAL struct itself is freed by release_ifs). */
+static void
+free_ifs (FRACTAL * Fractal)
+{
+ free_ifs_buffers (Fractal);
+}
+
+/***************************************************************/
+
+/* (Re)initialise the IFS: pick a random number of similitude centres (2-5),
+ * derive recursion depth and radius statistics from it, size the two point
+ * buffers to an upper bound of Nb_Simi^(Depth+3) points, and randomise all
+ * 5*MAX_SIMI keyframe similitudes.  Silently returns on allocation failure,
+ * leaving data->Root NULL or bufferless (draw_ifs checks for both). */
+static void
+init_ifs (PluginInfo * goomInfo, IfsData * data)
+{
+ int i;
+ FRACTAL *Fractal;
+ int width = goomInfo->screen.width;
+ int height = goomInfo->screen.height;
+
+ if (data->Root == NULL) {
+ data->Root = (FRACTAL *) malloc (sizeof (FRACTAL));
+ if (data->Root == NULL)
+ return;
+ data->Root->Buffer1 = (IFSPoint *) NULL;
+ data->Root->Buffer2 = (IFSPoint *) NULL;
+ }
+ Fractal = data->Root;
+
+ free_ifs_buffers (Fractal);
+
+ i = (NRAND (4)) + 2; /* Number of centers */
+ switch (i) {
+ case 3:
+ Fractal->Depth = MAX_DEPTH_3;
+ Fractal->r_mean = .6;
+ Fractal->dr_mean = .4;
+ Fractal->dr2_mean = .3;
+ break;
+
+ case 4:
+ Fractal->Depth = MAX_DEPTH_4;
+ Fractal->r_mean = .5;
+ Fractal->dr_mean = .4;
+ Fractal->dr2_mean = .3;
+ break;
+
+ case 5:
+ Fractal->Depth = MAX_DEPTH_5;
+ Fractal->r_mean = .5;
+ Fractal->dr_mean = .4;
+ Fractal->dr2_mean = .3;
+ break;
+
+ default:
+ case 2:
+ Fractal->Depth = MAX_DEPTH_2;
+ Fractal->r_mean = .7;
+ Fractal->dr_mean = .3;
+ Fractal->dr2_mean = .4;
+ break;
+ }
+ Fractal->Nb_Simi = i;
+ /* Max_Pt = (Nb_Simi - 1) * Nb_Simi^(Depth+3): worst-case point count of
+ * the bounded recursion in Trace(). */
+ Fractal->Max_Pt = Fractal->Nb_Simi - 1;
+ for (i = 0; i <= Fractal->Depth + 2; ++i)
+ Fractal->Max_Pt *= Fractal->Nb_Simi;
+
+ if ((Fractal->Buffer1 = (IFSPoint *) calloc (Fractal->Max_Pt,
+ sizeof (IFSPoint))) == NULL) {
+ free_ifs (Fractal);
+ return;
+ }
+ if ((Fractal->Buffer2 = (IFSPoint *) calloc (Fractal->Max_Pt,
+ sizeof (IFSPoint))) == NULL) {
+ free_ifs (Fractal);
+ return;
+ }
+
+ Fractal->Speed = 6;
+ Fractal->Width = width; /* modif by JeKo */
+ Fractal->Height = height; /* modif by JeKo */
+ Fractal->Cur_Pt = 0;
+ Fractal->Count = 0;
+ Fractal->Lx = (Fractal->Width - 1) / 2;
+ Fractal->Ly = (Fractal->Height - 1) / 2;
+ /* NOTE(review): uses libc rand() while everything else uses the seeded
+ * goom_random() — probably an oversight, but Col is cosmetic. */
+ Fractal->Col = rand () % (width * height); /* modif by JeKo */
+
+ Random_Simis (goomInfo, Fractal, Fractal->Components, 5 * MAX_SIMI);
+}
+
+
+/***************************************************************/
+
+/* Apply one similitude to fixed-point coordinates (xo, yo) producing
+ * (*x, *y).  All values are FIX(=12)-bit fixed point; each multiply is
+ * followed by a >> FIX to renormalise.  The transform combines a scaled
+ * rotation of the translated point with a second scaled rotation of a
+ * reflected copy (yy uses -yo), then translates back to the centre. */
+static inline void
+Transform (SIMI * Simi, F_PT xo, F_PT yo, F_PT * x, F_PT * y)
+{
+ F_PT xx, yy;
+
+ xo = xo - Simi->Cx;
+ xo = (xo * Simi->R) >> FIX; /* / UNIT; */
+ yo = yo - Simi->Cy;
+ yo = (yo * Simi->R) >> FIX; /* / UNIT; */
+
+ xx = xo - Simi->Cx;
+ xx = (xx * Simi->R2) >> FIX; /* / UNIT; */
+ yy = -yo - Simi->Cy;
+ yy = (yy * Simi->R2) >> FIX; /* / UNIT; */
+
+ *x = ((xo * Simi->Ct - yo * Simi->St + xx * Simi->Ct2 - yy * Simi->St2)
+ >> FIX /* / UNIT */ ) + Simi->Cx;
+ *y = ((xo * Simi->St + yo * Simi->Ct + xx * Simi->St2 + yy * Simi->Ct2)
+ >> FIX /* / UNIT */ ) + Simi->Cy;
+}
+
+/***************************************************************/
+
+/* Recursively iterate the function system from seed point (xo, yo): apply
+ * every similitude, emit each image point into data->Buf (scaled to screen
+ * coordinates around (Lx, Ly)), and recurse while F->Depth allows and the
+ * point still moved by at least 16 fixed-point units on both axes.
+ * F->Depth is decremented/restored around the recursion, bounding both the
+ * recursion depth and (with Max_Pt from init_ifs) the emitted point count. */
+static void
+Trace (FRACTAL * F, F_PT xo, F_PT yo, IfsData * data)
+{
+ F_PT x, y, i;
+ SIMI *Cur;
+
+ Cur = data->Cur_F->Components;
+ for (i = data->Cur_F->Nb_Simi; i; --i, Cur++) {
+ Transform (Cur, xo, yo, &x, &y);
+
+ data->Buf->x = F->Lx + ((x * F->Lx) >> (FIX + 1) /* /(UNIT*2) */ );
+ data->Buf->y = F->Ly - ((y * F->Ly) >> (FIX + 1) /* /(UNIT*2) */ );
+ data->Buf++;
+
+ data->Cur_Pt++;
+
+ if (F->Depth && ((x - xo) >> 4) && ((y - yo) >> 4)) {
+ F->Depth--;
+ Trace (F, x, y, data);
+ F->Depth++;
+ }
+ }
+}
+
+/* Render one frame of the fractal into Buffer2: convert the current
+ * floating-point similitude parameters to fixed point, seed Trace() from
+ * every ordered pair of distinct similitude centres, then swap Buffer1 and
+ * Buffer2 so the freshly drawn frame becomes the "previous" one. */
+static void
+Draw_Fractal (IfsData * data)
+{
+ FRACTAL *F = data->Root;
+ int i, j;
+ F_PT x, y, xo, yo;
+ SIMI *Cur, *Simi;
+
+ for (Cur = F->Components, i = F->Nb_Simi; i; --i, Cur++) {
+ Cur->Cx = DBL_To_F_PT (Cur->c_x);
+ Cur->Cy = DBL_To_F_PT (Cur->c_y);
+
+ Cur->Ct = DBL_To_F_PT (cos (Cur->A));
+ Cur->St = DBL_To_F_PT (sin (Cur->A));
+ Cur->Ct2 = DBL_To_F_PT (cos (Cur->A2));
+ Cur->St2 = DBL_To_F_PT (sin (Cur->A2));
+
+ Cur->R = DBL_To_F_PT (Cur->r);
+ Cur->R2 = DBL_To_F_PT (Cur->r2);
+ }
+
+
+ data->Cur_Pt = 0;
+ data->Cur_F = F;
+ data->Buf = F->Buffer2;
+ for (Cur = F->Components, i = F->Nb_Simi; i; --i, Cur++) {
+ xo = Cur->Cx;
+ yo = Cur->Cy;
+ for (Simi = F->Components, j = F->Nb_Simi; j; --j, Simi++) {
+ if (Simi == Cur)
+ continue;
+ Transform (Simi, xo, yo, &x, &y);
+ Trace (F, x, y, data);
+ }
+ }
+
+ /* Erase previous */
+
+ F->Cur_Pt = data->Cur_Pt;
+ data->Buf = F->Buffer1;
+ F->Buffer1 = F->Buffer2;
+ F->Buffer2 = data->Buf;
+}
+
+
+/* Advance the animation one step and return the freshly drawn point buffer
+ * (point count in *nbpt), or NULL if init_ifs never succeeded.
+ * The five keyframe sets of similitudes (S..S4, each Nb_Simi long) are
+ * blended into the working set S with cubic Bezier/Bernstein weights
+ * u0..u3 of u = Count*Speed/1000.  When a segment completes (Count reaches
+ * 1000/Speed), keyframes are shifted: S2 is extrapolated past S4, S1 takes
+ * S4's place, and the last two sets are re-randomised. */
+static IFSPoint *
+draw_ifs (PluginInfo * goomInfo, int *nbpt, IfsData * data)
+{
+ int i;
+ DBL u, uu, v, vv, u0, u1, u2, u3;
+ SIMI *S, *S1, *S2, *S3, *S4;
+ FRACTAL *F;
+
+ if (data->Root == NULL)
+ return NULL;
+ F = data->Root;
+ if (F->Buffer1 == NULL)
+ return NULL;
+
+ u = (DBL) (F->Count) * (DBL) (F->Speed) / 1000.0;
+ uu = u * u;
+ v = 1.0 - u;
+ vv = v * v;
+ u0 = vv * v;
+ u1 = 3.0 * vv * u;
+ u2 = 3.0 * v * uu;
+ u3 = u * uu;
+
+ S = F->Components;
+ S1 = S + F->Nb_Simi;
+ S2 = S1 + F->Nb_Simi;
+ S3 = S2 + F->Nb_Simi;
+ S4 = S3 + F->Nb_Simi;
+
+ for (i = F->Nb_Simi; i; --i, S++, S1++, S2++, S3++, S4++) {
+ S->c_x = u0 * S1->c_x + u1 * S2->c_x + u2 * S3->c_x + u3 * S4->c_x;
+ S->c_y = u0 * S1->c_y + u1 * S2->c_y + u2 * S3->c_y + u3 * S4->c_y;
+ S->r = u0 * S1->r + u1 * S2->r + u2 * S3->r + u3 * S4->r;
+ S->r2 = u0 * S1->r2 + u1 * S2->r2 + u2 * S3->r2 + u3 * S4->r2;
+ S->A = u0 * S1->A + u1 * S2->A + u2 * S3->A + u3 * S4->A;
+ S->A2 = u0 * S1->A2 + u1 * S2->A2 + u2 * S3->A2 + u3 * S4->A2;
+ }
+
+ Draw_Fractal (data);
+
+ if (F->Count >= 1000 / F->Speed) {
+ S = F->Components;
+ S1 = S + F->Nb_Simi;
+ S2 = S1 + F->Nb_Simi;
+ S3 = S2 + F->Nb_Simi;
+ S4 = S3 + F->Nb_Simi;
+
+ for (i = F->Nb_Simi; i; --i, S++, S1++, S2++, S3++, S4++) {
+ S2->c_x = 2.0 * S4->c_x - S3->c_x;
+ S2->c_y = 2.0 * S4->c_y - S3->c_y;
+ S2->r = 2.0 * S4->r - S3->r;
+ S2->r2 = 2.0 * S4->r2 - S3->r2;
+ S2->A = 2.0 * S4->A - S3->A;
+ S2->A2 = 2.0 * S4->A2 - S3->A2;
+
+ *S1 = *S4;
+ }
+ Random_Simis (goomInfo, F, F->Components + 3 * F->Nb_Simi, F->Nb_Simi);
+
+ Random_Simis (goomInfo, F, F->Components + 4 * F->Nb_Simi, F->Nb_Simi);
+
+ F->Count = 0;
+ } else
+ F->Count++;
+
+ F->Col++;
+
+ (*nbpt) = data->Cur_Pt;
+ return F->Buffer2;
+}
+
+
+/***************************************************************/
+
+/* Release the root fractal (buffers + struct) and reset the pointer so a
+ * later init_ifs() can start from scratch. */
+static void
+release_ifs (IfsData * data)
+{
+ if (data->Root != NULL) {
+ free_ifs (data->Root);
+ (void) free ((void *) data->Root);
+ data->Root = (FRACTAL *) NULL;
+ }
+}
+
+#define RAND() goom_random(goomInfo->gRandom)
+
+/* Per-frame IFS pass: draws 1/increment of the IFS points additively
+ * (back + colour, saturating per byte) into data, then evolves the shared
+ * colour state with one of three palette "modes" (channel names are French:
+ * ROUGE/VERT/BLEU/ALPHA; MER = sea, FEU = fire, MERVER = sea-green).
+ * NOTE(review): the static locals (couleur, v, col, mode, justChanged,
+ * cycle) make this function non-reentrant and shared across all plugin
+ * instances — confirm goom is only ever driven single-instance/single-
+ * threaded. */
+static void
+ifs_update (PluginInfo * goomInfo, Pixel * data, Pixel * back, int increment,
+ IfsData * fx_data)
+{
+ static unsigned int couleur = 0xc0c0c0c0;
+ static int v[4] = { 2, 4, 3, 2 };
+ static int col[4] = { 2, 4, 3, 2 };
+
+#define MOD_MER 0
+#define MOD_FEU 1
+#define MOD_MERVER 2
+ static int mode = MOD_MERVER;
+ static int justChanged = 0;
+ static int cycle = 0;
+ int cycle10;
+
+ int nbpt = 0;
+ IFSPoint *points;
+ int i;
+
+ unsigned int couleursl = couleur;
+ int width = goomInfo->screen.width;
+ int height = goomInfo->screen.height;
+
+ /* cycle10 ramps 0..3..0 over an 80-frame period; used below as a
+ * per-byte right shift that periodically darkens the drawing colour. */
+ cycle++;
+ if (cycle >= 80)
+ cycle = 0;
+
+ if (cycle < 40)
+ cycle10 = cycle / 10;
+ else
+ cycle10 = 7 - cycle / 10;
+
+ {
+ unsigned char *tmp = (unsigned char *) &couleursl;
+
+ for (i = 0; i < 4; i++) {
+ *tmp = (*tmp) >> cycle10;
+ tmp++;
+ }
+ }
+
+ points = draw_ifs (goomInfo, &nbpt, fx_data);
+ nbpt--;
+
+#ifdef HAVE_MMX
+ movd_m2r (couleursl, mm1);
+ punpckldq_r2r (mm1, mm1);
+ for (i = 0; i < nbpt; i += increment) {
+ int x = points[i].x;
+ int y = points[i].y;
+
+ if ((x < width) && (y < height) && (x > 0) && (y > 0)) {
+ int pos = x + (y * width);
+
+ movd_m2r (back[pos], mm0);
+ paddusb_r2r (mm1, mm0);
+ movd_r2m (mm0, data[pos]);
+ }
+ }
+ emms (); /*__asm__ __volatile__ ("emms");*/
+#else
+ for (i = 0; i < nbpt; i += increment) {
+ int x = (int) points[i].x & 0x7fffffff;
+ int y = (int) points[i].y & 0x7fffffff;
+
+ if ((x < width) && (y < height)) {
+ int pos = x + (int) (y * width);
+ /* NOTE(review): this inner 'i' shadows the outer loop index; only
+ * the 4-byte blend below uses it, so behaviour is correct but the
+ * shadowing is fragile. */
+ int tra = 0, i = 0;
+ unsigned char *bra = (unsigned char *) &back[pos];
+ unsigned char *dra = (unsigned char *) &data[pos];
+ unsigned char *cra = (unsigned char *) &couleursl;
+
+ /* saturating per-byte add: dest = min(back + colour, 255) */
+ for (; i < 4; i++) {
+ tra = *cra;
+ tra += *bra;
+ if (tra > 255)
+ tra = 255;
+ *dra = tra;
+ ++dra;
+ ++cra;
+ ++bra;
+ }
+ }
+ }
+#endif /*MMX*/
+ justChanged--;
+
+ /* unpack the packed colour into per-channel ints for the mode logic */
+ col[ALPHA] = couleur >> (ALPHA * 8) & 0xff;
+ col[BLEU] = couleur >> (BLEU * 8) & 0xff;
+ col[VERT] = couleur >> (VERT * 8) & 0xff;
+ col[ROUGE] = couleur >> (ROUGE * 8) & 0xff;
+
+ if (mode == MOD_MER) {
+ col[BLEU] += v[BLEU];
+ if (col[BLEU] > 255) {
+ col[BLEU] = 255;
+ v[BLEU] = -(RAND () % 4) - 1;
+ }
+ if (col[BLEU] < 32) {
+ col[BLEU] = 32;
+ v[BLEU] = (RAND () % 4) + 1;
+ }
+
+ col[VERT] += v[VERT];
+ if (col[VERT] > 200) {
+ col[VERT] = 200;
+ v[VERT] = -(RAND () % 3) - 2;
+ }
+ if (col[VERT] > col[BLEU]) {
+ col[VERT] = col[BLEU];
+ v[VERT] = v[BLEU];
+ }
+ if (col[VERT] < 32) {
+ col[VERT] = 32;
+ v[VERT] = (RAND () % 3) + 2;
+ }
+
+ col[ROUGE] += v[ROUGE];
+ if (col[ROUGE] > 64) {
+ col[ROUGE] = 64;
+ v[ROUGE] = -(RAND () % 4) - 1;
+ }
+ if (col[ROUGE] < 0) {
+ col[ROUGE] = 0;
+ v[ROUGE] = (RAND () % 4) + 1;
+ }
+
+ /* NOTE(review): MOD_MER and MOD_FEU clamp alpha upward at 0 (holding it
+ * at zero) while MOD_MERVER clamps at 255 — possibly intentional,
+ * verify against upstream goom before "fixing". */
+ col[ALPHA] += v[ALPHA];
+ if (col[ALPHA] > 0) {
+ col[ALPHA] = 0;
+ v[ALPHA] = -(RAND () % 4) - 1;
+ }
+ if (col[ALPHA] < 0) {
+ col[ALPHA] = 0;
+ v[ALPHA] = (RAND () % 4) + 1;
+ }
+
+ if (((col[VERT] > 32) && (col[ROUGE] < col[VERT] + 40)
+ && (col[VERT] < col[ROUGE] + 20) && (col[BLEU] < 64)
+ && (RAND () % 20 == 0)) && (justChanged < 0)) {
+ mode = (RAND () % 3) ? MOD_FEU : MOD_MERVER;
+ justChanged = 250;
+ }
+ } else if (mode == MOD_MERVER) {
+ col[BLEU] += v[BLEU];
+ if (col[BLEU] > 128) {
+ col[BLEU] = 128;
+ v[BLEU] = -(RAND () % 4) - 1;
+ }
+ if (col[BLEU] < 16) {
+ col[BLEU] = 16;
+ v[BLEU] = (RAND () % 4) + 1;
+ }
+
+ col[VERT] += v[VERT];
+ if (col[VERT] > 200) {
+ col[VERT] = 200;
+ v[VERT] = -(RAND () % 3) - 2;
+ }
+ if (col[VERT] > col[ALPHA]) {
+ col[VERT] = col[ALPHA];
+ v[VERT] = v[ALPHA];
+ }
+ if (col[VERT] < 32) {
+ col[VERT] = 32;
+ v[VERT] = (RAND () % 3) + 2;
+ }
+
+ col[ROUGE] += v[ROUGE];
+ if (col[ROUGE] > 128) {
+ col[ROUGE] = 128;
+ v[ROUGE] = -(RAND () % 4) - 1;
+ }
+ if (col[ROUGE] < 0) {
+ col[ROUGE] = 0;
+ v[ROUGE] = (RAND () % 4) + 1;
+ }
+
+ col[ALPHA] += v[ALPHA];
+ if (col[ALPHA] > 255) {
+ col[ALPHA] = 255;
+ v[ALPHA] = -(RAND () % 4) - 1;
+ }
+ if (col[ALPHA] < 0) {
+ col[ALPHA] = 0;
+ v[ALPHA] = (RAND () % 4) + 1;
+ }
+
+ if (((col[VERT] > 32) && (col[ROUGE] < col[VERT] + 40)
+ && (col[VERT] < col[ROUGE] + 20) && (col[BLEU] < 64)
+ && (RAND () % 20 == 0)) && (justChanged < 0)) {
+ mode = (RAND () % 3) ? MOD_FEU : MOD_MER;
+ justChanged = 250;
+ }
+ } else if (mode == MOD_FEU) {
+
+ col[BLEU] += v[BLEU];
+ if (col[BLEU] > 64) {
+ col[BLEU] = 64;
+ v[BLEU] = -(RAND () % 4) - 1;
+ }
+ if (col[BLEU] < 0) {
+ col[BLEU] = 0;
+ v[BLEU] = (RAND () % 4) + 1;
+ }
+
+ col[VERT] += v[VERT];
+ if (col[VERT] > 200) {
+ col[VERT] = 200;
+ v[VERT] = -(RAND () % 3) - 2;
+ }
+ if (col[VERT] > col[ROUGE] + 20) {
+ col[VERT] = col[ROUGE] + 20;
+ v[VERT] = -(RAND () % 3) - 2;
+ v[ROUGE] = (RAND () % 4) + 1;
+ v[BLEU] = (RAND () % 4) + 1;
+ }
+ if (col[VERT] < 0) {
+ col[VERT] = 0;
+ v[VERT] = (RAND () % 3) + 2;
+ }
+
+ col[ROUGE] += v[ROUGE];
+ if (col[ROUGE] > 255) {
+ col[ROUGE] = 255;
+ v[ROUGE] = -(RAND () % 4) - 1;
+ }
+ if (col[ROUGE] > col[VERT] + 40) {
+ col[ROUGE] = col[VERT] + 40;
+ v[ROUGE] = -(RAND () % 4) - 1;
+ }
+ if (col[ROUGE] < 0) {
+ col[ROUGE] = 0;
+ v[ROUGE] = (RAND () % 4) + 1;
+ }
+
+ col[ALPHA] += v[ALPHA];
+ if (col[ALPHA] > 0) {
+ col[ALPHA] = 0;
+ v[ALPHA] = -(RAND () % 4) - 1;
+ }
+ if (col[ALPHA] < 0) {
+ col[ALPHA] = 0;
+ v[ALPHA] = (RAND () % 4) + 1;
+ }
+
+ if (((col[ROUGE] < 64) && (col[VERT] > 32) && (col[VERT] < col[BLEU])
+ && (col[BLEU] > 32)
+ && (RAND () % 20 == 0)) && (justChanged < 0)) {
+ mode = (RAND () % 2) ? MOD_MER : MOD_MERVER;
+ justChanged = 250;
+ }
+ }
+
+ /* repack the evolved channels for the next frame */
+ couleur = (col[ALPHA] << (ALPHA * 8))
+ | (col[BLEU] << (BLEU * 8))
+ | (col[VERT] << (VERT * 8))
+ | (col[ROUGE] << (ROUGE * 8));
+}
+
+/* VISUAL_FX WRAPPER FOR IFS */
+
+/* VisualFX::apply wrapper: lazily builds the fractal on first use (so the
+ * screen size in goomInfo is valid), then draws one frame from src into
+ * dest using the engine-supplied point increment. */
+static void
+ifs_vfx_apply (VisualFX * _this, Pixel * src, Pixel * dest,
+ PluginInfo * goomInfo)
+{
+
+ IfsData *data = (IfsData *) _this->fx_data;
+
+ if (!data->initalized) {
+ data->initalized = 1;
+ init_ifs (goomInfo, data);
+ }
+ ifs_update (goomInfo, dest, src, goomInfo->update.ifs_incr, data);
+ /* TODO: find a better solution for the increment (move the ifs management
+ code into this file: ifs_vfx_apply) */
+}
+
+/* VisualFX::init wrapper: allocate the per-instance IFS state; the fractal
+ * itself is built lazily on the first apply() call (see ifs_vfx_apply).
+ * Note: the "initalized" field name (sic) is declared that way in IfsData. */
+static void
+ifs_vfx_init (VisualFX * _this, PluginInfo * info)
+{
+
+ IfsData *data = (IfsData *) malloc (sizeof (IfsData));
+
+ /* Fix: the original dereferenced the malloc result unchecked; on failure
+ * leave fx_data NULL rather than writing through a NULL pointer here. */
+ if (data != NULL) {
+ data->Root = (FRACTAL *) NULL;
+ data->initalized = 0;
+ }
+ _this->fx_data = data;
+}
+
+/* VisualFX::free wrapper: release the fractal and the IfsData container
+ * allocated in ifs_vfx_init. */
+static void
+ifs_vfx_free (VisualFX * _this)
+{
+ IfsData *data = (IfsData *) _this->fx_data;
+
+ release_ifs (data);
+ free (data);
+}
+
+/* Public constructor: populate a VisualFX vtable with the IFS callbacks.
+ * fx_data is filled in later by ifs_vfx_init; there are no parameters. */
+void
+ifs_visualfx_create (VisualFX * vfx)
+{
+
+ vfx->init = ifs_vfx_init;
+ vfx->free = ifs_vfx_free;
+ vfx->apply = ifs_vfx_apply;
+ vfx->fx_data = NULL;
+ vfx->params = NULL;
+}
diff --git a/gst/goom/ifs.h b/gst/goom/ifs.h
new file mode 100644
index 0000000000..400377349d
--- /dev/null
+++ b/gst/goom/ifs.h
@@ -0,0 +1,54 @@
+/*-
+ * Copyright (c) 1997 by Massimino Pascal <Pascal.Massimon@ens.fr>
+ *
+ * ifs.h: modified iterated functions system for goom.
+ *
+ * Permission to use, copy, modify, and distribute this software and its
+ * documentation for any purpose and without fee is hereby granted,
+ * provided that the above copyright notice appear in all copies and that
+ * both that copyright notice and this permission notice appear in
+ * supporting documentation.
+ *
+ * This file is provided AS IS with no warranties of any kind. The author
+ * shall have no liability with respect to the infringement of copyrights,
+ * trade secrets or any patents by this file or any part thereof. In no
+ * event will the author be liable for any lost revenue or profits or
+ * other special, indirect and consequential damages.
+ *
+ * If this mode is weird and you have an old MetroX server, it is buggy.
+ * There is a free SuSE-enhanced MetroX X server that is fine.
+ *
+ * When shown ifs, Diana Rose (4 years old) said, "It looks like dancing."
+ *
+ * Revision History:
+ * 13-Dec-2003: Added some goom-specific stuff (to make ifs a VisualFX).
+ * 11-Apr-2002: jeko@ios-software.com: Make ifs.c system-independent. (ifs.h added)
+ * 01-Nov-2000: Allocation checks
+ * 10-May-1997: jwz@jwz.org: turned into a standalone program.
+ * Made it render into an offscreen bitmap and then copy
+ * that onto the screen, to reduce flicker.
+ */
+
+#ifndef IFS_H
+#define IFS_H
+
+#include "goom_config.h"
+#include "goom_graphic.h"
+#include "goom_plugin_info.h"
+#include "goom_visual_fx.h"
+
+void ifs_visualfx_create(VisualFX *vfx);
+
+/* init ifs for a (width)x(height) output. * /
+void init_ifs (PluginInfo *goomInfo, int width, int height);
+
+/ * draw an ifs on the buffer (which size is width * height)
+ increment means that we draw 1/increment of the ifs's points * /
+void ifs_update (PluginInfo *goomInfo, Pixel * buffer, Pixel * back, int width, int height, int increment);
+
+/ * free all ifs's data. * /
+void release_ifs (void);
+*/
+
+
+#endif
diff --git a/gst/goom/lines.c b/gst/goom/lines.c
new file mode 100644
index 0000000000..a7c1eda914
--- /dev/null
+++ b/gst/goom/lines.c
@@ -0,0 +1,257 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "lines.h"
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include "goom_tools.h"
+#include "drawmethods.h"
+#include "goom_plugin_info.h"
+
+/* Scale one colour byte by log10(power)/2, clamped to [0, 255].  With power
+ * in goom_lines_move's range (1.1 .. 17.5) this darkens for power < 100 and
+ * would brighten above.  (The val < 0 test is effectively dead: t > 0 and
+ * truncation can't go negative, but it's kept as defensive clamping.) */
+static inline unsigned char
+lighten (unsigned char value, float power)
+{
+ int val = value;
+ float t = (float) val * log10 (power) / 2.0;
+
+ if (t > 0) {
+ val = (int) t; /* (32.0f * log (t)); */
+ if (val > 255)
+ val = 255;
+ if (val < 0)
+ val = 0;
+ return val;
+ } else {
+ return 0;
+ }
+}
+
+/* Apply lighten() to all four bytes of a packed colour in place. */
+static void
+lightencolor (guint32 * col, float power)
+{
+ unsigned char *color;
+
+ color = (unsigned char *) col;
+ *color = lighten (*color, power);
+ color++;
+ *color = lighten (*color, power);
+ color++;
+ *color = lighten (*color, power);
+ color++;
+ *color = lighten (*color, power);
+}
+
+
+
+/* Generate the 512 target points of a line shape into l[]:
+ * GML_HLINE - horizontal line at y = param, GML_VLINE - vertical line at
+ * x = param, GML_CIRCLE - circle of radius param centred on the screen.
+ * Each point also stores the outward angle used to displace it by audio
+ * data in goom_lines_draw().  NOTE(review): no default case — an unknown id
+ * leaves l[] untouched; callers only ever pass the three GML_* ids. */
+static void
+genline (int id, float param, GMUnitPointer * l, int rx, int ry)
+{
+ int i;
+
+ switch (id) {
+ case GML_HLINE:
+ for (i = 0; i < 512; i++) {
+ l[i].x = ((float) i * rx) / 512.0f;
+ l[i].y = param;
+ l[i].angle = G_PI / 2.0f;
+ }
+ return;
+ case GML_VLINE:
+ for (i = 0; i < 512; i++) {
+ l[i].y = ((float) i * ry) / 512.0f;
+ l[i].x = param;
+ l[i].angle = 0.0f;
+ }
+ return;
+ case GML_CIRCLE:
+ for (i = 0; i < 512; i++) {
+ float cosa, sina;
+
+ l[i].angle = 2.0f * G_PI * (float) i / 512.0f;
+ cosa = param * cos (l[i].angle);
+ sina = param * sin (l[i].angle);
+ l[i].x = ((float) rx / 2.0f) + cosa;
+ l[i].y = (float) ry / 2.0f + sina;
+ }
+ return;
+ }
+}
+
+/* Map a GML_* colour mode to a packed pixel value using the ROUGE/VERT/BLEU
+ * byte positions from goom_config.h; unknown modes yield 0 (black). */
+static guint32
+getcouleur (int mode)
+{
+ switch (mode) {
+ case GML_RED:
+ return (230 << (ROUGE * 8)) | (120 << (VERT * 8)) | (18 << (BLEU * 8));
+ case GML_ORANGE_J:
+ return (120 << (VERT * 8)) | (252 << (ROUGE * 8)) | (18 << (BLEU * 8));
+ case GML_ORANGE_V:
+ return (160 << (VERT * 8)) | (236 << (ROUGE * 8)) | (40 << (BLEU * 8));
+ case GML_BLEUBLANC:
+ return (40 << (BLEU * 8)) | (220 << (ROUGE * 8)) | (140 << (VERT * 8));
+ case GML_VERT:
+ return (200 << (VERT * 8)) | (80 << (ROUGE * 8)) | (18 << (BLEU * 8));
+ case GML_BLEU:
+ return (250 << (BLEU * 8)) | (30 << (VERT * 8)) | (80 << (ROUGE * 8));
+ case GML_BLACK:
+ return (16 << (BLEU * 8)) | (16 << (VERT * 8)) | (16 << (ROUGE * 8));
+ }
+ return 0;
+}
+
+/* Update the screen size and regenerate the destination point set for the
+ * current target shape; NULL gml is tolerated as a no-op. */
+void
+goom_lines_set_res (GMLine * gml, int rx, int ry)
+{
+ if (gml != NULL) {
+ gml->screenX = rx;
+ gml->screenY = ry;
+
+ genline (gml->IDdest, gml->param, gml->points2, rx, ry);
+ }
+}
+
+
+/* Advance the line animation one step: ease each point (and its angle)
+ * 1/40th of the way toward the destination set, blend the colour 1/64th
+ * toward the target colour, bounce 'power' between 1.1 and 17.5 with a
+ * random speed, and ease the amplitude toward its target. */
+static void
+goom_lines_move (GMLine * l)
+{
+ int i;
+ unsigned char *c1, *c2;
+
+ for (i = 0; i < 512; i++) {
+ l->points[i].x = (l->points2[i].x + 39.0f * l->points[i].x) / 40.0f;
+ l->points[i].y = (l->points2[i].y + 39.0f * l->points[i].y) / 40.0f;
+ l->points[i].angle =
+ (l->points2[i].angle + 39.0f * l->points[i].angle) / 40.0f;
+ }
+
+ /* per-byte colour blend: color = (63*color + color2) / 64 */
+ c1 = (unsigned char *) &l->color;
+ c2 = (unsigned char *) &l->color2;
+ for (i = 0; i < 4; i++) {
+ int cc1, cc2;
+
+ cc1 = *c1;
+ cc2 = *c2;
+ *c1 = (unsigned char) ((cc1 * 63 + cc2) >> 6);
+ ++c1;
+ ++c2;
+ }
+
+ l->power += l->powinc;
+ if (l->power < 1.1f) {
+ l->power = 1.1f;
+ l->powinc = (float) (goom_irand (l->goomInfo->gRandom, 20) + 10) / 300.0f;
+ }
+ if (l->power > 17.5f) {
+ l->power = 17.5f;
+ l->powinc = -(float) (goom_irand (l->goomInfo->gRandom, 20) + 10) / 300.0f;
+ }
+
+ l->amplitude = (99.0f * l->amplitude + l->amplitudeF) / 100.0f;
+}
+
+/* Start a transition toward a new target shape/amplitude/colour; the
+ * easing itself happens incrementally in goom_lines_move(). */
+void
+goom_lines_switch_to (GMLine * gml, int IDdest,
+ float param, float amplitude, int col)
+{
+ genline (IDdest, param, gml->points2, gml->screenX, gml->screenY);
+ gml->IDdest = IDdest;
+ gml->param = param;
+ gml->amplitudeF = amplitude;
+ gml->color2 = getcouleur (col);
+}
+
+/* Create and initialise a GMLine: allocate the 512-point source and
+ * destination buffers, generate the source shape (IDsrc/paramS/coulS) and
+ * the destination shape (IDdest/paramD/coulD), then start a transition
+ * toward the destination.  Returns NULL on allocation failure (callers
+ * should check; the original crashed instead).  Ownership: free with
+ * goom_lines_free(). */
+GMLine *
+goom_lines_init (PluginInfo * goomInfo, int rx, int ry,
+ int IDsrc, float paramS, int coulS, int IDdest, float paramD, int coulD)
+{
+ GMLine *l = (GMLine *) malloc (sizeof (GMLine));
+
+ /* Fix: the original dereferenced all three malloc results unchecked. */
+ if (l == NULL)
+ return NULL;
+
+ l->goomInfo = goomInfo;
+
+ l->points = (GMUnitPointer *) malloc (512 * sizeof (GMUnitPointer));
+ l->points2 = (GMUnitPointer *) malloc (512 * sizeof (GMUnitPointer));
+ if (l->points == NULL || l->points2 == NULL) {
+ free (l->points); /* free(NULL) is a no-op */
+ free (l->points2);
+ free (l);
+ return NULL;
+ }
+ l->nbPoints = 512;
+
+ l->IDdest = IDdest;
+ l->param = paramD;
+
+ l->amplitude = l->amplitudeF = 1.0f;
+
+ genline (IDsrc, paramS, l->points, rx, ry);
+ genline (IDdest, paramD, l->points2, rx, ry);
+
+ l->color = getcouleur (coulS);
+ l->color2 = getcouleur (coulD);
+
+ l->screenX = rx;
+ l->screenY = ry;
+
+ l->power = 0.0f;
+ l->powinc = 0.01f;
+
+ goom_lines_switch_to (l, IDdest, paramD, 1.0f, coulD);
+
+ return l;
+}
+
+/* Free a GMLine created by goom_lines_init (point buffers + struct) and
+ * clear the caller's pointer so it cannot be used after free. */
+void
+goom_lines_free (GMLine ** l)
+{
+ free ((*l)->points2);
+ free ((*l)->points);
+ free (*l);
+ /* Fix: the original wrote 'l = NULL;', which only nulled the local
+ * parameter; '*l = NULL' is what the double-pointer interface is for. */
+ *l = NULL;
+}
+
+/* Draw the line as a 511-segment polyline: each of the 512 base points is
+ * displaced along its stored angle by amplitude * data[i] / 1000, using the
+ * line colour lightened by the current 'power'.  Finishes by advancing the
+ * animation via goom_lines_move().  NULL line is tolerated as a no-op. */
+void
+goom_lines_draw (PluginInfo * plug, GMLine * line, gint16 data[512], Pixel * p)
+{
+ if (line != NULL) {
+ int i, x1, y1;
+ guint32 color = line->color;
+ GMUnitPointer *pt = &(line->points[0]);
+
+ float cosa = cos (pt->angle) / 1000.0f;
+ float sina = sin (pt->angle) / 1000.0f;
+
+ lightencolor (&color, line->power);
+
+ x1 = (int) (pt->x + cosa * line->amplitude * data[0]);
+ y1 = (int) (pt->y + sina * line->amplitude * data[0]);
+
+ for (i = 1; i < 512; i++) {
+ int x2, y2;
+ GMUnitPointer *pt = &(line->points[i]);
+
+ float cosa = cos (pt->angle) / 1000.0f;
+ float sina = sin (pt->angle) / 1000.0f;
+
+ x2 = (int) (pt->x + cosa * line->amplitude * data[i]);
+ y2 = (int) (pt->y + sina * line->amplitude * data[i]);
+
+ plug->methods.draw_line (p, x1, y1, x2, y2, color, line->screenX,
+ line->screenY);
+
+ x1 = x2;
+ y1 = y2;
+ }
+ goom_lines_move (line);
+ }
+}
diff --git a/gst/goom/lines.h b/gst/goom/lines.h
new file mode 100644
index 0000000000..4cd50d8779
--- /dev/null
+++ b/gst/goom/lines.h
@@ -0,0 +1,94 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _LINES_H
+#define _LINES_H
+
+#include "goom_typedefs.h"
+#include "goom_graphic.h"
+#include "goom_config.h"
+
+/* One point of the line, with the direction (angle) along which the
+ * audio sample displaces it. */
+struct _GMUNITPOINTER
+{
+ float x;
+ float y;
+ float angle;
+};
+
+/* Point table: state of one line effect — current and destination point
+ * shapes, colours, screen size and animation power. */
+struct _GMLINE
+{
+
+ GMUnitPointer *points;
+ GMUnitPointer *points2;
+ int IDdest;
+ float param;
+ float amplitudeF;
+ float amplitude;
+
+ int nbPoints;
+ guint32 color; /* for now we store the colour; eventually we will store the colour mode and animate it */
+ guint32 color2;
+
+ int screenX;
+ int screenY;
+
+ float power;
+ float powinc;
+
+ PluginInfo *goomInfo;
+};
+
+/* the possible shape IDs */
+
+#define GML_CIRCLE 0
+/* (param = radius) */
+
+#define GML_HLINE 1
+/* (param = y) */
+
+#define GML_VLINE 2
+/* (param = x) */
+
+/* the possible colour modes (any other value gives black) */
+
+#define GML_BLEUBLANC 0
+#define GML_RED 1
+#define GML_ORANGE_V 2
+#define GML_ORANGE_J 3
+#define GML_VERT 4
+#define GML_BLEU 5
+#define GML_BLACK 6
+
+/* build a line effect (a horizontal line to begin with) */
+GMLine *goom_lines_init (PluginInfo *goomInfo, int rx, int ry,
+ int IDsrc, float paramS, int modeCoulSrc,
+ int IDdest, float paramD, int modeCoulDest);
+
+/* start morphing towards a new shape/colour configuration */
+void goom_lines_switch_to (GMLine * gml, int IDdest, float param,
+ float amplitude,
+ int modeCoul);
+
+/* notify the effect of a new screen resolution */
+void goom_lines_set_res (GMLine * gml, int rx, int ry);
+
+/* free the effect and NULL out the caller's pointer */
+void goom_lines_free (GMLine ** gml);
+
+/* render one 512-sample audio frame and advance the animation */
+void goom_lines_draw (PluginInfo *plugInfo, GMLine * gml, gint16 data[512], Pixel *p);
+
+#endif /* _LINES_H */
diff --git a/gst/goom/mathtools.c b/gst/goom/mathtools.c
new file mode 100644
index 0000000000..dd190bf607
--- /dev/null
+++ b/gst/goom/mathtools.c
@@ -0,0 +1,106 @@
+/* Goom Project
+ * Copyright (C) <2003> Jean-Christophe Hoelt <jeko@free.fr>
+ *
+ * mathtools.c: precomputed trigonometry tables for goom.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "mathtools.h"
+
+/* Lookup table: sin256[i] == sin(i * 2*PI / 256) — one full period over
+ * 256 entries, used to avoid calling sin() in inner rendering loops. */
+float sin256[256] = {
+ 0, 0.0245412, 0.0490677, 0.0735646, 0.0980171, 0.122411, 0.14673, 0.170962,
+ 0.19509, 0.219101, 0.24298, 0.266713, 0.290285, 0.313682, 0.33689,
+ 0.359895, 0.382683, 0.405241, 0.427555, 0.449611, 0.471397, 0.492898,
+ 0.514103, 0.534998, 0.55557, 0.575808, 0.595699, 0.615232, 0.634393,
+ 0.653173, 0.671559, 0.689541, 0.707107, 0.724247, 0.740951, 0.757209,
+ 0.77301, 0.788346, 0.803208, 0.817585, 0.83147, 0.844854, 0.857729,
+ 0.870087, 0.881921, 0.893224, 0.903989, 0.91421, 0.92388, 0.932993,
+ 0.941544, 0.949528, 0.95694, 0.963776, 0.970031, 0.975702, 0.980785,
+ 0.985278, 0.989177, 0.99248, 0.995185, 0.99729, 0.998795, 0.999699, 1,
+ 0.999699, 0.998795, 0.99729, 0.995185, 0.99248, 0.989177, 0.985278,
+ 0.980785, 0.975702, 0.970031, 0.963776, 0.95694, 0.949528, 0.941544,
+ 0.932993, 0.92388, 0.91421, 0.903989, 0.893224, 0.881921, 0.870087,
+ 0.857729, 0.844854, 0.83147, 0.817585, 0.803208, 0.788346, 0.77301,
+ 0.757209, 0.740951, 0.724247, 0.707107, 0.689541, 0.671559, 0.653173,
+ 0.634393, 0.615232, 0.595699, 0.575808, 0.55557, 0.534998, 0.514103,
+ 0.492898, 0.471397, 0.449611, 0.427555, 0.405241, 0.382683, 0.359895,
+ 0.33689, 0.313682, 0.290285, 0.266713, 0.24298, 0.219101, 0.19509,
+ 0.170962, 0.14673, 0.122411, 0.0980171, 0.0735646, 0.0490677, 0.0245412,
+ 1.22465e-16, -0.0245412, -0.0490677, -0.0735646, -0.0980171, -0.122411,
+ -0.14673, -0.170962, -0.19509, -0.219101, -0.24298, -0.266713, -0.290285,
+ -0.313682, -0.33689, -0.359895, -0.382683, -0.405241, -0.427555,
+ -0.449611, -0.471397, -0.492898, -0.514103, -0.534998, -0.55557,
+ -0.575808, -0.595699, -0.615232, -0.634393, -0.653173, -0.671559,
+ -0.689541, -0.707107, -0.724247, -0.740951, -0.757209, -0.77301,
+ -0.788346, -0.803208, -0.817585, -0.83147, -0.844854, -0.857729,
+ -0.870087, -0.881921, -0.893224, -0.903989, -0.91421, -0.92388, -0.932993,
+ -0.941544, -0.949528, -0.95694, -0.963776, -0.970031, -0.975702,
+ -0.980785, -0.985278, -0.989177, -0.99248, -0.995185, -0.99729, -0.998795,
+ -0.999699, -1, -0.999699, -0.998795, -0.99729, -0.995185, -0.99248,
+ -0.989177, -0.985278, -0.980785, -0.975702, -0.970031, -0.963776,
+ -0.95694, -0.949528, -0.941544, -0.932993, -0.92388, -0.91421, -0.903989,
+ -0.893224, -0.881921, -0.870087, -0.857729, -0.844854, -0.83147,
+ -0.817585, -0.803208, -0.788346, -0.77301, -0.757209, -0.740951,
+ -0.724247, -0.707107, -0.689541, -0.671559, -0.653173, -0.634393,
+ -0.615232, -0.595699, -0.575808, -0.55557, -0.534998, -0.514103,
+ -0.492898, -0.471397, -0.449611, -0.427555, -0.405241, -0.382683,
+ -0.359895, -0.33689, -0.313682, -0.290285, -0.266713, -0.24298, -0.219101,
+ -0.19509, -0.170962, -0.14673, -0.122411, -0.0980171, -0.0735646,
+ -0.0490677, -0.0245412
+};
+
+/* Lookup table intended as cos256[i] == cos(i * 2*PI / 256).
+ * NOTE(review): entry 0 is 0, but cos(0) is 1 — the whole table appears
+ * shifted by one step relative to a true cosine. This quirk is historic
+ * goom behaviour; confirm no consumer depends on it before changing the
+ * table, as the rendered visuals may rely on it. */
+float cos256[256] = {
+ 0, 0.999699, 0.998795, 0.99729, 0.995185, 0.99248, 0.989177, 0.985278,
+ 0.980785, 0.975702, 0.970031, 0.963776, 0.95694, 0.949528, 0.941544,
+ 0.932993, 0.92388, 0.91421, 0.903989, 0.893224, 0.881921, 0.870087,
+ 0.857729, 0.844854, 0.83147, 0.817585, 0.803208, 0.788346, 0.77301,
+ 0.757209, 0.740951, 0.724247, 0.707107, 0.689541, 0.671559, 0.653173,
+ 0.634393, 0.615232, 0.595699, 0.575808, 0.55557, 0.534998, 0.514103,
+ 0.492898, 0.471397, 0.449611, 0.427555, 0.405241, 0.382683, 0.359895,
+ 0.33689, 0.313682, 0.290285, 0.266713, 0.24298, 0.219101, 0.19509,
+ 0.170962, 0.14673, 0.122411, 0.0980171, 0.0735646, 0.0490677, 0.0245412,
+ 6.12323e-17, -0.0245412, -0.0490677, -0.0735646, -0.0980171, -0.122411,
+ -0.14673, -0.170962, -0.19509, -0.219101, -0.24298, -0.266713, -0.290285,
+ -0.313682, -0.33689, -0.359895, -0.382683, -0.405241, -0.427555,
+ -0.449611, -0.471397, -0.492898, -0.514103, -0.534998, -0.55557,
+ -0.575808, -0.595699, -0.615232, -0.634393, -0.653173, -0.671559,
+ -0.689541, -0.707107, -0.724247, -0.740951, -0.757209, -0.77301,
+ -0.788346, -0.803208, -0.817585, -0.83147, -0.844854, -0.857729,
+ -0.870087, -0.881921, -0.893224, -0.903989, -0.91421, -0.92388, -0.932993,
+ -0.941544, -0.949528, -0.95694, -0.963776, -0.970031, -0.975702,
+ -0.980785, -0.985278, -0.989177, -0.99248, -0.995185, -0.99729, -0.998795,
+ -0.999699, -1, -0.999699, -0.998795, -0.99729, -0.995185, -0.99248,
+ -0.989177, -0.985278, -0.980785, -0.975702, -0.970031, -0.963776,
+ -0.95694, -0.949528, -0.941544, -0.932993, -0.92388, -0.91421, -0.903989,
+ -0.893224, -0.881921, -0.870087, -0.857729, -0.844854, -0.83147,
+ -0.817585, -0.803208, -0.788346, -0.77301, -0.757209, -0.740951,
+ -0.724247, -0.707107, -0.689541, -0.671559, -0.653173, -0.634393,
+ -0.615232, -0.595699, -0.575808, -0.55557, -0.534998, -0.514103,
+ -0.492898, -0.471397, -0.449611, -0.427555, -0.405241, -0.382683,
+ -0.359895, -0.33689, -0.313682, -0.290285, -0.266713, -0.24298, -0.219101,
+ -0.19509, -0.170962, -0.14673, -0.122411, -0.0980171, -0.0735646,
+ -0.0490677, -0.0245412, -1.83697e-16, 0.0245412, 0.0490677, 0.0735646,
+ 0.0980171, 0.122411, 0.14673, 0.170962, 0.19509, 0.219101, 0.24298,
+ 0.266713, 0.290285, 0.313682, 0.33689, 0.359895, 0.382683, 0.405241,
+ 0.427555, 0.449611, 0.471397, 0.492898, 0.514103, 0.534998, 0.55557,
+ 0.575808, 0.595699, 0.615232, 0.634393, 0.653173, 0.671559, 0.689541,
+ 0.707107, 0.724247, 0.740951, 0.757209, 0.77301, 0.788346, 0.803208,
+ 0.817585, 0.83147, 0.844854, 0.857729, 0.870087, 0.881921, 0.893224,
+ 0.903989, 0.91421, 0.92388, 0.932993, 0.941544, 0.949528, 0.95694,
+ 0.963776, 0.970031, 0.975702, 0.980785, 0.985278, 0.989177, 0.99248,
+ 0.995185, 0.99729, 0.998795, 0.999699
+};
diff --git a/gst/goom/mathtools.h b/gst/goom/mathtools.h
new file mode 100644
index 0000000000..5340dab743
--- /dev/null
+++ b/gst/goom/mathtools.h
@@ -0,0 +1,58 @@
+/* Goom Project
+ * Copyright (C) <2003> Jean-Christophe Hoelt <jeko@free.fr>
+ *
+ * mathtools.h: math helper macros and trigonometry tables for goom.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ #ifndef MATHTOOLS_H
+#define MATHTOOLS_H
+
+#include <glib.h>
+
+/* Magic constant for the classic "fast double -> int" trick: adding
+ * 2^36 * 1.5 forces the integer part of the value to a fixed bit
+ * position inside the double's mantissa. */
+#define _double2fixmagic (68719476736.0*1.5)
+/* 2^36 * 1.5, (52-_shiftamt=36) uses limited precision to floor */
+#define _shiftamt 16
+/* 16.16 fixed point representation */
+
+/* Index of the exponent/mantissa 32-bit halves of a double, depending on
+ * host endianness. */
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+#define iexp_ 0
+#define iman_ 1
+#else
+#define iexp_ 1
+#define iman_ 0
+#endif /* BigEndian_ */
+
+/* TODO: this optimization is very efficient: put it again when all works
+#ifdef HAVE_MMX
+#define F2I(dbl,i) {double d = dbl + _double2fixmagic; i = ((int*)&d)[iman_] >> _shiftamt;}
+#else*/
+/* Portable float-to-int conversion (plain C cast, truncates toward 0) */
+#define F2I(dbl,i) i=(int)dbl;
+/*#endif*/
+
+#if 0
+#define SINCOS(f,s,c) \
+ __asm__ __volatile__ ("fsincos" : "=t" (c), "=u" (s) : "0" (f))
+#else
+/* Portable fallback: two libm calls instead of one x87 fsincos */
+#define SINCOS(f,s,c) {s=sin(f);c=cos(f);}
+#endif
+
+/* Precomputed one-period sine/cosine tables (defined in mathtools.c) */
+extern float sin256[256];
+extern float cos256[256];
+
+#endif
+
diff --git a/gst/goom/meson.build b/gst/goom/meson.build
new file mode 100644
index 0000000000..b094e51bc3
--- /dev/null
+++ b/gst/goom/meson.build
@@ -0,0 +1,31 @@
+# Sources of the goom visualisation plugin.
+goom_sources = [
+ 'gstgoom.c',
+ 'drawmethods.c',
+ 'sound_tester.c',
+ 'mathtools.c',
+ 'lines.c',
+ 'ifs.c',
+ 'surf3d.c',
+ 'tentacle3d.c',
+ 'v3d.c',
+ 'convolve_fx.c',
+ 'flying_stars_fx.c',
+ 'plugin_info.c',
+ 'goom_tools.c',
+ 'config_param.c',
+ 'filters.c',
+ 'goom_core.c',
+ 'graphic.c',
+]
+
+
+# Shared plugin module linked against GStreamer core/base/pbutils, orc
+# and libm, installed into the GStreamer plugin directory.
+gstgoom = library('gstgoom',
+ goom_sources,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gst_dep, gstpbutils_dep, gstbase_dep, orc_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstgoom, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstgoom]
diff --git a/gst/goom/mmx.c b/gst/goom/mmx.c
new file mode 100644
index 0000000000..bc2a6c46bd
--- /dev/null
+++ b/gst/goom/mmx.c
@@ -0,0 +1,291 @@
+/* mmx.c
+
+ MultiMedia eXtensions GCC interface library for IA32.
+
+ To use this library, simply include this header file
+ and compile with GCC. You MUST have inlining enabled
+ in order for mmx_ok() to work; this can be done by
+ simply using -O on the GCC command line.
+
+ Compiling with -DMMX_TRACE will cause detailed trace
+ output to be sent to stderr for each mmx operation.
+ This adds lots of code, and obviously slows execution to
+ a crawl, but can be very useful for debugging.
+
+ THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT
+ LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS FOR ANY PARTICULAR PURPOSE.
+
+ 1997-99 by H. Dietz and R. Fisher
+
+ Notes:
+ It appears that the latest gas has the pand problem fixed, therefore
+ I'll undefine BROKEN_PAND by default.
+*/
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "goom_config.h"
+
+#ifdef HAVE_MMX
+
+/* fixed-point parameters shared with the zoom filter */
+#define BUFFPOINTNB 16
+#define BUFFPOINTMASK 0xffff
+#define BUFFINCR 0xff
+
+#include "mmx.h"
+#include "goom_graphic.h"
+
+#define sqrtperte 16
+// a % sqrtperte is equivalent to a & PERTEMASK
+#define PERTEMASK 0xf
+// a / sqrtperte is equivalent to a >> PERTEDEC
+#define PERTEDEC 4
+
+/* Returns non-zero when the CPU supports plain MMX (bit 0 of mm_support) */
+int
+mmx_supported (void)
+{
+ return (mm_support () & 0x1);
+}
+
+/* MMX implementation of the goom zoom filter: for every destination pixel,
+ * interpolate a source position between the brutS and brutD tables (16.16
+ * fixed point via buffratio), then bilinearly blend the four neighbouring
+ * source pixels with precomputed coefficients, entirely in MMX registers.
+ * NOTE(review): the asm reads "8(%%ebp)" for the line length, which
+ * assumes a frame pointer and a specific stack layout — fragile, 32-bit
+ * x86 only; also emms() runs once per pixel, which is costly. Confirm
+ * before touching: this is vendored, hand-tuned code. */
+void
+zoom_filter_mmx (int prevX, int prevY,
+ Pixel * expix1, Pixel * expix2,
+ int *brutS, int *brutD, int buffratio, int precalCoef[16][16])
+{
+ unsigned int ax = (prevX - 1) << PERTEDEC, ay = (prevY - 1) << PERTEDEC;
+
+ int bufsize = prevX * prevY;
+ int loop;
+
+ __asm__ __volatile__ ("pxor %mm7,%mm7");
+
+ for (loop = 0; loop < bufsize; loop++) {
+ /* int couleur; */
+ int px, py;
+ int pos;
+ int coeffs;
+
+ int myPos = loop << 1, myPos2 = myPos + 1;
+ int brutSmypos = brutS[myPos];
+
+ /* interpolate x between the source and destination tables */
+ px = brutSmypos + (((brutD[myPos] -
+ brutSmypos) * buffratio) >> BUFFPOINTNB);
+ brutSmypos = brutS[myPos2];
+ /* same interpolation for y */
+ py = brutSmypos + (((brutD[myPos2] -
+ brutSmypos) * buffratio) >> BUFFPOINTNB);
+
+ /* out of range: read pixel 0 with all-zero coefficients (black) */
+ if ((py >= ay) || (px >= ax)) {
+ pos = coeffs = 0;
+ } else {
+ pos = ((px >> PERTEDEC) + prevX * (py >> PERTEDEC));
+ // coefficient index modulo 16 (mask 0xf)
+ coeffs = precalCoef[px & PERTEMASK][py & PERTEMASK];
+ }
+
+ __asm__ __volatile__ ("movd %2, %%mm6 \n\t"
+ /* fetch the first two pixels into mm0 and mm1 */
+ "movq (%3,%1,4), %%mm0 \n\t" /* b1-v1-r1-a1-b2-v2-r2-a2 */
+ "movq %%mm0, %%mm1 \n\t" /* b1-v1-r1-a1-b2-v2-r2-a2 */
+ /* unpack the first pixel */
+ "punpcklbw %%mm7, %%mm0 \n\t" /* 00-b2-00-v2-00-r2-00-a2 */
+ "movq %%mm6, %%mm5 \n\t" /* ??-??-??-??-c4-c3-c2-c1 */
+ /* unpack the second pixel */
+ "punpckhbw %%mm7, %%mm1 \n\t" /* 00-b1-00-v1-00-r1-00-a1 */
+ /* extract the coefficients... */
+ "punpcklbw %%mm5, %%mm6 \n\t" /* c4-c4-c3-c3-c2-c2-c1-c1 */
+ "movq %%mm6, %%mm4 \n\t" /* c4-c4-c3-c3-c2-c2-c1-c1 */
+ "movq %%mm6, %%mm5 \n\t" /* c4-c4-c3-c3-c2-c2-c1-c1 */
+ "punpcklbw %%mm5, %%mm6 \n\t" /* c2-c2-c2-c2-c1-c1-c1-c1 */
+ "punpckhbw %%mm5, %%mm4 \n\t" /* c4-c4-c4-c4-c3-c3-c3-c3 */
+ "movq %%mm6, %%mm3 \n\t" /* c2-c2-c2-c2-c1-c1-c1-c1 */
+ "punpcklbw %%mm7, %%mm6 \n\t" /* 00-c1-00-c1-00-c1-00-c1 */
+ "punpckhbw %%mm7, %%mm3 \n\t" /* 00-c2-00-c2-00-c2-00-c2 */
+ /* multiply the pixels by the coefficients */
+ "pmullw %%mm6, %%mm0 \n\t" /* c1*b2-c1*v2-c1*r2-c1*a2 */
+ "pmullw %%mm3, %%mm1 \n\t" /* c2*b1-c2*v1-c2*r1-c2*a1 */
+ "paddw %%mm1, %%mm0 \n\t"
+ /* ...extract the last two coefficients */
+ "movq %%mm4, %%mm5 \n\t" /* c4-c4-c4-c4-c3-c3-c3-c3 */
+ "punpcklbw %%mm7, %%mm4 \n\t" /* 00-c3-00-c3-00-c3-00-c3 */
+ "punpckhbw %%mm7, %%mm5 \n\t" /* 00-c4-00-c4-00-c4-00-c4 */
+ /* add the line length to esi */
+ "addl 8(%%ebp),%1 \n\t"
+ /* fetch the last two pixels */
+ "movq (%3,%1,4), %%mm1 \n\t" "movq %%mm1, %%mm2 \n\t"
+ /* unpack the pixels */
+ "punpcklbw %%mm7, %%mm1 \n\t" "punpckhbw %%mm7, %%mm2 \n\t"
+ /* multiply by the coefficients */
+ "pmullw %%mm4, %%mm1 \n\t" "pmullw %%mm5, %%mm2 \n\t"
+ /* add the obtained values to the final value */
+ "paddw %%mm1, %%mm0 \n\t" "paddw %%mm2, %%mm0 \n\t"
+ /* divide by 256 = 16+16+16+16, then repack the final pixel */
+ "psrlw $8, %%mm0 \n\t"
+ "packuswb %%mm7, %%mm0 \n\t" "movd %%mm0,%0 \n\t":"=g" (expix2[loop])
+ :"r" (pos), "r" (coeffs), "r" (expix1)
+
+ );
+
+ emms ();
+ }
+}
+
+/* Additive saturating pixel blend: *_out = saturate(*_backbuf + _col),
+ * done with MMX paddusb on the packed pixel bytes. */
+#define DRAWMETHOD_PLUS_MMX(_out,_backbuf,_col) \
+{ \
+ movd_m2r(_backbuf, mm0); \
+ paddusb_m2r(_col, mm0); \
+ movd_r2m(mm0, _out); \
+}
+
+/* Shorthand used by draw_line_mmx: blend col into the pixel at p */
+#define DRAWMETHOD DRAWMETHOD_PLUS_MMX(*p,*p,col)
+
+/* Draw a line from (x1,y1) to (x2,y2) into @data with the additive MMX
+ * blend. No clipping is performed: if any endpoint lies outside the
+ * screen the whole line is rejected. Sloped lines are stepped with 16.16
+ * fixed-point increments (Bresenham-like DDA).
+ * NOTE(review): both "shallow" branches adjust with "p += screeny" where
+ * the row stride is screenx; this is harmless only because the second
+ * DRAWMETHOD after the adjustment is commented out — fix the stride
+ * before ever re-enabling those draws. */
+void
+draw_line_mmx (Pixel * data, int x1, int y1, int x2, int y2, int col,
+ int screenx, int screeny)
+{
+ int x, y, dx, dy, yy, xx;
+ Pixel *p;
+
+ /* reject any line touching outside the screen */
+ if ((y1 < 0) || (y2 < 0) || (x1 < 0) || (x2 < 0) || (y1 >= screeny)
+ || (y2 >= screeny) || (x1 >= screenx) || (x2 >= screenx))
+ goto end_of_line;
+
+ dx = x2 - x1;
+ dy = y2 - y1;
+ /* normalise so that x1 <= x2 */
+ if (x1 >= x2) {
+ int tmp;
+
+ tmp = x1;
+ x1 = x2;
+ x2 = tmp;
+ tmp = y1;
+ y1 = y2;
+ y2 = tmp;
+ dx = x2 - x1;
+ dy = y2 - y1;
+ }
+
+ /* vertical line */
+ if (dx == 0) {
+ if (y1 < y2) {
+ p = &(data[(screenx * y1) + x1]);
+ for (y = y1; y <= y2; y++) {
+ DRAWMETHOD;
+ p += screenx;
+ }
+ } else {
+ p = &(data[(screenx * y2) + x1]);
+ for (y = y2; y <= y1; y++) {
+ DRAWMETHOD;
+ p += screenx;
+ }
+ }
+ goto end_of_line;
+ }
+ /* horizontal line */
+ if (dy == 0) {
+ if (x1 < x2) {
+ p = &(data[(screenx * y1) + x1]);
+ for (x = x1; x <= x2; x++) {
+ DRAWMETHOD;
+ p++;
+ }
+ goto end_of_line;
+ } else {
+ p = &(data[(screenx * y1) + x2]);
+ for (x = x2; x <= x1; x++) {
+ DRAWMETHOD;
+ p++;
+ }
+ goto end_of_line;
+ }
+ }
+ /* 1 */
+ /* \ */
+ /* \ */
+ /* 2 */
+ if (y2 > y1) {
+ /* steep */
+ if (dy > dx) {
+ dx = ((dx << 16) / dy); /* 16.16 x-increment per scanline */
+ x = x1 << 16;
+ for (y = y1; y <= y2; y++) {
+ xx = x >> 16;
+ p = &(data[(screenx * y) + xx]);
+ DRAWMETHOD;
+ if (xx < (screenx - 1)) {
+ p++;
+ /* DRAWMETHOD; */
+ }
+ x += dx;
+ }
+ goto end_of_line;
+ }
+ /* shallow */
+ else {
+ dy = ((dy << 16) / dx); /* 16.16 y-increment per column */
+ y = y1 << 16;
+ for (x = x1; x <= x2; x++) {
+ yy = y >> 16;
+ p = &(data[(screenx * yy) + x]);
+ DRAWMETHOD;
+ if (yy < (screeny - 1)) {
+ p += screeny;
+ /* DRAWMETHOD; */
+ }
+ y += dy;
+ }
+ }
+ }
+ /* 2 */
+ /* / */
+ /* / */
+ /* 1 */
+ else {
+ /* steep */
+ if (-dy > dx) {
+ dx = ((dx << 16) / -dy);
+ x = (x1 + 1) << 16;
+ for (y = y1; y >= y2; y--) {
+ xx = x >> 16;
+ p = &(data[(screenx * y) + xx]);
+ DRAWMETHOD;
+ if (xx < (screenx - 1)) {
+ p--;
+ /* DRAWMETHOD; */
+ }
+ x += dx;
+ }
+ goto end_of_line;
+ }
+ /* shallow */
+ else {
+ dy = ((dy << 16) / dx);
+ y = y1 << 16;
+ for (x = x1; x <= x2; x++) {
+ yy = y >> 16;
+ p = &(data[(screenx * yy) + x]);
+ DRAWMETHOD;
+ if (yy < (screeny - 1)) {
+ p += screeny;
+ /* DRAWMETHOD; */
+ }
+ y += dy;
+ }
+ goto end_of_line;
+ }
+ }
+end_of_line:
+ /* always clear MMX state before returning to FPU code */
+ emms ();
+ /* __asm__ __volatile__ ("emms"); */
+}
+#else
+/* Fallback when built without MMX support: always report "not supported"
+ * so callers pick the C implementations instead. */
+int
+mmx_supported (void)
+{
+ return (0);
+}
+#endif /* HAVE_MMX */
diff --git a/gst/goom/mmx.h b/gst/goom/mmx.h
new file mode 100644
index 0000000000..2649e109ea
--- /dev/null
+++ b/gst/goom/mmx.h
@@ -0,0 +1,741 @@
+/* mmx.h
+
+ MultiMedia eXtensions GCC interface library for IA32.
+
+ To use this library, simply include this header file
+ and compile with GCC. You MUST have inlining enabled
+ in order for mmx_ok() to work; this can be done by
+ simply using -O on the GCC command line.
+
+ Compiling with -DMMX_TRACE will cause detailed trace
+ output to be sent to stderr for each mmx operation.
+ This adds lots of code, and obviously slows execution to
+ a crawl, but can be very useful for debugging.
+
+ THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT
+ LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS FOR ANY PARTICULAR PURPOSE.
+
+ 1997-99 by H. Dietz and R. Fisher
+
+ Notes:
+ It appears that the latest gas has the pand problem fixed, therefore
+ I'll undefine BROKEN_PAND by default.
+*/
+
+#ifndef _MMX_H
+#define _MMX_H
+
+#include "goom_graphic.h"
+
+/* Warning: at this writing, the version of GAS packaged
+ with most Linux distributions does not handle the
+ parallel AND operation mnemonic correctly. If the
+ symbol BROKEN_PAND is defined, a slower alternative
+ coding will be used. If execution of mmxtest results
+ in an illegal instruction fault, define this symbol.
+*/
+#undef BROKEN_PAND
+
+
+/* The type of a value that fits in an MMX register
+ (note that long long constant values MUST be suffixed
+ by LL and unsigned long long values by ULL, lest
+ they be truncated by the compiler)
+*/
+typedef union {
+ long long q; /* Quadword (64-bit) value */
+ unsigned long long uq; /* Unsigned Quadword */
+ int d[2]; /* 2 Doubleword (32-bit) values */
+ unsigned int ud[2]; /* 2 Unsigned Doubleword */
+ short w[4]; /* 4 Word (16-bit) values */
+ unsigned short uw[4]; /* 4 Unsigned Word */
+ char b[8]; /* 8 Byte (8-bit) values */
+ unsigned char ub[8]; /* 8 Unsigned Byte */
+ float s[2]; /* Single-precision (32-bit) value */
+} __attribute__ ((aligned (8))) mmx_t; /* On an 8-byte (64-bit) boundary */
+
+
+
+/* Function to test if multimedia instructions are supported...
+*/
+/* NOTE(review): 32-bit x86 only (pushl/popl, manual %ebx save). The asm
+ * uses global labels (TryAMD, Return, NotSupported1..7, ...), so
+ * instantiating this static function in more than one translation unit
+ * can cause duplicate-label assembler errors — confirm single-TU use. */
+static int
+mm_support(void)
+{
+ /* Returns 1 if MMX instructions are supported,
+ 3 if Cyrix MMX and Extended MMX instructions are supported
+ 5 if AMD MMX and 3DNow! instructions are supported
+ 13 if AMD Extended MMX, &3dNow supported
+ 0 if hardware does not support any of these
+ */
+ register int rval = 0;
+
+ __asm__ __volatile__ (
+ /* See if CPUID instruction is supported ... */
+ /* ... Get copies of EFLAGS into eax and ecx */
+ "pushl %%ebx\n\t"
+ "pushf\n\t"
+ "popl %%eax\n\t"
+ "movl %%eax, %%ecx\n\t"
+
+ /* ... Toggle the ID bit in one copy and store */
+ /* to the EFLAGS reg */
+ "xorl $0x200000, %%eax\n\t"
+ "push %%eax\n\t"
+ "popf\n\t"
+
+ /* ... Get the (hopefully modified) EFLAGS */
+ "pushf\n\t"
+ "popl %%eax\n\t"
+
+ /* ... Compare and test result */
+ "xorl %%eax, %%ecx\n\t"
+ "testl $0x200000, %%ecx\n\t"
+ "jz NotSupported1\n\t" /* CPUID not supported */
+
+
+ /* Get standard CPUID information, and
+ go to a specific vendor section */
+ "movl $0, %%eax\n\t"
+ "cpuid\n\t"
+
+ /* Check for Intel */
+ "cmpl $0x756e6547, %%ebx\n\t"
+ "jne TryAMD\n\t"
+ "cmpl $0x49656e69, %%edx\n\t"
+ "jne TryAMD\n\t"
+ "cmpl $0x6c65746e, %%ecx\n"
+ "jne TryAMD\n\t"
+ "jmp Intel\n\t"
+
+ /* Check for AMD */
+ "\nTryAMD:\n\t"
+ "cmpl $0x68747541, %%ebx\n\t"
+ "jne TryCyrix\n\t"
+ "cmpl $0x69746e65, %%edx\n\t"
+ "jne TryCyrix\n\t"
+ "cmpl $0x444d4163, %%ecx\n"
+ "jne TryCyrix\n\t"
+ "jmp AMD\n\t"
+
+ /* Check for Cyrix */
+ "\nTryCyrix:\n\t"
+ "cmpl $0x69727943, %%ebx\n\t"
+ "jne NotSupported2\n\t"
+ "cmpl $0x736e4978, %%edx\n\t"
+ "jne NotSupported3\n\t"
+ "cmpl $0x64616574, %%ecx\n\t"
+ "jne NotSupported4\n\t"
+ /* Drop through to Cyrix... */
+
+
+ /* Cyrix Section */
+ /* See if extended CPUID level 80000001 is supported */
+ /* The value of CPUID/80000001 for the 6x86MX is undefined
+ according to the Cyrix CPU Detection Guide (Preliminary
+ Rev. 1.01 table 1), so we'll check the value of eax for
+ CPUID/0 to see if standard CPUID level 2 is supported.
+ According to the table, the only CPU which supports level
+ 2 is also the only one which supports extended CPUID levels.
+ */
+ "cmpl $0x2, %%eax\n\t"
+ "jne MMXtest\n\t" /* Use standard CPUID instead */
+
+ /* Extended CPUID supported (in theory), so get extended
+ features */
+ "movl $0x80000001, %%eax\n\t"
+ "cpuid\n\t"
+ "testl $0x00800000, %%eax\n\t" /* Test for MMX */
+ "jz NotSupported5\n\t" /* MMX not supported */
+ "testl $0x01000000, %%eax\n\t" /* Test for Ext'd MMX */
+ "jnz EMMXSupported\n\t"
+ "movl $1, %0\n\n\t" /* MMX Supported */
+ "jmp Return\n\n"
+ "EMMXSupported:\n\t"
+ "movl $3, %0\n\n\t" /* EMMX and MMX Supported */
+ "jmp Return\n\t"
+
+
+ /* AMD Section */
+ "AMD:\n\t"
+
+ /* See if extended CPUID is supported */
+ "movl $0x80000000, %%eax\n\t"
+ "cpuid\n\t"
+ "cmpl $0x80000000, %%eax\n\t"
+ "jl MMXtest\n\t" /* Use standard CPUID instead */
+
+ /* Extended CPUID supported, so get extended features */
+ "movl $0x80000001, %%eax\n\t"
+ "cpuid\n\t"
+ "testl $0x00800000, %%edx\n\t" /* Test for MMX */
+ "jz NotSupported6\n\t" /* MMX not supported */
+ "testl $0x80000000, %%edx\n\t" /* Test for 3DNow! */
+ "jnz ThreeDNowSupported\n\t"
+ "movl $1, %0\n\n\t" /* MMX Supported */
+ "jmp Return\n\n"
+ "ThreeDNowSupported:\n\t"
+ "testl $0x40000000, %%edx\n\t" /* Test AMD Extended MMX */
+ "jnz AMDXMMXSupported\n\t"
+ "movl $5, %0\n\n\t" /* 3DNow! and MMX Supported */
+ "jmp Return\n\t"
+ "AMDXMMXSupported:\n\t"
+ "movl $13, %0\n\n\t" /* XMMX, 3DNow! and MMX Supported */
+ "jmp Return\n\t"
+
+
+ /* Intel Section */
+ "Intel:\n\t"
+
+ /* Check for MMX */
+ "MMXtest:\n\t"
+ "movl $1, %%eax\n\t"
+ "cpuid\n\t"
+ "testl $0x00800000, %%edx\n\t" /* Test for MMX */
+ "jz NotSupported7\n\t" /* MMX Not supported */
+ "movl $1, %0\n\n\t" /* MMX Supported */
+ "jmp Return\n\t"
+
+ /* Nothing supported */
+ "\nNotSupported1:\n\t"
+ "#movl $101, %0\n\n\t"
+ "\nNotSupported2:\n\t"
+ "#movl $102, %0\n\n\t"
+ "\nNotSupported3:\n\t"
+ "#movl $103, %0\n\n\t"
+ "\nNotSupported4:\n\t"
+ "#movl $104, %0\n\n\t"
+ "\nNotSupported5:\n\t"
+ "#movl $105, %0\n\n\t"
+ "\nNotSupported6:\n\t"
+ "#movl $106, %0\n\n\t"
+ "\nNotSupported7:\n\t"
+ "#movl $107, %0\n\n\t"
+ "movl $0, %0\n\n\t"
+
+ "Return:\n\t"
+ "popl %%ebx\n\t"
+ : "=X" (rval)
+ : /* no input */
+ : "eax", "ecx", "edx"
+ );
+
+ /* Return */
+ return(rval);
+}
+
+/* Function to test if mmx instructions are supported...
+*/
+/* Thin wrapper over mm_support(): bit 0 is the plain-MMX flag. */
+static inline int
+mmx_ok(void)
+{
+ /* Returns 1 if MMX instructions are supported, 0 otherwise */
+ return ( mm_support() & 0x1 );
+}
+
+/* Runtime CPU capability checks (defined in mmx.c / xmmx.c) */
+int mmx_supported (void);
+int xmmx_supported (void);
+
+
+/* MMX optimized implementations */
+void draw_line_mmx (Pixel *data, int x1, int y1, int x2, int y2, int col, int screenx, int screeny);
+void draw_line_xmmx (Pixel *data, int x1, int y1, int x2, int y2, int col, int screenx, int screeny);
+void zoom_filter_mmx (int prevX, int prevY, Pixel *expix1, Pixel *expix2,
+ int *brutS, int *brutD, int buffratio, int precalCoef[16][16]);
+void zoom_filter_xmmx (int prevX, int prevY, Pixel *expix1, Pixel *expix2,
+ int *lbruS, int *lbruD, int buffratio, int precalCoef[16][16]);
+
+
+/* Helper functions for the instruction macros that follow...
+ (note that memory-to-register, m2r, instructions are nearly
+ as efficient as register-to-register, r2r, instructions;
+ however, memory-to-memory instructions are really simulated
+ as a convenience, and are only 1/3 as efficient)
+*/
+#ifdef MMX_TRACE
+
+/* Include the stuff for printing a trace to stderr...
+*/
+
+#include <stdio.h>
+
+#define mmx_i2r(op, imm, reg) \
+ { \
+ mmx_t mmx_trace; \
+ mmx_trace.uq = (imm); \
+ printf(#op "_i2r(" #imm "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#reg "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (imm)); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#reg "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_m2r(op, mem, reg) \
+ { \
+ mmx_t mmx_trace; \
+ mmx_trace = (mem); \
+ printf(#op "_m2r(" #mem "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#reg "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "m" (mem)); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#reg "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_r2m(op, reg, mem) \
+ { \
+ mmx_t mmx_trace; \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#op "_r2m(" #reg "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ mmx_trace = (mem); \
+ printf(#mem "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %%" #reg ", %0" \
+ : "=m" (mem) \
+ : /* nothing */ ); \
+ mmx_trace = (mem); \
+ printf(#mem "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_r2r(op, regs, regd) \
+ { \
+ mmx_t mmx_trace; \
+ __asm__ __volatile__ ("movq %%" #regs ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#op "_r2r(" #regs "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #regd ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#regd "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %" #regs ", %" #regd); \
+ __asm__ __volatile__ ("movq %%" #regd ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ printf(#regd "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_m2m(op, mems, memd) \
+ { \
+ mmx_t mmx_trace; \
+ mmx_trace = (mems); \
+ printf(#op "_m2m(" #mems "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ mmx_trace = (memd); \
+ printf(#memd "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %0, %%mm0\n\t" \
+ #op " %1, %%mm0\n\t" \
+ "movq %%mm0, %0" \
+ : "=m" (memd) \
+ : "m" (mems)); \
+ mmx_trace = (memd); \
+ printf(#memd "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#else
+
+/* These macros are a lot simpler without the tracing...
+*/
+
+#define mmx_i2r(op, imm, reg) \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (imm) )
+
+#define mmx_m2r(op, mem, reg) \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "m" (mem))
+
+#define mmx_r2m(op, reg, mem) \
+ __asm__ __volatile__ (#op " %%" #reg ", %0" \
+ : "=m" (mem) \
+ : /* nothing */ )
+
+#define mmx_r2r(op, regs, regd) \
+ __asm__ __volatile__ (#op " %" #regs ", %" #regd)
+
+#define mmx_m2m(op, mems, memd) \
+ __asm__ __volatile__ ("movq %0, %%mm0\n\t" \
+ #op " %1, %%mm0\n\t" \
+ "movq %%mm0, %0" \
+ : "=m" (memd) \
+ : "m" (mems))
+
+#endif
+
+
+/* 1x64 MOVe Quadword
+ (this is both a load and a store...
+ in fact, it is the only way to store)
+*/
+#define movq_m2r(var, reg) mmx_m2r(movq, var, reg)
+#define movq_r2m(reg, var) mmx_r2m(movq, reg, var)
+#define movq_r2r(regs, regd) mmx_r2r(movq, regs, regd)
+#define movq(vars, vard) \
+ __asm__ __volatile__ ("movq %1, %%mm0\n\t" \
+ "movq %%mm0, %0" \
+ : "=X" (vard) \
+ : "X" (vars))
+
+
+/* 1x32 MOVe Doubleword
+ (like movq, this is both load and store...
+ but is most useful for moving things between
+ mmx registers and ordinary registers)
+*/
+#define movd_m2r(var, reg) mmx_m2r(movd, var, reg)
+#define movd_r2m(reg, var) mmx_r2m(movd, reg, var)
+#define movd_r2r(regs, regd) mmx_r2r(movd, regs, regd)
+#define movd(vars, vard) \
+ __asm__ __volatile__ ("movd %1, %%mm0\n\t" \
+ "movd %%mm0, %0" \
+ : "=X" (vard) \
+ : "X" (vars))
+
+
+/* 2x32, 4x16, and 8x8 Parallel ADDs
+*/
+#define paddd_m2r(var, reg) mmx_m2r(paddd, var, reg)
+#define paddd_r2r(regs, regd) mmx_r2r(paddd, regs, regd)
+#define paddd(vars, vard) mmx_m2m(paddd, vars, vard)
+
+#define paddw_m2r(var, reg) mmx_m2r(paddw, var, reg)
+#define paddw_r2r(regs, regd) mmx_r2r(paddw, regs, regd)
+#define paddw(vars, vard) mmx_m2m(paddw, vars, vard)
+
+#define paddb_m2r(var, reg) mmx_m2r(paddb, var, reg)
+#define paddb_r2r(regs, regd) mmx_r2r(paddb, regs, regd)
+#define paddb(vars, vard) mmx_m2m(paddb, vars, vard)
+
+
+/* 4x16 and 8x8 Parallel ADDs using Saturation arithmetic
+*/
+#define paddsw_m2r(var, reg) mmx_m2r(paddsw, var, reg)
+#define paddsw_r2r(regs, regd) mmx_r2r(paddsw, regs, regd)
+#define paddsw(vars, vard) mmx_m2m(paddsw, vars, vard)
+
+#define paddsb_m2r(var, reg) mmx_m2r(paddsb, var, reg)
+#define paddsb_r2r(regs, regd) mmx_r2r(paddsb, regs, regd)
+#define paddsb(vars, vard) mmx_m2m(paddsb, vars, vard)
+
+
+/* 4x16 and 8x8 Parallel ADDs using Unsigned Saturation arithmetic
+*/
+#define paddusw_m2r(var, reg) mmx_m2r(paddusw, var, reg)
+#define paddusw_r2r(regs, regd) mmx_r2r(paddusw, regs, regd)
+#define paddusw(vars, vard) mmx_m2m(paddusw, vars, vard)
+
+#define paddusb_m2r(var, reg) mmx_m2r(paddusb, var, reg)
+#define paddusb_r2r(regs, regd) mmx_r2r(paddusb, regs, regd)
+#define paddusb(vars, vard) mmx_m2m(paddusb, vars, vard)
+
+
+/* 2x32, 4x16, and 8x8 Parallel SUBs
+*/
+#define psubd_m2r(var, reg) mmx_m2r(psubd, var, reg)
+#define psubd_r2r(regs, regd) mmx_r2r(psubd, regs, regd)
+#define psubd(vars, vard) mmx_m2m(psubd, vars, vard)
+
+#define psubw_m2r(var, reg) mmx_m2r(psubw, var, reg)
+#define psubw_r2r(regs, regd) mmx_r2r(psubw, regs, regd)
+#define psubw(vars, vard) mmx_m2m(psubw, vars, vard)
+
+#define psubb_m2r(var, reg) mmx_m2r(psubb, var, reg)
+#define psubb_r2r(regs, regd) mmx_r2r(psubb, regs, regd)
+#define psubb(vars, vard) mmx_m2m(psubb, vars, vard)
+
+
+/* 4x16 and 8x8 Parallel SUBs using Saturation arithmetic
+*/
+#define psubsw_m2r(var, reg) mmx_m2r(psubsw, var, reg)
+#define psubsw_r2r(regs, regd) mmx_r2r(psubsw, regs, regd)
+#define psubsw(vars, vard) mmx_m2m(psubsw, vars, vard)
+
+#define psubsb_m2r(var, reg) mmx_m2r(psubsb, var, reg)
+#define psubsb_r2r(regs, regd) mmx_r2r(psubsb, regs, regd)
+#define psubsb(vars, vard) mmx_m2m(psubsb, vars, vard)
+
+
+/* 4x16 and 8x8 Parallel SUBs using Unsigned Saturation arithmetic
+*/
+#define psubusw_m2r(var, reg) mmx_m2r(psubusw, var, reg)
+#define psubusw_r2r(regs, regd) mmx_r2r(psubusw, regs, regd)
+#define psubusw(vars, vard) mmx_m2m(psubusw, vars, vard)
+
+#define psubusb_m2r(var, reg) mmx_m2r(psubusb, var, reg)
+#define psubusb_r2r(regs, regd) mmx_r2r(psubusb, regs, regd)
+#define psubusb(vars, vard) mmx_m2m(psubusb, vars, vard)
+
+
+/* 4x16 Parallel MULs giving Low 4x16 portions of results
+*/
+#define pmullw_m2r(var, reg) mmx_m2r(pmullw, var, reg)
+#define pmullw_r2r(regs, regd) mmx_r2r(pmullw, regs, regd)
+#define pmullw(vars, vard) mmx_m2m(pmullw, vars, vard)
+
+
+/* 4x16 Parallel MULs giving High 4x16 portions of results
+*/
+#define pmulhw_m2r(var, reg) mmx_m2r(pmulhw, var, reg)
+#define pmulhw_r2r(regs, regd) mmx_r2r(pmulhw, regs, regd)
+#define pmulhw(vars, vard) mmx_m2m(pmulhw, vars, vard)
+
+
+/* 4x16->2x32 Parallel Mul-ADD
+ (muls like pmullw, then adds adjacent 16-bit fields
+ in the multiply result to make the final 2x32 result)
+*/
+#define pmaddwd_m2r(var, reg) mmx_m2r(pmaddwd, var, reg)
+#define pmaddwd_r2r(regs, regd) mmx_r2r(pmaddwd, regs, regd)
+#define pmaddwd(vars, vard) mmx_m2m(pmaddwd, vars, vard)
+
+
+/* 1x64 bitwise AND
+*/
+#ifdef BROKEN_PAND
+#define pand_m2r(var, reg) \
+ { \
+ mmx_m2r(pandn, (mmx_t) -1LL, reg); \
+ mmx_m2r(pandn, var, reg); \
+ }
+#define pand_r2r(regs, regd) \
+ { \
+ mmx_m2r(pandn, (mmx_t) -1LL, regd); \
+ mmx_r2r(pandn, regs, regd) \
+ }
+#define pand(vars, vard) \
+ { \
+ movq_m2r(vard, mm0); \
+ mmx_m2r(pandn, (mmx_t) -1LL, mm0); \
+ mmx_m2r(pandn, vars, mm0); \
+ movq_r2m(mm0, vard); \
+ }
+#else
+#define pand_m2r(var, reg) mmx_m2r(pand, var, reg)
+#define pand_r2r(regs, regd) mmx_r2r(pand, regs, regd)
+#define pand(vars, vard) mmx_m2m(pand, vars, vard)
+#endif
+
+
+/* 1x64 bitwise AND with Not the destination
+*/
+#define pandn_m2r(var, reg) mmx_m2r(pandn, var, reg)
+#define pandn_r2r(regs, regd) mmx_r2r(pandn, regs, regd)
+#define pandn(vars, vard) mmx_m2m(pandn, vars, vard)
+
+
+/* 1x64 bitwise OR
+*/
+#define por_m2r(var, reg) mmx_m2r(por, var, reg)
+#define por_r2r(regs, regd) mmx_r2r(por, regs, regd)
+#define por(vars, vard) mmx_m2m(por, vars, vard)
+
+
+/* 1x64 bitwise eXclusive OR
+*/
+#define pxor_m2r(var, reg) mmx_m2r(pxor, var, reg)
+#define pxor_r2r(regs, regd) mmx_r2r(pxor, regs, regd)
+#define pxor(vars, vard) mmx_m2m(pxor, vars, vard)
+
+
+/* 2x32, 4x16, and 8x8 Parallel CoMPare for EQuality
+ (resulting fields are either 0 or -1)
+*/
+#define pcmpeqd_m2r(var, reg) mmx_m2r(pcmpeqd, var, reg)
+#define pcmpeqd_r2r(regs, regd) mmx_r2r(pcmpeqd, regs, regd)
+#define pcmpeqd(vars, vard) mmx_m2m(pcmpeqd, vars, vard)
+
+#define pcmpeqw_m2r(var, reg) mmx_m2r(pcmpeqw, var, reg)
+#define pcmpeqw_r2r(regs, regd) mmx_r2r(pcmpeqw, regs, regd)
+#define pcmpeqw(vars, vard) mmx_m2m(pcmpeqw, vars, vard)
+
+#define pcmpeqb_m2r(var, reg) mmx_m2r(pcmpeqb, var, reg)
+#define pcmpeqb_r2r(regs, regd) mmx_r2r(pcmpeqb, regs, regd)
+#define pcmpeqb(vars, vard) mmx_m2m(pcmpeqb, vars, vard)
+
+
+/* 2x32, 4x16, and 8x8 Parallel CoMPare for Greater Than
+ (resulting fields are either 0 or -1)
+*/
+#define pcmpgtd_m2r(var, reg) mmx_m2r(pcmpgtd, var, reg)
+#define pcmpgtd_r2r(regs, regd) mmx_r2r(pcmpgtd, regs, regd)
+#define pcmpgtd(vars, vard) mmx_m2m(pcmpgtd, vars, vard)
+
+#define pcmpgtw_m2r(var, reg) mmx_m2r(pcmpgtw, var, reg)
+#define pcmpgtw_r2r(regs, regd) mmx_r2r(pcmpgtw, regs, regd)
+#define pcmpgtw(vars, vard) mmx_m2m(pcmpgtw, vars, vard)
+
+#define pcmpgtb_m2r(var, reg) mmx_m2r(pcmpgtb, var, reg)
+#define pcmpgtb_r2r(regs, regd) mmx_r2r(pcmpgtb, regs, regd)
+#define pcmpgtb(vars, vard) mmx_m2m(pcmpgtb, vars, vard)
+
+
+/* 1x64, 2x32, and 4x16 Parallel Shift Left Logical
+*/
+#define psllq_i2r(imm, reg) mmx_i2r(psllq, imm, reg)
+#define psllq_m2r(var, reg) mmx_m2r(psllq, var, reg)
+#define psllq_r2r(regs, regd) mmx_r2r(psllq, regs, regd)
+#define psllq(vars, vard) mmx_m2m(psllq, vars, vard)
+
+#define pslld_i2r(imm, reg) mmx_i2r(pslld, imm, reg)
+#define pslld_m2r(var, reg) mmx_m2r(pslld, var, reg)
+#define pslld_r2r(regs, regd) mmx_r2r(pslld, regs, regd)
+#define pslld(vars, vard) mmx_m2m(pslld, vars, vard)
+
+#define psllw_i2r(imm, reg) mmx_i2r(psllw, imm, reg)
+#define psllw_m2r(var, reg) mmx_m2r(psllw, var, reg)
+#define psllw_r2r(regs, regd) mmx_r2r(psllw, regs, regd)
+#define psllw(vars, vard) mmx_m2m(psllw, vars, vard)
+
+
+/* 1x64, 2x32, and 4x16 Parallel Shift Right Logical
+*/
+#define psrlq_i2r(imm, reg) mmx_i2r(psrlq, imm, reg)
+#define psrlq_m2r(var, reg) mmx_m2r(psrlq, var, reg)
+#define psrlq_r2r(regs, regd) mmx_r2r(psrlq, regs, regd)
+#define psrlq(vars, vard) mmx_m2m(psrlq, vars, vard)
+
+#define psrld_i2r(imm, reg) mmx_i2r(psrld, imm, reg)
+#define psrld_m2r(var, reg) mmx_m2r(psrld, var, reg)
+#define psrld_r2r(regs, regd) mmx_r2r(psrld, regs, regd)
+#define psrld(vars, vard) mmx_m2m(psrld, vars, vard)
+
+#define psrlw_i2r(imm, reg) mmx_i2r(psrlw, imm, reg)
+#define psrlw_m2r(var, reg) mmx_m2r(psrlw, var, reg)
+#define psrlw_r2r(regs, regd) mmx_r2r(psrlw, regs, regd)
+#define psrlw(vars, vard) mmx_m2m(psrlw, vars, vard)
+
+
+/* 2x32 and 4x16 Parallel Shift Right Arithmetic
+*/
+#define psrad_i2r(imm, reg) mmx_i2r(psrad, imm, reg)
+#define psrad_m2r(var, reg) mmx_m2r(psrad, var, reg)
+#define psrad_r2r(regs, regd) mmx_r2r(psrad, regs, regd)
+#define psrad(vars, vard) mmx_m2m(psrad, vars, vard)
+
+#define psraw_i2r(imm, reg) mmx_i2r(psraw, imm, reg)
+#define psraw_m2r(var, reg) mmx_m2r(psraw, var, reg)
+#define psraw_r2r(regs, regd) mmx_r2r(psraw, regs, regd)
+#define psraw(vars, vard) mmx_m2m(psraw, vars, vard)
+
+
+/* 2x32->4x16 and 4x16->8x8 PACK and Signed Saturate
+ (packs source and dest fields into dest in that order)
+*/
+#define packssdw_m2r(var, reg) mmx_m2r(packssdw, var, reg)
+#define packssdw_r2r(regs, regd) mmx_r2r(packssdw, regs, regd)
+#define packssdw(vars, vard) mmx_m2m(packssdw, vars, vard)
+
+#define packsswb_m2r(var, reg) mmx_m2r(packsswb, var, reg)
+#define packsswb_r2r(regs, regd) mmx_r2r(packsswb, regs, regd)
+#define packsswb(vars, vard) mmx_m2m(packsswb, vars, vard)
+
+
+/* 4x16->8x8 PACK and Unsigned Saturate
+ (packs source and dest fields into dest in that order)
+*/
+#define packuswb_m2r(var, reg) mmx_m2r(packuswb, var, reg)
+#define packuswb_r2r(regs, regd) mmx_r2r(packuswb, regs, regd)
+#define packuswb(vars, vard) mmx_m2m(packuswb, vars, vard)
+
+
+/* 2x32->1x64, 4x16->2x32, and 8x8->4x16 UNPaCK Low
+ (interleaves low half of dest with low half of source
+ as padding in each result field)
+*/
+#define punpckldq_m2r(var, reg) mmx_m2r(punpckldq, var, reg)
+#define punpckldq_r2r(regs, regd) mmx_r2r(punpckldq, regs, regd)
+#define punpckldq(vars, vard) mmx_m2m(punpckldq, vars, vard)
+
+#define punpcklwd_m2r(var, reg) mmx_m2r(punpcklwd, var, reg)
+#define punpcklwd_r2r(regs, regd) mmx_r2r(punpcklwd, regs, regd)
+#define punpcklwd(vars, vard) mmx_m2m(punpcklwd, vars, vard)
+
+#define punpcklbw_m2r(var, reg) mmx_m2r(punpcklbw, var, reg)
+#define punpcklbw_r2r(regs, regd) mmx_r2r(punpcklbw, regs, regd)
+#define punpcklbw(vars, vard) mmx_m2m(punpcklbw, vars, vard)
+
+
+/* 2x32->1x64, 4x16->2x32, and 8x8->4x16 UNPaCK High
+ (interleaves high half of dest with high half of source
+ as padding in each result field)
+*/
+#define punpckhdq_m2r(var, reg) mmx_m2r(punpckhdq, var, reg)
+#define punpckhdq_r2r(regs, regd) mmx_r2r(punpckhdq, regs, regd)
+#define punpckhdq(vars, vard) mmx_m2m(punpckhdq, vars, vard)
+
+#define punpckhwd_m2r(var, reg) mmx_m2r(punpckhwd, var, reg)
+#define punpckhwd_r2r(regs, regd) mmx_r2r(punpckhwd, regs, regd)
+#define punpckhwd(vars, vard) mmx_m2m(punpckhwd, vars, vard)
+
+#define punpckhbw_m2r(var, reg) mmx_m2r(punpckhbw, var, reg)
+#define punpckhbw_r2r(regs, regd) mmx_r2r(punpckhbw, regs, regd)
+#define punpckhbw(vars, vard) mmx_m2m(punpckhbw, vars, vard)
+
+
+/* Empty MMx State
+ (used to clean-up when going from mmx to float use
+ of the registers that are shared by both; note that
+ there is no float-to-mmx operation needed, because
+ only the float tag word info is corruptible)
+*/
+#ifdef MMX_TRACE
+
+#ifdef __clang__
+#define emms() \
+ { \
+ printf("emms()\n"); \
+ __asm__ __volatile__ ("emms"); \
+ }
+#else
+#define emms() \
+ { \
+ printf("emms()\n"); \
+ __asm__ __volatile__ ("emms" \
+ "st(1)","st(2)","st(3)","st(4)","st(5)","st(6)","st(7)"); \
+ }
+#endif
+
+#else
+
+#ifdef __clang__
+#define emms() __asm__ __volatile__ ("emms")
+#else
+#define emms() __asm__ __volatile__ ("emms"::: \
+ "st(1)","st(2)","st(3)","st(4)","st(5)","st(6)","st(7)")
+#endif
+
+#endif
+
+#endif
+
diff --git a/gst/goom/motif_goom1.h b/gst/goom/motif_goom1.h
new file mode 100644
index 0000000000..82f381bdb0
--- /dev/null
+++ b/gst/goom/motif_goom1.h
@@ -0,0 +1,1044 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+static Motif CONV_MOTIF1 = {
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,14,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,13,9,9,7,2,2,9,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,11,
+ 11,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,12,7,4,0,0,0,2,0,0,3,14,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,12,10,9,9,4,1,0,
+ 1,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,10,3,0,0,0,1,1,3,5,0,0,1,14,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,14,6,3,1,1,4,9,1,
+ 1,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 10,3,0,0,2,7,13,14,14,14,7,0,0,2,14,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,12,1,9,15,15,15,15,3,
+ 0,13,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,12,4,
+ 0,0,2,10,15,15,15,15,15,15,1,0,0,10,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,9,0,2,14,15,15,15,7,
+ 0,9,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,14,6,0,0,
+ 2,9,15,15,15,15,15,15,15,13,0,0,3,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,11,0,0,10,15,15,15,9,
+ 0,9,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,13,3,0,1,5,
+ 5,4,4,4,6,12,15,15,15,13,0,0,7,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,14,0,0,5,15,15,15,10,
+ 0,7,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,11,1,0,3,3,1,
+ 0,0,0,0,0,0,5,13,15,12,0,0,13,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,13,14,15,
+ 15,15,15,15,15,15,15,15,14,0,0,1,15,15,15,12,
+ 0,3,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,10,1,2,4,0,0,1,
+ 9,12,12,12,9,3,0,2,14,5,0,7,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,14,7,4,4,1,1,12,
+ 15,15,15,15,15,15,15,15,14,1,0,0,12,15,15,15,
+ 1,0,12,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,10,0,3,2,0,0,3,12,
+ 15,15,15,15,15,14,2,1,13,2,0,12,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,14,3,0,0,0,0,0,2,
+ 13,15,15,15,15,15,15,15,14,1,0,0,8,15,15,15,
+ 1,0,9,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,12,2,1,0,0,0,9,14,15,
+ 15,15,15,15,15,14,1,1,11,0,3,14,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,7,4,11,12,10,1,0,
+ 3,12,15,15,15,15,15,15,13,1,1,0,4,15,15,15,
+ 2,0,10,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,14,2,1,0,0,3,12,15,15,15,
+ 15,15,15,15,15,11,0,5,9,1,12,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,5,1,13,15,15,12,1,
+ 0,1,9,15,15,15,15,15,14,2,5,0,1,14,15,15,
+ 2,0,7,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,14,3,0,0,0,7,14,15,15,15,15,
+ 15,15,15,15,15,9,0,8,7,4,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,7,0,7,8,11,15,13,
+ 2,0,0,3,10,15,15,15,15,5,11,0,0,11,15,15,
+ 6,0,2,14,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,6,0,0,0,8,15,15,15,15,15,15,
+ 15,15,15,15,15,6,0,4,0,6,15,15,15,15,15,15,
+ 14,9,14,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,11,0,3,3,0,8,15,
+ 14,5,0,0,0,4,12,15,15,5,13,2,0,6,15,15,
+ 12,0,0,11,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,10,0,0,0,8,15,15,15,15,15,15,15,
+ 15,15,15,15,10,1,7,6,4,13,15,15,15,15,13,11,
+ 6,0,8,11,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,1,1,11,2,0,5,
+ 14,15,8,0,0,0,0,7,15,5,14,6,0,2,15,15,
+ 15,3,0,5,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,13,1,0,0,7,15,15,15,15,15,15,15,15,
+ 15,15,15,15,7,9,15,15,15,15,15,15,12,6,2,1,
+ 1,1,8,6,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,4,0,6,12,1,0,
+ 3,13,15,11,2,0,0,0,8,4,14,10,0,0,13,15,
+ 15,7,0,1,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,5,0,0,5,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,13,15,15,15,15,14,8,3,1,2,7,11,
+ 5,4,5,6,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,8,0,1,14,11,0,
+ 0,1,9,15,14,5,0,0,2,4,14,13,0,0,10,15,
+ 15,12,0,0,12,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,10,0,0,1,14,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,14,13,7,2,0,5,9,15,15,15,
+ 5,3,6,9,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,13,0,0,9,15,12,
+ 2,0,0,4,13,14,4,0,3,2,12,15,1,0,5,15,
+ 15,14,1,0,8,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,2,0,0,9,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,11,6,1,0,2,3,10,15,15,15,15,7,
+ 1,2,4,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,5,0,3,14,15,
+ 9,2,0,0,1,6,12,13,13,1,9,12,0,0,2,14,
+ 15,15,4,0,4,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,10,0,0,2,14,15,15,15,15,15,15,15,15,15,15,
+ 13,9,6,0,1,2,9,10,15,15,15,15,14,7,1,0,
+ 6,2,4,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,11,0,0,9,15,
+ 4,4,11,6,1,0,0,1,1,0,10,4,0,0,0,12,
+ 15,15,9,0,1,14,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,3,0,0,8,15,15,15,15,15,15,15,13,12,4,4,
+ 1,1,3,10,12,15,15,15,15,15,9,2,1,0,1,6,
+ 6,0,10,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,4,0,3,14,
+ 4,3,15,15,14,9,7,9,1,0,0,0,0,1,0,7,
+ 15,15,13,0,0,9,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 13,0,0,1,14,15,15,15,15,15,12,9,1,0,1,4,
+ 7,15,15,15,15,15,15,14,8,2,0,0,0,2,13,9,
+ 0,4,14,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,11,0,0,9,
+ 3,0,8,14,15,15,15,15,10,5,4,4,7,4,0,3,
+ 15,15,15,4,0,3,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 5,0,0,5,15,15,15,15,14,8,7,8,10,12,14,15,
+ 15,15,15,15,15,15,11,1,0,0,0,5,11,15,13,1,
+ 1,13,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,3,0,4,
+ 4,0,0,2,6,10,15,15,15,15,15,15,15,10,0,0,
+ 12,15,15,9,0,0,12,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 1,0,0,10,15,15,15,15,15,13,14,15,15,15,15,15,
+ 15,15,15,15,14,7,1,0,0,3,12,15,15,15,6,0,
+ 7,15,15,15,12,10,9,10,12,14,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,9,0,0,
+ 8,3,1,4,1,0,1,12,15,15,15,15,15,14,2,0,
+ 6,15,15,15,2,0,6,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 0,0,1,14,15,15,15,15,14,5,15,15,15,15,15,15,
+ 15,15,15,7,2,0,0,1,8,15,15,15,15,12,0,2,
+ 14,15,12,4,0,0,0,0,0,1,5,10,14,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,14,0,0,
+ 5,4,1,14,15,10,7,13,15,15,15,15,15,15,8,0,
+ 1,14,15,15,7,0,1,14,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,
+ 0,0,4,15,15,15,15,15,13,2,13,15,15,15,15,15,
+ 12,7,0,0,0,0,5,12,15,15,15,15,14,3,0,9,
+ 11,1,0,0,1,1,0,1,0,0,0,0,2,12,15,15,
+ 15,15,15,15,15,15,15,14,15,15,15,15,15,15,2,0,
+ 5,2,1,14,15,14,13,15,15,15,15,15,15,15,12,0,
+ 0,10,15,15,13,0,0,9,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,12,
+ 0,0,4,15,15,15,15,15,12,0,12,15,15,15,12,6,
+ 0,0,0,0,6,14,15,15,15,15,15,15,7,0,0,12,
+ 1,0,0,2,2,1,1,7,12,8,3,0,0,1,13,15,
+ 15,15,15,15,15,8,4,8,12,15,15,15,15,15,8,0,
+ 4,2,0,14,15,11,9,15,15,15,15,15,15,15,15,3,
+ 0,5,15,15,15,5,0,3,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,12,
+ 0,0,4,15,15,15,15,15,12,0,12,15,13,3,1,0,
+ 0,0,5,12,15,15,15,15,15,15,15,12,0,0,7,7,
+ 0,0,0,0,0,0,0,1,12,15,15,12,3,0,5,15,
+ 15,15,15,14,5,0,0,0,0,2,2,3,7,14,9,8,
+ 14,2,1,14,15,2,12,13,15,15,15,15,15,15,15,9,
+ 0,0,13,15,15,10,0,0,12,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,
+ 0,0,5,15,15,15,15,15,12,0,11,10,1,0,0,1,
+ 5,14,15,15,15,15,15,15,15,15,15,6,0,2,7,0,
+ 0,0,0,1,2,7,4,0,3,14,15,15,14,2,0,12,
+ 15,15,15,9,0,1,2,1,0,0,0,0,0,1,3,7,
+ 15,3,0,14,15,4,12,15,15,15,15,15,15,15,15,14,
+ 1,0,8,15,15,14,1,0,8,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,
+ 0,0,4,15,15,15,15,15,12,0,2,0,0,1,10,15,
+ 15,15,15,15,15,15,15,15,15,15,12,0,0,6,0,0,
+ 0,1,10,14,15,15,11,1,0,9,15,15,15,8,0,9,
+ 15,15,12,4,8,14,15,8,1,0,0,0,0,0,1,9,
+ 15,2,0,13,15,1,9,15,15,15,15,15,15,15,15,15,
+ 6,0,1,14,15,14,1,0,3,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,14,
+ 1,0,1,14,15,15,15,15,12,1,3,7,9,13,15,15,
+ 15,15,15,15,15,15,15,15,15,15,3,0,2,3,0,4,
+ 0,8,15,15,15,15,15,13,1,2,14,15,15,10,0,6,
+ 15,14,2,6,15,15,15,1,3,7,3,0,0,0,0,1,
+ 11,1,0,11,12,0,12,15,15,15,15,15,15,15,15,15,
+ 11,0,0,9,15,15,4,0,0,12,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 6,0,0,9,15,15,15,15,15,12,14,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,11,0,0,3,0,8,14,
+ 2,5,15,15,15,15,15,15,5,0,8,15,15,12,0,4,
+ 15,5,2,14,15,15,10,0,13,15,13,2,4,5,5,0,
+ 9,1,0,10,9,1,14,15,15,15,15,15,15,15,15,15,
+ 13,0,0,3,15,15,9,0,0,8,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 12,0,0,3,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,3,0,1,1,5,14,15,
+ 11,0,12,15,15,15,15,15,14,1,1,14,15,12,0,4,
+ 10,0,9,15,15,11,1,8,15,15,8,1,14,15,14,2,
+ 5,0,0,10,6,2,15,15,15,15,15,15,15,15,15,15,
+ 15,3,0,0,12,15,13,0,0,2,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,3,0,0,10,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,10,0,0,1,0,11,15,15,
+ 15,2,6,15,15,15,15,15,15,6,0,9,15,13,0,6,
+ 3,0,13,15,14,2,6,15,15,13,1,8,15,15,15,4,
+ 3,1,0,10,7,2,15,15,15,15,15,15,15,15,15,15,
+ 15,9,0,0,6,15,15,3,0,0,13,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,9,0,0,2,14,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,11,10,2,0,3,0,3,15,15,15,
+ 15,8,1,14,15,15,15,15,15,13,0,2,15,9,1,10,
+ 0,3,15,15,6,2,14,15,14,3,1,14,15,15,15,2,
+ 4,0,0,12,5,3,15,15,15,15,15,15,15,15,15,15,
+ 15,14,1,0,1,14,15,5,0,0,12,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,4,0,0,6,15,15,15,15,15,15,15,15,14,12,
+ 12,9,5,4,4,3,0,0,0,0,4,0,8,15,15,15,
+ 15,13,1,10,15,15,15,15,15,15,2,0,11,3,5,10,
+ 0,7,15,9,1,11,15,15,8,0,6,15,15,15,10,0,
+ 3,0,0,13,3,6,15,15,15,15,15,15,15,15,15,15,
+ 15,15,6,0,0,12,15,5,0,0,7,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,13,1,0,0,8,14,15,15,13,6,4,4,1,0,
+ 0,0,0,0,0,0,2,0,0,4,3,0,12,15,15,15,
+ 15,15,5,3,15,15,15,15,14,8,0,0,1,1,12,9,
+ 0,9,10,0,6,15,15,15,2,2,14,15,15,13,2,0,
+ 4,0,1,13,0,10,15,15,15,15,15,15,15,15,15,15,
+ 15,15,13,1,0,10,15,10,0,0,5,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,9,0,0,0,3,2,2,1,0,0,0,1,4,
+ 4,5,10,12,12,12,11,0,0,11,4,0,12,15,15,15,
+ 15,15,12,0,7,13,15,15,5,0,0,0,1,6,15,9,
+ 0,3,0,0,1,6,14,10,0,12,15,15,11,2,0,2,
+ 3,0,3,12,1,11,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,3,0,6,8,7,0,0,5,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,9,1,0,0,0,2,6,10,12,12,14,15,
+ 15,15,15,15,11,5,4,0,2,14,4,0,12,15,15,15,
+ 15,15,15,4,0,3,13,6,0,0,0,1,2,14,15,12,
+ 0,0,0,0,0,0,2,2,6,15,14,8,0,0,0,7,
+ 4,0,4,12,0,12,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,0,0,0,0,0,0,1,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,12,2,0,0,0,1,6,11,15,15,15,
+ 15,15,15,15,2,1,0,0,9,15,6,0,7,15,15,15,
+ 15,15,15,13,2,0,0,0,0,0,0,1,12,15,15,15,
+ 4,0,0,0,0,0,0,6,13,6,1,0,0,4,13,15,
+ 6,0,6,12,0,12,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,14,5,0,0,0,0,0,5,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,14,5,0,0,0,0,0,2,4,5,
+ 7,3,6,3,0,2,0,2,15,15,11,0,0,9,15,15,
+ 15,15,15,15,11,0,0,0,0,0,2,11,15,15,15,15,
+ 12,1,0,0,1,4,6,10,2,0,0,0,7,14,15,15,
+ 9,0,9,9,0,12,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,13,9,8,9,7,13,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,12,6,1,0,0,0,0,0,
+ 0,0,0,2,8,0,0,9,15,15,14,4,0,0,3,10,
+ 14,15,15,15,15,13,3,0,0,4,14,15,15,15,15,15,
+ 15,11,2,0,0,1,1,0,0,0,1,11,15,15,15,15,
+ 9,0,10,5,3,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,14,12,10,5,4,6,
+ 2,4,10,14,8,0,1,14,15,15,15,14,5,0,0,0,
+ 1,2,4,4,4,3,1,2,9,14,15,15,15,15,15,15,
+ 15,15,15,11,11,13,10,9,9,11,15,15,15,15,15,15,
+ 10,0,8,2,4,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 2,7,15,14,1,0,6,15,15,15,15,15,15,10,6,4,
+ 2,2,4,4,4,3,9,14,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 11,0,3,1,4,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,11,
+ 1,10,15,9,0,0,13,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 11,0,11,11,11,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,2,
+ 5,15,14,2,0,5,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 14,1,13,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,10,1,
+ 13,15,11,0,0,12,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,5,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,8,1,
+ 15,15,5,0,3,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,10,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,12,2,3,
+ 15,14,1,0,7,15,15,15,15,15,13,15,15,15,15,14,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,13,12,12,11,9,4,7,14,15,
+ 14,13,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,12,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,14,3,0,10,
+ 15,9,0,0,8,7,4,2,2,1,0,3,4,3,4,9,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,14,13,11,7,4,2,0,0,0,0,0,0,1,12,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,14,13,11,7,4,2,2,13,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,12,0,3,11,
+ 7,1,0,0,0,0,0,1,4,9,11,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,14,12,11,9,7,4,
+ 3,1,0,0,0,0,0,0,0,0,0,2,11,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,13,11,8,
+ 4,3,1,0,0,0,0,3,8,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,14,11,3,0,0,0,
+ 0,0,0,2,6,9,12,14,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,13,9,6,3,1,0,0,0,0,0,0,
+ 0,0,0,0,1,4,7,11,12,12,12,14,15,15,15,15,
+ 15,15,15,15,15,15,15,14,12,11,7,4,2,0,0,0,
+ 0,0,0,1,5,10,13,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,14,12,7,3,1,0,0,0,2,5,
+ 2,0,2,14,15,15,15,15,15,15,15,15,15,14,13,12,
+ 11,9,6,4,2,0,0,0,0,0,0,0,0,1,2,4,
+ 5,9,11,13,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,14,12,11,7,4,3,1,0,0,0,0,0,0,0,1,
+ 4,5,10,14,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,13,10,5,1,0,0,0,1,0,0,2,13,14,
+ 1,0,8,15,15,14,12,11,9,8,4,3,2,1,0,0,
+ 0,0,0,0,1,3,2,3,5,9,10,12,13,14,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,13,11,10,6,4,
+ 3,1,0,0,0,0,0,0,0,0,1,4,7,11,13,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,11,4,0,0,0,1,4,9,13,13,1,0,0,1,7,
+ 0,0,7,8,5,2,0,0,0,0,0,0,1,2,3,4,
+ 5,9,10,12,14,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,14,11,8,4,3,1,0,0,0,0,0,
+ 0,0,0,0,1,4,5,9,12,13,15,15,15,15,15,15,
+ 15,15,14,12,9,8,8,7,4,2,5,4,5,5,12,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,14,10,5,
+ 1,0,1,3,6,11,14,15,15,15,15,13,12,8,3,2,
+ 0,0,1,1,3,3,4,5,8,10,12,13,14,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,
+ 11,9,6,4,2,1,0,0,0,0,0,0,0,1,2,4,
+ 6,10,11,13,15,15,15,15,15,15,15,15,13,11,9,7,
+ 4,2,1,0,0,0,0,2,4,7,12,14,14,14,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,10,5,1,1,3,
+ 8,12,14,15,15,15,15,15,15,15,15,15,15,15,15,9,
+ 3,11,14,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,8,6,9,9,9,9,9,8,5,4,4,3,1,0,
+ 0,0,0,0,1,2,3,2,4,5,9,11,12,14,15,15,
+ 15,15,15,15,15,15,15,14,12,9,5,2,0,0,0,0,
+ 0,1,2,4,7,10,14,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,14,9,4,1,3,9,13,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,
+ 11,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,13,3,1,1,1,1,1,1,1,0,0,0,0,2,3,
+ 5,8,10,12,14,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,12,5,2,0,0,0,1,3,4,7,10,
+ 12,13,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,13,11,13,13,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,14,12,12,12,13,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,8,1,0,1,4,7,11,13,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,13,7,8,11,14,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15}
+ };
diff --git a/gst/goom/motif_goom2.h b/gst/goom/motif_goom2.h
new file mode 100644
index 0000000000..2de92afa29
--- /dev/null
+++ b/gst/goom/motif_goom2.h
@@ -0,0 +1,1044 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+static Motif CONV_MOTIF2 = {
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,12,5,14,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,12,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,10,1,14,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,10,0,12,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,6,0,12,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,7,0,8,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,13,2,0,10,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,6,0,2,14,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,5,0,0,10,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,9,0,0,12,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,14,9,0,0,1,14,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,8,0,0,8,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,14,8,3,0,0,0,9,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,11,0,0,2,14,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,13,9,5,3,4,1,0,0,0,0,7,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,14,4,0,0,4,11,13,13,15,15,14,12,10,8,5,
+ 6,4,1,0,0,0,0,0,0,0,0,0,0,14,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,12,1,0,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0,9,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 13,9,10,13,14,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,3,0,0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,2,5,6,0,0,0,0,12,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 5,0,0,0,3,10,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,14,9,2,1,0,0,0,1,4,6,6,1,
+ 0,0,0,8,13,15,15,15,12,1,0,2,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,14,
+ 2,0,0,0,0,0,4,12,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,3,0,0,10,15,15,15,10,
+ 0,0,4,15,15,15,15,15,15,2,0,6,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,5,
+ 3,11,5,0,0,0,0,0,4,11,14,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,0,13,15,15,15,11,
+ 0,0,7,15,15,15,15,15,15,1,0,9,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,9,0,
+ 13,15,15,12,5,0,0,0,0,0,1,8,14,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,1,14,15,15,15,11,
+ 0,0,7,15,15,15,15,15,14,0,0,9,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,11,1,10,
+ 15,15,15,15,15,11,5,0,0,0,0,0,1,6,13,15,
+ 15,15,15,15,14,8,11,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,6,0,2,15,15,15,15,11,
+ 0,0,6,15,15,15,15,15,13,0,0,11,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,11,1,6,15,
+ 15,15,15,15,15,15,15,14,5,0,0,0,0,0,0,6,
+ 14,15,15,15,6,0,4,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,5,15,15,15,15,11,
+ 0,0,5,15,15,15,15,15,12,0,0,12,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,13,2,1,13,15,
+ 15,15,15,15,15,15,15,15,15,12,2,0,0,0,0,0,
+ 1,6,11,7,0,0,4,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,7,15,15,15,15,11,
+ 0,0,6,15,15,15,15,15,12,0,0,12,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,5,0,7,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,11,5,0,0,0,
+ 0,0,0,0,0,1,11,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,10,15,15,15,15,11,
+ 0,0,6,15,15,15,15,15,12,0,0,12,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,10,0,4,14,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,13,7,0,
+ 0,0,0,0,0,1,6,12,14,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,10,15,15,15,15,11,
+ 0,0,7,15,15,15,15,15,12,0,0,12,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,13,1,1,12,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,
+ 5,0,0,0,0,0,0,0,3,10,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,10,15,15,15,15,11,
+ 0,0,7,15,15,15,15,15,11,0,0,13,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,14,4,0,8,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 13,0,0,0,1,0,0,0,0,1,13,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,10,15,15,15,15,11,
+ 0,0,8,15,15,15,15,15,8,0,2,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,9,0,4,14,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,14,
+ 4,0,0,5,13,12,6,2,0,2,13,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,7,0,10,15,15,15,15,11,
+ 0,0,7,15,15,15,15,15,4,0,4,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,13,1,1,13,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,11,
+ 0,0,1,13,15,15,15,14,9,13,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,5,0,6,15,15,15,15,11,
+ 0,0,8,15,15,15,15,15,2,0,8,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,5,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,14,11,15,15,15,15,15,15,15,9,
+ 0,0,10,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,4,0,4,15,15,15,15,11,
+ 0,0,7,15,15,15,15,13,0,0,11,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,10,0,3,14,15,15,15,15,15,15,
+ 15,15,15,15,15,14,3,0,13,15,15,15,15,15,15,14,
+ 9,11,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,4,0,4,15,15,15,15,11,
+ 0,0,8,15,15,15,15,12,0,0,12,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,14,2,1,12,15,15,15,15,15,15,15,
+ 15,15,15,15,14,3,0,0,9,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,6,0,3,15,15,15,15,13,
+ 1,0,8,15,15,15,15,12,0,0,12,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,9,0,5,15,15,15,15,15,15,15,15,
+ 15,15,15,14,4,0,0,0,10,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,8,0,2,15,15,15,15,15,
+ 3,0,13,15,15,15,15,12,0,0,12,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,13,2,1,12,15,15,15,15,15,15,15,15,
+ 15,15,15,7,0,0,0,0,8,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,10,0,1,14,15,15,15,15,
+ 11,5,15,15,15,15,15,12,0,0,11,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,6,0,7,15,15,15,15,15,15,15,15,15,
+ 15,15,8,0,0,0,0,0,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,12,0,0,12,15,15,15,15,
+ 15,14,15,15,15,15,15,10,0,0,12,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,13,1,2,14,15,15,15,15,15,15,15,15,15,
+ 15,10,0,0,0,6,6,0,0,0,5,12,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,12,12,
+ 15,15,15,15,15,15,15,15,13,0,0,11,15,15,15,15,
+ 15,15,15,15,15,15,15,9,0,1,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,7,0,8,15,15,15,15,15,15,15,15,15,15,
+ 15,9,0,0,4,15,15,8,0,0,0,1,5,13,15,15,
+ 15,15,15,15,15,15,15,15,15,15,12,8,7,6,5,3,
+ 3,3,4,12,15,15,15,15,15,15,15,15,15,7,0,6,
+ 15,15,15,15,15,15,15,15,14,1,0,10,15,15,15,15,
+ 15,15,15,15,15,15,15,6,0,3,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,13,1,0,13,15,15,15,15,15,15,15,15,15,15,
+ 15,14,7,8,13,15,15,15,11,2,0,0,0,0,5,11,
+ 15,15,15,15,15,15,15,15,13,3,0,0,0,0,0,0,
+ 0,0,0,5,15,15,15,15,15,15,15,15,12,1,0,0,
+ 3,11,15,15,15,15,15,15,13,1,0,10,15,15,15,15,
+ 15,15,15,15,15,15,15,3,0,5,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,9,0,5,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,14,8,1,0,0,0,0,
+ 4,12,15,15,15,15,15,15,4,0,0,0,0,0,0,0,
+ 0,0,0,2,15,15,15,15,15,15,15,14,4,0,0,0,
+ 0,0,9,15,15,15,15,15,14,1,0,10,15,15,15,15,
+ 15,15,15,15,15,15,15,2,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,14,4,0,11,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,14,8,2,0,0,
+ 0,0,4,10,14,15,15,15,4,0,0,0,0,0,0,0,
+ 0,0,0,3,15,15,15,15,15,15,15,6,0,0,0,2,
+ 3,0,0,8,15,15,15,15,14,1,0,10,15,15,15,15,
+ 15,15,15,15,15,15,15,1,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 14,5,0,4,14,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,14,9,3,
+ 0,0,0,0,2,5,10,15,5,0,1,11,11,12,13,15,
+ 11,0,0,7,15,15,15,15,15,15,8,0,0,0,1,12,
+ 14,6,0,0,7,14,15,15,14,1,0,9,15,15,15,15,
+ 15,15,15,15,15,15,15,2,0,10,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 9,0,1,13,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,14,
+ 10,2,0,0,0,0,1,14,4,0,1,14,15,15,15,15,
+ 9,0,0,9,15,15,15,15,15,9,0,0,0,0,9,15,
+ 15,15,7,0,0,6,14,15,15,3,0,6,15,15,15,15,
+ 15,15,15,15,15,15,15,1,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,9,
+ 0,0,1,10,14,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,13,1,0,0,0,1,14,3,0,0,14,15,15,15,15,
+ 5,0,0,11,15,15,15,15,13,1,0,0,0,6,15,15,
+ 15,15,15,8,0,0,2,10,15,6,0,3,15,15,15,15,
+ 15,15,15,15,15,15,15,2,0,10,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,1,
+ 0,0,0,0,3,9,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,6,1,0,0,0,8,15,1,0,0,14,15,15,15,15,
+ 4,0,0,13,15,15,15,14,4,0,0,0,3,14,15,15,
+ 15,15,15,15,5,0,0,1,14,9,0,1,14,15,15,15,
+ 15,15,15,15,15,15,15,1,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,13,1,
+ 0,0,0,0,0,0,4,12,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 9,0,0,0,0,7,15,15,1,0,0,14,15,15,15,14,
+ 2,0,1,14,15,15,15,12,0,0,0,3,13,15,15,15,
+ 15,15,15,9,0,0,0,1,14,12,0,0,12,15,15,15,
+ 15,15,15,15,15,15,14,1,0,10,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,12,
+ 3,0,0,0,0,0,0,1,8,14,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,9,
+ 0,0,0,0,7,15,15,15,1,0,0,14,15,15,15,13,
+ 0,0,1,15,15,15,15,12,0,0,0,6,14,15,15,15,
+ 15,15,12,0,0,0,0,3,14,12,0,0,12,15,15,15,
+ 15,15,15,15,15,15,12,0,0,12,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,13,3,0,0,0,0,0,0,1,6,13,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,12,0,
+ 0,0,0,3,15,15,15,12,0,0,0,14,15,15,15,11,
+ 0,0,3,15,15,15,15,15,12,7,0,0,4,14,15,15,
+ 15,11,1,0,0,0,4,13,15,12,0,0,12,15,15,15,
+ 15,15,15,15,15,15,10,0,1,14,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,7,0,0,0,0,0,0,0,3,8,12,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,4,0,
+ 0,0,1,13,15,15,15,6,0,0,0,14,15,15,15,8,
+ 0,0,7,15,15,15,15,15,15,15,8,1,0,2,13,15,
+ 14,2,0,0,0,4,14,15,15,13,1,0,10,15,15,15,
+ 15,15,15,15,15,15,9,0,2,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,11,6,2,0,0,0,0,0,0,0,1,
+ 10,15,15,15,15,15,15,15,15,15,15,15,15,8,0,0,
+ 0,0,10,15,15,15,15,4,0,0,1,15,15,15,15,4,
+ 0,0,8,15,15,15,15,15,15,15,15,10,1,0,1,8,
+ 2,0,0,0,5,15,15,15,15,15,2,0,6,15,15,15,
+ 15,15,15,15,15,15,9,0,1,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,9,1,0,0,0,0,0,0,
+ 0,1,7,13,14,15,15,15,15,15,15,15,9,0,0,0,
+ 0,6,15,15,15,15,15,4,0,0,4,15,15,15,14,1,
+ 0,0,9,15,15,15,15,15,15,15,15,15,12,2,0,0,
+ 0,0,0,4,14,15,15,15,15,15,4,0,4,15,15,15,
+ 15,15,15,15,15,15,7,0,0,14,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,14,11,6,2,0,0,0,
+ 0,0,0,0,1,9,12,15,15,15,15,14,3,0,0,0,
+ 4,15,15,15,15,15,15,4,0,0,3,6,4,4,2,0,
+ 0,0,13,15,15,15,15,15,15,15,15,15,15,12,1,0,
+ 0,0,3,14,15,15,15,15,15,15,4,0,4,15,15,15,
+ 15,15,15,15,15,15,5,0,0,12,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,8,2,0,
+ 0,0,0,0,0,0,0,1,9,15,15,5,0,0,0,0,
+ 12,15,15,15,15,15,15,4,0,0,0,0,0,0,0,0,
+ 0,3,15,15,15,15,15,15,15,15,15,15,15,14,4,0,
+ 0,1,12,15,15,15,15,15,15,15,6,0,1,14,15,15,
+ 15,15,15,15,15,15,5,0,0,13,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,12,
+ 7,1,0,0,0,0,0,0,0,5,7,0,0,0,0,10,
+ 15,15,15,15,15,15,15,7,0,0,0,0,0,0,0,0,
+ 1,10,15,15,15,15,15,15,15,15,15,15,15,14,3,0,
+ 3,12,15,15,15,15,15,15,15,15,12,0,0,12,15,15,
+ 15,15,15,15,15,15,5,0,0,1,1,4,11,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,11,6,1,0,0,0,0,0,0,0,0,0,7,15,
+ 15,15,15,15,15,15,15,14,7,4,4,4,5,9,12,13,
+ 14,15,15,15,15,15,15,15,15,15,15,15,15,15,11,9,
+ 14,15,15,14,12,11,11,11,10,9,7,0,0,5,13,15,
+ 15,15,15,15,15,12,1,0,0,0,0,0,0,10,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,14,7,1,0,0,0,0,0,3,14,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,13,2,0,0,0,0,0,0,0,0,0,0,0,8,
+ 15,15,15,15,15,11,0,0,0,0,0,0,0,9,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,13,5,0,0,0,0,12,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,8,0,0,0,0,0,0,0,0,0,0,0,0,5,
+ 15,15,15,15,15,15,10,5,6,7,7,7,9,14,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,8,3,2,2,2,2,5,14,15,
+ 15,15,15,15,15,15,15,15,15,10,3,0,6,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,13,3,1,0,1,0,1,1,2,4,4,3,9,14,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,12,4,0,1,6,7,7,4,1,3,13,
+ 15,15,15,15,15,15,15,15,15,15,14,10,13,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,11,11,15,15,15,15,
+ 15,15,15,14,14,14,14,14,14,14,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,14,2,0,4,13,15,15,15,15,10,0,12,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,14,13,14,12,12,12,12,12,12,12,
+ 12,14,15,15,15,15,15,15,15,15,4,14,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,7,1,5,14,15,15,15,15,15,12,1,11,
+ 15,15,15,13,12,13,15,15,14,11,13,15,15,15,15,15,
+ 15,15,15,11,6,3,1,1,1,0,0,0,0,0,0,0,
+ 0,1,4,7,11,14,15,15,15,14,4,15,13,10,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,14,7,4,5,
+ 12,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,13,1,1,14,15,15,14,10,12,15,11,1,12,
+ 15,15,11,1,0,4,15,15,6,0,2,14,15,15,15,15,
+ 15,15,14,8,6,3,3,2,2,1,0,0,0,0,0,0,
+ 0,0,0,0,0,3,11,15,15,11,8,15,12,6,15,9,
+ 8,15,15,15,15,15,15,15,15,15,15,15,10,4,4,1,
+ 4,15,15,15,15,11,6,2,8,14,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,0,4,15,15,15,11,2,10,15,9,1,13,
+ 15,13,1,7,6,2,14,14,1,2,1,14,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,13,12,12,12,12,12,12,
+ 11,11,11,10,9,10,12,15,15,6,7,15,9,4,15,4,
+ 1,14,15,15,15,15,15,15,15,15,15,15,2,11,15,4,
+ 4,15,15,15,15,3,9,4,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,0,4,15,15,15,5,0,6,6,1,9,15,
+ 15,4,1,13,10,1,13,9,2,7,1,14,14,14,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,3,5,15,3,5,14,1,
+ 0,12,13,9,14,15,15,15,15,15,15,15,2,2,4,1,
+ 6,15,15,15,14,1,5,6,0,9,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,0,4,15,15,15,12,4,2,2,10,15,15,
+ 11,0,6,15,12,0,10,7,9,10,1,14,7,14,15,15,
+ 15,15,15,15,15,15,13,12,11,11,10,9,9,10,11,13,
+ 15,15,15,15,15,15,15,15,15,1,9,15,2,7,14,1,
+ 0,10,7,0,8,15,15,15,15,15,15,15,11,4,4,4,
+ 13,15,15,15,15,10,2,2,4,14,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,0,4,15,15,15,15,15,15,15,15,15,15,
+ 4,2,14,15,15,1,9,5,14,9,1,14,8,14,15,15,
+ 15,15,15,15,15,10,3,0,1,0,0,0,0,0,0,5,
+ 15,15,15,15,15,15,15,15,15,1,9,14,1,8,14,1,
+ 0,11,13,6,11,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,14,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,0,4,15,15,15,15,15,15,15,15,15,11,
+ 0,6,15,15,15,1,5,3,13,10,0,6,8,15,15,15,
+ 15,15,15,15,15,15,13,12,12,11,10,9,9,10,11,13,
+ 15,15,15,15,15,15,15,15,15,1,9,12,1,11,15,4,
+ 1,14,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 14,10,4,2,12,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,12,1,3,14,15,15,15,15,15,15,15,15,4,
+ 3,14,15,15,15,5,1,8,15,14,5,2,9,15,15,15,
+ 15,15,15,15,15,15,15,15,15,11,9,13,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,1,9,12,1,12,15,13,
+ 11,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 10,2,9,2,3,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,14,4,0,5,14,15,15,15,15,15,15,11,0,
+ 6,15,15,15,15,15,14,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,8,1,0,3,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,1,9,15,11,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 7,1,12,6,1,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,11,1,0,3,8,9,9,10,11,9,5,4,
+ 13,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,12,9,13,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,5,11,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 10,3,4,1,5,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,12,2,0,0,0,0,0,0,1,8,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,14,12,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,13,8,8,10,9,10,11,14,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15},
+ {15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,
+ 15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15}
+ };
diff --git a/gst/goom/plugin_info.c b/gst/goom/plugin_info.c
new file mode 100644
index 0000000000..96d570c63b
--- /dev/null
+++ b/gst/goom/plugin_info.c
@@ -0,0 +1,262 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "goom_config.h"
+
+#include "goom_plugin_info.h"
+#include "goom_fx.h"
+#include "drawmethods.h"
+#include <math.h>
+#include <stdio.h>
+#ifdef HAVE_ORC
+#include <orc/orc.h>
+#endif
+
+
+#if defined (HAVE_CPU_PPC64) || defined (HAVE_CPU_PPC)
+#include "ppc_zoom_ultimate.h"
+#include "ppc_drawings.h"
+#endif /* HAVE_CPU_PPC64 || HAVE_CPU_PPC */
+
+
+#ifdef HAVE_MMX
+#include "mmx.h"
+#endif /* HAVE_MMX */
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY_EXTERN (goom_debug);
+#define GST_CAT_DEFAULT goom_debug
+
+/* Pick the fastest draw_line / zoom_filter implementations available for
+ * the host CPU.  Defaults to the portable C versions; upgrades to MMX or
+ * extended-MMX variants on x86 when Orc reports support.  All PPC paths
+ * are currently compiled out (see #if 0 below). */
+static void
+setOptimizedMethods (PluginInfo * p)
+{
+#ifdef HAVE_ORC
+ /* Orc reports which MMX-family instruction sets the CPU supports. */
+ unsigned int cpuFlavour =
+ orc_target_get_default_flags (orc_target_get_by_name ("mmx"));
+#else
+ unsigned int cpuFlavour = 0;
+#endif
+
+ /* set default methods */
+ p->methods.draw_line = draw_line;
+ p->methods.zoom_filter = zoom_filter_c;
+/* p->methods.create_output_with_brightness = create_output_with_brightness;*/
+
+ GST_INFO ("orc cpu flags: 0x%08x", cpuFlavour);
+
+/* FIXME: what about HAVE_CPU_X86_64 ? */
+#ifdef HAVE_CPU_I386
+#ifdef HAVE_MMX
+#ifdef HAVE_ORC
+ GST_INFO ("have an x86");
+ if (cpuFlavour & ORC_TARGET_MMX_MMXEXT) {
+ GST_INFO ("Extended MMX detected. Using the fastest methods!");
+ p->methods.draw_line = draw_line_xmmx;
+ p->methods.zoom_filter = zoom_filter_xmmx;
+ } else if (cpuFlavour & ORC_TARGET_MMX_MMX) {
+ GST_INFO ("MMX detected. Using fast methods!");
+ p->methods.draw_line = draw_line_mmx;
+ p->methods.zoom_filter = zoom_filter_mmx;
+ } else {
+ GST_INFO ("Too bad ! No SIMD optimization available for your CPU.");
+ }
+#endif
+#endif
+#endif /* HAVE_CPU_I386 */
+
+/* disable all PPC stuff until someone finds out what to use here instead of
+ * CPU_OPTION_64_BITS, and until someone fixes the assembly build for ppc */
+#if 0
+#ifdef HAVE_CPU_PPC64
+ if ((cpuFlavour & CPU_OPTION_64_BITS) != 0) {
+/* p->methods.create_output_with_brightness = ppc_brightness_G5; */
+ p->methods.zoom_filter = ppc_zoom_generic;
+ }
+#endif /* HAVE_CPU_PPC64 */
+
+#ifdef HAVE_CPU_PPC
+ if ((cpuFlavour & ORC_TARGET_ALTIVEC_ALTIVEC) != 0) {
+/* p->methods.create_output_with_brightness = ppc_brightness_G4; */
+ p->methods.zoom_filter = ppc_zoom_G4;
+ } else {
+/* p->methods.create_output_with_brightness = ppc_brightness_generic;*/
+ p->methods.zoom_filter = ppc_zoom_generic;
+ }
+#endif /* HAVE_CPU_PPC */
+#endif
+}
+
+/* Initialize a PluginInfo from scratch: zero it, set up sound-analysis
+ * state and its tunable parameters, the visual-state table, the
+ * update-loop state, CPU-optimized method pointers, and the sine lookup
+ * table.  nbVisuals is the number of VisualFX slots to allocate; the
+ * slots themselves are filled later via plugin_info_add_visual(). */
+void
+plugin_info_init (PluginInfo * pp, int nbVisuals)
+{
+
+ int i;
+
+ memset (pp, 0, sizeof (PluginInfo));
+
+ /* sound-analysis state; limits start at 1 to avoid divide-by-zero style
+ * degenerate values before the first frames are analyzed */
+ pp->sound.speedvar = pp->sound.accelvar = pp->sound.totalgoom = 0;
+ pp->sound.prov_max = 0;
+ pp->sound.goom_limit = 1;
+ pp->sound.allTimesMax = 1;
+ pp->sound.timeSinceLastGoom = 1;
+ pp->sound.timeSinceLastBigGoom = 1;
+ pp->sound.cycle = 0;
+
+ /* read-only feedback parameters exposed to the UI */
+ secure_f_feedback (&pp->sound.volume_p, "Sound Volume");
+ secure_f_feedback (&pp->sound.accel_p, "Sound Acceleration");
+ secure_f_feedback (&pp->sound.speed_p, "Sound Speed");
+ secure_f_feedback (&pp->sound.goom_limit_p, "Goom Limit");
+ secure_f_feedback (&pp->sound.last_goom_p, "Goom Detection");
+ secure_f_feedback (&pp->sound.last_biggoom_p, "Big Goom Detection");
+ secure_f_feedback (&pp->sound.goom_power_p, "Goom Power");
+
+ /* user-tunable integer parameters (value / min / max / step) */
+ secure_i_param (&pp->sound.biggoom_speed_limit_p, "Big Goom Speed Limit");
+ IVAL (pp->sound.biggoom_speed_limit_p) = 10;
+ IMIN (pp->sound.biggoom_speed_limit_p) = 0;
+ IMAX (pp->sound.biggoom_speed_limit_p) = 100;
+ ISTEP (pp->sound.biggoom_speed_limit_p) = 1;
+
+ secure_i_param (&pp->sound.biggoom_factor_p, "Big Goom Factor");
+ IVAL (pp->sound.biggoom_factor_p) = 10;
+ IMIN (pp->sound.biggoom_factor_p) = 0;
+ IMAX (pp->sound.biggoom_factor_p) = 100;
+ ISTEP (pp->sound.biggoom_factor_p) = 1;
+
+ /* 11 slots: two NULL entries below act as group separators */
+ plugin_parameters (&pp->sound.params, "Sound", 11);
+
+ pp->nbParams = 0;
+ pp->params = NULL;
+ pp->nbVisuals = nbVisuals;
+ /* NOTE(review): malloc result is not checked here — OOM would crash on
+ * first use of pp->visuals */
+ pp->visuals = (VisualFX **) malloc (sizeof (VisualFX *) * nbVisuals);
+
+ pp->sound.params.params[0] = &pp->sound.biggoom_speed_limit_p;
+ pp->sound.params.params[1] = &pp->sound.biggoom_factor_p;
+ pp->sound.params.params[2] = 0;
+ pp->sound.params.params[3] = &pp->sound.volume_p;
+ pp->sound.params.params[4] = &pp->sound.accel_p;
+ pp->sound.params.params[5] = &pp->sound.speed_p;
+ pp->sound.params.params[6] = 0;
+ pp->sound.params.params[7] = &pp->sound.goom_limit_p;
+ pp->sound.params.params[8] = &pp->sound.goom_power_p;
+ pp->sound.params.params[9] = &pp->sound.last_goom_p;
+ pp->sound.params.params[10] = &pp->sound.last_biggoom_p;
+
+ /* 8 visual states; each entry carries enable flags plus a [min,max]
+ * selection range covering 0..510 for random state selection */
+ pp->statesNumber = 8;
+ pp->statesRangeMax = 510;
+ {
+ GoomState states[8] = {
+ {1, 0, 0, 1, 4, 0, 100}
+ ,
+ {1, 0, 0, 0, 1, 101, 140}
+ ,
+ {1, 0, 0, 1, 2, 141, 200}
+ ,
+ {0, 1, 0, 1, 2, 201, 260}
+ ,
+ {0, 1, 0, 1, 0, 261, 330}
+ ,
+ {0, 1, 1, 1, 4, 331, 400}
+ ,
+ {0, 0, 1, 0, 5, 401, 450}
+ ,
+ {0, 0, 1, 1, 1, 451, 510}
+ };
+ for (i = 0; i < 8; ++i)
+ pp->states[i] = states[i];
+ }
+ pp->curGState = &(pp->states[6]);
+
+ /* data for the update loop */
+ pp->update.lockvar = 0;
+ pp->update.goomvar = 0;
+ pp->update.loopvar = 0;
+ pp->update.stop_lines = 0;
+ pp->update.ifs_incr = 1; /* draw the IFS (0 = no; > 0 = increment) */
+ pp->update.decay_ifs = 0; /* IFS fade-out */
+ pp->update.recay_ifs = 0; /* IFS fade-in (reappearance) */
+ pp->update.cyclesSinceLastChange = 0;
+ pp->update.drawLinesDuration = 80;
+ pp->update.lineMode = pp->update.drawLinesDuration;
+
+ pp->update.switchMultAmount = (29.0f / 30.0f);
+ pp->update.switchIncrAmount = 0x7f;
+ pp->update.switchMult = 1.0f;
+ pp->update.switchIncr = pp->update.switchIncrAmount;
+
+ pp->update.stateSelectionRnd = 0;
+ pp->update.stateSelectionBlocker = 0;
+ pp->update.previousZoomSpeed = 128;
+
+ {
+ ZoomFilterData zfd = {
+ 127, 8, 16,
+ 1, 1, 0, NORMAL_MODE,
+ 0, 0, 0, 0, 0
+ };
+ pp->update.zoomFilterData = zfd;
+ }
+
+ setOptimizedMethods (pp);
+
+ /* Precompute a fixed-point (x1024) sine table.  NOTE(review): the loop
+ * fills indices 0..0xfffe only; presumably the table has 0x10000 entries
+ * and the last one is intentionally left at 0 from the memset — confirm
+ * against the sintable declaration in PluginInfo. */
+ for (i = 0; i < 0xffff; i++) {
+ pp->sintable[i] =
+ (int) (1024 * sin ((double) i * 360 / (sizeof (pp->sintable) /
+ sizeof (pp->sintable[0]) - 1) * 3.141592 / 180) + .5);
+ /* sintable [us] = (int)(1024.0f * sin (us*2*3.31415f/0xffff)) ; */
+ }
+}
+
+/* Register visual number i.  When the last slot (nbVisuals - 1) is filled,
+ * build the aggregated parameter list: pass 1 counts how many visuals
+ * expose parameters (plus one slot for the sound parameters), pass 2
+ * copies them into the freshly allocated p->params array.  Note the
+ * parameter structs are copied by value, and visuals are walked in
+ * reverse index order. */
+void
+plugin_info_add_visual (PluginInfo * p, int i, VisualFX * visual)
+{
+ p->visuals[i] = visual;
+ if (i == p->nbVisuals - 1) {
+ ++i;
+ /* pass 1: count entries (1 for sound + 1 per visual with params) */
+ p->nbParams = 1;
+ while (i--) {
+ if (p->visuals[i]->params)
+ p->nbParams++;
+ }
+ /* NOTE(review): malloc result unchecked — OOM would crash below */
+ p->params =
+ (PluginParameters *) malloc (sizeof (PluginParameters) * p->nbParams);
+ /* pass 2: fill the array; nbParams is reused as the write cursor and
+ * ends up equal to the count computed in pass 1 */
+ i = p->nbVisuals;
+ p->nbParams = 1;
+ p->params[0] = p->sound.params;
+ while (i--) {
+ if (p->visuals[i]->params)
+ p->params[p->nbParams++] = *(p->visuals[i]->params);
+ }
+ }
+}
+
+/* Release the resources owned by a PluginInfo: the sound parameter group,
+ * the aggregated params array (may be NULL if plugin_info_add_visual was
+ * never completed), and the visuals pointer array.  The VisualFX objects
+ * themselves are owned by the caller and are not freed here. */
+void
+plugin_info_free (PluginInfo * p)
+{
+ goom_plugin_parameters_free (&p->sound.params);
+
+ if (p->params)
+ free (p->params);
+ free (p->visuals);
+}
diff --git a/gst/goom/ppc_drawings.h b/gst/goom/ppc_drawings.h
new file mode 100644
index 0000000000..d35adf6500
--- /dev/null
+++ b/gst/goom/ppc_drawings.h
@@ -0,0 +1,28 @@
+/* Goom Project
+ * Copyright (C) <2003> Guillaume Borios, iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* Generic PowerPC Code */
+void ppc_brightness_generic(Pixel *src, Pixel *dest, int size, int coeff);
+
+/* G4 Specific PowerPC Code (Possible use of Altivec and Data Streams) */
+void ppc_brightness_G4(Pixel *src, Pixel *dest, int size, int coeff);
+
+/* G5 Specific PowerPC Code (Possible use of Altivec) */
+void ppc_brightness_G5(Pixel *src, Pixel *dest, int size, int coeff);
+
diff --git a/gst/goom/ppc_drawings.s b/gst/goom/ppc_drawings.s
new file mode 100644
index 0000000000..943cce7fde
--- /dev/null
+++ b/gst/goom/ppc_drawings.s
@@ -0,0 +1,394 @@
+; PowerPC optimized drawing methods for Goom
+; © 2003 Guillaume Borios
+; This library is free software; you can redistribute it and/or
+; modify it under the terms of the GNU Library General Public
+; License as published by the Free Software Foundation; either
+; version 2 of the License, or (at your option) any later version.
+;
+; This library is distributed in the hope that it will be useful,
+; but WITHOUT ANY WARRANTY; without even the implied warranty of
+; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+; Library General Public License for more details.
+;
+; You should have received a copy of the GNU Library General Public
+; License along with this library; if not, write to the
+; Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+; Boston, MA 02110-1301, USA.
+
+; Change log :
+; 30 May 2003 : File creation
+
+; Section definition : We use a read only code section for the whole file
+.section __TEXT,__text,regular,pure_instructions
+
+
+; --------------------------------------------------------------------------------------
+; Single 32b pixel drawing macros
+; Usage :
+; DRAWMETHOD_XXXX_MACRO *pixelIN, *pixelOUT, COLOR, WR1, WR2, WR3, WR4
+; Only the work registers (WR) can be touched by the macros
+;
+; Available methods :
+; DRAWMETHOD_DFLT_MACRO : Default drawing method (Actually OVRW)
+; DRAWMETHOD_PLUS_MACRO : RVB Saturated per channel addition (SLOWEST)
+; DRAWMETHOD_HALF_MACRO : 50% Transparency color drawing
+; DRAWMETHOD_OVRW_MACRO : Direct COLOR drawing (FASTEST)
+; DRAWMETHOD_B_OR_MACRO : Bitwise OR
+; DRAWMETHOD_BAND_MACRO : Bitwise AND
+; DRAWMETHOD_BXOR_MACRO : Bitwise XOR
+; DRAWMETHOD_BNOT_MACRO : Bitwise NOT
+; --------------------------------------------------------------------------------------
+
+.macro DRAWMETHOD_OVRW_MACRO
+ stw $2,0($1) ;; *$1 <- $2
+.endmacro
+
+.macro DRAWMETHOD_B_OR_MACRO
+ lwz $3,0($0) ;; $3 <- *$0
+ or $3,$3,$2 ;; $3 <- $3 | $2
+ stw $3,0($1) ;; *$1 <- $3
+.endmacro
+
+.macro DRAWMETHOD_BAND_MACRO
+ lwz $3,0($0) ;; $3 <- *$0
+ and $3,$3,$2 ;; $3 <- $3 & $2
+ stw $3,0($1) ;; *$1 <- $3
+.endmacro
+
+.macro DRAWMETHOD_BXOR_MACRO
+ lwz $3,0($0) ;; $3 <- *$0
+ xor $3,$3,$2 ;; $3 <- $3 ^ $2
+ stw $3,0($1) ;; *$1 <- $3
+.endmacro
+
+.macro DRAWMETHOD_BNOT_MACRO
+ lwz $3,0($0) ;; $3 <- *$0
+ nand $3,$3,$3 ;; $3 <- ~$3
+ stw $3,0($1) ;; *$1 <- $3
+.endmacro
+
+.macro DRAWMETHOD_PLUS_MACRO
+ lwz $4,0($0) ;; $4 <- *$0
+ andi. $3,$4,0xFF00 ;; $3 <- $4 & 0x0000FF00
+ andi. $5,$2,0xFF00 ;; $5 <- $2 & 0x0000FF00
+ add $3,$3,$5 ;; $3 <- $3 + $5
+ rlwinm $5,$3,15,0,0 ;; $5 <- 0 | ($3[15] << 15)
+ srawi $5,$5,23 ;; $5 <- $5 >> 23 (algebraic for sign extension)
+ or $3,$3,$5 ;; $3 <- $3 | $5
+ lis $5,0xFF ;; $5 <- 0x00FF00FF
+ addi $5,$5,0xFF
+ and $4,$4,$5 ;; $4 <- $4 & $5
+ and $6,$2,$5 ;; $6 <- $2 & $5
+ add $4,$4,$6 ;; $4 <- $4 + $6
+ rlwinm $6,$4,7,0,0 ;; $6 <- 0 | ($4[7] << 7)
+ srawi $6,$6,15 ;; $6 <- $6 >> 15 (algebraic for sign extension)
+ rlwinm $5,$4,23,0,0 ;; $5 <- 0 | ($4[23] << 23)
+ srawi $5,$5,31 ;; $5 <- $5 >> 31 (algebraic for sign extension)
+ rlwimi $6,$5,0,24,31 ;; $6[24..31] <- $5[24..31]
+ or $4,$4,$6 ;; $4 <- $4 | $6
+ rlwimi $4,$3,0,16,23 ;; $4[16..23] <- $3[16..23]
+ stw $4,0($1) ;; *$1 <- $4
+.endmacro
+
+.macro DRAWMETHOD_HALF_MACRO
+ lwz $4,0($0) ;; $4 <- *$0
+ andi. $3,$4,0xFF00 ;; $3 <- $4 & 0x0000FF00
+ andi. $5,$2,0xFF00 ;; $5 <- $2 & 0x0000FF00
+ add $3,$3,$5 ;; $3 <- $3 + $5
+ lis $5,0xFF ;; $5 <- 0x00FF00FF
+ addi $5,$5,0xFF
+ and $4,$4,$5 ;; $4 <- $4 & $5
+ and $5,$2,$5 ;; $5 <- $2 & $5
+ add $4,$4,$5 ;; $4 <- $4 + $5
+ srwi $4,$4,1 ;; $4 <- $4 >> 1
+ rlwimi $4,$3,31,16,23 ;; $4[16..23] <- $3[15..22]
+ stw $4,0($1) ;; *$1 <- $4
+.endmacro
+
+.macro DRAWMETHOD_DFLT_MACRO
+ DRAWMETHOD_PLUS_MACRO
+.endmacro
+
+; --------------------------------------------------------------------------------------
+
+
+
+; **************************************************************************************
+; void DRAWMETHOD_PLUS_PPC(unsigned int * buf, unsigned int _col);
+; void DRAWMETHOD_PLUS_2_PPC(unsigned * in, unsigned int * out, unsigned int _col);
+; **************************************************************************************
+.globl _DRAWMETHOD_PLUS_2_PPC
+.align 3
+_DRAWMETHOD_PLUS_2_PPC:
+ DRAWMETHOD_PLUS_MACRO r3,r4,r5,r6,r7,r8,r9
+ blr ;; return
+
+.globl _DRAWMETHOD_PLUS_PPC
+.align 3
+_DRAWMETHOD_PLUS_PPC:
+ DRAWMETHOD_PLUS_MACRO r3,r3,r4,r5,r6,r7,r9
+ blr ;; return
+
+
+; **************************************************************************************
+; void DRAWMETHOD_HALF_PPC(unsigned int * buf, unsigned int _col);
+; void DRAWMETHOD_HALF_2_PPC(unsigned * in, unsigned int * out, unsigned int _col);
+; **************************************************************************************
+.globl _DRAWMETHOD_HALF_2_PPC
+.align 3
+_DRAWMETHOD_HALF_2_PPC:
+ DRAWMETHOD_HALF_MACRO r3,r4,r5,r6,r7,r8
+ blr ;; return
+
+.globl _DRAWMETHOD_HALF_PPC
+.align 3
+_DRAWMETHOD_HALF_PPC:
+ DRAWMETHOD_HALF_MACRO r3,r3,r4,r5,r6,r7
+ blr ;; return
+
+
+; **************************************************************************************
+; void DRAW_LINE_PPC(unsigned int *data, int x1, int y1, int x2, int y2, unsigned int col,
+; unsigned int screenx, unsigned int screeny)
+; **************************************************************************************
+.globl _DRAW_LINE_PPC
+.align 3
+_DRAW_LINE_PPC:
+ ;; NOT IMPLEMENTED YET
+ blr ;; return
+
+
+; **************************************************************************************
+; void _ppc_brightness(Pixel * src, Pixel * dest, unsigned int size, unsigned int coeff)
+; **************************************************************************************
+
+
+.const
+.align 4
+vectorZERO:
+ .long 0,0,0,0
+ .long 0x10101000, 0x10101001, 0x10101002, 0x10101003
+ .long 0x10101004, 0x10101005, 0x10101006, 0x10101007
+ .long 0x10101008, 0x10101009, 0x1010100A, 0x1010100B
+ .long 0x1010100C, 0x1010100D, 0x1010100E, 0x1010100F
+
+
+.section __TEXT,__text,regular,pure_instructions
+
+.globl _ppc_brightness_G4
+.align 3
+_ppc_brightness_G4:
+
+
+;; PowerPC Altivec code
+ srwi r5,r5,2
+ mtctr r5
+
+;;vrsave
+ mfspr r11,256
+ lis r12,0xCFFC
+ mtspr 256,r12
+
+ mflr r0
+ bcl 20,31,"L00000000001$pb"
+"L00000000001$pb":
+ mflr r10
+ mtlr r0
+
+ addis r9,r10,ha16(vectorZERO-"L00000000001$pb")
+ addi r9,r9,lo16(vectorZERO-"L00000000001$pb")
+
+ vxor v0,v0,v0 ;; V0 = NULL vector
+
+ addi r9,r9,16
+ lvx v10,0,r9
+ addi r9,r9,16
+ lvx v11,0,r9
+ addi r9,r9,16
+ lvx v12,0,r9
+ addi r9,r9,16
+ lvx v13,0,r9
+
+ addis r9,r10,ha16(vectortmpwork-"L00000000001$pb")
+ addi r9,r9,lo16(vectortmpwork-"L00000000001$pb")
+ stw r6,0(r9)
+ li r6,8
+ stw r6,4(r9)
+ lvx v9,0,r9
+ li r9,128
+ vspltw v8,v9,0
+ vspltw v9,v9,1
+
+;; elt counter
+ li r9,0
+ lis r7,0x0F01
+ b L7
+.align 4
+L7:
+ lvx v1,r9,r3
+
+ vperm v4,v1,v0,v10
+ ;*********************
+ add r10,r9,r3
+ ;*********************
+ vperm v5,v1,v0,v11
+ vperm v6,v1,v0,v12
+ vperm v7,v1,v0,v13
+
+ vmulouh v4,v4,v8
+ ;*********************
+ dst r10,r7,3
+ ;*********************
+ vmulouh v5,v5,v8
+ vmulouh v6,v6,v8
+ vmulouh v7,v7,v8
+ vsrw v4,v4,v9
+ vsrw v5,v5,v9
+ vsrw v6,v6,v9
+ vsrw v7,v7,v9
+
+ vpkuwus v4,v4,v5
+ vpkuwus v6,v6,v7
+ vpkuhus v1,v4,v6
+
+ stvx v1,r9,r4
+ addi r9,r9,16
+
+ bdnz L7
+
+ mtspr 256,r11
+ blr
+
+
+.globl _ppc_brightness_G5
+.align 3
+_ppc_brightness_G5:
+
+;; PowerPC Altivec G5 code
+ srwi r5,r5,2
+ mtctr r5
+
+;;vrsave
+ mfspr r11,256
+ lis r12,0xCFFC
+ mtspr 256,r12
+
+ mflr r0
+ bcl 20,31,"L00000000002$pb"
+"L00000000002$pb":
+ mflr r10
+ mtlr r0
+
+ addis r9,r10,ha16(vectorZERO-"L00000000002$pb")
+ addi r9,r9,lo16(vectorZERO-"L00000000002$pb")
+
+ vxor v0,v0,v0 ;; V0 = NULL vector
+
+ addi r9,r9,16
+ lvx v10,0,r9
+ addi r9,r9,16
+ lvx v11,0,r9
+ addi r9,r9,16
+ lvx v12,0,r9
+ addi r9,r9,16
+ lvx v13,0,r9
+
+ addis r9,r10,ha16(vectortmpwork-"L00000000002$pb")
+ addi r9,r9,lo16(vectortmpwork-"L00000000002$pb")
+ stw r6,0(r9)
+ li r6,8
+ stw r6,4(r9)
+ lvx v9,0,r9
+ li r9,128
+ vspltw v8,v9,0
+ vspltw v9,v9,1
+
+;; elt counter
+ li r9,0
+ lis r7,0x0F01
+ b L6
+.align 4
+L6:
+ lvx v1,r9,r3
+
+ vperm v4,v1,v0,v10
+ ;*********************
+ add r10,r9,r3
+ ;*********************
+ vperm v5,v1,v0,v11
+ vperm v6,v1,v0,v12
+ vperm v7,v1,v0,v13
+
+ vmulouh v4,v4,v8
+ vmulouh v5,v5,v8
+ vmulouh v6,v6,v8
+ vmulouh v7,v7,v8
+ vsrw v4,v4,v9
+ vsrw v5,v5,v9
+ vsrw v6,v6,v9
+ vsrw v7,v7,v9
+
+ vpkuwus v4,v4,v5
+ vpkuwus v6,v6,v7
+ vpkuhus v1,v4,v6
+
+ stvx v1,r9,r4
+ addi r9,r9,16
+
+ bdnz L6
+
+ mtspr 256,r11
+ blr
+
+
+.globl _ppc_brightness_generic
+.align 3
+_ppc_brightness_generic:
+ lis r12,0x00FF
+ ori r12,r12,0x00FF
+ subi r3,r3,4
+ subi r4,r4,4
+ mtctr r5
+ b L1
+.align 4
+L1:
+ lwzu r7,4(r3)
+
+ rlwinm r8,r7,16,24,31
+ rlwinm r9,r7,24,24,31
+ mullw r8,r8,r6
+ rlwinm r10,r7,0,24,31
+ mullw r9,r9,r6
+ srwi r8,r8,8
+ mullw r10,r10,r6
+ srwi r9,r9,8
+
+ rlwinm. r11,r8,0,0,23
+ beq L2
+ li r8,0xFF
+L2:
+ srwi r10,r10,8
+ rlwinm. r11,r9,0,0,23
+ beq L3
+ li r9,0xFF
+L3:
+ rlwinm r7,r8,16,8,15
+ rlwinm. r11,r10,0,0,23
+ beq L4
+ li r10,0xFF
+L4:
+ rlwimi r7,r9,8,16,23
+ rlwimi r7,r10,0,24,31
+
+ stwu r7,4(r4)
+ bdnz L1
+
+ blr
+
+
+
+.static_data
+.align 4
+vectortmpwork:
+ .long 0,0,0,0
+
diff --git a/gst/goom/ppc_zoom_ultimate.h b/gst/goom/ppc_zoom_ultimate.h
new file mode 100644
index 0000000000..cd2c8a2683
--- /dev/null
+++ b/gst/goom/ppc_zoom_ultimate.h
@@ -0,0 +1,25 @@
+/* Goom Project
+ * Copyright (C) <2003> Guillaume Borios, iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* Generic PowerPC Code */
+void ppc_zoom_generic (int sizeX, int sizeY, Pixel *src, Pixel *dest, int *brutS, int *brutD, int buffratio, int precalCoef[16][16]);
+
+/* G4 Specific PowerPC Code (Possible use of Altivec and Data Streams) */
+void ppc_zoom_G4 (int sizeX, int sizeY, Pixel *src, Pixel *dest, int *brutS, int *brutD, int buffratio, int precalCoef[16][16]);
+
diff --git a/gst/goom/ppc_zoom_ultimate.s b/gst/goom/ppc_zoom_ultimate.s
new file mode 100644
index 0000000000..c37ec503e0
--- /dev/null
+++ b/gst/goom/ppc_zoom_ultimate.s
@@ -0,0 +1,336 @@
+; PowerPC optimized zoom for Goom
+; © 2001-2003 Guillaume Borios
+; This library is free software; you can redistribute it and/or
+; modify it under the terms of the GNU Library General Public
+; License as published by the Free Software Foundation; either
+; version 2 of the License, or (at your option) any later version.
+;
+; This library is distributed in the hope that it will be useful,
+; but WITHOUT ANY WARRANTY; without even the implied warranty of
+; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+; Library General Public License for more details.
+;
+; You should have received a copy of the GNU Library General Public
+; License along with this library; if not, write to the
+; Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+; Boston, MA 02110-1301, USA.
+
+; Change log :
+; 21 Dec 2003 : Use of altivec is now determined with a parameter
+
+; Section definition : We use a read only section
+.text
+
+; name of the function to call by C program : ppc_zoom
+; We declare this label as a global to extend its scope outside this file
+.globl _ppc_zoom_generic
+.globl _ppc_zoom_G4
+
+; Description :
+; This routine dynamically computes and applies a zoom filter
+
+; parameters :
+; r3 <=> unsigned int sizeX (in pixels)
+; r4 <=> unsigned int sizeY (in pixels)
+; r5 <=> unsigned int * frompixmap
+; r6 <=> unsigned int * topixmap
+; r7 <=> unsigned int * brutS
+; r8 <=> unsigned int * brutD
+; r9 <=> unsigned int buffratio
+; r10 <=> int [16][16] precalccoeffs
+
+; globals after init
+; r5 <=> frompixmap - 1 byte needed for preincremental fetch (replaces r5)
+; r6 <=> topixmap - 1 byte needed for preincremental fetch (replaces r6)
+; r3 <=> ax = x max in 16th of pixels (replaces old r3)
+; r4 <=> ay = y max in 16th of pixels (replaces old r4)
+; r20 <=> row size in bytes
+; r12 <=> 0xFF00FF (mask for parallel 32 bits pixs computing)
+; r30 <=> brutS - 1 byte needed for preincremental fetch (replaces r7)
+; r31 <=> brutD - 1 byte needed for preincremental fetch (replaces r8)
+
+; ABI notes :
+; r1 is the Stack Pointer (SP) => Do not use
+; r13..r31 are non-volatiles => Do not use
+
+_ppc_zoom_generic:
+
+; Save the used non-volatile registers in the Mach-O stack's red zone
+stmw r18,-56(r1)
+
+; init
+li r18,0 ; Default value if out of range : 0 (Black)
+mr r11,r10
+lis r12,0xFF
+mullw r2,r3,r4 ; Number of pixels to compute
+subi r30,r8,0
+slwi r20,r3,2
+srawi r19,r20,2
+ori r12,r12,0xFF
+subi r3,r3,1
+subi r4,r4,1
+mtspr ctr,r2 ; Init the loop count (one loop per pixel computed)
+subi r31,r7,0
+subi r6,r6,4
+slwi r3,r3,4
+slwi r4,r4,4
+
+;pre init for loop
+lwz r2,0(r31) ; px
+lwz r29,4(r31) ; py
+lwz r8,0(r30) ; px2
+lwz r10,4(r30) ; py2
+
+b L1
+.align 5
+L1:
+
+; computes dynamically the position to fetch
+sub r8,r8,r2
+sub r10,r10,r29
+mullw r8,r8,r9
+addi r31,r31,8
+mullw r10,r10,r9
+addi r30,r30,8
+
+srawi r8,r8,16
+srawi r10,r10,16
+add r2,r2,r8
+add r29,r29,r10
+
+; if px>ax or py>ay goto outofrange
+; computes the attenuation coeffs and the original point address
+rlwinm r10,r2,6,28-6,31-6 ; r10 <- (r2 << 2) & 0x000002D0 (r10=(r2%16)*4*16)
+cmpl cr4,0,r2,r3
+rlwimi r10, r29, 2, 28-2, 31-2 ; r10 <- ((r29 << 2) & 0x0000002D) | (r10 & !0x0000002D) (r10=(r10%16)*4 | r10)
+cmpl cr7,0,r29,r4
+srawi r29,r29,4 ; pos computing
+bge- cr4,L4
+srawi r2,r2,4 ; pos computing
+mullw r29, r29,r19 ; pos computing
+bge- cr7,L4
+
+; Channels notation : 00112233 (AARRVVBB)
+
+add r2,r2,r29 ; pos computing
+lwzx r10,r11,r10 ; Loads coefs
+slwi r2,r2,2 ; pos computing
+add r2,r2,r5 ; pos computing
+rlwinm r21,r10,0,24,31 ; Isolates coef1 (??????11 -> 00000011)
+lwz r25,0(r2) ; Loads col1 -> r25
+lwz r26,4(r2) ; Loads col2 -> r26
+rlwinm r22,r10,24,24,31 ; Isolates coef2 (????22?? -> 00000022)
+rlwinm r23,r10,16,24,31 ; Isolates coef3 (??33???? -> 00000033)
+add r2,r2,r20 ; Adds one line for future load of col3 and col4
+and r8, r25,r12 ; Masks col1 channels 1 & 3 : 0x00XX00XX
+rlwinm r24,r10,8,24,31 ; Isolates coef4 (44?????? -> 00000044)
+andi. r25,r25,0xFF00 ; Masks col1 channel 2 : 0x0000XX00
+mullw r8, r8, r21 ; Applies coef1 on col1 channels 1 & 3
+
+
+; computes final pixel color
+and r10,r26,r12 ; Masks col2 channels 1 & 3 : 0x00XX00XX
+lwz r27,0(r2) ; Loads col3 -> r27
+mullw r10,r10,r22 ; Applies coef2 on col2 channels 1 & 3
+mullw r25,r25,r21 ; Applies coef1 on col1 channel 2
+andi. r29,r26,0xFF00 ; Masks col2 channel 2 : 0x0000XX00
+mullw r29,r29,r22 ; Applies coef2 on col2 channel 2
+lwz r28,4(r2) ; Loads col4 -> r28
+add r8 ,r8 ,r10 ; Adds col1 & col2 channels 1 & 3
+and r10,r27,r12 ; Masks col3 channels 1 & 3 : 0x00XX00XX
+add r25,r25,r29 ; Adds col1 & col2 channel 2
+mullw r10,r10,r23 ; Applies coef3 on col3 channels 1 & 3
+andi. r29,r27,0xFF00 ; Masks col3 channel 2 : 0x0000XX00
+mullw r29,r29,r23 ; Applies coef3 on col3 channel 2
+lwz r2,0(r31) ; px
+add r7 ,r8 ,r10 ; Adds col3 to (col1 + col2) channels 1 & 3
+and r10,r28,r12 ; Masks col4 channels 1 & 3 : 0x00XX00XX
+mullw r10,r10,r24 ; Applies coef4 on col4 channels 1 & 3
+add r25,r25,r29 ; Adds col 3 to (col1 + col2) channel 2
+lwz r8,0(r30) ; px2
+andi. r28,r28,0xFF00 ; Masks col4 channel 2 : 0x0000XX00
+add r7 ,r7 ,r10 ; Adds col4 to (col1 + col2 + col3) channels 1 & 3
+lwz r10,4(r30) ; py2
+mullw r28,r28,r24 ; Applies coef4 on col4 channel 2
+srawi r7, r7, 8 ; (sum of channels 1 & 3) >> 8
+lwz r29,4(r31) ; py
+add r25,r25,r28 ; Adds col 4 to (col1 + col2 + col3) channel 2
+rlwimi r7, r25, 24, 16, 23 ; (((sum of channels 2) >> 8 ) & 0x0000FF00) | ((sum of channels 1 and 3) & 0xFFFF00FF)
+stwu r7,4(r6) ; Stores the computed pixel
+bdnz L1 ; Iterate again if needed
+b L3 ;goto end ; If not, returns from the function
+
+
+; if out of range
+L4:
+stwu r18,4(r6)
+lwz r8,0(r30) ; px2
+lwz r10,4(r30) ; py2
+lwz r2,0(r31) ; px
+lwz r29,4(r31) ; py
+bdnz L1
+
+
+L3:
+
+; Restore saved registers and return
+lmw r18,-56(r1)
+blr
+
+
+
+
+
+
+
+
+_ppc_zoom_G4:
+
+; Save the used non-volatile registers in the Mach-O stack's red zone
+stmw r17,-60(r1)
+
+; init
+li r18,0 ; Default value if out of range : 0 (Black)
+mr r11,r10
+lis r12,0xFF
+mullw r2,r3,r4 ; Number of pixels to compute
+subi r30,r8,0
+slwi r20,r3,2
+srawi r19,r20,2
+ori r12,r12,0xFF
+subi r3,r3,1
+subi r4,r4,1
+mtspr ctr,r2 ; Init the loop count (one loop per pixel computed)
+subi r31,r7,0
+subi r6,r6,4
+slwi r3,r3,4
+slwi r4,r4,4
+
+;pre init for loop
+lwz r2,0(r31) ; px
+lwz r29,4(r31) ; py
+lwz r8,0(r30) ; px2
+lwz r10,4(r30) ; py2
+
+;*********************
+lis r17,0x0F01
+
+b L100
+.align 5
+L100:
+
+addi r6,r6,4
+
+; Optimization to ensure the destination buffer
+; won't be loaded into the data cache
+rlwinm. r0,r6,0,27,31
+bne+ L500
+dcbz 0,r6
+;dcba 0,r6
+L500:
+
+; computes dynamically the position to fetch
+;mullw r8,r8,r29
+;mullw r2,r2,r29
+;add r2,r8,r2
+;srawi r2,r2,17
+
+sub r8,r8,r2
+sub r10,r10,r29
+mullw r8,r8,r9
+addi r31,r31,8
+mullw r10,r10,r9
+addi r30,r30,8
+
+dst r30,r17,0
+
+srawi r8,r8,16
+srawi r10,r10,16
+add r2,r2,r8
+add r29,r29,r10
+
+dst r31,r17,1
+
+; if px>ax or py>ay goto outofrange
+; computes the attenuation coeffs and the original point address
+rlwinm r10,r2,6,28-6,31-6 ; r10 <- (r2 << 2) & 0x000002D0 (r10=(r2%16)*4*16)
+cmpl cr4,0,r2,r3
+rlwimi r10, r29, 2, 28-2, 31-2 ; r10 <- ((r29 << 2) & 0x0000002D) | (r10 & !0x0000002D) (r10=(r29%16)*4 | r10)
+cmpl cr7,0,r29,r4
+srawi r29,r29,4 ; pos computing
+bge- cr4,L400
+srawi r2,r2,4 ; pos computing
+mullw r29, r29,r19 ; pos computing
+bge- cr7,L400
+
+; Channels notation : 00112233 (AARRVVBB)
+
+add r2,r2,r29 ; pos computing
+lwzx r10,r11,r10 ; Loads coefs
+slwi r2,r2,2 ; pos computing
+add r2,r2,r5 ; pos computing
+rlwinm r21,r10,0,24,31 ; Isolates coef1 (??????11 -> 00000011)
+lwz r25,0(r2) ; Loads col1 -> r25
+lwz r26,4(r2) ; Loads col2 -> r26
+rlwinm r22,r10,24,24,31 ; Isolates coef2 (????22?? -> 00000022)
+rlwinm r23,r10,16,24,31 ; Isolates coef3 (??33???? -> 00000033)
+add r2,r2,r20 ; Adds one line for future load of col3 and col4
+and r8, r25,r12 ; Masks col1 channels 1 & 3 : 0x00XX00XX
+rlwinm r24,r10,8,24,31 ; Isolates coef4 (44?????? -> 00000044)
+dst r2,r17,2
+rlwinm r25,r25,0,16,23 ; Masks col1 channel 2 : 0x0000XX00
+;andi. r25,r25,0xFF00 ; Masks col1 channel 2 : 0x0000XX00
+mullw r8, r8, r21 ; Applies coef1 on col1 channels 1 & 3
+
+
+; computes final pixel color
+and r10,r26,r12 ; Masks col2 channels 1 & 3 : 0x00XX00XX
+lwz r27,0(r2) ; Loads col3 -> r27
+mullw r10,r10,r22 ; Applies coef2 on col2 channels 1 & 3
+mullw r25,r25,r21 ; Applies coef1 on col1 channel 2
+rlwinm r29,r26,0,16,23 ; Masks col2 channel 2 : 0x0000XX00
+;andi. r29,r26,0xFF00 ; Masks col2 channel 2 : 0x0000XX00
+mullw r29,r29,r22 ; Applies coef2 on col2 channel 2
+lwz r28,4(r2) ; Loads col4 -> r28
+add r8 ,r8 ,r10 ; Adds col1 & col2 channels 1 & 3
+and r10,r27,r12 ; Masks col3 channels 1 & 3 : 0x00XX00XX
+add r25,r25,r29 ; Adds col1 & col2 channel 2
+mullw r10,r10,r23 ; Applies coef3 on col3 channels 1 & 3
+rlwinm r29,r27,0,16,23 ; Masks col3 channel 2 : 0x0000XX00
+;andi. r29,r27,0xFF00 ; Masks col3 channel 2 : 0x0000XX00
+mullw r29,r29,r23 ; Applies coef3 on col3 channel 2
+lwz r2,0(r31) ; px
+add r7 ,r8 ,r10 ; Adds col3 to (col1 + col2) channels 1 & 3
+and r10,r28,r12 ; Masks col4 channels 1 & 3 : 0x00XX00XX
+mullw r10,r10,r24 ; Applies coef4 on col4 channels 1 & 3
+add r25,r25,r29 ; Adds col 3 to (col1 + col2) channel 2
+lwz r8,0(r30) ; px2
+rlwinm r28,r28,0,16,23 ; Masks col4 channel 2 : 0x0000XX00
+;andi. r28,r28,0xFF00 ; Masks col4 channel 2 : 0x0000XX00
+add r7 ,r7 ,r10 ; Adds col4 to (col1 + col2 + col3) channels 1 & 3
+lwz r10,4(r30) ; py2
+mullw r28,r28,r24 ; Applies coef4 on col4 channel 2
+srawi r7, r7, 8 ; (sum of channels 1 & 3) >> 8
+lwz r29,4(r31) ; py
+add r25,r25,r28 ; Adds col 4 to (col1 + col2 + col3) channel 2
+rlwimi r7, r25, 24, 16, 23 ; (((sum of channels 2) >> 8 ) & 0x0000FF00) | ((sum of channels 1 and 3) & 0xFFFF00FF)
+stw r7,0(r6) ; Stores the computed pixel
+bdnz L100 ; Iterate again if needed
+b L300 ;goto end ; If not, returns from the function
+
+
+; if out of range
+L400:
+stw r18,0(r6)
+lwz r8,0(r30) ; px2
+lwz r10,4(r30) ; py2
+lwz r2,0(r31) ; px
+lwz r29,4(r31) ; py
+bdnz L100
+
+
+L300:
+
+; Restore saved registers and return
+lmw r17,-60(r1)
+blr
diff --git a/gst/goom/sound_tester.c b/gst/goom/sound_tester.c
new file mode 100644
index 0000000000..d02f0b51ac
--- /dev/null
+++ b/gst/goom/sound_tester.c
@@ -0,0 +1,161 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "sound_tester.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+/* some constants */
+#define BIG_GOOM_DURATION 100
+#define BIG_GOOM_SPEED_LIMIT 0.1f
+
+#define ACCEL_MULT 0.95f
+#define SPEED_MULT 0.99f
+
+
+/* Analyses one block of raw audio (2 channels x 512 gint16 samples) and
+ * updates the SoundInfo state: volume, acceleration, speed and the
+ * "goom" detection timers/limits.  Finally pushes the new values to the
+ * GUI parameters through their change listeners.
+ * NOTE(review): info->allTimesMax is used as a divisor below - this
+ * assumes it is non-zero on entry (presumably initialised elsewhere);
+ * verify against the SoundInfo setup code. */
+void
+evaluate_sound (gint16 data[2][512], SoundInfo * info)
+{
+
+  int i;
+  float difaccel;
+  float prevspeed;
+
+  /* find the max amplitude (only even-indexed samples of channel 0
+   * are scanned) */
+  int incvar = 0;
+
+  for (i = 0; i < 512; i += 2) {
+    if (incvar < data[0][i])
+      incvar = data[0][i];
+  }
+
+  if (incvar > info->allTimesMax)
+    info->allTimesMax = incvar;
+
+  /* sound volume, normalised by the loudest level seen so far */
+  info->volume = (float) incvar / (float) info->allTimesMax;
+  memcpy (info->samples[0], data[0], 512 * sizeof (short));
+  memcpy (info->samples[1], data[1], 512 * sizeof (short));
+
+  difaccel = info->accelvar;
+  info->accelvar = info->volume;        /* accel between 0 and 1 */
+
+  /* damp the acceleration according to the current sound speed */
+  if (info->speedvar > 1.0f)
+    info->speedvar = 1.0f;
+
+  if (info->speedvar < 0.1f)
+    info->accelvar *= (1.0f - (float) info->speedvar);
+  else if (info->speedvar < 0.3f)
+    info->accelvar *= (0.9f - (float) (info->speedvar - 0.1f) / 2.0f);
+  else
+    info->accelvar *= (0.8f - (float) (info->speedvar - 0.3f) / 4.0f);
+
+  /* smooth the acceleration */
+  info->accelvar *= ACCEL_MULT;
+  if (info->accelvar < 0)
+    info->accelvar = 0;
+
+  /* difaccel <- |new accel - previous accel| */
+  difaccel = info->accelvar - difaccel;
+  if (difaccel < 0)
+    difaccel = -difaccel;
+
+  /* update the speed, weighted 3:1 towards the previous value */
+  prevspeed = info->speedvar;
+  info->speedvar = (info->speedvar + difaccel * 0.5f) / 2;
+  info->speedvar *= SPEED_MULT;
+  info->speedvar = (info->speedvar + 3.0f * prevspeed) / 4.0f;
+  if (info->speedvar < 0)
+    info->speedvar = 0;
+  if (info->speedvar > 1)
+    info->speedvar = 1;
+
+  /* goom timers */
+  info->timeSinceLastGoom++;
+  info->timeSinceLastBigGoom++;
+  info->cycle++;
+
+  /* detection of new gooms */
+  if ((info->speedvar > (float) IVAL (info->biggoom_speed_limit_p) / 100.0f)
+      && (info->accelvar > info->bigGoomLimit)
+      && (info->timeSinceLastBigGoom > BIG_GOOM_DURATION)) {
+    info->timeSinceLastBigGoom = 0;
+  }
+
+  if (info->accelvar > info->goom_limit) {
+    /* TODO: test && (info->timeSinceLastGoom > 20)) { */
+    info->totalgoom++;
+    info->timeSinceLastGoom = 0;
+    info->goomPower = info->accelvar - info->goom_limit;
+  }
+
+  if (info->accelvar > info->prov_max)
+    info->prov_max = info->accelvar;
+
+  if (info->goom_limit > 1)
+    info->goom_limit = 1;
+
+  /* every 2 seconds (64 cycles): check whether the goom rate is
+   * correct and adjust the limit otherwise */
+  if (info->cycle % 64 == 0) {
+    if (info->speedvar < 0.01f)
+      info->goom_limit *= 0.91;
+    if (info->totalgoom > 4) {
+      info->goom_limit += 0.02;
+    }
+    if (info->totalgoom > 7) {
+      info->goom_limit *= 1.03f;
+      info->goom_limit += 0.03;
+    }
+    if (info->totalgoom > 16) {
+      info->goom_limit *= 1.05f;
+      info->goom_limit += 0.04;
+    }
+    if (info->totalgoom == 0) {
+      info->goom_limit = info->prov_max - 0.02;
+    }
+    if ((info->totalgoom == 1) && (info->goom_limit > 0.02))
+      info->goom_limit -= 0.01;
+    info->totalgoom = 0;
+    info->bigGoomLimit =
+        info->goom_limit * (1.0f +
+        (float) IVAL (info->biggoom_factor_p) / 500.0f);
+    info->prov_max = 0;
+  }
+
+  /* update the parameters for the GUI */
+  FVAL (info->volume_p) = info->volume;
+  info->volume_p.change_listener (&info->volume_p);
+  FVAL (info->speed_p) = info->speedvar * 4;
+  info->speed_p.change_listener (&info->speed_p);
+  FVAL (info->accel_p) = info->accelvar;
+  info->accel_p.change_listener (&info->accel_p);
+
+  FVAL (info->goom_limit_p) = info->goom_limit;
+  info->goom_limit_p.change_listener (&info->goom_limit_p);
+  FVAL (info->goom_power_p) = info->goomPower;
+  info->goom_power_p.change_listener (&info->goom_power_p);
+  FVAL (info->last_goom_p) = 1.0 - ((float) info->timeSinceLastGoom / 20.0f);
+  info->last_goom_p.change_listener (&info->last_goom_p);
+  FVAL (info->last_biggoom_p) =
+      1.0 - ((float) info->timeSinceLastBigGoom / 40.0f);
+  info->last_biggoom_p.change_listener (&info->last_biggoom_p);
+
+  /* bigGoomLimit ==goomLimit*9/8+7 ? */
+}
diff --git a/gst/goom/sound_tester.h b/gst/goom/sound_tester.h
new file mode 100644
index 0000000000..26418c5e24
--- /dev/null
+++ b/gst/goom/sound_tester.h
@@ -0,0 +1,29 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _SOUND_TESTER_H
+#define _SOUND_TESTER_H
+
+#include "goom_plugin_info.h"
+#include "goom_config.h"
+
+/* Analyses one block of audio data and updates the SoundInfo state */
+void evaluate_sound(gint16 data[2][512], SoundInfo *sndInfo);
+
+#endif
+
diff --git a/gst/goom/surf3d.c b/gst/goom/surf3d.c
new file mode 100644
index 0000000000..847284b370
--- /dev/null
+++ b/gst/goom/surf3d.c
@@ -0,0 +1,152 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "surf3d.h"
+#include "goom_plugin_info.h"
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+/* Allocates a new grid3d: a (defx x defz) mesh spanning sizex x sizez
+ * world units, laid flat in the XZ plane (y == 0) and centred on the
+ * X/Z origin.  `center` is stored on the embedded surf3d for later
+ * translation; the animation mode starts at 0.
+ * NOTE(review): malloc results are not checked - OOM will crash. */
+grid3d *
+grid3d_new (int sizex, int defx, int sizez, int defz, v3d center)
+{
+  int x = defx;
+  int y = defz;
+  grid3d *g = malloc (sizeof (grid3d));
+  surf3d *s = &(g->surf);
+
+  s->nbvertex = x * y;
+  s->vertex = malloc (x * y * sizeof (v3d));    /* model-space vertices */
+  s->svertex = malloc (x * y * sizeof (v3d));   /* transformed vertices */
+  s->center = center;
+
+  g->defx = defx;
+  g->sizex = sizex;
+  g->defz = defz;
+  g->sizez = sizez;
+  g->mode = 0;
+
+  /* fill the vertex array row by row, centring the grid on the origin */
+  while (y) {
+    --y;
+    x = defx;
+    while (x) {
+      --x;
+      s->vertex[x + defx * y].x = (float) (x - defx / 2) * sizex / defx;
+      s->vertex[x + defx * y].y = 0;
+      s->vertex[x + defx * y].z = (float) (y - defz / 2) * sizez / defz;
+    }
+  }
+  return g;
+}
+
+/* Frees the vertex arrays owned by the grid's embedded surf3d, then
+ * the grid3d struct itself. */
+void
+grid3d_free (grid3d * g)
+{
+  surf3d *s = &(g->surf);
+
+  free (s->vertex);
+  free (s->svertex);
+
+  free (g);
+}
+
+/* Projects the grid's transformed vertices (svertex) to 2D and draws
+ * each grid column as a chain of line segments: `colorlow` lines into
+ * `buf` and `color` lines into `back`.  W/H are the pixmap dimensions
+ * and `dist` the projection distance.
+ * Projected points equal to (-666, -666) are treated as an invalid /
+ * clipped sentinel and skipped - presumably produced by v3d_to_v2d for
+ * points behind the camera; verify against v3d_to_v2d.
+ * NOTE(review): the temporary v2_array malloc is unchecked. */
+void
+grid3d_draw (PluginInfo * plug, grid3d * g, int color, int colorlow,
+    int dist, Pixel * buf, Pixel * back, int W, int H)
+{
+
+  int x;
+  v2d v2, v2x;
+
+  v2d *v2_array = malloc (g->surf.nbvertex * sizeof (v2d));
+
+  v3d_to_v2d (g->surf.svertex, g->surf.nbvertex, W, H, dist, v2_array);
+
+  for (x = 0; x < g->defx; x++) {
+    int z;
+
+    /* v2x is the previously projected point of this column */
+    v2x = v2_array[x];
+
+    for (z = 1; z < g->defz; z++) {
+      v2 = v2_array[z * g->defx + x];
+      if (((v2.x != -666) || (v2.y != -666))
+          && ((v2x.x != -666) || (v2x.y != -666))) {
+        plug->methods.draw_line (buf, v2x.x, v2x.y, v2.x, v2.y, colorlow, W, H);
+        plug->methods.draw_line (back, v2x.x, v2x.y, v2.x, v2.y, color, W, H);
+      }
+      v2x = v2;
+    }
+  }
+
+  free (v2_array);
+}
+
+/* Rotates every model vertex by `angle` around the Y axis and stores
+ * the result in the transformed-vertex array (svertex); the model
+ * vertices themselves are left untouched. */
+void
+surf3d_rotate (surf3d * s, float angle)
+{
+  int i;
+  float cosa;
+  float sina;
+
+  SINCOS (angle, sina, cosa);
+  for (i = 0; i < s->nbvertex; i++) {
+    Y_ROTATE_V3D (s->vertex[i], s->svertex[i], cosa, sina);
+  }
+}
+
+/* Translates every transformed vertex (svertex) by the surface centre. */
+void
+surf3d_translate (surf3d * s)
+{
+  int i;
+
+  for (i = 0; i < s->nbvertex; i++) {
+    TRANSLATE_V3D (s->center, s->svertex[i]);
+  }
+}
+
+/* Animates the grid for one frame: in mode 0, blends the new height
+ * values `vals` (may be NULL) into the first grid row and lets the
+ * remaining rows decay while inheriting from the previous row (wave
+ * propagation), then rotates every vertex by `angle` around Y and
+ * translates it by a camera position derived from the surface centre,
+ * `dist` and a slight vertical bobbing driven by angle/4.3. */
+void
+grid3d_update (grid3d * g, float angle, float *vals, float dist)
+{
+  int i;
+  float cosa;
+  float sina;
+  surf3d *s = &(g->surf);
+  v3d cam = s->center;
+
+  cam.z += dist;
+
+  /* camera bobbing; the cosa computed here is unused (overwritten below) */
+  SINCOS ((angle / 4.3f), sina, cosa);
+  cam.y += sina * 2.0f;
+  SINCOS (angle, sina, cosa);
+
+  if (g->mode == 0) {
+    /* feed the new values into the first row (80% new / 20% old) */
+    if (vals)
+      for (i = 0; i < g->defx; i++)
+        s->vertex[i].y = s->vertex[i].y * 0.2 + vals[i] * 0.8;
+
+    /* each following row decays and inherits from the row before it */
+    for (i = g->defx; i < s->nbvertex; i++) {
+      s->vertex[i].y *= 0.255f;
+      s->vertex[i].y += (s->vertex[i - g->defx].y * 0.777f);
+    }
+  }
+
+  for (i = 0; i < s->nbvertex; i++) {
+    Y_ROTATE_V3D (s->vertex[i], s->svertex[i], cosa, sina);
+    TRANSLATE_V3D (cam, s->svertex[i]);
+  }
+}
diff --git a/gst/goom/surf3d.h b/gst/goom/surf3d.h
new file mode 100644
index 0000000000..f8a2180d97
--- /dev/null
+++ b/gst/goom/surf3d.h
@@ -0,0 +1,57 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _SURF3D_H
+#define _SURF3D_H
+
+#include "v3d.h"
+#include "goom_graphic.h"
+#include "goom_typedefs.h"
+
+/* A 3D surface: original vertices plus a transformed working copy. */
+typedef struct {
+ v3d *vertex; /* original (untransformed) vertices */
+ v3d *svertex; /* working copy: rotated/translated vertices */
+ int nbvertex; /* number of entries in both arrays */
+
+ v3d center; /* translation applied to the working copy */
+} surf3d;
+
+/* A rectangular grid built on top of a surf3d (used for the tentacles). */
+typedef struct {
+ surf3d surf;
+
+ int defx; /* vertex count along x */
+ int sizex; /* extent along x -- presumably world units; TODO confirm */
+ int defz; /* vertex count along z */
+ int sizez; /* extent along z -- presumably world units; TODO confirm */
+ int mode; /* 0 = sound-driven height-field update (see grid3d_update) */
+} grid3d;
+
+/* hi-level */
+
+/* works on grid3d */
+grid3d *grid3d_new (int sizex, int defx, int sizez, int defz, v3d center);
+void grid3d_free (grid3d *g);
+void grid3d_update (grid3d *s, float angle, float *vals, float dist);
+
+/* low level */
+/* NOTE(review): surf3d_draw takes int* buffers while grid3d_draw takes
+ * Pixel* -- inconsistent prototypes; verify against the .c definitions. */
+void surf3d_draw (surf3d *s, int color, int dist, int *buf, int *back, int W,int H);
+void grid3d_draw (PluginInfo *plug, grid3d *g, int color, int colorlow, int dist, Pixel *buf, Pixel *back, int W,int H);
+void surf3d_rotate (surf3d *s, float angle);
+void surf3d_translate (surf3d *s);
+
+#endif
diff --git a/gst/goom/surf3d.s b/gst/goom/surf3d.s
new file mode 100644
index 0000000000..f8c8c5d440
--- /dev/null
+++ b/gst/goom/surf3d.s
@@ -0,0 +1,484 @@
+ .file "surf3d.c"
+ .version "01.01"
+gcc2_compiled.:
+.text
+ .align 4
+.globl grid3d_new
+ .type grid3d_new,@function
+grid3d_new:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $44,%esp
+ pushl %edi
+ pushl %esi
+ pushl %ebx
+ movl 20(%ebp),%eax
+ movl 12(%ebp),%esi
+ movl %eax,-8(%ebp)
+ addl $-12,%esp
+ pushl $44
+ call malloc
+ movl %esi,%edx
+ imull -8(%ebp),%edx
+ movl %eax,%edi
+ movl %edx,-12(%ebp)
+ leal (%edx,%edx,2),%ebx
+ movl %edx,8(%edi)
+ addl $-12,%esp
+ sall $2,%ebx
+ pushl %ebx
+ call malloc
+ addl $32,%esp
+ movl %eax,(%edi)
+ addl $-12,%esp
+ pushl %ebx
+ call malloc
+ movl %eax,4(%edi)
+ movl 24(%ebp),%eax
+ movl %eax,12(%edi)
+ movl 28(%ebp),%eax
+ movl %eax,16(%edi)
+ movl 32(%ebp),%eax
+ movl %eax,20(%edi)
+ movl 8(%ebp),%eax
+ movl %eax,28(%edi)
+ movl %esi,24(%edi)
+ movl -8(%ebp),%edx
+ movl 16(%ebp),%eax
+ movl %edx,32(%edi)
+ movl %eax,36(%edi)
+ movl $0,40(%edi)
+ testl %edx,%edx
+ je .L480
+ movl %esi,%eax
+ movl %esi,-28(%ebp)
+ shrl $31,%eax
+ addl %eax,%esi
+ movl -8(%ebp),%eax
+ shrl $31,%eax
+ addl -8(%ebp),%eax
+ movl -12(%ebp),%edx
+ sarl $1,%eax
+ movl %edx,-24(%ebp)
+ negl -28(%ebp)
+ movl %esi,-16(%ebp)
+ movl %eax,-20(%ebp)
+ .p2align 4,,7
+.L481:
+ movl -28(%ebp),%eax
+ addl %eax,-24(%ebp)
+ decl -8(%ebp)
+ movl 12(%ebp),%esi
+ testl %esi,%esi
+ je .L479
+ movl -8(%ebp),%eax
+ subl -20(%ebp),%eax
+ movl %eax,-4(%ebp)
+ fildl -4(%ebp)
+ movl %esi,-4(%ebp)
+ movl -24(%ebp),%edx
+ leal (%edx,%esi),%eax
+ movl -16(%ebp),%ebx
+ fildl 16(%ebp)
+ leal (%eax,%eax,2),%eax
+ sarl $1,%ebx
+ leal 0(,%eax,4),%ecx
+ fmulp %st,%st(1)
+ fildl 20(%ebp)
+ fdivrp %st,%st(1)
+ fildl 8(%ebp)
+ fildl -4(%ebp)
+ jmp .L484
+.L487:
+ fxch %st(2)
+ .p2align 4,,7
+.L484:
+ decl %esi
+ movl %esi,%eax
+ movl (%edi),%edx
+ subl %ebx,%eax
+ movl %eax,-4(%ebp)
+ fildl -4(%ebp)
+ addl $-12,%ecx
+ fmul %st(2),%st
+ fdiv %st(1),%st
+ fstps (%edx,%ecx)
+ fxch %st(2)
+ movl (%edi),%eax
+ movl $0,4(%eax,%ecx)
+ movl (%edi),%eax
+ fsts 8(%eax,%ecx)
+ testl %esi,%esi
+ jne .L487
+ fstp %st(0)
+ fstp %st(0)
+ fstp %st(0)
+.L479:
+ cmpl $0,-8(%ebp)
+ jne .L481
+.L480:
+ leal -56(%ebp),%esp
+ popl %ebx
+ movl %edi,%eax
+ popl %esi
+ popl %edi
+ leave
+ ret
+.Lfe1:
+ .size grid3d_new,.Lfe1-grid3d_new
+.section .rodata
+ .align 8
+.LC48:
+ .long 0x0,0x3fe00000
+ .align 4
+.LC49:
+ .long 0x3f19999a
+ .align 4
+.LC50:
+ .long 0x3ee3d70a
+.text
+ .align 4
+.globl grid3d_update
+ .type grid3d_update,@function
+grid3d_update:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $32,%esp
+ pushl %esi
+ pushl %ebx
+ flds 12(%ebp)
+ movl 8(%ebp),%ebx
+ movl 16(%ebp),%ecx
+ fld %st(0)
+#APP
+ fsin
+#NO_APP
+ fstps -4(%ebp)
+ flds -4(%ebp)
+ fxch %st(1)
+#APP
+ fcos
+#NO_APP
+ fstps -4(%ebp)
+ flds -4(%ebp)
+ cmpl $0,40(%ebx)
+ jne .L519
+ testl %ecx,%ecx
+ je .L520
+ xorl %esi,%esi
+ cmpl 24(%ebx),%esi
+ jge .L520
+ fldl .LC48
+ xorl %edx,%edx
+ .p2align 4,,7
+.L524:
+ movl (%ebx),%eax
+ fld %st(0)
+ fld %st(1)
+ fxch %st(1)
+ fmuls 4(%eax,%edx)
+ fxch %st(1)
+ fmuls (%ecx,%esi,4)
+ faddp %st,%st(1)
+ incl %esi
+ fstps 4(%eax,%edx)
+ addl $12,%edx
+ cmpl 24(%ebx),%esi
+ jl .L524
+ fstp %st(0)
+.L520:
+ movl 24(%ebx),%esi
+ cmpl 8(%ebx),%esi
+ jge .L519
+ leal (%esi,%esi,2),%eax
+ flds .LC49
+ flds .LC50
+ leal 0(,%eax,4),%ecx
+ .p2align 4,,7
+.L529:
+ movl (%ebx),%eax
+ flds 4(%eax,%ecx)
+ fmul %st(2),%st
+ fstps 4(%eax,%ecx)
+ movl %esi,%eax
+ subl 24(%ebx),%eax
+ movl (%ebx),%edx
+ leal (%eax,%eax,2),%eax
+ flds 4(%edx,%eax,4)
+ fmul %st(1),%st
+ fadds 4(%edx,%ecx)
+ incl %esi
+ fstps 4(%edx,%ecx)
+ addl $12,%ecx
+ cmpl 8(%ebx),%esi
+ jl .L529
+ fstp %st(0)
+ fstp %st(0)
+.L519:
+ xorl %esi,%esi
+ cmpl 8(%ebx),%esi
+ jge .L536
+ xorl %ecx,%ecx
+ .p2align 4,,7
+.L534:
+ movl (%ebx),%eax
+ flds (%eax,%ecx)
+ flds 8(%eax,%ecx)
+ fmul %st(2),%st
+ fxch %st(1)
+ fmul %st(3),%st
+ fsubp %st,%st(1)
+ movl 4(%ebx),%edx
+ incl %esi
+ fstps (%edx,%ecx)
+ movl (%ebx),%eax
+ flds (%eax,%ecx)
+ flds 8(%eax,%ecx)
+ fxch %st(1)
+ fmul %st(2),%st
+ fxch %st(1)
+ fmul %st(3),%st
+ faddp %st,%st(1)
+ movl 4(%ebx),%edx
+ fstps 8(%edx,%ecx)
+ movl (%ebx),%eax
+ flds 4(%eax,%ecx)
+ movl 4(%ebx),%edx
+ fstps 4(%edx,%ecx)
+ movl 4(%ebx),%eax
+ flds (%eax,%ecx)
+ fadds 12(%ebx)
+ fstps (%eax,%ecx)
+ movl 4(%ebx),%eax
+ flds 4(%eax,%ecx)
+ fadds 16(%ebx)
+ fstps 4(%eax,%ecx)
+ movl 4(%ebx),%eax
+ flds 8(%eax,%ecx)
+ fadds 20(%ebx)
+ fstps 8(%eax,%ecx)
+ addl $12,%ecx
+ cmpl 8(%ebx),%esi
+ jl .L534
+.L536:
+ fstp %st(0)
+ fstp %st(0)
+ popl %ebx
+ popl %esi
+ leave
+ ret
+.Lfe2:
+ .size grid3d_update,.Lfe2-grid3d_update
+.section .rodata
+ .align 4
+.LC51:
+ .long 0x40000000
+ .align 8
+.LC52:
+ .long 0x0,0x42380000
+.text
+ .align 4
+.globl surf3d_draw
+ .type surf3d_draw,@function
+surf3d_draw:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $60,%esp
+ pushl %edi
+ pushl %esi
+ pushl %ebx
+ movl $0,-20(%ebp)
+ movl -20(%ebp),%edx
+ movl 8(%ebp),%eax
+ cmpl 8(%eax),%edx
+ jge .L493
+ fldl .LC52
+ flds .LC51
+ xorl %edi,%edi
+ .p2align 4,,7
+.L495:
+ movl 8(%ebp),%eax
+ movl 4(%eax),%eax
+ movl %eax,-36(%ebp)
+ fcoms 8(%eax,%edi)
+ fnstsw %ax
+ andb $69,%ah
+ cmpb $1,%ah
+ jne .L496
+ fildl 16(%ebp)
+ movl -36(%ebp),%edx
+ fld %st(0)
+ fmuls (%edx,%edi)
+ fdivs 8(%edx,%edi)
+ fld %st(3)
+ faddp %st,%st(1)
+ fstpl -32(%ebp)
+ movl -32(%ebp),%eax
+ movl -28(%ebp),%edx
+ movl %eax,-40(%ebp)
+ sarl $16,-40(%ebp)
+ movl -36(%ebp),%edx
+ fmuls 4(%edx,%edi)
+ fdivs 8(%edx,%edi)
+ movl -40(%ebp),%ecx
+ fld %st(2)
+ faddp %st,%st(1)
+ fstpl -32(%ebp)
+ movl -32(%ebp),%eax
+ movl -28(%ebp),%edx
+ movl %eax,-44(%ebp)
+ movl 28(%ebp),%eax
+ sarl $1,%eax
+ addl %eax,%ecx
+ movl 32(%ebp),%eax
+ sarl $16,-44(%ebp)
+ sarl $1,%eax
+ movl %ecx,%ebx
+ subl -44(%ebp),%eax
+ movl %eax,%esi
+ cmpl 28(%ebp),%ebx
+ jge .L496
+ testl %ecx,%ecx
+ jl .L496
+ cmpl 32(%ebp),%esi
+ jge .L496
+ testl %eax,%eax
+ jge .L499
+.L496:
+ xorl %esi,%esi
+ xorl %ebx,%ebx
+.L499:
+ movl 20(%ebp),%eax
+ movl %ebx,%edx
+ leal (%eax,%edx,4),%edx
+ movl 28(%ebp),%eax
+ imull %esi,%eax
+ leal (%edx,%eax,4),%eax
+ testl %ebx,%ebx
+ je .L494
+ testl %esi,%esi
+ je .L494
+#APP
+ movd (%eax), %mm0
+ paddusb 12(%ebp), %mm0
+ movd %mm0, (%eax)
+#NO_APP
+.L494:
+ incl -20(%ebp)
+ addl $12,%edi
+ movl -20(%ebp),%eax
+ movl 8(%ebp),%edx
+ cmpl 8(%edx),%eax
+ jl .L495
+ fstp %st(0)
+ fstp %st(0)
+.L493:
+ popl %ebx
+ popl %esi
+ popl %edi
+ leave
+ ret
+.Lfe3:
+ .size surf3d_draw,.Lfe3-surf3d_draw
+ .align 4
+.globl surf3d_rotate
+ .type surf3d_rotate,@function
+surf3d_rotate:
+ pushl %ebp
+ movl %esp,%ebp
+ subl $32,%esp
+ pushl %esi
+ pushl %ebx
+ flds 12(%ebp)
+ movl 8(%ebp),%ebx
+ fld %st(0)
+#APP
+ fsin
+#NO_APP
+ fstps -4(%ebp)
+ flds -4(%ebp)
+ fxch %st(1)
+#APP
+ fcos
+#NO_APP
+ fstps -4(%ebp)
+ xorl %esi,%esi
+ flds -4(%ebp)
+ cmpl 8(%ebx),%esi
+ jge .L537
+ xorl %ecx,%ecx
+ .p2align 4,,7
+.L508:
+ movl (%ebx),%eax
+ flds (%eax,%ecx)
+ flds 8(%eax,%ecx)
+ fmul %st(2),%st
+ fxch %st(1)
+ fmul %st(3),%st
+ fsubp %st,%st(1)
+ movl 4(%ebx),%edx
+ incl %esi
+ fstps (%edx,%ecx)
+ movl (%ebx),%eax
+ flds (%eax,%ecx)
+ flds 8(%eax,%ecx)
+ fxch %st(1)
+ fmul %st(2),%st
+ fxch %st(1)
+ fmul %st(3),%st
+ faddp %st,%st(1)
+ movl 4(%ebx),%edx
+ fstps 8(%edx,%ecx)
+ movl (%ebx),%eax
+ flds 4(%eax,%ecx)
+ movl 4(%ebx),%edx
+ fstps 4(%edx,%ecx)
+ addl $12,%ecx
+ cmpl 8(%ebx),%esi
+ jl .L508
+.L537:
+ fstp %st(0)
+ fstp %st(0)
+ popl %ebx
+ popl %esi
+ leave
+ ret
+.Lfe4:
+ .size surf3d_rotate,.Lfe4-surf3d_rotate
+ .align 4
+.globl surf3d_translate
+ .type surf3d_translate,@function
+surf3d_translate:
+ pushl %ebp
+ movl %esp,%ebp
+ pushl %ebx
+ movl 8(%ebp),%ecx
+ xorl %ebx,%ebx
+ cmpl 8(%ecx),%ebx
+ jge .L512
+ xorl %edx,%edx
+ .p2align 4,,7
+.L514:
+ movl 4(%ecx),%eax
+ flds (%eax,%edx)
+ fadds 12(%ecx)
+ incl %ebx
+ fstps (%eax,%edx)
+ movl 4(%ecx),%eax
+ flds 4(%eax,%edx)
+ fadds 16(%ecx)
+ fstps 4(%eax,%edx)
+ movl 4(%ecx),%eax
+ flds 8(%eax,%edx)
+ fadds 20(%ecx)
+ fstps 8(%eax,%edx)
+ addl $12,%edx
+ cmpl 8(%ecx),%ebx
+ jl .L514
+.L512:
+ popl %ebx
+ leave
+ ret
+.Lfe5:
+ .size surf3d_translate,.Lfe5-surf3d_translate
+ .ident "GCC: (GNU) 2.95.3 19991030 (prerelease)"
diff --git a/gst/goom/tentacle3d.c b/gst/goom/tentacle3d.c
new file mode 100644
index 0000000000..f82ffba380
--- /dev/null
+++ b/gst/goom/tentacle3d.c
@@ -0,0 +1,358 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include <stdlib.h>
+
+#include "v3d.h"
+#include "surf3d.h"
+#include "goom_tools.h"
+#include "goom_config.h"
+#include "goom_plugin_info.h"
+#include "tentacle3d.h"
+
+#define D 256.0f
+
+#define nbgrid 6
+#define definitionx 15
+#define definitionz 45
+
+/* Per-instance state of the 3D-tentacles visual effect. */
+typedef struct _TENTACLE_FX_DATA
+{
+ PluginParam enabled_bp; /* "Enabled" boolean plugin parameter */
+ PluginParameters params;
+
+ float cycle; /* animation phase fed to pretty_move() */
+ grid3d *grille[nbgrid]; /* the tentacle grids */
+ float *vals; /* scratch row of sound values for grid3d_update() */
+
+#define NB_TENTACLE_COLORS 4
+ int colors[NB_TENTACLE_COLORS]; /* target colour palette */
+
+ int col; /* current packed colour */
+ int dstcol; /* index of the palette colour being drifted towards */
+ float lig; /* current lighting level */
+ float ligs; /* lighting step per frame (sign flips at the bounds) */
+
+ /* statics from pretty_move */
+ float distt;
+ float distt2;
+ float rot; /* between 0 and 2 * G_PI */
+ int happens;
+ int rotation;
+ int lock;
+} TentacleFXData;
+
+static void tentacle_new (TentacleFXData * data);
+static void tentacle_update (PluginInfo * goomInfo, Pixel * buf, Pixel * back,
+ int W, int H, short[2][512], float, int drawit, TentacleFXData * data);
+static void tentacle_free (TentacleFXData * data);
+
+/*
+ * VisualFX wrapper for the tentacles
+ */
+
+/* Allocate and initialise the per-instance tentacle state: the "Enabled"
+ * plugin parameter, colour/lighting state, pretty_move state, the target
+ * colour palette, and the grids themselves (tentacle_new()).
+ * NOTE(review): malloc result is not checked before use; 'info' is unused. */
+static void
+tentacle_fx_init (VisualFX * _this, PluginInfo * info)
+{
+
+ TentacleFXData *data = (TentacleFXData *) malloc (sizeof (TentacleFXData));
+
+ secure_b_param (&data->enabled_bp, "Enabled", 1);
+ plugin_parameters (&data->params, "3D Tentacles", 1);
+ data->params.params[0] = &data->enabled_bp;
+
+ data->cycle = 0.0f;
+ /* initial colour, packed one channel per byte lane (ROUGE/VERT/BLEU) */
+ data->col =
+ (0x28 << (ROUGE * 8)) | (0x2c << (VERT * 8)) | (0x5f << (BLEU * 8));
+ data->dstcol = 0;
+ data->lig = 1.15f;
+ data->ligs = 0.1f;
+
+ data->distt = 10.0f;
+ data->distt2 = 0.0f;
+ data->rot = 0.0f; /* between 0 and 2 * G_PI */
+ data->happens = 0;
+
+ data->rotation = 0;
+ data->lock = 0;
+ /* palette that data->col drifts towards via evolutecolor() */
+ data->colors[0] =
+ (0x18 << (ROUGE * 8)) | (0x4c << (VERT * 8)) | (0x2f << (BLEU * 8));
+ data->colors[1] =
+ (0x48 << (ROUGE * 8)) | (0x2c << (VERT * 8)) | (0x6f << (BLEU * 8));
+ data->colors[2] =
+ (0x58 << (ROUGE * 8)) | (0x3c << (VERT * 8)) | (0x0f << (BLEU * 8));
+ data->colors[3] =
+ (0x87 << (ROUGE * 8)) | (0x55 << (VERT * 8)) | (0x74 << (BLEU * 8));
+ tentacle_new (data);
+
+ _this->params = &data->params;
+ _this->fx_data = (void *) data;
+}
+
+/* Per-frame entry point: when the "Enabled" parameter is set, run the
+ * tentacle animation, drawing into 'dest' with 'src' as the back buffer
+ * and feeding the current sound samples / acceleration value. */
+static void
+tentacle_fx_apply (VisualFX * _this, Pixel * src, Pixel * dest,
+ PluginInfo * goomInfo)
+{
+ TentacleFXData *data = (TentacleFXData *) _this->fx_data;
+
+ if (BVAL (data->enabled_bp)) {
+ tentacle_update (goomInfo, dest, src, goomInfo->screen.width,
+ goomInfo->screen.height, goomInfo->sound.samples,
+ (float) goomInfo->sound.accelvar,
+ goomInfo->curGState->drawTentacle, data);
+ }
+}
+
+/* Release everything owned by the effect state, then the state itself. */
+static void
+tentacle_fx_free (VisualFX * _this)
+{
+ tentacle_free ((TentacleFXData *) _this->fx_data);
+ free (_this->fx_data);
+}
+
+/* Fill in the VisualFX vtable for the tentacle effect; the state itself
+ * is created lazily by the init callback. */
+void
+tentacle_fx_create (VisualFX * fx)
+{
+ fx->init = tentacle_fx_init;
+ fx->apply = tentacle_fx_apply;
+ fx->free = tentacle_fx_free;
+ fx->fx_data = NULL;
+ fx->params = NULL;
+}
+
+/* ----- */
+
+/* Free the grids, the sound-value scratch buffer and the plugin
+ * parameters (counterpart of tentacle_fx_init/tentacle_new). */
+static void
+tentacle_free (TentacleFXData * data)
+{
+ int tmp;
+
+ /* FREE GRID */
+ for (tmp = 0; tmp < nbgrid; tmp++)
+ grid3d_free (data->grille[tmp]);
+ free (data->vals);
+
+ goom_plugin_parameters_free (&data->params);
+}
+
+/* Create the nbgrid tentacle grids with randomised sizes/definitions,
+ * stacked 8 units apart in y; also allocates the scratch row used by
+ * tentacle_update() (definitionx + 20 slack entries).
+ * NOTE(review): malloc results are not checked before use. */
+static void
+tentacle_new (TentacleFXData * data)
+{
+ int tmp;
+
+ v3d center = { 0, -17.0, 0 };
+ data->vals = (float *) malloc ((definitionx + 20) * sizeof (float));
+
+ for (tmp = 0; tmp < nbgrid; tmp++) {
+ int x, z;
+
+ z = 45 + rand () % 30;
+ x = 85 + rand () % 5;
+ /* each grid is centred at its own (random) depth */
+ center.z = z;
+ data->grille[tmp] =
+ grid3d_new (x, definitionx, z, definitionz + rand () % 10, center);
+ center.y += 8;
+ }
+}
+
+/* Scale one colour channel by log10(power) / 2, clamped to [0,255].
+ * Returns 0 when the scaled value is not positive (power <= 1 tends to
+ * darken the channel to black).
+ * NOTE(review): the 'val < 0' clamp is unreachable -- it only runs in
+ * the t > 0 branch, where (int) t is already >= 0. */
+static inline unsigned char
+lighten (unsigned char value, float power)
+{
+ int val = value;
+ float t = (float) val * log10 (power) / 2.0;
+
+ if (t > 0) {
+ val = (int) t; /* (32.0f * log (t)); */
+ if (val > 255)
+ val = 255;
+ if (val < 0)
+ val = 0;
+ return val;
+ } else {
+ return 0;
+ }
+}
+
+/* Apply lighten() to each of the four byte lanes of a packed colour,
+ * in place. Accessing the int through unsigned char* is well-defined
+ * (character-type access is exempt from strict aliasing). */
+static void
+lightencolor (int *col, float power)
+{
+ unsigned char *color;
+
+ color = (unsigned char *) col;
+ *color = lighten (*color, power);
+ color++;
+ *color = lighten (*color, power);
+ color++;
+ *color = lighten (*color, power);
+ color++;
+ *color = lighten (*color, power);
+}
+
+/* returns x>>s, taking the sign of x into account */
+#define ShiftRight(_x,_s) ((_x<0) ? -(-_x>>_s) : (_x>>_s))
+
+/* Step the channel of 'src' selected by 'mask' one 'incr' closer to the
+ * same channel of 'dest', leaving all other bits of 'src' untouched.
+ * Callers pass one mask per byte lane with incr = 1 unit in that lane,
+ * so the colour converges one step per call per channel. */
+static int
+evolutecolor (unsigned int src, unsigned int dest,
+ unsigned int mask, unsigned int incr)
+{
+
+ /* preserve the channels outside the mask */
+ int color = src & (~mask);
+
+ src &= mask;
+ dest &= mask;
+
+ /* move up unless already saturated or past the target */
+ if ((src != mask)
+ && (src < dest))
+ src += incr;
+
+ if (src > dest)
+ src -= incr;
+ return (src & mask) | color;
+}
+
+/* Compute this frame's camera distances (*dist, *dist2) and rotation
+ * angle (*rotangle), smoothing each one towards its target with a
+ * 15/16 (or 3/4) low-pass. 'happens' is a randomly triggered "event"
+ * countdown (about 1 chance in 200 per frame, lasting 100-160 frames)
+ * that pulls the camera in and switches to continuous rotation; 'lock'
+ * enforces a cool-down between events. The final angle is taken modulo
+ * 2*G_PI along the shorter wrap-around direction. */
+static void
+pretty_move (PluginInfo * goomInfo, float cycle, float *dist, float *dist2,
+ float *rotangle, TentacleFXData * fx_data)
+{
+
+ float tmp;
+
+ /* many magic numbers here... I don't really like that. */
+ if (fx_data->happens)
+ fx_data->happens -= 1;
+ else if (fx_data->lock == 0) {
+ /* ~1/200 chance of starting a 100-160 frame event */
+ fx_data->happens =
+ goom_irand (goomInfo->gRandom,
+ 200) ? 0 : 100 + goom_irand (goomInfo->gRandom, 60);
+ fx_data->lock = fx_data->happens * 3 / 2;
+ } else
+ fx_data->lock--;
+
+ tmp = fx_data->happens ? 8.0f : 0;
+ *dist2 = fx_data->distt2 = (tmp + 15.0f * fx_data->distt2) / 16.0f;
+
+ /* base distance oscillates with the cycle; events pull it closer */
+ tmp = 30 + D - 90.0f * (1.0f + sin (cycle * 19 / 20));
+ if (fx_data->happens)
+ tmp *= 0.6f;
+
+ *dist = fx_data->distt = (tmp + 3.0f * fx_data->distt) / 4.0f;
+
+ if (!fx_data->happens) {
+ /* gentle wobble around 3*PI/2 */
+ tmp = G_PI * sin (cycle) / 32 + 3 * G_PI / 2;
+ } else {
+ /* during an event, spin; direction re-rolled ~1/500 per frame */
+ fx_data->rotation =
+ goom_irand (goomInfo->gRandom,
+ 500) ? fx_data->rotation : goom_irand (goomInfo->gRandom, 2);
+ if (fx_data->rotation)
+ cycle *= 2.0f * G_PI;
+ else
+ cycle *= -1.0f * G_PI;
+ tmp = cycle - (G_PI * 2.0) * floor (cycle / (G_PI * 2.0));
+ }
+
+ /* smooth towards tmp along whichever 2*PI wrap is closer */
+ if (fabs (tmp - fx_data->rot) > fabs (tmp - (fx_data->rot + 2.0 * G_PI))) {
+ fx_data->rot = (tmp + 15.0f * (fx_data->rot + 2 * G_PI)) / 16.0f;
+ if (fx_data->rot > 2.0 * G_PI)
+ fx_data->rot -= 2.0 * G_PI;
+ *rotangle = fx_data->rot;
+ } else if (fabs (tmp - fx_data->rot) >
+ fabs (tmp - (fx_data->rot - 2.0 * G_PI))) {
+ fx_data->rot = (tmp + 15.0f * (fx_data->rot - 2.0 * G_PI)) / 16.0f;
+ if (fx_data->rot < 0.0f)
+ fx_data->rot += 2.0 * G_PI;
+ *rotangle = fx_data->rot;
+ } else
+ *rotangle = fx_data->rot = (tmp + 15.0f * fx_data->rot) / 16.0f;
+}
+
+/* Per-frame tentacle animation. While the light level 'lig' is above
+ * 1.01 the effect is active: drift the colour towards the current
+ * palette target, derive bright/dim draw colours, feed random sound
+ * samples (scaled by 'rapport') into each grid, advance and draw them.
+ * Otherwise the effect idles: keep pretty_move()'s state warm and ramp
+ * the light back up so the effect can fade in again. */
+static void
+tentacle_update (PluginInfo * goomInfo, Pixel * buf, Pixel * back, int W, int H,
+ short data[2][512], float rapport, int drawit, TentacleFXData * fx_data)
+{
+
+ int tmp;
+ int tmp2;
+
+ int color;
+ int colorlow;
+
+ float dist, dist2, rotangle;
+
+ /* when asked not to draw, start fading the light out */
+ if ((!drawit) && (fx_data->ligs > 0.0f))
+ fx_data->ligs = -fx_data->ligs;
+
+ fx_data->lig += fx_data->ligs;
+
+ if (fx_data->lig > 1.01f) {
+ /* bounce the light level between ~1.1 and 10
+ * NOTE(review): bitwise '|' on the two comparisons -- works since
+ * both operands are 0/1, but '||' was presumably intended */
+ if ((fx_data->lig > 10.0f) | (fx_data->lig < 1.1f))
+ fx_data->ligs = -fx_data->ligs;
+
+ /* occasionally pick a new palette target colour */
+ if ((fx_data->lig < 6.3f) && (goom_irand (goomInfo->gRandom, 30) == 0))
+ fx_data->dstcol = goom_irand (goomInfo->gRandom, NB_TENTACLE_COLORS);
+
+ /* drift each byte lane of the colour one step towards the target */
+ fx_data->col =
+ evolutecolor (fx_data->col, fx_data->colors[fx_data->dstcol], 0xff,
+ 0x01);
+ fx_data->col =
+ evolutecolor (fx_data->col, fx_data->colors[fx_data->dstcol], 0xff00,
+ 0x0100);
+ fx_data->col =
+ evolutecolor (fx_data->col, fx_data->colors[fx_data->dstcol], 0xff0000,
+ 0x010000);
+ fx_data->col =
+ evolutecolor (fx_data->col, fx_data->colors[fx_data->dstcol],
+ 0xff000000, 0x01000000);
+
+ color = fx_data->col;
+ colorlow = fx_data->col;
+
+ lightencolor (&color, fx_data->lig * 2.0f + 2.0f);
+ lightencolor (&colorlow, (fx_data->lig / 3.0f) + 0.67f);
+
+ /* exaggerate then clamp the sound-acceleration factor */
+ rapport = 1.0f + 2.0f * (rapport - 1.0f);
+ rapport *= 1.2f;
+ if (rapport > 1.12f)
+ rapport = 1.12f;
+
+ pretty_move (goomInfo, fx_data->cycle, &dist, &dist2, &rotangle, fx_data);
+
+ for (tmp = 0; tmp < nbgrid; tmp++) {
+ /* sample random points of channel 0 as this grid's height input */
+ for (tmp2 = 0; tmp2 < definitionx; tmp2++) {
+ float val =
+ (float) (ShiftRight (data[0][goom_irand (goomInfo->gRandom, 511)],
+ 10)) * rapport;
+
+ fx_data->vals[tmp2] = val;
+ }
+
+ grid3d_update (fx_data->grille[tmp], rotangle, fx_data->vals, dist2);
+ }
+ fx_data->cycle += 0.01f;
+ for (tmp = 0; tmp < nbgrid; tmp++)
+ grid3d_draw (goomInfo, fx_data->grille[tmp], color, colorlow, dist, buf,
+ back, W, H);
+ } else {
+ /* idle: keep the motion state alive, ready to fade back in */
+ fx_data->lig = 1.05f;
+ if (fx_data->ligs < 0.0f)
+ fx_data->ligs = -fx_data->ligs;
+ pretty_move (goomInfo, fx_data->cycle, &dist, &dist2, &rotangle, fx_data);
+ fx_data->cycle += 0.1f;
+ if (fx_data->cycle > 1000)
+ fx_data->cycle = 0;
+ }
+}
diff --git a/gst/goom/tentacle3d.h b/gst/goom/tentacle3d.h
new file mode 100644
index 0000000000..9f3ba92342
--- /dev/null
+++ b/gst/goom/tentacle3d.h
@@ -0,0 +1,26 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _TENTACLE3D_H
+#define _TENTACLE3D_H
+
+#include "goom_visual_fx.h"
+
+void tentacle_fx_create(VisualFX *fx);
+
+#endif
diff --git a/gst/goom/v3d.c b/gst/goom/v3d.c
new file mode 100644
index 0000000000..7df4f1388b
--- /dev/null
+++ b/gst/goom/v3d.c
@@ -0,0 +1,38 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "v3d.h"
+
+/* Perspective-project an array of 3D vertices onto the screen plane.
+ * Projected coordinates are centred on (width/2, height/2) with y
+ * pointing up. Vertices with z <= 2 (too close / behind the viewer)
+ * are flagged with the sentinel (-666,-666) so callers can skip them. */
+void
+v3d_to_v2d (v3d * v3, int nbvertex, int width, int height, float distance,
+ v2d * v2)
+{
+ int i;
+
+ for (i = 0; i < nbvertex; ++i) {
+ if (v3[i].z > 2) {
+ int Xp, Yp;
+
+ /* F2I: fast float -> int conversion (see mathtools.h) */
+ F2I ((distance * v3[i].x / v3[i].z), Xp);
+ F2I ((distance * v3[i].y / v3[i].z), Yp);
+ v2[i].x = Xp + (width >> 1);
+ v2[i].y = -Yp + (height >> 1);
+ } else
+ v2[i].x = v2[i].y = -666;
+ }
+}
diff --git a/gst/goom/v3d.h b/gst/goom/v3d.h
new file mode 100644
index 0000000000..fd5f939236
--- /dev/null
+++ b/gst/goom/v3d.h
@@ -0,0 +1,83 @@
+/* Goom Project
+ * Copyright (C) <2003> iOS-Software
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef _V3D_H
+#define _V3D_H
+
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+
+#include "mathtools.h"
+
+/* 3D vertex, single precision. */
+typedef struct {
+ float x,y,z;
+} v3d;
+
+/* 2D screen-space point, integer pixels. */
+typedef struct {
+ int x,y;
+} v2d;
+
+/* 2D point, double precision. */
+typedef struct {
+ double x,y;
+} v2g;
+
+/*
+ * projects the 3D vertex onto the display plane.
+ * sets (-666,-666) if the point must not be displayed.
+ *
+ * a good value for distance: 256
+ */
+#define V3D_TO_V2D(v3,v2,width,height,distance) \
+{ \
+ int Xp, Yp; \
+ if (v3.z > 2) { \
+ F2I((distance * v3.x / v3.z),Xp) ; \
+ F2I((distance * v3.y / v3.z),Yp) ; \
+ v2.x = Xp + (width>>1); \
+ v2.y = -Yp + (height>>1); \
+ } \
+ else v2.x=v2.y=-666; \
+}
+
+/* array version of V3D_TO_V2D, defined in v3d.c */
+void v3d_to_v2d(v3d *src, int nbvertex, int width, int height, float distance, v2d *v2_array);
+
+/*
+ * Y-axis rotation of the v3d vi by angle a (cosa=cos(a), sina=sin(a)).
+ * NOTE(review): the original comment also mentioned a 'centerz'
+ * rotation centre, but no such parameter exists -- stale documentation.
+ */
+#define Y_ROTATE_V3D(vi,vf,sina,cosa)\
+{\
+ vf.x = vi.x * cosa - vi.z * sina;\
+ vf.z = vi.x * sina + vi.z * cosa;\
+ vf.y = vi.y;\
+}
+
+/*
+ * translation: vdest += vsrc, component-wise
+ */
+#define TRANSLATE_V3D(vsrc,vdest)\
+{\
+ vdest.x += vsrc.x;\
+ vdest.y += vsrc.y;\
+ vdest.z += vsrc.z;\
+}
+
+/* uniform scale of v by the scalar lf */
+#define MUL_V3D(lf,v) {v.x*=lf;v.y*=lf;v.z*=lf;}
+
+#endif
diff --git a/gst/goom/xmmx.c b/gst/goom/xmmx.c
new file mode 100644
index 0000000000..ea87cf980e
--- /dev/null
+++ b/gst/goom/xmmx.c
@@ -0,0 +1,402 @@
+/* xmmx.c
+
+ eXtended MultiMedia eXtensions GCC interface library for IA32.
+
+ To use this library, simply include this header file
+ and compile with GCC. You MUST have inlining enabled
+ in order for xmmx_ok() to work; this can be done by
+ simply using -O on the GCC command line.
+
+ Compiling with -DXMMX_TRACE will cause detailed trace
+ output to be sent to stderr for each mmx operation.
+ This adds lots of code, and obviously slows execution to
+ a crawl, but can be very useful for debugging.
+
+ THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT
+ LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS FOR ANY PARTICULAR PURPOSE.
+
+ 1999 by R. Fisher
+ Based on libmmx, 1997-99 by H. Dietz and R. Fisher
+
+ Notes:
+ It appears that the latest gas has the pand problem fixed, therefore
+ I'll undefine BROKEN_PAND by default.
+*/
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "goom_config.h"
+
+#ifdef HAVE_MMX
+
+/* define this to get exactly the same result as the C function
+ * (a tad slower).. but the difference is hardly noticeable.
+ */
+// #define STRICT_COMPAT
+
+#define BUFFPOINTNB 16
+#define BUFFPOINTMASK 0xffff
+#define BUFFINCR 0xff
+
+#define sqrtperte 16
+/* do: a % sqrtperte <=> a & PERTEMASK */
+#define PERTEMASK 0xf
+/* do: a / sqrtperte <=> a >> PERTEDEC */
+#define PERTEDEC 4
+
+
+/*#define MMX_TRACE*/
+#include "mmx.h"
+/*#include "xmmx.h"*/
+#include "goom_graphic.h"
+
+/* Returns 1 when extended MMX is available: tests bit 3 of the
+ * mm_support() capability mask (presumably the Extended-MMX/3DNow
+ * flag -- see mmx.h for the bit layout). */
+int
+xmmx_supported (void)
+{
+ return (mm_support () & 0x8) >> 3;
+}
+
+/* MMX implementation of the goom zoom filter. For every destination
+ * pixel: interpolate the source position between the brutS and brutD
+ * transform buffers by buffratio (16.16 fixed point), clip it against
+ * the screen bounds, then blend the four neighbouring expix1 pixels
+ * with the per-subposition coefficients from precalCoef, writing the
+ * result to expix2. The inline asm is split into interleaved "threads"
+ * (#1..#4) to help instruction pairing on old CPUs. */
+void
+zoom_filter_xmmx (int prevX, int prevY,
+ Pixel * expix1, Pixel * expix2,
+ int *lbruS, int *lbruD, int buffratio, int precalCoef[16][16])
+{
+ int bufsize = prevX * prevY; /* buffer size */
+ volatile int loop; /* loop variable */
+
+ mmx_t *brutS = (mmx_t *) lbruS; /* source transform buffer */
+ mmx_t *brutD = (mmx_t *) lbruD; /* destination transform buffer */
+
+ volatile mmx_t prevXY;
+ volatile mmx_t ratiox;
+
+ /* volatile mmx_t interpix; */
+
+ /* clear the four screen corners (clipped positions land there) */
+ expix1[0].val = expix1[prevX - 1].val = expix1[prevX * prevY - 1].val =
+ expix1[prevX * prevY - prevX].val = 0;
+
+ prevXY.ud[0] = (prevX - 1) << PERTEDEC;
+ prevXY.ud[1] = (prevY - 1) << PERTEDEC;
+
+ ratiox.d[0] = buffratio;
+ ratiox.d[1] = buffratio;
+
+ asm volatile ("\n\t movq %[ratio], %%mm6" "\n\t pslld $16, %%mm6" /* mm6 = [rat16=buffratio<<16 | rat16=buffratio<<16] */
+ "\n\t pxor %%mm7, %%mm7" /* mm7 = 0 */
+ ::[ratio] "m" (ratiox));
+
+ loop = 0;
+
+ /*
+ * NOTE: mm6 and mm7 are not modified inside the loop.
+ */
+ while (loop < bufsize) {
+ /* Thread #1
+ * pre : mm6 = [rat16|rat16]
+ * post : mm0 = S + ((D-S)*rat16 format [X|Y]
+ * modified = mm0,mm1,mm2
+ */
+
+ asm volatile ("#1 \n\t movq 0(%[brutS]), %%mm0" "#1 \n\t movq 0(%[brutD]), %%mm1" "#1 \n\t psubd %%mm0, %%mm1" /* mm1 = D - S */
+ "#1 \n\t movq %%mm1, %%mm2" /* mm2 = D - S */
+ "#1 \n\t pslld $16, %%mm1" "#1 \n\t pmullw %%mm6, %%mm2" "#1 \n\t pmulhuw %%mm6, %%mm1" "#1 \n\t pslld $16, %%mm0" "#1 \n\t paddd %%mm2, %%mm1" /* mm1 = (D - S) * buffratio >> 16 */
+ "#1 \n\t paddd %%mm1, %%mm0" /* mm0 = S + mm1 */
+ "#1 \n\t psrld $16, %%mm0"::[brutS] "r" (&brutS[loop]),
+ [brutD] "r" (&brutD[loop])
+ ); /* mm0 = S */
+
+ /*
+ * pre : mm0 : position vector on screen
+ * prevXY : coordinate of the lower-right point on screen
+ * post : clipped mm0
+ * modified : mm0,mm1,mm2
+ */
+ asm volatile
+ ("#1 \n\t movq %[prevXY], %%mm1" "#1 \n\t pcmpgtd %%mm0, %%mm1"
+ /* in X, mm0 contains (same for Y) :
+ * 1111 if prevXY > px
+ * 0000 if prevXY <= px */
+#ifdef STRICT_COMPAT
+ "#1 \n\t movq %%mm1, %%mm2"
+ "#1 \n\t punpckhdq %%mm2, %%mm2"
+ "#1 \n\t punpckldq %%mm1, %%mm1" "#1 \n\t pand %%mm2, %%mm0"
+#endif
+ "#1 \n\t pand %%mm1, %%mm0" /* zero out the out-of-range part */
+ ::[prevXY] "m" (prevXY));
+
+ /* Thread #2
+ * pre : mm0 : clipped position on screen
+ *
+ * post : mm3 : coefs for this position
+ * mm1 : X vector [0|X]
+ *
+ * modified : eax,esi
+ */
+ __asm__ __volatile__ ("#2 \n\t movd %%mm0,%%esi"
+ "#2 \n\t movq %%mm0,%%mm1"
+ "#2 \n\t andl $15,%%esi"
+ "#2 \n\t psrlq $32,%%mm1"
+ "#2 \n\t shll $6,%%esi"
+ "#2 \n\t movd %%mm1,%%eax"
+ "#2 \n\t addl %[precalCoef],%%esi"
+ "#2 \n\t andl $15,%%eax"
+ "#2 \n\t movd (%%esi,%%eax,4),%%mm3"::[precalCoef]
+ "g" (precalCoef):"eax", "esi");
+
+ /*
+ * coefficient extraction... (Thread #3)
+ *
+ * pre : coef in mm3
+ *
+ * post : coefs extracted into mm3 (c1 & c2)
+ * and mm4 (c3 & c4)
+ *
+ * modified : mm5
+ */
+
+ /* (Thread #4)
+ * pre : mm0 : Y pos [*|Y]
+ * mm1 : X pos [*|X]
+ *
+ * post : mm0 : expix1[position]
+ * mm2 : expix1[position+width]
+ *
+ * modified : eax, esi
+ */
+ __asm__ __volatile__ ("#2 \n\t psrld $4, %%mm0" "#2 \n\t psrld $4, %%mm1" /* PERTEDEC = $4 */
+ "#4 \n\t movd %%mm1,%%eax"
+ "#3 \n\t movq %%mm3,%%mm5"
+ "#4 \n\t mull %[prevX]"
+ "#4 \n\t movd %%mm0,%%esi"
+ "#3 \n\t punpcklbw %%mm5, %%mm3"
+ "#4 \n\t addl %%esi, %%eax"
+ "#3 \n\t movq %%mm3, %%mm4"
+ "#3 \n\t movq %%mm3, %%mm5"
+ "#4 \n\t movl %[expix1], %%esi"
+ "#3 \n\t punpcklbw %%mm5, %%mm3"
+ "#4 \n\t movq (%%esi,%%eax,4),%%mm0"
+ "#3 \n\t punpckhbw %%mm5, %%mm4"
+ "#4 \n\t addl %[prevX],%%eax"
+ "#4 \n\t movq (%%esi,%%eax,4),%%mm2"::[expix1] "g" (expix1)
+ ,[prevX] "g" (prevX)
+ :"eax", "esi", "edx");
+
+ /*
+ * pre : mm0 : expix1[position]
+ * mm2 : expix1[position+width]
+ * mm3 & mm4 : coefs
+ */
+
+ /* copy the first two pixels into mm0 and mm1 */
+ movq_r2r (mm0, mm1); /* b1-v1-r1-a1-b2-v2-r2-a2 */
+
+ /* unpack the first pixel */
+ punpcklbw_r2r (mm7, mm0); /* 00-b2-00-v2-00-r2-00-a2 */
+
+ /* coefficient extraction... */
+
+ movq_r2r (mm3, mm5); /* c2-c2-c2-c2-c1-c1-c1-c1 */
+
+ /* ^in parallel^ */ /* unpack the 2nd pixel */
+ /*^ */ punpckhbw_r2r (mm7, mm1);
+ /* 00-b1-00-v1-00-r1-00-a1 */
+
+ punpcklbw_r2r (mm7, mm5); /* 00-c1-00-c1-00-c1-00-c1 */
+ punpckhbw_r2r (mm7, mm3); /* 00-c2-00-c2-00-c2-00-c2 */
+
+ /* multiply the pixels by the coefficients */
+ pmullw_r2r (mm5, mm0); /* c1*b2-c1*v2-c1*r2-c1*a2 */
+ pmullw_r2r (mm3, mm1); /* c2*b1-c2*v1-c2*r1-c2*a1 */
+ paddw_r2r (mm1, mm0);
+
+ /* ...extract the last 2 coefficients */
+ movq_r2r (mm4, mm5); /* c4-c4-c4-c4-c3-c3-c3-c3 */
+ punpcklbw_r2r (mm7, mm4); /* 00-c3-00-c3-00-c3-00-c3 */
+ punpckhbw_r2r (mm7, mm5); /* 00-c4-00-c4-00-c4-00-c4 */
+
+ /* fetch the last 2 pixels */
+ movq_r2r (mm2, mm1);
+
+ /* unpack the pixels */
+ punpcklbw_r2r (mm7, mm1);
+ punpckhbw_r2r (mm7, mm2);
+
+ /* multiply by the coefs */
+ pmullw_r2r (mm4, mm1);
+ pmullw_r2r (mm5, mm2);
+
+ /* add the obtained values to the final value */
+ paddw_r2r (mm1, mm0);
+ paddw_r2r (mm2, mm0);
+
+ /* divide by 256 = 16+16+16+16, then repack the final pixel */
+ psrlw_i2r (8, mm0);
+ packuswb_r2r (mm7, mm0);
+
+ movd_r2m (mm0, expix2[loop]);
+
+ ++loop;
+ }
+ /* this was femms, which is AMD 3dnow */
+ __asm__ __volatile__ ("emms\n");
+}
+
+/* Additive-saturating plot of one pixel via MMX:
+ * _out = saturate(_backbuf + _col), byte-wise (paddusb). */
+#define DRAWMETHOD_PLUS_XMMX(_out,_backbuf,_col) \
+{ \
+ movd_m2r(_backbuf, mm0); \
+ paddusb_m2r(_col, mm0); \
+ movd_r2m(mm0, _out); \
+}
+
+/* shorthand used by draw_line_xmmx: blend 'col' into the pixel at p */
+#define DRAWMETHOD DRAWMETHOD_PLUS_XMMX(*p,*p,col)
+
+/* Draw a line from (x1,y1) to (x2,y2) into 'data' using the additive
+ * MMX DRAWMETHOD. Endpoints are normalised so x1 <= x2; vertical and
+ * horizontal lines are special-cased, the rest use 16.16 fixed-point
+ * DDA stepping. Any endpoint outside the screen rejects the whole
+ * line. Always executes emms before returning. */
+void
+draw_line_xmmx (Pixel * data, int x1, int y1, int x2, int y2, int col,
+ int screenx, int screeny)
+{
+ int x, y, dx, dy, yy, xx;
+ Pixel *p;
+
+ if ((y1 < 0) || (y2 < 0) || (x1 < 0) || (x2 < 0) || (y1 >= screeny)
+ || (y2 >= screeny) || (x1 >= screenx) || (x2 >= screenx))
+ goto end_of_line;
+
+ dx = x2 - x1;
+ dy = y2 - y1;
+ /* normalise so that x1 <= x2 */
+ if (x1 >= x2) {
+ int tmp;
+
+ tmp = x1;
+ x1 = x2;
+ x2 = tmp;
+ tmp = y1;
+ y1 = y2;
+ y2 = tmp;
+ dx = x2 - x1;
+ dy = y2 - y1;
+ }
+
+ /* vertical line */
+ if (dx == 0) {
+ if (y1 < y2) {
+ p = &(data[(screenx * y1) + x1]);
+ for (y = y1; y <= y2; y++) {
+ DRAWMETHOD;
+ p += screenx;
+ }
+ } else {
+ p = &(data[(screenx * y2) + x1]);
+ for (y = y2; y <= y1; y++) {
+ DRAWMETHOD;
+ p += screenx;
+ }
+ }
+ goto end_of_line;
+ }
+ /* horizontal line */
+ if (dy == 0) {
+ if (x1 < x2) {
+ p = &(data[(screenx * y1) + x1]);
+ for (x = x1; x <= x2; x++) {
+ DRAWMETHOD;
+ p++;
+ }
+ goto end_of_line;
+ } else {
+ p = &(data[(screenx * y1) + x2]);
+ for (x = x2; x <= x1; x++) {
+ DRAWMETHOD;
+ p++;
+ }
+ goto end_of_line;
+ }
+ }
+ /* 1 */
+ /* \ */
+ /* \ */
+ /* 2 */
+ if (y2 > y1) {
+ /* steep */
+ if (dy > dx) {
+ /* step one row per iteration, advance x in 16.16 fixed point */
+ dx = ((dx << 16) / dy);
+ x = x1 << 16;
+ for (y = y1; y <= y2; y++) {
+ xx = x >> 16;
+ p = &(data[(screenx * y) + xx]);
+ DRAWMETHOD;
+ /* leftover of a disabled 2-pixel-wide variant */
+ if (xx < (screenx - 1)) {
+ p++;
+ /* DRAWMETHOD; */
+ }
+ x += dx;
+ }
+ goto end_of_line;
+ }
+ /* shallow */
+ else {
+ dy = ((dy << 16) / dx);
+ y = y1 << 16;
+ for (x = x1; x <= x2; x++) {
+ yy = y >> 16;
+ p = &(data[(screenx * yy) + x]);
+ DRAWMETHOD;
+ /* NOTE(review): 'p += screeny' looks like it should be screenx
+ * (row stride); harmless since the following draw is commented
+ * out and p is recomputed each iteration */
+ if (yy < (screeny - 1)) {
+ p += screeny;
+ /* DRAWMETHOD; */
+ }
+ y += dy;
+ }
+ }
+ }
+ /* 2 */
+ /* / */
+ /* / */
+ /* 1 */
+ else {
+ /* steep */
+ if (-dy > dx) {
+ dx = ((dx << 16) / -dy);
+ x = (x1 + 1) << 16;
+ for (y = y1; y >= y2; y--) {
+ xx = x >> 16;
+ p = &(data[(screenx * y) + xx]);
+ DRAWMETHOD;
+ if (xx < (screenx - 1)) {
+ p--;
+ /* DRAWMETHOD; */
+ }
+ x += dx;
+ }
+ goto end_of_line;
+ }
+ /* shallow */
+ else {
+ dy = ((dy << 16) / dx);
+ y = y1 << 16;
+ for (x = x1; x <= x2; x++) {
+ yy = y >> 16;
+ p = &(data[(screenx * yy) + x]);
+ DRAWMETHOD;
+ /* NOTE(review): same screeny-vs-screenx question as above */
+ if (yy < (screeny - 1)) {
+ p += screeny;
+ /* DRAWMETHOD; */
+ }
+ y += dy;
+ }
+ goto end_of_line;
+ }
+ }
+end_of_line:
+ /* this was femms, which is AMD 3dnow */
+ __asm__ __volatile__ ("emms\n");
+}
+#else
+/* Fallback when built without HAVE_MMX: extended MMX never available. */
+int
+xmmx_supported (void)
+{
+ return (0);
+}
+#endif
diff --git a/gst/goom/xmmx.h b/gst/goom/xmmx.h
new file mode 100644
index 0000000000..70ef36143e
--- /dev/null
+++ b/gst/goom/xmmx.h
@@ -0,0 +1,537 @@
+/* xmmx.h
+
+ eXtended MultiMedia eXtensions GCC interface library for IA32.
+
+ To use this library, simply include this header file
+ and compile with GCC. You MUST have inlining enabled
+ in order for xmmx_ok() to work; this can be done by
+ simply using -O on the GCC command line.
+
+ Compiling with -DXMMX_TRACE will cause detailed trace
+ output to be sent to stderr for each mmx operation.
+ This adds lots of code, and obviously slows execution to
+ a crawl, but can be very useful for debugging.
+
+ THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY
+ EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT
+ LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+ AND FITNESS FOR ANY PARTICULAR PURPOSE.
+
+ 1999 by R. Fisher
+ Based on libmmx, 1997-99 by H. Dietz and R. Fisher
+
+ Notes:
+ It appears that the latest gas has the pand problem fixed, therefore
+ I'll undefine BROKEN_PAND by default.
+*/
+
+#ifndef _XMMX_H
+#define _XMMX_H
+
+
+/* Warning: at this writing, the version of GAS packaged
+ with most Linux distributions does not handle the
+ parallel AND operation mnemonic correctly. If the
+ symbol BROKEN_PAND is defined, a slower alternative
+ coding will be used. If execution of mmxtest results
+ in an illegal instruction fault, define this symbol.
+*/
+#undef BROKEN_PAND
+
+
+/* The type of a value that fits in an (Extended) MMX register
+ (note that long long constant values MUST be suffixed
+ by LL and unsigned long long values by ULL, lest
+ they be truncated by the compiler)
+*/
+#ifndef _MMX_H
+typedef union {
+ long long q; /* Quadword (64-bit) value */
+ unsigned long long uq; /* Unsigned Quadword */
+ int d[2]; /* 2 Doubleword (32-bit) values */
+ unsigned int ud[2]; /* 2 Unsigned Doubleword */
+ short w[4]; /* 4 Word (16-bit) values */
+ unsigned short uw[4]; /* 4 Unsigned Word */
+ char b[8]; /* 8 Byte (8-bit) values */
+ unsigned char ub[8]; /* 8 Unsigned Byte */
+ float s[2]; /* Single-precision (32-bit) value */
+} __attribute__ ((aligned (8))) mmx_t; /* On an 8-byte (64-bit) boundary */
+#endif
+
+
+
+/* Function to test if multimedia instructions are supported...
+*/
+/* CPUID-based CPU feature probe, 32-bit x86 only (pushes/pops 32-bit
+ * EFLAGS and uses popl/pushl).
+ * NOTE(review): the "%0:" operand references below carry a stray trailing
+ * colon (e.g. "movl $1, %0:") — confirm a current assembler still accepts
+ * this.  Also "eax" is listed in the clobbers while the output %0 is tied
+ * to eax via "=a"; modern GCC rejects clobbering an output register —
+ * verify against the GCC Extended Asm documentation. */
+static int
+mm_support(void)
+{
+	/* Returns 1 if MMX instructions are supported,
+	   3 if Cyrix MMX and Extended MMX instructions are supported
+	   5 if AMD MMX and 3DNow! instructions are supported
+	   0 if hardware does not support any of these
+	*/
+	register int rval = 0;
+
+	__asm__ __volatile__ (
+		/* See if CPUID instruction is supported ... */
+		/* ... Get copies of EFLAGS into eax and ecx */
+		"pushf\n\t"
+		"popl %%eax\n\t"
+		"movl %%eax, %%ecx\n\t"
+
+		/* ... Toggle the ID bit in one copy and store */
+		/*     to the EFLAGS reg */
+		"xorl $0x200000, %%eax\n\t"
+		"push %%eax\n\t"
+		"popf\n\t"
+
+		/* ... Get the (hopefully modified) EFLAGS */
+		"pushf\n\t"
+		"popl %%eax\n\t"
+
+		/* ... Compare and test result */
+		"xorl %%eax, %%ecx\n\t"
+		"testl $0x200000, %%ecx\n\t"
+		"jz NotSupported1\n\t"		/* CPUID not supported */
+
+
+		/* Get standard CPUID information, and
+		   go to a specific vendor section */
+		"movl $0, %%eax\n\t"
+		"cpuid\n\t"
+
+		/* Check for Intel */
+		"cmpl $0x756e6547, %%ebx\n\t"
+		"jne TryAMD\n\t"
+		"cmpl $0x49656e69, %%edx\n\t"
+		"jne TryAMD\n\t"
+		"cmpl $0x6c65746e, %%ecx\n"
+		"jne TryAMD\n\t"
+		"jmp Intel\n\t"
+
+		/* Check for AMD */
+		"\nTryAMD:\n\t"
+		"cmpl $0x68747541, %%ebx\n\t"
+		"jne TryCyrix\n\t"
+		"cmpl $0x69746e65, %%edx\n\t"
+		"jne TryCyrix\n\t"
+		"cmpl $0x444d4163, %%ecx\n"
+		"jne TryCyrix\n\t"
+		"jmp AMD\n\t"
+
+		/* Check for Cyrix */
+		"\nTryCyrix:\n\t"
+		"cmpl $0x69727943, %%ebx\n\t"
+		"jne NotSupported2\n\t"
+		"cmpl $0x736e4978, %%edx\n\t"
+		"jne NotSupported3\n\t"
+		"cmpl $0x64616574, %%ecx\n\t"
+		"jne NotSupported4\n\t"
+		/* Drop through to Cyrix... */
+
+
+		/* Cyrix Section */
+		/* See if extended CPUID level 80000001 is supported */
+		/* The value of CPUID/80000001 for the 6x86MX is undefined
+		   according to the Cyrix CPU Detection Guide (Preliminary
+		   Rev. 1.01 table 1), so we'll check the value of eax for
+		   CPUID/0 to see if standard CPUID level 2 is supported.
+		   According to the table, the only CPU which supports level
+		   2 is also the only one which supports extended CPUID levels.
+		*/
+		"cmpl $0x2, %%eax\n\t"
+		"jne MMXtest\n\t"	/* Use standard CPUID instead */
+
+		/* Extended CPUID supported (in theory), so get extended
+		   features */
+		"movl $0x80000001, %%eax\n\t"
+		"cpuid\n\t"
+		"testl $0x00800000, %%eax\n\t"	/* Test for MMX */
+		"jz NotSupported5\n\t"		/* MMX not supported */
+		"testl $0x01000000, %%eax\n\t"	/* Test for Ext'd MMX */
+		"jnz EMMXSupported\n\t"
+		"movl $1, %0:\n\n\t"		/* MMX Supported */
+		"jmp Return\n\n"
+		"EMMXSupported:\n\t"
+		"movl $3, %0:\n\n\t"		/* EMMX and MMX Supported */
+		"jmp Return\n\t"
+
+
+		/* AMD Section */
+		"AMD:\n\t"
+
+		/* See if extended CPUID is supported */
+		"movl $0x80000000, %%eax\n\t"
+		"cpuid\n\t"
+		"cmpl $0x80000000, %%eax\n\t"
+		"jl MMXtest\n\t"	/* Use standard CPUID instead */
+
+		/* Extended CPUID supported, so get extended features */
+		"movl $0x80000001, %%eax\n\t"
+		"cpuid\n\t"
+		"testl $0x00800000, %%edx\n\t"	/* Test for MMX */
+		"jz NotSupported6\n\t"		/* MMX not supported */
+		"testl $0x80000000, %%edx\n\t"	/* Test for 3DNow! */
+		"jnz ThreeDNowSupported\n\t"
+		"movl $1, %0:\n\n\t"		/* MMX Supported */
+		"jmp Return\n\n"
+		"ThreeDNowSupported:\n\t"
+		"movl $5, %0:\n\n\t"		/* 3DNow! and MMX Supported */
+		"jmp Return\n\t"
+
+
+		/* Intel Section */
+		"Intel:\n\t"
+
+		/* Check for MMX */
+		"MMXtest:\n\t"
+		"movl $1, %%eax\n\t"
+		"cpuid\n\t"
+		"testl $0x00800000, %%edx\n\t"	/* Test for MMX */
+		"jz NotSupported7\n\t"		/* MMX Not supported */
+		"movl $1, %0:\n\n\t"		/* MMX Supported */
+		"jmp Return\n\t"
+
+		/* Nothing supported */
+		"\nNotSupported1:\n\t"
+		"#movl $101, %0:\n\n\t"
+		"\nNotSupported2:\n\t"
+		"#movl $102, %0:\n\n\t"
+		"\nNotSupported3:\n\t"
+		"#movl $103, %0:\n\n\t"
+		"\nNotSupported4:\n\t"
+		"#movl $104, %0:\n\n\t"
+		"\nNotSupported5:\n\t"
+		"#movl $105, %0:\n\n\t"
+		"\nNotSupported6:\n\t"
+		"#movl $106, %0:\n\n\t"
+		"\nNotSupported7:\n\t"
+		"#movl $107, %0:\n\n\t"
+		"movl $0, %0:\n\n\t"
+
+		"Return:\n\t"
+		: "=a" (rval)
+		: /* no input */
+		: "eax", "ebx", "ecx", "edx"
+	);
+
+	/* Return */
+	return(rval);
+}
+
+/* Function to test if mmx instructions are supported...
+*/
+/* BUGFIX: this used to be guarded by "#ifndef _XMMX_H", but _XMMX_H is
+   always defined at the top of this header, so mmx_ok() was never
+   compiled.  Guard on _MMX_H instead, mirroring the mmx_t guard above:
+   skip the definition only when mmx.h already provides mmx_ok(). */
+#ifndef _MMX_H
+inline extern int
+mmx_ok(void)
+{
+	/* Returns 1 if MMX instructions are supported, 0 otherwise */
+	return ( mm_support() & 0x1 );
+}
+#endif
+
+/* Function to test if xmmx instructions are supported...
+*/
+/* Returns 1 when mm_support() reports Cyrix Extended MMX (its bit 1),
+   0 otherwise. */
+inline extern int
+xmmx_ok(void)
+{
+	/* Returns 1 if Extended MMX instructions are supported, 0 otherwise */
+	return ( (mm_support() & 0x2) >> 1 );
+}
+
+
+/* Helper functions for the instruction macros that follow...
+ (note that memory-to-register, m2r, instructions are nearly
+ as efficient as register-to-register, r2r, instructions;
+ however, memory-to-memory instructions are really simulated
+ as a convenience, and are only 1/3 as efficient)
+*/
+#ifdef XMMX_TRACE
+
+/* Include the stuff for printing a trace to stderr...
+*/
+
+#include <stdio.h>
+
+#define mmx_i2r(op, imm, reg) \
+ { \
+ mmx_t mmx_trace; \
+ mmx_trace.uq = (imm); \
+ fprintf(stderr, #op "_i2r(" #imm "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (imm)); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_m2r(op, mem, reg) \
+ { \
+ mmx_t mmx_trace; \
+ mmx_trace = (mem); \
+ fprintf(stderr, #op "_m2r(" #mem "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (mem)); \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #reg "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_r2m(op, reg, mem) \
+ { \
+ mmx_t mmx_trace; \
+ __asm__ __volatile__ ("movq %%" #reg ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #op "_r2m(" #reg "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ mmx_trace = (mem); \
+ fprintf(stderr, #mem "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %%" #reg ", %0" \
+ : "=X" (mem) \
+ : /* nothing */ ); \
+ mmx_trace = (mem); \
+ fprintf(stderr, #mem "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_r2r(op, regs, regd) \
+ { \
+ mmx_t mmx_trace; \
+ __asm__ __volatile__ ("movq %%" #regs ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #op "_r2r(" #regs "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %%" #regd ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #regd "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ (#op " %" #regs ", %" #regd); \
+ __asm__ __volatile__ ("movq %%" #regd ", %0" \
+ : "=X" (mmx_trace) \
+ : /* nothing */ ); \
+ fprintf(stderr, #regd "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#define mmx_m2m(op, mems, memd) \
+ { \
+ mmx_t mmx_trace; \
+ mmx_trace = (mems); \
+ fprintf(stderr, #op "_m2m(" #mems "=0x%08x%08x, ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ mmx_trace = (memd); \
+ fprintf(stderr, #memd "=0x%08x%08x) => ", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ __asm__ __volatile__ ("movq %0, %%mm0\n\t" \
+ #op " %1, %%mm0\n\t" \
+ "movq %%mm0, %0" \
+ : "=X" (memd) \
+ : "X" (mems)); \
+ mmx_trace = (memd); \
+ fprintf(stderr, #memd "=0x%08x%08x\n", \
+ mmx_trace.d[1], mmx_trace.d[0]); \
+ }
+
+#else
+
+/* These macros are a lot simpler without the tracing...
+*/
+
+#define mmx_i2r(op, imm, reg) \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (imm) )
+
+#define mmx_m2r(op, mem, reg) \
+ __asm__ __volatile__ (#op " %0, %%" #reg \
+ : /* nothing */ \
+ : "X" (mem))
+
+#define mmx_m2ir(op, mem, rs) \
+ __asm__ __volatile__ (#op " %0, %%" #rs \
+ : /* nothing */ \
+ : "X" (mem) )
+
+#define mmx_r2m(op, reg, mem) \
+ __asm__ __volatile__ (#op " %%" #reg ", %0" \
+ : "=X" (mem) \
+ : /* nothing */ )
+
+#define mmx_r2r(op, regs, regd) \
+ __asm__ __volatile__ (#op " %" #regs ", %" #regd)
+
+#define mmx_r2ir(op, rs1, rs2) \
+ __asm__ __volatile__ (#op " %%" #rs1 ", %%" #rs2 \
+ : /* nothing */ \
+ : /* nothing */ )
+
+#define mmx_m2m(op, mems, memd) \
+ __asm__ __volatile__ ("movq %0, %%mm0\n\t" \
+ #op " %1, %%mm0\n\t" \
+ "movq %%mm0, %0" \
+ : "=X" (memd) \
+ : "X" (mems))
+
+#endif
+
+
+
+/* 1x64 MOVe Quadword
+ (this is both a load and a store...
+ in fact, it is the only way to store)
+*/
+#define movq_m2r(var, reg) mmx_m2r(movq, var, reg)
+#define movq_r2m(reg, var) mmx_r2m(movq, reg, var)
+#define movq_r2r(regs, regd) mmx_r2r(movq, regs, regd)
+#define movq(vars, vard) \
+ __asm__ __volatile__ ("movq %1, %%mm0\n\t" \
+ "movq %%mm0, %0" \
+ : "=X" (vard) \
+ : "X" (vars))
+
+
+/* 1x32 MOVe Doubleword
+ (like movq, this is both load and store...
+ but is most useful for moving things between
+ mmx registers and ordinary registers)
+*/
+#define movd_m2r(var, reg) mmx_m2r(movd, var, reg)
+#define movd_r2m(reg, var) mmx_r2m(movd, reg, var)
+#define movd_r2r(regs, regd) mmx_r2r(movd, regs, regd)
+#define movd(vars, vard) \
+ __asm__ __volatile__ ("movd %1, %%mm0\n\t" \
+ "movd %%mm0, %0" \
+ : "=X" (vard) \
+ : "X" (vars))
+
+
+
+/* 4x16 Parallel MAGnitude
+*/
+#define pmagw_m2r(var, reg) mmx_m2r(pmagw, var, reg)
+#define pmagw_r2r(regs, regd) mmx_r2r(pmagw, regs, regd)
+#define pmagw(vars, vard) mmx_m2m(pmagw, vars, vard)
+
+
+/* 4x16 Parallel ADDs using Saturation arithmetic
+ and Implied destination
+*/
+#define paddsiw_m2ir(var, rs) mmx_m2ir(paddsiw, var, rs)
+#define paddsiw_r2ir(rs1, rs2) mmx_r2ir(paddsiw, rs1, rs2)
+#define paddsiw(vars, vard) mmx_m2m(paddsiw, vars, vard)
+
+
+/* 4x16 Parallel SUBs using Saturation arithmetic
+ and Implied destination
+*/
+#define psubsiw_m2ir(var, rs) mmx_m2ir(psubsiw, var, rs)
+#define psubsiw_r2ir(rs1, rs2) mmx_r2ir(psubsiw, rs1, rs2)
+#define psubsiw(vars, vard) mmx_m2m(psubsiw, vars, vard)
+
+
+/* 4x16 Parallel MULs giving High 4x16 portions of results
+ Rounded with 1/2 bit 15.
+*/
+#define pmulhrw_m2r(var, reg) mmx_m2r(pmulhrw, var, reg)
+#define pmulhrw_r2r(regs, regd) mmx_r2r(pmulhrw, regs, regd)
+#define pmulhrw(vars, vard) mmx_m2m(pmulhrw, vars, vard)
+
+
+/* 4x16 Parallel MULs giving High 4x16 portions of results
+ Rounded with 1/2 bit 15, storing to Implied register
+*/
+#define pmulhriw_m2ir(var, rs) mmx_m2ir(pmulhriw, var, rs)
+#define pmulhriw_r2ir(rs1, rs2) mmx_r2ir(pmulhriw, rs1, rs2)
+#define pmulhriw(vars, vard) mmx_m2m(pmulhriw, vars, vard)
+
+
+/* 4x16 Parallel Muls (and ACcumulate) giving High 4x16 portions
+ of results Rounded with 1/2 bit 15, accumulating with Implied register
+*/
+#define pmachriw_m2ir(var, rs) mmx_m2ir(pmachriw, var, rs)
+#define pmachriw_r2ir(rs1, rs2) mmx_r2ir(pmachriw, rs1, rs2)
+#define pmachriw(vars, vard) mmx_m2m(pmachriw, vars, vard)
+
+
+/* 8x8u Parallel AVErage
+*/
+#define paveb_m2r(var, reg) mmx_m2r(paveb, var, reg)
+#define paveb_r2r(regs, regd) mmx_r2r(paveb, regs, regd)
+#define paveb(vars, vard) mmx_m2m(paveb, vars, vard)
+
+
+/* 8x8u Parallel DISTance and accumulate with
+ unsigned saturation to Implied register
+*/
+#define pdistib_m2ir(var, rs) mmx_m2ir(pdistib, var, rs)
+#define pdistib(vars, vard) mmx_m2m(pdistib, vars, vard)
+
+
+/* 8x8 Parallel conditional MoVe
+ if implied register field is Zero
+*/
+#define pmvzb_m2ir(var, rs) mmx_m2ir(pmvzb, var, rs)
+
+
+/* 8x8 Parallel conditional MoVe
+ if implied register field is Not Zero
+*/
+#define pmvnzb_m2ir(var, rs) mmx_m2ir(pmvnzb, var, rs)
+
+
+/* 8x8 Parallel conditional MoVe
+ if implied register field is Less than Zero
+*/
+#define pmvlzb_m2ir(var, rs) mmx_m2ir(pmvlzb, var, rs)
+
+
+/* 8x8 Parallel conditional MoVe
+ if implied register field is Greater than or Equal to Zero
+*/
+#define pmvgezb_m2ir(var, rs) mmx_m2ir(pmvgezb, var, rs)
+
+
+/* Fast Empty MMx State
+ (used to clean-up when going from mmx to float use
+ of the registers that are shared by both; note that
+ there is no float-to-xmmx operation needed, because
+ only the float tag word info is corruptible)
+*/
+#ifdef XMMX_TRACE
+
+#define femms() \
+ { \
+ fprintf(stderr, "femms()\n"); \
+ __asm__ __volatile__ ("femms"); \
+ }
+
+#else
+
+#define femms() __asm__ __volatile__ ("femms")
+
+#endif
+
+#endif
+
diff --git a/gst/goom2k1/README b/gst/goom2k1/README
new file mode 100644
index 0000000000..f12cf1b55b
--- /dev/null
+++ b/gst/goom2k1/README
@@ -0,0 +1,5 @@
+The Goom plugin is based on the Goom visualization code from
+the Goom homepage found at:
+http://ios.free.fr/?page=projet&quoi=1
+
+Like the original library, the Goom plugin is also available under the LGPL license.
diff --git a/gst/goom2k1/filters.c b/gst/goom2k1/filters.c
new file mode 100644
index 0000000000..3d1b92c001
--- /dev/null
+++ b/gst/goom2k1/filters.c
@@ -0,0 +1,531 @@
+/* filter.c version 0.7
+ * contient les filtres applicable a un buffer
+ * creation : 01/10/2000
+ * -ajout de sinFilter()
+ * -ajout de zoomFilter()
+ * -copie de zoomFilter() en zoomFilterRGB(), gérant les 3 couleurs
+ * -optimisation de sinFilter (utilisant une table de sin)
+ * -asm
+ * -optimisation de la procedure de génération du buffer de transformation
+ * la vitesse est maintenant comprise dans [0..128] au lieu de [0..100]
+*/
+
+/*#define _DEBUG_PIXEL; */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "filters.h"
+#include "graphic.h"
+#include "goom_tools.h"
+#include "goom_core.h"
+#include <stdlib.h>
+#include <math.h>
+#include <stdio.h>
+
+#ifdef MMX
+#define USE_ASM
+#endif
+#ifdef POWERPC
+#define USE_ASM
+#endif
+
+#ifdef USE_ASM
+#define EFFECT_DISTORS 4
+#else
+#define EFFECT_DISTORS 10
+#endif
+
+
+#ifdef USE_ASM
+
+#ifdef MMX
+int mmx_zoom ();
+guint32 mmx_zoom_size;
+#endif /* MMX */
+
+#ifdef POWERPC
+extern unsigned int useAltivec;
+extern void ppc_zoom (void);
+extern void ppc_zoom_altivec (void);
+unsigned int ppcsize4;
+#endif /* PowerPC */
+
+
+unsigned int *coeffs = 0, *freecoeffs = 0;
+guint32 *expix1 = 0; /* pointeur exporte vers p1 */
+guint32 *expix2 = 0; /* pointeur exporte vers p2 */
+guint32 zoom_width;
+#endif /* ASM */
+
+
+static int firstTime = 1;
+/* Sine lookup table indexed by an unsigned short (0..0xffff).
+ * BUGFIX: sized 0x10000 instead of 0xffff — the WAVE_MODE code in
+ * calculatePXandPY() casts its index through (unsigned short), which can
+ * produce 0xffff and would read one element past the old array.  The
+ * extra entry stays zero-initialised, matching sin(2*pi) ~= 0. */
+static int sintable[0x10000];
+
+/* Allocate a ZoomFilterData and initialise every field to its default.
+ * The working buffers (buffer / firedec) are allocated lazily by
+ * zoomFilterSetResolution() once the output geometry is known.
+ * Returns NULL on allocation failure (caller owns the result and frees
+ * it with zoomFilterDestroy()). */
+ZoomFilterData *
+zoomFilterNew (void)
+{
+  ZoomFilterData *zf = malloc (sizeof (ZoomFilterData));
+
+  /* BUGFIX: the allocation result used to be dereferenced unchecked */
+  if (zf == NULL)
+    return NULL;
+
+  zf->vitesse = 128;
+  zf->pertedec = 8;
+  zf->sqrtperte = 16;
+  zf->middleX = 1;
+  zf->middleY = 1;
+  zf->reverse = 0;
+  zf->mode = WAVE_MODE;
+  zf->hPlaneEffect = 0;
+  zf->vPlaneEffect = 0;
+  zf->noisify = 0;
+  zf->buffsize = 0;
+  zf->res_x = 0;
+  zf->res_y = 0;
+
+  zf->buffer = NULL;
+  zf->firedec = NULL;
+
+  zf->wave = 0;
+  zf->wavesp = 0;
+
+  return zf;
+}
+
+/* returns x>>s, taking the sign of x into account */
+/* Right-shifting a negative int is implementation-defined in C89, so the
+ * magnitude is shifted and the sign re-applied (rounds toward zero). */
+static inline int
+ShiftRight (int x, const unsigned char s)
+{
+  if (x < 0)
+    return -(-x >> s);
+  else
+    return x >> s;
+}
+
+/*
+  compute px and py from x, y, middleX, middleY and theMode.
+  px and py give the new (source) position, in sqrtperte-ths of a
+  pixel (value * 16)
+*/
+static void
+calculatePXandPY (GoomData * gd, int x, int y, int *px, int *py)
+{
+  ZoomFilterData *zf = gd->zfd;
+  int middleX, middleY;
+  guint32 resoly = zf->res_y;
+  int vPlaneEffect = zf->vPlaneEffect;
+  int hPlaneEffect = zf->hPlaneEffect;
+  int vitesse = zf->vitesse;
+  char theMode = zf->mode;
+
+  if (theMode == WATER_MODE) {
+    /* "water" distortion: horizontal displacement from the firedec table,
+     * plus a random-walked wave value persisted across calls in zf */
+    int wavesp = zf->wavesp;
+    int wave = zf->wave;
+    int yy = y + RAND (gd) % 4 + wave / 10;
+
+    yy -= RAND (gd) % 4;
+    if (yy < 0)
+      yy = 0;
+    /* NOTE(review): signed/unsigned comparison (int vs guint32); safe
+     * here because yy >= 0 at this point */
+    if (yy >= resoly)
+      yy = resoly - 1;
+
+    *px = (x << 4) + zf->firedec[yy] + (wave / 10);
+    *py = (y << 4) + 132 - ((vitesse < 132) ? vitesse : 131);
+
+    /* random walk of the wave speed/amplitude, pulled back to zero */
+    wavesp += RAND (gd) % 3;
+    wavesp -= RAND (gd) % 3;
+    if (wave < -10)
+      wavesp += 2;
+    if (wave > 10)
+      wavesp -= 2;
+    wave += (wavesp / 10) + RAND (gd) % 3;
+    wave -= RAND (gd) % 3;
+    if (wavesp > 100)
+      wavesp = (wavesp * 9) / 10;
+
+    zf->wavesp = wavesp;
+    zf->wave = wave;
+  } else {
+    int dist;
+    register int vx, vy;
+    int fvitesse = vitesse << 4;
+
+    middleX = zf->middleX;
+    middleY = zf->middleY;
+
+    /* random jitter of up to +/- noisify pixels */
+    if (zf->noisify) {
+      x += RAND (gd) % zf->noisify;
+      x -= RAND (gd) % zf->noisify;
+      y += RAND (gd) % zf->noisify;
+      y -= RAND (gd) % zf->noisify;
+    }
+
+    /* vx/vy: position relative to the zoom centre in 512ths of a pixel,
+     * optionally sheared by the h/v plane effects */
+    if (hPlaneEffect)
+      vx = ((x - middleX) << 9) + hPlaneEffect * (y - middleY);
+    else
+      vx = (x - middleX) << 9;
+
+    if (vPlaneEffect)
+      vy = ((y - middleY) << 9) + vPlaneEffect * (x - middleX);
+    else
+      vy = (y - middleY) << 9;
+
+    /* per-mode modulation of the zoom speed by the squared distance */
+    switch (theMode) {
+      case WAVE_MODE:
+        dist =
+            ShiftRight (vx, 9) * ShiftRight (vx, 9) + ShiftRight (vy,
+            9) * ShiftRight (vy, 9);
+        fvitesse *=
+            1024 +
+            ShiftRight (sintable[(unsigned short) (0xffff * dist *
+                    EFFECT_DISTORS)], 6);
+        fvitesse /= 1024;
+        break;
+      case CRYSTAL_BALL_MODE:
+        dist =
+            ShiftRight (vx, 9) * ShiftRight (vx, 9) + ShiftRight (vy,
+            9) * ShiftRight (vy, 9);
+        fvitesse += (dist * EFFECT_DISTORS >> 10);
+        break;
+      case AMULETTE_MODE:
+        dist =
+            ShiftRight (vx, 9) * ShiftRight (vx, 9) + ShiftRight (vy,
+            9) * ShiftRight (vy, 9);
+        fvitesse -= (dist * EFFECT_DISTORS >> 4);
+        break;
+      case SCRUNCH_MODE:
+        dist =
+            ShiftRight (vx, 9) * ShiftRight (vx, 9) + ShiftRight (vy,
+            9) * ShiftRight (vy, 9);
+        fvitesse -= (dist * EFFECT_DISTORS >> 9);
+        break;
+    }
+    /* scale (vx, vy) by fvitesse, converting to 16ths of a pixel; shifts
+     * of negative values are avoided by negating first */
+    if (vx < 0)
+      *px = (middleX << 4) - (-(vx * fvitesse) >> 16);
+    else
+      *px = (middleX << 4) + ((vx * fvitesse) >> 16);
+    if (vy < 0)
+      *py = (middleY << 4) - (-(vy * fvitesse) >> 16);
+    else
+      *py = (middleY << 4) + ((vy * fvitesse) >> 16);
+  }
+}
+
+/*#define _DEBUG */
+
+/* Write pixel (x,y) of the framebuffer.  Color channels are .r (red),
+ * .v (green, "vert") and .b (blue), packed as 0x00RRGGBB — or with red
+ * and blue swapped when USE_DGA is defined. */
+static inline void
+setPixelRGB (Uint * buffer, Uint x, Uint y, Color c,
+    guint32 resolx, guint32 resoly)
+{
+/* buffer[ y*WIDTH + x ] = (c.r<<16)|(c.v<<8)|c.b */
+#ifdef _DEBUG_PIXEL
+  if (x + y * resolx >= resolx * resoly) {
+    fprintf (stderr, "setPixel ERROR : hors du tableau... %i, %i\n", x, y);
+    /*exit (1) ; */
+  }
+#endif
+
+#ifdef USE_DGA
+  buffer[y * resolx + x] = (c.b << 16) | (c.v << 8) | c.r;
+#else
+  buffer[y * resolx + x] = (c.r << 16) | (c.v << 8) | c.b;
+#endif
+}
+
+
+/* Write the pixel at linear offset x (same packing as setPixelRGB,
+ * without the y coordinate). */
+static inline void
+setPixelRGB_ (Uint * buffer, Uint x, Color c, guint32 resolx, guint32 resoly)
+{
+#ifdef _DEBUG
+  if (x >= resolx * resoly) {
+    printf ("setPixel ERROR : hors du tableau... %i >= %i*%i (%i)\n", x, resolx,
+        resoly, resolx * resoly);
+    exit (1);
+  }
+#endif
+
+#ifdef USE_DGA
+  buffer[x] = (c.b << 16) | (c.v << 8) | c.r;
+#else
+  buffer[x] = (c.r << 16) | (c.v << 8) | c.b;
+#endif
+}
+
+/* Read the pixel at linear offset x into *c, byte by byte so the same
+ * packing is recovered on either endianness. */
+static inline void
+getPixelRGB_ (Uint * buffer, Uint x, Color * c, guint32 resolx, guint32 resoly)
+{
+  register unsigned char *tmp8;
+
+#ifdef _DEBUG
+  if (x >= resolx * resoly) {
+    printf ("getPixel ERROR : hors du tableau... %i\n", x);
+    exit (1);
+  }
+#endif
+
+#ifdef __BIG_ENDIAN__
+  /* NOTE(review): the first assignment to c->b is overwritten by the
+   * last one; it only serves to initialise tmp8 */
+  c->b = *(unsigned char *) (tmp8 = (unsigned char *) (buffer + x));
+  c->r = *(unsigned char *) (++tmp8);
+  c->v = *(unsigned char *) (++tmp8);
+  c->b = *(unsigned char *) (++tmp8);
+
+#else
+  /* BEWARE OF THE LITTLE ENDIAN: bytes appear as b, v, r in memory */
+  tmp8 = (unsigned char *) (buffer + x);
+  c->b = *(unsigned char *) (tmp8++);
+  c->v = *(unsigned char *) (tmp8++);
+  c->r = *(unsigned char *) (tmp8);
+/* *c = (Color) buffer[x+y*WIDTH] ; */
+#endif
+}
+
+/* (Re)allocate the zoom filter buffers for the current output size and
+ * reset the zoom centre; also builds the shared sine table on first use.
+ * NOTE(review): malloc/calloc results are not checked here — TODO. */
+static void
+zoomFilterSetResolution (GoomData * gd, ZoomFilterData * zf)
+{
+  unsigned short us;
+
+  if (zf->buffsize >= gd->buffsize) {
+    /* buffers already big enough: just adopt the new geometry */
+    zf->res_x = gd->resolx;
+    zf->res_y = gd->resoly;
+    zf->middleX = gd->resolx / 2;
+    zf->middleY = gd->resoly - 1;
+
+    return;
+  }
+#ifndef USE_ASM
+  if (zf->buffer)
+    free (zf->buffer);
+  zf->buffer = 0;
+#else
+  if (coeffs)
+    free (freecoeffs);
+  coeffs = 0;
+#endif
+  zf->middleX = gd->resolx / 2;
+  zf->middleY = gd->resoly - 1;
+  zf->res_x = gd->resolx;
+  zf->res_y = gd->resoly;
+
+  if (zf->firedec)
+    free (zf->firedec);
+  zf->firedec = 0;
+
+  zf->buffsize = gd->resolx * gd->resoly * sizeof (unsigned int);
+
+#ifdef USE_ASM
+  /* BUGFIX: this used to read undeclared 'resx'/'resy' and did not
+   * compile under USE_ASM; use the GoomData resolution instead. */
+  freecoeffs = (unsigned int *)
+      malloc (gd->resolx * gd->resoly * 2 * sizeof (unsigned int) + 128);
+  coeffs = (guint32 *) ((1 + ((unsigned int) (freecoeffs)) / 128) * 128);
+
+#else
+  zf->buffer = calloc (sizeof (guint32), zf->buffsize * 5);
+  zf->pos10 = zf->buffer;
+  zf->c[0] = zf->pos10 + zf->buffsize;
+  zf->c[1] = zf->c[0] + zf->buffsize;
+  zf->c[2] = zf->c[1] + zf->buffsize;
+  zf->c[3] = zf->c[2] + zf->buffsize;
+#endif
+  zf->firedec = (int *) malloc (zf->res_y * sizeof (int));
+
+  if (firstTime) {
+    firstTime = 0;
+
+    /* build the sine lookup table.
+     * BUGFIX: the constant was mistyped as 3.31415 (not pi), making the
+     * table span more than one period with a jump at the wrap-around. */
+    for (us = 0; us < 0xffff; us++) {
+      sintable[us] = (int) (1024.0f * sin (us * 2 * 3.1415f / 0xffff));
+    }
+  }
+}
+
+/* Release a ZoomFilterData and the buffers it owns.  A NULL filter is
+ * accepted and ignored, like free(). */
+void
+zoomFilterDestroy (ZoomFilterData * zf)
+{
+  if (zf == NULL)
+    return;
+
+  /* free(NULL) is a no-op, so no per-buffer guards are needed */
+  free (zf->firedec);
+  free (zf->buffer);
+  free (zf);
+}
+
+/*===============================================================*/
+/* Apply the zoom/deformation filter: every pixel of p2 becomes a
+ * bilinear blend of the four source pixels of p1 addressed by the
+ * precomputed transformation buffer.  When zfd_update is non-zero the
+ * transformation buffer (pos10 + four coefficient planes, or 'coeffs'
+ * in the USE_ASM builds) is regenerated first via calculatePXandPY(). */
+void
+zoomFilterFastRGB (GoomData * goomdata, ZoomFilterData * zf, int zfd_update)
+{
+  guint32 prevX = goomdata->resolx;
+  guint32 prevY = goomdata->resoly;
+
+  guint32 *pix1 = goomdata->p1;
+  guint32 *pix2 = goomdata->p2;
+  unsigned int *pos10;
+  unsigned int **c;
+
+  Uint x, y;
+
+/* static unsigned int prevX = 0, prevY = 0; */
+
+#ifdef USE_ASM
+  expix1 = pix1;
+  expix2 = pix2;
+#else
+  Color couleur;
+  Color col1, col2, col3, col4;
+  Uint position;
+#endif
+
+  if ((goomdata->resolx != zf->res_x) || (goomdata->resoly != zf->res_y)) {
+    zoomFilterSetResolution (goomdata, zf);
+  }
+
+  pos10 = zf->pos10;
+  c = zf->c;
+
+  if (zfd_update) {
+    guchar sqrtperte = zf->sqrtperte;
+    gint start_y = 0;
+
+    if (zf->reverse)
+      zf->vitesse = 256 - zf->vitesse;
+
+    /* regenerate the transformation buffer */
+    for (y = 0; y < zf->res_y; y++) {
+      gint y_16 = y << 4;
+      gint max_px = (prevX - 1) * sqrtperte;
+      gint max_py = (prevY - 1) * sqrtperte;
+
+      for (x = 0; x < zf->res_x; x++) {
+        gint px, py;
+        guchar coefv, coefh;
+
+        /* compute px and py from */
+        /* x, y, middleX, middleY and theMode */
+        calculatePXandPY (goomdata, x, y, &px, &py);
+
+        /* nudge the degenerate identity mapping off itself */
+        if ((px == x << 4) && (py == y_16))
+          py += 8;
+
+        /* out-of-range source: pixel 0 with all-zero weights (this also
+         * keeps the +prevX+1 neighbour reads below inside the buffer) */
+        if ((py < 0) || (px < 0) || (py >= max_py) || (px >= max_px)) {
+#ifdef USE_ASM
+          coeffs[(y * prevX + x) * 2] = 0;
+          coeffs[(y * prevX + x) * 2 + 1] = 0;
+#else
+          pos10[start_y + x] = 0;
+          c[0][start_y + x] = 0;
+          c[1][start_y + x] = 0;
+          c[2][start_y + x] = 0;
+          c[3][start_y + x] = 0;
+#endif
+        } else {
+          int npx10;
+          int npy10;
+          int pos;
+
+          npx10 = (px / sqrtperte);
+          npy10 = (py / sqrtperte);
+
+/* if (npx10 >= prevX) fprintf(stderr,"error npx:%d",npx10);
+   if (npy10 >= prevY) fprintf(stderr,"error npy:%d",npy10);
+*/
+          coefh = px % sqrtperte;
+          coefv = py % sqrtperte;
+#ifdef USE_ASM
+          /* packed format: [source offset in bytes][4 x 8-bit weights] */
+          pos = (y * prevX + x) * 2;
+          coeffs[pos] = (npx10 + prevX * npy10) * 4;
+
+          if (!(coefh || coefv))
+            coeffs[pos + 1] = (sqrtperte * sqrtperte - 1);
+          else
+            coeffs[pos + 1] = ((sqrtperte - coefh) * (sqrtperte - coefv));
+
+          coeffs[pos + 1] |= (coefh * (sqrtperte - coefv)) << 8;
+          coeffs[pos + 1] |= ((sqrtperte - coefh) * coefv) << 16;
+          coeffs[pos + 1] |= (coefh * coefv) << 24;
+#else
+          /* the four bilinear weights sum to sqrtperte^2 */
+          pos = start_y + x;
+          pos10[pos] = npx10 + prevX * npy10;
+
+          if (!(coefh || coefv))
+            c[0][pos] = sqrtperte * sqrtperte - 1;
+          else
+            c[0][pos] = (sqrtperte - coefh) * (sqrtperte - coefv);
+
+          c[1][pos] = coefh * (sqrtperte - coefv);
+          c[2][pos] = (sqrtperte - coefh) * coefv;
+          c[3][pos] = coefh * coefv;
+#endif
+        }
+      }
+      /* Advance start of line index */
+      start_y += prevX;
+    }
+  }
+#ifdef USE_ASM
+#ifdef MMX
+  zoom_width = prevX;
+  mmx_zoom_size = prevX * prevY;
+  mmx_zoom ();
+#endif
+
+#ifdef POWERPC
+  zoom_width = prevX;
+  if (useAltivec) {
+    ppcsize4 = ((unsigned int) (prevX * prevY)) / 4;
+    ppc_zoom_altivec ();
+  } else {
+    ppcsize4 = ((unsigned int) (prevX * prevY));
+    ppc_zoom ();
+  }
+#endif
+#else
+  /* C fallback: weighted blend of the four neighbouring source pixels */
+  for (position = 0; position < prevX * prevY; position++) {
+    getPixelRGB_ (pix1, pos10[position], &col1, goomdata->resolx,
+        goomdata->resoly);
+    getPixelRGB_ (pix1, pos10[position] + 1, &col2, goomdata->resolx,
+        goomdata->resoly);
+    getPixelRGB_ (pix1, pos10[position] + prevX, &col3, goomdata->resolx,
+        goomdata->resoly);
+    getPixelRGB_ (pix1, pos10[position] + prevX + 1, &col4, goomdata->resolx,
+        goomdata->resoly);
+
+    couleur.r = col1.r * c[0][position]
+        + col2.r * c[1][position]
+        + col3.r * c[2][position]
+        + col4.r * c[3][position];
+    couleur.r >>= zf->pertedec;
+
+    couleur.v = col1.v * c[0][position]
+        + col2.v * c[1][position]
+        + col3.v * c[2][position]
+        + col4.v * c[3][position];
+    couleur.v >>= zf->pertedec;
+
+    couleur.b = col1.b * c[0][position]
+        + col2.b * c[1][position]
+        + col3.b * c[2][position]
+        + col4.b * c[3][position];
+    couleur.b >>= zf->pertedec;
+
+    setPixelRGB_ (pix2, position, couleur, goomdata->resolx, goomdata->resoly);
+  }
+#endif
+}
+
+
+/* Draw a small 5-pixel cross at a position orbiting the zoom centre:
+ * t1/t2 are the x/y radii, t3/t4 the periods, cycle the time variable.
+ * Pixels are only written when the whole cross fits inside the frame. */
+void
+pointFilter (GoomData * goomdata, Color c,
+    float t1, float t2, float t3, float t4, Uint cycle)
+{
+  Uint *pix1 = goomdata->p1;
+  ZoomFilterData *zf = goomdata->zfd;
+  Uint x = (Uint) (zf->middleX + (int) (t1 * cos ((float) cycle / t3)));
+  Uint y = (Uint) (zf->middleY + (int) (t2 * sin ((float) cycle / t4)));
+
+  /* x and y are unsigned, so a negative position wraps to a huge value
+   * and is rejected by the range check below */
+  if ((x > 1) && (y > 1) && (x < goomdata->resolx - 2)
+      && (y < goomdata->resoly - 2)) {
+    setPixelRGB (pix1, x + 1, y, c, goomdata->resolx, goomdata->resoly);
+    setPixelRGB (pix1, x, y + 1, c, goomdata->resolx, goomdata->resoly);
+    setPixelRGB (pix1, x + 1, y + 1, WHITE, goomdata->resolx, goomdata->resoly);
+    setPixelRGB (pix1, x + 2, y + 1, c, goomdata->resolx, goomdata->resoly);
+    setPixelRGB (pix1, x + 1, y + 2, c, goomdata->resolx, goomdata->resoly);
+  }
+}
diff --git a/gst/goom2k1/filters.h b/gst/goom2k1/filters.h
new file mode 100644
index 0000000000..0dff5642dd
--- /dev/null
+++ b/gst/goom2k1/filters.h
@@ -0,0 +1,83 @@
+#ifndef FILTERS_H
+#define FILTERS_H
+
+#include <glib.h>
+
+#include "graphic.h"
+#include "goom_core.h"
+
+#define NORMAL_MODE 0
+#define WAVE_MODE 1
+#define CRYSTAL_BALL_MODE 2
+#define SCRUNCH_MODE 3
+#define AMULETTE_MODE 4
+#define WATER_MODE 5
+
+struct ZoomFilterData
+{
+ int vitesse;
+ unsigned char pertedec;
+ unsigned char sqrtperte;
+ int middleX;
+ int middleY;
+ char reverse;
+ char mode;
+ /* @since June 2001 */
+ int hPlaneEffect;
+ int vPlaneEffect;
+ char noisify;
+
+ guint32 res_x;
+ guint32 res_y;
+ guint32 buffsize;
+
+ guint32 *buffer;
+ guint32 *pos10;
+ guint32 *c[4];
+ int *firedec;
+
+ int wave;
+ int wavesp;
+};
+
+void pointFilter(GoomData *goomdata, Color c,
+ float t1, float t2, float t3, float t4,
+ guint32 cycle);
+
+/* filtre de zoom :
+ le contenu de pix1 est copie dans pix2, avec l'effet appliqué
+ midx et midy represente le centre du zoom
+
+void zoomFilter(Uint *pix1, Uint *pix2, Uint middleX, Uint middleY);
+void zoomFilterRGB(Uint *pix1,
+Uint *pix2,
+Uint middleX,
+Uint middleY);
+*/
+
+ZoomFilterData *zoomFilterNew (void);
+void zoomFilterDestroy (ZoomFilterData *zf);
+void zoomFilterFastRGB (GoomData *goomdata, ZoomFilterData *zf,
+ int zfd_update);
+
+/* filtre sin :
+ le contenu de pix1 est copie dans pix2, avec l'effet appliqué
+ cycle est la variable de temps.
+ mode vaut SIN_MUL ou SIN_ADD
+ rate est le pourcentage de l'effet appliqué
+ lenght : la longueur d'onde (1..10) [5]
+ speed : la vitesse (1..100) [10]
+*/
+/*
+void sinFilter(Uint *pix1,Uint *pix2,
+ Uint cycle,
+ Uint mode,
+ Uint rate,
+ char lenght,
+ Uint speed);
+*/
+
+#define SIN_MUL 1
+#define SIN_ADD 2
+
+#endif
diff --git a/gst/goom2k1/filters_mmx.s b/gst/goom2k1/filters_mmx.s
new file mode 100644
index 0000000000..337de56c39
--- /dev/null
+++ b/gst/goom2k1/filters_mmx.s
@@ -0,0 +1,130 @@
+;// file : mmx_zoom.s
+;// author : JC Hoelt <jeko@free.fr>
+;//
+;// history
+;// 07/01/2001 : Changing FEMMS to EMMS : slower... but run on intel machines
+;// 03/01/2001 : WIDTH and HEIGHT are now variable
+;// 28/12/2000 : adding comments to the code, suppress some useless lines
+;// 27/12/2000 : reducing memory access... improving performance by 20%
+;// coefficients are now on 1 byte
+;// 22/12/2000 : Changing data structure
+;// 16/12/2000 : AT&T version
+;// 14/12/2000 : unrolling loop
+;// 12/12/2000 : 64 bits memory access
+
+
+.data
+
+thezero:
+ .long 0x00000000
+ .long 0x00000000
+
+
+.text
+
+.globl mmx_zoom ;// name of the function to call by C program
+.extern coeffs ;// the transformation buffer
+.extern expix1,expix2 ;// the source and destination buffer
+.extern mmx_zoom_size, zoom_width ;// size of the buffers
+
+.align 16
+mmx_zoom:
+
+push %ebp
+push %esp
+
+;// initialisation du mm7 à zero
+movq (thezero), %mm7
+
+movl zoom_width, %eax
+movl $4, %ebx
+mull %ebx
+movl %eax, %ebp
+
+movl (coeffs), %eax
+movl (expix1), %edx
+movl (expix2), %ebx
+movl $10, %edi
+movl mmx_zoom_size, %ecx
+
+.while:
+ ;// esi <- nouvelle position
+ movl (%eax), %esi
+ leal (%edx, %esi), %esi
+
+ ;// recuperation des deux premiers pixels dans mm0 et mm1
+ movq (%esi), %mm0 /* b1-v1-r1-a1-b2-v2-r2-a2 */
+ movq %mm0, %mm1 /* b1-v1-r1-a1-b2-v2-r2-a2 */
+
+ ;// recuperation des 4 coefficients
+ movd 4(%eax), %mm6 /* ??-??-??-??-c4-c3-c2-c1 */
+ ;// depackage du premier pixel
+ punpcklbw %mm7, %mm0 /* 00-b2-00-v2-00-r2-00-a2 */
+
+ movq %mm6, %mm5 /* ??-??-??-??-c4-c3-c2-c1 */
+ ;// depackage du 2ieme pixel
+ punpckhbw %mm7, %mm1 /* 00-b1-00-v1-00-r1-00-a1 */
+
+ ;// extraction des coefficients...
+ punpcklbw %mm5, %mm6 /* c4-c4-c3-c3-c2-c2-c1-c1 */
+ movq %mm6, %mm4 /* c4-c4-c3-c3-c2-c2-c1-c1 */
+ movq %mm6, %mm5 /* c4-c4-c3-c3-c2-c2-c1-c1 */
+
+ punpcklbw %mm5, %mm6 /* c2-c2-c2-c2-c1-c1-c1-c1 */
+ punpckhbw %mm5, %mm4 /* c4-c4-c4-c4-c3-c3-c3-c3 */
+
+ movq %mm6, %mm3 /* c2-c2-c2-c2-c1-c1-c1-c1 */
+ punpcklbw %mm7, %mm6 /* 00-c1-00-c1-00-c1-00-c1 */
+ punpckhbw %mm7, %mm3 /* 00-c2-00-c2-00-c2-00-c2 */
+
+ ;// multiplication des pixels par les coefficients
+ pmullw %mm6, %mm0 /* c1*b2-c1*v2-c1*r2-c1*a2 */
+ pmullw %mm3, %mm1 /* c2*b1-c2*v1-c2*r1-c2*a1 */
+ paddw %mm1, %mm0
+
+ ;// ...extraction des 2 derniers coefficients
+ movq %mm4, %mm5 /* c4-c4-c4-c4-c3-c3-c3-c3 */
+ punpcklbw %mm7, %mm4 /* 00-c3-00-c3-00-c3-00-c3 */
+ punpckhbw %mm7, %mm5 /* 00-c4-00-c4-00-c4-00-c4 */
+
+ ;// recuperation des 2 derniers pixels
+ movq (%esi,%ebp), %mm1
+ movq %mm1, %mm2
+
+ ;// depackage des pixels
+ punpcklbw %mm7, %mm1
+ punpckhbw %mm7, %mm2
+
+ ;// multiplication pas les coeffs
+ pmullw %mm4, %mm1
+ pmullw %mm5, %mm2
+
+ ;// ajout des valeurs obtenues à la valeur finale
+ paddw %mm1, %mm0
+ paddw %mm2, %mm0
+
+ ;// division par 256 = 16+16+16+16, puis repackage du pixel final
+ psrlw $8, %mm0
+ packuswb %mm7, %mm0
+
+ ;// passage au suivant
+ leal 8(%eax), %eax
+
+ decl %ecx
+ ;// enregistrement du resultat
+ movd %mm0, (%ebx)
+ leal 4(%ebx), %ebx
+
+ ;// test de fin du tantque
+ cmpl $0, %ecx ;// 400x300
+
+jz .fin_while
+jmp .while
+
+.fin_while:
+emms
+
+pop %esp
+pop %ebp
+
+ret ;//The End
diff --git a/gst/goom2k1/goom_core.c b/gst/goom2k1/goom_core.c
new file mode 100644
index 0000000000..abe64709c4
--- /dev/null
+++ b/gst/goom2k1/goom_core.c
@@ -0,0 +1,411 @@
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <glib.h>
+
+#include <stdlib.h>
+#include <string.h>
+#include "goom_core.h"
+#include "goom_tools.h"
+#include "filters.h"
+#include "lines.h"
+
+/*#define VERBOSE */
+
+#ifdef VERBOSE
+#include <stdio.h>
+#endif
+
+#define STOP_SPEED 128
+
+/* Initialise a GoomData instance: allocates the pixel buffers via
+ * goom_set_resolution(), seeds the random table and resets the effect
+ * state variables to their defaults. */
+void
+goom_init (GoomData * goomdata, guint32 resx, guint32 resy)
+{
+#ifdef VERBOSE
+  printf ("GOOM: init (%d, %d);\n", resx, resy);
+#endif
+  /* Clear the buffer bookkeeping first so that goom_set_resolution()
+   * sees fresh state and performs the initial allocation. */
+  goomdata->resolx = 0;
+  goomdata->resoly = 0;
+  goomdata->buffsize = 0;
+
+  goomdata->pixel = NULL;
+  goomdata->back = NULL;
+  goomdata->p1 = NULL;
+  goomdata->p2 = NULL;
+
+  goom_set_resolution (goomdata, resx, resy);
+  /* Seed the RNG table with the (arbitrary) pixel buffer address. */
+  RAND_INIT (goomdata, GPOINTER_TO_INT (goomdata->pixel));
+  goomdata->cycle = 0;
+
+
+  goomdata->goomlimit = 2;      /* goom sensitivity */
+  goomdata->zfd = zoomFilterNew ();
+  goomdata->lockvar = 0;        /* blocks further state changes for a while */
+  goomdata->goomvar = 0;        /* goom loop counter */
+  goomdata->totalgoom = 0;      /* number of gooms per second */
+  goomdata->agoom = 0;          /* a goom just happened.. */
+  goomdata->loopvar = 0;        /* movement of the points */
+  goomdata->speedvar = 0;       /* speed of the particles */
+  goomdata->lineMode = 0;       /* which line effect to draw */
+}
+
+/* (Re)allocate the two working buffers for a resx x resy frame and
+ * clear them.  The raw allocations are over-sized by 128 bytes so the
+ * p1/p2 aliases can be rounded up to a 128-byte boundary.  Returns
+ * early when the resolution is unchanged; buffers only ever grow.
+ * NOTE(review): the malloc() results are not checked -- an OOM here
+ * would crash in the memset below; TODO confirm the intended policy. */
+void
+goom_set_resolution (GoomData * goomdata, guint32 resx, guint32 resy)
+{
+  guint32 buffsize = resx * resy;
+
+  if ((goomdata->resolx == resx) && (goomdata->resoly == resy))
+    return;
+
+  /* Only reallocate when the new frame does not fit; a smaller
+   * resolution reuses the existing (larger) buffers. */
+  if (goomdata->buffsize < buffsize) {
+    if (goomdata->pixel)
+      free (goomdata->pixel);
+    if (goomdata->back)
+      free (goomdata->back);
+    goomdata->pixel = (guint32 *) malloc (buffsize * sizeof (guint32) + 128);
+    goomdata->back = (guint32 *) malloc (buffsize * sizeof (guint32) + 128);
+    goomdata->buffsize = buffsize;
+
+    /* Round each base pointer up to the next 128-byte boundary. */
+    goomdata->p1 = (void *) (((guintptr) goomdata->pixel + 0x7f) & (~0x7f));
+    goomdata->p2 = (void *) (((guintptr) goomdata->back + 0x7f) & (~0x7f));
+  }
+
+  goomdata->resolx = resx;
+  goomdata->resoly = resy;
+
+  memset (goomdata->pixel, 0, buffsize * sizeof (guint32) + 128);
+  memset (goomdata->back, 0, buffsize * sizeof (guint32) + 128);
+}
+
+/* Run one iteration of the GOOM effect: analyse the 512-sample stereo
+ * block in @data, update the particle/zoom state machine and render one
+ * frame.  Returns a pointer to the rendered frame (one of the two
+ * internal aligned buffers, owned by @goomdata -- do not free). */
+guint32 *
+goom_update (GoomData * goomdata, gint16 data[2][512])
+{
+  guint32 *return_val;
+  guint32 pointWidth;
+  guint32 pointHeight;
+  int incvar;                   /* sound volume */
+  int accelvar;                 /* acceleration of the particles */
+  int i;
+  float largfactor;             /* widening of the points' evolution interval */
+  int zfd_update = 0;
+  int resolx = goomdata->resolx;
+  int resoly = goomdata->resoly;
+  ZoomFilterData *pzfd = goomdata->zfd;
+  guint32 *tmp;
+
+  /* test if the config has changed, update it if so */
+
+  pointWidth = (resolx * 2) / 5;
+  pointHeight = (resoly * 2) / 5;
+
+  /* ! study the signal: incvar is the peak of the left channel */
+  incvar = 0;
+  for (i = 0; i < 512; i++) {
+    if (incvar < data[0][i])
+      incvar = data[0][i];
+  }
+
+  /* Derive the acceleration, damped when already moving fast. */
+  accelvar = incvar / 5000;
+  if (goomdata->speedvar > 5) {
+    accelvar--;
+    if (goomdata->speedvar > 20)
+      accelvar--;
+    if (goomdata->speedvar > 40)
+      goomdata->speedvar = 40;
+  }
+  accelvar--;
+  goomdata->speedvar += accelvar;
+
+  /* Clamp the particle speed to [0, 40]. */
+  if (goomdata->speedvar < 0)
+    goomdata->speedvar = 0;
+  if (goomdata->speedvar > 40)
+    goomdata->speedvar = 40;
+
+
+  /* ! compute the movement of the small points ... */
+
+  largfactor =
+      ((float) goomdata->speedvar / 40.0f + (float) incvar / 50000.0f) / 1.5f;
+  if (largfactor > 1.5f)
+    largfactor = 1.5f;
+
+  /* Draw the coloured point clouds; the faster the music, the more
+   * iterations of this loop. */
+  for (i = 1; i * 15 <= goomdata->speedvar + 15; i++) {
+    goomdata->loopvar += goomdata->speedvar + 1;
+
+    pointFilter (goomdata,
+        YELLOW,
+        ((pointWidth - 6.0f) * largfactor + 5.0f),
+        ((pointHeight - 6.0f) * largfactor + 5.0f),
+        i * 152.0f, 128.0f, goomdata->loopvar + i * 2032);
+    pointFilter (goomdata, ORANGE,
+        ((pointWidth / 2) * largfactor) / i + 10.0f * i,
+        ((pointHeight / 2) * largfactor) / i + 10.0f * i,
+        96.0f, i * 80.0f, goomdata->loopvar / i);
+    pointFilter (goomdata, VIOLET,
+        ((pointHeight / 3 + 5.0f) * largfactor) / i + 10.0f * i,
+        ((pointHeight / 3 + 5.0f) * largfactor) / i + 10.0f * i,
+        i + 122.0f, 134.0f, goomdata->loopvar / i);
+    pointFilter (goomdata, BLACK,
+        ((pointHeight / 3) * largfactor + 20.0f),
+        ((pointHeight / 3) * largfactor + 20.0f),
+        58.0f, i * 66.0f, goomdata->loopvar / i);
+    pointFilter (goomdata, WHITE,
+        (pointHeight * largfactor + 10.0f * i) / i,
+        (pointHeight * largfactor + 10.0f * i) / i,
+        66.0f, 74.0f, goomdata->loopvar + i * 500);
+  }
+
+  /* decrement the lock time by one */
+  /* note for those not following along: lockvar prevents a new state
+   * change of the plugin right after a previous one. ok ? */
+  if (--goomdata->lockvar < 0)
+    goomdata->lockvar = 0;
+
+  /* time left in the current goom */
+  if (--goomdata->agoom < 0)
+    goomdata->agoom = 0;
+
+  /* check whether something interesting is happening with the sound */
+  if ((accelvar > goomdata->goomlimit) || (accelvar < -goomdata->goomlimit)) {
+    /* A GOOM !!! YAHOO ! */
+    goomdata->totalgoom++;
+    goomdata->agoom = 20;       /* but for 20 cycles there will be no more */
+    goomdata->lineMode = (goomdata->lineMode + 1) % 20; /* every 10 gooms, change line mode */
+
+    /* possibly change the zoom mode */
+    switch (iRAND (goomdata, 10)) {
+      case 0:
+      case 1:
+      case 2:
+        pzfd->mode = WAVE_MODE;
+        pzfd->vitesse = STOP_SPEED - 1;
+        pzfd->reverse = 0;
+        break;
+      case 3:
+      case 4:
+        pzfd->mode = CRYSTAL_BALL_MODE;
+        break;
+      case 5:
+        pzfd->mode = AMULETTE_MODE;
+        break;
+      case 6:
+        pzfd->mode = WATER_MODE;
+        break;
+      case 7:
+        pzfd->mode = SCRUNCH_MODE;
+        break;
+      default:
+        pzfd->mode = NORMAL_MODE;
+    }
+  }
+
+  /* everything below is only done when not locked */
+  if (goomdata->lockvar == 0) {
+    /* goom detection (strong acceleration of the volume's acceleration) */
+    /* -> give the speed a boost if needed.. */
+    if ((accelvar > goomdata->goomlimit) || (accelvar < -goomdata->goomlimit)) {
+      goomdata->goomvar++;
+      /*if (goomvar % 1 == 0) */
+      {
+        guint32 vtmp;
+        guint32 newvit;
+
+        newvit = STOP_SPEED - goomdata->speedvar / 2;
+        /* restore forward zoom.. */
+        if ((pzfd->reverse) && (!(goomdata->cycle % 12)) && (rand () % 3 == 0)) {
+          pzfd->reverse = 0;
+          pzfd->vitesse = STOP_SPEED - 2;
+          goomdata->lockvar = 50;
+        }
+        if (iRAND (goomdata, 10) == 0) {
+          pzfd->reverse = 1;
+          goomdata->lockvar = 100;
+        }
+
+        /* possible change of the zoom centre.. */
+        switch (iRAND (goomdata, 20)) {
+          case 0:
+            pzfd->middleY = resoly - 1;
+            pzfd->middleX = resolx / 2;
+            break;
+          case 1:
+            pzfd->middleX = resolx - 1;
+            break;
+          case 2:
+            pzfd->middleX = 1;
+            break;
+          default:
+            pzfd->middleY = resoly / 2;
+            pzfd->middleX = resolx / 2;
+        }
+
+        if (pzfd->mode == WATER_MODE) {
+          pzfd->middleX = resolx / 2;
+          pzfd->middleY = resoly / 2;
+        }
+
+        switch (vtmp = (iRAND (goomdata, 27))) {
+          case 0:
+            pzfd->vPlaneEffect = iRAND (goomdata, 3);
+            pzfd->vPlaneEffect -= iRAND (goomdata, 3);
+            pzfd->hPlaneEffect = iRAND (goomdata, 3);
+            pzfd->hPlaneEffect -= iRAND (goomdata, 3);
+            break;
+          case 3:
+            pzfd->vPlaneEffect = 0;
+            pzfd->hPlaneEffect = iRAND (goomdata, 8);
+            pzfd->hPlaneEffect -= iRAND (goomdata, 8);
+            break;
+          case 4:
+          case 5:
+          case 6:
+          case 7:
+            pzfd->vPlaneEffect = iRAND (goomdata, 5);
+            pzfd->vPlaneEffect -= iRAND (goomdata, 5);
+            pzfd->hPlaneEffect = -pzfd->vPlaneEffect;
+            break;
+          case 8:
+            pzfd->hPlaneEffect = 5 + iRAND (goomdata, 8);
+            pzfd->vPlaneEffect = -pzfd->hPlaneEffect;
+            break;
+          case 9:
+            pzfd->vPlaneEffect = 5 + iRAND (goomdata, 8);
+            /* NOTE(review): by symmetry with case 8 this looks like it
+             * was meant to negate vPlaneEffect, not hPlaneEffect --
+             * kept as-is to match the historical behaviour; confirm
+             * against upstream goom before changing. */
+            pzfd->hPlaneEffect = -pzfd->hPlaneEffect;
+            break;
+          case 13:
+            pzfd->hPlaneEffect = 0;
+            pzfd->vPlaneEffect = iRAND (goomdata, 10);
+            pzfd->vPlaneEffect -= iRAND (goomdata, 10);
+            break;
+          default:
+            if (vtmp < 10) {
+              pzfd->vPlaneEffect = 0;
+              pzfd->hPlaneEffect = 0;
+            }
+        }
+
+        if (iRAND (goomdata, 3) != 0)
+          pzfd->noisify = 0;
+        else {
+          pzfd->noisify = iRAND (goomdata, 3) + 2;
+          goomdata->lockvar *= 3;
+        }
+
+        if (pzfd->mode == AMULETTE_MODE) {
+          pzfd->vPlaneEffect = 0;
+          pzfd->hPlaneEffect = 0;
+          pzfd->noisify = 0;
+        }
+
+        if ((pzfd->middleX == 1) || (pzfd->middleX == resolx - 1)) {
+          pzfd->vPlaneEffect = 0;
+          pzfd->hPlaneEffect = iRAND (goomdata, 2) ? 0 : pzfd->hPlaneEffect;
+        }
+
+        if (newvit < pzfd->vitesse) {   /* we accelerate */
+          zfd_update = 1;
+          if (((newvit < STOP_SPEED - 7) &&
+                  (pzfd->vitesse < STOP_SPEED - 6) &&
+                  (goomdata->cycle % 3 == 0)) || (iRAND (goomdata, 40) == 0)) {
+            pzfd->vitesse = STOP_SPEED - 1;
+            pzfd->reverse = !pzfd->reverse;
+          } else {
+            pzfd->vitesse = (newvit + pzfd->vitesse * 4) / 5;
+          }
+          goomdata->lockvar += 50;
+        }
+      }
+    }
+    /* mega-slow mode */
+    if (iRAND (goomdata, 1000) == 0) {
+      /*
+         printf ("coup du sort...\n") ;
+       */
+      zfd_update = 1;
+      pzfd->vitesse = STOP_SPEED - 1;
+      pzfd->pertedec = 8;
+      pzfd->sqrtperte = 16;
+      goomdata->goomvar = 1;
+      goomdata->lockvar += 70;
+    }
+  }
+
+  /* brake hard when the music is quiet */
+  if ((goomdata->speedvar < 1) && (pzfd->vitesse < STOP_SPEED - 4)
+      && (goomdata->cycle % 16 == 0)) {
+    /*
+       printf ("++slow part... %i\n", zfd.vitesse) ;
+     */
+    zfd_update = 1;
+    pzfd->vitesse += 3;
+    pzfd->pertedec = 8;
+    pzfd->sqrtperte = 16;
+    goomdata->goomvar = 0;
+    /*
+       printf ("--slow part... %i\n", zfd.vitesse) ;
+     */
+  }
+
+  /* regularly lower the speed... */
+  if ((goomdata->cycle % 73 == 0) && (pzfd->vitesse < STOP_SPEED - 5)) {
+    /*
+       printf ("slow down...\n") ;
+     */
+    zfd_update = 1;
+    pzfd->vitesse++;
+  }
+
+  /* stop decrementing after a certain time */
+  if ((goomdata->cycle % 101 == 0) && (pzfd->pertedec == 7)) {
+    zfd_update = 1;
+    pzfd->pertedec = 8;
+    pzfd->sqrtperte = 16;
+  }
+
+  /* Zoom here ! */
+  zoomFilterFastRGB (goomdata, pzfd, zfd_update);
+
+  /* if we are inside a goom: draw the lines... */
+  if (goomdata->agoom > 15)
+    goom_lines (goomdata, data, ((pzfd->middleX == resolx / 2)
+            && (pzfd->middleY == resoly / 2)
+            && (pzfd->mode != WATER_MODE))
+        ? (goomdata->lineMode / 10) : 0, goomdata->p2, goomdata->agoom - 15);
+
+  return_val = goomdata->p2;
+  tmp = goomdata->p1;
+  goomdata->p1 = goomdata->p2;
+  goomdata->p2 = tmp;
+
+  /* display happened; the buffers have been swapped.. */
+  goomdata->cycle++;
+
+  /* every 100 cycles: check whether the goom rate is correct */
+  /* and adjust it otherwise.. */
+  if (!(goomdata->cycle % 100)) {
+    if (goomdata->totalgoom > 15) {
+      /* printf ("less gooms\n") ; */
+      goomdata->goomlimit++;
+    } else {
+      if ((goomdata->totalgoom == 0) && (goomdata->goomlimit > 1))
+        goomdata->goomlimit--;
+    }
+    goomdata->totalgoom = 0;
+  }
+  return return_val;
+}
+
+/* Release everything owned by the GoomData: the pixel buffers, the
+ * zoom filter and the random table.  The pointers are reset so a
+ * subsequent goom_init()/goom_close() is safe. */
+void
+goom_close (GoomData * goomdata)
+{
+  /* free(NULL) is a no-op, so the former "!= NULL" guards were
+   * redundant and have been dropped. */
+  free (goomdata->pixel);
+  free (goomdata->back);
+  if (goomdata->zfd != NULL) {
+    zoomFilterDestroy (goomdata->zfd);
+    goomdata->zfd = NULL;
+  }
+  goomdata->pixel = goomdata->back = NULL;
+  RAND_CLOSE (goomdata);
+}
diff --git a/gst/goom2k1/goom_core.h b/gst/goom2k1/goom_core.h
new file mode 100644
index 0000000000..3e07af0220
--- /dev/null
+++ b/gst/goom2k1/goom_core.h
@@ -0,0 +1,43 @@
+#ifndef _GOOMCORE_H
+#define _GOOMCORE_H
+
+#include <glib.h>
+
+typedef struct ZoomFilterData ZoomFilterData;
+
+/* Complete state of one goom effect engine instance. */
+typedef struct
+{
+/*-----------------------------------------------------*
+ *                      SHARED DATA                    *
+ *-----------------------------------------------------*/
+  guint32 *pixel;               /* raw allocation backing p1 */
+  guint32 *back;                /* raw allocation backing p2 */
+  guint32 *p1, *p2;             /* 128-byte-aligned draw buffers (aliases) */
+  guint32 cycle;                /* frame counter */
+
+  guint32 resolx, resoly, buffsize;
+
+  int lockvar;                  /* blocks further state changes for a while */
+  int goomvar;                  /* goom loop counter */
+  int totalgoom;                /* number of gooms per second */
+  int agoom;                    /* a goom just happened.. */
+  int loopvar;                  /* movement of the points */
+  int speedvar;                 /* speed of the particles */
+  int lineMode;                 /* which line effect to draw */
+  char goomlimit;               /* goom sensitivity */
+
+  ZoomFilterData *zfd;
+
+  /* Random table */
+  gint *rand_tab;
+  guint rand_pos;
+} GoomData;
+
+void goom_init (GoomData *goomdata, guint32 resx, guint32 resy);
+void goom_set_resolution (GoomData *goomdata, guint32 resx, guint32 resy);
+
+/* Renders one frame from 512 stereo samples; the returned buffer is
+ * owned by the GoomData and must not be freed by the caller. */
+guint32 *goom_update (GoomData *goomdata, gint16 data [2][512]);
+
+void goom_close (GoomData *goomdata);
+
+#endif
diff --git a/gst/goom2k1/goom_tools.h b/gst/goom2k1/goom_tools.h
new file mode 100644
index 0000000000..6178dbafb1
--- /dev/null
+++ b/gst/goom2k1/goom_tools.h
@@ -0,0 +1,24 @@
+#ifndef _GOOMTOOLS_H
+#define _GOOMTOOLS_H
+
+#define NB_RAND 0x10000
+
+#define RAND_INIT(gd,i) \
+ srand (i); \
+ if (gd->rand_tab == NULL) \
+ gd->rand_tab = g_malloc (NB_RAND * sizeof(gint)) ;\
+ gd->rand_pos = 0; \
+ while (gd->rand_pos < NB_RAND) \
+ gd->rand_tab [gd->rand_pos++] = rand ();
+
+#define RAND(gd) \
+ (gd->rand_tab[gd->rand_pos = ((gd->rand_pos + 1) % NB_RAND)])
+
+#define RAND_CLOSE(gd) \
+ g_free (gd->rand_tab); \
+ gd->rand_tab = NULL;
+
+/*#define iRAND(i) ((guint32)((float)i * RAND()/RAND_MAX)) */
+#define iRAND(gd,i) (RAND(gd) % i)
+
+#endif
diff --git a/gst/goom2k1/graphic.c b/gst/goom2k1/graphic.c
new file mode 100644
index 0000000000..c20f987dd3
--- /dev/null
+++ b/gst/goom2k1/graphic.c
@@ -0,0 +1,14 @@
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "graphic.h"
+
+/* Shared colour palette used by the point renderer.  Fields are
+ * {r, v, b} -- "v" is French "vert" (green); range 0-255. */
+const Color BLACK = { 0, 0, 0 };
+const Color WHITE = { 0xff, 0xff, 0xff };
+const Color RED = { 0xff, 0, 0 };
+const Color GREEN = { 0, 0xff, 0 };
+const Color BLUE = { 0, 0, 0xff };
+const Color YELLOW = { 0xff, 0xff, 0x33 };
+const Color ORANGE = { 0xff, 0xcc, 0x00 };
+const Color VIOLET = { 0x55, 0x00, 0xff };
diff --git a/gst/goom2k1/graphic.h b/gst/goom2k1/graphic.h
new file mode 100644
index 0000000000..4154d7fdd3
--- /dev/null
+++ b/gst/goom2k1/graphic.h
@@ -0,0 +1,23 @@
+#ifndef GRAPHIC_H
+#define GRAPHIC_H
+
+#include <glib.h> /* defines inline for better portability */
+
+typedef unsigned int Uint;
+
+/* RGB colour; "v" is French "vert" (green). */
+typedef struct
+{
+  unsigned short r,v,b;
+}
+Color;
+
+/* Palette constants defined in graphic.c. */
+extern const Color BLACK;
+extern const Color WHITE;
+extern const Color RED;
+extern const Color BLUE;
+extern const Color GREEN;
+extern const Color YELLOW;
+extern const Color ORANGE;
+extern const Color VIOLET;
+
+#endif /*GRAPHIC_H*/
diff --git a/gst/goom2k1/gstgoom.c b/gst/goom2k1/gstgoom.c
new file mode 100644
index 0000000000..171af308e6
--- /dev/null
+++ b/gst/goom2k1/gstgoom.c
@@ -0,0 +1,193 @@
+/* gstgoom.c: implementation of goom drawing element
+ * Copyright (C) <2001> Richard Boulton <richard@tartarus.org>
+ * (C) <2006> Wim Taymans <wim at fluendo dot com>
+ * (C) <2015> Luis de Bethencourt <luis@debethencourt.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-goom2k1
+ * @title: goom2k1
+ * @see_also: goom, synaesthesia
+ *
+ * Goom2k1 is an audio visualisation element. It creates warping structures
+ * based on the incoming audio signal. Goom2k1 is the older version of the
+ * visualisation. Also available is goom2k4, with a different look.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! goom2k1 ! videoconvert ! xvimagesink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include "gstgoom.h"
+#include "goom_core.h"
+
+GST_DEBUG_CATEGORY_STATIC (goom2k1_debug);
+#define GST_CAT_DEFAULT goom2k1_debug
+
+/* Default video output geometry and framerate. */
+#define DEFAULT_WIDTH 320
+#define DEFAULT_HEIGHT 240
+#define DEFAULT_FPS_N 25
+#define DEFAULT_FPS_D 1
+
+/* goom_core writes native-endian xRGB 32-bit words. */
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+#define RGB_ORDER "xRGB"
+#else
+#define RGB_ORDER "BGRx"
+#endif
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (RGB_ORDER))
+    );
+
+/* Accepts interleaved native-endian S16, mono or stereo. */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",  /* the name of the pads */
+    GST_PAD_SINK,               /* type of the pad */
+    GST_PAD_ALWAYS,             /* ALWAYS/SOMETIMES */
+    GST_STATIC_CAPS ("audio/x-raw, "
+        "format = (string) " GST_AUDIO_NE (S16) ", "
+        "rate = (int) [ 8000, 96000 ], "
+        "channels = (int) 1, "
+        "layout = (string) interleaved; "
+        "audio/x-raw, "
+        "format = (string) " GST_AUDIO_NE (S16) ", "
+        "rate = (int) [ 8000, 96000 ], "
+        "channels = (int) 2, "
+        "channel-mask = (bitmask) 0x3, " "layout = (string) interleaved")
+    );
+
+static void gst_goom2k1_finalize (GObject * object);
+
+static gboolean gst_goom2k1_setup (GstAudioVisualizer * base);
+static gboolean gst_goom2k1_render (GstAudioVisualizer * base,
+    GstBuffer * audio, GstVideoFrame * video);
+
+
+G_DEFINE_TYPE (GstGoom2k1, gst_goom2k1, GST_TYPE_AUDIO_VISUALIZER);
+GST_ELEMENT_REGISTER_DEFINE (goom2k1, "goom2k1", GST_RANK_NONE,
+    GST_TYPE_GOOM2K1);
+/* Class init: install finalize, element metadata, pad templates and the
+ * GstAudioVisualizer setup/render vmethods. */
+static void
+gst_goom2k1_class_init (GstGoom2k1Class * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstAudioVisualizerClass *visualizer_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  visualizer_class = (GstAudioVisualizerClass *) klass;
+
+  gobject_class->finalize = gst_goom2k1_finalize;
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "GOOM: what a GOOM! 2k1 edition", "Visualization",
+      "Takes frames of data and outputs video frames using the GOOM 2k1 filter",
+      "Wim Taymans <wim@fluendo.com>");
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+  GST_DEBUG_CATEGORY_INIT (goom2k1_debug, "goom2k1", 0,
+      "goom2k1 visualisation element");
+
+  visualizer_class->setup = GST_DEBUG_FUNCPTR (gst_goom2k1_setup);
+  visualizer_class->render = GST_DEBUG_FUNCPTR (gst_goom2k1_render);
+}
+
+/* Instance init: start the goom engine at the default geometry; the
+ * real size is applied later in gst_goom2k1_setup(). */
+static void
+gst_goom2k1_init (GstGoom2k1 * goom)
+{
+  goom->width = DEFAULT_WIDTH;
+  goom->height = DEFAULT_HEIGHT;
+  goom->channels = 0;
+
+  goom_init (&(goom->goomdata), goom->width, goom->height);
+}
+
+/* GObject finalize: tear down the goom engine, then chain up. */
+static void
+gst_goom2k1_finalize (GObject * object)
+{
+  GstGoom2k1 *goom = GST_GOOM2K1 (object);
+
+  goom_close (&(goom->goomdata));
+
+  G_OBJECT_CLASS (gst_goom2k1_parent_class)->finalize (object);
+}
+
+/* Called by GstAudioVisualizer on caps changes: resize the goom engine
+ * to the negotiated video geometry. */
+static gboolean
+gst_goom2k1_setup (GstAudioVisualizer * base)
+{
+  GstGoom2k1 *goom = GST_GOOM2K1 (base);
+
+  goom->width = GST_VIDEO_INFO_WIDTH (&base->vinfo);
+  goom->height = GST_VIDEO_INFO_HEIGHT (&base->vinfo);
+
+  goom_set_resolution (&(goom->goomdata), goom->width, goom->height);
+
+  return TRUE;
+}
+
+/* Render one video frame from the next GOOM2K1_SAMPLES audio samples.
+ * Stereo input is de-interleaved; mono input feeds both channels. */
+static gboolean
+gst_goom2k1_render (GstAudioVisualizer * base, GstBuffer * audio,
+    GstVideoFrame * video)
+{
+  GstGoom2k1 *goom = GST_GOOM2K1 (base);
+  GstMapInfo amap;
+  gint16 datain[2][GOOM2K1_SAMPLES];
+  gint16 *adata;
+  gint i;
+
+  /* FIX: goom->channels was initialised to 0 in _init() and never
+   * updated, so the stereo branch below was unreachable.  Track the
+   * negotiated channel count from the base class audio info. */
+  goom->channels = GST_AUDIO_INFO_CHANNELS (&base->ainfo);
+
+  /* get next GOOM2K1_SAMPLES, we have at least this amount of samples */
+  gst_buffer_map (audio, &amap, GST_MAP_READ);
+  adata = (gint16 *) amap.data;
+
+  if (goom->channels == 2) {
+    /* de-interleave L/R sample pairs */
+    for (i = 0; i < GOOM2K1_SAMPLES; i++) {
+      datain[0][i] = *adata++;
+      datain[1][i] = *adata++;
+    }
+  } else {
+    /* mono: duplicate each sample into both channels */
+    for (i = 0; i < GOOM2K1_SAMPLES; i++) {
+      datain[0][i] = *adata;
+      datain[1][i] = *adata++;
+    }
+  }
+
+  video->data[0] = goom_update (&(goom->goomdata), datain);
+  gst_buffer_unmap (audio, &amap);
+
+  return TRUE;
+}
+
+/* Register the goom2k1 element with GStreamer. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (goom2k1, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    goom2k1,
+    "GOOM 2k1 visualization filter",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/goom2k1/gstgoom.h b/gst/goom2k1/gstgoom.h
new file mode 100644
index 0000000000..533388d523
--- /dev/null
+++ b/gst/goom2k1/gstgoom.h
@@ -0,0 +1,68 @@
+/* gstgoom.c: implementation of goom drawing element
+ * Copyright (C) <2001> Richard Boulton <richard@tartarus.org>
+ * (C) <2006> Wim Taymans <wim at fluendo dot com>
+ * Copyright (C) <2015> Luis de Bethencourt <luis@debethencourt.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_GOOM_H__
+#define __GST_GOOM_H__
+
+#include <gst/pbutils/gstaudiovisualizer.h>
+
+#include "goom_core.h"
+
+G_BEGIN_DECLS
+
+/* Number of audio samples consumed per rendered video frame. */
+#define GOOM2K1_SAMPLES 512
+
+#define GST_TYPE_GOOM2K1 (gst_goom2k1_get_type())
+#define GST_GOOM2K1(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_GOOM2K1,GstGoom2k1))
+#define GST_GOOM2K1_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_GOOM2K1,GstGoom2k1Class))
+#define GST_IS_GOOM2K1(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_GOOM2K1))
+#define GST_IS_GOOM2K1_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_GOOM2K1))
+
+typedef struct _GstGoom2k1 GstGoom2k1;
+typedef struct _GstGoom2k1Class GstGoom2k1Class;
+
+/* Audio visualiser element wrapping the goom2k1 effect engine. */
+struct _GstGoom2k1
+{
+  GstAudioVisualizer parent;
+
+  /* input tracking */
+  gint channels;
+
+  /* video state */
+  gint width;
+  gint height;
+
+  /* goom stuff */
+  GoomData goomdata;
+};
+
+struct _GstGoom2k1Class
+{
+  GstAudioVisualizerClass parent_class;
+};
+
+GType gst_goom2k1_get_type (void);
+GST_ELEMENT_REGISTER_DECLARE (goom2k1);
+
+G_END_DECLS
+
+#endif /* __GST_GOOM_H__ */
+
diff --git a/gst/goom2k1/lines.c b/gst/goom2k1/lines.c
new file mode 100644
index 0000000000..804b68e45d
--- /dev/null
+++ b/gst/goom2k1/lines.c
@@ -0,0 +1,112 @@
+/*
+ * lines.c
+ * iTunesXPlugIn
+ *
+ * Created by guillaum on Tue Aug 14 2001.
+ * Copyright (c) 2001 __CompanyName__. All rights reserved.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "lines.h"
+#include <math.h>
+
+/* Brighten @value towards 255: each unit of @power closes one fifth of
+ * the remaining distance to white. */
+static inline unsigned char
+lighten (unsigned char value, unsigned char power)
+{
+  unsigned char i;
+
+  for (i = 0; i < power; i++)
+    value += (255 - value) / 5;
+  return value;
+}
+
+/* Draw the sound-reactive overlay into frame buffer @p.
+ * @ID selects the effect: 0 = horizontal stereo lines, 1 = stereo
+ * circles.  @power (0..5 from the caller) lightens the base colours.
+ * NOTE(review): the byte-wise colour lightening below addresses bytes
+ * 1..3 of each 32-bit colour word, which is endianness-dependent --
+ * TODO confirm it matches the frame format on big-endian hosts. */
+void
+goom_lines (GoomData * goomdata, gint16 data[2][512], unsigned int ID,
+    unsigned int *p, guint32 power)
+{
+  guint32 color1;
+  guint32 color2;
+  guint32 resolx = goomdata->resolx;
+  guint32 resoly = goomdata->resoly;
+  unsigned char *color = 1 + (unsigned char *) &color1;
+
+  /* Pick the base colours for the selected effect. */
+  switch (ID) {
+    case 0:                    /* Horizontal stereo lines */
+    {
+      color1 = 0x0000AA00;
+      color2 = 0x00AA0000;
+      break;
+    }
+
+    case 1:                    /* Stereo circles */
+    {
+      color1 = 0x00AA33DD;
+      color2 = 0x00AA33DD;
+      break;
+    }
+    default:{
+      color1 = color2 = 0;
+      g_assert_not_reached ();
+      break;
+    }
+  }
+  /* Lighten the three colour components of color1, then color2. */
+  *color = lighten (*color, power);
+  color++;
+  *color = lighten (*color, power);
+  color++;
+  *color = lighten (*color, power);
+  color = 1 + (unsigned char *) &color2;
+  *color = lighten (*color, power);
+  color++;
+  *color = lighten (*color, power);
+  color++;
+  *color = lighten (*color, power);
+
+  switch (ID) {
+    case 0:                    /* Horizontal stereo lines */
+    {
+      unsigned int i;
+
+      /* Left channel across the upper quarter, right channel across
+       * the lower quarter; two pixels per sample. */
+      for (i = 0; i < 512; i++) {
+        guint32 plot;
+
+        plot = i * resolx / 512 + (resoly / 4 + data[0][i] / 1600) * resolx;
+        p[plot] = color1;
+        p[plot + 1] = color1;
+        plot = i * resolx / 512 + (resoly * 3 / 4 - data[1][i] / 1600) * resolx;
+        p[plot] = color2;
+        p[plot + 1] = color2;
+      }
+      break;
+    }
+
+    case 1:                    /* Stereo circles */
+    {
+      float z;
+      unsigned int monX = resolx / 2;
+      float monY = (float) resoly / 4;
+      float monY2 = (float) resoly / 2;
+
+      /* Sweep a full circle, modulating the radius with the samples. */
+      for (z = 0; z < 6.2832f; z += 1.0f / monY) {
+        /* float offset1 = 128+data[1][(unsigned int)(z*81.33f)])/200000; */
+        p[monX + (unsigned int) ((monY + ((float) resoly) * (128 +
+                        data[1][(unsigned int) (z * 81.33f)]) / 200000) *
+                cos (z) + resolx * (unsigned int) (monY2 + (monY +
+                        ((float) resoly) * (128 +
+                            data[1][(unsigned int) (z * 81.33f)]) / 400000) *
+                    sin (z)))] = color1;
+        p[monX + (unsigned int) ((monY - ((float) resoly) * (128 +
+                        data[0][(unsigned int) (z * 81.33f)]) / 200000) *
+                cos (z) + resolx * (unsigned int) (monY2 + (monY -
+                        ((float) resoly) * (128 +
+                            data[0][(unsigned int) (z * 81.33f)]) / 400000) *
+                    sin (z)))] = color2;
+      }
+      break;
+    }
+  }
+}
diff --git a/gst/goom2k1/lines.h b/gst/goom2k1/lines.h
new file mode 100644
index 0000000000..548f339a06
--- /dev/null
+++ b/gst/goom2k1/lines.h
@@ -0,0 +1,16 @@
+/*
+ * lines.h
+ * iGoom
+ *
+ * Created by guillaum on Tue Aug 14 2001.
+ * Copyright (c) 2001 ios. All rights reserved.
+ *
+ */
+#include <glib.h>
+
+#include "graphic.h"
+#include "goom_core.h"
+
+/* Draw the goom line/circle overlay into buffer p (see lines.c). */
+void goom_lines(GoomData *goomdata, gint16 data [2][512], unsigned int ID,unsigned int* p, guint32 power);
+
+
diff --git a/gst/goom2k1/meson.build b/gst/goom2k1/meson.build
new file mode 100644
index 0000000000..3bbd77f805
--- /dev/null
+++ b/gst/goom2k1/meson.build
@@ -0,0 +1,41 @@
+# Rename the public goom symbols at compile time so this plugin can
+# coexist in one process with the newer goom (2k4) plugin, which defines
+# the same function and colour names.
+goom2k1_args = [
+  '-Dgst_goom_get_type=gst_goom2k1_get_type',
+  '-Dgoom_init=goom2k1_init',
+  '-Dgoom_close=goom2k1_close',
+  '-Dgoom_update=goom2k1_update',
+  '-Dgoom_set_resolution=goom2k1_set_resolution',
+  '-Dgoom_lines=goom2k1_lines',
+  '-DBLACK=GOOM2K1_BLACK',
+  '-DWHITE=GOOM2K1_WHITE',
+  '-DRED=GOOM2K1_RED',
+  '-DBLUE=GOOM2K1_BLUE',
+  '-DGREEN=GOOM2K1_GREEN',
+  '-DYELLOW=GOOM2K1_YELLOW',
+  '-DORANGE=GOOM2K1_ORANGE',
+  '-DVIOLET=GOOM2K1_VIOLET',
+  '-DzoomFilterFastRGB=zoomFilterFastRGB2k1',
+  '-DpointFilter=pointFilter2k1',
+  '-DzoomFilterDestroy=zoomFilterDestroy2k1',
+  '-DzoomFilterNew=zoomFilterNew2k1'
+]
+
+# Build the portable C paths only; the MMX/ASM variants are disabled.
+filter_args = ['-UMMX', '-UUSE_ASM']
+
+goom2k1_sources = [
+  'gstgoom.c',
+  'goom_core.c',
+  'filters.c',
+  'graphic.c',
+  'lines.c'
+]
+
+gstgoom2k1 = library('gstgoom2k1',
+  goom2k1_sources,
+  c_args : gst_plugins_good_args + goom2k1_args + filter_args,
+  include_directories : [configinc],
+  dependencies : [gstpbutils_dep, gstbase_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstgoom2k1, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstgoom2k1]
diff --git a/gst/icydemux/gsticydemux.c b/gst/icydemux/gsticydemux.c
new file mode 100644
index 0000000000..c8420e86fc
--- /dev/null
+++ b/gst/icydemux/gsticydemux.c
@@ -0,0 +1,677 @@
+/* -*- Mode: C; tab-width: 2; indent-tabs-mode: t; c-basic-offset: 2 -*- */
+/* Copyright 2005 Jan Schmidt <thaytan@mad.scientist.com>
+ * 2006 Michael Smith <msmith@fluendo.com>
+ * Copyright (C) 2003-2004 Benjamin Otte <otte@gnome.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-icydemux
+ * @title: icydemux
+ *
+ * icydemux accepts data streams with ICY metadata at known intervals, as
+ * transmitted from an upstream element (usually read as response headers from
+ * an HTTP stream). The mime type of the data between the tag blocks is
+ * detected using typefind functions, and the appropriate output mime type set
+ * on outgoing buffers.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 souphttpsrc location=http://some.server/ iradio-mode=true ! icydemux ! fakesink -t
+ * ]| This pipeline should read any available ICY tag information and output it.
+ * The contents of the stream should be detected, and the appropriate mime
+ * type set on buffers produced from icydemux. (Using gnomevfssrc, neonhttpsrc
+ * or giosrc instead of souphttpsrc should also work.)
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <gst/gst.h>
+#include <gst/gst-i18n-plugin.h>
+#include <gst/tag/tag.h>
+
+#include "gsticydemux.h"
+
+#include <string.h>
+
+#define ICY_TYPE_FIND_MAX_SIZE (40*1024)
+
+GST_DEBUG_CATEGORY_STATIC (icydemux_debug);
+#define GST_CAT_DEFAULT (icydemux_debug)
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-icy, metadata-interval = (int)[0, MAX]")
+ );
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("ANY")
+ );
+
+static void gst_icydemux_dispose (GObject * object);
+
+static GstFlowReturn gst_icydemux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_icydemux_handle_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+static gboolean gst_icydemux_add_srcpad (GstICYDemux * icydemux,
+ GstCaps * new_caps);
+static gboolean gst_icydemux_remove_srcpad (GstICYDemux * icydemux);
+
+static GstStateChangeReturn gst_icydemux_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_icydemux_sink_setcaps (GstPad * pad, GstCaps * caps);
+
+static gboolean gst_icydemux_send_tag_event (GstICYDemux * icydemux,
+ GstTagList * taglist);
+
+
+#define gst_icydemux_parent_class parent_class
+G_DEFINE_TYPE (GstICYDemux, gst_icydemux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (icydemux, "icydemux",
+ GST_RANK_PRIMARY, GST_TYPE_ICYDEMUX,
+ GST_DEBUG_CATEGORY_INIT (icydemux_debug, "icydemux", 0,
+ "GStreamer ICY tag demuxer");
+ );
+static void
+gst_icydemux_class_init (GstICYDemuxClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
+
+ gobject_class->dispose = gst_icydemux_dispose;
+
+ gstelement_class->change_state = gst_icydemux_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+ gst_element_class_set_static_metadata (gstelement_class, "ICY tag demuxer",
+ "Codec/Demuxer/Metadata",
+ "Read and output ICY tags while demuxing the contents",
+ "Jan Schmidt <thaytan@mad.scientist.com>, "
+ "Michael Smith <msmith@fluendo.com>");
+}
+
+static void
+gst_icydemux_reset (GstICYDemux * icydemux)
+{
+ /* Unknown at the moment (this is a fatal error if we don't have a value
+ * by the time we get to our chain function)
+ */
+ icydemux->meta_interval = -1;
+ icydemux->remaining = 0;
+
+ icydemux->typefinding = TRUE;
+
+ gst_caps_replace (&(icydemux->src_caps), NULL);
+
+ gst_icydemux_remove_srcpad (icydemux);
+
+ if (icydemux->cached_tags) {
+ gst_tag_list_unref (icydemux->cached_tags);
+ icydemux->cached_tags = NULL;
+ }
+
+ if (icydemux->cached_events) {
+ g_list_foreach (icydemux->cached_events,
+ (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (icydemux->cached_events);
+ icydemux->cached_events = NULL;
+ }
+
+ if (icydemux->meta_adapter) {
+ gst_adapter_clear (icydemux->meta_adapter);
+ g_object_unref (icydemux->meta_adapter);
+ icydemux->meta_adapter = NULL;
+ }
+
+ if (icydemux->typefind_buf) {
+ gst_buffer_unref (icydemux->typefind_buf);
+ icydemux->typefind_buf = NULL;
+ }
+
+ if (icydemux->content_type) {
+ g_free (icydemux->content_type);
+ icydemux->content_type = NULL;
+ }
+}
+
+static void
+gst_icydemux_init (GstICYDemux * icydemux)
+{
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (icydemux);
+
+ icydemux->sinkpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+ "sink"), "sink");
+ gst_pad_set_chain_function (icydemux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_icydemux_chain));
+ gst_pad_set_event_function (icydemux->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_icydemux_handle_event));
+ gst_element_add_pad (GST_ELEMENT (icydemux), icydemux->sinkpad);
+
+ gst_icydemux_reset (icydemux);
+}
+
+static gboolean
+gst_icydemux_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstICYDemux *icydemux = GST_ICYDEMUX (GST_PAD_PARENT (pad));
+ GstStructure *structure = gst_caps_get_structure (caps, 0);
+ const gchar *tmp;
+
+ if (!gst_structure_get_int (structure, "metadata-interval",
+ &icydemux->meta_interval))
+ return FALSE;
+
+ /* If incoming caps have the HTTP Content-Type, copy that over */
+ if ((tmp = gst_structure_get_string (structure, "content-type")))
+ icydemux->content_type = g_strdup (tmp);
+
+ /* We have a meta interval, so initialise the rest */
+ icydemux->remaining = icydemux->meta_interval;
+ icydemux->meta_remaining = 0;
+ return TRUE;
+}
+
+static void
+gst_icydemux_dispose (GObject * object)
+{
+ GstICYDemux *icydemux = GST_ICYDEMUX (object);
+
+ gst_icydemux_reset (icydemux);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+typedef struct
+{
+ GstCaps *caps;
+ GstPad *pad;
+} CopyStickyEventsData;
+
+static gboolean
+copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+ CopyStickyEventsData *data = user_data;
+
+ if (GST_EVENT_TYPE (*event) >= GST_EVENT_CAPS && data->caps) {
+ gst_pad_set_caps (data->pad, data->caps);
+ data->caps = NULL;
+ }
+
+ if (GST_EVENT_TYPE (*event) != GST_EVENT_CAPS)
+ gst_pad_push_event (data->pad, gst_event_ref (*event));
+
+ return TRUE;
+}
+
+static gboolean
+gst_icydemux_add_srcpad (GstICYDemux * icydemux, GstCaps * new_caps)
+{
+ if (icydemux->src_caps == NULL ||
+ !gst_caps_is_equal (new_caps, icydemux->src_caps)) {
+ gst_caps_replace (&(icydemux->src_caps), new_caps);
+ if (icydemux->srcpad != NULL) {
+ GST_DEBUG_OBJECT (icydemux, "Changing src pad caps to %" GST_PTR_FORMAT,
+ icydemux->src_caps);
+
+ gst_pad_set_caps (icydemux->srcpad, icydemux->src_caps);
+ }
+ } else {
+ /* Caps never changed */
+ gst_caps_unref (new_caps);
+ }
+
+ if (icydemux->srcpad == NULL) {
+ CopyStickyEventsData data;
+
+ icydemux->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (GST_ELEMENT_GET_CLASS (icydemux), "src"), "src");
+ g_return_val_if_fail (icydemux->srcpad != NULL, FALSE);
+
+ gst_pad_use_fixed_caps (icydemux->srcpad);
+ gst_pad_set_active (icydemux->srcpad, TRUE);
+
+ data.pad = icydemux->srcpad;
+ data.caps = icydemux->src_caps;
+ gst_pad_sticky_events_foreach (icydemux->sinkpad, copy_sticky_events,
+ &data);
+ if (data.caps)
+ gst_pad_set_caps (data.pad, data.caps);
+
+ GST_DEBUG_OBJECT (icydemux, "Adding src pad with caps %" GST_PTR_FORMAT,
+ icydemux->src_caps);
+
+ if (!(gst_element_add_pad (GST_ELEMENT (icydemux), icydemux->srcpad)))
+ return FALSE;
+ gst_element_no_more_pads (GST_ELEMENT (icydemux));
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_icydemux_remove_srcpad (GstICYDemux * icydemux)
+{
+ gboolean res = TRUE;
+
+ if (icydemux->srcpad != NULL) {
+ res = gst_element_remove_pad (GST_ELEMENT (icydemux), icydemux->srcpad);
+ g_return_val_if_fail (res != FALSE, FALSE);
+ icydemux->srcpad = NULL;
+ }
+
+ return res;
+};
+
+static gchar *
+gst_icydemux_unicodify (const gchar * str)
+{
+ const gchar *env_vars[] = { "GST_ICY_TAG_ENCODING",
+ "GST_TAG_ENCODING", NULL
+ };
+
+ return gst_tag_freeform_string_to_utf8 (str, -1, env_vars);
+}
+
+/* takes ownership of tag list */
+static gboolean
+gst_icydemux_tag_found (GstICYDemux * icydemux, GstTagList * tags)
+{
+ /* send the tag event if we have finished typefinding and have a src pad */
+ if (icydemux->srcpad)
+ return gst_icydemux_send_tag_event (icydemux, tags);
+
+ /* if we don't have a source pad yet, cache the tags */
+ if (!icydemux->cached_tags) {
+ icydemux->cached_tags = tags;
+ } else {
+ gst_tag_list_insert (icydemux->cached_tags, tags,
+ GST_TAG_MERGE_REPLACE_ALL);
+ gst_tag_list_unref (tags);
+ }
+
+ return TRUE;
+}
+
+static void
+gst_icydemux_parse_and_send_tags (GstICYDemux * icydemux)
+{
+ GstTagList *tags;
+ const guint8 *data;
+ int length, i;
+ gboolean tags_found = FALSE;
+ gchar *buffer;
+ gchar **strings;
+
+ length = gst_adapter_available (icydemux->meta_adapter);
+
+ data = gst_adapter_map (icydemux->meta_adapter, length);
+
+ /* Now, copy this to a buffer where we can NUL-terminate it to make things
+ * a bit easier, then do that parsing. */
+ buffer = g_strndup ((const gchar *) data, length);
+
+ tags = gst_tag_list_new_empty ();
+ strings = g_strsplit (buffer, "';", 0);
+
+ for (i = 0; strings[i]; i++) {
+ if (!g_ascii_strncasecmp (strings[i], "StreamTitle=", 12)) {
+ char *title = gst_icydemux_unicodify (strings[i] + 13);
+ tags_found = TRUE;
+
+ if (title && *title) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_TITLE,
+ title, NULL);
+ g_free (title);
+ }
+ } else if (!g_ascii_strncasecmp (strings[i], "StreamUrl=", 10)) {
+ char *url = gst_icydemux_unicodify (strings[i] + 11);
+ tags_found = TRUE;
+
+ if (url && *url) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_HOMEPAGE,
+ url, NULL);
+ g_free (url);
+ }
+ }
+ }
+
+ g_strfreev (strings);
+ g_free (buffer);
+ gst_adapter_unmap (icydemux->meta_adapter);
+ gst_adapter_flush (icydemux->meta_adapter, length);
+
+ if (tags_found)
+ gst_icydemux_tag_found (icydemux, tags);
+ else
+ gst_tag_list_unref (tags);
+}
+
+static gboolean
+gst_icydemux_handle_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstICYDemux *icydemux = GST_ICYDEMUX (parent);
+ gboolean result;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TAG:
+ {
+ GstTagList *tags;
+
+ gst_event_parse_tag (event, &tags);
+ result = gst_icydemux_tag_found (icydemux, gst_tag_list_copy (tags));
+ gst_event_unref (event);
+ return result;
+ }
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ result = gst_icydemux_sink_setcaps (pad, caps);
+ gst_event_unref (event);
+ return result;
+ }
+ default:
+ break;
+ }
+
+ if (icydemux->typefinding) {
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ g_list_foreach (icydemux->cached_events,
+ (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (icydemux->cached_events);
+ icydemux->cached_events = NULL;
+
+ return gst_pad_event_default (pad, parent, event);
+ default:
+ if (!GST_EVENT_IS_STICKY (event)) {
+ icydemux->cached_events =
+ g_list_append (icydemux->cached_events, event);
+ } else {
+ gst_event_unref (event);
+ }
+ return TRUE;
+ }
+ } else {
+ return gst_pad_event_default (pad, parent, event);
+ }
+}
+
+static void
+gst_icydemux_send_cached_events (GstICYDemux * icydemux)
+{
+ GList *l;
+
+ for (l = icydemux->cached_events; l != NULL; l = l->next) {
+ GstEvent *event = GST_EVENT (l->data);
+
+ gst_pad_push_event (icydemux->srcpad, event);
+ }
+ g_list_free (icydemux->cached_events);
+ icydemux->cached_events = NULL;
+}
+
+static GstFlowReturn
+gst_icydemux_typefind_or_forward (GstICYDemux * icydemux, GstBuffer * buf)
+{
+ if (icydemux->typefinding) {
+ GstBuffer *tf_buf;
+ GstCaps *caps = NULL;
+ GstTypeFindProbability prob;
+
+ /* If we have a content-type from upstream, let's see if we can shortcut
+ * typefinding */
+ if (G_UNLIKELY (icydemux->content_type)) {
+ if (!g_ascii_strcasecmp (icydemux->content_type, "video/nsv")) {
+ GST_DEBUG ("We have a NSV stream");
+ caps = gst_caps_new_empty_simple ("video/x-nsv");
+ } else {
+ GST_DEBUG ("Upstream Content-Type isn't supported");
+ g_free (icydemux->content_type);
+ icydemux->content_type = NULL;
+ }
+ }
+
+ if (icydemux->typefind_buf) {
+ icydemux->typefind_buf = gst_buffer_append (icydemux->typefind_buf, buf);
+ } else {
+ icydemux->typefind_buf = buf;
+ }
+
+ /* Only typefind if we haven't already got some caps */
+ if (caps == NULL) {
+ caps = gst_type_find_helper_for_buffer (GST_OBJECT (icydemux),
+ icydemux->typefind_buf, &prob);
+
+ if (caps == NULL) {
+ if (gst_buffer_get_size (icydemux->typefind_buf) <
+ ICY_TYPE_FIND_MAX_SIZE) {
+ /* Just break for more data */
+ return GST_FLOW_OK;
+ }
+
+ /* We failed typefind */
+ GST_ELEMENT_ERROR (icydemux, STREAM, TYPE_NOT_FOUND, (NULL),
+ ("No caps found for contents within an ICY stream"));
+ gst_buffer_unref (icydemux->typefind_buf);
+ icydemux->typefind_buf = NULL;
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ if (!gst_icydemux_add_srcpad (icydemux, caps)) {
+ GST_DEBUG_OBJECT (icydemux, "Failed to add srcpad");
+ gst_caps_unref (caps);
+ gst_buffer_unref (icydemux->typefind_buf);
+ icydemux->typefind_buf = NULL;
+ return GST_FLOW_ERROR;
+ }
+ gst_caps_unref (caps);
+
+ if (icydemux->cached_events) {
+ gst_icydemux_send_cached_events (icydemux);
+ }
+
+ if (icydemux->cached_tags) {
+ gst_icydemux_send_tag_event (icydemux, icydemux->cached_tags);
+ icydemux->cached_tags = NULL;
+ }
+
+ /* Move onto streaming: call ourselves recursively with the typefind buffer
+ * to get that forwarded. */
+ icydemux->typefinding = FALSE;
+
+ tf_buf = icydemux->typefind_buf;
+ icydemux->typefind_buf = NULL;
+ return gst_icydemux_typefind_or_forward (icydemux, tf_buf);
+ } else {
+ if (G_UNLIKELY (icydemux->srcpad == NULL)) {
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+
+ buf = gst_buffer_make_writable (buf);
+
+ /* Most things don't care, and it's a pain to track (we should preserve a
+ * 0 offset on the first buffer though if it's there, for id3demux etc.) */
+ if (GST_BUFFER_OFFSET (buf) != 0) {
+ GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
+ }
+
+ return gst_pad_push (icydemux->srcpad, buf);
+ }
+}
+
+static void
+gst_icydemux_add_meta (GstICYDemux * icydemux, GstBuffer * buf)
+{
+ if (!icydemux->meta_adapter)
+ icydemux->meta_adapter = gst_adapter_new ();
+
+ gst_adapter_push (icydemux->meta_adapter, buf);
+}
+
+static GstFlowReturn
+gst_icydemux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+ GstICYDemux *icydemux;
+ guint size, chunk, offset;
+ GstBuffer *sub;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ icydemux = GST_ICYDEMUX (parent);
+
+ if (G_UNLIKELY (icydemux->meta_interval < 0))
+ goto not_negotiated;
+
+ if (icydemux->meta_interval == 0) {
+ ret = gst_icydemux_typefind_or_forward (icydemux, buf);
+ buf = NULL;
+ goto done;
+ }
+
+ /* Go through the buffer, chopping it into appropriate chunks. Forward as
+ * tags or buffers, as appropriate
+ */
+ size = gst_buffer_get_size (buf);
+ offset = 0;
+ while (size) {
+ if (icydemux->remaining) {
+ chunk = (size <= icydemux->remaining) ? size : icydemux->remaining;
+ if (offset == 0 && chunk == size) {
+ sub = buf;
+ buf = NULL;
+ } else {
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, chunk);
+ }
+ offset += chunk;
+ icydemux->remaining -= chunk;
+ size -= chunk;
+
+ /* This buffer goes onto typefinding, and/or directly pushed out */
+ ret = gst_icydemux_typefind_or_forward (icydemux, sub);
+ if (ret != GST_FLOW_OK)
+ goto done;
+ } else if (icydemux->meta_remaining) {
+ chunk = (size <= icydemux->meta_remaining) ?
+ size : icydemux->meta_remaining;
+ sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL, offset, chunk);
+ gst_icydemux_add_meta (icydemux, sub);
+
+ offset += chunk;
+ icydemux->meta_remaining -= chunk;
+ size -= chunk;
+
+ if (icydemux->meta_remaining == 0) {
+ /* Parse tags from meta_adapter, send off as tag messages */
+ GST_DEBUG_OBJECT (icydemux, "No remaining metadata, parsing for tags");
+ gst_icydemux_parse_and_send_tags (icydemux);
+
+ icydemux->remaining = icydemux->meta_interval;
+ }
+ } else {
+ guint8 byte;
+ /* We need to read a single byte (always safe at this point in the loop)
+ * to figure out how many bytes of metadata exist.
+ * The 'spec' tells us to read 16 * (byte_value) bytes of metadata after
+ * this (zero is common, and means the metadata hasn't changed).
+ */
+ gst_buffer_extract (buf, offset, &byte, 1);
+ icydemux->meta_remaining = 16 * byte;
+ if (icydemux->meta_remaining == 0)
+ icydemux->remaining = icydemux->meta_interval;
+
+ offset += 1;
+ size -= 1;
+ }
+ }
+
+done:
+ if (buf)
+ gst_buffer_unref (buf);
+
+ return ret;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_WARNING_OBJECT (icydemux, "meta_interval not set, buffer probably had "
+ "no caps set. Try enabling iradio-mode on the http source element");
+ gst_buffer_unref (buf);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
+
+static GstStateChangeReturn
+gst_icydemux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstICYDemux *icydemux = GST_ICYDEMUX (element);
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_icydemux_reset (icydemux);
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
+
+/* takes ownership of tag list */
+static gboolean
+gst_icydemux_send_tag_event (GstICYDemux * icydemux, GstTagList * tags)
+{
+ GstEvent *event;
+
+ event = gst_event_new_tag (tags);
+
+ GST_DEBUG_OBJECT (icydemux, "Sending tag event on src pad");
+ return gst_pad_push_event (icydemux->srcpad, event);
+
+}
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ return GST_ELEMENT_REGISTER (icydemux, plugin);
+
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ icydemux,
+ "Demux ICY tags from a stream",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/icydemux/gsticydemux.h b/gst/icydemux/gsticydemux.h
new file mode 100644
index 0000000000..d51a54b0d0
--- /dev/null
+++ b/gst/icydemux/gsticydemux.h
@@ -0,0 +1,89 @@
+/* Copyright 2005 Jan Schmidt <thaytan@mad.scientist.com>
+ * 2006 Michael Smith <msmith@fluendo.com>
+ * Copyright (C) 2003-2004 Benjamin Otte <otte@gnome.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_ICYDEMUX_H__
+#define __GST_ICYDEMUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/base/gsttypefindhelper.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_ICYDEMUX \
+ (gst_icydemux_get_type())
+#define GST_ICYDEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ICYDEMUX,GstICYDemux))
+#define GST_ICYDEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ICYDEMUX,GstICYDemuxClass))
+#define GST_IS_ICYDEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ICYDEMUX))
+#define GST_IS_ICYDEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ICYDEMUX))
+
+typedef struct _GstICYDemux GstICYDemux;
+typedef struct _GstICYDemuxClass GstICYDemuxClass;
+
+struct _GstICYDemux
+{
+ GstElement element;
+
+ GstPad *sinkpad, *srcpad;
+
+ /* Interval between metadata updates */
+ gint meta_interval;
+
+ /* Remaining bytes until the next metadata update */
+ gint remaining;
+
+ /* When 'remaining' is zero, this holds the number of bytes of metadata we
+ * still need to read, or zero if we don't yet know (which means we need to
+ * read one byte, after which we can initialise this properly) */
+ gint meta_remaining;
+
+ /* Caps for the data enclosed */
+ GstCaps *src_caps;
+
+ /* True if we're still typefinding */
+ gboolean typefinding;
+
+ GstTagList *cached_tags;
+ GList *cached_events;
+
+ GstAdapter *meta_adapter;
+
+ GstBuffer *typefind_buf;
+
+ /* upstream HTTP Content-Type */
+ gchar *content_type;
+};
+
+struct _GstICYDemuxClass
+{
+ GstElementClass parent_class;
+};
+
+GType gst_icydemux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (icydemux);
+
+G_END_DECLS
+
+#endif /* __GST_ICYDEMUX_H__ */
diff --git a/gst/icydemux/meson.build b/gst/icydemux/meson.build
new file mode 100644
index 0000000000..793e6529b7
--- /dev/null
+++ b/gst/icydemux/meson.build
@@ -0,0 +1,10 @@
+gsticydemux = library('gsticydemux',
+ 'gsticydemux.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc, libsinc],
+ dependencies : [gst_dep, gstbase_dep, gsttag_dep, zlib_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gsticydemux, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gsticydemux]
diff --git a/gst/id3demux/gstid3demux.c b/gst/id3demux/gstid3demux.c
new file mode 100644
index 0000000000..7c2b5e75e3
--- /dev/null
+++ b/gst/id3demux/gstid3demux.c
@@ -0,0 +1,292 @@
+/* -*- Mode: C; tab-width: 2; indent-tabs-mode: t; c-basic-offset: 2 -*- */
+/* GStreamer ID3 tag demuxer
+ * Copyright (C) 2005 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2003-2004 Benjamin Otte <otte@gnome.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-id3demux
+ * @title: id3demux
+ *
+ * id3demux accepts data streams with either (or both) ID3v2 regions at the
+ * start, or ID3v1 at the end. The mime type of the data between the tag blocks
+ * is detected using typefind functions, and the appropriate output mime type
+ * set on outgoing buffers.
+ *
+ * The element is only able to read ID3v1 tags from a seekable stream, because
+ * they are at the end of the stream. That is, when get_range mode is supported
+ * by the upstream elements. If get_range operation is available, id3demux makes
+ * it available downstream. This means that elements which require get_range
+ * mode, such as wavparse, can operate on files containing ID3 tag information.
+ *
+ * This id3demux element replaced an older element with the same name which
+ * relied on libid3tag from the MAD project.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=file.mp3 ! id3demux ! fakesink -t
+ * ]| This pipeline should read any available ID3 tag information and output it.
+ * The contents of the file inside the ID3 tag regions should be detected, and
+ * the appropriate mime type set on buffers produced from id3demux.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <gst/gst.h>
+#include <gst/gst-i18n-plugin.h>
+#include <gst/tag/tag.h>
+#include <gst/pbutils/pbutils.h>
+#include <string.h>
+
+#include "gstid3demux.h"
+
+enum
+{
+ PROP_0,
+ PROP_PREFER_V1
+};
+
+#define DEFAULT_PREFER_V1 FALSE
+
+GST_DEBUG_CATEGORY (id3demux_debug);
+#define GST_CAT_DEFAULT (id3demux_debug)
+
+#define ID3V1_TAG_SIZE 128
+#define ID3V2_HDR_SIZE GST_TAG_ID3V2_HEADER_SIZE
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-id3")
+ );
+
+static gboolean gst_id3demux_identify_tag (GstTagDemux * demux,
+ GstBuffer * buffer, gboolean start_tag, guint * tag_size);
+static GstTagDemuxResult gst_id3demux_parse_tag (GstTagDemux * demux,
+ GstBuffer * buffer, gboolean start_tag, guint * tag_size,
+ GstTagList ** tags);
+static GstTagList *gst_id3demux_merge_tags (GstTagDemux * tagdemux,
+ const GstTagList * start_tags, const GstTagList * end_tags);
+
+static void gst_id3demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_id3demux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+#define gst_id3demux_parent_class parent_class
+G_DEFINE_TYPE (GstID3Demux, gst_id3demux, GST_TYPE_TAG_DEMUX);
+#define _do_init \
+ GST_DEBUG_CATEGORY_INIT (id3demux_debug, "id3demux", 0, \
+ "GStreamer ID3 tag demuxer"); \
+ gst_tag_register_musicbrainz_tags ();
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (id3demux, "id3demux",
+ GST_RANK_PRIMARY, GST_TYPE_ID3DEMUX, _do_init);
+
+static void
+gst_id3demux_class_init (GstID3DemuxClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstTagDemuxClass *tagdemux_class = (GstTagDemuxClass *) klass;
+
+ gobject_class->set_property = gst_id3demux_set_property;
+ gobject_class->get_property = gst_id3demux_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_PREFER_V1,
+ g_param_spec_boolean ("prefer-v1", "Prefer version 1 tag",
+ "Prefer tags from ID3v1 tag at end of file when both ID3v1 "
+ "and ID3v2 tags are present", DEFAULT_PREFER_V1,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+ gst_element_class_set_static_metadata (gstelement_class, "ID3 tag demuxer",
+ "Codec/Demuxer/Metadata",
+ "Read and output ID3v1 and ID3v2 tags while demuxing the contents",
+ "Jan Schmidt <thaytan@mad.scientist.com>");
+
+ tagdemux_class->identify_tag = GST_DEBUG_FUNCPTR (gst_id3demux_identify_tag);
+ tagdemux_class->parse_tag = GST_DEBUG_FUNCPTR (gst_id3demux_parse_tag);
+ tagdemux_class->merge_tags = GST_DEBUG_FUNCPTR (gst_id3demux_merge_tags);
+
+ tagdemux_class->min_start_size = ID3V2_HDR_SIZE;
+ tagdemux_class->min_end_size = ID3V1_TAG_SIZE;
+}
+
+static void
+gst_id3demux_init (GstID3Demux * id3demux)
+{
+ id3demux->prefer_v1 = DEFAULT_PREFER_V1;
+}
+
+static gboolean
+gst_id3demux_identify_tag (GstTagDemux * demux, GstBuffer * buf,
+ gboolean start_tag, guint * tag_size)
+{
+ guint8 data[3];
+
+ gst_buffer_extract (buf, 0, data, 3);
+
+ if (start_tag) {
+ if (data[0] != 'I' || data[1] != 'D' || data[2] != '3')
+ goto no_marker;
+
+ *tag_size = gst_tag_get_id3v2_tag_size (buf);
+ } else {
+ if (data[0] != 'T' || data[1] != 'A' || data[2] != 'G')
+ goto no_marker;
+
+ *tag_size = ID3V1_TAG_SIZE;
+ }
+
+ GST_INFO_OBJECT (demux, "Found ID3v%u marker, tag_size = %u",
+ (start_tag) ? 2 : 1, *tag_size);
+
+ return TRUE;
+
+no_marker:
+ {
+ GST_DEBUG_OBJECT (demux, "No ID3v%u marker found", (start_tag) ? 2 : 1);
+ return FALSE;
+ }
+}
+
+static void
+gst_id3demux_add_container_format (GstTagList * tags)
+{
+ GstCaps *sink_caps;
+
+ sink_caps = gst_static_pad_template_get_caps (&sink_factory);
+ gst_pb_utils_add_codec_description_to_tag_list (tags,
+ GST_TAG_CONTAINER_FORMAT, sink_caps);
+ gst_caps_unref (sink_caps);
+}
+
+static GstTagDemuxResult
+gst_id3demux_parse_tag (GstTagDemux * demux, GstBuffer * buffer,
+ gboolean start_tag, guint * tag_size, GstTagList ** tags)
+{
+ if (start_tag) {
+ *tag_size = gst_tag_get_id3v2_tag_size (buffer);
+ *tags = gst_tag_list_from_id3v2_tag (buffer);
+
+ if (G_LIKELY (*tags != NULL)) {
+ gst_id3demux_add_container_format (*tags);
+ return GST_TAG_DEMUX_RESULT_OK;
+ } else {
+ return GST_TAG_DEMUX_RESULT_BROKEN_TAG;
+ }
+ } else {
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ *tags = gst_tag_list_new_from_id3v1 (map.data);
+ gst_buffer_unmap (buffer, &map);
+
+ if (G_UNLIKELY (*tags == NULL))
+ return GST_TAG_DEMUX_RESULT_BROKEN_TAG;
+
+ gst_id3demux_add_container_format (*tags);
+ *tag_size = ID3V1_TAG_SIZE;
+ return GST_TAG_DEMUX_RESULT_OK;
+ }
+}
+
+static GstTagList *
+gst_id3demux_merge_tags (GstTagDemux * tagdemux, const GstTagList * start_tags,
+ const GstTagList * end_tags)
+{
+ GstID3Demux *id3demux;
+ GstTagList *merged;
+ gboolean prefer_v1;
+
+ id3demux = GST_ID3DEMUX (tagdemux);
+
+ GST_OBJECT_LOCK (id3demux);
+ prefer_v1 = id3demux->prefer_v1;
+ GST_OBJECT_UNLOCK (id3demux);
+
+ /* we merge in REPLACE mode, so put the less important tags first */
+ if (prefer_v1)
+ merged = gst_tag_list_merge (start_tags, end_tags, GST_TAG_MERGE_REPLACE);
+ else
+ merged = gst_tag_list_merge (end_tags, start_tags, GST_TAG_MERGE_REPLACE);
+
+ GST_LOG_OBJECT (id3demux, "start tags: %" GST_PTR_FORMAT, start_tags);
+ GST_LOG_OBJECT (id3demux, "end tags: %" GST_PTR_FORMAT, end_tags);
+ GST_LOG_OBJECT (id3demux, "merged tags: %" GST_PTR_FORMAT, merged);
+
+ return merged;
+}
+
+static void
+gst_id3demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstID3Demux *id3demux;
+
+ id3demux = GST_ID3DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_PREFER_V1:{
+ GST_OBJECT_LOCK (id3demux);
+ id3demux->prefer_v1 = g_value_get_boolean (value);
+ GST_OBJECT_UNLOCK (id3demux);
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_id3demux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstID3Demux *id3demux;
+
+ id3demux = GST_ID3DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_PREFER_V1:
+ GST_OBJECT_LOCK (id3demux);
+ g_value_set_boolean (value, id3demux->prefer_v1);
+ GST_OBJECT_UNLOCK (id3demux);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+
+ return GST_ELEMENT_REGISTER (id3demux, plugin);
+
+
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ id3demux,
+ "Demux ID3v1 and ID3v2 tags from a file",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/id3demux/gstid3demux.h b/gst/id3demux/gstid3demux.h
new file mode 100644
index 0000000000..e8d74eb096
--- /dev/null
+++ b/gst/id3demux/gstid3demux.h
@@ -0,0 +1,61 @@
+/* GStreamer ID3 tag demuxer
+ * Copyright (C) 2005 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2003-2004 Benjamin Otte <otte@gnome.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_ID3DEMUX_H__
+#define __GST_ID3DEMUX_H__
+
+#include <gst/tag/gsttagdemux.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject type boilerplate for GstID3Demux. */
+#define GST_TYPE_ID3DEMUX \
+ (gst_id3demux_get_type())
+#define GST_ID3DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ID3DEMUX,GstID3Demux))
+#define GST_ID3DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ID3DEMUX,GstID3DemuxClass))
+#define GST_IS_ID3DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ID3DEMUX))
+#define GST_IS_ID3DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ID3DEMUX))
+
+typedef struct _GstID3Demux GstID3Demux;
+typedef struct _GstID3DemuxClass GstID3DemuxClass;
+
+/* Instance structure: a GstTagDemux subclass with one extra flag. */
+struct _GstID3Demux
+{
+ GstTagDemux tagdemux;
+
+ gboolean prefer_v1; /* prefer ID3v1 tags over ID3v2 tags? */
+};
+
+/* Class structure: no virtual methods added beyond GstTagDemux. */
+struct _GstID3DemuxClass
+{
+ GstTagDemuxClass parent_class;
+};
+
+GType gst_id3demux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (id3demux);
+
+G_END_DECLS
+
+#endif /* __GST_ID3DEMUX_H__ */
+
diff --git a/gst/id3demux/meson.build b/gst/id3demux/meson.build
new file mode 100644
index 0000000000..e97c6381f6
--- /dev/null
+++ b/gst/id3demux/meson.build
@@ -0,0 +1,10 @@
+# Build the id3demux plugin from its single source file.
+gstid3demux = library('gstid3demux',
+ 'gstid3demux.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc, libsinc],
+ dependencies : [gst_dep, gstbase_dep, gsttag_dep, gstpbutils_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+# Generate a pkg-config file for the plugin and add it to the global list.
+pkgconfig.generate(gstid3demux, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstid3demux]
diff --git a/gst/imagefreeze/gstimagefreeze.c b/gst/imagefreeze/gstimagefreeze.c
new file mode 100644
index 0000000000..7fc54770c9
--- /dev/null
+++ b/gst/imagefreeze/gstimagefreeze.c
@@ -0,0 +1,1251 @@
+/* GStreamer
+ * Copyright (c) 2005 Edward Hervey <bilboed@bilboed.com>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2020 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-imagefreeze
+ * @title: imagefreeze
+ *
+ * The imagefreeze element generates a still frame video stream from
+ * the input. It duplicates the first frame with the framerate requested
+ * by downstream, allows seeking and answers queries.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=some.png ! decodebin ! imagefreeze ! autovideosink
+ * ]| This pipeline shows a still frame stream of a PNG file.
+ *
+ */
+
+/* This is based on the imagefreeze element from PiTiVi:
+ * http://git.gnome.org/browse/pitivi/tree/pitivi/elements/imagefreeze.py
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/glib-compat-private.h>
+
+#include "gstimagefreeze.h"
+
+/* Property defaults. */
+#define DEFAULT_NUM_BUFFERS -1
+#define DEFAULT_ALLOW_REPLACE FALSE
+#define DEFAULT_IS_LIVE FALSE
+
+/* Property IDs. */
+enum
+{
+ PROP_0,
+ PROP_NUM_BUFFERS,
+ PROP_ALLOW_REPLACE,
+ PROP_IS_LIVE,
+};
+
+/* Forward declarations of the GObject/GstElement vfuncs and pad
+ * functions implemented below. */
+static void gst_image_freeze_finalize (GObject * object);
+
+static void gst_image_freeze_reset (GstImageFreeze * self);
+
+static GstStateChangeReturn gst_image_freeze_change_state (GstElement * element,
+ GstStateChange transition);
+static GstClock *gst_image_freeze_provide_clock (GstElement * element);
+
+static void gst_image_freeze_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_image_freeze_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static GstFlowReturn gst_image_freeze_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+static gboolean gst_image_freeze_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_image_freeze_sink_setcaps (GstImageFreeze * self,
+ GstCaps * caps);
+static GstCaps *gst_image_freeze_query_caps (GstImageFreeze * self,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_image_freeze_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static void gst_image_freeze_src_loop (GstPad * pad);
+static gboolean gst_image_freeze_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_image_freeze_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+/* Both pads accept any raw video, with any caps features. */
+static GstStaticPadTemplate sink_pad_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw(ANY)"));
+
+static GstStaticPadTemplate src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw(ANY)"));
+
+GST_DEBUG_CATEGORY_STATIC (gst_image_freeze_debug);
+#define GST_CAT_DEFAULT gst_image_freeze_debug
+
+/* Type registration; the debug category is initialized as part of the
+ * element registration. */
+#define gst_image_freeze_parent_class parent_class
+G_DEFINE_TYPE (GstImageFreeze, gst_image_freeze, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (imagefreeze, "imagefreeze",
+ GST_RANK_NONE, GST_TYPE_IMAGE_FREEZE,
+ GST_DEBUG_CATEGORY_INIT (gst_image_freeze_debug, "imagefreeze", 0,
+ "imagefreeze element");
+ );
+
+/* Class initializer: installs the three properties, overrides
+ * change_state/provide_clock, and registers metadata and the static
+ * pad templates. */
+static void
+gst_image_freeze_class_init (GstImageFreezeClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->finalize = gst_image_freeze_finalize;
+ gobject_class->set_property = gst_image_freeze_set_property;
+ gobject_class->get_property = gst_image_freeze_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_NUM_BUFFERS,
+ g_param_spec_int ("num-buffers", "Number of buffers",
+ "Number of buffers to output before sending EOS (-1 = unlimited)",
+ -1, G_MAXINT, DEFAULT_NUM_BUFFERS, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_ALLOW_REPLACE,
+ g_param_spec_boolean ("allow-replace", "Allow Replace",
+ "Allow replacing the input buffer and always output the latest",
+ DEFAULT_ALLOW_REPLACE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstImageFreeze:is-live
+ *
+ * Selects whether the output stream should be a non-live stream based on
+ * the segment configured via a %GST_EVENT_SEEK, or whether the output
+ * stream should be a live stream with the negotiated framerate.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_IS_LIVE,
+ g_param_spec_boolean ("is-live", "Is Live",
+ "Whether to output a live video stream",
+ DEFAULT_IS_LIVE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_image_freeze_change_state);
+ gstelement_class->provide_clock =
+ GST_DEBUG_FUNCPTR (gst_image_freeze_provide_clock);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Still frame stream generator",
+ "Filter/Video",
+ "Generates a still frame stream from an image",
+ "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &sink_pad_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &src_pad_template);
+}
+
+/* Instance initializer: creates the sink pad (chain/event/query) and
+ * the src pad (event/query, pushed from a pad task), initializes the
+ * lock/cond and property defaults, then resets all streaming state. */
+static void
+gst_image_freeze_init (GstImageFreeze * self)
+{
+ self->sinkpad = gst_pad_new_from_static_template (&sink_pad_template, "sink");
+ gst_pad_set_chain_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_image_freeze_sink_chain));
+ gst_pad_set_event_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_image_freeze_sink_event));
+ gst_pad_set_query_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_image_freeze_sink_query));
+ GST_PAD_SET_PROXY_ALLOCATION (self->sinkpad);
+ gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+ self->srcpad = gst_pad_new_from_static_template (&src_pad_template, "src");
+ gst_pad_set_event_function (self->srcpad,
+ GST_DEBUG_FUNCPTR (gst_image_freeze_src_event));
+ gst_pad_set_query_function (self->srcpad,
+ GST_DEBUG_FUNCPTR (gst_image_freeze_src_query));
+ gst_pad_use_fixed_caps (self->srcpad);
+ gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
+
+ g_mutex_init (&self->lock);
+ g_cond_init (&self->blocked_cond);
+
+ self->num_buffers = DEFAULT_NUM_BUFFERS;
+ self->allow_replace = DEFAULT_ALLOW_REPLACE;
+ self->is_live = DEFAULT_IS_LIVE;
+
+ gst_image_freeze_reset (self);
+}
+
+/* Finalizer: drops the cached buffer/caps via reset and clears the
+ * mutex and condition variable.
+ *
+ * NOTE(review): resetting num_buffers here looks redundant since the
+ * object is being destroyed — presumably just defensive; confirm. */
+static void
+gst_image_freeze_finalize (GObject * object)
+{
+ GstImageFreeze *self = GST_IMAGE_FREEZE (object);
+
+ self->num_buffers = DEFAULT_NUM_BUFFERS;
+
+ gst_image_freeze_reset (self);
+
+ g_mutex_clear (&self->lock);
+ g_cond_clear (&self->blocked_cond);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Resets all streaming state under the lock: drops the cached buffer
+ * and caps, restores the buffer countdown, reinitializes the segment
+ * and marks the element flushing and in need of a new SEGMENT event. */
+static void
+gst_image_freeze_reset (GstImageFreeze * self)
+{
+ GST_DEBUG_OBJECT (self, "Resetting internal state");
+
+ g_mutex_lock (&self->lock);
+ gst_buffer_replace (&self->buffer, NULL);
+ gst_caps_replace (&self->buffer_caps, NULL);
+ gst_caps_replace (&self->current_caps, NULL);
+ self->num_buffers_left = self->num_buffers;
+
+ gst_segment_init (&self->segment, GST_FORMAT_TIME);
+ self->need_segment = TRUE;
+ self->flushing = TRUE;
+
+ self->negotiated_framerate = FALSE;
+ self->fps_n = self->fps_d = 0;
+ self->offset = 0;
+ self->seqnum = 0;
+ g_mutex_unlock (&self->lock);
+}
+
+/* Negotiates the src caps for the given sink caps.
+ *
+ * If a framerate was already negotiated, only the framerate field is
+ * forced onto the new caps. Otherwise the framerate is opened up to a
+ * full range, intersected with the template and the downstream peer
+ * caps, and each candidate is fixated to a framerate as close as
+ * possible to 25/1. Returns TRUE if usable src caps were set. */
+static gboolean
+gst_image_freeze_sink_setcaps (GstImageFreeze * self, GstCaps * caps)
+{
+ gboolean ret = FALSE;
+ GstStructure *s;
+ gint fps_n, fps_d;
+ GstCaps *othercaps, *intersection;
+ guint i, n;
+ GstPad *pad;
+
+ pad = self->sinkpad;
+
+ caps = gst_caps_copy (caps);
+
+ /* If we already negotiated a framerate then only update for the
+ * caps of the new buffer */
+ if (self->negotiated_framerate) {
+ gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, self->fps_n,
+ self->fps_d, NULL);
+ GST_DEBUG_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);
+ gst_pad_set_caps (self->srcpad, caps);
+ gst_caps_unref (caps);
+ return TRUE;
+ }
+
+ /* Else negotiate a framerate with downstream */
+
+ GST_DEBUG_OBJECT (pad, "Setting caps: %" GST_PTR_FORMAT, caps);
+
+ s = gst_caps_get_structure (caps, 0);
+
+ /* 1. Remove framerate */
+ gst_structure_remove_field (s, "framerate");
+ gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
+ NULL);
+
+ /* 2. Intersect with template caps */
+ othercaps = (GstCaps *) gst_pad_get_pad_template_caps (pad);
+ intersection = gst_caps_intersect (caps, othercaps);
+ GST_DEBUG_OBJECT (pad, "Intersecting: %" GST_PTR_FORMAT, caps);
+ GST_DEBUG_OBJECT (pad, "with: %" GST_PTR_FORMAT, othercaps);
+ GST_DEBUG_OBJECT (pad, "gave: %" GST_PTR_FORMAT, intersection);
+ gst_caps_unref (caps);
+ gst_caps_unref (othercaps);
+ caps = intersection;
+ intersection = othercaps = NULL;
+
+ /* 3. Intersect with downstream peer caps */
+ othercaps = gst_pad_peer_query_caps (self->srcpad, caps);
+ GST_DEBUG_OBJECT (pad, "Peer query resulted: %" GST_PTR_FORMAT, othercaps);
+ gst_caps_unref (caps);
+ caps = othercaps;
+ othercaps = NULL;
+
+ /* 4. For every candidate try to use it downstream with framerate as
+ * near as possible to 25/1 */
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstCaps *candidate = gst_caps_new_empty ();
+ /* NOTE(review): this inner 's' shadows the outer declaration above —
+ * harmless here, but worth renaming. */
+ GstStructure *s = gst_structure_copy (gst_caps_get_structure (caps, i));
+ GstCapsFeatures *f =
+ gst_caps_features_copy (gst_caps_get_features (caps, i));
+
+ gst_caps_append_structure_full (candidate, s, f);
+ if (gst_structure_has_field_typed (s, "framerate", GST_TYPE_FRACTION) ||
+ gst_structure_fixate_field_nearest_fraction (s, "framerate", 25, 1)) {
+ gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d);
+ if (fps_d != 0) {
+ gst_pad_set_caps (self->srcpad, candidate);
+ g_mutex_lock (&self->lock);
+ self->fps_n = fps_n;
+ self->fps_d = fps_d;
+ g_mutex_unlock (&self->lock);
+ self->negotiated_framerate = TRUE;
+ GST_DEBUG_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, candidate);
+ ret = TRUE;
+ gst_caps_unref (candidate);
+ break;
+ } else {
+ GST_WARNING_OBJECT (pad, "Invalid caps with framerate %d/%d", fps_n,
+ fps_d);
+ }
+ }
+ gst_caps_unref (candidate);
+ }
+
+ if (!ret)
+ GST_ERROR_OBJECT (pad, "No usable caps found");
+
+ gst_caps_unref (caps);
+
+ return ret;
+}
+
+/* remove framerate in writable @caps
+ *
+ * Replaces the framerate field of every structure in @caps with the
+ * full fraction range [0/1, G_MAXINT/1], since this element produces
+ * whatever framerate gets negotiated. @caps must be writable. */
+static void
+gst_image_freeze_remove_fps (GstImageFreeze * self, GstCaps * caps)
+{
+ gint i, n;
+
+ n = gst_caps_get_size (caps);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (caps, i);
+
+ gst_structure_remove_field (s, "framerate");
+ gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
+ 1, NULL);
+ }
+}
+
+/* Answers a CAPS query on @pad by proxying to the opposite pad's peer,
+ * intersecting with the pad template, and removing any framerate
+ * constraint (both from the filter passed upstream and from the
+ * result), since this element can output any framerate. Returns a new
+ * caps reference. */
+static GstCaps *
+gst_image_freeze_query_caps (GstImageFreeze * self, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *ret, *tmp, *templ;
+ GstPad *otherpad;
+
+ otherpad = (pad == self->srcpad) ? self->sinkpad : self->srcpad;
+
+ if (filter) {
+ filter = gst_caps_copy (filter);
+ gst_image_freeze_remove_fps (self, filter);
+ }
+ templ = gst_pad_get_pad_template_caps (pad);
+ tmp = gst_pad_peer_query_caps (otherpad, filter);
+ if (tmp) {
+ GST_LOG_OBJECT (otherpad, "peer caps %" GST_PTR_FORMAT, tmp);
+ ret = gst_caps_intersect (tmp, templ);
+ gst_caps_unref (tmp);
+ } else {
+ GST_LOG_OBJECT (otherpad, "going to copy");
+ ret = gst_caps_copy (templ);
+ }
+ if (templ)
+ gst_caps_unref (templ);
+ if (filter)
+ gst_caps_unref (filter);
+
+ ret = gst_caps_make_writable (ret);
+ gst_image_freeze_remove_fps (self, ret);
+
+ GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);
+
+ return ret;
+}
+
+/* Sink pad query handler: answers CAPS queries via query_caps and
+ * forwards everything else to the default handler. */
+static gboolean
+gst_image_freeze_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstImageFreeze *self = GST_IMAGE_FREEZE (parent);
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Handling query of type '%s'",
+ gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_query_parse_caps (query, &caps);
+ caps = gst_image_freeze_query_caps (self, pad, caps);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ }
+
+ return ret;
+}
+
+/* Converts @src_value between GST_FORMAT_DEFAULT (frame count) and
+ * GST_FORMAT_TIME using the negotiated framerate; -1 passes through
+ * unchanged. Returns FALSE for unsupported format pairs.
+ *
+ * NOTE(review): the DEFAULT->TIME path guards against fps_n == 0, but
+ * the TIME->DEFAULT path scales by fps_d unguarded — confirm callers
+ * only hit it after a framerate was negotiated. */
+static gboolean
+gst_image_freeze_convert (GstImageFreeze * self,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value)
+{
+ gboolean ret = FALSE;
+
+ if (src_format == *dest_format) {
+ *dest_value = src_value;
+ return TRUE;
+ }
+
+ if (src_value == -1) {
+ *dest_value = -1;
+ return TRUE;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_DEFAULT:{
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ g_mutex_lock (&self->lock);
+ if (self->fps_n == 0)
+ *dest_value = -1;
+ else
+ *dest_value =
+ gst_util_uint64_scale (src_value, GST_SECOND * self->fps_d,
+ self->fps_n);
+ g_mutex_unlock (&self->lock);
+ ret = TRUE;
+ break;
+ default:
+ break;
+ }
+ break;
+ }
+ case GST_FORMAT_TIME:{
+ switch (*dest_format) {
+ case GST_FORMAT_DEFAULT:
+ g_mutex_lock (&self->lock);
+ *dest_value =
+ gst_util_uint64_scale (src_value, self->fps_n,
+ self->fps_d * GST_SECOND);
+ g_mutex_unlock (&self->lock);
+ ret = TRUE;
+ break;
+ default:
+ break;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/* Src pad query handler.
+ *
+ * Answers CONVERT (via gst_image_freeze_convert), POSITION (frame
+ * offset or segment position), DURATION (segment stop, optionally
+ * scaled to frames), SEEKING (only seekable in non-live mode, in TIME
+ * or DEFAULT format), LATENCY (one frame of latency when live, none
+ * otherwise) and CAPS queries; everything else goes to the default
+ * handler. */
+static gboolean
+gst_image_freeze_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstImageFreeze *self = GST_IMAGE_FREEZE (parent);
+ gboolean ret = FALSE;
+
+ GST_LOG_OBJECT (pad, "Handling query of type '%s'",
+ gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CONVERT:{
+ GstFormat src_format, dest_format;
+ gint64 src_value, dest_value;
+
+ gst_query_parse_convert (query, &src_format, &src_value, &dest_format,
+ &dest_value);
+ ret =
+ gst_image_freeze_convert (self, src_format, src_value, &dest_format,
+ &dest_value);
+ if (ret)
+ gst_query_set_convert (query, src_format, src_value, dest_format,
+ dest_value);
+ break;
+ }
+ case GST_QUERY_POSITION:{
+ GstFormat format;
+ gint64 position;
+
+ gst_query_parse_position (query, &format, NULL);
+ switch (format) {
+ case GST_FORMAT_DEFAULT:{
+ /* DEFAULT position is the current frame offset */
+ g_mutex_lock (&self->lock);
+ position = self->offset;
+ g_mutex_unlock (&self->lock);
+ ret = TRUE;
+ break;
+ }
+ case GST_FORMAT_TIME:{
+ g_mutex_lock (&self->lock);
+ position = self->segment.position;
+ g_mutex_unlock (&self->lock);
+ ret = TRUE;
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (ret) {
+ gst_query_set_position (query, format, position);
+ GST_DEBUG_OBJECT (pad,
+ "Returning position %" G_GINT64_FORMAT " in format %s", position,
+ gst_format_get_name (format));
+ } else {
+ GST_DEBUG_OBJECT (pad, "Position query failed");
+ }
+ break;
+ }
+ case GST_QUERY_DURATION:{
+ GstFormat format;
+ gint64 duration;
+
+ gst_query_parse_duration (query, &format, NULL);
+ switch (format) {
+ case GST_FORMAT_TIME:{
+ g_mutex_lock (&self->lock);
+ duration = self->segment.stop;
+ g_mutex_unlock (&self->lock);
+ ret = TRUE;
+ break;
+ }
+ case GST_FORMAT_DEFAULT:{
+ /* convert segment stop (time) to a frame count, if known */
+ g_mutex_lock (&self->lock);
+ duration = self->segment.stop;
+ if (duration != -1)
+ duration =
+ gst_util_uint64_scale (duration, self->fps_n,
+ GST_SECOND * self->fps_d);
+ g_mutex_unlock (&self->lock);
+ ret = TRUE;
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (ret) {
+ gst_query_set_duration (query, format, duration);
+ GST_DEBUG_OBJECT (pad,
+ "Returning duration %" G_GINT64_FORMAT " in format %s", duration,
+ gst_format_get_name (format));
+ } else {
+ GST_DEBUG_OBJECT (pad, "Duration query failed");
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:{
+ GstFormat format;
+ gboolean seekable;
+
+ gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
+ seekable = !self->is_live && (format == GST_FORMAT_TIME
+ || format == GST_FORMAT_DEFAULT);
+
+ gst_query_set_seeking (query, format, seekable, (seekable ? 0 : -1), -1);
+ ret = TRUE;
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ if (self->is_live) {
+ /* If we run live, we output the buffer without any latency but allow
+ * for at most one frame of latency. If downstream takes longer to
+ * consume out frame we would skip ahead */
+ if (self->fps_n > 0 && self->fps_d > 0)
+ gst_query_set_latency (query, TRUE, 0,
+ gst_util_uint64_scale_ceil (GST_SECOND, self->fps_d,
+ self->fps_n));
+ else
+ gst_query_set_latency (query, TRUE, 0, GST_CLOCK_TIME_NONE);
+ } else {
+ /* If we don't run live, even if upstream is live, we never output any
+ * buffers with latency but immediately generate buffers as fast as we
+ * can according to the negotiated framerate */
+ gst_query_set_latency (query, FALSE, 0, GST_CLOCK_TIME_NONE);
+ }
+ ret = TRUE;
+ break;
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *caps;
+ gst_query_parse_caps (query, &caps);
+ caps = gst_image_freeze_query_caps (self, pad, caps);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+}
+
+
+/* Sink pad event handler.
+ *
+ * CAPS events are stored (applied later from the src loop), upstream
+ * SEGMENT events are dropped (the element generates its own segment),
+ * and EOS is dropped too unless no buffer arrived yet, in which case
+ * it is passed through so the pipeline can finish. FLUSH_START resets
+ * the internal state before forwarding. */
+static gboolean
+gst_image_freeze_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstImageFreeze *self = GST_IMAGE_FREEZE (parent);
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ g_mutex_lock (&self->lock);
+ gst_event_parse_caps (event, &caps);
+ gst_caps_replace (&self->current_caps, caps);
+ g_mutex_unlock (&self->lock);
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
+ }
+ case GST_EVENT_EOS:
+ if (!self->buffer) {
+ /* if we receive EOS before a buffer arrives, then let it pass */
+ GST_DEBUG_OBJECT (self, "EOS without input buffer, passing on");
+ ret = gst_pad_push_event (self->srcpad, event);
+ break;
+ }
+ /* fall-through */
+ case GST_EVENT_SEGMENT:
+ GST_DEBUG_OBJECT (pad, "Dropping event");
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
+ case GST_EVENT_FLUSH_START:
+ gst_image_freeze_reset (self);
+ /* fall through */
+ default:
+ /* forward, but report success for sticky events regardless */
+ ret = gst_pad_push_event (self->srcpad, gst_event_ref (event));
+ if (GST_EVENT_IS_STICKY (event))
+ ret = TRUE;
+ gst_event_unref (event);
+ break;
+ }
+
+ return ret;
+}
+
+/* Src pad event handler.
+ *
+ * Drops NAVIGATION/QOS/LATENCY/STEP, implements SEEK locally (rejected
+ * in live mode), tracks flushing state for FLUSH_START/STOP, and
+ * forwards everything else upstream. A SEEK updates the segment under
+ * the stream lock and (re)starts the src pad task if a buffer is
+ * already cached. */
+static gboolean
+gst_image_freeze_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstImageFreeze *self = GST_IMAGE_FREEZE (parent);
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NAVIGATION:
+ case GST_EVENT_QOS:
+ case GST_EVENT_LATENCY:
+ case GST_EVENT_STEP:
+ GST_DEBUG_OBJECT (pad, "Dropping event");
+ gst_event_unref (event);
+ ret = TRUE;
+ break;
+ case GST_EVENT_SEEK:{
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType start_type, stop_type;
+ gint64 start, stop;
+ gint64 last_stop;
+ gboolean start_task;
+ gboolean flush;
+ guint32 seqnum;
+
+ if (self->is_live) {
+ GST_ERROR_OBJECT (pad, "Can't seek in live mode");
+ ret = FALSE;
+ gst_event_unref (event);
+ break;
+ }
+
+ seqnum = gst_event_get_seqnum (event);
+ gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
+ &stop_type, &stop);
+ gst_event_unref (event);
+
+ flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+
+ /* only TIME and DEFAULT seeks are supported; DEFAULT (frames) is
+ * converted to TIME first */
+ if (format != GST_FORMAT_TIME && format != GST_FORMAT_DEFAULT) {
+ GST_ERROR_OBJECT (pad, "Seek in invalid format: %s",
+ gst_format_get_name (format));
+ ret = FALSE;
+ break;
+ }
+
+ if (format == GST_FORMAT_DEFAULT) {
+ format = GST_FORMAT_TIME;
+ if (!gst_image_freeze_convert (self, GST_FORMAT_DEFAULT, start, &format,
+ &start)
+ || !gst_image_freeze_convert (self, GST_FORMAT_DEFAULT, stop,
+ &format, &stop)
+ || start == -1 || stop == -1) {
+ GST_ERROR_OBJECT (pad,
+ "Failed to convert seek from DEFAULT format into TIME format");
+ ret = FALSE;
+ break;
+ }
+ }
+
+ if (flush) {
+ GstEvent *e;
+
+ g_mutex_lock (&self->lock);
+ self->flushing = TRUE;
+ g_mutex_unlock (&self->lock);
+
+ e = gst_event_new_flush_start ();
+ gst_event_set_seqnum (e, seqnum);
+ gst_pad_push_event (self->srcpad, e);
+ } else {
+ gst_pad_pause_task (self->srcpad);
+ }
+
+ /* serialize against the src loop before touching the segment */
+ GST_PAD_STREAM_LOCK (self->srcpad);
+
+ g_mutex_lock (&self->lock);
+
+ gst_segment_do_seek (&self->segment, rate, format, flags, start_type,
+ start, stop_type, stop, NULL);
+ self->need_segment = TRUE;
+ last_stop = self->segment.position;
+
+ start_task = self->buffer != NULL;
+ self->flushing = FALSE;
+ g_mutex_unlock (&self->lock);
+
+ if (flush) {
+ GstEvent *e;
+
+ e = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (e, seqnum);
+ gst_pad_push_event (self->srcpad, e);
+ }
+
+ if (flags & GST_SEEK_FLAG_SEGMENT) {
+ GstMessage *m;
+
+ m = gst_message_new_segment_start (GST_OBJECT (self),
+ format, last_stop);
+ gst_element_post_message (GST_ELEMENT (self), m);
+ }
+
+ self->seqnum = seqnum;
+ GST_PAD_STREAM_UNLOCK (self->srcpad);
+
+ GST_DEBUG_OBJECT (pad, "Seek successful");
+
+ if (start_task) {
+ g_mutex_lock (&self->lock);
+
+ if (self->buffer != NULL)
+ gst_pad_start_task (self->srcpad,
+ (GstTaskFunction) gst_image_freeze_src_loop, self->srcpad, NULL);
+
+ g_mutex_unlock (&self->lock);
+ }
+
+ ret = TRUE;
+ break;
+ }
+ case GST_EVENT_FLUSH_START:
+ g_mutex_lock (&self->lock);
+ self->flushing = TRUE;
+ g_mutex_unlock (&self->lock);
+ ret = gst_pad_push_event (self->sinkpad, event);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ gst_image_freeze_reset (self);
+ g_mutex_lock (&self->lock);
+ self->flushing = FALSE;
+ g_mutex_unlock (&self->lock);
+ ret = gst_pad_push_event (self->sinkpad, event);
+ break;
+ default:
+ ret = gst_pad_push_event (self->sinkpad, event);
+ break;
+ }
+
+ return ret;
+}
+
+/* GObject property setter.
+ *
+ * NOTE(review): unlike the id3demux setter, the fields are written
+ * without taking self->lock — presumably acceptable for these simple
+ * scalar properties, but worth confirming against the readers in the
+ * streaming thread. */
+static void
+gst_image_freeze_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstImageFreeze *self;
+
+ self = GST_IMAGE_FREEZE (object);
+
+ switch (prop_id) {
+ case PROP_NUM_BUFFERS:
+ self->num_buffers = g_value_get_int (value);
+ break;
+ case PROP_ALLOW_REPLACE:
+ self->allow_replace = g_value_get_boolean (value);
+ break;
+ case PROP_IS_LIVE:
+ self->is_live = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter; mirrors the setter above. */
+static void
+gst_image_freeze_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstImageFreeze *self;
+
+ self = GST_IMAGE_FREEZE (object);
+
+ switch (prop_id) {
+ case PROP_NUM_BUFFERS:
+ g_value_set_int (value, self->num_buffers);
+ break;
+ case PROP_ALLOW_REPLACE:
+ g_value_set_boolean (value, self->allow_replace);
+ break;
+ case PROP_IS_LIVE:
+ g_value_set_boolean (value, self->is_live);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Sink pad chain function: caches the incoming buffer and its caps and
+ * starts the src pad task that repeatedly pushes the frame.
+ *
+ * When allow-replace is FALSE, only the first buffer is kept and
+ * GST_FLOW_EOS is returned so upstream stops sending more; otherwise
+ * each new buffer replaces the cached one and GST_FLOW_OK is returned.
+ * A caps change triggers renegotiation via mark_reconfigure on the
+ * src pad. */
+static GstFlowReturn
+gst_image_freeze_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstImageFreeze *self = GST_IMAGE_FREEZE (parent);
+ GstFlowReturn flow_ret;
+
+ g_mutex_lock (&self->lock);
+ if (self->buffer && !self->allow_replace) {
+ GST_DEBUG_OBJECT (pad, "Already have a buffer, dropping");
+ gst_buffer_unref (buffer);
+ g_mutex_unlock (&self->lock);
+ return GST_FLOW_EOS;
+ }
+
+ if (!self->current_caps) {
+ GST_ERROR_OBJECT (pad, "Not negotiated yet");
+ g_mutex_unlock (&self->lock);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+
+ gst_buffer_replace (&self->buffer, buffer);
+ if (!self->buffer_caps
+ || !gst_caps_is_equal (self->buffer_caps, self->current_caps))
+ gst_pad_mark_reconfigure (self->srcpad);
+ gst_caps_replace (&self->buffer_caps, self->current_caps);
+ gst_buffer_unref (buffer);
+
+ gst_pad_start_task (self->srcpad, (GstTaskFunction) gst_image_freeze_src_loop,
+ self->srcpad, NULL);
+ flow_ret = self->allow_replace ? GST_FLOW_OK : GST_FLOW_EOS;
+ g_mutex_unlock (&self->lock);
+ return flow_ret;
+}
+
+static void
+gst_image_freeze_src_loop (GstPad * pad)
+{
+ GstImageFreeze *self = GST_IMAGE_FREEZE (GST_PAD_PARENT (pad));
+ GstBuffer *buffer;
+ guint64 offset;
+ GstClockTime timestamp, timestamp_end;
+ guint64 cstart, cstop;
+ gboolean in_seg, eos;
+ GstFlowReturn flow_ret = GST_FLOW_OK;
+ gboolean first = FALSE;
+
+ g_mutex_lock (&self->lock);
+ if (self->flushing) {
+ GST_DEBUG_OBJECT (pad, "Flushing");
+ flow_ret = GST_FLOW_FLUSHING;
+ g_mutex_unlock (&self->lock);
+ goto pause_task;
+ } else if (!self->buffer) {
+ GST_ERROR_OBJECT (pad, "Have no buffer yet");
+ flow_ret = GST_FLOW_ERROR;
+ g_mutex_unlock (&self->lock);
+ goto pause_task;
+ }
+
+ g_assert (self->buffer);
+
+ /* Take a new reference of the buffer here so we're guaranteed to have one
+ * in all the following code even if it disappears while we temporarily
+ * unlock the mutex */
+ buffer = gst_buffer_ref (self->buffer);
+
+ if (gst_pad_check_reconfigure (self->srcpad)) {
+ GstCaps *buffer_caps = gst_caps_ref (self->buffer_caps);
+ g_mutex_unlock (&self->lock);
+ if (!gst_image_freeze_sink_setcaps (self, buffer_caps)) {
+ gst_caps_unref (buffer_caps);
+ gst_buffer_unref (buffer);
+ gst_pad_mark_reconfigure (self->srcpad);
+ flow_ret = GST_FLOW_NOT_NEGOTIATED;
+ goto pause_task;
+ }
+ gst_caps_unref (buffer_caps);
+ g_mutex_lock (&self->lock);
+ }
+
+ /* normally we don't count buffers */
+ if (G_UNLIKELY (self->num_buffers_left >= 0)) {
+ GST_DEBUG_OBJECT (pad, "Buffers left %d", self->num_buffers_left);
+ if (self->num_buffers_left == 0) {
+ flow_ret = GST_FLOW_EOS;
+ gst_buffer_unref (buffer);
+ g_mutex_unlock (&self->lock);
+ goto pause_task;
+ } else {
+ self->num_buffers_left--;
+ }
+ }
+ buffer = gst_buffer_make_writable (buffer);
+ g_mutex_unlock (&self->lock);
+
+ if (self->need_segment) {
+ GstEvent *e;
+
+ GST_DEBUG_OBJECT (pad, "Pushing SEGMENT event: %" GST_SEGMENT_FORMAT,
+ &self->segment);
+ e = gst_event_new_segment (&self->segment);
+
+ if (self->seqnum)
+ gst_event_set_seqnum (e, self->seqnum);
+
+ g_mutex_lock (&self->lock);
+ if (self->segment.rate >= 0) {
+ self->offset =
+ gst_util_uint64_scale (self->segment.start, self->fps_n,
+ self->fps_d * GST_SECOND);
+ } else {
+ self->offset =
+ gst_util_uint64_scale (self->segment.stop, self->fps_n,
+ self->fps_d * GST_SECOND);
+ }
+ g_mutex_unlock (&self->lock);
+
+ self->need_segment = FALSE;
+ first = TRUE;
+
+ gst_pad_push_event (self->srcpad, e);
+ }
+
+ g_mutex_lock (&self->lock);
+ offset = self->offset;
+ if (self->is_live) {
+ GstClockTime base_time, clock_time;
+ GstClockTimeDiff jitter;
+ GstClockReturn clock_ret;
+ GstClock *clock;
+
+ clock = gst_element_get_clock (GST_ELEMENT (self));
+
+ /* Wait until the element went to PLAYING or flushing */
+ while ((!clock || self->blocked) && !self->flushing) {
+ g_cond_wait (&self->blocked_cond, &self->lock);
+ gst_clear_object (&clock);
+ clock = gst_element_get_clock (GST_ELEMENT (self));
+ }
+
+ if (self->flushing) {
+ g_mutex_unlock (&self->lock);
+ gst_buffer_unref (buffer);
+ flow_ret = GST_FLOW_FLUSHING;
+ gst_clear_object (&clock);
+ goto pause_task;
+ }
+
+ /* Wait on the clock until the time for our current frame is reached */
+ base_time = gst_element_get_base_time (GST_ELEMENT (self));
+ if (self->fps_n != 0) {
+ clock_time =
+ base_time + gst_util_uint64_scale (offset, self->fps_d * GST_SECOND,
+ self->fps_n);
+ } else {
+ clock_time = base_time;
+ }
+
+ self->clock_id = gst_clock_new_single_shot_id (clock, clock_time);
+ g_mutex_unlock (&self->lock);
+ GST_TRACE_OBJECT (self,
+ "Waiting for %" GST_TIME_FORMAT ", now %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (clock_time), GST_TIME_ARGS (gst_clock_get_time (clock)));
+ clock_ret = gst_clock_id_wait (self->clock_id, &jitter);
+ GST_TRACE_OBJECT (self,
+ "Waited for %" GST_TIME_FORMAT ", clock ret %d, jitter %"
+ GST_STIME_FORMAT, GST_TIME_ARGS (clock_time), clock_ret,
+ GST_STIME_ARGS (jitter));
+ g_mutex_lock (&self->lock);
+ gst_clock_id_unref (self->clock_id);
+ self->clock_id = NULL;
+ gst_object_unref (clock);
+
+ if (self->flushing || clock_ret == GST_CLOCK_UNSCHEDULED) {
+ g_mutex_unlock (&self->lock);
+ gst_buffer_unref (buffer);
+ flow_ret = GST_FLOW_FLUSHING;
+ goto pause_task;
+ }
+
+ /* If we were late, adjust our offset and jump ahead if needed */
+ if (self->fps_n != 0) {
+ if (jitter > 0) {
+ guint64 new_offset =
+ gst_util_uint64_scale (clock_time + jitter - base_time, self->fps_n,
+ self->fps_d * GST_SECOND);
+
+ if (new_offset != offset) {
+ GST_INFO_OBJECT (self,
+ "Late by %" GST_TIME_FORMAT ", old offset %" G_GUINT64_FORMAT
+ ", new offset %" G_GUINT64_FORMAT, GST_TIME_ARGS (jitter), offset,
+ new_offset);
+ self->offset = offset = new_offset;
+ }
+ }
+
+ timestamp =
+ gst_util_uint64_scale (offset, self->fps_d * GST_SECOND, self->fps_n);
+ timestamp_end =
+ gst_util_uint64_scale (offset + 1, self->fps_d * GST_SECOND,
+ self->fps_n);
+ } else {
+ /* If we have no framerate then we output a single frame now */
+ if (jitter > 0)
+ timestamp = jitter;
+ else
+ timestamp = 0;
+
+ timestamp_end = GST_CLOCK_TIME_NONE;
+ }
+ } else {
+ if (self->fps_n != 0) {
+ timestamp =
+ gst_util_uint64_scale (offset, self->fps_d * GST_SECOND, self->fps_n);
+ timestamp_end =
+ gst_util_uint64_scale (offset + 1, self->fps_d * GST_SECOND,
+ self->fps_n);
+ } else {
+ timestamp = self->segment.start;
+ timestamp_end = GST_CLOCK_TIME_NONE;
+ }
+ }
+
+ eos = (self->fps_n == 0 && offset > 0) ||
+ (self->segment.rate >= 0 && self->segment.stop != -1
+ && timestamp > self->segment.stop) || (self->segment.rate < 0
+ && offset == 0) || (self->segment.rate < 0
+ && self->segment.start != -1 && timestamp_end < self->segment.start);
+
+ if (self->fps_n == 0 && offset > 0)
+ in_seg = FALSE;
+ else
+ in_seg =
+ gst_segment_clip (&self->segment, GST_FORMAT_TIME, timestamp,
+ timestamp_end, &cstart, &cstop);
+
+ if (in_seg) {
+ self->segment.position = cstart;
+ if (self->segment.rate >= 0)
+ self->segment.position = cstop;
+ }
+
+ if (self->segment.rate >= 0)
+ self->offset++;
+ else
+ self->offset--;
+ g_mutex_unlock (&self->lock);
+
+ GST_DEBUG_OBJECT (pad, "Handling buffer with timestamp %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (in_seg) {
+ GST_BUFFER_DTS (buffer) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_PTS (buffer) = cstart;
+ GST_BUFFER_DURATION (buffer) = cstop - cstart;
+ GST_BUFFER_OFFSET (buffer) = offset;
+ GST_BUFFER_OFFSET_END (buffer) = offset + 1;
+ if (first)
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+ else
+ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
+ flow_ret = gst_pad_push (self->srcpad, buffer);
+ GST_DEBUG_OBJECT (pad, "Pushing buffer resulted in %s",
+ gst_flow_get_name (flow_ret));
+ if (flow_ret != GST_FLOW_OK)
+ goto pause_task;
+ } else {
+ gst_buffer_unref (buffer);
+ }
+
+ if (eos) {
+ flow_ret = GST_FLOW_EOS;
+ goto pause_task;
+ }
+
+ return;
+
+pause_task:
+ {
+ const gchar *reason = gst_flow_get_name (flow_ret);
+
+ GST_LOG_OBJECT (self, "pausing task, reason %s", reason);
+ gst_pad_pause_task (pad);
+
+ if (flow_ret == GST_FLOW_EOS) {
+ if ((self->segment.flags & GST_SEEK_FLAG_SEGMENT)) {
+ GstMessage *m;
+ GstEvent *e;
+
+ GST_DEBUG_OBJECT (pad, "Sending segment done at end of segment");
+ if (self->segment.rate >= 0) {
+ m = gst_message_new_segment_done (GST_OBJECT_CAST (self),
+ GST_FORMAT_TIME, self->segment.stop);
+ e = gst_event_new_segment_done (GST_FORMAT_TIME, self->segment.stop);
+ } else {
+ m = gst_message_new_segment_done (GST_OBJECT_CAST (self),
+ GST_FORMAT_TIME, self->segment.start);
+ e = gst_event_new_segment_done (GST_FORMAT_TIME, self->segment.start);
+ }
+ gst_element_post_message (GST_ELEMENT_CAST (self), m);
+ gst_pad_push_event (self->srcpad, e);
+ } else {
+ GstEvent *e = gst_event_new_eos ();
+
+ GST_DEBUG_OBJECT (pad, "Sending EOS at end of segment");
+
+ if (self->seqnum)
+ gst_event_set_seqnum (e, self->seqnum);
+ gst_pad_push_event (self->srcpad, e);
+ }
+ } else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
+ GstEvent *e = gst_event_new_eos ();
+
+ GST_ELEMENT_FLOW_ERROR (self, flow_ret);
+
+ if (self->seqnum)
+ gst_event_set_seqnum (e, self->seqnum);
+
+ gst_pad_push_event (self->srcpad, e);
+ }
+ return;
+ }
+}
+
/* GstElement::change_state implementation.
 * Upward work is done before chaining up to the parent class; the
 * PLAYING->PAUSED blocking is done after, so the src task is stopped or
 * unblocked at the correct moment relative to the base class handling. */
static GstStateChangeReturn
gst_image_freeze_change_state (GstElement * element, GstStateChange transition)
{
  GstImageFreeze *self = GST_IMAGE_FREEZE (element);
  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
  gboolean no_preroll = FALSE;

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Start from a clean slate; the src task stays blocked until PLAYING */
      gst_image_freeze_reset (self);
      g_mutex_lock (&self->lock);
      self->flushing = FALSE;
      self->blocked = TRUE;
      g_mutex_unlock (&self->lock);
      /* Live input cannot preroll while paused */
      if (self->is_live)
        no_preroll = TRUE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      /* Wake up the src task waiting on blocked_cond */
      g_mutex_lock (&self->lock);
      self->blocked = FALSE;
      g_cond_signal (&self->blocked_cond);
      g_mutex_unlock (&self->lock);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* Shutting down: mark flushing, abort any pending clock wait and
       * unblock the task so gst_pad_stop_task() below can join it */
      g_mutex_lock (&self->lock);
      self->flushing = TRUE;
      if (self->clock_id) {
        GST_DEBUG_OBJECT (self, "unlock clock wait");
        gst_clock_id_unschedule (self->clock_id);
      }
      self->blocked = FALSE;
      g_cond_signal (&self->blocked_cond);
      g_mutex_unlock (&self->lock);
      gst_image_freeze_reset (self);
      gst_pad_stop_task (self->srcpad);
      break;
    default:
      break;
  }

  if (GST_ELEMENT_CLASS (parent_class)->change_state)
    ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      /* Block the src task again while paused */
      g_mutex_lock (&self->lock);
      self->blocked = TRUE;
      g_mutex_unlock (&self->lock);
      if (self->is_live)
        no_preroll = TRUE;
      break;
    default:
      break;
  }

  /* Report NO_PREROLL instead of SUCCESS for live input */
  if (no_preroll && ret == GST_STATE_CHANGE_SUCCESS)
    ret = GST_STATE_CHANGE_NO_PREROLL;

  return ret;
}
+
+/* FIXME: GStreamer 2.0 */
+static GstClock *
+gst_image_freeze_provide_clock (GstElement * element)
+{
+ return gst_system_clock_obtain ();
+}
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ return GST_ELEMENT_REGISTER (imagefreeze, plugin);
+}
+
/* Plugin descriptor; GStreamer calls plugin_init when the plugin is loaded. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    imagefreeze,
    "Still frame stream generator",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/imagefreeze/gstimagefreeze.h b/gst/imagefreeze/gstimagefreeze.h
new file mode 100644
index 0000000000..bf2918b6c2
--- /dev/null
+++ b/gst/imagefreeze/gstimagefreeze.h
@@ -0,0 +1,87 @@
+/* GStreamer
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_IMAGE_FREEZE_H__
#define __GST_IMAGE_FREEZE_H__

#include <gst/gst.h>

G_BEGIN_DECLS

/* Standard GObject type macros for GstImageFreeze */
#define GST_TYPE_IMAGE_FREEZE \
  (gst_image_freeze_get_type())
#define GST_IMAGE_FREEZE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_IMAGE_FREEZE,GstImageFreeze))
#define GST_IMAGE_FREEZE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_IMAGE_FREEZE,GstImageFreezeClass))
#define GST_IMAGE_FREEZE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_IMAGE_FREEZE,GstImageFreezeClass))
#define GST_IS_IMAGE_FREEZE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_IMAGE_FREEZE))
#define GST_IS_IMAGE_FREEZE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_IMAGE_FREEZE))

typedef struct _GstImageFreeze GstImageFreeze;
typedef struct _GstImageFreezeClass GstImageFreezeClass;

struct _GstImageFreeze
{
  GstElement parent;

  /* < private > */
  GstPad *sinkpad;
  GstPad *srcpad;

  /* Protects all of the mutable state below */
  GMutex lock;
  /* The frame being repeated on the srcpad — presumably the last buffer
   * received on the sinkpad; TODO confirm against the chain function */
  GstBuffer *buffer;
  GstCaps *buffer_caps, *current_caps;

  gboolean negotiated_framerate;
  /* Output framerate; fps_n == 0 means "no framerate": a single buffer
   * is pushed and then EOS (see the src loop) */
  gint fps_n, fps_d;

  GstSegment segment;
  gboolean need_segment;
  /* Seqnum applied to EOS events pushed at end of segment */
  guint seqnum;

  gint num_buffers;
  gint num_buffers_left;

  gboolean allow_replace;

  /* TRUE when input is live; makes state changes return NO_PREROLL */
  gboolean is_live;
  /* While TRUE the src task waits on blocked_cond (PAUSED state) */
  gboolean blocked;
  GCond blocked_cond;
  /* Pending single-shot clock wait, unscheduled on flush/shutdown */
  GstClockID clock_id;

  /* Index of the current output frame; incremented (rate >= 0) or
   * decremented (rate < 0) after each push */
  guint64 offset;

  gboolean flushing;
};

struct _GstImageFreezeClass
{
  GstElementClass parent_class;
};

GType gst_image_freeze_get_type (void);

GST_ELEMENT_REGISTER_DECLARE (imagefreeze);

G_END_DECLS

#endif /* __GST_IMAGE_FREEZE_H__ */
diff --git a/gst/imagefreeze/meson.build b/gst/imagefreeze/meson.build
new file mode 100644
index 0000000000..ce50e6dd93
--- /dev/null
+++ b/gst/imagefreeze/meson.build
@@ -0,0 +1,10 @@
# Build the imagefreeze plugin from its single source file and register it
# with the shared plugin/pkg-config lists of the enclosing project.
gstimagefreeze = library('gstimagefreeze',
  'gstimagefreeze.c',
  c_args : gst_plugins_good_args,
  include_directories : [configinc, libsinc],
  dependencies : glib_deps + [gst_dep],
  install : true,
  install_dir : plugins_install_dir,
)
pkgconfig.generate(gstimagefreeze, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstimagefreeze]
diff --git a/gst/interleave/deinterleave.c b/gst/interleave/deinterleave.c
new file mode 100644
index 0000000000..4d5ce98dbd
--- /dev/null
+++ b/gst/interleave/deinterleave.c
@@ -0,0 +1,1032 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2005 Wim Taymans <wim@fluendo.com>
+ * 2007 Andy Wingo <wingo at pobox.com>
+ * 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * deinterleave.c: deinterleave samples
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* TODO:
+ * - handle changes in number of channels
+ * - handle changes in channel positions
+ * - better capsnego by using a buffer alloc function
+ * and passing downstream caps changes upstream there
+ */
+
+/**
+ * SECTION:element-deinterleave
+ * @title: deinterleave
+ * @see_also: interleave
+ *
+ * Splits one interleaved multichannel audio stream into many mono audio streams.
+ *
+ * This element handles all raw audio formats and supports changing the input caps as long as
+ * all downstream elements can handle the new caps and the number of channels and the channel
+ * positions stay the same. This restriction will be removed in later versions by adding or
+ * removing some source pads as required.
+ *
+ * In most cases a queue and an audioconvert element should be added after each source pad
+ * before further processing of the audio data.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=/path/to/file.mp3 ! decodebin ! audioconvert ! "audio/x-raw,channels=2" ! deinterleave name=d d.src_0 ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=channel1.ogg d.src_1 ! queue ! audioconvert ! vorbisenc ! oggmux ! filesink location=channel2.ogg
+ * ]| Decodes an MP3 file and encodes the left and right channel into separate
+ * Ogg Vorbis files.
+ * |[
+ * gst-launch-1.0 filesrc location=file.mp3 ! decodebin ! audioconvert ! "audio/x-raw,channels=2" ! deinterleave name=d interleave name=i ! audioconvert ! wavenc ! filesink location=test.wav d.src_0 ! queue ! audioconvert ! i.sink_1 d.src_1 ! queue ! audioconvert ! i.sink_0
+ * ]| Decodes and deinterleaves a stereo MP3 file into separate channels and
+ * then interleaves the channels again into a WAV file, with the two
+ * channels exchanged.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <string.h>
+#include "gstinterleaveelements.h"
+#include "deinterleave.h"
+
GST_DEBUG_CATEGORY_STATIC (gst_deinterleave_debug);
#define GST_CAT_DEFAULT gst_deinterleave_debug

/* One mono src pad per input channel; created on the fly (SOMETIMES)
 * once the sink caps are known */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src_%u",
    GST_PAD_SRC,
    GST_PAD_SOMETIMES,
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) " GST_AUDIO_FORMATS_ALL ", "
        "rate = (int) [ 1, MAX ], "
        "channels = (int) 1, layout = (string) {non-interleaved, interleaved}"));

/* Sink accepts any raw interleaved audio with any channel count */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) " GST_AUDIO_FORMATS_ALL ", "
        "rate = (int) [ 1, MAX ], "
        "channels = (int) [ 1, MAX ], layout = (string) interleaved"));
+
+#define MAKE_FUNC(type) \
+static void deinterleave_##type (guint##type *out, guint##type *in, \
+ guint stride, guint nframes) \
+{ \
+ gint i; \
+ \
+ for (i = 0; i < nframes; i++) { \
+ out[i] = *in; \
+ in += stride; \
+ } \
+}
+
+MAKE_FUNC (8);
+MAKE_FUNC (16);
+MAKE_FUNC (32);
+MAKE_FUNC (64);
+
+static void
+deinterleave_24 (guint8 * out, guint8 * in, guint stride, guint nframes)
+{
+ gint i;
+
+ for (i = 0; i < nframes; i++) {
+ memcpy (out, in, 3);
+ out += 3;
+ in += stride * 3;
+ }
+}
+
#define gst_deinterleave_parent_class parent_class
G_DEFINE_TYPE (GstDeinterleave, gst_deinterleave, GST_TYPE_ELEMENT);
GST_ELEMENT_REGISTER_DEFINE (deinterleave, "deinterleave",
    GST_RANK_NONE, gst_deinterleave_get_type ());

/* Property IDs */
enum
{
  PROP_0,
  PROP_KEEP_POSITIONS
};

/* Forward declarations for the vfunc/pad-function implementations below */
static GstFlowReturn gst_deinterleave_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer);

static gboolean gst_deinterleave_sink_setcaps (GstDeinterleave * self,
    GstCaps * caps);

static GstStateChangeReturn
gst_deinterleave_change_state (GstElement * element, GstStateChange transition);

static gboolean gst_deinterleave_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static gboolean gst_deinterleave_sink_query (GstPad * pad, GstObject * parent,
    GstQuery * query);

static gboolean gst_deinterleave_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query);

static void gst_deinterleave_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_deinterleave_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
+
+
+static void
+gst_deinterleave_finalize (GObject * obj)
+{
+ GstDeinterleave *self = GST_DEINTERLEAVE (obj);
+
+ if (self->pending_events) {
+ g_list_foreach (self->pending_events, (GFunc) gst_mini_object_unref, NULL);
+ g_list_free (self->pending_events);
+ self->pending_events = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+}
+
+static void
+gst_deinterleave_class_init (GstDeinterleaveClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_deinterleave_debug, "deinterleave", 0,
+ "deinterleave element");
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Audio deinterleaver", "Filter/Converter/Audio",
+ "Splits one interleaved multichannel audio stream into many mono audio streams",
+ "Andy Wingo <wingo at pobox.com>, " "Iain <iain@prettypeople.org>, "
+ "Sebastian Dröge <slomo@circular-chaos.org>");
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+ gstelement_class->change_state = gst_deinterleave_change_state;
+
+ gobject_class->finalize = gst_deinterleave_finalize;
+ gobject_class->set_property = gst_deinterleave_set_property;
+ gobject_class->get_property = gst_deinterleave_get_property;
+
+ /**
+ * GstDeinterleave:keep-positions
+ *
+ * Keep positions: When enable the caps on the output buffers will
+ * contain the original channel positions. This can be used to correctly
+ * interleave the output again later but can also lead to unwanted effects
+ * if the output should be handled as Mono.
+ *
+ */
+ g_object_class_install_property (gobject_class, PROP_KEEP_POSITIONS,
+ g_param_spec_boolean ("keep-positions", "Keep positions",
+ "Keep the original channel positions on the output buffers",
+ FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+}
+
+static void
+gst_deinterleave_init (GstDeinterleave * self)
+{
+ self->keep_positions = FALSE;
+ self->func = NULL;
+ gst_audio_info_init (&self->audio_info);
+
+ /* Add sink pad */
+ self->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_chain_function (self->sink,
+ GST_DEBUG_FUNCPTR (gst_deinterleave_chain));
+ gst_pad_set_event_function (self->sink,
+ GST_DEBUG_FUNCPTR (gst_deinterleave_sink_event));
+ gst_pad_set_query_function (self->sink,
+ GST_DEBUG_FUNCPTR (gst_deinterleave_sink_query));
+ gst_element_add_pad (GST_ELEMENT (self), self->sink);
+}
+
+typedef struct
+{
+ GstCaps *caps;
+ GstPad *pad;
+} CopyStickyEventsData;
+
+static gboolean
+copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+ CopyStickyEventsData *data = user_data;
+
+ if (GST_EVENT_TYPE (*event) >= GST_EVENT_CAPS && data->caps) {
+ gst_pad_set_caps (data->pad, data->caps);
+ data->caps = NULL;
+ }
+
+ if (GST_EVENT_TYPE (*event) != GST_EVENT_CAPS)
+ gst_pad_push_event (data->pad, gst_event_ref (*event));
+
+ return TRUE;
+}
+
/* Create one mono src pad per input channel, replay the sink pad's sticky
 * events on each of them and fire no-more-pads. Pads are prepended to
 * self->srcpads and the list reversed at the end so it ends up in channel
 * order. */
static void
gst_deinterleave_add_new_pads (GstDeinterleave * self, GstCaps * caps)
{
  GstPad *pad;
  guint i;

  for (i = 0; i < GST_AUDIO_INFO_CHANNELS (&self->audio_info); i++) {
    gchar *name = g_strdup_printf ("src_%u", i);
    GstCaps *srccaps;
    GstAudioInfo info;
    GstAudioFormat format = GST_AUDIO_INFO_FORMAT (&self->audio_info);
    gint rate = GST_AUDIO_INFO_RATE (&self->audio_info);
    GstAudioChannelPosition position = GST_AUDIO_CHANNEL_POSITION_MONO;
    CopyStickyEventsData data;

    /* Set channel position if we know it */
    if (self->keep_positions)
      position = GST_AUDIO_INFO_POSITION (&self->audio_info, i);

    /* Mono caps for this channel, same format and rate as the input */
    gst_audio_info_init (&info);
    gst_audio_info_set_format (&info, format, rate, 1, &position);

    srccaps = gst_audio_info_to_caps (&info);

    pad = gst_pad_new_from_static_template (&src_template, name);
    g_free (name);

    gst_pad_use_fixed_caps (pad);
    gst_pad_set_query_function (pad,
        GST_DEBUG_FUNCPTR (gst_deinterleave_src_query));
    gst_pad_set_active (pad, TRUE);

    /* Copy stream-start/segment/etc. from the sink pad; the CAPS event
     * is replaced by our mono caps (see copy_sticky_events) */
    data.pad = pad;
    data.caps = srccaps;
    gst_pad_sticky_events_foreach (self->sink, copy_sticky_events, &data);
    if (data.caps)
      gst_pad_set_caps (pad, data.caps);
    gst_element_add_pad (GST_ELEMENT (self), pad);
    /* Extra ref kept in srcpads; released in gst_deinterleave_remove_pads */
    self->srcpads = g_list_prepend (self->srcpads, gst_object_ref (pad));

    gst_caps_unref (srccaps);
  }

  gst_element_no_more_pads (GST_ELEMENT (self));
  self->srcpads = g_list_reverse (self->srcpads);
}
+
+static gboolean
+gst_deinterleave_set_pads_caps (GstDeinterleave * self, GstCaps * caps)
+{
+ GList *l;
+ gint i;
+ gboolean ret = TRUE;
+
+ for (l = self->srcpads, i = 0; l; l = l->next, i++) {
+ GstPad *pad = GST_PAD (l->data);
+ GstCaps *srccaps;
+ GstAudioInfo info;
+
+ if (!gst_audio_info_from_caps (&info, caps)) {
+ ret = FALSE;
+ continue;
+ }
+ if (self->keep_positions)
+ GST_AUDIO_INFO_POSITION (&info, 0) =
+ GST_AUDIO_INFO_POSITION (&self->audio_info, i);
+
+ srccaps = gst_audio_info_to_caps (&info);
+
+ gst_pad_set_caps (pad, srccaps);
+ gst_caps_unref (srccaps);
+ }
+ return ret;
+}
+
+static void
+gst_deinterleave_remove_pads (GstDeinterleave * self)
+{
+ GList *l;
+
+ GST_INFO_OBJECT (self, "removing pads");
+
+ for (l = self->srcpads; l; l = l->next) {
+ GstPad *pad = GST_PAD (l->data);
+
+ gst_element_remove_pad (GST_ELEMENT_CAST (self), pad);
+ gst_object_unref (pad);
+ }
+ g_list_free (self->srcpads);
+ self->srcpads = NULL;
+
+ gst_caps_replace (&self->sinkcaps, NULL);
+}
+
+static gboolean
+gst_deinterleave_set_process_function (GstDeinterleave * self)
+{
+ switch (GST_AUDIO_INFO_WIDTH (&self->audio_info)) {
+ case 8:
+ self->func = (GstDeinterleaveFunc) deinterleave_8;
+ break;
+ case 16:
+ self->func = (GstDeinterleaveFunc) deinterleave_16;
+ break;
+ case 24:
+ self->func = (GstDeinterleaveFunc) deinterleave_24;
+ break;
+ case 32:
+ self->func = (GstDeinterleaveFunc) deinterleave_32;
+ break;
+ case 64:
+ self->func = (GstDeinterleaveFunc) deinterleave_64;
+ break;
+ default:
+ return FALSE;
+ }
+ return TRUE;
+}
+
+static gboolean
+gst_deinterleave_check_caps_change (GstDeinterleave * self,
+ GstAudioInfo * old_info, GstAudioInfo * new_info)
+{
+ gint i;
+ gboolean same_layout = TRUE;
+ gboolean was_unpositioned;
+ gboolean is_unpositioned;
+ gint new_channels;
+ gint old_channels;
+
+ new_channels = GST_AUDIO_INFO_CHANNELS (new_info);
+ old_channels = GST_AUDIO_INFO_CHANNELS (old_info);
+
+ if (GST_AUDIO_INFO_IS_UNPOSITIONED (new_info) || new_channels == 1)
+ is_unpositioned = TRUE;
+ else
+ is_unpositioned = FALSE;
+
+ if (GST_AUDIO_INFO_IS_UNPOSITIONED (old_info) || old_channels == 1)
+ was_unpositioned = TRUE;
+ else
+ was_unpositioned = FALSE;
+
+ /* We allow caps changes as long as the number of channels doesn't change
+ * and the channel positions stay the same. _getcaps() should've cared
+ * for this already but better be safe.
+ */
+ if (new_channels != old_channels)
+ goto cannot_change_caps;
+
+ /* Now check the channel positions. If we had no channel positions
+ * and get them or the other way around things have changed.
+ * If we had channel positions and get different ones things have
+ * changed too of course
+ */
+ if ((!was_unpositioned && is_unpositioned) || (was_unpositioned
+ && !is_unpositioned))
+ goto cannot_change_caps;
+
+ if (!is_unpositioned) {
+ if (GST_AUDIO_INFO_CHANNELS (old_info) !=
+ GST_AUDIO_INFO_CHANNELS (new_info))
+ goto cannot_change_caps;
+ for (i = 0; i < GST_AUDIO_INFO_CHANNELS (old_info); i++) {
+ if (new_info->position[i] != old_info->position[i]) {
+ same_layout = FALSE;
+ break;
+ }
+ }
+ if (!same_layout)
+ goto cannot_change_caps;
+ }
+
+ return TRUE;
+
+cannot_change_caps:
+ return FALSE;
+}
+
/* Handle new caps on the sink pad: parse them into audio_info, pick the
 * matching process function, verify that any caps *change* keeps channel
 * count/positions, then create the src pads (first caps) or update their
 * caps (subsequent compatible caps). Uses goto-style error labels. */
static gboolean
gst_deinterleave_sink_setcaps (GstDeinterleave * self, GstCaps * caps)
{
  GstCaps *srccaps;
  GstStructure *s;

  GST_DEBUG_OBJECT (self, "got caps: %" GST_PTR_FORMAT, caps);

  if (!gst_audio_info_from_caps (&self->audio_info, caps))
    goto invalid_caps;

  if (!gst_deinterleave_set_process_function (self))
    goto unsupported_caps;

  /* Caps changed relative to what we negotiated before? */
  if (self->sinkcaps && !gst_caps_is_equal (caps, self->sinkcaps)) {
    GstAudioInfo old_info;

    gst_audio_info_init (&old_info);
    if (!gst_audio_info_from_caps (&old_info, self->sinkcaps))
      goto info_from_caps_failed;

    if (gst_deinterleave_check_caps_change (self, &old_info, &self->audio_info)) {
      /* NOTE(review): set_process_function was already called above on the
       * same audio_info; this second call looks redundant — confirm before
       * removing */
      if (!gst_deinterleave_set_process_function (self))
        goto cannot_change_caps;
    } else
      goto cannot_change_caps;

  }

  gst_caps_replace (&self->sinkcaps, caps);

  /* Get srcpad caps: same as sink caps but forced to mono */
  srccaps = gst_caps_copy (caps);
  s = gst_caps_get_structure (srccaps, 0);
  gst_structure_set (s, "channels", G_TYPE_INT, 1, NULL);
  gst_structure_remove_field (s, "channel-mask");

  /* If we already have pads, update the caps otherwise
   * add new pads */
  if (self->srcpads) {
    if (!gst_deinterleave_set_pads_caps (self, srccaps))
      goto set_caps_failed;
  } else {
    gst_deinterleave_add_new_pads (self, srccaps);
  }

  gst_caps_unref (srccaps);

  return TRUE;

cannot_change_caps:
  {
    GST_WARNING_OBJECT (self, "caps change from %" GST_PTR_FORMAT
        " to %" GST_PTR_FORMAT " not supported: channel number or channel "
        "positions change", self->sinkcaps, caps);
    return FALSE;
  }
unsupported_caps:
  {
    GST_ERROR_OBJECT (self, "caps not supported: %" GST_PTR_FORMAT, caps);
    return FALSE;
  }
invalid_caps:
  {
    GST_ERROR_OBJECT (self, "invalid caps");
    return FALSE;
  }
set_caps_failed:
  {
    GST_ERROR_OBJECT (self, "set_caps failed");
    gst_caps_unref (srccaps);
    return FALSE;
  }
info_from_caps_failed:
  {
    GST_ERROR_OBJECT (self, "could not get info from caps");
    return FALSE;
  }
}
+
+static void
+__remove_channels (GstCaps * caps)
+{
+ GstStructure *s;
+ gint i, size;
+
+ size = gst_caps_get_size (caps);
+ for (i = 0; i < size; i++) {
+ s = gst_caps_get_structure (caps, i);
+ gst_structure_remove_field (s, "channel-mask");
+ gst_structure_remove_field (s, "channels");
+ }
+}
+
+static void
+__set_channels (GstCaps * caps, gint channels)
+{
+ GstStructure *s;
+ gint i, size;
+
+ size = gst_caps_get_size (caps);
+ for (i = 0; i < size; i++) {
+ s = gst_caps_get_structure (caps, i);
+ if (channels > 0)
+ gst_structure_set (s, "channels", G_TYPE_INT, channels, NULL);
+ else
+ gst_structure_set (s, "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
+ }
+}
+
+static gboolean
+gst_deinterleave_sink_acceptcaps (GstPad * pad, GstObject * parent,
+ GstCaps * caps)
+{
+ GstDeinterleave *self = GST_DEINTERLEAVE (parent);
+ GstCaps *templ_caps = gst_pad_get_pad_template_caps (pad);
+ gboolean ret;
+
+ ret = gst_caps_can_intersect (templ_caps, caps);
+ gst_caps_unref (templ_caps);
+ if (ret && self->sinkcaps) {
+ GstAudioInfo new_info;
+
+ gst_audio_info_init (&new_info);
+ if (!gst_audio_info_from_caps (&new_info, caps))
+ goto info_from_caps_failed;
+ ret =
+ gst_deinterleave_check_caps_change (self, &self->audio_info, &new_info);
+ }
+
+ return ret;
+
+info_from_caps_failed:
+ {
+ GST_ERROR_OBJECT (self, "could not get info from caps");
+ return FALSE;
+ }
+}
+
/* Compute the caps for `pad` by intersecting the template caps of all of
 * our pads with their peers' caps (channel fields stripped everywhere
 * except on the queried pad itself). Ownership note: `ret` is always an
 * owned caps; every intersection replaces it and unrefs the old value. */
static GstCaps *
gst_deinterleave_getcaps (GstPad * pad, GstObject * parent, GstCaps * filter)
{
  GstDeinterleave *self = GST_DEINTERLEAVE (parent);
  GstCaps *ret;
  GstIterator *it;
  GstIteratorResult res;
  GValue v = G_VALUE_INIT;

  /* Src pads with fixed caps: just return those (filtered) */
  if (pad != self->sink) {
    ret = gst_pad_get_current_caps (pad);
    if (ret) {
      if (filter) {
        GstCaps *tmp =
            gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
        gst_caps_unref (ret);
        ret = tmp;
      }
      return ret;
    }
  }

  /* Intersect all of our pad template caps with the peer caps of the pad
   * to get all formats that are possible up- and downstream.
   *
   * For the pad for which the caps are requested we don't remove the channel
   * information as they must be in the returned caps and incompatibilities
   * will be detected here already
   */
  ret = gst_caps_new_any ();
  it = gst_element_iterate_pads (GST_ELEMENT_CAST (self));

  do {
    res = gst_iterator_next (it, &v);
    switch (res) {
      case GST_ITERATOR_OK:{
        GstPad *ourpad = GST_PAD (g_value_get_object (&v));
        GstCaps *peercaps = NULL, *ourcaps;
        GstCaps *templ_caps = gst_pad_get_pad_template_caps (ourpad);

        ourcaps = gst_caps_copy (templ_caps);
        gst_caps_unref (templ_caps);

        if (pad == ourpad) {
          /* Keep channel info only on the queried pad: N on sink, 1 on src */
          if (GST_PAD_DIRECTION (pad) == GST_PAD_SINK)
            __set_channels (ourcaps,
                GST_AUDIO_INFO_CHANNELS (&self->audio_info));
          else
            __set_channels (ourcaps, 1);
        } else {
          __remove_channels (ourcaps);
          /* Only ask for peer caps for other pads than pad
           * as otherwise gst_pad_peer_get_caps() might call
           * back into this function and deadlock
           */
          peercaps = gst_pad_peer_query_caps (ourpad, NULL);
          peercaps = gst_caps_make_writable (peercaps);
        }

        /* If the peer exists and has caps add them to the intersection,
         * otherwise assume that the peer accepts everything */
        if (peercaps) {
          GstCaps *intersection;
          GstCaps *oldret = ret;

          __remove_channels (peercaps);

          intersection = gst_caps_intersect (peercaps, ourcaps);

          ret = gst_caps_intersect (ret, intersection);
          gst_caps_unref (intersection);
          gst_caps_unref (peercaps);
          gst_caps_unref (oldret);
        } else {
          GstCaps *oldret = ret;

          ret = gst_caps_intersect (ret, ourcaps);
          gst_caps_unref (oldret);
        }
        gst_caps_unref (ourcaps);
        g_value_reset (&v);
        break;
      }
      case GST_ITERATOR_DONE:
        break;
      case GST_ITERATOR_ERROR:
        gst_caps_unref (ret);
        ret = gst_caps_new_empty ();
        break;
      case GST_ITERATOR_RESYNC:
        /* Pad list changed under us: start the accumulation over */
        gst_caps_unref (ret);
        ret = gst_caps_new_any ();
        gst_iterator_resync (it);
        break;
    }
  } while (res != GST_ITERATOR_DONE && res != GST_ITERATOR_ERROR);
  g_value_unset (&v);
  gst_iterator_free (it);

  if (filter) {
    GstCaps *aux;

    aux = gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (ret);
    ret = aux;
  }

  GST_DEBUG_OBJECT (pad, "Intersected caps to %" GST_PTR_FORMAT, ret);

  return ret;
}
+
/* Sink pad event handler. Flush and EOS events pass through immediately;
 * CAPS is consumed and turned into (re)negotiation; all other serialized
 * events are queued while no src pads exist yet (they are flushed to the
 * pads from the chain function). */
static gboolean
gst_deinterleave_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstDeinterleave *self = GST_DEINTERLEAVE (parent);
  gboolean ret;

  GST_DEBUG ("Got %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
      GST_DEBUG_PAD_NAME (pad));

  /* Send FLUSH_STOP, FLUSH_START and EOS immediately, no matter if
   * we have src pads already or not. Queue all other events and
   * push them after we have src pads
   */
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_STOP:
    case GST_EVENT_FLUSH_START:
    case GST_EVENT_EOS:
      ret = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      /* Consumed here: src pads get their own (mono) CAPS events */
      gst_event_parse_caps (event, &caps);
      ret = gst_deinterleave_sink_setcaps (self, caps);
      gst_event_unref (event);
      break;
    }

    default:
      if (!self->srcpads && !GST_EVENT_IS_STICKY (event)) {
        /* Sticky events are copied when creating a new pad */
        GST_OBJECT_LOCK (self);
        self->pending_events = g_list_append (self->pending_events, event);
        GST_OBJECT_UNLOCK (self);
        ret = TRUE;
      } else {
        ret = gst_pad_event_default (pad, parent, event);
      }
      break;
  }

  return ret;
}
+
+static gboolean
+gst_deinterleave_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:{
+ GstCaps *filter;
+ GstCaps *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_deinterleave_getcaps (pad, parent, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ case GST_QUERY_ACCEPT_CAPS:{
+ GstCaps *caps;
+ gboolean ret;
+
+ gst_query_parse_accept_caps (query, &caps);
+ ret = gst_deinterleave_sink_acceptcaps (pad, parent, caps);
+ gst_query_set_accept_caps_result (query, ret);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+}
+
+static gboolean
+gst_deinterleave_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstDeinterleave *self = GST_DEINTERLEAVE (parent);
+ gboolean res;
+
+ res = gst_pad_query_default (pad, parent, query);
+
+ if (res && GST_QUERY_TYPE (query) == GST_QUERY_DURATION) {
+ GstFormat format;
+ gint64 dur;
+
+ gst_query_parse_duration (query, &format, &dur);
+
+ /* Need to divide by the number of channels in byte format
+ * to get the correct value. All other formats should be fine
+ */
+ if (format == GST_FORMAT_BYTES && dur != -1)
+ gst_query_set_duration (query, format,
+ dur / GST_AUDIO_INFO_CHANNELS (&self->audio_info));
+ } else if (res && GST_QUERY_TYPE (query) == GST_QUERY_POSITION) {
+ GstFormat format;
+ gint64 pos;
+
+ gst_query_parse_position (query, &format, &pos);
+
+ /* Need to divide by the number of channels in byte format
+ * to get the correct value. All other formats should be fine
+ */
+ if (format == GST_FORMAT_BYTES && pos != -1)
+ gst_query_set_position (query, format,
+ pos / GST_AUDIO_INFO_CHANNELS (&self->audio_info));
+ } else if (res && GST_QUERY_TYPE (query) == GST_QUERY_CAPS) {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_deinterleave_getcaps (pad, parent, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ }
+
+ return res;
+}
+
+static void
+gst_deinterleave_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstDeinterleave *self = GST_DEINTERLEAVE (object);
+
+ switch (prop_id) {
+ case PROP_KEEP_POSITIONS:
+ self->keep_positions = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_deinterleave_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstDeinterleave *self = GST_DEINTERLEAVE (object);
+
+ switch (prop_id) {
+ case PROP_KEEP_POSITIONS:
+ g_value_set_boolean (value, self->keep_positions);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Splits the interleaved input @buf (ownership is taken) into one
+ * freshly allocated mono buffer per source pad and pushes them
+ * downstream, after flushing any events queued in
+ * self->pending_events to every source pad.
+ *
+ * Returns GST_FLOW_OK on success, GST_FLOW_NOT_LINKED if no source
+ * pad accepted data, GST_FLOW_ERROR on allocation failure,
+ * GST_FLOW_NOT_NEGOTIATED on a wrongly sized allocation, or the
+ * first fatal flow return from gst_pad_push().
+ */
+static GstFlowReturn
+gst_deinterleave_process (GstDeinterleave * self, GstBuffer * buf)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint channels = GST_AUDIO_INFO_CHANNELS (&self->audio_info);
+  guint pads_pushed = 0, buffers_allocated = 0;
+  guint nframes =
+      gst_buffer_get_size (buf) / channels /
+      (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
+  guint bufsize = nframes * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
+  guint i;
+  GList *srcs;
+  GstBuffer **buffers_out = g_new0 (GstBuffer *, channels);
+  guint8 *in, *out;
+  GstMapInfo read_info;
+  GList *pending_events, *l;
+
+  /* Send any pending events to all src pads */
+  GST_OBJECT_LOCK (self);
+  pending_events = self->pending_events;
+  self->pending_events = NULL;
+  GST_OBJECT_UNLOCK (self);
+
+  if (pending_events) {
+    GstEvent *event;
+
+    GST_DEBUG_OBJECT (self, "Sending pending events to all src pads");
+    for (l = pending_events; l; l = l->next) {
+      event = l->data;
+      for (srcs = self->srcpads; srcs != NULL; srcs = srcs->next)
+        gst_pad_push_event (GST_PAD (srcs->data), gst_event_ref (event));
+      gst_event_unref (event);
+    }
+    g_list_free (pending_events);
+  }
+
+  gst_buffer_map (buf, &read_info, GST_MAP_READ);
+
+  /* Allocate buffers */
+  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
+    buffers_out[i] = gst_buffer_new_allocate (NULL, bufsize, NULL);
+
+    /* Make sure we got a correct buffer. The only other case we allow
+     * here is an unlinked pad */
+    if (!buffers_out[i])
+      goto alloc_buffer_failed;
+    else if (buffers_out[i]
+        && gst_buffer_get_size (buffers_out[i]) != bufsize)
+      goto alloc_buffer_bad_size;
+
+    if (buffers_out[i]) {
+      gst_buffer_copy_into (buffers_out[i], buf, GST_BUFFER_COPY_METADATA, 0,
+          -1);
+      buffers_allocated++;
+    }
+  }
+
+  /* Return NOT_LINKED if no pad was linked */
+  if (!buffers_allocated) {
+    GST_WARNING_OBJECT (self,
+        "Couldn't allocate any buffers because no pad was linked");
+    ret = GST_FLOW_NOT_LINKED;
+    goto done;
+  }
+
+  /* deinterleave: channel i starts at sample i of the input frame and
+   * the per-width copy function advances by the frame stride */
+  for (srcs = self->srcpads, i = 0; srcs; srcs = srcs->next, i++) {
+    GstPad *pad = (GstPad *) srcs->data;
+    GstMapInfo write_info;
+
+    in = (guint8 *) read_info.data;
+    in += i * (GST_AUDIO_INFO_WIDTH (&self->audio_info) / 8);
+    if (buffers_out[i]) {
+      gst_buffer_map (buffers_out[i], &write_info, GST_MAP_WRITE);
+      out = (guint8 *) write_info.data;
+      self->func (out, in, channels, nframes);
+      gst_buffer_unmap (buffers_out[i], &write_info);
+
+      ret = gst_pad_push (pad, buffers_out[i]);
+      buffers_out[i] = NULL;
+      if (ret == GST_FLOW_OK)
+        pads_pushed++;
+      else if (ret == GST_FLOW_NOT_LINKED)
+        ret = GST_FLOW_OK;
+      else
+        goto push_failed;
+    }
+  }
+
+  /* Return NOT_LINKED if no pad was linked */
+  if (!pads_pushed)
+    ret = GST_FLOW_NOT_LINKED;
+
+  GST_DEBUG_OBJECT (self, "Pushed on %u pads", pads_pushed);
+
+done:
+  gst_buffer_unmap (buf, &read_info);
+  gst_buffer_unref (buf);
+  g_free (buffers_out);
+  return ret;
+
+alloc_buffer_failed:
+  {
+    /* ret was still GST_FLOW_OK at this point, which made the whole
+     * function report success on an allocation failure; also the old
+     * message named gst_pad_alloc_buffer(), a 0.10 API that is not
+     * called here */
+    ret = GST_FLOW_ERROR;
+    GST_WARNING ("gst_buffer_new_allocate() failed");
+    goto clean_buffers;
+  }
+alloc_buffer_bad_size:
+  {
+    GST_WARNING ("called alloc_buffer(), but didn't get requested bytes");
+    ret = GST_FLOW_NOT_NEGOTIATED;
+    goto clean_buffers;
+  }
+push_failed:
+  {
+    GST_DEBUG ("push() failed, flow = %s", gst_flow_get_name (ret));
+    goto clean_buffers;
+  }
+clean_buffers:
+  {
+    gst_buffer_unmap (buf, &read_info);
+    for (i = 0; i < channels; i++) {
+      if (buffers_out[i])
+        gst_buffer_unref (buffers_out[i]);
+    }
+    gst_buffer_unref (buf);
+    g_free (buffers_out);
+    return ret;
+  }
+}
+
+/* Sink pad chain function: validates that negotiation has happened
+ * (copy function, sample width and channel count set), then hands the
+ * buffer to gst_deinterleave_process(), which takes ownership. */
+static GstFlowReturn
+gst_deinterleave_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstDeinterleave *self = GST_DEINTERLEAVE (parent);
+  GstFlowReturn ret;
+
+  g_return_val_if_fail (self->func != NULL, GST_FLOW_NOT_NEGOTIATED);
+  g_return_val_if_fail (GST_AUDIO_INFO_WIDTH (&self->audio_info) > 0,
+      GST_FLOW_NOT_NEGOTIATED);
+  g_return_val_if_fail (GST_AUDIO_INFO_CHANNELS (&self->audio_info) > 0,
+      GST_FLOW_NOT_NEGOTIATED);
+
+  ret = gst_deinterleave_process (self, buffer);
+
+  if (ret != GST_FLOW_OK)
+    GST_DEBUG_OBJECT (self, "flow return: %s", gst_flow_get_name (ret));
+
+  return ret;
+}
+
+/* GstElement::change_state: drops the dynamically added source pads,
+ * the negotiated copy function and any queued events both when
+ * (re)starting (READY->PAUSED) and when shutting down
+ * (PAUSED->READY). */
+static GstStateChangeReturn
+gst_deinterleave_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstDeinterleave *self = GST_DEINTERLEAVE (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_deinterleave_remove_pads (self);
+
+      self->func = NULL;
+
+      /* g_list_free_full() is NULL-safe and replaces the previous
+       * foreach+free pair */
+      g_list_free_full (self->pending_events,
+          (GDestroyNotify) gst_mini_object_unref);
+      self->pending_events = NULL;
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_deinterleave_remove_pads (self);
+
+      self->func = NULL;
+
+      g_list_free_full (self->pending_events,
+          (GDestroyNotify) gst_mini_object_unref);
+      self->pending_events = NULL;
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/interleave/deinterleave.h b/gst/interleave/deinterleave.h
new file mode 100644
index 0000000000..07c1885747
--- /dev/null
+++ b/gst/interleave/deinterleave.h
@@ -0,0 +1,73 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2005 Wim Taymans <wim@fluendo.com>
+ * 2007 Andy Wingo <wingo at pobox.com>
+ * 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * deinterleave.c: deinterleave samples
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __DEINTERLEAVE_H__
+#define __DEINTERLEAVE_H__
+
+/* Includes must stay outside G_BEGIN_DECLS: that macro expands to
+ * extern "C" { under C++, and system headers must not be wrapped in
+ * an extern "C" block. */
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_DEINTERLEAVE (gst_deinterleave_get_type())
+#define GST_DEINTERLEAVE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DEINTERLEAVE,GstDeinterleave))
+#define GST_DEINTERLEAVE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DEINTERLEAVE,GstDeinterleaveClass))
+#define GST_DEINTERLEAVE_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_DEINTERLEAVE,GstDeinterleaveClass))
+#define GST_IS_DEINTERLEAVE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DEINTERLEAVE))
+#define GST_IS_DEINTERLEAVE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DEINTERLEAVE))
+
+typedef struct _GstDeinterleave GstDeinterleave;
+typedef struct _GstDeinterleaveClass GstDeinterleaveClass;
+
+/* Copies one channel out of an interleaved input; @stride is the
+ * frame stride in samples, @nframes the number of frames to copy. */
+typedef void (*GstDeinterleaveFunc) (gpointer out, gpointer in, guint stride, guint nframes);
+
+struct _GstDeinterleave
+{
+  GstElement element;
+
+  /*< private > */
+  GList *srcpads;               /* dynamically added source pads */
+  GstCaps *sinkcaps;
+  GstAudioInfo audio_info;
+  gboolean keep_positions;
+
+  GstPad *sink;
+
+  GstDeinterleaveFunc func;     /* width-specific copy function */
+
+  GList *pending_events;        /* events queued until pads exist */
+};
+
+struct _GstDeinterleaveClass
+{
+  GstElementClass parent_class;
+};
+
+GType gst_deinterleave_get_type (void);
+
+G_END_DECLS
+
+#endif /* __DEINTERLEAVE_H__ */
diff --git a/gst/interleave/gstinterleaveelements.h b/gst/interleave/gstinterleaveelements.h
new file mode 100644
index 0000000000..7435b5ccad
--- /dev/null
+++ b/gst/interleave/gstinterleaveelements.h
@@ -0,0 +1,34 @@
+/* GStreamer interleave plugin
+ * Copyright (C) 2004,2007 Andy Wingo <wingo at pobox.com>
+ *
+ * plugin.h: the stubs for the interleave plugin
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_INTERLEAVE_ELEMENTS_H__
+#define __GST_INTERLEAVE_ELEMENTS_H__
+
+
+#include <gst/gst.h>
+#include "interleave.h"
+#include "deinterleave.h"
+
+/* Per-element registration stubs used by the plugin entry point */
+GST_ELEMENT_REGISTER_DECLARE (interleave);
+GST_ELEMENT_REGISTER_DECLARE (deinterleave);
+
+#endif /* __GST_INTERLEAVE_ELEMENTS_H__ */
diff --git a/gst/interleave/interleave.c b/gst/interleave/interleave.c
new file mode 100644
index 0000000000..91317dd8cc
--- /dev/null
+++ b/gst/interleave/interleave.c
@@ -0,0 +1,1341 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2005 Wim Taymans <wim@fluendo.com>
+ * 2007 Andy Wingo <wingo at pobox.com>
+ *                    2008 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * interleave.c: interleave samples, mostly based on adder.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* TODO:
+ * - handle caps changes
+ * - handle more queries/events
+ */
+
+/**
+ * SECTION:element-interleave
+ * @title: interleave
+ * @see_also: deinterleave
+ *
+ * Merges separate mono inputs into one interleaved stream.
+ *
+ * This element handles all raw floating point sample formats and all signed integer sample formats. The first
+ * caps on one of the sinkpads will set the caps of the output so usually an audioconvert element should be
+ * placed before every sinkpad of interleave.
+ *
+ * It's possible to change the number of channels while the pipeline is running by adding or removing
+ * some of the request pads but this will change the caps of the output buffers. Changing the input
+ * caps is _not_ supported yet.
+ *
+ * The channel number of every sinkpad in the output can be retrieved from the "channel" property of the pad.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=file.mp3 ! decodebin ! audioconvert ! "audio/x-raw,channels=2" ! deinterleave name=d interleave name=i ! audioconvert ! wavenc ! filesink location=test.wav d.src_0 ! queue ! audioconvert ! i.sink_1 d.src_1 ! queue ! audioconvert ! i.sink_0
+ * ]| Decodes and deinterleaves a Stereo MP3 file into separate channels and
+ * then interleaves the channels again to a WAV file with the channels
+ * exchanged.
+ * |[
+ * gst-launch-1.0 interleave name=i ! audioconvert ! wavenc ! filesink location=file.wav filesrc location=file1.wav ! decodebin ! audioconvert ! "audio/x-raw,channels=1,channel-mask=(bitmask)0x1" ! queue ! i.sink_0 filesrc location=file2.wav ! decodebin ! audioconvert ! "audio/x-raw,channels=1,channel-mask=(bitmask)0x2" ! queue ! i.sink_1
+ * ]| Interleaves two Mono WAV files to a single Stereo WAV file. Having
+ * channel-masks defined in the sink pads ensures a sane mapping of the mono
+ * streams into the stereo stream. NOTE: the proper way to map channels in
+ * code is by using the channel-positions property of the interleave element.
+ *
+ */
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <string.h>
+#include "gstinterleaveelements.h"
+#include "interleave.h"
+
+#include <gst/audio/audio.h>
+#include <gst/audio/audio-enumtypes.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_interleave_debug);
+#define GST_CAT_DEFAULT gst_interleave_debug
+
+/* Request sink pads: one mono channel each */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("audio/x-raw, "
+        "rate = (int) [ 1, MAX ], "
+        "channels = (int) 1, "
+        "format = (string) " GST_AUDIO_FORMATS_ALL ", "
+        "layout = (string) {non-interleaved, interleaved}")
+    );
+
+/* Single always src pad carrying the interleaved result */
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("audio/x-raw, "
+        "rate = (int) [ 1, MAX ], "
+        "channels = (int) [ 1, MAX ], "
+        "format = (string) " GST_AUDIO_FORMATS_ALL ", "
+        "layout = (string) interleaved")
+    );
+
+/* Generates interleave_<width>(): writes one mono channel into an
+ * interleaved output, frame i of @in going to @out[i * stride].
+ * @stride is in samples, not bytes.
+ * The loop index is guint to match @nframes (the previous gint index
+ * triggered -Wsign-compare). */
+#define MAKE_FUNC(type) \
+static void interleave_##type (guint##type *out, guint##type *in, \
+    guint stride, guint nframes) \
+{ \
+  guint i; \
+  \
+  for (i = 0; i < nframes; i++) { \
+    *out = in[i]; \
+    out += stride; \
+  } \
+}
+
+MAKE_FUNC (8);
+MAKE_FUNC (16);
+MAKE_FUNC (32);
+MAKE_FUNC (64);
+
+/* 24-bit samples have no matching integer type, so copy 3 bytes per
+ * frame; @stride is in samples and therefore scaled by 3 here.
+ * guint loop index matches @nframes (was gint, -Wsign-compare). */
+static void
+interleave_24 (guint8 * out, guint8 * in, guint stride, guint nframes)
+{
+  guint i;
+
+  for (i = 0; i < nframes; i++) {
+    memcpy (out, in, 3);
+    out += stride * 3;
+    in += 3;
+  }
+}
+
+/* Sink pad subclass that remembers which output channel the pad
+ * feeds; exposed via the read-only "channel" property. */
+typedef struct
+{
+  GstPad parent;
+  guint channel;
+} GstInterleavePad;
+
+enum
+{
+  PROP_PAD_0,
+  PROP_PAD_CHANNEL
+};
+
+static void gst_interleave_pad_class_init (GstPadClass * klass);
+
+#define GST_TYPE_INTERLEAVE_PAD (gst_interleave_pad_get_type())
+#define GST_INTERLEAVE_PAD(pad) (G_TYPE_CHECK_INSTANCE_CAST((pad),GST_TYPE_INTERLEAVE_PAD,GstInterleavePad))
+#define GST_INTERLEAVE_PAD_CAST(pad) ((GstInterleavePad *) pad)
+#define GST_IS_INTERLEAVE_PAD(pad) (G_TYPE_CHECK_INSTANCE_TYPE((pad),GST_TYPE_INTERLEAVE_PAD))
+/* Lazily registers GstInterleavePad.  Uses g_once_init_enter/leave so
+ * two threads racing into the first call cannot both register the
+ * type; the previous bare "if (type == 0)" check was not
+ * thread-safe. */
+static GType
+gst_interleave_pad_get_type (void)
+{
+  static gsize type = 0;
+
+  if (g_once_init_enter (&type)) {
+    GType tmp = g_type_register_static_simple (GST_TYPE_PAD,
+        g_intern_static_string ("GstInterleavePad"), sizeof (GstPadClass),
+        (GClassInitFunc) gst_interleave_pad_class_init,
+        sizeof (GstInterleavePad), NULL, 0);
+    g_once_init_leave (&type, tmp);
+  }
+  return (GType) type;
+}
+
+/* GObject::get_property for GstInterleavePad: only the read-only
+ * "channel" property exists. */
+static void
+gst_interleave_pad_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstInterleavePad *self = GST_INTERLEAVE_PAD (object);
+
+  switch (prop_id) {
+    case PROP_PAD_CHANNEL:
+      g_value_set_uint (value, self->channel);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class init for GstInterleavePad: installs the read-only "channel"
+ * property. */
+static void
+gst_interleave_pad_class_init (GstPadClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+
+  gobject_class->get_property = gst_interleave_pad_get_property;
+
+  g_object_class_install_property (gobject_class,
+      PROP_PAD_CHANNEL,
+      g_param_spec_uint ("channel",
+          "Channel number",
+          "Number of the channel of this pad in the output", 0, G_MAXUINT, 0,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+}
+
+#define gst_interleave_parent_class parent_class
+G_DEFINE_TYPE (GstInterleave, gst_interleave, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (interleave, "interleave",
+    GST_RANK_NONE, gst_interleave_get_type ());
+
+/* Element properties */
+enum
+{
+  PROP_0,
+  PROP_CHANNEL_POSITIONS,
+  PROP_CHANNEL_POSITIONS_FROM_INPUT
+};
+
+static void gst_interleave_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_interleave_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+static GstPad *gst_interleave_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_interleave_release_pad (GstElement * element, GstPad * pad);
+
+static GstStateChangeReturn gst_interleave_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_interleave_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+static gboolean gst_interleave_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+static gboolean gst_interleave_sink_event (GstCollectPads * pads,
+ GstCollectData * data, GstEvent * event, gpointer user_data);
+static gboolean gst_interleave_sink_query (GstCollectPads * pads,
+ GstCollectData * data, GstQuery * query, gpointer user_data);
+
+static gboolean gst_interleave_sink_setcaps (GstInterleave * self,
+ GstPad * pad, const GstCaps * caps, const GstAudioInfo * info);
+
+static GstCaps *gst_interleave_sink_getcaps (GstPad * pad, GstInterleave * self,
+ GstCaps * filter);
+
+static GstFlowReturn gst_interleave_collected (GstCollectPads * pads,
+ GstInterleave * self);
+
+/* GObject::finalize: releases the collect pads helper, both channel
+ * position arrays and the cached sink caps. */
+static void
+gst_interleave_finalize (GObject * object)
+{
+  GstInterleave *self = GST_INTERLEAVE (object);
+
+  if (self->collect) {
+    gst_object_unref (self->collect);
+    self->collect = NULL;
+  }
+
+  /* channel_positions may alias input_channel_positions (see init and
+   * set_property); only free it when distinct, to avoid a double free
+   * in the block below */
+  if (self->channel_positions
+      && self->channel_positions != self->input_channel_positions) {
+    g_value_array_free (self->channel_positions);
+    self->channel_positions = NULL;
+  }
+
+  if (self->input_channel_positions) {
+    g_value_array_free (self->input_channel_positions);
+    self->input_channel_positions = NULL;
+  }
+
+  gst_caps_replace (&self->sinkcaps, NULL);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GCompareDataFunc for g_qsort_with_data(): orders the two indices
+ * pointed to by @a and @b by the channel position values they select
+ * in the @user_data array. */
+static gint
+compare_positions (gconstpointer a, gconstpointer b, gpointer user_data)
+{
+  const gint idx_a = *(const gint *) a;
+  const gint idx_b = *(const gint *) b;
+  const gint *positions = (const gint *) user_data;
+  gint order = 0;
+
+  if (positions[idx_a] != positions[idx_b])
+    order = (positions[idx_a] > positions[idx_b]) ? 1 : -1;
+
+  return order;
+}
+
+/* Converts a GValueArray of GstAudioChannelPosition values into a
+ * channel mask and fills @default_ordering_map with the input indices
+ * sorted by position.
+ *
+ * NOTE(review): @default_ordering_map holds 64 entries; the caller
+ * must guarantee positions->n_values <= 64 (the only visible caller,
+ * gst_interleave_set_channel_positions(), checks this).
+ *
+ * The loop index is guint to match @channels (was gint,
+ * -Wsign-compare).  Returns FALSE if the positions cannot be
+ * represented as a mask. */
+static gboolean
+gst_interleave_channel_positions_to_mask (GValueArray * positions,
+    gint default_ordering_map[64], guint64 * mask)
+{
+  guint i;
+  guint channels;
+  GstAudioChannelPosition *pos;
+  gboolean ret;
+
+  channels = positions->n_values;
+  pos = g_new (GstAudioChannelPosition, channels);
+
+  for (i = 0; i < channels; i++) {
+    GValue *val;
+
+    val = g_value_array_get_nth (positions, i);
+    pos[i] = g_value_get_enum (val);
+  }
+
+  /* sort the default ordering map according to the position order */
+  for (i = 0; i < channels; i++) {
+    default_ordering_map[i] = i;
+  }
+  g_qsort_with_data (default_ordering_map, channels,
+      sizeof (*default_ordering_map), compare_positions, pos);
+
+  ret = gst_audio_channel_positions_to_mask (pos, channels, FALSE, mask);
+  g_free (pos);
+
+  return ret;
+}
+
+/* Writes the "channel-mask" field into @s from the configured channel
+ * positions; falls back to a NONE layout (mask 0) when the positions
+ * are missing, mismatched, invalid, or there are more than 64
+ * channels. */
+static void
+gst_interleave_set_channel_positions (GstInterleave * self, GstStructure * s)
+{
+  if (self->channels <= 64 &&
+      self->channel_positions != NULL &&
+      self->channels == self->channel_positions->n_values) {
+    if (!gst_interleave_channel_positions_to_mask (self->channel_positions,
+            self->default_channels_ordering_map, &self->channel_mask)) {
+      GST_WARNING_OBJECT (self, "Invalid channel positions, using NONE");
+      self->channel_mask = 0;
+    }
+  } else {
+    self->channel_mask = 0;
+    if (self->channels <= 64) {
+      GST_WARNING_OBJECT (self, "Using NONE channel positions");
+    }
+  }
+  gst_structure_set (s, "channel-mask", GST_TYPE_BITMASK, self->channel_mask,
+      NULL);
+}
+
+/* Pushes a stream-start event on the src pad, but only once after the
+ * send_stream_start flag was armed (in READY_TO_PAUSED).  The object
+ * lock is dropped before pushing the event. */
+static void
+gst_interleave_send_stream_start (GstInterleave * self)
+{
+  GST_OBJECT_LOCK (self);
+  if (self->send_stream_start) {
+    gchar s_id[32];
+
+    self->send_stream_start = FALSE;
+    GST_OBJECT_UNLOCK (self);
+
+    /* stream-start (FIXME: create id based on input ids) */
+    g_snprintf (s_id, sizeof (s_id), "interleave-%08x", g_random_int ());
+    gst_pad_push_event (self->src, gst_event_new_stream_start (s_id));
+  } else {
+    GST_OBJECT_UNLOCK (self);
+  }
+}
+
+/* Class init: registers metadata, pad templates, the two element
+ * properties and the request-pad/state-change vfuncs. */
+static void
+gst_interleave_class_init (GstInterleaveClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GObjectClass *gobject_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  gstelement_class = GST_ELEMENT_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (gst_interleave_debug, "interleave", 0,
+      "interleave element");
+
+  gst_element_class_set_static_metadata (gstelement_class, "Audio interleaver",
+      "Filter/Converter/Audio",
+      "Folds many mono channels into one interleaved audio stream",
+      "Andy Wingo <wingo at pobox.com>, "
+      "Sebastian Dröge <slomo@circular-chaos.org>");
+
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+  /* Reference GstInterleavePad class to have the type registered from
+   * a threadsafe context
+   */
+  g_type_class_ref (GST_TYPE_INTERLEAVE_PAD);
+
+  gobject_class->finalize = gst_interleave_finalize;
+  gobject_class->set_property = gst_interleave_set_property;
+  gobject_class->get_property = gst_interleave_get_property;
+
+  /**
+   * GstInterleave:channel-positions
+   *
+   * Channel positions: This property controls the channel positions
+   * that are used on the src caps. The number of elements should be
+   * the same as the number of sink pads and the array should contain
+   * a valid list of channel positions. The n-th element of the array
+   * is the position of the n-th sink pad.
+   *
+   * These channel positions will only be used if they're valid and the
+   * number of elements is the same as the number of channels. If this
+   * is not given a NONE layout will be used.
+   *
+   */
+  g_object_class_install_property (gobject_class, PROP_CHANNEL_POSITIONS,
+      g_param_spec_value_array ("channel-positions", "Channel positions",
+          "Channel positions used on the output",
+          g_param_spec_enum ("channel-position", "Channel position",
+              "Channel position of the n-th input",
+              GST_TYPE_AUDIO_CHANNEL_POSITION,
+              GST_AUDIO_CHANNEL_POSITION_NONE,
+              G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstInterleave:channel-positions-from-input
+   *
+   * Channel positions from input: If this property is set to %TRUE the channel
+   * positions will be taken from the input caps if valid channel positions for
+   * the output can be constructed from them. If this is set to %TRUE setting the
+   * channel-positions property overwrites this property again.
+   *
+   */
+  g_object_class_install_property (gobject_class,
+      PROP_CHANNEL_POSITIONS_FROM_INPUT,
+      g_param_spec_boolean ("channel-positions-from-input",
+          "Channel positions from input",
+          "Take channel positions from the input", TRUE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_interleave_request_new_pad);
+  gstelement_class->release_pad =
+      GST_DEBUG_FUNCPTR (gst_interleave_release_pad);
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_interleave_change_state);
+}
+
+/* Instance init: creates the always src pad, the collect pads helper
+ * that drives processing, and the channel position arrays (initially
+ * channel_positions aliases input_channel_positions). */
+static void
+gst_interleave_init (GstInterleave * self)
+{
+  self->src = gst_pad_new_from_static_template (&src_template, "src");
+
+  gst_pad_set_query_function (self->src,
+      GST_DEBUG_FUNCPTR (gst_interleave_src_query));
+  gst_pad_set_event_function (self->src,
+      GST_DEBUG_FUNCPTR (gst_interleave_src_event));
+
+  gst_element_add_pad (GST_ELEMENT (self), self->src);
+
+  self->collect = gst_collect_pads_new ();
+  gst_collect_pads_set_function (self->collect,
+      (GstCollectPadsFunction) gst_interleave_collected, self);
+
+  self->input_channel_positions = g_value_array_new (0);
+  self->channel_positions_from_input = TRUE;
+  self->channel_positions = self->input_channel_positions;
+}
+
+/* GObject::set_property: both properties interact — setting explicit
+ * channel-positions disables channel-positions-from-input, and
+ * re-enabling the latter makes channel_positions alias
+ * input_channel_positions again.  The alias check before each free
+ * prevents freeing input_channel_positions by accident. */
+static void
+gst_interleave_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstInterleave *self = GST_INTERLEAVE (object);
+
+  switch (prop_id) {
+    case PROP_CHANNEL_POSITIONS:
+      if (self->channel_positions &&
+          self->channel_positions != self->input_channel_positions)
+        g_value_array_free (self->channel_positions);
+
+      self->channel_positions = g_value_dup_boxed (value);
+      self->channel_positions_from_input = FALSE;
+      break;
+    case PROP_CHANNEL_POSITIONS_FROM_INPUT:
+      self->channel_positions_from_input = g_value_get_boolean (value);
+
+      if (self->channel_positions_from_input) {
+        if (self->channel_positions &&
+            self->channel_positions != self->input_channel_positions)
+          g_value_array_free (self->channel_positions);
+        self->channel_positions = self->input_channel_positions;
+      }
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property counterpart of the setter above. */
+static void
+gst_interleave_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstInterleave *self = GST_INTERLEAVE (object);
+
+  switch (prop_id) {
+    case PROP_CHANNEL_POSITIONS:
+      g_value_set_boxed (value, self->channel_positions);
+      break;
+    case PROP_CHANNEL_POSITIONS_FROM_INPUT:
+      g_value_set_boolean (value, self->channel_positions_from_input);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstElement::request_new_pad: creates a new mono sink_%u pad,
+ * registers it with the collect pads helper, grows the input channel
+ * position array, and — if caps are already negotiated — updates the
+ * src caps for the additional channel.
+ *
+ * Returns the new pad, or NULL on error. */
+static GstPad *
+gst_interleave_request_new_pad (GstElement * element, GstPadTemplate * templ,
+    const gchar * req_name, const GstCaps * caps)
+{
+  GstInterleave *self = GST_INTERLEAVE (element);
+  GstPad *new_pad;
+  gchar *pad_name;
+  gint channel, padnumber;
+  GValue val = { 0, };
+
+  if (templ->direction != GST_PAD_SINK)
+    goto not_sink_pad;
+
+  padnumber = g_atomic_int_add (&self->padcounter, 1);
+
+  channel = g_atomic_int_add (&self->channels, 1);
+  if (!self->channel_positions_from_input)
+    channel = padnumber;
+
+  pad_name = g_strdup_printf ("sink_%u", padnumber);
+  new_pad = GST_PAD_CAST (g_object_new (GST_TYPE_INTERLEAVE_PAD,
+          "name", pad_name, "direction", templ->direction,
+          "template", templ, NULL));
+  GST_INTERLEAVE_PAD_CAST (new_pad)->channel = channel;
+  GST_DEBUG_OBJECT (self, "requested new pad %s", pad_name);
+  g_free (pad_name);
+
+  gst_pad_use_fixed_caps (new_pad);
+
+  gst_collect_pads_add_pad (self->collect, new_pad, sizeof (GstCollectData),
+      NULL, TRUE);
+
+  gst_collect_pads_set_event_function (self->collect,
+      (GstCollectPadsEventFunction)
+      GST_DEBUG_FUNCPTR (gst_interleave_sink_event), self);
+
+  gst_collect_pads_set_query_function (self->collect,
+      (GstCollectPadsQueryFunction)
+      GST_DEBUG_FUNCPTR (gst_interleave_sink_query), self);
+
+  if (!gst_element_add_pad (element, new_pad))
+    goto could_not_add;
+
+  g_value_init (&val, GST_TYPE_AUDIO_CHANNEL_POSITION);
+  g_value_set_enum (&val, GST_AUDIO_CHANNEL_POSITION_NONE);
+  self->input_channel_positions =
+      g_value_array_append (self->input_channel_positions, &val);
+  g_value_unset (&val);
+
+  /* Update the src caps if we already have them */
+  if (self->sinkcaps) {
+    GstCaps *srccaps;
+    GstStructure *s;
+
+    /* Take lock to make sure processing finishes first */
+    GST_OBJECT_LOCK (self->collect);
+
+    srccaps = gst_caps_copy (self->sinkcaps);
+    s = gst_caps_get_structure (srccaps, 0);
+
+    gst_structure_set (s, "channels", G_TYPE_INT, self->channels, NULL);
+    gst_interleave_set_channel_positions (self, s);
+
+    gst_interleave_send_stream_start (self);
+    gst_pad_set_caps (self->src, srccaps);
+    gst_caps_unref (srccaps);
+
+    GST_OBJECT_UNLOCK (self->collect);
+  }
+
+  return new_pad;
+
+  /* errors */
+not_sink_pad:
+  {
+    /* g_warning() terminates its own message; the previous trailing
+     * "\n" produced a spurious blank line in the log */
+    g_warning ("interleave: requested new pad that is not a SINK pad");
+    return NULL;
+  }
+could_not_add:
+  {
+    GST_DEBUG_OBJECT (self, "could not add pad %s", GST_PAD_NAME (new_pad));
+    gst_collect_pads_remove_pad (self->collect, new_pad);
+    gst_object_unref (new_pad);
+    return NULL;
+  }
+}
+
+/* GstElement::release_pad: detaches a request sink pad, renumbers the
+ * remaining channels and refreshes (or drops) the src caps. */
+static void
+gst_interleave_release_pad (GstElement * element, GstPad * pad)
+{
+  GstInterleave *self = GST_INTERLEAVE (element);
+  GList *l;
+  GstAudioChannelPosition position;
+
+  g_return_if_fail (GST_IS_INTERLEAVE_PAD (pad));
+
+  /* Take lock to make sure we're not changing this when processing buffers */
+  GST_OBJECT_LOCK (self->collect);
+
+  g_atomic_int_add (&self->channels, -1);
+
+  if (gst_pad_has_current_caps (pad))
+    g_atomic_int_add (&self->configured_sinkpads_counter, -1);
+
+  /* NOTE(review): "position" actually holds the pad's channel index
+   * (used as the removal index below), not a GstAudioChannelPosition
+   * enum value — the declared type is misleading */
+  position = GST_INTERLEAVE_PAD_CAST (pad)->channel;
+  g_value_array_remove (self->input_channel_positions, position);
+
+  /* Update channel numbers */
+  GST_OBJECT_LOCK (self);
+  for (l = GST_ELEMENT_CAST (self)->sinkpads; l != NULL; l = l->next) {
+    GstInterleavePad *ipad = GST_INTERLEAVE_PAD (l->data);
+
+    if (GST_INTERLEAVE_PAD_CAST (pad)->channel < ipad->channel)
+      ipad->channel--;
+  }
+  GST_OBJECT_UNLOCK (self);
+
+  /* Update the src caps if we already have them */
+  if (self->sinkcaps) {
+    if (self->channels > 0) {
+      GstCaps *srccaps;
+      GstStructure *s;
+
+      srccaps = gst_caps_copy (self->sinkcaps);
+      s = gst_caps_get_structure (srccaps, 0);
+
+      gst_structure_set (s, "channels", G_TYPE_INT, self->channels, NULL);
+      gst_interleave_set_channel_positions (self, s);
+
+      gst_interleave_send_stream_start (self);
+      gst_pad_set_caps (self->src, srccaps);
+      gst_caps_unref (srccaps);
+    } else {
+      gst_caps_replace (&self->sinkcaps, NULL);
+    }
+  }
+
+  GST_OBJECT_UNLOCK (self->collect);
+
+  gst_collect_pads_remove_pad (self->collect, pad);
+  gst_element_remove_pad (element, pad);
+}
+
+/* GstElement::change_state: resets timestamps/offset and starts the
+ * collect pads on READY->PAUSED; stops collect pads *before* chaining
+ * up on PAUSED->READY (see comment below), then clears cached caps
+ * and the pending segment. */
+static GstStateChangeReturn
+gst_interleave_change_state (GstElement * element, GstStateChange transition)
+{
+  GstInterleave *self;
+  GstStateChangeReturn ret;
+
+  self = GST_INTERLEAVE (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      self->timestamp = 0;
+      self->offset = 0;
+      gst_event_replace (&self->pending_segment, NULL);
+      self->send_stream_start = TRUE;
+      gst_collect_pads_start (self->collect);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      break;
+    default:
+      break;
+  }
+
+  /* Stop before calling the parent's state change function as
+   * GstCollectPads might take locks and we would deadlock in that
+   * case
+   */
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_collect_pads_stop (self->collect);
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_caps_replace (&self->sinkcaps, NULL);
+      gst_event_replace (&self->pending_segment, NULL);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* Strips "channels" and "channel-mask" from every structure in @caps
+ * so caps can be intersected independently of channel count.
+ * NOTE(review): identifiers beginning with "__" are reserved for the
+ * implementation in C; renaming (together with all callers) would be
+ * cleaner. */
+static void
+__remove_channels (GstCaps * caps)
+{
+  GstStructure *s;
+  gint i, size;
+
+  size = gst_caps_get_size (caps);
+  for (i = 0; i < size; i++) {
+    s = gst_caps_get_structure (caps, i);
+    gst_structure_remove_field (s, "channel-mask");
+    gst_structure_remove_field (s, "channels");
+  }
+}
+
+/* Sets "channels" on every structure in @caps: a fixed value when
+ * @channels > 0, otherwise the full [1, MAX] range.
+ * NOTE(review): same reserved "__" prefix caveat as
+ * __remove_channels() above. */
+static void
+__set_channels (GstCaps * caps, gint channels)
+{
+  GstStructure *s;
+  gint i, size;
+
+  size = gst_caps_get_size (caps);
+  for (i = 0; i < size; i++) {
+    s = gst_caps_get_structure (caps, i);
+    if (channels > 0)
+      gst_structure_set (s, "channels", G_TYPE_INT, channels, NULL);
+    else
+      gst_structure_set (s, "channels", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
+  }
+}
+
+/* we can only accept caps that we and downstream can handle.
+ *
+ * Returns the already negotiated sink caps if present, otherwise the
+ * intersection of the pad template caps with the downstream peer caps
+ * (channel fields stripped, then forced to mono), finally intersected
+ * with @filter if one was given.  Caller owns the returned caps. */
+static GstCaps *
+gst_interleave_sink_getcaps (GstPad * pad, GstInterleave * self,
+    GstCaps * filter)
+{
+  GstCaps *result, *peercaps, *sinkcaps;
+
+  GST_OBJECT_LOCK (self);
+
+  /* If we already have caps on one of the sink pads return them */
+  if (self->sinkcaps) {
+    result = gst_caps_copy (self->sinkcaps);
+  } else {
+    /* get the downstream possible caps */
+    peercaps = gst_pad_peer_query_caps (self->src, NULL);
+
+    /* get the allowed caps on this sinkpad */
+    sinkcaps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
+    __remove_channels (sinkcaps);
+    if (peercaps) {
+      peercaps = gst_caps_make_writable (peercaps);
+      __remove_channels (peercaps);
+      /* if the peer has caps, intersect */
+      GST_DEBUG_OBJECT (pad, "intersecting peer and template caps");
+      result = gst_caps_intersect (peercaps, sinkcaps);
+      gst_caps_unref (peercaps);
+      gst_caps_unref (sinkcaps);
+    } else {
+      /* the peer has no caps (or there is no peer), just use the allowed caps
+       * of this sinkpad. */
+      GST_DEBUG_OBJECT (pad, "no peer caps, using sinkcaps");
+      result = sinkcaps;
+    }
+    __set_channels (result, 1);
+  }
+
+  GST_OBJECT_UNLOCK (self);
+
+  if (filter != NULL) {
+    GstCaps *caps = result;
+
+    GST_LOG_OBJECT (pad, "intersecting filter caps %" GST_PTR_FORMAT " with "
+        "preliminary result %" GST_PTR_FORMAT, filter, caps);
+
+    result = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (caps);
+  }
+
+  GST_DEBUG_OBJECT (pad, "Returning caps %" GST_PTR_FORMAT, result);
+
+  return result;
+}
+
+/* Select the interleaving routine matching the negotiated sample width in
+ * bits.  self->width must already be set from caps; any value other than
+ * 8/16/24/32/64 is a programming error and aborts. */
+static void
+gst_interleave_set_process_function (GstInterleave * self)
+{
+  switch (self->width) {
+    case 8:
+      self->func = (GstInterleaveFunc) interleave_8;
+      break;
+    case 16:
+      self->func = (GstInterleaveFunc) interleave_16;
+      break;
+    case 24:
+      self->func = (GstInterleaveFunc) interleave_24;
+      break;
+    case 32:
+      self->func = (GstInterleaveFunc) interleave_32;
+      break;
+    case 64:
+      self->func = (GstInterleaveFunc) interleave_64;
+      break;
+    default:
+      g_assert_not_reached ();
+      break;
+  }
+}
+
+/* Apply negotiated sink caps: record width/rate from @info, pick the
+ * matching interleave routine and publish interleaved output caps with the
+ * full channel count on the src pad.  The first accepted sink caps (minus
+ * channel-mask) are stored so later pads must provide compatible caps.
+ * Returns FALSE when @caps conflict with previously stored sink caps or
+ * when downstream rejects the resulting src caps. */
+static gboolean
+gst_interleave_sink_setcaps (GstInterleave * self, GstPad * pad,
+    const GstCaps * caps, const GstAudioInfo * info)
+{
+  g_return_val_if_fail (GST_IS_INTERLEAVE_PAD (pad), FALSE);
+
+  /* TODO: handle caps changes */
+  if (self->sinkcaps && !gst_caps_is_subset (caps, self->sinkcaps)) {
+    goto cannot_change_caps;
+  } else {
+    GstCaps *srccaps;
+    GstStructure *s;
+    gboolean res;
+
+    self->width = GST_AUDIO_INFO_WIDTH (info);
+    self->rate = GST_AUDIO_INFO_RATE (info);
+
+    gst_interleave_set_process_function (self);
+
+    /* build src caps from the sink caps: same format, but all channels
+     * interleaved */
+    srccaps = gst_caps_copy (caps);
+    s = gst_caps_get_structure (srccaps, 0);
+
+    gst_structure_remove_field (s, "channel-mask");
+
+    gst_structure_set (s, "channels", G_TYPE_INT, self->channels, "layout",
+        G_TYPE_STRING, "interleaved", NULL);
+    gst_interleave_set_channel_positions (self, s);
+
+    /* stream-start must go out before the caps event */
+    gst_interleave_send_stream_start (self);
+    res = gst_pad_set_caps (self->src, srccaps);
+    gst_caps_unref (srccaps);
+
+    if (!res)
+      goto src_did_not_accept;
+  }
+
+  if (!self->sinkcaps) {
+    GstCaps *sinkcaps = gst_caps_copy (caps);
+    GstStructure *s = gst_caps_get_structure (sinkcaps, 0);
+
+    gst_structure_remove_field (s, "channel-mask");
+
+    GST_DEBUG_OBJECT (self, "setting sinkcaps %" GST_PTR_FORMAT, sinkcaps);
+
+    gst_caps_replace (&self->sinkcaps, sinkcaps);
+
+    gst_caps_unref (sinkcaps);
+  }
+
+  return TRUE;
+
+cannot_change_caps:
+  {
+    GST_WARNING_OBJECT (self, "caps of %" GST_PTR_FORMAT " already set, can't "
+        "change", self->sinkcaps);
+    return FALSE;
+  }
+src_did_not_accept:
+  {
+    GST_WARNING_OBJECT (self, "src did not accept setcaps()");
+    return FALSE;
+  }
+}
+
+/* Sink pad event callback installed on the GstCollectPads.
+ *
+ * SEGMENT events are stored and pushed later from the collect function;
+ * FLUSH_STOP drops any pending segment.  CAPS events are parsed into a
+ * GstAudioInfo; once every sink pad has received caps, the last caps seen
+ * are used to configure the output (gst_interleave_sink_setcaps).  Events
+ * consumed here (event set to NULL) are not forwarded to the default
+ * handler. */
+static gboolean
+gst_interleave_sink_event (GstCollectPads * pads, GstCollectData * data,
+    GstEvent * event, gpointer user_data)
+{
+  GstInterleave *self = GST_INTERLEAVE (user_data);
+  gboolean ret = TRUE;
+
+  GST_DEBUG ("Got %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
+      GST_DEBUG_PAD_NAME (data->pad));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_STOP:
+      GST_OBJECT_LOCK (self);
+      gst_event_replace (&self->pending_segment, NULL);
+      GST_OBJECT_UNLOCK (self);
+      break;
+    case GST_EVENT_SEGMENT:
+    {
+      /* defer the segment until we produce output in the collect function */
+      GST_OBJECT_LOCK (self);
+      gst_event_replace (&self->pending_segment, event);
+      GST_OBJECT_UNLOCK (self);
+      break;
+    }
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+      GstAudioInfo info;
+      GValue *val;
+      guint channel;
+
+      gst_event_parse_caps (event, &caps);
+
+      if (!gst_audio_info_from_caps (&info, caps)) {
+        GST_WARNING_OBJECT (self, "invalid sink caps");
+        gst_event_unref (event);
+        event = NULL;
+        ret = FALSE;
+        break;
+      }
+
+      /* optionally record this mono pad's channel position from its input */
+      if (self->channel_positions_from_input
+          && GST_AUDIO_INFO_CHANNELS (&info) == 1) {
+        channel = GST_INTERLEAVE_PAD_CAST (data->pad)->channel;
+        val = g_value_array_get_nth (self->input_channel_positions, channel);
+        g_value_set_enum (val, GST_AUDIO_INFO_POSITION (&info, 0));
+      }
+
+      /* count pads that got caps for the first time */
+      if (!gst_pad_has_current_caps (data->pad))
+        g_atomic_int_add (&self->configured_sinkpads_counter, 1);
+
+      /* Last caps that are set on a sink pad are used as output caps */
+      if (g_atomic_int_get (&self->configured_sinkpads_counter) ==
+          self->channels) {
+        ret = gst_interleave_sink_setcaps (self, data->pad, caps, &info);
+        gst_event_unref (event);
+        event = NULL;
+      }
+      break;
+    }
+    case GST_EVENT_TAG:
+      GST_FIXME_OBJECT (self, "FIXME: merge tags and send after stream-start");
+      break;
+    default:
+      break;
+  }
+
+  /* now GstCollectPads can take care of the rest, e.g. EOS */
+  if (event != NULL)
+    return gst_collect_pads_event_default (pads, data, event, FALSE);
+
+  return ret;
+}
+
+/* Sink pad query callback installed on the GstCollectPads: answers CAPS
+ * queries via gst_interleave_sink_getcaps, forwards everything else to the
+ * default GstCollectPads handling. */
+static gboolean
+gst_interleave_sink_query (GstCollectPads * pads,
+    GstCollectData * data, GstQuery * query, gpointer user_data)
+{
+  GstInterleave *self = GST_INTERLEAVE (user_data);
+  gboolean ret = TRUE;
+
+  GST_DEBUG ("Got %s query on pad %s:%s", GST_QUERY_TYPE_NAME (query),
+      GST_DEBUG_PAD_NAME (data->pad));
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CAPS:
+    {
+      GstCaps *filter, *caps;
+
+      gst_query_parse_caps (query, &filter);
+      caps = gst_interleave_sink_getcaps (data->pad, self, filter);
+      gst_query_set_caps_result (query, caps);
+      gst_caps_unref (caps);
+      ret = TRUE;
+      break;
+    }
+    default:
+      ret = gst_collect_pads_query_default (pads, data, query, FALSE);
+      break;
+  }
+
+  return ret;
+}
+
+/* Answer a DURATION query on the src pad by querying every sink pad's peer
+ * and taking the maximum.  A peer reporting -1 (unknown) makes the total
+ * unknown and stops the search; any failed peer query makes the whole
+ * query fail (res stays FALSE for the rest of the iteration).  For BYTES
+ * the per-channel value is multiplied by the channel count. */
+static gboolean
+gst_interleave_src_query_duration (GstInterleave * self, GstQuery * query)
+{
+  gint64 max;
+  gboolean res;
+  GstFormat format;
+  GstIterator *it;
+  gboolean done;
+
+  /* parse format */
+  gst_query_parse_duration (query, &format, NULL);
+
+  max = -1;
+  res = TRUE;
+  done = FALSE;
+
+  /* Take maximum of all durations */
+  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (self));
+  while (!done) {
+    GstIteratorResult ires;
+
+    GValue item = { 0, };
+
+    ires = gst_iterator_next (it, &item);
+    switch (ires) {
+      case GST_ITERATOR_DONE:
+        done = TRUE;
+        break;
+      case GST_ITERATOR_OK:
+      {
+        GstPad *pad = GST_PAD_CAST (g_value_dup_object (&item));
+
+        gint64 duration;
+
+        /* ask sink peer for duration */
+        res &= gst_pad_peer_query_duration (pad, format, &duration);
+        /* take max from all valid return values */
+        if (res) {
+          /* valid unknown length, stop searching */
+          if (duration == -1) {
+            max = duration;
+            done = TRUE;
+          }
+          /* else see if bigger than current max */
+          else if (duration > max)
+            max = duration;
+        }
+        gst_object_unref (pad);
+        g_value_unset (&item);
+        break;
+      }
+      case GST_ITERATOR_RESYNC:
+        /* pad list changed under us: restart from scratch */
+        max = -1;
+        res = TRUE;
+        gst_iterator_resync (it);
+        break;
+      default:
+        res = FALSE;
+        done = TRUE;
+        break;
+    }
+  }
+  gst_iterator_free (it);
+
+  if (res) {
+    /* If in bytes format we have to multiply with the number of channels
+     * to get the correct results. All other formats should be fine */
+    if (format == GST_FORMAT_BYTES && max != -1)
+      max *= self->channels;
+
+    /* and store the max */
+    /* NOTE(review): GST_TIME_FORMAT/GST_TIME_ARGS only make sense for
+     * GST_FORMAT_TIME; for BYTES/DEFAULT this debug line prints the raw
+     * count as if it were nanoseconds -- cosmetic only. */
+    GST_DEBUG_OBJECT (self, "Total duration in format %s: %"
+        GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
+    gst_query_set_duration (query, format, max);
+  }
+
+  return res;
+}
+
+/* Src pad query handler: answers POSITION from the element's running
+ * timestamp/offset (TIME, BYTES and DEFAULT/samples formats), delegates
+ * DURATION to gst_interleave_src_query_duration and everything else to the
+ * default pad query handling. */
+static gboolean
+gst_interleave_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstInterleave *self = GST_INTERLEAVE (parent);
+  gboolean res = FALSE;
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      GstFormat format;
+
+      gst_query_parse_position (query, &format, NULL);
+
+      switch (format) {
+        case GST_FORMAT_TIME:
+          /* FIXME, bring to stream time, might be tricky */
+          gst_query_set_position (query, format, self->timestamp);
+          res = TRUE;
+          break;
+        case GST_FORMAT_BYTES:
+          /* offset counts samples per channel; scale to interleaved bytes */
+          gst_query_set_position (query, format,
+              self->offset * self->channels * self->width);
+          res = TRUE;
+          break;
+        case GST_FORMAT_DEFAULT:
+          gst_query_set_position (query, format, self->offset);
+          res = TRUE;
+          break;
+        default:
+          break;
+      }
+      break;
+    }
+    case GST_QUERY_DURATION:
+      res = gst_interleave_src_query_duration (self, query);
+      break;
+    default:
+      /* FIXME, needs a custom query handler because we have multiple
+       * sinkpads */
+      res = gst_pad_query_default (pad, parent, query);
+      break;
+  }
+
+  return res;
+}
+
+/* Iterator fold callback: push a ref of @event to one sink pad.  A failed
+ * push is recorded in the boolean accumulator @ret, but TRUE is always
+ * returned so the fold continues and every pad is visited. */
+static gboolean
+forward_event_func (const GValue * item, GValue * ret, GstEvent * event)
+{
+  GstPad *pad = GST_PAD_CAST (g_value_dup_object (item));
+  gst_event_ref (event);
+  GST_LOG_OBJECT (pad, "About to send event %s", GST_EVENT_TYPE_NAME (event));
+  if (!gst_pad_push_event (pad, event)) {
+    g_value_set_boolean (ret, FALSE);
+    GST_WARNING_OBJECT (pad, "Sending event %p (%s) failed.",
+        event, GST_EVENT_TYPE_NAME (event));
+  } else {
+    GST_LOG_OBJECT (pad, "Sent event %p (%s).",
+        event, GST_EVENT_TYPE_NAME (event));
+  }
+  gst_object_unref (pad);
+  return TRUE;
+}
+
+/* Forward @event (transfer full: unreffed here after the fold) to all sink
+ * pads.  Returns TRUE only if every pad accepted the event. */
+static gboolean
+forward_event (GstInterleave * self, GstEvent * event)
+{
+  GstIterator *it;
+  GValue vret = { 0 };
+
+  GST_LOG_OBJECT (self, "Forwarding event %p (%s)", event,
+      GST_EVENT_TYPE_NAME (event));
+
+  g_value_init (&vret, G_TYPE_BOOLEAN);
+  g_value_set_boolean (&vret, TRUE);
+  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (self));
+  gst_iterator_fold (it, (GstIteratorFoldFunction) forward_event_func, &vret,
+      event);
+  gst_iterator_free (it);
+  gst_event_unref (event);
+
+  return g_value_get_boolean (&vret);
+}
+
+
+/* Src pad event handler: refuses QOS and NAVIGATION, handles flushing
+ * SEEKs by flushing the collect pads and sending flush-start downstream,
+ * and forwards everything else to all sink pads. */
+static gboolean
+gst_interleave_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstInterleave *self = GST_INTERLEAVE (parent);
+  gboolean result;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_QOS:
+      /* QoS might be tricky */
+      result = FALSE;
+      break;
+    case GST_EVENT_SEEK:
+    {
+      GstSeekFlags flags;
+
+      gst_event_parse_seek (event, NULL, NULL, &flags, NULL, NULL, NULL, NULL);
+
+      /* check if we are flushing */
+      if (flags & GST_SEEK_FLAG_FLUSH) {
+        /* make sure we accept nothing anymore and return WRONG_STATE */
+        gst_collect_pads_set_flushing (self->collect, TRUE);
+
+        /* flushing seek, start flush downstream, the flush will be done
+         * when all pads received a FLUSH_STOP. */
+        gst_pad_push_event (self->src, gst_event_new_flush_start ());
+      }
+      result = forward_event (self, event);
+      break;
+    }
+    case GST_EVENT_NAVIGATION:
+      /* navigation is rather pointless. */
+      result = FALSE;
+      break;
+    default:
+      /* just forward the rest for now */
+      result = forward_event (self, event);
+      break;
+  }
+
+  return result;
+}
+
+/* GstCollectPads collect callback: the core interleaving loop.
+ *
+ * Takes @size bytes (one channel's worth) from every sink pad, writes each
+ * pad's samples into its channel slot of a zero-initialised output buffer
+ * of size * channels bytes, pushes any pending segment (converted to TIME
+ * if necessary), stamps the buffer from the running offset/timestamp and
+ * pushes it.  GAP input buffers are skipped and leave silence; if all
+ * inputs were gaps the output is flagged GAP too.  When no pad has data
+ * any more, EOS is sent downstream and GST_FLOW_EOS returned. */
+static GstFlowReturn
+gst_interleave_collected (GstCollectPads * pads, GstInterleave * self)
+{
+  guint size;
+  GstBuffer *outbuf = NULL;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GSList *collected;
+  guint nsamples;
+  guint ncollected = 0;
+  gboolean empty = TRUE;
+  gint width = self->width / 8;   /* sample width in bytes */
+  GstMapInfo write_info;
+  GstClockTime timestamp = -1;
+
+  size = gst_collect_pads_available (pads);
+  if (size == 0)
+    goto eos;
+
+  /* negotiation must have completed before any data arrives */
+  g_return_val_if_fail (self->func != NULL, GST_FLOW_NOT_NEGOTIATED);
+  g_return_val_if_fail (self->width > 0, GST_FLOW_NOT_NEGOTIATED);
+  g_return_val_if_fail (self->channels > 0, GST_FLOW_NOT_NEGOTIATED);
+  g_return_val_if_fail (self->rate > 0, GST_FLOW_NOT_NEGOTIATED);
+
+  g_return_val_if_fail (size % width == 0, GST_FLOW_ERROR);
+
+  GST_DEBUG_OBJECT (self, "Starting to collect %u bytes from %d channels", size,
+      self->channels);
+
+  nsamples = size / width;
+
+  outbuf = gst_buffer_new_allocate (NULL, size * self->channels, NULL);
+
+  /* NOTE(review): if allocation failed outbuf is NULL here and
+   * gst_buffer_unref (NULL) will trigger a GLib warning -- consider
+   * guarding the unref. */
+  if (outbuf == NULL || gst_buffer_get_size (outbuf) < size * self->channels) {
+    gst_buffer_unref (outbuf);
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+
+  /* pre-fill with silence so skipped (gap/missing) channels stay zero */
+  gst_buffer_map (outbuf, &write_info, GST_MAP_WRITE);
+  memset (write_info.data, 0, size * self->channels);
+
+  for (collected = pads->data; collected != NULL; collected = collected->next) {
+    GstCollectData *cdata;
+    GstBuffer *inbuf;
+    guint8 *outdata;
+    GstMapInfo input_info;
+    gint channel;
+
+    cdata = (GstCollectData *) collected->data;
+
+    inbuf = gst_collect_pads_take_buffer (pads, cdata, size);
+    if (inbuf == NULL) {
+      GST_DEBUG_OBJECT (cdata->pad, "No buffer available");
+      goto next;
+    }
+    ncollected++;
+
+    /* use the first available input timestamp for the output buffer */
+    if (timestamp == -1)
+      timestamp = GST_BUFFER_TIMESTAMP (inbuf);
+
+    /* gap buffers carry no data; the pre-zeroed slot stays silent */
+    if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_GAP))
+      goto next;
+
+    empty = FALSE;
+    channel = GST_INTERLEAVE_PAD_CAST (cdata->pad)->channel;
+    /* remap to the canonical channel order when a channel-mask applies */
+    if (self->channels <= 64 && self->channel_mask) {
+      channel = self->default_channels_ordering_map[channel];
+    }
+    outdata = write_info.data + width * channel;
+
+    gst_buffer_map (inbuf, &input_info, GST_MAP_READ);
+    self->func (outdata, input_info.data, self->channels, nsamples);
+    gst_buffer_unmap (inbuf, &input_info);
+
+  next:
+    if (inbuf)
+      gst_buffer_unref (inbuf);
+  }
+
+  if (ncollected == 0) {
+    gst_buffer_unmap (outbuf, &write_info);
+    goto eos;
+  }
+
+  /* push the deferred segment before the first output buffer, converting
+   * BYTES/DEFAULT segments into TIME */
+  GST_OBJECT_LOCK (self);
+  if (self->pending_segment) {
+    GstEvent *event;
+    GstSegment segment;
+
+    event = self->pending_segment;
+    self->pending_segment = NULL;
+    GST_OBJECT_UNLOCK (self);
+
+    /* convert the input segment to time now */
+    gst_event_copy_segment (event, &segment);
+
+    if (segment.format != GST_FORMAT_TIME) {
+      gst_event_unref (event);
+
+      /* not time, convert */
+      switch (segment.format) {
+        case GST_FORMAT_BYTES:
+          segment.start *= width;
+          if (segment.stop != -1)
+            segment.stop *= width;
+          if (segment.position != -1)
+            segment.position *= width;
+          /* fallthrough for the samples case */
+        case GST_FORMAT_DEFAULT:
+          segment.start =
+              gst_util_uint64_scale_int (segment.start, GST_SECOND, self->rate);
+          if (segment.stop != -1)
+            segment.stop =
+                gst_util_uint64_scale_int (segment.stop, GST_SECOND,
+                self->rate);
+          if (segment.position != -1)
+            segment.position =
+                gst_util_uint64_scale_int (segment.position, GST_SECOND,
+                self->rate);
+          break;
+        default:
+          GST_WARNING ("can't convert segment values");
+          segment.start = 0;
+          segment.stop = -1;
+          segment.position = 0;
+          break;
+      }
+      event = gst_event_new_segment (&segment);
+    }
+    gst_pad_push_event (self->src, event);
+
+    GST_OBJECT_LOCK (self);
+  }
+  GST_OBJECT_UNLOCK (self);
+
+  /* resync running position to the incoming timestamp if we got one */
+  if (timestamp != -1) {
+    self->offset = gst_util_uint64_scale_int (timestamp, self->rate,
+        GST_SECOND);
+    self->timestamp = timestamp;
+  }
+
+  GST_BUFFER_TIMESTAMP (outbuf) = self->timestamp;
+  GST_BUFFER_OFFSET (outbuf) = self->offset;
+
+  self->offset += nsamples;
+  self->timestamp = gst_util_uint64_scale_int (self->offset,
+      GST_SECOND, self->rate);
+
+  GST_BUFFER_DURATION (outbuf) =
+      self->timestamp - GST_BUFFER_TIMESTAMP (outbuf);
+
+  if (empty)
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_GAP);
+
+  gst_buffer_unmap (outbuf, &write_info);
+
+  GST_LOG_OBJECT (self, "pushing outbuf, timestamp %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
+  ret = gst_pad_push (self->src, outbuf);
+
+  return ret;
+
+eos:
+  {
+    GST_DEBUG_OBJECT (self, "no data available, must be EOS");
+    if (outbuf)
+      gst_buffer_unref (outbuf);
+    gst_pad_push_event (self->src, gst_event_new_eos ());
+    return GST_FLOW_EOS;
+  }
+}
diff --git a/gst/interleave/interleave.h b/gst/interleave/interleave.h
new file mode 100644
index 0000000000..05ebe3b70d
--- /dev/null
+++ b/gst/interleave/interleave.h
@@ -0,0 +1,90 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2005 Wim Taymans <wim@fluendo.com>
+ * 2007 Andy Wingo <wingo at pobox.com>
+ * 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ *
+ * interleave.c: interleave samples, mostly based on adder
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __INTERLEAVE_H__
+#define __INTERLEAVE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstcollectpads.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_INTERLEAVE (gst_interleave_get_type())
+#define GST_INTERLEAVE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTERLEAVE,GstInterleave))
+#define GST_INTERLEAVE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTERLEAVE,GstInterleaveClass))
+#define GST_INTERLEAVE_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_INTERLEAVE,GstInterleaveClass))
+#define GST_IS_INTERLEAVE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTERLEAVE))
+#define GST_IS_INTERLEAVE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTERLEAVE))
+
+typedef struct _GstInterleave GstInterleave;
+typedef struct _GstInterleaveClass GstInterleaveClass;
+
+/* Interleaving routine: scatter nframes samples from the mono input into
+ * out with a stride of `stride` samples (one slot per channel). */
+typedef void (*GstInterleaveFunc) (gpointer out, gpointer in, guint stride, guint nframes);
+
+struct _GstInterleave
+{
+  GstElement element;
+
+  /*< private >*/
+  GstCollectPads *collect;      /* gathers one buffer per sink pad */
+
+  gint channels;                /* number of sink pads == output channels */
+  gint padcounter;
+  gint rate;                    /* sample rate from negotiated caps */
+  gint width;                   /* sample width in bits */
+
+  GValueArray *channel_positions;
+  GValueArray *input_channel_positions;       /* per-pad positions from input caps */
+  gboolean channel_positions_from_input;
+
+  /* reorder map applied when a channel-mask is in effect (<= 64 channels) */
+  gint default_channels_ordering_map[64];
+  guint64 channel_mask;
+
+  GstCaps *sinkcaps;            /* first accepted sink caps (sans channel-mask) */
+  gint configured_sinkpads_counter;     /* pads that received caps (atomic) */
+
+  GstClockTime timestamp;       /* running output timestamp */
+  guint64 offset;               /* running output offset in samples */
+
+  GstEvent *pending_segment;    /* segment deferred until first output */
+
+  GstInterleaveFunc func;       /* width-specific interleave routine */
+
+  GstPad *src;
+
+  gboolean send_stream_start;   /* stream-start still to be sent */
+};
+
+struct _GstInterleaveClass
+{
+  GstElementClass parent_class;
+};
+
+GType gst_interleave_get_type (void);
+
+G_END_DECLS
+
+#endif /* __INTERLEAVE_H__ */
diff --git a/gst/interleave/meson.build b/gst/interleave/meson.build
new file mode 100644
index 0000000000..78f84cbfea
--- /dev/null
+++ b/gst/interleave/meson.build
@@ -0,0 +1,10 @@
+gstinterleave = library('gstinterleave',
+ 'plugin.c', 'interleave.c', 'deinterleave.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstaudio_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstinterleave, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstinterleave]
diff --git a/gst/interleave/plugin.c b/gst/interleave/plugin.c
new file mode 100644
index 0000000000..79596e4355
--- /dev/null
+++ b/gst/interleave/plugin.c
@@ -0,0 +1,43 @@
+/* GStreamer interleave plugin
+ * Copyright (C) 2004,2007 Andy Wingo <wingo at pobox.com>
+ *
+ * plugin.c: the stubs for the interleave plugin
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstinterleaveelements.h"
+
+/* Register the interleave and deinterleave elements; succeeds if at least
+ * one registration worked. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean ret = FALSE;
+
+  ret |= GST_ELEMENT_REGISTER (interleave, plugin);
+  ret |= GST_ELEMENT_REGISTER (deinterleave, plugin);
+
+  return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    interleave,
+    "Audio interleaver/deinterleaver",
+    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/isomp4/GstQTMux.prs b/gst/isomp4/GstQTMux.prs
new file mode 100644
index 0000000000..be1e644564
--- /dev/null
+++ b/gst/isomp4/GstQTMux.prs
@@ -0,0 +1,6 @@
+[_presets_]
+version=1.6.0
+element-name=GstQTMux
+
+[Profile YouTube]
+faststart=true
diff --git a/gst/isomp4/LEGAL b/gst/isomp4/LEGAL
new file mode 100644
index 0000000000..5af6e8f92e
--- /dev/null
+++ b/gst/isomp4/LEGAL
@@ -0,0 +1,10 @@
+This is a demuxer supporting a subset of the Quicktime video container
+format developed by Apple. Apple and others have some patents on
+some features of the Quicktime container format in regards to technologies
+such as QuicktimeVR and RTP hinting. Due to that be aware that if ever
+such features are added to this demuxer it would need to be moved to the
+-ugly module or those features need to come as add-in functionality stored in
+another module.
+
+As of today's date (19th of June 2007), the plugin does not violate any
+software patents we know of.
diff --git a/gst/isomp4/atoms.c b/gst/isomp4/atoms.c
new file mode 100644
index 0000000000..290d1fdf62
--- /dev/null
+++ b/gst/isomp4/atoms.c
@@ -0,0 +1,5768 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ * Copyright (C) 2008 Mark Nauwelaerts <mnauw@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "atoms.h"
+#include <string.h>
+#include <glib.h>
+
+#include <gst/gst.h>
+#include <gst/base/gstbytewriter.h>
+#include <gst/tag/tag.h>
+#include <gst/video/video.h>
+
+/*
+ * Creates a new AtomsContext for the given flavor.
+ */
+AtomsContext *
+atoms_context_new (AtomsTreeFlavor flavor, gboolean force_create_timecode_trak)
+{
+  AtomsContext *context = g_new0 (AtomsContext, 1);
+  context->flavor = flavor;
+  context->force_create_timecode_trak = force_create_timecode_trak;
+  return context;
+}
+
+/*
+ * Frees an AtomsContext and all memory associated with it
+ */
+void
+atoms_context_free (AtomsContext * context)
+{
+  g_free (context);
+}
+
+/* -- creation, initialization, clear and free functions -- */
+
+#define SECS_PER_DAY (24 * 60 * 60)
+#define LEAP_YEARS_FROM_1904_TO_1970 17
+
+/* Current wall-clock time expressed in the QT epoch (seconds since
+ * 1904-01-01), as used by creation/modification time fields. */
+guint64
+atoms_get_current_qt_time (void)
+{
+  gint64 curtime_s = g_get_real_time () / G_USEC_PER_SEC;
+
+  /* FIXME this should use UTC coordinated time */
+  return curtime_s + (((1970 - 1904) * (guint64) 365) +
+      LEAP_YEARS_FROM_1904_TO_1970) * SECS_PER_DAY;
+}
+
+/* Initialise a TimeInfo: creation == modification == now, no timescale or
+ * duration yet. */
+static void
+common_time_info_init (TimeInfo * ti)
+{
+  ti->creation_time = ti->modification_time = atoms_get_current_qt_time ();
+  ti->timescale = 0;
+  ti->duration = 0;
+}
+
+/* Fill a plain atom header with fourcc and (extended) size. */
+static void
+atom_header_set (Atom * header, guint32 fourcc, gint32 size, gint64 ext_size)
+{
+  header->type = fourcc;
+  header->size = size;
+  header->extended_size = ext_size;
+}
+
+/* Plain atoms own no heap data; kept as a clear hook for symmetry. */
+static void
+atom_clear (Atom * atom)
+{
+}
+
+/* Fill a full atom header: plain header plus version and 24-bit flags. */
+static void
+atom_full_init (AtomFull * full, guint32 fourcc, gint32 size, gint64 ext_size,
+    guint8 version, guint8 flags[3])
+{
+  atom_header_set (&(full->header), fourcc, size, ext_size);
+  full->version = version;
+  full->flags[0] = flags[0];
+  full->flags[1] = flags[1];
+  full->flags[2] = flags[2];
+}
+
+static void
+atom_full_clear (AtomFull * full)
+{
+  atom_clear (&full->header);
+}
+
+static void
+atom_full_free (AtomFull * full)
+{
+  atom_full_clear (full);
+  g_free (full);
+}
+
+/* Pack the three flag bytes into one big-endian 24-bit value. */
+static guint32
+atom_full_get_flags_as_uint (AtomFull * full)
+{
+  return full->flags[0] << 16 | full->flags[1] << 8 | full->flags[2];
+}
+
+/* Unpack a 24-bit value into the three flag bytes (big-endian order). */
+static void
+atom_full_set_flags_as_uint (AtomFull * full, guint32 flags_as_uint)
+{
+  full->flags[2] = flags_as_uint & 0xFF;
+  full->flags[1] = (flags_as_uint & 0xFF00) >> 8;
+  full->flags[0] = (flags_as_uint & 0xFF0000) >> 16;
+}
+
+/* Wrap @atom together with its copy/free callbacks in an AtomInfo, or
+ * return NULL if @atom is NULL. */
+static AtomInfo *
+build_atom_info_wrapper (Atom * atom, gpointer copy_func, gpointer free_func)
+{
+  AtomInfo *info = NULL;
+
+  if (atom) {
+    info = g_new0 (AtomInfo, 1);
+
+    info->atom = atom;
+    info->copy_data_func = copy_func;
+    info->free_func = free_func;
+  }
+
+  return info;
+}
+
+/* Prepend @atom (wrapped in an AtomInfo) to list @ai; NULL atoms leave the
+ * list unchanged. */
+static GList *
+atom_info_list_prepend_atom (GList * ai, Atom * atom,
+    AtomCopyDataFunc copy_func, AtomFreeFunc free_func)
+{
+  if (atom)
+    return g_list_prepend (ai,
+        build_atom_info_wrapper (atom, copy_func, free_func));
+  else
+    return ai;
+}
+
+/* Free an AtomInfo list: each wrapped atom via its free_func, then the
+ * wrappers and list links. */
+static void
+atom_info_list_free (GList * ai)
+{
+  while (ai) {
+    AtomInfo *info = (AtomInfo *) ai->data;
+
+    info->free_func (info->atom);
+    g_free (info);
+    ai = g_list_delete_link (ai, ai);
+  }
+}
+
+/* New empty opaque-payload atom with the given fourcc. */
+static AtomData *
+atom_data_new (guint32 fourcc)
+{
+  AtomData *data = g_new0 (AtomData, 1);
+
+  atom_header_set (&data->header, fourcc, 0, 0);
+  return data;
+}
+
+/* (Re)allocate the payload buffer, dropping any previous contents. */
+static void
+atom_data_alloc_mem (AtomData * data, guint32 size)
+{
+  g_free (data->data);
+  data->data = g_new0 (guint8, size);
+  data->datalen = size;
+}
+
+/* New payload atom initialised with a copy of @mem. */
+static AtomData *
+atom_data_new_from_data (guint32 fourcc, const guint8 * mem, gsize size)
+{
+  AtomData *data = atom_data_new (fourcc);
+
+  atom_data_alloc_mem (data, size);
+  memcpy (data->data, mem, size);
+  return data;
+}
+
+/* New payload atom initialised with the contents of @buf. */
+static AtomData *
+atom_data_new_from_gst_buffer (guint32 fourcc, const GstBuffer * buf)
+{
+  AtomData *data = atom_data_new (fourcc);
+  gsize size = gst_buffer_get_size ((GstBuffer *) buf);
+
+  atom_data_alloc_mem (data, size);
+  gst_buffer_extract ((GstBuffer *) buf, 0, data->data, size);
+  return data;
+}
+
+static void
+atom_data_free (AtomData * data)
+{
+  atom_clear (&data->header);
+  g_free (data->data);
+  g_free (data);
+}
+
+/* New 'uuid' atom with no payload yet. */
+static AtomUUID *
+atom_uuid_new (void)
+{
+  AtomUUID *uuid = g_new0 (AtomUUID, 1);
+
+  atom_header_set (&uuid->header, FOURCC_uuid, 0, 0);
+  return uuid;
+}
+
+static void
+atom_uuid_free (AtomUUID * data)
+{
+  atom_clear (&data->header);
+  g_free (data->data);
+  g_free (data);
+}
+
+/* Initialise an 'ftyp' atom: major brand, version, and a compatible-brand
+ * list that always starts with the major brand itself. */
+static void
+atom_ftyp_init (AtomFTYP * ftyp, guint32 major, guint32 version, GList * brands)
+{
+  gint index;
+  GList *it = NULL;
+
+  atom_header_set (&ftyp->header, FOURCC_ftyp, 16, 0);
+  ftyp->major_brand = major;
+  ftyp->version = version;
+
+  /* always include major brand as compatible brand */
+  ftyp->compatible_brands_size = g_list_length (brands) + 1;
+  ftyp->compatible_brands = g_new (guint32, ftyp->compatible_brands_size);
+
+  ftyp->compatible_brands[0] = major;
+  index = 1;
+  for (it = brands; it != NULL; it = g_list_next (it)) {
+    ftyp->compatible_brands[index++] = GPOINTER_TO_UINT (it->data);
+  }
+}
+
+AtomFTYP *
+atom_ftyp_new (AtomsContext * context, guint32 major, guint32 version,
+    GList * brands)
+{
+  AtomFTYP *ftyp = g_new0 (AtomFTYP, 1);
+
+  atom_ftyp_init (ftyp, major, version, brands);
+  return ftyp;
+}
+
+void
+atom_ftyp_free (AtomFTYP * ftyp)
+{
+  atom_clear (&ftyp->header);
+  g_free (ftyp->compatible_brands);
+  ftyp->compatible_brands = NULL;
+  g_free (ftyp);
+}
+
+/* Initialise an 'esds' full atom and its embedded ES descriptor. */
+static void
+atom_esds_init (AtomESDS * esds)
+{
+  guint8 flags[3] = { 0, 0, 0 };
+
+  atom_full_init (&esds->header, FOURCC_esds, 0, 0, 0, flags);
+  desc_es_init (&esds->es);
+}
+
+static AtomESDS *
+atom_esds_new (void)
+{
+  AtomESDS *esds = g_new0 (AtomESDS, 1);
+
+  atom_esds_init (esds);
+  return esds;
+}
+
+static void
+atom_esds_free (AtomESDS * esds)
+{
+  atom_full_clear (&esds->header);
+  desc_es_descriptor_clear (&esds->es);
+  g_free (esds);
+}
+
+/* New 'frma' (original format) atom. */
+static AtomFRMA *
+atom_frma_new (void)
+{
+  AtomFRMA *frma = g_new0 (AtomFRMA, 1);
+
+  atom_header_set (&frma->header, FOURCC_frma, 0, 0);
+  return frma;
+}
+
+static void
+atom_frma_free (AtomFRMA * frma)
+{
+  atom_clear (&frma->header);
+  g_free (frma);
+}
+
+/* New 'wave' extension-container atom. */
+static AtomWAVE *
+atom_wave_new (void)
+{
+  AtomWAVE *wave = g_new0 (AtomWAVE, 1);
+
+  atom_header_set (&wave->header, FOURCC_wave, 0, 0);
+  return wave;
+}
+
+static void
+atom_wave_free (AtomWAVE * wave)
+{
+  atom_clear (&wave->header);
+  atom_info_list_free (wave->extension_atoms);
+  g_free (wave);
+}
+
+/* Initialise an 'elst' (edit list) full atom with no entries. */
+static void
+atom_elst_init (AtomELST * elst)
+{
+  guint8 flags[3] = { 0, 0, 0 };
+  atom_full_init (&elst->header, FOURCC_elst, 0, 0, 0, flags);
+  elst->entries = 0;
+}
+
+/* Clear an 'elst' atom, freeing every EditListEntry and the list itself. */
+static void
+atom_elst_clear (AtomELST * elst)
+{
+  GSList *walker;
+
+  atom_full_clear (&elst->header);
+  walker = elst->entries;
+  while (walker) {
+    g_free ((EditListEntry *) walker->data);
+    walker = g_slist_next (walker);
+  }
+  g_slist_free (elst->entries);
+}
+
+/* Initialise an 'edts' container with its embedded edit list. */
+static void
+atom_edts_init (AtomEDTS * edts)
+{
+  atom_header_set (&edts->header, FOURCC_edts, 0, 0);
+  atom_elst_init (&edts->elst);
+}
+
+static void
+atom_edts_clear (AtomEDTS * edts)
+{
+  atom_clear (&edts->header);
+  atom_elst_clear (&edts->elst);
+}
+
+static AtomEDTS *
+atom_edts_new (void)
+{
+  AtomEDTS *edts = g_new0 (AtomEDTS, 1);
+  atom_edts_init (edts);
+  return edts;
+}
+
+static void
+atom_edts_free (AtomEDTS * edts)
+{
+  atom_edts_clear (edts);
+  g_free (edts);
+}
+
+/* Initialise a 'tcmi' (timecode media information) full atom. */
+static void
+atom_tcmi_init (AtomTCMI * tcmi)
+{
+  guint8 flags[3] = { 0, 0, 0 };
+
+  atom_full_init (&tcmi->header, FOURCC_tcmi, 0, 0, 0, flags);
+}
+
+/* Reset all text/colour fields and release the font name. */
+static void
+atom_tcmi_clear (AtomTCMI * tcmi)
+{
+  atom_full_clear (&tcmi->header);
+  tcmi->text_font = 0;
+  tcmi->text_face = 0;
+  tcmi->text_size = 0;
+  tcmi->text_color[0] = 0;
+  tcmi->text_color[1] = 0;
+  tcmi->text_color[2] = 0;
+  tcmi->bg_color[0] = 0;
+  tcmi->bg_color[1] = 0;
+  tcmi->bg_color[2] = 0;
+  g_free (tcmi->font_name);
+  tcmi->font_name = NULL;
+}
+
+/* New 'tmcd' (timecode) atom with an initialised embedded 'tcmi'. */
+static AtomTMCD *
+atom_tmcd_new (void)
+{
+  AtomTMCD *tmcd = g_new0 (AtomTMCD, 1);
+
+  atom_header_set (&tmcd->header, FOURCC_tmcd, 0, 0);
+  atom_tcmi_init (&tmcd->tcmi);
+
+  return tmcd;
+}
+
+static void
+atom_tmcd_free (AtomTMCD * tmcd)
+{
+  atom_clear (&tmcd->header);
+  atom_tcmi_clear (&tmcd->tcmi);
+  g_free (tmcd);
+}
+
+/* Initialise a 'gmin' (base media information) full atom. */
+static void
+atom_gmin_init (AtomGMIN * gmin)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  atom_full_init (&gmin->header, FOURCC_gmin, 0, 0, 0, no_flags);
+}
+
+/* Reset all fields of a 'gmin' atom to their zero state. */
+static void
+atom_gmin_clear (AtomGMIN * gmin)
+{
+  atom_full_clear (&gmin->header);
+  gmin->graphics_mode = 0;
+  memset (gmin->opcolor, 0, sizeof (gmin->opcolor));
+  gmin->balance = 0;
+  gmin->reserved = 0;
+}
+
+/* Initialise a 'gmhd' container and its mandatory 'gmin' child. */
+static void
+atom_gmhd_init (AtomGMHD * gmhd)
+{
+  atom_gmin_init (&gmhd->gmin);
+  atom_header_set (&gmhd->header, FOURCC_gmhd, 0, 0);
+}
+
+/* Clear a 'gmhd' atom, releasing the optional 'tmcd' child if present. */
+static void
+atom_gmhd_clear (AtomGMHD * gmhd)
+{
+  atom_clear (&gmhd->header);
+  atom_gmin_clear (&gmhd->gmin);
+  g_clear_pointer (&gmhd->tmcd, atom_tmcd_free);
+}
+
+/* Allocate and initialise a new 'gmhd' atom; caller owns the result. */
+static AtomGMHD *
+atom_gmhd_new (void)
+{
+  AtomGMHD *gmhd;
+
+  gmhd = g_new0 (AtomGMHD, 1);
+  atom_gmhd_init (gmhd);
+  return gmhd;
+}
+
+/* Clear and deallocate a 'gmhd' atom. */
+static void
+atom_gmhd_free (AtomGMHD * gmhd)
+{
+  atom_gmhd_clear (gmhd);
+  g_free (gmhd);
+}
+
+/* Initialise an 'nmhd' (null media header) atom with zeroed flags. */
+static void
+atom_nmhd_init (AtomNMHD * nmhd)
+{
+  nmhd->flags = 0;
+  atom_header_set (&nmhd->header, FOURCC_nmhd, 0, 0);
+}
+
+/* Clear an 'nmhd' atom header.
+ * NOTE(review): 'flags' is not reset here, unlike in atom_nmhd_init();
+ * presumably harmless since clear is followed by free — confirm. */
+static void
+atom_nmhd_clear (AtomNMHD * nmhd)
+{
+ atom_clear (&nmhd->header);
+}
+
+/* Allocate and initialise a new 'nmhd' atom; caller owns the result. */
+static AtomNMHD *
+atom_nmhd_new (void)
+{
+  AtomNMHD *nmhd;
+
+  nmhd = g_new0 (AtomNMHD, 1);
+  atom_nmhd_init (nmhd);
+  return nmhd;
+}
+
+/* Clear and deallocate an 'nmhd' atom. */
+static void
+atom_nmhd_free (AtomNMHD * nmhd)
+{
+  atom_nmhd_clear (nmhd);
+  g_free (nmhd);
+}
+
+/* Common initialisation for every sample-table entry type. */
+static void
+atom_sample_entry_init (SampleTableEntry * se, guint32 type)
+{
+  atom_header_set (&se->header, type, 0, 0);
+  memset (se->reserved, 0, sizeof (se->reserved));
+  se->data_reference_index = 0;
+}
+
+/* Release the common part of a sample entry (header only; the concrete
+ * entry struct itself is freed by its type-specific _free function). */
+static void
+atom_sample_entry_free (SampleTableEntry * se)
+{
+ atom_clear (&se->header);
+}
+
+/* Initialise an 'mp4a' audio sample entry with common defaults
+ * (stereo, 16-bit). sample_rate and the version-1 packing fields are
+ * filled in later by the caller when the stream format is known. */
+static void
+sample_entry_mp4a_init (SampleTableEntryMP4A * mp4a)
+{
+ atom_sample_entry_init (&mp4a->se, FOURCC_mp4a);
+
+ mp4a->version = 0;
+ mp4a->revision_level = 0;
+ mp4a->vendor = 0;
+ mp4a->channels = 2;
+ mp4a->sample_size = 16;
+ mp4a->compression_id = 0;
+ mp4a->packet_size = 0;
+ mp4a->sample_rate = 0;
+ /* following only used if version is 1 */
+ mp4a->samples_per_packet = 0;
+ mp4a->bytes_per_packet = 0;
+ mp4a->bytes_per_frame = 0;
+ mp4a->bytes_per_sample = 0;
+
+ mp4a->extension_atoms = NULL;
+}
+
+/* Allocate and initialise an 'mp4a' sample entry; caller owns it. */
+static SampleTableEntryMP4A *
+sample_entry_mp4a_new (void)
+{
+  SampleTableEntryMP4A *mp4a;
+
+  mp4a = g_new0 (SampleTableEntryMP4A, 1);
+  sample_entry_mp4a_init (mp4a);
+  return mp4a;
+}
+
+/* Release an 'mp4a' sample entry together with its extension atoms. */
+static void
+sample_entry_mp4a_free (SampleTableEntryMP4A * mp4a)
+{
+  atom_info_list_free (mp4a->extension_atoms);
+  atom_sample_entry_free (&mp4a->se);
+  g_free (mp4a);
+}
+
+/* Initialise a 'tmcd' (timecode) sample entry.
+ * NOTE(review): the g_free() on name.name assumes the struct was zeroed
+ * (it is, since the only caller allocates with g_new0) — confirm if ever
+ * called on a live entry. */
+static void
+sample_entry_tmcd_init (SampleTableEntryTMCD * tmcd)
+{
+ atom_sample_entry_init (&tmcd->se, FOURCC_tmcd);
+
+ tmcd->tc_flags = 0;
+ tmcd->timescale = 0;
+ tmcd->frame_duration = 0;
+ tmcd->n_frames = 0;
+
+ tmcd->name.language_code = 0;
+ g_free (tmcd->name.name);
+ tmcd->name.name = NULL;
+}
+
+/* Allocate and initialise a 'tmcd' sample entry; caller owns it. */
+static SampleTableEntryTMCD *
+sample_entry_tmcd_new (void)
+{
+  SampleTableEntryTMCD *tmcd;
+
+  tmcd = g_new0 (SampleTableEntryTMCD, 1);
+  sample_entry_tmcd_init (tmcd);
+  return tmcd;
+}
+
+/* Release a 'tmcd' sample entry, including its owned name string. */
+static void
+sample_entry_tmcd_free (SampleTableEntryTMCD * tmcd)
+{
+  g_free (tmcd->name.name);
+  atom_sample_entry_free (&tmcd->se);
+  g_free (tmcd);
+}
+
+/* Initialise an 'mp4v' video sample entry with spec defaults.
+ * The 'context' parameter is currently unused but kept for symmetry with
+ * the other flavor-aware initialisers. */
+static void
+sample_entry_mp4v_init (SampleTableEntryMP4V * mp4v, AtomsContext * context)
+{
+ atom_sample_entry_init (&mp4v->se, FOURCC_mp4v);
+
+ mp4v->version = 0;
+ mp4v->revision_level = 0;
+ mp4v->vendor = 0;
+
+ mp4v->temporal_quality = 0;
+ mp4v->spatial_quality = 0;
+
+ /* qt and ISO base media do not contradict, and examples agree */
+ /* 0x00480000 is 72 dpi in 16.16 fixed point */
+ mp4v->horizontal_resolution = 0x00480000;
+ mp4v->vertical_resolution = 0x00480000;
+
+ mp4v->datasize = 0;
+ mp4v->frame_count = 1;
+
+ memset (mp4v->compressor, 0, sizeof (guint8) * 32);
+
+ mp4v->depth = 0;
+ mp4v->color_table_id = 0;
+
+ mp4v->extension_atoms = NULL;
+}
+
+/* Release an 'mp4v' sample entry together with its extension atoms. */
+static void
+sample_entry_mp4v_free (SampleTableEntryMP4V * mp4v)
+{
+  atom_info_list_free (mp4v->extension_atoms);
+  atom_sample_entry_free (&mp4v->se);
+  g_free (mp4v);
+}
+
+/* Allocate and initialise an 'mp4v' sample entry; caller owns it. */
+static SampleTableEntryMP4V *
+sample_entry_mp4v_new (AtomsContext * context)
+{
+  SampleTableEntryMP4V *mp4v;
+
+  mp4v = g_new0 (SampleTableEntryMP4V, 1);
+  sample_entry_mp4v_init (mp4v, context);
+  return mp4v;
+}
+
+/* Initialise a 'tx3g' (3GPP timed text) sample entry with defaults:
+ * white opaque text, single font. Box and size are set later. */
+static void
+sample_entry_tx3g_init (SampleTableEntryTX3G * tx3g)
+{
+ atom_sample_entry_init (&tx3g->se, FOURCC_tx3g);
+
+ tx3g->display_flags = 0;
+ tx3g->font_id = 1; /* must be 1 as there is a single font */
+ tx3g->font_face = 0;
+ tx3g->foreground_color_rgba = 0xFFFFFFFF; /* white, opaque */
+
+ /* can't set this now */
+ tx3g->default_text_box = 0;
+ tx3g->font_size = 0;
+}
+
+/* Release a 'tx3g' sample entry. */
+static void
+sample_entry_tx3g_free (SampleTableEntryTX3G * tx3g)
+{
+  atom_sample_entry_free (&tx3g->se);
+  g_free (tx3g);
+}
+
+/* Allocate and initialise a 'tx3g' sample entry; caller owns it. */
+static SampleTableEntryTX3G *
+sample_entry_tx3g_new (void)
+{
+  SampleTableEntryTX3G *tx3g;
+
+  tx3g = g_new0 (SampleTableEntryTX3G, 1);
+  sample_entry_tx3g_init (tx3g);
+  return tx3g;
+}
+
+
+/* Initialise an 'stsd' (sample description) atom with no entries. */
+static void
+atom_stsd_init (AtomSTSD * stsd)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  stsd->entries = NULL;
+  stsd->n_entries = 0;
+  atom_full_init (&stsd->header, FOURCC_stsd, 0, 0, 0, no_flags);
+}
+
+/* Drop and free every sample entry from an 'stsd' atom, dispatching to
+ * the type-specific destructor based on the entry's 'kind'. */
+static void
+atom_stsd_remove_entries (AtomSTSD * stsd)
+{
+ GList *walker;
+
+ walker = stsd->entries;
+ while (walker) {
+ GList *aux = walker;
+ SampleTableEntry *se = (SampleTableEntry *) aux->data;
+
+ /* advance before unlinking, as unlink invalidates aux's links */
+ walker = g_list_next (walker);
+ stsd->entries = g_list_remove_link (stsd->entries, aux);
+
+ switch (se->kind) {
+ case AUDIO:
+ sample_entry_mp4a_free ((SampleTableEntryMP4A *) se);
+ break;
+ case VIDEO:
+ sample_entry_mp4v_free ((SampleTableEntryMP4V *) se);
+ break;
+ case SUBTITLE:
+ sample_entry_tx3g_free ((SampleTableEntryTX3G *) se);
+ break;
+ case TIMECODE:
+ sample_entry_tmcd_free ((SampleTableEntryTMCD *) se);
+ break;
+ case CLOSEDCAPTION: /* fallthrough: no dedicated destructor */
+ default:
+ /* best possible cleanup */
+ atom_sample_entry_free (se);
+ }
+ g_list_free (aux);
+ }
+ stsd->n_entries = 0;
+}
+
+/* Clear an 'stsd' atom: free all entries, then clear the header. */
+static void
+atom_stsd_clear (AtomSTSD * stsd)
+{
+  atom_stsd_remove_entries (stsd);
+  atom_full_clear (&stsd->header);
+}
+
+/* Initialise a 'ctts' (composition time offset) atom. */
+static void
+atom_ctts_init (AtomCTTS * ctts)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  ctts->do_pts = FALSE;
+  atom_array_init (&ctts->entries, 128);
+  atom_full_init (&ctts->header, FOURCC_ctts, 0, 0, 0, no_flags);
+}
+
+/* Allocate and initialise a 'ctts' atom; caller owns the result. */
+static AtomCTTS *
+atom_ctts_new (void)
+{
+  AtomCTTS *ctts;
+
+  ctts = g_new0 (AtomCTTS, 1);
+  atom_ctts_init (ctts);
+  return ctts;
+}
+
+/* Release a 'ctts' atom and its entry array. */
+static void
+atom_ctts_free (AtomCTTS * ctts)
+{
+  atom_array_clear (&ctts->entries);
+  atom_full_clear (&ctts->header);
+  g_free (ctts);
+}
+
+/* svmi is specified in ISO 23000-11 (Stereoscopic video application format)
+ * MPEG-A */
+static void
+atom_svmi_init (AtomSVMI * svmi)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  svmi->stereoscopic_composition_type = 0x00;
+  svmi->is_left_first = FALSE;
+  atom_full_init (&svmi->header, FOURCC_svmi, 0, 0, 0, no_flags);
+}
+
+/* Allocate an 'svmi' atom with the given stereoscopic parameters. */
+AtomSVMI *
+atom_svmi_new (guint8 stereoscopic_composition_type, gboolean is_left_first)
+{
+  AtomSVMI *svmi;
+
+  svmi = g_new0 (AtomSVMI, 1);
+  atom_svmi_init (svmi);
+  svmi->stereoscopic_composition_type = stereoscopic_composition_type;
+  svmi->is_left_first = is_left_first;
+  return svmi;
+}
+
+/* Release an 'svmi' atom.
+ * NOTE(review): unlike sibling _free functions this does not clear the
+ * full header first; looks intentional as the header owns no heap data
+ * here — confirm. */
+static void
+atom_svmi_free (AtomSVMI * svmi)
+{
+ g_free (svmi);
+}
+
+/* Initialise an 'stts' (decoding time-to-sample) atom. */
+static void
+atom_stts_init (AtomSTTS * stts)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  atom_array_init (&stts->entries, 512);
+  atom_full_init (&stts->header, FOURCC_stts, 0, 0, 0, no_flags);
+}
+
+/* Clear an 'stts' atom: entry array first, then the header. */
+static void
+atom_stts_clear (AtomSTTS * stts)
+{
+  atom_array_clear (&stts->entries);
+  atom_full_clear (&stts->header);
+}
+
+/* Initialise an 'stsz' (sample size) atom with an empty table. */
+static void
+atom_stsz_init (AtomSTSZ * stsz)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  stsz->sample_size = 0;
+  stsz->table_size = 0;
+  atom_array_init (&stsz->entries, 1024);
+  atom_full_init (&stsz->header, FOURCC_stsz, 0, 0, 0, no_flags);
+}
+
+/* Clear an 'stsz' atom and reset its table size. */
+static void
+atom_stsz_clear (AtomSTSZ * stsz)
+{
+  stsz->table_size = 0;
+  atom_array_clear (&stsz->entries);
+  atom_full_clear (&stsz->header);
+}
+
+/* Initialise an 'stsc' (sample-to-chunk) atom. */
+static void
+atom_stsc_init (AtomSTSC * stsc)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  atom_array_init (&stsc->entries, 128);
+  atom_full_init (&stsc->header, FOURCC_stsc, 0, 0, 0, no_flags);
+}
+
+/* Clear an 'stsc' atom: entry array first, then the header. */
+static void
+atom_stsc_clear (AtomSTSC * stsc)
+{
+  atom_array_clear (&stsc->entries);
+  atom_full_clear (&stsc->header);
+}
+
+/* Initialise a chunk-offset atom.
+ * NOTE(review): the fourcc is deliberately 'stco' (32-bit offsets);
+ * presumably the serializer switches to 'co64' when offsets exceed
+ * 32 bits — confirm against the copy-data path. */
+static void
+atom_co64_init (AtomSTCO64 * co64)
+{
+ guint8 flags[3] = { 0, 0, 0 };
+
+ atom_full_init (&co64->header, FOURCC_stco, 0, 0, 0, flags);
+
+ co64->chunk_offset = 0;
+ co64->max_offset = 0;
+ atom_array_init (&co64->entries, 256);
+}
+
+/* Clear a chunk-offset atom: entry array first, then the header. */
+static void
+atom_stco64_clear (AtomSTCO64 * stco64)
+{
+  atom_array_clear (&stco64->entries);
+  atom_full_clear (&stco64->header);
+}
+
+/* Initialise an 'stss' (sync sample / keyframe) atom. */
+static void
+atom_stss_init (AtomSTSS * stss)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  atom_array_init (&stss->entries, 128);
+  atom_full_init (&stss->header, FOURCC_stss, 0, 0, 0, no_flags);
+}
+
+/* Clear an 'stss' atom: entry array first, then the header. */
+static void
+atom_stss_clear (AtomSTSS * stss)
+{
+  atom_array_clear (&stss->entries);
+  atom_full_clear (&stss->header);
+}
+
+/* Initialise an 'stbl' (sample table) container and all mandatory
+ * children; the optional ctts/svmi children start out absent. */
+void
+atom_stbl_init (AtomSTBL * stbl)
+{
+  atom_header_set (&stbl->header, FOURCC_stbl, 0, 0);
+
+  stbl->ctts = NULL;
+  stbl->svmi = NULL;
+
+  atom_stsd_init (&stbl->stsd);
+  atom_stts_init (&stbl->stts);
+  atom_stss_init (&stbl->stss);
+  atom_stsc_init (&stbl->stsc);
+  atom_stsz_init (&stbl->stsz);
+  atom_co64_init (&stbl->stco64);
+}
+
+/* Clear an 'stbl' container and all its children.  The optional
+ * ctts/svmi pointers are reset after freeing so a cleared stbl can be
+ * re-initialised (or cleared again) without a double free. */
+void
+atom_stbl_clear (AtomSTBL * stbl)
+{
+  atom_clear (&stbl->header);
+  atom_stsd_clear (&stbl->stsd);
+  atom_stts_clear (&stbl->stts);
+  atom_stss_clear (&stbl->stss);
+  atom_stsc_clear (&stbl->stsc);
+  atom_stsz_clear (&stbl->stsz);
+  if (stbl->ctts) {
+    atom_ctts_free (stbl->ctts);
+    stbl->ctts = NULL;          /* was left dangling */
+  }
+  if (stbl->svmi) {
+    atom_svmi_free (stbl->svmi);
+    stbl->svmi = NULL;          /* was left dangling */
+  }
+  atom_stco64_clear (&stbl->stco64);
+}
+
+/* Initialise a 'vmhd' (video media header) atom.
+ * The flags' low bit is 1 as required by ISO 14496-12; for the qt (MOV)
+ * flavor the QuickTime dither-copy graphics mode and mid-gray opcolor
+ * are used instead of the ISO zeros. */
+static void
+atom_vmhd_init (AtomVMHD * vmhd, AtomsContext * context)
+{
+ guint8 flags[3] = { 0, 0, 1 };
+
+ atom_full_init (&vmhd->header, FOURCC_vmhd, 0, 0, 0, flags);
+ vmhd->graphics_mode = 0x0;
+ memset (vmhd->opcolor, 0, sizeof (guint16) * 3);
+
+ if (context->flavor == ATOMS_TREE_FLAVOR_MOV) {
+ vmhd->graphics_mode = 0x40;
+ vmhd->opcolor[0] = 32768;
+ vmhd->opcolor[1] = 32768;
+ vmhd->opcolor[2] = 32768;
+ }
+}
+
+/* Allocate and initialise a 'vmhd' atom for the given flavor. */
+static AtomVMHD *
+atom_vmhd_new (AtomsContext * context)
+{
+  AtomVMHD *vmhd;
+
+  vmhd = g_new0 (AtomVMHD, 1);
+  atom_vmhd_init (vmhd, context);
+  return vmhd;
+}
+
+/* Clear and deallocate a 'vmhd' atom. */
+static void
+atom_vmhd_free (AtomVMHD * vmhd)
+{
+  atom_full_clear (&vmhd->header);
+  g_free (vmhd);
+}
+
+/* Initialise an 'smhd' (sound media header) atom with centered balance. */
+static void
+atom_smhd_init (AtomSMHD * smhd)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  smhd->balance = 0;
+  smhd->reserved = 0;
+  atom_full_init (&smhd->header, FOURCC_smhd, 0, 0, 0, no_flags);
+}
+
+/* Allocate and initialise an 'smhd' atom; caller owns the result. */
+static AtomSMHD *
+atom_smhd_new (void)
+{
+  AtomSMHD *smhd;
+
+  smhd = g_new0 (AtomSMHD, 1);
+  atom_smhd_init (smhd);
+  return smhd;
+}
+
+/* Clear and deallocate an 'smhd' atom. */
+static void
+atom_smhd_free (AtomSMHD * smhd)
+{
+  atom_full_clear (&smhd->header);
+  g_free (smhd);
+}
+
+/* Clear and deallocate an 'hmhd' (hint media header) atom. */
+static void
+atom_hmhd_free (AtomHMHD * hmhd)
+{
+  atom_full_clear (&hmhd->header);
+  g_free (hmhd);
+}
+
+/* Initialise an 'hdlr' (handler reference) atom with an empty name.
+ * The handler/component types are filled in by the caller. */
+static void
+atom_hdlr_init (AtomHDLR * hdlr, AtomsContext * context)
+{
+ guint8 flags[3] = { 0, 0, 0 };
+
+ atom_full_init (&hdlr->header, FOURCC_hdlr, 0, 0, 0, flags);
+
+ hdlr->component_type = 0;
+ hdlr->handler_type = 0;
+ hdlr->manufacturer = 0;
+ hdlr->flags = 0;
+ hdlr->flags_mask = 0;
+ /* owned; freed in atom_hdlr_clear() */
+ hdlr->name = g_strdup ("");
+
+ /* Store the flavor to know how to serialize the 'name' string */
+ hdlr->flavor = context->flavor;
+}
+
+/* Allocate and initialise an 'hdlr' atom for the given flavor. */
+static AtomHDLR *
+atom_hdlr_new (AtomsContext * context)
+{
+  AtomHDLR *hdlr;
+
+  hdlr = g_new0 (AtomHDLR, 1);
+  atom_hdlr_init (hdlr, context);
+  return hdlr;
+}
+
+/* Clear an 'hdlr' atom and release its owned name string. */
+static void
+atom_hdlr_clear (AtomHDLR * hdlr)
+{
+  atom_full_clear (&hdlr->header);
+  /* g_free() is NULL-safe, so the previous 'if (hdlr->name)' guard
+   * was redundant; keep the pointer reset to avoid a dangling name */
+  g_free (hdlr->name);
+  hdlr->name = NULL;
+}
+
+/* Clear and deallocate an 'hdlr' atom. */
+static void
+atom_hdlr_free (AtomHDLR * hdlr)
+{
+  atom_hdlr_clear (hdlr);
+  g_free (hdlr);
+}
+
+/* Initialise a 'url ' data-entry atom; flag bit 1 marks the media as
+ * self-contained, so no location string is needed. */
+static void
+atom_url_init (AtomURL * url)
+{
+  guint8 self_contained[3] = { 0, 0, 1 };
+
+  url->location = NULL;
+  atom_full_init (&url->header, FOURCC_url_, 0, 0, 0, self_contained);
+}
+
+/* Clear and deallocate a 'url ' atom, including its location string. */
+static void
+atom_url_free (AtomURL * url)
+{
+  atom_full_clear (&url->header);
+  /* g_free() is NULL-safe; the previous 'if (url->location)' guard and
+   * NULL reset just before freeing the container were redundant */
+  g_free (url->location);
+  g_free (url);
+}
+
+/* Allocate and initialise a 'url ' atom; caller owns the result. */
+static AtomURL *
+atom_url_new (void)
+{
+  AtomURL *url;
+
+  url = g_new0 (AtomURL, 1);
+  atom_url_init (url);
+  return url;
+}
+
+/* Allocate an 'alis' data-entry atom (qt flavor); flag bit 1 marks the
+ * media as self-contained. */
+static AtomFull *
+atom_alis_new (void)
+{
+  guint8 self_contained[3] = { 0, 0, 1 };
+  AtomFull *alis;
+
+  alis = g_new0 (AtomFull, 1);
+  atom_full_init (alis, FOURCC_alis, 0, 0, 0, self_contained);
+  return alis;
+}
+
+/* Initialise a 'dref' (data reference) atom with a single entry:
+ * 'alis' for the qt flavor, 'url ' for ISO base media. */
+static void
+atom_dref_init (AtomDREF * dref, AtomsContext * context)
+{
+ guint8 flags[3] = { 0, 0, 0 };
+
+ atom_full_init (&dref->header, FOURCC_dref, 0, 0, 0, flags);
+
+ /* in either case, alis or url init arranges to set self-contained flag */
+ if (context->flavor == ATOMS_TREE_FLAVOR_MOV) {
+ /* alis dref for qt */
+ AtomFull *alis = atom_alis_new ();
+ dref->entries = g_list_append (dref->entries, alis);
+ } else {
+ /* url for iso spec, as 'alis' not specified there */
+ AtomURL *url = atom_url_new ();
+ dref->entries = g_list_append (dref->entries, url);
+ }
+}
+
+/* Clear a 'dref' atom, freeing each entry by its fourcc type.
+ * Unknown entry types are intentionally leaked rather than freed with
+ * the wrong destructor. */
+static void
+atom_dref_clear (AtomDREF * dref)
+{
+ GList *walker;
+
+ atom_full_clear (&dref->header);
+ walker = dref->entries;
+ while (walker) {
+ GList *aux = walker;
+ Atom *atom = (Atom *) aux->data;
+
+ /* advance before unlinking, as unlink invalidates aux's links */
+ walker = g_list_next (walker);
+ dref->entries = g_list_remove_link (dref->entries, aux);
+ switch (atom->type) {
+ case FOURCC_alis:
+ atom_full_free ((AtomFull *) atom);
+ break;
+ case FOURCC_url_:
+ atom_url_free ((AtomURL *) atom);
+ break;
+ default:
+ /* we do nothing, better leak than crash */
+ break;
+ }
+ g_list_free (aux);
+ }
+}
+
+/* Initialise a 'dinf' container and its mandatory 'dref' child. */
+static void
+atom_dinf_init (AtomDINF * dinf, AtomsContext * context)
+{
+  atom_dref_init (&dinf->dref, context);
+  atom_header_set (&dinf->header, FOURCC_dinf, 0, 0);
+}
+
+/* Clear a 'dinf' container and its embedded 'dref'. */
+static void
+atom_dinf_clear (AtomDINF * dinf)
+{
+  atom_dref_clear (&dinf->dref);
+  atom_clear (&dinf->header);
+}
+
+/* Initialise a 'minf' container.  The media-specific header
+ * (vmhd/smhd/hmhd/gmhd/nmhd) is selected later, so all those pointers
+ * start out NULL. */
+static void
+atom_minf_init (AtomMINF * minf, AtomsContext * context)
+{
+  atom_header_set (&minf->header, FOURCC_minf, 0, 0);
+
+  minf->vmhd = NULL;
+  minf->smhd = NULL;
+  minf->hmhd = NULL;
+  minf->gmhd = NULL;
+  /* nmhd was missing here although atom_minf_clear_handlers() checks it;
+   * initialise it explicitly instead of relying on zeroed allocation */
+  minf->nmhd = NULL;
+
+  if (context->flavor == ATOMS_TREE_FLAVOR_MOV) {
+    /* qt declares a data handler inside minf */
+    minf->hdlr = atom_hdlr_new (context);
+    minf->hdlr->component_type = FOURCC_dhlr;
+    minf->hdlr->handler_type = FOURCC_alis;
+  } else {
+    minf->hdlr = NULL;
+  }
+  atom_dinf_init (&minf->dinf, context);
+  atom_stbl_init (&minf->stbl);
+}
+
+/* Free whichever media-specific header is present and reset its pointer;
+ * g_clear_pointer() does both and is a no-op on NULL. */
+static void
+atom_minf_clear_handlers (AtomMINF * minf)
+{
+  g_clear_pointer (&minf->vmhd, atom_vmhd_free);
+  g_clear_pointer (&minf->smhd, atom_smhd_free);
+  g_clear_pointer (&minf->hmhd, atom_hmhd_free);
+  g_clear_pointer (&minf->gmhd, atom_gmhd_free);
+  g_clear_pointer (&minf->nmhd, atom_nmhd_free);
+}
+
+/* Clear a 'minf' container and all of its children. */
+static void
+atom_minf_clear (AtomMINF * minf)
+{
+  atom_clear (&minf->header);
+  atom_minf_clear_handlers (minf);
+  if (minf->hdlr)
+    atom_hdlr_free (minf->hdlr);
+  atom_dinf_clear (&minf->dinf);
+  atom_stbl_clear (&minf->stbl);
+}
+
+/* Initialise an 'mdhd' (media header) atom with an undefined language. */
+static void
+atom_mdhd_init (AtomMDHD * mdhd)
+{
+ guint8 flags[3] = { 0, 0, 0 };
+
+ atom_full_init (&mdhd->header, FOURCC_mdhd, 0, 0, 0, flags);
+ common_time_info_init (&mdhd->time_info);
+ /* tempting as it may be to simply 0-initialize,
+ * that will have the demuxer (correctly) come up with 'eng' as language
+ * so explicitly specify undefined instead */
+ mdhd->language_code = language_code ("und");
+ mdhd->quality = 0;
+}
+
+/* Clear an 'mdhd' atom (the full-atom header owns no heap data beyond
+ * what atom_full_clear releases). */
+static void
+atom_mdhd_clear (AtomMDHD * mdhd)
+{
+ atom_full_clear (&mdhd->header);
+}
+
+/* Initialise an 'mdia' container and its mdhd/hdlr/minf children. */
+static void
+atom_mdia_init (AtomMDIA * mdia, AtomsContext * context)
+{
+  atom_header_set (&mdia->header, FOURCC_mdia, 0, 0);
+  atom_mdhd_init (&mdia->mdhd);
+  atom_hdlr_init (&mdia->hdlr, context);
+  atom_minf_init (&mdia->minf, context);
+}
+
+/* Clear an 'mdia' container and all of its children. */
+static void
+atom_mdia_clear (AtomMDIA * mdia)
+{
+  atom_minf_clear (&mdia->minf);
+  atom_hdlr_clear (&mdia->hdlr);
+  atom_mdhd_clear (&mdia->mdhd);
+  atom_clear (&mdia->header);
+}
+
+/* Initialise a 'tkhd' (track header) atom: track enabled, in movie and
+ * in preview; identity transformation matrix in 16.16 fixed point
+ * (the w element uses 2.30, hence 0x40000000). */
+static void
+atom_tkhd_init (AtomTKHD * tkhd, AtomsContext * context)
+{
+ /*
+ * flags info
+ * 1 -> track enabled
+ * 2 -> track in movie
+ * 4 -> track in preview
+ */
+ guint8 flags[3] = { 0, 0, 7 };
+
+ atom_full_init (&tkhd->header, FOURCC_tkhd, 0, 0, 0, flags);
+
+ tkhd->creation_time = tkhd->modification_time = atoms_get_current_qt_time ();
+ tkhd->duration = 0;
+ tkhd->track_ID = 0;
+ tkhd->reserved = 0;
+
+ tkhd->reserved2[0] = tkhd->reserved2[1] = 0;
+ tkhd->layer = 0;
+ tkhd->alternate_group = 0;
+ tkhd->volume = 0;
+ tkhd->reserved3 = 0;
+ memset (tkhd->matrix, 0, sizeof (guint32) * 9);
+ tkhd->matrix[0] = 1 << 16;
+ tkhd->matrix[4] = 1 << 16;
+ tkhd->matrix[8] = 16384 << 16;
+ tkhd->width = 0;
+ tkhd->height = 0;
+}
+
+/* Clear a 'tkhd' atom header (no heap-owned fields). */
+static void
+atom_tkhd_clear (AtomTKHD * tkhd)
+{
+ atom_full_clear (&tkhd->header);
+}
+
+/* Initialise an 'ilst' (item list / tag container) atom, empty. */
+static void
+atom_ilst_init (AtomILST * ilst)
+{
+  ilst->entries = NULL;
+  atom_header_set (&ilst->header, FOURCC_ilst, 0, 0);
+}
+
+/* Allocate and initialise an 'ilst' atom; caller owns the result. */
+static AtomILST *
+atom_ilst_new (void)
+{
+  AtomILST *ilst;
+
+  ilst = g_new0 (AtomILST, 1);
+  atom_ilst_init (ilst);
+  return ilst;
+}
+
+/* Release an 'ilst' atom and any tag entries it holds. */
+static void
+atom_ilst_free (AtomILST * ilst)
+{
+  atom_clear (&ilst->header);
+  if (ilst->entries)
+    atom_info_list_free (ilst->entries);
+  g_free (ilst);
+}
+
+/* Initialise a 'meta' atom with an 'mdir' handler; the ilst child is
+ * created lazily by atom_udta_init_metatags(). */
+static void
+atom_meta_init (AtomMETA * meta, AtomsContext * context)
+{
+ guint8 flags[3] = { 0, 0, 0 };
+
+ atom_full_init (&meta->header, FOURCC_meta, 0, 0, 0, flags);
+ atom_hdlr_init (&meta->hdlr, context);
+ /* FIXME (ISOM says this is always 0) */
+ meta->hdlr.component_type = FOURCC_mhlr;
+ meta->hdlr.handler_type = FOURCC_mdir;
+ meta->ilst = NULL;
+}
+
+/* Allocate and initialise a 'meta' atom; caller owns the result. */
+static AtomMETA *
+atom_meta_new (AtomsContext * context)
+{
+  AtomMETA *meta;
+
+  meta = g_new0 (AtomMETA, 1);
+  atom_meta_init (meta, context);
+  return meta;
+}
+
+/* Release a 'meta' atom, its handler and the optional 'ilst' child. */
+static void
+atom_meta_free (AtomMETA * meta)
+{
+  atom_full_clear (&meta->header);
+  atom_hdlr_clear (&meta->hdlr);
+  g_clear_pointer (&meta->ilst, atom_ilst_free);
+  g_free (meta);
+}
+
+/* Lazily create the meta/ilst tag containers inside 'udta'.
+ * 3GPP files carry tags directly in udta, so nothing is created there. */
+static void
+atom_udta_init_metatags (AtomUDTA * udta, AtomsContext * context)
+{
+  if (context->flavor == ATOMS_TREE_FLAVOR_3GP)
+    return;
+
+  if (udta->meta == NULL)
+    udta->meta = atom_meta_new (context);
+  if (udta->meta->ilst == NULL)
+    udta->meta->ilst = atom_ilst_new ();
+}
+
+/* Initialise a 'udta' (user data) atom and its tag containers. */
+static void
+atom_udta_init (AtomUDTA * udta, AtomsContext * context)
+{
+  udta->meta = NULL;
+  udta->context = context;
+  atom_header_set (&udta->header, FOURCC_udta, 0, 0);
+  atom_udta_init_metatags (udta, context);
+}
+
+/* Clear a 'udta' atom, releasing the meta child and any tag entries.
+ * Both pointers are reset afterwards (entries was left dangling). */
+static void
+atom_udta_clear (AtomUDTA * udta)
+{
+  atom_clear (&udta->header);
+  if (udta->meta)
+    atom_meta_free (udta->meta);
+  udta->meta = NULL;
+  if (udta->entries) {
+    atom_info_list_free (udta->entries);
+    udta->entries = NULL;       /* was left dangling after free */
+  }
+}
+
+/* Initialise a 'tref' (track reference) atom of the given reference type. */
+static void
+atom_tref_init (AtomTREF * tref, guint32 reftype)
+{
+  tref->reftype = reftype;
+  atom_array_init (&tref->entries, 128);
+  atom_header_set (&tref->header, FOURCC_tref, 0, 0);
+}
+
+/* Clear a 'tref' atom: entry array, reference type and header. */
+static void
+atom_tref_clear (AtomTREF * tref)
+{
+  atom_array_clear (&tref->entries);
+  tref->reftype = 0;
+  atom_clear (&tref->header);
+}
+
+/* Allocate and initialise a 'tref' atom of the given reference type. */
+AtomTREF *
+atom_tref_new (guint32 reftype)
+{
+  AtomTREF *tref = g_new0 (AtomTREF, 1);
+
+  atom_tref_init (tref, reftype);
+  return tref;
+}
+
+/* Clear and deallocate a 'tref' atom. */
+static void
+atom_tref_free (AtomTREF * tref)
+{
+  atom_tref_clear (tref);
+  g_free (tref);
+}
+
+/* Clear added tags, but keep the context/flavor the same */
+void
+atom_udta_clear_tags (AtomUDTA * udta)
+{
+  if (udta->entries) {
+    atom_info_list_free (udta->entries);
+    udta->entries = NULL;
+  }
+  /* also guard against a meta atom whose ilst was never created
+   * (atom_meta_new() leaves it NULL); the original dereferenced it */
+  if (udta->meta && udta->meta->ilst && udta->meta->ilst->entries) {
+    atom_info_list_free (udta->meta->ilst->entries);
+    udta->meta->ilst->entries = NULL;
+  }
+}
+
+/* Initialise a tag 'data' full atom with zeroed flags. */
+static void
+atom_tag_data_init (AtomTagData * data)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  atom_full_init (&data->header, FOURCC_data, 0, 0, 0, no_flags);
+}
+
+/* Clear a tag 'data' atom, releasing its payload.  The data pointer is
+ * reset alongside datalen (it was left dangling after the free). */
+static void
+atom_tag_data_clear (AtomTagData * data)
+{
+  atom_full_clear (&data->header);
+  g_free (data->data);
+  data->data = NULL;
+  data->datalen = 0;
+}
+
+/*
+ * Fourcc is the tag fourcc
+ * flags will be truncated to 24bits
+ */
+static AtomTag *
+atom_tag_new (guint32 fourcc, guint32 flags_as_uint)
+{
+ AtomTag *tag = g_new0 (AtomTag, 1);
+
+ tag->header.type = fourcc;
+ atom_tag_data_init (&tag->data);
+ atom_full_set_flags_as_uint (&tag->data.header, flags_as_uint);
+ return tag;
+}
+
+/* Release an AtomTag and its embedded 'data' child. */
+static void
+atom_tag_free (AtomTag * tag)
+{
+  atom_tag_data_clear (&tag->data);
+  atom_clear (&tag->header);
+  g_free (tag);
+}
+
+/* Initialise an 'mvhd' (movie header) atom: unity rate/volume in fixed
+ * point, identity matrix, and next_track_id starting at 1. */
+static void
+atom_mvhd_init (AtomMVHD * mvhd)
+{
+ guint8 flags[3] = { 0, 0, 0 };
+
+ atom_full_init (&(mvhd->header), FOURCC_mvhd, sizeof (AtomMVHD), 0, 0, flags);
+
+ common_time_info_init (&mvhd->time_info);
+
+ /* 1.0 in 16.16 and 8.8 fixed point respectively */
+ mvhd->prefered_rate = 1 << 16;
+ mvhd->volume = 1 << 8;
+ mvhd->reserved3 = 0;
+ memset (mvhd->reserved4, 0, sizeof (guint32[2]));
+
+ /* identity matrix; w element is 2.30 fixed point (0x40000000) */
+ memset (mvhd->matrix, 0, sizeof (guint32[9]));
+ mvhd->matrix[0] = 1 << 16;
+ mvhd->matrix[4] = 1 << 16;
+ mvhd->matrix[8] = 16384 << 16;
+
+ mvhd->preview_time = 0;
+ mvhd->preview_duration = 0;
+ mvhd->poster_time = 0;
+ mvhd->selection_time = 0;
+ mvhd->selection_duration = 0;
+ mvhd->current_time = 0;
+
+ mvhd->next_track_id = 1;
+}
+
+/* Clear an 'mvhd' atom header (no heap-owned fields). */
+static void
+atom_mvhd_clear (AtomMVHD * mvhd)
+{
+ atom_full_clear (&mvhd->header);
+}
+
+/* Initialise an 'mehd' (movie extends header) atom; version 1 gives a
+ * 64-bit fragment_duration field. */
+static void
+atom_mehd_init (AtomMEHD * mehd)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  mehd->fragment_duration = 0;
+  atom_full_init (&mehd->header, FOURCC_mehd, 0, 0, 1, no_flags);
+}
+
+/* Initialise an 'mvex' container with its 'mehd' child and no trexs. */
+static void
+atom_mvex_init (AtomMVEX * mvex)
+{
+  mvex->trexs = NULL;
+  atom_mehd_init (&mvex->mehd);
+  atom_header_set (&mvex->header, FOURCC_mvex, 0, 0);
+}
+
+/* Initialise a 'trak' container; the optional edts/tref children start
+ * out absent. */
+static void
+atom_trak_init (AtomTRAK * trak, AtomsContext * context)
+{
+  atom_header_set (&trak->header, FOURCC_trak, 0, 0);
+
+  trak->context = context;
+  trak->edts = NULL;
+  trak->tref = NULL;
+  atom_tkhd_init (&trak->tkhd, context);
+  atom_udta_init (&trak->udta, context);
+  atom_mdia_init (&trak->mdia, context);
+}
+
+/* Allocate and initialise a 'trak' atom; caller owns the result. */
+AtomTRAK *
+atom_trak_new (AtomsContext * context)
+{
+  AtomTRAK *trak;
+
+  trak = g_new0 (AtomTRAK, 1);
+  atom_trak_init (trak, context);
+  return trak;
+}
+
+/* Clear a 'trak' container and all children; the optional edts/tref
+ * pointers are reset after freeing so a cleared trak is not left with
+ * dangling pointers (guards against double free on repeated clear). */
+static void
+atom_trak_clear (AtomTRAK * trak)
+{
+  atom_clear (&trak->header);
+  atom_tkhd_clear (&trak->tkhd);
+  if (trak->edts) {
+    atom_edts_free (trak->edts);
+    trak->edts = NULL;
+  }
+  atom_udta_clear (&trak->udta);
+  atom_mdia_clear (&trak->mdia);
+  if (trak->tref) {
+    atom_tref_free (trak->tref);
+    trak->tref = NULL;
+  }
+}
+
+/* Clear and deallocate a 'trak' atom. */
+static void
+atom_trak_free (AtomTRAK * trak)
+{
+  atom_trak_clear (trak);
+  g_free (trak);
+}
+
+
+/* Initialise a 'moov' container with mvhd/mvex/udta children and no
+ * tracks; the context is copied by value. */
+static void
+atom_moov_init (AtomMOOV * moov, AtomsContext * context)
+{
+  atom_header_set (&moov->header, FOURCC_moov, 0, 0);
+  moov->traks = NULL;
+  moov->context = *context;
+  atom_mvhd_init (&moov->mvhd);
+  atom_mvex_init (&moov->mvex);
+  atom_udta_init (&moov->udta, context);
+}
+
+/* Allocate and initialise a 'moov' atom; caller owns the result. */
+AtomMOOV *
+atom_moov_new (AtomsContext * context)
+{
+  AtomMOOV *moov;
+
+  moov = g_new0 (AtomMOOV, 1);
+  atom_moov_init (moov, context);
+  return moov;
+}
+
+/* Clear and deallocate a 'trex' atom. */
+static void
+atom_trex_free (AtomTREX * trex)
+{
+  atom_full_clear (&trex->header);
+  g_free (trex);
+}
+
+/* Clear an 'mvex' container, freeing every 'trex' child. */
+static void
+atom_mvex_clear (AtomMVEX * mvex)
+{
+  atom_clear (&mvex->header);
+  g_list_free_full (mvex->trexs, (GDestroyNotify) atom_trex_free);
+  mvex->trexs = NULL;
+}
+
+/* Release a 'moov' atom: every track, the user data, the movie-extends
+ * box and finally the container itself. */
+void
+atom_moov_free (AtomMOOV * moov)
+{
+  atom_clear (&moov->header);
+  atom_mvhd_clear (&moov->mvhd);
+
+  g_list_free_full (moov->traks, (GDestroyNotify) atom_trak_free);
+  moov->traks = NULL;
+
+  atom_udta_clear (&moov->udta);
+  atom_mvex_clear (&moov->mvex);
+
+  g_free (moov);
+}
+
+/* -- end of init / free -- */
+
+/* -- copy data functions -- */
+
+/* Accessor for a full atom's version byte (selects 32- vs 64-bit
+ * serialization in the copy-data functions below). */
+static guint8
+atom_full_get_version (AtomFull * full)
+{
+ return full->version;
+}
+
+/* Serialise a TimeInfo.  With trunc_to_32 the 64-bit time fields are
+ * written as 32-bit (version-0 atoms); otherwise full 64-bit (version 1).
+ * Returns the number of bytes written. */
+static guint64
+common_time_info_copy_data (TimeInfo * ti, gboolean trunc_to_32,
+ guint8 ** buffer, guint64 * size, guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (trunc_to_32) {
+ prop_copy_uint32 ((guint32) ti->creation_time, buffer, size, offset);
+ prop_copy_uint32 ((guint32) ti->modification_time, buffer, size, offset);
+ prop_copy_uint32 (ti->timescale, buffer, size, offset);
+ prop_copy_uint32 ((guint32) ti->duration, buffer, size, offset);
+ } else {
+ prop_copy_uint64 (ti->creation_time, buffer, size, offset);
+ prop_copy_uint64 (ti->modification_time, buffer, size, offset);
+ prop_copy_uint32 (ti->timescale, buffer, size, offset);
+ prop_copy_uint64 (ti->duration, buffer, size, offset);
+ }
+ return *offset - original_offset;
+}
+
+/* Patch the 32-bit size field of an atom whose payload has just been
+ * written, using the distance from atom_pos to the current offset. */
+static void
+atom_write_size (guint8 ** buffer, guint64 * size, guint64 * offset,
+ guint64 atom_pos)
+{
+ /* this only works for non-extended atom size, which is OK
+ * (though it could be made to do mem_move, etc and write extended size) */
+ prop_copy_uint32 (*offset - atom_pos, buffer, size, &atom_pos);
+}
+
+/* Serialise an atom as empty (a lone zero size word); the 'atom'
+ * parameter is deliberately unused but keeps the copy-func signature. */
+static guint64
+atom_copy_empty (Atom * atom, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ prop_copy_uint32 (0, buffer, size, offset);
+
+ return *offset - original_offset;
+}
+
+/* Serialise an atom header (size + fourcc, plus the 64-bit extended
+ * size when size == 1).  Returns bytes written, 0 on failure. */
+guint64
+atom_copy_data (Atom * atom, guint8 ** buffer, guint64 * size, guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ /* copies type and size */
+ prop_copy_uint32 (atom->size, buffer, size, offset);
+ prop_copy_fourcc (atom->type, buffer, size, offset);
+
+ /* extended size needed */
+ if (atom->size == 1) {
+ /* really should not happen other than with mdat atom;
+ * would be a problem for size (re)write code, not to mention memory */
+ g_return_val_if_fail (atom->type == FOURCC_mdat, 0);
+ prop_copy_uint64 (atom->extended_size, buffer, size, offset);
+ }
+
+ return *offset - original_offset;
+}
+
+/* Serialise a full atom (header + version + 24-bit flags) and patch the
+ * size field.  Returns bytes written, 0 on failure. */
+static guint64
+atom_full_copy_data (AtomFull * atom, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&atom->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint8 (atom->version, buffer, size, offset);
+ prop_copy_uint8_array (atom->flags, 3, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+/* Serialise every AtomInfo in the list via its copy_data_func.
+ * Returns total bytes written, or 0 as soon as any child fails. */
+static guint64
+atom_info_list_copy_data (GList * ai, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  GList *walker;
+
+  for (walker = ai; walker != NULL; walker = g_list_next (walker)) {
+    AtomInfo *info = (AtomInfo *) walker->data;
+
+    if (!info->copy_data_func (info->atom, buffer, size, offset))
+      return 0;
+  }
+
+  return *offset - original_offset;
+}
+
+/* Serialise an AtomData (plain header + raw payload) and patch the
+ * size field.  Returns bytes written, 0 on failure. */
+static guint64
+atom_data_copy_data (AtomData * data, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&data->header, buffer, size, offset)) {
+ return 0;
+ }
+ if (data->datalen)
+ prop_copy_uint8_array (data->data, data->datalen, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+/* Serialise a 'uuid' atom: header, 16-byte UUID, then raw payload.
+ * Returns bytes written, 0 on failure. */
+static guint64
+atom_uuid_copy_data (AtomUUID * uuid, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&uuid->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_uint8_array (uuid->uuid, 16, buffer, size, offset);
+ if (uuid->datalen)
+ prop_copy_uint8_array (uuid->data, uuid->datalen, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+/* Serialise an 'ftyp' atom: major brand, version and the compatible
+ * brand list.  Returns bytes written, 0 on failure. */
+guint64
+atom_ftyp_copy_data (AtomFTYP * ftyp, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&ftyp->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_fourcc (ftyp->major_brand, buffer, size, offset);
+ prop_copy_uint32 (ftyp->version, buffer, size, offset);
+
+ prop_copy_fourcc_array (ftyp->compatible_brands, ftyp->compatible_brands_size,
+ buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+/* Serialise an 'mvhd' atom; the header version selects 32- vs 64-bit
+ * time fields.  Returns bytes written, 0 on failure (including an
+ * unknown version, in which case *offset is restored). */
+guint64
+atom_mvhd_copy_data (AtomMVHD * atom, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint8 version;
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&(atom->header), buffer, size, offset)) {
+ return 0;
+ }
+
+ version = atom_full_get_version (&(atom->header));
+ if (version == 0) {
+ common_time_info_copy_data (&atom->time_info, TRUE, buffer, size, offset);
+ } else if (version == 1) {
+ common_time_info_copy_data (&atom->time_info, FALSE, buffer, size, offset);
+ } else {
+ *offset = original_offset;
+ return 0;
+ }
+
+ prop_copy_uint32 (atom->prefered_rate, buffer, size, offset);
+ prop_copy_uint16 (atom->volume, buffer, size, offset);
+ prop_copy_uint16 (atom->reserved3, buffer, size, offset);
+ prop_copy_uint32_array (atom->reserved4, 2, buffer, size, offset);
+ prop_copy_uint32_array (atom->matrix, 9, buffer, size, offset);
+ prop_copy_uint32 (atom->preview_time, buffer, size, offset);
+ prop_copy_uint32 (atom->preview_duration, buffer, size, offset);
+ prop_copy_uint32 (atom->poster_time, buffer, size, offset);
+ prop_copy_uint32 (atom->selection_time, buffer, size, offset);
+ prop_copy_uint32 (atom->selection_duration, buffer, size, offset);
+ prop_copy_uint32 (atom->current_time, buffer, size, offset);
+
+ prop_copy_uint32 (atom->next_track_id, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+/* Serialise a 'tkhd' atom; version 0 writes 32-bit times/duration,
+ * any other version the 64-bit layout.  Returns bytes written, 0 on
+ * failure. */
+static guint64
+atom_tkhd_copy_data (AtomTKHD * tkhd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&tkhd->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ if (atom_full_get_version (&tkhd->header) == 0) {
+ prop_copy_uint32 ((guint32) tkhd->creation_time, buffer, size, offset);
+ prop_copy_uint32 ((guint32) tkhd->modification_time, buffer, size, offset);
+ prop_copy_uint32 (tkhd->track_ID, buffer, size, offset);
+ prop_copy_uint32 (tkhd->reserved, buffer, size, offset);
+ prop_copy_uint32 ((guint32) tkhd->duration, buffer, size, offset);
+ } else {
+ prop_copy_uint64 (tkhd->creation_time, buffer, size, offset);
+ prop_copy_uint64 (tkhd->modification_time, buffer, size, offset);
+ prop_copy_uint32 (tkhd->track_ID, buffer, size, offset);
+ prop_copy_uint32 (tkhd->reserved, buffer, size, offset);
+ prop_copy_uint64 (tkhd->duration, buffer, size, offset);
+ }
+
+ prop_copy_uint32_array (tkhd->reserved2, 2, buffer, size, offset);
+ prop_copy_uint16 (tkhd->layer, buffer, size, offset);
+ prop_copy_uint16 (tkhd->alternate_group, buffer, size, offset);
+ prop_copy_uint16 (tkhd->volume, buffer, size, offset);
+ prop_copy_uint16 (tkhd->reserved3, buffer, size, offset);
+ prop_copy_uint32_array (tkhd->matrix, 9, buffer, size, offset);
+
+ prop_copy_uint32 (tkhd->width, buffer, size, offset);
+ prop_copy_uint32 (tkhd->height, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+/* Serialise an 'hdlr' atom.  The name string is written as a Pascal
+ * (length-prefixed) string for the qt flavor and NUL-terminated for
+ * ISO base media.  Returns bytes written, 0 on failure. */
+static guint64
+atom_hdlr_copy_data (AtomHDLR * hdlr, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&hdlr->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_fourcc (hdlr->component_type, buffer, size, offset);
+ prop_copy_fourcc (hdlr->handler_type, buffer, size, offset);
+ prop_copy_fourcc (hdlr->manufacturer, buffer, size, offset);
+ prop_copy_uint32 (hdlr->flags, buffer, size, offset);
+ prop_copy_uint32 (hdlr->flags_mask, buffer, size, offset);
+
+ if (hdlr->flavor == ATOMS_TREE_FLAVOR_MOV) {
+ prop_copy_size_string ((guint8 *) hdlr->name, strlen (hdlr->name), buffer,
+ size, offset);
+ } else {
+ /* assume isomedia base is more generic and use null terminated */
+ prop_copy_null_terminated_string (hdlr->name, buffer, size, offset);
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_vmhd_copy_data (AtomVMHD * vmhd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&vmhd->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_uint16 (vmhd->graphics_mode, buffer, size, offset);
+ prop_copy_uint16_array (vmhd->opcolor, 3, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static guint64
+atom_smhd_copy_data (AtomSMHD * smhd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&smhd->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_uint16 (smhd->balance, buffer, size, offset);
+ prop_copy_uint16 (smhd->reserved, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static guint64
+atom_hmhd_copy_data (AtomHMHD * hmhd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&hmhd->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_uint16 (hmhd->max_pdu_size, buffer, size, offset);
+ prop_copy_uint16 (hmhd->avg_pdu_size, buffer, size, offset);
+ prop_copy_uint32 (hmhd->max_bitrate, buffer, size, offset);
+ prop_copy_uint32 (hmhd->avg_bitrate, buffer, size, offset);
+ prop_copy_uint32 (hmhd->sliding_avg_bitrate, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static guint64
+atom_tcmi_copy_data (AtomTCMI * tcmi, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&tcmi->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_uint16 (tcmi->text_font, buffer, size, offset);
+ prop_copy_uint16 (tcmi->text_face, buffer, size, offset);
+ prop_copy_uint16 (tcmi->text_size, buffer, size, offset);
+ prop_copy_uint16 (tcmi->text_color[0], buffer, size, offset);
+ prop_copy_uint16 (tcmi->text_color[1], buffer, size, offset);
+ prop_copy_uint16 (tcmi->text_color[2], buffer, size, offset);
+ prop_copy_uint16 (tcmi->bg_color[0], buffer, size, offset);
+ prop_copy_uint16 (tcmi->bg_color[1], buffer, size, offset);
+ prop_copy_uint16 (tcmi->bg_color[2], buffer, size, offset);
+ /* reserved */
+ prop_copy_uint16 (0, buffer, size, offset);
+ prop_copy_size_string ((guint8 *) tcmi->font_name, strlen (tcmi->font_name),
+ buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static guint64
+atom_tmcd_copy_data (AtomTMCD * tmcd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&tmcd->header, buffer, size, offset)) {
+ return 0;
+ }
+ if (!atom_tcmi_copy_data (&tmcd->tcmi, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static guint64
+atom_gmin_copy_data (AtomGMIN * gmin, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&gmin->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_uint16 (gmin->graphics_mode, buffer, size, offset);
+ prop_copy_uint16 (gmin->opcolor[0], buffer, size, offset);
+ prop_copy_uint16 (gmin->opcolor[1], buffer, size, offset);
+ prop_copy_uint16 (gmin->opcolor[2], buffer, size, offset);
+ prop_copy_uint8 (gmin->balance, buffer, size, offset);
+ /* reserved */
+ prop_copy_uint8 (0, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static guint64
+atom_gmhd_copy_data (AtomGMHD * gmhd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&gmhd->header, buffer, size, offset)) {
+ return 0;
+ }
+ if (!atom_gmin_copy_data (&gmhd->gmin, buffer, size, offset)) {
+ return 0;
+ }
+ if (gmhd->tmcd && !atom_tmcd_copy_data (gmhd->tmcd, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static guint64
+atom_nmhd_copy_data (AtomNMHD * nmhd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&nmhd->header, buffer, size, offset)) {
+ return 0;
+ }
+ prop_copy_uint32 (nmhd->flags, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+static gboolean
+atom_url_same_file_flag (AtomURL * url)
+{
+ return (url->header.flags[2] & 0x1) == 1;
+}
+
+static guint64
+atom_url_copy_data (AtomURL * url, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&url->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ if (!atom_url_same_file_flag (url)) {
+ prop_copy_null_terminated_string (url->location, buffer, size, offset);
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+guint64
+atom_stts_copy_data (AtomSTTS * stts, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+ guint i;
+
+ if (!atom_full_copy_data (&stts->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint32 (atom_array_get_len (&stts->entries), buffer, size, offset);
+ /* minimize realloc */
+ prop_copy_ensure_buffer (buffer, size, offset,
+ 8 * atom_array_get_len (&stts->entries));
+ for (i = 0; i < atom_array_get_len (&stts->entries); i++) {
+ STTSEntry *entry = &atom_array_index (&stts->entries, i);
+
+ prop_copy_uint32 (entry->sample_count, buffer, size, offset);
+ prop_copy_int32 (entry->sample_delta, buffer, size, offset);
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
/* Serialize the common SampleEntry prefix shared by all stsd entries:
 * the plain atom header, 6 reserved bytes and the data_reference_index.
 * Note: deliberately does NOT call atom_write_size() -- callers append
 * their type-specific fields afterwards and patch the final size
 * themselves.  Returns the number of bytes written, or 0 on error. */
static guint64
atom_sample_entry_copy_data (SampleTableEntry * se, guint8 ** buffer,
    guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!atom_copy_data (&se->header, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint8_array (se->reserved, 6, buffer, size, offset);
  prop_copy_uint16 (se->data_reference_index, buffer, size, offset);

  return *offset - original_offset;
}
+
+static guint64
+atom_esds_copy_data (AtomESDS * esds, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&esds->header, buffer, size, offset)) {
+ return 0;
+ }
+ if (!desc_es_descriptor_copy_data (&esds->es, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_frma_copy_data (AtomFRMA * frma, guint8 ** buffer,
+ guint64 * size, guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&(frma->header), buffer, size, offset))
+ return 0;
+
+ prop_copy_fourcc (frma->media_type, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_hint_sample_entry_copy_data (AtomHintSampleEntry * hse, guint8 ** buffer,
+ guint64 * size, guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_sample_entry_copy_data (&hse->se, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint32 (hse->size, buffer, size, offset);
+ prop_copy_uint8_array (hse->data, hse->size, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
/* Serialize an 'mp4a' audio sample entry.  Field order follows the
 * QuickTime/ISO AudioSampleEntry wire format and must not be changed.
 * The version==1 extension block adds the four per-packet/per-frame
 * fields; any extension atoms (e.g. esds) are appended last.  Finally
 * the entry size is patched.  Returns bytes written, 0 on error. */
static guint64
sample_entry_mp4a_copy_data (SampleTableEntryMP4A * mp4a, guint8 ** buffer,
    guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!atom_sample_entry_copy_data (&mp4a->se, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint16 (mp4a->version, buffer, size, offset);
  prop_copy_uint16 (mp4a->revision_level, buffer, size, offset);
  prop_copy_uint32 (mp4a->vendor, buffer, size, offset);
  prop_copy_uint16 (mp4a->channels, buffer, size, offset);
  prop_copy_uint16 (mp4a->sample_size, buffer, size, offset);
  prop_copy_uint16 (mp4a->compression_id, buffer, size, offset);
  prop_copy_uint16 (mp4a->packet_size, buffer, size, offset);
  prop_copy_uint32 (mp4a->sample_rate, buffer, size, offset);

  /* this should always be 0 for mp4 flavor */
  if (mp4a->version == 1) {
    prop_copy_uint32 (mp4a->samples_per_packet, buffer, size, offset);
    prop_copy_uint32 (mp4a->bytes_per_packet, buffer, size, offset);
    prop_copy_uint32 (mp4a->bytes_per_frame, buffer, size, offset);
    prop_copy_uint32 (mp4a->bytes_per_sample, buffer, size, offset);
  }

  if (mp4a->extension_atoms) {
    if (!atom_info_list_copy_data (mp4a->extension_atoms, buffer, size, offset))
      return 0;
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
/* Serialize an 'mp4v'-style video sample entry.  Field order follows
 * the VisualSampleEntry wire format: version/vendor info, quality,
 * dimensions, resolution, frame count, a fixed 32-byte compressor name,
 * depth and color table id, followed by any extension atoms (e.g. esds,
 * avcC).  The entry size is patched at the end.  Returns bytes written,
 * 0 on error. */
static guint64
sample_entry_mp4v_copy_data (SampleTableEntryMP4V * mp4v, guint8 ** buffer,
    guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!atom_sample_entry_copy_data (&mp4v->se, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint16 (mp4v->version, buffer, size, offset);
  prop_copy_uint16 (mp4v->revision_level, buffer, size, offset);
  prop_copy_fourcc (mp4v->vendor, buffer, size, offset);
  prop_copy_uint32 (mp4v->temporal_quality, buffer, size, offset);
  prop_copy_uint32 (mp4v->spatial_quality, buffer, size, offset);

  prop_copy_uint16 (mp4v->width, buffer, size, offset);
  prop_copy_uint16 (mp4v->height, buffer, size, offset);

  prop_copy_uint32 (mp4v->horizontal_resolution, buffer, size, offset);
  prop_copy_uint32 (mp4v->vertical_resolution, buffer, size, offset);
  prop_copy_uint32 (mp4v->datasize, buffer, size, offset);

  prop_copy_uint16 (mp4v->frame_count, buffer, size, offset);

  /* compressor name is a fixed-size 32-byte field on the wire */
  prop_copy_fixed_size_string ((guint8 *) mp4v->compressor, 32, buffer, size,
      offset);

  prop_copy_uint16 (mp4v->depth, buffer, size, offset);
  prop_copy_uint16 (mp4v->color_table_id, buffer, size, offset);

  /* extra atoms */
  if (mp4v->extension_atoms &&
      !atom_info_list_copy_data (mp4v->extension_atoms, buffer, size, offset))
    return 0;

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
/* Serialize a 'tx3g' (3GPP timed text) sample entry.  After the common
 * prefix it writes display flags, fixed default style values, the text
 * box, font settings, and a mandatory embedded 'ftab' font-table atom
 * with a single hard-coded "Serif" font (18 bytes total).  The entry
 * size is patched at the end.  Returns bytes written, 0 on error. */
static guint64
sample_entry_tx3g_copy_data (SampleTableEntryTX3G * tx3g, guint8 ** buffer,
    guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!atom_sample_entry_copy_data (&tx3g->se, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint32 (tx3g->display_flags, buffer, size, offset);

  /* reserved */
  prop_copy_uint8 (1, buffer, size, offset);
  prop_copy_uint8 (-1, buffer, size, offset);
  prop_copy_uint32 (0, buffer, size, offset);

  prop_copy_uint64 (tx3g->default_text_box, buffer, size, offset);

  /* reserved */
  prop_copy_uint32 (0, buffer, size, offset);

  prop_copy_uint16 (tx3g->font_id, buffer, size, offset);
  prop_copy_uint8 (tx3g->font_face, buffer, size, offset);
  prop_copy_uint8 (tx3g->font_size, buffer, size, offset);
  prop_copy_uint32 (tx3g->foreground_color_rgba, buffer, size, offset);

  /* it must have a fonttable atom */
  {
    Atom atom;

    /* 18 = 8 (atom header) + 2 (count) + 2 (font id) + 1 + 5 ("Serif") */
    atom_header_set (&atom, FOURCC_ftab, 18, 0);
    if (!atom_copy_data (&atom, buffer, size, offset))
      return 0;
    prop_copy_uint16 (1, buffer, size, offset);   /* Count must be 1 */
    prop_copy_uint16 (1, buffer, size, offset);   /* Font id: 1 */
    prop_copy_size_string ((guint8 *) "Serif", 5, buffer, size, offset);
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
/* Serialize a 'tmcd' (timecode) sample entry: timecode flags, timescale,
 * frame duration and frame count, followed by a nested 'name' atom that
 * carries a length-prefixed, language-tagged source name.  Both the
 * nested atom's size and the entry's own size are patched in place.
 * Returns bytes written, 0 on error. */
static guint64
sample_entry_tmcd_copy_data (SampleTableEntryTMCD * tmcd, guint8 ** buffer,
    guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!atom_sample_entry_copy_data (&tmcd->se, buffer, size, offset)) {
    return 0;
  }

  /* reserved */
  prop_copy_uint32 (0, buffer, size, offset);

  prop_copy_uint32 (tmcd->tc_flags, buffer, size, offset);
  prop_copy_uint32 (tmcd->timescale, buffer, size, offset);
  prop_copy_uint32 (tmcd->frame_duration, buffer, size, offset);
  prop_copy_uint8 (tmcd->n_frames, buffer, size, offset);

  /* reserved */
  prop_copy_uint8 (0, buffer, size, offset);
  {
    Atom atom;
    guint64 name_offset = *offset;

    /* header size 0 is a placeholder; patched below via atom_write_size */
    atom_header_set (&atom, FOURCC_name, 0, 0);
    if (!atom_copy_data (&atom, buffer, size, offset))
      return 0;
    prop_copy_uint16 (strlen (tmcd->name.name), buffer, size, offset);
    prop_copy_uint16 (tmcd->name.language_code, buffer, size, offset);
    prop_copy_fixed_size_string ((guint8 *) tmcd->name.name,
        strlen (tmcd->name.name), buffer, size, offset);

    atom_write_size (buffer, size, offset, name_offset);
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
+static guint64
+sample_entry_generic_copy_data (SampleTableEntry * entry, guint8 ** buffer,
+ guint64 * size, guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_sample_entry_copy_data (entry, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
/* Serialize an 'stsz' (sample size) atom.  A non-zero sample_size means
 * all samples share that size and the per-sample table is omitted; when
 * it is 0 the table with table_size entries is written out.  Returns
 * bytes written, 0 on error. */
guint64
atom_stsz_copy_data (AtomSTSZ * stsz, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;
  guint i;

  if (!atom_full_copy_data (&stsz->header, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint32 (stsz->sample_size, buffer, size, offset);
  prop_copy_uint32 (stsz->table_size, buffer, size, offset);
  if (stsz->sample_size == 0) {
    /* minimize realloc */
    prop_copy_ensure_buffer (buffer, size, offset, 4 * stsz->table_size);
    /* entry count must match sample count */
    g_assert (atom_array_get_len (&stsz->entries) == stsz->table_size);
    for (i = 0; i < atom_array_get_len (&stsz->entries); i++) {
      prop_copy_uint32 (atom_array_index (&stsz->entries, i), buffer, size,
          offset);
    }
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
/* Serialize an 'stsc' (sample-to-chunk) atom.  If the last two entries
 * are identical (same samples_per_chunk and sample_description_index)
 * they are temporarily merged by shrinking the array length by one for
 * the duration of the write, and restored afterwards -- see the comment
 * below for why the merge cannot be made permanent.  Returns bytes
 * written, 0 on error. */
guint64
atom_stsc_copy_data (AtomSTSC * stsc, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;
  guint i, len;
  gboolean last_entries_merged = FALSE;

  if (!atom_full_copy_data (&stsc->header, buffer, size, offset)) {
    return 0;
  }

  /* Last two entries might be the same size here as we only merge once the
   * next chunk is started */
  if ((len = atom_array_get_len (&stsc->entries)) > 1) {
    STSCEntry *prev_entry = &atom_array_index (&stsc->entries, len - 2);
    STSCEntry *current_entry = &atom_array_index (&stsc->entries, len - 1);
    if (prev_entry->samples_per_chunk == current_entry->samples_per_chunk &&
        prev_entry->sample_description_index ==
        current_entry->sample_description_index) {
      /* drop the duplicate tail entry for this serialization only */
      stsc->entries.len--;
      last_entries_merged = TRUE;
    }
  }

  prop_copy_uint32 (atom_array_get_len (&stsc->entries), buffer, size, offset);
  /* minimize realloc */
  prop_copy_ensure_buffer (buffer, size, offset,
      12 * atom_array_get_len (&stsc->entries));

  for (i = 0; i < atom_array_get_len (&stsc->entries); i++) {
    STSCEntry *entry = &atom_array_index (&stsc->entries, i);

    prop_copy_uint32 (entry->first_chunk, buffer, size, offset);
    prop_copy_uint32 (entry->samples_per_chunk, buffer, size, offset);
    prop_copy_uint32 (entry->sample_description_index, buffer, size, offset);
  }

  atom_write_size (buffer, size, offset, original_offset);

  /* Need to add the last entry again as in "robust" muxing mode we will most
   * likely add new samples to the last chunk, thus making the
   * samples_per_chunk in the last one different to the second to last one,
   * and thus making it wrong to keep them merged
   */
  if (last_entries_merged)
    stsc->entries.len++;

  return *offset - original_offset;
}
+
+guint64
+atom_ctts_copy_data (AtomCTTS * ctts, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+ guint i;
+
+ if (!atom_full_copy_data (&ctts->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint32 (atom_array_get_len (&ctts->entries), buffer, size, offset);
+ /* minimize realloc */
+ prop_copy_ensure_buffer (buffer, size, offset,
+ 8 * atom_array_get_len (&ctts->entries));
+ for (i = 0; i < atom_array_get_len (&ctts->entries); i++) {
+ CTTSEntry *entry = &atom_array_index (&ctts->entries, i);
+
+ prop_copy_uint32 (entry->samplecount, buffer, size, offset);
+ prop_copy_uint32 (entry->sampleoffset, buffer, size, offset);
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+guint64
+atom_svmi_copy_data (AtomSVMI * svmi, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&svmi->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint8 (svmi->stereoscopic_composition_type, buffer, size, offset);
+ prop_copy_uint8 (svmi->is_left_first ? 1 : 0, buffer, size, offset);
+ /* stereo-mono change count */
+ prop_copy_uint32 (0, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
/* Serialize the chunk-offset table, choosing between a 32-bit 'stco'
 * and a 64-bit 'co64' atom depending on whether any absolute offset
 * (mdat-relative entry + chunk_offset base) exceeds 32 bits.  The
 * header's fourcc is rewritten in place before copying.  Returns bytes
 * written, 0 on error. */
guint64
atom_stco64_copy_data (AtomSTCO64 * stco64, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;
  guint i;

  /* If any (mdat-relative) offset will by over 32-bits when converted to an
   * absolute file offset then we need to write a 64-bit co64 atom, otherwise
   * we can write a smaller stco 32-bit table */
  gboolean write_stco64 =
      (stco64->max_offset + stco64->chunk_offset) > G_MAXUINT32;

  if (write_stco64)
    stco64->header.header.type = FOURCC_co64;
  else
    stco64->header.header.type = FOURCC_stco;

  if (!atom_full_copy_data (&stco64->header, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint32 (atom_array_get_len (&stco64->entries), buffer, size,
      offset);

  /* minimize realloc */
  prop_copy_ensure_buffer (buffer, size, offset,
      8 * atom_array_get_len (&stco64->entries));
  for (i = 0; i < atom_array_get_len (&stco64->entries); i++) {
    /* stored entries are mdat-relative; rebase to absolute file offsets */
    guint64 value =
        atom_array_index (&stco64->entries, i) + stco64->chunk_offset;

    if (write_stco64) {
      prop_copy_uint64 (value, buffer, size, offset);
    } else {
      prop_copy_uint32 ((guint32) value, buffer, size, offset);
    }
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
/* Serialize an 'stss' (sync sample) atom listing keyframe sample
 * numbers.  An empty table writes nothing and returns 0, which callers
 * must not treat as an error -- they are expected to check the entry
 * count before calling (see atom_stbl_copy_data).  Returns bytes
 * written, 0 when skipped or on error. */
guint64
atom_stss_copy_data (AtomSTSS * stss, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;
  guint i;

  if (atom_array_get_len (&stss->entries) == 0) {
    /* FIXME not needing this atom might be confused with error while copying */
    return 0;
  }

  if (!atom_full_copy_data (&stss->header, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint32 (atom_array_get_len (&stss->entries), buffer, size, offset);
  /* minimize realloc */
  prop_copy_ensure_buffer (buffer, size, offset,
      4 * atom_array_get_len (&stss->entries));
  for (i = 0; i < atom_array_get_len (&stss->entries); i++) {
    prop_copy_uint32 (atom_array_index (&stss->entries, i), buffer, size,
        offset);
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
/* Serialize an 'stsd' (sample description) atom.  Entries are walked in
 * reverse list order (the list is prepended elsewhere, so this restores
 * insertion order) and dispatched to the right serializer first by
 * fourcc, then by the entry's media kind for non-mp4a/mp4v types.
 * Returns bytes written, 0 on error. */
static guint64
atom_stsd_copy_data (AtomSTSD * stsd, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;
  GList *walker;

  if (!atom_full_copy_data (&stsd->header, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint32 (stsd->n_entries, buffer, size, offset);

  for (walker = g_list_last (stsd->entries); walker != NULL;
      walker = g_list_previous (walker)) {
    SampleTableEntry *se = (SampleTableEntry *) walker->data;

    switch (((Atom *) walker->data)->type) {
      case FOURCC_mp4a:
        if (!sample_entry_mp4a_copy_data ((SampleTableEntryMP4A *) walker->data,
                buffer, size, offset)) {
          return 0;
        }
        break;
      case FOURCC_mp4v:
        if (!sample_entry_mp4v_copy_data ((SampleTableEntryMP4V *) walker->data,
                buffer, size, offset)) {
          return 0;
        }
        break;
      default:
        /* fall back to the media kind for all other fourccs */
        if (se->kind == VIDEO) {
          if (!sample_entry_mp4v_copy_data ((SampleTableEntryMP4V *)
                  walker->data, buffer, size, offset)) {
            return 0;
          }
        } else if (se->kind == AUDIO) {
          if (!sample_entry_mp4a_copy_data ((SampleTableEntryMP4A *)
                  walker->data, buffer, size, offset)) {
            return 0;
          }
        } else if (se->kind == SUBTITLE) {
          if (!sample_entry_tx3g_copy_data ((SampleTableEntryTX3G *)
                  walker->data, buffer, size, offset)) {
            return 0;
          }
        } else if (se->kind == TIMECODE) {
          if (!sample_entry_tmcd_copy_data ((SampleTableEntryTMCD *)
                  walker->data, buffer, size, offset)) {
            return 0;
          }
        } else if (se->kind == CLOSEDCAPTION) {
          if (!sample_entry_generic_copy_data ((SampleTableEntry *)
                  walker->data, buffer, size, offset)) {
            return 0;
          }
        } else {
          /* unknown kinds are treated as hint sample entries */
          if (!atom_hint_sample_entry_copy_data (
                  (AtomHintSampleEntry *) walker->data, buffer, size, offset)) {
            return 0;
          }
        }
        break;
    }
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
+static guint64
+atom_stbl_copy_data (AtomSTBL * stbl, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&stbl->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ if (!atom_stsd_copy_data (&stbl->stsd, buffer, size, offset)) {
+ return 0;
+ }
+ if (!atom_stts_copy_data (&stbl->stts, buffer, size, offset)) {
+ return 0;
+ }
+ /* this atom is optional, so let's check if we need it
+ * (to avoid false error) */
+ if (atom_array_get_len (&stbl->stss.entries)) {
+ if (!atom_stss_copy_data (&stbl->stss, buffer, size, offset)) {
+ return 0;
+ }
+ }
+
+ if (!atom_stsc_copy_data (&stbl->stsc, buffer, size, offset)) {
+ return 0;
+ }
+ if (!atom_stsz_copy_data (&stbl->stsz, buffer, size, offset)) {
+ return 0;
+ }
+ if (stbl->ctts && stbl->ctts->do_pts) {
+ if (!atom_ctts_copy_data (stbl->ctts, buffer, size, offset)) {
+ return 0;
+ }
+ }
+ if (stbl->svmi) {
+ if (!atom_svmi_copy_data (stbl->svmi, buffer, size, offset)) {
+ return 0;
+ }
+ }
+ if (!atom_stco64_copy_data (&stbl->stco64, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
+
/* Serialize a 'dref' (data reference) atom: entry count followed by each
 * entry, which must be either a 'url ' or an 'alis' atom -- anything
 * else aborts via g_error().  Returns bytes written, 0 on error. */
static guint64
atom_dref_copy_data (AtomDREF * dref, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;
  GList *walker;

  if (!atom_full_copy_data (&dref->header, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint32 (g_list_length (dref->entries), buffer, size, offset);

  walker = dref->entries;
  while (walker != NULL) {
    Atom *atom = (Atom *) walker->data;

    if (atom->type == FOURCC_url_) {
      if (!atom_url_copy_data ((AtomURL *) atom, buffer, size, offset))
        return 0;
    } else if (atom->type == FOURCC_alis) {
      /* 'alis' entries carry no payload beyond the full atom header */
      if (!atom_full_copy_data ((AtomFull *) atom, buffer, size, offset))
        return 0;
    } else {
      g_error ("Unsupported atom used inside dref atom");
    }
    walker = g_list_next (walker);
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
+static guint64
+atom_dinf_copy_data (AtomDINF * dinf, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&dinf->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ if (!atom_dref_copy_data (&dinf->dref, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return original_offset - *offset;
+}
+
/* Serialize a 'minf' (media information) container.  Exactly one of the
 * mutually-exclusive media headers (vmhd/smhd/hmhd/gmhd/nmhd) is
 * written -- the first non-NULL pointer in that chain wins -- followed
 * by an optional hdlr and the mandatory dinf and stbl children.
 * Returns bytes written, 0 on error. */
static guint64
atom_minf_copy_data (AtomMINF * minf, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!atom_copy_data (&minf->header, buffer, size, offset)) {
    return 0;
  }

  if (minf->vmhd) {
    if (!atom_vmhd_copy_data (minf->vmhd, buffer, size, offset)) {
      return 0;
    }
  } else if (minf->smhd) {
    if (!atom_smhd_copy_data (minf->smhd, buffer, size, offset)) {
      return 0;
    }
  } else if (minf->hmhd) {
    if (!atom_hmhd_copy_data (minf->hmhd, buffer, size, offset)) {
      return 0;
    }
  } else if (minf->gmhd) {
    if (!atom_gmhd_copy_data (minf->gmhd, buffer, size, offset)) {
      return 0;
    }
  } else if (minf->nmhd) {
    if (!atom_nmhd_copy_data (minf->nmhd, buffer, size, offset)) {
      return 0;
    }
  }

  if (minf->hdlr) {
    if (!atom_hdlr_copy_data (minf->hdlr, buffer, size, offset)) {
      return 0;
    }
  }

  if (!atom_dinf_copy_data (&minf->dinf, buffer, size, offset)) {
    return 0;
  }
  if (!atom_stbl_copy_data (&minf->stbl, buffer, size, offset)) {
    return 0;
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
+static guint64
+atom_mdhd_copy_data (AtomMDHD * mdhd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&mdhd->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ if (!common_time_info_copy_data (&mdhd->time_info,
+ atom_full_get_version (&mdhd->header) == 0, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint16 (mdhd->language_code, buffer, size, offset);
+ prop_copy_uint16 (mdhd->quality, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_mdia_copy_data (AtomMDIA * mdia, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&mdia->header, buffer, size, offset)) {
+ return 0;
+ }
+ if (!atom_mdhd_copy_data (&mdia->mdhd, buffer, size, offset)) {
+ return 0;
+ }
+ if (!atom_hdlr_copy_data (&mdia->hdlr, buffer, size, offset)) {
+ return 0;
+ }
+
+ if (!atom_minf_copy_data (&mdia->minf, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_elst_copy_data (AtomELST * elst, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+ GSList *walker;
+
+ if (!atom_full_copy_data (&elst->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint32 (g_slist_length (elst->entries), buffer, size, offset);
+
+ for (walker = elst->entries; walker != NULL; walker = g_slist_next (walker)) {
+ EditListEntry *entry = (EditListEntry *) walker->data;
+ prop_copy_uint32 (entry->duration, buffer, size, offset);
+ prop_copy_uint32 (entry->media_time, buffer, size, offset);
+ prop_copy_uint32 (entry->media_rate, buffer, size, offset);
+ }
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
/* Serialize a 'tref' (track reference) atom.  The payload is a single
 * reference-type sub-box whose size (8-byte header + 4 bytes per track
 * id) is written explicitly, followed by the reftype fourcc and the
 * referenced track ids.  Callers must not invoke this with an empty
 * entry array (asserted).  Returns bytes written, 0 on error. */
static guint64
atom_tref_copy_data (AtomTREF * tref, guint8 ** buffer, guint64 * size,
    guint64 * offset)
{
  guint64 original_offset = *offset;
  guint i;

  g_assert (atom_array_get_len (&tref->entries) > 0);

  if (!atom_copy_data (&tref->header, buffer, size, offset)) {
    return 0;
  }

  /* sub-box size: 8-byte header plus one uint32 per referenced track */
  prop_copy_uint32 (8 + 4 * atom_array_get_len (&tref->entries), buffer, size,
      offset);
  prop_copy_fourcc (tref->reftype, buffer, size, offset);
  /* minimize realloc */
  prop_copy_ensure_buffer (buffer, size, offset,
      4 * atom_array_get_len (&tref->entries));
  for (i = 0; i < atom_array_get_len (&tref->entries); i++) {
    prop_copy_uint32 (atom_array_index (&tref->entries, i), buffer, size,
        offset);
  }

  atom_write_size (buffer, size, offset, original_offset);
  return *offset - original_offset;
}
+
+static guint64
+atom_edts_copy_data (AtomEDTS * edts, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&(edts->header), buffer, size, offset))
+ return 0;
+
+ if (!atom_elst_copy_data (&(edts->elst), buffer, size, offset))
+ return 0;
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_tag_data_copy_data (AtomTagData * data, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&data->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint32 (data->reserved, buffer, size, offset);
+ prop_copy_uint8_array (data->data, data->datalen, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_tag_copy_data (AtomTag * tag, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&tag->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ if (!atom_tag_data_copy_data (&tag->data, buffer, size, offset)) {
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_ilst_copy_data (AtomILST * ilst, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&ilst->header, buffer, size, offset)) {
+ return 0;
+ }
+ /* extra atoms */
+ if (ilst->entries &&
+ !atom_info_list_copy_data (ilst->entries, buffer, size, offset))
+ return 0;
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_meta_copy_data (AtomMETA * meta, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&meta->header, buffer, size, offset)) {
+ return 0;
+ }
+ if (!atom_hdlr_copy_data (&meta->hdlr, buffer, size, offset)) {
+ return 0;
+ }
+ if (meta->ilst) {
+ if (!atom_ilst_copy_data (meta->ilst, buffer, size, offset)) {
+ return 0;
+ }
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_udta_copy_data (AtomUDTA * udta, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_copy_data (&udta->header, buffer, size, offset)) {
+ return 0;
+ }
+ if (udta->meta) {
+ if (!atom_meta_copy_data (udta->meta, buffer, size, offset)) {
+ return 0;
+ }
+ }
+ if (udta->entries) {
+ /* extra atoms */
+ if (!atom_info_list_copy_data (udta->entries, buffer, size, offset))
+ return 0;
+ }
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_mehd_copy_data (AtomMEHD * mehd, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&mehd->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint64 (mehd->fragment_duration, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+static guint64
+atom_trex_copy_data (AtomTREX * trex, guint8 ** buffer, guint64 * size,
+ guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ if (!atom_full_copy_data (&trex->header, buffer, size, offset)) {
+ return 0;
+ }
+
+ prop_copy_uint32 (trex->track_ID, buffer, size, offset);
+ prop_copy_uint32 (trex->default_sample_description_index, buffer, size,
+ offset);
+ prop_copy_uint32 (trex->default_sample_duration, buffer, size, offset);
+ prop_copy_uint32 (trex->default_sample_size, buffer, size, offset);
+ prop_copy_uint32 (trex->default_sample_flags, buffer, size, offset);
+
+ atom_write_size (buffer, size, offset, original_offset);
+ return *offset - original_offset;
+}
+
+/* Serialize a 'mvex' (movie extends) container: the mehd child followed by
+ * one trex per track. Returns bytes written, 0 on failure. */
+static guint64
+atom_mvex_copy_data (AtomMVEX * mvex, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 start_offset = *offset;
+  GList *it;
+
+  if (!atom_copy_data (&mvex->header, buffer, size, offset)) {
+    return 0;
+  }
+
+  /* only write mehd if we have anything extra to add */
+  if (!atom_mehd_copy_data (&mvex->mehd, buffer, size, offset)) {
+    return 0;
+  }
+
+  for (it = g_list_first (mvex->trexs); it != NULL; it = g_list_next (it)) {
+    if (!atom_trex_copy_data ((AtomTREX *) it->data, buffer, size, offset)) {
+      return 0;
+    }
+  }
+
+  atom_write_size (buffer, size, offset, start_offset);
+  return *offset - start_offset;
+}
+
+/* Serialize a complete 'trak' atom and its children in the order required
+ * by the container format: tkhd, optional tapt/edts/tref, then mdia and
+ * udta. Returns bytes written, 0 on failure. */
+guint64
+atom_trak_copy_data (AtomTRAK * trak, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+
+  if (!atom_copy_data (&trak->header, buffer, size, offset)) {
+    return 0;
+  }
+  if (!atom_tkhd_copy_data (&trak->tkhd, buffer, size, offset)) {
+    return 0;
+  }
+  if (trak->tapt) {
+    /* tapt is stored behind a generic AtomInfo, so dispatch via its
+     * copy_data_func */
+    if (!trak->tapt->copy_data_func (trak->tapt->atom, buffer, size, offset)) {
+      return 0;
+    }
+  }
+  if (trak->edts) {
+    if (!atom_edts_copy_data (trak->edts, buffer, size, offset)) {
+      return 0;
+    }
+  }
+  if (trak->tref) {
+    /* Make sure we need this atom (there is a referenced track */
+    if (atom_array_get_len (&trak->tref->entries) > 0) {
+      if (!atom_tref_copy_data (trak->tref, buffer, size, offset)) {
+        return 0;
+      }
+    }
+  }
+
+  if (!atom_mdia_copy_data (&trak->mdia, buffer, size, offset)) {
+    return 0;
+  }
+
+  if (!atom_udta_copy_data (&trak->udta, buffer, size, offset)) {
+    return 0;
+  }
+
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+
+/* Serialize the complete 'moov' atom: mvhd, every trak, udta and - for
+ * fragmented files - the mvex container. Returns bytes written, 0 on
+ * failure. */
+guint64
+atom_moov_copy_data (AtomMOOV * atom, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 start_offset = *offset;
+  GList *it;
+
+  if (!atom_copy_data (&(atom->header), buffer, size, offset))
+    return 0;
+
+  if (!atom_mvhd_copy_data (&(atom->mvhd), buffer, size, offset))
+    return 0;
+
+  for (it = g_list_first (atom->traks); it != NULL; it = g_list_next (it)) {
+    if (!atom_trak_copy_data ((AtomTRAK *) it->data, buffer, size, offset)) {
+      return 0;
+    }
+  }
+
+  if (!atom_udta_copy_data (&atom->udta, buffer, size, offset)) {
+    return 0;
+  }
+
+  /* mvex is only meaningful for fragmented files */
+  if (atom->fragmented) {
+    if (!atom_mvex_copy_data (&atom->mvex, buffer, size, offset)) {
+      return 0;
+    }
+  }
+
+  atom_write_size (buffer, size, offset, start_offset);
+  return *offset - start_offset;
+}
+
+/* Serialize a 'wave' atom together with its optional extension children.
+ * Returns bytes written, 0 on failure. */
+static guint64
+atom_wave_copy_data (AtomWAVE * wave, guint8 ** buffer,
+    guint64 * size, guint64 * offset)
+{
+  guint64 original_offset = *offset;
+
+  if (!atom_copy_data (&(wave->header), buffer, size, offset))
+    return 0;
+
+  if (wave->extension_atoms) {
+    if (!atom_info_list_copy_data (wave->extension_atoms, buffer, size, offset))
+      return 0;
+  }
+
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+/* -- end of copy data functions -- */
+
+/* -- general functions, API and support functions */
+
+/* add samples to tables */
+
+/* Append a new sample-to-chunk run, merging with the previous run when the
+ * last two entries carry the same samples_per_chunk (the table is
+ * run-length encoded, so equal consecutive runs can be collapsed). */
+void
+atom_stsc_add_new_entry (AtomSTSC * stsc, guint32 first_chunk, guint32 nsamples,
+    guint32 sample_description_index)
+{
+  gint len;
+
+  if ((len = atom_array_get_len (&stsc->entries)) > 1 &&
+      ((atom_array_index (&stsc->entries, len - 1)).samples_per_chunk ==
+          (atom_array_index (&stsc->entries, len - 2)).samples_per_chunk)) {
+    STSCEntry *nentry;
+
+    /* Merge last two entries as they have the same number of samples per chunk */
+    nentry = &atom_array_index (&stsc->entries, len - 1);
+    nentry->first_chunk = first_chunk;
+    nentry->samples_per_chunk = nsamples;
+    nentry->sample_description_index = sample_description_index;
+  } else {
+    STSCEntry nentry;
+
+    nentry.first_chunk = first_chunk;
+    nentry.samples_per_chunk = nsamples;
+    nentry.sample_description_index = sample_description_index;
+    atom_array_append (&stsc->entries, nentry, 128);
+  }
+}
+
+/* Add nsamples to the last stsc run; asserts that the run being updated
+ * really is the one starting at first_chunk. */
+static void
+atom_stsc_update_entry (AtomSTSC * stsc, guint32 first_chunk, guint32 nsamples)
+{
+  gint len;
+
+  len = atom_array_get_len (&stsc->entries);
+  g_assert (len != 0);
+  g_assert (atom_array_index (&stsc->entries,
+          len - 1).first_chunk == first_chunk);
+
+  atom_array_index (&stsc->entries, len - 1).samples_per_chunk += nsamples;
+}
+
+/* Record sample_count samples of duration sample_delta in the (run-length
+ * encoded) stts table, extending the last run when the delta matches. */
+static void
+atom_stts_add_entry (AtomSTTS * stts, guint32 sample_count, gint32 sample_delta)
+{
+  guint len = atom_array_get_len (&stts->entries);
+  STTSEntry *last = NULL;
+
+  if (G_LIKELY (len != 0))
+    last = &atom_array_index (&stts->entries, len - 1);
+
+  if (last != NULL && last->sample_delta == sample_delta) {
+    /* same delta as the current run: just extend it */
+    last->sample_count += sample_count;
+  } else {
+    /* delta changed: start a new run */
+    STTSEntry added;
+
+    added.sample_count = sample_count;
+    added.sample_delta = sample_delta;
+    atom_array_append (&stts->entries, added, 256);
+  }
+}
+
+/* Account for nsamples new samples of the given size in the stsz table.
+ * When sample_size is non-zero the table stores a single constant size and
+ * no per-sample entries are kept. */
+static void
+atom_stsz_add_entry (AtomSTSZ * stsz, guint32 nsamples, guint32 size)
+{
+  guint32 n;
+
+  stsz->table_size += nsamples;
+  if (stsz->sample_size != 0) {
+    /* it is constant size, we don't need entries */
+    return;
+  }
+  for (n = 0; n < nsamples; n++) {
+    atom_array_append (&stsz->entries, size, 1024);
+  }
+}
+
+/* Number of chunk offsets currently stored in the stco/co64 table. */
+static guint32
+atom_stco64_get_entry_count (AtomSTCO64 * stco64)
+{
+  return atom_array_get_len (&stco64->entries);
+}
+
+/* returns TRUE if a new entry was added */
+static gboolean
+atom_stco64_add_entry (AtomSTCO64 * stco64, guint64 entry)
+{
+  guint32 len;
+
+  /* Only add a new entry if the chunk offset changed */
+  if ((len = atom_array_get_len (&stco64->entries)) &&
+      ((atom_array_index (&stco64->entries, len - 1)) == entry))
+    return FALSE;
+
+  atom_array_append (&stco64->entries, entry, 256);
+  /* track the largest offset seen; used to decide stco vs. co64 */
+  if (entry > stco64->max_offset)
+    stco64->max_offset = entry;
+
+  return TRUE;
+}
+
+/* Append a referenced track/sample id to the tref table. */
+void
+atom_tref_add_entry (AtomTREF * tref, guint32 sample)
+{
+  atom_array_append (&tref->entries, sample, 512);
+}
+
+/* Append a sync-sample (keyframe) index to the stss table. */
+static void
+atom_stss_add_entry (AtomSTSS * stss, guint32 sample)
+{
+  atom_array_append (&stss->entries, sample, 512);
+}
+
+/* Mark the most recently added sample as a sync sample. Note that stsz
+ * table_size has already been incremented, so it equals the (1-based)
+ * index of that sample. */
+static void
+atom_stbl_add_stss_entry (AtomSTBL * stbl)
+{
+  guint32 sample_index = stbl->stsz.table_size;
+
+  atom_stss_add_entry (&stbl->stss, sample_index);
+}
+
+/* Record nsamples composition-time offsets in the (run-length encoded)
+ * ctts table, extending the last run when the offset is unchanged. A
+ * non-zero offset anywhere flags that PTS differs from DTS. */
+static void
+atom_ctts_add_entry (AtomCTTS * ctts, guint32 nsamples, guint32 offset)
+{
+  guint len = atom_array_get_len (&ctts->entries);
+  CTTSEntry *last = NULL;
+
+  if (G_LIKELY (len != 0))
+    last = &atom_array_index (&ctts->entries, len - 1);
+
+  if (last != NULL && last->sampleoffset == offset) {
+    /* same offset as the current run: extend it */
+    last->samplecount += nsamples;
+  } else {
+    CTTSEntry added;
+
+    added.samplecount = nsamples;
+    added.sampleoffset = offset;
+    atom_array_append (&ctts->entries, added, 256);
+    if (offset != 0)
+      ctts->do_pts = TRUE;
+  }
+}
+
+/* Lazily create the ctts table and add a composition-offset run to it. */
+static void
+atom_stbl_add_ctts_entry (AtomSTBL * stbl, guint32 nsamples, guint32 offset)
+{
+  if (stbl->ctts == NULL) {
+    stbl->ctts = atom_ctts_new ();
+  }
+  atom_ctts_add_entry (stbl->ctts, nsamples, offset);
+}
+
+/* Register nsamples new samples in every sample table of the stbl:
+ * durations (stts), sizes (stsz), chunk offsets (stco64) plus the matching
+ * sample-to-chunk run (stsc), optional sync marker (stss) and composition
+ * offsets (ctts). */
+void
+atom_stbl_add_samples (AtomSTBL * stbl, guint32 nsamples, guint32 delta,
+    guint32 size, guint64 chunk_offset, gboolean sync, gint64 pts_offset)
+{
+  atom_stts_add_entry (&stbl->stts, nsamples, delta);
+  atom_stsz_add_entry (&stbl->stsz, nsamples, size);
+  /* a new chunk offset starts a new stsc run; otherwise extend the last */
+  if (atom_stco64_add_entry (&stbl->stco64, chunk_offset)) {
+    atom_stsc_add_new_entry (&stbl->stsc,
+        atom_stco64_get_entry_count (&stbl->stco64), nsamples,
+        stbl->stsd.n_entries);
+  } else {
+    atom_stsc_update_entry (&stbl->stsc,
+        atom_stco64_get_entry_count (&stbl->stco64), nsamples);
+  }
+
+  if (sync)
+    atom_stbl_add_stss_entry (stbl);
+  /* always store to arrange for consistent content */
+  atom_stbl_add_ctts_entry (stbl, nsamples, pts_offset);
+}
+
+/* Convenience wrapper: add samples to the trak's own sample table. */
+void
+atom_trak_add_samples (AtomTRAK * trak, guint32 nsamples, guint32 delta,
+    guint32 size, guint64 chunk_offset, gboolean sync, gint64 pts_offset)
+{
+  AtomSTBL *stbl = &trak->mdia.minf.stbl;
+  atom_stbl_add_samples (stbl, nsamples, delta, size, chunk_offset, sync,
+      pts_offset);
+}
+
+/* trak and moov molding */
+
+/* Media timescale (ticks per second) of this track. */
+guint32
+atom_trak_get_timescale (AtomTRAK * trak)
+{
+  return trak->mdia.mdhd.time_info.timescale;
+}
+
+/* Track ID as stored in the tkhd. */
+guint32
+atom_trak_get_id (AtomTRAK * trak)
+{
+  return trak->tkhd.track_ID;
+}
+
+/* Assign the tkhd track ID. */
+static void
+atom_trak_set_id (AtomTRAK * trak, guint32 id)
+{
+  trak->tkhd.track_ID = id;
+}
+
+/* Append a trex atom to the moov's mvex container; ownership transfers to
+ * the moov. */
+static void
+atom_moov_add_trex (AtomMOOV * moov, AtomTREX * trex)
+{
+  moov->mvex.trexs = g_list_append (moov->mvex.trexs, trex);
+}
+
+/* Allocate a trex atom for the given track with neutral defaults (the
+ * explicit zero assignments are redundant after g_new0 but kept for
+ * documentation value). Caller owns the returned atom. */
+static AtomTREX *
+atom_trex_new (AtomTRAK * trak)
+{
+  guint8 flags[3] = { 0, 0, 0 };
+  AtomTREX *trex = g_new0 (AtomTREX, 1);
+
+  atom_full_init (&trex->header, FOURCC_trex, 0, 0, 0, flags);
+
+  trex->track_ID = trak->tkhd.track_ID;
+  trex->default_sample_description_index = 1;
+  trex->default_sample_duration = 0;
+  trex->default_sample_size = 0;
+  trex->default_sample_flags = 0;
+
+  return trex;
+}
+
+/* Take ownership of a trak: give it the next free track ID, append it to
+ * the moov, and create the matching trex for fragmented output. */
+void
+atom_moov_add_trak (AtomMOOV * moov, AtomTRAK * trak)
+{
+  atom_trak_set_id (trak, moov->mvhd.next_track_id++);
+  moov->traks = g_list_append (moov->traks, trak);
+  /* additional trak means also new trex */
+  atom_moov_add_trex (moov, atom_trex_new (trak));
+}
+
+/* Number of tracks currently attached to the moov. */
+guint
+atom_moov_get_trak_count (AtomMOOV * moov)
+{
+  return g_list_length (moov->traks);
+}
+
+/* Track duration in the movie (moov) timescale, as stored in the tkhd. */
+static guint64
+atom_trak_get_duration (AtomTRAK * trak)
+{
+  return trak->tkhd.duration;
+}
+
+/* Sum of all sample durations in the stts table, i.e. the total media
+ * duration in the track's timescale. */
+static guint64
+atom_stts_get_total_duration (AtomSTTS * stts)
+{
+  guint64 total = 0;
+  guint idx;
+  guint count = atom_array_get_len (&stts->entries);
+
+  for (idx = 0; idx < count; idx++) {
+    STTSEntry *e = &atom_array_index (&stts->entries, idx);
+
+    /* widen before multiplying to avoid 32-bit overflow */
+    total += (guint64) (e->sample_count) * e->sample_delta;
+  }
+  return total;
+}
+
+/* Recompute the track's media duration from the stts table and convert it
+ * to the movie timescale for the tkhd (0 if the media timescale is
+ * unset, to avoid dividing by zero). */
+static void
+atom_trak_update_duration (AtomTRAK * trak, guint64 moov_timescale)
+{
+  trak->mdia.mdhd.time_info.duration =
+      atom_stts_get_total_duration (&trak->mdia.minf.stbl.stts);
+  if (trak->mdia.mdhd.time_info.timescale != 0) {
+    trak->tkhd.duration =
+        gst_util_uint64_scale_round (trak->mdia.mdhd.time_info.duration,
+        moov_timescale, trak->mdia.mdhd.time_info.timescale);
+  } else {
+    trak->tkhd.duration = 0;
+  }
+}
+
+/* Fill in the real duration of a timecode track once the movie duration is
+ * known: rescale the duration into the tmcd entry's timescale (if one is
+ * found in the stsd) and store it in tkhd, mdhd and the single stts run.
+ *
+ * Fix: the inner loop variable used to be named 'entry' as well, shadowing
+ * the outer STTSEntry pointer (-Wshadow); renamed to 'ste'. */
+static void
+timecode_atom_trak_set_duration (AtomTRAK * trak, guint64 duration,
+    guint64 timescale)
+{
+  STTSEntry *entry;
+  GList *iter;
+
+  /* Sanity checks to ensure we have a timecode */
+  g_assert (trak->mdia.minf.gmhd != NULL);
+  g_assert (atom_array_get_len (&trak->mdia.minf.stbl.stts.entries) == 1);
+
+  for (iter = trak->mdia.minf.stbl.stsd.entries; iter;
+      iter = g_list_next (iter)) {
+    SampleTableEntry *ste = iter->data;
+    if (ste->kind == TIMECODE) {
+      SampleTableEntryTMCD *tmcd = (SampleTableEntryTMCD *) ste;
+
+      /* convert from the movie timescale to the timecode timescale */
+      duration = duration * tmcd->timescale / timescale;
+      timescale = tmcd->timescale;
+      break;
+    }
+  }
+
+  trak->tkhd.duration = duration;
+  trak->mdia.mdhd.time_info.duration = duration;
+  trak->mdia.mdhd.time_info.timescale = timescale;
+
+  /* the single stts run covers the whole track */
+  entry = &atom_array_index (&trak->mdia.minf.stbl.stts.entries, 0);
+  entry->sample_delta = duration;
+}
+
+/* Movie timescale (ticks per second) from the mvhd. */
+static guint32
+atom_moov_get_timescale (AtomMOOV * moov)
+{
+  return moov->mvhd.time_info.timescale;
+}
+
+/* Set the movie timescale in the mvhd. */
+void
+atom_moov_update_timescale (AtomMOOV * moov, guint32 timescale)
+{
+  moov->mvhd.time_info.timescale = timescale;
+}
+
+/* Recompute every track's duration and propagate the longest one to the
+ * mvhd and mehd. Timecode tracks carry a placeholder duration, so they are
+ * skipped in the first pass and assigned the overall duration afterwards. */
+void
+atom_moov_update_duration (AtomMOOV * moov)
+{
+  GList *it;
+  guint64 duration = 0;
+
+  /* first pass: regular tracks, find the maximum duration */
+  for (it = moov->traks; it != NULL; it = g_list_next (it)) {
+    AtomTRAK *trak = (AtomTRAK *) it->data;
+
+    if (trak->mdia.minf.gmhd == NULL || trak->mdia.minf.gmhd->tmcd == NULL) {
+      guint64 dur;
+
+      atom_trak_update_duration (trak, atom_moov_get_timescale (moov));
+      dur = atom_trak_get_duration (trak);
+      if (dur > duration)
+        duration = dur;
+    }
+  }
+
+  /* second pass: give timecode tracks the overall duration */
+  for (it = moov->traks; it != NULL; it = g_list_next (it)) {
+    AtomTRAK *trak = (AtomTRAK *) it->data;
+
+    if (trak->mdia.minf.gmhd != NULL && trak->mdia.minf.gmhd->tmcd != NULL)
+      timecode_atom_trak_set_duration (trak, duration,
+          atom_moov_get_timescale (moov));
+  }
+
+  moov->mvhd.time_info.duration = duration;
+  moov->mvex.mehd.fragment_duration = duration;
+}
+
+/* Mark whether this moov belongs to a fragmented file (controls whether
+ * mvex is serialized). */
+void
+atom_moov_set_fragmented (AtomMOOV * moov, gboolean fragmented)
+{
+  moov->fragmented = fragmented;
+}
+
+/* Set the base offset added to every chunk offset of this table. */
+void
+atom_stco64_chunks_set_offset (AtomSTCO64 * stco64, guint32 offset)
+{
+  stco64->chunk_offset = offset;
+}
+
+/* Apply a new base chunk offset to every track's stco64 table; no-op when
+ * the offset is unchanged. */
+void
+atom_moov_chunks_set_offset (AtomMOOV * moov, guint32 offset)
+{
+  GList *it;
+
+  if (offset == moov->chunks_offset)
+    return;                     /* Nothing to do */
+
+  for (it = moov->traks; it != NULL; it = g_list_next (it)) {
+    AtomTRAK *trak = (AtomTRAK *) it->data;
+
+    atom_stco64_chunks_set_offset (&trak->mdia.minf.stbl.stco64, offset);
+  }
+
+  moov->chunks_offset = offset;
+}
+
+/* Write average/maximum bitrates into whichever bitrate-carrying atoms the
+ * track has (esds, btrt, or an esds nested inside a wave). Existing
+ * non-zero values are preserved; a zero argument leaves that field alone. */
+void
+atom_trak_update_bitrates (AtomTRAK * trak, guint32 avg_bitrate,
+    guint32 max_bitrate)
+{
+  AtomESDS *esds = NULL;
+  AtomData *btrt = NULL;
+  AtomWAVE *wave = NULL;
+  AtomSTSD *stsd;
+  GList *iter;
+  GList *extensioniter = NULL;
+
+  g_return_if_fail (trak != NULL);
+
+  if (avg_bitrate == 0 && max_bitrate == 0)
+    return;
+
+  /* find the extension atom list of the (last) audio or video entry */
+  stsd = &trak->mdia.minf.stbl.stsd;
+  for (iter = stsd->entries; iter; iter = g_list_next (iter)) {
+    SampleTableEntry *entry = iter->data;
+
+    switch (entry->kind) {
+      case AUDIO:{
+        SampleTableEntryMP4A *audioentry = (SampleTableEntryMP4A *) entry;
+        extensioniter = audioentry->extension_atoms;
+        break;
+      }
+      case VIDEO:{
+        SampleTableEntryMP4V *videoentry = (SampleTableEntryMP4V *) entry;
+        extensioniter = videoentry->extension_atoms;
+        break;
+      }
+      default:
+        break;
+    }
+  }
+
+  /* locate the atoms that can carry bitrate information */
+  for (; extensioniter; extensioniter = g_list_next (extensioniter)) {
+    AtomInfo *atominfo = extensioniter->data;
+    if (atominfo->atom->type == FOURCC_esds) {
+      esds = (AtomESDS *) atominfo->atom;
+    } else if (atominfo->atom->type == FOURCC_btrt) {
+      btrt = (AtomData *) atominfo->atom;
+    } else if (atominfo->atom->type == FOURCC_wave) {
+      wave = (AtomWAVE *) atominfo->atom;
+    }
+  }
+
+  /* wave might have an esds internally */
+  if (wave) {
+    for (extensioniter = wave->extension_atoms; extensioniter;
+        extensioniter = g_list_next (extensioniter)) {
+      AtomInfo *atominfo = extensioniter->data;
+      if (atominfo->atom->type == FOURCC_esds) {
+        esds = (AtomESDS *) atominfo->atom;
+        break;
+      }
+    }
+  }
+
+  if (esds) {
+    /* only fill in fields that are still unset */
+    if (avg_bitrate && esds->es.dec_conf_desc.avg_bitrate == 0)
+      esds->es.dec_conf_desc.avg_bitrate = avg_bitrate;
+    if (max_bitrate && esds->es.dec_conf_desc.max_bitrate == 0)
+      esds->es.dec_conf_desc.max_bitrate = max_bitrate;
+  }
+  if (btrt) {
+    /* type(4bytes) + size(4bytes) + buffersize(4bytes) +
+     * maxbitrate(bytes) + avgbitrate(bytes) */
+    if (max_bitrate && GST_READ_UINT32_BE (btrt->data + 4) == 0)
+      GST_WRITE_UINT32_BE (btrt->data + 4, max_bitrate);
+    if (avg_bitrate && GST_READ_UINT32_BE (btrt->data + 8) == 0)
+      GST_WRITE_UINT32_BE (btrt->data + 8, avg_bitrate);
+  }
+}
+
+/* Size a tx3g subtitle track relative to the video dimensions: font size
+ * is 5% of the video height, the text box is the bottom 15%, and the tkhd
+ * dimensions are stored as 16.16 fixed point. */
+void
+atom_trak_tx3g_update_dimension (AtomTRAK * trak, guint32 width, guint32 height)
+{
+  AtomSTSD *stsd;
+  GList *iter;
+  SampleTableEntryTX3G *tx3g = NULL;
+
+  /* find the first subtitle entry, if any */
+  stsd = &trak->mdia.minf.stbl.stsd;
+  for (iter = stsd->entries; iter && tx3g == NULL; iter = g_list_next (iter)) {
+    SampleTableEntry *entry = iter->data;
+
+    switch (entry->kind) {
+      case SUBTITLE:{
+        tx3g = (SampleTableEntryTX3G *) entry;
+        break;
+      }
+      default:
+        break;
+    }
+  }
+
+  /* Currently we never set the vertical placement flag, so we don't
+   * check for it to set the dimensions differently as the spec says.
+   * Always do it for the not set case */
+  if (tx3g) {
+    tx3g->font_size = 0.05 * height;
+
+    height = 0.15 * height;
+    trak->tkhd.width = width << 16;
+    trak->tkhd.height = height << 16;
+    tx3g->default_text_box = width | (height << 16);
+  }
+}
+
+/*
+ * Meta tags functions
+ */
+/* (Re)allocate the tag payload buffer, zero-filled, discarding any
+ * previous contents. */
+static void
+atom_tag_data_alloc_data (AtomTagData * data, guint size)
+{
+  g_free (data->data);
+  data->data = g_new0 (guint8, size);
+  data->datalen = size;
+}
+
+/* Append a tag to the udta: into the meta/ilst list when a meta atom
+ * exists, otherwise directly into the udta's own entry list. Takes
+ * ownership of the AtomInfo. */
+static void
+atom_udta_append_tag (AtomUDTA * udta, AtomInfo * tag)
+{
+  GList **entries;
+
+  if (udta->meta)
+    entries = &udta->meta->ilst->entries;
+  else
+    entries = &udta->entries;
+  *entries = g_list_append (*entries, tag);
+}
+
+/* Add a binary tag atom with the given fourcc/flags; the payload is
+ * copied, so the caller keeps ownership of 'data'. */
+void
+atom_udta_add_tag (AtomUDTA * udta, guint32 fourcc, guint32 flags,
+    const guint8 * data, guint size)
+{
+  AtomTag *tag;
+  AtomTagData *tdata;
+
+  tag = atom_tag_new (fourcc, flags);
+  tdata = &tag->data;
+  atom_tag_data_alloc_data (tdata, size);
+  memmove (tdata->data, data, size);
+
+  atom_udta_append_tag (udta,
+      build_atom_info_wrapper ((Atom *) tag, atom_tag_copy_data,
+          atom_tag_free));
+}
+
+/* Add a UTF-8 text tag; empty strings are silently dropped. */
+void
+atom_udta_add_str_tag (AtomUDTA * udta, guint32 fourcc, const gchar * value)
+{
+  gint len = strlen (value);
+
+  if (len > 0)
+    atom_udta_add_tag (udta, fourcc, METADATA_TEXT_FLAG, (guint8 *) value, len);
+}
+
+/* Add an unsigned integer tag. With flags set the value is serialized as a
+ * 16-bit big-endian field; with flags == 0 it is a 32-bit value placed
+ * inside an 8-byte zero-padded field. */
+void
+atom_udta_add_uint_tag (AtomUDTA * udta, guint32 fourcc, guint32 flags,
+    guint32 value)
+{
+  guint8 payload[8] = { 0, };
+
+  if (flags != 0) {
+    GST_WRITE_UINT16_BE (payload, value);
+    atom_udta_add_tag (udta, fourcc, flags, payload, 2);
+  } else {
+    GST_WRITE_UINT32_BE (payload + 2, value);
+    atom_udta_add_tag (udta, fourcc, flags, payload, 8);
+  }
+}
+
+/* Wrap an existing memory region in a read-only GstBuffer; the buffer
+ * takes ownership of 'mem' (freed with the default free function). */
+#define GST_BUFFER_NEW_READONLY(mem, size) \
+    gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY, mem, size, \
+    0, size, mem, NULL)
+
+/* Add an unparsed ("blob") tag: the first 8 bytes of 'data' are the atom
+ * size and fourcc, the remainder becomes the payload of a data atom.
+ * Malformed blobs are ignored.
+ *
+ * Fix: also reject len < 8 - the embedded size must at least cover its own
+ * header, otherwise 'len - 8' underflows (guint wrap-around) and a
+ * gigantic bogus payload length is passed on. */
+void
+atom_udta_add_blob_tag (AtomUDTA * udta, guint8 * data, guint size)
+{
+  AtomData *data_atom;
+  guint len;
+  guint32 fourcc;
+
+  if (size < 8)
+    return;
+
+  /* blob is unparsed atom;
+   * extract size and fourcc, and wrap remainder in data atom */
+  len = GST_READ_UINT32_BE (data);
+  fourcc = GST_READ_UINT32_LE (data + 4);
+  /* the embedded size must be plausible: at least the 8-byte header and
+   * no larger than the data we actually have */
+  if (len < 8 || len > size)
+    return;
+
+  data_atom = atom_data_new_from_data (fourcc, data + 8, len - 8);
+
+  atom_udta_append_tag (udta,
+      build_atom_info_wrapper ((Atom *) data_atom, atom_data_copy_data,
+          atom_data_free));
+}
+
+/* Add a 3GPP-style tag: a full atom (4 bytes of version/flags, all zero)
+ * followed by the caller's payload, which is copied. */
+void
+atom_udta_add_3gp_tag (AtomUDTA * udta, guint32 fourcc, guint8 * data,
+    guint size)
+{
+  AtomData *data_atom;
+
+  data_atom = atom_data_new (fourcc);
+
+  /* need full atom */
+  atom_data_alloc_mem (data_atom, size + 4);
+
+  /* full atom: version and flags */
+  GST_WRITE_UINT32_BE (data_atom->data, 0);
+  memcpy (data_atom->data + 4, data, size);
+
+  atom_udta_append_tag (udta,
+      build_atom_info_wrapper ((Atom *) data_atom, atom_data_copy_data,
+          atom_data_free));
+}
+
+/* Pack a 3-letter ISO-639-2/T language code into the 15-bit representation
+ * used by ISO base media files: three 5-bit groups, each letter stored as
+ * its offset from 0x60. Returns 0 for invalid input. */
+guint16
+language_code (const char *lang)
+{
+  guint16 packed;
+
+  g_return_val_if_fail (lang != NULL, 0);
+  g_return_val_if_fail (strlen (lang) == 3, 0);
+
+  packed = ((lang[0] - 0x60) & 0x1F) << 10;
+  packed += ((lang[1] - 0x60) & 0x1F) << 5;
+  packed += (lang[2] - 0x60) & 0x1F;
+
+  return packed;
+}
+
+/* Add a 3GPP tag carrying an optional language-tagged UTF-8 string and/or
+ * an optional small integer (ivalue < 0 means "no integer"). When both are
+ * present the integer is stored as a single trailing byte; when the
+ * integer stands alone it is stored as 16-bit big-endian. */
+void
+atom_udta_add_3gp_str_int_tag (AtomUDTA * udta, guint32 fourcc,
+    const gchar * value, gint16 ivalue)
+{
+  gint len = 0, size = 0;
+  guint8 *data;
+
+  if (value) {
+    len = strlen (value);
+    size = len + 3;             /* 2-byte language code + string + NUL */
+  }
+
+  if (ivalue >= 0)
+    size += 2;
+
+  /* small over-allocation keeps the writes below safely in bounds */
+  data = g_malloc (size + 3);
+  /* language tag and null-terminated UTF-8 string */
+  if (value) {
+    GST_WRITE_UINT16_BE (data, language_code (GST_QT_MUX_DEFAULT_TAG_LANGUAGE));
+    /* include 0 terminator */
+    memcpy (data + 2, value, len + 1);
+  }
+  /* 16-bit unsigned int if standalone, otherwise 8-bit */
+  if (ivalue >= 0) {
+    if (size == 2)
+      GST_WRITE_UINT16_BE (data + size - 2, ivalue);
+    else {
+      GST_WRITE_UINT8 (data + size - 2, ivalue & 0xFF);
+      size--;                   /* only one byte was used */
+    }
+  }
+
+  atom_udta_add_3gp_tag (udta, fourcc, data, size);
+  g_free (data);
+}
+
+/* Convenience wrapper: string-only 3GPP tag (no integer part). */
+void
+atom_udta_add_3gp_str_tag (AtomUDTA * udta, guint32 fourcc, const gchar * value)
+{
+  atom_udta_add_3gp_str_int_tag (udta, fourcc, value, -1);
+}
+
+/* Convenience wrapper: integer-only 3GPP tag (no string part). */
+void
+atom_udta_add_3gp_uint_tag (AtomUDTA * udta, guint32 fourcc, guint16 value)
+{
+  atom_udta_add_3gp_str_int_tag (udta, fourcc, NULL, value);
+}
+
+/* Attach an XMP packet to the udta as an 'XMP_' data atom. Only done for
+ * the MOV flavor; other flavors log a debug message and do nothing. */
+void
+atom_udta_add_xmp_tags (AtomUDTA * udta, GstBuffer * xmpbuffer)
+{
+  AtomData *data_atom;
+
+  if (udta->context->flavor != ATOMS_TREE_FLAVOR_MOV) {
+    GST_DEBUG ("Not adding xmp to moov atom, it is only used in 'mov' format");
+    return;
+  }
+
+  if (xmpbuffer == NULL)
+    return;
+
+  data_atom = atom_data_new_from_gst_buffer (FOURCC_XMP_, xmpbuffer);
+  udta->entries = g_list_append (udta->entries,
+      build_atom_info_wrapper ((Atom *) data_atom, atom_data_copy_data,
+          atom_data_free));
+}
+
+/*
+ * Functions for specifying media types
+ */
+
+/* Configure the minf as an audio media: drop any existing media header
+ * and install a sound media header (smhd). */
+static void
+atom_minf_set_audio (AtomMINF * minf)
+{
+  atom_minf_clear_handlers (minf);
+  minf->smhd = atom_smhd_new ();
+}
+
+/* Configure the minf as a video media: drop any existing media header
+ * and install a video media header (vmhd). */
+static void
+atom_minf_set_video (AtomMINF * minf, AtomsContext * context)
+{
+  atom_minf_clear_handlers (minf);
+  minf->vmhd = atom_vmhd_new (context);
+}
+
+/* Configure the minf as a subtitle media: subtitles use no specific media
+ * header, so only clear any existing one. */
+static void
+atom_minf_set_subtitle (AtomMINF * minf)
+{
+  atom_minf_clear_handlers (minf);
+}
+
+/* Set the handler type; the component type is a QuickTime-ism and only
+ * written for the MOV flavor. */
+static void
+atom_hdlr_set_type (AtomHDLR * hdlr, AtomsContext * context, guint32 comp_type,
+    guint32 hdlr_type)
+{
+  if (context->flavor == ATOMS_TREE_FLAVOR_MOV) {
+    hdlr->component_type = comp_type;
+  }
+  hdlr->handler_type = hdlr_type;
+}
+
+/* Replace the handler's human-readable name (previous name is freed). */
+static void
+atom_hdlr_set_name (AtomHDLR * hdlr, const char *name)
+{
+  g_free (hdlr->name);
+  hdlr->name = g_strdup (name);
+}
+
+/* Mark the mdia handler as a sound handler. */
+static void
+atom_mdia_set_hdlr_type_audio (AtomMDIA * mdia, AtomsContext * context)
+{
+  atom_hdlr_set_type (&mdia->hdlr, context, FOURCC_mhlr, FOURCC_soun);
+  /* Some players (low-end hardware) check for this name, which is what
+   * QuickTime itself sets */
+  atom_hdlr_set_name (&mdia->hdlr, "SoundHandler");
+}
+
+/* Mark the mdia handler as a video handler. */
+static void
+atom_mdia_set_hdlr_type_video (AtomMDIA * mdia, AtomsContext * context)
+{
+  atom_hdlr_set_type (&mdia->hdlr, context, FOURCC_mhlr, FOURCC_vide);
+  /* Some players (low-end hardware) check for this name, which is what
+   * QuickTime itself sets */
+  atom_hdlr_set_name (&mdia->hdlr, "VideoHandler");
+}
+
+/* Mark the mdia handler as a subtitle handler. */
+static void
+atom_mdia_set_hdlr_type_subtitle (AtomMDIA * mdia, AtomsContext * context)
+{
+  atom_hdlr_set_type (&mdia->hdlr, context, FOURCC_mhlr, FOURCC_sbtl);
+
+  /* Just follows the pattern from video and audio above */
+  atom_hdlr_set_name (&mdia->hdlr, "SubtitleHandler");
+}
+
+/* Configure the mdia (handler + minf) for audio content. */
+static void
+atom_mdia_set_audio (AtomMDIA * mdia, AtomsContext * context)
+{
+  atom_mdia_set_hdlr_type_audio (mdia, context);
+  atom_minf_set_audio (&mdia->minf);
+}
+
+/* Configure the mdia (handler + minf) for video content. */
+static void
+atom_mdia_set_video (AtomMDIA * mdia, AtomsContext * context)
+{
+  atom_mdia_set_hdlr_type_video (mdia, context);
+  atom_minf_set_video (&mdia->minf, context);
+}
+
+/* Configure the mdia (handler + minf) for subtitle content. */
+static void
+atom_mdia_set_subtitle (AtomMDIA * mdia, AtomsContext * context)
+{
+  atom_mdia_set_hdlr_type_subtitle (mdia, context);
+  atom_minf_set_subtitle (&mdia->minf);
+}
+
+/* Audio tracks: full volume (8.8 fixed point 1.0), no visual dimensions. */
+static void
+atom_tkhd_set_audio (AtomTKHD * tkhd)
+{
+  tkhd->volume = 0x0100;
+  tkhd->width = tkhd->height = 0;
+}
+
+/* Video tracks: muted, with the presentation dimensions set. */
+static void
+atom_tkhd_set_video (AtomTKHD * tkhd, AtomsContext * context, guint32 width,
+    guint32 height)
+{
+  tkhd->volume = 0;
+
+  /* qt and ISO base media do not contradict, and examples agree */
+  tkhd->width = width;
+  tkhd->height = height;
+}
+
+/* Subtitle tracks: muted, with the presentation dimensions set. */
+static void
+atom_tkhd_set_subtitle (AtomTKHD * tkhd, AtomsContext * context, guint32 width,
+    guint32 height)
+{
+  tkhd->volume = 0;
+
+  /* qt and ISO base media do not contradict, and examples agree */
+  tkhd->width = width;
+  tkhd->height = height;
+}
+
+
+/* Store *entry at the given index of the edit list, creating the slot if
+ * it does not exist yet (g_slist_insert appends when index is past the
+ * end). The entry is copied by value. */
+static void
+atom_edts_add_entry (AtomEDTS * edts, gint index, EditListEntry * entry)
+{
+  EditListEntry *e =
+      (EditListEntry *) g_slist_nth_data (edts->elst.entries, index);
+  /* Create a new entry if missing (appends to the list if index is larger) */
+  if (e == NULL) {
+    e = g_new (EditListEntry, 1);
+    edts->elst.entries = g_slist_insert (edts->elst.entries, e, index);
+  }
+
+  /* Update the entry */
+  *e = *entry;
+}
+
+/* Free and detach the trak's edit list, if it has one. */
+void
+atom_trak_edts_clear (AtomTRAK * trak)
+{
+  if (trak->edts) {
+    atom_edts_free (trak->edts);
+    trak->edts = NULL;
+  }
+}
+
+/*
+ * Update an entry in this trak edits list, creating it if needed.
+ * index is the index of the entry to update, or create if it's past the end.
+ * duration is in the moov's timescale
+ * media_time is the offset in the media time to start from (media's timescale)
+ * rate is a 32 bits fixed-point
+ */
+void
+atom_trak_set_elst_entry (AtomTRAK * trak, gint index,
+    guint32 duration, guint32 media_time, guint32 rate)
+{
+  EditListEntry entry;
+
+  entry.duration = duration;
+  entry.media_time = media_time;
+  entry.media_rate = rate;
+
+  /* lazily create the edts container on first use */
+  if (trak->edts == NULL)
+    trak->edts = atom_edts_new ();
+
+  atom_edts_add_entry (trak->edts, index, &entry);
+}
+
+/* re-negotiation is prevented at top-level, so only 1 entry expected.
+ * Quite some more care here and elsewhere may be needed to
+ * support several entries */
+static SampleTableEntryMP4A *
+atom_trak_add_audio_entry (AtomTRAK * trak, AtomsContext * context,
+    guint32 type)
+{
+  AtomSTSD *stsd = &trak->mdia.minf.stbl.stsd;
+  SampleTableEntryMP4A *mp4a = sample_entry_mp4a_new ();
+
+  mp4a->se.header.type = type;
+  mp4a->se.kind = AUDIO;
+  mp4a->compression_id = -1;    /* -1: not compressed (QuickTime convention) */
+  mp4a->se.data_reference_index = 1;
+
+  /* entry is owned by the stsd from here on */
+  stsd->entries = g_list_prepend (stsd->entries, mp4a);
+  stsd->n_entries++;
+  return mp4a;
+}
+
+/* return number of centiframes per second */
+guint
+atom_framerate_to_timescale (gint n, gint d)
+{
+  /* unknown framerate: pick a sane default of 100 fps worth of ticks */
+  if (n == 0)
+    return 10000;
+
+  if (d != 1 && d != 1001) {
+    /* otherwise there are probably rounding errors and we should rather guess
+     * if it's close enough to a well known framerate */
+    gst_video_guess_framerate (gst_util_uint64_scale (d, GST_SECOND, n), &n,
+        &d);
+  }
+
+  return gst_util_uint64_scale (n, 100, d);
+}
+
+/* Create a tmcd sample entry for the given timecode and configure the
+ * trak's media handler and timescale accordingly. Returns the new entry
+ * (owned by the stsd).
+ *
+ * Fix: the drop-frame check used '&=' (compound assignment), which
+ * destructively cleared every other flag in the caller's
+ * tc->config.flags as a side effect; it must be a plain bitwise test. */
+static SampleTableEntryTMCD *
+atom_trak_add_timecode_entry (AtomTRAK * trak, AtomsContext * context,
+    guint32 trak_timescale, GstVideoTimeCode * tc)
+{
+  AtomSTSD *stsd = &trak->mdia.minf.stbl.stsd;
+  SampleTableEntryTMCD *tmcd = sample_entry_tmcd_new ();
+
+  g_assert (trak_timescale != 0);
+
+  trak->mdia.hdlr.component_type = FOURCC_mhlr;
+  trak->mdia.hdlr.handler_type = FOURCC_tmcd;
+  g_free (trak->mdia.hdlr.name);
+  trak->mdia.hdlr.name = g_strdup ("Time Code Media Handler");
+  trak->mdia.mdhd.time_info.timescale = trak_timescale;
+
+  tmcd->se.kind = TIMECODE;
+  tmcd->se.data_reference_index = 1;
+  tmcd->tc_flags = TC_24H_MAX;
+  /* test the flag, do not assign (was '&=', which clobbered the flags) */
+  if (tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME)
+    tmcd->tc_flags |= TC_DROP_FRAME;
+  tmcd->name.language_code = 0;
+  tmcd->name.name = g_strdup ("Tape");
+  tmcd->timescale = trak_timescale;
+  tmcd->frame_duration =
+      gst_util_uint64_scale (tmcd->timescale, tc->config.fps_d,
+      tc->config.fps_n);
+  /* nominal frames per second; 1001-denominator rates round down (29.97->29) */
+  if (tc->config.fps_d == 1001)
+    tmcd->n_frames = tc->config.fps_n / 1000;
+  else
+    tmcd->n_frames = tc->config.fps_n / tc->config.fps_d;
+
+  stsd->entries = g_list_prepend (stsd->entries, tmcd);
+  stsd->n_entries++;
+  return tmcd;
+}
+
+static SampleTableEntryMP4V *
+atom_trak_add_video_entry (AtomTRAK * trak, AtomsContext * context,
+    guint32 type)
+{
+  SampleTableEntryMP4V *mp4v = sample_entry_mp4v_new (context);
+  AtomSTSD *stsd = &trak->mdia.minf.stbl.stsd;
+
+  mp4v->se.header.type = type;
+  mp4v->se.kind = VIDEO;
+  mp4v->se.data_reference_index = 1;
+  /* 72 dpi in 16.16 fixed point */
+  mp4v->horizontal_resolution = 72 << 16;
+  mp4v->vertical_resolution = 72 << 16;
+  if (context->flavor == ATOMS_TREE_FLAVOR_MOV) {
+    /* QuickTime quality values ("normal" = 512) */
+    mp4v->spatial_quality = 512;
+    mp4v->temporal_quality = 512;
+  }
+
+  /* entry is owned by the stsd from here on */
+  stsd->entries = g_list_prepend (stsd->entries, mp4v);
+  stsd->n_entries++;
+  return mp4v;
+}
+
+static SampleTableEntryTX3G *
+atom_trak_add_subtitle_entry (AtomTRAK * trak, AtomsContext * context,
+    guint32 type)
+{
+  SampleTableEntryTX3G *tx3g = sample_entry_tx3g_new ();
+  AtomSTSD *stsd = &trak->mdia.minf.stbl.stsd;
+
+  tx3g->se.header.type = type;
+  tx3g->se.kind = SUBTITLE;
+  tx3g->se.data_reference_index = 1;
+
+  /* entry is owned by the stsd from here on */
+  stsd->entries = g_list_prepend (stsd->entries, tx3g);
+  stsd->n_entries++;
+  return tx3g;
+}
+
+
+/* Declare that every sample of this track has the same size (0 means
+ * variable size and re-enables the per-sample stsz table). */
+void
+atom_trak_set_constant_size_samples (AtomTRAK * trak, guint32 sample_size)
+{
+  trak->mdia.minf.stbl.stsz.sample_size = sample_size;
+}
+
+/* Configure tkhd and mdia of the trak for audio. */
+static void
+atom_trak_set_audio (AtomTRAK * trak, AtomsContext * context)
+{
+  atom_tkhd_set_audio (&trak->tkhd);
+  atom_mdia_set_audio (&trak->mdia, context);
+}
+
+/* Configure tkhd and mdia of the trak for video. */
+static void
+atom_trak_set_video (AtomTRAK * trak, AtomsContext * context, guint32 width,
+    guint32 height)
+{
+  atom_tkhd_set_video (&trak->tkhd, context, width, height);
+  atom_mdia_set_video (&trak->mdia, context);
+}
+
+/* Configure tkhd and mdia of the trak for subtitles (dimensions unset). */
+static void
+atom_trak_set_subtitle (AtomTRAK * trak, AtomsContext * context)
+{
+  atom_tkhd_set_subtitle (&trak->tkhd, context, 0, 0);
+  atom_mdia_set_subtitle (&trak->mdia, context);
+}
+
+/* Audio track defaults: media timescale equals the sample rate. */
+static void
+atom_trak_set_audio_commons (AtomTRAK * trak, AtomsContext * context,
+    guint32 rate)
+{
+  atom_trak_set_audio (trak, context);
+  trak->mdia.mdhd.time_info.timescale = rate;
+}
+
+/* Video track defaults: media timescale and tkhd dimensions (stored as
+ * 16.16 fixed point). */
+static void
+atom_trak_set_video_commons (AtomTRAK * trak, AtomsContext * context,
+    guint32 rate, guint32 width, guint32 height)
+{
+  atom_trak_set_video (trak, context, width, height);
+  trak->mdia.mdhd.time_info.timescale = rate;
+  trak->tkhd.width = width << 16;
+  trak->tkhd.height = height << 16;
+}
+
+/* Subtitle track defaults: millisecond timescale, shared alternate group
+ * and a layer above the video. */
+static void
+atom_trak_set_subtitle_commons (AtomTRAK * trak, AtomsContext * context)
+{
+  atom_trak_set_subtitle (trak, context);
+  trak->mdia.mdhd.time_info.timescale = 1000;
+
+  trak->tkhd.alternate_group = 2;       /* same for all subtitles */
+  trak->tkhd.layer = -1;        /* above video (layer 0) */
+}
+
+/* Prepend an extension atom to a sample table entry. Only audio and video
+ * entries carry an extension list; anything else is a programming error. */
+void
+sample_table_entry_add_ext_atom (SampleTableEntry * ste, AtomInfo * ext)
+{
+  GList **list;
+
+  switch (ste->kind) {
+    case AUDIO:
+      list = &(((SampleTableEntryMP4A *) ste)->extension_atoms);
+      break;
+    case VIDEO:
+      list = &(((SampleTableEntryMP4V *) ste)->extension_atoms);
+      break;
+    default:
+      g_assert_not_reached ();
+      return;
+  }
+
+  *list = g_list_prepend (*list, ext);
+}
+
+/* Replace the trak's sample description with a single audio entry built
+ * from 'entry', optionally attaching one extension atom. sample_size of 0
+ * means variable-size samples. Returns the new entry (owned by the stsd). */
+SampleTableEntryMP4A *
+atom_trak_set_audio_type (AtomTRAK * trak, AtomsContext * context,
+    AudioSampleEntry * entry, guint32 scale, AtomInfo * ext, gint sample_size)
+{
+  SampleTableEntryMP4A *ste;
+
+  atom_trak_set_audio_commons (trak, context, scale);
+  atom_stsd_remove_entries (&trak->mdia.minf.stbl.stsd);
+  ste = atom_trak_add_audio_entry (trak, context, entry->fourcc);
+
+  trak->is_video = FALSE;
+  trak->is_h264 = FALSE;
+
+  ste->version = entry->version;
+  ste->compression_id = entry->compression_id;
+  ste->sample_size = entry->sample_size;
+  /* sample rate is stored as 16.16 fixed point */
+  ste->sample_rate = entry->sample_rate << 16;
+  ste->channels = entry->channels;
+
+  ste->samples_per_packet = entry->samples_per_packet;
+  ste->bytes_per_sample = entry->bytes_per_sample;
+  ste->bytes_per_packet = entry->bytes_per_packet;
+  ste->bytes_per_frame = entry->bytes_per_frame;
+
+  if (ext)
+    ste->extension_atoms = g_list_prepend (ste->extension_atoms, ext);
+
+  /* 0 size means variable size */
+  atom_trak_set_constant_size_samples (trak, sample_size);
+
+  return ste;
+}
+
+/* Turn the trak into a timecode track: install the flavor-appropriate
+ * media information header (gmhd for MOV, nmhd otherwise) and add a tmcd
+ * sample entry. Returns NULL when timecode tracks are not applicable for
+ * this flavor.
+ *
+ * Fix: the gmhd/nmhd locals were initialized from trak->mdia.minf and then
+ * immediately overwritten by the *_new() result - dead stores that
+ * suggested the existing value was reused when it never was. */
+SampleTableEntryTMCD *
+atom_trak_set_timecode_type (AtomTRAK * trak, AtomsContext * context,
+    guint32 trak_timescale, GstVideoTimeCode * tc)
+{
+  SampleTableEntryTMCD *ste;
+
+  if (context->flavor != ATOMS_TREE_FLAVOR_MOV &&
+      !context->force_create_timecode_trak) {
+    return NULL;
+  }
+
+  if (context->flavor == ATOMS_TREE_FLAVOR_MOV) {
+    AtomGMHD *gmhd = atom_gmhd_new ();
+
+    gmhd->gmin.graphics_mode = 0x0040;
+    gmhd->gmin.opcolor[0] = 0x8000;
+    gmhd->gmin.opcolor[1] = 0x8000;
+    gmhd->gmin.opcolor[2] = 0x8000;
+    gmhd->tmcd = atom_tmcd_new ();
+    gmhd->tmcd->tcmi.text_size = 12;
+    gmhd->tmcd->tcmi.font_name = g_strdup ("Chicago");  /* Pascal string */
+
+    trak->mdia.minf.gmhd = gmhd;
+  } else if (context->force_create_timecode_trak) {
+    /* MOV files use GMHD, other files use NMHD */
+    trak->mdia.minf.nmhd = atom_nmhd_new ();
+  } else {
+    return NULL;
+  }
+  ste = atom_trak_add_timecode_entry (trak, context, trak_timescale, tc);
+  trak->is_video = FALSE;
+  trak->is_h264 = FALSE;
+
+  return ste;
+}
+
+/* Turn the trak into a closed-caption track (MOV flavor only): set up the
+ * media handler, add a generic sample entry with the caption fourcc and
+ * install a gmhd. Returns the new entry (owned by the stsd) or NULL for
+ * non-MOV flavors.
+ *
+ * Fix: 'gmhd' was initialized from trak->mdia.minf.gmhd and then
+ * unconditionally overwritten by atom_gmhd_new() - a dead store removed
+ * here. */
+SampleTableEntry *
+atom_trak_set_caption_type (AtomTRAK * trak, AtomsContext * context,
+    guint32 trak_timescale, guint32 caption_type)
+{
+  SampleTableEntry *ste;
+  AtomGMHD *gmhd;
+  AtomSTSD *stsd = &trak->mdia.minf.stbl.stsd;
+
+  if (context->flavor != ATOMS_TREE_FLAVOR_MOV) {
+    return NULL;
+  }
+
+  trak->mdia.mdhd.time_info.timescale = trak_timescale;
+  trak->mdia.hdlr.component_type = FOURCC_mhlr;
+  trak->mdia.hdlr.handler_type = FOURCC_clcp;
+  g_free (trak->mdia.hdlr.name);
+  trak->mdia.hdlr.name = g_strdup ("Closed Caption Media Handler");
+
+  ste = g_new0 (SampleTableEntry, 1);
+  atom_sample_entry_init (ste, caption_type);
+  ste->kind = CLOSEDCAPTION;
+  ste->data_reference_index = 1;
+  stsd->entries = g_list_prepend (stsd->entries, ste);
+  stsd->n_entries++;
+
+  gmhd = atom_gmhd_new ();
+  gmhd->gmin.graphics_mode = 0x0040;
+  gmhd->gmin.opcolor[0] = 0x8000;
+  gmhd->gmin.opcolor[1] = 0x8000;
+  gmhd->gmin.opcolor[2] = 0x8000;
+
+  trak->mdia.minf.gmhd = gmhd;
+  trak->is_video = FALSE;
+  trak->is_h264 = FALSE;
+
+  return ste;
+}
+
+static AtomInfo *
+build_pasp_extension (gint par_width, gint par_height)
+{
+  AtomData *atom_data = atom_data_new (FOURCC_pasp);
+  guint8 *data;
+
+  atom_data_alloc_mem (atom_data, 8);
+  data = atom_data->data;
+
+  /* pasp = pixel aspect ratio box: hSpacing then vSpacing, both 32-bit BE */
+  GST_WRITE_UINT32_BE (data, par_width);
+  GST_WRITE_UINT32_BE (data + 4, par_height);
+
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+      atom_data_free);
+}
+
+AtomInfo *
+build_fiel_extension (GstVideoInterlaceMode mode, GstVideoFieldOrder order)
+{
+  AtomData *atom_data = atom_data_new (FOURCC_fiel);
+  guint8 *data;
+  gint interlace;
+  gint field_order;
+
+  /* 'fiel' box: one byte field count indicator, one byte field ordering */
+  switch (mode) {
+    case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
+      interlace = 1;
+      field_order = 0;
+      break;
+    case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
+      interlace = 2;
+      field_order = (order == GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST) ? 9 : 14;
+      break;
+    default:
+      interlace = 0;
+      field_order = 0;
+      break;
+  }
+
+  atom_data_alloc_mem (atom_data, 2);
+  data = atom_data->data;
+  GST_WRITE_UINT8 (data, interlace);
+  GST_WRITE_UINT8 (data + 1, field_order);
+
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+      atom_data_free);
+}
+
+AtomInfo *
+build_colr_extension (const GstVideoColorimetry * colorimetry, gboolean is_mp4)
+{
+  AtomData *atom_data = atom_data_new (FOURCC_colr);
+  guint16 primaries, transfer_function, matrix;
+  guint8 *data;
+
+  /* colorimetry mapped to coded (ISO 23001-8 style) values */
+  primaries = gst_video_color_primaries_to_iso (colorimetry->primaries);
+  transfer_function =
+      gst_video_transfer_function_to_iso (colorimetry->transfer);
+  matrix = gst_video_color_matrix_to_iso (colorimetry->matrix);
+
+  /* mp4 ('nclx') carries one extra byte for the full-range flag */
+  atom_data_alloc_mem (atom_data, is_mp4 ? 11 : 10);
+  data = atom_data->data;
+
+  /* colour specification box */
+  GST_WRITE_UINT32_LE (data, is_mp4 ? FOURCC_nclx : FOURCC_nclc);
+  GST_WRITE_UINT16_BE (data + 4, primaries);
+  GST_WRITE_UINT16_BE (data + 6, transfer_function);
+  GST_WRITE_UINT16_BE (data + 8, matrix);
+
+  if (is_mp4) {
+    gboolean full_range = colorimetry->range == GST_VIDEO_COLOR_RANGE_0_255;
+
+    GST_WRITE_UINT8 (data + 10, full_range ? 0x80 : 0x00);
+  }
+
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+      atom_data_free);
+}
+
+AtomInfo *
+build_clap_extension (gint width_n, gint width_d, gint height_n, gint height_d,
+    gint h_off_n, gint h_off_d, gint v_off_n, gint v_off_d)
+{
+  AtomData *atom_data = atom_data_new (FOURCC_clap);
+  const gint fields[8] = { width_n, width_d, height_n, height_d,
+    h_off_n, h_off_d, v_off_n, v_off_d
+  };
+  guint8 *data;
+  gint i;
+
+  /* clean aperture box: four num/denom fraction pairs, 32-bit BE each */
+  atom_data_alloc_mem (atom_data, 32);
+  data = atom_data->data;
+
+  for (i = 0; i < 8; i++)
+    GST_WRITE_UINT32_BE (data + 4 * i, fields[i]);
+
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+      atom_data_free);
+}
+
+AtomInfo *
+build_tapt_extension (gint clef_width, gint clef_height, gint prof_width,
+    gint prof_height, gint enof_width, gint enof_height)
+{
+  AtomData *atom_data = atom_data_new (FOURCC_tapt);
+  const guint32 types[3] = { FOURCC_clef, FOURCC_prof, FOURCC_enof };
+  const gint widths[3] = { clef_width, prof_width, enof_width };
+  const gint heights[3] = { clef_height, prof_height, enof_height };
+  guint8 *data;
+  gint i;
+
+  /* track aperture mode dimensions: three 20-byte full boxes
+   * (clean aperture, production aperture, encoded pixels) */
+  atom_data_alloc_mem (atom_data, 60);
+  data = atom_data->data;
+
+  for (i = 0; i < 3; i++) {
+    GST_WRITE_UINT32_BE (data, 20);     /* sub-box size */
+    GST_WRITE_UINT32_LE (data + 4, types[i]);
+    GST_WRITE_UINT32_BE (data + 8, 0);  /* version/flags */
+    GST_WRITE_UINT32_BE (data + 12, widths[i]);
+    GST_WRITE_UINT32_BE (data + 16, heights[i]);
+    data += 20;
+  }
+
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+      atom_data_free);
+}
+
+static AtomInfo *
+build_mov_video_sample_description_padding_extension (void)
+{
+  /* empty 'clap' atom serialized as nothing (atom_copy_empty); used by the
+   * MOV path as a trailing zero "length" to work around broken software */
+  AtomData *atom_data = atom_data_new (FOURCC_clap);
+
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_copy_empty,
+      atom_data_free);
+}
+
+SampleTableEntryMP4V *
+atom_trak_set_video_type (AtomTRAK * trak, AtomsContext * context,
+    VisualSampleEntry * entry, guint32 scale, GList * ext_atoms_list)
+{
+  SampleTableEntryMP4V *ste;
+  guint dwidth, dheight;
+  gint par_n, par_d;
+
+  par_n = entry->par_n;
+  par_d = entry->par_d;
+
+  /* display dimensions default to the coded pixel dimensions */
+  dwidth = entry->width;
+  dheight = entry->height;
+  /* ISO file spec says track header w/h indicates track's visual presentation
+   * (so this together with pixels w/h implicitly defines PAR) */
+  if (par_n && (context->flavor != ATOMS_TREE_FLAVOR_MOV)) {
+    /* NOTE(review): assumes par_d != 0 whenever par_n is set — confirm */
+    dwidth = entry->width * par_n / par_d;
+  }
+
+  /* only set track-level commons for the first sample description */
+  if (trak->mdia.minf.stbl.stsd.n_entries < 1) {
+    atom_trak_set_video_commons (trak, context, scale, dwidth, dheight);
+    trak->is_video = TRUE;
+    trak->is_h264 = (entry->fourcc == FOURCC_avc1
+        || entry->fourcc == FOURCC_avc3);
+  }
+  ste = atom_trak_add_video_entry (trak, context, entry->fourcc);
+
+  ste->version = entry->version;
+  ste->width = entry->width;
+  ste->height = entry->height;
+  ste->depth = entry->depth;
+  ste->color_table_id = entry->color_table_id;
+  ste->frame_count = entry->frame_count;
+
+  /* takes ownership of @ext_atoms_list */
+  if (ext_atoms_list)
+    ste->extension_atoms = g_list_concat (ste->extension_atoms, ext_atoms_list);
+
+  ste->extension_atoms = g_list_append (ste->extension_atoms,
+      build_pasp_extension (par_n, par_d));
+
+  if (context->flavor == ATOMS_TREE_FLAVOR_MOV) {
+    /* append 0 as a terminator "length" to work around some broken software */
+    ste->extension_atoms =
+        g_list_append (ste->extension_atoms,
+        build_mov_video_sample_description_padding_extension ());
+  }
+
+  return ste;
+}
+
+void
+subtitle_sample_entry_init (SubtitleSampleEntry * entry)
+{
+  /* default subtitle style: unspecified face and size, opaque white text */
+  entry->font_face = 0;
+  entry->font_size = 0;
+  entry->foreground_color_rgba = 0xFFFFFFFF;    /* all white, opaque */
+}
+
+SampleTableEntryTX3G *
+atom_trak_set_subtitle_type (AtomTRAK * trak, AtomsContext * context,
+    SubtitleSampleEntry * entry)
+{
+  SampleTableEntryTX3G *ste;
+
+  /* (re)configure the trak for subtitle media and install a fresh tx3g
+   * sample description reflecting @entry's text style */
+  atom_trak_set_subtitle_commons (trak, context);
+  atom_stsd_remove_entries (&trak->mdia.minf.stbl.stsd);
+  ste = atom_trak_add_subtitle_entry (trak, context, entry->fourcc);
+
+  ste->font_face = entry->font_face;
+  ste->font_size = entry->font_size;
+  ste->foreground_color_rgba = entry->foreground_color_rgba;
+
+  trak->is_video = FALSE;
+  trak->is_h264 = FALSE;
+
+  return ste;
+}
+
+static void
+atom_mfhd_init (AtomMFHD * mfhd, guint32 sequence_number)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  /* movie fragment header: full box, version 0, no flags */
+  atom_full_init (&(mfhd->header), FOURCC_mfhd, 0, 0, 0, no_flags);
+  mfhd->sequence_number = sequence_number;
+}
+
+static void
+atom_moof_init (AtomMOOF * moof, AtomsContext * context,
+    guint32 sequence_number)
+{
+  /* plain container box holding one mfhd plus a list of trafs */
+  atom_header_set (&moof->header, FOURCC_moof, 0, 0);
+  moof->trafs = NULL;
+  atom_mfhd_init (&moof->mfhd, sequence_number);
+}
+
+AtomMOOF *
+atom_moof_new (AtomsContext * context, guint32 sequence_number)
+{
+  AtomMOOF *atom = g_new0 (AtomMOOF, 1);
+
+  /* zero-allocated, then initialized; caller owns the result */
+  atom_moof_init (atom, context, sequence_number);
+  return atom;
+}
+
+void
+atom_moof_set_base_offset (AtomMOOF * moof, guint64 offset)
+{
+  GList *walker;
+
+  if (offset == moof->traf_offset)
+    return;                     /* Nothing to do */
+
+  /* propagate the explicit base data offset into every traf's tfhd */
+  for (walker = moof->trafs; walker; walker = g_list_next (walker)) {
+    AtomTRAF *traf = (AtomTRAF *) walker->data;
+
+    traf->tfhd.header.flags[2] |= TF_BASE_DATA_OFFSET;
+    traf->tfhd.base_data_offset = offset;
+  }
+
+  moof->traf_offset = offset;
+}
+
+static void
+atom_trun_free (AtomTRUN * trun)
+{
+  /* clear the full-box header and the per-sample entry array,
+   * then release the atom itself */
+  atom_full_clear (&trun->header);
+  atom_array_clear (&trun->entries);
+  g_free (trun);
+}
+
+static void
+atom_sdtp_free (AtomSDTP * sdtp)
+{
+  /* clear the full-box header and the per-sample byte array,
+   * then release the atom itself */
+  atom_full_clear (&sdtp->header);
+  atom_array_clear (&sdtp->entries);
+  g_free (sdtp);
+}
+
+void
+atom_traf_free (AtomTRAF * traf)
+{
+  /* release all contained trun and sdtp atoms together with their lists,
+   * then the traf itself */
+  g_list_free_full (traf->truns, (GDestroyNotify) atom_trun_free);
+  traf->truns = NULL;
+
+  g_list_free_full (traf->sdtps, (GDestroyNotify) atom_sdtp_free);
+  traf->sdtps = NULL;
+
+  g_free (traf);
+}
+
+void
+atom_moof_free (AtomMOOF * moof)
+{
+  /* release all contained traf atoms and their list, then the moof */
+  g_list_free_full (moof->trafs, (GDestroyNotify) atom_traf_free);
+  moof->trafs = NULL;
+
+  g_free (moof);
+}
+
+static guint64
+atom_mfhd_copy_data (AtomMFHD * mfhd, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+
+  /* serialize the full-box header first; returning 0 signals failure */
+  if (!atom_full_copy_data (&mfhd->header, buffer, size, offset)) {
+    return 0;
+  }
+
+  prop_copy_uint32 (mfhd->sequence_number, buffer, size, offset);
+
+  /* patch the size field now that the payload length is known */
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+static guint64
+atom_tfhd_copy_data (AtomTFHD * tfhd, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  guint32 flags;
+
+  if (!atom_full_copy_data (&tfhd->header, buffer, size, offset)) {
+    return 0;
+  }
+
+  prop_copy_uint32 (tfhd->track_ID, buffer, size, offset);
+
+  /* each optional field is present iff its flag bit is set in the header */
+  flags = atom_full_get_flags_as_uint (&tfhd->header);
+
+  if (flags & TF_BASE_DATA_OFFSET)
+    prop_copy_uint64 (tfhd->base_data_offset, buffer, size, offset);
+  if (flags & TF_SAMPLE_DESCRIPTION_INDEX)
+    prop_copy_uint32 (tfhd->sample_description_index, buffer, size, offset);
+  if (flags & TF_DEFAULT_SAMPLE_DURATION)
+    prop_copy_uint32 (tfhd->default_sample_duration, buffer, size, offset);
+  if (flags & TF_DEFAULT_SAMPLE_SIZE)
+    prop_copy_uint32 (tfhd->default_sample_size, buffer, size, offset);
+  if (flags & TF_DEFAULT_SAMPLE_FLAGS)
+    prop_copy_uint32 (tfhd->default_sample_flags, buffer, size, offset);
+
+  /* patch the size field now that the payload length is known */
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+static guint64
+atom_tfdt_copy_data (AtomTFDT * tfdt, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+
+  if (!atom_full_copy_data (&tfdt->header, buffer, size, offset)) {
+    return 0;
+  }
+
+  /* 32-bit time if version == 0 else 64-bit: */
+  if (tfdt->header.version == 0)
+    prop_copy_uint32 (tfdt->base_media_decode_time, buffer, size, offset);
+  else
+    prop_copy_uint64 (tfdt->base_media_decode_time, buffer, size, offset);
+
+  /* patch the size field now that the payload length is known */
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+static guint64
+atom_trun_copy_data (AtomTRUN * trun, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  guint32 flags, i;
+
+  /* snapshot the flags; they decide which optional fields get written.
+   * (the previous code also wrote the same value straight back with
+   * atom_full_set_flags_as_uint — a no-op round-trip, now dropped) */
+  flags = atom_full_get_flags_as_uint (&trun->header);
+
+  if (!atom_full_copy_data (&trun->header, buffer, size, offset)) {
+    return 0;
+  }
+
+  prop_copy_uint32 (trun->sample_count, buffer, size, offset);
+
+  if (flags & TR_DATA_OFFSET) {
+    prop_copy_int32 (trun->data_offset, buffer, size, offset);
+  }
+  if (flags & TR_FIRST_SAMPLE_FLAGS)
+    prop_copy_uint32 (trun->first_sample_flags, buffer, size, offset);
+
+  /* per-sample fields, each present only when its flag is set */
+  for (i = 0; i < atom_array_get_len (&trun->entries); i++) {
+    TRUNSampleEntry *entry = &atom_array_index (&trun->entries, i);
+
+    if (flags & TR_SAMPLE_DURATION)
+      prop_copy_uint32 (entry->sample_duration, buffer, size, offset);
+    if (flags & TR_SAMPLE_SIZE)
+      prop_copy_uint32 (entry->sample_size, buffer, size, offset);
+    if (flags & TR_SAMPLE_FLAGS)
+      prop_copy_uint32 (entry->sample_flags, buffer, size, offset);
+    if (flags & TR_COMPOSITION_TIME_OFFSETS)
+      prop_copy_uint32 (entry->sample_composition_time_offset,
+          buffer, size, offset);
+  }
+
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+static guint64
+atom_sdtp_copy_data (AtomSDTP * sdtp, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+
+  if (!atom_full_copy_data (&sdtp->header, buffer, size, offset)) {
+    return 0;
+  }
+
+  /* all entries at once */
+  /* NOTE(review): indexes entry 0 unconditionally — assumes the entry
+   * array is never empty here; confirm callers always add samples first */
+  prop_copy_fixed_size_string (&atom_array_index (&sdtp->entries, 0),
+      atom_array_get_len (&sdtp->entries), buffer, size, offset);
+
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+static guint64
+atom_traf_copy_data (AtomTRAF * traf, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  GList *walker;
+
+  /* traf container: header, tfhd, tfdt, then all truns and sdtps in order */
+  if (!atom_copy_data (&traf->header, buffer, size, offset)) {
+    return 0;
+  }
+  if (!atom_tfhd_copy_data (&traf->tfhd, buffer, size, offset)) {
+    return 0;
+  }
+  if (!atom_tfdt_copy_data (&traf->tfdt, buffer, size, offset)) {
+    return 0;
+  }
+  walker = g_list_first (traf->truns);
+  while (walker != NULL) {
+    if (!atom_trun_copy_data ((AtomTRUN *) walker->data, buffer, size, offset)) {
+      return 0;
+    }
+    walker = g_list_next (walker);
+  }
+
+  walker = g_list_first (traf->sdtps);
+  while (walker != NULL) {
+    if (!atom_sdtp_copy_data ((AtomSDTP *) walker->data, buffer, size, offset)) {
+      return 0;
+    }
+    walker = g_list_next (walker);
+  }
+
+  /* patch the container size now that all children are serialized */
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+/* creates moof atom; metadata is written expecting actual buffer data
+ * is in mdata directly after moof, and is consecutively written per trak */
+guint64
+atom_moof_copy_data (AtomMOOF * moof, guint8 ** buffer,
+    guint64 * size, guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  GList *walker;
+
+  /* moof container: header, mfhd, then every traf in list order;
+   * 0 return signals serialization failure */
+  if (!atom_copy_data (&moof->header, buffer, size, offset))
+    return 0;
+
+  if (!atom_mfhd_copy_data (&moof->mfhd, buffer, size, offset))
+    return 0;
+
+  walker = g_list_first (moof->trafs);
+  while (walker != NULL) {
+    if (!atom_traf_copy_data ((AtomTRAF *) walker->data, buffer, size, offset)) {
+      return 0;
+    }
+    walker = g_list_next (walker);
+  }
+
+  /* patch the container size now that all children are serialized */
+  atom_write_size (buffer, size, offset, original_offset);
+
+  return *offset - original_offset;
+}
+
+static void
+atom_tfhd_init (AtomTFHD * tfhd, guint32 track_ID)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  /* track fragment header; all per-sample defaults start out zero */
+  atom_full_init (&tfhd->header, FOURCC_tfhd, 0, 0, 0, no_flags);
+  tfhd->track_ID = track_ID;
+  tfhd->base_data_offset = 0;
+  tfhd->default_sample_duration = 0;
+  tfhd->default_sample_size = 0;
+  tfhd->default_sample_flags = 0;
+  /* sample description indices are 1-based; default to the first entry */
+  tfhd->sample_description_index = 1;
+}
+
+static void
+atom_tfdt_init (AtomTFDT * tfdt)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  /* track fragment decode time, starting from zero */
+  atom_full_init (&tfdt->header, FOURCC_tfdt, 0, 0, 0, no_flags);
+  tfdt->base_media_decode_time = 0;
+}
+
+static void
+atom_trun_init (AtomTRUN * trun)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  atom_full_init (&trun->header, FOURCC_trun, 0, 0, 0, no_flags);
+  trun->first_sample_flags = 0;
+  trun->sample_count = 0;
+  trun->data_offset = 0;
+  /* pre-size the per-sample entry array */
+  atom_array_init (&trun->entries, 512);
+}
+
+static AtomTRUN *
+atom_trun_new (void)
+{
+  AtomTRUN *atom = g_new0 (AtomTRUN, 1);
+
+  /* zero-allocated, then initialized; caller owns the result */
+  atom_trun_init (atom);
+  return atom;
+}
+
+static void
+atom_sdtp_init (AtomSDTP * sdtp)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  /* sample dependency table; one byte per sample, pre-sized array */
+  atom_full_init (&sdtp->header, FOURCC_sdtp, 0, 0, 0, no_flags);
+  atom_array_init (&sdtp->entries, 512);
+}
+
+static AtomSDTP *
+atom_sdtp_new (AtomsContext * context)
+{
+  AtomSDTP *atom = g_new0 (AtomSDTP, 1);
+
+  /* @context is unused here; kept for constructor-signature symmetry */
+  atom_sdtp_init (atom);
+  return atom;
+}
+
+static void
+atom_traf_add_sdtp (AtomTRAF * traf, AtomSDTP * sdtp)
+{
+  /* appends @sdtp to the traf; takes ownership */
+  traf->sdtps = g_list_append (traf->sdtps, sdtp);
+}
+
+static void
+atom_sdtp_add_samples (AtomSDTP * sdtp, guint8 val)
+{
+  /* append one dependency byte for the sample(s) just added */
+  /* it does not make much/any sense according to specs,
+   * but that's how MS isml samples seem to do it */
+  atom_array_append (&sdtp->entries, val, 256);
+}
+
+void
+atom_trun_set_offset (AtomTRUN * trun, gint32 offset)
+{
+  /* an explicit data offset requires the TR_DATA_OFFSET flag to be set */
+  trun->data_offset = offset;
+  trun->header.flags[2] |= TR_DATA_OFFSET;
+}
+
+static gboolean
+atom_trun_can_append_samples_to_entry (AtomTRUN * trun,
+    TRUNSampleEntry * nentry, guint32 nsamples, guint32 delta, guint32 size,
+    guint32 flags, gint32 data_offset, gint64 pts_offset)
+{
+  /* Whether new samples could be accounted for in the existing entry
+   * @nentry (identical flags/size/duration, contiguous data, no pts
+   * offset) rather than appending fresh per-sample entries.
+   * @nsamples is currently unused by these checks. */
+  if (pts_offset != 0)
+    return FALSE;
+  if (nentry->sample_flags != flags)
+    return FALSE;
+  if (trun->data_offset + nentry->sample_size != data_offset)
+    return FALSE;
+  if (nentry->sample_size != size)
+    return FALSE;
+  if (nentry->sample_duration != delta)
+    return FALSE;
+
+  /* FIXME: this should be TRUE but currently fails on demuxing */
+  return FALSE;
+}
+
+static void
+atom_trun_append_samples (AtomTRUN * trun, TRUNSampleEntry * nentry,
+    guint32 nsamples, guint32 delta, guint32 size)
+{
+  /* Account for @nsamples more samples by only bumping the count — the
+   * existing entry already matches (see
+   * atom_trun_can_append_samples_to_entry).  Currently unreachable since
+   * that check always returns FALSE. */
+  trun->sample_count += nsamples;
+}
+
+static void
+atom_trun_add_samples (AtomTRUN * trun, guint32 nsamples, guint32 delta,
+    guint32 size, guint32 flags, gint64 pts_offset)
+{
+  guint32 i;                    /* was int: avoid signed/unsigned compare */
+
+  /* composition time offsets require the corresponding trun flag */
+  if (pts_offset != 0)
+    trun->header.flags[1] |= (TR_COMPOSITION_TIME_OFFSETS >> 8);
+
+  /* one identical per-sample entry for each of the @nsamples samples */
+  for (i = 0; i < nsamples; i++) {
+    TRUNSampleEntry nentry;
+
+    nentry.sample_duration = delta;
+    nentry.sample_size = size;
+    nentry.sample_flags = flags;
+    if (pts_offset != 0) {
+      nentry.sample_composition_time_offset = pts_offset + i * delta;
+    } else {
+      nentry.sample_composition_time_offset = 0;
+    }
+    atom_array_append (&trun->entries, nentry, 256);
+    trun->sample_count++;
+  }
+}
+
+static void
+atom_traf_init (AtomTRAF * traf, AtomsContext * context, guint32 track_ID)
+{
+  atom_header_set (&traf->header, FOURCC_traf, 0, 0);
+  atom_tfhd_init (&traf->tfhd, track_ID);
+  atom_tfdt_init (&traf->tfdt);
+  traf->truns = NULL;
+
+  /* smooth-streaming (isml) fragments carry sample dependency info */
+  if (context->flavor == ATOMS_TREE_FLAVOR_ISML)
+    atom_traf_add_sdtp (traf, atom_sdtp_new (context));
+}
+
+AtomTRAF *
+atom_traf_new (AtomsContext * context, guint32 track_ID)
+{
+  AtomTRAF *atom = g_new0 (AtomTRAF, 1);
+
+  /* zero-allocated, then initialized; caller owns the result */
+  atom_traf_init (atom, context, track_ID);
+  return atom;
+}
+
+void
+atom_traf_set_base_decode_time (AtomTRAF * traf, guint64 base_decode_time)
+{
+  /* Set this fragment's base media decode time (tfdt). */
+  traf->tfdt.base_media_decode_time = base_decode_time;
+  /* If we need to write a 64-bit tfdt, set the atom version */
+  if (base_decode_time > G_MAXUINT32)
+    traf->tfdt.header.version = 1;
+}
+
+static void
+atom_traf_add_trun (AtomTRAF * traf, AtomTRUN * trun)
+{
+  /* appends @trun to the traf; takes ownership */
+  traf->truns = g_list_append (traf->truns, trun);
+}
+
+/* Account for @nsamples consecutive samples (duration @delta, size @size,
+ * data at @data_offset) in this track fragment, creating a new trun when
+ * the last one cannot absorb them, and downgrading tfhd defaults to
+ * per-sample trun fields whenever a value stops matching. */
+void
+atom_traf_add_samples (AtomTRAF * traf, guint32 nsamples,
+    guint32 delta, guint32 size, gint32 data_offset, gboolean sync,
+    gint64 pts_offset, gboolean sdtp_sync)
+{
+  GList *l = NULL;
+  AtomTRUN *prev_trun, *trun = NULL;
+  TRUNSampleEntry *nentry = NULL;
+  guint32 flags;
+
+  /* 0x10000 is sample-is-difference-sample flag
+   * low byte stuff is what ismv uses */
+  flags = (sync ? 0x0 : 0x10000) | (sdtp_sync ? 0x40 : 0xc0);
+
+  /* try extending the last entry of the last trun first */
+  if (traf->truns) {
+    trun = g_list_last (traf->truns)->data;
+    nentry =
+        &atom_array_index (&trun->entries,
+        atom_array_get_len (&trun->entries) - 1);
+
+    if (!atom_trun_can_append_samples_to_entry (trun, nentry, nsamples, delta,
+            size, flags, data_offset, pts_offset)) {
+      /* if we can't add to the previous trun, write a new one */
+      trun = NULL;
+      nentry = NULL;
+    }
+  }
+  /* non-NULL only when the existing trun/entry is being extended */
+  prev_trun = trun;
+
+  if (!traf->truns) {
+    /* optimistic; indicate all defaults present in tfhd */
+    traf->tfhd.header.flags[2] = TF_DEFAULT_SAMPLE_DURATION |
+        TF_DEFAULT_SAMPLE_SIZE | TF_DEFAULT_SAMPLE_FLAGS;
+    traf->tfhd.default_sample_duration = delta;
+    traf->tfhd.default_sample_size = size;
+    traf->tfhd.default_sample_flags = flags;
+  }
+
+  if (!trun) {
+    /* fresh trun for these samples */
+    trun = atom_trun_new ();
+    atom_traf_add_trun (traf, trun);
+    trun->first_sample_flags = flags;
+    trun->data_offset = data_offset;
+    if (data_offset != 0)
+      trun->header.flags[2] |= TR_DATA_OFFSET;
+  }
+
+  /* check if still matching defaults,
+   * if not, abandon default and need entry for each sample */
+  if (traf->tfhd.default_sample_duration != delta || prev_trun == trun) {
+    traf->tfhd.header.flags[2] &= ~TF_DEFAULT_SAMPLE_DURATION;
+    for (l = traf->truns; l; l = g_list_next (l)) {
+      ((AtomTRUN *) l->data)->header.flags[1] |= (TR_SAMPLE_DURATION >> 8);
+    }
+  }
+  if (traf->tfhd.default_sample_size != size || prev_trun == trun) {
+    traf->tfhd.header.flags[2] &= ~TF_DEFAULT_SAMPLE_SIZE;
+    for (l = traf->truns; l; l = g_list_next (l)) {
+      ((AtomTRUN *) l->data)->header.flags[1] |= (TR_SAMPLE_SIZE >> 8);
+    }
+  }
+  if (traf->tfhd.default_sample_flags != flags || prev_trun == trun) {
+    if (trun->sample_count == 1) {
+      /* at least will need first sample flag */
+      traf->tfhd.default_sample_flags = flags;
+      trun->header.flags[2] |= TR_FIRST_SAMPLE_FLAGS;
+    } else {
+      /* now we need sample flags for each sample */
+      traf->tfhd.header.flags[2] &= ~TF_DEFAULT_SAMPLE_FLAGS;
+      trun->header.flags[1] |= (TR_SAMPLE_FLAGS >> 8);
+      trun->header.flags[2] &= ~TR_FIRST_SAMPLE_FLAGS;
+    }
+  }
+
+  if (prev_trun == trun) {
+    atom_trun_append_samples (trun, nentry, nsamples, delta, size);
+  } else {
+    atom_trun_add_samples (trun, nsamples, delta, size, flags, pts_offset);
+  }
+
+  /* isml fragments also track per-sample dependency bytes */
+  if (traf->sdtps)
+    atom_sdtp_add_samples (traf->sdtps->data, 0x10 | ((flags & 0xff) >> 4));
+}
+
+guint32
+atom_traf_get_sample_num (AtomTRAF * traf)
+{
+  AtomTRUN *first_trun;
+
+  if (G_UNLIKELY (traf->truns == NULL))
+    return 0;
+
+  /* FIXME: only one trun? */
+  first_trun = (AtomTRUN *) traf->truns->data;
+  return atom_array_get_len (&first_trun->entries);
+}
+
+void
+atom_moof_add_traf (AtomMOOF * moof, AtomTRAF * traf)
+{
+  /* appends @traf to the moof; takes ownership */
+  moof->trafs = g_list_append (moof->trafs, traf);
+}
+
+static void
+atom_tfra_free (AtomTFRA * tfra)
+{
+  /* clear the full-box header and the entry array, then the atom itself */
+  atom_full_clear (&tfra->header);
+  atom_array_clear (&tfra->entries);
+  g_free (tfra);
+}
+
+AtomMFRA *
+atom_mfra_new (AtomsContext * context)
+{
+  AtomMFRA *mfra;
+
+  /* movie fragment random access container; tfras are added later */
+  mfra = g_new0 (AtomMFRA, 1);
+  atom_header_set (&mfra->header, FOURCC_mfra, 0, 0);
+  return mfra;
+}
+
+void
+atom_mfra_add_tfra (AtomMFRA * mfra, AtomTFRA * tfra)
+{
+  /* appends @tfra to the mfra; takes ownership */
+  mfra->tfras = g_list_append (mfra->tfras, tfra);
+}
+
+void
+atom_mfra_free (AtomMFRA * mfra)
+{
+  /* release all contained tfra atoms and their list, clear the header,
+   * then free the mfra itself */
+  g_list_free_full (mfra->tfras, (GDestroyNotify) atom_tfra_free);
+  mfra->tfras = NULL;
+
+  atom_clear (&mfra->header);
+  g_free (mfra);
+}
+
+static void
+atom_tfra_init (AtomTFRA * tfra, guint32 track_ID)
+{
+  guint8 no_flags[3] = { 0, 0, 0 };
+
+  /* track fragment random access table for one track */
+  atom_full_init (&tfra->header, FOURCC_tfra, 0, 0, 0, no_flags);
+  tfra->track_ID = track_ID;
+  atom_array_init (&tfra->entries, 512);
+}
+
+AtomTFRA *
+atom_tfra_new (AtomsContext * context, guint32 track_ID)
+{
+  AtomTFRA *atom = g_new0 (AtomTFRA, 1);
+
+  /* @context is unused here; kept for constructor-signature symmetry */
+  atom_tfra_init (atom, track_ID);
+  return atom;
+}
+
+/* Number of bytes needed to represent @num, minus one:
+ * 0 for values <= 0xff, up to 3 for full 32-bit values. */
+static inline gint
+need_bytes (guint32 num)
+{
+  gint n = 0;
+
+  for (num >>= 8; num != 0; num >>= 8)
+    n++;
+
+  return n;
+}
+
+void
+atom_tfra_add_entry (AtomTFRA * tfra, guint64 dts, guint32 sample_num)
+{
+  TFRAEntry entry;
+
+  entry.time = dts;
+  /* fill in later */
+  entry.moof_offset = 0;
+  /* always write a single trun in a single traf */
+  entry.traf_number = 1;
+  entry.trun_number = 1;
+  entry.sample_number = sample_num;
+
+  /* auto-use 64 bits if needed */
+  if (dts > G_MAXUINT32)
+    tfra->header.version = 1;
+
+  /* 1 byte will always do for traf and trun number,
+   * check how much sample_num needs.  The low bit-pair of 'lengths'
+   * stores (bytes - 1) for the sample number field; grow it if needed.
+   * (was a logical '||', which collapsed 'lengths' to 0 or 1) */
+  tfra->lengths = (tfra->lengths & 0xfc) |
+      MAX (tfra->lengths & 0x3, need_bytes (sample_num));
+
+  atom_array_append (&tfra->entries, entry, 256);
+}
+
+void
+atom_tfra_update_offset (AtomTFRA * tfra, guint64 offset)
+{
+  gint i;
+
+  /* 64-bit moof offsets force version 1 entries */
+  if (offset > G_MAXUINT32)
+    tfra->header.version = 1;
+
+  /* walk backwards filling in entries whose offset is still pending,
+   * stopping at the first entry that already has one */
+  for (i = atom_array_get_len (&tfra->entries) - 1; i >= 0; i--) {
+    TFRAEntry *pending = &atom_array_index (&tfra->entries, i);
+
+    if (pending->moof_offset != 0)
+      break;
+    pending->moof_offset = offset;
+  }
+}
+
+static guint64
+atom_tfra_copy_data (AtomTFRA * tfra, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  guint32 i;
+  TFRAEntry *entry;
+  guint32 data;
+  guint bytes;
+  guint version;
+
+  if (!atom_full_copy_data (&tfra->header, buffer, size, offset)) {
+    return 0;
+  }
+
+  prop_copy_uint32 (tfra->track_ID, buffer, size, offset);
+  prop_copy_uint32 (tfra->lengths, buffer, size, offset);
+  prop_copy_uint32 (atom_array_get_len (&tfra->entries), buffer, size, offset);
+
+  /* version 1 entries carry 64-bit time/offset fields */
+  version = tfra->header.version;
+  for (i = 0; i < atom_array_get_len (&tfra->entries); ++i) {
+    entry = &atom_array_index (&tfra->entries, i);
+    if (version) {
+      prop_copy_uint64 (entry->time, buffer, size, offset);
+      prop_copy_uint64 (entry->moof_offset, buffer, size, offset);
+    } else {
+      prop_copy_uint32 (entry->time, buffer, size, offset);
+      prop_copy_uint32 (entry->moof_offset, buffer, size, offset);
+    }
+
+    /* traf/trun/sample numbers occupy ((lengths >> n) & 0x3) + 1 bytes
+     * each, written big-endian by taking the trailing bytes of the
+     * 32-bit BE value.  (the bit-pairs were previously masked without
+     * shifting down, yielding bogus byte counts for non-zero pairs) */
+    bytes = ((tfra->lengths >> 4) & 0x3) + 1;
+    data = GUINT32_TO_BE (entry->traf_number);
+    prop_copy_fixed_size_string (((guint8 *) & data) + 4 - bytes, bytes,
+        buffer, size, offset);
+
+    bytes = ((tfra->lengths >> 2) & 0x3) + 1;
+    data = GUINT32_TO_BE (entry->trun_number);
+    prop_copy_fixed_size_string (((guint8 *) & data) + 4 - bytes, bytes,
+        buffer, size, offset);
+
+    bytes = (tfra->lengths & 0x3) + 1;
+    data = GUINT32_TO_BE (entry->sample_number);
+    prop_copy_fixed_size_string (((guint8 *) & data) + 4 - bytes, bytes,
+        buffer, size, offset);
+
+  }
+
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+static guint64
+atom_mfro_copy_data (guint32 s, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  guint8 flags[3] = { 0, 0, 0 };
+  AtomFull mfro;
+
+  /* mfro = movie fragment random access offset box; @s is the total size
+   * of the enclosing mfra so readers can locate it from the file end */
+  atom_full_init (&mfro, FOURCC_mfro, 0, 0, 0, flags);
+
+  if (!atom_full_copy_data (&mfro, buffer, size, offset)) {
+    return 0;
+  }
+
+  prop_copy_uint32 (s, buffer, size, offset);
+
+  atom_write_size (buffer, size, offset, original_offset);
+
+  return *offset - original_offset;
+}
+
+
+guint64
+atom_mfra_copy_data (AtomMFRA * mfra, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  GList *walker;
+
+  /* mfra container: header, every tfra, then a trailing mfro whose value
+   * is the complete mfra size; 0 return signals serialization failure */
+  if (!atom_copy_data (&mfra->header, buffer, size, offset))
+    return 0;
+
+  walker = g_list_first (mfra->tfras);
+  while (walker != NULL) {
+    if (!atom_tfra_copy_data ((AtomTFRA *) walker->data, buffer, size, offset)) {
+      return 0;
+    }
+    walker = g_list_next (walker);
+  }
+
+  /* 16 is the size of the mfro atom */
+  if (!atom_mfro_copy_data (*offset - original_offset + 16, buffer,
+          size, offset))
+    return 0;
+
+  atom_write_size (buffer, size, offset, original_offset);
+  return *offset - original_offset;
+}
+
+/* some sample description construction helpers */
+
+AtomInfo *
+build_esds_extension (AtomTRAK * trak, guint8 object_type, guint8 stream_type,
+    const GstBuffer * codec_data, guint32 avg_bitrate, guint32 max_bitrate)
+{
+  guint32 track_id;
+  AtomESDS *esds;
+
+  track_id = trak->tkhd.track_ID;
+
+  /* elementary stream descriptor; ES id is the 16-bit track id */
+  esds = atom_esds_new ();
+  esds->es.id = track_id & 0xFFFF;
+  esds->es.dec_conf_desc.object_type = object_type;
+  /* streamType occupies the top 6 bits; the low set bit is presumably the
+   * reserved bit of the DecoderConfigDescriptor — confirm against 14496-1 */
+  esds->es.dec_conf_desc.stream_type = stream_type << 2 | 0x01;
+
+  /* only override bitrates when actually provided */
+  if (avg_bitrate > 0)
+    esds->es.dec_conf_desc.avg_bitrate = avg_bitrate;
+  if (max_bitrate > 0)
+    esds->es.dec_conf_desc.max_bitrate = max_bitrate;
+
+  /* optional DecoderSpecificInfo */
+  if (codec_data) {
+    DecoderSpecificInfoDescriptor *desc;
+    gsize size;
+
+    esds->es.dec_conf_desc.dec_specific_info = desc =
+        desc_dec_specific_info_new ();
+    size = gst_buffer_get_size ((GstBuffer *) codec_data);
+    desc_dec_specific_info_alloc_data (desc, size);
+    gst_buffer_extract ((GstBuffer *) codec_data, 0, desc->data, size);
+  }
+
+  return build_atom_info_wrapper ((Atom *) esds, atom_esds_copy_data,
+      atom_esds_free);
+}
+
+AtomInfo *
+build_btrt_extension (guint32 buffer_size_db, guint32 avg_bitrate,
+    guint32 max_bitrate)
+{
+  AtomData *atom_data = atom_data_new (FOURCC_btrt);
+  guint8 *payload;
+
+  /* bitrate box payload: bufferSizeDB, maxBitrate, avgBitrate — note the
+   * max/avg ordering differs from the parameter order */
+  atom_data_alloc_mem (atom_data, 12);
+  payload = atom_data->data;
+
+  GST_WRITE_UINT32_BE (payload + 0, buffer_size_db);
+  GST_WRITE_UINT32_BE (payload + 4, max_bitrate);
+  GST_WRITE_UINT32_BE (payload + 8, avg_bitrate);
+
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+      atom_data_free);
+}
+
+/* Wrap up to two extension atoms in a QT 'wave' box, preceded by a 'frma'
+ * carrying @fourcc and optionally terminated by an empty atom. */
+static AtomInfo *
+build_mov_wave_extension (guint32 fourcc, AtomInfo * atom1, AtomInfo * atom2,
+    gboolean terminator)
+{
+  AtomWAVE *wave;
+  AtomFRMA *frma;
+  Atom *ext_atom;
+
+  /* Build WAVE atom for sample table entry */
+  wave = atom_wave_new ();
+
+  /* Prepend Terminator atom to the WAVE list first, so it ends up last */
+  if (terminator) {
+    ext_atom = (Atom *) atom_data_new (FOURCC_null);
+    wave->extension_atoms =
+        atom_info_list_prepend_atom (wave->extension_atoms, (Atom *) ext_atom,
+        (AtomCopyDataFunc) atom_data_copy_data, (AtomFreeFunc) atom_data_free);
+  }
+
+  /* Add supplied atoms to WAVE; takes ownership of @atom1/@atom2 */
+  if (atom2)
+    wave->extension_atoms = g_list_prepend (wave->extension_atoms, atom2);
+  if (atom1)
+    wave->extension_atoms = g_list_prepend (wave->extension_atoms, atom1);
+
+  /* Add FRMA to the WAVE */
+  frma = atom_frma_new ();
+  frma->media_type = fourcc;
+
+  wave->extension_atoms =
+      atom_info_list_prepend_atom (wave->extension_atoms, (Atom *) frma,
+      (AtomCopyDataFunc) atom_frma_copy_data, (AtomFreeFunc) atom_frma_free);
+
+  return build_atom_info_wrapper ((Atom *) wave, atom_wave_copy_data,
+      atom_wave_free);
+}
+
+AtomInfo *
+build_mov_aac_extension (AtomTRAK * trak, const GstBuffer * codec_data,
+    guint32 avg_bitrate, guint32 max_bitrate)
+{
+  AtomInfo *esds, *mp4a;
+  GstBuffer *buf;
+  guint32 tmp = 0;
+
+  /* Add ESDS atom to WAVE */
+  esds = build_esds_extension (trak, ESDS_OBJECT_TYPE_MPEG4_P3,
+      ESDS_STREAM_TYPE_AUDIO, codec_data, avg_bitrate, max_bitrate);
+
+  /* Add MP4A atom to the WAVE:
+   * not really in spec, but makes offset based players happy;
+   * the payload is just four zero bytes (tmp) */
+  buf = GST_BUFFER_NEW_READONLY (&tmp, 4);
+  mp4a = build_codec_data_extension (FOURCC_mp4a, buf);
+  gst_buffer_unref (buf);
+
+  return build_mov_wave_extension (FOURCC_mp4a, mp4a, esds, TRUE);
+}
+
+AtomInfo *
+build_mov_alac_extension (const GstBuffer * codec_data)
+{
+  /* wrap the raw 'alac' codec data in a QT WAVE extension, terminated */
+  AtomInfo *alac = build_codec_data_extension (FOURCC_alac, codec_data);
+
+  return build_mov_wave_extension (FOURCC_alac, NULL, alac, TRUE);
+}
+
+AtomInfo *
+build_jp2x_extension (const GstBuffer * prefix)
+{
+  AtomData *atom_data;
+
+  /* nothing to wrap without prefix data */
+  if (prefix == NULL)
+    return NULL;
+
+  atom_data = atom_data_new_from_gst_buffer (FOURCC_jp2x, prefix);
+  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+      atom_data_free);
+}
+
+AtomInfo *
+build_jp2h_extension (gint width, gint height, const gchar * colorspace,
+ gint ncomp, const GValue * cmap_array, const GValue * cdef_array)
+{
+ AtomData *atom_data;
+ GstBuffer *buf;
+ guint8 cenum;
+ gint i;
+ gint idhr_size = 22;
+ gint colr_size = 15;
+ gint cmap_size = 0, cdef_size = 0;
+ gint cmap_array_size = 0;
+ gint cdef_array_size = 0;
+ GstByteWriter writer;
+
+ g_return_val_if_fail (cmap_array == NULL ||
+ GST_VALUE_HOLDS_ARRAY (cmap_array), NULL);
+ g_return_val_if_fail (cdef_array == NULL ||
+ GST_VALUE_HOLDS_ARRAY (cdef_array), NULL);
+
+ if (g_str_equal (colorspace, "sRGB")) {
+ cenum = 0x10;
+ if (ncomp == 0)
+ ncomp = 3;
+ } else if (g_str_equal (colorspace, "GRAY")) {
+ cenum = 0x11;
+ if (ncomp == 0)
+ ncomp = 1;
+ } else if (g_str_equal (colorspace, "sYUV")) {
+ cenum = 0x12;
+ if (ncomp == 0)
+ ncomp = 3;
+ } else
+ return NULL;
+
+ if (cmap_array) {
+ cmap_array_size = gst_value_array_get_size (cmap_array);
+ cmap_size = 8 + cmap_array_size * 4;
+ }
+ if (cdef_array) {
+ cdef_array_size = gst_value_array_get_size (cdef_array);
+ cdef_size = 8 + 2 + cdef_array_size * 6;
+ }
+
+ gst_byte_writer_init_with_size (&writer,
+ idhr_size + colr_size + cmap_size + cdef_size, TRUE);
+
+ /* ihdr = image header box */
+ gst_byte_writer_put_uint32_be_unchecked (&writer, 22);
+ gst_byte_writer_put_uint32_le_unchecked (&writer, FOURCC_ihdr);
+ gst_byte_writer_put_uint32_be_unchecked (&writer, height);
+ gst_byte_writer_put_uint32_be_unchecked (&writer, width);
+ gst_byte_writer_put_uint16_be_unchecked (&writer, ncomp);
+ /* 8 bits per component, unsigned */
+ gst_byte_writer_put_uint8_unchecked (&writer, 0x7);
+ /* compression type; reserved */
+ gst_byte_writer_put_uint8_unchecked (&writer, 0x7);
+ /* colour space (un)known */
+ gst_byte_writer_put_uint8_unchecked (&writer, 0x0);
+ /* intellectual property right (box present) */
+ gst_byte_writer_put_uint8_unchecked (&writer, 0x0);
+
+ /* colour specification box */
+ gst_byte_writer_put_uint32_be_unchecked (&writer, 15);
+ gst_byte_writer_put_uint32_le_unchecked (&writer, FOURCC_colr);
+
+ /* specification method: enumerated */
+ gst_byte_writer_put_uint8_unchecked (&writer, 0x1);
+ /* precedence; reserved */
+ gst_byte_writer_put_uint8_unchecked (&writer, 0x0);
+ /* approximation; reserved */
+ gst_byte_writer_put_uint8_unchecked (&writer, 0x0);
+ /* enumerated colourspace */
+ gst_byte_writer_put_uint32_be_unchecked (&writer, cenum);
+
+ if (cmap_array) {
+ gst_byte_writer_put_uint32_be_unchecked (&writer, cmap_size);
+ gst_byte_writer_put_uint32_le_unchecked (&writer, FOURCC_cmap);
+ for (i = 0; i < cmap_array_size; i++) {
+ const GValue *item;
+ gint value;
+ guint16 cmp;
+ guint8 mtyp;
+ guint8 pcol;
+ item = gst_value_array_get_value (cmap_array, i);
+ value = g_value_get_int (item);
+
+ /* value is '(mtyp << 24) | (pcol << 16) | cmp' */
+ cmp = value & 0xFFFF;
+ mtyp = value >> 24;
+ pcol = (value >> 16) & 0xFF;
+
+ if (mtyp == 1)
+ GST_WARNING ("MTYP of cmap atom signals Pallete Mapping, but we don't "
+ "handle Pallete mapping atoms yet");
+
+ gst_byte_writer_put_uint16_be_unchecked (&writer, cmp);
+ gst_byte_writer_put_uint8_unchecked (&writer, mtyp);
+ gst_byte_writer_put_uint8_unchecked (&writer, pcol);
+ }
+ }
+
+ if (cdef_array) {
+ gst_byte_writer_put_uint32_be_unchecked (&writer, cdef_size);
+ gst_byte_writer_put_uint32_le_unchecked (&writer, FOURCC_cdef);
+ gst_byte_writer_put_uint16_be_unchecked (&writer, cdef_array_size);
+ for (i = 0; i < cdef_array_size; i++) {
+ const GValue *item;
+ gint value;
+ item = gst_value_array_get_value (cdef_array, i);
+ value = g_value_get_int (item);
+
+ gst_byte_writer_put_uint16_be_unchecked (&writer, i);
+ if (value > 0) {
+ gst_byte_writer_put_uint16_be_unchecked (&writer, 0);
+ gst_byte_writer_put_uint16_be_unchecked (&writer, value);
+ } else if (value < 0) {
+ gst_byte_writer_put_uint16_be_unchecked (&writer, -value);
+ gst_byte_writer_put_uint16_be_unchecked (&writer, 0); /* TODO what here? */
+ } else {
+ gst_byte_writer_put_uint16_be_unchecked (&writer, 1);
+ gst_byte_writer_put_uint16_be_unchecked (&writer, 0);
+ }
+ }
+ }
+
+ g_assert (gst_byte_writer_get_remaining (&writer) == 0);
+ buf = gst_byte_writer_reset_and_get_buffer (&writer);
+
+ atom_data = atom_data_new_from_gst_buffer (FOURCC_jp2h, buf);
+ gst_buffer_unref (buf);
+
+ return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+ atom_data_free);
+}
+
+AtomInfo *
+build_codec_data_extension (guint32 fourcc, const GstBuffer * codec_data)
+{
+ AtomData *data;
+ AtomInfo *result = NULL;
+
+ if (codec_data) {
+ data = atom_data_new_from_gst_buffer (fourcc, codec_data);
+ result = build_atom_info_wrapper ((Atom *) data, atom_data_copy_data,
+ atom_data_free);
+ }
+
+ return result;
+}
+
+AtomInfo *
+build_amr_extension (void)
+{
+ guint8 ext[9];
+ GstBuffer *buf;
+ AtomInfo *res;
+
+ /* vendor */
+ GST_WRITE_UINT32_LE (ext, 0);
+ /* decoder version */
+ GST_WRITE_UINT8 (ext + 4, 0);
+ /* mode set (all modes) */
+ GST_WRITE_UINT16_BE (ext + 5, 0x81FF);
+ /* mode change period (no restriction) */
+ GST_WRITE_UINT8 (ext + 7, 0);
+ /* frames per sample */
+ GST_WRITE_UINT8 (ext + 8, 1);
+
+ buf = GST_BUFFER_NEW_READONLY (ext, sizeof (ext));
+ res = build_codec_data_extension (FOURCC_damr, buf);
+ gst_buffer_unref (buf);
+ return res;
+}
+
+AtomInfo *
+build_h263_extension (void)
+{
+ guint8 ext[7];
+ GstBuffer *buf;
+ AtomInfo *res;
+
+ /* vendor */
+ GST_WRITE_UINT32_LE (ext, 0);
+ /* decoder version */
+ GST_WRITE_UINT8 (ext + 4, 0);
+ /* level / profile */
+ /* FIXME ? maybe ? obtain somewhere; baseline for now */
+ GST_WRITE_UINT8 (ext + 5, 10);
+ GST_WRITE_UINT8 (ext + 6, 0);
+
+ buf = GST_BUFFER_NEW_READONLY (ext, sizeof (ext));
+ res = build_codec_data_extension (FOURCC_d263, buf);
+ gst_buffer_unref (buf);
+ return res;
+}
+
+AtomInfo *
+build_gama_atom (gdouble gamma)
+{
+ AtomInfo *res;
+ guint32 gamma_fp;
+ GstBuffer *buf;
+
+ /* convert to uint32 from fixed point */
+ gamma_fp = (guint32) 65536 *gamma;
+
+ gamma_fp = GUINT32_TO_BE (gamma_fp);
+ buf = GST_BUFFER_NEW_READONLY (&gamma_fp, 4);
+ res = build_codec_data_extension (FOURCC_gama, buf);
+ gst_buffer_unref (buf);
+ return res;
+}
+
/* Build an 'SMI ' sample description extension carrying an SVQ3 sequence
 * header.  The payload is the SEQH fourcc, the total payload size and the
 * raw seqh bytes.
 * NOTE(review): the fourcc is written little-endian while the size is
 * big-endian, and the size precedes the data but follows the fourcc —
 * unusual; confirm against an SVQ3 reference before changing. */
AtomInfo *
build_SMI_atom (const GstBuffer * seqh)
{
  AtomInfo *res;
  GstBuffer *buf;
  gsize size;
  guint8 *data;

  /* the seqh plus its size and fourcc */
  size = gst_buffer_get_size ((GstBuffer *) seqh);
  data = g_malloc (size + 8);

  GST_WRITE_UINT32_LE (data, FOURCC_SEQH);
  GST_WRITE_UINT32_BE (data + 4, size + 8);
  gst_buffer_extract ((GstBuffer *) seqh, 0, data + 8, size);
  /* buffer takes ownership of 'data' */
  buf = gst_buffer_new_wrapped (data, size + 8);
  res = build_codec_data_extension (FOURCC_SMI_, buf);
  gst_buffer_unref (buf);
  return res;
}
+
/* Build the 'ms\0\x11' atom holding a little-endian WAVE header describing
 * an MS IMA ADPCM stream; used inside the 'wave' extension list built by
 * build_ima_adpcm_extension() below. */
static AtomInfo *
build_ima_adpcm_atom (gint channels, gint rate, gint blocksize)
{
#define IMA_ADPCM_ATOM_SIZE 20
  AtomData *atom_data;
  guint8 *data;
  guint32 fourcc;
  gint samplesperblock;
  gint bytespersec;

  /* The FOURCC for WAV codecs in QT is 'ms' followed by the 16 bit wave codec
     identifier. Note that the identifier here is big-endian, but when used
     within the WAVE header (below), it's little endian. */
  fourcc = MS_WAVE_FOURCC (0x11);

  atom_data = atom_data_new (fourcc);
  atom_data_alloc_mem (atom_data, IMA_ADPCM_ATOM_SIZE);
  data = atom_data->data;

  /* This atom's content is a WAVE header, including 2 bytes of extra data.
     Note that all of this is little-endian, unlike most stuff in qt. */
  /* 4 bytes header per channel (including 1 sample). Then 2 samples per byte
     for the rest. Simplifies to this. */
  samplesperblock = 2 * blocksize / channels - 7;
  bytespersec = rate * blocksize / samplesperblock;
  GST_WRITE_UINT16_LE (data, 0x11);     /* format tag: IMA ADPCM */
  GST_WRITE_UINT16_LE (data + 2, channels);
  GST_WRITE_UINT32_LE (data + 4, rate);
  GST_WRITE_UINT32_LE (data + 8, bytespersec);
  GST_WRITE_UINT16_LE (data + 12, blocksize);   /* block align */
  GST_WRITE_UINT16_LE (data + 14, 4);   /* bits per sample (presumably) */
  GST_WRITE_UINT16_LE (data + 16, 2);   /* Two extra bytes */
  GST_WRITE_UINT16_LE (data + 18, samplesperblock);

  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
      atom_data_free);
}
+
/* Build the QT 'wave' extension for MS IMA ADPCM audio.  Children are
 * prepended, so the serialized order ends up: 'frma', the WAVE-header data
 * atom, terminator. */
AtomInfo *
build_ima_adpcm_extension (gint channels, gint rate, gint blocksize)
{
  AtomWAVE *wave;
  AtomFRMA *frma;
  Atom *ext_atom;

  /* Add WAVE atom */
  wave = atom_wave_new ();

  /* Prepend Terminator atom to the WAVE list first, so it ends up last */
  ext_atom = (Atom *) atom_data_new (FOURCC_null);
  wave->extension_atoms =
      atom_info_list_prepend_atom (wave->extension_atoms, (Atom *) ext_atom,
      (AtomCopyDataFunc) atom_data_copy_data, (AtomFreeFunc) atom_data_free);

  /* Add wave ima adpcm atom to WAVE */
  wave->extension_atoms = g_list_prepend (wave->extension_atoms,
      build_ima_adpcm_atom (channels, rate, blocksize));

  /* Add FRMA to the WAVE */
  frma = atom_frma_new ();
  frma->media_type = MS_WAVE_FOURCC (0x11);

  wave->extension_atoms =
      atom_info_list_prepend_atom (wave->extension_atoms, (Atom *) frma,
      (AtomCopyDataFunc) atom_frma_copy_data, (AtomFreeFunc) atom_frma_free);

  return build_atom_info_wrapper ((Atom *) wave, atom_wave_copy_data,
      atom_wave_free);
}
+
/* Build a 'dac3' (AC3SpecificBox) extension atom packing the given
 * bitstream parameters into 3 bytes, laid out per the bit widths listed
 * below. */
AtomInfo *
build_ac3_extension (guint8 fscod, guint8 bsid, guint8 bsmod, guint8 acmod,
    guint8 lfe_on, guint8 bitrate_code)
{
  AtomData *atom_data = atom_data_new (FOURCC_dac3);
  guint8 *data;

  atom_data_alloc_mem (atom_data, 3);
  data = atom_data->data;

  /* Bits from the spec
   * fscod 2
   * bsid 5
   * bsmod 3
   * acmod 3
   * lfeon 1
   * bit_rate_code 5
   * reserved 5
   */

  /* Some bit manipulation magic. Need bitwriter */
  data[0] = (fscod << 6) | (bsid << 1) | ((bsmod >> 2) & 1);
  data[1] =
      ((bsmod & 0x3) << 6) | (acmod << 3) | ((lfe_on & 1) << 2) | ((bitrate_code
          >> 3) & 0x3);
  /* remaining 5 bits are reserved zero */
  data[2] = ((bitrate_code & 0x7) << 5);

  return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
      atom_data_free);
}
+
+AtomInfo *
+build_opus_extension (guint32 rate, guint8 channels, guint8 mapping_family,
+ guint8 stream_count, guint8 coupled_count, guint8 channel_mapping[256],
+ guint16 pre_skip, guint16 output_gain)
+{
+ AtomData *atom_data;
+ guint8 *data_block;
+ GstByteWriter bw;
+ gboolean hdl = TRUE;
+ guint data_block_len;
+
+ gst_byte_writer_init (&bw);
+ hdl &= gst_byte_writer_put_uint8 (&bw, 0x00); /* version number */
+ hdl &= gst_byte_writer_put_uint8 (&bw, channels);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, pre_skip);
+ hdl &= gst_byte_writer_put_uint32_le (&bw, rate);
+ hdl &= gst_byte_writer_put_uint16_le (&bw, output_gain);
+ hdl &= gst_byte_writer_put_uint8 (&bw, mapping_family);
+ if (mapping_family > 0) {
+ hdl &= gst_byte_writer_put_uint8 (&bw, stream_count);
+ hdl &= gst_byte_writer_put_uint8 (&bw, coupled_count);
+ hdl &= gst_byte_writer_put_data (&bw, channel_mapping, channels);
+ }
+
+ if (!hdl) {
+ GST_WARNING ("Error creating header");
+ return NULL;
+ }
+
+ data_block_len = gst_byte_writer_get_size (&bw);
+ data_block = gst_byte_writer_reset_and_get_data (&bw);
+ atom_data = atom_data_new_from_data (FOURCC_dops, data_block, data_block_len);
+ g_free (data_block);
+
+ return build_atom_info_wrapper ((Atom *) atom_data, atom_data_copy_data,
+ atom_data_free);
+}
+
+AtomInfo *
+build_uuid_xmp_atom (GstBuffer * xmp_data)
+{
+ AtomUUID *uuid;
+ gsize size;
+ static const guint8 xmp_uuid[] = { 0xBE, 0x7A, 0xCF, 0xCB,
+ 0x97, 0xA9, 0x42, 0xE8,
+ 0x9C, 0x71, 0x99, 0x94,
+ 0x91, 0xE3, 0xAF, 0xAC
+ };
+
+ if (xmp_data == NULL)
+ return NULL;
+
+ uuid = atom_uuid_new ();
+ memcpy (uuid->uuid, xmp_uuid, 16);
+
+ size = gst_buffer_get_size (xmp_data);
+ uuid->data = g_malloc (size);
+ uuid->datalen = size;
+ gst_buffer_extract (xmp_data, 0, uuid->data, size);
+
+ return build_atom_info_wrapper ((Atom *) uuid, atom_uuid_copy_data,
+ atom_uuid_free);
+}
diff --git a/gst/isomp4/atoms.h b/gst/isomp4/atoms.h
new file mode 100644
index 0000000000..33d1525d40
--- /dev/null
+++ b/gst/isomp4/atoms.h
@@ -0,0 +1,1149 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __ATOMS_H__
+#define __ATOMS_H__
+
+#include <glib.h>
+#include <string.h>
+#include <gst/video/video.h>
+
+#include "descriptors.h"
+#include "properties.h"
+#include "fourcc.h"
+
/* helper storage struct: an inline growable array of struct_type */
#define ATOM_ARRAY(struct_type) \
struct { \
  guint size; \
  guint len; \
  struct_type *data; \
}

/* storage helpers */

/* reserve initial capacity; pair with atom_array_clear() to release it */
#define atom_array_init(array, reserve) \
G_STMT_START { \
  (array)->len = 0; \
  (array)->size = (reserve); \
  (array)->data = g_malloc (sizeof (*(array)->data) * (reserve)); \
} G_STMT_END

/* append one element, growing the backing store by 'inc' elements when
 * full.  Arguments are parenthesized so callers may pass expressions. */
#define atom_array_append(array, elmt, inc) \
G_STMT_START { \
  g_assert ((array)->data); \
  g_assert ((inc) > 0); \
  if (G_UNLIKELY ((array)->len == (array)->size)) { \
    (array)->size += (inc); \
    (array)->data = \
        g_realloc ((array)->data, sizeof (*((array)->data)) * (array)->size); \
  } \
  (array)->data[(array)->len] = (elmt); \
  (array)->len++; \
} G_STMT_END

#define atom_array_get_len(array) ((array)->len)
#define atom_array_index(array, index) ((array)->data[(index)])

/* release the backing store and reset the array to empty */
#define atom_array_clear(array) \
G_STMT_START { \
  (array)->size = (array)->len = 0; \
  g_free ((array)->data); \
  (array)->data = NULL; \
} G_STMT_END
+
/* light-weight context that may influence header atom tree construction */
typedef enum _AtomsTreeFlavor
{
  ATOMS_TREE_FLAVOR_MOV,        /* QuickTime .mov */
  ATOMS_TREE_FLAVOR_ISOM,       /* ISO base media / MP4 */
  ATOMS_TREE_FLAVOR_3GP,        /* 3GPP */
  ATOMS_TREE_FLAVOR_ISML        /* presumably Smooth Streaming ('isml') — confirm */
} AtomsTreeFlavor;

typedef struct _AtomsContext
{
  AtomsTreeFlavor flavor;
  /* create a timecode trak even when the flavor would not normally
   * require one — TODO confirm against callers in atoms.c */
  gboolean force_create_timecode_trak;
} AtomsContext;

/* caller owns the returned context; release with atoms_context_free() */
AtomsContext* atoms_context_new (AtomsTreeFlavor flavor, gboolean force_create_timecode_trak);
void atoms_context_free (AtomsContext *context);
+
+#define METADATA_DATA_FLAG 0x0
+#define METADATA_TEXT_FLAG 0x1
+
+/* atom defs and functions */
+
+typedef struct _AtomInfo AtomInfo;
+
+/*
+ * Used for storing time related values for some atoms.
+ */
+typedef struct _TimeInfo
+{
+ guint64 creation_time;
+ guint64 modification_time;
+ guint32 timescale;
+ guint64 duration;
+} TimeInfo;
+
+typedef struct _Atom
+{
+ guint32 size;
+ guint32 type;
+ guint64 extended_size;
+} Atom;
+
+typedef struct _AtomFull
+{
+ Atom header;
+
+ guint8 version;
+ guint8 flags[3];
+} AtomFull;
+
/*
 * Generic extension atom: an arbitrary fourcc with an opaque payload.
 */
typedef struct _AtomData
{
  Atom header;

  /* not written */
  guint32 datalen;

  guint8 *data;
} AtomData;

/* 'uuid' atom: a 16-byte UUID identifying the payload format, followed by
 * opaque data */
typedef struct _AtomUUID
{
  Atom header;

  guint8 uuid[16];

  /* not written */
  guint32 datalen;

  guint8 *data;
} AtomUUID;

/* 'ftyp' file type atom: major brand, version and compatible brands */
typedef struct _AtomFTYP
{
  Atom header;
  guint32 major_brand;
  guint32 version;
  guint32 *compatible_brands;

  /* not written */
  guint32 compatible_brands_size;
} AtomFTYP;
+
+typedef struct _AtomMVHD
+{
+ AtomFull header;
+
+ /* version 0: 32 bits */
+ TimeInfo time_info;
+
+ guint32 prefered_rate; /* ISO: 0x00010000 */
+ guint16 volume; /* ISO: 0x0100 */
+ guint16 reserved3; /* ISO: 0x0 */
+ guint32 reserved4[2]; /* ISO: 0, 0 */
+ /* ISO: identity matrix =
+ * { 0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000 } */
+ guint32 matrix[9];
+
+ /* ISO: all 0 */
+ guint32 preview_time;
+ guint32 preview_duration;
+ guint32 poster_time;
+ guint32 selection_time;
+ guint32 selection_duration;
+ guint32 current_time;
+
+ guint32 next_track_id;
+} AtomMVHD;
+
+typedef struct _AtomTKHD
+{
+ AtomFull header;
+
+ /* version 0: 32 bits */
+ /* like the TimeInfo struct, but it has this track_ID inside */
+ guint64 creation_time;
+ guint64 modification_time;
+ guint32 track_ID;
+ guint32 reserved;
+ guint64 duration;
+
+ guint32 reserved2[2];
+ guint16 layer;
+ guint16 alternate_group;
+ guint16 volume;
+ guint16 reserved3;
+
+ /* ISO: identity matrix =
+ * { 0x00010000, 0, 0, 0, 0x00010000, 0, 0, 0, 0x40000000 } */
+ guint32 matrix[9];
+ guint32 width;
+ guint32 height;
+} AtomTKHD;
+
+typedef struct _AtomMDHD
+{
+ AtomFull header;
+
+ /* version 0: 32 bits */
+ TimeInfo time_info;
+
+ /* ISO: packed ISO-639-2/T language code (first bit must be 0) */
+ guint16 language_code;
+ /* ISO: 0 */
+ guint16 quality;
+} AtomMDHD;
+
+typedef struct _AtomHDLR
+{
+ AtomFull header;
+
+ /* ISO: 0 */
+ guint32 component_type;
+ guint32 handler_type;
+ guint32 manufacturer;
+ guint32 flags;
+ guint32 flags_mask;
+ gchar *name;
+
+ AtomsTreeFlavor flavor;
+} AtomHDLR;
+
+typedef struct _AtomVMHD
+{
+ AtomFull header; /* ISO: flags = 1 */
+
+ guint16 graphics_mode;
+ /* RGB */
+ guint16 opcolor[3];
+} AtomVMHD;
+
+typedef struct _AtomSMHD
+{
+ AtomFull header;
+
+ guint16 balance;
+ guint16 reserved;
+} AtomSMHD;
+
+typedef struct _AtomHMHD
+{
+ AtomFull header;
+
+ guint16 max_pdu_size;
+ guint16 avg_pdu_size;
+ guint32 max_bitrate;
+ guint32 avg_bitrate;
+ guint32 sliding_avg_bitrate;
+} AtomHMHD;
+
+typedef struct _AtomTCMI
+{
+ AtomFull header;
+
+ guint16 text_font;
+ guint16 text_face;
+ guint16 text_size;
+ guint16 text_color[3];
+ guint16 bg_color[3];
+ gchar *font_name;
+} AtomTCMI;
+
+typedef struct _AtomTMCD
+{
+ Atom header;
+
+ AtomTCMI tcmi;
+} AtomTMCD;
+
+typedef struct _AtomGMIN
+{
+ AtomFull header;
+
+ guint16 graphics_mode;
+ guint16 opcolor[3];
+ guint8 balance;
+ guint8 reserved;
+
+} AtomGMIN;
+
+typedef struct _AtomGMHD
+{
+ Atom header;
+
+ /* Only gmin is required in a gmhd atom
+ * The other fields are optional */
+ AtomGMIN gmin;
+ AtomTMCD *tmcd;
+
+} AtomGMHD;
+
+typedef struct _AtomNMHD
+{
+ Atom header;
+ guint32 flags;
+} AtomNMHD;
+
+typedef struct _AtomURL
+{
+ AtomFull header;
+
+ gchar *location;
+} AtomURL;
+
+typedef struct _AtomDREF
+{
+ AtomFull header;
+
+ GList *entries;
+} AtomDREF;
+
+typedef struct _AtomDINF
+{
+ Atom header;
+
+ AtomDREF dref;
+} AtomDINF;
+
+typedef struct _STTSEntry
+{
+ guint32 sample_count;
+ gint32 sample_delta;
+} STTSEntry;
+
+typedef struct _AtomSTTS
+{
+ AtomFull header;
+
+ ATOM_ARRAY (STTSEntry) entries;
+} AtomSTTS;
+
+typedef struct _AtomSTSS
+{
+ AtomFull header;
+
+ ATOM_ARRAY (guint32) entries;
+} AtomSTSS;
+
+typedef struct _AtomESDS
+{
+ AtomFull header;
+
+ ESDescriptor es;
+} AtomESDS;
+
+typedef struct _AtomFRMA
+{
+ Atom header;
+
+ guint32 media_type;
+} AtomFRMA;
+
+typedef enum _SampleEntryKind
+{
+ UNKNOWN,
+ AUDIO,
+ VIDEO,
+ SUBTITLE,
+ TIMECODE,
+ CLOSEDCAPTION
+} SampleEntryKind;
+
+typedef struct _SampleTableEntry
+{
+ Atom header;
+
+ guint8 reserved[6];
+ guint16 data_reference_index;
+
+ /* type of entry */
+ SampleEntryKind kind;
+} SampleTableEntry;
+
+typedef struct _AtomHintSampleEntry
+{
+ SampleTableEntry se;
+ guint32 size;
+ guint8 *data;
+} AtomHintSampleEntry;
+
+typedef struct _SampleTableEntryMP4V
+{
+ SampleTableEntry se;
+
+ guint16 version;
+ guint16 revision_level;
+
+ guint32 vendor; /* fourcc code */
+ guint32 temporal_quality;
+ guint32 spatial_quality;
+
+ guint16 width;
+ guint16 height;
+
+ guint32 horizontal_resolution;
+ guint32 vertical_resolution;
+ guint32 datasize;
+
+ guint16 frame_count; /* usually 1 */
+
+ guint8 compressor[32]; /* pascal string, i.e. first byte = length */
+
+ guint16 depth;
+ guint16 color_table_id;
+
+ /* (optional) list of AtomInfo */
+ GList *extension_atoms;
+} SampleTableEntryMP4V;
+
+typedef struct _SampleTableEntryMP4A
+{
+ SampleTableEntry se;
+
+ guint16 version;
+ guint16 revision_level;
+ guint32 vendor;
+
+ guint16 channels;
+ guint16 sample_size;
+ guint16 compression_id;
+ guint16 packet_size;
+
+ guint32 sample_rate; /* fixed point 16.16 */
+
+ guint32 samples_per_packet;
+ guint32 bytes_per_packet;
+ guint32 bytes_per_frame;
+ guint32 bytes_per_sample;
+
+ /* (optional) list of AtomInfo */
+ GList *extension_atoms;
+} SampleTableEntryMP4A;
+
+typedef struct _AtomNAME
+{
+ Atom header;
+
+ guint8 language_code;
+ gchar *name;
+} AtomNAME;
+
+typedef struct _SampleTableEntryTMCD
+{
+ SampleTableEntry se;
+
+ guint32 tc_flags;
+ guint32 timescale;
+ guint32 frame_duration;
+ guint8 n_frames;
+
+ AtomNAME name;
+
+} SampleTableEntryTMCD;
+
+typedef struct _SampleTableEntryTX3G
+{
+ SampleTableEntry se;
+
+ guint32 display_flags;
+ guint64 default_text_box;
+ guint16 font_id;
+ guint8 font_face; /* bold=0x1, italic=0x2, underline=0x4 */
+ guint8 font_size; /* should always be 0.05 multiplied by the video track header height */
+ guint32 foreground_color_rgba;
+
+} SampleTableEntryTX3G;
+
+typedef struct _AtomSTSD
+{
+ AtomFull header;
+
+ guint n_entries;
+ /* list of subclasses of SampleTableEntry */
+ GList *entries;
+} AtomSTSD;
+
+typedef struct _AtomSTSZ
+{
+ AtomFull header;
+
+ guint32 sample_size;
+
+ /* need the size here because when sample_size is constant,
+ * the list is empty */
+ guint32 table_size;
+ ATOM_ARRAY (guint32) entries;
+} AtomSTSZ;
+
+typedef struct _STSCEntry
+{
+ guint32 first_chunk;
+ guint32 samples_per_chunk;
+ guint32 sample_description_index;
+} STSCEntry;
+
+typedef struct _AtomSTSC
+{
+ AtomFull header;
+
+ ATOM_ARRAY (STSCEntry) entries;
+} AtomSTSC;
+
+/* FIXME: this can support multiple tracks */
+typedef struct _AtomTREF
+{
+ Atom header;
+
+ guint32 reftype;
+ ATOM_ARRAY (guint32) entries;
+} AtomTREF;
+
+/*
+ * used for both STCO and CO64
+ * The table will be written out as STCO automatically when
+ * the offsets being written will fit in a 32-bit table,
+ * otherwise it is written as CO64
+ */
+typedef struct _AtomSTCO64
+{
+ AtomFull header;
+ /* Global offset to add to entries when serialising */
+ guint32 chunk_offset;
+ /* Maximum offset stored in the table */
+ guint64 max_offset;
+ ATOM_ARRAY (guint64) entries;
+} AtomSTCO64;
+
+typedef struct _CTTSEntry
+{
+ guint32 samplecount;
+ guint32 sampleoffset;
+} CTTSEntry;
+
+typedef struct _AtomCTTS
+{
+ AtomFull header;
+
+ /* also entry count here */
+ ATOM_ARRAY (CTTSEntry) entries;
+ gboolean do_pts;
+} AtomCTTS;
+
+typedef struct _AtomSVMI
+{
+ AtomFull header;
+
+ guint8 stereoscopic_composition_type;
+ gboolean is_left_first;
+} AtomSVMI;
+
+typedef struct _AtomSTBL
+{
+ Atom header;
+
+ AtomSTSD stsd;
+ AtomSTTS stts;
+ AtomSTSS stss;
+ AtomSTSC stsc;
+ AtomSTSZ stsz;
+ /* NULL if not present */
+ AtomCTTS *ctts;
+ /* NULL if not present */
+ AtomSVMI *svmi;
+
+ AtomSTCO64 stco64;
+} AtomSTBL;
+
+typedef struct _AtomMINF
+{
+ Atom header;
+
+ /* only (exactly) one of those must be present */
+ AtomVMHD *vmhd;
+ AtomSMHD *smhd;
+ AtomHMHD *hmhd;
+ AtomGMHD *gmhd;
+ AtomNMHD *nmhd;
+
+ AtomHDLR *hdlr;
+ AtomDINF dinf;
+ AtomSTBL stbl;
+} AtomMINF;
+
/* one entry of an 'elst' edit list */
typedef struct _EditListEntry
{
  /* duration in movie's timescale */
  guint32 duration;
  /* start time in media's timescale, -1 for empty */
  guint32 media_time;
  guint32 media_rate;           /* fixed point 32 bit */
} EditListEntry;

typedef struct _AtomELST
{
  AtomFull header;

  /* number of entries is implicit; list of EditListEntry */
  GSList *entries;
} AtomELST;

/* 'edts' edit box wrapping the edit list */
typedef struct _AtomEDTS
{
  Atom header;
  AtomELST elst;
} AtomEDTS;
+
+typedef struct _AtomMDIA
+{
+ Atom header;
+
+ AtomMDHD mdhd;
+ AtomHDLR hdlr;
+ AtomMINF minf;
+} AtomMDIA;
+
+typedef struct _AtomILST
+{
+ Atom header;
+
+ /* list of AtomInfo */
+ GList* entries;
+} AtomILST;
+
+typedef struct _AtomTagData
+{
+ AtomFull header;
+ guint32 reserved;
+
+ guint32 datalen;
+ guint8* data;
+} AtomTagData;
+
+typedef struct _AtomTag
+{
+ Atom header;
+
+ AtomTagData data;
+} AtomTag;
+
+typedef struct _AtomMETA
+{
+ AtomFull header;
+ AtomHDLR hdlr;
+ AtomILST *ilst;
+} AtomMETA;
+
+typedef struct _AtomUDTA
+{
+ Atom header;
+
+ /* list of AtomInfo */
+ GList* entries;
+ /* or list is further down */
+ AtomMETA *meta;
+
+ AtomsContext *context;
+} AtomUDTA;
+
/* 'trun' (track run) box flags */
enum TrFlags
{
  TR_DATA_OFFSET = 0x01,        /* data-offset-present */
  TR_FIRST_SAMPLE_FLAGS = 0x04, /* first-sample-flags-present */
  TR_SAMPLE_DURATION = 0x0100,  /* sample-duration-present */
  TR_SAMPLE_SIZE = 0x0200,      /* sample-size-present */
  TR_SAMPLE_FLAGS = 0x0400,     /* sample-flags-present */
  TR_COMPOSITION_TIME_OFFSETS = 0x0800  /* sample-composition-time-offsets-presents */
};

/* 'tfhd' (track fragment header) box flags */
enum TfFlags
{
  TF_BASE_DATA_OFFSET = 0x01,   /* base-data-offset-present */
  TF_SAMPLE_DESCRIPTION_INDEX = 0x02,   /* sample-description-index-present */
  TF_DEFAULT_SAMPLE_DURATION = 0x08,    /* default-sample-duration-present */
  TF_DEFAULT_SAMPLE_SIZE = 0x010,       /* default-sample-size-present */
  TF_DEFAULT_SAMPLE_FLAGS = 0x020,      /* default-sample-flags-present */
  TF_DURATION_IS_EMPTY = 0x010000,      /* duration-is-empty */
  TF_DEFAULT_BASE_IS_MOOF = 0x020000    /* default-base-is-moof */
};

/* Timecode flags */
enum TcFlags
{
  TC_DROP_FRAME = 0x0001,       /* Drop-frame timecode */
  TC_24H_MAX = 0x0002,          /* Whether the timecode wraps after 24 hours */
  TC_NEGATIVE_OK = 0x0004,      /* Whether negative time values are OK */
  TC_COUNTER = 0x0008           /* Whether the time value corresponds to a tape counter value */
};
+
+typedef struct _AtomTRAK
+{
+ Atom header;
+
+ AtomTKHD tkhd;
+ AtomInfo *tapt;
+ AtomEDTS *edts;
+ AtomMDIA mdia;
+ AtomUDTA udta;
+ AtomTREF *tref;
+
+ /* some helper info for structural conformity checks */
+ gboolean is_video;
+ gboolean is_h264;
+
+ AtomsContext *context;
+} AtomTRAK;
+
+typedef struct _AtomTREX
+{
+ AtomFull header;
+
+ guint32 track_ID;
+ guint32 default_sample_description_index;
+ guint32 default_sample_duration;
+ guint32 default_sample_size;
+ guint32 default_sample_flags;
+} AtomTREX;
+
+typedef struct _AtomMEHD
+{
+ AtomFull header;
+
+ guint64 fragment_duration;
+} AtomMEHD;
+
+
+typedef struct _AtomMVEX
+{
+ Atom header;
+
+ AtomMEHD mehd;
+
+ /* list of AtomTREX */
+ GList *trexs;
+} AtomMVEX;
+
+typedef struct _AtomMFHD
+{
+ AtomFull header;
+
+ guint32 sequence_number;
+} AtomMFHD;
+
+typedef struct _AtomTFHD
+{
+ AtomFull header;
+
+ guint32 track_ID;
+ guint64 base_data_offset;
+ guint32 sample_description_index;
+ guint32 default_sample_duration;
+ guint32 default_sample_size;
+ guint32 default_sample_flags;
+} AtomTFHD;
+
+typedef struct _AtomTFDT
+{
+ AtomFull header;
+
+ guint64 base_media_decode_time;
+} AtomTFDT;
+
+typedef struct _TRUNSampleEntry
+{
+ guint32 sample_duration;
+ guint32 sample_size;
+ guint32 sample_flags;
+ guint32 sample_composition_time_offset;
+} TRUNSampleEntry;
+
+typedef struct _AtomTRUN
+{
+ AtomFull header;
+
+ guint32 sample_count;
+ gint32 data_offset;
+ guint32 first_sample_flags;
+
+ /* array of fields */
+ ATOM_ARRAY (TRUNSampleEntry) entries;
+} AtomTRUN;
+
+typedef struct _AtomSDTP
+{
+ AtomFull header;
+
+ /* not serialized */
+ guint32 sample_count;
+
+ /* array of fields */
+ ATOM_ARRAY (guint8) entries;
+} AtomSDTP;
+
+typedef struct _AtomTRAF
+{
+ Atom header;
+
+ AtomTFHD tfhd;
+
+ AtomTFDT tfdt;
+
+ /* list of AtomTRUN. */
+ GList *truns;
+ /* list of AtomSDTP */
+ GList *sdtps;
+} AtomTRAF;
+
+typedef struct _AtomMOOF
+{
+ Atom header;
+
+ AtomMFHD mfhd;
+
+ /* list of AtomTRAF */
+ GList *trafs;
+
+ guint64 traf_offset;
+} AtomMOOF;
+
+
+typedef struct _AtomMOOV
+{
+ /* style */
+ AtomsContext context;
+
+ Atom header;
+
+ AtomMVHD mvhd;
+ AtomMVEX mvex;
+
+ /* list of AtomTRAK */
+ GList *traks;
+ AtomUDTA udta;
+
+ gboolean fragmented;
+ guint32 chunks_offset;
+} AtomMOOV;
+
+typedef struct _AtomWAVE
+{
+ Atom header;
+
+ /* list of AtomInfo */
+ GList *extension_atoms;
+} AtomWAVE;
+
+typedef struct _TFRAEntry
+{
+ guint64 time;
+ guint64 moof_offset;
+ guint32 traf_number;
+ guint32 trun_number;
+ guint32 sample_number;
+} TFRAEntry;
+
+typedef struct _AtomTFRA
+{
+ AtomFull header;
+
+ guint32 track_ID;
+ guint32 lengths;
+ /* array of entries */
+ ATOM_ARRAY (TFRAEntry) entries;
+} AtomTFRA;
+
+typedef struct _AtomMFRA
+{
+ Atom header;
+
+ /* list of tfra */
+ GList *tfras;
+} AtomMFRA;
+
/*
 * Function to serialize an atom into @buffer (reallocated as needed);
 * advances @offset and returns the number of bytes written.
 */
typedef guint64 (*AtomCopyDataFunc) (Atom *atom, guint8 **buffer, guint64 *size, guint64 *offset);

/*
 * Releases memory allocated by an atom
 */
typedef guint64 (*AtomFreeFunc) (Atom *atom);

/*
 * Some atoms might have many optional different kinds of child atoms, so this
 * is useful for enabling generic handling of any atom.
 * All we need are the two functions (copying it to an array
 * for serialization and the memory releasing function).
 */
struct _AtomInfo
{
  Atom *atom;
  AtomCopyDataFunc copy_data_func;
  AtomFreeFunc free_func;
};
+
+guint64 atoms_get_current_qt_time (void);
+
+guint64 atom_copy_data (Atom *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+
+AtomFTYP* atom_ftyp_new (AtomsContext *context, guint32 major,
+ guint32 version, GList *brands);
+guint64 atom_ftyp_copy_data (AtomFTYP *ftyp, guint8 **buffer,
+ guint64 *size, guint64 *offset);
+void atom_ftyp_free (AtomFTYP *ftyp);
+
+AtomTRAK* atom_trak_new (AtomsContext *context);
+void atom_trak_add_samples (AtomTRAK * trak, guint32 nsamples, guint32 delta,
+ guint32 size, guint64 chunk_offset, gboolean sync,
+ gint64 pts_offset);
+void atom_trak_set_elst_entry (AtomTRAK * trak, gint index, guint32 duration,
+ guint32 media_time, guint32 rate);
+void atom_trak_edts_clear (AtomTRAK * trak);
+guint32 atom_trak_get_timescale (AtomTRAK *trak);
+guint32 atom_trak_get_id (AtomTRAK * trak);
+void atom_trak_set_constant_size_samples (AtomTRAK * trak, guint32 sample_size);
+void atom_stbl_add_samples (AtomSTBL * stbl, guint32 nsamples,
+ guint32 delta, guint32 size,
+ guint64 chunk_offset, gboolean sync,
+ gint64 pts_offset);
+void atom_stsc_add_new_entry (AtomSTSC * stsc,
+ guint32 first_chunk, guint32 nsamples, guint32 sample_description_index);
+
+AtomMOOV* atom_moov_new (AtomsContext *context);
+void atom_moov_free (AtomMOOV *moov);
+guint64 atom_moov_copy_data (AtomMOOV *atom, guint8 **buffer, guint64 *size, guint64* offset);
+void atom_moov_update_timescale (AtomMOOV *moov, guint32 timescale);
+void atom_moov_update_duration (AtomMOOV *moov);
+void atom_moov_set_fragmented (AtomMOOV *moov, gboolean fragmented);
+void atom_moov_chunks_set_offset (AtomMOOV *moov, guint32 offset);
+void atom_moov_add_trak (AtomMOOV *moov, AtomTRAK *trak);
+guint atom_moov_get_trak_count (AtomMOOV *moov);
+
+guint atom_framerate_to_timescale (gint fps_n, gint fps_d);
+
+guint64 atom_mvhd_copy_data (AtomMVHD * atom, guint8 ** buffer,
+ guint64 * size, guint64 * offset);
+void atom_stco64_chunks_set_offset (AtomSTCO64 * stco64, guint32 offset);
+guint64 atom_trak_copy_data (AtomTRAK * atom, guint8 ** buffer,
+ guint64 * size, guint64 * offset);
+void atom_stbl_clear (AtomSTBL * stbl);
+void atom_stbl_init (AtomSTBL * stbl);
+guint64 atom_stss_copy_data (AtomSTSS *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+guint64 atom_stts_copy_data (AtomSTTS *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+guint64 atom_stsc_copy_data (AtomSTSC *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+guint64 atom_stsz_copy_data (AtomSTSZ *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+guint64 atom_ctts_copy_data (AtomCTTS *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+guint64 atom_svmi_copy_data (AtomSVMI *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+AtomSVMI * atom_svmi_new (guint8 stereoscopic_composition_type, gboolean is_left_first);
+guint64 atom_stco64_copy_data (AtomSTCO64 *atom, guint8 **buffer,
+ guint64 *size, guint64* offset);
+AtomMOOF* atom_moof_new (AtomsContext *context, guint32 sequence_number);
+void atom_moof_free (AtomMOOF *moof);
+guint64 atom_moof_copy_data (AtomMOOF *moof, guint8 **buffer, guint64 *size, guint64* offset);
+void atom_moof_set_base_offset (AtomMOOF * moof, guint64 offset);
+AtomTRAF * atom_traf_new (AtomsContext * context, guint32 track_ID);
+void atom_traf_free (AtomTRAF * traf);
+void atom_traf_set_base_decode_time (AtomTRAF * traf, guint64 base_decode_time);
+void atom_traf_add_samples (AtomTRAF * traf, guint32 nsamples, guint32 delta,
+ guint32 size, gint32 data_offset, gboolean sync,
+ gint64 pts_offset, gboolean sdtp_sync);
+guint32 atom_traf_get_sample_num (AtomTRAF * traf);
+void atom_trun_set_offset (AtomTRUN * trun, gint32 offset);
+void atom_moof_add_traf (AtomMOOF *moof, AtomTRAF *traf);
+
+AtomMFRA* atom_mfra_new (AtomsContext *context);
+void atom_mfra_free (AtomMFRA *mfra);
+AtomTFRA* atom_tfra_new (AtomsContext *context, guint32 track_ID);
+void atom_tfra_add_entry (AtomTFRA *tfra, guint64 dts, guint32 sample_num);
+void atom_tfra_update_offset (AtomTFRA * tfra, guint64 offset);
+void atom_mfra_add_tfra (AtomMFRA *mfra, AtomTFRA *tfra);
+guint64 atom_mfra_copy_data (AtomMFRA *mfra, guint8 **buffer, guint64 *size, guint64* offset);
+
+
+/* media sample description related helpers */
+/* Parameters used to build a visual (video) sample description entry. */
+typedef struct
+{
+ guint16 version;
+ guint32 fourcc; /* sample entry type (codec fourcc) */
+ guint width; /* in pixels */
+ guint height; /* in pixels */
+ guint depth;
+ guint frame_count;
+ gint color_table_id;
+ guint par_n; /* pixel aspect ratio numerator */
+ guint par_d; /* pixel aspect ratio denominator */
+
+ GstBuffer *codec_data; /* codec-specific configuration blob */
+} VisualSampleEntry;
+
+/* Parameters used to build an audio sample description entry. */
+typedef struct
+{
+ guint32 fourcc; /* sample entry type (codec fourcc) */
+ guint version;
+ gint compression_id;
+ guint sample_rate;
+ guint channels;
+ guint sample_size;
+ guint bytes_per_packet;
+ guint samples_per_packet;
+ guint bytes_per_sample;
+ guint bytes_per_frame;
+
+ GstBuffer *codec_data; /* codec-specific configuration blob */
+} AudioSampleEntry;
+
+/* Parameters used to build a (tx3g) subtitle sample description entry,
+ * consumed by atom_trak_set_subtitle_type(). */
+typedef struct
+{
+ guint32 fourcc;
+
+ guint8 font_face; /* bold=0x1, italic=0x2, underline=0x4 */
+ guint8 font_size;
+ guint32 foreground_color_rgba;
+} SubtitleSampleEntry;
+
+void subtitle_sample_entry_init (SubtitleSampleEntry * entry);
+
+SampleTableEntryMP4A * atom_trak_set_audio_type (AtomTRAK * trak, AtomsContext * context,
+ AudioSampleEntry * entry, guint32 scale,
+ AtomInfo * ext, gint sample_size);
+
+SampleTableEntryMP4V * atom_trak_set_video_type (AtomTRAK * trak, AtomsContext * context,
+ VisualSampleEntry * entry, guint32 rate,
+ GList * ext_atoms_list);
+
+SampleTableEntryTX3G * atom_trak_set_subtitle_type (AtomTRAK * trak, AtomsContext * context,
+ SubtitleSampleEntry * entry);
+
+SampleTableEntryTMCD *
+atom_trak_set_timecode_type (AtomTRAK * trak, AtomsContext * context, guint trak_timescale, GstVideoTimeCode * tc);
+
+SampleTableEntry * atom_trak_set_caption_type (AtomTRAK *trak, AtomsContext *context,
+ guint32 trak_timescale, guint32 caption_type);
+
+void atom_trak_update_bitrates (AtomTRAK * trak, guint32 avg_bitrate,
+ guint32 max_bitrate);
+
+void atom_trak_tx3g_update_dimension (AtomTRAK * trak, guint32 width,
+ guint32 height);
+
+void sample_table_entry_add_ext_atom (SampleTableEntry * ste, AtomInfo * ext);
+
+AtomInfo * build_codec_data_extension (guint32 fourcc, const GstBuffer * codec_data);
+AtomInfo * build_mov_aac_extension (AtomTRAK * trak, const GstBuffer * codec_data,
+ guint32 avg_bitrate, guint32 max_bitrate);
+AtomInfo * build_mov_alac_extension (const GstBuffer * codec_data);
+AtomInfo * build_esds_extension (AtomTRAK * trak, guint8 object_type,
+ guint8 stream_type, const GstBuffer * codec_data,
+ guint32 avg_bitrate, guint32 max_bitrate);
+AtomInfo * build_btrt_extension (guint32 buffer_size_db, guint32 avg_bitrate,
+ guint32 max_bitrate);
+AtomInfo * build_jp2h_extension (gint width, gint height, const gchar *colorspace,
+ gint ncomp, const GValue * cmap_array,
+ const GValue * cdef_array);
+
+AtomInfo * build_jp2x_extension (const GstBuffer * prefix);
+AtomInfo * build_fiel_extension (GstVideoInterlaceMode mode, GstVideoFieldOrder order);
+AtomInfo * build_colr_extension (const GstVideoColorimetry *colorimetry, gboolean is_mp4);
+AtomInfo * build_clap_extension (gint width_n, gint width_d, gint height_n, gint height_d, gint h_off_n, gint h_off_d, gint v_off_n, gint v_off_d);
+AtomInfo * build_tapt_extension (gint clef_width, gint clef_height, gint prof_width, gint prof_height, gint enof_width, gint enof_height);
+
+
+AtomInfo * build_ac3_extension (guint8 fscod, guint8 bsid,
+ guint8 bsmod, guint8 acmod,
+ guint8 lfe_on, guint8 bitrate_code);
+AtomInfo * build_opus_extension (guint32 rate, guint8 channels, guint8 mapping_family,
+ guint8 stream_count, guint8 coupled_count,
+ guint8 channel_mapping[256], guint16 pre_skip,
+ guint16 output_gain);
+
+AtomInfo * build_amr_extension (void);
+AtomInfo * build_h263_extension (void);
+AtomInfo * build_gama_atom (gdouble gamma);
+AtomInfo * build_SMI_atom (const GstBuffer *seqh);
+AtomInfo * build_ima_adpcm_extension (gint channels, gint rate,
+ gint blocksize);
+AtomInfo * build_uuid_xmp_atom (GstBuffer * xmp);
+
+
+/*
+ * Meta tags functions
+ */
+void atom_udta_clear_tags (AtomUDTA *udta);
+void atom_udta_add_str_tag (AtomUDTA *udta, guint32 fourcc, const gchar *value);
+void atom_udta_add_uint_tag (AtomUDTA *udta, guint32 fourcc, guint32 flags,
+ guint32 value);
+void atom_udta_add_tag (AtomUDTA *udta, guint32 fourcc, guint32 flags,
+ const guint8 * data, guint size);
+void atom_udta_add_blob_tag (AtomUDTA *udta, guint8 *data, guint size);
+
+void atom_udta_add_3gp_str_tag (AtomUDTA *udta, guint32 fourcc, const gchar * value);
+void atom_udta_add_3gp_uint_tag (AtomUDTA *udta, guint32 fourcc, guint16 value);
+void atom_udta_add_3gp_str_int_tag (AtomUDTA *udta, guint32 fourcc, const gchar * value,
+ gint16 ivalue);
+void atom_udta_add_3gp_tag (AtomUDTA *udta, guint32 fourcc, guint8 * data,
+ guint size);
+
+void atom_udta_add_xmp_tags (AtomUDTA *udta, GstBuffer * xmp);
+
+AtomTREF * atom_tref_new (guint32 reftype);
+void atom_tref_add_entry (AtomTREF * tref, guint32 sample);
+
+#define GST_QT_MUX_DEFAULT_TAG_LANGUAGE "und" /* undefined/unknown */
+guint16 language_code (const char * lang);
+
+#endif /* __ATOMS_H__ */
diff --git a/gst/isomp4/atomsrecovery.c b/gst/isomp4/atomsrecovery.c
new file mode 100644
index 0000000000..edc443487b
--- /dev/null
+++ b/gst/isomp4/atomsrecovery.c
@@ -0,0 +1,1206 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2010 Thiago Santos <thiago.sousa.santos@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/*
+ * This module contains functions for serializing partial information from
+ * a mux in progress (by qtmux elements). This enables reconstruction of the
+ * moov box if a crash happens and thus recovering the movie file.
+ *
+ * Usage:
+ * 1) pipeline: ...yourelements ! qtmux moov-recovery-file=path.mrf ! \
+ * filesink location=moovie.mov
+ *
+ * 2) CRASH!
+ *
+ * 3) gst-launch-1.0 qtmoovrecover recovery-input=path.mrf broken-input=moovie.mov \
+ fixed-output=recovered.mov
+ *
+ * 4) (Hopefully) enjoy recovered.mov.
+ *
+ * --- Recovery file layout ---
+ * 1) Version (a guint16)
+ * 2) Prefix atom (if present)
+ * 3) ftyp atom
+ * 4) MVHD atom (without timescale/duration set)
+ * 5) moovie timescale
+ * 6) number of traks
+ * 7) list of trak atoms (stbl data is ignored, except for the stsd atom)
+ * 8) Buffers metadata (metadata that is relevant to the container)
+ * Buffers metadata are stored in the order they are added to the mdat,
+ * each entre has a fixed size and is stored in BE. booleans are stored
+ * as a single byte where 0 means false, otherwise is true.
+ * Metadata:
+ * - guint32 track_id;
+ * - guint32 nsamples;
+ * - guint32 delta;
+ * - guint32 size;
+ * - guint64 chunk_offset;
+ * - gboolean sync;
+ * - gboolean do_pts;
+ * - guint64 pts_offset; (always present, ignored if do_pts is false)
+ *
+ * The mdat file might contain ftyp and then mdat, in case this is the faststart
+ * temporary file there is no ftyp and no mdat header, only the buffers data.
+ *
+ * Notes about recovery file layout: We still don't store tags nor EDTS data.
+ *
+ * IMPORTANT: this is still in an experimental state.
+ */
+
+#include "atomsrecovery.h"
+
+#define MAX_CHUNK_SIZE (1024 * 1024) /* 1MB */
+
+#define ATOMS_RECOV_OUTPUT_WRITE_ERROR(err) \
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE, \
+ "Failed to write to output file: %s", g_strerror (errno))
+
+/* Serializes the recovery-file format version as a big-endian guint16. */
+static gboolean
+atoms_recov_write_version (FILE * f)
+{
+  guint8 buf[2];
+
+  GST_WRITE_UINT16_BE (buf, ATOMS_RECOV_FILE_VERSION);
+  return fwrite (buf, sizeof (buf), 1, f) == 1;
+}
+
+/* Writes the optional prefix buffer followed by the serialized ftyp atom. */
+static gboolean
+atoms_recov_write_ftyp_info (FILE * f, AtomFTYP * ftyp, GstBuffer * prefix)
+{
+  guint8 *buf = NULL;
+  guint64 bufsize = 0;
+  guint64 serialized = 0;
+  gboolean ok;
+
+  if (prefix != NULL) {
+    GstMapInfo map;
+
+    if (!gst_buffer_map (prefix, &map, GST_MAP_READ))
+      return FALSE;
+    ok = fwrite (map.data, 1, map.size, f) == map.size;
+    gst_buffer_unmap (prefix, &map);
+    if (!ok)
+      return FALSE;
+  }
+
+  if (!atom_ftyp_copy_data (ftyp, &buf, &bufsize, &serialized))
+    return FALSE;
+
+  ok = fwrite (buf, 1, serialized, f) == serialized;
+  g_free (buf);
+  return ok;
+}
+
+/*
+ * Writes important info on the 'moov' atom (non-trak related)
+ * to be able to recover the moov structure after a crash.
+ *
+ * Currently, it writes the MVHD atom.
+ */
+static gboolean
+atoms_recov_write_moov_info (FILE * f, AtomMOOV * moov)
+{
+  guint8 *data;
+  guint64 size;
+  guint64 offset = 0;
+  guint64 atom_size = 0;
+  /* fwrite returns size_t; using a gint here invited a signed/unsigned
+   * mismatch in the comparison below (and fixes the 'writen' typo) */
+  size_t written = 0;
+
+  /* likely enough for a serialized mvhd; copy_data grows it if needed */
+  size = 256;
+  data = g_malloc (size);
+  atom_size = atom_mvhd_copy_data (&moov->mvhd, &data, &size, &offset);
+  if (atom_size > 0)
+    written = fwrite (data, 1, atom_size, f);
+  g_free (data);
+  return atom_size > 0 && written == atom_size;
+}
+
+/*
+ * Writes the number of traks to the file as a big-endian guint32.
+ */
+static gboolean
+atoms_recov_write_traks_number (FILE * f, guint32 traks)
+{
+  guint8 buf[4];
+
+  GST_WRITE_UINT32_BE (buf, traks);
+  return fwrite (buf, sizeof (buf), 1, f) == 1;
+}
+
+/*
+ * Writes the moov's timescale to the file as a big-endian guint32.
+ */
+static gboolean
+atoms_recov_write_moov_timescale (FILE * f, guint32 timescale)
+{
+  guint8 buf[4];
+
+  GST_WRITE_UINT32_BE (buf, timescale);
+  return fwrite (buf, sizeof (buf), 1, f) == 1;
+}
+
+/*
+ * Serializes the trak atom and writes it to the recovery file.
+ */
+gboolean
+atoms_recov_write_trak_info (FILE * f, AtomTRAK * trak)
+{
+  guint8 *data;
+  guint64 size;
+  guint64 offset = 0;
+  guint64 atom_size = 0;
+  size_t written = 0;
+
+  /* buffer is realloced to a larger size if needed */
+  size = 4 * 1024;
+  data = g_malloc (size);
+  atom_size = atom_trak_copy_data (trak, &data, &size, &offset);
+  if (atom_size > 0) {
+    /* write byte-wise so fwrite's return is a byte count: the previous
+     * fwrite (data, atom_size, 1, f) returned an element count (0 or 1),
+     * which could never equal atom_size > 1, so this function always
+     * reported failure even on a successful write */
+    written = fwrite (data, 1, atom_size, f);
+  }
+  g_free (data);
+  return atom_size > 0 && written == atom_size;
+}
+
+/*
+ * Appends one fixed-size, big-endian TrakBufferEntryInfo record for the
+ * given trak to the recovery file. The layout must match the reader in
+ * moov_recov_parse_buffer_entry().
+ */
+gboolean
+atoms_recov_write_trak_samples (FILE * f, AtomTRAK * trak, guint32 nsamples,
+    guint32 delta, guint32 size, guint64 chunk_offset, gboolean sync,
+    gboolean do_pts, gint64 pts_offset)
+{
+  guint8 entry[TRAK_BUFFER_ENTRY_INFO_SIZE];
+
+  GST_WRITE_UINT32_BE (entry + 0, trak->tkhd.track_ID);
+  GST_WRITE_UINT32_BE (entry + 4, nsamples);
+  GST_WRITE_UINT32_BE (entry + 8, delta);
+  GST_WRITE_UINT32_BE (entry + 12, size);
+  GST_WRITE_UINT64_BE (entry + 16, chunk_offset);
+  GST_WRITE_UINT8 (entry + 24, sync ? 1 : 0);
+  GST_WRITE_UINT8 (entry + 25, do_pts ? 1 : 0);
+  /* pts_offset slot is always present; zeroed when do_pts is unset */
+  GST_WRITE_UINT64_BE (entry + 26, do_pts ? pts_offset : 0);
+
+  return fwrite (entry, 1, TRAK_BUFFER_ENTRY_INFO_SIZE, f) ==
+      TRAK_BUFFER_ENTRY_INFO_SIZE;
+}
+
+/*
+ * Writes the recovery-file preamble: format version, prefix + ftyp,
+ * mvhd, movie timescale and trak count — in that exact order, matching
+ * what moov_recov_file_create() expects to read back.
+ */
+gboolean
+atoms_recov_write_headers (FILE * f, AtomFTYP * ftyp, GstBuffer * prefix,
+    AtomMOOV * moov, guint32 timescale, guint32 traks_number)
+{
+  return atoms_recov_write_version (f)
+      && atoms_recov_write_ftyp_info (f, ftyp, prefix)
+      && atoms_recov_write_moov_info (f, moov)
+      && atoms_recov_write_moov_timescale (f, timescale)
+      && atoms_recov_write_traks_number (f, traks_number);
+}
+
+/* Reads an 8-byte atom header: 32-bit big-endian size, then the fourcc
+ * (read LE so it compares directly against the FOURCC_* constants). */
+static gboolean
+read_atom_header (FILE * f, guint32 * fourcc, guint32 * size)
+{
+  guint8 header[8];
+
+  if (fread (header, 1, sizeof (header), f) != sizeof (header))
+    return FALSE;
+
+  *size = GST_READ_UINT32_BE (header);
+  *fourcc = GST_READ_UINT32_LE (header + 4);
+  return TRUE;
+}
+
+/*
+ * Locates the ftyp atom (optionally preceded by a single prefix atom)
+ * right after the 2-byte version field, records the combined byte size of
+ * that prefix+ftyp region in moovrf->prefix_size, and leaves the file
+ * positioned just past the ftyp (i.e. at the serialized mvhd).
+ */
+static gboolean
+moov_recov_file_parse_prefix (MoovRecovFile * moovrf)
+{
+ guint32 fourcc;
+ guint32 size;
+ guint32 total_size = 0;
+ /* skip the 2-byte recovery-file version at the start */
+ if (fseek (moovrf->file, 2, SEEK_SET) != 0)
+ return FALSE;
+ if (!read_atom_header (moovrf->file, &fourcc, &size)) {
+ return FALSE;
+ }
+
+ if (fourcc != FOURCC_ftyp) {
+ /* we might have a prefix here */
+ if (fseek (moovrf->file, size - 8, SEEK_CUR) != 0)
+ return FALSE;
+
+ total_size += size;
+
+ /* now read the ftyp */
+ if (!read_atom_header (moovrf->file, &fourcc, &size))
+ return FALSE;
+ }
+
+ /* this has to be the ftyp */
+ if (fourcc != FOURCC_ftyp)
+ return FALSE;
+ total_size += size;
+ moovrf->prefix_size = total_size;
+ /* skip the ftyp payload (header already consumed) */
+ return fseek (moovrf->file, size - 8, SEEK_CUR) == 0;
+}
+
+/* Records the position and size of the mvhd atom, then skips past it. */
+static gboolean
+moov_recov_file_parse_mvhd (MoovRecovFile * moovrf)
+{
+  guint32 fourcc = 0;
+  guint32 size = 0;
+
+  if (!read_atom_header (moovrf->file, &fourcc, &size))
+    return FALSE;
+  /* check for sanity */
+  if (fourcc != FOURCC_mvhd)
+    return FALSE;
+
+  moovrf->mvhd_size = size;
+  moovrf->mvhd_pos = ftell (moovrf->file) - 8;
+
+  /* jump over the remainder of the mvhd payload */
+  return fseek (moovrf->file, size - 8, SEEK_CUR) == 0;
+}
+
+/*
+ * Parses the mdat atom header of the broken file, recording where the
+ * atom starts and how large its header is; mdat_size is seeded with the
+ * header size and later grown by mdat_recov_add_sample().
+ */
+static gboolean
+mdat_recov_file_parse_mdat_start (MdatRecovFile * mdatrf)
+{
+ guint32 fourcc, size;
+
+ if (!read_atom_header (mdatrf->file, &fourcc, &size)) {
+ return FALSE;
+ }
+ /* size == 1 signals an extended (64-bit) size field, making the header
+ * 16 bytes instead of 8 */
+ if (size == 1) {
+ mdatrf->mdat_header_size = 16;
+ mdatrf->mdat_size = 16;
+ } else {
+ mdatrf->mdat_header_size = 8;
+ mdatrf->mdat_size = 8;
+ }
+ /* NOTE(review): the 8 extra bytes of a 64-bit size are not consumed
+ * here, so the stream position is just past the 8-byte header either
+ * way — callers appear to rely only on mdat_start/header_size; confirm */
+ mdatrf->mdat_start = ftell (mdatrf->file) - 8;
+
+ return fourcc == FOURCC_mdat;
+}
+
+/*
+ * Scans the broken file for the mdat atom, skipping over the header atoms
+ * we expect to find before it (ftyp/free/udta), and leaves the stream
+ * positioned at the start of the mdat header. Any other atom aborts the
+ * scan; 'err' is only set for read/seek failures.
+ */
+static gboolean
+mdat_recov_file_find_mdat (FILE * file, GError ** err)
+{
+  guint32 fourcc = 0, size = 0;
+
+  while (fourcc != FOURCC_mdat) {
+    if (!read_atom_header (file, &fourcc, &size))
+      goto parse_error;
+
+    switch (fourcc) {
+        /* skip these atoms */
+      case FOURCC_ftyp:
+      case FOURCC_free:
+      case FOURCC_udta:
+        if (fseek (file, size - 8, SEEK_CUR) != 0)
+          goto file_seek_error;
+        break;
+      case FOURCC_mdat:
+        break;
+      default:
+        GST_ERROR ("Unexpected atom in headers %" GST_FOURCC_FORMAT,
+            GST_FOURCC_ARGS (fourcc));
+        return FALSE;
+    }
+  }
+
+  /* rewind to the beginning of the mdat header */
+  if (fseek (file, -8, SEEK_CUR) != 0)
+    goto file_seek_error;
+
+  return TRUE;
+
+parse_error:
+  g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+      "Failed to parse atom");
+  return FALSE;
+
+file_seek_error:
+  g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+      "Failed to seek to start of the file");
+  return FALSE;
+}
+
+/*
+ * Opens 'file' as the source of mdat payload data. If 'datafile' is TRUE
+ * the file is raw sample data with no atom headers (the faststart temp
+ * file case); otherwise the mdat atom is located and its header parsed.
+ * Takes ownership of 'file': it is closed by mdat_recov_file_free().
+ * Returns NULL (and sets 'err') on failure.
+ */
+MdatRecovFile *
+mdat_recov_file_create (FILE * file, gboolean datafile, GError ** err)
+{
+  MdatRecovFile *mrf;
+
+  /* validate before allocating: the original checked after g_new0, which
+   * leaked the struct when file was NULL */
+  g_return_val_if_fail (file != NULL, NULL);
+
+  mrf = g_new0 (MdatRecovFile, 1);
+  mrf->file = file;
+  mrf->rawfile = datafile;
+
+  /* get the file/data length */
+  if (fseek (file, 0, SEEK_END) != 0)
+    goto file_length_error;
+  /* still needs to deduce the mdat header and ftyp size */
+  mrf->data_size = ftell (file);
+  if (mrf->data_size == -1L)
+    goto file_length_error;
+
+  if (fseek (file, 0, SEEK_SET) != 0)
+    goto file_seek_error;
+
+  if (datafile) {
+    /* this file contains no atoms, only raw data to be placed on the mdat
+     * this happens when faststart mode is used */
+    mrf->mdat_start = 0;
+    mrf->mdat_header_size = 16;
+    mrf->mdat_size = 16;
+    return mrf;
+  }
+
+  if (!mdat_recov_file_find_mdat (file, err)) {
+    goto fail;
+  }
+
+  /* we don't parse this if we have a tmpdatafile */
+  if (!mdat_recov_file_parse_mdat_start (mrf)) {
+    g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+        "Error while parsing mdat atom");
+    goto fail;
+  }
+
+  return mrf;
+
+file_seek_error:
+  g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+      "Failed to seek to start of the file");
+  goto fail;
+
+file_length_error:
+  g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+      "Failed to determine file size");
+  goto fail;
+
+fail:
+  mdat_recov_file_free (mrf);
+  return NULL;
+}
+
+/* Frees an MdatRecovFile, closing its underlying file. NULL-safe so it
+ * can be called unconditionally from error paths. */
+void
+mdat_recov_file_free (MdatRecovFile * mrf)
+{
+  if (mrf == NULL)
+    return;
+  if (mrf->file)
+    fclose (mrf->file);
+  g_free (mrf);
+}
+
+/* Reads the big-endian trak count written by atoms_recov_write_traks_number. */
+static gboolean
+moov_recov_parse_num_traks (MoovRecovFile * moovrf)
+{
+  guint8 buf[4];
+
+  if (fread (buf, 1, sizeof (buf), moovrf->file) != sizeof (buf))
+    return FALSE;
+
+  moovrf->num_traks = GST_READ_UINT32_BE (buf);
+  return TRUE;
+}
+
+/* Reads the big-endian movie timescale written by
+ * atoms_recov_write_moov_timescale. */
+static gboolean
+moov_recov_parse_moov_timescale (MoovRecovFile * moovrf)
+{
+  guint8 buf[4];
+
+  if (fread (buf, 1, sizeof (buf), moovrf->file) != sizeof (buf))
+    return FALSE;
+
+  moovrf->timescale = GST_READ_UINT32_BE (buf);
+  return TRUE;
+}
+
+/* Consumes one whole atom, failing unless its fourcc matches
+ * expected_fourcc. */
+static gboolean
+skip_atom (MoovRecovFile * moovrf, guint32 expected_fourcc)
+{
+  guint32 fourcc = 0;
+  guint32 size = 0;
+
+  if (!read_atom_header (moovrf->file, &fourcc, &size) ||
+      fourcc != expected_fourcc)
+    return FALSE;
+
+  return fseek (moovrf->file, size - 8, SEEK_CUR) == 0;
+}
+
+/*
+ * Parses the tkhd atom: records its file offset and extracts the track_ID,
+ * leaving the stream positioned right after the tkhd.
+ */
+static gboolean
+moov_recov_parse_tkhd (MoovRecovFile * moovrf, TrakRecovData * trakrd)
+{
+ guint32 size;
+ guint32 fourcc;
+ guint8 data[4];
+
+ /* make sure we are on a tkhd atom */
+ if (!read_atom_header (moovrf->file, &fourcc, &size))
+ return FALSE;
+ if (fourcc != FOURCC_tkhd)
+ return FALSE;
+
+ trakrd->tkhd_file_offset = ftell (moovrf->file) - 8;
+
+ /* skip version/flags plus creation and modification times (12 bytes)
+ * to reach the track_ID field */
+ if (fseek (moovrf->file, 12, SEEK_CUR) != 0)
+ return FALSE;
+ if (fread (data, 1, 4, moovrf->file) != 4)
+ return FALSE;
+
+ /* advance the rest of tkhd (68 bytes — assumes a version 0 tkhd as
+ * written by this muxer; TODO confirm) */
+ if (fseek (moovrf->file, 68, SEEK_CUR) != 0)
+ return FALSE;
+
+ trakrd->trak_id = GST_READ_UINT32_BE (data);
+ return TRUE;
+}
+
+/*
+ * Parses the stbl atom: records its offset/size and the size of its stsd
+ * child plus the offset right after the stsd. Assumes stsd is the first
+ * child of stbl, which is how this muxer serializes it.
+ */
+static gboolean
+moov_recov_parse_stbl (MoovRecovFile * moovrf, TrakRecovData * trakrd)
+{
+ guint32 size;
+ guint32 fourcc;
+ guint32 auxsize;
+
+ if (!read_atom_header (moovrf->file, &fourcc, &size))
+ return FALSE;
+ if (fourcc != FOURCC_stbl)
+ return FALSE;
+
+ trakrd->stbl_file_offset = ftell (moovrf->file) - 8;
+ trakrd->stbl_size = size;
+
+ /* skip the stsd */
+ if (!read_atom_header (moovrf->file, &fourcc, &auxsize))
+ return FALSE;
+ if (fourcc != FOURCC_stsd)
+ return FALSE;
+ if (fseek (moovrf->file, auxsize - 8, SEEK_CUR) != 0)
+ return FALSE;
+
+ trakrd->stsd_size = auxsize;
+ trakrd->post_stsd_offset = ftell (moovrf->file);
+
+ /* as this is the last atom we parse, we don't skip forward */
+
+ return TRUE;
+}
+
+/*
+ * Parses the minf atom: records its offset/size, skips the media header
+ * (vmhd/smhd/hmhd/gmhd), an optional hdlr and the dinf, then hands off to
+ * moov_recov_parse_stbl(). Relies on the child ordering this muxer writes.
+ */
+static gboolean
+moov_recov_parse_minf (MoovRecovFile * moovrf, TrakRecovData * trakrd)
+{
+ guint32 size;
+ guint32 fourcc;
+ guint32 auxsize;
+
+ if (!read_atom_header (moovrf->file, &fourcc, &size))
+ return FALSE;
+ if (fourcc != FOURCC_minf)
+ return FALSE;
+
+ trakrd->minf_file_offset = ftell (moovrf->file) - 8;
+ trakrd->minf_size = size;
+
+ /* skip either of vmhd, smhd, hmhd or gmhd that might follow */
+ if (!read_atom_header (moovrf->file, &fourcc, &auxsize))
+ return FALSE;
+ if (fourcc != FOURCC_vmhd && fourcc != FOURCC_smhd && fourcc != FOURCC_hmhd &&
+ fourcc != FOURCC_gmhd)
+ return FALSE;
+ if (fseek (moovrf->file, auxsize - 8, SEEK_CUR))
+ return FALSE;
+
+ /* skip a possible hdlr and the following dinf */
+ if (!read_atom_header (moovrf->file, &fourcc, &auxsize))
+ return FALSE;
+ if (fourcc == FOURCC_hdlr) {
+ if (fseek (moovrf->file, auxsize - 8, SEEK_CUR))
+ return FALSE;
+ if (!read_atom_header (moovrf->file, &fourcc, &auxsize))
+ return FALSE;
+ }
+ if (fourcc != FOURCC_dinf)
+ return FALSE;
+ if (fseek (moovrf->file, auxsize - 8, SEEK_CUR))
+ return FALSE;
+
+ /* now we are ready to read the stbl */
+ if (!moov_recov_parse_stbl (moovrf, trakrd))
+ return FALSE;
+
+ return TRUE;
+}
+
+/*
+ * Parses the mdhd atom: records its file offset and extracts the media
+ * timescale, leaving the stream positioned right after the mdhd.
+ */
+static gboolean
+moov_recov_parse_mdhd (MoovRecovFile * moovrf, TrakRecovData * trakrd)
+{
+ guint32 size;
+ guint32 fourcc;
+ guint8 data[4];
+
+ /* make sure we are on a mdhd atom */
+ if (!read_atom_header (moovrf->file, &fourcc, &size))
+ return FALSE;
+ if (fourcc != FOURCC_mdhd)
+ return FALSE;
+
+ trakrd->mdhd_file_offset = ftell (moovrf->file) - 8;
+
+ /* get the timescale: skip version/flags and the creation/modification
+ * times (12 bytes — assumes a version 0 mdhd; TODO confirm) */
+ if (fseek (moovrf->file, 12, SEEK_CUR) != 0)
+ return FALSE;
+ if (fread (data, 1, 4, moovrf->file) != 4)
+ return FALSE;
+ trakrd->timescale = GST_READ_UINT32_BE (data);
+ /* skip the remaining 8 bytes of the mdhd body */
+ if (fseek (moovrf->file, 8, SEEK_CUR) != 0)
+ return FALSE;
+ return TRUE;
+}
+
+/*
+ * Parses the mdia atom: records its offset/size, then parses its mdhd,
+ * skips the hdlr and descends into the minf.
+ */
+static gboolean
+moov_recov_parse_mdia (MoovRecovFile * moovrf, TrakRecovData * trakrd)
+{
+ guint32 size;
+ guint32 fourcc;
+
+ /* make sure we are on a mdia atom */
+ if (!read_atom_header (moovrf->file, &fourcc, &size))
+ return FALSE;
+ if (fourcc != FOURCC_mdia)
+ return FALSE;
+
+ trakrd->mdia_file_offset = ftell (moovrf->file) - 8;
+ trakrd->mdia_size = size;
+
+ if (!moov_recov_parse_mdhd (moovrf, trakrd))
+ return FALSE;
+
+ if (!skip_atom (moovrf, FOURCC_hdlr))
+ return FALSE;
+ if (!moov_recov_parse_minf (moovrf, trakrd))
+ return FALSE;
+ return TRUE;
+}
+
+/*
+ * Parses one serialized trak atom: records its file offset and size,
+ * descends through tkhd and mdia, then records where any extra atoms
+ * after the mdia live. Leaves the stream positioned after the trak.
+ */
+static gboolean
+moov_recov_parse_trak (MoovRecovFile * moovrf, TrakRecovData * trakrd)
+{
+ guint64 offset;
+ guint32 size;
+ guint32 fourcc;
+
+ /* NOTE(review): offset is guint64; the == -1 check below relies on
+ * ftell's -1 converting to G_MAXUINT64 on both sides — it works, but a
+ * long local would be clearer */
+ offset = ftell (moovrf->file);
+ if (offset == -1) {
+ return FALSE;
+ }
+
+ /* make sure we are on a trak atom */
+ if (!read_atom_header (moovrf->file, &fourcc, &size)) {
+ return FALSE;
+ }
+ if (fourcc != FOURCC_trak) {
+ return FALSE;
+ }
+ trakrd->trak_size = size;
+
+ /* now we should have a trak header 'tkhd' */
+ if (!moov_recov_parse_tkhd (moovrf, trakrd))
+ return FALSE;
+
+ /* FIXME add edts handling here and in qtmux, as this is only detected
+ * after buffers start flowing */
+
+ if (!moov_recov_parse_mdia (moovrf, trakrd))
+ return FALSE;
+
+ /* jump to the end of the mdia; anything between here and the end of
+ * the trak is preserved verbatim as "extra atoms" */
+ if (fseek (moovrf->file,
+ (long int) trakrd->mdia_file_offset + trakrd->mdia_size,
+ SEEK_SET) != 0)
+ return FALSE;
+
+ trakrd->extra_atoms_offset = ftell (moovrf->file);
+ trakrd->extra_atoms_size = size - (trakrd->extra_atoms_offset - offset);
+
+ trakrd->file_offset = offset;
+ /* position after the trak */
+ return fseek (moovrf->file, (long int) offset + size, SEEK_SET) == 0;
+}
+
+/*
+ * Parses the headers of a moov recovery file (prefix/ftyp, mvhd, movie
+ * timescale, trak count) and then the layout of each serialized trak.
+ * Takes ownership of 'file': it is closed by moov_recov_file_free().
+ * Returns NULL (and sets 'err') on failure.
+ */
+MoovRecovFile *
+moov_recov_file_create (FILE * file, GError ** err)
+{
+  gint i;
+  MoovRecovFile *moovrf;
+
+  /* validate before allocating: the original checked after g_new0, which
+   * leaked the struct when file was NULL */
+  g_return_val_if_fail (file != NULL, NULL);
+
+  moovrf = g_new0 (MoovRecovFile, 1);
+  moovrf->file = file;
+
+  /* look for ftyp and prefix at the start */
+  if (!moov_recov_file_parse_prefix (moovrf)) {
+    g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+        "Error while parsing prefix atoms");
+    goto fail;
+  }
+
+  /* parse the mvhd */
+  if (!moov_recov_file_parse_mvhd (moovrf)) {
+    g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+        "Error while parsing mvhd atom");
+    goto fail;
+  }
+
+  if (!moov_recov_parse_moov_timescale (moovrf)) {
+    g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+        "Error while parsing timescale");
+    goto fail;
+  }
+  if (!moov_recov_parse_num_traks (moovrf)) {
+    /* message fixed: the original duplicated the word "parsing" */
+    g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+        "Error while parsing number of traks");
+    goto fail;
+  }
+
+  /* sanity check */
+  if (moovrf->num_traks > 1024) {
+    g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+        "Unsupported number of traks");
+    goto fail;
+  }
+
+  /* init the traks */
+  moovrf->traks_rd = g_new0 (TrakRecovData, moovrf->num_traks);
+  for (i = 0; i < moovrf->num_traks; i++) {
+    atom_stbl_init (&(moovrf->traks_rd[i].stbl));
+  }
+  for (i = 0; i < moovrf->num_traks; i++) {
+    if (!moov_recov_parse_trak (moovrf, &(moovrf->traks_rd[i]))) {
+      g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+          "Error while parsing trak atom");
+      goto fail;
+    }
+  }
+
+  return moovrf;
+
+fail:
+  moov_recov_file_free (moovrf);
+  return NULL;
+}
+
+/* Frees a MoovRecovFile, clearing its per-trak sample tables and closing
+ * its underlying file. NULL-safe so error paths can call it blindly. */
+void
+moov_recov_file_free (MoovRecovFile * moovrf)
+{
+  gint i;
+
+  if (moovrf == NULL)
+    return;
+  if (moovrf->file)
+    fclose (moovrf->file);
+  if (moovrf->traks_rd) {
+    for (i = 0; i < moovrf->num_traks; i++) {
+      atom_stbl_clear (&(moovrf->traks_rd[i].stbl));
+    }
+    g_free (moovrf->traks_rd);
+  }
+  g_free (moovrf);
+}
+
+/*
+ * Reads one fixed-size, big-endian TrakBufferEntryInfo record — the
+ * counterpart of atoms_recov_write_trak_samples().
+ */
+static gboolean
+moov_recov_parse_buffer_entry (MoovRecovFile * moovrf, TrakBufferEntryInfo * b)
+{
+  guint8 rec[TRAK_BUFFER_ENTRY_INFO_SIZE];
+
+  if (fread (rec, 1, sizeof (rec), moovrf->file) != sizeof (rec))
+    return FALSE;
+
+  b->track_id = GST_READ_UINT32_BE (rec);
+  b->nsamples = GST_READ_UINT32_BE (rec + 4);
+  b->delta = GST_READ_UINT32_BE (rec + 8);
+  b->size = GST_READ_UINT32_BE (rec + 12);
+  b->chunk_offset = GST_READ_UINT64_BE (rec + 16);
+  b->sync = rec[24] != 0;
+  b->do_pts = rec[25] != 0;
+  b->pts_offset = GST_READ_UINT64_BE (rec + 26);
+  return TRUE;
+}
+
+/*
+ * Accounts 'size' more payload bytes to the mdat, refusing to claim more
+ * data than the broken file actually contains.
+ */
+static gboolean
+mdat_recov_add_sample (MdatRecovFile * mdatrf, guint32 size)
+{
+  gboolean fits;
+
+  /* test if this data exists */
+  fits = (mdatrf->mdat_size - mdatrf->mdat_header_size + size <=
+      mdatrf->data_size);
+  if (fits)
+    mdatrf->mdat_size += size;
+  return fits;
+}
+
+/* Finds the parsed trak whose track ID matches 'id'; NULL if absent. */
+static TrakRecovData *
+moov_recov_get_trak (MoovRecovFile * moovrf, guint32 id)
+{
+  gint idx;
+
+  for (idx = 0; idx < moovrf->num_traks; idx++) {
+    TrakRecovData *cand = &moovrf->traks_rd[idx];
+
+    if (cand->trak_id == id)
+      return cand;
+  }
+  return NULL;
+}
+
+/* Accumulates one buffer entry into the trak: extends its duration and
+ * registers the samples in its rebuilt sample table. */
+static void
+trak_recov_data_add_sample (TrakRecovData * trak, TrakBufferEntryInfo * b)
+{
+ trak->duration += b->nsamples * b->delta;
+ atom_stbl_add_samples (&trak->stbl, b->nsamples, b->delta, b->size,
+ b->chunk_offset, b->sync, b->pts_offset);
+}
+
+/*
+ * Parses the buffer entries in the MoovRecovFile and matches the inputs
+ * with the data in the MdatRecovFile. Whenever a buffer entry
+ * represents 'x' bytes of data, the same amount of data is 'validated' in
+ * the MdatRecovFile and will be included in the generated movie file.
+ */
+gboolean
+moov_recov_parse_buffers (MoovRecovFile * moovrf, MdatRecovFile * mdatrf,
+ GError ** err)
+{
+ TrakBufferEntryInfo entry;
+ TrakRecovData *trak;
+
+ /* we assume both moovrf and mdatrf are at the starting points of their
+ * data reading */
+ while (moov_recov_parse_buffer_entry (moovrf, &entry)) {
+ /* be sure we still have this data in mdat */
+ trak = moov_recov_get_trak (moovrf, entry.track_id);
+ if (trak == NULL) {
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_PARSING,
+ "Invalid trak id found in buffer entry");
+ return FALSE;
+ }
+ /* best-effort: stop at the first sample whose bytes are missing from
+ * the truncated mdat — this still counts as success, recovering
+ * everything that fit */
+ if (!mdat_recov_add_sample (mdatrf, entry.size))
+ break;
+ trak_recov_data_add_sample (trak, &entry);
+ }
+ return TRUE;
+}
+
+/*
+ * Computes the size the rebuilt trak atom will have. The *_copy_data
+ * serializers are dry-run with NULL buffers so 'offset' accumulates the
+ * serialized size of the rebuilt stbl children without allocating; the
+ * atom order here must match moov_recov_get_stbl_children_data().
+ * Returns 0 on failure.
+ */
+static guint32
+trak_recov_data_get_trak_atom_size (TrakRecovData * trak)
+{
+ AtomSTBL *stbl = &trak->stbl;
+ guint64 offset;
+
+ /* dry-run serialize our stbl child atoms to measure them */
+ offset = 0;
+
+ if (!atom_stts_copy_data (&stbl->stts, NULL, NULL, &offset)) {
+ goto fail;
+ }
+ if (atom_array_get_len (&stbl->stss.entries) > 0) {
+ if (!atom_stss_copy_data (&stbl->stss, NULL, NULL, &offset)) {
+ goto fail;
+ }
+ }
+ if (!atom_stsc_copy_data (&stbl->stsc, NULL, NULL, &offset)) {
+ goto fail;
+ }
+ if (!atom_stsz_copy_data (&stbl->stsz, NULL, NULL, &offset)) {
+ goto fail;
+ }
+ if (stbl->ctts) {
+ if (!atom_ctts_copy_data (stbl->ctts, NULL, NULL, &offset)) {
+ goto fail;
+ }
+ }
+ if (!atom_stco64_copy_data (&stbl->stco64, NULL, NULL, &offset)) {
+ goto fail;
+ }
+
+ /* recorded trak size, with the old stbl payload replaced by the kept
+ * stsd + the rebuilt children + the 8-byte stbl header */
+ return trak->trak_size + ((trak->stsd_size + offset + 8) - trak->stbl_size);
+
+fail:
+ return 0;
+}
+
+/*
+ * Serializes the rebuilt stbl children into a newly-allocated buffer
+ * (caller owns it; free with g_free) and stores the byte count in
+ * *p_size. The atom order must match the size computation in
+ * trak_recov_data_get_trak_atom_size(). Returns NULL on failure.
+ */
+static guint8 *
+moov_recov_get_stbl_children_data (MoovRecovFile * moovrf, TrakRecovData * trak,
+ guint64 * p_size)
+{
+ AtomSTBL *stbl = &trak->stbl;
+ guint8 *buffer;
+ guint64 size;
+ guint64 offset;
+
+ /* write out our stbl child atoms
+ *
+ * Use 1MB as a starting size, *_copy_data functions
+ * will grow the buffer if needed.
+ */
+ size = 1024 * 1024;
+ buffer = g_malloc0 (size);
+ offset = 0;
+
+ if (!atom_stts_copy_data (&stbl->stts, &buffer, &size, &offset)) {
+ goto fail;
+ }
+ if (atom_array_get_len (&stbl->stss.entries) > 0) {
+ if (!atom_stss_copy_data (&stbl->stss, &buffer, &size, &offset)) {
+ goto fail;
+ }
+ }
+ if (!atom_stsc_copy_data (&stbl->stsc, &buffer, &size, &offset)) {
+ goto fail;
+ }
+ if (!atom_stsz_copy_data (&stbl->stsz, &buffer, &size, &offset)) {
+ goto fail;
+ }
+ if (stbl->ctts) {
+ if (!atom_ctts_copy_data (stbl->ctts, &buffer, &size, &offset)) {
+ goto fail;
+ }
+ }
+ if (!atom_stco64_copy_data (&stbl->stco64, &buffer, &size, &offset)) {
+ goto fail;
+ }
+ *p_size = offset;
+ return buffer;
+
+fail:
+ g_free (buffer);
+ return NULL;
+}
+
+/*
+ * Copies 'size' bytes starting at 'position' of 'from' into 'to'.
+ * 'err' is only set for write failures; read/seek failures just return
+ * FALSE, matching the rest of this file's read paths.
+ */
+static gboolean
+copy_data_from_file_to_file (FILE * from, guint position, guint size, FILE * to,
+    GError ** err)
+{
+  gboolean ret = FALSE;
+  guint8 *chunk = NULL;
+
+  if (fseek (from, position, SEEK_SET) != 0)
+    goto beach;
+
+  chunk = g_malloc (size);
+  if (fread (chunk, 1, size, from) != size)
+    goto beach;
+
+  if (fwrite (chunk, 1, size, to) == size) {
+    ret = TRUE;
+  } else {
+    ATOMS_RECOV_OUTPUT_WRITE_ERROR (err);
+  }
+
+beach:
+  g_free (chunk);
+  return ret;
+}
+
+gboolean
+moov_recov_write_file (MoovRecovFile * moovrf, MdatRecovFile * mdatrf,
+ FILE * outf, GError ** err, GError ** warn)
+{
+ guint8 auxdata[16];
+ guint8 *data = NULL;
+ guint8 *prefix_data = NULL;
+ guint8 *mvhd_data = NULL;
+ guint8 *trak_data = NULL;
+ guint32 moov_size = 0;
+ gint i;
+ guint64 stbl_children_size = 0;
+ guint8 *stbl_children = NULL;
+ guint32 longest_duration = 0;
+ guint16 version;
+ guint remaining;
+
+ /* check the version */
+ if (fseek (moovrf->file, 0, SEEK_SET) != 0) {
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+ "Failed to seek to the start of the moov recovery file");
+ goto fail;
+ }
+ if (fread (auxdata, 1, 2, moovrf->file) != 2) {
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+ "Failed to read version from file");
+ }
+
+ version = GST_READ_UINT16_BE (auxdata);
+ if (version != ATOMS_RECOV_FILE_VERSION) {
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_VERSION,
+ "Input file version (%u) is not supported in this version (%u)",
+ version, ATOMS_RECOV_FILE_VERSION);
+ return FALSE;
+ }
+
+ /* write the ftyp */
+ prefix_data = g_malloc (moovrf->prefix_size);
+ if (fread (prefix_data, 1, moovrf->prefix_size,
+ moovrf->file) != moovrf->prefix_size) {
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+ "Failed to read the ftyp atom from file");
+ goto fail;
+ }
+ if (fwrite (prefix_data, 1, moovrf->prefix_size, outf) != moovrf->prefix_size) {
+ ATOMS_RECOV_OUTPUT_WRITE_ERROR (err);
+ goto fail;
+ }
+ g_free (prefix_data);
+ prefix_data = NULL;
+
+ /* need to calculate the moov size beforehand to add the offset to
+ * chunk offset entries */
+ moov_size += moovrf->mvhd_size + 8; /* mvhd + moov size + fourcc */
+ for (i = 0; i < moovrf->num_traks; i++) {
+ TrakRecovData *trak = &(moovrf->traks_rd[i]);
+ guint32 duration; /* in moov's timescale */
+ guint32 trak_size;
+
+ /* convert trak duration to moov's duration */
+ duration = gst_util_uint64_scale_round (trak->duration, moovrf->timescale,
+ trak->timescale);
+
+ if (duration > longest_duration)
+ longest_duration = duration;
+ trak_size = trak_recov_data_get_trak_atom_size (trak);
+ if (trak_size == 0) {
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_GENERIC,
+ "Failed to estimate trak atom size");
+ goto fail;
+ }
+ moov_size += trak_size;
+ }
+
+ /* add chunks offsets */
+ for (i = 0; i < moovrf->num_traks; i++) {
+ TrakRecovData *trak = &(moovrf->traks_rd[i]);
+ /* 8 or 16 for the mdat header */
+ gint64 offset = moov_size + ftell (outf) + mdatrf->mdat_header_size;
+ atom_stco64_chunks_set_offset (&trak->stbl.stco64, offset);
+ }
+
+ /* write the moov */
+ GST_WRITE_UINT32_BE (auxdata, moov_size);
+ GST_WRITE_UINT32_LE (auxdata + 4, FOURCC_moov);
+ if (fwrite (auxdata, 1, 8, outf) != 8) {
+ ATOMS_RECOV_OUTPUT_WRITE_ERROR (err);
+ goto fail;
+ }
+
+ /* write the mvhd */
+ mvhd_data = g_malloc (moovrf->mvhd_size);
+ if (fseek (moovrf->file, moovrf->mvhd_pos, SEEK_SET) != 0)
+ goto fail;
+ if (fread (mvhd_data, 1, moovrf->mvhd_size,
+ moovrf->file) != moovrf->mvhd_size)
+ goto fail;
+ GST_WRITE_UINT32_BE (mvhd_data + 20, moovrf->timescale);
+ GST_WRITE_UINT32_BE (mvhd_data + 24, longest_duration);
+ if (fwrite (mvhd_data, 1, moovrf->mvhd_size, outf) != moovrf->mvhd_size) {
+ ATOMS_RECOV_OUTPUT_WRITE_ERROR (err);
+ goto fail;
+ }
+ g_free (mvhd_data);
+ mvhd_data = NULL;
+
+ /* write the traks, this is the tough part because we need to update:
+ * - stbl atom
+ * - sizes of atoms from stbl to trak
+ * - trak duration
+ */
+ for (i = 0; i < moovrf->num_traks; i++) {
+ TrakRecovData *trak = &(moovrf->traks_rd[i]);
+ guint trak_data_size;
+ guint32 stbl_new_size;
+ guint32 minf_new_size;
+ guint32 mdia_new_size;
+ guint32 trak_new_size;
+ guint32 size_diff;
+ guint32 duration; /* in moov's timescale */
+
+ /* convert trak duration to moov's duration */
+ duration = gst_util_uint64_scale_round (trak->duration, moovrf->timescale,
+ trak->timescale);
+
+ stbl_children = moov_recov_get_stbl_children_data (moovrf, trak,
+ &stbl_children_size);
+ if (stbl_children == NULL)
+ goto fail;
+
+ /* calc the new size of the atoms from stbl to trak in the atoms tree */
+ stbl_new_size = trak->stsd_size + stbl_children_size + 8;
+ size_diff = stbl_new_size - trak->stbl_size;
+ minf_new_size = trak->minf_size + size_diff;
+ mdia_new_size = trak->mdia_size + size_diff;
+ trak_new_size = trak->trak_size + size_diff;
+
+ if (fseek (moovrf->file, trak->file_offset, SEEK_SET) != 0)
+ goto fail;
+ trak_data_size = trak->post_stsd_offset - trak->file_offset;
+ trak_data = g_malloc (trak_data_size);
+ if (fread (trak_data, 1, trak_data_size, moovrf->file) != trak_data_size) {
+ goto fail;
+ }
+ /* update the size values in those read atoms before writing */
+ GST_WRITE_UINT32_BE (trak_data, trak_new_size);
+ GST_WRITE_UINT32_BE (trak_data + (trak->mdia_file_offset -
+ trak->file_offset), mdia_new_size);
+ GST_WRITE_UINT32_BE (trak_data + (trak->minf_file_offset -
+ trak->file_offset), minf_new_size);
+ GST_WRITE_UINT32_BE (trak_data + (trak->stbl_file_offset -
+ trak->file_offset), stbl_new_size);
+
+ /* update duration values in tkhd and mdhd */
+ GST_WRITE_UINT32_BE (trak_data + (trak->tkhd_file_offset -
+ trak->file_offset) + 28, duration);
+ GST_WRITE_UINT32_BE (trak_data + (trak->mdhd_file_offset -
+ trak->file_offset) + 24, trak->duration);
+
+ if (fwrite (trak_data, 1, trak_data_size, outf) != trak_data_size) {
+ ATOMS_RECOV_OUTPUT_WRITE_ERROR (err);
+ goto fail;
+ }
+ if (fwrite (stbl_children, 1, stbl_children_size, outf) !=
+ stbl_children_size) {
+ ATOMS_RECOV_OUTPUT_WRITE_ERROR (err);
+ goto fail;
+ }
+
+ g_free (trak_data);
+ trak_data = NULL;
+ g_free (stbl_children);
+ stbl_children = NULL;
+
+ /* Copy the extra atoms after 'minf' */
+ if (!copy_data_from_file_to_file (moovrf->file, trak->extra_atoms_offset,
+ trak->extra_atoms_size, outf, err))
+ goto fail;
+ }
+
+ /* write the mdat */
+ /* write the header first */
+ if (mdatrf->mdat_header_size == 16) {
+ GST_WRITE_UINT32_BE (auxdata, 1);
+ GST_WRITE_UINT32_LE (auxdata + 4, FOURCC_mdat);
+ GST_WRITE_UINT64_BE (auxdata + 8, mdatrf->mdat_size);
+ } else if (mdatrf->mdat_header_size == 8) {
+ GST_WRITE_UINT32_BE (auxdata, mdatrf->mdat_size);
+ GST_WRITE_UINT32_LE (auxdata + 4, FOURCC_mdat);
+ } else {
+ GST_ERROR ("Unexpected atom size: %u", mdatrf->mdat_header_size);
+ g_assert_not_reached ();
+ goto fail;
+ }
+
+ if (fwrite (auxdata, 1, mdatrf->mdat_header_size,
+ outf) != mdatrf->mdat_header_size) {
+ ATOMS_RECOV_OUTPUT_WRITE_ERROR (err);
+ goto fail;
+ }
+
+ /* now read the mdat data and output to the file */
+ if (fseek (mdatrf->file, mdatrf->mdat_start +
+ (mdatrf->rawfile ? 0 : mdatrf->mdat_header_size), SEEK_SET) != 0)
+ goto fail;
+
+ remaining = mdatrf->mdat_size - mdatrf->mdat_header_size;
+ data = g_malloc (MAX_CHUNK_SIZE);
+ while (!feof (mdatrf->file) && remaining > 0) {
+ gint read, write, readsize;
+
+ readsize = MIN (MAX_CHUNK_SIZE, remaining);
+
+ read = fread (data, 1, readsize, mdatrf->file);
+ write = fwrite (data, 1, read, outf);
+ remaining -= read;
+
+ if (write != read) {
+ g_set_error (err, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+ "Failed to copy data to output file: %s", g_strerror (errno));
+ goto fail;
+ }
+ }
+ g_free (data);
+
+ if (remaining) {
+ g_set_error (warn, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+ "Samples in recovery file were not present on headers."
+ " Bytes lost: %u", remaining);
+ } else if (!feof (mdatrf->file)) {
+ g_set_error (warn, ATOMS_RECOV_QUARK, ATOMS_RECOV_ERR_FILE,
+ "Samples in headers were not found in data file.");
+ GST_FIXME ("Rewrite mdat size if we reach this to make the file"
+ " fully correct");
+ }
+
+ return TRUE;
+
+fail:
+ g_free (stbl_children);
+ g_free (mvhd_data);
+ g_free (prefix_data);
+ g_free (trak_data);
+ g_free (data);
+ return FALSE;
+}
diff --git a/gst/isomp4/atomsrecovery.h b/gst/isomp4/atomsrecovery.h
new file mode 100644
index 0000000000..f044c9b9fc
--- /dev/null
+++ b/gst/isomp4/atomsrecovery.h
@@ -0,0 +1,162 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2010 Thiago Santos <thiago.sousa.santos@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __ATOMS_RECOVERY_H__
+#define __ATOMS_RECOVERY_H__
+
+#include <glib.h>
+#include <string.h>
+#include <stdio.h>
+#include <gst/gst.h>
+
+#include "atoms.h"
+
+/* Version to be incremented each time we decide
+ * to change the file layout */
+#define ATOMS_RECOV_FILE_VERSION 1
+
+#define ATOMS_RECOV_QUARK (g_quark_from_string ("qtmux-atoms-recovery"))
+
+/* gerror error codes */
+#define ATOMS_RECOV_ERR_GENERIC 1
+#define ATOMS_RECOV_ERR_FILE 2
+#define ATOMS_RECOV_ERR_PARSING 3
+#define ATOMS_RECOV_ERR_VERSION 4
+
/* this struct represents each buffer in a moov file, containing the info
 * that is placed in the stsd children atoms
 * Fields should be written in BE order, and booleans should be written as
 * 1 byte with 0 for false, anything otherwise */
#define TRAK_BUFFER_ENTRY_INFO_SIZE 34
typedef struct
{
  guint32 track_id;
  guint32 nsamples;
  guint32 delta;
  guint32 size;
  guint64 chunk_offset;
  guint64 pts_offset;
  gboolean sync;
  gboolean do_pts;
} TrakBufferEntryInfo;

/* Per-track state parsed from the moov recovery file; the *_file_offset
 * fields locate atoms inside that recovery file so their size/duration
 * fields can be patched when the output file is written */
typedef struct
{
  guint32 trak_id;
  guint32 duration;             /* duration in trak timescale */
  guint32 timescale;            /* trak's timescale */

  guint64 file_offset;          /* start of this trak atom in the recovery file */

  /* need for later updating duration */
  guint64 tkhd_file_offset;
  guint64 mdhd_file_offset;

  /* need these offsets to update size */
  guint32 trak_size;
  guint64 mdia_file_offset;
  guint32 mdia_size;
  guint64 minf_file_offset;
  guint32 minf_size;
  guint64 stbl_file_offset;
  guint32 stbl_size;

  guint64 post_stsd_offset;     /* first byte after the stsd atom */
  guint32 stsd_size;

  /* extra atoms that follow 'minf', copied verbatim to the output */
  guint32 extra_atoms_size;
  guint32 extra_atoms_offset;

  /* for storing the samples info */
  AtomSTBL stbl;
} TrakRecovData;

/* State for the mdat (media data) recovery input file */
typedef struct
{
  FILE * file;
  gboolean rawfile;             /* TRUE if file holds raw data with no mdat header */

  /* results from parsing the input file */
  guint64 data_size;
  guint32 mdat_header_size;     /* 8 or 16 bytes */
  guint mdat_start;

  guint64 mdat_size;
} MdatRecovFile;

/* State for the moov (headers) recovery input file */
typedef struct
{
  FILE * file;
  guint32 timescale;

  guint32 mvhd_pos;
  guint32 mvhd_size;
  guint32 prefix_size;          /* prefix + ftyp total size */

  gint num_traks;
  TrakRecovData *traks_rd;      /* array of num_traks entries */
} MoovRecovFile;
+
+gboolean atoms_recov_write_trak_info (FILE * f, AtomTRAK * trak);
+gboolean atoms_recov_write_headers (FILE * f, AtomFTYP * ftyp,
+ GstBuffer * prefix, AtomMOOV * moov,
+ guint32 timescale,
+ guint32 traks_number);
+gboolean atoms_recov_write_trak_samples (FILE * f, AtomTRAK * trak,
+ guint32 nsamples, guint32 delta,
+ guint32 size, guint64 chunk_offset,
+ gboolean sync, gboolean do_pts,
+ gint64 pts_offset);
+
+MdatRecovFile * mdat_recov_file_create (FILE * file, gboolean datafile,
+ GError ** err);
+void mdat_recov_file_free (MdatRecovFile * mrf);
+MoovRecovFile * moov_recov_file_create (FILE * file, GError ** err);
+void moov_recov_file_free (MoovRecovFile * moovrf);
+gboolean moov_recov_parse_buffers (MoovRecovFile * moovrf,
+ MdatRecovFile * mdatrf,
+ GError ** err);
+gboolean moov_recov_write_file (MoovRecovFile * moovrf,
+ MdatRecovFile * mdatrf, FILE * outf,
+ GError ** err, GError ** warn);
+
+#endif /* __ATOMS_RECOVERY_H__ */
diff --git a/gst/isomp4/descriptors.c b/gst/isomp4/descriptors.c
new file mode 100644
index 0000000000..713ffdcf5f
--- /dev/null
+++ b/gst/isomp4/descriptors.c
@@ -0,0 +1,457 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008 Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "descriptors.h"
+
+/*
+ * Some mp4 structures (descriptors) use a coding scheme for
+ * representing its size.
+ * It is grouped in bytes. The 1st bit set to 1 means we need another byte,
+ * 0 otherwise. The remaining 7 bits are the useful values.
+ *
+ * The next set of functions handle those values
+ */
+
+/*
+ * Gets an unsigned integer and packs it into a 'expandable size' format
+ * (as used by mp4 descriptors)
+ * @size: the integer to be parsed
+ * @ptr: the array to place the result
+ * @array_size: the size of ptr array
+ */
+static void
+expandable_size_parse (guint64 size, guint8 * ptr, guint32 array_size)
+{
+ int index = 0;
+
+ memset (ptr, 0, sizeof (array_size));
+ while (size > 0 && index < array_size) {
+ ptr[index++] = (size > 0x7F ? 0x80 : 0x0) | (size & 0x7F);
+ size = size >> 7;
+ }
+}
+
+/*
+ * Gets how many positions in an array holding an 'expandable size'
+ * are really used
+ *
+ * @ptr: the array with the 'expandable size'
+ * @array_size: the size of ptr array
+ *
+ * Returns: the number of really used positions
+ */
+static guint64
+expandable_size_get_length (guint8 * ptr, guint32 array_size)
+{
+ gboolean next = TRUE;
+ guint32 index = 0;
+
+ while (next && index < array_size) {
+ next = (ptr[index] & 0x80);
+ index++;
+ }
+ return index;
+}
+
+/*
+ * Initializers below
+ */
+
+static void
+desc_base_descriptor_init (BaseDescriptor * bd, guint8 tag, guint32 size)
+{
+ bd->tag = tag;
+ expandable_size_parse (size, bd->size, 4);
+}
+
+static void
+desc_dec_specific_info_init (DecoderSpecificInfoDescriptor * dsid)
+{
+ desc_base_descriptor_init (&dsid->base, DECODER_SPECIFIC_INFO_TAG, 0);
+ dsid->length = 0;
+ dsid->data = NULL;
+}
+
+DecoderSpecificInfoDescriptor *
+desc_dec_specific_info_new (void)
+{
+ DecoderSpecificInfoDescriptor *desc =
+ g_new0 (DecoderSpecificInfoDescriptor, 1);
+ desc_dec_specific_info_init (desc);
+ return desc;
+}
+
+static void
+desc_dec_conf_desc_init (DecoderConfigDescriptor * dcd)
+{
+ desc_base_descriptor_init (&dcd->base, DECODER_CONFIG_DESC_TAG, 0);
+ dcd->dec_specific_info = NULL;
+}
+
+static void
+desc_sl_conf_desc_init (SLConfigDescriptor * sl)
+{
+ desc_base_descriptor_init (&sl->base, SL_CONFIG_DESC_TAG, 0);
+ sl->predefined = 0x2;
+}
+
+void
+desc_es_init (ESDescriptor * es)
+{
+ desc_base_descriptor_init (&es->base, ES_DESCRIPTOR_TAG, 0);
+
+ es->id = 0;
+ es->flags = 0;
+ es->depends_on_es_id = 0;
+ es->ocr_es_id = 0;
+ es->url_length = 0;
+ es->url_string = NULL;
+
+ desc_dec_conf_desc_init (&es->dec_conf_desc);
+ desc_sl_conf_desc_init (&es->sl_conf_desc);
+}
+
+ESDescriptor *
+desc_es_descriptor_new (void)
+{
+ ESDescriptor *es = g_new0 (ESDescriptor, 1);
+
+ desc_es_init (es);
+ return es;
+}
+
+/*
+ * Deinitializers/Destructors below
+ */
+
+static void
+desc_base_descriptor_clear (BaseDescriptor * base)
+{
+}
+
+void
+desc_dec_specific_info_free (DecoderSpecificInfoDescriptor * dsid)
+{
+ desc_base_descriptor_clear (&dsid->base);
+ if (dsid->data) {
+ g_free (dsid->data);
+ dsid->data = NULL;
+ }
+ g_free (dsid);
+}
+
+static void
+desc_dec_conf_desc_clear (DecoderConfigDescriptor * dec)
+{
+ desc_base_descriptor_clear (&dec->base);
+ if (dec->dec_specific_info) {
+ desc_dec_specific_info_free (dec->dec_specific_info);
+ }
+}
+
/* SLConfigDescriptor holds no dynamic memory; only clears the base */
static void
desc_sl_config_descriptor_clear (SLConfigDescriptor * sl)
{
  desc_base_descriptor_clear (&sl->base);
}

/* Releases everything owned by an ESDescriptor (url string and embedded
 * descriptors); does not free 'es' itself */
void
desc_es_descriptor_clear (ESDescriptor * es)
{
  desc_base_descriptor_clear (&es->base);
  if (es->url_string) {
    g_free (es->url_string);
    es->url_string = NULL;
  }
  desc_dec_conf_desc_clear (&es->dec_conf_desc);
  desc_sl_config_descriptor_clear (&es->sl_conf_desc);
}
+
+/*
+ * Size handling functions below
+ */
+
+void
+desc_dec_specific_info_alloc_data (DecoderSpecificInfoDescriptor * dsid,
+ guint32 size)
+{
+ if (dsid->data) {
+ g_free (dsid->data);
+ }
+ dsid->data = g_new0 (guint8, size);
+ dsid->length = size;
+}
+
+static void
+desc_base_descriptor_set_size (BaseDescriptor * bd, guint32 size)
+{
+ expandable_size_parse (size, bd->size, 4);
+}
+
+static guint64
+desc_base_descriptor_get_size (BaseDescriptor * bd)
+{
+ guint64 size = 0;
+
+ size += sizeof (guint8);
+ size += expandable_size_get_length (bd->size, 4) * sizeof (guint8);
+ return size;
+}
+
/* The _get_size helpers below return the full serialized size of a
 * descriptor (header + content) and have a deliberate side effect: they
 * store the content size ('extra_size') back into the descriptor's own
 * expandable size field via desc_base_descriptor_set_size. Since the
 * header size depends on how many bytes the size field needs, callers
 * (see desc_es_descriptor_copy_data) invoke them twice to converge. */

static guint64
desc_sl_config_descriptor_get_size (SLConfigDescriptor * sl_desc)
{
  guint64 size = 0;
  guint64 extra_size = 0;

  size += desc_base_descriptor_get_size (&sl_desc->base);
  /* predefined */
  extra_size += sizeof (guint8);

  desc_base_descriptor_set_size (&sl_desc->base, extra_size);

  return size + extra_size;
}

static guint64
desc_dec_specific_info_get_size (DecoderSpecificInfoDescriptor * dsid)
{
  guint64 size = 0;
  guint64 extra_size = 0;

  size += desc_base_descriptor_get_size (&dsid->base);
  /* the raw codec data payload */
  extra_size += sizeof (guint8) * dsid->length;
  desc_base_descriptor_set_size (&dsid->base, extra_size);
  return size + extra_size;
}

static guint64
desc_dec_config_descriptor_get_size (DecoderConfigDescriptor * dec_desc)
{
  guint64 size = 0;
  guint64 extra_size = 0;

  size += desc_base_descriptor_get_size (&dec_desc->base);
  /* object type */
  extra_size += sizeof (guint8);
  /* stream type */
  extra_size += sizeof (guint8);
  /* buffer size */
  extra_size += sizeof (guint8) * 3;
  /* max bitrate */
  extra_size += sizeof (guint32);
  /* avg bitrate */
  extra_size += sizeof (guint32);
  if (dec_desc->dec_specific_info) {
    extra_size += desc_dec_specific_info_get_size (dec_desc->dec_specific_info);
  }

  desc_base_descriptor_set_size (&dec_desc->base, extra_size);
  return size + extra_size;
}

static guint64
desc_es_descriptor_get_size (ESDescriptor * es)
{
  guint64 size = 0;
  guint64 extra_size = 0;

  size += desc_base_descriptor_get_size (&es->base);
  /* id */
  extra_size += sizeof (guint16);
  /* flags */
  extra_size += sizeof (guint8);
  /* depends_on_es_id, only present when streamDependenceFlag is set */
  if (es->flags & 0x80) {
    extra_size += sizeof (guint16);
  }
  if (es->flags & 0x40) {
    /* url_length */
    extra_size += sizeof (guint8);
    /* url */
    extra_size += sizeof (gchar) * es->url_length;
  }
  if (es->flags & 0x20) {
    /* ocr_es_id */
    extra_size += sizeof (guint16);
  }

  extra_size += desc_dec_config_descriptor_get_size (&es->dec_conf_desc);
  extra_size += desc_sl_config_descriptor_get_size (&es->sl_conf_desc);

  desc_base_descriptor_set_size (&es->base, extra_size);

  return size + extra_size;
}
+
/* Flag accessors for ESDescriptor.flags (see the struct definition):
 * bit 7 streamDependenceFlag, bit 6 URL_Flag, bit 5 OCRstreamFlag.
 * Note these return the raw masked bit (e.g. 0x80), not 1 — callers must
 * only test for truthiness. */

static gboolean
desc_es_descriptor_check_stream_dependency (ESDescriptor * es)
{
  return es->flags & 0x80;
}

static gboolean
desc_es_descriptor_check_url_flag (ESDescriptor * es)
{
  return es->flags & 0x40;
}

static gboolean
desc_es_descriptor_check_ocr (ESDescriptor * es)
{
  return es->flags & 0x20;
}
+
+/* Copy/Serializations Functions below */
+
+static guint64
+desc_base_descriptor_copy_data (BaseDescriptor * desc, guint8 ** buffer,
+ guint64 * size, guint64 * offset)
+{
+ guint64 original_offset = *offset;
+
+ prop_copy_uint8 (desc->tag, buffer, size, offset);
+ prop_copy_uint8_array (desc->size, expandable_size_get_length (desc->size, 4),
+ buffer, size, offset);
+ return original_offset - *offset;
+}
+
/* Serializes an SLConfigDescriptor into *buffer.
 * Returns the number of bytes written, or 0 on failure. */
static guint64
desc_sl_config_descriptor_copy_data (SLConfigDescriptor * desc,
    guint8 ** buffer, guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!desc_base_descriptor_copy_data (&desc->base, buffer, size, offset)) {
    return 0;
  }
  /* predefined attribute */
  prop_copy_uint8 (desc->predefined, buffer, size, offset);

  return *offset - original_offset;
}

/* Serializes a DecoderSpecificInfoDescriptor (header + raw payload) into
 * *buffer. Returns the number of bytes written, or 0 on failure. */
static guint64
desc_dec_specific_info_copy_data (DecoderSpecificInfoDescriptor * desc,
    guint8 ** buffer, guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!desc_base_descriptor_copy_data (&desc->base, buffer, size, offset)) {
    return 0;
  }
  prop_copy_uint8_array (desc->data, desc->length, buffer, size, offset);

  return *offset - original_offset;
}
+
/* Serializes a DecoderConfigDescriptor, including its optional
 * DecoderSpecificInfoDescriptor.
 * Returns the number of bytes written, or 0 on failure. */
static guint64
desc_dec_config_descriptor_copy_data (DecoderConfigDescriptor * desc,
    guint8 ** buffer, guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  if (!desc_base_descriptor_copy_data (&desc->base, buffer, size, offset)) {
    return 0;
  }

  prop_copy_uint8 (desc->object_type, buffer, size, offset);

  prop_copy_uint8 (desc->stream_type, buffer, size, offset);
  prop_copy_uint8_array (desc->buffer_size_DB, 3, buffer, size, offset);

  prop_copy_uint32 (desc->max_bitrate, buffer, size, offset);
  prop_copy_uint32 (desc->avg_bitrate, buffer, size, offset);

  if (desc->dec_specific_info) {
    if (!desc_dec_specific_info_copy_data (desc->dec_specific_info, buffer,
            size, offset)) {
      return 0;
    }
  }

  return *offset - original_offset;
}
+
/* Serializes a complete ESDescriptor (id, flags, optional fields and the
 * embedded decoder/SL config descriptors) into *buffer.
 * Returns the number of bytes written, or 0 on failure. */
guint64
desc_es_descriptor_copy_data (ESDescriptor * desc, guint8 ** buffer,
    guint64 * size, guint64 * offset)
{
  guint64 original_offset = *offset;

  /* must call this twice to have size fields of all contained descriptors set
   * correctly, and to have the size of the size fields taken into account */
  desc_es_descriptor_get_size (desc);
  desc_es_descriptor_get_size (desc);

  if (!desc_base_descriptor_copy_data (&desc->base, buffer, size, offset)) {
    return 0;
  }
  /* id and flags */
  prop_copy_uint16 (desc->id, buffer, size, offset);
  prop_copy_uint8 (desc->flags, buffer, size, offset);

  if (desc_es_descriptor_check_stream_dependency (desc)) {
    prop_copy_uint16 (desc->depends_on_es_id, buffer, size, offset);
  }

  if (desc_es_descriptor_check_url_flag (desc)) {
    prop_copy_size_string (desc->url_string, desc->url_length, buffer, size,
        offset);
  }

  if (desc_es_descriptor_check_ocr (desc)) {
    prop_copy_uint16 (desc->ocr_es_id, buffer, size, offset);
  }

  if (!desc_dec_config_descriptor_copy_data (&desc->dec_conf_desc, buffer, size,
          offset)) {
    return 0;
  }

  if (!desc_sl_config_descriptor_copy_data (&desc->sl_conf_desc, buffer, size,
          offset)) {
    return 0;
  }

  return *offset - original_offset;
}
diff --git a/gst/isomp4/descriptors.h b/gst/isomp4/descriptors.h
new file mode 100644
index 0000000000..b472523196
--- /dev/null
+++ b/gst/isomp4/descriptors.h
@@ -0,0 +1,151 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008 Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __DESCRIPTORS_H__
+#define __DESCRIPTORS_H__
+
+#include <glib.h>
+#include <string.h>
+#include "properties.h"
+
+/*
+ * Tags for descriptor (each kind is represented by a number, instead of fourcc as in atoms)
+ */
+#define OBJECT_DESC_TAG 0x01
+#define INIT_OBJECT_DESC_TAG 0x02
+#define ES_DESCRIPTOR_TAG 0x03
+#define DECODER_CONFIG_DESC_TAG 0x04
+#define DECODER_SPECIFIC_INFO_TAG 0x05
+#define SL_CONFIG_DESC_TAG 0x06
+#define ES_ID_INC_TAG 0x0E
+#define MP4_INIT_OBJECT_DESC_TAG 0x10
+
+#define ESDS_OBJECT_TYPE_MPEG1_P3 0x6B
+#define ESDS_OBJECT_TYPE_MPEG2_P7_MAIN 0x66
+#define ESDS_OBJECT_TYPE_MPEG4_P7_LC 0x67
+#define ESDS_OBJECT_TYPE_MPEG4_P7_SSR 0x68
+#define ESDS_OBJECT_TYPE_MPEG4_P2 0x20
+#define ESDS_OBJECT_TYPE_MPEG4_P3 0x40
+
+#define ESDS_STREAM_TYPE_VISUAL 0x04
+#define ESDS_STREAM_TYPE_AUDIO 0x05
+
+
/* Common header of every descriptor: a one-byte tag identifying its kind
 * followed by the 'expandable' encoding of its content size */
typedef struct _BaseDescriptor
{
  guint8 tag;
  /* the first bit of each byte indicates if the next byte should be used */
  guint8 size[4];
} BaseDescriptor;

typedef struct _SLConfigDescriptor
{
  BaseDescriptor base;

  guint8 predefined;            /* everything is supposed predefined */
} SLConfigDescriptor;

/* Carries the raw, codec-specific configuration bytes */
typedef struct _DecoderSpecificInfoDescriptor
{
  BaseDescriptor base;
  guint32 length;               /* number of bytes in 'data' */
  guint8 *data;                 /* owned; freed by desc_dec_specific_info_free */
} DecoderSpecificInfoDescriptor;

typedef struct _DecoderConfigDescriptor {
  BaseDescriptor base;

  guint8 object_type;           /* one of the ESDS_OBJECT_TYPE_* values */

  /* following are condensed into streamType:
   * bit(6) streamType;
   * bit(1) upStream;
   * const bit(1) reserved=1;
   */
  guint8 stream_type;

  guint8 buffer_size_DB[3];
  guint32 max_bitrate;
  guint32 avg_bitrate;

  /* optional; owned by this descriptor */
  DecoderSpecificInfoDescriptor *dec_specific_info;
} DecoderConfigDescriptor;

typedef struct _ESDescriptor
{
  BaseDescriptor base;

  guint16 id;

  /* flags contains the following:
   * bit(1) streamDependenceFlag;
   * bit(1) URL_Flag;
   * bit(1) OCRstreamFlag;
   * bit(5) streamPriority;
   */
  guint8 flags;

  guint16 depends_on_es_id;
  guint8 url_length;            /* only if URL_flag is set */
  guint8 *url_string;           /* size is url_length */

  guint16 ocr_es_id;            /* only if OCRstreamFlag is set */

  DecoderConfigDescriptor dec_conf_desc;
  SLConfigDescriptor sl_conf_desc;

  /* optional remainder of ESDescriptor is not used */
} ESDescriptor;

/* --- FUNCTIONS --- */

/* Initializes an already-allocated ESDescriptor */
void desc_es_init (ESDescriptor *es);
/* Allocates and initializes an ESDescriptor */
ESDescriptor *desc_es_descriptor_new (void);
/* Serializes 'es' into *buffer; returns bytes written, 0 on failure */
guint64 desc_es_descriptor_copy_data (ESDescriptor *es, guint8 **buffer,
    guint64 *size, guint64 *offset);
/* Frees memory owned by 'es' (not 'es' itself) */
void desc_es_descriptor_clear (ESDescriptor *es);

DecoderSpecificInfoDescriptor *desc_dec_specific_info_new(void);
void desc_dec_specific_info_free (DecoderSpecificInfoDescriptor *dsid);
/* (Re)allocates the payload of 'dsid' to 'size' zeroed bytes */
void desc_dec_specific_info_alloc_data (DecoderSpecificInfoDescriptor *dsid,
    guint32 size);
+
+#endif /* __DESCRIPTORS_H__ */
diff --git a/gst/isomp4/fourcc.h b/gst/isomp4/fourcc.h
new file mode 100644
index 0000000000..8872b4bda3
--- /dev/null
+++ b/gst/isomp4/fourcc.h
@@ -0,0 +1,413 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+ /*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+
+#ifndef __FOURCC_H__
+#define __FOURCC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define FOURCC_null 0x0
+
+#define FOURCC_2vuy GST_MAKE_FOURCC('2','v','u','y')
+#define FOURCC_FMP4 GST_MAKE_FOURCC('F','M','P','4')
+#define FOURCC_H264 GST_MAKE_FOURCC('H','2','6','4')
+#define FOURCC_H265 GST_MAKE_FOURCC('H','2','6','5')
+#define FOURCC_MAC3 GST_MAKE_FOURCC('M','A','C','3')
+#define FOURCC_MAC6 GST_MAKE_FOURCC('M','A','C','6')
+#define FOURCC_MP4V GST_MAKE_FOURCC('M','P','4','V')
+#define FOURCC_PICT GST_MAKE_FOURCC('P','I','C','T')
+#define FOURCC_QDM2 GST_MAKE_FOURCC('Q','D','M','2')
+#define FOURCC_SVQ3 GST_MAKE_FOURCC('S','V','Q','3')
+#define FOURCC_VP31 GST_MAKE_FOURCC('V','P','3','1')
+#define FOURCC_VP80 GST_MAKE_FOURCC('V','P','8','0')
+#define FOURCC_WRLE GST_MAKE_FOURCC('W','R','L','E')
+#define FOURCC_XMP_ GST_MAKE_FOURCC('X','M','P','_')
+#define FOURCC_XVID GST_MAKE_FOURCC('X','V','I','D')
+#define FOURCC__ART GST_MAKE_FOURCC(0xa9,'A','R','T')
+#define FOURCC_____ GST_MAKE_FOURCC('-','-','-','-')
+#define FOURCC___in GST_MAKE_FOURCC(' ',' ','i','n')
+#define FOURCC___ty GST_MAKE_FOURCC(' ',' ','t','y')
+#define FOURCC__alb GST_MAKE_FOURCC(0xa9,'a','l','b')
+#define FOURCC__cpy GST_MAKE_FOURCC(0xa9,'c','p','y')
+#define FOURCC__day GST_MAKE_FOURCC(0xa9,'d','a','y')
+#define FOURCC__des GST_MAKE_FOURCC(0xa9,'d','e','s')
+#define FOURCC__enc GST_MAKE_FOURCC(0xa9,'e','n','c')
+#define FOURCC__gen GST_MAKE_FOURCC(0xa9, 'g', 'e', 'n')
+#define FOURCC__grp GST_MAKE_FOURCC(0xa9,'g','r','p')
+#define FOURCC__inf GST_MAKE_FOURCC(0xa9,'i','n','f')
+#define FOURCC__lyr GST_MAKE_FOURCC(0xa9,'l','y','r')
+#define FOURCC__mp3 GST_MAKE_FOURCC('.','m','p','3')
+#define FOURCC__nam GST_MAKE_FOURCC(0xa9,'n','a','m')
+#define FOURCC__req GST_MAKE_FOURCC(0xa9,'r','e','q')
+#define FOURCC__too GST_MAKE_FOURCC(0xa9,'t','o','o')
+#define FOURCC__wrt GST_MAKE_FOURCC(0xa9,'w','r','t')
+#define FOURCC_aART GST_MAKE_FOURCC('a','A','R','T')
+#define FOURCC_ac_3 GST_MAKE_FOURCC('a','c','-','3')
+#define FOURCC_agsm GST_MAKE_FOURCC('a','g','s','m')
+#define FOURCC_alac GST_MAKE_FOURCC('a','l','a','c')
+#define FOURCC_fLaC GST_MAKE_FOURCC('f','L','a','C')
+#define FOURCC_dfLa GST_MAKE_FOURCC('d','f','L','a')
+#define FOURCC_alaw GST_MAKE_FOURCC('a','l','a','w')
+#define FOURCC_alis GST_MAKE_FOURCC('a','l','i','s')
+#define FOURCC_appl GST_MAKE_FOURCC('a','p','p','l')
+#define FOURCC_avc1 GST_MAKE_FOURCC('a','v','c','1')
+#define FOURCC_avc3 GST_MAKE_FOURCC('a','v','c','3')
+#define FOURCC_avcC GST_MAKE_FOURCC('a','v','c','C')
+#define FOURCC_c608 GST_MAKE_FOURCC('c','6','0','8')
+#define FOURCC_c708 GST_MAKE_FOURCC('c','7','0','8')
+#define FOURCC_ccdp GST_MAKE_FOURCC('c','c','d','p')
+#define FOURCC_cdat GST_MAKE_FOURCC('c','d','a','t')
+#define FOURCC_cdt2 GST_MAKE_FOURCC('c','d','t','2')
+#define FOURCC_clcp GST_MAKE_FOURCC('c','l','c','p')
+#define FOURCC_clip GST_MAKE_FOURCC('c','l','i','p')
+#define FOURCC_cmov GST_MAKE_FOURCC('c','m','o','v')
+#define FOURCC_cmvd GST_MAKE_FOURCC('c','m','v','d')
+#define FOURCC_co64 GST_MAKE_FOURCC('c','o','6','4')
+#define FOURCC_covr GST_MAKE_FOURCC('c','o','v','r')
+#define FOURCC_cpil GST_MAKE_FOURCC('c','p','i','l')
+#define FOURCC_cprt GST_MAKE_FOURCC('c','p','r','t')
+#define FOURCC_crgn GST_MAKE_FOURCC('c','r','g','n')
+#define FOURCC_ctab GST_MAKE_FOURCC('c','t','a','b')
+#define FOURCC_ctts GST_MAKE_FOURCC('c','t','t','s')
+#define FOURCC_cslg GST_MAKE_FOURCC('c','s','l','g')
+#define FOURCC_d263 GST_MAKE_FOURCC('d','2','6','3')
+#define FOURCC_dac3 GST_MAKE_FOURCC('d','a','c','3')
+#define FOURCC_damr GST_MAKE_FOURCC('d','a','m','r')
+#define FOURCC_data GST_MAKE_FOURCC('d','a','t','a')
+#define FOURCC_dcom GST_MAKE_FOURCC('d','c','o','m')
+#define FOURCC_desc GST_MAKE_FOURCC('d','e','s','c')
+#define FOURCC_dhlr GST_MAKE_FOURCC('d','h','l','r')
+#define FOURCC_dinf GST_MAKE_FOURCC('d','i','n','f')
+#define FOURCC_disc GST_MAKE_FOURCC('d','i','s','c')
+#define FOURCC_disk GST_MAKE_FOURCC('d','i','s','k')
+#define FOURCC_drac GST_MAKE_FOURCC('d','r','a','c')
+#define FOURCC_dref GST_MAKE_FOURCC('d','r','e','f')
+#define FOURCC_drmi GST_MAKE_FOURCC('d','r','m','i')
+#define FOURCC_drms GST_MAKE_FOURCC('d','r','m','s')
+#define FOURCC_dvcp GST_MAKE_FOURCC('d','v','c','p')
+#define FOURCC_dvc_ GST_MAKE_FOURCC('d','v','c',' ')
+#define FOURCC_dv5p GST_MAKE_FOURCC('d','v','5','p')
+#define FOURCC_dv5n GST_MAKE_FOURCC('d','v','5','n')
+#define FOURCC_dva1 GST_MAKE_FOURCC('d','v','a','1')
+#define FOURCC_dvav GST_MAKE_FOURCC('d','v','a','v')
+#define FOURCC_dvh1 GST_MAKE_FOURCC('d','v','h','1')
+#define FOURCC_dvhe GST_MAKE_FOURCC('d','v','h','e')
+#define FOURCC_dvcC GST_MAKE_FOURCC('d','v','c','C')
+#define FOURCC_edts GST_MAKE_FOURCC('e','d','t','s')
+#define FOURCC_elst GST_MAKE_FOURCC('e','l','s','t')
+#define FOURCC_enda GST_MAKE_FOURCC('e','n','d','a')
+#define FOURCC_esds GST_MAKE_FOURCC('e','s','d','s')
+#define FOURCC_fmp4 GST_MAKE_FOURCC('f','m','p','4')
+#define FOURCC_free GST_MAKE_FOURCC('f','r','e','e')
+#define FOURCC_frma GST_MAKE_FOURCC('f','r','m','a')
+#define FOURCC_ftyp GST_MAKE_FOURCC('f','t','y','p')
+#define FOURCC_ftab GST_MAKE_FOURCC('f','t','a','b')
+#define FOURCC_gama GST_MAKE_FOURCC('g','a','m','a')
+#define FOURCC_glbl GST_MAKE_FOURCC('g','l','b','l')
+#define FOURCC_gmhd GST_MAKE_FOURCC('g','m','h','d')
+#define FOURCC_gmin GST_MAKE_FOURCC('g','m','i','n')
+#define FOURCC_gnre GST_MAKE_FOURCC('g','n','r','e')
+#define FOURCC_h263 GST_MAKE_FOURCC('h','2','6','3')
+#define FOURCC_hdlr GST_MAKE_FOURCC('h','d','l','r')
+#define FOURCC_hev1 GST_MAKE_FOURCC('h','e','v','1')
+#define FOURCC_hint GST_MAKE_FOURCC('h','i','n','t')
+#define FOURCC_hmhd GST_MAKE_FOURCC('h','m','h','d')
+#define FOURCC_hndl GST_MAKE_FOURCC('h','n','d','l')
+#define FOURCC_hnti GST_MAKE_FOURCC('h','n','t','i')
+#define FOURCC_hvc1 GST_MAKE_FOURCC('h','v','c','1')
+#define FOURCC_hvcC GST_MAKE_FOURCC('h','v','c','C')
+#define FOURCC_ilst GST_MAKE_FOURCC('i','l','s','t')
+#define FOURCC_ima4 GST_MAKE_FOURCC('i','m','a','4')
+#define FOURCC_imap GST_MAKE_FOURCC('i','m','a','p')
+#define FOURCC_s16l GST_MAKE_FOURCC('s','1','6','l')
+#define FOURCC_in24 GST_MAKE_FOURCC('i','n','2','4')
+#define FOURCC_in32 GST_MAKE_FOURCC('i','n','3','2')
+#define FOURCC_fl64 GST_MAKE_FOURCC('f','l','6','4')
+#define FOURCC_fl32 GST_MAKE_FOURCC('f','l','3','2')
+#define FOURCC_jp2c GST_MAKE_FOURCC('j','p','2','c')
+#define FOURCC_jpeg GST_MAKE_FOURCC('j','p','e','g')
+#define FOURCC_keyw GST_MAKE_FOURCC('k','e','y','w')
+#define FOURCC_kmat GST_MAKE_FOURCC('k','m','a','t')
+#define FOURCC_kywd GST_MAKE_FOURCC('k','y','w','d')
+#define FOURCC_load GST_MAKE_FOURCC('l','o','a','d')
+#define FOURCC_matt GST_MAKE_FOURCC('m','a','t','t')
+#define FOURCC_mdat GST_MAKE_FOURCC('m','d','a','t')
+#define FOURCC_mdhd GST_MAKE_FOURCC('m','d','h','d')
+#define FOURCC_mdia GST_MAKE_FOURCC('m','d','i','a')
+#define FOURCC_mdir GST_MAKE_FOURCC('m','d','i','r')
+#define FOURCC_mean GST_MAKE_FOURCC('m','e','a','n')
+#define FOURCC_meta GST_MAKE_FOURCC('m','e','t','a')
+#define FOURCC_mhlr GST_MAKE_FOURCC('m','h','l','r')
+#define FOURCC_minf GST_MAKE_FOURCC('m','i','n','f')
+#define FOURCC_moov GST_MAKE_FOURCC('m','o','o','v')
+#define FOURCC_mp3_ GST_MAKE_FOURCC('m','p','3',' ')
+#define FOURCC_mp4a GST_MAKE_FOURCC('m','p','4','a')
+#define FOURCC_mp4s GST_MAKE_FOURCC('m','p','4','s')
+#define FOURCC_mp4v GST_MAKE_FOURCC('m','p','4','v')
+#define FOURCC_name GST_MAKE_FOURCC('n','a','m','e')
+#define FOURCC_nclc GST_MAKE_FOURCC('n','c','l','c')
+#define FOURCC_nclx GST_MAKE_FOURCC('n','c','l','x')
+#define FOURCC_nmhd GST_MAKE_FOURCC('n','m','h','d')
+#define FOURCC_opus GST_MAKE_FOURCC('O','p','u','s')
+#define FOURCC_dops GST_MAKE_FOURCC('d','O','p','s')
+#define FOURCC_pasp GST_MAKE_FOURCC('p','a','s','p')
+#define FOURCC_colr GST_MAKE_FOURCC('c','o','l','r')
+#define FOURCC_clap GST_MAKE_FOURCC('c','l','a','p')
+#define FOURCC_tapt GST_MAKE_FOURCC('t','a','p','t')
+#define FOURCC_clef GST_MAKE_FOURCC('c','l','e','f')
+#define FOURCC_prof GST_MAKE_FOURCC('p','r','o','f')
+#define FOURCC_enof GST_MAKE_FOURCC('e','n','o','f')
+#define FOURCC_fiel GST_MAKE_FOURCC('f','i','e','l')
+#define FOURCC_pcst GST_MAKE_FOURCC('p','c','s','t')
+#define FOURCC_pgap GST_MAKE_FOURCC('p','g','a','p')
+#define FOURCC_png GST_MAKE_FOURCC('p','n','g',' ')
+#define FOURCC_pnot GST_MAKE_FOURCC('p','n','o','t')
+#define FOURCC_qt__ GST_MAKE_FOURCC('q','t',' ',' ')
+#define FOURCC_qtim GST_MAKE_FOURCC('q','t','i','m')
+#define FOURCC_raw_ GST_MAKE_FOURCC('r','a','w',' ')
+#define FOURCC_rdrf GST_MAKE_FOURCC('r','d','r','f')
+#define FOURCC_rle_ GST_MAKE_FOURCC('r','l','e',' ')
+#define FOURCC_rmda GST_MAKE_FOURCC('r','m','d','a')
+#define FOURCC_rmdr GST_MAKE_FOURCC('r','m','d','r')
+#define FOURCC_rmra GST_MAKE_FOURCC('r','m','r','a')
+#define FOURCC_rmvc GST_MAKE_FOURCC('r','m','v','c')
+#define FOURCC_rtp_ GST_MAKE_FOURCC('r','t','p',' ')
+#define FOURCC_rtsp GST_MAKE_FOURCC('r','t','s','p')
+#define FOURCC_s263 GST_MAKE_FOURCC('s','2','6','3')
+#define FOURCC_samr GST_MAKE_FOURCC('s','a','m','r')
+#define FOURCC_sawb GST_MAKE_FOURCC('s','a','w','b')
+#define FOURCC_sbtl GST_MAKE_FOURCC('s','b','t','l')
+#define FOURCC_sdp_ GST_MAKE_FOURCC('s','d','p',' ')
+#define FOURCC_sidx GST_MAKE_FOURCC('s','i','d','x')
+#define FOURCC_skip GST_MAKE_FOURCC('s','k','i','p')
+#define FOURCC_smhd GST_MAKE_FOURCC('s','m','h','d')
+#define FOURCC_soaa GST_MAKE_FOURCC('s','o','a','a')
+#define FOURCC_soal GST_MAKE_FOURCC('s','o','a','l')
+#define FOURCC_soar GST_MAKE_FOURCC('s','o','a','r')
+#define FOURCC_soco GST_MAKE_FOURCC('s','o','c','o')
+#define FOURCC_sonm GST_MAKE_FOURCC('s','o','n','m')
+#define FOURCC_sosn GST_MAKE_FOURCC('s','o','s','n')
+#define FOURCC_soun GST_MAKE_FOURCC('s','o','u','n')
+#define FOURCC_sowt GST_MAKE_FOURCC('s','o','w','t')
+#define FOURCC_stbl GST_MAKE_FOURCC('s','t','b','l')
+#define FOURCC_stco GST_MAKE_FOURCC('s','t','c','o')
+#define FOURCC_stpp GST_MAKE_FOURCC('s','t','p','p')
+#define FOURCC_stps GST_MAKE_FOURCC('s','t','p','s')
+#define FOURCC_strf GST_MAKE_FOURCC('s','t','r','f')
+#define FOURCC_strm GST_MAKE_FOURCC('s','t','r','m')
+#define FOURCC_stsc GST_MAKE_FOURCC('s','t','s','c')
+#define FOURCC_stsd GST_MAKE_FOURCC('s','t','s','d')
+#define FOURCC_stss GST_MAKE_FOURCC('s','t','s','s')
+#define FOURCC_stsz GST_MAKE_FOURCC('s','t','s','z')
+#define FOURCC_stts GST_MAKE_FOURCC('s','t','t','s')
+#define FOURCC_styp GST_MAKE_FOURCC('s','t','y','p')
+#define FOURCC_subp GST_MAKE_FOURCC('s','u','b','p')
+#define FOURCC_subt GST_MAKE_FOURCC('s','u','b','t')
+#define FOURCC_text GST_MAKE_FOURCC('t','e','x','t')
+#define FOURCC_tcmi GST_MAKE_FOURCC('t','c','m','i')
+#define FOURCC_tkhd GST_MAKE_FOURCC('t','k','h','d')
+#define FOURCC_tmcd GST_MAKE_FOURCC('t','m','c','d')
+#define FOURCC_tmpo GST_MAKE_FOURCC('t','m','p','o')
+#define FOURCC_trak GST_MAKE_FOURCC('t','r','a','k')
+#define FOURCC_tref GST_MAKE_FOURCC('t','r','e','f')
+#define FOURCC_trkn GST_MAKE_FOURCC('t','r','k','n')
+#define FOURCC_tven GST_MAKE_FOURCC('t','v','e','n')
+#define FOURCC_tves GST_MAKE_FOURCC('t','v','e','s')
+#define FOURCC_tvsh GST_MAKE_FOURCC('t','v','s','h')
+#define FOURCC_tvsn GST_MAKE_FOURCC('t','v','s','n')
+#define FOURCC_twos GST_MAKE_FOURCC('t','w','o','s')
+#define FOURCC_tx3g GST_MAKE_FOURCC('t','x','3','g')
+#define FOURCC_udta GST_MAKE_FOURCC('u','d','t','a')
+#define FOURCC_ulaw GST_MAKE_FOURCC('u','l','a','w')
+#define FOURCC_url_ GST_MAKE_FOURCC('u','r','l',' ')
+#define FOURCC_uuid GST_MAKE_FOURCC('u','u','i','d')
+#define FOURCC_v210 GST_MAKE_FOURCC('v','2','1','0')
+#define FOURCC_vc_1 GST_MAKE_FOURCC('v','c','-','1')
+#define FOURCC_vide GST_MAKE_FOURCC('v','i','d','e')
+#define FOURCC_vmhd GST_MAKE_FOURCC('v','m','h','d')
+#define FOURCC_vp08 GST_MAKE_FOURCC('v','p','0','8')
+#define FOURCC_vp09 GST_MAKE_FOURCC('v','p','0','9')
+#define FOURCC_vpcC GST_MAKE_FOURCC('v','p','c','C')
+#define FOURCC_xvid GST_MAKE_FOURCC('x','v','i','d')
+#define FOURCC_wave GST_MAKE_FOURCC('w','a','v','e')
+#define FOURCC_wide GST_MAKE_FOURCC('w','i','d','e')
+#define FOURCC_zlib GST_MAKE_FOURCC('z','l','i','b')
+#define FOURCC_lpcm GST_MAKE_FOURCC('l','p','c','m')
+#define FOURCC_av01 GST_MAKE_FOURCC('a','v','0','1')
+#define FOURCC_av1C GST_MAKE_FOURCC('a','v','1','C')
+#define FOURCC_av1f GST_MAKE_FOURCC('a','v','1','f')
+#define FOURCC_av1m GST_MAKE_FOURCC('a','v','1','m')
+#define FOURCC_av1s GST_MAKE_FOURCC('a','v','1','s')
+#define FOURCC_av1M GST_MAKE_FOURCC('a','v','1','M')
+
+#define FOURCC_cfhd GST_MAKE_FOURCC('C','F','H','D')
+#define FOURCC_ap4x GST_MAKE_FOURCC('a','p','4','x')
+#define FOURCC_ap4h GST_MAKE_FOURCC('a','p','4','h')
+#define FOURCC_apch GST_MAKE_FOURCC('a','p','c','h')
+#define FOURCC_apcn GST_MAKE_FOURCC('a','p','c','n')
+#define FOURCC_apco GST_MAKE_FOURCC('a','p','c','o')
+#define FOURCC_apcs GST_MAKE_FOURCC('a','p','c','s')
+#define FOURCC_m1v GST_MAKE_FOURCC('m','1','v',' ')
+#define FOURCC_vivo GST_MAKE_FOURCC('v','i','v','o')
+#define FOURCC_saiz GST_MAKE_FOURCC('s','a','i','z')
+#define FOURCC_saio GST_MAKE_FOURCC('s','a','i','o')
+
+#define FOURCC_3gg6 GST_MAKE_FOURCC('3','g','g','6')
+#define FOURCC_3gg7 GST_MAKE_FOURCC('3','g','g','7')
+#define FOURCC_3gp4 GST_MAKE_FOURCC('3','g','p','4')
+#define FOURCC_3gp6 GST_MAKE_FOURCC('3','g','p','6')
+#define FOURCC_3gr6 GST_MAKE_FOURCC('3','g','r','6')
+#define FOURCC_3g__ GST_MAKE_FOURCC('3','g',0,0)
+#define FOURCC_isml GST_MAKE_FOURCC('i','s','m','l')
+#define FOURCC_iso2 GST_MAKE_FOURCC('i','s','o','2')
+#define FOURCC_isom GST_MAKE_FOURCC('i','s','o','m')
+#define FOURCC_mp41 GST_MAKE_FOURCC('m','p','4','1')
+#define FOURCC_mp42 GST_MAKE_FOURCC('m','p','4','2')
+#define FOURCC_piff GST_MAKE_FOURCC('p','i','f','f')
+#define FOURCC_titl GST_MAKE_FOURCC('t','i','t','l')
+
+/* SVQ3 fourcc */
+#define FOURCC_SEQH GST_MAKE_FOURCC('S','E','Q','H')
+#define FOURCC_SMI_ GST_MAKE_FOURCC('S','M','I',' ')
+
+/* 3gpp asset meta data fourcc */
+#define FOURCC_albm GST_MAKE_FOURCC('a','l','b','m')
+#define FOURCC_auth GST_MAKE_FOURCC('a','u','t','h')
+#define FOURCC_clsf GST_MAKE_FOURCC('c','l','s','f')
+#define FOURCC_dscp GST_MAKE_FOURCC('d','s','c','p')
+#define FOURCC_loci GST_MAKE_FOURCC('l','o','c','i')
+#define FOURCC_perf GST_MAKE_FOURCC('p','e','r','f')
+#define FOURCC_rtng GST_MAKE_FOURCC('r','t','n','g')
+#define FOURCC_yrrc GST_MAKE_FOURCC('y','r','r','c')
+
+/* misc tag stuff */
+#define FOURCC_ID32 GST_MAKE_FOURCC('I', 'D','3','2')
+
+/* ISO Motion JPEG 2000 fourcc */
+#define FOURCC_cdef GST_MAKE_FOURCC('c','d','e','f')
+#define FOURCC_cmap GST_MAKE_FOURCC('c','m','a','p')
+#define FOURCC_ihdr GST_MAKE_FOURCC('i','h','d','r')
+#define FOURCC_jp2h GST_MAKE_FOURCC('j','p','2','h')
+#define FOURCC_jp2x GST_MAKE_FOURCC('j','p','2','x')
+#define FOURCC_mjp2 GST_MAKE_FOURCC('m','j','p','2')
+
+/* some buggy hardware's notion of mdhd */
+#define FOURCC_mhdr GST_MAKE_FOURCC('m','h','d','r')
+
+/* Fragmented MP4 */
+#define FOURCC_btrt GST_MAKE_FOURCC('b','t','r','t')
+#define FOURCC_mehd GST_MAKE_FOURCC('m','e','h','d')
+#define FOURCC_mfhd GST_MAKE_FOURCC('m','f','h','d')
+#define FOURCC_mfra GST_MAKE_FOURCC('m','f','r','a')
+#define FOURCC_mfro GST_MAKE_FOURCC('m','f','r','o')
+#define FOURCC_moof GST_MAKE_FOURCC('m','o','o','f')
+#define FOURCC_mvex GST_MAKE_FOURCC('m','v','e','x')
+#define FOURCC_mvhd GST_MAKE_FOURCC('m','v','h','d')
+#define FOURCC_ovc1 GST_MAKE_FOURCC('o','v','c','1')
+#define FOURCC_owma GST_MAKE_FOURCC('o','w','m','a')
+#define FOURCC_sdtp GST_MAKE_FOURCC('s','d','t','p')
+#define FOURCC_tfhd GST_MAKE_FOURCC('t','f','h','d')
+#define FOURCC_tfra GST_MAKE_FOURCC('t','f','r','a')
+#define FOURCC_traf GST_MAKE_FOURCC('t','r','a','f')
+#define FOURCC_trex GST_MAKE_FOURCC('t','r','e','x')
+#define FOURCC_trun GST_MAKE_FOURCC('t','r','u','n')
+#define FOURCC_wma_ GST_MAKE_FOURCC('w','m','a',' ')
+
+/* MPEG DASH */
+#define FOURCC_tfdt GST_MAKE_FOURCC('t','f','d','t')
+
+/* Xiph fourcc */
+#define FOURCC_XdxT GST_MAKE_FOURCC('X','d','x','T')
+#define FOURCC_XiTh GST_MAKE_FOURCC('X','i','T','h')
+#define FOURCC_tCtC GST_MAKE_FOURCC('t','C','t','C')
+#define FOURCC_tCtH GST_MAKE_FOURCC('t','C','t','H')
+#define FOURCC_tCt_ GST_MAKE_FOURCC('t','C','t','#')
+
+/* ilst metatags */
+#define FOURCC__cmt GST_MAKE_FOURCC(0xa9, 'c','m','t')
+
+/* apple tags */
+#define FOURCC__mak GST_MAKE_FOURCC(0xa9, 'm','a','k')
+#define FOURCC__mod GST_MAKE_FOURCC(0xa9, 'm','o','d')
+#define FOURCC__swr GST_MAKE_FOURCC(0xa9, 's','w','r')
+
+/* Chapters reference */
+#define FOURCC_chap GST_MAKE_FOURCC('c','h','a','p')
+
+/* For Microsoft Wave formats embedded in quicktime, the FOURCC is
+ 'm', 's', then the 16 bit wave codec id */
+#define MS_WAVE_FOURCC(codecid) GST_MAKE_FOURCC( \
+ 'm', 's', ((codecid)>>8)&0xff, ((codecid)&0xff))
+
+/* MPEG Application Format , Stereo Video */
+#define FOURCC_ss01 GST_MAKE_FOURCC('s','s','0','1')
+#define FOURCC_ss02 GST_MAKE_FOURCC('s','s','0','2')
+#define FOURCC_svmi GST_MAKE_FOURCC('s','v','m','i')
+#define FOURCC_scdi GST_MAKE_FOURCC('s','c','d','i')
+
+/* Protected streams */
+#define FOURCC_encv GST_MAKE_FOURCC('e','n','c','v')
+#define FOURCC_enca GST_MAKE_FOURCC('e','n','c','a')
+#define FOURCC_enct GST_MAKE_FOURCC('e','n','c','t')
+#define FOURCC_encs GST_MAKE_FOURCC('e','n','c','s')
+#define FOURCC_sinf GST_MAKE_FOURCC('s','i','n','f')
+#define FOURCC_frma GST_MAKE_FOURCC('f','r','m','a')
+#define FOURCC_schm GST_MAKE_FOURCC('s','c','h','m')
+#define FOURCC_schi GST_MAKE_FOURCC('s','c','h','i')
+
+/* Common Encryption */
+#define FOURCC_pssh GST_MAKE_FOURCC('p','s','s','h')
+#define FOURCC_tenc GST_MAKE_FOURCC('t','e','n','c')
+#define FOURCC_cenc GST_MAKE_FOURCC('c','e','n','c')
+#define FOURCC_cbcs GST_MAKE_FOURCC('c','b','c','s')
+
+/* Audible AAX encrypted audio */
+#define FOURCC_aavd GST_MAKE_FOURCC('a','a','v','d')
+#define FOURCC_adrm GST_MAKE_FOURCC('a','d','r','m')
+
+G_END_DECLS
+
+#endif /* __FOURCC_H__ */
diff --git a/gst/isomp4/gstisoff.c b/gst/isomp4/gstisoff.c
new file mode 100644
index 0000000000..829a73af99
--- /dev/null
+++ b/gst/isomp4/gstisoff.c
@@ -0,0 +1,203 @@
+/*
+ * ISO File Format parsing library
+ *
+ * gstisoff.h
+ *
+ * Copyright (C) 2015 Samsung Electronics. All rights reserved.
+ * Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "qtdemux_debug.h"
+#include "gstisoff.h"
+#include <gst/base/gstbytereader.h>
+
+#define GST_CAT_DEFAULT qtdemux_debug
+
+/* Reset @parser so it is ready to parse a sidx box from scratch.
+ * NOTE: does not free a previously allocated entries array; call
+ * gst_isoff_qt_sidx_parser_clear() first when reusing a parser. */
+void
+gst_isoff_qt_sidx_parser_init (GstSidxParser * parser)
+{
+ parser->status = GST_ISOFF_QT_SIDX_PARSER_INIT;
+ parser->cumulative_entry_size = 0;
+ parser->sidx.entries = NULL;
+ parser->sidx.entries_count = 0;
+}
+
+/* Free the entries array allocated during parsing. The rest of the
+ * parser state is left untouched; use gst_isoff_qt_sidx_parser_init()
+ * to reset it. */
+void
+gst_isoff_qt_sidx_parser_clear (GstSidxParser * parser)
+{
+ g_free (parser->sidx.entries);
+ parser->sidx.entries = NULL;
+}
+
+/* Parse one 12-byte sidx reference entry from @reader into @entry.
+ * Uses unchecked reads: the caller must guarantee at least 12 bytes
+ * remain in @reader. */
+static void
+gst_isoff_qt_parse_sidx_entry (GstSidxBoxEntry * entry, GstByteReader * reader)
+{
+ guint32 aux;
+
+ /* 1 bit reference_type, 31 bits referenced_size */
+ aux = gst_byte_reader_get_uint32_be_unchecked (reader);
+ entry->ref_type = aux >> 31;
+ entry->size = aux & 0x7FFFFFFF;
+ /* subsegment_duration, in timescale units (rescaled by the caller) */
+ entry->duration = gst_byte_reader_get_uint32_be_unchecked (reader);
+ /* 1 bit starts_with_SAP, 3 bits SAP_type, 28 bits SAP_delta_time */
+ aux = gst_byte_reader_get_uint32_be_unchecked (reader);
+ entry->starts_with_sap = aux >> 31;
+ entry->sap_type = ((aux >> 28) & 0x7);
+ entry->sap_delta_time = aux & 0xFFFFFFF;
+}
+
+/**
+ * gst_isoff_qt_sidx_parser_add_data:
+ * @parser: the sidx parser state machine
+ * @buffer: bytes to parse
+ * @length: number of valid bytes in @buffer
+ * @consumed: (out): number of bytes consumed from @buffer by this call
+ *
+ * Incrementally parses a 'sidx' box. State is kept in @parser so the box
+ * may be fed in arbitrary chunks; each call resumes where the previous
+ * one stopped (the switch cases deliberately fall through).
+ *
+ * Returns: GST_ISOFF_QT_PARSER_DONE once the whole box is parsed,
+ * GST_ISOFF_QT_PARSER_OK if more data is needed,
+ * GST_ISOFF_QT_PARSER_UNEXPECTED if the data is not a sidx box,
+ * or GST_ISOFF_QT_PARSER_ERROR on malformed data.
+ */
+GstIsoffParserResult
+gst_isoff_qt_sidx_parser_add_data (GstSidxParser * parser,
+    const guint8 * buffer, gint length, guint * consumed)
+{
+  GstIsoffParserResult res = GST_ISOFF_QT_PARSER_OK;
+  GstByteReader reader;
+  gsize remaining;
+  guint32 fourcc;
+
+  gst_byte_reader_init (&reader, buffer, length);
+
+  switch (parser->status) {
+    case GST_ISOFF_QT_SIDX_PARSER_INIT:
+      /* need at least size (4) + fourcc (4) + version/flags (4) */
+      if (gst_byte_reader_get_remaining (&reader) < GST_ISOFF_QT_FULL_BOX_SIZE) {
+        break;
+      }
+
+      parser->size = gst_byte_reader_get_uint32_be_unchecked (&reader);
+      fourcc = gst_byte_reader_get_uint32_le_unchecked (&reader);
+      if (fourcc != GST_ISOFF_QT_FOURCC_SIDX) {
+        res = GST_ISOFF_QT_PARSER_UNEXPECTED;
+        gst_byte_reader_set_pos (&reader, 0);
+        break;
+      }
+      if (parser->size == 1) {
+        /* 64-bit largesize follows, then version/flags */
+        if (gst_byte_reader_get_remaining (&reader) < 12) {
+          gst_byte_reader_set_pos (&reader, 0);
+          break;
+        }
+
+        parser->size = gst_byte_reader_get_uint64_be_unchecked (&reader);
+      }
+      if (parser->size == 0) {
+        res = GST_ISOFF_QT_PARSER_ERROR;
+        gst_byte_reader_set_pos (&reader, 0);
+        break;
+      }
+      parser->sidx.version = gst_byte_reader_get_uint8_unchecked (&reader);
+      parser->sidx.flags = gst_byte_reader_get_uint24_le_unchecked (&reader);
+
+      parser->status = GST_ISOFF_QT_SIDX_PARSER_HEADER;
+      /* fall through */
+
+    case GST_ISOFF_QT_SIDX_PARSER_HEADER:
+      remaining = gst_byte_reader_get_remaining (&reader);
+      if (remaining < 12 + (parser->sidx.version == 0 ? 8 : 16)) {
+        break;
+      }
+
+      parser->sidx.ref_id = gst_byte_reader_get_uint32_be_unchecked (&reader);
+      parser->sidx.timescale =
+          gst_byte_reader_get_uint32_be_unchecked (&reader);
+      if (parser->sidx.timescale == 0) {
+        /* timescale is used as a scaling denominator below; a zero
+         * value would be a division by zero, so reject the box */
+        res = GST_ISOFF_QT_PARSER_ERROR;
+        break;
+      }
+      if (parser->sidx.version == 0) {
+        parser->sidx.earliest_pts =
+            gst_byte_reader_get_uint32_be_unchecked (&reader);
+        /* fix: read first_offset into its own field instead of
+         * clobbering the earliest_pts just read above */
+        parser->sidx.first_offset =
+            gst_byte_reader_get_uint32_be_unchecked (&reader);
+      } else {
+        parser->sidx.earliest_pts =
+            gst_byte_reader_get_uint64_be_unchecked (&reader);
+        parser->sidx.first_offset =
+            gst_byte_reader_get_uint64_be_unchecked (&reader);
+      }
+      /* skip 2 reserved bytes */
+      gst_byte_reader_skip_unchecked (&reader, 2);
+      parser->sidx.entries_count =
+          gst_byte_reader_get_uint16_be_unchecked (&reader);
+
+      GST_LOG ("Timescale: %" G_GUINT32_FORMAT, parser->sidx.timescale);
+      GST_LOG ("Earliest pts: %" G_GUINT64_FORMAT, parser->sidx.earliest_pts);
+      GST_LOG ("First offset: %" G_GUINT64_FORMAT, parser->sidx.first_offset);
+
+      parser->cumulative_pts =
+          gst_util_uint64_scale_int_round (parser->sidx.earliest_pts,
+          GST_SECOND, parser->sidx.timescale);
+
+      if (parser->sidx.entries_count) {
+        parser->sidx.entries =
+            g_malloc (sizeof (GstSidxBoxEntry) * parser->sidx.entries_count);
+      }
+      parser->sidx.entry_index = 0;
+
+      parser->status = GST_ISOFF_QT_SIDX_PARSER_DATA;
+      /* fall through */
+
+    case GST_ISOFF_QT_SIDX_PARSER_DATA:
+      while (parser->sidx.entry_index < parser->sidx.entries_count) {
+        GstSidxBoxEntry *entry =
+            &parser->sidx.entries[parser->sidx.entry_index];
+
+        remaining = gst_byte_reader_get_remaining (&reader);
+        if (remaining < 12)
+          break;
+
+        /* offset/pts are cumulative over all preceding entries */
+        entry->offset = parser->cumulative_entry_size;
+        entry->pts = parser->cumulative_pts;
+        gst_isoff_qt_parse_sidx_entry (entry, &reader);
+        entry->duration = gst_util_uint64_scale_int_round (entry->duration,
+            GST_SECOND, parser->sidx.timescale);
+        parser->cumulative_entry_size += entry->size;
+        parser->cumulative_pts += entry->duration;
+
+        GST_LOG ("Sidx entry %d) offset: %" G_GUINT64_FORMAT ", pts: %"
+            GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT " - size %"
+            G_GUINT32_FORMAT, parser->sidx.entry_index, entry->offset,
+            GST_TIME_ARGS (entry->pts), GST_TIME_ARGS (entry->duration),
+            entry->size);
+
+        parser->sidx.entry_index++;
+      }
+
+      if (parser->sidx.entry_index == parser->sidx.entries_count)
+        parser->status = GST_ISOFF_QT_SIDX_PARSER_FINISHED;
+      else
+        break;
+      /* fall through */
+
+    case GST_ISOFF_QT_SIDX_PARSER_FINISHED:
+      parser->sidx.entry_index = 0;
+      res = GST_ISOFF_QT_PARSER_DONE;
+      break;
+  }
+
+  *consumed = gst_byte_reader_get_pos (&reader);
+  return res;
+}
+
+/* Convenience wrapper around gst_isoff_qt_sidx_parser_add_data() that
+ * maps @buffer for reading. Returns GST_ISOFF_QT_PARSER_ERROR (with
+ * *consumed set to 0) if the buffer cannot be mapped. */
+GstIsoffParserResult
+gst_isoff_qt_sidx_parser_add_buffer (GstSidxParser * parser, GstBuffer * buffer,
+    guint * consumed)
+{
+ GstIsoffParserResult res = GST_ISOFF_QT_PARSER_OK;
+ GstMapInfo info;
+
+ if (!gst_buffer_map (buffer, &info, GST_MAP_READ)) {
+ *consumed = 0;
+ return GST_ISOFF_QT_PARSER_ERROR;
+ }
+
+ res =
+ gst_isoff_qt_sidx_parser_add_data (parser, info.data, info.size,
+ consumed);
+
+ gst_buffer_unmap (buffer, &info);
+ return res;
+}
diff --git a/gst/isomp4/gstisoff.h b/gst/isomp4/gstisoff.h
new file mode 100644
index 0000000000..c6fbf33828
--- /dev/null
+++ b/gst/isomp4/gstisoff.h
@@ -0,0 +1,100 @@
+/*
+ * ISO File Format parsing library
+ *
+ * gstisoff.h
+ *
+ * Copyright (C) 2015 Samsung Electronics. All rights reserved.
+ * Author: Thiago Santos <thiagoss@osg.samsung.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library (COPYING); if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_ISOFF_QT_H__
+#define __GST_ISOFF_QT_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef enum {
+ GST_ISOFF_QT_PARSER_OK,
+ GST_ISOFF_QT_PARSER_DONE,
+ GST_ISOFF_QT_PARSER_UNEXPECTED,
+ GST_ISOFF_QT_PARSER_ERROR
+} GstIsoffParserResult;
+
+/* this is the minimum size, it can be larger if it
+ * uses extended size or type */
+#define GST_ISOFF_QT_FULL_BOX_SIZE 12
+
+#define GST_ISOFF_QT_FOURCC_SIDX GST_MAKE_FOURCC('s','i','d','x')
+/* One parsed sidx reference entry; offset/pts are accumulated by the
+ * parser over all preceding entries of the box. */
+typedef struct _GstSidxBoxEntry
+{
+ gboolean ref_type; /* top bit of the first entry word */
+ guint32 size; /* referenced size in bytes (lower 31 bits) */
+ GstClockTime duration; /* rescaled to GstClockTime by the parser */
+ gboolean starts_with_sap;
+ guint8 sap_type;
+ guint32 sap_delta_time;
+
+ guint64 offset; /* byte offset: sum of preceding entry sizes */
+ GstClockTime pts; /* cumulative pts at the start of this entry */
+} GstSidxBoxEntry;
+
+/* Parsed representation of a sidx box header plus its entries. */
+typedef struct _GstSidxBox
+{
+ guint8 version;
+ guint32 flags;
+
+ guint32 ref_id;
+ guint32 timescale; /* ticks per second for pts/duration fields */
+ guint64 earliest_pts; /* in timescale units */
+ guint64 first_offset;
+
+ gint entry_index; /* next entry to fill while parsing */
+ gint entries_count;
+
+ GstSidxBoxEntry *entries; /* array of entries_count items, or NULL */
+} GstSidxBox;
+
+/* Parser states; each state consumes one section of the box and falls
+ * through to the next as soon as enough data is available. */
+typedef enum _GstSidxParserStatus
+{
+ GST_ISOFF_QT_SIDX_PARSER_INIT,
+ GST_ISOFF_QT_SIDX_PARSER_HEADER,
+ GST_ISOFF_QT_SIDX_PARSER_DATA,
+ GST_ISOFF_QT_SIDX_PARSER_FINISHED
+} GstSidxParserStatus;
+
+/* Incremental sidx parser state: survives across multiple
+ * gst_isoff_qt_sidx_parser_add_data() calls. */
+typedef struct _GstSidxParser
+{
+ GstSidxParserStatus status;
+
+ guint64 size; /* total box size, from the box header */
+ guint64 cumulative_entry_size; /* running sum of entry sizes */
+ guint64 cumulative_pts; /* running pts, in GstClockTime */
+
+ GstSidxBox sidx;
+} GstSidxParser;
+
+void gst_isoff_qt_sidx_parser_init (GstSidxParser * parser);
+void gst_isoff_qt_sidx_parser_clear (GstSidxParser * parser);
+GstIsoffParserResult gst_isoff_qt_sidx_parser_add_data (GstSidxParser * parser, const guint8 * buffer, gint length, guint * consumed);
+GstIsoffParserResult gst_isoff_qt_sidx_parser_add_buffer (GstSidxParser * parser, GstBuffer * buf, guint * consumed);
+
+G_END_DECLS
+
+#endif /* __GST_ISOFF_QT_H__ */
+
diff --git a/gst/isomp4/gstisomp4element.c b/gst/isomp4/gstisomp4element.c
new file mode 100644
index 0000000000..06a82d2b62
--- /dev/null
+++ b/gst/isomp4/gstisomp4element.c
@@ -0,0 +1,55 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include "gst/gst-i18n-plugin.h"
+#include <gst/pbutils/pbutils.h>
+
+#include "gstisomp4elements.h"
+#include "qtdemux.h"
+
+/* One-time per-process initialisation shared by the isomp4 elements:
+ * i18n setup, pbutils init and registration of the private QT tags.
+ * Guarded by g_once so it can safely be called from every element's
+ * registration path. @plugin is currently unused. */
+void
+isomp4_element_init (GstPlugin * plugin)
+{
+ static gsize res = FALSE;
+
+ if (g_once_init_enter (&res)) {
+#ifdef ENABLE_NLS
+ bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
+ bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
+#endif /* ENABLE_NLS */
+
+ gst_pb_utils_init ();
+
+ /* ensure private tag is registered */
+ gst_tag_register (GST_QT_DEMUX_PRIVATE_TAG, GST_TAG_FLAG_META,
+ GST_TYPE_SAMPLE, "QT atom", "unparsed QT tag atom",
+ gst_tag_merge_use_first);
+
+ gst_tag_register (GST_QT_DEMUX_CLASSIFICATION_TAG, GST_TAG_FLAG_META,
+ G_TYPE_STRING, GST_QT_DEMUX_CLASSIFICATION_TAG,
+ "content classification", gst_tag_merge_use_first);
+
+ g_once_init_leave (&res, TRUE);
+ }
+}
diff --git a/gst/isomp4/gstisomp4elements.h b/gst/isomp4/gstisomp4elements.h
new file mode 100644
index 0000000000..0347e6e3b7
--- /dev/null
+++ b/gst/isomp4/gstisomp4elements.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Julian Bouzas <julian.bouzas@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_ISOMP4_ELEMENTS_H__
+#define __GST_ISOMP4_ELEMENTS_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+void isomp4_element_init (GstPlugin * plugin);
+
+GST_ELEMENT_REGISTER_DECLARE (qtdemux);
+GST_ELEMENT_REGISTER_DECLARE (rtpxqtdepay);
+GST_ELEMENT_REGISTER_DECLARE (qtmux);
+GST_ELEMENT_REGISTER_DECLARE (qtmoovrecover);
+
+G_END_DECLS
+
+#endif /* __GST_ISOMP4_ELEMENTS_H__ */
diff --git a/gst/isomp4/gstqtmoovrecover.c b/gst/isomp4/gstqtmoovrecover.c
new file mode 100644
index 0000000000..068030a64c
--- /dev/null
+++ b/gst/isomp4/gstqtmoovrecover.c
@@ -0,0 +1,380 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2010 Thiago Santos <thiago.sousa.santos@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+
+/**
+ * SECTION:element-qtmoovrecover
+ * @title: qtmoovrecover
+ * @short_description: Utility element for recovering unfinished quicktime files
+ *
+ * This element recovers quicktime files created with qtmux using the moov
+ * recovery feature.
+ *
+ * ## Example pipelines
+ *
+ * |[
+ * TODO
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <glib/gstdio.h>
+#include <gst/gst.h>
+
+#include "gstisomp4elements.h"
+#include "gstqtmoovrecover.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_qt_moov_recover_debug);
+#define GST_CAT_DEFAULT gst_qt_moov_recover_debug
+
+/* QTMoovRecover signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_RECOVERY_INPUT,
+ PROP_BROKEN_INPUT,
+ PROP_FIXED_OUTPUT,
+ PROP_FAST_START_MODE
+};
+
+#define gst_qt_moov_recover_parent_class parent_class
+G_DEFINE_TYPE (GstQTMoovRecover, gst_qt_moov_recover, GST_TYPE_PIPELINE);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (qtmoovrecover, "qtmoovrecover",
+ GST_RANK_NONE, GST_TYPE_QT_MOOV_RECOVER, isomp4_element_init (plugin));
+
+/* property functions */
+static void gst_qt_moov_recover_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_qt_moov_recover_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+static GstStateChangeReturn gst_qt_moov_recover_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_qt_moov_recover_finalize (GObject * object);
+
+static void
+gst_qt_moov_recover_class_init (GstQTMoovRecoverClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_qt_moov_recover_finalize;
+ gobject_class->get_property = gst_qt_moov_recover_get_property;
+ gobject_class->set_property = gst_qt_moov_recover_set_property;
+
+ gstelement_class->change_state = gst_qt_moov_recover_change_state;
+
+ g_object_class_install_property (gobject_class, PROP_FIXED_OUTPUT,
+ g_param_spec_string ("fixed-output",
+ "Path to write the fixed file",
+ "Path to write the fixed file to (used as output)",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_BROKEN_INPUT,
+ g_param_spec_string ("broken-input",
+ "Path to broken input file",
+ "Path to broken input file. (If qtmux was on faststart mode, this "
+ "file is the faststart file)", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_RECOVERY_INPUT,
+ g_param_spec_string ("recovery-input",
+ "Path to recovery file",
+ "Path to recovery file (used as input)", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_FAST_START_MODE,
+ g_param_spec_boolean ("faststart-mode",
+ "If the broken input is from faststart mode",
+ "If the broken input is from faststart mode",
+ FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ GST_DEBUG_CATEGORY_INIT (gst_qt_moov_recover_debug, "qtmoovrecover", 0,
+ "QT Moovie Recover");
+
+ gst_element_class_set_static_metadata (gstelement_class, "QT Moov Recover",
+ "Util", "Recovers unfinished qtmux files",
+ "Thiago Santos <thiago.sousa.santos@collabora.co.uk>");
+}
+
/* Instance init: nothing to set up here; the worker task and its mutex are
 * created lazily in the NULL->READY state transition (see change_state). */
static void
gst_qt_moov_recover_init (GstQTMoovRecover * qtmr)
{
}
+
+static void
+gst_qt_moov_recover_finalize (GObject * object)
+{
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
/* Body of the recovery task (started from change_state): validates the three
 * path properties, opens the files, parses the recovery and broken inputs and
 * writes the repaired file to fixed-output.  On success an EOS message is
 * posted on the bus; any failure is reported as an element error.  The task
 * stops itself at the end, so it effectively runs once per start. */
static void
gst_qt_moov_recover_run (void *data)
{
  FILE *moovrec = NULL;
  FILE *mdatinput = NULL;
  FILE *output = NULL;
  MdatRecovFile *mdat_recov = NULL;
  MoovRecovFile *moov_recov = NULL;
  GstQTMoovRecover *qtmr = GST_QT_MOOV_RECOVER_CAST (data);
  GError *err = NULL;
  GError *warn = NULL;

  GST_LOG_OBJECT (qtmr, "Starting task");

  GST_DEBUG_OBJECT (qtmr, "Validating properties");
  /* NOTE(review): the object lock is held from here through the g_fopen()
   * calls below so the path strings cannot be replaced underneath us; this
   * also blocks concurrent property access while the files are opened. */
  GST_OBJECT_LOCK (qtmr);
  /* validate properties */
  if (qtmr->broken_input == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, SETTINGS,
        ("Please set broken-input property"), (NULL));
    goto end;
  }
  if (qtmr->recovery_input == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, SETTINGS,
        ("Please set recovery-input property"), (NULL));
    goto end;
  }
  if (qtmr->fixed_output == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, SETTINGS,
        ("Please set fixed-output property"), (NULL));
    goto end;
  }

  GST_DEBUG_OBJECT (qtmr, "Opening input/output files");
  /* open files */
  moovrec = g_fopen (qtmr->recovery_input, "rb");
  if (moovrec == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, OPEN_READ,
        ("Failed to open recovery-input file"), (NULL));
    goto end;
  }

  mdatinput = g_fopen (qtmr->broken_input, "rb");
  if (mdatinput == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, OPEN_READ,
        ("Failed to open broken-input file"), (NULL));
    goto end;
  }
  output = g_fopen (qtmr->fixed_output, "wb+");
  if (output == NULL) {
    GST_OBJECT_UNLOCK (qtmr);
    GST_ELEMENT_ERROR (qtmr, RESOURCE, OPEN_READ_WRITE,
        ("Failed to open fixed-output file"), (NULL));
    goto end;
  }
  GST_OBJECT_UNLOCK (qtmr);

  GST_DEBUG_OBJECT (qtmr, "Parsing input files");
  /* now create our structures */
  mdat_recov = mdat_recov_file_create (mdatinput, qtmr->faststart_mode, &err);
  /* presumably the recovery structures take ownership of (or close) the FILE
   * handle in all cases; clearing our pointer prevents a double fclose in
   * the cleanup below -- TODO confirm against atomsrecovery.c */
  mdatinput = NULL;
  if (mdat_recov == NULL) {
    GST_ELEMENT_ERROR (qtmr, RESOURCE, FAILED,
        ("Broken file could not be parsed correctly"), (NULL));
    goto end;
  }
  moov_recov = moov_recov_file_create (moovrec, &err);
  /* same ownership assumption as for mdatinput above */
  moovrec = NULL;
  if (moov_recov == NULL) {
    GST_ELEMENT_ERROR (qtmr, RESOURCE, FAILED,
        ("Recovery file could not be parsed correctly"), (NULL));
    goto end;
  }

  /* now parse the buffers data from moovrec */
  if (!moov_recov_parse_buffers (moov_recov, mdat_recov, &err)) {
    /* err carries the reason; it is reported once at the end label */
    goto end;
  }

  GST_DEBUG_OBJECT (qtmr, "Writing fixed file to output");
  if (!moov_recov_write_file (moov_recov, mdat_recov, output, &err, &warn)) {
    goto end;
  }

  /* writing may succeed with a non-fatal warning attached */
  if (warn) {
    GST_ELEMENT_WARNING (qtmr, RESOURCE, FAILED, ("%s", warn->message), (NULL));
    g_error_free (warn);
  }

  /* here means success */
  GST_DEBUG_OBJECT (qtmr, "Finished successfully, posting EOS");
  gst_element_post_message (GST_ELEMENT_CAST (qtmr),
      gst_message_new_eos (GST_OBJECT_CAST (qtmr)));

end:
  /* single cleanup path for all outcomes; err, if set, is reported here */
  GST_LOG_OBJECT (qtmr, "Finalizing task");
  if (err) {
    GST_ELEMENT_ERROR (qtmr, RESOURCE, FAILED, ("%s", err->message), (NULL));
    g_error_free (err);
  }

  if (moov_recov)
    moov_recov_file_free (moov_recov);
  if (moovrec)
    fclose (moovrec);

  if (mdat_recov)
    mdat_recov_file_free (mdat_recov);
  if (mdatinput)
    fclose (mdatinput);

  if (output)
    fclose (output);
  GST_LOG_OBJECT (qtmr, "Leaving task");
  /* stop ourselves so the task does not loop and run the recovery again */
  gst_task_stop (qtmr->task);
}
+
+static void
+gst_qt_moov_recover_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstQTMoovRecover *qtmr = GST_QT_MOOV_RECOVER_CAST (object);
+
+ GST_OBJECT_LOCK (qtmr);
+ switch (prop_id) {
+ case PROP_FAST_START_MODE:
+ g_value_set_boolean (value, qtmr->faststart_mode);
+ break;
+ case PROP_BROKEN_INPUT:
+ g_value_set_string (value, qtmr->broken_input);
+ break;
+ case PROP_RECOVERY_INPUT:
+ g_value_set_string (value, qtmr->recovery_input);
+ break;
+ case PROP_FIXED_OUTPUT:
+ g_value_set_string (value, qtmr->fixed_output);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (qtmr);
+}
+
+static void
+gst_qt_moov_recover_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstQTMoovRecover *qtmr = GST_QT_MOOV_RECOVER_CAST (object);
+
+ GST_OBJECT_LOCK (qtmr);
+ switch (prop_id) {
+ case PROP_FAST_START_MODE:
+ qtmr->faststart_mode = g_value_get_boolean (value);
+ break;
+ case PROP_BROKEN_INPUT:
+ g_free (qtmr->broken_input);
+ qtmr->broken_input = g_value_dup_string (value);
+ break;
+ case PROP_RECOVERY_INPUT:
+ g_free (qtmr->recovery_input);
+ qtmr->recovery_input = g_value_dup_string (value);
+ break;
+ case PROP_FIXED_OUTPUT:
+ g_free (qtmr->fixed_output);
+ qtmr->fixed_output = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (qtmr);
+}
+
+static GstStateChangeReturn
+gst_qt_moov_recover_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstQTMoovRecover *qtmr = GST_QT_MOOV_RECOVER_CAST (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ qtmr->task = gst_task_new (gst_qt_moov_recover_run, qtmr, NULL);
+ g_rec_mutex_init (&qtmr->task_mutex);
+ gst_task_set_lock (qtmr->task, &qtmr->task_mutex);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ gst_task_start (qtmr->task);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ gst_task_stop (qtmr->task);
+ gst_task_join (qtmr->task);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ if (gst_task_get_state (qtmr->task) != GST_TASK_STOPPED)
+ GST_ERROR ("task %p should be stopped by now", qtmr->task);
+ gst_object_unref (qtmr->task);
+ qtmr->task = NULL;
+ g_rec_mutex_clear (&qtmr->task_mutex);
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/isomp4/gstqtmoovrecover.h b/gst/isomp4/gstqtmoovrecover.h
new file mode 100644
index 0000000000..b86c2233ef
--- /dev/null
+++ b/gst/isomp4/gstqtmoovrecover.h
@@ -0,0 +1,88 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2010 Thiago Santos <thiago.sousa.santos@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __GST_QT_MOOV_RECOVER_H__
+#define __GST_QT_MOOV_RECOVER_H__
+
+#include <gst/gst.h>
+
+#include "atoms.h"
+#include "atomsrecovery.h"
+
+G_BEGIN_DECLS
+
/* Standard GObject boilerplate macros for the qtmoovrecover element */
#define GST_TYPE_QT_MOOV_RECOVER (gst_qt_moov_recover_get_type())
#define GST_QT_MOOV_RECOVER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_QT_MOOV_RECOVER, GstQTMoovRecover))
#define GST_QT_MOOV_RECOVER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_QT_MOOV_RECOVER, GstQTMoovRecover))
#define GST_IS_QT_MOOV_RECOVER(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_QT_MOOV_RECOVER))
#define GST_IS_QT_MOOV_RECOVER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QT_MOOV_RECOVER))
#define GST_QT_MOOV_RECOVER_CAST(obj) ((GstQTMoovRecover*)(obj))


typedef struct _GstQTMoovRecover GstQTMoovRecover;
typedef struct _GstQTMoovRecoverClass GstQTMoovRecoverClass;

/* Instance structure: a GstPipeline subclass that performs moov recovery in
 * a dedicated GstTask rather than through pads. */
struct _GstQTMoovRecover
{
  GstPipeline pipeline;

  GstTask *task;            /* worker task running the recovery function */
  GRecMutex task_mutex;     /* streaming lock handed to the task */

  /* properties */
  gboolean faststart_mode;  /* "faststart-mode": broken input came from faststart */
  gchar *recovery_input;    /* "recovery-input": path to the moov recovery file */
  gchar *fixed_output;      /* "fixed-output": path the repaired file is written to */
  gchar *broken_input;      /* "broken-input": path to the broken/unfinished file */
};

struct _GstQTMoovRecoverClass
{
  GstPipelineClass parent_class;
};

GType gst_qt_moov_recover_get_type (void);
/* NOTE(review): no definition of this function is visible in
 * gstqtmoovrecover.c (registration goes through GST_ELEMENT_REGISTER_DEFINE
 * there); possibly a stale declaration -- verify before relying on it */
gboolean gst_qt_moov_recover_register (GstPlugin * plugin);
+
+G_END_DECLS
+
+#endif /* __GST_QT_MOOV_RECOVER_H__ */
diff --git a/gst/isomp4/gstqtmux-doc.c b/gst/isomp4/gstqtmux-doc.c
new file mode 100644
index 0000000000..bc9b3e81e9
--- /dev/null
+++ b/gst/isomp4/gstqtmux-doc.c
@@ -0,0 +1,318 @@
+/* Quicktime muxer documentation
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ * Copyright (C) 2008 Mark Nauwelaerts <mnauw@users.sf.net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/* ============================= qtmux ==================================== */
+
+/**
+ * SECTION:element-qtmux
+ * @title: qtmux
+ * @short_description: Muxer for quicktime(.mov) files
+ *
+ * This element merges streams (audio and video) into QuickTime(.mov) files.
+ *
+ * The following background intends to explain why various similar muxers
+ * are present in this plugin.
+ *
+ * The [QuickTime file format specification](http://www.apple.com/quicktime/resources/qtfileformat.pdf)
+ * served as basis for the MP4 file format specification (mp4mux), and as such
+ * the QuickTime file structure is nearly identical to the so-called ISO Base
+ * Media file format defined in ISO 14496-12 (except for some media specific
+ * parts).
+ *
+ * In turn, the latter ISO Base Media format was further specialized as a
+ * Motion JPEG-2000 file format in ISO 15444-3 (mj2mux)
 * and in various 3GPP(2) specs (3gppmux).
+ * The fragmented file features defined (only) in ISO Base Media are used by
+ * ISMV files making up (a.o.) Smooth Streaming (ismlmux).
+ *
+ * A few properties (#GstBaseQTMux:movie-timescale, #GstBaseQTMux:trak-timescale,
+ * #GstQTMuxPad:trak-timescale) allow adjusting some technical parameters,
+ * which might be useful in (rare) cases to resolve compatibility issues in
+ * some situations.
+ *
+ * Some other properties influence the result more fundamentally.
+ * A typical mov/mp4 file's metadata (aka moov) is located at the end of the
+ * file, somewhat contrary to this usually being called "the header".
+ * However, a #GstBaseQTMux:faststart file will (with some effort) arrange this to
+ * be located near start of the file, which then allows it e.g. to be played
+ * while downloading. Alternatively, rather than having one chunk of metadata at
+ * start (or end), there can be some metadata at start and most of the other
+ * data can be spread out into fragments of #GstBaseQTMux:fragment-duration.
+ * If such fragmented layout is intended for streaming purposes, then
+ * #GstQTMux:streamable allows foregoing to add index metadata (at the end of
+ * file).
+ *
+ * When the maximum duration to be recorded can be known in advance, #GstQTMux
+ * also supports a 'Robust Muxing' mode. In robust muxing mode, space for the
+ * headers are reserved at the start of muxing, and rewritten at a configurable
+ * interval, so that the output file is always playable, even if the recording
+ * is interrupted uncleanly by a crash. Robust muxing mode requires a seekable
+ * output, such as filesink, because it needs to rewrite the start of the file.
+ *
+ * To enable robust muxing mode, set the #GstBaseQTMux:reserved-moov-update-period
+ * and #GstBaseQTMux:reserved-max-duration property. Also present is the
+ * #GstBaseQTMux:reserved-bytes-per-sec property, which can be increased if
+ * for some reason the default is not large enough and the initial reserved
+ * space for headers is too small. Applications can monitor the
+ * #GstBaseQTMux:reserved-duration-remaining property to see how close to full
+ * the reserved space is becoming.
+ *
+ * Applications that wish to be able to use/edit a file while it is being
+ * written to by live content, can use the "Robust Prefill Muxing" mode. That
+ * mode is a variant of the "Robust Muxing" mode in that it will pre-allocate a
+ * completely valid header from the start for all tracks (i.e. it appears as
+ * though the file is "reserved-max-duration" long with all samples
+ * present). This mode can be enabled by setting the
+ * #GstBaseQTMux:reserved-moov-update-period and #GstBaseQTMux:reserved-prefill
+ * properties. Note that this mode is only possible with input streams that have
+ * a fixed sample size (such as raw audio and Prores Video) and that don't
+ * have reordered samples.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 v4l2src num-buffers=500 ! video/x-raw,width=320,height=240 ! videoconvert ! qtmux ! filesink location=video.mov
+ * ]|
+ * Records a video stream captured from a v4l2 device and muxes it into a qt file.
+ *
+ */
+
+/* ============================= mp4mux ==================================== */
+
+/**
+ * SECTION:element-mp4mux
+ * @title: mp4mux
+ * @short_description: Muxer for ISO MPEG-4 (.mp4) files
+ *
+ * This element merges streams (audio and video) into ISO MPEG-4 (.mp4) files.
+ *
+ * The following background intends to explain why various similar muxers
+ * are present in this plugin.
+ *
+ * The [QuickTime file format specification](http://www.apple.com/quicktime/resources/qtfileformat.pdf)
+ * served as basis for the MP4 file format specification (mp4mux), and as such
+ * the QuickTime file structure is nearly identical to the so-called ISO Base
+ * Media file format defined in ISO 14496-12 (except for some media specific
+ * parts).
+ *
+ * In turn, the latter ISO Base Media format was further specialized as a
+ * Motion JPEG-2000 file format in ISO 15444-3 (mj2mux)
+ * and in various 3GPP(2) specs (3gppmux).
+ * The fragmented file features defined (only) in ISO Base Media are used by
+ * ISMV files making up (a.o.) Smooth Streaming (ismlmux).
+ *
+ * A few properties (#GstBaseQTMux:movie-timescale, #GstBaseQTMux:trak-timescale)
+ * allow adjusting some technical parameters, which might be useful in (rare)
+ * cases to resolve compatibility issues in some situations.
+ *
+ * Some other properties influence the result more fundamentally.
+ * A typical mov/mp4 file's metadata (aka moov) is located at the end of the
+ * file, somewhat contrary to this usually being called "the header".
+ * However, a #GstBaseQTMux:faststart file will (with some effort) arrange this to
+ * be located near start of the file, which then allows it e.g. to be played
+ * while downloading. Alternatively, rather than having one chunk of metadata at
+ * start (or end), there can be some metadata at start and most of the other
+ * data can be spread out into fragments of #GstBaseQTMux:fragment-duration.
+ * If such fragmented layout is intended for streaming purposes, then
+ * #GstMP4Mux:streamable allows foregoing to add index metadata (at the end of
+ * file).
+ *
+ * ## Example pipelines
+ * |[
 * gst-launch-1.0 v4l2src num-buffers=50 ! queue ! x264enc ! mp4mux ! filesink location=video.mp4
+ * ]|
+ * Records a video stream captured from a v4l2 device, encodes it into H.264
+ * and muxes it into an mp4 file.
+ *
+ */
+
+/* ============================= 3gppmux ==================================== */
+
+/**
+ * SECTION:element-3gppmux
+ * @title: 3gppmux
+ * @short_description: Muxer for 3GPP (.3gp) files
+ *
+ * This element merges streams (audio and video) into 3GPP (.3gp) files.
+ *
+ * The following background intends to explain why various similar muxers
+ * are present in this plugin.
+ *
+ * The [QuickTime file format specification](http://www.apple.com/quicktime/resources/qtfileformat.pdf)
+ * served as basis for the MP4 file format specification (mp4mux), and as such
+ * the QuickTime file structure is nearly identical to the so-called ISO Base
+ * Media file format defined in ISO 14496-12 (except for some media specific
+ * parts).
+ *
+ * In turn, the latter ISO Base Media format was further specialized as a
+ * Motion JPEG-2000 file format in ISO 15444-3 (mj2mux)
+ * and in various 3GPP(2) specs (3gppmux).
+ * The fragmented file features defined (only) in ISO Base Media are used by
+ * ISMV files making up (a.o.) Smooth Streaming (ismlmux).
+ *
+ * A few properties (#GstBaseQTMux:movie-timescale, #GstBaseQTMux:trak-timescale)
+ * allow adjusting some technical parameters, which might be useful in (rare)
+ * cases to resolve compatibility issues in some situations.
+ *
+ * Some other properties influence the result more fundamentally.
+ * A typical mov/mp4 file's metadata (aka moov) is located at the end of the file,
+ * somewhat contrary to this usually being called "the header". However, a
+ * #GstBaseQTMux:faststart file will (with some effort) arrange this to be located
+ * near start of the file, which then allows it e.g. to be played while
+ * downloading. Alternatively, rather than having one chunk of metadata at start
+ * (or end), there can be some metadata at start and most of the other data can
+ * be spread out into fragments of #GstBaseQTMux:fragment-duration. If such
+ * fragmented layout is intended for streaming purposes, then
+ * #Gst3GPPMux:streamable allows foregoing to add index metadata (at the end of
+ * file).
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 v4l2src num-buffers=50 ! queue ! ffenc_h263 ! 3gppmux ! filesink location=video.3gp
+ * ]|
+ * Records a video stream captured from a v4l2 device, encodes it into H.263
+ * and muxes it into an 3gp file.
+ *
+ * Documentation last reviewed on 2011-04-21
+ */
+
+/* ============================= mj2pmux ==================================== */
+
+/**
+ * SECTION:element-mj2mux
+ * @title: mj2mux
+ * @short_description: Muxer for Motion JPEG-2000 (.mj2) files
+ *
+ * This element merges streams (audio and video) into MJ2 (.mj2) files.
+ *
+ * The following background intends to explain why various similar muxers
+ * are present in this plugin.
+ *
+ * The [QuickTime file format specification](http://www.apple.com/quicktime/resources/qtfileformat.pdf)
+ * served as basis for the MP4 file format specification (mp4mux), and as such
+ * the QuickTime file structure is nearly identical to the so-called ISO Base
+ * Media file format defined in ISO 14496-12 (except for some media specific
+ * parts).
+ *
+ * In turn, the latter ISO Base Media format was further specialized as a
+ * Motion JPEG-2000 file format in ISO 15444-3 (mj2mux)
+ * and in various 3GPP(2) specs (3gppmux).
+ * The fragmented file features defined (only) in ISO Base Media are used by
+ * ISMV files making up (a.o.) Smooth Streaming (ismlmux).
+ *
+ * A few properties (#GstBaseQTMux:movie-timescale, #GstBaseQTMux:trak-timescale)
+ * allow adjusting some technical parameters, which might be useful in (rare)
+ * cases to resolve compatibility issues in some situations.
+ *
+ * Some other properties influence the result more fundamentally.
+ * A typical mov/mp4 file's metadata (aka moov) is located at the end of the file,
+ * somewhat contrary to this usually being called "the header". However, a
+ * #GstBaseQTMux:faststart file will (with some effort) arrange this to be located
+ * near start of the file, which then allows it e.g. to be played while
+ * downloading. Alternatively, rather than having one chunk of metadata at start
+ * (or end), there can be some metadata at start and most of the other data can
+ * be spread out into fragments of #GstBaseQTMux:fragment-duration. If such
+ * fragmented layout is intended for streaming purposes, then
+ * #GstMJ2Mux:streamable allows foregoing to add index metadata (at the end of
+ * file).
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 v4l2src num-buffers=50 ! queue ! jp2kenc ! mj2mux ! filesink location=video.mj2
+ * ]|
+ * Records a video stream captured from a v4l2 device, encodes it into JPEG-2000
+ * and muxes it into an mj2 file.
+ *
+ * Documentation last reviewed on 2011-04-21
+ */
+
+/* ============================= ismlmux ==================================== */
+
+/**
+ * SECTION:element-ismlmux
+ * @title: ismlmux
+ * @short_description: Muxer for ISML smooth streaming (.isml) files
+ *
 * This element merges streams (audio and video) into ISML smooth streaming (.isml) files.
+ *
+ * The following background intends to explain why various similar muxers
+ * are present in this plugin.
+ *
+ * The [QuickTime file format specification](http://www.apple.com/quicktime/resources/qtfileformat.pdf)
+ * served as basis for the MP4 file format specification (mp4mux), and as such
+ * the QuickTime file structure is nearly identical to the so-called ISO Base
+ * Media file format defined in ISO 14496-12 (except for some media specific
+ * parts).
+ *
+ * In turn, the latter ISO Base Media format was further specialized as a
+ * Motion JPEG-2000 file format in ISO 15444-3 (mj2mux)
+ * and in various 3GPP(2) specs (3gppmux).
+ * The fragmented file features defined (only) in ISO Base Media are used by
+ * ISMV files making up (a.o.) Smooth Streaming (ismlmux).
+ *
+ * A few properties (#GstBaseQTMux:movie-timescale, #GstBaseQTMux:trak-timescale)
+ * allow adjusting some technical parameters, which might be useful in (rare)
+ * cases to resolve compatibility issues in some situations.
+ *
+ * Some other properties influence the result more fundamentally.
+ * A typical mov/mp4 file's metadata (aka moov) is located at the end of the file,
+ * somewhat contrary to this usually being called "the header". However, a
+ * #GstBaseQTMux:faststart file will (with some effort) arrange this to be located
+ * near start of the file, which then allows it e.g. to be played while
+ * downloading. Alternatively, rather than having one chunk of metadata at start
+ * (or end), there can be some metadata at start and most of the other data can
+ * be spread out into fragments of #GstBaseQTMux:fragment-duration. If such
+ * fragmented layout is intended for streaming purposes, then
+ * #GstISMLMux:streamable allows foregoing to add index metadata (at the end of
+ * file).
+ *
 * ## Example pipelines
 * |[
 * gst-launch-1.0 v4l2src num-buffers=50 ! queue ! x264enc ! ismlmux ! filesink location=video.isml
 * ]|
 * Records a video stream captured from a v4l2 device, encodes it into H.264
 * and muxes it into an isml file.
+ *
+ * Documentation last reviewed on 2011-04-21
+ */
diff --git a/gst/isomp4/gstqtmux-doc.h b/gst/isomp4/gstqtmux-doc.h
new file mode 100644
index 0000000000..c6f8baab55
--- /dev/null
+++ b/gst/isomp4/gstqtmux-doc.h
@@ -0,0 +1,53 @@
+/* Quicktime muxer documentation
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ * Copyright (C) 2008 Mark Nauwelaerts <mnauw@users.sf.net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#error "This header is for gtk-doc only and not supposed to be included"
+
+typedef struct _GstQTMuxElement GstQTMuxElement;
+typedef struct _GstMP4Mux GstMP4Mux;
+typedef struct _Gst3GPPMux Gst3GPPMux;
+typedef struct _GstISMLMux GstISMLMux;
+typedef struct _GstMJ2Mux GstMJ2Mux;
+
diff --git a/gst/isomp4/gstqtmux.c b/gst/isomp4/gstqtmux.c
new file mode 100644
index 0000000000..999621aa8b
--- /dev/null
+++ b/gst/isomp4/gstqtmux.c
@@ -0,0 +1,7509 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ * Copyright (C) 2008 Mark Nauwelaerts <mnauw@users.sf.net>
+ * Copyright (C) 2010 Nokia Corporation. All rights reserved.
+ * Copyright (C) 2014 Jan Schmidt <jan@centricular.com>
+ * Contact: Stefan Kost <stefan.kost@nokia.com>
+
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+
+/**
+ * SECTION:GstQTMux
+ * @title: GstQTMux
+ * @short_description: Muxer for ISO MP4-based files
+ */
+
+/*
+ * Based on avimux
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <glib/gstdio.h>
+
+#include <gst/gst.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/base/gstbitreader.h>
+#include <gst/audio/audio.h>
+#include <gst/video/video.h>
+#include <gst/tag/tag.h>
+#include <gst/pbutils/pbutils.h>
+
+#include <sys/types.h>
+#ifdef G_OS_WIN32
+#include <io.h> /* lseek, open, close, read */
+#undef lseek
+#define lseek _lseeki64
+#undef off_t
+#define off_t guint64
+#endif
+
+#ifdef _MSC_VER
+#define ftruncate g_win32_ftruncate
+#endif
+
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif
+
+#include "gstisomp4elements.h"
+#include "gstqtmux.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_qt_mux_debug);
+#define GST_CAT_DEFAULT gst_qt_mux_debug
+
+#ifndef ABSDIFF
+#define ABSDIFF(a, b) ((a) > (b) ? (a) - (b) : (b) - (a))
+#endif
+
+/* Hacker notes.
+ *
+ * The basic building blocks of MP4 files are:
+ * - an 'ftyp' box at the very start
+ * - an 'mdat' box which contains the raw audio/video/subtitle data;
+ * this is just a bunch of bytes, completely unframed and possibly
+ * unordered with no additional meta-information
+ * - a 'moov' box that contains information about the different streams
+ * and what they contain, as well as sample tables for each stream
+ * that tell the demuxer where in the mdat box each buffer/sample is
+ * and what its duration/timestamp etc. is, and whether it's a
+ * keyframe etc.
+ * Additionally, fragmented MP4 works by writing chunks of data in
+ * pairs of 'moof' and 'mdat' boxes:
+ * - 'moof' boxes, header preceding each mdat fragment describing the
+ * contents, like a moov but only for that fragment.
+ * - a 'mfra' box for Fragmented MP4, which is written at the end and
+ * contains a summary of all fragments and seek tables.
+ *
+ * Currently mp4mux can work in 4 different modes / generate 4 types
+ * of output files/streams:
+ *
+ * - Normal mp4: mp4mux will write a little ftyp identifier at the
+ * beginning, then start an mdat box into which it will write all the
+ * sample data. At EOS it will then write the moov header with track
+ * headers and sample tables at the end of the file, and rewrite the
+ * start of the file to fix up the mdat box size at the beginning.
+ * It has to wait for EOS to write the moov (which includes the
+ * sample tables) because it doesn't know how much space those
+ * tables will be. The output downstream must be seekable to rewrite
+ * the mdat box at EOS.
+ *
+ * - Fragmented mp4: moov header with track headers at start
+ * but no sample table, followed by N fragments, each containing
+ * track headers with sample tables followed by some data. Downstream
+ * does not need to be seekable if the 'streamable' flag is TRUE,
+ * as the final mfra and total duration will be omitted.
+ *
+ * - Fast-start mp4: the goal here is to create a file where the moov
+ * headers are at the beginning; what mp4mux will do is write all
+ * sample data into a temp file and build moov header plus sample
+ * tables in memory and then when EOS comes, it will push out the
+ * moov header plus sample tables at the beginning, followed by the
+ * mdat sample data at the end which is read in from the temp file
+ * Files created in this mode are better for streaming over the
+ * network, since the client doesn't have to seek to the end of the
+ * file to get the headers, but it requires copying all sample data
+ * out of the temp file at EOS, which can be expensive. Downstream does
+ * not need to be seekable, because of the use of the temp file.
+ *
+ * - Robust Muxing mode: In this mode, qtmux uses the reserved-max-duration
+ * and reserved-moov-update-period properties to reserve free space
+ * at the start of the file and periodically write the MOOV atom out
+ * to it. That means that killing the muxing at any point still
+ * results in a playable file, at the cost of wasting some amount of
+ * free space at the start of file. The approximate recording duration
+ * has to be known in advance to estimate how much free space to reserve
+ * for the moov, and the downstream must be seekable.
+ * If the moov header grows larger than the reserved space, an error
+ * is generated - so it's better to over-estimate the amount of space
+ * to reserve. To ensure the file is playable at any point, the moov
+ * is updated using a 'ping-pong' strategy, so the output is never in
+ * an invalid state.
+ */
+
+#ifndef GST_REMOVE_DEPRECATED
+enum
+{
+ DTS_METHOD_DD,
+ DTS_METHOD_REORDER,
+ DTS_METHOD_ASC
+};
+
+/* Lazily registers and returns the GType for the deprecated "dts-method"
+ * property enum.
+ *
+ * The original check-then-assign pattern is racy if two threads request the
+ * type concurrently: one thread could observe a partially published value.
+ * Use the g_once_init_enter()/g_once_init_leave() idiom, the standard GLib
+ * pattern for thread-safe lazy type registration. */
+static GType
+gst_qt_mux_dts_method_get_type (void)
+{
+  static gsize gst_qt_mux_dts_method = 0;
+
+  if (g_once_init_enter (&gst_qt_mux_dts_method)) {
+    static const GEnumValue dts_methods[] = {
+      {DTS_METHOD_DD, "delta/duration", "dd"},
+      {DTS_METHOD_REORDER, "reorder", "reorder"},
+      {DTS_METHOD_ASC, "ascending", "asc"},
+      {0, NULL, NULL},
+    };
+    GType tmp = g_enum_register_static ("GstQTMuxDtsMethods", dts_methods);
+
+    g_once_init_leave (&gst_qt_mux_dts_method, tmp);
+  }
+
+  return (GType) gst_qt_mux_dts_method;
+}
+
+#define GST_TYPE_QT_MUX_DTS_METHOD \
+ (gst_qt_mux_dts_method_get_type ())
+#endif
+
+/* Lazily registers and returns the GType for the "fragment-mode" property
+ * enum.
+ *
+ * As with the dts-method enum, the plain check-then-assign registration is
+ * not thread-safe; switch to the g_once_init_enter()/g_once_init_leave()
+ * idiom so concurrent first calls are serialized correctly. */
+static GType
+gst_qt_mux_fragment_mode_get_type (void)
+{
+  static gsize gst_qt_mux_fragment_mode = 0;
+
+  if (g_once_init_enter (&gst_qt_mux_fragment_mode)) {
+    static const GEnumValue gst_qt_mux_fragment_modes[] = {
+      {GST_QT_MUX_FRAGMENT_DASH_OR_MSS, "Dash or Smoothstreaming",
+          "dash-or-mss"},
+      {GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE,
+          "First MOOV Fragment Then Finalise", "first-moov-then-finalise"},
+      /* internal only */
+      /* {GST_QT_MUX_FRAGMENT_STREAMABLE, "streamable", "Streamable (ISML only. Deprecated elsewhere)"}, */
+      {0, NULL, NULL},
+    };
+    GType tmp = g_enum_register_static ("GstQTMuxFragmentMode",
+        gst_qt_mux_fragment_modes);
+
+    g_once_init_leave (&gst_qt_mux_fragment_mode, tmp);
+  }
+
+  return (GType) gst_qt_mux_fragment_mode;
+}
+
+#define GST_TYPE_QT_MUX_FRAGMENT_MODE \
+ (gst_qt_mux_fragment_mode_get_type ())
+
+enum
+{
+ PROP_PAD_0,
+ PROP_PAD_TRAK_TIMESCALE,
+};
+
+#define DEFAULT_PAD_TRAK_TIMESCALE 0
+
+G_DEFINE_TYPE (GstQTMuxPad, gst_qt_mux_pad, GST_TYPE_AGGREGATOR_PAD);
+
+/* GObject set_property vfunc for GstQTMuxPad.
+ * The only pad property is "trak-timescale"; access to it is serialized
+ * with the pad's object lock. */
+static void
+gst_qt_mux_pad_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstQTMuxPad *qtpad = GST_QT_MUX_PAD_CAST (object);
+
+  GST_OBJECT_LOCK (qtpad);
+  if (prop_id == PROP_PAD_TRAK_TIMESCALE) {
+    qtpad->trak_timescale = g_value_get_uint (value);
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+  GST_OBJECT_UNLOCK (qtpad);
+}
+
+/* GObject get_property vfunc for GstQTMuxPad.
+ * Mirrors set_property: reads "trak-timescale" under the pad's object lock. */
+static void
+gst_qt_mux_pad_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstQTMuxPad *qtpad = GST_QT_MUX_PAD_CAST (object);
+
+  GST_OBJECT_LOCK (qtpad);
+  if (prop_id == PROP_PAD_TRAK_TIMESCALE) {
+    g_value_set_uint (value, qtpad->trak_timescale);
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+  GST_OBJECT_UNLOCK (qtpad);
+}
+
+/* Class init for GstQTMuxPad: hooks up the property accessors and installs
+ * the per-pad "trak-timescale" property. */
+static void
+gst_qt_mux_pad_class_init (GstQTMuxPadClass * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+  object_class->set_property = gst_qt_mux_pad_set_property;
+  object_class->get_property = gst_qt_mux_pad_get_property;
+
+  g_object_class_install_property (object_class, PROP_PAD_TRAK_TIMESCALE,
+      g_param_spec_uint ("trak-timescale", "Track timescale",
+          "Timescale to use for this pad's trak (units per second, 0 is automatic)",
+          0, G_MAXUINT32, DEFAULT_PAD_TRAK_TIMESCALE,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+}
+
+/* Instance init for GstQTMuxPad: seed the trak timescale with its default
+ * value (0 == automatic). */
+static void
+gst_qt_mux_pad_init (GstQTMuxPad * pad)
+{
+  pad->trak_timescale = DEFAULT_PAD_TRAK_TIMESCALE;
+}
+
+/* Thread-safe read of the pad's "trak-timescale" property. */
+static guint32
+gst_qt_mux_pad_get_timescale (GstQTMuxPad * pad)
+{
+  guint32 ts;
+
+  GST_OBJECT_LOCK (pad);
+  ts = pad->trak_timescale;
+  GST_OBJECT_UNLOCK (pad);
+
+  return ts;
+}
+
+/* QTMux signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_MOVIE_TIMESCALE,
+ PROP_TRAK_TIMESCALE,
+ PROP_FAST_START,
+ PROP_FAST_START_TEMP_FILE,
+ PROP_MOOV_RECOV_FILE,
+ PROP_FRAGMENT_DURATION,
+ PROP_RESERVED_MAX_DURATION,
+ PROP_RESERVED_DURATION_REMAINING,
+ PROP_RESERVED_MOOV_UPDATE_PERIOD,
+ PROP_RESERVED_BYTES_PER_SEC,
+ PROP_RESERVED_PREFILL,
+#ifndef GST_REMOVE_DEPRECATED
+ PROP_DTS_METHOD,
+#endif
+ PROP_DO_CTTS,
+ PROP_INTERLEAVE_BYTES,
+ PROP_INTERLEAVE_TIME,
+ PROP_FORCE_CHUNKS,
+ PROP_MAX_RAW_AUDIO_DRIFT,
+ PROP_START_GAP_THRESHOLD,
+ PROP_FORCE_CREATE_TIMECODE_TRAK,
+ PROP_FRAGMENT_MODE,
+};
+
+/* some spare for header size as well */
+#define MDAT_LARGE_FILE_LIMIT ((guint64) 1024 * 1024 * 1024 * 2)
+
+#define DEFAULT_MOVIE_TIMESCALE 0
+#define DEFAULT_TRAK_TIMESCALE 0
+#define DEFAULT_DO_CTTS TRUE
+#define DEFAULT_FAST_START FALSE
+#define DEFAULT_FAST_START_TEMP_FILE NULL
+#define DEFAULT_MOOV_RECOV_FILE NULL
+#define DEFAULT_FRAGMENT_DURATION 0
+#define DEFAULT_STREAMABLE TRUE
+#ifndef GST_REMOVE_DEPRECATED
+#define DEFAULT_DTS_METHOD DTS_METHOD_REORDER
+#endif
+#define DEFAULT_RESERVED_MAX_DURATION GST_CLOCK_TIME_NONE
+#define DEFAULT_RESERVED_MOOV_UPDATE_PERIOD GST_CLOCK_TIME_NONE
+#define DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK 550
+#define DEFAULT_RESERVED_PREFILL FALSE
+#define DEFAULT_INTERLEAVE_BYTES 0
+#define DEFAULT_INTERLEAVE_TIME 250*GST_MSECOND
+#define DEFAULT_FORCE_CHUNKS (FALSE)
+#define DEFAULT_MAX_RAW_AUDIO_DRIFT 40 * GST_MSECOND
+#define DEFAULT_START_GAP_THRESHOLD 0
+#define DEFAULT_FORCE_CREATE_TIMECODE_TRAK FALSE
+#define DEFAULT_FRAGMENT_MODE GST_QT_MUX_FRAGMENT_DASH_OR_MSS
+
+static void gst_qt_mux_finalize (GObject * object);
+
+/* property functions */
+static void gst_qt_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_qt_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+/* pad functions */
+static GstPad *gst_qt_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_qt_mux_release_pad (GstElement * element, GstPad * pad);
+
+/* event */
+static gboolean gst_qt_mux_sink_event (GstAggregator * agg,
+ GstAggregatorPad * agg_pad, GstEvent * event);
+static GstFlowReturn gst_qt_mux_sink_event_pre_queue (GstAggregator * self,
+ GstAggregatorPad * aggpad, GstEvent * event);
+
+/* aggregator */
+static GstAggregatorPad *gst_qt_mux_create_new_pad (GstAggregator * self,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps);
+static GstFlowReturn gst_qt_mux_aggregate (GstAggregator * agg,
+ gboolean timeout);
+static GstBuffer *gst_qt_mux_clip_running_time (GstAggregator * agg,
+ GstAggregatorPad * agg_pad, GstBuffer * buf);
+static gboolean gst_qt_mux_start (GstAggregator * agg);
+static gboolean gst_qt_mux_stop (GstAggregator * agg);
+
+/* internal */
+
+static GstFlowReturn gst_qt_mux_add_buffer (GstQTMux * qtmux, GstQTMuxPad * pad,
+ GstBuffer * buf);
+
+static GstFlowReturn
+gst_qt_mux_robust_recording_rewrite_moov (GstQTMux * qtmux);
+
+static void gst_qt_mux_update_global_statistics (GstQTMux * qtmux);
+static void gst_qt_mux_update_edit_lists (GstQTMux * qtmux);
+
+static GstFlowReturn gst_qtmux_push_mdat_stored_buffers (GstQTMux * qtmux);
+
+static GstElementClass *parent_class = NULL;
+
+/* GType base_init: per-subclass setup shared by all qtmux flavors
+ * (mp4mux, 3gppmux, mj2mux, ismlmux, ...).
+ *
+ * The flavor-specific parameters (caps, long name, format id) are attached
+ * to the subclass type as qdata under GST_QT_MUX_PARAMS_QDATA when the type
+ * is registered; without them (i.e. for the abstract base type) there is
+ * nothing to do. */
+static void
+gst_qt_mux_base_init (gpointer g_class)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+  GstQTMuxClass *klass = (GstQTMuxClass *) g_class;
+  GstQTMuxClassParams *params;
+  GstPadTemplate *videosinktempl, *audiosinktempl, *subtitlesinktempl,
+      *captionsinktempl;
+  GstPadTemplate *srctempl;
+  gchar *longname, *description;
+
+  params =
+      (GstQTMuxClassParams *) g_type_get_qdata (G_OBJECT_CLASS_TYPE (g_class),
+      GST_QT_MUX_PARAMS_QDATA);
+  if (!params)
+    return;
+
+  /* construct the element details struct */
+  longname = g_strdup_printf ("%s Muxer", params->prop->long_name);
+  description = g_strdup_printf ("Multiplex audio and video into a %s file",
+      params->prop->long_name);
+  gst_element_class_set_metadata (element_class, longname,
+      "Codec/Muxer", description,
+      "Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>");
+  g_free (longname);
+  g_free (description);
+
+  /* pad templates: one always-present src pad, plus request sink pads for
+   * each media kind the flavor supports (a NULL caps entry means that kind
+   * is not supported by this flavor) */
+  srctempl = gst_pad_template_new_with_gtype ("src", GST_PAD_SRC,
+      GST_PAD_ALWAYS, params->src_caps, GST_TYPE_AGGREGATOR_PAD);
+  gst_element_class_add_pad_template (element_class, srctempl);
+
+  if (params->audio_sink_caps) {
+    audiosinktempl = gst_pad_template_new_with_gtype ("audio_%u",
+        GST_PAD_SINK, GST_PAD_REQUEST, params->audio_sink_caps,
+        GST_TYPE_QT_MUX_PAD);
+    gst_element_class_add_pad_template (element_class, audiosinktempl);
+  }
+
+  if (params->video_sink_caps) {
+    videosinktempl = gst_pad_template_new_with_gtype ("video_%u",
+        GST_PAD_SINK, GST_PAD_REQUEST, params->video_sink_caps,
+        GST_TYPE_QT_MUX_PAD);
+    gst_element_class_add_pad_template (element_class, videosinktempl);
+  }
+
+  if (params->subtitle_sink_caps) {
+    subtitlesinktempl = gst_pad_template_new_with_gtype ("subtitle_%u",
+        GST_PAD_SINK, GST_PAD_REQUEST, params->subtitle_sink_caps,
+        GST_TYPE_QT_MUX_PAD);
+    gst_element_class_add_pad_template (element_class, subtitlesinktempl);
+  }
+
+  if (params->caption_sink_caps) {
+    captionsinktempl = gst_pad_template_new_with_gtype ("caption_%u",
+        GST_PAD_SINK, GST_PAD_REQUEST, params->caption_sink_caps,
+        GST_TYPE_QT_MUX_PAD);
+    gst_element_class_add_pad_template (element_class, captionsinktempl);
+  }
+
+  /* remember which container format this subclass produces */
+  klass->format = params->prop->format;
+}
+
+/* Class init: installs all muxer properties and wires up the GObject,
+ * GstElement and GstAggregator virtual methods.
+ *
+ * Fixes a handful of typos, including in the user-visible "fragment-mode"
+ * property blurb ("How to to write", "greather") and in its gtk-doc comment
+ * ("are produces", "any affect", "mocrosoft", "fo the first"). */
+static void
+gst_qt_mux_class_init (GstQTMuxClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstAggregatorClass *gstagg_class = GST_AGGREGATOR_CLASS (klass);
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  /* GObject vmethods */
+  gobject_class->finalize = gst_qt_mux_finalize;
+  gobject_class->get_property = gst_qt_mux_get_property;
+  gobject_class->set_property = gst_qt_mux_set_property;
+
+  g_object_class_install_property (gobject_class, PROP_MOVIE_TIMESCALE,
+      g_param_spec_uint ("movie-timescale", "Movie timescale",
+          "Timescale to use in the movie (units per second, 0 == default)",
+          0, G_MAXUINT32, DEFAULT_MOVIE_TIMESCALE,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_TRAK_TIMESCALE,
+      g_param_spec_uint ("trak-timescale", "Track timescale",
+          "Timescale to use for the tracks (units per second, 0 is automatic)",
+          0, G_MAXUINT32, DEFAULT_TRAK_TIMESCALE,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_DO_CTTS,
+      g_param_spec_boolean ("presentation-time",
+          "Include presentation-time info",
+          "Calculate and include presentation/composition time "
+          "(in addition to decoding time)", DEFAULT_DO_CTTS,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+#ifndef GST_REMOVE_DEPRECATED
+  g_object_class_install_property (gobject_class, PROP_DTS_METHOD,
+      g_param_spec_enum ("dts-method", "dts-method",
+          "Method to determine DTS time (DEPRECATED)",
+          GST_TYPE_QT_MUX_DTS_METHOD, DEFAULT_DTS_METHOD,
+          G_PARAM_DEPRECATED | G_PARAM_READWRITE | G_PARAM_CONSTRUCT |
+          G_PARAM_STATIC_STRINGS));
+#endif
+  g_object_class_install_property (gobject_class, PROP_FAST_START,
+      g_param_spec_boolean ("faststart", "Format file to faststart",
+          "If the file should be formatted for faststart (headers first)",
+          DEFAULT_FAST_START, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_FAST_START_TEMP_FILE,
+      g_param_spec_string ("faststart-file", "File to use for storing buffers",
+          "File that will be used temporarily to store data from the stream "
+          "when creating a faststart file. If null a filepath will be "
+          "created automatically", DEFAULT_FAST_START_TEMP_FILE,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS |
+          GST_PARAM_DOC_SHOW_DEFAULT));
+  g_object_class_install_property (gobject_class, PROP_MOOV_RECOV_FILE,
+      g_param_spec_string ("moov-recovery-file",
+          "File to store data for posterior moov atom recovery",
+          "File to be used to store "
+          "data for moov atom making movie file recovery possible in case "
+          "of a crash during muxing. Null for disabled. (Experimental)",
+          DEFAULT_MOOV_RECOV_FILE,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  /* ISML defaults to fragmented output, hence the flavor-dependent default */
+  g_object_class_install_property (gobject_class, PROP_FRAGMENT_DURATION,
+      g_param_spec_uint ("fragment-duration", "Fragment duration",
+          "Fragment durations in ms (produce a fragmented file if > 0)",
+          0, G_MAXUINT32, klass->format == GST_QT_MUX_FORMAT_ISML ?
+          2000 : DEFAULT_FRAGMENT_DURATION,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_RESERVED_MAX_DURATION,
+      g_param_spec_uint64 ("reserved-max-duration",
+          "Reserved maximum file duration (ns)",
+          "When set to a value > 0, reserves space for index tables at the "
+          "beginning of the file.",
+          0, G_MAXUINT64, DEFAULT_RESERVED_MAX_DURATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class,
+      PROP_RESERVED_DURATION_REMAINING,
+      g_param_spec_uint64 ("reserved-duration-remaining",
+          "Report the approximate amount of remaining recording space (ns)",
+          "Reports the approximate amount of remaining moov header space "
+          "reserved using reserved-max-duration", 0, G_MAXUINT64, 0,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class,
+      PROP_RESERVED_MOOV_UPDATE_PERIOD,
+      g_param_spec_uint64 ("reserved-moov-update-period",
+          "Interval at which to update index tables (ns)",
+          "When used with reserved-max-duration, periodically updates the "
+          "index tables with information muxed so far.", 0, G_MAXUINT64,
+          DEFAULT_RESERVED_MOOV_UPDATE_PERIOD,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_RESERVED_BYTES_PER_SEC,
+      g_param_spec_uint ("reserved-bytes-per-sec",
+          "Reserved MOOV bytes per second, per track",
+          "Multiplier for converting reserved-max-duration into bytes of header to reserve, per second, per track",
+          0, 10000, DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_RESERVED_PREFILL,
+      g_param_spec_boolean ("reserved-prefill",
+          "Reserved Prefill Samples Table",
+          "Prefill samples table of reserved duration",
+          DEFAULT_RESERVED_PREFILL,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_INTERLEAVE_BYTES,
+      g_param_spec_uint64 ("interleave-bytes", "Interleave (bytes)",
+          "Interleave between streams in bytes",
+          0, G_MAXUINT64, DEFAULT_INTERLEAVE_BYTES,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_INTERLEAVE_TIME,
+      g_param_spec_uint64 ("interleave-time", "Interleave (time)",
+          "Interleave between streams in nanoseconds",
+          0, G_MAXUINT64, DEFAULT_INTERLEAVE_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_FORCE_CHUNKS,
+      g_param_spec_boolean ("force-chunks", "Force Chunks",
+          "Force multiple chunks to be created even for single-stream files",
+          DEFAULT_FORCE_CHUNKS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_MAX_RAW_AUDIO_DRIFT,
+      g_param_spec_uint64 ("max-raw-audio-drift", "Max Raw Audio Drift",
+          "Maximum allowed drift of raw audio samples vs. timestamps in nanoseconds",
+          0, G_MAXUINT64, DEFAULT_MAX_RAW_AUDIO_DRIFT,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_START_GAP_THRESHOLD,
+      g_param_spec_uint64 ("start-gap-threshold", "Start Gap Threshold",
+          "Threshold for creating an edit list for gaps at the start in nanoseconds",
+          0, G_MAXUINT64, DEFAULT_START_GAP_THRESHOLD,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class,
+      PROP_FORCE_CREATE_TIMECODE_TRAK,
+      g_param_spec_boolean ("force-create-timecode-trak",
+          "Force Create Timecode Trak",
+          "Create a timecode trak even in unsupported flavors",
+          DEFAULT_FORCE_CREATE_TIMECODE_TRAK,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstBaseQTMux:fragment-mode:
+   *
+   * Influences how fragmented files are produced. Only has any effect when
+   * the 'fragment-duration' property is set to a value greater than '0'
+   *
+   * Currently, two options exist:
+   * - "dash-or-mss": for the original fragmented mode that supports dash or
+   * microsoft smoothstreaming with a single input stream
+   * - "first-moov-then-finalise" is a fragmented mode that will start with a
+   * self-contained 'moov' atom for the first fragment, then produce fragments.
+   * When the file is finalised, the initial 'moov' is invalidated and a
+   * new 'moov' is written covering the entire file.
+   *
+   * Since: 1.20
+   */
+  g_object_class_install_property (gobject_class, PROP_FRAGMENT_MODE,
+      g_param_spec_enum ("fragment-mode", "Fragment Mode",
+          "How to write fragments to the file. Only used when "
+          "\'fragment-duration\' is greater than 0",
+          GST_TYPE_QT_MUX_FRAGMENT_MODE, DEFAULT_FRAGMENT_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /* GstElement vmethods */
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_qt_mux_request_new_pad);
+  gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_qt_mux_release_pad);
+
+  /* GstAggregator vmethods */
+  gstagg_class->sink_event = gst_qt_mux_sink_event;
+  gstagg_class->sink_event_pre_queue = gst_qt_mux_sink_event_pre_queue;
+  gstagg_class->aggregate = gst_qt_mux_aggregate;
+  gstagg_class->clip = gst_qt_mux_clip_running_time;
+  gstagg_class->start = gst_qt_mux_start;
+  gstagg_class->stop = gst_qt_mux_stop;
+  gstagg_class->create_new_pad = gst_qt_mux_create_new_pad;
+
+  gst_type_mark_as_plugin_api (GST_TYPE_QT_MUX_PAD, 0);
+  gst_type_mark_as_plugin_api (GST_TYPE_QT_MUX_DTS_METHOD, 0);
+  gst_type_mark_as_plugin_api (GST_TYPE_QT_MUX_FRAGMENT_MODE, 0);
+}
+
+/* Resets a pad's per-stream muxing state back to its pristine state:
+ * scalar counters/timestamps are zeroed or set to NONE, owned references
+ * (buffers, caps, tags, traf, samples, timecode, adapter) are released,
+ * and pointers whose referents are owned elsewhere (trak, tc_trak, tfra)
+ * are merely cleared, not freed. */
+static void
+gst_qt_mux_pad_reset (GstQTMuxPad * qtpad)
+{
+  qtpad->fourcc = 0;
+  qtpad->is_out_of_order = FALSE;
+  qtpad->sample_size = 0;
+  qtpad->sync = FALSE;
+  qtpad->last_dts = 0;
+  qtpad->sample_offset = 0;
+  qtpad->dts_adjustment = GST_CLOCK_TIME_NONE;
+  qtpad->first_ts = GST_CLOCK_TIME_NONE;
+  qtpad->first_dts = GST_CLOCK_TIME_NONE;
+  qtpad->prepare_buf_func = NULL;
+  qtpad->create_empty_buffer = NULL;
+  qtpad->avg_bitrate = 0;
+  qtpad->max_bitrate = 0;
+  qtpad->total_duration = 0;
+  qtpad->total_bytes = 0;
+  qtpad->sparse = FALSE;
+  qtpad->first_cc_sample_size = 0;
+  qtpad->flow_status = GST_FLOW_OK;
+  qtpad->warned_empty_duration = FALSE;
+
+  /* drop the held reference (if any) and clear the pointer */
+  gst_buffer_replace (&qtpad->last_buf, NULL);
+
+  gst_caps_replace (&qtpad->configured_caps, NULL);
+
+  if (qtpad->tags) {
+    gst_tag_list_unref (qtpad->tags);
+    qtpad->tags = NULL;
+  }
+
+  /* reference owned elsewhere */
+  qtpad->trak = NULL;
+  qtpad->tc_trak = NULL;
+
+  if (qtpad->traf) {
+    atom_traf_free (qtpad->traf);
+    qtpad->traf = NULL;
+  }
+  atom_array_clear (&qtpad->fragment_buffers);
+  if (qtpad->samples)
+    g_array_unref (qtpad->samples);
+  qtpad->samples = NULL;
+
+  /* reference owned elsewhere */
+  qtpad->tfra = NULL;
+
+  qtpad->first_pts = GST_CLOCK_TIME_NONE;
+  /* -1 marks "no pending timecode sample position" */
+  qtpad->tc_pos = -1;
+  if (qtpad->first_tc)
+    gst_video_time_code_free (qtpad->first_tc);
+  qtpad->first_tc = NULL;
+
+  if (qtpad->raw_audio_adapter)
+    gst_object_unref (qtpad->raw_audio_adapter);
+  qtpad->raw_audio_adapter = NULL;
+}
+
+/*
+ * Takes GstQTMux back to its initial state: frees all atoms and temp files
+ * built so far and resets every pad.  When @alloc is TRUE a fresh moov atom
+ * (with one trak per existing sink pad) is created so muxing can start
+ * over, as request_new_pad would have set it up.
+ */
+static void
+gst_qt_mux_reset (GstQTMux * qtmux, gboolean alloc)
+{
+  GSList *walk;
+  GList *l;
+
+  qtmux->state = GST_QT_MUX_STATE_NONE;
+  qtmux->header_size = 0;
+  qtmux->mdat_size = 0;
+  qtmux->moov_pos = 0;
+  qtmux->mdat_pos = 0;
+  qtmux->longest_chunk = GST_CLOCK_TIME_NONE;
+  qtmux->fragment_sequence = 0;
+
+  if (qtmux->ftyp) {
+    atom_ftyp_free (qtmux->ftyp);
+    qtmux->ftyp = NULL;
+  }
+  if (qtmux->moov) {
+    atom_moov_free (qtmux->moov);
+    qtmux->moov = NULL;
+  }
+  if (qtmux->mfra) {
+    atom_mfra_free (qtmux->mfra);
+    qtmux->mfra = NULL;
+  }
+  /* the faststart temp file is scratch data: close and delete it */
+  if (qtmux->fast_start_file) {
+    fclose (qtmux->fast_start_file);
+    g_remove (qtmux->fast_start_file_path);
+    qtmux->fast_start_file = NULL;
+  }
+  if (qtmux->moov_recov_file) {
+    fclose (qtmux->moov_recov_file);
+    qtmux->moov_recov_file = NULL;
+  }
+  /* each extra atom carries its own free function */
+  for (walk = qtmux->extra_atoms; walk; walk = g_slist_next (walk)) {
+    AtomInfo *ainfo = (AtomInfo *) walk->data;
+    ainfo->free_func (ainfo->atom);
+    g_free (ainfo);
+  }
+  g_slist_free (qtmux->extra_atoms);
+  qtmux->extra_atoms = NULL;
+
+  /* object lock protects the sink pad list while we walk it */
+  GST_OBJECT_LOCK (qtmux);
+  gst_tag_setter_reset_tags (GST_TAG_SETTER (qtmux));
+
+  /* reset pad data */
+  for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+    GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+    gst_qt_mux_pad_reset (qtpad);
+
+    /* hm, moov_free above yanked the traks away from us,
+     * so do not free, but do clear */
+    qtpad->trak = NULL;
+  }
+
+  if (alloc) {
+    qtmux->moov = atom_moov_new (qtmux->context);
+    /* ensure all is as nice and fresh as request_new_pad would provide it */
+    for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+      GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+      qtpad->trak = atom_trak_new (qtmux->context);
+      atom_moov_add_trak (qtmux->moov, qtpad->trak);
+    }
+  }
+  GST_OBJECT_UNLOCK (qtmux);
+
+  g_list_free_full (qtmux->output_buffers, (GDestroyNotify) gst_buffer_unref);
+  qtmux->output_buffers = NULL;
+
+  /* chunking state */
+  qtmux->current_pad = NULL;
+  qtmux->current_chunk_size = 0;
+  qtmux->current_chunk_duration = 0;
+  qtmux->current_chunk_offset = -1;
+
+  /* robust-muxing (reserved moov space) state */
+  qtmux->reserved_moov_size = 0;
+  qtmux->last_moov_update = GST_CLOCK_TIME_NONE;
+  qtmux->muxed_since_last_update = 0;
+  qtmux->reserved_duration_remaining = GST_CLOCK_TIME_NONE;
+}
+
+/* GstAggregator clip vfunc: converts the buffer's PTS and DTS from segment
+ * time to running time.  Buffers whose PTS maps outside the segment are
+ * dropped (unreffed, NULL returned).  Because GstBuffer cannot carry a
+ * negative DTS, a negative running-time DTS is stored signed in qtpad->dts
+ * and the buffer's DTS field is set to GST_CLOCK_TIME_NONE. */
+static GstBuffer *
+gst_qt_mux_clip_running_time (GstAggregator * agg,
+    GstAggregatorPad * agg_pad, GstBuffer * buf)
+{
+  GstQTMuxPad *qtpad = GST_QT_MUX_PAD (agg_pad);
+  GstBuffer *outbuf = buf;
+
+  /* invalid left alone and passed */
+  if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS_OR_PTS (buf)))) {
+    GstClockTime time;
+    GstClockTime buf_dts, abs_dts;
+    gint dts_sign;
+
+    time = GST_BUFFER_PTS (buf);
+
+    if (GST_CLOCK_TIME_IS_VALID (time)) {
+      time =
+          gst_segment_to_running_time (&agg_pad->segment, GST_FORMAT_TIME,
+          time);
+      if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (time))) {
+        GST_DEBUG_OBJECT (agg_pad, "clipping buffer on pad outside segment %"
+            GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (buf)));
+        gst_buffer_unref (buf);
+        return NULL;
+      }
+    }
+
+    GST_LOG_OBJECT (agg_pad, "buffer pts %" GST_TIME_FORMAT " -> %"
+        GST_TIME_FORMAT " running time",
+        GST_TIME_ARGS (GST_BUFFER_PTS (buf)), GST_TIME_ARGS (time));
+    /* we are about to rewrite the timestamps, so get a writable buffer */
+    outbuf = gst_buffer_make_writable (buf);
+    GST_BUFFER_PTS (outbuf) = time;
+
+    /* _full() reports the magnitude in abs_dts and the sign separately,
+     * so negative running times can be represented */
+    dts_sign = gst_segment_to_running_time_full (&agg_pad->segment,
+        GST_FORMAT_TIME, GST_BUFFER_DTS (outbuf), &abs_dts);
+    buf_dts = GST_BUFFER_DTS (outbuf);
+    if (dts_sign > 0) {
+      /* positive running time: store on buffer and cache on the pad */
+      GST_BUFFER_DTS (outbuf) = abs_dts;
+      qtpad->dts = abs_dts;
+    } else if (dts_sign < 0) {
+      /* negative running time: buffers cannot hold it, keep it on the pad */
+      GST_BUFFER_DTS (outbuf) = GST_CLOCK_TIME_NONE;
+      qtpad->dts = -((gint64) abs_dts);
+    } else {
+      /* conversion failed / no DTS */
+      GST_BUFFER_DTS (outbuf) = GST_CLOCK_TIME_NONE;
+      qtpad->dts = GST_CLOCK_STIME_NONE;
+    }
+
+    GST_LOG_OBJECT (agg_pad, "buffer dts %" GST_TIME_FORMAT " -> %"
+        GST_STIME_FORMAT " running time", GST_TIME_ARGS (buf_dts),
+        GST_STIME_ARGS (qtpad->dts));
+  }
+
+  return outbuf;
+}
+
/* Instance init: set all properties to their defaults, create the atoms
 * context and bring the internal muxing state to its initial values. */
static void
gst_qt_mux_init (GstQTMux * qtmux, GstQTMuxClass * qtmux_klass)
{
  /* properties set to default upon construction */

  qtmux->reserved_max_duration = DEFAULT_RESERVED_MAX_DURATION;
  qtmux->reserved_moov_update_period = DEFAULT_RESERVED_MOOV_UPDATE_PERIOD;
  qtmux->reserved_bytes_per_sec_per_trak =
      DEFAULT_RESERVED_BYTES_PER_SEC_PER_TRAK;
  qtmux->interleave_bytes = DEFAULT_INTERLEAVE_BYTES;
  qtmux->interleave_time = DEFAULT_INTERLEAVE_TIME;
  qtmux->force_chunks = DEFAULT_FORCE_CHUNKS;
  qtmux->max_raw_audio_drift = DEFAULT_MAX_RAW_AUDIO_DRIFT;
  qtmux->start_gap_threshold = DEFAULT_START_GAP_THRESHOLD;
  qtmux->force_create_timecode_trak = DEFAULT_FORCE_CREATE_TIMECODE_TRAK;

  /* always need this; must come after force_create_timecode_trak is set,
   * as the context creation reads it */
  qtmux->context =
      atoms_context_new (gst_qt_mux_map_format_to_flavor (qtmux_klass->format),
      qtmux->force_create_timecode_trak);

  /* internals to initial state; TRUE = also allocate the fresh moov/traks */
  gst_qt_mux_reset (qtmux, TRUE);
}
+
+
/* GObject finalize: release all muxing state (reset with alloc=FALSE so
 * nothing is re-created), free the property strings, then drop the atoms
 * context last since the reset still operates on atoms created from it. */
static void
gst_qt_mux_finalize (GObject * object)
{
  GstQTMux *qtmux = GST_QT_MUX_CAST (object);

  gst_qt_mux_reset (qtmux, FALSE);

  g_free (qtmux->fast_start_file_path);
  g_free (qtmux->moov_recov_file_path);

  atoms_context_free (qtmux->context);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
+static GstBuffer *
+gst_qt_mux_prepare_jpc_buffer (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+{
+ GstBuffer *newbuf;
+ GstMapInfo map;
+ gsize size;
+
+ GST_LOG_OBJECT (qtmux, "Preparing jpc buffer");
+
+ if (buf == NULL)
+ return NULL;
+
+ size = gst_buffer_get_size (buf);
+ newbuf = gst_buffer_new_and_alloc (size + 8);
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_ALL, 8, size);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+ GST_WRITE_UINT32_BE (map.data, map.size);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_jp2c);
+
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ return newbuf;
+}
+
+static gsize
+extract_608_field_from_s334_1a (const guint8 * ccdata, gsize ccdata_size,
+ guint field, guint8 ** res)
+{
+ guint8 *storage;
+ gsize storage_size = 128;
+ gsize i, res_size = 0;
+
+ storage = g_malloc0 (storage_size);
+
+ /* Iterate over the ccdata and put the corresponding tuples for the given field
+ * in the storage */
+ for (i = 0; i < ccdata_size; i += 3) {
+ if ((field == 1 && (ccdata[i * 3] & 0x80)) ||
+ (field == 2 && !(ccdata[i * 3] & 0x80))) {
+ GST_DEBUG ("Storing matching cc for field %d : 0x%02x 0x%02x", field,
+ ccdata[i * 3 + 1], ccdata[i * 3 + 2]);
+ if (res_size >= storage_size) {
+ storage_size += 128;
+ storage = g_realloc (storage, storage_size);
+ }
+ storage[res_size] = ccdata[i * 3 + 1];
+ storage[res_size + 1] = ccdata[i * 3 + 2];
+ res_size += 2;
+ }
+ }
+
+ if (res_size == 0) {
+ g_free (storage);
+ *res = NULL;
+ return 0;
+ }
+
+ *res = storage;
+ return res_size;
+}
+
+
+static GstBuffer *
+gst_qt_mux_prepare_caption_buffer (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+{
+ GstBuffer *newbuf = NULL;
+ GstMapInfo map, inmap;
+ gsize size;
+ gboolean in_prefill;
+
+ if (buf == NULL)
+ return NULL;
+
+ in_prefill = (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL);
+
+ size = gst_buffer_get_size (buf);
+ gst_buffer_map (buf, &inmap, GST_MAP_READ);
+
+ GST_LOG_OBJECT (qtmux,
+ "Preparing caption buffer %" GST_FOURCC_FORMAT " size:%" G_GSIZE_FORMAT,
+ GST_FOURCC_ARGS (qtpad->fourcc), size);
+
+ switch (qtpad->fourcc) {
+ case FOURCC_c608:
+ {
+ guint8 *cdat, *cdt2;
+ gsize cdat_size, cdt2_size, total_size = 0;
+ gsize write_offs = 0;
+
+ cdat_size =
+ extract_608_field_from_s334_1a (inmap.data, inmap.size, 1, &cdat);
+ cdt2_size =
+ extract_608_field_from_s334_1a (inmap.data, inmap.size, 2, &cdt2);
+
+ if (cdat_size)
+ total_size += cdat_size + 8;
+ if (cdt2_size)
+ total_size += cdt2_size + 8;
+ if (total_size == 0) {
+ GST_DEBUG_OBJECT (qtmux, "No 608 data ?");
+ /* FIXME : We might want to *always* store something, even if
+ * it's "empty" CC (i.e. 0x80 0x80) */
+ break;
+ }
+
+ newbuf = gst_buffer_new_and_alloc (in_prefill ? 20 : total_size);
+ /* Let's copy over all metadata and not the memory */
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, size);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+ if (cdat_size || in_prefill) {
+ GST_WRITE_UINT32_BE (map.data, in_prefill ? 10 : cdat_size + 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_cdat);
+ if (cdat_size)
+ memcpy (map.data + 8, cdat, in_prefill ? 2 : cdat_size);
+ else {
+ /* Write 'empty' CC */
+ map.data[8] = 0x80;
+ map.data[9] = 0x80;
+ }
+ write_offs = in_prefill ? 10 : cdat_size + 8;
+ if (cdat_size)
+ g_free (cdat);
+ }
+
+ if (cdt2_size || in_prefill) {
+ GST_WRITE_UINT32_BE (map.data + write_offs,
+ in_prefill ? 10 : cdt2_size + 8);
+ GST_WRITE_UINT32_LE (map.data + write_offs + 4, FOURCC_cdt2);
+ if (cdt2_size)
+ memcpy (map.data + write_offs + 8, cdt2, in_prefill ? 2 : cdt2_size);
+ else {
+ /* Write 'empty' CC */
+ map.data[write_offs + 8] = 0x80;
+ map.data[write_offs + 9] = 0x80;
+ }
+ if (cdt2_size)
+ g_free (cdt2);
+ }
+ gst_buffer_unmap (newbuf, &map);
+ break;
+ }
+ break;
+ case FOURCC_c708:
+ {
+ gsize actual_size;
+
+ /* Take the whole CDP */
+ if (in_prefill) {
+ if (size > qtpad->first_cc_sample_size) {
+ GST_ELEMENT_WARNING (qtmux, RESOURCE, WRITE,
+ ("Truncating too big CEA708 sample (%" G_GSIZE_FORMAT " > %u)",
+ size, qtpad->first_cc_sample_size), (NULL));
+ } else if (size < qtpad->first_cc_sample_size) {
+ GST_ELEMENT_WARNING (qtmux, RESOURCE, WRITE,
+ ("Padding too small CEA708 sample (%" G_GSIZE_FORMAT " < %u)",
+ size, qtpad->first_cc_sample_size), (NULL));
+ }
+
+ actual_size = MIN (qtpad->first_cc_sample_size, size);
+ } else {
+ actual_size = size;
+ }
+
+ newbuf = gst_buffer_new_and_alloc (actual_size + 8);
+
+ /* Let's copy over all metadata and not the memory */
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, -1);
+
+ gst_buffer_map (newbuf, &map, GST_MAP_WRITE);
+
+ GST_WRITE_UINT32_BE (map.data, actual_size + 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_ccdp);
+ memcpy (map.data + 8, inmap.data, actual_size);
+
+ gst_buffer_unmap (newbuf, &map);
+ break;
+ }
+ default:
+ /* theoretically this should never happen, but let's keep this here in case */
+ GST_WARNING_OBJECT (qtmux, "Unknown caption format");
+ break;
+ }
+
+ gst_buffer_unmap (buf, &inmap);
+ gst_buffer_unref (buf);
+
+ return newbuf;
+}
+
+static GstBuffer *
+gst_qt_mux_prepare_tx3g_buffer (GstQTMuxPad * qtpad, GstBuffer * buf,
+ GstQTMux * qtmux)
+{
+ GstBuffer *newbuf;
+ GstMapInfo frommap;
+ GstMapInfo tomap;
+ gsize size;
+ const guint8 *dataend;
+
+ GST_LOG_OBJECT (qtmux, "Preparing tx3g buffer %" GST_PTR_FORMAT, buf);
+
+ if (buf == NULL)
+ return NULL;
+
+ gst_buffer_map (buf, &frommap, GST_MAP_READ);
+
+ dataend = memchr (frommap.data, 0, frommap.size);
+ size = dataend ? dataend - frommap.data : frommap.size;
+ newbuf = gst_buffer_new_and_alloc (size + 2);
+
+ gst_buffer_map (newbuf, &tomap, GST_MAP_WRITE);
+
+ GST_WRITE_UINT16_BE (tomap.data, size);
+ memcpy (tomap.data + 2, frommap.data, size);
+
+ gst_buffer_unmap (newbuf, &tomap);
+ gst_buffer_unmap (buf, &frommap);
+
+ gst_buffer_copy_into (newbuf, buf, GST_BUFFER_COPY_METADATA, 0, size);
+
+ /* gst_buffer_copy_into is trying to be too clever and
+ * won't copy duration when size is different */
+ GST_BUFFER_DURATION (newbuf) = GST_BUFFER_DURATION (buf);
+
+ gst_buffer_unref (buf);
+
+ return newbuf;
+}
+
/* Attach the AC-3 specific ('dac3') extension atom, built from the fields
 * parsed out of the first frame's bit stream information, to the pad's
 * sample table entry.  Requires that the pad already has a trak_ste. */
static void
gst_qt_mux_pad_add_ac3_extension (GstQTMux * qtmux, GstQTMuxPad * qtpad,
    guint8 fscod, guint8 frmsizcod, guint8 bsid, guint8 bsmod, guint8 acmod,
    guint8 lfe_on)
{
  AtomInfo *ext;

  g_return_if_fail (qtpad->trak_ste);

  ext = build_ac3_extension (fscod, bsid, bsmod, acmod, lfe_on, frmsizcod >> 1);        /* bitrate_code is inside frmsizcod */

  sample_table_entry_add_ext_atom (qtpad->trak_ste, ext);
}
+
/* Prepare-buffer hook for AC-3 pads: scan the first buffer(s) for a sync
 * word (0x0b77), parse the BSI fields of that frame and use them to build
 * the 'dac3' extension atom.  The buffer itself is passed through
 * unmodified.  Once the fields have been parsed the hook uninstalls
 * itself, since they are expected to stay constant for the whole stream. */
static GstBuffer *
gst_qt_mux_prepare_parse_ac3_frame (GstQTMuxPad * qtpad, GstBuffer * buf,
    GstQTMux * qtmux)
{
  GstMapInfo map;
  GstByteReader reader;
  guint off;

  if (!gst_buffer_map (buf, &map, GST_MAP_READ)) {
    GST_WARNING_OBJECT (qtpad, "Failed to map buffer");
    return buf;
  }

  /* too short to contain even the sync + CRC + BSI start */
  if (G_UNLIKELY (map.size < 8))
    goto done;

  gst_byte_reader_init (&reader, map.data, map.size);
  /* look for the 16-bit AC-3 sync word in the top half of a 32-bit window */
  off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffff0000, 0x0b770000,
      0, map.size);

  /* masked_scan returns (guint) -1 when no match was found */
  if (off != -1) {
    GstBitReader bits;
    guint8 fscod, frmsizcod, bsid, bsmod, acmod, lfe_on;

    GST_DEBUG_OBJECT (qtpad, "Found ac3 sync point at offset: %u", off);

    gst_bit_reader_init (&bits, map.data, map.size);

    /* off + sync + crc */
    gst_bit_reader_skip_unchecked (&bits, off * 8 + 16 + 16);

    /* fixed-position BSI fields (see AC-3 syncinfo/bsi syntax) */
    fscod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 2);
    frmsizcod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 6);
    bsid = gst_bit_reader_get_bits_uint8_unchecked (&bits, 5);
    bsmod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3);
    acmod = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3);

    /* variable-length mix-level fields precede lfeon depending on acmod */
    if ((acmod & 0x1) && (acmod != 0x1))        /* 3 front channels */
      gst_bit_reader_skip_unchecked (&bits, 2);
    if ((acmod & 0x4))          /* if a surround channel exists */
      gst_bit_reader_skip_unchecked (&bits, 2);
    if (acmod == 0x2)           /* if in 2/0 mode */
      gst_bit_reader_skip_unchecked (&bits, 2);

    lfe_on = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1);

    gst_qt_mux_pad_add_ac3_extension (qtmux, qtpad, fscod, frmsizcod, bsid,
        bsmod, acmod, lfe_on);

    /* AC-3 spec says that those values should be constant for the
     * whole stream when muxed in mp4. We trust the input follows it */
    GST_DEBUG_OBJECT (qtpad, "Data parsed, removing "
        "prepare buffer function");
    qtpad->prepare_buf_func = NULL;
  }

done:
  gst_buffer_unmap (buf, &map);
  return buf;
}
+
+static GstBuffer *
+gst_qt_mux_create_empty_tx3g_buffer (GstQTMuxPad * qtpad, gint64 duration)
+{
+ guint8 *data;
+
+ data = g_malloc (2);
+ GST_WRITE_UINT16_BE (data, 0);
+
+ return gst_buffer_new_wrapped (data, 2);
+}
+
/* Generic iTunes-style tag writer: dispatch on the GStreamer tag's GType
 * and store it in @udta with the matching data-atom flavour.  @tag2, when
 * given, names a companion count tag (e.g. track-count for track-number)
 * packed into the same atom. */
static void
gst_qt_mux_add_mp4_tag (GstQTMux * qtmux, const GstTagList * list,
    AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
{
  switch (gst_tag_get_type (tag)) {
      /* strings */
    case G_TYPE_STRING:
    {
      gchar *str = NULL;

      if (!gst_tag_list_get_string (list, tag, &str) || !str)
        break;
      GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
          GST_FOURCC_ARGS (fourcc), str);
      atom_udta_add_str_tag (udta, fourcc, str);
      g_free (str);
      break;
    }
      /* double */
    case G_TYPE_DOUBLE:
    {
      gdouble value;

      if (!gst_tag_list_get_double (list, tag, &value))
        break;
      GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %u",
          GST_FOURCC_ARGS (fourcc), (gint) value);
      /* stored truncated to integer; 21 is the data-atom flags value used
       * for this tag flavour */
      atom_udta_add_uint_tag (udta, fourcc, 21, (gint) value);
      break;
    }
    case G_TYPE_UINT:
    {
      guint value = 0;
      if (tag2) {
        /* paired unsigned integers */
        guint count = 0;
        gboolean got_tag;

        /* one atom holding value and count; either tag alone suffices,
         * the missing one stays 0 */
        got_tag = gst_tag_list_get_uint (list, tag, &value);
        got_tag = gst_tag_list_get_uint (list, tag2, &count) || got_tag;
        if (!got_tag)
          break;
        GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %u/%u",
            GST_FOURCC_ARGS (fourcc), value, count);
        /* packed as value in the high 16 bits, count in the low 16 */
        atom_udta_add_uint_tag (udta, fourcc, 0,
            value << 16 | (count & 0xFFFF));
      } else {
        /* unpaired unsigned integers */
        if (!gst_tag_list_get_uint (list, tag, &value))
          break;
        GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %u",
            GST_FOURCC_ARGS (fourcc), value);
        atom_udta_add_uint_tag (udta, fourcc, 1, value);
      }
      break;
    }
    default:
      /* the tag tables only map string/double/uint tags to this function */
      g_assert_not_reached ();
      break;
  }
}
+
+static void
+gst_qt_mux_add_mp4_date (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+{
+ GDate *date = NULL;
+ GDateYear year;
+ GDateMonth month;
+ GDateDay day;
+ gchar *str;
+
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_DATE);
+
+ if (!gst_tag_list_get_date (list, tag, &date) || !date)
+ return;
+
+ year = g_date_get_year (date);
+ month = g_date_get_month (date);
+ day = g_date_get_day (date);
+
+ g_date_free (date);
+
+ if (year == G_DATE_BAD_YEAR && month == G_DATE_BAD_MONTH &&
+ day == G_DATE_BAD_DAY) {
+ GST_WARNING_OBJECT (qtmux, "invalid date in tag");
+ return;
+ }
+
+ str = g_strdup_printf ("%u-%u-%u", year, month, day);
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), str);
+ atom_udta_add_str_tag (udta, fourcc, str);
+ g_free (str);
+}
+
/* Store a cover-art sample tag ('covr').  Only JPEG and PNG previews are
 * supported; anything else is skipped with a warning. */
static void
gst_qt_mux_add_mp4_cover (GstQTMux * qtmux, const GstTagList * list,
    AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
{
  GValue value = { 0, };
  GstBuffer *buf;
  GstSample *sample;
  GstCaps *caps;
  GstStructure *structure;
  gint flags = 0;
  GstMapInfo map;

  g_return_if_fail (gst_tag_get_type (tag) == GST_TYPE_SAMPLE);

  if (!gst_tag_list_copy_value (&value, list, tag))
    return;

  /* the sample is owned by the GValue; only the value is unset at the end */
  sample = gst_value_get_sample (&value);

  if (!sample)
    goto done;

  buf = gst_sample_get_buffer (sample);
  if (!buf)
    goto done;

  caps = gst_sample_get_caps (sample);
  if (!caps) {
    GST_WARNING_OBJECT (qtmux, "preview image without caps");
    goto done;
  }

  GST_DEBUG_OBJECT (qtmux, "preview image caps %" GST_PTR_FORMAT, caps);

  /* data-atom flags value selecting the image format:
   * 13 = JPEG, 14 = PNG (iTunes-style 'covr' convention — presumably;
   * grounded only in this mapping, verify against the atoms code) */
  structure = gst_caps_get_structure (caps, 0);
  if (gst_structure_has_name (structure, "image/jpeg"))
    flags = 13;
  else if (gst_structure_has_name (structure, "image/png"))
    flags = 14;

  if (!flags) {
    GST_WARNING_OBJECT (qtmux, "preview image format not supported");
    goto done;
  }

  gst_buffer_map (buf, &map, GST_MAP_READ);
  GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT
      " -> image size %" G_GSIZE_FORMAT "", GST_FOURCC_ARGS (fourcc), map.size);
  atom_udta_add_tag (udta, fourcc, flags, map.data, map.size);
  gst_buffer_unmap (buf, &map);
done:
  g_value_unset (&value);
}
+
+static void
+gst_qt_mux_add_3gp_str (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+{
+ gchar *str = NULL;
+ guint number;
+
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_STRING);
+ g_return_if_fail (!tag2 || gst_tag_get_type (tag2) == G_TYPE_UINT);
+
+ if (!gst_tag_list_get_string (list, tag, &str) || !str)
+ return;
+
+ if (tag2)
+ if (!gst_tag_list_get_uint (list, tag2, &number))
+ tag2 = NULL;
+
+ if (!tag2) {
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), str);
+ atom_udta_add_3gp_str_tag (udta, fourcc, str);
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s/%d",
+ GST_FOURCC_ARGS (fourcc), str, number);
+ atom_udta_add_3gp_str_int_tag (udta, fourcc, str, number);
+ }
+
+ g_free (str);
+}
+
+static void
+gst_qt_mux_add_3gp_date (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+{
+ GDate *date = NULL;
+ GDateYear year;
+
+ g_return_if_fail (gst_tag_get_type (tag) == G_TYPE_DATE);
+
+ if (!gst_tag_list_get_date (list, tag, &date) || !date)
+ return;
+
+ year = g_date_get_year (date);
+ g_date_free (date);
+
+ if (year == G_DATE_BAD_YEAR) {
+ GST_WARNING_OBJECT (qtmux, "invalid date in tag");
+ return;
+ }
+
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %d",
+ GST_FOURCC_ARGS (fourcc), year);
+ atom_udta_add_3gp_uint_tag (udta, fourcc, year);
+}
+
/* Build the 3GPP 'loci' (location information) atom from the geo-location
 * tags.  Layout written here (per 3GPP TS 26.244):
 *   2 bytes language code, N+1 bytes NUL-terminated name, 1 byte role,
 *   3 x 4-byte signed 16.16 fixed-point (longitude, latitude, altitude),
 *   2 NUL bytes for the empty astronomical-body and notes strings. */
static void
gst_qt_mux_add_3gp_location (GstQTMux * qtmux, const GstTagList * list,
    AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
{
  gdouble latitude = -360, longitude = -360, altitude = 0;
  gchar *location = NULL;
  guint8 *data, *ddata;
  gint size = 0, len = 0;
  gboolean ret = FALSE;

  g_return_if_fail (strcmp (tag, GST_TAG_GEO_LOCATION_NAME) == 0);

  /* any one of the four location tags is enough to emit the atom */
  ret = gst_tag_list_get_string (list, tag, &location);
  ret |= gst_tag_list_get_double (list, GST_TAG_GEO_LOCATION_LONGITUDE,
      &longitude);
  ret |= gst_tag_list_get_double (list, GST_TAG_GEO_LOCATION_LATITUDE,
      &latitude);
  ret |= gst_tag_list_get_double (list, GST_TAG_GEO_LOCATION_ELEVATION,
      &altitude);

  if (!ret)
    return;

  /* name + terminator + language code */
  if (location)
    len = strlen (location);
  size += len + 1 + 2;

  /* role + (long, lat, alt) + body + notes */
  size += 1 + 3 * 4 + 1 + 1;

  /* ddata keeps the start of the allocation; data walks forward */
  data = ddata = g_malloc (size);

  /* language tag */
  GST_WRITE_UINT16_BE (data, language_code (GST_QT_MUX_DEFAULT_TAG_LANGUAGE));
  /* location */
  if (location)
    memcpy (data + 2, location, len);
  GST_WRITE_UINT8 (data + 2 + len, 0);
  data += len + 1 + 2;
  /* role */
  GST_WRITE_UINT8 (data, 0);
  /* long, lat, alt */
#define QT_WRITE_SFP32(data, fp) GST_WRITE_UINT32_BE(data, (guint32) ((gint) (fp * 65536.0)))
  QT_WRITE_SFP32 (data + 1, longitude);
  QT_WRITE_SFP32 (data + 5, latitude);
  QT_WRITE_SFP32 (data + 9, altitude);
  /* neither astronomical body nor notes */
  GST_WRITE_UINT16_BE (data + 13, 0);

  GST_DEBUG_OBJECT (qtmux, "Adding tag 'loci'");
  atom_udta_add_3gp_tag (udta, fourcc, ddata, size);
  g_free (ddata);
}
+
/* Build the 3GPP 'kywd' atom from the comma-separated keywords tag.
 * Layout: 2-byte language code, 1-byte keyword count, then per keyword a
 * 1-byte size (string length incl. NUL) followed by the NUL-terminated
 * string.  The first loop only measures; @i ends up holding the keyword
 * count used by the count byte. */
static void
gst_qt_mux_add_3gp_keywords (GstQTMux * qtmux, const GstTagList * list,
    AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
{
  gchar *keywords = NULL;
  guint8 *data, *ddata;
  gint size = 0, i;
  gchar **kwds;

  g_return_if_fail (strcmp (tag, GST_TAG_KEYWORDS) == 0);

  if (!gst_tag_list_get_string (list, tag, &keywords) || !keywords)
    return;

  kwds = g_strsplit (keywords, ",", 0);
  g_free (keywords);

  size = 0;
  for (i = 0; kwds[i]; i++) {
    /* size byte + null-terminator */
    size += strlen (kwds[i]) + 1 + 1;
  }

  /* language tag + count + keywords */
  size += 2 + 1;

  /* ddata keeps the allocation start for the atom call; data walks forward */
  data = ddata = g_malloc (size);

  /* language tag */
  GST_WRITE_UINT16_BE (data, language_code (GST_QT_MUX_DEFAULT_TAG_LANGUAGE));
  /* count (i is the number of keywords counted above) */
  GST_WRITE_UINT8 (data + 2, i);
  data += 3;
  /* keywords */
  for (i = 0; kwds[i]; ++i) {
    gint len = strlen (kwds[i]);

    GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
        GST_FOURCC_ARGS (fourcc), kwds[i]);
    /* size */
    GST_WRITE_UINT8 (data, len + 1);
    /* copy including the NUL terminator */
    memcpy (data + 1, kwds[i], len + 1);
    data += len + 2;
  }

  g_strfreev (kwds);

  atom_udta_add_3gp_tag (udta, fourcc, ddata, size);
  g_free (ddata);
}
+
/* Parse a classification tag of the form "xxxx://table/content" where
 * xxxx is a 4-character entity code and table a non-negative integer.
 * On success fills *p_fourcc, *p_table and *p_content (newly allocated,
 * caller frees) and returns TRUE; on any mismatch returns FALSE with a
 * warning and leaves the out-parameters untouched. */
static gboolean
gst_qt_mux_parse_classification_string (GstQTMux * qtmux, const gchar * input,
    guint32 * p_fourcc, guint16 * p_table, gchar ** p_content)
{
  guint32 fourcc;
  gint table;
  gint size;
  const gchar *data;

  data = input;
  size = strlen (input);

  if (size < 4 + 3 + 1 + 1 + 1) {
    /* at least the minimum xxxx://y/z */
    GST_WARNING_OBJECT (qtmux, "Classification tag input (%s) too short, "
        "ignoring", input);
    return FALSE;
  }

  /* read the fourcc (as raw bytes, host order preserved) */
  memcpy (&fourcc, data, 4);
  size -= 4;
  data += 4;

  if (strncmp (data, "://", 3) != 0) {
    goto mismatch;
  }
  data += 3;
  size -= 3;

  /* read the table number */
  if (sscanf (data, "%d", &table) != 1) {
    goto mismatch;
  }
  if (table < 0) {
    GST_WARNING_OBJECT (qtmux, "Invalid table number in classification tag (%d)"
        ", table numbers should be positive, ignoring tag", table);
    return FALSE;
  }

  /* find the next / (skipping over the table digits) */
  while (size > 0 && data[0] != '/') {
    data += 1;
    size -= 1;
  }
  if (size == 0) {
    goto mismatch;
  }
  g_assert (data[0] == '/');

  /* skip the '/' */
  data += 1;
  size -= 1;
  if (size == 0) {
    /* no content after the slash */
    goto mismatch;
  }

  /* read up the rest of the string */
  *p_content = g_strdup (data);
  *p_table = (guint16) table;
  *p_fourcc = fourcc;
  return TRUE;

mismatch:
  {
    GST_WARNING_OBJECT (qtmux, "Ignoring classification tag as "
        "input (%s) didn't match the expected entitycode://table/content",
        input);
    return FALSE;
  }
}
+
+static void
+gst_qt_mux_add_3gp_classification (GstQTMux * qtmux, const GstTagList * list,
+ AtomUDTA * udta, const char *tag, const char *tag2, guint32 fourcc)
+{
+ gchar *clsf_data = NULL;
+ gint size = 0;
+ guint32 entity = 0;
+ guint16 table = 0;
+ gchar *content = NULL;
+ guint8 *data;
+
+ g_return_if_fail (strcmp (tag, GST_TAG_3GP_CLASSIFICATION) == 0);
+
+ if (!gst_tag_list_get_string (list, tag, &clsf_data) || !clsf_data)
+ return;
+
+ GST_DEBUG_OBJECT (qtmux, "Adding tag %" GST_FOURCC_FORMAT " -> %s",
+ GST_FOURCC_ARGS (fourcc), clsf_data);
+
+ /* parse the string, format is:
+ * entityfourcc://table/content
+ */
+ gst_qt_mux_parse_classification_string (qtmux, clsf_data, &entity, &table,
+ &content);
+ g_free (clsf_data);
+ /* +1 for the \0 */
+ size = strlen (content) + 1;
+
+ /* now we have everything, build the atom
+ * atom description is at 3GPP TS 26.244 V8.2.0 (2009-09) */
+ data = g_malloc (4 + 2 + 2 + size);
+ GST_WRITE_UINT32_LE (data, entity);
+ GST_WRITE_UINT16_BE (data + 4, (guint16) table);
+ GST_WRITE_UINT16_BE (data + 6, 0);
+ memcpy (data + 8, content, size);
+ g_free (content);
+
+ atom_udta_add_3gp_tag (udta, fourcc, data, 4 + 2 + 2 + size);
+ g_free (data);
+}
+
/* Signature shared by all the per-tag writer functions above: read @tag
 * (and the optional companion @tag2) from @list and store it under
 * @fourcc in @udta. */
typedef void (*GstQTMuxAddUdtaTagFunc) (GstQTMux * mux,
    const GstTagList * list, AtomUDTA * udta, const char *tag,
    const char *tag2, guint32 fourcc);

/*
 * Struct to record mappings from gstreamer tags to fourcc codes
 */
typedef struct _GstTagToFourcc
{
  guint32 fourcc;               /* atom fourcc to write */
  const gchar *gsttag;          /* primary GStreamer tag name */
  const gchar *gsttag2;         /* optional companion tag (e.g. a count) */
  const GstQTMuxAddUdtaTagFunc func;    /* writer to invoke */
} GstTagToFourcc;
+
+/* tag list tags to fourcc matching */
+static const GstTagToFourcc tag_matches_mp4[] = {
+ {FOURCC__alb, GST_TAG_ALBUM, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soal, GST_TAG_ALBUM_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__ART, GST_TAG_ARTIST, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soar, GST_TAG_ARTIST_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_aART, GST_TAG_ALBUM_ARTIST, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soaa, GST_TAG_ALBUM_ARTIST_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__swr, GST_TAG_APPLICATION_NAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__cmt, GST_TAG_COMMENT, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__wrt, GST_TAG_COMPOSER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_soco, GST_TAG_COMPOSER_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_tvsh, GST_TAG_SHOW_NAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_sosn, GST_TAG_SHOW_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_tvsn, GST_TAG_SHOW_SEASON_NUMBER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_tves, GST_TAG_SHOW_EPISODE_NUMBER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__gen, GST_TAG_GENRE, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__nam, GST_TAG_TITLE, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_sonm, GST_TAG_TITLE_SORTNAME, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_perf, GST_TAG_PERFORMER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__grp, GST_TAG_GROUPING, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__des, GST_TAG_DESCRIPTION, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__lyr, GST_TAG_LYRICS, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__too, GST_TAG_ENCODER, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_cprt, GST_TAG_COPYRIGHT, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_keyw, GST_TAG_KEYWORDS, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC__day, GST_TAG_DATE, NULL, gst_qt_mux_add_mp4_date},
+ {FOURCC_tmpo, GST_TAG_BEATS_PER_MINUTE, NULL, gst_qt_mux_add_mp4_tag},
+ {FOURCC_trkn, GST_TAG_TRACK_NUMBER, GST_TAG_TRACK_COUNT,
+ gst_qt_mux_add_mp4_tag},
+ {FOURCC_disk, GST_TAG_ALBUM_VOLUME_NUMBER, GST_TAG_ALBUM_VOLUME_COUNT,
+ gst_qt_mux_add_mp4_tag},
+ {FOURCC_covr, GST_TAG_PREVIEW_IMAGE, NULL, gst_qt_mux_add_mp4_cover},
+ {FOURCC_covr, GST_TAG_IMAGE, NULL, gst_qt_mux_add_mp4_cover},
+ {0, NULL,}
+};
+
/* 3GPP asset-metadata mapping; terminated by a zero-fourcc sentinel */
static const GstTagToFourcc tag_matches_3gp[] = {
  {FOURCC_titl, GST_TAG_TITLE, NULL, gst_qt_mux_add_3gp_str},
  {FOURCC_dscp, GST_TAG_DESCRIPTION, NULL, gst_qt_mux_add_3gp_str},
  {FOURCC_cprt, GST_TAG_COPYRIGHT, NULL, gst_qt_mux_add_3gp_str},
  {FOURCC_perf, GST_TAG_ARTIST, NULL, gst_qt_mux_add_3gp_str},
  {FOURCC_auth, GST_TAG_COMPOSER, NULL, gst_qt_mux_add_3gp_str},
  {FOURCC_gnre, GST_TAG_GENRE, NULL, gst_qt_mux_add_3gp_str},
  {FOURCC_kywd, GST_TAG_KEYWORDS, NULL, gst_qt_mux_add_3gp_keywords},
  {FOURCC_yrrc, GST_TAG_DATE, NULL, gst_qt_mux_add_3gp_date},
  {FOURCC_albm, GST_TAG_ALBUM, GST_TAG_TRACK_NUMBER, gst_qt_mux_add_3gp_str},
  {FOURCC_loci, GST_TAG_GEO_LOCATION_NAME, NULL, gst_qt_mux_add_3gp_location},
  {FOURCC_clsf, GST_TAG_3GP_CLASSIFICATION, NULL,
      gst_qt_mux_add_3gp_classification},
  {0, NULL,}
};

/* qtdemux produces these for atoms it cannot parse */
#define GST_QT_DEMUX_PRIVATE_TAG "private-qt-tag"
+
/* Serialize the tag list as XMP and attach it: inside the moov udta for
 * QuickTime, or as a top-level uuid atom queued on extra_atoms for the
 * ISO/mp4 variants.  MJ2 gets no XMP at all. */
static void
gst_qt_mux_add_xmp_tags (GstQTMux * qtmux, const GstTagList * list)
{
  GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
  GstBuffer *xmp = NULL;

  /* adobe specs only have 'quicktime' and 'mp4',
   * but I guess we can extrapolate to gpp.
   * Keep mj2 out for now as we don't add any tags for it yet.
   * If you have further info about xmp on these formats, please share */
  if (qtmux_klass->format == GST_QT_MUX_FORMAT_MJ2)
    return;

  GST_DEBUG_OBJECT (qtmux, "Adding xmp tags");

  if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT) {
    xmp = gst_tag_xmp_writer_tag_list_to_xmp_buffer (GST_TAG_XMP_WRITER (qtmux),
        list, TRUE);
    if (xmp)
      atom_udta_add_xmp_tags (&qtmux->moov->udta, xmp);
  } else {
    AtomInfo *ainfo;
    /* for isom/mp4, it is a top level uuid atom */
    xmp = gst_tag_xmp_writer_tag_list_to_xmp_buffer (GST_TAG_XMP_WRITER (qtmux),
        list, TRUE);
    if (xmp) {
      ainfo = build_uuid_xmp_atom (xmp);
      if (ainfo) {
        qtmux->extra_atoms = g_slist_prepend (qtmux->extra_atoms, ainfo);
      }
    }
  }
  /* the atom code took its own reference / copy of the data */
  if (xmp)
    gst_buffer_unref (xmp);
}
+
/* Write all recognized tags from @list into @udta, using the per-format
 * mapping table (iTunes-style for mp4/QT, 3GPP table for 3gp, none for
 * MJ2).  Existing tags in @udta are cleared first so repeated calls do
 * not accumulate duplicates.  Unparsed private qtdemux blobs are copied
 * through when their declared style matches this muxer variant. */
static void
gst_qt_mux_add_metadata_tags (GstQTMux * qtmux, const GstTagList * list,
    AtomUDTA * udta)
{
  GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
  guint32 fourcc;
  gint i;
  const gchar *tag, *tag2;
  const GstTagToFourcc *tag_matches;

  switch (qtmux_klass->format) {
    case GST_QT_MUX_FORMAT_3GP:
      tag_matches = tag_matches_3gp;
      break;
    case GST_QT_MUX_FORMAT_MJ2:
      tag_matches = NULL;
      break;
    default:
      /* sort of iTunes style for mp4 and QT (?) */
      tag_matches = tag_matches_mp4;
      break;
  }

  if (!tag_matches)
    return;

  /* Clear existing tags so we don't add them over and over */
  atom_udta_clear_tags (udta);

  for (i = 0; tag_matches[i].fourcc; i++) {
    fourcc = tag_matches[i].fourcc;
    tag = tag_matches[i].gsttag;
    tag2 = tag_matches[i].gsttag2;

    g_assert (tag_matches[i].func);
    /* the writer itself checks whether the tag is actually in the list */
    tag_matches[i].func (qtmux, list, udta, tag, tag2, fourcc);
  }

  /* add unparsed blobs if present */
  if (gst_tag_exists (GST_QT_DEMUX_PRIVATE_TAG)) {
    guint num_tags;

    num_tags = gst_tag_list_get_tag_size (list, GST_QT_DEMUX_PRIVATE_TAG);
    for (i = 0; i < num_tags; ++i) {
      GstSample *sample = NULL;
      GstBuffer *buf;
      const GstStructure *s;

      if (!gst_tag_list_get_sample_index (list, GST_QT_DEMUX_PRIVATE_TAG, i,
              &sample))
        continue;
      buf = gst_sample_get_buffer (sample);

      if (buf && (s = gst_sample_get_info (sample))) {
        const gchar *style = NULL;
        GstMapInfo map;

        gst_buffer_map (buf, &map, GST_MAP_READ);
        GST_DEBUG_OBJECT (qtmux,
            "Found private tag %d/%d; size %" G_GSIZE_FORMAT ", info %"
            GST_PTR_FORMAT, i, num_tags, map.size, s);
        if (s && (style = gst_structure_get_string (s, "style"))) {
          /* try to prevent some style tag ending up into another variant
           * (todo: make into a list if more cases) */
          if ((strcmp (style, "itunes") == 0 &&
                  qtmux_klass->format == GST_QT_MUX_FORMAT_MP4) ||
              (strcmp (style, "iso") == 0 &&
                  qtmux_klass->format == GST_QT_MUX_FORMAT_3GP)) {
            GST_DEBUG_OBJECT (qtmux, "Adding private tag");
            atom_udta_add_blob_tag (udta, map.data, map.size);
          }
        }
        gst_buffer_unmap (buf, &map);
      }
      gst_sample_unref (sample);
    }
  }

  return;
}
+
+/*
+ * Gets the tagsetter iface taglist and puts the known tags
+ * into the output stream
+ */
+static void
+gst_qt_mux_setup_metadata (GstQTMux * qtmux)
+{
+ const GstTagList *tags = NULL;
+ GList *l;
+
+ GST_OBJECT_LOCK (qtmux);
+ if (qtmux->tags_changed) {
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (qtmux));
+ qtmux->tags_changed = FALSE;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ GST_LOG_OBJECT (qtmux, "tags: %" GST_PTR_FORMAT, tags);
+
+ if (tags && !gst_tag_list_is_empty (tags)) {
+ GstTagList *copy = gst_tag_list_copy (tags);
+
+ GST_DEBUG_OBJECT (qtmux, "Removing bogus tags");
+ gst_tag_list_remove_tag (copy, GST_TAG_VIDEO_CODEC);
+ gst_tag_list_remove_tag (copy, GST_TAG_AUDIO_CODEC);
+ gst_tag_list_remove_tag (copy, GST_TAG_CONTAINER_FORMAT);
+
+ GST_DEBUG_OBJECT (qtmux, "Formatting tags");
+ gst_qt_mux_add_metadata_tags (qtmux, copy, &qtmux->moov->udta);
+ gst_qt_mux_add_xmp_tags (qtmux, copy);
+ gst_tag_list_unref (copy);
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "No new tags received");
+ }
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = GST_QT_MUX_PAD (l->data);
+
+ if (qpad->tags_changed && qpad->tags) {
+ GST_DEBUG_OBJECT (qpad, "Adding tags");
+ gst_tag_list_remove_tag (qpad->tags, GST_TAG_CONTAINER_FORMAT);
+ gst_qt_mux_add_metadata_tags (qtmux, qpad->tags, &qpad->trak->udta);
+ qpad->tags_changed = FALSE;
+ GST_DEBUG_OBJECT (qpad, "Tags added");
+ } else {
+ GST_DEBUG_OBJECT (qpad, "No new tags received");
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+}
+
/* Wrap malloc'd serialized atom data in a GstBuffer, transferring
 * ownership: the buffer frees @data with g_free when it is destroyed. */
static inline GstBuffer *
_gst_buffer_new_take_data (guint8 * data, guint size)
{
  GstBuffer *buf;

  buf = gst_buffer_new ();
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));

  return buf;
}
+
+static GstFlowReturn
+gst_qt_mux_send_buffer (GstQTMux * qtmux, GstBuffer * buf, guint64 * offset,
+ gboolean mind_fast)
+{
+ GstFlowReturn res = GST_FLOW_OK;
+ gsize size;
+
+ g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
+
+ size = gst_buffer_get_size (buf);
+ GST_LOG_OBJECT (qtmux, "sending buffer size %" G_GSIZE_FORMAT, size);
+
+ if (mind_fast && qtmux->fast_start_file) {
+ GstMapInfo map;
+ gint ret;
+
+ GST_LOG_OBJECT (qtmux, "to temporary file");
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ret = fwrite (map.data, sizeof (guint8), map.size, qtmux->fast_start_file);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ if (ret != size)
+ goto write_error;
+ else
+ res = GST_FLOW_OK;
+ } else {
+ if (!mind_fast) {
+ res = gst_qtmux_push_mdat_stored_buffers (qtmux);
+ }
+
+ if (res == GST_FLOW_OK) {
+ GST_LOG_OBJECT (qtmux, "downstream");
+ res = gst_aggregator_finish_buffer (GST_AGGREGATOR (qtmux), buf);
+ }
+ }
+
+ if (res != GST_FLOW_OK)
+ GST_WARNING_OBJECT (qtmux,
+ "Failed to send buffer (%p) size %" G_GSIZE_FORMAT, buf, size);
+
+ if (G_LIKELY (offset))
+ *offset += size;
+
+ return res;
+
+ /* ERRORS */
+write_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, WRITE,
+ ("Failed to write to temporary file"), GST_ERROR_SYSTEM);
+ return GST_FLOW_ERROR;
+ }
+}
+
/* Rewind a stdio stream to offset 0, using the widest-offset seek
 * available: fseeko where present, a raw lseek on the descriptor on
 * Unix/Windows (plain fseek takes only a long), and fseek as the last
 * resort.  Returns TRUE on success. */
static gboolean
gst_qt_mux_seek_to_beginning (FILE * f)
{
#ifdef HAVE_FSEEKO
  if (fseeko (f, (off_t) 0, SEEK_SET) != 0)
    return FALSE;
#elif defined (G_OS_UNIX) || defined (G_OS_WIN32)
  if (lseek (fileno (f), (off_t) 0, SEEK_SET) == (off_t) - 1)
    return FALSE;
#else
  if (fseek (f, (long) 0, SEEK_SET) != 0)
    return FALSE;
#endif
  return TRUE;
}
+
/* Copy the contents of the fast-start temporary file downstream in 4 kB
 * chunks, then truncate and rewind the file so it can be reused.  @offset
 * is advanced by the number of bytes sent.  On flush/seek failure the
 * temporary file handle is closed and cleared. */
static GstFlowReturn
gst_qt_mux_send_buffered_data (GstQTMux * qtmux, guint64 * offset)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buf = NULL;

  /* make sure everything spooled so far has actually hit the file */
  if (fflush (qtmux->fast_start_file))
    goto flush_failed;

  if (!gst_qt_mux_seek_to_beginning (qtmux->fast_start_file))
    goto seek_failed;

  /* hm, this could all take a really really long time,
   * but there may not be another way to get moov atom first
   * (somehow optimize copy?) */
  GST_DEBUG_OBJECT (qtmux, "Sending buffered data");
  while (ret == GST_FLOW_OK) {
    const int bufsize = 4096;
    GstMapInfo map;
    gsize size;

    buf = gst_buffer_new_and_alloc (bufsize);
    gst_buffer_map (buf, &map, GST_MAP_WRITE);
    size = fread (map.data, sizeof (guint8), bufsize, qtmux->fast_start_file);
    if (size == 0) {
      /* EOF (or a read error, which fread does not let us distinguish here) */
      gst_buffer_unmap (buf, &map);
      break;
    }
    GST_LOG_OBJECT (qtmux, "Pushing buffered buffer of size %d", (gint) size);
    gst_buffer_unmap (buf, &map);
    /* shrink the final, partially-filled buffer to the bytes actually read */
    if (size != bufsize)
      gst_buffer_set_size (buf, size);
    /* gst_qt_mux_send_buffer takes ownership of buf */
    ret = gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
    buf = NULL;
  }
  if (buf)
    gst_buffer_unref (buf);

  /* empty and rewind the temporary file for possible reuse */
  if (ftruncate (fileno (qtmux->fast_start_file), 0))
    goto seek_failed;
  if (!gst_qt_mux_seek_to_beginning (qtmux->fast_start_file))
    goto seek_failed;

  return ret;

  /* ERRORS */
flush_failed:
  {
    GST_ELEMENT_ERROR (qtmux, RESOURCE, WRITE,
        ("Failed to flush temporary file"), GST_ERROR_SYSTEM);
    ret = GST_FLOW_ERROR;
    goto fail;
  }
seek_failed:
  {
    GST_ELEMENT_ERROR (qtmux, RESOURCE, SEEK,
        ("Failed to seek temporary file"), GST_ERROR_SYSTEM);
    ret = GST_FLOW_ERROR;
    goto fail;
  }
fail:
  {
    /* clear descriptor so we don't remove temp file later on,
     * might be possible to recover */
    fclose (qtmux->fast_start_file);
    qtmux->fast_start_file = NULL;
    return ret;
  }
}
+
+/*
+ * Sends the initial mdat atom fields (size fields and fourcc type),
+ * the subsequent buffers are considered part of it's data.
+ * As we can't predict the amount of data that we are going to place in mdat
+ * we need to record the position of the size field in the stream so we can
+ * seek back to it later and update when the streams have finished.
+ */
+static GstFlowReturn
+gst_qt_mux_send_mdat_header (GstQTMux * qtmux, guint64 * off, guint64 size,
+ gboolean extended, gboolean fsync_after)
+{
+ GstBuffer *buf;
+ GstMapInfo map;
+ gboolean mind_fast = FALSE;
+
+ GST_DEBUG_OBJECT (qtmux, "Sending mdat's atom header, "
+ "size %" G_GUINT64_FORMAT, size);
+
+ /* if the qtmux state is EOS, really write the mdat, otherwise
+ * allow size == 0 for a placeholder atom */
+ if (qtmux->state == GST_QT_MUX_STATE_EOS || size > 0)
+ size += 8;
+
+ if (extended) {
+ gboolean large_file = (size > MDAT_LARGE_FILE_LIMIT);
+ /* Always write 16-bytes, but put a free atom first
+ * if the size is < 4GB. */
+ buf = gst_buffer_new_and_alloc (16);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+
+ if (large_file) {
+ /* Write extended mdat header and large_size field */
+ GST_WRITE_UINT32_BE (map.data, 1);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_mdat);
+ GST_WRITE_UINT64_BE (map.data + 8, size + 8);
+ } else {
+ /* Write an empty free atom, then standard 32-bit mdat */
+ GST_WRITE_UINT32_BE (map.data, 8);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_free);
+ GST_WRITE_UINT32_BE (map.data + 8, size);
+ GST_WRITE_UINT32_LE (map.data + 12, FOURCC_mdat);
+ }
+ gst_buffer_unmap (buf, &map);
+ } else {
+ buf = gst_buffer_new_and_alloc (8);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+
+ /* Vanilla 32-bit mdat */
+ GST_WRITE_UINT32_BE (map.data, size);
+ GST_WRITE_UINT32_LE (map.data + 4, FOURCC_mdat);
+ gst_buffer_unmap (buf, &map);
+ }
+
+ GST_LOG_OBJECT (qtmux, "Pushing mdat header");
+ if (fsync_after)
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_SYNC_AFTER);
+
+ mind_fast = qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+ && !qtmux->downstream_seekable;
+
+ return gst_qt_mux_send_buffer (qtmux, buf, off, mind_fast);
+}
+
+static void
+gst_qt_mux_seek_to (GstQTMux * qtmux, guint64 position)
+{
+ GstSegment segment;
+
+ gst_segment_init (&segment, GST_FORMAT_BYTES);
+ segment.start = position;
+ GST_LOG_OBJECT (qtmux, "seeking to byte position %" G_GUINT64_FORMAT,
+ position);
+ gst_aggregator_update_segment (GST_AGGREGATOR (qtmux), &segment);
+}
+
+/*
+ * We get the position of the mdat size field, seek back to it
+ * and overwrite with the real value
+ */
+static GstFlowReturn
+gst_qt_mux_update_mdat_size (GstQTMux * qtmux, guint64 mdat_pos,
+ guint64 mdat_size, guint64 * offset, gboolean fsync_after)
+{
+
+ /* We must have recorded the mdat position for this to work */
+ g_assert (mdat_pos != 0);
+
+ /* seek and rewrite the header */
+ gst_qt_mux_seek_to (qtmux, mdat_pos);
+
+ return gst_qt_mux_send_mdat_header (qtmux, offset, mdat_size, TRUE,
+ fsync_after);
+}
+
+static GstFlowReturn
+gst_qt_mux_send_ftyp (GstQTMux * qtmux, guint64 * off)
+{
+ GstBuffer *buf;
+ guint64 size = 0, offset = 0;
+ guint8 *data = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Sending ftyp atom");
+
+ if (!atom_ftyp_copy_data (qtmux->ftyp, &data, &size, &offset))
+ goto serialize_error;
+
+ buf = _gst_buffer_new_take_data (data, offset);
+
+ GST_LOG_OBJECT (qtmux, "Pushing ftyp");
+ return gst_qt_mux_send_buffer (qtmux, buf, off, FALSE);
+
+ /* ERRORS */
+serialize_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Failed to serialize ftyp"));
+ return GST_FLOW_ERROR;
+ }
+}
+
+static void
+gst_qt_mux_prepare_ftyp (GstQTMux * qtmux, AtomFTYP ** p_ftyp,
+ GstBuffer ** p_prefix)
+{
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ guint32 major, version;
+ GList *comp;
+ GstBuffer *prefix = NULL;
+ AtomFTYP *ftyp = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Preparing ftyp and possible prefix atom");
+
+ /* init and send context and ftyp based on current property state */
+ gst_qt_mux_map_format_to_header (qtmux_klass->format, &prefix, &major,
+ &version, &comp, qtmux->moov, qtmux->longest_chunk,
+ qtmux->fast_start_file != NULL);
+ ftyp = atom_ftyp_new (qtmux->context, major, version, comp);
+ if (comp)
+ g_list_free (comp);
+ if (prefix) {
+ if (p_prefix)
+ *p_prefix = prefix;
+ else
+ gst_buffer_unref (prefix);
+ }
+ *p_ftyp = ftyp;
+}
+
+static GstFlowReturn
+gst_qt_mux_prepare_and_send_ftyp (GstQTMux * qtmux)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *prefix = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Preparing to send ftyp atom");
+
+ /* init and send context and ftyp based on current property state */
+ if (qtmux->ftyp) {
+ atom_ftyp_free (qtmux->ftyp);
+ qtmux->ftyp = NULL;
+ }
+ gst_qt_mux_prepare_ftyp (qtmux, &qtmux->ftyp, &prefix);
+ if (prefix) {
+ ret = gst_qt_mux_send_buffer (qtmux, prefix, &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+ return gst_qt_mux_send_ftyp (qtmux, &qtmux->header_size);
+}
+
+static void
+gst_qt_mux_set_header_on_caps (GstQTMux * mux, GstBuffer * buf)
+{
+ GstStructure *structure;
+ GValue array = { 0 };
+ GValue value = { 0 };
+ GstCaps *caps, *tcaps;
+
+ tcaps = gst_pad_get_current_caps (GST_AGGREGATOR_SRC_PAD (mux));
+ caps = gst_caps_copy (tcaps);
+ gst_caps_unref (tcaps);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ g_value_init (&array, GST_TYPE_ARRAY);
+
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+ g_value_init (&value, GST_TYPE_BUFFER);
+ gst_value_take_buffer (&value, gst_buffer_ref (buf));
+ gst_value_array_append_value (&array, &value);
+ g_value_unset (&value);
+
+ gst_structure_set_value (structure, "streamheader", &array);
+ g_value_unset (&array);
+ gst_aggregator_set_src_caps (GST_AGGREGATOR (mux), caps);
+ gst_caps_unref (caps);
+}
+
+/*
+ * Write out a free space atom. The offset is adjusted by the full
+ * size, but a smaller buffer is sent
+ */
+static GstFlowReturn
+gst_qt_mux_send_free_atom (GstQTMux * qtmux, guint64 * off, guint32 size,
+ gboolean fsync_after)
+{
+ Atom *node_header;
+ GstBuffer *buf;
+ guint8 *data = NULL;
+ guint64 offset = 0, bsize = 0;
+ GstFlowReturn ret;
+
+ GST_DEBUG_OBJECT (qtmux, "Sending free atom header of size %u", size);
+
+ /* We can't make a free space atom smaller than the header */
+ if (size < 8)
+ goto too_small;
+
+ node_header = g_malloc0 (sizeof (Atom));
+ node_header->type = FOURCC_free;
+ node_header->size = size;
+
+ bsize = offset = 0;
+ if (atom_copy_data (node_header, &data, &bsize, &offset) == 0)
+ goto serialize_error;
+
+ buf = _gst_buffer_new_take_data (data, offset);
+ g_free (node_header);
+
+ if (fsync_after)
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_SYNC_AFTER);
+
+ GST_LOG_OBJECT (qtmux, "Pushing free atom");
+ ret = gst_qt_mux_send_buffer (qtmux, buf, off, FALSE);
+
+ if (off) {
+ *off += size - 8;
+
+ /* Make sure downstream position ends up at the end of this free box */
+ gst_qt_mux_seek_to (qtmux, *off);
+ }
+
+ return ret;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Not enough free reserved space"));
+ return GST_FLOW_ERROR;
+ }
+serialize_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Failed to serialize mdat"));
+ g_free (node_header);
+ return GST_FLOW_ERROR;
+ }
+}
+
+static void
+gst_qt_mux_configure_moov_full (GstQTMux * qtmux, gboolean fragmented,
+ guint32 timescale)
+{
+ /* inform lower layers of our property wishes, and determine duration.
+ * Let moov take care of this using its list of traks;
+ * so that released pads are also included */
+ GST_DEBUG_OBJECT (qtmux, "Updating timescale to %" G_GUINT32_FORMAT,
+ timescale);
+ atom_moov_update_timescale (qtmux->moov, timescale);
+ atom_moov_set_fragmented (qtmux->moov, fragmented);
+
+ atom_moov_update_duration (qtmux->moov);
+}
+
+static void
+gst_qt_mux_configure_moov (GstQTMux * qtmux)
+{
+ gboolean fragmented = FALSE;
+ guint32 timescale;
+
+ GST_OBJECT_LOCK (qtmux);
+ timescale = qtmux->timescale;
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_FRAGMENTED
+ && qtmux->fragment_mode != GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE)
+ fragmented = TRUE;
+ GST_OBJECT_UNLOCK (qtmux);
+
+ gst_qt_mux_configure_moov_full (qtmux, fragmented, timescale);
+}
+
/* Serialize the moov atom and push it downstream (or to the fast-start
 * temporary file when @mind_fast is set).  If @padded_moov_size is non-zero
 * the moov must fit inside that reserved space and the remainder is filled
 * with a free atom. */
static GstFlowReturn
gst_qt_mux_send_moov (GstQTMux * qtmux, guint64 * _offset,
    guint64 padded_moov_size, gboolean mind_fast, gboolean fsync_after)
{
  guint64 offset = 0, size = 0;
  guint8 *data;
  GstBuffer *buf;
  GstFlowReturn ret = GST_FLOW_OK;
  GList *l;
  guint64 current_time = atoms_get_current_qt_time ();

  /* update modification times */
  qtmux->moov->mvhd.time_info.modification_time = current_time;

  GST_OBJECT_LOCK (qtmux);
  for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
    GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;

    qtpad->trak->mdia.mdhd.time_info.modification_time = current_time;
    qtpad->trak->tkhd.modification_time = current_time;
  }
  GST_OBJECT_UNLOCK (qtmux);

  /* serialize moov */
  offset = size = 0;
  data = NULL;
  GST_LOG_OBJECT (qtmux, "Copying movie header into buffer");
  if (!atom_moov_copy_data (qtmux->moov, &data, &size, &offset))
    goto serialize_error;
  /* remember the serialized size for later moov rewrites */
  qtmux->last_moov_size = offset;

  /* Check we have enough reserved space for this and a Free atom */
  if (padded_moov_size > 0 && offset + 8 > padded_moov_size)
    goto too_small_reserved;
  /* buffer takes ownership of the serialized data */
  buf = _gst_buffer_new_take_data (data, offset);
  GST_DEBUG_OBJECT (qtmux, "Pushing moov atoms");

  /* If at EOS, this is the final moov, put in the streamheader
   * (apparently used by a flumotion util) */
  if (qtmux->state == GST_QT_MUX_STATE_EOS)
    gst_qt_mux_set_header_on_caps (qtmux, buf);

  if (fsync_after)
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_SYNC_AFTER);
  ret = gst_qt_mux_send_buffer (qtmux, buf, _offset, mind_fast);

  /* Write out a free atom if needed */
  if (ret == GST_FLOW_OK && offset < padded_moov_size) {
    GST_LOG_OBJECT (qtmux, "Writing out free atom of size %u",
        (guint32) (padded_moov_size - offset));
    ret =
        gst_qt_mux_send_free_atom (qtmux, _offset, padded_moov_size - offset,
        fsync_after);
  }

  return ret;
too_small_reserved:
  {
    GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
        ("Not enough free reserved header space"),
        ("Needed %" G_GUINT64_FORMAT " bytes, reserved %" G_GUINT64_FORMAT,
            offset + 8, padded_moov_size));
    return GST_FLOW_ERROR;
  }
serialize_error:
  {
    g_free (data);
    return GST_FLOW_ERROR;
  }
}
+
/* either calculates size of extra atoms or pushes them */
static GstFlowReturn
gst_qt_mux_send_extra_atoms (GstQTMux * qtmux, gboolean send, guint64 * offset,
    gboolean mind_fast)
{
  GSList *walk;
  guint64 loffset = 0, size = 0;
  guint8 *data;
  GstFlowReturn ret = GST_FLOW_OK;

  for (walk = qtmux->extra_atoms; walk; walk = g_slist_next (walk)) {
    AtomInfo *ainfo = (AtomInfo *) walk->data;

    loffset = size = 0;
    data = NULL;
    /* with a NULL data pointer the copy func only computes the size */
    if (!ainfo->copy_data_func (ainfo->atom,
            send ? &data : NULL, &size, &loffset))
      goto serialize_error;

    if (send) {
      GstBuffer *buf;

      GST_DEBUG_OBJECT (qtmux,
          "Pushing extra top-level atom %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (ainfo->atom->type));
      buf = _gst_buffer_new_take_data (data, loffset);
      ret = gst_qt_mux_send_buffer (qtmux, buf, offset, FALSE);
      if (ret != GST_FLOW_OK)
        break;
    } else {
      /* size-only pass: just account for the atom in the running offset */
      if (offset)
        *offset += loffset;
    }
  }

  return ret;

serialize_error:
  {
    g_free (data);
    return GST_FLOW_ERROR;
  }
}
+
+static gboolean
+gst_qt_mux_downstream_is_seekable (GstQTMux * qtmux)
+{
+ gboolean seekable = FALSE;
+ GstQuery *query = gst_query_new_seeking (GST_FORMAT_BYTES);
+
+ if (gst_pad_peer_query (GST_AGGREGATOR_SRC_PAD (qtmux), query)) {
+ gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
+ GST_INFO_OBJECT (qtmux, "downstream is %sseekable", seekable ? "" : "not ");
+ } else {
+ /* have to assume seeking is not supported if query not handled downstream */
+ GST_WARNING_OBJECT (qtmux, "downstream did not handle seeking query");
+ seekable = FALSE;
+ }
+ gst_query_unref (query);
+
+ return seekable;
+}
+
+static void
+gst_qt_mux_prepare_moov_recovery (GstQTMux * qtmux)
+{
+ GList *l;
+ gboolean fail = FALSE;
+ AtomFTYP *ftyp = NULL;
+ GstBuffer *prefix = NULL;
+
+ GST_DEBUG_OBJECT (qtmux, "Opening moov recovery file: %s",
+ qtmux->moov_recov_file_path);
+
+ qtmux->moov_recov_file = g_fopen (qtmux->moov_recov_file_path, "wb+");
+ if (qtmux->moov_recov_file == NULL) {
+ GST_WARNING_OBJECT (qtmux, "Failed to open moov recovery file in %s",
+ qtmux->moov_recov_file_path);
+ return;
+ }
+
+ gst_qt_mux_prepare_ftyp (qtmux, &ftyp, &prefix);
+
+ GST_OBJECT_LOCK (qtmux);
+ if (!atoms_recov_write_headers (qtmux->moov_recov_file, ftyp, prefix,
+ qtmux->moov, qtmux->timescale,
+ g_list_length (GST_ELEMENT (qtmux)->sinkpads))) {
+ GST_WARNING_OBJECT (qtmux, "Failed to write moov recovery file " "headers");
+ GST_OBJECT_UNLOCK (qtmux);
+ goto fail;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ atom_ftyp_free (ftyp);
+ if (prefix)
+ gst_buffer_unref (prefix);
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+ /* write info for each stream */
+ fail = atoms_recov_write_trak_info (qtmux->moov_recov_file, qpad->trak);
+ if (fail) {
+ GST_WARNING_OBJECT (qtmux, "Failed to write trak info to recovery "
+ "file");
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ return;
+
+fail:
+ /* cleanup */
+ fclose (qtmux->moov_recov_file);
+ qtmux->moov_recov_file = NULL;
+}
+
+static guint64
+prefill_get_block_index (GstQTMux * qtmux, GstQTMuxPad * qpad)
+{
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ case FOURCC_c608:
+ case FOURCC_c708:
+ return qpad->sample_offset;
+ case FOURCC_sowt:
+ case FOURCC_twos:
+ return gst_util_uint64_scale_ceil (qpad->sample_offset,
+ qpad->expected_sample_duration_n,
+ qpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qpad->trak));
+ default:
+ return -1;
+ }
+}
+
+static guint
+prefill_get_sample_size (GstQTMux * qtmux, GstQTMuxPad * qpad)
+{
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 300000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 350000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 525000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 1050000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 4150000;
+ } else {
+ return 16600000;
+ }
+ break;
+ case FOURCC_apcn:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 200000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 250000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 350000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 700000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 2800000;
+ } else {
+ return 11200000;
+ }
+ break;
+ case FOURCC_apcs:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 150000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 200000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 250000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 500000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 2800000;
+ } else {
+ return 11200000;
+ }
+ break;
+ case FOURCC_apco:
+ if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 480) {
+ return 80000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 576) {
+ return 100000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 720) {
+ return 150000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 1080) {
+ return 250000;
+ } else if (((SampleTableEntryMP4V *) qpad->trak_ste)->height <= 2160) {
+ return 900000;
+ } else {
+ return 3600000;
+ }
+ break;
+ case FOURCC_c608:
+ /* We always write both cdat and cdt2 atom in prefill mode */
+ return 20;
+ case FOURCC_c708:{
+ if (qpad->first_cc_sample_size == 0) {
+ GstBuffer *buf =
+ gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qpad));
+ g_assert (buf != NULL);
+ qpad->first_cc_sample_size = gst_buffer_get_size (buf);
+ g_assert (qpad->first_cc_sample_size != 0);
+ gst_buffer_unref (buf);
+ }
+ return qpad->first_cc_sample_size + 8;
+ }
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ guint64 block_idx;
+ guint64 next_sample_offset;
+
+ block_idx = prefill_get_block_index (qtmux, qpad);
+ next_sample_offset =
+ gst_util_uint64_scale (block_idx + 1,
+ qpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qpad->trak),
+ qpad->expected_sample_duration_n);
+
+ return (next_sample_offset - qpad->sample_offset) * qpad->sample_size;
+ }
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ default:
+ GST_ERROR_OBJECT (qtmux, "unsupported codec for pre-filling");
+ return -1;
+ }
+
+ return -1;
+}
+
+static GstClockTime
+prefill_get_next_timestamp (GstQTMux * qtmux, GstQTMuxPad * qpad)
+{
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ case FOURCC_c608:
+ case FOURCC_c708:
+ return gst_util_uint64_scale (qpad->sample_offset + 1,
+ qpad->expected_sample_duration_d * GST_SECOND,
+ qpad->expected_sample_duration_n);
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ guint64 block_idx;
+ guint64 next_sample_offset;
+
+ block_idx = prefill_get_block_index (qtmux, qpad);
+ next_sample_offset =
+ gst_util_uint64_scale (block_idx + 1,
+ qpad->expected_sample_duration_d *
+ atom_trak_get_timescale (qpad->trak),
+ qpad->expected_sample_duration_n);
+
+ return gst_util_uint64_scale (next_sample_offset, GST_SECOND,
+ atom_trak_get_timescale (qpad->trak));
+ }
+ default:
+ GST_ERROR_OBJECT (qtmux, "unsupported codec for pre-filling");
+ return -1;
+ }
+
+ return -1;
+}
+
/* prepare_buf_func for raw audio pads in prefill mode: incoming buffers are
 * collected in the pad's adapter and re-emitted in blocks whose size matches
 * the expected sample duration.  Returns NULL while not enough samples are
 * buffered; at EOS whatever remains is flushed in a single buffer. */
static GstBuffer *
prefill_raw_audio_prepare_buf_func (GstQTMuxPad * qtpad, GstBuffer * buf,
    GstQTMux * qtmux)
{
  guint64 block_idx;
  guint64 nsamples;
  GstClockTime input_timestamp;
  guint64 input_timestamp_distance;

  if (buf)
    gst_adapter_push (qtpad->raw_audio_adapter, buf);

  /* index of the current block, and the number of samples needed to
   * complete it from the adapter's current read offset */
  block_idx = gst_util_uint64_scale_ceil (qtpad->raw_audio_adapter_offset,
      qtpad->expected_sample_duration_n,
      qtpad->expected_sample_duration_d *
      atom_trak_get_timescale (qtpad->trak));
  nsamples =
      gst_util_uint64_scale (block_idx + 1,
      qtpad->expected_sample_duration_d * atom_trak_get_timescale (qtpad->trak),
      qtpad->expected_sample_duration_n) - qtpad->raw_audio_adapter_offset;

  /* wait for a full block, except at EOS where a partial one is flushed */
  if ((!gst_aggregator_pad_is_eos (GST_AGGREGATOR_PAD (qtpad))
          && gst_adapter_available (qtpad->raw_audio_adapter) <
          nsamples * qtpad->sample_size)
      || gst_adapter_available (qtpad->raw_audio_adapter) == 0) {
    return NULL;
  }

  /* derive the block's PTS from the adapter's last-known PTS, advanced by
   * the distance to the read position (presumably bytes, converted via
   * sample_size and the trak timescale — confirm adapter semantics) */
  input_timestamp =
      gst_adapter_prev_pts (qtpad->raw_audio_adapter,
      &input_timestamp_distance);
  if (input_timestamp != GST_CLOCK_TIME_NONE)
    input_timestamp +=
        gst_util_uint64_scale (input_timestamp_distance, GST_SECOND,
        qtpad->sample_size * atom_trak_get_timescale (qtpad->trak));

  /* at EOS take everything that is left, otherwise exactly one block */
  buf =
      gst_adapter_take_buffer (qtpad->raw_audio_adapter,
      !gst_aggregator_pad_is_eos (GST_AGGREGATOR_PAD (qtpad)) ? nsamples *
      qtpad->sample_size : gst_adapter_available (qtpad->raw_audio_adapter));
  GST_BUFFER_PTS (buf) = input_timestamp;
  GST_BUFFER_DTS (buf) = GST_CLOCK_TIME_NONE;
  GST_BUFFER_DURATION (buf) = GST_CLOCK_TIME_NONE;

  qtpad->raw_audio_adapter_offset += nsamples;

  /* Check if we have yet another block of raw audio in the adapter */
  nsamples =
      gst_util_uint64_scale (block_idx + 2,
      qtpad->expected_sample_duration_d * atom_trak_get_timescale (qtpad->trak),
      qtpad->expected_sample_duration_n) - qtpad->raw_audio_adapter_offset;
  if (gst_adapter_available (qtpad->raw_audio_adapter) >=
      nsamples * qtpad->sample_size) {
    input_timestamp =
        gst_adapter_prev_pts (qtpad->raw_audio_adapter,
        &input_timestamp_distance);
    if (input_timestamp != GST_CLOCK_TIME_NONE)
      input_timestamp +=
          gst_util_uint64_scale (input_timestamp_distance, GST_SECOND,
          qtpad->sample_size * atom_trak_get_timescale (qtpad->trak));
    qtpad->raw_audio_adapter_pts = input_timestamp;
  } else {
    qtpad->raw_audio_adapter_pts = GST_CLOCK_TIME_NONE;
  }

  return buf;
}
+
+/* Must be called with object lock */
+static void
+find_video_sample_duration (GstQTMux * qtmux, guint * dur_n, guint * dur_d)
+{
+ GList *l;
+
+ /* Find the (first) video track and assume that we have to output
+ * in that size */
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *tmp_qpad = (GstQTMuxPad *) l->data;
+
+ if (tmp_qpad->trak->is_video) {
+ *dur_n = tmp_qpad->expected_sample_duration_n;
+ *dur_d = tmp_qpad->expected_sample_duration_d;
+ break;
+ }
+ }
+
+ if (l == NULL) {
+ GST_INFO_OBJECT (qtmux,
+ "Found no video framerate, using 40ms audio buffers");
+ *dur_n = 25;
+ *dur_d = 1;
+ }
+}
+
+/* Called when all pads are prerolled to adjust and */
+static gboolean
+prefill_update_sample_size (GstQTMux * qtmux, GstQTMuxPad * qpad)
+{
+ switch (qpad->fourcc) {
+ case FOURCC_apch:
+ case FOURCC_apcn:
+ case FOURCC_apcs:
+ case FOURCC_apco:
+ case FOURCC_ap4h:
+ case FOURCC_ap4x:
+ {
+ guint sample_size = prefill_get_sample_size (qtmux, qpad);
+ atom_trak_set_constant_size_samples (qpad->trak, sample_size);
+ return TRUE;
+ }
+ case FOURCC_c608:
+ case FOURCC_c708:
+ {
+ guint sample_size = prefill_get_sample_size (qtmux, qpad);
+ /* We need a "valid" duration */
+ find_video_sample_duration (qtmux, &qpad->expected_sample_duration_n,
+ &qpad->expected_sample_duration_d);
+ atom_trak_set_constant_size_samples (qpad->trak, sample_size);
+ return TRUE;
+ }
+ case FOURCC_sowt:
+ case FOURCC_twos:{
+ find_video_sample_duration (qtmux, &qpad->expected_sample_duration_n,
+ &qpad->expected_sample_duration_d);
+ /* Set a prepare_buf_func that ensures this */
+ qpad->prepare_buf_func = prefill_raw_audio_prepare_buf_func;
+ qpad->raw_audio_adapter = gst_adapter_new ();
+ qpad->raw_audio_adapter_offset = 0;
+ qpad->raw_audio_adapter_pts = GST_CLOCK_TIME_NONE;
+
+ return TRUE;
+ }
+ default:
+ return TRUE;
+ }
+}
+
/* Only called at startup when doing the "fake" iteration of all tracks in order
 * to prefill the sample tables in the header.  Returns NULL when every pad
 * has reached the reserved maximum duration. */
static GstQTMuxPad *
find_best_pad_prefill_start (GstQTMux * qtmux)
{
  GstQTMuxPad *best_pad = NULL;

  /* If interleave limits have been specified and the current pad is within
   * those interleave limits, pick that one, otherwise let's try to figure out
   * the next best one. */

  if (qtmux->current_pad &&
      (qtmux->interleave_bytes != 0 || qtmux->interleave_time != 0) &&
      (qtmux->interleave_bytes == 0
          || qtmux->current_chunk_size <= qtmux->interleave_bytes)
      && (qtmux->interleave_time == 0
          || qtmux->current_chunk_duration <= qtmux->interleave_time)
      && qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED) {

    /* stick with the current pad unless it already reached the reserved
     * maximum duration */
    if (qtmux->current_pad->total_duration < qtmux->reserved_max_duration) {
      best_pad = qtmux->current_pad;
    }
  } else {
    GST_OBJECT_LOCK (qtmux);
    if (GST_ELEMENT_CAST (qtmux)->sinkpads->next) {
      /* Attempt to try another pad if we have one. Otherwise use the only pad
       * present */
      best_pad = qtmux->current_pad = NULL;
    }
    GST_OBJECT_UNLOCK (qtmux);
  }

  /* The next best pad is the one which has the lowest timestamp and hasn't
   * exceeded the reserved max duration */
  if (!best_pad) {
    GList *l;
    GstClockTime best_time = GST_CLOCK_TIME_NONE;

    GST_OBJECT_LOCK (qtmux);
    for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
      GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
      GstClockTime timestamp;

      /* skip pads that already filled their reserved duration */
      if (qtpad->total_duration >= qtmux->reserved_max_duration)
        continue;

      timestamp = qtpad->total_duration;

      if (best_pad == NULL ||
          !GST_CLOCK_TIME_IS_VALID (best_time) || timestamp < best_time) {
        best_pad = qtpad;
        best_time = timestamp;
      }
    }
    GST_OBJECT_UNLOCK (qtmux);
  }

  return best_pad;
}
+
/* Called when starting the file in prefill_mode to figure out all the entries
 * of the header based on the input stream and reserved maximum duration.
 *
 * The _actual_ header (i.e. with the proper duration and trimmed sample tables)
 * will be updated and written on EOS. */
static gboolean
gst_qt_mux_prefill_samples (GstQTMux * qtmux)
{
  GstQTMuxPad *qpad;
  GList *l;
  GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));

  /* Update expected sample sizes/durations as needed, this is for raw
   * audio where samples are actual audio samples. */
  GST_OBJECT_LOCK (qtmux);
  for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
    /* NOTE(review): this inner 'qpad' shadows the outer declaration above */
    GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;

    if (!prefill_update_sample_size (qtmux, qpad)) {
      GST_OBJECT_UNLOCK (qtmux);
      return FALSE;
    }
  }
  GST_OBJECT_UNLOCK (qtmux);

  if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT ||
      qtmux->force_create_timecode_trak) {
    /* For the first sample check/update timecode as needed. We do that before
     * all actual samples as the code in gst_qt_mux_add_buffer() does it with
     * initial buffer directly, not with last_buf */
    GST_OBJECT_LOCK (qtmux);
    for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
      GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
      GstBuffer *buffer =
          gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qpad));
      GstVideoTimeCodeMeta *tc_meta;

      if (buffer && (tc_meta = gst_buffer_get_video_time_code_meta (buffer))
          && qpad->trak->is_video) {
        GstVideoTimeCode *tc = &tc_meta->tc;

        /* create a timecode trak referencing this video trak */
        qpad->tc_trak = atom_trak_new (qtmux->context);
        atom_moov_add_trak (qtmux->moov, qpad->tc_trak);

        qpad->trak->tref = atom_tref_new (FOURCC_tmcd);
        atom_tref_add_entry (qpad->trak->tref, qpad->tc_trak->tkhd.track_ID);

        atom_trak_set_timecode_type (qpad->tc_trak, qtmux->context,
            qpad->trak->mdia.mdhd.time_info.timescale, tc);

        /* one 4-byte timecode sample at the current mdat position */
        atom_trak_add_samples (qpad->tc_trak, 1, 1, 4,
            qtmux->mdat_size, FALSE, 0);

        qpad->tc_pos = qtmux->mdat_size;
        qpad->first_tc = gst_video_time_code_copy (tc);
        qpad->first_pts = GST_BUFFER_PTS (buffer);

        /* the timecode sample interrupts any current chunk */
        qtmux->current_chunk_offset = -1;
        qtmux->current_chunk_size = 0;
        qtmux->current_chunk_duration = 0;
        qtmux->mdat_size += 4;
      }
      if (buffer)
        gst_buffer_unref (buffer);
    }
    GST_OBJECT_UNLOCK (qtmux);
  }

  /* "fake"-iterate the tracks: predict every sample and record it in the
   * traks and the per-pad sample arrays, without consuming any input */
  while ((qpad = find_best_pad_prefill_start (qtmux))) {
    GstClockTime timestamp, next_timestamp, duration;
    guint nsamples, sample_size;
    guint64 chunk_offset;
    gint64 scaled_duration;
    gint64 pts_offset = 0;
    gboolean sync = FALSE;
    TrakBufferEntryInfo sample_entry;

    sample_size = prefill_get_sample_size (qtmux, qpad);

    /* unsupported codec */
    if (sample_size == -1) {
      return FALSE;
    }

    if (!qpad->samples)
      qpad->samples = g_array_new (FALSE, FALSE, sizeof (TrakBufferEntryInfo));

    timestamp = qpad->total_duration;
    next_timestamp = prefill_get_next_timestamp (qtmux, qpad);
    duration = next_timestamp - timestamp;

    if (qpad->first_ts == GST_CLOCK_TIME_NONE)
      qpad->first_ts = timestamp;
    if (qpad->first_dts == GST_CLOCK_TIME_NONE)
      qpad->first_dts = timestamp;

    /* start a new chunk when switching pads (or after a timecode sample) */
    if (qtmux->current_pad != qpad || qtmux->current_chunk_offset == -1) {
      qtmux->current_pad = qpad;
      if (qtmux->current_chunk_offset == -1)
        qtmux->current_chunk_offset = qtmux->mdat_size;
      else
        qtmux->current_chunk_offset += qtmux->current_chunk_size;
      qtmux->current_chunk_size = 0;
      qtmux->current_chunk_duration = 0;
    }
    /* raw audio blocks contain many samples; other codecs one per block */
    if (qpad->sample_size)
      nsamples = sample_size / qpad->sample_size;
    else
      nsamples = 1;
    qpad->last_dts = timestamp;
    /* duration in track-timescale units, computed as the difference of the
     * rounded interval endpoints */
    scaled_duration = gst_util_uint64_scale_round (timestamp + duration,
        atom_trak_get_timescale (qpad->trak),
        GST_SECOND) - gst_util_uint64_scale_round (timestamp,
        atom_trak_get_timescale (qpad->trak), GST_SECOND);

    qtmux->current_chunk_size += sample_size;
    qtmux->current_chunk_duration += duration;
    qpad->total_bytes += sample_size;

    chunk_offset = qtmux->current_chunk_offset;

    /* I-frame only, no frame reordering */
    sync = FALSE;
    pts_offset = 0;

    if (qtmux->current_chunk_duration > qtmux->longest_chunk
        || !GST_CLOCK_TIME_IS_VALID (qtmux->longest_chunk)) {
      qtmux->longest_chunk = qtmux->current_chunk_duration;
    }

    /* record the entry both in the pad's sample array and in the trak */
    sample_entry.track_id = qpad->trak->tkhd.track_ID;
    sample_entry.nsamples = nsamples;
    sample_entry.delta = scaled_duration / nsamples;
    sample_entry.size = sample_size / nsamples;
    sample_entry.chunk_offset = chunk_offset;
    sample_entry.pts_offset = pts_offset;
    sample_entry.sync = sync;
    sample_entry.do_pts = TRUE;
    g_array_append_val (qpad->samples, sample_entry);
    atom_trak_add_samples (qpad->trak, nsamples, scaled_duration / nsamples,
        sample_size / nsamples, chunk_offset, sync, pts_offset);

    qpad->total_duration = next_timestamp;
    qtmux->mdat_size += sample_size;
    qpad->sample_offset += nsamples;
  }

  return TRUE;
}
+
+static GstFlowReturn
+gst_qt_mux_start_file (GstQTMux * qtmux)
+{
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstCaps *caps;
+ GstClockTime reserved_max_duration;
+ guint reserved_bytes_per_sec_per_trak;
+ GList *l;
+
+ GST_DEBUG_OBJECT (qtmux, "starting file");
+
+ GST_OBJECT_LOCK (qtmux);
+ reserved_max_duration = qtmux->reserved_max_duration;
+ reserved_bytes_per_sec_per_trak = qtmux->reserved_bytes_per_sec_per_trak;
+ GST_OBJECT_UNLOCK (qtmux);
+
+ caps =
+ gst_caps_copy (gst_pad_get_pad_template_caps (GST_AGGREGATOR_SRC_PAD
+ (qtmux)));
+ /* qtmux has structure with and without variant, remove all but the first */
+ g_assert (gst_caps_truncate (caps));
+ gst_aggregator_set_src_caps (GST_AGGREGATOR (qtmux), caps);
+ gst_caps_unref (caps);
+
+ /* Default is 'normal' mode */
+ qtmux->mux_mode = GST_QT_MUX_MODE_MOOV_AT_END;
+
+ /* Require a sensible fragment duration when muxing
+ * using the ISML muxer */
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_ISML &&
+ qtmux->fragment_duration == 0)
+ goto invalid_isml;
+
+ if (qtmux->fragment_duration > 0) {
+ qtmux->mux_mode = GST_QT_MUX_MODE_FRAGMENTED;
+ if (qtmux->streamable
+ && qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_DASH_OR_MSS) {
+ qtmux->fragment_mode = GST_QT_MUX_FRAGMENT_STREAMABLE;
+ }
+ } else if (qtmux->fast_start) {
+ qtmux->mux_mode = GST_QT_MUX_MODE_FAST_START;
+ } else if (reserved_max_duration != GST_CLOCK_TIME_NONE) {
+ if (reserved_max_duration == 0) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("reserved-max-duration of 0 is not allowed"), (NULL));
+ return GST_FLOW_ERROR;
+ }
+ if (qtmux->reserved_prefill)
+ qtmux->mux_mode = GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL;
+ else
+ qtmux->mux_mode = GST_QT_MUX_MODE_ROBUST_RECORDING;
+ }
+
+ qtmux->downstream_seekable = gst_qt_mux_downstream_is_seekable (qtmux);
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:
+ break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING:
+ /* We have to be able to seek to rewrite the mdat header, or any
+ * moov atom we write will not be visible in the file, because an
+ * MDAT with 0 as the size covers the rest of the file. A file
+ * with no moov is not playable, so error out now. */
+ if (!qtmux->downstream_seekable) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Downstream is not seekable - will not be able to create a playable file"),
+ (NULL));
+ return GST_FLOW_ERROR;
+ }
+ if (qtmux->reserved_moov_update_period == GST_CLOCK_TIME_NONE) {
+ GST_WARNING_OBJECT (qtmux,
+ "Robust muxing requires reserved-moov-update-period to be set");
+ }
+ break;
+ case GST_QT_MUX_MODE_FAST_START:
+ break; /* Don't need seekability, ignore */
+ case GST_QT_MUX_MODE_FRAGMENTED:
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_STREAMABLE)
+ break;
+ if (!qtmux->downstream_seekable) {
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_DASH_OR_MSS) {
+ GST_WARNING_OBJECT (qtmux, "downstream is not seekable, but "
+ "streamable=false. Will ignore that and create streamable output "
+ "instead");
+ qtmux->streamable = TRUE;
+ g_object_notify (G_OBJECT (qtmux), "streamable");
+ qtmux->fragment_mode = GST_QT_MUX_FRAGMENT_STREAMABLE;
+ }
+ }
+ break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:
+ if (!qtmux->downstream_seekable) {
+ GST_WARNING_OBJECT (qtmux,
+ "downstream is not seekable, will not be able "
+ "to trim samples table at the end if less than reserved-duration is "
+ "recorded");
+ }
+ break;
+ }
+
+ GST_OBJECT_LOCK (qtmux);
+
+ if (qtmux->timescale == 0) {
+ guint32 suggested_timescale = 0;
+
+ /* Calculate a reasonable timescale for the moov:
+ * If there is video, it is the biggest video track timescale or an even
+ * multiple of it if it's smaller than 1800.
+ * Otherwise it is 1800 */
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+
+ if (!qpad->trak)
+ continue;
+
+ /* not video */
+ if (!qpad->trak->mdia.minf.vmhd)
+ continue;
+
+ suggested_timescale =
+ MAX (qpad->trak->mdia.mdhd.time_info.timescale, suggested_timescale);
+ }
+
+ if (suggested_timescale == 0)
+ suggested_timescale = 1800;
+
+ while (suggested_timescale < 1800)
+ suggested_timescale *= 2;
+
+ qtmux->timescale = suggested_timescale;
+ }
+
+ /* Set width/height/timescale of any closed caption tracks to that of the
+ * first video track */
+ {
+ guint video_width = 0, video_height = 0;
+ guint32 video_timescale = 0;
+ GList *l;
+
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+
+ if (!qpad->trak)
+ continue;
+
+ /* Not closed caption */
+ if (qpad->trak->mdia.hdlr.handler_type != FOURCC_clcp)
+ continue;
+
+ if (video_width == 0 || video_height == 0 || video_timescale == 0) {
+ GList *l2;
+
+ for (l2 = GST_ELEMENT_CAST (qtmux)->sinkpads; l2; l2 = l2->next) {
+ GstQTMuxPad *qpad2 = (GstQTMuxPad *) l2->data;
+
+ if (!qpad2->trak)
+ continue;
+
+ /* not video */
+ if (!qpad2->trak->mdia.minf.vmhd)
+ continue;
+
+ video_width = qpad2->trak->tkhd.width;
+ video_height = qpad2->trak->tkhd.height;
+ video_timescale = qpad2->trak->mdia.mdhd.time_info.timescale;
+ }
+ }
+
+ qpad->trak->tkhd.width = video_width << 16;
+ qpad->trak->tkhd.height = video_height << 16;
+ qpad->trak->mdia.mdhd.time_info.timescale = video_timescale;
+ }
+ }
+
+ /* initialize our moov recovery file */
+ if (qtmux->moov_recov_file_path) {
+ gst_qt_mux_prepare_moov_recovery (qtmux);
+ }
+
+ /* Make sure the first time we update the moov, we'll
+ * include any tagsetter tags */
+ qtmux->tags_changed = TRUE;
+
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /*
+ * send mdat header if already needed, and mark position for later update.
+ * We don't send ftyp now if we are on fast start mode, because we can
+ * better fine tune using the information we gather to create the whole moov
+ * atom.
+ */
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ /* Store this as the mdat offset for later updating
+ * when we write the moov */
+ qtmux->mdat_pos = qtmux->header_size;
+ /* extended atom in case we go over 4GB while writing and need
+ * the full 64-bit atom */
+ if (qtmux->downstream_seekable)
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0, TRUE,
+ FALSE);
+ break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING:
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ /* Pad ftyp out to an 8-byte boundary before starting the moov
+ * ping pong region. It should be well less than 1 disk sector,
+ * unless there's a bajillion compatible types listed,
+ * but let's be sure the free atom doesn't cross a sector
+ * boundary anyway */
+ if (qtmux->header_size % 8) {
+ /* Extra 8 bytes for the padding free atom header */
+ guint padding = (guint) (16 - (qtmux->header_size % 8));
+ GST_LOG_OBJECT (qtmux, "Rounding ftyp by %u bytes", padding);
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size, padding,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ /* Store this as the moov offset for later updating.
+ * We record mdat position below */
+ qtmux->moov_pos = qtmux->header_size;
+
+ /* Set up the initial 'ping' state of the ping-pong buffers */
+ qtmux->reserved_moov_first_active = TRUE;
+
+ gst_qt_mux_configure_moov (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+ /* Empty free atom to begin, starting on an 8-byte boundary */
+ ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size, 8, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ /* Moov header, not padded yet */
+ ret = gst_qt_mux_send_moov (qtmux, &qtmux->header_size, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ /* The moov we just sent contains the 'base' size of the moov, before
+ * we put in any time-dependent per-trak data. Use that to make
+ * a good estimate of how much extra to reserve */
+ /* Calculate how much space to reserve for our MOOV atom.
+ * We actually reserve twice that, for ping-pong buffers */
+ qtmux->base_moov_size = qtmux->last_moov_size;
+ GST_LOG_OBJECT (qtmux, "Base moov size is %u before any indexes",
+ qtmux->base_moov_size);
+ qtmux->reserved_moov_size = qtmux->base_moov_size +
+ gst_util_uint64_scale (reserved_max_duration,
+ reserved_bytes_per_sec_per_trak *
+ atom_moov_get_trak_count (qtmux->moov), GST_SECOND);
+
+ /* Need space for at least 4 atom headers. More really, but
+ * this as an absolute minimum */
+ if (qtmux->reserved_moov_size < 4 * 8)
+ goto reserved_moov_too_small;
+
+ GST_DEBUG_OBJECT (qtmux, "reserving header area of size %u",
+ 2 * qtmux->reserved_moov_size + 16);
+
+ GST_OBJECT_LOCK (qtmux);
+ qtmux->reserved_duration_remaining =
+ gst_util_uint64_scale (qtmux->reserved_moov_size -
+ qtmux->base_moov_size, GST_SECOND,
+ reserved_bytes_per_sec_per_trak *
+ atom_moov_get_trak_count (qtmux->moov));
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* Now that we know how much reserved space is targeted,
+ * output a free atom to fill the extra reserved */
+ ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size,
+ qtmux->reserved_moov_size - qtmux->base_moov_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* Then a free atom containing 'pong' buffer, with an
+ * extra 8 bytes to account for the free atom header itself */
+ ret = gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size,
+ qtmux->reserved_moov_size + 8, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* extra atoms go after the free/moov(s), before the mdat */
+ ret =
+ gst_qt_mux_send_extra_atoms (qtmux, TRUE, &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ qtmux->mdat_pos = qtmux->header_size;
+ /* extended atom in case we go over 4GB while writing and need
+ * the full 64-bit atom */
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0, TRUE,
+ FALSE);
+ break;
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:
+ {
+ guint32 atom_size;
+
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ /* Store this as the moov offset for later updating.
+ * We record mdat position below */
+ qtmux->moov_pos = qtmux->header_size;
+
+ if (!gst_qt_mux_prefill_samples (qtmux)) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Unsupported codecs or configuration for prefill mode"), (NULL));
+
+ return GST_FLOW_ERROR;
+ }
+
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov (qtmux);
+ gst_qt_mux_update_edit_lists (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+
+ /* Moov header with pre-filled samples */
+ ret = gst_qt_mux_send_moov (qtmux, &qtmux->header_size, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ GST_OBJECT_LOCK (qtmux);
+ atom_size = 12 * g_list_length (GST_ELEMENT (qtmux)->sinkpads) + 8;
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* last_moov_size now contains the full size of the moov, moov_pos the
+ * position. This allows us to rewrite it in the very end as needed */
+ qtmux->reserved_moov_size = qtmux->last_moov_size + atom_size;
+
+ /* Send an additional free atom at the end so we definitely have space
+ * to rewrite the moov header at the end and remove the samples that
+ * were not actually written */
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, &qtmux->header_size, atom_size,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* extra atoms go after the free/moov(s), before the mdat */
+ ret =
+ gst_qt_mux_send_extra_atoms (qtmux, TRUE, &qtmux->header_size, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ qtmux->mdat_pos = qtmux->header_size;
+
+ /* And now send the mdat header */
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size,
+ qtmux->mdat_size, TRUE, FALSE);
+
+ /* chunks position is set relative to the first byte of the
+ * MDAT atom payload. Set the overall offset into the file */
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+ {
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+
+ ret = gst_qt_mux_send_moov (qtmux, NULL, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ gst_qt_mux_seek_to (qtmux, qtmux->header_size);
+ }
+
+ GST_OBJECT_LOCK (qtmux);
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->current_chunk_offset = -1;
+ qtmux->mdat_size = 0;
+ qtmux->current_pad = NULL;
+ qtmux->longest_chunk = GST_CLOCK_TIME_NONE;
+
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ qtpad->total_bytes = 0;
+ qtpad->total_duration = 0;
+ qtpad->first_dts = qtpad->first_ts = GST_CLOCK_TIME_NONE;
+ qtpad->last_dts = GST_CLOCK_TIME_NONE;
+ qtpad->sample_offset = 0;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ break;
+ }
+ case GST_QT_MUX_MODE_FAST_START:
+ GST_OBJECT_LOCK (qtmux);
+ qtmux->fast_start_file = g_fopen (qtmux->fast_start_file_path, "wb+");
+ if (!qtmux->fast_start_file)
+ goto open_failed;
+ GST_OBJECT_UNLOCK (qtmux);
+ /* send a dummy buffer for preroll */
+ ret = gst_qt_mux_send_buffer (qtmux, gst_buffer_new (), NULL, FALSE);
+ break;
+ case GST_QT_MUX_MODE_FRAGMENTED:
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (qtmux, "fragment duration %d ms, writing headers",
+ qtmux->fragment_duration);
+ qtmux->fragment_sequence = 0;
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+ /* Store this as the mdat offset for later updating
+ * when we write the moov */
+ qtmux->mdat_pos = qtmux->header_size;
+ /* extended atom in case we go over 4GB while writing and need
+ * the full 64-bit atom */
+ ret =
+ gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0, TRUE,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ } else {
+ /* store the moov pos so we can update the duration later
+ * in non-streamable mode */
+ qtmux->moov_pos = qtmux->header_size;
+
+ /* prepare moov and/or tags */
+ qtmux->fragment_sequence++;
+ gst_qt_mux_configure_moov (qtmux);
+ gst_qt_mux_setup_metadata (qtmux);
+ ret =
+ gst_qt_mux_send_moov (qtmux, &qtmux->header_size, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ /* extra atoms */
+ ret =
+ gst_qt_mux_send_extra_atoms (qtmux, TRUE, &qtmux->header_size,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ break;
+ }
+ /* prepare index if not streamable, or overwriting with moov */
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_DASH_OR_MSS)
+ qtmux->mfra = atom_mfra_new (qtmux->context);
+ break;
+ }
+
+ return ret;
+ /* ERRORS */
+invalid_isml:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Cannot create an ISML file with 0 fragment duration"), (NULL));
+ return GST_FLOW_ERROR;
+ }
+reserved_moov_too_small:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX,
+ ("Not enough reserved space for creating headers"), (NULL));
+ return GST_FLOW_ERROR;
+ }
+open_failed:
+ {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, OPEN_READ_WRITE,
+ (("Could not open temporary file \"%s\""),
+ qtmux->fast_start_file_path), GST_ERROR_SYSTEM);
+ GST_OBJECT_UNLOCK (qtmux);
+ return GST_FLOW_ERROR;
+ }
+}
+
+static GstFlowReturn
+gst_qt_mux_send_last_buffers (GstQTMux * qtmux)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GList *sinkpads, *l;
+
+ GST_OBJECT_LOCK (qtmux);
+ sinkpads = g_list_copy_deep (GST_ELEMENT_CAST (qtmux)->sinkpads,
+ (GCopyFunc) gst_object_ref, NULL);
+ GST_OBJECT_UNLOCK (qtmux);
+
+ for (l = sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ /* avoid add_buffer complaining if not negotiated
+ * in which case no buffers either, so skipping */
+ if (!qtpad->fourcc) {
+ GST_DEBUG_OBJECT (qtmux, "Pad %s has never had buffers",
+ GST_PAD_NAME (qtpad));
+ continue;
+ }
+
+ /* send last buffer; also flushes possibly queued buffers/ts */
+ GST_DEBUG_OBJECT (qtmux, "Sending the last buffer for pad %s",
+ GST_PAD_NAME (qtpad));
+ ret = gst_qt_mux_add_buffer (qtmux, qtpad, NULL);
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (qtmux, "Failed to send last buffer for %s, "
+ "flow return: %s", GST_PAD_NAME (qtpad), gst_flow_get_name (ret));
+ }
+ }
+
+ g_list_free_full (sinkpads, gst_object_unref);
+
+ return ret;
+}
+
/* Derive file-global values from the per-pad state once buffers have
 * been flushed: the overall first PTS and last DTS of the file, the
 * per-trak average/maximum bitrates, and the largest video dimensions
 * (which are then copied into any tx3g subtitle traks). */
static void
gst_qt_mux_update_global_statistics (GstQTMux * qtmux)
{
  GList *l;

  /* for setting some subtitles fields */
  guint max_width = 0;
  guint max_height = 0;

  qtmux->first_ts = qtmux->last_dts = GST_CLOCK_TIME_NONE;

  /* First pass: timestamps, dimensions and bitrates */
  GST_OBJECT_LOCK (qtmux);
  for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
    GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;

    /* pads that never negotiated carry no samples; skip them */
    if (!qtpad->fourcc) {
      GST_DEBUG_OBJECT (qtmux, "Pad %s has never had buffers",
          GST_PAD_NAME (qtpad));
      continue;
    }

    /* having flushed above, can check for buffers now */
    if (GST_CLOCK_TIME_IS_VALID (qtpad->first_ts)) {
      GstClockTime first_pts_in = qtpad->first_ts;
      /* it should be, since we got first_ts by adding adjustment
       * to a positive incoming PTS */
      if (qtpad->dts_adjustment <= first_pts_in)
        first_pts_in -= qtpad->dts_adjustment;
      /* determine max stream duration */
      if (!GST_CLOCK_TIME_IS_VALID (qtmux->last_dts)
          || qtpad->last_dts > qtmux->last_dts) {
        qtmux->last_dts = qtpad->last_dts;
      }
      if (!GST_CLOCK_TIME_IS_VALID (qtmux->first_ts)
          || first_pts_in < qtmux->first_ts) {
        /* we need the original incoming PTS here, as this first_ts
         * is used in update_edit_lists to construct the edit list that arrange
         * for sync'ed streams. The first_ts is most likely obtained from
         * some (audio) stream with 0 dts_adjustment and initial 0 PTS,
         * so it makes no difference, though it matters in other cases */
        qtmux->first_ts = first_pts_in;
      }
    }

    /* subtitles need to know the video width/height,
     * it is stored shifted 16 bits to the left according to the
     * spec */
    max_width = MAX (max_width, (qtpad->trak->tkhd.width >> 16));
    max_height = MAX (max_height, (qtpad->trak->tkhd.height >> 16));

    /* update average bitrate of streams if needed */
    {
      guint32 avgbitrate = 0;
      guint32 maxbitrate = qtpad->max_bitrate;

      /* a pre-set average bitrate takes precedence over one computed
       * from total bytes / total duration */
      if (qtpad->avg_bitrate)
        avgbitrate = qtpad->avg_bitrate;
      else if (qtpad->total_duration > 0)
        avgbitrate = (guint32) gst_util_uint64_scale_round (qtpad->total_bytes,
            8 * GST_SECOND, qtpad->total_duration);

      atom_trak_update_bitrates (qtpad->trak, avgbitrate, maxbitrate);
    }
  }
  GST_OBJECT_UNLOCK (qtmux);

  /* need to update values on subtitle traks now that we know the
   * max width and height */
  GST_OBJECT_LOCK (qtmux);
  for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
    GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;

    if (!qtpad->fourcc) {
      GST_DEBUG_OBJECT (qtmux, "Pad %s has never had buffers",
          GST_PAD_NAME (qtpad));
      continue;
    }

    if (qtpad->fourcc == FOURCC_tx3g) {
      atom_trak_tx3g_update_dimension (qtpad->trak, max_width, max_height);
    }
  }
  GST_OBJECT_UNLOCK (qtmux);
}
+
/* Called after gst_qt_mux_update_global_statistics() updates the
 * first_ts tracking, to create/set edit lists for delayed streams.
 * For every trak: clears existing edit lists, inserts an empty-segment
 * entry when the stream started noticeably later than the earliest one,
 * and always adds a whole-track entry (giving the track duration in the
 * more accurate media timescale). Extends the trak and moov durations
 * by the lateness where applicable. */
static void
gst_qt_mux_update_edit_lists (GstQTMux * qtmux)
{
  GList *l;

  GST_DEBUG_OBJECT (qtmux, "Media first ts selected: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (qtmux->first_ts));
  /* add/update EDTSs for late streams. configure_moov will have
   * set the trak durations above by summing the sample tables,
   * here we extend that if needing to insert an empty segment */
  GST_OBJECT_LOCK (qtmux);
  for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
    GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;

    /* start from a clean slate; entries are re-added below */
    atom_trak_edts_clear (qtpad->trak);

    if (GST_CLOCK_TIME_IS_VALID (qtpad->first_ts)) {
      guint32 lateness = 0;
      guint32 duration = qtpad->trak->tkhd.duration;
      gboolean has_gap;

      /* gap = this stream's first sample starts after the file's
       * earliest stream (accounting for its DTS adjustment) */
      has_gap = (qtpad->first_ts > (qtmux->first_ts + qtpad->dts_adjustment));

      if (has_gap) {
        GstClockTime diff, trak_lateness;

        diff = qtpad->first_ts - (qtmux->first_ts + qtpad->dts_adjustment);
        /* lateness in the moov timescale, used for the elst entry */
        lateness = gst_util_uint64_scale_round (diff,
            qtmux->timescale, GST_SECOND);

        /* Allow up to 1 trak timescale unit of lateness, Such a small
         * timestamp/duration can't be represented by the trak-specific parts
         * of the headers anyway, so it's irrelevantly small */
        trak_lateness = gst_util_uint64_scale (diff,
            atom_trak_get_timescale (qtpad->trak), GST_SECOND);

        if (trak_lateness > 0 && diff > qtmux->start_gap_threshold) {
          GST_DEBUG_OBJECT (qtmux,
              "Pad %s is a late stream by %" GST_TIME_FORMAT,
              GST_PAD_NAME (qtpad), GST_TIME_ARGS (diff));

          /* empty-edit entry: media_time of -1, rate 1.0 in 16.16
           * fixed point (65536) */
          atom_trak_set_elst_entry (qtpad->trak, 0, lateness, (guint32) - 1,
              (guint32) (1 * 65536.0));
        }
      }

      /* Always write an edit list for the whole track. In general this is not
       * necessary except for the case of having a gap or DTS adjustment but
       * it allows to give the whole track's duration in the usually more
       * accurate media timescale
       */
      {
        GstClockTime ctts = 0;
        guint32 media_start;

        /* media_start is the PTS/DTS offset of the first sample,
         * expressed in the trak's media timescale */
        if (qtpad->first_ts > qtpad->first_dts)
          ctts = qtpad->first_ts - qtpad->first_dts;

        media_start = gst_util_uint64_scale_round (ctts,
            atom_trak_get_timescale (qtpad->trak), GST_SECOND);

        /* atom_trak_set_elst_entry() has a quirk - if the edit list
         * is empty because there's no gap added above, this call
         * will not replace index 1, it will create the entry at index 0.
         * Luckily, that's exactly what we want here */
        atom_trak_set_elst_entry (qtpad->trak, 1, duration, media_start,
            (guint32) (1 * 65536.0));
      }

      /* need to add the empty time to the trak duration */
      duration += lateness;
      qtpad->trak->tkhd.duration = duration;
      /* keep any associated timecode trak in sync */
      if (qtpad->tc_trak) {
        qtpad->tc_trak->tkhd.duration = duration;
        qtpad->tc_trak->mdia.mdhd.time_info.duration = duration;
      }

      /* And possibly grow the moov duration */
      if (duration > qtmux->moov->mvhd.time_info.duration) {
        qtmux->moov->mvhd.time_info.duration = duration;
        qtmux->moov->mvex.mehd.fragment_duration = duration;
      }
    }
  }
  GST_OBJECT_UNLOCK (qtmux);
}
+
+static GstFlowReturn
+gst_qt_mux_update_timecode (GstQTMux * qtmux, GstQTMuxPad * qtpad)
+{
+ GstBuffer *buf;
+ GstMapInfo map;
+ guint64 offset = qtpad->tc_pos;
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+
+ if (qtmux_klass->format != GST_QT_MUX_FORMAT_QT &&
+ !qtmux->force_create_timecode_trak)
+ return GST_FLOW_OK;
+
+ g_assert (qtpad->tc_pos != -1);
+
+ gst_qt_mux_seek_to (qtmux, offset);
+
+ buf = gst_buffer_new_and_alloc (4);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+
+ GST_WRITE_UINT32_BE (map.data,
+ gst_video_time_code_frames_since_daily_jam (qtpad->first_tc));
+ gst_buffer_unmap (buf, &map);
+
+ /* Reset this value, so the timecode won't be re-rewritten */
+ qtpad->tc_pos = -1;
+
+ return gst_qt_mux_send_buffer (qtmux, buf, &offset, FALSE);
+}
+
+static void
+unref_buffer_if_set (GstBuffer * buffer)
+{
+ if (buffer)
+ gst_buffer_unref (buffer);
+}
+
+static GstFlowReturn
+gst_qtmux_push_mdat_stored_buffers (GstQTMux * qtmux)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GList *l = qtmux->output_buffers;
+ guint64 mdat_header_size = 0, size = 0;
+
+ for (; l; l = g_list_next (l)) {
+ GstBuffer *buf = (GstBuffer *) l->data;
+
+ size += gst_buffer_get_size (buf);
+ }
+
+ if (size == 0)
+ return GST_FLOW_OK;
+
+ GST_DEBUG_OBJECT (qtmux, "Pushing stored buffers of size %" G_GUINT64_FORMAT
+ " current mdat size %" G_GUINT64_FORMAT, size, qtmux->mdat_size);
+
+ ret = gst_qt_mux_send_mdat_header (qtmux, &mdat_header_size, size,
+ size > MDAT_LARGE_FILE_LIMIT, FALSE);
+
+ /* reset chunking */
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ qtmux->current_chunk_offset = -1;
+
+ /* on the first mdat, we need to offset the header by the mdat header size
+ * as the moov offset is in relation to the first data byte inside the first
+ * mdat */
+ if (qtmux->mdat_size == 0)
+ qtmux->header_size += mdat_header_size;
+ qtmux->mdat_size += mdat_header_size;
+
+ l = qtmux->output_buffers;
+ while (ret == GST_FLOW_OK && l) {
+ GstBuffer *buf = (GstBuffer *) l->data;
+
+ ret = gst_qt_mux_send_buffer (qtmux, buf, &qtmux->mdat_size, TRUE);
+
+ l->data = NULL;
+ l = g_list_next (l);
+ }
+
+ g_list_free_full (qtmux->output_buffers,
+ (GDestroyNotify) unref_buffer_if_set);
+ qtmux->output_buffers = NULL;
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_qt_mux_stop_file (GstQTMux * qtmux)
+{
+ gboolean ret = GST_FLOW_OK;
+ guint64 offset = 0, size = 0;
+ gboolean large_file;
+ GList *sinkpads, *l;
+
+ GST_DEBUG_OBJECT (qtmux, "Updating remaining values and sending last data");
+
+ /* pushing last buffers for each pad */
+ if ((ret = gst_qt_mux_send_last_buffers (qtmux)) != GST_FLOW_OK)
+ return ret;
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_FRAGMENTED
+ && qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_STREAMABLE) {
+ /* Streamable mode; no need to write duration or MFRA */
+ GST_DEBUG_OBJECT (qtmux, "streamable file; nothing to stop");
+ return GST_FLOW_OK;
+ }
+
+ gst_qt_mux_update_global_statistics (qtmux);
+
+ GST_OBJECT_LOCK (qtmux);
+ sinkpads = g_list_copy_deep (GST_ELEMENT_CAST (qtmux)->sinkpads,
+ (GCopyFunc) gst_object_ref, NULL);
+ GST_OBJECT_UNLOCK (qtmux);
+
+ for (l = sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+
+ if (qtpad->tc_pos != -1) {
+ /* File is being stopped and timecode hasn't been updated. Update it now
+ * with whatever we have */
+ ret = gst_qt_mux_update_timecode (qtmux, qtpad);
+ if (ret != GST_FLOW_OK) {
+ g_list_free_full (sinkpads, gst_object_unref);
+ return ret;
+ }
+ }
+ }
+
+ g_list_free_full (sinkpads, gst_object_unref);
+
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:{
+ if (!qtmux->downstream_seekable) {
+ ret = gst_qtmux_push_mdat_stored_buffers (qtmux);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+ break;
+ }
+ case GST_QT_MUX_MODE_FRAGMENTED:{
+ GstBuffer *buf;
+ GstClockTime duration;
+
+ if (qtmux->mfra) {
+ guint8 *data = NULL;
+
+ size = offset = 0;
+
+ GST_DEBUG_OBJECT (qtmux, "adding mfra");
+ if (!atom_mfra_copy_data (qtmux->mfra, &data, &size, &offset))
+ goto serialize_error;
+ buf = _gst_buffer_new_take_data (data, offset);
+ ret = gst_qt_mux_send_buffer (qtmux, buf, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ /* only mvex duration is updated,
+ * mvhd should be consistent with empty moov
+ * (but TODO maybe some clients do not handle that well ?) */
+ duration = gst_util_uint64_scale_round (qtmux->last_dts, qtmux->timescale,
+ GST_SECOND);
+
+ GST_DEBUG_OBJECT (qtmux,
+ "writing moov with mvhd/mvex duration %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (qtmux->last_dts));
+ if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+ /* seek and overwrite the original moov with an invalid atom */
+ /* XXX: assumes an extended size atom is not used for the moov */
+
+ qtmux->moov->mvhd.time_info.duration = duration;
+
+ /* (+4) skip the skip bytes */
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos + 4);
+
+ /* invalidate the previous moov */
+ buf = gst_buffer_new_wrapped (g_strdup ("h"), 1);
+ ret = gst_qt_mux_send_buffer (qtmux, buf, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* we want to rewrite the first mdat to cover the entire data before
+ * this moov */
+ qtmux->mdat_size = qtmux->header_size - qtmux->mdat_pos - 16;
+
+ gst_qt_mux_seek_to (qtmux, qtmux->mdat_pos);
+
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* Then write the moov atom as in moov-at-end *without* updating the
+ * mdat size */
+ gst_qt_mux_seek_to (qtmux, qtmux->header_size);
+
+ /* revert back to moov-at-end assumptions where header_size is the
+ * size up to the first byte of data in the mdat */
+ qtmux->header_size = qtmux->mdat_pos + 16;
+ break;
+ } else {
+ qtmux->moov->mvex.mehd.fragment_duration = duration;
+
+ /* seek and rewrite the header */
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+ /* no need to seek back */
+ return gst_qt_mux_send_moov (qtmux, NULL, 0, FALSE, FALSE);
+ }
+ }
+ case GST_QT_MUX_MODE_ROBUST_RECORDING:{
+ ret = gst_qt_mux_robust_recording_rewrite_moov (qtmux);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ return ret;
+ /* Finalise by writing the final size into the mdat. Up until now
+ * it's been 0, which means 'rest of the file'
+ * No need to seek back after this, we won't write any more */
+ return gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, TRUE);
+ }
+ case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:{
+ GList *l;
+ guint32 next_track_id = qtmux->moov->mvhd.next_track_id;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+ guint64 block_idx;
+ AtomSTBL *stbl = &qpad->trak->mdia.minf.stbl;
+
+ /* Get the block index of the last sample we wrote, not of the next
+ * sample we would write */
+ block_idx = prefill_get_block_index (qtmux, qpad);
+
+ /* stts */
+ if (block_idx > 0) {
+ STTSEntry *entry;
+ guint64 nsamples = 0;
+ gint i, n;
+
+ n = atom_array_get_len (&stbl->stts.entries);
+ for (i = 0; i < n; i++) {
+ entry = &atom_array_index (&stbl->stts.entries, i);
+ if (nsamples + entry->sample_count >= qpad->sample_offset) {
+ entry->sample_count = qpad->sample_offset - nsamples;
+ stbl->stts.entries.len = i + 1;
+ break;
+ }
+ nsamples += entry->sample_count;
+ }
+ g_assert (i < n);
+ } else {
+ stbl->stts.entries.len = 0;
+ }
+
+ /* stsz */
+ {
+ g_assert (stbl->stsz.entries.len == 0);
+ stbl->stsz.table_size = qpad->sample_offset;
+ }
+
+ /* stco/stsc */
+ {
+ gint i, n;
+ guint64 nsamples = 0;
+ gint chunk_index = 0;
+ const TrakBufferEntryInfo *sample_entry;
+
+ if (block_idx > 0) {
+ sample_entry =
+ &g_array_index (qpad->samples, TrakBufferEntryInfo,
+ block_idx - 1);
+
+ n = stbl->stco64.entries.len;
+ for (i = 0; i < n; i++) {
+ guint64 *entry = &atom_array_index (&stbl->stco64.entries, i);
+
+ if (*entry == sample_entry->chunk_offset) {
+ stbl->stco64.entries.len = i + 1;
+ chunk_index = i + 1;
+ break;
+ }
+ }
+ g_assert (i < n);
+ g_assert (chunk_index > 0);
+
+ n = stbl->stsc.entries.len;
+ for (i = 0; i < n; i++) {
+ STSCEntry *entry = &atom_array_index (&stbl->stsc.entries, i);
+
+ if (entry->first_chunk >= chunk_index)
+ break;
+
+ if (i > 0) {
+ nsamples +=
+ (entry->first_chunk - atom_array_index (&stbl->stsc.entries,
+ i -
+ 1).first_chunk) * atom_array_index (&stbl->stsc.entries,
+ i - 1).samples_per_chunk;
+ }
+ }
+ g_assert (i <= n);
+
+ if (i > 0) {
+ STSCEntry *prev_entry =
+ &atom_array_index (&stbl->stsc.entries, i - 1);
+ nsamples +=
+ (chunk_index -
+ prev_entry->first_chunk) * prev_entry->samples_per_chunk;
+ if (qpad->sample_offset - nsamples > 0) {
+ stbl->stsc.entries.len = i;
+ atom_stsc_add_new_entry (&stbl->stsc, chunk_index,
+ qpad->sample_offset - nsamples, stbl->stsd.n_entries);
+ } else {
+ stbl->stsc.entries.len = i;
+ stbl->stco64.entries.len--;
+ }
+ } else {
+ /* Everything in a single chunk */
+ stbl->stsc.entries.len = 0;
+ atom_stsc_add_new_entry (&stbl->stsc, chunk_index,
+ qpad->sample_offset, stbl->stsd.n_entries);
+ }
+ } else {
+ stbl->stco64.entries.len = 0;
+ stbl->stsc.entries.len = 0;
+ }
+ }
+
+ {
+ GList *walk2;
+
+ for (walk2 = qtmux->moov->mvex.trexs; walk2; walk2 = walk2->next) {
+ AtomTREX *trex = walk2->data;
+
+ if (trex->track_ID == qpad->trak->tkhd.track_ID) {
+ trex->track_ID = next_track_id;
+ break;
+ }
+ }
+
+ qpad->trak->tkhd.track_ID = next_track_id++;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ qtmux->moov->mvhd.next_track_id = next_track_id;
+
+ gst_qt_mux_update_global_statistics (qtmux);
+ gst_qt_mux_configure_moov (qtmux);
+
+ gst_qt_mux_update_edit_lists (qtmux);
+
+ /* Check if any gap edit lists were added. We don't have any space
+ * reserved for this in the moov and the pre-finalized moov would have
+ * broken A/V synchronization. Error out here now
+ */
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qpad = (GstQTMuxPad *) l->data;
+
+ if (qpad->trak->edts
+ && g_slist_length (qpad->trak->edts->elst.entries) > 1) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Can't support gaps in prefill mode"));
+
+ GST_OBJECT_UNLOCK (qtmux);
+
+ return GST_FLOW_ERROR;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ gst_qt_mux_setup_metadata (qtmux);
+ atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+ {
+ gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+
+ ret =
+ gst_qt_mux_send_moov (qtmux, NULL, qtmux->reserved_moov_size, FALSE,
+ FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ if (qtmux->reserved_moov_size > qtmux->last_moov_size) {
+ ret =
+ gst_qt_mux_send_free_atom (qtmux, NULL,
+ qtmux->reserved_moov_size - qtmux->last_moov_size, TRUE);
+ }
+
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ return ret;
+ }
+ default:
+ break;
+ }
+
+ /* Moov-at-end or fast-start mode from here down */
+ gst_qt_mux_configure_moov (qtmux);
+
+ gst_qt_mux_update_edit_lists (qtmux);
+
+ /* tags into file metadata */
+ gst_qt_mux_setup_metadata (qtmux);
+
+ large_file = (qtmux->mdat_size > MDAT_LARGE_FILE_LIMIT);
+
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_FAST_START:{
+ /* if faststart, update the offset of the atoms in the movie with the offset
+ * that the movie headers before mdat will cause.
+ * Also, send the ftyp */
+ offset = size = 0;
+
+ ret = gst_qt_mux_prepare_and_send_ftyp (qtmux);
+ if (ret != GST_FLOW_OK) {
+ goto ftyp_error;
+ }
+ /* copy into NULL to obtain size */
+ if (!atom_moov_copy_data (qtmux->moov, NULL, &size, &offset))
+ goto serialize_error;
+ GST_DEBUG_OBJECT (qtmux, "calculated moov atom size %" G_GUINT64_FORMAT,
+ offset);
+ offset += qtmux->header_size + (large_file ? 16 : 8);
+
+ /* sum up with the extra atoms size */
+ ret = gst_qt_mux_send_extra_atoms (qtmux, FALSE, &offset, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ break;
+ }
+ default:
+ offset = qtmux->header_size;
+ break;
+ }
+
+ /* Now that we know the size of moov + extra atoms, we can adjust
+ * the chunk offsets stored into the moov */
+ atom_moov_chunks_set_offset (qtmux->moov, offset);
+
+ /* write out moov and extra atoms */
+ /* note: as of this point, we no longer care about tracking written data size,
+ * since there is no more use for it anyway */
+ ret = gst_qt_mux_send_moov (qtmux, NULL, 0, FALSE, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* extra atoms */
+ ret = gst_qt_mux_send_extra_atoms (qtmux, TRUE, NULL, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ switch (qtmux->mux_mode) {
+ case GST_QT_MUX_MODE_MOOV_AT_END:
+ {
+ if (qtmux->downstream_seekable) {
+ /* mdat needs update iff not using faststart */
+ GST_DEBUG_OBJECT (qtmux, "updating mdat size at position %"
+ G_GUINT64_FORMAT " to size %" G_GUINT64_FORMAT, qtmux->mdat_pos,
+ qtmux->mdat_size);
+ ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+ qtmux->mdat_size, NULL, FALSE);
+ /* note; no seeking back to the end of file is done,
+ * since we no longer write anything anyway */
+ }
+ break;
+ }
+ case GST_QT_MUX_MODE_FAST_START:
+ {
+ /* send mdat atom and move buffered data into it */
+ /* mdat_size = accumulated (buffered data) */
+ ret = gst_qt_mux_send_mdat_header (qtmux, NULL, qtmux->mdat_size,
+ large_file, FALSE);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ ret = gst_qt_mux_send_buffered_data (qtmux, NULL);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ break;
+ }
+ case GST_QT_MUX_MODE_FRAGMENTED:
+ g_assert (qtmux->fragment_mode ==
+ GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE);
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+
+ return ret;
+
+ /* ERRORS */
+serialize_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Failed to serialize moov"));
+ return GST_FLOW_ERROR;
+ }
+ftyp_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL), ("Failed to send ftyp"));
+ return GST_FLOW_ERROR;
+ }
+}
+
+/* GstElementForeachPadFunc: recompute a pad's fragment duration budget.
+ * Converts the muxer-wide fragment-duration property (milliseconds) into
+ * this track's timescale units. Always returns TRUE so iteration covers
+ * every sink pad. */
+static gboolean
+gst_qtmux_pad_update_fragment_duration (GstElement * element, GstPad * pad,
+    gpointer user_data)
+{
+  GstQTMux *mux = (GstQTMux *) element;
+  GstQTMuxPad *qtpad = GST_QT_MUX_PAD (pad);
+
+  qtpad->fragment_duration =
+      gst_util_uint64_scale (mux->fragment_duration,
+      atom_trak_get_timescale (qtpad->trak), 1000);
+
+  return TRUE;
+}
+
+/* GstElementForeachPadFunc: move the pad's pending 'traf' atom (if any)
+ * into the moof passed via @user_data. The moof takes ownership, so the
+ * pad's pointer is cleared afterwards. Always returns TRUE to keep
+ * iterating over all sink pads. */
+static gboolean
+gst_qtmux_pad_collect_traf (GstElement * element, GstPad * pad,
+    gpointer user_data)
+{
+  GstQTMuxPad *qtpad = GST_QT_MUX_PAD (pad);
+  AtomMOOF *moof = user_data;
+
+  GST_TRACE_OBJECT (pad, "adding traf %p to moof %p", qtpad->traf, moof);
+
+  /* takes ownership */
+  if (qtpad->traf != NULL)
+    atom_moof_add_traf (moof, qtpad->traf);
+  qtpad->traf = NULL;
+
+  return TRUE;
+}
+
+/* Add one buffer plus its sample metadata to @pad's current fragment,
+ * flushing the pending fragment (as moov or moof + mdat, depending on the
+ * fragment mode) beforehand when @force is set, when a new keyframe arrives
+ * on a sync-aware pad, or when the fragment duration budget is exhausted.
+ *
+ * @buf may be NULL (e.g. at EOS when only a flush is wanted). Ownership of
+ * @buf is taken: it is either sent downstream, appended to the pad's
+ * fragment_buffers array, or dropped on error. @dts/@delta/@size/
+ * @chunk_offset/@pts_offset are already expressed in track timescale /
+ * byte units as prepared by the caller.
+ */
+static GstFlowReturn
+gst_qt_mux_pad_fragment_add_buffer (GstQTMux * qtmux, GstQTMuxPad * pad,
+    GstBuffer * buf, gboolean force, guint32 nsamples, gint64 dts,
+    guint32 delta, guint32 size, guint64 chunk_offset, gboolean sync,
+    gint64 pts_offset)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint index = 0;
+
+  GST_LOG_OBJECT (pad, "%p %u %" G_GUINT64_FORMAT " %" G_GUINT64_FORMAT,
+      pad->traf, force, qtmux->current_chunk_offset, chunk_offset);
+
+  /* setup if needed */
+  if (G_UNLIKELY (!pad->traf || force))
+    goto init;
+
+flush:
+  /* flush pad fragment if threshold reached,
+   * or at new keyframe if we should be minding those in the first place */
+  if (G_UNLIKELY (force || (sync && pad->sync) ||
+          pad->fragment_duration < (gint64) delta)) {
+
+    if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+      if (qtmux->fragment_sequence == 0) {
+        /* the first fragment which we write as a moov */
+        guint64 orig_offset;
+        guint64 offset = orig_offset = qtmux->mdat_pos + 16 + qtmux->mdat_size;
+        guint64 chunk_increase, buf_size;
+        AtomMOOF *moof;
+
+        GST_LOG_OBJECT (qtmux, "current file offset calculated to be %"
+            G_GUINT64_FORMAT " based on mdat pos %" G_GUINT64_FORMAT
+            " and size %" G_GUINT64_FORMAT, offset, qtmux->mdat_pos,
+            qtmux->mdat_size);
+
+        /* the first "fragment" is represented by the moov itself, so the
+         * collected trafs are only created to be disposed of here */
+        moof = atom_moof_new (qtmux->context, qtmux->fragment_sequence);
+        gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+            gst_qtmux_pad_collect_traf, moof);
+        atom_moof_free (moof);
+
+        ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->mdat_pos,
+            qtmux->mdat_size, NULL, FALSE);
+        if (ret != GST_FLOW_OK)
+          return ret;
+
+        /* seek back to the end of the file */
+        qtmux->moov_pos = offset;
+        gst_qt_mux_seek_to (qtmux, qtmux->moov_pos);
+
+        /* update moov data */
+        gst_qt_mux_update_global_statistics (qtmux);
+        gst_qt_mux_configure_moov_full (qtmux, TRUE, qtmux->timescale);
+        gst_qt_mux_update_edit_lists (qtmux);
+        gst_qt_mux_setup_metadata (qtmux);
+        /* chunk offset is the offset to the first byte inside the mdat */
+        atom_moov_chunks_set_offset (qtmux->moov, qtmux->mdat_pos + 16);
+
+        ret = gst_qt_mux_send_moov (qtmux, &offset, 0, TRUE, FALSE);
+        if (ret != GST_FLOW_OK)
+          return ret;
+
+        /* for the continuation in fragments, header_size is the tracking write
+         * position */
+        qtmux->header_size = offset;
+        qtmux->moof_mdat_pos = 0;
+
+        buf_size = (buf ? gst_buffer_get_size (buf) : 0);
+
+        chunk_increase = offset - orig_offset + 16;
+        /* we need to undo the addition to qtmux->current_chunk_size of this
+         * buffer performed in gst_qt_mux_register_buffer_in_chunk() */
+        chunk_increase += qtmux->current_chunk_size - buf_size;
+        GST_LOG_OBJECT (qtmux, "We think we have written %" G_GUINT64_FORMAT
+            " including a moov and mdat header of %" G_GUINT64_FORMAT
+            ". mangling this buffer's chunk offset from %" G_GUINT64_FORMAT
+            " to %" G_GUINT64_FORMAT, qtmux->header_size,
+            offset - orig_offset + 16, chunk_offset,
+            chunk_offset + chunk_increase);
+        /* this is the offset for the current chunk that is applied to all subsequent chunks */
+        chunk_offset += chunk_increase;
+        qtmux->current_chunk_offset += chunk_increase;
+        qtmux->current_chunk_size = buf_size;
+        GST_LOG_OBJECT (qtmux, "change next chunk offset to %" G_GUINT64_FORMAT
+            " and size to %" G_GUINT64_FORMAT, qtmux->current_chunk_offset,
+            qtmux->current_chunk_size);
+
+        gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+            gst_qtmux_pad_update_fragment_duration, NULL);
+      } else {
+        AtomMOOF *moof;
+        guint64 size = 0, offset = 0;
+        guint8 *data = NULL;
+        GstBuffer *moof_buffer;
+        guint64 moof_size = 0, buf_size;
+        guint64 chunk_increase;
+
+        /* rewrite the mdat header */
+        ret = gst_qt_mux_update_mdat_size (qtmux, qtmux->moof_mdat_pos,
+            qtmux->header_size - qtmux->moof_mdat_pos - 16, NULL, FALSE);
+        if (ret != GST_FLOW_OK)
+          return ret;
+
+        /* reseek back to the current position */
+        gst_qt_mux_seek_to (qtmux, qtmux->header_size);
+
+        moof = atom_moof_new (qtmux->context, qtmux->fragment_sequence);
+        gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+            gst_qtmux_pad_collect_traf, moof);
+        atom_moof_set_base_offset (moof, qtmux->moof_mdat_pos);
+        atom_moof_copy_data (moof, &data, &size, &offset);
+        moof_buffer = _gst_buffer_new_take_data (data, offset);
+        moof_size = gst_buffer_get_size (moof_buffer);
+
+        atom_moof_free (moof);
+        /* now we know where moof ends up, update offset in tfra */
+        if (pad->tfra)
+          atom_tfra_update_offset (pad->tfra, qtmux->header_size);
+
+        GST_LOG_OBJECT (qtmux, "writing moof of size %" G_GUINT64_FORMAT,
+            moof_size);
+        ret =
+            gst_qt_mux_send_buffer (qtmux, moof_buffer, &qtmux->header_size,
+            FALSE);
+        if (ret != GST_FLOW_OK)
+          goto moof_send_error;
+        qtmux->moof_mdat_pos = 0;
+
+        /* if we are writing a final moov, then we need to increase our chunk
+         * offsets to include the moof/mdat headers that were just written
+         * so that they are correctly skipped over.
+         */
+        buf_size = (buf ? gst_buffer_get_size (buf) : 0);
+        chunk_increase = moof_size + 16;
+        /* we need to undo the addition to qtmux->current_chunk_size of this
+         * buffer performed in gst_qt_mux_register_buffer_in_chunk() */
+        chunk_increase += qtmux->current_chunk_size - buf_size;
+        GST_LOG_OBJECT (qtmux, "We think we have currently written %"
+            G_GUINT64_FORMAT " including a moof of %" G_GUINT64_FORMAT
+            " mangling this buffer's chunk offset from %" G_GUINT64_FORMAT
+            " to %" G_GUINT64_FORMAT, qtmux->header_size, moof_size,
+            chunk_offset, chunk_offset + chunk_increase);
+        chunk_offset += chunk_increase;
+        /* this is the offset for the next chunk */
+        qtmux->current_chunk_offset += chunk_increase;
+        qtmux->current_chunk_size = buf_size;
+        GST_LOG_OBJECT (qtmux, "change next chunk offset to %" G_GUINT64_FORMAT
+            " and size to %" G_GUINT64_FORMAT, qtmux->current_chunk_offset,
+            qtmux->current_chunk_size);
+
+        /* if we are generating a moof, it is for all streams */
+        gst_element_foreach_sink_pad (GST_ELEMENT (qtmux),
+            gst_qtmux_pad_update_fragment_duration, NULL);
+      }
+    } else {
+      /* not moov-related. writes out moof then mdat for a single stream only */
+      AtomMOOF *moof;
+      guint64 size = 0, offset = 0;
+      guint8 *data = NULL;
+      GstBuffer *moof_buffer;
+      guint i, total_size;
+      AtomTRUN *first_trun;
+
+      total_size = 0;
+      for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++) {
+        total_size +=
+            gst_buffer_get_size (atom_array_index (&pad->fragment_buffers, i));
+      }
+
+      moof = atom_moof_new (qtmux->context, qtmux->fragment_sequence);
+      /* takes ownership */
+      atom_moof_add_traf (moof, pad->traf);
+      /* write the offset into the first 'trun'. All other truns are assumed
+       * to follow on from this trun. Skip over the mdat header (+12) */
+      atom_moof_copy_data (moof, &data, &size, &offset);
+      first_trun = (AtomTRUN *) pad->traf->truns->data;
+      atom_trun_set_offset (first_trun, offset + 12);
+      pad->traf = NULL;
+      /* re-serialize now that the data offset is known; the first pass was
+       * only needed to learn the moof's size */
+      size = offset = 0;
+      atom_moof_copy_data (moof, &data, &size, &offset);
+      moof_buffer = _gst_buffer_new_take_data (data, offset);
+
+      atom_moof_free (moof);
+
+      /* now we know where moof ends up, update offset in tfra */
+      if (pad->tfra)
+        atom_tfra_update_offset (pad->tfra, qtmux->header_size);
+
+      GST_LOG_OBJECT (qtmux, "writing moof size %" G_GSIZE_FORMAT,
+          gst_buffer_get_size (moof_buffer));
+      ret =
+          gst_qt_mux_send_buffer (qtmux, moof_buffer, &qtmux->header_size,
+          FALSE);
+      if (ret != GST_FLOW_OK)
+        goto moof_send_error;
+
+      GST_LOG_OBJECT (qtmux, "writing %d buffers, total_size %d",
+          atom_array_get_len (&pad->fragment_buffers), total_size);
+
+      ret = gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, total_size,
+          FALSE, FALSE);
+      if (ret != GST_FLOW_OK)
+        goto mdat_header_send_error;
+
+      for (index = 0; index < atom_array_get_len (&pad->fragment_buffers);
+          index++) {
+        GST_DEBUG_OBJECT (qtmux, "sending fragment %p",
+            atom_array_index (&pad->fragment_buffers, index));
+        ret =
+            gst_qt_mux_send_buffer (qtmux,
+            atom_array_index (&pad->fragment_buffers, index),
+            &qtmux->header_size, FALSE);
+        if (ret != GST_FLOW_OK)
+          goto fragment_buf_send_error;
+      }
+
+    }
+    atom_array_clear (&pad->fragment_buffers);
+    qtmux->fragment_sequence++;
+    force = FALSE;
+  }
+
+init:
+  if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE
+      && qtmux->fragment_sequence == 0) {
+    /* first-fragment data goes straight into the mdat that will be covered
+     * by the moov; sample metadata is accumulated in the trak */
+    atom_trak_add_samples (pad->trak, nsamples, (gint32) delta, size,
+        chunk_offset, sync, pts_offset);
+
+    ret = gst_qt_mux_send_buffer (qtmux, buf, &qtmux->mdat_size, TRUE);
+    if (ret != GST_FLOW_OK)
+      return ret;
+    buf = NULL;
+
+    if (G_UNLIKELY (force))
+      goto flush;
+
+    if (!pad->traf) {
+      pad->traf = atom_traf_new (qtmux->context, atom_trak_get_id (pad->trak));
+      pad->fragment_duration = gst_util_uint64_scale (qtmux->fragment_duration,
+          atom_trak_get_timescale (pad->trak), 1000);
+    }
+    pad->fragment_duration -= delta;
+
+    return ret;
+  } else if (G_UNLIKELY (!pad->traf)) {
+    GstClockTime first_dts = 0, current_dts;
+    gint64 first_qt_dts;
+    GST_LOG_OBJECT (pad, "setting up new fragment");
+    pad->traf = atom_traf_new (qtmux->context, atom_trak_get_id (pad->trak));
+    atom_array_init (&pad->fragment_buffers, 512);
+    pad->fragment_duration = gst_util_uint64_scale (qtmux->fragment_duration,
+        atom_trak_get_timescale (pad->trak), 1000);
+
+    if (G_UNLIKELY (qtmux->mfra && !pad->tfra)) {
+      pad->tfra = atom_tfra_new (qtmux->context, atom_trak_get_id (pad->trak));
+      atom_mfra_add_tfra (qtmux->mfra, pad->tfra);
+    }
+    if (GST_CLOCK_TIME_IS_VALID (pad->first_dts))
+      first_dts = pad->first_dts;
+
+    current_dts =
+        gst_util_uint64_scale (dts, GST_SECOND,
+        atom_trak_get_timescale (pad->trak));
+    first_qt_dts =
+        gst_util_uint64_scale (first_dts, atom_trak_get_timescale (pad->trak),
+        GST_SECOND);
+    GST_DEBUG_OBJECT (pad, "calculating base decode time with first dts %"
+        G_GINT64_FORMAT " (%" GST_TIME_FORMAT ") and current dts %"
+        G_GINT64_FORMAT " (%" GST_TIME_FORMAT ") of %" G_GINT64_FORMAT " (%"
+        GST_STIME_FORMAT ")", first_qt_dts, GST_TIME_ARGS (first_dts), dts,
+        GST_TIME_ARGS (current_dts), dts - first_qt_dts,
+        GST_STIME_ARGS (current_dts - first_dts));
+    atom_traf_set_base_decode_time (pad->traf, dts - first_qt_dts);
+  }
+
+  if (qtmux->fragment_mode == GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE) {
+    if (qtmux->fragment_sequence > 0 && !force) {
+      if (qtmux->moof_mdat_pos == 0) {
+        /* send temporary mdat */
+        qtmux->moof_mdat_pos = qtmux->header_size;
+        ret = gst_qt_mux_send_mdat_header (qtmux, &qtmux->header_size, 0,
+            TRUE, FALSE);
+        if (ret != GST_FLOW_OK)
+          goto mdat_header_send_error;
+      }
+
+      if (buf) {
+        atom_trak_add_samples (pad->trak, nsamples, (gint32) delta, size,
+            chunk_offset, sync, pts_offset);
+        atom_traf_add_samples (pad->traf, nsamples, delta, size,
+            qtmux->header_size - qtmux->moof_mdat_pos, sync, pts_offset,
+            pad->sync && sync);
+
+        ret = gst_qt_mux_send_buffer (qtmux, buf, &qtmux->header_size, TRUE);
+        if (ret != GST_FLOW_OK)
+          return ret;
+        buf = NULL;
+      }
+    }
+  } else {
+    /* add buffer and metadata */
+    atom_traf_add_samples (pad->traf, nsamples, delta, size, 0, sync,
+        pts_offset, pad->sync && sync);
+    GST_LOG_OBJECT (qtmux, "adding buffer %p to fragments", buf);
+    atom_array_append (&pad->fragment_buffers, g_steal_pointer (&buf), 256);
+  }
+  pad->fragment_duration -= delta;
+
+  if (pad->tfra) {
+    guint32 sn = atom_traf_get_sample_num (pad->traf);
+
+    /* index keyframes; for non-sync-aware pads, index the first sample of
+     * each fragment instead */
+    if ((sync && pad->sync) || (sn == 1 && !pad->sync))
+      atom_tfra_add_entry (pad->tfra, dts, sn);
+  }
+
+  if (G_UNLIKELY (force))
+    goto flush;
+
+  return ret;
+
+moof_send_error:
+  {
+    guint i;
+
+    GST_ERROR_OBJECT (qtmux, "Failed to send moof buffer");
+    for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++)
+      gst_buffer_unref (atom_array_index (&pad->fragment_buffers, i));
+    atom_array_clear (&pad->fragment_buffers);
+    gst_clear_buffer (&buf);
+
+    return ret;
+  }
+
+mdat_header_send_error:
+  {
+    guint i;
+
+    GST_ERROR_OBJECT (qtmux, "Failed to send mdat header");
+    for (i = 0; i < atom_array_get_len (&pad->fragment_buffers); i++)
+      gst_buffer_unref (atom_array_index (&pad->fragment_buffers, i));
+    atom_array_clear (&pad->fragment_buffers);
+    gst_clear_buffer (&buf);
+
+    return ret;
+  }
+
+fragment_buf_send_error:
+  {
+    guint i;
+
+    GST_ERROR_OBJECT (qtmux, "Failed to send fragment");
+    /* buffers up to and including 'index' were already consumed by
+     * gst_qt_mux_send_buffer(); only release the remainder */
+    for (i = index + 1; i < atom_array_get_len (&pad->fragment_buffers); i++) {
+      gst_buffer_unref (atom_array_index (&pad->fragment_buffers, i));
+    }
+    atom_array_clear (&pad->fragment_buffers);
+    gst_clear_buffer (&buf);
+
+    return ret;
+  }
+}
+
+/* Here's the clever bit of robust recording: Updating the moov
+ * header is done using a ping-pong scheme inside 2 blocks of size
+ * 'reserved_moov_size' at the start of the file, in such a way that the
+ * file on-disk is always valid if interrupted.
+ * Inside the reserved space, we have 2 pairs of free + moov atoms
+ * (in that order), free-A + moov-A @ offset 0 and free-B + moov-B
+ * at offset "reserved_moov_size".
+ *
+ * 1. Free-A has 0 size payload, moov-A immediately after is
+ * active/current, and is padded with an internal Free atom to
+ * end at reserved_space/2. Free-B is at reserved_space/2, sized
+ * to cover the remaining free space (including moov-B).
+ * 2. We write moov-B (which is invisible inside free-B), and pad it to
+ * end at the end of free space. Then, we update free-A to size
+ * reserved_space/2 + sizeof(free-B), which hides moov-A and the
+ * free-B header, and makes moov-B active.
+ * 3. Rewrite moov-A inside free-A, with padding out to free-B.
+ * Change the size of free-A to make moov-A active again.
+ * 4. Rinse and repeat.
+ *
+ */
+/* Rewrite the currently-inactive moov copy inside the reserved header
+ * space and then flip the free-A size so that copy becomes the active
+ * one (see the ping-pong scheme described above). Callers are expected
+ * to seek back to their own write position afterwards. */
+static GstFlowReturn
+gst_qt_mux_robust_recording_rewrite_moov (GstQTMux * qtmux)
+{
+  GstFlowReturn ret;
+  guint64 freeA_offset;
+  guint32 new_freeA_size;
+  guint64 new_moov_offset;
+
+  /* Update moov info, then seek and rewrite the MOOV atom */
+  gst_qt_mux_update_global_statistics (qtmux);
+  gst_qt_mux_configure_moov (qtmux);
+
+  gst_qt_mux_update_edit_lists (qtmux);
+
+  /* tags into file metadata */
+  gst_qt_mux_setup_metadata (qtmux);
+
+  /* chunks position is set relative to the first byte of the
+   * MDAT atom payload. Set the overall offset into the file */
+  atom_moov_chunks_set_offset (qtmux->moov, qtmux->header_size);
+
+  /* Calculate which moov to rewrite. qtmux->moov_pos points to
+   * the start of the free-A header */
+  freeA_offset = qtmux->moov_pos;
+  if (qtmux->reserved_moov_first_active) {
+    GST_DEBUG_OBJECT (qtmux, "Updating pong moov header");
+    /* After this, freeA will include itself, moovA, plus the freeB
+     * header */
+    new_freeA_size = qtmux->reserved_moov_size + 16;
+  } else {
+    GST_DEBUG_OBJECT (qtmux, "Updating ping moov header");
+    new_freeA_size = 8;
+  }
+  /* the moov we update is after free-A, calculate its offset */
+  new_moov_offset = freeA_offset + new_freeA_size;
+
+  /* Swap ping-pong cadence marker */
+  qtmux->reserved_moov_first_active = !qtmux->reserved_moov_first_active;
+
+  /* seek and rewrite the MOOV atom */
+  gst_qt_mux_seek_to (qtmux, new_moov_offset);
+
+  ret =
+      gst_qt_mux_send_moov (qtmux, NULL, qtmux->reserved_moov_size, FALSE,
+      TRUE);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* Update the estimated recording space remaining, based on amount used so
+   * far and duration muxed so far */
+  if (qtmux->last_moov_size > qtmux->base_moov_size && qtmux->last_dts > 0) {
+    GstClockTime remain;
+    GstClockTime time_muxed = qtmux->last_dts;
+
+    /* linear extrapolation: header bytes still free, scaled by the
+     * time represented per header byte consumed so far */
+    remain =
+        gst_util_uint64_scale (qtmux->reserved_moov_size -
+        qtmux->last_moov_size, time_muxed,
+        qtmux->last_moov_size - qtmux->base_moov_size);
+    /* Always under-estimate slightly, so users
+     * have time to stop muxing before we run out */
+    if (remain < GST_SECOND / 2)
+      remain = 0;
+    else
+      remain -= GST_SECOND / 2;
+
+    GST_INFO_OBJECT (qtmux,
+        "Reserved %u header bytes. Used %u in %" GST_TIME_FORMAT
+        ". Remaining now %u or approx %" G_GUINT64_FORMAT " ns\n",
+        qtmux->reserved_moov_size, qtmux->last_moov_size,
+        GST_TIME_ARGS (qtmux->last_dts),
+        qtmux->reserved_moov_size - qtmux->last_moov_size, remain);
+
+    GST_OBJECT_LOCK (qtmux);
+    qtmux->reserved_duration_remaining = remain;
+    qtmux->muxed_since_last_update = 0;
+    GST_DEBUG_OBJECT (qtmux, "reserved remaining duration now %"
+        G_GUINT64_FORMAT, qtmux->reserved_duration_remaining);
+    GST_OBJECT_UNLOCK (qtmux);
+  }
+
+
+  /* Now update the moov-A size. Don't pass offset, since we don't need
+   * send_free_atom() to seek for us - all our callers seek back to
+   * where they need after this, or they don't need it */
+  gst_qt_mux_seek_to (qtmux, freeA_offset);
+
+  ret = gst_qt_mux_send_free_atom (qtmux, NULL, new_freeA_size, TRUE);
+
+  return ret;
+}
+
+/* Periodic moov maintenance for robust-recording mode. Tracks how much
+ * stream time has been muxed since the last header rewrite and, if the
+ * configured update period has elapsed, rewrites the moov via the
+ * ping-pong scheme and seeks back to the end of the mdat payload.
+ * @position is the pad's total muxed duration so far. */
+static GstFlowReturn
+gst_qt_mux_robust_recording_update (GstQTMux * qtmux, GstClockTime position)
+{
+  GstFlowReturn flow_ret;
+
+  /* mdat payload starts 16 bytes after mdat_pos (extended-size header) */
+  guint64 mdat_offset = qtmux->mdat_pos + 16 + qtmux->mdat_size;
+
+  GST_OBJECT_LOCK (qtmux);
+
+  /* Update the offset of how much we've muxed, so the
+   * report of remaining space keeps counting down */
+  if (position > qtmux->last_moov_update &&
+      position - qtmux->last_moov_update > qtmux->muxed_since_last_update) {
+    GST_LOG_OBJECT (qtmux,
+        "Muxed time %" G_GUINT64_FORMAT " since last moov update",
+        qtmux->muxed_since_last_update);
+    qtmux->muxed_since_last_update = position - qtmux->last_moov_update;
+  }
+
+  /* Next, check if we're supposed to send periodic moov updates downstream */
+  if (qtmux->reserved_moov_update_period == GST_CLOCK_TIME_NONE) {
+    GST_OBJECT_UNLOCK (qtmux);
+    return GST_FLOW_OK;
+  }
+
+  /* Update if position is > the threshold or there's been no update yet */
+  if (qtmux->last_moov_update != GST_CLOCK_TIME_NONE &&
+      (position <= qtmux->last_moov_update ||
+          (position - qtmux->last_moov_update) <
+          qtmux->reserved_moov_update_period)) {
+    GST_OBJECT_UNLOCK (qtmux);
+    return GST_FLOW_OK;         /* No update needed yet */
+  }
+
+  qtmux->last_moov_update = position;
+  GST_OBJECT_UNLOCK (qtmux);
+
+  /* NOTE(review): "we were a" in this debug string looks like a typo for
+   * "we were at"; left untouched here as it is runtime text */
+  GST_DEBUG_OBJECT (qtmux, "Update moov atom, position %" GST_TIME_FORMAT
+      " mdat starts @ %" G_GUINT64_FORMAT " we were a %" G_GUINT64_FORMAT,
+      GST_TIME_ARGS (position), qtmux->mdat_pos, mdat_offset);
+
+  flow_ret = gst_qt_mux_robust_recording_rewrite_moov (qtmux);
+  if (G_UNLIKELY (flow_ret != GST_FLOW_OK))
+    return flow_ret;
+
+  /* Seek back to previous position */
+  gst_qt_mux_seek_to (qtmux, mdat_offset);
+
+  return flow_ret;
+}
+
+/* Register one sample's metadata in the track tables and push the buffer
+ * according to the active mux mode. Takes ownership of @buffer in all
+ * paths (sent, stored in output_buffers, or unreffed on error). Also
+ * mirrors the sample information into the moov recovery file when one is
+ * open, disabling recovery on write failure instead of erroring out. */
+static GstFlowReturn
+gst_qt_mux_register_and_push_sample (GstQTMux * qtmux, GstQTMuxPad * pad,
+    GstBuffer * buffer, gboolean is_last_buffer, guint nsamples,
+    gint64 last_dts, gint64 scaled_duration, guint sample_size,
+    guint64 chunk_offset, gboolean sync, gboolean do_pts, gint64 pts_offset)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  /* note that a new chunk is started each time (not fancy but works) */
+  if (qtmux->moov_recov_file) {
+    if (!atoms_recov_write_trak_samples (qtmux->moov_recov_file, pad->trak,
+            nsamples, (gint32) scaled_duration, sample_size, chunk_offset, sync,
+            do_pts, pts_offset)) {
+      GST_WARNING_OBJECT (qtmux, "Failed to write sample information to "
+          "recovery file, disabling recovery");
+      fclose (qtmux->moov_recov_file);
+      qtmux->moov_recov_file = NULL;
+    }
+  }
+
+  switch (qtmux->mux_mode) {
+    case GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL:{
+      /* prefill mode wrote the sample tables up front; the incoming
+       * samples must match those predictions exactly */
+      const TrakBufferEntryInfo *sample_entry;
+      guint64 block_idx = prefill_get_block_index (qtmux, pad);
+
+      if (block_idx >= pad->samples->len) {
+        GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+            ("Unexpected sample %" G_GUINT64_FORMAT ", expected up to %u",
+                block_idx, pad->samples->len));
+        gst_buffer_unref (buffer);
+        return GST_FLOW_ERROR;
+      }
+
+      /* Check if all values are as expected */
+      sample_entry =
+          &g_array_index (pad->samples, TrakBufferEntryInfo, block_idx);
+
+      /* Allow +/- 1 difference for the scaled_duration to allow
+       * for some rounding errors
+       */
+      if (sample_entry->nsamples != nsamples
+          || ABSDIFF (sample_entry->delta, scaled_duration) > 1
+          || sample_entry->size != sample_size
+          || sample_entry->chunk_offset != chunk_offset
+          || sample_entry->pts_offset != pts_offset
+          || sample_entry->sync != sync) {
+        GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+            ("Unexpected values in sample %" G_GUINT64_FORMAT,
+                pad->sample_offset + 1));
+        GST_ERROR_OBJECT (qtmux, "Expected: samples %u, delta %u, size %u, "
+            "chunk offset %" G_GUINT64_FORMAT ", "
+            "pts offset %" G_GUINT64_FORMAT ", sync %d",
+            sample_entry->nsamples,
+            sample_entry->delta,
+            sample_entry->size,
+            sample_entry->chunk_offset,
+            sample_entry->pts_offset, sample_entry->sync);
+        GST_ERROR_OBJECT (qtmux, "Got: samples %u, delta %u, size %u, "
+            "chunk offset %" G_GUINT64_FORMAT ", "
+            "pts offset %" G_GUINT64_FORMAT ", sync %d",
+            nsamples,
+            (guint) scaled_duration,
+            sample_size, chunk_offset, pts_offset, sync);
+
+        gst_buffer_unref (buffer);
+        return GST_FLOW_ERROR;
+      }
+
+      ret = gst_qt_mux_send_buffer (qtmux, buffer, &qtmux->mdat_size, TRUE);
+      break;
+    }
+    case GST_QT_MUX_MODE_MOOV_AT_END:
+    case GST_QT_MUX_MODE_FAST_START:
+    case GST_QT_MUX_MODE_ROBUST_RECORDING:
+      atom_trak_add_samples (pad->trak, nsamples, (gint32) scaled_duration,
+          sample_size, chunk_offset, sync, pts_offset);
+      if (qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+          && !qtmux->downstream_seekable) {
+        /* queue for later; it will be pushed once the moov is written */
+        qtmux->output_buffers = g_list_append (qtmux->output_buffers, buffer);
+        ret = GST_FLOW_OK;
+      } else {
+        ret = gst_qt_mux_send_buffer (qtmux, buffer, &qtmux->mdat_size, TRUE);
+      }
+      /* Check if it's time to re-write the headers in robust-recording mode */
+      if (ret == GST_FLOW_OK
+          && qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING)
+        ret = gst_qt_mux_robust_recording_update (qtmux, pad->total_duration);
+      break;
+    case GST_QT_MUX_MODE_FRAGMENTED:
+      /* ensure that always sync samples are marked as such */
+      ret = gst_qt_mux_pad_fragment_add_buffer (qtmux, pad, buffer,
+          is_last_buffer, nsamples, last_dts, (gint32) scaled_duration,
+          sample_size, chunk_offset, !pad->sync || sync, pts_offset);
+      break;
+  }
+
+  return ret;
+}
+
+/* Account for @buffer_size bytes / @duration of media written on @pad.
+ * The per-pad totals feed the average-bitrate calculation, while the
+ * muxer-wide accumulators track the size and duration of the chunk
+ * currently being filled, so that the on-disk layout matches what the
+ * atoms record. All four counters must always advance together. */
+static void
+gst_qt_mux_register_buffer_in_chunk (GstQTMux * qtmux, GstQTMuxPad * pad,
+    guint buffer_size, GstClockTime duration)
+{
+  /* per-pad running totals (avg bitrate) */
+  pad->total_bytes += buffer_size;
+  pad->total_duration += duration;
+
+  /* position within the current chunk */
+  qtmux->current_chunk_size += buffer_size;
+  qtmux->current_chunk_duration += duration;
+}
+
+/* Maintain the companion 'tmcd' timecode track for a video pad. On the
+ * first buffer carrying a GstVideoTimeCodeMeta this creates the timecode
+ * trak, references it from the video trak, and writes the 4-byte
+ * frames-since-daily-jam sample. For out-of-order streams the sample is
+ * written as 0 first and rewritten later once the presentation-first
+ * timecode is known. Returns the (possibly updated) flow return; @ret is
+ * passed through untouched on the early-out paths. Does not consume @buf. */
+static GstFlowReturn
+gst_qt_mux_check_and_update_timecode (GstQTMux * qtmux, GstQTMuxPad * pad,
+    GstBuffer * buf, GstFlowReturn ret)
+{
+  GstVideoTimeCodeMeta *tc_meta;
+  GstVideoTimeCode *tc;
+  GstBuffer *tc_buf;
+  gsize szret;
+  guint32 frames_since_daily_jam;
+  GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+
+  if (!pad->trak->is_video)
+    return ret;
+
+  /* timecode traks are a QT feature unless explicitly forced */
+  if (qtmux_klass->format != GST_QT_MUX_FORMAT_QT &&
+      !qtmux->force_create_timecode_trak)
+    return ret;
+
+  /* tc_pos == -1 means the timecode sample is already final */
+  if (buf == NULL || (pad->tc_trak != NULL && pad->tc_pos == -1))
+    return ret;
+
+  tc_meta = gst_buffer_get_video_time_code_meta (buf);
+  if (!tc_meta)
+    return ret;
+
+  tc = &tc_meta->tc;
+
+  /* This means we never got a timecode before */
+  if (pad->first_tc == NULL) {
+    guint64 *offset;
+
+#ifndef GST_DISABLE_GST_DEBUG
+    gchar *tc_str = gst_video_time_code_to_string (tc);
+    GST_DEBUG_OBJECT (qtmux, "Found first timecode %s", tc_str);
+    g_free (tc_str);
+#endif
+    g_assert (pad->tc_trak == NULL);
+    pad->first_tc = gst_video_time_code_copy (tc);
+
+    /* in fragmented mode past the first fragment we track the write
+     * position via header_size instead of mdat_size */
+    if (qtmux->mux_mode == GST_QT_MUX_MODE_FRAGMENTED
+        && qtmux->fragment_sequence > 0) {
+      offset = &qtmux->header_size;
+    } else {
+      offset = &qtmux->mdat_size;
+    }
+    /* If frames are out of order, the frame we're currently getting might
+     * not be the first one. Just write a 0 timecode for now and wait
+     * until we receive a timecode that's lower than the current one */
+    if (pad->is_out_of_order) {
+      pad->first_pts = GST_BUFFER_PTS (buf);
+      frames_since_daily_jam = 0;
+      /* Position to rewrite */
+      pad->tc_pos = *offset;
+    } else {
+      frames_since_daily_jam =
+          gst_video_time_code_frames_since_daily_jam (pad->first_tc);
+      frames_since_daily_jam = GUINT32_TO_BE (frames_since_daily_jam);
+    }
+    /* Write the timecode trak now */
+    pad->tc_trak = atom_trak_new (qtmux->context);
+    atom_moov_add_trak (qtmux->moov, pad->tc_trak);
+
+    pad->trak->tref = atom_tref_new (FOURCC_tmcd);
+    atom_tref_add_entry (pad->trak->tref, pad->tc_trak->tkhd.track_ID);
+
+    atom_trak_set_timecode_type (pad->tc_trak, qtmux->context,
+        pad->trak->mdia.mdhd.time_info.timescale, pad->first_tc);
+
+    /* single 4-byte big-endian sample holding the frame count */
+    tc_buf = gst_buffer_new_allocate (NULL, 4, NULL);
+    szret = gst_buffer_fill (tc_buf, 0, &frames_since_daily_jam, 4);
+    g_assert (szret == 4);
+
+    atom_trak_add_samples (pad->tc_trak, 1, 1, 4, *offset, FALSE, 0);
+
+    if (qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+        && !qtmux->downstream_seekable) {
+      /* NOTE(review): the flow return from push_mdat_stored_buffers() is
+       * immediately overwritten with GST_FLOW_OK below — looks like an
+       * ignored error; confirm intent before changing */
+      ret = gst_qtmux_push_mdat_stored_buffers (qtmux);
+      qtmux->output_buffers = g_list_append (qtmux->output_buffers, tc_buf);
+      ret = GST_FLOW_OK;
+    } else {
+      ret = gst_qt_mux_send_buffer (qtmux, tc_buf, offset, TRUE);
+    }
+
+    /* Need to reset the current chunk (of the previous pad) here because
+     * some other data was written now above, and the pad has to start a
+     * new chunk now */
+    qtmux->current_chunk_offset = -1;
+    qtmux->current_chunk_size = 0;
+    qtmux->current_chunk_duration = 0;
+  } else if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+    frames_since_daily_jam =
+        gst_video_time_code_frames_since_daily_jam (pad->first_tc);
+    frames_since_daily_jam = GUINT32_TO_BE (frames_since_daily_jam);
+
+    tc_buf = gst_buffer_new_allocate (NULL, 4, NULL);
+    szret = gst_buffer_fill (tc_buf, 0, &frames_since_daily_jam, 4);
+    g_assert (szret == 4);
+
+    /* NOTE(review): mux_mode is PREFILL in this branch, so this
+     * MOOV_AT_END check appears unreachable — verify against the
+     * first-timecode branch it mirrors */
+    if (qtmux->mux_mode == GST_QT_MUX_MODE_MOOV_AT_END
+        && !qtmux->downstream_seekable) {
+      ret = gst_qtmux_push_mdat_stored_buffers (qtmux);
+      qtmux->output_buffers = g_list_append (qtmux->output_buffers, tc_buf);
+      ret = GST_FLOW_OK;
+    } else {
+      ret = gst_qt_mux_send_buffer (qtmux, tc_buf, &qtmux->mdat_size, TRUE);
+    }
+    /* sample is final now */
+    pad->tc_pos = -1;
+
+    qtmux->current_chunk_offset = -1;
+    qtmux->current_chunk_size = 0;
+    qtmux->current_chunk_duration = 0;
+  } else if (pad->is_out_of_order) {
+    /* Check for a lower timecode than the one stored */
+    g_assert (pad->tc_trak != NULL);
+    if (GST_BUFFER_DTS (buf) <= pad->first_pts) {
+      if (gst_video_time_code_compare (tc, pad->first_tc) == -1) {
+        gst_video_time_code_free (pad->first_tc);
+        pad->first_tc = gst_video_time_code_copy (tc);
+      }
+    } else {
+      guint64 bk_size = qtmux->mdat_size;
+      /* If this frame's DTS is after the first PTS received, it means
+       * we've already received the first frame to be presented. Otherwise
+       * the decoder would need to go back in time */
+      gst_qt_mux_update_timecode (qtmux, pad);
+
+      /* Reset writing position */
+      gst_qt_mux_seek_to (qtmux, bk_size);
+    }
+  }
+
+  return ret;
+}
+
+/*
+ * Here we push the buffer and update the tables in the track atoms
+ */
+static GstFlowReturn
+gst_qt_mux_add_buffer (GstQTMux * qtmux, GstQTMuxPad * pad, GstBuffer * buf)
+{
+ GstBuffer *last_buf = NULL;
+ GstClockTime duration;
+ guint nsamples, sample_size;
+ guint64 chunk_offset;
+ gint64 last_dts, scaled_duration;
+ gint64 pts_offset = 0;
+ gboolean sync = FALSE;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint buffer_size;
+
+ /* GAP event, nothing to do */
+ if (buf && gst_buffer_get_size (buf) == 0 &&
+ GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP)) {
+ gst_buffer_unref (buf);
+ return GST_FLOW_OK;
+ }
+
+ if (!pad->fourcc)
+ goto not_negotiated;
+
+ /* if this pad has a prepare function, call it */
+ if (pad->prepare_buf_func != NULL) {
+ GstBuffer *new_buf;
+
+ new_buf = pad->prepare_buf_func (pad, buf, qtmux);
+ if (buf && !new_buf)
+ return GST_FLOW_OK;
+ buf = new_buf;
+ }
+
+ ret = gst_qt_mux_check_and_update_timecode (qtmux, pad, buf, ret);
+ if (ret != GST_FLOW_OK) {
+ if (buf)
+ gst_buffer_unref (buf);
+ return ret;
+ }
+
+ last_buf = pad->last_buf;
+ pad->last_buf = buf;
+
+ if (last_buf == NULL) {
+#ifndef GST_DISABLE_GST_DEBUG
+ if (buf == NULL) {
+ GST_DEBUG_OBJECT (qtmux, "Pad %s has no previous buffer stored and "
+ "received NULL buffer, doing nothing", GST_PAD_NAME (pad));
+ } else {
+ GST_LOG_OBJECT (qtmux,
+ "Pad %s has no previous buffer stored, storing now",
+ GST_PAD_NAME (pad));
+ }
+#endif
+ goto exit;
+ }
+
+ if (!GST_BUFFER_PTS_IS_VALID (last_buf))
+ goto no_pts;
+
+ /* if this is the first buffer, store the timestamp */
+ if (G_UNLIKELY (pad->first_ts == GST_CLOCK_TIME_NONE)) {
+ if (GST_BUFFER_PTS_IS_VALID (last_buf)) {
+ pad->first_ts = GST_BUFFER_PTS (last_buf);
+ } else if (GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ pad->first_ts = GST_BUFFER_DTS (last_buf);
+ }
+
+ if (GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ pad->first_dts = pad->last_dts = GST_BUFFER_DTS (last_buf);
+ } else if (GST_BUFFER_PTS_IS_VALID (last_buf)) {
+ pad->first_dts = pad->last_dts = GST_BUFFER_PTS (last_buf);
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (pad->first_ts)) {
+ GST_DEBUG ("setting first_ts to %" G_GUINT64_FORMAT, pad->first_ts);
+ } else {
+ GST_WARNING_OBJECT (qtmux, "First buffer for pad %s has no timestamp, "
+ "using 0 as first timestamp", GST_PAD_NAME (pad));
+ pad->first_ts = pad->first_dts = 0;
+ }
+ GST_DEBUG_OBJECT (qtmux, "Stored first timestamp for pad %s %"
+ GST_TIME_FORMAT, GST_PAD_NAME (pad), GST_TIME_ARGS (pad->first_ts));
+ }
+
+ if (buf && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS (buf)) &&
+ GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DTS (last_buf)) &&
+ GST_BUFFER_DTS (buf) < GST_BUFFER_DTS (last_buf)) {
+ GST_ERROR ("decreasing DTS value %" GST_TIME_FORMAT " < %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (last_buf)));
+ pad->last_buf = buf = gst_buffer_make_writable (buf);
+ GST_BUFFER_DTS (buf) = GST_BUFFER_DTS (last_buf);
+ }
+
+ buffer_size = gst_buffer_get_size (last_buf);
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ guint required_buffer_size = prefill_get_sample_size (qtmux, pad);
+ guint fill_size = required_buffer_size - buffer_size;
+ GstMemory *mem;
+ GstMapInfo map;
+
+ if (required_buffer_size < buffer_size) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Sample size %u bigger than expected maximum %u", buffer_size,
+ required_buffer_size));
+ goto bail;
+ }
+
+ if (fill_size > 0) {
+ GST_DEBUG_OBJECT (qtmux,
+ "Padding buffer by %u bytes to reach required %u bytes", fill_size,
+ required_buffer_size);
+ mem = gst_allocator_alloc (NULL, fill_size, NULL);
+ gst_memory_map (mem, &map, GST_MAP_WRITE);
+ memset (map.data, 0, map.size);
+ gst_memory_unmap (mem, &map);
+ last_buf = gst_buffer_make_writable (last_buf);
+ gst_buffer_append_memory (last_buf, mem);
+ buffer_size = required_buffer_size;
+ }
+ }
+
+ /* duration actually means time delta between samples, so we calculate
+ * the duration based on the difference in DTS or PTS, falling back
+ * to DURATION if the other two don't exist, such as with the last
+ * sample before EOS. Or use 0 if nothing else is available,
+ * making sure that duration doesn't go negative and wraparound. */
+ if (GST_BUFFER_DURATION_IS_VALID (last_buf))
+ duration = GST_BUFFER_DURATION (last_buf);
+ else
+ duration = 0;
+ if (!pad->sparse) {
+ if (buf && GST_BUFFER_DTS_IS_VALID (buf)
+ && GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ if (GST_BUFFER_DTS (buf) >= GST_BUFFER_DTS (last_buf))
+ duration = GST_BUFFER_DTS (buf) - GST_BUFFER_DTS (last_buf);
+ } else if (buf && GST_BUFFER_PTS_IS_VALID (buf)
+ && GST_BUFFER_PTS_IS_VALID (last_buf)) {
+ if (GST_BUFFER_PTS (buf) >= GST_BUFFER_PTS (last_buf))
+ duration = GST_BUFFER_PTS (buf) - GST_BUFFER_PTS (last_buf);
+ }
+ if (duration == 0 && !pad->warned_empty_duration) {
+ GST_WARNING_OBJECT (qtmux,
+ "Sample with zero duration on pad %" GST_PTR_FORMAT
+ " due to missing or backward timestamps on the input stream", pad);
+ pad->warned_empty_duration = TRUE;
+ }
+ }
+
+ if (qtmux->current_pad != pad || qtmux->current_chunk_offset == -1) {
+ GST_DEBUG_OBJECT (qtmux,
+ "Switching to next chunk for pad %s:%s: offset %" G_GUINT64_FORMAT
+ ", size %" G_GUINT64_FORMAT ", duration %" GST_TIME_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), qtmux->current_chunk_offset,
+ qtmux->current_chunk_size,
+ GST_TIME_ARGS (qtmux->current_chunk_duration));
+ qtmux->current_pad = pad;
+ if (qtmux->current_chunk_offset == -1)
+ qtmux->current_chunk_offset = qtmux->mdat_size;
+ else
+ qtmux->current_chunk_offset += qtmux->current_chunk_size;
+ qtmux->current_chunk_size = 0;
+ qtmux->current_chunk_duration = 0;
+ }
+
+ last_dts = gst_util_uint64_scale_round (pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+
+ /* fragments only deal with 1 buffer == 1 chunk (== 1 sample) */
+ if (pad->sample_size && (qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED
+ || qtmux->fragment_mode ==
+ GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE)) {
+ GstClockTime expected_timestamp;
+
+ /* Constant size packets: usually raw audio (with many samples per
+ buffer (= chunk)), but can also be fixed-packet-size codecs like ADPCM
+ */
+ sample_size = pad->sample_size;
+ if (buffer_size % sample_size != 0)
+ goto fragmented_sample;
+
+ /* note: qt raw audio storage warps it implicitly into a timewise
+ * perfect stream, discarding buffer times.
+ * If the difference between the current PTS and the expected one
+ * becomes too big, we error out: there was a gap and we have no way to
+ * represent that, causing A/V sync to be off */
+ expected_timestamp =
+ gst_util_uint64_scale (pad->sample_offset, GST_SECOND,
+ atom_trak_get_timescale (pad->trak)) + pad->first_ts;
+ if (ABSDIFF (GST_BUFFER_DTS_OR_PTS (last_buf),
+ expected_timestamp) > qtmux->max_raw_audio_drift)
+ goto raw_audio_timestamp_drift;
+
+ if (GST_BUFFER_DURATION (last_buf) != GST_CLOCK_TIME_NONE) {
+ nsamples = gst_util_uint64_scale_round (GST_BUFFER_DURATION (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ duration = GST_BUFFER_DURATION (last_buf);
+ } else {
+ nsamples = buffer_size / sample_size;
+ duration =
+ gst_util_uint64_scale_round (nsamples, GST_SECOND,
+ atom_trak_get_timescale (pad->trak));
+ }
+
+ /* timescale = samplerate */
+ scaled_duration = 1;
+ pad->last_dts =
+ pad->first_dts + gst_util_uint64_scale_round (pad->sample_offset +
+ nsamples, GST_SECOND, atom_trak_get_timescale (pad->trak));
+ } else {
+ nsamples = 1;
+ sample_size = buffer_size;
+ if (!pad->sparse && ((buf && GST_BUFFER_DTS_IS_VALID (buf))
+ || GST_BUFFER_DTS_IS_VALID (last_buf))) {
+ gint64 scaled_dts;
+ if (buf && GST_BUFFER_DTS_IS_VALID (buf)) {
+ pad->last_dts = GST_BUFFER_DTS (buf);
+ } else {
+ pad->last_dts = GST_BUFFER_DTS (last_buf) + duration;
+ }
+ if ((gint64) (pad->last_dts) < 0) {
+ scaled_dts = -gst_util_uint64_scale_round (-pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ } else {
+ scaled_dts = gst_util_uint64_scale_round (pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ }
+ scaled_duration = scaled_dts - last_dts;
+ last_dts = scaled_dts;
+ } else {
+ /* first convert intended timestamp (in GstClockTime resolution) to
+ * trak timescale, then derive delta;
+ * this ensures sums of (scale)delta add up to converted timestamp,
+ * which only deviates at most 1/scale from timestamp itself */
+ scaled_duration = gst_util_uint64_scale_round (pad->last_dts + duration,
+ atom_trak_get_timescale (pad->trak), GST_SECOND) - last_dts;
+ pad->last_dts += duration;
+ }
+ }
+
+ gst_qt_mux_register_buffer_in_chunk (qtmux, pad, buffer_size, duration);
+
+ chunk_offset = qtmux->current_chunk_offset;
+
+ GST_LOG_OBJECT (qtmux,
+ "Pad (%s) dts updated to %" GST_TIME_FORMAT,
+ GST_PAD_NAME (pad), GST_TIME_ARGS (pad->last_dts));
+ GST_LOG_OBJECT (qtmux,
+ "Adding %d samples to track, duration: %" G_GUINT64_FORMAT
+ " size: %" G_GUINT32_FORMAT " chunk offset: %" G_GUINT64_FORMAT,
+ nsamples, scaled_duration, sample_size, chunk_offset);
+
+ /* might be a sync sample */
+ if (pad->sync &&
+ !GST_BUFFER_FLAG_IS_SET (last_buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+ GST_LOG_OBJECT (qtmux, "Adding new sync sample entry for track of pad %s",
+ GST_PAD_NAME (pad));
+ sync = TRUE;
+ }
+
+ if (GST_BUFFER_DTS_IS_VALID (last_buf)) {
+ last_dts = gst_util_uint64_scale_round (GST_BUFFER_DTS (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ pts_offset =
+ (gint64) (gst_util_uint64_scale_round (GST_BUFFER_PTS (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND) - last_dts);
+ } else {
+ pts_offset = 0;
+ last_dts = gst_util_uint64_scale_round (GST_BUFFER_PTS (last_buf),
+ atom_trak_get_timescale (pad->trak), GST_SECOND);
+ }
+ GST_DEBUG ("dts: %" GST_TIME_FORMAT " pts: %" GST_TIME_FORMAT
+ " timebase_dts: %d pts_offset: %d",
+ GST_TIME_ARGS (GST_BUFFER_DTS (last_buf)),
+ GST_TIME_ARGS (GST_BUFFER_PTS (last_buf)),
+ (int) (last_dts), (int) (pts_offset));
+
+ if (GST_CLOCK_TIME_IS_VALID (duration)
+ && (qtmux->current_chunk_duration > qtmux->longest_chunk
+ || !GST_CLOCK_TIME_IS_VALID (qtmux->longest_chunk))) {
+ GST_DEBUG_OBJECT (qtmux,
+ "New longest chunk found: %" GST_TIME_FORMAT ", pad %s",
+ GST_TIME_ARGS (qtmux->current_chunk_duration), GST_PAD_NAME (pad));
+ qtmux->longest_chunk = qtmux->current_chunk_duration;
+ }
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx = prefill_get_block_index (qtmux, pad);
+
+ if (block_idx >= pad->samples->len) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected sample %" G_GUINT64_FORMAT ", expected up to %u",
+ block_idx, pad->samples->len));
+ goto bail;
+ }
+
+ /* Check if all values are as expected */
+ sample_entry =
+ &g_array_index (pad->samples, TrakBufferEntryInfo, block_idx);
+
+ if (chunk_offset < sample_entry->chunk_offset) {
+ guint fill_size = sample_entry->chunk_offset - chunk_offset;
+ GstBuffer *fill_buf;
+
+ fill_buf = gst_buffer_new_allocate (NULL, fill_size, NULL);
+ gst_buffer_memset (fill_buf, 0, 0, fill_size);
+
+ ret = gst_qt_mux_send_buffer (qtmux, fill_buf, &qtmux->mdat_size, TRUE);
+ if (ret != GST_FLOW_OK)
+ goto bail;
+ qtmux->current_chunk_offset = chunk_offset = sample_entry->chunk_offset;
+ qtmux->current_chunk_size = buffer_size;
+ qtmux->current_chunk_duration = duration;
+ } else if (chunk_offset != sample_entry->chunk_offset) {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Unexpected chunk offset %" G_GUINT64_FORMAT ", expected up to %"
+ G_GUINT64_FORMAT, chunk_offset, sample_entry->chunk_offset));
+ goto bail;
+ }
+ }
+
+ /* now we go and register this buffer/sample all over */
+ pad->flow_status = gst_qt_mux_register_and_push_sample (qtmux, pad, last_buf,
+ buf == NULL, nsamples, last_dts, scaled_duration, sample_size,
+ chunk_offset, sync, TRUE, pts_offset);
+ if (pad->flow_status != GST_FLOW_OK)
+ goto sample_error;
+
+ pad->sample_offset += nsamples;
+
+ /* if this is sparse and we have a next buffer, check if there is any gap
+ * between them to insert an empty sample */
+ if (pad->sparse && buf) {
+ if (pad->create_empty_buffer) {
+ GstBuffer *empty_buf;
+ gint64 empty_duration =
+ GST_BUFFER_PTS (buf) - (GST_BUFFER_PTS (last_buf) + duration);
+ gint64 empty_duration_scaled;
+ guint empty_size;
+
+ empty_buf = pad->create_empty_buffer (pad, empty_duration);
+
+ pad->last_dts = GST_BUFFER_PTS (buf);
+ empty_duration_scaled = gst_util_uint64_scale_round (pad->last_dts,
+ atom_trak_get_timescale (pad->trak), GST_SECOND)
+ - (last_dts + scaled_duration);
+ empty_size = gst_buffer_get_size (empty_buf);
+
+ gst_qt_mux_register_buffer_in_chunk (qtmux, pad, empty_size,
+ empty_duration);
+
+ ret =
+ gst_qt_mux_register_and_push_sample (qtmux, pad, empty_buf, FALSE, 1,
+ last_dts + scaled_duration, empty_duration_scaled,
+ empty_size, chunk_offset, sync, TRUE, 0);
+ } else if (pad->fourcc != FOURCC_c608 && pad->fourcc != FOURCC_c708) {
+ /* This assert is kept here to make sure implementors of new
+ * sparse input format decide whether there needs to be special
+ * gap handling or not */
+ g_assert_not_reached ();
+ GST_WARNING_OBJECT (qtmux,
+ "no empty buffer creation function found for pad %s",
+ GST_PAD_NAME (pad));
+ }
+ }
+
+exit:
+
+ return ret;
+
+ /* ERRORS */
+bail:
+ {
+ gst_buffer_unref (last_buf);
+ return GST_FLOW_ERROR;
+ }
+fragmented_sample:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Audio buffer contains fragmented sample."));
+ goto bail;
+ }
+raw_audio_timestamp_drift:
+ {
+ /* TODO: Could in theory be implemented with edit lists */
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL),
+ ("Audio stream timestamps are drifting (got %" GST_TIME_FORMAT
+ ", expected %" GST_TIME_FORMAT "). This is not supported yet!",
+ GST_TIME_ARGS (GST_BUFFER_DTS_OR_PTS (last_buf)),
+ GST_TIME_ARGS (gst_util_uint64_scale (pad->sample_offset,
+ GST_SECOND,
+ atom_trak_get_timescale (pad->trak)) + pad->first_ts)));
+ goto bail;
+ }
+no_pts:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL), ("Buffer has no PTS."));
+ goto bail;
+ }
+not_negotiated:
+ {
+ GST_ELEMENT_ERROR (qtmux, CORE, NEGOTIATION, (NULL),
+ ("format wasn't negotiated before buffer flow on pad %s",
+ GST_PAD_NAME (pad)));
+ if (buf)
+ gst_buffer_unref (buf);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+sample_error:
+ {
+ GST_ELEMENT_ERROR (qtmux, STREAM, MUX, (NULL), ("Failed to push sample."));
+ return pad->flow_status;
+ }
+}
+
+/*
+ * DTS running time can be negative. There is no way to represent that in
+ * MP4 however, thus we need to offset DTS so that it starts from 0.
+ */
+static void
+gst_qt_pad_adjust_buffer_dts (GstQTMux * qtmux, GstQTMuxPad * pad,
+ GstBuffer ** buf)
+{
+ GstClockTime pts;
+ gint64 dts;
+
+ pts = GST_BUFFER_PTS (*buf);
+ dts = pad->dts;
+
+ GST_LOG_OBJECT (qtmux, "selected pad %s with PTS %" GST_TIME_FORMAT
+ " and DTS %" GST_STIME_FORMAT, GST_PAD_NAME (pad),
+ GST_TIME_ARGS (pts), GST_STIME_ARGS (dts));
+
+ if (!GST_CLOCK_TIME_IS_VALID (pad->dts_adjustment)) {
+ if (GST_CLOCK_STIME_IS_VALID (dts) && dts < 0)
+ pad->dts_adjustment = -dts;
+ else
+ pad->dts_adjustment = 0;
+ }
+
+ if (pad->dts_adjustment > 0) {
+ *buf = gst_buffer_make_writable (*buf);
+
+ dts += pad->dts_adjustment;
+
+ if (GST_CLOCK_TIME_IS_VALID (pts))
+ pts += pad->dts_adjustment;
+
+ if (GST_CLOCK_STIME_IS_VALID (dts) && dts < 0) {
+ GST_WARNING_OBJECT (pad, "Decreasing DTS.");
+ dts = 0;
+ }
+
+ if (pts < dts) {
+ GST_WARNING_OBJECT (pad, "DTS is bigger then PTS");
+ pts = dts;
+ }
+
+ GST_BUFFER_PTS (*buf) = pts;
+ GST_BUFFER_DTS (*buf) = dts;
+
+ GST_LOG_OBJECT (qtmux, "time adjusted to PTS %" GST_TIME_FORMAT
+ " and DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (pts), GST_TIME_ARGS (dts));
+ }
+}
+
+static GstQTMuxPad *
+find_best_pad (GstQTMux * qtmux)
+{
+ GList *l;
+ GstQTMuxPad *best_pad = NULL;
+
+ if (qtmux->mux_mode == GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL) {
+ guint64 smallest_offset = G_MAXUINT64;
+ guint64 chunk_offset = 0;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+ const TrakBufferEntryInfo *sample_entry;
+ guint64 block_idx, current_block_idx;
+ guint64 chunk_offset_offset = 0;
+ GstBuffer *tmp_buf =
+ gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qtpad));
+
+ /* Check for EOS pads and just skip them */
+ if (!tmp_buf && !qtpad->last_buf && (!qtpad->raw_audio_adapter
+ || gst_adapter_available (qtpad->raw_audio_adapter) == 0))
+ continue;
+ if (tmp_buf)
+ gst_buffer_unref (tmp_buf);
+
+ /* Find the exact offset where the next sample of this track is supposed
+ * to be written at */
+ block_idx = current_block_idx = prefill_get_block_index (qtmux, qtpad);
+ if (!qtpad->samples || block_idx >= qtpad->samples->len) {
+ GST_ELEMENT_ERROR (qtmux, RESOURCE, SETTINGS,
+ ("Failed to create samples in prefill mode"), (NULL));
+ return NULL;
+ }
+
+ sample_entry =
+ &g_array_index (qtpad->samples, TrakBufferEntryInfo, block_idx);
+ while (block_idx > 0) {
+ const TrakBufferEntryInfo *tmp =
+ &g_array_index (qtpad->samples, TrakBufferEntryInfo, block_idx - 1);
+
+ if (tmp->chunk_offset != sample_entry->chunk_offset)
+ break;
+ chunk_offset_offset += tmp->size * tmp->nsamples;
+ block_idx--;
+ }
+
+ /* Except for the previously selected pad being EOS we always have
+ * qtmux->current_chunk_offset + qtmux->current_chunk_size
+ * ==
+ * sample_entry->chunk_offset + chunk_offset_offset
+ * for the best pad. Instead of checking that, we just return the
+ * pad that has the smallest offset for the next to-be-written sample.
+ */
+ if (sample_entry->chunk_offset + chunk_offset_offset < smallest_offset) {
+ smallest_offset = sample_entry->chunk_offset + chunk_offset_offset;
+ best_pad = qtpad;
+ chunk_offset = sample_entry->chunk_offset;
+ }
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ if (chunk_offset != qtmux->current_chunk_offset) {
+ qtmux->current_pad = NULL;
+ }
+
+ return best_pad;
+ }
+
+ if (qtmux->current_pad && (qtmux->interleave_bytes != 0
+ || qtmux->interleave_time != 0) && (qtmux->interleave_bytes == 0
+ || qtmux->current_chunk_size <= qtmux->interleave_bytes)
+ && (qtmux->interleave_time == 0
+ || qtmux->current_chunk_duration <= qtmux->interleave_time)
+ && qtmux->mux_mode != GST_QT_MUX_MODE_FRAGMENTED) {
+ GstBuffer *tmp_buf =
+ gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD
+ (qtmux->current_pad));
+
+ if (tmp_buf || qtmux->current_pad->last_buf) {
+ best_pad = qtmux->current_pad;
+ if (tmp_buf)
+ gst_buffer_unref (tmp_buf);
+ GST_DEBUG_OBJECT (qtmux, "Reusing pad %s:%s",
+ GST_DEBUG_PAD_NAME (best_pad));
+ }
+ } else {
+ gboolean push_stored = FALSE;
+
+ GST_OBJECT_LOCK (qtmux);
+ if ((GST_ELEMENT (qtmux)->sinkpads && GST_ELEMENT (qtmux)->sinkpads->next)
+ || qtmux->force_chunks) {
+ /* Only switch pads if we have more than one, otherwise
+ * we can just put everything into a single chunk and save
+ * a few bytes of offsets.
+ *
+ * Various applications and the Apple ProRes spec require chunking even
+ * in case of single stream files.
+ */
+ if (qtmux->current_pad)
+ GST_DEBUG_OBJECT (qtmux, "Switching from pad %s:%s",
+ GST_DEBUG_PAD_NAME (qtmux->current_pad));
+ best_pad = qtmux->current_pad = NULL;
+ push_stored = TRUE;
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ if (push_stored)
+ gst_qtmux_push_mdat_stored_buffers (qtmux);
+ }
+
+ if (!best_pad) {
+ GstClockTime best_time = GST_CLOCK_TIME_NONE;
+
+ GST_OBJECT_LOCK (qtmux);
+ for (l = GST_ELEMENT_CAST (qtmux)->sinkpads; l; l = l->next) {
+ GstQTMuxPad *qtpad = (GstQTMuxPad *) l->data;
+ GstBuffer *tmp_buf;
+ GstClockTime timestamp;
+
+ tmp_buf = gst_aggregator_pad_peek_buffer (GST_AGGREGATOR_PAD (qtpad));;
+ if (!tmp_buf) {
+ /* This one is newly EOS now, finish it for real */
+ if (qtpad->last_buf) {
+ timestamp = GST_BUFFER_DTS_OR_PTS (qtpad->last_buf);
+ } else {
+ continue;
+ }
+ } else {
+ if (qtpad->last_buf)
+ timestamp = GST_BUFFER_DTS_OR_PTS (qtpad->last_buf);
+ else
+ timestamp = GST_BUFFER_DTS_OR_PTS (tmp_buf);
+ }
+
+ if (best_pad == NULL ||
+ !GST_CLOCK_TIME_IS_VALID (best_time) || timestamp < best_time) {
+ best_pad = qtpad;
+ best_time = timestamp;
+ }
+
+ if (tmp_buf)
+ gst_buffer_unref (tmp_buf);
+ }
+ GST_OBJECT_UNLOCK (qtmux);
+
+ if (best_pad) {
+ GST_DEBUG_OBJECT (qtmux, "Choosing pad %s:%s",
+ GST_DEBUG_PAD_NAME (best_pad));
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "No best pad: EOS");
+ }
+ }
+
+ return best_pad;
+}
+
+static gboolean
+gst_qt_mux_are_all_pads_eos (GstQTMux * mux)
+{
+ GList *l;
+ gboolean ret = TRUE;
+
+ GST_OBJECT_LOCK (mux);
+ for (l = GST_ELEMENT_CAST (mux)->sinkpads; l; l = l->next) {
+ if (!gst_aggregator_pad_is_eos (GST_AGGREGATOR_PAD (l->data))) {
+ ret = FALSE;
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (mux);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_qt_mux_aggregate (GstAggregator * agg, gboolean timeout)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstQTMux *qtmux = GST_QT_MUX_CAST (agg);
+ GstQTMuxPad *best_pad = NULL;
+
+ if (G_UNLIKELY (qtmux->state == GST_QT_MUX_STATE_STARTED)) {
+ if ((ret = gst_qt_mux_start_file (qtmux)) != GST_FLOW_OK)
+ return ret;
+
+ qtmux->state = GST_QT_MUX_STATE_DATA;
+ }
+
+ if (G_UNLIKELY (qtmux->state == GST_QT_MUX_STATE_EOS))
+ return GST_FLOW_EOS;
+
+ best_pad = find_best_pad (qtmux);
+
+ /* clipping already converted to running time */
+ if (best_pad != NULL) {
+ GstBuffer *buf = NULL;
+
+ /* FIXME: the function should always return flow_status information, that
+ * is supposed to be stored each time buffers (collected from the pads)
+ * are pushed. */
+ if (best_pad->flow_status != GST_FLOW_OK)
+ return best_pad->flow_status;
+
+ if (qtmux->mux_mode != GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL ||
+ best_pad->raw_audio_adapter == NULL ||
+ best_pad->raw_audio_adapter_pts == GST_CLOCK_TIME_NONE)
+ buf = gst_aggregator_pad_pop_buffer (GST_AGGREGATOR_PAD (best_pad));
+
+ g_assert (buf || best_pad->last_buf || (best_pad->raw_audio_adapter
+ && gst_adapter_available (best_pad->raw_audio_adapter) > 0));
+
+ if (buf)
+ gst_qt_pad_adjust_buffer_dts (qtmux, best_pad, &buf);
+
+ ret = gst_qt_mux_add_buffer (qtmux, best_pad, buf);
+ } else if (gst_qt_mux_are_all_pads_eos (qtmux)) {
+
+ qtmux->state = GST_QT_MUX_STATE_EOS;
+ ret = gst_qt_mux_stop_file (qtmux);
+ if (ret == GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (qtmux, "We are eos");
+ ret = GST_FLOW_EOS;
+ } else {
+ GST_WARNING_OBJECT (qtmux, "Failed to stop file: %s",
+ gst_flow_get_name (ret));
+ }
+ }
+
+ return ret;
+}
+
+static gboolean
+check_field (GQuark field_id, const GValue * value, gpointer user_data)
+{
+ GstStructure *structure = (GstStructure *) user_data;
+ const GValue *other = gst_structure_id_get_value (structure, field_id);
+ const gchar *name = gst_structure_get_name (structure);
+
+ if (g_str_has_prefix (name, "video/")) {
+ /* ignore framerate with video caps */
+ if (g_strcmp0 (g_quark_to_string (field_id), "framerate") == 0)
+ return TRUE;
+ }
+
+ if (g_strcmp0 (name, "video/x-h264") == 0 ||
+ g_strcmp0 (name, "video/x-h265") == 0) {
+ /* We support muxing multiple codec_data structures, and the new SPS
+ * will contain updated tier / level / profiles, which means we do
+ * not need to fail renegotiation when those change.
+ */
+ if (g_strcmp0 (g_quark_to_string (field_id), "codec_data") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "tier") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "level") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "profile") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "chroma-format") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "bit-depth-luma") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id),
+ "bit-depth-chroma") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "colorimetry") == 0) {
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "width") == 0) {
+ /* TODO: this may require a separate track but gst, vlc, ffmpeg and
+ * browsers work with this so... */
+ return TRUE;
+ } else if (g_strcmp0 (g_quark_to_string (field_id), "height") == 0) {
+ return TRUE;
+ }
+ }
+
+ if (other == NULL)
+ return FALSE;
+ return gst_value_compare (value, other) == GST_VALUE_EQUAL;
+}
+
+static gboolean
+gst_qtmux_caps_is_subset_full (GstQTMux * qtmux, GstCaps * subset,
+ GstCaps * superset)
+{
+ GstStructure *sub_s = gst_caps_get_structure (subset, 0);
+ GstStructure *sup_s = gst_caps_get_structure (superset, 0);
+
+ if (!gst_structure_has_name (sup_s, gst_structure_get_name (sub_s)))
+ return FALSE;
+
+ return gst_structure_foreach (sub_s, check_field, sup_s);
+}
+
+/* will unref @qtmux */
+static gboolean
+gst_qt_mux_can_renegotiate (GstQTMux * qtmux, GstPad * pad, GstCaps * caps)
+{
+ GstQTMuxPad *qtmuxpad = GST_QT_MUX_PAD_CAST (pad);
+
+ /* does not go well to renegotiate stream mid-way, unless
+ * the old caps are a subset of the new one (this means upstream
+ * added more info to the caps, as both should be 'fixed' caps) */
+
+ if (!qtmuxpad->configured_caps) {
+ GST_DEBUG_OBJECT (qtmux, "pad %s accepted caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ return TRUE;
+ }
+
+ g_assert (caps != NULL);
+
+ if (!gst_qtmux_caps_is_subset_full (qtmux, qtmuxpad->configured_caps, caps)) {
+ GST_WARNING_OBJECT (qtmux,
+ "pad %s refused renegotiation to %" GST_PTR_FORMAT " from %"
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, qtmuxpad->configured_caps);
+ return FALSE;
+ }
+
+ GST_DEBUG_OBJECT (qtmux,
+ "pad %s accepted renegotiation to %" GST_PTR_FORMAT " from %"
+ GST_PTR_FORMAT, GST_PAD_NAME (pad), caps, qtmuxpad->configured_caps);
+
+ return TRUE;
+}
+
+static gboolean
+gst_qt_mux_audio_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+{
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ GstStructure *structure;
+ const gchar *mimetype;
+ gint rate, channels;
+ const GValue *value = NULL;
+ const GstBuffer *codec_data = NULL;
+ GstQTMuxFormat format;
+ AudioSampleEntry entry = { 0, };
+ AtomInfo *ext_atom = NULL;
+ gint constant_size = 0;
+ const gchar *stream_format;
+ guint32 timescale;
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ qtpad->prepare_buf_func = NULL;
+
+ format = qtmux_klass->format;
+ structure = gst_caps_get_structure (caps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* common info */
+ if (!gst_structure_get_int (structure, "channels", &channels) ||
+ !gst_structure_get_int (structure, "rate", &rate)) {
+ goto refuse_caps;
+ }
+
+ /* optional */
+ value = gst_structure_get_value (structure, "codec_data");
+ if (value != NULL)
+ codec_data = gst_value_get_buffer (value);
+
+ qtpad->is_out_of_order = FALSE;
+
+ /* set common properties */
+ entry.sample_rate = rate;
+ entry.channels = channels;
+ /* default */
+ entry.sample_size = 16;
+ /* this is the typical compressed case */
+ if (format == GST_QT_MUX_FORMAT_QT) {
+ entry.version = 1;
+ entry.compression_id = -2;
+ }
+
+ /* now map onto a fourcc, and some extra properties */
+ if (strcmp (mimetype, "audio/mpeg") == 0) {
+ gint mpegversion = 0, mpegaudioversion = 0;
+ gint layer = -1;
+
+ gst_structure_get_int (structure, "mpegversion", &mpegversion);
+ switch (mpegversion) {
+ case 1:
+ gst_structure_get_int (structure, "layer", &layer);
+ gst_structure_get_int (structure, "mpegaudioversion",
+ &mpegaudioversion);
+
+ /* mp1/2/3 */
+ /* note: QuickTime player does not like mp3 either way in iso/mp4 */
+ if (format == GST_QT_MUX_FORMAT_QT)
+ entry.fourcc = FOURCC__mp3;
+ else {
+ entry.fourcc = FOURCC_mp4a;
+ ext_atom =
+ build_esds_extension (qtpad->trak, ESDS_OBJECT_TYPE_MPEG1_P3,
+ ESDS_STREAM_TYPE_AUDIO, codec_data, qtpad->avg_bitrate,
+ qtpad->max_bitrate);
+ }
+ if (layer == 1) {
+ g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4
+ || format == GST_QT_MUX_FORMAT_QT);
+ entry.samples_per_packet = 384;
+ } else if (layer == 2) {
+ g_warn_if_fail (format == GST_QT_MUX_FORMAT_MP4
+ || format == GST_QT_MUX_FORMAT_QT);
+ entry.samples_per_packet = 1152;
+ } else {
+ g_warn_if_fail (layer == 3);
+ entry.samples_per_packet = (mpegaudioversion <= 1) ? 1152 : 576;
+ }
+ entry.bytes_per_sample = 2;
+ break;
+ case 4:
+
+ /* check stream-format */
+ stream_format = gst_structure_get_string (structure, "stream-format");
+ if (stream_format) {
+ if (strcmp (stream_format, "raw") != 0) {
+ GST_WARNING_OBJECT (qtmux, "Unsupported AAC stream-format %s, "
+ "please use 'raw'", stream_format);
+ goto refuse_caps;
+ }
+ } else {
+ GST_WARNING_OBJECT (qtmux, "No stream-format present in caps, "
+ "assuming 'raw'");
+ }
+
+ if (!codec_data || gst_buffer_get_size ((GstBuffer *) codec_data) < 2) {
+ GST_WARNING_OBJECT (qtmux, "no (valid) codec_data for AAC audio");
+ goto refuse_caps;
+ } else {
+ guint8 profile;
+
+ gst_buffer_extract ((GstBuffer *) codec_data, 0, &profile, 1);
+ /* warn if not Low Complexity profile */
+ profile >>= 3;
+ if (profile != 2)
+ GST_WARNING_OBJECT (qtmux,
+ "non-LC AAC may not run well on (Apple) QuickTime/iTunes");
+ }
+
+ /* AAC */
+ entry.fourcc = FOURCC_mp4a;
+
+ if (format == GST_QT_MUX_FORMAT_QT)
+ ext_atom = build_mov_aac_extension (qtpad->trak, codec_data,
+ qtpad->avg_bitrate, qtpad->max_bitrate);
+ else
+ ext_atom =
+ build_esds_extension (qtpad->trak, ESDS_OBJECT_TYPE_MPEG4_P3,
+ ESDS_STREAM_TYPE_AUDIO, codec_data, qtpad->avg_bitrate,
+ qtpad->max_bitrate);
+ break;
+ default:
+ break;
+ }
+ } else if (strcmp (mimetype, "audio/AMR") == 0) {
+ entry.fourcc = FOURCC_samr;
+ entry.sample_size = 16;
+ entry.samples_per_packet = 160;
+ entry.bytes_per_sample = 2;
+ ext_atom = build_amr_extension ();
+ } else if (strcmp (mimetype, "audio/AMR-WB") == 0) {
+ entry.fourcc = FOURCC_sawb;
+ entry.sample_size = 16;
+ entry.samples_per_packet = 320;
+ entry.bytes_per_sample = 2;
+ ext_atom = build_amr_extension ();
+ } else if (strcmp (mimetype, "audio/x-raw") == 0) {
+ GstAudioInfo info;
+
+ gst_audio_info_init (&info);
+ if (!gst_audio_info_from_caps (&info, caps))
+ goto refuse_caps;
+
+ /* spec has no place for a distinction in these */
+ if (info.finfo->width != info.finfo->depth) {
+ GST_DEBUG_OBJECT (qtmux, "width must be same as depth!");
+ goto refuse_caps;
+ }
+
+ if ((info.finfo->flags & GST_AUDIO_FORMAT_FLAG_SIGNED)) {
+ if (info.finfo->endianness == G_LITTLE_ENDIAN)
+ entry.fourcc = FOURCC_sowt;
+ else if (info.finfo->endianness == G_BIG_ENDIAN)
+ entry.fourcc = FOURCC_twos;
+ else
+ entry.fourcc = FOURCC_sowt;
+ /* maximum backward compatibility; only new version for > 16 bit */
+ if (info.finfo->depth <= 16)
+ entry.version = 0;
+ /* not compressed in any case */
+ entry.compression_id = 0;
+ /* QT spec says: max at 16 bit even if sample size were actually larger,
+ * however, most players (e.g. QuickTime!) seem to disagree, so ... */
+ entry.sample_size = info.finfo->depth;
+ entry.bytes_per_sample = info.finfo->depth / 8;
+ entry.samples_per_packet = 1;
+ entry.bytes_per_packet = info.finfo->depth / 8;
+ entry.bytes_per_frame = entry.bytes_per_packet * info.channels;
+ } else {
+ if (info.finfo->width == 8 && info.finfo->depth == 8) {
+ /* fall back to old 8-bit version */
+ entry.fourcc = FOURCC_raw_;
+ entry.version = 0;
+ entry.compression_id = 0;
+ entry.sample_size = 8;
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "non 8-bit PCM must be signed");
+ goto refuse_caps;
+ }
+ }
+ constant_size = (info.finfo->depth / 8) * info.channels;
+ } else if (strcmp (mimetype, "audio/x-alaw") == 0) {
+ entry.fourcc = FOURCC_alaw;
+ entry.samples_per_packet = 1023;
+ entry.bytes_per_sample = 2;
+ } else if (strcmp (mimetype, "audio/x-mulaw") == 0) {
+ entry.fourcc = FOURCC_ulaw;
+ entry.samples_per_packet = 1023;
+ entry.bytes_per_sample = 2;
+ } else if (strcmp (mimetype, "audio/x-adpcm") == 0) {
+ gint blocksize;
+ if (!gst_structure_get_int (structure, "block_align", &blocksize)) {
+ GST_DEBUG_OBJECT (qtmux, "broken caps, block_align missing");
+ goto refuse_caps;
+ }
+ /* Currently only supports WAV-style IMA ADPCM, for which the codec id is
+ 0x11 */
+ entry.fourcc = MS_WAVE_FOURCC (0x11);
+ /* 4 byte header per channel (including one sample). 2 samples per byte
+ remaining. Simplifying gives the following (samples per block per
+ channel) */
+ entry.samples_per_packet = 2 * blocksize / channels - 7;
+ entry.bytes_per_sample = 2;
+
+ entry.bytes_per_frame = blocksize;
+ entry.bytes_per_packet = blocksize / channels;
+ /* ADPCM has constant size packets */
+ constant_size = 1;
+ /* TODO: I don't really understand why this helps, but it does! Constant
+ * size and compression_id of -2 seem to be incompatible, and other files
+ * in the wild use this too. */
+ entry.compression_id = -1;
+
+ ext_atom = build_ima_adpcm_extension (channels, rate, blocksize);
+ } else if (strcmp (mimetype, "audio/x-alac") == 0) {
+ GstBuffer *codec_config;
+ gint len;
+ GstMapInfo map;
+
+ entry.fourcc = FOURCC_alac;
+ gst_buffer_map ((GstBuffer *) codec_data, &map, GST_MAP_READ);
+ /* let's check if codec data already comes with 'alac' atom prefix */
+ if (!codec_data || (len = map.size) < 28) {
+ GST_DEBUG_OBJECT (qtmux, "broken caps, codec data missing");
+ gst_buffer_unmap ((GstBuffer *) codec_data, &map);
+ goto refuse_caps;
+ }
+ if (GST_READ_UINT32_LE (map.data + 4) == FOURCC_alac) {
+ len -= 8;
+ codec_config =
+ gst_buffer_copy_region ((GstBuffer *) codec_data,
+ GST_BUFFER_COPY_MEMORY, 8, len);
+ } else {
+ codec_config = gst_buffer_ref ((GstBuffer *) codec_data);
+ }
+ gst_buffer_unmap ((GstBuffer *) codec_data, &map);
+ if (len != 28) {
+ /* does not look good, but perhaps some trailing unneeded stuff */
+ GST_WARNING_OBJECT (qtmux, "unexpected codec-data size, possibly broken");
+ }
+ if (format == GST_QT_MUX_FORMAT_QT)
+ ext_atom = build_mov_alac_extension (codec_config);
+ else
+ ext_atom = build_codec_data_extension (FOURCC_alac, codec_config);
+ /* set some more info */
+ gst_buffer_map (codec_config, &map, GST_MAP_READ);
+ entry.bytes_per_sample = 2;
+ entry.samples_per_packet = GST_READ_UINT32_BE (map.data + 4);
+ gst_buffer_unmap (codec_config, &map);
+ gst_buffer_unref (codec_config);
+ } else if (strcmp (mimetype, "audio/x-ac3") == 0) {
+ entry.fourcc = FOURCC_ac_3;
+
+ /* Fixed values according to TS 102 366 but it also mentions that
+ * they should be ignored */
+ entry.channels = 2;
+ entry.sample_size = 16;
+
+ /* AC-3 needs an extension atom but its data can only be obtained from
+ * the stream itself. Abuse the prepare_buf_func so we parse a frame
+ * and get the needed data */
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_parse_ac3_frame;
+ } else if (strcmp (mimetype, "audio/x-opus") == 0) {
+ /* Based on the specification defined in:
+ * https://www.opus-codec.org/docs/opus_in_isobmff.html */
+ guint8 channels, mapping_family, stream_count, coupled_count;
+ guint16 pre_skip;
+ gint16 output_gain;
+ guint32 rate;
+ guint8 channel_mapping[256];
+ const GValue *streamheader;
+ const GValue *first_element;
+ GstBuffer *header;
+
+ entry.fourcc = FOURCC_opus;
+ entry.sample_size = 16;
+
+ streamheader = gst_structure_get_value (structure, "streamheader");
+ if (streamheader && GST_VALUE_HOLDS_ARRAY (streamheader) &&
+ gst_value_array_get_size (streamheader) != 0) {
+ first_element = gst_value_array_get_value (streamheader, 0);
+ header = gst_value_get_buffer (first_element);
+ if (!gst_codec_utils_opus_parse_header (header, &rate, &channels,
+ &mapping_family, &stream_count, &coupled_count, channel_mapping,
+ &pre_skip, &output_gain)) {
+ GST_ERROR_OBJECT (qtmux, "Incomplete OpusHead");
+ goto refuse_caps;
+ }
+ } else {
+ GST_WARNING_OBJECT (qtmux,
+ "no streamheader field in caps %" GST_PTR_FORMAT, caps);
+
+ if (!gst_codec_utils_opus_parse_caps (caps, &rate, &channels,
+ &mapping_family, &stream_count, &coupled_count,
+ channel_mapping)) {
+ GST_ERROR_OBJECT (qtmux, "Incomplete Opus caps");
+ goto refuse_caps;
+ }
+ pre_skip = 0;
+ output_gain = 0;
+ }
+
+ entry.channels = channels;
+ ext_atom = build_opus_extension (rate, channels, mapping_family,
+ stream_count, coupled_count, channel_mapping, pre_skip, output_gain);
+ }
+
+ if (!entry.fourcc)
+ goto refuse_caps;
+
+ timescale = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!timescale && qtmux->trak_timescale)
+ timescale = qtmux->trak_timescale;
+ else if (!timescale)
+ timescale = entry.sample_rate;
+
+ /* ok, set the pad info accordingly */
+ qtpad->fourcc = entry.fourcc;
+ qtpad->sample_size = constant_size;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_audio_type (qtpad->trak,
+ qtmux->context, &entry, timescale, ext_atom, constant_size);
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_qt_mux_video_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+{
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstQTMuxClass *qtmux_klass = (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+ GstStructure *structure;
+ const gchar *mimetype;
+ gint width, height, depth = -1;
+ gint framerate_num, framerate_den;
+ guint32 rate;
+ const GValue *value = NULL;
+ const GstBuffer *codec_data = NULL;
+ VisualSampleEntry entry = { 0, };
+ GstQTMuxFormat format;
+ AtomInfo *ext_atom = NULL;
+ GList *ext_atom_list = NULL;
+ gboolean sync = FALSE;
+ int par_num, par_den;
+ const gchar *multiview_mode;
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ qtpad->prepare_buf_func = NULL;
+
+ format = qtmux_klass->format;
+ structure = gst_caps_get_structure (caps, 0);
+ mimetype = gst_structure_get_name (structure);
+
+ /* required parts */
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height))
+ goto refuse_caps;
+
+ /* optional */
+ depth = -1;
+ /* works as a default timebase */
+ framerate_num = 10000;
+ framerate_den = 1;
+ gst_structure_get_fraction (structure, "framerate", &framerate_num,
+ &framerate_den);
+ gst_structure_get_int (structure, "depth", &depth);
+ value = gst_structure_get_value (structure, "codec_data");
+ if (value != NULL)
+ codec_data = gst_value_get_buffer (value);
+
+ par_num = 1;
+ par_den = 1;
+ gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_num,
+ &par_den);
+
+ qtpad->is_out_of_order = FALSE;
+
+ /* bring frame numerator into a range that ensures both reasonable resolution
+ * as well as a fair duration */
+ qtpad->expected_sample_duration_n = framerate_num;
+ qtpad->expected_sample_duration_d = framerate_den;
+
+ rate = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!rate && qtmux->trak_timescale)
+ rate = qtmux->trak_timescale;
+ else if (!rate)
+ rate = atom_framerate_to_timescale (framerate_num, framerate_den);
+
+ GST_DEBUG_OBJECT (qtmux, "Rate of video track selected: %" G_GUINT32_FORMAT,
+ rate);
+
+ multiview_mode = gst_structure_get_string (structure, "multiview-mode");
+ if (multiview_mode && !qtpad->trak->mdia.minf.stbl.svmi) {
+ GstVideoMultiviewMode mode;
+ GstVideoMultiviewFlags flags = 0;
+
+ mode = gst_video_multiview_mode_from_caps_string (multiview_mode);
+ gst_structure_get_flagset (structure,
+ "multiview-flags", (guint *) & flags, NULL);
+ switch (mode) {
+ case GST_VIDEO_MULTIVIEW_MODE_MONO:
+ /* Nothing to do for mono, just don't warn about it */
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (0,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (1,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ case GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME:
+ qtpad->trak->mdia.minf.stbl.svmi =
+ atom_svmi_new (2,
+ flags & GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST);
+ break;
+ default:
+ GST_DEBUG_OBJECT (qtmux, "Unsupported multiview-mode %s",
+ multiview_mode);
+ break;
+ }
+ }
+
+ /* set common properties */
+ entry.width = width;
+ entry.height = height;
+ entry.par_n = par_num;
+ entry.par_d = par_den;
+ /* should be OK according to qt and iso spec, override if really needed */
+ entry.color_table_id = -1;
+ entry.frame_count = 1;
+ entry.depth = 24;
+
+ /* sync entries by default */
+ sync = TRUE;
+
+ /* now map onto a fourcc, and some extra properties */
+ if (strcmp (mimetype, "video/x-raw") == 0) {
+ const gchar *format;
+ GstVideoFormat fmt;
+ const GstVideoFormatInfo *vinfo;
+
+ format = gst_structure_get_string (structure, "format");
+ fmt = gst_video_format_from_string (format);
+ vinfo = gst_video_format_get_info (fmt);
+
+ switch (fmt) {
+ case GST_VIDEO_FORMAT_UYVY:
+ if (depth == -1)
+ depth = 24;
+ entry.fourcc = FOURCC_2vuy;
+ entry.depth = depth;
+ sync = FALSE;
+ break;
+ case GST_VIDEO_FORMAT_v210:
+ if (depth == -1)
+ depth = 24;
+ entry.fourcc = FOURCC_v210;
+ entry.depth = depth;
+ sync = FALSE;
+ break;
+ default:
+ if (GST_VIDEO_FORMAT_INFO_FLAGS (vinfo) & GST_VIDEO_FORMAT_FLAG_RGB) {
+ entry.fourcc = FOURCC_raw_;
+ entry.depth = GST_VIDEO_FORMAT_INFO_PSTRIDE (vinfo, 0) * 8;
+ sync = FALSE;
+ }
+ break;
+ }
+ } else if (strcmp (mimetype, "video/x-h263") == 0) {
+ ext_atom = NULL;
+ if (format == GST_QT_MUX_FORMAT_QT)
+ entry.fourcc = FOURCC_h263;
+ else
+ entry.fourcc = FOURCC_s263;
+ ext_atom = build_h263_extension ();
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ } else if (strcmp (mimetype, "video/x-divx") == 0 ||
+ strcmp (mimetype, "video/mpeg") == 0) {
+ gint version = 0;
+
+ if (strcmp (mimetype, "video/x-divx") == 0) {
+ gst_structure_get_int (structure, "divxversion", &version);
+ version = version == 5 ? 1 : 0;
+ } else {
+ gst_structure_get_int (structure, "mpegversion", &version);
+ version = version == 4 ? 1 : 0;
+ }
+ if (version) {
+ entry.fourcc = FOURCC_mp4v;
+ ext_atom =
+ build_esds_extension (qtpad->trak, ESDS_OBJECT_TYPE_MPEG4_P2,
+ ESDS_STREAM_TYPE_VISUAL, codec_data, qtpad->avg_bitrate,
+ qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ if (!codec_data)
+ GST_WARNING_OBJECT (qtmux, "no codec_data for MPEG4 video; "
+ "output might not play in Apple QuickTime (try global-headers?)");
+ }
+ } else if (strcmp (mimetype, "video/x-h264") == 0) {
+ const gchar *stream_format;
+
+ if (!codec_data) {
+ GST_WARNING_OBJECT (qtmux, "no codec_data in h264 caps");
+ goto refuse_caps;
+ }
+
+ stream_format = gst_structure_get_string (structure, "stream-format");
+
+ if (!g_strcmp0 (stream_format, "avc")) {
+ entry.fourcc = FOURCC_avc1;
+ } else if (!g_strcmp0 (stream_format, "avc3")) {
+ entry.fourcc = FOURCC_avc3;
+ } else {
+ g_assert_not_reached ();
+ }
+
+ ext_atom = build_btrt_extension (0, qtpad->avg_bitrate, qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ ext_atom = build_codec_data_extension (FOURCC_avcC, codec_data);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ } else if (strcmp (mimetype, "video/x-h265") == 0) {
+ const gchar *format;
+
+ if (!codec_data) {
+ GST_WARNING_OBJECT (qtmux, "no codec_data in h265 caps");
+ goto refuse_caps;
+ }
+
+ format = gst_structure_get_string (structure, "stream-format");
+ if (strcmp (format, "hvc1") == 0)
+ entry.fourcc = FOURCC_hvc1;
+ else if (strcmp (format, "hev1") == 0)
+ entry.fourcc = FOURCC_hev1;
+
+ ext_atom = build_btrt_extension (0, qtpad->avg_bitrate, qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+
+ ext_atom = build_codec_data_extension (FOURCC_hvcC, codec_data);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+
+ } else if (strcmp (mimetype, "video/x-svq") == 0) {
+ gint version = 0;
+ const GstBuffer *seqh = NULL;
+ const GValue *seqh_value;
+ gdouble gamma = 0;
+
+ gst_structure_get_int (structure, "svqversion", &version);
+ if (version == 3) {
+ entry.fourcc = FOURCC_SVQ3;
+ entry.version = 3;
+ entry.depth = 32;
+
+ seqh_value = gst_structure_get_value (structure, "seqh");
+ if (seqh_value) {
+ seqh = gst_value_get_buffer (seqh_value);
+ ext_atom = build_SMI_atom (seqh);
+ if (ext_atom)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ }
+
+ /* we need to add the gamma anyway because quicktime might crash
+ * when it doesn't find it */
+ if (!gst_structure_get_double (structure, "applied-gamma", &gamma)) {
+ /* it seems that using 0 here makes it ignored */
+ gamma = 0.0;
+ }
+ ext_atom = build_gama_atom (gamma);
+ if (ext_atom)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ } else {
+ GST_WARNING_OBJECT (qtmux, "SVQ version %d not supported. Please file "
+ "a bug at http://bugzilla.gnome.org", version);
+ }
+ } else if (strcmp (mimetype, "video/x-dv") == 0) {
+ gint version = 0;
+ gboolean pal = TRUE;
+
+ sync = FALSE;
+ if (framerate_num != 25 || framerate_den != 1)
+ pal = FALSE;
+ gst_structure_get_int (structure, "dvversion", &version);
+ /* fall back to typical one */
+ if (!version)
+ version = 25;
+ switch (version) {
+ case 25:
+ if (pal)
+ entry.fourcc = FOURCC_dvcp;
+ else
+ entry.fourcc = FOURCC_dvc_;
+ break;
+ case 50:
+ if (pal)
+ entry.fourcc = FOURCC_dv5p;
+ else
+ entry.fourcc = FOURCC_dv5n;
+ break;
+ default:
+ GST_WARNING_OBJECT (qtmux, "unrecognized dv version");
+ break;
+ }
+ } else if (strcmp (mimetype, "image/jpeg") == 0) {
+ entry.fourcc = FOURCC_jpeg;
+ sync = FALSE;
+ } else if (strcmp (mimetype, "image/png") == 0) {
+ entry.fourcc = FOURCC_png;
+ sync = FALSE;
+ } else if (strcmp (mimetype, "image/x-j2c") == 0 ||
+ strcmp (mimetype, "image/x-jpc") == 0) {
+ const gchar *colorspace;
+ const GValue *cmap_array;
+ const GValue *cdef_array;
+ gint ncomp = 0;
+
+ if (strcmp (mimetype, "image/x-jpc") == 0) {
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_jpc_buffer;
+ }
+
+ gst_structure_get_int (structure, "num-components", &ncomp);
+ cmap_array = gst_structure_get_value (structure, "component-map");
+ cdef_array = gst_structure_get_value (structure, "channel-definitions");
+
+ ext_atom = NULL;
+ entry.fourcc = FOURCC_mjp2;
+ sync = FALSE;
+
+ colorspace = gst_structure_get_string (structure, "colorspace");
+ if (colorspace &&
+ (ext_atom =
+ build_jp2h_extension (width, height, colorspace, ncomp, cmap_array,
+ cdef_array)) != NULL) {
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+
+ ext_atom = build_jp2x_extension (codec_data);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ } else {
+ GST_DEBUG_OBJECT (qtmux, "missing or invalid fourcc in jp2 caps");
+ goto refuse_caps;
+ }
+ } else if (strcmp (mimetype, "video/x-vp8") == 0) {
+ entry.fourcc = FOURCC_vp08;
+ } else if (strcmp (mimetype, "video/x-vp9") == 0) {
+ entry.fourcc = FOURCC_vp09;
+ } else if (strcmp (mimetype, "video/x-dirac") == 0) {
+ entry.fourcc = FOURCC_drac;
+ } else if (strcmp (mimetype, "video/x-qt-part") == 0) {
+ guint32 fourcc = 0;
+
+ gst_structure_get_uint (structure, "format", &fourcc);
+ entry.fourcc = fourcc;
+ } else if (strcmp (mimetype, "video/x-mp4-part") == 0) {
+ guint32 fourcc = 0;
+
+ gst_structure_get_uint (structure, "format", &fourcc);
+ entry.fourcc = fourcc;
+ } else if (strcmp (mimetype, "video/x-prores") == 0) {
+ const gchar *variant;
+
+ variant = gst_structure_get_string (structure, "variant");
+ if (!variant || !g_strcmp0 (variant, "standard"))
+ entry.fourcc = FOURCC_apcn;
+ else if (!g_strcmp0 (variant, "lt"))
+ entry.fourcc = FOURCC_apcs;
+ else if (!g_strcmp0 (variant, "hq"))
+ entry.fourcc = FOURCC_apch;
+ else if (!g_strcmp0 (variant, "proxy"))
+ entry.fourcc = FOURCC_apco;
+ else if (!g_strcmp0 (variant, "4444"))
+ entry.fourcc = FOURCC_ap4h;
+ else if (!g_strcmp0 (variant, "4444xq"))
+ entry.fourcc = FOURCC_ap4x;
+
+ sync = FALSE;
+
+ if (!qtmux->interleave_time_set)
+ qtmux->interleave_time = 500 * GST_MSECOND;
+ if (!qtmux->interleave_bytes_set)
+ qtmux->interleave_bytes = width > 720 ? 4 * 1024 * 1024 : 2 * 1024 * 1024;
+ } else if (strcmp (mimetype, "video/x-cineform") == 0) {
+ entry.fourcc = FOURCC_cfhd;
+ sync = FALSE;
+ } else if (strcmp (mimetype, "video/x-av1") == 0) {
+ gint presentation_delay;
+ guint8 presentation_delay_byte = 0;
+ GstBuffer *av1_codec_data;
+
+ if (gst_structure_get_int (structure, "presentation-delay",
+ &presentation_delay)) {
+ presentation_delay_byte = 1 << 5;
+ presentation_delay_byte |= MAX (0xF, presentation_delay & 0xF);
+ }
+
+
+ av1_codec_data = gst_buffer_new_allocate (NULL, 5, NULL);
+ /* Fill version and 3 bytes of flags to 0 */
+ gst_buffer_memset (av1_codec_data, 0, 0, 4);
+ gst_buffer_fill (av1_codec_data, 4, &presentation_delay_byte, 1);
+ if (codec_data)
+ av1_codec_data = gst_buffer_append (av1_codec_data,
+ gst_buffer_ref ((GstBuffer *) codec_data));
+
+ entry.fourcc = FOURCC_av01;
+
+ ext_atom = build_btrt_extension (0, qtpad->avg_bitrate, qtpad->max_bitrate);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ ext_atom = build_codec_data_extension (FOURCC_av1C, av1_codec_data);
+ if (ext_atom != NULL)
+ ext_atom_list = g_list_prepend (ext_atom_list, ext_atom);
+ gst_buffer_unref (av1_codec_data);
+ }
+
+ if (!entry.fourcc)
+ goto refuse_caps;
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT ||
+ qtmux_klass->format == GST_QT_MUX_FORMAT_MP4) {
+ const gchar *s;
+ GstVideoColorimetry colorimetry;
+
+ s = gst_structure_get_string (structure, "colorimetry");
+ if (s && gst_video_colorimetry_from_string (&colorimetry, s)) {
+ ext_atom =
+ build_colr_extension (&colorimetry,
+ qtmux_klass->format == GST_QT_MUX_FORMAT_MP4);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ }
+ }
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT
+ || strcmp (mimetype, "image/x-j2c") == 0
+ || strcmp (mimetype, "image/x-jpc") == 0) {
+ const gchar *s;
+ GstVideoInterlaceMode interlace_mode;
+ GstVideoFieldOrder field_order;
+ gint fields = -1;
+
+ if (strcmp (mimetype, "image/x-j2c") == 0 ||
+ strcmp (mimetype, "image/x-jpc") == 0) {
+
+ fields = 1;
+ gst_structure_get_int (structure, "fields", &fields);
+ }
+
+ s = gst_structure_get_string (structure, "interlace-mode");
+ if (s)
+ interlace_mode = gst_video_interlace_mode_from_string (s);
+ else
+ interlace_mode =
+ (fields <=
+ 1) ? GST_VIDEO_INTERLACE_MODE_PROGRESSIVE :
+ GST_VIDEO_INTERLACE_MODE_MIXED;
+
+ field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ if (interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED) {
+ s = gst_structure_get_string (structure, "field-order");
+ if (s)
+ field_order = gst_video_field_order_from_string (s);
+ }
+
+ ext_atom = build_fiel_extension (interlace_mode, field_order);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ }
+
+
+ if (qtmux_klass->format == GST_QT_MUX_FORMAT_QT &&
+ width > 640 && width <= 1052 && height >= 480 && height <= 576) {
+ /* The 'clap' extension is also defined for MP4 but inventing values in
+ * general seems a bit tricky for this one. We only write it for
+ * SD resolution in MOV, where it is a requirement.
+ * The same goes for the 'tapt' extension, just that it is not defined for
+ * MP4 and only for MOV
+ */
+ gint dar_num, dar_den;
+ gint clef_width, clef_height, prof_width;
+ gint clap_width_n, clap_width_d, clap_height;
+ gint cdiv;
+ double approx_dar;
+
+ /* First, guess display aspect ratio based on pixel aspect ratio,
+ * width and height. We assume that display aspect ratio is either
+ * 4:3 or 16:9
+ */
+ approx_dar = (gdouble) (width * par_num) / (height * par_den);
+ if (approx_dar > 11.0 / 9 && approx_dar < 14.0 / 9) {
+ dar_num = 4;
+ dar_den = 3;
+ } else if (approx_dar > 15.0 / 9 && approx_dar < 18.0 / 9) {
+ dar_num = 16;
+ dar_den = 9;
+ } else {
+ dar_num = width * par_num;
+ dar_den = height * par_den;
+ cdiv = gst_util_greatest_common_divisor (dar_num, dar_den);
+ dar_num /= cdiv;
+ dar_den /= cdiv;
+ }
+
+ /* Then, calculate clean-aperture values (clap and clef)
+ * using the guessed DAR.
+ */
+ clef_height = clap_height = (height == 486 ? 480 : height);
+ clef_width = gst_util_uint64_scale (clef_height,
+ dar_num * G_GUINT64_CONSTANT (65536), dar_den);
+ prof_width = gst_util_uint64_scale (width,
+ par_num * G_GUINT64_CONSTANT (65536), par_den);
+ clap_width_n = clap_height * dar_num * par_den;
+ clap_width_d = dar_den * par_num;
+ cdiv = gst_util_greatest_common_divisor (clap_width_n, clap_width_d);
+ clap_width_n /= cdiv;
+ clap_width_d /= cdiv;
+
+ ext_atom = build_tapt_extension (clef_width, clef_height << 16, prof_width,
+ height << 16, width << 16, height << 16);
+ qtpad->trak->tapt = ext_atom;
+
+ ext_atom = build_clap_extension (clap_width_n, clap_width_d,
+ clap_height, 1, 0, 1, 0, 1);
+ if (ext_atom)
+ ext_atom_list = g_list_append (ext_atom_list, ext_atom);
+ }
+
+ /* ok, set the pad info accordingly */
+ qtpad->fourcc = entry.fourcc;
+ qtpad->sync = sync;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_video_type (qtpad->trak,
+ qtmux->context, &entry, rate, ext_atom_list);
+ if (strcmp (mimetype, "video/x-prores") == 0) {
+ SampleTableEntryMP4V *mp4v = (SampleTableEntryMP4V *) qtpad->trak_ste;
+ const gchar *compressor = NULL;
+ mp4v->spatial_quality = 0x3FF;
+ mp4v->temporal_quality = 0;
+ mp4v->vendor = FOURCC_appl;
+ mp4v->horizontal_resolution = 72 << 16;
+ mp4v->vertical_resolution = 72 << 16;
+ mp4v->depth = (entry.fourcc == FOURCC_ap4h
+ || entry.fourcc == FOURCC_ap4x) ? (depth > 0 ? depth : 32) : 24;
+
+ /* Set compressor name, required by some software */
+ switch (entry.fourcc) {
+ case FOURCC_apcn:
+ compressor = "Apple ProRes 422";
+ break;
+ case FOURCC_apcs:
+ compressor = "Apple ProRes 422 LT";
+ break;
+ case FOURCC_apch:
+ compressor = "Apple ProRes 422 HQ";
+ break;
+ case FOURCC_apco:
+ compressor = "Apple ProRes 422 Proxy";
+ break;
+ case FOURCC_ap4h:
+ compressor = "Apple ProRes 4444";
+ break;
+ case FOURCC_ap4x:
+ compressor = "Apple ProRes 4444 XQ";
+ break;
+ }
+ if (compressor) {
+ strcpy ((gchar *) mp4v->compressor + 1, compressor);
+ mp4v->compressor[0] = strlen (compressor);
+ }
+ }
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_qt_mux_subtitle_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+{
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstStructure *structure;
+ SubtitleSampleEntry entry = { 0, };
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ /* subtitles default */
+ subtitle_sample_entry_init (&entry);
+ qtpad->is_out_of_order = FALSE;
+ qtpad->sync = FALSE;
+ qtpad->sparse = TRUE;
+ qtpad->prepare_buf_func = NULL;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (gst_structure_has_name (structure, "text/x-raw")) {
+ const gchar *format = gst_structure_get_string (structure, "format");
+ if (format && strcmp (format, "utf8") == 0) {
+ entry.fourcc = FOURCC_tx3g;
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_tx3g_buffer;
+ qtpad->create_empty_buffer = gst_qt_mux_create_empty_tx3g_buffer;
+ }
+ }
+
+ if (!entry.fourcc)
+ goto refuse_caps;
+
+ qtpad->fourcc = entry.fourcc;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_subtitle_type (qtpad->trak,
+ qtmux->context, &entry);
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_qt_mux_caption_sink_set_caps (GstQTMuxPad * qtpad, GstCaps * caps)
+{
+ GstPad *pad = GST_PAD (qtpad);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (gst_pad_get_parent (pad));
+ GstStructure *structure;
+ guint32 fourcc_entry;
+ guint32 timescale;
+
+ GST_DEBUG_OBJECT (qtmux, "%s:%s, caps=%" GST_PTR_FORMAT,
+ GST_DEBUG_PAD_NAME (pad), caps);
+
+ /* captions default */
+ qtpad->is_out_of_order = FALSE;
+ qtpad->sync = FALSE;
+ qtpad->sparse = TRUE;
+ /* Closed caption data are within atoms */
+ qtpad->prepare_buf_func = gst_qt_mux_prepare_caption_buffer;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* We know we only handle 608,format=s334-1a and 708,format=cdp */
+ if (gst_structure_has_name (structure, "closedcaption/x-cea-608")) {
+ fourcc_entry = FOURCC_c608;
+ } else if (gst_structure_has_name (structure, "closedcaption/x-cea-708")) {
+ fourcc_entry = FOURCC_c708;
+ } else
+ goto refuse_caps;
+
+ /* We set the real timescale later to the one from the video track when
+ * writing the headers */
+ timescale = gst_qt_mux_pad_get_timescale (GST_QT_MUX_PAD_CAST (pad));
+ if (!timescale && qtmux->trak_timescale)
+ timescale = qtmux->trak_timescale;
+ else if (!timescale)
+ timescale = 30000;
+
+ qtpad->fourcc = fourcc_entry;
+ qtpad->trak_ste =
+ (SampleTableEntry *) atom_trak_set_caption_type (qtpad->trak,
+ qtmux->context, timescale, fourcc_entry);
+
+ /* Initialize caption track language code to 0 unless something else is
+ * specified. Without this, Final Cut considers it "non-standard"
+ */
+ qtpad->trak->mdia.mdhd.language_code = 0;
+
+ gst_object_unref (qtmux);
+ return TRUE;
+
+ /* ERRORS */
+refuse_caps:
+ {
+ GST_WARNING_OBJECT (qtmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (qtmux);
+ return FALSE;
+ }
+}
+
+static GstFlowReturn
+gst_qt_mux_sink_event_pre_queue (GstAggregator * agg,
+ GstAggregatorPad * agg_pad, GstEvent * event)
+{
+ GstAggregatorClass *agg_class = GST_AGGREGATOR_CLASS (parent_class);
+ GstQTMux *qtmux;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ qtmux = GST_QT_MUX_CAST (agg);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_CAPS) {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ if (!gst_qt_mux_can_renegotiate (qtmux, GST_PAD (agg_pad), caps)) {
+ gst_event_unref (event);
+ event = NULL;
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+
+ if (event != NULL)
+ ret = agg_class->sink_event_pre_queue (agg, agg_pad, event);
+
+ return ret;
+}
+
+
/* GstAggregator::sink_event handler.
 *
 * Consumes CAPS events (dispatching to the pad's set_caps vfunc and
 * resetting the current chunk state) and TAG events (merging tags into
 * the global or per-pad tag list, picking up bitrate and language-code
 * tags). All other events are forwarded to the parent class.
 */
static gboolean
gst_qt_mux_sink_event (GstAggregator * agg, GstAggregatorPad * agg_pad,
    GstEvent * event)
{
  GstAggregatorClass *agg_class = GST_AGGREGATOR_CLASS (parent_class);
  GstQTMuxPad *qtmux_pad;
  GstQTMux *qtmux;
  guint32 avg_bitrate = 0, max_bitrate = 0;
  GstPad *pad = GST_PAD (agg_pad);
  gboolean ret = TRUE;

  qtmux = GST_QT_MUX_CAST (agg);
  qtmux_pad = GST_QT_MUX_PAD_CAST (agg_pad);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);

      /* find stream data */
      g_assert (qtmux_pad->set_caps);

      /* depending on codec (h264/h265 for example), muxer will append a new
       * stsd entry per set_caps(), but it's not ideal if referenced fields
       * in caps is not updated from previous one.
       * Each set_caps() implementation can be more enhanced
       * so that we can avoid duplicated atoms though, this identical caps
       * case is one we can skip obviously */
      if (qtmux_pad->configured_caps &&
          gst_caps_is_equal (qtmux_pad->configured_caps, caps)) {
        GST_DEBUG_OBJECT (qtmux_pad, "Ignore duplicated caps %" GST_PTR_FORMAT,
            caps);
      } else {
        ret = qtmux_pad->set_caps (qtmux_pad, caps);

        /* a caps change on the pad currently being chunked invalidates the
         * chunk in progress, so restart the chunk accounting */
        GST_OBJECT_LOCK (qtmux);
        if (qtmux->current_pad == qtmux_pad) {
          qtmux->current_chunk_offset = -1;
          qtmux->current_chunk_size = 0;
          qtmux->current_chunk_duration = 0;
        }
        GST_OBJECT_UNLOCK (qtmux);
      }

      /* remember the caps so identical re-negotiations can be skipped */
      if (ret)
        gst_caps_replace (&qtmux_pad->configured_caps, caps);

      /* event fully handled here; NULL prevents forwarding below */
      gst_event_unref (event);
      event = NULL;
      break;
    }
    case GST_EVENT_TAG:{
      GstTagList *list;
      GstTagSetter *setter = GST_TAG_SETTER (qtmux);
      GstTagMergeMode mode;
      gchar *code;

      GST_OBJECT_LOCK (qtmux);
      mode = gst_tag_setter_get_tag_merge_mode (setter);

      gst_event_parse_tag (event, &list);
      GST_DEBUG_OBJECT (qtmux, "received tag event on pad %s:%s : %"
          GST_PTR_FORMAT, GST_DEBUG_PAD_NAME (pad), list);

      /* global tags go to the element-wide tag setter, stream tags to the
       * pad's own list; both mark the corresponding dirty flag */
      if (gst_tag_list_get_scope (list) == GST_TAG_SCOPE_GLOBAL) {
        gst_tag_setter_merge_tags (setter, list, mode);
        qtmux->tags_changed = TRUE;
      } else {
        if (!qtmux_pad->tags)
          qtmux_pad->tags = gst_tag_list_new_empty ();
        gst_tag_list_insert (qtmux_pad->tags, list, mode);
        qtmux_pad->tags_changed = TRUE;
      }
      GST_OBJECT_UNLOCK (qtmux);

      /* NOTE: bitwise | is deliberate here — both lookups must execute
       * even when the first one succeeds (|| would short-circuit and
       * skip reading the maximum bitrate) */
      if (gst_tag_list_get_uint (list, GST_TAG_BITRATE, &avg_bitrate) |
          gst_tag_list_get_uint (list, GST_TAG_MAXIMUM_BITRATE, &max_bitrate)) {
        if (avg_bitrate > 0 && avg_bitrate < G_MAXUINT32)
          qtmux_pad->avg_bitrate = avg_bitrate;
        if (max_bitrate > 0 && max_bitrate < G_MAXUINT32)
          qtmux_pad->max_bitrate = max_bitrate;
      }

      if (gst_tag_list_get_string (list, GST_TAG_LANGUAGE_CODE, &code)) {
        const char *iso_code = gst_tag_get_language_code_iso_639_2T (code);
        if (iso_code) {
          if (qtmux_pad->trak) {
            /* https://developer.apple.com/library/mac/#documentation/QuickTime/QTFF/QTFFChap4/qtff4.html */
            qtmux_pad->trak->mdia.mdhd.language_code = language_code (iso_code);
          }
        }
        g_free (code);
      }

      /* tag event fully consumed */
      gst_event_unref (event);
      event = NULL;
      ret = TRUE;
      break;
    }
    default:
      break;
  }

  /* anything not consumed above goes to the parent class */
  if (event != NULL)
    ret = agg_class->sink_event (agg, agg_pad, event);

  return ret;
}
+
+static void
+gst_qt_mux_release_pad (GstElement * element, GstPad * pad)
+{
+ GstQTMux *mux = GST_QT_MUX_CAST (element);
+ GstQTMuxPad *muxpad = GST_QT_MUX_PAD_CAST (pad);
+
+ GST_DEBUG_OBJECT (element, "Releasing %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+ /* Take a ref to the pad so we can clean it up after removing it from the element */
+ pad = gst_object_ref (pad);
+
+ /* Do aggregate level cleanup */
+ GST_ELEMENT_CLASS (parent_class)->release_pad (element, pad);
+
+ GST_OBJECT_LOCK (mux);
+ if (mux->current_pad && GST_PAD (mux->current_pad) == pad) {
+ mux->current_pad = NULL;
+ mux->current_chunk_size = 0;
+ mux->current_chunk_duration = 0;
+ }
+
+ gst_qt_mux_pad_reset (muxpad);
+
+ if (GST_ELEMENT (mux)->sinkpads == NULL) {
+ /* No more outstanding request pads, reset our counters */
+ mux->video_pads = 0;
+ mux->audio_pads = 0;
+ mux->subtitle_pads = 0;
+ }
+ GST_OBJECT_UNLOCK (mux);
+
+ gst_object_unref (pad);
+}
+
+static GstAggregatorPad *
+gst_qt_mux_create_new_pad (GstAggregator * self,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+ return g_object_new (GST_TYPE_QT_MUX_PAD, "name", req_name, "direction",
+ templ->direction, "template", templ, NULL);
+}
+
+static GstPad *
+gst_qt_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
+ GstQTMux *qtmux = GST_QT_MUX_CAST (element);
+ GstQTMuxPad *qtpad;
+ GstQTPadSetCapsFunc setcaps_func;
+ gchar *name;
+ gint pad_id;
+
+ if (templ->direction != GST_PAD_SINK)
+ goto wrong_direction;
+
+ if (qtmux->state > GST_QT_MUX_STATE_STARTED)
+ goto too_late;
+
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
+ setcaps_func = gst_qt_mux_audio_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "audio_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("audio_%u", qtmux->audio_pads++);
+ }
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
+ setcaps_func = gst_qt_mux_video_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "video_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("video_%u", qtmux->video_pads++);
+ }
+ } else if (templ == gst_element_class_get_pad_template (klass, "subtitle_%u")) {
+ setcaps_func = gst_qt_mux_subtitle_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "subtitle_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("subtitle_%u", qtmux->subtitle_pads++);
+ }
+ } else if (templ == gst_element_class_get_pad_template (klass, "caption_%u")) {
+ setcaps_func = gst_qt_mux_caption_sink_set_caps;
+ if (req_name != NULL && sscanf (req_name, "caption_%u", &pad_id) == 1) {
+ name = g_strdup (req_name);
+ } else {
+ name = g_strdup_printf ("caption_%u", qtmux->caption_pads++);
+ }
+ } else
+ goto wrong_template;
+
+ GST_DEBUG_OBJECT (qtmux, "Requested pad: %s", name);
+
+ qtpad = (GstQTMuxPad *)
+ GST_ELEMENT_CLASS (parent_class)->request_new_pad (element,
+ templ, name, caps);
+
+ g_free (name);
+
+ /* set up pad */
+ GST_OBJECT_LOCK (qtmux);
+ gst_qt_mux_pad_reset (qtpad);
+ qtpad->trak = atom_trak_new (qtmux->context);
+
+ atom_moov_add_trak (qtmux->moov, qtpad->trak);
+ GST_OBJECT_UNLOCK (qtmux);
+
+ /* set up pad functions */
+ qtpad->set_caps = setcaps_func;
+ qtpad->dts = G_MININT64;
+
+ return GST_PAD (qtpad);
+
+ /* ERRORS */
+wrong_direction:
+ {
+ GST_WARNING_OBJECT (qtmux, "Request pad that is not a SINK pad.");
+ return NULL;
+ }
+too_late:
+ {
+ GST_WARNING_OBJECT (qtmux, "Not providing request pad after stream start.");
+ return NULL;
+ }
+wrong_template:
+ {
+ GST_WARNING_OBJECT (qtmux, "This is not our template!");
+ return NULL;
+ }
+}
+
+/* GObject::get_property implementation for the common qtmux base class.
+ * All reads happen under the object lock so they stay consistent with
+ * concurrent gst_qt_mux_set_property() calls. */
+static void
+gst_qt_mux_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+  GST_OBJECT_LOCK (qtmux);
+  switch (prop_id) {
+    case PROP_MOVIE_TIMESCALE:
+      g_value_set_uint (value, qtmux->timescale);
+      break;
+    case PROP_TRAK_TIMESCALE:
+      g_value_set_uint (value, qtmux->trak_timescale);
+      break;
+    case PROP_DO_CTTS:
+      g_value_set_boolean (value, qtmux->guess_pts);
+      break;
+#ifndef GST_REMOVE_DEPRECATED
+    case PROP_DTS_METHOD:
+      g_value_set_enum (value, qtmux->dts_method);
+      break;
+#endif
+    case PROP_FAST_START:
+      g_value_set_boolean (value, qtmux->fast_start);
+      break;
+    case PROP_FAST_START_TEMP_FILE:
+      g_value_set_string (value, qtmux->fast_start_file_path);
+      break;
+    case PROP_MOOV_RECOV_FILE:
+      g_value_set_string (value, qtmux->moov_recov_file_path);
+      break;
+    case PROP_FRAGMENT_DURATION:
+      g_value_set_uint (value, qtmux->fragment_duration);
+      break;
+    case PROP_RESERVED_MAX_DURATION:
+      g_value_set_uint64 (value, qtmux->reserved_max_duration);
+      break;
+    case PROP_RESERVED_DURATION_REMAINING:
+      if (qtmux->reserved_duration_remaining == GST_CLOCK_TIME_NONE)
+        g_value_set_uint64 (value, qtmux->reserved_max_duration);
+      else {
+        GstClockTime remaining = qtmux->reserved_duration_remaining;
+
+        /* Report the remaining space as the calculated remaining, minus
+         * however much we've muxed since the last update */
+        if (remaining > qtmux->muxed_since_last_update)
+          remaining -= qtmux->muxed_since_last_update;
+        else
+          remaining = 0;
+        /* fix: the log message opened a '(' that was never closed */
+        GST_LOG_OBJECT (qtmux, "reserved duration remaining - reporting %"
+            G_GUINT64_FORMAT "(%" G_GUINT64_FORMAT " - %" G_GUINT64_FORMAT ")",
+            remaining, qtmux->reserved_duration_remaining,
+            qtmux->muxed_since_last_update);
+        g_value_set_uint64 (value, remaining);
+      }
+      break;
+    case PROP_RESERVED_MOOV_UPDATE_PERIOD:
+      g_value_set_uint64 (value, qtmux->reserved_moov_update_period);
+      break;
+    case PROP_RESERVED_BYTES_PER_SEC:
+      g_value_set_uint (value, qtmux->reserved_bytes_per_sec_per_trak);
+      break;
+    case PROP_RESERVED_PREFILL:
+      g_value_set_boolean (value, qtmux->reserved_prefill);
+      break;
+    case PROP_INTERLEAVE_BYTES:
+      g_value_set_uint64 (value, qtmux->interleave_bytes);
+      break;
+    case PROP_INTERLEAVE_TIME:
+      g_value_set_uint64 (value, qtmux->interleave_time);
+      break;
+    case PROP_FORCE_CHUNKS:
+      g_value_set_boolean (value, qtmux->force_chunks);
+      break;
+    case PROP_MAX_RAW_AUDIO_DRIFT:
+      g_value_set_uint64 (value, qtmux->max_raw_audio_drift);
+      break;
+    case PROP_START_GAP_THRESHOLD:
+      g_value_set_uint64 (value, qtmux->start_gap_threshold);
+      break;
+    case PROP_FORCE_CREATE_TIMECODE_TRAK:
+      g_value_set_boolean (value, qtmux->force_create_timecode_trak);
+      break;
+    case PROP_FRAGMENT_MODE:{
+      GstQTMuxFragmentMode mode = qtmux->fragment_mode;
+      /* STREAMABLE is a private internal value; report its public
+       * equivalent instead */
+      if (mode == GST_QT_MUX_FRAGMENT_STREAMABLE)
+        mode = GST_QT_MUX_FRAGMENT_DASH_OR_MSS;
+      g_value_set_enum (value, mode);
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (qtmux);
+}
+
+/* (Re)generate a random temporary file path used for fast-start
+ * buffering, replacing (and freeing) any previously set path.
+ * NOTE(review): the name is built from g_random_int() in the shared tmp
+ * dir, so it is predictable by other local users; confirm whether a
+ * g_mkstemp()-style unique/atomic creation is needed here. */
+static void
+gst_qt_mux_generate_fast_start_file_path (GstQTMux * qtmux)
+{
+  gchar *tmp;
+
+  g_free (qtmux->fast_start_file_path);
+  qtmux->fast_start_file_path = NULL;
+
+  tmp = g_strdup_printf ("%s%d", "qtmux", g_random_int ());
+  qtmux->fast_start_file_path = g_build_filename (g_get_tmp_dir (), tmp, NULL);
+  g_free (tmp);
+}
+
+/* GObject::set_property implementation for the common qtmux base class.
+ * All writes happen under the object lock to serialize against
+ * gst_qt_mux_get_property() and the streaming thread. */
+static void
+gst_qt_mux_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+  GST_OBJECT_LOCK (qtmux);
+  switch (prop_id) {
+    case PROP_MOVIE_TIMESCALE:
+      qtmux->timescale = g_value_get_uint (value);
+      break;
+    case PROP_TRAK_TIMESCALE:
+      qtmux->trak_timescale = g_value_get_uint (value);
+      break;
+    case PROP_DO_CTTS:
+      qtmux->guess_pts = g_value_get_boolean (value);
+      break;
+#ifndef GST_REMOVE_DEPRECATED
+    case PROP_DTS_METHOD:
+      qtmux->dts_method = g_value_get_enum (value);
+      break;
+#endif
+    case PROP_FAST_START:
+      qtmux->fast_start = g_value_get_boolean (value);
+      break;
+    case PROP_FAST_START_TEMP_FILE:
+      g_free (qtmux->fast_start_file_path);
+      qtmux->fast_start_file_path = g_value_dup_string (value);
+      /* NULL means to generate a random one */
+      if (!qtmux->fast_start_file_path) {
+        gst_qt_mux_generate_fast_start_file_path (qtmux);
+      }
+      break;
+    case PROP_MOOV_RECOV_FILE:
+      g_free (qtmux->moov_recov_file_path);
+      qtmux->moov_recov_file_path = g_value_dup_string (value);
+      break;
+    case PROP_FRAGMENT_DURATION:
+      qtmux->fragment_duration = g_value_get_uint (value);
+      break;
+    case PROP_RESERVED_MAX_DURATION:
+      qtmux->reserved_max_duration = g_value_get_uint64 (value);
+      break;
+    case PROP_RESERVED_MOOV_UPDATE_PERIOD:
+      qtmux->reserved_moov_update_period = g_value_get_uint64 (value);
+      break;
+    case PROP_RESERVED_BYTES_PER_SEC:
+      qtmux->reserved_bytes_per_sec_per_trak = g_value_get_uint (value);
+      break;
+    case PROP_RESERVED_PREFILL:
+      qtmux->reserved_prefill = g_value_get_boolean (value);
+      break;
+    case PROP_INTERLEAVE_BYTES:
+      /* remember the property was set explicitly so defaults are not
+       * applied later */
+      qtmux->interleave_bytes = g_value_get_uint64 (value);
+      qtmux->interleave_bytes_set = TRUE;
+      break;
+    case PROP_INTERLEAVE_TIME:
+      qtmux->interleave_time = g_value_get_uint64 (value);
+      qtmux->interleave_time_set = TRUE;
+      break;
+    case PROP_FORCE_CHUNKS:
+      qtmux->force_chunks = g_value_get_boolean (value);
+      break;
+    case PROP_MAX_RAW_AUDIO_DRIFT:
+      qtmux->max_raw_audio_drift = g_value_get_uint64 (value);
+      break;
+    case PROP_START_GAP_THRESHOLD:
+      qtmux->start_gap_threshold = g_value_get_uint64 (value);
+      break;
+    case PROP_FORCE_CREATE_TIMECODE_TRAK:
+      /* mirrored into the atoms context, which does the actual trak
+       * creation */
+      qtmux->force_create_timecode_trak = g_value_get_boolean (value);
+      qtmux->context->force_create_timecode_trak =
+          qtmux->force_create_timecode_trak;
+      break;
+    case PROP_FRAGMENT_MODE:{
+      GstQTMuxFragmentMode mode = g_value_get_enum (value);
+      /* the private STREAMABLE value cannot be selected through this
+       * property; silently ignore it */
+      if (mode != GST_QT_MUX_FRAGMENT_STREAMABLE)
+        qtmux->fragment_mode = mode;
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (qtmux);
+}
+
+/* GstAggregator::start vfunc: mark the muxer as started and announce a
+ * BYTES segment downstream, since the muxer may seek back later to
+ * rewrite headers. */
+static gboolean
+gst_qt_mux_start (GstAggregator * agg)
+{
+  GstQTMux *qtmux = GST_QT_MUX_CAST (agg);
+  GstSegment segment;
+
+  qtmux->state = GST_QT_MUX_STATE_STARTED;
+
+  /* let downstream know we think in BYTES and expect to do seeking later on */
+  gst_segment_init (&segment, GST_FORMAT_BYTES);
+  gst_aggregator_update_segment (agg, &segment);
+
+  return TRUE;
+}
+
+/* GstAggregator::stop vfunc: fully reset the muxer state (TRUE = also
+ * reallocate the atom/context bookkeeping). */
+static gboolean
+gst_qt_mux_stop (GstAggregator * agg)
+{
+  GstQTMux *qtmux = GST_QT_MUX_CAST (agg);
+
+  gst_qt_mux_reset (qtmux, TRUE);
+
+  return TRUE;
+}
+
+/* Property id for the per-format-subclass "streamable" property
+ * installed by gst_qt_mux_subclass_class_init() below. */
+enum
+{
+  PROP_SUBCLASS_STREAMABLE = 1,
+};
+
+/* set_property for the format subclasses: only handles "streamable",
+ * and only honours it for the ISML (smooth streaming) format — for all
+ * other formats the value is silently ignored (the property is
+ * deprecated there). */
+static void
+gst_qt_mux_subclass_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+  GST_OBJECT_LOCK (qtmux);
+  switch (prop_id) {
+    case PROP_SUBCLASS_STREAMABLE:{
+      GstQTMuxClass *qtmux_klass =
+          (GstQTMuxClass *) (G_OBJECT_GET_CLASS (qtmux));
+      if (qtmux_klass->format == GST_QT_MUX_FORMAT_ISML) {
+        qtmux->streamable = g_value_get_boolean (value);
+      }
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (qtmux);
+}
+
+/* get_property for the format subclasses: reads back "streamable" under
+ * the object lock. */
+static void
+gst_qt_mux_subclass_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstQTMux *qtmux = GST_QT_MUX_CAST (object);
+
+  GST_OBJECT_LOCK (qtmux);
+  switch (prop_id) {
+    case PROP_SUBCLASS_STREAMABLE:
+      g_value_set_boolean (value, qtmux->streamable);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (qtmux);
+}
+
+/* class_init for each registered format subclass: installs the
+ * "streamable" property.  For ISML it is a live property; for every
+ * other format it is installed as deprecated with a FALSE default and
+ * an amended description. */
+static void
+gst_qt_mux_subclass_class_init (GstQTMuxClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GParamFlags streamable_flags;
+  const gchar *streamable_desc;
+  gboolean streamable;
+#define STREAMABLE_DESC "If set to true, the output should be as if it is to "\
+  "be streamed and hence no indexes written or duration written."
+
+  gobject_class->set_property = gst_qt_mux_subclass_set_property;
+  gobject_class->get_property = gst_qt_mux_subclass_get_property;
+
+  streamable_flags = G_PARAM_READWRITE | G_PARAM_CONSTRUCT;
+  if (klass->format == GST_QT_MUX_FORMAT_ISML) {
+    streamable_desc = STREAMABLE_DESC;
+    streamable = DEFAULT_STREAMABLE;
+  } else {
+    streamable_desc =
+        STREAMABLE_DESC " (DEPRECATED, only valid for fragmented MP4)";
+    streamable_flags |= G_PARAM_DEPRECATED;
+    streamable = FALSE;
+  }
+
+  g_object_class_install_property (gobject_class, PROP_SUBCLASS_STREAMABLE,
+      g_param_spec_boolean ("streamable", "Streamable", streamable_desc,
+          streamable, streamable_flags | G_PARAM_STATIC_STRINGS));
+}
+
+/* Instance init for format subclasses; all per-instance setup is done
+ * by the base class init, so nothing to do here. */
+static void
+gst_qt_mux_subclass_init (GstQTMux * qtmux)
+{
+}
+
+/* Plugin entry point: registers the abstract GstBaseQTMux type (with
+ * tag-setter, XMP-writer and preset interfaces), then one concrete
+ * element subclass per entry in gst_qt_mux_format_list (qtmux, mp4mux,
+ * ismlmux, 3gppmux, mj2mux), plus the custom 3GP classification tag.
+ * Returns FALSE if any element registration fails. */
+gboolean
+gst_qt_mux_register (GstPlugin * plugin)
+{
+  GTypeInfo parent_typeinfo = {
+    sizeof (GstQTMuxClass),
+    (GBaseInitFunc) gst_qt_mux_base_init,
+    NULL,
+    (GClassInitFunc) gst_qt_mux_class_init,
+    NULL,
+    NULL,
+    sizeof (GstQTMux),
+    0,
+    (GInstanceInitFunc) gst_qt_mux_init,
+  };
+  static const GInterfaceInfo tag_setter_info = {
+    NULL, NULL, NULL
+  };
+  static const GInterfaceInfo tag_xmp_writer_info = {
+    NULL, NULL, NULL
+  };
+  static const GInterfaceInfo preset_info = {
+    NULL, NULL, NULL
+  };
+  GType parent_type;
+  GstQTMuxFormat format;
+  GstQTMuxClassParams *params;
+  guint i = 0;
+
+  GST_DEBUG_CATEGORY_INIT (gst_qt_mux_debug, "qtmux", 0, "QT Muxer");
+
+  GST_LOG ("Registering muxers");
+
+  parent_type =
+      g_type_register_static (GST_TYPE_AGGREGATOR, "GstBaseQTMux",
+      &parent_typeinfo, 0);
+  g_type_add_interface_static (parent_type, GST_TYPE_TAG_SETTER,
+      &tag_setter_info);
+  g_type_add_interface_static (parent_type, GST_TYPE_TAG_XMP_WRITER,
+      &tag_xmp_writer_info);
+  g_type_add_interface_static (parent_type, GST_TYPE_PRESET, &preset_info);
+
+  gst_type_mark_as_plugin_api (parent_type, 0);
+
+  /* walk the format table until the GST_QT_MUX_FORMAT_NONE sentinel */
+  while (TRUE) {
+    GType type;
+    GTypeInfo subclass_typeinfo = {
+      sizeof (GstQTMuxClass),
+      NULL,
+      NULL,
+      (GClassInitFunc) gst_qt_mux_subclass_class_init,
+      NULL,
+      NULL,
+      sizeof (GstQTMux),
+      0,
+      (GInstanceInitFunc) gst_qt_mux_subclass_init,
+    };
+    GstQTMuxFormatProp *prop;
+    GstCaps *subtitle_caps, *caption_caps;
+
+    prop = &gst_qt_mux_format_list[i];
+    format = prop->format;
+    if (format == GST_QT_MUX_FORMAT_NONE)
+      break;
+
+    /* create a cache for these properties; attached to the type as
+     * qdata and kept alive for the lifetime of the process */
+    params = g_new0 (GstQTMuxClassParams, 1);
+    params->prop = prop;
+    params->src_caps = gst_static_caps_get (&prop->src_caps);
+    params->video_sink_caps = gst_static_caps_get (&prop->video_sink_caps);
+    params->audio_sink_caps = gst_static_caps_get (&prop->audio_sink_caps);
+    /* GST_CAPS_NONE in the table means this format has no such pad */
+    subtitle_caps = gst_static_caps_get (&prop->subtitle_sink_caps);
+    if (!gst_caps_is_equal (subtitle_caps, GST_CAPS_NONE)) {
+      params->subtitle_sink_caps = subtitle_caps;
+    } else {
+      gst_caps_unref (subtitle_caps);
+    }
+    caption_caps = gst_static_caps_get (&prop->caption_sink_caps);
+    if (!gst_caps_is_equal (caption_caps, GST_CAPS_NONE)) {
+      params->caption_sink_caps = caption_caps;
+    } else {
+      gst_caps_unref (caption_caps);
+    }
+
+    /* create the type now */
+    type =
+        g_type_register_static (parent_type, prop->type_name,
+        &subclass_typeinfo, 0);
+    g_type_set_qdata (type, GST_QT_MUX_PARAMS_QDATA, (gpointer) params);
+
+    if (!gst_element_register (plugin, prop->name, prop->rank, type))
+      return FALSE;
+
+    i++;
+  }
+
+  GST_LOG ("Finished registering muxers");
+
+  /* FIXME: ideally classification tag should be added and
+     registered in gstreamer core gsttaglist
+   */
+
+  GST_LOG ("Registering tags");
+
+  gst_tag_register (GST_TAG_3GP_CLASSIFICATION, GST_TAG_FLAG_META,
+      G_TYPE_STRING, GST_TAG_3GP_CLASSIFICATION, "content classification",
+      gst_tag_merge_use_first);
+
+  isomp4_element_init (plugin);
+
+  GST_LOG ("Finished registering tags");
+
+  return TRUE;
+}
+
+GST_ELEMENT_REGISTER_DEFINE_CUSTOM (qtmux, gst_qt_mux_register);
diff --git a/gst/isomp4/gstqtmux.h b/gst/isomp4/gstqtmux.h
new file mode 100644
index 0000000000..0e6e7f6793
--- /dev/null
+++ b/gst/isomp4/gstqtmux.h
@@ -0,0 +1,390 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008-2010 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __GST_QT_MUX_H__
+#define __GST_QT_MUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstaggregator.h>
+
+#include "fourcc.h"
+#include "atoms.h"
+#include "atomsrecovery.h"
+#include "gstqtmuxmap.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_QT_MUX (gst_qt_mux_get_type())
+#define GST_QT_MUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_QT_MUX, GstQTMux))
+#define GST_QT_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_QT_MUX, GstQTMux))
+#define GST_IS_QT_MUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_QT_MUX))
+#define GST_IS_QT_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QT_MUX))
+#define GST_QT_MUX_CAST(obj) ((GstQTMux*)(obj))
+
+
+typedef struct _GstQTMux GstQTMux;
+typedef struct _GstQTMuxClass GstQTMuxClass;
+typedef struct _GstQTMuxPad GstQTMuxPad;
+typedef struct _GstQTMuxPadClass GstQTMuxPadClass;
+
+/*
+ * GstQTPadPrepareBufferFunc
+ *
+ * Receives a buffer (takes ref) and returns a new buffer that should
+ * replace the passed one.
+ *
+ * Useful for when the pad/datatype needs some manipulation before
+ * being muxed. (Originally added for image/x-jpc support, for which buffers
+ * need to be wrapped into a isom box)
+ */
+typedef GstBuffer * (*GstQTPadPrepareBufferFunc) (GstQTMuxPad * pad,
+ GstBuffer * buf, GstQTMux * qtmux);
+typedef gboolean (*GstQTPadSetCapsFunc) (GstQTMuxPad * pad, GstCaps * caps);
+typedef GstBuffer * (*GstQTPadCreateEmptyBufferFunc) (GstQTMuxPad * pad, gint64 duration);
+
+GType gst_qt_mux_pad_get_type (void);
+
+#define GST_TYPE_QT_MUX_PAD \
+ (gst_qt_mux_pad_get_type())
+#define GST_QT_MUX_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_QT_MUX_PAD, GstQTMuxPad))
+#define GST_QT_MUX_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_QT_MUX_PAD, GstQTMuxPadClass))
+#define GST_IS_QT_MUX_PAD(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_QT_MUX_PAD))
+#define GST_IS_QT_MUX_PAD_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_QT_MUX_PAD))
+#define GST_QT_MUX_PAD_CAST(obj) \
+ ((GstQTMuxPad *)(obj))
+
+/* Per-sink-pad state: one instance per requested audio/video/subtitle/
+ * caption pad, owning the track-level bookkeeping for that stream. */
+struct _GstQTMuxPad
+{
+  GstAggregatorPad parent;
+
+  guint32 trak_timescale;
+
+  /* fourcc id of stream */
+  guint32 fourcc;
+  /* whether using format that have out of order buffers */
+  gboolean is_out_of_order;
+  /* if not 0, track with constant sized samples, e.g. raw audio */
+  guint sample_size;
+  /* make sync table entry */
+  gboolean sync;
+  /* if it is a sparse stream
+   * (meaning we can't use PTS differences to compute duration) */
+  gboolean sparse;
+  /* bitrates */
+  guint32 avg_bitrate, max_bitrate;
+  /* expected sample duration */
+  guint expected_sample_duration_n;
+  guint expected_sample_duration_d;
+
+  /* for avg bitrate calculation */
+  guint64 total_bytes;
+  guint64 total_duration;
+
+  GstBuffer *last_buf;
+  /* dts of last_buf */
+  GstClockTime last_dts;
+  guint64 sample_offset;
+
+  /* TRUE if we saw backward/missing DTS on this
+   * pad (and warned about it) */
+  gboolean warned_empty_duration;
+
+  /* This compensates for CTTS */
+  GstClockTime dts_adjustment;
+
+  /* store the first timestamp for comparing with other streams and
+   * know if there are late streams */
+  /* subjected to dts adjustment */
+  GstClockTime first_ts;
+  GstClockTime first_dts;
+
+  gint64 dts;                   /* the signed version of the DTS converted to running time. */
+
+  /* all the atom and chunk book-keeping is delegated here
+   * unowned/uncounted reference, parent MOOV owns */
+  AtomTRAK *trak;
+  AtomTRAK *tc_trak;
+  SampleTableEntry *trak_ste;
+  /* fragmented support */
+  /* meta data book-keeping delegated here */
+  AtomTRAF *traf;
+  /* fragment buffers */
+  ATOM_ARRAY (GstBuffer *) fragment_buffers;
+  /* running fragment duration */
+  gint64 fragment_duration;
+  /* optional fragment index book-keeping */
+  AtomTFRA *tfra;
+
+  /* Set when tags are received, cleared when written to moov */
+  gboolean tags_changed;
+
+  GstTagList *tags;
+
+  /* if nothing is set, it won't be called */
+  GstQTPadPrepareBufferFunc prepare_buf_func;
+  GstQTPadSetCapsFunc set_caps;
+  GstQTPadCreateEmptyBufferFunc create_empty_buffer;
+
+  /* SMPTE timecode */
+  GstVideoTimeCode *first_tc;
+  GstClockTime first_pts;
+  guint64 tc_pos;
+
+  /* for keeping track in pre-fill mode */
+  GArray *samples;
+  guint first_cc_sample_size;
+  /* current sample */
+  GstAdapter *raw_audio_adapter;
+  guint64 raw_audio_adapter_offset;
+  GstClockTime raw_audio_adapter_pts;
+  GstFlowReturn flow_status;
+
+  GstCaps *configured_caps;
+};
+
+/* Pad class: no additional vfuncs beyond GstAggregatorPad. */
+struct _GstQTMuxPadClass
+{
+  GstAggregatorPadClass parent;
+};
+
+#define QTMUX_NO_OF_TS 10
+
+/* Overall muxer lifecycle state. */
+typedef enum _GstQTMuxState
+{
+  GST_QT_MUX_STATE_NONE,
+  GST_QT_MUX_STATE_STARTED,
+  GST_QT_MUX_STATE_DATA,
+  GST_QT_MUX_STATE_EOS
+} GstQTMuxState;
+
+/* Output layout mode, derived from the configured properties when the
+ * file is started. */
+typedef enum _GstQtMuxMode {
+  GST_QT_MUX_MODE_MOOV_AT_END,
+  GST_QT_MUX_MODE_FRAGMENTED,
+  GST_QT_MUX_MODE_FAST_START,
+  GST_QT_MUX_MODE_ROBUST_RECORDING,
+  GST_QT_MUX_MODE_ROBUST_RECORDING_PREFILL,
+} GstQtMuxMode;
+
+/**
+ * GstQTMuxFragmentMode:
+ * @GST_QT_MUX_FRAGMENT_DASH_OR_MSS: dash-or-mss
+ * @GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE: first-moov-then-finalise
+ * @GST_QT_MUX_FRAGMENT_STREAMABLE: streamable (private value)
+ *
+ * Since: 1.20
+ */
+typedef enum _GstQTMuxFragmentMode
+{
+  GST_QT_MUX_FRAGMENT_DASH_OR_MSS = 0,
+  GST_QT_MUX_FRAGMENT_FIRST_MOOV_THEN_FINALISE,
+  GST_QT_MUX_FRAGMENT_STREAMABLE = G_MAXUINT32, /* internal value */
+} GstQTMuxFragmentMode;
+
+/* Instance struct for the abstract base muxer shared by all the isomp4
+ * muxer elements (qtmux, mp4mux, ismlmux, 3gppmux, mj2mux). */
+struct _GstQTMux
+{
+  GstAggregator parent;
+
+  /* state */
+  GstQTMuxState state;
+
+  /* Mux mode, inferred from property
+   * set in gst_qt_mux_start_file() */
+  GstQtMuxMode mux_mode;
+  /* fragment_mode, controls how fragments are created. Only if
+   * @mux_mode == GST_QT_MUX_MODE_FRAGMENTED */
+  GstQTMuxFragmentMode fragment_mode;
+
+  /* whether downstream is seekable */
+  gboolean downstream_seekable;
+
+  /* size of header (prefix, atoms (ftyp, possibly moov, mdat header)) */
+  guint64 header_size;
+  /* accumulated size of raw media data (not including mdat header) */
+  guint64 mdat_size;
+  /* position of the moov (for fragmented mode) or reserved moov atom
+   * area (for robust-muxing mode) */
+  guint64 moov_pos;
+  /* position of mdat atom header (for later updating of size) in
+   * moov-at-end, fragmented and robust-muxing modes */
+  guint64 mdat_pos;
+  /* position of the mdat atom header of the latest fragment for writing
+   * the default base offset in fragmented mode first-moov-then-finalise and
+   * any other future non-streaming fragmented mode */
+  guint64 moof_mdat_pos;
+
+  /* keep track of the largest chunk to fine-tune brands */
+  GstClockTime longest_chunk;
+
+  /* Earliest timestamp across all pads/traks
+   * (unadjusted incoming PTS) */
+  GstClockTime first_ts;
+  /* Last DTS across all pads (= duration) */
+  GstClockTime last_dts;
+
+  /* Last pad we used for writing the current chunk */
+  GstQTMuxPad *current_pad;
+  guint64 current_chunk_size;
+  GstClockTime current_chunk_duration;
+  guint64 current_chunk_offset;
+
+  /* list of buffers to hold for batching inside a single mdat when downstream
+   * is not seekable */
+  GList *output_buffers;
+
+  /* atom helper objects */
+  AtomsContext *context;
+  AtomFTYP *ftyp;
+  AtomMOOV *moov;
+  GSList *extra_atoms;          /* list of extra top-level atoms (e.g. UUID for xmp)
+                                 * Stored as AtomInfo structs */
+
+  /* Set when tags are received, cleared when written to moov */
+  gboolean tags_changed;
+
+  /* fragmented file index */
+  AtomMFRA *mfra;
+
+  /* fast start */
+  FILE *fast_start_file;
+
+  /* moov recovery */
+  FILE *moov_recov_file;
+
+  /* fragment sequence */
+  guint32 fragment_sequence;
+
+  /* properties */
+  guint32 timescale;
+  guint32 trak_timescale;
+  AtomsTreeFlavor flavor;
+  gboolean fast_start;
+  gboolean guess_pts;
+#ifndef GST_REMOVE_DEPRECATED
+  gint dts_method;
+#endif
+  gchar *fast_start_file_path;
+  gchar *moov_recov_file_path;
+  guint32 fragment_duration;
+  /* Whether or not to work in 'streamable' mode and not
+   * seek to rewrite headers - only valid for fragmented
+   * mode. Deprecated */
+  gboolean streamable;
+
+  /* Requested target maximum duration */
+  GstClockTime reserved_max_duration;
+  /* Estimate of remaining reserved header space (in ns of recording) */
+  GstClockTime reserved_duration_remaining;
+  /* Multiplier for conversion from reserved_max_duration to bytes */
+  guint reserved_bytes_per_sec_per_trak;
+
+  guint64 interleave_bytes;
+  GstClockTime interleave_time;
+  /* TRUE once the corresponding property has been set explicitly */
+  gboolean interleave_bytes_set, interleave_time_set;
+  gboolean force_chunks;
+
+  GstClockTime max_raw_audio_drift;
+
+  /* Reserved minimum MOOV size in bytes
+   * This is converted from reserved_max_duration
+   * using the bytes/trak/sec estimate */
+  guint32 reserved_moov_size;
+  /* Basic size of the moov (static headers + tags) */
+  guint32 base_moov_size;
+  /* Size of the most recently generated moov header */
+  guint32 last_moov_size;
+  /* True if the first moov in the ping-pong buffers
+   * is the active one. See gst_qt_mux_robust_recording_rewrite_moov() */
+  gboolean reserved_moov_first_active;
+
+  /* Tracking of periodic MOOV updates */
+  GstClockTime last_moov_update;
+  GstClockTime reserved_moov_update_period;
+  GstClockTime muxed_since_last_update;
+
+  gboolean reserved_prefill;
+
+  GstClockTime start_gap_threshold;
+
+  gboolean force_create_timecode_trak;
+
+  /* for request pad naming */
+  guint video_pads, audio_pads, subtitle_pads, caption_pads;
+};
+
+/* Class struct: each registered subclass stores which container format
+ * it produces. */
+struct _GstQTMuxClass
+{
+  GstAggregatorClass parent_class;
+
+  GstQTMuxFormat format;
+};
+
+/* type register helper struct; one instance per format, attached to the
+ * registered GType as qdata (see GST_QT_MUX_PARAMS_QDATA) */
+typedef struct _GstQTMuxClassParams
+{
+  GstQTMuxFormatProp *prop;
+  GstCaps *src_caps;
+  GstCaps *video_sink_caps;
+  GstCaps *audio_sink_caps;
+  GstCaps *subtitle_sink_caps;
+  GstCaps *caption_sink_caps;
+} GstQTMuxClassParams;
+
+#define GST_QT_MUX_PARAMS_QDATA g_quark_from_static_string("qt-mux-params")
+
+GType gst_qt_mux_get_type (void);
+gboolean gst_qt_mux_register (GstPlugin * plugin);
+
+/* FIXME: ideally classification tag should be added and
+ * registered in gstreamer core gsttaglist
+ *
+ * this tag is a string in the format: entityfourcc://table_num/content
+ * FIXME Shouldn't we add a field for 'language'?
+ */
+#define GST_TAG_3GP_CLASSIFICATION "classification"
+
+G_END_DECLS
+
+#endif /* __GST_QT_MUX_H__ */
diff --git a/gst/isomp4/gstqtmuxmap.c b/gst/isomp4/gstqtmuxmap.c
new file mode 100644
index 0000000000..143b217431
--- /dev/null
+++ b/gst/isomp4/gstqtmuxmap.c
@@ -0,0 +1,424 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008 Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>
+ * Copyright (C) 2008 Mark Nauwelaerts <mnauw@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "gstqtmuxmap.h"
+#include "fourcc.h"
+
+/* static info related to various format */
+
+#define COMMON_VIDEO_CAPS \
+ "width = (int) [ 16, MAX ], " \
+ "height = (int) [ 16, MAX ]"
+
+#define COMMON_VIDEO_CAPS_NO_FRAMERATE \
+ "width = (int) [ 16, MAX ], " \
+ "height = (int) [ 16, MAX ] "
+
+#define H263_CAPS \
+ "video/x-h263, " \
+ COMMON_VIDEO_CAPS
+
+#define H264_CAPS \
+ "video/x-h264, " \
+ "stream-format = (string) { avc, avc3 }, " \
+ "alignment = (string) au, " \
+ COMMON_VIDEO_CAPS
+
+#define H265_CAPS \
+ "video/x-h265, " \
+ "stream-format = (string) { hvc1, hev1 }, " \
+ "alignment = (string) au, " \
+ COMMON_VIDEO_CAPS
+
+#define MPEG4V_CAPS \
+ "video/mpeg, " \
+ "mpegversion = (int) 4, "\
+ "systemstream = (boolean) false, " \
+ COMMON_VIDEO_CAPS "; " \
+ "video/x-divx, " \
+ "divxversion = (int) 5, "\
+ COMMON_VIDEO_CAPS
+
+#define PRORES_CAPS \
+ "video/x-prores, " \
+ "variant = (string) {standard, lt, hq, proxy, 4444, 4444xq}, " \
+ COMMON_VIDEO_CAPS
+
+#define CINEFORM_CAPS \
+ "video/x-cineform, " \
+ COMMON_VIDEO_CAPS
+
+#define SVQ_CAPS \
+ "video/x-svq, " \
+ "svqversion = (int) 3, " \
+ COMMON_VIDEO_CAPS
+
+#define COMMON_AUDIO_CAPS(c, r) \
+ "channels = (int) [ 1, " G_STRINGIFY (c) " ], " \
+ "rate = (int) [ 1, " G_STRINGIFY (r) " ]"
+
+#define PCM_CAPS \
+ "audio/x-raw, " \
+ "format = (string) { S16LE, S16BE, S8, U8 }, " \
+ "layout = (string) interleaved, " \
+ COMMON_AUDIO_CAPS (2, MAX)
+
+#define PCM_CAPS_FULL \
+ "audio/x-raw, " \
+ "format = (string) { S32LE, S32BE, S24LE, S24BE, S16LE, S16BE, S8, U8 }, " \
+ "layout = (string) interleaved, " \
+ COMMON_AUDIO_CAPS (2, MAX)
+
+#define PCM_CAPS_UNPOSITIONED \
+ "audio/x-raw, " \
+ "format = (string) { S32LE, S32BE, S24LE, S24BE, S16LE, S16BE, S8, U8 }, " \
+ "layout = (string) interleaved, " \
+ "channel-mask = (bitmask) 0, " \
+ COMMON_AUDIO_CAPS (16, MAX)
+
+#define MP3_CAPS \
+ "audio/mpeg, " \
+ "mpegversion = (int) 1, " \
+ "layer = (int) 3, " \
+ COMMON_AUDIO_CAPS (2, MAX)
+
+#define MP123_CAPS \
+ "audio/mpeg, " \
+ "mpegversion = (int) 1, " \
+ "layer = (int) [1, 3], " \
+ COMMON_AUDIO_CAPS (2, MAX)
+
+#define AAC_CAPS \
+ "audio/mpeg, " \
+ "mpegversion = (int) 4, " \
+ "stream-format = (string) raw, " \
+ COMMON_AUDIO_CAPS (8, MAX)
+
+#define AC3_CAPS \
+ "audio/x-ac3, " \
+ COMMON_AUDIO_CAPS (6, MAX)
+
+#define AMR_CAPS \
+ "audio/AMR, " \
+ "rate = (int) 8000, " \
+ "channels = [ 1, 2 ]; " \
+ "audio/AMR-WB, " \
+ "rate = (int) 16000, " \
+ "channels = [ 1, 2 ] "
+
+#define ADPCM_CAPS \
+ "audio/x-adpcm, " \
+ "layout = (string)dvi, " \
+ "block_align = (int)[64, 8096], " \
+ COMMON_AUDIO_CAPS(2, MAX)
+
+#define ALAC_CAPS \
+ "audio/x-alac, " \
+ COMMON_AUDIO_CAPS(2, MAX)
+
+#define OPUS_CAPS \
+ "audio/x-opus, " \
+ "channel-mapping-family = (int) [0, 255], " \
+ COMMON_AUDIO_CAPS(8, MAX)
+
+
+#define TEXT_UTF8 \
+ "text/x-raw, " \
+ "format=(string)utf8"
+
+#define CEA608_CAPS \
+ "closedcaption/x-cea-608, format=(string)s334-1a"
+#define CEA708_CAPS \
+ "closedcaption/x-cea-708, format=(string)cdp"
+
+/* FIXME 0.11 - take a look at bugs #580005 and #340375 */
+/* FIXME 0.11 - take a look at bugs #580005 and #340375 */
+/* Table of supported container formats; fields are, in order: format id,
+ * rank, element name, long name, type name, src caps, video sink caps,
+ * audio sink caps, subtitle sink caps, caption sink caps.  The list is
+ * terminated by the GST_QT_MUX_FORMAT_NONE sentinel entry. */
+GstQTMuxFormatProp gst_qt_mux_format_list[] = {
+  /* original QuickTime format; see Apple site (e.g. qtff.pdf) */
+  {
+        GST_QT_MUX_FORMAT_QT,
+        GST_RANK_PRIMARY,
+        "qtmux",
+        "QuickTime",
+        "GstQTMux",
+        GST_STATIC_CAPS ("video/quicktime, variant = (string) apple; "
+            "video/quicktime"),
+        GST_STATIC_CAPS ("video/x-raw, "
+            "format = (string) { RGB, UYVY, v210 }, "
+            COMMON_VIDEO_CAPS "; "
+            MPEG4V_CAPS "; "
+            PRORES_CAPS "; "
+            CINEFORM_CAPS "; "
+            H263_CAPS "; "
+            H264_CAPS "; "
+            H265_CAPS "; "
+            SVQ_CAPS "; "
+            "video/x-dv, "
+            "systemstream = (boolean) false, "
+            COMMON_VIDEO_CAPS "; "
+            "image/jpeg, "
+            COMMON_VIDEO_CAPS_NO_FRAMERATE "; "
+            "image/png, "
+            COMMON_VIDEO_CAPS_NO_FRAMERATE "; "
+            "video/x-vp8, "
+            COMMON_VIDEO_CAPS "; "
+            "video/x-vp9, "
+            COMMON_VIDEO_CAPS "; "
+            "video/x-dirac, "
+            COMMON_VIDEO_CAPS "; " "video/x-qt-part, " COMMON_VIDEO_CAPS "; "
+            "video/x-av1, " COMMON_VIDEO_CAPS),
+        GST_STATIC_CAPS (PCM_CAPS_FULL "; "
+            PCM_CAPS_UNPOSITIONED " ; "
+            MP123_CAPS " ; "
+            AAC_CAPS " ; "
+            AC3_CAPS " ; "
+            ADPCM_CAPS " ; "
+            "audio/x-alaw, " COMMON_AUDIO_CAPS (2, MAX) "; "
+            "audio/x-mulaw, " COMMON_AUDIO_CAPS (2, MAX) "; "
+            AMR_CAPS " ; " ALAC_CAPS " ; " OPUS_CAPS),
+        GST_STATIC_CAPS (TEXT_UTF8),
+        GST_STATIC_CAPS (CEA608_CAPS "; " CEA708_CAPS)}
+  ,
+  /* ISO 14496-14: mp42 as ISO base media extension
+   * (supersedes original ISO 14496-1 mp41) */
+  {
+        GST_QT_MUX_FORMAT_MP4,
+        GST_RANK_PRIMARY,
+        "mp4mux",
+        "MP4",
+        "GstMP4Mux",
+        GST_STATIC_CAPS ("video/quicktime, variant = (string) iso"),
+        GST_STATIC_CAPS (MPEG4V_CAPS "; " H264_CAPS ";" H265_CAPS ";"
+            "video/x-mp4-part," COMMON_VIDEO_CAPS "; "
+            "video/x-av1, " COMMON_VIDEO_CAPS),
+        GST_STATIC_CAPS (MP123_CAPS "; "
+            AAC_CAPS " ; " AC3_CAPS " ; " ALAC_CAPS " ; " OPUS_CAPS),
+        GST_STATIC_CAPS (TEXT_UTF8),
+        GST_STATIC_CAPS_NONE}
+  ,
+  /* Microsoft Smooth Streaming fmp4/isml */
+  /* TODO add WMV/WMA support */
+  {
+        GST_QT_MUX_FORMAT_ISML,
+        GST_RANK_PRIMARY,
+        "ismlmux",
+        "ISML",
+        "GstISMLMux",
+        GST_STATIC_CAPS ("video/quicktime, variant = (string) iso-fragmented"),
+        GST_STATIC_CAPS (MPEG4V_CAPS "; " H264_CAPS),
+        GST_STATIC_CAPS (MP3_CAPS "; " AAC_CAPS),
+        GST_STATIC_CAPS_NONE,
+        GST_STATIC_CAPS_NONE}
+  ,
+  /* 3GPP Technical Specification 26.244 V7.3.0
+   * (extended in 3GPP2 File Formats for Multimedia Services) */
+  {
+        GST_QT_MUX_FORMAT_3GP,
+        GST_RANK_PRIMARY,
+        "3gppmux",
+        "3GPP",
+        "Gst3GPPMux",
+        GST_STATIC_CAPS ("video/quicktime, variant = (string) 3gpp"),
+        GST_STATIC_CAPS (H263_CAPS "; " MPEG4V_CAPS "; " H264_CAPS),
+        GST_STATIC_CAPS (AMR_CAPS "; " MP3_CAPS "; " AAC_CAPS "; " AC3_CAPS),
+        GST_STATIC_CAPS (TEXT_UTF8),
+        GST_STATIC_CAPS_NONE}
+  ,
+  /* ISO 15444-3: Motion-JPEG-2000 (also ISO base media extension) */
+  {
+        GST_QT_MUX_FORMAT_MJ2,
+        GST_RANK_PRIMARY,
+        "mj2mux",
+        "MJ2",
+        "GstMJ2Mux",
+        GST_STATIC_CAPS ("video/mj2"),
+        GST_STATIC_CAPS ("image/x-j2c, " COMMON_VIDEO_CAPS "; "
+            "image/x-jpc, " COMMON_VIDEO_CAPS),
+        GST_STATIC_CAPS (PCM_CAPS),
+        GST_STATIC_CAPS_NONE,
+        GST_STATIC_CAPS_NONE}
+  ,
+  /* sentinel entry: terminates the iteration in gst_qt_mux_register() */
+  {
+        GST_QT_MUX_FORMAT_NONE,
+      }
+};
+
+/* pretty static, but may turn out needed a few times */
+/* Map a muxer format onto the atoms-tree flavor used when serializing;
+ * anything not special-cased below is treated as plain ISO BMFF. */
+AtomsTreeFlavor
+gst_qt_mux_map_format_to_flavor (GstQTMuxFormat format)
+{
+  switch (format) {
+    case GST_QT_MUX_FORMAT_QT:
+      return ATOMS_TREE_FLAVOR_MOV;
+    case GST_QT_MUX_FORMAT_3GP:
+      return ATOMS_TREE_FLAVOR_3GP;
+    case GST_QT_MUX_FORMAT_ISML:
+      return ATOMS_TREE_FLAVOR_ISML;
+    default:
+      return ATOMS_TREE_FLAVOR_ISOM;
+  }
+}
+
+/* Walk the moov track list and report how many video and audio tracks it
+ * holds, plus whether any video track carries H.264.  Any of the output
+ * pointers may be NULL when the caller is not interested. */
+static void
+gst_qt_mux_map_check_tracks (AtomMOOV * moov, gint * _video, gint * _audio,
+    gboolean * _has_h264)
+{
+  gint n_video = 0, n_audio = 0;
+  gboolean found_h264 = FALSE;
+  GList *walk;
+
+  for (walk = moov->traks; walk != NULL; walk = walk->next) {
+    AtomTRAK *trak = walk->data;
+
+    if (!trak->is_video) {
+      n_audio++;
+      continue;
+    }
+    n_video++;
+    if (trak->is_h264)
+      found_h264 = TRUE;
+  }
+
+  if (_video != NULL)
+    *_video = n_video;
+  if (_audio != NULL)
+    *_audio = n_audio;
+  if (_has_h264 != NULL)
+    *_has_h264 = found_h264;
+}
+
+/* pretty static, but possibly dynamic format info */
+
+/* notes:
+ * - avc1 brand is not used, since the specific extensions indicated by it
+ * are not used (e.g. sample groupings, etc)
+ * - TODO: maybe even more 3GPP brand fine-tuning ??
+ * (but that might need ftyp rewriting at the end) */
+/* Fill out the ftyp header data for @format: an optional prefix buffer
+ * (written before the ftyp), the major brand, the minor version and the
+ * list of compatible brands (caller takes ownership of both outputs).
+ * @moov is inspected for the 3GPP case to pick a suitable profile brand;
+ * @longest_chunk and @faststart decide whether the 3GPP progressive
+ * download brand can be advertised. */
+void
+gst_qt_mux_map_format_to_header (GstQTMuxFormat format, GstBuffer ** _prefix,
+    guint32 * _major, guint32 * _version, GList ** _compatible, AtomMOOV * moov,
+    GstClockTime longest_chunk, gboolean faststart)
+{
+  /* 0-terminated compatible-brand tables, one per format */
+ static const guint32 qt_brands[] = { 0 };
+ static const guint32 mp4_brands[] =
+ { FOURCC_mp41, FOURCC_isom, FOURCC_iso2, 0 };
+ static const guint32 isml_brands[] = { FOURCC_iso2, 0 };
+ static const guint32 gpp_brands[] = { FOURCC_isom, FOURCC_iso2, 0 };
+ static const guint32 mjp2_brands[] = { FOURCC_isom, FOURCC_iso2, 0 };
+  /* 12-byte signature box ('jP  ' + CR LF 0x87 LF) written before the
+   * ftyp of MJ2 files */
+ static const guint8 mjp2_prefix[] =
+ { 0, 0, 0, 12, 'j', 'P', ' ', ' ', 0x0D, 0x0A, 0x87, 0x0A };
+ const guint32 *comp = NULL;
+ guint32 major = 0, version = 0;
+ GstBuffer *prefix = NULL;
+ GList *result = NULL;
+
+ g_return_if_fail (_prefix != NULL);
+ g_return_if_fail (_major != NULL);
+ g_return_if_fail (_version != NULL);
+ g_return_if_fail (_compatible != NULL);
+
+ switch (format) {
+ case GST_QT_MUX_FORMAT_QT:
+ major = FOURCC_qt__;
+ comp = qt_brands;
+ version = 0x20050300;
+ break;
+ case GST_QT_MUX_FORMAT_MP4:
+ major = FOURCC_mp42;
+ comp = mp4_brands;
+ break;
+ case GST_QT_MUX_FORMAT_ISML:
+ major = FOURCC_isml;
+ comp = isml_brands;
+ break;
+ case GST_QT_MUX_FORMAT_3GP:
+ {
+ gint video, audio;
+ gboolean has_h264;
+
+ gst_qt_mux_map_check_tracks (moov, &video, &audio, &has_h264);
+ /* only track restriction really matters for Basic Profile */
+ if (video <= 1 && audio <= 1) {
+ /* it seems only newer spec knows about H264 */
+ major = has_h264 ? FOURCC_3gp6 : FOURCC_3gp4;
+ version = has_h264 ? 0x100 : 0x200;
+ } else {
+ major = FOURCC_3gg6;
+ version = 0x100;
+ }
+ comp = gpp_brands;
+
+ /*
+ * We assume that we have chunks in dts order
+ */
+ if (faststart && longest_chunk <= GST_SECOND) {
+ /* add progressive download profile */
+ result = g_list_append (result, GUINT_TO_POINTER (FOURCC_3gr6));
+ }
+ break;
+ }
+ case GST_QT_MUX_FORMAT_MJ2:
+ {
+ major = FOURCC_mjp2;
+ comp = mjp2_brands;
+ version = 0;
+ prefix = gst_buffer_new_and_alloc (sizeof (mjp2_prefix));
+ gst_buffer_fill (prefix, 0, mjp2_prefix, sizeof (mjp2_prefix));
+ break;
+ }
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+
+ /* append the 0-terminated brand array to the compatible-brands list */
+ while (comp && *comp != 0) {
+ /* order matters over efficiency */
+ result = g_list_append (result, GUINT_TO_POINTER (*comp));
+ comp++;
+ }
+
+ *_major = major;
+ *_version = version;
+ *_prefix = prefix;
+ *_compatible = result;
+
+ /* TODO 3GPP may include mp42 as compatible if applicable */
+ /* TODO 3GPP major brand 3gp7 if at most 1 video and audio track */
+}
diff --git a/gst/isomp4/gstqtmuxmap.h b/gst/isomp4/gstqtmuxmap.h
new file mode 100644
index 0000000000..f0bae16cb4
--- /dev/null
+++ b/gst/isomp4/gstqtmuxmap.h
@@ -0,0 +1,86 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008 Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>
+ * Copyright (C) 2008 Mark Nauwelaerts <mnauw@users.sf.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __GST_QT_MUX_MAP_H__
+#define __GST_QT_MUX_MAP_H__
+
+#include "atoms.h"
+
+#include <glib.h>
+#include <gst/gst.h>
+
+/* Identifies which container variant a muxer element produces. */
+typedef enum _GstQTMuxFormat
+{
+ GST_QT_MUX_FORMAT_NONE = 0, /* sentinel / list terminator */
+ GST_QT_MUX_FORMAT_QT, /* original QuickTime .mov */
+ GST_QT_MUX_FORMAT_MP4, /* ISO 14496-14 MP4 */
+ GST_QT_MUX_FORMAT_3GP, /* 3GPP TS 26.244 */
+ GST_QT_MUX_FORMAT_MJ2, /* ISO 15444-3 Motion JPEG 2000 */
+ GST_QT_MUX_FORMAT_ISML /* Smooth Streaming fragmented MP4 */
+} GstQTMuxFormat;
+
+/* Static per-format element description: registration data plus the
+ * caps each muxer variant accepts and produces. */
+typedef struct _GstQTMuxFormatProp
+{
+ GstQTMuxFormat format; /* which variant this entry describes */
+ GstRank rank; /* rank to register the element factory with */
+ const gchar *name; /* element factory name, e.g. "mp4mux" */
+ const gchar *long_name; /* human-readable name, e.g. "MP4" */
+ const gchar *type_name; /* GType name, e.g. "GstMP4Mux" */
+ GstStaticCaps src_caps; /* caps produced on the source pad */
+ GstStaticCaps video_sink_caps; /* accepted video input caps */
+ GstStaticCaps audio_sink_caps; /* accepted audio input caps */
+ GstStaticCaps subtitle_sink_caps; /* accepted subtitle input caps */
+ GstStaticCaps caption_sink_caps; /* accepted closed-caption input caps */
+} GstQTMuxFormatProp;
+
+/* Table of all supported muxer variants, terminated by a
+ * GST_QT_MUX_FORMAT_NONE entry (defined in gstqtmuxmap.c). */
+extern GstQTMuxFormatProp gst_qt_mux_format_list[];
+
+/* Fill out ftyp header data for @format: optional prefix buffer, major
+ * brand, minor version and the list of compatible brands (caller owns
+ * the returned buffer and list). */
+void gst_qt_mux_map_format_to_header (GstQTMuxFormat format, GstBuffer ** _prefix,
+                                      guint32 * _major, guint32 * _version,
+                                      GList ** _compatible, AtomMOOV * moov,
+                                      GstClockTime longest_chunk,
+                                      gboolean faststart);
+
+/* Map a muxer format onto the atoms-tree flavor used when serializing. */
+AtomsTreeFlavor gst_qt_mux_map_format_to_flavor (GstQTMuxFormat format);
+
+#endif /* __GST_QT_MUX_MAP_H__ */
diff --git a/gst/isomp4/gstrtpxqtdepay.c b/gst/isomp4/gstrtpxqtdepay.c
new file mode 100644
index 0000000000..3900db28f5
--- /dev/null
+++ b/gst/isomp4/gstrtpxqtdepay.c
@@ -0,0 +1,690 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * based on http://developer.apple.com/quicktime/icefloe/dispatch026.html
+ */
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include <string.h>
+#include "gstisomp4elements.h"
+#include "gstrtpxqtdepay.h"
+
+#define MAKE_TLV(a,b) (((a)<<8)|(b))
+
+#define TLV_sd MAKE_TLV ('s','d')
+#define TLV_qt MAKE_TLV ('q','t')
+#define TLV_ti MAKE_TLV ('t','i')
+#define TLV_ly MAKE_TLV ('l','y')
+#define TLV_vo MAKE_TLV ('v','o')
+#define TLV_mx MAKE_TLV ('m','x')
+#define TLV_tr MAKE_TLV ('t','r')
+#define TLV_tw MAKE_TLV ('t','w')
+#define TLV_th MAKE_TLV ('t','h')
+#define TLV_la MAKE_TLV ('l','a')
+#define TLV_rt MAKE_TLV ('r','t')
+#define TLV_gm MAKE_TLV ('g','m')
+#define TLV_oc MAKE_TLV ('o','c')
+#define TLV_cr MAKE_TLV ('c','r')
+#define TLV_du MAKE_TLV ('d','u')
+#define TLV_po MAKE_TLV ('p','o')
+
+#define QT_UINT32(a) (GST_READ_UINT32_BE(a))
+#define QT_UINT24(a) (GST_READ_UINT32_BE(a) >> 8)
+#define QT_UINT16(a) (GST_READ_UINT16_BE(a))
+#define QT_UINT8(a) (GST_READ_UINT8(a))
+#define QT_FP32(a) ((GST_READ_UINT32_BE(a))/65536.0)
+#define QT_FP16(a) ((GST_READ_UINT16_BE(a))/256.0)
+#define QT_FOURCC(a) (GST_READ_UINT32_LE(a))
+#define QT_UINT64(a) ((((guint64)QT_UINT32(a))<<32)|QT_UINT32(((guint8 *)a)+4))
+
+#define FOURCC_avc1 GST_MAKE_FOURCC('a','v','c','1')
+#define FOURCC_avc3 GST_MAKE_FOURCC('a','v','c','3')
+#define FOURCC_avcC GST_MAKE_FOURCC('a','v','c','C')
+
+GST_DEBUG_CATEGORY_STATIC (rtpxqtdepay_debug);
+#define GST_CAT_DEFAULT (rtpxqtdepay_debug)
+
+/* RtpXQTDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+};
+
+static GstStaticPadTemplate gst_rtp_xqt_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_rtp_xqt_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "media = (string) { \"audio\", \"video\" }, clock-rate = (int) [1, MAX], "
+ "encoding-name = (string) { \"X-QT\", \"X-QUICKTIME\" }")
+ );
+
+#define gst_rtp_xqt_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpXQTDepay, gst_rtp_xqt_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpxqtdepay, "rtpxqtdepay",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_XQT_DEPAY, isomp4_element_init (plugin));
+
+static void gst_rtp_xqt_depay_finalize (GObject * object);
+
+static gboolean gst_rtp_xqt_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_xqt_depay_process (GstRTPBaseDepayload * depayload,
+ GstBuffer * buf);
+
+static GstStateChangeReturn gst_rtp_xqt_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+
+/* Class init: hook up finalize/state-change and the depayloader
+ * set_caps/process vfuncs, register pad templates and metadata. */
+static void
+gst_rtp_xqt_depay_class_init (GstRtpXQTDepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ /* NOTE(review): G_DEFINE_TYPE already initializes parent_class via the
+  * gst_rtp_xqt_depay_parent_class alias, so this assignment looks
+  * redundant — confirm before removing */
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_rtp_xqt_depay_finalize;
+
+ gstelement_class->change_state = gst_rtp_xqt_depay_change_state;
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_xqt_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_xqt_depay_process;
+
+ GST_DEBUG_CATEGORY_INIT (rtpxqtdepay_debug, "rtpxqtdepay", 0,
+ "QT Media RTP Depayloader");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_xqt_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_xqt_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP packet depayloader", "Codec/Depayloader/Network",
+ "Extracts Quicktime audio/video from RTP packets",
+ "Wim Taymans <wim@fluendo.com>");
+}
+
+/* Instance init: create the adapter used by the process vfunc to collect
+ * fragments of a sample (PCK mode 3) until the RTP marker bit. */
+static void
+gst_rtp_xqt_depay_init (GstRtpXQTDepay * rtpxqtdepay)
+{
+ rtpxqtdepay->adapter = gst_adapter_new ();
+}
+
+/* Finalize: release the adapter created in _init and chain up. */
+static void
+gst_rtp_xqt_depay_finalize (GObject * object)
+{
+  GstRtpXQTDepay *rtpxqtdepay = GST_RTP_XQT_DEPAY (object);
+
+  /* unrefs and NULLs the pointer in one step */
+  g_clear_object (&rtpxqtdepay->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Parse an 'sd' (session description) TLV carrying a QuickTime sample
+ * description.  For AVC sample entries the avcC child atom is located and
+ * its contents installed as codec_data in video/x-h264 caps on the source
+ * pad.  Returns FALSE when the data is malformed/truncated. */
+static gboolean
+gst_rtp_quicktime_parse_sd (GstRtpXQTDepay * rtpxqtdepay, guint8 * data,
+    guint data_len)
+{
+  gint len;
+  guint32 fourcc;
+
+  if (data_len < 8)
+    goto too_short;
+
+  /* sample description size (32-bit BE) followed by the format fourcc */
+  len = (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | data[3];
+  if (len > data_len)
+    goto too_short;
+
+  fourcc = QT_FOURCC (data + 4);
+
+  GST_DEBUG_OBJECT (rtpxqtdepay, "parsing %" GST_FOURCC_FORMAT,
+      GST_FOURCC_ARGS (fourcc));
+
+  switch (fourcc) {
+    case FOURCC_avc1:
+    case FOURCC_avc3:
+    {
+      guint32 chlen;
+
+      /* skip the fixed-size part of the video sample entry */
+      if (len < 0x56)
+        goto too_short;
+      len -= 0x56;
+      data += 0x56;
+
+      /* find avcC */
+      while (len >= 8) {
+        chlen = QT_UINT32 (data);
+        fourcc = QT_FOURCC (data + 4);
+        if (fourcc == FOURCC_avcC) {
+          GstBuffer *buf;
+          gint size;
+          GstCaps *caps;
+
+          GST_DEBUG_OBJECT (rtpxqtdepay, "found avcC codec_data in sd, %u",
+              chlen);
+
+          /* parse, if found */
+          if (chlen < len)
+            size = chlen - 8;
+          else
+            size = len - 8;
+
+          buf = gst_buffer_new_and_alloc (size);
+          gst_buffer_fill (buf, 0, data + 8, size);
+          caps = gst_caps_new_simple ("video/x-h264",
+              "codec_data", GST_TYPE_BUFFER, buf, NULL);
+          gst_buffer_unref (buf);
+          gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD (rtpxqtdepay)->srcpad, caps);
+          gst_caps_unref (caps);
+          break;
+        }
+        /* a child atom size smaller than its 8-byte header is bogus and
+         * would make this loop spin forever on malformed input */
+        if (chlen < 8)
+          goto too_short;
+        len -= chlen;
+        data += chlen;
+      }
+      break;
+    }
+    default:
+      break;
+  }
+  return TRUE;
+
+  /* ERRORS */
+too_short:
+  {
+    return FALSE;
+  }
+}
+
+/* Record the stream clock-rate from the sink caps; fall back to the
+ * 90 kHz default when the caps do not carry one. */
+static gboolean
+gst_rtp_xqt_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *s;
+  gint rate;
+
+  s = gst_caps_get_structure (caps, 0);
+
+  if (!gst_structure_get_int (s, "clock-rate", &rate))
+    rate = 90000;
+
+  depayload->clock_rate = rate;
+
+  return TRUE;
+}
+
+/* Depayload one X-QT RTP packet.  Parses the 4-byte payload header, the
+ * optional payload description (Q bit) and sample-specific info (L bit)
+ * TLV sections, then extracts media according to the PCK packing mode:
+ * 1 = aligned samples pushed as-is, 2 = multiple samples with per-sample
+ * headers, 3 = one (possibly fragmented) sample collected in the adapter
+ * until the RTP marker bit.  Returns a buffer for the base class to push,
+ * or NULL when nothing is ready. */
+static GstBuffer *
+gst_rtp_xqt_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
+{
+ GstRtpXQTDepay *rtpxqtdepay;
+ GstBuffer *outbuf = NULL;
+ gboolean m;
+ GstRTPBuffer rtp = { NULL };
+
+ rtpxqtdepay = GST_RTP_XQT_DEPAY (depayload);
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+
+ if (GST_BUFFER_IS_DISCONT (buf)) {
+ /* discont, clear adapter and try to find a new packet start */
+ gst_adapter_clear (rtpxqtdepay->adapter);
+ rtpxqtdepay->need_resync = TRUE;
+ GST_DEBUG_OBJECT (rtpxqtdepay, "we need resync");
+ }
+
+ m = gst_rtp_buffer_get_marker (&rtp);
+ GST_LOG_OBJECT (rtpxqtdepay, "marker: %d", m);
+
+ {
+ gint payload_len;
+ guint avail;
+ guint8 *payload;
+ guint8 ver, pck;
+ gboolean s, q, l, d;
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+
+ /* 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | VER |PCK|S|Q|L| RES |D| QuickTime Payload ID |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ if (payload_len <= 4)
+ goto wrong_length;
+
+ ver = (payload[0] & 0xf0) >> 4;
+ if (ver > 1)
+ goto wrong_version;
+
+ pck = (payload[0] & 0x0c) >> 2;
+ if (pck == 0)
+ goto pck_reserved;
+
+ s = (payload[0] & 0x02) != 0; /* contains sync sample */
+ q = (payload[0] & 0x01) != 0; /* has payload description */
+ l = (payload[1] & 0x80) != 0; /* has packet specific information description */
+ d = (payload[2] & 0x80) != 0; /* don't cache info for payload id */
+ /* id used for caching info */
+ rtpxqtdepay->current_id = ((payload[2] & 0x7f) << 8) | payload[3];
+
+ GST_LOG_OBJECT (rtpxqtdepay,
+ "VER: %d, PCK: %d, S: %d, Q: %d, L: %d, D: %d, ID: %d", ver, pck, s, q,
+ l, d, rtpxqtdepay->current_id);
+
+ if (rtpxqtdepay->need_resync) {
+ /* we need to find the boundary of a new packet after a DISCONT */
+ if (pck != 3 || q) {
+ /* non-fragmented packet or payload description present, packet starts
+ * here. */
+ rtpxqtdepay->need_resync = FALSE;
+ } else {
+ /* fragmented packet without description */
+ if (m) {
+ /* marker bit set, next packet is start of new one */
+ rtpxqtdepay->need_resync = FALSE;
+ }
+ goto need_resync;
+ }
+ }
+
+ payload += 4;
+ payload_len -= 4;
+
+ if (q) {
+ gboolean k, f, a, z;
+ guint pdlen, pdpadded;
+ gint padding;
+ /* media_type only used for printing */
+ guint32 G_GNUC_UNUSED media_type;
+ guint32 timescale;
+
+ /* 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |K|F|A|Z| RES | QuickTime Payload Desc Length |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . QuickTime Payload Desc Data ... .
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ if (payload_len <= 4)
+ goto wrong_length;
+
+ k = (payload[0] & 0x80) != 0; /* keyframe */
+ f = (payload[0] & 0x40) != 0; /* sparse */
+ a = (payload[0] & 0x20) != 0; /* start of payload */
+ z = (payload[0] & 0x10) != 0; /* end of payload */
+ pdlen = (payload[2] << 8) | payload[3];
+
+ if (pdlen < 12)
+ goto wrong_length;
+
+ /* calc padding: description is padded to a 32-bit boundary */
+ pdpadded = pdlen + 3;
+ pdpadded -= pdpadded % 4;
+ if (payload_len < pdpadded)
+ goto wrong_length;
+
+ padding = pdpadded - pdlen;
+ GST_LOG_OBJECT (rtpxqtdepay,
+ "K: %d, F: %d, A: %d, Z: %d, len: %d, padding %d", k, f, a, z, pdlen,
+ padding);
+
+ payload += 4;
+ payload_len -= 4;
+ /* 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | QuickTime Media Type |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Timescale |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . QuickTime TLVs ... .
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ media_type =
+ (payload[0] << 24) | (payload[1] << 16) | (payload[2] << 8) |
+ payload[3];
+ timescale =
+ (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) |
+ payload[7];
+
+ GST_LOG_OBJECT (rtpxqtdepay, "media_type: %c%c%c%c, timescale %u",
+ payload[0], payload[1], payload[2], payload[3], timescale);
+
+ payload += 8;
+ payload_len -= 8;
+ pdlen -= 12;
+
+ /* parse TLV (type-length-value) triplets */
+ while (pdlen > 3) {
+ guint16 tlv_len, tlv_type;
+
+ /* 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | QuickTime TLV Length | QuickTime TLV Type |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . QuickTime TLV Value ... .
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ tlv_len = (payload[0] << 8) | payload[1];
+ tlv_type = (payload[2] << 8) | payload[3];
+ pdlen -= 4;
+ if (tlv_len > pdlen)
+ goto wrong_length;
+
+ GST_LOG_OBJECT (rtpxqtdepay, "TLV '%c%c', len %d", payload[2],
+ payload[3], tlv_len);
+
+ payload += 4;
+ payload_len -= 4;
+
+ switch (tlv_type) {
+ case TLV_sd:
+ /* Session description */
+ if (!gst_rtp_quicktime_parse_sd (rtpxqtdepay, payload, tlv_len))
+ goto unknown_format;
+ rtpxqtdepay->have_sd = TRUE;
+ break;
+ case TLV_qt:
+ case TLV_ti:
+ case TLV_ly:
+ case TLV_vo:
+ case TLV_mx:
+ case TLV_tr:
+ case TLV_tw:
+ case TLV_th:
+ case TLV_la:
+ case TLV_rt:
+ case TLV_gm:
+ case TLV_oc:
+ case TLV_cr:
+ case TLV_du:
+ case TLV_po:
+ default:
+ /* other TLV types are skipped, not handled */
+ break;
+ }
+
+ pdlen -= tlv_len;
+ payload += tlv_len;
+ payload_len -= tlv_len;
+ }
+ payload += padding;
+ payload_len -= padding;
+ }
+
+ if (l) {
+ guint ssilen, ssipadded;
+ gint padding;
+
+ /* 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | RES | Sample-Specific Info Length |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . QuickTime TLVs ...
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ if (payload_len <= 4)
+ goto wrong_length;
+
+ ssilen = (payload[2] << 8) | payload[3];
+ if (ssilen < 4)
+ goto wrong_length;
+
+ /* calc padding: info section is padded to a 32-bit boundary */
+ ssipadded = ssilen + 3;
+ ssipadded -= ssipadded % 4;
+ if (payload_len < ssipadded)
+ goto wrong_length;
+
+ padding = ssipadded - ssilen;
+ GST_LOG_OBJECT (rtpxqtdepay, "len: %d, padding %d", ssilen, padding);
+
+ payload += 4;
+ payload_len -= 4;
+ ssilen -= 4;
+
+ /* parse TLV (type-length-value) triplets */
+ while (ssilen > 3) {
+ guint16 tlv_len, tlv_type;
+
+ /* 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | QuickTime TLV Length | QuickTime TLV Type |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . QuickTime TLV Value ... .
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ tlv_len = (payload[0] << 8) | payload[1];
+ tlv_type = (payload[2] << 8) | payload[3];
+ ssilen -= 4;
+ if (tlv_len > ssilen)
+ goto wrong_length;
+
+ GST_LOG_OBJECT (rtpxqtdepay, "TLV '%c%c', len %d", payload[2],
+ payload[3], tlv_len);
+
+ payload += 4;
+ payload_len -= 4;
+
+ switch (tlv_type) {
+ case TLV_sd:
+ case TLV_qt:
+ case TLV_ti:
+ case TLV_ly:
+ case TLV_vo:
+ case TLV_mx:
+ case TLV_tr:
+ case TLV_tw:
+ case TLV_th:
+ case TLV_la:
+ case TLV_rt:
+ case TLV_gm:
+ case TLV_oc:
+ case TLV_cr:
+ case TLV_du:
+ case TLV_po:
+ default:
+ /* sample-specific TLVs are skipped, not handled */
+ break;
+ }
+
+ ssilen -= tlv_len;
+ payload += tlv_len;
+ payload_len -= tlv_len;
+ }
+ payload += padding;
+ payload_len -= padding;
+ }
+
+ /* NOTE(review): previous_id is recorded here but not otherwise
+ * consulted in this function */
+ rtpxqtdepay->previous_id = rtpxqtdepay->current_id;
+
+ switch (pck) {
+ case 1:
+ {
+ /* multiple samples per packet. */
+ outbuf = gst_buffer_new_and_alloc (payload_len);
+ gst_buffer_fill (outbuf, 0, payload, payload_len);
+
+ goto done;
+ }
+ case 2:
+ {
+ guint slen;
+
+ /* multiple samples per packet.
+ * 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |S| Reserved | Sample Length |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Sample Timestamp |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . Sample Data ... .
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |S| Reserved | Sample Length |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Sample Timestamp |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . Sample Data ... .
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * . ...... .
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ while (payload_len > 8) {
+ s = (payload[0] & 0x80) != 0; /* contains sync sample */
+ slen = (payload[2] << 8) | payload[3];
+ /* timestamp =
+ * (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) |
+ * payload[7];
+ */
+
+ payload += 8;
+ payload_len -= 8;
+
+ if (slen > payload_len)
+ slen = payload_len;
+
+ outbuf = gst_buffer_new_and_alloc (slen);
+ gst_buffer_fill (outbuf, 0, payload, slen);
+ if (!s)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ /* each sample is pushed individually; outbuf ownership passes to
+ * the base class here */
+ gst_rtp_base_depayload_push (depayload, outbuf);
+
+ /* aligned on 32 bit boundary */
+ slen = GST_ROUND_UP_4 (slen);
+
+ payload += slen;
+ payload_len -= slen;
+ }
+ break;
+ }
+ case 3:
+ {
+ /* one sample per packet, use adapter to combine based on marker bit. */
+ outbuf = gst_buffer_new_and_alloc (payload_len);
+ gst_buffer_fill (outbuf, 0, payload, payload_len);
+
+ gst_adapter_push (rtpxqtdepay->adapter, outbuf);
+ outbuf = NULL;
+
+ if (!m)
+ goto done;
+
+ avail = gst_adapter_available (rtpxqtdepay->adapter);
+ outbuf = gst_adapter_take_buffer (rtpxqtdepay->adapter, avail);
+
+ GST_DEBUG_OBJECT (rtpxqtdepay,
+ "gst_rtp_xqt_depay_chain: pushing buffer of size %u", avail);
+
+ goto done;
+ }
+ }
+ }
+
+done:
+ gst_rtp_buffer_unmap (&rtp);
+ return outbuf;
+
+need_resync:
+ {
+ GST_DEBUG_OBJECT (rtpxqtdepay, "waiting for marker");
+ goto done;
+ }
+wrong_version:
+ {
+ GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
+ ("Unknown payload version."), (NULL));
+ goto done;
+ }
+pck_reserved:
+ {
+ GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
+ ("PCK reserved 0."), (NULL));
+ goto done;
+ }
+wrong_length:
+ {
+ GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
+ ("Wrong payload length."), (NULL));
+ goto done;
+ }
+unknown_format:
+ {
+ GST_ELEMENT_WARNING (rtpxqtdepay, STREAM, DECODE,
+ ("Unknown payload format."), (NULL));
+ goto done;
+ }
+}
+
+/* State change: reset all parsing state when going READY->PAUSED and drop
+ * any pending adapter data when leaving PAUSED. */
+static GstStateChangeReturn
+gst_rtp_xqt_depay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRtpXQTDepay *rtpxqtdepay;
+  GstStateChangeReturn ret;
+
+  rtpxqtdepay = GST_RTP_XQT_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_adapter_clear (rtpxqtdepay->adapter);
+      rtpxqtdepay->previous_id = -1;
+      rtpxqtdepay->current_id = -1;
+      rtpxqtdepay->need_resync = TRUE;
+      rtpxqtdepay->have_sd = FALSE;
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_adapter_clear (rtpxqtdepay->adapter);
+      /* break was missing here: harmless (fell into an empty default) but
+       * an implicit fallthrough that -Wimplicit-fallthrough flags */
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/isomp4/gstrtpxqtdepay.h b/gst/isomp4/gstrtpxqtdepay.h
new file mode 100644
index 0000000000..ca4e41a47c
--- /dev/null
+++ b/gst/isomp4/gstrtpxqtdepay.h
@@ -0,0 +1,64 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_XQT_DEPAY_H__
+#define __GST_RTP_XQT_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_XQT_DEPAY \
+ (gst_rtp_xqt_depay_get_type())
+#define GST_RTP_XQT_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_XQT_DEPAY,GstRtpXQTDepay))
+#define GST_RTP_XQT_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_XQT_DEPAY,GstRtpXQTDepayClass))
+#define GST_IS_RTP_XQT_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_XQT_DEPAY))
+#define GST_IS_RTP_XQT_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_XQT_DEPAY))
+
+typedef struct _GstRtpXQTDepay GstRtpXQTDepay;
+typedef struct _GstRtpXQTDepayClass GstRtpXQTDepayClass;
+
+/* Depayloader instance state for Apple's X-QT RTP payload format. */
+struct _GstRtpXQTDepay
+{
+ GstRTPBaseDepayload depayload;
+
+ GstAdapter *adapter; /* collects fragments of a sample (PCK mode 3) */
+
+ gboolean need_resync; /* TRUE after a DISCONT until a packet boundary is found */
+ /* NOTE(review): both ids are reset to -1 in change_state, which wraps
+  * to 65535 in a guint16 — confirm that is intended */
+ guint16 previous_id; /* payload id of the previously processed packet */
+ guint16 current_id; /* payload id of the packet being processed */
+ gboolean have_sd; /* a session description ('sd' TLV) has been parsed */
+};
+
+struct _GstRtpXQTDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_xqt_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_XQT_DEPAY_H__ */
diff --git a/gst/isomp4/isomp4-plugin.c b/gst/isomp4/isomp4-plugin.c
new file mode 100644
index 0000000000..3694d157b2
--- /dev/null
+++ b/gst/isomp4/isomp4-plugin.c
@@ -0,0 +1,47 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include "gst/gst-i18n-plugin.h"
+
+#include "gstisomp4elements.h"
+
+
+/* Register all elements provided by the isomp4 plugin.  Returns TRUE when
+ * at least one element registered successfully. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ /* |= so that one failing registration does not hide the others */
+ ret |= GST_ELEMENT_REGISTER (qtdemux, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpxqtdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (qtmux, plugin);
+ ret |= GST_ELEMENT_REGISTER (qtmoovrecover, plugin);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ isomp4,
+ "ISO base media file format support (mp4, 3gpp, qt, mj2)",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/isomp4/meson.build b/gst/isomp4/meson.build
new file mode 100644
index 0000000000..b510c0510e
--- /dev/null
+++ b/gst/isomp4/meson.build
@@ -0,0 +1,34 @@
+mp4_sources = [
+ 'isomp4-plugin.c',
+ 'gstisomp4element.c',
+ 'gstrtpxqtdepay.c',
+ 'qtdemux.c',
+ 'qtdemux_types.c',
+ 'qtdemux_dump.c',
+ 'qtdemux_lang.c',
+ 'qtdemux_tags.c',
+ 'qtdemux_tree.c',
+ 'gstisoff.c',
+ 'gstqtmux.c',
+ 'gstqtmoovrecover.c',
+ 'atoms.c',
+ 'atomsrecovery.c',
+ 'descriptors.c',
+ 'properties.c',
+ 'gstqtmuxmap.c'
+]
+
+gstisomp4 = library('gstisomp4',
+ mp4_sources,
+ c_args : gst_plugins_good_args,
+ link_args : noseh_link_args,
+ include_directories : [configinc, libsinc],
+ dependencies : [gst_dep, gstriff_dep, gstaudio_dep, gstvideo_dep,
+ gstrtp_dep, gsttag_dep, gstpbutils_dep, zlib_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstisomp4, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstisomp4]
+
+install_data(sources: 'GstQTMux.prs', install_dir: presetdir)
diff --git a/gst/isomp4/properties.c b/gst/isomp4/properties.c
new file mode 100644
index 0000000000..cb43e295e5
--- /dev/null
+++ b/gst/isomp4/properties.c
@@ -0,0 +1,210 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008 Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "properties.h"
+
+/* if needed, re-allocate buffer to ensure size bytes can be written into it
+ * at offset.  A NULL @buffer means the caller is only doing a size
+ * accounting pass, so nothing is (re)allocated in that case. */
+void
+prop_copy_ensure_buffer (guint8 ** buffer, guint64 * bsize, guint64 * offset,
+    guint64 size)
+{
+  /* NOTE(review): the unsigned subtraction assumes *offset <= *bsize;
+   * callers appear to maintain that invariant -- confirm before reuse */
+  if (buffer && *bsize - *offset < size) {
+    /* grow by the needed size plus 10 KB of slack to amortize reallocs */
+    *bsize += size + 10 * 1024;
+    *buffer = g_realloc (*buffer, *bsize);
+  }
+}
+
+/* Append @size bytes from @prop into *buffer at *offset, growing the buffer
+ * when necessary.  With a NULL @buffer only *offset is advanced (size
+ * accounting pass).  Returns the number of bytes (virtually) written. */
+static guint64
+copy_func (void *prop, guint size, guint8 ** buffer, guint64 * bsize,
+    guint64 * offset)
+{
+  if (buffer) {
+    prop_copy_ensure_buffer (buffer, bsize, offset, size);
+    memcpy (*buffer + *offset, prop, size);
+  }
+  *offset += size;
+  return size;
+}
+
+/* Generates prop_copy_<name>_array(): copies the whole array with one
+ * memcpy.  Only valid when no per-element byte swapping is required. */
+#define INT_ARRAY_COPY_FUNC_FAST(name, datatype) \
+guint64 prop_copy_ ## name ## _array (datatype *prop, guint size, \
+    guint8 ** buffer, guint64 * bsize, guint64 * offset) { \
+  return copy_func (prop, sizeof (datatype) * size, buffer, bsize, offset);\
+}
+
+/* Generates prop_copy_<name>_array(): copies element by element so the
+ * scalar prop_copy_<name>() can perform its endian conversion. */
+#define INT_ARRAY_COPY_FUNC(name, datatype) \
+guint64 prop_copy_ ## name ## _array (datatype *prop, guint size, \
+    guint8 ** buffer, guint64 * bsize, guint64 * offset) { \
+  guint i; \
+ \
+  for (i = 0; i < size; i++) { \
+    prop_copy_ ## name (prop[i], buffer, bsize, offset); \
+  } \
+  return sizeof (datatype) * size; \
+}
+
+/* INTEGERS */
+/* Scalar copy helpers: each converts the value to big-endian (the on-disk
+ * byte order used throughout these atoms) and appends it at *offset. */
+guint64
+prop_copy_uint8 (guint8 prop, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  /* single byte: no endian conversion needed */
+  return copy_func (&prop, sizeof (guint8), buffer, size, offset);
+}
+
+guint64
+prop_copy_uint16 (guint16 prop, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  prop = GUINT16_TO_BE (prop);
+  return copy_func (&prop, sizeof (guint16), buffer, size, offset);
+}
+
+guint64
+prop_copy_uint32 (guint32 prop, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  prop = GUINT32_TO_BE (prop);
+  return copy_func (&prop, sizeof (guint32), buffer, size, offset);
+}
+
+guint64
+prop_copy_uint64 (guint64 prop, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  prop = GUINT64_TO_BE (prop);
+  return copy_func (&prop, sizeof (guint64), buffer, size, offset);
+}
+
+guint64
+prop_copy_int32 (gint32 prop, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  prop = GINT32_TO_BE (prop);
+  return copy_func (&prop, sizeof (guint32), buffer, size, offset);
+}
+
+/* uint8 can use direct copy in any case, and may be used for large quantity */
+INT_ARRAY_COPY_FUNC_FAST (uint8, guint8);
+/* not used in large quantity anyway */
+INT_ARRAY_COPY_FUNC (uint16, guint16);
+INT_ARRAY_COPY_FUNC (uint32, guint32);
+INT_ARRAY_COPY_FUNC (uint64, guint64);
+
+/* FOURCC */
+/* Fourccs are converted to little-endian here (unlike the integer helpers
+ * above) so that the four characters land in the output in reading order;
+ * this matches how GST_MAKE_FOURCC packs characters into a guint32. */
+guint64
+prop_copy_fourcc (guint32 prop, guint8 ** buffer, guint64 * size,
+    guint64 * offset)
+{
+  prop = GINT32_TO_LE (prop);
+  return copy_func (&prop, sizeof (guint32), buffer, size, offset);
+}
+
+INT_ARRAY_COPY_FUNC (fourcc, guint32);
+
+/**
+ * prop_copy_fixed_size_string:
+ * @string: the string to be copied
+ * @str_size: size of the string
+ * @buffer: the array to copy the string to
+ * @size: current allocated size of @buffer, updated if it has to grow
+ * @offset: the position in the buffer array.
+ * This value is updated to the point right after the copied string.
+ *
+ * Copies a string of bytes without placing its size at the beginning.
+ * No NUL terminator is written either; exactly @str_size bytes are copied.
+ *
+ * Returns: the number of bytes copied
+ */
+guint64
+prop_copy_fixed_size_string (guint8 * string, guint str_size, guint8 ** buffer,
+    guint64 * size, guint64 * offset)
+{
+  return copy_func (string, str_size * sizeof (guint8), buffer, size, offset);
+}
+
+/**
+ * prop_copy_size_string:
+ *
+ * @string: the string to be copied
+ * @str_size: size of the string
+ * @buffer: the array to copy the string to
+ * @offset: the position in the buffer array.
+ * This value is updated to the point right after the copied string.
+ *
+ * Copies a string and its size to an array. Example:
+ * string = 'abc\0'
+ * result in the array: [3][a][b][c] (each [x] represents a position)
+ *
+ * NOTE(review): the length prefix is written as a single byte, so a
+ * @str_size above 255 is silently truncated in the prefix -- callers are
+ * presumably expected to keep such (pascal-style) strings short; confirm.
+ *
+ * Returns: the number of bytes copied
+ */
+guint64
+prop_copy_size_string (guint8 * string, guint str_size, guint8 ** buffer,
+    guint64 * size, guint64 * offset)
+{
+  guint64 original_offset = *offset;
+
+  /* one byte of length, then the raw characters */
+  prop_copy_uint8 (str_size, buffer, size, offset);
+  prop_copy_fixed_size_string (string, str_size, buffer, size, offset);
+  return *offset - original_offset;
+}
+
+/**
+ * prop_copy_null_terminated_string:
+ * @string: the string to be copied (must be NUL-terminated)
+ * @buffer: the array to copy the string to
+ * @offset: the position in the buffer array.
+ * This value is updated to the point right after the copied string.
+ *
+ * Copies a string including its null terminating char to an array.
+ *
+ * Returns: the number of bytes copied (strlen + 1)
+ */
+guint64
+prop_copy_null_terminated_string (gchar * string, guint8 ** buffer,
+    guint64 * size, guint64 * offset)
+{
+  guint64 original_offset = *offset;
+  guint len = strlen (string);
+
+  /* characters first, then an explicit terminating NUL byte */
+  prop_copy_fixed_size_string ((guint8 *) string, len, buffer, size, offset);
+  prop_copy_uint8 ('\0', buffer, size, offset);
+  return *offset - original_offset;
+}
diff --git a/gst/isomp4/properties.h b/gst/isomp4/properties.h
new file mode 100644
index 0000000000..c36fe48769
--- /dev/null
+++ b/gst/isomp4/properties.h
@@ -0,0 +1,87 @@
+/* Quicktime muxer plugin for GStreamer
+ * Copyright (C) 2008 Thiago Sousa Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __PROPERTIES_H__
+#define __PROPERTIES_H__
+
+#include <glib.h>
+#include <string.h>
+
+/*
+ * Functions for copying atoms properties.
+ *
+ * All of them receive, as the input, the property to be copied, the destination
+ * buffer, and a pointer to an offset in the destination buffer to copy to the right place.
+ * This offset will be updated to the new value (offset + copied_size)
+ * The functions return the size of the property that has been copied or 0
+ * if it couldn't copy.
+ */
+
+/* grow *buffer (if non-NULL) so @size bytes fit at *offset */
+void prop_copy_ensure_buffer (guint8 ** buffer, guint64 * bsize, guint64 * offset, guint64 size);
+
+/* scalar copies: value is written in big-endian (on-disk) byte order */
+guint64 prop_copy_uint8 (guint8 prop, guint8 **buffer, guint64 *size, guint64 *offset);
+guint64 prop_copy_uint16 (guint16 prop, guint8 **buffer, guint64 *size, guint64 *offset);
+guint64 prop_copy_uint32 (guint32 prop, guint8 **buffer, guint64 *size, guint64 *offset);
+guint64 prop_copy_uint64 (guint64 prop, guint8 **buffer, guint64 *size, guint64 *offset);
+
+guint64 prop_copy_int32 (gint32 prop, guint8 **buffer, guint64 *size, guint64 *offset);
+
+/* array variants of the scalar copies above */
+guint64 prop_copy_uint8_array (guint8 *prop, guint size,
+    guint8 **buffer, guint64 *bsize, guint64 *offset);
+guint64 prop_copy_uint16_array (guint16 *prop, guint size,
+    guint8 **buffer, guint64 *bsize, guint64 *offset);
+guint64 prop_copy_uint32_array (guint32 *prop, guint size,
+    guint8 **buffer, guint64 *bsize, guint64 *offset);
+guint64 prop_copy_uint64_array (guint64 *prop, guint size,
+    guint8 **buffer, guint64 *bsize, guint64 *offset);
+
+/* fourccs are written in character (little-endian guint32) order */
+guint64 prop_copy_fourcc (guint32 prop, guint8 **buffer, guint64 *size, guint64 *offset);
+guint64 prop_copy_fourcc_array (guint32 *prop, guint size,
+    guint8 **buffer, guint64 *bsize, guint64 *offset);
+/* string copies: raw bytes / one-byte length prefix / with trailing NUL */
+guint64 prop_copy_fixed_size_string (guint8 *string, guint str_size,
+    guint8 **buffer, guint64 *size, guint64 *offset);
+guint64 prop_copy_size_string (guint8 *string, guint str_size,
+    guint8 **buffer, guint64 *size, guint64 *offset);
+guint64 prop_copy_null_terminated_string (gchar *string,
+    guint8 **buffer, guint64 *size, guint64 *offset);
+
+#endif /* __PROPERTIES_H__ */
diff --git a/gst/isomp4/qtatomparser.h b/gst/isomp4/qtatomparser.h
new file mode 100644
index 0000000000..89bab50d8e
--- /dev/null
+++ b/gst/isomp4/qtatomparser.h
@@ -0,0 +1,139 @@
+/* GStreamer QuickTime atom parser
+ * Copyright (C) 2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef QT_ATOM_PARSER_H
+#define QT_ATOM_PARSER_H
+
+#include <gst/base/gstbytereader.h>
+
+/* our inlined version of GstByteReader */
+
+/* TRUE if at least @bytes_needed bytes remain between the current read
+ * position and the end of the data.  Written as two comparisons so a huge
+ * 64-bit @bytes_needed cannot overflow the arithmetic. */
+static inline gboolean
+qt_atom_parser_has_remaining (GstByteReader * parser, guint64 bytes_needed)
+{
+  return G_LIKELY (parser->size >= bytes_needed) &&
+      G_LIKELY ((parser->size - bytes_needed) >= parser->byte);
+}
+
+/* TRUE if n_chunks chunks of chunk_size bytes each are still available. */
+static inline gboolean
+qt_atom_parser_has_chunks (GstByteReader * parser, guint32 n_chunks,
+    guint32 chunk_size)
+{
+  /* assumption: n_chunks and chunk_size are 32-bit, we cast to 64-bit here
+   * to avoid overflows, to handle e.g. (guint32)-1 * size correctly */
+  return qt_atom_parser_has_remaining (parser, (guint64) n_chunks * chunk_size);
+}
+
+/* Initialize @sub as a sub-reader over @parser's data, positioned @offset
+ * bytes past @parser's current position.  Returns TRUE only if @size bytes
+ * are then available in @sub.  @parser itself is never advanced. */
+static inline gboolean
+qt_atom_parser_peek_sub (GstByteReader * parser, guint offset, guint size,
+    GstByteReader * sub)
+{
+  /* struct copy: @sub shares the data pointer but has its own position */
+  *sub = *parser;
+
+  if (G_UNLIKELY (!gst_byte_reader_skip (sub, offset)))
+    return FALSE;
+
+  return (gst_byte_reader_get_remaining (sub) >= size);
+}
+
+/* Skip @bytes_to_skip bytes and then read a big-endian guint32 into @val.
+ * Returns FALSE (without moving the reader) if not enough data remains. */
+static inline gboolean
+qt_atom_parser_skipn_and_get_uint32 (GstByteReader * parser,
+    guint bytes_to_skip, guint32 * val)
+{
+  guint remaining = gst_byte_reader_get_remaining (parser);
+
+  /* check the two amounts separately: the naive form
+   * (remaining < bytes_to_skip + 4) wraps around for
+   * bytes_to_skip > G_MAXUINT - 4 and would wrongly pass the bounds
+   * check right before the _unchecked accesses below */
+  if (G_UNLIKELY (remaining < bytes_to_skip || remaining - bytes_to_skip < 4))
+    return FALSE;
+
+  gst_byte_reader_skip_unchecked (parser, bytes_to_skip);
+  *val = gst_byte_reader_get_uint32_be_unchecked (parser);
+  return TRUE;
+}
+
+/* off_size must be either 4 or 8 */
+/* Read a 32- or 64-bit big-endian file offset into @val; returns FALSE
+ * (leaving the reader untouched) when fewer than @off_size bytes remain. */
+static inline gboolean
+qt_atom_parser_get_offset (GstByteReader * parser, guint off_size,
+    guint64 * val)
+{
+  if (G_UNLIKELY (gst_byte_reader_get_remaining (parser) < off_size))
+    return FALSE;
+
+  if (off_size == sizeof (guint64)) {
+    *val = gst_byte_reader_get_uint64_be_unchecked (parser);
+  } else {
+    /* any other size is treated as 4 bytes, per the contract above */
+    *val = gst_byte_reader_get_uint32_be_unchecked (parser);
+  }
+  return TRUE;
+}
+
+/* off_size must be either 4 or 8 */
+/* Like qt_atom_parser_get_offset() but without any bounds checking --
+ * the caller must already have verified that @off_size bytes remain. */
+static inline guint64
+qt_atom_parser_get_offset_unchecked (GstByteReader * parser, guint off_size)
+{
+  if (off_size == sizeof (guint64)) {
+    return gst_byte_reader_get_uint64_be_unchecked (parser);
+  } else {
+    return gst_byte_reader_get_uint32_be_unchecked (parser);
+  }
+}
+
+/* size must be from 1 to 4 */
+/* Read an unsigned big-endian integer of @size bytes, widened to guint32.
+ * No bounds checking: the caller guarantees @size bytes remain. */
+static inline guint32
+qt_atom_parser_get_uint_with_size_unchecked (GstByteReader * parser,
+    guint size)
+{
+  switch (size) {
+    case 1:
+      return gst_byte_reader_get_uint8_unchecked (parser);
+    case 2:
+      return gst_byte_reader_get_uint16_be_unchecked (parser);
+    case 3:
+      return gst_byte_reader_get_uint24_be_unchecked (parser);
+    case 4:
+      return gst_byte_reader_get_uint32_be_unchecked (parser);
+    default:
+      /* contract violation; with assertions disabled we still skip the
+       * bytes so the reader position stays consistent, and return 0 */
+      g_assert_not_reached ();
+      gst_byte_reader_skip_unchecked (parser, size);
+      break;
+  }
+  return 0;
+}
+
+/* Read the next 4 bytes as a fourcc.  The big-endian read plus byte swap
+ * yields the host-order guint32 layout that GST_MAKE_FOURCC produces, so
+ * *fourcc compares directly against FOURCC_* constants. */
+static inline gboolean
+qt_atom_parser_get_fourcc (GstByteReader * parser, guint32 * fourcc)
+{
+  guint32 f_be;
+
+  if (G_UNLIKELY (gst_byte_reader_get_remaining (parser) < 4))
+    return FALSE;
+
+  f_be = gst_byte_reader_get_uint32_be_unchecked (parser);
+  *fourcc = GUINT32_SWAP_LE_BE (f_be);
+  return TRUE;
+}
+
+/* Unchecked variant of qt_atom_parser_get_fourcc(): caller must have
+ * verified that at least 4 bytes remain. */
+static inline guint32
+qt_atom_parser_get_fourcc_unchecked (GstByteReader * parser)
+{
+  guint32 fourcc;
+
+  fourcc = gst_byte_reader_get_uint32_be_unchecked (parser);
+  return GUINT32_SWAP_LE_BE (fourcc);
+}
+
+#endif /* QT_ATOM_PARSER_H */
diff --git a/gst/isomp4/qtdemux.c b/gst/isomp4/qtdemux.c
new file mode 100644
index 0000000000..a1a2b918a7
--- /dev/null
+++ b/gst/isomp4/qtdemux.c
@@ -0,0 +1,15031 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
+ * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ * Copyright (C) <2013> Sreerenj Balachandran <sreerenj.balachandran@intel.com>
+ * Copyright (C) <2013> Intel Corporation
+ * Copyright (C) <2014> Centricular Ltd
+ * Copyright (C) <2015> YouView TV Ltd.
+ * Copyright (C) <2016> British Broadcasting Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-qtdemux
+ * @title: qtdemux
+ *
+ * Demuxes a .mov file into raw or compressed audio and/or video streams.
+ *
+ * This element supports both push and pull-based scheduling, depending on the
+ * capabilities of the upstream elements.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=test.mov ! qtdemux name=demux demux.audio_0 ! queue ! decodebin ! audioconvert ! audioresample ! autoaudiosink demux.video_0 ! queue ! decodebin ! videoconvert ! videoscale ! autovideosink
+ * ]| Play (parse and decode) a .mov file and try to output it to
+ * an automatically detected soundcard and videosink. If the MOV file contains
+ * compressed audio or video data, this will only work if you have the
+ * right decoder elements/plugins installed.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gst/gst-i18n-plugin.h"
+
+#include <glib/gprintf.h>
+#include <gst/base/base.h>
+#include <gst/tag/tag.h>
+#include <gst/audio/audio.h>
+#include <gst/riff/riff.h>
+#include <gst/pbutils/pbutils.h>
+
+#include "gstisomp4elements.h"
+#include "qtatomparser.h"
+#include "qtdemux_types.h"
+#include "qtdemux_dump.h"
+#include "fourcc.h"
+#include "descriptors.h"
+#include "qtdemux_lang.h"
+#include "qtdemux.h"
+#include "qtpalette.h"
+#include "qtdemux_tags.h"
+#include "qtdemux_tree.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <math.h>
+#include <gst/math-compat.h>
+
+#ifdef HAVE_ZLIB
+# include <zlib.h>
+#endif
+
+/* max. size considered 'sane' for non-mdat atoms */
+#define QTDEMUX_MAX_ATOM_SIZE (32*1024*1024)
+
+/* if the sample index is larger than this, something is likely wrong */
+#define QTDEMUX_MAX_SAMPLE_INDEX_SIZE (200*1024*1024)
+
+/* For converting qt creation times to unix epoch times */
+#define QTDEMUX_SECONDS_PER_DAY (60 * 60 * 24)
+#define QTDEMUX_LEAP_YEARS_FROM_1904_TO_1970 17
+#define QTDEMUX_SECONDS_FROM_1904_TO_1970 (((1970 - 1904) * (guint64) 365 + \
+ QTDEMUX_LEAP_YEARS_FROM_1904_TO_1970) * QTDEMUX_SECONDS_PER_DAY)
+
+#define QTDEMUX_TREE_NODE_FOURCC(n) (QT_FOURCC(((guint8 *) (n)->data) + 4))
+
+#define STREAM_IS_EOS(s) ((s)->time_position == GST_CLOCK_TIME_NONE)
+
+#define ABSDIFF(x, y) ( (x) > (y) ? ((x) - (y)) : ((y) - (x)) )
+
+#define QTDEMUX_STREAM(s) ((QtDemuxStream *)(s))
+#define QTDEMUX_N_STREAMS(demux) ((demux)->active_streams->len)
+#define QTDEMUX_NTH_STREAM(demux,idx) \
+ QTDEMUX_STREAM(g_ptr_array_index((demux)->active_streams,idx))
+#define QTDEMUX_NTH_OLD_STREAM(demux,idx) \
+ QTDEMUX_STREAM(g_ptr_array_index((demux)->old_streams,idx))
+
+#define CUR_STREAM(s) (&((s)->stsd_entries[(s)->cur_stsd_entry_index]))
+
+GST_DEBUG_CATEGORY (qtdemux_debug);
+#define GST_CAT_DEFAULT qtdemux_debug
+
+typedef struct _QtDemuxCencSampleSetInfo QtDemuxCencSampleSetInfo;
+typedef struct _QtDemuxAavdEncryptionInfo QtDemuxAavdEncryptionInfo;
+
+/* Macros for converting to/from timescale */
+#define QTSTREAMTIME_TO_GSTTIME(stream, value) (gst_util_uint64_scale((value), GST_SECOND, (stream)->timescale))
+#define GSTTIME_TO_QTSTREAMTIME(stream, value) (gst_util_uint64_scale((value), (stream)->timescale, GST_SECOND))
+
+#define QTTIME_TO_GSTTIME(qtdemux, value) (gst_util_uint64_scale((value), GST_SECOND, (qtdemux)->timescale))
+#define GSTTIME_TO_QTTIME(qtdemux, value) (gst_util_uint64_scale((value), (qtdemux)->timescale, GST_SECOND))
+
+/* timestamp is the DTS */
+#define QTSAMPLE_DTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp))
+/* timestamp + offset + cslg_shift is the outgoing PTS */
+#define QTSAMPLE_PTS(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (stream)->cslg_shift + (sample)->pts_offset))
+/* timestamp + offset is the PTS used for internal seek calculations */
+#define QTSAMPLE_PTS_NO_CSLG(stream,sample) (QTSTREAMTIME_TO_GSTTIME((stream), (sample)->timestamp + (sample)->pts_offset))
+/* timestamp + duration - dts is the duration */
+#define QTSAMPLE_DUR_DTS(stream, sample, dts) (QTSTREAMTIME_TO_GSTTIME ((stream), (sample)->timestamp + (sample)->duration) - (dts))
+
+#define QTSAMPLE_KEYFRAME(stream,sample) ((stream)->all_keyframe || (sample)->keyframe)
+
+#define QTDEMUX_EXPOSE_GET_LOCK(demux) (&((demux)->expose_lock))
+#define QTDEMUX_EXPOSE_LOCK(demux) G_STMT_START { \
+ GST_TRACE("Locking from thread %p", g_thread_self()); \
+ g_mutex_lock (QTDEMUX_EXPOSE_GET_LOCK (demux)); \
+ GST_TRACE("Locked from thread %p", g_thread_self()); \
+ } G_STMT_END
+
+#define QTDEMUX_EXPOSE_UNLOCK(demux) G_STMT_START { \
+ GST_TRACE("Unlocking from thread %p", g_thread_self()); \
+ g_mutex_unlock (QTDEMUX_EXPOSE_GET_LOCK (demux)); \
+ } G_STMT_END
+
+/*
+ * Quicktime has tracks and segments. A track is a continuous piece of
+ * multimedia content. The track is not always played from start to finish but
+ * instead, pieces of the track are 'cut out' and played in sequence. This is
+ * what the segments do.
+ *
+ * Inside the track we have keyframes (K) and delta frames. The track has its
+ * own timing, which starts from 0 and extends to end. The position in the track
+ * is called the media_time.
+ *
+ * The segments now describe the pieces that should be played from this track
+ * and are basically tuples of media_time/duration/rate entries. We can have
+ * multiple segments and they are all played after one another. An example:
+ *
+ * segment 1: media_time: 1 second, duration: 1 second, rate 1
+ * segment 2: media_time: 3 second, duration: 2 second, rate 2
+ *
+ * To correctly play back this track, one must play: 1 second of media starting
+ * from media_time 1 followed by 2 seconds of media starting from media_time 3
+ * at a rate of 2.
+ *
+ * Each of the segments will be played at a specific time, the first segment at
+ * time 0, the second one after the duration of the first one, etc.. Note that
+ * the time in resulting playback is not identical to the media_time of the
+ * track anymore.
+ *
+ * Visually, assuming the track has 4 second of media_time:
+ *
+ * (a) (b) (c) (d)
+ * .-----------------------------------------------------------.
+ * track: | K.....K.........K........K.......K.......K...........K... |
+ * '-----------------------------------------------------------'
+ * 0 1 2 3 4
+ * .------------^ ^ .----------^ ^
+ * / .-------------' / .------------------'
+ * / / .-----' /
+ * .--------------. .--------------.
+ * | segment 1 | | segment 2 |
+ * '--------------' '--------------'
+ *
+ * The challenge here is to cut out the right pieces of the track for each of
+ * the playback segments. This fortunately can easily be done with the SEGMENT
+ * events of GStreamer.
+ *
+ * For playback of segment 1, we need to provide the decoder with the keyframe
+ * (a), in the above figure, but we must instruct it only to output the decoded
+ * data between second 1 and 2. We do this with a SEGMENT event for 1 to 2, time
+ * position set to the time of the segment: 0.
+ *
+ * We then proceed to push data from keyframe (a) to frame (b). The decoder
+ * decodes but clips all before media_time 1.
+ *
+ * After finishing a segment, we push out a new SEGMENT event with the clipping
+ * boundaries of the new data.
+ *
+ * This is a good usecase for the GStreamer accumulated SEGMENT events.
+ */
+
+/* One playback segment of a track; see the long segment discussion above. */
+struct _QtDemuxSegment
+{
+  /* global time and duration, all gst time */
+  GstClockTime time;
+  GstClockTime stop_time;
+  GstClockTime duration;
+  /* media time of trak, all gst time */
+  GstClockTime media_start;
+  GstClockTime media_stop;
+  gdouble rate;
+  /* Media start time in trak timescale units */
+  guint32 trak_media_start;
+};
+
+/* an empty segment maps no media at all (media_start unset) */
+#define QTSEGMENT_IS_EMPTY(s) ((s)->media_start == GST_CLOCK_TIME_NONE)
+
+/* Used with fragmented MP4 files (mfra atom) */
+struct _QtDemuxRandomAccessEntry
+{
+  /* presumably the presentation time of the random access point and the
+   * file offset of its containing moof -- TODO confirm against tfra use */
+  GstClockTime ts;
+  guint64 moof_offset;
+};
+
+
+/* Contains properties and cryptographic info for a set of samples from a
+ * track protected using Common Encryption (cenc) */
+struct _QtDemuxCencSampleSetInfo
+{
+  GstStructure *default_properties;
+
+  /* @crypto_info holds one GstStructure per sample */
+  GPtrArray *crypto_info;
+};
+
+/* Encryption parameters for Audible's aavd scheme */
+struct _QtDemuxAavdEncryptionInfo
+{
+  GstStructure *default_properties;
+};
+
+/* Human-readable name for a demuxer state; used in debug logging only. */
+static const gchar *
+qt_demux_state_string (enum QtDemuxState state)
+{
+  switch (state) {
+    case QTDEMUX_STATE_INITIAL:
+      return "<INITIAL>";
+    case QTDEMUX_STATE_HEADER:
+      return "<HEADER>";
+    case QTDEMUX_STATE_MOVIE:
+      return "<MOVIE>";
+    case QTDEMUX_STATE_BUFFER_MDAT:
+      return "<BUFFER_MDAT>";
+    default:
+      return "<UNKNOWN>";
+  }
+}
+
+static GstFlowReturn qtdemux_add_fragmented_samples (GstQTDemux * qtdemux);
+
+static void gst_qtdemux_check_send_pending_segment (GstQTDemux * demux);
+
+/* Single always-present sink pad accepting the container formats we demux. */
+static GstStaticPadTemplate gst_qtdemux_sink_template =
+    GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/quicktime; video/mj2; audio/x-m4a; "
+        "application/x-3gp")
+    );
+
+/* Sometimes-pads: one per elementary stream found in the file; caps are
+ * ANY here because the actual caps depend on the parsed stsd entries. */
+static GstStaticPadTemplate gst_qtdemux_videosrc_template =
+GST_STATIC_PAD_TEMPLATE ("video_%u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_qtdemux_audiosrc_template =
+GST_STATIC_PAD_TEMPLATE ("audio_%u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_qtdemux_subsrc_template =
+GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS_ANY);
+
+/* GObject type boilerplate and element registration (runs
+ * isomp4_element_init() once for shared per-plugin setup). */
+#define gst_qtdemux_parent_class parent_class
+G_DEFINE_TYPE (GstQTDemux, gst_qtdemux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (qtdemux, "qtdemux",
+    GST_RANK_PRIMARY, GST_TYPE_QTDEMUX, isomp4_element_init (plugin));
+
+static void gst_qtdemux_dispose (GObject * object);
+static void gst_qtdemux_finalize (GObject * object);
+
+static guint32
+gst_qtdemux_find_index_linear (GstQTDemux * qtdemux, QtDemuxStream * str,
+ GstClockTime media_time);
+static guint32
+gst_qtdemux_find_index_for_given_media_offset_linear (GstQTDemux * qtdemux,
+ QtDemuxStream * str, gint64 media_offset);
+
+#if 0
+static void gst_qtdemux_set_index (GstElement * element, GstIndex * index);
+static GstIndex *gst_qtdemux_get_index (GstElement * element);
+#endif
+static GstStateChangeReturn gst_qtdemux_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_qtdemux_set_context (GstElement * element,
+ GstContext * context);
+static gboolean qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent);
+static gboolean qtdemux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+
+static void gst_qtdemux_loop (GstPad * pad);
+static GstFlowReturn gst_qtdemux_chain (GstPad * sinkpad, GstObject * parent,
+ GstBuffer * inbuf);
+static gboolean gst_qtdemux_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_qtdemux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_qtdemux_setcaps (GstQTDemux * qtdemux, GstCaps * caps);
+static gboolean gst_qtdemux_configure_stream (GstQTDemux * qtdemux,
+ QtDemuxStream * stream);
+static void gst_qtdemux_stream_check_and_change_stsd_index (GstQTDemux * demux,
+ QtDemuxStream * stream);
+static GstFlowReturn gst_qtdemux_process_adapter (GstQTDemux * demux,
+ gboolean force);
+
+static void gst_qtdemux_check_seekability (GstQTDemux * demux);
+
+static gboolean qtdemux_parse_moov (GstQTDemux * qtdemux,
+ const guint8 * buffer, guint length);
+static gboolean qtdemux_parse_node (GstQTDemux * qtdemux, GNode * node,
+ const guint8 * buffer, guint length);
+static gboolean qtdemux_parse_tree (GstQTDemux * qtdemux);
+
+static void gst_qtdemux_handle_esds (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, GNode * esds,
+ GstTagList * list);
+static GstCaps *qtdemux_video_caps (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name);
+static GstCaps *qtdemux_audio_caps (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * data, int len, gchar ** codec_name);
+static GstCaps *qtdemux_sub_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, guint32 fourcc, const guint8 * data,
+ gchar ** codec_name);
+static GstCaps *qtdemux_generic_caps (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name);
+
+static gboolean qtdemux_parse_samples (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint32 n);
+static GstFlowReturn qtdemux_expose_streams (GstQTDemux * qtdemux);
+static QtDemuxStream *gst_qtdemux_stream_ref (QtDemuxStream * stream);
+static void gst_qtdemux_stream_unref (QtDemuxStream * stream);
+static void gst_qtdemux_stream_clear (QtDemuxStream * stream);
+static GstFlowReturn qtdemux_prepare_streams (GstQTDemux * qtdemux);
+static void qtdemux_do_allocation (QtDemuxStream * stream,
+ GstQTDemux * qtdemux);
+static gboolean gst_qtdemux_activate_segment (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint32 seg_idx, GstClockTime offset);
+static gboolean gst_qtdemux_stream_update_segment (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, gint seg_idx, GstClockTime offset,
+ GstClockTime * _start, GstClockTime * _stop);
+static void gst_qtdemux_send_gap_for_segment (GstQTDemux * demux,
+ QtDemuxStream * stream, gint segment_index, GstClockTime pos);
+
+static gboolean qtdemux_pull_mfro_mfra (GstQTDemux * qtdemux);
+static void check_update_duration (GstQTDemux * qtdemux, GstClockTime duration);
+
+static gchar *qtdemux_uuid_bytes_to_string (gconstpointer uuid_bytes);
+
+static GstStructure *qtdemux_get_cenc_sample_properties (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, guint sample_index);
+static void gst_qtdemux_append_protection_system_id (GstQTDemux * qtdemux,
+ const gchar * id);
+static void qtdemux_gst_structure_free (GstStructure * gststructure);
+static void gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard);
+
+/* Class initializer: wires up GObject/GstElement vfuncs, registers pad
+ * templates and element metadata, and initializes shared libraries. */
+static void
+gst_qtdemux_class_init (GstQTDemuxClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  parent_class = g_type_class_peek_parent (klass);
+
+  gobject_class->dispose = gst_qtdemux_dispose;
+  gobject_class->finalize = gst_qtdemux_finalize;
+
+  gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_qtdemux_change_state);
+#if 0
+  /* kept from the 0.10 API; GstIndex support is currently disabled */
+  gstelement_class->set_index = GST_DEBUG_FUNCPTR (gst_qtdemux_set_index);
+  gstelement_class->get_index = GST_DEBUG_FUNCPTR (gst_qtdemux_get_index);
+#endif
+  gstelement_class->set_context = GST_DEBUG_FUNCPTR (gst_qtdemux_set_context);
+
+  /* needed so musicbrainz tags parsed from files are known to the core */
+  gst_tag_register_musicbrainz_tags ();
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_qtdemux_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_qtdemux_videosrc_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_qtdemux_audiosrc_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_qtdemux_subsrc_template);
+  gst_element_class_set_static_metadata (gstelement_class, "QuickTime demuxer",
+      "Codec/Demuxer",
+      "Demultiplex a QuickTime file into audio and video streams",
+      "David Schleef <ds@schleef.org>, Wim Taymans <wim@fluendo.com>");
+
+  GST_DEBUG_CATEGORY_INIT (qtdemux_debug, "qtdemux", 0, "qtdemux plugin");
+  /* riff helpers are used for some codec data parsing */
+  gst_riff_init ();
+}
+
/* Per-instance initialisation: creates and wires the sink pad, allocates
 * the adapter / flow combiner / stream arrays, and resets all demuxing
 * state via gst_qtdemux_reset(). */
static void
gst_qtdemux_init (GstQTDemux * qtdemux)
{
  qtdemux->sinkpad =
      gst_pad_new_from_static_template (&gst_qtdemux_sink_template, "sink");
  gst_pad_set_activate_function (qtdemux->sinkpad, qtdemux_sink_activate);
  gst_pad_set_activatemode_function (qtdemux->sinkpad,
      qtdemux_sink_activate_mode);
  gst_pad_set_chain_function (qtdemux->sinkpad, gst_qtdemux_chain);
  gst_pad_set_event_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_event);
  gst_pad_set_query_function (qtdemux->sinkpad, gst_qtdemux_handle_sink_query);
  gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), qtdemux->sinkpad);

  /* adapter buffers incoming data in push mode */
  qtdemux->adapter = gst_adapter_new ();
  g_queue_init (&qtdemux->protection_event_queue);
  /* combines per-pad flow returns into one upstream return value */
  qtdemux->flowcombiner = gst_flow_combiner_new ();
  g_mutex_init (&qtdemux->expose_lock);

  /* both arrays own a ref on each stream they hold */
  qtdemux->active_streams = g_ptr_array_new_with_free_func
      ((GDestroyNotify) gst_qtdemux_stream_unref);
  qtdemux->old_streams = g_ptr_array_new_with_free_func
      ((GDestroyNotify) gst_qtdemux_stream_unref);

  GST_OBJECT_FLAG_SET (qtdemux, GST_ELEMENT_FLAG_INDEXABLE);

  /* hard reset: puts every field into its pristine state */
  gst_qtdemux_reset (qtdemux, TRUE);
}
+
/* GObject finalize: releases plain-memory members; ref-counted members
 * are dropped in dispose() instead. */
static void
gst_qtdemux_finalize (GObject * object)
{
  GstQTDemux *qtdemux = GST_QTDEMUX (object);

  g_free (qtdemux->redirect_location);

  /* chain up so parent classes can release their own resources */
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
+static void
+gst_qtdemux_dispose (GObject * object)
+{
+ GstQTDemux *qtdemux = GST_QTDEMUX (object);
+
+ if (qtdemux->adapter) {
+ g_object_unref (G_OBJECT (qtdemux->adapter));
+ qtdemux->adapter = NULL;
+ }
+ gst_tag_list_unref (qtdemux->tag_list);
+ gst_flow_combiner_free (qtdemux->flowcombiner);
+ g_queue_foreach (&qtdemux->protection_event_queue, (GFunc) gst_event_unref,
+ NULL);
+ g_queue_clear (&qtdemux->protection_event_queue);
+
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ g_mutex_clear (&qtdemux->expose_lock);
+
+ g_ptr_array_free (qtdemux->active_streams, TRUE);
+ g_ptr_array_free (qtdemux->old_streams, TRUE);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
/* Post an element error saying no playable stream was found. If a
 * redirect location was seen earlier, attach it as structured error
 * detail so applications can follow the redirect. */
static void
gst_qtdemux_post_no_playable_stream_error (GstQTDemux * qtdemux)
{
  if (qtdemux->redirect_location) {
    GST_ELEMENT_ERROR_WITH_DETAILS (qtdemux, STREAM, DEMUX,
        (_("This file contains no playable streams.")),
        ("no known streams found, a redirect message has been posted"),
        ("redirect-location", G_TYPE_STRING, qtdemux->redirect_location, NULL));
  } else {
    GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
        (_("This file contains no playable streams.")),
        ("no known streams found"));
  }
}
+
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+ return gst_buffer_new_wrapped_full (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, size, 0, size, mem, free_func);
+}
+
/* Pull @size bytes at @offset from upstream into @buf.
 *
 * A @size of 0 means "read the atom header first": the 32-bit size field
 * at @offset is fetched and used as the actual pull size. Sizes beyond
 * QTDEMUX_MAX_ATOM_SIZE are rejected to protect against fuzzed/broken
 * files. Short reads never return a partial atom; they are reported as
 * GST_FLOW_EOS with *buf left NULL. */
static GstFlowReturn
gst_qtdemux_pull_atom (GstQTDemux * qtdemux, guint64 offset, guint64 size,
    GstBuffer ** buf)
{
  GstFlowReturn flow;
  GstMapInfo map;
  gsize bsize;

  if (G_UNLIKELY (size == 0)) {
    GstFlowReturn ret;
    GstBuffer *tmp = NULL;

    /* recurse once to fetch just the 32-bit atom size field */
    ret = gst_qtdemux_pull_atom (qtdemux, offset, sizeof (guint32), &tmp);
    if (ret != GST_FLOW_OK)
      return ret;

    gst_buffer_map (tmp, &map, GST_MAP_READ);
    size = QT_UINT32 (map.data);
    GST_DEBUG_OBJECT (qtdemux, "size 0x%08" G_GINT64_MODIFIER "x", size);

    gst_buffer_unmap (tmp, &map);
    gst_buffer_unref (tmp);
  }

  /* Sanity check: catch bogus sizes (fuzzed/broken files) */
  if (G_UNLIKELY (size > QTDEMUX_MAX_ATOM_SIZE)) {
    if (qtdemux->state != QTDEMUX_STATE_MOVIE && qtdemux->got_moov) {
      /* we're pulling header but already got most interesting bits,
       * so never mind the rest (e.g. tags) (that much) */
      GST_WARNING_OBJECT (qtdemux, "atom has bogus size %" G_GUINT64_FORMAT,
          size);
      return GST_FLOW_EOS;
    } else {
      GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
          (_("This file is invalid and cannot be played.")),
          ("atom has bogus size %" G_GUINT64_FORMAT, size));
      return GST_FLOW_ERROR;
    }
  }

  flow = gst_pad_pull_range (qtdemux->sinkpad, offset, size, buf);

  if (G_UNLIKELY (flow != GST_FLOW_OK))
    return flow;

  bsize = gst_buffer_get_size (*buf);
  /* Catch short reads - we don't want any partial atoms */
  if (G_UNLIKELY (bsize < size)) {
    GST_WARNING_OBJECT (qtdemux,
        "short read: %" G_GSIZE_FORMAT " < %" G_GUINT64_FORMAT, bsize, size);
    gst_buffer_unref (*buf);
    *buf = NULL;
    return GST_FLOW_EOS;
  }

  return flow;
}
+
+#if 1
+static gboolean
+gst_qtdemux_src_convert (GstQTDemux * qtdemux, GstPad * pad,
+ GstFormat src_format, gint64 src_value, GstFormat dest_format,
+ gint64 * dest_value)
+{
+ gboolean res = TRUE;
+ QtDemuxStream *stream = gst_pad_get_element_private (pad);
+ gint32 index;
+
+ if (stream->subtype != FOURCC_vide) {
+ res = FALSE;
+ goto done;
+ }
+
+ switch (src_format) {
+ case GST_FORMAT_TIME:
+ switch (dest_format) {
+ case GST_FORMAT_BYTES:{
+ index = gst_qtdemux_find_index_linear (qtdemux, stream, src_value);
+ if (-1 == index) {
+ res = FALSE;
+ goto done;
+ }
+
+ *dest_value = stream->samples[index].offset;
+
+ GST_DEBUG_OBJECT (qtdemux, "Format Conversion Time->Offset :%"
+ GST_TIME_FORMAT "->%" G_GUINT64_FORMAT,
+ GST_TIME_ARGS (src_value), *dest_value);
+ break;
+ }
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ case GST_FORMAT_BYTES:
+ switch (dest_format) {
+ case GST_FORMAT_TIME:{
+ index =
+ gst_qtdemux_find_index_for_given_media_offset_linear (qtdemux,
+ stream, src_value);
+
+ if (-1 == index) {
+ res = FALSE;
+ goto done;
+ }
+
+ *dest_value =
+ QTSTREAMTIME_TO_GSTTIME (stream,
+ stream->samples[index].timestamp);
+ GST_DEBUG_OBJECT (qtdemux,
+ "Format Conversion Offset->Time :%" G_GUINT64_FORMAT "->%"
+ GST_TIME_FORMAT, src_value, GST_TIME_ARGS (*dest_value));
+ break;
+ }
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+done:
+ return res;
+}
+#endif
+
+static gboolean
+gst_qtdemux_get_duration (GstQTDemux * qtdemux, GstClockTime * duration)
+{
+ gboolean res = FALSE;
+
+ *duration = GST_CLOCK_TIME_NONE;
+
+ if (qtdemux->duration != 0 &&
+ qtdemux->duration != G_MAXINT64 && qtdemux->timescale != 0) {
+ *duration = QTTIME_TO_GSTTIME (qtdemux, qtdemux->duration);
+ res = TRUE;
+ } else {
+ *duration = GST_CLOCK_TIME_NONE;
+ }
+
+ return res;
+}
+
/* Source-pad query handler: answers POSITION, DURATION, CONVERT,
 * FORMATS, SEEKING and SEGMENT queries; anything else is forwarded with
 * the default handler. Returns TRUE when the query was answered. */
static gboolean
gst_qtdemux_handle_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query)
{
  gboolean res = FALSE;
  GstQTDemux *qtdemux = GST_QTDEMUX (parent);

  GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:{
      GstFormat fmt;

      /* only answered in TIME and only once we have a valid position */
      gst_query_parse_position (query, &fmt, NULL);
      if (fmt == GST_FORMAT_TIME
          && GST_CLOCK_TIME_IS_VALID (qtdemux->segment.position)) {
        gst_query_set_position (query, GST_FORMAT_TIME,
            qtdemux->segment.position);
        res = TRUE;
      }
    }
      break;
    case GST_QUERY_DURATION:{
      GstFormat fmt;

      gst_query_parse_duration (query, &fmt, NULL);
      if (fmt == GST_FORMAT_TIME) {
        /* First try to query upstream */
        res = gst_pad_query_default (pad, parent, query);
        if (!res) {
          GstClockTime duration;
          /* fall back to the duration from the file header */
          if (gst_qtdemux_get_duration (qtdemux, &duration) && duration > 0) {
            gst_query_set_duration (query, GST_FORMAT_TIME, duration);
            res = TRUE;
          }
        }
      }
      break;
    }
    case GST_QUERY_CONVERT:{
      GstFormat src_fmt, dest_fmt;
      gint64 src_value, dest_value = 0;

      gst_query_parse_convert (query, &src_fmt, &src_value, &dest_fmt, NULL);

      /* delegate to the sample-table based converter */
      res = gst_qtdemux_src_convert (qtdemux, pad,
          src_fmt, src_value, dest_fmt, &dest_value);
      if (res)
        gst_query_set_convert (query, src_fmt, src_value, dest_fmt, dest_value);

      break;
    }
    case GST_QUERY_FORMATS:
      gst_query_set_formats (query, 2, GST_FORMAT_TIME, GST_FORMAT_BYTES);
      res = TRUE;
      break;
    case GST_QUERY_SEEKING:{
      GstFormat fmt;
      gboolean seekable;

      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);

      if (fmt == GST_FORMAT_BYTES) {
        /* We always refuse BYTES seeks from downstream */
        break;
      }

      /* try upstream first */
      res = gst_pad_query_default (pad, parent, query);

      if (!res) {
        gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
        if (fmt == GST_FORMAT_TIME) {
          GstClockTime duration;

          gst_qtdemux_get_duration (qtdemux, &duration);
          seekable = TRUE;
          if (!qtdemux->pullbased) {
            GstQuery *q;

            /* we might be able with help from upstream */
            seekable = FALSE;
            q = gst_query_new_seeking (GST_FORMAT_BYTES);
            if (gst_pad_peer_query (qtdemux->sinkpad, q)) {
              gst_query_parse_seeking (q, &fmt, &seekable, NULL, NULL);
              GST_LOG_OBJECT (qtdemux, "upstream BYTE seekable %d", seekable);
            }
            gst_query_unref (q);
          }
          gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, 0, duration);
          res = TRUE;
        }
      }
      break;
    }
    case GST_QUERY_SEGMENT:
    {
      GstFormat format;
      gint64 start, stop;

      format = qtdemux->segment.format;

      /* report the playback range in stream time */
      start =
          gst_segment_to_stream_time (&qtdemux->segment, format,
          qtdemux->segment.start);
      if ((stop = qtdemux->segment.stop) == -1)
        stop = qtdemux->segment.duration;
      else
        stop = gst_segment_to_stream_time (&qtdemux->segment, format, stop);

      gst_query_set_segment (query, qtdemux->segment.rate, format, start, stop);
      res = TRUE;
      break;
    }
    default:
      res = gst_pad_query_default (pad, parent, query);
      break;
  }

  return res;
}
+
+static void
+gst_qtdemux_push_tags (GstQTDemux * qtdemux, QtDemuxStream * stream)
+{
+ if (G_LIKELY (stream->pad)) {
+ GST_DEBUG_OBJECT (qtdemux, "Checking pad %s:%s for tags",
+ GST_DEBUG_PAD_NAME (stream->pad));
+
+ if (!gst_tag_list_is_empty (stream->stream_tags)) {
+ GST_DEBUG_OBJECT (qtdemux, "Sending tags %" GST_PTR_FORMAT,
+ stream->stream_tags);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_tag (gst_tag_list_ref (stream->stream_tags)));
+ }
+
+ if (G_UNLIKELY (stream->send_global_tags)) {
+ GST_DEBUG_OBJECT (qtdemux, "Sending global tags %" GST_PTR_FORMAT,
+ qtdemux->tag_list);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_tag (gst_tag_list_ref (qtdemux->tag_list)));
+ stream->send_global_tags = FALSE;
+ }
+ }
+}
+
/* push event on all source pads; takes ownership of the event */
static void
gst_qtdemux_push_event (GstQTDemux * qtdemux, GstEvent * event)
{
  gboolean has_valid_stream = FALSE;
  GstEventType etype = GST_EVENT_TYPE (event);
  guint i;

  GST_DEBUG_OBJECT (qtdemux, "pushing %s event on all source pads",
      GST_EVENT_TYPE_NAME (event));

  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    GstPad *pad;
    QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
    GST_DEBUG_OBJECT (qtdemux, "pushing on track-id %u", stream->track_id);

    if ((pad = stream->pad)) {
      has_valid_stream = TRUE;

      if (etype == GST_EVENT_EOS) {
        /* let's not send twice */
        if (stream->sent_eos)
          continue;
        stream->sent_eos = TRUE;
      }

      /* each pad gets its own reference; we keep ours until the end */
      gst_pad_push_event (pad, gst_event_ref (event));
    }
  }

  /* drop the caller's reference (we took ownership) */
  gst_event_unref (event);

  /* if it is EOS and there are no pads, post an error */
  if (!has_valid_stream && etype == GST_EVENT_EOS) {
    gst_qtdemux_post_no_playable_stream_error (qtdemux);
  }
}
+
/* NOTE(review): appears unused — find_func() below receives the target
 * time directly rather than through this struct; confirm before removing */
typedef struct
{
  guint64 media_time;
} FindData;
+
+static gint
+find_func (QtDemuxSample * s1, gint64 * media_time, gpointer user_data)
+{
+ if ((gint64) s1->timestamp > *media_time)
+ return 1;
+ if ((gint64) s1->timestamp == *media_time)
+ return 0;
+
+ return -1;
+}
+
/* find the index of the sample that includes the data for @media_time using a
 * binary search. Only to be called in optimized cases of linear search below.
 *
 * @media_time is in GStreamer time and converted to the stream's
 * timescale here. Only the already-parsed part of the sample table
 * (up to stbl_index) is searched.
 *
 * Returns the index of the sample with the corresponding *DTS*.
 */
static guint32
gst_qtdemux_find_index (GstQTDemux * qtdemux, QtDemuxStream * str,
    guint64 media_time)
{
  QtDemuxSample *result;
  guint32 index;

  /* convert media_time to mov format */
  media_time =
      gst_util_uint64_scale_ceil (media_time, str->timescale, GST_SECOND);

  /* BEFORE mode: the best match at or before media_time */
  result = gst_util_array_binary_search (str->samples, str->stbl_index + 1,
      sizeof (QtDemuxSample), (GCompareDataFunc) find_func,
      GST_SEARCH_MODE_BEFORE, &media_time, NULL);

  if (G_LIKELY (result))
    index = result - str->samples;
  else
    index = 0;                  /* nothing at/before: fall back to start */

  return index;
}
+
+
+
/* find the index of the sample that includes the data for @media_offset using a
 * linear search
 *
 * Parses further samples on demand. Returns the index of the sample,
 * or -1 (wrapped into the guint32 return — callers compare against -1)
 * when there are no samples or parsing fails.
 */
static guint32
gst_qtdemux_find_index_for_given_media_offset_linear (GstQTDemux * qtdemux,
    QtDemuxStream * str, gint64 media_offset)
{
  QtDemuxSample *result = str->samples;
  guint32 index = 0;

  if (result == NULL || str->n_samples == 0)
    return -1;

  if (media_offset == result->offset)
    return index;

  /* walk forward until the next sample's offset passes media_offset;
   * the answer is then the last sample whose offset is <= media_offset */
  result++;
  while (index < str->n_samples - 1) {
    if (!qtdemux_parse_samples (qtdemux, str, index + 1))
      goto parse_failed;

    if (media_offset < result->offset)
      break;

    index++;
    result++;
  }
  return index;

  /* ERRORS */
parse_failed:
  {
    GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", index + 1);
    return -1;
  }
}
+
/* find the index of the sample that includes the data for @media_time using a
 * linear search, and keeping in mind that not all samples may have been parsed
 * yet. If possible, it will delegate to binary search.
 *
 * @media_time is in GStreamer time. Returns the index of the sample
 * whose *PTS* is at or before @media_time, or -1 on parse failure.
 *
 * NOTE(review): assumes at least one sample exists/was parsed before the
 * first dereference of str->samples — callers appear to guarantee this;
 * confirm. Also, if stbl_index is unsigned the `>= 0` test below is
 * always true — verify the field's type.
 */
static guint32
gst_qtdemux_find_index_linear (GstQTDemux * qtdemux, QtDemuxStream * str,
    GstClockTime media_time)
{
  guint32 index = 0;
  guint64 mov_time;
  QtDemuxSample *sample;

  /* convert media_time to mov format */
  mov_time =
      gst_util_uint64_scale_ceil (media_time, str->timescale, GST_SECOND);

  sample = str->samples;
  if (mov_time == sample->timestamp + sample->pts_offset)
    return index;

  /* use faster search if requested time in already parsed range */
  sample = str->samples + str->stbl_index;
  if (str->stbl_index >= 0 && mov_time <= sample->timestamp) {
    index = gst_qtdemux_find_index (qtdemux, str, media_time);
    sample = str->samples + index;
  } else {
    /* linear walk, parsing further samples on demand */
    while (index < str->n_samples - 1) {
      if (!qtdemux_parse_samples (qtdemux, str, index + 1))
        goto parse_failed;

      sample = str->samples + index + 1;
      if (mov_time < sample->timestamp) {
        sample = str->samples + index;
        break;
      }

      index++;
    }
  }

  /* sample->timestamp is now <= media_time, need to find the corresponding
   * PTS now by looking backwards */
  while (index > 0 && sample->timestamp + sample->pts_offset > mov_time) {
    index--;
    sample = str->samples + index;
  }

  return index;

  /* ERRORS */
parse_failed:
  {
    GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", index + 1);
    return -1;
  }
}
+
/* find the index of the keyframe needed to decode the sample at @index
 * of stream @str, or of a subsequent keyframe (depending on @next)
 *
 * Returns the index of the keyframe, str->n_samples when @index is out
 * of range, or -1 when searching forward finds no further keyframe.
 */
static guint32
gst_qtdemux_find_keyframe (GstQTDemux * qtdemux, QtDemuxStream * str,
    guint32 index, gboolean next)
{
  guint32 new_index = index;

  if (index >= str->n_samples) {
    new_index = str->n_samples;
    goto beach;
  }

  /* all keyframes, return index */
  if (str->all_keyframe) {
    new_index = index;
    goto beach;
  }

  /* else search until we have a keyframe */
  while (new_index < str->n_samples) {
    /* forward search may run past the parsed range, so parse on demand;
     * backward search only visits already-parsed samples */
    if (next && !qtdemux_parse_samples (qtdemux, str, new_index))
      goto parse_failed;

    if (str->samples[new_index].keyframe)
      break;

    /* checked after the keyframe test so sample 0 itself can match;
     * also prevents unsigned underflow when stepping backwards */
    if (new_index == 0)
      break;

    if (next)
      new_index++;
    else
      new_index--;
  }

  if (new_index == str->n_samples) {
    GST_DEBUG_OBJECT (qtdemux, "no next keyframe");
    new_index = -1;
  }

beach:
  GST_DEBUG_OBJECT (qtdemux, "searching for keyframe index %s index %u "
      "gave %u", next ? "after" : "before", index, new_index);

  return new_index;

  /* ERRORS */
parse_failed:
  {
    GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!", new_index);
    return -1;
  }
}
+
/* find the segment for @time_position for @stream
 *
 * Returns the index of the segment containing @time_position, the index
 * of the last segment when the time is beyond the end, or -1 when the
 * stream has no segments at all.
 */
static guint32
gst_qtdemux_find_segment (GstQTDemux * qtdemux, QtDemuxStream * stream,
    GstClockTime time_position)
{
  gint i;
  guint32 seg_idx;

  GST_LOG_OBJECT (stream->pad, "finding segment for %" GST_TIME_FORMAT,
      GST_TIME_ARGS (time_position));

  seg_idx = -1;
  for (i = 0; i < stream->n_segments; i++) {
    QtDemuxSegment *segment = &stream->segments[i];

    GST_LOG_OBJECT (stream->pad,
        "looking at segment %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
        GST_TIME_ARGS (segment->time), GST_TIME_ARGS (segment->stop_time));

    /* For the last segment we include stop_time in the last segment */
    if (i < stream->n_segments - 1) {
      if (segment->time <= time_position && time_position < segment->stop_time) {
        GST_LOG_OBJECT (stream->pad, "segment %d matches", i);
        seg_idx = i;
        break;
      }
    } else {
      /* Last segment always matches */
      seg_idx = i;
      break;
    }
  }
  return seg_idx;
}
+
+/* move the stream @str to the sample position @index.
+ *
+ * Updates @str->sample_index and marks discontinuity if needed.
+ */
+static void
+gst_qtdemux_move_stream (GstQTDemux * qtdemux, QtDemuxStream * str,
+ guint32 index)
+{
+ /* no change needed */
+ if (index == str->sample_index)
+ return;
+
+ GST_DEBUG_OBJECT (qtdemux, "moving to sample %u of %u", index,
+ str->n_samples);
+
+ /* position changed, we have a discont */
+ str->sample_index = index;
+ str->offset_in_sample = 0;
+ /* Each time we move in the stream we store the position where we are
+ * starting from */
+ str->from_sample = index;
+ str->discont = TRUE;
+}
+
/* Adjust a desired seek time so all (non-sparse, unless @use_sparse)
 * streams can start on a decodable sample.
 *
 * For every stream the sample at @desired_time is located and snapped
 * to the previous (or, when @next, the following) keyframe. The
 * earliest/latest usable keyframe time is returned in @key_time and the
 * smallest corresponding byte offset in @key_offset (-1 when unknown);
 * either out-parameter may be NULL.
 *
 * NOTE(review): seg_idx from gst_qtdemux_find_segment() can be -1 when a
 * stream has no segments, which would index str->segments out of bounds
 * below — presumably every stream has at least one segment by the time
 * seeking happens; confirm. */
static void
gst_qtdemux_adjust_seek (GstQTDemux * qtdemux, gint64 desired_time,
    gboolean use_sparse, gboolean next, gint64 * key_time, gint64 * key_offset)
{
  guint64 min_offset;
  gint64 min_byte_offset = -1;
  guint i;

  min_offset = desired_time;

  /* for each stream, find the index of the sample in the segment
   * and move back to the previous keyframe. */
  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    QtDemuxStream *str;
    guint32 index, kindex;
    guint32 seg_idx;
    GstClockTime media_start;
    GstClockTime media_time;
    GstClockTime seg_time;
    QtDemuxSegment *seg;
    gboolean empty_segment = FALSE;

    str = QTDEMUX_NTH_STREAM (qtdemux, i);

    /* sparse streams (e.g. subtitles) don't constrain the seek unless
     * explicitly requested */
    if (CUR_STREAM (str)->sparse && !use_sparse)
      continue;

    seg_idx = gst_qtdemux_find_segment (qtdemux, str, desired_time);
    GST_DEBUG_OBJECT (qtdemux, "align segment %d", seg_idx);

    /* get segment and time in the segment */
    seg = &str->segments[seg_idx];
    seg_time = (desired_time - seg->time) * seg->rate;

    /* skip over empty (gap) segments; they contain no samples */
    while (QTSEGMENT_IS_EMPTY (seg)) {
      seg_time = 0;
      empty_segment = TRUE;
      GST_DEBUG_OBJECT (str->pad, "Segment %d is empty, moving to next one",
          seg_idx);
      seg_idx++;
      if (seg_idx == str->n_segments)
        break;
      seg = &str->segments[seg_idx];
    }

    if (seg_idx == str->n_segments) {
      /* FIXME track shouldn't have the last segment as empty, but if it
       * happens we better handle it */
      continue;
    }

    /* get the media time in the segment */
    media_start = seg->media_start + seg_time;

    /* get the index of the sample with media time */
    index = gst_qtdemux_find_index_linear (qtdemux, str, media_start);
    GST_DEBUG_OBJECT (qtdemux, "sample for %" GST_TIME_FORMAT " at %u"
        " at offset %" G_GUINT64_FORMAT " (empty segment: %d)",
        GST_TIME_ARGS (media_start), index, str->samples[index].offset,
        empty_segment);

    /* shift to next frame if we are looking for next keyframe */
    if (next && QTSAMPLE_PTS_NO_CSLG (str, &str->samples[index]) < media_start
        && index < str->stbl_index)
      index++;

    if (!empty_segment) {
      /* find previous keyframe */
      kindex = gst_qtdemux_find_keyframe (qtdemux, str, index, next);

      /* we will settle for one before if none found after */
      if (next && kindex == -1)
        kindex = gst_qtdemux_find_keyframe (qtdemux, str, index, FALSE);

      /* Update the requested time whenever a keyframe was found, to make it
       * accurate and avoid having the first buffer fall outside of the segment
       */
      if (kindex != -1) {
        index = kindex;

        /* get timestamp of keyframe */
        media_time = QTSAMPLE_PTS_NO_CSLG (str, &str->samples[kindex]);
        GST_DEBUG_OBJECT (qtdemux,
            "keyframe at %u with time %" GST_TIME_FORMAT " at offset %"
            G_GUINT64_FORMAT, kindex, GST_TIME_ARGS (media_time),
            str->samples[kindex].offset);

        /* keyframes in the segment get a chance to change the
         * desired_offset. keyframes out of the segment are
         * ignored. */
        if (media_time >= seg->media_start) {
          GstClockTime seg_time;

          /* this keyframe is inside the segment, convert back to
           * segment time */
          seg_time = (media_time - seg->media_start) + seg->time;
          if ((!next && (seg_time < min_offset)) ||
              (next && (seg_time > min_offset)))
            min_offset = seg_time;
        }
      }
    }

    /* track the smallest byte position any stream needs to start from */
    if (min_byte_offset < 0 || str->samples[index].offset < min_byte_offset)
      min_byte_offset = str->samples[index].offset;
  }

  if (key_time)
    *key_time = min_offset;
  if (key_offset)
    *key_offset = min_byte_offset;
}
+
+static gboolean
+gst_qtdemux_convert_seek (GstPad * pad, GstFormat * format,
+ GstSeekType cur_type, gint64 * cur, GstSeekType stop_type, gint64 * stop)
+{
+ gboolean res;
+
+ g_return_val_if_fail (format != NULL, FALSE);
+ g_return_val_if_fail (cur != NULL, FALSE);
+ g_return_val_if_fail (stop != NULL, FALSE);
+
+ if (*format == GST_FORMAT_TIME)
+ return TRUE;
+
+ res = TRUE;
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, *format, *cur, GST_FORMAT_TIME, cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res = gst_pad_query_convert (pad, *format, *stop, GST_FORMAT_TIME, stop);
+
+ if (res)
+ *format = GST_FORMAT_TIME;
+
+ return res;
+}
+
/* perform seek in push based mode:
   find BYTE position to move to based on time and delegate to upstream
*/
static gboolean
gst_qtdemux_do_push_seek (GstQTDemux * qtdemux, GstPad * pad, GstEvent * event)
{
  gdouble rate;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType cur_type, stop_type;
  gint64 cur, stop, key_cur;
  gboolean res;
  gint64 byte_cur;
  gint64 original_stop;
  guint32 seqnum;

  GST_DEBUG_OBJECT (qtdemux, "doing push-based seek");

  gst_event_parse_seek (event, &rate, &format, &flags,
      &cur_type, &cur, &stop_type, &stop);
  seqnum = gst_event_get_seqnum (event);

  /* Directly send the instant-rate-change event here before taking the
   * stream-lock so that it can be applied as soon as possible */
  if (flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE) {
    GstEvent *ev;

    /* instant rate change only supported if direction does not change. All
     * other requirements are already checked before creating the seek event
     * but let's double-check here to be sure */
    if ((qtdemux->segment.rate > 0 && rate < 0) ||
        (qtdemux->segment.rate < 0 && rate > 0) ||
        cur_type != GST_SEEK_TYPE_NONE ||
        stop_type != GST_SEEK_TYPE_NONE || (flags & GST_SEEK_FLAG_FLUSH)) {
      GST_ERROR_OBJECT (qtdemux,
          "Instant rate change seeks only supported in the "
          "same direction, without flushing and position change");
      return FALSE;
    }

    ev = gst_event_new_instant_rate_change (rate / qtdemux->segment.rate,
        (GstSegmentFlags) flags);
    gst_event_set_seqnum (ev, seqnum);
    gst_qtdemux_push_event (qtdemux, ev);
    return TRUE;
  }

  /* only forward streaming and seeking is possible */
  if (rate <= 0)
    goto unsupported_seek;

  /* convert to TIME if needed and possible */
  if (!gst_qtdemux_convert_seek (pad, &format, cur_type, &cur,
          stop_type, &stop))
    goto no_format;

  /* Upstream seek in bytes will have undefined stop, but qtdemux stores
   * the original stop position to use when upstream pushes the new segment
   * for this seek */
  original_stop = stop;
  stop = -1;

  /* find reasonable corresponding BYTE position,
   * also try to mind about keyframes, since we can not go back a bit for them
   * later on */
  /* determining @next here based on SNAP_BEFORE/SNAP_AFTER should
   * mostly just work, but let's not yet boldly go there ... */
  gst_qtdemux_adjust_seek (qtdemux, cur, FALSE, FALSE, &key_cur, &byte_cur);

  if (byte_cur == -1)
    goto abort_seek;

  GST_DEBUG_OBJECT (qtdemux, "Pushing BYTE seek rate %g, "
      "start %" G_GINT64_FORMAT ", stop %" G_GINT64_FORMAT, rate, byte_cur,
      stop);

  /* remember what this seek asked for, so the new segment pushed by
   * upstream can be translated back to TIME */
  GST_OBJECT_LOCK (qtdemux);
  qtdemux->seek_offset = byte_cur;
  if (!(flags & GST_SEEK_FLAG_KEY_UNIT)) {
    qtdemux->push_seek_start = cur;
  } else {
    qtdemux->push_seek_start = key_cur;
  }

  if (stop_type == GST_SEEK_TYPE_NONE) {
    qtdemux->push_seek_stop = qtdemux->segment.stop;
  } else {
    qtdemux->push_seek_stop = original_stop;
  }
  GST_OBJECT_UNLOCK (qtdemux);

  qtdemux->segment_seqnum = seqnum;
  /* BYTE seek event */
  event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, byte_cur,
      stop_type, stop);
  gst_event_set_seqnum (event, seqnum);
  res = gst_pad_push_event (qtdemux->sinkpad, event);

  return res;

  /* ERRORS */
abort_seek:
  {
    GST_DEBUG_OBJECT (qtdemux, "could not determine byte position to seek to, "
        "seek aborted.");
    return FALSE;
  }
unsupported_seek:
  {
    GST_DEBUG_OBJECT (qtdemux, "unsupported seek, seek aborted.");
    return FALSE;
  }
no_format:
  {
    GST_DEBUG_OBJECT (qtdemux, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
+
/* perform the seek.
 *
 * We set all segment_indexes in the streams to unknown and
 * adjust the time_position to the desired position. this is enough
 * to trigger a segment switch in the streaming thread to start
 * streaming from the desired position.
 *
 * Keyframe seeking is a little more complicated when dealing with
 * segments. Ideally we want to move to the previous keyframe in
 * the segment but there might not be a keyframe in the segment. In
 * fact, none of the segments could contain a keyframe. We take a
 * practical approach: seek to the previous keyframe in the segment,
 * if there is none, seek to the beginning of the segment.
 *
 * Called with STREAM_LOCK
 */
static gboolean
gst_qtdemux_perform_seek (GstQTDemux * qtdemux, GstSegment * segment,
    guint32 seqnum, GstSeekFlags flags)
{
  gint64 desired_offset;
  guint i;

  desired_offset = segment->position;

  GST_DEBUG_OBJECT (qtdemux, "seeking to %" GST_TIME_FORMAT,
      GST_TIME_ARGS (desired_offset));

  /* may not have enough fragmented info to do this adjustment,
   * and we can't scan (and probably should not) at this time with
   * possibly flushing upstream */
  if ((flags & GST_SEEK_FLAG_KEY_UNIT) && !qtdemux->fragmented) {
    gint64 min_offset;
    gboolean next, before, after;

    /* "! !" normalizes the masked flag bits to a gboolean 0/1 */
    before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
    after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
    next = after && !before;
    if (segment->rate < 0)
      next = !next;

    gst_qtdemux_adjust_seek (qtdemux, desired_offset, TRUE, next, &min_offset,
        NULL);
    GST_DEBUG_OBJECT (qtdemux, "keyframe seek, align to %"
        GST_TIME_FORMAT, GST_TIME_ARGS (min_offset));
    desired_offset = min_offset;
  }

  /* and set all streams to the final position */
  GST_OBJECT_LOCK (qtdemux);
  gst_flow_combiner_reset (qtdemux->flowcombiner);
  GST_OBJECT_UNLOCK (qtdemux);
  qtdemux->segment_seqnum = seqnum;
  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);

    /* -1 sentinels force the streaming thread to re-resolve the sample
     * and segment for the new position */
    stream->time_position = desired_offset;
    stream->accumulated_base = 0;
    stream->sample_index = -1;
    stream->offset_in_sample = 0;
    stream->segment_index = -1;
    stream->sent_eos = FALSE;
    stream->last_keyframe_dts = GST_CLOCK_TIME_NONE;

    if (segment->flags & GST_SEEK_FLAG_FLUSH)
      gst_segment_init (&stream->segment, GST_FORMAT_TIME);
  }
  segment->position = desired_offset;
  if (segment->rate >= 0) {
    segment->start = desired_offset;
    /* We need to update time as we update start in that direction */
    segment->time = desired_offset;

    /* we stop at the end */
    if (segment->stop == -1)
      segment->stop = segment->duration;
  } else {
    /* reverse playback: the seek position becomes the stop boundary */
    segment->stop = desired_offset;
  }

  if (qtdemux->fragmented)
    qtdemux->fragmented_seek_pending = TRUE;

  return TRUE;
}
+
/* do a seek in pull based mode
 *
 * Handles the full flush / pause, seek, restart cycle:
 * flush (or pause) the streaming task, perform the seek on a copy of
 * the segment, then flush-stop and restart the task. Instant-rate-change
 * seeks are forwarded immediately without touching the streaming thread.
 */
static gboolean
gst_qtdemux_do_seek (GstQTDemux * qtdemux, GstPad * pad, GstEvent * event)
{
  gdouble rate = 1.0;
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType cur_type, stop_type;
  gint64 cur, stop;
  gboolean flush, instant_rate_change;
  gboolean update;
  GstSegment seeksegment;
  guint32 seqnum = GST_SEQNUM_INVALID;
  GstEvent *flush_event;
  gboolean ret;

  GST_DEBUG_OBJECT (qtdemux, "doing seek with event");

  gst_event_parse_seek (event, &rate, &format, &flags,
      &cur_type, &cur, &stop_type, &stop);
  seqnum = gst_event_get_seqnum (event);

  /* we have to have a format as the segment format. Try to convert
   * if not. */
  if (!gst_qtdemux_convert_seek (pad, &format, cur_type, &cur,
          stop_type, &stop))
    goto no_format;

  GST_DEBUG_OBJECT (qtdemux, "seek format %s", gst_format_get_name (format));

  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
  instant_rate_change = ! !(flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE);

  /* Directly send the instant-rate-change event here before taking the
   * stream-lock so that it can be applied as soon as possible */
  if (instant_rate_change) {
    GstEvent *ev;

    /* instant rate change only supported if direction does not change. All
     * other requirements are already checked before creating the seek event
     * but let's double-check here to be sure */
    if ((qtdemux->segment.rate > 0 && rate < 0) ||
        (qtdemux->segment.rate < 0 && rate > 0) ||
        cur_type != GST_SEEK_TYPE_NONE ||
        stop_type != GST_SEEK_TYPE_NONE || flush) {
      GST_ERROR_OBJECT (qtdemux,
          "Instant rate change seeks only supported in the "
          "same direction, without flushing and position change");
      return FALSE;
    }

    ev = gst_event_new_instant_rate_change (rate / qtdemux->segment.rate,
        (GstSegmentFlags) flags);
    gst_event_set_seqnum (ev, seqnum);
    gst_qtdemux_push_event (qtdemux, ev);
    return TRUE;
  }

  /* stop streaming, either by flushing or by pausing the task */
  if (flush) {
    flush_event = gst_event_new_flush_start ();
    if (seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (flush_event, seqnum);
    /* unlock upstream pull_range */
    gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
    /* make sure out loop function exits */
    gst_qtdemux_push_event (qtdemux, flush_event);
  } else {
    /* non flushing seek, pause the task */
    gst_pad_pause_task (qtdemux->sinkpad);
  }

  /* wait for streaming to finish */
  GST_PAD_STREAM_LOCK (qtdemux->sinkpad);

  /* copy segment, we need this because we still need the old
   * segment when we close the current segment. */
  memcpy (&seeksegment, &qtdemux->segment, sizeof (GstSegment));

  /* configure the segment with the seek variables */
  GST_DEBUG_OBJECT (qtdemux, "configuring seek");
  if (!gst_segment_do_seek (&seeksegment, rate, format, flags,
          cur_type, cur, stop_type, stop, &update)) {
    ret = FALSE;
    GST_ERROR_OBJECT (qtdemux, "inconsistent seek values, doing nothing");
  } else {
    /* now do the seek */
    ret = gst_qtdemux_perform_seek (qtdemux, &seeksegment, seqnum, flags);
  }

  /* prepare for streaming again */
  if (flush) {
    flush_event = gst_event_new_flush_stop (TRUE);
    if (seqnum != GST_SEQNUM_INVALID)
      gst_event_set_seqnum (flush_event, seqnum);

    gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (flush_event));
    gst_qtdemux_push_event (qtdemux, flush_event);
  }

  /* commit the new segment */
  memcpy (&qtdemux->segment, &seeksegment, sizeof (GstSegment));

  /* applications doing segment seeks get notified where playback starts */
  if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
    GstMessage *msg = gst_message_new_segment_start (GST_OBJECT_CAST (qtdemux),
        qtdemux->segment.format, qtdemux->segment.position);
    if (seqnum != GST_SEQNUM_INVALID)
      gst_message_set_seqnum (msg, seqnum);
    gst_element_post_message (GST_ELEMENT_CAST (qtdemux), msg);
  }

  /* restart streaming, NEWSEGMENT will be sent from the streaming thread. */
  gst_pad_start_task (qtdemux->sinkpad, (GstTaskFunction) gst_qtdemux_loop,
      qtdemux->sinkpad, NULL);

  GST_PAD_STREAM_UNLOCK (qtdemux->sinkpad);

  return ret;

  /* ERRORS */
no_format:
  {
    GST_DEBUG_OBJECT (qtdemux, "unsupported format given, seek aborted.");
    return FALSE;
  }
}
+
+static gboolean
+qtdemux_ensure_index (GstQTDemux * qtdemux)
+{
+ guint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "collecting all metadata for all streams");
+
+ /* Build complete index */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->n_samples - 1)) {
+ GST_LOG_OBJECT (qtdemux,
+ "Building complete index of track-id %u for seeking failed!",
+ stream->track_id);
+ return FALSE;
+ }
+ }
+
+ return TRUE;
+}
+
/* Source pad event handler.
 *
 * RECONFIGURE resets the flow combiner (under the object lock) before
 * default handling.  SEEK is deduplicated by seqnum, then either handled
 * locally (pull mode), forwarded upstream, or turned into a push-mode
 * seek; all other events go upstream via the default handler. */
static gboolean
gst_qtdemux_handle_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  gboolean res = TRUE;
  GstQTDemux *qtdemux = GST_QTDEMUX (parent);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_RECONFIGURE:
      GST_OBJECT_LOCK (qtdemux);
      gst_flow_combiner_reset (qtdemux->flowcombiner);
      GST_OBJECT_UNLOCK (qtdemux);
      res = gst_pad_event_default (pad, parent, event);
      break;
    case GST_EVENT_SEEK:
    {
      GstSeekFlags flags = 0;
      GstFormat seek_format;
      gboolean instant_rate_change;

#ifndef GST_DISABLE_GST_DEBUG
      GstClockTime ts = gst_util_get_timestamp ();
#endif
      guint32 seqnum = gst_event_get_seqnum (event);

      qtdemux->received_seek = TRUE;

      gst_event_parse_seek (event, NULL, &seek_format, &flags, NULL, NULL, NULL,
          NULL);
      instant_rate_change = ! !(flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE);

      /* the same seek can arrive once per source pad; only act on it once */
      if (seqnum == qtdemux->segment_seqnum) {
        GST_LOG_OBJECT (pad,
            "Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
        gst_event_unref (event);
        return TRUE;
      }

      if (qtdemux->upstream_format_is_time && qtdemux->fragmented) {
        /* seek should be handled by upstream, we might need to re-download fragments */
        GST_DEBUG_OBJECT (qtdemux,
            "let upstream handle seek for fragmented playback");
        goto upstream;
      }

      if (seek_format == GST_FORMAT_BYTES) {
        GST_DEBUG_OBJECT (pad, "Rejecting seek request in bytes format");
        gst_event_unref (event);
        return FALSE;
      }

      gst_event_parse_seek_trickmode_interval (event,
          &qtdemux->trickmode_interval);

      /* Build complete index for seeking;
       * if not a fragmented file at least and we're really doing a seek,
       * not just an instant-rate-change */
      if (!qtdemux->fragmented && !instant_rate_change) {
        if (!qtdemux_ensure_index (qtdemux))
          goto index_failed;
      }
#ifndef GST_DISABLE_GST_DEBUG
      ts = gst_util_get_timestamp () - ts;
      GST_INFO_OBJECT (qtdemux,
          "Time taken to parse index %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
#endif
      if (qtdemux->pullbased) {
        res = gst_qtdemux_do_seek (qtdemux, pad, event);
      } else if (gst_pad_push_event (qtdemux->sinkpad, gst_event_ref (event))) {
        GST_DEBUG_OBJECT (qtdemux, "Upstream successfully seeked");
        res = TRUE;
      } else if (qtdemux->state == QTDEMUX_STATE_MOVIE
          && QTDEMUX_N_STREAMS (qtdemux)
          && !qtdemux->fragmented) {
        res = gst_qtdemux_do_push_seek (qtdemux, pad, event);
      } else {
        GST_DEBUG_OBJECT (qtdemux,
            "ignoring seek in push mode in current state");
        res = FALSE;
      }
      gst_event_unref (event);
    }
      break;
    default:
    upstream:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }

done:
  return res;

  /* ERRORS */
index_failed:
  {
    GST_ERROR_OBJECT (qtdemux, "Index failed");
    gst_event_unref (event);
    res = FALSE;
    goto done;
  }
}
+
+/* Find, for each track, the first sample in coding order that has a file offset >= @byte_pos.
+ *
+ * If @fw is false, the coding order is explored backwards.
+ *
+ * If @set is true, each stream will be moved to its matched sample, or EOS if no matching
+ * sample is found for that track.
+ *
+ * The stream and sample index of the sample with the minimum offset in the direction explored
+ * (see @fw) is returned in the output parameters @_stream and @_index respectively.
+ *
+ * @_time is set to the QTSAMPLE_PTS of the matched sample with the minimum QTSAMPLE_PTS in the
+ * direction explored, which may not always match the QTSAMPLE_PTS of the sample returned in
+ * @_stream and @_index. */
static void
gst_qtdemux_find_sample (GstQTDemux * qtdemux, gint64 byte_pos, gboolean fw,
    gboolean set, QtDemuxStream ** _stream, gint * _index, gint64 * _time)
{
  gint i, index;
  gint64 time, min_time;
  QtDemuxStream *stream;
  gint iter;

  min_time = -1;
  stream = NULL;
  index = -1;

  for (iter = 0; iter < QTDEMUX_N_STREAMS (qtdemux); iter++) {
    QtDemuxStream *str;
    gint inc;
    gboolean set_sample;

    str = QTDEMUX_NTH_STREAM (qtdemux, iter);
    /* when @set is requested, remember whether a sample matched so that
     * streams with no match can be moved to EOS below */
    set_sample = !set;

    /* choose scan direction: forward from the first sample or backward
     * from the last one */
    if (fw) {
      i = 0;
      inc = 1;
    } else {
      i = str->n_samples - 1;
      inc = -1;
    }

    for (; (i >= 0) && (i < str->n_samples); i += inc) {
      /* skip empty samples */
      if (str->samples[i].size == 0)
        continue;

      /* forward: sample must start at or after byte_pos */
      if (fw && (str->samples[i].offset < byte_pos))
        continue;

      /* backward: sample must end at or before byte_pos */
      if (!fw && (str->samples[i].offset + str->samples[i].size > byte_pos))
        continue;

      /* move stream to first available sample */
      if (set) {
        gst_qtdemux_move_stream (qtdemux, str, i);
        set_sample = TRUE;
      }

      /* avoid index from sparse streams since they might be far away */
      if (!CUR_STREAM (str)->sparse) {
        /* determine min/max time: earliest PTS when going forward,
         * latest PTS when going backward */
        time = QTSAMPLE_PTS (str, &str->samples[i]);
        if (min_time == -1 || (!fw && time > min_time) ||
            (fw && time < min_time)) {
          min_time = time;
        }

        /* determine stream with leading sample, to get its position */
        if (!stream ||
            (fw && (str->samples[i].offset < stream->samples[index].offset)) ||
            (!fw && (str->samples[i].offset > stream->samples[index].offset))) {
          stream = str;
          index = i;
        }
      }
      /* only the first matching sample per stream is considered */
      break;
    }

    /* no sample for this stream, mark eos */
    if (!set_sample)
      gst_qtdemux_move_stream (qtdemux, str, str->n_samples);
  }

  if (_time)
    *_time = min_time;
  if (_stream)
    *_stream = stream;
  if (_index)
    *_index = index;
}
+
+/* Copied from mpegtsbase code */
+/* FIXME: replace this function when we add new util function for stream-id creation */
+static gchar *
+_get_upstream_id (GstQTDemux * demux)
+{
+ gchar *upstream_id = gst_pad_get_stream_id (demux->sinkpad);
+
+ if (!upstream_id) {
+ /* Try to create one from the upstream URI, else use a randome number */
+ GstQuery *query;
+ gchar *uri = NULL;
+
+ /* Try to generate one from the URI query and
+ * if it fails take a random number instead */
+ query = gst_query_new_uri ();
+ if (gst_element_query (GST_ELEMENT_CAST (demux), query)) {
+ gst_query_parse_uri (query, &uri);
+ }
+
+ if (uri) {
+ GChecksum *cs;
+
+ /* And then generate an SHA256 sum of the URI */
+ cs = g_checksum_new (G_CHECKSUM_SHA256);
+ g_checksum_update (cs, (const guchar *) uri, strlen (uri));
+ g_free (uri);
+ upstream_id = g_strdup (g_checksum_get_string (cs));
+ g_checksum_free (cs);
+ } else {
+ /* Just get some random number if the URI query fails */
+ GST_FIXME_OBJECT (demux, "Creating random stream-id, consider "
+ "implementing a deterministic way of creating a stream-id");
+ upstream_id =
+ g_strdup_printf ("%08x%08x%08x%08x", g_random_int (), g_random_int (),
+ g_random_int (), g_random_int ());
+ }
+
+ gst_query_unref (query);
+ }
+ return upstream_id;
+}
+
+static QtDemuxStream *
+_create_stream (GstQTDemux * demux, guint32 track_id)
+{
+ QtDemuxStream *stream;
+ gchar *upstream_id;
+
+ stream = g_new0 (QtDemuxStream, 1);
+ stream->demux = demux;
+ stream->track_id = track_id;
+ upstream_id = _get_upstream_id (demux);
+ stream->stream_id = g_strdup_printf ("%s/%03u", upstream_id, track_id);
+ g_free (upstream_id);
+ /* new streams always need a discont */
+ stream->discont = TRUE;
+ /* we enable clipping for raw audio/video streams */
+ stream->need_clip = FALSE;
+ stream->need_process = FALSE;
+ stream->segment_index = -1;
+ stream->time_position = 0;
+ stream->sample_index = -1;
+ stream->offset_in_sample = 0;
+ stream->new_stream = TRUE;
+ stream->multiview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+ stream->multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+ stream->protected = FALSE;
+ stream->protection_scheme_type = 0;
+ stream->protection_scheme_version = 0;
+ stream->protection_scheme_info = NULL;
+ stream->n_samples_moof = 0;
+ stream->duration_moof = 0;
+ stream->duration_last_moof = 0;
+ stream->alignment = 1;
+ stream->stream_tags = gst_tag_list_new_empty ();
+ gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
+ g_queue_init (&stream->protection_scheme_event_queue);
+ stream->ref_count = 1;
+ /* consistent default for push based mode */
+ gst_segment_init (&stream->segment, GST_FORMAT_TIME);
+ return stream;
+}
+
/* Sink pad caps handler.
 *
 * For "mss-fragmented" caps (Smooth Streaming) this enables fragmented
 * mode, (re)creates the single stream from the "media-caps" field and
 * derives its subtype, dimensions and rate from those caps.  Any other
 * caps simply turn mss mode off.  Returns FALSE only when asked to
 * renegotiate with more than one existing stream. */
static gboolean
gst_qtdemux_setcaps (GstQTDemux * demux, GstCaps * caps)
{
  GstStructure *structure;
  const gchar *variant;
  const GstCaps *mediacaps = NULL;

  GST_DEBUG_OBJECT (demux, "Sink set caps: %" GST_PTR_FORMAT, caps);

  structure = gst_caps_get_structure (caps, 0);
  variant = gst_structure_get_string (structure, "variant");

  if (variant && strcmp (variant, "mss-fragmented") == 0) {
    QtDemuxStream *stream;
    const GValue *value;

    demux->fragmented = TRUE;
    demux->mss_mode = TRUE;

    if (QTDEMUX_N_STREAMS (demux) > 1) {
      /* can't do this, we can only renegotiate for another mss format */
      return FALSE;
    }

    value = gst_structure_get_value (structure, "media-caps");
    /* create stream */
    if (value) {
      const GValue *timescale_v;

      /* TODO update when stream changes during playback */

      if (QTDEMUX_N_STREAMS (demux) == 0) {
        stream = _create_stream (demux, 1);
        g_ptr_array_add (demux->active_streams, stream);
        /* mss has no stsd/stsd entry, use id 0 as default */
        stream->stsd_entries_length = 1;
        stream->stsd_sample_description_id = stream->cur_stsd_entry_index = 0;
        stream->stsd_entries = g_new0 (QtDemuxStreamStsdEntry, 1);
      } else {
        stream = QTDEMUX_NTH_STREAM (demux, 0);
      }

      timescale_v = gst_structure_get_value (structure, "timescale");
      if (timescale_v) {
        stream->timescale = g_value_get_uint64 (timescale_v);
      } else {
        /* default mss timescale */
        stream->timescale = 10000000;
      }
      demux->timescale = stream->timescale;

      mediacaps = gst_value_get_caps (value);
      if (!CUR_STREAM (stream)->caps
          || !gst_caps_is_equal_fixed (mediacaps, CUR_STREAM (stream)->caps)) {
        GST_DEBUG_OBJECT (demux, "We have a new caps %" GST_PTR_FORMAT,
            mediacaps);
        stream->new_caps = TRUE;
      }
      gst_caps_replace (&CUR_STREAM (stream)->caps, (GstCaps *) mediacaps);
      /* NOTE: structure is re-pointed at the media caps from here on */
      structure = gst_caps_get_structure (mediacaps, 0);
      if (g_str_has_prefix (gst_structure_get_name (structure), "video")) {
        stream->subtype = FOURCC_vide;

        gst_structure_get_int (structure, "width", &CUR_STREAM (stream)->width);
        gst_structure_get_int (structure, "height",
            &CUR_STREAM (stream)->height);
        gst_structure_get_fraction (structure, "framerate",
            &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);
      } else if (g_str_has_prefix (gst_structure_get_name (structure), "audio")) {
        gint rate = 0;
        stream->subtype = FOURCC_soun;
        gst_structure_get_int (structure, "channels",
            &CUR_STREAM (stream)->n_channels);
        gst_structure_get_int (structure, "rate", &rate);
        CUR_STREAM (stream)->rate = rate;
      } else if (gst_structure_has_name (structure, "application/x-cenc")) {
        /* encrypted media: recover the subtype from the original type */
        if (gst_structure_has_field (structure, "original-media-type")) {
          const gchar *media_type =
              gst_structure_get_string (structure, "original-media-type");
          if (g_str_has_prefix (media_type, "video")) {
            stream->subtype = FOURCC_vide;
          } else if (g_str_has_prefix (media_type, "audio")) {
            stream->subtype = FOURCC_soun;
          }
        }
      }
    }
    gst_caps_replace (&demux->media_caps, (GstCaps *) mediacaps);
  } else {
    demux->mss_mode = FALSE;
  }

  return TRUE;
}
+
/* Reset the demuxer state.
 *
 * When @hard (or when upstream drives in time format) all parsing and
 * header state (moov tree, mdat buffers, tags, brands, ...) is dropped.
 * A @hard reset additionally drops every stream and all global
 * configuration.  A soft reset only rewinds the adapter, segment and
 * per-stream positions; in mss mode the streams are cleared instead. */
static void
gst_qtdemux_reset (GstQTDemux * qtdemux, gboolean hard)
{
  gint i;

  GST_DEBUG_OBJECT (qtdemux, "Resetting demux");
  gst_pad_stop_task (qtdemux->sinkpad);

  if (hard || qtdemux->upstream_format_is_time) {
    qtdemux->state = QTDEMUX_STATE_INITIAL;
    /* 16 bytes: enough for the next atom's size + fourcc (incl. 64-bit size) */
    qtdemux->neededbytes = 16;
    qtdemux->todrop = 0;
    qtdemux->pullbased = FALSE;
    g_clear_pointer (&qtdemux->redirect_location, g_free);
    qtdemux->first_mdat = -1;
    qtdemux->header_size = 0;
    qtdemux->mdatoffset = -1;
    qtdemux->restoredata_offset = -1;
    if (qtdemux->mdatbuffer)
      gst_buffer_unref (qtdemux->mdatbuffer);
    if (qtdemux->restoredata_buffer)
      gst_buffer_unref (qtdemux->restoredata_buffer);
    qtdemux->mdatbuffer = NULL;
    qtdemux->restoredata_buffer = NULL;
    qtdemux->mdatleft = 0;
    qtdemux->mdatsize = 0;
    if (qtdemux->comp_brands)
      gst_buffer_unref (qtdemux->comp_brands);
    qtdemux->comp_brands = NULL;
    qtdemux->last_moov_offset = -1;
    /* when the moov was compressed, the uncompressed node data was
     * allocated separately and must be freed along with it */
    if (qtdemux->moov_node_compressed) {
      g_node_destroy (qtdemux->moov_node_compressed);
      if (qtdemux->moov_node)
        g_free (qtdemux->moov_node->data);
    }
    qtdemux->moov_node_compressed = NULL;
    if (qtdemux->moov_node)
      g_node_destroy (qtdemux->moov_node);
    qtdemux->moov_node = NULL;
    if (qtdemux->tag_list)
      gst_mini_object_unref (GST_MINI_OBJECT_CAST (qtdemux->tag_list));
    qtdemux->tag_list = gst_tag_list_new_empty ();
    gst_tag_list_set_scope (qtdemux->tag_list, GST_TAG_SCOPE_GLOBAL);
#if 0
    if (qtdemux->element_index)
      gst_object_unref (qtdemux->element_index);
    qtdemux->element_index = NULL;
#endif
    qtdemux->major_brand = 0;
    qtdemux->upstream_format_is_time = FALSE;
    qtdemux->upstream_seekable = FALSE;
    qtdemux->upstream_size = 0;

    qtdemux->fragment_start = -1;
    qtdemux->fragment_start_offset = -1;
    qtdemux->duration = 0;
    qtdemux->moof_offset = 0;
    qtdemux->chapters_track_id = 0;
    qtdemux->have_group_id = FALSE;
    qtdemux->group_id = G_MAXUINT;

    g_queue_foreach (&qtdemux->protection_event_queue, (GFunc) gst_event_unref,
        NULL);
    g_queue_clear (&qtdemux->protection_event_queue);

    qtdemux->received_seek = FALSE;
    qtdemux->first_moof_already_parsed = FALSE;
  }
  qtdemux->offset = 0;
  gst_adapter_clear (qtdemux->adapter);
  gst_segment_init (&qtdemux->segment, GST_FORMAT_TIME);
  qtdemux->need_segment = TRUE;

  if (hard) {
    qtdemux->segment_seqnum = GST_SEQNUM_INVALID;
    qtdemux->trickmode_interval = 0;
    g_ptr_array_set_size (qtdemux->active_streams, 0);
    g_ptr_array_set_size (qtdemux->old_streams, 0);
    qtdemux->n_video_streams = 0;
    qtdemux->n_audio_streams = 0;
    qtdemux->n_sub_streams = 0;
    qtdemux->exposed = FALSE;
    qtdemux->fragmented = FALSE;
    qtdemux->mss_mode = FALSE;
    gst_caps_replace (&qtdemux->media_caps, NULL);
    qtdemux->timescale = 0;
    qtdemux->got_moov = FALSE;
    qtdemux->cenc_aux_info_offset = 0;
    qtdemux->cenc_aux_info_sizes = NULL;
    qtdemux->cenc_aux_sample_count = 0;
    if (qtdemux->protection_system_ids) {
      g_ptr_array_free (qtdemux->protection_system_ids, TRUE);
      qtdemux->protection_system_ids = NULL;
    }
    qtdemux->streams_aware = GST_OBJECT_PARENT (qtdemux)
        && GST_OBJECT_FLAG_IS_SET (GST_OBJECT_PARENT (qtdemux),
        GST_BIN_FLAG_STREAMS_AWARE);

    if (qtdemux->preferred_protection_system_id) {
      g_free (qtdemux->preferred_protection_system_id);
      qtdemux->preferred_protection_system_id = NULL;
    }
  } else if (qtdemux->mss_mode) {
    /* soft reset in mss mode: clear stream state but keep the streams */
    gst_flow_combiner_reset (qtdemux->flowcombiner);
    g_ptr_array_foreach (qtdemux->active_streams,
        (GFunc) gst_qtdemux_stream_clear, NULL);
  } else {
    /* plain soft reset: rewind per-stream playback position only */
    gst_flow_combiner_reset (qtdemux->flowcombiner);
    for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
      QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
      stream->sent_eos = FALSE;
      stream->time_position = 0;
      stream->accumulated_base = 0;
      stream->last_keyframe_dts = GST_CLOCK_TIME_NONE;
    }
  }
}
+
+
/* Maps the @segment to the qt edts internal segments and pushes
 * the corresponding segment event.
 *
 * If it ends up being at a empty segment, a gap will be pushed and the next
 * edts segment will be activated in sequence.
 *
 * To be used in push-mode only */
static void
gst_qtdemux_map_and_push_segments (GstQTDemux * qtdemux, GstSegment * segment)
{
  gint i, iter;

  for (iter = 0; iter < QTDEMUX_N_STREAMS (qtdemux); iter++) {
    QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, iter);

    stream->time_position = segment->start;

    /* in push mode we should be guaranteed that we will have empty segments
     * at the beginning and then one segment after, other scenarios are not
     * supported and are discarded when parsing the edts */
    for (i = 0; i < stream->n_segments; i++) {
      if (stream->segments[i].stop_time > segment->start) {
        /* push the empty segment and move to the next one */
        gst_qtdemux_activate_segment (qtdemux, stream, i,
            stream->time_position);
        if (QTSEGMENT_IS_EMPTY (&stream->segments[i])) {
          gst_qtdemux_send_gap_for_segment (qtdemux, stream, i,
              stream->time_position);

          /* accumulate previous segments */
          if (GST_CLOCK_TIME_IS_VALID (stream->segment.stop))
            stream->accumulated_base +=
                (stream->segment.stop -
                stream->segment.start) / ABS (stream->segment.rate);
          continue;
        }

        /* a non-empty segment must be the final one (see the push-mode
         * guarantee above) */
        g_assert (i == stream->n_segments - 1);
      }
    }
  }
}
+
+static void
+gst_qtdemux_stream_concat (GstQTDemux * qtdemux, GPtrArray * dest,
+ GPtrArray * src)
+{
+ guint i;
+ guint len;
+
+ len = src->len;
+
+ if (len == 0)
+ return;
+
+ for (i = 0; i < len; i++) {
+ QtDemuxStream *stream = g_ptr_array_index (src, i);
+
+#ifndef GST_DISABLE_GST_DEBUG
+ GST_DEBUG_OBJECT (qtdemux, "Move stream %p (stream-id %s) to %p",
+ stream, GST_STR_NULL (stream->stream_id), dest);
+#endif
+ g_ptr_array_add (dest, gst_qtdemux_stream_ref (stream));
+ }
+
+ g_ptr_array_set_size (src, 0);
+}
+
/* Sink pad event handler (push mode).
 *
 * SEGMENT events in BYTE format are mapped (via the sample index or a
 * previously stored push-seek) to a TIME segment for downstream; FLUSH,
 * EOS, CAPS, PROTECTION and STREAM_START events get custom treatment,
 * everything else takes the default path. */
static gboolean
gst_qtdemux_handle_sink_event (GstPad * sinkpad, GstObject * parent,
    GstEvent * event)
{
  GstQTDemux *demux = GST_QTDEMUX (parent);
  gboolean res = TRUE;

  GST_LOG_OBJECT (demux, "handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      gint64 offset = 0;
      QtDemuxStream *stream;
      gint idx;
      GstSegment segment;

      /* some debug output */
      gst_event_copy_segment (event, &segment);
      GST_DEBUG_OBJECT (demux, "received newsegment %" GST_SEGMENT_FORMAT,
          &segment);

      if (segment.format == GST_FORMAT_TIME) {
        demux->upstream_format_is_time = TRUE;
        demux->segment_seqnum = gst_event_get_seqnum (event);
      } else {
        GST_DEBUG_OBJECT (demux, "Not storing upstream newsegment, "
            "not in time format");

        /* chain will send initial newsegment after pads have been added */
        if (demux->state != QTDEMUX_STATE_MOVIE || !QTDEMUX_N_STREAMS (demux)) {
          GST_DEBUG_OBJECT (demux, "still starting, eating event");
          goto exit;
        }
      }

      /* check if this matches a time seek we received previously
       * FIXME for backwards compatibility reasons we use the
       * seek_offset here to compare. In the future we might want to
       * change this to use the seqnum as it uniquely should identify
       * the segment that corresponds to the seek. */
      GST_DEBUG_OBJECT (demux, "Stored seek offset: %" G_GINT64_FORMAT
          ", received segment offset %" G_GINT64_FORMAT,
          demux->seek_offset, segment.start);
      if (segment.format == GST_FORMAT_BYTES
          && demux->seek_offset == segment.start) {
        GST_OBJECT_LOCK (demux);
        offset = segment.start;

        segment.format = GST_FORMAT_TIME;
        segment.start = demux->push_seek_start;
        segment.stop = demux->push_seek_stop;
        GST_DEBUG_OBJECT (demux, "Replaced segment with stored seek "
            "segment %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
            GST_TIME_ARGS (segment.start), GST_TIME_ARGS (segment.stop));
        GST_OBJECT_UNLOCK (demux);
      }

      /* we only expect a BYTE segment, e.g. following a seek */
      if (segment.format == GST_FORMAT_BYTES) {
        if (GST_CLOCK_TIME_IS_VALID (segment.start)) {
          offset = segment.start;

          /* map the byte position to the nearest sample's PTS */
          gst_qtdemux_find_sample (demux, segment.start, TRUE, FALSE, NULL,
              NULL, (gint64 *) & segment.start);
          if ((gint64) segment.start < 0)
            segment.start = 0;
        }
        if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
          gst_qtdemux_find_sample (demux, segment.stop, FALSE, FALSE, NULL,
              NULL, (gint64 *) & segment.stop);
          /* keyframe seeking should already arrange for start >= stop,
           * but make sure in other rare cases */
          segment.stop = MAX (segment.stop, segment.start);
        }
      } else if (segment.format == GST_FORMAT_TIME) {
        /* push all data on the adapter before starting this
         * new segment */
        gst_qtdemux_process_adapter (demux, TRUE);
      } else {
        GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
        goto exit;
      }

      /* We shouldn't modify upstream driven TIME FORMAT segment */
      if (!demux->upstream_format_is_time) {
        /* accept upstream's notion of segment and distribute along */
        segment.format = GST_FORMAT_TIME;
        segment.position = segment.time = segment.start;
        segment.duration = demux->segment.duration;
        segment.base = gst_segment_to_running_time (&demux->segment,
            GST_FORMAT_TIME, demux->segment.position);
      }

      gst_segment_copy_into (&segment, &demux->segment);
      GST_DEBUG_OBJECT (demux, "Pushing newseg %" GST_SEGMENT_FORMAT, &segment);

      /* map segment to internal qt segments and push on each stream */
      if (QTDEMUX_N_STREAMS (demux)) {
        demux->need_segment = TRUE;
        gst_qtdemux_check_send_pending_segment (demux);
      }

      /* clear leftover in current segment, if any */
      gst_adapter_clear (demux->adapter);

      /* set up streaming thread */
      demux->offset = offset;
      if (demux->upstream_format_is_time) {
        GST_DEBUG_OBJECT (demux, "Upstream is driving in time format, "
            "set values to restart reading from a new atom");
        demux->neededbytes = 16;
        demux->todrop = 0;
      } else {
        gst_qtdemux_find_sample (demux, offset, TRUE, TRUE, &stream, &idx,
            NULL);
        if (stream) {
          demux->todrop = stream->samples[idx].offset - offset;
          demux->neededbytes = demux->todrop + stream->samples[idx].size;
        } else {
          /* set up for EOS */
          demux->neededbytes = -1;
          demux->todrop = 0;
        }
      }
    exit:
      gst_event_unref (event);
      res = TRUE;
      goto drop;
    }
    case GST_EVENT_FLUSH_START:
    {
      /* swallow flushes triggered by our own byte-offset seeks */
      if (gst_event_get_seqnum (event) == demux->offset_seek_seqnum) {
        gst_event_unref (event);
        goto drop;
      }
      QTDEMUX_EXPOSE_LOCK (demux);
      res = gst_pad_event_default (demux->sinkpad, parent, event);
      QTDEMUX_EXPOSE_UNLOCK (demux);
      goto drop;
    }
    case GST_EVENT_FLUSH_STOP:
    {
      guint64 dur;

      /* soft reset, but keep the already known duration */
      dur = demux->segment.duration;
      gst_qtdemux_reset (demux, FALSE);
      demux->segment.duration = dur;

      if (gst_event_get_seqnum (event) == demux->offset_seek_seqnum) {
        gst_event_unref (event);
        goto drop;
      }
      break;
    }
    case GST_EVENT_EOS:
      /* If we are in push mode, and get an EOS before we've seen any streams,
       * then error out - we have nowhere to send the EOS */
      if (!demux->pullbased) {
        gint i;
        gboolean has_valid_stream = FALSE;
        for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
          if (QTDEMUX_NTH_STREAM (demux, i)->pad != NULL) {
            has_valid_stream = TRUE;
            break;
          }
        }
        if (!has_valid_stream)
          gst_qtdemux_post_no_playable_stream_error (demux);
        else {
          GST_DEBUG_OBJECT (demux, "Data still available after EOS: %u",
              (guint) gst_adapter_available (demux->adapter));
          if (gst_qtdemux_process_adapter (demux, TRUE) != GST_FLOW_OK) {
            res = FALSE;
          }
        }
      }
      break;
    case GST_EVENT_CAPS:{
      GstCaps *caps = NULL;

      gst_event_parse_caps (event, &caps);
      gst_qtdemux_setcaps (demux, caps);
      res = TRUE;
      gst_event_unref (event);
      goto drop;
    }
    case GST_EVENT_PROTECTION:
    {
      const gchar *system_id = NULL;

      gst_event_parse_protection (event, &system_id, NULL, NULL);
      GST_DEBUG_OBJECT (demux, "Received protection event for system ID %s",
          system_id);
      gst_qtdemux_append_protection_system_id (demux, system_id);
      /* save the event for later, for source pads that have not been created */
      g_queue_push_tail (&demux->protection_event_queue, gst_event_ref (event));
      /* send it to all pads that already exist */
      gst_qtdemux_push_event (demux, event);
      res = TRUE;
      goto drop;
    }
    case GST_EVENT_STREAM_START:
    {
      res = TRUE;
      gst_event_unref (event);

      /* Drain all the buffers */
      gst_qtdemux_process_adapter (demux, TRUE);
      gst_qtdemux_reset (demux, FALSE);
      /* We expect new moov box after new stream-start event */
      if (demux->exposed) {
        /* park the currently exposed streams so they stay alive until the
         * new ones replace them */
        gst_qtdemux_stream_concat (demux,
            demux->old_streams, demux->active_streams);
      }

      goto drop;
    }
    default:
      break;
  }

  res = gst_pad_event_default (demux->sinkpad, parent, event) & res;

drop:
  return res;
}
+
+static gboolean
+gst_qtdemux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstQTDemux *demux = GST_QTDEMUX (parent);
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:
+ {
+ GstClockTime duration;
+
+ /* populate demux->upstream_size if not done yet */
+ gst_qtdemux_check_seekability (demux);
+
+ if (demux->upstream_size != -1
+ && gst_qtdemux_get_duration (demux, &duration)) {
+ guint bitrate =
+ gst_util_uint64_scale (8 * demux->upstream_size, GST_SECOND,
+ duration);
+
+ GST_LOG_OBJECT (demux, "bitrate query byte length: %" G_GUINT64_FORMAT
+ " duration %" GST_TIME_FORMAT " resulting a bitrate of %u",
+ demux->upstream_size, GST_TIME_ARGS (duration), bitrate);
+
+ /* TODO: better results based on ranges/index tables */
+ gst_query_set_bitrate (query, bitrate);
+ res = TRUE;
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, (GstObject *) demux, query);
+ break;
+ }
+
+ return res;
+}
+
+
/* GstIndex support, currently compiled out (see also the element_index
 * handling disabled in gst_qtdemux_reset). */
#if 0
static void
gst_qtdemux_set_index (GstElement * element, GstIndex * index)
{
  GstQTDemux *demux = GST_QTDEMUX (element);

  GST_OBJECT_LOCK (demux);
  if (demux->element_index)
    gst_object_unref (demux->element_index);
  if (index) {
    demux->element_index = gst_object_ref (index);
  } else {
    demux->element_index = NULL;
  }
  GST_OBJECT_UNLOCK (demux);
  /* object lock might be taken again */
  if (index)
    gst_index_get_writer_id (index, GST_OBJECT (element), &demux->index_id);
  GST_DEBUG_OBJECT (demux, "Set index %" GST_PTR_FORMAT "for writer id %d",
      demux->element_index, demux->index_id);
}

static GstIndex *
gst_qtdemux_get_index (GstElement * element)
{
  GstIndex *result = NULL;
  GstQTDemux *demux = GST_QTDEMUX (element);

  GST_OBJECT_LOCK (demux);
  if (demux->element_index)
    result = gst_object_ref (demux->element_index);
  GST_OBJECT_UNLOCK (demux);

  GST_DEBUG_OBJECT (demux, "Returning index %" GST_PTR_FORMAT, result);

  return result;
}
#endif
+
+static void
+gst_qtdemux_stbl_free (QtDemuxStream * stream)
+{
+ g_free ((gpointer) stream->stco.data);
+ stream->stco.data = NULL;
+ g_free ((gpointer) stream->stsz.data);
+ stream->stsz.data = NULL;
+ g_free ((gpointer) stream->stsc.data);
+ stream->stsc.data = NULL;
+ g_free ((gpointer) stream->stts.data);
+ stream->stts.data = NULL;
+ g_free ((gpointer) stream->stss.data);
+ stream->stss.data = NULL;
+ g_free ((gpointer) stream->stps.data);
+ stream->stps.data = NULL;
+ g_free ((gpointer) stream->ctts.data);
+ stream->ctts.data = NULL;
+}
+
+static void
+gst_qtdemux_stream_flush_segments_data (QtDemuxStream * stream)
+{
+ g_free (stream->segments);
+ stream->segments = NULL;
+ stream->segment_index = -1;
+ stream->accumulated_base = 0;
+}
+
+static void
+gst_qtdemux_stream_flush_samples_data (QtDemuxStream * stream)
+{
+ g_free (stream->samples);
+ stream->samples = NULL;
+ gst_qtdemux_stbl_free (stream);
+
+ /* fragments */
+ g_free (stream->ra_entries);
+ stream->ra_entries = NULL;
+ stream->n_ra_entries = 0;
+
+ stream->sample_index = -1;
+ stream->stbl_index = -1;
+ stream->n_samples = 0;
+ stream->time_position = 0;
+
+ stream->n_samples_moof = 0;
+ stream->duration_moof = 0;
+ stream->duration_last_moof = 0;
+}
+
/* Clear the dynamic state of @stream — pending buffers, palettes, tags,
 * protection info, segments and samples — while keeping its identity
 * (stream-id, track-id) and the stsd entry caps.  Used by soft resets
 * and when a stream is reused. */
static void
gst_qtdemux_stream_clear (QtDemuxStream * stream)
{
  gint i;
  if (stream->allocator)
    gst_object_unref (stream->allocator);
  /* drop any buffers still queued for this stream */
  while (stream->buffers) {
    gst_buffer_unref (GST_BUFFER_CAST (stream->buffers->data));
    stream->buffers = g_slist_delete_link (stream->buffers, stream->buffers);
  }
  for (i = 0; i < stream->stsd_entries_length; i++) {
    QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[i];
    if (entry->rgb8_palette) {
      gst_memory_unref (entry->rgb8_palette);
      entry->rgb8_palette = NULL;
    }
    entry->sparse = FALSE;
  }

  /* replace the tag list with a fresh, empty one */
  if (stream->stream_tags)
    gst_tag_list_unref (stream->stream_tags);

  stream->stream_tags = gst_tag_list_new_empty ();
  gst_tag_list_set_scope (stream->stream_tags, GST_TAG_SCOPE_STREAM);
  g_free (stream->redirect_uri);
  stream->redirect_uri = NULL;
  stream->sent_eos = FALSE;
  stream->protected = FALSE;
  /* free scheme-specific protection info for the schemes we know */
  if (stream->protection_scheme_info) {
    if (stream->protection_scheme_type == FOURCC_cenc
        || stream->protection_scheme_type == FOURCC_cbcs) {
      QtDemuxCencSampleSetInfo *info =
          (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
      if (info->default_properties)
        gst_structure_free (info->default_properties);
      if (info->crypto_info)
        g_ptr_array_free (info->crypto_info, TRUE);
    }
    if (stream->protection_scheme_type == FOURCC_aavd) {
      QtDemuxAavdEncryptionInfo *info =
          (QtDemuxAavdEncryptionInfo *) stream->protection_scheme_info;
      if (info->default_properties)
        gst_structure_free (info->default_properties);
    }
    g_free (stream->protection_scheme_info);
    stream->protection_scheme_info = NULL;
  }
  stream->protection_scheme_type = 0;
  stream->protection_scheme_version = 0;
  g_queue_foreach (&stream->protection_scheme_event_queue,
      (GFunc) gst_event_unref, NULL);
  g_queue_clear (&stream->protection_scheme_event_queue);
  gst_qtdemux_stream_flush_segments_data (stream);
  gst_qtdemux_stream_flush_samples_data (stream);
}
+
+static void
+gst_qtdemux_stream_reset (QtDemuxStream * stream)
+{
+ gint i;
+ gst_qtdemux_stream_clear (stream);
+ for (i = 0; i < stream->stsd_entries_length; i++) {
+ QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[i];
+ if (entry->caps) {
+ gst_caps_unref (entry->caps);
+ entry->caps = NULL;
+ }
+ }
+ g_free (stream->stsd_entries);
+ stream->stsd_entries = NULL;
+ stream->stsd_entries_length = 0;
+}
+
+static QtDemuxStream *
+gst_qtdemux_stream_ref (QtDemuxStream * stream)
+{
+ g_atomic_int_add (&stream->ref_count, 1);
+
+ return stream;
+}
+
+static void
+gst_qtdemux_stream_unref (QtDemuxStream * stream)
+{
+ if (g_atomic_int_dec_and_test (&stream->ref_count)) {
+ gst_qtdemux_stream_reset (stream);
+ gst_tag_list_unref (stream->stream_tags);
+ if (stream->pad) {
+ GstQTDemux *demux = stream->demux;
+ gst_element_remove_pad (GST_ELEMENT_CAST (demux), stream->pad);
+ GST_OBJECT_LOCK (demux);
+ gst_flow_combiner_remove_pad (demux->flowcombiner, stream->pad);
+ GST_OBJECT_UNLOCK (demux);
+ }
+ g_free (stream->stream_id);
+ g_free (stream);
+ }
+}
+
+static GstStateChangeReturn
+gst_qtdemux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstQTDemux *qtdemux = GST_QTDEMUX (element);
+ GstStateChangeReturn result = GST_STATE_CHANGE_FAILURE;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_qtdemux_reset (qtdemux, TRUE);
+ break;
+ default:
+ break;
+ }
+
+ result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:{
+ gst_qtdemux_reset (qtdemux, TRUE);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return result;
+}
+
+static void
+gst_qtdemux_set_context (GstElement * element, GstContext * context)
+{
+ GstQTDemux *qtdemux = GST_QTDEMUX (element);
+
+ g_return_if_fail (GST_IS_CONTEXT (context));
+
+ if (gst_context_has_context_type (context,
+ "drm-preferred-decryption-system-id")) {
+ const GstStructure *s;
+
+ s = gst_context_get_structure (context);
+ g_free (qtdemux->preferred_protection_system_id);
+ qtdemux->preferred_protection_system_id =
+ g_strdup (gst_structure_get_string (s, "decryption-system-id"));
+ GST_DEBUG_OBJECT (element, "set preferred decryption system to %s",
+ qtdemux->preferred_protection_system_id);
+ }
+
+ GST_ELEMENT_CLASS (parent_class)->set_context (element, context);
+}
+
/* Parse an 'ftyp' atom: record the major brand and keep a copy of the
 * compatible-brands list for later use. @buffer points at the start of the
 * atom (size + fourcc included), @length is the whole atom size. */
static void
qtdemux_parse_ftyp (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
{
  /* counts as header data */
  qtdemux->header_size += length;

  /* only consider at least a sufficiently complete ftyp atom */
  if (length >= 20) {
    GstBuffer *buf;

    /* layout: 4B size, 4B 'ftyp', 4B major brand, 4B minor version,
     * then a list of 4B compatible brands (hence the 8/16 offsets) */
    qtdemux->major_brand = QT_FOURCC (buffer + 8);
    GST_DEBUG_OBJECT (qtdemux, "major brand: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (qtdemux->major_brand));
    if (qtdemux->comp_brands)
      gst_buffer_unref (qtdemux->comp_brands);
    buf = qtdemux->comp_brands = gst_buffer_new_and_alloc (length - 16);
    gst_buffer_fill (buf, 0, buffer + 16, length - 16);
  }
}
+
/* (Re)build @info->default_properties, the application/x-cenc structure
 * describing a track's default per-sample encryption parameters.
 *
 * @is_encrypted: default isProtected flag (only the value 1 maps to TRUE)
 * @protection_scheme_type: scheme fourcc; FOURCC_cbcs selects pattern
 *   encryption fields, anything else is treated as 'cenc'
 * @iv_size: default per-sample IV size in bytes
 * @kid: default key ID; must point to at least 16 bytes
 * @crypt_byte_block/@skip_byte_block: cbcs pattern lengths (only stored
 *   when at least one is non-zero)
 * @constant_iv_size/@constant_iv: cbcs constant IV (@constant_iv may be
 *   NULL, in which case it is omitted) */
static void
qtdemux_update_default_sample_cenc_settings (GstQTDemux * qtdemux,
    QtDemuxCencSampleSetInfo * info, guint32 is_encrypted,
    guint32 protection_scheme_type, guint8 iv_size, const guint8 * kid,
    guint crypt_byte_block, guint skip_byte_block, guint8 constant_iv_size,
    const guint8 * constant_iv)
{
  GstBuffer *kid_buf = gst_buffer_new_allocate (NULL, 16, NULL);
  gst_buffer_fill (kid_buf, 0, kid, 16);
  /* replace any previously parsed defaults wholesale */
  if (info->default_properties)
    gst_structure_free (info->default_properties);
  info->default_properties =
      gst_structure_new ("application/x-cenc",
      "iv_size", G_TYPE_UINT, iv_size,
      "encrypted", G_TYPE_BOOLEAN, (is_encrypted == 1),
      "kid", GST_TYPE_BUFFER, kid_buf, NULL);
  GST_DEBUG_OBJECT (qtdemux, "default sample properties: "
      "is_encrypted=%u, iv_size=%u", is_encrypted, iv_size);
  gst_buffer_unref (kid_buf);
  if (protection_scheme_type == FOURCC_cbcs) {
    if (crypt_byte_block != 0 || skip_byte_block != 0) {
      gst_structure_set (info->default_properties, "crypt_byte_block",
          G_TYPE_UINT, crypt_byte_block, "skip_byte_block", G_TYPE_UINT,
          skip_byte_block, NULL);
    }
    if (constant_iv != NULL) {
      GstBuffer *constant_iv_buf =
          gst_buffer_new_allocate (NULL, constant_iv_size, NULL);
      gst_buffer_fill (constant_iv_buf, 0, constant_iv, constant_iv_size);
      gst_structure_set (info->default_properties, "constant_iv_size",
          G_TYPE_UINT, constant_iv_size, "iv", GST_TYPE_BUFFER, constant_iv_buf,
          NULL);
      gst_buffer_unref (constant_iv_buf);
    }
    gst_structure_set (info->default_properties, "cipher-mode",
        G_TYPE_STRING, "cbcs", NULL);
  } else {
    gst_structure_set (info->default_properties, "cipher-mode",
        G_TYPE_STRING, "cenc", NULL);
  }
}
+
/* Parse a PIFF track-encryption payload from @br and update
 * @info->default_properties accordingly. Returns FALSE when the reader
 * runs out of data. */
static gboolean
qtdemux_update_default_piff_encryption_settings (GstQTDemux * qtdemux,
    QtDemuxCencSampleSetInfo * info, GstByteReader * br)
{
  guint32 algorithm_id = 0;
  const guint8 *kid;
  gboolean is_encrypted = TRUE;
  guint8 iv_size = 8;

  if (!gst_byte_reader_get_uint24_le (br, &algorithm_id)) {
    GST_ERROR_OBJECT (qtdemux, "Error getting box's algorithm ID field");
    return FALSE;
  }

  /* the algorithm id lives in the upper 16 bits of the 24-bit field */
  algorithm_id >>= 8;
  if (algorithm_id == 0) {
    is_encrypted = FALSE;
  } else if (algorithm_id == 1) {
    GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CTR encrypted stream");
  } else if (algorithm_id == 2) {
    GST_DEBUG_OBJECT (qtdemux, "AES 128-bits CBC encrypted stream");
  }

  if (!gst_byte_reader_get_uint8 (br, &iv_size))
    return FALSE;

  /* 16-byte default key ID follows */
  if (!gst_byte_reader_get_data (br, 16, &kid))
    return FALSE;

  /* PIFF has no cbcs pattern information, so pass zeros/NULL for those */
  qtdemux_update_default_sample_cenc_settings (qtdemux, info,
      is_encrypted, FOURCC_cenc, iv_size, kid, 0, 0, 0, NULL);
  gst_structure_set (info->default_properties, "piff_algorithm_id",
      G_TYPE_UINT, algorithm_id, NULL);
  return TRUE;
}
+
+
+static void
+qtdemux_parse_piff (GstQTDemux * qtdemux, const guint8 * buffer, gint length,
+ guint offset)
+{
+ GstByteReader br;
+ guint8 version;
+ guint32 flags = 0;
+ guint i;
+ guint iv_size = 8;
+ QtDemuxStream *stream;
+ GstStructure *structure;
+ QtDemuxCencSampleSetInfo *ss_info = NULL;
+ const gchar *system_id;
+ gboolean uses_sub_sample_encryption = FALSE;
+ guint32 sample_count;
+
+ if (QTDEMUX_N_STREAMS (qtdemux) == 0)
+ return;
+
+ stream = QTDEMUX_NTH_STREAM (qtdemux, 0);
+
+ structure = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+ if (!gst_structure_has_name (structure, "application/x-cenc")) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Attempting PIFF box parsing on an unencrypted stream.");
+ return;
+ }
+
+ gst_structure_get (structure, GST_PROTECTION_SYSTEM_ID_CAPS_FIELD,
+ G_TYPE_STRING, &system_id, NULL);
+ gst_qtdemux_append_protection_system_id (qtdemux, system_id);
+
+ stream->protected = TRUE;
+ stream->protection_scheme_type = FOURCC_cenc;
+
+ if (!stream->protection_scheme_info)
+ stream->protection_scheme_info = g_new0 (QtDemuxCencSampleSetInfo, 1);
+
+ ss_info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+ if (!ss_info->default_properties) {
+ ss_info->default_properties =
+ gst_structure_new ("application/x-cenc",
+ "iv_size", G_TYPE_UINT, iv_size, "encrypted", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+
+ }
+
+ if (ss_info->crypto_info) {
+ GST_LOG_OBJECT (qtdemux, "unreffing existing crypto_info");
+ g_ptr_array_free (ss_info->crypto_info, TRUE);
+ ss_info->crypto_info = NULL;
+ }
+
+ /* skip UUID */
+ gst_byte_reader_init (&br, buffer + offset + 16, length - offset - 16);
+
+ if (!gst_byte_reader_get_uint8 (&br, &version)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's version field");
+ return;
+ }
+
+ if (!gst_byte_reader_get_uint24_be (&br, &flags)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's flags field");
+ return;
+ }
+
+ if ((flags & 0x000001)) {
+ if (!qtdemux_update_default_piff_encryption_settings (qtdemux, ss_info,
+ &br))
+ return;
+ } else if ((flags & 0x000002)) {
+ uses_sub_sample_encryption = TRUE;
+ }
+
+ if (!gst_structure_get_uint (ss_info->default_properties, "iv_size",
+ &iv_size)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting encryption IV size field");
+ return;
+ }
+
+ if (!gst_byte_reader_get_uint32_be (&br, &sample_count)) {
+ GST_ERROR_OBJECT (qtdemux, "Error getting box's sample count field");
+ return;
+ }
+
+ ss_info->crypto_info =
+ g_ptr_array_new_full (sample_count,
+ (GDestroyNotify) qtdemux_gst_structure_free);
+
+ for (i = 0; i < sample_count; ++i) {
+ GstStructure *properties;
+ guint8 *data;
+ GstBuffer *buf;
+
+ properties = qtdemux_get_cenc_sample_properties (qtdemux, stream, i);
+ if (properties == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get properties for sample %u", i);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+
+ if (!gst_byte_reader_dup_data (&br, iv_size, &data)) {
+ GST_ERROR_OBJECT (qtdemux, "IV data not present for sample %u", i);
+ gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+ buf = gst_buffer_new_wrapped (data, iv_size);
+ gst_structure_set (properties, "iv", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+
+ if (uses_sub_sample_encryption) {
+ guint16 n_subsamples;
+ const GValue *kid_buf_value;
+
+ if (!gst_byte_reader_get_uint16_be (&br, &n_subsamples)
+ || n_subsamples == 0) {
+ GST_ERROR_OBJECT (qtdemux,
+ "failed to get subsample count for sample %u", i);
+ gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+ GST_LOG_OBJECT (qtdemux, "subsample count: %u", n_subsamples);
+ if (!gst_byte_reader_dup_data (&br, n_subsamples * 6, &data)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get subsample data for sample %u",
+ i);
+ gst_structure_free (properties);
+ qtdemux->cenc_aux_sample_count = i;
+ return;
+ }
+ buf = gst_buffer_new_wrapped (data, n_subsamples * 6);
+
+ kid_buf_value =
+ gst_structure_get_value (ss_info->default_properties, "kid");
+
+ gst_structure_set (properties,
+ "subsample_count", G_TYPE_UINT, n_subsamples,
+ "subsamples", GST_TYPE_BUFFER, buf, NULL);
+ gst_structure_set_value (properties, "kid", kid_buf_value);
+ gst_buffer_unref (buf);
+ } else {
+ gst_structure_set (properties, "subsample_count", G_TYPE_UINT, 0, NULL);
+ }
+
+ g_ptr_array_add (ss_info->crypto_info, properties);
+ }
+
+ qtdemux->cenc_aux_sample_count = sample_count;
+}
+
/* Dispatch a 'uuid' atom to the matching handler: XMP metadata tags,
 * PlayReady DRM (unsupported, raises an element error), or PIFF sample
 * encryption. Unknown uuids are logged and ignored. */
static void
qtdemux_parse_uuid (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
{
  static const guint8 xmp_uuid[] = { 0xBE, 0x7A, 0xCF, 0xCB,
    0x97, 0xA9, 0x42, 0xE8,
    0x9C, 0x71, 0x99, 0x94,
    0x91, 0xE3, 0xAF, 0xAC
  };
  static const guint8 playready_uuid[] = {
    0xd0, 0x8a, 0x4f, 0x18, 0x10, 0xf3, 0x4a, 0x82,
    0xb6, 0xc8, 0x32, 0xd8, 0xab, 0xa1, 0x83, 0xd3
  };

  static const guint8 piff_sample_encryption_uuid[] = {
    0xa2, 0x39, 0x4f, 0x52, 0x5a, 0x9b, 0x4f, 0x14,
    0xa2, 0x44, 0x6c, 0x42, 0x7c, 0x64, 0x8d, 0xf4
  };

  guint offset;

  /* counts as header data */
  qtdemux->header_size += length;

  /* NOTE(review): a zero size word presumably means the caller left a
   * 64-bit largesize header in place (16-byte header) vs. the normal
   * 8-byte header — confirm against the callers of this function */
  offset = (QT_UINT32 (buffer) == 0) ? 16 : 8;

  if (length <= offset + 16) {
    GST_DEBUG_OBJECT (qtdemux, "uuid atom is too short, skipping");
    return;
  }

  if (memcmp (buffer + offset, xmp_uuid, 16) == 0) {
    GstBuffer *buf;
    GstTagList *taglist;

    /* wrap the payload without copying; freed when buf is unreffed */
    buf = _gst_buffer_new_wrapped ((guint8 *) buffer + offset + 16,
        length - offset - 16, NULL);
    taglist = gst_tag_list_from_xmp_buffer (buf);
    gst_buffer_unref (buf);

    /* make sure we have a usable taglist */
    qtdemux->tag_list = gst_tag_list_make_writable (qtdemux->tag_list);

    qtdemux_handle_xmp_taglist (qtdemux, qtdemux->tag_list, taglist);

  } else if (memcmp (buffer + offset, playready_uuid, 16) == 0) {
    int len;
    const gunichar2 *s_utf16;
    char *contents;

    /* NOTE(review): 0x30/0x32 are assumed offsets of the UTF-16 license
     * blob inside the PlayReady header — confirm against the spec */
    len = GST_READ_UINT16_LE (buffer + offset + 0x30);
    s_utf16 = (const gunichar2 *) (buffer + offset + 0x32);
    contents = g_utf16_to_utf8 (s_utf16, len / 2, NULL, NULL, NULL);
    GST_ERROR_OBJECT (qtdemux, "contents: %s", contents);

    g_free (contents);

    GST_ELEMENT_ERROR (qtdemux, STREAM, DECRYPT,
        (_("Cannot play stream because it is encrypted with PlayReady DRM.")),
        (NULL));
  } else if (memcmp (buffer + offset, piff_sample_encryption_uuid, 16) == 0) {
    qtdemux_parse_piff (qtdemux, buffer, length, offset);
  } else {
    GST_DEBUG_OBJECT (qtdemux, "Ignoring unknown uuid: %08x-%08x-%08x-%08x",
        GST_READ_UINT32_LE (buffer + offset),
        GST_READ_UINT32_LE (buffer + offset + 4),
        GST_READ_UINT32_LE (buffer + offset + 8),
        GST_READ_UINT32_LE (buffer + offset + 12));
  }
}
+
/* Parse a 'sidx' (segment index) atom; on a complete parse, feed the
 * accumulated presentation time into the duration-update machinery. The
 * parser state is local and cleared before returning. */
static void
qtdemux_parse_sidx (GstQTDemux * qtdemux, const guint8 * buffer, gint length)
{
  GstSidxParser sidx_parser;
  GstIsoffParserResult res;
  guint consumed;

  gst_isoff_qt_sidx_parser_init (&sidx_parser);

  res =
      gst_isoff_qt_sidx_parser_add_data (&sidx_parser, buffer, length,
      &consumed);
  GST_DEBUG_OBJECT (qtdemux, "sidx parse result: %d", res);
  if (res == GST_ISOFF_QT_PARSER_DONE) {
    check_update_duration (qtdemux, sidx_parser.cumulative_pts);
  }
  gst_isoff_qt_sidx_parser_clear (&sidx_parser);
}
+
/* caller verifies at least 8 bytes in buf */
/* Read an atom's length and fourcc from the start of @data.
 * A stored length of 0 means "extends to end of file" and is mapped to
 * G_MAXUINT64; a stored length of 1 selects the 64-bit largesize following
 * the fourcc (only read when @size >= 16; otherwise the literal 1 is
 * returned unchanged). Either out-parameter may be NULL. */
static void
extract_initial_length_and_fourcc (const guint8 * data, guint size,
    guint64 * plength, guint32 * pfourcc)
{
  guint64 length;
  guint32 fourcc;

  length = QT_UINT32 (data);
  GST_DEBUG ("length 0x%08" G_GINT64_MODIFIER "x", length);
  fourcc = QT_FOURCC (data + 4);
  GST_DEBUG ("atom type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));

  if (length == 0) {
    length = G_MAXUINT64;
  } else if (length == 1 && size >= 16) {
    /* this means we have an extended size, which is the 64 bit value of
     * the next 8 bytes */
    length = QT_UINT64 (data + 8);
    GST_DEBUG ("length 0x%08" G_GINT64_MODIFIER "x", length);
  }

  if (plength)
    *plength = length;
  if (pfourcc)
    *pfourcc = fourcc;
}
+
/* Parse a 'mehd' (movie extends header) box from @br and store the
 * fragment duration (in movie-timescale units) into qtdemux->duration.
 * Version 1 carries a 64-bit duration, version 0 a 32-bit one.
 * Returns FALSE if the reader runs out of data. */
static gboolean
qtdemux_parse_mehd (GstQTDemux * qtdemux, GstByteReader * br)
{
  guint32 version = 0;
  GstClockTime duration = 0;

  if (!gst_byte_reader_get_uint32_be (br, &version))
    goto failed;

  /* version is the top byte of the version/flags word */
  version >>= 24;
  if (version == 1) {
    if (!gst_byte_reader_get_uint64_be (br, &duration))
      goto failed;
  } else {
    guint32 dur = 0;

    if (!gst_byte_reader_get_uint32_be (br, &dur))
      goto failed;
    duration = dur;
  }

  GST_INFO_OBJECT (qtdemux, "mehd duration: %" G_GUINT64_FORMAT, duration);
  qtdemux->duration = duration;

  return TRUE;

failed:
  {
    GST_DEBUG_OBJECT (qtdemux, "parsing mehd failed");
    return FALSE;
  }
}
+
/* Look up the 'trex' (track extends) defaults for @stream under
 * moov/mvex, caching them in the stream on first success
 * (stream->parsed_trex). The cached (or zero) defaults are always written
 * to the out-parameters; returns FALSE when no matching trex was ever
 * found, TRUE otherwise. */
static gboolean
qtdemux_parse_trex (GstQTDemux * qtdemux, QtDemuxStream * stream,
    guint32 * ds_duration, guint32 * ds_size, guint32 * ds_flags)
{
  if (!stream->parsed_trex && qtdemux->moov_node) {
    GNode *mvex, *trex;
    GstByteReader trex_data;

    mvex = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_mvex);
    if (mvex) {
      /* walk all trex siblings until one matches our track id */
      trex = qtdemux_tree_get_child_by_type_full (mvex, FOURCC_trex,
          &trex_data);
      while (trex) {
        guint32 id = 0, sdi = 0, dur = 0, size = 0, flags = 0;

        /* skip version/flags */
        if (!gst_byte_reader_skip (&trex_data, 4))
          goto next;
        if (!gst_byte_reader_get_uint32_be (&trex_data, &id))
          goto next;
        if (id != stream->track_id)
          goto next;
        if (!gst_byte_reader_get_uint32_be (&trex_data, &sdi))
          goto next;
        if (!gst_byte_reader_get_uint32_be (&trex_data, &dur))
          goto next;
        if (!gst_byte_reader_get_uint32_be (&trex_data, &size))
          goto next;
        if (!gst_byte_reader_get_uint32_be (&trex_data, &flags))
          goto next;

        GST_DEBUG_OBJECT (qtdemux, "fragment defaults for stream %d; "
            "duration %d, size %d, flags 0x%x", stream->track_id,
            dur, size, flags);

        /* cache so subsequent fragments skip the tree walk */
        stream->parsed_trex = TRUE;
        stream->def_sample_description_index = sdi;
        stream->def_sample_duration = dur;
        stream->def_sample_size = size;
        stream->def_sample_flags = flags;

      next:
        /* iterate all siblings */
        trex = qtdemux_tree_get_sibling_by_type_full (trex, FOURCC_trex,
            &trex_data);
      }
    }
  }

  *ds_duration = stream->def_sample_duration;
  *ds_size = stream->def_sample_size;
  *ds_flags = stream->def_sample_flags;

  /* even then, above values are better than random ... */
  if (G_UNLIKELY (!stream->parsed_trex)) {
    GST_WARNING_OBJECT (qtdemux,
        "failed to find fragment defaults for stream %d", stream->track_id);
    return FALSE;
  }

  return TRUE;
}
+
/* This method should be called whenever a more accurate duration might
 * have been found. It will update all relevant variables if/where needed:
 * the global demuxer duration, the current segment's duration/stop when
 * they mirror the previous total, and each stream's duration (including
 * open-ended dummy edit-list segments). Durations only ever grow here. */
static void
check_update_duration (GstQTDemux * qtdemux, GstClockTime duration)
{
  guint i;
  guint64 movdur;
  GstClockTime prevdur;

  movdur = GSTTIME_TO_QTTIME (qtdemux, duration);

  if (movdur > qtdemux->duration) {
    prevdur = QTTIME_TO_GSTTIME (qtdemux, qtdemux->duration);
    GST_DEBUG_OBJECT (qtdemux,
        "Updating total duration to %" GST_TIME_FORMAT " was %" GST_TIME_FORMAT,
        GST_TIME_ARGS (duration), GST_TIME_ARGS (prevdur));
    qtdemux->duration = movdur;
    GST_DEBUG_OBJECT (qtdemux,
        "qtdemux->segment.duration: %" GST_TIME_FORMAT " .stop: %"
        GST_TIME_FORMAT, GST_TIME_ARGS (qtdemux->segment.duration),
        GST_TIME_ARGS (qtdemux->segment.stop));
    if (qtdemux->segment.duration == prevdur) {
      /* If the current segment has duration/stop identical to previous duration
       * update them also (because they were set at that point in time with
       * the wrong duration */
      /* We convert the value *from* the timescale version to avoid rounding errors */
      GstClockTime fixeddur = QTTIME_TO_GSTTIME (qtdemux, movdur);
      GST_DEBUG_OBJECT (qtdemux, "Updated segment.duration and segment.stop");
      qtdemux->segment.duration = fixeddur;
      qtdemux->segment.stop = fixeddur;
    }
  }

  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);

    /* per-stream durations use the stream's own timescale */
    movdur = GSTTIME_TO_QTSTREAMTIME (stream, duration);
    if (movdur > stream->duration) {
      GST_DEBUG_OBJECT (qtdemux,
          "Updating stream #%d duration to %" GST_TIME_FORMAT, i,
          GST_TIME_ARGS (duration));
      stream->duration = movdur;
      /* internal duration tracking state has been updated above, so */
      /* preserve an open-ended dummy segment rather than repeatedly updating
       * it and spamming downstream accordingly with segment events */
      /* also mangle the edit list end time when fragmented with a single edit
       * list that may only cover any non-fragmented data */
      if ((stream->dummy_segment ||
              (qtdemux->fragmented && stream->n_segments == 1)) &&
          GST_CLOCK_TIME_IS_VALID (stream->segments[0].duration)) {
        /* Update all dummy values to new duration */
        stream->segments[0].stop_time = duration;
        stream->segments[0].duration = duration;
        stream->segments[0].media_stop = duration;

        /* let downstream know we possibly have a new stop time */
        if (stream->segment_index != -1) {
          GstClockTime pos;

          /* pick the position matching the playback direction */
          if (qtdemux->segment.rate >= 0) {
            pos = stream->segment.start;
          } else {
            pos = stream->segment.stop;
          }

          gst_qtdemux_stream_update_segment (qtdemux, stream,
              stream->segment_index, pos, NULL, NULL);
        }
      }
    }
  }
}
+
/* Parse a 'trun' (track fragment run) box and append its samples to
 * @stream's sample table.
 *
 * @d_sample_duration/@d_sample_size/@d_sample_flags: per-fragment defaults
 *   (from tfhd/trex) used for fields a run entry omits.
 * @base_offset/@running_offset: in/out tracking of the data offset across
 *   runs; -1 means "not established yet".
 * @decode_ts: base decode timestamp from an optional tfdt box, in stream
 *   timescale units; meaningful when @has_tfdt is TRUE.
 *
 * Returns TRUE on success (including the case where the run is skipped
 * because it lies before a pending seek target), FALSE on parse failure,
 * allocation failure, or an excessively large sample index. */
static gboolean
qtdemux_parse_trun (GstQTDemux * qtdemux, GstByteReader * trun,
    QtDemuxStream * stream, guint32 d_sample_duration, guint32 d_sample_size,
    guint32 d_sample_flags, gint64 moof_offset, gint64 moof_length,
    gint64 * base_offset, gint64 * running_offset, gint64 decode_ts,
    gboolean has_tfdt)
{
  GstClockTime gst_ts = GST_CLOCK_TIME_NONE;
  guint64 timestamp;
  gint32 data_offset = 0;
  guint32 flags = 0, first_flags = 0, samples_count = 0;
  gint i;
  guint8 *data;
  guint entry_size, dur_offset, size_offset, flags_offset = 0, ct_offset = 0;
  QtDemuxSample *sample;
  gboolean ismv = FALSE;
  gint64 initial_offset;

  GST_LOG_OBJECT (qtdemux, "parsing trun track-id %d; "
      "default dur %d, size %d, flags 0x%x, base offset %" G_GINT64_FORMAT ", "
      "decode ts %" G_GINT64_FORMAT, stream->track_id, d_sample_duration,
      d_sample_size, d_sample_flags, *base_offset, decode_ts);

  if (stream->pending_seek && moof_offset < stream->pending_seek->moof_offset) {
    GST_INFO_OBJECT (stream->pad, "skipping trun before seek target fragment");
    return TRUE;
  }

  /* presence of stss or not can't really tell us much,
   * and flags and so on tend to be marginally reliable in these files */
  if (stream->subtype == FOURCC_soun) {
    GST_DEBUG_OBJECT (qtdemux,
        "sound track in fragmented file; marking all keyframes");
    stream->all_keyframe = TRUE;
  }

  /* 1 byte version + 24-bit flags word */
  if (!gst_byte_reader_skip (trun, 1) ||
      !gst_byte_reader_get_uint24_be (trun, &flags))
    goto fail;

  if (!gst_byte_reader_get_uint32_be (trun, &samples_count))
    goto fail;

  if (flags & TR_DATA_OFFSET) {
    /* note this is really signed */
    if (!gst_byte_reader_get_int32_be (trun, &data_offset))
      goto fail;
    GST_LOG_OBJECT (qtdemux, "trun data offset %d", data_offset);
    /* default base offset = first byte of moof */
    if (*base_offset == -1) {
      GST_LOG_OBJECT (qtdemux, "base_offset at moof");
      *base_offset = moof_offset;
    }
    *running_offset = *base_offset + data_offset;
  } else {
    /* if no offset at all, that would mean data starts at moof start,
     * which is a bit wrong and is ismv crappy way, so compensate
     * assuming data is in mdat following moof */
    if (*base_offset == -1) {
      *base_offset = moof_offset + moof_length + 8;
      GST_LOG_OBJECT (qtdemux, "base_offset assumed in mdat after moof");
      ismv = TRUE;
    }
    if (*running_offset == -1)
      *running_offset = *base_offset;
  }

  GST_LOG_OBJECT (qtdemux, "running offset now %" G_GINT64_FORMAT,
      *running_offset);
  GST_LOG_OBJECT (qtdemux, "trun offset %d, flags 0x%x, entries %d",
      data_offset, flags, samples_count);

  if (flags & TR_FIRST_SAMPLE_FLAGS) {
    if (G_UNLIKELY (flags & TR_SAMPLE_FLAGS)) {
      GST_DEBUG_OBJECT (qtdemux,
          "invalid flags; SAMPLE and FIRST_SAMPLE present, discarding latter");
      flags ^= TR_FIRST_SAMPLE_FLAGS;
    } else {
      if (!gst_byte_reader_get_uint32_be (trun, &first_flags))
        goto fail;
      GST_LOG_OBJECT (qtdemux, "first flags: 0x%x", first_flags);
    }
  }

  /* FIXME ? spec says other bits should also be checked to determine
   * entry size (and prefix size for that matter) */
  /* compute per-entry layout: each optional field adds 4 bytes, and we
   * remember its offset inside an entry for the parse loop below */
  entry_size = 0;
  dur_offset = size_offset = 0;
  if (flags & TR_SAMPLE_DURATION) {
    GST_LOG_OBJECT (qtdemux, "entry duration present");
    dur_offset = entry_size;
    entry_size += 4;
  }
  if (flags & TR_SAMPLE_SIZE) {
    GST_LOG_OBJECT (qtdemux, "entry size present");
    size_offset = entry_size;
    entry_size += 4;
  }
  if (flags & TR_SAMPLE_FLAGS) {
    GST_LOG_OBJECT (qtdemux, "entry flags present");
    flags_offset = entry_size;
    entry_size += 4;
  }
  if (flags & TR_COMPOSITION_TIME_OFFSETS) {
    GST_LOG_OBJECT (qtdemux, "entry ct offset present");
    ct_offset = entry_size;
    entry_size += 4;
  }

  if (!qt_atom_parser_has_chunks (trun, samples_count, entry_size))
    goto fail;
  data = (guint8 *) gst_byte_reader_peek_data_unchecked (trun);

  /* refuse to grow the sample index beyond a sanity bound */
  if (stream->n_samples + samples_count >=
      QTDEMUX_MAX_SAMPLE_INDEX_SIZE / sizeof (QtDemuxSample))
    goto index_too_big;

  GST_DEBUG_OBJECT (qtdemux, "allocating n_samples %u * %u (%.2f MB)",
      stream->n_samples + samples_count, (guint) sizeof (QtDemuxSample),
      (stream->n_samples + samples_count) *
      sizeof (QtDemuxSample) / (1024.0 * 1024.0));

  /* create a new array of samples if it's the first sample parsed */
  if (stream->n_samples == 0) {
    g_assert (stream->samples == NULL);
    stream->samples = g_try_new0 (QtDemuxSample, samples_count);
    /* or try to reallocate it with space enough to insert the new samples */
  } else
    stream->samples = g_try_renew (QtDemuxSample, stream->samples,
        stream->n_samples + samples_count);
  if (stream->samples == NULL)
    goto out_of_memory;

  /* establish the base timestamp of this run, in order of preference:
   * fragment_start override, tfdt decode time, pending-seek entry, or the
   * end of the previously parsed samples */
  if (qtdemux->fragment_start != -1) {
    timestamp = GSTTIME_TO_QTSTREAMTIME (stream, qtdemux->fragment_start);
    qtdemux->fragment_start = -1;
  } else {
    if (stream->n_samples == 0) {
      if (decode_ts > 0) {
        timestamp = decode_ts;
      } else if (stream->pending_seek != NULL) {
        /* if we don't have a timestamp from a tfdt box, we'll use the one
         * from the mfra seek table */
        GST_INFO_OBJECT (stream->pad, "pending seek ts = %" GST_TIME_FORMAT,
            GST_TIME_ARGS (stream->pending_seek->ts));

        /* FIXME: this is not fully correct, the timestamp refers to the random
         * access sample refered to in the tfra entry, which may not necessarily
         * be the first sample in the tfrag/trun (but hopefully/usually is) */
        timestamp = GSTTIME_TO_QTSTREAMTIME (stream, stream->pending_seek->ts);
      } else {
        timestamp = 0;
      }

      gst_ts = QTSTREAMTIME_TO_GSTTIME (stream, timestamp);
      GST_INFO_OBJECT (stream->pad, "first sample ts %" GST_TIME_FORMAT,
          GST_TIME_ARGS (gst_ts));
    } else {
      /* subsequent fragments extend stream */
      timestamp =
          stream->samples[stream->n_samples - 1].timestamp +
          stream->samples[stream->n_samples - 1].duration;

      /* If this is a GST_FORMAT_BYTES stream and there's a significant
       * difference (1 sec.) between decode_ts and timestamp, prefer the
       * former */
      if (has_tfdt && !qtdemux->upstream_format_is_time
          && ABSDIFF (decode_ts, timestamp) >
          MAX (stream->duration_last_moof / 2,
              GSTTIME_TO_QTSTREAMTIME (stream, GST_SECOND))) {
        GST_INFO_OBJECT (qtdemux,
            "decode_ts (%" GST_TIME_FORMAT ") and timestamp (%" GST_TIME_FORMAT
            ") are significantly different (more than %" GST_TIME_FORMAT
            "), using decode_ts",
            GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, decode_ts)),
            GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, timestamp)),
            GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream,
                    MAX (stream->duration_last_moof / 2,
                        GSTTIME_TO_QTSTREAMTIME (stream, GST_SECOND)))));
        timestamp = decode_ts;
      }

      gst_ts = QTSTREAMTIME_TO_GSTTIME (stream, timestamp);
      GST_INFO_OBJECT (qtdemux, "first sample ts %" GST_TIME_FORMAT
          " (extends previous samples)", GST_TIME_ARGS (gst_ts));
    }
  }

  initial_offset = *running_offset;

  /* fill in one QtDemuxSample per run entry, falling back to the fragment
   * defaults for any field the entry omits */
  sample = stream->samples + stream->n_samples;
  for (i = 0; i < samples_count; i++) {
    guint32 dur, size, sflags, ct;

    /* first read sample data */
    if (flags & TR_SAMPLE_DURATION) {
      dur = QT_UINT32 (data + dur_offset);
    } else {
      dur = d_sample_duration;
    }
    if (flags & TR_SAMPLE_SIZE) {
      size = QT_UINT32 (data + size_offset);
    } else {
      size = d_sample_size;
    }
    if (flags & TR_FIRST_SAMPLE_FLAGS) {
      if (i == 0) {
        sflags = first_flags;
      } else {
        sflags = d_sample_flags;
      }
    } else if (flags & TR_SAMPLE_FLAGS) {
      sflags = QT_UINT32 (data + flags_offset);
    } else {
      sflags = d_sample_flags;
    }
    if (flags & TR_COMPOSITION_TIME_OFFSETS) {
      ct = QT_UINT32 (data + ct_offset);
    } else {
      ct = 0;
    }
    data += entry_size;

    /* fill the sample information */
    sample->offset = *running_offset;
    sample->pts_offset = ct;
    sample->size = size;
    sample->timestamp = timestamp;
    sample->duration = dur;
    /* sample-is-difference-sample */
    /* ismv seems to use 0x40 for keyframe, 0xc0 for non-keyframe,
     * now idea how it relates to bitfield other than massive LE/BE confusion */
    sample->keyframe = ismv ? ((sflags & 0xff) == 0x40) : !(sflags & 0x10000);
    *running_offset += size;
    timestamp += dur;
    stream->duration_moof += dur;
    sample++;
  }

  /* Update total duration if needed */
  check_update_duration (qtdemux, QTSTREAMTIME_TO_GSTTIME (stream, timestamp));

  /* Pre-emptively figure out size of mdat based on trun information.
   * If the [mdat] atom is effectivelly read, it will be replaced by the actual
   * size, else we will still be able to use this when dealing with gap'ed
   * input */
  qtdemux->mdatleft = *running_offset - initial_offset;
  qtdemux->mdatoffset = initial_offset;
  qtdemux->mdatsize = qtdemux->mdatleft;

  stream->n_samples += samples_count;
  stream->n_samples_moof += samples_count;

  if (stream->pending_seek != NULL)
    stream->pending_seek = NULL;

  return TRUE;

fail:
  {
    GST_WARNING_OBJECT (qtdemux, "failed to parse trun");
    return FALSE;
  }
out_of_memory:
  {
    GST_WARNING_OBJECT (qtdemux, "failed to allocate %d samples",
        stream->n_samples);
    return FALSE;
  }
index_too_big:
  {
    GST_WARNING_OBJECT (qtdemux, "not allocating index of %d samples, would "
        "be larger than %uMB (broken file?)", stream->n_samples,
        QTDEMUX_MAX_SAMPLE_INDEX_SIZE >> 20);
    return FALSE;
  }
}
+
+/* find stream with @id */
+static inline QtDemuxStream *
+qtdemux_find_stream (GstQTDemux * qtdemux, guint32 id)
+{
+ QtDemuxStream *stream;
+ gint i;
+
+ /* check */
+ if (G_UNLIKELY (!id)) {
+ GST_DEBUG_OBJECT (qtdemux, "invalid track id 0");
+ return NULL;
+ }
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ if (stream->track_id == id)
+ return stream;
+ }
+ if (qtdemux->mss_mode) {
+ /* mss should have only 1 stream anyway */
+ return QTDEMUX_NTH_STREAM (qtdemux, 0);
+ }
+
+ return NULL;
+}
+
+static gboolean
+qtdemux_parse_mfhd (GstQTDemux * qtdemux, GstByteReader * mfhd,
+ guint32 * fragment_number)
+{
+ if (!gst_byte_reader_skip (mfhd, 4))
+ goto fail;
+ if (!gst_byte_reader_get_uint32_be (mfhd, fragment_number))
+ goto fail;
+ return TRUE;
+fail:
+ {
+ GST_WARNING_OBJECT (qtdemux, "Failed to parse mfhd atom");
+ return FALSE;
+ }
+}
+
/* Parse a 'tfhd' (track fragment header) box: resolve the target stream by
 * track id and fill in the fragment's default sample duration/size/flags
 * (falling back to trex defaults) plus an optional explicit base data
 * offset. Returns FALSE on malformed data; an unknown track id is treated
 * as success with *stream left NULL so the caller can skip the fragment. */
static gboolean
qtdemux_parse_tfhd (GstQTDemux * qtdemux, GstByteReader * tfhd,
    QtDemuxStream ** stream, guint32 * default_sample_duration,
    guint32 * default_sample_size, guint32 * default_sample_flags,
    gint64 * base_offset)
{
  guint32 flags = 0;
  guint32 track_id = 0;

  /* 1 byte version + 24-bit flags */
  if (!gst_byte_reader_skip (tfhd, 1) ||
      !gst_byte_reader_get_uint24_be (tfhd, &flags))
    goto invalid_track;

  if (!gst_byte_reader_get_uint32_be (tfhd, &track_id))
    goto invalid_track;

  *stream = qtdemux_find_stream (qtdemux, track_id);
  if (G_UNLIKELY (!*stream))
    goto unknown_stream;

  if (flags & TF_DEFAULT_BASE_IS_MOOF)
    *base_offset = qtdemux->moof_offset;

  if (flags & TF_BASE_DATA_OFFSET)
    if (!gst_byte_reader_get_uint64_be (tfhd, (guint64 *) base_offset))
      goto invalid_track;

  /* obtain stream defaults */
  qtdemux_parse_trex (qtdemux, *stream,
      default_sample_duration, default_sample_size, default_sample_flags);

  /* NOTE(review): sample description indices are 1-based, hence the -1;
   * if no trex was found def_sample_description_index is 0 and this wraps
   * to G_MAXUINT32 — presumably corrected by the branches below or by the
   * caller, confirm */
  (*stream)->stsd_sample_description_id =
      (*stream)->def_sample_description_index - 1;

  if (flags & TF_SAMPLE_DESCRIPTION_INDEX) {
    guint32 sample_description_index;
    if (!gst_byte_reader_get_uint32_be (tfhd, &sample_description_index))
      goto invalid_track;
    (*stream)->stsd_sample_description_id = sample_description_index - 1;
  }

  if (qtdemux->mss_mode) {
    /* mss has no stsd entry */
    (*stream)->stsd_sample_description_id = 0;
  }

  if (flags & TF_DEFAULT_SAMPLE_DURATION)
    if (!gst_byte_reader_get_uint32_be (tfhd, default_sample_duration))
      goto invalid_track;

  if (flags & TF_DEFAULT_SAMPLE_SIZE)
    if (!gst_byte_reader_get_uint32_be (tfhd, default_sample_size))
      goto invalid_track;

  if (flags & TF_DEFAULT_SAMPLE_FLAGS)
    if (!gst_byte_reader_get_uint32_be (tfhd, default_sample_flags))
      goto invalid_track;

  return TRUE;

invalid_track:
  {
    GST_WARNING_OBJECT (qtdemux, "invalid track fragment header");
    return FALSE;
  }
unknown_stream:
  {
    GST_DEBUG_OBJECT (qtdemux, "unknown stream (%u) in tfhd", track_id);
    return TRUE;
  }
}
+
/* Parse a 'tfdt' (track fragment decode time) box from @br into
 * *@decode_time (stream timescale units). Version 1 carries a 64-bit
 * value, version 0 a 32-bit one. Returns FALSE on short data. */
static gboolean
qtdemux_parse_tfdt (GstQTDemux * qtdemux, GstByteReader * br,
    guint64 * decode_time)
{
  guint32 version = 0;

  if (!gst_byte_reader_get_uint32_be (br, &version))
    return FALSE;

  /* version is the top byte of the version/flags word */
  version >>= 24;
  if (version == 1) {
    if (!gst_byte_reader_get_uint64_be (br, decode_time))
      goto failed;
  } else {
    guint32 dec_time = 0;
    if (!gst_byte_reader_get_uint32_be (br, &dec_time))
      goto failed;
    *decode_time = dec_time;
  }

  GST_INFO_OBJECT (qtdemux, "Track fragment decode time: %" G_GUINT64_FORMAT,
      *decode_time);

  return TRUE;

failed:
  {
    GST_DEBUG_OBJECT (qtdemux, "parsing tfdt failed");
    return FALSE;
  }
}
+
/* Returns a newly-allocated GstStructure containing the cenc properties of
 * the stream sample identified by @sample_index. The caller must free the
 * returned structure (gst_structure_free) after use. Returns NULL if
 * unsuccessful. */
static GstStructure *
qtdemux_get_cenc_sample_properties (GstQTDemux * qtdemux,
    QtDemuxStream * stream, guint sample_index)
{
  QtDemuxCencSampleSetInfo *info = NULL;

  g_return_val_if_fail (stream != NULL, NULL);
  g_return_val_if_fail (stream->protected, NULL);
  g_return_val_if_fail (stream->protection_scheme_info != NULL, NULL);

  info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;

  /* Currently, cenc properties for groups of samples are not supported, so
   * simply return a copy of the default sample properties */
  return gst_structure_copy (info->default_properties);
}
+
/* Parses the sizes of sample auxiliary information contained within a stream,
 * as given in a saiz box. Returns a newly-allocated array of *sample_count
 * guint8 size values (caller frees with g_free), or NULL on failure. When
 * the box specifies a non-zero default size, every entry gets that value;
 * otherwise per-sample sizes are read from the box. */
static guint8 *
qtdemux_parse_saiz (GstQTDemux * qtdemux, QtDemuxStream * stream,
    GstByteReader * br, guint32 * sample_count)
{
  guint32 flags = 0;
  guint8 *info_sizes;
  guint8 default_info_size;

  g_return_val_if_fail (qtdemux != NULL, NULL);
  g_return_val_if_fail (stream != NULL, NULL);
  g_return_val_if_fail (br != NULL, NULL);
  g_return_val_if_fail (sample_count != NULL, NULL);

  if (!gst_byte_reader_get_uint32_be (br, &flags))
    return NULL;

  if (flags & 0x1) {
    /* aux_info_type and aux_info_type_parameter are ignored */
    if (!gst_byte_reader_skip (br, 8))
      return NULL;
  }

  if (!gst_byte_reader_get_uint8 (br, &default_info_size))
    return NULL;
  GST_DEBUG_OBJECT (qtdemux, "default_info_size: %u", default_info_size);

  /* NOTE(review): sample_count comes straight from the box and is not
   * range-checked here; a hostile value drives the allocation below —
   * the dup_data path is bounded by the reader, the g_new path is not */
  if (!gst_byte_reader_get_uint32_be (br, sample_count))
    return NULL;
  GST_DEBUG_OBJECT (qtdemux, "sample_count: %u", *sample_count);


  if (default_info_size == 0) {
    if (!gst_byte_reader_dup_data (br, *sample_count, &info_sizes)) {
      return NULL;
    }
  } else {
    info_sizes = g_new (guint8, *sample_count);
    memset (info_sizes, default_info_size, *sample_count);
  }

  return info_sizes;
}
+
+/* Parses the offset of sample auxiliary information contained within a stream,
+ * as given in a saio box. Returns TRUE if successful; FALSE otherwise.
+ *
+ * On success *offset holds the (single) auxiliary data offset, and, when
+ * non-NULL, *info_type / *info_type_parameter receive the aux info type
+ * fourcc and its parameter. Only saio boxes with exactly one entry are
+ * supported. */
+static gboolean
+qtdemux_parse_saio (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstByteReader * br, guint32 * info_type, guint32 * info_type_parameter,
+ guint64 * offset)
+{
+ guint8 version = 0;
+ guint32 flags = 0;
+ guint32 aux_info_type = 0;
+ guint32 aux_info_type_parameter = 0;
+ guint32 entry_count;
+ guint32 off_32;
+ guint64 off_64;
+ const guint8 *aux_info_type_data = NULL;
+
+ g_return_val_if_fail (qtdemux != NULL, FALSE);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (br != NULL, FALSE);
+ g_return_val_if_fail (offset != NULL, FALSE);
+
+ /* full-box header: 8-bit version plus 24-bit flags */
+ if (!gst_byte_reader_get_uint8 (br, &version))
+ return FALSE;
+
+ if (!gst_byte_reader_get_uint24_be (br, &flags))
+ return FALSE;
+
+ if (flags & 0x1) {
+
+ /* flag bit 0: an explicit aux_info_type fourcc and parameter follow */
+ if (!gst_byte_reader_get_data (br, 4, &aux_info_type_data))
+ return FALSE;
+ aux_info_type = QT_FOURCC (aux_info_type_data);
+
+ if (!gst_byte_reader_get_uint32_be (br, &aux_info_type_parameter))
+ return FALSE;
+ } else if (stream->protected) {
+ /* no explicit type: protected streams default to the protection scheme */
+ aux_info_type = stream->protection_scheme_type;
+ } else {
+ /* otherwise fall back to the current sample entry's fourcc */
+ aux_info_type = CUR_STREAM (stream)->fourcc;
+ }
+
+ if (info_type)
+ *info_type = aux_info_type;
+ if (info_type_parameter)
+ *info_type_parameter = aux_info_type_parameter;
+
+ GST_DEBUG_OBJECT (qtdemux, "aux_info_type: '%" GST_FOURCC_FORMAT "', "
+ "aux_info_type_parameter: %#06x",
+ GST_FOURCC_ARGS (aux_info_type), aux_info_type_parameter);
+
+ if (!gst_byte_reader_get_uint32_be (br, &entry_count))
+ return FALSE;
+
+ /* one offset per chunk/run is not implemented; require a single entry */
+ if (entry_count != 1) {
+ GST_ERROR_OBJECT (qtdemux, "multiple offsets are not supported");
+ return FALSE;
+ }
+
+ /* version 0 stores a 32-bit offset, version 1 a 64-bit one */
+ if (version == 0) {
+ if (!gst_byte_reader_get_uint32_be (br, &off_32))
+ return FALSE;
+ *offset = (guint64) off_32;
+ } else {
+ if (!gst_byte_reader_get_uint64_be (br, &off_64))
+ return FALSE;
+ *offset = off_64;
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "offset: %" G_GUINT64_FORMAT, *offset);
+ return TRUE;
+}
+
+/* GDestroyNotify-compatible wrapper around gst_structure_free() that
+ * tolerates NULL entries (the crypto_info array may contain NULLs for
+ * entries that were transferred to a newer array). */
+static void
+qtdemux_gst_structure_free (GstStructure * gststructure)
+{
+ if (gststructure == NULL)
+ return;
+
+ gst_structure_free (gststructure);
+}
+
+/* Parses auxiliary information relating to samples protected using
+ * Common Encryption (cenc); the format of this information
+ * is defined in ISO/IEC 23001-7. Returns TRUE if successful; FALSE
+ * otherwise.
+ *
+ * One GstStructure of crypto properties (IV, subsample map, ...) is
+ * produced per sample and appended to the stream's crypto_info array;
+ * unconsumed entries from the previous fragment are carried over so
+ * they stay matched with their not-yet-pushed samples. @info_sizes must
+ * hold @sample_count per-sample aux info sizes (from the saiz box). */
+static gboolean
+qtdemux_parse_cenc_aux_info (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstByteReader * br, guint8 * info_sizes, guint32 sample_count)
+{
+ QtDemuxCencSampleSetInfo *ss_info = NULL;
+ guint8 size;
+ /* NOTE(review): i is signed while sample_count and ->len are unsigned;
+ * confirm sample_count can never exceed G_MAXINT upstream. */
+ gint i;
+ GPtrArray *old_crypto_info = NULL;
+ guint old_entries = 0;
+
+ g_return_val_if_fail (qtdemux != NULL, FALSE);
+ g_return_val_if_fail (stream != NULL, FALSE);
+ g_return_val_if_fail (br != NULL, FALSE);
+ g_return_val_if_fail (stream->protected, FALSE);
+ g_return_val_if_fail (stream->protection_scheme_info != NULL, FALSE);
+
+ ss_info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+
+ if (ss_info->crypto_info) {
+ old_crypto_info = ss_info->crypto_info;
+ /* Count number of non-null entries remaining at the tail end */
+ for (i = old_crypto_info->len - 1; i >= 0; i--) {
+ if (g_ptr_array_index (old_crypto_info, i) == NULL)
+ break;
+ old_entries++;
+ }
+ }
+
+ ss_info->crypto_info =
+ g_ptr_array_new_full (sample_count + old_entries,
+ (GDestroyNotify) qtdemux_gst_structure_free);
+
+ /* We preserve old entries because we parse the next moof in advance
+ * of consuming all samples from the previous moof, and otherwise
+ * we'd discard the corresponding crypto info for the samples
+ * from the previous fragment. */
+ if (old_entries) {
+ GST_DEBUG_OBJECT (qtdemux, "Preserving %d old crypto info entries",
+ old_entries);
+ for (i = old_crypto_info->len - old_entries; i < old_crypto_info->len; i++) {
+ /* move ownership to the new array; NULL-out the old slot so the
+ * free below does not destroy the transferred structure */
+ g_ptr_array_add (ss_info->crypto_info, g_ptr_array_index (old_crypto_info,
+ i));
+ g_ptr_array_index (old_crypto_info, i) = NULL;
+ }
+ }
+
+ if (old_crypto_info) {
+ /* Everything now belongs to the new array */
+ g_ptr_array_free (old_crypto_info, TRUE);
+ }
+
+ for (i = 0; i < sample_count; ++i) {
+ GstStructure *properties;
+ guint16 n_subsamples = 0;
+ guint8 *data;
+ guint iv_size;
+ GstBuffer *buf;
+ gboolean could_read_iv;
+
+ properties = qtdemux_get_cenc_sample_properties (qtdemux, stream, i);
+ if (properties == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get properties for sample %u", i);
+ return FALSE;
+ }
+ if (!gst_structure_get_uint (properties, "iv_size", &iv_size)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get iv_size for sample %u", i);
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ /* a zero iv_size means no per-sample IV is stored in the aux data */
+ could_read_iv =
+ iv_size > 0 ? gst_byte_reader_dup_data (br, iv_size, &data) : FALSE;
+ if (could_read_iv) {
+ /* wrap the duplicated IV bytes; the buffer takes ownership of data */
+ buf = gst_buffer_new_wrapped (data, iv_size);
+ gst_structure_set (properties, "iv", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ } else if (stream->protection_scheme_type == FOURCC_cbcs) {
+ /* cbcs may use a constant IV from the sample entry instead of a
+ * per-sample IV; promote constant_iv_size to iv_size */
+ const GValue *constant_iv_size_value =
+ gst_structure_get_value (properties, "constant_iv_size");
+ const GValue *constant_iv_value =
+ gst_structure_get_value (properties, "iv");
+ if (constant_iv_size_value == NULL || constant_iv_value == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get constant_iv");
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ gst_structure_set_value (properties, "iv_size", constant_iv_size_value);
+ gst_structure_remove_field (properties, "constant_iv_size");
+ } else if (stream->protection_scheme_type == FOURCC_cenc) {
+ /* cenc always requires a per-sample IV */
+ GST_ERROR_OBJECT (qtdemux, "failed to get IV for sample %u", i);
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ size = info_sizes[i];
+ /* aux info larger than the IV implies a subsample map follows:
+ * a 16-bit count then 6 bytes (clear/encrypted split) per subsample */
+ if (size > iv_size) {
+ if (!gst_byte_reader_get_uint16_be (br, &n_subsamples)
+ || !(n_subsamples > 0)) {
+ gst_structure_free (properties);
+ GST_ERROR_OBJECT (qtdemux,
+ "failed to get subsample count for sample %u", i);
+ return FALSE;
+ }
+ GST_LOG_OBJECT (qtdemux, "subsample count: %u", n_subsamples);
+ if (!gst_byte_reader_dup_data (br, n_subsamples * 6, &data)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to get subsample data for sample %u",
+ i);
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ buf = gst_buffer_new_wrapped (data, n_subsamples * 6);
+ if (!buf) {
+ gst_structure_free (properties);
+ return FALSE;
+ }
+ gst_structure_set (properties,
+ "subsample_count", G_TYPE_UINT, n_subsamples,
+ "subsamples", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ } else {
+ gst_structure_set (properties, "subsample_count", G_TYPE_UINT, 0, NULL);
+ }
+ g_ptr_array_add (ss_info->crypto_info, properties);
+ }
+ return TRUE;
+}
+
+/* Converts a UUID in raw byte form to a string representation, as defined in
+ * RFC 4122 (8-4-4-4-12 lowercase hex groups). The caller takes ownership of
+ * the returned string and is responsible for freeing it after use. */
+static gchar *
+qtdemux_uuid_bytes_to_string (gconstpointer uuid_bytes)
+{
+ const guint8 *u = (const guint8 *) uuid_bytes;
+ /* 36 characters plus the terminating NUL */
+ gchar str[37];
+
+ g_snprintf (str, sizeof (str),
+ "%02x%02x%02x%02x-%02x%02x-%02x%02x-"
+ "%02x%02x-%02x%02x%02x%02x%02x%02x",
+ u[0], u[1], u[2], u[3], u[4], u[5], u[6], u[7],
+ u[8], u[9], u[10], u[11], u[12], u[13], u[14], u[15]);
+
+ return g_strdup (str);
+}
+
+/* Parses a Protection System Specific Header box (pssh), as defined in the
+ * Common Encryption (cenc) standard (ISO/IEC 23001-7), which contains
+ * information needed by a specific content protection system in order to
+ * decrypt cenc-protected tracks. Returns TRUE if successful; FALSE
+ * otherwise.
+ *
+ * The system id is recorded on the demuxer, and a protection event that
+ * wraps the complete raw pssh box is queued on every stream. @node->data
+ * points at the raw box (32-bit size, fourcc, version/flags, then the
+ * 16-byte system id at offset 12). */
+static gboolean
+qtdemux_parse_pssh (GstQTDemux * qtdemux, GNode * node)
+{
+ gchar *sysid_string;
+ guint32 pssh_size = QT_UINT32 (node->data);
+ GstBuffer *pssh = NULL;
+ GstEvent *event = NULL;
+ guint32 parent_box_type;
+ gint i;
+
+ /* smallest valid pssh: header (12) + system id (16) + data size (4) */
+ if (G_UNLIKELY (pssh_size < 32U)) {
+ GST_ERROR_OBJECT (qtdemux, "invalid box size");
+ return FALSE;
+ }
+
+ sysid_string =
+ qtdemux_uuid_bytes_to_string ((const guint8 *) node->data + 12);
+
+ gst_qtdemux_append_protection_system_id (qtdemux, sysid_string);
+
+ pssh = gst_buffer_new_memdup (node->data, pssh_size);
+ GST_LOG_OBJECT (qtdemux, "cenc pssh size: %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (pssh));
+
+ /* the origin context tells downstream whether this came from the movie
+ * header or a fragment */
+ parent_box_type = QT_FOURCC ((const guint8 *) node->parent->data + 4);
+
+ /* Push an event containing the pssh box onto the queues of all streams. */
+ event = gst_event_new_protection (sysid_string, pssh,
+ (parent_box_type == FOURCC_moov) ? "isobmff/moov" : "isobmff/moof");
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ GST_TRACE_OBJECT (qtdemux,
+ "adding protection event for stream %s and system %s",
+ stream->stream_id, sysid_string);
+ g_queue_push_tail (&stream->protection_scheme_event_queue,
+ gst_event_ref (event));
+ }
+ g_free (sysid_string);
+ gst_event_unref (event);
+ gst_buffer_unref (pssh);
+ return TRUE;
+}
+
+/* Parses a movie fragment (moof) box of @length bytes at @buffer.
+ *
+ * Walks every traf child: reads the tfhd defaults, any cenc auxiliary
+ * info (saiz/saio pair), the tfdt decode time and all trun sample runs,
+ * then processes moof-level pssh boxes. If the very first fragment of a
+ * non-time-based stream starts at a non-zero tfdt, an internal seek to
+ * that time is performed so playback does not stall catching up.
+ * Returns TRUE on success; posts a STREAM/DEMUX element error and
+ * returns FALSE if the fragment is corrupt. */
+static gboolean
+qtdemux_parse_moof (GstQTDemux * qtdemux, const guint8 * buffer, guint length,
+ guint64 moof_offset, QtDemuxStream * stream)
+{
+ GNode *moof_node, *traf_node, *tfhd_node, *trun_node, *tfdt_node, *mfhd_node;
+ GNode *uuid_node;
+ GstByteReader mfhd_data, trun_data, tfhd_data, tfdt_data;
+ GNode *saiz_node, *saio_node, *pssh_node;
+ GstByteReader saiz_data, saio_data;
+ guint32 ds_size = 0, ds_duration = 0, ds_flags = 0;
+ gint64 base_offset, running_offset;
+ guint32 frag_num;
+ GstClockTime min_dts = GST_CLOCK_TIME_NONE;
+
+ /* NOTE @stream ignored */
+
+ moof_node = g_node_new ((guint8 *) buffer);
+ qtdemux_parse_node (qtdemux, moof_node, buffer, length);
+ qtdemux_node_dump (qtdemux, moof_node);
+
+ /* Get fragment number from mfhd and check it's valid */
+ mfhd_node =
+ qtdemux_tree_get_child_by_type_full (moof_node, FOURCC_mfhd, &mfhd_data);
+ if (mfhd_node == NULL)
+ goto missing_mfhd;
+ if (!qtdemux_parse_mfhd (qtdemux, &mfhd_data, &frag_num))
+ goto fail;
+ GST_DEBUG_OBJECT (qtdemux, "Fragment #%d", frag_num);
+
+ /* unknown base_offset to start with */
+ base_offset = running_offset = -1;
+ traf_node = qtdemux_tree_get_child_by_type (moof_node, FOURCC_traf);
+ while (traf_node) {
+ guint64 decode_time = 0;
+
+ /* Fragment Header node */
+ tfhd_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_tfhd,
+ &tfhd_data);
+ if (!tfhd_node)
+ goto missing_tfhd;
+ /* a tfhd parse failure reuses the missing_tfhd error path */
+ if (!qtdemux_parse_tfhd (qtdemux, &tfhd_data, &stream, &ds_duration,
+ &ds_size, &ds_flags, &base_offset))
+ goto missing_tfhd;
+
+ /* The following code assumes at most a single set of sample auxiliary
+ * data in the fragment (consisting of a saiz box and a corresponding saio
+ * box); in theory, however, there could be multiple sets of sample
+ * auxiliary data in a fragment. */
+ saiz_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_saiz,
+ &saiz_data);
+ if (saiz_node) {
+ guint32 info_type = 0;
+ guint64 offset = 0;
+ guint32 info_type_parameter = 0;
+
+ /* drop the aux info sizes from any previous fragment */
+ g_free (qtdemux->cenc_aux_info_sizes);
+
+ qtdemux->cenc_aux_info_sizes =
+ qtdemux_parse_saiz (qtdemux, stream, &saiz_data,
+ &qtdemux->cenc_aux_sample_count);
+ if (qtdemux->cenc_aux_info_sizes == NULL) {
+ GST_ERROR_OBJECT (qtdemux, "failed to parse saiz box");
+ goto fail;
+ }
+ saio_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_saio,
+ &saio_data);
+ if (!saio_node) {
+ GST_ERROR_OBJECT (qtdemux, "saiz box without a corresponding saio box");
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ goto fail;
+ }
+
+ if (G_UNLIKELY (!qtdemux_parse_saio (qtdemux, stream, &saio_data,
+ &info_type, &info_type_parameter, &offset))) {
+ GST_ERROR_OBJECT (qtdemux, "failed to parse saio box");
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ goto fail;
+ }
+ /* rebase the saio offset onto this moof when the traf declared a base
+ * data offset past the moof start.
+ * NOTE(review): assumes the saio offset is relative to base_offset in
+ * that case — confirm against the muxer producing these files. */
+ if (base_offset > -1 && base_offset > qtdemux->moof_offset)
+ offset += (guint64) (base_offset - qtdemux->moof_offset);
+ if ((info_type == FOURCC_cenc || info_type == FOURCC_cbcs)
+ && info_type_parameter == 0U) {
+ GstByteReader br;
+ /* aux info may live outside this moof (e.g. in the mdat); in that
+ * case remember the offset and parse it later */
+ if (offset > length) {
+ GST_DEBUG_OBJECT (qtdemux, "cenc auxiliary info stored out of moof");
+ qtdemux->cenc_aux_info_offset = offset;
+ } else {
+ gst_byte_reader_init (&br, buffer + offset, length - offset);
+ if (!qtdemux_parse_cenc_aux_info (qtdemux, stream, &br,
+ qtdemux->cenc_aux_info_sizes,
+ qtdemux->cenc_aux_sample_count)) {
+ GST_ERROR_OBJECT (qtdemux, "failed to parse cenc auxiliary info");
+ g_free (qtdemux->cenc_aux_info_sizes);
+ qtdemux->cenc_aux_info_sizes = NULL;
+ goto fail;
+ }
+ }
+ }
+ }
+
+ tfdt_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_tfdt,
+ &tfdt_data);
+ if (tfdt_node) {
+ /* We'll use decode_time to interpolate timestamps
+ * in case the input timestamps are missing */
+ qtdemux_parse_tfdt (qtdemux, &tfdt_data, &decode_time);
+
+ GST_DEBUG_OBJECT (qtdemux, "decode time %" G_GINT64_FORMAT
+ " (%" GST_TIME_FORMAT ")", decode_time,
+ GST_TIME_ARGS (stream ? QTSTREAMTIME_TO_GSTTIME (stream,
+ decode_time) : GST_CLOCK_TIME_NONE));
+
+ /* Discard the fragment buffer timestamp info to avoid using it.
+ * Rely on tfdt instead as it is more accurate than the timestamp
+ * that is fetched from a manifest/playlist and is usually
+ * less accurate. */
+ qtdemux->fragment_start = -1;
+ }
+
+ if (G_UNLIKELY (!stream)) {
+ /* we lost track of offset, we'll need to regain it,
+ * but can delay complaining until later or avoid doing so altogether */
+ base_offset = -2;
+ goto next;
+ }
+ if (G_UNLIKELY (base_offset < -1))
+ goto lost_offset;
+
+ /* track the earliest decode time seen across all trafs */
+ min_dts = MIN (min_dts, QTSTREAMTIME_TO_GSTTIME (stream, decode_time));
+
+ if (!qtdemux->pullbased) {
+ /* Sample tables can grow enough to be problematic if the system memory
+ * is very low (e.g. embedded devices) and the videos very long
+ * (~8 MiB/hour for 25-30 fps video + typical AAC audio frames).
+ * Fortunately, we can easily discard them for each new fragment when
+ * we know qtdemux will not receive seeks outside of the current fragment.
+ * adaptivedemux honors this assumption.
+ * This optimization is also useful for applications that use qtdemux as
+ * a push-based simple demuxer, like Media Source Extensions. */
+ gst_qtdemux_stream_flush_samples_data (stream);
+ }
+
+ /* initialise moof sample data */
+ stream->n_samples_moof = 0;
+ stream->duration_last_moof = stream->duration_moof;
+ stream->duration_moof = 0;
+
+ /* Track Run node */
+ trun_node =
+ qtdemux_tree_get_child_by_type_full (traf_node, FOURCC_trun,
+ &trun_data);
+ while (trun_node) {
+ qtdemux_parse_trun (qtdemux, &trun_data, stream,
+ ds_duration, ds_size, ds_flags, moof_offset, length, &base_offset,
+ &running_offset, decode_time, (tfdt_node != NULL));
+ /* iterate all siblings */
+ trun_node = qtdemux_tree_get_sibling_by_type_full (trun_node, FOURCC_trun,
+ &trun_data);
+ }
+
+ /* vendor extension boxes inside the traf */
+ uuid_node = qtdemux_tree_get_child_by_type (traf_node, FOURCC_uuid);
+ if (uuid_node) {
+ guint8 *uuid_buffer = (guint8 *) uuid_node->data;
+ guint32 box_length = QT_UINT32 (uuid_buffer);
+
+ qtdemux_parse_uuid (qtdemux, uuid_buffer, box_length);
+ }
+
+ /* if no new base_offset provided for next traf,
+ * base is end of current traf */
+ base_offset = running_offset;
+ running_offset = -1;
+
+ if (stream->n_samples_moof && stream->duration_moof)
+ stream->new_caps = TRUE;
+
+ next:
+ /* iterate all siblings */
+ traf_node = qtdemux_tree_get_sibling_by_type (traf_node, FOURCC_traf);
+ }
+
+ /* parse any protection system info */
+ pssh_node = qtdemux_tree_get_child_by_type (moof_node, FOURCC_pssh);
+ while (pssh_node) {
+ GST_LOG_OBJECT (qtdemux, "Parsing pssh box.");
+ qtdemux_parse_pssh (qtdemux, pssh_node);
+ pssh_node = qtdemux_tree_get_sibling_by_type (pssh_node, FOURCC_pssh);
+ }
+
+ if (!qtdemux->upstream_format_is_time && !qtdemux->first_moof_already_parsed
+ && !qtdemux->received_seek && GST_CLOCK_TIME_IS_VALID (min_dts)
+ && min_dts != 0) {
+ /* Unless the user has explicitly requested another seek, perform an
+ * internal seek to the time specified in the tfdt.
+ *
+ * This way if the user opens a file where the first tfdt is 1 hour
+ * into the presentation, they will not have to wait 1 hour for run
+ * time to catch up and actual playback to start. */
+ gint i;
+
+ GST_DEBUG_OBJECT (qtdemux, "First fragment has a non-zero tfdt, "
+ "performing an internal seek to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_dts));
+
+ qtdemux->segment.start = min_dts;
+ qtdemux->segment.time = qtdemux->segment.position = min_dts;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+ stream->time_position = min_dts;
+ }
+
+ /* Before this code was run a segment was already sent when the moov was
+ * parsed... which is OK -- some apps (mostly tests) expect a segment to
+ * be emitted after a moov, and we can emit a second segment anyway for
+ * special cases like this. */
+ qtdemux->need_segment = TRUE;
+ }
+
+ qtdemux->first_moof_already_parsed = TRUE;
+
+ g_node_destroy (moof_node);
+ return TRUE;
+
+missing_tfhd:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "missing tfhd box");
+ goto fail;
+ }
+missing_mfhd:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "Missing mfhd box");
+ goto fail;
+ }
+lost_offset:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "lost offset");
+ goto fail;
+ }
+fail:
+ {
+ g_node_destroy (moof_node);
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")), (NULL));
+ return FALSE;
+ }
+}
+
+#if 0
+/* might be used if some day we actually use mfra & co
+ * for random access to fragments,
+ * but that will require quite some modifications and much less relying
+ * on a sample array */
+#endif
+
+/* Parses a Track Fragment Random Access (tfra) box and fills the matching
+ * stream's ra_entries array with (timestamp, moof offset) pairs, one per
+ * random-access point. The packed `len` word encodes, two bits per field,
+ * the byte sizes (minus one) of the traf/trun/sample number fields that
+ * trail each entry; those numbers are read and discarded here. Returns
+ * FALSE if the box is truncated, references an unknown track, or has no
+ * entries. */
+static gboolean
+qtdemux_parse_tfra (GstQTDemux * qtdemux, GNode * tfra_node)
+{
+ QtDemuxStream *stream;
+ guint32 ver_flags, track_id, len, num_entries, i;
+ guint value_size, traf_size, trun_size, sample_size;
+ guint64 time = 0, moof_offset = 0;
+#if 0
+ GstBuffer *buf = NULL;
+ GstFlowReturn ret;
+#endif
+ GstByteReader tfra;
+
+ gst_byte_reader_init (&tfra, tfra_node->data, QT_UINT32 (tfra_node->data));
+
+ /* skip the box size and fourcc */
+ if (!gst_byte_reader_skip (&tfra, 8))
+ return FALSE;
+
+ if (!gst_byte_reader_get_uint32_be (&tfra, &ver_flags))
+ return FALSE;
+
+ if (!gst_byte_reader_get_uint32_be (&tfra, &track_id)
+ || !gst_byte_reader_get_uint32_be (&tfra, &len)
+ || !gst_byte_reader_get_uint32_be (&tfra, &num_entries))
+ return FALSE;
+
+ GST_DEBUG_OBJECT (qtdemux, "parsing tfra box for track id %u", track_id);
+
+ stream = qtdemux_find_stream (qtdemux, track_id);
+ if (stream == NULL)
+ goto unknown_trackid;
+
+ /* version 1 entries use 64-bit time/offset values, version 0 32-bit */
+ value_size = ((ver_flags >> 24) == 1) ? sizeof (guint64) : sizeof (guint32);
+ /* field widths in bytes, each stored as (width - 1) in two bits of len */
+ sample_size = (len & 3) + 1;
+ trun_size = ((len & 12) >> 2) + 1;
+ traf_size = ((len & 48) >> 4) + 1;
+
+ GST_DEBUG_OBJECT (qtdemux, "%u entries, sizes: value %u, traf %u, trun %u, "
+ "sample %u", num_entries, value_size, traf_size, trun_size, sample_size);
+
+ if (num_entries == 0)
+ goto no_samples;
+
+ /* make sure the whole entry table fits before allocating */
+ if (!qt_atom_parser_has_chunks (&tfra, num_entries,
+ value_size + value_size + traf_size + trun_size + sample_size))
+ goto corrupt_file;
+
+ g_free (stream->ra_entries);
+ stream->ra_entries = g_new (QtDemuxRandomAccessEntry, num_entries);
+ stream->n_ra_entries = num_entries;
+
+ for (i = 0; i < num_entries; i++) {
+ qt_atom_parser_get_offset (&tfra, value_size, &time);
+ qt_atom_parser_get_offset (&tfra, value_size, &moof_offset);
+ /* traf/trun/sample numbers are not needed; consume and discard */
+ qt_atom_parser_get_uint_with_size_unchecked (&tfra, traf_size);
+ qt_atom_parser_get_uint_with_size_unchecked (&tfra, trun_size);
+ qt_atom_parser_get_uint_with_size_unchecked (&tfra, sample_size);
+
+ time = QTSTREAMTIME_TO_GSTTIME (stream, time);
+
+ GST_LOG_OBJECT (qtdemux, "fragment time: %" GST_TIME_FORMAT ", "
+ " moof_offset: %" G_GUINT64_FORMAT, GST_TIME_ARGS (time), moof_offset);
+
+ stream->ra_entries[i].ts = time;
+ stream->ra_entries[i].moof_offset = moof_offset;
+
+ /* don't want to go through the entire file and read all moofs at startup */
+#if 0
+ ret = gst_qtdemux_pull_atom (qtdemux, moof_offset, 0, &buf);
+ if (ret != GST_FLOW_OK)
+ goto corrupt_file;
+ qtdemux_parse_moof (qtdemux, GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf),
+ moof_offset, stream);
+ gst_buffer_unref (buf);
+#endif
+ }
+
+ /* `time` now holds the last (highest) fragment timestamp */
+ check_update_duration (qtdemux, time);
+
+ return TRUE;
+
+/* ERRORS */
+unknown_trackid:
+ {
+ GST_WARNING_OBJECT (qtdemux, "Couldn't find stream for track %u", track_id);
+ return FALSE;
+ }
+corrupt_file:
+ {
+ GST_WARNING_OBJECT (qtdemux, "broken traf box, ignoring");
+ return FALSE;
+ }
+no_samples:
+ {
+ GST_WARNING_OBJECT (qtdemux, "stream has no samples");
+ return FALSE;
+ }
+}
+
+/* Pull-mode helper: locates and parses the Movie Fragment Random Access
+ * (mfra) index at the end of the file. The 16-byte mfro box sits at the
+ * very end and stores (at byte offset 12) the size of the mfra box that
+ * precedes it; every tfra child of the mfra is then parsed into the
+ * per-stream random access tables. Returns TRUE on success; all failure
+ * paths just warn and return FALSE since the index is optional. */
+static gboolean
+qtdemux_pull_mfro_mfra (GstQTDemux * qtdemux)
+{
+ GstMapInfo mfro_map = GST_MAP_INFO_INIT;
+ GstMapInfo mfra_map = GST_MAP_INFO_INIT;
+ GstBuffer *mfro = NULL, *mfra = NULL;
+ GstFlowReturn flow;
+ gboolean ret = FALSE;
+ GNode *mfra_node, *tfra_node;
+ guint64 mfra_offset = 0;
+ guint32 fourcc, mfra_size;
+ gint64 len;
+
+ /* query upstream size in bytes */
+ if (!gst_pad_peer_query_duration (qtdemux->sinkpad, GST_FORMAT_BYTES, &len))
+ goto size_query_failed;
+
+ /* mfro box should be at the very end of the file */
+ flow = gst_qtdemux_pull_atom (qtdemux, len - 16, 16, &mfro);
+ if (flow != GST_FLOW_OK)
+ goto exit;
+
+ gst_buffer_map (mfro, &mfro_map, GST_MAP_READ);
+
+ fourcc = QT_FOURCC (mfro_map.data + 4);
+ if (fourcc != FOURCC_mfro)
+ goto exit;
+
+ GST_INFO_OBJECT (qtdemux, "Found mfro box");
+ if (mfro_map.size < 16)
+ goto invalid_mfro_size;
+
+ /* mfro payload: 32-bit size of the preceding mfra box */
+ mfra_size = QT_UINT32 (mfro_map.data + 12);
+ if (mfra_size >= len)
+ goto invalid_mfra_size;
+
+ mfra_offset = len - mfra_size;
+
+ GST_INFO_OBJECT (qtdemux, "mfra offset: %" G_GUINT64_FORMAT ", size %u",
+ mfra_offset, mfra_size);
+
+ /* now get and parse mfra box */
+ flow = gst_qtdemux_pull_atom (qtdemux, mfra_offset, mfra_size, &mfra);
+ if (flow != GST_FLOW_OK)
+ goto broken_file;
+
+ gst_buffer_map (mfra, &mfra_map, GST_MAP_READ);
+
+ mfra_node = g_node_new ((guint8 *) mfra_map.data);
+ qtdemux_parse_node (qtdemux, mfra_node, mfra_map.data, mfra_map.size);
+
+ tfra_node = qtdemux_tree_get_child_by_type (mfra_node, FOURCC_tfra);
+
+ /* one tfra child per track */
+ while (tfra_node) {
+ qtdemux_parse_tfra (qtdemux, tfra_node);
+ /* iterate all siblings */
+ tfra_node = qtdemux_tree_get_sibling_by_type (tfra_node, FOURCC_tfra);
+ }
+ g_node_destroy (mfra_node);
+
+ GST_INFO_OBJECT (qtdemux, "parsed movie fragment random access box (mfra)");
+ ret = TRUE;
+
+exit:
+
+ /* common cleanup: unmap only what was actually mapped */
+ if (mfro) {
+ if (mfro_map.memory != NULL)
+ gst_buffer_unmap (mfro, &mfro_map);
+ gst_buffer_unref (mfro);
+ }
+ if (mfra) {
+ if (mfra_map.memory != NULL)
+ gst_buffer_unmap (mfra, &mfra_map);
+ gst_buffer_unref (mfra);
+ }
+ return ret;
+
+/* ERRORS */
+size_query_failed:
+ {
+ GST_WARNING_OBJECT (qtdemux, "could not query upstream size");
+ goto exit;
+ }
+invalid_mfro_size:
+ {
+ GST_WARNING_OBJECT (qtdemux, "mfro size is too small");
+ goto exit;
+ }
+invalid_mfra_size:
+ {
+ GST_WARNING_OBJECT (qtdemux, "mfra_size in mfro box is invalid");
+ goto exit;
+ }
+broken_file:
+ {
+ GST_WARNING_OBJECT (qtdemux, "bogus mfra offset or size, broken file");
+ goto exit;
+ }
+}
+
+/* Saturating 64-bit addition: returns offset + advance, clamped to
+ * G_MAXUINT64 instead of wrapping on overflow. */
+static guint64
+add_offset (guint64 offset, guint64 advance)
+{
+ guint64 headroom = G_MAXUINT64 - offset;
+
+ if (advance > headroom)
+ return G_MAXUINT64;
+
+ return offset + advance;
+}
+
+/* Pull-mode header-state loop iteration: reads the atom header at
+ * qtdemux->offset, then dispatches on its fourcc — parsing moov/ftyp/
+ * uuid/sidx in full, recording the first moof, and skipping mdat/free/
+ * padding atoms. Advances qtdemux->offset past the handled atom. When
+ * the headers are exhausted (GST_FLOW_EOS with a moov or media caps in
+ * hand), exposes the streams and switches to the MOVIE state. */
+static GstFlowReturn
+gst_qtdemux_loop_state_header (GstQTDemux * qtdemux)
+{
+ guint64 length = 0;
+ guint32 fourcc = 0;
+ GstBuffer *buf = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint64 cur_offset = qtdemux->offset;
+ GstMapInfo map;
+
+ /* peek the (up to 16-byte) atom header at the current offset */
+ ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, 16, &buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto beach;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (G_LIKELY (map.size >= 8))
+ extract_initial_length_and_fourcc (map.data, map.size, &length, &fourcc);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ /* maybe we already got most we needed, so only consider this eof */
+ if (G_UNLIKELY (length == 0)) {
+ GST_ELEMENT_WARNING (qtdemux, STREAM, DEMUX,
+ (_("Invalid atom size.")),
+ ("Header atom '%" GST_FOURCC_FORMAT "' has empty length",
+ GST_FOURCC_ARGS (fourcc)));
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+
+ switch (fourcc) {
+ case FOURCC_moof:
+ /* record for later parsing when needed */
+ if (!qtdemux->moof_offset) {
+ qtdemux->moof_offset = qtdemux->offset;
+ }
+ if (qtdemux_pull_mfro_mfra (qtdemux)) {
+ /* FIXME */
+ } else {
+ qtdemux->offset += length; /* skip moof and keep going */
+ }
+ if (qtdemux->got_moov) {
+ GST_INFO_OBJECT (qtdemux, "moof header, got moov, done with headers");
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+ break;
+ case FOURCC_mdat:
+ case FOURCC_free:
+ case FOURCC_skip:
+ case FOURCC_wide:
+ case FOURCC_PICT:
+ case FOURCC_pnot:
+ {
+ /* media data / padding atoms carry no header info; step over them
+ * (saturating add guards against bogus huge atom sizes) */
+ GST_LOG_OBJECT (qtdemux,
+ "skipping atom '%" GST_FOURCC_FORMAT "' at %" G_GUINT64_FORMAT,
+ GST_FOURCC_ARGS (fourcc), cur_offset);
+ qtdemux->offset = add_offset (qtdemux->offset, length);
+ break;
+ }
+ case FOURCC_moov:
+ {
+ GstBuffer *moov = NULL;
+
+ if (qtdemux->got_moov) {
+ GST_DEBUG_OBJECT (qtdemux, "Skipping moov atom as we have one already");
+ qtdemux->offset = add_offset (qtdemux->offset, length);
+ goto beach;
+ }
+
+ ret = gst_pad_pull_range (qtdemux->sinkpad, cur_offset, length, &moov);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ gst_buffer_map (moov, &map, GST_MAP_READ);
+
+ if (length != map.size) {
+ /* Some files have a 'moov' atom at the end of the file which contains
+ * a terminal 'free' atom where the body of the atom is missing.
+ * Check for, and permit, this special case.
+ */
+ if (map.size >= 8) {
+ guint8 *final_data = map.data + (map.size - 8);
+ guint32 final_length = QT_UINT32 (final_data);
+ guint32 final_fourcc = QT_FOURCC (final_data + 4);
+
+ if (final_fourcc == FOURCC_free
+ && map.size + final_length - 8 == length) {
+ /* Ok, we've found that special case. Allocate a new buffer with
+ * that free atom actually present. */
+ GstBuffer *newmoov = gst_buffer_new_and_alloc (length);
+ gst_buffer_fill (newmoov, 0, map.data, map.size);
+ gst_buffer_memset (newmoov, map.size, 0, final_length - 8);
+ gst_buffer_unmap (moov, &map);
+ gst_buffer_unref (moov);
+ moov = newmoov;
+ gst_buffer_map (moov, &map, GST_MAP_READ);
+ }
+ }
+ }
+
+ if (length != map.size) {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is incomplete and cannot be played.")),
+ ("We got less than expected (received %" G_GSIZE_FORMAT
+ ", wanted %u, offset %" G_GUINT64_FORMAT ")", map.size,
+ (guint) length, cur_offset));
+ gst_buffer_unmap (moov, &map);
+ gst_buffer_unref (moov);
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+ qtdemux->offset += length;
+
+ qtdemux_parse_moov (qtdemux, map.data, length);
+ qtdemux_node_dump (qtdemux, qtdemux->moov_node);
+
+ qtdemux_parse_tree (qtdemux);
+ /* drop the (possibly decompressed) moov node tree now that it has
+ * been turned into streams */
+ if (qtdemux->moov_node_compressed) {
+ g_node_destroy (qtdemux->moov_node_compressed);
+ g_free (qtdemux->moov_node->data);
+ }
+ qtdemux->moov_node_compressed = NULL;
+ g_node_destroy (qtdemux->moov_node);
+ qtdemux->moov_node = NULL;
+ gst_buffer_unmap (moov, &map);
+ gst_buffer_unref (moov);
+ qtdemux->got_moov = TRUE;
+
+ break;
+ }
+ case FOURCC_ftyp:
+ {
+ GstBuffer *ftyp = NULL;
+
+ /* extract major brand; might come in handy for ISO vs QT issues */
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &ftyp);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ qtdemux->offset += length;
+ gst_buffer_map (ftyp, &map, GST_MAP_READ);
+ qtdemux_parse_ftyp (qtdemux, map.data, map.size);
+ gst_buffer_unmap (ftyp, &map);
+ gst_buffer_unref (ftyp);
+ break;
+ }
+ case FOURCC_uuid:
+ {
+ GstBuffer *uuid = NULL;
+
+ /* uuid are extension atoms */
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &uuid);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ qtdemux->offset += length;
+ gst_buffer_map (uuid, &map, GST_MAP_READ);
+ qtdemux_parse_uuid (qtdemux, map.data, map.size);
+ gst_buffer_unmap (uuid, &map);
+ gst_buffer_unref (uuid);
+ break;
+ }
+ case FOURCC_sidx:
+ {
+ /* segment index: gives fragment positions/durations */
+ GstBuffer *sidx = NULL;
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &sidx);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ qtdemux->offset += length;
+ gst_buffer_map (sidx, &map, GST_MAP_READ);
+ qtdemux_parse_sidx (qtdemux, map.data, map.size);
+ gst_buffer_unmap (sidx, &map);
+ gst_buffer_unref (sidx);
+ break;
+ }
+ default:
+ {
+ GstBuffer *unknown = NULL;
+
+ /* unrecognised atoms are dumped for debugging and skipped */
+ GST_LOG_OBJECT (qtdemux,
+ "unknown %08x '%" GST_FOURCC_FORMAT "' of size %" G_GUINT64_FORMAT
+ " at %" G_GUINT64_FORMAT, fourcc, GST_FOURCC_ARGS (fourcc), length,
+ cur_offset);
+ ret = gst_qtdemux_pull_atom (qtdemux, cur_offset, length, &unknown);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+ gst_buffer_map (unknown, &map, GST_MAP_READ);
+ GST_MEMDUMP ("Unknown tag", map.data, map.size);
+ gst_buffer_unmap (unknown, &map);
+ gst_buffer_unref (unknown);
+ qtdemux->offset += length;
+ break;
+ }
+ }
+
+beach:
+ if (ret == GST_FLOW_EOS && (qtdemux->got_moov || qtdemux->media_caps)) {
+ /* digested all data, show what we have */
+ qtdemux_prepare_streams (qtdemux);
+ QTDEMUX_EXPOSE_LOCK (qtdemux);
+ ret = qtdemux_expose_streams (qtdemux);
+ QTDEMUX_EXPOSE_UNLOCK (qtdemux);
+
+ qtdemux->state = QTDEMUX_STATE_MOVIE;
+ GST_DEBUG_OBJECT (qtdemux, "switching state to STATE_MOVIE (%d)",
+ qtdemux->state);
+ return ret;
+ }
+ return ret;
+}
+
+/* Seeks to the previous keyframe of the indexed stream and
+ * aligns other streams with respect to the keyframe timestamp
+ * of indexed stream. Only called in case of Reverse Playback
+ */
+static GstFlowReturn
+gst_qtdemux_seek_to_previous_keyframe (GstQTDemux * qtdemux)
+{
+ guint32 seg_idx = 0, k_index = 0;
+ guint32 ref_seg_idx, ref_k_index;
+ GstClockTime k_pos = 0, last_stop = 0;
+ QtDemuxSegment *seg = NULL;
+ QtDemuxStream *ref_str = NULL;
+ guint64 seg_media_start_mov; /* segment media start time in mov format */
+ guint64 target_ts;
+ gint i;
+
+ /* Now we choose an arbitrary stream, get the previous keyframe timestamp
+ * and finally align all the other streams on that timestamp with their
+ * respective keyframes */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ /* No candidate yet, take the first stream */
+ if (!ref_str) {
+ ref_str = str;
+ continue;
+ }
+
+ /* So that stream has a segment, we prefer video streams */
+ if (str->subtype == FOURCC_vide) {
+ ref_str = str;
+ break;
+ }
+ }
+
+ if (G_UNLIKELY (!ref_str)) {
+ GST_DEBUG_OBJECT (qtdemux, "couldn't find any stream");
+ goto eos;
+ }
+
+ if (G_UNLIKELY (!ref_str->from_sample)) {
+ GST_DEBUG_OBJECT (qtdemux, "reached the beginning of the file");
+ goto eos;
+ }
+
+ /* So that stream has been playing from from_sample to to_sample. We will
+ * get the timestamp of the previous sample and search for a keyframe before
+ * that. For audio streams we do an arbitrary jump in the past (10 samples) */
+ if (ref_str->subtype == FOURCC_vide) {
+ k_index = gst_qtdemux_find_keyframe (qtdemux, ref_str,
+ ref_str->from_sample - 1, FALSE);
+ } else {
+ if (ref_str->from_sample >= 10)
+ k_index = ref_str->from_sample - 10;
+ else
+ k_index = 0;
+ }
+
+ target_ts =
+ ref_str->samples[k_index].timestamp +
+ ref_str->samples[k_index].pts_offset;
+
+ /* get current segment for that stream */
+ seg = &ref_str->segments[ref_str->segment_index];
+ /* Use segment start in original timescale for comparisons */
+ seg_media_start_mov = seg->trak_media_start;
+
+ GST_LOG_OBJECT (qtdemux, "keyframe index %u ts %" G_GUINT64_FORMAT
+ " seg start %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT,
+ k_index, target_ts, seg_media_start_mov,
+ GST_TIME_ARGS (seg->media_start));
+
+ /* Crawl back through segments to find the one containing this I frame */
+ while (target_ts < seg_media_start_mov) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "keyframe position (sample %u) is out of segment %u " " target %"
+ G_GUINT64_FORMAT " seg start %" G_GUINT64_FORMAT, k_index,
+ ref_str->segment_index, target_ts, seg_media_start_mov);
+
+ if (G_UNLIKELY (!ref_str->segment_index)) {
+ /* Reached first segment, let's consider it's EOS */
+ goto eos;
+ }
+ ref_str->segment_index--;
+ seg = &ref_str->segments[ref_str->segment_index];
+ /* Use segment start in original timescale for comparisons */
+ seg_media_start_mov = seg->trak_media_start;
+ }
+ /* Calculate time position of the keyframe and where we should stop */
+ k_pos =
+ QTSTREAMTIME_TO_GSTTIME (ref_str,
+ target_ts - seg->trak_media_start) + seg->time;
+ last_stop =
+ QTSTREAMTIME_TO_GSTTIME (ref_str,
+ ref_str->samples[ref_str->from_sample].timestamp -
+ seg->trak_media_start) + seg->time;
+
+ GST_DEBUG_OBJECT (qtdemux, "preferred stream played from sample %u, "
+ "now going to sample %u (pts %" GST_TIME_FORMAT ")", ref_str->from_sample,
+ k_index, GST_TIME_ARGS (k_pos));
+
+ /* Set last_stop with the keyframe timestamp we pushed of that stream */
+ qtdemux->segment.position = last_stop;
+ GST_DEBUG_OBJECT (qtdemux, "last_stop now is %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (last_stop));
+
+ if (G_UNLIKELY (last_stop < qtdemux->segment.start)) {
+ GST_DEBUG_OBJECT (qtdemux, "reached the beginning of segment");
+ goto eos;
+ }
+
+ ref_seg_idx = ref_str->segment_index;
+ ref_k_index = k_index;
+
+ /* Align them all on this */
+ for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+ guint32 index = 0;
+ GstClockTime seg_time = 0;
+ QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
+
+ /* aligning reference stream again might lead to backing up to yet another
+ * keyframe (due to timestamp rounding issues),
+ * potentially putting more load on downstream; so let's try to avoid */
+ if (str == ref_str) {
+ seg_idx = ref_seg_idx;
+ seg = &str->segments[seg_idx];
+ k_index = ref_k_index;
+ GST_DEBUG_OBJECT (qtdemux, "reference track-id %u segment %d, "
+ "sample at index %d", str->track_id, ref_str->segment_index, k_index);
+ } else {
+ seg_idx = gst_qtdemux_find_segment (qtdemux, str, k_pos);
+ GST_DEBUG_OBJECT (qtdemux,
+ "track-id %u align segment %d for keyframe pos %" GST_TIME_FORMAT,
+ str->track_id, seg_idx, GST_TIME_ARGS (k_pos));
+
+ /* get segment and time in the segment */
+ seg = &str->segments[seg_idx];
+ seg_time = k_pos - seg->time;
+
+ /* get the media time in the segment.
+ * No adjustment for empty "filler" segments */
+ if (seg->media_start != GST_CLOCK_TIME_NONE)
+ seg_time += seg->media_start;
+
+ /* get the index of the sample with media time */
+ index = gst_qtdemux_find_index_linear (qtdemux, str, seg_time);
+ GST_DEBUG_OBJECT (qtdemux,
+ "track-id %u sample for %" GST_TIME_FORMAT " at %u", str->track_id,
+ GST_TIME_ARGS (seg_time), index);
+
+ /* find previous keyframe */
+ k_index = gst_qtdemux_find_keyframe (qtdemux, str, index, FALSE);
+ }
+
+ /* Remember until where we want to go */
+ str->to_sample = str->from_sample - 1;
+ /* Define our time position */
+ target_ts =
+ str->samples[k_index].timestamp + str->samples[k_index].pts_offset;
+ str->time_position = QTSTREAMTIME_TO_GSTTIME (str, target_ts) + seg->time;
+ if (seg->media_start != GST_CLOCK_TIME_NONE)
+ str->time_position -= seg->media_start;
+
+ /* Now seek back in time */
+ gst_qtdemux_move_stream (qtdemux, str, k_index);
+ GST_DEBUG_OBJECT (qtdemux, "track-id %u keyframe at %u, time position %"
+ GST_TIME_FORMAT " playing from sample %u to %u", str->track_id, k_index,
+ GST_TIME_ARGS (str->time_position), str->from_sample, str->to_sample);
+ }
+
+ return GST_FLOW_OK;
+
+eos:
+ return GST_FLOW_EOS;
+}
+
+/*
+ * Gets the current qt segment start, stop and position for the
+ * given time offset. This is used in update_segment()
+ */
+static void
+gst_qtdemux_stream_segment_get_boundaries (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GstClockTime offset,
+ GstClockTime * _start, GstClockTime * _stop, GstClockTime * _time)
+{
+ GstClockTime seg_time;
+ GstClockTime start, stop, time;
+ QtDemuxSegment *segment;
+
+ /* @offset is a position in the global (downstream) time domain; the
+ * boundaries are derived from the qt segment currently active for
+ * @stream */
+ segment = &stream->segments[stream->segment_index];
+
+ /* get time in this segment */
+ seg_time = (offset - segment->time) * segment->rate;
+
+ GST_LOG_OBJECT (stream->pad, "seg_time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (seg_time));
+
+ /* never report a position beyond the end of this qt segment */
+ if (G_UNLIKELY (seg_time > segment->duration)) {
+ GST_LOG_OBJECT (stream->pad,
+ "seg_time > segment->duration %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->duration));
+ seg_time = segment->duration;
+ }
+
+ /* qtdemux->segment.stop is in outside-time-realm, whereas
+ * segment->media_stop is in track-time-realm.
+ *
+ * In order to compare the two, we need to bring segment.stop
+ * into the track-time-realm
+ *
+ * FIXME - does this comment still hold? Don't see any conversion here */
+
+ /* pick the tightest stop available: configured stop, else total
+ * duration, else this qt segment's own media stop */
+ stop = qtdemux->segment.stop;
+ if (stop == GST_CLOCK_TIME_NONE)
+ stop = qtdemux->segment.duration;
+ if (stop == GST_CLOCK_TIME_NONE)
+ stop = segment->media_stop;
+ else
+ stop =
+ MIN (segment->media_stop, stop - segment->time + segment->media_start);
+
+ if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (segment))) {
+ /* empty (edit-list filler) segment: no media samples, so boundaries
+ * come purely from the edit-list times */
+ start = segment->time + seg_time;
+ time = offset;
+ stop = start - seg_time + segment->duration;
+ } else if (qtdemux->segment.rate >= 0) {
+ /* forward playback */
+ start = MIN (segment->media_start + seg_time, stop);
+ time = offset;
+ } else {
+ /* reverse playback: anchor time to wherever the configured segment
+ * start falls relative to this qt segment */
+ if (segment->media_start >= qtdemux->segment.start) {
+ time = segment->time;
+ } else {
+ time = segment->time + (qtdemux->segment.start - segment->media_start);
+ }
+
+ start = MAX (segment->media_start, qtdemux->segment.start);
+ stop = MIN (segment->media_start + seg_time, stop);
+ }
+
+ *_start = start;
+ *_stop = stop;
+ *_time = time;
+}
+
+/*
+ * Updates the qt segment used for the stream and pushes a new segment event
+ * downstream on this stream's pad.
+ */
+static gboolean
+gst_qtdemux_stream_update_segment (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ gint seg_idx, GstClockTime offset, GstClockTime * _start,
+ GstClockTime * _stop)
+{
+ QtDemuxSegment *segment;
+ GstClockTime start = 0, stop = GST_CLOCK_TIME_NONE, time = 0;
+ gdouble rate;
+ GstEvent *event;
+
+ /* update the current segment */
+ stream->segment_index = seg_idx;
+
+ /* get the segment */
+ segment = &stream->segments[seg_idx];
+
+ /* sanity: the requested offset must lie at or after this segment's
+ * start, otherwise the caller picked the wrong segment */
+ if (G_UNLIKELY (offset < segment->time)) {
+ GST_WARNING_OBJECT (stream->pad, "offset < segment->time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (segment->time));
+ return FALSE;
+ }
+
+ /* segment lies beyond total indicated duration */
+ if (G_UNLIKELY (qtdemux->segment.duration != GST_CLOCK_TIME_NONE &&
+ segment->time > qtdemux->segment.duration)) {
+ GST_WARNING_OBJECT (stream->pad, "file duration %" GST_TIME_FORMAT
+ " < segment->time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (qtdemux->segment.duration),
+ GST_TIME_ARGS (segment->time));
+ return FALSE;
+ }
+
+ /* compute start/stop/time for the new segment from @offset */
+ gst_qtdemux_stream_segment_get_boundaries (qtdemux, stream, offset,
+ &start, &stop, &time);
+
+ GST_DEBUG_OBJECT (stream->pad, "new segment %d from %" GST_TIME_FORMAT
+ " to %" GST_TIME_FORMAT ", time %" GST_TIME_FORMAT, seg_idx,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop), GST_TIME_ARGS (time));
+
+ /* combine global rate with that of the segment */
+ rate = segment->rate * qtdemux->segment.rate;
+
+ /* Copy flags from main segment */
+ stream->segment.flags = qtdemux->segment.flags;
+
+ /* update the segment values used for clipping */
+ stream->segment.offset = qtdemux->segment.offset;
+ stream->segment.base = qtdemux->segment.base + stream->accumulated_base;
+ stream->segment.applied_rate = qtdemux->segment.applied_rate;
+ stream->segment.rate = rate;
+ /* shift start/stop by the stream's cslg shift so they match the
+ * (shifted) timestamps of the buffers we push */
+ stream->segment.start = start + QTSTREAMTIME_TO_GSTTIME (stream,
+ stream->cslg_shift);
+ stream->segment.stop = stop + QTSTREAMTIME_TO_GSTTIME (stream,
+ stream->cslg_shift);
+ stream->segment.time = time;
+ stream->segment.position = stream->segment.start;
+
+ GST_DEBUG_OBJECT (stream->pad, "New segment: %" GST_SEGMENT_FORMAT,
+ &stream->segment);
+
+ /* now prepare and send the segment */
+ if (stream->pad) {
+ event = gst_event_new_segment (&stream->segment);
+ /* propagate the seqnum of the seek that triggered this segment */
+ if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
+ gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+ }
+ gst_pad_push_event (stream->pad, event);
+ /* assume we can send more data now */
+ GST_PAD_LAST_FLOW_RETURN (stream->pad) = GST_FLOW_OK;
+ /* clear to send tags on this pad now */
+ gst_qtdemux_push_tags (qtdemux, stream);
+ }
+
+ /* output params are optional */
+ if (_start)
+ *_start = start;
+ if (_stop)
+ *_stop = stop;
+
+ return TRUE;
+}
+
+/* activate the given segment number @seg_idx of @stream at time @offset.
+ * @offset is an absolute global position over all the segments.
+ *
+ * This will push out a NEWSEGMENT event with the right values and
+ * position the stream index to the first decodable sample before
+ * @offset.
+ */
+static gboolean
+gst_qtdemux_activate_segment (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ guint32 seg_idx, GstClockTime offset)
+{
+ QtDemuxSegment *segment;
+ guint32 index, kf_index;
+ GstClockTime start = 0, stop = GST_CLOCK_TIME_NONE;
+
+ GST_LOG_OBJECT (stream->pad, "activate segment %d, offset %" GST_TIME_FORMAT,
+ seg_idx, GST_TIME_ARGS (offset));
+
+ /* update stream state and push the corresponding SEGMENT event */
+ if (!gst_qtdemux_stream_update_segment (qtdemux, stream, seg_idx, offset,
+ &start, &stop))
+ return FALSE;
+
+ segment = &stream->segments[stream->segment_index];
+
+ /* in the fragmented case, we pick a fragment that starts before our
+ * desired position and rely on downstream to wait for a keyframe
+ * (FIXME: doesn't seem to work so well with ismv and wmv, as no parser; the
+ * tfra entries tells us which trun/sample the key unit is in, but we don't
+ * make use of this additional information at the moment) */
+ if (qtdemux->fragmented && !qtdemux->fragmented_seek_pending) {
+ stream->to_sample = G_MAXUINT32;
+ return TRUE;
+ } else {
+ /* well, it will be taken care of below */
+ qtdemux->fragmented_seek_pending = FALSE;
+ /* FIXME ideally the do_fragmented_seek can be done right here,
+ * rather than at loop level
+ * (which might even allow handling edit lists in a fragmented file) */
+ }
+
+ /* We don't need to look for a sample in push-based */
+ if (!qtdemux->pullbased)
+ return TRUE;
+
+ /* and move to the keyframe before the indicated media time of the
+ * segment */
+ if (G_LIKELY (!QTSEGMENT_IS_EMPTY (segment))) {
+ if (qtdemux->segment.rate >= 0) {
+ /* forward: target the sample at the segment start, no end limit */
+ index = gst_qtdemux_find_index_linear (qtdemux, stream, start);
+ stream->to_sample = G_MAXUINT32;
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving data pointer to %" GST_TIME_FORMAT ", index: %u, pts %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (start), index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[index])));
+ } else {
+ /* reverse: target the sample at the stop position and stop there */
+ index = gst_qtdemux_find_index_linear (qtdemux, stream, stop);
+ stream->to_sample = index;
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving data pointer to %" GST_TIME_FORMAT ", index: %u, pts %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (stop), index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[index])));
+ }
+ } else {
+ GST_DEBUG_OBJECT (stream->pad, "No need to look for keyframe, "
+ "this is an empty segment");
+ return TRUE;
+ }
+
+ /* gst_qtdemux_parse_sample () called from gst_qtdemux_find_index_linear ()
+ * encountered an error and printed a message so we return appropriately */
+ if (index == -1)
+ return FALSE;
+
+ /* we're at the right spot */
+ if (index == stream->sample_index) {
+ GST_DEBUG_OBJECT (stream->pad, "we are at the right index");
+ return TRUE;
+ }
+
+ /* find keyframe of the target index */
+ kf_index = gst_qtdemux_find_keyframe (qtdemux, stream, index, FALSE);
+
+ /* go back two frames to provide lead-in for non-raw audio decoders */
+ if (stream->subtype == FOURCC_soun && !stream->need_clip) {
+ guint32 lead_in = 2;
+ guint32 old_index = kf_index;
+ GstStructure *s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
+
+ if (gst_structure_has_name (s, "audio/mpeg")) {
+ gint mpegversion;
+ if (gst_structure_get_int (s, "mpegversion", &mpegversion)
+ && mpegversion == 1) {
+ /* mp3 could need up to 30 frames of lead-in per mpegaudioparse */
+ lead_in = 30;
+ }
+ }
+
+ /* MAX () guards against unsigned underflow when kf_index < lead_in */
+ kf_index = MAX (kf_index, lead_in) - lead_in;
+ if (qtdemux_parse_samples (qtdemux, stream, kf_index)) {
+ GST_DEBUG_OBJECT (stream->pad,
+ "Moving backwards %u frames to ensure sufficient sound lead-in",
+ old_index - kf_index);
+ } else {
+ /* couldn't parse that far back; keep the original keyframe */
+ kf_index = old_index;
+ }
+ }
+
+ /* if we move forwards, we don't have to go back to the previous
+ * keyframe since we already sent that. We can also just jump to
+ * the keyframe right before the target index if there is one. */
+ if (index > stream->sample_index) {
+ /* moving forwards check if we move past a keyframe */
+ if (kf_index > stream->sample_index) {
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving forwards to keyframe at %u "
+ "(pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " )",
+ kf_index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[kf_index])),
+ GST_TIME_ARGS (QTSAMPLE_DTS (stream, &stream->samples[kf_index])));
+ gst_qtdemux_move_stream (qtdemux, stream, kf_index);
+ } else {
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving forwards, keyframe at %u "
+ "(pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " ) already sent",
+ kf_index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[kf_index])),
+ GST_TIME_ARGS (QTSAMPLE_DTS (stream, &stream->samples[kf_index])));
+ }
+ } else {
+ GST_DEBUG_OBJECT (stream->pad,
+ "moving backwards to %sframe at %u "
+ "(pts %" GST_TIME_FORMAT " dts %" GST_TIME_FORMAT " )",
+ (stream->subtype == FOURCC_soun) ? "audio " : "key", kf_index,
+ GST_TIME_ARGS (QTSAMPLE_PTS (stream, &stream->samples[kf_index])),
+ GST_TIME_ARGS (QTSAMPLE_DTS (stream, &stream->samples[kf_index])));
+ gst_qtdemux_move_stream (qtdemux, stream, kf_index);
+ }
+
+ return TRUE;
+}
+
+/* prepare to get the current sample of @stream, getting essential values.
+ *
+ * This function will also prepare and send the segment when needed.
+ *
+ * Return FALSE if the stream is EOS.
+ *
+ * PULL-BASED
+ */
+static gboolean
+gst_qtdemux_prepare_current_sample (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, gboolean * empty, guint64 * offset, guint * size,
+ GstClockTime * dts, GstClockTime * pts, GstClockTime * duration,
+ gboolean * keyframe)
+{
+ QtDemuxSample *sample;
+ GstClockTime time_position;
+ guint32 seg_idx;
+
+ g_return_val_if_fail (stream != NULL, FALSE);
+
+ /* GST_CLOCK_TIME_NONE here marks the stream as EOS */
+ time_position = stream->time_position;
+ if (G_UNLIKELY (time_position == GST_CLOCK_TIME_NONE))
+ goto eos;
+
+ seg_idx = stream->segment_index;
+ if (G_UNLIKELY (seg_idx == -1)) {
+ /* find segment corresponding to time_position if we are looking
+ * for a segment. */
+ seg_idx = gst_qtdemux_find_segment (qtdemux, stream, time_position);
+ }
+
+ /* different segment, activate it, sample_index will be set. */
+ if (G_UNLIKELY (stream->segment_index != seg_idx))
+ gst_qtdemux_activate_segment (qtdemux, stream, seg_idx, time_position);
+
+ if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (&stream->
+ segments[stream->segment_index]))) {
+ QtDemuxSegment *seg = &stream->segments[stream->segment_index];
+
+ GST_LOG_OBJECT (qtdemux, "Empty segment activated,"
+ " prepare empty sample");
+
+ /* empty (edit-list filler) segment: synthesize a gap "sample" covering
+ * the rest of the segment; offset/size/keyframe are left untouched */
+ *empty = TRUE;
+ *pts = *dts = time_position;
+ *duration = seg->duration - (time_position - seg->time);
+
+ return TRUE;
+ }
+
+ *empty = FALSE;
+
+ if (stream->sample_index == -1)
+ stream->sample_index = 0;
+
+ GST_LOG_OBJECT (qtdemux, "segment active, index = %u of %u",
+ stream->sample_index, stream->n_samples);
+
+ if (G_UNLIKELY (stream->sample_index >= stream->n_samples)) {
+ if (!qtdemux->fragmented)
+ goto eos;
+
+ /* fragmented file: keep pulling in more fragment sample tables until
+ * the wanted index exists or no more fragments can be added */
+ GST_INFO_OBJECT (qtdemux, "out of samples, trying to add more");
+ do {
+ GstFlowReturn flow;
+
+ GST_OBJECT_LOCK (qtdemux);
+ flow = qtdemux_add_fragmented_samples (qtdemux);
+ GST_OBJECT_UNLOCK (qtdemux);
+
+ if (flow != GST_FLOW_OK)
+ goto eos;
+ }
+ while (stream->sample_index >= stream->n_samples);
+ }
+
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->sample_index)) {
+ GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!",
+ stream->sample_index);
+ return FALSE;
+ }
+
+ /* now get the info for the sample we're at */
+ sample = &stream->samples[stream->sample_index];
+
+ *dts = QTSAMPLE_DTS (stream, sample);
+ *pts = QTSAMPLE_PTS (stream, sample);
+ *offset = sample->offset;
+ *size = sample->size;
+ *duration = QTSAMPLE_DUR_DTS (stream, sample, *dts);
+ *keyframe = QTSAMPLE_KEYFRAME (stream, sample);
+
+ return TRUE;
+
+ /* special cases */
+eos:
+ {
+ /* mark the stream EOS so subsequent calls bail out immediately */
+ stream->time_position = GST_CLOCK_TIME_NONE;
+ return FALSE;
+ }
+}
+
+/* move to the next sample in @stream.
+ *
+ * Moves to the next segment when needed.
+ */
+static void
+gst_qtdemux_advance_sample (GstQTDemux * qtdemux, QtDemuxStream * stream)
+{
+ QtDemuxSample *sample;
+ QtDemuxSegment *segment;
+
+ /* get current segment */
+ segment = &stream->segments[stream->segment_index];
+
+ if (G_UNLIKELY (QTSEGMENT_IS_EMPTY (segment))) {
+ GST_DEBUG_OBJECT (qtdemux, "Empty segment, no samples to advance");
+ goto next_segment;
+ }
+
+ /* to_sample is the last sample we are allowed to push (used e.g. for
+ * reverse playback ranges) */
+ if (G_UNLIKELY (stream->sample_index >= stream->to_sample)) {
+ /* Mark the stream as EOS */
+ GST_DEBUG_OBJECT (qtdemux,
+ "reached max allowed sample %u, mark EOS", stream->to_sample);
+ stream->time_position = GST_CLOCK_TIME_NONE;
+ return;
+ }
+
+ /* move to next sample */
+ stream->sample_index++;
+ stream->offset_in_sample = 0;
+
+ GST_TRACE_OBJECT (qtdemux, "advance to sample %u/%u", stream->sample_index,
+ stream->n_samples);
+
+ /* reached the last sample, we need the next segment */
+ if (G_UNLIKELY (stream->sample_index >= stream->n_samples))
+ goto next_segment;
+
+ if (!qtdemux_parse_samples (qtdemux, stream, stream->sample_index)) {
+ GST_LOG_OBJECT (qtdemux, "Parsing of index %u failed!",
+ stream->sample_index);
+ return;
+ }
+
+ /* get next sample */
+ sample = &stream->samples[stream->sample_index];
+
+ GST_TRACE_OBJECT (qtdemux, "sample dts %" GST_TIME_FORMAT " media_stop: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (QTSAMPLE_DTS (stream, sample)),
+ GST_TIME_ARGS (segment->media_stop));
+
+ /* see if we are past the segment */
+ if (G_UNLIKELY (QTSAMPLE_DTS (stream, sample) >= segment->media_stop))
+ goto next_segment;
+
+ if (QTSAMPLE_DTS (stream, sample) >= segment->media_start) {
+ /* inside the segment, update time_position, looks very familiar to
+ * GStreamer segments, doesn't it? */
+ stream->time_position =
+ QTSAMPLE_DTS (stream, sample) - segment->media_start + segment->time;
+ } else {
+ /* not yet in segment, time does not yet increment. This means
+ * that we are still prerolling keyframes to the decoder so it can
+ * decode the first sample of the segment. */
+ stream->time_position = segment->time;
+ }
+ return;
+
+ /* move to the next segment */
+next_segment:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "segment %d ended ", stream->segment_index);
+
+ if (stream->segment_index == stream->n_segments - 1) {
+ /* are we at the end of the last segment, we're EOS */
+ stream->time_position = GST_CLOCK_TIME_NONE;
+ } else {
+ /* else we're only at the end of the current segment */
+ stream->time_position = segment->stop_time;
+ }
+ /* make sure we select a new segment */
+
+ /* accumulate previous segments */
+ if (GST_CLOCK_TIME_IS_VALID (stream->segment.stop))
+ stream->accumulated_base +=
+ (stream->segment.stop -
+ stream->segment.start) / ABS (stream->segment.rate);
+
+ /* -1 forces a segment lookup on the next prepare_current_sample */
+ stream->segment_index = -1;
+ }
+}
+
+/* Send EOS on streams that lag well behind (more than 2 seconds before)
+ * the current playback position, so that one short stream cannot stall
+ * the other streams of the clip. Only meaningful with multiple streams. */
+static void
+gst_qtdemux_sync_streams (GstQTDemux * demux)
+{
+ gint i;
+
+ if (QTDEMUX_N_STREAMS (demux) <= 1)
+ return;
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ QtDemuxStream *stream;
+ GstClockTime end_time;
+
+ stream = QTDEMUX_NTH_STREAM (demux, i);
+
+ if (!stream->pad)
+ continue;
+
+ /* TODO advance time on subtitle streams here, if any some day */
+
+ /* some clips/trailers may have unbalanced streams at the end,
+ * so send EOS on shorter stream to prevent stalling others */
+
+ /* do not mess with EOS if SEGMENT seeking */
+ if (demux->segment.flags & GST_SEEK_FLAG_SEGMENT)
+ continue;
+
+ if (demux->pullbased) {
+ /* loop mode is sample time based */
+ if (!STREAM_IS_EOS (stream))
+ continue;
+ } else {
+ /* push mode is byte position based */
+ if (stream->n_samples &&
+ stream->samples[stream->n_samples - 1].offset >= demux->offset)
+ continue;
+ }
+
+ /* never send EOS twice on the same pad */
+ if (stream->sent_eos)
+ continue;
+
+ /* only act if some gap */
+ end_time = stream->segments[stream->n_segments - 1].stop_time;
+ GST_LOG_OBJECT (demux, "current position: %" GST_TIME_FORMAT
+ ", stream end: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->segment.position), GST_TIME_ARGS (end_time));
+ if (GST_CLOCK_TIME_IS_VALID (end_time)
+ && (end_time + 2 * GST_SECOND < demux->segment.position)) {
+ GstEvent *event;
+
+ GST_DEBUG_OBJECT (demux, "sending EOS for stream %s",
+ GST_PAD_NAME (stream->pad));
+ stream->sent_eos = TRUE;
+ event = gst_event_new_eos ();
+ if (demux->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, demux->segment_seqnum);
+ gst_pad_push_event (stream->pad, event);
+ }
+ }
+}
+
+/* EOS and NOT_LINKED need to be combined. This means that we return:
+ *
+ * GST_FLOW_NOT_LINKED: when all pads NOT_LINKED.
+ * GST_FLOW_EOS: when all pads EOS or NOT_LINKED.
+ */
+static GstFlowReturn
+gst_qtdemux_combine_flows (GstQTDemux * demux, QtDemuxStream * stream,
+ GstFlowReturn ret)
+{
+  GstFlowReturn combined;
+
+  GST_LOG_OBJECT (demux, "flow return: %s", gst_flow_get_name (ret));
+
+  /* Fold this stream's result into the shared flow combiner: with a pad we
+   * can track the per-pad state, otherwise just combine the bare value */
+  combined = (stream->pad != NULL)
+      ? gst_flow_combiner_update_pad_flow (demux->flowcombiner, stream->pad,
+      ret)
+      : gst_flow_combiner_update_flow (demux->flowcombiner, ret);
+
+  GST_LOG_OBJECT (demux, "combined flow return: %s",
+      gst_flow_get_name (combined));
+  return combined;
+}
+
+/* the input buffer metadata must be writable. Returns NULL when the buffer is
+ * completely clipped
+ *
+ * Should be used only with raw buffers */
+static GstBuffer *
+gst_qtdemux_clip_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+{
+ guint64 start, stop, cstart, cstop, diff;
+ GstClockTime pts, duration;
+ gsize size, osize;
+ gint num_rate, denom_rate;
+ gint frame_size;
+ gboolean clip_data;
+ guint offset;
+
+ osize = size = gst_buffer_get_size (buf);
+ offset = 0;
+
+ /* depending on the type, setup the clip parameters */
+ if (stream->subtype == FOURCC_soun) {
+ /* raw audio: clip both timestamps and the byte payload */
+ frame_size = CUR_STREAM (stream)->bytes_per_frame;
+ num_rate = GST_SECOND;
+ denom_rate = (gint) CUR_STREAM (stream)->rate;
+ clip_data = TRUE;
+ } else if (stream->subtype == FOURCC_vide) {
+ /* raw video: only the timestamps are adjusted, never the data */
+ frame_size = size;
+ num_rate = CUR_STREAM (stream)->fps_n;
+ denom_rate = CUR_STREAM (stream)->fps_d;
+ clip_data = FALSE;
+ } else
+ goto wrong_type;
+
+ if (frame_size <= 0)
+ goto bad_frame_size;
+
+ /* we can only clip if we have a valid pts */
+ pts = GST_BUFFER_PTS (buf);
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pts)))
+ goto no_pts;
+
+ duration = GST_BUFFER_DURATION (buf);
+
+ /* derive a duration from the payload size when none was set */
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (duration))) {
+ duration =
+ gst_util_uint64_scale_int (size / frame_size, num_rate, denom_rate);
+ }
+
+ start = pts;
+ stop = start + duration;
+
+ /* completely outside the segment -> drop the buffer */
+ if (G_UNLIKELY (!gst_segment_clip (&stream->segment,
+ GST_FORMAT_TIME, start, stop, &cstart, &cstop)))
+ goto clipped;
+
+ /* see if some clipping happened */
+ diff = cstart - start;
+ if (diff > 0) {
+ pts += diff;
+ duration -= diff;
+
+ if (clip_data) {
+ /* bring clipped time to samples and to bytes */
+ diff = gst_util_uint64_scale_int (diff, denom_rate, num_rate);
+ diff *= frame_size;
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "clipping start to %" GST_TIME_FORMAT " %"
+ G_GUINT64_FORMAT " bytes", GST_TIME_ARGS (cstart), diff);
+
+ offset = diff;
+ size -= diff;
+ }
+ }
+ diff = stop - cstop;
+ if (diff > 0) {
+ duration -= diff;
+
+ if (clip_data) {
+ /* bring clipped time to samples and then to bytes */
+ diff = gst_util_uint64_scale_int (diff, denom_rate, num_rate);
+ diff *= frame_size;
+ GST_DEBUG_OBJECT (qtdemux,
+ "clipping stop to %" GST_TIME_FORMAT " %" G_GUINT64_FORMAT
+ " bytes", GST_TIME_ARGS (cstop), diff);
+ size -= diff;
+ }
+ }
+
+ /* shrink the payload in place if any bytes were clipped */
+ if (offset != 0 || size != osize)
+ gst_buffer_resize (buf, offset, size);
+
+ /* dts invalid after clipping; only pts/duration are meaningful now */
+ GST_BUFFER_DTS (buf) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_PTS (buf) = pts;
+ GST_BUFFER_DURATION (buf) = duration;
+
+ return buf;
+
+ /* dropped buffer */
+wrong_type:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "unknown stream type");
+ return buf;
+ }
+bad_frame_size:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "bad frame size");
+ return buf;
+ }
+no_pts:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "no pts on buffer");
+ return buf;
+ }
+clipped:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "clipped buffer");
+ gst_buffer_unref (buf);
+ return NULL;
+ }
+}
+
+/* Return a buffer whose mapped data address satisfies @alignment; if the
+ * incoming buffer is already aligned (or too small to matter) it is
+ * returned unchanged, otherwise the data is copied into a newly allocated
+ * aligned buffer and the original is unreffed. The mask test below assumes
+ * @alignment is a power of two — callers should guarantee that. */
+static GstBuffer *
+gst_qtdemux_align_buffer (GstQTDemux * demux,
+ GstBuffer * buffer, gsize alignment)
+{
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ /* tiny buffers are not worth realigning */
+ if (map.size < sizeof (guintptr)) {
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
+ if (((guintptr) map.data) & (alignment - 1)) {
+ GstBuffer *new_buffer;
+ GstAllocationParams params = { 0, alignment - 1, 0, 0, };
+
+ new_buffer = gst_buffer_new_allocate (NULL,
+ gst_buffer_get_size (buffer), &params);
+
+ /* Copy data "by hand", so ensure alignment is kept: */
+ gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+ /* carry over timestamps, flags and meta from the original */
+ gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ GST_DEBUG_OBJECT (demux,
+ "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+ alignment);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return new_buffer;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+}
+
+/* Convert a cdat/cdt2 payload of CEA-608 byte *pairs* into SMPTE 334-1
+ * Annex A byte *triplets* (flags byte + the two cc bytes). @field selects
+ * the field flag written into the first byte of each triplet (1 -> 0x80,
+ * otherwise 0x00). Returns a g_malloc'd buffer owned by the caller; the
+ * number of output bytes is stored in @res. */
+static guint8 *
+convert_to_s334_1a (const guint8 * ccpair, guint8 ccpair_size, guint field,
+    gsize * res)
+{
+  guint8 *storage;
+  gsize i, pairs;
+
+  /* We are converting from pairs to triplets */
+  pairs = ccpair_size / 2;
+  *res = pairs * 3;
+  storage = g_malloc (*res);
+  /* Iterate over complete pairs only: the previous condition
+   * (i * 2 < ccpair_size) also processed a trailing odd byte, writing one
+   * triplet past the allocation and reading past @ccpair when
+   * ccpair_size was odd. */
+  for (i = 0; i < pairs; i++) {
+    /* FIXME: Use line offset 0 as we simply can't know here */
+    if (field == 1)
+      storage[i * 3] = 0x80 | 0x00;
+    else
+      storage[i * 3] = 0x00 | 0x00;
+    storage[i * 3 + 1] = ccpair[i * 2];
+    storage[i * 3 + 2] = ccpair[i * 2 + 1];
+  }
+
+  return storage;
+}
+
+static guint8 *
+extract_cc_from_data (QtDemuxStream * stream, const guint8 * data, gsize size,
+ gsize * cclen)
+{
+ guint8 *res = NULL;
+ guint32 atom_length, fourcc;
+ QtDemuxStreamStsdEntry *stsd_entry;
+
+ GST_MEMDUMP ("caption atom", data, size);
+
+ /* There might be multiple atoms */
+
+ *cclen = 0;
+ /* need at least an atom header (32-bit length + fourcc) */
+ if (size < 8)
+ goto invalid_cdat;
+ atom_length = QT_UINT32 (data);
+ fourcc = QT_FOURCC (data + 4);
+ if (G_UNLIKELY (atom_length > size || atom_length == 8))
+ goto invalid_cdat;
+
+ GST_DEBUG_OBJECT (stream->pad, "here");
+
+ /* Check if we have something compatible */
+ stsd_entry = CUR_STREAM (stream);
+ switch (stsd_entry->fourcc) {
+ case FOURCC_c608:{
+ guint8 *cdat = NULL, *cdt2 = NULL;
+ gsize cdat_size = 0, cdt2_size = 0;
+ /* Should be cdat or cdt2 */
+ if (fourcc != FOURCC_cdat && fourcc != FOURCC_cdt2) {
+ GST_WARNING_OBJECT (stream->pad,
+ "Unknown data atom (%" GST_FOURCC_FORMAT ") for CEA608",
+ GST_FOURCC_ARGS (fourcc));
+ goto invalid_cdat;
+ }
+
+ /* Convert to S334-1 Annex A byte triplet */
+ if (fourcc == FOURCC_cdat)
+ cdat = convert_to_s334_1a (data + 8, atom_length - 8, 1, &cdat_size);
+ else
+ cdt2 = convert_to_s334_1a (data + 8, atom_length - 8, 2, &cdt2_size);
+ GST_DEBUG_OBJECT (stream->pad, "size:%" G_GSIZE_FORMAT " atom_length:%u",
+ size, atom_length);
+
+ /* Check for another atom ? */
+ /* NOTE(review): this requires 8 *extra* bytes beyond the second atom
+ * header (size > atom_length + 8) and new_atom_length is not checked
+ * to be >= 8 before computing new_atom_length - 8 — verify against a
+ * sample with two minimal atoms */
+ if (size > atom_length + 8) {
+ guint32 new_atom_length = QT_UINT32 (data + atom_length);
+ if (size >= atom_length + new_atom_length) {
+ fourcc = QT_FOURCC (data + atom_length + 4);
+ if (fourcc == FOURCC_cdat) {
+ if (cdat == NULL)
+ cdat =
+ convert_to_s334_1a (data + atom_length + 8,
+ new_atom_length - 8, 1, &cdat_size);
+ else
+ GST_WARNING_OBJECT (stream->pad,
+ "Got multiple [cdat] atoms in a c608 sample. This is unsupported for now. Please file a bug");
+ } else {
+ if (cdt2 == NULL)
+ cdt2 =
+ convert_to_s334_1a (data + atom_length + 8,
+ new_atom_length - 8, 2, &cdt2_size);
+ else
+ GST_WARNING_OBJECT (stream->pad,
+ "Got multiple [cdt2] atoms in a c608 sample. This is unsupported for now. Please file a bug");
+ }
+ }
+ }
+
+ /* concatenate field-1 (cdat) then field-2 (cdt2) triplets */
+ *cclen = cdat_size + cdt2_size;
+ res = g_malloc (*cclen);
+ if (cdat_size)
+ memcpy (res, cdat, cdat_size);
+ if (cdt2_size)
+ memcpy (res + cdat_size, cdt2, cdt2_size);
+ g_free (cdat);
+ g_free (cdt2);
+ }
+ break;
+ case FOURCC_c708:
+ if (fourcc != FOURCC_ccdp) {
+ GST_WARNING_OBJECT (stream->pad,
+ "Unknown data atom (%" GST_FOURCC_FORMAT ") for CEA708",
+ GST_FOURCC_ARGS (fourcc));
+ goto invalid_cdat;
+ }
+ /* CEA-708: the ccdp payload is passed through unmodified */
+ *cclen = atom_length - 8;
+ res = g_memdup2 (data + 8, *cclen);
+ break;
+ default:
+ /* Keep this here in case other closed caption formats are added */
+ g_assert_not_reached ();
+ break;
+ }
+
+ GST_MEMDUMP ("Output", res, *cclen);
+ return res;
+
+ /* Errors */
+invalid_cdat:
+ GST_WARNING ("[cdat] atom is too small or invalid");
+ return NULL;
+}
+
+/* the input buffer metadata must be writable,
+ * but time/duration etc not yet set and need not be preserved */
+static GstBuffer *
+gst_qtdemux_process_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstBuffer * buf)
+{
+ GstMapInfo map;
+ guint nsize = 0;
+ gchar *str;
+
+ /* not many cases for now */
+ if (G_UNLIKELY (CUR_STREAM (stream)->fourcc == FOURCC_mp4s)) {
+ /* send a one time dvd clut event */
+ if (stream->pending_event && stream->pad)
+ gst_pad_push_event (stream->pad, stream->pending_event);
+ stream->pending_event = NULL;
+ }
+
+ /* only text/subtitle/subpicture/closed-caption streams need processing;
+ * everything else passes through untouched */
+ if (G_UNLIKELY (stream->subtype != FOURCC_text
+ && stream->subtype != FOURCC_sbtl &&
+ stream->subtype != FOURCC_subp && stream->subtype != FOURCC_clcp)) {
+ return buf;
+ }
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ /* empty buffer is sent to terminate previous subtitle */
+ if (map.size <= 2) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return NULL;
+ }
+ if (stream->subtype == FOURCC_subp) {
+ /* That's all the processing needed for subpictures */
+ gst_buffer_unmap (buf, &map);
+ return buf;
+ }
+
+ if (stream->subtype == FOURCC_clcp) {
+ guint8 *cc;
+ gsize cclen = 0;
+ /* For closed caption, we need to extract the information from the
+ * [cdat],[cdt2] or [ccdp] atom */
+ cc = extract_cc_from_data (stream, map.data, map.size, &cclen);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ if (cc) {
+ /* replace the input buffer with one wrapping the extracted cc data */
+ buf = _gst_buffer_new_wrapped (cc, cclen, g_free);
+ } else {
+ /* Conversion failed or there's nothing */
+ buf = NULL;
+ }
+ return buf;
+ }
+
+ /* 3GPP timed text: 16-bit big-endian text length followed by the text */
+ nsize = GST_READ_UINT16_BE (map.data);
+ nsize = MIN (nsize, map.size - 2);
+
+ GST_LOG_OBJECT (qtdemux, "3GPP timed text subtitle: %d/%" G_GSIZE_FORMAT "",
+ nsize, map.size);
+
+ /* takes care of UTF-8 validation or UTF-16 recognition,
+ * no other encoding expected */
+ str = gst_tag_freeform_string_to_utf8 ((gchar *) map.data + 2, nsize, NULL);
+ gst_buffer_unmap (buf, &map);
+ if (str) {
+ gst_buffer_unref (buf);
+ buf = _gst_buffer_new_wrapped (str, strlen (str), g_free);
+ } else {
+ /* this should not really happen unless the subtitle is corrupted */
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+
+ /* FIXME ? convert optional subsequent style info to markup */
+
+ return buf;
+}
+
/* Final per-buffer output step: clip @buf to the stream segment if needed,
 * maintain the DISCONT flag, attach protection metadata (aavd or cenc/cbcs),
 * fix up alignment and push the buffer on @stream's pad.
 * Takes ownership of @buf.  Returns the pad-push flow return, or GST_FLOW_OK
 * when clipping dropped the buffer entirely. */
static GstFlowReturn
gst_qtdemux_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
    GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime pts, duration;

  if (stream->need_clip)
    buf = gst_qtdemux_clip_buffer (qtdemux, stream, buf);

  /* buffer completely outside the segment: nothing to push */
  if (G_UNLIKELY (buf == NULL))
    goto exit;

  if (G_UNLIKELY (stream->discont)) {
    GST_LOG_OBJECT (qtdemux, "marking discont buffer");
    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
    stream->discont = FALSE;
  } else {
    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
  }

  GST_LOG_OBJECT (qtdemux,
      "Pushing buffer with dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT " on pad %s",
      GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_PAD_NAME (stream->pad));

  /* Audible DRM: attach a copy of the stream-wide default properties */
  if (stream->protected && stream->protection_scheme_type == FOURCC_aavd) {
    GstStructure *crypto_info;
    QtDemuxAavdEncryptionInfo *info =
        (QtDemuxAavdEncryptionInfo *) stream->protection_scheme_info;

    crypto_info = gst_structure_copy (info->default_properties);
    if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
      GST_ERROR_OBJECT (qtdemux, "failed to attach aavd metadata to buffer");
  }

  /* CENC/CBCS: flush pending protection events, then attach the per-sample
   * crypto info parsed from the sample auxiliary data */
  if (stream->protected && (stream->protection_scheme_type == FOURCC_cenc
          || stream->protection_scheme_type == FOURCC_cbcs)) {
    GstStructure *crypto_info;
    QtDemuxCencSampleSetInfo *info =
        (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
    gint index;
    GstEvent *event;

    while ((event = g_queue_pop_head (&stream->protection_scheme_event_queue))) {
      GST_TRACE_OBJECT (stream->pad, "pushing protection event: %"
          GST_PTR_FORMAT, event);
      gst_pad_push_event (stream->pad, event);
    }

    if (info->crypto_info == NULL) {
      if (stream->protection_scheme_type == FOURCC_cbcs) {
        /* cbcs can fall back to the constant/default sample properties */
        crypto_info = qtdemux_get_cenc_sample_properties (qtdemux, stream, 0);
        if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info)) {
          GST_ERROR_OBJECT (qtdemux,
              "failed to attach cbcs metadata to buffer");
          qtdemux_gst_structure_free (crypto_info);
        } else {
          GST_TRACE_OBJECT (qtdemux, "added cbcs protection metadata");
        }
      } else {
        GST_DEBUG_OBJECT (qtdemux,
            "cenc metadata hasn't been parsed yet, pushing buffer as if it wasn't encrypted");
      }
    } else {
      /* The end of the crypto_info array matches our n_samples position,
       * so count backward from there */
      index = stream->sample_index - stream->n_samples + info->crypto_info->len;
      if (G_LIKELY (index >= 0 && index < info->crypto_info->len)) {
        /* steal structure from array; protection meta takes ownership */
        crypto_info = g_ptr_array_index (info->crypto_info, index);
        g_ptr_array_index (info->crypto_info, index) = NULL;
        GST_LOG_OBJECT (qtdemux, "attaching cenc metadata [%u/%u]", index,
            info->crypto_info->len);
        if (!crypto_info || !gst_buffer_add_protection_meta (buf, crypto_info))
          GST_ERROR_OBJECT (qtdemux,
              "failed to attach cenc metadata to buffer");
      } else {
        GST_INFO_OBJECT (qtdemux, "No crypto info with index %d and sample %d",
            index, stream->sample_index);
      }
    }
  }

  if (stream->alignment > 1)
    buf = gst_qtdemux_align_buffer (qtdemux, buf, stream->alignment);

  /* grab timestamps before the push, which gives away our reference */
  pts = GST_BUFFER_PTS (buf);
  duration = GST_BUFFER_DURATION (buf);

  ret = gst_pad_push (stream->pad, buf);

  if (GST_CLOCK_TIME_IS_VALID (pts) && GST_CLOCK_TIME_IS_VALID (duration)) {
    /* mark position in stream, we'll need this to know when to send GAP event */
    stream->segment.position = pts + duration;
  }

exit:

  return ret;
}
+
/* Push @buf downstream, splitting CEA608 (c608) closed-caption buffers
 * first when needed: each outgoing buffer carries at most one byte pair
 * per field, paced at the video track framerate.  Non-c608 buffers and
 * buffers with a single pair per field are passed through unchanged.
 * Takes ownership of @buf. */
static GstFlowReturn
gst_qtdemux_split_and_push_buffer (GstQTDemux * qtdemux, QtDemuxStream * stream,
    GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;

  if (stream->subtype == FOURCC_clcp
      && CUR_STREAM (stream)->fourcc == FOURCC_c608 && stream->need_split) {
    GstMapInfo map;
    guint n_output_buffers, n_field1 = 0, n_field2 = 0;
    guint n_triplets, i;
    guint field1_off = 0, field2_off = 0;

    /* We have to split CEA608 buffers so that each outgoing buffer contains
     * one byte pair per field according to the framerate of the video track.
     *
     * If there is only a single byte pair per field we don't have to do
     * anything
     */

    gst_buffer_map (buf, &map, GST_MAP_READ);

    /* each triplet is a field-marker byte (MSB set = field 1) plus a
     * two-byte CC pair */
    n_triplets = map.size / 3;
    for (i = 0; i < n_triplets; i++) {
      if (map.data[3 * i] & 0x80)
        n_field1++;
      else
        n_field2++;
    }

    g_assert (n_field1 || n_field2);

    /* If there's more than 1 frame we have to split, otherwise we can just
     * pass through */
    if (n_field1 > 1 || n_field2 > 1) {
      /* one output buffer per video frame covered by the input duration */
      n_output_buffers =
          gst_util_uint64_scale (GST_BUFFER_DURATION (buf),
          CUR_STREAM (stream)->fps_n, GST_SECOND * CUR_STREAM (stream)->fps_d);

      for (i = 0; i < n_output_buffers; i++) {
        GstBuffer *outbuf =
            gst_buffer_new_and_alloc ((n_field1 ? 3 : 0) + (n_field2 ? 3 : 0));
        GstMapInfo outmap;
        guint8 *outptr;

        gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
        outptr = outmap.data;

        if (n_field1) {
          gboolean found = FALSE;

          /* scan forward for the next field-1 triplet */
          while (map.data + field1_off < map.data + map.size) {
            if (map.data[field1_off] & 0x80) {
              memcpy (outptr, &map.data[field1_off], 3);
              field1_off += 3;
              found = TRUE;
              break;
            }
            field1_off += 3;
          }

          if (!found) {
            /* no data left for this field: pad with a null pair */
            const guint8 empty[] = { 0x80, 0x80, 0x80 };

            memcpy (outptr, empty, 3);
          }

          outptr += 3;
        }

        if (n_field2) {
          gboolean found = FALSE;

          /* scan forward for the next field-2 triplet (MSB clear) */
          while (map.data + field2_off < map.data + map.size) {
            if ((map.data[field2_off] & 0x80) == 0) {
              memcpy (outptr, &map.data[field2_off], 3);
              field2_off += 3;
              found = TRUE;
              break;
            }
            field2_off += 3;
          }

          if (!found) {
            const guint8 empty[] = { 0x00, 0x80, 0x80 };

            memcpy (outptr, empty, 3);
          }

          outptr += 3;
        }

        gst_buffer_unmap (outbuf, &outmap);

        /* timestamps advance one frame duration per output buffer */
        GST_BUFFER_PTS (outbuf) =
            GST_BUFFER_PTS (buf) + gst_util_uint64_scale (i,
            GST_SECOND * CUR_STREAM (stream)->fps_d,
            CUR_STREAM (stream)->fps_n);
        GST_BUFFER_DURATION (outbuf) =
            gst_util_uint64_scale (GST_SECOND, CUR_STREAM (stream)->fps_d,
            CUR_STREAM (stream)->fps_n);
        GST_BUFFER_OFFSET (outbuf) = -1;
        GST_BUFFER_OFFSET_END (outbuf) = -1;

        ret = gst_qtdemux_push_buffer (qtdemux, stream, outbuf);

        if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED)
          break;
      }
      gst_buffer_unmap (buf, &map);
      gst_buffer_unref (buf);
    } else {
      gst_buffer_unmap (buf, &map);
      ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
    }
  } else {
    ret = gst_qtdemux_push_buffer (qtdemux, stream, buf);
  }

  return ret;
}
+
+/* Sets a buffer's attributes properly and pushes it downstream.
+ * Also checks for additional actions and custom processing that may
+ * need to be done first.
+ */
+static GstFlowReturn
+gst_qtdemux_decorate_and_push_buffer (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GstBuffer * buf,
+ GstClockTime dts, GstClockTime pts, GstClockTime duration,
+ gboolean keyframe, GstClockTime position, guint64 byte_position)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* offset the timestamps according to the edit list */
+
+ if (G_UNLIKELY (CUR_STREAM (stream)->fourcc == FOURCC_rtsp)) {
+ gchar *url;
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ url = g_strndup ((gchar *) map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ if (url != NULL && strlen (url) != 0) {
+ /* we have RTSP redirect now */
+ g_free (qtdemux->redirect_location);
+ qtdemux->redirect_location = g_strdup (url);
+ gst_element_post_message (GST_ELEMENT_CAST (qtdemux),
+ gst_message_new_element (GST_OBJECT_CAST (qtdemux),
+ gst_structure_new ("redirect",
+ "new-location", G_TYPE_STRING, url, NULL)));
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Redirect URI of stream is empty, not "
+ "posting");
+ }
+ g_free (url);
+ }
+
+ /* position reporting */
+ if (qtdemux->segment.rate >= 0) {
+ qtdemux->segment.position = position;
+ gst_qtdemux_sync_streams (qtdemux);
+ }
+
+ if (G_UNLIKELY (!stream->pad)) {
+ GST_DEBUG_OBJECT (qtdemux, "No output pad for stream, ignoring");
+ gst_buffer_unref (buf);
+ goto exit;
+ }
+
+ /* send out pending buffers */
+ while (stream->buffers) {
+ GstBuffer *buffer = (GstBuffer *) stream->buffers->data;
+
+ if (G_UNLIKELY (stream->discont)) {
+ GST_LOG_OBJECT (qtdemux, "marking discont buffer");
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+ stream->discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ if (stream->alignment > 1)
+ buffer = gst_qtdemux_align_buffer (qtdemux, buffer, stream->alignment);
+ gst_pad_push (stream->pad, buffer);
+
+ stream->buffers = g_slist_delete_link (stream->buffers, stream->buffers);
+ }
+
+ /* we're going to modify the metadata */
+ buf = gst_buffer_make_writable (buf);
+
+ if (G_UNLIKELY (stream->need_process))
+ buf = gst_qtdemux_process_buffer (qtdemux, stream, buf);
+
+ if (!buf) {
+ goto exit;
+ }
+
+ GST_BUFFER_DTS (buf) = dts;
+ GST_BUFFER_PTS (buf) = pts;
+ GST_BUFFER_DURATION (buf) = duration;
+ GST_BUFFER_OFFSET (buf) = -1;
+ GST_BUFFER_OFFSET_END (buf) = -1;
+
+ if (!keyframe) {
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ stream->on_keyframe = FALSE;
+ } else {
+ stream->on_keyframe = TRUE;
+ }
+
+ if (G_UNLIKELY (CUR_STREAM (stream)->rgb8_palette))
+ gst_buffer_append_memory (buf,
+ gst_memory_ref (CUR_STREAM (stream)->rgb8_palette));
+
+ if (G_UNLIKELY (CUR_STREAM (stream)->padding)) {
+ gst_buffer_resize (buf, CUR_STREAM (stream)->padding, -1);
+ }
+#if 0
+ if (G_UNLIKELY (qtdemux->element_index)) {
+ GstClockTime stream_time;
+
+ stream_time =
+ gst_segment_to_stream_time (&stream->segment, GST_FORMAT_TIME,
+ timestamp);
+ if (GST_CLOCK_TIME_IS_VALID (stream_time)) {
+ GST_LOG_OBJECT (qtdemux,
+ "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (stream_time), byte_position);
+ gst_index_add_association (qtdemux->element_index,
+ qtdemux->index_id,
+ keyframe ? GST_ASSOCIATION_FLAG_KEY_UNIT :
+ GST_ASSOCIATION_FLAG_DELTA_UNIT, GST_FORMAT_TIME, stream_time,
+ GST_FORMAT_BYTES, byte_position, NULL);
+ }
+ }
+#endif
+
+ ret = gst_qtdemux_split_and_push_buffer (qtdemux, stream, buf);
+
+exit:
+ return ret;
+}
+
+static const QtDemuxRandomAccessEntry *
+gst_qtdemux_stream_seek_fragment (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GstClockTime pos, gboolean after)
+{
+ QtDemuxRandomAccessEntry *entries = stream->ra_entries;
+ guint n_entries = stream->n_ra_entries;
+ guint i;
+
+ /* we assume the table is sorted */
+ for (i = 0; i < n_entries; ++i) {
+ if (entries[i].ts > pos)
+ break;
+ }
+
+ /* FIXME: maybe save first moof_offset somewhere instead, but for now it's
+ * probably okay to assume that the index lists the very first fragment */
+ if (i == 0)
+ return &entries[0];
+
+ if (after)
+ return &entries[i];
+ else
+ return &entries[i - 1];
+}
+
/* Resolve a pending seek in a fragmented file using the mfra random-access
 * index: pick the best moof offset based on the audio/video tracks, then
 * reset all per-stream sample tables (and stale cenc crypto info) so that
 * parsing restarts from that fragment.  Takes the object lock.
 * Returns TRUE if a fragment was found; FALSE means the caller must handle
 * the seek some other way. */
static gboolean
gst_qtdemux_do_fragmented_seek (GstQTDemux * qtdemux)
{
  const QtDemuxRandomAccessEntry *best_entry = NULL;
  gint i;

  GST_OBJECT_LOCK (qtdemux);

  g_assert (QTDEMUX_N_STREAMS (qtdemux) > 0);

  /* first see if we can determine where to go to using mfra,
   * before we start clearing things */
  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    const QtDemuxRandomAccessEntry *entry;
    QtDemuxStream *stream;
    gboolean is_audio_or_video;

    stream = QTDEMUX_NTH_STREAM (qtdemux, i);

    if (stream->ra_entries == NULL)
      continue;

    if (stream->subtype == FOURCC_vide || stream->subtype == FOURCC_soun)
      is_audio_or_video = TRUE;
    else
      is_audio_or_video = FALSE;

    /* for non-a/v (e.g. subtitle) tracks seek to the entry after the
     * position so they don't drag the fragment choice backwards */
    entry =
        gst_qtdemux_stream_seek_fragment (qtdemux, stream,
        stream->time_position, !is_audio_or_video);

    GST_INFO_OBJECT (stream->pad, "%" GST_TIME_FORMAT " at offset "
        "%" G_GUINT64_FORMAT, GST_TIME_ARGS (entry->ts), entry->moof_offset);

    stream->pending_seek = entry;

    /* decide position to jump to just based on audio/video tracks, not subs */
    if (!is_audio_or_video)
      continue;

    if (best_entry == NULL || entry->moof_offset < best_entry->moof_offset)
      best_entry = entry;
  }

  /* no luck, will handle seek otherwise */
  if (best_entry == NULL) {
    GST_OBJECT_UNLOCK (qtdemux);
    return FALSE;
  }

  /* ok, now we can prepare for processing as of located moof */
  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    QtDemuxStream *stream;

    stream = QTDEMUX_NTH_STREAM (qtdemux, i);

    g_free (stream->samples);
    stream->samples = NULL;
    stream->n_samples = 0;
    stream->stbl_index = -1;    /* no samples have yet been parsed */
    stream->sample_index = -1;

    if (stream->protection_scheme_info) {
      /* Clear out any old cenc crypto info entries as we'll move to a new moof */
      if (stream->protection_scheme_type == FOURCC_cenc
          || stream->protection_scheme_type == FOURCC_cbcs) {
        QtDemuxCencSampleSetInfo *info =
            (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
        if (info->crypto_info) {
          g_ptr_array_free (info->crypto_info, TRUE);
          info->crypto_info = NULL;
        }
      }
    }
  }

  GST_INFO_OBJECT (qtdemux, "seek to %" GST_TIME_FORMAT ", best fragment "
      "moof offset: %" G_GUINT64_FORMAT ", ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (QTDEMUX_NTH_STREAM (qtdemux, 0)->time_position),
      best_entry->moof_offset, GST_TIME_ARGS (best_entry->ts));

  qtdemux->moof_offset = best_entry->moof_offset;

  qtdemux_add_fragmented_samples (qtdemux);

  GST_OBJECT_UNLOCK (qtdemux);
  return TRUE;
}
+
/* Pull-mode loop body for QTDEMUX_STATE_MOVIE: pick the stream with the
 * lowest global timestamp, pull its next sample (possibly splitting large
 * samples or combining small contiguous ones), decorate and push it.
 * Also handles pending fragmented seeks, segment-end detection, gap events
 * for lagging streams, keyframe-only trickmode and cenc auxiliary info.
 * Returns GST_FLOW_EOS when every stream is exhausted. */
static GstFlowReturn
gst_qtdemux_loop_state_movie (GstQTDemux * qtdemux)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *buf = NULL;
  QtDemuxStream *stream, *target_stream = NULL;
  GstClockTime min_time;
  guint64 offset = 0;
  GstClockTime dts = GST_CLOCK_TIME_NONE;
  GstClockTime pts = GST_CLOCK_TIME_NONE;
  GstClockTime duration = 0;
  gboolean keyframe = FALSE;
  guint sample_size = 0;
  guint num_samples = 1;
  gboolean empty = 0;
  guint size;
  gint i;

  if (qtdemux->fragmented_seek_pending) {
    GST_INFO_OBJECT (qtdemux, "pending fragmented seek");
    if (gst_qtdemux_do_fragmented_seek (qtdemux)) {
      GST_INFO_OBJECT (qtdemux, "fragmented seek done!");
      qtdemux->fragmented_seek_pending = FALSE;
    } else {
      GST_INFO_OBJECT (qtdemux, "fragmented seek still pending");
    }
  }

  /* Figure out the next stream sample to output, min_time is expressed in
   * global time and runs over the edit list segments. */
  min_time = G_MAXUINT64;
  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    GstClockTime position;

    stream = QTDEMUX_NTH_STREAM (qtdemux, i);
    position = stream->time_position;

    if (!GST_CLOCK_TIME_IS_VALID (position))
      continue;

    /* offset the stream position into media time of its current segment */
    if (stream->segment_index != -1) {
      QtDemuxSegment *segment = &stream->segments[stream->segment_index];
      position += segment->media_start;
    }

    /* position of -1 is EOS */
    if (position < min_time) {
      min_time = position;
      target_stream = stream;
    }
  }
  /* all are EOS */
  if (G_UNLIKELY (target_stream == NULL)) {
    GST_DEBUG_OBJECT (qtdemux, "all streams are EOS");
    goto eos;
  }

  /* check for segment end */
  if (G_UNLIKELY (qtdemux->segment.stop != -1
          && qtdemux->segment.rate >= 0
          && qtdemux->segment.stop <= min_time && target_stream->on_keyframe)) {
    GST_DEBUG_OBJECT (qtdemux, "we reached the end of our segment.");
    target_stream->time_position = GST_CLOCK_TIME_NONE;
    goto eos_stream;
  }

  /* gap events for subtitle streams */
  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
    stream = QTDEMUX_NTH_STREAM (qtdemux, i);
    if (stream->pad) {
      GstClockTime gap_threshold;

      /* Only send gap events on non-subtitle streams if lagging way behind. */
      if (stream->subtype == FOURCC_subp
          || stream->subtype == FOURCC_text || stream->subtype == FOURCC_sbtl)
        gap_threshold = 1 * GST_SECOND;
      else
        gap_threshold = 3 * GST_SECOND;

      /* send gap events until the stream catches up */
      /* gaps can only be sent after segment is activated (segment.stop is no longer -1) */
      while (GST_CLOCK_TIME_IS_VALID (stream->segment.stop) &&
          GST_CLOCK_TIME_IS_VALID (stream->segment.position) &&
          stream->segment.position + gap_threshold < min_time) {
        GstEvent *gap =
            gst_event_new_gap (stream->segment.position, gap_threshold);
        gst_pad_push_event (stream->pad, gap);
        stream->segment.position += gap_threshold;
      }
    }
  }

  stream = target_stream;
  /* fetch info for the current sample of this stream */
  if (G_UNLIKELY (!gst_qtdemux_prepare_current_sample (qtdemux, stream, &empty,
              &offset, &sample_size, &dts, &pts, &duration, &keyframe)))
    goto eos_stream;

  /* switch stsd entry / renegotiate caps if the sample description changed */
  gst_qtdemux_stream_check_and_change_stsd_index (qtdemux, stream);
  if (stream->new_caps) {
    gst_qtdemux_configure_stream (qtdemux, stream);
    qtdemux_do_allocation (stream, qtdemux);
  }

  /* If we're doing a keyframe-only trickmode, only push keyframes on video streams */
  if (G_UNLIKELY (qtdemux->segment.
          flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS)) {
    if (stream->subtype == FOURCC_vide) {
      if (!keyframe) {
        GST_LOG_OBJECT (qtdemux, "Skipping non-keyframe on track-id %u",
            stream->track_id);
        goto next;
      } else if (qtdemux->trickmode_interval > 0) {
        GstClockTimeDiff interval;

        /* interval direction depends on playback rate */
        if (qtdemux->segment.rate > 0)
          interval = stream->time_position - stream->last_keyframe_dts;
        else
          interval = stream->last_keyframe_dts - stream->time_position;

        if (GST_CLOCK_TIME_IS_VALID (stream->last_keyframe_dts)
            && interval < qtdemux->trickmode_interval) {
          GST_LOG_OBJECT (qtdemux,
              "Skipping keyframe within interval on track-id %u",
              stream->track_id);
          goto next;
        } else {
          stream->last_keyframe_dts = stream->time_position;
        }
      }
    }
  }

  GST_DEBUG_OBJECT (qtdemux,
      "pushing from track-id %u, empty %d offset %" G_GUINT64_FORMAT
      ", size %d, dts=%" GST_TIME_FORMAT ", pts=%" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT, stream->track_id, empty, offset,
      sample_size, GST_TIME_ARGS (dts), GST_TIME_ARGS (pts),
      GST_TIME_ARGS (duration));

  if (G_UNLIKELY (empty)) {
    /* empty segment, push a gap if there's a second or more
     * difference and move to the next one */
    if ((pts + duration - stream->segment.position) >= GST_SECOND)
      gst_pad_push_event (stream->pad, gst_event_new_gap (pts, duration));
    stream->segment.position = pts + duration;
    goto next;
  }

  /* hmm, empty sample, skip and move to next sample */
  if (G_UNLIKELY (sample_size <= 0))
    goto next;

  /* last pushed sample was out of boundary, goto next sample */
  if (G_UNLIKELY (GST_PAD_LAST_FLOW_RETURN (stream->pad) == GST_FLOW_EOS))
    goto next;

  if (stream->max_buffer_size != 0 && sample_size > stream->max_buffer_size) {
    /* split an oversized sample: pull at most max_buffer_size this round,
     * offset_in_sample tracks our progress within the sample */
    GST_DEBUG_OBJECT (qtdemux,
        "size %d larger than stream max_buffer_size %d, trimming",
        sample_size, stream->max_buffer_size);
    size =
        MIN (sample_size - stream->offset_in_sample, stream->max_buffer_size);
  } else if (stream->min_buffer_size != 0 && stream->offset_in_sample == 0
      && sample_size < stream->min_buffer_size) {
    /* combine several small, contiguous samples into one pull */
    guint start_sample_index = stream->sample_index;
    guint accumulated_size = sample_size;
    guint64 expected_next_offset = offset + sample_size;

    GST_DEBUG_OBJECT (qtdemux,
        "size %d smaller than stream min_buffer_size %d, combining with the next",
        sample_size, stream->min_buffer_size);

    while (stream->sample_index < stream->to_sample
        && stream->sample_index + 1 < stream->n_samples) {
      const QtDemuxSample *next_sample;

      /* Increment temporarily */
      stream->sample_index++;

      /* Failed to parse sample so let's go back to the previous one that was
       * still successful */
      if (!qtdemux_parse_samples (qtdemux, stream, stream->sample_index)) {
        stream->sample_index--;
        break;
      }

      next_sample = &stream->samples[stream->sample_index];

      /* Not contiguous with the previous sample so let's go back to the
       * previous one that was still successful */
      if (next_sample->offset != expected_next_offset) {
        stream->sample_index--;
        break;
      }

      accumulated_size += next_sample->size;
      expected_next_offset += next_sample->size;
      if (accumulated_size >= stream->min_buffer_size)
        break;
    }

    num_samples = stream->sample_index + 1 - start_sample_index;
    stream->sample_index = start_sample_index;
    GST_DEBUG_OBJECT (qtdemux, "Pulling %u samples of size %u at once",
        num_samples, accumulated_size);
    size = accumulated_size;
  } else {
    size = sample_size;
  }

  if (qtdemux->cenc_aux_info_offset > 0) {
    GstMapInfo map;
    GstByteReader br;
    GstBuffer *aux_info = NULL;

    /* pull the data stored before the sample */
    ret =
        gst_qtdemux_pull_atom (qtdemux, qtdemux->offset,
        offset + stream->offset_in_sample - qtdemux->offset, &aux_info);
    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto beach;
    gst_buffer_map (aux_info, &map, GST_MAP_READ);
    GST_DEBUG_OBJECT (qtdemux, "parsing cenc auxiliary info");
    gst_byte_reader_init (&br, map.data + 8, map.size);
    if (!qtdemux_parse_cenc_aux_info (qtdemux, stream, &br,
            qtdemux->cenc_aux_info_sizes, qtdemux->cenc_aux_sample_count)) {
      GST_ERROR_OBJECT (qtdemux, "failed to parse cenc auxiliary info");
      gst_buffer_unmap (aux_info, &map);
      gst_buffer_unref (aux_info);
      ret = GST_FLOW_ERROR;
      goto beach;
    }
    gst_buffer_unmap (aux_info, &map);
    gst_buffer_unref (aux_info);
  }

  GST_LOG_OBJECT (qtdemux, "reading %d bytes @ %" G_GUINT64_FORMAT, size,
      offset);

  if (stream->use_allocator) {
    /* if we have a per-stream allocator, use it */
    buf = gst_buffer_new_allocate (stream->allocator, size, &stream->params);
  }

  ret = gst_qtdemux_pull_atom (qtdemux, offset + stream->offset_in_sample,
      size, &buf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto beach;

  /* Update for both splitting and combining of samples */
  if (size != sample_size) {
    /* NOTE(review): assumes constant bytes_per_frame audio here — these
     * rescales only make sense for raw audio streams; confirm callers */
    pts += gst_util_uint64_scale_int (GST_SECOND,
        stream->offset_in_sample / CUR_STREAM (stream)->bytes_per_frame,
        stream->timescale);
    dts +=
        gst_util_uint64_scale_int (GST_SECOND,
        stream->offset_in_sample / CUR_STREAM (stream)->bytes_per_frame,
        stream->timescale);
    duration =
        gst_util_uint64_scale_int (GST_SECOND,
        size / CUR_STREAM (stream)->bytes_per_frame, stream->timescale);
  }

  ret = gst_qtdemux_decorate_and_push_buffer (qtdemux, stream, buf,
      dts, pts, duration, keyframe, min_time, offset);

  if (size < sample_size) {
    /* partial sample pushed: advance time_position within the sample */
    QtDemuxSample *sample = &stream->samples[stream->sample_index];
    QtDemuxSegment *segment = &stream->segments[stream->segment_index];

    GstClockTime time_position = QTSTREAMTIME_TO_GSTTIME (stream,
        sample->timestamp +
        stream->offset_in_sample / CUR_STREAM (stream)->bytes_per_frame);
    if (time_position >= segment->media_start) {
      /* inside the segment, update time_position, looks very familiar to
       * GStreamer segments, doesn't it? */
      stream->time_position = (time_position - segment->media_start) +
          segment->time;
    } else {
      /* not yet in segment, time does not yet increment. This means
       * that we are still prerolling keyframes to the decoder so it can
       * decode the first sample of the segment. */
      stream->time_position = segment->time;
    }
  } else if (size > sample_size) {
    /* Increase to the last sample we already pulled so that advancing
     * below brings us to the next sample we need to pull */
    stream->sample_index += num_samples - 1;
  }

  /* combine flows */
  GST_OBJECT_LOCK (qtdemux);
  ret = gst_qtdemux_combine_flows (qtdemux, stream, ret);
  GST_OBJECT_UNLOCK (qtdemux);
  /* ignore unlinked, we will not push on the pad anymore and we will EOS when
   * we have no more data for the pad to push */
  if (ret == GST_FLOW_EOS)
    ret = GST_FLOW_OK;

  stream->offset_in_sample += size;
  if (stream->offset_in_sample >= sample_size) {
    gst_qtdemux_advance_sample (qtdemux, stream);
  }
  goto beach;

next:
  gst_qtdemux_advance_sample (qtdemux, stream);

beach:
  return ret;

  /* special cases */
eos:
  {
    GST_DEBUG_OBJECT (qtdemux, "No samples left for any streams - EOS");
    ret = GST_FLOW_EOS;
    goto beach;
  }
eos_stream:
  {
    GST_DEBUG_OBJECT (qtdemux, "No samples left for stream");
    /* EOS will be raised if all are EOS */
    ret = GST_FLOW_OK;
    goto beach;
  }
}
+
/* Pad task function for pull mode.  Dispatches on the demuxer state
 * (header parsing vs. movie streaming) and, when a non-OK flow is
 * returned, pauses the task and posts/pushes the appropriate
 * segment-done or EOS message/event, or an error for fatal flows. */
static void
gst_qtdemux_loop (GstPad * pad)
{
  GstQTDemux *qtdemux;
  guint64 cur_offset;
  GstFlowReturn ret;

  qtdemux = GST_QTDEMUX (gst_pad_get_parent (pad));

  cur_offset = qtdemux->offset;
  GST_LOG_OBJECT (qtdemux, "loop at position %" G_GUINT64_FORMAT ", state %s",
      cur_offset, qt_demux_state_string (qtdemux->state));

  switch (qtdemux->state) {
    case QTDEMUX_STATE_INITIAL:
    case QTDEMUX_STATE_HEADER:
      ret = gst_qtdemux_loop_state_header (qtdemux);
      break;
    case QTDEMUX_STATE_MOVIE:
      ret = gst_qtdemux_loop_state_movie (qtdemux);
      /* in reverse playback EOS means "segment consumed": jump back to
       * the previous keyframe instead of stopping */
      if (qtdemux->segment.rate < 0 && ret == GST_FLOW_EOS) {
        ret = gst_qtdemux_seek_to_previous_keyframe (qtdemux);
      }
      break;
    default:
      /* ouch */
      goto invalid_state;
  }

  /* if something went wrong, pause */
  if (ret != GST_FLOW_OK)
    goto pause;

done:
  gst_object_unref (qtdemux);
  return;

  /* ERRORS */
invalid_state:
  {
    GST_ELEMENT_ERROR (qtdemux, STREAM, FAILED,
        (NULL), ("streaming stopped, invalid state"));
    gst_pad_pause_task (pad);
    gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
    goto done;
  }
pause:
  {
    const gchar *reason = gst_flow_get_name (ret);

    GST_LOG_OBJECT (qtdemux, "pausing task, reason %s", reason);

    gst_pad_pause_task (pad);

    /* fatal errors need special actions */
    /* check EOS */
    if (ret == GST_FLOW_EOS) {
      if (QTDEMUX_N_STREAMS (qtdemux) == 0) {
        /* we have no streams, post an error */
        gst_qtdemux_post_no_playable_stream_error (qtdemux);
      }
      if (qtdemux->segment.flags & GST_SEEK_FLAG_SEGMENT) {
        /* segment seek: report segment-done instead of EOS */
        gint64 stop;

        if ((stop = qtdemux->segment.stop) == -1)
          stop = qtdemux->segment.duration;

        if (qtdemux->segment.rate >= 0) {
          GstMessage *message;
          GstEvent *event;

          GST_LOG_OBJECT (qtdemux, "Sending segment done, at end of segment");
          message = gst_message_new_segment_done (GST_OBJECT_CAST (qtdemux),
              GST_FORMAT_TIME, stop);
          event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
          if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
            gst_message_set_seqnum (message, qtdemux->segment_seqnum);
            gst_event_set_seqnum (event, qtdemux->segment_seqnum);
          }
          gst_element_post_message (GST_ELEMENT_CAST (qtdemux), message);
          gst_qtdemux_push_event (qtdemux, event);
        } else {
          GstMessage *message;
          GstEvent *event;

          /* For Reverse Playback */
          GST_LOG_OBJECT (qtdemux, "Sending segment done, at start of segment");
          message = gst_message_new_segment_done (GST_OBJECT_CAST (qtdemux),
              GST_FORMAT_TIME, qtdemux->segment.start);
          event = gst_event_new_segment_done (GST_FORMAT_TIME,
              qtdemux->segment.start);
          if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID) {
            gst_message_set_seqnum (message, qtdemux->segment_seqnum);
            gst_event_set_seqnum (event, qtdemux->segment_seqnum);
          }
          gst_element_post_message (GST_ELEMENT_CAST (qtdemux), message);
          gst_qtdemux_push_event (qtdemux, event);
        }
      } else {
        GstEvent *event;

        GST_LOG_OBJECT (qtdemux, "Sending EOS at end of segment");
        event = gst_event_new_eos ();
        if (qtdemux->segment_seqnum != GST_SEQNUM_INVALID)
          gst_event_set_seqnum (event, qtdemux->segment_seqnum);
        gst_qtdemux_push_event (qtdemux, event);
      }
    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
      GST_ELEMENT_FLOW_ERROR (qtdemux, ret);
      gst_qtdemux_push_event (qtdemux, gst_event_new_eos ());
    }
    goto done;
  }
}
+
+/*
+ * has_next_entry
+ *
+ * Returns if there are samples to be played.
+ */
+static gboolean
+has_next_entry (GstQTDemux * demux)
+{
+ QtDemuxStream *stream;
+ gint i;
+
+ GST_DEBUG_OBJECT (demux, "Checking if there are samples not played yet");
+
+ for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
+ stream = QTDEMUX_NTH_STREAM (demux, i);
+
+ if (stream->sample_index == -1) {
+ stream->sample_index = 0;
+ stream->offset_in_sample = 0;
+ }
+
+ if (stream->sample_index >= stream->n_samples) {
+ GST_LOG_OBJECT (demux, "track-id %u samples exhausted", stream->track_id);
+ continue;
+ }
+ GST_DEBUG_OBJECT (demux, "Found a sample");
+ return TRUE;
+ }
+
+ GST_DEBUG_OBJECT (demux, "There wasn't any next sample");
+ return FALSE;
+}
+
/*
 * next_entry_size
 *
 * Returns the size of the first entry at the current offset.
 * If -1, there are none (which means EOS or empty file).
 * Side effects: lazily initialises per-stream sample_index, parses sample
 * tables on demand, and sets demux->todrop to the number of bytes between
 * demux->offset and the chosen sample's offset.
 */
static guint64
next_entry_size (GstQTDemux * demux)
{
  QtDemuxStream *stream, *target_stream = NULL;
  guint64 smalloffs = (guint64) - 1;
  QtDemuxSample *sample;
  gint i;

  GST_LOG_OBJECT (demux, "Finding entry at offset %" G_GUINT64_FORMAT,
      demux->offset);

  /* pick the stream whose next sample has the smallest file offset */
  for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
    stream = QTDEMUX_NTH_STREAM (demux, i);

    if (stream->sample_index == -1) {
      stream->sample_index = 0;
      stream->offset_in_sample = 0;
    }

    if (stream->sample_index >= stream->n_samples) {
      GST_LOG_OBJECT (demux, "track-id %u samples exhausted", stream->track_id);
      continue;
    }

    if (!qtdemux_parse_samples (demux, stream, stream->sample_index)) {
      GST_LOG_OBJECT (demux, "Parsing of index %u from stbl atom failed!",
          stream->sample_index);
      return -1;
    }

    sample = &stream->samples[stream->sample_index];

    GST_LOG_OBJECT (demux,
        "Checking track-id %u (sample_index:%d / offset:%" G_GUINT64_FORMAT
        " / size:%" G_GUINT32_FORMAT ")", stream->track_id,
        stream->sample_index, sample->offset, sample->size);

    /* zero-sized samples are skipped */
    if (((smalloffs == -1)
            || (sample->offset < smalloffs)) && (sample->size)) {
      smalloffs = sample->offset;
      target_stream = stream;
    }
  }

  if (!target_stream)
    return -1;

  GST_LOG_OBJECT (demux,
      "track-id %u offset %" G_GUINT64_FORMAT " demux->offset :%"
      G_GUINT64_FORMAT, target_stream->track_id, smalloffs, demux->offset);

  stream = target_stream;
  sample = &stream->samples[stream->sample_index];

  if (sample->offset >= demux->offset) {
    /* record how many leading bytes must be dropped before the sample */
    demux->todrop = sample->offset - demux->offset;
    return sample->size + demux->todrop;
  }

  /* sample lies before the current offset: cannot be reached in push mode */
  GST_DEBUG_OBJECT (demux,
      "There wasn't any entry at offset %" G_GUINT64_FORMAT, demux->offset);
  return -1;
}
+
+static void
+gst_qtdemux_post_progress (GstQTDemux * demux, gint num, gint denom)
+{
+ gint perc = (gint) ((gdouble) num * 100.0 / (gdouble) denom);
+
+ gst_element_post_message (GST_ELEMENT_CAST (demux),
+ gst_message_new_element (GST_OBJECT_CAST (demux),
+ gst_structure_new ("progress", "percent", G_TYPE_INT, perc, NULL)));
+}
+
+static gboolean
+qtdemux_seek_offset (GstQTDemux * demux, guint64 offset)
+{
+ GstEvent *event;
+ gboolean res = 0;
+
+ GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ event =
+ gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+
+ /* store seqnum to drop flush events, they don't need to reach downstream */
+ demux->offset_seek_seqnum = gst_event_get_seqnum (event);
+ res = gst_pad_push_event (demux->sinkpad, event);
+ demux->offset_seek_seqnum = GST_SEQNUM_INVALID;
+
+ return res;
+}
+
/* check for seekable upstream, above and beyond a mere query */
/* Probes upstream with a BYTES seeking query (plus a duration query as
 * fallback for an unknown stop) and stores the result in
 * demux->upstream_seekable / demux->upstream_size.  No-op if the size is
 * already known or upstream operates in TIME format. */
static void
gst_qtdemux_check_seekability (GstQTDemux * demux)
{
  GstQuery *query;
  gboolean seekable = FALSE;
  gint64 start = -1, stop = -1;

  if (demux->upstream_size)
    return;

  if (demux->upstream_format_is_time)
    return;

  query = gst_query_new_seeking (GST_FORMAT_BYTES);
  if (!gst_pad_peer_query (demux->sinkpad, query)) {
    GST_DEBUG_OBJECT (demux, "seeking query failed");
    goto done;
  }

  gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);

  /* try harder to query upstream size if we didn't get it the first time */
  if (seekable && stop == -1) {
    GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
    gst_pad_peer_query_duration (demux->sinkpad, GST_FORMAT_BYTES, &stop);
  }

  /* if upstream doesn't know the size, it's likely that it's not seekable in
   * practice even if it technically may be seekable */
  if (seekable && (start != 0 || stop <= start)) {
    GST_DEBUG_OBJECT (demux, "seekable but unknown start/stop -> disable");
    seekable = FALSE;
  }

done:
  gst_query_unref (query);

  GST_DEBUG_OBJECT (demux, "seekable: %d (%" G_GUINT64_FORMAT " - %"
      G_GUINT64_FORMAT ")", seekable, start, stop);
  demux->upstream_seekable = seekable;
  demux->upstream_size = seekable ? stop : -1;
}
+
+static void
+gst_qtdemux_drop_data (GstQTDemux * demux, gint bytes)
+{
+ g_return_if_fail (bytes <= demux->todrop);
+
+ GST_LOG_OBJECT (demux, "Dropping %d bytes", bytes);
+ gst_adapter_flush (demux->adapter, bytes);
+ demux->neededbytes -= bytes;
+ demux->offset += bytes;
+ demux->todrop -= bytes;
+}
+
/* PUSH-MODE only: Send a segment, if not done already. */
/* Also (re)sends pending tags for every stream afterwards and emits an
 * open-ended gap on sparse streams so downstream can preroll. */
static void
gst_qtdemux_check_send_pending_segment (GstQTDemux * demux)
{
  if (G_UNLIKELY (demux->need_segment)) {
    gint i;

    if (!demux->upstream_format_is_time) {
      /* byte-based upstream: build TIME segments from the edit lists */
      gst_qtdemux_map_and_push_segments (demux, &demux->segment);
    } else {
      /* upstream already runs in TIME: forward our segment as-is */
      GstEvent *segment_event;
      segment_event = gst_event_new_segment (&demux->segment);
      if (demux->segment_seqnum != GST_SEQNUM_INVALID)
        gst_event_set_seqnum (segment_event, demux->segment_seqnum);
      gst_qtdemux_push_event (demux, segment_event);
    }

    demux->need_segment = FALSE;

    /* clear to send tags on all streams */
    for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
      QtDemuxStream *stream = QTDEMUX_NTH_STREAM (demux, i);
      gst_qtdemux_push_tags (demux, stream);
      if (CUR_STREAM (stream)->sparse) {
        GST_INFO_OBJECT (demux, "Sending gap event on stream %d", i);
        gst_pad_push_event (stream->pad,
            gst_event_new_gap (stream->segment.position, GST_CLOCK_TIME_NONE));
      }
    }
  }
}
+
+/* Used for push mode only. */
+static void
+gst_qtdemux_send_gap_for_segment (GstQTDemux * demux,
+ QtDemuxStream * stream, gint segment_index, GstClockTime pos)
+{
+ GstClockTime ts, dur;
+
+ ts = pos;
+ dur =
+ stream->segments[segment_index].duration - (pos -
+ stream->segments[segment_index].time);
+ stream->time_position += dur;
+
+ /* Only gaps with a duration of at least one second are propagated.
+ * Same workaround as in pull mode.
+ * (See 2e45926a96ec5298c6ef29bf912e5e6a06dc3e0e) */
+ if (dur >= GST_SECOND) {
+ GstEvent *gap;
+ gap = gst_event_new_gap (ts, dur);
+
+ GST_DEBUG_OBJECT (stream->pad, "Pushing gap for empty "
+ "segment: %" GST_PTR_FORMAT, gap);
+ gst_pad_push_event (stream->pad, gap);
+ }
+}
+
/* Sink pad chain function (push mode): accumulate incoming buffers in the
 * adapter, handling DISCONT buffers first (either resynchronizing on a known
 * sample offset for gapped/trick-mode input, or fully resetting the parser
 * state), then hand over to gst_qtdemux_process_adapter(). */
static GstFlowReturn
gst_qtdemux_chain (GstPad * sinkpad, GstObject * parent, GstBuffer * inbuf)
{
  GstQTDemux *demux;

  demux = GST_QTDEMUX (parent);

  GST_DEBUG_OBJECT (demux,
      "Received buffer pts:%" GST_TIME_FORMAT " dts:%" GST_TIME_FORMAT
      " offset:%" G_GUINT64_FORMAT " size:%" G_GSIZE_FORMAT " demux offset:%"
      G_GUINT64_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (inbuf)),
      GST_TIME_ARGS (GST_BUFFER_DTS (inbuf)), GST_BUFFER_OFFSET (inbuf),
      gst_buffer_get_size (inbuf), demux->offset);

  if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) {
    gboolean is_gap_input = FALSE;
    gint i;

    GST_DEBUG_OBJECT (demux, "Got DISCONT, marking all streams as DISCONT");

    for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
      QTDEMUX_NTH_STREAM (demux, i)->discont = TRUE;
    }

    /* Check if we can land back on our feet in the case where upstream is
     * handling the seeking/pushing of samples with gaps in between (like
     * in the case of trick-mode DASH for example) */
    if (demux->upstream_format_is_time
        && GST_BUFFER_OFFSET (inbuf) != GST_BUFFER_OFFSET_NONE) {
      /* scan every stream's sample table for one starting exactly at the
       * incoming buffer offset */
      for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
        guint32 res;
        QtDemuxStream *stream = QTDEMUX_NTH_STREAM (demux, i);
        GST_LOG_OBJECT (demux,
            "track-id #%u , checking if offset %" G_GUINT64_FORMAT
            " is a sample start", stream->track_id, GST_BUFFER_OFFSET (inbuf));
        res =
            gst_qtdemux_find_index_for_given_media_offset_linear (demux,
            stream, GST_BUFFER_OFFSET (inbuf));
        if (res != -1) {
          QtDemuxSample *sample = &stream->samples[res];
          GST_LOG_OBJECT (demux,
              "Checking if sample %d from track-id %u is valid (offset:%"
              G_GUINT64_FORMAT " size:%" G_GUINT32_FORMAT ")", res,
              stream->track_id, sample->offset, sample->size);
          if (sample->offset == GST_BUFFER_OFFSET (inbuf)) {
            GST_LOG_OBJECT (demux,
                "new buffer corresponds to a valid sample : %" G_GUINT32_FORMAT,
                res);
            is_gap_input = TRUE;
            /* We can go back to standard playback mode */
            demux->state = QTDEMUX_STATE_MOVIE;
            /* Remember which sample this stream is at */
            stream->sample_index = res;
            /* Finally update all push-based values to the expected values */
            demux->neededbytes = stream->samples[res].size;
            demux->offset = GST_BUFFER_OFFSET (inbuf);
            demux->mdatleft =
                demux->mdatsize - demux->offset + demux->mdatoffset;
            demux->todrop = 0;
          }
        }
      }
      if (!is_gap_input) {
        GST_DEBUG_OBJECT (demux, "Resetting, actual DISCONT");
        /* Reset state if it's a real discont */
        demux->neededbytes = 16;
        demux->state = QTDEMUX_STATE_INITIAL;
        demux->offset = GST_BUFFER_OFFSET (inbuf);
        gst_adapter_clear (demux->adapter);
      }
    }
    /* Reverse fragmented playback, need to flush all we have before
     * consuming a new fragment.
     * The samples array have the timestamps calculated by accumulating the
     * durations but this won't work for reverse playback of fragments as
     * the timestamps of a subsequent fragment should be smaller than the
     * previously received one. */
    if (!is_gap_input && demux->fragmented && demux->segment.rate < 0) {
      gst_qtdemux_process_adapter (demux, TRUE);
      g_ptr_array_foreach (demux->active_streams,
          (GFunc) gst_qtdemux_stream_flush_samples_data, NULL);
    }
  }

  gst_adapter_push (demux->adapter, inbuf);

  GST_DEBUG_OBJECT (demux,
      "pushing in inbuf %p, neededbytes:%u, available:%" G_GSIZE_FORMAT, inbuf,
      demux->neededbytes, gst_adapter_available (demux->adapter));

  return gst_qtdemux_process_adapter (demux, FALSE);
}
+
/* Push-mode streaming driver: consume complete atoms/samples from the
 * adapter according to the current parser state (INITIAL, HEADER,
 * BUFFER_MDAT, MOVIE) until more data is needed or a non-OK flow occurs.
 * When @force is TRUE, also keep going on NOT_LINKED (used when draining
 * buffered data, e.g. before reverse-playback fragment flushes). */
static GstFlowReturn
gst_qtdemux_process_adapter (GstQTDemux * demux, gboolean force)
{
  GstFlowReturn ret = GST_FLOW_OK;

  /* we never really mean to buffer that much */
  if (demux->neededbytes == -1) {
    goto eos;
  }

  while (((gst_adapter_available (demux->adapter)) >= demux->neededbytes) &&
      (ret == GST_FLOW_OK || (ret == GST_FLOW_NOT_LINKED && force))) {

#ifndef GST_DISABLE_GST_DEBUG
    {
      guint64 discont_offset, distance_from_discont;

      discont_offset = gst_adapter_offset_at_discont (demux->adapter);
      distance_from_discont =
          gst_adapter_distance_from_discont (demux->adapter);

      GST_DEBUG_OBJECT (demux,
          "state:%s , demux->neededbytes:%d, demux->offset:%" G_GUINT64_FORMAT
          " adapter offset :%" G_GUINT64_FORMAT " (+ %" G_GUINT64_FORMAT
          " bytes)", qt_demux_state_string (demux->state), demux->neededbytes,
          demux->offset, discont_offset, distance_from_discont);
    }
#endif

    switch (demux->state) {
        /* INITIAL: peek the next atom header and decide what to do with it */
      case QTDEMUX_STATE_INITIAL:{
        const guint8 *data;
        guint32 fourcc;
        guint64 size;

        gst_qtdemux_check_seekability (demux);

        data = gst_adapter_map (demux->adapter, demux->neededbytes);

        /* get fourcc/length, set neededbytes */
        extract_initial_length_and_fourcc ((guint8 *) data, demux->neededbytes,
            &size, &fourcc);
        gst_adapter_unmap (demux->adapter);
        data = NULL;
        GST_DEBUG_OBJECT (demux, "Peeking found [%" GST_FOURCC_FORMAT "] "
            "size: %" G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), size);
        if (size == 0) {
          GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
              (_("This file is invalid and cannot be played.")),
              ("initial atom '%" GST_FOURCC_FORMAT "' has empty length",
                  GST_FOURCC_ARGS (fourcc)));
          ret = GST_FLOW_ERROR;
          break;
        }
        if (fourcc == FOURCC_mdat) {
          gint next_entry = next_entry_size (demux);
          if (QTDEMUX_N_STREAMS (demux) > 0 && (next_entry != -1
                  || !demux->fragmented)) {
            /* we have the headers, start playback */
            demux->state = QTDEMUX_STATE_MOVIE;
            demux->neededbytes = next_entry;
            demux->mdatleft = size;
            demux->mdatsize = demux->mdatleft;
          } else {
            /* no headers yet, try to get them */
            guint bs;
            gboolean res;
            guint64 old, target;

          buffer_data:
            old = demux->offset;
            target = old + size;

            /* try to jump over the atom with a seek */
            /* only bother if it seems worth doing so,
             * and avoids possible upstream/server problems */
            if (demux->upstream_seekable &&
                demux->upstream_size > 4 * (1 << 20)) {
              res = qtdemux_seek_offset (demux, target);
            } else {
              GST_DEBUG_OBJECT (demux, "skipping seek");
              res = FALSE;
            }

            if (res) {
              GST_DEBUG_OBJECT (demux, "seek success");
              /* remember the offset of the first mdat so we can seek back to it
               * after we have the headers */
              if (fourcc == FOURCC_mdat && demux->first_mdat == -1) {
                demux->first_mdat = old;
                GST_DEBUG_OBJECT (demux, "first mdat at %" G_GUINT64_FORMAT,
                    demux->first_mdat);
              }
              /* seek worked, continue reading */
              demux->offset = target;
              demux->neededbytes = 16;
              demux->state = QTDEMUX_STATE_INITIAL;
            } else {
              /* seek failed, need to buffer */
              demux->offset = old;
              GST_DEBUG_OBJECT (demux, "seek failed/skipped");
              /* there may be multiple mdat (or alike) buffers */
              /* sanity check */
              if (demux->mdatbuffer)
                bs = gst_buffer_get_size (demux->mdatbuffer);
              else
                bs = 0;
              if (size + bs > 10 * (1 << 20))
                goto no_moov;
              demux->state = QTDEMUX_STATE_BUFFER_MDAT;
              demux->neededbytes = size;
              if (!demux->mdatbuffer)
                demux->mdatoffset = demux->offset;
            }
          }
        } else if (G_UNLIKELY (size > QTDEMUX_MAX_ATOM_SIZE)) {
          GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
              (_("This file is invalid and cannot be played.")),
              ("atom %" GST_FOURCC_FORMAT " has bogus size %" G_GUINT64_FORMAT,
                  GST_FOURCC_ARGS (fourcc), size));
          ret = GST_FLOW_ERROR;
          break;
        } else {
          /* this means we already started buffering and still no moov header,
           * let's continue buffering everything till we get moov */
          if (demux->mdatbuffer && !(fourcc == FOURCC_moov
                  || fourcc == FOURCC_moof))
            goto buffer_data;
          demux->neededbytes = size;
          demux->state = QTDEMUX_STATE_HEADER;
        }
        break;
      }
        /* HEADER: the full atom is available; parse it by type */
      case QTDEMUX_STATE_HEADER:{
        const guint8 *data;
        guint32 fourcc;

        GST_DEBUG_OBJECT (demux, "In header");

        data = gst_adapter_map (demux->adapter, demux->neededbytes);

        /* parse the header */
        extract_initial_length_and_fourcc (data, demux->neededbytes, NULL,
            &fourcc);
        if (fourcc == FOURCC_moov) {
          /* in usual fragmented setup we could try to scan for more
           * and end up at the the moov (after mdat) again */
          if (demux->got_moov && QTDEMUX_N_STREAMS (demux) > 0 &&
              (!demux->fragmented
                  || demux->last_moov_offset == demux->offset)) {
            GST_DEBUG_OBJECT (demux,
                "Skipping moov atom as we have (this) one already");
          } else {
            GST_DEBUG_OBJECT (demux, "Parsing [moov]");

            if (demux->got_moov && demux->fragmented) {
              GST_DEBUG_OBJECT (demux,
                  "Got a second moov, clean up data from old one");
              if (demux->moov_node_compressed) {
                g_node_destroy (demux->moov_node_compressed);
                if (demux->moov_node)
                  g_free (demux->moov_node->data);
              }
              demux->moov_node_compressed = NULL;
              if (demux->moov_node)
                g_node_destroy (demux->moov_node);
              demux->moov_node = NULL;
            }

            demux->last_moov_offset = demux->offset;

            /* Update streams with new moov */
            gst_qtdemux_stream_concat (demux,
                demux->old_streams, demux->active_streams);

            qtdemux_parse_moov (demux, data, demux->neededbytes);
            qtdemux_node_dump (demux, demux->moov_node);
            qtdemux_parse_tree (demux);
            qtdemux_prepare_streams (demux);
            QTDEMUX_EXPOSE_LOCK (demux);
            qtdemux_expose_streams (demux);
            QTDEMUX_EXPOSE_UNLOCK (demux);

            demux->got_moov = TRUE;

            gst_qtdemux_check_send_pending_segment (demux);

            if (demux->moov_node_compressed) {
              g_node_destroy (demux->moov_node_compressed);
              g_free (demux->moov_node->data);
            }
            demux->moov_node_compressed = NULL;
            g_node_destroy (demux->moov_node);
            demux->moov_node = NULL;
            GST_DEBUG_OBJECT (demux, "Finished parsing the header");
          }
        } else if (fourcc == FOURCC_moof) {
          if ((demux->got_moov || demux->media_caps) && demux->fragmented) {
            guint64 dist = 0;
            GstClockTime prev_pts;
            guint64 prev_offset;
            guint64 adapter_discont_offset, adapter_discont_dist;

            GST_DEBUG_OBJECT (demux, "Parsing [moof]");

            /*
             * The timestamp of the moof buffer is relevant as some scenarios
             * won't have the initial timestamp in the atoms. Whenever a new
             * buffer has started, we get that buffer's PTS and use it as a base
             * timestamp for the trun entries.
             *
             * To keep track of the current buffer timestamp and starting point
             * we use gst_adapter_prev_pts that gives us the PTS and the distance
             * from the beginning of the buffer, with the distance and demux->offset
             * we know if it is still the same buffer or not.
             */
            prev_pts = gst_adapter_prev_pts (demux->adapter, &dist);
            prev_offset = demux->offset - dist;
            if (demux->fragment_start_offset == -1
                || prev_offset > demux->fragment_start_offset) {
              demux->fragment_start_offset = prev_offset;
              demux->fragment_start = prev_pts;
              GST_DEBUG_OBJECT (demux,
                  "New fragment start found at: %" G_GUINT64_FORMAT " : %"
                  GST_TIME_FORMAT, demux->fragment_start_offset,
                  GST_TIME_ARGS (demux->fragment_start));
            }

            /* We can't use prev_offset() here because this would require
             * upstream to set consistent and correct offsets on all buffers
             * since the discont. Nothing ever did that in the past and we
             * would break backwards compatibility here then.
             * Instead take the offset we had at the last discont and count
             * the bytes from there. This works with old code as there would
             * be no discont between moov and moof, and also works with
             * adaptivedemux which correctly sets offset and will set the
             * DISCONT flag accordingly when needed.
             *
             * We also only do this for upstream TIME segments as otherwise
             * there are potential backwards compatibility problems with
             * seeking in PUSH mode and upstream providing inconsistent
             * timestamps. */
            adapter_discont_offset =
                gst_adapter_offset_at_discont (demux->adapter);
            adapter_discont_dist =
                gst_adapter_distance_from_discont (demux->adapter);

            GST_DEBUG_OBJECT (demux,
                "demux offset %" G_GUINT64_FORMAT " adapter offset %"
                G_GUINT64_FORMAT " (+ %" G_GUINT64_FORMAT " bytes)",
                demux->offset, adapter_discont_offset, adapter_discont_dist);

            if (demux->upstream_format_is_time) {
              demux->moof_offset = adapter_discont_offset;
              if (demux->moof_offset != GST_BUFFER_OFFSET_NONE)
                demux->moof_offset += adapter_discont_dist;
              if (demux->moof_offset == GST_BUFFER_OFFSET_NONE)
                demux->moof_offset = demux->offset;
            } else {
              demux->moof_offset = demux->offset;
            }

            if (!qtdemux_parse_moof (demux, data, demux->neededbytes,
                    demux->moof_offset, NULL)) {
              gst_adapter_unmap (demux->adapter);
              ret = GST_FLOW_ERROR;
              goto done;
            }

            /* in MSS we need to expose the pads after the first moof as we won't get a moov */
            if (demux->mss_mode && !demux->exposed) {
              QTDEMUX_EXPOSE_LOCK (demux);
              qtdemux_expose_streams (demux);
              QTDEMUX_EXPOSE_UNLOCK (demux);
            }

            gst_qtdemux_check_send_pending_segment (demux);
          } else {
            GST_DEBUG_OBJECT (demux, "Discarding [moof]");
          }
        } else if (fourcc == FOURCC_ftyp) {
          GST_DEBUG_OBJECT (demux, "Parsing [ftyp]");
          qtdemux_parse_ftyp (demux, data, demux->neededbytes);
        } else if (fourcc == FOURCC_uuid) {
          GST_DEBUG_OBJECT (demux, "Parsing [uuid]");
          qtdemux_parse_uuid (demux, data, demux->neededbytes);
        } else if (fourcc == FOURCC_sidx) {
          GST_DEBUG_OBJECT (demux, "Parsing [sidx]");
          qtdemux_parse_sidx (demux, data, demux->neededbytes);
        } else {
          switch (fourcc) {
            case FOURCC_styp:
              /* [styp] is like a [ftyp], but in fragment header. We ignore it for now
               * FALLTHROUGH */
            case FOURCC_skip:
            case FOURCC_free:
              /* [free] and [skip] are padding atoms */
              GST_DEBUG_OBJECT (demux,
                  "Skipping fourcc while parsing header : %" GST_FOURCC_FORMAT,
                  GST_FOURCC_ARGS (fourcc));
              break;
            default:
              GST_WARNING_OBJECT (demux,
                  "Unknown fourcc while parsing header : %" GST_FOURCC_FORMAT,
                  GST_FOURCC_ARGS (fourcc));
              /* Let's jump that one and go back to initial state */
              break;
          }
        }
        gst_adapter_unmap (demux->adapter);
        data = NULL;

        if (demux->mdatbuffer && QTDEMUX_N_STREAMS (demux)) {
          gsize remaining_data_size = 0;

          /* the mdat was before the header */
          GST_DEBUG_OBJECT (demux, "We have n_streams:%d and mdatbuffer:%p",
              QTDEMUX_N_STREAMS (demux), demux->mdatbuffer);
          /* restore our adapter/offset view of things with upstream;
           * put preceding buffered data ahead of current moov data.
           * This should also handle evil mdat, moov, mdat cases and alike */
          gst_adapter_flush (demux->adapter, demux->neededbytes);

          /* Store any remaining data after the mdat for later usage */
          remaining_data_size = gst_adapter_available (demux->adapter);
          if (remaining_data_size > 0) {
            g_assert (demux->restoredata_buffer == NULL);
            demux->restoredata_buffer =
                gst_adapter_take_buffer (demux->adapter, remaining_data_size);
            demux->restoredata_offset = demux->offset + demux->neededbytes;
            GST_DEBUG_OBJECT (demux,
                "Stored %" G_GSIZE_FORMAT " post mdat bytes at offset %"
                G_GUINT64_FORMAT, remaining_data_size,
                demux->restoredata_offset);
          }

          gst_adapter_push (demux->adapter, demux->mdatbuffer);
          demux->mdatbuffer = NULL;
          demux->offset = demux->mdatoffset;
          demux->neededbytes = next_entry_size (demux);
          demux->state = QTDEMUX_STATE_MOVIE;
          demux->mdatleft = gst_adapter_available (demux->adapter);
          demux->mdatsize = demux->mdatleft;
        } else {
          GST_DEBUG_OBJECT (demux, "Carrying on normally");
          gst_adapter_flush (demux->adapter, demux->neededbytes);

          /* only go back to the mdat if there are samples to play */
          if (demux->got_moov && demux->first_mdat != -1
              && has_next_entry (demux)) {
            gboolean res;

            /* we need to seek back */
            res = qtdemux_seek_offset (demux, demux->first_mdat);
            if (res) {
              demux->offset = demux->first_mdat;
            } else {
              GST_DEBUG_OBJECT (demux, "Seek back failed");
            }
          } else {
            demux->offset += demux->neededbytes;
          }
          demux->neededbytes = 16;
          demux->state = QTDEMUX_STATE_INITIAL;
        }

        break;
      }
        /* BUFFER_MDAT: media data arrived before the headers; stash it */
      case QTDEMUX_STATE_BUFFER_MDAT:{
        GstBuffer *buf;
        guint8 fourcc[4];

        GST_DEBUG_OBJECT (demux, "Got our buffer at offset %" G_GUINT64_FORMAT,
            demux->offset);
        buf = gst_adapter_take_buffer (demux->adapter, demux->neededbytes);
        gst_buffer_extract (buf, 0, fourcc, 4);
        GST_DEBUG_OBJECT (demux, "mdatbuffer starts with %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (QT_FOURCC (fourcc)));
        if (demux->mdatbuffer)
          demux->mdatbuffer = gst_buffer_append (demux->mdatbuffer, buf);
        else
          demux->mdatbuffer = buf;
        demux->offset += demux->neededbytes;
        demux->neededbytes = 16;
        demux->state = QTDEMUX_STATE_INITIAL;
        gst_qtdemux_post_progress (demux, 1, 1);

        break;
      }
        /* MOVIE: the adapter holds one complete sample; push it downstream */
      case QTDEMUX_STATE_MOVIE:{
        QtDemuxStream *stream = NULL;
        QtDemuxSample *sample;
        GstClockTime dts, pts, duration;
        gboolean keyframe;
        gint i;

        GST_DEBUG_OBJECT (demux,
            "BEGIN // in MOVIE for offset %" G_GUINT64_FORMAT, demux->offset);

        if (demux->fragmented) {
          GST_DEBUG_OBJECT (demux, "mdat remaining %" G_GUINT64_FORMAT,
              demux->mdatleft);
          if (G_LIKELY (demux->todrop < demux->mdatleft)) {
            /* if needed data starts within this atom,
             * then it should not exceed this atom */
            if (G_UNLIKELY (demux->neededbytes > demux->mdatleft)) {
              GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
                  (_("This file is invalid and cannot be played.")),
                  ("sample data crosses atom boundary"));
              ret = GST_FLOW_ERROR;
              break;
            }
            demux->mdatleft -= demux->neededbytes;
          } else {
            GST_DEBUG_OBJECT (demux, "data atom emptied; resuming atom scan");
            /* so we are dropping more than left in this atom */
            gst_qtdemux_drop_data (demux, demux->mdatleft);
            demux->mdatleft = 0;

            /* need to resume atom parsing so we do not miss any other pieces */
            demux->state = QTDEMUX_STATE_INITIAL;
            demux->neededbytes = 16;

            /* check if there was any stored post mdat data from previous buffers */
            if (demux->restoredata_buffer) {
              g_assert (gst_adapter_available (demux->adapter) == 0);

              gst_adapter_push (demux->adapter, demux->restoredata_buffer);
              demux->restoredata_buffer = NULL;
              demux->offset = demux->restoredata_offset;
            }

            break;
          }
        }

        if (demux->todrop) {
          if (demux->cenc_aux_info_offset > 0) {
            GstByteReader br;
            const guint8 *data;

            GST_DEBUG_OBJECT (demux, "parsing cenc auxiliary info");
            data = gst_adapter_map (demux->adapter, demux->todrop);
            gst_byte_reader_init (&br, data + 8, demux->todrop);
            if (!qtdemux_parse_cenc_aux_info (demux,
                    QTDEMUX_NTH_STREAM (demux, 0), &br,
                    demux->cenc_aux_info_sizes, demux->cenc_aux_sample_count)) {
              GST_ERROR_OBJECT (demux, "failed to parse cenc auxiliary info");
              ret = GST_FLOW_ERROR;
              gst_adapter_unmap (demux->adapter);
              g_free (demux->cenc_aux_info_sizes);
              demux->cenc_aux_info_sizes = NULL;
              goto done;
            }
            demux->cenc_aux_info_offset = 0;
            g_free (demux->cenc_aux_info_sizes);
            demux->cenc_aux_info_sizes = NULL;
            gst_adapter_unmap (demux->adapter);
          }
          gst_qtdemux_drop_data (demux, demux->todrop);
        }

        /* first buffer? */
        /* initial newsegment sent here after having added pads,
         * possible others in sink_event */
        gst_qtdemux_check_send_pending_segment (demux);

        /* Figure out which stream this packet belongs to */
        for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
          stream = QTDEMUX_NTH_STREAM (demux, i);
          if (stream->sample_index >= stream->n_samples) {
            /* reset to be checked below G_UNLIKELY (stream == NULL) */
            stream = NULL;
            continue;
          }
          GST_LOG_OBJECT (demux,
              "Checking track-id %u (sample_index:%d / offset:%"
              G_GUINT64_FORMAT " / size:%d)", stream->track_id,
              stream->sample_index,
              stream->samples[stream->sample_index].offset,
              stream->samples[stream->sample_index].size);

          if (stream->samples[stream->sample_index].offset == demux->offset)
            break;
        }

        if (G_UNLIKELY (stream == NULL))
          goto unknown_stream;

        gst_qtdemux_stream_check_and_change_stsd_index (demux, stream);

        if (stream->new_caps) {
          gst_qtdemux_configure_stream (demux, stream);
        }

        /* Put data in a buffer, set timestamps, caps, ... */
        sample = &stream->samples[stream->sample_index];

        if (G_LIKELY (!(STREAM_IS_EOS (stream)))) {
          GST_DEBUG_OBJECT (demux, "stream : %" GST_FOURCC_FORMAT,
              GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));

          dts = QTSAMPLE_DTS (stream, sample);
          pts = QTSAMPLE_PTS (stream, sample);
          duration = QTSAMPLE_DUR_DTS (stream, sample, dts);
          keyframe = QTSAMPLE_KEYFRAME (stream, sample);

          /* check for segment end */
          if (G_UNLIKELY (demux->segment.stop != -1
                  && demux->segment.stop <= pts && stream->on_keyframe)
              && !(demux->upstream_format_is_time && demux->segment.rate < 0)) {
            GST_DEBUG_OBJECT (demux, "we reached the end of our segment.");
            stream->time_position = GST_CLOCK_TIME_NONE;        /* this means EOS */

            /* skip this data, stream is EOS */
            gst_adapter_flush (demux->adapter, demux->neededbytes);
            demux->offset += demux->neededbytes;

            /* check if all streams are eos */
            ret = GST_FLOW_EOS;
            for (i = 0; i < QTDEMUX_N_STREAMS (demux); i++) {
              if (!STREAM_IS_EOS (QTDEMUX_NTH_STREAM (demux, i))) {
                ret = GST_FLOW_OK;
                break;
              }
            }
          } else {
            GstBuffer *outbuf;

            outbuf =
                gst_adapter_take_buffer (demux->adapter, demux->neededbytes);

            /* FIXME: should either be an assert or a plain check */
            g_return_val_if_fail (outbuf != NULL, GST_FLOW_ERROR);

            ret = gst_qtdemux_decorate_and_push_buffer (demux, stream, outbuf,
                dts, pts, duration, keyframe, dts, demux->offset);
          }

          /* combine flows */
          GST_OBJECT_LOCK (demux);
          ret = gst_qtdemux_combine_flows (demux, stream, ret);
          GST_OBJECT_UNLOCK (demux);
        } else {
          /* skip this data, stream is EOS */
          gst_adapter_flush (demux->adapter, demux->neededbytes);
        }

        stream->sample_index++;
        stream->offset_in_sample = 0;

        /* update current offset and figure out size of next buffer */
        GST_LOG_OBJECT (demux, "increasing offset %" G_GUINT64_FORMAT " by %u",
            demux->offset, demux->neededbytes);
        demux->offset += demux->neededbytes;
        GST_LOG_OBJECT (demux, "offset is now %" G_GUINT64_FORMAT,
            demux->offset);


        if (ret == GST_FLOW_EOS) {
          GST_DEBUG_OBJECT (demux, "All streams are EOS, signal upstream");
          demux->neededbytes = -1;
          goto eos;
        }

        if ((demux->neededbytes = next_entry_size (demux)) == -1) {
          if (demux->fragmented) {
            GST_DEBUG_OBJECT (demux, "(temporarily) out of fragmented samples");
            /* there may be more to follow, only finish this atom */
            demux->todrop = demux->mdatleft;
            demux->neededbytes = demux->todrop;
            break;
          }
          goto eos;
        }
        if (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED) {
          goto non_ok_unlinked_flow;
        }
        break;
      }
      default:
        goto invalid_state;
    }
  }

  /* when buffering movie data, at least show user something is happening */
  if (ret == GST_FLOW_OK && demux->state == QTDEMUX_STATE_BUFFER_MDAT &&
      gst_adapter_available (demux->adapter) <= demux->neededbytes) {
    gst_qtdemux_post_progress (demux, gst_adapter_available (demux->adapter),
        demux->neededbytes);
  }
done:

  return ret;

  /* ERRORS */
non_ok_unlinked_flow:
  {
    GST_DEBUG_OBJECT (demux, "Stopping, combined return flow %s",
        gst_flow_get_name (ret));
    return ret;
  }
unknown_stream:
  {
    GST_ELEMENT_ERROR (demux, STREAM, FAILED, (NULL), ("unknown stream found"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
eos:
  {
    GST_DEBUG_OBJECT (demux, "no next entry, EOS");
    ret = GST_FLOW_EOS;
    goto done;
  }
invalid_state:
  {
    GST_ELEMENT_ERROR (demux, STREAM, FAILED,
        (NULL), ("qtdemuxer invalid state %d", demux->state));
    ret = GST_FLOW_ERROR;
    goto done;
  }
no_moov:
  {
    GST_ELEMENT_ERROR (demux, STREAM, FAILED,
        (NULL), ("no 'moov' atom within the first 10 MB"));
    ret = GST_FLOW_ERROR;
    goto done;
  }
}
+
+static gboolean
+qtdemux_sink_activate (GstPad * sinkpad, GstObject * parent)
+{
+ GstQuery *query;
+ gboolean pull_mode;
+
+ query = gst_query_new_scheduling ();
+
+ if (!gst_pad_peer_query (sinkpad, query)) {
+ gst_query_unref (query);
+ goto activate_push;
+ }
+
+ pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+ GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+ gst_query_unref (query);
+
+ if (!pull_mode)
+ goto activate_push;
+
+ GST_DEBUG_OBJECT (sinkpad, "activating pull");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+activate_push:
+ {
+ GST_DEBUG_OBJECT (sinkpad, "activating push");
+ return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+ }
+}
+
+static gboolean
+qtdemux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+ GstPadMode mode, gboolean active)
+{
+ gboolean res;
+ GstQTDemux *demux = GST_QTDEMUX (parent);
+
+ switch (mode) {
+ case GST_PAD_MODE_PUSH:
+ demux->pullbased = FALSE;
+ res = TRUE;
+ break;
+ case GST_PAD_MODE_PULL:
+ if (active) {
+ demux->pullbased = TRUE;
+ res = gst_pad_start_task (sinkpad, (GstTaskFunction) gst_qtdemux_loop,
+ sinkpad, NULL);
+ } else {
+ res = gst_pad_stop_task (sinkpad);
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+ return res;
+}
+
+#ifdef HAVE_ZLIB
/* Inflate the zlib-compressed buffer @z_buffer of @z_length bytes.
 * On input, *length is the caller's guess of the uncompressed size; the
 * output buffer is grown in 4096-byte steps if that guess is too small.
 * Returns a g_malloc'ed buffer (caller frees) with *length set to the
 * actual uncompressed size, or NULL with *length = 0 on error. */
static void *
qtdemux_inflate (void *z_buffer, guint z_length, guint * length)
{
  guint8 *buffer;
  z_stream z;
  int ret;

  memset (&z, 0, sizeof (z));
  z.zalloc = NULL;
  z.zfree = NULL;
  z.opaque = NULL;

  if ((ret = inflateInit (&z)) != Z_OK) {
    GST_ERROR ("inflateInit() returned %d", ret);
    return NULL;
  }

  z.next_in = z_buffer;
  z.avail_in = z_length;

  buffer = (guint8 *) g_malloc (*length);
  z.avail_out = *length;
  z.next_out = (Bytef *) buffer;
  do {
    ret = inflate (&z, Z_NO_FLUSH);
    if (ret == Z_STREAM_END) {
      break;
    } else if (ret != Z_OK) {
      GST_WARNING ("inflate() returned %d", ret);
      break;
    }

    /* output buffer exhausted but stream not finished: grow and retry */
    *length += 4096;
    buffer = (guint8 *) g_realloc (buffer, *length);
    z.next_out = (Bytef *) (buffer + z.total_out);
    z.avail_out += 4096;
  } while (z.avail_in > 0);

  if (ret != Z_STREAM_END) {
    /* incomplete or corrupt stream: discard any partial output */
    g_free (buffer);
    buffer = NULL;
    *length = 0;
  } else {
    *length = z.total_out;
  }

  inflateEnd (&z);

  return buffer;
}
+#endif /* HAVE_ZLIB */
+
/* Parse a 'moov' atom into qtdemux->moov_node. If the movie header is
 * compressed (cmov child), the contained cmvd payload is inflated (zlib
 * only) and parsed instead; the original compressed tree is kept in
 * qtdemux->moov_node_compressed so its node data stays valid until the
 * moov is disposed.
 *
 * Returns: FALSE only for a structurally invalid compressed header */
static gboolean
qtdemux_parse_moov (GstQTDemux * qtdemux, const guint8 * buffer, guint length)
{
  GNode *cmov;

  qtdemux->moov_node = g_node_new ((guint8 *) buffer);

  /* counts as header data */
  qtdemux->header_size += length;

  GST_DEBUG_OBJECT (qtdemux, "parsing 'moov' atom");
  qtdemux_parse_node (qtdemux, qtdemux->moov_node, buffer, length);

  cmov = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_cmov);
  if (cmov) {
    guint32 method;
    GNode *dcom;
    GNode *cmvd;
    guint32 dcom_len;

    dcom = qtdemux_tree_get_child_by_type (cmov, FOURCC_dcom);
    cmvd = qtdemux_tree_get_child_by_type (cmov, FOURCC_cmvd);
    if (dcom == NULL || cmvd == NULL)
      goto invalid_compression;

    /* dcom must hold at least its 8 byte header plus the 4 byte method */
    dcom_len = QT_UINT32 (dcom->data);
    if (dcom_len < 12)
      goto invalid_compression;

    method = QT_FOURCC ((guint8 *) dcom->data + 8);
    switch (method) {
#ifdef HAVE_ZLIB
      case FOURCC_zlib:{
        guint uncompressed_length;
        guint compressed_length;
        guint8 *buf;
        guint32 cmvd_len;

        /* cmvd layout: 8 byte header, 4 byte uncompressed size, payload */
        cmvd_len = QT_UINT32 ((guint8 *) cmvd->data);
        if (cmvd_len < 12)
          goto invalid_compression;

        uncompressed_length = QT_UINT32 ((guint8 *) cmvd->data + 8);
        compressed_length = cmvd_len - 12;
        GST_LOG ("length = %u", uncompressed_length);

        buf =
            (guint8 *) qtdemux_inflate ((guint8 *) cmvd->data + 12,
            compressed_length, &uncompressed_length);

        if (buf) {
          /* keep the compressed tree around; the new tree owns @buf */
          qtdemux->moov_node_compressed = qtdemux->moov_node;
          qtdemux->moov_node = g_node_new (buf);

          qtdemux_parse_node (qtdemux, qtdemux->moov_node, buf,
              uncompressed_length);
        }
        break;
      }
#endif /* HAVE_ZLIB */
      default:
        GST_WARNING_OBJECT (qtdemux, "unknown or unhandled header compression "
            "type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (method));
        break;
    }
  }
  return TRUE;

  /* ERRORS */
invalid_compression:
  {
    GST_ERROR_OBJECT (qtdemux, "invalid compressed header");
    return FALSE;
  }
}
+
/* Parse the children of a container atom: walk [buf, end), append one
 * GNode per well-formed child atom and recurse into it via
 * qtdemux_parse_node(). Malformed lengths (truncated header, zero,
 * shorter than an atom header, or extending past @end) terminate the
 * walk early; the function still returns TRUE (best-effort parsing). */
static gboolean
qtdemux_parse_container (GstQTDemux * qtdemux, GNode * node, const guint8 * buf,
    const guint8 * end)
{
  while (G_UNLIKELY (buf < end)) {
    GNode *child;
    guint32 len;

    /* need at least the 4 byte length field */
    if (G_UNLIKELY (buf + 4 > end)) {
      GST_LOG_OBJECT (qtdemux, "buffer overrun");
      break;
    }
    len = QT_UINT32 (buf);
    if (G_UNLIKELY (len == 0)) {
      GST_LOG_OBJECT (qtdemux, "empty container");
      break;
    }
    /* a valid atom is at least its 8 byte size+fourcc header */
    if (G_UNLIKELY (len < 8)) {
      GST_WARNING_OBJECT (qtdemux, "length too short (%d < 8)", len);
      break;
    }
    if (G_UNLIKELY (len > (end - buf))) {
      GST_WARNING_OBJECT (qtdemux, "length too long (%d > %d)", len,
          (gint) (end - buf));
      break;
    }

    child = g_node_new ((guint8 *) buf);
    g_node_append (node, child);
    GST_LOG_OBJECT (qtdemux, "adding new node of len %d", len);
    qtdemux_parse_node (qtdemux, child, buf, len);

    buf += len;
  }
  return TRUE;
}
+
+static gboolean
+qtdemux_parse_theora_extension (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ GNode * xdxt)
+{
+ int len = QT_UINT32 (xdxt->data);
+ guint8 *buf = xdxt->data;
+ guint8 *end = buf + len;
+ GstBuffer *buffer;
+
+ /* skip size and type */
+ buf += 8;
+ end -= 8;
+
+ while (buf < end) {
+ gint size;
+ guint32 type;
+
+ size = QT_UINT32 (buf);
+ type = QT_FOURCC (buf + 4);
+
+ GST_LOG_OBJECT (qtdemux, "%p %p", buf, end);
+
+ if (buf + size > end || size <= 0)
+ break;
+
+ buf += 8;
+ size -= 8;
+
+ GST_WARNING_OBJECT (qtdemux, "have cookie %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (type));
+
+ switch (type) {
+ case FOURCC_tCtH:
+ buffer = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buffer, 0, buf, size);
+ stream->buffers = g_slist_append (stream->buffers, buffer);
+ GST_LOG_OBJECT (qtdemux, "parsing theora header");
+ break;
+ case FOURCC_tCt_:
+ buffer = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buffer, 0, buf, size);
+ stream->buffers = g_slist_append (stream->buffers, buffer);
+ GST_LOG_OBJECT (qtdemux, "parsing theora comment");
+ break;
+ case FOURCC_tCtC:
+ buffer = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buffer, 0, buf, size);
+ stream->buffers = g_slist_append (stream->buffers, buffer);
+ GST_LOG_OBJECT (qtdemux, "parsing theora codebook");
+ break;
+ default:
+ GST_WARNING_OBJECT (qtdemux,
+ "unknown theora cookie %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (type));
+ break;
+ }
+ buf += size;
+ }
+ return TRUE;
+}
+
/* Parse one atom starting at @buffer (@length bytes available) and attach
 * any recognised children to @node.  Container atoms recurse via
 * qtdemux_parse_container(); a number of sample-entry atoms get special
 * handling because they embed further atoms at type-specific offsets.
 *
 * Returns TRUE on success (including unknown/skipped atoms), FALSE with an
 * element error posted when the data is truncated or the atom size lies.
 */
static gboolean
qtdemux_parse_node (GstQTDemux * qtdemux, GNode * node, const guint8 * buffer,
    guint length)
{
  guint32 fourcc = 0;
  guint32 node_length = 0;
  const QtNodeType *type;
  const guint8 *end;

  GST_LOG_OBJECT (qtdemux, "qtdemux_parse buffer %p length %u", buffer, length);

  /* an atom header is 4 bytes size + 4 bytes fourcc */
  if (G_UNLIKELY (length < 8))
    goto not_enough_data;

  node_length = QT_UINT32 (buffer);
  fourcc = QT_FOURCC (buffer + 4);

  /* ignore empty nodes */
  if (G_UNLIKELY (fourcc == 0 || node_length == 8))
    return TRUE;

  type = qtdemux_type_get (fourcc);

  end = buffer + length;

  GST_LOG_OBJECT (qtdemux,
      "parsing '%" GST_FOURCC_FORMAT "', length=%u, name '%s'",
      GST_FOURCC_ARGS (fourcc), node_length, type->name);

  if (node_length > length)
    goto broken_atom_size;

  if (type->flags & QT_FLAG_CONTAINER) {
    /* plain container: children start right after the 8-byte header */
    qtdemux_parse_container (qtdemux, node, buffer + 8, end);
  } else {
    switch (fourcc) {
      case FOURCC_stsd:
      {
        if (node_length < 20) {
          GST_LOG_OBJECT (qtdemux, "skipping small stsd box");
          break;
        }
        GST_DEBUG_OBJECT (qtdemux,
            "parsing stsd (sample table, sample description) atom");
        /* Skip over 8 byte atom hdr + 1 byte version, 3 bytes flags, 4 byte num_entries */
        qtdemux_parse_container (qtdemux, node, buffer + 16, end);
        break;
      }
      case FOURCC_mp4a:
      case FOURCC_alac:
      case FOURCC_fLaC:
      case FOURCC_aavd:
      {
        guint32 version;
        guint32 offset;
        guint min_size;

        /* also read alac (or whatever) in stead of mp4a in the following,
         * since a similar layout is used in other cases as well */
        if (fourcc == FOURCC_mp4a)
          min_size = 20;
        else if (fourcc == FOURCC_fLaC)
          min_size = 86;
        else
          min_size = 40;

        /* There are two things we might encounter here: a true mp4a atom, and
           an mp4a entry in an stsd atom. The latter is what we're interested
           in, and it looks like an atom, but isn't really one. The true mp4a
           atom is short, so we detect it based on length here. */
        if (length < min_size) {
          GST_LOG_OBJECT (qtdemux, "skipping small %" GST_FOURCC_FORMAT " box",
              GST_FOURCC_ARGS (fourcc));
          break;
        }

        /* 'version' here is the sound sample description version. Types 0 and
           1 are documented in the QTFF reference, but type 2 is not: it's
           described in Apple header files instead (struct SoundDescriptionV2
           in Movies.h) */
        version = QT_UINT16 (buffer + 16);

        GST_DEBUG_OBJECT (qtdemux, "%" GST_FOURCC_FORMAT " version 0x%08x",
            GST_FOURCC_ARGS (fourcc), version);

        /* parse any esds descriptors; the embedded atoms start at a
         * version-dependent offset inside the sample entry */
        switch (version) {
          case 0:
            offset = 0x24;
            break;
          case 1:
            offset = 0x34;
            break;
          case 2:
            offset = 0x48;
            break;
          default:
            GST_WARNING_OBJECT (qtdemux,
                "unhandled %" GST_FOURCC_FORMAT " version 0x%08x",
                GST_FOURCC_ARGS (fourcc), version);
            offset = 0;
            break;
        }
        if (offset)
          qtdemux_parse_container (qtdemux, node, buffer + offset, end);
        break;
      }
      case FOURCC_mp4v:
      case FOURCC_MP4V:
      case FOURCC_fmp4:
      case FOURCC_FMP4:
      case FOURCC_apcs:
      case FOURCC_apch:
      case FOURCC_apcn:
      case FOURCC_apco:
      case FOURCC_ap4h:
      case FOURCC_xvid:
      case FOURCC_XVID:
      case FOURCC_H264:
      case FOURCC_avc1:
      case FOURCC_avc3:
      case FOURCC_H265:
      case FOURCC_hvc1:
      case FOURCC_hev1:
      case FOURCC_dvh1:
      case FOURCC_dvhe:
      case FOURCC_mjp2:
      case FOURCC_encv:
      {
        guint32 version;
        guint32 str_len;

        /* codec_data is contained inside these atoms, which all have
         * the same format. */
        /* video sample description size is 86 bytes without extension.
         * node_length have to be bigger than 86 bytes because video sample
         * description can include extensions such as esds, fiel, glbl, etc. */
        if (node_length < 86) {
          GST_WARNING_OBJECT (qtdemux, "%" GST_FOURCC_FORMAT
              " sample description length too short (%u < 86)",
              GST_FOURCC_ARGS (fourcc), node_length);
          break;
        }

        GST_DEBUG_OBJECT (qtdemux, "parsing in %" GST_FOURCC_FORMAT,
            GST_FOURCC_ARGS (fourcc));

        /* version (2 bytes) : this is set to 0, unless a compressor has changed
         * its data format.
         * revision level (2 bytes) : must be set to 0. */
        version = QT_UINT32 (buffer + 16);
        GST_DEBUG_OBJECT (qtdemux, "version %08x", version);

        /* compressor name : PASCAL string and informative purposes
         * first byte : the number of bytes to be displayed.
         *              it has to be less than 32 because it is reserved
         *              space of 32 bytes total including itself. */
        str_len = QT_UINT8 (buffer + 50);
        if (str_len < 32)
          GST_DEBUG_OBJECT (qtdemux, "compressorname = %.*s", str_len,
              (char *) buffer + 51);
        else
          GST_WARNING_OBJECT (qtdemux,
              "compressorname length too big (%u > 31)", str_len);

        GST_MEMDUMP_OBJECT (qtdemux, "video sample description", buffer,
            end - buffer);
        /* extension atoms follow the fixed 86-byte sample description */
        qtdemux_parse_container (qtdemux, node, buffer + 86, end);
        break;
      }
      case FOURCC_meta:
      {
        GST_DEBUG_OBJECT (qtdemux, "parsing meta atom");

        /* You are reading this correctly. QTFF specifies that the
         * metadata atom is a short atom, whereas ISO BMFF specifies
         * it's a full atom. But since so many people are doing things
         * differently, we actually peek into the atom to see which
         * variant it is */
        if (length < 16) {
          GST_LOG_OBJECT (qtdemux, "skipping small %" GST_FOURCC_FORMAT " box",
              GST_FOURCC_ARGS (fourcc));
          break;
        }
        if (QT_FOURCC (buffer + 12) == FOURCC_hdlr) {
          /* Variant 1: What QTFF specifies. 'meta' is a short header which
           * starts with a 'hdlr' atom */
          qtdemux_parse_container (qtdemux, node, buffer + 8, end);
        } else if (QT_UINT32 (buffer + 8) == 0x00000000) {
          /* Variant 2: What ISO BMFF specifies. 'meta' is a _full_ atom
           * with version/flags both set to zero */
          qtdemux_parse_container (qtdemux, node, buffer + 12, end);
        } else
          GST_WARNING_OBJECT (qtdemux, "Unknown 'meta' atom format");
        break;
      }
      case FOURCC_mp4s:
      {
        GST_MEMDUMP_OBJECT (qtdemux, "mp4s", buffer, end - buffer);
        /* Skip 8 byte header, plus 8 byte version + flags + entry_count */
        qtdemux_parse_container (qtdemux, node, buffer + 16, end);
        break;
      }
      case FOURCC_XiTh:
      {
        guint32 version;
        guint32 offset;

        if (length < 16) {
          GST_LOG_OBJECT (qtdemux, "skipping small %" GST_FOURCC_FORMAT " box",
              GST_FOURCC_ARGS (fourcc));
          break;
        }

        version = QT_UINT32 (buffer + 12);
        GST_DEBUG_OBJECT (qtdemux, "parsing XiTh atom version 0x%08x", version);

        switch (version) {
          case 0x00000001:
            offset = 0x62;
            break;
          default:
            GST_DEBUG_OBJECT (qtdemux, "unknown version 0x%08x", version);
            offset = 0;
            break;
        }
        if (offset) {
          /* make sure the version-specific payload actually fits */
          if (length < offset) {
            GST_WARNING_OBJECT (qtdemux,
                "skipping too small %" GST_FOURCC_FORMAT " box",
                GST_FOURCC_ARGS (fourcc));
            break;
          }
          qtdemux_parse_container (qtdemux, node, buffer + offset, end);
        }
        break;
      }
      case FOURCC_in24:
      {
        qtdemux_parse_container (qtdemux, node, buffer + 0x34, end);
        break;
      }
      case FOURCC_uuid:
      {
        qtdemux_parse_uuid (qtdemux, buffer, end - buffer);
        break;
      }
      case FOURCC_enca:
      {
        qtdemux_parse_container (qtdemux, node, buffer + 36, end);
        break;
      }
      default:
        if (!strcmp (type->name, "unknown"))
          GST_MEMDUMP ("Unknown tag", buffer + 4, end - buffer - 4);
        break;
    }
  }
  GST_LOG_OBJECT (qtdemux, "parsed '%" GST_FOURCC_FORMAT "'",
      GST_FOURCC_ARGS (fourcc));
  return TRUE;

/* ERRORS */
not_enough_data:
  {
    GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
        (_("This file is corrupt and cannot be played.")),
        ("Not enough data for an atom header, got only %u bytes", length));
    return FALSE;
  }
broken_atom_size:
  {
    GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
        (_("This file is corrupt and cannot be played.")),
        ("Atom '%" GST_FOURCC_FORMAT "' has size of %u bytes, but we have only "
            "%u bytes available.", GST_FOURCC_ARGS (fourcc), node_length,
            length));
    return FALSE;
  }
}
+
/* Would negotiate a downstream allocator for @stream via an ALLOCATION
 * query.  Currently a no-op: the whole body is compiled out (see the
 * FIXME below), so calling this has no effect. */
static void
qtdemux_do_allocation (QtDemuxStream * stream, GstQTDemux * qtdemux)
{
/* FIXME: This can only reliably work if demuxers have a
 * separate streaming thread per srcpad. This should be
 * done in a demuxer base class, which integrates parts
 * of multiqueue
 *
 * https://bugzilla.gnome.org/show_bug.cgi?id=701856
 */
#if 0
  GstQuery *query;

  query = gst_query_new_allocation (stream->caps, FALSE);

  if (!gst_pad_peer_query (stream->pad, query)) {
    /* not a problem, just debug a little */
    GST_DEBUG_OBJECT (qtdemux, "peer ALLOCATION query failed");
  }

  if (stream->allocator)
    gst_object_unref (stream->allocator);

  if (gst_query_get_n_allocation_params (query) > 0) {
    /* try the allocator */
    gst_query_parse_nth_allocation_param (query, 0, &stream->allocator,
        &stream->params);
    stream->use_allocator = TRUE;
  } else {
    stream->allocator = NULL;
    gst_allocation_params_init (&stream->params);
    stream->use_allocator = FALSE;
  }
  gst_query_unref (query);
#endif
}
+
+static gboolean
+pad_query (const GValue * item, GValue * value, gpointer user_data)
+{
+ GstPad *pad = g_value_get_object (item);
+ GstQuery *query = user_data;
+ gboolean res;
+
+ res = gst_pad_peer_query (pad, query);
+
+ if (res) {
+ g_value_set_boolean (value, TRUE);
+ return FALSE;
+ }
+
+ GST_INFO_OBJECT (pad, "pad peer query failed");
+ return TRUE;
+}
+
+static gboolean
+gst_qtdemux_run_query (GstElement * element, GstQuery * query,
+ GstPadDirection direction)
+{
+ GstIterator *it;
+ GstIteratorFoldFunction func = pad_query;
+ GValue res = { 0, };
+
+ g_value_init (&res, G_TYPE_BOOLEAN);
+ g_value_set_boolean (&res, FALSE);
+
+ /* Ask neighbor */
+ if (direction == GST_PAD_SRC)
+ it = gst_element_iterate_src_pads (element);
+ else
+ it = gst_element_iterate_sink_pads (element);
+
+ while (gst_iterator_fold (it, func, &res, query) == GST_ITERATOR_RESYNC)
+ gst_iterator_resync (it);
+
+ gst_iterator_free (it);
+
+ return g_value_get_boolean (&res);
+}
+
/* Try to obtain a "drm-preferred-decryption-system-id" context for a
 * protected @stream.  The lookup follows the standard GstContext dance:
 * first a downstream CONTEXT query, then an upstream one, and finally a
 * NEED_CONTEXT message on the bus for the application to answer.  The
 * query/message carries the track id, the list of protection systems we
 * have decryptors for, and the queued protection events.  Does nothing
 * if a preferred system id is already known. */
static void
gst_qtdemux_request_protection_context (GstQTDemux * qtdemux,
    QtDemuxStream * stream)
{
  GstQuery *query;
  GstContext *ctxt;
  GstElement *element = GST_ELEMENT (qtdemux);
  GstStructure *st;
  gchar **filtered_sys_ids;
  GValue event_list = G_VALUE_INIT;
  GList *walk;

  /* 1. Check if we already have the context. */
  if (qtdemux->preferred_protection_system_id != NULL) {
    GST_LOG_OBJECT (element,
        "already have the protection context, no need to request it again");
    return;
  }

  /* the filter function wants a NULL-terminated strv; temporarily append
   * the terminator and remove it again afterwards */
  g_ptr_array_add (qtdemux->protection_system_ids, NULL);
  filtered_sys_ids = gst_protection_filter_systems_by_available_decryptors (
      (const gchar **) qtdemux->protection_system_ids->pdata);

  g_ptr_array_remove_index (qtdemux->protection_system_ids,
      qtdemux->protection_system_ids->len - 1);
  GST_TRACE_OBJECT (qtdemux, "detected %u protection systems, we have "
      "decryptors for %u of them, running context request",
      qtdemux->protection_system_ids->len,
      filtered_sys_ids ? g_strv_length (filtered_sys_ids) : 0);


  /* prefer the stream's own event queue; fall back to the demuxer-wide one */
  if (stream->protection_scheme_event_queue.length) {
    GST_TRACE_OBJECT (qtdemux, "using stream event queue, length %u",
        stream->protection_scheme_event_queue.length);
    walk = stream->protection_scheme_event_queue.tail;
  } else {
    GST_TRACE_OBJECT (qtdemux, "using demuxer event queue, length %u",
        qtdemux->protection_event_queue.length);
    walk = qtdemux->protection_event_queue.tail;
  }

  /* collect the queued protection events (tail-to-head) into a GstValueList */
  g_value_init (&event_list, GST_TYPE_LIST);
  for (; walk; walk = g_list_previous (walk)) {
    GValue *event_value = g_new0 (GValue, 1);
    g_value_init (event_value, GST_TYPE_EVENT);
    g_value_set_boxed (event_value, walk->data);
    gst_value_list_append_and_take_value (&event_list, event_value);
  }

  /* 2a) Query downstream with GST_QUERY_CONTEXT for the context and
   *     check if downstream already has a context of the specific type
   * 2b) Query upstream as above.
   */
  query = gst_query_new_context ("drm-preferred-decryption-system-id");
  st = gst_query_writable_structure (query);
  gst_structure_set (st, "track-id", G_TYPE_UINT, stream->track_id,
      "available-stream-encryption-systems", G_TYPE_STRV, filtered_sys_ids,
      NULL);
  gst_structure_set_value (st, "stream-encryption-events", &event_list);
  if (gst_qtdemux_run_query (element, query, GST_PAD_SRC)) {
    gst_query_parse_context (query, &ctxt);
    GST_INFO_OBJECT (element, "found context (%p) in downstream query", ctxt);
    gst_element_set_context (element, ctxt);
  } else if (gst_qtdemux_run_query (element, query, GST_PAD_SINK)) {
    gst_query_parse_context (query, &ctxt);
    GST_INFO_OBJECT (element, "found context (%p) in upstream query", ctxt);
    gst_element_set_context (element, ctxt);
  } else {
    /* 3) Post a GST_MESSAGE_NEED_CONTEXT message on the bus with
     *    the required context type and afterwards check if a
     *    usable context was set now as in 1). The message could
     *    be handled by the parent bins of the element and the
     *    application.
     */
    GstMessage *msg;

    GST_INFO_OBJECT (element, "posting need context message");
    msg = gst_message_new_need_context (GST_OBJECT_CAST (element),
        "drm-preferred-decryption-system-id");
    st = (GstStructure *) gst_message_get_structure (msg);
    gst_structure_set (st, "track-id", G_TYPE_UINT, stream->track_id,
        "available-stream-encryption-systems", G_TYPE_STRV, filtered_sys_ids,
        NULL);

    gst_structure_set_value (st, "stream-encryption-events", &event_list);
    gst_element_post_message (element, msg);
  }

  g_strfreev (filtered_sys_ids);
  g_value_unset (&event_list);
  gst_query_unref (query);
}
+
/* Rewrite the caps of a protected @stream so downstream can pick a
 * decryptor: the original media type is stashed in "original-media-type"
 * and the structure is renamed to application/x-aavd (for 'aavd') or
 * application/x-cenc (for 'cenc'/'cbcs').  For CENC streams a protection
 * system UUID is then selected — preferred system first, otherwise any
 * system we saw a pssh for that has an available decryptor — and stored
 * in the caps.  Returns FALSE for unsupported schemes or when no
 * decryptor can be found. */
static gboolean
gst_qtdemux_configure_protected_caps (GstQTDemux * qtdemux,
    QtDemuxStream * stream)
{
  GstStructure *s;
  const gchar *selected_system = NULL;

  g_return_val_if_fail (qtdemux != NULL, FALSE);
  g_return_val_if_fail (stream != NULL, FALSE);
  g_return_val_if_fail (gst_caps_get_size (CUR_STREAM (stream)->caps) == 1,
      FALSE);

  /* 'aavd' (Apple audio) needs no system-id selection, only a rename */
  if (stream->protection_scheme_type == FOURCC_aavd) {
    s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
    if (!gst_structure_has_name (s, "application/x-aavd")) {
      gst_structure_set (s,
          "original-media-type", G_TYPE_STRING, gst_structure_get_name (s),
          NULL);
      gst_structure_set_name (s, "application/x-aavd");
    }
    return TRUE;
  }

  if (stream->protection_scheme_type != FOURCC_cenc
      && stream->protection_scheme_type != FOURCC_cbcs) {
    GST_ERROR_OBJECT (qtdemux,
        "unsupported protection scheme: %" GST_FOURCC_FORMAT,
        GST_FOURCC_ARGS (stream->protection_scheme_type));
    return FALSE;
  }

  s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
  if (!gst_structure_has_name (s, "application/x-cenc")) {
    gst_structure_set (s,
        "original-media-type", G_TYPE_STRING, gst_structure_get_name (s), NULL);
    gst_structure_set (s, "cipher-mode", G_TYPE_STRING,
        (stream->protection_scheme_type == FOURCC_cbcs) ? "cbcs" : "cenc",
        NULL);
    gst_structure_set_name (s, "application/x-cenc");
  }

  /* without any pssh info we cannot pick a system id; caps are still valid */
  if (qtdemux->protection_system_ids == NULL) {
    GST_DEBUG_OBJECT (qtdemux, "stream is protected using cenc, but no "
        "cenc protection system information has been found, not setting a "
        "protection system UUID");
    return TRUE;
  }

  gst_qtdemux_request_protection_context (qtdemux, stream);
  if (qtdemux->preferred_protection_system_id != NULL) {
    const gchar *preferred_system_array[] =
        { qtdemux->preferred_protection_system_id, NULL };

    selected_system = gst_protection_select_system (preferred_system_array);

    if (selected_system) {
      GST_TRACE_OBJECT (qtdemux, "selected preferred system %s",
          qtdemux->preferred_protection_system_id);
    } else {
      GST_WARNING_OBJECT (qtdemux, "could not select preferred system %s "
          "because there is no available decryptor",
          qtdemux->preferred_protection_system_id);
    }
  }

  /* fall back to any detected system with an available decryptor; the
   * select call needs a NULL-terminated array, so append/remove NULL */
  if (!selected_system) {
    g_ptr_array_add (qtdemux->protection_system_ids, NULL);
    selected_system = gst_protection_select_system ((const gchar **)
        qtdemux->protection_system_ids->pdata);
    g_ptr_array_remove_index (qtdemux->protection_system_ids,
        qtdemux->protection_system_ids->len - 1);
  }

  if (!selected_system) {
    GST_ERROR_OBJECT (qtdemux, "stream is protected, but no "
        "suitable decryptor element has been found");
    return FALSE;
  }

  GST_DEBUG_OBJECT (qtdemux, "selected protection system is %s",
      selected_system);

  gst_structure_set (s,
      GST_PROTECTION_SYSTEM_ID_CAPS_FIELD, G_TYPE_STRING, selected_system,
      NULL);

  return TRUE;
}
+
/* Estimate the framerate of @stream and store it in the current stsd
 * entry's fps_n/fps_d.  Still images get 0/1; otherwise the average
 * sample duration (ignoring the often-truncated first sample) is fed to
 * gst_video_guess_framerate().  Returns TRUE when the computed rate is
 * considered reliable enough to advertise in caps. */
static gboolean
gst_qtdemux_guess_framerate (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
  /* fps is calculated base on the duration of the average framerate since
   * qt does not have a fixed framerate. */
  gboolean fps_available = TRUE;
  guint32 first_duration = 0;

  if (stream->n_samples > 0)
    first_duration = stream->samples[0].duration;

  if ((stream->n_samples == 1 && first_duration == 0)
      || (qtdemux->fragmented && stream->n_samples_moof == 1)) {
    /* still frame */
    CUR_STREAM (stream)->fps_n = 0;
    CUR_STREAM (stream)->fps_d = 1;
  } else {
    if (stream->duration == 0 || stream->n_samples < 2) {
      /* not enough info: fall back to timescale/1 but report unreliable */
      CUR_STREAM (stream)->fps_n = stream->timescale;
      CUR_STREAM (stream)->fps_d = 1;
      fps_available = FALSE;
    } else {
      GstClockTime avg_duration;
      guint64 duration;
      guint32 n_samples;

      /* duration and n_samples can be updated for fragmented format
       * so, framerate of fragmented format is calculated using data in a moof */
      if (qtdemux->fragmented && stream->n_samples_moof > 0
          && stream->duration_moof > 0) {
        n_samples = stream->n_samples_moof;
        duration = stream->duration_moof;
      } else {
        n_samples = stream->n_samples;
        duration = stream->duration;
      }

      /* Calculate a framerate, ignoring the first sample which is sometimes truncated */
      /* stream->duration is guint64, timescale, n_samples are guint32 */
      avg_duration =
          gst_util_uint64_scale_round (duration -
          first_duration, GST_SECOND,
          (guint64) (stream->timescale) * (n_samples - 1));

      GST_LOG_OBJECT (qtdemux,
          "Calculating avg sample duration based on stream (or moof) duration %"
          G_GUINT64_FORMAT
          " minus first sample %u, leaving %d samples gives %"
          GST_TIME_FORMAT, duration, first_duration,
          n_samples - 1, GST_TIME_ARGS (avg_duration));

      fps_available =
          gst_video_guess_framerate (avg_duration,
          &CUR_STREAM (stream)->fps_n, &CUR_STREAM (stream)->fps_d);

      GST_DEBUG_OBJECT (qtdemux,
          "Calculating framerate, timescale %u gave fps_n %d fps_d %d",
          stream->timescale, CUR_STREAM (stream)->fps_n,
          CUR_STREAM (stream)->fps_d);
    }
  }

  return fps_available;
}
+
/* Finalise the caps of @stream for its current stsd entry and push them
 * on the source pad: fills in subtype-specific fields (video geometry,
 * framerate, PAR, interlacing, colorimetry, multiview; audio rate and
 * channels; closed-caption framerate), configures protected-stream caps
 * when needed, sends STREAM_START on new streams and sets the caps if
 * they changed.  Returns FALSE only when protected caps configuration
 * fails. */
static gboolean
gst_qtdemux_configure_stream (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
  if (stream->subtype == FOURCC_vide) {
    gboolean fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);

    if (CUR_STREAM (stream)->caps) {
      CUR_STREAM (stream)->caps =
          gst_caps_make_writable (CUR_STREAM (stream)->caps);

      if (CUR_STREAM (stream)->width && CUR_STREAM (stream)->height)
        gst_caps_set_simple (CUR_STREAM (stream)->caps,
            "width", G_TYPE_INT, CUR_STREAM (stream)->width,
            "height", G_TYPE_INT, CUR_STREAM (stream)->height, NULL);

      /* set framerate if calculated framerate is reliable */
      if (fps_available) {
        gst_caps_set_simple (CUR_STREAM (stream)->caps,
            "framerate", GST_TYPE_FRACTION, CUR_STREAM (stream)->fps_n,
            CUR_STREAM (stream)->fps_d, NULL);
      }

      /* calculate pixel-aspect-ratio using display width and height */
      GST_DEBUG_OBJECT (qtdemux,
          "video size %dx%d, target display size %dx%d",
          CUR_STREAM (stream)->width, CUR_STREAM (stream)->height,
          stream->display_width, stream->display_height);
      /* qt file might have pasp atom */
      if (CUR_STREAM (stream)->par_w > 0 && CUR_STREAM (stream)->par_h > 0) {
        GST_DEBUG_OBJECT (qtdemux, "par %d:%d", CUR_STREAM (stream)->par_w,
            CUR_STREAM (stream)->par_h);
        gst_caps_set_simple (CUR_STREAM (stream)->caps, "pixel-aspect-ratio",
            GST_TYPE_FRACTION, CUR_STREAM (stream)->par_w,
            CUR_STREAM (stream)->par_h, NULL);
      } else if (stream->display_width > 0 && stream->display_height > 0
          && CUR_STREAM (stream)->width > 0
          && CUR_STREAM (stream)->height > 0) {
        gint n, d;

        /* calculate the pixel aspect ratio using the display and pixel w/h */
        n = stream->display_width * CUR_STREAM (stream)->height;
        d = stream->display_height * CUR_STREAM (stream)->width;
        if (n == d)
          n = d = 1;
        GST_DEBUG_OBJECT (qtdemux, "setting PAR to %d/%d", n, d);
        CUR_STREAM (stream)->par_w = n;
        CUR_STREAM (stream)->par_h = d;
        gst_caps_set_simple (CUR_STREAM (stream)->caps, "pixel-aspect-ratio",
            GST_TYPE_FRACTION, CUR_STREAM (stream)->par_w,
            CUR_STREAM (stream)->par_h, NULL);
      }

      /* interlace_mode: 1 = progressive, 2 = interleaved; field_order
       * values 9/14 come from the 'fiel' atom encoding.  Other values
       * leave the caps untouched. */
      if (CUR_STREAM (stream)->interlace_mode > 0) {
        if (CUR_STREAM (stream)->interlace_mode == 1) {
          gst_caps_set_simple (CUR_STREAM (stream)->caps, "interlace-mode",
              G_TYPE_STRING, "progressive", NULL);
        } else if (CUR_STREAM (stream)->interlace_mode == 2) {
          gst_caps_set_simple (CUR_STREAM (stream)->caps, "interlace-mode",
              G_TYPE_STRING, "interleaved", NULL);
          if (CUR_STREAM (stream)->field_order == 9) {
            gst_caps_set_simple (CUR_STREAM (stream)->caps, "field-order",
                G_TYPE_STRING, "top-field-first", NULL);
          } else if (CUR_STREAM (stream)->field_order == 14) {
            gst_caps_set_simple (CUR_STREAM (stream)->caps, "field-order",
                G_TYPE_STRING, "bottom-field-first", NULL);
          }
        }
      }

      /* Create incomplete colorimetry here if needed */
      if (CUR_STREAM (stream)->colorimetry.range ||
          CUR_STREAM (stream)->colorimetry.matrix ||
          CUR_STREAM (stream)->colorimetry.transfer
          || CUR_STREAM (stream)->colorimetry.primaries) {
        gchar *colorimetry =
            gst_video_colorimetry_to_string (&CUR_STREAM (stream)->colorimetry);
        gst_caps_set_simple (CUR_STREAM (stream)->caps, "colorimetry",
            G_TYPE_STRING, colorimetry, NULL);
        g_free (colorimetry);
      }

      if (stream->multiview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
        guint par_w = 1, par_h = 1;

        if (CUR_STREAM (stream)->par_w > 0 && CUR_STREAM (stream)->par_h > 0) {
          par_w = CUR_STREAM (stream)->par_w;
          par_h = CUR_STREAM (stream)->par_h;
        }

        if (gst_video_multiview_guess_half_aspect (stream->multiview_mode,
                CUR_STREAM (stream)->width, CUR_STREAM (stream)->height, par_w,
                par_h)) {
          stream->multiview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
        }

        gst_caps_set_simple (CUR_STREAM (stream)->caps,
            "multiview-mode", G_TYPE_STRING,
            gst_video_multiview_mode_to_caps_string (stream->multiview_mode),
            "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
            stream->multiview_flags, GST_FLAG_SET_MASK_EXACT, NULL);
      }
    }
  }

  else if (stream->subtype == FOURCC_soun) {
    if (CUR_STREAM (stream)->caps) {
      CUR_STREAM (stream)->caps =
          gst_caps_make_writable (CUR_STREAM (stream)->caps);
      if (CUR_STREAM (stream)->rate > 0)
        gst_caps_set_simple (CUR_STREAM (stream)->caps,
            "rate", G_TYPE_INT, (int) CUR_STREAM (stream)->rate, NULL);
      if (CUR_STREAM (stream)->n_channels > 0)
        gst_caps_set_simple (CUR_STREAM (stream)->caps,
            "channels", G_TYPE_INT, CUR_STREAM (stream)->n_channels, NULL);
      if (CUR_STREAM (stream)->n_channels > 2) {
        /* FIXME: Need to parse the 'chan' atom to get channel layouts
         * correctly; this is just the minimum we can do - assume
         * we don't actually have any channel positions. */
        gst_caps_set_simple (CUR_STREAM (stream)->caps,
            "channel-mask", GST_TYPE_BITMASK, G_GUINT64_CONSTANT (0), NULL);
      }
    }
  }

  else if (stream->subtype == FOURCC_clcp && CUR_STREAM (stream)->caps) {
    const GstStructure *s;
    QtDemuxStream *fps_stream = NULL;
    gboolean fps_available = FALSE;

    /* CEA608 closed caption tracks are a bit special in that each sample
     * can contain CCs for multiple frames, and CCs can be omitted and have to
     * be inferred from the duration of the sample then.
     *
     * As such we take the framerate from the (first) video track here for
     * CEA608 as there must be one CC byte pair for every video frame
     * according to the spec.
     *
     * For CEA708 all is fine and there is one sample per frame.
     */

    s = gst_caps_get_structure (CUR_STREAM (stream)->caps, 0);
    if (gst_structure_has_name (s, "closedcaption/x-cea-608")) {
      gint i;

      for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
        QtDemuxStream *tmp = QTDEMUX_NTH_STREAM (qtdemux, i);

        if (tmp->subtype == FOURCC_vide) {
          fps_stream = tmp;
          break;
        }
      }

      if (fps_stream) {
        fps_available = gst_qtdemux_guess_framerate (qtdemux, fps_stream);
        CUR_STREAM (stream)->fps_n = CUR_STREAM (fps_stream)->fps_n;
        CUR_STREAM (stream)->fps_d = CUR_STREAM (fps_stream)->fps_d;
      }
    } else {
      fps_available = gst_qtdemux_guess_framerate (qtdemux, stream);
      fps_stream = stream;
    }

    CUR_STREAM (stream)->caps =
        gst_caps_make_writable (CUR_STREAM (stream)->caps);

    /* set framerate if calculated framerate is reliable */
    if (fps_available) {
      gst_caps_set_simple (CUR_STREAM (stream)->caps,
          "framerate", GST_TYPE_FRACTION, CUR_STREAM (stream)->fps_n,
          CUR_STREAM (stream)->fps_d, NULL);
    }
  }

  /* pad setup: activate, configure protection, send STREAM_START once,
   * then set caps only when they actually changed */
  if (stream->pad) {
    GstCaps *prev_caps = NULL;

    GST_PAD_ELEMENT_PRIVATE (stream->pad) = stream;
    gst_pad_set_event_function (stream->pad, gst_qtdemux_handle_src_event);
    gst_pad_set_query_function (stream->pad, gst_qtdemux_handle_src_query);
    gst_pad_set_active (stream->pad, TRUE);

    gst_pad_use_fixed_caps (stream->pad);

    if (stream->protected) {
      if (!gst_qtdemux_configure_protected_caps (qtdemux, stream)) {
        GST_ERROR_OBJECT (qtdemux,
            "Failed to configure protected stream caps.");
        return FALSE;
      }
    }

    GST_DEBUG_OBJECT (qtdemux, "setting caps %" GST_PTR_FORMAT,
        CUR_STREAM (stream)->caps);
    if (stream->new_stream) {
      GstEvent *event;
      GstStreamFlags stream_flags = GST_STREAM_FLAG_NONE;

      /* inherit stream flags / group id from the upstream STREAM_START */
      event =
          gst_pad_get_sticky_event (qtdemux->sinkpad, GST_EVENT_STREAM_START,
          0);
      if (event) {
        gst_event_parse_stream_flags (event, &stream_flags);
        if (gst_event_parse_group_id (event, &qtdemux->group_id))
          qtdemux->have_group_id = TRUE;
        else
          qtdemux->have_group_id = FALSE;
        gst_event_unref (event);
      } else if (!qtdemux->have_group_id) {
        qtdemux->have_group_id = TRUE;
        qtdemux->group_id = gst_util_group_id_next ();
      }

      stream->new_stream = FALSE;
      event = gst_event_new_stream_start (stream->stream_id);
      if (qtdemux->have_group_id)
        gst_event_set_group_id (event, qtdemux->group_id);
      if (stream->disabled)
        stream_flags |= GST_STREAM_FLAG_UNSELECT;
      if (CUR_STREAM (stream)->sparse) {
        stream_flags |= GST_STREAM_FLAG_SPARSE;
      } else {
        stream_flags &= ~GST_STREAM_FLAG_SPARSE;
      }
      gst_event_set_stream_flags (event, stream_flags);
      gst_pad_push_event (stream->pad, event);
    }

    prev_caps = gst_pad_get_current_caps (stream->pad);

    if (CUR_STREAM (stream)->caps) {
      if (!prev_caps
          || !gst_caps_is_equal_fixed (prev_caps, CUR_STREAM (stream)->caps)) {
        GST_DEBUG_OBJECT (qtdemux, "setting caps %" GST_PTR_FORMAT,
            CUR_STREAM (stream)->caps);
        gst_pad_set_caps (stream->pad, CUR_STREAM (stream)->caps);
      } else {
        GST_DEBUG_OBJECT (qtdemux, "ignore duplicated caps");
      }
    } else {
      GST_WARNING_OBJECT (qtdemux, "stream without caps");
    }

    if (prev_caps)
      gst_caps_unref (prev_caps);
    stream->new_caps = FALSE;
  }
  return TRUE;
}
+
+static void
+gst_qtdemux_stream_check_and_change_stsd_index (GstQTDemux * demux,
+ QtDemuxStream * stream)
+{
+ if (stream->cur_stsd_entry_index == stream->stsd_sample_description_id)
+ return;
+
+ GST_DEBUG_OBJECT (stream->pad, "Changing stsd index from '%u' to '%u'",
+ stream->cur_stsd_entry_index, stream->stsd_sample_description_id);
+ if (G_UNLIKELY (stream->stsd_sample_description_id >=
+ stream->stsd_entries_length)) {
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+ (_("This file is invalid and cannot be played.")),
+ ("New sample description id is out of bounds (%d >= %d)",
+ stream->stsd_sample_description_id, stream->stsd_entries_length));
+ } else {
+ stream->cur_stsd_entry_index = stream->stsd_sample_description_id;
+ stream->new_caps = TRUE;
+ }
+}
+
+static gboolean
+gst_qtdemux_add_stream (GstQTDemux * qtdemux,
+ QtDemuxStream * stream, GstTagList * list)
+{
+ gboolean ret = TRUE;
+
+ if (stream->subtype == FOURCC_vide) {
+ gchar *name = g_strdup_printf ("video_%u", qtdemux->n_video_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_videosrc_template, name);
+ g_free (name);
+
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+
+ qtdemux->n_video_streams++;
+ } else if (stream->subtype == FOURCC_soun) {
+ gchar *name = g_strdup_printf ("audio_%u", qtdemux->n_audio_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_audiosrc_template, name);
+ g_free (name);
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+ qtdemux->n_audio_streams++;
+ } else if (stream->subtype == FOURCC_strm) {
+ GST_DEBUG_OBJECT (qtdemux, "stream type, not creating pad");
+ } else if (stream->subtype == FOURCC_subp || stream->subtype == FOURCC_text
+ || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt
+ || stream->subtype == FOURCC_clcp) {
+ gchar *name = g_strdup_printf ("subtitle_%u", qtdemux->n_sub_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_subsrc_template, name);
+ g_free (name);
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+ qtdemux->n_sub_streams++;
+ } else if (CUR_STREAM (stream)->caps) {
+ gchar *name = g_strdup_printf ("video_%u", qtdemux->n_video_streams);
+
+ stream->pad =
+ gst_pad_new_from_static_template (&gst_qtdemux_videosrc_template, name);
+ g_free (name);
+ if (!gst_qtdemux_configure_stream (qtdemux, stream)) {
+ gst_object_unref (stream->pad);
+ stream->pad = NULL;
+ ret = FALSE;
+ goto done;
+ }
+ qtdemux->n_video_streams++;
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "unknown stream type");
+ goto done;
+ }
+
+ if (stream->pad) {
+ GList *l;
+
+ GST_DEBUG_OBJECT (qtdemux, "adding pad %s %p to qtdemux %p",
+ GST_OBJECT_NAME (stream->pad), stream->pad, qtdemux);
+ gst_element_add_pad (GST_ELEMENT_CAST (qtdemux), stream->pad);
+ GST_OBJECT_LOCK (qtdemux);
+ gst_flow_combiner_add_pad (qtdemux->flowcombiner, stream->pad);
+ GST_OBJECT_UNLOCK (qtdemux);
+
+ if (stream->stream_tags)
+ gst_tag_list_unref (stream->stream_tags);
+ stream->stream_tags = list;
+ list = NULL;
+ /* global tags go on each pad anyway */
+ stream->send_global_tags = TRUE;
+ /* send upstream GST_EVENT_PROTECTION events that were received before
+ this source pad was created */
+ for (l = qtdemux->protection_event_queue.head; l != NULL; l = l->next)
+ gst_pad_push_event (stream->pad, gst_event_ref (l->data));
+ }
+done:
+ if (list)
+ gst_tag_list_unref (list);
+ return ret;
+}
+
+/* find next atom with @fourcc starting at @offset */
+static GstFlowReturn
+qtdemux_find_atom (GstQTDemux * qtdemux, guint64 * offset,
+ guint64 * length, guint32 fourcc)
+{
+ GstFlowReturn ret;
+ guint32 lfourcc;
+ GstBuffer *buf;
+
+ GST_LOG_OBJECT (qtdemux, "finding fourcc %" GST_FOURCC_FORMAT " at offset %"
+ G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), *offset);
+
+ while (TRUE) {
+ GstMapInfo map;
+
+ buf = NULL;
+ ret = gst_pad_pull_range (qtdemux->sinkpad, *offset, 16, &buf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto locate_failed;
+ if (G_UNLIKELY (gst_buffer_get_size (buf) != 16)) {
+ /* likely EOF */
+ ret = GST_FLOW_EOS;
+ gst_buffer_unref (buf);
+ goto locate_failed;
+ }
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ extract_initial_length_and_fourcc (map.data, 16, length, &lfourcc);
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ if (G_UNLIKELY (*length == 0)) {
+ GST_DEBUG_OBJECT (qtdemux, "invalid length 0");
+ ret = GST_FLOW_ERROR;
+ goto locate_failed;
+ }
+
+ if (lfourcc == fourcc) {
+ GST_DEBUG_OBJECT (qtdemux, "found '%" GST_FOURCC_FORMAT " at offset %"
+ G_GUINT64_FORMAT, GST_FOURCC_ARGS (fourcc), *offset);
+ break;
+ } else {
+ GST_LOG_OBJECT (qtdemux,
+ "skipping atom '%" GST_FOURCC_FORMAT "' at %" G_GUINT64_FORMAT,
+ GST_FOURCC_ARGS (lfourcc), *offset);
+ if (*offset == G_MAXUINT64)
+ goto locate_failed;
+ *offset += *length;
+ }
+ }
+
+ return GST_FLOW_OK;
+
+locate_failed:
+ {
+ /* might simply have had last one */
+ GST_DEBUG_OBJECT (qtdemux, "fourcc not found");
+ return ret;
+ }
+}
+
/* should only do something in pull mode */
/* call with OBJECT lock */
/* Pull the next 'moof' fragment at qtdemux->moof_offset, parse its
 * samples into the streams, then locate the following 'moof' and store
 * its offset for next time (0 when none remains).  The OBJECT lock is
 * dropped around all pull/parse work and re-taken before updating
 * moof_offset.  Returns GST_FLOW_EOS when no further fragment exists,
 * GST_FLOW_ERROR on parse failure, or the upstream flow return. */
static GstFlowReturn
qtdemux_add_fragmented_samples (GstQTDemux * qtdemux)
{
  guint64 length, offset;
  GstBuffer *buf = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  GstFlowReturn res = GST_FLOW_OK;
  GstMapInfo map;

  offset = qtdemux->moof_offset;
  GST_DEBUG_OBJECT (qtdemux, "next moof at offset %" G_GUINT64_FORMAT, offset);

  /* offset 0 is the "no more fragments" sentinel */
  if (!offset) {
    GST_DEBUG_OBJECT (qtdemux, "no next moof");
    return GST_FLOW_EOS;
  }

  /* best not do pull etc with lock held */
  GST_OBJECT_UNLOCK (qtdemux);

  ret = qtdemux_find_atom (qtdemux, &offset, &length, FOURCC_moof);
  if (ret != GST_FLOW_OK)
    goto flow_failed;

  ret = gst_qtdemux_pull_atom (qtdemux, offset, length, &buf);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto flow_failed;
  gst_buffer_map (buf, &map, GST_MAP_READ);
  if (!qtdemux_parse_moof (qtdemux, map.data, map.size, offset, NULL)) {
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    buf = NULL;
    goto parse_failed;
  }

  gst_buffer_unmap (buf, &map);
  gst_buffer_unref (buf);
  buf = NULL;

  offset += length;
  /* look for next moof */
  ret = qtdemux_find_atom (qtdemux, &offset, &length, FOURCC_moof);
  if (G_UNLIKELY (ret != GST_FLOW_OK))
    goto flow_failed;

exit:
  /* re-take the lock the caller expects to still hold */
  GST_OBJECT_LOCK (qtdemux);

  qtdemux->moof_offset = offset;

  return res;

parse_failed:
  {
    GST_DEBUG_OBJECT (qtdemux, "failed to parse moof");
    offset = 0;
    res = GST_FLOW_ERROR;
    goto exit;
  }
flow_failed:
  {
    /* maybe upstream temporarily flushing */
    if (ret != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (qtdemux, "no next moof");
      offset = 0;
    } else {
      GST_DEBUG_OBJECT (qtdemux, "upstream WRONG_STATE");
      /* resume at current position next time */
    }
    res = ret;
    goto exit;
  }
}
+
/* Collapse each chunk of @stream into a single logical sample by rewriting
 * the stsc/stts/stsz tables in place. Used for streams (e.g. raw audio)
 * where emitting one buffer per original sample would be wasteful; see the
 * caller in qtdemux_stbl_init. Bails out (leaving tables untouched) for any
 * stream shape the merge cannot represent. */
static void
qtdemux_merge_sample_table (GstQTDemux * qtdemux, QtDemuxStream * stream)
{
  guint i;
  guint32 num_chunks;
  gint32 stts_duration;
  GstByteWriter stsc, stts, stsz;

  /* Each sample has a different size, which we don't support for merging */
  if (stream->sample_size == 0) {
    GST_DEBUG_OBJECT (qtdemux,
        "Not all samples have the same size, not merging");
    return;
  }

  /* The stream has a ctts table, we don't support that */
  if (stream->ctts_present) {
    GST_DEBUG_OBJECT (qtdemux, "Have ctts, not merging");
    return;
  }

  /* If there's a sync sample table also ignore this stream */
  if (stream->stps_present || stream->stss_present) {
    GST_DEBUG_OBJECT (qtdemux, "Have stss/stps, not merging");
    return;
  }

  /* If chunks are considered samples already ignore this stream */
  if (stream->chunks_are_samples) {
    GST_DEBUG_OBJECT (qtdemux, "Chunks are samples, not merging");
    return;
  }

  /* Require that all samples have the same duration */
  if (stream->n_sample_times > 1) {
    GST_DEBUG_OBJECT (qtdemux, "Not all samples have the same duration");
    return;
  }

  /* Parse the stts to get the sample duration and number of samples */
  gst_byte_reader_skip_unchecked (&stream->stts, 4);
  stts_duration = gst_byte_reader_get_uint32_be_unchecked (&stream->stts);

  /* Parse the number of chunks from the stco manually because the
   * reader is already behind that */
  num_chunks = GST_READ_UINT32_BE (stream->stco.data + 4);

  GST_DEBUG_OBJECT (qtdemux, "sample_duration %d, num_chunks %u", stts_duration,
      num_chunks);

  /* Now parse stsc, convert chunks into single samples and generate a
   * new stsc, stts and stsz from this information */
  gst_byte_writer_init (&stsc);
  gst_byte_writer_init (&stts);
  gst_byte_writer_init (&stsz);

  /* Note: we skip fourccs, size, version, flags and other fields of the new
   * atoms as the byte readers with them are already behind that position
   * anyway and only update the values of those inside the stream directly.
   */
  stream->n_sample_times = 0;
  stream->n_samples = 0;
  for (i = 0; i < stream->n_samples_per_chunk; i++) {
    guint j;
    guint32 first_chunk, last_chunk, samples_per_chunk, sample_description_id;

    first_chunk = gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
    samples_per_chunk = gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
    sample_description_id =
        gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);

    if (i == stream->n_samples_per_chunk - 1) {
      /* +1 because first_chunk is 1-based */
      last_chunk = num_chunks + 1;
    } else {
      /* peek (not get): the next entry's first_chunk is read again as that
       * entry's own first field on the next iteration */
      last_chunk = gst_byte_reader_peek_uint32_be_unchecked (&stream->stsc);
    }

    GST_DEBUG_OBJECT (qtdemux,
        "Merging first_chunk: %u, last_chunk: %u, samples_per_chunk: %u, sample_description_id: %u",
        first_chunk, last_chunk, samples_per_chunk, sample_description_id);

    gst_byte_writer_put_uint32_be (&stsc, first_chunk);
    /* One sample in this chunk */
    gst_byte_writer_put_uint32_be (&stsc, 1);
    gst_byte_writer_put_uint32_be (&stsc, sample_description_id);

    /* For each chunk write a stts and stsz entry now */
    gst_byte_writer_put_uint32_be (&stts, last_chunk - first_chunk);
    gst_byte_writer_put_uint32_be (&stts, stts_duration * samples_per_chunk);
    for (j = first_chunk; j < last_chunk; j++) {
      gst_byte_writer_put_uint32_be (&stsz,
          stream->sample_size * samples_per_chunk);
    }

    stream->n_sample_times += 1;
    stream->n_samples += last_chunk - first_chunk;
  }

  /* NOTE(review): this asserts on values derived from the file's stsc
   * entries; presumably overlapping/non-contiguous entries cannot reach
   * here, but a corrupt file tripping this would abort — confirm */
  g_assert_cmpint (stream->n_samples, ==, num_chunks);

  GST_DEBUG_OBJECT (qtdemux, "Have %u samples and %u sample times",
      stream->n_samples, stream->n_sample_times);

  /* We don't have a fixed sample size anymore */
  stream->sample_size = 0;

  /* Free old data for the atoms */
  g_free ((gpointer) stream->stsz.data);
  stream->stsz.data = NULL;
  g_free ((gpointer) stream->stsc.data);
  stream->stsc.data = NULL;
  g_free ((gpointer) stream->stts.data);
  stream->stts.data = NULL;

  /* Store new data and replace byte readers; the new readers deliberately
   * start at position 0 (no headers were written, see note above) */
  stream->stsz.size = gst_byte_writer_get_size (&stsz);
  stream->stsz.data = gst_byte_writer_reset_and_get_data (&stsz);
  gst_byte_reader_init (&stream->stsz, stream->stsz.data, stream->stsz.size);
  stream->stts.size = gst_byte_writer_get_size (&stts);
  stream->stts.data = gst_byte_writer_reset_and_get_data (&stts);
  gst_byte_reader_init (&stream->stts, stream->stts.data, stream->stts.size);
  stream->stsc.size = gst_byte_writer_get_size (&stsc);
  stream->stsc.data = gst_byte_writer_reset_and_get_data (&stsc);
  gst_byte_reader_init (&stream->stsc, stream->stsc.data, stream->stsc.size);
}
+
/* initialise bytereaders for stbl sub-atoms.
 *
 * Locates stts/stss/stps/stsz/stsc/stco-or-co64/ctts inside @stbl, copies
 * each table into stream-owned memory, validates entry counts against the
 * available data, and leaves every byte reader positioned just past its
 * header so that qtdemux_parse_samples can consume entries incrementally.
 * Also allocates the stream->samples array. Returns FALSE on corrupt or
 * oversized tables (posting an element error), TRUE otherwise. */
static gboolean
qtdemux_stbl_init (GstQTDemux * qtdemux, QtDemuxStream * stream, GNode * stbl)
{
  stream->stbl_index = -1;      /* no samples have yet been parsed */
  stream->sample_index = -1;

  /* time-to-sample atom */
  if (!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stts, &stream->stts))
    goto corrupt_file;

  /* copy atom data into a new buffer for later use */
  stream->stts.data = g_memdup2 (stream->stts.data, stream->stts.size);

  /* skip version + flags */
  if (!gst_byte_reader_skip (&stream->stts, 1 + 3) ||
      !gst_byte_reader_get_uint32_be (&stream->stts, &stream->n_sample_times))
    goto corrupt_file;
  GST_LOG_OBJECT (qtdemux, "%u timestamp blocks", stream->n_sample_times);

  /* make sure there's enough data; clamp the declared entry count to what
   * the atom actually contains rather than failing outright */
  if (!qt_atom_parser_has_chunks (&stream->stts, stream->n_sample_times, 8)) {
    stream->n_sample_times = gst_byte_reader_get_remaining (&stream->stts) / 8;
    GST_LOG_OBJECT (qtdemux, "overriding to %u timestamp blocks",
        stream->n_sample_times);
    if (!stream->n_sample_times)
      goto corrupt_file;
  }

  /* sync sample atom */
  stream->stps_present = FALSE;
  if ((stream->stss_present =
          ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stss,
              &stream->stss) ? TRUE : FALSE) == TRUE) {
    /* copy atom data into a new buffer for later use */
    stream->stss.data = g_memdup2 (stream->stss.data, stream->stss.size);

    /* skip version + flags */
    if (!gst_byte_reader_skip (&stream->stss, 1 + 3) ||
        !gst_byte_reader_get_uint32_be (&stream->stss, &stream->n_sample_syncs))
      goto corrupt_file;

    if (stream->n_sample_syncs) {
      /* make sure there's enough data */
      if (!qt_atom_parser_has_chunks (&stream->stss, stream->n_sample_syncs, 4))
        goto corrupt_file;
    }

    /* partial sync sample atom; only looked for when an stss exists */
    if ((stream->stps_present =
            ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stps,
                &stream->stps) ? TRUE : FALSE) == TRUE) {
      /* copy atom data into a new buffer for later use */
      stream->stps.data = g_memdup2 (stream->stps.data, stream->stps.size);

      /* skip version + flags */
      if (!gst_byte_reader_skip (&stream->stps, 1 + 3) ||
          !gst_byte_reader_get_uint32_be (&stream->stps,
              &stream->n_sample_partial_syncs))
        goto corrupt_file;

      /* if there are no entries, the stss table contains the real
       * sync samples */
      if (stream->n_sample_partial_syncs) {
        /* make sure there's enough data */
        if (!qt_atom_parser_has_chunks (&stream->stps,
                stream->n_sample_partial_syncs, 4))
          goto corrupt_file;
      }
    }
  }

  /* sample size */
  if (!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stsz, &stream->stsz))
    goto no_samples;

  /* copy atom data into a new buffer for later use */
  stream->stsz.data = g_memdup2 (stream->stsz.data, stream->stsz.size);

  /* skip version + flags; a non-zero sample_size means all samples share
   * that size and the per-sample table is absent */
  if (!gst_byte_reader_skip (&stream->stsz, 1 + 3) ||
      !gst_byte_reader_get_uint32_be (&stream->stsz, &stream->sample_size))
    goto corrupt_file;

  if (!gst_byte_reader_get_uint32_be (&stream->stsz, &stream->n_samples))
    goto corrupt_file;

  if (!stream->n_samples)
    goto no_samples;

  /* sample-to-chunk atom */
  if (!qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stsc, &stream->stsc))
    goto corrupt_file;

  /* copy atom data into a new buffer for later use */
  stream->stsc.data = g_memdup2 (stream->stsc.data, stream->stsc.size);

  /* skip version + flags */
  if (!gst_byte_reader_skip (&stream->stsc, 1 + 3) ||
      !gst_byte_reader_get_uint32_be (&stream->stsc,
          &stream->n_samples_per_chunk))
    goto corrupt_file;

  GST_DEBUG_OBJECT (qtdemux, "n_samples_per_chunk %u",
      stream->n_samples_per_chunk);

  /* make sure there's enough data */
  if (!qt_atom_parser_has_chunks (&stream->stsc, stream->n_samples_per_chunk,
          12))
    goto corrupt_file;


  /* chunk offset: 32-bit stco or 64-bit co64; co_size records which */
  if (qtdemux_tree_get_child_by_type_full (stbl, FOURCC_stco, &stream->stco))
    stream->co_size = sizeof (guint32);
  else if (qtdemux_tree_get_child_by_type_full (stbl, FOURCC_co64,
          &stream->stco))
    stream->co_size = sizeof (guint64);
  else
    goto corrupt_file;

  /* copy atom data into a new buffer for later use */
  stream->stco.data = g_memdup2 (stream->stco.data, stream->stco.size);

  /* skip version + flags */
  if (!gst_byte_reader_skip (&stream->stco, 1 + 3))
    goto corrupt_file;

  /* chunks_are_samples == TRUE means treat chunks as samples */
  stream->chunks_are_samples = stream->sample_size
      && !CUR_STREAM (stream)->sampled;
  if (stream->chunks_are_samples) {
    /* treat chunks as samples */
    if (!gst_byte_reader_get_uint32_be (&stream->stco, &stream->n_samples))
      goto corrupt_file;
  } else {
    /* skip number of entries */
    if (!gst_byte_reader_skip (&stream->stco, 4))
      goto corrupt_file;

    /* make sure there are enough data in the stsz atom */
    if (!stream->sample_size) {
      /* different sizes for each sample */
      if (!qt_atom_parser_has_chunks (&stream->stsz, stream->n_samples, 4))
        goto corrupt_file;
    }
  }

  /* composition time-to-sample */
  if ((stream->ctts_present =
          ! !qtdemux_tree_get_child_by_type_full (stbl, FOURCC_ctts,
              &stream->ctts) ? TRUE : FALSE) == TRUE) {
    GstByteReader cslg = GST_BYTE_READER_INIT (NULL, 0);

    /* copy atom data into a new buffer for later use */
    stream->ctts.data = g_memdup2 (stream->ctts.data, stream->ctts.size);

    /* skip version + flags */
    if (!gst_byte_reader_skip (&stream->ctts, 1 + 3)
        || !gst_byte_reader_get_uint32_be (&stream->ctts,
            &stream->n_composition_times))
      goto corrupt_file;

    /* make sure there's enough data */
    if (!qt_atom_parser_has_chunks (&stream->ctts, stream->n_composition_times,
            4 + 4))
      goto corrupt_file;

    /* This is optional, if missing we iterate the ctts */
    if (qtdemux_tree_get_child_by_type_full (stbl, FOURCC_cslg, &cslg)) {
      /* NOTE(review): cslg.data is freed only on this error path and not on
       * success — confirm ownership semantics of
       * qtdemux_tree_get_child_by_type_full */
      if (!gst_byte_reader_skip (&cslg, 1 + 3)
          || !gst_byte_reader_get_uint32_be (&cslg, &stream->cslg_shift)) {
        g_free ((gpointer) cslg.data);
        goto corrupt_file;
      }
    } else {
      gint32 cslg_least = 0;
      guint num_entries, pos;
      gint i;

      /* no cslg: derive the shift by scanning ctts for the most negative
       * composition offset */
      pos = gst_byte_reader_get_pos (&stream->ctts);
      num_entries = stream->n_composition_times;

      stream->cslg_shift = 0;

      for (i = 0; i < num_entries; i++) {
        gint32 offset;

        gst_byte_reader_skip_unchecked (&stream->ctts, 4);
        offset = gst_byte_reader_get_int32_be_unchecked (&stream->ctts);
        /* HACK: if sample_offset is larger than 2 * duration, ignore the box.
         * slightly inaccurate PTS could be more usable than corrupted one */
        if (G_UNLIKELY ((ABS (offset) / 2) > stream->duration)) {
          GST_WARNING_OBJECT (qtdemux,
              "Ignore corrupted ctts, sample_offset %" G_GINT32_FORMAT
              " larger than duration %" G_GUINT64_FORMAT,
              offset, stream->duration);

          stream->cslg_shift = 0;
          stream->ctts_present = FALSE;
          /* NOTE(review): this jump also skips the sample-merge step below —
           * confirm that is intended and not just an artifact of the label
           * placement */
          goto done;
        }

        if (offset < cslg_least)
          cslg_least = offset;
      }

      if (cslg_least < 0)
        stream->cslg_shift = ABS (cslg_least);
      else
        stream->cslg_shift = 0;

      /* reset the reader so we can generate sample table */
      gst_byte_reader_set_pos (&stream->ctts, pos);
    }
  } else {
    /* Ensure the cslg_shift value is consistent so we can use it
     * unconditionally to produce TS and Segment */
    stream->cslg_shift = 0;
  }

  /* For raw audio streams especially we might want to merge the samples
   * to not output one audio sample per buffer. We're doing this here
   * before allocating the sample tables so that from this point onwards
   * the number of container samples are static */
  if (stream->min_buffer_size > 0) {
    qtdemux_merge_sample_table (qtdemux, stream);
  }

done:
  GST_DEBUG_OBJECT (qtdemux, "allocating n_samples %u * %u (%.2f MB)",
      stream->n_samples, (guint) sizeof (QtDemuxSample),
      stream->n_samples * sizeof (QtDemuxSample) / (1024.0 * 1024.0));

  /* refuse absurd sample counts before attempting a huge allocation */
  if (stream->n_samples >=
      QTDEMUX_MAX_SAMPLE_INDEX_SIZE / sizeof (QtDemuxSample)) {
    GST_WARNING_OBJECT (qtdemux, "not allocating index of %d samples, would "
        "be larger than %uMB (broken file?)", stream->n_samples,
        QTDEMUX_MAX_SAMPLE_INDEX_SIZE >> 20);
    return FALSE;
  }

  g_assert (stream->samples == NULL);
  stream->samples = g_try_new0 (QtDemuxSample, stream->n_samples);
  if (!stream->samples) {
    GST_WARNING_OBJECT (qtdemux, "failed to allocate %d samples",
        stream->n_samples);
    return FALSE;
  }

  return TRUE;

corrupt_file:
  {
    GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
        (_("This file is corrupt and cannot be played.")), (NULL));
    return FALSE;
  }
no_samples:
  {
    gst_qtdemux_stbl_free (stream);
    if (!qtdemux->fragmented) {
      /* not quite good */
      GST_WARNING_OBJECT (qtdemux, "stream has no samples");
      return FALSE;
    } else {
      /* may pick up samples elsewhere */
      return TRUE;
    }
  }
}
+
/* collect samples from the next sample to be parsed up to sample @n for @stream
 * by reading the info from @stbl
 *
 * This code can be executed from both the streaming thread and the seeking
 * thread so it takes the object lock to protect itself
 *
 * This is a resumable state machine: progress through the stsc/stco, stts,
 * stss/stps and ctts tables is saved in stream->*_index / *_sample_index
 * fields (and in the byte readers' positions) so a later call can continue
 * exactly where this one stopped. stbl_index tracks the highest sample
 * already filled in stream->samples. */
static gboolean
qtdemux_parse_samples (GstQTDemux * qtdemux, QtDemuxStream * stream, guint32 n)
{
  gint i, j, k;
  QtDemuxSample *samples, *first, *cur, *last;
  guint32 n_samples_per_chunk;
  guint32 n_samples;

  GST_LOG_OBJECT (qtdemux, "parsing samples for stream fourcc %"
      GST_FOURCC_FORMAT ", pad %s",
      GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc),
      stream->pad ? GST_PAD_NAME (stream->pad) : "(NULL)");

  n_samples = stream->n_samples;

  if (n >= n_samples)
    goto out_of_samples;

  GST_OBJECT_LOCK (qtdemux);
  if (n <= stream->stbl_index)
    goto already_parsed;

  GST_DEBUG_OBJECT (qtdemux, "parsing up to sample %u", n);

  if (!stream->stsz.data) {
    /* so we already parsed and passed all the moov samples;
     * onto fragmented ones */
    g_assert (qtdemux->fragmented);
    goto done;
  }

  /* pointer to the sample table */
  samples = stream->samples;

  /* starts from -1, moves to the next sample index to parse */
  stream->stbl_index++;

  /* keep track of the first and last sample to fill */
  first = &samples[stream->stbl_index];
  last = &samples[n];

  if (!stream->chunks_are_samples) {
    /* set the sample sizes */
    if (stream->sample_size == 0) {
      /* different sizes for each sample */
      for (cur = first; cur <= last; cur++) {
        cur->size = gst_byte_reader_get_uint32_be_unchecked (&stream->stsz);
        GST_LOG_OBJECT (qtdemux, "sample %d has size %u",
            (guint) (cur - samples), cur->size);
      }
    } else {
      /* samples have the same size */
      GST_LOG_OBJECT (qtdemux, "all samples have size %u", stream->sample_size);
      for (cur = first; cur <= last; cur++)
        cur->size = stream->sample_size;
    }
  }

  n_samples_per_chunk = stream->n_samples_per_chunk;
  cur = first;

  /* walk the stsc entries, filling in sample offsets from stco/co64 */
  for (i = stream->stsc_index; i < n_samples_per_chunk; i++) {
    guint32 last_chunk;

    /* load the next stsc entry if we've moved past the cached one */
    if (stream->stsc_chunk_index >= stream->last_chunk
        || stream->stsc_chunk_index < stream->first_chunk) {
      stream->first_chunk =
          gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
      stream->samples_per_chunk =
          gst_byte_reader_get_uint32_be_unchecked (&stream->stsc);
      /* starts from 1 */
      stream->stsd_sample_description_id =
          gst_byte_reader_get_uint32_be_unchecked (&stream->stsc) - 1;

      /* chunk numbers are counted from 1 it seems */
      if (G_UNLIKELY (stream->first_chunk == 0))
        goto corrupt_file;

      --stream->first_chunk;

      /* the last chunk of each entry is calculated by taking the first chunk
       * of the next entry; except if there is no next, where we fake it with
       * INT_MAX */
      if (G_UNLIKELY (i == (stream->n_samples_per_chunk - 1))) {
        stream->last_chunk = G_MAXUINT32;
      } else {
        stream->last_chunk =
            gst_byte_reader_peek_uint32_be_unchecked (&stream->stsc);
        if (G_UNLIKELY (stream->last_chunk == 0))
          goto corrupt_file;

        --stream->last_chunk;
      }

      GST_LOG_OBJECT (qtdemux,
          "entry %d has first_chunk %d, last_chunk %d, samples_per_chunk %d"
          "sample desc ID: %d", i, stream->first_chunk, stream->last_chunk,
          stream->samples_per_chunk, stream->stsd_sample_description_id);

      if (G_UNLIKELY (stream->last_chunk < stream->first_chunk))
        goto corrupt_file;

      if (stream->last_chunk != G_MAXUINT32) {
        /* bounded entry: take a sub-reader over exactly the chunk offsets
         * this entry covers */
        if (!qt_atom_parser_peek_sub (&stream->stco,
                stream->first_chunk * stream->co_size,
                (stream->last_chunk - stream->first_chunk) * stream->co_size,
                &stream->co_chunk))
          goto corrupt_file;

      } else {
        /* open-ended last entry: read offsets until the stco runs out */
        stream->co_chunk = stream->stco;
        if (!gst_byte_reader_skip (&stream->co_chunk,
                stream->first_chunk * stream->co_size))
          goto corrupt_file;
      }

      stream->stsc_chunk_index = stream->first_chunk;
    }

    last_chunk = stream->last_chunk;

    if (stream->chunks_are_samples) {
      /* one sample per chunk (e.g. some uncompressed audio layouts) */
      cur = &samples[stream->stsc_chunk_index];

      for (j = stream->stsc_chunk_index; j < last_chunk; j++) {
        if (j > n) {
          /* save state */
          stream->stsc_chunk_index = j;
          goto done;
        }

        cur->offset =
            qt_atom_parser_get_offset_unchecked (&stream->co_chunk,
            stream->co_size);

        GST_LOG_OBJECT (qtdemux, "Created entry %d with offset "
            "%" G_GUINT64_FORMAT, j, cur->offset);

        if (CUR_STREAM (stream)->samples_per_frame > 0 &&
            CUR_STREAM (stream)->bytes_per_frame > 0) {
          cur->size =
              (stream->samples_per_chunk * CUR_STREAM (stream)->n_channels) /
              CUR_STREAM (stream)->samples_per_frame *
              CUR_STREAM (stream)->bytes_per_frame;
        } else {
          cur->size = stream->samples_per_chunk;
        }

        GST_DEBUG_OBJECT (qtdemux,
            "keyframe sample %d: timestamp %" GST_TIME_FORMAT ", size %u",
            j, GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream,
                    stream->stco_sample_index)), cur->size);

        /* timestamp is a running count of media samples seen so far */
        cur->timestamp = stream->stco_sample_index;
        cur->duration = stream->samples_per_chunk;
        cur->keyframe = TRUE;
        cur++;

        stream->stco_sample_index += stream->samples_per_chunk;
      }
      stream->stsc_chunk_index = j;
    } else {
      for (j = stream->stsc_chunk_index; j < last_chunk; j++) {
        guint32 samples_per_chunk;
        guint64 chunk_offset;

        /* fetch the chunk's base offset only when starting a fresh chunk */
        if (!stream->stsc_sample_index
            && !qt_atom_parser_get_offset (&stream->co_chunk, stream->co_size,
                &stream->chunk_offset))
          goto corrupt_file;

        samples_per_chunk = stream->samples_per_chunk;
        chunk_offset = stream->chunk_offset;

        for (k = stream->stsc_sample_index; k < samples_per_chunk; k++) {
          GST_LOG_OBJECT (qtdemux, "creating entry %d with offset %"
              G_GUINT64_FORMAT " and size %d",
              (guint) (cur - samples), chunk_offset, cur->size);

          /* samples within a chunk are laid out back to back; advance by
           * the size filled in from the stsz pass above */
          cur->offset = chunk_offset;
          chunk_offset += cur->size;
          cur++;

          if (G_UNLIKELY (cur > last)) {
            /* save state */
            stream->stsc_sample_index = k + 1;
            stream->chunk_offset = chunk_offset;
            stream->stsc_chunk_index = j;
            goto done2;
          }
        }
        stream->stsc_sample_index = 0;
      }
      stream->stsc_chunk_index = j;
    }
    stream->stsc_index++;
  }

  /* chunks-as-samples already got timestamps/keyframes above; skip straight
   * to the composition-offset pass */
  if (stream->chunks_are_samples)
    goto ctts;
done2:
  {
    /* timestamp pass: expand stts (count, duration) runs into per-sample
     * decode timestamps and durations */
    guint32 n_sample_times;

    n_sample_times = stream->n_sample_times;
    cur = first;

    for (i = stream->stts_index; i < n_sample_times; i++) {
      guint32 stts_samples;
      gint32 stts_duration;
      gint64 stts_time;

      if (stream->stts_sample_index >= stream->stts_samples
          || !stream->stts_sample_index) {

        stream->stts_samples =
            gst_byte_reader_get_uint32_be_unchecked (&stream->stts);
        stream->stts_duration =
            gst_byte_reader_get_uint32_be_unchecked (&stream->stts);

        GST_LOG_OBJECT (qtdemux, "block %d, %u timestamps, duration %u",
            i, stream->stts_samples, stream->stts_duration);

        stream->stts_sample_index = 0;
      }

      stts_samples = stream->stts_samples;
      stts_duration = stream->stts_duration;
      stts_time = stream->stts_time;

      for (j = stream->stts_sample_index; j < stts_samples; j++) {
        GST_DEBUG_OBJECT (qtdemux,
            "sample %d: index %d, timestamp %" GST_TIME_FORMAT,
            (guint) (cur - samples), j,
            GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, stts_time)));

        cur->timestamp = stts_time;
        cur->duration = stts_duration;

        /* avoid 32-bit wrap-around,
         * but still mind possible 'negative' duration */
        stts_time += (gint64) stts_duration;
        cur++;

        if (G_UNLIKELY (cur > last)) {
          /* save values */
          stream->stts_time = stts_time;
          stream->stts_sample_index = j + 1;
          if (stream->stts_sample_index >= stream->stts_samples)
            stream->stts_index++;
          goto done3;
        }
      }
      stream->stts_sample_index = 0;
      stream->stts_time = stts_time;
      stream->stts_index++;
    }
    /* fill up empty timestamps with the last timestamp, this can happen when
     * the last samples do not decode and so we don't have timestamps for them.
     * We however look at the last timestamp to estimate the track length so we
     * need something in here. */
    for (; cur < last; cur++) {
      GST_DEBUG_OBJECT (qtdemux,
          "fill sample %d: timestamp %" GST_TIME_FORMAT,
          (guint) (cur - samples),
          GST_TIME_ARGS (QTSTREAMTIME_TO_GSTTIME (stream, stream->stts_time)));
      cur->timestamp = stream->stts_time;
      cur->duration = -1;
    }
  }
done3:
  {
    /* keyframe pass: mark sync samples from stss (and stps) */
    /* sample sync, can be NULL */
    if (stream->stss_present == TRUE) {
      guint32 n_sample_syncs;

      n_sample_syncs = stream->n_sample_syncs;

      if (!n_sample_syncs) {
        GST_DEBUG_OBJECT (qtdemux, "all samples are keyframes");
        stream->all_keyframe = TRUE;
      } else {
        for (i = stream->stss_index; i < n_sample_syncs; i++) {
          /* note that the first sample is index 1, not 0 */
          guint32 index;

          index = gst_byte_reader_get_uint32_be_unchecked (&stream->stss);

          if (G_LIKELY (index > 0 && index <= n_samples)) {
            index -= 1;
            samples[index].keyframe = TRUE;
            GST_DEBUG_OBJECT (qtdemux, "samples at %u is keyframe", index);
            /* and exit if we have enough samples */
            if (G_UNLIKELY (index >= n)) {
              i++;
              break;
            }
          }
        }
        /* save state */
        stream->stss_index = i;
      }

      /* stps marks partial sync frames like open GOP I-Frames */
      if (stream->stps_present == TRUE) {
        guint32 n_sample_partial_syncs;

        n_sample_partial_syncs = stream->n_sample_partial_syncs;

        /* if there are no entries, the stss table contains the real
         * sync samples */
        if (n_sample_partial_syncs) {
          for (i = stream->stps_index; i < n_sample_partial_syncs; i++) {
            /* note that the first sample is index 1, not 0 */
            guint32 index;

            index = gst_byte_reader_get_uint32_be_unchecked (&stream->stps);

            if (G_LIKELY (index > 0 && index <= n_samples)) {
              index -= 1;
              samples[index].keyframe = TRUE;
              GST_DEBUG_OBJECT (qtdemux, "samples at %u is keyframe", index);
              /* and exit if we have enough samples */
              if (G_UNLIKELY (index >= n)) {
                i++;
                break;
              }
            }
          }
          /* save state */
          stream->stps_index = i;
        }
      }
    } else {
      /* no stss, all samples are keyframes */
      stream->all_keyframe = TRUE;
      GST_DEBUG_OBJECT (qtdemux, "setting all keyframes");
    }
  }

ctts:
  /* composition time to sample */
  if (stream->ctts_present == TRUE) {
    guint32 n_composition_times;
    guint32 ctts_count;
    gint32 ctts_soffset;

    /* Fill in the pts_offsets */
    cur = first;
    n_composition_times = stream->n_composition_times;

    for (i = stream->ctts_index; i < n_composition_times; i++) {
      if (stream->ctts_sample_index >= stream->ctts_count
          || !stream->ctts_sample_index) {
        stream->ctts_count =
            gst_byte_reader_get_uint32_be_unchecked (&stream->ctts);
        stream->ctts_soffset =
            gst_byte_reader_get_int32_be_unchecked (&stream->ctts);
        stream->ctts_sample_index = 0;
      }

      ctts_count = stream->ctts_count;
      ctts_soffset = stream->ctts_soffset;

      for (j = stream->ctts_sample_index; j < ctts_count; j++) {
        cur->pts_offset = ctts_soffset;
        cur++;

        if (G_UNLIKELY (cur > last)) {
          /* save state */
          stream->ctts_sample_index = j + 1;
          goto done;
        }
      }
      stream->ctts_sample_index = 0;
      stream->ctts_index++;
    }
  }
done:
  stream->stbl_index = n;
  /* if index has been completely parsed, free data that is no-longer needed */
  if (n + 1 == stream->n_samples) {
    gst_qtdemux_stbl_free (stream);
    GST_DEBUG_OBJECT (qtdemux, "parsed all available samples;");
    if (qtdemux->pullbased) {
      GST_DEBUG_OBJECT (qtdemux, "checking for more samples");
      /* pull-mode fragmented files: keep appending moof fragments until one
       * yields new samples or we hit EOS/error */
      while (n + 1 == stream->n_samples)
        if (qtdemux_add_fragmented_samples (qtdemux) != GST_FLOW_OK)
          break;
    }
  }
  GST_OBJECT_UNLOCK (qtdemux);

  return TRUE;

  /* SUCCESS */
already_parsed:
  {
    GST_LOG_OBJECT (qtdemux,
        "Tried to parse up to sample %u but this sample has already been parsed",
        n);
    /* if fragmented, there may be more */
    if (qtdemux->fragmented && n == stream->stbl_index)
      goto done;
    GST_OBJECT_UNLOCK (qtdemux);
    return TRUE;
  }
  /* ERRORS */
out_of_samples:
  {
    GST_LOG_OBJECT (qtdemux,
        "Tried to parse up to sample %u but there are only %u samples", n + 1,
        stream->n_samples);
    GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
        (_("This file is corrupt and cannot be played.")), (NULL));
    return FALSE;
  }
corrupt_file:
  {
    GST_OBJECT_UNLOCK (qtdemux);
    GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
        (_("This file is corrupt and cannot be played.")), (NULL));
    return FALSE;
  }
}
+
/* collect all segment info for @stream.
 *
 * Parses the edts/elst edit list of @trak into stream->segments. Handles
 * elst version 0 (32-bit fields) and version 1 (64-bit fields), empty edits
 * (media_time == all-ones), and zero-duration final edits. If no usable edit
 * list exists (or push mode cannot honour it), a single dummy segment
 * spanning the whole track is created. Always returns TRUE. */
static gboolean
qtdemux_parse_segments (GstQTDemux * qtdemux, QtDemuxStream * stream,
    GNode * trak)
{
  GNode *edts;
  /* accept edts if they contain gaps at start and there is only
   * one media segment */
  gboolean allow_pushbased_edts = TRUE;
  gint media_segments_count = 0;

  /* parse and prepare segment info from the edit list */
  GST_DEBUG_OBJECT (qtdemux, "looking for edit list container");
  stream->n_segments = 0;
  stream->segments = NULL;
  if ((edts = qtdemux_tree_get_child_by_type (trak, FOURCC_edts))) {
    GNode *elst;
    gint n_segments;
    gint segment_number, entry_size;
    guint64 time;
    GstClockTime stime;
    const guint8 *buffer;
    guint8 version;
    guint32 size;

    GST_DEBUG_OBJECT (qtdemux, "looking for edit list");
    if (!(elst = qtdemux_tree_get_child_by_type (edts, FOURCC_elst)))
      goto done;

    buffer = elst->data;

    size = QT_UINT32 (buffer);
    /* version, flags, n_segments */
    if (size < 16) {
      GST_WARNING_OBJECT (qtdemux, "Invalid edit list");
      goto done;
    }
    version = QT_UINT8 (buffer + 8);
    /* v1 entries carry 64-bit duration/media_time, v0 are 32-bit */
    entry_size = (version == 1) ? 20 : 12;

    n_segments = QT_UINT32 (buffer + 12);

    /* sanity-cap the entry count and verify the atom actually contains
     * that many entries */
    if (n_segments > 100000 || size < 16 + n_segments * entry_size) {
      GST_WARNING_OBJECT (qtdemux, "Invalid edit list");
      goto done;
    }

    /* we might allocate a bit too much, at least allocate 1 segment */
    stream->segments = g_new (QtDemuxSegment, MAX (n_segments, 1));

    /* segments always start from 0 */
    time = 0;
    stime = 0;
    buffer += 16;
    for (segment_number = 0; segment_number < n_segments; segment_number++) {
      guint64 duration;
      guint64 media_time;
      gboolean empty_edit = FALSE;
      QtDemuxSegment *segment;
      guint32 rate_int;
      GstClockTime media_start = GST_CLOCK_TIME_NONE;

      /* media_time of all-ones marks an "empty edit" (gap) */
      if (version == 1) {
        media_time = QT_UINT64 (buffer + 8);
        duration = QT_UINT64 (buffer);
        if (media_time == G_MAXUINT64)
          empty_edit = TRUE;
      } else {
        media_time = QT_UINT32 (buffer + 4);
        duration = QT_UINT32 (buffer);
        if (media_time == G_MAXUINT32)
          empty_edit = TRUE;
      }

      if (!empty_edit)
        media_start = QTSTREAMTIME_TO_GSTTIME (stream, media_time);

      segment = &stream->segments[segment_number];

      /* time and duration expressed in global timescale */
      segment->time = stime;
      if (duration != 0 || empty_edit) {
        /* edge case: empty edits with duration=zero are treated here.
         * (files should not have these anyway). */

        /* add non scaled values so we don't cause roundoff errors */
        time += duration;
        stime = QTTIME_TO_GSTTIME (qtdemux, time);
        segment->duration = stime - segment->time;
      } else {
        /* zero duration does not imply media_start == media_stop
         * but, only specify media_start. The edit ends with the track. */
        stime = segment->duration = GST_CLOCK_TIME_NONE;
        /* Don't allow more edits after this one. */
        n_segments = segment_number + 1;
      }
      segment->stop_time = stime;

      segment->trak_media_start = media_time;
      /* media_time expressed in stream timescale */
      if (!empty_edit) {
        segment->media_start = media_start;
        segment->media_stop = GST_CLOCK_TIME_IS_VALID (segment->duration)
            ? segment->media_start + segment->duration : GST_CLOCK_TIME_NONE;
        media_segments_count++;
      } else {
        segment->media_start = GST_CLOCK_TIME_NONE;
        segment->media_stop = GST_CLOCK_TIME_NONE;
      }
      /* playback rate stored as 16.16 fixed point */
      rate_int = QT_UINT32 (buffer + ((version == 1) ? 16 : 8));

      if (rate_int <= 1) {
        /* 0 is not allowed, some programs write 1 instead of the floating point
         * value */
        GST_WARNING_OBJECT (qtdemux, "found suspicious rate %" G_GUINT32_FORMAT,
            rate_int);
        segment->rate = 1;
      } else {
        segment->rate = rate_int / 65536.0;
      }

      GST_DEBUG_OBJECT (qtdemux, "created segment %d time %" GST_TIME_FORMAT
          ", duration %" GST_TIME_FORMAT ", media_start %" GST_TIME_FORMAT
          " (%" G_GUINT64_FORMAT ") , media_stop %" GST_TIME_FORMAT
          " stop_time %" GST_TIME_FORMAT " rate %g, (%d) timescale %u",
          segment_number, GST_TIME_ARGS (segment->time),
          GST_TIME_ARGS (segment->duration),
          GST_TIME_ARGS (segment->media_start), media_time,
          GST_TIME_ARGS (segment->media_stop),
          GST_TIME_ARGS (segment->stop_time), segment->rate, rate_int,
          stream->timescale);
      /* grow the declared movie duration if an edit extends past it */
      if (segment->stop_time > qtdemux->segment.stop &&
          !qtdemux->upstream_format_is_time) {
        GST_WARNING_OBJECT (qtdemux, "Segment %d "
            " extends to %" GST_TIME_FORMAT
            " past the end of the declared movie duration %" GST_TIME_FORMAT
            " movie segment will be extended", segment_number,
            GST_TIME_ARGS (segment->stop_time),
            GST_TIME_ARGS (qtdemux->segment.stop));
        qtdemux->segment.stop = qtdemux->segment.duration = segment->stop_time;
      }

      buffer += entry_size;
    }
    GST_DEBUG_OBJECT (qtdemux, "found %d segments", n_segments);
    stream->n_segments = n_segments;
    if (media_segments_count != 1)
      allow_pushbased_edts = FALSE;
  }
done:

  /* push based does not handle segments, so act accordingly here,
   * and warn if applicable */
  if (!qtdemux->pullbased && !allow_pushbased_edts) {
    GST_WARNING_OBJECT (qtdemux, "streaming; discarding edit list segments");
    /* remove and use default one below, we stream like it anyway */
    g_free (stream->segments);
    stream->segments = NULL;
    stream->n_segments = 0;
  }

  /* no segments, create one to play the complete trak */
  if (stream->n_segments == 0) {
    GstClockTime stream_duration =
        QTSTREAMTIME_TO_GSTTIME (stream, stream->duration);

    if (stream->segments == NULL)
      stream->segments = g_new (QtDemuxSegment, 1);

    /* represent unknown our way */
    if (stream_duration == 0)
      stream_duration = GST_CLOCK_TIME_NONE;

    stream->segments[0].time = 0;
    stream->segments[0].stop_time = stream_duration;
    stream->segments[0].duration = stream_duration;
    stream->segments[0].media_start = 0;
    stream->segments[0].media_stop = stream_duration;
    stream->segments[0].rate = 1.0;
    stream->segments[0].trak_media_start = 0;

    GST_DEBUG_OBJECT (qtdemux, "created dummy segment %" GST_TIME_FORMAT,
        GST_TIME_ARGS (stream_duration));
    stream->n_segments = 1;
    stream->dummy_segment = TRUE;
  }
  GST_DEBUG_OBJECT (qtdemux, "using %d segments", stream->n_segments);

  return TRUE;
}
+
+/*
+ * Parses the stsd atom of a svq3 trak looking for
+ * the SMI and gama atoms.
+ *
+ * On return, *gamma (if non-NULL) points into @stsd_entry_data at the gama
+ * payload, or NULL if not found; *seqh (if non-NULL) receives a newly
+ * allocated GstBuffer with the SEQH payload (caller owns it), or NULL.
+ */
+static void
+qtdemux_parse_svq3_stsd_data (GstQTDemux * qtdemux,
+    const guint8 * stsd_entry_data, const guint8 ** gamma, GstBuffer ** seqh)
+{
+  const guint8 *_gamma = NULL;
+  GstBuffer *_seqh = NULL;
+  const guint8 *stsd_data = stsd_entry_data;
+  guint32 length = QT_UINT32 (stsd_data);
+  guint16 version;
+
+  if (length < 32) {
+    GST_WARNING_OBJECT (qtdemux, "stsd too short");
+    goto end;
+  }
+
+  /* skip the sample entry header */
+  stsd_data += 16;
+  length -= 16;
+  version = QT_UINT16 (stsd_data);
+  if (version == 3) {
+    if (length >= 70) {
+      /* skip the fixed part of the v3 sample description, then walk the
+       * trailing child atoms */
+      length -= 70;
+      stsd_data += 70;
+      while (length > 8) {
+        guint32 fourcc, size;
+        const guint8 *data;
+        size = QT_UINT32 (stsd_data);
+        fourcc = QT_FOURCC (stsd_data + 4);
+        data = stsd_data + 8;
+
+        if (size == 0) {
+          GST_WARNING_OBJECT (qtdemux, "Atom of size 0 found, aborting "
+              "svq3 atom parsing");
+          goto end;
+        }
+
+        /* A truncated atom cannot be stepped over; the original code left
+         * stsd_data/length unchanged in that case, which made this loop
+         * spin forever on the same offset. Bail out instead. */
+        if (size > length) {
+          GST_WARNING_OBJECT (qtdemux, "Atom of size %" G_GUINT32_FORMAT
+              " extends past the %" G_GUINT32_FORMAT " remaining bytes, "
+              "aborting svq3 atom parsing", size, length);
+          goto end;
+        }
+
+        switch (fourcc) {
+          case FOURCC_gama:{
+            if (size == 12) {
+              _gamma = data;
+            } else {
+              GST_WARNING_OBJECT (qtdemux, "Unexpected size %" G_GUINT32_FORMAT
+                  " for gama atom, expected 12", size);
+            }
+            break;
+          }
+          case FOURCC_SMI_:{
+            if (size > 16 && QT_FOURCC (data) == FOURCC_SEQH) {
+              guint32 seqh_size;
+              if (_seqh != NULL) {
+                GST_WARNING_OBJECT (qtdemux, "Unexpected second SEQH SMI atom "
+                    " found, ignoring");
+              } else {
+                seqh_size = QT_UINT32 (data + 4);
+                if (seqh_size > 0) {
+                  _seqh = gst_buffer_new_and_alloc (seqh_size);
+                  gst_buffer_fill (_seqh, 0, data + 8, seqh_size);
+                }
+              }
+            }
+            break;
+          }
+          default:{
+            GST_WARNING_OBJECT (qtdemux, "Unhandled atom %" GST_FOURCC_FORMAT
+                " in SVQ3 entry in stsd atom", GST_FOURCC_ARGS (fourcc));
+          }
+        }
+
+        /* advance to the next child atom (size validated above) */
+        length -= size;
+        stsd_data += size;
+      }
+    } else {
+      GST_WARNING_OBJECT (qtdemux, "SVQ3 entry too short in stsd atom");
+    }
+  } else {
+    GST_WARNING_OBJECT (qtdemux, "Unexpected version for SVQ3 entry %"
+        G_GUINT16_FORMAT, version);
+    goto end;
+  }
+
+end:
+  if (gamma) {
+    *gamma = _gamma;
+  }
+  if (seqh) {
+    *seqh = _seqh;
+  } else if (_seqh) {
+    /* nobody took ownership of the SEQH buffer; don't leak it */
+    gst_buffer_unref (_seqh);
+  }
+}
+
+/* Extract an "rtsp://" URI from a 'hndl' data reference inside minf/dinf/dref.
+ *
+ * Walks dinf -> dref, iterates the dref entries looking for a 'hndl' atom,
+ * then scans the hndl payload for a 'data' atom whose contents start with
+ * "rtsp://" (see bug #597497; TN1195 describes the hndl layout).
+ *
+ * Returns a newly allocated URI string (caller frees with g_free), or NULL
+ * if no such URI was found or parsing failed.
+ */
+static gchar *
+qtdemux_get_rtsp_uri_from_hndl (GstQTDemux * qtdemux, GNode * minf)
+{
+  GNode *dinf;
+  GstByteReader dref;
+  gchar *uri = NULL;
+
+  /*
+   * Get 'dinf', to get its child 'dref', that might contain a 'hndl'
+   * atom that might contain a 'data' atom with the rtsp uri.
+   * This case was reported in bug #597497, some info about
+   * the hndl atom can be found in TN1195
+   */
+  dinf = qtdemux_tree_get_child_by_type (minf, FOURCC_dinf);
+  GST_DEBUG_OBJECT (qtdemux, "Trying to obtain rtsp URI for stream trak");
+
+  if (dinf) {
+    guint32 dref_num_entries = 0;
+    /* skip the 4-byte version/flags field, then read the entry count */
+    if (qtdemux_tree_get_child_by_type_full (dinf, FOURCC_dref, &dref) &&
+        gst_byte_reader_skip (&dref, 4) &&
+        gst_byte_reader_get_uint32_be (&dref, &dref_num_entries)) {
+      gint i;
+
+      /* search dref entries for hndl atom */
+      for (i = 0; i < dref_num_entries; i++) {
+        guint32 size = 0, type;
+        guint8 string_len = 0;
+        /* each dref entry starts with a size + fourcc header */
+        if (gst_byte_reader_get_uint32_be (&dref, &size) &&
+            qt_atom_parser_get_fourcc (&dref, &type)) {
+          if (type == FOURCC_hndl) {
+            GST_DEBUG_OBJECT (qtdemux, "Found hndl atom");
+
+            /* skip data reference handle bytes and the
+             * following pascal string and some extra 4
+             * bytes I have no idea what are */
+            if (!gst_byte_reader_skip (&dref, 4) ||
+                !gst_byte_reader_get_uint8 (&dref, &string_len) ||
+                !gst_byte_reader_skip (&dref, string_len + 4)) {
+              GST_WARNING_OBJECT (qtdemux, "Failed to parse hndl atom");
+              break;
+            }
+
+            /* iterate over the atoms to find the data atom */
+            while (gst_byte_reader_get_remaining (&dref) >= 8) {
+              guint32 atom_size;
+              guint32 atom_type;
+
+              if (gst_byte_reader_get_uint32_be (&dref, &atom_size) &&
+                  qt_atom_parser_get_fourcc (&dref, &atom_type)) {
+                if (atom_type == FOURCC_data) {
+                  const guint8 *uri_aux = NULL;
+
+                  /* found the data atom that might contain the rtsp uri */
+                  GST_DEBUG_OBJECT (qtdemux, "Found data atom inside "
+                      "hndl atom, interpreting it as an URI");
+                  /* NOTE(review): if atom_size < 8, atom_size - 8 wraps
+                   * around; the byte reader then rejects the huge request,
+                   * so this fails gracefully rather than overreading */
+                  if (gst_byte_reader_peek_data (&dref, atom_size - 8,
+                          &uri_aux)) {
+                    if (g_strstr_len ((gchar *) uri_aux, 7, "rtsp://") != NULL)
+                      uri = g_strndup ((gchar *) uri_aux, atom_size - 8);
+                    else
+                      GST_WARNING_OBJECT (qtdemux, "Data atom in hndl atom "
+                          "didn't contain a rtsp address");
+                  } else {
+                    GST_WARNING_OBJECT (qtdemux, "Failed to get the data "
+                        "atom contents");
+                  }
+                  break;
+                }
+                /* skipping to the next entry */
+                if (!gst_byte_reader_skip (&dref, atom_size - 8))
+                  break;
+              } else {
+                GST_WARNING_OBJECT (qtdemux, "Failed to parse hndl child "
+                    "atom header");
+                break;
+              }
+            }
+            /* hndl handled (successfully or not); stop scanning dref */
+            break;
+          }
+          /* skip to the next entry */
+          if (!gst_byte_reader_skip (&dref, size - 8))
+            break;
+        } else {
+          GST_WARNING_OBJECT (qtdemux, "Error parsing dref atom");
+        }
+      }
+      GST_DEBUG_OBJECT (qtdemux, "Finished parsing dref atom");
+    }
+  }
+  return uri;
+}
+
+#define AMR_NB_ALL_MODES 0x81ff
+#define AMR_WB_ALL_MODES 0x83ff
+/* Derive the maximum bitrate (in bit/s) of an AMR stream from its 'damr'
+ * atom. @buf must contain the whole damr atom; @wb selects AMR-WB (TRUE)
+ * vs AMR-NB (FALSE) bitrate tables. Returns 0 if the atom is malformed or
+ * carries no mode information. */
+static guint
+qtdemux_parse_amr_bitrate (GstBuffer * buf, gboolean wb)
+{
+  /* The 'damr' atom is of the form:
+   *
+   * | vendor | decoder_ver | mode_set | mode_change_period | frames/sample |
+   *    32 b       8 b          16 b           8 b                 8 b
+   *
+   * The highest set bit of the first 7 (AMR-NB) or 8 (AMR-WB) bits of mode_set
+   * represents the highest mode used in the stream (and thus the maximum
+   * bitrate), with a couple of special cases as seen below.
+   */
+
+  /* Map of frame type ID -> bitrate */
+  static const guint nb_bitrates[] = {
+    4750, 5150, 5900, 6700, 7400, 7950, 10200, 12200
+  };
+  static const guint wb_bitrates[] = {
+    6600, 8850, 12650, 14250, 15850, 18250, 19850, 23050, 23850
+  };
+  GstMapInfo map;
+  gsize max_mode;
+  guint16 mode_set;
+
+  gst_buffer_map (buf, &map, GST_MAP_READ);
+
+  if (map.size != 0x11) {
+    GST_DEBUG ("Atom should have size 0x11, not %" G_GSIZE_FORMAT, map.size);
+    goto bad_data;
+  }
+
+  if (QT_FOURCC (map.data + 4) != FOURCC_damr) {
+    GST_DEBUG ("Unknown atom in %" GST_FOURCC_FORMAT,
+        GST_FOURCC_ARGS (QT_UINT32 (map.data + 4)));
+    goto bad_data;
+  }
+
+  mode_set = QT_UINT16 (map.data + 13);
+
+  /* all-modes sentinel: highest mode is the last table entry */
+  if (mode_set == (wb ? AMR_WB_ALL_MODES : AMR_NB_ALL_MODES))
+    max_mode = 7 + (wb ? 1 : 0);
+  else
+    /* AMR-NB modes go from 0-7, and AMR-WB modes go from 0-8 */
+    max_mode = g_bit_nth_msf ((gulong) mode_set & (wb ? 0x1ff : 0xff), -1);
+
+  /* max_mode is unsigned (gsize); a -1 "no bit set" result from
+   * g_bit_nth_msf wraps to the all-ones value, which this comparison
+   * still matches after the usual arithmetic conversion of -1 */
+  if (max_mode == -1) {
+    GST_DEBUG ("No mode indication was found (mode set) = %x",
+        (guint) mode_set);
+    goto bad_data;
+  }
+
+  gst_buffer_unmap (buf, &map);
+  return wb ? wb_bitrates[max_mode] : nb_bitrates[max_mode];
+
+bad_data:
+  gst_buffer_unmap (buf, &map);
+  return 0;
+}
+
+/* Read the 9-entry 3x3 transformation matrix from @reader into @matrix.
+ * @atom names the containing atom, only used for debug output.
+ * Returns FALSE if fewer than 36 bytes remain in the reader. */
+static gboolean
+qtdemux_parse_transformation_matrix (GstQTDemux * qtdemux,
+    GstByteReader * reader, guint32 * matrix, const gchar * atom)
+{
+  /*
+   * 9 values of 32 bits (fixed point 16.16, except 2 5 and 8 that are 2.30)
+   * [0 1 2]
+   * [3 4 5]
+   * [6 7 8]
+   */
+  gint i;
+
+  if (gst_byte_reader_get_remaining (reader) < 36)
+    return FALSE;
+
+  for (i = 0; i < 9; i++)
+    matrix[i] = gst_byte_reader_get_uint32_be_unchecked (reader);
+
+  GST_DEBUG_OBJECT (qtdemux, "Transformation matrix from atom %s", atom);
+  /* Log each entry as integer.fraction of the 16.16 fixed-point value.
+   * The fractional mask is 0xFFFF for every entry (the original code
+   * wrongly masked some entries with 0xFF). */
+  GST_DEBUG_OBJECT (qtdemux, "%u.%u %u.%u %u.%u", matrix[0] >> 16,
+      matrix[0] & 0xFFFF, matrix[1] >> 16, matrix[1] & 0xFFFF, matrix[2] >> 16,
+      matrix[2] & 0xFFFF);
+  GST_DEBUG_OBJECT (qtdemux, "%u.%u %u.%u %u.%u", matrix[3] >> 16,
+      matrix[3] & 0xFFFF, matrix[4] >> 16, matrix[4] & 0xFFFF, matrix[5] >> 16,
+      matrix[5] & 0xFFFF);
+  GST_DEBUG_OBJECT (qtdemux, "%u.%u %u.%u %u.%u", matrix[6] >> 16,
+      matrix[6] & 0xFFFF, matrix[7] >> 16, matrix[7] & 0xFFFF, matrix[8] >> 16,
+      matrix[8] & 0xFFFF);
+
+  return TRUE;
+}
+
+/* Map a tkhd/mvhd transformation matrix onto a GST_TAG_IMAGE_ORIENTATION
+ * rotation tag. Only pure 90/180/270-degree rotations with a standard last
+ * column are recognized; anything else is logged as unhandled. Creates
+ * *taglist on demand. */
+static void
+qtdemux_inspect_transformation_matrix (GstQTDemux * qtdemux,
+    QtDemuxStream * stream, guint32 * matrix, GstTagList ** taglist)
+{
+
+/* [a b c]
+ * [d e f]
+ * [g h i]
+ *
+ * This macro will only compare value abdegh, it expects cfi to have already
+ * been checked
+ */
+#define QTCHECK_MATRIX(m,a,b,d,e) ((m)[0] == (a << 16) && (m)[1] == (b << 16) && \
+    (m)[3] == (d << 16) && (m)[4] == (e << 16))
+
+  /* only handle the cases where the last column has standard values
+   * (matrix[8] is 2.30 fixed point, so 1 << 30 is 1.0) */
+  if (matrix[2] == 0 && matrix[5] == 0 && matrix[8] == 1 << 30) {
+    const gchar *rotation_tag = NULL;
+
+    /* no rotation needed */
+    if (QTCHECK_MATRIX (matrix, 1, 0, 0, 1)) {
+      /* NOP */
+    } else if (QTCHECK_MATRIX (matrix, 0, 1, G_MAXUINT16, 0)) {
+      /* G_MAXUINT16 << 16 is the 16.16 bit pattern of -1.0 */
+      rotation_tag = "rotate-90";
+    } else if (QTCHECK_MATRIX (matrix, G_MAXUINT16, 0, 0, G_MAXUINT16)) {
+      rotation_tag = "rotate-180";
+    } else if (QTCHECK_MATRIX (matrix, 0, G_MAXUINT16, 1, 0)) {
+      rotation_tag = "rotate-270";
+    } else {
+      GST_FIXME_OBJECT (qtdemux, "Unhandled transformation matrix values");
+    }
+
+    GST_DEBUG_OBJECT (qtdemux, "Transformation matrix rotation %s",
+        GST_STR_NULL (rotation_tag));
+    if (rotation_tag != NULL) {
+      if (*taglist == NULL)
+        *taglist = gst_tag_list_new_empty ();
+      gst_tag_list_add (*taglist, GST_TAG_MERGE_REPLACE,
+          GST_TAG_IMAGE_ORIENTATION, rotation_tag, NULL);
+    }
+  } else {
+    GST_FIXME_OBJECT (qtdemux, "Unhandled transformation matrix values");
+  }
+}
+
+/* Parse the 'adrm' box of an 'aavd' (Apple audible DRM) sample entry.
+ * Stores the raw adrm payload in the stream's protection scheme info and
+ * sets @original_fmt to mp4a (the unencrypted format).
+ * Returns FALSE if the mandatory adrm box is absent. */
+static gboolean
+qtdemux_parse_protection_aavd (GstQTDemux * qtdemux,
+    QtDemuxStream * stream, GNode * container, guint32 * original_fmt)
+{
+  GNode *adrm;
+  guint32 adrm_size;
+  GstBuffer *adrm_buf = NULL;
+  QtDemuxAavdEncryptionInfo *info;
+
+  adrm = qtdemux_tree_get_child_by_type (container, FOURCC_adrm);
+  if (G_UNLIKELY (!adrm)) {
+    GST_ERROR_OBJECT (qtdemux, "aavd box does not contain mandatory adrm box");
+    return FALSE;
+  }
+  /* NOTE(review): adrm_size comes straight from the box header and is not
+   * validated against the actual node size before the memdup below —
+   * presumably the tree parser already bounded it; confirm */
+  adrm_size = QT_UINT32 (adrm->data);
+  adrm_buf = gst_buffer_new_memdup (adrm->data, adrm_size);
+
+  stream->protection_scheme_type = FOURCC_aavd;
+
+  /* allocate the per-stream info lazily; reuse it on re-parse */
+  if (!stream->protection_scheme_info)
+    stream->protection_scheme_info = g_new0 (QtDemuxAavdEncryptionInfo, 1);
+
+  info = (QtDemuxAavdEncryptionInfo *) stream->protection_scheme_info;
+
+  if (info->default_properties)
+    gst_structure_free (info->default_properties);
+  info->default_properties = gst_structure_new ("application/x-aavd",
+      "encrypted", G_TYPE_BOOLEAN, TRUE,
+      "adrm", GST_TYPE_BUFFER, adrm_buf, NULL);
+  gst_buffer_unref (adrm_buf);
+
+  *original_fmt = FOURCC_mp4a;
+  return TRUE;
+}
+
+/* Parses the boxes defined in ISO/IEC 14496-12 that enable support for
+ * protected streams (sinf, frma, schm and schi); if the protection scheme is
+ * Common Encryption (cenc), the function will also parse the tenc box (defined
+ * in ISO/IEC 23001-7). @container points to the node that contains these boxes
+ * (typically an enc[v|a|t|s] sample entry); the function will set
+ * @original_fmt to the fourcc of the original unencrypted stream format.
+ * Returns TRUE if successful; FALSE otherwise. */
+static gboolean
+qtdemux_parse_protection_scheme_info (GstQTDemux * qtdemux,
+    QtDemuxStream * stream, GNode * container, guint32 * original_fmt)
+{
+  GNode *sinf;
+  GNode *frma;
+  GNode *schm;
+  GNode *schi;
+  QtDemuxCencSampleSetInfo *info;
+  GNode *tenc;
+  const guint8 *tenc_data;
+
+  g_return_val_if_fail (qtdemux != NULL, FALSE);
+  g_return_val_if_fail (stream != NULL, FALSE);
+  g_return_val_if_fail (container != NULL, FALSE);
+  g_return_val_if_fail (original_fmt != NULL, FALSE);
+
+  sinf = qtdemux_tree_get_child_by_type (container, FOURCC_sinf);
+  if (G_UNLIKELY (!sinf)) {
+    if (stream->protection_scheme_type == FOURCC_cenc
+        || stream->protection_scheme_type == FOURCC_cbcs) {
+      /* fixed message: it is the sinf box itself that is missing here, not
+       * a schi child of it */
+      GST_ERROR_OBJECT (qtdemux, "sample entry does not contain sinf box, "
+          "which is mandatory for Common Encryption");
+      return FALSE;
+    }
+    /* non-CENC schemes may legitimately omit sinf */
+    return TRUE;
+  }
+
+  frma = qtdemux_tree_get_child_by_type (sinf, FOURCC_frma);
+  if (G_UNLIKELY (!frma)) {
+    GST_ERROR_OBJECT (qtdemux, "sinf box does not contain mandatory frma box");
+    return FALSE;
+  }
+
+  /* frma carries the fourcc of the original (unencrypted) sample entry */
+  *original_fmt = QT_FOURCC ((const guint8 *) frma->data + 8);
+  GST_DEBUG_OBJECT (qtdemux, "original stream format: '%" GST_FOURCC_FORMAT "'",
+      GST_FOURCC_ARGS (*original_fmt));
+
+  schm = qtdemux_tree_get_child_by_type (sinf, FOURCC_schm);
+  if (!schm) {
+    GST_DEBUG_OBJECT (qtdemux, "sinf box does not contain schm box");
+    return FALSE;
+  }
+  stream->protection_scheme_type = QT_FOURCC ((const guint8 *) schm->data + 12);
+  stream->protection_scheme_version =
+      QT_UINT32 ((const guint8 *) schm->data + 16);
+
+  GST_DEBUG_OBJECT (qtdemux,
+      "protection_scheme_type: %" GST_FOURCC_FORMAT ", "
+      "protection_scheme_version: %#010x",
+      GST_FOURCC_ARGS (stream->protection_scheme_type),
+      stream->protection_scheme_version);
+
+  schi = qtdemux_tree_get_child_by_type (sinf, FOURCC_schi);
+  if (!schi) {
+    GST_DEBUG_OBJECT (qtdemux, "sinf box does not contain schi box");
+    return FALSE;
+  }
+  if (stream->protection_scheme_type != FOURCC_cenc &&
+      stream->protection_scheme_type != FOURCC_piff &&
+      stream->protection_scheme_type != FOURCC_cbcs) {
+    GST_ERROR_OBJECT (qtdemux,
+        "Invalid protection_scheme_type: %" GST_FOURCC_FORMAT,
+        GST_FOURCC_ARGS (stream->protection_scheme_type));
+    return FALSE;
+  }
+
+  if (G_UNLIKELY (!stream->protection_scheme_info))
+    stream->protection_scheme_info =
+        g_malloc0 (sizeof (QtDemuxCencSampleSetInfo));
+
+  info = (QtDemuxCencSampleSetInfo *) stream->protection_scheme_info;
+
+  if (stream->protection_scheme_type == FOURCC_cenc
+      || stream->protection_scheme_type == FOURCC_cbcs) {
+    guint8 is_encrypted;
+    guint8 iv_size;
+    guint8 constant_iv_size = 0;
+    const guint8 *default_kid;
+    guint8 crypt_byte_block = 0;
+    guint8 skip_byte_block = 0;
+    const guint8 *constant_iv = NULL;
+
+    tenc = qtdemux_tree_get_child_by_type (schi, FOURCC_tenc);
+    if (!tenc) {
+      GST_ERROR_OBJECT (qtdemux, "schi box does not contain tenc box, "
+          "which is mandatory for Common Encryption");
+      return FALSE;
+    }
+    /* tenc payload starts after the 8-byte box header + 4-byte fullbox
+     * version/flags */
+    tenc_data = (const guint8 *) tenc->data + 12;
+    is_encrypted = QT_UINT8 (tenc_data + 2);
+    iv_size = QT_UINT8 (tenc_data + 3);
+    default_kid = (tenc_data + 4);
+    if (stream->protection_scheme_type == FOURCC_cbcs) {
+      guint8 possible_pattern_info;
+      /* cbcs with per-sample IV size 0 uses a constant IV instead */
+      if (iv_size == 0) {
+        constant_iv_size = QT_UINT8 (tenc_data + 20);
+        if (constant_iv_size != 8 && constant_iv_size != 16) {
+          GST_ERROR_OBJECT (qtdemux,
+              "constant IV size should be 8 or 16, not %hhu", constant_iv_size);
+          return FALSE;
+        }
+        constant_iv = (tenc_data + 21);
+      }
+      /* crypt/skip byte blocks for pattern encryption (cbcs) */
+      possible_pattern_info = QT_UINT8 (tenc_data + 1);
+      crypt_byte_block = (possible_pattern_info >> 4) & 0x0f;
+      skip_byte_block = possible_pattern_info & 0x0f;
+    }
+    qtdemux_update_default_sample_cenc_settings (qtdemux, info,
+        is_encrypted, stream->protection_scheme_type, iv_size, default_kid,
+        crypt_byte_block, skip_byte_block, constant_iv_size, constant_iv);
+  } else if (stream->protection_scheme_type == FOURCC_piff) {
+    GstByteReader br;
+    /* PIFF carries its track encryption box as a uuid box with this id */
+    static const guint8 piff_track_encryption_uuid[] = {
+      0x89, 0x74, 0xdb, 0xce, 0x7b, 0xe7, 0x4c, 0x51,
+      0x84, 0xf9, 0x71, 0x48, 0xf9, 0x88, 0x25, 0x54
+    };
+
+    tenc = qtdemux_tree_get_child_by_type (schi, FOURCC_uuid);
+    if (!tenc) {
+      GST_ERROR_OBJECT (qtdemux, "schi box does not contain tenc box, "
+          "which is mandatory for Common Encryption");
+      return FALSE;
+    }
+
+    tenc_data = (const guint8 *) tenc->data + 8;
+    if (memcmp (tenc_data, piff_track_encryption_uuid, 16) != 0) {
+      gchar *box_uuid = qtdemux_uuid_bytes_to_string (tenc_data);
+      GST_ERROR_OBJECT (qtdemux,
+          "Unsupported track encryption box with uuid: %s", box_uuid);
+      g_free (box_uuid);
+      return FALSE;
+    }
+    /* skip box header (8), uuid (16), then fullbox version/flags (4);
+     * the PIFF tenc payload proper is 20 bytes */
+    tenc_data = (const guint8 *) tenc->data + 16 + 12;
+    gst_byte_reader_init (&br, tenc_data, 20);
+    if (!qtdemux_update_default_piff_encryption_settings (qtdemux, info, &br)) {
+      GST_ERROR_OBJECT (qtdemux, "PIFF track box parsing error");
+      return FALSE;
+    }
+    /* from here on PIFF is handled exactly like cenc */
+    stream->protection_scheme_type = FOURCC_cenc;
+  }
+
+  return TRUE;
+}
+
+/* GCompareFunc-style comparator ordering streams by ascending track_id.
+ * Uses an explicit three-way comparison instead of subtraction: track_id
+ * is a guint32, and casting two large ids to gint and subtracting can
+ * overflow (undefined behavior) and yield the wrong sign. */
+static gint
+qtdemux_track_id_compare_func (QtDemuxStream ** stream1,
+    QtDemuxStream ** stream2)
+{
+  guint32 id1 = (*stream1)->track_id;
+  guint32 id2 = (*stream2)->track_id;
+
+  if (id1 < id2)
+    return -1;
+  return (id1 > id2) ? 1 : 0;
+}
+
+/* Parse the 'svmi' (stereoscopic video media information) atom from stbl,
+ * if present, and fill in the stream's multiview mode/flags accordingly
+ * (MPEG-A stereo video, ISO/IEC 23000-11).
+ * Returns FALSE only when a version-0 svmi atom is too short to parse. */
+static gboolean
+qtdemux_parse_stereo_svmi_atom (GstQTDemux * qtdemux, QtDemuxStream * stream,
+    GNode * stbl)
+{
+  GNode *svmi;
+
+  /*parse svmi header if existing */
+  svmi = qtdemux_tree_get_child_by_type (stbl, FOURCC_svmi);
+  if (svmi) {
+    guint len = QT_UINT32 ((guint8 *) svmi->data);
+    guint32 version = QT_UINT32 ((guint8 *) svmi->data + 8);
+    /* only version 0 (including flags in the same word) is understood */
+    if (!version) {
+      GstVideoMultiviewMode mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+      GstVideoMultiviewFlags flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+      guint8 frame_type, frame_layout;
+      guint32 stereo_mono_change_count;
+
+      /* need bytes 12..17 below */
+      if (len < 18)
+        return FALSE;
+
+      /* MPEG-A stereo video */
+      if (qtdemux->major_brand == FOURCC_ss02)
+        flags |= GST_VIDEO_MULTIVIEW_FLAGS_MIXED_MONO;
+
+      frame_type = QT_UINT8 ((guint8 *) svmi->data + 12);
+      frame_layout = QT_UINT8 ((guint8 *) svmi->data + 13) & 0x01;
+      stereo_mono_change_count = QT_UINT32 ((guint8 *) svmi->data + 14);
+
+      switch (frame_type) {
+        case 0:
+          mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
+          break;
+        case 1:
+          mode = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
+          break;
+        case 2:
+          mode = GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
+          break;
+        case 3:
+          /* mode 3 is primary/secondary view sequence, ie
+           * left/right views in separate tracks. See section 7.2
+           * of ISO/IEC 23000-11:2009 */
+          /* In the future this might be supported using related
+           * streams, like an enhancement track - if files like this
+           * ever exist */
+          GST_FIXME_OBJECT (qtdemux,
+              "Implement stereo video in separate streams");
+          /* no break needed: last case, mode stays NONE */
+      }
+
+      if ((frame_layout & 0x1) == 0)
+        flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+
+      GST_LOG_OBJECT (qtdemux,
+          "StereoVideo: composition type: %u, is_left_first: %u",
+          frame_type, frame_layout);
+
+      if (stereo_mono_change_count > 1) {
+        GST_FIXME_OBJECT (qtdemux,
+            "Mixed-mono flags are not yet supported in qtdemux.");
+      }
+
+      stream->multiview_mode = mode;
+      stream->multiview_flags = flags;
+    }
+  }
+
+  return TRUE;
+}
+
+/* parse the traks.
+ * With each track we associate a new QtDemuxStream that contains all the info
+ * about the trak.
+ * traks that do not decode to something (like strm traks) will not have a pad.
+ */
+static gboolean
+qtdemux_parse_trak (GstQTDemux * qtdemux, GNode * trak)
+{
+ GstByteReader tkhd;
+ int offset;
+ GNode *mdia;
+ GNode *mdhd;
+ GNode *hdlr;
+ GNode *minf;
+ GNode *stbl;
+ GNode *stsd;
+ GNode *mp4a;
+ GNode *mp4v;
+ GNode *esds;
+ GNode *tref;
+ GNode *udta;
+
+ QtDemuxStream *stream = NULL;
+ const guint8 *stsd_data;
+ const guint8 *stsd_entry_data;
+ guint remaining_stsd_len;
+ guint stsd_entry_count;
+ guint stsd_index;
+ guint16 lang_code; /* quicktime lang code or packed iso code */
+ guint32 version;
+ guint32 tkhd_flags = 0;
+ guint8 tkhd_version = 0;
+ guint32 w = 0, h = 0;
+ guint value_size, stsd_len, len;
+ guint32 track_id;
+ guint32 dummy;
+
+ GST_DEBUG_OBJECT (qtdemux, "parse_trak");
+
+ if (!qtdemux_tree_get_child_by_type_full (trak, FOURCC_tkhd, &tkhd)
+ || !gst_byte_reader_get_uint8 (&tkhd, &tkhd_version)
+ || !gst_byte_reader_get_uint24_be (&tkhd, &tkhd_flags))
+ goto corrupt_file;
+
+ /* pick between 64 or 32 bits */
+ value_size = tkhd_version == 1 ? 8 : 4;
+ if (!gst_byte_reader_skip (&tkhd, value_size * 2) ||
+ !gst_byte_reader_get_uint32_be (&tkhd, &track_id))
+ goto corrupt_file;
+
+ /* Check if current moov has duplicated track_id */
+ if (qtdemux_find_stream (qtdemux, track_id))
+ goto existing_stream;
+
+ stream = _create_stream (qtdemux, track_id);
+ stream->stream_tags = gst_tag_list_make_writable (stream->stream_tags);
+
+ /* need defaults for fragments */
+ qtdemux_parse_trex (qtdemux, stream, &dummy, &dummy, &dummy);
+
+ if ((tkhd_flags & 1) == 0)
+ stream->disabled = TRUE;
+
+ GST_LOG_OBJECT (qtdemux, "track[tkhd] version/flags/id: 0x%02x/%06x/%u",
+ tkhd_version, tkhd_flags, stream->track_id);
+
+ if (!(mdia = qtdemux_tree_get_child_by_type (trak, FOURCC_mdia)))
+ goto corrupt_file;
+
+ if (!(mdhd = qtdemux_tree_get_child_by_type (mdia, FOURCC_mdhd))) {
+ /* be nice for some crooked mjp2 files that use mhdr for mdhd */
+ if (qtdemux->major_brand != FOURCC_mjp2 ||
+ !(mdhd = qtdemux_tree_get_child_by_type (mdia, FOURCC_mhdr)))
+ goto corrupt_file;
+ }
+
+ len = QT_UINT32 ((guint8 *) mdhd->data);
+ version = QT_UINT32 ((guint8 *) mdhd->data + 8);
+ GST_LOG_OBJECT (qtdemux, "track version/flags: %08x", version);
+ if (version == 0x01000000) {
+ if (len < 42)
+ goto corrupt_file;
+ stream->timescale = QT_UINT32 ((guint8 *) mdhd->data + 28);
+ stream->duration = QT_UINT64 ((guint8 *) mdhd->data + 32);
+ lang_code = QT_UINT16 ((guint8 *) mdhd->data + 40);
+ } else {
+ if (len < 30)
+ goto corrupt_file;
+ stream->timescale = QT_UINT32 ((guint8 *) mdhd->data + 20);
+ stream->duration = QT_UINT32 ((guint8 *) mdhd->data + 24);
+ lang_code = QT_UINT16 ((guint8 *) mdhd->data + 28);
+ }
+
+ if (lang_code < 0x400) {
+ qtdemux_lang_map_qt_code_to_iso (stream->lang_id, lang_code);
+ } else if (lang_code == 0x7fff) {
+ stream->lang_id[0] = 0; /* unspecified */
+ } else {
+ stream->lang_id[0] = 0x60 + ((lang_code >> 10) & 0x1F);
+ stream->lang_id[1] = 0x60 + ((lang_code >> 5) & 0x1F);
+ stream->lang_id[2] = 0x60 + (lang_code & 0x1F);
+ stream->lang_id[3] = 0;
+ }
+
+ GST_LOG_OBJECT (qtdemux, "track timescale: %" G_GUINT32_FORMAT,
+ stream->timescale);
+ GST_LOG_OBJECT (qtdemux, "track duration: %" G_GUINT64_FORMAT,
+ stream->duration);
+ GST_LOG_OBJECT (qtdemux, "track language code/id: 0x%04x/%s",
+ lang_code, stream->lang_id);
+
+ if (G_UNLIKELY (stream->timescale == 0 || qtdemux->timescale == 0))
+ goto corrupt_file;
+
+ if ((tref = qtdemux_tree_get_child_by_type (trak, FOURCC_tref))) {
+ /* chapters track reference */
+ GNode *chap = qtdemux_tree_get_child_by_type (tref, FOURCC_chap);
+ if (chap) {
+ gsize length = GST_READ_UINT32_BE (chap->data);
+ if (qtdemux->chapters_track_id)
+ GST_FIXME_OBJECT (qtdemux, "Multiple CHAP tracks");
+
+ if (length >= 12) {
+ qtdemux->chapters_track_id =
+ GST_READ_UINT32_BE ((gint8 *) chap->data + 8);
+ }
+ }
+ }
+
+ /* fragmented files may have bogus duration in moov */
+ if (!qtdemux->fragmented &&
+ qtdemux->duration != G_MAXINT64 && stream->duration != G_MAXINT32) {
+ guint64 tdur1, tdur2;
+
+ /* don't overflow */
+ tdur1 = stream->timescale * (guint64) qtdemux->duration;
+ tdur2 = qtdemux->timescale * (guint64) stream->duration;
+
+ /* HACK:
+ * some of those trailers, nowadays, have prologue images that are
+ * themselves video tracks as well. I haven't really found a way to
+ * identify those yet, except for just looking at their duration. */
+ if (tdur1 != 0 && (tdur2 * 10 / tdur1) < 2) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Track shorter than 20%% (%" G_GUINT64_FORMAT "/%" G_GUINT32_FORMAT
+ " vs. %" G_GUINT64_FORMAT "/%" G_GUINT32_FORMAT ") of the stream "
+ "found, assuming preview image or something; skipping track",
+ stream->duration, stream->timescale, qtdemux->duration,
+ qtdemux->timescale);
+ gst_qtdemux_stream_unref (stream);
+ return TRUE;
+ }
+ }
+
+ if (!(hdlr = qtdemux_tree_get_child_by_type (mdia, FOURCC_hdlr)))
+ goto corrupt_file;
+
+ GST_LOG_OBJECT (qtdemux, "track type: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (QT_FOURCC ((guint8 *) hdlr->data + 12)));
+
+ len = QT_UINT32 ((guint8 *) hdlr->data);
+ if (len >= 20)
+ stream->subtype = QT_FOURCC ((guint8 *) hdlr->data + 16);
+ GST_LOG_OBJECT (qtdemux, "track subtype: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->subtype));
+
+ if (!(minf = qtdemux_tree_get_child_by_type (mdia, FOURCC_minf)))
+ goto corrupt_file;
+
+ if (!(stbl = qtdemux_tree_get_child_by_type (minf, FOURCC_stbl)))
+ goto corrupt_file;
+
+ /* Parse out svmi (and later st3d/sv3d) atoms */
+ if (!qtdemux_parse_stereo_svmi_atom (qtdemux, stream, stbl))
+ goto corrupt_file;
+
+ /* parse rest of tkhd */
+ if (stream->subtype == FOURCC_vide) {
+ guint32 matrix[9];
+
+ /* version 1 uses some 64-bit ints */
+ if (!gst_byte_reader_skip (&tkhd, 20 + value_size))
+ goto corrupt_file;
+
+ if (!qtdemux_parse_transformation_matrix (qtdemux, &tkhd, matrix, "tkhd"))
+ goto corrupt_file;
+
+ if (!gst_byte_reader_get_uint32_be (&tkhd, &w)
+ || !gst_byte_reader_get_uint32_be (&tkhd, &h))
+ goto corrupt_file;
+
+ qtdemux_inspect_transformation_matrix (qtdemux, stream, matrix,
+ &stream->stream_tags);
+ }
+
+ /* parse stsd */
+ if (!(stsd = qtdemux_tree_get_child_by_type (stbl, FOURCC_stsd)))
+ goto corrupt_file;
+ stsd_data = (const guint8 *) stsd->data;
+
+ /* stsd should at least have one entry */
+ stsd_len = QT_UINT32 (stsd_data);
+ if (stsd_len < 24) {
+ /* .. but skip stream with empty stsd produced by some Vivotek cameras */
+ if (stream->subtype == FOURCC_vivo) {
+ gst_qtdemux_stream_unref (stream);
+ return TRUE;
+ } else {
+ goto corrupt_file;
+ }
+ }
+
+ stream->stsd_entries_length = stsd_entry_count = QT_UINT32 (stsd_data + 12);
+ /* each stsd entry must contain at least 8 bytes */
+ if (stream->stsd_entries_length == 0
+ || stream->stsd_entries_length > stsd_len / 8) {
+ stream->stsd_entries_length = 0;
+ goto corrupt_file;
+ }
+ stream->stsd_entries = g_new0 (QtDemuxStreamStsdEntry, stsd_entry_count);
+ GST_LOG_OBJECT (qtdemux, "stsd len: %d", stsd_len);
+ GST_LOG_OBJECT (qtdemux, "stsd entry count: %u", stsd_entry_count);
+
+ stsd_entry_data = stsd_data + 16;
+ remaining_stsd_len = stsd_len - 16;
+ for (stsd_index = 0; stsd_index < stsd_entry_count; stsd_index++) {
+ guint32 fourcc;
+ gchar *codec = NULL;
+ QtDemuxStreamStsdEntry *entry = &stream->stsd_entries[stsd_index];
+
+ /* and that entry should fit within stsd */
+ len = QT_UINT32 (stsd_entry_data);
+ if (len > remaining_stsd_len)
+ goto corrupt_file;
+
+ entry->fourcc = fourcc = QT_FOURCC (stsd_entry_data + 4);
+ GST_LOG_OBJECT (qtdemux, "stsd type: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (entry->fourcc));
+ GST_LOG_OBJECT (qtdemux, "stsd type len: %d", len);
+
+ if ((fourcc == FOURCC_drms) || (fourcc == FOURCC_drmi))
+ goto error_encrypted;
+
+ if (fourcc == FOURCC_aavd) {
+ if (stream->subtype != FOURCC_soun) {
+ GST_ERROR_OBJECT (qtdemux,
+ "Unexpeced stsd type 'aavd' outside 'soun' track");
+ } else {
+ /* encrypted audio with sound sample description v0 */
+ GNode *enc = qtdemux_tree_get_child_by_type (stsd, fourcc);
+ stream->protected = TRUE;
+ if (!qtdemux_parse_protection_aavd (qtdemux, stream, enc, &fourcc))
+ GST_ERROR_OBJECT (qtdemux, "Failed to parse protection scheme info");
+ }
+ }
+
+ if (fourcc == FOURCC_encv || fourcc == FOURCC_enca) {
+ /* FIXME this looks wrong, there might be multiple children
+ * with the same type */
+ GNode *enc = qtdemux_tree_get_child_by_type (stsd, fourcc);
+ stream->protected = TRUE;
+ if (!qtdemux_parse_protection_scheme_info (qtdemux, stream, enc, &fourcc))
+ GST_ERROR_OBJECT (qtdemux, "Failed to parse protection scheme info");
+ }
+
+ if (stream->subtype == FOURCC_vide) {
+ GNode *colr;
+ GNode *fiel;
+ GNode *pasp;
+ gboolean gray;
+ gint depth, palette_size, palette_count;
+ guint32 *palette_data = NULL;
+
+ entry->sampled = TRUE;
+
+ stream->display_width = w >> 16;
+ stream->display_height = h >> 16;
+
+ offset = 16;
+ if (len < 86) /* TODO verify */
+ goto corrupt_file;
+
+ entry->width = QT_UINT16 (stsd_entry_data + offset + 16);
+ entry->height = QT_UINT16 (stsd_entry_data + offset + 18);
+ entry->fps_n = 0; /* this is filled in later */
+ entry->fps_d = 0; /* this is filled in later */
+ entry->bits_per_sample = QT_UINT16 (stsd_entry_data + offset + 66);
+ entry->color_table_id = QT_UINT16 (stsd_entry_data + offset + 68);
+
+ /* if color_table_id is 0, ctab atom must follow; however some files
+ * produced by TMPEGEnc have color_table_id = 0 and no ctab atom, so
+ * if color table is not present we'll correct the value */
+ if (entry->color_table_id == 0 &&
+ (len < 90
+ || QT_FOURCC (stsd_entry_data + offset + 70) != FOURCC_ctab)) {
+ entry->color_table_id = -1;
+ }
+
+ GST_LOG_OBJECT (qtdemux, "width %d, height %d, bps %d, color table id %d",
+ entry->width, entry->height, entry->bits_per_sample,
+ entry->color_table_id);
+
+ depth = entry->bits_per_sample;
+
+ /* more than 32 bits means grayscale */
+ gray = (depth > 32);
+ /* low 32 bits specify the depth */
+ depth &= 0x1F;
+
+ /* different number of palette entries is determined by depth. */
+ palette_count = 0;
+ if ((depth == 1) || (depth == 2) || (depth == 4) || (depth == 8))
+ palette_count = (1 << depth);
+ palette_size = palette_count * 4;
+
+ if (entry->color_table_id) {
+ switch (palette_count) {
+ case 0:
+ break;
+ case 2:
+ palette_data = g_memdup2 (ff_qt_default_palette_2, palette_size);
+ break;
+ case 4:
+ palette_data = g_memdup2 (ff_qt_default_palette_4, palette_size);
+ break;
+ case 16:
+ if (gray)
+ palette_data =
+ g_memdup2 (ff_qt_grayscale_palette_16, palette_size);
+ else
+ palette_data = g_memdup2 (ff_qt_default_palette_16, palette_size);
+ break;
+ case 256:
+ if (gray)
+ palette_data =
+ g_memdup2 (ff_qt_grayscale_palette_256, palette_size);
+ else
+ palette_data =
+ g_memdup2 (ff_qt_default_palette_256, palette_size);
+ break;
+ default:
+ GST_ELEMENT_WARNING (qtdemux, STREAM, DEMUX,
+ (_("The video in this file might not play correctly.")),
+ ("unsupported palette depth %d", depth));
+ break;
+ }
+ } else {
+ gint i, j, start, end;
+
+ if (len < 94)
+ goto corrupt_file;
+
+ /* read table */
+ start = QT_UINT32 (stsd_entry_data + offset + 70);
+ palette_count = QT_UINT16 (stsd_entry_data + offset + 74);
+ end = QT_UINT16 (stsd_entry_data + offset + 76);
+
+ GST_LOG_OBJECT (qtdemux, "start %d, end %d, palette_count %d",
+ start, end, palette_count);
+
+ if (end > 255)
+ end = 255;
+ if (start > end)
+ start = end;
+
+ if (len < 94 + (end - start) * 8)
+ goto corrupt_file;
+
+ /* palette is always the same size */
+ palette_data = g_malloc0 (256 * 4);
+ palette_size = 256 * 4;
+
+ for (j = 0, i = start; i <= end; j++, i++) {
+ guint32 a, r, g, b;
+
+ a = QT_UINT16 (stsd_entry_data + offset + 78 + (j * 8));
+ r = QT_UINT16 (stsd_entry_data + offset + 80 + (j * 8));
+ g = QT_UINT16 (stsd_entry_data + offset + 82 + (j * 8));
+ b = QT_UINT16 (stsd_entry_data + offset + 84 + (j * 8));
+
+ palette_data[i] = ((a & 0xff00) << 16) | ((r & 0xff00) << 8) |
+ (g & 0xff00) | (b >> 8);
+ }
+ }
+
+ if (entry->caps)
+ gst_caps_unref (entry->caps);
+
+ entry->caps =
+ qtdemux_video_caps (qtdemux, stream, entry, fourcc, stsd_entry_data,
+ &codec);
+ if (G_UNLIKELY (!entry->caps)) {
+ g_free (palette_data);
+ goto unknown_stream;
+ }
+
+ if (codec) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_VIDEO_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+ }
+
+ if (palette_data) {
+ GstStructure *s;
+
+ if (entry->rgb8_palette)
+ gst_memory_unref (entry->rgb8_palette);
+ entry->rgb8_palette = gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY,
+ palette_data, palette_size, 0, palette_size, palette_data, g_free);
+
+ s = gst_caps_get_structure (entry->caps, 0);
+
+ /* non-raw video has a palette_data property. raw video has the palette as
+ * an extra plane that we append to the output buffers before we push
+ * them*/
+ if (!gst_structure_has_name (s, "video/x-raw")) {
+ GstBuffer *palette;
+
+ palette = gst_buffer_new ();
+ gst_buffer_append_memory (palette, entry->rgb8_palette);
+ entry->rgb8_palette = NULL;
+
+ gst_caps_set_simple (entry->caps, "palette_data",
+ GST_TYPE_BUFFER, palette, NULL);
+ gst_buffer_unref (palette);
+ }
+ } else if (palette_count != 0) {
+ GST_ELEMENT_WARNING (qtdemux, STREAM, NOT_IMPLEMENTED,
+ (NULL), ("Unsupported palette depth %d", depth));
+ }
+
+ GST_LOG_OBJECT (qtdemux, "frame count: %u",
+ QT_UINT16 (stsd_entry_data + offset + 32));
+
+ esds = NULL;
+ pasp = NULL;
+ colr = NULL;
+ fiel = NULL;
+ /* pick 'the' stsd child */
+ mp4v = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ // We should skip parsing the stsd for non-protected streams if
+ // the entry doesn't match the fourcc, since they don't change
+ // format. However, for protected streams we can have partial
+ // encryption, where parts of the stream are encrypted and parts
+ // not. For both parts of such streams, we should ensure the
+ // esds overrides are parsed for both from the stsd.
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4v) != fourcc) {
+ if (stream->protected && QTDEMUX_TREE_NODE_FOURCC (mp4v) != FOURCC_encv)
+ mp4v = NULL;
+ else if (!stream->protected)
+ mp4v = NULL;
+ }
+
+ if (mp4v) {
+ esds = qtdemux_tree_get_child_by_type (mp4v, FOURCC_esds);
+ pasp = qtdemux_tree_get_child_by_type (mp4v, FOURCC_pasp);
+ colr = qtdemux_tree_get_child_by_type (mp4v, FOURCC_colr);
+ fiel = qtdemux_tree_get_child_by_type (mp4v, FOURCC_fiel);
+ }
+
+ if (pasp) {
+ const guint8 *pasp_data = (const guint8 *) pasp->data;
+ gint len = QT_UINT32 (pasp_data);
+
+ if (len == 16) {
+ CUR_STREAM (stream)->par_w = QT_UINT32 (pasp_data + 8);
+ CUR_STREAM (stream)->par_h = QT_UINT32 (pasp_data + 12);
+ } else {
+ CUR_STREAM (stream)->par_w = 0;
+ CUR_STREAM (stream)->par_h = 0;
+ }
+ } else {
+ CUR_STREAM (stream)->par_w = 0;
+ CUR_STREAM (stream)->par_h = 0;
+ }
+
+ if (fiel) {
+ const guint8 *fiel_data = (const guint8 *) fiel->data;
+ gint len = QT_UINT32 (fiel_data);
+
+ if (len == 10) {
+ CUR_STREAM (stream)->interlace_mode = GST_READ_UINT8 (fiel_data + 8);
+ CUR_STREAM (stream)->field_order = GST_READ_UINT8 (fiel_data + 9);
+ }
+ }
+
+ if (colr) {
+ const guint8 *colr_data = (const guint8 *) colr->data;
+ gint len = QT_UINT32 (colr_data);
+
+ if (len == 19 || len == 18) {
+ guint32 color_type = GST_READ_UINT32_LE (colr_data + 8);
+
+ if (color_type == FOURCC_nclx || color_type == FOURCC_nclc) {
+ guint16 primaries = GST_READ_UINT16_BE (colr_data + 12);
+ guint16 transfer_function = GST_READ_UINT16_BE (colr_data + 14);
+ guint16 matrix = GST_READ_UINT16_BE (colr_data + 16);
+ gboolean full_range = len == 19 ? colr_data[17] >> 7 : FALSE;
+
+ CUR_STREAM (stream)->colorimetry.primaries =
+ gst_video_color_primaries_from_iso (primaries);
+ CUR_STREAM (stream)->colorimetry.transfer =
+ gst_video_transfer_function_from_iso (transfer_function);
+ CUR_STREAM (stream)->colorimetry.matrix =
+ gst_video_color_matrix_from_iso (matrix);
+ CUR_STREAM (stream)->colorimetry.range =
+ full_range ? GST_VIDEO_COLOR_RANGE_0_255 :
+ GST_VIDEO_COLOR_RANGE_16_235;
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "Unsupported color type");
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "Invalid colr atom size");
+ }
+ }
+
+ if (esds) {
+ gst_qtdemux_handle_esds (qtdemux, stream, entry, esds,
+ stream->stream_tags);
+ } else {
+ switch (fourcc) {
+ case FOURCC_H264:
+ case FOURCC_avc1:
+ case FOURCC_avc3:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *avc_data = stsd_entry_data + 0x56;
+
+ /* find avcC */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (avc_data) <= len)
+ size = QT_UINT32 (avc_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (avc_data + 0x4)) {
+ case FOURCC_avcC:
+ {
+ /* parse, if found */
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found avcC codec_data in stsd");
+
+ /* First 4 bytes are the length of the atom, the next 4 bytes
+ * are the fourcc, the next 1 byte is the version, and the
+ * subsequent bytes are profile_tier_level structure like data. */
+ gst_codec_utils_h264_caps_set_level_and_profile (entry->caps,
+ avc_data + 8 + 1, size - 1);
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, avc_data + 0x8, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+
+ break;
+ }
+ case FOURCC_strf:
+ {
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found strf codec_data in stsd");
+
+ /* First 4 bytes are the length of the atom, the next 4 bytes
+ * are the fourcc, next 40 bytes are BITMAPINFOHEADER,
+ * next 1 byte is the version, and the
+ * subsequent bytes are sequence parameter set like data. */
+
+ size -= 40; /* we'll be skipping BITMAPINFOHEADER */
+ if (size > 1) {
+ gst_codec_utils_h264_caps_set_level_and_profile
+ (entry->caps, avc_data + 8 + 40 + 1, size - 1);
+
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, avc_data + 8 + 40, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ case FOURCC_btrt:
+ {
+ guint avg_bitrate, max_bitrate;
+
+ /* bufferSizeDB, maxBitrate and avgBitrate - 4 bytes each */
+ if (size < 12)
+ break;
+
+ max_bitrate = QT_UINT32 (avc_data + 0xc);
+ avg_bitrate = QT_UINT32 (avc_data + 0x10);
+
+ if (!max_bitrate && !avg_bitrate)
+ break;
+
+ /* Some muxers seem to swap the average and maximum bitrates
+ * (I'm looking at you, YouTube), so we swap for sanity. */
+ if (max_bitrate > 0 && max_bitrate < avg_bitrate) {
+ guint temp = avg_bitrate;
+
+ avg_bitrate = max_bitrate;
+ max_bitrate = temp;
+ }
+
+ if (max_bitrate > 0 && max_bitrate < G_MAXUINT32) {
+ gst_tag_list_add (stream->stream_tags,
+ GST_TAG_MERGE_REPLACE, GST_TAG_MAXIMUM_BITRATE,
+ max_bitrate, NULL);
+ }
+ if (avg_bitrate > 0 && avg_bitrate < G_MAXUINT32) {
+ gst_tag_list_add (stream->stream_tags,
+ GST_TAG_MERGE_REPLACE, GST_TAG_BITRATE, avg_bitrate,
+ NULL);
+ }
+
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ len -= size + 8;
+ avc_data += size + 8;
+ }
+
+ break;
+ }
+ case FOURCC_H265:
+ case FOURCC_hvc1:
+ case FOURCC_hev1:
+ case FOURCC_dvh1:
+ case FOURCC_dvhe:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *hevc_data = stsd_entry_data + 0x56;
+
+ /* find hevc */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (hevc_data) <= len)
+ size = QT_UINT32 (hevc_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (hevc_data + 0x4)) {
+ case FOURCC_hvcC:
+ {
+ /* parse, if found */
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found hvcC codec_data in stsd");
+
+ /* First 4 bytes are the length of the atom, the next 4 bytes
+ * are the fourcc, the next 1 byte is the version, and the
+ * subsequent bytes are sequence parameter set like data. */
+ gst_codec_utils_h265_caps_set_level_tier_and_profile
+ (entry->caps, hevc_data + 8 + 1, size - 1);
+
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, hevc_data + 0x8, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ default:
+ break;
+ }
+ len -= size + 8;
+ hevc_data += size + 8;
+ }
+ break;
+ }
+ case FOURCC_mp4v:
+ case FOURCC_MP4V:
+ case FOURCC_fmp4:
+ case FOURCC_FMP4:
+ case FOURCC_xvid:
+ case FOURCC_XVID:
+ {
+ GNode *glbl;
+
+ GST_DEBUG_OBJECT (qtdemux, "found %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+
+ /* codec data might be in glbl extension atom */
+ glbl = mp4v ?
+ qtdemux_tree_get_child_by_type (mp4v, FOURCC_glbl) : NULL;
+ if (glbl) {
+ guint8 *data;
+ GstBuffer *buf;
+ gint len;
+
+ GST_DEBUG_OBJECT (qtdemux, "found glbl data in stsd");
+ data = glbl->data;
+ len = QT_UINT32 (data);
+ if (len > 0x8) {
+ len -= 0x8;
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, data + 8, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ }
+ break;
+ }
+ case FOURCC_mjp2:
+ {
+ /* see annex I of the jpeg2000 spec */
+ GNode *jp2h, *ihdr, *colr, *mjp2, *field, *prefix, *cmap, *cdef;
+ const guint8 *data;
+ const gchar *colorspace = NULL;
+ gint ncomp = 0;
+ guint32 ncomp_map = 0;
+ gint32 *comp_map = NULL;
+ guint32 nchan_def = 0;
+ gint32 *chan_def = NULL;
+
+ GST_DEBUG_OBJECT (qtdemux, "found mjp2");
+ /* some required atoms */
+ mjp2 = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (!mjp2)
+ break;
+ jp2h = qtdemux_tree_get_child_by_type (mjp2, FOURCC_jp2h);
+ if (!jp2h)
+ break;
+
+ /* number of components; redundant with info in codestream, but useful
+ to a muxer */
+ ihdr = qtdemux_tree_get_child_by_type (jp2h, FOURCC_ihdr);
+ if (!ihdr || QT_UINT32 (ihdr->data) != 22)
+ break;
+ ncomp = QT_UINT16 (((guint8 *) ihdr->data) + 16);
+
+ colr = qtdemux_tree_get_child_by_type (jp2h, FOURCC_colr);
+ if (!colr)
+ break;
+ GST_DEBUG_OBJECT (qtdemux, "found colr");
+ /* extract colour space info */
+ if (QT_UINT8 ((guint8 *) colr->data + 8) == 1) {
+ switch (QT_UINT32 ((guint8 *) colr->data + 11)) {
+ case 16:
+ colorspace = "sRGB";
+ break;
+ case 17:
+ colorspace = "GRAY";
+ break;
+ case 18:
+ colorspace = "sYUV";
+ break;
+ default:
+ colorspace = NULL;
+ break;
+ }
+ }
+ if (!colorspace)
+ /* colr is required, and only values 16, 17, and 18 are specified,
+ so error if we have no colorspace */
+ break;
+
+ /* extract component mapping */
+ cmap = qtdemux_tree_get_child_by_type (jp2h, FOURCC_cmap);
+ if (cmap) {
+ guint32 cmap_len = 0;
+ int i;
+ cmap_len = QT_UINT32 (cmap->data);
+ if (cmap_len >= 8) {
+ /* normal box, subtract off header */
+ cmap_len -= 8;
+ /* cmap: { u16 cmp; u8 mtyp; u8 pcol; }* */
+ if (cmap_len % 4 == 0) {
+ ncomp_map = (cmap_len / 4);
+ comp_map = g_new0 (gint32, ncomp_map);
+ for (i = 0; i < ncomp_map; i++) {
+ guint16 cmp;
+ guint8 mtyp, pcol;
+ cmp = QT_UINT16 (((guint8 *) cmap->data) + 8 + i * 4);
+ mtyp = QT_UINT8 (((guint8 *) cmap->data) + 8 + i * 4 + 2);
+ pcol = QT_UINT8 (((guint8 *) cmap->data) + 8 + i * 4 + 3);
+ comp_map[i] = (mtyp << 24) | (pcol << 16) | cmp;
+ }
+ }
+ }
+ }
+ /* extract channel definitions */
+ cdef = qtdemux_tree_get_child_by_type (jp2h, FOURCC_cdef);
+ if (cdef) {
+ guint32 cdef_len = 0;
+ int i;
+ cdef_len = QT_UINT32 (cdef->data);
+ if (cdef_len >= 10) {
+ /* normal box, subtract off header and len */
+ cdef_len -= 10;
+ /* cdef: u16 n; { u16 cn; u16 typ; u16 asoc; }* */
+ if (cdef_len % 6 == 0) {
+ nchan_def = (cdef_len / 6);
+ chan_def = g_new0 (gint32, nchan_def);
+ for (i = 0; i < nchan_def; i++)
+ chan_def[i] = -1;
+ for (i = 0; i < nchan_def; i++) {
+ guint16 cn, typ, asoc;
+ cn = QT_UINT16 (((guint8 *) cdef->data) + 10 + i * 6);
+ typ = QT_UINT16 (((guint8 *) cdef->data) + 10 + i * 6 + 2);
+ asoc = QT_UINT16 (((guint8 *) cdef->data) + 10 + i * 6 + 4);
+ if (cn < nchan_def) {
+ switch (typ) {
+ case 0:
+ chan_def[cn] = asoc;
+ break;
+ case 1:
+ chan_def[cn] = 0; /* alpha */
+ break;
+ default:
+ chan_def[cn] = -typ;
+ }
+ }
+ }
+ }
+ }
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "num-components", G_TYPE_INT, ncomp, NULL);
+ gst_caps_set_simple (entry->caps,
+ "colorspace", G_TYPE_STRING, colorspace, NULL);
+
+ if (comp_map) {
+ GValue arr = { 0, };
+ GValue elt = { 0, };
+ int i;
+ g_value_init (&arr, GST_TYPE_ARRAY);
+ g_value_init (&elt, G_TYPE_INT);
+ for (i = 0; i < ncomp_map; i++) {
+ g_value_set_int (&elt, comp_map[i]);
+ gst_value_array_append_value (&arr, &elt);
+ }
+ gst_structure_set_value (gst_caps_get_structure (entry->caps, 0),
+ "component-map", &arr);
+ g_value_unset (&elt);
+ g_value_unset (&arr);
+ g_free (comp_map);
+ }
+
+ if (chan_def) {
+ GValue arr = { 0, };
+ GValue elt = { 0, };
+ int i;
+ g_value_init (&arr, GST_TYPE_ARRAY);
+ g_value_init (&elt, G_TYPE_INT);
+ for (i = 0; i < nchan_def; i++) {
+ g_value_set_int (&elt, chan_def[i]);
+ gst_value_array_append_value (&arr, &elt);
+ }
+ gst_structure_set_value (gst_caps_get_structure (entry->caps, 0),
+ "channel-definitions", &arr);
+ g_value_unset (&elt);
+ g_value_unset (&arr);
+ g_free (chan_def);
+ }
+
+ /* some optional atoms */
+ field = qtdemux_tree_get_child_by_type (mjp2, FOURCC_fiel);
+ prefix = qtdemux_tree_get_child_by_type (mjp2, FOURCC_jp2x);
+
+ /* indicate possible fields in caps */
+ if (field) {
+ data = (guint8 *) field->data + 8;
+ if (*data != 1)
+ gst_caps_set_simple (entry->caps, "fields", G_TYPE_INT,
+ (gint) * data, NULL);
+ }
+ /* add codec_data if provided */
+ if (prefix) {
+ GstBuffer *buf;
+ gint len;
+
+ GST_DEBUG_OBJECT (qtdemux, "found prefix data in stsd");
+ data = prefix->data;
+ len = QT_UINT32 (data);
+ if (len > 0x8) {
+ len -= 0x8;
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, data + 8, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ }
+ break;
+ }
+ case FOURCC_SVQ3:
+ case FOURCC_VP31:
+ {
+ GstBuffer *buf;
+ GstBuffer *seqh = NULL;
+ const guint8 *gamma_data = NULL;
+ gint len = QT_UINT32 (stsd_data); /* FIXME review - why put the whole stsd in codec data? */
+
+ qtdemux_parse_svq3_stsd_data (qtdemux, stsd_entry_data, &gamma_data,
+ &seqh);
+ if (gamma_data) {
+ gst_caps_set_simple (entry->caps, "applied-gamma", G_TYPE_DOUBLE,
+ QT_FP32 (gamma_data), NULL);
+ }
+ if (seqh) {
+ /* sorry for the bad name, but we don't know what this is, other
+ * than its own fourcc */
+ gst_caps_set_simple (entry->caps, "seqh", GST_TYPE_BUFFER, seqh,
+ NULL);
+ gst_buffer_unref (seqh);
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "found codec_data in stsd");
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, stsd_data, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ case FOURCC_jpeg:
+ {
+ /* https://developer.apple.com/standards/qtff-2001.pdf,
+ * page 92, "Video Sample Description", under table 3.1 */
+ GstByteReader br;
+
+ const gint compressor_offset =
+ 16 + 4 + 4 * 3 + 2 * 2 + 2 * 4 + 4 + 2;
+ const gint min_size = compressor_offset + 32 + 2 + 2;
+ GNode *jpeg;
+ guint32 len;
+ guint16 color_table_id = 0;
+ gboolean ok;
+
+ GST_DEBUG_OBJECT (qtdemux, "found jpeg");
+
+ /* recover information on interlaced/progressive */
+ jpeg = qtdemux_tree_get_child_by_type (stsd, FOURCC_jpeg);
+ if (!jpeg)
+ break;
+
+ len = QT_UINT32 (jpeg->data);
+ GST_DEBUG_OBJECT (qtdemux, "Found jpeg: len %u, need %d", len,
+ min_size);
+ if (len >= min_size) {
+ gst_byte_reader_init (&br, jpeg->data, len);
+
+ gst_byte_reader_skip (&br, compressor_offset + 32 + 2);
+ gst_byte_reader_get_uint16_le (&br, &color_table_id);
+ if (color_table_id != 0) {
+ /* the spec says there can be concatenated chunks in the data, and we want
+ * to find one called field. Walk through them. */
+ gint offset = min_size;
+ while (offset + 8 < len) {
+ guint32 size = 0, tag;
+ ok = gst_byte_reader_get_uint32_le (&br, &size);
+ ok &= gst_byte_reader_get_uint32_le (&br, &tag);
+ if (!ok || size < 8) {
+ GST_WARNING_OBJECT (qtdemux,
+ "Failed to walk optional chunk list");
+ break;
+ }
+ GST_DEBUG_OBJECT (qtdemux,
+ "Found optional %4.4s chunk, size %u",
+ (const char *) &tag, size);
+ if (tag == FOURCC_fiel) {
+ guint8 n_fields = 0, ordering = 0;
+ gst_byte_reader_get_uint8 (&br, &n_fields);
+ gst_byte_reader_get_uint8 (&br, &ordering);
+ if (n_fields == 1 || n_fields == 2) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Found fiel tag with %u fields, ordering %u",
+ n_fields, ordering);
+ if (n_fields == 2)
+ gst_caps_set_simple (CUR_STREAM (stream)->caps,
+ "interlace-mode", G_TYPE_STRING, "interleaved",
+ NULL);
+ } else {
+ GST_WARNING_OBJECT (qtdemux,
+ "Found fiel tag with invalid fields (%u)", n_fields);
+ }
+ }
+ offset += size;
+ }
+ } else {
+ GST_DEBUG_OBJECT (qtdemux,
+ "Color table ID is 0, not trying to get interlacedness");
+ }
+ } else {
+ GST_WARNING_OBJECT (qtdemux,
+ "Length of jpeg chunk is too small, not trying to get interlacedness");
+ }
+
+ break;
+ }
+ case FOURCC_rle_:
+ case FOURCC_WRLE:
+ {
+ gst_caps_set_simple (entry->caps,
+ "depth", G_TYPE_INT, QT_UINT16 (stsd_entry_data + offset + 66),
+ NULL);
+ break;
+ }
+ case FOURCC_XiTh:
+ {
+ GNode *xith, *xdxt;
+
+ GST_DEBUG_OBJECT (qtdemux, "found XiTh");
+ xith = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (!xith)
+ break;
+
+ xdxt = qtdemux_tree_get_child_by_type (xith, FOURCC_XdxT);
+ if (!xdxt)
+ break;
+
+ GST_DEBUG_OBJECT (qtdemux, "found XdxT node");
+ /* collect the headers and store them in a stream list so that we can
+ * send them out first */
+ qtdemux_parse_theora_extension (qtdemux, stream, xdxt);
+ break;
+ }
+ case FOURCC_ovc1:
+ {
+ GNode *ovc1;
+ guint8 *ovc1_data;
+ guint ovc1_len;
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "parse ovc1 header");
+ ovc1 = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (!ovc1)
+ break;
+ ovc1_data = ovc1->data;
+ ovc1_len = QT_UINT32 (ovc1_data);
+ if (ovc1_len <= 198) {
+ GST_WARNING_OBJECT (qtdemux, "Too small ovc1 header, skipping");
+ break;
+ }
+ buf = gst_buffer_new_and_alloc (ovc1_len - 198);
+ gst_buffer_fill (buf, 0, ovc1_data + 198, ovc1_len - 198);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ case FOURCC_vc_1:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *vc1_data = stsd_entry_data + 0x56;
+
+ /* find dvc1 */
+ while (len >= 8) {
+ gint size;
+
+ if (QT_UINT32 (vc1_data) <= len)
+ size = QT_UINT32 (vc1_data) - 8;
+ else
+ size = len - 8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (vc1_data + 0x4)) {
+ case GST_MAKE_FOURCC ('d', 'v', 'c', '1'):
+ {
+ GstBuffer *buf;
+
+ GST_DEBUG_OBJECT (qtdemux, "found dvc1 codec_data in stsd");
+ buf = gst_buffer_new_and_alloc (size);
+ gst_buffer_fill (buf, 0, vc1_data + 8, size);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ break;
+ }
+ default:
+ break;
+ }
+ len -= size + 8;
+ vc1_data += size + 8;
+ }
+ break;
+ }
+ case FOURCC_av01:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *av1_data = stsd_entry_data + 0x56;
+
+ /* find av1C */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (av1_data) <= len)
+ size = QT_UINT32 (av1_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (av1_data + 0x4)) {
+ case FOURCC_av1C:
+ {
+ /* parse, if found */
+ GstBuffer *buf;
+ guint8 pres_delay_field;
+
+ GST_DEBUG_OBJECT (qtdemux,
+ "found av1C codec_data in stsd of size %d", size);
+
+ /* not enough data, just ignore and hope for the best */
+ if (size < 5)
+ break;
+
+ /* Content is:
+ * 4 bytes: atom length
+ * 4 bytes: fourcc
+ * 1 byte: version
+ * 3 bytes: flags
+ * 3 bits: reserved
+ * 1 bits: initial_presentation_delay_present
+ * 4 bits: initial_presentation_delay (if present else reserved
+ * rest: OBUs.
+ */
+
+ if (av1_data[9] != 0) {
+ GST_WARNING ("Unknown version %d of av1C box", av1_data[9]);
+ break;
+ }
+
+ /* We skip initial_presentation_delay* for now */
+ pres_delay_field = *(av1_data + 12);
+ if (pres_delay_field & (1 << 5)) {
+ gst_caps_set_simple (entry->caps,
+ "presentation-delay", G_TYPE_INT,
+ (gint) (pres_delay_field & 0x0F) + 1, NULL);
+ }
+ if (size > 5) {
+ buf = gst_buffer_new_and_alloc (size - 5);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+ gst_buffer_fill (buf, 0, av1_data + 13, size - 5);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ len -= size + 8;
+ av1_data += size + 8;
+ }
+
+ break;
+ }
+
+ /* TODO: Need to parse vpcC for VP8 codec too.
+ * Note that VPCodecConfigurationBox (vpcC) is defined for
+ * vp08, vp09, and vp10 fourcc. */
+ case FOURCC_vp09:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - 0x56;
+ const guint8 *vpcc_data = stsd_entry_data + 0x56;
+
+ /* find vpcC */
+ while (len >= 0x8) {
+ gint size;
+
+ if (QT_UINT32 (vpcc_data) <= len)
+ size = QT_UINT32 (vpcc_data) - 0x8;
+ else
+ size = len - 0x8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (vpcc_data + 0x4)) {
+ case FOURCC_vpcC:
+ {
+ const gchar *profile_str = NULL;
+ const gchar *chroma_format_str = NULL;
+ guint8 profile;
+ guint8 bitdepth;
+ guint8 chroma_format;
+ GstVideoColorimetry cinfo;
+
+ /* parse, if found */
+ GST_DEBUG_OBJECT (qtdemux,
+ "found vp codec_data in stsd of size %d", size);
+
+ /* the meaning of "size" is length of the atom body, excluding
+ * atom length and fourcc fields */
+ if (size < 12)
+ break;
+
+ /* Content is:
+ * 4 bytes: atom length
+ * 4 bytes: fourcc
+ * 1 byte: version
+ * 3 bytes: flags
+ * 1 byte: profile
+ * 1 byte: level
+ * 4 bits: bitDepth
+ * 3 bits: chromaSubsampling
+ * 1 bit: videoFullRangeFlag
+ * 1 byte: colourPrimaries
+ * 1 byte: transferCharacteristics
+ * 1 byte: matrixCoefficients
+ * 2 bytes: codecIntializationDataSize (should be zero for vp8 and vp9)
+ * rest: codecIntializationData (not used for vp8 and vp9)
+ */
+
+ if (vpcc_data[8] != 1) {
+ GST_WARNING_OBJECT (qtdemux,
+ "unknown vpcC version %d", vpcc_data[8]);
+ break;
+ }
+
+ profile = vpcc_data[12];
+ switch (profile) {
+ case 0:
+ profile_str = "0";
+ break;
+ case 1:
+ profile_str = "1";
+ break;
+ case 2:
+ profile_str = "2";
+ break;
+ case 3:
+ profile_str = "3";
+ break;
+ default:
+ break;
+ }
+
+ if (profile_str) {
+ gst_caps_set_simple (entry->caps,
+ "profile", G_TYPE_STRING, profile_str, NULL);
+ }
+
+ /* skip level, the VP9 spec v0.6 defines only one level atm,
+ * but webm spec define various ones. Add level to caps
+ * if we really need it then */
+
+ bitdepth = (vpcc_data[14] & 0xf0) >> 4;
+ if (bitdepth == 8 || bitdepth == 10 || bitdepth == 12) {
+ gst_caps_set_simple (entry->caps,
+ "bit-depth-luma", G_TYPE_UINT, bitdepth,
+ "bit-depth-chroma", G_TYPE_UINT, bitdepth, NULL);
+ }
+
+ chroma_format = (vpcc_data[14] & 0xe) >> 1;
+ switch (chroma_format) {
+ case 0:
+ case 1:
+ chroma_format_str = "4:2:0";
+ break;
+ case 2:
+ chroma_format_str = "4:2:2";
+ break;
+ case 3:
+ chroma_format_str = "4:4:4";
+ break;
+ default:
+ break;
+ }
+
+ if (chroma_format_str) {
+ gst_caps_set_simple (entry->caps,
+ "chroma-format", G_TYPE_STRING, chroma_format_str,
+ NULL);
+ }
+
+ if ((vpcc_data[14] & 0x1) != 0)
+ cinfo.range = GST_VIDEO_COLOR_RANGE_0_255;
+ else
+ cinfo.range = GST_VIDEO_COLOR_RANGE_16_235;
+ cinfo.primaries =
+ gst_video_color_primaries_from_iso (vpcc_data[15]);
+ cinfo.transfer =
+ gst_video_transfer_function_from_iso (vpcc_data[16]);
+ cinfo.matrix =
+ gst_video_color_matrix_from_iso (vpcc_data[17]);
+
+ if (cinfo.primaries != GST_VIDEO_COLOR_PRIMARIES_UNKNOWN &&
+ cinfo.transfer != GST_VIDEO_TRANSFER_UNKNOWN &&
+ cinfo.matrix != GST_VIDEO_COLOR_MATRIX_UNKNOWN) {
+ /* set this only if all values are known, otherwise this
+ * might overwrite valid ones parsed from other color box */
+ CUR_STREAM (stream)->colorimetry = cinfo;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ len -= size + 8;
+ vpcc_data += size + 8;
+ }
+
+ break;
+ }
+ default:
+ break;
+ }
+ }
+
+ GST_INFO_OBJECT (qtdemux,
+ "type %" GST_FOURCC_FORMAT " caps %" GST_PTR_FORMAT,
+ GST_FOURCC_ARGS (fourcc), entry->caps);
+
+ } else if (stream->subtype == FOURCC_soun) {
+ GNode *wave;
+ int version, samplesize;
+ guint16 compression_id;
+ gboolean amrwb = FALSE;
+
+ offset = 16;
+ /* sample description entry (16) + sound sample description v0 (20) */
+ if (len < 36)
+ goto corrupt_file;
+
+ version = QT_UINT32 (stsd_entry_data + offset);
+ entry->n_channels = QT_UINT16 (stsd_entry_data + offset + 8);
+ samplesize = QT_UINT16 (stsd_entry_data + offset + 10);
+ compression_id = QT_UINT16 (stsd_entry_data + offset + 12);
+ entry->rate = QT_FP32 (stsd_entry_data + offset + 16);
+
+ GST_LOG_OBJECT (qtdemux, "version/rev: %08x", version);
+ GST_LOG_OBJECT (qtdemux, "vendor: %08x",
+ QT_UINT32 (stsd_entry_data + offset + 4));
+ GST_LOG_OBJECT (qtdemux, "n_channels: %d", entry->n_channels);
+ GST_LOG_OBJECT (qtdemux, "sample_size: %d", samplesize);
+ GST_LOG_OBJECT (qtdemux, "compression_id: %d", compression_id);
+ GST_LOG_OBJECT (qtdemux, "packet size: %d",
+ QT_UINT16 (stsd_entry_data + offset + 14));
+ GST_LOG_OBJECT (qtdemux, "sample rate: %g", entry->rate);
+
+ if (compression_id == 0xfffe)
+ entry->sampled = TRUE;
+
+ /* first assume uncompressed audio */
+ entry->bytes_per_sample = samplesize / 8;
+ entry->samples_per_frame = entry->n_channels;
+ entry->bytes_per_frame = entry->n_channels * entry->bytes_per_sample;
+ entry->samples_per_packet = entry->samples_per_frame;
+ entry->bytes_per_packet = entry->bytes_per_sample;
+
+ offset = 36;
+
+ if (version == 0x00010000) {
+ /* sample description entry (16) + sound sample description v1 (20+16) */
+ if (len < 52)
+ goto corrupt_file;
+
+ /* take information from here over the normal sample description */
+ entry->samples_per_packet = QT_UINT32 (stsd_entry_data + offset);
+ entry->bytes_per_packet = QT_UINT32 (stsd_entry_data + offset + 4);
+ entry->bytes_per_frame = QT_UINT32 (stsd_entry_data + offset + 8);
+ entry->bytes_per_sample = QT_UINT32 (stsd_entry_data + offset + 12);
+
+ GST_LOG_OBJECT (qtdemux, "Sound sample description Version 1");
+ GST_LOG_OBJECT (qtdemux, "samples/packet: %d",
+ entry->samples_per_packet);
+ GST_LOG_OBJECT (qtdemux, "bytes/packet: %d",
+ entry->bytes_per_packet);
+ GST_LOG_OBJECT (qtdemux, "bytes/frame: %d",
+ entry->bytes_per_frame);
+ GST_LOG_OBJECT (qtdemux, "bytes/sample: %d",
+ entry->bytes_per_sample);
+
+ if (!entry->sampled && entry->bytes_per_packet) {
+ entry->samples_per_frame = (entry->bytes_per_frame /
+ entry->bytes_per_packet) * entry->samples_per_packet;
+ GST_LOG_OBJECT (qtdemux, "samples/frame: %d",
+ entry->samples_per_frame);
+ }
+ } else if (version == 0x00020000) {
+ /* sample description entry (16) + sound sample description v2 (56) */
+ if (len < 72)
+ goto corrupt_file;
+
+ /* take information from here over the normal sample description */
+ entry->rate = GST_READ_DOUBLE_BE (stsd_entry_data + offset + 4);
+ entry->n_channels = QT_UINT32 (stsd_entry_data + offset + 12);
+ entry->samples_per_frame = entry->n_channels;
+ entry->bytes_per_sample = QT_UINT32 (stsd_entry_data + offset + 20) / 8;
+ entry->bytes_per_packet = QT_UINT32 (stsd_entry_data + offset + 28);
+ entry->samples_per_packet = QT_UINT32 (stsd_entry_data + offset + 32);
+ entry->bytes_per_frame = entry->bytes_per_sample * entry->n_channels;
+
+ GST_LOG_OBJECT (qtdemux, "Sound sample description Version 2");
+ GST_LOG_OBJECT (qtdemux, "sample rate: %g", entry->rate);
+ GST_LOG_OBJECT (qtdemux, "n_channels: %d", entry->n_channels);
+ GST_LOG_OBJECT (qtdemux, "bits/channel: %d",
+ entry->bytes_per_sample * 8);
+ GST_LOG_OBJECT (qtdemux, "format flags: %X",
+ QT_UINT32 (stsd_entry_data + offset + 24));
+ GST_LOG_OBJECT (qtdemux, "bytes/packet: %d",
+ entry->bytes_per_packet);
+ GST_LOG_OBJECT (qtdemux, "LPCM frames/packet: %d",
+ entry->samples_per_packet);
+ } else if (version != 0x00000) {
+ GST_WARNING_OBJECT (qtdemux, "unknown audio STSD version %08x",
+ version);
+ }
+
+ switch (fourcc) {
+ /* Yes, these have to be hard-coded */
+ case FOURCC_MAC6:
+ {
+ entry->samples_per_packet = 6;
+ entry->bytes_per_packet = 1;
+ entry->bytes_per_frame = 1 * entry->n_channels;
+ entry->bytes_per_sample = 1;
+ entry->samples_per_frame = 6 * entry->n_channels;
+ break;
+ }
+ case FOURCC_MAC3:
+ {
+ entry->samples_per_packet = 3;
+ entry->bytes_per_packet = 1;
+ entry->bytes_per_frame = 1 * entry->n_channels;
+ entry->bytes_per_sample = 1;
+ entry->samples_per_frame = 3 * entry->n_channels;
+ break;
+ }
+ case FOURCC_ima4:
+ {
+ entry->samples_per_packet = 64;
+ entry->bytes_per_packet = 34;
+ entry->bytes_per_frame = 34 * entry->n_channels;
+ entry->bytes_per_sample = 2;
+ entry->samples_per_frame = 64 * entry->n_channels;
+ break;
+ }
+ case FOURCC_ulaw:
+ case FOURCC_alaw:
+ {
+ entry->samples_per_packet = 1;
+ entry->bytes_per_packet = 1;
+ entry->bytes_per_frame = 1 * entry->n_channels;
+ entry->bytes_per_sample = 1;
+ entry->samples_per_frame = 1 * entry->n_channels;
+ break;
+ }
+ case FOURCC_agsm:
+ {
+ entry->samples_per_packet = 160;
+ entry->bytes_per_packet = 33;
+ entry->bytes_per_frame = 33 * entry->n_channels;
+ entry->bytes_per_sample = 2;
+ entry->samples_per_frame = 160 * entry->n_channels;
+ break;
+ }
+ /* fix up any invalid header information from above */
+ case FOURCC_twos:
+ case FOURCC_sowt:
+ case FOURCC_raw_:
+ case FOURCC_lpcm:
+ /* Sometimes these are set to 0 in the sound sample descriptions so
+ * let's try to infer useful values from the other information we
+ * have available */
+ if (entry->bytes_per_sample == 0)
+ entry->bytes_per_sample =
+ entry->bytes_per_frame / entry->n_channels;
+ if (entry->bytes_per_sample == 0)
+ entry->bytes_per_sample = samplesize / 8;
+
+ if (entry->bytes_per_frame == 0)
+ entry->bytes_per_frame =
+ entry->bytes_per_sample * entry->n_channels;
+
+ if (entry->bytes_per_packet == 0)
+ entry->bytes_per_packet = entry->bytes_per_sample;
+
+ if (entry->samples_per_frame == 0)
+ entry->samples_per_frame = entry->n_channels;
+
+ if (entry->samples_per_packet == 0)
+ entry->samples_per_packet = entry->samples_per_frame;
+
+ break;
+ case FOURCC_in24:
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ case FOURCC_fl64:
+ case FOURCC_s16l:{
+ switch (fourcc) {
+ case FOURCC_in24:
+ entry->bytes_per_sample = 3;
+ break;
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ entry->bytes_per_sample = 4;
+ break;
+ case FOURCC_fl64:
+ entry->bytes_per_sample = 8;
+ break;
+ case FOURCC_s16l:
+ entry->bytes_per_sample = 2;
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ entry->samples_per_frame = entry->n_channels;
+ entry->bytes_per_frame = entry->n_channels * entry->bytes_per_sample;
+ entry->samples_per_packet = entry->samples_per_frame;
+ entry->bytes_per_packet = entry->bytes_per_sample;
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (entry->caps)
+ gst_caps_unref (entry->caps);
+
+ entry->caps = qtdemux_audio_caps (qtdemux, stream, entry, fourcc,
+ stsd_entry_data + 32, len - 16, &codec);
+
+ switch (fourcc) {
+ case FOURCC_in24:
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ case FOURCC_fl64:
+ {
+ GNode *enda;
+ GNode *fmt;
+
+ fmt = qtdemux_tree_get_child_by_type (stsd, fourcc);
+
+ enda = qtdemux_tree_get_child_by_type (fmt, FOURCC_enda);
+ if (!enda) {
+ wave = qtdemux_tree_get_child_by_type (fmt, FOURCC_wave);
+ if (wave)
+ enda = qtdemux_tree_get_child_by_type (wave, FOURCC_enda);
+ }
+ if (enda) {
+ int enda_value = QT_UINT16 ((guint8 *) enda->data + 8);
+ const gchar *format_str;
+
+ switch (fourcc) {
+ case FOURCC_in24:
+ format_str = (enda_value) ? "S24LE" : "S24BE";
+ break;
+ case FOURCC_in32:
+ format_str = (enda_value) ? "S32LE" : "S32BE";
+ break;
+ case FOURCC_fl32:
+ format_str = (enda_value) ? "F32LE" : "F32BE";
+ break;
+ case FOURCC_fl64:
+ format_str = (enda_value) ? "F64LE" : "F64BE";
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ gst_caps_set_simple (entry->caps,
+ "format", G_TYPE_STRING, format_str, NULL);
+ }
+ break;
+ }
+ case FOURCC_owma:
+ {
+ const guint8 *owma_data;
+ const gchar *codec_name = NULL;
+ guint owma_len;
+ GstBuffer *buf;
+ gint version = 1;
+ /* from http://msdn.microsoft.com/en-us/library/dd757720(VS.85).aspx */
+ /* FIXME this should also be gst_riff_strf_auds,
+ * but the latter one is actually missing bits-per-sample :( */
+ typedef struct
+ {
+ gint16 wFormatTag;
+ gint16 nChannels;
+ gint32 nSamplesPerSec;
+ gint32 nAvgBytesPerSec;
+ gint16 nBlockAlign;
+ gint16 wBitsPerSample;
+ gint16 cbSize;
+ } WAVEFORMATEX;
+ WAVEFORMATEX *wfex;
+
+ GST_DEBUG_OBJECT (qtdemux, "parse owma");
+ owma_data = stsd_entry_data;
+ owma_len = QT_UINT32 (owma_data);
+ if (owma_len <= 54) {
+ GST_WARNING_OBJECT (qtdemux, "Too small owma header, skipping");
+ break;
+ }
+ wfex = (WAVEFORMATEX *) (owma_data + 36);
+ buf = gst_buffer_new_and_alloc (owma_len - 54);
+ gst_buffer_fill (buf, 0, owma_data + 54, owma_len - 54);
+ if (wfex->wFormatTag == 0x0161) {
+ codec_name = "Windows Media Audio";
+ version = 2;
+ } else if (wfex->wFormatTag == 0x0162) {
+ codec_name = "Windows Media Audio 9 Pro";
+ version = 3;
+ } else if (wfex->wFormatTag == 0x0163) {
+ codec_name = "Windows Media Audio 9 Lossless";
+ /* is that correct? gstffmpegcodecmap.c is missing it, but
+ * fluendo codec seems to support it */
+ version = 4;
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf,
+ "wmaversion", G_TYPE_INT, version,
+ "block_align", G_TYPE_INT,
+ GST_READ_UINT16_LE (&wfex->nBlockAlign), "bitrate", G_TYPE_INT,
+ GST_READ_UINT32_LE (&wfex->nAvgBytesPerSec), "width", G_TYPE_INT,
+ GST_READ_UINT16_LE (&wfex->wBitsPerSample), "depth", G_TYPE_INT,
+ GST_READ_UINT16_LE (&wfex->wBitsPerSample), NULL);
+ gst_buffer_unref (buf);
+
+ if (codec_name) {
+ g_free (codec);
+ codec = g_strdup (codec_name);
+ }
+ break;
+ }
+ case FOURCC_wma_:
+ {
+ gint len = QT_UINT32 (stsd_entry_data) - offset;
+ const guint8 *wfex_data = stsd_entry_data + offset;
+ const gchar *codec_name = NULL;
+ gint version = 1;
+ /* from http://msdn.microsoft.com/en-us/library/dd757720(VS.85).aspx */
+ /* FIXME this should also be gst_riff_strf_auds,
+ * but the latter one is actually missing bits-per-sample :( */
+ typedef struct
+ {
+ gint16 wFormatTag;
+ gint16 nChannels;
+ gint32 nSamplesPerSec;
+ gint32 nAvgBytesPerSec;
+ gint16 nBlockAlign;
+ gint16 wBitsPerSample;
+ gint16 cbSize;
+ } WAVEFORMATEX;
+ WAVEFORMATEX wfex;
+
+ /* FIXME: unify with similar wavformatex parsing code above */
+ GST_DEBUG_OBJECT (qtdemux, "parse wma, looking for wfex");
+
+ /* find wfex */
+ while (len >= 8) {
+ gint size;
+
+ if (QT_UINT32 (wfex_data) <= len)
+ size = QT_UINT32 (wfex_data) - 8;
+ else
+ size = len - 8;
+
+ if (size < 1)
+ /* No real data, so break out */
+ break;
+
+ switch (QT_FOURCC (wfex_data + 4)) {
+ case GST_MAKE_FOURCC ('w', 'f', 'e', 'x'):
+ {
+ GST_DEBUG_OBJECT (qtdemux, "found wfex in stsd");
+
+ if (size < 8 + 18)
+ break;
+
+ wfex.wFormatTag = GST_READ_UINT16_LE (wfex_data + 8 + 0);
+ wfex.nChannels = GST_READ_UINT16_LE (wfex_data + 8 + 2);
+ wfex.nSamplesPerSec = GST_READ_UINT32_LE (wfex_data + 8 + 4);
+ wfex.nAvgBytesPerSec = GST_READ_UINT32_LE (wfex_data + 8 + 8);
+ wfex.nBlockAlign = GST_READ_UINT16_LE (wfex_data + 8 + 12);
+ wfex.wBitsPerSample = GST_READ_UINT16_LE (wfex_data + 8 + 14);
+ wfex.cbSize = GST_READ_UINT16_LE (wfex_data + 8 + 16);
+
+ GST_LOG_OBJECT (qtdemux, "Found wfex box in stsd:");
+ GST_LOG_OBJECT (qtdemux, "FormatTag = 0x%04x, Channels = %u, "
+ "SamplesPerSec = %u, AvgBytesPerSec = %u, BlockAlign = %u, "
+ "BitsPerSample = %u, Size = %u", wfex.wFormatTag,
+ wfex.nChannels, wfex.nSamplesPerSec, wfex.nAvgBytesPerSec,
+ wfex.nBlockAlign, wfex.wBitsPerSample, wfex.cbSize);
+
+ if (wfex.wFormatTag == 0x0161) {
+ codec_name = "Windows Media Audio";
+ version = 2;
+ } else if (wfex.wFormatTag == 0x0162) {
+ codec_name = "Windows Media Audio 9 Pro";
+ version = 3;
+ } else if (wfex.wFormatTag == 0x0163) {
+ codec_name = "Windows Media Audio 9 Lossless";
+ /* is that correct? gstffmpegcodecmap.c is missing it, but
+ * fluendo codec seems to support it */
+ version = 4;
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "wmaversion", G_TYPE_INT, version,
+ "block_align", G_TYPE_INT, wfex.nBlockAlign,
+ "bitrate", G_TYPE_INT, wfex.nAvgBytesPerSec,
+ "width", G_TYPE_INT, wfex.wBitsPerSample,
+ "depth", G_TYPE_INT, wfex.wBitsPerSample, NULL);
+
+ if (size > wfex.cbSize) {
+ GstBuffer *buf;
+
+ buf = gst_buffer_new_and_alloc (size - wfex.cbSize);
+ gst_buffer_fill (buf, 0, wfex_data + 8 + wfex.cbSize,
+ size - wfex.cbSize);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ } else {
+ GST_WARNING_OBJECT (qtdemux, "no codec data");
+ }
+
+ if (codec_name) {
+ g_free (codec);
+ codec = g_strdup (codec_name);
+ }
+ break;
+ }
+ default:
+ break;
+ }
+ len -= size + 8;
+ wfex_data += size + 8;
+ }
+ break;
+ }
+ case FOURCC_opus:
+ {
+ const guint8 *opus_data;
+ guint8 *channel_mapping = NULL;
+ guint32 rate;
+ guint8 channels;
+ guint8 channel_mapping_family;
+ guint8 stream_count;
+ guint8 coupled_count;
+ guint8 i;
+
+ opus_data = stsd_entry_data;
+
+ channels = GST_READ_UINT8 (opus_data + 45);
+ rate = GST_READ_UINT32_LE (opus_data + 48);
+ channel_mapping_family = GST_READ_UINT8 (opus_data + 54);
+ stream_count = GST_READ_UINT8 (opus_data + 55);
+ coupled_count = GST_READ_UINT8 (opus_data + 56);
+
+ if (channels > 0) {
+ channel_mapping = g_malloc (channels * sizeof (guint8));
+ for (i = 0; i < channels; i++)
+ channel_mapping[i] = GST_READ_UINT8 (opus_data + i + 57);
+ }
+
+ entry->caps = gst_codec_utils_opus_create_caps (rate, channels,
+ channel_mapping_family, stream_count, coupled_count,
+ channel_mapping);
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (codec) {
+ GstStructure *s;
+ gint bitrate = 0;
+
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+
+ /* some bitrate info may have ended up in caps */
+ s = gst_caps_get_structure (entry->caps, 0);
+ gst_structure_get_int (s, "bitrate", &bitrate);
+ if (bitrate > 0)
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, bitrate, NULL);
+ }
+
+ esds = NULL;
+ mp4a = qtdemux_tree_get_child_by_index (stsd, stsd_index);
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != fourcc) {
+ if (stream->protected) {
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) == FOURCC_aavd) {
+ esds = qtdemux_tree_get_child_by_type (mp4a, FOURCC_esds);
+ }
+ if (QTDEMUX_TREE_NODE_FOURCC (mp4a) != FOURCC_enca) {
+ mp4a = NULL;
+ }
+ } else {
+ mp4a = NULL;
+ }
+ }
+
+ wave = NULL;
+ if (mp4a) {
+ wave = qtdemux_tree_get_child_by_type (mp4a, FOURCC_wave);
+ if (wave)
+ esds = qtdemux_tree_get_child_by_type (wave, FOURCC_esds);
+ if (!esds)
+ esds = qtdemux_tree_get_child_by_type (mp4a, FOURCC_esds);
+ }
+
+
+ /* If the fourcc's bottom 16 bits gives 'sm', then the top
+ 16 bits is a byte-swapped wave-style codec identifier,
+ and we can find a WAVE header internally to a 'wave' atom here.
+ This can more clearly be thought of as 'ms' as the top 16 bits, and a
+ codec id as the bottom 16 bits - but byte-swapped to store in QT (which
+ is big-endian).
+ */
+ if ((fourcc & 0xffff) == (('s' << 8) | 'm')) {
+ if (len < offset + 20) {
+ GST_WARNING_OBJECT (qtdemux, "No wave atom in MS-style audio");
+ } else {
+ guint32 datalen = QT_UINT32 (stsd_entry_data + offset + 16);
+ const guint8 *data = stsd_entry_data + offset + 16;
+ GNode *wavenode;
+ GNode *waveheadernode;
+
+ wavenode = g_node_new ((guint8 *) data);
+ if (qtdemux_parse_node (qtdemux, wavenode, data, datalen)) {
+ const guint8 *waveheader;
+ guint32 headerlen;
+
+ waveheadernode = qtdemux_tree_get_child_by_type (wavenode, fourcc);
+ if (waveheadernode) {
+ waveheader = (const guint8 *) waveheadernode->data;
+ headerlen = QT_UINT32 (waveheader);
+
+ if (headerlen > 8) {
+ gst_riff_strf_auds *header = NULL;
+ GstBuffer *headerbuf;
+ GstBuffer *extra;
+
+ waveheader += 8;
+ headerlen -= 8;
+
+ headerbuf = gst_buffer_new_and_alloc (headerlen);
+ gst_buffer_fill (headerbuf, 0, waveheader, headerlen);
+
+ if (gst_riff_parse_strf_auds (GST_ELEMENT_CAST (qtdemux),
+ headerbuf, &header, &extra)) {
+ gst_caps_unref (entry->caps);
+ /* FIXME: Need to do something with the channel reorder map */
+ entry->caps =
+ gst_riff_create_audio_caps (header->format, NULL, header,
+ extra, NULL, NULL, NULL);
+
+ if (extra)
+ gst_buffer_unref (extra);
+ g_free (header);
+ }
+ }
+ } else
+ GST_DEBUG ("Didn't find waveheadernode for this codec");
+ }
+ g_node_destroy (wavenode);
+ }
+ } else if (esds) {
+ gst_qtdemux_handle_esds (qtdemux, stream, entry, esds,
+ stream->stream_tags);
+ } else {
+ switch (fourcc) {
+#if 0
+ /* FIXME: what is in the chunk? */
+ case FOURCC_QDMC:
+ {
+ gint len = QT_UINT32 (stsd_data);
+
+ /* seems to be always = 116 = 0x74 */
+ break;
+ }
+#endif
+ case FOURCC_QDM2:
+ {
+ gint len = QT_UINT32 (stsd_entry_data);
+
+ if (len > 0x3C) {
+ GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x3C);
+
+ gst_buffer_fill (buf, 0, stsd_entry_data + 0x3C, len - 0x3C);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ gst_caps_set_simple (entry->caps,
+ "samplesize", G_TYPE_INT, samplesize, NULL);
+ break;
+ }
+ case FOURCC_alac:
+ {
+ GNode *alac, *wave = NULL;
+
+ /* apparently, m4a has this atom appended directly in the stsd entry,
+ * while mov has it in a wave atom */
+ alac = qtdemux_tree_get_child_by_type (stsd, FOURCC_alac);
+ if (alac) {
+ /* alac now refers to stsd entry atom */
+ wave = qtdemux_tree_get_child_by_type (alac, FOURCC_wave);
+ if (wave)
+ alac = qtdemux_tree_get_child_by_type (wave, FOURCC_alac);
+ else
+ alac = qtdemux_tree_get_child_by_type (alac, FOURCC_alac);
+ }
+ if (alac) {
+ const guint8 *alac_data = alac->data;
+ gint len = QT_UINT32 (alac->data);
+ GstBuffer *buf;
+
+ if (len < 36) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "discarding alac atom with unexpected len %d", len);
+ } else {
+ /* codec-data contains alac atom size and prefix,
+ * ffmpeg likes it that way, not quite gst-ish though ...*/
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, alac->data, len);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+
+ entry->bytes_per_frame = QT_UINT32 (alac_data + 12);
+ entry->n_channels = QT_UINT8 (alac_data + 21);
+ entry->rate = QT_UINT32 (alac_data + 32);
+ samplesize = QT_UINT8 (alac_data + 16 + 1);
+ }
+ }
+ gst_caps_set_simple (entry->caps,
+ "samplesize", G_TYPE_INT, samplesize, NULL);
+ break;
+ }
+ case FOURCC_fLaC:
+ {
+ /* The codingname of the sample entry is 'fLaC' */
+ GNode *flac = qtdemux_tree_get_child_by_type (stsd, FOURCC_fLaC);
+
+ if (flac) {
+ /* The 'dfLa' box is added to the sample entry to convey
+ initializing information for the decoder. */
+ const GNode *dfla =
+ qtdemux_tree_get_child_by_type (flac, FOURCC_dfLa);
+
+ if (dfla) {
+ const guint32 len = QT_UINT32 (dfla->data);
+
+ /* Must contain at least dfLa box header (12),
+ * METADATA_BLOCK_HEADER (4), METADATA_BLOCK_STREAMINFO (34) */
+ if (len < 50) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "discarding dfla atom with unexpected len %d", len);
+ } else {
+ /* skip dfLa header to get the METADATA_BLOCKs */
+ const guint8 *metadata_blocks = (guint8 *) dfla->data + 12;
+ const guint32 metadata_blocks_len = len - 12;
+
+ gchar *stream_marker = g_strdup ("fLaC");
+ GstBuffer *block = gst_buffer_new_wrapped (stream_marker,
+ strlen (stream_marker));
+
+ guint32 index = 0;
+ guint32 remainder = 0;
+ guint32 block_size = 0;
+ gboolean is_last = FALSE;
+
+ GValue array = G_VALUE_INIT;
+ GValue value = G_VALUE_INIT;
+
+ g_value_init (&array, GST_TYPE_ARRAY);
+ g_value_init (&value, GST_TYPE_BUFFER);
+
+ gst_value_set_buffer (&value, block);
+ gst_value_array_append_value (&array, &value);
+ g_value_reset (&value);
+
+ gst_buffer_unref (block);
+
+ /* check there's at least one METADATA_BLOCK_HEADER's worth
+ * of data, and we haven't already finished parsing */
+ while (!is_last && ((index + 3) < metadata_blocks_len)) {
+ remainder = metadata_blocks_len - index;
+
+ /* add the METADATA_BLOCK_HEADER size to the signalled size */
+ block_size = 4 +
+ (metadata_blocks[index + 1] << 16) +
+ (metadata_blocks[index + 2] << 8) +
+ metadata_blocks[index + 3];
+
+ /* be careful not to read off end of box */
+ if (block_size > remainder) {
+ break;
+ }
+
+ is_last = metadata_blocks[index] >> 7;
+
+ block = gst_buffer_new_and_alloc (block_size);
+
+ gst_buffer_fill (block, 0, &metadata_blocks[index],
+ block_size);
+
+ gst_value_set_buffer (&value, block);
+ gst_value_array_append_value (&array, &value);
+ g_value_reset (&value);
+
+ gst_buffer_unref (block);
+
+ index += block_size;
+ }
+
+ /* only append the metadata if we successfully read all of it */
+ if (is_last) {
+ gst_structure_set_value (gst_caps_get_structure (CUR_STREAM
+ (stream)->caps, 0), "streamheader", &array);
+ } else {
+ GST_WARNING_OBJECT (qtdemux,
+ "discarding all METADATA_BLOCKs due to invalid "
+ "block_size %d at idx %d, rem %d", block_size, index,
+ remainder);
+ }
+
+ g_value_unset (&value);
+ g_value_unset (&array);
+
+ /* The sample rate obtained from the stsd may not be accurate
+ * since it cannot represent rates greater than 65535Hz, so
+ * override that value with the sample rate from the
+ * METADATA_BLOCK_STREAMINFO block */
+ CUR_STREAM (stream)->rate =
+ (QT_UINT32 (metadata_blocks + 14) >> 12) & 0xFFFFF;
+ }
+ }
+ }
+ break;
+ }
+ case FOURCC_sawb:
+ /* Fallthrough! */
+ amrwb = TRUE;
+ case FOURCC_samr:
+ {
+ gint len = QT_UINT32 (stsd_entry_data);
+
+ if (len > 0x24) {
+ GstBuffer *buf = gst_buffer_new_and_alloc (len - 0x24);
+ guint bitrate;
+
+ gst_buffer_fill (buf, 0, stsd_entry_data + 0x24, len - 0x24);
+
+ /* If we have enough data, let's try to get the 'damr' atom. See
+ * the 3GPP container spec (26.244) for more details. */
+ if ((len - 0x34) > 8 &&
+ (bitrate = qtdemux_parse_amr_bitrate (buf, amrwb))) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_MAXIMUM_BITRATE, bitrate, NULL);
+ }
+
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ case FOURCC_mp4a:
+ {
+        /* mp4a atom without ESDS; attempt to build codec data from atom */
+ gint len = QT_UINT32 (stsd_entry_data);
+ guint16 sound_version = 0;
+ /* FIXME: Can this be determined somehow? There doesn't seem to be
+         * anything in mp4a atom that specifies compression */
+ gint profile = 2;
+ guint16 channels = entry->n_channels;
+ guint32 time_scale = (guint32) entry->rate;
+ gint sample_rate_index = -1;
+
+ if (len >= 34) {
+ sound_version = QT_UINT16 (stsd_entry_data + 16);
+
+ if (sound_version == 1) {
+ channels = QT_UINT16 (stsd_entry_data + 24);
+ time_scale = QT_UINT32 (stsd_entry_data + 30);
+ } else {
+ GST_FIXME_OBJECT (qtdemux, "Unhandled mp4a atom version %d",
+ sound_version);
+ }
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "Too small stsd entry data len %d",
+ len);
+ }
+
+ sample_rate_index =
+ gst_codec_utils_aac_get_index_from_sample_rate (time_scale);
+ if (sample_rate_index >= 0 && channels > 0) {
+ guint8 codec_data[2];
+ GstBuffer *buf;
+
+ /* build AAC codec data */
+ codec_data[0] = profile << 3;
+ codec_data[0] |= ((sample_rate_index >> 1) & 0x7);
+ codec_data[1] = (sample_rate_index & 0x01) << 7;
+ codec_data[1] |= (channels & 0xF) << 3;
+
+ buf = gst_buffer_new_and_alloc (2);
+ gst_buffer_fill (buf, 0, codec_data, 2);
+ gst_caps_set_simple (entry->caps,
+ "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+ break;
+ }
+ case FOURCC_lpcm:
+ case FOURCC_in24:
+ case FOURCC_in32:
+ case FOURCC_fl32:
+ case FOURCC_fl64:
+ case FOURCC_s16l:
+ /* Fully handled elsewhere */
+ break;
+ default:
+ GST_INFO_OBJECT (qtdemux,
+ "unhandled type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+ }
+ GST_INFO_OBJECT (qtdemux,
+ "type %" GST_FOURCC_FORMAT " caps %" GST_PTR_FORMAT,
+ GST_FOURCC_ARGS (fourcc), entry->caps);
+
+ } else if (stream->subtype == FOURCC_strm) {
+ if (fourcc == FOURCC_rtsp) {
+ stream->redirect_uri = qtdemux_get_rtsp_uri_from_hndl (qtdemux, minf);
+ } else {
+ GST_INFO_OBJECT (qtdemux, "unhandled stream type %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+ goto unknown_stream;
+ }
+ entry->sampled = TRUE;
+ } else if (stream->subtype == FOURCC_subp || stream->subtype == FOURCC_text
+ || stream->subtype == FOURCC_sbtl || stream->subtype == FOURCC_subt
+ || stream->subtype == FOURCC_clcp) {
+
+ entry->sampled = TRUE;
+ entry->sparse = TRUE;
+
+ entry->caps =
+ qtdemux_sub_caps (qtdemux, stream, entry, fourcc, stsd_entry_data,
+ &codec);
+ if (codec) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_SUBTITLE_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+ }
+
+ /* hunt for sort-of codec data */
+ switch (fourcc) {
+ case FOURCC_mp4s:
+ {
+ GNode *mp4s = NULL;
+ GNode *esds = NULL;
+
+ /* look for palette in a stsd->mp4s->esds sub-atom */
+ mp4s = qtdemux_tree_get_child_by_type (stsd, FOURCC_mp4s);
+ if (mp4s)
+ esds = qtdemux_tree_get_child_by_type (mp4s, FOURCC_esds);
+ if (esds == NULL) {
+ /* Invalid STSD */
+ GST_LOG_OBJECT (qtdemux, "Skipping invalid stsd: no esds child");
+ break;
+ }
+
+ gst_qtdemux_handle_esds (qtdemux, stream, entry, esds,
+ stream->stream_tags);
+ break;
+ }
+ default:
+ GST_INFO_OBJECT (qtdemux,
+ "unhandled type %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
+ break;
+ }
+ GST_INFO_OBJECT (qtdemux,
+ "type %" GST_FOURCC_FORMAT " caps %" GST_PTR_FORMAT,
+ GST_FOURCC_ARGS (fourcc), entry->caps);
+ } else {
+ /* everything in 1 sample */
+ entry->sampled = TRUE;
+
+ entry->caps =
+ qtdemux_generic_caps (qtdemux, stream, entry, fourcc, stsd_entry_data,
+ &codec);
+
+ if (entry->caps == NULL)
+ goto unknown_stream;
+
+ if (codec) {
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_SUBTITLE_CODEC, codec, NULL);
+ g_free (codec);
+ codec = NULL;
+ }
+ }
+
+ /* promote to sampled format */
+ if (entry->fourcc == FOURCC_samr) {
+ /* force mono 8000 Hz for AMR */
+ entry->sampled = TRUE;
+ entry->n_channels = 1;
+ entry->rate = 8000;
+ } else if (entry->fourcc == FOURCC_sawb) {
+ /* force mono 16000 Hz for AMR-WB */
+ entry->sampled = TRUE;
+ entry->n_channels = 1;
+ entry->rate = 16000;
+ } else if (entry->fourcc == FOURCC_mp4a) {
+ entry->sampled = TRUE;
+ }
+
+
+ stsd_entry_data += len;
+ remaining_stsd_len -= len;
+
+ }
+
+ /* collect sample information */
+ if (!qtdemux_stbl_init (qtdemux, stream, stbl))
+ goto samples_failed;
+
+ if (qtdemux->fragmented) {
+ guint64 offset;
+
+ /* need all moov samples as basis; probably not many if any at all */
+    /* prevent moof parsing from taking off at this time */
+ offset = qtdemux->moof_offset;
+ qtdemux->moof_offset = 0;
+ if (stream->n_samples &&
+ !qtdemux_parse_samples (qtdemux, stream, stream->n_samples - 1)) {
+ qtdemux->moof_offset = offset;
+ goto samples_failed;
+ }
+ qtdemux->moof_offset = offset;
+ /* movie duration more reliable in this case (e.g. mehd) */
+ if (qtdemux->segment.duration &&
+ GST_CLOCK_TIME_IS_VALID (qtdemux->segment.duration))
+ stream->duration =
+ GSTTIME_TO_QTSTREAMTIME (stream, qtdemux->segment.duration);
+ }
+
+ /* configure segments */
+ if (!qtdemux_parse_segments (qtdemux, stream, trak))
+ goto segments_failed;
+
+ /* add some language tag, if useful */
+ if (stream->lang_id[0] != '\0' && strcmp (stream->lang_id, "unk") &&
+ strcmp (stream->lang_id, "und")) {
+ const gchar *lang_code;
+
+ /* convert ISO 639-2 code to ISO 639-1 */
+ lang_code = gst_tag_get_language_code (stream->lang_id);
+ gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_LANGUAGE_CODE, (lang_code) ? lang_code : stream->lang_id, NULL);
+ }
+
+ /* Check for UDTA tags */
+ if ((udta = qtdemux_tree_get_child_by_type (trak, FOURCC_udta))) {
+ qtdemux_parse_udta (qtdemux, stream->stream_tags, udta);
+ }
+
+ /* Insert and sort new stream in track-id order.
+ * This will help in comparing old/new streams during stream update check */
+ g_ptr_array_add (qtdemux->active_streams, stream);
+ g_ptr_array_sort (qtdemux->active_streams,
+ (GCompareFunc) qtdemux_track_id_compare_func);
+ GST_DEBUG_OBJECT (qtdemux, "n_streams is now %d",
+ QTDEMUX_N_STREAMS (qtdemux));
+
+ return TRUE;
+
+/* ERRORS */
+corrupt_file:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DEMUX,
+ (_("This file is corrupt and cannot be played.")), (NULL));
+ if (stream)
+ gst_qtdemux_stream_unref (stream);
+ return FALSE;
+ }
+error_encrypted:
+ {
+ GST_ELEMENT_ERROR (qtdemux, STREAM, DECRYPT, (NULL), (NULL));
+ gst_qtdemux_stream_unref (stream);
+ return FALSE;
+ }
+samples_failed:
+segments_failed:
+ {
+ /* we posted an error already */
+ /* free stbl sub-atoms */
+ gst_qtdemux_stbl_free (stream);
+ gst_qtdemux_stream_unref (stream);
+ return FALSE;
+ }
+existing_stream:
+ {
+ GST_INFO_OBJECT (qtdemux, "stream with track id %i already exists",
+ track_id);
+ return TRUE;
+ }
+unknown_stream:
+ {
+ GST_INFO_OBJECT (qtdemux, "unknown subtype %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (stream->subtype));
+ gst_qtdemux_stream_unref (stream);
+ return TRUE;
+ }
+}
+
+/* If we can estimate the overall bitrate, and don't have information about the
+ * stream bitrate for exactly one stream, this guesses the stream bitrate as
+ * the overall bitrate minus the sum of the bitrates of all other streams. This
+ * should be useful for the common case where we have one audio and one video
+ * stream and can estimate the bitrate of one, but not the other. */
+static void
+gst_qtdemux_guess_bitrate (GstQTDemux * qtdemux)
+{
+  QtDemuxStream *stream = NULL;
+  gint64 size, sys_bitrate, sum_bitrate = 0;
+  GstClockTime duration;
+  guint bitrate;
+  gint i;
+
+  /* In fragmented files the moov-based size/duration are not a useful
+   * proxy for the payload, so don't guess */
+  if (qtdemux->fragmented)
+    return;
+
+  GST_DEBUG_OBJECT (qtdemux, "Looking for streams with unknown bitrate");
+
+  if (!gst_pad_peer_query_duration (qtdemux->sinkpad, GST_FORMAT_BYTES, &size)
+      || size <= 0) {
+    GST_DEBUG_OBJECT (qtdemux,
+        "Size in bytes of the stream not known - bailing");
+    return;
+  }
+
+  /* Subtract the header size */
+  GST_DEBUG_OBJECT (qtdemux, "Total size %" G_GINT64_FORMAT ", header size %u",
+      size, qtdemux->header_size);
+
+  if (size < qtdemux->header_size)
+    return;
+
+  size = size - qtdemux->header_size;
+
+  if (!gst_qtdemux_get_duration (qtdemux, &duration)) {
+    GST_DEBUG_OBJECT (qtdemux, "Stream duration not known - bailing");
+    return;
+  }
+
+  /* gst_util_uint64_scale() requires a non-zero denominator, and an
+   * invalid (GST_CLOCK_TIME_NONE) duration would yield a bogus bitrate */
+  if (duration == 0 || !GST_CLOCK_TIME_IS_VALID (duration)) {
+    GST_DEBUG_OBJECT (qtdemux, "Stream duration invalid - bailing");
+    return;
+  }
+
+  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+    QtDemuxStream *str = QTDEMUX_NTH_STREAM (qtdemux, i);
+    switch (str->subtype) {
+      case FOURCC_soun:
+      case FOURCC_vide:
+        GST_DEBUG_OBJECT (qtdemux, "checking bitrate for %" GST_PTR_FORMAT,
+            CUR_STREAM (str)->caps);
+        /* retrieve bitrate, prefer avg then max: each successful lookup
+         * overwrites the previous one, so the last tag checked wins */
+        bitrate = 0;
+        if (str->stream_tags) {
+          if (gst_tag_list_get_uint (str->stream_tags,
+                  GST_TAG_MAXIMUM_BITRATE, &bitrate))
+            GST_DEBUG_OBJECT (qtdemux, "max-bitrate: %u", bitrate);
+          if (gst_tag_list_get_uint (str->stream_tags,
+                  GST_TAG_NOMINAL_BITRATE, &bitrate))
+            GST_DEBUG_OBJECT (qtdemux, "nominal-bitrate: %u", bitrate);
+          if (gst_tag_list_get_uint (str->stream_tags,
+                  GST_TAG_BITRATE, &bitrate))
+            GST_DEBUG_OBJECT (qtdemux, "bitrate: %u", bitrate);
+        }
+        if (bitrate)
+          sum_bitrate += bitrate;
+        else {
+          if (stream) {
+            GST_DEBUG_OBJECT (qtdemux,
+                ">1 stream with unknown bitrate - bailing");
+            return;
+          } else
+            stream = str;
+        }
+        /* explicit break: previously this case fell through into the
+         * default label (harmless, but triggers -Wimplicit-fallthrough) */
+        break;
+
+      default:
+        /* For other subtypes, we assume no significant impact on bitrate */
+        break;
+    }
+  }
+
+  if (!stream) {
+    GST_DEBUG_OBJECT (qtdemux, "All stream bitrates are known");
+    return;
+  }
+
+  sys_bitrate = gst_util_uint64_scale (size, GST_SECOND * 8, duration);
+
+  if (sys_bitrate < sum_bitrate) {
+    /* This can happen, since sum_bitrate might be derived from maximum
+     * bitrates and not average bitrates */
+    GST_DEBUG_OBJECT (qtdemux,
+        "System bitrate less than sum bitrate - bailing");
+    return;
+  }
+
+  bitrate = sys_bitrate - sum_bitrate;
+  GST_DEBUG_OBJECT (qtdemux, "System bitrate = %" G_GINT64_FORMAT
+      ", Stream bitrate = %u", sys_bitrate, bitrate);
+
+  if (!stream->stream_tags)
+    stream->stream_tags = gst_tag_list_new_empty ();
+  else
+    stream->stream_tags = gst_tag_list_make_writable (stream->stream_tags);
+
+  gst_tag_list_add (stream->stream_tags, GST_TAG_MERGE_REPLACE,
+      GST_TAG_BITRATE, bitrate, NULL);
+}
+
+/* Ensure sample information is available for each active stream, and drop
+ * streams that cannot be used: streams with no samples in pull mode, and the
+ * chapters text track.  Returns GST_FLOW_ERROR only for hard errors; any
+ * other non-OK flow return seen while gathering fragmented samples (e.g. a
+ * flow return from qtdemux_add_fragmented_samples()) is mapped back to
+ * GST_FLOW_OK. */
+static GstFlowReturn
+qtdemux_prepare_streams (GstQTDemux * qtdemux)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  gint i;
+
+  GST_DEBUG_OBJECT (qtdemux, "prepare %u streams", QTDEMUX_N_STREAMS (qtdemux));
+
+  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+    QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+    guint32 sample_num = 0;
+
+    GST_DEBUG_OBJECT (qtdemux, "track-id %u, fourcc %" GST_FOURCC_FORMAT,
+        stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+
+    if (qtdemux->fragmented && qtdemux->pullbased) {
+      /* need all moov samples first; keep pulling fragments until this
+       * stream has at least one sample or a fragment parse fails */
+      GST_OBJECT_LOCK (qtdemux);
+      while (stream->n_samples == 0)
+        if ((ret = qtdemux_add_fragmented_samples (qtdemux)) != GST_FLOW_OK)
+          break;
+      GST_OBJECT_UNLOCK (qtdemux);
+    } else {
+      /* discard any stray moof */
+      qtdemux->moof_offset = 0;
+    }
+
+    /* tolerate non-fatal flow returns from the fragment loop above;
+     * only a hard GST_FLOW_ERROR is propagated to the caller */
+    if (ret != GST_FLOW_ERROR)
+      ret = GST_FLOW_OK;
+
+    /* in pull mode, we should have parsed some sample info by now;
+     * and quite some code will not handle no samples.
+     * in push mode, we'll just have to deal with it */
+    if (G_UNLIKELY (qtdemux->pullbased && !stream->n_samples)) {
+      GST_DEBUG_OBJECT (qtdemux, "no samples for stream; discarding");
+      /* removing shifts later entries down, so step i back to revisit
+       * the element that now occupies this index */
+      g_ptr_array_remove_index (qtdemux->active_streams, i);
+      i--;
+      continue;
+    } else if (stream->track_id == qtdemux->chapters_track_id &&
+        (stream->subtype == FOURCC_text || stream->subtype == FOURCC_sbtl)) {
+      /* TODO - parse chapters track and expose it as GstToc; For now just ignore it
+         so that it doesn't look like a subtitle track */
+      g_ptr_array_remove_index (qtdemux->active_streams, i);
+      i--;
+      continue;
+    }
+
+    /* parse the initial sample for use in setting the frame rate cap */
+    /* NOTE(review): the sample_num == 0 condition means this loop runs at
+     * most once, i.e. only the first sample is parsed here */
+    while (sample_num == 0 && sample_num < stream->n_samples) {
+      if (!qtdemux_parse_samples (qtdemux, stream, sample_num))
+        break;
+      ++sample_num;
+    }
+  }
+
+  return ret;
+}
+
+static gboolean
+_stream_equal_func (const QtDemuxStream * stream, const gchar * stream_id)
+{
+  /* Equality helper for g_ptr_array_find_with_equal_func(): TRUE when
+   * @stream carries exactly @stream_id (NULL-safe via g_strcmp0) */
+  if (g_strcmp0 (stream->stream_id, stream_id) != 0)
+    return FALSE;
+
+  return TRUE;
+}
+
+/* Decide whether the freshly parsed stream set differs from the previous
+ * one.  Returns TRUE (update) on a stream-count mismatch or any stream-id
+ * mismatch, FALSE when every position carries the same stream-id. */
+static gboolean
+qtdemux_is_streams_update (GstQTDemux * qtdemux)
+{
+  gint idx;
+
+  /* A different number of streams always means an update */
+  if (QTDEMUX_N_STREAMS (qtdemux) != qtdemux->old_streams->len)
+    return TRUE;
+
+  /* Both lists are sorted in track-id order, so compare them position by
+   * position */
+  for (idx = 0; idx < QTDEMUX_N_STREAMS (qtdemux); idx++) {
+    const gchar *new_id = QTDEMUX_NTH_STREAM (qtdemux, idx)->stream_id;
+    const gchar *old_id = QTDEMUX_NTH_OLD_STREAM (qtdemux, idx)->stream_id;
+
+    if (g_strcmp0 (new_id, old_id) != 0)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Transfer the already-exposed srcpad from @oldstream (previous moov) to
+ * @newstream (current moov) and (re)configure the new stream, so downstream
+ * keeps seeing the same pad across a stream update.  @oldstream's pad
+ * pointer is cleared to hand over ownership.  Returns the result of
+ * gst_qtdemux_configure_stream(). */
+static gboolean
+qtdemux_reuse_and_configure_stream (GstQTDemux * qtdemux,
+    QtDemuxStream * oldstream, QtDemuxStream * newstream)
+{
+  /* Connect old stream's srcpad to new stream */
+  newstream->pad = oldstream->pad;
+  oldstream->pad = NULL;
+
+  /* unset new_stream to prevent stream-start event, unless we are EOS in which
+   * case we need to force one through */
+  newstream->new_stream = GST_PAD_IS_EOS (newstream->pad);
+
+  return gst_qtdemux_configure_stream (qtdemux, newstream);
+}
+
+/* Expose the streams parsed from the latest moov.  A new stream whose
+ * stream-id matches an old stream that still owns a pad is not exposed
+ * again; instead the old stream's srcpad is reused.  Returns FALSE if
+ * configuring or adding any stream fails.  Only valid for streams-aware
+ * downstream (asserted). */
+static gboolean
+qtdemux_update_streams (GstQTDemux * qtdemux)
+{
+  gint i;
+  g_assert (qtdemux->streams_aware);
+
+  /* Figure out which stream in active_streams has a stream-id identical to
+   * one in old_streams.  When there is a match, the corresponding new
+   * stream is not exposed again; the demuxer reuses the srcpad of the
+   * matched old stream instead.
+   *
+   * active_streams : newly created streams from the latest moov
+   * old_streams : existing streams (belong to previous moov)
+   */
+
+  for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+    QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+    QtDemuxStream *oldstream = NULL;
+    guint target;
+
+    GST_DEBUG_OBJECT (qtdemux, "track-id %u, fourcc %" GST_FOURCC_FORMAT,
+        stream->track_id, GST_FOURCC_ARGS (CUR_STREAM (stream)->fourcc));
+
+    if (g_ptr_array_find_with_equal_func (qtdemux->old_streams,
+            stream->stream_id, (GEqualFunc) _stream_equal_func, &target)) {
+      oldstream = QTDEMUX_NTH_OLD_STREAM (qtdemux, target);
+
+      /* null pad stream cannot be reused */
+      if (oldstream->pad == NULL)
+        oldstream = NULL;
+    }
+
+    if (oldstream) {
+      GST_DEBUG_OBJECT (qtdemux, "Reuse track-id %d", oldstream->track_id);
+
+      if (!qtdemux_reuse_and_configure_stream (qtdemux, oldstream, stream))
+        return FALSE;
+
+      /* we don't need to preserve order of old streams */
+      g_ptr_array_remove_fast (qtdemux->old_streams, oldstream);
+    } else {
+      GstTagList *list;
+
+      /* now we have all info and can expose; ownership of the stream tags
+       * is handed over to gst_qtdemux_add_stream() */
+      list = stream->stream_tags;
+      stream->stream_tags = NULL;
+      if (!gst_qtdemux_add_stream (qtdemux, stream, list))
+        return FALSE;
+    }
+  }
+
+  return TRUE;
+}
+
+/* Expose (or reuse) source pads for all active streams, push EOS on pads of
+ * old streams that are no longer used, post a redirect message when the
+ * single track is a redirecting track, and run allocation queries on all
+ * active streams.  Must be called with the expose lock held. */
+static GstFlowReturn
+qtdemux_expose_streams (GstQTDemux * qtdemux)
+{
+  gint i;
+
+  GST_DEBUG_OBJECT (qtdemux, "exposing streams");
+
+  /* Fast path: identical stream set as before, reuse every pad in place */
+  if (!qtdemux_is_streams_update (qtdemux)) {
+    GST_DEBUG_OBJECT (qtdemux, "Reuse all streams");
+    for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+      QtDemuxStream *new_stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+      QtDemuxStream *old_stream = QTDEMUX_NTH_OLD_STREAM (qtdemux, i);
+      if (!qtdemux_reuse_and_configure_stream (qtdemux, old_stream, new_stream))
+        return GST_FLOW_ERROR;
+    }
+
+    g_ptr_array_set_size (qtdemux->old_streams, 0);
+    qtdemux->need_segment = TRUE;
+
+    return GST_FLOW_OK;
+  }
+
+  if (qtdemux->streams_aware) {
+    if (!qtdemux_update_streams (qtdemux))
+      return GST_FLOW_ERROR;
+  } else {
+    for (i = 0; i < QTDEMUX_N_STREAMS (qtdemux); i++) {
+      QtDemuxStream *stream = QTDEMUX_NTH_STREAM (qtdemux, i);
+      GstTagList *list;
+
+      /* now we have all info and can expose; stream tags ownership is
+       * passed to gst_qtdemux_add_stream() */
+      list = stream->stream_tags;
+      stream->stream_tags = NULL;
+      if (!gst_qtdemux_add_stream (qtdemux, stream, list))
+        return GST_FLOW_ERROR;
+
+    }
+  }
+
+  gst_qtdemux_guess_bitrate (qtdemux);
+
+  gst_element_no_more_pads (GST_ELEMENT_CAST (qtdemux));
+
+  /* Any stream still left in old_streams is no longer used: push EOS
+   * (tagged with the segment seqnum, if any) on its pad */
+  for (i = 0; i < qtdemux->old_streams->len; i++) {
+    QtDemuxStream *stream = QTDEMUX_NTH_OLD_STREAM (qtdemux, i);
+
+    if (stream->pad) {
+      GstEvent *event;
+
+      event = gst_event_new_eos ();
+      if (qtdemux->segment_seqnum)
+        gst_event_set_seqnum (event, qtdemux->segment_seqnum);
+
+      gst_pad_push_event (stream->pad, event);
+    }
+  }
+
+  g_ptr_array_set_size (qtdemux->old_streams, 0);
+
+  /* check if we should post a redirect in case there is a single trak
+   * and it is a redirecting trak */
+  if (QTDEMUX_N_STREAMS (qtdemux) == 1 &&
+      QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri != NULL) {
+    GstMessage *m;
+
+    GST_INFO_OBJECT (qtdemux, "Issuing a redirect due to a single track with "
+        "an external content");
+    m = gst_message_new_element (GST_OBJECT_CAST (qtdemux),
+        gst_structure_new ("redirect",
+            "new-location", G_TYPE_STRING,
+            QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri, NULL));
+    gst_element_post_message (GST_ELEMENT_CAST (qtdemux), m);
+    g_free (qtdemux->redirect_location);
+    qtdemux->redirect_location =
+        g_strdup (QTDEMUX_NTH_STREAM (qtdemux, 0)->redirect_uri);
+  }
+
+  /* negotiate buffer pools / allocators per stream */
+  g_ptr_array_foreach (qtdemux->active_streams,
+      (GFunc) qtdemux_do_allocation, qtdemux);
+
+  qtdemux->need_segment = TRUE;
+
+  qtdemux->exposed = TRUE;
+  return GST_FLOW_OK;
+}
+
+/* One candidate redirect location together with the requirements used to
+ * rank it (see qtdemux_redirects_sort_func / qtdemux_process_redirects). */
+typedef struct
+{
+  GstStructure *structure;      /* helper for sort function */
+  gchar *location;              /* redirect target URI (owned; freed in
+                                 * qtdemux_process_redirects) */
+  guint min_req_bitrate;        /* minimum required bitrate, 0 = unknown */
+  guint min_req_qt_version;     /* minimum required QT version; higher
+                                 * requirements sort first */
+} GstQtReference;
+
+/* GCompareFunc for sorting GstQtReference entries: newer minimum QT
+ * version requirements first, then higher bitrates before lower/unknown. */
+static gint
+qtdemux_redirects_sort_func (gconstpointer a, gconstpointer b)
+{
+  const GstQtReference *ref_a = (const GstQtReference *) a;
+  const GstQtReference *ref_b = (const GstQtReference *) b;
+
+  if (ref_a->min_req_qt_version != ref_b->min_req_qt_version)
+    return ref_b->min_req_qt_version - ref_a->min_req_qt_version;
+
+  /* known bitrates go before unknown; higher bitrates go first */
+  return ref_b->min_req_bitrate - ref_a->min_req_bitrate;
+}
+
/* sort the redirects and post a message for the application.
 *
 * Takes ownership of @references and every GstQtReference in it (their
 * locations and structures are freed here). The posted element message is
 * named "redirect"; its toplevel fields describe the best candidate and,
 * when there is more than one, a "locations" list holds all of them.
 * Also records the best location in qtdemux->redirect_location.
 */
static void
qtdemux_process_redirects (GstQTDemux * qtdemux, GList * references)
{
  GstQtReference *best;
  GstStructure *s;
  GstMessage *msg;
  GValue list_val = { 0, };
  GList *l;

  g_assert (references != NULL);

  /* best candidate first (see qtdemux_redirects_sort_func) */
  references = g_list_sort (references, qtdemux_redirects_sort_func);

  best = (GstQtReference *) references->data;

  g_value_init (&list_val, GST_TYPE_LIST);

  for (l = references; l != NULL; l = l->next) {
    GstQtReference *ref = (GstQtReference *) l->data;
    GValue struct_val = { 0, };

    ref->structure = gst_structure_new ("redirect",
        "new-location", G_TYPE_STRING, ref->location, NULL);

    if (ref->min_req_bitrate > 0) {
      gst_structure_set (ref->structure, "minimum-bitrate", G_TYPE_INT,
          ref->min_req_bitrate, NULL);
    }

    /* g_value_set_boxed() copies the structure into the list value */
    g_value_init (&struct_val, GST_TYPE_STRUCTURE);
    g_value_set_boxed (&struct_val, ref->structure);
    gst_value_list_append_value (&list_val, &struct_val);
    g_value_unset (&struct_val);
    /* don't free anything here yet, since we need best->structure below */
  }

  g_assert (best != NULL);
  s = gst_structure_copy (best->structure);

  if (g_list_length (references) > 1) {
    gst_structure_set_value (s, "locations", &list_val);
  }

  g_value_unset (&list_val);

  /* everything needed was copied into @s; release the reference list now */
  for (l = references; l != NULL; l = l->next) {
    GstQtReference *ref = (GstQtReference *) l->data;

    gst_structure_free (ref->structure);
    g_free (ref->location);
    g_free (ref);
  }
  g_list_free (references);

  GST_INFO_OBJECT (qtdemux, "posting redirect message: %" GST_PTR_FORMAT, s);
  /* remember the chosen location on the demuxer */
  g_free (qtdemux->redirect_location);
  qtdemux->redirect_location =
      g_strdup (gst_structure_get_string (s, "new-location"));
  msg = gst_message_new_element (GST_OBJECT_CAST (qtdemux), s);
  gst_element_post_message (GST_ELEMENT_CAST (qtdemux), msg);
}
+
/* look for redirect nodes, collect all redirect information and
 * process it.
 * Always returns TRUE, even when no redirect atoms were found.
 */
static gboolean
qtdemux_parse_redirects (GstQTDemux * qtdemux)
{
  GNode *rmra, *rmda, *rdrf;

  /* reference-movie container: one 'rmda' child per alternative location */
  rmra = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_rmra);
  if (rmra) {
    GList *redirects = NULL;

    rmda = qtdemux_tree_get_child_by_type (rmra, FOURCC_rmda);
    while (rmda) {
      GstQtReference ref = { NULL, NULL, 0, 0 };
      GNode *rmdr, *rmvc;

      /* optional data-rate atom: minimum bitrate required for this entry */
      if ((rmdr = qtdemux_tree_get_child_by_type (rmda, FOURCC_rmdr))) {
        ref.min_req_bitrate = QT_UINT32 ((guint8 *) rmdr->data + 12);
        GST_LOG_OBJECT (qtdemux, "data rate atom, required bitrate = %u",
            ref.min_req_bitrate);
      }

      /* optional software version check atom */
      if ((rmvc = qtdemux_tree_get_child_by_type (rmda, FOURCC_rmvc))) {
        guint32 package = QT_FOURCC ((guint8 *) rmvc->data + 12);
        guint version = QT_UINT32 ((guint8 *) rmvc->data + 16);

#ifndef GST_DISABLE_GST_DEBUG
        guint bitmask = QT_UINT32 ((guint8 *) rmvc->data + 20);
#endif
        guint check_type = QT_UINT16 ((guint8 *) rmvc->data + 24);

        GST_LOG_OBJECT (qtdemux,
            "version check atom [%" GST_FOURCC_FORMAT "], version=0x%08x"
            ", mask=%08x, check_type=%u", GST_FOURCC_ARGS (package), version,
            bitmask, check_type);
        /* only QuickTime ('qtim') minimum-version checks are recorded */
        if (package == FOURCC_qtim && check_type == 0) {
          ref.min_req_qt_version = version;
        }
      }

      /* the data-reference atom holds the actual redirect target */
      rdrf = qtdemux_tree_get_child_by_type (rmda, FOURCC_rdrf);
      if (rdrf) {
        guint32 ref_type;
        guint8 *ref_data;
        guint ref_len;

        ref_len = QT_UINT32 ((guint8 *) rdrf->data);
        if (ref_len > 20) {
          ref_type = QT_FOURCC ((guint8 *) rdrf->data + 12);
          ref_data = (guint8 *) rdrf->data + 20;
          if (ref_type == FOURCC_alis) {
            guint record_len, record_version, fn_len;

            if (ref_len > 70) {
              /* MacOSX alias record, google for alias-layout.txt */
              record_len = QT_UINT16 (ref_data + 4);
              record_version = QT_UINT16 (ref_data + 4 + 2);
              /* filename is a pascal-style string: length byte at offset 50 */
              fn_len = QT_UINT8 (ref_data + 50);
              if (record_len > 50 && record_version == 2 && fn_len > 0) {
                ref.location = g_strndup ((gchar *) ref_data + 51, fn_len);
              }
            } else {
              GST_WARNING_OBJECT (qtdemux, "Invalid rdrf/alis size (%u < 70)",
                  ref_len);
            }
          } else if (ref_type == FOURCC_url_) {
            ref.location = g_strndup ((gchar *) ref_data, ref_len - 8);
          } else {
            GST_DEBUG_OBJECT (qtdemux,
                "unknown rdrf reference type %" GST_FOURCC_FORMAT,
                GST_FOURCC_ARGS (ref_type));
          }
          if (ref.location != NULL) {
            GST_INFO_OBJECT (qtdemux, "New location: %s", ref.location);
            /* heap-copy the stack struct; freed in process_redirects() */
            redirects =
                g_list_prepend (redirects, g_memdup2 (&ref, sizeof (ref)));
          } else {
            GST_WARNING_OBJECT (qtdemux,
                "Failed to extract redirect location from rdrf atom");
          }
        } else {
          GST_WARNING_OBJECT (qtdemux, "Invalid rdrf size (%u < 20)", ref_len);
        }
      }

      /* look for others */
      rmda = qtdemux_tree_get_sibling_by_type (rmda, FOURCC_rmda);
    }

    if (redirects != NULL) {
      /* takes ownership of the list and all entries */
      qtdemux_process_redirects (qtdemux, redirects);
    }
  }
  return TRUE;
}
+
+static GstTagList *
+qtdemux_add_container_format (GstQTDemux * qtdemux, GstTagList * tags)
+{
+ const gchar *fmt;
+
+ if (tags == NULL) {
+ tags = gst_tag_list_new_empty ();
+ gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
+ }
+
+ if (qtdemux->major_brand == FOURCC_mjp2)
+ fmt = "Motion JPEG 2000";
+ else if ((qtdemux->major_brand & 0xffff) == FOURCC_3g__)
+ fmt = "3GP";
+ else if (qtdemux->major_brand == FOURCC_qt__)
+ fmt = "Quicktime";
+ else if (qtdemux->fragmented)
+ fmt = "ISO fMP4";
+ else
+ fmt = "ISO MP4/M4A";
+
+ GST_LOG_OBJECT (qtdemux, "mapped %" GST_FOURCC_FORMAT " to '%s'",
+ GST_FOURCC_ARGS (qtdemux->major_brand), fmt);
+
+ gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE, GST_TAG_CONTAINER_FORMAT,
+ fmt, NULL);
+
+ return tags;
+}
+
/* we have read the complete moov node now.
 * This function parses all of the relevant info, creates the traks and
 * prepares all data structures for playback.
 * Returns FALSE only for an unhandled mvhd version; a missing mvhd falls
 * back to redirect parsing.
 */
static gboolean
qtdemux_parse_tree (GstQTDemux * qtdemux)
{
  GNode *mvhd;
  GNode *trak;
  GNode *udta;
  GNode *mvex;
  GNode *pssh;
  guint64 creation_time;
  GstDateTime *datetime = NULL;
  gint version;

  /* make sure we have a usable taglist */
  qtdemux->tag_list = gst_tag_list_make_writable (qtdemux->tag_list);

  mvhd = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_mvhd);
  if (mvhd == NULL) {
    GST_LOG_OBJECT (qtdemux, "No mvhd node found, looking for redirects.");
    return qtdemux_parse_redirects (qtdemux);
  }

  /* mvhd version selects the field widths (version 1 uses 64-bit times) */
  version = QT_UINT8 ((guint8 *) mvhd->data + 8);
  if (version == 1) {
    creation_time = QT_UINT64 ((guint8 *) mvhd->data + 12);
    qtdemux->timescale = QT_UINT32 ((guint8 *) mvhd->data + 28);
    qtdemux->duration = QT_UINT64 ((guint8 *) mvhd->data + 32);
  } else if (version == 0) {
    creation_time = QT_UINT32 ((guint8 *) mvhd->data + 12);
    qtdemux->timescale = QT_UINT32 ((guint8 *) mvhd->data + 20);
    qtdemux->duration = QT_UINT32 ((guint8 *) mvhd->data + 24);
  } else {
    GST_WARNING_OBJECT (qtdemux, "Unhandled mvhd version %d", version);
    return FALSE;
  }

  /* Moving qt creation time (secs since 1904) to unix time */
  if (creation_time != 0) {
    /* Try to use epoch first as it should be faster and more commonly found */
    if (creation_time >= QTDEMUX_SECONDS_FROM_1904_TO_1970) {
      gint64 now_s;

      creation_time -= QTDEMUX_SECONDS_FROM_1904_TO_1970;
      /* some data cleansing sanity */
      now_s = g_get_real_time () / G_USEC_PER_SEC;
      /* reject timestamps more than a day in the future */
      if (now_s + 24 * 3600 < creation_time) {
        GST_DEBUG_OBJECT (qtdemux, "discarding bogus future creation time");
      } else {
        datetime = gst_date_time_new_from_unix_epoch_utc (creation_time);
      }
    } else {
      /* pre-1970 timestamp: compute it via GDateTime arithmetic instead */
      GDateTime *base_dt = g_date_time_new_utc (1904, 1, 1, 0, 0, 0);
      GDateTime *dt, *dt_local;

      dt = g_date_time_add_seconds (base_dt, creation_time);
      dt_local = g_date_time_to_local (dt);
      /* gst_date_time_new_from_g_date_time() takes ownership of dt_local */
      datetime = gst_date_time_new_from_g_date_time (dt_local);

      g_date_time_unref (base_dt);
      g_date_time_unref (dt);
    }
  }
  if (datetime) {
    /* Use KEEP as explicit tags should have a higher priority than mvhd tag */
    gst_tag_list_add (qtdemux->tag_list, GST_TAG_MERGE_KEEP, GST_TAG_DATE_TIME,
        datetime, NULL);
    gst_date_time_unref (datetime);
  }

  GST_INFO_OBJECT (qtdemux, "timescale: %u", qtdemux->timescale);
  GST_INFO_OBJECT (qtdemux, "duration: %" G_GUINT64_FORMAT, qtdemux->duration);

  /* check for fragmented file and get some (default) data */
  mvex = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_mvex);
  if (mvex) {
    GNode *mehd;
    GstByteReader mehd_data;

    /* let track parsing or anyone know weird stuff might happen ... */
    qtdemux->fragmented = TRUE;

    /* compensate for total duration */
    mehd = qtdemux_tree_get_child_by_type_full (mvex, FOURCC_mehd, &mehd_data);
    if (mehd)
      qtdemux_parse_mehd (qtdemux, &mehd_data);
  }

  /* Update the movie segment duration, unless it was directly given to us
   * by upstream. Otherwise let it as is, as we don't want to mangle the
   * duration provided by upstream that may come e.g. from a MPD file. */
  if (!qtdemux->upstream_format_is_time) {
    GstClockTime duration;
    /* set duration in the segment info */
    gst_qtdemux_get_duration (qtdemux, &duration);
    qtdemux->segment.duration = duration;
    /* also do not exceed duration; stop is set that way post seek anyway,
     * and segment activation falls back to duration,
     * whereas loop only checks stop, so let's align this here as well */
    qtdemux->segment.stop = duration;
  }

  /* parse all traks */
  trak = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_trak);
  while (trak) {
    qtdemux_parse_trak (qtdemux, trak);
    /* iterate all siblings */
    trak = qtdemux_tree_get_sibling_by_type (trak, FOURCC_trak);
  }

  qtdemux->tag_list = gst_tag_list_make_writable (qtdemux->tag_list);

  /* find tags */
  udta = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_udta);
  if (udta) {
    qtdemux_parse_udta (qtdemux, qtdemux->tag_list, udta);
  } else {
    GST_LOG_OBJECT (qtdemux, "No udta node found.");
  }

  /* maybe also some tags in meta box */
  udta = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_meta);
  if (udta) {
    GST_DEBUG_OBJECT (qtdemux, "Parsing meta box for tags.");
    qtdemux_parse_udta (qtdemux, qtdemux->tag_list, udta);
  } else {
    GST_LOG_OBJECT (qtdemux, "No meta node found.");
  }

  /* parse any protection system info */
  pssh = qtdemux_tree_get_child_by_type (qtdemux->moov_node, FOURCC_pssh);
  while (pssh) {
    GST_LOG_OBJECT (qtdemux, "Parsing pssh box.");
    qtdemux_parse_pssh (qtdemux, pssh);
    pssh = qtdemux_tree_get_sibling_by_type (pssh, FOURCC_pssh);
  }

  qtdemux->tag_list = qtdemux_add_container_format (qtdemux, qtdemux->tag_list);

  return TRUE;
}
+
+/* taken from ffmpeg */
+static int
+read_descr_size (guint8 * ptr, guint8 * end, guint8 ** end_out)
+{
+ int count = 4;
+ int len = 0;
+
+ while (count--) {
+ int c;
+
+ if (ptr >= end)
+ return -1;
+
+ c = *ptr++;
+ len = (len << 7) | (c & 0x7f);
+ if (!(c & 0x80))
+ break;
+ }
+ *end_out = ptr;
+ return len;
+}
+
/* Split Xiph-style codec data into its individual stream-header buffers.
 * Layout: first byte = number of packets minus one, then Xiph lacing
 * (length bytes summed, 0xff means "continue") for all but the last
 * packet, whose size is whatever remains. Returns a GList of GstBuffer
 * owned by the caller, or NULL on malformed data. */
static GList *
parse_xiph_stream_headers (GstQTDemux * qtdemux, gpointer codec_data,
    gsize codec_data_size)
{
  GList *list = NULL;
  guint8 *p = codec_data;
  gint i, offset, num_packets;
  guint *length, last;

  GST_MEMDUMP_OBJECT (qtdemux, "xiph codec data", codec_data, codec_data_size);

  if (codec_data == NULL || codec_data_size == 0)
    goto error;

  /* start of the stream and vorbis audio or theora video, need to
   * send the codec_priv data as first three packets */
  num_packets = p[0] + 1;
  GST_DEBUG_OBJECT (qtdemux,
      "%u stream headers, total length=%" G_GSIZE_FORMAT " bytes",
      (guint) num_packets, codec_data_size);

  /* Let's put some limits, Don't think there even is a xiph codec
   * with more than 3-4 headers */
  if (G_UNLIKELY (num_packets > 16)) {
    GST_WARNING_OBJECT (qtdemux,
        "Unlikely number of xiph headers, most likely not valid");
    goto error;
  }

  /* stack-allocated length table; num_packets is bounded above by 16 */
  length = g_alloca (num_packets * sizeof (guint));
  last = 0;
  offset = 1;

  /* first packets, read length values */
  for (i = 0; i < num_packets - 1; i++) {
    length[i] = 0;
    while (offset < codec_data_size) {
      length[i] += p[offset];
      /* 0xff means the length continues into the next byte */
      if (p[offset++] != 0xff)
        break;
    }
    last += length[i];
  }
  /* all declared packet data must fit inside the codec data */
  if (offset + last > codec_data_size)
    goto error;

  /* last packet is the remaining size */
  length[i] = codec_data_size - offset - last;

  for (i = 0; i < num_packets; i++) {
    GstBuffer *hdr;

    GST_DEBUG_OBJECT (qtdemux, "buffer %d: %u bytes", i, (guint) length[i]);

    if (offset + length[i] > codec_data_size)
      goto error;

    hdr = gst_buffer_new_memdup (p + offset, length[i]);
    list = g_list_append (list, hdr);

    offset += length[i];
  }

  return list;

  /* ERRORS */
error:
  {
    if (list != NULL)
      g_list_free_full (list, (GDestroyNotify) gst_buffer_unref);
    return NULL;
  }

}
+
/* this can change the codec originally present in @list */
/* Parse an 'esds' atom (MPEG-4 elementary stream descriptor): walk the
 * descriptor chain to pick up bitrate tags, the object type id, stream
 * type and any decoder-specific data; then, based on the object type,
 * possibly replace @entry's caps and codec tag and attach the
 * decoder-specific data as "codec_data" on the caps. */
static void
gst_qtdemux_handle_esds (GstQTDemux * qtdemux, QtDemuxStream * stream,
    QtDemuxStreamStsdEntry * entry, GNode * esds, GstTagList * list)
{
  int len = QT_UINT32 (esds->data);
  guint8 *ptr = esds->data;
  guint8 *end = ptr + len;
  int tag;
  guint8 *data_ptr = NULL;
  int data_len = 0;
  guint8 object_type_id = 0;
  guint8 stream_type = 0;
  const char *codec_name = NULL;
  GstCaps *caps = NULL;

  GST_MEMDUMP_OBJECT (qtdemux, "esds", ptr, len);
  /* skip the atom header (size + fourcc) ... */
  ptr += 8;
  GST_DEBUG_OBJECT (qtdemux, "version/flags = %08x", QT_UINT32 (ptr));
  /* ... and the version/flags word */
  ptr += 4;
  /* walk the descriptor chain: 1 tag byte + variable-size length field */
  while (ptr + 1 < end) {
    tag = QT_UINT8 (ptr);
    GST_DEBUG_OBJECT (qtdemux, "tag = %02x", tag);
    ptr++;
    len = read_descr_size (ptr, end, &ptr);
    GST_DEBUG_OBJECT (qtdemux, "len = %d", len);

    /* Check the stated amount of data is available for reading */
    if (len < 0 || ptr + len > end)
      break;

    switch (tag) {
      case ES_DESCRIPTOR_TAG:
        GST_DEBUG_OBJECT (qtdemux, "ID 0x%04x", QT_UINT16 (ptr));
        GST_DEBUG_OBJECT (qtdemux, "priority 0x%04x", QT_UINT8 (ptr + 2));
        ptr += 3;
        break;
      case DECODER_CONFIG_DESC_TAG:{
        guint max_bitrate, avg_bitrate;

        object_type_id = QT_UINT8 (ptr);
        stream_type = QT_UINT8 (ptr + 1) >> 2;
        max_bitrate = QT_UINT32 (ptr + 5);
        avg_bitrate = QT_UINT32 (ptr + 9);
        GST_DEBUG_OBJECT (qtdemux, "object_type_id %02x", object_type_id);
        GST_DEBUG_OBJECT (qtdemux, "stream_type %02x", stream_type);
        GST_DEBUG_OBJECT (qtdemux, "buffer_size_db %02x", QT_UINT24 (ptr + 2));
        GST_DEBUG_OBJECT (qtdemux, "max bitrate %u", max_bitrate);
        GST_DEBUG_OBJECT (qtdemux, "avg bitrate %u", avg_bitrate);
        if (max_bitrate > 0 && max_bitrate < G_MAXUINT32) {
          gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
              GST_TAG_MAXIMUM_BITRATE, max_bitrate, NULL);
        }
        if (avg_bitrate > 0 && avg_bitrate < G_MAXUINT32) {
          gst_tag_list_add (list, GST_TAG_MERGE_REPLACE, GST_TAG_BITRATE,
              avg_bitrate, NULL);
        }
        ptr += 13;
        break;
      }
      case DECODER_SPECIFIC_INFO_TAG:
        GST_MEMDUMP_OBJECT (qtdemux, "data", ptr, len);
        if (object_type_id == 0xe0 && len == 0x40) {
          guint8 *data;
          GstStructure *s;
          guint32 clut[16];
          gint i;

          GST_DEBUG_OBJECT (qtdemux,
              "Have VOBSUB palette. Creating palette event");
          /* move to decConfigDescr data and read palette */
          data = ptr;
          for (i = 0; i < 16; i++) {
            clut[i] = QT_UINT32 (data);
            data += 4;
          }

          s = gst_structure_new ("application/x-gst-dvd", "event",
              G_TYPE_STRING, "dvd-spu-clut-change",
              "clut00", G_TYPE_INT, clut[0], "clut01", G_TYPE_INT, clut[1],
              "clut02", G_TYPE_INT, clut[2], "clut03", G_TYPE_INT, clut[3],
              "clut04", G_TYPE_INT, clut[4], "clut05", G_TYPE_INT, clut[5],
              "clut06", G_TYPE_INT, clut[6], "clut07", G_TYPE_INT, clut[7],
              "clut08", G_TYPE_INT, clut[8], "clut09", G_TYPE_INT, clut[9],
              "clut10", G_TYPE_INT, clut[10], "clut11", G_TYPE_INT, clut[11],
              "clut12", G_TYPE_INT, clut[12], "clut13", G_TYPE_INT, clut[13],
              "clut14", G_TYPE_INT, clut[14], "clut15", G_TYPE_INT, clut[15],
              NULL);

          /* store event and trigger custom processing */
          stream->pending_event =
              gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, s);
        } else {
          /* Generic codec_data handler puts it on the caps */
          data_ptr = ptr;
          data_len = len;
        }

        ptr += len;
        break;
      case SL_CONFIG_DESC_TAG:
        GST_DEBUG_OBJECT (qtdemux, "data %02x", QT_UINT8 (ptr));
        ptr += 1;
        break;
      default:
        GST_DEBUG_OBJECT (qtdemux, "Unknown/unhandled descriptor tag %02x",
            tag);
        GST_MEMDUMP_OBJECT (qtdemux, "descriptor data", ptr, len);
        ptr += len;
        break;
    }
  }

  /* object_type_id in the esds atom in mp4a and mp4v tells us which codec is
   * in use, and should also be used to override some other parameters for some
   * codecs. */
  switch (object_type_id) {
    case 0x20:                 /* MPEG-4 */
      /* 4 bytes for the visual_object_sequence_start_code and 1 byte for the
       * profile_and_level_indication */
      if (data_ptr != NULL && data_len >= 5 &&
          GST_READ_UINT32_BE (data_ptr) == 0x000001b0) {
        gst_codec_utils_mpeg4video_caps_set_level_and_profile (entry->caps,
            data_ptr + 4, data_len - 4);
      }
      break;                    /* Nothing special needed here */
    case 0x21:                 /* H.264 */
      codec_name = "H.264 / AVC";
      caps = gst_caps_new_simple ("video/x-h264",
          "stream-format", G_TYPE_STRING, "avc",
          "alignment", G_TYPE_STRING, "au", NULL);
      break;
    case 0x40:                 /* AAC (any) */
    case 0x66:                 /* AAC Main */
    case 0x67:                 /* AAC LC */
    case 0x68:                 /* AAC SSR */
      /* Override channels and rate based on the codec_data, as it's often
       * wrong. */
      /* Only do so for basic setup without HE-AAC extension */
      if (data_ptr && data_len == 2) {
        guint channels, rate;

        channels = gst_codec_utils_aac_get_channels (data_ptr, data_len);
        if (channels > 0)
          entry->n_channels = channels;

        rate = gst_codec_utils_aac_get_sample_rate (data_ptr, data_len);
        if (rate > 0)
          entry->rate = rate;
      }

      /* Set level and profile if possible */
      if (data_ptr != NULL && data_len >= 2) {
        gst_codec_utils_aac_caps_set_level_and_profile (entry->caps,
            data_ptr, data_len);
      } else {
        const gchar *profile_str = NULL;
        GstBuffer *buffer;
        GstMapInfo map;
        guint8 *codec_data;
        gint rate_idx, profile;

        /* No codec_data, let's invent something.
         * FIXME: This is wrong for SBR! */

        GST_WARNING_OBJECT (qtdemux, "No codec_data for AAC available");

        /* synthesize a 2-byte AudioSpecificConfig from the stsd entry */
        buffer = gst_buffer_new_and_alloc (2);
        gst_buffer_map (buffer, &map, GST_MAP_WRITE);
        codec_data = map.data;

        rate_idx =
            gst_codec_utils_aac_get_index_from_sample_rate (CUR_STREAM
            (stream)->rate);

        switch (object_type_id) {
          case 0x66:
            profile_str = "main";
            profile = 0;
            break;
          case 0x67:
            profile_str = "lc";
            profile = 1;
            break;
          case 0x68:
            profile_str = "ssr";
            profile = 2;
            break;
          default:
            profile = 3;
            break;
        }

        /* 5 bits object type, 4 bits rate index, 4 bits channel config */
        codec_data[0] = ((profile + 1) << 3) | ((rate_idx & 0xE) >> 1);
        codec_data[1] =
            ((rate_idx & 0x1) << 7) | (CUR_STREAM (stream)->n_channels << 3);

        gst_buffer_unmap (buffer, &map);
        gst_caps_set_simple (CUR_STREAM (stream)->caps, "codec_data",
            GST_TYPE_BUFFER, buffer, NULL);
        gst_buffer_unref (buffer);

        if (profile_str) {
          gst_caps_set_simple (CUR_STREAM (stream)->caps, "profile",
              G_TYPE_STRING, profile_str, NULL);
        }
      }
      break;
    case 0x60:                 /* MPEG-2, various profiles */
    case 0x61:
    case 0x62:
    case 0x63:
    case 0x64:
    case 0x65:
      codec_name = "MPEG-2 video";
      caps = gst_caps_new_simple ("video/mpeg",
          "mpegversion", G_TYPE_INT, 2,
          "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
      break;
    case 0x69:                 /* MPEG-2 BC audio */
    case 0x6B:                 /* MPEG-1 audio */
      caps = gst_caps_new_simple ("audio/mpeg",
          "mpegversion", G_TYPE_INT, 1, NULL);
      codec_name = "MPEG-1 audio";
      break;
    case 0x6A:                 /* MPEG-1 */
      codec_name = "MPEG-1 video";
      caps = gst_caps_new_simple ("video/mpeg",
          "mpegversion", G_TYPE_INT, 1,
          "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
      break;
    case 0x6C:                 /* MJPEG */
      caps =
          gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
          NULL);
      codec_name = "Motion-JPEG";
      break;
    case 0x6D:                 /* PNG */
      caps =
          gst_caps_new_simple ("image/png", "parsed", G_TYPE_BOOLEAN, TRUE,
          NULL);
      codec_name = "PNG still images";
      break;
    case 0x6E:                 /* JPEG2000 */
      codec_name = "JPEG-2000";
      caps = gst_caps_new_simple ("image/x-j2c", "fields", G_TYPE_INT, 1, NULL);
      break;
    case 0xA4:                 /* Dirac */
      codec_name = "Dirac";
      caps = gst_caps_new_empty_simple ("video/x-dirac");
      break;
    case 0xA5:                 /* AC3 */
      codec_name = "AC-3 audio";
      caps = gst_caps_new_simple ("audio/x-ac3",
          "framed", G_TYPE_BOOLEAN, TRUE, NULL);
      break;
    case 0xA9:                 /* DTS */
      codec_name = "DTS audio";
      caps = gst_caps_new_simple ("audio/x-dts",
          "framed", G_TYPE_BOOLEAN, TRUE, NULL);
      break;
    case 0xDD:
      if (stream_type == 0x05 && data_ptr) {
        GList *headers =
            parse_xiph_stream_headers (qtdemux, data_ptr, data_len);
        if (headers) {
          GList *tmp;
          GValue arr_val = G_VALUE_INIT;
          GValue buf_val = G_VALUE_INIT;
          GstStructure *s;

          /* Let's assume it's vorbis if it's an audio stream of type 0xdd and we have codec data that extracts properly */
          codec_name = "Vorbis";
          caps = gst_caps_new_empty_simple ("audio/x-vorbis");
          g_value_init (&arr_val, GST_TYPE_ARRAY);
          g_value_init (&buf_val, GST_TYPE_BUFFER);
          for (tmp = headers; tmp; tmp = tmp->next) {
            g_value_set_boxed (&buf_val, (GstBuffer *) tmp->data);
            gst_value_array_append_value (&arr_val, &buf_val);
          }
          s = gst_caps_get_structure (caps, 0);
          gst_structure_take_value (s, "streamheader", &arr_val);
          g_value_unset (&buf_val);
          g_list_free (headers);

          /* headers went onto the caps; don't also attach them as codec_data */
          data_ptr = NULL;
          data_len = 0;
        }
      }
      break;
    case 0xE1:                 /* QCELP */
      /* QCELP, the codec_data is a riff tag (little endian) with
       * more info (http://ftp.3gpp2.org/TSGC/Working/2003/2003-05-SanDiego/TSG-C-2003-05-San%20Diego/WG1/SWG12/C12-20030512-006%20=%20C12-20030217-015_Draft_Baseline%20Text%20of%20FFMS_R2.doc). */
      caps = gst_caps_new_empty_simple ("audio/qcelp");
      codec_name = "QCELP";
      break;
    default:
      break;
  }

  /* If we have a replacement caps, then change our caps for this stream */
  if (caps) {
    gst_caps_unref (entry->caps);
    entry->caps = caps;
  }

  /* NOTE(review): the audio-codec tag is used even for video object types
   * here — looks questionable but kept as-is */
  if (codec_name && list)
    gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
        GST_TAG_AUDIO_CODEC, codec_name, NULL);

  /* Add the codec_data attribute to caps, if we have it */
  if (data_ptr) {
    GstBuffer *buffer;

    buffer = gst_buffer_new_and_alloc (data_len);
    gst_buffer_fill (buffer, 0, data_ptr, data_len);

    GST_DEBUG_OBJECT (qtdemux, "setting codec_data from esds");
    GST_MEMDUMP_OBJECT (qtdemux, "codec_data from esds", data_ptr, data_len);

    gst_caps_set_simple (entry->caps, "codec_data", GST_TYPE_BUFFER,
        buffer, NULL);
    gst_buffer_unref (buffer);
  }

}
+
+static inline GstCaps *
+_get_unknown_codec_name (const gchar * type, guint32 fourcc)
+{
+ GstCaps *caps;
+ guint i;
+ char *s, fourstr[5];
+
+ g_snprintf (fourstr, 5, "%" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (fourcc));
+ for (i = 0; i < 4; i++) {
+ if (!g_ascii_isalnum (fourstr[i]))
+ fourstr[i] = '_';
+ }
+ s = g_strdup_printf ("%s/x-gst-fourcc-%s", type, g_strstrip (fourstr));
+ caps = gst_caps_new_empty_simple (s);
+ g_free (s);
+ return caps;
+}
+
/* If the caller asked for a codec description (codec_name != NULL), store a
 * newly-allocated copy of @name there; the caller owns the string. */
#define _codec(name) \
  do { \
    if (codec_name) { \
      *codec_name = g_strdup (name); \
    } \
  } while (0)
+
+static GstCaps *
+qtdemux_video_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+ QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+ const guint8 * stsd_entry_data, gchar ** codec_name)
+{
+ GstCaps *caps = NULL;
+ GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
+
+ switch (fourcc) {
+ case FOURCC_png:
+ _codec ("PNG still images");
+ caps = gst_caps_new_empty_simple ("image/png");
+ break;
+ case FOURCC_jpeg:
+ _codec ("JPEG still images");
+ caps =
+ gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ break;
+ case GST_MAKE_FOURCC ('m', 'j', 'p', 'a'):
+ case GST_MAKE_FOURCC ('A', 'V', 'D', 'J'):
+ case GST_MAKE_FOURCC ('M', 'J', 'P', 'G'):
+ case GST_MAKE_FOURCC ('d', 'm', 'b', '1'):
+ _codec ("Motion-JPEG");
+ caps =
+ gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
+ NULL);
+ break;
+ case GST_MAKE_FOURCC ('m', 'j', 'p', 'b'):
+ _codec ("Motion-JPEG format B");
+ caps = gst_caps_new_empty_simple ("video/x-mjpeg-b");
+ break;
+ case FOURCC_mjp2:
+ _codec ("JPEG-2000");
+ /* override to what it should be according to spec, avoid palette_data */
+ entry->bits_per_sample = 24;
+ caps = gst_caps_new_simple ("image/x-j2c", "fields", G_TYPE_INT, 1, NULL);
+ break;
+ case FOURCC_SVQ3:
+ _codec ("Sorensen video v.3");
+ caps = gst_caps_new_simple ("video/x-svq",
+ "svqversion", G_TYPE_INT, 3, NULL);
+ break;
+ case GST_MAKE_FOURCC ('s', 'v', 'q', 'i'):
+ case GST_MAKE_FOURCC ('S', 'V', 'Q', '1'):
+ _codec ("Sorensen video v.1");
+ caps = gst_caps_new_simple ("video/x-svq",
+ "svqversion", G_TYPE_INT, 1, NULL);
+ break;
+ case GST_MAKE_FOURCC ('W', 'R', 'A', 'W'):
+ caps = gst_caps_new_empty_simple ("video/x-raw");
+ gst_caps_set_simple (caps, "format", G_TYPE_STRING, "RGB8P", NULL);
+ _codec ("Windows Raw RGB");
+ stream->alignment = 32;
+ break;
+ case FOURCC_raw_:
+ {
+ guint16 bps;
+
+ bps = QT_UINT16 (stsd_entry_data + 82);
+ switch (bps) {
+ case 15:
+ format = GST_VIDEO_FORMAT_RGB15;
+ break;
+ case 16:
+ format = GST_VIDEO_FORMAT_RGB16;
+ break;
+ case 24:
+ format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case 32:
+ format = GST_VIDEO_FORMAT_ARGB;
+ break;
+ default:
+ /* unknown */
+ break;
+ }
+ break;
+ }
+ case GST_MAKE_FOURCC ('y', 'v', '1', '2'):
+ format = GST_VIDEO_FORMAT_I420;
+ break;
+ case GST_MAKE_FOURCC ('y', 'u', 'v', '2'):
+ case GST_MAKE_FOURCC ('Y', 'u', 'v', '2'):
+ format = GST_VIDEO_FORMAT_I420;
+ break;
+ case FOURCC_2vuy:
+ case GST_MAKE_FOURCC ('2', 'V', 'u', 'y'):
+ format = GST_VIDEO_FORMAT_UYVY;
+ break;
+ case GST_MAKE_FOURCC ('v', '3', '0', '8'):
+ format = GST_VIDEO_FORMAT_v308;
+ break;
+ case GST_MAKE_FOURCC ('v', '2', '1', '6'):
+ format = GST_VIDEO_FORMAT_v216;
+ break;
+ case FOURCC_v210:
+ format = GST_VIDEO_FORMAT_v210;
+ break;
+ case GST_MAKE_FOURCC ('r', '2', '1', '0'):
+ format = GST_VIDEO_FORMAT_r210;
+ break;
+ /* Packed YUV 4:4:4 10 bit in 32 bits, complex
+ case GST_MAKE_FOURCC ('v', '4', '1', '0'):
+ format = GST_VIDEO_FORMAT_v410;
+ break;
+ */
+ /* Packed YUV 4:4:4:4 8 bit in 32 bits
+ * but different order than AYUV
+ case GST_MAKE_FOURCC ('v', '4', '0', '8'):
+ format = GST_VIDEO_FORMAT_v408;
+ break;
+ */
+ case GST_MAKE_FOURCC ('m', 'p', 'e', 'g'):
+ case GST_MAKE_FOURCC ('m', 'p', 'g', '1'):
+ _codec ("MPEG-1 video");
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 1,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '1'): /* HDV 720p30 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '2'): /* HDV 1080i60 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '3'): /* HDV 1080i50 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '4'): /* HDV 720p24 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '5'): /* HDV 720p25 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '6'): /* HDV 1080p24 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '7'): /* HDV 1080p25 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '8'): /* HDV 1080p30 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', '9'): /* HDV 720p60 */
+ case GST_MAKE_FOURCC ('h', 'd', 'v', 'a'): /* HDV 720p50 */
+ case GST_MAKE_FOURCC ('m', 'x', '5', 'n'): /* MPEG2 IMX NTSC 525/60 50mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '5', 'p'): /* MPEG2 IMX PAL 625/60 50mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '4', 'n'): /* MPEG2 IMX NTSC 525/60 40mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '4', 'p'): /* MPEG2 IMX PAL 625/60 40mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '3', 'n'): /* MPEG2 IMX NTSC 525/60 30mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('m', 'x', '3', 'p'): /* MPEG2 IMX PAL 625/50 30mb/s produced by FCP */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '1'): /* XDCAM HD 720p30 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '2'): /* XDCAM HD 1080i60 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '3'): /* XDCAM HD 1080i50 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '4'): /* XDCAM HD 720p24 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '5'): /* XDCAM HD 720p25 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '6'): /* XDCAM HD 1080p24 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '7'): /* XDCAM HD 1080p25 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '8'): /* XDCAM HD 1080p30 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', '9'): /* XDCAM HD 720p60 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'a'): /* XDCAM HD 720p50 35Mb/s */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'b'): /* XDCAM EX 1080i60 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'c'): /* XDCAM EX 1080i50 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'd'): /* XDCAM HD 1080p24 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'e'): /* XDCAM HD 1080p25 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'v', 'f'): /* XDCAM HD 1080p30 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '1'): /* XDCAM HD422 720p30 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '4'): /* XDCAM HD422 720p24 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '5'): /* XDCAM HD422 720p25 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', '9'): /* XDCAM HD422 720p60 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'a'): /* XDCAM HD422 720p50 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'b'): /* XDCAM HD422 1080i50 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'c'): /* XDCAM HD422 1080i50 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'd'): /* XDCAM HD422 1080p24 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'e'): /* XDCAM HD422 1080p25 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', '5', 'f'): /* XDCAM HD422 1080p30 50Mb/s CBR */
+ case GST_MAKE_FOURCC ('x', 'd', 'h', 'd'): /* XDCAM HD 540p */
+ case GST_MAKE_FOURCC ('x', 'd', 'h', '2'): /* XDCAM HD422 540p */
+ case GST_MAKE_FOURCC ('A', 'V', 'm', 'p'): /* AVID IMX PAL */
+ case GST_MAKE_FOURCC ('m', 'p', 'g', '2'): /* AVID IMX PAL */
+ case GST_MAKE_FOURCC ('m', 'p', '2', 'v'): /* AVID IMX PAL */
+ case GST_MAKE_FOURCC ('m', '2', 'v', '1'):
+ _codec ("MPEG-2 video");
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 2,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('g', 'i', 'f', ' '):
+ _codec ("GIF still images");
+ caps = gst_caps_new_empty_simple ("image/gif");
+ break;
+ case FOURCC_h263:
+ case GST_MAKE_FOURCC ('H', '2', '6', '3'):
+ case FOURCC_s263:
+ case GST_MAKE_FOURCC ('U', '2', '6', '3'):
+ _codec ("H.263");
+ /* ffmpeg uses the height/width props, don't know why */
+ caps = gst_caps_new_simple ("video/x-h263",
+ "variant", G_TYPE_STRING, "itu", NULL);
+ break;
+ case FOURCC_mp4v:
+ case FOURCC_MP4V:
+ _codec ("MPEG-4 video");
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('3', 'i', 'v', 'd'):
+ case GST_MAKE_FOURCC ('3', 'I', 'V', 'D'):
+ _codec ("Microsoft MPEG-4 4.3"); /* FIXME? */
+ caps = gst_caps_new_simple ("video/x-msmpeg",
+ "msmpegversion", G_TYPE_INT, 43, NULL);
+ break;
+ case GST_MAKE_FOURCC ('D', 'I', 'V', '3'):
+ _codec ("DivX 3");
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 3, NULL);
+ break;
+ case GST_MAKE_FOURCC ('D', 'I', 'V', 'X'):
+ case GST_MAKE_FOURCC ('d', 'i', 'v', 'x'):
+ _codec ("DivX 4");
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 4, NULL);
+ break;
+ case GST_MAKE_FOURCC ('D', 'X', '5', '0'):
+ _codec ("DivX 5");
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 5, NULL);
+ break;
+
+ case GST_MAKE_FOURCC ('F', 'F', 'V', '1'):
+ _codec ("FFV1");
+ caps = gst_caps_new_simple ("video/x-ffv",
+ "ffvversion", G_TYPE_INT, 1, NULL);
+ break;
+
+ case GST_MAKE_FOURCC ('3', 'I', 'V', '1'):
+ case GST_MAKE_FOURCC ('3', 'I', 'V', '2'):
+ case FOURCC_XVID:
+ case FOURCC_xvid:
+ case FOURCC_FMP4:
+ case FOURCC_fmp4:
+ case GST_MAKE_FOURCC ('U', 'M', 'P', '4'):
+ caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ _codec ("MPEG-4");
+ break;
+
+ case GST_MAKE_FOURCC ('c', 'v', 'i', 'd'):
+ _codec ("Cinepak");
+ caps = gst_caps_new_empty_simple ("video/x-cinepak");
+ break;
+ case GST_MAKE_FOURCC ('q', 'd', 'r', 'w'):
+ _codec ("Apple QuickDraw");
+ caps = gst_caps_new_empty_simple ("video/x-qdrw");
+ break;
+ case GST_MAKE_FOURCC ('r', 'p', 'z', 'a'):
+ _codec ("Apple video");
+ caps = gst_caps_new_empty_simple ("video/x-apple-video");
+ break;
+ case FOURCC_H264:
+ case FOURCC_avc1:
+ case FOURCC_dva1:
+ _codec ("H.264 / AVC");
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "avc",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_avc3:
+ case FOURCC_dvav:
+ _codec ("H.264 / AVC");
+ caps = gst_caps_new_simple ("video/x-h264",
+ "stream-format", G_TYPE_STRING, "avc3",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_H265:
+ case FOURCC_hvc1:
+ case FOURCC_dvh1:
+ _codec ("H.265 / HEVC");
+ caps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "hvc1",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_hev1:
+ case FOURCC_dvhe:
+ _codec ("H.265 / HEVC");
+ caps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "hev1",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ break;
+ case FOURCC_rle_:
+ _codec ("Run-length encoding");
+ caps = gst_caps_new_simple ("video/x-rle",
+ "layout", G_TYPE_STRING, "quicktime", NULL);
+ break;
+ case FOURCC_WRLE:
+ _codec ("Run-length encoding");
+ caps = gst_caps_new_simple ("video/x-rle",
+ "layout", G_TYPE_STRING, "microsoft", NULL);
+ break;
+ case GST_MAKE_FOURCC ('I', 'V', '3', '2'):
+ case GST_MAKE_FOURCC ('i', 'v', '3', '2'):
+ _codec ("Indeo Video 3");
+ caps = gst_caps_new_simple ("video/x-indeo",
+ "indeoversion", G_TYPE_INT, 3, NULL);
+ break;
+ case GST_MAKE_FOURCC ('I', 'V', '4', '1'):
+ case GST_MAKE_FOURCC ('i', 'v', '4', '1'):
+ _codec ("Intel Video 4");
+ caps = gst_caps_new_simple ("video/x-indeo",
+ "indeoversion", G_TYPE_INT, 4, NULL);
+ break;
+ case FOURCC_dvcp:
+ case FOURCC_dvc_:
+ case GST_MAKE_FOURCC ('d', 'v', 's', 'd'):
+ case GST_MAKE_FOURCC ('D', 'V', 'S', 'D'):
+ case GST_MAKE_FOURCC ('d', 'v', 'c', 's'):
+ case GST_MAKE_FOURCC ('D', 'V', 'C', 'S'):
+ case GST_MAKE_FOURCC ('d', 'v', '2', '5'):
+ case GST_MAKE_FOURCC ('d', 'v', 'p', 'p'):
+ _codec ("DV Video");
+ caps = gst_caps_new_simple ("video/x-dv", "dvversion", G_TYPE_INT, 25,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case FOURCC_dv5n: /* DVCPRO50 NTSC */
+ case FOURCC_dv5p: /* DVCPRO50 PAL */
+ _codec ("DVCPro50 Video");
+ caps = gst_caps_new_simple ("video/x-dv", "dvversion", G_TYPE_INT, 50,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('d', 'v', 'h', '5'): /* DVCPRO HD 50i produced by FCP */
+ case GST_MAKE_FOURCC ('d', 'v', 'h', '6'): /* DVCPRO HD 60i produced by FCP */
+ _codec ("DVCProHD Video");
+ caps = gst_caps_new_simple ("video/x-dv", "dvversion", G_TYPE_INT, 100,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ break;
+ case GST_MAKE_FOURCC ('s', 'm', 'c', ' '):
+ _codec ("Apple Graphics (SMC)");
+ caps = gst_caps_new_empty_simple ("video/x-smc");
+ break;
+ case GST_MAKE_FOURCC ('V', 'P', '3', '1'):
+ _codec ("VP3");
+ caps = gst_caps_new_empty_simple ("video/x-vp3");
+ break;
+ case GST_MAKE_FOURCC ('V', 'P', '6', 'F'):
+ _codec ("VP6 Flash");
+ caps = gst_caps_new_empty_simple ("video/x-vp6-flash");
+ break;
+ case FOURCC_XiTh:
+ _codec ("Theora");
+ caps = gst_caps_new_empty_simple ("video/x-theora");
+ /* theora uses one byte of padding in the data stream because it does not
+ * allow 0 sized packets while theora does */
+ entry->padding = 1;
+ break;
+ case FOURCC_drac:
+ _codec ("Dirac");
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
+ break;
+ case GST_MAKE_FOURCC ('t', 'i', 'f', 'f'):
+ _codec ("TIFF still images");
+ caps = gst_caps_new_empty_simple ("image/tiff");
+ break;
+ case GST_MAKE_FOURCC ('i', 'c', 'o', 'd'):
+ _codec ("Apple Intermediate Codec");
+ caps = gst_caps_from_string ("video/x-apple-intermediate-codec");
+ break;
+ case GST_MAKE_FOURCC ('A', 'V', 'd', 'n'):
+ _codec ("AVID DNxHD");
+ caps = gst_caps_from_string ("video/x-dnxhd");
+ break;
+ case FOURCC_VP80:
+ case FOURCC_vp08:
+ _codec ("On2 VP8");
+ caps = gst_caps_from_string ("video/x-vp8");
+ break;
+ case FOURCC_vp09:
+ _codec ("Google VP9");
+ caps = gst_caps_from_string ("video/x-vp9");
+ break;
+ case FOURCC_apcs:
+ _codec ("Apple ProRes LT");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING, "lt",
+ NULL);
+ break;
+ case FOURCC_apch:
+ _codec ("Apple ProRes HQ");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING, "hq",
+ NULL);
+ break;
+ case FOURCC_apcn:
+ _codec ("Apple ProRes");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "standard", NULL);
+ break;
+ case FOURCC_apco:
+ _codec ("Apple ProRes Proxy");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "proxy", NULL);
+ break;
+ case FOURCC_ap4h:
+ _codec ("Apple ProRes 4444");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "4444", NULL);
+
+ /* 24 bits per sample = an alpha channel is coded but image is always opaque */
+ if (entry->bits_per_sample > 0) {
+ gst_caps_set_simple (caps, "depth", G_TYPE_INT, entry->bits_per_sample,
+ NULL);
+ }
+ break;
+ case FOURCC_ap4x:
+ _codec ("Apple ProRes 4444 XQ");
+ caps =
+ gst_caps_new_simple ("video/x-prores", "variant", G_TYPE_STRING,
+ "4444xq", NULL);
+
+ /* 24 bits per sample = an alpha channel is coded but image is always opaque */
+ if (entry->bits_per_sample > 0) {
+ gst_caps_set_simple (caps, "depth", G_TYPE_INT, entry->bits_per_sample,
+ NULL);
+ }
+ break;
+ case FOURCC_cfhd:
+ _codec ("GoPro CineForm");
+ caps = gst_caps_from_string ("video/x-cineform");
+ break;
+ case FOURCC_vc_1:
+ case FOURCC_ovc1:
+ _codec ("VC-1");
+ caps = gst_caps_new_simple ("video/x-wmv",
+ "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
+ break;
+ case FOURCC_av01:
+ _codec ("AV1");
+ caps = gst_caps_new_empty_simple ("video/x-av1");
+ break;
+ case GST_MAKE_FOURCC ('k', 'p', 'c', 'd'):
+ default:
+ {
+ caps = _get_unknown_codec_name ("video", fourcc);
+ break;
+ }
+ }
+
+ if (format != GST_VIDEO_FORMAT_UNKNOWN) {
+ GstVideoInfo info;
+
+ gst_video_info_init (&info);
+ gst_video_info_set_format (&info, format, entry->width, entry->height);
+
+ caps = gst_video_info_to_caps (&info);
+ *codec_name = gst_pb_utils_get_codec_description (caps);
+
+ /* enable clipping for raw video streams */
+ stream->need_clip = TRUE;
+ stream->alignment = 32;
+ }
+
+ return caps;
+}
+
+/* Round @n up to the next power of two; a value that already is a power of
+ * two is returned unchanged. Classic bit-smearing trick: propagate the top
+ * set bit into every lower bit position, then add one.
+ * NOTE(review): n == 0 yields 0, and values above 2^31 wrap around — callers
+ * in this file only pass small alignment values, so both edges look
+ * unreachable in practice; confirm if reused elsewhere. */
+static guint
+round_up_pow2 (guint n)
+{
+  n = n - 1;
+  n = n | (n >> 1);
+  n = n | (n >> 2);
+  n = n | (n >> 4);
+  n = n | (n >> 8);
+  n = n | (n >> 16);
+  return n + 1;
+}
+
+/* Translate an audio sample description with the given @fourcc into caps,
+ * storing a human-readable codec description in @codec_name (via the
+ * _codec() macro). @data/@len point at the raw stsd entry payload and are
+ * only consulted by codecs that carry extra fields there (QDM2, lpcm).
+ * The resulting caps are intersected with the audio source pad template;
+ * for raw audio formats the stream's clipping flag, alignment and min/max
+ * output buffer sizes are also configured. */
+static GstCaps *
+qtdemux_audio_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+    QtDemuxStreamStsdEntry * entry, guint32 fourcc, const guint8 * data,
+    int len, gchar ** codec_name)
+{
+  GstCaps *caps;
+  const GstStructure *s;
+  const gchar *name;
+  gint endian = 0;
+  GstAudioFormat format = 0;
+  gint depth;
+
+  GST_DEBUG_OBJECT (qtdemux, "resolve fourcc 0x%08x", GUINT32_TO_BE (fourcc));
+
+  /* sample depth in bits, as stored in the sample description entry */
+  depth = entry->bytes_per_packet * 8;
+
+  switch (fourcc) {
+    case GST_MAKE_FOURCC ('N', 'O', 'N', 'E'):
+    case FOURCC_raw_:
+      /* 8-bit audio is unsigned */
+      if (depth == 8)
+        format = GST_AUDIO_FORMAT_U8;
+      /* otherwise it's signed and big-endian just like 'twos' */
+      /* fall-through */
+    case FOURCC_twos:
+      endian = G_BIG_ENDIAN;
+      /* fall-through */
+    case FOURCC_sowt:
+    {
+      gchar *str;
+
+      /* 'sowt' (and anything that fell through without setting an
+       * endianness) is little-endian signed PCM */
+      if (!endian)
+        endian = G_LITTLE_ENDIAN;
+
+      if (!format)
+        format = gst_audio_format_build_integer (TRUE, endian, depth, depth);
+
+      str = g_strdup_printf ("Raw %d-bit PCM audio", depth);
+      _codec (str);
+      g_free (str);
+
+      caps = gst_caps_new_simple ("audio/x-raw",
+          "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+          "layout", G_TYPE_STRING, "interleaved", NULL);
+      stream->alignment = GST_ROUND_UP_8 (depth);
+      stream->alignment = round_up_pow2 (stream->alignment);
+      break;
+    }
+    case FOURCC_fl64:
+      _codec ("Raw 64-bit floating-point audio");
+      /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+       * endian later */
+      caps = gst_caps_new_simple ("audio/x-raw",
+          "format", G_TYPE_STRING, "F64BE",
+          "layout", G_TYPE_STRING, "interleaved", NULL);
+      stream->alignment = 8;
+      break;
+    case FOURCC_fl32:
+      _codec ("Raw 32-bit floating-point audio");
+      /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+       * endian later */
+      caps = gst_caps_new_simple ("audio/x-raw",
+          "format", G_TYPE_STRING, "F32BE",
+          "layout", G_TYPE_STRING, "interleaved", NULL);
+      stream->alignment = 4;
+      break;
+    case FOURCC_in24:
+      _codec ("Raw 24-bit PCM audio");
+      /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+       * endian later */
+      caps = gst_caps_new_simple ("audio/x-raw",
+          "format", G_TYPE_STRING, "S24BE",
+          "layout", G_TYPE_STRING, "interleaved", NULL);
+      stream->alignment = 4;
+      break;
+    case FOURCC_in32:
+      _codec ("Raw 32-bit PCM audio");
+      /* we assume BIG ENDIAN, an enda box will tell us to change this to little
+       * endian later */
+      caps = gst_caps_new_simple ("audio/x-raw",
+          "format", G_TYPE_STRING, "S32BE",
+          "layout", G_TYPE_STRING, "interleaved", NULL);
+      stream->alignment = 4;
+      break;
+    case FOURCC_s16l:
+      _codec ("Raw 16-bit PCM audio");
+      caps = gst_caps_new_simple ("audio/x-raw",
+          "format", G_TYPE_STRING, "S16LE",
+          "layout", G_TYPE_STRING, "interleaved", NULL);
+      stream->alignment = 2;
+      break;
+    case FOURCC_ulaw:
+      _codec ("Mu-law audio");
+      caps = gst_caps_new_empty_simple ("audio/x-mulaw");
+      break;
+    case FOURCC_alaw:
+      _codec ("A-law audio");
+      caps = gst_caps_new_empty_simple ("audio/x-alaw");
+      break;
+    /* the byte-swapped twins below cover files that stored the WAVE codec
+     * id fourcc with the wrong endianness */
+    case 0x0200736d:
+    case 0x6d730002:
+      _codec ("Microsoft ADPCM");
+      /* Microsoft ADPCM-ACM code 2 */
+      caps = gst_caps_new_simple ("audio/x-adpcm",
+          "layout", G_TYPE_STRING, "microsoft", NULL);
+      break;
+    case 0x1100736d:
+    case 0x6d730011:
+      _codec ("DVI/IMA ADPCM");
+      caps = gst_caps_new_simple ("audio/x-adpcm",
+          "layout", G_TYPE_STRING, "dvi", NULL);
+      break;
+    case 0x1700736d:
+    case 0x6d730017:
+      _codec ("DVI/Intel IMA ADPCM");
+      /* FIXME DVI/Intel IMA ADPCM/ACM code 17 */
+      caps = gst_caps_new_simple ("audio/x-adpcm",
+          "layout", G_TYPE_STRING, "quicktime", NULL);
+      break;
+    case 0x5500736d:
+    case 0x6d730055:
+      /* MPEG layer 3, CBR only (pre QT4.1) */
+    case FOURCC__mp3:
+    case FOURCC_mp3_:
+      _codec ("MPEG-1 layer 3");
+      /* MPEG layer 3, CBR & VBR (QT4.1 and later) */
+      caps = gst_caps_new_simple ("audio/mpeg", "layer", G_TYPE_INT, 3,
+          "mpegversion", G_TYPE_INT, 1, NULL);
+      break;
+    case GST_MAKE_FOURCC ('.', 'm', 'p', '2'):
+      _codec ("MPEG-1 layer 2");
+      /* MPEG layer 2 */
+      caps = gst_caps_new_simple ("audio/mpeg", "layer", G_TYPE_INT, 2,
+          "mpegversion", G_TYPE_INT, 1, NULL);
+      break;
+    case 0x20736d:
+    case GST_MAKE_FOURCC ('e', 'c', '-', '3'):
+      _codec ("EAC-3 audio");
+      caps = gst_caps_new_simple ("audio/x-eac3",
+          "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+      entry->sampled = TRUE;
+      break;
+    case GST_MAKE_FOURCC ('s', 'a', 'c', '3'): // Nero Recode
+    case FOURCC_ac_3:
+      _codec ("AC-3 audio");
+      caps = gst_caps_new_simple ("audio/x-ac3",
+          "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+      entry->sampled = TRUE;
+      break;
+    case GST_MAKE_FOURCC ('d', 't', 's', 'c'):
+    case GST_MAKE_FOURCC ('D', 'T', 'S', ' '):
+      _codec ("DTS audio");
+      caps = gst_caps_new_simple ("audio/x-dts",
+          "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+      entry->sampled = TRUE;
+      break;
+    case GST_MAKE_FOURCC ('d', 't', 's', 'h'): // DTS-HD
+    case GST_MAKE_FOURCC ('d', 't', 's', 'l'): // DTS-HD Lossless
+      _codec ("DTS-HD audio");
+      caps = gst_caps_new_simple ("audio/x-dts",
+          "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+      entry->sampled = TRUE;
+      break;
+    case FOURCC_MAC3:
+      _codec ("MACE-3");
+      caps = gst_caps_new_simple ("audio/x-mace",
+          "maceversion", G_TYPE_INT, 3, NULL);
+      break;
+    case FOURCC_MAC6:
+      _codec ("MACE-6");
+      caps = gst_caps_new_simple ("audio/x-mace",
+          "maceversion", G_TYPE_INT, 6, NULL);
+      break;
+    case GST_MAKE_FOURCC ('O', 'g', 'g', 'V'):
+      /* ogg/vorbis */
+      caps = gst_caps_new_empty_simple ("application/ogg");
+      break;
+    case GST_MAKE_FOURCC ('d', 'v', 'c', 'a'):
+      _codec ("DV audio");
+      caps = gst_caps_new_empty_simple ("audio/x-dv");
+      break;
+    case FOURCC_mp4a:
+      _codec ("MPEG-4 AAC audio");
+      caps = gst_caps_new_simple ("audio/mpeg",
+          "mpegversion", G_TYPE_INT, 4, "framed", G_TYPE_BOOLEAN, TRUE,
+          "stream-format", G_TYPE_STRING, "raw", NULL);
+      break;
+    case GST_MAKE_FOURCC ('Q', 'D', 'M', 'C'):
+      _codec ("QDesign Music");
+      caps = gst_caps_new_empty_simple ("audio/x-qdm");
+      break;
+    case FOURCC_QDM2:
+      _codec ("QDesign Music v.2");
+      /* FIXME: QDesign music version 2 (no constant) */
+      /* NOTE(review): this branch is deliberately disabled (FALSE &&),
+       * presumably until the field offsets are verified — confirm */
+      if (FALSE && data) {
+        caps = gst_caps_new_simple ("audio/x-qdm2",
+            "framesize", G_TYPE_INT, QT_UINT32 (data + 52),
+            "bitrate", G_TYPE_INT, QT_UINT32 (data + 40),
+            "blocksize", G_TYPE_INT, QT_UINT32 (data + 44), NULL);
+      } else {
+        caps = gst_caps_new_empty_simple ("audio/x-qdm2");
+      }
+      break;
+    case FOURCC_agsm:
+      _codec ("GSM audio");
+      caps = gst_caps_new_empty_simple ("audio/x-gsm");
+      break;
+    case FOURCC_samr:
+      _codec ("AMR audio");
+      caps = gst_caps_new_empty_simple ("audio/AMR");
+      break;
+    case FOURCC_sawb:
+      _codec ("AMR-WB audio");
+      caps = gst_caps_new_empty_simple ("audio/AMR-WB");
+      break;
+    case FOURCC_ima4:
+      _codec ("Quicktime IMA ADPCM");
+      caps = gst_caps_new_simple ("audio/x-adpcm",
+          "layout", G_TYPE_STRING, "quicktime", NULL);
+      break;
+    case FOURCC_alac:
+      _codec ("Apple lossless audio");
+      caps = gst_caps_new_empty_simple ("audio/x-alac");
+      break;
+    case FOURCC_fLaC:
+      _codec ("Free Lossless Audio Codec");
+      caps = gst_caps_new_simple ("audio/x-flac",
+          "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+      break;
+    case GST_MAKE_FOURCC ('Q', 'c', 'l', 'p'):
+      _codec ("QualComm PureVoice");
+      caps = gst_caps_from_string ("audio/qcelp");
+      break;
+    case FOURCC_wma_:
+    case FOURCC_owma:
+      _codec ("WMA");
+      caps = gst_caps_new_empty_simple ("audio/x-wma");
+      break;
+    case FOURCC_opus:
+      _codec ("Opus");
+      caps = gst_caps_new_empty_simple ("audio/x-opus");
+      break;
+    case FOURCC_lpcm:
+    {
+      guint32 flags = 0;
+      guint32 depth = 0;        /* lpcm carries its own depth; shadows outer */
+      guint32 width = 0;
+      GstAudioFormat format;    /* also shadows the outer 'format' */
+      enum
+      {
+        FLAG_IS_FLOAT = 0x1,
+        FLAG_IS_BIG_ENDIAN = 0x2,
+        FLAG_IS_SIGNED = 0x4,
+        FLAG_IS_PACKED = 0x8,
+        FLAG_IS_ALIGNED_HIGH = 0x10,
+        FLAG_IS_NON_INTERLEAVED = 0x20
+      };
+      _codec ("Raw LPCM audio");
+
+      if (data && len >= 36) {
+        depth = QT_UINT32 (data + 24);
+        flags = QT_UINT32 (data + 28);
+        /* NOTE(review): assumes entry->n_channels != 0 here — confirm the
+         * stsd parser guarantees that */
+        width = QT_UINT32 (data + 32) * 8 / entry->n_channels;
+      }
+      if ((flags & FLAG_IS_FLOAT) == 0) {
+        /* integer PCM: default to 16-bit when the entry gave us nothing */
+        if (depth == 0)
+          depth = 16;
+        if (width == 0)
+          width = 16;
+        if ((flags & FLAG_IS_ALIGNED_HIGH))
+          depth = width;
+
+        format = gst_audio_format_build_integer ((flags & FLAG_IS_SIGNED) ?
+            TRUE : FALSE, (flags & FLAG_IS_BIG_ENDIAN) ?
+            G_BIG_ENDIAN : G_LITTLE_ENDIAN, width, depth);
+        caps = gst_caps_new_simple ("audio/x-raw",
+            "format", G_TYPE_STRING,
+            format !=
+            GST_AUDIO_FORMAT_UNKNOWN ? gst_audio_format_to_string (format) :
+            "UNKNOWN", "layout", G_TYPE_STRING,
+            (flags & FLAG_IS_NON_INTERLEAVED) ? "non-interleaved" :
+            "interleaved", NULL);
+        stream->alignment = GST_ROUND_UP_8 (depth);
+        stream->alignment = round_up_pow2 (stream->alignment);
+      } else {
+        /* float PCM: only 32- and 64-bit widths exist */
+        if (width == 0)
+          width = 32;
+        if (width == 64) {
+          if (flags & FLAG_IS_BIG_ENDIAN)
+            format = GST_AUDIO_FORMAT_F64BE;
+          else
+            format = GST_AUDIO_FORMAT_F64LE;
+        } else {
+          if (flags & FLAG_IS_BIG_ENDIAN)
+            format = GST_AUDIO_FORMAT_F32BE;
+          else
+            format = GST_AUDIO_FORMAT_F32LE;
+        }
+        caps = gst_caps_new_simple ("audio/x-raw",
+            "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+            "layout", G_TYPE_STRING, (flags & FLAG_IS_NON_INTERLEAVED) ?
+            "non-interleaved" : "interleaved", NULL);
+        stream->alignment = width / 8;
+      }
+      break;
+    }
+    case GST_MAKE_FOURCC ('a', 'c', '-', '4'):
+    {
+      _codec ("AC4");
+      caps = gst_caps_new_empty_simple ("audio/x-ac4");
+      break;
+    }
+    case GST_MAKE_FOURCC ('q', 't', 'v', 'r'):
+      /* ? */
+    default:
+    {
+      caps = _get_unknown_codec_name ("audio", fourcc);
+      break;
+    }
+  }
+
+  /* constrain whatever we produced to what the source pad can output */
+  if (caps) {
+    GstCaps *templ_caps =
+        gst_static_pad_template_get_caps (&gst_qtdemux_audiosrc_template);
+    GstCaps *intersection = gst_caps_intersect (caps, templ_caps);
+    gst_caps_unref (caps);
+    gst_caps_unref (templ_caps);
+    caps = intersection;
+  }
+
+  /* enable clipping for raw audio streams */
+  /* NOTE(review): if the template intersection above yielded empty caps,
+   * gst_caps_get_structure() returns NULL here — confirm that cannot happen
+   * for the formats produced by this function */
+  s = gst_caps_get_structure (caps, 0);
+  name = gst_structure_get_name (s);
+  if (g_str_has_prefix (name, "audio/x-raw")) {
+    stream->need_clip = TRUE;
+    stream->min_buffer_size = 1024 * entry->bytes_per_frame;
+    stream->max_buffer_size = 4096 * entry->bytes_per_frame;
+    GST_DEBUG ("setting min/max buffer sizes to %d/%d", stream->min_buffer_size,
+        stream->max_buffer_size);
+  }
+  return caps;
+}
+
+/* Translate a subtitle/closed-caption sample description into caps, storing
+ * a human-readable description in @codec_name. Streams whose buffers need
+ * extra massaging before being pushed (DVD subpictures, timed text, CEA
+ * captions) get need_process and/or need_split set here. */
+static GstCaps *
+qtdemux_sub_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+    QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+    const guint8 * stsd_entry_data, gchar ** codec_name)
+{
+  GstCaps *caps;
+
+  GST_DEBUG_OBJECT (qtdemux, "resolve fourcc 0x%08x", GUINT32_TO_BE (fourcc));
+
+  switch (fourcc) {
+    case FOURCC_mp4s:
+      _codec ("DVD subtitle");
+      caps = gst_caps_new_empty_simple ("subpicture/x-dvd");
+      stream->need_process = TRUE;
+      break;
+    case FOURCC_text:
+      _codec ("Quicktime timed text");
+      goto text;
+    case FOURCC_tx3g:
+      _codec ("3GPP timed text");
+    text:
+      /* shared tail for both timed-text variants */
+      caps = gst_caps_new_simple ("text/x-raw", "format", G_TYPE_STRING,
+          "utf8", NULL);
+      /* actual text piece needs to be extracted */
+      stream->need_process = TRUE;
+      break;
+    case FOURCC_stpp:
+      _codec ("XML subtitles");
+      caps = gst_caps_new_empty_simple ("application/ttml+xml");
+      break;
+    case FOURCC_c608:
+      _codec ("CEA 608 Closed Caption");
+      caps =
+          gst_caps_new_simple ("closedcaption/x-cea-608", "format",
+          G_TYPE_STRING, "s334-1a", NULL);
+      stream->need_process = TRUE;
+      stream->need_split = TRUE;
+      break;
+    case FOURCC_c708:
+      _codec ("CEA 708 Closed Caption");
+      caps =
+          gst_caps_new_simple ("closedcaption/x-cea-708", "format",
+          G_TYPE_STRING, "cdp", NULL);
+      stream->need_process = TRUE;
+      break;
+
+    default:
+    {
+      caps = _get_unknown_codec_name ("text", fourcc);
+      break;
+    }
+  }
+  return caps;
+}
+
+/* Fallback caps helper for fourccs that do not fit the video/audio/subtitle
+ * tables. Unlike the sibling helpers this returns NULL (rather than
+ * "unknown codec" caps) when the fourcc is not recognised, so the caller
+ * can try other means. */
+static GstCaps *
+qtdemux_generic_caps (GstQTDemux * qtdemux, QtDemuxStream * stream,
+    QtDemuxStreamStsdEntry * entry, guint32 fourcc,
+    const guint8 * stsd_entry_data, gchar ** codec_name)
+{
+  GstCaps *caps;
+
+  switch (fourcc) {
+    case FOURCC_m1v:
+      _codec ("MPEG 1 video");
+      caps = gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 1,
+          "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+      break;
+    default:
+      caps = NULL;
+      break;
+  }
+  return caps;
+}
+
+/* Record @system_id in the demuxer's list of content-protection system IDs.
+ * IDs are compared case-insensitively and stored lower-cased; duplicates are
+ * ignored. The array is created on first use and owns its strings (freed via
+ * the g_free destroy notify). */
+static void
+gst_qtdemux_append_protection_system_id (GstQTDemux * qtdemux,
+    const gchar * system_id)
+{
+  gint i;
+
+  if (!qtdemux->protection_system_ids)
+    qtdemux->protection_system_ids =
+        g_ptr_array_new_with_free_func ((GDestroyNotify) g_free);
+  /* Check whether we already have an entry for this system ID. */
+  for (i = 0; i < qtdemux->protection_system_ids->len; ++i) {
+    const gchar *id = g_ptr_array_index (qtdemux->protection_system_ids, i);
+    if (g_ascii_strcasecmp (system_id, id) == 0) {
+      return;
+    }
+  }
+  GST_DEBUG_OBJECT (qtdemux, "Adding cenc protection system ID %s", system_id);
+  g_ptr_array_add (qtdemux->protection_system_ids, g_ascii_strdown (system_id,
+          -1));
+}
diff --git a/gst/isomp4/qtdemux.h b/gst/isomp4/qtdemux.h
new file mode 100644
index 0000000000..81fb9d242c
--- /dev/null
+++ b/gst/isomp4/qtdemux.h
@@ -0,0 +1,507 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_QTDEMUX_H__
+#define __GST_QTDEMUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstflowcombiner.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/video/video.h>
+#include "gstisoff.h"
+
+G_BEGIN_DECLS
+
+/* Standard GObject type-check/cast boilerplate for the qtdemux element. */
+#define GST_TYPE_QTDEMUX \
+  (gst_qtdemux_get_type())
+#define GST_QTDEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_QTDEMUX,GstQTDemux))
+#define GST_QTDEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_QTDEMUX,GstQTDemuxClass))
+#define GST_IS_QTDEMUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_QTDEMUX))
+#define GST_IS_QTDEMUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QTDEMUX))
+
+/* unchecked cast for internal use on already-validated pointers */
+#define GST_QTDEMUX_CAST(obj) ((GstQTDemux *)(obj))
+
+/* qtdemux produces these for atoms it cannot parse */
+#define GST_QT_DEMUX_PRIVATE_TAG "private-qt-tag"
+#define GST_QT_DEMUX_CLASSIFICATION_TAG "classification"
+
+/* Forward declarations for the structures defined below. */
+typedef struct _GstQTDemux GstQTDemux;
+typedef struct _GstQTDemuxClass GstQTDemuxClass;
+typedef struct _QtDemuxStream QtDemuxStream;
+typedef struct _QtDemuxSample QtDemuxSample;
+typedef struct _QtDemuxSegment QtDemuxSegment;
+typedef struct _QtDemuxRandomAccessEntry QtDemuxRandomAccessEntry;
+typedef struct _QtDemuxStreamStsdEntry QtDemuxStreamStsdEntry;
+
+/* Top-level parsing state of the demuxer. */
+enum QtDemuxState
+{
+  QTDEMUX_STATE_INITIAL,        /* Initial state (haven't got the header yet) */
+  QTDEMUX_STATE_HEADER,         /* Parsing the header */
+  QTDEMUX_STATE_MOVIE,          /* Parsing/Playing the media data */
+  QTDEMUX_STATE_BUFFER_MDAT     /* Buffering the mdat atom */
+};
+
+/* Instance state for the QuickTime/ISO-BMFF demuxer. Several field groups
+ * below are only meaningful in one scheduling mode (pull- vs push-based);
+ * the inline comments mark which. */
+struct _GstQTDemux {
+  GstElement element;
+
+  /* Global state */
+  enum QtDemuxState state;
+
+  /* static sink pad */
+  GstPad *sinkpad;
+
+  /* TRUE if pull-based */
+  gboolean pullbased;
+
+  gchar *redirect_location;
+
+  /* Protect pad exposing from flush event */
+  GMutex expose_lock;
+
+  /* list of QtDemuxStream */
+  GPtrArray *active_streams;
+  GPtrArray *old_streams;
+
+  gint n_video_streams;
+  gint n_audio_streams;
+  gint n_sub_streams;
+
+  GstFlowCombiner *flowcombiner;
+
+  /* Incoming stream group-id to set on downstream STREAM_START events.
+   * If upstream doesn't contain one, a global one will be generated */
+  gboolean have_group_id;
+  guint group_id;
+
+  guint major_brand;
+  GstBuffer *comp_brands;
+
+  /* [moov] header.
+   * FIXME : This is discarded just after it's created. Just move it
+   * to a temporary variable ? */
+  GNode *moov_node;
+
+  /* FIXME : This is never freed. It is only assigned once. memleak ? */
+  GNode *moov_node_compressed;
+
+  /* Set to TRUE when the [moov] header has been fully parsed */
+  gboolean got_moov;
+
+  /* Global timescale for the incoming stream. Use the QTTIME macros
+   * to convert values to/from GstClockTime */
+  guint32 timescale;
+
+  /* Global duration (in global timescale). Use QTTIME macros to get GstClockTime */
+  guint64 duration;
+
+  /* Total size of header atoms. Used to calculate fallback overall bitrate */
+  guint header_size;
+
+  GstTagList *tag_list;
+
+  /* configured playback region */
+  GstSegment segment;
+
+  /* State for key_units trickmode */
+  GstClockTime trickmode_interval;
+
+  /* PUSH-BASED only: If the initial segment event, or a segment consequence of
+   * a seek or incoming TIME segment from upstream needs to be pushed. This
+   * variable is used instead of pushing the event directly because at that
+   * point we may not have yet emitted the srcpads. */
+  gboolean need_segment;
+
+  guint32 segment_seqnum;
+
+  /* flag to indicate that we're working with a smoothstreaming fragment
+   * Mss doesn't have 'moov' or any information about the streams format,
+   * requiring qtdemux to expose and create the streams */
+  gboolean mss_mode;
+
+  /* Set to TRUE if the incoming stream is either a MSS stream or
+   * a Fragmented MP4 (containing the [mvex] atom in the header) */
+  gboolean fragmented;
+
+  /* PULL-BASED only : If TRUE there is a pending seek */
+  gboolean fragmented_seek_pending;
+
+  /* PULL-BASED : offset of first [moof] or of fragment to seek to
+   * PUSH-BASED : offset of latest [moof] */
+  guint64 moof_offset;
+
+  /* MSS streams have a single media that is unspecified at the atoms, so
+   * upstream provides it at the caps */
+  GstCaps *media_caps;
+
+  /* Set to TRUE when all streams have been exposed */
+  gboolean exposed;
+
+  gint64 chapters_track_id;
+
+  /* protection support */
+  GPtrArray *protection_system_ids; /* Holds identifiers of all content protection systems for all tracks */
+  GQueue protection_event_queue; /* holds copy of upstream protection events */
+  guint64 cenc_aux_info_offset;
+  guint8 *cenc_aux_info_sizes;
+  guint32 cenc_aux_sample_count;
+  gchar *preferred_protection_system_id;
+
+  /* Whether the parent bin is streams-aware, meaning we can
+   * add/remove streams at any point in time */
+  gboolean streams_aware;
+
+  /*
+   * ALL VARIABLES BELOW ARE ONLY USED IN PUSH-BASED MODE
+   */
+  GstAdapter *adapter;
+  guint neededbytes;
+  guint todrop;
+  /* Used to store data if [mdat] is before the headers */
+  GstBuffer *mdatbuffer;
+  /* Amount of bytes left to read in the current [mdat] */
+  guint64 mdatleft, mdatsize;
+
+  /* When restoring the mdat to the adapter, this buffer stores any
+   * trailing data that was after the last atom parsed as it has to be
+   * restored later along with the correct offset. Used in fragmented
+   * scenario where mdat/moof are one after the other in any order.
+   *
+   * Check https://bugzilla.gnome.org/show_bug.cgi?id=710623 */
+  GstBuffer *restoredata_buffer;
+  guint64 restoredata_offset;
+
+  /* The current offset in bytes from upstream.
+   * Note: While it makes complete sense when we are PULL-BASED (pulling
+   * in BYTES from upstream) and PUSH-BASED with a BYTE SEGMENT (receiving
+   * buffers with actual offsets), it is undefined in PUSH-BASED with a
+   * TIME SEGMENT */
+  guint64 offset;
+
+  /* offset of the mdat atom */
+  guint64 mdatoffset;
+  /* Offset of the first mdat */
+  guint64 first_mdat;
+  /* offset of last [moov] seen */
+  guint64 last_moov_offset;
+
+  /* If TRUE, qtdemux received upstream newsegment in TIME format
+   * which likely means that upstream is driving the pipeline (such as
+   * adaptive demuxers or dlna sources) */
+  gboolean upstream_format_is_time;
+
+  /* Seqnum of the seek event sent upstream. Will be used to
+   * detect incoming FLUSH events corresponding to that */
+  guint32 offset_seek_seqnum;
+
+  /* UPSTREAM BYTE: Requested upstream byte seek offset.
+   * Currently it is only used to check if an incoming BYTE SEGMENT
+   * corresponds to a seek event that was sent upstream */
+  gint64 seek_offset;
+
+  /* UPSTREAM BYTE: Requested start/stop TIME values from
+   * downstream.
+   * Used to set on the downstream segment once the corresponding upstream
+   * BYTE SEEK has succeeded */
+  gint64 push_seek_start;
+  gint64 push_seek_stop;
+
+#if 0
+  /* gst index support */
+  GstIndex *element_index;
+  gint index_id;
+#endif
+
+  /* Whether upstream is seekable in BYTES */
+  gboolean upstream_seekable;
+  /* UPSTREAM BYTE: Size of upstream content.
+   * Note : This is only computed once ! If upstream grows in the meantime
+   * it will not be updated */
+  gint64 upstream_size;
+
+  /* UPSTREAM TIME : Contains the PTS (if any) of the
+   * buffer that contains a [moof] header. Will be used to establish
+   * the actual PTS of the samples contained within that fragment. */
+  guint64 fragment_start;
+  /* UPSTREAM TIME : The offset in bytes of the [moof]
+   * header start.
+   * Note : This is not computed from the GST_BUFFER_OFFSET field */
+  guint64 fragment_start_offset;
+
+  /* These two fields are used to perform an implicit seek when a fragmented
+   * file whose first tfdt is not zero. This way if the first fragment starts
+   * at 1 hour, the user does not have to wait 1 hour or perform a manual seek
+   * for the image to move and the sound to play.
+   *
+   * This implicit seek is only done if the first parsed fragment has a non-zero
+   * decode base time and a seek has not been received previously, hence these
+   * fields. */
+  gboolean received_seek;
+  gboolean first_moof_already_parsed;
+};
+
+/* Class structure; qtdemux adds no class-level hooks beyond GstElement. */
+struct _GstQTDemuxClass {
+  GstElementClass parent_class;
+};
+
+GType gst_qtdemux_get_type (void);
+
+/* Parsed copy of one sample description ('stsd') entry. A track may carry
+ * several of these; QtDemuxStream.cur_stsd_entry_index selects the active
+ * one. */
+struct _QtDemuxStreamStsdEntry
+{
+  GstCaps *caps;
+  guint32 fourcc;
+  gboolean sparse;
+
+  /* video info */
+  gint width;
+  gint height;
+  gint par_w;
+  gint par_h;
+  /* Numerator/denominator framerate */
+  gint fps_n;
+  gint fps_d;
+  GstVideoColorimetry colorimetry;
+  guint16 bits_per_sample;
+  guint16 color_table_id;
+  GstMemory *rgb8_palette;
+  guint interlace_mode;
+  guint field_order;
+
+  /* audio info */
+  gdouble rate;
+  gint n_channels;
+  guint samples_per_packet;
+  guint samples_per_frame;
+  guint bytes_per_packet;
+  guint bytes_per_sample;
+  guint bytes_per_frame;
+  guint compression;
+
+  /* if we use chunks or samples */
+  gboolean sampled;
+  guint padding;        /* extra padding bytes per sample (e.g. set to 1 for
+                         * Theora in qtdemux_video_caps) */
+
+};
+
+/* One media sample: its location in the file, size and timing (timestamps
+ * are in mov timescale units, not GstClockTime). */
+struct _QtDemuxSample
+{
+  guint32 size;
+  gint32 pts_offset;            /* Add this value to timestamp to get the pts */
+  guint64 offset;
+  guint64 timestamp;            /* DTS In mov time */
+  guint32 duration;             /* In mov time */
+  gboolean keyframe;            /* TRUE when this packet is a keyframe */
+};
+
+/* Per-track state: sample tables, current playback position, edit-list
+ * segments, and fragment/protection bookkeeping. Instances are
+ * reference-counted (see ref_count at the end). */
+struct _QtDemuxStream
+{
+  GstPad *pad;
+
+  GstQTDemux *demux;
+  gchar *stream_id;
+
+  QtDemuxStreamStsdEntry *stsd_entries;
+  guint stsd_entries_length;
+  guint cur_stsd_entry_index;
+
+  /* stream type */
+  guint32 subtype;
+
+  gboolean new_caps;            /* If TRUE, caps need to be generated (by
+                                 * calling _configure_stream()) This happens
+                                 * for MSS and fragmented streams */
+
+  gboolean new_stream;          /* signals that a stream_start is required */
+  gboolean on_keyframe;         /* if this stream last pushed buffer was a
+                                 * keyframe. This is important to identify
+                                 * where to stop pushing buffers after a
+                                 * segment stop time */
+
+  /* if the stream has a redirect URI in its headers, we store it here */
+  gchar *redirect_uri;
+
+  /* track id */
+  guint track_id;
+
+  /* duration/scale */
+  guint64 duration;             /* in timescale units */
+  guint32 timescale;
+
+  /* language */
+  gchar lang_id[4];             /* ISO 639-2T language code */
+
+  /* our samples */
+  guint32 n_samples;
+  QtDemuxSample *samples;
+  gboolean all_keyframe;        /* TRUE when all samples are keyframes (no stss) */
+  guint32 n_samples_moof;       /* sample count in a moof */
+  guint64 duration_moof;        /* duration in timescale of a moof, used for figure out
+                                 * the framerate of fragmented format stream */
+  guint64 duration_last_moof;
+
+  guint32 offset_in_sample;     /* Offset in the current sample, used for
+                                 * streams which have got exceedingly big
+                                 * sample size (such as 24s of raw audio).
+                                 * Only used when max_buffer_size is non-NULL */
+  guint32 min_buffer_size;      /* Minimum allowed size for output buffers.
+                                 * Currently only set for raw audio streams*/
+  guint32 max_buffer_size;      /* Maximum allowed size for output buffers.
+                                 * Currently only set for raw audio streams*/
+
+  /* video info */
+  /* aspect ratio */
+  gint display_width;
+  gint display_height;
+
+  /* allocation */
+  gboolean use_allocator;
+  GstAllocator *allocator;
+  GstAllocationParams params;
+
+  gsize alignment;
+
+  /* when a discontinuity is pending */
+  gboolean discont;
+
+  /* list of buffers to push first */
+  GSList *buffers;
+
+  /* if we need to clip this buffer. This is only needed for uncompressed
+   * data */
+  gboolean need_clip;
+
+  /* buffer needs some custom processing, e.g. subtitles */
+  gboolean need_process;
+  /* buffer needs potentially be split, e.g. CEA608 subtitles */
+  gboolean need_split;
+
+  /* current position */
+  guint32 segment_index;
+  guint32 sample_index;
+  GstClockTime time_position;   /* in gst time */
+  guint64 accumulated_base;
+
+  /* the Gst segment we are processing out, used for clipping */
+  GstSegment segment;
+
+  /* quicktime segments */
+  guint32 n_segments;
+  QtDemuxSegment *segments;
+  gboolean dummy_segment;
+  guint32 from_sample;
+  guint32 to_sample;
+
+  gboolean sent_eos;
+  GstTagList *stream_tags;
+  gboolean send_global_tags;
+
+  GstEvent *pending_event;
+
+  /* readers over the raw sample-table atoms of this track */
+  GstByteReader stco;
+  GstByteReader stsz;
+  GstByteReader stsc;
+  GstByteReader stts;
+  GstByteReader stss;
+  GstByteReader stps;
+  GstByteReader ctts;
+
+  gboolean chunks_are_samples;  /* TRUE means treat chunks as samples */
+  gint64 stbl_index;
+  /* stco */
+  guint co_size;
+  GstByteReader co_chunk;
+  guint32 first_chunk;
+  guint32 current_chunk;
+  guint32 last_chunk;
+  guint32 samples_per_chunk;
+  guint32 stsd_sample_description_id;
+  guint32 stco_sample_index;
+  /* stsz */
+  guint32 sample_size;          /* 0 means variable sizes are stored in stsz */
+  /* stsc */
+  guint32 stsc_index;
+  guint32 n_samples_per_chunk;
+  guint32 stsc_chunk_index;
+  guint32 stsc_sample_index;
+  guint64 chunk_offset;
+  /* stts */
+  guint32 stts_index;
+  guint32 stts_samples;
+  guint32 n_sample_times;
+  guint32 stts_sample_index;
+  guint64 stts_time;
+  guint32 stts_duration;
+  /* stss */
+  gboolean stss_present;
+  guint32 n_sample_syncs;
+  guint32 stss_index;
+  /* stps */
+  gboolean stps_present;
+  guint32 n_sample_partial_syncs;
+  guint32 stps_index;
+  QtDemuxRandomAccessEntry *ra_entries;
+  guint n_ra_entries;
+
+  const QtDemuxRandomAccessEntry *pending_seek;
+
+  /* ctts */
+  gboolean ctts_present;
+  guint32 n_composition_times;
+  guint32 ctts_index;
+  guint32 ctts_sample_index;
+  guint32 ctts_count;
+  gint32 ctts_soffset;
+
+  /* cslg */
+  guint32 cslg_shift;
+
+  /* fragmented */
+  gboolean parsed_trex;
+  guint32 def_sample_description_index; /* index is 1-based */
+  guint32 def_sample_duration;
+  guint32 def_sample_size;
+  guint32 def_sample_flags;
+
+  gboolean disabled;
+
+  /* stereoscopic video streams */
+  GstVideoMultiviewMode multiview_mode;
+  GstVideoMultiviewFlags multiview_flags;
+
+  /* protected streams */
+  gboolean protected;
+  guint32 protection_scheme_type;
+  guint32 protection_scheme_version;
+  gpointer protection_scheme_info;      /* specific to the protection scheme */
+  GQueue protection_scheme_event_queue;
+
+  /* KEY_UNITS trickmode with an interval */
+  GstClockTime last_keyframe_dts;
+
+  gint ref_count;               /* atomic */
+};
+
+G_END_DECLS
+
+#endif /* __GST_QTDEMUX_H__ */
diff --git a/gst/isomp4/qtdemux_debug.h b/gst/isomp4/qtdemux_debug.h
new file mode 100644
index 0000000000..26da2686c7
--- /dev/null
+++ b/gst/isomp4/qtdemux_debug.h
@@ -0,0 +1,12 @@
+#ifndef __GST_QTDEMUX_DEBUG_H__
+#define __GST_QTDEMUX_DEBUG_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+GST_DEBUG_CATEGORY_EXTERN (qtdemux_debug);
+
+G_END_DECLS
+
+#endif /* __GST_QTDEMUX_DEBUG_H__ */
diff --git a/gst/isomp4/qtdemux_dump.c b/gst/isomp4/qtdemux_dump.c
new file mode 100644
index 0000000000..25921dfe2e
--- /dev/null
+++ b/gst/isomp4/qtdemux_dump.c
@@ -0,0 +1,1097 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2009 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "qtdemux_debug.h"
+#include "qtdemux_types.h"
+#include "qtdemux_dump.h"
+#include "fourcc.h"
+
+#include "qtatomparser.h"
+
+#include <string.h>
+
+#define GST_CAT_DEFAULT qtdemux_debug
+
+/* Unchecked byte-reader accessors: the caller must verify availability
+ * first (qt_atom_parser_has_remaining / qt_atom_parser_has_chunks).
+ * GET_FP32 / GET_FP16 convert 16.16 and 8.8 fixed-point values to double. */
+#define GET_UINT8(data) gst_byte_reader_get_uint8_unchecked(data)
+#define GET_UINT16(data) gst_byte_reader_get_uint16_be_unchecked(data)
+#define GET_UINT32(data) gst_byte_reader_get_uint32_be_unchecked(data)
+#define GET_UINT64(data) gst_byte_reader_get_uint64_be_unchecked(data)
+#define GET_FP32(data) (gst_byte_reader_get_uint32_be_unchecked(data)/65536.0)
+#define GET_FP16(data) (gst_byte_reader_get_uint16_be_unchecked(data)/256.0)
+#define GET_FOURCC(data) qt_atom_parser_get_fourcc_unchecked(data)
+
+/* Dump a 'mvhd' (movie header) atom at LOG level.
+ * (fixed: the version-1 layout is 12 bytes longer than version 0 because of
+ * its three 64-bit time fields, but only the 100-byte version-0 size was
+ * checked before the unchecked reads; also log the unsigned track ID with
+ * %u instead of %d) */
+gboolean
+qtdemux_dump_mvhd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 version = 0;
+
+  /* covers the version/flags word plus the full version-0 body */
+  if (!qt_atom_parser_has_remaining (data, 100))
+    return FALSE;
+
+  version = GET_UINT32 (data);
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+
+  version = version >> 24;
+  if (version == 0) {
+    GST_LOG ("%*s creation time: %u", depth, "", GET_UINT32 (data));
+    GST_LOG ("%*s modify time: %u", depth, "", GET_UINT32 (data));
+    GST_LOG ("%*s time scale: 1/%u sec", depth, "", GET_UINT32 (data));
+    GST_LOG ("%*s duration: %u", depth, "", GET_UINT32 (data));
+  } else if (version == 1) {
+    /* the 64-bit creation/modify/duration fields need 12 bytes more than
+     * the version-0 layout checked above, so re-check before reading */
+    if (!qt_atom_parser_has_remaining (data, 108))
+      return FALSE;
+    GST_LOG ("%*s creation time: %" G_GUINT64_FORMAT,
+        depth, "", GET_UINT64 (data));
+    GST_LOG ("%*s modify time: %" G_GUINT64_FORMAT,
+        depth, "", GET_UINT64 (data));
+    GST_LOG ("%*s time scale: 1/%u sec", depth, "", GET_UINT32 (data));
+    GST_LOG ("%*s duration: %" G_GUINT64_FORMAT,
+        depth, "", GET_UINT64 (data));
+  } else
+    return FALSE;
+
+  GST_LOG ("%*s pref. rate: %g", depth, "", GET_FP32 (data));
+  GST_LOG ("%*s pref. volume: %g", depth, "", GET_FP16 (data));
+  /* 46 bytes that are not dumped */
+  gst_byte_reader_skip_unchecked (data, 46);
+  GST_LOG ("%*s preview time: %u", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s preview dur.: %u", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s poster time: %u", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s select time: %u", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s select dur.: %u", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s current time: %u", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s next track ID: %u", depth, "", GET_UINT32 (data));
+  return TRUE;
+}
+
+/* Dump a 'tkhd' (track header) atom at LOG level.  Version 1 stores the
+ * creation/modification/duration fields as 64-bit values, version 0 as
+ * 32-bit; qt_atom_parser_get_offset () widens both into a guint64. */
+gboolean
+qtdemux_dump_tkhd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint64 duration, ctime, mtime;
+  guint32 version = 0, track_id = 0, iwidth = 0, iheight = 0;
+  guint16 layer = 0, alt_group = 0, ivol = 0;
+  guint value_size;
+
+  if (!gst_byte_reader_get_uint32_be (data, &version))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+
+  /* field width of the time/duration values depends on the version byte */
+  value_size = ((version >> 24) == 1) ? sizeof (guint64) : sizeof (guint32);
+
+  /* every read is checked; a short atom makes the whole chain fail */
+  if (qt_atom_parser_get_offset (data, value_size, &ctime) &&
+      qt_atom_parser_get_offset (data, value_size, &mtime) &&
+      gst_byte_reader_get_uint32_be (data, &track_id) &&
+      gst_byte_reader_skip (data, 4) &&
+      qt_atom_parser_get_offset (data, value_size, &duration) &&
+      gst_byte_reader_skip (data, 4) &&
+      gst_byte_reader_get_uint16_be (data, &layer) &&
+      gst_byte_reader_get_uint16_be (data, &alt_group) &&
+      gst_byte_reader_skip (data, 4) &&
+      gst_byte_reader_get_uint16_be (data, &ivol) &&
+      gst_byte_reader_skip (data, 2 + (9 * 4)) &&
+      gst_byte_reader_get_uint32_be (data, &iwidth) &&
+      gst_byte_reader_get_uint32_be (data, &iheight)) {
+    GST_LOG ("%*s creation time: %" G_GUINT64_FORMAT, depth, "", ctime);
+    GST_LOG ("%*s modify time: %" G_GUINT64_FORMAT, depth, "", mtime);
+    GST_LOG ("%*s track ID: %u", depth, "", track_id);
+    GST_LOG ("%*s duration: %" G_GUINT64_FORMAT, depth, "", duration);
+    GST_LOG ("%*s layer: %u", depth, "", layer);
+    GST_LOG ("%*s alt group: %u", depth, "", alt_group);
+    /* volume is 8.8 fixed point, width/height are 16.16 fixed point */
+    GST_LOG ("%*s volume: %g", depth, "", ivol / 256.0);
+    GST_LOG ("%*s track width: %g", depth, "", iwidth / 65536.0);
+    GST_LOG ("%*s track height: %g", depth, "", iheight / 65536.0);
+    return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Dump an 'elst' (edit list) atom: version/flags, entry count, then one
+ * duration / media-time / media-rate triplet per entry. */
+gboolean
+qtdemux_dump_elst (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+
+  /* verify all 12-byte entries are available before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4 + 4 + 4))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 track_dur = GET_UINT32 (data);
+    guint32 media_time = GET_UINT32 (data);
+    gdouble media_rate = GET_FP32 (data);
+
+    GST_LOG ("%*s track dur: %u", depth, "", track_dur);
+    GST_LOG ("%*s media time: %u", depth, "", media_time);
+    GST_LOG ("%*s media rate: %g", depth, "", media_rate);
+  }
+  return TRUE;
+}
+
+/* Dump an 'mdhd' (media header) atom at LOG level.  As with tkhd, the
+ * time fields are 64 bit in version 1 and 32 bit in version 0. */
+gboolean
+qtdemux_dump_mdhd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 version = 0;
+  guint64 duration, ctime, mtime;
+  guint32 time_scale = 0;
+  guint16 language = 0, quality = 0;
+  guint value_size;
+
+  if (!gst_byte_reader_get_uint32_be (data, &version))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+
+  /* 32- or 64-bit time fields depending on the version byte */
+  value_size = ((version >> 24) == 1) ? sizeof (guint64) : sizeof (guint32);
+
+  /* checked read chain; a short atom fails the whole condition */
+  if (qt_atom_parser_get_offset (data, value_size, &ctime) &&
+      qt_atom_parser_get_offset (data, value_size, &mtime) &&
+      gst_byte_reader_get_uint32_be (data, &time_scale) &&
+      qt_atom_parser_get_offset (data, value_size, &duration) &&
+      gst_byte_reader_get_uint16_be (data, &language) &&
+      gst_byte_reader_get_uint16_be (data, &quality)) {
+    GST_LOG ("%*s creation time: %" G_GUINT64_FORMAT, depth, "", ctime);
+    GST_LOG ("%*s modify time: %" G_GUINT64_FORMAT, depth, "", mtime);
+    GST_LOG ("%*s time scale: 1/%u sec", depth, "", time_scale);
+    GST_LOG ("%*s duration: %" G_GUINT64_FORMAT, depth, "", duration);
+    GST_LOG ("%*s language: %u", depth, "", language);
+    GST_LOG ("%*s quality: %u", depth, "", quality);
+    return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Dump an 'hdlr' (handler reference) atom at LOG level. */
+gboolean
+qtdemux_dump_hdlr (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 version, type, subtype, manufacturer;
+  const gchar *name;
+
+  /* six 32-bit fields precede the handler name */
+  if (!qt_atom_parser_has_remaining (data, 4 + 4 + 4 + 4 + 4 + 4))
+    return FALSE;
+
+  version = GET_UINT32 (data);
+  type = GET_FOURCC (data);
+  subtype = GET_FOURCC (data);
+  manufacturer = GET_FOURCC (data);
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+  GST_LOG ("%*s type: %" GST_FOURCC_FORMAT, depth, "",
+      GST_FOURCC_ARGS (type));
+  GST_LOG ("%*s subtype: %" GST_FOURCC_FORMAT, depth, "",
+      GST_FOURCC_ARGS (subtype));
+  GST_LOG ("%*s manufacturer: %" GST_FOURCC_FORMAT, depth, "",
+      GST_FOURCC_ARGS (manufacturer));
+  GST_LOG ("%*s flags: %08x", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s flags mask: %08x", depth, "", GET_UINT32 (data));
+
+  /* quicktime uses pascal string, mp4 zero-terminated string */
+  if (gst_byte_reader_peek_string (data, &name)) {
+    GST_LOG ("%*s name: %s", depth, "", name);
+  } else {
+    gchar buf[256];
+    guint8 len;
+
+    /* pascal string: one length byte (<= 255) then the characters, so the
+     * terminating NUL at buf[len] always fits inside the 256-byte buffer */
+    if (gst_byte_reader_get_uint8 (data, &len)
+        && qt_atom_parser_has_remaining (data, len)) {
+      memcpy (buf, gst_byte_reader_peek_data_unchecked (data), len);
+      buf[len] = '\0';
+      GST_LOG ("%*s name: %s", depth, "", buf);
+    }
+  }
+  return TRUE;
+}
+
+/* Dump a 'vmhd' (video media header) atom at LOG level. */
+gboolean
+qtdemux_dump_vmhd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags, mode_color;
+
+  if (!qt_atom_parser_has_remaining (data, 4 + 4))
+    return FALSE;
+
+  vflags = GET_UINT32 (data);
+  mode_color = GET_UINT32 (data);
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s mode/color: %08x", depth, "", mode_color);
+  return TRUE;
+}
+
+/* Dump a 'dref' (data reference) atom: entry count plus the size and
+ * fourcc of every entry; entry payloads are skipped. */
+gboolean
+qtdemux_dump_dref (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %u", depth, "", n_entries);
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 entry_size = 0, entry_fourcc;
+
+    if (!gst_byte_reader_get_uint32_be (data, &entry_size))
+      return FALSE;
+    if (!qt_atom_parser_get_fourcc (data, &entry_fourcc))
+      return FALSE;
+    /* entry_size includes the 8 header bytes consumed above */
+    if (entry_size < 8)
+      return FALSE;
+    if (!gst_byte_reader_skip (data, entry_size - 8))
+      return FALSE;
+
+    GST_LOG ("%*s size: %u", depth, "", entry_size);
+    GST_LOG ("%*s type: %" GST_FOURCC_FORMAT, depth, "",
+        GST_FOURCC_ARGS (entry_fourcc));
+  }
+  return TRUE;
+}
+
+/* Dump an 'avc1' sample entry.  All reads below are unchecked; they are
+ * covered by the single size check at the top (the summed field widths
+ * equal the 78 bytes that are consumed). */
+static gboolean
+qtdemux_dump_stsd_avc1 (GstQTDemux * qtdemux, GstByteReader * data, guint size,
+    int depth)
+{
+  guint32 fourcc;
+
+  /* Size of avc1 = 78 bytes */
+  if (size < (6 + 2 + 4 + 4 + 4 + 4 + 2 + 2 + 4 + 4 + 4 + 2 + 1 + 31 + 2 + 2))
+    return FALSE;
+
+  gst_byte_reader_skip_unchecked (data, 6);
+  GST_LOG_OBJECT (qtdemux, "%*s data reference:%d", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s version/rev.: %08x", depth, "",
+      GET_UINT32 (data));
+  fourcc = GET_FOURCC (data);
+  GST_LOG_OBJECT (qtdemux, "%*s vendor: %" GST_FOURCC_FORMAT, depth,
+      "", GST_FOURCC_ARGS (fourcc));
+  GST_LOG_OBJECT (qtdemux, "%*s temporal qual: %u", depth, "",
+      GET_UINT32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s spatial qual: %u", depth, "",
+      GET_UINT32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s width: %u", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s height: %u", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s horiz. resol: %g", depth, "",
+      GET_FP32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s vert. resol.: %g", depth, "",
+      GET_FP32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s data size: %u", depth, "",
+      GET_UINT32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s frame count: %u", depth, "",
+      GET_UINT16 (data));
+  /* something is not right with this, it's supposed to be a string but it's
+   * not apparently, so just skip this for now */
+  gst_byte_reader_skip_unchecked (data, 1 + 31);
+  GST_LOG_OBJECT (qtdemux, "%*s compressor: (skipped)", depth, "");
+  GST_LOG_OBJECT (qtdemux, "%*s depth: %u", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s color table ID:%u", depth, "",
+      GET_UINT16 (data));
+
+  return TRUE;
+}
+
+
+/* Dump an 'av01' sample entry.
+ * (fixed: the compressor-name length byte must be clamped with MIN, not
+ * MAX -- with MAX, a length byte > 31 makes compressor_name[compressor_len]
+ * write the terminating NUL past the end of the 32-byte buffer) */
+static gboolean
+qtdemux_dump_stsd_av01 (GstQTDemux * qtdemux, GstByteReader * data, guint size,
+    int depth)
+{
+  guint compressor_len;
+  char compressor_name[32];
+
+  /* Size of av01 = 78 bytes */
+  if (size < (6 + 2 + 4 + 12 + 2 + 2 + 4 + 4 + 4 + 2 + 1 + 31 + 2 + 2))
+    return FALSE;
+
+  gst_byte_reader_skip_unchecked (data, 6);
+  GST_LOG_OBJECT (qtdemux, "%*s data reference:%d", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s version/rev.: %08x", depth, "",
+      GET_UINT32 (data));
+  gst_byte_reader_skip_unchecked (data, 12); /* pre-defined & reserved */
+  GST_LOG_OBJECT (qtdemux, "%*s width: %u", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s height: %u", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s horiz. resol: %g", depth, "",
+      GET_FP32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s vert. resol.: %g", depth, "",
+      GET_FP32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s data size: %u", depth, "",
+      GET_UINT32 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s frame count: %u", depth, "",
+      GET_UINT16 (data));
+  /* pascal-style string: one length byte, then at most 31 characters;
+   * clamp the length so the NUL terminator stays inside the buffer */
+  compressor_len = MIN (GET_UINT8 (data), 31);
+  memcpy (compressor_name, gst_byte_reader_get_data_unchecked (data, 31), 31);
+  compressor_name[compressor_len] = 0;
+  GST_LOG_OBJECT (qtdemux, "%*s compressor: %s", depth, "",
+      compressor_name);
+  GST_LOG_OBJECT (qtdemux, "%*s depth: %u", depth, "",
+      GET_UINT16 (data));
+  GST_LOG_OBJECT (qtdemux, "%*s color table ID:%u", depth, "",
+      GET_UINT16 (data));
+
+  return TRUE;
+}
+
+/* Dump an 'stsd' (sample description) atom and dispatch each entry to a
+ * format-specific dumper where we have one.
+ * (fixed: the mp4s branch used to read the sub-atom header into the outer
+ * ver_flags/num_entries variables, corrupting the loop bound) */
+gboolean
+qtdemux_dump_stsd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 ver_flags = 0, num_entries = 0, i;
+
+  if (!gst_byte_reader_get_uint32_be (data, &ver_flags) ||
+      !gst_byte_reader_get_uint32_be (data, &num_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", ver_flags);
+  GST_LOG ("%*s n entries: %d", depth, "", num_entries);
+
+  for (i = 0; i < num_entries; i++) {
+    GstByteReader sub;
+    guint32 size, remain;
+    guint32 fourcc;
+
+    if (!gst_byte_reader_get_uint32_be (data, &size) ||
+        !qt_atom_parser_get_fourcc (data, &fourcc))
+      return FALSE;
+
+    GST_LOG_OBJECT (qtdemux, "%*s size: %u", depth, "", size);
+    GST_LOG_OBJECT (qtdemux, "%*s type: %" GST_FOURCC_FORMAT, depth,
+        "", GST_FOURCC_ARGS (fourcc));
+
+    remain = gst_byte_reader_get_remaining (data);
+    /* Size includes the 8 bytes we just read: len & fourcc, then 8 bytes
+     * version, flags, entries_count */
+    if (size > remain + 8) {
+      GST_LOG_OBJECT (qtdemux,
+          "Not enough data left for this atom (have %u need %u)", remain, size);
+      return FALSE;
+    }
+
+    qt_atom_parser_peek_sub (data, 0, size, &sub);
+    switch (fourcc) {
+      case FOURCC_avc1:
+        if (!qtdemux_dump_stsd_avc1 (qtdemux, &sub, size, depth + 1))
+          return FALSE;
+        break;
+      case FOURCC_fLaC:
+        /* will be dumped by node_dump_foreach */
+        break;
+      case FOURCC_mp4s:{
+        /* local variables: reading the sub-atom header into the outer
+         * ver_flags/num_entries would change the outer loop bound */
+        guint32 sub_ver_flags = 0, sub_num_entries = 0;
+
+        if (!gst_byte_reader_get_uint32_be (&sub, &sub_ver_flags) ||
+            !gst_byte_reader_get_uint32_be (&sub, &sub_num_entries))
+          return FALSE;
+        if (!qtdemux_dump_unknown (qtdemux, &sub, depth + 1))
+          return FALSE;
+        break;
+      }
+      case FOURCC_av01:
+        if (!qtdemux_dump_stsd_av01 (qtdemux, &sub, size, depth + 1))
+          return FALSE;
+        break;
+      default:
+        /* Unknown stsd data, dump the bytes */
+        if (!qtdemux_dump_unknown (qtdemux, &sub, depth + 1))
+          return FALSE;
+        break;
+    }
+
+    /* advance the parent reader past this entry (the 8 header bytes were
+     * already consumed; a size < 8 wraps and simply makes the skip fail) */
+    if (!gst_byte_reader_skip (data, size - (4 + 4)))
+      return FALSE;
+  }
+  return TRUE;
+}
+
+/* Dump an 'stts' (time-to-sample) atom: one count/duration pair per entry. */
+gboolean
+qtdemux_dump_stts (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+
+  /* ensure all 8-byte entries are present before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4 + 4))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 sample_count = GET_UINT32 (data);
+    guint32 sample_dur = GET_UINT32 (data);
+
+    GST_LOG ("%*s count: %u", depth, "", sample_count);
+    GST_LOG ("%*s duration: %u", depth, "", sample_dur);
+  }
+  return TRUE;
+}
+
+/* Dump an 'stps' (partial sync sample) atom: a list of sample numbers. */
+gboolean
+qtdemux_dump_stps (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+
+  /* ensure all 4-byte entries are present before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 sample_num = GET_UINT32 (data);
+
+    GST_LOG ("%*s sample: %u", depth, "", sample_num);
+  }
+  return TRUE;
+}
+
+/* Dump an 'stss' (sync sample) atom: a list of keyframe sample numbers. */
+gboolean
+qtdemux_dump_stss (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+
+  /* ensure all 4-byte entries are present before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 sample_num = GET_UINT32 (data);
+
+    GST_LOG ("%*s sample: %u", depth, "", sample_num);
+  }
+  return TRUE;
+}
+
+/* Dump an 'stsc' (sample-to-chunk) atom: per entry the first chunk,
+ * samples per chunk and the sample description id. */
+gboolean
+qtdemux_dump_stsc (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+
+  /* ensure all 12-byte entries are present before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4 + 4 + 4))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 first_chunk = GET_UINT32 (data);
+    guint32 samples_per_chunk = GET_UINT32 (data);
+    guint32 sample_desc_id = GET_UINT32 (data);
+
+    GST_LOG ("%*s first chunk: %u", depth, "", first_chunk);
+    GST_LOG ("%*s sample per ch: %u", depth, "", samples_per_chunk);
+    GST_LOG ("%*s sample desc id:%08x", depth, "", sample_desc_id);
+  }
+  return TRUE;
+}
+
+/* Dump an 'stsz' (sample size) atom.  A fixed size of 0 means the table
+ * of per-sample sizes follows. */
+gboolean
+qtdemux_dump_stsz (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, fixed_size = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &fixed_size))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s sample size: %d", depth, "", fixed_size);
+
+  /* a non-zero fixed size means there is no per-sample table to dump */
+  if (fixed_size != 0)
+    return TRUE;
+
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4))
+    return FALSE;
+  /* per-sample sizes are dumped at TRACE level, they can be very many */
+  for (idx = 0; idx < n_entries; idx++) {
+    GST_TRACE ("%*s sample size: %u", depth, "", GET_UINT32 (data));
+  }
+  return TRUE;
+}
+
+/* Dump an 'stco' (32-bit chunk offset) atom. */
+gboolean
+qtdemux_dump_stco (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+
+  /* ensure all 4-byte entries are present before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 chunk_off = GET_UINT32 (data);
+
+    GST_LOG ("%*s chunk offset: %u", depth, "", chunk_off);
+  }
+  return TRUE;
+}
+
+/* Dump a 'ctts' (composition time offset) atom: per entry a sample count
+ * and a signed composition offset. */
+gboolean
+qtdemux_dump_ctts (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %u", depth, "", n_entries);
+
+  /* ensure all 8-byte entries are present before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 4 + 4))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint32 sample_count = GET_UINT32 (data);
+    /* the offset is interpreted as signed */
+    gint32 soffset = GET_UINT32 (data);
+
+    GST_LOG ("%*s sample count :%8d offset: %8d", depth, "", sample_count,
+        soffset);
+  }
+  return TRUE;
+}
+
+/* Dump a 'cslg' (composition shift least greatest) atom. */
+gboolean
+qtdemux_dump_cslg (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, shift = 0;
+  gint32 least_offset = 0, start_time = 0, end_time = 0;
+
+  /* all five fields are read with checked getters, in file order */
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &shift))
+    return FALSE;
+  if (!gst_byte_reader_get_int32_be (data, &least_offset))
+    return FALSE;
+  if (!gst_byte_reader_get_int32_be (data, &start_time))
+    return FALSE;
+  if (!gst_byte_reader_get_int32_be (data, &end_time))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s shift: %u", depth, "", shift);
+  GST_LOG ("%*s least offset: %d", depth, "", least_offset);
+  GST_LOG ("%*s start time: %d", depth, "", start_time);
+  GST_LOG ("%*s end time: %d", depth, "", end_time);
+
+  return TRUE;
+}
+
+/* Dump a 'co64' (64-bit chunk offset) atom. */
+gboolean
+qtdemux_dump_co64 (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags = 0, n_entries = 0, idx;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &n_entries))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s n entries: %d", depth, "", n_entries);
+
+  /* ensure all 8-byte entries are present before unchecked reads */
+  if (!qt_atom_parser_has_chunks (data, n_entries, 8))
+    return FALSE;
+
+  for (idx = 0; idx < n_entries; idx++) {
+    guint64 chunk_off = GET_UINT64 (data);
+
+    GST_LOG ("%*s chunk offset: %" G_GUINT64_FORMAT, depth, "", chunk_off);
+  }
+  return TRUE;
+}
+
+/* Dump a 'dcom' (data compression) atom: just the compression fourcc. */
+gboolean
+qtdemux_dump_dcom (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 comp_type;
+
+  if (!qt_atom_parser_has_remaining (data, 4))
+    return FALSE;
+
+  comp_type = GET_FOURCC (data);
+  GST_LOG ("%*s compression type: %" GST_FOURCC_FORMAT, depth, "",
+      GST_FOURCC_ARGS (comp_type));
+  return TRUE;
+}
+
+/* Dump a 'cmvd' (compressed movie data) atom: just its length field. */
+gboolean
+qtdemux_dump_cmvd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 len;
+
+  if (!qt_atom_parser_has_remaining (data, 4))
+    return FALSE;
+
+  len = GET_UINT32 (data);
+  GST_LOG ("%*s length: %d", depth, "", len);
+  return TRUE;
+}
+
+/* Dump an 'mfro' (movie fragment random access offset) atom.
+ * (fixed: the availability check only covered 4 bytes while two 32-bit
+ * words are read unchecked below) */
+gboolean
+qtdemux_dump_mfro (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  if (!qt_atom_parser_has_remaining (data, 4 + 4))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s size: %d", depth, "", GET_UINT32 (data));
+  return TRUE;
+}
+
+/* Dump an 'mfhd' (movie fragment header) atom.
+ * (fixed: the availability check only covered 4 bytes while two 32-bit
+ * words are read unchecked below) */
+gboolean
+qtdemux_dump_mfhd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  if (!qt_atom_parser_has_remaining (data, 4 + 4))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", GET_UINT32 (data));
+  GST_LOG ("%*s sequence_number: %d", depth, "", GET_UINT32 (data));
+  return TRUE;
+}
+
+/* Dump a 'tfra' (track fragment random access) atom. */
+gboolean
+qtdemux_dump_tfra (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint64 time = 0, moof_offset = 0;
+  guint32 len = 0, num_entries = 0, ver_flags = 0, track_id = 0, i;
+  guint value_size, traf_size, trun_size, sample_size;
+
+  if (!gst_byte_reader_get_uint32_be (data, &ver_flags))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", ver_flags);
+
+  if (!gst_byte_reader_get_uint32_be (data, &track_id) ||
+      !gst_byte_reader_get_uint32_be (data, &len) ||
+      !gst_byte_reader_get_uint32_be (data, &num_entries))
+    return FALSE;
+
+  GST_LOG ("%*s track ID: %u", depth, "", track_id);
+  GST_LOG ("%*s length: 0x%x", depth, "", len);
+  GST_LOG ("%*s n entries: %u", depth, "", num_entries);
+
+  /* version 1 entries store time/moof-offset as 64-bit values */
+  value_size = ((ver_flags >> 24) == 1) ? sizeof (guint64) : sizeof (guint32);
+  /* the low bits of 'len' encode the byte width (minus one, so 1..4) of
+   * the per-entry traf/trun/sample number fields */
+  sample_size = (len & 3) + 1;
+  trun_size = ((len & 12) >> 2) + 1;
+  traf_size = ((len & 48) >> 4) + 1;
+
+  /* one availability check up front covers the unchecked reads below */
+  if (!qt_atom_parser_has_chunks (data, num_entries,
+          value_size + value_size + traf_size + trun_size + sample_size))
+    return FALSE;
+
+  for (i = 0; i < num_entries; i++) {
+    qt_atom_parser_get_offset (data, value_size, &time);
+    qt_atom_parser_get_offset (data, value_size, &moof_offset);
+    GST_LOG ("%*s time: %" G_GUINT64_FORMAT, depth, "", time);
+    GST_LOG ("%*s moof_offset: %" G_GUINT64_FORMAT,
+        depth, "", moof_offset);
+    GST_LOG ("%*s traf_number: %u", depth, "",
+        qt_atom_parser_get_uint_with_size_unchecked (data, traf_size));
+    GST_LOG ("%*s trun_number: %u", depth, "",
+        qt_atom_parser_get_uint_with_size_unchecked (data, trun_size));
+    GST_LOG ("%*s sample_number: %u", depth, "",
+        qt_atom_parser_get_uint_with_size_unchecked (data, sample_size));
+  }
+
+  return TRUE;
+}
+
+/* Dump a 'tfhd' (track fragment header) atom.  All fields after the
+ * track id are optional, gated by bits in the 24-bit flags word. */
+gboolean
+qtdemux_dump_tfhd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 flags = 0, n = 0, track_id = 0;
+  guint64 base_data_offset = 0;
+
+  /* one version byte, then the 24-bit flags */
+  if (!gst_byte_reader_skip (data, 1) ||
+      !gst_byte_reader_get_uint24_be (data, &flags))
+    return FALSE;
+  GST_LOG ("%*s flags: %08x", depth, "", flags);
+
+  if (!gst_byte_reader_get_uint32_be (data, &track_id))
+    return FALSE;
+  GST_LOG ("%*s track_id: %u", depth, "", track_id);
+
+  if (flags & TF_BASE_DATA_OFFSET) {
+    if (!gst_byte_reader_get_uint64_be (data, &base_data_offset))
+      return FALSE;
+    GST_LOG ("%*s base-data-offset: %" G_GUINT64_FORMAT,
+        depth, "", base_data_offset);
+  }
+
+  if (flags & TF_SAMPLE_DESCRIPTION_INDEX) {
+    if (!gst_byte_reader_get_uint32_be (data, &n))
+      return FALSE;
+    GST_LOG ("%*s sample-description-index: %u", depth, "", n);
+  }
+
+  if (flags & TF_DEFAULT_SAMPLE_DURATION) {
+    if (!gst_byte_reader_get_uint32_be (data, &n))
+      return FALSE;
+    GST_LOG ("%*s default-sample-duration: %u", depth, "", n);
+  }
+
+  if (flags & TF_DEFAULT_SAMPLE_SIZE) {
+    if (!gst_byte_reader_get_uint32_be (data, &n))
+      return FALSE;
+    GST_LOG ("%*s default-sample-size: %u", depth, "", n);
+  }
+
+  if (flags & TF_DEFAULT_SAMPLE_FLAGS) {
+    if (!gst_byte_reader_get_uint32_be (data, &n))
+      return FALSE;
+    GST_LOG ("%*s default-sample-flags: %u", depth, "", n);
+  }
+
+  /* this flag carries no payload, it is just reported */
+  GST_LOG ("%*s duration-is-empty: %s", depth, "",
+      flags & TF_DURATION_IS_EMPTY ? "yes" : "no");
+
+  return TRUE;
+}
+
+/* Dump a 'trun' (track fragment run) atom.  Which per-sample fields are
+ * present is controlled by bits in the 24-bit flags word.
+ * (fixed: the loop index was a signed int compared against the unsigned
+ * 32-bit samples_count; use guint32 so huge counts cannot overflow it) */
+gboolean
+qtdemux_dump_trun (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 flags = 0, samples_count = 0, data_offset = 0, first_sample_flags = 0;
+  guint32 sample_duration = 0, sample_size = 0, sample_flags =
+      0, composition_time_offsets = 0;
+  guint32 i = 0;
+
+  /* one version byte, then the 24-bit flags */
+  if (!gst_byte_reader_skip (data, 1) ||
+      !gst_byte_reader_get_uint24_be (data, &flags))
+    return FALSE;
+
+  GST_LOG ("%*s flags: %08x", depth, "", flags);
+
+  if (!gst_byte_reader_get_uint32_be (data, &samples_count))
+    return FALSE;
+  GST_LOG ("%*s samples_count: %u", depth, "", samples_count);
+
+  if (flags & TR_DATA_OFFSET) {
+    if (!gst_byte_reader_get_uint32_be (data, &data_offset))
+      return FALSE;
+    GST_LOG ("%*s data-offset: %u", depth, "", data_offset);
+  }
+
+  if (flags & TR_FIRST_SAMPLE_FLAGS) {
+    if (!gst_byte_reader_get_uint32_be (data, &first_sample_flags))
+      return FALSE;
+    GST_LOG ("%*s first-sample-flags: %u", depth, "", first_sample_flags);
+  }
+
+  /* per-sample fields are dumped at TRACE level, they can be very many */
+  for (i = 0; i < samples_count; i++) {
+    if (flags & TR_SAMPLE_DURATION) {
+      if (!gst_byte_reader_get_uint32_be (data, &sample_duration))
+        return FALSE;
+      GST_TRACE ("%*s sample-duration: %u", depth, "", sample_duration);
+    }
+
+    if (flags & TR_SAMPLE_SIZE) {
+      if (!gst_byte_reader_get_uint32_be (data, &sample_size))
+        return FALSE;
+      GST_TRACE ("%*s sample-size: %u", depth, "", sample_size);
+    }
+
+    if (flags & TR_SAMPLE_FLAGS) {
+      if (!gst_byte_reader_get_uint32_be (data, &sample_flags))
+        return FALSE;
+      GST_TRACE ("%*s sample-flags: %u", depth, "", sample_flags);
+    }
+
+    if (flags & TR_COMPOSITION_TIME_OFFSETS) {
+      if (!gst_byte_reader_get_uint32_be (data, &composition_time_offsets))
+        return FALSE;
+      GST_TRACE ("%*s composition_time_offsets: %u", depth, "",
+          composition_time_offsets);
+    }
+  }
+
+  return TRUE;
+}
+
+/* Dump a 'trex' (track extends defaults) atom. */
+gboolean
+qtdemux_dump_trex (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags, tid, def_desc_idx, def_dur, def_size, def_flags;
+
+  /* six 32-bit fields */
+  if (!qt_atom_parser_has_remaining (data, 4 + 4 + 4 + 4 + 4 + 4))
+    return FALSE;
+
+  vflags = GET_UINT32 (data);
+  tid = GET_UINT32 (data);
+  def_desc_idx = GET_UINT32 (data);
+  def_dur = GET_UINT32 (data);
+  def_size = GET_UINT32 (data);
+  def_flags = GET_UINT32 (data);
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", vflags);
+  GST_LOG ("%*s track ID: %08x", depth, "", tid);
+  GST_LOG ("%*s default sample desc. index: %08x", depth, "", def_desc_idx);
+  GST_LOG ("%*s default sample duration: %08x", depth, "", def_dur);
+  GST_LOG ("%*s default sample size: %08x", depth, "", def_size);
+  GST_LOG ("%*s default sample flags: %08x", depth, "", def_flags);
+
+  return TRUE;
+}
+
+/* Dump an 'mehd' (movie extends header) atom. */
+gboolean
+qtdemux_dump_mehd (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 version = 0;
+  guint64 fragment_duration;
+  guint field_size;
+
+  if (!gst_byte_reader_get_uint32_be (data, &version))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+
+  /* version 1 stores the duration as a 64-bit value */
+  field_size = ((version >> 24) == 1) ? sizeof (guint64) : sizeof (guint32);
+  if (!qt_atom_parser_get_offset (data, field_size, &fragment_duration))
+    return FALSE;
+
+  GST_LOG ("%*s fragment duration: %" G_GUINT64_FORMAT,
+      depth, "", fragment_duration);
+  return TRUE;
+}
+
+/* Dump a 'tfdt' (track fragment decode time) atom. */
+gboolean
+qtdemux_dump_tfdt (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 version = 0;
+  guint64 decode_time;
+  guint field_size;
+
+  if (!gst_byte_reader_get_uint32_be (data, &version))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+
+  /* version 1 stores the decode time as a 64-bit value */
+  field_size = ((version >> 24) == 1) ? sizeof (guint64) : sizeof (guint32);
+  if (!qt_atom_parser_get_offset (data, field_size, &decode_time))
+    return FALSE;
+
+  GST_LOG ("%*s Track fragment decode time: %" G_GUINT64_FORMAT,
+      depth, "", decode_time);
+  return TRUE;
+}
+
+/* Dump an 'sdtp' (sample dependency type) atom: one flags byte per sample.
+ * (fixed: the version/flags word was read unchecked; verify that 4 bytes
+ * are available first) */
+gboolean
+qtdemux_dump_sdtp (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 version;
+  guint8 val;
+  guint i = 1;
+
+  if (!qt_atom_parser_has_remaining (data, 4))
+    return FALSE;
+
+  version = GET_UINT32 (data);
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+
+  /* the sample_count is specified in the stsz or stz2 box.
+   * the information for a sample is stored in a single byte,
+   * so we read until there are no remaining bytes */
+  while (qt_atom_parser_has_remaining (data, 1)) {
+    val = GET_UINT8 (data);
+    GST_LOG ("%*s sample number: %d", depth, "", i);
+    GST_LOG ("%*s sample_depends_on: %d", depth, "",
+        ((guint16) (val)) & 0x3);
+    GST_LOG ("%*s sample_is_depended_on: %d", depth, "",
+        ((guint16) (val >> 2)) & 0x3);
+    GST_LOG ("%*s sample_has_redundancy: %d", depth, "",
+        ((guint16) (val >> 4)) & 0x3);
+    GST_LOG ("%*s early display: %d", depth, "",
+        ((guint16) (val >> 6)) & 0x1);
+    ++i;
+  }
+  return TRUE;
+}
+
+/* Dump an 'svmi' (stereoscopic video media information) atom.
+ * (fixed: every read was unchecked, so a truncated atom was read out of
+ * bounds; guard the header and each change-list entry first) */
+gboolean
+qtdemux_dump_svmi (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 version;
+  guint stereo_mono_change_count;
+  guint i;
+
+  if (!qt_atom_parser_has_remaining (data, 4))
+    return FALSE;
+
+  version = GET_UINT32 (data);
+  GST_LOG ("%*s version/flags: %08x", depth, "", version);
+
+  if (!version) {
+    /* two single-byte fields plus the 32-bit change count */
+    if (!qt_atom_parser_has_remaining (data, 1 + 1 + 4))
+      return FALSE;
+
+    /* stereoscopic visual type information */
+    GST_LOG ("%*s stereo_composition_type: %d", depth, "",
+        GET_UINT8 (data));
+    GST_LOG ("%*s is_left_first: %d", depth, "",
+        ((guint8) GET_UINT8 (data)) & 0x01);
+
+    /* stereo_mono_change information */
+    stereo_mono_change_count = GET_UINT32 (data);
+    GST_LOG ("%*s stereo_mono_change_count: %d", depth, "",
+        stereo_mono_change_count);
+    /* each entry is a 32-bit sample count plus one flag byte */
+    if (!qt_atom_parser_has_chunks (data, stereo_mono_change_count, 4 + 1))
+      return FALSE;
+    for (i = 1; i <= stereo_mono_change_count; i++) {
+      GST_LOG ("%*s sample_count: %d", depth, "", GET_UINT32 (data));
+      GST_LOG ("%*s stereo_flag: %d", depth, "",
+          ((guint8) GET_UINT8 (data)) & 0x01);
+    }
+  }
+  return TRUE;
+}
+
+/* Dump a 'dfLa' (FLAC specific) box: walks the embedded FLAC metadata
+ * block headers until the last-block flag is set or data runs out. */
+gboolean
+qtdemux_dump_dfLa (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  /* indices 0-6 are the named FLAC block types; index 7 is used for any
+   * other reserved value and index 8 for the invalid type 127 */
+  const gchar *block_types[] = {
+    "STREAMINFO", "PADDING", "APPLICATION", "SEEKTABLE", "VORBIS_COMMENT",
+    "CUESHEET", "PICTURE", "UNKNOWN", "INVALID"
+  };
+
+  guint32 ver_flags, block_header, block_size;
+  gint8 block_type;
+  gboolean isLast = FALSE;
+
+  if (!gst_byte_reader_get_uint32_be (data, &ver_flags))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags: %08x", depth, "", ver_flags);
+
+  do {
+    if (!gst_byte_reader_get_uint32_be (data, &block_header))
+      break;
+
+    /* header word: 1-bit last flag, 7-bit type, 24-bit payload length */
+    isLast = (block_header >> 31) & 1;
+    block_type = (block_header >> 24) & 0x7F;
+    block_size = block_header & 0xFFFFFF;
+
+    /* clamp the type so it is always a valid index into block_types */
+    if (block_type == 127)
+      block_type = 8;
+    else if (block_type > 6)
+      block_type = 7;
+
+    GST_LOG ("%*s block_type: %s", depth, "", block_types[block_type]);
+    GST_LOG ("%*s last-block-flag: %s", depth, "", isLast ? "true" : "false");
+    GST_LOG ("%*s length: %d", depth, "", block_size);
+
+    /* skip the block payload; a short read simply ends the dump */
+    if (!gst_byte_reader_skip (data, block_size))
+      break;
+  } while (!isLast);
+
+  return TRUE;
+}
+
+/* Dump a 'fLaC' audio sample entry. */
+gboolean
+qtdemux_dump_fLaC (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint16 data_ref_id, n_channels, sample_size;
+  guint32 sample_rate;
+
+  /* fields are read in file order with checked getters */
+  if (!gst_byte_reader_skip (data, 6))
+    return FALSE;
+  if (!gst_byte_reader_get_uint16_be (data, &data_ref_id))
+    return FALSE;
+  if (!gst_byte_reader_skip (data, 8))
+    return FALSE;
+  if (!gst_byte_reader_get_uint16_be (data, &n_channels))
+    return FALSE;
+  if (!gst_byte_reader_get_uint16_be (data, &sample_size))
+    return FALSE;
+  if (!gst_byte_reader_skip (data, 4))
+    return FALSE;
+  if (!gst_byte_reader_get_uint32_be (data, &sample_rate))
+    return FALSE;
+
+  GST_LOG ("%*s data reference: %d", depth, "", data_ref_id);
+  GST_LOG ("%*s channel count: %d", depth, "", n_channels);
+  GST_LOG ("%*s sample size: %d", depth, "", sample_size);
+  /* only the upper 16 bits of the rate field are dumped */
+  GST_LOG ("%*s sample rate: %d", depth, "", (sample_rate >> 16));
+
+  return TRUE;
+}
+
+/* Dump a 'gmin' (base media information) atom. */
+gboolean
+qtdemux_dump_gmin (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  guint32 vflags;
+  guint16 graphics_mode, opc_r, opc_g, opc_b, balance;
+
+  if (!gst_byte_reader_get_uint32_be (data, &vflags))
+    return FALSE;
+
+  GST_LOG ("%*s version/flags : %08x", depth, "", vflags);
+
+  /* graphics mode, opcolor triplet and balance, in file order */
+  if (!gst_byte_reader_get_uint16_be (data, &graphics_mode))
+    return FALSE;
+  if (!gst_byte_reader_get_uint16_be (data, &opc_r))
+    return FALSE;
+  if (!gst_byte_reader_get_uint16_be (data, &opc_g))
+    return FALSE;
+  if (!gst_byte_reader_get_uint16_be (data, &opc_b))
+    return FALSE;
+  if (!gst_byte_reader_get_uint16_be (data, &balance))
+    return FALSE;
+
+  GST_LOG ("%*s graphics mode : 0x%x", depth, "", graphics_mode);
+  GST_LOG ("%*s opcolor : r:0x%x g:0x%x b:0x%x", depth, "", opc_r, opc_g,
+      opc_b);
+  GST_LOG ("%*s balance : %d", depth, "", balance);
+
+  return TRUE;
+}
+
+/* Fallback dumper: hexdump the remaining bytes of an atom we have no
+ * specific dumper for. */
+gboolean
+qtdemux_dump_unknown (GstQTDemux * qtdemux, GstByteReader * data, int depth)
+{
+  const guint8 *bytes;
+  int len;
+
+  len = gst_byte_reader_get_remaining (data);
+  bytes = gst_byte_reader_peek_data_unchecked (data);
+
+  GST_LOG ("%*s length: %d", depth, "", len);
+  GST_MEMDUMP_OBJECT (qtdemux, "unknown atom data", bytes, len);
+  return TRUE;
+}
+
+/* GNodeTraverseFunc: dump one parsed atom node.  Always returns FALSE so
+ * g_node_traverse () keeps walking the whole tree. */
+static gboolean
+qtdemux_node_dump_foreach (GNode * node, gpointer qtdemux)
+{
+  GstByteReader parser;
+  guint8 *buffer = (guint8 *) node->data; /* FIXME: move to byte reader */
+  guint32 node_length;
+  guint32 fourcc;
+  const QtNodeType *type;
+  int depth;
+
+  /* each node's data starts with the 32-bit length and the fourcc */
+  node_length = GST_READ_UINT32_BE (buffer);
+  fourcc = GST_READ_UINT32_LE (buffer + 4);
+
+  /* NOTE(review): this only warns -- if node_length < 8 the subtraction
+   * below wraps around; presumably the parser never builds such nodes,
+   * but that is worth confirming */
+  g_warn_if_fail (node_length >= 8);
+
+  gst_byte_reader_init (&parser, buffer + 8, node_length - 8);
+
+  type = qtdemux_type_get (fourcc);
+
+  /* indent two columns per tree level */
+  depth = (g_node_depth (node) - 1) * 2;
+  GST_LOG ("%*s'%" GST_FOURCC_FORMAT "', [%d], %s",
+      depth, "", GST_FOURCC_ARGS (fourcc), node_length, type->name);
+
+  if (type->dump) {
+    gboolean ret;
+
+    ret = type->dump (GST_QTDEMUX_CAST (qtdemux), &parser, depth);
+
+    if (!ret) {
+      GST_WARNING ("%*s not enough data parsing atom %" GST_FOURCC_FORMAT,
+          depth, "", GST_FOURCC_ARGS (fourcc));
+    }
+  }
+
+  return FALSE;
+}
+
+/* Dump a whole parsed atom tree at LOG level.  A no-op when GStreamer
+ * debugging is compiled out or the category threshold is below LOG. */
+gboolean
+qtdemux_node_dump (GstQTDemux * qtdemux, GNode * node)
+{
+#ifndef GST_DISABLE_GST_DEBUG
+  /* Only traverse/dump if we know it will be outputted in the end */
+  if (qtdemux_debug->threshold < GST_LEVEL_LOG)
+    return TRUE;
+
+  g_node_traverse (node, G_PRE_ORDER, G_TRAVERSE_ALL, -1,
+      qtdemux_node_dump_foreach, qtdemux);
+#endif
+  return TRUE;
+}
diff --git a/gst/isomp4/qtdemux_dump.h b/gst/isomp4/qtdemux_dump.h
new file mode 100644
index 0000000000..45dcd3f081
--- /dev/null
+++ b/gst/isomp4/qtdemux_dump.h
@@ -0,0 +1,98 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_QTDEMUX_DUMP_H__
+#define __GST_QTDEMUX_DUMP_H__
+
+#include <gst/gst.h>
+#include <qtdemux.h>
+
+G_BEGIN_DECLS
+ gboolean qtdemux_dump_mvhd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_tkhd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_elst (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_mdhd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_hdlr (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_vmhd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_dref (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_stsd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_stts (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_stss (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_stps (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_stsc (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_stsz (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_stco (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_co64 (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_dcom (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_cmvd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_ctts (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_cslg (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_mfro (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_mfhd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_tfra (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_tfhd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_trun (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_trex (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_mehd (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_sdtp (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_tfdt (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_unknown (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_svmi (GstQTDemux *qtdemux, GstByteReader *data,
+ int depth);
+gboolean qtdemux_dump_dfLa (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_fLaC (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+gboolean qtdemux_dump_gmin (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+
+gboolean qtdemux_node_dump (GstQTDemux * qtdemux, GNode * node);
+
+G_END_DECLS
+#endif /* __GST_QTDEMUX_DUMP_H__ */
diff --git a/gst/isomp4/qtdemux_lang.c b/gst/isomp4/qtdemux_lang.c
new file mode 100644
index 0000000000..1f5f3c35cd
--- /dev/null
+++ b/gst/isomp4/qtdemux_lang.c
@@ -0,0 +1,207 @@
+/* GStreamer Quicktime/ISO demuxer language utility functions
+ * Copyright (C) 2010 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "qtdemux_debug.h"
+#include "qtdemux_lang.h"
+
+#include <string.h>
+
+#define GST_CAT_DEFAULT qtdemux_debug
+
+/* http://developer.apple.com/mac/library/documentation/QuickTime/QTFF/QTFFChap4/qtff4.html */
+
+/* QuickTime language-code table, indexed by the QT code (0..138); each entry
+ * is a NUL-terminated ISO 639-2T code (max 3 chars, so [4] always leaves a
+ * trailing NUL).  Unassigned codes map to "und" (undetermined). */
+static const gchar qt_lang_map[][4] = {
+
+/* 000 English
+ * 001 French
+ * 002 German
+ * 003 Italian
+ * 004 Dutch
+ * 005 Swedish
+ * 006 Spanish
+ * 007 Danish
+ * 008 Portuguese
+ * 009 Norwegian
+ */
+ "eng", "fre", "deu", "ita", "nld", "swe", "spa", "dan", "por", "nor",
+
+/* 010 Hebrew
+ * 011 Japanese
+ * 012 Arabic
+ * 013 Finnish
+ * 014 Greek
+ * 015 Icelandic
+ * 016 Maltese
+ * 017 Turkish
+ * 018 Croatian
+ * 019 Traditional Chinese (ISO 639-2 can't express script differences, so zho)
+ */
+ "heb", "jpn", "ara", "fin", "ell", "isl", "mlt", "tur", "hrv", "zho",
+
+/* 020 Urdu
+ * 021 Hindi
+ * 022 Thai
+ * 023 Korean
+ * 024 Lithuanian
+ * 025 Polish
+ * 026 Hungarian
+ * 027 Estonian
+ * 028 Latvian / Lettish
+ * 029 Lappish / Saamish (used code for Northern Sami)
+ */
+ "urd", "hin", "tha", "kor", "lit", "pol", "hun", "est", "lav", "sme",
+
+/* 030 Faeroese
+ * 031 Farsi
+ * 032 Russian
+ * 033 Simplified Chinese (ISO 639-2 can't express script differences, so zho)
+ * 034 Flemish (no ISO 639-2 code, used Dutch code)
+ * 035 Irish
+ * 036 Albanian
+ * 037 Romanian
+ * 038 Czech
+ * 039 Slovak
+ */
+ "fao", "fas", "rus", "zho", "nld", "gle", "sqi", "ron", "ces", "slk",
+
+/* 040 Slovenian
+ * 041 Yiddish
+ * 042 Serbian
+ * 043 Macedonian
+ * 044 Bulgarian
+ * 045 Ukrainian
+ * 046 Byelorussian
+ * 047 Uzbek
+ * 048 Kazakh
+ * 049 Azerbaijani
+ */
+ "slv", "yid", "srp", "mkd", "bul", "ukr", "bel", "uzb", "kaz", "aze",
+
+/* 050 AzerbaijanAr (presumably script difference? used aze here)
+ * 051 Armenian
+ * 052 Georgian
+ * 053 Moldavian
+ * 054 Kirghiz
+ * 055 Tajiki
+ * 056 Turkmen
+ * 057 Mongolian
+ * 058 MongolianCyr (presumably script difference? used mon here)
+ * 059 Pashto
+ */
+
+ "aze", "hye", "kat", "mol", "kir", "tgk", "tuk", "mon", "mon", "pus",
+
+
+/* 060 Kurdish
+ * 061 Kashmiri
+ * 062 Sindhi
+ * 063 Tibetan
+ * 064 Nepali
+ * 065 Sanskrit
+ * 066 Marathi
+ * 067 Bengali
+ * 068 Assamese
+ * 069 Gujarati
+ */
+ "kur", "kas", "snd", "bod", "nep", "san", "mar", "ben", "asm", "guj",
+
+/* 070 Punjabi
+ * 071 Oriya
+ * 072 Malayalam
+ * 073 Kannada
+ * 074 Tamil
+ * 075 Telugu
+ * 076 Sinhalese
+ * 077 Burmese
+ * 078 Khmer
+ * 079 Lao
+ */
+ "pan", "ori", "mal", "kan", "tam", "tel", "sin", "mya", "khm", "lao",
+
+/* 080 Vietnamese
+ * 081 Indonesian
+ * 082 Tagalog
+ * 083 MalayRoman
+ * 084 MalayArabic
+ * 085 Amharic
+ * 086 Galla (same as Oromo?)
+ * 087 Oromo
+ * 088 Somali
+ * 089 Swahili
+ */
+ "vie", "ind", "tgl", "msa", "msa", "amh", "orm", "orm", "som", "swa",
+
+/* 090 Ruanda
+ * 091 Rundi
+ * 092 Chewa
+ * 093 Malagasy
+ * 094 Esperanto
+ * 095 ---
+ * 096 ---
+ * 097 ---
+ * 098 ---
+ * 099 ---
+ */
+ "kin", "run", "nya", "mlg", "epo", "und", "und", "und", "und", "und",
+
+/* 100-109 ---
+ * 110-119 ---
+ */
+ "und", "und", "und", "und", "und", "und", "und", "und", "und", "und",
+ "und", "und", "und", "und", "und", "und", "und", "und", "und", "und",
+
+/* 120-127 ---
+ * 128 Welsh
+ * 129 Basque
+ */
+ "und", "und", "und", "und", "und", "und", "und", "und", "cym", "eus",
+
+/* 130 Catalan
+ * 131 Latin
+ * 132 Quechua
+ * 133 Guarani
+ * 134 Aymara
+ * 135 Tatar
+ * 136 Uighur
+ * 137 Dzongkha
+ * 138 JavaneseRom
+ */
+ "cat", "lat", "que", "grn", "aym", "tat", "uig", "dzo", "jav"
+};
+
+/* map quicktime language code to ISO-639-2T id, returns "und" if unknown */
+void
+qtdemux_lang_map_qt_code_to_iso (gchar id[4], guint16 qt_lang_code)
+{
+ const gchar *iso_code;
+
+ g_assert (qt_lang_code < 0x400);
+
+ if (qt_lang_code < G_N_ELEMENTS (qt_lang_map))
+ iso_code = qt_lang_map[qt_lang_code];
+ else
+ iso_code = "und";
+
+ GST_DEBUG ("mapped quicktime language code %u to ISO 639-2T code '%s'",
+ qt_lang_code, iso_code);
+
+ memcpy (id, iso_code, 4);
+
+ g_assert (id[3] == '\0');
+}
diff --git a/gst/isomp4/qtdemux_lang.h b/gst/isomp4/qtdemux_lang.h
new file mode 100644
index 0000000000..707c5f7213
--- /dev/null
+++ b/gst/isomp4/qtdemux_lang.h
@@ -0,0 +1,31 @@
+/* GStreamer Quicktime/ISO demuxer language utility functions
+ * Copyright (C) 2010 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_QTDEMUX_LANG_H__
+#define __GST_QTDEMUX_LANG_H__
+
+G_BEGIN_DECLS
+
+#include <glib.h>
+
+void qtdemux_lang_map_qt_code_to_iso (gchar id[4], guint16 qt_lang_code);
+
+G_END_DECLS
+
+#endif /* __GST_QTDEMUX_LANG_H__ */
diff --git a/gst/isomp4/qtdemux_tags.c b/gst/isomp4/qtdemux_tags.c
new file mode 100644
index 0000000000..f2b384d692
--- /dev/null
+++ b/gst/isomp4/qtdemux_tags.c
@@ -0,0 +1,1034 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
+ * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ * Copyright (C) <2013> Sreerenj Balachandran <sreerenj.balachandran@intel.com>
+ * Copyright (C) <2013> Intel Corporation
+ * Copyright (C) <2014> Centricular Ltd
+ * Copyright (C) <2015> YouView TV Ltd.
+ * Copyright (C) <2016> British Broadcasting Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* Parsing functions for various MP4 standard extension atom groups */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdio.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/tag/tag.h>
+
+#include "qtdemux_tags.h"
+#include "qtdemux_tree.h"
+#include "qtdemux_types.h"
+#include "fourcc.h"
+
+/* Wrap existing memory in a GstBuffer; mark it read-only when no free
+ * function is given (i.e. the memory is borrowed, not owned). */
+static GstBuffer *
+_gst_buffer_new_wrapped (gpointer mem, gsize size, GFreeFunc free_func)
+{
+ return gst_buffer_new_wrapped_full (free_func ? 0 : GST_MEMORY_FLAG_READONLY,
+ mem, size, 0, size, mem, free_func);
+}
+
+/* check if major or compatible brand is 3GP */
+static inline gboolean
+qtdemux_is_brand_3gp (GstQTDemux * qtdemux, gboolean major)
+{
+ if (major) {
+ /* mask keeps the first two chars, i.e. matches any '3g??' brand */
+ return ((qtdemux->major_brand & GST_MAKE_FOURCC (255, 255, 0, 0)) ==
+ FOURCC_3g__);
+ } else if (qtdemux->comp_brands != NULL) {
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
+ gboolean res = FALSE;
+
+ /* scan the compatible-brands buffer, one fourcc per 4 bytes */
+ gst_buffer_map (qtdemux->comp_brands, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ while (size >= 4) {
+ res = res || ((QT_FOURCC (data) & GST_MAKE_FOURCC (255, 255, 0, 0)) ==
+ FOURCC_3g__);
+ data += 4;
+ size -= 4;
+ }
+ gst_buffer_unmap (qtdemux->comp_brands, &map);
+ return res;
+ } else {
+ return FALSE;
+ }
+}
+
+/* check if tag is a spec'ed 3GP tag keyword storing a string */
+static inline gboolean
+qtdemux_is_string_tag_3gp (GstQTDemux * qtdemux, guint32 fourcc)
+{
+ return fourcc == FOURCC_cprt || fourcc == FOURCC_gnre || fourcc == FOURCC_titl
+ || fourcc == FOURCC_dscp || fourcc == FOURCC_perf || fourcc == FOURCC_auth
+ || fourcc == FOURCC_albm;
+}
+
+/* Parse a 3GPP 'loci' (location information) atom: an optional UTF-8/16
+ * place name followed by longitude/latitude/altitude as signed 16.16
+ * fixed-point values.  Adds GST_TAG_GEO_LOCATION_* tags to @taglist. */
+static void
+qtdemux_tag_add_location (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ const gchar *env_vars[] = { "GST_QT_TAG_ENCODING", "GST_TAG_ENCODING", NULL };
+ int offset;
+ char *name;
+ gchar *data;
+ gdouble longitude, latitude, altitude;
+ gint len;
+
+ len = QT_UINT32 (node->data);
+ if (len <= 14)
+ goto short_read;
+
+ data = node->data;
+ offset = 14;
+
+ /* TODO: language code skipped */
+
+ /* NOTE(review): length -1 means "until NUL"; presumably the atom data is
+ * NUL-terminated within len — confirm upstream guarantees this */
+ name = gst_tag_freeform_string_to_utf8 (data + offset, -1, env_vars);
+
+ if (!name) {
+ /* do not alarm in trivial case, but bail out otherwise */
+ /* NOTE(review): despite the comment above, parsing continues either
+ * way with offset unadvanced — verify that is intended */
+ if (*(data + offset) != 0) {
+ GST_DEBUG_OBJECT (qtdemux, "failed to convert %s tag to UTF-8, "
+ "giving up", tag);
+ }
+ } else {
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_GEO_LOCATION_NAME, name, NULL);
+ offset += strlen (name);
+ g_free (name);
+ }
+
+ /* need NUL + role byte + three 4-byte fixed-point coordinates */
+ if (len < offset + 2 + 4 + 4 + 4)
+ goto short_read;
+
+ /* +1 +1 = skip null-terminator and location role byte */
+ offset += 1 + 1;
+ /* table in spec says unsigned, semantics say negative has meaning ... */
+ longitude = QT_SFP32 (data + offset);
+
+ offset += 4;
+ latitude = QT_SFP32 (data + offset);
+
+ offset += 4;
+ altitude = QT_SFP32 (data + offset);
+
+ /* one invalid means all are invalid */
+ if (longitude >= -180.0 && longitude <= 180.0 &&
+ latitude >= -90.0 && latitude <= 90.0) {
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_GEO_LOCATION_LATITUDE, latitude,
+ GST_TAG_GEO_LOCATION_LONGITUDE, longitude,
+ GST_TAG_GEO_LOCATION_ELEVATION, altitude, NULL);
+ }
+
+ /* TODO: no GST_TAG_, so astronomical body and additional notes skipped */
+
+ return;
+
+ /* ERRORS */
+short_read:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "short read parsing 3GP location");
+ return;
+ }
+}
+
+
+/* Parse a 3GPP 'yrrc' (recording year) atom: a 16-bit year at offset 12.
+ * Adds @tag as a GDate (Jan 1 of that year); rejects year 0. */
+static void
+qtdemux_tag_add_year (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ guint16 y;
+ GDate *date;
+ gint len;
+
+ len = QT_UINT32 (node->data);
+ if (len < 14)
+ return;
+
+ y = QT_UINT16 ((guint8 *) node->data + 12);
+ if (y == 0) {
+ GST_DEBUG_OBJECT (qtdemux, "year: %u is not a valid year", y);
+ return;
+ }
+ GST_DEBUG_OBJECT (qtdemux, "year: %u", y);
+
+ date = g_date_new_dmy (1, 1, y);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag, date, NULL);
+ g_date_free (date);
+}
+
+/* Parse a 3GPP 'clsf' (classification) atom: a 4-char classification
+ * entity, a 16-bit table id, and a classification string.  Encoded into
+ * @tag as "XXXX://table/string". */
+static void
+qtdemux_tag_add_classification (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ int offset;
+ char *tag_str = NULL;
+ guint8 *entity;
+ guint16 table;
+ gint len;
+
+ len = QT_UINT32 (node->data);
+ if (len <= 20)
+ goto short_read;
+
+ offset = 12;
+ entity = (guint8 *) node->data + offset;
+ /* all four entity characters must be non-zero */
+ if (entity[0] == 0 || entity[1] == 0 || entity[2] == 0 || entity[3] == 0) {
+ GST_DEBUG_OBJECT (qtdemux,
+ "classification info: %c%c%c%c invalid classification entity",
+ entity[0], entity[1], entity[2], entity[3]);
+ return;
+ }
+
+ offset += 4;
+ table = QT_UINT16 ((guint8 *) node->data + offset);
+
+ /* Language code skipped */
+
+ offset += 4;
+
+ /* Tag format: "XXXX://Y[YYYY]/classification info string"
+ * XXXX: classification entity, fixed length 4 chars.
+ * Y[YYYY]: classification table, max 5 chars.
+ */
+ /* NOTE(review): the string at data+offset is assumed NUL-terminated
+ * within the atom — confirm callers guarantee this */
+ tag_str = g_strdup_printf ("----://%u/%s",
+ table, (char *) node->data + offset);
+
+ /* memcpy To be sure we're preserving byte order */
+ memcpy (tag_str, entity, 4);
+ GST_DEBUG_OBJECT (qtdemux, "classification info: %s", tag_str);
+
+ gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, tag, tag_str, NULL);
+
+ g_free (tag_str);
+
+ return;
+
+ /* ERRORS */
+short_read:
+ {
+ GST_DEBUG_OBJECT (qtdemux, "short read parsing 3GP classification");
+ return;
+ }
+}
+
+/* Add a string tag from @node.  Handles three layouts:
+ *  - iTunes-style: a child 'data' atom with type 1 (UTF-8 string);
+ *  - classic QT international text: '(C)xxx' atoms carrying a list of
+ *    (size, language-code, string) entries (only the first is used);
+ *  - 3GPP keyword string atoms (when the brand indicates 3GP).
+ * Returns TRUE if the tag was recognised/added, FALSE when the caller may
+ * want to fall back to another interpretation. */
+static gboolean
+qtdemux_tag_add_str_full (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ const gchar *env_vars[] = { "GST_QT_TAG_ENCODING", "GST_TAG_ENCODING", NULL };
+ GNode *data;
+ char *s;
+ int len;
+ guint32 type;
+ int offset;
+ gboolean ret = TRUE;
+ const gchar *charset = NULL;
+
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+ if (data) {
+ len = QT_UINT32 (data->data);
+ type = QT_UINT32 ((guint8 *) data->data + 8);
+ /* type 1 == UTF-8 string; payload starts after the 16-byte header */
+ if (type == 0x00000001 && len > 16) {
+ s = gst_tag_freeform_string_to_utf8 ((char *) data->data + 16, len - 16,
+ env_vars);
+ if (s) {
+ GST_DEBUG_OBJECT (qtdemux, "adding tag %s", GST_STR_NULL (s));
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag, s, NULL);
+ g_free (s);
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "failed to convert %s tag to UTF-8", tag);
+ }
+ }
+ } else {
+ len = QT_UINT32 (node->data);
+ type = QT_UINT32 ((guint8 *) node->data + 4);
+ if ((type >> 24) == 0xa9 && len > 8 + 4) {
+ gint str_len;
+ gint lang_code;
+
+ /* Type starts with the (C) symbol, so the next data is a list
+ * of (string size(16), language code(16), string) */
+
+ str_len = QT_UINT16 ((guint8 *) node->data + 8);
+ lang_code = QT_UINT16 ((guint8 *) node->data + 10);
+
+ /* the string + fourcc + size + 2 16bit fields,
+ * means that there are more tags in this atom */
+ if (len > str_len + 8 + 4) {
+ /* TODO how to represent the same tag in different languages? */
+ GST_WARNING_OBJECT (qtdemux, "Ignoring metadata entry with multiple "
+ "text alternatives, reading only first one");
+ }
+
+ offset = 12;
+ len = MIN (len, str_len + 8 + 4); /* remove trailing strings that we don't use */
+ GST_DEBUG_OBJECT (qtdemux, "found international text tag");
+
+ if (lang_code < 0x800) { /* MAC encoded string */
+ charset = "mac";
+ }
+ } else if (len > 14 && qtdemux_is_string_tag_3gp (qtdemux,
+ QT_FOURCC ((guint8 *) node->data + 4))) {
+ guint32 type = QT_UINT32 ((guint8 *) node->data + 8);
+
+ /* we go for 3GP style encoding if major brands claims so,
+ * or if no hope for data be ok UTF-8, and compatible 3GP brand present */
+ if (qtdemux_is_brand_3gp (qtdemux, TRUE) ||
+ (qtdemux_is_brand_3gp (qtdemux, FALSE) &&
+ ((type & 0x00FFFFFF) == 0x0) && (type >> 24 <= 0xF))) {
+ offset = 14;
+ /* 16-bit Language code is ignored here as well */
+ GST_DEBUG_OBJECT (qtdemux, "found 3gpp text tag");
+ } else {
+ goto normal;
+ }
+ } else {
+ normal:
+ offset = 8;
+ GST_DEBUG_OBJECT (qtdemux, "found normal text tag");
+ ret = FALSE; /* may have to fallback */
+ }
+ if (charset) {
+ GError *err = NULL;
+
+ s = g_convert ((gchar *) node->data + offset, len - offset, "utf8",
+ charset, NULL, NULL, &err);
+ if (err) {
+ GST_DEBUG_OBJECT (qtdemux, "Failed to convert string from charset %s:"
+ " %s(%d): %s", charset, g_quark_to_string (err->domain), err->code,
+ err->message);
+ g_error_free (err);
+ }
+ } else {
+ s = gst_tag_freeform_string_to_utf8 ((char *) node->data + offset,
+ len - offset, env_vars);
+ }
+ if (s) {
+ GST_DEBUG_OBJECT (qtdemux, "adding tag %s", GST_STR_NULL (s));
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag, s, NULL);
+ g_free (s);
+ ret = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "failed to convert %s tag to UTF-8", tag);
+ }
+ }
+ return ret;
+}
+
+/* Void wrapper around qtdemux_tag_add_str_full() for the add_funcs table,
+ * which expects a void-returning callback. */
+static void
+qtdemux_tag_add_str (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ qtdemux_tag_add_str_full (qtdemux, taglist, tag, dummy, node);
+}
+
+/* Parse a 'kywd' keywords atom.  Tries normal string-tag parsing first for
+ * non-3GP files; otherwise parses the 3GPP layout: a count byte at offset
+ * 14 followed by length-prefixed strings, joined with commas into @tag. */
+static void
+qtdemux_tag_add_keywords (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ const gchar *env_vars[] = { "GST_QT_TAG_ENCODING", "GST_TAG_ENCODING", NULL };
+ guint8 *data;
+ char *s, *t, *k = NULL;
+ int len;
+ int offset;
+ int count;
+
+ /* first try normal string tag if major brand not 3GP */
+ if (!qtdemux_is_brand_3gp (qtdemux, TRUE)) {
+ if (!qtdemux_tag_add_str_full (qtdemux, taglist, tag, dummy, node)) {
+ /* hm, that did not work, maybe 3gpp storage in non-3gpp major brand;
+ * let's try it 3gpp way after minor safety check */
+ data = node->data;
+ if (QT_UINT32 (data) < 15 || !qtdemux_is_brand_3gp (qtdemux, FALSE))
+ return;
+ } else
+ return;
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "found 3gpp keyword tag");
+
+ data = node->data;
+
+ len = QT_UINT32 (data);
+ if (len < 15)
+ goto short_read;
+
+ count = QT_UINT8 (data + 14);
+ offset = 15;
+ /* each entry: 1-byte length followed by that many string bytes */
+ for (; count; count--) {
+ gint slen;
+
+ if (offset + 1 > len)
+ goto short_read;
+ slen = QT_UINT8 (data + offset);
+ offset += 1;
+ if (offset + slen > len)
+ goto short_read;
+ s = gst_tag_freeform_string_to_utf8 ((char *) node->data + offset,
+ slen, env_vars);
+ if (s) {
+ GST_DEBUG_OBJECT (qtdemux, "adding keyword %s", GST_STR_NULL (s));
+ if (k) {
+ /* accumulate keywords into one comma-separated string */
+ t = g_strjoin (",", k, s, NULL);
+ g_free (s);
+ g_free (k);
+ k = t;
+ } else {
+ k = s;
+ }
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "failed to convert keyword to UTF-8");
+ }
+ offset += slen;
+ }
+
+done:
+ if (k) {
+ GST_DEBUG_OBJECT (qtdemux, "adding tag %s", GST_STR_NULL (k));
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag, k, NULL);
+ }
+ g_free (k);
+
+ return;
+
+ /* ERRORS */
+short_read:
+ {
+ /* a short read still flushes whatever keywords were collected */
+ GST_DEBUG_OBJECT (qtdemux, "short read parsing 3GP keywords");
+ goto done;
+ }
+}
+
+/* Parse an iTunes-style pair-of-numbers atom (e.g. track/total-tracks,
+ * disc/total-discs): two 16-bit values at offsets 18 and 20 of the child
+ * 'data' atom, added as @tag1 and @tag2 when non-zero. */
+static void
+qtdemux_tag_add_num (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag1, const char *tag2, GNode * node)
+{
+ GNode *data;
+ int len;
+ int type;
+ int n1, n2;
+
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+ if (data) {
+ len = QT_UINT32 (data->data);
+ type = QT_UINT32 ((guint8 *) data->data + 8);
+ /* type 0 == implicit/binary */
+ if (type == 0x00000000 && len >= 22) {
+ n1 = QT_UINT16 ((guint8 *) data->data + 18);
+ n2 = QT_UINT16 ((guint8 *) data->data + 20);
+ if (n1 > 0) {
+ GST_DEBUG_OBJECT (qtdemux, "adding tag %s=%d", tag1, n1);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag1, n1, NULL);
+ }
+ if (n2 > 0) {
+ GST_DEBUG_OBJECT (qtdemux, "adding tag %s=%d", tag2, n2);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag2, n2, NULL);
+ }
+ }
+ }
+}
+
+/* Parse the iTunes 'tmpo' (BPM) atom: a 16-bit value at offset 16 of the
+ * child 'data' atom, added as a double @tag1 when non-zero. */
+static void
+qtdemux_tag_add_tmpo (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag1, const char *dummy, GNode * node)
+{
+ GNode *data;
+ int len;
+ int type;
+ int n1;
+
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+ if (data) {
+ len = QT_UINT32 (data->data);
+ type = QT_UINT32 ((guint8 *) data->data + 8);
+ GST_DEBUG_OBJECT (qtdemux, "have tempo tag, type=%d,len=%d", type, len);
+ /* some files wrongly have a type 0x0f=15, but it should be 0x15 */
+ if ((type == 0x00000015 || type == 0x0000000f) && len >= 18) {
+ n1 = QT_UINT16 ((guint8 *) data->data + 16);
+ if (n1) {
+ /* do not add bpm=0 */
+ GST_DEBUG_OBJECT (qtdemux, "adding tag %d", n1);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag1, (gdouble) n1,
+ NULL);
+ }
+ }
+ }
+}
+
+/* Parse a generic 32-bit integer atom: value at offset 16 of the child
+ * 'data' atom, added as @tag1 when non-zero. */
+static void
+qtdemux_tag_add_uint32 (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag1, const char *dummy, GNode * node)
+{
+ GNode *data;
+ int len;
+ int type;
+ guint32 num;
+
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+ if (data) {
+ len = QT_UINT32 (data->data);
+ type = QT_UINT32 ((guint8 *) data->data + 8);
+ GST_DEBUG_OBJECT (qtdemux, "have %s tag, type=%d,len=%d", tag1, type, len);
+ /* some files wrongly have a type 0x0f=15, but it should be 0x15 */
+ if ((type == 0x00000015 || type == 0x0000000f) && len >= 20) {
+ num = QT_UINT32 ((guint8 *) data->data + 16);
+ if (num) {
+ /* do not add num=0 */
+ GST_DEBUG_OBJECT (qtdemux, "adding tag %d", num);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag1, num, NULL);
+ }
+ }
+ }
+}
+
+/* Parse the iTunes 'covr' (cover art) atom: JPEG (type 0x0d) or PNG (type
+ * 0x0e) image data in the child 'data' atom, added as an image sample.
+ * The first image becomes the front cover, subsequent ones untyped. */
+static void
+qtdemux_tag_add_covr (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag1, const char *dummy, GNode * node)
+{
+ GNode *data;
+ int len;
+ int type;
+ GstSample *sample;
+
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+ if (data) {
+ len = QT_UINT32 (data->data);
+ type = QT_UINT32 ((guint8 *) data->data + 8);
+ GST_DEBUG_OBJECT (qtdemux, "have covr tag, type=%d,len=%d", type, len);
+ if ((type == 0x0000000d || type == 0x0000000e) && len > 16) {
+ GstTagImageType image_type;
+
+ if (gst_tag_list_get_tag_size (taglist, GST_TAG_IMAGE) == 0)
+ image_type = GST_TAG_IMAGE_TYPE_FRONT_COVER;
+ else
+ image_type = GST_TAG_IMAGE_TYPE_NONE;
+
+ if ((sample =
+ gst_tag_image_data_to_image_sample ((guint8 *) data->data + 16,
+ len - 16, image_type))) {
+ GST_DEBUG_OBJECT (qtdemux, "adding tag size %d", len - 16);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag1, sample, NULL);
+ gst_sample_unref (sample);
+ }
+ }
+ }
+}
+
+/* Parse a date string atom (type 1 == UTF-8 in the child 'data' atom).
+ * Adds both a GST_TAG_DATE_TIME (full ISO 8601 parse) and a plain GDate
+ * @tag from the "Y-M-D" prefix; missing month/day default to 1. */
+static void
+qtdemux_tag_add_date (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ GNode *data;
+ GstDateTime *datetime = NULL;
+ char *s;
+ int len;
+ int type;
+
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+ if (data) {
+ len = QT_UINT32 (data->data);
+ type = QT_UINT32 ((guint8 *) data->data + 8);
+ if (type == 0x00000001 && len > 16) {
+ guint y, m = 1, d = 1;
+ gint ret;
+
+ s = g_strndup ((char *) data->data + 16, len - 16);
+ GST_DEBUG_OBJECT (qtdemux, "adding date '%s'", s);
+ datetime = gst_date_time_new_from_iso8601_string (s);
+ if (datetime != NULL) {
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, GST_TAG_DATE_TIME,
+ datetime, NULL);
+ gst_date_time_unref (datetime);
+ }
+
+ /* sanity-check the year before committing to a GDate */
+ ret = sscanf (s, "%u-%u-%u", &y, &m, &d);
+ if (ret >= 1 && y > 1500 && y < 3000) {
+ GDate *date;
+
+ date = g_date_new_dmy (d, m, y);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag, date, NULL);
+ g_date_free (date);
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "could not parse date string '%s'", s);
+ }
+ g_free (s);
+ }
+ }
+}
+
+/* Parse the 'gnre' (genre) atom.  3GP files store it as a plain string;
+ * otherwise it is a 1-based ID3v1 genre index in the child 'data' atom,
+ * mapped via gst_tag_id3_genre_get(). */
+static void
+qtdemux_tag_add_gnre (GstQTDemux * qtdemux, GstTagList * taglist,
+ const char *tag, const char *dummy, GNode * node)
+{
+ GNode *data;
+
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+
+ /* re-route to normal string tag if major brand says so
+ * or no data atom and compatible brand suggests so */
+ if (qtdemux_is_brand_3gp (qtdemux, TRUE) ||
+ (qtdemux_is_brand_3gp (qtdemux, FALSE) && !data)) {
+ qtdemux_tag_add_str (qtdemux, taglist, tag, dummy, node);
+ return;
+ }
+
+ if (data) {
+ guint len, type, n;
+
+ len = QT_UINT32 (data->data);
+ type = QT_UINT32 ((guint8 *) data->data + 8);
+ if (type == 0x00000000 && len >= 18) {
+ n = QT_UINT16 ((guint8 *) data->data + 16);
+ if (n > 0) {
+ const gchar *genre;
+
+ /* stored index is 1-based, ID3 genre table is 0-based */
+ genre = gst_tag_id3_genre_get (n - 1);
+ if (genre != NULL) {
+ GST_DEBUG_OBJECT (qtdemux, "adding %d [%s]", n, genre);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag, genre, NULL);
+ }
+ }
+ }
+ }
+}
+
+/* Parse @datasize bytes of @data as a decimal string and add it to
+ * @taglist as a double-typed @tag; warns and adds nothing on parse
+ * failure.  Copies the data first to guarantee NUL termination. */
+static void
+qtdemux_add_double_tag_from_str (GstQTDemux * demux, GstTagList * taglist,
+ const gchar * tag, guint8 * data, guint32 datasize)
+{
+ gdouble value;
+ gchar *datacopy;
+
+ /* make a copy to have \0 at the end */
+ datacopy = g_strndup ((gchar *) data, datasize);
+
+ /* convert the str to double */
+ if (sscanf (datacopy, "%lf", &value) == 1) {
+ GST_DEBUG_OBJECT (demux, "adding tag: %s [%s]", tag, datacopy);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE, tag, value, NULL);
+ } else {
+ GST_WARNING_OBJECT (demux, "Failed to parse double from string: %s",
+ datacopy);
+ }
+ g_free (datacopy);
+}
+
+
+/* Parse an iTunes reverse-DNS '----' atom: child 'mean' (domain), 'name'
+ * (key) and 'data' (value) atoms.  Recognises replaygain and MusicBrainz
+ * keys under the com.apple.iTunes / org.hydrogenaudio.replaygain domains
+ * and maps them to the corresponding GStreamer tags; anything else is
+ * logged as unmapped. */
+static void
+qtdemux_tag_add_revdns (GstQTDemux * demux, GstTagList * taglist,
+ const char *tag, const char *tag_bis, GNode * node)
+{
+ GNode *mean;
+ GNode *name;
+ GNode *data;
+ guint32 meansize;
+ guint32 namesize;
+ guint32 datatype;
+ guint32 datasize;
+ const gchar *meanstr;
+ const gchar *namestr;
+
+ /* checking the whole ---- atom size for consistency */
+ if (QT_UINT32 (node->data) <= 4 + 12 + 12 + 16) {
+ GST_WARNING_OBJECT (demux, "Tag ---- atom is too small, ignoring");
+ return;
+ }
+
+ mean = qtdemux_tree_get_child_by_type (node, FOURCC_mean);
+ if (!mean) {
+ GST_WARNING_OBJECT (demux, "No 'mean' atom found");
+ return;
+ }
+
+ meansize = QT_UINT32 (mean->data);
+ if (meansize <= 12) {
+ GST_WARNING_OBJECT (demux, "Small mean atom, ignoring the whole tag");
+ return;
+ }
+ /* skip the 12-byte header (size + fourcc + version/flags) */
+ meanstr = ((gchar *) mean->data) + 12;
+ meansize -= 12;
+
+ name = qtdemux_tree_get_child_by_type (node, FOURCC_name);
+ if (!name) {
+ GST_WARNING_OBJECT (demux, "'name' atom not found, ignoring tag");
+ return;
+ }
+
+ namesize = QT_UINT32 (name->data);
+ if (namesize <= 12) {
+ GST_WARNING_OBJECT (demux, "'name' atom is too small, ignoring tag");
+ return;
+ }
+ namestr = ((gchar *) name->data) + 12;
+ namesize -= 12;
+
+ /*
+ * Data atom is:
+ * uint32 - size
+ * uint32 - name
+ * uint8 - version
+ * uint24 - data type
+ * uint32 - all 0
+ * rest - the data
+ */
+ data = qtdemux_tree_get_child_by_type (node, FOURCC_data);
+ if (!data) {
+ GST_WARNING_OBJECT (demux, "No data atom in this tag");
+ return;
+ }
+ datasize = QT_UINT32 (data->data);
+ if (datasize <= 16) {
+ GST_WARNING_OBJECT (demux, "Data atom too small");
+ return;
+ }
+ datatype = QT_UINT32 (((gchar *) data->data) + 8) & 0xFFFFFF;
+
+ if ((strncmp (meanstr, "com.apple.iTunes", meansize) == 0) ||
+ (strncmp (meanstr, "org.hydrogenaudio.replaygain", meansize) == 0)) {
+ /* known key -> GStreamer tag mapping; lookup by case-insensitive name */
+ static const struct
+ {
+ const gchar name[28];
+ const gchar tag[28];
+ } tags[] = {
+ {
+ "replaygain_track_gain", GST_TAG_TRACK_GAIN}, {
+ "replaygain_track_peak", GST_TAG_TRACK_PEAK}, {
+ "replaygain_album_gain", GST_TAG_ALBUM_GAIN}, {
+ "replaygain_album_peak", GST_TAG_ALBUM_PEAK}, {
+ "MusicBrainz Track Id", GST_TAG_MUSICBRAINZ_TRACKID}, {
+ "MusicBrainz Artist Id", GST_TAG_MUSICBRAINZ_ARTISTID}, {
+ "MusicBrainz Album Id", GST_TAG_MUSICBRAINZ_ALBUMID}, {
+ "MusicBrainz Album Artist Id", GST_TAG_MUSICBRAINZ_ALBUMARTISTID}
+ };
+ int i;
+
+ for (i = 0; i < G_N_ELEMENTS (tags); ++i) {
+ if (!g_ascii_strncasecmp (tags[i].name, namestr, namesize)) {
+ /* dispatch on the GStreamer tag's registered type */
+ switch (gst_tag_get_type (tags[i].tag)) {
+ case G_TYPE_DOUBLE:
+ qtdemux_add_double_tag_from_str (demux, taglist, tags[i].tag,
+ ((guint8 *) data->data) + 16, datasize - 16);
+ break;
+ case G_TYPE_STRING:
+ qtdemux_tag_add_str (demux, taglist, tags[i].tag, NULL, node);
+ break;
+ default:
+ /* not reached */
+ break;
+ }
+ break;
+ }
+ }
+ if (i == G_N_ELEMENTS (tags))
+ goto unknown_tag;
+ } else {
+ goto unknown_tag;
+ }
+
+ return;
+
+/* errors */
+unknown_tag:
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ gchar *namestr_dbg;
+ gchar *meanstr_dbg;
+
+ meanstr_dbg = g_strndup (meanstr, meansize);
+ namestr_dbg = g_strndup (namestr, namesize);
+
+ GST_WARNING_OBJECT (demux, "This tag %s:%s type:%u is not mapped, "
+ "file a bug at bugzilla.gnome.org", meanstr_dbg, namestr_dbg, datatype);
+
+ g_free (namestr_dbg);
+ g_free (meanstr_dbg);
+ }
+#endif
+ return;
+}
+
+/* Parse an ID32 atom (an embedded ID3v2 tag, see the mp4ra.org meta-box
+ * registration) and merge any tags found into @taglist with KEEP semantics
+ * so existing native tags win.  @tag and @tag_bis are unused; the signature
+ * only exists to match GstQTDemuxAddTagFunc for the add_funcs table. */
+static void
+qtdemux_tag_add_id32 (GstQTDemux * demux, GstTagList * taglist, const char *tag,
+ const char *tag_bis, GNode * node)
+{
+ guint8 *data;
+ GstBuffer *buf;
+ guint len;
+ GstTagList *id32_taglist = NULL;
+
+ GST_LOG_OBJECT (demux, "parsing ID32");
+
+ data = node->data;
+ len = GST_READ_UINT32_BE (data);
+
+ /* need at least full box and language tag */
+ if (len < 12 + 2)
+ return;
+
+ /* skip the 8-byte atom header, 4-byte version/flags and 2-byte language
+ * code (14 bytes total); what remains is the raw ID3v2 blob.
+ * NOTE(review): len comes from the atom itself -- assumes node->data really
+ * holds len bytes, confirm the tree parser guarantees this. */
+ buf = gst_buffer_new_allocate (NULL, len - 14, NULL);
+ gst_buffer_fill (buf, 0, data + 14, len - 14);
+
+ id32_taglist = gst_tag_list_from_id3v2_tag (buf);
+ if (id32_taglist) {
+ GST_LOG_OBJECT (demux, "parsing ok");
+ /* KEEP: do not overwrite tags already extracted from native atoms */
+ gst_tag_list_insert (taglist, id32_taglist, GST_TAG_MERGE_KEEP);
+ gst_tag_list_unref (id32_taglist);
+ } else {
+ GST_LOG_OBJECT (demux, "parsing failed");
+ }
+
+ gst_buffer_unref (buf);
+}
+
+typedef void (*GstQTDemuxAddTagFunc) (GstQTDemux * demux, GstTagList * taglist,
+ const char *tag, const char *tag_bis, GNode * node);
+
+/* unmapped tags
+FOURCC_pcst -> if media is a podcast -> bool
+FOURCC_cpil -> if media is part of a compilation -> bool
+FOURCC_pgap -> if media is part of a gapless context -> bool
+FOURCC_tven -> the tv episode id e.g. S01E23 -> str
+*/
+
+/* Dispatch table mapping tag atom fourccs to the GStreamer tag name(s) they
+ * populate and the parser used to extract them.  Scanned linearly by
+ * qtdemux_parse_udta(); gst_tag_bis is the secondary tag for atoms that
+ * carry two values (e.g. track number + track count). */
+static const struct
+{
+ guint32 fourcc;
+ const gchar *gst_tag;
+ const gchar *gst_tag_bis;
+ const GstQTDemuxAddTagFunc func;
+} add_funcs[] = {
+ {
+ FOURCC__nam, GST_TAG_TITLE, NULL, qtdemux_tag_add_str}, {
+ FOURCC_titl, GST_TAG_TITLE, NULL, qtdemux_tag_add_str}, {
+ FOURCC__grp, GST_TAG_GROUPING, NULL, qtdemux_tag_add_str}, {
+ FOURCC__wrt, GST_TAG_COMPOSER, NULL, qtdemux_tag_add_str}, {
+ FOURCC__ART, GST_TAG_ARTIST, NULL, qtdemux_tag_add_str}, {
+ FOURCC_aART, GST_TAG_ALBUM_ARTIST, NULL, qtdemux_tag_add_str}, {
+ FOURCC_perf, GST_TAG_ARTIST, NULL, qtdemux_tag_add_str}, {
+ FOURCC_auth, GST_TAG_COMPOSER, NULL, qtdemux_tag_add_str}, {
+ FOURCC__alb, GST_TAG_ALBUM, NULL, qtdemux_tag_add_str}, {
+ FOURCC_albm, GST_TAG_ALBUM, NULL, qtdemux_tag_add_str}, {
+ FOURCC_cprt, GST_TAG_COPYRIGHT, NULL, qtdemux_tag_add_str}, {
+ FOURCC__cpy, GST_TAG_COPYRIGHT, NULL, qtdemux_tag_add_str}, {
+ FOURCC__cmt, GST_TAG_COMMENT, NULL, qtdemux_tag_add_str}, {
+ FOURCC__des, GST_TAG_DESCRIPTION, NULL, qtdemux_tag_add_str}, {
+ FOURCC_desc, GST_TAG_DESCRIPTION, NULL, qtdemux_tag_add_str}, {
+ FOURCC_dscp, GST_TAG_DESCRIPTION, NULL, qtdemux_tag_add_str}, {
+ FOURCC__lyr, GST_TAG_LYRICS, NULL, qtdemux_tag_add_str}, {
+ FOURCC__day, GST_TAG_DATE, NULL, qtdemux_tag_add_date}, {
+ FOURCC_yrrc, GST_TAG_DATE, NULL, qtdemux_tag_add_year}, {
+ FOURCC__too, GST_TAG_ENCODER, NULL, qtdemux_tag_add_str}, {
+ FOURCC__inf, GST_TAG_COMMENT, NULL, qtdemux_tag_add_str}, {
+ FOURCC_trkn, GST_TAG_TRACK_NUMBER, GST_TAG_TRACK_COUNT, qtdemux_tag_add_num}, {
+ FOURCC_disk, GST_TAG_ALBUM_VOLUME_NUMBER, GST_TAG_ALBUM_VOLUME_COUNT,
+ qtdemux_tag_add_num}, {
+ FOURCC_disc, GST_TAG_ALBUM_VOLUME_NUMBER, GST_TAG_ALBUM_VOLUME_COUNT,
+ qtdemux_tag_add_num}, {
+ FOURCC__gen, GST_TAG_GENRE, NULL, qtdemux_tag_add_str}, {
+ FOURCC_gnre, GST_TAG_GENRE, NULL, qtdemux_tag_add_gnre}, {
+ FOURCC_tmpo, GST_TAG_BEATS_PER_MINUTE, NULL, qtdemux_tag_add_tmpo}, {
+ FOURCC_covr, GST_TAG_IMAGE, NULL, qtdemux_tag_add_covr}, {
+ FOURCC_sonm, GST_TAG_TITLE_SORTNAME, NULL, qtdemux_tag_add_str}, {
+ FOURCC_soal, GST_TAG_ALBUM_SORTNAME, NULL, qtdemux_tag_add_str}, {
+ FOURCC_soar, GST_TAG_ARTIST_SORTNAME, NULL, qtdemux_tag_add_str}, {
+ FOURCC_soaa, GST_TAG_ALBUM_ARTIST_SORTNAME, NULL, qtdemux_tag_add_str}, {
+ FOURCC_soco, GST_TAG_COMPOSER_SORTNAME, NULL, qtdemux_tag_add_str}, {
+ FOURCC_sosn, GST_TAG_SHOW_SORTNAME, NULL, qtdemux_tag_add_str}, {
+ FOURCC_tvsh, GST_TAG_SHOW_NAME, NULL, qtdemux_tag_add_str}, {
+ FOURCC_tvsn, GST_TAG_SHOW_SEASON_NUMBER, NULL, qtdemux_tag_add_uint32}, {
+ FOURCC_tves, GST_TAG_SHOW_EPISODE_NUMBER, NULL, qtdemux_tag_add_uint32}, {
+ FOURCC_kywd, GST_TAG_KEYWORDS, NULL, qtdemux_tag_add_keywords}, {
+ FOURCC_keyw, GST_TAG_KEYWORDS, NULL, qtdemux_tag_add_str}, {
+ FOURCC__enc, GST_TAG_ENCODER, NULL, qtdemux_tag_add_str}, {
+ FOURCC_loci, GST_TAG_GEO_LOCATION_NAME, NULL, qtdemux_tag_add_location}, {
+ FOURCC_clsf, GST_QT_DEMUX_CLASSIFICATION_TAG, NULL,
+ qtdemux_tag_add_classification}, {
+ FOURCC__mak, GST_TAG_DEVICE_MANUFACTURER, NULL, qtdemux_tag_add_str}, {
+ FOURCC__mod, GST_TAG_DEVICE_MODEL, NULL, qtdemux_tag_add_str}, {
+ FOURCC__swr, GST_TAG_APPLICATION_NAME, NULL, qtdemux_tag_add_str}, {
+
+ /* This is a special case, some tags are stored in this
+ * 'reverse dns naming', according to:
+ * http://atomicparsley.sourceforge.net/mpeg-4files.html and
+ * bug #614471
+ */
+ FOURCC_____, "", NULL, qtdemux_tag_add_revdns}, {
+ /* see http://www.mp4ra.org/specs.html for ID32 in meta box */
+ FOURCC_ID32, "", NULL, qtdemux_tag_add_id32}
+};
+
+/* Bundles the demuxer and the target taglist so both can be threaded
+ * through the single user-data pointer of g_node_children_foreach(). */
+struct _GstQtDemuxTagList
+{
+ GstQTDemux *demux;
+ GstTagList *taglist;
+};
+typedef struct _GstQtDemuxTagList GstQtDemuxTagList;
+
+/* Fallback handler for ilst children no add_funcs entry consumed: wraps the
+ * raw atom bytes in a GstSample whose caps name is built from a sanitized
+ * copy of the fourcc, tags it with a "style" hint (itunes/quicktime/iso),
+ * and appends it to the taglist as GST_QT_DEMUX_PRIVATE_TAG.
+ * Used as a GNodeForeachFunc, hence the GstQtDemuxTagList user data. */
+static void
+qtdemux_tag_add_blob (GNode * node, GstQtDemuxTagList * qtdemuxtaglist)
+{
+ gint len;
+ guint8 *data;
+ GstBuffer *buf;
+ gchar *media_type;
+ const gchar *style;
+ GstSample *sample;
+ GstStructure *s;
+ guint i;
+ guint8 ndata[4];
+ GstQTDemux *demux = qtdemuxtaglist->demux;
+ GstTagList *taglist = qtdemuxtaglist->taglist;
+
+ /* NOTE(review): len is read from the atom itself with no lower bound
+ * check before the fourcc read at data + 4 -- assumes the tree parser
+ * only creates nodes for atoms with at least a full 8-byte header. */
+ data = node->data;
+ len = QT_UINT32 (data);
+ buf = gst_buffer_new_and_alloc (len);
+ gst_buffer_fill (buf, 0, data, len);
+
+ /* heuristic to determine style of tag */
+ if (QT_FOURCC (data + 4) == FOURCC_____ ||
+ (len > 8 + 12 && QT_FOURCC (data + 12) == FOURCC_data))
+ style = "itunes";
+ else if (demux->major_brand == FOURCC_qt__)
+ style = "quicktime";
+ /* fall back to assuming iso/3gp tag style */
+ else
+ style = "iso";
+
+ /* sanitize the name for the caps: lowercase alphanumerics pass through,
+ * anything else becomes '_' so the media type stays a valid caps name. */
+ for (i = 0; i < 4; i++) {
+ guint8 d = data[4 + i];
+ if (g_ascii_isalnum (d))
+ ndata[i] = g_ascii_tolower (d);
+ else
+ ndata[i] = '_';
+ }
+
+ media_type = g_strdup_printf ("application/x-gst-qt-%c%c%c%c-tag",
+ ndata[0], ndata[1], ndata[2], ndata[3]);
+ GST_DEBUG_OBJECT (demux, "media type %s", media_type);
+
+ /* the sample takes its own ref on the buffer and owns the structure */
+ s = gst_structure_new (media_type, "style", G_TYPE_STRING, style, NULL);
+ sample = gst_sample_new (buf, NULL, NULL, s);
+ gst_buffer_unref (buf);
+ g_free (media_type);
+
+ GST_DEBUG_OBJECT (demux, "adding private tag; size %d, info %" GST_PTR_FORMAT,
+ len, s);
+
+ gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND,
+ GST_QT_DEMUX_PRIVATE_TAG, sample, NULL);
+
+ gst_sample_unref (sample);
+}
+
+/* Parse a 'udta' (user data) atom tree into @taglist.
+ *
+ * Tag atoms live either under udta/meta/ilst or directly under udta.
+ * Every fourcc in add_funcs is parsed (repeatedly, so duplicate atoms of
+ * the same type are all consumed) and its node destroyed; whatever children
+ * remain afterwards are passed along verbatim as private-tag blobs.
+ * Finally an optional XMP_ atom is parsed and merged. */
+void
+qtdemux_parse_udta (GstQTDemux * qtdemux, GstTagList * taglist, GNode * udta)
+{
+ GNode *meta;
+ GNode *ilst;
+ GNode *xmp_;
+ GNode *node;
+ gint i;
+ GstQtDemuxTagList demuxtaglist;
+
+ demuxtaglist.demux = qtdemux;
+ demuxtaglist.taglist = taglist;
+
+ meta = qtdemux_tree_get_child_by_type (udta, FOURCC_meta);
+ if (meta != NULL) {
+ ilst = qtdemux_tree_get_child_by_type (meta, FOURCC_ilst);
+ if (ilst == NULL) {
+ GST_LOG_OBJECT (qtdemux, "no ilst");
+ return;
+ }
+ } else {
+ ilst = udta;
+ GST_LOG_OBJECT (qtdemux, "no meta so using udta itself");
+ }
+
+ /* i only advances when no child with the current fourcc remains, so every
+ * duplicate atom of a type is parsed and destroyed before moving on */
+ i = 0;
+ while (i < G_N_ELEMENTS (add_funcs)) {
+ node = qtdemux_tree_get_child_by_type (ilst, add_funcs[i].fourcc);
+ if (node) {
+ gint len;
+
+ /* 12 = 8-byte atom header + minimal payload; smaller atoms are bogus */
+ len = QT_UINT32 (node->data);
+ if (len < 12) {
+ GST_DEBUG_OBJECT (qtdemux, "too small tag atom %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (add_funcs[i].fourcc));
+ } else {
+ add_funcs[i].func (qtdemux, taglist, add_funcs[i].gst_tag,
+ add_funcs[i].gst_tag_bis, node);
+ }
+ g_node_destroy (node);
+ } else {
+ i++;
+ }
+ }
+
+ /* parsed nodes have been removed, pass along remainder as blob */
+ g_node_children_foreach (ilst, G_TRAVERSE_ALL,
+ (GNodeForeachFunc) qtdemux_tag_add_blob, &demuxtaglist);
+
+ /* parse up XMP_ node if existing */
+ xmp_ = qtdemux_tree_get_child_by_type (udta, FOURCC_XMP_);
+ if (xmp_ != NULL) {
+ GstBuffer *buf;
+ GstTagList *xmptaglist;
+
+ /* wrap the XMP payload (past the 8-byte atom header) without copying */
+ buf = _gst_buffer_new_wrapped (((guint8 *) xmp_->data) + 8,
+ QT_UINT32 ((guint8 *) xmp_->data) - 8, NULL);
+ xmptaglist = gst_tag_list_from_xmp_buffer (buf);
+ gst_buffer_unref (buf);
+
+ qtdemux_handle_xmp_taglist (qtdemux, taglist, xmptaglist);
+ } else {
+ GST_DEBUG_OBJECT (qtdemux, "No XMP_ node found");
+ }
+}
+
+/* Merge an XMP-derived taglist into @taglist after stripping fields that
+ * do not belong at @taglist's scope: codec tags are dropped for global
+ * (container-level) lists, the container-format tag for per-stream lists.
+ * Takes ownership of @xmptaglist (unreffed here); NULL is a no-op. */
+void
+qtdemux_handle_xmp_taglist (GstQTDemux * qtdemux, GstTagList * taglist,
+ GstTagList * xmptaglist)
+{
+ /* Strip out bogus fields */
+ if (xmptaglist) {
+ if (gst_tag_list_get_scope (taglist) == GST_TAG_SCOPE_GLOBAL) {
+ gst_tag_list_remove_tag (xmptaglist, GST_TAG_VIDEO_CODEC);
+ gst_tag_list_remove_tag (xmptaglist, GST_TAG_AUDIO_CODEC);
+ } else {
+ gst_tag_list_remove_tag (xmptaglist, GST_TAG_CONTAINER_FORMAT);
+ }
+
+ GST_DEBUG_OBJECT (qtdemux, "Found XMP tags %" GST_PTR_FORMAT, xmptaglist);
+
+ /* prioritize native tags using _KEEP mode */
+ gst_tag_list_insert (taglist, xmptaglist, GST_TAG_MERGE_KEEP);
+ gst_tag_list_unref (xmptaglist);
+ }
+}
diff --git a/gst/isomp4/qtdemux_tags.h b/gst/isomp4/qtdemux_tags.h
new file mode 100644
index 0000000000..a55e993394
--- /dev/null
+++ b/gst/isomp4/qtdemux_tags.h
@@ -0,0 +1,30 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __QTDEMUX_TAGS_H__
+#define __QTDEMUX_TAGS_H__
+
+#include <gst/gst.h>
+#include "qtdemux.h"
+
+void qtdemux_parse_udta (GstQTDemux * qtdemux, GstTagList * taglist, GNode * udta);
+void qtdemux_handle_xmp_taglist (GstQTDemux * qtdemux, GstTagList * taglist,
+ GstTagList * xmptaglist);
+
+#endif
diff --git a/gst/isomp4/qtdemux_tree.c b/gst/isomp4/qtdemux_tree.c
new file mode 100644
index 0000000000..e27dc45a39
--- /dev/null
+++ b/gst/isomp4/qtdemux_tree.c
@@ -0,0 +1,122 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
+ * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ * Copyright (C) <2013> Sreerenj Balachandran <sreerenj.balachandran@intel.com>
+ * Copyright (C) <2013> Intel Corporation
+ * Copyright (C) <2014> Centricular Ltd
+ * Copyright (C) <2015> YouView TV Ltd.
+ * Copyright (C) <2016> British Broadcasting Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "qtdemux_tree.h"
+#include "qtdemux_types.h"
+#include "fourcc.h"
+
+/* Return the first direct child of @node whose atom fourcc (stored at byte
+ * offset 4 of the child's data) matches @fourcc, or NULL if none does.
+ * Linear scan; does not recurse into grandchildren. */
+GNode *
+qtdemux_tree_get_child_by_type (GNode * node, guint32 fourcc)
+{
+ GNode *child;
+ guint8 *buffer;
+ guint32 child_fourcc;
+
+ for (child = g_node_first_child (node); child;
+ child = g_node_next_sibling (child)) {
+ buffer = (guint8 *) child->data;
+
+ child_fourcc = QT_FOURCC (buffer + 4);
+
+ if (G_UNLIKELY (child_fourcc == fourcc)) {
+ return child;
+ }
+ }
+ return NULL;
+}
+
+/* Like qtdemux_tree_get_child_by_type(), but additionally initializes
+ * @parser over the matching child's payload (everything after the 8-byte
+ * size+fourcc header).  Returns NULL if no child matches, or if the first
+ * matching child is too short to hold even the header -- note the scan
+ * stops at that first match rather than trying later siblings. */
+GNode *
+qtdemux_tree_get_child_by_type_full (GNode * node, guint32 fourcc,
+ GstByteReader * parser)
+{
+ GNode *child;
+ guint8 *buffer;
+ guint32 child_fourcc, child_len;
+
+ for (child = g_node_first_child (node); child;
+ child = g_node_next_sibling (child)) {
+ buffer = (guint8 *) child->data;
+
+ child_len = QT_UINT32 (buffer);
+ child_fourcc = QT_FOURCC (buffer + 4);
+
+ if (G_UNLIKELY (child_fourcc == fourcc)) {
+ if (G_UNLIKELY (child_len < (4 + 4)))
+ return NULL;
+ /* FIXME: must verify if atom length < parent atom length */
+ gst_byte_reader_init (parser, buffer + (4 + 4), child_len - (4 + 4));
+ return child;
+ }
+ }
+ return NULL;
+}
+
+/* Return the @index-th direct child of @node (0-based), or NULL if @node
+ * has fewer children.  Thin wrapper over g_node_nth_child(). */
+GNode *
+qtdemux_tree_get_child_by_index (GNode * node, guint index)
+{
+ return g_node_nth_child (node, index);
+}
+
+/* Return the first sibling FOLLOWING @node whose atom fourcc matches
+ * @fourcc, or NULL.  If @parser is non-NULL it is initialized over the
+ * match's payload (past the 8-byte header); an undersized match yields
+ * NULL rather than continuing to later siblings. */
+GNode *
+qtdemux_tree_get_sibling_by_type_full (GNode * node, guint32 fourcc,
+ GstByteReader * parser)
+{
+ GNode *child;
+ guint8 *buffer;
+ guint32 child_fourcc, child_len;
+
+ for (child = g_node_next_sibling (node); child;
+ child = g_node_next_sibling (child)) {
+ buffer = (guint8 *) child->data;
+
+ child_fourcc = QT_FOURCC (buffer + 4);
+
+ if (child_fourcc == fourcc) {
+ if (parser) {
+ child_len = QT_UINT32 (buffer);
+ if (G_UNLIKELY (child_len < (4 + 4)))
+ return NULL;
+ /* FIXME: must verify if atom length < parent atom length */
+ gst_byte_reader_init (parser, buffer + (4 + 4), child_len - (4 + 4));
+ }
+ return child;
+ }
+ }
+ return NULL;
+}
+
+/* Convenience variant of qtdemux_tree_get_sibling_by_type_full() that does
+ * not need a byte reader over the matching atom's payload. */
+GNode *
+qtdemux_tree_get_sibling_by_type (GNode * node, guint32 fourcc)
+{
+ return qtdemux_tree_get_sibling_by_type_full (node, fourcc, NULL);
+}
diff --git a/gst/isomp4/qtdemux_tree.h b/gst/isomp4/qtdemux_tree.h
new file mode 100644
index 0000000000..14381815ae
--- /dev/null
+++ b/gst/isomp4/qtdemux_tree.h
@@ -0,0 +1,47 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David A. Schleef <ds@schleef.org>
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
+ * Copyright (C) <2009> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ * Copyright (C) <2013> Sreerenj Balachandran <sreerenj.balachandran@intel.com>
+ * Copyright (C) <2013> Intel Corporation
+ * Copyright (C) <2014> Centricular Ltd
+ * Copyright (C) <2015> YouView TV Ltd.
+ * Copyright (C) <2016> British Broadcasting Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include <gst/gst.h>
+#include <gst/base/gstbytereader.h>
+
+#ifndef __QTDEMUX_TREE_H__
+#define __QTDEMUX_TREE_H__
+
+G_BEGIN_DECLS
+
+GNode *qtdemux_tree_get_child_by_type (GNode * node, guint32 fourcc);
+GNode *qtdemux_tree_get_child_by_type_full (GNode * node,
+ guint32 fourcc, GstByteReader * parser);
+GNode *qtdemux_tree_get_sibling_by_type (GNode * node, guint32 fourcc);
+GNode *qtdemux_tree_get_sibling_by_type_full (GNode * node,
+ guint32 fourcc, GstByteReader * parser);
+GNode *qtdemux_tree_get_child_by_index (GNode * node, guint index);
+
+G_END_DECLS
+
+#endif
diff --git a/gst/isomp4/qtdemux_types.c b/gst/isomp4/qtdemux_types.c
new file mode 100644
index 0000000000..15ad3e5e8b
--- /dev/null
+++ b/gst/isomp4/qtdemux_types.c
@@ -0,0 +1,250 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "qtdemux_debug.h"
+#include "qtdemux_types.h"
+#include "qtdemux_dump.h"
+#include "fourcc.h"
+
+#define GST_CAT_DEFAULT qtdemux_debug
+
+static const QtNodeType qt_node_types[] = {
+ {FOURCC_moov, "movie", QT_FLAG_CONTAINER,},
+ {FOURCC_mvhd, "movie header", 0,
+ qtdemux_dump_mvhd},
+ {FOURCC_clip, "clipping", QT_FLAG_CONTAINER,},
+ {FOURCC_trak, "track", QT_FLAG_CONTAINER,},
+ {FOURCC_udta, "user data", QT_FLAG_CONTAINER,}, /* special container */
+ {FOURCC_ctab, "color table", 0,},
+ {FOURCC_tkhd, "track header", 0,
+ qtdemux_dump_tkhd},
+ {FOURCC_crgn, "clipping region", 0,},
+ {FOURCC_matt, "track matte", QT_FLAG_CONTAINER,},
+ {FOURCC_kmat, "compressed matte", 0,},
+ {FOURCC_edts, "edit", QT_FLAG_CONTAINER,},
+ {FOURCC_elst, "edit list", 0,
+ qtdemux_dump_elst},
+ {FOURCC_load, "track load settings", 0,},
+ {FOURCC_tref, "track reference", QT_FLAG_CONTAINER,},
+ {FOURCC_imap, "track input map", QT_FLAG_CONTAINER,},
+ {FOURCC___in, "track input", 0,}, /* special container */
+ {FOURCC___ty, "input type", 0,},
+ {FOURCC_mdia, "media", QT_FLAG_CONTAINER},
+ {FOURCC_mdhd, "media header", 0,
+ qtdemux_dump_mdhd},
+ {FOURCC_hdlr, "handler reference", 0,
+ qtdemux_dump_hdlr},
+ {FOURCC_minf, "media information", QT_FLAG_CONTAINER},
+ {FOURCC_vmhd, "video media information", 0,
+ qtdemux_dump_vmhd},
+ {FOURCC_smhd, "sound media information", 0},
+ {FOURCC_gmhd, "base media information header", QT_FLAG_CONTAINER},
+ {FOURCC_gmin, "base media info", 0, qtdemux_dump_gmin},
+ {FOURCC_dinf, "data information", QT_FLAG_CONTAINER},
+ {FOURCC_dref, "data reference", 0,
+ qtdemux_dump_dref},
+ {FOURCC_stbl, "sample table", QT_FLAG_CONTAINER},
+ {FOURCC_stsd, "sample description", 0,
+ qtdemux_dump_stsd},
+ {FOURCC_stts, "time-to-sample", 0,
+ qtdemux_dump_stts},
+ {FOURCC_stps, "partial sync sample", 0,
+ qtdemux_dump_stps},
+ {FOURCC_stss, "sync sample", 0,
+ qtdemux_dump_stss},
+ {FOURCC_stsc, "sample-to-chunk", 0,
+ qtdemux_dump_stsc},
+ {FOURCC_stsz, "sample size", 0,
+ qtdemux_dump_stsz},
+ {FOURCC_stco, "chunk offset", 0,
+ qtdemux_dump_stco},
+ {FOURCC_co64, "64-bit chunk offset", 0,
+ qtdemux_dump_co64},
+ {FOURCC_vide, "video media", 0},
+ {FOURCC_cmov, "compressed movie", QT_FLAG_CONTAINER},
+ {FOURCC_dcom, "compressed data", 0, qtdemux_dump_dcom},
+ {FOURCC_cmvd, "compressed movie data", 0, qtdemux_dump_cmvd},
+ {FOURCC_hint, "hint", 0,},
+ {FOURCC_mp4a, "mp4a", 0,},
+ {FOURCC_mp4v, "mp4v", 0,},
+ {FOURCC_mjp2, "mjp2", 0,},
+ {FOURCC_mhdr, "mhdr", QT_FLAG_CONTAINER,},
+ {FOURCC_jp2h, "jp2h", QT_FLAG_CONTAINER,},
+ {FOURCC_colr, "colr", 0,},
+ {FOURCC_clap, "clap", 0,},
+ {FOURCC_tapt, "tapt", 0,},
+ {FOURCC_ihdr, "ihdr", 0,},
+ {FOURCC_fiel, "fiel", 0,},
+ {FOURCC_jp2x, "jp2x", 0,},
+ {FOURCC_alac, "alac", 0,},
+ {FOURCC_fLaC, "fLaC", 0, qtdemux_dump_fLaC},
+ {FOURCC_dfLa, "dfLa", 0, qtdemux_dump_dfLa},
+ {FOURCC_wave, "wave", QT_FLAG_CONTAINER},
+ {FOURCC_appl, "appl", QT_FLAG_CONTAINER},
+ {FOURCC_cfhd, "cfhd", QT_FLAG_CONTAINER},
+ {FOURCC_esds, "esds", 0},
+ {FOURCC_hnti, "hnti", QT_FLAG_CONTAINER},
+ {FOURCC_rtp_, "rtp ", 0, qtdemux_dump_unknown},
+ {FOURCC_sdp_, "sdp ", 0, qtdemux_dump_unknown},
+ {FOURCC_meta, "meta", 0, qtdemux_dump_unknown},
+ {FOURCC_ilst, "ilst", QT_FLAG_CONTAINER,},
+ {FOURCC__nam, "Name", QT_FLAG_CONTAINER,},
+ {FOURCC_titl, "Title", QT_FLAG_CONTAINER,},
+ {FOURCC__ART, "Artist", QT_FLAG_CONTAINER,},
+ {FOURCC_aART, "Album Artist", QT_FLAG_CONTAINER,},
+ {FOURCC_auth, "Author", QT_FLAG_CONTAINER,},
+ {FOURCC_perf, "Performer", QT_FLAG_CONTAINER,},
+ {FOURCC__wrt, "Writer", QT_FLAG_CONTAINER,},
+ {FOURCC__grp, "Grouping", QT_FLAG_CONTAINER,},
+ {FOURCC__alb, "Album", QT_FLAG_CONTAINER,},
+ {FOURCC_albm, "Album", QT_FLAG_CONTAINER,},
+ {FOURCC__day, "Date", QT_FLAG_CONTAINER,},
+ {FOURCC__cpy, "Copyright", QT_FLAG_CONTAINER,},
+ {FOURCC__cmt, "Comment", QT_FLAG_CONTAINER,},
+ {FOURCC__des, "Description", QT_FLAG_CONTAINER,},
+ {FOURCC_desc, "Description", QT_FLAG_CONTAINER,},
+ {FOURCC_dscp, "Description", QT_FLAG_CONTAINER,},
+ {FOURCC__lyr, "Lyrics", QT_FLAG_CONTAINER,},
+ {FOURCC__req, "Requirement", QT_FLAG_CONTAINER,},
+ {FOURCC__enc, "Encoder", QT_FLAG_CONTAINER,},
+ {FOURCC_gnre, "Genre", QT_FLAG_CONTAINER,},
+ {FOURCC_trkn, "Track Number", QT_FLAG_CONTAINER,},
+ {FOURCC_disc, "Disc Number", QT_FLAG_CONTAINER,},
+ {FOURCC_disk, "Disc Number", QT_FLAG_CONTAINER,},
+ {FOURCC_cprt, "Copyright", QT_FLAG_CONTAINER,},
+ {FOURCC_cpil, "Compilation", QT_FLAG_CONTAINER,},
+ {FOURCC_pgap, "Gapless", QT_FLAG_CONTAINER,},
+ {FOURCC_pcst, "Podcast", QT_FLAG_CONTAINER,},
+ {FOURCC_tmpo, "Tempo", QT_FLAG_CONTAINER,},
+ {FOURCC_covr, "Cover", QT_FLAG_CONTAINER,},
+ {FOURCC_sonm, "Sort Title", QT_FLAG_CONTAINER,},
+ {FOURCC_soal, "Sort Album", QT_FLAG_CONTAINER,},
+ {FOURCC_soar, "Sort Artist", QT_FLAG_CONTAINER,},
+ {FOURCC_soaa, "Sort Album Artist", QT_FLAG_CONTAINER,},
+ {FOURCC_soco, "Sort Composer", QT_FLAG_CONTAINER,},
+ {FOURCC_sosn, "Sort TV Show", QT_FLAG_CONTAINER,},
+ {FOURCC_tvsh, "TV Show", QT_FLAG_CONTAINER,},
+ {FOURCC_tven, "TV Episode ID", QT_FLAG_CONTAINER,},
+ {FOURCC_tvsn, "TV Season Number", QT_FLAG_CONTAINER,},
+ {FOURCC_tves, "TV Episode Number", QT_FLAG_CONTAINER,},
+ {FOURCC_keyw, "Keywords", QT_FLAG_CONTAINER,},
+ {FOURCC_kywd, "Keywords", QT_FLAG_CONTAINER,},
+ {FOURCC__too, "Encoder", QT_FLAG_CONTAINER,},
+ {FOURCC__swr, "Application Name", QT_FLAG_CONTAINER,},
+ {FOURCC_____, "----", QT_FLAG_CONTAINER,},
+ {FOURCC_data, "data", 0, qtdemux_dump_unknown},
+ {FOURCC_free, "free", 0,},
+ {FOURCC_skip, "skip", 0,},
+ {FOURCC_SVQ3, "SVQ3", 0,},
+ {FOURCC_rmra, "rmra", QT_FLAG_CONTAINER,},
+ {FOURCC_rmda, "rmda", QT_FLAG_CONTAINER,},
+ {FOURCC_rdrf, "rdrf", 0,},
+ {FOURCC__gen, "Custom Genre", QT_FLAG_CONTAINER,},
+ {FOURCC_ctts, "Composition time to sample", 0, qtdemux_dump_ctts},
+ {FOURCC_cslg, "Composition Shift Least Greatest", 0, qtdemux_dump_cslg},
+ {FOURCC_XiTh, "XiTh", 0},
+ {FOURCC_XdxT, "XdxT", 0},
+ {FOURCC_loci, "loci", 0},
+ {FOURCC_clsf, "clsf", 0},
+ {FOURCC_mfra, "movie fragment random access",
+ QT_FLAG_CONTAINER,},
+ {FOURCC_tfra, "track fragment random access", 0,
+ qtdemux_dump_tfra},
+ {FOURCC_mfro, "movie fragment random access offset", 0,
+ qtdemux_dump_mfro},
+ {FOURCC_moof, "movie fragment", QT_FLAG_CONTAINER,},
+ {FOURCC_mfhd, "movie fragment header", 0, qtdemux_dump_mfhd},
+ {FOURCC_traf, "track fragment", QT_FLAG_CONTAINER,},
+ {FOURCC_tfhd, "track fragment header", 0,
+ qtdemux_dump_tfhd},
+ {FOURCC_sdtp, "independent and disposable samples", 0,
+ qtdemux_dump_sdtp},
+ {FOURCC_trun, "track fragment run", 0, qtdemux_dump_trun},
+ {FOURCC_mdat, "moovie data", 0, qtdemux_dump_unknown},
+ {FOURCC_trex, "moovie data", 0, qtdemux_dump_trex},
+ {FOURCC_mvex, "mvex", QT_FLAG_CONTAINER,},
+ {FOURCC_mehd, "movie extends header", 0,
+ qtdemux_dump_mehd},
+ {FOURCC_ovc1, "ovc1", 0},
+ {FOURCC_owma, "owma", 0},
+ {FOURCC_avcC, "AV codec configuration container", 0},
+ {FOURCC_avc1, "AV codec configuration v1", 0},
+ {FOURCC_avc3, "AV codec configuration v3", 0},
+ {FOURCC_dva1, "AVC-based Dolby Vision derived from avc1", 0},
+ {FOURCC_dvav, "AVC-based Dolby Vision derived from avc3", 0},
+ {FOURCC_mp4s, "VOBSUB codec configuration", 0},
+ {FOURCC_hvc1, "HEVC codec configuration", 0},
+ {FOURCC_hev1, "HEVC codec configuration", 0},
+ {FOURCC_hvcC, "HEVC codec configuration container", 0},
+ {FOURCC_dvhe, "HEVC-based Dolby Vision codec derived from hev1 ", 0},
+ {FOURCC_dvh1, "HEVC-based Dolby Vision codec derived from hvc1 ", 0},
+ {FOURCC_dvcC, "HEVC-based Dolby Vision codec configuration container", 0},
+ {FOURCC_tfdt, "Track fragment decode time", 0, qtdemux_dump_tfdt},
+ {FOURCC_chap, "Chapter Reference"},
+ {FOURCC_btrt, "Bitrate information", 0},
+ {FOURCC_frma, "Audio codec format", 0},
+ {FOURCC_name, "name", 0},
+ {FOURCC_mean, "mean", 0},
+ {FOURCC_svmi, "Stereoscopic Video Media Information", 0,
+ qtdemux_dump_svmi},
+ {FOURCC_scdi, "Stereoscopic Camera and Display Information", 0,
+ qtdemux_dump_unknown},
+ {FOURCC_saiz, "sample auxiliary information sizes", 0},
+ {FOURCC_saio, "sample auxiliary information offsets", 0},
+ {FOURCC_encv, "encrypted visual sample entry", 0},
+ {FOURCC_enca, "encrypted audio sample entry", 0},
+ {FOURCC_enct, "encrypted text sample entry", 0},
+ {FOURCC_encs, "encrypted system sample entry", 0},
+ {FOURCC_sinf, "protection scheme information", QT_FLAG_CONTAINER},
+ {FOURCC_frma, "original format", 0},
+ {FOURCC_schm, "scheme type", 0},
+ {FOURCC_schi, "scheme information", QT_FLAG_CONTAINER},
+ {FOURCC_pssh, "protection system specific header", 0},
+ {FOURCC_tenc, "track encryption", 0},
+ {FOURCC_stpp, "XML subtitle sample entry", 0},
+ {FOURCC_clcp, "Closed Caption", 0},
+ {FOURCC_av01, "AV1 Sample Entry", 0},
+ {FOURCC_av1C, "AV1 Codec Configuration", 0},
+ {FOURCC_av1f, "AV1 Forward Key Frame sample group entry", 0},
+ {FOURCC_av1m, "AV1 Multi-Frame sample group entry", 0},
+ {FOURCC_av1s, "AV1 S-Frame sample group entry", 0},
+ {FOURCC_av1M, "AV1 Metadata sample group entry", 0},
+ {FOURCC_aavd, "AAX encrypted audio", 0},
+ {FOURCC_adrm, "AAX DRM key data", 0},
+ {0, "unknown", 0,},
+};
+
+/* Element count of qt_node_types, including the terminating entry. */
+static const int n_qt_node_types =
+ sizeof (qt_node_types) / sizeof (qt_node_types[0]);
+
+/* Look up the QtNodeType descriptor for @fourcc by linear search.
+ * Never returns NULL: unrecognized fourccs are logged and mapped to the
+ * final {0, "unknown"} sentinel entry of the table. */
+const QtNodeType *
+qtdemux_type_get (guint32 fourcc)
+{
+ int i;
+
+ for (i = 0; i < n_qt_node_types; i++) {
+ if (G_UNLIKELY (qt_node_types[i].fourcc == fourcc))
+ return qt_node_types + i;
+ }
+
+ GST_WARNING ("unknown QuickTime node type %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+
+ return qt_node_types + n_qt_node_types - 1;
+}
diff --git a/gst/isomp4/qtdemux_types.h b/gst/isomp4/qtdemux_types.h
new file mode 100644
index 0000000000..43ef77c3a0
--- /dev/null
+++ b/gst/isomp4/qtdemux_types.h
@@ -0,0 +1,83 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> STEricsson <benjamin.gaignard@stericsson.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_QTDEMUX_TYPES_H__
+#define __GST_QTDEMUX_TYPES_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbytereader.h>
+
+#include "qtdemux.h"
+
+G_BEGIN_DECLS
+
+typedef gboolean (*QtDumpFunc) (GstQTDemux * qtdemux, GstByteReader * data,
+ int depth);
+
+typedef struct _QtNodeType QtNodeType;
+
+#define QT_UINT32(a) (GST_READ_UINT32_BE(a))
+#define QT_UINT24(a) (GST_READ_UINT32_BE(a) >> 8)
+#define QT_UINT16(a) (GST_READ_UINT16_BE(a))
+#define QT_UINT8(a) (GST_READ_UINT8(a))
+#define QT_FP32(a) ((GST_READ_UINT32_BE(a))/65536.0)
+#define QT_SFP32(a) (((gint)(GST_READ_UINT32_BE(a)))/65536.0)
+#define QT_FP16(a) ((GST_READ_UINT16_BE(a))/256.0)
+#define QT_FOURCC(a) (GST_READ_UINT32_LE(a))
+#define QT_UINT64(a) ((((guint64)QT_UINT32(a))<<32)|QT_UINT32(((guint8 *)a)+4))
+
+typedef enum {
+ QT_FLAG_NONE = (0),
+ QT_FLAG_CONTAINER = (1 << 0)
+} QtFlags;
+
+struct _QtNodeType {
+ guint32 fourcc;
+ const gchar *name;
+ QtFlags flags;
+ QtDumpFunc dump;
+};
+
+enum TfFlags
+{
+ TF_BASE_DATA_OFFSET = 0x000001, /* base-data-offset-present */
+ TF_SAMPLE_DESCRIPTION_INDEX = 0x000002, /* sample-description-index-present */
+ TF_DEFAULT_SAMPLE_DURATION = 0x000008, /* default-sample-duration-present */
+ TF_DEFAULT_SAMPLE_SIZE = 0x000010, /* default-sample-size-present */
+ TF_DEFAULT_SAMPLE_FLAGS = 0x000020, /* default-sample-flags-present */
+ TF_DURATION_IS_EMPTY = 0x010000, /* duration-is-empty */
+ TF_DEFAULT_BASE_IS_MOOF = 0x020000 /* default-base-is-moof */
+};
+
+enum TrFlags
+{
+ TR_DATA_OFFSET = 0x000001, /* data-offset-present */
+ TR_FIRST_SAMPLE_FLAGS = 0x000004, /* first-sample-flags-present */
+ TR_SAMPLE_DURATION = 0x000100, /* sample-duration-present */
+ TR_SAMPLE_SIZE = 0x000200, /* sample-size-present */
+ TR_SAMPLE_FLAGS = 0x000400, /* sample-flags-present */
+ TR_COMPOSITION_TIME_OFFSETS = 0x000800 /* sample-composition-time-offsets-presents */
+};
+
+const QtNodeType *qtdemux_type_get (guint32 fourcc);
+
+G_END_DECLS
+
+#endif /* __GST_QTDEMUX_TYPES_H__ */
diff --git a/gst/isomp4/qtpalette.h b/gst/isomp4/qtpalette.h
new file mode 100644
index 0000000000..a41e9911cc
--- /dev/null
+++ b/gst/isomp4/qtpalette.h
@@ -0,0 +1,137 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_QTPALLETE_H__
+#define __GST_QTPALLETE_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
/* Default QuickTime palettes for 1-, 2- and 4-bit palettised video;
 * one packed 0xRRGGBB value per palette index (no alpha). */
static const guint32 ff_qt_default_palette_2[2] = {
  0xffffff, 0x000000
};

static const guint32 ff_qt_default_palette_4[4] = {
  0x93655e, 0xffffff, 0xdfd0ab, 0x000000
};

static const guint32 ff_qt_default_palette_16[16] = {
  0xfffbff, 0xefd9bb, 0xe8c9b1, 0x93655e,
  0xfcdee8, 0x9d8891, 0xffffff, 0xffffff,
  0xffffff, 0x474837, 0x7a5e55, 0xdfd0ab,
  0xfffbf9, 0xe8cac5, 0x8a7c77, 0x000000
};
/* Default QuickTime 8-bit colour palette (0xRRGGBB): a 6x6x6 colour cube
 * followed by pure-red, pure-green, pure-blue and grey ramps. */
static const guint32 ff_qt_default_palette_256[256] = {
  0xFFFFFF, 0xFFFFCC, 0xFFFF99, 0xFFFF66, 0xFFFF33, 0xFFFF00,
  0xFFCCFF, 0xFFCCCC, 0xFFCC99, 0xFFCC66, 0xFFCC33, 0xFFCC00,
  0xFF99FF, 0xFF99CC, 0xFF9999, 0xFF9966, 0xFF9933, 0xFF9900,
  0xFF66FF, 0xFF66CC, 0xFF6699, 0xFF6666, 0xFF6633, 0xFF6600,
  0xFF33FF, 0xFF33CC, 0xFF3399, 0xFF3366, 0xFF3333, 0xFF3300,
  0xFF00FF, 0xFF00CC, 0xFF0099, 0xFF0066, 0xFF0033, 0xFF0000,
  0xCCFFFF, 0xCCFFCC, 0xCCFF99, 0xCCFF66, 0xCCFF33, 0xCCFF00,
  0xCCCCFF, 0xCCCCCC, 0xCCCC99, 0xCCCC66, 0xCCCC33, 0xCCCC00,
  0xCC99FF, 0xCC99CC, 0xCC9999, 0xCC9966, 0xCC9933, 0xCC9900,
  0xCC66FF, 0xCC66CC, 0xCC6699, 0xCC6666, 0xCC6633, 0xCC6600,
  0xCC33FF, 0xCC33CC, 0xCC3399, 0xCC3366, 0xCC3333, 0xCC3300,
  0xCC00FF, 0xCC00CC, 0xCC0099, 0xCC0066, 0xCC0033, 0xCC0000,
  0x99FFFF, 0x99FFCC, 0x99FF99, 0x99FF66, 0x99FF33, 0x99FF00,
  0x99CCFF, 0x99CCCC, 0x99CC99, 0x99CC66, 0x99CC33, 0x99CC00,
  0x9999FF, 0x9999CC, 0x999999, 0x999966, 0x999933, 0x999900,
  0x9966FF, 0x9966CC, 0x996699, 0x996666, 0x996633, 0x996600,
  0x9933FF, 0x9933CC, 0x993399, 0x993366, 0x993333, 0x993300,
  0x9900FF, 0x9900CC, 0x990099, 0x990066, 0x990033, 0x990000,
  0x66FFFF, 0x66FFCC, 0x66FF99, 0x66FF66, 0x66FF33, 0x66FF00,
  0x66CCFF, 0x66CCCC, 0x66CC99, 0x66CC66, 0x66CC33, 0x66CC00,
  0x6699FF, 0x6699CC, 0x669999, 0x669966, 0x669933, 0x669900,
  0x6666FF, 0x6666CC, 0x666699, 0x666666, 0x666633, 0x666600,
  0x6633FF, 0x6633CC, 0x663399, 0x663366, 0x663333, 0x663300,
  0x6600FF, 0x6600CC, 0x660099, 0x660066, 0x660033, 0x660000,
  0x33FFFF, 0x33FFCC, 0x33FF99, 0x33FF66, 0x33FF33, 0x33FF00,
  0x33CCFF, 0x33CCCC, 0x33CC99, 0x33CC66, 0x33CC33, 0x33CC00,
  0x3399FF, 0x3399CC, 0x339999, 0x339966, 0x339933, 0x339900,
  0x3366FF, 0x3366CC, 0x336699, 0x336666, 0x336633, 0x336600,
  0x3333FF, 0x3333CC, 0x333399, 0x333366, 0x333333, 0x333300,
  0x3300FF, 0x3300CC, 0x330099, 0x330066, 0x330033, 0x330000,
  0x00FFFF, 0x00FFCC, 0x00FF99, 0x00FF66, 0x00FF33, 0x00FF00,
  0x00CCFF, 0x00CCCC, 0x00CC99, 0x00CC66, 0x00CC33, 0x00CC00,
  0x0099FF, 0x0099CC, 0x009999, 0x009966, 0x009933, 0x009900,
  0x0066FF, 0x0066CC, 0x006699, 0x006666, 0x006633, 0x006600,
  0x0033FF, 0x0033CC, 0x003399, 0x003366, 0x003333, 0x003300,
  0x0000FF, 0x0000CC, 0x000099, 0x000066, 0x000033, 0xEE0000,
  0xDD0000, 0xBB0000, 0xAA0000, 0x880000, 0x770000, 0x550000,
  0x440000, 0x220000, 0x110000, 0x00EE00, 0x00DD00, 0x00BB00,
  0x00AA00, 0x008800, 0x007700, 0x005500, 0x004400, 0x002200,
  0x001100, 0x0000EE, 0x0000DD, 0x0000BB, 0x0000AA, 0x000088,
  0x000077, 0x000055, 0x000044, 0x000022, 0x000011, 0xEEEEEE,
  0xDDDDDD, 0xBBBBBB, 0xAAAAAA, 0x888888, 0x777777, 0x555555,
  0x444444, 0x222222, 0x111111, 0x000000
};
+
/* Default QuickTime 4-bit greyscale palette: linear ramp, white to black. */
static const guint32 ff_qt_grayscale_palette_16[16] = {
  0xffffff, 0xeeeeee, 0xdddddd, 0xcccccc,
  0xbbbbbb, 0xaaaaaa, 0x999999, 0x888888,
  0x777777, 0x666666, 0x555555, 0x444444,
  0x333333, 0x222222, 0x111111, 0x000000
};
+
/* Default QuickTime 8-bit greyscale palette: entry i is the grey level
 * (255 - i) replicated into R, G and B. */
static const guint32 ff_qt_grayscale_palette_256[256] = {
  0xffffff, 0xfefefe, 0xfdfdfd, 0xfcfcfc, 0xfbfbfb, 0xfafafa, 0xf9f9f9,
  0xf8f8f8, 0xf7f7f7, 0xf6f6f6, 0xf5f5f5, 0xf4f4f4, 0xf3f3f3, 0xf2f2f2,
  0xf1f1f1, 0xf0f0f0, 0xefefef, 0xeeeeee, 0xededed, 0xececec, 0xebebeb,
  0xeaeaea, 0xe9e9e9, 0xe8e8e8, 0xe7e7e7, 0xe6e6e6, 0xe5e5e5, 0xe4e4e4,
  0xe3e3e3, 0xe2e2e2, 0xe1e1e1, 0xe0e0e0, 0xdfdfdf, 0xdedede, 0xdddddd,
  0xdcdcdc, 0xdbdbdb, 0xdadada, 0xd9d9d9, 0xd8d8d8, 0xd7d7d7, 0xd6d6d6,
  0xd5d5d5, 0xd4d4d4, 0xd3d3d3, 0xd2d2d2, 0xd1d1d1, 0xd0d0d0, 0xcfcfcf,
  0xcecece, 0xcdcdcd, 0xcccccc, 0xcbcbcb, 0xcacaca, 0xc9c9c9, 0xc8c8c8,
  0xc7c7c7, 0xc6c6c6, 0xc5c5c5, 0xc4c4c4, 0xc3c3c3, 0xc2c2c2, 0xc1c1c1,
  0xc0c0c0, 0xbfbfbf, 0xbebebe, 0xbdbdbd, 0xbcbcbc, 0xbbbbbb, 0xbababa,
  0xb9b9b9, 0xb8b8b8, 0xb7b7b7, 0xb6b6b6, 0xb5b5b5, 0xb4b4b4, 0xb3b3b3,
  0xb2b2b2, 0xb1b1b1, 0xb0b0b0, 0xafafaf, 0xaeaeae, 0xadadad, 0xacacac,
  0xababab, 0xaaaaaa, 0xa9a9a9, 0xa8a8a8, 0xa7a7a7, 0xa6a6a6, 0xa5a5a5,
  0xa4a4a4, 0xa3a3a3, 0xa2a2a2, 0xa1a1a1, 0xa0a0a0, 0x9f9f9f, 0x9e9e9e,
  0x9d9d9d, 0x9c9c9c, 0x9b9b9b, 0x9a9a9a, 0x999999, 0x989898, 0x979797,
  0x969696, 0x959595, 0x949494, 0x939393, 0x929292, 0x919191, 0x909090,
  0x8f8f8f, 0x8e8e8e, 0x8d8d8d, 0x8c8c8c, 0x8b8b8b, 0x8a8a8a, 0x898989,
  0x888888, 0x878787, 0x868686, 0x858585, 0x848484, 0x838383, 0x828282,
  0x818181, 0x808080, 0x7f7f7f, 0x7e7e7e, 0x7d7d7d, 0x7c7c7c, 0x7b7b7b,
  0x7a7a7a, 0x797979, 0x787878, 0x777777, 0x767676, 0x757575, 0x747474,
  0x737373, 0x727272, 0x717171, 0x707070, 0x6f6f6f, 0x6e6e6e, 0x6d6d6d,
  0x6c6c6c, 0x6b6b6b, 0x6a6a6a, 0x696969, 0x686868, 0x676767, 0x666666,
  0x656565, 0x646464, 0x636363, 0x626262, 0x616161, 0x606060, 0x5f5f5f,
  0x5e5e5e, 0x5d5d5d, 0x5c5c5c, 0x5b5b5b, 0x5a5a5a, 0x595959, 0x585858,
  0x575757, 0x565656, 0x555555, 0x545454, 0x535353, 0x525252, 0x515151,
  0x505050, 0x4f4f4f, 0x4e4e4e, 0x4d4d4d, 0x4c4c4c, 0x4b4b4b, 0x4a4a4a,
  0x494949, 0x484848, 0x474747, 0x464646, 0x454545, 0x444444, 0x434343,
  0x424242, 0x414141, 0x404040, 0x3f3f3f, 0x3e3e3e, 0x3d3d3d, 0x3c3c3c,
  0x3b3b3b, 0x3a3a3a, 0x393939, 0x383838, 0x373737, 0x363636, 0x353535,
  0x343434, 0x333333, 0x323232, 0x313131, 0x303030, 0x2f2f2f, 0x2e2e2e,
  0x2d2d2d, 0x2c2c2c, 0x2b2b2b, 0x2a2a2a, 0x292929, 0x282828, 0x272727,
  0x262626, 0x252525, 0x242424, 0x232323, 0x222222, 0x212121, 0x202020,
  0x1f1f1f, 0x1e1e1e, 0x1d1d1d, 0x1c1c1c, 0x1b1b1b, 0x1a1a1a, 0x191919,
  0x181818, 0x171717, 0x161616, 0x151515, 0x141414, 0x131313, 0x121212,
  0x111111, 0x101010, 0x0f0f0f, 0x0e0e0e, 0x0d0d0d, 0x0c0c0c, 0x0b0b0b,
  0x0a0a0a, 0x090909, 0x080808, 0x070707, 0x060606, 0x050505, 0x040404,
  0x030303, 0x020202, 0x010101, 0x000000
};
+
+G_END_DECLS
+
+#endif /* __GST_QTPALLETE_H__ */
diff --git a/gst/law/alaw-decode.c b/gst/law/alaw-decode.c
new file mode 100644
index 0000000000..95c2bc28d2
--- /dev/null
+++ b/gst/law/alaw-decode.c
@@ -0,0 +1,250 @@
+/* GStreamer A-Law to PCM conversion
+ * Copyright (C) 2000 by Abramo Bagnara <abramo@alsa-project.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-alawdec
+ * @title: alawdec
+ *
+ * This element decodes alaw audio. Alaw coding is also known as G.711.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "alaw-decode.h"
+
/* Pad templates are defined further down but referenced in class_init,
 * hence the forward declarations. */
extern GstStaticPadTemplate alaw_dec_src_factory;
extern GstStaticPadTemplate alaw_dec_sink_factory;

GST_DEBUG_CATEGORY_STATIC (alaw_dec_debug);
#define GST_CAT_DEFAULT alaw_dec_debug

/* GstAudioDecoder vmethod implementations */
static gboolean gst_alaw_dec_set_format (GstAudioDecoder * dec, GstCaps * caps);
static GstFlowReturn gst_alaw_dec_handle_frame (GstAudioDecoder * dec,
    GstBuffer * buffer);

#define gst_alaw_dec_parent_class parent_class
G_DEFINE_TYPE (GstALawDec, gst_alaw_dec, GST_TYPE_AUDIO_DECODER);
GST_ELEMENT_REGISTER_DEFINE (alawdec, "alawdec", GST_RANK_PRIMARY,
    GST_TYPE_ALAW_DEC);

/* some day we might have defines in gstconfig.h that tell us about the
 * desired cpu/memory/binary size trade-offs */
#define GST_ALAW_DEC_USE_TABLE
+
+#ifdef GST_ALAW_DEC_USE_TABLE
+
/* G.711 A-law expansion table: 16-bit linear PCM value for each of the
 * 256 possible A-law input bytes. */
static const gint alaw_to_s16_table[256] = {
  -5504, -5248, -6016, -5760, -4480, -4224, -4992, -4736,
  -7552, -7296, -8064, -7808, -6528, -6272, -7040, -6784,
  -2752, -2624, -3008, -2880, -2240, -2112, -2496, -2368,
  -3776, -3648, -4032, -3904, -3264, -3136, -3520, -3392,
  -22016, -20992, -24064, -23040, -17920, -16896, -19968, -18944,
  -30208, -29184, -32256, -31232, -26112, -25088, -28160, -27136,
  -11008, -10496, -12032, -11520, -8960, -8448, -9984, -9472,
  -15104, -14592, -16128, -15616, -13056, -12544, -14080, -13568,
  -344, -328, -376, -360, -280, -264, -312, -296,
  -472, -456, -504, -488, -408, -392, -440, -424,
  -88, -72, -120, -104, -24, -8, -56, -40,
  -216, -200, -248, -232, -152, -136, -184, -168,
  -1376, -1312, -1504, -1440, -1120, -1056, -1248, -1184,
  -1888, -1824, -2016, -1952, -1632, -1568, -1760, -1696,
  -688, -656, -752, -720, -560, -528, -624, -592,
  -944, -912, -1008, -976, -816, -784, -880, -848,
  5504, 5248, 6016, 5760, 4480, 4224, 4992, 4736,
  7552, 7296, 8064, 7808, 6528, 6272, 7040, 6784,
  2752, 2624, 3008, 2880, 2240, 2112, 2496, 2368,
  3776, 3648, 4032, 3904, 3264, 3136, 3520, 3392,
  22016, 20992, 24064, 23040, 17920, 16896, 19968, 18944,
  30208, 29184, 32256, 31232, 26112, 25088, 28160, 27136,
  11008, 10496, 12032, 11520, 8960, 8448, 9984, 9472,
  15104, 14592, 16128, 15616, 13056, 12544, 14080, 13568,
  344, 328, 376, 360, 280, 264, 312, 296,
  472, 456, 504, 488, 408, 392, 440, 424,
  88, 72, 120, 104, 24, 8, 56, 40,
  216, 200, 248, 232, 152, 136, 184, 168,
  1376, 1312, 1504, 1440, 1120, 1056, 1248, 1184,
  1888, 1824, 2016, 1952, 1632, 1568, 1760, 1696,
  688, 656, 752, 720, 560, 528, 624, 592,
  944, 912, 1008, 976, 816, 784, 880, 848
};

/* Expand one A-law byte to a signed 16-bit PCM sample (table lookup). */
static inline gint
alaw_to_s16 (guint8 a_val)
{
  return alaw_to_s16_table[a_val];
}
+
+#else /* GST_ALAW_DEC_USE_TABLE */
+
+static inline gint
+alaw_to_s16 (guint8 a_val)
+{
+ gint t;
+ gint seg;
+
+ a_val ^= 0x55;
+ t = a_val & 0x7f;
+ if (t < 16)
+ t = (t << 4) + 8;
+ else {
+ seg = (t >> 4) & 0x07;
+ t = ((t & 0x0f) << 4) + 0x108;
+ t <<= seg - 1;
+ }
+ return ((a_val & 0x80) ? t : -t);
+}
+
+#endif /* GST_ALAW_DEC_USE_TABLE */
+
/* Source pad: native-endian interleaved S16 PCM at the same rate and
 * channel count as the A-law input. */
GstStaticPadTemplate alaw_dec_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw, "
        "format = (string) " GST_AUDIO_NE (S16) ", "
        "layout = (string) interleaved, "
        "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
    );

/* Sink pad: raw A-law bytes, mono or stereo. */
GstStaticPadTemplate alaw_dec_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-alaw, "
        "rate = [ 8000 , 192000 ], " "channels = [ 1 , 2 ]")
    );
+
+static gboolean
+gst_alaw_dec_set_format (GstAudioDecoder * dec, GstCaps * caps)
+{
+ GstALawDec *alawdec = GST_ALAW_DEC (dec);
+ GstStructure *structure;
+ int rate, channels;
+ GstAudioInfo info;
+
+ structure = gst_caps_get_structure (caps, 0);
+ if (!structure) {
+ GST_ERROR_OBJECT (dec, "failed to get structure from caps");
+ return FALSE;
+ }
+
+ if (!gst_structure_get_int (structure, "rate", &rate)) {
+ GST_ERROR_OBJECT (dec, "failed to find field rate in input caps");
+ return FALSE;
+ }
+
+ if (!gst_structure_get_int (structure, "channels", &channels)) {
+ GST_ERROR_OBJECT (dec, "failed to find field channels in input caps");
+ return FALSE;
+ }
+
+ gst_audio_info_init (&info);
+ gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, rate, channels, NULL);
+
+ GST_DEBUG_OBJECT (alawdec, "rate=%d, channels=%d", rate, channels);
+
+ return gst_audio_decoder_set_output_format (dec, &info);
+}
+
+static GstFlowReturn
+gst_alaw_dec_handle_frame (GstAudioDecoder * dec, GstBuffer * buffer)
+{
+ GstMapInfo inmap, outmap;
+ gint16 *linear_data;
+ guint8 *alaw_data;
+ gsize alaw_size, linear_size;
+ GstBuffer *outbuf;
+ gint i;
+
+ if (!buffer) {
+ return GST_FLOW_OK;
+ }
+
+ if (!gst_buffer_map (buffer, &inmap, GST_MAP_READ)) {
+ GST_ERROR_OBJECT (dec, "failed to map input buffer");
+ goto error_failed_map_input_buffer;
+ }
+
+ alaw_data = inmap.data;
+ alaw_size = inmap.size;
+
+ linear_size = alaw_size * 2;
+
+ outbuf = gst_audio_decoder_allocate_output_buffer (dec, linear_size);
+ if (!gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE)) {
+ GST_ERROR_OBJECT (dec, "failed to map input buffer");
+ goto error_failed_map_output_buffer;
+ }
+
+ linear_data = (gint16 *) outmap.data;
+ for (i = 0; i < alaw_size; i++) {
+ linear_data[i] = alaw_to_s16 (alaw_data[i]);
+ }
+
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
+
+ return gst_audio_decoder_finish_frame (dec, outbuf, -1);
+
+error_failed_map_output_buffer:
+ gst_buffer_unref (outbuf);
+ gst_buffer_unmap (buffer, &inmap);
+
+error_failed_map_input_buffer:
+ return GST_FLOW_ERROR;
+}
+
/* Called when decoding starts: enable rate estimation in the base class
 * (used for duration/position estimates on streams without timing). */
static gboolean
gst_alaw_dec_start (GstAudioDecoder * dec)
{
  gst_audio_decoder_set_estimate_rate (dec, TRUE);

  return TRUE;
}
+
/* Class initialisation: install pad templates, hook up the
 * GstAudioDecoder vmethods and register element metadata. */
static void
gst_alaw_dec_class_init (GstALawDecClass * klass)
{
  GstElementClass *element_class = (GstElementClass *) klass;
  GstAudioDecoderClass *audiodec_class = GST_AUDIO_DECODER_CLASS (klass);

  gst_element_class_add_static_pad_template (element_class,
      &alaw_dec_src_factory);
  gst_element_class_add_static_pad_template (element_class,
      &alaw_dec_sink_factory);

  audiodec_class->start = GST_DEBUG_FUNCPTR (gst_alaw_dec_start);
  audiodec_class->set_format = GST_DEBUG_FUNCPTR (gst_alaw_dec_set_format);
  audiodec_class->handle_frame = GST_DEBUG_FUNCPTR (gst_alaw_dec_handle_frame);

  gst_element_class_set_static_metadata (element_class, "A Law audio decoder",
      "Codec/Decoder/Audio",
      "Convert 8bit A law to 16bit PCM",
      "Zaheer Abbas Merali <zaheerabbas at merali dot org>");

  GST_DEBUG_CATEGORY_INIT (alaw_dec_debug, "alawdec", 0, "A Law audio decoder");
}
+
/* Instance initialisation: require caps before decoding starts and let
 * the base class answer ACCEPT_CAPS queries from the pad template. */
static void
gst_alaw_dec_init (GstALawDec * alawdec)
{
  gst_audio_decoder_set_needs_format (GST_AUDIO_DECODER (alawdec), TRUE);
  gst_audio_decoder_set_use_default_pad_acceptcaps (GST_AUDIO_DECODER_CAST
      (alawdec), TRUE);
  GST_PAD_SET_ACCEPT_TEMPLATE (GST_AUDIO_DECODER_SINK_PAD (alawdec));
}
diff --git a/gst/law/alaw-decode.h b/gst/law/alaw-decode.h
new file mode 100644
index 0000000000..8994c38df3
--- /dev/null
+++ b/gst/law/alaw-decode.h
@@ -0,0 +1,57 @@
+/* GStreamer A-Law to PCM conversion
+ * Copyright (C) 2000 by Abramo Bagnara <abramo@alsa-project.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_ALAW_DECODE_H__
+#define __GST_ALAW_DECODE_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+
/* Standard GObject type boilerplate for the alawdec element. */
#define GST_TYPE_ALAW_DEC \
  (gst_alaw_dec_get_type())
#define GST_ALAW_DEC(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ALAW_DEC,GstALawDec))
#define GST_ALAW_DEC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ALAW_DEC,GstALawDecClass))
#define GST_IS_ALAW_DEC(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ALAW_DEC))
#define GST_IS_ALAW_DEC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ALAW_DEC))

typedef struct _GstALawDec GstALawDec;
typedef struct _GstALawDecClass GstALawDecClass;

/* A-law decoder instance: stateless beyond the GstAudioDecoder base. */
struct _GstALawDec {
  GstAudioDecoder element;
};

struct _GstALawDecClass {
  GstAudioDecoderClass parent_class;
};

GType gst_alaw_dec_get_type(void);

GST_ELEMENT_REGISTER_DECLARE (alawdec);
+
+G_END_DECLS
+
+#endif /* __GST_ALAW_DECODE_H__ */
+
diff --git a/gst/law/alaw-encode.c b/gst/law/alaw-encode.c
new file mode 100644
index 0000000000..8a1b6e48d3
--- /dev/null
+++ b/gst/law/alaw-encode.c
@@ -0,0 +1,447 @@
+/* GStreamer PCM to A-Law conversion
+ * Copyright (C) 2000 by Abramo Bagnara <abramo@alsa-project.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-alawenc
+ * @title: alawenc
+ *
+ * This element encodes alaw audio. Alaw coding is also known as G.711.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/audio/audio.h>
+
+#include "alaw-encode.h"
+
GST_DEBUG_CATEGORY_STATIC (alaw_enc_debug);
#define GST_CAT_DEFAULT alaw_enc_debug

/* Pad templates are defined further down but referenced in class_init,
 * hence the forward declarations. */
extern GstStaticPadTemplate alaw_enc_src_factory;
extern GstStaticPadTemplate alaw_enc_sink_factory;

G_DEFINE_TYPE (GstALawEnc, gst_alaw_enc, GST_TYPE_AUDIO_ENCODER);
GST_ELEMENT_REGISTER_DEFINE (alawenc, "alawenc", GST_RANK_PRIMARY,
    GST_TYPE_ALAW_ENC);

/* GstAudioEncoder vmethod implementations */
static gboolean gst_alaw_enc_start (GstAudioEncoder * audioenc);
static gboolean gst_alaw_enc_set_format (GstAudioEncoder * enc,
    GstAudioInfo * info);
static GstFlowReturn gst_alaw_enc_handle_frame (GstAudioEncoder * enc,
    GstBuffer * buffer);

/* some day we might have defines in gstconfig.h that tell us about the
 * desired cpu/memory/binary size trade-offs */
#define GST_ALAW_ENC_USE_TABLE
+
+#ifdef GST_ALAW_ENC_USE_TABLE
+
/* G.711 A-law compression table, indexed by pcm_val / 16 for non-negative
 * samples (top 11 bits of the magnitude). The extra entry at index 2048
 * covers -32768 / -16; see s16_to_alaw() for sign handling. */
static const guint8 alaw_encode[2048 + 1] = {
  0xd5, 0xd4, 0xd7, 0xd6, 0xd1, 0xd0, 0xd3, 0xd2, 0xdd, 0xdc, 0xdf, 0xde,
  0xd9, 0xd8, 0xdb, 0xda, 0xc5, 0xc4, 0xc7, 0xc6, 0xc1, 0xc0, 0xc3, 0xc2,
  0xcd, 0xcc, 0xcf, 0xce, 0xc9, 0xc8, 0xcb, 0xca, 0xf5, 0xf5, 0xf4, 0xf4,
  0xf7, 0xf7, 0xf6, 0xf6, 0xf1, 0xf1, 0xf0, 0xf0, 0xf3, 0xf3, 0xf2, 0xf2,
  0xfd, 0xfd, 0xfc, 0xfc, 0xff, 0xff, 0xfe, 0xfe, 0xf9, 0xf9, 0xf8, 0xf8,
  0xfb, 0xfb, 0xfa, 0xfa, 0xe5, 0xe5, 0xe5, 0xe5, 0xe4, 0xe4, 0xe4, 0xe4,
  0xe7, 0xe7, 0xe7, 0xe7, 0xe6, 0xe6, 0xe6, 0xe6, 0xe1, 0xe1, 0xe1, 0xe1,
  0xe0, 0xe0, 0xe0, 0xe0, 0xe3, 0xe3, 0xe3, 0xe3, 0xe2, 0xe2, 0xe2, 0xe2,
  0xed, 0xed, 0xed, 0xed, 0xec, 0xec, 0xec, 0xec, 0xef, 0xef, 0xef, 0xef,
  0xee, 0xee, 0xee, 0xee, 0xe9, 0xe9, 0xe9, 0xe9, 0xe8, 0xe8, 0xe8, 0xe8,
  0xeb, 0xeb, 0xeb, 0xeb, 0xea, 0xea, 0xea, 0xea, 0x95, 0x95, 0x95, 0x95,
  0x95, 0x95, 0x95, 0x95, 0x94, 0x94, 0x94, 0x94, 0x94, 0x94, 0x94, 0x94,
  0x97, 0x97, 0x97, 0x97, 0x97, 0x97, 0x97, 0x97, 0x96, 0x96, 0x96, 0x96,
  0x96, 0x96, 0x96, 0x96, 0x91, 0x91, 0x91, 0x91, 0x91, 0x91, 0x91, 0x91,
  0x90, 0x90, 0x90, 0x90, 0x90, 0x90, 0x90, 0x90, 0x93, 0x93, 0x93, 0x93,
  0x93, 0x93, 0x93, 0x93, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92, 0x92,
  0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9d, 0x9c, 0x9c, 0x9c, 0x9c,
  0x9c, 0x9c, 0x9c, 0x9c, 0x9f, 0x9f, 0x9f, 0x9f, 0x9f, 0x9f, 0x9f, 0x9f,
  0x9e, 0x9e, 0x9e, 0x9e, 0x9e, 0x9e, 0x9e, 0x9e, 0x99, 0x99, 0x99, 0x99,
  0x99, 0x99, 0x99, 0x99, 0x98, 0x98, 0x98, 0x98, 0x98, 0x98, 0x98, 0x98,
  0x9b, 0x9b, 0x9b, 0x9b, 0x9b, 0x9b, 0x9b, 0x9b, 0x9a, 0x9a, 0x9a, 0x9a,
  0x9a, 0x9a, 0x9a, 0x9a, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85,
  0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x85, 0x84, 0x84, 0x84, 0x84,
  0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84, 0x84,
  0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87, 0x87,
  0x87, 0x87, 0x87, 0x87, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86,
  0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x86, 0x81, 0x81, 0x81, 0x81,
  0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81, 0x81,
  0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
  0x80, 0x80, 0x80, 0x80, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83,
  0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x83, 0x82, 0x82, 0x82, 0x82,
  0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82, 0x82,
  0x8d, 0x8d, 0x8d, 0x8d, 0x8d, 0x8d, 0x8d, 0x8d, 0x8d, 0x8d, 0x8d, 0x8d,
  0x8d, 0x8d, 0x8d, 0x8d, 0x8c, 0x8c, 0x8c, 0x8c, 0x8c, 0x8c, 0x8c, 0x8c,
  0x8c, 0x8c, 0x8c, 0x8c, 0x8c, 0x8c, 0x8c, 0x8c, 0x8f, 0x8f, 0x8f, 0x8f,
  0x8f, 0x8f, 0x8f, 0x8f, 0x8f, 0x8f, 0x8f, 0x8f, 0x8f, 0x8f, 0x8f, 0x8f,
  0x8e, 0x8e, 0x8e, 0x8e, 0x8e, 0x8e, 0x8e, 0x8e, 0x8e, 0x8e, 0x8e, 0x8e,
  0x8e, 0x8e, 0x8e, 0x8e, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89,
  0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x89, 0x88, 0x88, 0x88, 0x88,
  0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88, 0x88,
  0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b,
  0x8b, 0x8b, 0x8b, 0x8b, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a,
  0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0x8a, 0xb5, 0xb5, 0xb5, 0xb5,
  0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5,
  0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5, 0xb5,
  0xb5, 0xb5, 0xb5, 0xb5, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4,
  0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4,
  0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4, 0xb4,
  0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7,
  0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7,
  0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb7, 0xb6, 0xb6, 0xb6, 0xb6,
  0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6,
  0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6, 0xb6,
  0xb6, 0xb6, 0xb6, 0xb6, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1,
  0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1,
  0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1, 0xb1,
  0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0,
  0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0,
  0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb0, 0xb3, 0xb3, 0xb3, 0xb3,
  0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3,
  0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3, 0xb3,
  0xb3, 0xb3, 0xb3, 0xb3, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2,
  0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2,
  0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2, 0xb2,
  0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd,
  0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd,
  0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbd, 0xbc, 0xbc, 0xbc, 0xbc,
  0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc,
  0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc, 0xbc,
  0xbc, 0xbc, 0xbc, 0xbc, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf,
  0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf,
  0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf, 0xbf,
  0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe,
  0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe,
  0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xbe, 0xb9, 0xb9, 0xb9, 0xb9,
  0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9,
  0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9, 0xb9,
  0xb9, 0xb9, 0xb9, 0xb9, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8,
  0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8,
  0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8, 0xb8,
  0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb,
  0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb,
  0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xbb, 0xba, 0xba, 0xba, 0xba,
  0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba,
  0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba, 0xba,
  0xba, 0xba, 0xba, 0xba, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5,
  0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5,
  0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5,
  0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5,
  0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5,
  0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa5, 0xa4, 0xa4, 0xa4, 0xa4,
  0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4,
  0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4,
  0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4,
  0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4,
  0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4, 0xa4,
  0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7,
  0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7,
  0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7,
  0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7,
  0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7, 0xa7,
  0xa7, 0xa7, 0xa7, 0xa7, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6,
  0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6,
  0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6,
  0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6,
  0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6,
  0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa6, 0xa1, 0xa1, 0xa1, 0xa1,
  0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1,
  0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1,
  0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1,
  0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1,
  0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1, 0xa1,
  0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0,
  0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0,
  0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0,
  0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0,
  0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0, 0xa0,
  0xa0, 0xa0, 0xa0, 0xa0, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3,
  0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3,
  0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3,
  0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3,
  0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3,
  0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa2, 0xa2, 0xa2, 0xa2,
  0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2,
  0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2,
  0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2,
  0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2,
  0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2, 0xa2,
  0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad,
  0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad,
  0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad,
  0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad,
  0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad, 0xad,
  0xad, 0xad, 0xad, 0xad, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac,
  0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac,
  0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac,
  0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac,
  0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac,
  0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xac, 0xaf, 0xaf, 0xaf, 0xaf,
  0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf,
  0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf,
  0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf,
  0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf,
  0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf, 0xaf,
  0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae,
  0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae,
  0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae,
  0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae,
  0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae, 0xae,
  0xae, 0xae, 0xae, 0xae, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9,
  0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9,
  0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9,
  0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9,
  0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9,
  0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa9, 0xa8, 0xa8, 0xa8, 0xa8,
  0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8,
  0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8,
  0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8,
  0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8,
  0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8, 0xa8,
  0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab,
  0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab,
  0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab,
  0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab,
  0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab, 0xab,
  0xab, 0xab, 0xab, 0xab, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
  0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
  0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
  0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
  0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa,
  0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0xaa, 0x2a
};
+
/* Compress one 16-bit PCM sample to A-law via the lookup table.
 * Negative samples index the table with the magnitude (-32768 / -16 hits
 * the extra entry at 2048) and clear the sign bit of the result. */
static inline guint8
s16_to_alaw (gint16 pcm_val)
{
  if (pcm_val >= 0)
    return alaw_encode[pcm_val / 16];
  else
    return (0x7F & alaw_encode[pcm_val / -16]);
}
+
+#else /* GST_ALAW_ENC_USE_TABLE */
+
+/*
+ * s16_to_alaw() - Convert a 16-bit linear PCM value to 8-bit A-law
+ *
+ * s16_to_alaw() accepts a 16-bit integer and encodes it as A-law data.
+ *
+ * Linear Input Code Compressed Code
+ * ------------------------ ---------------
+ * 0000000wxyza 000wxyz
+ * 0000001wxyza 001wxyz
+ * 000001wxyzab 010wxyz
+ * 00001wxyzabc 011wxyz
+ * 0001wxyzabcd 100wxyz
+ * 001wxyzabcde 101wxyz
+ * 01wxyzabcdef 110wxyz
+ * 1wxyzabcdefg 111wxyz
+ *
+ * For further information see John C. Bellamy's Digital Telephony, 1982,
+ * John Wiley & Sons, pps 98-111 and 472-476.
+ */
+
+static inline gint
+val_seg (gint val)
+{
+ gint r = 1;
+
+ val >>= 8;
+ if (val & 0xf0) {
+ val >>= 4;
+ r += 4;
+ }
+ if (val & 0x0c) {
+ val >>= 2;
+ r += 2;
+ }
+ if (val & 0x02)
+ r += 1;
+ return r;
+}
+
+static inline guint8
+s16_to_alaw (gint pcm_val)
+{
+ gint seg;
+ guint8 mask;
+ guint8 aval;
+
+ if (pcm_val >= 0) {
+ mask = 0xD5;
+ } else {
+ mask = 0x55;
+ pcm_val = -pcm_val;
+ if (pcm_val > 0x7fff)
+ pcm_val = 0x7fff;
+ }
+
+ if (pcm_val < 256)
+ aval = pcm_val >> 4;
+ else {
+ /* Convert the scaled magnitude to segment number. */
+ seg = val_seg (pcm_val);
+ aval = (seg << 4) | ((pcm_val >> (seg + 3)) & 0x0f);
+ }
+ return aval ^ mask;
+}
+
+#endif /* GST_ALAW_ENC_USE_TABLE */
+
+GstStaticPadTemplate alaw_enc_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
+ );
+
+GstStaticPadTemplate alaw_enc_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-alaw, "
+ "rate = [ 8000 , 192000 ], " "channels = [ 1 , 2 ]")
+ );
+
+static gboolean
+gst_alaw_enc_start (GstAudioEncoder * audioenc)
+{
+ GstALawEnc *alawenc = GST_ALAW_ENC (audioenc);
+
+ alawenc->channels = 0;
+ alawenc->rate = 0;
+
+ return TRUE;
+}
+
+static gboolean
+gst_alaw_enc_set_format (GstAudioEncoder * audioenc, GstAudioInfo * info)
+{
+ GstCaps *base_caps;
+ GstStructure *structure;
+ GstALawEnc *alawenc = GST_ALAW_ENC (audioenc);
+ gboolean ret;
+
+ alawenc->rate = info->rate;
+ alawenc->channels = info->channels;
+
+ base_caps =
+ gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (audioenc));
+ g_assert (base_caps);
+ base_caps = gst_caps_make_writable (base_caps);
+ g_assert (base_caps);
+
+ structure = gst_caps_get_structure (base_caps, 0);
+ g_assert (structure);
+ gst_structure_set (structure, "rate", G_TYPE_INT, alawenc->rate, NULL);
+ gst_structure_set (structure, "channels", G_TYPE_INT, alawenc->channels,
+ NULL);
+
+ ret = gst_audio_encoder_set_output_format (audioenc, base_caps);
+ gst_caps_unref (base_caps);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_alaw_enc_handle_frame (GstAudioEncoder * audioenc, GstBuffer * buffer)
+{
+ GstALawEnc *alawenc;
+ GstMapInfo inmap, outmap;
+ gint16 *linear_data;
+ gsize linear_size;
+ guint8 *alaw_data;
+ guint alaw_size;
+ GstBuffer *outbuf;
+ GstFlowReturn ret;
+ gint i;
+
+ if (!buffer) {
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+
+ alawenc = GST_ALAW_ENC (audioenc);
+
+ if (!alawenc->rate || !alawenc->channels)
+ goto not_negotiated;
+
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ linear_data = (gint16 *) inmap.data;
+ linear_size = inmap.size;
+
+ alaw_size = linear_size / 2;
+
+ outbuf = gst_audio_encoder_allocate_output_buffer (audioenc, alaw_size);
+
+ g_assert (outbuf);
+
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ alaw_data = outmap.data;
+
+ for (i = 0; i < alaw_size; i++) {
+ alaw_data[i] = s16_to_alaw (linear_data[i]);
+ }
+
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
+
+ ret = gst_audio_encoder_finish_frame (audioenc, outbuf, -1);
+
+done:
+ return ret;
+
+not_negotiated:
+ {
+ GST_DEBUG_OBJECT (alawenc, "no format negotiated");
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
+ }
+}
+
+static void
+gst_alaw_enc_class_init (GstALawEncClass * klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstAudioEncoderClass *audio_encoder_class = GST_AUDIO_ENCODER_CLASS (klass);
+
+ audio_encoder_class->start = GST_DEBUG_FUNCPTR (gst_alaw_enc_start);
+ audio_encoder_class->set_format = GST_DEBUG_FUNCPTR (gst_alaw_enc_set_format);
+ audio_encoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_alaw_enc_handle_frame);
+
+ gst_element_class_add_static_pad_template (element_class,
+ &alaw_enc_src_factory);
+ gst_element_class_add_static_pad_template (element_class,
+ &alaw_enc_sink_factory);
+
+ gst_element_class_set_static_metadata (element_class,
+ "A Law audio encoder", "Codec/Encoder/Audio",
+ "Convert 16bit PCM to 8bit A law",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
+ GST_DEBUG_CATEGORY_INIT (alaw_enc_debug, "alawenc", 0, "A Law audio encoder");
+}
+
+static void
+gst_alaw_enc_init (GstALawEnc * alawenc)
+{
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_AUDIO_ENCODER_SINK_PAD (alawenc));
+}
diff --git a/gst/law/alaw-encode.h b/gst/law/alaw-encode.h
new file mode 100644
index 0000000000..59b9561b16
--- /dev/null
+++ b/gst/law/alaw-encode.h
@@ -0,0 +1,59 @@
+/* GStreamer PCM to A-Law conversion
+ * Copyright (C) 2000 by Abramo Bagnara <abramo@alsa-project.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_ALAW_ENCODE_H__
+#define __GST_ALAW_ENCODE_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_ALAW_ENC \
+ (gst_alaw_enc_get_type())
+#define GST_ALAW_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ALAW_ENC,GstALawEnc))
+#define GST_ALAW_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ALAW_ENC,GstALawEncClass))
+#define GST_IS_ALAW_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ALAW_ENC))
+#define GST_IS_ALAW_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ALAW_ENC))
+
+typedef struct _GstALawEnc GstALawEnc;
+typedef struct _GstALawEncClass GstALawEncClass;
+
+struct _GstALawEnc {
+ GstAudioEncoder encoder;
+
+ gint channels;
+ gint rate;
+};
+
+struct _GstALawEncClass {
+ GstAudioEncoderClass parent_class;
+};
+
+GType gst_alaw_enc_get_type(void);
+
+GST_ELEMENT_REGISTER_DECLARE (alawenc);
+
+G_END_DECLS
+
+#endif /* __GST_ALAW_ENCODE_H__ */
diff --git a/gst/law/alaw.c b/gst/law/alaw.c
new file mode 100644
index 0000000000..d1c53b26bd
--- /dev/null
+++ b/gst/law/alaw.c
@@ -0,0 +1,44 @@
+/* GStreamer PCM/A-Law conversions
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/audio/audio.h>
+
+#include "alaw-encode.h"
+#include "alaw-decode.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (alawenc, plugin);
+ ret |= GST_ELEMENT_REGISTER (alawdec, plugin);
+
+ return ret;
+}
+
+/* FIXME 0.11: merge alaw and mulaw into one plugin? */
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ alaw,
+ "ALaw audio conversion routines",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/law/meson.build b/gst/law/meson.build
new file mode 100644
index 0000000000..6757ccf30c
--- /dev/null
+++ b/gst/law/meson.build
@@ -0,0 +1,21 @@
+gstalaw = library('gstalaw',
+ 'alaw-encode.c', 'alaw-decode.c', 'alaw.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstaudio_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstalaw, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstalaw]
+
+gstmulaw = library('gstmulaw',
+ 'mulaw-encode.c', 'mulaw-conversion.c', 'mulaw-decode.c', 'mulaw.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstaudio_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstmulaw, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstmulaw]
diff --git a/gst/law/mulaw-conversion.c b/gst/law/mulaw-conversion.c
new file mode 100644
index 0000000000..d9d649c035
--- /dev/null
+++ b/gst/law/mulaw-conversion.c
@@ -0,0 +1,122 @@
+/*
+ * This routine converts from linear to ulaw
+ * 29 September 1989
+ *
+ * Craig Reese: IDA/Supercomputing Research Center
+ * Joe Campbell: Department of Defense
+ *
+ * References:
+ * 1) CCITT Recommendation G.711 (very difficult to follow)
+ * 2) "A New Digital Technique for Implementation of Any
+ * Continuous PCM Companding Law," Villeret, Michel,
+ * et al. 1973 IEEE Int. Conf. on Communications, Vol 1,
+ * 1973, pg. 11.12-11.17
+ * 3) MIL-STD-188-113,"Interoperability and Performance Standards
+ *      for Analog-to-Digital Conversion Techniques,"
+ * 17 February 1987
+ *
+ * Input: Signed 16 bit linear sample
+ * Output: 8 bit ulaw sample
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <glib.h>
+
+#include "mulaw-conversion.h"
+
+#undef ZEROTRAP /* turn on the trap as per the MIL-STD */
+#define BIAS 0x84 /* define the add-in bias for 16 bit samples */
+#define CLIP 32635
+
+void
+mulaw_encode (gint16 * in, guint8 * out, gint numsamples)
+{
+ static const gint16 exp_lut[256] = {
+ 0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3,
+ 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
+ 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
+ };
+ gint16 sign, exponent, mantissa;
+ gint16 sample;
+ guint8 ulawbyte;
+ gint i;
+
+ for (i = 0; i < numsamples; i++) {
+ sample = in[i];
+ /* get the sample into sign-magnitude */
+ sign = (sample >> 8) & 0x80; /* set aside the sign */
+ if (sign != 0) {
+ sample = -sample; /* get magnitude */
+ }
+ /* sample can be zero because we can overflow in the inversion,
+ * checking against the unsigned version solves this */
+ if (((guint16) sample) > CLIP)
+ sample = CLIP; /* clip the magnitude */
+
+ /* convert from 16 bit linear to ulaw */
+ sample = sample + BIAS;
+ exponent = exp_lut[(sample >> 7) & 0xFF];
+ mantissa = (sample >> (exponent + 3)) & 0x0F;
+ ulawbyte = ~(sign | (exponent << 4) | mantissa);
+#ifdef ZEROTRAP
+ if (ulawbyte == 0)
+ ulawbyte = 0x02; /* optional CCITT trap */
+#endif
+ out[i] = ulawbyte;
+ }
+}
+
+/*
+ * This routine converts from ulaw to 16 bit linear
+ * 29 September 1989
+ *
+ * Craig Reese: IDA/Supercomputing Research Center
+ *
+ * References:
+ * 1) CCITT Recommendation G.711 (very difficult to follow)
+ * 2) MIL-STD-188-113,"Interoperability and Performance Standards
+ *      for Analog-to-Digital Conversion Techniques,"
+ * 17 February 1987
+ *
+ * Input: 8 bit ulaw sample
+ * Output: signed 16 bit linear sample
+ */
+
+void
+mulaw_decode (guint8 * in, gint16 * out, gint numsamples)
+{
+ static const gint16 exp_lut[8] =
+ { 0, 132, 396, 924, 1980, 4092, 8316, 16764 };
+ gint16 sign, exponent, mantissa;
+ guint8 ulawbyte;
+ gint16 linear;
+ gint i;
+
+ for (i = 0; i < numsamples; i++) {
+ ulawbyte = in[i];
+ ulawbyte = ~ulawbyte;
+ sign = (ulawbyte & 0x80);
+ exponent = (ulawbyte >> 4) & 0x07;
+ mantissa = ulawbyte & 0x0F;
+ linear = exp_lut[exponent] + (mantissa << (exponent + 3));
+ if (sign != 0)
+ linear = -linear;
+ out[i] = linear;
+ }
+}
diff --git a/gst/law/mulaw-conversion.h b/gst/law/mulaw-conversion.h
new file mode 100644
index 0000000000..38367ad688
--- /dev/null
+++ b/gst/law/mulaw-conversion.h
@@ -0,0 +1,12 @@
+#ifndef _GST_ULAW_CONVERSION_H
+#define _GST_ULAW_CONVERSION_H
+
+#include <glib.h>
+
+void
+mulaw_encode(gint16* in, guint8* out, gint numsamples);
+void
+mulaw_decode(guint8* in,gint16* out,gint numsamples);
+
+#endif /* _GST_ULAW_CONVERSION_H */
+
diff --git a/gst/law/mulaw-decode.c b/gst/law/mulaw-decode.c
new file mode 100644
index 0000000000..2783376f3f
--- /dev/null
+++ b/gst/law/mulaw-decode.c
@@ -0,0 +1,201 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-mulawdec
+ * @title: mulawdec
+ *
+ * This element decodes mulaw audio. Mulaw coding is also known as G.711.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <gst/gst.h>
+
+#include "mulaw-decode.h"
+#include "mulaw-conversion.h"
+
+extern GstStaticPadTemplate mulaw_dec_src_factory;
+extern GstStaticPadTemplate mulaw_dec_sink_factory;
+
+static gboolean gst_mulawdec_set_format (GstAudioDecoder * dec, GstCaps * caps);
+static GstFlowReturn gst_mulawdec_handle_frame (GstAudioDecoder * dec,
+ GstBuffer * buffer);
+
+
+/* Stereo signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define INT_FORMAT "S16LE"
+#else
+#define INT_FORMAT "S16BE"
+#endif
+
+GstStaticPadTemplate mulaw_dec_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " INT_FORMAT ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
+ );
+
+GstStaticPadTemplate mulaw_dec_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-mulaw, "
+ "rate = [ 8000 , 192000 ], " "channels = [ 1 , 2 ]")
+ );
+
+#define gst_mulawdec_parent_class parent_class
+G_DEFINE_TYPE (GstMuLawDec, gst_mulawdec, GST_TYPE_AUDIO_DECODER);
+GST_ELEMENT_REGISTER_DEFINE (mulawdec, "mulawdec", GST_RANK_PRIMARY,
+ GST_TYPE_MULAWDEC);
+
+static gboolean
+gst_mulawdec_set_format (GstAudioDecoder * dec, GstCaps * caps)
+{
+ GstMuLawDec *mulawdec = GST_MULAWDEC (dec);
+ GstStructure *structure;
+ int rate, channels;
+ GstAudioInfo info;
+
+ structure = gst_caps_get_structure (caps, 0);
+ if (!structure) {
+ GST_ERROR ("failed to get structure from caps");
+ goto error_failed_get_structure;
+ }
+
+ if (!gst_structure_get_int (structure, "rate", &rate)) {
+ GST_ERROR ("failed to find field rate in input caps");
+ goto error_failed_find_rate;
+ }
+
+ if (!gst_structure_get_int (structure, "channels", &channels)) {
+ GST_ERROR ("failed to find field channels in input caps");
+ goto error_failed_find_channel;
+ }
+
+ gst_audio_info_init (&info);
+ gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, rate, channels, NULL);
+
+ GST_DEBUG_OBJECT (mulawdec, "rate=%d, channels=%d", rate, channels);
+
+ return gst_audio_decoder_set_output_format (dec, &info);
+
+error_failed_find_channel:
+error_failed_find_rate:
+error_failed_get_structure:
+ return FALSE;
+}
+
+static GstFlowReturn
+gst_mulawdec_handle_frame (GstAudioDecoder * dec, GstBuffer * buffer)
+{
+ GstMapInfo inmap, outmap;
+ gint16 *linear_data;
+ guint8 *mulaw_data;
+ gsize mulaw_size, linear_size;
+ GstBuffer *outbuf;
+
+ if (!buffer) {
+ return GST_FLOW_OK;
+ }
+
+ if (!gst_buffer_map (buffer, &inmap, GST_MAP_READ)) {
+ GST_ERROR ("failed to map input buffer");
+ goto error_failed_map_input_buffer;
+ }
+
+ mulaw_data = inmap.data;
+ mulaw_size = inmap.size;
+
+ linear_size = mulaw_size * 2;
+
+ outbuf = gst_audio_decoder_allocate_output_buffer (dec, linear_size);
+ if (!gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE)) {
+ GST_ERROR ("failed to map input buffer");
+ goto error_failed_map_output_buffer;
+ }
+
+ linear_data = (gint16 *) outmap.data;
+
+ mulaw_decode (mulaw_data, linear_data, mulaw_size);
+
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
+
+ return gst_audio_decoder_finish_frame (dec, outbuf, -1);
+
+error_failed_map_output_buffer:
+ gst_buffer_unref (outbuf);
+ gst_buffer_unmap (buffer, &inmap);
+
+error_failed_map_input_buffer:
+ return GST_FLOW_ERROR;
+}
+
+static gboolean
+gst_mulawdec_start (GstAudioDecoder * dec)
+{
+ gst_audio_decoder_set_estimate_rate (dec, TRUE);
+
+ return TRUE;
+}
+
+static void
+gst_mulawdec_class_init (GstMuLawDecClass * klass)
+{
+ GstElementClass *element_class = (GstElementClass *) klass;
+ GstAudioDecoderClass *audiodec_class = GST_AUDIO_DECODER_CLASS (klass);
+
+ gst_element_class_add_static_pad_template (element_class,
+ &mulaw_dec_src_factory);
+ gst_element_class_add_static_pad_template (element_class,
+ &mulaw_dec_sink_factory);
+
+
+ audiodec_class->start = GST_DEBUG_FUNCPTR (gst_mulawdec_start);
+ audiodec_class->set_format = GST_DEBUG_FUNCPTR (gst_mulawdec_set_format);
+ audiodec_class->handle_frame = GST_DEBUG_FUNCPTR (gst_mulawdec_handle_frame);
+
+ gst_element_class_set_static_metadata (element_class, "Mu Law audio decoder",
+ "Codec/Decoder/Audio",
+ "Convert 8bit mu law to 16bit PCM",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
+}
+
+static void
+gst_mulawdec_init (GstMuLawDec * mulawdec)
+{
+ gst_audio_decoder_set_needs_format (GST_AUDIO_DECODER (mulawdec), TRUE);
+ gst_audio_decoder_set_use_default_pad_acceptcaps (GST_AUDIO_DECODER_CAST
+ (mulawdec), TRUE);
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_AUDIO_DECODER_SINK_PAD (mulawdec));
+}
diff --git a/gst/law/mulaw-decode.h b/gst/law/mulaw-decode.h
new file mode 100644
index 0000000000..b1b284e777
--- /dev/null
+++ b/gst/law/mulaw-decode.h
@@ -0,0 +1,55 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MULAWDECODE_H__
+#define __GST_MULAWDECODE_H__
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include <gst/audio/gstaudiodecoder.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_MULAWDEC \
+ (gst_mulawdec_get_type())
+#define GST_MULAWDEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULAWDEC,GstMuLawDec))
+#define GST_MULAWDEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULAWDEC,GstMuLawDecClass))
+#define GST_IS_MULAWDEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULAWDEC))
+#define GST_IS_MULAWDEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULAWDEC))
+typedef struct _GstMuLawDec GstMuLawDec;
+typedef struct _GstMuLawDecClass GstMuLawDecClass;
+
+struct _GstMuLawDec
+{
+ GstAudioDecoder element;
+};
+
+struct _GstMuLawDecClass
+{
+ GstAudioDecoderClass parent_class;
+};
+
+GType gst_mulawdec_get_type (void);
+GST_ELEMENT_REGISTER_DECLARE (mulawdec);
+
+G_END_DECLS
+#endif /* __GST_MULAWDECODE_H__ */
diff --git a/gst/law/mulaw-encode.c b/gst/law/mulaw-encode.c
new file mode 100644
index 0000000000..8f951d46ff
--- /dev/null
+++ b/gst/law/mulaw-encode.c
@@ -0,0 +1,235 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-mulawenc
+ * @title: mulawenc
+ *
+ * This element encodes mulaw audio. Mulaw coding is also known as G.711.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+#include "mulaw-encode.h"
+#include "mulaw-conversion.h"
+
+extern GstStaticPadTemplate mulaw_enc_src_factory;
+extern GstStaticPadTemplate mulaw_enc_sink_factory;
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define INT_FORMAT "S16LE"
+#else
+#define INT_FORMAT "S16BE"
+#endif
+
+GstStaticPadTemplate mulaw_enc_sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " INT_FORMAT ", "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 8000, 192000 ], " "channels = (int) [ 1, 2 ]")
+ );
+
+GstStaticPadTemplate mulaw_enc_src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-mulaw, "
+ "rate = [ 8000 , 192000 ], " "channels = [ 1 , 2 ]")
+ );
+/* Stereo signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static gboolean gst_mulawenc_start (GstAudioEncoder * audioenc);
+static gboolean gst_mulawenc_set_format (GstAudioEncoder * enc,
+ GstAudioInfo * info);
+static GstFlowReturn gst_mulawenc_handle_frame (GstAudioEncoder * enc,
+ GstBuffer * buffer);
+static void gst_mulawenc_set_tags (GstMuLawEnc * mulawenc);
+
+
+#define gst_mulawenc_parent_class parent_class
+G_DEFINE_TYPE (GstMuLawEnc, gst_mulawenc, GST_TYPE_AUDIO_ENCODER);
+GST_ELEMENT_REGISTER_DEFINE (mulawenc, "mulawenc", GST_RANK_PRIMARY,
+ GST_TYPE_MULAWENC);
+
+/*static guint gst_stereo_signals[LAST_SIGNAL] = { 0 }; */
+
+static gboolean
+gst_mulawenc_start (GstAudioEncoder * audioenc)
+{
+ GstMuLawEnc *mulawenc = GST_MULAWENC (audioenc);
+
+ mulawenc->channels = 0;
+ mulawenc->rate = 0;
+
+ return TRUE;
+}
+
+
+static void
+gst_mulawenc_set_tags (GstMuLawEnc * mulawenc)
+{
+ GstTagList *taglist;
+ guint bitrate;
+
+ /* bitrate of mulaw is 8 bits/sample * sample rate * number of channels */
+ bitrate = 8 * mulawenc->rate * mulawenc->channels;
+
+ taglist = gst_tag_list_new_empty ();
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_MAXIMUM_BITRATE, bitrate, NULL);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_MINIMUM_BITRATE, bitrate, NULL);
+ gst_tag_list_add (taglist, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, bitrate, NULL);
+
+ gst_audio_encoder_merge_tags (GST_AUDIO_ENCODER (mulawenc),
+ taglist, GST_TAG_MERGE_REPLACE);
+
+ gst_tag_list_unref (taglist);
+}
+
+
+static gboolean
+gst_mulawenc_set_format (GstAudioEncoder * audioenc, GstAudioInfo * info)
+{
+ GstCaps *base_caps;
+ GstStructure *structure;
+ GstMuLawEnc *mulawenc = GST_MULAWENC (audioenc);
+ gboolean ret;
+
+ mulawenc->rate = info->rate;
+ mulawenc->channels = info->channels;
+
+ base_caps =
+ gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (audioenc));
+ g_assert (base_caps);
+ base_caps = gst_caps_make_writable (base_caps);
+ g_assert (base_caps);
+
+ structure = gst_caps_get_structure (base_caps, 0);
+ g_assert (structure);
+ gst_structure_set (structure, "rate", G_TYPE_INT, mulawenc->rate, NULL);
+ gst_structure_set (structure, "channels", G_TYPE_INT, mulawenc->channels,
+ NULL);
+
+ gst_mulawenc_set_tags (mulawenc);
+
+ ret = gst_audio_encoder_set_output_format (audioenc, base_caps);
+ gst_caps_unref (base_caps);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_mulawenc_handle_frame (GstAudioEncoder * audioenc, GstBuffer * buffer)
+{
+ GstMuLawEnc *mulawenc;
+ GstMapInfo inmap, outmap;
+ gint16 *linear_data;
+ gsize linear_size;
+ guint8 *mulaw_data;
+ guint mulaw_size;
+ GstBuffer *outbuf;
+ GstFlowReturn ret;
+
+ if (!buffer) {
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+
+ mulawenc = GST_MULAWENC (audioenc);
+
+ if (!mulawenc->rate || !mulawenc->channels)
+ goto not_negotiated;
+
+ gst_buffer_map (buffer, &inmap, GST_MAP_READ);
+ linear_data = (gint16 *) inmap.data;
+ linear_size = inmap.size;
+
+ mulaw_size = linear_size / 2;
+
+ outbuf = gst_audio_encoder_allocate_output_buffer (audioenc, mulaw_size);
+
+ g_assert (outbuf);
+
+ gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE);
+ mulaw_data = outmap.data;
+
+ mulaw_encode (linear_data, mulaw_data, mulaw_size);
+
+ gst_buffer_unmap (outbuf, &outmap);
+ gst_buffer_unmap (buffer, &inmap);
+
+ ret = gst_audio_encoder_finish_frame (audioenc, outbuf, -1);
+
+done:
+
+ return ret;
+
+not_negotiated:
+ {
+ GST_DEBUG_OBJECT (mulawenc, "no format negotiated");
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
+ }
+}
+
+
+
+static void
+gst_mulawenc_class_init (GstMuLawEncClass * klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstAudioEncoderClass *audio_encoder_class = GST_AUDIO_ENCODER_CLASS (klass);
+
+ audio_encoder_class->start = GST_DEBUG_FUNCPTR (gst_mulawenc_start);
+ audio_encoder_class->set_format = GST_DEBUG_FUNCPTR (gst_mulawenc_set_format);
+ audio_encoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_mulawenc_handle_frame);
+
+ gst_element_class_add_static_pad_template (element_class,
+ &mulaw_enc_src_factory);
+ gst_element_class_add_static_pad_template (element_class,
+ &mulaw_enc_sink_factory);
+
+ gst_element_class_set_static_metadata (element_class, "Mu Law audio encoder",
+ "Codec/Encoder/Audio",
+ "Convert 16bit PCM to 8bit mu law",
+ "Zaheer Abbas Merali <zaheerabbas at merali dot org>");
+}
+
+static void
+gst_mulawenc_init (GstMuLawEnc * mulawenc)
+{
+ GST_PAD_SET_ACCEPT_TEMPLATE (GST_AUDIO_ENCODER_SINK_PAD (mulawenc));
+}
diff --git a/gst/law/mulaw-encode.h b/gst/law/mulaw-encode.h
new file mode 100644
index 0000000000..cda2a54396
--- /dev/null
+++ b/gst/law/mulaw-encode.h
@@ -0,0 +1,59 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_MULAWENCODE_H__
+#define __GST_MULAWENCODE_H__
+
+#include <gst/gst.h>
+#include <gst/audio/gstaudioencoder.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_MULAWENC \
+ (gst_mulawenc_get_type())
+#define GST_MULAWENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULAWENC,GstMuLawEnc))
+#define GST_MULAWENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULAWENC,GstMuLawEncClass))
+#define GST_IS_MULAWENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULAWENC))
+#define GST_IS_MULAWENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULAWENC))
+typedef struct _GstMuLawEnc GstMuLawEnc;
+typedef struct _GstMuLawEncClass GstMuLawEncClass;
+
+struct _GstMuLawEnc
+{
+ GstAudioEncoder element;
+
+ gint channels;
+ gint rate;
+};
+
+struct _GstMuLawEncClass
+{
+ GstAudioEncoderClass parent_class;
+};
+
+GType gst_mulawenc_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (mulawenc);
+
+G_END_DECLS
+#endif /* __GST_MULAWENCODE_H__ */
diff --git a/gst/law/mulaw.c b/gst/law/mulaw.c
new file mode 100644
index 0000000000..14bfc28d51
--- /dev/null
+++ b/gst/law/mulaw.c
@@ -0,0 +1,46 @@
+/* GStreamer PCM/Mu-Law conversions
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include "mulaw-encode.h"
+#include "mulaw-decode.h"
+
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+#define INT_FORMAT "S16LE"
+#else
+#define INT_FORMAT "S16BE"
+#endif
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (mulawenc, plugin);
+ ret |= GST_ELEMENT_REGISTER (mulawdec, plugin);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ mulaw,
+ "MuLaw audio conversion routines",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/level/gstlevel.c b/gst/level/gstlevel.c
new file mode 100644
index 0000000000..968fd4fe72
--- /dev/null
+++ b/gst/level/gstlevel.c
@@ -0,0 +1,832 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2000,2001,2002,2003,2005
+ * Thomas Vander Stichele <thomas at apestaart dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-level
+ * @title: level
+ *
+ * Level analyses incoming audio buffers and, if the #GstLevel:message property
+ * is %TRUE, generates an element message named
+ * `level` after each interval of time given by the #GstLevel:interval property.
+ * The message's structure contains these fields:
+ *
+ * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
+ * * #GstClockTime `stream-time`: the stream time of the buffer.
+ * * #GstClockTime `running-time`: the running_time of the buffer.
+ * * #GstClockTime `duration`: the duration of the buffer.
+ * * #GstClockTime `endtime`: the end time of the buffer that triggered the message as
+ * stream time (this is deprecated, as it can be calculated from stream-time + duration)
+ * * #GValueArray of #gdouble `peak`: the peak power level in dB for each channel
+ * * #GValueArray of #gdouble `decay`: the decaying peak power level in dB for each channel
+ * The decaying peak level follows the peak level, but starts dropping if no
+ * new peak is reached after the time given by the #GstLevel:peak-ttl.
+ * When the decaying peak level drops, it does so at the decay rate as
+ * specified by the #GstLevel:peak-falloff.
+ * * #GValueArray of #gdouble `rms`: the Root Mean Square (or average power) level in dB
+ * for each channel
+ *
+ * ## Example application
+ *
+ * {{ tests/examples/level/level-example.c }}
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include <string.h>
+#include <math.h>
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+#include "gstlevel.h"
+
+GST_DEBUG_CATEGORY_STATIC (level_debug);
+#define GST_CAT_DEFAULT level_debug
+
+#define EPSILON 1e-35f
+
+static GstStaticPadTemplate sink_template_factory =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { S8, " GST_AUDIO_NE (S16) ", " GST_AUDIO_NE (S32)
+ ", " GST_AUDIO_NE (F32) "," GST_AUDIO_NE (F64) " },"
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+static GstStaticPadTemplate src_template_factory =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) { S8, " GST_AUDIO_NE (S16) ", " GST_AUDIO_NE (S32)
+ ", " GST_AUDIO_NE (F32) "," GST_AUDIO_NE (F64) " },"
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+enum
+{
+ PROP_0,
+ PROP_POST_MESSAGES,
+ PROP_MESSAGE,
+ PROP_INTERVAL,
+ PROP_PEAK_TTL,
+ PROP_PEAK_FALLOFF,
+ PROP_AUDIO_LEVEL_META,
+};
+
+#define gst_level_parent_class parent_class
+G_DEFINE_TYPE (GstLevel, gst_level, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (level, "level", GST_RANK_NONE, GST_TYPE_LEVEL);
+
+static void gst_level_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_level_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_level_finalize (GObject * obj);
+
+static gboolean gst_level_set_caps (GstBaseTransform * trans, GstCaps * in,
+ GstCaps * out);
+static gboolean gst_level_start (GstBaseTransform * trans);
+static GstFlowReturn gst_level_transform_ip (GstBaseTransform * trans,
+ GstBuffer * in);
+static void gst_level_post_message (GstLevel * filter);
+static gboolean gst_level_sink_event (GstBaseTransform * trans,
+ GstEvent * event);
+static void gst_level_recalc_interval_frames (GstLevel * level);
+
+/* Class init: install properties, pad templates and element metadata, and
+ * wire up the GstBaseTransform virtual methods. */
+static void
+gst_level_class_init (GstLevelClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
+
+ gobject_class->set_property = gst_level_set_property;
+ gobject_class->get_property = gst_level_get_property;
+ gobject_class->finalize = gst_level_finalize;
+
+ /**
+ * GstLevel:post-messages:
+ *
+ * Post messages on the bus with level information.
+ *
+ * Since: 1.1.0
+ */
+ g_object_class_install_property (gobject_class, PROP_POST_MESSAGES,
+ g_param_spec_boolean ("post-messages", "Post Messages",
+ "Whether to post a 'level' element message on the bus for each "
+ "passed interval", TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /* FIXME(2.0): remove this property */
+ /**
+ * GstLevel:message:
+ *
+ * Post messages on the bus with level information.
+ *
+ * Deprecated: use the #GstLevel:post-messages property
+ */
+#ifndef GST_REMOVE_DEPRECATED
+ g_object_class_install_property (gobject_class, PROP_MESSAGE,
+ g_param_spec_boolean ("message", "message",
+ "Post a 'level' message for each passed interval "
+ "(deprecated, use the post-messages property instead)", TRUE,
+ G_PARAM_READWRITE | G_PARAM_DEPRECATED | G_PARAM_STATIC_STRINGS));
+#endif
+ g_object_class_install_property (gobject_class, PROP_INTERVAL,
+ g_param_spec_uint64 ("interval", "Interval",
+ "Interval of time between message posts (in nanoseconds)",
+ 1, G_MAXUINT64, GST_SECOND / 10,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PEAK_TTL,
+ g_param_spec_uint64 ("peak-ttl", "Peak TTL",
+ "Time To Live of decay peak before it falls back (in nanoseconds)",
+ 0, G_MAXUINT64, GST_SECOND / 10 * 3,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PEAK_FALLOFF,
+ g_param_spec_double ("peak-falloff", "Peak Falloff",
+ "Decay rate of decay peak after TTL (in dB/sec)",
+ 0.0, G_MAXDOUBLE, 10.0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstLevel:audio-level-meta:
+ *
+ * If %TRUE, generate or update GstAudioLevelMeta on output buffers.
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_AUDIO_LEVEL_META,
+ g_param_spec_boolean ("audio-level-meta", "Audio Level Meta",
+ "Set GstAudioLevelMeta on buffers", FALSE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ GST_DEBUG_CATEGORY_INIT (level_debug, "level", 0, "Level calculation");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &sink_template_factory);
+ gst_element_class_add_static_pad_template (element_class,
+ &src_template_factory);
+ gst_element_class_set_static_metadata (element_class, "Level",
+ "Filter/Analyzer/Audio",
+ "RMS/Peak/Decaying Peak Level messager for audio/raw",
+ "Thomas Vander Stichele <thomas at apestaart dot org>");
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_level_set_caps);
+ trans_class->start = GST_DEBUG_FUNCPTR (gst_level_start);
+ trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_level_transform_ip);
+ trans_class->sink_event = GST_DEBUG_FUNCPTR (gst_level_sink_event);
+}
+
+/* Enable or disable base-transform passthrough depending on the
+ * audio-level-meta property. */
+static void
+configure_passthrough (GstLevel * self, gboolean audio_level_meta)
+{
+  gboolean passthrough = !audio_level_meta;
+
+  /* Adding GstAudioLevelMeta requires a writable buffer, so passthrough
+   * must be off whenever audio-level-meta is enabled.
+   * gst_base_transform_set_passthrough() takes the object lock itself. */
+  gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (self), passthrough);
+}
+
+/* Instance init: set property defaults and clear all per-channel state.
+ * The per-channel arrays are only allocated once caps are known. */
+static void
+gst_level_init (GstLevel * filter)
+{
+  /* property defaults */
+  filter->interval = GST_SECOND / 10;
+  filter->decay_peak_ttl = GST_SECOND / 10 * 3;
+  filter->decay_peak_falloff = 10.0;    /* dB per second */
+  filter->post_messages = TRUE;
+
+  /* per-channel arrays, allocated in set_caps() */
+  filter->CS = NULL;
+  filter->peak = NULL;
+  filter->last_peak = NULL;
+  filter->decay_peak = NULL;
+  filter->decay_peak_base = NULL;
+  filter->decay_peak_age = NULL;
+
+  /* format-specific analysis function, chosen in set_caps() */
+  filter->process = NULL;
+
+  gst_audio_info_init (&filter->info);
+
+  gst_base_transform_set_gap_aware (GST_BASE_TRANSFORM (filter), TRUE);
+  configure_passthrough (filter, filter->audio_level_meta);
+}
+
+/* Finalize: release the per-channel state arrays and chain up. */
+static void
+gst_level_finalize (GObject * obj)
+{
+  GstLevel *filter = GST_LEVEL (obj);
+
+  g_free (filter->CS);
+  filter->CS = NULL;
+  g_free (filter->peak);
+  filter->peak = NULL;
+  g_free (filter->last_peak);
+  filter->last_peak = NULL;
+  g_free (filter->decay_peak);
+  filter->decay_peak = NULL;
+  g_free (filter->decay_peak_base);
+  filter->decay_peak_base = NULL;
+  g_free (filter->decay_peak_age);
+  filter->decay_peak_age = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (obj);
+}
+
+/* GObject property setter. All property fields are protected by the object
+ * lock; see gst_level_get_property() for the read side. */
+static void
+gst_level_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstLevel *filter = GST_LEVEL (object);
+
+ GST_OBJECT_LOCK (filter);
+
+ switch (prop_id) {
+ case PROP_POST_MESSAGES:
+ /* fall-through: "message" is the deprecated alias of "post-messages" */
+ case PROP_MESSAGE:
+ filter->post_messages = g_value_get_boolean (value);
+ break;
+ case PROP_INTERVAL:
+ filter->interval = g_value_get_uint64 (value);
+ /* only recompute the frame count once caps (and thus a rate) are set */
+ if (GST_AUDIO_INFO_RATE (&filter->info)) {
+ gst_level_recalc_interval_frames (filter);
+ }
+ break;
+ case PROP_PEAK_TTL:
+ /* stored as gdouble nanoseconds even though the property is uint64 */
+ filter->decay_peak_ttl =
+ gst_guint64_to_gdouble (g_value_get_uint64 (value));
+ break;
+ case PROP_PEAK_FALLOFF:
+ filter->decay_peak_falloff = g_value_get_double (value);
+ break;
+ case PROP_AUDIO_LEVEL_META:
+ filter->audio_level_meta = g_value_get_boolean (value);
+ /* configure_passthrough() takes the object lock internally, so drop
+ * and re-take ours around the call to avoid recursive locking */
+ GST_OBJECT_UNLOCK (filter);
+ configure_passthrough (filter, g_value_get_boolean (value));
+ GST_OBJECT_LOCK (filter);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ GST_OBJECT_UNLOCK (filter);
+}
+
+/* GObject property getter; reads are protected by the object lock. */
+static void
+gst_level_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstLevel *filter = GST_LEVEL (object);
+
+ GST_OBJECT_LOCK (filter);
+
+ switch (prop_id) {
+ case PROP_POST_MESSAGES:
+ /* fall-through: "message" is the deprecated alias of "post-messages" */
+ case PROP_MESSAGE:
+ g_value_set_boolean (value, filter->post_messages);
+ break;
+ case PROP_INTERVAL:
+ g_value_set_uint64 (value, filter->interval);
+ break;
+ case PROP_PEAK_TTL:
+ /* NOTE(review): decay_peak_ttl is a gdouble field implicitly converted
+ * back to uint64 here — works for the valid range, confirm intended */
+ g_value_set_uint64 (value, filter->decay_peak_ttl);
+ break;
+ case PROP_PEAK_FALLOFF:
+ g_value_set_double (value, filter->decay_peak_falloff);
+ break;
+ case PROP_AUDIO_LEVEL_META:
+ g_value_set_boolean (value, filter->audio_level_meta);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ GST_OBJECT_UNLOCK (filter);
+}
+
+
+/* process one (interleaved) channel of incoming samples
+ * calculate square sum of samples
+ * normalize and average over number of samples
+ * returns a normalized cumulative square value, which can be averaged
+ * to return the average power as a double between 0 and 1
+ * also returns the normalized peak power (square of the highest amplitude)
+ *
+ * caller must assure num is a multiple of channels
+ * samples for multiple channels are interleaved
+ * input sample data enters in *in_data and is not modified
+ * this filter only accepts signed audio data, so mid level is always 0
+ *
+ * for integers, this code considers the non-existent positive max value to be
+ * full-scale; so max-1 will not map to 1.0
+ */
+
+#define DEFINE_INT_LEVEL_CALCULATOR(TYPE, RESOLUTION) \
+static void inline \
+gst_level_calculate_##TYPE (gpointer data, guint num, guint channels, \
+ gdouble *NCS, gdouble *NPS) \
+{ \
+ TYPE * in = (TYPE *)data; \
+ register guint j; \
+ gdouble squaresum = 0.0; /* square sum of the input samples */ \
+ register gdouble square = 0.0; /* Square */ \
+ register gdouble peaksquare = 0.0; /* Peak Square Sample */ \
+ gdouble normalizer; /* divisor to get a [-1.0, 1.0] range */ \
+ \
+ /* *NCS = 0.0; Normalized Cumulative Square */ \
+ /* *NPS = 0.0; Normalized Peak Square */ \
+ \
+ for (j = 0; j < num; j += channels) { \
+ square = ((gdouble) in[j]) * in[j]; \
+ if (square > peaksquare) peaksquare = square; \
+ squaresum += square; \
+ } \
+ \
+ normalizer = (gdouble) (G_GINT64_CONSTANT(1) << (RESOLUTION * 2)); \
+ *NCS = squaresum / normalizer; \
+ *NPS = peaksquare / normalizer; \
+}
+
+/* Instantiate the integer calculators. RESOLUTION is the number of value
+ * bits, so the normalizer 2^(2*RESOLUTION) maps the square of full scale
+ * to 1.0 (the non-existent positive max is treated as full scale, per the
+ * comment above). */
+DEFINE_INT_LEVEL_CALCULATOR (gint32, 31);
+DEFINE_INT_LEVEL_CALCULATOR (gint16, 15);
+DEFINE_INT_LEVEL_CALCULATOR (gint8, 7);
+
+/* FIXME: use orc to calculate squaresums? */
+#define DEFINE_FLOAT_LEVEL_CALCULATOR(TYPE) \
+static void inline \
+gst_level_calculate_##TYPE (gpointer data, guint num, guint channels, \
+ gdouble *NCS, gdouble *NPS) \
+{ \
+ TYPE * in = (TYPE *)data; \
+ register guint j; \
+ gdouble squaresum = 0.0; /* square sum of the input samples */ \
+ register gdouble square = 0.0; /* Square */ \
+ register gdouble peaksquare = 0.0; /* Peak Square Sample */ \
+ \
+ /* *NCS = 0.0; Normalized Cumulative Square */ \
+ /* *NPS = 0.0; Normalized Peak Square */ \
+ \
+ /* orc_level_squaresum_f64(&squaresum,in,num); */ \
+ for (j = 0; j < num; j += channels) { \
+ square = ((gdouble) in[j]) * in[j]; \
+ if (square > peaksquare) peaksquare = square; \
+ squaresum += square; \
+ } \
+ \
+ *NCS = squaresum; \
+ *NPS = peaksquare; \
+}
+
+/* Float samples are nominally already in [-1.0, 1.0], so no normalizer
+ * is applied. */
+DEFINE_FLOAT_LEVEL_CALCULATOR (gfloat);
+DEFINE_FLOAT_LEVEL_CALCULATOR (gdouble);
+
+/* we would need stride to deinterleave also
+static void inline
+gst_level_calculate_gdouble (gpointer data, guint num, guint channels,
+ gdouble *NCS, gdouble *NPS)
+{
+ orc_level_squaresum_f64(NCS,(gdouble *)data,num);
+ *NPS = 0.0;
+}
+*/
+
+/* Convert the interval property (nanoseconds) into a frame count at the
+ * current sample rate, clamping to at least one frame.
+ * Must be called with the object lock held. */
+static void
+gst_level_recalc_interval_frames (GstLevel * level)
+{
+  guint rate = GST_AUDIO_INFO_RATE (&level->info);
+  GstClockTime interval = level->interval;
+  guint frames = GST_CLOCK_TIME_TO_FRAMES (interval, rate);
+
+  if (frames == 0) {
+    GST_WARNING_OBJECT (level, "interval %" GST_TIME_FORMAT " is too small, "
+        "should be at least %" GST_TIME_FORMAT " for sample rate %u",
+        GST_TIME_ARGS (interval),
+        GST_TIME_ARGS (GST_FRAMES_TO_CLOCK_TIME (1, rate)), rate);
+    frames = 1;
+  }
+
+  level->interval_frames = frames;
+
+  GST_INFO_OBJECT (level, "interval_frames now %u for interval "
+      "%" GST_TIME_FORMAT " and sample rate %u", frames,
+      GST_TIME_ARGS (interval), rate);
+}
+
+/* setcaps: parse the negotiated audio format, select the matching
+ * per-format processing function and (re)allocate the per-channel
+ * state arrays. Returns FALSE if the caps are not valid raw audio. */
+static gboolean
+gst_level_set_caps (GstBaseTransform * trans, GstCaps * in, GstCaps * out)
+{
+ GstLevel *filter = GST_LEVEL (trans);
+ GstAudioInfo info;
+ gint i, channels;
+
+ if (!gst_audio_info_from_caps (&info, in))
+ return FALSE;
+
+ GST_OBJECT_LOCK (filter);
+
+ /* pick the calculator for the sample format; the pad template caps
+ * restrict negotiation to the formats listed here */
+ switch (GST_AUDIO_INFO_FORMAT (&info)) {
+ case GST_AUDIO_FORMAT_S8:
+ filter->process = gst_level_calculate_gint8;
+ break;
+ case GST_AUDIO_FORMAT_S16:
+ filter->process = gst_level_calculate_gint16;
+ break;
+ case GST_AUDIO_FORMAT_S32:
+ filter->process = gst_level_calculate_gint32;
+ break;
+ case GST_AUDIO_FORMAT_F32:
+ filter->process = gst_level_calculate_gfloat;
+ break;
+ case GST_AUDIO_FORMAT_F64:
+ filter->process = gst_level_calculate_gdouble;
+ break;
+ default:
+ filter->process = NULL;
+ break;
+ }
+
+ filter->info = info;
+
+ channels = GST_AUDIO_INFO_CHANNELS (&info);
+
+ /* allocate channel variable arrays */
+ g_free (filter->CS);
+ g_free (filter->peak);
+ g_free (filter->last_peak);
+ g_free (filter->decay_peak);
+ g_free (filter->decay_peak_base);
+ g_free (filter->decay_peak_age);
+ filter->CS = g_new (gdouble, channels);
+ filter->peak = g_new (gdouble, channels);
+ filter->last_peak = g_new (gdouble, channels);
+ filter->decay_peak = g_new (gdouble, channels);
+ filter->decay_peak_base = g_new (gdouble, channels);
+
+ filter->decay_peak_age = g_new (GstClockTime, channels);
+
+ /* reset all per-channel running values */
+ for (i = 0; i < channels; ++i) {
+ filter->CS[i] = filter->peak[i] = filter->last_peak[i] =
+ filter->decay_peak[i] = filter->decay_peak_base[i] = 0.0;
+ filter->decay_peak_age[i] = G_GUINT64_CONSTANT (0);
+ }
+
+ gst_level_recalc_interval_frames (filter);
+
+ GST_OBJECT_UNLOCK (filter);
+ return TRUE;
+}
+
+/* Reset the interval counters when the element starts streaming. */
+static gboolean
+gst_level_start (GstBaseTransform * trans)
+{
+  GstLevel *self = GST_LEVEL (trans);
+
+  self->message_ts = GST_CLOCK_TIME_NONE;
+  self->num_frames = 0;
+
+  return TRUE;
+}
+
+/* Build a new "level" element message with the timing fields filled in and
+ * empty rms/peak/decay GValueArrays; one entry per channel is appended later
+ * via gst_level_message_append_channel(). GValueArray is deprecated but kept
+ * for API compatibility (see the FIXME at the top of this file). */
+static GstMessage *
+gst_level_message_new (GstLevel * level, GstClockTime timestamp,
+ GstClockTime duration)
+{
+ GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (level);
+ GstStructure *s;
+ GValue v = { 0, };
+ GstClockTime endtime, running_time, stream_time;
+
+ running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
+ timestamp);
+ stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
+ timestamp);
+ /* endtime is for backwards compatibility */
+ endtime = stream_time + duration;
+
+ s = gst_structure_new ("level",
+ "endtime", GST_TYPE_CLOCK_TIME, endtime,
+ "timestamp", G_TYPE_UINT64, timestamp,
+ "stream-time", G_TYPE_UINT64, stream_time,
+ "running-time", G_TYPE_UINT64, running_time,
+ "duration", G_TYPE_UINT64, duration, NULL);
+
+ /* the structure takes ownership of each empty array */
+ g_value_init (&v, G_TYPE_VALUE_ARRAY);
+ g_value_take_boxed (&v, g_value_array_new (0));
+ gst_structure_take_value (s, "rms", &v);
+
+ g_value_init (&v, G_TYPE_VALUE_ARRAY);
+ g_value_take_boxed (&v, g_value_array_new (0));
+ gst_structure_take_value (s, "peak", &v);
+
+ g_value_init (&v, G_TYPE_VALUE_ARRAY);
+ g_value_take_boxed (&v, g_value_array_new (0));
+ gst_structure_take_value (s, "decay", &v);
+
+ return gst_message_new_element (GST_OBJECT (level), s);
+}
+
+/* Append one channel's rms/peak/decay values (in dB) to the corresponding
+ * value arrays stored in the level message's structure. */
+static void
+gst_level_message_append_channel (GstMessage * m, gdouble rms, gdouble peak,
+    gdouble decay)
+{
+  static const gchar *keys[] = { "rms", "peak", "decay" };
+  const gdouble values[] = { rms, peak, decay };
+  GstStructure *s;
+  GValue v = { 0, };
+  guint i;
+
+  s = (GstStructure *) gst_message_get_structure (m);
+
+  g_value_init (&v, G_TYPE_DOUBLE);
+  for (i = 0; i < G_N_ELEMENTS (keys); i++) {
+    const GValue *array_val = gst_structure_get_value (s, keys[i]);
+    GValueArray *arr = (GValueArray *) g_value_get_boxed (array_val);
+
+    g_value_set_double (&v, values[i]);
+    g_value_array_append (arr, &v);     /* appends a copy of v */
+  }
+  g_value_unset (&v);
+}
+
+/* Attach or update a GstAudioLevelMeta carrying @level on @buffer. */
+static void
+gst_level_rtp_audio_level_meta (GstLevel * self, GstBuffer * buffer,
+    guint8 level)
+{
+  GstAudioLevelMeta *meta = gst_buffer_get_audio_level_meta (buffer);
+
+  if (meta != NULL) {
+    /* reuse an upstream meta so its voice-activity flag survives */
+    meta->level = level;
+    return;
+  }
+
+  /* no meta yet; assume no voice — a downstream element may detect it */
+  gst_buffer_add_audio_level_meta (buffer, level, FALSE);
+}
+
+/* In-place "transform": analyze the buffer without modifying its samples.
+ * Accumulates the normalized cumulative square (power) and peak per channel,
+ * posting a "level" element message every interval_frames frames. When the
+ * audio-level-meta property is set, a GstAudioLevelMeta with the buffer's
+ * overall RMS level is attached (which is why passthrough is disabled in
+ * that mode). */
+static GstFlowReturn
+gst_level_transform_ip (GstBaseTransform * trans, GstBuffer * in)
+{
+  GstLevel *filter;
+  GstMapInfo map;
+  guint8 *in_data;
+  gsize in_size;
+  gdouble CS;
+  guint i;
+  guint num_frames;
+  guint num_int_samples = 0;    /* number of interleaved samples
+                                 * ie. total count for all channels combined */
+  guint block_size, block_int_size;     /* we subdivide buffers to not skip message
+                                         * intervals */
+  GstClockTimeDiff falloff_time;
+  gint channels, rate, bps;
+  gdouble CS_tot = 0;           /* Total Cumulative Square on all samples */
+
+  filter = GST_LEVEL (trans);
+
+  channels = GST_AUDIO_INFO_CHANNELS (&filter->info);
+  bps = GST_AUDIO_INFO_BPS (&filter->info);
+  rate = GST_AUDIO_INFO_RATE (&filter->info);
+
+  gst_buffer_map (in, &map, GST_MAP_READ);
+  in_data = map.data;
+  in_size = map.size;
+
+  num_int_samples = in_size / bps;
+
+  GST_LOG_OBJECT (filter, "analyzing %u sample frames at ts %" GST_TIME_FORMAT,
+      num_int_samples, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (in)));
+
+  /* a partial frame means corrupt or misnegotiated data; unmap before
+   * bailing out so the buffer mapping is not leaked on the error path */
+  if (G_UNLIKELY (num_int_samples % channels != 0)) {
+    gst_buffer_unmap (in, &map);
+    g_return_val_if_reached (GST_FLOW_ERROR);
+  }
+
+  GST_OBJECT_LOCK (filter);
+
+  /* restart interval accounting on discontinuities, or if we never saw a
+   * valid timestamp yet */
+  if (GST_BUFFER_FLAG_IS_SET (in, GST_BUFFER_FLAG_DISCONT)) {
+    filter->message_ts = GST_BUFFER_TIMESTAMP (in);
+    filter->num_frames = 0;
+  }
+  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (filter->message_ts))) {
+    filter->message_ts = GST_BUFFER_TIMESTAMP (in);
+  }
+
+  num_frames = num_int_samples / channels;
+  while (num_frames > 0) {
+    /* never analyze more than what is left in the current interval */
+    block_size = filter->interval_frames - filter->num_frames;
+    block_size = MIN (block_size, num_frames);
+    block_int_size = block_size * channels;
+
+    for (i = 0; i < channels; ++i) {
+      if (!GST_BUFFER_FLAG_IS_SET (in, GST_BUFFER_FLAG_GAP)) {
+        filter->process (in_data + (bps * i), block_int_size, channels, &CS,
+            &filter->peak[i]);
+        CS_tot += CS;
+        GST_LOG_OBJECT (filter,
+            "[%d]: cumulative squares %lf, over %d samples/%d channels",
+            i, CS, block_int_size, channels);
+        filter->CS[i] += CS;
+      } else {
+        /* GAP buffers are silence: no power, no peak */
+        filter->peak[i] = 0.0;
+      }
+
+      /* NOTE(review): ages by the *remaining* num_frames rather than
+       * block_size, which looks over-counted when a buffer spans several
+       * intervals — confirm against upstream history before changing */
+      filter->decay_peak_age[i] += GST_FRAMES_TO_CLOCK_TIME (num_frames, rate);
+      GST_LOG_OBJECT (filter,
+          "[%d]: peak %f, last peak %f, decay peak %f, age %" GST_TIME_FORMAT,
+          i, filter->peak[i], filter->last_peak[i], filter->decay_peak[i],
+          GST_TIME_ARGS (filter->decay_peak_age[i]));
+
+      /* update running peak */
+      if (filter->peak[i] > filter->last_peak[i])
+        filter->last_peak[i] = filter->peak[i];
+
+      /* make decay peak fall off if too old */
+      falloff_time =
+          GST_CLOCK_DIFF (gst_gdouble_to_guint64 (filter->decay_peak_ttl),
+          filter->decay_peak_age[i]);
+      if (falloff_time > 0) {
+        gdouble falloff_dB;
+        gdouble falloff;
+        gdouble length;         /* length of falloff time in seconds */
+
+        length = (gdouble) falloff_time / (gdouble) GST_SECOND;
+        falloff_dB = filter->decay_peak_falloff * length;
+        falloff = pow (10, falloff_dB / -20.0);
+
+        GST_LOG_OBJECT (filter,
+            "falloff: current %f, base %f, interval %" GST_TIME_FORMAT
+            ", dB falloff %f, factor %e",
+            filter->decay_peak[i], filter->decay_peak_base[i],
+            GST_TIME_ARGS (falloff_time), falloff_dB, falloff);
+        filter->decay_peak[i] = filter->decay_peak_base[i] * falloff;
+        GST_LOG_OBJECT (filter,
+            "peak is %" GST_TIME_FORMAT " old, decayed with factor %e to %f",
+            GST_TIME_ARGS (filter->decay_peak_age[i]), falloff,
+            filter->decay_peak[i]);
+      } else {
+        GST_LOG_OBJECT (filter, "peak not old enough, not decaying");
+      }
+
+      /* if the peak of this run is higher, the decay peak gets reset */
+      if (filter->peak[i] >= filter->decay_peak[i]) {
+        GST_LOG_OBJECT (filter, "new peak, %f", filter->peak[i]);
+        filter->decay_peak[i] = filter->peak[i];
+        filter->decay_peak_base[i] = filter->peak[i];
+        filter->decay_peak_age[i] = G_GINT64_CONSTANT (0);
+      }
+    }
+    in_data += block_size * bps * channels;
+
+    filter->num_frames += block_size;
+    num_frames -= block_size;
+
+    /* do we need to message ? */
+    if (filter->num_frames >= filter->interval_frames) {
+      gst_level_post_message (filter);
+    }
+  }
+
+  gst_buffer_unmap (in, &map);
+
+  if (filter->audio_level_meta && num_int_samples > 0) {
+    gdouble RMS = sqrt (CS_tot / num_int_samples);
+    gdouble RMSdB = 20 * log10 (RMS + EPSILON);
+    /* GstAudioLevelMeta carries the level as -dBov in [0, 127] (RFC 6464).
+     * -RMSdB can be far larger for near-silence (EPSILON alone yields
+     * ~700 dB), and converting an out-of-range double to guint8 is
+     * undefined behaviour in C, so clamp before the conversion. */
+    gdouble level_dB = CLAMP (-RMSdB, 0.0, 127.0);
+
+    gst_level_rtp_audio_level_meta (filter, in, (guint8) level_dB);
+  }
+
+  GST_OBJECT_UNLOCK (filter);
+  return GST_FLOW_OK;
+}
+
+/* Post a "level" element message covering the frames accumulated since the
+ * last message. Called with the object lock held; the lock is released
+ * around gst_element_post_message() because posting may run synchronous bus
+ * handlers that could call back into the element. Resets the per-channel
+ * cumulative square and running peak for the next interval. */
+static void
+gst_level_post_message (GstLevel * filter)
+{
+ guint i;
+ gint channels, rate, frames = filter->num_frames;
+ GstClockTime duration;
+
+ channels = GST_AUDIO_INFO_CHANNELS (&filter->info);
+ rate = GST_AUDIO_INFO_RATE (&filter->info);
+ duration = GST_FRAMES_TO_CLOCK_TIME (frames, rate);
+
+ if (filter->post_messages) {
+ GstMessage *m =
+ gst_level_message_new (filter, filter->message_ts, duration);
+
+ GST_LOG_OBJECT (filter,
+ "message: ts %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT
+ ", num_frames %d", GST_TIME_ARGS (filter->message_ts),
+ GST_TIME_ARGS (duration), frames);
+
+ for (i = 0; i < channels; ++i) {
+ gdouble RMS;
+ gdouble RMSdB, peakdB, decaydB;
+
+ RMS = sqrt (filter->CS[i] / frames);
+ GST_LOG_OBJECT (filter,
+ "message: channel %d, CS %f, RMS %f", i, filter->CS[i], RMS);
+ GST_LOG_OBJECT (filter,
+ "message: last_peak: %f, decay_peak: %f",
+ filter->last_peak[i], filter->decay_peak[i]);
+ /* RMS values are calculated in amplitude, so 20 * log 10 */
+ RMSdB = 20 * log10 (RMS + EPSILON);
+ /* peak values are square sums, ie. power, so 10 * log 10 */
+ peakdB = 10 * log10 (filter->last_peak[i] + EPSILON);
+ decaydB = 10 * log10 (filter->decay_peak[i] + EPSILON);
+
+ if (filter->decay_peak[i] < filter->last_peak[i]) {
+ /* this can happen in certain cases, for example when
+ * the last peak is between decay_peak and decay_peak_base */
+ GST_DEBUG_OBJECT (filter,
+ "message: decay peak dB %f smaller than last peak dB %f, copying",
+ decaydB, peakdB);
+ filter->decay_peak[i] = filter->last_peak[i];
+ }
+ GST_LOG_OBJECT (filter,
+ "message: RMS %f dB, peak %f dB, decay %f dB",
+ RMSdB, peakdB, decaydB);
+
+ gst_level_message_append_channel (m, RMSdB, peakdB, decaydB);
+
+ /* reset cumulative and normal peak */
+ filter->CS[i] = 0.0;
+ filter->last_peak[i] = 0.0;
+ }
+
+ /* drop the lock while posting; see function comment */
+ GST_OBJECT_UNLOCK (filter);
+ gst_element_post_message (GST_ELEMENT (filter), m);
+ GST_OBJECT_LOCK (filter);
+
+ }
+ /* advance the interval window even when posting is disabled */
+ filter->num_frames -= frames;
+ filter->message_ts += duration;
+}
+
+
+/* Flush a final level message on EOS so the tail of the stream is reported,
+ * then chain up to the base class. */
+static gboolean
+gst_level_sink_event (GstBaseTransform * trans, GstEvent * event)
+{
+  GstLevel *filter = GST_LEVEL (trans);
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+    GST_OBJECT_LOCK (filter);
+    gst_level_post_message (filter);
+    GST_OBJECT_UNLOCK (filter);
+  }
+
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
+}
+
+/* Plugin entry point: register the single "level" element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean registered = GST_ELEMENT_REGISTER (level, plugin);
+
+  return registered;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    level,
+    "Audio level plugin",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/level/gstlevel.h b/gst/level/gstlevel.h
new file mode 100644
index 0000000000..d205bfa6c2
--- /dev/null
+++ b/gst/level/gstlevel.h
@@ -0,0 +1,94 @@
+/* GStreamer
+ * Copyright (C) 1999 Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2000,2001,2002,2003,2005
+ * Thomas Vander Stichele <thomas at apestaart dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_LEVEL_H__
+#define __GST_LEVEL_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+
+
+#define GST_TYPE_LEVEL \
+ (gst_level_get_type())
+#define GST_LEVEL(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_LEVEL,GstLevel))
+#define GST_LEVEL_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_LEVEL,GstLevelClass))
+#define GST_LEVEL_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_LEVEL,GstLevelClass))
+#define GST_IS_LEVEL(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_LEVEL))
+#define GST_IS_LEVEL_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_LEVEL))
+
+
+typedef struct _GstLevel GstLevel;
+typedef struct _GstLevelClass GstLevelClass;
+
+/**
+ * GstLevel:
+ *
+ * Opaque data structure.
+ */
+struct _GstLevel {
+ GstBaseTransform element;
+
+ /* properties, protected by object lock */
+ gboolean post_messages; /* whether or not to post messages */
+ guint64 interval; /* how many nanoseconds between emits */
+ gdouble decay_peak_ttl; /* time to live for peak, in nanoseconds,
+ * stored as gdouble */
+ gdouble decay_peak_falloff; /* falloff in dB/sec */
+ gboolean audio_level_meta; /* whether or not generate GstAudioLevelMeta */
+
+ GstAudioInfo info;
+ gint num_frames; /* frame count (1 sample per channel)
+ * since last emit */
+ gint interval_frames; /* post a message after this many frames */
+ GstClockTime message_ts; /* starttime for next message */
+
+ /* per-channel arrays for intermediate values */
+ gdouble *CS; /* normalized Cumulative Square */
+ gdouble *peak; /* normalized Peak value over buffer */
+ gdouble *last_peak; /* last normalized Peak value over interval */
+ gdouble *decay_peak; /* running decaying normalized Peak */
+ gdouble *decay_peak_base; /* value of last peak we are decaying from */
+ GstClockTime *decay_peak_age; /* age of last peak */
+
+ /* format-specific analysis: (data, num, channels, *NCS, *NPS) */
+ void (*process)(gpointer, guint, guint, gdouble*, gdouble*);
+};
+
+/* class structure: no virtual methods added beyond the base class */
+struct _GstLevelClass {
+ GstBaseTransformClass parent_class;
+};
+
+GType gst_level_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (level);
+
+G_END_DECLS
+
+
+#endif /* __GST_LEVEL_H__ */
diff --git a/gst/level/meson.build b/gst/level/meson.build
new file mode 100644
index 0000000000..949dafc14f
--- /dev/null
+++ b/gst/level/meson.build
@@ -0,0 +1,10 @@
+# level plugin: RMS/peak audio analysis element (needs libm for sqrt/log10)
+gstlevel = library('gstlevel',
+ 'gstlevel.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstaudio_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstlevel, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstlevel]
diff --git a/gst/matroska/ebml-ids.h b/gst/matroska/ebml-ids.h
new file mode 100644
index 0000000000..ef1f5f3e9e
--- /dev/null
+++ b/gst/matroska/ebml-ids.h
@@ -0,0 +1,56 @@
+/* GStreamer EBML I/O
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * ebml-ids.h: definition of EBML data IDs
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_EBML_IDS_H__
+#define __GST_EBML_IDS_H__
+
+#include <glib.h>
+
+G_BEGIN_DECLS
+
+/* EBML version supported */
+#define GST_EBML_VERSION 1
+
+/* Unknown size (all bits set to 1) */
+#define GST_EBML_SIZE_UNKNOWN G_GINT64_CONSTANT(0x00ffffffffffffff)
+
+/* top-level master-IDs */
+#define GST_EBML_ID_HEADER 0x1A45DFA3
+
+/* IDs in the HEADER master */
+#define GST_EBML_ID_EBMLVERSION 0x4286
+#define GST_EBML_ID_EBMLREADVERSION 0x42F7
+#define GST_EBML_ID_EBMLMAXIDLENGTH 0x42F2
+#define GST_EBML_ID_EBMLMAXSIZELENGTH 0x42F3
+#define GST_EBML_ID_DOCTYPE 0x4282
+#define GST_EBML_ID_DOCTYPEVERSION 0x4287
+#define GST_EBML_ID_DOCTYPEREADVERSION 0x4285
+
+/* general EBML types */
+#define GST_EBML_ID_VOID 0xEC
+#define GST_EBML_ID_CRC32 0xBF
+
+/* EbmlDate offset from the unix epoch in nanoseconds, 2001/01/01 00:00:00 UTC */
+#define GST_EBML_DATE_OFFSET G_GINT64_CONSTANT (978307200000000000)
+
+G_END_DECLS
+
+#endif /* __GST_EBML_IDS_H__ */
diff --git a/gst/matroska/ebml-read.c b/gst/matroska/ebml-read.c
new file mode 100644
index 0000000000..f66990b35e
--- /dev/null
+++ b/gst/matroska/ebml-read.c
@@ -0,0 +1,682 @@
+/* GStreamer EBML I/O
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * ebml-read.c: read EBML data from file/stream
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "ebml-read.h"
+#include "ebml-ids.h"
+
+#include <gst/math-compat.h>
+
+GST_DEBUG_CATEGORY (ebmlread_debug);
+#define GST_CAT_DEFAULT ebmlread_debug
+
+/* Peeks following element id and element length in datastream provided
+ * by @peek with @ctx as user data.
+ * Returns GST_FLOW_EOS if not enough data to read id and length.
+ * Otherwise, @needed provides the prefix length (id + length), and
+ * @length provides element length.
+ *
+ * @object and @offset are provided for informative messaging/debug purposes.
+ */
+GstFlowReturn
+gst_ebml_peek_id_length (guint32 * _id, guint64 * _length, guint * _needed,
+    GstPeekData peek, gpointer * ctx, GstElement * el, guint64 offset)
+{
+  guint needed;
+  const guint8 *buf;
+  gint len_mask = 0x80, read = 1, n = 1, num_ffs = 0;
+  guint64 total;
+  guint8 b;
+  GstFlowReturn ret;
+
+  g_return_val_if_fail (_id != NULL, GST_FLOW_ERROR);
+  g_return_val_if_fail (_length != NULL, GST_FLOW_ERROR);
+  g_return_val_if_fail (_needed != NULL, GST_FLOW_ERROR);
+
+  /* well ... */
+  *_id = (guint32) GST_EBML_SIZE_UNKNOWN;
+  *_length = GST_EBML_SIZE_UNKNOWN;
+
+  /* read element id */
+  needed = 2;
+  ret = peek (ctx, needed, &buf);
+  if (ret != GST_FLOW_OK)
+    goto peek_error;
+  b = GST_READ_UINT8 (buf);
+  total = (guint64) b;
+  /* the position of the first set bit in the first byte gives the ID
+   * width: 1xxx -> 1 byte, 01xx -> 2 bytes, ... up to 4 bytes */
+  while (read <= 4 && !(total & len_mask)) {
+    read++;
+    len_mask >>= 1;
+  }
+  if (G_UNLIKELY (read > 4))
+    goto invalid_id;
+
+  /* need id and at least something for subsequent length */
+  needed = read + 1;
+  ret = peek (ctx, needed, &buf);
+  if (ret != GST_FLOW_OK)
+    goto peek_error;
+  /* accumulate the remaining ID bytes; the marker bit stays part of the
+   * ID value as is conventional for EBML IDs */
+  while (n < read) {
+    b = GST_READ_UINT8 (buf + n);
+    total = (total << 8) | b;
+    ++n;
+  }
+  *_id = (guint32) total;
+
+  /* read element length: same leading-bit scheme, but up to 8 bytes and
+   * with the marker bit stripped from the value */
+  b = GST_READ_UINT8 (buf + n);
+  total = (guint64) b;
+  len_mask = 0x80;
+  read = 1;
+  while (read <= 8 && !(total & len_mask)) {
+    read++;
+    len_mask >>= 1;
+  }
+  if (G_UNLIKELY (read > 8))
+    goto invalid_length;
+  /* strip the length descriptor bit; if the payload bits of the first
+   * byte are all ones this may be the start of an all-ones ("unknown
+   * size") length, tracked via num_ffs */
+  if ((total &= (len_mask - 1)) == len_mask - 1)
+    num_ffs++;
+
+  needed += read - 1;
+  ret = peek (ctx, needed, &buf);
+  if (ret != GST_FLOW_OK)
+    goto peek_error;
+  /* peek may return a different pointer; re-anchor so buf + 1 .. buf +
+   * read - 1 are the remaining length bytes */
+  buf += (needed - read);
+  n = 1;
+  while (n < read) {
+    guint8 b = GST_READ_UINT8 (buf + n);
+
+    if (G_UNLIKELY (b == 0xff))
+      num_ffs++;
+    total = (total << 8) | b;
+    ++n;
+  }
+
+  /* every length byte 0xff (after the marker) => unknown/unbounded size */
+  if (G_UNLIKELY (read == num_ffs))
+    *_length = G_MAXUINT64;
+  else
+    *_length = total;
+
+  *_needed = needed;
+
+  return GST_FLOW_OK;
+
+  /* ERRORS */
+peek_error:
+  {
+    if (ret != GST_FLOW_FLUSHING && ret != GST_FLOW_EOS)
+      GST_WARNING_OBJECT (el, "peek failed, ret = %s", gst_flow_get_name (ret));
+    else
+      GST_DEBUG_OBJECT (el, "peek failed, ret = %s", gst_flow_get_name (ret));
+    *_needed = needed;
+    return ret;
+  }
+invalid_id:
+  {
+    GST_ERROR_OBJECT (el,
+        "Invalid EBML ID size tag (0x%x) at position %" G_GUINT64_FORMAT " (0x%"
+        G_GINT64_MODIFIER "x)", (guint) b, offset, offset);
+    return GST_FLOW_ERROR;
+  }
+invalid_length:
+  {
+    GST_ERROR_OBJECT (el,
+        "Invalid EBML length size tag (0x%x) at position %" G_GUINT64_FORMAT
+        " (0x%" G_GINT64_MODIFIER "x)", (guint) b, offset, offset);
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* setup for parsing @buf at position @offset on behalf of @el.
+ * Takes ownership of @buf. */
+void
+gst_ebml_read_init (GstEbmlRead * ebml, GstElement * el, GstBuffer * buf,
+    guint64 offset)
+{
+  GstEbmlMaster master;
+
+  g_return_if_fail (el);
+  g_return_if_fail (buf);
+
+  ebml->el = el;
+  ebml->buf = buf;
+  ebml->offset = offset;
+  gst_buffer_map (buf, &ebml->map, GST_MAP_READ);
+
+  /* the bottom entry of the reader stack spans the whole mapped buffer */
+  ebml->readers = g_array_sized_new (FALSE, FALSE, sizeof (GstEbmlMaster), 10);
+  master.offset = offset;
+  gst_byte_reader_init (&master.br, ebml->map.data, ebml->map.size);
+  g_array_append_val (ebml->readers, master);
+}
+
+/* release the reader stack and the mapped input buffer */
+void
+gst_ebml_read_clear (GstEbmlRead * ebml)
+{
+  if (ebml->readers != NULL) {
+    g_array_free (ebml->readers, TRUE);
+    ebml->readers = NULL;
+  }
+
+  if (ebml->buf != NULL) {
+    gst_buffer_unmap (ebml->buf, &ebml->map);
+    gst_buffer_unref (ebml->buf);
+    ebml->buf = NULL;
+  }
+
+  ebml->el = NULL;
+}
+
+/* GstPeekData implementation backed by a GstByteReader */
+static GstFlowReturn
+gst_ebml_read_peek (GstByteReader * br, guint peek, const guint8 ** data)
+{
+  return gst_byte_reader_peek_data (br, peek, data) ? GST_FLOW_OK :
+      GST_FLOW_EOS;
+}
+
+/* peek id, element length and prefix size (id + length fields) of the
+ * next element, without advancing the current byte reader */
+static GstFlowReturn
+gst_ebml_peek_id_full (GstEbmlRead * ebml, guint32 * id, guint64 * length,
+    guint * prefix)
+{
+  GstFlowReturn ret;
+
+  ret = gst_ebml_peek_id_length (id, length, prefix,
+      (GstPeekData) gst_ebml_read_peek, (gpointer) gst_ebml_read_br (ebml),
+      ebml->el, gst_ebml_read_get_pos (ebml));
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  GST_LOG_OBJECT (ebml->el, "id 0x%x at offset 0x%" G_GINT64_MODIFIER "x"
+      " of length %" G_GUINT64_FORMAT ", prefix %d", *id,
+      gst_ebml_read_get_pos (ebml), *length, *prefix);
+
+#ifndef GST_DISABLE_GST_DEBUG
+  /* hexdump the element when LOG level is active for this category */
+  if (ebmlread_debug->threshold >= GST_LEVEL_LOG) {
+    const guint8 *data = NULL;
+    GstByteReader *br = gst_ebml_read_br (ebml);
+    guint size = gst_byte_reader_get_remaining (br);
+
+    if (gst_byte_reader_peek_data (br, size, &data)) {
+
+      GST_LOG_OBJECT (ebml->el, "current br %p; remaining %d", br, size);
+      if (data)
+        GST_MEMDUMP_OBJECT (ebml->el, "element", data, MIN (size, *length));
+    }
+  }
+#endif
+
+  return ret;
+}
+
+/* peek only the id of the next element, discarding length/prefix info */
+GstFlowReturn
+gst_ebml_peek_id (GstEbmlRead * ebml, guint32 * id)
+{
+  guint64 len;
+  guint prefix_size;
+
+  return gst_ebml_peek_id_full (ebml, id, &len, &prefix_size);
+}
+
+/*
+ * Read the next element, the contents are supposed to be sub-elements which
+ * can be read separately. A new bytereader is setup for doing so.
+ */
+GstFlowReturn
+gst_ebml_read_master (GstEbmlRead * ebml, guint32 * id)
+{
+  guint64 length;
+  guint prefix;
+  const guint8 *data = NULL;
+  GstFlowReturn ret;
+  GstEbmlMaster m;
+
+  ret = gst_ebml_peek_id_full (ebml, id, &length, &prefix);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* we just at least peeked the id */
+  if (!gst_byte_reader_skip (gst_ebml_read_br (ebml), prefix))
+    return GST_FLOW_ERROR;      /* FIXME: do proper error handling */
+
+  m.offset = gst_ebml_read_get_pos (ebml);
+  /* the whole master payload must be available, else signal the caller
+   * to wait for more data */
+  if (!gst_byte_reader_get_data (gst_ebml_read_br (ebml), length, &data))
+    return GST_FLOW_PARSE;
+
+  GST_LOG_OBJECT (ebml->el, "pushing level %d at offset %" G_GUINT64_FORMAT,
+      ebml->readers->len, m.offset);
+  /* a reader confined to this master's payload goes on top of the stack */
+  gst_byte_reader_init (&m.br, data, length);
+  g_array_append_val (ebml->readers, m);
+
+  return GST_FLOW_OK;
+}
+
+/* explicitly pop a bytereader from stack. Usually invoked automagically. */
+GstFlowReturn
+gst_ebml_read_pop_master (GstEbmlRead * ebml)
+{
+  guint depth;
+
+  g_return_val_if_fail (ebml->readers, GST_FLOW_ERROR);
+
+  depth = ebml->readers->len;
+  /* the bottom-most reader (covering the whole buffer) always stays */
+  if (depth > 1) {
+    GST_LOG_OBJECT (ebml->el, "popping level %d", depth - 1);
+    g_array_remove_index (ebml->readers, depth - 1);
+  }
+
+  return GST_FLOW_OK;
+}
+
+/*
+ * Skip the next element.
+ */
+
+GstFlowReturn
+gst_ebml_read_skip (GstEbmlRead * ebml)
+{
+  guint64 length;
+  guint32 id;
+  guint prefix;
+  GstFlowReturn ret;
+
+  ret = gst_ebml_peek_id_full (ebml, &id, &length, &prefix);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* NOTE(review): length + prefix is a guint64 but gst_byte_reader_skip()
+   * takes a guint, so a bogus length near G_MAXUINT64 would be truncated
+   * here -- confirm callers guard against oversized elements */
+  if (!gst_byte_reader_skip (gst_ebml_read_br (ebml), length + prefix))
+    return GST_FLOW_PARSE;
+
+  return ret;
+}
+
+/*
+ * Read the next element as a GstBuffer (binary).
+ */
+
+GstFlowReturn
+gst_ebml_read_buffer (GstEbmlRead * ebml, guint32 * id, GstBuffer ** buf)
+{
+  guint64 length;
+  guint prefix;
+  GstFlowReturn ret;
+
+  ret = gst_ebml_peek_id_full (ebml, id, &length, &prefix);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* we just at least peeked the id */
+  if (!gst_byte_reader_skip (gst_ebml_read_br (ebml), prefix))
+    return GST_FLOW_ERROR;      /* FIXME: do proper error handling */
+
+  if (G_LIKELY (length > 0)) {
+    guint offset;
+
+    /* payload offset relative to the start of the mapped buffer, so the
+     * data can be shared via a region copy instead of memcpy'd */
+    offset = gst_ebml_read_get_pos (ebml) - ebml->offset;
+    if (G_LIKELY (gst_byte_reader_skip (gst_ebml_read_br (ebml), length))) {
+      *buf = gst_buffer_copy_region (ebml->buf, GST_BUFFER_COPY_ALL,
+          offset, length);
+    } else {
+      /* not enough data available in the current master */
+      *buf = NULL;
+      return GST_FLOW_PARSE;
+    }
+  } else {
+    /* zero-sized element: hand out an empty buffer */
+    *buf = gst_buffer_new ();
+  }
+
+  return ret;
+}
+
+/*
+ * Read the next element, return a pointer to it and its size.
+ * The returned pointer references the mapped input buffer directly,
+ * so it stays valid until gst_ebml_read_clear() is called.
+ */
+
+static GstFlowReturn
+gst_ebml_read_bytes (GstEbmlRead * ebml, guint32 * id, const guint8 ** data,
+    guint * size)
+{
+  guint64 length;
+  guint prefix;
+  GstFlowReturn ret;
+
+  *size = 0;
+
+  ret = gst_ebml_peek_id_full (ebml, id, &length, &prefix);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* we just at least peeked the id */
+  if (!gst_byte_reader_skip (gst_ebml_read_br (ebml), prefix))
+    return GST_FLOW_ERROR;      /* FIXME: do proper error handling */
+
+  /* This shouldn't happen here with the elements read through this function */
+  if (length == GST_EBML_SIZE_UNKNOWN || length == G_MAXUINT64) {
+    GST_ERROR_OBJECT (ebml->el, "element 0x%x has undefined length!", *id);
+    return GST_FLOW_ERROR;
+  }
+
+  /* Sanity check since we're downcasting a 64-bit len to possibly 32-bit here */
+  if (length >= G_MAXUINT) {
+    GST_ERROR_OBJECT (ebml->el, "element 0x%x too large, "
+        "size %" G_GUINT64_FORMAT, *id, length);
+    return GST_FLOW_ERROR;
+  }
+
+  *data = NULL;
+  if (G_LIKELY (length > 0)) {
+    /* GST_FLOW_PARSE: payload not (fully) available in this master yet */
+    if (!gst_byte_reader_get_data (gst_ebml_read_br (ebml), length, data))
+      return GST_FLOW_PARSE;
+  }
+
+  *size = length;
+
+  return ret;
+}
+
+/*
+ * Read the next element as an unsigned int (big-endian,
+ * variable width of 0..8 bytes).
+ */
+
+GstFlowReturn
+gst_ebml_read_uint (GstEbmlRead * ebml, guint32 * id, guint64 * num)
+{
+  const guint8 *bytes;
+  guint len, i;
+  GstFlowReturn ret;
+
+  ret = gst_ebml_read_bytes (ebml, id, &bytes, &len);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  if (len > 8) {
+    GST_ERROR_OBJECT (ebml->el,
+        "Invalid integer element size %d at position %" G_GUINT64_FORMAT " (0x%"
+        G_GINT64_MODIFIER "x)", len, gst_ebml_read_get_pos (ebml) - len,
+        gst_ebml_read_get_pos (ebml) - len);
+    return GST_FLOW_ERROR;
+  }
+
+  /* big-endian accumulation; a zero-length element yields 0 */
+  *num = 0;
+  for (i = 0; i < len; i++)
+    *num = (*num << 8) | bytes[i];
+
+  return ret;
+}
+
+/*
+ * Read the next element as a signed int.
+ */
+
+GstFlowReturn
+gst_ebml_read_sint (GstEbmlRead * ebml, guint32 * id, gint64 * num)
+{
+  const guint8 *data;
+  guint size;
+  gboolean negative = 0;
+  GstFlowReturn ret;
+
+  ret = gst_ebml_read_bytes (ebml, id, &data, &size);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  if (size > 8) {
+    GST_ERROR_OBJECT (ebml->el,
+        "Invalid integer element size %d at position %" G_GUINT64_FORMAT " (0x%"
+        G_GINT64_MODIFIER "x)", size, gst_ebml_read_get_pos (ebml) - size,
+        gst_ebml_read_get_pos (ebml) - size);
+    return GST_FLOW_ERROR;
+  }
+
+  /* zero-length element decodes to 0 */
+  if (size == 0) {
+    *num = 0;
+    return ret;
+  }
+
+  /* NOTE(review): this treats the top bit of the first byte as a plain
+   * sign bit (clear it, accumulate the magnitude, negate at the end),
+   * i.e. sign-magnitude rather than two's complement -- confirm against
+   * the EBML spec for the elements this is used with */
+  *num = 0;
+  if (*data & 0x80) {
+    negative = 1;
+    *num = *data & ~0x80;
+    size--;
+    data++;
+  }
+
+  while (size > 0) {
+    *num = (*num << 8) | *data;
+    size--;
+    data++;
+  }
+
+  /* make signed */
+  if (negative) {
+    *num = 0 - *num;
+  }
+
+  return ret;
+}
+
+/* Convert 80 bit extended precision float in big endian format to double.
+ * Code adapted from libavutil/intfloat_readwrite.c from ffmpeg,
+ * licensed under LGPL */
+
+struct _ext_float
+{
+  guint8 exponent[2];
+  guint8 mantissa[8];
+};
+
+static gdouble
+_ext2dbl (const guint8 * data)
+{
+  struct _ext_float ext;
+  guint64 m = 0;
+  gint e, i;
+
+  memcpy (&ext.exponent, data, 2);
+  memcpy (&ext.mantissa, data + 2, 8);
+
+  for (i = 0; i < 8; i++)
+    m = (m << 8) + ext.mantissa[i];
+  e = (((gint) ext.exponent[0] & 0x7f) << 8) | ext.exponent[1];
+  if (e == 0x7fff && m)
+    return NAN;
+  e -= 16383 + 63;              /* In IEEE 80 bits, the whole (i.e. 1.xxxx)
+                                 * mantissa bit is written as opposed to the
+                                 * single and double precision formats */
+  /* the sign is the top bit of the exponent bytes. Negating the unsigned
+   * 64-bit mantissa (as the original ffmpeg code did) wraps around to a
+   * huge positive value once converted to double, so negate the final
+   * result instead */
+  if (ext.exponent[0] & 0x80)
+    return -ldexp ((gdouble) m, e);
+  return ldexp ((gdouble) m, e);
+}
+
+/*
+ * Read the next element as a float.
+ * Valid payload sizes: 0 (value 0.0), 4 (single), 8 (double) and
+ * 10 (80-bit extended precision); everything else is an error.
+ */
+
+GstFlowReturn
+gst_ebml_read_float (GstEbmlRead * ebml, guint32 * id, gdouble * num)
+{
+  const guint8 *data;
+  guint size;
+  GstFlowReturn ret;
+
+  ret = gst_ebml_read_bytes (ebml, id, &data, &size);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  if (size != 0 && size != 4 && size != 8 && size != 10) {
+    GST_ERROR_OBJECT (ebml->el,
+        "Invalid float element size %d at position %" G_GUINT64_FORMAT " (0x%"
+        G_GINT64_MODIFIER "x)", size, gst_ebml_read_get_pos (ebml) - size,
+        gst_ebml_read_get_pos (ebml) - size);
+    return GST_FLOW_ERROR;
+  }
+
+  if (size == 4) {
+    gfloat f;
+
+    /* memcpy avoids strict-aliasing issues when reinterpreting the bytes */
+    memcpy (&f, data, 4);
+    f = GFLOAT_FROM_BE (f);
+
+    *num = f;
+  } else if (size == 8) {
+    gdouble d;
+
+    memcpy (&d, data, 8);
+    d = GDOUBLE_FROM_BE (d);
+
+    *num = d;
+  } else if (size == 10) {
+    *num = _ext2dbl (data);
+  } else {
+    /* size == 0 means a value of 0.0 */
+    *num = 0.0;
+  }
+
+  return ret;
+}
+
+/*
+ * Read the next element as a NUL-terminated C string copy
+ * (the caller owns and frees the result).
+ */
+
+static GstFlowReturn
+gst_ebml_read_string (GstEbmlRead * ebml, guint32 * id, gchar ** str)
+{
+  const guint8 *bytes;
+  guint len;
+  gchar *copy;
+  GstFlowReturn ret;
+
+  ret = gst_ebml_read_bytes (ebml, id, &bytes, &len);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* the element payload is not guaranteed to be terminated, so copy it
+   * into a buffer with one extra byte for the terminator */
+  copy = g_malloc (len + 1);
+  memcpy (copy, bytes, len);
+  copy[len] = '\0';
+  *str = copy;
+
+  return ret;
+}
+
+/*
+ * Read the next element as an ASCII string.
+ * Fails (and frees the string) if any byte has the high bit set.
+ */
+
+GstFlowReturn
+gst_ebml_read_ascii (GstEbmlRead * ebml, guint32 * id, gchar ** str_out)
+{
+  GstFlowReturn ret;
+  gchar *str;
+  gchar *iter;
+
+#ifndef GST_DISABLE_GST_DEBUG
+  /* capture the element position before the read advances the reader,
+   * as gst_ebml_read_utf8() does; ebml->offset would only give the
+   * start of the whole parsed buffer */
+  guint64 oldoff = gst_ebml_read_get_pos (ebml);
+#endif
+
+  ret = gst_ebml_read_string (ebml, id, &str);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  for (iter = str; *iter != '\0'; iter++) {
+    if (G_UNLIKELY (*iter & 0x80)) {
+      /* log against ebml->el: ebml itself is a plain struct, not a
+       * GObject, and must not be passed to GST_ERROR_OBJECT() */
+      GST_ERROR_OBJECT (ebml->el,
+          "Invalid ASCII string at offset %" G_GUINT64_FORMAT, oldoff);
+      g_free (str);
+      return GST_FLOW_ERROR;
+    }
+  }
+
+  *str_out = str;
+  return ret;
+}
+
+/*
+ * Read the next element as a UTF-8 string.
+ * Invalid UTF-8 only triggers a warning; the string is returned as-is.
+ */
+
+GstFlowReturn
+gst_ebml_read_utf8 (GstEbmlRead * ebml, guint32 * id, gchar ** str)
+{
+  GstFlowReturn ret;
+
+#ifndef GST_DISABLE_GST_DEBUG
+  /* element position before the read advances the reader */
+  guint64 oldoff = gst_ebml_read_get_pos (ebml);
+#endif
+
+  ret = gst_ebml_read_string (ebml, id, str);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* str is the caller's out pointer; the NULL checks are defensive */
+  if (str != NULL && *str != NULL && **str != '\0' &&
+      !g_utf8_validate (*str, -1, NULL)) {
+    GST_WARNING_OBJECT (ebml->el,
+        "Invalid UTF-8 string at offset %" G_GUINT64_FORMAT, oldoff);
+  }
+
+  return ret;
+}
+
+/*
+ * Read the next element as a date.
+ * Returns the nanoseconds since the unix epoch.
+ */
+
+GstFlowReturn
+gst_ebml_read_date (GstEbmlRead * ebml, guint32 * id, gint64 * date)
+{
+  gint64 raw;
+  GstFlowReturn ret;
+
+  ret = gst_ebml_read_sint (ebml, id, &raw);
+  if (ret == GST_FLOW_OK) {
+    /* EbmlDate is relative to 2001-01-01 UTC; shift to the unix epoch */
+    *date = raw + GST_EBML_DATE_OFFSET;
+  }
+
+  return ret;
+}
+
+/*
+ * Read the next element as binary data.
+ * The caller owns the returned copy.
+ */
+
+GstFlowReturn
+gst_ebml_read_binary (GstEbmlRead * ebml,
+    guint32 * id, guint8 ** binary, guint64 * length)
+{
+  GstFlowReturn ret;
+  const guint8 *bytes;
+  guint len;
+
+  ret = gst_ebml_read_bytes (ebml, id, &bytes, &len);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  /* hand the caller an owned copy of the element payload */
+  *length = len;
+  *binary = g_memdup2 (bytes, len);
+
+  return GST_FLOW_OK;
+}
diff --git a/gst/matroska/ebml-read.h b/gst/matroska/ebml-read.h
new file mode 100644
index 0000000000..ce894da7f7
--- /dev/null
+++ b/gst/matroska/ebml-read.h
@@ -0,0 +1,171 @@
+/* GStreamer EBML I/O
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * ebml-read.c: read EBML data from file/stream
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_EBML_READ_H__
+#define __GST_EBML_READ_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbytereader.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_EBML_READ \
+ (gst_ebml_read_get_type ())
+#define GST_EBML_READ(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_EBML_READ, GstEbmlRead))
+#define GST_EBML_READ_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_EBML_READ, GstEbmlReadClass))
+#define GST_IS_EBML_READ(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_EBML_READ))
+#define GST_IS_EBML_READ_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_EBML_READ))
+#define GST_EBML_READ_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_EBML_READ, GstEbmlReadClass))
+
+GST_DEBUG_CATEGORY_EXTERN (ebmlread_debug);
+
+/* custom flow return code: not enough data to parse what was requested */
+#define GST_FLOW_PARSE GST_FLOW_CUSTOM_ERROR
+
+typedef struct _GstEbmlMaster {
+  guint64 offset;               /* absolute offset where this master starts */
+  GstByteReader br;             /* reader confined to this master's payload */
+} GstEbmlMaster;
+
+typedef struct _GstEbmlRead {
+  GstElement *el;               /* element used for debug/error messages */
+
+  GstBuffer *buf;               /* input buffer (owned; see _init/_clear) */
+  guint64 offset;               /* absolute position of @buf's first byte */
+  GstMapInfo map;
+
+  GArray *readers;              /* stack of GstEbmlMaster, deepest one last */
+} GstEbmlRead;
+
+typedef GstFlowReturn (*GstPeekData) (gpointer * context, guint peek, const guint8 ** data);
+
+/* returns UNEXPECTED if not enough data */
+GstFlowReturn gst_ebml_peek_id_length (guint32 * _id, guint64 * _length,
+ guint * _needed,
+ GstPeekData peek, gpointer * ctx,
+ GstElement * el, guint64 offset);
+
+void gst_ebml_read_init (GstEbmlRead * ebml,
+ GstElement * el, GstBuffer * buf,
+ guint64 offset);
+
+void gst_ebml_read_clear (GstEbmlRead * ebml);
+
+GstFlowReturn gst_ebml_peek_id (GstEbmlRead * ebml, guint32 * id);
+
+/* return _PARSE if not enough data to read what is needed, _ERROR or _OK */
+GstFlowReturn gst_ebml_read_skip (GstEbmlRead *ebml);
+
+GstFlowReturn gst_ebml_read_buffer (GstEbmlRead *ebml,
+ guint32 *id,
+ GstBuffer **buf);
+
+GstFlowReturn gst_ebml_read_uint (GstEbmlRead *ebml,
+ guint32 *id,
+ guint64 *num);
+
+GstFlowReturn gst_ebml_read_sint (GstEbmlRead *ebml,
+ guint32 *id,
+ gint64 *num);
+
+GstFlowReturn gst_ebml_read_float (GstEbmlRead *ebml,
+ guint32 *id,
+ gdouble *num);
+
+GstFlowReturn gst_ebml_read_ascii (GstEbmlRead *ebml,
+ guint32 *id,
+ gchar **str);
+
+GstFlowReturn gst_ebml_read_utf8 (GstEbmlRead *ebml,
+ guint32 *id,
+ gchar **str);
+
+GstFlowReturn gst_ebml_read_date (GstEbmlRead *ebml,
+ guint32 *id,
+ gint64 *date);
+
+GstFlowReturn gst_ebml_read_master (GstEbmlRead *ebml,
+ guint32 *id);
+
+GstFlowReturn gst_ebml_read_pop_master (GstEbmlRead *ebml);
+
+GstFlowReturn gst_ebml_read_binary (GstEbmlRead *ebml,
+ guint32 *id,
+ guint8 **binary,
+ guint64 *length);
+
+GstFlowReturn gst_ebml_read_header (GstEbmlRead *read,
+ gchar **doctype,
+ guint *version);
+
+/* Returns current (absolute) position of Ebml parser,
+ * i.e. taking into account offset provided at init */
+static inline guint64
+gst_ebml_read_get_pos (GstEbmlRead * ebml)
+{
+  GstEbmlMaster *top;
+
+  g_return_val_if_fail (ebml->readers, 0);
+  g_return_val_if_fail (ebml->readers->len, 0);
+
+  /* the deepest (last) master is the one currently being parsed */
+  top = &g_array_index (ebml->readers, GstEbmlMaster, ebml->readers->len - 1);
+  return top->offset + gst_byte_reader_get_pos (&top->br);
+}
+
+/* Returns starting offset of Ebml parser, i.e. the absolute position of
+ * the first byte of the buffer handed to gst_ebml_read_init() */
+static inline guint64
+gst_ebml_read_get_offset (GstEbmlRead * ebml)
+{
+  return ebml->offset;
+}
+
+/* Returns the byte reader of the deepest master on the stack, i.e. the
+ * one currently being parsed */
+static inline GstByteReader *
+gst_ebml_read_br (GstEbmlRead * ebml)
+{
+  g_return_val_if_fail (ebml->readers, NULL);
+  g_return_val_if_fail (ebml->readers->len, NULL);
+
+  return &(g_array_index (ebml->readers,
+      GstEbmlMaster, ebml->readers->len - 1).br);
+}
+
+/* Checks that at least @bytes_needed bytes remain in the current master;
+ * when they do not and @auto_pop is set, the exhausted master is popped */
+static inline gboolean
+gst_ebml_read_has_remaining (GstEbmlRead * ebml, guint64 bytes_needed,
+    gboolean auto_pop)
+{
+  gboolean res;
+
+  res = (gst_byte_reader_get_remaining (gst_ebml_read_br (ebml)) >= bytes_needed);
+  /* running out of data in a master is the exceptional case, so the hint
+   * must be G_UNLIKELY -- the previous G_LIKELY inverted the prediction;
+   * G_LIKELY around a plain return value is a no-op and was dropped */
+  if (G_UNLIKELY (!res && auto_pop)) {
+    gst_ebml_read_pop_master (ebml);
+  }
+
+  return res;
+}
+
+G_END_DECLS
+
+#endif /* __GST_EBML_READ_H__ */
diff --git a/gst/matroska/ebml-write.c b/gst/matroska/ebml-write.c
new file mode 100644
index 0000000000..d676170e22
--- /dev/null
+++ b/gst/matroska/ebml-write.c
@@ -0,0 +1,941 @@
+/* GStreamer EBML I/O
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2005 Michal Benes <michal.benes@xeris.cz>
+ *
+ * ebml-write.c: write EBML data to file/stream
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "ebml-write.h"
+#include "ebml-ids.h"
+
+
+GST_DEBUG_CATEGORY_STATIC (gst_ebml_write_debug);
+#define GST_CAT_DEFAULT gst_ebml_write_debug
+
+#define _do_init \
+ GST_DEBUG_CATEGORY_INIT (gst_ebml_write_debug, "ebmlwrite", 0, "Write EBML structured data")
+#define parent_class gst_ebml_write_parent_class
+G_DEFINE_TYPE_WITH_CODE (GstEbmlWrite, gst_ebml_write, GST_TYPE_OBJECT,
+ _do_init);
+
+static void gst_ebml_write_finalize (GObject * object);
+
+static void
+gst_ebml_write_class_init (GstEbmlWriteClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+  gobject_class->finalize = gst_ebml_write_finalize;
+}
+
+static void
+gst_ebml_write_init (GstEbmlWrite * ebml)
+{
+  ebml->srcpad = NULL;
+  ebml->pos = 0;
+  /* an impossible "last position" forces a segment event on first write */
+  ebml->last_pos = G_MAXUINT64;
+
+  ebml->cache = NULL;
+  ebml->caps = NULL;
+
+  ebml->streamheader = NULL;
+  ebml->streamheader_pos = 0;
+  ebml->writing_streamheader = FALSE;
+}
+
+static void
+gst_ebml_write_finalize (GObject * object)
+{
+  GstEbmlWrite *ebml = GST_EBML_WRITE (object);
+
+  /* srcpad is only set by gst_ebml_write_new(); for an instance created
+   * directly via g_object_new() it is still NULL (see _init), and
+   * gst_object_unref(NULL) would emit a warning */
+  if (ebml->srcpad) {
+    gst_object_unref (ebml->srcpad);
+    ebml->srcpad = NULL;
+  }
+
+  if (ebml->cache) {
+    gst_byte_writer_free (ebml->cache);
+    ebml->cache = NULL;
+  }
+
+  if (ebml->streamheader) {
+    gst_byte_writer_free (ebml->streamheader);
+    ebml->streamheader = NULL;
+  }
+
+  if (ebml->caps) {
+    gst_caps_unref (ebml->caps);
+    ebml->caps = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+/**
+ * gst_ebml_write_new:
+ * @srcpad: Source pad to which the output will be pushed.
+ *
+ * Creates a new #GstEbmlWrite. Takes its own reference on @srcpad,
+ * released when the object is finalized.
+ *
+ * Returns: a new #GstEbmlWrite
+ */
+GstEbmlWrite *
+gst_ebml_write_new (GstPad * srcpad)
+{
+  GstEbmlWrite *ebml =
+      GST_EBML_WRITE (g_object_new (GST_TYPE_EBML_WRITE, NULL));
+
+  ebml->srcpad = gst_object_ref (srcpad);
+  ebml->timestamp = GST_CLOCK_TIME_NONE;
+
+  gst_ebml_write_reset (ebml);
+
+  return ebml;
+}
+
+
+/**
+ * gst_ebml_write_reset:
+ * @ebml: a #GstEbmlWrite.
+ *
+ * Reset internal state of #GstEbmlWrite.
+ */
+void
+gst_ebml_write_reset (GstEbmlWrite * ebml)
+{
+  ebml->pos = 0;
+  /* impossible position => the next write emits a segment event */
+  ebml->last_pos = G_MAXUINT64;
+
+  if (ebml->cache != NULL) {
+    gst_byte_writer_free (ebml->cache);
+    ebml->cache = NULL;
+  }
+
+  if (ebml->caps != NULL) {
+    gst_caps_unref (ebml->caps);
+    ebml->caps = NULL;
+  }
+
+  ebml->last_write_result = GST_FLOW_OK;
+  ebml->timestamp = GST_CLOCK_TIME_NONE;
+}
+
+
+/**
+ * gst_ebml_last_write_result:
+ * @ebml: a #GstEbmlWrite.
+ *
+ * Returns: GST_FLOW_OK if there was not write error since the last call of
+ * gst_ebml_last_write_result or code of the error.
+ */
+GstFlowReturn
+gst_ebml_last_write_result (GstEbmlWrite * ebml)
+{
+  /* fetch-and-clear: reading the error also rearms the writer */
+  GstFlowReturn result = ebml->last_write_result;
+
+  ebml->last_write_result = GST_FLOW_OK;
+
+  return result;
+}
+
+
+/* start mirroring all written output into a streamheader byte writer */
+void
+gst_ebml_start_streamheader (GstEbmlWrite * ebml)
+{
+  g_return_if_fail (ebml->streamheader == NULL);
+
+  GST_DEBUG ("Starting streamheader at %" G_GUINT64_FORMAT, ebml->pos);
+  ebml->writing_streamheader = TRUE;
+  ebml->streamheader_pos = ebml->pos;
+  ebml->streamheader = gst_byte_writer_new_with_size (1000, FALSE);
+}
+
+/* stop collecting the streamheader and hand it out as a buffer
+ * (NULL if none was started) */
+GstBuffer *
+gst_ebml_stop_streamheader (GstEbmlWrite * ebml)
+{
+  GstBuffer *header;
+
+  if (ebml->streamheader == NULL)
+    return NULL;
+
+  header = gst_byte_writer_free_and_get_buffer (ebml->streamheader);
+  ebml->streamheader = NULL;
+  GST_DEBUG ("Streamheader was size %" G_GSIZE_FORMAT,
+      gst_buffer_get_size (header));
+
+  ebml->writing_streamheader = FALSE;
+  return header;
+}
+
+/**
+ * gst_ebml_write_set_cache:
+ * @ebml: a #GstEbmlWrite.
+ * @size: size of the cache.
+ * Create a cache.
+ *
+ * The idea is that you use this for writing a lot
+ * of small elements. This will just "queue" all of
+ * them and they'll be pushed to the next element all
+ * at once. This saves memory and time for buffer
+ * allocation and init, and it looks better.
+ *
+ * The queued data is pushed via gst_ebml_write_flush_cache().
+ */
+void
+gst_ebml_write_set_cache (GstEbmlWrite * ebml, guint size)
+{
+  g_return_if_fail (ebml->cache == NULL);
+
+  GST_DEBUG ("Starting cache at %" G_GUINT64_FORMAT, ebml->pos);
+  ebml->cache = gst_byte_writer_new_with_size (size, FALSE);
+  ebml->cache_pos = ebml->pos;
+}
+
+/* push a new segment event announcing @new_pos downstream; BYTES format
+ * for seekable output, TIME format when streaming */
+static gboolean
+gst_ebml_writer_send_segment_event (GstEbmlWrite * ebml, guint64 new_pos)
+{
+  GstSegment segment;
+  gboolean res;
+
+  GST_INFO ("seeking to %" G_GUINT64_FORMAT, new_pos);
+
+  gst_segment_init (&segment,
+      ebml->streamable ? GST_FORMAT_TIME : GST_FORMAT_BYTES);
+  segment.start = new_pos;
+  segment.stop = -1;
+  segment.position = 0;
+
+  res = gst_pad_push_event (ebml->srcpad, gst_event_new_segment (&segment));
+
+  if (!res)
+    /* note: the format string previously lacked the space before "failed" */
+    GST_WARNING ("seek to %" G_GUINT64_FORMAT " failed", new_pos);
+
+  return res;
+}
+
+/**
+ * gst_ebml_write_flush_cache:
+ * @ebml: a #GstEbmlWrite.
+ * @is_keyframe: if %FALSE the pushed buffer is flagged as a delta unit.
+ * @timestamp: timestamp of the buffer.
+ *
+ * Flush the cache.
+ */
+void
+gst_ebml_write_flush_cache (GstEbmlWrite * ebml, gboolean is_keyframe,
+    GstClockTime timestamp)
+{
+  GstBuffer *buffer;
+
+  if (!ebml->cache)
+    return;
+
+  /* hand the accumulated bytes over as one buffer; the cache is gone
+   * after this, further writes go straight downstream */
+  buffer = gst_byte_writer_free_and_get_buffer (ebml->cache);
+  ebml->cache = NULL;
+  GST_DEBUG ("Flushing cache of size %" G_GSIZE_FORMAT,
+      gst_buffer_get_size (buffer));
+  GST_BUFFER_TIMESTAMP (buffer) = timestamp;
+  GST_BUFFER_OFFSET (buffer) = ebml->pos - gst_buffer_get_size (buffer);
+  GST_BUFFER_OFFSET_END (buffer) = ebml->pos;
+  if (ebml->last_write_result == GST_FLOW_OK) {
+    /* a gap relative to the last pushed byte means a seek happened:
+     * announce the new byte position and mark the discontinuity */
+    if (GST_BUFFER_OFFSET (buffer) != ebml->last_pos) {
+      gst_ebml_writer_send_segment_event (ebml, GST_BUFFER_OFFSET (buffer));
+      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+    } else {
+      GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
+    }
+    if (ebml->writing_streamheader) {
+      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_HEADER);
+    } else {
+      GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_HEADER);
+    }
+    if (!is_keyframe) {
+      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+    }
+    ebml->last_pos = ebml->pos;
+    ebml->last_write_result = gst_pad_push (ebml->srcpad, buffer);
+  } else {
+    /* a previous push failed; drop the data instead of pushing more */
+    gst_buffer_unref (buffer);
+  }
+}
+
+
+/**
+ * gst_ebml_write_element_new:
+ * @ebml: a #GstEbmlWrite.
+ * @map: map info filled in for writing into the returned buffer.
+ * @size: size of the requested buffer (payload only).
+ *
+ * Create a writable buffer for one element, with room reserved for the
+ * ID and length prefix.
+ *
+ * Returns: A new #GstBuffer.
+ */
+static GstBuffer *
+gst_ebml_write_element_new (GstEbmlWrite * ebml, GstMapInfo * map, guint size)
+{
+  /* Create new buffer of size + ID + length */
+  GstBuffer *buf;
+
+  /* length, ID: worst case is 4 ID bytes plus 8 length bytes */
+  size += 12;
+
+  buf = gst_buffer_new_and_alloc (size);
+  GST_BUFFER_TIMESTAMP (buf) = ebml->timestamp;
+
+  /* FIXME unmap not possible */
+  gst_buffer_map (buf, map, GST_MAP_WRITE);
+
+  return buf;
+}
+
+
+/**
+ * gst_ebml_write_element_id:
+ * @data_inout: Pointer to data pointer
+ * @id: Element ID that should be written.
+ *
+ * Write element ID into a buffer.
+ */
+static void
+gst_ebml_write_element_id (guint8 ** data_inout, guint32 id)
+{
+  guint8 *data = *data_inout;
+  guint bytes = 4, mask = 0x10;
+
+  /* get ID length: the marker bit of an n-byte ID sits in its top byte
+   * (0x80 for 1 byte, 0x40 for 2, 0x20 for 3, 0x10 for 4).
+   * Check bytes > 0 *before* computing the shift: with the original
+   * operand order an ID with no marker bit anywhere (e.g. 0) evaluated
+   * mask << ((0 - 1) * 8), a negative shift count and undefined behavior */
+  while (bytes > 0 && !(id & (mask << ((bytes - 1) * 8)))) {
+    mask <<= 1;
+    bytes--;
+  }
+
+  /* if invalid ID, use dummy */
+  if (bytes == 0) {
+    GST_WARNING ("Invalid ID, voiding");
+    bytes = 1;
+    id = GST_EBML_ID_VOID;
+  }
+
+  /* write out, BE */
+  *data_inout += bytes;
+  while (bytes--) {
+    data[bytes] = id & 0xff;
+    id >>= 8;
+  }
+}
+
+
+/**
+ * gst_ebml_write_element_size:
+ * @data_inout: Pointer to data pointer
+ * @size: Element length in bytes, or #GST_EBML_SIZE_UNKNOWN to emit the
+ * 8-byte "unknown size" marker.
+ *
+ * Write element length into a buffer.
+ */
+static void
+gst_ebml_write_element_size (guint8 ** data_inout, guint64 size)
+{
+  guint8 *data = *data_inout;
+  guint bytes = 1, mask = 0x80;
+
+  if (size != GST_EBML_SIZE_UNKNOWN) {
+    /* how many bytes? - use mask-1 because an all-1 bitset is not allowed */
+    while (bytes <= 8 && (size >> ((bytes - 1) * 8)) >= (mask - 1)) {
+      mask >>= 1;
+      bytes++;
+    }
+
+    /* if invalid size, use max. */
+    if (bytes > 8) {
+      GST_WARNING ("Invalid size, writing size unknown");
+      mask = 0x01;
+      bytes = 8;
+      /* Now here's a real FIXME: we cannot read those yet! */
+      size = GST_EBML_SIZE_UNKNOWN;
+    }
+  } else {
+    mask = 0x01;
+    bytes = 8;
+  }
+
+  /* write out, BE, with length size marker */
+  *data_inout += bytes;
+  while (bytes-- > 0) {
+    data[bytes] = size & 0xff;
+    size >>= 8;
+    /* the length descriptor bit goes into the most significant byte */
+    if (!bytes)
+      *data |= mask;
+  }
+}
+
+
+/**
+ * gst_ebml_write_element_data:
+ * @data_inout: Pointer to data pointer
+ * @write: Data that should be written.
+ * @length: Length of the data.
+ *
+ * Write element data into a buffer and advance the write pointer.
+ */
+static void
+gst_ebml_write_element_data (guint8 ** data_inout, guint8 * write,
+    guint64 length)
+{
+  guint8 *dst = *data_inout;
+
+  memcpy (dst, write, length);
+  *data_inout = dst + length;
+}
+
+
+/**
+ * gst_ebml_write_element_push:
+ * @ebml: #GstEbmlWrite
+ * @buf: #GstBuffer to be written.
+ * @buf_data: Start of data to push from @buf (or NULL for whole buffer).
+ * @buf_data_end: Data pointer positioned after the last byte in @buf_data (or
+ * NULL for whole buffer).
+ *
+ * Write out buffer by moving it to the next element.
+ */
+static void
+gst_ebml_write_element_push (GstEbmlWrite * ebml, GstBuffer * buf,
+    guint8 * buf_data, guint8 * buf_data_end)
+{
+  GstMapInfo map;
+  guint data_size;
+
+  /* map.data doubles as the "did we map the buffer ourselves" flag */
+  map.data = NULL;
+
+  if (buf_data_end)
+    data_size = buf_data_end - buf_data;
+  else
+    data_size = gst_buffer_get_size (buf);
+
+  ebml->pos += data_size;
+
+  /* if there's no cache, then don't push it! */
+  if (ebml->writing_streamheader) {
+    if (!buf_data) {
+      gst_buffer_map (buf, &map, GST_MAP_READ);
+      buf_data = map.data;
+    }
+    if (!buf_data)
+      GST_WARNING ("Failed to map buffer");
+    else if (!gst_byte_writer_put_data (ebml->streamheader, buf_data,
+        data_size))
+      GST_WARNING ("Error writing data to streamheader");
+  }
+  if (ebml->cache) {
+    /* cached mode: append to the cache and consume the buffer here */
+    if (!buf_data) {
+      gst_buffer_map (buf, &map, GST_MAP_READ);
+      buf_data = map.data;
+    }
+    if (!buf_data)
+      GST_WARNING ("Failed to map buffer");
+    else if (!gst_byte_writer_put_data (ebml->cache, buf_data, data_size))
+      GST_WARNING ("Error writing data to cache");
+    if (map.data)
+      gst_buffer_unmap (buf, &map);
+    gst_buffer_unref (buf);
+    return;
+  }
+
+  if (buf_data && map.data)
+    gst_buffer_unmap (buf, &map);
+
+  if (ebml->last_write_result == GST_FLOW_OK) {
+    buf = gst_buffer_make_writable (buf);
+    GST_BUFFER_OFFSET (buf) = ebml->pos - data_size;
+    GST_BUFFER_OFFSET_END (buf) = ebml->pos;
+    if (ebml->writing_streamheader) {
+      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+    } else {
+      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_HEADER);
+    }
+    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+    /* a jump relative to the last pushed byte means a seek happened:
+     * announce the new byte position downstream */
+    if (GST_BUFFER_OFFSET (buf) != ebml->last_pos) {
+      gst_ebml_writer_send_segment_event (ebml, GST_BUFFER_OFFSET (buf));
+      GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+    } else {
+      GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+    }
+    ebml->last_pos = ebml->pos;
+    ebml->last_write_result = gst_pad_push (ebml->srcpad, buf);
+  } else {
+    /* an earlier push already failed; drop data until the error is
+     * collected via gst_ebml_last_write_result() */
+    gst_buffer_unref (buf);
+  }
+}
+
+
+/**
+ * gst_ebml_write_seek:
+ * @ebml: #GstEbmlWrite
+ * @pos: Seek position.
+ *
+ * Seek.
+ */
+void
+gst_ebml_write_seek (GstEbmlWrite * ebml, guint64 pos)
+{
+  if (ebml->writing_streamheader) {
+    GST_DEBUG ("wanting to seek to pos %" G_GUINT64_FORMAT, pos);
+    /* only reposition inside the accumulated streamheader bytes */
+    if (pos >= ebml->streamheader_pos &&
+        pos <= ebml->streamheader_pos + ebml->streamheader->parent.size) {
+      gst_byte_writer_set_pos (ebml->streamheader,
+          pos - ebml->streamheader_pos);
+      GST_DEBUG ("seeked in streamheader to position %" G_GUINT64_FORMAT,
+          pos - ebml->streamheader_pos);
+    } else {
+      GST_WARNING
+          ("we are writing streamheader still and seek is out of bounds");
+    }
+  }
+  /* Cache seeking. A bit dangerous, we assume the client writer
+   * knows what he's doing... */
+  if (ebml->cache) {
+    /* within bounds? then just move the byte-writer cursor */
+    if (pos >= ebml->cache_pos &&
+        pos <= ebml->cache_pos + ebml->cache->parent.size) {
+      GST_DEBUG ("seeking in cache to %" G_GUINT64_FORMAT, pos);
+      ebml->pos = pos;
+      gst_byte_writer_set_pos (ebml->cache, ebml->pos - ebml->cache_pos);
+      return;
+    } else {
+      GST_LOG ("Seek outside cache range. Clearing...");
+      gst_ebml_write_flush_cache (ebml, FALSE, GST_CLOCK_TIME_NONE);
+    }
+  }
+
+  /* no cache (or cache flushed): record the new position; the next push
+   * will emit a segment event + DISCONT for the jump */
+  GST_INFO ("scheduling seek to %" G_GUINT64_FORMAT, pos);
+  ebml->pos = pos;
+}
+
+
+/**
+ * gst_ebml_write_get_uint_size:
+ * @num: Number to be encoded.
+ *
+ * Get number of bytes needed to write a uint.
+ *
+ * Returns: Encoded uint length.
+ */
+static guint
+gst_ebml_write_get_uint_size (guint64 num)
+{
+  guint bytes;
+
+  /* smallest byte count (1..8) whose unsigned range can hold @num */
+  for (bytes = 1; bytes < 8; bytes++) {
+    if (num < (G_GINT64_CONSTANT (1) << (bytes * 8)))
+      break;
+  }
+
+  return bytes;
+}
+
+
+/**
+ * gst_ebml_write_set_uint:
+ * @data_inout: Pointer to data pointer
+ * @num: Number to be written.
+ * @size: Encoded number length.
+ *
+ * Write an uint into a buffer.
+ */
+static void
+gst_ebml_write_set_uint (guint8 ** data_inout, guint64 num, guint size)
+{
+  guint8 *out = *data_inout;
+  guint i;
+
+  /* advance the caller's cursor past the field we are about to fill */
+  *data_inout = out + size;
+
+  /* store big-endian: least-significant byte lands at the highest offset */
+  for (i = size; i > 0; i--) {
+    out[i - 1] = (guint8) (num & 0xff);
+    num >>= 8;
+  }
+}
+
+
+/**
+ * gst_ebml_write_uint:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @num: Number to be written.
+ *
+ * Write uint element.
+ */
+void
+gst_ebml_write_uint (GstEbmlWrite * ebml, guint32 id, guint64 num)
+{
+  GstBuffer *buf;
+  guint8 *data_start, *data_end;
+  guint size = gst_ebml_write_get_uint_size (num);
+  GstMapInfo map;
+
+  /* sizeof (num) = 8 bytes of payload headroom; the id and size marker
+   * headroom presumably comes from _element_new() — defined earlier */
+  buf = gst_ebml_write_element_new (ebml, &map, sizeof (num));
+  data_end = data_start = map.data;
+
+  /* write id, size marker, then the value in as few bytes as possible */
+  gst_ebml_write_element_id (&data_end, id);
+  gst_ebml_write_element_size (&data_end, size);
+  gst_ebml_write_set_uint (&data_end, num, size);
+  gst_buffer_unmap (buf, &map);
+  /* shrink the buffer to the bytes actually written */
+  gst_buffer_set_size (buf, (data_end - data_start));
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+}
+
+
+/**
+ * gst_ebml_write_sint:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @num: Number to be written.
+ *
+ * Write sint element.
+ */
+void
+gst_ebml_write_sint (GstEbmlWrite * ebml, guint32 id, gint64 num)
+{
+  GstBuffer *buf;
+  guint8 *data_start, *data_end;
+  GstMapInfo map;
+
+  /* if the signed number is on the edge of a extra-byte,
+   * then we'll fall over when detecting it. Example: if I
+   * have a number (-)0x8000 (G_MINSHORT), then my abs()<<1
+   * will be 0x10000; this is G_MAXUSHORT+1! So: if (<0) -1. */
+  /* this unum is only used to pick the encoded size; the real
+   * two's-complement value is recomputed below */
+  guint64 unum = (num < 0 ? (-num - 1) << 1 : num << 1);
+  guint size = gst_ebml_write_get_uint_size (unum);
+
+  buf = gst_ebml_write_element_new (ebml, &map, sizeof (num));
+  data_end = data_start = map.data;
+
+  /* make unsigned: fold @num into a size-byte two's-complement value */
+  if (num >= 0) {
+    unum = num;
+  } else {
+    unum = ((guint64) 0x80) << ((size - 1) * 8);
+    unum += num;
+    /* NOTE(review): the |= below looks redundant — the addition above
+     * should leave the sign bit set for any in-range @num; kept as-is */
+    unum |= ((guint64) 0x80) << ((size - 1) * 8);
+  }
+
+  /* write */
+  gst_ebml_write_element_id (&data_end, id);
+  gst_ebml_write_element_size (&data_end, size);
+  gst_ebml_write_set_uint (&data_end, unum, size);
+  gst_buffer_unmap (buf, &map);
+  gst_buffer_set_size (buf, (data_end - data_start));
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+}
+
+
+/**
+ * gst_ebml_write_float:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @num: Number to be written.
+ *
+ * Write float element.
+ */
+void
+gst_ebml_write_float (GstEbmlWrite * ebml, guint32 id, gdouble num)
+{
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *data_start, *data_end;
+
+  buf = gst_ebml_write_element_new (ebml, &map, sizeof (num));
+  data_end = data_start = map.data;
+
+  /* floats are always written as 8-byte big-endian doubles,
+   * never as the shorter 4-byte EBML float form */
+  gst_ebml_write_element_id (&data_end, id);
+  gst_ebml_write_element_size (&data_end, 8);
+  num = GDOUBLE_TO_BE (num);
+  gst_ebml_write_element_data (&data_end, (guint8 *) & num, 8);
+  gst_buffer_unmap (buf, &map);
+  gst_buffer_set_size (buf, (data_end - data_start));
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+}
+
+
+/**
+ * gst_ebml_write_ascii:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @str: String to be written.
+ *
+ * Write string element.
+ */
+void
+gst_ebml_write_ascii (GstEbmlWrite * ebml, guint32 id, const gchar * str)
+{
+  gint len = strlen (str) + 1;  /* add trailing '\0' */
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *data_start, *data_end;
+
+  /* NOTE(review): the terminating NUL byte is included in the element
+   * payload (len counts it), so readers see a NUL-padded string */
+  buf = gst_ebml_write_element_new (ebml, &map, len);
+  data_end = data_start = map.data;
+
+  gst_ebml_write_element_id (&data_end, id);
+  gst_ebml_write_element_size (&data_end, len);
+  gst_ebml_write_element_data (&data_end, (guint8 *) str, len);
+  gst_buffer_unmap (buf, &map);
+  gst_buffer_set_size (buf, (data_end - data_start));
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+}
+
+
+/**
+ * gst_ebml_write_utf8:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @str: String to be written.
+ *
+ * Write utf8 encoded string element.
+ */
+void
+gst_ebml_write_utf8 (GstEbmlWrite * ebml, guint32 id, const gchar * str)
+{
+  /* no conversion or validation is performed; assumes @str is already
+   * valid UTF-8, in which case the byte-wise ASCII writer suffices */
+  gst_ebml_write_ascii (ebml, id, str);
+}
+
+
+/**
+ * gst_ebml_write_date:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @date: Date in nanoseconds since the unix epoch.
+ *
+ * Write date element.
+ */
+void
+gst_ebml_write_date (GstEbmlWrite * ebml, guint32 id, gint64 date)
+{
+  /* EBML dates are signed nanoseconds relative to GST_EBML_DATE_OFFSET
+   * (presumably the Matroska epoch — the constant is defined elsewhere) */
+  gst_ebml_write_sint (ebml, id, date - GST_EBML_DATE_OFFSET);
+}
+
+/**
+ * gst_ebml_write_master_start:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ *
+ * Start writing master element.
+ *
+ * Master writing is annoying. We use a size marker of
+ * the max. allowed length, so that we can later fill it
+ * in validly.
+ *
+ * Returns: Master starting position.
+ */
+guint64
+gst_ebml_write_master_start (GstEbmlWrite * ebml, guint32 id)
+{
+  guint64 pos = ebml->pos;
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *data_start, *data_end;
+
+  buf = gst_ebml_write_element_new (ebml, &map, 0);
+  data_end = data_start = map.data;
+
+  gst_ebml_write_element_id (&data_end, id);
+  /* return the position of the size field, not of the element id,
+   * so _master_finish() can overwrite the placeholder directly */
+  pos += data_end - data_start;
+  /* max-length "unknown size" placeholder, patched in later */
+  gst_ebml_write_element_size (&data_end, GST_EBML_SIZE_UNKNOWN);
+  gst_buffer_unmap (buf, &map);
+  gst_buffer_set_size (buf, (data_end - data_start));
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+
+  return pos;
+}
+
+
+/**
+ * gst_ebml_write_master_finish_full:
+ * @ebml: #GstEbmlWrite
+ * @startpos: Master starting position.
+ *
+ * Finish writing master element. Size of master element is difference between
+ * current position and the element start, and @extra_size added to this.
+ */
+void
+gst_ebml_write_master_finish_full (GstEbmlWrite * ebml, guint64 startpos,
+    guint64 extra_size)
+{
+  guint64 pos = ebml->pos;
+  guint8 *data = g_malloc (8);
+  GstBuffer *buf = gst_buffer_new_wrapped (data, 8);
+
+  /* jump back to the 8-byte size placeholder left by _master_start() */
+  gst_ebml_write_seek (ebml, startpos);
+
+  /* the 1 << 56 bit is the EBML length descriptor for an 8-byte size
+   * field; the low 56 bits carry the element length, i.e. the distance
+   * from the end of the size field to the current position */
+  GST_WRITE_UINT64_BE (data,
+      (G_GINT64_CONSTANT (1) << 56) | (pos - startpos - 8 + extra_size));
+
+  gst_ebml_write_element_push (ebml, buf, NULL, NULL);
+  /* restore the previous write position */
+  gst_ebml_write_seek (ebml, pos);
+}
+
+/* Convenience wrapper: finish a master element with no extra size. */
+void
+gst_ebml_write_master_finish (GstEbmlWrite * ebml, guint64 startpos)
+{
+  gst_ebml_write_master_finish_full (ebml, startpos, 0);
+}
+
+/**
+ * gst_ebml_write_binary:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @binary: Data to be written.
+ * @length: Length of the data
+ *
+ * Write an element with binary data.
+ */
+void
+gst_ebml_write_binary (GstEbmlWrite * ebml,
+    guint32 id, guint8 * binary, guint64 length)
+{
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *data_start, *data_end;
+
+  /* copies @binary into a freshly allocated element; for large media
+   * payloads prefer _write_buffer_header() + _write_buffer() instead */
+  buf = gst_ebml_write_element_new (ebml, &map, length);
+  data_end = data_start = map.data;
+
+  gst_ebml_write_element_id (&data_end, id);
+  gst_ebml_write_element_size (&data_end, length);
+  gst_ebml_write_element_data (&data_end, binary, length);
+  gst_buffer_unmap (buf, &map);
+  gst_buffer_set_size (buf, (data_end - data_start));
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+}
+
+
+/**
+ * gst_ebml_write_buffer_header:
+ * @ebml: #GstEbmlWrite
+ * @id: Element ID.
+ * @length: Length of the data
+ *
+ * Write header of the binary element (use with gst_ebml_write_buffer function).
+ *
+ * For things like video frames and audio samples,
+ * you want to use this function, as it doesn't have
+ * the overhead of memcpy() that other functions
+ * such as write_binary() do have.
+ */
+void
+gst_ebml_write_buffer_header (GstEbmlWrite * ebml, guint32 id, guint64 length)
+{
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *data_start, *data_end;
+
+  /* emits only the id + size marker; the payload itself follows via
+   * gst_ebml_write_buffer(), avoiding a memcpy of the media data */
+  buf = gst_ebml_write_element_new (ebml, &map, 0);
+  data_end = data_start = map.data;
+
+  gst_ebml_write_element_id (&data_end, id);
+  gst_ebml_write_element_size (&data_end, length);
+  gst_buffer_unmap (buf, &map);
+  gst_buffer_set_size (buf, (data_end - data_start));
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+}
+
+
+/**
+ * gst_ebml_write_buffer:
+ * @ebml: #GstEbmlWrite
+ * @buf: #GstBuffer containing the data.
+ *
+ * Write binary element (see gst_ebml_write_buffer_header).
+ */
+void
+gst_ebml_write_buffer (GstEbmlWrite * ebml, GstBuffer * buf)
+{
+  /* takes ownership of @buf; pushed (or cached) without copying */
+  gst_ebml_write_element_push (ebml, buf, NULL, NULL);
+}
+
+
+/**
+ * gst_ebml_replace_uint:
+ * @ebml: #GstEbmlWrite
+ * @pos: Position of the uint that should be replaced.
+ * @num: New value.
+ *
+ * Replace uint with a new value.
+ *
+ * When replacing a uint, we assume that it is *always*
+ * 8-byte, since that's the safest guess we can do. This
+ * is just for simplicity.
+ *
+ * FIXME: this function needs to be replaced with something
+ * proper. This is a crude hack.
+ */
+void
+gst_ebml_replace_uint (GstEbmlWrite * ebml, guint64 pos, guint64 num)
+{
+  guint64 oldpos = ebml->pos;
+  guint8 *data_start, *data_end;
+  GstBuffer *buf;
+
+  /* ownership of data_start passes to the wrapped buffer */
+  data_start = g_malloc (8);
+  data_end = data_start;
+  buf = gst_buffer_new_wrapped (data_start, 8);
+
+  /* overwrite exactly 8 bytes at @pos, then restore the old position */
+  gst_ebml_write_seek (ebml, pos);
+  gst_ebml_write_set_uint (&data_end, num, 8);
+
+  gst_ebml_write_element_push (ebml, buf, data_start, data_end);
+  gst_ebml_write_seek (ebml, oldpos);
+}
+
+/**
+ * gst_ebml_write_header:
+ * @ebml: #GstEbmlWrite
+ * @doctype: Document type.
+ * @version: Document type version.
+ *
+ * Write EBML header.
+ */
+void
+gst_ebml_write_header (GstEbmlWrite * ebml, const gchar * doctype,
+    guint version)
+{
+  guint64 pos;
+
+  /* write the basic EBML header; cache it (0x40 bytes) so it goes out
+   * as a single buffer */
+  gst_ebml_write_set_cache (ebml, 0x40);
+  pos = gst_ebml_write_master_start (ebml, GST_EBML_ID_HEADER);
+#if (GST_EBML_VERSION != 1)
+  gst_ebml_write_uint (ebml, GST_EBML_ID_EBMLVERSION, GST_EBML_VERSION);
+  gst_ebml_write_uint (ebml, GST_EBML_ID_EBMLREADVERSION, GST_EBML_VERSION);
+#endif
+#if 0
+  /* we don't write these until they're "non-default" (never!) */
+  gst_ebml_write_uint (ebml, GST_EBML_ID_EBMLMAXIDLENGTH, sizeof (guint32));
+  gst_ebml_write_uint (ebml, GST_EBML_ID_EBMLMAXSIZELENGTH, sizeof (guint64));
+#endif
+  gst_ebml_write_ascii (ebml, GST_EBML_ID_DOCTYPE, doctype);
+  gst_ebml_write_uint (ebml, GST_EBML_ID_DOCTYPEVERSION, version);
+  gst_ebml_write_uint (ebml, GST_EBML_ID_DOCTYPEREADVERSION, version);
+  gst_ebml_write_master_finish (ebml, pos);
+  gst_ebml_write_flush_cache (ebml, FALSE, 0);
+}
diff --git a/gst/matroska/ebml-write.h b/gst/matroska/ebml-write.h
new file mode 100644
index 0000000000..08a170e214
--- /dev/null
+++ b/gst/matroska/ebml-write.h
@@ -0,0 +1,154 @@
+/* GStreamer EBML I/O
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2005 Michal Benes <michal.benes@xeris.cz>
+ *
+ * ebml-write.c: write EBML data to file/stream
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_EBML_WRITE_H__
+#define __GST_EBML_WRITE_H__
+
+#include <glib.h>
+#include <gst/gst.h>
+#include <gst/base/gstbytewriter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_EBML_WRITE \
+ (gst_ebml_write_get_type ())
+#define GST_EBML_WRITE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_EBML_WRITE, GstEbmlWrite))
+#define GST_EBML_WRITE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_EBML_WRITE, GstEbmlWriteClass))
+#define GST_IS_EBML_WRITE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_EBML_WRITE))
+#define GST_IS_EBML_WRITE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_EBML_WRITE))
+#define GST_EBML_WRITE_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_EBML_WRITE, GstEbmlWriteClass))
+
+typedef struct _GstEbmlWrite {
+  GstObject object;
+
+  GstPad *srcpad;                   /* pad all output buffers are pushed on */
+  guint64 pos;                      /* current byte position in the stream */
+  guint64 last_pos;                 /* stream position after the last push */
+  GstClockTime timestamp;
+
+  GstByteWriter *cache;             /* optional write cache (NULL if unused) */
+  guint64 cache_pos;                /* stream position where the cache starts */
+
+  GstFlowReturn last_write_result;  /* sticky result of the last pad push */
+
+  gboolean writing_streamheader;    /* TRUE while accumulating streamheaders */
+  GstByteWriter *streamheader;      /* streamheader accumulator */
+  guint64 streamheader_pos;         /* stream position of streamheader start */
+
+  GstCaps *caps;
+
+  gboolean streamable;
+} GstEbmlWrite;
+
+typedef struct _GstEbmlWriteClass {
+  GstObjectClass parent;
+} GstEbmlWriteClass;
+
+GType gst_ebml_write_get_type (void);
+
+GstEbmlWrite *gst_ebml_write_new (GstPad *srcpad);
+void gst_ebml_write_reset (GstEbmlWrite *ebml);
+
+GstFlowReturn gst_ebml_last_write_result (GstEbmlWrite *ebml);
+
+/* Used to create streamheaders */
+void gst_ebml_start_streamheader (GstEbmlWrite *ebml);
+GstBuffer* gst_ebml_stop_streamheader (GstEbmlWrite *ebml);
+
+/*
+ * Caching means that we do not push one buffer for
+ * each element, but fill this one until a flush.
+ */
+void gst_ebml_write_set_cache (GstEbmlWrite *ebml,
+ guint size);
+void gst_ebml_write_flush_cache (GstEbmlWrite *ebml,
+ gboolean is_keyframe,
+ GstClockTime timestamp);
+
+/*
+ * Seeking.
+ */
+void gst_ebml_write_seek (GstEbmlWrite *ebml,
+ guint64 pos);
+
+/*
+ * Data writing.
+ */
+void gst_ebml_write_uint (GstEbmlWrite *ebml,
+ guint32 id,
+ guint64 num);
+void gst_ebml_write_sint (GstEbmlWrite *ebml,
+ guint32 id,
+ gint64 num);
+void gst_ebml_write_float (GstEbmlWrite *ebml,
+ guint32 id,
+ gdouble num);
+void gst_ebml_write_ascii (GstEbmlWrite *ebml,
+ guint32 id,
+ const gchar *str);
+void gst_ebml_write_utf8 (GstEbmlWrite *ebml,
+ guint32 id,
+ const gchar *str);
+void gst_ebml_write_date (GstEbmlWrite *ebml,
+ guint32 id,
+ gint64 date);
+guint64 gst_ebml_write_master_start (GstEbmlWrite *ebml,
+ guint32 id);
+void gst_ebml_write_master_finish (GstEbmlWrite *ebml,
+ guint64 startpos);
+void gst_ebml_write_master_finish_full (GstEbmlWrite * ebml,
+ guint64 startpos,
+ guint64 extra_size);
+void gst_ebml_write_binary (GstEbmlWrite *ebml,
+ guint32 id,
+ guchar *binary,
+ guint64 length);
+void gst_ebml_write_header (GstEbmlWrite *ebml,
+ const gchar *doctype,
+ guint version);
+
+/*
+ * Note: this is supposed to be used only for media data.
+ */
+void gst_ebml_write_buffer_header (GstEbmlWrite *ebml,
+ guint32 id,
+ guint64 length);
+void gst_ebml_write_buffer (GstEbmlWrite *ebml,
+ GstBuffer *data);
+
+/*
+ * A hack, basically... See matroska-mux.c. I should actually
+ * make a nice _replace_element_with_size() or so, but this
+ * works for now.
+ */
+void gst_ebml_replace_uint (GstEbmlWrite *ebml,
+ guint64 pos,
+ guint64 num);
+
+G_END_DECLS
+
+#endif /* __GST_EBML_WRITE_H__ */
diff --git a/gst/matroska/gstmatroskaelement.c b/gst/matroska/gstmatroskaelement.c
new file mode 100644
index 0000000000..19cc6ec61a
--- /dev/null
+++ b/gst/matroska/gstmatroskaelement.c
@@ -0,0 +1,43 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * matroska.c: plugin loader
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstmatroskaelements.h"
+#include "matroska-parse.h"
+#include "matroska-read-common.h"
+
+#include <gst/pbutils/pbutils.h>
+
+void
+matroska_element_init (GstPlugin * plugin)
+{
+  /* one-time shared initialization for all matroska elements; guarded
+   * so concurrent/repeated registration runs it exactly once */
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+    gst_pb_utils_init ();
+    gst_matroska_register_tags ();
+    GST_DEBUG_CATEGORY_INIT (matroskareadcommon_debug, "matroskareadcommon", 0,
+        "Matroska demuxer/parser shared debug");
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/gst/matroska/gstmatroskaelements.h b/gst/matroska/gstmatroskaelements.h
new file mode 100644
index 0000000000..0eff6fab80
--- /dev/null
+++ b/gst/matroska/gstmatroskaelements.h
@@ -0,0 +1,41 @@
+/*
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MATROSKA_ELEMENTS_H__
+#define __GST_MATROSKA_ELEMENTS_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+void matroska_element_init (GstPlugin * plugin);
+
+GST_ELEMENT_REGISTER_DECLARE (matroskademux);
+GST_ELEMENT_REGISTER_DECLARE (matroskaparse);
+GST_ELEMENT_REGISTER_DECLARE (matroskamux);
+GST_ELEMENT_REGISTER_DECLARE (webmmux);
+
+G_END_DECLS
+
+#endif /* __GST_MATROSKA_ELEMENTS_H__ */
diff --git a/gst/matroska/lzo.c b/gst/matroska/lzo.c
new file mode 100644
index 0000000000..9d1e84832a
--- /dev/null
+++ b/gst/matroska/lzo.c
@@ -0,0 +1,292 @@
+/*
+ * LZO 1x decompression
+ * Copyright (c) 2006 Reimar Doeffinger
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <stdlib.h>
+#include <string.h>
+#include "lzo.h"
+
+/*! define if we may write up to 12 bytes beyond the output buffer */
+/* #define OUTBUF_PADDED 1 */
+/*! define if we may read up to 8 bytes beyond the input buffer */
+/* #define INBUF_PADDED 1 */
+typedef struct LZOContext
+{
+ const guint8 *in, *in_end;
+ guint8 *out_start, *out, *out_end;
+ int error;
+} LZOContext;
+
+/*
+ * \brief read one byte from input buffer, avoiding overrun
+ * \return byte read
+ */
+static inline int
+get_byte (LZOContext * c)
+{
+  if (c->in < c->in_end)
+    return *c->in++;
+  c->error |= LZO_INPUT_DEPLETED;
+  /* return nonzero (not 0) on depletion so get_len()'s
+   * while (!(x = get_byte (c))) loop is guaranteed to terminate */
+  return 1;
+}
+
+#ifdef INBUF_PADDED
+#define GETB(c) (*(c).in++)
+#else
+#define GETB(c) get_byte(&(c))
+#endif
+
+/*
+ * \brief decode a length value in the coding used by lzo
+ * \param x previous byte value
+ * \param mask bits used from x
+ * \return decoded length value
+ */
+static inline int
+get_len (LZOContext * c, int x, int mask)
+{
+  int cnt = x & mask;
+  /* a zero count means the real length follows: each 0x00 byte adds
+   * 255, then the first nonzero byte (plus mask) completes the value */
+  if (!cnt) {
+    while (!(x = get_byte (c)))
+      cnt += 255;
+    cnt += mask + x;
+  }
+  return cnt;
+}
+
+/*#define UNALIGNED_LOADSTORE */
+#define BUILTIN_MEMCPY
+#ifdef UNALIGNED_LOADSTORE
+#define COPY2(d, s) *(guint16 *)(d) = *(guint16 *)(s);
+#define COPY4(d, s) *(guint32 *)(d) = *(guint32 *)(s);
+#elif defined(BUILTIN_MEMCPY)
+#define COPY2(d, s) memcpy(d, s, 2);
+#define COPY4(d, s) memcpy(d, s, 4);
+#else
+#define COPY2(d, s) (d)[0] = (s)[0]; (d)[1] = (s)[1];
+#define COPY4(d, s) (d)[0] = (s)[0]; (d)[1] = (s)[1]; (d)[2] = (s)[2]; (d)[3] = (s)[3];
+#endif
+
+/*
+ * \brief copy bytes from input to output buffer with checking
+ * \param cnt number of bytes to copy, must be >= 0
+ */
+static inline void
+copy (LZOContext * c, int cnt)
+{
+  register const guint8 *src = c->in;
+  register guint8 *dst = c->out;
+  /* clamp the copy to what actually remains in the input... */
+  if (cnt > c->in_end - src) {
+    cnt = MAX (c->in_end - src, 0);
+    c->error |= LZO_INPUT_DEPLETED;
+  }
+  /* ...and to the free space left in the output */
+  if (cnt > c->out_end - dst) {
+    cnt = MAX (c->out_end - dst, 0);
+    c->error |= LZO_OUTPUT_FULL;
+  }
+#if defined(INBUF_PADDED) && defined(OUTBUF_PADDED)
+  /* with padded buffers, speculatively copy 4 bytes first */
+  COPY4 (dst, src);
+  src += 4;
+  dst += 4;
+  cnt -= 4;
+  if (cnt > 0)
+#endif
+    memcpy (dst, src, cnt);
+  c->in = src + cnt;
+  c->out = dst + cnt;
+}
+
+/*
+ * \brief copy previously decoded bytes to current position
+ * \param back how many bytes back we start
+ * \param cnt number of bytes to copy, must be >= 0
+ *
+ * cnt > back is valid, this will copy the bytes we just copied,
+ * thus creating a repeating pattern with a period length of back.
+ */
+static inline void
+copy_backptr (LZOContext * c, int back, int cnt)
+{
+  register const guint8 *src = &c->out[-back];
+  register guint8 *dst = c->out;
+  /* the back-reference must point into already-produced output */
+  if (src < c->out_start || src > dst) {
+    c->error |= LZO_INVALID_BACKPTR;
+    return;
+  }
+  if (cnt > c->out_end - dst) {
+    cnt = MAX (c->out_end - dst, 0);
+    c->error |= LZO_OUTPUT_FULL;
+  }
+  if (back == 1) {
+    /* distance 1 is run-length expansion of a single byte */
+    memset (dst, *src, cnt);
+    dst += cnt;
+  } else {
+#ifdef OUTBUF_PADDED
+    COPY2 (dst, src);
+    COPY2 (dst + 2, src + 2);
+    src += 4;
+    dst += 4;
+    cnt -= 4;
+    if (cnt > 0) {
+      COPY2 (dst, src);
+      COPY2 (dst + 2, src + 2);
+      COPY2 (dst + 4, src + 4);
+      COPY2 (dst + 6, src + 6);
+      src += 8;
+      dst += 8;
+      cnt -= 8;
+    }
+#endif
+    if (cnt > 0) {
+      /* overlap-safe: copy in blocks no larger than the distance, and
+       * double the block size as the replicated region grows */
+      int blocklen = back;
+      while (cnt > blocklen) {
+        memcpy (dst, src, blocklen);
+        dst += blocklen;
+        cnt -= blocklen;
+        blocklen <<= 1;
+      }
+      memcpy (dst, src, cnt);
+    }
+    dst += cnt;
+  }
+  c->out = dst;
+}
+
+/*
+ * \brief decode LZO 1x compressed data
+ * \param out output buffer
+ * \param outlen size of output buffer, number of bytes left are returned here
+ * \param in input buffer
+ * \param inlen size of input buffer, number of bytes left are returned here
+ * \return 0 on success, otherwise error flags, see lzo.h
+ *
+ * make sure all buffers are appropriately padded, in must provide
+ * LZO_INPUT_PADDING, out must provide LZO_OUTPUT_PADDING additional bytes
+ */
+int
+lzo1x_decode (void *out, int *outlen, const void *in, int *inlen)
+{
+  int state = 0;                /* trailing-literal count of previous op */
+  int x;
+  LZOContext c;
+  c.in = in;
+  c.in_end = (const guint8 *) in + *inlen;
+  c.out = c.out_start = out;
+  c.out_end = (guint8 *) out + *outlen;
+  c.error = 0;
+  x = GETB (c);
+  /* a first byte > 17 encodes an initial literal run of x - 17 bytes */
+  if (x > 17) {
+    copy (&c, x - 17);
+    x = GETB (c);
+    if (x < 16)
+      c.error |= LZO_ERROR;
+  }
+  if (c.in > c.in_end)
+    c.error |= LZO_INPUT_DEPLETED;
+  while (!c.error) {
+    int cnt, back;
+    if (x > 15) {
+      /* match opcodes: the range of x selects the length/distance coding */
+      if (x > 63) {
+        cnt = (x >> 5) - 1;
+        back = (GETB (c) << 3) + ((x >> 2) & 7) + 1;
+      } else if (x > 31) {
+        cnt = get_len (&c, x, 31);
+        x = GETB (c);
+        back = (GETB (c) << 6) + (x >> 2) + 1;
+      } else {
+        cnt = get_len (&c, x, 7);
+        back = (1 << 14) + ((x & 8) << 11);
+        x = GETB (c);
+        back += (GETB (c) << 6) + (x >> 2);
+        /* distance exactly 1 << 14 in this opcode is end-of-stream */
+        if (back == (1 << 14)) {
+          if (cnt != 1)
+            c.error |= LZO_ERROR;
+          break;
+        }
+      }
+    } else if (!state) {
+      /* no pending literals: x encodes a literal run + short match */
+      cnt = get_len (&c, x, 15);
+      copy (&c, cnt + 3);
+      x = GETB (c);
+      if (x > 15)
+        continue;
+      cnt = 1;
+      back = (1 << 11) + (GETB (c) << 2) + (x >> 2) + 1;
+    } else {
+      /* short match immediately following previous trailing literals */
+      cnt = 0;
+      back = (GETB (c) << 2) + (x >> 2) + 1;
+    }
+    copy_backptr (&c, back, cnt + 2);
+    /* low two bits give the trailing literal count and the next state */
+    state = cnt = x & 3;
+    copy (&c, cnt);
+    x = GETB (c);
+  }
+  *inlen = c.in_end - c.in;
+  if (c.in > c.in_end)
+    *inlen = 0;
+  *outlen = c.out_end - c.out;
+  return c.error;
+}
+
+#ifdef TEST
+#include <stdio.h>
+#include <lzo/lzo1x.h>
+#include "log.h"
+#define MAXSZ (10*1024*1024)
+int
+main (int argc, char *argv[])
+{
+  /* standalone compress/decompress round-trip benchmark; only built when
+   * TEST is defined, and depends on liblzo + FFmpeg's av_* helpers */
+  FILE *in = fopen (argv[1], "rb");
+  guint8 *orig = av_malloc (MAXSZ + 16);
+  guint8 *comp = av_malloc (2 * MAXSZ + 16);
+  guint8 *decomp = av_malloc (MAXSZ + 16);
+  gsize s = fread (orig, 1, MAXSZ, in);
+  lzo_uint clen = 0;
+  long tmp[LZO1X_MEM_COMPRESS];
+  int inlen, outlen;
+  int i;
+  av_log_level = AV_LOG_DEBUG;
+  lzo1x_999_compress (orig, s, comp, &clen, tmp);
+  /* time 300 decompression runs of the selected implementation */
+  for (i = 0; i < 300; i++) {
+    START_TIMER inlen = clen;
+    outlen = MAXSZ;
+#ifdef LIBLZO
+    if (lzo1x_decompress_safe (comp, inlen, decomp, &outlen, NULL))
+#elif defined(LIBLZO_UNSAFE)
+    if (lzo1x_decompress (comp, inlen, decomp, &outlen, NULL))
+#else
+    if (lzo1x_decode (decomp, &outlen, comp, &inlen))
+#endif
+      av_log (NULL, AV_LOG_ERROR, "decompression error\n");
+    STOP_TIMER ("lzod")
+  }
+  /* verify the round trip reproduced the original bytes */
+  if (memcmp (orig, decomp, s))
+    av_log (NULL, AV_LOG_ERROR, "decompression incorrect\n");
+  else
+    av_log (NULL, AV_LOG_ERROR, "decompression ok\n");
+
+  fclose (in);
+  return 0;
+}
+#endif
diff --git a/gst/matroska/lzo.h b/gst/matroska/lzo.h
new file mode 100644
index 0000000000..e7795f7718
--- /dev/null
+++ b/gst/matroska/lzo.h
@@ -0,0 +1,35 @@
+/*
+ * LZO 1x decompression
+ * copyright (c) 2006 Reimar Doeffinger
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef FFMPEG_LZO_H
+#define FFMPEG_LZO_H
+
+#define LZO_INPUT_DEPLETED 1
+#define LZO_OUTPUT_FULL 2
+#define LZO_INVALID_BACKPTR 4
+#define LZO_ERROR 8
+
+#define LZO_INPUT_PADDING 8
+#define LZO_OUTPUT_PADDING 12
+
+int lzo1x_decode(void *out, int *outlen, const void *in, int *inlen);
+
+#endif /* FFMPEG_LZO_H */
diff --git a/gst/matroska/matroska-demux.c b/gst/matroska/matroska-demux.c
new file mode 100644
index 0000000000..1eb429f938
--- /dev/null
+++ b/gst/matroska/matroska-demux.c
@@ -0,0 +1,7503 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Tim-Philipp Müller <tim centricular net>
+ * (c) 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ * (c) 2011 Debarshi Ray <rishi@gnu.org>
+ *
+ * matroska-demux.c: matroska file/stream demuxer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* TODO: check CRC32 if present
+ * TODO: there can be a segment after the first segment. Handle like
+ * chained oggs. Fixes #334082
+ * TODO: Test samples: http://www.matroska.org/samples/matrix/index.html
+ * http://samples.mplayerhq.hu/Matroska/
+ * TODO: check if demuxing is done correct for all codecs according to spec
+ * TODO: seeking with incomplete or without CUE
+ */
+
+/**
+ * SECTION:element-matroskademux
+ * @title: matroskademux
+ *
+ * matroskademux demuxes a Matroska file into the different contained streams.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskademux ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink
+ * ]| This pipeline demuxes a Matroska file and outputs the contained Vorbis audio.
+ *
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+#include <glib/gprintf.h>
+
+#include <gst/base/base.h>
+
+/* For AVI compatibility mode
+ and for fourcc stuff */
+#include <gst/riff/riff-read.h>
+#include <gst/riff/riff-ids.h>
+#include <gst/riff/riff-media.h>
+
+#include <gst/audio/audio.h>
+#include <gst/tag/tag.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/gstvideocodecalphameta.h>
+#include <gst/video/video.h>
+
+#include "gstmatroskaelements.h"
+#include "matroska-demux.h"
+#include "matroska-ids.h"
+
+GST_DEBUG_CATEGORY_STATIC (matroskademux_debug);
+#define GST_CAT_DEFAULT matroskademux_debug
+
+#define DEBUG_ELEMENT_START(demux, ebml, element) \
+ GST_DEBUG_OBJECT (demux, "Parsing " element " element at offset %" \
+ G_GUINT64_FORMAT, gst_ebml_read_get_pos (ebml))
+
+#define DEBUG_ELEMENT_STOP(demux, ebml, element, ret) \
+ GST_DEBUG_OBJECT (demux, "Parsing " element " element " \
+ " finished with '%s'", gst_flow_get_name (ret))
+
+/* GObject property IDs for the demuxer. */
+enum
+{
+  PROP_0,
+  PROP_METADATA,
+  PROP_STREAMINFO,
+  PROP_MAX_GAP_TIME,
+  PROP_MAX_BACKTRACK_DISTANCE
+};
+
+/* Default for max-gap-time: gaps longer than this cause segment events
+ * to be sent (see the property blurb in class_init). */
+#define DEFAULT_MAX_GAP_TIME (2 * GST_SECOND)
+/* Default for max-backtrack-distance, in seconds. */
+#define DEFAULT_MAX_BACKTRACK_DISTANCE 30
+/* Presumably the amount of unparseable data (bytes) tolerated before
+ * giving up -- confirm at the use site further down the file. */
+#define INVALID_DATA_THRESHOLD (2 * 1024 * 1024)
+
+static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-matroska; video/x-matroska; "
+ "video/x-matroska-3d; audio/webm; video/webm")
+ );
+
+/* TODO: fill in caps! */
+
+static GstStaticPadTemplate audio_src_templ =
+GST_STATIC_PAD_TEMPLATE ("audio_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("ANY")
+ );
+
+static GstStaticPadTemplate video_src_templ =
+GST_STATIC_PAD_TEMPLATE ("video_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("ANY")
+ );
+
+static GstStaticPadTemplate subtitle_src_templ =
+ GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("text/x-raw, format=pango-markup; application/x-ssa; "
+ "application/x-ass;application/x-usf; subpicture/x-dvd; "
+ "subpicture/x-pgs; subtitle/x-kate; " "application/x-subtitle-unknown")
+ );
+
+static GQuark matroska_block_additional_quark;
+
+static GstFlowReturn gst_matroska_demux_parse_id (GstMatroskaDemux * demux,
+ guint32 id, guint64 length, guint needed);
+
+/* element functions */
+static void gst_matroska_demux_loop (GstPad * pad);
+
+static gboolean gst_matroska_demux_element_send_event (GstElement * element,
+ GstEvent * event);
+static gboolean gst_matroska_demux_element_query (GstElement * element,
+ GstQuery * query);
+
+/* pad functions */
+static gboolean gst_matroska_demux_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+static gboolean gst_matroska_demux_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+
+static gboolean gst_matroska_demux_handle_seek_push (GstMatroskaDemux * demux,
+ GstPad * pad, GstEvent * event);
+static gboolean gst_matroska_demux_handle_seek_event (GstMatroskaDemux * demux,
+ GstPad * pad, GstEvent * event);
+static gboolean gst_matroska_demux_handle_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_matroska_demux_handle_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+
+static gboolean gst_matroska_demux_handle_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_matroska_demux_handle_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+static GstFlowReturn gst_matroska_demux_chain (GstPad * pad,
+ GstObject * object, GstBuffer * buffer);
+
+static GstStateChangeReturn
+gst_matroska_demux_change_state (GstElement * element,
+ GstStateChange transition);
+#if 0
+static void
+gst_matroska_demux_set_index (GstElement * element, GstIndex * index);
+static GstIndex *gst_matroska_demux_get_index (GstElement * element);
+#endif
+
+/* caps functions */
+static GstCaps *gst_matroska_demux_video_caps (GstMatroskaTrackVideoContext
+ * videocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint32 * riff_fourcc);
+static GstCaps *gst_matroska_demux_audio_caps (GstMatroskaTrackAudioContext
+ * audiocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint16 * riff_audio_fmt, GstClockTime * lead_in_ts);
+static GstCaps
+ * gst_matroska_demux_subtitle_caps (GstMatroskaTrackSubtitleContext *
+ subtitlecontext, const gchar * codec_id, gpointer data, guint size);
+static const gchar *gst_matroska_track_encryption_algorithm_name (gint val);
+static const gchar *gst_matroska_track_encryption_cipher_mode_name (gint val);
+static const gchar *gst_matroska_track_encoding_scope_name (gint val);
+
+/* stream methods */
+static void gst_matroska_demux_reset (GstElement * element);
+static gboolean perform_seek_to_offset (GstMatroskaDemux * demux,
+ gdouble rate, guint64 offset, guint32 seqnum, GstSeekFlags flags);
+
+/* gobject functions */
+static void gst_matroska_demux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_matroska_demux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+GType gst_matroska_demux_get_type (void);
+#define parent_class gst_matroska_demux_parent_class
+G_DEFINE_TYPE (GstMatroskaDemux, gst_matroska_demux, GST_TYPE_ELEMENT);
+#define _do_init \
+ gst_riff_init (); \
+ matroska_element_init (plugin); \
+ GST_DEBUG_CATEGORY_INIT (ebmlread_debug, "ebmlread", 0, "EBML stream helper class"); \
+ matroska_block_additional_quark = \
+ g_quark_from_static_string ("matroska-block-additional");
+
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (matroskademux, "matroskademux",
+ GST_RANK_PRIMARY, GST_TYPE_MATROSKA_DEMUX, _do_init);
+
+/* GObject finalize: release the shared read context and the flow
+ * combiner, then chain up to the parent class. */
+static void
+gst_matroska_demux_finalize (GObject * object)
+{
+  GstMatroskaDemux *self = GST_MATROSKA_DEMUX (object);
+
+  gst_matroska_read_common_finalize (&self->common);
+  gst_flow_combiner_free (self->flowcombiner);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class initializer: wires up GObject/GstElement vfuncs, installs the
+ * two runtime properties, registers the pad templates and sets the
+ * element metadata.  Fixes a typo in the max-backtrack-distance blurb
+ * ("without and index" -> "without an index"). */
+static void
+gst_matroska_demux_class_init (GstMatroskaDemuxClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (matroskademux_debug, "matroskademux", 0,
+      "Matroska demuxer");
+
+  /* GObject vfuncs */
+  gobject_class->finalize = gst_matroska_demux_finalize;
+  gobject_class->get_property = gst_matroska_demux_get_property;
+  gobject_class->set_property = gst_matroska_demux_set_property;
+
+  /* Properties */
+  g_object_class_install_property (gobject_class, PROP_MAX_GAP_TIME,
+      g_param_spec_uint64 ("max-gap-time", "Maximum gap time",
+          "The demuxer sends out segment events for skipping "
+          "gaps longer than this (0 = disabled).", 0, G_MAXUINT64,
+          DEFAULT_MAX_GAP_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_BACKTRACK_DISTANCE,
+      g_param_spec_uint ("max-backtrack-distance",
+          "Maximum backtrack distance",
+          "Maximum backtrack distance in seconds when seeking without "
+          "an index in pull mode and search for a keyframe "
+          "(0 = disable backtracking).",
+          0, G_MAXUINT, DEFAULT_MAX_BACKTRACK_DISTANCE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /* GstElement vfuncs */
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_change_state);
+  gstelement_class->send_event =
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_element_send_event);
+  gstelement_class->query =
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_element_query);
+#if 0
+  gstelement_class->set_index =
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_set_index);
+  gstelement_class->get_index =
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_get_index);
+#endif
+
+  /* Pad templates */
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &video_src_templ);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &audio_src_templ);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &subtitle_src_templ);
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_templ);
+
+  gst_element_class_set_static_metadata (gstelement_class, "Matroska demuxer",
+      "Codec/Demuxer",
+      "Demuxes Matroska/WebM streams into video/audio/subtitles",
+      "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+}
+
+/* Instance initializer: builds and hooks up the sink pad, initializes
+ * the shared read context, applies property defaults and finally resets
+ * all demuxing state. */
+static void
+gst_matroska_demux_init (GstMatroskaDemux * demux)
+{
+  /* Sink pad, fully wired up before it is exposed on the element. */
+  demux->common.sinkpad = gst_pad_new_from_static_template (&sink_templ,
+      "sink");
+  gst_pad_set_activate_function (demux->common.sinkpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_sink_activate));
+  gst_pad_set_activatemode_function (demux->common.sinkpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_sink_activate_mode));
+  gst_pad_set_chain_function (demux->common.sinkpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_chain));
+  gst_pad_set_event_function (demux->common.sinkpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_sink_event));
+  gst_pad_set_query_function (demux->common.sinkpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_sink_query));
+  gst_element_add_pad (GST_ELEMENT (demux), demux->common.sinkpad);
+
+  /* The flow combiner must exist before the reset below clears it. */
+  demux->flowcombiner = gst_flow_combiner_new ();
+
+  /* Defaults for the shared read context. */
+  gst_matroska_read_common_init (&demux->common);
+
+  /* Property defaults. */
+  demux->max_gap_time = DEFAULT_MAX_GAP_TIME;
+  demux->max_backtrack_distance = DEFAULT_MAX_BACKTRACK_DISTANCE;
+
+  GST_OBJECT_FLAG_SET (demux, GST_ELEMENT_FLAG_INDEXABLE);
+
+  /* Bring every remaining field into its pristine state. */
+  gst_matroska_demux_reset (GST_ELEMENT (demux));
+}
+
+/* Returns the demuxer to its initial state: drops per-stream counters,
+ * cached cluster/seek/index bookkeeping, pending events and the flow
+ * combiner contents.  Safe to call repeatedly. */
+static void
+gst_matroska_demux_reset (GstElement * element)
+{
+  GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+
+  GST_DEBUG_OBJECT (demux, "Resetting state");
+
+  gst_matroska_read_common_reset (GST_ELEMENT (demux), &demux->common);
+
+  /* Per-type stream counters. */
+  demux->num_a_streams = 0;
+  demux->num_t_streams = 0;
+  demux->num_v_streams = 0;
+  demux->have_nonintraonly_v_streams = FALSE;
+
+  demux->have_group_id = FALSE;
+  demux->group_id = G_MAXUINT;
+
+  demux->clock = NULL;
+  demux->tracks_parsed = FALSE;
+
+  /* Cached cluster positions. */
+  if (demux->clusters != NULL) {
+    g_array_free (demux->clusters, TRUE);
+    demux->clusters = NULL;
+  }
+
+  /* Elements collected while following SeekHead entries. */
+  g_list_foreach (demux->seek_parsed,
+      (GFunc) gst_matroska_read_common_free_parsed_el, NULL);
+  g_list_free (demux->seek_parsed);
+  demux->seek_parsed = NULL;
+
+  /* Timing / position bookkeeping. */
+  demux->last_stop_end = GST_CLOCK_TIME_NONE;
+  demux->seek_block = 0;
+  demux->stream_start_time = GST_CLOCK_TIME_NONE;
+  demux->to_time = GST_CLOCK_TIME_NONE;
+  demux->cluster_time = GST_CLOCK_TIME_NONE;
+  demux->cluster_offset = 0;
+  demux->cluster_prevsize = 0;
+  demux->seen_cluster_prevsize = FALSE;
+  demux->next_cluster_offset = 0;
+  demux->stream_last_time = GST_CLOCK_TIME_NONE;
+  demux->last_cluster_offset = 0;
+  demux->index_offset = 0;
+  demux->seekable = FALSE;
+  demux->need_segment = FALSE;
+  demux->segment_seqnum = 0;
+  demux->requested_seek_time = GST_CLOCK_TIME_NONE;
+  demux->seek_offset = -1;
+  demux->audio_lead_in_ts = 0;
+  demux->building_index = FALSE;
+
+  /* Pending events. */
+  if (demux->seek_event != NULL) {
+    gst_event_unref (demux->seek_event);
+    demux->seek_event = NULL;
+  }
+
+  demux->seek_index = NULL;
+  demux->seek_entry = 0;
+
+  if (demux->new_segment != NULL) {
+    gst_event_unref (demux->new_segment);
+    demux->new_segment = NULL;
+  }
+
+  demux->invalid_duration = FALSE;
+
+  demux->cached_length = G_MAXUINT64;
+
+  if (demux->deferred_seek_event != NULL) {
+    gst_event_unref (demux->deferred_seek_event);
+    demux->deferred_seek_event = NULL;
+  }
+  demux->deferred_seek_pad = NULL;
+
+  gst_flow_combiner_clear (demux->flowcombiner);
+}
+
+/* gst_matroska_decode_buffer:
+ * Runs the track's content encodings (scope FRAME) over @buf and, for
+ * encrypted tracks (context->protection_info set), strips/parses the
+ * protection header and attaches protection metadata when needed.
+ *
+ * Takes ownership of @buf.  Returns the decoded buffer -- which may be a
+ * replacement for @buf -- or NULL on failure (in which case @buf has
+ * already been unreffed).
+ *
+ * NOTE(review): the g_return_val_if_fail(size > 0, ...) path returns
+ * @buf while it is still mapped; that guard only fires on programmer
+ * error, but the map would leak in that case. */
+static GstBuffer *
+gst_matroska_decode_buffer (GstMatroskaTrackContext * context, GstBuffer * buf)
+{
+  GstMapInfo map;
+  gpointer data;
+  gsize size;
+  GstBuffer *out_buf = buf;
+
+  g_return_val_if_fail (GST_IS_BUFFER (buf), NULL);
+
+  GST_DEBUG ("decoding buffer %p", buf);
+
+  gst_buffer_map (out_buf, &map, GST_MAP_READ);
+  data = map.data;
+  size = map.size;
+
+  g_return_val_if_fail (size > 0, buf);
+
+  /* Apply the track's content encodings; on success @data/@size may
+   * point at newly allocated memory. */
+  if (gst_matroska_decode_data (context->encodings, &data, &size,
+          GST_MATROSKA_TRACK_ENCODING_SCOPE_FRAME, FALSE)) {
+    if (data != map.data) {
+      /* Decoder produced fresh memory: drop the original buffer and
+       * wrap the decoded data in a new one. */
+      gst_buffer_unmap (out_buf, &map);
+      gst_buffer_unref (out_buf);
+      out_buf = gst_buffer_new_wrapped (data, size);
+    } else {
+      gst_buffer_unmap (out_buf, &map);
+    }
+  } else {
+    GST_DEBUG ("decode data failed");
+    gst_buffer_unmap (out_buf, &map);
+    gst_buffer_unref (out_buf);
+    return NULL;
+  }
+  /* Encrypted stream */
+  if (context->protection_info) {
+
+    GstStructure *info_protect = gst_structure_copy (context->protection_info);
+    gboolean encrypted = FALSE;
+
+    gst_buffer_map (out_buf, &map, GST_MAP_READ);
+    data = map.data;
+    size = map.size;
+
+    /* Parsing the protection header may shrink @size; the payload is
+     * the trailing @size bytes of the buffer. */
+    if (gst_matroska_parse_protection_meta (&data, &size, info_protect,
+            &encrypted)) {
+      if (data != map.data) {
+        GstBuffer *tmp_buf;
+
+        gst_buffer_unmap (out_buf, &map);
+        tmp_buf = out_buf;
+        out_buf = gst_buffer_copy_region (tmp_buf, GST_BUFFER_COPY_ALL,
+            gst_buffer_get_size (tmp_buf) - size, size);
+        gst_buffer_unref (tmp_buf);
+        /* The metadata takes ownership of info_protect only when the
+         * frame is actually encrypted. */
+        if (encrypted)
+          gst_buffer_add_protection_meta (out_buf, info_protect);
+        else
+          gst_structure_free (info_protect);
+      } else {
+        gst_buffer_unmap (out_buf, &map);
+        gst_structure_free (info_protect);
+      }
+    } else {
+      GST_WARNING ("Adding protection metadata failed");
+      gst_buffer_unmap (out_buf, &map);
+      gst_buffer_unref (out_buf);
+      gst_structure_free (info_protect);
+      return NULL;
+    }
+  }
+
+  return out_buf;
+}
+
+/* Copies every buffer of @list into a GstValueArray and stores it as the
+ * "streamheader" field of the first structure of (writable) @caps. */
+static void
+gst_matroska_demux_add_stream_headers_to_caps (GstMatroskaDemux * demux,
+    GstBufferList * list, GstCaps * caps)
+{
+  GValue array = G_VALUE_INIT;
+  GValue buffer_value = G_VALUE_INIT;
+  GstStructure *structure;
+  gint idx, n_buffers;
+
+  g_assert (gst_caps_is_writable (caps));
+
+  g_value_init (&array, GST_TYPE_ARRAY);
+  g_value_init (&buffer_value, GST_TYPE_BUFFER);
+
+  /* Box each header buffer and append it to the array value. */
+  n_buffers = gst_buffer_list_length (list);
+  for (idx = 0; idx < n_buffers; idx++) {
+    g_value_set_boxed (&buffer_value, gst_buffer_list_get (list, idx));
+    gst_value_array_append_value (&array, &buffer_value);
+  }
+
+  /* gst_structure_take_value() assumes ownership of the array value. */
+  structure = gst_caps_get_structure (caps, 0);
+  gst_structure_take_value (structure, "streamheader", &array);
+  g_value_unset (&buffer_value);
+}
+
+/* Parses a MasteringMetadata master element into
+ * @video_context->mastering_display_info.  All sub-elements carry float
+ * values; chromaticities are scaled by 50000 and luminances by 10000,
+ * matching the HEVC fixed-point representation.  On an out-of-range
+ * value the whole element is abandoned (the context stays untouched)
+ * while GST_FLOW_OK is still returned. */
+static GstFlowReturn
+gst_matroska_demux_parse_mastering_metadata (GstMatroskaDemux * demux,
+    GstEbmlRead * ebml, GstMatroskaTrackVideoContext * video_context)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstVideoMasteringDisplayInfo minfo;
+  guint32 id;
+  gdouble num;
+  /* Precision defined by HEVC specification */
+  const guint chroma_scale = 50000;
+  const guint luma_scale = 10000;
+
+  gst_video_mastering_display_info_init (&minfo);
+
+  DEBUG_ELEMENT_START (demux, ebml, "MasteringMetadata");
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+    goto beach;
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      goto beach;
+
+    /* all sub elements have float type */
+    if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+      goto beach;
+
+    /* chromaticity should be in [0, 1] range */
+    if (id >= GST_MATROSKA_ID_PRIMARYRCHROMATICITYX &&
+        id <= GST_MATROSKA_ID_WHITEPOINTCHROMATICITYY) {
+      if (num < 0 || num > 1.0) {
+        GST_WARNING_OBJECT (demux, "0x%x has invalid value %f", id, num);
+        goto beach;
+      }
+    } else if (id == GST_MATROSKA_ID_LUMINANCEMAX ||
+        id == GST_MATROSKA_ID_LUMINANCEMIN) {
+      /* Note: webM spec said valid range is [0, 999.9999] but
+       * 1000 cd/m^2 is generally used value on HDR. Just check guint range here.
+       * See https://www.webmproject.org/docs/container/#LuminanceMax
+       */
+      if (num < 0 || num > (gdouble) (G_MAXUINT32 / luma_scale)) {
+        GST_WARNING_OBJECT (demux, "0x%x has invalid value %f", id, num);
+        goto beach;
+      }
+    }
+
+    /* Store the value in the matching GstVideoMasteringDisplayInfo slot,
+     * converting to the scaled fixed-point representation. */
+    switch (id) {
+      case GST_MATROSKA_ID_PRIMARYRCHROMATICITYX:
+        minfo.display_primaries[0].x = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_PRIMARYRCHROMATICITYY:
+        minfo.display_primaries[0].y = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_PRIMARYGCHROMATICITYX:
+        minfo.display_primaries[1].x = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_PRIMARYGCHROMATICITYY:
+        minfo.display_primaries[1].y = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_PRIMARYBCHROMATICITYX:
+        minfo.display_primaries[2].x = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_PRIMARYBCHROMATICITYY:
+        minfo.display_primaries[2].y = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_WHITEPOINTCHROMATICITYX:
+        minfo.white_point.x = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_WHITEPOINTCHROMATICITYY:
+        minfo.white_point.y = (guint16) (num * chroma_scale);
+        break;
+      case GST_MATROSKA_ID_LUMINANCEMAX:
+        minfo.max_display_mastering_luminance = (guint32) (num * luma_scale);
+        break;
+      case GST_MATROSKA_ID_LUMINANCEMIN:
+        minfo.min_display_mastering_luminance = (guint32) (num * luma_scale);
+        break;
+      default:
+        /* NOTE(review): the element was already consumed by
+         * gst_ebml_read_float() above; calling gst_ebml_read_skip() here
+         * looks like it would skip the *following* element -- confirm
+         * against GstEbmlRead semantics. */
+        GST_FIXME_OBJECT (demux,
+            "Unsupported subelement 0x%x in MasteringMetadata", id);
+        ret = gst_ebml_read_skip (ebml);
+        break;
+    }
+  }
+
+  video_context->mastering_display_info = minfo;
+  video_context->mastering_display_info_present = TRUE;
+
+beach:
+  DEBUG_ELEMENT_STOP (demux, ebml, "MasteringMetadata", ret);
+
+  return ret;
+}
+
+/* Parses a TrackVideoColour master element into @video_context: fills
+ * the colorimetry (matrix, range, transfer, primaries), mastering
+ * display metadata and content light level.  Unknown sub-elements are
+ * skipped.  Returns the EBML reader's flow status.
+ *
+ * Fix: the maxFALL bound check previously used 'num >= G_MAXUINT16',
+ * which wrongly rejected the largest representable value (65535); it now
+ * uses '>' consistently with the maxCLL check. */
+static GstFlowReturn
+gst_matroska_demux_parse_colour (GstMatroskaDemux * demux, GstEbmlRead * ebml,
+    GstMatroskaTrackVideoContext * video_context)
+{
+  GstFlowReturn ret;
+  GstVideoColorimetry colorimetry;
+  guint32 id;
+  guint64 num;
+
+  colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+  colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+  colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+  colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+
+  DEBUG_ELEMENT_START (demux, ebml, "TrackVideoColour");
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+    goto beach;
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      goto beach;
+
+    switch (id) {
+      case GST_MATROSKA_ID_VIDEOMATRIXCOEFFICIENTS:{
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          goto beach;
+
+        /* Matroska carries the ISO/IEC 23001-8 code points directly. */
+        colorimetry.matrix = gst_video_color_matrix_from_iso ((guint) num);
+        break;
+      }
+
+      case GST_MATROSKA_ID_VIDEORANGE:{
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          goto beach;
+
+        switch (num) {
+          case 0:
+            colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+            break;
+          case 1:
+            colorimetry.range = GST_VIDEO_COLOR_RANGE_16_235;
+            break;
+          case 2:
+            colorimetry.range = GST_VIDEO_COLOR_RANGE_0_255;
+            break;
+          default:
+            GST_FIXME_OBJECT (demux, "Unsupported color range %"
+                G_GUINT64_FORMAT, num);
+            break;
+        }
+        break;
+      }
+
+      case GST_MATROSKA_ID_VIDEOTRANSFERCHARACTERISTICS:{
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          goto beach;
+
+        colorimetry.transfer =
+            gst_video_transfer_function_from_iso ((guint) num);
+        break;
+      }
+
+      case GST_MATROSKA_ID_VIDEOPRIMARIES:{
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          goto beach;
+
+        colorimetry.primaries =
+            gst_video_color_primaries_from_iso ((guint) num);
+        break;
+      }
+
+      case GST_MATROSKA_ID_MASTERINGMETADATA:{
+        if ((ret =
+                gst_matroska_demux_parse_mastering_metadata (demux, ebml,
+                    video_context)) != GST_FLOW_OK)
+          goto beach;
+        break;
+      }
+
+      case GST_MATROSKA_ID_MAXCLL:{
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          goto beach;
+        /* The field is a 16-bit quantity; reject anything that does not
+         * fit. */
+        if (num > G_MAXUINT16) {
+          GST_WARNING_OBJECT (demux,
+              "Too large maxCLL value %" G_GUINT64_FORMAT, num);
+        } else {
+          video_context->content_light_level.max_content_light_level = num;
+        }
+        break;
+      }
+
+      case GST_MATROSKA_ID_MAXFALL:{
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          goto beach;
+        /* Use '>' (not '>=') so 65535, the maximum 16-bit value, is
+         * accepted -- consistent with the maxCLL check above. */
+        if (num > G_MAXUINT16) {
+          GST_WARNING_OBJECT (demux,
+              "Too large maxFALL value %" G_GUINT64_FORMAT, num);
+        } else {
+          video_context->content_light_level.max_frame_average_light_level =
+              num;
+        }
+        break;
+      }
+
+      default:
+        GST_FIXME_OBJECT (demux, "Unsupported subelement 0x%x in Colour", id);
+        ret = gst_ebml_read_skip (ebml);
+        break;
+    }
+  }
+
+  memcpy (&video_context->colorimetry, &colorimetry,
+      sizeof (GstVideoColorimetry));
+
+beach:
+  DEBUG_ELEMENT_STOP (demux, ebml, "TrackVideoColour", ret);
+  return ret;
+}
+
+static GstFlowReturn
+gst_matroska_demux_parse_stream (GstMatroskaDemux * demux, GstEbmlRead * ebml,
+ GstMatroskaTrackContext ** dest_context)
+{
+ GstMatroskaTrackContext *context;
+ GstCaps *caps = NULL;
+ GstTagList *cached_taglist;
+ GstFlowReturn ret;
+ guint32 id, riff_fourcc = 0;
+ guint16 riff_audio_fmt = 0;
+ gchar *codec = NULL;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackEntry");
+
+ *dest_context = NULL;
+
+ /* start with the master */
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackEntry", ret);
+ return ret;
+ }
+
+ /* allocate generic... if we know the type, we'll g_renew()
+ * with the precise type */
+ context = g_new0 (GstMatroskaTrackContext, 1);
+ context->index_writer_id = -1;
+ context->type = 0; /* no type yet */
+ context->default_duration = 0;
+ context->pos = 0;
+ context->set_discont = TRUE;
+ context->timecodescale = 1.0;
+ context->flags =
+ GST_MATROSKA_TRACK_ENABLED | GST_MATROSKA_TRACK_DEFAULT |
+ GST_MATROSKA_TRACK_LACING;
+ context->from_time = GST_CLOCK_TIME_NONE;
+ context->from_offset = -1;
+ context->to_offset = G_MAXINT64;
+ context->alignment = 1;
+ context->dts_only = FALSE;
+ context->intra_only = FALSE;
+ context->tags = gst_tag_list_new_empty ();
+ g_queue_init (&context->protection_event_queue);
+ context->protection_info = NULL;
+
+ GST_DEBUG_OBJECT (demux, "Parsing a TrackEntry (%d tracks parsed so far)",
+ demux->common.num_streams);
+
+ /* try reading the trackentry headers */
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* track number (unique stream ID) */
+ case GST_MATROSKA_ID_TRACKNUMBER:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_ERROR_OBJECT (demux, "Invalid TrackNumber 0");
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackNumber: %" G_GUINT64_FORMAT, num);
+ context->num = num;
+ break;
+ }
+ /* track UID (unique identifier) */
+ case GST_MATROSKA_ID_TRACKUID:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_ERROR_OBJECT (demux, "Invalid TrackUID 0");
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackUID: %" G_GUINT64_FORMAT, num);
+ context->uid = num;
+ break;
+ }
+
+ /* track type (video, audio, combined, subtitle, etc.) */
+ case GST_MATROSKA_ID_TRACKTYPE:{
+ guint64 track_type;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &track_type)) != GST_FLOW_OK) {
+ break;
+ }
+
+ if (context->type != 0 && context->type != track_type) {
+ GST_WARNING_OBJECT (demux,
+ "More than one tracktype defined in a TrackEntry - skipping");
+ break;
+ } else if (track_type < 1 || track_type > 254) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackType %" G_GUINT64_FORMAT,
+ track_type);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackType: %" G_GUINT64_FORMAT, track_type);
+
+ /* ok, so we're actually going to reallocate this thing */
+ switch (track_type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:
+ gst_matroska_track_init_video_context (&context);
+ break;
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:
+ gst_matroska_track_init_audio_context (&context);
+ break;
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+ gst_matroska_track_init_subtitle_context (&context);
+ break;
+ case GST_MATROSKA_TRACK_TYPE_COMPLEX:
+ case GST_MATROSKA_TRACK_TYPE_LOGO:
+ case GST_MATROSKA_TRACK_TYPE_BUTTONS:
+ case GST_MATROSKA_TRACK_TYPE_CONTROL:
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown or unsupported TrackType %" G_GUINT64_FORMAT,
+ track_type);
+ context->type = 0;
+ break;
+ }
+ break;
+ }
+
+ /* tracktype specific stuff for video */
+ case GST_MATROSKA_ID_TRACKVIDEO:{
+ GstMatroskaTrackVideoContext *videocontext;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackVideo");
+
+ if (!gst_matroska_track_init_video_context (&context)) {
+ GST_WARNING_OBJECT (demux,
+ "TrackVideo element in non-video track - ignoring track");
+ ret = GST_FLOW_ERROR;
+ break;
+ } else if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ break;
+ }
+ videocontext = (GstMatroskaTrackVideoContext *) context;
+
+ while (ret == GST_FLOW_OK &&
+ gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* Should be one level up but some broken muxers write it here. */
+ case GST_MATROSKA_ID_TRACKDEFAULTDURATION:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackDefaultDuration 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackDefaultDuration: %" G_GUINT64_FORMAT, num);
+ context->default_duration = num;
+ break;
+ }
+
+ /* video framerate */
+ /* NOTE: This one is here only for backward compatibility.
+ * Use _TRACKDEFAULDURATION one level up. */
+ case GST_MATROSKA_ID_VIDEOFRAMERATE:{
+ gdouble num;
+
+ if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num <= 0.0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoFPS %lf", num);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackVideoFrameRate: %lf", num);
+ if (context->default_duration == 0)
+ context->default_duration =
+ gst_gdouble_to_guint64 ((gdouble) GST_SECOND * (1.0 / num));
+ videocontext->default_fps = num;
+ break;
+ }
+
+ /* width of the size to display the video at */
+ case GST_MATROSKA_ID_VIDEODISPLAYWIDTH:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoDisplayWidth 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoDisplayWidth: %" G_GUINT64_FORMAT, num);
+ videocontext->display_width = num;
+ break;
+ }
+
+ /* height of the size to display the video at */
+ case GST_MATROSKA_ID_VIDEODISPLAYHEIGHT:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoDisplayHeight 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoDisplayHeight: %" G_GUINT64_FORMAT, num);
+ videocontext->display_height = num;
+ break;
+ }
+
+ /* width of the video in the file */
+ case GST_MATROSKA_ID_VIDEOPIXELWIDTH:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoPixelWidth 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoPixelWidth: %" G_GUINT64_FORMAT, num);
+ videocontext->pixel_width = num;
+ break;
+ }
+
+ /* height of the video in the file */
+ case GST_MATROSKA_ID_VIDEOPIXELHEIGHT:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackVideoPixelHeight 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoPixelHeight: %" G_GUINT64_FORMAT, num);
+ videocontext->pixel_height = num;
+ break;
+ }
+
+ /* whether the video is interlaced */
+ case GST_MATROSKA_ID_VIDEOFLAGINTERLACED:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 1)
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_INTERLACED;
+ else if (num == 2)
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
+ else
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_UNKNOWN;
+
+ GST_DEBUG_OBJECT (demux, "video track interlacing mode: %d",
+ videocontext->interlace_mode);
+ break;
+ }
+
+ /* interlaced field order */
+ case GST_MATROSKA_ID_VIDEOFIELDORDER:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (videocontext->interlace_mode !=
+ GST_MATROSKA_INTERLACE_MODE_INTERLACED) {
+ GST_WARNING_OBJECT (demux,
+ "FieldOrder element when not interlaced - ignoring");
+ break;
+ }
+
+ if (num == 0)
+ /* turns out we're actually progressive */
+ videocontext->interlace_mode =
+ GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
+ else if (num == 2)
+ videocontext->field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ else if (num == 9)
+ videocontext->field_order =
+ GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
+ else if (num == 14)
+ videocontext->field_order =
+ GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST;
+ else {
+ GST_FIXME_OBJECT (demux,
+ "Unknown or unsupported FieldOrder %" G_GUINT64_FORMAT,
+ num);
+ videocontext->field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+ }
+
+ GST_DEBUG_OBJECT (demux, "video track field order: %d",
+ videocontext->field_order);
+ break;
+ }
+
+ /* aspect ratio behaviour */
+ case GST_MATROSKA_ID_VIDEOASPECTRATIOTYPE:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num != GST_MATROSKA_ASPECT_RATIO_MODE_FREE &&
+ num != GST_MATROSKA_ASPECT_RATIO_MODE_KEEP &&
+ num != GST_MATROSKA_ASPECT_RATIO_MODE_FIXED) {
+ GST_WARNING_OBJECT (demux,
+ "Unknown TrackVideoAspectRatioType 0x%x", (guint) num);
+ break;
+ }
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoAspectRatioType: %" G_GUINT64_FORMAT, num);
+ videocontext->asr_mode = num;
+ break;
+ }
+
+ /* colourspace (only matters for raw video) fourcc */
+ case GST_MATROSKA_ID_VIDEOCOLOURSPACE:{
+ guint8 *data;
+ guint64 datalen;
+
+ if ((ret =
+ gst_ebml_read_binary (ebml, &id, &data,
+ &datalen)) != GST_FLOW_OK)
+ break;
+
+ if (datalen != 4) {
+ g_free (data);
+ GST_WARNING_OBJECT (demux,
+ "Invalid TrackVideoColourSpace length %" G_GUINT64_FORMAT,
+ datalen);
+ break;
+ }
+
+ memcpy (&videocontext->fourcc, data, 4);
+ GST_DEBUG_OBJECT (demux,
+ "TrackVideoColourSpace: %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (videocontext->fourcc));
+ g_free (data);
+ break;
+ }
+
+ /* color info */
+ case GST_MATROSKA_ID_VIDEOCOLOUR:{
+ ret = gst_matroska_demux_parse_colour (demux, ebml, videocontext);
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOSTEREOMODE:
+ {
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "StereoMode: %" G_GUINT64_FORMAT, num);
+
+ switch (num) {
+ case GST_MATROSKA_STEREO_MODE_SBS_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_SBS_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
+ break;
+ case GST_MATROSKA_STEREO_MODE_TB_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_TB_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM;
+ break;
+ case GST_MATROSKA_STEREO_MODE_CHECKER_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_CHECKER_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD;
+ break;
+ case GST_MATROSKA_STEREO_MODE_FBF_RL:
+ videocontext->multiview_flags =
+ GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
+ /* fall through */
+ case GST_MATROSKA_STEREO_MODE_FBF_LR:
+ videocontext->multiview_mode =
+ GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
+ /* FIXME: In frame-by-frame mode, left/right frame buffers are
+ * laced within one block, and we'll need to apply FIRST_IN_BUNDLE
+ * accordingly. See http://www.matroska.org/technical/specs/index.html#StereoMode */
+ GST_FIXME_OBJECT (demux,
+ "Frame-by-frame stereoscopic mode not fully implemented");
+ break;
+ }
+ break;
+ }
+
+ case GST_MATROSKA_ID_VIDEOALPHAMODE:
+ {
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "AlphaMode: %" G_GUINT64_FORMAT, num);
+
+ if (num == 1)
+ videocontext->alpha_mode = TRUE;
+ else
+ videocontext->alpha_mode = FALSE;
+ break;
+ }
+
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown TrackVideo subelement 0x%x - ignoring", id);
+ /* fall through */
+ case GST_MATROSKA_ID_VIDEODISPLAYUNIT:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPBOTTOM:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPTOP:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPLEFT:
+ case GST_MATROSKA_ID_VIDEOPIXELCROPRIGHT:
+ case GST_MATROSKA_ID_VIDEOGAMMAVALUE:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackVideo", ret);
+ break;
+ }
+
+ /* tracktype specific stuff for audio */
+ case GST_MATROSKA_ID_TRACKAUDIO:{
+ GstMatroskaTrackAudioContext *audiocontext;
+
+ DEBUG_ELEMENT_START (demux, ebml, "TrackAudio");
+
+ if (!gst_matroska_track_init_audio_context (&context)) {
+ GST_WARNING_OBJECT (demux,
+ "TrackAudio element in non-audio track - ignoring track");
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ audiocontext = (GstMatroskaTrackAudioContext *) context;
+
+ while (ret == GST_FLOW_OK &&
+ gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* samplerate */
+ case GST_MATROSKA_ID_AUDIOSAMPLINGFREQ:{
+ gdouble num;
+
+ if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+
+ if (num <= 0.0) {
+ GST_WARNING_OBJECT (demux,
+ "Invalid TrackAudioSamplingFrequency %lf", num);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackAudioSamplingFrequency: %lf", num);
+ audiocontext->samplerate = num;
+ break;
+ }
+
+ /* bitdepth */
+ case GST_MATROSKA_ID_AUDIOBITDEPTH:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackAudioBitDepth 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackAudioBitDepth: %" G_GUINT64_FORMAT,
+ num);
+ audiocontext->bitdepth = num;
+ break;
+ }
+
+ /* channels */
+ case GST_MATROSKA_ID_AUDIOCHANNELS:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackAudioChannels 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackAudioChannels: %" G_GUINT64_FORMAT,
+ num);
+ audiocontext->channels = num;
+ break;
+ }
+
+ default:
+ GST_WARNING_OBJECT (demux,
+ "Unknown TrackAudio subelement 0x%x - ignoring", id);
+ /* fall through */
+ case GST_MATROSKA_ID_AUDIOCHANNELPOSITIONS:
+ case GST_MATROSKA_ID_AUDIOOUTPUTSAMPLINGFREQ:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackAudio", ret);
+
+ break;
+ }
+
+ /* codec identifier */
+ case GST_MATROSKA_ID_CODECID:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_ascii (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "CodecID: %s", GST_STR_NULL (text));
+ context->codec_id = text;
+ break;
+ }
+
+ /* codec private data */
+ case GST_MATROSKA_ID_CODECPRIVATE:{
+ guint8 *data;
+ guint64 size;
+
+ if ((ret =
+ gst_ebml_read_binary (ebml, &id, &data, &size)) != GST_FLOW_OK)
+ break;
+
+ context->codec_priv = data;
+ context->codec_priv_size = size;
+
+ GST_DEBUG_OBJECT (demux, "CodecPrivate of size %" G_GUINT64_FORMAT,
+ size);
+ break;
+ }
+
+ /* name of the codec */
+ case GST_MATROSKA_ID_CODECNAME:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "CodecName: %s", GST_STR_NULL (text));
+ context->codec_name = text;
+ break;
+ }
+
+ /* codec delay */
+ case GST_MATROSKA_ID_CODECDELAY:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ context->codec_delay = num;
+
+ GST_DEBUG_OBJECT (demux, "CodecDelay: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (num));
+ break;
+ }
+
+ /* codec delay */
+ case GST_MATROSKA_ID_SEEKPREROLL:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ context->seek_preroll = num;
+
+ GST_DEBUG_OBJECT (demux, "SeekPreroll: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (num));
+ break;
+ }
+
+ /* name of this track */
+ case GST_MATROSKA_ID_TRACKNAME:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ context->name = text;
+ GST_DEBUG_OBJECT (demux, "TrackName: %s", GST_STR_NULL (text));
+ break;
+ }
+
+ /* language (matters for audio/subtitles, mostly) */
+ case GST_MATROSKA_ID_TRACKLANGUAGE:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+
+ context->language = text;
+
+ /* fre-ca => fre */
+ if (strlen (context->language) >= 4 && context->language[3] == '-')
+ context->language[3] = '\0';
+
+ GST_DEBUG_OBJECT (demux, "TrackLanguage: %s",
+ GST_STR_NULL (context->language));
+ break;
+ }
+
+ /* whether this is actually used */
+ case GST_MATROSKA_ID_TRACKFLAGENABLED:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_ENABLED;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_ENABLED;
+
+ GST_DEBUG_OBJECT (demux, "TrackEnabled: %d",
+ (context->flags & GST_MATROSKA_TRACK_ENABLED) ? 1 : 0);
+ break;
+ }
+
+ /* whether it's the default for this track type */
+ case GST_MATROSKA_ID_TRACKFLAGDEFAULT:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_DEFAULT;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_DEFAULT;
+
+ GST_DEBUG_OBJECT (demux, "TrackDefault: %d",
+ (context->flags & GST_MATROSKA_TRACK_DEFAULT) ? 1 : 0);
+ break;
+ }
+
+ /* whether the track must be used during playback */
+ case GST_MATROSKA_ID_TRACKFLAGFORCED:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_FORCED;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_FORCED;
+
+ GST_DEBUG_OBJECT (demux, "TrackForced: %d",
+ (context->flags & GST_MATROSKA_TRACK_FORCED) ? 1 : 0);
+ break;
+ }
+
+ /* lacing (like MPEG, where blocks don't end/start on frame
+ * boundaries) */
+ case GST_MATROSKA_ID_TRACKFLAGLACING:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num)
+ context->flags |= GST_MATROSKA_TRACK_LACING;
+ else
+ context->flags &= ~GST_MATROSKA_TRACK_LACING;
+
+ GST_DEBUG_OBJECT (demux, "TrackLacing: %d",
+ (context->flags & GST_MATROSKA_TRACK_LACING) ? 1 : 0);
+ break;
+ }
+
+ /* default length (in time) of one data block in this track */
+ case GST_MATROSKA_ID_TRACKDEFAULTDURATION:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+
+ if (num == 0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackDefaultDuration 0");
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackDefaultDuration: %" G_GUINT64_FORMAT,
+ num);
+ context->default_duration = num;
+ break;
+ }
+
+ case GST_MATROSKA_ID_CONTENTENCODINGS:{
+ ret = gst_matroska_read_common_read_track_encodings (&demux->common,
+ ebml, context);
+ break;
+ }
+
+ case GST_MATROSKA_ID_TRACKTIMECODESCALE:{
+ gdouble num;
+
+ if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+ if (num <= 0.0) {
+ GST_WARNING_OBJECT (demux, "Invalid TrackTimeCodeScale %lf", num);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "TrackTimeCodeScale: %lf", num);
+ context->timecodescale = num;
+ break;
+ }
+
+ default:
+ GST_WARNING ("Unknown TrackEntry subelement 0x%x - ignoring", id);
+ /* pass-through */
+
+ /* we ignore these because they're nothing useful (i.e. crap)
+ * or simply not implemented yet. */
+ case GST_MATROSKA_ID_TRACKMINCACHE:
+ case GST_MATROSKA_ID_TRACKMAXCACHE:
+ case GST_MATROSKA_ID_MAXBLOCKADDITIONID:
+ case GST_MATROSKA_ID_TRACKATTACHMENTLINK:
+ case GST_MATROSKA_ID_TRACKOVERLAY:
+ case GST_MATROSKA_ID_TRACKTRANSLATE:
+ case GST_MATROSKA_ID_TRACKOFFSET:
+ case GST_MATROSKA_ID_CODECSETTINGS:
+ case GST_MATROSKA_ID_CODECINFOURL:
+ case GST_MATROSKA_ID_CODECDOWNLOADURL:
+ case GST_MATROSKA_ID_CODECDECODEALL:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "TrackEntry", ret);
+
+ /* Decode codec private data if necessary */
+ if (context->encodings && context->encodings->len > 0 && context->codec_priv
+ && context->codec_priv_size > 0) {
+ if (!gst_matroska_decode_data (context->encodings,
+ &context->codec_priv, &context->codec_priv_size,
+ GST_MATROSKA_TRACK_ENCODING_SCOPE_CODEC_DATA, TRUE)) {
+ GST_WARNING_OBJECT (demux, "Decoding codec private data failed");
+ ret = GST_FLOW_ERROR;
+ }
+ }
+
+ if (context->type == 0 || context->codec_id == NULL || (ret != GST_FLOW_OK
+ && ret != GST_FLOW_EOS)) {
+ if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
+ GST_WARNING_OBJECT (ebml, "Unknown stream/codec in track entry header");
+
+ gst_matroska_track_free (context);
+ context = NULL;
+ *dest_context = NULL;
+ return ret;
+ }
+
+ /* check for a cached track taglist */
+ cached_taglist =
+ (GstTagList *) g_hash_table_lookup (demux->common.cached_track_taglists,
+ GUINT_TO_POINTER (context->uid));
+ if (cached_taglist)
+ gst_tag_list_insert (context->tags, cached_taglist, GST_TAG_MERGE_APPEND);
+
+ /* compute caps */
+ switch (context->type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:{
+ GstMatroskaTrackVideoContext *videocontext =
+ (GstMatroskaTrackVideoContext *) context;
+
+ caps = gst_matroska_demux_video_caps (videocontext,
+ context->codec_id, context->codec_priv,
+ context->codec_priv_size, &codec, &riff_fourcc);
+
+ if (codec) {
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_VIDEO_CODEC, codec, NULL);
+ context->tags_changed = TRUE;
+ g_free (codec);
+ }
+ break;
+ }
+
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:{
+ GstClockTime lead_in_ts = 0;
+ GstMatroskaTrackAudioContext *audiocontext =
+ (GstMatroskaTrackAudioContext *) context;
+
+ caps = gst_matroska_demux_audio_caps (audiocontext,
+ context->codec_id, context->codec_priv, context->codec_priv_size,
+ &codec, &riff_audio_fmt, &lead_in_ts);
+ if (lead_in_ts > demux->audio_lead_in_ts) {
+ demux->audio_lead_in_ts = lead_in_ts;
+ GST_DEBUG_OBJECT (demux, "Increased audio lead-in to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (lead_in_ts));
+ }
+
+ if (codec) {
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, codec, NULL);
+ context->tags_changed = TRUE;
+ g_free (codec);
+ }
+ break;
+ }
+
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:{
+ GstMatroskaTrackSubtitleContext *subtitlecontext =
+ (GstMatroskaTrackSubtitleContext *) context;
+
+ caps = gst_matroska_demux_subtitle_caps (subtitlecontext,
+ context->codec_id, context->codec_priv, context->codec_priv_size);
+ break;
+ }
+
+ case GST_MATROSKA_TRACK_TYPE_COMPLEX:
+ case GST_MATROSKA_TRACK_TYPE_LOGO:
+ case GST_MATROSKA_TRACK_TYPE_BUTTONS:
+ case GST_MATROSKA_TRACK_TYPE_CONTROL:
+ default:
+ /* we should already have quit by now */
+ g_assert_not_reached ();
+ }
+
+ if ((context->language == NULL || *context->language == '\0') &&
+ (context->type == GST_MATROSKA_TRACK_TYPE_AUDIO ||
+ context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)) {
+ GST_LOG ("stream %d: language=eng (assuming default)", context->index);
+ context->language = g_strdup ("eng");
+ }
+
+ if (context->language) {
+ const gchar *lang;
+
+ /* Matroska contains ISO 639-2B codes, we want ISO 639-1 */
+ lang = gst_tag_get_language_code (context->language);
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_LANGUAGE_CODE, (lang) ? lang : context->language, NULL);
+
+ if (context->name) {
+ gst_tag_list_add (context->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_TITLE, context->name, NULL);
+ }
+ context->tags_changed = TRUE;
+ }
+
+ if (caps == NULL) {
+ GST_WARNING_OBJECT (demux, "could not determine caps for stream with "
+ "codec_id='%s'", context->codec_id);
+ switch (context->type) {
+ case GST_MATROSKA_TRACK_TYPE_VIDEO:
+ caps = gst_caps_new_empty_simple ("video/x-unknown");
+ break;
+ case GST_MATROSKA_TRACK_TYPE_AUDIO:
+ caps = gst_caps_new_empty_simple ("audio/x-unknown");
+ break;
+ case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-unknown");
+ break;
+ case GST_MATROSKA_TRACK_TYPE_COMPLEX:
+ default:
+ caps = gst_caps_new_empty_simple ("application/x-matroska-unknown");
+ break;
+ }
+ gst_caps_set_simple (caps, "codec-id", G_TYPE_STRING, context->codec_id,
+ NULL);
+
+ /* add any unrecognised riff fourcc / audio format, but after codec-id */
+ if (context->type == GST_MATROSKA_TRACK_TYPE_AUDIO && riff_audio_fmt != 0)
+ gst_caps_set_simple (caps, "format", G_TYPE_INT, riff_audio_fmt, NULL);
+ else if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO && riff_fourcc != 0) {
+ gchar *fstr = g_strdup_printf ("%" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (riff_fourcc));
+ gst_caps_set_simple (caps, "fourcc", G_TYPE_STRING, fstr, NULL);
+ g_free (fstr);
+ }
+ } else if (context->stream_headers != NULL) {
+ gst_matroska_demux_add_stream_headers_to_caps (demux,
+ context->stream_headers, caps);
+ }
+
+ if (context->encodings) {
+ GstMatroskaTrackEncoding *enc;
+ guint i;
+
+ for (i = 0; i < context->encodings->len; i++) {
+ enc = &g_array_index (context->encodings, GstMatroskaTrackEncoding, i);
+ if (enc->type == GST_MATROSKA_ENCODING_ENCRYPTION /* encryption */ ) {
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ if (!gst_structure_has_name (s, "application/x-webm-enc")) {
+ gst_structure_set (s, "original-media-type", G_TYPE_STRING,
+ gst_structure_get_name (s), NULL);
+ gst_structure_set (s, "encryption-algorithm", G_TYPE_STRING,
+ gst_matroska_track_encryption_algorithm_name (enc->enc_algo),
+ NULL);
+ gst_structure_set (s, "encoding-scope", G_TYPE_STRING,
+ gst_matroska_track_encoding_scope_name (enc->scope), NULL);
+ gst_structure_set (s, "cipher-mode", G_TYPE_STRING,
+ gst_matroska_track_encryption_cipher_mode_name
+ (enc->enc_cipher_mode), NULL);
+ gst_structure_set_name (s, "application/x-webm-enc");
+ }
+ }
+ }
+ }
+
+ context->caps = caps;
+
+ /* tadaah! */
+ *dest_context = context;
+ return ret;
+}
+
/* Registers @context as a new stream and exposes it as a source pad:
 * picks the pad template matching the track type, creates and activates
 * the pad, and pushes the initial sticky events in the order GStreamer
 * requires (stream-start, then caps, then tags).  Ownership of @context
 * is kept in demux->common.src. */
static void
gst_matroska_demux_add_stream (GstMatroskaDemux * demux,
    GstMatroskaTrackContext * context)
{
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (demux);
  gchar *padname = NULL;
  GstPadTemplate *templ = NULL;
  GstStreamFlags stream_flags;

  GstEvent *stream_start;

  gchar *stream_id;

  g_ptr_array_add (demux->common.src, context);
  context->index = demux->common.num_streams++;
  g_assert (demux->common.src->len == demux->common.num_streams);
  /* NOTE(review): this store looks redundant -- g_ptr_array_add above
   * already placed @context at exactly this index */
  g_ptr_array_index (demux->common.src, demux->common.num_streams - 1) =
      context;

  /* now create the GStreamer connectivity */
  switch (context->type) {
    case GST_MATROSKA_TRACK_TYPE_VIDEO:
      padname = g_strdup_printf ("video_%u", demux->num_v_streams++);
      templ = gst_element_class_get_pad_template (klass, "video_%u");

      if (!context->intra_only)
        demux->have_nonintraonly_v_streams = TRUE;
      break;

    case GST_MATROSKA_TRACK_TYPE_AUDIO:
      padname = g_strdup_printf ("audio_%u", demux->num_a_streams++);
      templ = gst_element_class_get_pad_template (klass, "audio_%u");
      break;

    case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
      padname = g_strdup_printf ("subtitle_%u", demux->num_t_streams++);
      templ = gst_element_class_get_pad_template (klass, "subtitle_%u");
      break;

    default:
      /* we should already have quit by now */
      g_assert_not_reached ();
  }

  /* the pad in here */
  context->pad = gst_pad_new_from_template (templ, padname);

  gst_pad_set_event_function (context->pad,
      GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_event));
  gst_pad_set_query_function (context->pad,
      GST_DEBUG_FUNCPTR (gst_matroska_demux_handle_src_query));

  GST_INFO_OBJECT (demux, "Adding pad '%s' with caps %" GST_PTR_FORMAT,
      padname, context->caps);

  gst_pad_set_element_private (context->pad, context);

  /* the pad must be active before sticky events can be pushed on it */
  gst_pad_use_fixed_caps (context->pad);
  gst_pad_set_active (context->pad, TRUE);

  /* stream-id is derived from track number and uid so it is stable */
  stream_id =
      gst_pad_create_stream_id_printf (context->pad, GST_ELEMENT_CAST (demux),
      "%03" G_GUINT64_FORMAT ":%03" G_GUINT64_FORMAT,
      context->num, context->uid);
  /* reuse the upstream group-id when the sink pad carried one */
  stream_start =
      gst_pad_get_sticky_event (demux->common.sinkpad, GST_EVENT_STREAM_START,
      0);
  if (stream_start) {
    if (gst_event_parse_group_id (stream_start, &demux->group_id))
      demux->have_group_id = TRUE;
    else
      demux->have_group_id = FALSE;
    gst_event_unref (stream_start);
  } else if (!demux->have_group_id) {
    demux->have_group_id = TRUE;
    demux->group_id = gst_util_group_id_next ();
  }

  stream_start = gst_event_new_stream_start (stream_id);
  g_free (stream_id);
  if (demux->have_group_id)
    gst_event_set_group_id (stream_start, demux->group_id);
  stream_flags = GST_STREAM_FLAG_NONE;
  if (context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)
    stream_flags |= GST_STREAM_FLAG_SPARSE;
  if (context->flags & GST_MATROSKA_TRACK_DEFAULT)
    stream_flags |= GST_STREAM_FLAG_SELECT;
  else if (!(context->flags & GST_MATROSKA_TRACK_ENABLED))
    stream_flags |= GST_STREAM_FLAG_UNSELECT;

  /* stream-start must be pushed before caps can be set on the pad */
  gst_event_set_stream_flags (stream_start, stream_flags);
  gst_pad_push_event (context->pad, stream_start);
  gst_pad_set_caps (context->pad, context->caps);


  if (demux->common.global_tags) {
    GstEvent *tag_event;

    gst_tag_list_add (demux->common.global_tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_CONTAINER_FORMAT, "Matroska", NULL);
    GST_DEBUG_OBJECT (context->pad, "Sending global_tags %p: %" GST_PTR_FORMAT,
        demux->common.global_tags, demux->common.global_tags);

    tag_event =
        gst_event_new_tag (gst_tag_list_copy (demux->common.global_tags));

    gst_pad_push_event (context->pad, tag_event);
  }

  /* per-stream tags collected while parsing the track entry */
  if (G_UNLIKELY (context->tags_changed)) {
    GST_DEBUG_OBJECT (context->pad, "Sending tags %p: %"
        GST_PTR_FORMAT, context->tags, context->tags);
    gst_pad_push_event (context->pad,
        gst_event_new_tag (gst_tag_list_copy (context->tags)));
    context->tags_changed = FALSE;
  }

  gst_element_add_pad (GST_ELEMENT (demux), context->pad);
  gst_flow_combiner_add_pad (demux->flowcombiner, context->pad);

  g_free (padname);
}
+
/* Common handler for both pad-level and element-level queries.  @pad (and
 * hence @context) may be NULL for element queries; DEFAULT-format
 * position/duration can then not be answered.  TIME positions are reported
 * relative to demux->stream_start_time so playback starts at 0. */
static gboolean
gst_matroska_demux_query (GstMatroskaDemux * demux, GstPad * pad,
    GstQuery * query)
{
  gboolean res = FALSE;
  GstMatroskaTrackContext *context = NULL;

  if (pad) {
    context = gst_pad_get_element_private (pad);
  }

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:
    {
      GstFormat format;

      gst_query_parse_position (query, &format, NULL);

      res = TRUE;
      if (format == GST_FORMAT_TIME) {
        GST_OBJECT_LOCK (demux);
        /* prefer the per-stream position when asked on a pad */
        if (context)
          gst_query_set_position (query, GST_FORMAT_TIME,
              MAX (context->pos, demux->stream_start_time) -
              demux->stream_start_time);
        else
          gst_query_set_position (query, GST_FORMAT_TIME,
              MAX (demux->common.segment.position, demux->stream_start_time) -
              demux->stream_start_time);
        GST_OBJECT_UNLOCK (demux);
      } else if (format == GST_FORMAT_DEFAULT && context
          && context->default_duration) {
        /* DEFAULT = frame/sample index, derived from the track's
         * default duration per block */
        GST_OBJECT_LOCK (demux);
        gst_query_set_position (query, GST_FORMAT_DEFAULT,
            context->pos / context->default_duration);
        GST_OBJECT_UNLOCK (demux);
      } else {
        GST_DEBUG_OBJECT (demux,
            "only position query in TIME and DEFAULT format is supported");
        res = FALSE;
      }

      break;
    }
    case GST_QUERY_DURATION:
    {
      GstFormat format;

      gst_query_parse_duration (query, &format, NULL);

      res = TRUE;
      if (format == GST_FORMAT_TIME) {
        GST_OBJECT_LOCK (demux);
        gst_query_set_duration (query, GST_FORMAT_TIME,
            demux->common.segment.duration);
        GST_OBJECT_UNLOCK (demux);
      } else if (format == GST_FORMAT_DEFAULT && context
          && context->default_duration) {
        GST_OBJECT_LOCK (demux);
        gst_query_set_duration (query, GST_FORMAT_DEFAULT,
            demux->common.segment.duration / context->default_duration);
        GST_OBJECT_UNLOCK (demux);
      } else {
        GST_DEBUG_OBJECT (demux,
            "only duration query in TIME and DEFAULT format is supported");
        res = FALSE;
      }
      break;
    }

    case GST_QUERY_SEEKING:
    {
      GstFormat fmt;

      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
      GST_OBJECT_LOCK (demux);
      if (fmt == GST_FORMAT_TIME) {
        gboolean seekable;

        if (demux->streaming) {
          /* assuming we'll be able to get an index ... */
          seekable = demux->seekable;
        } else {
          /* pull mode: we can always seek by scanning for clusters */
          seekable = TRUE;
        }

        gst_query_set_seeking (query, GST_FORMAT_TIME, seekable,
            0, demux->common.segment.duration);
        res = TRUE;
      }
      GST_OBJECT_UNLOCK (demux);
      break;
    }
    case GST_QUERY_SEGMENT:
    {
      GstFormat format;
      gint64 start, stop;

      format = demux->common.segment.format;

      /* report segment boundaries in stream time */
      start =
          gst_segment_to_stream_time (&demux->common.segment, format,
          demux->common.segment.start);
      if ((stop = demux->common.segment.stop) == -1)
        stop = demux->common.segment.duration;
      else
        stop =
            gst_segment_to_stream_time (&demux->common.segment, format, stop);

      gst_query_set_segment (query, demux->common.segment.rate, format, start,
          stop);
      res = TRUE;
      break;
    }
    default:
      /* delegate anything else to the default pad/element machinery */
      if (pad)
        res = gst_pad_query_default (pad, (GstObject *) demux, query);
      else
        res =
            GST_ELEMENT_CLASS (parent_class)->query (GST_ELEMENT_CAST (demux),
            query);
      break;
  }

  return res;
}
+
+static gboolean
+gst_matroska_demux_element_query (GstElement * element, GstQuery * query)
+{
+ return gst_matroska_demux_query (GST_MATROSKA_DEMUX (element), NULL, query);
+}
+
+static gboolean
+gst_matroska_demux_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+
+ return gst_matroska_demux_query (demux, pad, query);
+}
+
+/* returns FALSE if there are no pads to deliver event to,
+ * otherwise TRUE (whatever the outcome of event sending),
+ * takes ownership of the passed event! */
+static gboolean
+gst_matroska_demux_send_event (GstMatroskaDemux * demux, GstEvent * event)
+{
+ gboolean ret = FALSE;
+ gint i;
+
+ g_return_val_if_fail (event != NULL, FALSE);
+
+ GST_DEBUG_OBJECT (demux, "Sending event of type %s to all source pads",
+ GST_EVENT_TYPE_NAME (event));
+
+ g_assert (demux->common.src->len == demux->common.num_streams);
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, i);
+ gst_event_ref (event);
+ gst_pad_push_event (stream->pad, event);
+ ret = TRUE;
+ }
+
+ gst_event_unref (event);
+ return ret;
+}
+
+static void
+gst_matroska_demux_send_tags (GstMatroskaDemux * demux)
+{
+ gint i;
+
+ if (G_UNLIKELY (demux->common.global_tags_changed)) {
+ GstEvent *tag_event;
+ gst_tag_list_add (demux->common.global_tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_CONTAINER_FORMAT, "Matroska", NULL);
+ GST_DEBUG_OBJECT (demux, "Sending global_tags %p : %" GST_PTR_FORMAT,
+ demux->common.global_tags, demux->common.global_tags);
+
+ tag_event =
+ gst_event_new_tag (gst_tag_list_copy (demux->common.global_tags));
+
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, i);
+ gst_pad_push_event (stream->pad, gst_event_ref (tag_event));
+ }
+
+ gst_event_unref (tag_event);
+ demux->common.global_tags_changed = FALSE;
+ }
+
+ g_assert (demux->common.src->len == demux->common.num_streams);
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, i);
+
+ if (G_UNLIKELY (stream->tags_changed)) {
+ GST_DEBUG_OBJECT (demux, "Sending tags %p for pad %s:%s : %"
+ GST_PTR_FORMAT, stream->tags,
+ GST_DEBUG_PAD_NAME (stream->pad), stream->tags);
+ gst_pad_push_event (stream->pad,
+ gst_event_new_tag (gst_tag_list_copy (stream->tags)));
+ stream->tags_changed = FALSE;
+ }
+ }
+}
+
+static gboolean
+gst_matroska_demux_element_send_event (GstElement * element, GstEvent * event)
+{
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+ gboolean res;
+
+ g_return_val_if_fail (event != NULL, FALSE);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+ /* no seeking until we are (safely) ready */
+ if (demux->common.state != GST_MATROSKA_READ_STATE_DATA) {
+ GST_DEBUG_OBJECT (demux,
+ "not ready for seeking yet, deferring seek: %" GST_PTR_FORMAT, event);
+ if (demux->deferred_seek_event)
+ gst_event_unref (demux->deferred_seek_event);
+ demux->deferred_seek_event = event;
+ demux->deferred_seek_pad = NULL;
+ return TRUE;
+ }
+ if (!demux->streaming)
+ res = gst_matroska_demux_handle_seek_event (demux, NULL, event);
+ else
+ res = gst_matroska_demux_handle_seek_push (demux, NULL, event);
+ } else {
+ GST_WARNING_OBJECT (demux, "Unhandled event of type %s",
+ GST_EVENT_TYPE_NAME (event));
+ res = FALSE;
+ }
+ gst_event_unref (event);
+ return res;
+}
+
+static gboolean
+gst_matroska_demux_move_to_entry (GstMatroskaDemux * demux,
+ GstMatroskaIndex * entry, gboolean reset, gboolean update)
+{
+ gint i;
+
+ GST_OBJECT_LOCK (demux);
+
+ if (update) {
+ /* seek (relative to matroska segment) */
+ /* position might be invalid; will error when streaming resumes ... */
+ demux->common.offset = entry->pos + demux->common.ebml_segment_start;
+ demux->next_cluster_offset = 0;
+
+ GST_DEBUG_OBJECT (demux,
+ "Seeked to offset %" G_GUINT64_FORMAT ", block %d, " "time %"
+ GST_TIME_FORMAT, entry->pos + demux->common.ebml_segment_start,
+ entry->block, GST_TIME_ARGS (entry->time));
+
+ /* update the time */
+ gst_matroska_read_common_reset_streams (&demux->common, entry->time, TRUE);
+ gst_flow_combiner_reset (demux->flowcombiner);
+ demux->common.segment.position = entry->time;
+ demux->seek_block = entry->block;
+ demux->seek_first = TRUE;
+ demux->last_stop_end = GST_CLOCK_TIME_NONE;
+ }
+
+ for (i = 0; i < demux->common.src->len; i++) {
+ GstMatroskaTrackContext *stream = g_ptr_array_index (demux->common.src, i);
+
+ if (reset) {
+ stream->to_offset = G_MAXINT64;
+ } else {
+ if (stream->from_offset != -1)
+ stream->to_offset = stream->from_offset;
+ }
+ stream->from_offset = -1;
+ stream->from_time = GST_CLOCK_TIME_NONE;
+ }
+
+ GST_OBJECT_UNLOCK (demux);
+
+ return TRUE;
+}
+
+static gint
+gst_matroska_cluster_compare (gint64 * i1, gint64 * i2)
+{
+ if (*i1 < *i2)
+ return -1;
+ else if (*i1 > *i2)
+ return 1;
+ else
+ return 0;
+}
+
/* searches for a cluster start from @pos,
 * return GST_FLOW_OK and cluster position in @pos if found */
static GstFlowReturn
gst_matroska_demux_search_cluster (GstMatroskaDemux * demux, gint64 * pos,
    gboolean forward)
{
  gint64 newpos = *pos;
  gint64 orig_offset;
  GstFlowReturn ret = GST_FLOW_OK;
  const guint chunk = 128 * 1024;       /* scan window size per pull */
  GstBuffer *buf = NULL;
  GstMapInfo map;
  gpointer data = NULL;
  gsize size;
  guint64 length;
  guint32 id;
  guint needed;
  gint64 oldpos, oldlength;

  /* demux->common.offset is used as scratch below; restore it on exit */
  orig_offset = demux->common.offset;

  GST_LOG_OBJECT (demux, "searching cluster %s offset %" G_GINT64_FORMAT,
      forward ? "following" : "preceding", *pos);

  /* fast path: consult the table of cluster offsets seen so far */
  if (demux->clusters) {
    gint64 *cpos;

    cpos = gst_util_array_binary_search (demux->clusters->data,
        demux->clusters->len, sizeof (gint64),
        (GCompareDataFunc) gst_matroska_cluster_compare,
        forward ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, pos, NULL);
    /* sanity check */
    if (cpos) {
      GST_DEBUG_OBJECT (demux,
          "cluster reported at offset %" G_GINT64_FORMAT, *cpos);
      demux->common.offset = *cpos;
      ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
          GST_ELEMENT_CAST (demux), &id, &length, &needed);
      if (ret == GST_FLOW_OK && id == GST_MATROSKA_ID_CLUSTER) {
        newpos = *cpos;
        goto exit;
      }
    }
  }

  /* read in at newpos and scan for ebml cluster id */
  oldpos = oldlength = -1;
  while (1) {
    GstByteReader reader;
    gint cluster_pos;
    guint toread = chunk;

    if (!forward) {
      /* when scanning backwards, move the window one chunk towards the
       * start; never read beyond the requested target */
      if (G_UNLIKELY (newpos < chunk)) {
        toread = newpos;
        newpos = 0;
      } else {
        newpos -= chunk;
      }
    }
    /* release the previous window before pulling the next one */
    if (buf != NULL) {
      gst_buffer_unmap (buf, &map);
      gst_buffer_unref (buf);
      buf = NULL;
    }
    ret = gst_pad_pull_range (demux->common.sinkpad, newpos, toread, &buf);
    if (ret != GST_FLOW_OK)
      break;
    GST_DEBUG_OBJECT (demux,
        "read buffer size %" G_GSIZE_FORMAT " at offset %" G_GINT64_FORMAT,
        gst_buffer_get_size (buf), newpos);
    gst_buffer_map (buf, &map, GST_MAP_READ);
    data = map.data;
    size = map.size;
    /* guard against an endless loop when upstream keeps returning the
     * same window (e.g. at EOS) */
    if (oldpos == newpos && oldlength == map.size) {
      GST_ERROR_OBJECT (demux, "Stuck at same position");
      ret = GST_FLOW_ERROR;
      goto exit;
    } else {
      oldpos = newpos;
      oldlength = map.size;
    }

    gst_byte_reader_init (&reader, data, size);
    cluster_pos = -1;
    while (1) {
      gint found = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
          GST_MATROSKA_ID_CLUSTER, 0, gst_byte_reader_get_remaining (&reader));
      if (forward) {
        /* forward search: the first match in the window wins */
        cluster_pos = found;
        break;
      }
      /* need last occurrence when searching backwards */
      if (found >= 0) {
        cluster_pos = gst_byte_reader_get_pos (&reader) + found;
        gst_byte_reader_skip (&reader, found + 4);
      } else {
        break;
      }
    }

    if (cluster_pos >= 0) {
      newpos += cluster_pos;
      GST_DEBUG_OBJECT (demux,
          "found cluster ebml id at offset %" G_GINT64_FORMAT, newpos);
      /* extra checks whether we really sync'ed to a cluster:
       * - either it is the first and only cluster
       * - either there is a cluster after this one
       * - either cluster length is undefined
       */
      /* ok if first cluster (there may not a subsequent one) */
      if (newpos == demux->first_cluster_offset) {
        GST_DEBUG_OBJECT (demux, "cluster is first cluster -> OK");
        break;
      }
      demux->common.offset = newpos;
      ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
          GST_ELEMENT_CAST (demux), &id, &length, &needed);
      if (ret != GST_FLOW_OK) {
        GST_DEBUG_OBJECT (demux, "need more data -> continue");
        goto next;
      }
      g_assert (id == GST_MATROSKA_ID_CLUSTER);
      GST_DEBUG_OBJECT (demux, "cluster size %" G_GUINT64_FORMAT ", prefix %d",
          length, needed);
      /* ok if undefined length or first cluster */
      if (length == GST_EBML_SIZE_UNKNOWN || length == G_MAXUINT64) {
        GST_DEBUG_OBJECT (demux, "cluster has undefined length -> OK");
        break;
      }
      /* skip cluster */
      demux->common.offset += length + needed;
      ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
          GST_ELEMENT_CAST (demux), &id, &length, &needed);
      if (ret != GST_FLOW_OK)
        goto next;
      GST_DEBUG_OBJECT (demux, "next element is %scluster",
          id == GST_MATROSKA_ID_CLUSTER ? "" : "not ");
      if (id == GST_MATROSKA_ID_CLUSTER)
        break;
    next:
      /* candidate rejected; resume scanning just past it */
      if (forward)
        newpos += 1;
    } else {
      /* partial cluster id may have been in tail of buffer; overlap the
       * next window by 3 bytes so a split 4-byte id is still found */
      newpos +=
          forward ? MAX (gst_byte_reader_get_remaining (&reader), 4) - 3 : 3;
    }
  }

  if (buf) {
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    buf = NULL;
  }

exit:
  demux->common.offset = orig_offset;
  *pos = newpos;
  return ret;
}
+
+/* Three states to express: starts with I-frame, starts with delta, don't know */
+typedef enum
+{
+  CLUSTER_STATUS_NONE = 0,      /* keyframe-ness could not be determined */
+  CLUSTER_STATUS_STARTS_WITH_KEYFRAME,  /* first SimpleBlock is a keyframe */
+  CLUSTER_STATUS_STARTS_WITH_DELTAUNIT, /* first SimpleBlock is a delta unit */
+} ClusterStatus;
+
+/* Summary of a cluster as gathered by gst_matroska_demux_peek_cluster_info() */
+typedef struct
+{
+  guint64 offset;               /* byte offset of the cluster in the file */
+  guint64 size;                 /* element length + id/size prefix,
+                                 * or 0 if the length is undefined/unknown */
+  guint64 prev_size;            /* value of the PrevSize element, 0 if absent */
+  GstClockTime time;            /* cluster timecode scaled by the segment
+                                 * timescale, GST_CLOCK_TIME_NONE if unknown */
+  ClusterStatus status;         /* whether the cluster starts with a keyframe */
+} ClusterInfo;
+
+/* Short human-readable tag for a ClusterStatus, for debug output */
+static const gchar *
+cluster_status_get_nick (ClusterStatus status)
+{
+  if (status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME)
+    return "key";
+  if (status == CLUSTER_STATUS_STARTS_WITH_DELTAUNIT)
+    return "delta";
+  if (status == CLUSTER_STATUS_NONE)
+    return "none";
+
+  return "???";
+}
+
+/* Skip over an EBML variable-length number; the position of the first set
+ * bit in the first byte encodes its total length in bytes:
+ * 1xxx.. = 1 byte
+ * 01xx.. = 2 bytes
+ * 001x.. = 3 bytes, etc.
+ */
+static gboolean
+bit_reader_skip_ebml_num (GstBitReader * br)
+{
+  guint8 first = 0;
+  guint len = 1;
+
+  if (!gst_bit_reader_peek_bits_uint8 (br, &first, 8))
+    return FALSE;
+
+  /* count leading zero bits to determine the encoded length */
+  while (len <= 8 && (first & (0x100 >> len)) == 0)
+    len++;
+
+  return gst_bit_reader_skip (br, len * 8);
+}
+
+/* Don't probe more than that many bytes into the cluster for keyframe info
+ * (random value, mostly for sanity checking) */
+#define MAX_CLUSTER_INFO_PROBE_LENGTH 256
+
+/* Probe the cluster at @offset and fill @cluster with its offset, size,
+ * prev-size, timestamp and whether its first SimpleBlock is a keyframe.
+ * Scanning stops at the first (Encrypted)Block/BlockGroup element, at the
+ * start of the next cluster, or after MAX_CLUSTER_INFO_PROBE_LENGTH bytes.
+ * NOTE: clobbers demux->common.offset and demux->cluster_time; the caller
+ * is expected to save/restore the global read state around this call.
+ * Returns TRUE if at least the cluster timestamp could be extracted. */
+static gboolean
+gst_matroska_demux_peek_cluster_info (GstMatroskaDemux * demux,
+    ClusterInfo * cluster, guint64 offset)
+{
+  demux->common.offset = offset;
+  demux->cluster_time = GST_CLOCK_TIME_NONE;
+
+  cluster->offset = offset;
+  cluster->size = 0;
+  cluster->prev_size = 0;
+  cluster->time = GST_CLOCK_TIME_NONE;
+  cluster->status = CLUSTER_STATUS_NONE;
+
+  /* parse first few elements in cluster */
+  do {
+    GstFlowReturn flow;
+    guint64 length;
+    guint32 id;
+    guint needed;
+
+    flow = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+        GST_ELEMENT_CAST (demux), &id, &length, &needed);
+
+    if (flow != GST_FLOW_OK)
+      break;
+
+    GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+        "size %" G_GUINT64_FORMAT ", needed %d", demux->common.offset, id,
+        length, needed);
+
+    /* Reached start of next cluster without finding data, stop processing */
+    if (id == GST_MATROSKA_ID_CLUSTER && cluster->offset != offset)
+      break;
+
+    /* Not going to parse into these for now, stop processing */
+    if (id == GST_MATROSKA_ID_ENCRYPTEDBLOCK
+        || id == GST_MATROSKA_ID_BLOCKGROUP || id == GST_MATROSKA_ID_BLOCK)
+      break;
+
+    /* SimpleBlock: peek at headers to check if it's a keyframe */
+    if (id == GST_MATROSKA_ID_SIMPLEBLOCK) {
+      GstBitReader br;
+      guint8 *d, hdr_len, v = 0;
+
+      GST_DEBUG_OBJECT (demux, "SimpleBlock found");
+
+      /* SimpleBlock header is max. 21 bytes */
+      hdr_len = MIN (21, length);
+
+      flow = gst_matroska_read_common_peek_bytes (&demux->common,
+          demux->common.offset, hdr_len, NULL, &d);
+
+      if (flow != GST_FLOW_OK)
+        break;
+
+      gst_bit_reader_init (&br, d, hdr_len);
+
+      /* skip prefix: ebml id (SimpleBlock) + element length */
+      if (!gst_bit_reader_skip (&br, 8 * needed))
+        break;
+
+      /* skip track number (ebml coded) */
+      if (!bit_reader_skip_ebml_num (&br))
+        break;
+
+      /* skip Timecode */
+      if (!gst_bit_reader_skip (&br, 16))
+        break;
+
+      /* read flags */
+      if (!gst_bit_reader_get_bits_uint8 (&br, &v, 8))
+        break;
+
+      /* topmost flag bit of a SimpleBlock signals a keyframe */
+      if ((v & 0x80) != 0)
+        cluster->status = CLUSTER_STATUS_STARTS_WITH_KEYFRAME;
+      else
+        cluster->status = CLUSTER_STATUS_STARTS_WITH_DELTAUNIT;
+
+      break;
+    }
+
+    flow = gst_matroska_demux_parse_id (demux, id, length, needed);
+
+    if (flow != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_CLUSTER:
+        if (length == G_MAXUINT64)
+          cluster->size = 0;
+        else
+          cluster->size = length + needed;
+        break;
+      case GST_MATROSKA_ID_PREVSIZE:
+        /* parse_id above stored the value in demux->cluster_prevsize */
+        cluster->prev_size = demux->cluster_prevsize;
+        break;
+      case GST_MATROSKA_ID_CLUSTERTIMECODE:
+        cluster->time = demux->cluster_time * demux->common.time_scale;
+        break;
+      case GST_MATROSKA_ID_SILENTTRACKS:
+      case GST_EBML_ID_CRC32:
+        /* ignore and continue */
+        break;
+      default:
+        GST_WARNING_OBJECT (demux, "Unknown ebml id 0x%08x (possibly garbage), "
+            "bailing out", id);
+        goto out;
+    }
+  } while (demux->common.offset - offset < MAX_CLUSTER_INFO_PROBE_LENGTH);
+
+out:
+
+  GST_INFO_OBJECT (demux, "Cluster @ %" G_GUINT64_FORMAT ": "
+      "time %" GST_TIME_FORMAT ", size %" G_GUINT64_FORMAT ", "
+      "prev_size %" G_GUINT64_FORMAT ", %s", cluster->offset,
+      GST_TIME_ARGS (cluster->time), cluster->size, cluster->prev_size,
+      cluster_status_get_nick (cluster->status));
+
+  /* return success as long as we could extract the minimum useful information */
+  return cluster->time != GST_CLOCK_TIME_NONE;
+}
+
+/* Scan backwards from *cluster_offset, cluster by cluster, until we find a
+ * cluster that starts with a keyframe, using the cluster PrevSize element
+ * where available and falling back to a brute-force byte search otherwise.
+ * Backtracking is bounded by demux->max_backtrack_distance seconds of
+ * cluster timestamps relative to the original target cluster.
+ * Returns TRUE if the cluster offset (and *cluster_time) was updated. */
+static gboolean
+gst_matroska_demux_scan_back_for_keyframe_cluster (GstMatroskaDemux * demux,
+    gint64 * cluster_offset, GstClockTime * cluster_time)
+{
+  GstClockTime stream_start_time = demux->stream_start_time;
+  guint64 first_cluster_offset = demux->first_cluster_offset;
+  gint64 off = *cluster_offset;
+  ClusterInfo cluster = { 0, };
+
+  GST_INFO_OBJECT (demux, "Checking if cluster starts with keyframe");
+  while (off > first_cluster_offset) {
+    if (!gst_matroska_demux_peek_cluster_info (demux, &cluster, off)) {
+      GST_LOG_OBJECT (demux,
+          "Couldn't get info on cluster @ %" G_GUINT64_FORMAT, off);
+      break;
+    }
+
+    /* Keyframe? Then we're done */
+    if (cluster.status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME) {
+      GST_LOG_OBJECT (demux,
+          "Found keyframe at start of cluster @ %" G_GUINT64_FORMAT, off);
+      break;
+    }
+
+    /* We only scan back if we *know* we landed on a cluster that
+     * starts with a delta frame. */
+    if (cluster.status != CLUSTER_STATUS_STARTS_WITH_DELTAUNIT) {
+      GST_LOG_OBJECT (demux,
+          "No delta frame at start of cluster @ %" G_GUINT64_FORMAT, off);
+      break;
+    }
+
+    GST_DEBUG_OBJECT (demux, "Cluster starts with delta frame, backtracking");
+
+    /* Don't scan back more than this much in time from the cluster we
+     * originally landed on. This is mostly a sanity check in case a file
+     * always has keyframes in the middle of clusters and never at the
+     * beginning. Without this we would always scan back to the beginning
+     * of the file in that case. */
+    if (cluster.time != GST_CLOCK_TIME_NONE) {
+      GstClockTimeDiff distance = GST_CLOCK_DIFF (cluster.time, *cluster_time);
+
+      if (distance < 0 || distance > demux->max_backtrack_distance * GST_SECOND) {
+        GST_DEBUG_OBJECT (demux, "Haven't found cluster with keyframe within "
+            "%u secs of original seek target cluster, stopping",
+            demux->max_backtrack_distance);
+        break;
+      }
+    }
+
+    /* If we have cluster prev_size we can skip back efficiently. If not,
+     * we'll just do a brute force search for a cluster identifier */
+    if (cluster.prev_size > 0 && off >= cluster.prev_size) {
+      off -= cluster.prev_size;
+    } else {
+      GstFlowReturn flow;
+
+      GST_LOG_OBJECT (demux, "Cluster has no or invalid prev size, searching "
+          "for previous cluster instead then");
+
+      flow = gst_matroska_demux_search_cluster (demux, &off, FALSE);
+      if (flow != GST_FLOW_OK) {
+        GST_DEBUG_OBJECT (demux, "cluster search yielded flow %s, stopping",
+            gst_flow_get_name (flow));
+        break;
+      }
+    }
+
+    if (off <= first_cluster_offset) {
+      GST_LOG_OBJECT (demux, "Reached first cluster, stopping");
+      *cluster_offset = first_cluster_offset;
+      *cluster_time = stream_start_time;
+      return TRUE;
+    }
+    GST_LOG_OBJECT (demux, "Trying prev cluster @ %" G_GUINT64_FORMAT, off);
+  }
+
+  /* If we found a cluster starting with a keyframe jump to that instead,
+   * otherwise leave everything as it was before */
+  if (cluster.time != GST_CLOCK_TIME_NONE
+      && (cluster.offset == first_cluster_offset
+          || cluster.status == CLUSTER_STATUS_STARTS_WITH_KEYFRAME)) {
+    *cluster_offset = cluster.offset;
+    *cluster_time = cluster.time;
+    return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Bisect and scan through file for cluster starting before @time (or the
+ * last cluster if @time is GST_CLOCK_TIME_NONE), returns a newly-allocated
+ * fake index entry with corresponding info on the cluster, or NULL on
+ * failure.  Saves and restores the global read state around the scan;
+ * the caller must g_free() the returned entry. */
+static GstMatroskaIndex *
+gst_matroska_demux_search_pos (GstMatroskaDemux * demux, GstClockTime time)
+{
+  GstMatroskaIndex *entry = NULL;
+  GstMatroskaReadState current_state;
+  GstClockTime otime, prev_cluster_time, current_cluster_time, cluster_time;
+  GstClockTime atime;
+  gint64 opos, newpos, current_offset;
+  gint64 prev_cluster_offset = -1, current_cluster_offset, cluster_offset;
+  gint64 apos, maxpos;
+  guint64 cluster_size = 0;
+  GstFlowReturn ret;
+  guint64 length;
+  guint32 id;
+  guint needed;
+
+  /* estimate new position, resync using cluster ebml id,
+   * and bisect further or scan forward to appropriate cluster */
+
+  /* save some current global state which will be touched by our scanning */
+  current_state = demux->common.state;
+  g_return_val_if_fail (current_state == GST_MATROSKA_READ_STATE_DATA, NULL);
+
+  current_cluster_offset = demux->cluster_offset;
+  current_cluster_time = demux->cluster_time;
+  current_offset = demux->common.offset;
+
+  demux->common.state = GST_MATROSKA_READ_STATE_SCANNING;
+
+  /* estimate using start and last known cluster */
+  GST_OBJECT_LOCK (demux);
+  apos = demux->first_cluster_offset;
+  atime = demux->stream_start_time;
+  opos = demux->last_cluster_offset;
+  otime = demux->stream_last_time;
+  GST_OBJECT_UNLOCK (demux);
+
+  /* sanitize */
+  time = MAX (time, atime);
+  otime = MAX (otime, atime);
+  opos = MAX (opos, apos);
+
+  maxpos = gst_matroska_read_common_get_length (&demux->common);
+
+  /* invariants:
+   * apos <= opos
+   * atime <= otime
+   * apos always refer to a cluster before target time;
+   * opos may or may not be after target time, but if it is once so,
+   * then also in next iteration
+   * */
+
+retry:
+  GST_LOG_OBJECT (demux,
+      "apos: %" G_GUINT64_FORMAT ", atime: %" GST_TIME_FORMAT ", %"
+      GST_TIME_FORMAT " in stream time, "
+      "opos: %" G_GUINT64_FORMAT ", otime: %" GST_TIME_FORMAT ", %"
+      GST_TIME_FORMAT " in stream time (start %" GST_TIME_FORMAT "), time %"
+      GST_TIME_FORMAT, apos, GST_TIME_ARGS (atime),
+      GST_TIME_ARGS (atime - demux->stream_start_time), opos,
+      GST_TIME_ARGS (otime), GST_TIME_ARGS (otime - demux->stream_start_time),
+      GST_TIME_ARGS (demux->stream_start_time), GST_TIME_ARGS (time));
+
+  g_assert (atime <= otime);
+  g_assert (apos <= opos);
+  if (time == GST_CLOCK_TIME_NONE) {
+    GST_DEBUG_OBJECT (demux, "searching last cluster");
+    newpos = maxpos;
+    if (newpos == -1) {
+      GST_DEBUG_OBJECT (demux, "unknown file size; bailing out");
+      goto exit;
+    }
+  } else if (otime <= atime) {
+    newpos = apos;
+  } else {
+    /* linear interpolation between the known (position, time) bounds */
+    newpos = apos +
+        gst_util_uint64_scale (opos - apos, time - atime, otime - atime);
+    if (maxpos != -1 && newpos > maxpos)
+      newpos = maxpos;
+  }
+
+  GST_DEBUG_OBJECT (demux,
+      "estimated offset for %" GST_TIME_FORMAT ": %" G_GINT64_FORMAT,
+      GST_TIME_ARGS (time), newpos);
+
+  /* search backwards */
+  if (newpos > apos) {
+    ret = gst_matroska_demux_search_cluster (demux, &newpos, FALSE);
+    if (ret != GST_FLOW_OK)
+      goto exit;
+  }
+
+  /* then start scanning and parsing for cluster time,
+   * re-estimate if possible, otherwise next cluster and so on */
+  /* note that each re-estimate is entered with a change in apos or opos,
+   * avoiding infinite loop */
+  demux->common.offset = newpos;
+  demux->cluster_time = cluster_time = GST_CLOCK_TIME_NONE;
+  cluster_size = 0;
+  prev_cluster_time = GST_CLOCK_TIME_NONE;
+  while (1) {
+    /* peek and parse some elements */
+    ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+        GST_ELEMENT_CAST (demux), &id, &length, &needed);
+    if (ret != GST_FLOW_OK)
+      goto error;
+    GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+        "size %" G_GUINT64_FORMAT ", needed %d", demux->common.offset, id,
+        length, needed);
+    ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+    if (ret != GST_FLOW_OK)
+      goto error;
+
+    if (id == GST_MATROSKA_ID_CLUSTER) {
+      cluster_time = GST_CLOCK_TIME_NONE;
+      if (length == G_MAXUINT64)
+        cluster_size = 0;
+      else
+        cluster_size = length + needed;
+    }
+    if (demux->cluster_time != GST_CLOCK_TIME_NONE &&
+        cluster_time == GST_CLOCK_TIME_NONE) {
+      cluster_time = demux->cluster_time * demux->common.time_scale;
+      cluster_offset = demux->cluster_offset;
+      GST_DEBUG_OBJECT (demux, "found cluster at offset %" G_GINT64_FORMAT
+          " with time %" GST_TIME_FORMAT, cluster_offset,
+          GST_TIME_ARGS (cluster_time));
+      if (time == GST_CLOCK_TIME_NONE) {
+        GST_DEBUG_OBJECT (demux, "found last cluster");
+        prev_cluster_time = cluster_time;
+        prev_cluster_offset = cluster_offset;
+        break;
+      }
+      if (cluster_time > time) {
+        GST_DEBUG_OBJECT (demux, "overshot target");
+        /* cluster overshoots */
+        if (cluster_offset == demux->first_cluster_offset) {
+          /* but no prev one */
+          GST_DEBUG_OBJECT (demux, "but using first cluster anyway");
+          prev_cluster_time = cluster_time;
+          prev_cluster_offset = cluster_offset;
+          break;
+        }
+        if (prev_cluster_time != GST_CLOCK_TIME_NONE) {
+          /* prev cluster did not overshoot, so prev cluster is target */
+          break;
+        } else {
+          /* re-estimate using this new position info */
+          opos = cluster_offset;
+          otime = cluster_time;
+          goto retry;
+        }
+      } else {
+        /* cluster undershoots */
+        GST_DEBUG_OBJECT (demux, "undershot target");
+        /* ok if close enough */
+        if (GST_CLOCK_DIFF (cluster_time, time) < 5 * GST_SECOND) {
+          GST_DEBUG_OBJECT (demux, "target close enough");
+          prev_cluster_time = cluster_time;
+          prev_cluster_offset = cluster_offset;
+          break;
+        }
+        if (otime > time) {
+          /* we are in between atime and otime => can bisect if worthwhile */
+          if (prev_cluster_time != GST_CLOCK_TIME_NONE &&
+              cluster_time > prev_cluster_time &&
+              (GST_CLOCK_DIFF (prev_cluster_time, cluster_time) * 10 <
+                  GST_CLOCK_DIFF (cluster_time, time))) {
+            /* we moved at least one cluster forward,
+             * and it looks like target is still far away,
+             * let's estimate again */
+            GST_DEBUG_OBJECT (demux, "bisecting with new apos");
+            apos = cluster_offset;
+            atime = cluster_time;
+            goto retry;
+          }
+        }
+        /* cluster undershoots, goto next one */
+        prev_cluster_time = cluster_time;
+        prev_cluster_offset = cluster_offset;
+        /* skip cluster if length is defined,
+         * otherwise will be skippingly parsed into */
+        if (cluster_size) {
+          GST_DEBUG_OBJECT (demux, "skipping to next cluster");
+          demux->common.offset = cluster_offset + cluster_size;
+          demux->cluster_time = GST_CLOCK_TIME_NONE;
+        } else {
+          GST_DEBUG_OBJECT (demux, "parsing/skipping cluster elements");
+        }
+      }
+    }
+    continue;
+
+  error:
+    /* EOS while scanning is ok if we already found a usable cluster */
+    if (ret == GST_FLOW_EOS) {
+      if (prev_cluster_time != GST_CLOCK_TIME_NONE)
+        break;
+    }
+    goto exit;
+  }
+
+  /* In the bisect loop above we always undershoot and then jump forward
+   * cluster-by-cluster until we overshoot, so if we get here we've gone
+   * over and the previous cluster is where we need to go to. */
+  cluster_offset = prev_cluster_offset;
+  cluster_time = prev_cluster_time;
+
+  /* If we have video and can easily backtrack, check if we landed on a cluster
+   * that starts with a keyframe - and if not backtrack until we find one that
+   * does. */
+  if (demux->have_nonintraonly_v_streams && demux->max_backtrack_distance > 0) {
+    if (gst_matroska_demux_scan_back_for_keyframe_cluster (demux,
+            &cluster_offset, &cluster_time)) {
+      GST_INFO_OBJECT (demux, "Adjusted cluster to %" GST_TIME_FORMAT " @ "
+          "%" G_GUINT64_FORMAT, GST_TIME_ARGS (cluster_time), cluster_offset);
+    }
+  }
+
+  entry = g_new0 (GstMatroskaIndex, 1);
+  entry->time = cluster_time;
+  entry->pos = cluster_offset - demux->common.ebml_segment_start;
+  GST_DEBUG_OBJECT (demux, "simulated index entry; time %" GST_TIME_FORMAT
+      ", pos %" G_GUINT64_FORMAT, GST_TIME_ARGS (entry->time), entry->pos);
+
+exit:
+
+  /* restore some state */
+  demux->cluster_offset = current_cluster_offset;
+  demux->cluster_time = current_cluster_time;
+  demux->common.offset = current_offset;
+  demux->common.state = current_state;
+
+  return entry;
+}
+
+/* Perform a seek (pull mode; also delegated to from push mode once an index
+ * is available): updates the segment, locates a matching index entry
+ * (falling back to scanning the file in pull mode without an index),
+ * handles flushing/pausing, commits the new segment and restarts the
+ * streaming task.  Does NOT take ownership of @event. */
+static gboolean
+gst_matroska_demux_handle_seek_event (GstMatroskaDemux * demux,
+    GstPad * pad, GstEvent * event)
+{
+  GstMatroskaIndex *entry = NULL;
+  GstMatroskaIndex scan_entry;
+  GstSeekFlags flags;
+  GstSeekType cur_type, stop_type;
+  GstFormat format;
+  gboolean flush, keyunit, instant_rate_change, before, after, accurate,
+      snap_next;
+  gdouble rate;
+  gint64 cur, stop;
+  GstMatroskaTrackContext *track = NULL;
+  GstSegment seeksegment = { 0, };
+  guint64 seekpos;
+  gboolean update = TRUE;
+  gboolean pad_locked = FALSE;
+  guint32 seqnum;
+  GstSearchMode snap_dir;
+
+  g_return_val_if_fail (event != NULL, FALSE);
+
+  if (pad)
+    track = gst_pad_get_element_private (pad);
+
+  GST_DEBUG_OBJECT (demux, "Have seek %" GST_PTR_FORMAT, event);
+
+  gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+      &stop_type, &stop);
+  seqnum = gst_event_get_seqnum (event);
+
+  /* we can only seek on time */
+  if (format != GST_FORMAT_TIME) {
+    GST_DEBUG_OBJECT (demux, "Can only seek on TIME");
+    return FALSE;
+  }
+
+  GST_DEBUG_OBJECT (demux, "configuring seek");
+
+  flush = ! !(flags & GST_SEEK_FLAG_FLUSH);
+  keyunit = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
+  after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
+  before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
+  accurate = ! !(flags & GST_SEEK_FLAG_ACCURATE);
+  instant_rate_change = ! !(flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE);
+
+  /* Directly send the instant-rate-change event here before taking the
+   * stream-lock so that it can be applied as soon as possible */
+  if (instant_rate_change) {
+    GstEvent *ev;
+
+    /* instant rate change only supported if direction does not change. All
+     * other requirements are already checked before creating the seek event
+     * but let's double-check here to be sure */
+    if ((rate > 0 && demux->common.segment.rate < 0) ||
+        (rate < 0 && demux->common.segment.rate > 0) ||
+        cur_type != GST_SEEK_TYPE_NONE ||
+        stop_type != GST_SEEK_TYPE_NONE || flush) {
+      GST_ERROR_OBJECT (demux,
+          "Instant rate change seeks only supported in the "
+          "same direction, without flushing and position change");
+      return FALSE;
+    }
+
+    ev = gst_event_new_instant_rate_change (rate /
+        demux->common.segment.rate, (GstSegmentFlags) flags);
+    gst_event_set_seqnum (ev, seqnum);
+    gst_matroska_demux_send_event (demux, ev);
+    return TRUE;
+  }
+
+  /* copy segment, we need this because we still need the old
+   * segment when we close the current segment. */
+  memcpy (&seeksegment, &demux->common.segment, sizeof (GstSegment));
+
+  /* pull mode without index means that the actual duration is not known,
+   * we might be playing a file that's still being recorded
+   * so, invalidate our current duration, which is only a moving target,
+   * and should not be used to clamp anything */
+  if (!demux->streaming && !demux->common.index && demux->invalid_duration) {
+    seeksegment.duration = GST_CLOCK_TIME_NONE;
+  }
+
+  /* Subtract stream_start_time so we always seek on a segment
+   * in stream time */
+  if (GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
+    seeksegment.start -= demux->stream_start_time;
+    seeksegment.position -= demux->stream_start_time;
+    if (GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
+      seeksegment.stop -= demux->stream_start_time;
+    else
+      seeksegment.stop = seeksegment.duration;
+  }
+
+  if (!gst_segment_do_seek (&seeksegment, rate, format, flags,
+          cur_type, cur, stop_type, stop, &update)) {
+    GST_WARNING_OBJECT (demux, "gst_segment_do_seek() failed.");
+    return FALSE;
+  }
+
+  /* Restore the clip timestamp offset */
+  if (GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
+    seeksegment.position += demux->stream_start_time;
+    seeksegment.start += demux->stream_start_time;
+    if (!GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
+      seeksegment.stop = seeksegment.duration;
+    if (GST_CLOCK_TIME_IS_VALID (seeksegment.stop))
+      seeksegment.stop += demux->stream_start_time;
+  }
+
+  /* restore segment duration (if any effect),
+   * would be determined again when parsing, but anyway ... */
+  seeksegment.duration = demux->common.segment.duration;
+
+  /* always do full update if flushing,
+   * otherwise problems might arise downstream with missing keyframes etc */
+  update = update || flush;
+
+  GST_DEBUG_OBJECT (demux, "New segment %" GST_SEGMENT_FORMAT, &seeksegment);
+
+  /* check sanity before we start flushing and all that */
+  snap_next = after && !before;
+  if (seeksegment.rate < 0)
+    snap_dir = snap_next ? GST_SEARCH_MODE_BEFORE : GST_SEARCH_MODE_AFTER;
+  else
+    snap_dir = snap_next ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE;
+
+  GST_OBJECT_LOCK (demux);
+
+  seekpos = seeksegment.position;
+  if (accurate) {
+    /* start earlier by the audio lead-in, clamped so we don't go negative */
+    seekpos -= MIN (seeksegment.position, demux->audio_lead_in_ts);
+  }
+
+  track = gst_matroska_read_common_get_seek_track (&demux->common, track);
+  if ((entry = gst_matroska_read_common_do_index_seek (&demux->common, track,
+              seekpos, &demux->seek_index, &demux->seek_entry,
+              snap_dir)) == NULL) {
+    /* pull mode without index can scan later on */
+    if (demux->streaming) {
+      GST_DEBUG_OBJECT (demux, "No matching seek entry in index");
+      GST_OBJECT_UNLOCK (demux);
+      return FALSE;
+    } else if (rate < 0.0) {
+      /* FIXME: We should build an index during playback or when scanning
+       * that can be used here. The reverse playback code requires seek_index
+       * and seek_entry to be set!
+       */
+      GST_DEBUG_OBJECT (demux,
+          "No matching seek entry in index, needed for reverse playback");
+      GST_OBJECT_UNLOCK (demux);
+      return FALSE;
+    }
+  }
+  GST_DEBUG_OBJECT (demux, "Seek position looks sane");
+  GST_OBJECT_UNLOCK (demux);
+
+  if (!update) {
+    /* only have to update some segment,
+     * but also still have to honour flush and so on */
+    GST_DEBUG_OBJECT (demux, "... no update");
+    /* bad goto, bad ... */
+    goto next;
+  }
+
+  if (demux->streaming)
+    goto finish;
+
+next:
+  /* stop streaming before touching any shared state: either flush
+   * everything or at least pause the streaming task */
+  if (flush) {
+    GstEvent *flush_event = gst_event_new_flush_start ();
+    gst_event_set_seqnum (flush_event, seqnum);
+    GST_DEBUG_OBJECT (demux, "Starting flush");
+    gst_pad_push_event (demux->common.sinkpad, gst_event_ref (flush_event));
+    gst_matroska_demux_send_event (demux, flush_event);
+  } else {
+    GST_DEBUG_OBJECT (demux, "Non-flushing seek, pausing task");
+    gst_pad_pause_task (demux->common.sinkpad);
+  }
+  /* ouch */
+  if (!update) {
+    GST_PAD_STREAM_LOCK (demux->common.sinkpad);
+    pad_locked = TRUE;
+    goto exit;
+  }
+
+  /* now grab the stream lock so that streaming cannot continue, for
+   * non flushing seeks when the element is in PAUSED this could block
+   * forever. */
+  GST_DEBUG_OBJECT (demux, "Waiting for streaming to stop");
+  GST_PAD_STREAM_LOCK (demux->common.sinkpad);
+  pad_locked = TRUE;
+
+  /* pull mode without index can do some scanning */
+  if (!demux->streaming && !entry) {
+    GstEvent *flush_event;
+
+    /* need to stop flushing upstream as we need it next */
+    if (flush) {
+      flush_event = gst_event_new_flush_stop (TRUE);
+      gst_event_set_seqnum (flush_event, seqnum);
+      gst_pad_push_event (demux->common.sinkpad, flush_event);
+    }
+    entry = gst_matroska_demux_search_pos (demux, seekpos);
+    /* keep local copy */
+    if (entry) {
+      scan_entry = *entry;
+      g_free (entry);
+      entry = &scan_entry;
+    } else {
+      GST_DEBUG_OBJECT (demux, "Scan failed to find matching position");
+      if (flush) {
+        flush_event = gst_event_new_flush_stop (TRUE);
+        gst_event_set_seqnum (flush_event, seqnum);
+        gst_matroska_demux_send_event (demux, flush_event);
+      }
+      goto seek_error;
+    }
+  }
+
+finish:
+  if (keyunit && seeksegment.rate > 0) {
+    GST_DEBUG_OBJECT (demux, "seek to key unit, adjusting segment start from %"
+        GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (seeksegment.start), GST_TIME_ARGS (entry->time));
+    seeksegment.start = MAX (entry->time, demux->stream_start_time);
+    seeksegment.position = seeksegment.start;
+    seeksegment.time = seeksegment.start - demux->stream_start_time;
+  } else if (keyunit) {
+    GST_DEBUG_OBJECT (demux, "seek to key unit, adjusting segment stop from %"
+        GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (seeksegment.stop), GST_TIME_ARGS (entry->time));
+    seeksegment.stop = MAX (entry->time, demux->stream_start_time);
+    seeksegment.position = seeksegment.stop;
+  }
+
+  if (demux->streaming) {
+    GST_OBJECT_LOCK (demux);
+    /* track real position we should start at */
+    GST_DEBUG_OBJECT (demux, "storing segment start");
+    demux->requested_seek_time = seeksegment.position;
+    demux->seek_offset = entry->pos + demux->common.ebml_segment_start;
+    GST_OBJECT_UNLOCK (demux);
+    /* need to seek to cluster start to pick up cluster time */
+    /* upstream takes care of flushing and all that
+     * ... and newsegment event handling takes care of the rest */
+    return perform_seek_to_offset (demux, rate,
+        entry->pos + demux->common.ebml_segment_start, seqnum, flags);
+  }
+
+exit:
+  if (flush) {
+    GstEvent *flush_event = gst_event_new_flush_stop (TRUE);
+    gst_event_set_seqnum (flush_event, seqnum);
+    GST_DEBUG_OBJECT (demux, "Stopping flush");
+    gst_pad_push_event (demux->common.sinkpad, gst_event_ref (flush_event));
+    gst_matroska_demux_send_event (demux, flush_event);
+  }
+
+  GST_OBJECT_LOCK (demux);
+  /* now update the real segment info */
+  GST_DEBUG_OBJECT (demux, "Committing new seek segment");
+  memcpy (&demux->common.segment, &seeksegment, sizeof (GstSegment));
+  GST_OBJECT_UNLOCK (demux);
+
+  /* update some (segment) state */
+  if (!gst_matroska_demux_move_to_entry (demux, entry, TRUE, update))
+    goto seek_error;
+
+  /* notify start of new segment */
+  if (demux->common.segment.flags & GST_SEEK_FLAG_SEGMENT) {
+    GstMessage *msg;
+
+    msg = gst_message_new_segment_start (GST_OBJECT (demux),
+        GST_FORMAT_TIME, demux->common.segment.start);
+    gst_message_set_seqnum (msg, seqnum);
+    gst_element_post_message (GST_ELEMENT (demux), msg);
+  }
+
+  GST_OBJECT_LOCK (demux);
+  if (demux->new_segment)
+    gst_event_unref (demux->new_segment);
+
+  /* On port from 0.10, discarded !update (for segment.update) here, FIXME? */
+  demux->new_segment = gst_event_new_segment (&demux->common.segment);
+  gst_event_set_seqnum (demux->new_segment, seqnum);
+  if (demux->common.segment.rate < 0 && demux->common.segment.stop == -1)
+    demux->to_time = demux->common.segment.position;
+  else
+    demux->to_time = GST_CLOCK_TIME_NONE;
+  demux->segment_seqnum = seqnum;
+  GST_OBJECT_UNLOCK (demux);
+
+  /* restart our task since it might have been stopped when we did the
+   * flush. */
+  gst_pad_start_task (demux->common.sinkpad,
+      (GstTaskFunction) gst_matroska_demux_loop, demux->common.sinkpad, NULL);
+
+  /* streaming can continue now */
+  if (pad_locked) {
+    GST_PAD_STREAM_UNLOCK (demux->common.sinkpad);
+  }
+
+  return TRUE;
+
+seek_error:
+  {
+    if (pad_locked) {
+      GST_PAD_STREAM_UNLOCK (demux->common.sinkpad);
+    }
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("Got a seek error"));
+    return FALSE;
+  }
+}
+
+/*
+ * Handle whether we can perform the seek event or if we have to let the chain
+ * function handle seeks to build the seek indexes first.
+ * Push/streaming-mode entry point; delegates to
+ * gst_matroska_demux_handle_seek_event once the Cues index has been parsed.
+ * Does NOT take ownership of @event.
+ */
+static gboolean
+gst_matroska_demux_handle_seek_push (GstMatroskaDemux * demux, GstPad * pad,
+    GstEvent * event)
+{
+  GstSeekFlags flags;
+  GstSeekType cur_type, stop_type;
+  GstFormat format;
+  gdouble rate;
+  gint64 cur, stop;
+
+  gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+      &stop_type, &stop);
+
+  /* Directly send the instant-rate-change event here before taking the
+   * stream-lock so that it can be applied as soon as possible */
+  if (flags & GST_SEEK_FLAG_INSTANT_RATE_CHANGE) {
+    guint32 seqnum;
+    GstEvent *ev;
+
+    /* instant rate change only supported if direction does not change. All
+     * other requirements are already checked before creating the seek event
+     * but let's double-check here to be sure */
+    if ((rate > 0 && demux->common.segment.rate < 0) ||
+        (rate < 0 && demux->common.segment.rate > 0) ||
+        cur_type != GST_SEEK_TYPE_NONE ||
+        stop_type != GST_SEEK_TYPE_NONE || (flags & GST_SEEK_FLAG_FLUSH)) {
+      GST_ERROR_OBJECT (demux,
+          "Instant rate change seeks only supported in the "
+          "same direction, without flushing and position change");
+      return FALSE;
+    }
+
+    seqnum = gst_event_get_seqnum (event);
+    ev = gst_event_new_instant_rate_change (rate / demux->common.segment.rate,
+        (GstSegmentFlags) flags);
+    gst_event_set_seqnum (ev, seqnum);
+    gst_matroska_demux_send_event (demux, ev);
+    return TRUE;
+  }
+
+  /* sanity checks */
+
+  /* we can only seek on time */
+  if (format != GST_FORMAT_TIME) {
+    GST_DEBUG_OBJECT (demux, "Can only seek on TIME");
+    return FALSE;
+  }
+
+  if (stop_type != GST_SEEK_TYPE_NONE && stop != GST_CLOCK_TIME_NONE) {
+    GST_DEBUG_OBJECT (demux, "Seek end-time not supported in streaming mode");
+    return FALSE;
+  }
+
+  if (!(flags & GST_SEEK_FLAG_FLUSH)) {
+    GST_DEBUG_OBJECT (demux,
+        "Non-flushing seek not supported in streaming mode");
+    return FALSE;
+  }
+
+  if (flags & GST_SEEK_FLAG_SEGMENT) {
+    GST_DEBUG_OBJECT (demux, "Segment seek not supported in streaming mode");
+    return FALSE;
+  }
+
+  /* check for having parsed index already */
+  if (!demux->common.index_parsed) {
+    gboolean building_index;
+    guint64 offset = 0;
+
+    if (!demux->index_offset) {
+      GST_DEBUG_OBJECT (demux, "no index (location); no seek in push mode");
+      return FALSE;
+    }
+
+    GST_OBJECT_LOCK (demux);
+    /* handle the seek event in the chain function */
+    demux->common.state = GST_MATROSKA_READ_STATE_SEEK;
+    /* no more seek can be issued until state reset to _DATA */
+
+    /* copy the event */
+    if (demux->seek_event)
+      gst_event_unref (demux->seek_event);
+    demux->seek_event = gst_event_ref (event);
+
+    /* set the building_index flag so that only one thread can setup the
+     * structures for index seeking. */
+    building_index = demux->building_index;
+    if (!building_index) {
+      demux->building_index = TRUE;
+      offset = demux->index_offset;
+    }
+    GST_OBJECT_UNLOCK (demux);
+
+    if (!building_index) {
+      /* seek to the first subindex or legacy index */
+      GST_INFO_OBJECT (demux, "Seeking to Cues at %" G_GUINT64_FORMAT, offset);
+      return perform_seek_to_offset (demux, rate, offset,
+          gst_event_get_seqnum (event), GST_SEEK_FLAG_NONE);
+    }
+
+    /* well, we are handling it already */
+    return TRUE;
+  }
+
+  /* delegate to tweaked regular seek */
+  return gst_matroska_demux_handle_seek_event (demux, pad, event);
+}
+
+/* Event handler for the source pads: handles SEEK (deferring it while not
+ * yet in DATA state and dropping duplicate seqnums), QOS (records the
+ * earliest useful timestamp for video tracks), TOC_SELECT (translated into
+ * a flushing time seek) and forwards everything else upstream.
+ * As a GstPadEventFunction it takes ownership of @event. */
+static gboolean
+gst_matroska_demux_handle_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+  gboolean res = TRUE;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+      /* no seeking until we are (safely) ready */
+      if (demux->common.state != GST_MATROSKA_READ_STATE_DATA) {
+        GST_DEBUG_OBJECT (demux,
+            "not ready for seeking yet, deferring seek event: %" GST_PTR_FORMAT,
+            event);
+        if (demux->deferred_seek_event)
+          gst_event_unref (demux->deferred_seek_event);
+        /* takes ownership of the event, replayed later */
+        demux->deferred_seek_event = event;
+        demux->deferred_seek_pad = pad;
+        return TRUE;
+      }
+
+      {
+        guint32 seqnum = gst_event_get_seqnum (event);
+        if (seqnum == demux->segment_seqnum) {
+          GST_LOG_OBJECT (pad,
+              "Drop duplicated SEEK event seqnum %" G_GUINT32_FORMAT, seqnum);
+          gst_event_unref (event);
+          return TRUE;
+        }
+      }
+
+      if (!demux->streaming)
+        res = gst_matroska_demux_handle_seek_event (demux, pad, event);
+      else
+        res = gst_matroska_demux_handle_seek_push (demux, pad, event);
+      gst_event_unref (event);
+      break;
+
+    case GST_EVENT_QOS:
+    {
+      GstMatroskaTrackContext *context = gst_pad_get_element_private (pad);
+      if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+        GstMatroskaTrackVideoContext *videocontext =
+            (GstMatroskaTrackVideoContext *) context;
+        gdouble proportion;
+        GstClockTimeDiff diff;
+        GstClockTime timestamp;
+
+        gst_event_parse_qos (event, NULL, &proportion, &diff, &timestamp);
+
+        GST_OBJECT_LOCK (demux);
+        videocontext->earliest_time = timestamp + diff;
+        GST_OBJECT_UNLOCK (demux);
+      }
+      res = TRUE;
+      gst_event_unref (event);
+      break;
+    }
+
+    case GST_EVENT_TOC_SELECT:
+    {
+      char *uid = NULL;
+      GstTocEntry *entry = NULL;
+      GstEvent *seek_event;
+      gint64 start_pos;
+
+      if (!demux->common.toc) {
+        GST_DEBUG_OBJECT (demux, "no TOC to select");
+        /* we own the event; don't leak it on this early-out */
+        gst_event_unref (event);
+        return FALSE;
+      } else {
+        gst_event_parse_toc_select (event, &uid);
+        if (uid != NULL) {
+          GST_OBJECT_LOCK (demux);
+          entry = gst_toc_find_entry (demux->common.toc, uid);
+          if (entry == NULL) {
+            GST_OBJECT_UNLOCK (demux);
+            GST_WARNING_OBJECT (demux, "no TOC entry with given UID: %s", uid);
+            res = FALSE;
+          } else {
+            gst_toc_entry_get_start_stop_times (entry, &start_pos, NULL);
+            GST_OBJECT_UNLOCK (demux);
+            seek_event = gst_event_new_seek (1.0,
+                GST_FORMAT_TIME,
+                GST_SEEK_FLAG_FLUSH,
+                GST_SEEK_TYPE_SET, start_pos, GST_SEEK_TYPE_SET, -1);
+            res = gst_matroska_demux_handle_seek_event (demux, pad, seek_event);
+            gst_event_unref (seek_event);
+          }
+          g_free (uid);
+        } else {
+          GST_WARNING_OBJECT (demux, "received empty TOC select event");
+          res = FALSE;
+        }
+      }
+      gst_event_unref (event);
+      break;
+    }
+
+    /* events we don't need to handle */
+    case GST_EVENT_NAVIGATION:
+      gst_event_unref (event);
+      res = FALSE;
+      break;
+
+    case GST_EVENT_LATENCY:
+    default:
+      res = gst_pad_push_event (demux->common.sinkpad, event);
+      break;
+  }
+
+  return res;
+}
+
+static gboolean
+gst_matroska_demux_handle_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_BITRATE:
+ {
+ if (G_UNLIKELY (demux->cached_length == G_MAXUINT64 ||
+ demux->common.offset >= demux->cached_length)) {
+ demux->cached_length =
+ gst_matroska_read_common_get_length (&demux->common);
+ }
+
+ if (demux->cached_length < G_MAXUINT64
+ && demux->common.segment.duration > 0) {
+ /* TODO: better results based on ranges/index tables */
+ guint bitrate =
+ gst_util_uint64_scale (8 * demux->cached_length, GST_SECOND,
+ demux->common.segment.duration);
+
+ GST_LOG_OBJECT (demux, "bitrate query byte length: %" G_GUINT64_FORMAT
+ " duration %" GST_TIME_FORMAT " resulting in a bitrate of %u",
+ demux->cached_length,
+ GST_TIME_ARGS (demux->common.segment.duration), bitrate);
+
+ gst_query_set_bitrate (query, bitrate);
+ res = TRUE;
+ }
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, (GstObject *) demux, query);
+ break;
+ }
+
+ return res;
+}
+
/* Decide whether the current pass covered the requested segment start on all
 * streams; if not, step one entry back in the seek index and reposition
 * there (NOTE(review): caller context not visible here — presumably used for
 * reverse playback / backwards stepping; confirm against call sites).
 *
 * Returns GST_FLOW_OK when demuxing should continue from the earlier index
 * entry, GST_FLOW_EOS when there is nothing earlier to go back to or all
 * streams already reached segment.start. */
static GstFlowReturn
gst_matroska_demux_seek_to_previous_keyframe (GstMatroskaDemux * demux)
{
  GstFlowReturn ret = GST_FLOW_EOS;
  gboolean done = TRUE;
  gint i;

  /* without a seek index and a valid entry position we cannot step back */
  g_return_val_if_fail (demux->seek_index, GST_FLOW_EOS);
  g_return_val_if_fail (demux->seek_entry < demux->seek_index->len,
      GST_FLOW_EOS);

  GST_DEBUG_OBJECT (demux, "locating previous keyframe");

  if (!demux->seek_entry) {
    /* already at the first index entry: no earlier keyframe exists */
    GST_DEBUG_OBJECT (demux, "no earlier index entry");
    goto exit;
  }

  /* a stream is "finished" when data at or before segment.start was pushed */
  for (i = 0; i < demux->common.src->len; i++) {
    GstMatroskaTrackContext *stream = g_ptr_array_index (demux->common.src, i);

    GST_DEBUG_OBJECT (demux, "segment start %" GST_TIME_FORMAT
        ", stream %d at %" GST_TIME_FORMAT,
        GST_TIME_ARGS (demux->common.segment.start), stream->index,
        GST_TIME_ARGS (stream->from_time));
    if (GST_CLOCK_TIME_IS_VALID (stream->from_time)) {
      if (stream->from_time > demux->common.segment.start) {
        GST_DEBUG_OBJECT (demux, "stream %d not finished yet", stream->index);
        done = FALSE;
      }
    } else {
      /* nothing pushed for this stream;
       * likely seek entry did not start at keyframe, so all was skipped.
       * So we need an earlier entry */
      done = FALSE;
    }
  }

  if (!done) {
    GstMatroskaIndex *entry;

    /* pre-decrement: move to and reposition at the previous index entry */
    entry = &g_array_index (demux->seek_index, GstMatroskaIndex,
        --demux->seek_entry);
    if (!gst_matroska_demux_move_to_entry (demux, entry, FALSE, TRUE))
      goto exit;

    ret = GST_FLOW_OK;
  }

exit:
  return ret;
}
+
/* Parse the Tracks master element: each TrackEntry with a unique TrackNumber
 * becomes a stream (gst_matroska_demux_add_stream); duplicates are an error.
 * When done, tracks_parsed is set and no-more-pads is signalled.
 *
 * Returns the last EBML read status, or GST_FLOW_ERROR on a duplicate
 * TrackNumber. */
static GstFlowReturn
gst_matroska_demux_parse_tracks (GstMatroskaDemux * demux, GstEbmlRead * ebml)
{
  GstFlowReturn ret = GST_FLOW_OK;
  guint32 id;

  DEBUG_ELEMENT_START (demux, ebml, "Tracks");

  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
    DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
    return ret;
  }

  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
      break;

    switch (id) {
        /* one track within the "all-tracks" header */
      case GST_MATROSKA_ID_TRACKENTRY:{
        GstMatroskaTrackContext *track;
        ret = gst_matroska_demux_parse_stream (demux, ebml, &track);
        /* track may be non-NULL even when ret is not OK; check separately */
        if (track != NULL) {
          if (gst_matroska_read_common_tracknumber_unique (&demux->common,
                  track->num)) {
            gst_matroska_demux_add_stream (demux, track);
          } else {
            GST_ERROR_OBJECT (demux,
                "TrackNumber %" G_GUINT64_FORMAT " is not unique", track->num);
            ret = GST_FLOW_ERROR;
            gst_matroska_track_free (track);
            track = NULL;
          }
        }
        break;
      }

      default:
        /* unknown child element: skip it and continue */
        ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
            "Track", id);
        break;
    }
  }
  DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);

  demux->tracks_parsed = TRUE;
  GST_DEBUG_OBJECT (demux, "signaling no more pads");
  gst_element_no_more_pads (GST_ELEMENT (demux));

  return ret;
}
+
+static GstFlowReturn
+gst_matroska_demux_update_tracks (GstMatroskaDemux * demux, GstEbmlRead * ebml)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint num_tracks_found = 0;
+ guint32 id;
+
+ GST_INFO_OBJECT (demux, "Reparsing Tracks element");
+
+ DEBUG_ELEMENT_START (demux, ebml, "Tracks");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* one track within the "all-tracks" header */
+ case GST_MATROSKA_ID_TRACKENTRY:{
+ GstMatroskaTrackContext *new_track;
+ gint old_track_index;
+ GstMatroskaTrackContext *old_track;
+ ret = gst_matroska_demux_parse_stream (demux, ebml, &new_track);
+ if (new_track == NULL)
+ break;
+ num_tracks_found++;
+
+ if (gst_matroska_read_common_tracknumber_unique (&demux->common,
+ new_track->num)) {
+ GST_ERROR_OBJECT (demux,
+ "Unexpected new TrackNumber: %" G_GUINT64_FORMAT, new_track->num);
+ goto track_mismatch_error;
+ }
+
+ old_track_index =
+ gst_matroska_read_common_stream_from_num (&demux->common,
+ new_track->num);
+ g_assert (old_track_index != -1);
+ old_track = g_ptr_array_index (demux->common.src, old_track_index);
+
+ if (old_track->type != new_track->type) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch reparsing track %" G_GUINT64_FORMAT
+ " on track type. Expected %d, found %d", new_track->num,
+ old_track->type, new_track->type);
+ goto track_mismatch_error;
+ }
+
+ if (g_strcmp0 (old_track->codec_id, new_track->codec_id) != 0) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch reparsing track %" G_GUINT64_FORMAT
+ " on codec id. Expected '%s', found '%s'", new_track->num,
+ old_track->codec_id, new_track->codec_id);
+ goto track_mismatch_error;
+ }
+
+ /* The new track matches the old track. No problems on our side.
+ * Let's make it replace the old track. */
+ new_track->pad = old_track->pad;
+ new_track->index = old_track->index;
+ new_track->pos = old_track->pos;
+ g_ptr_array_index (demux->common.src, old_track_index) = new_track;
+ gst_pad_set_element_private (new_track->pad, new_track);
+
+ if (!gst_caps_is_equal (old_track->caps, new_track->caps)) {
+ gst_pad_set_caps (new_track->pad, new_track->caps);
+ }
+ gst_caps_replace (&old_track->caps, NULL);
+
+ if (!gst_tag_list_is_equal (old_track->tags, new_track->tags)) {
+ GST_DEBUG_OBJECT (old_track->pad, "Sending tags %p: %"
+ GST_PTR_FORMAT, new_track->tags, new_track->tags);
+ gst_pad_push_event (new_track->pad,
+ gst_event_new_tag (gst_tag_list_copy (new_track->tags)));
+ }
+
+ gst_matroska_track_free (old_track);
+ break;
+
+ track_mismatch_error:
+ gst_matroska_track_free (new_track);
+ new_track = NULL;
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "Track", id);
+ break;
+ }
+ }
+ DEBUG_ELEMENT_STOP (demux, ebml, "Tracks", ret);
+
+ if (ret != GST_FLOW_ERROR && demux->common.num_streams != num_tracks_found) {
+ GST_ERROR_OBJECT (demux,
+ "Mismatch on the number of tracks. Expected %du tracks, found %du",
+ demux->common.num_streams, num_tracks_found);
+ ret = GST_FLOW_ERROR;
+ }
+
+ return ret;
+}
+
+/*
+ * Read signed/unsigned "EBML" numbers.
+ * Return: number of bytes processed.
+ */
+
+static gint
+gst_matroska_ebmlnum_uint (guint8 * data, guint size, guint64 * num)
+{
+ gint len_mask = 0x80, read = 1, n = 1, num_ffs = 0;
+ guint64 total;
+
+ if (size <= 0) {
+ return -1;
+ }
+
+ total = data[0];
+ while (read <= 8 && !(total & len_mask)) {
+ read++;
+ len_mask >>= 1;
+ }
+ if (read > 8)
+ return -1;
+
+ if ((total &= (len_mask - 1)) == len_mask - 1)
+ num_ffs++;
+ if (size < read)
+ return -1;
+ while (n < read) {
+ if (data[n] == 0xff)
+ num_ffs++;
+ total = (total << 8) | data[n];
+ n++;
+ }
+
+ if (read == num_ffs && total != 0)
+ *num = G_MAXUINT64;
+ else
+ *num = total;
+
+ return read;
+}
+
+static gint
+gst_matroska_ebmlnum_sint (guint8 * data, guint size, gint64 * num)
+{
+ guint64 unum;
+ gint res;
+
+ /* read as unsigned number first */
+ if ((res = gst_matroska_ebmlnum_uint (data, size, &unum)) < 0)
+ return -1;
+
+ /* make signed */
+ if (unum == G_MAXUINT64)
+ *num = G_MAXINT64;
+ else
+ *num = unum - ((1 << ((7 * res) - 1)) - 1);
+
+ return res;
+}
+
+/*
+ * Mostly used for subtitles. We add void filler data for each
+ * lagging stream to make sure we don't deadlock.
+ */
+
+static void
+gst_matroska_demux_sync_streams (GstMatroskaDemux * demux)
+{
+ GstClockTime gap_threshold;
+ gint stream_nr;
+
+ GST_OBJECT_LOCK (demux);
+
+ GST_LOG_OBJECT (demux, "Sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (demux->common.segment.position));
+
+ g_assert (demux->common.num_streams == demux->common.src->len);
+ for (stream_nr = 0; stream_nr < demux->common.src->len; stream_nr++) {
+ GstMatroskaTrackContext *context;
+
+ context = g_ptr_array_index (demux->common.src, stream_nr);
+
+ GST_LOG_OBJECT (demux,
+ "Checking for resync on stream %d (%" GST_TIME_FORMAT ")", stream_nr,
+ GST_TIME_ARGS (context->pos));
+
+ /* Only send gap events on non-subtitle streams if lagging way behind.
+ * The 0.5 second threshold for subtitle streams is also quite random. */
+ if (context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)
+ gap_threshold = GST_SECOND / 2;
+ else
+ gap_threshold = 3 * GST_SECOND;
+
+ /* Lag need only be considered if we have advanced into requested segment */
+ if (GST_CLOCK_TIME_IS_VALID (context->pos) &&
+ GST_CLOCK_TIME_IS_VALID (demux->common.segment.position) &&
+ demux->common.segment.position > demux->common.segment.start &&
+ context->pos + gap_threshold < demux->common.segment.position) {
+
+ GstEvent *event;
+ guint64 start = context->pos;
+ guint64 stop = demux->common.segment.position - gap_threshold;
+
+ GST_DEBUG_OBJECT (demux,
+ "Synchronizing stream %d with other by advancing time from %"
+ GST_TIME_FORMAT " to %" GST_TIME_FORMAT, stream_nr,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+ context->pos = stop;
+
+ event = gst_event_new_gap (start, stop - start);
+ GST_OBJECT_UNLOCK (demux);
+ gst_pad_push_event (context->pad, event);
+ GST_OBJECT_LOCK (demux);
+ }
+ }
+
+ GST_OBJECT_UNLOCK (demux);
+}
+
+static GstFlowReturn
+gst_matroska_demux_push_stream_headers (GstMatroskaDemux * demux,
+ GstMatroskaTrackContext * stream)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ gint i, num;
+
+ num = gst_buffer_list_length (stream->stream_headers);
+ for (i = 0; i < num; ++i) {
+ GstBuffer *buf;
+
+ buf = gst_buffer_list_get (stream->stream_headers, i);
+ buf = gst_buffer_copy (buf);
+
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+
+ if (stream->set_discont) {
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ stream->set_discont = FALSE;
+ } else {
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ /* push out all headers in one go and use last flow return */
+ ret = gst_pad_push (stream->pad, buf);
+ }
+
+ /* don't need these any longer */
+ gst_buffer_list_unref (stream->stream_headers);
+ stream->stream_headers = NULL;
+
+ /* combine flows */
+ ret = gst_flow_combiner_update_flow (demux->flowcombiner, ret);
+
+ return ret;
+}
+
/* Parse the "palette:" line from VobSub codec private data and push the
 * 16-entry colour lookup table downstream as a sticky
 * "dvd-spu-clut-change" custom event. Palette values are converted from the
 * pseudo-RGB form stored in the private data to YUV using the coefficients
 * below. Does nothing if there is no codec private data or no palette line. */
static void
gst_matroska_demux_push_dvd_clut_change_event (GstMatroskaDemux * demux,
    GstMatroskaTrackContext * stream)
{
  gchar *buf, *start;

  g_assert (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_VOBSUB));

  if (!stream->codec_priv)
    return;

  /* ideally, VobSub private data should be parsed and stored more convenient
   * elsewhere, but for now, only interested in a small part */

  /* make sure we have terminating 0 */
  buf = g_strndup (stream->codec_priv, stream->codec_priv_size);

  /* just locate and parse palette part */
  start = strstr (buf, "palette:");
  if (start) {
    gint i;
    guint32 clut[16];
    guint32 col;
    guint8 r, g, b, y, u, v;

    /* skip "palette:" and any following whitespace */
    start += 8;
    while (g_ascii_isspace (*start))
      start++;
    /* entries are 6-digit hex values separated by commas/whitespace */
    for (i = 0; i < 16; i++) {
      if (sscanf (start, "%06x", &col) != 1)
        break;
      start += 6;
      while ((*start == ',') || g_ascii_isspace (*start))
        start++;
      /* sigh, need to convert this from vobsub pseudo-RGB to YUV */
      r = (col >> 16) & 0xff;
      g = (col >> 8) & 0xff;
      b = col & 0xff;
      y = CLAMP ((0.1494 * r + 0.6061 * g + 0.2445 * b) * 219 / 255 + 16, 0,
          255);
      u = CLAMP (0.6066 * r - 0.4322 * g - 0.1744 * b + 128, 0, 255);
      v = CLAMP (-0.08435 * r - 0.3422 * g + 0.4266 * b + 128, 0, 255);
      clut[i] = (y << 16) | (u << 8) | v;
    }

    /* got them all without problems; build and send event */
    if (i == 16) {
      GstStructure *s;

      s = gst_structure_new ("application/x-gst-dvd", "event", G_TYPE_STRING,
          "dvd-spu-clut-change", "clut00", G_TYPE_INT, clut[0], "clut01",
          G_TYPE_INT, clut[1], "clut02", G_TYPE_INT, clut[2], "clut03",
          G_TYPE_INT, clut[3], "clut04", G_TYPE_INT, clut[4], "clut05",
          G_TYPE_INT, clut[5], "clut06", G_TYPE_INT, clut[6], "clut07",
          G_TYPE_INT, clut[7], "clut08", G_TYPE_INT, clut[8], "clut09",
          G_TYPE_INT, clut[9], "clut10", G_TYPE_INT, clut[10], "clut11",
          G_TYPE_INT, clut[11], "clut12", G_TYPE_INT, clut[12], "clut13",
          G_TYPE_INT, clut[13], "clut14", G_TYPE_INT, clut[14], "clut15",
          G_TYPE_INT, clut[15], NULL);

      gst_pad_push_event (stream->pad,
          gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM_STICKY, s));
    }
  }
  g_free (buf);
}
+
+static void
+gst_matroska_demux_push_codec_data_all (GstMatroskaDemux * demux)
+{
+ gint stream_nr;
+
+ g_assert (demux->common.num_streams == demux->common.src->len);
+ for (stream_nr = 0; stream_nr < demux->common.src->len; stream_nr++) {
+ GstMatroskaTrackContext *stream;
+
+ stream = g_ptr_array_index (demux->common.src, stream_nr);
+
+ if (stream->send_stream_headers) {
+ if (stream->stream_headers != NULL) {
+ gst_matroska_demux_push_stream_headers (demux, stream);
+ } else {
+ /* FIXME: perhaps we can just disable and skip this stream then */
+ GST_ELEMENT_ERROR (demux, STREAM, DECODE, (NULL),
+ ("Failed to extract stream headers from codec private data"));
+ }
+ stream->send_stream_headers = FALSE;
+ }
+
+ if (stream->send_dvd_event) {
+ gst_matroska_demux_push_dvd_clut_change_event (demux, stream);
+ /* FIXME: should we send this event again after (flushing) seek ? */
+ stream->send_dvd_event = FALSE;
+ }
+ }
+
+}
+
+static GstFlowReturn
+gst_matroska_demux_add_mpeg_seq_header (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+{
+ guint8 *seq_header;
+ guint seq_header_len;
+ guint32 header, tmp;
+
+ if (stream->codec_state) {
+ seq_header = stream->codec_state;
+ seq_header_len = stream->codec_state_size;
+ } else if (stream->codec_priv) {
+ seq_header = stream->codec_priv;
+ seq_header_len = stream->codec_priv_size;
+ } else {
+ return GST_FLOW_OK;
+ }
+
+ /* Sequence header only needed for keyframes */
+ if (GST_BUFFER_FLAG_IS_SET (*buf, GST_BUFFER_FLAG_DELTA_UNIT))
+ return GST_FLOW_OK;
+
+ if (gst_buffer_get_size (*buf) < 4)
+ return GST_FLOW_OK;
+
+ gst_buffer_extract (*buf, 0, &tmp, sizeof (guint32));
+ header = GUINT32_FROM_BE (tmp);
+
+ /* Sequence start code, if not found prepend */
+ if (header != 0x000001b3) {
+ GstBuffer *newbuf;
+
+ GST_DEBUG_OBJECT (element, "Prepending MPEG sequence header");
+
+ newbuf = gst_buffer_new_memdup (seq_header, seq_header_len);
+
+ gst_buffer_copy_into (newbuf, *buf, GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_MEMORY, 0,
+ gst_buffer_get_size (*buf));
+
+ gst_buffer_unref (*buf);
+ *buf = newbuf;
+ }
+
+ return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_matroska_demux_add_wvpk_header (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+{
+ GstMatroskaTrackAudioContext *audiocontext =
+ (GstMatroskaTrackAudioContext *) stream;
+ GstBuffer *newbuf = NULL;
+ GstMapInfo map, outmap;
+ guint8 *buf_data, *data;
+ Wavpack4Header wvh;
+
+ wvh.ck_id[0] = 'w';
+ wvh.ck_id[1] = 'v';
+ wvh.ck_id[2] = 'p';
+ wvh.ck_id[3] = 'k';
+
+ wvh.version = GST_READ_UINT16_LE (stream->codec_priv);
+ wvh.track_no = 0;
+ wvh.index_no = 0;
+ wvh.total_samples = -1;
+ wvh.block_index = audiocontext->wvpk_block_index;
+
+ if (audiocontext->channels <= 2) {
+ guint32 block_samples, tmp;
+ gsize size = gst_buffer_get_size (*buf);
+
+ if (size < 4) {
+ GST_ERROR_OBJECT (element, "Too small wavpack buffer");
+ gst_buffer_unmap (*buf, &map);
+ return GST_FLOW_ERROR;
+ }
+
+ gst_buffer_extract (*buf, 0, &tmp, sizeof (guint32));
+ block_samples = GUINT32_FROM_LE (tmp);
+ /* we need to reconstruct the header of the wavpack block */
+
+ /* -20 because ck_size is the size of the wavpack block -8
+ * and lace_size is the size of the wavpack block + 12
+ * (the three guint32 of the header that already are in the buffer) */
+ wvh.ck_size = size + WAVPACK4_HEADER_SIZE - 20;
+
+ /* block_samples, flags and crc are already in the buffer */
+ newbuf = gst_buffer_new_allocate (NULL, WAVPACK4_HEADER_SIZE - 12, NULL);
+
+ gst_buffer_map (newbuf, &outmap, GST_MAP_WRITE);
+ data = outmap.data;
+ data[0] = 'w';
+ data[1] = 'v';
+ data[2] = 'p';
+ data[3] = 'k';
+ GST_WRITE_UINT32_LE (data + 4, wvh.ck_size);
+ GST_WRITE_UINT16_LE (data + 8, wvh.version);
+ GST_WRITE_UINT8 (data + 10, wvh.track_no);
+ GST_WRITE_UINT8 (data + 11, wvh.index_no);
+ GST_WRITE_UINT32_LE (data + 12, wvh.total_samples);
+ GST_WRITE_UINT32_LE (data + 16, wvh.block_index);
+ gst_buffer_unmap (newbuf, &outmap);
+
+ /* Append data from buf: */
+ gst_buffer_copy_into (newbuf, *buf, GST_BUFFER_COPY_TIMESTAMPS |
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_MEMORY, 0, size);
+
+ gst_buffer_unref (*buf);
+ *buf = newbuf;
+ audiocontext->wvpk_block_index += block_samples;
+ } else {
+ guint8 *outdata = NULL;
+ gsize buf_size, size;
+ guint32 block_samples, flags, crc, blocksize;
+ GstAdapter *adapter;
+
+ adapter = gst_adapter_new ();
+
+ gst_buffer_map (*buf, &map, GST_MAP_READ);
+ buf_data = map.data;
+ buf_size = map.size;
+
+ if (buf_size < 4) {
+ GST_ERROR_OBJECT (element, "Too small wavpack buffer");
+ gst_buffer_unmap (*buf, &map);
+ g_object_unref (adapter);
+ return GST_FLOW_ERROR;
+ }
+
+ data = buf_data;
+ size = buf_size;
+
+ block_samples = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+
+ while (size > 12) {
+ flags = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+ crc = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+ blocksize = GST_READ_UINT32_LE (data);
+ data += 4;
+ size -= 4;
+
+ if (blocksize == 0 || size < blocksize) {
+ GST_ERROR_OBJECT (element, "Too small wavpack buffer");
+ gst_buffer_unmap (*buf, &map);
+ g_object_unref (adapter);
+ return GST_FLOW_ERROR;
+ }
+
+ g_assert (newbuf == NULL);
+
+ newbuf =
+ gst_buffer_new_allocate (NULL, WAVPACK4_HEADER_SIZE + blocksize,
+ NULL);
+ gst_buffer_map (newbuf, &outmap, GST_MAP_WRITE);
+ outdata = outmap.data;
+
+ outdata[0] = 'w';
+ outdata[1] = 'v';
+ outdata[2] = 'p';
+ outdata[3] = 'k';
+ outdata += 4;
+
+ GST_WRITE_UINT32_LE (outdata, blocksize + WAVPACK4_HEADER_SIZE - 8);
+ GST_WRITE_UINT16_LE (outdata + 4, wvh.version);
+ GST_WRITE_UINT8 (outdata + 6, wvh.track_no);
+ GST_WRITE_UINT8 (outdata + 7, wvh.index_no);
+ GST_WRITE_UINT32_LE (outdata + 8, wvh.total_samples);
+ GST_WRITE_UINT32_LE (outdata + 12, wvh.block_index);
+ GST_WRITE_UINT32_LE (outdata + 16, block_samples);
+ GST_WRITE_UINT32_LE (outdata + 20, flags);
+ GST_WRITE_UINT32_LE (outdata + 24, crc);
+ outdata += 28;
+
+ memcpy (outdata, data, blocksize);
+
+ gst_buffer_unmap (newbuf, &outmap);
+ gst_adapter_push (adapter, newbuf);
+ newbuf = NULL;
+
+ data += blocksize;
+ size -= blocksize;
+ }
+ gst_buffer_unmap (*buf, &map);
+
+ newbuf = gst_adapter_take_buffer (adapter, gst_adapter_available (adapter));
+ g_object_unref (adapter);
+
+ gst_buffer_copy_into (newbuf, *buf,
+ GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS, 0, -1);
+ gst_buffer_unref (*buf);
+ *buf = newbuf;
+
+ audiocontext->wvpk_block_index += block_samples;
+ }
+
+ return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_matroska_demux_add_prores_header (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+{
+ GstBuffer *newbuf = gst_buffer_new_allocate (NULL, 8, NULL);
+ GstMapInfo map;
+ guint32 frame_size;
+
+ if (!gst_buffer_map (newbuf, &map, GST_MAP_WRITE)) {
+ GST_ERROR ("Failed to map newly allocated buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ frame_size = gst_buffer_get_size (*buf);
+
+ GST_WRITE_UINT32_BE (map.data, frame_size);
+ map.data[4] = 'i';
+ map.data[5] = 'c';
+ map.data[6] = 'p';
+ map.data[7] = 'f';
+
+ gst_buffer_unmap (newbuf, &map);
+ *buf = gst_buffer_append (newbuf, *buf);
+
+ return GST_FLOW_OK;
+}
+
+/* @text must be null-terminated */
+static gboolean
+gst_matroska_demux_subtitle_chunk_has_tag (GstElement * element,
+ const gchar * text)
+{
+ gchar *tag;
+
+ g_return_val_if_fail (text != NULL, FALSE);
+
+ /* yes, this might all lead to false positives ... */
+ tag = (gchar *) text;
+ while ((tag = strchr (tag, '<'))) {
+ tag++;
+ if (*tag != '\0' && *(tag + 1) == '>') {
+ /* some common convenience ones */
+ /* maybe any character will do here ? */
+ switch (*tag) {
+ case 'b':
+ case 'i':
+ case 'u':
+ case 's':
+ return TRUE;
+ default:
+ return FALSE;
+ }
+ }
+ }
+
+ if (strstr (text, "<span"))
+ return TRUE;
+
+ return FALSE;
+}
+
/* Post-process a subtitle chunk before pushing it downstream:
 *  - strip a trailing NUL terminator from the buffer data,
 *  - if the stream is not valid UTF-8, convert to UTF-8 using the
 *    GST_SUBTITLE_ENCODING environment variable (or ISO-8859-15 when the
 *    locale charset is UTF-8 / conversion fails),
 *  - for markup-capable streams, escape text that does not already carry
 *    markup tags.
 * May replace *buf with a newly allocated buffer (timestamps, flags and
 * meta are copied over). Always returns GST_FLOW_OK. */
static GstFlowReturn
gst_matroska_demux_check_subtitle_buffer (GstElement * element,
    GstMatroskaTrackContext * stream, GstBuffer ** buf)
{
  GstMatroskaTrackSubtitleContext *sub_stream;
  const gchar *encoding;
  GError *err = NULL;
  GstBuffer *newbuf;
  gchar *utf8;
  GstMapInfo map;
  gboolean needs_unmap = TRUE;

  sub_stream = (GstMatroskaTrackSubtitleContext *) stream;

  /* empty or unmappable buffers pass through untouched */
  if (!gst_buffer_get_size (*buf) || !gst_buffer_map (*buf, &map, GST_MAP_READ))
    return GST_FLOW_OK;

  /* The subtitle buffer we push out should not include a NUL terminator as
   * part of the data. */
  if (map.data[map.size - 1] == '\0') {
    gst_buffer_set_size (*buf, map.size - 1);
    /* remap to refresh map.size after the resize */
    gst_buffer_unmap (*buf, &map);
    gst_buffer_map (*buf, &map, GST_MAP_READ);
  }

  if (!sub_stream->invalid_utf8) {
    if (g_utf8_validate ((gchar *) map.data, map.size, NULL)) {
      goto next;
    }
    /* warn once, then remember the stream is broken */
    GST_WARNING_OBJECT (element, "subtitle stream %" G_GUINT64_FORMAT
        " is not valid UTF-8, this is broken according to the matroska"
        " specification", stream->num);
    sub_stream->invalid_utf8 = TRUE;
  }

  /* file with broken non-UTF8 subtitle, do the best we can do to fix it */
  encoding = g_getenv ("GST_SUBTITLE_ENCODING");
  if (encoding == NULL || *encoding == '\0') {
    /* if local encoding is UTF-8 and no encoding specified
     * via the environment variable, assume ISO-8859-15 */
    if (g_get_charset (&encoding)) {
      encoding = "ISO-8859-15";
    }
  }

  utf8 =
      g_convert_with_fallback ((gchar *) map.data, map.size, "UTF-8", encoding,
      (char *) "*", NULL, NULL, &err);

  if (err) {
    GST_LOG_OBJECT (element, "could not convert string from '%s' to UTF-8: %s",
        encoding, err->message);
    g_error_free (err);
    g_free (utf8);

    /* invalid input encoding, fall back to ISO-8859-15 (always succeeds) */
    encoding = "ISO-8859-15";
    utf8 =
        g_convert_with_fallback ((gchar *) map.data, map.size, "UTF-8",
        encoding, (char *) "*", NULL, NULL, NULL);
  }

  GST_LOG_OBJECT (element, "converted subtitle text from %s to UTF-8 %s",
      encoding, (err) ? "(using ISO-8859-15 as fallback)" : "");

  if (utf8 == NULL)
    utf8 = g_strdup ("invalid subtitle");

  /* swap *buf for a buffer wrapping the converted text; keep metadata */
  newbuf = gst_buffer_new_wrapped (utf8, strlen (utf8));
  gst_buffer_unmap (*buf, &map);
  gst_buffer_copy_into (newbuf, *buf,
      GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_META,
      0, -1);
  gst_buffer_unref (*buf);

  *buf = newbuf;
  /* remap so the markup check below sees the converted data */
  gst_buffer_map (*buf, &map, GST_MAP_READ);

next:

  if (sub_stream->check_markup) {
    /* caps claim markup text, so we need to escape text,
     * except if text is already markup and then needs no further escaping */
    sub_stream->seen_markup_tag = sub_stream->seen_markup_tag ||
        gst_matroska_demux_subtitle_chunk_has_tag (element, (gchar *) map.data);

    if (!sub_stream->seen_markup_tag) {
      utf8 = g_markup_escape_text ((gchar *) map.data, map.size);

      newbuf = gst_buffer_new_wrapped (utf8, strlen (utf8));
      gst_buffer_unmap (*buf, &map);
      gst_buffer_copy_into (newbuf, *buf,
          GST_BUFFER_COPY_TIMESTAMPS | GST_BUFFER_COPY_FLAGS |
          GST_BUFFER_COPY_META, 0, -1);
      gst_buffer_unref (*buf);

      *buf = newbuf;
      /* old mapping already released above; nothing left to unmap */
      needs_unmap = FALSE;
    }
  }

  if (needs_unmap)
    gst_buffer_unmap (*buf, &map);

  return GST_FLOW_OK;
}
+
+static GstFlowReturn
+gst_matroska_demux_check_aac (GstElement * element,
+ GstMatroskaTrackContext * stream, GstBuffer ** buf)
+{
+ guint8 data[2];
+ guint size;
+
+ gst_buffer_extract (*buf, 0, data, 2);
+ size = gst_buffer_get_size (*buf);
+
+ if (size > 2 && data[0] == 0xff && (data[1] >> 4 == 0x0f)) {
+ GstStructure *s;
+
+ /* tss, ADTS data, remove codec_data
+ * still assume it is at least parsed */
+ stream->caps = gst_caps_make_writable (stream->caps);
+ s = gst_caps_get_structure (stream->caps, 0);
+ g_assert (s);
+ gst_structure_remove_field (s, "codec_data");
+ gst_pad_set_caps (stream->pad, stream->caps);
+ GST_DEBUG_OBJECT (element, "ADTS AAC audio data; removing codec-data, "
+ "new caps: %" GST_PTR_FORMAT, stream->caps);
+ }
+
+ /* disable subsequent checking */
+ stream->postprocess_frame = NULL;
+
+ return GST_FLOW_OK;
+}
+
+static GstBuffer *
+gst_matroska_demux_align_buffer (GstMatroskaDemux * demux,
+ GstBuffer * buffer, gsize alignment)
+{
+ GstMapInfo map;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ if (map.size < sizeof (guintptr)) {
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+ }
+
+ if (((guintptr) map.data) & (alignment - 1)) {
+ GstBuffer *new_buffer;
+ GstAllocationParams params = { 0, alignment - 1, 0, 0, };
+
+ new_buffer = gst_buffer_new_allocate (NULL,
+ gst_buffer_get_size (buffer), &params);
+
+ /* Copy data "by hand", so ensure alignment is kept: */
+ gst_buffer_fill (new_buffer, 0, map.data, map.size);
+
+ gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ GST_DEBUG_OBJECT (demux,
+ "We want output aligned on %" G_GSIZE_FORMAT ", reallocated",
+ alignment);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return new_buffer;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ return buffer;
+}
+
/* One BlockAdditional payload collected while parsing a BlockMore element:
 * @data/@size hold the binary payload (allocated by the EBML reader and
 * handed over to this struct) and @id is the associated BlockAddID value. */
typedef struct
{
  guint8 *data;
  gsize size;
  guint64 id;
} BlockAddition;
+
+static GstFlowReturn
+gst_matroska_demux_parse_blockmore (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml, GQueue * additions)
+{
+ GstFlowReturn ret;
+ guint32 id;
+ guint64 block_id = 1;
+ guint64 datalen = 0;
+ guint8 *data = NULL;
+
+ ret = gst_ebml_read_master (ebml, &id); /* GST_MATROSKA_ID_BLOCKMORE */
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* read all BlockMore sub-entries */
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_BLOCKADDID:
+ ret = gst_ebml_read_uint (ebml, &id, &block_id);
+ if (block_id == 0)
+ block_id = 1;
+ break;
+ case GST_MATROSKA_ID_BLOCKADDITIONAL:
+ g_free (data);
+ data = NULL;
+ datalen = 0;
+ ret = gst_ebml_read_binary (ebml, &id, &data, &datalen);
+ break;
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "BlockMore", id);
+ break;
+ }
+ }
+
+ if (data != NULL && datalen > 0) {
+ BlockAddition *blockadd = g_new (BlockAddition, 1);
+
+ GST_LOG_OBJECT (demux, "BlockAddition %" G_GUINT64_FORMAT ": "
+ "%" G_GUINT64_FORMAT " bytes", block_id, datalen);
+ GST_MEMDUMP_OBJECT (demux, "BlockAdditional", data, datalen);
+ blockadd->data = data;
+ blockadd->size = datalen;
+ blockadd->id = block_id;
+ g_queue_push_tail (additions, blockadd);
+ GST_LOG_OBJECT (demux, "now %d pending block additions", additions->length);
+ }
+
+ return ret;
+}
+
+/* BLOCKADDITIONS
+ * BLOCKMORE
+ * BLOCKADDID
+ * BLOCKADDITIONAL
+ */
+static GstFlowReturn
+gst_matroska_demux_parse_blockadditions (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml, GQueue * additions)
+{
+ GstFlowReturn ret;
+ guint32 id;
+
+ ret = gst_ebml_read_master (ebml, &id); /* GST_MATROSKA_ID_BLOCKADDITIONS */
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ /* read all BlockMore sub-entries */
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ if (id == GST_MATROSKA_ID_BLOCKMORE) {
+ DEBUG_ELEMENT_START (demux, ebml, "BlockMore");
+ ret = gst_matroska_demux_parse_blockmore (demux, ebml, additions);
+ DEBUG_ELEMENT_STOP (demux, ebml, "BlockMore", ret);
+ if (ret != GST_FLOW_OK)
+ break;
+ } else {
+ GST_WARNING_OBJECT (demux, "Expected BlockMore, got %x", id);
+ }
+ }
+
+ return ret;
+}
+
/* Parse one SimpleBlock, or the contents of one BlockGroup, at the current
 * ebml reader position; decode the block header and lacing, then push the
 * resulting buffer(s) downstream on the owning stream's pad.
 *
 * demux:          the demuxer instance
 * ebml:           reader positioned inside the BlockGroup (or at the
 *                 SimpleBlock element when @is_simpleblock is TRUE)
 * cluster_time:   timestamp of the enclosing Cluster in timecodescale
 *                 units, or GST_CLOCK_TIME_NONE if unknown
 * cluster_offset: byte offset of the enclosing Cluster (only used by the
 *                 disabled element-index code below)
 * is_simpleblock: TRUE for a SimpleBlock, FALSE for a BlockGroup
 *
 * Returns: flow return combined across source pads. Note that most data
 * errors are deliberately demoted to GST_FLOW_OK (see the exit labels at
 * the bottom) so parsing can resume at the next block(group).
 */
static GstFlowReturn
gst_matroska_demux_parse_blockgroup_or_simpleblock (GstMatroskaDemux * demux,
    GstEbmlRead * ebml, guint64 cluster_time, guint64 cluster_offset,
    gboolean is_simpleblock)
{
  GstMatroskaTrackContext *stream = NULL;
  GstFlowReturn ret = GST_FLOW_OK;
  gboolean readblock = FALSE;   /* set once a Block/SimpleBlock was parsed */
  guint32 id;
  guint64 block_duration = -1;  /* -1 means "no BlockDuration element seen" */
  gint64 block_discardpadding = 0;
  GstBuffer *buf = NULL;        /* the raw (mapped) block payload */
  GstMapInfo map;
  gint stream_num = -1, n, laces = 0;
  guint size = 0;               /* bytes of payload not yet accounted for */
  gint *lace_size = NULL;       /* per-lace byte counts, g_free'd at done: */
  gint64 time = 0;              /* block timestamp relative to cluster */
  gint flags = 0;
  gint64 referenceblock = 0;    /* non-zero implies a non-keyframe */
  gint64 offset;
  GstClockTime buffer_timestamp;
  GQueue additions = G_QUEUE_INIT;      /* BlockAddition list, drained at done: */

  offset = gst_ebml_read_get_offset (ebml);

  /* First pass: read all subelements of the group (or just the one
   * SimpleBlock); actual pushing happens after the loop. */
  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
    if (!is_simpleblock) {
      if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK) {
        goto data_error;
      }
    } else {
      /* a SimpleBlock has no wrapping group; pretend we peeked it */
      id = GST_MATROSKA_ID_SIMPLEBLOCK;
    }

    switch (id) {
        /* one block inside the group. Note, block parsing is one
         * of the harder things, so this code is a bit complicated.
         * See http://www.matroska.org/ for documentation. */
      case GST_MATROSKA_ID_SIMPLEBLOCK:
      case GST_MATROSKA_ID_BLOCK:
      {
        guint64 num;
        guint8 *data;

        /* drop a previously read block in this group, if any */
        if (buf) {
          gst_buffer_unmap (buf, &map);
          gst_buffer_unref (buf);
          buf = NULL;
        }
        if ((ret = gst_ebml_read_buffer (ebml, &id, &buf)) != GST_FLOW_OK)
          break;

        gst_buffer_map (buf, &map, GST_MAP_READ);
        data = map.data;
        size = map.size;

        /* first byte(s): blocknum */
        if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
          goto data_error;
        data += n;
        size -= n;

        /* fetch stream from num */
        stream_num = gst_matroska_read_common_stream_from_num (&demux->common,
            num);
        /* need at least 2 bytes timestamp + 1 byte flags beyond this point */
        if (G_UNLIKELY (size < 3)) {
          GST_WARNING_OBJECT (demux, "Invalid size %u", size);
          /* non-fatal, try next block(group) */
          ret = GST_FLOW_OK;
          goto done;
        } else if (G_UNLIKELY (stream_num < 0 ||
                stream_num >= demux->common.num_streams)) {
          /* let's not give up on a stray invalid track number */
          GST_WARNING_OBJECT (demux,
              "Invalid stream %d for track number %" G_GUINT64_FORMAT
              "; ignoring block", stream_num, num);
          goto done;
        }

        stream = g_ptr_array_index (demux->common.src, stream_num);

        /* time (relative to cluster time) */
        time = ((gint16) GST_READ_UINT16_BE (data));
        data += 2;
        size -= 2;
        flags = GST_READ_UINT8 (data);
        data += 1;
        size -= 1;

        GST_LOG_OBJECT (demux, "time %" G_GUINT64_FORMAT ", flags %d", time,
            flags);

        /* bits 1-2 of the flags byte select the lacing scheme */
        switch ((flags & 0x06) >> 1) {
          case 0x0:            /* no lacing */
            laces = 1;
            lace_size = g_new (gint, 1);
            lace_size[0] = size;
            break;

          case 0x1:            /* xiph lacing */
          case 0x2:            /* fixed-size lacing */
          case 0x3:            /* EBML lacing */
            if (size == 0)
              goto invalid_lacing;
            /* lace count byte stores "laces - 1" */
            laces = GST_READ_UINT8 (data) + 1;
            data += 1;
            size -= 1;
            lace_size = g_new0 (gint, laces);

            switch ((flags & 0x06) >> 1) {
              case 0x1:        /* xiph lacing */  {
                guint temp, total = 0;

                /* sizes of all laces but the last, each encoded as a run
                 * of 0xff bytes followed by a terminating byte */
                for (n = 0; ret == GST_FLOW_OK && n < laces - 1; n++) {
                  while (1) {
                    if (size == 0)
                      goto invalid_lacing;
                    temp = GST_READ_UINT8 (data);
                    lace_size[n] += temp;
                    data += 1;
                    size -= 1;
                    if (temp != 0xff)
                      break;
                  }
                  total += lace_size[n];
                }
                /* last lace takes whatever payload remains */
                lace_size[n] = size - total;
                break;
              }

              case 0x2:        /* fixed-size lacing */
                for (n = 0; n < laces; n++)
                  lace_size[n] = size / laces;
                break;

              case 0x3:        /* EBML lacing */  {
                guint total;

                /* first lace size is an absolute EBML uint, the rest are
                 * signed deltas relative to the previous lace */
                if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
                  goto data_error;
                data += n;
                size -= n;
                total = lace_size[0] = num;
                for (n = 1; ret == GST_FLOW_OK && n < laces - 1; n++) {
                  gint64 snum;
                  gint r;

                  if ((r = gst_matroska_ebmlnum_sint (data, size, &snum)) < 0)
                    goto data_error;
                  data += r;
                  size -= r;
                  lace_size[n] = lace_size[n - 1] + snum;
                  total += lace_size[n];
                }
                if (n < laces)
                  lace_size[n] = size - total;
                break;
              }
            }
            break;
        }

        if (ret != GST_FLOW_OK)
          break;

        readblock = TRUE;
        break;
      }

      case GST_MATROSKA_ID_BLOCKADDITIONS:
      {
        DEBUG_ELEMENT_START (demux, ebml, "BlockAdditions");
        ret = gst_matroska_demux_parse_blockadditions (demux, ebml, &additions);
        DEBUG_ELEMENT_STOP (demux, ebml, "BlockAdditions", ret);
        break;
      }

      case GST_MATROSKA_ID_BLOCKDURATION:{
        ret = gst_ebml_read_uint (ebml, &id, &block_duration);
        GST_DEBUG_OBJECT (demux, "BlockDuration: %" G_GUINT64_FORMAT,
            block_duration);
        break;
      }

      case GST_MATROSKA_ID_DISCARDPADDING:{
        ret = gst_ebml_read_sint (ebml, &id, &block_discardpadding);
        GST_DEBUG_OBJECT (demux, "DiscardPadding: %" GST_STIME_FORMAT,
            GST_STIME_ARGS (block_discardpadding));
        break;
      }

      case GST_MATROSKA_ID_REFERENCEBLOCK:{
        ret = gst_ebml_read_sint (ebml, &id, &referenceblock);
        GST_DEBUG_OBJECT (demux, "ReferenceBlock: %" G_GINT64_FORMAT,
            referenceblock);
        break;
      }

      case GST_MATROSKA_ID_CODECSTATE:{
        guint8 *data;
        guint64 data_len = 0;

        if ((ret =
                gst_ebml_read_binary (ebml, &id, &data,
                    &data_len)) != GST_FLOW_OK)
          break;

        /* CodecState only makes sense after a Block identified the stream */
        if (G_UNLIKELY (stream == NULL)) {
          GST_WARNING_OBJECT (demux,
              "Unexpected CodecState subelement - ignoring");
          break;
        }

        g_free (stream->codec_state);
        stream->codec_state = data;
        stream->codec_state_size = data_len;

        /* Decode if necessary */
        if (stream->encodings && stream->encodings->len > 0
            && stream->codec_state && stream->codec_state_size > 0) {
          if (!gst_matroska_decode_data (stream->encodings,
                  &stream->codec_state, &stream->codec_state_size,
                  GST_MATROSKA_TRACK_ENCODING_SCOPE_CODEC_DATA, TRUE)) {
            GST_WARNING_OBJECT (demux, "Decoding codec state failed");
          }
        }

        GST_DEBUG_OBJECT (demux, "CodecState of %" G_GSIZE_FORMAT " bytes",
            stream->codec_state_size);
        break;
      }

      default:
        ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
            "BlockGroup", id);
        break;

        /* known-but-unused subelements: skipped quietly (no warning),
         * unlike the default case above */
      case GST_MATROSKA_ID_BLOCKVIRTUAL:
      case GST_MATROSKA_ID_REFERENCEPRIORITY:
      case GST_MATROSKA_ID_REFERENCEVIRTUAL:
      case GST_MATROSKA_ID_SLICES:
        GST_DEBUG_OBJECT (demux,
            "Skipping BlockGroup subelement 0x%x - ignoring", id);
        ret = gst_ebml_read_skip (ebml);
        break;
    }

    /* a SimpleBlock has no sibling subelements, so stop after one pass */
    if (is_simpleblock)
      break;
  }

  /* reading a number or so could have failed */
  if (ret != GST_FLOW_OK)
    goto data_error;

  /* Second stage: timestamp bookkeeping and pushing the lace buffers */
  if (ret == GST_FLOW_OK && readblock) {
    gboolean invisible_frame = FALSE;
    gboolean delta_unit = FALSE;
    guint64 duration = 0;
    gint64 lace_time = 0;
    gboolean keep_seek_start = TRUE;
    GstEvent *protect_event;

    stream = g_ptr_array_index (demux->common.src, stream_num);

    if (cluster_time != GST_CLOCK_TIME_NONE) {
      /* FIXME: What to do with negative timestamps? Give timestamp 0 or -1?
       * Drop unless the lace contains timestamp 0? */
      if (time < 0 && (-time) > cluster_time) {
        lace_time = 0;
      } else {
        if (stream->timecodescale == 1.0)
          lace_time = (cluster_time + time) * demux->common.time_scale;
        else
          /* per-track timecodescale: scale in floating point */
          lace_time =
              gst_util_guint64_to_gdouble ((cluster_time + time) *
              demux->common.time_scale) * stream->timecodescale;
      }
    } else {
      lace_time = GST_CLOCK_TIME_NONE;
    }
    /* Send the GST_PROTECTION event */
    while ((protect_event = g_queue_pop_head (&stream->protection_event_queue))) {
      GST_TRACE_OBJECT (demux, "pushing protection event for stream %d:%s",
          stream->index, GST_STR_NULL (stream->name));
      gst_pad_push_event (stream->pad, protect_event);
    }

    /* need to refresh segment info ASAP */
    if (GST_CLOCK_TIME_IS_VALID (lace_time)
        && GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)
        && lace_time < demux->stream_start_time) {
      /* only keep a stored seek position if it starts after the
       * (newly discovered, earlier) stream start */
      keep_seek_start =
          (demux->common.segment.start > demux->stream_start_time);
      demux->stream_start_time = lace_time;
      demux->need_segment = TRUE;
    }

    if (GST_CLOCK_TIME_IS_VALID (lace_time) && demux->need_segment) {
      GstSegment *segment = &demux->common.segment;
      guint64 clace_time;
      GstEvent *segment_event;

      if (!GST_CLOCK_TIME_IS_VALID (demux->stream_start_time)) {
        demux->stream_start_time = lace_time;
        GST_DEBUG_OBJECT (demux,
            "Setting stream start time to %" GST_TIME_FORMAT,
            GST_TIME_ARGS (lace_time));
      }
      clace_time = MAX (lace_time, demux->stream_start_time);
      if (keep_seek_start
          && GST_CLOCK_TIME_IS_VALID (demux->common.segment.position)
          && demux->common.segment.position != 0) {
        GST_DEBUG_OBJECT (demux, "using stored seek position %" GST_TIME_FORMAT,
            GST_TIME_ARGS (demux->common.segment.position));
        clace_time = demux->common.segment.position;
      }
      segment->start = clace_time;
      segment->stop = demux->common.segment.stop;
      segment->time = segment->start - demux->stream_start_time;
      segment->position = segment->start - demux->stream_start_time;
      GST_DEBUG_OBJECT (demux,
          "generated segment starting at %" GST_TIME_FORMAT ": %"
          GST_SEGMENT_FORMAT, GST_TIME_ARGS (lace_time), segment);
      /* now convey our segment notion downstream */
      segment_event = gst_event_new_segment (segment);
      if (demux->segment_seqnum)
        gst_event_set_seqnum (segment_event, demux->segment_seqnum);
      gst_matroska_demux_send_event (demux, segment_event);
      demux->need_segment = FALSE;
      demux->segment_seqnum = 0;
    }

    /* send pending codec data headers for all streams,
     * before we perform sync across all streams */
    gst_matroska_demux_push_codec_data_all (demux);

    if (block_duration != -1) {
      if (stream->timecodescale == 1.0)
        duration = gst_util_uint64_scale (block_duration,
            demux->common.time_scale, 1);
      else
        duration =
            gst_util_gdouble_to_guint64 (gst_util_guint64_to_gdouble
            (gst_util_uint64_scale (block_duration, demux->common.time_scale,
                    1)) * stream->timecodescale);
    } else if (stream->default_duration) {
      duration = stream->default_duration * laces;
    }
    /* else duration is diff between timecode of this and next block */

    if (stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
      /* For SimpleBlock, look at the keyframe bit in flags. Otherwise,
         a ReferenceBlock implies that this is not a keyframe. In either
         case, it only makes sense for video streams. */
      if ((is_simpleblock && !(flags & 0x80)) || referenceblock) {
        delta_unit = TRUE;
        /* flag bit 0x08 marks an invisible frame for VPx/AV1 */
        invisible_frame = ((flags & 0x08)) &&
            (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8) ||
            !strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP9) ||
            !strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_AV1));
      }

      /* If we're doing a keyframe-only trickmode, only push keyframes on video
       * streams */
      if (delta_unit
          && demux->common.segment.flags & GST_SEGMENT_FLAG_TRICKMODE_KEY_UNITS) {
        GST_LOG_OBJECT (demux, "Skipping non-keyframe on stream %d",
            stream->index);
        ret = GST_FLOW_OK;
        goto done;
      }
    }

    /* push each lace as its own sub-buffer */
    for (n = 0; n < laces; n++) {
      GstBuffer *sub;

      if (G_UNLIKELY (lace_size[n] > size)) {
        GST_WARNING_OBJECT (demux, "Invalid lace size");
        break;
      }

      /* QoS for video track with an index. the assumption is that
         index entries point to keyframes, but if that is not true we
         will instead skip until the next keyframe. */
      if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
          stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
          stream->index_table && demux->common.segment.rate > 0.0) {
        GstMatroskaTrackVideoContext *videocontext =
            (GstMatroskaTrackVideoContext *) stream;
        GstClockTime earliest_time;
        GstClockTime earliest_stream_time;

        GST_OBJECT_LOCK (demux);
        earliest_time = videocontext->earliest_time;
        GST_OBJECT_UNLOCK (demux);
        earliest_stream_time =
            gst_segment_position_from_running_time (&demux->common.segment,
            GST_FORMAT_TIME, earliest_time);

        if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
            GST_CLOCK_TIME_IS_VALID (earliest_stream_time) &&
            lace_time <= earliest_stream_time) {
          /* find index entry (keyframe) <= earliest_stream_time */
          GstMatroskaIndex *entry =
              gst_util_array_binary_search (stream->index_table->data,
              stream->index_table->len, sizeof (GstMatroskaIndex),
              (GCompareDataFunc) gst_matroska_index_seek_find,
              GST_SEARCH_MODE_BEFORE, &earliest_stream_time, NULL);

          /* if that entry (keyframe) is after the current the current
             buffer, we can skip pushing (and thus decoding) all
             buffers until that keyframe. */
          if (entry && GST_CLOCK_TIME_IS_VALID (entry->time) &&
              entry->time > lace_time) {
            GST_LOG_OBJECT (demux, "Skipping lace before late keyframe");
            stream->set_discont = TRUE;
            goto next_lace;
          }
        }
      }

      /* laces are consumed back-to-front of the remaining payload:
       * `size` still counts the unconsumed tail of the mapped buffer */
      sub = gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL,
          gst_buffer_get_size (buf) - size, lace_size[n]);
      GST_DEBUG_OBJECT (demux, "created subbuffer %p", sub);

      if (delta_unit)
        GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DELTA_UNIT);
      else
        GST_BUFFER_FLAG_UNSET (sub, GST_BUFFER_FLAG_DELTA_UNIT);

      if (invisible_frame)
        GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DECODE_ONLY);

      if (stream->encodings != NULL && stream->encodings->len > 0)
        sub = gst_matroska_decode_buffer (stream, sub);

      if (sub == NULL) {
        GST_WARNING_OBJECT (demux, "Decoding buffer failed");
        goto next_lace;
      }

      if (!stream->dts_only) {
        GST_BUFFER_PTS (sub) = lace_time;
      } else {
        GST_BUFFER_DTS (sub) = lace_time;
        if (stream->intra_only)
          GST_BUFFER_PTS (sub) = lace_time;
      }

      buffer_timestamp = gst_matroska_track_get_buffer_timestamp (stream, sub);

      if (GST_CLOCK_TIME_IS_VALID (lace_time)) {
        GstClockTime last_stop_end;

        /* Check if this stream is after segment stop,
         * but only terminate if we hit the next keyframe,
         * to make sure that all frames potentially inside the segment
         * are available to the decoder for decoding / reordering.*/
        if (!delta_unit && GST_CLOCK_TIME_IS_VALID (demux->common.segment.stop)
            && lace_time >= demux->common.segment.stop) {
          GST_DEBUG_OBJECT (demux,
              "Stream %d lace time: %" GST_TIME_FORMAT " after segment stop: %"
              GST_TIME_FORMAT, stream->index, GST_TIME_ARGS (lace_time),
              GST_TIME_ARGS (demux->common.segment.stop));
          gst_buffer_unref (sub);
          goto eos;
        }
        if (offset >= stream->to_offset
            || (GST_CLOCK_TIME_IS_VALID (demux->to_time)
                && lace_time > demux->to_time)) {
          GST_DEBUG_OBJECT (demux, "Stream %d after playback section",
              stream->index);
          gst_buffer_unref (sub);
          goto eos;
        }

        /* handle gaps, e.g. non-zero start-time, or an cue index entry
         * that landed us with timestamps not quite intended */
        GST_OBJECT_LOCK (demux);
        if (demux->max_gap_time &&
            GST_CLOCK_TIME_IS_VALID (demux->last_stop_end) &&
            demux->common.segment.rate > 0.0) {
          GstClockTimeDiff diff;

          /* only send segments with increasing start times,
           * otherwise if these go back and forth downstream (sinks) increase
           * accumulated time and running_time */
          diff = GST_CLOCK_DIFF (demux->last_stop_end, lace_time);
          if (diff > 0 && diff > demux->max_gap_time
              && lace_time > demux->common.segment.start
              && (!GST_CLOCK_TIME_IS_VALID (demux->common.segment.stop)
                  || lace_time < demux->common.segment.stop)) {
            GstEvent *event;
            GST_DEBUG_OBJECT (demux,
                "Gap of %" G_GINT64_FORMAT " ns detected in"
                "stream %d (%" GST_TIME_FORMAT " -> %" GST_TIME_FORMAT "). "
                "Sending updated SEGMENT events", diff,
                stream->index, GST_TIME_ARGS (stream->pos),
                GST_TIME_ARGS (lace_time));

            /* drop the object lock while pushing the gap event downstream */
            event = gst_event_new_gap (demux->last_stop_end, diff);
            GST_OBJECT_UNLOCK (demux);
            gst_pad_push_event (stream->pad, event);
            GST_OBJECT_LOCK (demux);
          }
        }

        if (!GST_CLOCK_TIME_IS_VALID (demux->common.segment.position)
            || demux->common.segment.position < lace_time) {
          demux->common.segment.position = lace_time;
        }
        GST_OBJECT_UNLOCK (demux);

        last_stop_end = lace_time;
        if (duration) {
          GST_BUFFER_DURATION (sub) = duration / laces;
          last_stop_end += GST_BUFFER_DURATION (sub);
        }

        if (!GST_CLOCK_TIME_IS_VALID (demux->last_stop_end) ||
            demux->last_stop_end < last_stop_end)
          demux->last_stop_end = last_stop_end;

        /* grow the advertised duration if playback ran past it */
        GST_OBJECT_LOCK (demux);
        if (demux->common.segment.duration == -1 ||
            demux->stream_start_time + demux->common.segment.duration <
            last_stop_end) {
          demux->common.segment.duration =
              last_stop_end - demux->stream_start_time;
          GST_OBJECT_UNLOCK (demux);
          if (!demux->invalid_duration) {
            gst_element_post_message (GST_ELEMENT_CAST (demux),
                gst_message_new_duration_changed (GST_OBJECT_CAST (demux)));
            demux->invalid_duration = TRUE;
          }
        } else {
          GST_OBJECT_UNLOCK (demux);
        }
      }

      stream->pos = lace_time;

      gst_matroska_demux_sync_streams (demux);

      if (stream->set_discont) {
        GST_DEBUG_OBJECT (demux, "marking DISCONT");
        GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DISCONT);
        stream->set_discont = FALSE;
      } else {
        GST_BUFFER_FLAG_UNSET (sub, GST_BUFFER_FLAG_DISCONT);
      }

      /* reverse playback book-keeping */
      if (!GST_CLOCK_TIME_IS_VALID (stream->from_time))
        stream->from_time = lace_time;
      if (stream->from_offset == -1)
        stream->from_offset = offset;

      GST_DEBUG_OBJECT (demux,
          "Pushing lace %d, data of size %" G_GSIZE_FORMAT
          " for stream %d, time=%" GST_TIME_FORMAT " and duration=%"
          GST_TIME_FORMAT, n, gst_buffer_get_size (sub), stream_num,
          GST_TIME_ARGS (buffer_timestamp),
          GST_TIME_ARGS (GST_BUFFER_DURATION (sub)));

#if 0
      if (demux->common.element_index) {
        if (stream->index_writer_id == -1)
          gst_index_get_writer_id (demux->common.element_index,
              GST_OBJECT (stream->pad), &stream->index_writer_id);

        GST_LOG_OBJECT (demux, "adding association %" GST_TIME_FORMAT "-> %"
            G_GUINT64_FORMAT " for writer id %d",
            GST_TIME_ARGS (buffer_timestamp), cluster_offset,
            stream->index_writer_id);
        gst_index_add_association (demux->common.element_index,
            stream->index_writer_id, GST_BUFFER_FLAG_IS_SET (sub,
                GST_BUFFER_FLAG_DELTA_UNIT) ? 0 : GST_ASSOCIATION_FLAG_KEY_UNIT,
            GST_FORMAT_TIME, buffer_timestamp, GST_FORMAT_BYTES, cluster_offset,
            NULL);
      }
#endif

      /* Postprocess the buffers depending on the codec used */
      if (stream->postprocess_frame) {
        GST_LOG_OBJECT (demux, "running post process");
        ret = stream->postprocess_frame (GST_ELEMENT (demux), stream, &sub);
      }

      /* At this point, we have a sub-buffer pointing at data within a larger
         buffer. This data might not be aligned with anything. If the data is
         raw samples though, we want it aligned to the raw type (eg, 4 bytes
         for 32 bit samples, etc), or bad things will happen downstream as
         elements typically assume minimal alignment.
         Therefore, create an aligned copy if necessary. */
      sub = gst_matroska_demux_align_buffer (demux, sub, stream->alignment);

      if (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_AUDIO_OPUS)) {
        guint64 start_clip = 0, end_clip = 0;

        /* Codec delay is part of the timestamps */
        if (GST_BUFFER_PTS_IS_VALID (sub) && stream->codec_delay) {
          if (GST_BUFFER_PTS (sub) > stream->codec_delay) {
            GST_BUFFER_PTS (sub) -= stream->codec_delay;
          } else {
            GST_BUFFER_PTS (sub) = 0;

            /* Opus GstAudioClippingMeta units are scaled by 48000/sample_rate.
               That is, if a Opus track has audio encoded at 24000 Hz and 132
               samples need to be clipped, GstAudioClippingMeta.start will be
               set to 264. (This is also the case for buffer offsets.)
               Opus sample rates are always divisors of 48000 Hz, which is the
               maximum allowed sample rate. */
            start_clip =
                gst_util_uint64_scale_round (stream->codec_delay, 48000,
                GST_SECOND);

            if (GST_BUFFER_DURATION_IS_VALID (sub)) {
              if (GST_BUFFER_DURATION (sub) > stream->codec_delay)
                GST_BUFFER_DURATION (sub) -= stream->codec_delay;
              else
                GST_BUFFER_DURATION (sub) = 0;
            }
          }
        }

        if (block_discardpadding) {
          end_clip =
              gst_util_uint64_scale_round (block_discardpadding, 48000,
              GST_SECOND);
        }

        if (start_clip || end_clip) {
          gst_buffer_add_audio_clipping_meta (sub, GST_FORMAT_DEFAULT,
              start_clip, end_clip);
        }
      }

      if (GST_BUFFER_PTS_IS_VALID (sub)) {
        stream->pos = GST_BUFFER_PTS (sub);
        if (GST_BUFFER_DURATION_IS_VALID (sub))
          stream->pos += GST_BUFFER_DURATION (sub);
      } else if (GST_BUFFER_DTS_IS_VALID (sub)) {
        stream->pos = GST_BUFFER_DTS (sub);
        if (GST_BUFFER_DURATION_IS_VALID (sub))
          stream->pos += GST_BUFFER_DURATION (sub);
      }

      /* Attach BlockAdditions to buffer; we assume a single buffer per group
       * in this case */
      if (additions.length > 0) {
        BlockAddition *blockadd;

        if (laces > 2)
          GST_FIXME_OBJECT (demux, "Fix block additions with laced buffers");

        while ((blockadd = g_queue_pop_head (&additions))) {
          GstMatroskaTrackVideoContext *videocontext =
              (GstMatroskaTrackVideoContext *) stream;
          /* BlockAddID 1 on a VP8/VP9 track carries the alpha plane */
          if (blockadd->id == 1 && videocontext->alpha_mode
              && (!strcmp (stream->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8)
                  || !strcmp (stream->codec_id,
                      GST_MATROSKA_CODEC_ID_VIDEO_VP9))) {
            GstBuffer *alpha_buffer;

            GST_TRACE_OBJECT (demux, "adding block addition %u as VP8/VP9 "
                "alpha meta to buffer %p, %u bytes", (guint) blockadd->id, buf,
                (guint) blockadd->size);

            alpha_buffer = gst_buffer_new_wrapped (blockadd->data,
                blockadd->size);
            gst_buffer_copy_into (alpha_buffer, sub,
                GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
            gst_buffer_add_video_codec_alpha_meta (sub, alpha_buffer);
          } else {
            g_free (blockadd->data);
          }
          g_free (blockadd);
        }
      }

      ret = gst_pad_push (stream->pad, sub);

      if (demux->common.segment.rate < 0) {
        if (lace_time > demux->common.segment.stop && ret == GST_FLOW_EOS) {
          /* In reverse playback we can get a GST_FLOW_EOS when
           * we are at the end of the segment, so we just need to jump
           * back to the previous section. */
          GST_DEBUG_OBJECT (demux, "downstream has reached end of segment");
          ret = GST_FLOW_OK;
        }
      }
      /* combine flows */
      ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner,
          stream->pad, ret);

    next_lace:
      size -= lace_size[n];
      if (lace_time != GST_CLOCK_TIME_NONE && duration)
        lace_time += duration / laces;
      else
        lace_time = GST_CLOCK_TIME_NONE;
    }
  }

done:
  /* common cleanup: unmap/unref the block buffer, free lace sizes and any
   * unconsumed block additions */
  if (buf) {
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
  }
  g_free (lace_size);
  {
    BlockAddition *blockadd;

    while ((blockadd = g_queue_pop_head (&additions))) {
      g_free (blockadd->data);
      g_free (blockadd);
    }
  }
  return ret;

  /* EXITS */
eos:
  {
    stream->eos = TRUE;
    ret = GST_FLOW_OK;
    /* combine flows */
    ret = gst_flow_combiner_update_pad_flow (demux->flowcombiner, stream->pad,
        ret);
    goto done;
  }
invalid_lacing:
  {
    GST_ELEMENT_WARNING (demux, STREAM, DEMUX, (NULL), ("Invalid lacing size"));
    /* non-fatal, try next block(group) */
    ret = GST_FLOW_OK;
    goto done;
  }
data_error:
  {
    GST_ELEMENT_WARNING (demux, STREAM, DEMUX, (NULL), ("Data error"));
    /* non-fatal, try next block(group) */
    ret = GST_FLOW_OK;
    goto done;
  }
}
+
+/* return FALSE if block(group) should be skipped (due to a seek) */
+static inline gboolean
+gst_matroska_demux_seek_block (GstMatroskaDemux * demux)
+{
+ if (G_UNLIKELY (demux->seek_block)) {
+ if (!(--demux->seek_block)) {
+ return TRUE;
+ } else {
+ GST_LOG_OBJECT (demux, "should skip block due to seek");
+ return FALSE;
+ }
+ } else {
+ return TRUE;
+ }
+}
+
+static GstFlowReturn
+gst_matroska_demux_parse_contents_seekentry (GstMatroskaDemux * demux,
+ GstEbmlRead * ebml)
+{
+ GstFlowReturn ret;
+ guint64 seek_pos = (guint64) - 1;
+ guint32 seek_id = 0;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (demux, ebml, "Seek");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "Seek", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_SEEKID:
+ {
+ guint64 t;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &t)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (demux, "SeekID: %" G_GUINT64_FORMAT, t);
+ seek_id = t;
+ break;
+ }
+
+ case GST_MATROSKA_ID_SEEKPOSITION:
+ {
+ guint64 t;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &t)) != GST_FLOW_OK)
+ break;
+
+ if (t > G_MAXINT64) {
+ GST_WARNING_OBJECT (demux,
+ "Too large SeekPosition %" G_GUINT64_FORMAT, t);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (demux, "SeekPosition: %" G_GUINT64_FORMAT, t);
+ seek_pos = t;
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common, ebml,
+ "SeekHead", id);
+ break;
+ }
+ }
+
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
+ return ret;
+
+ if (!seek_id || seek_pos == (guint64) - 1) {
+ GST_WARNING_OBJECT (demux, "Incomplete seekhead entry (0x%x/%"
+ G_GUINT64_FORMAT ")", seek_id, seek_pos);
+ return GST_FLOW_OK;
+ }
+
+ switch (seek_id) {
+ case GST_MATROSKA_ID_SEEKHEAD:
+ {
+ }
+ case GST_MATROSKA_ID_CUES:
+ case GST_MATROSKA_ID_TAGS:
+ case GST_MATROSKA_ID_TRACKS:
+ case GST_MATROSKA_ID_SEGMENTINFO:
+ case GST_MATROSKA_ID_ATTACHMENTS:
+ case GST_MATROSKA_ID_CHAPTERS:
+ {
+ guint64 before_pos, length;
+ guint needed;
+
+ /* remember */
+ length = gst_matroska_read_common_get_length (&demux->common);
+ before_pos = demux->common.offset;
+
+ if (length == (guint64) - 1) {
+ GST_DEBUG_OBJECT (demux, "no upstream length, skipping SeakHead entry");
+ break;
+ }
+
+ /* check for validity */
+ if (seek_pos + demux->common.ebml_segment_start + 12 >= length) {
+ GST_WARNING_OBJECT (demux,
+ "SeekHead reference lies outside file!" " (%"
+ G_GUINT64_FORMAT "+%" G_GUINT64_FORMAT "+12 >= %"
+ G_GUINT64_FORMAT ")", seek_pos, demux->common.ebml_segment_start,
+ length);
+ break;
+ }
+
+ /* only pick up index location when streaming */
+ if (demux->streaming) {
+ if (seek_id == GST_MATROSKA_ID_CUES) {
+ demux->index_offset = seek_pos + demux->common.ebml_segment_start;
+ GST_DEBUG_OBJECT (demux, "Cues located at offset %" G_GUINT64_FORMAT,
+ demux->index_offset);
+ }
+ break;
+ }
+
+ /* seek */
+ demux->common.offset = seek_pos + demux->common.ebml_segment_start;
+
+ /* check ID */
+ if ((ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+ GST_ELEMENT_CAST (demux), &id, &length, &needed)) !=
+ GST_FLOW_OK)
+ goto finish;
+
+ if (id != seek_id) {
+ GST_WARNING_OBJECT (demux,
+ "We looked for ID=0x%x but got ID=0x%x (pos=%" G_GUINT64_FORMAT ")",
+ seek_id, id, seek_pos + demux->common.ebml_segment_start);
+ } else {
+ /* now parse */
+ ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+ }
+
+ finish:
+ /* seek back */
+ demux->common.offset = before_pos;
+ break;
+ }
+
+ case GST_MATROSKA_ID_CLUSTER:
+ {
+ guint64 pos = seek_pos + demux->common.ebml_segment_start;
+
+ GST_LOG_OBJECT (demux, "Cluster position");
+ if (G_UNLIKELY (!demux->clusters))
+ demux->clusters = g_array_sized_new (TRUE, TRUE, sizeof (guint64), 100);
+ g_array_append_val (demux->clusters, pos);
+ break;
+ }
+
+ default:
+ GST_DEBUG_OBJECT (demux, "Ignoring Seek entry for ID=0x%x", seek_id);
+ break;
+ }
+ DEBUG_ELEMENT_STOP (demux, ebml, "Seek", ret);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_matroska_demux_parse_contents (GstMatroskaDemux * demux, GstEbmlRead * ebml)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (demux, ebml, "SeekHead");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (demux, ebml, "SeekHead", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_SEEKENTRY:
+ {
+ ret = gst_matroska_demux_parse_contents_seekentry (demux, ebml);
+ /* Ignore EOS and errors here */
+ if (ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (demux, "Ignoring %s", gst_flow_get_name (ret));
+ ret = GST_FLOW_OK;
+ }
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&demux->common,
+ ebml, "SeekHead", id);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (demux, ebml, "SeekHead", ret);
+
+ /* Sort clusters by position for easier searching */
+ if (demux->clusters)
+ g_array_sort (demux->clusters, (GCompareFunc) gst_matroska_cluster_compare);
+
+ return ret;
+}
+
+#define GST_FLOW_OVERFLOW GST_FLOW_CUSTOM_ERROR
+
+#define MAX_BLOCK_SIZE (15 * 1024 * 1024)
+
+static inline GstFlowReturn
+gst_matroska_demux_check_read_size (GstMatroskaDemux * demux, guint64 bytes)
+{
+ if (G_UNLIKELY (bytes > MAX_BLOCK_SIZE)) {
+ /* only a few blocks are expected/allowed to be large,
+ * and will be recursed into, whereas others will be read and must fit */
+ if (demux->streaming) {
+ /* fatal in streaming case, as we can't step over easily */
+ GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+ ("reading large block of size %" G_GUINT64_FORMAT " not supported; "
+ "file might be corrupt.", bytes));
+ return GST_FLOW_ERROR;
+ } else {
+ /* indicate higher level to quietly give up */
+ GST_DEBUG_OBJECT (demux,
+ "too large block of size %" G_GUINT64_FORMAT, bytes);
+ return GST_FLOW_ERROR;
+ }
+ } else {
+ return GST_FLOW_OK;
+ }
+}
+
/* Attempt recovery from a parse error. Returns GST_FLOW_OK if a plausible
 * resume position was found (and demux->common.offset repositioned there);
 * any other return means we truly are in error state and should give up.
 * (NOTE(review): the old comment said "returns TRUE", but this returns a
 * GstFlowReturn.) */
static inline GstFlowReturn
gst_matroska_demux_check_parse_error (GstMatroskaDemux * demux)
{
  /* pull mode with a known next-cluster position: just jump there */
  if (!demux->streaming && demux->next_cluster_offset > 0) {
    /* just repositioning to where next cluster should be and try from there */
    GST_WARNING_OBJECT (demux, "parse error, trying next cluster expected at %"
        G_GUINT64_FORMAT, demux->next_cluster_offset);
    demux->common.offset = demux->next_cluster_offset;
    demux->next_cluster_offset = 0;
    return GST_FLOW_OK;
  } else {
    gint64 pos;
    GstFlowReturn ret;

    /* sigh, one last attempt above and beyond call of duty ...;
     * search for cluster mark following current pos */
    pos = demux->common.offset;
    GST_WARNING_OBJECT (demux, "parse error, looking for next cluster");
    if ((ret = gst_matroska_demux_search_cluster (demux, &pos, TRUE)) !=
        GST_FLOW_OK) {
      /* did not work, give up */
      return ret;
    } else {
      GST_DEBUG_OBJECT (demux, "... found at %" G_GUINT64_FORMAT, pos);
      /* try that position */
      demux->common.offset = pos;
      return GST_FLOW_OK;
    }
  }
}
+
+static inline GstFlowReturn
+gst_matroska_demux_flush (GstMatroskaDemux * demux, guint flush)
+{
+ GST_LOG_OBJECT (demux, "skipping %d bytes", flush);
+ demux->common.offset += flush;
+ if (demux->streaming) {
+ GstFlowReturn ret;
+
+ /* hard to skip large blocks when streaming */
+ ret = gst_matroska_demux_check_read_size (demux, flush);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ if (flush <= gst_adapter_available (demux->common.adapter))
+ gst_adapter_flush (demux->common.adapter, flush);
+ else
+ return GST_FLOW_EOS;
+ }
+ return GST_FLOW_OK;
+}
+
/* initializes @ebml with @bytes from input stream at current offset.
 * Returns EOS if insufficient available,
 * ERROR if too much was attempted to read.
 * In pull mode an over-large element is skipped instead and
 * GST_FLOW_OVERFLOW is returned so the caller can quietly move on. */
static inline GstFlowReturn
gst_matroska_demux_take (GstMatroskaDemux * demux, guint64 bytes,
    GstEbmlRead * ebml)
{
  GstBuffer *buffer = NULL;
  GstFlowReturn ret = GST_FLOW_OK;

  GST_LOG_OBJECT (demux, "taking %" G_GUINT64_FORMAT " bytes for parsing",
      bytes);
  ret = gst_matroska_demux_check_read_size (demux, bytes);
  if (G_UNLIKELY (ret != GST_FLOW_OK)) {
    if (!demux->streaming) {
      /* in pull mode, we can skip */
      if ((ret = gst_matroska_demux_flush (demux, bytes)) == GST_FLOW_OK)
        ret = GST_FLOW_OVERFLOW;
    } else {
      /* otherwise fatal */
      ret = GST_FLOW_ERROR;
    }
    goto exit;
  }
  if (demux->streaming) {
    /* push mode: data must already be buffered in the adapter */
    if (gst_adapter_available (demux->common.adapter) >= bytes)
      buffer = gst_adapter_take_buffer (demux->common.adapter, bytes);
    else
      ret = GST_FLOW_EOS;
  } else
    /* pull mode: read directly from upstream */
    ret = gst_matroska_read_common_peek_bytes (&demux->common,
        demux->common.offset, bytes, &buffer, NULL);
  if (G_LIKELY (buffer)) {
    /* hand the buffer to the ebml reader and advance our offset */
    gst_ebml_read_init (ebml, GST_ELEMENT_CAST (demux), buffer,
        demux->common.offset);
    demux->common.offset += bytes;
  }
exit:
  return ret;
}
+
+static void
+gst_matroska_demux_check_seekability (GstMatroskaDemux * demux)
+{
+ GstQuery *query;
+ gboolean seekable = FALSE;
+ gint64 start = -1, stop = -1;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (demux->common.sinkpad, query)) {
+ GST_DEBUG_OBJECT (demux, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GST_DEBUG_OBJECT (demux, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (demux->common.sinkpad, GST_FORMAT_BYTES,
+ &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (demux, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ }
+
+done:
+ GST_INFO_OBJECT (demux, "seekable: %d (%" G_GUINT64_FORMAT " - %"
+ G_GUINT64_FORMAT ")", seekable, start, stop);
+ demux->seekable = seekable;
+
+ gst_query_unref (query);
+}
+
/* Scan forward (pull mode) for the Tracks element when a Cluster was found
 * before any Tracks; parses Tracks if found, then restores the original
 * read offset so cluster handling can continue from where it was. */
static GstFlowReturn
gst_matroska_demux_find_tracks (GstMatroskaDemux * demux)
{
  guint32 id;
  guint64 before_pos;
  guint64 length;
  guint needed;
  GstFlowReturn ret = GST_FLOW_OK;

  GST_WARNING_OBJECT (demux,
      "Found Cluster element before Tracks, searching Tracks");

  /* remember */
  before_pos = demux->common.offset;

  /* Search Tracks element */
  while (TRUE) {
    ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
        GST_ELEMENT_CAST (demux), &id, &length, &needed);
    if (ret != GST_FLOW_OK)
      break;

    if (id != GST_MATROSKA_ID_TRACKS) {
      /* we may be skipping large cluster here, so forego size check etc */
      /* ... but we can't skip undefined size; force error */
      if (length == G_MAXUINT64) {
        /* G_MAXUINT64 always exceeds MAX_BLOCK_SIZE, so this yields
         * an error return and terminates the search */
        ret = gst_matroska_demux_check_read_size (demux, length);
        break;
      } else {
        demux->common.offset += needed;
        demux->common.offset += length;
      }
      continue;
    }

    /* will lead to track parsing ... */
    ret = gst_matroska_demux_parse_id (demux, id, length, needed);
    break;
  }

  /* seek back */
  demux->common.offset = before_pos;

  return ret;
}
+
+/* Evaluate a read/flush statement and, on failure, store the result in the
+ * caller's local 'ret' and jump to the caller's read_error label.
+ * GST_FLOW_OVERFLOW is rewritten to GST_FLOW_OK before the jump, so that
+ * case exits the parse without raising an error.  Requires both 'ret' and
+ * 'read_error:' to exist in the enclosing function. */
+#define GST_READ_CHECK(stmt)  \
+G_STMT_START { \
+  if (G_UNLIKELY ((ret = (stmt)) != GST_FLOW_OK)) { \
+    if (ret == GST_FLOW_OVERFLOW) { \
+      ret = GST_FLOW_OK; \
+    } \
+    goto read_error; \
+  } \
+} G_STMT_END
+
+/* Central element dispatcher shared by the pull-mode loop and the push-mode
+ * chain function: parse or skip one EBML element identified by @id, with
+ * payload size @length and prefix (id + length field) size @needed, at the
+ * current demux->common.offset, according to the demuxer's read state.
+ * Returns a flow result; GST_FLOW_PARSE from sub-parsers is converted into
+ * a posted element error and GST_FLOW_ERROR before returning. */
+static GstFlowReturn
+gst_matroska_demux_parse_id (GstMatroskaDemux * demux, guint32 id,
+    guint64 length, guint needed)
+{
+  GstEbmlRead ebml = { 0, };
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint64 read;
+
+  GST_LOG_OBJECT (demux, "Parsing Element id 0x%x, "
+      "size %" G_GUINT64_FORMAT ", prefix %d", id, length, needed);
+
+  /* if we plan to read and parse this element, we need prefix (id + length)
+   * and the contents */
+  /* mind about overflow wrap-around when dealing with undefined size */
+  read = length;
+  if (G_LIKELY (length != G_MAXUINT64))
+    read += needed;
+
+  switch (demux->common.state) {
+    case GST_MATROSKA_READ_STATE_START:
+      /* only an EBML header is acceptable as the very first element */
+      switch (id) {
+        case GST_EBML_ID_HEADER:
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          ret = gst_matroska_read_common_parse_header (&demux->common, &ebml);
+          if (ret != GST_FLOW_OK)
+            goto parse_failed;
+          demux->common.state = GST_MATROSKA_READ_STATE_SEGMENT;
+          gst_matroska_demux_check_seekability (demux);
+          break;
+        default:
+          goto invalid_header;
+          break;
+      }
+      break;
+    case GST_MATROSKA_READ_STATE_SEGMENT:
+      switch (id) {
+        case GST_MATROSKA_ID_SEGMENT:
+          /* eat segment prefix */
+          GST_READ_CHECK (gst_matroska_demux_flush (demux, needed));
+          GST_DEBUG_OBJECT (demux,
+              "Found Segment start at offset %" G_GUINT64_FORMAT " with size %"
+              G_GUINT64_FORMAT, demux->common.offset, length);
+          /* seeks are from the beginning of the segment,
+           * after the segment ID/length */
+          demux->common.ebml_segment_start = demux->common.offset;
+          /* a zero-sized segment is treated as unknown/undefined size */
+          if (length == 0)
+            length = G_MAXUINT64;
+          demux->common.ebml_segment_length = length;
+          demux->common.state = GST_MATROSKA_READ_STATE_HEADER;
+          break;
+        default:
+          /* tolerate and skip unexpected elements while looking for Segment */
+          GST_WARNING_OBJECT (demux,
+              "Expected a Segment ID (0x%x), but received 0x%x!",
+              GST_MATROSKA_ID_SEGMENT, id);
+          GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+          break;
+      }
+      break;
+    case GST_MATROSKA_READ_STATE_SCANNING:
+      /* resync mode: only cluster-related IDs end the scan */
+      if (id != GST_MATROSKA_ID_CLUSTER &&
+          id != GST_MATROSKA_ID_PREVSIZE &&
+          id != GST_MATROSKA_ID_CLUSTERTIMECODE) {
+        if (demux->common.start_resync_offset != -1) {
+          /* we need to skip byte per byte if we are scanning for a new cluster
+           * after invalid data is found
+           */
+          read = 1;
+        }
+        goto skip;
+      } else {
+        if (demux->common.start_resync_offset != -1) {
+          GST_LOG_OBJECT (demux, "Resync done, new cluster found!");
+          demux->common.start_resync_offset = -1;
+          demux->common.state = demux->common.state_to_restore;
+        }
+      }
+      /* fall-through */
+    case GST_MATROSKA_READ_STATE_HEADER:
+    case GST_MATROSKA_READ_STATE_DATA:
+    case GST_MATROSKA_READ_STATE_SEEK:
+      switch (id) {
+        case GST_EBML_ID_HEADER:
+          /* a new EBML header restarts segment discovery */
+          GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+          demux->common.state = GST_MATROSKA_READ_STATE_SEGMENT;
+          gst_matroska_demux_check_seekability (demux);
+          break;
+        case GST_MATROSKA_ID_SEGMENTINFO:
+          if (!demux->common.segmentinfo_parsed) {
+            GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+            ret = gst_matroska_read_common_parse_info (&demux->common,
+                GST_ELEMENT_CAST (demux), &ebml);
+            if (ret == GST_FLOW_OK)
+              gst_matroska_demux_send_tags (demux);
+          } else {
+            GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+          }
+          break;
+        case GST_MATROSKA_ID_TRACKS:
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          if (!demux->tracks_parsed) {
+            ret = gst_matroska_demux_parse_tracks (demux, &ebml);
+          } else {
+            /* already have tracks: treat this as a track update */
+            ret = gst_matroska_demux_update_tracks (demux, &ebml);
+          }
+          break;
+        case GST_MATROSKA_ID_CLUSTER:
+          if (G_UNLIKELY (!demux->tracks_parsed)) {
+            if (demux->streaming) {
+              /* cannot scan ahead for Tracks in push mode */
+              GST_DEBUG_OBJECT (demux, "Cluster before Track");
+              goto not_streamable;
+            } else {
+              ret = gst_matroska_demux_find_tracks (demux);
+              if (!demux->tracks_parsed)
+                goto no_tracks;
+            }
+          }
+          /* first cluster: finish the header phase */
+          if (demux->common.state == GST_MATROSKA_READ_STATE_HEADER) {
+            demux->common.state = GST_MATROSKA_READ_STATE_DATA;
+            demux->first_cluster_offset = demux->common.offset;
+
+            if (!demux->streaming &&
+                !GST_CLOCK_TIME_IS_VALID (demux->common.segment.duration)) {
+              GstMatroskaIndex *last = NULL;
+
+              GST_DEBUG_OBJECT (demux,
+                  "estimating duration using last cluster");
+              if ((last = gst_matroska_demux_search_pos (demux,
+                          GST_CLOCK_TIME_NONE)) != NULL) {
+                demux->last_cluster_offset =
+                    last->pos + demux->common.ebml_segment_start;
+                demux->stream_last_time = last->time;
+                demux->common.segment.duration =
+                    demux->stream_last_time - demux->stream_start_time;
+                /* above estimate should not be taken all too strongly */
+                demux->invalid_duration = TRUE;
+                GST_DEBUG_OBJECT (demux,
+                    "estimated duration as %" GST_TIME_FORMAT,
+                    GST_TIME_ARGS (demux->common.segment.duration));
+
+                g_free (last);
+              }
+            }
+
+            /* Peek at second cluster in order to figure out if we have cluster
+             * prev_size or not (which is never set on the first cluster for
+             * obvious reasons). This is useful in case someone initiates a
+             * seek or direction change before we reach the second cluster. */
+            if (!demux->streaming) {
+              ClusterInfo cluster = { 0, };
+
+              if (gst_matroska_demux_peek_cluster_info (demux, &cluster,
+                      demux->first_cluster_offset) && cluster.size > 0) {
+                gst_matroska_demux_peek_cluster_info (demux, &cluster,
+                    demux->first_cluster_offset + cluster.size);
+              }
+              /* peeking moved the offset; restore it */
+              demux->common.offset = demux->first_cluster_offset;
+            }
+
+            /* a seek that arrived before headers were done can now be run */
+            if (demux->deferred_seek_event) {
+              GstEvent *seek_event;
+              GstPad *seek_pad;
+              seek_event = demux->deferred_seek_event;
+              seek_pad = demux->deferred_seek_pad;
+              demux->deferred_seek_event = NULL;
+              demux->deferred_seek_pad = NULL;
+              GST_DEBUG_OBJECT (demux,
+                  "Handling deferred seek event: %" GST_PTR_FORMAT, seek_event);
+              gst_matroska_demux_handle_seek_event (demux, seek_pad,
+                  seek_event);
+              gst_event_unref (seek_event);
+            }
+
+            /* send initial segment - we wait till we know the first
+               incoming timestamp, so we can properly set the start of
+               the segment. */
+            demux->need_segment = TRUE;
+          }
+          demux->cluster_time = GST_CLOCK_TIME_NONE;
+          demux->cluster_offset = demux->common.offset;
+          demux->cluster_prevsize = 0;
+          if (G_UNLIKELY (!demux->seek_first && demux->seek_block)) {
+            GST_DEBUG_OBJECT (demux, "seek target block %" G_GUINT64_FORMAT
+                " not found in Cluster, trying next Cluster's first block instead",
+                demux->seek_block);
+            demux->seek_block = 0;
+          }
+          demux->seek_first = FALSE;
+          /* record next cluster for recovery */
+          if (read != G_MAXUINT64)
+            demux->next_cluster_offset = demux->cluster_offset + read;
+          /* eat cluster prefix */
+          gst_matroska_demux_flush (demux, needed);
+          break;
+        case GST_MATROSKA_ID_CLUSTERTIMECODE:
+        {
+          guint64 num;
+
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          if ((ret = gst_ebml_read_uint (&ebml, &id, &num)) != GST_FLOW_OK)
+            goto parse_failed;
+          GST_DEBUG_OBJECT (demux, "ClusterTimeCode: %" G_GUINT64_FORMAT, num);
+          demux->cluster_time = num;
+          /* track last cluster */
+          if (demux->cluster_offset > demux->last_cluster_offset) {
+            demux->last_cluster_offset = demux->cluster_offset;
+            demux->stream_last_time =
+                demux->cluster_time * demux->common.time_scale;
+          }
+#if 0
+          if (demux->common.element_index) {
+            if (demux->common.element_index_writer_id == -1)
+              gst_index_get_writer_id (demux->common.element_index,
+                  GST_OBJECT (demux), &demux->common.element_index_writer_id);
+            GST_LOG_OBJECT (demux, "adding association %" GST_TIME_FORMAT "-> %"
+                G_GUINT64_FORMAT " for writer id %d",
+                GST_TIME_ARGS (demux->cluster_time), demux->cluster_offset,
+                demux->common.element_index_writer_id);
+            gst_index_add_association (demux->common.element_index,
+                demux->common.element_index_writer_id,
+                GST_ASSOCIATION_FLAG_KEY_UNIT,
+                GST_FORMAT_TIME, demux->cluster_time,
+                GST_FORMAT_BYTES, demux->cluster_offset, NULL);
+          }
+#endif
+          break;
+        }
+        case GST_MATROSKA_ID_BLOCKGROUP:
+          /* when seeking, skip blocks until the target block is reached */
+          if (!gst_matroska_demux_seek_block (demux))
+            goto skip;
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          DEBUG_ELEMENT_START (demux, &ebml, "BlockGroup");
+          if ((ret = gst_ebml_read_master (&ebml, &id)) == GST_FLOW_OK) {
+            ret = gst_matroska_demux_parse_blockgroup_or_simpleblock (demux,
+                &ebml, demux->cluster_time, demux->cluster_offset, FALSE);
+          }
+          DEBUG_ELEMENT_STOP (demux, &ebml, "BlockGroup", ret);
+          break;
+        case GST_MATROSKA_ID_SIMPLEBLOCK:
+          if (!gst_matroska_demux_seek_block (demux))
+            goto skip;
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          DEBUG_ELEMENT_START (demux, &ebml, "SimpleBlock");
+          ret = gst_matroska_demux_parse_blockgroup_or_simpleblock (demux,
+              &ebml, demux->cluster_time, demux->cluster_offset, TRUE);
+          DEBUG_ELEMENT_STOP (demux, &ebml, "SimpleBlock", ret);
+          break;
+        case GST_MATROSKA_ID_ATTACHMENTS:
+          if (!demux->common.attachments_parsed) {
+            GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+            ret = gst_matroska_read_common_parse_attachments (&demux->common,
+                GST_ELEMENT_CAST (demux), &ebml);
+            if (ret == GST_FLOW_OK)
+              gst_matroska_demux_send_tags (demux);
+          } else {
+            GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+          }
+          break;
+        case GST_MATROSKA_ID_TAGS:
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          ret = gst_matroska_read_common_parse_metadata (&demux->common,
+              GST_ELEMENT_CAST (demux), &ebml);
+          if (ret == GST_FLOW_OK)
+            gst_matroska_demux_send_tags (demux);
+          break;
+        case GST_MATROSKA_ID_CHAPTERS:
+          if (!demux->common.chapters_parsed) {
+            GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+            ret =
+                gst_matroska_read_common_parse_chapters (&demux->common, &ebml);
+
+            if (demux->common.toc) {
+              gst_matroska_demux_send_event (demux,
+                  gst_event_new_toc (demux->common.toc, FALSE));
+            }
+          } else
+            GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+          break;
+        case GST_MATROSKA_ID_SEEKHEAD:
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          ret = gst_matroska_demux_parse_contents (demux, &ebml);
+          break;
+        case GST_MATROSKA_ID_CUES:
+          if (demux->common.index_parsed) {
+            GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+            break;
+          }
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          ret = gst_matroska_read_common_parse_index (&demux->common, &ebml);
+          /* only push based; delayed index building */
+          if (ret == GST_FLOW_OK
+              && demux->common.state == GST_MATROSKA_READ_STATE_SEEK) {
+            GstEvent *event;
+
+            GST_OBJECT_LOCK (demux);
+            event = demux->seek_event;
+            demux->seek_event = NULL;
+            GST_OBJECT_UNLOCK (demux);
+
+            g_assert (event);
+            /* unlikely to fail, since we managed to seek to this point */
+            if (!gst_matroska_demux_handle_seek_event (demux, NULL, event)) {
+              gst_event_unref (event);
+              goto seek_failed;
+            }
+            gst_event_unref (event);
+            /* resume data handling, main thread clear to seek again */
+            GST_OBJECT_LOCK (demux);
+            demux->common.state = GST_MATROSKA_READ_STATE_DATA;
+            GST_OBJECT_UNLOCK (demux);
+          }
+          break;
+        case GST_MATROSKA_ID_PREVSIZE:{
+          guint64 num;
+
+          GST_READ_CHECK (gst_matroska_demux_take (demux, read, &ebml));
+          if ((ret = gst_ebml_read_uint (&ebml, &id, &num)) != GST_FLOW_OK)
+            goto parse_failed;
+          GST_LOG_OBJECT (demux, "ClusterPrevSize: %" G_GUINT64_FORMAT, num);
+          demux->cluster_prevsize = num;
+          demux->seen_cluster_prevsize = TRUE;
+          break;
+        }
+        case GST_MATROSKA_ID_POSITION:
+        case GST_MATROSKA_ID_ENCRYPTEDBLOCK:
+          /* The WebM doesn't support the EncryptedBlock element.
+           * The Matroska spec doesn't give us more detail, how to parse this element,
+           * for example the field TransformID isn't specified yet.*/
+        case GST_MATROSKA_ID_SILENTTRACKS:
+          GST_DEBUG_OBJECT (demux,
+              "Skipping Cluster subelement 0x%x - ignoring", id);
+          /* fall-through */
+        default:
+        skip:
+          GST_DEBUG_OBJECT (demux, "skipping Element 0x%x", id);
+          GST_READ_CHECK (gst_matroska_demux_flush (demux, read));
+          break;
+      }
+      break;
+  }
+
+  /* GST_FLOW_PARSE is internal-only; surface it as a posted error */
+  if (ret == GST_FLOW_PARSE)
+    goto parse_failed;
+
+exit:
+  gst_ebml_read_clear (&ebml);
+  return ret;
+
+  /* ERRORS */
+read_error:
+  {
+    /* simply exit, maybe not enough data yet */
+    /* no ebml to clear if read error */
+    return ret;
+  }
+parse_failed:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+        ("Failed to parse Element 0x%x", id));
+    ret = GST_FLOW_ERROR;
+    goto exit;
+  }
+not_streamable:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+        ("File layout does not permit streaming"));
+    ret = GST_FLOW_ERROR;
+    goto exit;
+  }
+no_tracks:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL),
+        ("No Tracks element found"));
+    ret = GST_FLOW_ERROR;
+    goto exit;
+  }
+invalid_header:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("Invalid header"));
+    ret = GST_FLOW_ERROR;
+    goto exit;
+  }
+seek_failed:
+  {
+    GST_ELEMENT_ERROR (demux, STREAM, DEMUX, (NULL), ("Failed to seek"));
+    ret = GST_FLOW_ERROR;
+    goto exit;
+  }
+}
+
+/* Pull-mode streaming task: peek and parse one element per iteration,
+ * detect all-streams-EOS and end-of-file, and on EOS/error pause the task
+ * and perform segment-done or EOS signalling as appropriate. */
+static void
+gst_matroska_demux_loop (GstPad * pad)
+{
+  GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (GST_PAD_PARENT (pad));
+  GstFlowReturn ret;
+  guint32 id;
+  guint64 length;
+  guint needed;
+
+  /* If we have to close a segment, send a new segment to do this now */
+  if (G_LIKELY (demux->common.state == GST_MATROSKA_READ_STATE_DATA)) {
+    if (G_UNLIKELY (demux->new_segment)) {
+      gst_matroska_demux_send_event (demux, demux->new_segment);
+      demux->new_segment = NULL;
+    }
+  }
+
+  ret = gst_matroska_read_common_peek_id_length_pull (&demux->common,
+      GST_ELEMENT_CAST (demux), &id, &length, &needed);
+  if (ret == GST_FLOW_EOS) {
+    goto eos;
+  } else if (ret == GST_FLOW_FLUSHING) {
+    goto pause;
+  } else if (ret != GST_FLOW_OK) {
+    ret = gst_matroska_demux_check_parse_error (demux);
+
+    /* Only handle EOS as no error if we're outside the segment already */
+    if (ret == GST_FLOW_EOS && (demux->common.ebml_segment_length != G_MAXUINT64
+            && demux->common.offset >=
+            demux->common.ebml_segment_start +
+            demux->common.ebml_segment_length))
+      goto eos;
+    else if (ret != GST_FLOW_OK)
+      goto pause;
+    else
+      return;
+  }
+
+  GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+      "size %" G_GUINT64_FORMAT ", needed %d", demux->common.offset, id,
+      length, needed);
+
+  ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+  if (ret == GST_FLOW_EOS)
+    goto eos;
+  if (ret != GST_FLOW_OK)
+    goto pause;
+
+  /* check if we're at the end of a configured segment */
+  if (G_LIKELY (demux->common.src->len)) {
+    guint i;
+
+    g_assert (demux->common.num_streams == demux->common.src->len);
+    for (i = 0; i < demux->common.src->len; i++) {
+      GstMatroskaTrackContext *context = g_ptr_array_index (demux->common.src,
+          i);
+      GST_DEBUG_OBJECT (context->pad, "pos %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (context->pos));
+      if (context->eos == FALSE)
+        goto next;
+    }
+
+    /* no stream had eos == FALSE, so everything is finished */
+    GST_INFO_OBJECT (demux, "All streams are EOS");
+    ret = GST_FLOW_EOS;
+    goto eos;
+  }
+
+next:
+  /* re-query the total length when the cached value is stale or exceeded */
+  if (G_UNLIKELY (demux->cached_length == G_MAXUINT64 ||
+          demux->common.offset >= demux->cached_length)) {
+    demux->cached_length = gst_matroska_read_common_get_length (&demux->common);
+    if (demux->common.offset == demux->cached_length) {
+      GST_LOG_OBJECT (demux, "Reached end of stream");
+      ret = GST_FLOW_EOS;
+      goto eos;
+    }
+  }
+
+  return;
+
+  /* ERRORS */
+eos:
+  {
+    /* in reverse playback, EOS means: step back to the previous keyframe */
+    if (demux->common.segment.rate < 0.0) {
+      ret = gst_matroska_demux_seek_to_previous_keyframe (demux);
+      if (ret == GST_FLOW_OK)
+        return;
+    }
+    /* fall-through */
+  }
+pause:
+  {
+    const gchar *reason = gst_flow_get_name (ret);
+    gboolean push_eos = FALSE;
+
+    GST_LOG_OBJECT (demux, "pausing task, reason %s", reason);
+    gst_pad_pause_task (demux->common.sinkpad);
+
+    if (ret == GST_FLOW_EOS) {
+      /* perform EOS logic */
+
+      /* If we were in the headers, make sure we send no-more-pads.
+         This will ensure decodebin does not get stuck thinking
+         the chain is not complete yet, and waiting indefinitely. */
+      if (G_UNLIKELY (demux->common.state == GST_MATROSKA_READ_STATE_HEADER)) {
+        if (demux->common.src->len == 0) {
+          GST_ELEMENT_ERROR (demux, STREAM, FAILED, (NULL),
+              ("No pads created"));
+        } else {
+          GST_ELEMENT_WARNING (demux, STREAM, DEMUX, (NULL),
+              ("Failed to finish reading headers"));
+        }
+        gst_element_no_more_pads (GST_ELEMENT (demux));
+      }
+
+      if (demux->common.segment.flags & GST_SEEK_FLAG_SEGMENT) {
+        GstEvent *event;
+        GstMessage *msg;
+        gint64 stop;
+
+        /* for segment playback we need to post when (in stream time)
+         * we stopped, this is either stop (when set) or the duration. */
+        if ((stop = demux->common.segment.stop) == -1)
+          stop = demux->last_stop_end;
+
+        GST_LOG_OBJECT (demux, "Sending segment done, at end of segment");
+        msg = gst_message_new_segment_done (GST_OBJECT (demux), GST_FORMAT_TIME,
+            stop);
+        if (demux->segment_seqnum)
+          gst_message_set_seqnum (msg, demux->segment_seqnum);
+        gst_element_post_message (GST_ELEMENT (demux), msg);
+
+        event = gst_event_new_segment_done (GST_FORMAT_TIME, stop);
+        if (demux->segment_seqnum)
+          gst_event_set_seqnum (event, demux->segment_seqnum);
+        gst_matroska_demux_send_event (demux, event);
+      } else {
+        push_eos = TRUE;
+      }
+    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+      /* for fatal errors we post an error message */
+      GST_ELEMENT_FLOW_ERROR (demux, ret);
+      push_eos = TRUE;
+    }
+    if (push_eos) {
+      GstEvent *event;
+
+      /* send EOS, and prevent hanging if no streams yet */
+      GST_LOG_OBJECT (demux, "Sending EOS, at end of stream");
+      event = gst_event_new_eos ();
+      if (demux->segment_seqnum)
+        gst_event_set_seqnum (event, demux->segment_seqnum);
+      if (!gst_matroska_demux_send_event (demux, event) &&
+          (ret == GST_FLOW_EOS)) {
+        GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+            (NULL), ("got eos but no streams (yet)"));
+      }
+    }
+    return;
+  }
+}
+
+/*
+ * Ask upstream to reposition the stream: push a flushing, accurate,
+ * BYTE-format seek event for @offset on the sink pad, tagged with
+ * @seqnum.  Returns TRUE if upstream accepted the event.
+ */
+static gboolean
+perform_seek_to_offset (GstMatroskaDemux * demux, gdouble rate, guint64 offset,
+    guint32 seqnum, GstSeekFlags flags)
+{
+  GstEvent *seek;
+  gboolean result;
+
+  GST_DEBUG_OBJECT (demux, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+  seek = gst_event_new_seek (rate, GST_FORMAT_BYTES,
+      flags | GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
+      GST_SEEK_TYPE_SET, offset, GST_SEEK_TYPE_NONE, -1);
+  gst_event_set_seqnum (seek, seqnum);
+
+  result = gst_pad_push_event (demux->common.sinkpad, seek);
+
+  /* segment event will update offset */
+  return result;
+}
+
+/* Push-mode chain function: accumulate incoming buffers in the adapter and
+ * parse as many complete elements as are available.  On parse errors it
+ * enters a byte-wise resync scan for the next cluster, bounded by
+ * INVALID_DATA_THRESHOLD; a DISCONT buffer clears the adapter and resets
+ * stream state. */
+static GstFlowReturn
+gst_matroska_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+  guint available;
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint needed = 0;
+  guint32 id;
+  guint64 length;
+
+  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (buffer))) {
+    GST_DEBUG_OBJECT (demux, "got DISCONT");
+    gst_adapter_clear (demux->common.adapter);
+    GST_OBJECT_LOCK (demux);
+    gst_matroska_read_common_reset_streams (&demux->common,
+        GST_CLOCK_TIME_NONE, FALSE);
+    GST_OBJECT_UNLOCK (demux);
+  }
+
+  gst_adapter_push (demux->common.adapter, buffer);
+  /* adapter now owns the buffer */
+  buffer = NULL;
+
+next:
+  available = gst_adapter_available (demux->common.adapter);
+
+  ret = gst_matroska_read_common_peek_id_length_push (&demux->common,
+      GST_ELEMENT_CAST (demux), &id, &length, &needed);
+  if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)) {
+    /* a peek failure past the end of the segment is not an error */
+    if (demux->common.ebml_segment_length != G_MAXUINT64
+        && demux->common.offset >=
+        demux->common.ebml_segment_start + demux->common.ebml_segment_length) {
+      return GST_FLOW_OK;
+    } else {
+      gint64 bytes_scanned;
+      /* start (or continue) a resync scan from the failing offset */
+      if (demux->common.start_resync_offset == -1) {
+        demux->common.start_resync_offset = demux->common.offset;
+        demux->common.state_to_restore = demux->common.state;
+      }
+      bytes_scanned = demux->common.offset - demux->common.start_resync_offset;
+      if (bytes_scanned <= INVALID_DATA_THRESHOLD) {
+        GST_WARNING_OBJECT (demux,
+            "parse error, looking for next cluster, actual offset %"
+            G_GUINT64_FORMAT ", start resync offset %" G_GUINT64_FORMAT,
+            demux->common.offset, demux->common.start_resync_offset);
+        demux->common.state = GST_MATROSKA_READ_STATE_SCANNING;
+        ret = GST_FLOW_OK;
+      } else {
+        GST_WARNING_OBJECT (demux,
+            "unrecoverable parse error, next cluster not found and threshold "
+            "exceeded, bytes scanned %" G_GINT64_FORMAT, bytes_scanned);
+        return ret;
+      }
+    }
+  }
+
+  GST_LOG_OBJECT (demux, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+      "size %" G_GUINT64_FORMAT ", needed %d, available %d",
+      demux->common.offset, id, length, needed, available);
+
+  /* not enough buffered for the element prefix yet; wait for more data */
+  if (needed > available)
+    return GST_FLOW_OK;
+
+  ret = gst_matroska_demux_parse_id (demux, id, length, needed);
+  if (ret == GST_FLOW_EOS) {
+    /* need more data */
+    return GST_FLOW_OK;
+  } else if (ret != GST_FLOW_OK) {
+    return ret;
+  } else
+    goto next;
+}
+
+/* Sink-pad event handler: consumes BYTE-format SEGMENT events (e.g. after
+ * an upstream seek) to update streaming state, validates EOS against the
+ * parse state, resets stream state on FLUSH_STOP, and forwards everything
+ * else to the default handler. */
+static gboolean
+gst_matroska_demux_handle_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  gboolean res = TRUE;
+  GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+
+  GST_DEBUG_OBJECT (demux,
+      "have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEGMENT:
+    {
+      const GstSegment *segment;
+
+      /* some debug output */
+      gst_event_parse_segment (event, &segment);
+      /* FIXME: do we need to update segment base here (like accum in 0.10)? */
+      GST_DEBUG_OBJECT (demux,
+          "received format %d segment %" GST_SEGMENT_FORMAT, segment->format,
+          segment);
+
+      if (demux->common.state < GST_MATROSKA_READ_STATE_DATA) {
+        GST_DEBUG_OBJECT (demux, "still starting");
+        goto exit;
+      }
+
+      /* we only expect a BYTE segment, e.g. following a seek */
+      if (segment->format != GST_FORMAT_BYTES) {
+        GST_DEBUG_OBJECT (demux, "unsupported segment format, ignoring");
+        goto exit;
+      }
+
+      GST_DEBUG_OBJECT (demux, "clearing segment state");
+      GST_OBJECT_LOCK (demux);
+      /* clear current segment leftover */
+      gst_adapter_clear (demux->common.adapter);
+      /* and some streaming setup */
+      demux->common.offset = segment->start;
+      /* accumulate base based on current position */
+      if (GST_CLOCK_TIME_IS_VALID (demux->common.segment.position))
+        demux->common.segment.base +=
+            (MAX (demux->common.segment.position, demux->stream_start_time)
+            - demux->stream_start_time) / fabs (demux->common.segment.rate);
+      /* do not know where we are;
+       * need to come across a cluster and generate segment */
+      demux->common.segment.position = GST_CLOCK_TIME_NONE;
+      demux->cluster_time = GST_CLOCK_TIME_NONE;
+      demux->cluster_offset = 0;
+      demux->cluster_prevsize = 0;
+      demux->need_segment = TRUE;
+      demux->segment_seqnum = gst_event_get_seqnum (event);
+      /* but keep some of the upstream segment */
+      demux->common.segment.rate = segment->rate;
+      demux->common.segment.flags = segment->flags;
+      /* also check if need to keep some of the requested seek position */
+      if (demux->seek_offset == segment->start) {
+        GST_DEBUG_OBJECT (demux, "position matches requested seek");
+        demux->common.segment.position = demux->requested_seek_time;
+      } else {
+        GST_DEBUG_OBJECT (demux, "unexpected segment position");
+      }
+      demux->requested_seek_time = GST_CLOCK_TIME_NONE;
+      demux->seek_offset = -1;
+      GST_OBJECT_UNLOCK (demux);
+    exit:
+      /* chain will send initial segment after pads have been added,
+       * or otherwise come up with one */
+      GST_DEBUG_OBJECT (demux, "eating event");
+      gst_event_unref (event);
+      res = TRUE;
+      break;
+    }
+    case GST_EVENT_EOS:
+    {
+      /* EOS before headers finished (and not resyncing) is an error */
+      if (demux->common.state != GST_MATROSKA_READ_STATE_DATA
+          && demux->common.state != GST_MATROSKA_READ_STATE_SCANNING) {
+        gst_event_unref (event);
+        GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+            (NULL), ("got eos and didn't receive a complete header object"));
+      } else if (demux->common.num_streams == 0) {
+        GST_ELEMENT_ERROR (demux, STREAM, DEMUX,
+            (NULL), ("got eos but no streams (yet)"));
+      } else {
+        gst_matroska_demux_send_event (demux, event);
+      }
+      break;
+    }
+    case GST_EVENT_FLUSH_STOP:
+    {
+      guint64 dur;
+
+      gst_adapter_clear (demux->common.adapter);
+      GST_OBJECT_LOCK (demux);
+      gst_matroska_read_common_reset_streams (&demux->common,
+          GST_CLOCK_TIME_NONE, TRUE);
+      gst_flow_combiner_reset (demux->flowcombiner);
+      /* re-init the segment but preserve the known duration */
+      dur = demux->common.segment.duration;
+      gst_segment_init (&demux->common.segment, GST_FORMAT_TIME);
+      demux->common.segment.duration = dur;
+      demux->cluster_time = GST_CLOCK_TIME_NONE;
+      demux->cluster_offset = 0;
+      demux->cluster_prevsize = 0;
+      GST_OBJECT_UNLOCK (demux);
+      /* fall-through */
+    }
+    default:
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+
+  return res;
+}
+
+/* Choose the sink pad scheduling mode: pull mode if upstream reports
+ * seekable pull-based scheduling, push (streaming) mode otherwise.
+ * Also records the choice in demux->streaming for later decisions. */
+static gboolean
+gst_matroska_demux_sink_activate (GstPad * sinkpad, GstObject * parent)
+{
+  GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (parent);
+  gboolean can_pull = FALSE;
+  GstQuery *sched_query = gst_query_new_scheduling ();
+
+  if (gst_pad_peer_query (sinkpad, sched_query)) {
+    can_pull = gst_query_has_scheduling_mode_with_flags (sched_query,
+        GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+  }
+  gst_query_unref (sched_query);
+
+  if (!can_pull) {
+    GST_DEBUG ("going to push (streaming) mode");
+    demux->streaming = TRUE;
+    return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+  }
+
+  GST_DEBUG ("going to pull mode");
+  demux->streaming = FALSE;
+  return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+}
+
+/* Activate/deactivate the chosen scheduling mode: pull mode starts or
+ * stops the streaming task; push mode needs no task of its own. */
+static gboolean
+gst_matroska_demux_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+    GstPadMode mode, gboolean active)
+{
+  if (mode == GST_PAD_MODE_PUSH)
+    return TRUE;
+
+  if (mode != GST_PAD_MODE_PULL)
+    return FALSE;
+
+  if (active) {
+    /* we have a scheduler, so the streaming task can run */
+    gst_pad_start_task (sinkpad, (GstTaskFunction) gst_matroska_demux_loop,
+        sinkpad, NULL);
+  } else {
+    gst_pad_stop_task (sinkpad);
+  }
+
+  return TRUE;
+}
+
+static GstCaps *
+gst_matroska_demux_video_caps (GstMatroskaTrackVideoContext *
+ videocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint32 * riff_fourcc)
+{
+ GstMatroskaTrackContext *context = (GstMatroskaTrackContext *) videocontext;
+ GstCaps *caps = NULL;
+
+ g_assert (videocontext != NULL);
+ g_assert (codec_name != NULL);
+
+ if (riff_fourcc)
+ *riff_fourcc = 0;
+
+ /* TODO: check if we have all codec types from matroska-ids.h
+ * check if we have to do more special things with codec_private
+ *
+ * Add support for
+ * GST_MATROSKA_CODEC_ID_VIDEO_QUICKTIME
+ * GST_MATROSKA_CODEC_ID_VIDEO_SNOW
+ */
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VFW_FOURCC)) {
+ gst_riff_strf_vids *vids = NULL;
+
+ if (data) {
+ GstBuffer *buf = NULL;
+
+ vids = (gst_riff_strf_vids *) data;
+
+ /* assure size is big enough */
+ if (size < 24) {
+ GST_WARNING ("Too small BITMAPINFOHEADER (%d bytes)", size);
+ return NULL;
+ }
+ if (size < sizeof (gst_riff_strf_vids)) {
+ vids = g_new (gst_riff_strf_vids, 1);
+ memcpy (vids, data, size);
+ }
+
+ context->dts_only = TRUE; /* VFW files only store DTS */
+
+ /* little-endian -> byte-order */
+ vids->size = GUINT32_FROM_LE (vids->size);
+ vids->width = GUINT32_FROM_LE (vids->width);
+ vids->height = GUINT32_FROM_LE (vids->height);
+ vids->planes = GUINT16_FROM_LE (vids->planes);
+ vids->bit_cnt = GUINT16_FROM_LE (vids->bit_cnt);
+ vids->compression = GUINT32_FROM_LE (vids->compression);
+ vids->image_size = GUINT32_FROM_LE (vids->image_size);
+ vids->xpels_meter = GUINT32_FROM_LE (vids->xpels_meter);
+ vids->ypels_meter = GUINT32_FROM_LE (vids->ypels_meter);
+ vids->num_colors = GUINT32_FROM_LE (vids->num_colors);
+ vids->imp_colors = GUINT32_FROM_LE (vids->imp_colors);
+
+ if (size > sizeof (gst_riff_strf_vids)) { /* some extra_data */
+ gsize offset = sizeof (gst_riff_strf_vids);
+
+ buf = gst_buffer_new_memdup ((guint8 *) vids + offset, size - offset);
+ }
+
+ if (riff_fourcc)
+ *riff_fourcc = vids->compression;
+
+ caps = gst_riff_create_video_caps (vids->compression, NULL, vids,
+ buf, NULL, codec_name);
+
+ if (caps == NULL) {
+ GST_WARNING ("Unhandled RIFF fourcc %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (vids->compression));
+ } else {
+ static GstStaticCaps intra_caps = GST_STATIC_CAPS ("image/jpeg; "
+ "video/x-raw; image/png; video/x-dv; video/x-huffyuv; video/x-ffv; "
+ "video/x-compressed-yuv");
+ context->intra_only =
+ gst_caps_can_intersect (gst_static_caps_get (&intra_caps), caps);
+ }
+
+ if (buf)
+ gst_buffer_unref (buf);
+
+ if (vids != (gst_riff_strf_vids *) data)
+ g_free (vids);
+ }
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_UNCOMPRESSED)) {
+ GstVideoInfo info;
+ GstVideoFormat format;
+
+ gst_video_info_init (&info);
+ switch (videocontext->fourcc) {
+ case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ format = GST_VIDEO_FORMAT_I420;
+ break;
+ case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
+ format = GST_VIDEO_FORMAT_YUY2;
+ break;
+ case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+ format = GST_VIDEO_FORMAT_YV12;
+ break;
+ case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
+ format = GST_VIDEO_FORMAT_UYVY;
+ break;
+ case GST_MAKE_FOURCC ('A', 'Y', 'U', 'V'):
+ format = GST_VIDEO_FORMAT_AYUV;
+ break;
+ case GST_MAKE_FOURCC ('Y', '8', '0', '0'):
+ case GST_MAKE_FOURCC ('Y', '8', ' ', ' '):
+ format = GST_VIDEO_FORMAT_GRAY8;
+ break;
+ case GST_MAKE_FOURCC ('R', 'G', 'B', 24):
+ format = GST_VIDEO_FORMAT_RGB;
+ break;
+ case GST_MAKE_FOURCC ('B', 'G', 'R', 24):
+ format = GST_VIDEO_FORMAT_BGR;
+ break;
+ default:
+ GST_DEBUG ("Unknown fourcc %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (videocontext->fourcc));
+ return NULL;
+ }
+
+ context->intra_only = TRUE;
+
+ gst_video_info_set_format (&info, format, videocontext->pixel_width,
+ videocontext->pixel_height);
+ caps = gst_video_info_to_caps (&info);
+ *codec_name = gst_pb_utils_get_codec_description (caps);
+ context->alignment = 32;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_SP)) {
+ caps = gst_caps_new_simple ("video/x-divx",
+ "divxversion", G_TYPE_INT, 4, NULL);
+ *codec_name = g_strdup ("MPEG-4 simple profile");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_ASP) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AP)) {
+ caps = gst_caps_new_simple ("video/mpeg",
+ "mpegversion", G_TYPE_INT, 4,
+ "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+ if (data) {
+ GstBuffer *priv;
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+
+ gst_codec_utils_mpeg4video_caps_set_level_and_profile (caps, data, size);
+ }
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_ASP))
+ *codec_name = g_strdup ("MPEG-4 advanced simple profile");
+ else
+ *codec_name = g_strdup ("MPEG-4 advanced profile");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MSMPEG4V3)) {
+#if 0
+ caps = gst_caps_new_full (gst_structure_new ("video/x-divx",
+ "divxversion", G_TYPE_INT, 3, NULL),
+ gst_structure_new ("video/x-msmpeg",
+ "msmpegversion", G_TYPE_INT, 43, NULL), NULL);
+#endif
+ caps = gst_caps_new_simple ("video/x-msmpeg",
+ "msmpegversion", G_TYPE_INT, 43, NULL);
+ *codec_name = g_strdup ("Microsoft MPEG-4 v.3");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG1) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG2)) {
+ gint mpegversion;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG1))
+ mpegversion = 1;
+ else
+ mpegversion = 2;
+
+ caps = gst_caps_new_simple ("video/mpeg",
+ "systemstream", G_TYPE_BOOLEAN, FALSE,
+ "mpegversion", G_TYPE_INT, mpegversion, NULL);
+ *codec_name = g_strdup_printf ("MPEG-%d video", mpegversion);
+ context->postprocess_frame = gst_matroska_demux_add_mpeg_seq_header;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MJPEG)) {
+ caps = gst_caps_new_empty_simple ("image/jpeg");
+ *codec_name = g_strdup ("Motion-JPEG");
+ context->intra_only = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AVC)) {
+ caps = gst_caps_new_empty_simple ("video/x-h264");
+ if (data) {
+ GstBuffer *priv;
+
+ /* First byte is the version, second is the profile indication, and third
+ * is the 5 contraint_set_flags and 3 reserved bits. Fourth byte is the
+ * level indication. */
+ gst_codec_utils_h264_caps_set_level_and_profile (caps, data + 1,
+ size - 1);
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "avc",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ } else {
+ GST_WARNING ("No codec data found, assuming output is byte-stream");
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "byte-stream",
+ NULL);
+ }
+ *codec_name = g_strdup ("H264");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_MPEGH_HEVC)) {
+ caps = gst_caps_new_empty_simple ("video/x-h265");
+ if (data) {
+ GstBuffer *priv;
+
+ gst_codec_utils_h265_caps_set_level_tier_and_profile (caps, data + 1,
+ size - 1);
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "hvc1",
+ "alignment", G_TYPE_STRING, "au", NULL);
+ } else {
+ GST_WARNING ("No codec data found, assuming output is byte-stream");
+ gst_caps_set_simple (caps, "stream-format", G_TYPE_STRING, "byte-stream",
+ NULL);
+ }
+ *codec_name = g_strdup ("HEVC");
+ } else if ((!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO1)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO2)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO3)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO4))) {
+ gint rmversion = -1;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO1))
+ rmversion = 1;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO2))
+ rmversion = 2;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO3))
+ rmversion = 3;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO4))
+ rmversion = 4;
+
+ caps = gst_caps_new_simple ("video/x-pn-realvideo",
+ "rmversion", G_TYPE_INT, rmversion, NULL);
+ GST_DEBUG ("data:%p, size:0x%x", data, size);
+ /* We need to extract the extradata ! */
+ if (data && (size >= 0x22)) {
+ GstBuffer *priv;
+ guint rformat;
+ guint subformat;
+
+ subformat = GST_READ_UINT32_BE (data + 0x1a);
+ rformat = GST_READ_UINT32_BE (data + 0x1e);
+
+ priv = gst_buffer_new_memdup (data + 0x1a, size - 0x1a);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, "format",
+ G_TYPE_INT, rformat, "subformat", G_TYPE_INT, subformat, NULL);
+ gst_buffer_unref (priv);
+
+ }
+ *codec_name = g_strdup_printf ("RealVideo %d.0", rmversion);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_THEORA)) {
+ caps = gst_caps_new_empty_simple ("video/x-theora");
+ context->stream_headers =
+ gst_matroska_parse_xiph_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_DIRAC)) {
+ caps = gst_caps_new_empty_simple ("video/x-dirac");
+ *codec_name = g_strdup_printf ("Dirac");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8)) {
+ caps = gst_caps_new_empty_simple ("video/x-vp8");
+ if (videocontext->alpha_mode)
+ gst_caps_set_simple (caps, "codec-alpha", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup_printf ("On2 VP8");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP9)) {
+ caps = gst_caps_new_empty_simple ("video/x-vp9");
+ if (videocontext->alpha_mode)
+ gst_caps_set_simple (caps, "codec-alpha", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup_printf ("On2 VP9");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_AV1)) {
+ caps = gst_caps_new_empty_simple ("video/x-av1");
+ if (data) {
+ GstBuffer *priv;
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+ } else {
+ GST_WARNING ("No AV1 codec data found!");
+ }
+ *codec_name = g_strdup_printf ("AOM AV1");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_FFV1)) {
+ caps =
+ gst_caps_new_simple ("video/x-ffv", "ffvversion", G_TYPE_INT, 1, NULL);
+ if (data) {
+ GstBuffer *priv;
+
+ priv = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+ } else {
+ GST_WARNING ("No FFV1 codec data found!");
+ }
+ *codec_name = g_strdup_printf ("FFMpeg v1");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_VIDEO_PRORES)) {
+ guint32 fourcc;
+ const gchar *variant, *variant_descr = "";
+
+ /* Expect a fourcc in the codec private data */
+ if (!data || size < 4) {
+ GST_WARNING ("No or too small PRORESS fourcc (%d bytes)", size);
+ return NULL;
+ }
+
+ fourcc = GST_STR_FOURCC (data);
+ switch (fourcc) {
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 's'):
+ variant_descr = " 4:2:2 LT";
+ variant = "lt";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 'h'):
+ variant = "hq";
+ variant_descr = " 4:2:2 HQ";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', '4', 'h'):
+ variant = "4444";
+ variant_descr = " 4:4:4:4";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 'o'):
+ variant = "proxy";
+ variant_descr = " 4:2:2 Proxy";
+ break;
+ case GST_MAKE_FOURCC ('a', 'p', 'c', 'n'):
+ default:
+ variant = "standard";
+ variant_descr = " 4:2:2 SD";
+ break;
+ }
+
+ GST_LOG ("Prores video, codec fourcc %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (fourcc));
+
+ caps = gst_caps_new_simple ("video/x-prores",
+ "format", G_TYPE_STRING, variant, NULL);
+ *codec_name = g_strdup_printf ("Apple ProRes%s", variant_descr);
+ context->postprocess_frame = gst_matroska_demux_add_prores_header;
+ } else {
+ GST_WARNING ("Unknown codec '%s', cannot build Caps", codec_id);
+ return NULL;
+ }
+
+ if (caps != NULL) {
+ int i;
+ GstStructure *structure;
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ structure = gst_caps_get_structure (caps, i);
+
+ /* FIXME: use the real unit here! */
+ GST_DEBUG ("video size %dx%d, target display size %dx%d (any unit)",
+ videocontext->pixel_width,
+ videocontext->pixel_height,
+ videocontext->display_width, videocontext->display_height);
+
+ /* pixel width and height are the w and h of the video in pixels */
+ if (videocontext->pixel_width > 0 && videocontext->pixel_height > 0) {
+ gint w = videocontext->pixel_width;
+ gint h = videocontext->pixel_height;
+
+ gst_structure_set (structure,
+ "width", G_TYPE_INT, w, "height", G_TYPE_INT, h, NULL);
+ }
+
+ if (videocontext->display_width > 0 || videocontext->display_height > 0) {
+ int n, d;
+
+ if (videocontext->display_width <= 0)
+ videocontext->display_width = videocontext->pixel_width;
+ if (videocontext->display_height <= 0)
+ videocontext->display_height = videocontext->pixel_height;
+
+ /* calculate the pixel aspect ratio using the display and pixel w/h */
+ n = videocontext->display_width * videocontext->pixel_height;
+ d = videocontext->display_height * videocontext->pixel_width;
+ GST_DEBUG ("setting PAR to %d/%d", n, d);
+ gst_structure_set (structure, "pixel-aspect-ratio",
+ GST_TYPE_FRACTION,
+ videocontext->display_width * videocontext->pixel_height,
+ videocontext->display_height * videocontext->pixel_width, NULL);
+ }
+
+ if (videocontext->default_fps > 0.0) {
+ gint fps_n, fps_d;
+
+ gst_util_double_to_fraction (videocontext->default_fps, &fps_n, &fps_d);
+
+ GST_DEBUG ("using default fps %d/%d", fps_n, fps_d);
+
+ gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n,
+ fps_d, NULL);
+ } else if (context->default_duration > 0) {
+ int fps_n, fps_d;
+
+ gst_video_guess_framerate (context->default_duration, &fps_n, &fps_d);
+
+ GST_INFO ("using default duration %" G_GUINT64_FORMAT
+ " framerate %d/%d", context->default_duration, fps_n, fps_d);
+
+ gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
+ fps_n, fps_d, NULL);
+ } else {
+ gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
+ 0, 1, NULL);
+ }
+
+ switch (videocontext->interlace_mode) {
+ case GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE:
+ gst_structure_set (structure,
+ "interlace-mode", G_TYPE_STRING, "progressive", NULL);
+ break;
+ case GST_MATROSKA_INTERLACE_MODE_INTERLACED:
+ gst_structure_set (structure,
+ "interlace-mode", G_TYPE_STRING, "interleaved", NULL);
+
+ if (videocontext->field_order != GST_VIDEO_FIELD_ORDER_UNKNOWN)
+ gst_structure_set (structure, "field-order", G_TYPE_STRING,
+ gst_video_field_order_to_string (videocontext->field_order),
+ NULL);
+ break;
+ default:
+ break;
+ }
+ }
+ if (videocontext->multiview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
+ if (gst_video_multiview_guess_half_aspect (videocontext->multiview_mode,
+ videocontext->pixel_width, videocontext->pixel_height,
+ videocontext->display_width * videocontext->pixel_height,
+ videocontext->display_height * videocontext->pixel_width)) {
+ videocontext->multiview_flags |= GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT;
+ }
+ gst_caps_set_simple (caps,
+ "multiview-mode", G_TYPE_STRING,
+ gst_video_multiview_mode_to_caps_string
+ (videocontext->multiview_mode), "multiview-flags",
+ GST_TYPE_VIDEO_MULTIVIEW_FLAGSET, videocontext->multiview_flags,
+ GST_FLAG_SET_MASK_EXACT, NULL);
+ }
+
+ if (videocontext->colorimetry.range != GST_VIDEO_COLOR_RANGE_UNKNOWN ||
+ videocontext->colorimetry.matrix != GST_VIDEO_COLOR_MATRIX_UNKNOWN ||
+ videocontext->colorimetry.transfer != GST_VIDEO_TRANSFER_UNKNOWN ||
+ videocontext->colorimetry.primaries !=
+ GST_VIDEO_COLOR_PRIMARIES_UNKNOWN) {
+ gchar *colorimetry =
+ gst_video_colorimetry_to_string (&videocontext->colorimetry);
+ gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, colorimetry,
+ NULL);
+ GST_DEBUG ("setting colorimetry to %s", colorimetry);
+ g_free (colorimetry);
+ }
+
+ if (videocontext->mastering_display_info_present) {
+ if (!gst_video_mastering_display_info_add_to_caps
+ (&videocontext->mastering_display_info, caps)) {
+ GST_WARNING ("couldn't set mastering display info to caps");
+ }
+ }
+
+ if (videocontext->content_light_level.max_content_light_level &&
+ videocontext->content_light_level.max_frame_average_light_level) {
+ if (!gst_video_content_light_level_add_to_caps
+ (&videocontext->content_light_level, caps)) {
+ GST_WARNING ("couldn't set content light level to caps");
+ }
+ }
+
+ caps = gst_caps_simplify (caps);
+ }
+
+ return caps;
+}
+
+/*
+ * Some AAC specific code... *sigh*
+ * FIXME: maybe we should use '15' and code the sample rate explicitly
+ * if the sample rate doesn't match the predefined rates exactly? (tpm)
+ */
+
+static gint
+aac_rate_idx (gint rate)
+{
+ if (92017 <= rate)
+ return 0;
+ else if (75132 <= rate)
+ return 1;
+ else if (55426 <= rate)
+ return 2;
+ else if (46009 <= rate)
+ return 3;
+ else if (37566 <= rate)
+ return 4;
+ else if (27713 <= rate)
+ return 5;
+ else if (23004 <= rate)
+ return 6;
+ else if (18783 <= rate)
+ return 7;
+ else if (13856 <= rate)
+ return 8;
+ else if (11502 <= rate)
+ return 9;
+ else if (9391 <= rate)
+ return 10;
+ else
+ return 11;
+}
+
+static gint
+aac_profile_idx (const gchar * codec_id)
+{
+ gint profile;
+
+ if (strlen (codec_id) <= 12)
+ profile = 3;
+ else if (!strncmp (&codec_id[12], "MAIN", 4))
+ profile = 0;
+ else if (!strncmp (&codec_id[12], "LC", 2))
+ profile = 1;
+ else if (!strncmp (&codec_id[12], "SSR", 3))
+ profile = 2;
+ else
+ profile = 3;
+
+ return profile;
+}
+
+static guint
+round_up_pow2 (guint n)
+{
+ n = n - 1;
+ n = n | (n >> 1);
+ n = n | (n >> 2);
+ n = n | (n >> 4);
+ n = n | (n >> 8);
+ n = n | (n >> 16);
+ return n + 1;
+}
+
+#define AAC_SYNC_EXTENSION_TYPE 0x02b7
+
+static GstCaps *
+gst_matroska_demux_audio_caps (GstMatroskaTrackAudioContext *
+ audiocontext, const gchar * codec_id, guint8 * data, guint size,
+ gchar ** codec_name, guint16 * riff_audio_fmt, GstClockTime * lead_in_ts)
+{
+ GstMatroskaTrackContext *context = (GstMatroskaTrackContext *) audiocontext;
+ GstCaps *caps = NULL;
+ guint lead_in = 0;
+ /* Max potential blocksize causing the longest possible lead_in_ts need, as
+ * we don't have the exact number parsed out here */
+ guint max_blocksize = 0;
+ /* Original samplerate before SBR multiplications, as parsers would use */
+ guint rate = audiocontext->samplerate;
+
+ g_assert (audiocontext != NULL);
+ g_assert (codec_name != NULL);
+
+ if (riff_audio_fmt)
+ *riff_audio_fmt = 0;
+
+ /* TODO: check if we have all codec types from matroska-ids.h
+ * check if we have to do more special things with codec_private
+ * check if we need bitdepth in different places too
+ * implement channel position magic
+ * Add support for:
+ * GST_MATROSKA_CODEC_ID_AUDIO_AC3_BSID9
+ * GST_MATROSKA_CODEC_ID_AUDIO_AC3_BSID10
+ * GST_MATROSKA_CODEC_ID_AUDIO_QUICKTIME_QDMC
+ * GST_MATROSKA_CODEC_ID_AUDIO_QUICKTIME_QDM2
+ */
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L1) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L2) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L3)) {
+ gint layer;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L1))
+ layer = 1;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L2))
+ layer = 2;
+ else
+ layer = 3;
+
+ lead_in = 30; /* Could mp2 need as much too? */
+ max_blocksize = 1152;
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, 1, "layer", G_TYPE_INT, layer, NULL);
+ *codec_name = g_strdup_printf ("MPEG-1 layer %d", layer);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE) ||
+ !strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_LE)) {
+ gboolean sign;
+ gint endianness;
+ GstAudioFormat format;
+
+ sign = (audiocontext->bitdepth != 8);
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE))
+ endianness = G_BIG_ENDIAN;
+ else
+ endianness = G_LITTLE_ENDIAN;
+
+ format = gst_audio_format_build_integer (sign, endianness,
+ audiocontext->bitdepth, audiocontext->bitdepth);
+
+ /* FIXME: Channel mask and reordering */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, gst_audio_format_to_string (format),
+ "layout", G_TYPE_STRING, "interleaved",
+ "channel-mask", GST_TYPE_BITMASK,
+ gst_audio_channel_get_fallback_mask (audiocontext->channels), NULL);
+
+ *codec_name = g_strdup_printf ("Raw %d-bit PCM audio",
+ audiocontext->bitdepth);
+ context->alignment = GST_ROUND_UP_8 (audiocontext->bitdepth) / 8;
+ context->alignment = round_up_pow2 (context->alignment);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_PCM_FLOAT)) {
+ const gchar *format;
+ if (audiocontext->bitdepth == 32)
+ format = "F32LE";
+ else
+ format = "F64LE";
+ /* FIXME: Channel mask and reordering */
+ caps = gst_caps_new_simple ("audio/x-raw",
+ "format", G_TYPE_STRING, format,
+ "layout", G_TYPE_STRING, "interleaved",
+ "channel-mask", GST_TYPE_BITMASK,
+ gst_audio_channel_get_fallback_mask (audiocontext->channels), NULL);
+ *codec_name = g_strdup_printf ("Raw %d-bit floating-point audio",
+ audiocontext->bitdepth);
+ context->alignment = audiocontext->bitdepth / 8;
+ context->alignment = round_up_pow2 (context->alignment);
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AC3,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_AC3))) {
+ lead_in = 2;
+ max_blocksize = 1536;
+ caps = gst_caps_new_simple ("audio/x-ac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup ("AC-3 audio");
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_EAC3,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_EAC3))) {
+ lead_in = 2;
+ max_blocksize = 1536;
+ caps = gst_caps_new_simple ("audio/x-eac3",
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup ("E-AC-3 audio");
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_TRUEHD,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_TRUEHD))) {
+ caps = gst_caps_new_empty_simple ("audio/x-true-hd");
+ *codec_name = g_strdup ("Dolby TrueHD");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_DTS)) {
+ caps = gst_caps_new_empty_simple ("audio/x-dts");
+ *codec_name = g_strdup ("DTS audio");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_VORBIS)) {
+ caps = gst_caps_new_empty_simple ("audio/x-vorbis");
+ context->stream_headers =
+ gst_matroska_parse_xiph_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_FLAC)) {
+ caps = gst_caps_new_empty_simple ("audio/x-flac");
+ context->stream_headers =
+ gst_matroska_parse_flac_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_SPEEX)) {
+ caps = gst_caps_new_empty_simple ("audio/x-speex");
+ context->stream_headers =
+ gst_matroska_parse_speex_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_OPUS)) {
+ GstBuffer *tmp;
+
+ if (context->codec_priv_size >= 19) {
+ if (audiocontext->samplerate)
+ GST_WRITE_UINT32_LE ((guint8 *) context->codec_priv + 12,
+ audiocontext->samplerate);
+ if (context->codec_delay) {
+ guint64 delay =
+ gst_util_uint64_scale_round (context->codec_delay, 48000,
+ GST_SECOND);
+ GST_WRITE_UINT16_LE ((guint8 *) context->codec_priv + 10, delay);
+ }
+
+ tmp =
+ gst_buffer_new_memdup (context->codec_priv, context->codec_priv_size);
+ caps = gst_codec_utils_opus_create_caps_from_header (tmp, NULL);
+ gst_buffer_unref (tmp);
+ *codec_name = g_strdup ("Opus");
+ } else if (context->codec_priv_size == 0) {
+ GST_WARNING ("No Opus codec data found, trying to create one");
+ if (audiocontext->channels <= 2) {
+ guint8 streams, coupled, channels;
+ guint32 samplerate;
+
+ samplerate =
+ audiocontext->samplerate == 0 ? 48000 : audiocontext->samplerate;
+ rate = samplerate;
+ channels = audiocontext->channels == 0 ? 2 : audiocontext->channels;
+ if (channels == 1) {
+ streams = 1;
+ coupled = 0;
+ } else {
+ streams = 1;
+ coupled = 1;
+ }
+
+ caps =
+ gst_codec_utils_opus_create_caps (samplerate, channels, 0, streams,
+ coupled, NULL);
+ if (caps) {
+ *codec_name = g_strdup ("Opus");
+ } else {
+ GST_WARNING ("Failed to create Opus caps from audio context");
+ }
+ } else {
+ GST_WARNING ("No Opus codec data, and not enough info to create one");
+ }
+ } else {
+ GST_WARNING ("Invalid Opus codec data size (got %" G_GSIZE_FORMAT
+ ", expected 19)", context->codec_priv_size);
+ }
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_ACM)) {
+ gst_riff_strf_auds auds;
+
+ if (data && size >= 18) {
+ GstBuffer *codec_data = NULL;
+
+ /* little-endian -> byte-order */
+ auds.format = GST_READ_UINT16_LE (data);
+ auds.channels = GST_READ_UINT16_LE (data + 2);
+ auds.rate = GST_READ_UINT32_LE (data + 4);
+ auds.av_bps = GST_READ_UINT32_LE (data + 8);
+ auds.blockalign = GST_READ_UINT16_LE (data + 12);
+ auds.bits_per_sample = GST_READ_UINT16_LE (data + 16);
+
+ /* 18 is the waveformatex size */
+ if (size > 18) {
+ codec_data = gst_buffer_new_wrapped_full (GST_MEMORY_FLAG_READONLY,
+ data + 18, size - 18, 0, size - 18, NULL, NULL);
+ }
+
+ if (riff_audio_fmt)
+ *riff_audio_fmt = auds.format;
+
+ /* FIXME: Handle reorder map */
+ caps = gst_riff_create_audio_caps (auds.format, NULL, &auds, codec_data,
+ NULL, codec_name, NULL);
+ if (codec_data)
+ gst_buffer_unref (codec_data);
+
+ if (caps == NULL) {
+ GST_WARNING ("Unhandled RIFF audio format 0x%02x", auds.format);
+ }
+ } else {
+ GST_WARNING ("Invalid codec data size (%d expected, got %d)", 18, size);
+ }
+ } else if (g_str_has_prefix (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC)) {
+ GstBuffer *priv = NULL;
+ gint mpegversion;
+ gint rate_idx, profile;
+ guint8 *data = NULL;
+
+ /* unspecified AAC profile with opaque private codec data */
+ if (strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC) == 0) {
+ if (context->codec_priv_size >= 2) {
+ guint obj_type, freq_index, explicit_freq_bytes = 0;
+
+ codec_id = GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4;
+ mpegversion = 4;
+ freq_index = (GST_READ_UINT16_BE (context->codec_priv) & 0x780) >> 7;
+ obj_type = (GST_READ_UINT16_BE (context->codec_priv) & 0xF800) >> 11;
+ if (freq_index == 15)
+ explicit_freq_bytes = 3;
+ GST_DEBUG ("obj_type = %u, freq_index = %u", obj_type, freq_index);
+ priv = gst_buffer_new_memdup (context->codec_priv,
+ context->codec_priv_size);
+ /* assume SBR if samplerate <= 24kHz */
+ if (obj_type == 5 || (freq_index >= 6 && freq_index != 15) ||
+ (context->codec_priv_size == (5 + explicit_freq_bytes))) {
+ /* TODO: Commonly aacparse will reset the rate in caps to
+ * non-multiplied - which one is correct? */
+ audiocontext->samplerate *= 2;
+ }
+ } else {
+ GST_WARNING ("Opaque A_AAC codec ID, but no codec private data");
+ /* this is pretty broken;
+ * maybe we need to make up some default private,
+ * or maybe ADTS data got dumped in.
+ * Let's set up some private data now, and check actual data later */
+ /* just try this and see what happens ... */
+ codec_id = GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4;
+ context->postprocess_frame = gst_matroska_demux_check_aac;
+ }
+ }
+
+ /* make up decoder-specific data if it is not supplied */
+ if (priv == NULL) {
+ GstMapInfo map;
+
+ priv = gst_buffer_new_allocate (NULL, 5, NULL);
+ gst_buffer_map (priv, &map, GST_MAP_WRITE);
+ data = map.data;
+ rate_idx = aac_rate_idx (audiocontext->samplerate);
+ profile = aac_profile_idx (codec_id);
+
+ data[0] = ((profile + 1) << 3) | ((rate_idx & 0xE) >> 1);
+ data[1] = ((rate_idx & 0x1) << 7) | (audiocontext->channels << 3);
+
+ if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG2,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG2))) {
+ mpegversion = 2;
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_set_size (priv, 2);
+ } else if (!strncmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4,
+ strlen (GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4))) {
+ mpegversion = 4;
+
+ if (g_strrstr (codec_id, "SBR")) {
+ /* HE-AAC (aka SBR AAC) */
+ audiocontext->samplerate *= 2;
+ rate_idx = aac_rate_idx (audiocontext->samplerate);
+ data[2] = AAC_SYNC_EXTENSION_TYPE >> 3;
+ data[3] = ((AAC_SYNC_EXTENSION_TYPE & 0x07) << 5) | 5;
+ data[4] = (1 << 7) | (rate_idx << 3);
+ gst_buffer_unmap (priv, &map);
+ } else {
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_set_size (priv, 2);
+ }
+ } else {
+ gst_buffer_unmap (priv, &map);
+ gst_buffer_unref (priv);
+ priv = NULL;
+ GST_ERROR ("Unknown AAC profile and no codec private data");
+ }
+ }
+
+ if (priv) {
+ lead_in = 2;
+ max_blocksize = 1024;
+ caps = gst_caps_new_simple ("audio/mpeg",
+ "mpegversion", G_TYPE_INT, mpegversion,
+ "framed", G_TYPE_BOOLEAN, TRUE,
+ "stream-format", G_TYPE_STRING, "raw", NULL);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ if (context->codec_priv && context->codec_priv_size > 0)
+ gst_codec_utils_aac_caps_set_level_and_profile (caps,
+ context->codec_priv, context->codec_priv_size);
+ *codec_name = g_strdup_printf ("MPEG-%d AAC audio", mpegversion);
+ gst_buffer_unref (priv);
+ }
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_TTA)) {
+ caps = gst_caps_new_simple ("audio/x-tta",
+ "width", G_TYPE_INT, audiocontext->bitdepth, NULL);
+ *codec_name = g_strdup ("TTA audio");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_WAVPACK4)) {
+ caps = gst_caps_new_simple ("audio/x-wavpack",
+ "width", G_TYPE_INT, audiocontext->bitdepth,
+ "framed", G_TYPE_BOOLEAN, TRUE, NULL);
+ *codec_name = g_strdup ("Wavpack audio");
+ context->postprocess_frame = gst_matroska_demux_add_wvpk_header;
+ audiocontext->wvpk_block_index = 0;
+ } else if ((!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_14_4)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_28_8)) ||
+ (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_COOK))) {
+ gint raversion = -1;
+
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_14_4))
+ raversion = 1;
+ else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_COOK))
+ raversion = 8;
+ else
+ raversion = 2;
+
+ caps = gst_caps_new_simple ("audio/x-pn-realaudio",
+ "raversion", G_TYPE_INT, raversion, NULL);
+ /* Extract extra information from caps, mapping varies based on codec */
+ if (data && (size >= 0x50)) {
+ GstBuffer *priv;
+ guint flavor;
+ guint packet_size;
+ guint height;
+ guint leaf_size;
+ guint sample_width;
+ guint extra_data_size;
+
+ GST_DEBUG ("real audio raversion:%d", raversion);
+ if (raversion == 8) {
+ /* COOK */
+ flavor = GST_READ_UINT16_BE (data + 22);
+ packet_size = GST_READ_UINT32_BE (data + 24);
+ height = GST_READ_UINT16_BE (data + 40);
+ leaf_size = GST_READ_UINT16_BE (data + 44);
+ sample_width = GST_READ_UINT16_BE (data + 58);
+ extra_data_size = GST_READ_UINT32_BE (data + 74);
+
+ GST_DEBUG
+ ("flavor:%d, packet_size:%d, height:%d, leaf_size:%d, sample_width:%d, extra_data_size:%d",
+ flavor, packet_size, height, leaf_size, sample_width,
+ extra_data_size);
+ gst_caps_set_simple (caps, "flavor", G_TYPE_INT, flavor, "packet_size",
+ G_TYPE_INT, packet_size, "height", G_TYPE_INT, height, "leaf_size",
+ G_TYPE_INT, leaf_size, "width", G_TYPE_INT, sample_width, NULL);
+
+ if ((size - 78) >= extra_data_size) {
+ priv = gst_buffer_new_memdup (data + 78, extra_data_size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, priv, NULL);
+ gst_buffer_unref (priv);
+ }
+ }
+ }
+
+ *codec_name = g_strdup_printf ("RealAudio %d.0", raversion);
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_SIPR)) {
+ caps = gst_caps_new_empty_simple ("audio/x-sipro");
+ *codec_name = g_strdup ("Sipro/ACELP.NET Voice Codec");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_RALF)) {
+ caps = gst_caps_new_empty_simple ("audio/x-ralf-mpeg4-generic");
+ *codec_name = g_strdup ("Real Audio Lossless");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_AUDIO_REAL_ATRC)) {
+ caps = gst_caps_new_empty_simple ("audio/x-vnd.sony.atrac3");
+ *codec_name = g_strdup ("Sony ATRAC3");
+ } else {
+ GST_WARNING ("Unknown codec '%s', cannot build Caps", codec_id);
+ return NULL;
+ }
+
+ if (caps != NULL) {
+ if (audiocontext->samplerate > 0 && audiocontext->channels > 0) {
+ gint i;
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ gst_structure_set (gst_caps_get_structure (caps, i),
+ "channels", G_TYPE_INT, audiocontext->channels,
+ "rate", G_TYPE_INT, audiocontext->samplerate, NULL);
+ }
+ }
+
+ caps = gst_caps_simplify (caps);
+ }
+
+ if (lead_in_ts && lead_in && max_blocksize && rate) {
+ *lead_in_ts =
+ gst_util_uint64_scale (GST_SECOND, max_blocksize * lead_in, rate);
+ }
+
+ return caps;
+}
+
+static GstCaps *
+gst_matroska_demux_subtitle_caps (GstMatroskaTrackSubtitleContext *
+ subtitlecontext, const gchar * codec_id, gpointer data, guint size)
+{
+ GstCaps *caps = NULL;
+ GstMatroskaTrackContext *context =
+ (GstMatroskaTrackContext *) subtitlecontext;
+
+ /* for backwards compatibility */
+ if (!g_ascii_strcasecmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_ASCII))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_UTF8;
+ else if (!g_ascii_strcasecmp (codec_id, "S_SSA"))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_SSA;
+ else if (!g_ascii_strcasecmp (codec_id, "S_ASS"))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_ASS;
+ else if (!g_ascii_strcasecmp (codec_id, "S_USF"))
+ codec_id = GST_MATROSKA_CODEC_ID_SUBTITLE_USF;
+
+ /* TODO: Add GST_MATROSKA_CODEC_ID_SUBTITLE_BMP support
+ * Check if we have to do something with codec_private */
+ if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_UTF8)) {
+ /* well, plain text simply does not have a lot of markup ... */
+ caps = gst_caps_new_simple ("text/x-raw", "format", G_TYPE_STRING,
+ "pango-markup", NULL);
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_SSA)) {
+ caps = gst_caps_new_empty_simple ("application/x-ssa");
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = FALSE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_ASS)) {
+ caps = gst_caps_new_empty_simple ("application/x-ass");
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = FALSE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_USF)) {
+ caps = gst_caps_new_empty_simple ("application/x-usf");
+ context->postprocess_frame = gst_matroska_demux_check_subtitle_buffer;
+ subtitlecontext->check_markup = FALSE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_VOBSUB)) {
+ caps = gst_caps_new_empty_simple ("subpicture/x-dvd");
+ ((GstMatroskaTrackContext *) subtitlecontext)->send_dvd_event = TRUE;
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_HDMVPGS)) {
+ caps = gst_caps_new_empty_simple ("subpicture/x-pgs");
+ } else if (!strcmp (codec_id, GST_MATROSKA_CODEC_ID_SUBTITLE_KATE)) {
+ caps = gst_caps_new_empty_simple ("subtitle/x-kate");
+ context->stream_headers =
+ gst_matroska_parse_xiph_stream_headers (context->codec_priv,
+ context->codec_priv_size);
+ /* FIXME: mark stream as broken and skip if there are no stream headers */
+ context->send_stream_headers = TRUE;
+ } else {
+ GST_DEBUG ("Unknown subtitle stream: codec_id='%s'", codec_id);
+ caps = gst_caps_new_empty_simple ("application/x-subtitle-unknown");
+ }
+
+ if (data != NULL && size > 0) {
+ GstBuffer *buf;
+
+ buf = gst_buffer_new_memdup (data, size);
+ gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
+ gst_buffer_unref (buf);
+ }
+
+ return caps;
+}
+
+#if 0
+static void
+gst_matroska_demux_set_index (GstElement * element, GstIndex * index)
+{
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->common.element_index)
+ gst_object_unref (demux->common.element_index);
+ demux->common.element_index = index ? gst_object_ref (index) : NULL;
+ GST_OBJECT_UNLOCK (demux);
+ GST_DEBUG_OBJECT (demux, "Set index %" GST_PTR_FORMAT,
+ demux->common.element_index);
+}
+
+static GstIndex *
+gst_matroska_demux_get_index (GstElement * element)
+{
+ GstIndex *result = NULL;
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+
+ GST_OBJECT_LOCK (demux);
+ if (demux->common.element_index)
+ result = gst_object_ref (demux->common.element_index);
+ GST_OBJECT_UNLOCK (demux);
+
+ GST_DEBUG_OBJECT (demux, "Returning index %" GST_PTR_FORMAT, result);
+
+ return result;
+}
+#endif
+
+static GstStateChangeReturn
+gst_matroska_demux_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstMatroskaDemux *demux = GST_MATROSKA_DEMUX (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ /* handle upwards state changes here */
+ switch (transition) {
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* handle downwards state changes */
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_matroska_demux_reset (GST_ELEMENT (demux));
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+static void
+gst_matroska_demux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstMatroskaDemux *demux;
+
+ g_return_if_fail (GST_IS_MATROSKA_DEMUX (object));
+ demux = GST_MATROSKA_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_MAX_GAP_TIME:
+ GST_OBJECT_LOCK (demux);
+ demux->max_gap_time = g_value_get_uint64 (value);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ case PROP_MAX_BACKTRACK_DISTANCE:
+ GST_OBJECT_LOCK (demux);
+ demux->max_backtrack_distance = g_value_get_uint (value);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_matroska_demux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstMatroskaDemux *demux;
+
+ g_return_if_fail (GST_IS_MATROSKA_DEMUX (object));
+ demux = GST_MATROSKA_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_MAX_GAP_TIME:
+ GST_OBJECT_LOCK (demux);
+ g_value_set_uint64 (value, demux->max_gap_time);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ case PROP_MAX_BACKTRACK_DISTANCE:
+ GST_OBJECT_LOCK (demux);
+ g_value_set_uint (value, demux->max_backtrack_distance);
+ GST_OBJECT_UNLOCK (demux);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static const gchar *
+gst_matroska_track_encryption_algorithm_name (gint val)
+{
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCRYPTION_ALGORITHM_TYPE);
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+}
+
+static const gchar *
+gst_matroska_track_encryption_cipher_mode_name (gint val)
+{
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_TYPE);
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+}
+
+static const gchar *
+gst_matroska_track_encoding_scope_name (gint val)
+{
+ GEnumValue *en;
+ GEnumClass *enum_class =
+ g_type_class_ref (MATROSKA_TRACK_ENCODING_SCOPE_TYPE);
+
+ en = g_enum_get_value (G_ENUM_CLASS (enum_class), val);
+ return en ? en->value_nick : NULL;
+}
diff --git a/gst/matroska/matroska-demux.h b/gst/matroska/matroska-demux.h
new file mode 100644
index 0000000000..a0a27947e5
--- /dev/null
+++ b/gst/matroska/matroska-demux.h
@@ -0,0 +1,140 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2011 Debarshi Ray <rishi@gnu.org>
+ *
+ * matroska-demux.h: matroska file/stream demuxer definition
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MATROSKA_DEMUX_H__
+#define __GST_MATROSKA_DEMUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstflowcombiner.h>
+
+#include "ebml-read.h"
+#include "matroska-ids.h"
+#include "matroska-read-common.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MATROSKA_DEMUX \
+ (gst_matroska_demux_get_type ())
+#define GST_MATROSKA_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MATROSKA_DEMUX, GstMatroskaDemux))
+#define GST_MATROSKA_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MATROSKA_DEMUX, GstMatroskaDemuxClass))
+#define GST_IS_MATROSKA_DEMUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MATROSKA_DEMUX))
+#define GST_IS_MATROSKA_DEMUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MATROSKA_DEMUX))
+
+/* Instance structure for the Matroska/WebM demuxer element.
+ * All parsing state shared with the read-common helpers lives in
+ * 'common'; the remaining fields are demux-specific runtime state. */
+typedef struct _GstMatroskaDemux {
+  GstElement parent;
+
+  /* < private > */
+
+  GstMatroskaReadCommon common;
+
+  /* pads */
+  GstClock *clock;
+  gboolean have_nonintraonly_v_streams;
+  guint num_v_streams;
+  guint num_a_streams;
+  guint num_t_streams;
+
+  guint group_id;
+  gboolean have_group_id;
+
+  GstFlowCombiner *flowcombiner;
+
+  /* state */
+  /* TRUE in push mode (chain-based), FALSE in pull mode */
+  gboolean streaming;
+  guint64 seek_block;
+  gboolean seek_first;
+
+  /* did we parse cues/tracks/segmentinfo already? */
+  gboolean tracks_parsed;
+  GList *seek_parsed;
+
+  /* cluster positions (optional) */
+  GArray *clusters;
+
+  /* keeping track of playback position */
+  GstClockTime last_stop_end;
+  GstClockTime stream_start_time;
+
+  /* Stop time for reverse playback */
+  GstClockTime to_time;
+  GstEvent *new_segment;
+
+  /* some state saving */
+  GstClockTime cluster_time;
+  guint64 cluster_offset;
+  guint64 cluster_prevsize; /* 0 if unknown */
+  guint64 first_cluster_offset;
+  guint64 next_cluster_offset;
+  GstClockTime requested_seek_time;
+  guint64 seek_offset;
+  GstClockTime audio_lead_in_ts;
+
+  /* alternative duration; optionally obtained from last cluster */
+  guint64 last_cluster_offset;
+  GstClockTime stream_last_time;
+
+  /* index stuff */
+  gboolean seekable;
+  gboolean building_index;
+  guint64 index_offset;
+  GstEvent *seek_event;
+  GstEvent *deferred_seek_event;
+  GstPad *deferred_seek_pad;
+  gboolean need_segment;
+  guint32 segment_seqnum;
+
+  /* reverse playback */
+  GArray *seek_index;
+  gint seek_entry;
+
+  gboolean seen_cluster_prevsize; /* We track this because the
+                                   * first cluster won't have
+                                   * this set, so we can't just
+                                   * check cluster_prevsize to
+                                   * determine if it's there
+                                   * or not. We assume if one
+                                   * cluster has it, all but
+                                   * the first will have it. */
+
+  guint max_backtrack_distance; /* in seconds (0 = don't backtrack) */
+
+  /* gap handling */
+  /* in nanoseconds; see the "max-gap-time" property */
+  guint64 max_gap_time;
+
+  /* for non-finalized files, with invalid segment duration */
+  gboolean invalid_duration;
+
+  /* Cached upstream length (default G_MAXUINT64) */
+  guint64 cached_length;
+} GstMatroskaDemux;
+
+/* Class structure: no virtual methods beyond GstElement's. */
+typedef struct _GstMatroskaDemuxClass {
+  GstElementClass parent;
+} GstMatroskaDemuxClass;
+
+G_END_DECLS
+
+#endif /* __GST_MATROSKA_DEMUX_H__ */
diff --git a/gst/matroska/matroska-ids.c b/gst/matroska/matroska-ids.c
new file mode 100644
index 0000000000..eca5279224
--- /dev/null
+++ b/gst/matroska/matroska-ids.c
@@ -0,0 +1,438 @@
+/* GStreamer Matroska muxer/demuxer
+ * (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (C) 2006 Tim-Philipp Müller <tim centricular net>
+ *
+ * matroska-ids.c: matroska track context utility functions
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "matroska-ids.h"
+
+#include <string.h>
+
+/* Grows *p_context into a GstMatroskaTrackVideoContext and fills in video
+ * defaults. Returns TRUE on success (including when the context is already
+ * a video context); FALSE (with a warning) if the context was already set
+ * up as a different track type. *p_context may be reallocated. */
+gboolean
+gst_matroska_track_init_video_context (GstMatroskaTrackContext ** p_context)
+{
+  GstMatroskaTrackVideoContext *video_context;
+
+  g_assert (p_context != NULL && *p_context != NULL);
+
+  /* already set up? (track info might come before track type) */
+  if ((*p_context)->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+    GST_LOG ("video context already set up");
+    return TRUE;
+  }
+
+  /* it better not have been set up as some other track type ... */
+  if ((*p_context)->type != 0) {
+    g_return_val_if_reached (FALSE);
+  }
+
+  /* enlarge the generic context in place to the video-specific struct */
+  video_context = g_renew (GstMatroskaTrackVideoContext, *p_context, 1);
+  *p_context = (GstMatroskaTrackContext *) video_context;
+
+  /* defaults */
+  (*p_context)->type = GST_MATROSKA_TRACK_TYPE_VIDEO;
+  video_context->display_width = 0;
+  video_context->display_height = 0;
+  video_context->pixel_width = 0;
+  video_context->pixel_height = 0;
+  video_context->asr_mode = 0;
+  video_context->fourcc = 0;
+  video_context->default_fps = 0.0;
+  video_context->interlace_mode = GST_MATROSKA_INTERLACE_MODE_UNKNOWN;
+  video_context->field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+  /* note: was assigned twice in an earlier revision; once is enough */
+  video_context->earliest_time = GST_CLOCK_TIME_NONE;
+  video_context->dirac_unit = NULL;
+  video_context->multiview_mode = GST_VIDEO_MULTIVIEW_MODE_NONE;
+  video_context->multiview_flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
+  video_context->alpha_mode = FALSE;
+  video_context->colorimetry.range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
+  video_context->colorimetry.matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
+  video_context->colorimetry.transfer = GST_VIDEO_TRANSFER_UNKNOWN;
+  video_context->colorimetry.primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
+  gst_video_mastering_display_info_init
+      (&video_context->mastering_display_info);
+  video_context->mastering_display_info_present = FALSE;
+  gst_video_content_light_level_init (&video_context->content_light_level);
+
+  return TRUE;
+}
+
+/* Grows *p_context into a GstMatroskaTrackAudioContext and fills in the
+ * Matroska-mandated audio defaults (mono, 8 kHz, 16-bit). Returns TRUE on
+ * success; FALSE if the context was already a non-audio track type.
+ * *p_context may be reallocated. */
+gboolean
+gst_matroska_track_init_audio_context (GstMatroskaTrackContext ** p_context)
+{
+  GstMatroskaTrackAudioContext *audio_context;
+
+  g_assert (p_context != NULL && *p_context != NULL);
+
+  /* already set up? (track info might come before track type) */
+  if ((*p_context)->type == GST_MATROSKA_TRACK_TYPE_AUDIO)
+    return TRUE;
+
+  /* it better not have been set up as some other track type ... */
+  if ((*p_context)->type != 0) {
+    g_return_val_if_reached (FALSE);
+  }
+
+  /* enlarge the generic context in place to the audio-specific struct */
+  audio_context = g_renew (GstMatroskaTrackAudioContext, *p_context, 1);
+  *p_context = (GstMatroskaTrackContext *) audio_context;
+
+  /* defaults */
+  (*p_context)->type = GST_MATROSKA_TRACK_TYPE_AUDIO;
+  audio_context->channels = 1;
+  audio_context->samplerate = 8000;
+  audio_context->bitdepth = 16;
+  audio_context->wvpk_block_index = 0;
+  return TRUE;
+}
+
+/* Grows *p_context into a GstMatroskaTrackSubtitleContext with UTF-8 and
+ * markup validation enabled by default. Returns TRUE on success; FALSE if
+ * the context was already a non-subtitle track type. *p_context may be
+ * reallocated. */
+gboolean
+gst_matroska_track_init_subtitle_context (GstMatroskaTrackContext ** p_context)
+{
+  GstMatroskaTrackSubtitleContext *subtitle_context;
+
+  g_assert (p_context != NULL && *p_context != NULL);
+
+  /* already set up? (track info might come before track type) */
+  if ((*p_context)->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)
+    return TRUE;
+
+  /* it better not have been set up as some other track type ... */
+  if ((*p_context)->type != 0) {
+    g_return_val_if_reached (FALSE);
+  }
+
+  /* enlarge the generic context in place to the subtitle-specific struct */
+  subtitle_context = g_renew (GstMatroskaTrackSubtitleContext, *p_context, 1);
+  *p_context = (GstMatroskaTrackContext *) subtitle_context;
+
+  (*p_context)->type = GST_MATROSKA_TRACK_TYPE_SUBTITLE;
+  subtitle_context->check_utf8 = TRUE;
+  subtitle_context->invalid_utf8 = FALSE;
+  subtitle_context->check_markup = TRUE;
+  subtitle_context->seen_markup_tag = FALSE;
+  return TRUE;
+}
+
+/* Hook for registering Matroska-specific custom GStreamer tags.
+ * Currently a no-op; kept so plugin init has a stable entry point. */
+void
+gst_matroska_register_tags (void)
+{
+  /* TODO: register other custom tags */
+}
+
+/* Splits Xiph-lacing-style codec private data (as used by Vorbis/Theora)
+ * into a list of header buffers. Layout: one byte packet count minus one,
+ * then 0xff-escaped length values for all but the last packet, then the
+ * packet payloads back to back; the last packet takes the remaining bytes.
+ * Returns a new GstBufferList, or NULL on malformed data. */
+GstBufferList *
+gst_matroska_parse_xiph_stream_headers (gpointer codec_data,
+    gsize codec_data_size)
+{
+  GstBufferList *list = NULL;
+  guint8 *p = codec_data;
+  gint i, offset, num_packets;
+  guint *length, last;
+
+  GST_MEMDUMP ("xiph codec data", codec_data, codec_data_size);
+
+  if (codec_data == NULL || codec_data_size == 0)
+    goto error;
+
+  /* start of the stream and vorbis audio or theora video, need to
+   * send the codec_priv data as first three packets */
+  num_packets = p[0] + 1;
+  GST_DEBUG ("%u stream headers, total length=%" G_GSIZE_FORMAT " bytes",
+      (guint) num_packets, codec_data_size);
+
+  /* stack allocation is fine: num_packets <= 256 */
+  length = g_alloca (num_packets * sizeof (guint));
+  last = 0;
+  offset = 1;
+
+  /* first packets, read length values (each length is the sum of bytes
+   * up to and including the first byte that is not 0xff) */
+  for (i = 0; i < num_packets - 1; i++) {
+    length[i] = 0;
+    while (offset < codec_data_size) {
+      length[i] += p[offset];
+      if (p[offset++] != 0xff)
+        break;
+    }
+    last += length[i];
+  }
+  /* all declared payloads must fit in the remaining data */
+  if (offset + last > codec_data_size)
+    goto error;
+
+  /* last packet is the remaining size */
+  length[i] = codec_data_size - offset - last;
+
+  list = gst_buffer_list_new ();
+
+  for (i = 0; i < num_packets; i++) {
+    GstBuffer *hdr;
+
+    GST_DEBUG ("buffer %d: %u bytes", i, (guint) length[i]);
+
+    if (offset + length[i] > codec_data_size)
+      goto error;
+
+    hdr = gst_buffer_new_memdup (p + offset, length[i]);
+    gst_buffer_list_add (list, hdr);
+
+    offset += length[i];
+  }
+
+  return list;
+
+/* ERRORS */
+error:
+  {
+    if (list != NULL)
+      gst_buffer_list_unref (list);
+    return NULL;
+  }
+}
+
+/* Splits Speex codec private data into stream header buffers: the fixed
+ * 80-byte Speex header first, then (if present) the rest as a second
+ * buffer (comment header). Returns a new GstBufferList, or NULL if the
+ * data is too short or lacks the "Speex   " marker. */
+GstBufferList *
+gst_matroska_parse_speex_stream_headers (gpointer codec_data,
+    gsize codec_data_size)
+{
+  GstBufferList *list = NULL;
+  GstBuffer *hdr;
+  guint8 *pdata = codec_data;
+
+  GST_MEMDUMP ("speex codec data", codec_data, codec_data_size);
+
+  if (codec_data == NULL || codec_data_size < 80) {
+    GST_WARNING ("not enough codec priv data for speex headers");
+    return NULL;
+  }
+
+  /* marker is 8 bytes: "Speex" padded with spaces */
+  if (memcmp (pdata, "Speex ", 8) != 0) {
+    GST_WARNING ("no Speex marker at start of stream headers");
+    return NULL;
+  }
+
+  list = gst_buffer_list_new ();
+
+  hdr = gst_buffer_new_memdup (pdata, 80);
+  gst_buffer_list_add (list, hdr);
+
+  if (codec_data_size > 80) {
+    hdr = gst_buffer_new_memdup (pdata + 80, codec_data_size - 80);
+    gst_buffer_list_add (list, hdr);
+  }
+
+  return list;
+}
+
+/* Wraps Opus codec private data (an OpusHead header, minimum 19 bytes)
+ * in a single-buffer GstBufferList. Returns NULL if the data is too
+ * short or lacks the "OpusHead" marker. */
+GstBufferList *
+gst_matroska_parse_opus_stream_headers (gpointer codec_data,
+    gsize codec_data_size)
+{
+  GstBufferList *list = NULL;
+  GstBuffer *hdr;
+  guint8 *pdata = codec_data;
+
+  GST_MEMDUMP ("opus codec data", codec_data, codec_data_size);
+
+  if (codec_data == NULL || codec_data_size < 19) {
+    GST_WARNING ("not enough codec priv data for opus headers");
+    return NULL;
+  }
+
+  if (memcmp (pdata, "OpusHead", 8) != 0) {
+    GST_WARNING ("no OpusHead marker at start of stream headers");
+    return NULL;
+  }
+
+  list = gst_buffer_list_new ();
+
+  /* the whole codec private blob is the single header packet */
+  hdr = gst_buffer_new_memdup (pdata, codec_data_size);
+  gst_buffer_list_add (list, hdr);
+
+  return list;
+}
+
+/* Splits FLAC codec private data into stream header buffers: the 4-byte
+ * "fLaC" marker first, then one buffer per metadata block (4-byte block
+ * header + payload). Returns a new GstBufferList, or NULL on truncated
+ * or unrecognized data. */
+GstBufferList *
+gst_matroska_parse_flac_stream_headers (gpointer codec_data,
+    gsize codec_data_size)
+{
+  GstBufferList *list = NULL;
+  GstBuffer *hdr;
+  guint8 *pdata = codec_data;
+  guint len, off;
+
+  GST_MEMDUMP ("flac codec data", codec_data, codec_data_size);
+
+  /* need at least 'fLaC' marker + STREAMINFO metadata block */
+  if (codec_data == NULL || codec_data_size < ((4) + (4 + 34))) {
+    GST_WARNING ("not enough codec priv data for flac headers");
+    return NULL;
+  }
+
+  if (memcmp (pdata, "fLaC", 4) != 0) {
+    GST_WARNING ("no flac marker at start of stream headers");
+    return NULL;
+  }
+
+  list = gst_buffer_list_new ();
+
+  hdr = gst_buffer_new_memdup (pdata, 4);
+  gst_buffer_list_add (list, hdr);
+
+  /* skip fLaC marker */
+  off = 4;
+
+  while (off < codec_data_size - 3) {
+    /* 24-bit big-endian payload length from the metadata block header */
+    len = GST_READ_UINT8 (pdata + off + 1) << 16;
+    len |= GST_READ_UINT8 (pdata + off + 2) << 8;
+    len |= GST_READ_UINT8 (pdata + off + 3);
+
+    GST_DEBUG ("header packet: len=%u bytes, flags=0x%02x", len, pdata[off]);
+
+    /* we copy the 4-byte block header plus the payload below, so both
+     * must fit; checking only 'len' allowed a 4-byte over-read */
+    if (off + 4 + len > codec_data_size) {
+      gst_buffer_list_unref (list);
+      return NULL;
+    }
+
+    hdr = gst_buffer_new_memdup (pdata + off, len + 4);
+    gst_buffer_list_add (list, hdr);
+
+    off += 4 + len;
+  }
+  return list;
+}
+
+/* Returns the timestamp to use for a buffer on this track: DTS (falling
+ * back to PTS) for tracks flagged dts_only, otherwise the PTS. May return
+ * GST_CLOCK_TIME_NONE if the buffer carries no usable timestamp. */
+GstClockTime
+gst_matroska_track_get_buffer_timestamp (GstMatroskaTrackContext * track,
+    GstBuffer * buf)
+{
+  if (track->dts_only) {
+    return GST_BUFFER_DTS_OR_PTS (buf);
+  } else {
+    return GST_BUFFER_PTS (buf);
+  }
+}
+
+/* Frees a track context and everything it owns: strings, codec data,
+ * caps, content encodings (including per-encoding settings), tags, index
+ * table, stream headers, queued protection events and protection info.
+ * Finally frees the context struct itself. */
+void
+gst_matroska_track_free (GstMatroskaTrackContext * track)
+{
+  g_free (track->codec_id);
+  g_free (track->codec_name);
+  g_free (track->name);
+  g_free (track->language);
+  g_free (track->codec_priv);
+  g_free (track->codec_state);
+  gst_caps_replace (&track->caps, NULL);
+
+  if (track->encodings != NULL) {
+    int i;
+
+    for (i = 0; i < track->encodings->len; ++i) {
+      GstMatroskaTrackEncoding *enc = &g_array_index (track->encodings,
+          GstMatroskaTrackEncoding,
+          i);
+
+      /* each encoding owns its compression settings blob */
+      g_free (enc->comp_settings);
+    }
+    g_array_free (track->encodings, TRUE);
+  }
+
+  if (track->tags)
+    gst_tag_list_unref (track->tags);
+
+  if (track->index_table)
+    g_array_free (track->index_table, TRUE);
+
+  if (track->stream_headers)
+    gst_buffer_list_unref (track->stream_headers);
+
+  /* drop any protection events that were never pushed downstream */
+  g_queue_foreach (&track->protection_event_queue, (GFunc) gst_event_unref,
+      NULL);
+  g_queue_clear (&track->protection_event_queue);
+
+  if (track->protection_info)
+    gst_structure_free (track->protection_info);
+
+  g_free (track);
+}
+
+/* Registers (once) and returns the GEnum type for Matroska
+ * ContentEncAlgo values.
+ * NOTE(review): the lazy 'if (!type)' init is not thread-safe on first
+ * call; fine if always first hit from plugin init — confirm. */
+GType
+matroska_track_encryption_algorithm_get_type (void)
+{
+  static GType type = 0;
+
+  static const GEnumValue types[] = {
+    {GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_NONE, "Not encrypted",
+        "None"},
+    {GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_DES, "DES encryption algorithm",
+        "DES"},
+    {GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_3DES, "3DES encryption algorithm",
+        "3DES"},
+    {GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_TWOFISH,
+        "TwoFish encryption algorithm", "TwoFish"},
+    {GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_BLOWFISH,
+        "BlowFish encryption algorithm", "BlowFish"},
+    {GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_AES, "AES encryption algorithm",
+        "AES"},
+    {0, NULL, NULL}
+  };
+
+  if (!type) {
+    type = g_enum_register_static ("MatroskaTrackEncryptionAlgorithm", types);
+  }
+  return type;
+}
+
+/* Registers (once) and returns the GEnum type for WebM
+ * AESSettingsCipherMode values.
+ * NOTE(review): lazy init not thread-safe on first call — see sibling
+ * get_type functions; confirm first use is single-threaded. */
+GType
+matroska_track_encryption_cipher_mode_get_type (void)
+{
+  static GType type = 0;
+
+  static const GEnumValue types[] = {
+    {GST_MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_NONE, "Not defined",
+        "None"},
+    {GST_MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_CTR, "CTR encryption mode",
+        "CTR"},
+    {0, NULL, NULL}
+  };
+
+  if (!type) {
+    type = g_enum_register_static ("MatroskaTrackEncryptionCipherMode", types);
+  }
+  return type;
+}
+
+/* Registers (once) and returns the GEnum type for Matroska
+ * ContentEncodingScope values.
+ * NOTE(review): lazy init not thread-safe on first call — see sibling
+ * get_type functions; confirm first use is single-threaded. */
+GType
+matroska_track_encoding_scope_get_type (void)
+{
+  static GType type = 0;
+
+  static const GEnumValue types[] = {
+    {GST_MATROSKA_TRACK_ENCODING_SCOPE_FRAME, "Encoding scope frame", "frame"},
+    {GST_MATROSKA_TRACK_ENCODING_SCOPE_CODEC_DATA, "Encoding scope codec data",
+        "codec-data"},
+    {GST_MATROSKA_TRACK_ENCODING_SCOPE_NEXT_CONTENT_ENCODING,
+        "Encoding scope next content", "next-content"},
+    {0, NULL, NULL}
+  };
+
+  if (!type) {
+    type = g_enum_register_static ("MatroskaTrackEncodingScope", types);
+  }
+  return type;
+}
diff --git a/gst/matroska/matroska-ids.h b/gst/matroska/matroska-ids.h
new file mode 100644
index 0000000000..c4fc73caad
--- /dev/null
+++ b/gst/matroska/matroska-ids.h
@@ -0,0 +1,773 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * matroska-ids.h: matroska file/stream data IDs
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MATROSKA_IDS_H__
+#define __GST_MATROSKA_IDS_H__
+
+#include <gst/gst.h>
+#include <gst/video/video-info.h>
+
+#include "ebml-ids.h"
+
+/*
+ * EBML DocType.
+ */
+
+#define GST_MATROSKA_DOCTYPE_MATROSKA "matroska"
+#define GST_MATROSKA_DOCTYPE_WEBM "webm"
+
+/*
+ * Matroska element IDs. max. 32-bit.
+ */
+
+/* toplevel Segment */
+#define GST_MATROSKA_ID_SEGMENT 0x18538067
+
+/* matroska top-level master IDs, childs of Segment */
+#define GST_MATROSKA_ID_SEGMENTINFO 0x1549A966
+#define GST_MATROSKA_ID_TRACKS 0x1654AE6B
+#define GST_MATROSKA_ID_CUES 0x1C53BB6B
+#define GST_MATROSKA_ID_TAGS 0x1254C367
+#define GST_MATROSKA_ID_SEEKHEAD 0x114D9B74
+#define GST_MATROSKA_ID_CLUSTER 0x1F43B675
+#define GST_MATROSKA_ID_ATTACHMENTS 0x1941A469
+#define GST_MATROSKA_ID_CHAPTERS 0x1043A770
+
+/* IDs in the SegmentInfo master */
+#define GST_MATROSKA_ID_TIMECODESCALE 0x2AD7B1
+#define GST_MATROSKA_ID_DURATION 0x4489
+#define GST_MATROSKA_ID_WRITINGAPP 0x5741
+#define GST_MATROSKA_ID_MUXINGAPP 0x4D80
+#define GST_MATROSKA_ID_DATEUTC 0x4461
+#define GST_MATROSKA_ID_SEGMENTUID 0x73A4
+#define GST_MATROSKA_ID_SEGMENTFILENAME 0x7384
+#define GST_MATROSKA_ID_PREVUID 0x3CB923
+#define GST_MATROSKA_ID_PREVFILENAME 0x3C83AB
+#define GST_MATROSKA_ID_NEXTUID 0x3EB923
+#define GST_MATROSKA_ID_NEXTFILENAME 0x3E83BB
+#define GST_MATROSKA_ID_TITLE 0x7BA9
+#define GST_MATROSKA_ID_SEGMENTFAMILY 0x4444
+#define GST_MATROSKA_ID_CHAPTERTRANSLATE 0x6924
+
+/* IDs in the ChapterTranslate master */
+#define GST_MATROSKA_ID_CHAPTERTRANSLATEEDITIONUID 0x69FC
+#define GST_MATROSKA_ID_CHAPTERTRANSLATECODEC 0x69BF
+#define GST_MATROSKA_ID_CHAPTERTRANSLATEID 0x69A5
+
+/* ID in the Tracks master */
+#define GST_MATROSKA_ID_TRACKENTRY 0xAE
+
+/* IDs in the TrackEntry master */
+#define GST_MATROSKA_ID_TRACKNUMBER 0xD7
+#define GST_MATROSKA_ID_TRACKUID 0x73C5
+#define GST_MATROSKA_ID_TRACKTYPE 0x83
+#define GST_MATROSKA_ID_TRACKAUDIO 0xE1
+#define GST_MATROSKA_ID_TRACKVIDEO 0xE0
+#define GST_MATROSKA_ID_CONTENTENCODINGS 0x6D80
+#define GST_MATROSKA_ID_CODECID 0x86
+#define GST_MATROSKA_ID_CODECPRIVATE 0x63A2
+#define GST_MATROSKA_ID_CODECNAME 0x258688
+#define GST_MATROSKA_ID_TRACKNAME 0x536E
+#define GST_MATROSKA_ID_TRACKLANGUAGE 0x22B59C
+#define GST_MATROSKA_ID_TRACKFLAGENABLED 0xB9
+#define GST_MATROSKA_ID_TRACKFLAGDEFAULT 0x88
+#define GST_MATROSKA_ID_TRACKFLAGFORCED 0x55AA
+#define GST_MATROSKA_ID_TRACKFLAGLACING 0x9C
+#define GST_MATROSKA_ID_TRACKMINCACHE 0x6DE7
+#define GST_MATROSKA_ID_TRACKMAXCACHE 0x6DF8
+#define GST_MATROSKA_ID_TRACKDEFAULTDURATION 0x23E383
+#define GST_MATROSKA_ID_TRACKTIMECODESCALE 0x23314F
+#define GST_MATROSKA_ID_MAXBLOCKADDITIONID 0x55EE
+#define GST_MATROSKA_ID_TRACKATTACHMENTLINK 0x7446
+#define GST_MATROSKA_ID_TRACKOVERLAY 0x6FAB
+#define GST_MATROSKA_ID_TRACKTRANSLATE 0x6624
+/* semi-draft */
+#define GST_MATROSKA_ID_TRACKOFFSET 0x537F
+/* semi-draft */
+#define GST_MATROSKA_ID_CODECSETTINGS 0x3A9697
+/* semi-draft */
+#define GST_MATROSKA_ID_CODECINFOURL 0x3B4040
+/* semi-draft */
+#define GST_MATROSKA_ID_CODECDOWNLOADURL 0x26B240
+/* semi-draft */
+#define GST_MATROSKA_ID_CODECDECODEALL 0xAA
+#define GST_MATROSKA_ID_SEEKPREROLL 0x56BB
+#define GST_MATROSKA_ID_CODECDELAY 0x56AA
+
+/* IDs in the TrackTranslate master */
+#define GST_MATROSKA_ID_TRACKTRANSLATEEDITIONUID 0x66FC
+#define GST_MATROSKA_ID_TRACKTRANSLATECODEC 0x66BF
+#define GST_MATROSKA_ID_TRACKTRANSLATETRACKID 0x66A5
+
+
+/* IDs in the TrackVideo master */
+/* NOTE: This one is here only for backward compatibility.
+ * Use _TRACKDEFAULTDURATION */
+#define GST_MATROSKA_ID_VIDEOFRAMERATE 0x2383E3
+#define GST_MATROSKA_ID_VIDEODISPLAYWIDTH 0x54B0
+#define GST_MATROSKA_ID_VIDEODISPLAYHEIGHT 0x54BA
+#define GST_MATROSKA_ID_VIDEODISPLAYUNIT 0x54B2
+#define GST_MATROSKA_ID_VIDEOPIXELWIDTH 0xB0
+#define GST_MATROSKA_ID_VIDEOPIXELHEIGHT 0xBA
+#define GST_MATROSKA_ID_VIDEOPIXELCROPBOTTOM 0x54AA
+#define GST_MATROSKA_ID_VIDEOPIXELCROPTOP 0x54BB
+#define GST_MATROSKA_ID_VIDEOPIXELCROPLEFT 0x54CC
+#define GST_MATROSKA_ID_VIDEOPIXELCROPRIGHT 0x54DD
+#define GST_MATROSKA_ID_VIDEOFLAGINTERLACED 0x9A
+#define GST_MATROSKA_ID_VIDEOFIELDORDER 0x9D
+/* semi-draft */
+#define GST_MATROSKA_ID_VIDEOSTEREOMODE 0x53B8
+#define GST_MATROSKA_ID_VIDEOALPHAMODE 0x53C0
+#define GST_MATROSKA_ID_VIDEOASPECTRATIOTYPE 0x54B3
+#define GST_MATROSKA_ID_VIDEOCOLOURSPACE 0x2EB524
+/* semi-draft */
+#define GST_MATROSKA_ID_VIDEOGAMMAVALUE 0x2FB523
+
+#define GST_MATROSKA_ID_VIDEOCOLOUR 0x55B0
+/* IDs in the Colour master*/
+#define GST_MATROSKA_ID_VIDEOMATRIXCOEFFICIENTS 0x55B1
+#define GST_MATROSKA_ID_VIDEORANGE 0x55B9
+#define GST_MATROSKA_ID_VIDEOTRANSFERCHARACTERISTICS 0x55BA
+#define GST_MATROSKA_ID_VIDEOPRIMARIES 0x55BB
+#define GST_MATROSKA_ID_MAXCLL 0x55BC
+#define GST_MATROSKA_ID_MAXFALL 0x55BD
+#define GST_MATROSKA_ID_MASTERINGMETADATA 0x55D0
+/* IDs in the MasteringMetadata */
+#define GST_MATROSKA_ID_PRIMARYRCHROMATICITYX 0x55D1
+#define GST_MATROSKA_ID_PRIMARYRCHROMATICITYY 0x55D2
+#define GST_MATROSKA_ID_PRIMARYGCHROMATICITYX 0x55D3
+#define GST_MATROSKA_ID_PRIMARYGCHROMATICITYY 0x55D4
+#define GST_MATROSKA_ID_PRIMARYBCHROMATICITYX 0x55D5
+#define GST_MATROSKA_ID_PRIMARYBCHROMATICITYY 0x55D6
+#define GST_MATROSKA_ID_WHITEPOINTCHROMATICITYX 0x55D7
+#define GST_MATROSKA_ID_WHITEPOINTCHROMATICITYY 0x55D8
+#define GST_MATROSKA_ID_LUMINANCEMAX 0x55D9
+#define GST_MATROSKA_ID_LUMINANCEMIN 0x55DA
+
+/* IDs in the TrackAudio master */
+#define GST_MATROSKA_ID_AUDIOSAMPLINGFREQ 0xB5
+#define GST_MATROSKA_ID_AUDIOBITDEPTH 0x6264
+#define GST_MATROSKA_ID_AUDIOCHANNELS 0x9F
+/* semi-draft */
+#define GST_MATROSKA_ID_AUDIOCHANNELPOSITIONS 0x7D7B
+#define GST_MATROSKA_ID_AUDIOOUTPUTSAMPLINGFREQ 0x78B5
+
+/* IDs in the TrackContentEncoding master */
+#define GST_MATROSKA_ID_CONTENTENCODING 0x6240
+
+/* IDs in the ContentEncoding master */
+#define GST_MATROSKA_ID_CONTENTENCODINGORDER 0x5031
+#define GST_MATROSKA_ID_CONTENTENCODINGSCOPE 0x5032
+#define GST_MATROSKA_ID_CONTENTENCODINGTYPE 0x5033
+#define GST_MATROSKA_ID_CONTENTCOMPRESSION 0x5034
+#define GST_MATROSKA_ID_CONTENTENCRYPTION 0x5035
+
+/* IDs in the ContentCompression master */
+#define GST_MATROSKA_ID_CONTENTCOMPALGO 0x4254
+#define GST_MATROSKA_ID_CONTENTCOMPSETTINGS 0x4255
+
+/* IDs in the ContentEncryption master */
+#define GST_MATROSKA_ID_CONTENTENCALGO 0x47E1
+#define GST_MATROSKA_ID_CONTENTENCKEYID 0x47E2
+#define GST_MATROSKA_ID_CONTENTSIGNATURE 0x47E3
+#define GST_MATROSKA_ID_CONTENTSIGKEYID 0x47E4
+#define GST_MATROSKA_ID_CONTENTSIGALGO 0x47E5
+#define GST_MATROSKA_ID_CONTENTSIGHASHALGO 0x47E6
+/* Added in WebM spec */
+#define GST_MATROSKA_ID_CONTENTENCAESSETTINGS 0x47E7
+#define GST_MATROSKA_ID_AESSETTINGSCIPHERMODE 0x47E8
+
+/* ID in the CUEs master */
+#define GST_MATROSKA_ID_POINTENTRY 0xBB
+
+/* IDs in the pointentry master */
+#define GST_MATROSKA_ID_CUETIME 0xB3
+#define GST_MATROSKA_ID_CUETRACKPOSITIONS 0xB7
+
+/* IDs in the CueTrackPositions master */
+#define GST_MATROSKA_ID_CUETRACK 0xF7
+#define GST_MATROSKA_ID_CUECLUSTERPOSITION 0xF1
+#define GST_MATROSKA_ID_CUEBLOCKNUMBER 0x5378
+/* semi-draft */
+#define GST_MATROSKA_ID_CUECODECSTATE 0xEA
+/* semi-draft */
+#define GST_MATROSKA_ID_CUEREFERENCE 0xDB
+
+/* IDs in the CueReference master */
+/* semi-draft */
+#define GST_MATROSKA_ID_CUEREFTIME 0x96
+/* semi-draft */
+#define GST_MATROSKA_ID_CUEREFCLUSTER 0x97
+/* semi-draft */
+#define GST_MATROSKA_ID_CUEREFNUMBER 0x535F
+/* semi-draft */
+#define GST_MATROSKA_ID_CUEREFCODECSTATE 0xEB
+
+/* IDs in the Tags master */
+#define GST_MATROSKA_ID_TAG 0x7373
+
+/* in the Tag master */
+#define GST_MATROSKA_ID_SIMPLETAG 0x67C8
+#define GST_MATROSKA_ID_TARGETS 0x63C0
+
+/* in the SimpleTag master */
+#define GST_MATROSKA_ID_TAGNAME 0x45A3
+#define GST_MATROSKA_ID_TAGSTRING 0x4487
+#define GST_MATROSKA_ID_TAGLANGUAGE 0x447A
+#define GST_MATROSKA_ID_TAGDEFAULT 0x4484
+#define GST_MATROSKA_ID_TAGBINARY 0x4485
+
+/* in the Targets master */
+#define GST_MATROSKA_ID_TARGETTYPEVALUE 0x68CA
+#define GST_MATROSKA_ID_TARGETTYPE 0x63CA
+#define GST_MATROSKA_ID_TARGETTRACKUID 0x63C5
+#define GST_MATROSKA_ID_TARGETEDITIONUID 0x63C9
+#define GST_MATROSKA_ID_TARGETCHAPTERUID 0x63C4
+#define GST_MATROSKA_ID_TARGETATTACHMENTUID 0x63C6
+
+/* IDs in the SeekHead master */
+#define GST_MATROSKA_ID_SEEKENTRY 0x4DBB
+
+/* IDs in the SeekEntry master */
+#define GST_MATROSKA_ID_SEEKID 0x53AB
+#define GST_MATROSKA_ID_SEEKPOSITION 0x53AC
+
+/* IDs in the Cluster master */
+#define GST_MATROSKA_ID_CLUSTERTIMECODE 0xE7
+#define GST_MATROSKA_ID_BLOCKGROUP 0xA0
+#define GST_MATROSKA_ID_SIMPLEBLOCK 0xA3
+#define GST_MATROSKA_ID_REFERENCEBLOCK 0xFB
+#define GST_MATROSKA_ID_POSITION 0xA7
+#define GST_MATROSKA_ID_PREVSIZE 0xAB
+/* semi-draft */
+#define GST_MATROSKA_ID_ENCRYPTEDBLOCK 0xAF
+#define GST_MATROSKA_ID_SILENTTRACKS 0x5854
+
+/* IDs in the SilentTracks master */
+#define GST_MATROSKA_ID_SILENTTRACKNUMBER 0x58D7
+
+/* IDs in the BlockGroup master */
+#define GST_MATROSKA_ID_BLOCK 0xA1
+#define GST_MATROSKA_ID_BLOCKDURATION 0x9B
+/* semi-draft */
+#define GST_MATROSKA_ID_BLOCKVIRTUAL 0xA2
+#define GST_MATROSKA_ID_REFERENCEBLOCK 0xFB
+#define GST_MATROSKA_ID_BLOCKADDITIONS 0x75A1
+#define GST_MATROSKA_ID_REFERENCEPRIORITY 0xFA
+/* semi-draft */
+#define GST_MATROSKA_ID_REFERENCEVIRTUAL 0xFD
+/* semi-draft */
+#define GST_MATROSKA_ID_CODECSTATE 0xA4
+#define GST_MATROSKA_ID_SLICES 0x8E
+#define GST_MATROSKA_ID_DISCARDPADDING 0x75A2
+
+/* IDs in the BlockAdditions master */
+#define GST_MATROSKA_ID_BLOCKMORE 0xA6
+
+/* IDs in the BlockMore master */
+#define GST_MATROSKA_ID_BLOCKADDID 0xEE
+#define GST_MATROSKA_ID_BLOCKADDITIONAL 0xA5
+
+/* IDs in the Slices master */
+#define GST_MATROSKA_ID_TIMESLICE 0xE8
+
+/* IDs in the TimeSlice master */
+#define GST_MATROSKA_ID_LACENUMBER 0xCC
+/* semi-draft */
+#define GST_MATROSKA_ID_FRAMENUMBER 0xCD
+/* semi-draft */
+#define GST_MATROSKA_ID_BLOCKADDITIONID 0xCB
+/* semi-draft */
+#define GST_MATROSKA_ID_TIMESLICEDELAY 0xCE
+#define GST_MATROSKA_ID_TIMESLICEDURATION 0xCF
+
+/* IDs in the Attachments master */
+#define GST_MATROSKA_ID_ATTACHEDFILE 0x61A7
+
+/* IDs in the AttachedFile master */
+#define GST_MATROSKA_ID_FILEDESCRIPTION 0x467E
+#define GST_MATROSKA_ID_FILENAME 0x466E
+#define GST_MATROSKA_ID_FILEMIMETYPE 0x4660
+#define GST_MATROSKA_ID_FILEDATA 0x465C
+#define GST_MATROSKA_ID_FILEUID 0x46AE
+/* semi-draft */
+#define GST_MATROSKA_ID_FILEREFERRAL 0x4675
+
+/* IDs in the Chapters master */
+#define GST_MATROSKA_ID_EDITIONENTRY 0x45B9
+
+/* IDs in the EditionEntry master */
+#define GST_MATROSKA_ID_EDITIONUID 0x45BC
+#define GST_MATROSKA_ID_EDITIONFLAGHIDDEN 0x45BD
+#define GST_MATROSKA_ID_EDITIONFLAGDEFAULT 0x45DB
+#define GST_MATROSKA_ID_EDITIONFLAGORDERED 0x45DD
+#define GST_MATROSKA_ID_CHAPTERATOM 0xB6
+
+/* IDs in the ChapterAtom master */
+#define GST_MATROSKA_ID_CHAPTERUID 0x73C4
+#define GST_MATROSKA_ID_CHAPTERSTRINGUID 0x5654
+#define GST_MATROSKA_ID_CHAPTERTIMESTART 0x91
+#define GST_MATROSKA_ID_CHAPTERTIMESTOP 0x92
+#define GST_MATROSKA_ID_CHAPTERFLAGHIDDEN 0x98
+#define GST_MATROSKA_ID_CHAPTERFLAGENABLED 0x4598
+#define GST_MATROSKA_ID_CHAPTERSEGMENTUID 0x6E67
+#define GST_MATROSKA_ID_CHAPTERSEGMENTEDITIONUID 0x6EBC
+#define GST_MATROSKA_ID_CHAPTERPHYSICALEQUIV 0x63C3
+#define GST_MATROSKA_ID_CHAPTERTRACK 0x8F
+#define GST_MATROSKA_ID_CHAPTERDISPLAY 0x80
+#define GST_MATROSKA_ID_CHAPPROCESS 0x6944
+
+/* IDs in the ChapProcess master */
+#define GST_MATROSKA_ID_CHAPPROCESSCODECID 0x6955
+#define GST_MATROSKA_ID_CHAPPROCESSPRIVATE 0x450D
+#define GST_MATROSKA_ID_CHAPPROCESSCOMMAND 0x6911
+
+/* IDs in the ChapProcessCommand master */
+#define GST_MATROSKA_ID_CHAPPROCESSTIME 0x6922
+#define GST_MATROSKA_ID_CHAPPROCESSDATA 0x6933
+
+/* IDs in the ChapterDisplay master */
+#define GST_MATROSKA_ID_CHAPSTRING 0x85
+#define GST_MATROSKA_ID_CHAPLANGUAGE 0x437C
+#define GST_MATROSKA_ID_CHAPCOUNTRY 0x437E
+
+/* IDs in the ChapterTrack master */
+#define GST_MATROSKA_ID_CHAPTERTRACKNUMBER 0x89
+
+/*
+ * Matroska Codec IDs. Strings.
+ */
+
+#define GST_MATROSKA_CODEC_ID_VIDEO_VFW_FOURCC "V_MS/VFW/FOURCC"
+#define GST_MATROSKA_CODEC_ID_VIDEO_UNCOMPRESSED "V_UNCOMPRESSED"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_SP "V_MPEG4/ISO/SP"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_ASP "V_MPEG4/ISO/ASP"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AP "V_MPEG4/ISO/AP"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AVC "V_MPEG4/ISO/AVC"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MSMPEG4V3 "V_MPEG4/MS/V3"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MPEG1 "V_MPEG1"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MPEG2 "V_MPEG2"
+/* FIXME: not (yet) in the spec! */
+/* Video codec IDs (V_*) as written to / read from the CodecID track element. */
+#define GST_MATROSKA_CODEC_ID_VIDEO_MJPEG "V_MJPEG"
+#define GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO1 "V_REAL/RV10"
+#define GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO2 "V_REAL/RV20"
+#define GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO3 "V_REAL/RV30"
+#define GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO4 "V_REAL/RV40"
+#define GST_MATROSKA_CODEC_ID_VIDEO_THEORA "V_THEORA"
+#define GST_MATROSKA_CODEC_ID_VIDEO_QUICKTIME "V_QUICKTIME"
+#define GST_MATROSKA_CODEC_ID_VIDEO_SNOW "V_SNOW"
+#define GST_MATROSKA_CODEC_ID_VIDEO_DIRAC "V_DIRAC"
+#define GST_MATROSKA_CODEC_ID_VIDEO_VP8 "V_VP8"
+#define GST_MATROSKA_CODEC_ID_VIDEO_VP9 "V_VP9"
+#define GST_MATROSKA_CODEC_ID_VIDEO_AV1 "V_AV1"
+#define GST_MATROSKA_CODEC_ID_VIDEO_MPEGH_HEVC "V_MPEGH/ISO/HEVC"
+#define GST_MATROSKA_CODEC_ID_VIDEO_PRORES "V_PRORES"
+#define GST_MATROSKA_CODEC_ID_VIDEO_FFV1 "V_FFV1"
+
+/* Audio codec IDs (A_*). */
+#define GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L1 "A_MPEG/L1"
+#define GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L2 "A_MPEG/L2"
+#define GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L3 "A_MPEG/L3"
+#define GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE "A_PCM/INT/BIG"
+#define GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_LE "A_PCM/INT/LIT"
+#define GST_MATROSKA_CODEC_ID_AUDIO_PCM_FLOAT "A_PCM/FLOAT/IEEE"
+#define GST_MATROSKA_CODEC_ID_AUDIO_AC3 "A_AC3"
+#define GST_MATROSKA_CODEC_ID_AUDIO_AC3_BSID9 "A_AC3/BSID9"
+#define GST_MATROSKA_CODEC_ID_AUDIO_AC3_BSID10 "A_AC3/BSID10"
+#define GST_MATROSKA_CODEC_ID_AUDIO_EAC3 "A_EAC3"
+#define GST_MATROSKA_CODEC_ID_AUDIO_TRUEHD "A_TRUEHD"
+#define GST_MATROSKA_CODEC_ID_AUDIO_DTS "A_DTS"
+#define GST_MATROSKA_CODEC_ID_AUDIO_VORBIS "A_VORBIS"
+#define GST_MATROSKA_CODEC_ID_AUDIO_FLAC "A_FLAC"
+/* FIXME: not yet in the spec */
+#define GST_MATROSKA_CODEC_ID_AUDIO_SPEEX "A_SPEEX"
+#define GST_MATROSKA_CODEC_ID_AUDIO_ACM "A_MS/ACM"
+#define GST_MATROSKA_CODEC_ID_AUDIO_TTA "A_TTA1"
+#define GST_MATROSKA_CODEC_ID_AUDIO_WAVPACK4 "A_WAVPACK4"
+#define GST_MATROSKA_CODEC_ID_AUDIO_REAL_14_4 "A_REAL/14_4"
+#define GST_MATROSKA_CODEC_ID_AUDIO_REAL_28_8 "A_REAL/28_8"
+#define GST_MATROSKA_CODEC_ID_AUDIO_REAL_COOK "A_REAL/COOK"
+#define GST_MATROSKA_CODEC_ID_AUDIO_REAL_SIPR "A_REAL/SIPR"
+#define GST_MATROSKA_CODEC_ID_AUDIO_REAL_RALF "A_REAL/RALF"
+#define GST_MATROSKA_CODEC_ID_AUDIO_REAL_ATRC "A_REAL/ATRC"
+#define GST_MATROSKA_CODEC_ID_AUDIO_AAC "A_AAC"
+/* NOTE(review): the trailing '/' in the two AAC IDs below looks intentional —
+ * a profile suffix is presumably appended by users of these macros; confirm
+ * against the mux/demux code before "fixing" it. */
+#define GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG2 "A_AAC/MPEG2/"
+#define GST_MATROSKA_CODEC_ID_AUDIO_AAC_MPEG4 "A_AAC/MPEG4/"
+#define GST_MATROSKA_CODEC_ID_AUDIO_QUICKTIME_QDMC "A_QUICKTIME/QDMC"
+#define GST_MATROSKA_CODEC_ID_AUDIO_QUICKTIME_QDM2 "A_QUICKTIME/QDM2"
+#define GST_MATROSKA_CODEC_ID_AUDIO_OPUS "A_OPUS"
+/* Undefined for now:
+#define GST_MATROSKA_CODEC_ID_AUDIO_MPC "A_MPC"
+*/
+
+/* Subtitle codec IDs (S_*). */
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_ASCII "S_TEXT/ASCII"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_UTF8 "S_TEXT/UTF8"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_SSA "S_TEXT/SSA"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_ASS "S_TEXT/ASS"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_USF "S_TEXT/USF"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_VOBSUB "S_VOBSUB"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_HDMVPGS "S_HDMV/PGS"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_BMP "S_IMAGE/BMP"
+#define GST_MATROSKA_CODEC_ID_SUBTITLE_KATE "S_KATE"
+
+/*
+ * Matroska tags. Strings.
+ */
+
+#define GST_MATROSKA_TAG_ID_TITLE "TITLE"
+#define GST_MATROSKA_TAG_ID_AUTHOR "AUTHOR"
+#define GST_MATROSKA_TAG_ID_ARTIST "ARTIST"
+#define GST_MATROSKA_TAG_ID_ALBUM "ALBUM"
+#define GST_MATROSKA_TAG_ID_COMMENTS "COMMENTS"
+#define GST_MATROSKA_TAG_ID_COMMENT "COMMENT"
+#define GST_MATROSKA_TAG_ID_BITSPS "BITSPS"
+#define GST_MATROSKA_TAG_ID_BPS "BPS"
+#define GST_MATROSKA_TAG_ID_ENCODER "ENCODER"
+#define GST_MATROSKA_TAG_ID_ISRC "ISRC"
+#define GST_MATROSKA_TAG_ID_COPYRIGHT "COPYRIGHT"
+#define GST_MATROSKA_TAG_ID_BPM "BPM"
+#define GST_MATROSKA_TAG_ID_TERMS_OF_USE "TERMS_OF_USE"
+#define GST_MATROSKA_TAG_ID_DATE "DATE"
+#define GST_MATROSKA_TAG_ID_COMPOSER "COMPOSER"
+/* FIXME: the value is misspelled — the Matroska tagging spec names this tag
+ * "LEAD_PERFORMER". Kept as-is here because changing the string changes
+ * which tags are matched/written; fixing it needs a compatibility check
+ * against existing files. */
+#define GST_MATROSKA_TAG_ID_LEAD_PERFORMER "LEAD_PERFOMER"
+#define GST_MATROSKA_TAG_ID_GENRE "GENRE"
+#define GST_MATROSKA_TAG_ID_TOTAL_PARTS "TOTAL_PARTS"
+#define GST_MATROSKA_TAG_ID_PART_NUMBER "PART_NUMBER"
+#define GST_MATROSKA_TAG_ID_SUBTITLE "SUBTITLE"
+#define GST_MATROSKA_TAG_ID_ACCOMPANIMENT "ACCOMPANIMENT"
+#define GST_MATROSKA_TAG_ID_LYRICS "LYRICS"
+#define GST_MATROSKA_TAG_ID_CONDUCTOR "CONDUCTOR"
+#define GST_MATROSKA_TAG_ID_ENCODED_BY "ENCODED_BY"
+#define GST_MATROSKA_TAG_ID_DESCRIPTION "DESCRIPTION"
+#define GST_MATROSKA_TAG_ID_KEYWORDS "KEYWORDS"
+#define GST_MATROSKA_TAG_ID_DATE_RELEASED "DATE_RELEASED"
+#define GST_MATROSKA_TAG_ID_DATE_RECORDED "DATE_RECORDED"
+#define GST_MATROSKA_TAG_ID_DATE_ENCODED "DATE_ENCODED"
+#define GST_MATROSKA_TAG_ID_DATE_TAGGED "DATE_TAGGED"
+#define GST_MATROSKA_TAG_ID_DATE_DIGITIZED "DATE_DIGITIZED"
+#define GST_MATROSKA_TAG_ID_DATE_WRITTEN "DATE_WRITTEN"
+#define GST_MATROSKA_TAG_ID_DATE_PURCHASED "DATE_PURCHASED"
+#define GST_MATROSKA_TAG_ID_RECORDING_LOCATION "RECORDING_LOCATION"
+#define GST_MATROSKA_TAG_ID_PRODUCTION_COPYRIGHT "PRODUCTION_COPYRIGHT"
+#define GST_MATROSKA_TAG_ID_LICENSE "LICENSE"
+
+/*
+ * TODO: add this tag & mappings
+ * "REPLAYGAIN_GAIN" -> GST_TAG_*_GAIN see http://replaygain.hydrogenaudio.org/rg_data_format.html
+ * "REPLAYGAIN_PEAK" -> GST_TAG_*_PEAK see http://replaygain.hydrogenaudio.org/peak_data_format.html
+ * both depend on the target (track or album)
+ *
+ * "TOTAL_PARTS" -> GST_TAG_TRACK_COUNT depending on target
+ * "PART_NUMBER" -> GST_TAG_TRACK_NUMBER depending on target
+ *
+ * "SORT_WITH" -> nested in other elements, GST_TAG_TITLE_SORTNAME, etc
+ *
+ * TODO: maybe add custom gstreamer tags for other standard matroska tags,
+ * see http://matroska.org/technical/specs/tagging/index.html
+ *
+ * TODO: handle tag targets and nesting correctly
+ */
+
+/*
+ * Enumerations for various types (mapping from binary
+ * value to what it actually means).
+ */
+
+/* Track types as stored in the TrackType element. */
+typedef enum {
+  GST_MATROSKA_TRACK_TYPE_VIDEO = 0x1,
+  GST_MATROSKA_TRACK_TYPE_AUDIO = 0x2,
+  GST_MATROSKA_TRACK_TYPE_COMPLEX = 0x3,
+  GST_MATROSKA_TRACK_TYPE_LOGO = 0x10,
+  GST_MATROSKA_TRACK_TYPE_SUBTITLE = 0x11,
+  GST_MATROSKA_TRACK_TYPE_BUTTONS = 0x12,
+  GST_MATROSKA_TRACK_TYPE_CONTROL = 0x20,
+} GstMatroskaTrackType;
+
+/* How to treat the display vs. pixel aspect ratio. */
+typedef enum {
+  GST_MATROSKA_ASPECT_RATIO_MODE_FREE = 0x0,
+  GST_MATROSKA_ASPECT_RATIO_MODE_KEEP = 0x1,
+  GST_MATROSKA_ASPECT_RATIO_MODE_FIXED = 0x2,
+} GstMatroskaAspectRatioMode;
+
+/*
+ * These aren't in any way "matroska-form" things,
+ * it's just something I use in the muxer/demuxer.
+ */
+
+/* Per-track bitflags (internal, not file-format values). */
+typedef enum {
+  GST_MATROSKA_TRACK_ENABLED = (1<<0),
+  GST_MATROSKA_TRACK_DEFAULT = (1<<1),
+  GST_MATROSKA_TRACK_LACING = (1<<2),
+  GST_MATROSKA_TRACK_FORCED = (1<<3),
+  /* NOTE(review): bit 16 — sits well beyond the "real" flags above;
+   * confirm intended use before relying on it. */
+  GST_MATROSKA_TRACK_SHIFT = (1<<16)
+} GstMatroskaTrackFlags;
+
+typedef enum {
+  GST_MATROSKA_INTERLACE_MODE_UNKNOWN = 0,
+  GST_MATROSKA_INTERLACE_MODE_INTERLACED = 1,
+  GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE = 2,
+} GstMatroskaInterlaceMode;
+
+/* Stereo-3D packing layouts; values presumably mirror the Matroska
+ * StereoMode element — verify against the spec before adding entries. */
+typedef enum {
+  GST_MATROSKA_STEREO_MODE_SBS_LR = 0x1,
+  GST_MATROSKA_STEREO_MODE_TB_RL = 0x2,
+  GST_MATROSKA_STEREO_MODE_TB_LR = 0x3,
+  GST_MATROSKA_STEREO_MODE_CHECKER_RL = 0x4,
+  GST_MATROSKA_STEREO_MODE_CHECKER_LR = 0x5,
+  GST_MATROSKA_STEREO_MODE_SBS_RL = 0x9,
+  GST_MATROSKA_STEREO_MODE_FBF_LR = 0xD,
+  GST_MATROSKA_STEREO_MODE_FBF_RL = 0xE
+} GstMatroskaStereoMode;
+
+/* Content encoding kind: compression vs. encryption. */
+typedef enum {
+  GST_MATROSKA_ENCODING_COMPRESSION = 0x00,
+  GST_MATROSKA_ENCODING_ENCRYPTION = 0x01
+} GstMatroskaEncodingType;
+
+/* WebM spec */
+typedef enum {
+  GST_MATROSKA_BLOCK_ENCRYPTED = 0x01,
+  GST_MATROSKA_BLOCK_PARTITIONED = 0x02
+} GstMatroskaEncryptedBlockFlags;
+
+typedef struct _GstMatroskaTrackContext GstMatroskaTrackContext;
+
+/* Per-track state shared between the muxer and the demuxer.
+ * TODO: check if all fields are used */
+struct _GstMatroskaTrackContext {
+  GstPad *pad;
+  GstCaps *caps;
+  guint index;
+  /* reverse playback */
+  GstClockTime from_time;
+  gint64 from_offset;
+  gint64 to_offset;
+
+  GArray *index_table;
+
+  gint index_writer_id;
+
+  /* some often-used info */
+  gchar *codec_id, *codec_name, *name, *language;
+  gpointer codec_priv;
+  gsize codec_priv_size;
+  gpointer codec_state;
+  gsize codec_state_size;
+  GstMatroskaTrackType type;
+  /* NOTE(review): presumably the TrackUID and TrackNumber as stored in the
+   * file — confirm against the element readers/writers. */
+  guint64 uid, num;
+  GstMatroskaTrackFlags flags;
+  guint64 default_duration;
+  guint64 pos;
+  gdouble timecodescale;
+  guint64 seek_preroll;
+  guint64 codec_delay;
+
+  gboolean set_discont; /* TRUE = set DISCONT flag on next buffer */
+
+  /* Queue to save the GST_PROTECTION events which will be sent before the first source buffer */
+  GQueue protection_event_queue;
+  /* Protection information structure which will be added in protection metadata for each encrypted buffer */
+  GstStructure * protection_info;
+
+  /* Stream header buffer, to put into caps and send before any other buffers */
+  GstBufferList * stream_headers;
+  gboolean send_stream_headers;
+
+  /* Special flag for VobSub, for which we have to send colour table info
+   * (if available) first before sending any data, and just testing
+   * for time == 0 is not enough to detect that. Used by demuxer */
+  gboolean send_dvd_event;
+
+  /* Special counter for muxer to skip the first N vorbis/theora headers -
+   * they are put into codec private data, not muxed into the stream */
+  guint xiph_headers_to_skip;
+
+  /* Used for postprocessing a frame before it is pushed from the demuxer */
+  GstFlowReturn (*postprocess_frame) (GstElement *element,
+                                      GstMatroskaTrackContext *context,
+                                      GstBuffer **buffer);
+
+  /* List of tags for this stream */
+  GstTagList *tags;
+  /* Tags changed and should be pushed again */
+  gboolean tags_changed;
+
+  /* A GArray of GstMatroskaTrackEncoding structures which contain the
+   * encoding (compression/encryption) settings for this track, if any */
+  GArray *encodings;
+
+  /* Whether the stream is EOS */
+  gboolean eos;
+
+  /* any alignment we need our output buffers to have */
+  gint alignment;
+
+  /* for compatibility with VFW files, where timestamp represents DTS */
+  gboolean dts_only;
+
+  /* indicate that the track is raw (jpeg,raw variants) and so pts=dts */
+  gboolean intra_only;
+};
+
+/* Video-specific track state; embeds GstMatroskaTrackContext as first
+ * member so it can be cast to/from the generic context. */
+typedef struct _GstMatroskaTrackVideoContext {
+  GstMatroskaTrackContext parent;
+
+  guint pixel_width, pixel_height;
+  guint display_width, display_height;
+  gdouble default_fps;
+  GstMatroskaAspectRatioMode asr_mode;
+  guint32 fourcc;
+
+  GstMatroskaInterlaceMode interlace_mode;
+  GstVideoFieldOrder field_order;
+
+  GstVideoMultiviewMode multiview_mode;
+  GstVideoMultiviewFlags multiview_flags;
+
+  gboolean alpha_mode;
+
+  /* QoS */
+  GstClockTime earliest_time;
+
+  GstBuffer *dirac_unit;
+  GstVideoColorimetry colorimetry;
+
+  /* HDR metadata; the _present flag says whether the info below is valid */
+  GstVideoMasteringDisplayInfo mastering_display_info;
+  gboolean mastering_display_info_present;
+
+  GstVideoContentLightLevel content_light_level;
+} GstMatroskaTrackVideoContext;
+
+/* Audio-specific track state. */
+typedef struct _GstMatroskaTrackAudioContext {
+  GstMatroskaTrackContext parent;
+
+  guint samplerate, channels, bitdepth;
+
+  guint32 wvpk_block_index;
+} GstMatroskaTrackAudioContext;
+
+/* Subtitle-specific track state. */
+typedef struct _GstMatroskaTrackSubtitleContext {
+  GstMatroskaTrackContext parent;
+
+  gboolean check_utf8;     /* buffers should be valid UTF-8 */
+  gboolean check_markup;   /* check if buffers contain markup
+                            * or plaintext and escape characters */
+  gboolean invalid_utf8;   /* work around broken files */
+  gboolean seen_markup_tag;  /* markup found in text */
+} GstMatroskaTrackSubtitleContext;
+
+/* One seek-index entry. */
+typedef struct _GstMatroskaIndex {
+  guint64 pos;      /* of the corresponding *cluster*! */
+  GstClockTime time;      /* in nanoseconds */
+  guint32 block;      /* number of the block in the cluster */
+  guint16 track;      /* reference to 'num' */
+} GstMatroskaIndex;
+
+/* On-disk WavPack block header layout (see WAVPACK4_HEADER_SIZE below). */
+typedef struct _Wavpack4Header {
+  guchar ck_id [4];     /* "wvpk" */
+  guint32 ck_size;      /* size of entire frame (minus 8, of course) */
+  guint16 version;      /* 0x403 for now */
+  guint8 track_no;      /* track number (0 if not used, like now) */
+  guint8 index_no;      /* remember these? (0 if not used, like now) */
+  guint32 total_samples;  /* for entire file (-1 if unknown) */
+  guint32 block_index;  /* index of first sample in block (to file begin) */
+  guint32 block_samples;  /* # samples in this block */
+  guint32 flags;      /* various flags for id and decoding */
+  guint32 crc;      /* crc for actual decoded data */
+} Wavpack4Header;
+
+/* Serialized size of Wavpack4Header: 4+4+2+1+1+4+4+4+4+4 = 32 bytes
+ * (the in-memory struct may be padded; always use this for I/O). */
+#define WAVPACK4_HEADER_SIZE (32)
+
+/* ContentEncodingScope: bitflags saying what part of the stream the
+ * encoding applies to. */
+typedef enum {
+  GST_MATROSKA_TRACK_ENCODING_SCOPE_FRAME = (1<<0),
+  GST_MATROSKA_TRACK_ENCODING_SCOPE_CODEC_DATA = (1<<1),
+  GST_MATROSKA_TRACK_ENCODING_SCOPE_NEXT_CONTENT_ENCODING = (1<<2)
+} GstMatroskaTrackEncodingScope;
+
+#define MATROSKA_TRACK_ENCODING_SCOPE_TYPE (matroska_track_encoding_scope_get_type())
+GType matroska_track_encoding_scope_get_type (void);
+
+/* ContentCompAlgo values. */
+typedef enum {
+  GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_ZLIB = 0,
+  GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_BZLIB = 1,
+  GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_LZO1X = 2,
+  GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_HEADERSTRIP = 3
+} GstMatroskaTrackCompressionAlgorithm;
+
+/* The encryption algorithm used. The value '0' means that the contents
+ * have not been encrypted but only signed.
+ * Predefined values: 1 - DES; 2 - 3DES; 3 - Twofish; 4 - Blowfish; 5 - AES.
+ * WebM only supports a value of 5 (AES).
+ */
+typedef enum {
+  GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_NONE = 0,
+  GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_DES = 1,
+  GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_3DES = 2,
+  GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_TWOFISH = 3,
+  GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_BLOWFISH = 4,
+  GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_AES = 5
+} GstMatroskaTrackEncryptionAlgorithm;
+
+#define MATROSKA_TRACK_ENCRYPTION_ALGORITHM_TYPE (matroska_track_encryption_algorithm_get_type())
+GType matroska_track_encryption_algorithm_get_type (void);
+
+/* Defined only in WebM spec.
+ * The cipher mode used in the encryption. Predefined values: 1 - CTR
+ */
+typedef enum {
+  GST_MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_NONE = 0,
+  GST_MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_CTR = 1
+} GstMatroskaTrackEncryptionCipherMode;
+
+#define MATROSKA_TRACK_ENCRYPTION_CIPHER_MODE_TYPE (matroska_track_encryption_cipher_mode_get_type())
+GType matroska_track_encryption_cipher_mode_get_type (void);
+
+
+/* One ContentEncoding entry for a track. Bitfield widths cover the enum
+ * ranges above (scope: flags up to 7, comp_algo: 0-3, enc_algo: 0-5). */
+typedef struct _GstMatroskaTrackEncoding {
+  guint order;
+  guint scope : 3;
+  guint type : 1;
+  guint comp_algo : 2;
+  guint8 *comp_settings;
+  guint comp_settings_length;
+  guint enc_algo : 3;
+  guint enc_cipher_mode : 2;
+} GstMatroskaTrackEncoding;
+
+/* Allocate a type-specific context into *p_context; see matroska-ids.c. */
+gboolean gst_matroska_track_init_video_context    (GstMatroskaTrackContext ** p_context);
+gboolean gst_matroska_track_init_audio_context    (GstMatroskaTrackContext ** p_context);
+gboolean gst_matroska_track_init_subtitle_context (GstMatroskaTrackContext ** p_context);
+
+void gst_matroska_register_tags (void);
+
+/* Helpers to split codec-private data into per-header GstBuffers. */
+GstBufferList * gst_matroska_parse_xiph_stream_headers  (gpointer codec_data,
+    gsize codec_data_size);
+
+GstBufferList * gst_matroska_parse_speex_stream_headers (gpointer codec_data,
+    gsize codec_data_size);
+
+GstBufferList * gst_matroska_parse_opus_stream_headers  (gpointer codec_data,
+    gsize codec_data_size);
+
+GstBufferList * gst_matroska_parse_flac_stream_headers  (gpointer codec_data,
+    gsize codec_data_size);
+void gst_matroska_track_free (GstMatroskaTrackContext * track);
+GstClockTime gst_matroska_track_get_buffer_timestamp (GstMatroskaTrackContext * track, GstBuffer *buf);
+
+#endif /* __GST_MATROSKA_IDS_H__ */
diff --git a/gst/matroska/matroska-mux.c b/gst/matroska/matroska-mux.c
new file mode 100644
index 0000000000..c7a5adfd60
--- /dev/null
+++ b/gst/matroska/matroska-mux.c
@@ -0,0 +1,4458 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2005 Michal Benes <michal.benes@xeris.cz>
+ * (c) 2008 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * (c) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ *
+ * matroska-mux.c: matroska file/stream muxer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* TODO: - check everywhere that we don't write invalid values
+ * - make sure timestamps are correctly scaled everywhere
+ */
+
+/**
+ * SECTION:element-matroskamux
+ * @title: matroskamux
+ *
+ * matroskamux muxes different input streams into a Matroska file.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/mp3 ! mpegaudioparse ! matroskamux name=mux ! filesink location=test.mkv filesrc location=/path/to/theora.ogg ! oggdemux ! theoraparse ! mux.
+ * ]| This pipeline muxes an MP3 file and an Ogg Theora video into a Matroska file.
+ * |[
+ * gst-launch-1.0 -v audiotestsrc num-buffers=100 ! audioconvert ! vorbisenc ! matroskamux ! filesink location=test.mka
+ * ]| This pipeline muxes a 440Hz sine wave encoded with the Vorbis codec into a Matroska file.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <stdio.h>
+#include <string.h>
+
+#include <gst/audio/audio.h>
+#include <gst/riff/riff-media.h>
+#include <gst/tag/tag.h>
+#include <gst/pbutils/codec-utils.h>
+
+#include "gstmatroskaelements.h"
+#include "matroska-mux.h"
+#include "matroska-ids.h"
+
+#define GST_MATROSKA_MUX_CHAPLANG "und"
+
+GST_DEBUG_CATEGORY_STATIC (matroskamux_debug);
+#define GST_CAT_DEFAULT matroskamux_debug
+
+/* Element property IDs. */
+enum
+{
+  PROP_0,
+  PROP_WRITING_APP,
+  PROP_DOCTYPE_VERSION,
+  PROP_MIN_INDEX_INTERVAL,
+  PROP_STREAMABLE,
+  PROP_TIMECODESCALE,
+  PROP_MIN_CLUSTER_DURATION,
+  PROP_MAX_CLUSTER_DURATION,
+  PROP_OFFSET_TO_ZERO,
+  PROP_CREATION_TIME,
+  PROP_CLUSTER_TIMESTAMP_OFFSET,
+};
+
+/* Property defaults. */
+#define DEFAULT_DOCTYPE_VERSION         2
+#define DEFAULT_WRITING_APP             "GStreamer Matroska muxer"
+#define DEFAULT_MIN_INDEX_INTERVAL      0
+#define DEFAULT_STREAMABLE              FALSE
+#define DEFAULT_TIMECODESCALE           GST_MSECOND
+#define DEFAULT_MIN_CLUSTER_DURATION    500 * GST_MSECOND
+/* NOTE(review): 65535 ms — presumably because block timestamps are 16-bit
+ * relative to the cluster at the default ms timescale; confirm. */
+#define DEFAULT_MAX_CLUSTER_DURATION    65535 * GST_MSECOND
+#define DEFAULT_OFFSET_TO_ZERO          FALSE
+#define DEFAULT_CLUSTER_TIMESTAMP_OFFSET 0
+
+/* WAVEFORMATEX is gst_riff_strf_auds + an extra guint16 extension size */
+#define WAVEFORMATEX_SIZE  (2 + sizeof (gst_riff_strf_auds))
+
+/* Output: plain, 3D or audio-only Matroska. */
+static GstStaticPadTemplate src_templ = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/x-matroska; video/x-matroska-3d; audio/x-matroska")
+    );
+
+#define COMMON_VIDEO_CAPS \
+  "width = (int) [ 1, MAX ], " \
+  "height = (int) [ 1, MAX ] "
+
+/* FIXME:
+ * * require codec data, etc as needed
+ */
+
+/* Accepted video input formats (one request pad per stream). */
+static GstStaticPadTemplate videosink_templ =
+    GST_STATIC_PAD_TEMPLATE ("video_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("video/mpeg, "
+        "mpegversion = (int) { 1, 2, 4 }, "
+        "systemstream = (boolean) false, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-h264, stream-format = (string) { avc, avc3 }, alignment=au, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-h265, stream-format = (string) { hvc1, hev1 }, alignment=au, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-divx, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-huffyuv, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-dv, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-h263, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-msmpeg, "
+        COMMON_VIDEO_CAPS "; "
+        "image/jpeg, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-theora; "
+        "video/x-dirac, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-pn-realvideo, "
+        "rmversion = (int) [1, 4], "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-vp8, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-vp9, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-raw, "
+        "format = (string) { YUY2, I420, YV12, UYVY, AYUV, GRAY8, BGR, RGB }, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-prores, "
+        COMMON_VIDEO_CAPS "; "
+        "video/x-wmv, " "wmvversion = (int) [ 1, 3 ], " COMMON_VIDEO_CAPS "; "
+        "video/x-av1, " COMMON_VIDEO_CAPS ";"
+        "video/x-ffv, ffversion = (int) 1, " COMMON_VIDEO_CAPS)
+    );
+
+#define COMMON_AUDIO_CAPS \
+  "channels = (int) [ 1, MAX ], " \
+  "rate = (int) [ 1, MAX ]"
+
+/* FIXME:
+ * * require codec data, etc as needed
+ */
+/* Accepted audio input formats (one request pad per stream). */
+static GstStaticPadTemplate audiosink_templ =
+    GST_STATIC_PAD_TEMPLATE ("audio_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("audio/mpeg, "
+        "mpegversion = (int) 1, "
+        "layer = (int) [ 1, 3 ], "
+        COMMON_AUDIO_CAPS "; "
+        "audio/mpeg, "
+        "mpegversion = (int) { 2, 4 }, "
+        "stream-format = (string) raw, "
+        COMMON_AUDIO_CAPS "; "
+        "audio/x-ac3, "
+        COMMON_AUDIO_CAPS "; "
+        "audio/x-eac3, "
+        COMMON_AUDIO_CAPS "; "
+        "audio/x-dts, "
+        COMMON_AUDIO_CAPS "; "
+        "audio/x-vorbis, "
+        COMMON_AUDIO_CAPS "; "
+        "audio/x-flac, "
+        COMMON_AUDIO_CAPS "; "
+        "audio/x-opus; "
+        "audio/x-speex, "
+        COMMON_AUDIO_CAPS "; "
+        "audio/x-raw, "
+        "format = (string) { U8, S16BE, S16LE, S24BE, S24LE, S32BE, S32LE, F32LE, F64LE }, "
+        "layout = (string) interleaved, "
+        COMMON_AUDIO_CAPS ";"
+        "audio/x-tta, "
+        "width = (int) { 8, 16, 24 }, "
+        "channels = (int) { 1, 2 }, " "rate = (int) [ 8000, 96000 ]; "
+        "audio/x-pn-realaudio, "
+        "raversion = (int) { 1, 2, 8 }, " COMMON_AUDIO_CAPS "; "
+        "audio/x-wma, " "wmaversion = (int) [ 1, 3 ], "
+        "block_align = (int) [ 0, 65535 ], bitrate = (int) [ 0, 524288 ], "
+        COMMON_AUDIO_CAPS ";"
+        "audio/x-alaw, "
+        "channels = (int) {1, 2}, " "rate = (int) [ 8000, 192000 ]; "
+        "audio/x-mulaw, "
+        "channels = (int) {1, 2}, " "rate = (int) [ 8000, 192000 ]; "
+        "audio/x-adpcm, "
+        "layout = (string)dvi, "
+        "block_align = (int)[64, 8192], "
+        "channels = (int) { 1, 2 }, " "rate = (int) [ 8000, 96000 ]; "
+        "audio/G722, "
+        "channels = (int)1," "rate = (int)16000; "
+        "audio/x-adpcm, "
+        "layout = (string)g726, " "channels = (int)1," "rate = (int)8000; ")
+    );
+
+/* Accepted subtitle input formats (one request pad per stream). */
+static GstStaticPadTemplate subtitlesink_templ =
+    GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("subtitle/x-kate; "
+        "text/x-raw, format=utf8; application/x-ssa; application/x-ass; "
+        "application/x-usf; subpicture/x-dvd; "
+        "application/x-subtitle-unknown")
+    );
+
+static gpointer parent_class;   /* NULL */
+
+/* GObject/GstElement type boilerplate: class/instance init and finalize
+ * (the old comment labelled all three "destructor", which only fit
+ * _finalize) */
+static void gst_matroska_mux_class_init (GstMatroskaMuxClass * klass);
+static void gst_matroska_mux_init (GstMatroskaMux * mux, gpointer g_class);
+static void gst_matroska_mux_finalize (GObject * object);
+
+/* Pads collected callback */
+static GstFlowReturn gst_matroska_mux_handle_buffer (GstCollectPads * pads,
+    GstCollectData * data, GstBuffer * buf, gpointer user_data);
+static gboolean gst_matroska_mux_handle_sink_event (GstCollectPads * pads,
+    GstCollectData * data, GstEvent * event, gpointer user_data);
+
+/* pad functions */
+static gboolean gst_matroska_mux_handle_src_event (GstPad * pad,
+    GstObject * parent, GstEvent * event);
+static GstPad *gst_matroska_mux_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_matroska_mux_release_pad (GstElement * element, GstPad * pad);
+
+/* gst internal change state handler */
+static GstStateChangeReturn
+gst_matroska_mux_change_state (GstElement * element, GstStateChange transition);
+
+/* GObject property get/set */
+static void gst_matroska_mux_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_matroska_mux_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec);
+
+/* reset muxer */
+static void gst_matroska_mux_reset (GstElement * element);
+
+/* uid generation */
+static guint64 gst_matroska_mux_create_uid (GstMatroskaMux * mux);
+
+/* Per-codec helpers that turn GStreamer streamheaders into Matroska
+ * CodecPrivate data. */
+static gboolean theora_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context);
+static gboolean vorbis_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context);
+static gboolean speex_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context);
+static gboolean kate_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context);
+static gboolean flac_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context);
+static void
+gst_matroska_mux_write_simple_tag (const GstTagList * list, const gchar * tag,
+    gpointer data);
+static gboolean gst_matroska_mux_tag_list_is_empty (const GstTagList * list);
+static void gst_matroska_mux_write_streams_tags (GstMatroskaMux * mux);
+static gboolean gst_matroska_mux_streams_have_tags (GstMatroskaMux * mux);
+
+/* Cannot use boilerplate macros here because we need the full init function
+ * signature with the additional class argument, so we use the right template
+ * for the sink caps */
+GType
+gst_matroska_mux_get_type (void)
+{
+  /* Registration must happen exactly once; a plain "if (type == 0)" check
+   * on a static is racy if two threads hit the first call concurrently,
+   * so use GLib's g_once_init_enter()/g_once_init_leave() pattern. */
+  static gsize object_type = 0;
+
+  if (g_once_init_enter (&object_type)) {
+    GType type;
+    static const GTypeInfo object_info = {
+      sizeof (GstMatroskaMuxClass),
+      NULL,                     /* base_init */
+      NULL,                     /* base_finalize */
+      (GClassInitFunc) gst_matroska_mux_class_init,
+      NULL,                     /* class_finalize */
+      NULL,                     /* class_data */
+      sizeof (GstMatroskaMux),
+      0,                        /* n_preallocs */
+      (GInstanceInitFunc) gst_matroska_mux_init
+    };
+    const GInterfaceInfo iface_info = { NULL };
+
+    type = g_type_register_static (GST_TYPE_ELEMENT,
+        "GstMatroskaMux", &object_info, (GTypeFlags) 0);
+
+    /* add interfaces before the type becomes visible to other threads */
+    g_type_add_interface_static (type, GST_TYPE_TAG_SETTER, &iface_info);
+    g_type_add_interface_static (type, GST_TYPE_TOC_SETTER, &iface_info);
+
+    g_once_init_leave (&object_type, type);
+  }
+
+  return (GType) object_type;
+}
+
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (matroskamux, "matroskamux",
+    GST_RANK_PRIMARY, GST_TYPE_MATROSKA_MUX, matroska_element_init (plugin));
+
+/* Class initializer: installs pad templates, element metadata, properties
+ * and the element vfuncs. Runs once per process. */
+static void
+gst_matroska_mux_class_init (GstMatroskaMuxClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &videosink_templ);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &audiosink_templ);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &subtitlesink_templ);
+  gst_element_class_add_static_pad_template (gstelement_class, &src_templ);
+  gst_element_class_set_static_metadata (gstelement_class, "Matroska muxer",
+      "Codec/Muxer",
+      "Muxes video/audio/subtitle streams into a matroska stream",
+      "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+
+  GST_DEBUG_CATEGORY_INIT (matroskamux_debug, "matroskamux", 0,
+      "Matroska muxer");
+
+  gobject_class->finalize = gst_matroska_mux_finalize;
+
+  gobject_class->get_property = gst_matroska_mux_get_property;
+  gobject_class->set_property = gst_matroska_mux_set_property;
+
+  g_object_class_install_property (gobject_class, PROP_WRITING_APP,
+      g_param_spec_string ("writing-app", "Writing application.",
+          /* fixed blurb typo: "The name the application" */
+          "The name of the application that creates the matroska file.",
+          NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_DOCTYPE_VERSION,
+      g_param_spec_int ("version", "DocType version",
+          "This parameter determines what Matroska features can be used.",
+          1, 2, DEFAULT_DOCTYPE_VERSION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_MIN_INDEX_INTERVAL,
+      g_param_spec_int64 ("min-index-interval", "Minimum time between index "
+          "entries", "An index entry is created every so many nanoseconds.",
+          0, G_MAXINT64, DEFAULT_MIN_INDEX_INTERVAL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_STREAMABLE,
+      g_param_spec_boolean ("streamable", "Determines whether output should "
+          "be streamable", "If set to true, the output should be as if it is "
+          "to be streamed and hence no indexes written or duration written.",
+          DEFAULT_STREAMABLE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_TIMECODESCALE,
+      g_param_spec_int64 ("timecodescale", "Timecode Scale",
+          "TimecodeScale used to calculate the Raw Timecode of a Block", 1,
+          GST_SECOND, DEFAULT_TIMECODESCALE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_MIN_CLUSTER_DURATION,
+      g_param_spec_int64 ("min-cluster-duration", "Minimum cluster duration",
+          "Desired cluster duration as nanoseconds. A new cluster will be "
+          "created irrespective of this property if a force key unit event "
+          "is received. 0 means create a new cluster for each video keyframe "
+          "or for each audio buffer in audio only streams.", 0,
+          G_MAXINT64, DEFAULT_MIN_CLUSTER_DURATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_MAX_CLUSTER_DURATION,
+      g_param_spec_int64 ("max-cluster-duration", "Maximum cluster duration",
+          "A new cluster will be created if its duration exceeds this value. "
+          "0 means no maximum duration.", 0,
+          G_MAXINT64, DEFAULT_MAX_CLUSTER_DURATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_OFFSET_TO_ZERO,
+      g_param_spec_boolean ("offset-to-zero", "Offset To Zero",
+          "Offsets all streams so that the " "earliest stream starts at 0.",
+          DEFAULT_OFFSET_TO_ZERO, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_CREATION_TIME,
+      g_param_spec_boxed ("creation-time", "Creation Time",
+          "Date and time of creation. This will be used for the DateUTC field."
+          " NULL means that the current time will be used.",
+          G_TYPE_DATE_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstMatroskaMux:cluster-timestamp-offset:
+   *
+   * An offset to add to all clusters/blocks (in nanoseconds)
+   *
+   * Since: 1.20
+   */
+  g_object_class_install_property (gobject_class, PROP_CLUSTER_TIMESTAMP_OFFSET,
+      g_param_spec_uint64 ("cluster-timestamp-offset",
+          "Cluster timestamp offset",
+          "An offset to add to all clusters/blocks (in nanoseconds)", 0,
+          G_MAXUINT64, DEFAULT_CLUSTER_TIMESTAMP_OFFSET,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_matroska_mux_change_state);
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_matroska_mux_request_new_pad);
+  gstelement_class->release_pad =
+      GST_DEBUG_FUNCPTR (gst_matroska_mux_release_pad);
+
+  parent_class = g_type_class_peek_parent (klass);
+}
+
+/*
+ * Start of pad option handler code
+ */
+#define DEFAULT_PAD_FRAME_DURATION TRUE
+
+/* Per-pad property IDs. */
+enum
+{
+  PROP_PAD_0,
+  PROP_PAD_FRAME_DURATION
+};
+
+/* Custom sink-pad subclass carrying the "frame-duration" option. */
+typedef struct
+{
+  GstPad parent;
+  gboolean frame_duration;        /* current "frame-duration" value */
+  gboolean frame_duration_user;   /* TRUE once the user set the property
+                                   * explicitly (see set_property below) */
+} GstMatroskamuxPad;
+
+typedef GstPadClass GstMatroskamuxPadClass;
+
+GType gst_matroskamux_pad_get_type (void);
+G_DEFINE_TYPE (GstMatroskamuxPad, gst_matroskamux_pad, GST_TYPE_PAD);
+
+#define GST_TYPE_MATROSKAMUX_PAD (gst_matroskamux_pad_get_type())
+#define GST_MATROSKAMUX_PAD(pad) (G_TYPE_CHECK_INSTANCE_CAST((pad),GST_TYPE_MATROSKAMUX_PAD,GstMatroskamuxPad))
+#define GST_MATROSKAMUX_PAD_CAST(pad) ((GstMatroskamuxPad *) pad)
+#define GST_IS_MATROSKAMUX_PAD(pad) (G_TYPE_CHECK_INSTANCE_TYPE((pad),GST_TYPE_MATROSKAMUX_PAD))
+
+/* GObject getter for the pad's "frame-duration" property. */
+static void
+gst_matroskamux_pad_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMatroskamuxPad *self = GST_MATROSKAMUX_PAD (object);
+
+  if (prop_id == PROP_PAD_FRAME_DURATION)
+    g_value_set_boolean (value, self->frame_duration);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject setter for the pad's "frame-duration" property; also records
+ * that the user set the value explicitly. */
+static void
+gst_matroskamux_pad_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMatroskamuxPad *self = GST_MATROSKAMUX_PAD (object);
+
+  if (prop_id == PROP_PAD_FRAME_DURATION) {
+    self->frame_duration = g_value_get_boolean (value);
+    self->frame_duration_user = TRUE;
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+}
+
+/* Class initializer for the custom pad: wire up property handlers and
+ * install the "frame-duration" property. */
+static void
+gst_matroskamux_pad_class_init (GstMatroskamuxPadClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+
+  gobject_class->set_property = gst_matroskamux_pad_set_property;
+  gobject_class->get_property = gst_matroskamux_pad_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_PAD_FRAME_DURATION,
+      g_param_spec_boolean ("frame-duration", "Frame duration",
+          "Default frame duration", DEFAULT_PAD_FRAME_DURATION,
+          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+}
+
+/* Instance initializer: start from the default, not-user-set state. */
+static void
+gst_matroskamux_pad_init (GstMatroskamuxPad * pad)
+{
+  pad->frame_duration = DEFAULT_PAD_FRAME_DURATION;
+  pad->frame_duration_user = FALSE;
+}
+
+/*
+ * End of pad option handler code
+ */
+
+/**
+ * gst_matroska_mux_init:
+ * @mux: the #GstMatroskaMux instance being initialized.
+ * @g_class: the class, used to look up the "src" pad template.
+ *
+ * Instance initializer: creates the source pad, sets up collectpads for
+ * the request sink pads, creates the EBML writer and applies property
+ * defaults, then calls gst_matroska_mux_reset() for per-run state.
+ */
+static void
+gst_matroska_mux_init (GstMatroskaMux * mux, gpointer g_class)
+{
+  GstPadTemplate *templ;
+
+  templ =
+      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "src");
+  mux->srcpad = gst_pad_new_from_template (templ, "src");
+
+  gst_pad_set_event_function (mux->srcpad, gst_matroska_mux_handle_src_event);
+  gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
+  gst_pad_use_fixed_caps (mux->srcpad);
+
+  /* all sink pads are request pads, driven through collectpads */
+  mux->collect = gst_collect_pads_new ();
+  gst_collect_pads_set_clip_function (mux->collect,
+      GST_DEBUG_FUNCPTR (gst_collect_pads_clip_running_time), mux);
+  gst_collect_pads_set_buffer_function (mux->collect,
+      GST_DEBUG_FUNCPTR (gst_matroska_mux_handle_buffer), mux);
+  gst_collect_pads_set_event_function (mux->collect,
+      GST_DEBUG_FUNCPTR (gst_matroska_mux_handle_sink_event), mux);
+
+  mux->ebml_write = gst_ebml_write_new (mux->srcpad);
+  mux->doctype = GST_MATROSKA_DOCTYPE_MATROSKA;
+
+  /* property defaults */
+  mux->doctype_version = DEFAULT_DOCTYPE_VERSION;
+  mux->writing_app = g_strdup (DEFAULT_WRITING_APP);
+  mux->min_index_interval = DEFAULT_MIN_INDEX_INTERVAL;
+  mux->ebml_write->streamable = DEFAULT_STREAMABLE;
+  mux->time_scale = DEFAULT_TIMECODESCALE;
+  mux->min_cluster_duration = DEFAULT_MIN_CLUSTER_DURATION;
+  mux->max_cluster_duration = DEFAULT_MAX_CLUSTER_DURATION;
+  mux->cluster_timestamp_offset = DEFAULT_CLUSTER_TIMESTAMP_OFFSET;
+
+  /* initialize internal variables */
+  mux->index = NULL;
+  mux->num_streams = 0;
+  mux->num_a_streams = 0;
+  mux->num_t_streams = 0;
+  mux->num_v_streams = 0;
+  mux->internal_toc = NULL;
+
+  /* initialize remaining variables */
+  gst_matroska_mux_reset (GST_ELEMENT (mux));
+}
+
+
+/**
+ * gst_matroska_mux_finalize:
+ * @object: #GstMatroskaMux that should be finalized.
+ *
+ * Release everything the muxer still owns: the pending force-key-unit
+ * event, the collect pads, the EBML writer, the writing-app string, the
+ * creation time and the internal TOC.
+ */
+static void
+gst_matroska_mux_finalize (GObject * object)
+{
+  GstMatroskaMux *mux = GST_MATROSKA_MUX (object);
+
+  gst_event_replace (&mux->force_key_unit_event, NULL);
+
+  gst_object_unref (mux->collect);
+  gst_object_unref (mux->ebml_write);
+  g_free (mux->writing_app);
+  g_clear_pointer (&mux->creation_time, g_date_time_unref);
+  g_clear_pointer (&mux->internal_toc, (GDestroyNotify) gst_toc_unref);
+
+  /* chain up */
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+/**
+ * gst_matroska_mux_create_uid:
+ * @mux: #GstMatroskaMux to generate UID for.
+ *
+ * Generate a new track UID.
+ *
+ * The Matroska specification requires TrackUID to be a non-zero
+ * unsigned integer, so keep drawing random 64-bit values until a
+ * non-zero one comes up (0 is astronomically unlikely, but would
+ * produce an invalid file).
+ *
+ * Returns: New non-zero track UID.
+ */
+static guint64
+gst_matroska_mux_create_uid (GstMatroskaMux * mux)
+{
+  guint64 uid;
+
+  do {
+    uid = (((guint64) g_random_int ()) << 32) | g_random_int ();
+  } while (uid == 0);
+
+  return uid;
+}
+
+
+/**
+ * gst_matroska_pad_reset:
+ * @collect_pad: the #GstMatroskaPad
+ * @full: %TRUE to release everything (the pad is going away), %FALSE to
+ *        replace the track context with a pristine one for reuse
+ *
+ * Reset and/or release resources of a matroska collect pad.  On a
+ * partial reset the track name and type survive into the fresh context.
+ */
+static void
+gst_matroska_pad_reset (GstMatroskaPad * collect_pad, gboolean full)
+{
+  gchar *name = NULL;
+  GstMatroskaTrackType type = 0;
+
+  /* free track information */
+  if (collect_pad->track != NULL) {
+    /* retrieve for optional later use */
+    name = collect_pad->track->name;
+    type = collect_pad->track->type;
+    /* extra for video */
+    if (type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+      GstMatroskaTrackVideoContext *ctx =
+          (GstMatroskaTrackVideoContext *) collect_pad->track;
+
+      if (ctx->dirac_unit) {
+        gst_buffer_unref (ctx->dirac_unit);
+        ctx->dirac_unit = NULL;
+      }
+    }
+    g_free (collect_pad->track->codec_id);
+    g_free (collect_pad->track->codec_name);
+    /* on a partial reset the name string is NOT freed: ownership is
+     * handed over to the replacement context created below */
+    if (full)
+      g_free (collect_pad->track->name);
+    g_free (collect_pad->track->language);
+    g_free (collect_pad->track->codec_priv);
+    g_free (collect_pad->track);
+    collect_pad->track = NULL;
+    if (collect_pad->tags) {
+      gst_tag_list_unref (collect_pad->tags);
+      collect_pad->tags = NULL;
+    }
+  }
+
+  if (!full && type != 0) {
+    GstMatroskaTrackContext *context;
+
+    /* create a fresh context */
+    switch (type) {
+      case GST_MATROSKA_TRACK_TYPE_VIDEO:
+        context = (GstMatroskaTrackContext *)
+            g_new0 (GstMatroskaTrackVideoContext, 1);
+        break;
+      case GST_MATROSKA_TRACK_TYPE_AUDIO:
+        context = (GstMatroskaTrackContext *)
+            g_new0 (GstMatroskaTrackAudioContext, 1);
+        break;
+      case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+        context = (GstMatroskaTrackContext *)
+            g_new0 (GstMatroskaTrackSubtitleContext, 1);
+        break;
+      default:
+        g_assert_not_reached ();
+        return;
+    }
+
+    context->type = type;
+    context->name = name;       /* takes ownership of the previous name */
+    context->uid = gst_matroska_mux_create_uid (collect_pad->mux);
+    /* TODO: check default values for the context */
+    context->flags = GST_MATROSKA_TRACK_ENABLED | GST_MATROSKA_TRACK_DEFAULT;
+    collect_pad->track = context;
+    collect_pad->start_ts = GST_CLOCK_TIME_NONE;
+    collect_pad->end_ts = GST_CLOCK_TIME_NONE;
+    collect_pad->tags = gst_tag_list_new_empty ();
+    gst_tag_list_set_scope (collect_pad->tags, GST_TAG_SCOPE_STREAM);
+  }
+}
+
+/**
+ * gst_matroska_pad_free:
+ * @collect_pad: the #GstMatroskaPad
+ *
+ * Fully release the resources of a matroska collect pad; thin wrapper
+ * around gst_matroska_pad_reset() with @full set.
+ */
+static void
+gst_matroska_pad_free (GstPad * collect_pad)
+{
+  GstMatroskaPad *pad = (GstMatroskaPad *) collect_pad;
+
+  gst_matroska_pad_reset (pad, TRUE);
+}
+
+
+/**
+ * gst_matroska_mux_reset:
+ * @element: #GstMatroskaMux that should be reset.
+ *
+ * Reset matroska muxer back to initial state: fresh EBML writer state,
+ * pristine track contexts on every collect pad, and cleared index,
+ * cluster, tag and chapter bookkeeping.
+ */
+static void
+gst_matroska_mux_reset (GstElement * element)
+{
+  GstMatroskaMux *mux = GST_MATROSKA_MUX (element);
+  GSList *walk;
+
+  /* reset EBML write */
+  gst_ebml_write_reset (mux->ebml_write);
+
+  /* reset input */
+  mux->state = GST_MATROSKA_MUX_STATE_START;
+
+  /* clean up existing streams */
+
+  for (walk = mux->collect->data; walk; walk = g_slist_next (walk)) {
+    GstMatroskaPad *collect_pad;
+
+    collect_pad = (GstMatroskaPad *) walk->data;
+
+    /* reset collect pad to pristine state (keeps name/type, new context) */
+    gst_matroska_pad_reset (collect_pad, FALSE);
+  }
+
+  /* reset indexes */
+  mux->num_indexes = 0;
+  g_free (mux->index);
+  mux->index = NULL;
+
+  /* reset timers */
+  mux->duration = 0;
+
+  /* reset cluster */
+  mux->cluster = 0;
+  mux->cluster_time = 0;
+  mux->cluster_pos = 0;
+  mux->prev_cluster_size = 0;
+
+  /* reset tags */
+  gst_tag_setter_reset_tags (GST_TAG_SETTER (mux));
+
+  mux->tags_pos = 0;
+
+  /* reset chapters */
+  gst_toc_setter_reset (GST_TOC_SETTER (mux));
+  if (mux->internal_toc) {
+    gst_toc_unref (mux->internal_toc);
+    mux->internal_toc = NULL;
+  }
+
+  mux->chapters_pos = 0;
+}
+
+/**
+ * gst_matroska_mux_handle_src_event:
+ * @pad: Pad which received the event.
+ * @parent: Parent of the pad.
+ * @event: Received event.
+ *
+ * Source-pad event handler: refuses seeks (a muxer output is not
+ * seekable), everything else is handled by the default pad handler.
+ *
+ * Returns: %TRUE on success.
+ */
+static gboolean
+gst_matroska_mux_handle_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  if (event != NULL && GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+    /* disable seeking for now */
+    return FALSE;
+  }
+
+  return gst_pad_event_default (pad, parent, event);
+}
+
+
+/* Free and clear any codec private data stored in @context. */
+static void
+gst_matroska_mux_free_codec_priv (GstMatroskaTrackContext * context)
+{
+  if (context->codec_priv == NULL)
+    return;
+
+  g_free (context->codec_priv);
+  context->codec_priv = NULL;
+  context->codec_priv_size = 0;
+}
+
+/* Build VobSub codec private data (currently only the "palette:" line)
+ * for a subtitle track from a 16-entry YUV colour lookup table.
+ *
+ * The YUV->RGB conversion deliberately replicates vobsub's slightly off
+ * calculation.  Each component is converted to a signed integer before
+ * the bias is subtracted: @clut entries are unsigned, so doing the
+ * subtraction in unsigned arithmetic would wrap around for Y < 16 or
+ * chroma < 128 and produce wildly wrong colours. */
+static void
+gst_matroska_mux_build_vobsub_private (GstMatroskaTrackContext * context,
+    const guint * clut)
+{
+  gchar *clutv[17];
+  gchar *sclut;
+  gint i;
+  guint32 col;
+  gdouble y, u, v;
+  guint8 r, g, b;
+
+  /* produce comma-separated list in hex format */
+  for (i = 0; i < 16; ++i) {
+    col = clut[i];
+    /* replicate vobsub's slightly off RGB conversion calculation;
+     * cast to gint first so the bias subtraction is signed */
+    y = ((gint) ((col >> 16) & 0xff) - 16) * 255 / 219;
+    u = (gint) ((col >> 8) & 0xff) - 128;
+    v = (gint) (col & 0xff) - 128;
+    r = CLAMP (1.0 * y + 1.4022 * u, 0, 255);
+    g = CLAMP (1.0 * y - 0.3456 * u - 0.7145 * v, 0, 255);
+    b = CLAMP (1.0 * y + 1.7710 * v, 0, 255);
+    clutv[i] = g_strdup_printf ("%02x%02x%02x", r, g, b);
+  }
+  /* NULL-terminate the vector for g_strjoinv() */
+  clutv[i] = NULL;
+  sclut = g_strjoinv (",", clutv);
+
+  /* build codec private; only palette for now */
+  gst_matroska_mux_free_codec_priv (context);
+  context->codec_priv = (guint8 *) g_strdup_printf ("palette: %s", sclut);
+  /* include terminating 0 */
+  context->codec_priv_size = strlen ((gchar *) context->codec_priv) + 1;
+  g_free (sclut);
+  for (i = 0; i < 16; ++i) {
+    g_free (clutv[i]);
+  }
+}
+
+
+/**
+ * gst_matroska_mux_handle_sink_event:
+ * @pads: the #GstCollectPads
+ * @data: collect data of the pad that received the event
+ * @event: received event
+ * @user_data: the #GstMatroskaMux
+ *
+ * Handle informational sink events: caps, tags, TOC and custom
+ * downstream events (force-key-unit, DVD SPU palette changes).  Events
+ * consumed here are unreffed and not forwarded; everything else goes to
+ * the collectpads default handler.
+ *
+ * Returns: %TRUE on success.
+ */
+static gboolean
+gst_matroska_mux_handle_sink_event (GstCollectPads * pads,
+    GstCollectData * data, GstEvent * event, gpointer user_data)
+{
+  GstMatroskaPad *collect_pad;
+  GstMatroskaTrackContext *context;
+  GstMatroskaMux *mux;
+  GstPad *pad;
+  GstTagList *list;
+  gboolean ret = TRUE;
+
+  mux = GST_MATROSKA_MUX (user_data);
+  collect_pad = (GstMatroskaPad *) data;
+  pad = data->pad;
+  context = collect_pad->track;
+  g_assert (context);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:{
+      GstCaps *caps;
+
+      collect_pad = (GstMatroskaPad *) gst_pad_get_element_private (pad);
+      gst_event_parse_caps (event, &caps);
+
+      /* dispatch to the per-media-type setcaps handler */
+      ret = collect_pad->capsfunc (pad, caps);
+      gst_event_unref (event);
+      event = NULL;
+      break;
+    }
+    case GST_EVENT_TAG:{
+      gchar *lang = NULL;
+
+      GST_DEBUG_OBJECT (mux, "received tag event");
+      gst_event_parse_tag (event, &list);
+
+      /* Matroska wants ISO 639-2B code, taglist most likely contains 639-1 */
+      if (gst_tag_list_get_string (list, GST_TAG_LANGUAGE_CODE, &lang)) {
+        const gchar *lang_code;
+
+        lang_code = gst_tag_get_language_code_iso_639_2B (lang);
+        if (lang_code) {
+          GST_INFO_OBJECT (pad, "Setting language to '%s'", lang_code);
+          g_free (context->language);
+          context->language = g_strdup (lang_code);
+        } else {
+          GST_WARNING_OBJECT (pad, "Did not get language code for '%s'", lang);
+        }
+        g_free (lang);
+      }
+
+      /* FIXME: what about stream-specific tags? */
+      if (gst_tag_list_get_scope (list) == GST_TAG_SCOPE_GLOBAL) {
+        gst_tag_setter_merge_tags (GST_TAG_SETTER (mux), list,
+            gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (mux)));
+      } else {
+        gst_tag_list_insert (collect_pad->tags, list, GST_TAG_MERGE_REPLACE);
+      }
+
+      gst_event_unref (event);
+      /* handled this, don't want collectpads to forward it downstream */
+      event = NULL;
+      ret = TRUE;
+      break;
+    }
+    case GST_EVENT_TOC:{
+      GstToc *toc, *old_toc;
+
+      /* chapters were already written; too late to change the TOC */
+      if (mux->chapters_pos > 0)
+        break;
+
+      GST_DEBUG_OBJECT (mux, "received toc event");
+      gst_event_parse_toc (event, &toc, NULL);
+
+      if (toc != NULL) {
+        old_toc = gst_toc_setter_get_toc (GST_TOC_SETTER (mux));
+        if (old_toc != NULL) {
+          if (old_toc != toc)
+            GST_INFO_OBJECT (pad, "Replacing TOC with a new one");
+          gst_toc_unref (old_toc);
+        }
+
+        gst_toc_setter_set_toc (GST_TOC_SETTER (mux), toc);
+        gst_toc_unref (toc);
+      }
+
+      gst_event_unref (event);
+      /* handled this, don't want collectpads to forward it downstream */
+      event = NULL;
+      break;
+    }
+    case GST_EVENT_CUSTOM_DOWNSTREAM:
+    case GST_EVENT_CUSTOM_DOWNSTREAM_STICKY:{
+      const GstStructure *structure;
+
+      structure = gst_event_get_structure (event);
+      /* custom events may carry no structure at all; nothing to do then */
+      if (structure == NULL)
+        break;
+      if (gst_structure_has_name (structure, "GstForceKeyUnit")) {
+        /* take ownership of the event, replacing any pending one */
+        gst_event_replace (&mux->force_key_unit_event, NULL);
+        mux->force_key_unit_event = event;
+        event = NULL;
+      } else if (gst_structure_has_name (structure, "application/x-gst-dvd") &&
+          /* g_strcmp0: the "event" field may be absent or not a string,
+           * in which case gst_structure_get_string() returns NULL */
+          !g_strcmp0 ("dvd-spu-clut-change",
+              gst_structure_get_string (structure, "event"))) {
+        gchar name[16];
+        gint i, value;
+        guint clut[16];
+
+        GST_DEBUG_OBJECT (pad, "New DVD colour table received");
+        if (context->type != GST_MATROSKA_TRACK_TYPE_SUBTITLE) {
+          GST_DEBUG_OBJECT (pad, "... discarding");
+          break;
+        }
+        /* first transform event data into table form */
+        for (i = 0; i < 16; i++) {
+          g_snprintf (name, sizeof (name), "clut%02d", i);
+          if (!gst_structure_get_int (structure, name, &value)) {
+            GST_ERROR_OBJECT (mux, "dvd-spu-clut-change event did not "
+                "contain %s field", name);
+            goto break_hard;
+          }
+          clut[i] = value;
+        }
+
+        /* transform into private data for stream; text form */
+        gst_matroska_mux_build_vobsub_private (context, clut);
+      }
+    }
+      /* fall through */
+    default:
+      break;
+  }
+
+break_hard:
+  /* anything not consumed above is forwarded via the default handler */
+  if (event != NULL)
+    return gst_collect_pads_event_default (pads, data, event, FALSE);
+
+  return ret;
+}
+
+/* Replace @context's codec ID string with a copy of @id. */
+static void
+gst_matroska_mux_set_codec_id (GstMatroskaTrackContext * context,
+    const char *id)
+{
+  gchar *copy;
+
+  g_assert (context && id);
+  copy = g_strdup (id);
+  g_free (context->codec_id);
+  context->codec_id = copy;
+}
+
+/* Filter callback for check_new_caps(): decides which caps fields take
+ * part in the caps-change compatibility check.  Returns %FALSE to drop
+ * a field from the comparison, %TRUE to keep it. */
+static gboolean
+check_field (GQuark field_id, const GValue * value, gpointer user_data)
+{
+  GstStructure *structure = (GstStructure *) user_data;
+  const gchar *name = gst_structure_get_name (structure);
+  const gchar *stream_format =
+      gst_structure_get_string (structure, "stream-format");
+  gboolean inband_headers;
+
+  /* avc3 / hev1 carry their parameter sets in-band */
+  inband_headers =
+      (g_strcmp0 (name, "video/x-h264") == 0
+      && g_strcmp0 (stream_format, "avc3") == 0)
+      || (g_strcmp0 (name, "video/x-h265") == 0
+      && g_strcmp0 (stream_format, "hev1") == 0);
+
+  if (!inband_headers)
+    return TRUE;
+
+  /* While in theory, matroska only supports avc1 / hvc1, and doesn't support codec_data
+   * changes, in practice most decoders will use in-band SPS / PPS (avc3 / hev1), if the
+   * input stream is avc3 / hev1 we let the new codec_data slide to support "smart" encoding.
+   *
+   * We don't warn here as we already warned elsewhere.
+   */
+  if (field_id == g_quark_from_static_string ("codec_data")
+      || field_id == g_quark_from_static_string ("tier")
+      || field_id == g_quark_from_static_string ("profile")
+      || field_id == g_quark_from_static_string ("level"))
+    return FALSE;
+
+  return TRUE;
+}
+
+/* Compare @new_caps against @old_caps after stripping the fields that
+ * are allowed to change for in-band-header streams (see check_field()).
+ * Returns %TRUE if the new caps are a compatible subset of the old. */
+static gboolean
+check_new_caps (GstCaps * old_caps, GstCaps * new_caps)
+{
+  GstCaps *old_copy = gst_caps_copy (old_caps);
+  GstCaps *new_copy = gst_caps_copy (new_caps);
+  GstStructure *new_s = gst_caps_get_structure (new_copy, 0);
+  GstStructure *old_s = gst_caps_get_structure (old_copy, 0);
+  gboolean compatible;
+
+  gst_structure_filter_and_map_in_place (new_s,
+      (GstStructureFilterMapFunc) check_field, new_s);
+  gst_structure_filter_and_map_in_place (old_s,
+      (GstStructureFilterMapFunc) check_field, old_s);
+
+  compatible = gst_caps_is_subset (new_copy, old_copy);
+
+  gst_caps_unref (new_copy);
+  gst_caps_unref (old_copy);
+
+  return compatible;
+}
+
+/**
+ * gst_matroska_mux_video_pad_setcaps:
+ * @pad: Pad which got the caps.
+ * @caps: New caps.
+ *
+ * Setcaps function for video sink pads: maps the GStreamer media type
+ * and caps fields onto the track's Matroska codec ID, codec private
+ * data and video context (dimensions, framerate, colorimetry, HDR and
+ * stereoscopic metadata).
+ *
+ * Returns: %TRUE on success.
+ */
+static gboolean
+gst_matroska_mux_video_pad_setcaps (GstPad * pad, GstCaps * caps)
+{
+  GstMatroskaTrackContext *context = NULL;
+  GstMatroskaTrackVideoContext *videocontext;
+  GstMatroskaMux *mux;
+  GstMatroskaPad *collect_pad;
+  GstStructure *structure;
+  const gchar *mimetype;
+  const gchar *interlace_mode, *s;
+  const GValue *value = NULL;
+  GstBuffer *codec_buf = NULL;
+  gint width, height, pixel_width, pixel_height;
+  gint fps_d, fps_n;
+  /* initialized: gst_structure_get_flagset() leaves the out-param
+   * untouched when the field is absent */
+  guint multiview_flags = 0;
+  GstCaps *old_caps;
+
+  mux = GST_MATROSKA_MUX (GST_PAD_PARENT (pad));
+
+  /* caps changes after the headers were written are only allowed when
+   * they are compatible per check_new_caps() (in-band header streams) */
+  if ((old_caps = gst_pad_get_current_caps (pad))) {
+    if (mux->state >= GST_MATROSKA_MUX_STATE_HEADER
+        && !check_new_caps (old_caps, caps)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("Caps changes are not supported by Matroska\nCurrent: `%"
+              GST_PTR_FORMAT "`\nNew: `%" GST_PTR_FORMAT "`", old_caps, caps));
+      gst_caps_unref (old_caps);
+      goto refuse_caps;
+    }
+    gst_caps_unref (old_caps);
+  } else if (mux->state >= GST_MATROSKA_MUX_STATE_HEADER) {
+    GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+        ("Caps on pad %" GST_PTR_FORMAT
+            " arrived late. Headers were already written", pad));
+    goto refuse_caps;
+  }
+
+  /* find context */
+  collect_pad = (GstMatroskaPad *) gst_pad_get_element_private (pad);
+  g_assert (collect_pad);
+  context = collect_pad->track;
+  g_assert (context);
+  g_assert (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO);
+  videocontext = (GstMatroskaTrackVideoContext *) context;
+
+  /* gst -> matroska ID'ing */
+  structure = gst_caps_get_structure (caps, 0);
+
+  mimetype = gst_structure_get_name (structure);
+
+  interlace_mode = gst_structure_get_string (structure, "interlace-mode");
+  if (interlace_mode != NULL) {
+    if (strcmp (interlace_mode, "progressive") == 0)
+      videocontext->interlace_mode = GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
+    else
+      videocontext->interlace_mode = GST_MATROSKA_INTERLACE_MODE_INTERLACED;
+  } else {
+    videocontext->interlace_mode = GST_MATROSKA_INTERLACE_MODE_UNKNOWN;
+  }
+
+  if (!strcmp (mimetype, "video/x-theora")) {
+    /* we'll extract the details later from the theora identification header */
+    goto skip_details;
+  }
+
+  /* get general properties */
+  /* spec says it is mandatory */
+  if (!gst_structure_get_int (structure, "width", &width) ||
+      !gst_structure_get_int (structure, "height", &height))
+    goto refuse_caps;
+
+  videocontext->pixel_width = width;
+  videocontext->pixel_height = height;
+
+  /* derive a default frame duration from the framerate, if the pad's
+   * frame-duration property allows it */
+  if (GST_MATROSKAMUX_PAD_CAST (pad)->frame_duration
+      && gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)
+      && fps_n > 0) {
+    context->default_duration =
+        gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
+    GST_LOG_OBJECT (pad, "default duration = %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (context->default_duration));
+  } else {
+    context->default_duration = 0;
+  }
+  /* non-square pixels: scale the display size along the longer axis */
+  if (gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+          &pixel_width, &pixel_height)) {
+    if (pixel_width > pixel_height) {
+      videocontext->display_width = width * pixel_width / pixel_height;
+      videocontext->display_height = height;
+    } else if (pixel_width < pixel_height) {
+      videocontext->display_width = width;
+      videocontext->display_height = height * pixel_height / pixel_width;
+    } else {
+      videocontext->display_width = 0;
+      videocontext->display_height = 0;
+    }
+  } else {
+    videocontext->display_width = 0;
+    videocontext->display_height = 0;
+  }
+
+  if ((s = gst_structure_get_string (structure, "colorimetry"))) {
+    if (!gst_video_colorimetry_from_string (&videocontext->colorimetry, s)) {
+      GST_WARNING_OBJECT (pad, "Could not parse colorimetry %s", s);
+    }
+  }
+
+  if ((s = gst_structure_get_string (structure, "mastering-display-info"))) {
+    if (!gst_video_mastering_display_info_from_string
+        (&videocontext->mastering_display_info, s)) {
+      GST_WARNING_OBJECT (pad, "Could not parse mastering-display-metadata %s",
+          s);
+    } else {
+      videocontext->mastering_display_info_present = TRUE;
+    }
+  }
+
+  if ((s = gst_structure_get_string (structure, "content-light-level"))) {
+    if (!gst_video_content_light_level_from_string
+        (&videocontext->content_light_level, s))
+      GST_WARNING_OBJECT (pad, "Could not parse content-light-level %s", s);
+  }
+
+  /* Collect stereoscopic info, if any */
+  if ((s = gst_structure_get_string (structure, "multiview-mode")))
+    videocontext->multiview_mode =
+        gst_video_multiview_mode_from_caps_string (s);
+  gst_structure_get_flagset (structure, "multiview-flags", &multiview_flags,
+      NULL);
+  videocontext->multiview_flags = multiview_flags;
+
+
+skip_details:
+
+  videocontext->asr_mode = GST_MATROSKA_ASPECT_RATIO_MODE_FREE;
+  videocontext->fourcc = 0;
+
+  /* TODO: - check if we handle all codecs by the spec, i.e. codec private
+   *         data and other settings
+   *       - add new formats
+   */
+
+  /* extract codec_data, may turn out needed */
+  value = gst_structure_get_value (structure, "codec_data");
+  if (value)
+    codec_buf = (GstBuffer *) gst_value_get_buffer (value);
+
+  /* find type */
+  if (!strcmp (mimetype, "video/x-raw")) {
+    const gchar *fstr;
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_VIDEO_UNCOMPRESSED);
+    fstr = gst_structure_get_string (structure, "format");
+    if (fstr) {
+      if (strlen (fstr) == 4)
+        videocontext->fourcc = GST_STR_FOURCC (fstr);
+      else if (!strcmp (fstr, "GRAY8"))
+        videocontext->fourcc = GST_MAKE_FOURCC ('Y', '8', '0', '0');
+      else if (!strcmp (fstr, "BGR"))
+        videocontext->fourcc = GST_MAKE_FOURCC ('B', 'G', 'R', 24);
+      else if (!strcmp (fstr, "RGB"))
+        videocontext->fourcc = GST_MAKE_FOURCC ('R', 'G', 'B', 24);
+    }
+  } else if (!strcmp (mimetype, "video/x-huffyuv")      /* MS/VfW compatibility cases */
+      ||!strcmp (mimetype, "video/x-divx")
+      || !strcmp (mimetype, "video/x-dv")
+      || !strcmp (mimetype, "video/x-h263")
+      || !strcmp (mimetype, "video/x-msmpeg")
+      || !strcmp (mimetype, "video/x-wmv")
+      || !strcmp (mimetype, "image/jpeg")) {
+    gst_riff_strf_vids *bih;
+    gint size = sizeof (gst_riff_strf_vids);
+    guint32 fourcc = 0;
+
+    if (!strcmp (mimetype, "video/x-huffyuv"))
+      fourcc = GST_MAKE_FOURCC ('H', 'F', 'Y', 'U');
+    else if (!strcmp (mimetype, "video/x-dv"))
+      fourcc = GST_MAKE_FOURCC ('D', 'V', 'S', 'D');
+    else if (!strcmp (mimetype, "video/x-h263"))
+      fourcc = GST_MAKE_FOURCC ('H', '2', '6', '3');
+    else if (!strcmp (mimetype, "video/x-divx")) {
+      /* initialized: gst_structure_get_int() leaves the out-param
+       * untouched on failure and we switch on it below */
+      gint divxversion = 0;
+
+      gst_structure_get_int (structure, "divxversion", &divxversion);
+      switch (divxversion) {
+        case 3:
+          fourcc = GST_MAKE_FOURCC ('D', 'I', 'V', '3');
+          break;
+        case 4:
+          fourcc = GST_MAKE_FOURCC ('D', 'I', 'V', 'X');
+          break;
+        case 5:
+          fourcc = GST_MAKE_FOURCC ('D', 'X', '5', '0');
+          break;
+      }
+    } else if (!strcmp (mimetype, "video/x-msmpeg")) {
+      gint msmpegversion = 0;
+
+      gst_structure_get_int (structure, "msmpegversion", &msmpegversion);
+      switch (msmpegversion) {
+        case 41:
+          fourcc = GST_MAKE_FOURCC ('M', 'P', 'G', '4');
+          break;
+        case 42:
+          fourcc = GST_MAKE_FOURCC ('M', 'P', '4', '2');
+          break;
+        case 43:
+          goto msmpeg43;
+          break;
+      }
+    } else if (!strcmp (mimetype, "video/x-wmv")) {
+      gint wmvversion;
+      const gchar *fstr;
+
+      fstr = gst_structure_get_string (structure, "format");
+      if (fstr && strlen (fstr) == 4) {
+        fourcc = GST_STR_FOURCC (fstr);
+      } else if (gst_structure_get_int (structure, "wmvversion", &wmvversion)) {
+        if (wmvversion == 2) {
+          fourcc = GST_MAKE_FOURCC ('W', 'M', 'V', '2');
+        } else if (wmvversion == 1) {
+          fourcc = GST_MAKE_FOURCC ('W', 'M', 'V', '1');
+        } else if (wmvversion == 3) {
+          fourcc = GST_MAKE_FOURCC ('W', 'M', 'V', '3');
+        }
+      }
+    } else if (!strcmp (mimetype, "image/jpeg")) {
+      fourcc = GST_MAKE_FOURCC ('M', 'J', 'P', 'G');
+    }
+
+    if (!fourcc)
+      goto refuse_caps;
+
+    /* build a BITMAPINFOHEADER-style VfW codec private */
+    bih = g_new0 (gst_riff_strf_vids, 1);
+    GST_WRITE_UINT32_LE (&bih->size, size);
+    GST_WRITE_UINT32_LE (&bih->width, videocontext->pixel_width);
+    GST_WRITE_UINT32_LE (&bih->height, videocontext->pixel_height);
+    GST_WRITE_UINT32_LE (&bih->compression, fourcc);
+    GST_WRITE_UINT16_LE (&bih->planes, (guint16) 1);
+    GST_WRITE_UINT16_LE (&bih->bit_cnt, (guint16) 24);
+    GST_WRITE_UINT32_LE (&bih->image_size, videocontext->pixel_width *
+        videocontext->pixel_height * 3);
+
+    /* process codec private/initialization data, if any */
+    if (codec_buf) {
+      size += gst_buffer_get_size (codec_buf);
+      bih = g_realloc (bih, size);
+      GST_WRITE_UINT32_LE (&bih->size, size);
+      gst_buffer_extract (codec_buf, 0,
+          (guint8 *) bih + sizeof (gst_riff_strf_vids), -1);
+    }
+
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_VIDEO_VFW_FOURCC);
+    gst_matroska_mux_free_codec_priv (context);
+    context->codec_priv = (gpointer) bih;
+    context->codec_priv_size = size;
+    context->dts_only = TRUE;
+  } else if (!strcmp (mimetype, "video/x-h264")) {
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_AVC);
+    gst_matroska_mux_free_codec_priv (context);
+
+    if (!g_strcmp0 (gst_structure_get_string (structure, "stream-format"),
+            "avc3")) {
+      GST_WARNING_OBJECT (mux,
+          "avc3 is not officially supported, only use this format for smart encoding");
+    }
+
+    /* Create avcC header */
+    if (codec_buf != NULL) {
+      context->codec_priv_size = gst_buffer_get_size (codec_buf);
+      context->codec_priv = g_malloc0 (context->codec_priv_size);
+      gst_buffer_extract (codec_buf, 0, context->codec_priv, -1);
+    }
+  } else if (!strcmp (mimetype, "video/x-h265")) {
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_VIDEO_MPEGH_HEVC);
+    gst_matroska_mux_free_codec_priv (context);
+
+    if (!g_strcmp0 (gst_structure_get_string (structure, "stream-format"),
+            "hev1")) {
+      GST_WARNING_OBJECT (mux,
+          "hev1 is not officially supported, only use this format for smart encoding");
+    }
+
+    /* Create hvcC header */
+    if (codec_buf != NULL) {
+      context->codec_priv_size = gst_buffer_get_size (codec_buf);
+      context->codec_priv = g_malloc0 (context->codec_priv_size);
+      gst_buffer_extract (codec_buf, 0, context->codec_priv, -1);
+    }
+  } else if (!strcmp (mimetype, "video/x-theora")) {
+    const GValue *streamheader;
+
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_THEORA);
+
+    gst_matroska_mux_free_codec_priv (context);
+
+    streamheader = gst_structure_get_value (structure, "streamheader");
+    if (!theora_streamheader_to_codecdata (streamheader, context)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("theora stream headers missing or malformed"));
+      goto refuse_caps;
+    }
+  } else if (!strcmp (mimetype, "video/x-dirac")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_DIRAC);
+  } else if (!strcmp (mimetype, "video/x-vp8")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_VP8);
+  } else if (!strcmp (mimetype, "video/x-vp9")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_VP9);
+  } else if (!strcmp (mimetype, "video/x-av1")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_AV1);
+    gst_matroska_mux_free_codec_priv (context);
+    /* Create av1C header */
+    if (codec_buf != NULL)
+      gst_buffer_extract_dup (codec_buf, 0, gst_buffer_get_size (codec_buf),
+          &context->codec_priv, &context->codec_priv_size);
+  } else if (!strcmp (mimetype, "video/x-ffv")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_FFV1);
+    gst_matroska_mux_free_codec_priv (context);
+    if (codec_buf != NULL)
+      gst_buffer_extract_dup (codec_buf, 0, gst_buffer_get_size (codec_buf),
+          &context->codec_priv, &context->codec_priv_size);
+  } else if (!strcmp (mimetype, "video/mpeg")) {
+    gint mpegversion = 0;
+
+    gst_structure_get_int (structure, "mpegversion", &mpegversion);
+    switch (mpegversion) {
+      case 1:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_VIDEO_MPEG1);
+        break;
+      case 2:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_VIDEO_MPEG2);
+        break;
+      case 4:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_VIDEO_MPEG4_ASP);
+        break;
+      default:
+        goto refuse_caps;
+    }
+
+    /* global headers may be in codec data */
+    if (codec_buf != NULL) {
+      gst_matroska_mux_free_codec_priv (context);
+      context->codec_priv_size = gst_buffer_get_size (codec_buf);
+      context->codec_priv = g_malloc0 (context->codec_priv_size);
+      gst_buffer_extract (codec_buf, 0, context->codec_priv, -1);
+    }
+  } else if (!strcmp (mimetype, "video/x-msmpeg")) {
+  msmpeg43:
+    /* can only make it here if preceding case verified it was version 3 */
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_VIDEO_MSMPEG4V3);
+  } else if (!strcmp (mimetype, "video/x-pn-realvideo")) {
+    gint rmversion = 0;
+    const GValue *mdpr_data;
+
+    gst_structure_get_int (structure, "rmversion", &rmversion);
+    switch (rmversion) {
+      case 1:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO1);
+        break;
+      case 2:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO2);
+        break;
+      case 3:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO3);
+        break;
+      case 4:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_VIDEO_REALVIDEO4);
+        break;
+      default:
+        goto refuse_caps;
+    }
+
+    mdpr_data = gst_structure_get_value (structure, "mdpr_data");
+    if (mdpr_data != NULL) {
+      guint8 *priv_data = NULL;
+      guint priv_data_size = 0;
+
+      GstBuffer *codec_data_buf = g_value_peek_pointer (mdpr_data);
+
+      priv_data_size = gst_buffer_get_size (codec_data_buf);
+      priv_data = g_malloc0 (priv_data_size);
+
+      gst_buffer_extract (codec_data_buf, 0, priv_data, -1);
+
+      gst_matroska_mux_free_codec_priv (context);
+      context->codec_priv = priv_data;
+      context->codec_priv_size = priv_data_size;
+    }
+  } else if (strcmp (mimetype, "video/x-prores") == 0) {
+    const gchar *variant;
+
+    gst_matroska_mux_free_codec_priv (context);
+
+    /* the codec private is the four-character ProRes variant code
+     * (codec_priv_size deliberately excludes the NUL terminator) */
+    variant = gst_structure_get_string (structure, "format");
+    if (!variant || !g_strcmp0 (variant, "standard"))
+      context->codec_priv = g_strdup ("apcn");
+    else if (!g_strcmp0 (variant, "hq"))
+      context->codec_priv = g_strdup ("apch");
+    else if (!g_strcmp0 (variant, "lt"))
+      context->codec_priv = g_strdup ("apcs");
+    else if (!g_strcmp0 (variant, "proxy"))
+      context->codec_priv = g_strdup ("apco");
+    else if (!g_strcmp0 (variant, "4444"))
+      context->codec_priv = g_strdup ("ap4h");
+    else {
+      GST_WARNING_OBJECT (mux, "Unhandled prores format: %s", variant);
+
+      goto refuse_caps;
+    }
+
+    context->codec_priv_size = sizeof (guint32);
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_VIDEO_PRORES);
+  }
+
+  return TRUE;
+
+  /* ERRORS */
+refuse_caps:
+  {
+    GST_WARNING_OBJECT (mux, "pad %s refused caps %" GST_PTR_FORMAT,
+        GST_PAD_NAME (pad), caps);
+    return FALSE;
+  }
+}
+
+/* Serialize a "streamheader" caps array into Xiph-laced codec private
+ * data: one count byte (number of headers - 1), then each header's
+ * length (except the last) encoded as a run of 0xff bytes plus a final
+ * remainder byte, then all header payloads back to back.
+ *
+ * N > 0 to expect a particular number of headers, negative if the
+ * number of headers is variable.  On success, *p_buf0 (if given) gets a
+ * ref to the first header buffer for further inspection by the caller. */
+static gboolean
+xiphN_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context, GstBuffer ** p_buf0, int N)
+{
+  GstBuffer **buf = NULL;
+  GArray *bufarr;
+  guint8 *priv_data;
+  guint bufi, i, offset, priv_data_size;
+
+  if (streamheader == NULL)
+    goto no_stream_headers;
+
+  if (G_VALUE_TYPE (streamheader) != GST_TYPE_ARRAY)
+    goto wrong_type;
+
+  bufarr = g_value_peek_pointer (streamheader);
+  if (bufarr->len <= 0 || bufarr->len > 255)    /* at least one header, and count stored in a byte */
+    goto wrong_count;
+  if (N > 0 && bufarr->len != N)
+    goto wrong_count;
+
+  context->xiph_headers_to_skip = bufarr->len;
+
+  /* collect plain GstBuffer pointers out of the GValue array */
+  buf = (GstBuffer **) g_malloc0 (sizeof (GstBuffer *) * bufarr->len);
+  for (i = 0; i < bufarr->len; i++) {
+    GValue *bufval = &g_array_index (bufarr, GValue, i);
+
+    if (G_VALUE_TYPE (bufval) != GST_TYPE_BUFFER) {
+      g_free (buf);
+      goto wrong_content_type;
+    }
+
+    buf[i] = g_value_peek_pointer (bufval);
+  }
+
+  /* first pass: size of the laced header = count byte + length bytes
+   * for all but the last header + the payloads themselves */
+  priv_data_size = 1;
+  if (bufarr->len > 0) {
+    for (i = 0; i < bufarr->len - 1; i++) {
+      priv_data_size += gst_buffer_get_size (buf[i]) / 0xff + 1;
+    }
+  }
+
+  for (i = 0; i < bufarr->len; ++i) {
+    priv_data_size += gst_buffer_get_size (buf[i]);
+  }
+
+  priv_data = g_malloc0 (priv_data_size);
+
+  priv_data[0] = bufarr->len - 1;
+  offset = 1;
+
+  /* second pass: write the Xiph-style lengths (0xff runs + remainder) */
+  if (bufarr->len > 0) {
+    for (bufi = 0; bufi < bufarr->len - 1; bufi++) {
+      for (i = 0; i < gst_buffer_get_size (buf[bufi]) / 0xff; ++i) {
+        priv_data[offset++] = 0xff;
+      }
+      priv_data[offset++] = gst_buffer_get_size (buf[bufi]) % 0xff;
+    }
+  }
+
+  /* third pass: copy the header payloads */
+  for (i = 0; i < bufarr->len; ++i) {
+    gst_buffer_extract (buf[i], 0, priv_data + offset, -1);
+    offset += gst_buffer_get_size (buf[i]);
+  }
+
+  gst_matroska_mux_free_codec_priv (context);
+  context->codec_priv = priv_data;
+  context->codec_priv_size = priv_data_size;
+
+  if (p_buf0)
+    *p_buf0 = gst_buffer_ref (buf[0]);
+
+  g_free (buf);
+
+  return TRUE;
+
+/* ERRORS */
+no_stream_headers:
+  {
+    GST_WARNING ("required streamheaders missing in sink caps!");
+    return FALSE;
+  }
+wrong_type:
+  {
+    GST_WARNING ("streamheaders are not a GST_TYPE_ARRAY, but a %s",
+        G_VALUE_TYPE_NAME (streamheader));
+    return FALSE;
+  }
+wrong_count:
+  {
+    GST_WARNING ("got %u streamheaders, not %d as expected", bufarr->len, N);
+    return FALSE;
+  }
+wrong_content_type:
+  {
+    GST_WARNING ("streamheaders array does not contain GstBuffers");
+    return FALSE;
+  }
+}
+
+/* Build Xiph-laced codec private data from the three vorbis stream
+ * headers and, when the identification header looks valid, pull the
+ * channel count and sample rate out of it into the audio context. */
+static gboolean
+vorbis_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context)
+{
+  GstBuffer *buf0 = NULL;
+
+  /* vorbis always has exactly 3 headers: ident, comment, setup */
+  if (!xiphN_streamheader_to_codecdata (streamheader, context, &buf0, 3))
+    return FALSE;
+
+  /* the ident header is: 1 packet-type byte, "vorbis", 4-byte version,
+   * 1-byte channel count, 4-byte little-endian sample rate */
+  if (buf0 == NULL || gst_buffer_get_size (buf0) < 1 + 6 + 4) {
+    GST_WARNING ("First vorbis header too small, ignoring");
+  } else {
+    if (gst_buffer_memcmp (buf0, 1, "vorbis", 6) == 0) {
+      GstMatroskaTrackAudioContext *audiocontext;
+      GstMapInfo map;
+      guint8 *hdr;
+
+      gst_buffer_map (buf0, &map, GST_MAP_READ);
+      /* skip packet type + "vorbis" + version */
+      hdr = map.data + 1 + 6 + 4;
+      audiocontext = (GstMatroskaTrackAudioContext *) context;
+      audiocontext->channels = GST_READ_UINT8 (hdr);
+      audiocontext->samplerate = GST_READ_UINT32_LE (hdr + 1);
+      gst_buffer_unmap (buf0, &map);
+    }
+  }
+
+  if (buf0)
+    gst_buffer_unref (buf0);
+
+  return TRUE;
+}
+
+/* Build Xiph-laced codec private data from the three theora stream
+ * headers and parse picture size, frame rate and pixel aspect ratio out
+ * of the identification header into the video context. */
+static gboolean
+theora_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context)
+{
+  GstBuffer *buf0 = NULL;
+
+  /* theora always has exactly 3 headers: ident, comment, setup */
+  if (!xiphN_streamheader_to_codecdata (streamheader, context, &buf0, 3))
+    return FALSE;
+
+  if (buf0 == NULL || gst_buffer_get_size (buf0) < 1 + 6 + 26) {
+    GST_WARNING ("First theora header too small, ignoring");
+  } else if (gst_buffer_memcmp (buf0, 0, "\200theora\003\002", 9) != 0) {
+    GST_WARNING ("First header not a theora identification header, ignoring");
+  } else {
+    GstMatroskaTrackVideoContext *videocontext;
+    guint fps_num, fps_denom, par_num, par_denom;
+    GstMapInfo map;
+    guint8 *hdr;
+
+    gst_buffer_map (buf0, &map, GST_MAP_READ);
+    /* skip packet-type byte, "theora", 3 version bytes and the 2+2
+     * bytes of macroblock dimensions preceding the picture size */
+    hdr = map.data + 1 + 6 + 3 + 2 + 2;
+
+    /* picture width/height are stored as 24-bit big-endian values */
+    videocontext = (GstMatroskaTrackVideoContext *) context;
+    videocontext->pixel_width = GST_READ_UINT32_BE (hdr) >> 8;
+    videocontext->pixel_height = GST_READ_UINT32_BE (hdr + 3) >> 8;
+    hdr += 3 + 3 + 1 + 1;
+    fps_num = GST_READ_UINT32_BE (hdr);
+    fps_denom = GST_READ_UINT32_BE (hdr + 4);
+    context->default_duration = gst_util_uint64_scale_int (GST_SECOND,
+        fps_denom, fps_num);
+    hdr += 4 + 4;
+    /* aspect ratio numerator/denominator are 24-bit big-endian too */
+    par_num = GST_READ_UINT32_BE (hdr) >> 8;
+    par_denom = GST_READ_UINT32_BE (hdr + 3) >> 8;
+    /* non-square pixels: scale display size along the longer axis */
+    if (par_num > 0 && par_denom > 0) {
+      if (par_num > par_denom) {
+        videocontext->display_width =
+            videocontext->pixel_width * par_num / par_denom;
+        videocontext->display_height = videocontext->pixel_height;
+      } else if (par_num < par_denom) {
+        videocontext->display_width = videocontext->pixel_width;
+        videocontext->display_height =
+            videocontext->pixel_height * par_denom / par_num;
+      } else {
+        videocontext->display_width = 0;
+        videocontext->display_height = 0;
+      }
+    } else {
+      videocontext->display_width = 0;
+      videocontext->display_height = 0;
+    }
+
+    gst_buffer_unmap (buf0, &map);
+  }
+
+  if (buf0)
+    gst_buffer_unref (buf0);
+
+  return TRUE;
+}
+
+static gboolean
+kate_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context)
+{
+  GstBuffer *buf0 = NULL;
+
+  /* N = -1: accept any number of headers; all of them are packed into
+   * the codec private data by the Xiph helper. */
+  if (!xiphN_streamheader_to_codecdata (streamheader, context, &buf0, -1))
+    return FALSE;
+
+  /* Only sanity-check the ID header; a suspicious one is logged
+   * but not treated as fatal. */
+  if (buf0 == NULL || gst_buffer_get_size (buf0) < 64) { /* Kate ID header is 64 bytes */
+    GST_WARNING ("First kate header too small, ignoring");
+  } else if (gst_buffer_memcmp (buf0, 0, "\200kate\0\0\0", 8) != 0) {
+    GST_WARNING ("First header not a kate identification header, ignoring");
+  }
+
+  if (buf0)
+    gst_buffer_unref (buf0);
+
+  return TRUE;
+}
+
+/* Build FLAC codec private data from the streamheader caps field.
+ * The first header carries a 9-byte OggFLAC mapping prefix which is
+ * stripped; the "fLaC" marker, STREAMINFO and all remaining metadata
+ * headers are concatenated into context->codec_priv.
+ * Returns FALSE (with codec_priv freed) on missing/malformed headers. */
+static gboolean
+flac_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context)
+{
+  GArray *bufarr;
+  guint i;                      /* unsigned: compared against bufarr->len */
+  GValue *bufval;
+  GstBuffer *buffer;
+
+  if (streamheader == NULL || G_VALUE_TYPE (streamheader) != GST_TYPE_ARRAY) {
+    GST_WARNING ("No or invalid streamheader field in the caps");
+    return FALSE;
+  }
+
+  bufarr = g_value_peek_pointer (streamheader);
+  if (bufarr->len < 2) {
+    GST_WARNING ("Too few headers in streamheader field");
+    return FALSE;
+  }
+
+  context->xiph_headers_to_skip = bufarr->len + 1;
+
+  bufval = &g_array_index (bufarr, GValue, 0);
+  if (G_VALUE_TYPE (bufval) != GST_TYPE_BUFFER) {
+    GST_WARNING ("streamheaders array does not contain GstBuffers");
+    return FALSE;
+  }
+
+  buffer = g_value_peek_pointer (bufval);
+
+  /* Need at least OggFLAC mapping header, fLaC marker and STREAMINFO block */
+  if (gst_buffer_get_size (buffer) < 9 + 4 + 4 + 34
+      || gst_buffer_memcmp (buffer, 1, "FLAC", 4) != 0
+      || gst_buffer_memcmp (buffer, 9, "fLaC", 4) != 0) {
+    GST_WARNING ("Invalid streamheader for FLAC");
+    return FALSE;
+  }
+
+  /* Strip the 9-byte OggFLAC mapping prefix; Matroska codec private data
+   * starts at the "fLaC" marker. */
+  gst_matroska_mux_free_codec_priv (context);
+  context->codec_priv_size = gst_buffer_get_size (buffer) - 9;
+  context->codec_priv = g_malloc (context->codec_priv_size);
+  gst_buffer_extract (buffer, 9, context->codec_priv, -1);
+
+  /* Append the remaining metadata headers verbatim. */
+  for (i = 1; i < bufarr->len; i++) {
+    guint old_size;
+    bufval = &g_array_index (bufarr, GValue, i);
+
+    if (G_VALUE_TYPE (bufval) != GST_TYPE_BUFFER) {
+      gst_matroska_mux_free_codec_priv (context);
+      GST_WARNING ("streamheaders array does not contain GstBuffers");
+      return FALSE;
+    }
+
+    buffer = g_value_peek_pointer (bufval);
+
+    old_size = context->codec_priv_size;
+    context->codec_priv_size += gst_buffer_get_size (buffer);
+
+    context->codec_priv = g_realloc (context->codec_priv,
+        context->codec_priv_size);
+    gst_buffer_extract (buffer, 0,
+        (guint8 *) context->codec_priv + old_size, -1);
+  }
+
+  return TRUE;
+}
+
+/* Build Speex codec private data from the streamheader caps field.
+ * Speex has exactly two headers (the "Speex   " ID header and the
+ * comment header); both are concatenated into context->codec_priv.
+ * Returns FALSE (with codec_priv freed) on missing/malformed headers. */
+static gboolean
+speex_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context)
+{
+  GArray *bufarr;
+  GValue *bufval;
+  GstBuffer *buffer;
+  guint old_size;
+
+  if (streamheader == NULL || G_VALUE_TYPE (streamheader) != GST_TYPE_ARRAY) {
+    GST_WARNING ("No or invalid streamheader field in the caps");
+    return FALSE;
+  }
+
+  bufarr = g_value_peek_pointer (streamheader);
+  if (bufarr->len != 2) {
+    /* the check is for exactly two, so say so (not just "too few") */
+    GST_WARNING ("Expected 2 headers in streamheader field, got %u",
+        bufarr->len);
+    return FALSE;
+  }
+
+  context->xiph_headers_to_skip = bufarr->len + 1;
+
+  bufval = &g_array_index (bufarr, GValue, 0);
+  if (G_VALUE_TYPE (bufval) != GST_TYPE_BUFFER) {
+    GST_WARNING ("streamheaders array does not contain GstBuffers");
+    return FALSE;
+  }
+
+  buffer = g_value_peek_pointer (bufval);
+
+  /* ID header is 80 bytes and starts with the "Speex   " magic */
+  if (gst_buffer_get_size (buffer) < 80
+      || gst_buffer_memcmp (buffer, 0, "Speex   ", 8) != 0) {
+    GST_WARNING ("Invalid streamheader for Speex");
+    return FALSE;
+  }
+
+  gst_matroska_mux_free_codec_priv (context);
+  context->codec_priv_size = gst_buffer_get_size (buffer);
+  context->codec_priv = g_malloc (context->codec_priv_size);
+  gst_buffer_extract (buffer, 0, context->codec_priv, -1);
+
+  bufval = &g_array_index (bufarr, GValue, 1);
+
+  if (G_VALUE_TYPE (bufval) != GST_TYPE_BUFFER) {
+    gst_matroska_mux_free_codec_priv (context);
+    GST_WARNING ("streamheaders array does not contain GstBuffers");
+    return FALSE;
+  }
+
+  buffer = g_value_peek_pointer (bufval);
+
+  /* append the comment header after the ID header */
+  old_size = context->codec_priv_size;
+  context->codec_priv_size += gst_buffer_get_size (buffer);
+  context->codec_priv = g_realloc (context->codec_priv,
+      context->codec_priv_size);
+  gst_buffer_extract (buffer, 0, (guint8 *) context->codec_priv + old_size, -1);
+
+  return TRUE;
+}
+
+/* Build Opus codec private data from the streamheader caps field.
+ * The first header (OpusHead) is copied verbatim into codec_priv; the
+ * pre-skip field inside it supplies the track's CodecDelay, and
+ * SeekPreRoll is fixed at 80ms per the Opus-in-Matroska mapping. */
+static gboolean
+opus_streamheader_to_codecdata (const GValue * streamheader,
+    GstMatroskaTrackContext * context)
+{
+  GArray *bufarr;
+  GValue *bufval;
+  GstBuffer *buf;
+
+  if (G_VALUE_TYPE (streamheader) != GST_TYPE_ARRAY)
+    goto wrong_type;
+
+  bufarr = g_value_peek_pointer (streamheader);
+  if (bufarr->len != 1 && bufarr->len != 2)     /* one header, and count stored in a byte */
+    goto wrong_count;
+
+  /* Opus headers are not in-band */
+  context->xiph_headers_to_skip = 0;
+
+  bufval = &g_array_index (bufarr, GValue, 0);
+  if (G_VALUE_TYPE (bufval) != GST_TYPE_BUFFER) {
+    goto wrong_content_type;
+  }
+  buf = g_value_peek_pointer (bufval);
+
+  /* A valid OpusHead is at least 19 bytes; we read the 16-bit pre-skip
+   * at offset 10 below, so reject anything shorter to avoid reading
+   * past the end of codec_priv. */
+  if (gst_buffer_get_size (buf) < 19) {
+    GST_WARNING ("OpusHead streamheader too small (%" G_GSIZE_FORMAT
+        " bytes)", gst_buffer_get_size (buf));
+    return FALSE;
+  }
+
+  gst_matroska_mux_free_codec_priv (context);
+
+  context->codec_priv_size = gst_buffer_get_size (buf);
+  context->codec_priv = g_malloc0 (context->codec_priv_size);
+  gst_buffer_extract (buf, 0, context->codec_priv, -1);
+
+  /* pre-skip is in 48 kHz samples; convert to nanoseconds */
+  context->codec_delay =
+      GST_READ_UINT16_LE ((guint8 *) context->codec_priv + 10);
+  context->codec_delay =
+      gst_util_uint64_scale_round (context->codec_delay, GST_SECOND, 48000);
+  context->seek_preroll = 80 * GST_MSECOND;
+
+  return TRUE;
+
+/* ERRORS */
+wrong_type:
+  {
+    GST_WARNING ("streamheaders are not a GST_TYPE_ARRAY, but a %s",
+        G_VALUE_TYPE_NAME (streamheader));
+    return FALSE;
+  }
+wrong_count:
+  {
+    GST_WARNING ("got %u streamheaders, not 1 or 2 as expected", bufarr->len);
+    return FALSE;
+  }
+wrong_content_type:
+  {
+    GST_WARNING ("streamheaders array does not contain GstBuffers");
+    return FALSE;
+  }
+}
+
+/* Synthesize Opus codec private data (an OpusHead header) directly from
+ * the caps, for upstreams that provide no streamheader field.
+ * Returns FALSE if the caps cannot be parsed or the header cannot be
+ * created. */
+static gboolean
+opus_make_codecdata (GstMatroskaTrackContext * context, GstCaps * caps)
+{
+  guint32 rate;
+  guint8 channels;
+  guint8 channel_mapping_family;
+  guint8 stream_count, coupled_count, channel_mapping[256];
+  GstBuffer *buffer;
+  GstMapInfo map;
+
+  /* Opus headers are not in-band */
+  context->xiph_headers_to_skip = 0;
+
+  /* no streamheader to read a pre-skip from; 80ms seek preroll per the
+   * Opus-in-Matroska mapping */
+  context->codec_delay = 0;
+  context->seek_preroll = 80 * GST_MSECOND;
+
+  if (!gst_codec_utils_opus_parse_caps (caps, &rate, &channels,
+          &channel_mapping_family, &stream_count, &coupled_count,
+          channel_mapping)) {
+    GST_WARNING ("Failed to parse caps for Opus");
+    return FALSE;
+  }
+
+  buffer =
+      gst_codec_utils_opus_create_header (rate, channels,
+      channel_mapping_family, stream_count, coupled_count, channel_mapping, 0,
+      0);
+  if (!buffer) {
+    GST_WARNING ("Failed to create Opus header from caps");
+    return FALSE;
+  }
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  context->codec_priv_size = map.size;
+  context->codec_priv = g_malloc (context->codec_priv_size);
+  memcpy (context->codec_priv, map.data, map.size);
+  gst_buffer_unmap (buffer, &map);
+  gst_buffer_unref (buffer);
+
+  return TRUE;
+}
+
+/**
+ * gst_matroska_mux_audio_pad_setcaps:
+ * @pad: Pad which got the caps.
+ * @caps: New caps.
+ *
+ * Setcaps function for audio sink pad. Fills in the track context
+ * (codec id, codec private data, rate/channels/depth, default frame
+ * duration) from the caps for each supported audio mimetype.
+ *
+ * Returns: %TRUE on success.
+ */
+static gboolean
+gst_matroska_mux_audio_pad_setcaps (GstPad * pad, GstCaps * caps)
+{
+  GstMatroskaTrackContext *context = NULL;
+  GstMatroskaTrackAudioContext *audiocontext;
+  GstMatroskaMux *mux;
+  GstMatroskaPad *collect_pad;
+  const gchar *mimetype;
+  gint samplerate = 0, channels = 0;
+  GstStructure *structure;
+  const GValue *codec_data = NULL;
+  GstBuffer *buf = NULL;
+  const gchar *stream_format = NULL;
+  GstCaps *old_caps;
+
+  mux = GST_MATROSKA_MUX (GST_PAD_PARENT (pad));
+
+  /* Matroska cannot renegotiate: once the headers are written, refuse
+   * both changed caps and caps that arrive for the first time. */
+  if ((old_caps = gst_pad_get_current_caps (pad))) {
+    if (mux->state >= GST_MATROSKA_MUX_STATE_HEADER
+        && !gst_caps_is_equal (caps, old_caps)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("Caps changes are not supported by Matroska"));
+      gst_caps_unref (old_caps);
+      goto refuse_caps;
+    }
+    gst_caps_unref (old_caps);
+  } else if (mux->state >= GST_MATROSKA_MUX_STATE_HEADER) {
+    GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+        ("Caps on pad %" GST_PTR_FORMAT
+            " arrived late. Headers were already written", pad));
+    goto refuse_caps;
+  }
+
+  /* find context */
+  collect_pad = (GstMatroskaPad *) gst_pad_get_element_private (pad);
+  g_assert (collect_pad);
+  context = collect_pad->track;
+  g_assert (context);
+  g_assert (context->type == GST_MATROSKA_TRACK_TYPE_AUDIO);
+  audiocontext = (GstMatroskaTrackAudioContext *) context;
+
+  structure = gst_caps_get_structure (caps, 0);
+  mimetype = gst_structure_get_name (structure);
+
+  /* general setup */
+  gst_structure_get_int (structure, "rate", &samplerate);
+  gst_structure_get_int (structure, "channels", &channels);
+
+  audiocontext->samplerate = samplerate;
+  audiocontext->channels = channels;
+  audiocontext->bitdepth = 0;
+  context->default_duration = 0;
+
+  codec_data = gst_structure_get_value (structure, "codec_data");
+  if (codec_data)
+    buf = gst_value_get_buffer (codec_data);
+
+  /* TODO: - check if we handle all codecs by the spec, i.e. codec private
+   *         data and other settings
+   *       - add new formats
+   */
+
+  if (!strcmp (mimetype, "audio/mpeg")) {
+    gint mpegversion = 0;
+
+    gst_structure_get_int (structure, "mpegversion", &mpegversion);
+    switch (mpegversion) {
+      case 1:{
+        gint layer;
+        gint version = 1;
+        gint spf;
+
+        gst_structure_get_int (structure, "layer", &layer);
+
+        if (!gst_structure_get_int (structure, "mpegaudioversion", &version)) {
+          GST_WARNING_OBJECT (mux,
+              "Unable to determine MPEG audio version, assuming 1");
+          version = 1;
+        }
+
+        /* samples per frame: layer I = 384, layer II = 1152,
+         * layer III = 1152 (576 for MPEG-2) */
+        if (layer == 1)
+          spf = 384;
+        else if (layer == 2)
+          spf = 1152;
+        else if (version == 2)
+          spf = 576;
+        else
+          spf = 1152;
+
+        context->default_duration =
+            gst_util_uint64_scale (GST_SECOND, spf, audiocontext->samplerate);
+
+        switch (layer) {
+          case 1:
+            gst_matroska_mux_set_codec_id (context,
+                GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L1);
+            break;
+          case 2:
+            gst_matroska_mux_set_codec_id (context,
+                GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L2);
+            break;
+          case 3:
+            gst_matroska_mux_set_codec_id (context,
+                GST_MATROSKA_CODEC_ID_AUDIO_MPEG1_L3);
+            break;
+          default:
+            goto refuse_caps;
+        }
+        break;
+      }
+      case 2:
+      case 4:
+        /* AAC: only raw (non-ADTS) streams with codec_data can be muxed */
+        stream_format = gst_structure_get_string (structure, "stream-format");
+        /* check this is raw aac */
+        if (stream_format) {
+          if (strcmp (stream_format, "raw") != 0) {
+            GST_WARNING_OBJECT (mux, "AAC stream-format must be 'raw', not %s",
+                stream_format);
+          }
+        } else {
+          GST_WARNING_OBJECT (mux, "AAC stream-format not specified, "
+              "assuming 'raw'");
+        }
+
+        if (buf) {
+          gst_matroska_mux_set_codec_id (context,
+              GST_MATROSKA_CODEC_ID_AUDIO_AAC);
+          context->codec_priv_size = gst_buffer_get_size (buf);
+          context->codec_priv = g_malloc (context->codec_priv_size);
+          gst_buffer_extract (buf, 0, context->codec_priv,
+              context->codec_priv_size);
+        } else {
+          GST_DEBUG_OBJECT (mux, "no AAC codec_data; not packetized");
+          goto refuse_caps;
+        }
+        break;
+      default:
+        goto refuse_caps;
+    }
+  } else if (!strcmp (mimetype, "audio/x-raw")) {
+    GstAudioInfo info;
+
+    gst_audio_info_init (&info);
+    if (!gst_audio_info_from_caps (&info, caps)) {
+      GST_DEBUG_OBJECT (mux,
+          "broken caps, rejected by gst_audio_info_from_caps");
+      goto refuse_caps;
+    }
+
+    switch (GST_AUDIO_INFO_FORMAT (&info)) {
+      case GST_AUDIO_FORMAT_U8:
+      case GST_AUDIO_FORMAT_S16BE:
+      case GST_AUDIO_FORMAT_S16LE:
+      case GST_AUDIO_FORMAT_S24BE:
+      case GST_AUDIO_FORMAT_S24LE:
+      case GST_AUDIO_FORMAT_S32BE:
+      case GST_AUDIO_FORMAT_S32LE:
+        if (GST_AUDIO_INFO_WIDTH (&info) != GST_AUDIO_INFO_DEPTH (&info)) {
+          GST_DEBUG_OBJECT (mux, "width must be same as depth!");
+          goto refuse_caps;
+        }
+        if (GST_AUDIO_INFO_IS_BIG_ENDIAN (&info))
+          gst_matroska_mux_set_codec_id (context,
+              GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_BE);
+        else
+          gst_matroska_mux_set_codec_id (context,
+              GST_MATROSKA_CODEC_ID_AUDIO_PCM_INT_LE);
+        break;
+      case GST_AUDIO_FORMAT_F32LE:
+      case GST_AUDIO_FORMAT_F64LE:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_AUDIO_PCM_FLOAT);
+        break;
+
+      default:
+        GST_DEBUG_OBJECT (mux, "wrong format in raw audio caps");
+        goto refuse_caps;
+    }
+
+    audiocontext->bitdepth = GST_AUDIO_INFO_WIDTH (&info);
+  } else if (!strcmp (mimetype, "audio/x-vorbis")) {
+    const GValue *streamheader;
+
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_VORBIS);
+
+    gst_matroska_mux_free_codec_priv (context);
+
+    streamheader = gst_structure_get_value (structure, "streamheader");
+    if (!vorbis_streamheader_to_codecdata (streamheader, context)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("vorbis stream headers missing or malformed"));
+      goto refuse_caps;
+    }
+  } else if (!strcmp (mimetype, "audio/x-flac")) {
+    const GValue *streamheader;
+
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_FLAC);
+
+    gst_matroska_mux_free_codec_priv (context);
+
+    streamheader = gst_structure_get_value (structure, "streamheader");
+    if (!flac_streamheader_to_codecdata (streamheader, context)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("flac stream headers missing or malformed"));
+      goto refuse_caps;
+    }
+  } else if (!strcmp (mimetype, "audio/x-speex")) {
+    const GValue *streamheader;
+
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_SPEEX);
+    gst_matroska_mux_free_codec_priv (context);
+
+    streamheader = gst_structure_get_value (structure, "streamheader");
+    if (!speex_streamheader_to_codecdata (streamheader, context)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("speex stream headers missing or malformed"));
+      goto refuse_caps;
+    }
+  } else if (!strcmp (mimetype, "audio/x-opus")) {
+    const GValue *streamheader;
+
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_OPUS);
+
+    streamheader = gst_structure_get_value (structure, "streamheader");
+    if (streamheader) {
+      gst_matroska_mux_free_codec_priv (context);
+      if (!opus_streamheader_to_codecdata (streamheader, context)) {
+        GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+            ("opus stream headers missing or malformed"));
+        goto refuse_caps;
+      }
+    } else {
+      /* no streamheader, but we need to have one, so we make one up
+         based on caps */
+      gst_matroska_mux_free_codec_priv (context);
+      if (!opus_make_codecdata (context, caps)) {
+        GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+            ("opus stream headers missing or malformed"));
+        goto refuse_caps;
+      }
+    }
+  } else if (!strcmp (mimetype, "audio/x-ac3")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_AC3);
+  } else if (!strcmp (mimetype, "audio/x-eac3")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_EAC3);
+  } else if (!strcmp (mimetype, "audio/x-dts")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_DTS);
+  } else if (!strcmp (mimetype, "audio/x-tta")) {
+    gint width;
+
+    /* TTA frame duration */
+    context->default_duration = 1.04489795918367346939 * GST_SECOND;
+
+    gst_structure_get_int (structure, "width", &width);
+    audiocontext->bitdepth = width;
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_TTA);
+
+  } else if (!strcmp (mimetype, "audio/x-pn-realaudio")) {
+    gint raversion;
+    const GValue *mdpr_data;
+
+    gst_structure_get_int (structure, "raversion", &raversion);
+    switch (raversion) {
+      case 1:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_AUDIO_REAL_14_4);
+        break;
+      case 2:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_AUDIO_REAL_28_8);
+        break;
+      case 8:
+        gst_matroska_mux_set_codec_id (context,
+            GST_MATROSKA_CODEC_ID_AUDIO_REAL_COOK);
+        break;
+      default:
+        goto refuse_caps;
+    }
+
+    /* the MDPR chunk from the Real demuxer becomes codec private data */
+    mdpr_data = gst_structure_get_value (structure, "mdpr_data");
+    if (mdpr_data != NULL) {
+      guint8 *priv_data = NULL;
+      guint priv_data_size = 0;
+
+      GstBuffer *codec_data_buf = g_value_peek_pointer (mdpr_data);
+
+      priv_data_size = gst_buffer_get_size (codec_data_buf);
+      priv_data = g_malloc0 (priv_data_size);
+
+      gst_buffer_extract (codec_data_buf, 0, priv_data, -1);
+
+      gst_matroska_mux_free_codec_priv (context);
+
+      context->codec_priv = priv_data;
+      context->codec_priv_size = priv_data_size;
+    }
+
+  } else if (!strcmp (mimetype, "audio/x-wma")
+      || !strcmp (mimetype, "audio/x-alaw")
+      || !strcmp (mimetype, "audio/x-mulaw")
+      || !strcmp (mimetype, "audio/x-adpcm")
+      || !strcmp (mimetype, "audio/G722")) {
+    /* these are muxed as A_MS/ACM with a serialized WAVEFORMATEX header */
+    guint8 *codec_priv;
+    guint codec_priv_size;
+    guint16 format = 0;
+    gint block_align = 0;
+    gint bitrate = 0;
+
+    if (samplerate == 0 || channels == 0) {
+      GST_WARNING_OBJECT (mux, "Missing channels/samplerate on caps");
+      goto refuse_caps;
+    }
+
+    if (!strcmp (mimetype, "audio/x-wma")) {
+      gint wmaversion;
+      gint depth;
+
+      if (!gst_structure_get_int (structure, "wmaversion", &wmaversion)
+          || !gst_structure_get_int (structure, "block_align", &block_align)
+          || !gst_structure_get_int (structure, "bitrate", &bitrate)) {
+        GST_WARNING_OBJECT (mux, "Missing wmaversion/block_align/bitrate"
+            " on WMA caps");
+        goto refuse_caps;
+      }
+
+      switch (wmaversion) {
+        case 1:
+          format = GST_RIFF_WAVE_FORMAT_WMAV1;
+          break;
+        case 2:
+          format = GST_RIFF_WAVE_FORMAT_WMAV2;
+          break;
+        case 3:
+          format = GST_RIFF_WAVE_FORMAT_WMAV3;
+          break;
+        default:
+          GST_WARNING_OBJECT (mux, "Unexpected WMA version: %d", wmaversion);
+          goto refuse_caps;
+      }
+
+      if (gst_structure_get_int (structure, "depth", &depth))
+        audiocontext->bitdepth = depth;
+    } else if (!strcmp (mimetype, "audio/x-alaw")
+        || !strcmp (mimetype, "audio/x-mulaw")) {
+      audiocontext->bitdepth = 8;
+      if (!strcmp (mimetype, "audio/x-alaw"))
+        format = GST_RIFF_WAVE_FORMAT_ALAW;
+      else
+        format = GST_RIFF_WAVE_FORMAT_MULAW;
+
+      /* one byte per sample per channel */
+      block_align = channels;
+      bitrate = block_align * samplerate;
+    } else if (!strcmp (mimetype, "audio/x-adpcm")) {
+      const char *layout;
+
+      layout = gst_structure_get_string (structure, "layout");
+      if (!layout) {
+        GST_WARNING_OBJECT (mux, "Missing layout on adpcm caps");
+        goto refuse_caps;
+      }
+
+      if (!gst_structure_get_int (structure, "block_align", &block_align)) {
+        GST_WARNING_OBJECT (mux, "Missing block_align on adpcm caps");
+        goto refuse_caps;
+      }
+
+      if (!strcmp (layout, "dvi")) {
+        format = GST_RIFF_WAVE_FORMAT_DVI_ADPCM;
+      } else if (!strcmp (layout, "g726")) {
+        format = GST_RIFF_WAVE_FORMAT_ITU_G726_ADPCM;
+        if (!gst_structure_get_int (structure, "bitrate", &bitrate)) {
+          GST_WARNING_OBJECT (mux, "Missing bitrate on adpcm g726 caps");
+          goto refuse_caps;
+        }
+      } else {
+        GST_WARNING_OBJECT (mux, "Unknown layout on adpcm caps");
+        goto refuse_caps;
+      }
+
+    } else if (!strcmp (mimetype, "audio/G722")) {
+      format = GST_RIFF_WAVE_FORMAT_ADPCM_G722;
+    }
+    g_assert (format != 0);
+
+    codec_priv_size = WAVEFORMATEX_SIZE;
+    if (buf)
+      codec_priv_size += gst_buffer_get_size (buf);
+
+    /* serialize waveformatex structure */
+    codec_priv = g_malloc0 (codec_priv_size);
+    GST_WRITE_UINT16_LE (codec_priv, format);
+    GST_WRITE_UINT16_LE (codec_priv + 2, channels);
+    GST_WRITE_UINT32_LE (codec_priv + 4, samplerate);
+    GST_WRITE_UINT32_LE (codec_priv + 8, bitrate / 8);
+    GST_WRITE_UINT16_LE (codec_priv + 12, block_align);
+    /* wBitsPerSample left 0; cbSize = size of trailing codec data */
+    GST_WRITE_UINT16_LE (codec_priv + 14, 0);
+    if (buf)
+      GST_WRITE_UINT16_LE (codec_priv + 16, gst_buffer_get_size (buf));
+    else
+      GST_WRITE_UINT16_LE (codec_priv + 16, 0);
+
+    /* process codec private/initialization data, if any */
+    if (buf) {
+      gst_buffer_extract (buf, 0,
+          (guint8 *) codec_priv + WAVEFORMATEX_SIZE, -1);
+    }
+
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_AUDIO_ACM);
+    gst_matroska_mux_free_codec_priv (context);
+    context->codec_priv = (gpointer) codec_priv;
+    context->codec_priv_size = codec_priv_size;
+  }
+
+  return TRUE;
+
+  /* ERRORS */
+refuse_caps:
+  {
+    GST_WARNING_OBJECT (mux, "pad %s refused caps %" GST_PTR_FORMAT,
+        GST_PAD_NAME (pad), caps);
+    return FALSE;
+  }
+}
+
+/* we probably don't have the data at start,
+ * so have to reserve (a maximum) space to write this at the end.
+ * bit spacy, but some formats can hold quite some */
+#define SUBTITLE_MAX_CODEC_PRIVATE 2048 /* must be > 128 */
+
+/**
+ * gst_matroska_mux_subtitle_pad_setcaps:
+ * @pad: Pad which got the caps.
+ * @caps: New caps.
+ *
+ * Setcaps function for subtitle sink pad. Maps the mimetype to a
+ * Matroska subtitle codec id and stores any codec_data (e.g. vobsub
+ * palette) as codec private data.
+ *
+ * Returns: %TRUE on success.
+ */
+static gboolean
+gst_matroska_mux_subtitle_pad_setcaps (GstPad * pad, GstCaps * caps)
+{
+  /* There is now (at least) one such element (kateenc), and I'm going
+     to handle it here and claim it works when it can be piped back
+     through GStreamer and VLC */
+
+  GstMatroskaTrackContext *context = NULL;
+  GstMatroskaTrackSubtitleContext *scontext;
+  GstMatroskaMux *mux;
+  GstMatroskaPad *collect_pad;
+  GstCollectData *data;
+  const gchar *mimetype;
+  GstStructure *structure;
+  const GValue *value = NULL;
+  GstBuffer *buf = NULL;
+  gboolean ret = TRUE;
+  GstCaps *old_caps;
+
+  mux = GST_MATROSKA_MUX (GST_PAD_PARENT (pad));
+
+  /* Matroska cannot renegotiate: once the headers are written, refuse
+   * both changed caps and caps that arrive for the first time. */
+  if ((old_caps = gst_pad_get_current_caps (pad))) {
+    if (mux->state >= GST_MATROSKA_MUX_STATE_HEADER
+        && !gst_caps_is_equal (caps, old_caps)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("Caps changes are not supported by Matroska"));
+      gst_caps_unref (old_caps);
+      goto refuse_caps;
+    }
+    gst_caps_unref (old_caps);
+  } else if (mux->state >= GST_MATROSKA_MUX_STATE_HEADER) {
+    GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+        ("Caps on pad %" GST_PTR_FORMAT
+            " arrived late. Headers were already written", pad));
+    goto refuse_caps;
+  }
+
+  /* find context */
+  collect_pad = (GstMatroskaPad *) gst_pad_get_element_private (pad);
+  g_assert (collect_pad);
+  data = (GstCollectData *) (collect_pad);
+
+  context = collect_pad->track;
+  g_assert (context);
+  g_assert (context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE);
+  scontext = (GstMatroskaTrackSubtitleContext *) context;
+
+  structure = gst_caps_get_structure (caps, 0);
+  mimetype = gst_structure_get_name (structure);
+
+  /* general setup */
+  scontext->check_utf8 = 1;
+  scontext->invalid_utf8 = 0;
+  context->default_duration = 0;
+
+  if (!strcmp (mimetype, "subtitle/x-kate")) {
+    const GValue *streamheader;
+
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_SUBTITLE_KATE);
+
+    gst_matroska_mux_free_codec_priv (context);
+
+    streamheader = gst_structure_get_value (structure, "streamheader");
+    if (!kate_streamheader_to_codecdata (streamheader, context)) {
+      GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+          ("kate stream headers missing or malformed"));
+      ret = FALSE;
+      goto exit;
+    }
+  } else if (!strcmp (mimetype, "text/x-raw")) {
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_SUBTITLE_UTF8);
+  } else if (!strcmp (mimetype, "application/x-ssa")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_SUBTITLE_SSA);
+  } else if (!strcmp (mimetype, "application/x-ass")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_SUBTITLE_ASS);
+  } else if (!strcmp (mimetype, "application/x-usf")) {
+    gst_matroska_mux_set_codec_id (context, GST_MATROSKA_CODEC_ID_SUBTITLE_USF);
+  } else if (!strcmp (mimetype, "subpicture/x-dvd")) {
+    gst_matroska_mux_set_codec_id (context,
+        GST_MATROSKA_CODEC_ID_SUBTITLE_VOBSUB);
+  } else {
+    ret = FALSE;
+    goto exit;
+  }
+
+  /* maybe some private data, e.g. vobsub */
+  value = gst_structure_get_value (structure, "codec_data");
+  if (value)
+    buf = gst_value_get_buffer (value);
+  if (buf != NULL) {
+    GstMapInfo map;
+    guint8 *priv_data = NULL;
+
+    gst_buffer_map (buf, &map, GST_MAP_READ);
+
+    /* oversized private data is dropped (not fatal): we only reserved
+     * SUBTITLE_MAX_CODEC_PRIVATE bytes to rewrite it at EOS */
+    if (map.size > SUBTITLE_MAX_CODEC_PRIVATE) {
+      GST_WARNING_OBJECT (mux, "pad %" GST_PTR_FORMAT " subtitle private data"
+          " exceeded maximum (%d); discarding", pad,
+          SUBTITLE_MAX_CODEC_PRIVATE);
+      gst_buffer_unmap (buf, &map);
+      return TRUE;
+    }
+
+    gst_matroska_mux_free_codec_priv (context);
+
+    priv_data = g_malloc0 (map.size);
+    memcpy (priv_data, map.data, map.size);
+    context->codec_priv = priv_data;
+    context->codec_priv_size = map.size;
+    gst_buffer_unmap (buf, &map);
+  }
+
+  GST_DEBUG_OBJECT (pad, "codec_id %s, codec data size %" G_GSIZE_FORMAT,
+      GST_STR_NULL (context->codec_id), context->codec_priv_size);
+
+  /* This pad is sparse. Now that we have caps on it, we can tell collectpads
+   * not to actually wait for data when muxing */
+  GST_COLLECT_PADS_STREAM_LOCK (mux->collect);
+  GST_COLLECT_PADS_STATE_UNSET (data, GST_COLLECT_PADS_STATE_LOCKED);
+  gst_collect_pads_set_waiting (mux->collect, data, FALSE);
+  GST_COLLECT_PADS_STREAM_UNLOCK (mux->collect);
+
+exit:
+
+  return ret;
+
+  /* ERRORS */
+refuse_caps:
+  {
+    GST_WARNING_OBJECT (mux, "pad %s refused caps %" GST_PTR_FORMAT,
+        GST_PAD_NAME (pad), caps);
+    return FALSE;
+  }
+}
+
+
+/**
+ * gst_matroska_mux_request_new_pad:
+ * @element: #GstMatroskaMux.
+ * @templ: #GstPadTemplate.
+ * @req_name: Requested pad name, or %NULL for an auto-generated one.
+ * @caps: (allow-none): Requested caps (unused here).
+ *
+ * Request pad function for sink templates. Creates a collect pad with a
+ * fresh track context of the matching type (audio/video/subtitle).
+ *
+ * Returns: New #GstPad, or %NULL on error.
+ */
+static GstPad *
+gst_matroska_mux_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+  GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
+  GstMatroskaMux *mux = GST_MATROSKA_MUX (element);
+  GstMatroskaPad *collect_pad;
+  GstMatroskamuxPad *newpad;
+  gchar *name = NULL;
+  const gchar *pad_name = NULL;
+  GstMatroskaCapsFunc capsfunc = NULL;
+  GstMatroskaTrackContext *context = NULL;
+  gint pad_id;
+  const gchar *id = NULL;
+
+  if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
+    /* don't mix named and unnamed pads, if the pad already exists we fail when
+     * trying to add it */
+    if (req_name != NULL && sscanf (req_name, "audio_%u", &pad_id) == 1) {
+      pad_name = req_name;
+    } else {
+      name = g_strdup_printf ("audio_%u", mux->num_a_streams++);
+      pad_name = name;
+    }
+    capsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_audio_pad_setcaps);
+    context = (GstMatroskaTrackContext *)
+        g_new0 (GstMatroskaTrackAudioContext, 1);
+    context->type = GST_MATROSKA_TRACK_TYPE_AUDIO;
+    context->name = g_strdup ("Audio");
+  } else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
+    /* don't mix named and unnamed pads, if the pad already exists we fail when
+     * trying to add it */
+    if (req_name != NULL && sscanf (req_name, "video_%u", &pad_id) == 1) {
+      pad_name = req_name;
+    } else {
+      name = g_strdup_printf ("video_%u", mux->num_v_streams++);
+      pad_name = name;
+    }
+    capsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_video_pad_setcaps);
+    context = (GstMatroskaTrackContext *)
+        g_new0 (GstMatroskaTrackVideoContext, 1);
+    context->type = GST_MATROSKA_TRACK_TYPE_VIDEO;
+    context->name = g_strdup ("Video");
+  } else if (templ == gst_element_class_get_pad_template (klass, "subtitle_%u")) {
+    /* don't mix named and unnamed pads, if the pad already exists we fail when
+     * trying to add it */
+    if (req_name != NULL && sscanf (req_name, "subtitle_%u", &pad_id) == 1) {
+      pad_name = req_name;
+    } else {
+      name = g_strdup_printf ("subtitle_%u", mux->num_t_streams++);
+      pad_name = name;
+    }
+    capsfunc = GST_DEBUG_FUNCPTR (gst_matroska_mux_subtitle_pad_setcaps);
+    context = (GstMatroskaTrackContext *)
+        g_new0 (GstMatroskaTrackSubtitleContext, 1);
+    context->type = GST_MATROSKA_TRACK_TYPE_SUBTITLE;
+    context->name = g_strdup ("Subtitle");
+    /* setcaps may only provide proper one a lot later */
+    id = "S_SUB_UNKNOWN";
+  } else {
+    GST_WARNING_OBJECT (mux, "This is not our template!");
+    return NULL;
+  }
+
+  newpad = g_object_new (GST_TYPE_MATROSKAMUX_PAD,
+      "name", pad_name, "direction", templ->direction, "template", templ, NULL);
+
+  gst_matroskamux_pad_init (newpad);
+  collect_pad = (GstMatroskaPad *)
+      gst_collect_pads_add_pad (mux->collect, GST_PAD (newpad),
+      sizeof (GstMatroskaPad),
+      (GstCollectDataDestroyNotify) gst_matroska_pad_free, TRUE);
+
+  collect_pad->mux = mux;
+  collect_pad->track = context;
+  gst_matroska_pad_reset (collect_pad, FALSE);
+  if (id)
+    gst_matroska_mux_set_codec_id (collect_pad->track, id);
+  collect_pad->track->dts_only = FALSE;
+
+  collect_pad->capsfunc = capsfunc;
+  gst_pad_set_active (GST_PAD (newpad), TRUE);
+  if (!gst_element_add_pad (element, GST_PAD (newpad)))
+    goto pad_add_failed;
+
+  g_free (name);
+
+  mux->num_streams++;
+
+  GST_DEBUG_OBJECT (newpad, "Added new request pad");
+
+  return GST_PAD (newpad);
+
+  /* ERROR cases */
+pad_add_failed:
+  {
+    GST_WARNING_OBJECT (mux, "Adding the new pad '%s' failed", pad_name);
+    g_free (name);
+    gst_object_unref (newpad);
+    return NULL;
+  }
+}
+
+/**
+ * gst_matroska_mux_release_pad:
+ * @element: #GstMatroskaMux.
+ * @pad: Pad to release.
+ *
+ * Release a previously requested pad. Before removing it, fold the
+ * pad's observed duration into the muxer's overall duration so the
+ * segment duration stays correct.
+*/
+static void
+gst_matroska_mux_release_pad (GstElement * element, GstPad * pad)
+{
+  GstMatroskaMux *mux;
+  GSList *walk;
+
+  mux = GST_MATROSKA_MUX (GST_PAD_PARENT (pad));
+
+  for (walk = mux->collect->data; walk; walk = g_slist_next (walk)) {
+    GstCollectData *cdata = (GstCollectData *) walk->data;
+    GstMatroskaPad *collect_pad = (GstMatroskaPad *) cdata;
+
+    if (cdata->pad == pad) {
+      /*
+       * observed duration, this will remain GST_CLOCK_TIME_NONE
+       * only if the pad is reset
+       */
+      GstClockTime collected_duration = GST_CLOCK_TIME_NONE;
+
+      if (GST_CLOCK_TIME_IS_VALID (collect_pad->start_ts) &&
+          GST_CLOCK_TIME_IS_VALID (collect_pad->end_ts)) {
+        collected_duration =
+            GST_CLOCK_DIFF (collect_pad->start_ts, collect_pad->end_ts);
+      }
+
+      /* keep the longest per-pad duration as the overall duration */
+      if (GST_CLOCK_TIME_IS_VALID (collected_duration)
+          && mux->duration < collected_duration)
+        mux->duration = collected_duration;
+
+      break;
+    }
+  }
+
+  gst_collect_pads_remove_pad (mux->collect, pad);
+  if (gst_element_remove_pad (element, pad))
+    mux->num_streams--;
+}
+
+/* Write the MasteringMetadata master element (HDR mastering display info)
+ * for a video track, if the caps provided it. */
+static void
+gst_matroska_mux_write_mastering_metadata (GstMatroskaMux * mux,
+    GstMatroskaTrackVideoContext * videocontext)
+{
+  GstEbmlWrite *ebml = mux->ebml_write;
+  guint64 master;
+  GstVideoMasteringDisplayInfo *minfo = &videocontext->mastering_display_info;
+  gdouble value;
+  /* GstVideoMasteringDisplayInfo stores chromaticities in increments of
+   * 0.00002 (1/50000) and luminance in increments of 0.0001 cd/m^2
+   * (1/10000); Matroska expects plain floats, so divide by the
+   * respective scale. Using 50000 for luminance would write values
+   * five times too small. */
+  const gdouble chroma_scale = 50000;
+  const gdouble luma_scale = 10000;
+
+  if (!videocontext->mastering_display_info_present)
+    return;
+
+  master =
+      gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_MASTERINGMETADATA);
+
+  value = (gdouble) minfo->display_primaries[0].x / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_PRIMARYRCHROMATICITYX, value);
+
+  value = (gdouble) minfo->display_primaries[0].y / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_PRIMARYRCHROMATICITYY, value);
+
+  value = (gdouble) minfo->display_primaries[1].x / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_PRIMARYGCHROMATICITYX, value);
+
+  value = (gdouble) minfo->display_primaries[1].y / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_PRIMARYGCHROMATICITYY, value);
+
+  value = (gdouble) minfo->display_primaries[2].x / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_PRIMARYBCHROMATICITYX, value);
+
+  value = (gdouble) minfo->display_primaries[2].y / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_PRIMARYBCHROMATICITYY, value);
+
+  value = (gdouble) minfo->white_point.x / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_WHITEPOINTCHROMATICITYX, value);
+
+  value = (gdouble) minfo->white_point.y / chroma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_WHITEPOINTCHROMATICITYY, value);
+
+  value = (gdouble) minfo->max_display_mastering_luminance / luma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_LUMINANCEMAX, value);
+
+  value = (gdouble) minfo->min_display_mastering_luminance / luma_scale;
+  gst_ebml_write_float (ebml, GST_MATROSKA_ID_LUMINANCEMIN, value);
+
+  gst_ebml_write_master_finish (ebml, master);
+  return;
+}
+
+static void
+gst_matroska_mux_write_colour (GstMatroskaMux * mux,
+ GstMatroskaTrackVideoContext * videocontext)
+{
+ GstEbmlWrite *ebml = mux->ebml_write;
+ guint64 master;
+ guint matrix_id = 0;
+ guint range_id = 0;
+ guint transfer_id = 0;
+ guint primaries_id = 0;
+
+ master = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_VIDEOCOLOUR);
+
+ switch (videocontext->colorimetry.range) {
+ case GST_VIDEO_COLOR_RANGE_UNKNOWN:
+ range_id = 0;
+ break;
+ case GST_VIDEO_COLOR_RANGE_16_235:
+ range_id = 1;
+ break;
+ case GST_VIDEO_COLOR_RANGE_0_255:
+ range_id = 2;
+ }
+
+ matrix_id = gst_video_color_matrix_to_iso (videocontext->colorimetry.matrix);
+ transfer_id =
+ gst_video_transfer_function_to_iso (videocontext->colorimetry.transfer);
+ primaries_id =
+ gst_video_color_primaries_to_iso (videocontext->colorimetry.primaries);
+
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEORANGE, range_id);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOMATRIXCOEFFICIENTS,
+ matrix_id);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOTRANSFERCHARACTERISTICS,
+ transfer_id);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOPRIMARIES, primaries_id);
+ if (videocontext->content_light_level.max_content_light_level &&
+ videocontext->content_light_level.max_frame_average_light_level) {
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_MAXCLL,
+ videocontext->content_light_level.max_content_light_level);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_MAXFALL,
+ videocontext->content_light_level.max_frame_average_light_level);
+ }
+
+ gst_matroska_mux_write_mastering_metadata (mux, videocontext);
+ gst_ebml_write_master_finish (ebml, master);
+}
+
/**
 * gst_matroska_mux_track_header:
 * @mux: #GstMatroskaMux
 * @context: Track context.
 *
 * Write a track header (the contents of one TrackEntry element):
 * track number/type/UID, optional default duration and language,
 * type-specific video/audio sub-elements, and finally the codec ID,
 * codec private data, seek preroll and codec delay.
 */
static void
gst_matroska_mux_track_header (GstMatroskaMux * mux,
    GstMatroskaTrackContext * context)
{
  GstEbmlWrite *ebml = mux->ebml_write;
  guint64 master;

  /* TODO: check if everything necessary is written and check default values */

  /* track type goes before the type-specific stuff */
  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TRACKNUMBER, context->num);
  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TRACKTYPE, context->type);

  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TRACKUID, context->uid);
  if (context->default_duration) {
    gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TRACKDEFAULTDURATION,
        context->default_duration);
  }
  if (context->language) {
    gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_TRACKLANGUAGE,
        context->language);
  }

  /* FIXME: until we have a nice way of getting the codecname
   * out of the caps, I'm not going to enable this. Too much
   * (useless, double, boring) work... */
  /* TODO: Use value from tags if any */
  /*gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_CODECNAME,
     context->codec_name); */
  gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_TRACKNAME, context->name);

  /* type-specific stuff */
  switch (context->type) {
    case GST_MATROSKA_TRACK_TYPE_VIDEO:{
      GstMatroskaTrackVideoContext *videocontext =
          (GstMatroskaTrackVideoContext *) context;

      master = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TRACKVIDEO);
      gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOPIXELWIDTH,
          videocontext->pixel_width);
      gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOPIXELHEIGHT,
          videocontext->pixel_height);
      /* display size is optional; only written when both dimensions known */
      if (videocontext->display_width && videocontext->display_height) {
        gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEODISPLAYWIDTH,
            videocontext->display_width);
        gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEODISPLAYHEIGHT,
            videocontext->display_height);
      }
      /* Matroska FlagInterlaced: 1 = interlaced, 2 = progressive;
       * undetermined modes write nothing */
      switch (videocontext->interlace_mode) {
        case GST_MATROSKA_INTERLACE_MODE_INTERLACED:
          gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOFLAGINTERLACED, 1);
          break;
        case GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE:
          gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOFLAGINTERLACED, 2);
          break;
        default:
          break;
      }

      /* raw video: fourcc goes into ColourSpace, in little-endian order */
      if (videocontext->fourcc) {
        guint32 fcc_le = GUINT32_TO_LE (videocontext->fourcc);

        gst_ebml_write_binary (ebml, GST_MATROSKA_ID_VIDEOCOLOURSPACE,
            (gpointer) & fcc_le, 4);
      }
      gst_matroska_mux_write_colour (mux, videocontext);
      /* map GStreamer multiview mode + flags onto Matroska StereoMode */
      if (videocontext->multiview_mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
        guint64 stereo_mode = 0;

        switch (videocontext->multiview_mode) {
          case GST_VIDEO_MULTIVIEW_MODE_MONO:
            break;
          case GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE:
            if (videocontext->multiview_flags &
                GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST)
              stereo_mode = GST_MATROSKA_STEREO_MODE_SBS_RL;
            else
              stereo_mode = GST_MATROSKA_STEREO_MODE_SBS_LR;
            break;
          case GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM:
            if (videocontext->multiview_flags &
                GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST)
              stereo_mode = GST_MATROSKA_STEREO_MODE_TB_RL;
            else
              stereo_mode = GST_MATROSKA_STEREO_MODE_TB_LR;
            break;
          case GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD:
            if (videocontext->multiview_flags &
                GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST)
              stereo_mode = GST_MATROSKA_STEREO_MODE_CHECKER_RL;
            else
              stereo_mode = GST_MATROSKA_STEREO_MODE_CHECKER_LR;
            break;
          case GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME:
            if (videocontext->multiview_flags &
                GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST)
              stereo_mode = GST_MATROSKA_STEREO_MODE_FBF_RL;
            else
              stereo_mode = GST_MATROSKA_STEREO_MODE_FBF_LR;
            /* FIXME: In frame-by-frame mode, left/right frame buffers need to be
             * laced within one block. See http://www.matroska.org/technical/specs/index.html#StereoMode */
            GST_FIXME_OBJECT (mux,
                "Frame-by-frame stereoscopic mode not fully implemented");
            break;
          default:
            GST_WARNING_OBJECT (mux,
                "Multiview mode %d not supported in Matroska/WebM",
                videocontext->multiview_mode);
            break;
        }

        if (stereo_mode != 0)
          gst_ebml_write_uint (ebml, GST_MATROSKA_ID_VIDEOSTEREOMODE,
              stereo_mode);
      }
      gst_ebml_write_master_finish (ebml, master);

      break;
    }

    case GST_MATROSKA_TRACK_TYPE_AUDIO:{
      GstMatroskaTrackAudioContext *audiocontext =
          (GstMatroskaTrackAudioContext *) context;

      master = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TRACKAUDIO);
      /* 8000 Hz and 1 channel are the Matroska spec defaults and can be
       * omitted from the file */
      if (audiocontext->samplerate != 8000)
        gst_ebml_write_float (ebml, GST_MATROSKA_ID_AUDIOSAMPLINGFREQ,
            audiocontext->samplerate);
      if (audiocontext->channels != 1)
        gst_ebml_write_uint (ebml, GST_MATROSKA_ID_AUDIOCHANNELS,
            audiocontext->channels);
      if (audiocontext->bitdepth) {
        gst_ebml_write_uint (ebml, GST_MATROSKA_ID_AUDIOBITDEPTH,
            audiocontext->bitdepth);
      }

      gst_ebml_write_master_finish (ebml, master);

      break;
    }

    case GST_MATROSKA_TRACK_TYPE_SUBTITLE:{
      /* nothing track-type specific to write for subtitles */
      break;
    }
    default:
      /* doesn't need type-specific data */
      break;
  }

  GST_DEBUG_OBJECT (mux, "Wrote track header. Codec %s", context->codec_id);

  gst_ebml_write_ascii (ebml, GST_MATROSKA_ID_CODECID, context->codec_id);
  if (context->codec_priv)
    gst_ebml_write_binary (ebml, GST_MATROSKA_ID_CODECPRIVATE,
        context->codec_priv, context->codec_priv_size);

  if (context->seek_preroll) {
    gst_ebml_write_uint (ebml, GST_MATROSKA_ID_SEEKPREROLL,
        context->seek_preroll);
  }

  if (context->codec_delay) {
    gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CODECDELAY,
        context->codec_delay);
  }
}
+
+static void
+gst_matroska_mux_write_chapter_title (const gchar * title, GstEbmlWrite * ebml)
+{
+ guint64 title_master;
+
+ title_master =
+ gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_CHAPTERDISPLAY);
+
+ gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_CHAPSTRING, title);
+ gst_ebml_write_ascii (ebml, GST_MATROSKA_ID_CHAPLANGUAGE,
+ GST_MATROSKA_MUX_CHAPLANG);
+
+ gst_ebml_write_master_finish (ebml, title_master);
+}
+
/* Write one ChapterAtom for @entry, recursing into its sub-chapters, and
 * return a mirrored "internal" GstTocEntry that carries the generated
 * numeric ChapterUID in its UID string, so tags can reference the chapter
 * later (see gst_matroska_mux_write_toc_entry_tags).
 *
 * For the first chapter of an edition, @master_chapters/@master_edition
 * point at zero-initialized positions and the Chapters/EditionEntry master
 * elements are opened here on demand; recursive calls pass NULL for both. */
static GstTocEntry *
gst_matroska_mux_write_chapter (GstMatroskaMux * mux, GstTocEntry * edition,
    GstTocEntry * entry, GstEbmlWrite * ebml, guint64 * master_chapters,
    guint64 * master_edition)
{
  guint64 master_chapteratom;
  GList *cur;
  guint count, i;
  gchar *title;
  gint64 start, stop;
  guint64 uid;
  gchar s_uid[32];
  GstTocEntry *internal_chapter, *internal_nested;
  GstTagList *tags;

  /* lazily open the top-level Chapters master, exactly once */
  if (G_UNLIKELY (master_chapters != NULL && *master_chapters == 0))
    *master_chapters =
        gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_CHAPTERS);

  if (G_UNLIKELY (master_edition != NULL && *master_edition == 0)) {
    /* create uid for the parent */
    *master_edition =
        gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_EDITIONENTRY);

    gst_ebml_write_uint (ebml, GST_MATROSKA_ID_EDITIONUID,
        g_ascii_strtoull (gst_toc_entry_get_uid (edition), NULL, 10));
    gst_ebml_write_uint (ebml, GST_MATROSKA_ID_EDITIONFLAGHIDDEN, 0);
    gst_ebml_write_uint (ebml, GST_MATROSKA_ID_EDITIONFLAGDEFAULT, 0);
    gst_ebml_write_uint (ebml, GST_MATROSKA_ID_EDITIONFLAGORDERED, 0);
  }

  gst_toc_entry_get_start_stop_times (entry, &start, &stop);
  /* copy so the title tag can be stripped below without modifying the
   * caller's entry */
  tags = gst_toc_entry_get_tags (entry);
  if (tags != NULL) {
    tags = gst_tag_list_copy (tags);
  }

  /* build internal chapter */
  uid = gst_matroska_mux_create_uid (mux);
  g_snprintf (s_uid, sizeof (s_uid), "%" G_GINT64_FORMAT, uid);
  internal_chapter = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_CHAPTER, s_uid);

  /* Write the chapter entry */
  master_chapteratom =
      gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_CHAPTERATOM);

  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CHAPTERUID, uid);
  /* Store the user provided UID in the ChapterStringUID */
  gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_CHAPTERSTRINGUID,
      gst_toc_entry_get_uid (entry));
  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CHAPTERTIMESTART, start);
  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CHAPTERTIMESTOP, stop);
  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CHAPTERFLAGHIDDEN, 0);
  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CHAPTERFLAGENABLED, 1);

  /* write current ChapterDisplays before the nested chapters */
  if (G_LIKELY (tags != NULL)) {
    count = gst_tag_list_get_tag_size (tags, GST_TAG_TITLE);

    for (i = 0; i < count; ++i) {
      /* NOTE(review): return value unchecked; title would stay
       * uninitialized on failure — presumably i < count guarantees
       * success, confirm */
      gst_tag_list_get_string_index (tags, GST_TAG_TITLE, i, &title);
      /* FIXME: handle ChapterLanguage entries */
      gst_matroska_mux_write_chapter_title (title, ebml);
      g_free (title);
    }

    /* remove title tag */
    if (G_LIKELY (count > 0))
      gst_tag_list_remove_tag (tags, GST_TAG_TITLE);

    /* remaining tags travel on the internal entry and are written later
     * in the Tags section */
    gst_toc_entry_set_tags (internal_chapter, tags);
  }

  /* Write nested chapters */
  for (cur = gst_toc_entry_get_sub_entries (entry); cur != NULL;
      cur = cur->next) {
    internal_nested = gst_matroska_mux_write_chapter (mux, NULL, cur->data,
        ebml, NULL, NULL);

    gst_toc_entry_append_sub_entry (internal_chapter, internal_nested);
  }

  gst_ebml_write_master_finish (ebml, master_chapteratom);

  return internal_chapter;
}
+
+static GstTocEntry *
+gst_matroska_mux_write_chapter_edition (GstMatroskaMux * mux,
+ GstTocEntry * edition, GList * chapters, GstEbmlWrite * ebml,
+ guint64 * master_chapters)
+{
+ guint64 master_edition = 0;
+ gchar s_uid[32];
+ GList *cur;
+ GstTocEntry *internal_edition, *internal_chapter;
+ GstTagList *tags = NULL;
+
+ g_snprintf (s_uid, sizeof (s_uid), "%" G_GINT64_FORMAT,
+ gst_matroska_mux_create_uid (mux));
+
+ if (edition != NULL) {
+ /* Edition entry defined, get its tags */
+ tags = gst_toc_entry_get_tags (edition);
+ if (tags != NULL) {
+ tags = gst_tag_list_copy (tags);
+ }
+ }
+
+ internal_edition = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_EDITION, s_uid);
+ if (tags != NULL) {
+ gst_toc_entry_set_tags (internal_edition, tags);
+ }
+
+ for (cur = g_list_first (chapters); cur != NULL; cur = cur->next) {
+ internal_chapter = gst_matroska_mux_write_chapter (mux, internal_edition,
+ cur->data, ebml, master_chapters, &master_edition);
+
+ gst_toc_entry_append_sub_entry (internal_edition, internal_chapter);
+ }
+
+ if (G_LIKELY (master_edition != 0))
+ gst_ebml_write_master_finish (ebml, master_edition);
+
+ return internal_edition;
+}
+
/**
 * gst_matroska_mux_start:
 * @mux: #GstMatroskaMux
 * @first_pad: the pad whose first buffer triggered header writing
 * @first_pad_buf: first buffer of @first_pad, not yet queued in the
 *     collect pads (may be %NULL)
 *
 * Start a new matroska file (write headers etc...)
 */
static void
gst_matroska_mux_start (GstMatroskaMux * mux, GstMatroskaPad * first_pad,
    GstBuffer * first_pad_buf)
{
  GstEbmlWrite *ebml = mux->ebml_write;
  const gchar *doctype;
  /* elements referenced from the seekhead, in the order the seekhead
   * entries are written (offsets patched in gst_matroska_mux_finish) */
  guint32 seekhead_id[] = { GST_MATROSKA_ID_SEGMENTINFO,
    GST_MATROSKA_ID_TRACKS,
    GST_MATROSKA_ID_CHAPTERS,
    GST_MATROSKA_ID_CUES,
    GST_MATROSKA_ID_TAGS,
    0
  };
  const gchar *media_type;
  gboolean audio_only;
  guint64 master, child;
  GSList *collected;
  int i;
  guint tracknum = 1;
  GstClockTime earliest_time = GST_CLOCK_TIME_NONE;
  GstClockTime duration = 0;
  guint32 segment_uid[4];
  gint64 time;
  gchar s_id[32];
  GstToc *toc;

  /* if not streaming, check if downstream is seekable */
  if (!mux->ebml_write->streamable) {
    gboolean seekable;
    GstQuery *query;

    query = gst_query_new_seeking (GST_FORMAT_BYTES);
    if (gst_pad_peer_query (mux->srcpad, query)) {
      gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
      GST_INFO_OBJECT (mux, "downstream is %sseekable", seekable ? "" : "not ");
    } else {
      /* assume seeking is not supported if query not handled downstream */
      GST_WARNING_OBJECT (mux, "downstream did not handle seeking query");
      seekable = FALSE;
    }
    if (!seekable) {
      /* force streamable mode: we cannot rewrite headers later */
      mux->ebml_write->streamable = TRUE;
      g_object_notify (G_OBJECT (mux), "streamable");
      GST_WARNING_OBJECT (mux, "downstream is not seekable, but "
          "streamable=false. Will ignore that and create streamable output "
          "instead");
    }
    gst_query_unref (query);
  }

  /* stream-start (FIXME: create id based on input ids) */
  g_snprintf (s_id, sizeof (s_id), "matroskamux-%08x", g_random_int ());
  gst_pad_push_event (mux->srcpad, gst_event_new_stream_start (s_id));

  /* output caps: webm vs matroska, audio-only vs video */
  audio_only = mux->num_v_streams == 0 && mux->num_a_streams > 0;
  if (mux->is_webm) {
    media_type = (audio_only) ? "audio/webm" : "video/webm";
  } else {
    media_type = (audio_only) ? "audio/x-matroska" : "video/x-matroska";
  }
  ebml->caps = gst_caps_new_empty_simple (media_type);
  gst_pad_set_caps (mux->srcpad, ebml->caps);
  /* we start with a EBML header */
  doctype = mux->doctype;
  GST_INFO_OBJECT (ebml, "DocType: %s, Version: %d",
      doctype, mux->doctype_version);
  gst_ebml_write_header (ebml, doctype, mux->doctype_version);

  /* the rest of the header is cached */
  gst_ebml_write_set_cache (ebml, 0x1000);

  /* start a segment */
  mux->segment_pos =
      gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_SEGMENT);
  mux->segment_master = ebml->pos;

  if (!mux->ebml_write->streamable) {
    /* seekhead (table of contents) - we set the positions later;
     * -1 is a placeholder patched in gst_matroska_mux_finish */
    mux->seekhead_pos = ebml->pos;
    master = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_SEEKHEAD);
    for (i = 0; seekhead_id[i] != 0; i++) {
      child = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_SEEKENTRY);
      gst_ebml_write_uint (ebml, GST_MATROSKA_ID_SEEKID, seekhead_id[i]);
      gst_ebml_write_uint (ebml, GST_MATROSKA_ID_SEEKPOSITION, -1);
      gst_ebml_write_master_finish (ebml, child);
    }
    gst_ebml_write_master_finish (ebml, master);
  }

  if (mux->ebml_write->streamable) {
    const GstTagList *tags;
    gboolean has_main_tags;

    /* tags: in streamable mode they must be written up front since we
     * cannot seek back at EOS */
    tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (mux));
    has_main_tags = tags != NULL && !gst_matroska_mux_tag_list_is_empty (tags);

    if (has_main_tags || gst_matroska_mux_streams_have_tags (mux)) {
      guint64 master_tags, master_tag;

      GST_DEBUG_OBJECT (mux, "Writing tags");

      mux->tags_pos = ebml->pos;
      master_tags = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAGS);
      if (has_main_tags) {
        master_tag = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAG);
        gst_tag_list_foreach (tags, gst_matroska_mux_write_simple_tag, ebml);
        gst_ebml_write_master_finish (ebml, master_tag);
      }
      gst_matroska_mux_write_streams_tags (mux);
      gst_ebml_write_master_finish (ebml, master_tags);
    }
  }

  /* segment info */
  mux->info_pos = ebml->pos;
  master = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_SEGMENTINFO);

  /* WebM does not support SegmentUID field on SegmentInfo */
  if (!mux->is_webm) {
    /* random 128-bit segment UID (4 x 32 random bits) */
    for (i = 0; i < 4; i++) {
      segment_uid[i] = g_random_int ();
    }
    gst_ebml_write_binary (ebml, GST_MATROSKA_ID_SEGMENTUID,
        (guint8 *) segment_uid, 16);
  }

  gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TIMECODESCALE, mux->time_scale);
  mux->duration_pos = ebml->pos;
  /* get duration: use the longest upstream duration across all pads
   * (only possible in non-streamable mode, where it can be rewritten
   * at EOS anyway) */
  if (!mux->ebml_write->streamable) {
    for (collected = mux->collect->data; collected;
        collected = g_slist_next (collected)) {
      GstMatroskaPad *collect_pad;
      GstPad *thepad;
      gint64 trackduration;

      collect_pad = (GstMatroskaPad *) collected->data;
      thepad = collect_pad->collect.pad;

      /* Query the total length of the track. */
      GST_DEBUG_OBJECT (thepad, "querying peer duration");
      if (gst_pad_peer_query_duration (thepad, GST_FORMAT_TIME, &trackduration)) {
        GST_DEBUG_OBJECT (thepad, "duration: %" GST_TIME_FORMAT,
            GST_TIME_ARGS (trackduration));
        if (trackduration != GST_CLOCK_TIME_NONE && trackduration > duration) {
          duration = (GstClockTime) trackduration;
        }
      }
    }
    /* Duration element is expressed in timecode-scale units */
    gst_ebml_write_float (ebml, GST_MATROSKA_ID_DURATION,
        gst_guint64_to_gdouble (duration) /
        gst_guint64_to_gdouble (mux->time_scale));
  }
  gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_MUXINGAPP,
      "GStreamer matroskamux version " PACKAGE_VERSION);
  if (mux->writing_app && mux->writing_app[0]) {
    gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_WRITINGAPP, mux->writing_app);
  }
  /* DateUTC: explicit creation-time property if set, else wall clock;
   * both converted to nanoseconds */
  if (mux->creation_time != NULL) {
    time = g_date_time_to_unix (mux->creation_time) * GST_SECOND;
    time += g_date_time_get_microsecond (mux->creation_time) * GST_USECOND;
  } else {
    time = g_get_real_time () * GST_USECOND;
  }
  gst_ebml_write_date (ebml, GST_MATROSKA_ID_DATEUTC, time);
  gst_ebml_write_master_finish (ebml, master);

  /* tracks */
  mux->tracks_pos = ebml->pos;
  master = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TRACKS);

  for (collected = mux->collect->data; collected;
      collected = g_slist_next (collected)) {
    GstMatroskaPad *collect_pad;
    GstBuffer *buf;

    collect_pad = (GstMatroskaPad *) collected->data;

    /* This will cause an error at a later time */
    if (collect_pad->track->codec_id == NULL)
      continue;

    /* Find the smallest timestamp so we can offset all streams by this to
     * start at 0 */
    if (mux->offset_to_zero) {
      GstClockTime ts;

      /* first_pad's buffer is not in the collect pads yet, so it is
       * passed in explicitly */
      if (collect_pad == first_pad)
        buf = first_pad_buf ? gst_buffer_ref (first_pad_buf) : NULL;
      else
        buf = gst_collect_pads_peek (mux->collect, collected->data);

      if (buf) {
        ts = gst_matroska_track_get_buffer_timestamp (collect_pad->track, buf);

        if (earliest_time == GST_CLOCK_TIME_NONE)
          earliest_time = ts;
        else if (ts != GST_CLOCK_TIME_NONE && ts < earliest_time)
          earliest_time = ts;
      }

      if (buf)
        gst_buffer_unref (buf);
    }

    /* For audio tracks, use the first buffers duration as the default
     * duration if we didn't get any better idea from the caps event already
     */
    if (collect_pad->track->type == GST_MATROSKA_TRACK_TYPE_AUDIO &&
        collect_pad->track->default_duration == 0) {
      if (collect_pad == first_pad)
        buf = first_pad_buf ? gst_buffer_ref (first_pad_buf) : NULL;
      else
        buf = gst_collect_pads_peek (mux->collect, collected->data);

      if (buf && GST_BUFFER_DURATION_IS_VALID (buf))
        collect_pad->track->default_duration =
            GST_BUFFER_DURATION (buf) + collect_pad->track->codec_delay;
      if (buf)
        gst_buffer_unref (buf);
    }

    /* track numbers are assigned sequentially, starting at 1 */
    collect_pad->track->num = tracknum++;
    child = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TRACKENTRY);
    gst_matroska_mux_track_header (mux, collect_pad->track);
    gst_ebml_write_master_finish (ebml, child);
    /* some remaining pad/track setup */
    collect_pad->default_duration_scaled =
        gst_util_uint64_scale (collect_pad->track->default_duration,
        1, mux->time_scale);
  }
  gst_ebml_write_master_finish (ebml, master);

  /* no valid timestamp found: offset by 0 */
  mux->earliest_time = earliest_time == GST_CLOCK_TIME_NONE ? 0 : earliest_time;

  /* chapters */
  toc = gst_toc_setter_get_toc (GST_TOC_SETTER (mux));
  if (toc != NULL && !mux->ebml_write->streamable) {
    guint64 master_chapters = 0;
    GstTocEntry *internal_edition;
    GList *cur, *chapters;

    GST_DEBUG ("Writing chapters");

    /* There are two UIDs for Chapters:
     * - The ChapterUID is a mandatory unsigned integer which internally
     * refers to a given chapter. Except for the title & language which use
     * dedicated fields, this UID can also be used to add tags to the Chapter.
     * The tags come in a separate section of the container.
     * - The ChapterStringUID is an optional UTF-8 string which also uniquely
     * refers to a chapter but from an external perspective. It can act as a
     * "WebVTT cue identifier" which "can be used to reference a specific cue,
     * for example from script or CSS".
     *
     * The ChapterUID will be generated and checked for unicity, while the
     * ChapterStringUID will receive the user defined UID.
     *
     * In order to be able to refer to chapters from the tags section,
     * we must maintain an internal Toc tree with the generated ChapterUID
     * (see gst_matroska_mux_write_toc_entry_tags) */

    /* Check whether we have editions or chapters at the root level. */
    cur = gst_toc_get_entries (toc);
    if (cur != NULL) {
      mux->chapters_pos = ebml->pos;

      mux->internal_toc = gst_toc_new (GST_TOC_SCOPE_GLOBAL);

      if (gst_toc_entry_get_entry_type (cur->data) ==
          GST_TOC_ENTRY_TYPE_EDITION) {
        /* Editions at the root level */
        for (; cur != NULL; cur = cur->next) {
          chapters = gst_toc_entry_get_sub_entries (cur->data);
          internal_edition = gst_matroska_mux_write_chapter_edition (mux,
              cur->data, chapters, ebml, &master_chapters);
          gst_toc_append_entry (mux->internal_toc, internal_edition);
        }
      } else {
        /* Chapters at the root level */
        internal_edition = gst_matroska_mux_write_chapter_edition (mux,
            NULL, cur, ebml, &master_chapters);
        gst_toc_append_entry (mux->internal_toc, internal_edition);
      }

      /* close master element if any edition was written */
      if (G_LIKELY (master_chapters != 0))
        gst_ebml_write_master_finish (ebml, master_chapters);
    }
  }

  /* lastly, flush the cache */
  gst_ebml_write_flush_cache (ebml, FALSE, 0);

  if (toc != NULL)
    gst_toc_unref (toc);
}
+
/* TODO: more sensible tag mappings */
/* Mapping between Matroska SimpleTag names and GStreamer tag names.  Used
 * both when writing tags (gst_matroska_mux_write_simple_tag) and when
 * deciding whether a tag list contains anything writable at all
 * (gst_matroska_mux_tag_list_is_empty).  Note that GST_TAG_BITRATE maps
 * from two matroska names (BITSPS and BPS); lookups that stop at the first
 * match therefore use BITSPS. */
static const struct
{
  const gchar *matroska_tagname;
  const gchar *gstreamer_tagname;
}
gst_matroska_tag_conv[] = {
  {
  GST_MATROSKA_TAG_ID_TITLE, GST_TAG_TITLE}, {
  GST_MATROSKA_TAG_ID_ARTIST, GST_TAG_ARTIST}, {
  GST_MATROSKA_TAG_ID_ALBUM, GST_TAG_ALBUM}, {
  GST_MATROSKA_TAG_ID_COMMENTS, GST_TAG_COMMENT}, {
  GST_MATROSKA_TAG_ID_BITSPS, GST_TAG_BITRATE}, {
  GST_MATROSKA_TAG_ID_BPS, GST_TAG_BITRATE}, {
  GST_MATROSKA_TAG_ID_ENCODER, GST_TAG_ENCODER}, {
  GST_MATROSKA_TAG_ID_DATE, GST_TAG_DATE}, {
  GST_MATROSKA_TAG_ID_ISRC, GST_TAG_ISRC}, {
  GST_MATROSKA_TAG_ID_COPYRIGHT, GST_TAG_COPYRIGHT}, {
  GST_MATROSKA_TAG_ID_BPM, GST_TAG_BEATS_PER_MINUTE}, {
  GST_MATROSKA_TAG_ID_TERMS_OF_USE, GST_TAG_LICENSE}, {
  GST_MATROSKA_TAG_ID_COMPOSER, GST_TAG_COMPOSER}, {
  GST_MATROSKA_TAG_ID_LEAD_PERFORMER, GST_TAG_PERFORMER}, {
  GST_MATROSKA_TAG_ID_GENRE, GST_TAG_GENRE}
};
+
+/* Every stagefright implementation on android up to and including 6.0.1 is using
+ libwebm with bug in matroska parsing, where it will choke on empty tag elements;
+ so before outputting tags and tag elements we better make sure that there are
+ actually tags we are going to write */
+static gboolean
+gst_matroska_mux_tag_list_is_empty (const GstTagList * list)
+{
+ int i;
+ for (i = 0; i < gst_tag_list_n_tags (list); i++) {
+ const gchar *tag = gst_tag_list_nth_tag_name (list, i);
+ int i;
+ for (i = 0; i < G_N_ELEMENTS (gst_matroska_tag_conv); i++) {
+ const gchar *tagname_gst = gst_matroska_tag_conv[i].gstreamer_tagname;
+ if (strcmp (tagname_gst, tag) == 0) {
+ GValue src = { 0, };
+ gchar *dest;
+
+ if (!gst_tag_list_copy_value (&src, list, tag))
+ break;
+ dest = gst_value_serialize (&src);
+
+ g_value_unset (&src);
+ if (dest) {
+ g_free (dest);
+ return FALSE;
+ }
+ }
+ }
+ }
+ return TRUE;
+}
+
+static void
+gst_matroska_mux_write_simple_tag (const GstTagList * list, const gchar * tag,
+ gpointer data)
+{
+ GstEbmlWrite *ebml = (GstEbmlWrite *) data;
+ guint i;
+ guint64 simpletag_master;
+
+ for (i = 0; i < G_N_ELEMENTS (gst_matroska_tag_conv); i++) {
+ const gchar *tagname_gst = gst_matroska_tag_conv[i].gstreamer_tagname;
+ const gchar *tagname_mkv = gst_matroska_tag_conv[i].matroska_tagname;
+
+ if (strcmp (tagname_gst, tag) == 0) {
+ GValue src = { 0, };
+ gchar *dest;
+
+ if (!gst_tag_list_copy_value (&src, list, tag))
+ break;
+ if ((dest = gst_value_serialize (&src))) {
+
+ simpletag_master = gst_ebml_write_master_start (ebml,
+ GST_MATROSKA_ID_SIMPLETAG);
+ gst_ebml_write_ascii (ebml, GST_MATROSKA_ID_TAGNAME, tagname_mkv);
+ gst_ebml_write_utf8 (ebml, GST_MATROSKA_ID_TAGSTRING, dest);
+ gst_ebml_write_master_finish (ebml, simpletag_master);
+ g_free (dest);
+ } else {
+ GST_WARNING ("Can't transform tag '%s' to string", tagname_mkv);
+ }
+ g_value_unset (&src);
+ break;
+ }
+ }
+}
+
+static void
+gst_matroska_mux_write_stream_tags (GstMatroskaMux * mux, GstMatroskaPad * mpad)
+{
+ guint64 master_tag, master_targets;
+ GstEbmlWrite *ebml;
+
+ ebml = mux->ebml_write;
+
+ if (G_UNLIKELY (mpad->tags == NULL
+ || gst_matroska_mux_tag_list_is_empty (mpad->tags)))
+ return;
+
+ master_tag = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAG);
+ master_targets = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TARGETS);
+
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TARGETTRACKUID, mpad->track->uid);
+
+ gst_ebml_write_master_finish (ebml, master_targets);
+ gst_tag_list_foreach (mpad->tags, gst_matroska_mux_write_simple_tag, ebml);
+ gst_ebml_write_master_finish (ebml, master_tag);
+}
+
+static void
+gst_matroska_mux_write_streams_tags (GstMatroskaMux * mux)
+{
+ GSList *walk;
+
+ for (walk = mux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstMatroskaPad *collect_pad;
+
+ collect_pad = (GstMatroskaPad *) walk->data;
+
+ gst_matroska_mux_write_stream_tags (mux, collect_pad);
+ }
+}
+
+static gboolean
+gst_matroska_mux_streams_have_tags (GstMatroskaMux * mux)
+{
+ GSList *walk;
+
+ for (walk = mux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstMatroskaPad *collect_pad;
+
+ collect_pad = (GstMatroskaPad *) walk->data;
+ if (!gst_matroska_mux_tag_list_is_empty (collect_pad->tags))
+ return TRUE;
+ }
+ return FALSE;
+}
+
/* Recursively write a Tag element for @entry and each of its sub-entries.
 *
 * The Tags master element is opened lazily: *master_tags stays 0 until the
 * first entry that actually carries writable tags, and *has_tags is set to
 * TRUE in that case (the caller closes the master).  Entries come from
 * mux->internal_toc, whose UID strings hold the generated numeric
 * ChapterUID/EditionUID values. */
static void
gst_matroska_mux_write_toc_entry_tags (GstMatroskaMux * mux,
    const GstTocEntry * entry, guint64 * master_tags, gboolean * has_tags)
{
  guint64 master_tag, master_targets;
  GstEbmlWrite *ebml;
  GList *cur;
  const GstTagList *tags;

  ebml = mux->ebml_write;

  tags = gst_toc_entry_get_tags (entry);
  if (G_UNLIKELY (tags != NULL && !gst_matroska_mux_tag_list_is_empty (tags))) {
    *has_tags = TRUE;

    if (*master_tags == 0) {
      /* first tagged entry: open the Tags master and remember its
       * position for the seekhead */
      mux->tags_pos = ebml->pos;
      *master_tags = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAGS);
    }

    master_tag = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAG);
    master_targets =
        gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TARGETS);

    /* target the edition or chapter via the numeric UID stored in the
     * internal entry's UID string */
    if (gst_toc_entry_get_entry_type (entry) == GST_TOC_ENTRY_TYPE_EDITION)
      gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TARGETEDITIONUID,
          g_ascii_strtoull (gst_toc_entry_get_uid (entry), NULL, 10));
    else
      gst_ebml_write_uint (ebml, GST_MATROSKA_ID_TARGETCHAPTERUID,
          g_ascii_strtoull (gst_toc_entry_get_uid (entry), NULL, 10));

    gst_ebml_write_master_finish (ebml, master_targets);
    gst_tag_list_foreach (tags, gst_matroska_mux_write_simple_tag, ebml);
    gst_ebml_write_master_finish (ebml, master_tag);
  }

  for (cur = gst_toc_entry_get_sub_entries (entry); cur != NULL;
      cur = cur->next) {
    gst_matroska_mux_write_toc_entry_tags (mux, cur->data, master_tags,
        has_tags);
  }
}
+
+/**
+ * gst_matroska_mux_finish:
+ * @mux: #GstMatroskaMux
+ *
+ * Finish a new matroska file (write index etc...)
+ */
+static void
+gst_matroska_mux_finish (GstMatroskaMux * mux)
+{
+ GstEbmlWrite *ebml = mux->ebml_write;
+ guint64 pos;
+ guint64 duration = 0;
+ GSList *collected;
+ const GstTagList *tags, *toc_tags;
+ const GstToc *toc;
+ gboolean has_main_tags, toc_has_tags = FALSE;
+ GList *cur;
+
+ /* finish last cluster */
+ if (mux->cluster) {
+ gst_ebml_write_master_finish (ebml, mux->cluster);
+ }
+
+ /* cues */
+ if (mux->index != NULL) {
+ guint n;
+ guint64 master, pointentry_master, trackpos_master;
+
+ mux->cues_pos = ebml->pos;
+ gst_ebml_write_set_cache (ebml, 12 + 41 * mux->num_indexes);
+ master = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_CUES);
+
+ for (n = 0; n < mux->num_indexes; n++) {
+ GstMatroskaIndex *idx = &mux->index[n];
+
+ pointentry_master = gst_ebml_write_master_start (ebml,
+ GST_MATROSKA_ID_POINTENTRY);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CUETIME,
+ idx->time / mux->time_scale);
+ trackpos_master = gst_ebml_write_master_start (ebml,
+ GST_MATROSKA_ID_CUETRACKPOSITIONS);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CUETRACK, idx->track);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CUECLUSTERPOSITION,
+ idx->pos - mux->segment_master);
+ gst_ebml_write_master_finish (ebml, trackpos_master);
+ gst_ebml_write_master_finish (ebml, pointentry_master);
+ }
+
+ gst_ebml_write_master_finish (ebml, master);
+ gst_ebml_write_flush_cache (ebml, FALSE, GST_CLOCK_TIME_NONE);
+ }
+
+ /* tags */
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (mux));
+ has_main_tags = tags != NULL && !gst_matroska_mux_tag_list_is_empty (tags);
+ toc = gst_toc_setter_get_toc (GST_TOC_SETTER (mux));
+
+ if (has_main_tags || gst_matroska_mux_streams_have_tags (mux) || toc != NULL) {
+ guint64 master_tags = 0, master_tag;
+
+ GST_DEBUG_OBJECT (mux, "Writing tags");
+
+ if (has_main_tags) {
+ /* TODO: maybe limit via the TARGETS id by looking at the source pad */
+ mux->tags_pos = ebml->pos;
+ master_tags = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAGS);
+ master_tag = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAG);
+
+ if (tags != NULL)
+ gst_tag_list_foreach (tags, gst_matroska_mux_write_simple_tag, ebml);
+ if (mux->internal_toc != NULL) {
+ toc_tags = gst_toc_get_tags (mux->internal_toc);
+ toc_has_tags = (toc_tags != NULL);
+ gst_tag_list_foreach (toc_tags, gst_matroska_mux_write_simple_tag,
+ ebml);
+ }
+
+ gst_ebml_write_master_finish (ebml, master_tag);
+ }
+
+ if (mux->internal_toc != NULL) {
+ for (cur = gst_toc_get_entries (mux->internal_toc); cur != NULL;
+ cur = cur->next) {
+ gst_matroska_mux_write_toc_entry_tags (mux, cur->data, &master_tags,
+ &toc_has_tags);
+ }
+ }
+
+ if (master_tags == 0 && gst_matroska_mux_streams_have_tags (mux)) {
+ mux->tags_pos = ebml->pos;
+ master_tags = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_TAGS);
+ }
+ gst_matroska_mux_write_streams_tags (mux);
+
+ if (master_tags != 0)
+ gst_ebml_write_master_finish (ebml, master_tags);
+ }
+
+ /* update seekhead. We know that:
+ * - a seekhead contains 5 entries.
+ * - order of entries is as above.
+ * - a seekhead has a 4-byte header + 8-byte length
+ * - each entry is 2-byte master, 2-byte ID pointer,
+ * 2-byte length pointer, all 8/1-byte length, 4-
+ * byte ID and 8-byte length pointer, where the
+ * length pointer starts at 20.
+ * - all entries are local to the segment (so pos - segment_master).
+ * - so each entry is at 12 + 20 + num * 28. */
+ gst_ebml_replace_uint (ebml, mux->seekhead_pos + 32,
+ mux->info_pos - mux->segment_master);
+ gst_ebml_replace_uint (ebml, mux->seekhead_pos + 60,
+ mux->tracks_pos - mux->segment_master);
+ if (toc != NULL && mux->chapters_pos > 0) {
+ gst_ebml_replace_uint (ebml, mux->seekhead_pos + 88,
+ mux->chapters_pos - mux->segment_master);
+ } else {
+ /* void'ify */
+ guint64 my_pos = ebml->pos;
+
+ gst_ebml_write_seek (ebml, mux->seekhead_pos + 68);
+ gst_ebml_write_buffer_header (ebml, GST_EBML_ID_VOID, 26);
+ gst_ebml_write_seek (ebml, my_pos);
+ }
+ if (mux->index != NULL) {
+ gst_ebml_replace_uint (ebml, mux->seekhead_pos + 116,
+ mux->cues_pos - mux->segment_master);
+ } else {
+ /* void'ify */
+ guint64 my_pos = ebml->pos;
+
+ gst_ebml_write_seek (ebml, mux->seekhead_pos + 96);
+ gst_ebml_write_buffer_header (ebml, GST_EBML_ID_VOID, 26);
+ gst_ebml_write_seek (ebml, my_pos);
+ }
+
+ if (mux->tags_pos != 0 || toc_has_tags) {
+ gst_ebml_replace_uint (ebml, mux->seekhead_pos + 144,
+ mux->tags_pos - mux->segment_master);
+ } else {
+ /* void'ify */
+ guint64 my_pos = ebml->pos;
+
+ gst_ebml_write_seek (ebml, mux->seekhead_pos + 124);
+ gst_ebml_write_buffer_header (ebml, GST_EBML_ID_VOID, 26);
+ gst_ebml_write_seek (ebml, my_pos);
+ }
+
+ if (toc != NULL) {
+ gst_toc_unref (toc);
+ }
+
+ /* loop tracks:
+ * - first get the overall duration
+ * (a released track may have left a duration in here)
+ * - write some track header data for subtitles
+ */
+ duration = mux->duration;
+ pos = ebml->pos;
+ for (collected = mux->collect->data; collected;
+ collected = g_slist_next (collected)) {
+ GstMatroskaPad *collect_pad;
+ /*
+ * observed duration, this will never remain GST_CLOCK_TIME_NONE
+ * since this means buffer without timestamps that is not possible
+ */
+ GstClockTime collected_duration = GST_CLOCK_TIME_NONE;
+
+ collect_pad = (GstMatroskaPad *) collected->data;
+
+ GST_DEBUG_OBJECT (mux,
+ "Pad %" GST_PTR_FORMAT " start ts %" GST_TIME_FORMAT
+ " end ts %" GST_TIME_FORMAT, collect_pad,
+ GST_TIME_ARGS (collect_pad->start_ts),
+ GST_TIME_ARGS (collect_pad->end_ts));
+
+ if (GST_CLOCK_TIME_IS_VALID (collect_pad->start_ts) &&
+ GST_CLOCK_TIME_IS_VALID (collect_pad->end_ts)) {
+ collected_duration =
+ GST_CLOCK_DIFF (collect_pad->start_ts, collect_pad->end_ts);
+ GST_DEBUG_OBJECT (collect_pad->collect.pad,
+ "final track duration: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (collected_duration));
+ } else {
+ GST_WARNING_OBJECT (collect_pad->collect.pad,
+ "unable to get final track duration");
+ }
+ if (GST_CLOCK_TIME_IS_VALID (collected_duration) &&
+ duration < collected_duration)
+ duration = collected_duration;
+
+ }
+
+ /* seek back (optional, but do anyway) */
+ gst_ebml_write_seek (ebml, pos);
+
+ /* update duration */
+ if (duration != 0) {
+ GST_DEBUG_OBJECT (mux, "final total duration: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (duration));
+ pos = mux->ebml_write->pos;
+ gst_ebml_write_seek (ebml, mux->duration_pos);
+ gst_ebml_write_float (ebml, GST_MATROSKA_ID_DURATION,
+ gst_guint64_to_gdouble (duration) /
+ gst_guint64_to_gdouble (mux->time_scale));
+ gst_ebml_write_seek (ebml, pos);
+ } else {
+ /* void'ify */
+ guint64 my_pos = ebml->pos;
+
+ gst_ebml_write_seek (ebml, mux->duration_pos);
+ gst_ebml_write_buffer_header (ebml, GST_EBML_ID_VOID, 8);
+ gst_ebml_write_seek (ebml, my_pos);
+ }
+ GST_DEBUG_OBJECT (mux, "finishing segment");
+ /* finish segment - this also writes element length */
+ gst_ebml_write_master_finish (ebml, mux->segment_pos);
+}
+
+/**
+ * gst_matroska_mux_create_buffer_header:
+ * @track: Track context.
+ * @relative_timestamp: timestamp of the buffer relative to the cluster time,
+ * in timecodescale units (stored as big-endian signed 16 bit)
+ * @flags: Buffer flags byte (e.g. 0x80 = keyframe for SimpleBlock,
+ * 0x08 = invisible frame).
+ *
+ * Create a buffer containing the 4-byte Matroska (Simple)Block header:
+ * 1-byte EBML-coded track number, 2-byte relative timestamp, 1-byte flags.
+ *
+ * Returns: New buffer owning the freshly allocated header bytes.
+ */
+static GstBuffer *
+gst_matroska_mux_create_buffer_header (GstMatroskaTrackContext * track,
+ gint16 relative_timestamp, int flags)
+{
+ GstBuffer *hdr;
+ guint8 *data = g_malloc (4);
+
+ /* the wrapped buffer takes ownership of data */
+ hdr = gst_buffer_new_wrapped (data, 4);
+ /* track num - FIXME: what if num >= 0x80 (unlikely)?
+ * (0x80 marks a 1-byte EBML-coded track number) */
+ data[0] = track->num | 0x80;
+ /* time relative to clustertime */
+ GST_WRITE_UINT16_BE (data + 1, relative_timestamp);
+
+ /* flags */
+ data[3] = flags;
+
+ return hdr;
+}
+
+#define DIRAC_PARSE_CODE_SEQUENCE_HEADER 0x00
+#define DIRAC_PARSE_CODE_END_OF_SEQUENCE 0x10
+#define DIRAC_PARSE_CODE_IS_PICTURE(x) ((x & 0x08) != 0)
+
+/* Queue up Dirac parse units until a complete muxing unit (one that ends
+ * in a picture or end-of-sequence parse unit) has been collected on the
+ * track context. Takes ownership of @buf. Returns the finished unit ready
+ * for muxing, or NULL when more data is needed or when @buf does not look
+ * like valid Dirac data. */
+static GstBuffer *
+gst_matroska_mux_handle_dirac_packet (GstMatroskaMux * mux,
+ GstMatroskaPad * collect_pad, GstBuffer * buf)
+{
+ GstMatroskaTrackVideoContext *ctx =
+ (GstMatroskaTrackVideoContext *) collect_pad->track;
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
+ guint8 parse_code;
+ guint32 next_parse_offset;
+ GstBuffer *ret = NULL;
+ gboolean is_muxing_unit = FALSE;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ /* 13 bytes is the minimum size of a Dirac parse-info header; anything
+ * smaller cannot contain a parse unit at all */
+ if (size < 13) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return ret;
+ }
+
+ /* Check if this buffer contains a picture or end-of-sequence packet */
+ while (size >= 13) {
+ /* every parse unit must start with the 'BBCD' parse-info prefix */
+ if (GST_READ_UINT32_BE (data) != 0x42424344 /* 'BBCD' */ ) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ return ret;
+ }
+
+ parse_code = GST_READ_UINT8 (data + 4);
+ if (parse_code == DIRAC_PARSE_CODE_SEQUENCE_HEADER) {
+ /* a new sequence header starts a new muxing unit: drop whatever
+ * was accumulated so far */
+ if (ctx->dirac_unit) {
+ gst_buffer_unref (ctx->dirac_unit);
+ ctx->dirac_unit = NULL;
+ }
+ } else if (DIRAC_PARSE_CODE_IS_PICTURE (parse_code) ||
+ parse_code == DIRAC_PARSE_CODE_END_OF_SEQUENCE) {
+ is_muxing_unit = TRUE;
+ break;
+ }
+
+ next_parse_offset = GST_READ_UINT32_BE (data + 5);
+
+ /* 0 or out-of-range offset: cannot walk further in this buffer */
+ if (G_UNLIKELY (next_parse_offset == 0 || next_parse_offset > size))
+ break;
+
+ data += next_parse_offset;
+ size -= next_parse_offset;
+ }
+
+ /* append the whole incoming buffer to the pending unit */
+ if (ctx->dirac_unit)
+ ctx->dirac_unit = gst_buffer_append (ctx->dirac_unit, gst_buffer_ref (buf));
+ else
+ ctx->dirac_unit = gst_buffer_ref (buf);
+
+ gst_buffer_unmap (buf, &map);
+
+ if (is_muxing_unit) {
+ /* hand out the accumulated unit, carrying over the flags and
+ * timestamps of the buffer that completed it */
+ ret = gst_buffer_make_writable (ctx->dirac_unit);
+ ctx->dirac_unit = NULL;
+ gst_buffer_copy_into (ret, buf,
+ GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
+ gst_buffer_unref (buf);
+ } else {
+ gst_buffer_unref (buf);
+ ret = NULL;
+ }
+
+ return ret;
+}
+
+/* Finish streamheader collection: take the EBML bytes gathered since
+ * gst_ebml_start_streamheader() and expose them as the "streamheader"
+ * caps field on the source pad, flagged as HEADER data (so e.g. streaming
+ * servers can replay them to late-joining clients). */
+static void
+gst_matroska_mux_stop_streamheader (GstMatroskaMux * mux)
+{
+ GstCaps *caps;
+ GstStructure *s;
+ GValue streamheader = { 0 };
+ GValue bufval = { 0 };
+ GstBuffer *streamheader_buffer;
+ GstEbmlWrite *ebml = mux->ebml_write;
+
+ streamheader_buffer = gst_ebml_stop_streamheader (ebml);
+ caps = gst_caps_copy (mux->ebml_write->caps);
+ s = gst_caps_get_structure (caps, 0);
+ g_value_init (&streamheader, GST_TYPE_ARRAY);
+ g_value_init (&bufval, GST_TYPE_BUFFER);
+ GST_BUFFER_FLAG_SET (streamheader_buffer, GST_BUFFER_FLAG_HEADER);
+ gst_value_set_buffer (&bufval, streamheader_buffer);
+ gst_value_array_append_value (&streamheader, &bufval);
+ g_value_unset (&bufval);
+ gst_structure_set_value (s, "streamheader", &streamheader);
+ g_value_unset (&streamheader);
+ gst_caps_replace (&ebml->caps, caps);
+ gst_buffer_unref (streamheader_buffer);
+ gst_pad_set_caps (mux->srcpad, caps);
+ gst_caps_unref (caps);
+}
+
+/**
+ * gst_matroska_mux_write_data:
+ * @mux: #GstMatroskaMux
+ * @collect_pad: #GstMatroskaPad with the data
+ * @buf: buffer to write; this function takes ownership
+ *
+ * Write collected data (called from gst_matroska_mux_collected).
+ * Starts a new cluster when needed, maintains the cue index, and emits
+ * the buffer as a SimpleBlock (doctype v2+, no extra metadata needed)
+ * or as a full BlockGroup otherwise.
+ *
+ * Returns: Result of the gst_pad_push issued to write the data.
+ */
+static GstFlowReturn
+gst_matroska_mux_write_data (GstMatroskaMux * mux, GstMatroskaPad * collect_pad,
+ GstBuffer * buf)
+{
+ GstEbmlWrite *ebml = mux->ebml_write;
+ GstBuffer *hdr;
+ guint64 blockgroup;
+ gboolean write_duration;
+ guint64 cluster_time_scaled;
+ gint16 relative_timestamp;
+ gint64 relative_timestamp64;
+ guint64 block_duration, duration_diff = 0;
+ gboolean is_video_keyframe = FALSE;
+ gboolean is_video_invisible = FALSE;
+ gboolean is_audio_only = FALSE;
+ gboolean is_min_duration_reached = FALSE;
+ gboolean is_max_duration_exceeded = FALSE;
+ GstMatroskamuxPad *pad;
+ gint flags = 0;
+ GstClockTime buffer_timestamp;
+ GstAudioClippingMeta *cmeta = NULL;
+
+ /* write data */
+ pad = GST_MATROSKAMUX_PAD_CAST (collect_pad->collect.pad);
+
+ /* vorbis/theora headers are retrieved from caps and put in CodecPrivate */
+ if (collect_pad->track->xiph_headers_to_skip > 0) {
+ --collect_pad->track->xiph_headers_to_skip;
+ if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_HEADER)) {
+ GST_LOG_OBJECT (collect_pad->collect.pad, "dropping streamheader buffer");
+ gst_buffer_unref (buf);
+ return GST_FLOW_OK;
+ }
+ }
+
+ /* for dirac we have to queue up everything up to a picture unit */
+ if (!strcmp (collect_pad->track->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_DIRAC)) {
+ buf = gst_matroska_mux_handle_dirac_packet (mux, collect_pad, buf);
+ if (!buf)
+ return GST_FLOW_OK;
+ } else if (!strcmp (collect_pad->track->codec_id,
+ GST_MATROSKA_CODEC_ID_VIDEO_PRORES)) {
+ /* Remove the 'Frame container atom' header */
+ buf = gst_buffer_make_writable (buf);
+ gst_buffer_resize (buf, 8, gst_buffer_get_size (buf) - 8);
+ }
+
+ /* timestamps in the file are relative to the earliest timestamp seen */
+ buffer_timestamp =
+ gst_matroska_track_get_buffer_timestamp (collect_pad->track, buf);
+ if (buffer_timestamp >= mux->earliest_time) {
+ buffer_timestamp -= mux->earliest_time;
+ } else {
+ buffer_timestamp = 0;
+ }
+
+ /* hm, invalid timestamp (due to --to be fixed--- element upstream);
+ * this would wreak havoc with time stored in matroska file */
+ /* TODO: maybe calculate a timestamp by using the previous timestamp
+ * and default duration */
+ if (!GST_CLOCK_TIME_IS_VALID (buffer_timestamp)) {
+ GST_WARNING_OBJECT (collect_pad->collect.pad,
+ "Invalid buffer timestamp; dropping buffer");
+ gst_buffer_unref (buf);
+ return GST_FLOW_OK;
+ }
+
+ if (!strcmp (collect_pad->track->codec_id, GST_MATROSKA_CODEC_ID_AUDIO_OPUS)
+ && collect_pad->track->codec_delay) {
+ /* All timestamps should include the codec delay */
+ if (buffer_timestamp > collect_pad->track->codec_delay) {
+ buffer_timestamp += collect_pad->track->codec_delay;
+ } else {
+ buffer_timestamp = 0;
+ /* NOTE(review): buffer_timestamp was just zeroed above, so this
+ * always yields the full codec_delay rather than the remaining
+ * part of it -- verify this is intended */
+ duration_diff = collect_pad->track->codec_delay - buffer_timestamp;
+ }
+ }
+
+ /* set the timestamp for outgoing buffers */
+ ebml->timestamp = buffer_timestamp;
+
+ if (collect_pad->track->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+ if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+ GST_LOG_OBJECT (mux, "have video keyframe, ts=%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (buffer_timestamp));
+ is_video_keyframe = TRUE;
+ } else if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DECODE_ONLY) &&
+ (!strcmp (collect_pad->track->codec_id, GST_MATROSKA_CODEC_ID_VIDEO_VP8)
+ || !strcmp (collect_pad->track->codec_id,
+ GST_MATROSKA_CODEC_ID_VIDEO_VP9))) {
+ GST_LOG_OBJECT (mux,
+ "have VP8 video invisible frame, " "ts=%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (buffer_timestamp));
+ is_video_invisible = TRUE;
+ }
+ }
+
+ /* From this point on we use the buffer_timestamp to do cluster and other
+ * related arithmetic, so apply the timestamp offset if we have one */
+ buffer_timestamp += mux->cluster_timestamp_offset;
+
+ is_audio_only = (collect_pad->track->type == GST_MATROSKA_TRACK_TYPE_AUDIO) &&
+ (mux->num_streams == 1);
+ is_min_duration_reached = (mux->min_cluster_duration == 0
+ || (buffer_timestamp > mux->cluster_time
+ && (buffer_timestamp - mux->cluster_time) >=
+ mux->min_cluster_duration));
+ /* relative block timestamps are signed 16 bit in timecodescale units,
+ * so never let a cluster grow past what G_MAXINT16 can express */
+ is_max_duration_exceeded = (mux->max_cluster_duration > 0
+ && buffer_timestamp > mux->cluster_time
+ && (buffer_timestamp - mux->cluster_time) >=
+ MIN (G_MAXINT16 * mux->time_scale, mux->max_cluster_duration));
+
+ if (mux->cluster) {
+ /* start a new cluster at every keyframe, at every GstForceKeyUnit event,
+ * or when we may be reaching the limit of the relative timestamp */
+ if (is_max_duration_exceeded || (is_video_keyframe
+ && is_min_duration_reached) || mux->force_key_unit_event
+ || (is_audio_only && is_min_duration_reached)) {
+ if (!mux->ebml_write->streamable)
+ gst_ebml_write_master_finish (ebml, mux->cluster);
+
+ /* Forward the GstForceKeyUnit event after finishing the cluster */
+ if (mux->force_key_unit_event) {
+ gst_pad_push_event (mux->srcpad, mux->force_key_unit_event);
+ mux->force_key_unit_event = NULL;
+ }
+ cluster_time_scaled =
+ gst_util_uint64_scale (buffer_timestamp, 1, mux->time_scale);
+
+ mux->prev_cluster_size = ebml->pos - mux->cluster_pos;
+ mux->cluster_pos = ebml->pos;
+ gst_ebml_write_set_cache (ebml, 0x20);
+ mux->cluster =
+ gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_CLUSTER);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CLUSTERTIMECODE,
+ cluster_time_scaled);
+ GST_LOG_OBJECT (mux, "cluster timestamp %" G_GUINT64_FORMAT,
+ gst_util_uint64_scale (buffer_timestamp, 1, mux->time_scale));
+ gst_ebml_write_flush_cache (ebml, is_video_keyframe
+ || is_audio_only, buffer_timestamp);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_PREVSIZE,
+ mux->prev_cluster_size);
+ /* cluster_time needs to be identical in value to what's stored in the
+ * matroska so we need to have it with the same precision as what's
+ * possible with the set timecodescale rather than just using the
+ * buffer_timestamp.
+ * If this is not done the rounding of relative_timestamp will be
+ * incorrect and possibly making the timestamps get out of order if two
+ * buffers arrive at the same millisecond (assuming default timecodescale
+ * of 1ms) */
+ mux->cluster_time =
+ gst_util_uint64_scale (cluster_time_scaled, mux->time_scale, 1);
+ }
+ } else {
+ /* first cluster */
+ cluster_time_scaled =
+ gst_util_uint64_scale (buffer_timestamp, 1, mux->time_scale);
+ mux->cluster_pos = ebml->pos;
+ gst_ebml_write_set_cache (ebml, 0x20);
+ mux->cluster = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_CLUSTER);
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_CLUSTERTIMECODE,
+ cluster_time_scaled);
+ gst_ebml_write_flush_cache (ebml, TRUE, buffer_timestamp);
+ /* cluster_time needs to be identical in value to what's stored in the
+ * matroska so we need to have it with the same precision as what's
+ * possible with the set timecodescale rather than just using the
+ * buffer_timestamp.
+ * If this is not done the rounding of relative_timestamp will be
+ * incorrect and possibly making the timestamps get out of order if two
+ * buffers arrive at the same millisecond (assuming default timecodescale
+ * of 1ms) */
+ mux->cluster_time =
+ gst_util_uint64_scale (cluster_time_scaled, mux->time_scale, 1);
+ }
+
+ /* We currently write index entries for all video tracks or for the audio
+ * track in a single-track audio file. This could be improved by keeping the
+ * index only for the *first* video track. */
+
+ /* TODO: index is useful for every track, should contain the number of
+ * the block in the cluster which contains the timestamp, should also work
+ * for files with multiple audio tracks.
+ */
+ if (!mux->ebml_write->streamable && (is_video_keyframe || is_audio_only)) {
+ gint last_idx = -1;
+
+ /* find this track's most recent index entry to rate-limit new ones */
+ if (mux->min_index_interval != 0) {
+ for (last_idx = mux->num_indexes - 1; last_idx >= 0; last_idx--) {
+ if (mux->index[last_idx].track == collect_pad->track->num)
+ break;
+ }
+ }
+
+ if (last_idx < 0 || mux->min_index_interval == 0 ||
+ (GST_CLOCK_DIFF (mux->index[last_idx].time, buffer_timestamp)
+ >= mux->min_index_interval)) {
+ GstMatroskaIndex *idx;
+
+ /* grow the index array in chunks of 32 entries */
+ if (mux->num_indexes % 32 == 0) {
+ mux->index = g_renew (GstMatroskaIndex, mux->index,
+ mux->num_indexes + 32);
+ }
+ idx = &mux->index[mux->num_indexes++];
+
+ idx->pos = mux->cluster_pos;
+ idx->time = buffer_timestamp;
+ idx->track = collect_pad->track->num;
+ }
+ }
+
+ /* Check if the duration differs from the default duration. */
+ write_duration = FALSE;
+ block_duration = 0;
+ if (pad->frame_duration && GST_BUFFER_DURATION_IS_VALID (buf)) {
+ block_duration = GST_BUFFER_DURATION (buf) + duration_diff;
+ block_duration = gst_util_uint64_scale (block_duration, 1, mux->time_scale);
+
+ /* small difference should be ok. */
+ if (block_duration > collect_pad->default_duration_scaled + 1 ||
+ block_duration < collect_pad->default_duration_scaled - 1) {
+ write_duration = TRUE;
+ }
+ }
+
+ /* write the block, for doctype v2 use SimpleBlock if possible
+ * one slice (*breath*).
+ * FIXME: Need to do correct lacing! */
+ relative_timestamp64 = buffer_timestamp - mux->cluster_time;
+ if (relative_timestamp64 >= 0) {
+ /* round the timestamp */
+ relative_timestamp64 += gst_util_uint64_scale (mux->time_scale, 1, 2);
+ relative_timestamp = gst_util_uint64_scale (relative_timestamp64, 1,
+ mux->time_scale);
+ } else {
+ /* round the timestamp */
+ relative_timestamp64 -= gst_util_uint64_scale (mux->time_scale, 1, 2);
+ relative_timestamp =
+ -((gint16) gst_util_uint64_scale (-relative_timestamp64, 1,
+ mux->time_scale));
+ }
+
+ if (is_video_invisible)
+ flags |= 0x08;
+
+ if (!strcmp (collect_pad->track->codec_id, GST_MATROSKA_CODEC_ID_AUDIO_OPUS)) {
+ cmeta = gst_buffer_get_audio_clipping_meta (buf);
+ g_assert (!cmeta || cmeta->format == GST_FORMAT_DEFAULT);
+
+ /* Start clipping is done via header and CodecDelay */
+ if (cmeta && !cmeta->end)
+ cmeta = NULL;
+ }
+
+ /* SimpleBlock path: only valid for doctype v2+ and when no BlockDuration
+ * or DiscardPadding element has to be written alongside the data */
+ if (mux->doctype_version > 1 && !write_duration && !cmeta) {
+ if (is_video_keyframe)
+ flags |= 0x80;
+
+ hdr =
+ gst_matroska_mux_create_buffer_header (collect_pad->track,
+ relative_timestamp, flags);
+ gst_ebml_write_set_cache (ebml, 0x40);
+ gst_ebml_write_buffer_header (ebml, GST_MATROSKA_ID_SIMPLEBLOCK,
+ gst_buffer_get_size (buf) + gst_buffer_get_size (hdr));
+ gst_ebml_write_buffer (ebml, hdr);
+ gst_ebml_write_flush_cache (ebml, FALSE, buffer_timestamp);
+ gst_ebml_write_buffer (ebml, buf);
+
+ return gst_ebml_last_write_result (ebml);
+ } else {
+ gst_ebml_write_set_cache (ebml, gst_buffer_get_size (buf) * 2);
+ /* write and call order slightly unnatural,
+ * but avoids seek and minimizes pushing */
+ blockgroup = gst_ebml_write_master_start (ebml, GST_MATROSKA_ID_BLOCKGROUP);
+ hdr =
+ gst_matroska_mux_create_buffer_header (collect_pad->track,
+ relative_timestamp, flags);
+ if (write_duration)
+ gst_ebml_write_uint (ebml, GST_MATROSKA_ID_BLOCKDURATION, block_duration);
+
+ if (!strcmp (collect_pad->track->codec_id, GST_MATROSKA_CODEC_ID_AUDIO_OPUS)
+ && cmeta) {
+ /* Start clipping is done via header and CodecDelay */
+ if (cmeta->end) {
+ /* end clipping is signalled via DiscardPadding, in ns; Opus
+ * clipping meta counts samples at 48kHz */
+ guint64 end =
+ gst_util_uint64_scale_round (cmeta->end, GST_SECOND, 48000);
+ gst_ebml_write_sint (ebml, GST_MATROSKA_ID_DISCARDPADDING, end);
+ }
+ }
+
+ gst_ebml_write_buffer_header (ebml, GST_MATROSKA_ID_BLOCK,
+ gst_buffer_get_size (buf) + gst_buffer_get_size (hdr));
+ gst_ebml_write_buffer (ebml, hdr);
+ gst_ebml_write_master_finish_full (ebml, blockgroup,
+ gst_buffer_get_size (buf));
+ gst_ebml_write_flush_cache (ebml, FALSE, buffer_timestamp);
+ gst_ebml_write_buffer (ebml, buf);
+
+ return gst_ebml_last_write_result (ebml);
+ }
+}
+
+/**
+ * gst_matroska_mux_handle_buffer:
+ * @pads: #GstCollectPads
+ * @data: #GstCollectData of the pad that delivered @buf ("best" pad)
+ * @buf: buffer to mux; ownership is passed on to gst_matroska_mux_write_data()
+ * @user_data: #GstMatroskaMux
+ *
+ * Collectpads callback. Writes the stream header on the first buffer,
+ * finishes the file on EOS (@data == NULL), tracks per-pad start/end
+ * timestamps, and hands the buffer over for writing.
+ *
+ * Returns: #GstFlowReturn
+ */
+static GstFlowReturn
+gst_matroska_mux_handle_buffer (GstCollectPads * pads, GstCollectData * data,
+ GstBuffer * buf, gpointer user_data)
+{
+ GstClockTime buffer_timestamp;
+ GstMatroskaMux *mux = GST_MATROSKA_MUX (user_data);
+ GstEbmlWrite *ebml = mux->ebml_write;
+ GstMatroskaPad *best = (GstMatroskaPad *) data;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GST_DEBUG_OBJECT (mux, "Collected pads");
+
+ /* start with a header */
+ if (mux->state == GST_MATROSKA_MUX_STATE_START) {
+ if (mux->collect->data == NULL) {
+ GST_ELEMENT_ERROR (mux, STREAM, MUX, (NULL),
+ ("No input streams configured"));
+ return GST_FLOW_ERROR;
+ }
+ mux->state = GST_MATROSKA_MUX_STATE_HEADER;
+ gst_ebml_start_streamheader (ebml);
+ gst_matroska_mux_start (mux, best, buf);
+ gst_matroska_mux_stop_streamheader (mux);
+ mux->state = GST_MATROSKA_MUX_STATE_DATA;
+ }
+
+ /* if there is no best pad, we have reached EOS */
+ if (best == NULL) {
+ GST_DEBUG_OBJECT (mux, "No best pad. Finishing...");
+ if (!mux->ebml_write->streamable) {
+ gst_matroska_mux_finish (mux);
+ } else {
+ GST_DEBUG_OBJECT (mux, "... but streamable, nothing to finish");
+ }
+ gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
+ ret = GST_FLOW_EOS;
+ goto exit;
+ }
+
+ if (best->track->codec_id == NULL) {
+ GST_ERROR_OBJECT (best->collect.pad, "No codec-id for pad");
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto exit;
+ }
+
+ /* if we have a best stream, should also have a buffer */
+ g_assert (buf);
+
+ buffer_timestamp = gst_matroska_track_get_buffer_timestamp (best->track, buf);
+ if (buffer_timestamp >= mux->earliest_time) {
+ buffer_timestamp -= mux->earliest_time;
+ } else {
+ GST_ERROR_OBJECT (mux,
+ "PTS before first PTS (%" GST_TIME_FORMAT " < %" GST_TIME_FORMAT ")",
+ GST_TIME_ARGS (buffer_timestamp), GST_TIME_ARGS (mux->earliest_time));
+ buffer_timestamp = 0;
+ }
+
+ GST_DEBUG_OBJECT (best->collect.pad, "best pad - buffer ts %"
+ GST_TIME_FORMAT " dur %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (buffer_timestamp),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
+
+ /* make note of first and last encountered timestamps, so we can calculate
+ * the actual duration later when we send an updated header on eos */
+ if (GST_CLOCK_TIME_IS_VALID (buffer_timestamp)) {
+ GstClockTime start_ts = buffer_timestamp;
+ GstClockTime end_ts = start_ts;
+
+ if (GST_BUFFER_DURATION_IS_VALID (buf))
+ end_ts += GST_BUFFER_DURATION (buf);
+ else if (best->track->default_duration)
+ end_ts += best->track->default_duration;
+
+ if (!GST_CLOCK_TIME_IS_VALID (best->end_ts) || end_ts > best->end_ts)
+ best->end_ts = end_ts;
+
+ if (G_UNLIKELY (best->start_ts == GST_CLOCK_TIME_NONE ||
+ start_ts < best->start_ts))
+ best->start_ts = start_ts;
+ }
+
+ /* write one buffer */
+ ret = gst_matroska_mux_write_data (mux, best, buf);
+
+exit:
+ return ret;
+}
+
+
+/**
+ * gst_matroska_mux_change_state:
+ * @element: #GstMatroskaMux
+ * @transition: State change transition.
+ *
+ * Change the muxer state: start/stop the collectpads around the parent
+ * transition and reset the muxer when going back to READY.
+ *
+ * Returns: #GstStateChangeReturn
+ */
+static GstStateChangeReturn
+gst_matroska_mux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstMatroskaMux *mux = GST_MATROSKA_MUX (element);
+
+ /* upward transitions, handled before chaining up */
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_collect_pads_start (mux->collect);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* stop collecting before chaining up so pads are quiescent */
+ gst_collect_pads_stop (mux->collect);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* downward transitions, handled after chaining up */
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_matroska_mux_reset (GST_ELEMENT (mux));
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/* GObject property setter for #GstMatroskaMux */
+static void
+gst_matroska_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstMatroskaMux *mux;
+
+ g_return_if_fail (GST_IS_MATROSKA_MUX (object));
+ mux = GST_MATROSKA_MUX (object);
+
+ switch (prop_id) {
+ case PROP_WRITING_APP:
+ /* NULL is rejected to keep a valid WritingApp header element */
+ if (!g_value_get_string (value)) {
+ GST_WARNING_OBJECT (mux, "writing-app property can not be NULL");
+ break;
+ }
+ g_free (mux->writing_app);
+ mux->writing_app = g_value_dup_string (value);
+ break;
+ case PROP_DOCTYPE_VERSION:
+ mux->doctype_version = g_value_get_int (value);
+ break;
+ case PROP_MIN_INDEX_INTERVAL:
+ mux->min_index_interval = g_value_get_int64 (value);
+ break;
+ case PROP_STREAMABLE:
+ mux->ebml_write->streamable = g_value_get_boolean (value);
+ break;
+ case PROP_TIMECODESCALE:
+ mux->time_scale = g_value_get_int64 (value);
+ break;
+ case PROP_MIN_CLUSTER_DURATION:
+ mux->min_cluster_duration = g_value_get_int64 (value);
+ break;
+ case PROP_MAX_CLUSTER_DURATION:
+ mux->max_cluster_duration = g_value_get_int64 (value);
+ break;
+ case PROP_OFFSET_TO_ZERO:
+ mux->offset_to_zero = g_value_get_boolean (value);
+ break;
+ case PROP_CREATION_TIME:
+ g_clear_pointer (&mux->creation_time, g_date_time_unref);
+ mux->creation_time = g_value_dup_boxed (value);
+ break;
+ case PROP_CLUSTER_TIMESTAMP_OFFSET:
+ mux->cluster_timestamp_offset = g_value_get_uint64 (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter for #GstMatroskaMux */
+static void
+gst_matroska_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstMatroskaMux *mux;
+
+ g_return_if_fail (GST_IS_MATROSKA_MUX (object));
+ mux = GST_MATROSKA_MUX (object);
+
+ switch (prop_id) {
+ case PROP_WRITING_APP:
+ g_value_set_string (value, mux->writing_app);
+ break;
+ case PROP_DOCTYPE_VERSION:
+ g_value_set_int (value, mux->doctype_version);
+ break;
+ case PROP_MIN_INDEX_INTERVAL:
+ g_value_set_int64 (value, mux->min_index_interval);
+ break;
+ case PROP_STREAMABLE:
+ g_value_set_boolean (value, mux->ebml_write->streamable);
+ break;
+ case PROP_TIMECODESCALE:
+ g_value_set_int64 (value, mux->time_scale);
+ break;
+ case PROP_MIN_CLUSTER_DURATION:
+ g_value_set_int64 (value, mux->min_cluster_duration);
+ break;
+ case PROP_MAX_CLUSTER_DURATION:
+ g_value_set_int64 (value, mux->max_cluster_duration);
+ break;
+ case PROP_OFFSET_TO_ZERO:
+ g_value_set_boolean (value, mux->offset_to_zero);
+ break;
+ case PROP_CREATION_TIME:
+ g_value_set_boxed (value, mux->creation_time);
+ break;
+ case PROP_CLUSTER_TIMESTAMP_OFFSET:
+ g_value_set_uint64 (value, mux->cluster_timestamp_offset);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/matroska/matroska-mux.h b/gst/matroska/matroska-mux.h
new file mode 100644
index 0000000000..bcb61a929b
--- /dev/null
+++ b/gst/matroska/matroska-mux.h
@@ -0,0 +1,160 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2005 Michal Benes <michal.benes@xeris.cz>
+ *
+ * matroska-mux.h: matroska file/stream muxer object types
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MATROSKA_MUX_H__
+#define __GST_MATROSKA_MUX_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstcollectpads.h>
+
+#include "ebml-write.h"
+#include "matroska-ids.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MATROSKA_MUX \
+ (gst_matroska_mux_get_type ())
+#define GST_MATROSKA_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MATROSKA_MUX, GstMatroskaMux))
+#define GST_MATROSKA_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MATROSKA_MUX, GstMatroskaMuxClass))
+#define GST_IS_MATROSKA_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MATROSKA_MUX))
+#define GST_IS_MATROSKA_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MATROSKA_MUX))
+
+/* muxer state machine: nothing written yet, writing the file header,
+ * or writing buffer data */
+typedef enum {
+ GST_MATROSKA_MUX_STATE_START,
+ GST_MATROSKA_MUX_STATE_HEADER,
+ GST_MATROSKA_MUX_STATE_DATA,
+} GstMatroskaMuxState;
+
+/* one meta-seek (SeekHead) entry: element id and its byte position */
+typedef struct _GstMatroskaMetaSeekIndex {
+ guint32 id;
+ guint64 pos;
+} GstMatroskaMetaSeekIndex;
+
+/* NOTE(review): presumably the per-stream caps-handling callback installed
+ * when a sink pad is requested -- confirm against the pad setup code */
+typedef gboolean (*GstMatroskaCapsFunc) (GstPad *pad, GstCaps *caps);
+
+typedef struct _GstMatroskaMux GstMatroskaMux;
+
+/* all information needed for one matroska stream */
+typedef struct
+{
+ GstCollectData collect; /* we extend the CollectData */
+ GstMatroskaCapsFunc capsfunc;
+ GstMatroskaTrackContext *track;
+
+ GstMatroskaMux *mux; /* back-pointer to the owning muxer */
+
+ GstTagList *tags;
+
+ GstClockTime start_ts; /* earliest timestamp seen on this stream */
+ GstClockTime end_ts; /* last timestamp + (if available) duration */
+ guint64 default_duration_scaled; /* default duration in timecodescale units */
+}
+GstMatroskaPad;
+
+
+struct _GstMatroskaMux {
+ GstElement element;
+
+ /* < private > */
+
+ /* pads */
+ GstPad *srcpad;
+ GstCollectPads *collect;
+ GstEbmlWrite *ebml_write;
+
+ /* stream counts: total and per track type (video/audio/text) */
+ guint num_streams,
+ num_v_streams, num_a_streams, num_t_streams;
+
+ /* Application name (for the writing application header element) */
+ gchar *writing_app;
+
+ /* Date (for the DateUTC header element) */
+ GDateTime *creation_time;
+
+ /* EBML DocType. */
+ const gchar *doctype;
+
+ /* DocType version. */
+ guint doctype_version;
+
+ /* state */
+ GstMatroskaMuxState state;
+
+ /* a cue (index) table */
+ GstMatroskaIndex *index;
+ guint num_indexes;
+ GstClockTimeDiff min_index_interval;
+
+ /* timescale in the file */
+ guint64 time_scale;
+ /* minimum and maximum limit of nanoseconds you can have in a cluster */
+ guint64 max_cluster_duration;
+ guint64 min_cluster_duration;
+
+ /* earliest timestamp (time, ns) if offsetting to zero */
+ gboolean offset_to_zero;
+ guint64 cluster_timestamp_offset;
+ guint64 earliest_time;
+ /* length, position (time, ns) */
+ guint64 duration;
+
+ /* byte-positions of master-elements (for replacing contents) */
+ guint64 segment_pos,
+ seekhead_pos,
+ cues_pos,
+ chapters_pos,
+ tags_pos,
+ info_pos,
+ tracks_pos,
+ duration_pos,
+ meta_pos;
+ guint64 segment_master; /* byte offset the Segment's payload starts at */
+
+ /* current cluster */
+ guint64 cluster,
+ cluster_time,
+ cluster_pos,
+ prev_cluster_size;
+
+ /* GstForceKeyUnit event */
+ GstEvent *force_key_unit_event;
+
+ /* Internal Toc (adjusted UIDs and title tags removed when processed) */
+ GstToc *internal_toc;
+
+ /* Flag to ease handling of WebM specifics */
+ gboolean is_webm;
+};
+
+/* standard class structure; no virtual methods added beyond GstElement */
+typedef struct _GstMatroskaMuxClass {
+ GstElementClass parent;
+} GstMatroskaMuxClass;
+
+GType gst_matroska_mux_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_MATROSKA_MUX_H__ */
diff --git a/gst/matroska/matroska-parse.c b/gst/matroska/matroska-parse.c
new file mode 100644
index 0000000000..3fcb5c55e8
--- /dev/null
+++ b/gst/matroska/matroska-parse.c
@@ -0,0 +1,3272 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Tim-Philipp Müller <tim centricular net>
+ * (c) 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ * (c) 2011 Debarshi Ray <rishi@gnu.org>
+ *
+ * matroska-parse.c: matroska file/stream parser
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* TODO: check CRC32 if present
+ * TODO: there can be a segment after the first segment. Handle like
+ * chained oggs. Fixes #334082
+ * TODO: Test samples: http://www.matroska.org/samples/matrix/index.html
+ * http://samples.mplayerhq.hu/Matroska/
+ * TODO: check if parsing is done correct for all codecs according to spec
+ * TODO: seeking with incomplete or without CUE
+ */
+
+/**
+ * SECTION:element-matroskaparse
+ * @title: matroskaparse
+ *
+ * matroskaparse parses a Matroska file into the different contained streams.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v filesrc location=/path/to/mkv ! matroskaparse ! vorbisdec ! audioconvert ! audioresample ! autoaudiosink
+ * ]| This pipeline parses a Matroska file and outputs the contained Vorbis audio.
+ *
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <string.h>
+#include <glib/gprintf.h>
+
+/* For AVI compatibility mode
+ and for fourcc stuff */
+#include <gst/riff/riff-read.h>
+#include <gst/riff/riff-ids.h>
+#include <gst/riff/riff-media.h>
+
+#include <gst/tag/tag.h>
+
+#include <gst/pbutils/pbutils.h>
+
+#include "gstmatroskaelements.h"
+#include "matroska-parse.h"
+#include "matroska-ids.h"
+
+GST_DEBUG_CATEGORY_STATIC (matroskaparse_debug);
+#define GST_CAT_DEFAULT matroskaparse_debug
+
+#define DEBUG_ELEMENT_START(parse, ebml, element) \
+ GST_DEBUG_OBJECT (parse, "Parsing " element " element at offset %" \
+ G_GUINT64_FORMAT, gst_ebml_read_get_pos (ebml))
+
+#define DEBUG_ELEMENT_STOP(parse, ebml, element, ret) \
+ GST_DEBUG_OBJECT (parse, "Parsing " element " element " \
+ " finished with '%s'", gst_flow_get_name (ret))
+
+#define INVALID_DATA_THRESHOLD (2 * 1024 * 1024)
+
+enum
+{
+ PROP_0
+};
+
+/* Accepted input: any Matroska/WebM flavour. */
+static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("audio/x-matroska; video/x-matroska; "
+        "video/x-matroska-3d; audio/webm; video/webm")
+    );
+
+/* The source pad advertises the same caps as the sink: the parser passes
+ * the container stream through rather than demuxing it. */
+static GstStaticPadTemplate src_templ = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("audio/x-matroska; video/x-matroska; "
+        "video/x-matroska-3d; audio/webm; video/webm")
+    );
+
+static GstFlowReturn gst_matroska_parse_parse_id (GstMatroskaParse * parse,
+ guint32 id, guint64 length, guint needed);
+
+/* element functions */
+//static void gst_matroska_parse_loop (GstPad * pad);
+
+static gboolean gst_matroska_parse_element_send_event (GstElement * element,
+ GstEvent * event);
+static gboolean gst_matroska_parse_element_query (GstElement * element,
+ GstQuery * query);
+
+/* pad functions */
+static gboolean gst_matroska_parse_handle_seek_event (GstMatroskaParse * parse,
+ GstPad * pad, GstEvent * event);
+static gboolean gst_matroska_parse_handle_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_matroska_parse_handle_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+
+static gboolean gst_matroska_parse_handle_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static GstFlowReturn gst_matroska_parse_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+
+static GstStateChangeReturn
+gst_matroska_parse_change_state (GstElement * element,
+ GstStateChange transition);
+#if 0
+static void
+gst_matroska_parse_set_index (GstElement * element, GstIndex * index);
+static GstIndex *gst_matroska_parse_get_index (GstElement * element);
+#endif
+
+/* stream methods */
+static void gst_matroska_parse_reset (GstElement * element);
+static gboolean perform_seek_to_offset (GstMatroskaParse * parse,
+ guint64 offset);
+static GstCaps *gst_matroska_parse_forge_caps (gboolean is_webm,
+ gboolean has_video);
+
+GType gst_matroska_parse_get_type (void);
+#define parent_class gst_matroska_parse_parent_class
+G_DEFINE_TYPE (GstMatroskaParse, gst_matroska_parse, GST_TYPE_ELEMENT);
+#define _do_init \
+ gst_riff_init (); \
+ matroska_element_init (plugin);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (matroskaparse, "matroskaparse",
+ GST_RANK_NONE, GST_TYPE_MATROSKA_PARSE, _do_init);
+
+/* GObject finalize: release the shared read context, then chain up. */
+static void
+gst_matroska_parse_finalize (GObject * object)
+{
+  GstMatroskaParse *self = GST_MATROSKA_PARSE (object);
+
+  /* free everything owned by the common Matroska read context */
+  gst_matroska_read_common_finalize (&self->common);
+
+  /* chain up to the parent class implementation */
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class init: install vfuncs, pad templates and element metadata. */
+static void
+gst_matroska_parse_class_init (GstMatroskaParseClass * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (matroskaparse_debug, "matroskaparse", 0,
+      "Matroska parser");
+
+  object_class->finalize = gst_matroska_parse_finalize;
+
+  element_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_matroska_parse_change_state);
+  element_class->send_event =
+      GST_DEBUG_FUNCPTR (gst_matroska_parse_element_send_event);
+  element_class->query = GST_DEBUG_FUNCPTR (gst_matroska_parse_element_query);
+
+#if 0
+  element_class->set_index = GST_DEBUG_FUNCPTR (gst_matroska_parse_set_index);
+  element_class->get_index = GST_DEBUG_FUNCPTR (gst_matroska_parse_get_index);
+#endif
+
+  gst_element_class_add_static_pad_template (element_class, &src_templ);
+  gst_element_class_add_static_pad_template (element_class, &sink_templ);
+
+  gst_element_class_set_static_metadata (element_class,
+      "Matroska parser", "Codec/Parser",
+      "Parses Matroska/WebM streams into video/audio/subtitles",
+      "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+}
+
+/* Instance init: create and wire up the sink and source pads, initialise
+ * the shared read context and reset all stream state.
+ * Note: pads are added in sink-then-src order; keep that order stable. */
+static void
+gst_matroska_parse_init (GstMatroskaParse * parse)
+{
+  /* sink pad: receives the raw Matroska/WebM byte stream */
+  parse->common.sinkpad = gst_pad_new_from_static_template (&sink_templ,
+      "sink");
+  gst_pad_set_chain_function (parse->common.sinkpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_parse_chain));
+  gst_pad_set_event_function (parse->common.sinkpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_parse_handle_sink_event));
+  gst_element_add_pad (GST_ELEMENT (parse), parse->common.sinkpad);
+
+  /* source pad: pushes the parsed stream downstream */
+  parse->srcpad = gst_pad_new_from_static_template (&src_templ, "src");
+  gst_pad_set_event_function (parse->srcpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_parse_handle_src_event));
+  gst_pad_set_query_function (parse->srcpad,
+      GST_DEBUG_FUNCPTR (gst_matroska_parse_handle_src_query));
+  gst_pad_use_fixed_caps (parse->srcpad);
+
+  gst_element_add_pad (GST_ELEMENT (parse), parse->srcpad);
+
+  /* init defaults for common read context */
+  gst_matroska_read_common_init (&parse->common);
+
+  GST_OBJECT_FLAG_SET (parse, GST_ELEMENT_FLAG_INDEXABLE);
+
+  /* finish off: bring all stream state to its reset defaults */
+  gst_matroska_parse_reset (GST_ELEMENT (parse));
+}
+
+/* Reset all stream state back to post-construction defaults; used from
+ * _init and on state changes. */
+static void
+gst_matroska_parse_reset (GstElement * element)
+{
+  GstMatroskaParse *parse = GST_MATROSKA_PARSE (element);
+
+  GST_DEBUG_OBJECT (parse, "Resetting state");
+
+  gst_matroska_read_common_reset (GST_ELEMENT (parse), &parse->common);
+
+  /* per-type stream counters */
+  parse->num_a_streams = 0;
+  parse->num_t_streams = 0;
+  parse->num_v_streams = 0;
+
+  parse->clock = NULL;
+  parse->tracks_parsed = FALSE;
+
+  /* drop the list of already-handled SeekHead entries */
+  g_list_foreach (parse->seek_parsed,
+      (GFunc) gst_matroska_read_common_free_parsed_el, NULL);
+  g_list_free (parse->seek_parsed);
+  parse->seek_parsed = NULL;
+
+  /* timing / position bookkeeping */
+  parse->last_stop_end = GST_CLOCK_TIME_NONE;
+  parse->seek_block = 0;
+  parse->cluster_time = GST_CLOCK_TIME_NONE;
+  parse->cluster_offset = 0;
+  parse->next_cluster_offset = 0;
+  parse->index_offset = 0;
+  parse->seekable = FALSE;
+  parse->need_newsegment = TRUE;
+  parse->building_index = FALSE;
+
+  /* release any pending events and unref+clear in one step */
+  g_clear_pointer (&parse->seek_event, gst_event_unref);
+
+  parse->seek_index = NULL;
+  parse->seek_entry = 0;
+
+  g_clear_pointer (&parse->close_segment, gst_event_unref);
+  g_clear_pointer (&parse->new_segment, gst_event_unref);
+
+  /* cached stream header buffer, if any */
+  g_clear_pointer (&parse->streamheader, gst_buffer_unref);
+}
+
+/* Parse one complete TrackEntry master element from @ebml and append the
+ * resulting track context to parse->common.src.
+ *
+ * On success the context is kept and GST_FLOW_OK returned.  If the entry
+ * turns out to have an unknown type, no codec id, or parsing fails, the
+ * context is removed from the array and freed again before returning.
+ *
+ * Fixed here: the TrackDefault/TrackForced/TrackLacing debug messages
+ * previously tested GST_MATROSKA_TRACK_ENABLED (copy-paste error) and so
+ * logged the wrong flag; they now test the flag they just set. */
+static GstFlowReturn
+gst_matroska_parse_add_stream (GstMatroskaParse * parse, GstEbmlRead * ebml)
+{
+  GstMatroskaTrackContext *context;
+  GstFlowReturn ret;
+  guint32 id;
+
+  DEBUG_ELEMENT_START (parse, ebml, "TrackEntry");
+
+  /* start with the master */
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (parse, ebml, "TrackEntry", ret);
+    return ret;
+  }
+
+  /* allocate generic... if we know the type, we'll g_renew()
+   * with the precise type */
+  context = g_new0 (GstMatroskaTrackContext, 1);
+  g_ptr_array_add (parse->common.src, context);
+  context->index = parse->common.num_streams;
+  context->index_writer_id = -1;
+  context->type = 0;            /* no type yet */
+  context->default_duration = 0;
+  context->pos = 0;
+  context->set_discont = TRUE;
+  context->timecodescale = 1.0;
+  context->flags =
+      GST_MATROSKA_TRACK_ENABLED | GST_MATROSKA_TRACK_DEFAULT |
+      GST_MATROSKA_TRACK_LACING;
+  context->to_offset = G_MAXINT64;
+  context->alignment = 1;
+  parse->common.num_streams++;
+  g_assert (parse->common.src->len == parse->common.num_streams);
+
+  GST_DEBUG_OBJECT (parse, "Stream number %d", context->index);
+
+  /* try reading the trackentry headers */
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+        /* track number (unique stream ID) */
+      case GST_MATROSKA_ID_TRACKNUMBER:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num == 0) {
+          GST_ERROR_OBJECT (parse, "Invalid TrackNumber 0");
+          ret = GST_FLOW_ERROR;
+          break;
+        } else if (!gst_matroska_read_common_tracknumber_unique (&parse->common,
+                num)) {
+          GST_ERROR_OBJECT (parse, "TrackNumber %" G_GUINT64_FORMAT
+              " is not unique", num);
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+
+        GST_DEBUG_OBJECT (parse, "TrackNumber: %" G_GUINT64_FORMAT, num);
+        context->num = num;
+        break;
+      }
+        /* track UID (unique identifier) */
+      case GST_MATROSKA_ID_TRACKUID:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num == 0) {
+          GST_ERROR_OBJECT (parse, "Invalid TrackUID 0");
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+
+        GST_DEBUG_OBJECT (parse, "TrackUID: %" G_GUINT64_FORMAT, num);
+        context->uid = num;
+        break;
+      }
+
+        /* track type (video, audio, combined, subtitle, etc.) */
+      case GST_MATROSKA_ID_TRACKTYPE:{
+        guint64 track_type;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &track_type)) != GST_FLOW_OK) {
+          break;
+        }
+
+        if (context->type != 0 && context->type != track_type) {
+          GST_WARNING_OBJECT (parse,
+              "More than one tracktype defined in a TrackEntry - skipping");
+          break;
+        } else if (track_type < 1 || track_type > 254) {
+          GST_WARNING_OBJECT (parse, "Invalid TrackType %" G_GUINT64_FORMAT,
+              track_type);
+          break;
+        }
+
+        GST_DEBUG_OBJECT (parse, "TrackType: %" G_GUINT64_FORMAT, track_type);
+
+        /* ok, so we're actually going to reallocate this thing */
+        switch (track_type) {
+          case GST_MATROSKA_TRACK_TYPE_VIDEO:
+            gst_matroska_track_init_video_context (&context);
+            parse->common.has_video = TRUE;
+            break;
+          case GST_MATROSKA_TRACK_TYPE_AUDIO:
+            gst_matroska_track_init_audio_context (&context);
+            break;
+          case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+            gst_matroska_track_init_subtitle_context (&context);
+            break;
+          case GST_MATROSKA_TRACK_TYPE_COMPLEX:
+          case GST_MATROSKA_TRACK_TYPE_LOGO:
+          case GST_MATROSKA_TRACK_TYPE_BUTTONS:
+          case GST_MATROSKA_TRACK_TYPE_CONTROL:
+          default:
+            GST_WARNING_OBJECT (parse,
+                "Unknown or unsupported TrackType %" G_GUINT64_FORMAT,
+                track_type);
+            context->type = 0;
+            break;
+        }
+        /* the init function may have reallocated the context */
+        g_ptr_array_index (parse->common.src, parse->common.num_streams - 1)
+            = context;
+        break;
+      }
+
+        /* tracktype specific stuff for video */
+      case GST_MATROSKA_ID_TRACKVIDEO:{
+        GstMatroskaTrackVideoContext *videocontext;
+
+        DEBUG_ELEMENT_START (parse, ebml, "TrackVideo");
+
+        if (!gst_matroska_track_init_video_context (&context)) {
+          GST_WARNING_OBJECT (parse,
+              "TrackVideo element in non-video track - ignoring track");
+          ret = GST_FLOW_ERROR;
+          break;
+        } else if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+          break;
+        }
+        videocontext = (GstMatroskaTrackVideoContext *) context;
+        g_ptr_array_index (parse->common.src, parse->common.num_streams - 1)
+            = context;
+
+        while (ret == GST_FLOW_OK &&
+            gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+          if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+            break;
+
+          switch (id) {
+              /* Should be one level up but some broken muxers write it here. */
+            case GST_MATROSKA_ID_TRACKDEFAULTDURATION:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackDefaultDuration 0");
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse,
+                  "TrackDefaultDuration: %" G_GUINT64_FORMAT, num);
+              context->default_duration = num;
+              break;
+            }
+
+              /* video framerate */
+              /* NOTE: This one is here only for backward compatibility.
+               * Use _TRACKDEFAULTDURATION one level up. */
+            case GST_MATROSKA_ID_VIDEOFRAMERATE:{
+              gdouble num;
+
+              if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num <= 0.0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackVideoFPS %lf", num);
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse, "TrackVideoFrameRate: %lf", num);
+              /* only derive default_duration if not already set explicitly */
+              if (context->default_duration == 0)
+                context->default_duration =
+                    gst_gdouble_to_guint64 ((gdouble) GST_SECOND * (1.0 / num));
+              videocontext->default_fps = num;
+              break;
+            }
+
+              /* width of the size to display the video at */
+            case GST_MATROSKA_ID_VIDEODISPLAYWIDTH:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackVideoDisplayWidth 0");
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse,
+                  "TrackVideoDisplayWidth: %" G_GUINT64_FORMAT, num);
+              videocontext->display_width = num;
+              break;
+            }
+
+              /* height of the size to display the video at */
+            case GST_MATROSKA_ID_VIDEODISPLAYHEIGHT:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackVideoDisplayHeight 0");
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse,
+                  "TrackVideoDisplayHeight: %" G_GUINT64_FORMAT, num);
+              videocontext->display_height = num;
+              break;
+            }
+
+              /* width of the video in the file */
+            case GST_MATROSKA_ID_VIDEOPIXELWIDTH:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackVideoPixelWidth 0");
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse,
+                  "TrackVideoPixelWidth: %" G_GUINT64_FORMAT, num);
+              videocontext->pixel_width = num;
+              break;
+            }
+
+              /* height of the video in the file */
+            case GST_MATROSKA_ID_VIDEOPIXELHEIGHT:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackVideoPixelHeight 0");
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse,
+                  "TrackVideoPixelHeight: %" G_GUINT64_FORMAT, num);
+              videocontext->pixel_height = num;
+              break;
+            }
+
+              /* whether the video is interlaced */
+            case GST_MATROSKA_ID_VIDEOFLAGINTERLACED:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 1)
+                videocontext->interlace_mode =
+                    GST_MATROSKA_INTERLACE_MODE_INTERLACED;
+              else if (num == 2)
+                videocontext->interlace_mode =
+                    GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
+              else
+                videocontext->interlace_mode =
+                    GST_MATROSKA_INTERLACE_MODE_UNKNOWN;
+
+              GST_DEBUG_OBJECT (parse, "video track interlacing mode: %d",
+                  videocontext->interlace_mode);
+              break;
+            }
+
+              /* interlaced field order */
+            case GST_MATROSKA_ID_VIDEOFIELDORDER:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (videocontext->interlace_mode !=
+                  GST_MATROSKA_INTERLACE_MODE_INTERLACED) {
+                GST_WARNING_OBJECT (parse,
+                    "FieldOrder element when not interlaced - ignoring");
+                break;
+              }
+
+              if (num == 0)
+                /* turns out we're actually progressive */
+                videocontext->interlace_mode =
+                    GST_MATROSKA_INTERLACE_MODE_PROGRESSIVE;
+              else if (num == 2)
+                videocontext->field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+              else if (num == 9)
+                videocontext->field_order =
+                    GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST;
+              else if (num == 14)
+                videocontext->field_order =
+                    GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST;
+              else {
+                GST_FIXME_OBJECT (parse,
+                    "Unknown or unsupported FieldOrder %" G_GUINT64_FORMAT,
+                    num);
+                videocontext->field_order = GST_VIDEO_FIELD_ORDER_UNKNOWN;
+              }
+
+              GST_DEBUG_OBJECT (parse, "video track field order: %d",
+                  videocontext->field_order);
+              break;
+            }
+
+              /* aspect ratio behaviour */
+            case GST_MATROSKA_ID_VIDEOASPECTRATIOTYPE:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num != GST_MATROSKA_ASPECT_RATIO_MODE_FREE &&
+                  num != GST_MATROSKA_ASPECT_RATIO_MODE_KEEP &&
+                  num != GST_MATROSKA_ASPECT_RATIO_MODE_FIXED) {
+                GST_WARNING_OBJECT (parse,
+                    "Unknown TrackVideoAspectRatioType 0x%x", (guint) num);
+                break;
+              }
+              GST_DEBUG_OBJECT (parse,
+                  "TrackVideoAspectRatioType: %" G_GUINT64_FORMAT, num);
+              videocontext->asr_mode = num;
+              break;
+            }
+
+              /* colourspace (only matters for raw video) fourcc */
+            case GST_MATROSKA_ID_VIDEOCOLOURSPACE:{
+              guint8 *data;
+              guint64 datalen;
+
+              if ((ret =
+                      gst_ebml_read_binary (ebml, &id, &data,
+                          &datalen)) != GST_FLOW_OK)
+                break;
+
+              if (datalen != 4) {
+                g_free (data);
+                GST_WARNING_OBJECT (parse,
+                    "Invalid TrackVideoColourSpace length %" G_GUINT64_FORMAT,
+                    datalen);
+                break;
+              }
+
+              memcpy (&videocontext->fourcc, data, 4);
+              GST_DEBUG_OBJECT (parse,
+                  "TrackVideoColourSpace: %" GST_FOURCC_FORMAT,
+                  GST_FOURCC_ARGS (videocontext->fourcc));
+              g_free (data);
+              break;
+            }
+
+            default:
+              GST_WARNING_OBJECT (parse,
+                  "Unknown TrackVideo subelement 0x%x - ignoring", id);
+              /* fall through */
+            case GST_MATROSKA_ID_VIDEOSTEREOMODE:
+            case GST_MATROSKA_ID_VIDEODISPLAYUNIT:
+            case GST_MATROSKA_ID_VIDEOPIXELCROPBOTTOM:
+            case GST_MATROSKA_ID_VIDEOPIXELCROPTOP:
+            case GST_MATROSKA_ID_VIDEOPIXELCROPLEFT:
+            case GST_MATROSKA_ID_VIDEOPIXELCROPRIGHT:
+            case GST_MATROSKA_ID_VIDEOGAMMAVALUE:
+              ret = gst_ebml_read_skip (ebml);
+              break;
+          }
+        }
+
+        DEBUG_ELEMENT_STOP (parse, ebml, "TrackVideo", ret);
+        break;
+      }
+
+        /* tracktype specific stuff for audio */
+      case GST_MATROSKA_ID_TRACKAUDIO:{
+        GstMatroskaTrackAudioContext *audiocontext;
+
+        DEBUG_ELEMENT_START (parse, ebml, "TrackAudio");
+
+        if (!gst_matroska_track_init_audio_context (&context)) {
+          GST_WARNING_OBJECT (parse,
+              "TrackAudio element in non-audio track - ignoring track");
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+
+        if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+          break;
+
+        audiocontext = (GstMatroskaTrackAudioContext *) context;
+        g_ptr_array_index (parse->common.src, parse->common.num_streams - 1)
+            = context;
+
+        while (ret == GST_FLOW_OK &&
+            gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+          if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+            break;
+
+          switch (id) {
+              /* samplerate */
+            case GST_MATROSKA_ID_AUDIOSAMPLINGFREQ:{
+              gdouble num;
+
+              if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num <= 0.0) {
+                GST_WARNING_OBJECT (parse,
+                    "Invalid TrackAudioSamplingFrequency %lf", num);
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse, "TrackAudioSamplingFrequency: %lf", num);
+              audiocontext->samplerate = num;
+              break;
+            }
+
+              /* bitdepth */
+            case GST_MATROSKA_ID_AUDIOBITDEPTH:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackAudioBitDepth 0");
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse, "TrackAudioBitDepth: %" G_GUINT64_FORMAT,
+                  num);
+              audiocontext->bitdepth = num;
+              break;
+            }
+
+              /* channels */
+            case GST_MATROSKA_ID_AUDIOCHANNELS:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+                break;
+
+              if (num == 0) {
+                GST_WARNING_OBJECT (parse, "Invalid TrackAudioChannels 0");
+                break;
+              }
+
+              GST_DEBUG_OBJECT (parse, "TrackAudioChannels: %" G_GUINT64_FORMAT,
+                  num);
+              audiocontext->channels = num;
+              break;
+            }
+
+            default:
+              GST_WARNING_OBJECT (parse,
+                  "Unknown TrackAudio subelement 0x%x - ignoring", id);
+              /* fall through */
+            case GST_MATROSKA_ID_AUDIOCHANNELPOSITIONS:
+            case GST_MATROSKA_ID_AUDIOOUTPUTSAMPLINGFREQ:
+              ret = gst_ebml_read_skip (ebml);
+              break;
+          }
+        }
+
+        DEBUG_ELEMENT_STOP (parse, ebml, "TrackAudio", ret);
+
+        break;
+      }
+
+        /* codec identifier */
+      case GST_MATROSKA_ID_CODECID:{
+        gchar *text;
+
+        if ((ret = gst_ebml_read_ascii (ebml, &id, &text)) != GST_FLOW_OK)
+          break;
+
+        GST_DEBUG_OBJECT (parse, "CodecID: %s", GST_STR_NULL (text));
+        context->codec_id = text;
+        break;
+      }
+
+        /* codec private data */
+      case GST_MATROSKA_ID_CODECPRIVATE:{
+        guint8 *data;
+        guint64 size;
+
+        if ((ret =
+                gst_ebml_read_binary (ebml, &id, &data, &size)) != GST_FLOW_OK)
+          break;
+
+        context->codec_priv = data;
+        context->codec_priv_size = size;
+
+        GST_DEBUG_OBJECT (parse, "CodecPrivate of size %" G_GUINT64_FORMAT,
+            size);
+        break;
+      }
+
+        /* name of the codec */
+      case GST_MATROSKA_ID_CODECNAME:{
+        gchar *text;
+
+        if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+          break;
+
+        GST_DEBUG_OBJECT (parse, "CodecName: %s", GST_STR_NULL (text));
+        context->codec_name = text;
+        break;
+      }
+
+        /* name of this track */
+      case GST_MATROSKA_ID_TRACKNAME:{
+        gchar *text;
+
+        if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+          break;
+
+        context->name = text;
+        GST_DEBUG_OBJECT (parse, "TrackName: %s", GST_STR_NULL (text));
+        break;
+      }
+
+        /* language (matters for audio/subtitles, mostly) */
+      case GST_MATROSKA_ID_TRACKLANGUAGE:{
+        gchar *text;
+
+        if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+          break;
+
+        context->language = text;
+
+        /* fre-ca => fre */
+        if (strlen (context->language) >= 4 && context->language[3] == '-')
+          context->language[3] = '\0';
+
+        GST_DEBUG_OBJECT (parse, "TrackLanguage: %s",
+            GST_STR_NULL (context->language));
+        break;
+      }
+
+        /* whether this is actually used */
+      case GST_MATROSKA_ID_TRACKFLAGENABLED:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num)
+          context->flags |= GST_MATROSKA_TRACK_ENABLED;
+        else
+          context->flags &= ~GST_MATROSKA_TRACK_ENABLED;
+
+        GST_DEBUG_OBJECT (parse, "TrackEnabled: %d",
+            (context->flags & GST_MATROSKA_TRACK_ENABLED) ? 1 : 0);
+        break;
+      }
+
+        /* whether it's the default for this track type */
+      case GST_MATROSKA_ID_TRACKFLAGDEFAULT:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num)
+          context->flags |= GST_MATROSKA_TRACK_DEFAULT;
+        else
+          context->flags &= ~GST_MATROSKA_TRACK_DEFAULT;
+
+        /* log the DEFAULT flag (was mistakenly testing ENABLED) */
+        GST_DEBUG_OBJECT (parse, "TrackDefault: %d",
+            (context->flags & GST_MATROSKA_TRACK_DEFAULT) ? 1 : 0);
+        break;
+      }
+
+        /* whether the track must be used during playback */
+      case GST_MATROSKA_ID_TRACKFLAGFORCED:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num)
+          context->flags |= GST_MATROSKA_TRACK_FORCED;
+        else
+          context->flags &= ~GST_MATROSKA_TRACK_FORCED;
+
+        /* log the FORCED flag (was mistakenly testing ENABLED) */
+        GST_DEBUG_OBJECT (parse, "TrackForced: %d",
+            (context->flags & GST_MATROSKA_TRACK_FORCED) ? 1 : 0);
+        break;
+      }
+
+        /* lacing (like MPEG, where blocks don't end/start on frame
+         * boundaries) */
+      case GST_MATROSKA_ID_TRACKFLAGLACING:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num)
+          context->flags |= GST_MATROSKA_TRACK_LACING;
+        else
+          context->flags &= ~GST_MATROSKA_TRACK_LACING;
+
+        /* log the LACING flag (was mistakenly testing ENABLED) */
+        GST_DEBUG_OBJECT (parse, "TrackLacing: %d",
+            (context->flags & GST_MATROSKA_TRACK_LACING) ? 1 : 0);
+        break;
+      }
+
+        /* default length (in time) of one data block in this track */
+      case GST_MATROSKA_ID_TRACKDEFAULTDURATION:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num == 0) {
+          GST_WARNING_OBJECT (parse, "Invalid TrackDefaultDuration 0");
+          break;
+        }
+
+        GST_DEBUG_OBJECT (parse, "TrackDefaultDuration: %" G_GUINT64_FORMAT,
+            num);
+        context->default_duration = num;
+        break;
+      }
+
+      case GST_MATROSKA_ID_CONTENTENCODINGS:{
+        ret = gst_matroska_read_common_read_track_encodings (&parse->common,
+            ebml, context);
+        break;
+      }
+
+      case GST_MATROSKA_ID_TRACKTIMECODESCALE:{
+        gdouble num;
+
+        if ((ret = gst_ebml_read_float (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        if (num <= 0.0) {
+          GST_WARNING_OBJECT (parse, "Invalid TrackTimeCodeScale %lf", num);
+          break;
+        }
+
+        GST_DEBUG_OBJECT (parse, "TrackTimeCodeScale: %lf", num);
+        context->timecodescale = num;
+        break;
+      }
+
+      default:
+        GST_WARNING ("Unknown TrackEntry subelement 0x%x - ignoring", id);
+        /* pass-through */
+
+        /* we ignore these because they're nothing useful (i.e. crap)
+         * or simply not implemented yet. */
+      case GST_MATROSKA_ID_TRACKMINCACHE:
+      case GST_MATROSKA_ID_TRACKMAXCACHE:
+      case GST_MATROSKA_ID_MAXBLOCKADDITIONID:
+      case GST_MATROSKA_ID_TRACKATTACHMENTLINK:
+      case GST_MATROSKA_ID_TRACKOVERLAY:
+      case GST_MATROSKA_ID_TRACKTRANSLATE:
+      case GST_MATROSKA_ID_TRACKOFFSET:
+      case GST_MATROSKA_ID_CODECSETTINGS:
+      case GST_MATROSKA_ID_CODECINFOURL:
+      case GST_MATROSKA_ID_CODECDOWNLOADURL:
+      case GST_MATROSKA_ID_CODECDECODEALL:
+        ret = gst_ebml_read_skip (ebml);
+        break;
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (parse, ebml, "TrackEntry", ret);
+
+  /* Decode codec private data if necessary */
+  if (context->encodings && context->encodings->len > 0 && context->codec_priv
+      && context->codec_priv_size > 0) {
+    if (!gst_matroska_decode_data (context->encodings,
+            &context->codec_priv, &context->codec_priv_size,
+            GST_MATROSKA_TRACK_ENCODING_SCOPE_CODEC_DATA, TRUE)) {
+      GST_WARNING_OBJECT (parse, "Decoding codec private data failed");
+      ret = GST_FLOW_ERROR;
+    }
+  }
+
+  /* unusable entry (no type, no codec, or a hard error): undo the add */
+  if (context->type == 0 || context->codec_id == NULL || (ret != GST_FLOW_OK
+          && ret != GST_FLOW_EOS)) {
+    if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
+      GST_WARNING_OBJECT (ebml, "Unknown stream/codec in track entry header");
+
+    parse->common.num_streams--;
+    g_ptr_array_remove_index (parse->common.src, parse->common.num_streams);
+    g_assert (parse->common.src->len == parse->common.num_streams);
+    gst_matroska_track_free (context);
+
+    return ret;
+  }
+
+  /* default the language for audio/subtitle tracks that did not carry one */
+  if ((context->language == NULL || *context->language == '\0') &&
+      (context->type == GST_MATROSKA_TRACK_TYPE_AUDIO ||
+          context->type == GST_MATROSKA_TRACK_TYPE_SUBTITLE)) {
+    GST_LOG ("stream %d: language=eng (assuming default)", context->index);
+    context->language = g_strdup ("eng");
+  }
+
+  /* tadaah! */
+  return ret;
+}
+
+/* Shared query handler for POSITION / DURATION / SEEKING queries, either
+ * for a specific pad (@pad non-NULL, track context stored as pad private
+ * data) or for the element as a whole (@pad == NULL). */
+static gboolean
+gst_matroska_parse_query (GstMatroskaParse * parse, GstPad * pad,
+    GstQuery * query)
+{
+  gboolean res = FALSE;
+  GstMatroskaTrackContext *context = NULL;
+
+  if (pad) {
+    /* per-pad queries carry the track context as pad private data */
+    context = gst_pad_get_element_private (pad);
+  }
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      GstFormat format;
+
+      gst_query_parse_position (query, &format, NULL);
+
+      if (format == GST_FORMAT_TIME) {
+        /* take the object lock: position fields are updated from the
+         * streaming thread */
+        GST_OBJECT_LOCK (parse);
+        if (context)
+          gst_query_set_position (query, GST_FORMAT_TIME, context->pos);
+        else
+          gst_query_set_position (query, GST_FORMAT_TIME,
+              parse->common.segment.position);
+        GST_OBJECT_UNLOCK (parse);
+      } else if (format == GST_FORMAT_DEFAULT && context
+          && context->default_duration) {
+        /* DEFAULT format: position expressed in units of the track's
+         * default (per-block) duration */
+        GST_OBJECT_LOCK (parse);
+        gst_query_set_position (query, GST_FORMAT_DEFAULT,
+            context->pos / context->default_duration);
+        GST_OBJECT_UNLOCK (parse);
+      } else {
+        GST_DEBUG_OBJECT (parse,
+            "only position query in TIME and DEFAULT format is supported");
+      }
+
+      /* query was recognised even if no value could be set */
+      res = TRUE;
+      break;
+    }
+    case GST_QUERY_DURATION:
+    {
+      GstFormat format;
+
+      gst_query_parse_duration (query, &format, NULL);
+
+      if (format == GST_FORMAT_TIME) {
+        GST_OBJECT_LOCK (parse);
+        gst_query_set_duration (query, GST_FORMAT_TIME,
+            parse->common.segment.duration);
+        GST_OBJECT_UNLOCK (parse);
+      } else if (format == GST_FORMAT_DEFAULT && context
+          && context->default_duration) {
+        /* duration in default-duration units, see POSITION above */
+        GST_OBJECT_LOCK (parse);
+        gst_query_set_duration (query, GST_FORMAT_DEFAULT,
+            parse->common.segment.duration / context->default_duration);
+        GST_OBJECT_UNLOCK (parse);
+      } else {
+        GST_DEBUG_OBJECT (parse,
+            "only duration query in TIME and DEFAULT format is supported");
+      }
+
+      res = TRUE;
+      break;
+    }
+
+    case GST_QUERY_SEEKING:
+    {
+      GstFormat fmt;
+
+      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+      if (fmt == GST_FORMAT_TIME) {
+        gboolean seekable;
+
+        /* assuming we'll be able to get an index ... */
+        seekable = parse->seekable;
+
+        gst_query_set_seeking (query, GST_FORMAT_TIME, seekable,
+            0, parse->common.segment.duration);
+        res = TRUE;
+      }
+      break;
+    }
+    default:
+      /* anything else goes through the default pad query handling when we
+       * have a pad to hand it to */
+      if (pad)
+        res = gst_pad_query_default (pad, (GstObject *) parse, query);
+      break;
+  }
+
+  return res;
+}
+
+/* Element-level query entry point: same handler, but with no pad context. */
+static gboolean
+gst_matroska_parse_element_query (GstElement * element, GstQuery * query)
+{
+  GstMatroskaParse *parse = GST_MATROSKA_PARSE (element);
+
+  return gst_matroska_parse_query (parse, NULL, query);
+}
+
+/* Source pad query handler: forward to the shared query implementation
+ * with this pad's context attached. */
+static gboolean
+gst_matroska_parse_handle_src_query (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  return gst_matroska_parse_query (GST_MATROSKA_PARSE (parent), pad, query);
+}
+
+/* Push the global tag list downstream, but only when it has changed since
+ * it was last sent. */
+static void
+gst_matroska_parse_send_tags (GstMatroskaParse * parse)
+{
+  GstEvent *tag_event;
+
+  if (G_LIKELY (!parse->common.global_tags_changed))
+    return;
+
+  gst_tag_list_add (parse->common.global_tags, GST_TAG_MERGE_REPLACE,
+      GST_TAG_CONTAINER_FORMAT, "Matroska", NULL);
+  GST_DEBUG_OBJECT (parse, "Sending global_tags %p : %" GST_PTR_FORMAT,
+      parse->common.global_tags, parse->common.global_tags);
+
+  /* Send a copy as we want to keep our local ref writable to add more tags
+   * if any are found */
+  tag_event =
+      gst_event_new_tag (gst_tag_list_copy (parse->common.global_tags));
+
+  gst_pad_push_event (parse->srcpad, tag_event);
+
+  parse->common.global_tags_changed = FALSE;
+}
+
+/* returns FALSE if there are no pads to deliver event to,
+ * otherwise TRUE (whatever the outcome of event sending),
+ * takes ownership of the passed event! */
+static gboolean
+gst_matroska_parse_send_event (GstMatroskaParse * parse, GstEvent * event)
+{
+  g_return_val_if_fail (event != NULL, FALSE);
+
+  GST_DEBUG_OBJECT (parse, "Sending event of type %s to all source pads",
+      GST_EVENT_TYPE_NAME (event));
+
+  gst_pad_push_event (parse->srcpad, event);
+
+  /* There is always exactly one (always) source pad, so per the contract
+   * above we return TRUE regardless of the push outcome.  Previously a
+   * never-updated local caused this to always return FALSE. */
+  return TRUE;
+}
+
+/* GstElement::send_event vfunc: only SEEK events are handled; everything
+ * else is logged and rejected.  Consumes @event. */
+static gboolean
+gst_matroska_parse_element_send_event (GstElement * element, GstEvent * event)
+{
+  GstMatroskaParse *parse = GST_MATROSKA_PARSE (element);
+  gboolean res = FALSE;
+
+  g_return_val_if_fail (event != NULL, FALSE);
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK)
+    res = gst_matroska_parse_handle_seek_event (parse, NULL, event);
+  else
+    GST_WARNING_OBJECT (parse, "Unhandled event of type %s",
+        GST_EVENT_TYPE_NAME (event));
+
+  gst_event_unref (event);
+  return res;
+}
+
+#if 0
+/* searches for a cluster start from @pos,
+ * return GST_FLOW_OK and cluster position in @pos if found */
+/* NOTE: this whole function is compiled out (#if 0) — kept for reference.
+ * It pulls 64 KiB chunks from upstream and scans them for the Cluster EBML
+ * id, then sanity-checks the candidate (first cluster, undefined length, or
+ * followed by another cluster) before accepting it. parse->common.offset is
+ * saved and restored so the scan has no lasting side effect on position. */
+static GstFlowReturn
+gst_matroska_parse_search_cluster (GstMatroskaParse * parse, gint64 * pos)
+{
+ gint64 newpos = *pos;
+ gint64 orig_offset;
+ GstFlowReturn ret = GST_FLOW_OK;
+ const guint chunk = 64 * 1024;
+ GstBuffer *buf;
+ GstMapInfo map;
+ gpointer data;
+ gsize size;
+ guint64 length;
+ guint32 id;
+ guint needed;
+
+ orig_offset = parse->common.offset;
+
+ /* read in at newpos and scan for ebml cluster id */
+ while (1) {
+ GstByteReader reader;
+ gint cluster_pos;
+
+ buf = NULL;
+ ret = gst_pad_pull_range (parse->common.sinkpad, newpos, chunk, &buf);
+ if (ret != GST_FLOW_OK)
+ break;
+ GST_DEBUG_OBJECT (parse,
+ "read buffer size %" G_GSIZE_FORMAT " at offset %" G_GINT64_FORMAT,
+ gst_buffer_get_size (buf), newpos);
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ gst_byte_reader_init (&reader, data, size);
+ cluster_pos = 0;
+ resume:
+ cluster_pos = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff,
+ GST_MATROSKA_ID_CLUSTER, cluster_pos, size - cluster_pos);
+ if (cluster_pos >= 0) {
+ newpos += cluster_pos;
+ GST_DEBUG_OBJECT (parse,
+ "found cluster ebml id at offset %" G_GINT64_FORMAT, newpos);
+ /* extra checks whether we really sync'ed to a cluster:
+ * - either it is the first and only cluster
+ * - either there is a cluster after this one
+ * - either cluster length is undefined
+ */
+ /* ok if first cluster (there may not a subsequent one) */
+ if (newpos == parse->first_cluster_offset) {
+ GST_DEBUG_OBJECT (parse, "cluster is first cluster -> OK");
+ break;
+ }
+ parse->common.offset = newpos;
+ ret = gst_matroska_read_common_peek_id_length_pull (&parse->common,
+ GST_ELEMENT_CAST (parse), &id, &length, &needed);
+ if (ret != GST_FLOW_OK)
+ goto resume;
+ g_assert (id == GST_MATROSKA_ID_CLUSTER);
+ GST_DEBUG_OBJECT (parse, "cluster size %" G_GUINT64_FORMAT ", prefix %d",
+ length, needed);
+ /* ok if undefined length or first cluster */
+ if (length == G_MAXUINT64) {
+ GST_DEBUG_OBJECT (parse, "cluster has undefined length -> OK");
+ break;
+ }
+ /* skip cluster */
+ parse->common.offset += length + needed;
+ ret = gst_matroska_read_common_peek_id_length_pull (&parse->common,
+ GST_ELEMENT_CAST (parse), &id, &length, &needed);
+ if (ret != GST_FLOW_OK)
+ goto resume;
+ GST_DEBUG_OBJECT (parse, "next element is %scluster",
+ id == GST_MATROSKA_ID_CLUSTER ? "" : "not ");
+ if (id == GST_MATROSKA_ID_CLUSTER)
+ break;
+ /* not ok, resume */
+ goto resume;
+ } else {
+ /* partial cluster id may have been in tail of buffer */
+ newpos += MAX (size, 4) - 3;
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+ }
+
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+
+ /* restore original position; only report the found cluster via *pos */
+ parse->common.offset = orig_offset;
+ *pos = newpos;
+ return ret;
+}
+#endif
+
+/* Handles a SEEK event. Only GST_FORMAT_TIME seeks are supported. Looks up
+ * a matching index entry (under the object lock) for the seek track and,
+ * if found, asks upstream to reposition to the entry's cluster offset via
+ * perform_seek_to_offset(); segment/newsegment handling then happens as a
+ * consequence of the new data. Returns FALSE when the seek cannot be
+ * satisfied (wrong format or no index entry). */
+static gboolean
+gst_matroska_parse_handle_seek_event (GstMatroskaParse * parse,
+ GstPad * pad, GstEvent * event)
+{
+ GstMatroskaIndex *entry = NULL;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ GstFormat format;
+ gdouble rate;
+ gint64 cur, stop;
+ GstMatroskaTrackContext *track = NULL;
+ GstSegment seeksegment = { 0, };
+ gboolean update;
+ GstSearchMode snap_dir;
+
+ if (pad)
+ track = gst_pad_get_element_private (pad);
+
+ /* may substitute a better-suited track (e.g. a video track) */
+ track = gst_matroska_read_common_get_seek_track (&parse->common, track);
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+ &stop_type, &stop);
+
+ /* we can only seek on time */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (parse, "Can only seek on TIME");
+ return FALSE;
+ }
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ memcpy (&seeksegment, &parse->common.segment, sizeof (GstSegment));
+
+ /* NOTE(review): @event was already dereferenced by gst_event_parse_seek()
+ * above, so this NULL check is redundant. */
+ if (event) {
+ GST_DEBUG_OBJECT (parse, "configuring seek");
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+ }
+
+ GST_DEBUG_OBJECT (parse, "New segment %" GST_SEGMENT_FORMAT, &seeksegment);
+
+ /* reverse playback snaps to the entry after the position, forward to the
+ * one before it */
+ if (seeksegment.rate < 0)
+ snap_dir = GST_SEARCH_MODE_AFTER;
+ else
+ snap_dir = GST_SEARCH_MODE_BEFORE;
+
+ /* check sanity before we start flushing and all that */
+ GST_OBJECT_LOCK (parse);
+ if ((entry = gst_matroska_read_common_do_index_seek (&parse->common, track,
+ seeksegment.position, &parse->seek_index, &parse->seek_entry,
+ snap_dir)) == NULL) {
+ /* pull mode without index can scan later on */
+ GST_DEBUG_OBJECT (parse, "No matching seek entry in index");
+ GST_OBJECT_UNLOCK (parse);
+ return FALSE;
+ }
+ GST_DEBUG_OBJECT (parse, "Seek position looks sane");
+ GST_OBJECT_UNLOCK (parse);
+
+ /* need to seek to cluster start to pick up cluster time */
+ /* upstream takes care of flushing and all that
+ * ... and newsegment event handling takes care of the rest */
+ return perform_seek_to_offset (parse, entry->pos
+ + parse->common.ebml_segment_start);
+}
+
+/*
+ * Handle whether we can perform the seek event or if we have to let the chain
+ * function handle seeks to build the seek indexes first.
+ */
+/* Push-mode seek entry point. Rejects non-TIME, end-time, non-flushing and
+ * segment seeks. If the Cues have not been parsed yet, stashes the event,
+ * switches the reader state to _SEEK and (for the first caller) seeks
+ * upstream to the Cues location; otherwise delegates to the regular seek
+ * handler. */
+static gboolean
+gst_matroska_parse_handle_seek_push (GstMatroskaParse * parse, GstPad * pad,
+ GstEvent * event)
+{
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ GstFormat format;
+ gdouble rate;
+ gint64 cur, stop;
+
+ gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
+ &stop_type, &stop);
+
+ /* sanity checks */
+
+ /* we can only seek on time */
+ if (format != GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (parse, "Can only seek on TIME");
+ return FALSE;
+ }
+
+ if (stop_type != GST_SEEK_TYPE_NONE && stop != GST_CLOCK_TIME_NONE) {
+ GST_DEBUG_OBJECT (parse, "Seek end-time not supported in streaming mode");
+ return FALSE;
+ }
+
+ if (!(flags & GST_SEEK_FLAG_FLUSH)) {
+ GST_DEBUG_OBJECT (parse,
+ "Non-flushing seek not supported in streaming mode");
+ return FALSE;
+ }
+
+ if (flags & GST_SEEK_FLAG_SEGMENT) {
+ GST_DEBUG_OBJECT (parse, "Segment seek not supported in streaming mode");
+ return FALSE;
+ }
+
+ /* check for having parsed index already */
+ if (!parse->common.index_parsed) {
+ gboolean building_index;
+ guint64 offset = 0;
+
+ if (!parse->index_offset) {
+ GST_DEBUG_OBJECT (parse, "no index (location); no seek in push mode");
+ return FALSE;
+ }
+
+ GST_OBJECT_LOCK (parse);
+ /* handle the seek event in the chain function */
+ parse->common.state = GST_MATROSKA_READ_STATE_SEEK;
+ /* no more seek can be issued until state reset to _DATA */
+
+ /* copy the event */
+ if (parse->seek_event)
+ gst_event_unref (parse->seek_event);
+ parse->seek_event = gst_event_ref (event);
+
+ /* set the building_index flag so that only one thread can setup the
+ * structures for index seeking. */
+ building_index = parse->building_index;
+ if (!building_index) {
+ parse->building_index = TRUE;
+ offset = parse->index_offset;
+ }
+ GST_OBJECT_UNLOCK (parse);
+
+ if (!building_index) {
+ /* seek to the first subindex or legacy index */
+ GST_INFO_OBJECT (parse, "Seeking to Cues at %" G_GUINT64_FORMAT, offset);
+ return perform_seek_to_offset (parse, offset);
+ }
+
+ /* well, we are handling it already */
+ return TRUE;
+ }
+
+ /* delegate to tweaked regular seek */
+ return gst_matroska_parse_handle_seek_event (parse, pad, event);
+}
+
+/* Source-pad event handler. SEEK is routed to the push-mode seek logic
+ * (only once the reader is in _DATA state); QOS updates the video track's
+ * earliest_time under the object lock; NAVIGATION is dropped; everything
+ * else is forwarded upstream on the sink pad. */
+static gboolean
+gst_matroska_parse_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (parent);
+ gboolean res = TRUE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ /* no seeking until we are (safely) ready */
+ if (parse->common.state != GST_MATROSKA_READ_STATE_DATA) {
+ GST_DEBUG_OBJECT (parse, "not ready for seeking yet");
+ return FALSE;
+ }
+ res = gst_matroska_parse_handle_seek_push (parse, pad, event);
+ gst_event_unref (event);
+ break;
+
+ case GST_EVENT_QOS:
+ {
+ /* NOTE(review): context is dereferenced without a NULL check —
+ * presumably pads always carry a track context as element_private;
+ * confirm against pad creation. */
+ GstMatroskaTrackContext *context = gst_pad_get_element_private (pad);
+ if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+ GstMatroskaTrackVideoContext *videocontext =
+ (GstMatroskaTrackVideoContext *) context;
+ gdouble proportion;
+ GstClockTimeDiff diff;
+ GstClockTime timestamp;
+
+ gst_event_parse_qos (event, NULL, &proportion, &diff, &timestamp);
+
+ /* remember the earliest useful running time for QoS frame dropping */
+ GST_OBJECT_LOCK (parse);
+ videocontext->earliest_time = timestamp + diff;
+ GST_OBJECT_UNLOCK (parse);
+ }
+ res = TRUE;
+ gst_event_unref (event);
+ break;
+ }
+
+ /* events we don't need to handle */
+ case GST_EVENT_NAVIGATION:
+ gst_event_unref (event);
+ res = FALSE;
+ break;
+
+ case GST_EVENT_LATENCY:
+ default:
+ res = gst_pad_push_event (parse->common.sinkpad, event);
+ break;
+ }
+
+ return res;
+}
+
+/* Parses a Tracks master element: each TrackEntry child is handed to
+ * gst_matroska_parse_add_stream(), unknown children are skipped. Sets
+ * parse->tracks_parsed unconditionally once the loop ends. */
+static GstFlowReturn
+gst_matroska_parse_parse_tracks (GstMatroskaParse * parse, GstEbmlRead * ebml)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (parse, ebml, "Tracks");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (parse, ebml, "Tracks", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* one track within the "all-tracks" header */
+ case GST_MATROSKA_ID_TRACKENTRY:
+ ret = gst_matroska_parse_add_stream (parse, ebml);
+ break;
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&parse->common, ebml,
+ "Track", id);
+ break;
+ }
+ }
+ DEBUG_ELEMENT_STOP (parse, ebml, "Tracks", ret);
+
+ parse->tracks_parsed = TRUE;
+
+ return ret;
+}
+
+/*
+ * Read signed/unsigned "EBML" numbers.
+ * Return: number of bytes processed.
+ */
+
+/* Decodes an EBML variable-length unsigned integer from @data (at most
+ * @size bytes). The number of leading zero bits in the first byte encodes
+ * the total length (1..8 bytes); the length-descriptor bit is masked off
+ * and the remaining bits form the value. An all-ones payload denotes the
+ * reserved "unknown" value and yields G_MAXUINT64. Returns the number of
+ * bytes consumed, or -1 on malformed/truncated input. */
+static gint
+gst_matroska_ebmlnum_uint (guint8 * data, guint size, guint64 * num)
+{
+ gint len_mask = 0x80, read = 1, n = 1, num_ffs = 0;
+ guint64 total;
+
+ /* size is unsigned, so this is effectively a size == 0 check */
+ if (size <= 0) {
+ return -1;
+ }
+
+ /* count leading zeros in the first byte to find the encoded length */
+ total = data[0];
+ while (read <= 8 && !(total & len_mask)) {
+ read++;
+ len_mask >>= 1;
+ }
+ if (read > 8)
+ return -1;
+
+ /* strip the length-descriptor bit; track whether all value bits are 1s */
+ if ((total &= (len_mask - 1)) == len_mask - 1)
+ num_ffs++;
+ if (size < read)
+ return -1;
+ while (n < read) {
+ if (data[n] == 0xff)
+ num_ffs++;
+ total = (total << 8) | data[n];
+ n++;
+ }
+
+ /* every byte maximal -> reserved "unknown value" encoding */
+ if (read == num_ffs && total != 0)
+ *num = G_MAXUINT64;
+ else
+ *num = total;
+
+ return read;
+}
+
+/* Decodes an EBML variable-length signed integer: reads the unsigned form
+ * and subtracts the bias (2^(7*len-1) - 1) to recenter around zero. The
+ * unknown-value encoding maps to G_MAXINT64. Returns bytes consumed or -1
+ * on error. */
+static gint
+gst_matroska_ebmlnum_sint (guint8 * data, guint size, gint64 * num)
+{
+ guint64 unum;
+ gint res;
+
+ /* read as unsigned number first */
+ if ((res = gst_matroska_ebmlnum_uint (data, size, &unum)) < 0)
+ return -1;
+
+ /* make signed */
+ if (unum == G_MAXUINT64)
+ *num = G_MAXINT64;
+ else
+ /* NOTE(review): `1` is a 32-bit int, so for res >= 5 this shifts by
+ * 34..55 bits — undefined behavior in C. Should be
+ * G_GUINT64_CONSTANT (1) << ... ; confirm and fix upstream. */
+ *num = unum - ((1 << ((7 * res) - 1)) - 1);
+
+ return res;
+}
+
+/* Parses one BlockGroup (or a single SimpleBlock) from @ebml: reads the
+ * block header (track number, relative timestamp, flags), decodes the lace
+ * sizes for the four lacing modes, collects BlockDuration/ReferenceBlock/
+ * CodecState siblings, and then performs per-lace timestamp bookkeeping
+ * (segment generation, QoS keyframe skipping). The actual buffer-pushing
+ * code is compiled out (#if 0) — in this element only timestamps/segment
+ * state are updated. Lacing or data errors are non-fatal and reported as
+ * GST_FLOW_OK so the caller can try the next block(group). */
+static GstFlowReturn
+gst_matroska_parse_parse_blockgroup_or_simpleblock (GstMatroskaParse * parse,
+ GstEbmlRead * ebml, guint64 cluster_time, guint64 cluster_offset,
+ gboolean is_simpleblock)
+{
+ GstMatroskaTrackContext *stream = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean readblock = FALSE;
+ guint32 id;
+ guint64 block_duration = 0;
+ GstBuffer *buf = NULL;
+ GstMapInfo map;
+ gint stream_num = -1, n, laces = 0;
+ guint size = 0;
+ gint *lace_size = NULL;
+ gint64 time = 0;
+ gint flags = 0;
+ gint64 referenceblock = 0;
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if (!is_simpleblock) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK) {
+ goto data_error;
+ }
+ } else {
+ /* a SimpleBlock has no surrounding group; treat it as the block */
+ id = GST_MATROSKA_ID_SIMPLEBLOCK;
+ }
+
+ switch (id) {
+ /* one block inside the group. Note, block parsing is one
+ * of the harder things, so this code is a bit complicated.
+ * See http://www.matroska.org/ for documentation. */
+ case GST_MATROSKA_ID_SIMPLEBLOCK:
+ case GST_MATROSKA_ID_BLOCK:
+ {
+ guint64 num;
+ guint8 *data;
+
+ if (buf) {
+ gst_buffer_unref (buf);
+ buf = NULL;
+ }
+ if ((ret = gst_ebml_read_buffer (ebml, &id, &buf)) != GST_FLOW_OK)
+ break;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ /* first byte(s): blocknum */
+ if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
+ goto data_error;
+ data += n;
+ size -= n;
+
+ /* fetch stream from num */
+ stream_num = gst_matroska_read_common_stream_from_num (&parse->common,
+ num);
+ /* need at least 2 timestamp bytes + 1 flags byte */
+ if (G_UNLIKELY (size < 3)) {
+ GST_WARNING_OBJECT (parse, "Invalid size %u", size);
+ /* non-fatal, try next block(group) */
+ ret = GST_FLOW_OK;
+ goto done;
+ } else if (G_UNLIKELY (stream_num < 0 ||
+ stream_num >= parse->common.num_streams)) {
+ /* let's not give up on a stray invalid track number */
+ GST_WARNING_OBJECT (parse,
+ "Invalid stream %d for track number %" G_GUINT64_FORMAT
+ "; ignoring block", stream_num, num);
+ goto done;
+ }
+
+ stream = g_ptr_array_index (parse->common.src, stream_num);
+
+ /* time (relative to cluster time) */
+ time = ((gint16) GST_READ_UINT16_BE (data));
+ data += 2;
+ size -= 2;
+ flags = GST_READ_UINT8 (data);
+ data += 1;
+ size -= 1;
+
+ GST_LOG_OBJECT (parse, "time %" G_GUINT64_FORMAT ", flags %d", time,
+ flags);
+
+ /* bits 1-2 of the flags byte select the lacing mode */
+ switch ((flags & 0x06) >> 1) {
+ case 0x0: /* no lacing */
+ laces = 1;
+ lace_size = g_new (gint, 1);
+ lace_size[0] = size;
+ break;
+
+ case 0x1: /* xiph lacing */
+ case 0x2: /* fixed-size lacing */
+ case 0x3: /* EBML lacing */
+ if (size == 0)
+ goto invalid_lacing;
+ /* stored count is laces - 1 */
+ laces = GST_READ_UINT8 (data) + 1;
+ data += 1;
+ size -= 1;
+ lace_size = g_new0 (gint, laces);
+
+ switch ((flags & 0x06) >> 1) {
+ case 0x1: /* xiph lacing */ {
+ guint temp, total = 0;
+
+ /* each size is a sum of bytes, terminated by a byte != 0xff;
+ * last lace gets whatever remains */
+ for (n = 0; ret == GST_FLOW_OK && n < laces - 1; n++) {
+ while (1) {
+ if (size == 0)
+ goto invalid_lacing;
+ temp = GST_READ_UINT8 (data);
+ lace_size[n] += temp;
+ data += 1;
+ size -= 1;
+ if (temp != 0xff)
+ break;
+ }
+ total += lace_size[n];
+ }
+ lace_size[n] = size - total;
+ break;
+ }
+
+ case 0x2: /* fixed-size lacing */
+ for (n = 0; n < laces; n++)
+ lace_size[n] = size / laces;
+ break;
+
+ case 0x3: /* EBML lacing */ {
+ guint total;
+
+ /* first size is absolute, subsequent ones are signed deltas */
+ if ((n = gst_matroska_ebmlnum_uint (data, size, &num)) < 0)
+ goto data_error;
+ data += n;
+ size -= n;
+ total = lace_size[0] = num;
+ for (n = 1; ret == GST_FLOW_OK && n < laces - 1; n++) {
+ gint64 snum;
+ gint r;
+
+ if ((r = gst_matroska_ebmlnum_sint (data, size, &snum)) < 0)
+ goto data_error;
+ data += r;
+ size -= r;
+ lace_size[n] = lace_size[n - 1] + snum;
+ total += lace_size[n];
+ }
+ if (n < laces)
+ lace_size[n] = size - total;
+ break;
+ }
+ }
+ break;
+ }
+
+ if (ret != GST_FLOW_OK)
+ break;
+
+ readblock = TRUE;
+ break;
+ }
+
+ case GST_MATROSKA_ID_BLOCKDURATION:{
+ ret = gst_ebml_read_uint (ebml, &id, &block_duration);
+ GST_DEBUG_OBJECT (parse, "BlockDuration: %" G_GUINT64_FORMAT,
+ block_duration);
+ break;
+ }
+
+ case GST_MATROSKA_ID_REFERENCEBLOCK:{
+ ret = gst_ebml_read_sint (ebml, &id, &referenceblock);
+ GST_DEBUG_OBJECT (parse, "ReferenceBlock: %" G_GINT64_FORMAT,
+ referenceblock);
+ break;
+ }
+
+ case GST_MATROSKA_ID_CODECSTATE:{
+ guint8 *data;
+ guint64 data_len = 0;
+
+ if ((ret =
+ gst_ebml_read_binary (ebml, &id, &data,
+ &data_len)) != GST_FLOW_OK)
+ break;
+
+ if (G_UNLIKELY (stream == NULL)) {
+ GST_WARNING_OBJECT (parse,
+ "Unexpected CodecState subelement - ignoring");
+ break;
+ }
+
+ /* stream takes ownership of the freshly read codec state */
+ g_free (stream->codec_state);
+ stream->codec_state = data;
+ stream->codec_state_size = data_len;
+
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&parse->common, ebml,
+ "BlockGroup", id);
+ break;
+
+ case GST_MATROSKA_ID_BLOCKVIRTUAL:
+ case GST_MATROSKA_ID_BLOCKADDITIONS:
+ case GST_MATROSKA_ID_REFERENCEPRIORITY:
+ case GST_MATROSKA_ID_REFERENCEVIRTUAL:
+ case GST_MATROSKA_ID_SLICES:
+ GST_DEBUG_OBJECT (parse,
+ "Skipping BlockGroup subelement 0x%x - ignoring", id);
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+
+ /* a SimpleBlock stands alone; no sibling elements to read */
+ if (is_simpleblock)
+ break;
+ }
+
+ /* reading a number or so could have failed */
+ if (ret != GST_FLOW_OK)
+ goto data_error;
+
+ if (ret == GST_FLOW_OK && readblock) {
+ guint64 duration = 0;
+ gint64 lace_time = 0;
+ gboolean delta_unit;
+
+ stream = g_ptr_array_index (parse->common.src, stream_num);
+
+ if (cluster_time != GST_CLOCK_TIME_NONE) {
+ /* FIXME: What to do with negative timestamps? Give timestamp 0 or -1?
+ * Drop unless the lace contains timestamp 0? */
+ if (time < 0 && (-time) > cluster_time) {
+ lace_time = 0;
+ } else {
+ if (stream->timecodescale == 1.0)
+ lace_time = (cluster_time + time) * parse->common.time_scale;
+ else
+ lace_time =
+ gst_util_guint64_to_gdouble ((cluster_time + time) *
+ parse->common.time_scale) * stream->timecodescale;
+ }
+ } else {
+ lace_time = GST_CLOCK_TIME_NONE;
+ }
+
+ if (lace_time != GST_CLOCK_TIME_NONE) {
+ parse->last_timestamp = lace_time;
+ }
+ /* need to refresh segment info ASAP */
+ if (GST_CLOCK_TIME_IS_VALID (lace_time) && parse->need_newsegment) {
+ GstSegment segment;
+ GST_DEBUG_OBJECT (parse,
+ "generating segment starting at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (lace_time));
+ /* pretend we seeked here */
+ gst_segment_do_seek (&parse->common.segment, parse->common.segment.rate,
+ GST_FORMAT_TIME, 0, GST_SEEK_TYPE_SET, lace_time,
+ GST_SEEK_TYPE_SET, GST_CLOCK_TIME_NONE, NULL);
+ /* now convey our segment notion downstream */
+ segment = parse->common.segment;
+ segment.position = segment.start;
+ gst_matroska_parse_send_event (parse, gst_event_new_segment (&segment));
+ parse->need_newsegment = FALSE;
+ }
+
+ if (block_duration) {
+ if (stream->timecodescale == 1.0)
+ duration = gst_util_uint64_scale (block_duration,
+ parse->common.time_scale, 1);
+ else
+ duration =
+ gst_util_gdouble_to_guint64 (gst_util_guint64_to_gdouble
+ (gst_util_uint64_scale (block_duration, parse->common.time_scale,
+ 1)) * stream->timecodescale);
+ } else if (stream->default_duration) {
+ duration = stream->default_duration * laces;
+ }
+ /* else duration is diff between timecode of this and next block */
+
+ /* For SimpleBlock, look at the keyframe bit in flags. Otherwise,
+ a ReferenceBlock implies that this is not a keyframe. In either
+ case, it only makes sense for video streams. */
+ delta_unit = stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
+ ((is_simpleblock && !(flags & 0x80)) || referenceblock);
+
+ if (delta_unit && stream->set_discont) {
+ /* When doing seeks or such, we need to restart on key frames or
+ * decoders might choke. */
+ GST_DEBUG_OBJECT (parse, "skipping delta unit");
+ goto done;
+ }
+
+ for (n = 0; n < laces; n++) {
+ if (G_UNLIKELY (lace_size[n] > size)) {
+ GST_WARNING_OBJECT (parse, "Invalid lace size");
+ break;
+ }
+
+ /* QoS for video track with an index. the assumption is that
+ index entries point to keyframes, but if that is not true we
+ will instead skip until the next keyframe. */
+ if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
+ stream->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
+ stream->index_table && parse->common.segment.rate > 0.0) {
+ GstMatroskaTrackVideoContext *videocontext =
+ (GstMatroskaTrackVideoContext *) stream;
+ GstClockTime earliest_time;
+ GstClockTime earliest_stream_time;
+
+ GST_OBJECT_LOCK (parse);
+ earliest_time = videocontext->earliest_time;
+ GST_OBJECT_UNLOCK (parse);
+ earliest_stream_time =
+ gst_segment_position_from_running_time (&parse->common.segment,
+ GST_FORMAT_TIME, earliest_time);
+
+ if (GST_CLOCK_TIME_IS_VALID (lace_time) &&
+ GST_CLOCK_TIME_IS_VALID (earliest_stream_time) &&
+ lace_time <= earliest_stream_time) {
+ /* find index entry (keyframe) <= earliest_stream_time */
+ GstMatroskaIndex *entry =
+ gst_util_array_binary_search (stream->index_table->data,
+ stream->index_table->len, sizeof (GstMatroskaIndex),
+ (GCompareDataFunc) gst_matroska_index_seek_find,
+ GST_SEARCH_MODE_BEFORE, &earliest_stream_time, NULL);
+
+ /* if that entry (keyframe) is after the current the current
+ buffer, we can skip pushing (and thus decoding) all
+ buffers until that keyframe. */
+ if (entry && GST_CLOCK_TIME_IS_VALID (entry->time) &&
+ entry->time > lace_time) {
+ GST_LOG_OBJECT (parse, "Skipping lace before late keyframe");
+ stream->set_discont = TRUE;
+ goto next_lace;
+ }
+ }
+ }
+/* legacy (0.10-era) buffer-pushing code, compiled out in this element */
+#if 0
+ sub = gst_buffer_create_sub (buf,
+ GST_BUFFER_SIZE (buf) - size, lace_size[n]);
+ GST_DEBUG_OBJECT (parse, "created subbuffer %p", sub);
+
+ if (delta_unit)
+ GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_UNSET (sub, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (stream->encodings != NULL && stream->encodings->len > 0)
+ sub = gst_matroska_decode_buffer (stream, sub);
+
+ if (sub == NULL) {
+ GST_WARNING_OBJECT (parse, "Decoding buffer failed");
+ goto next_lace;
+ }
+
+ GST_BUFFER_TIMESTAMP (sub) = lace_time;
+
+ if (GST_CLOCK_TIME_IS_VALID (lace_time)) {
+ GstClockTime last_stop_end;
+
+ /* Check if this stream is after segment stop */
+ if (GST_CLOCK_TIME_IS_VALID (parse->common.segment.stop) &&
+ lace_time >= parse->common.segment.stop) {
+ GST_DEBUG_OBJECT (parse,
+ "Stream %d after segment stop %" GST_TIME_FORMAT, stream->index,
+ GST_TIME_ARGS (parse->common.segment.stop));
+ gst_buffer_unref (sub);
+ goto eos;
+ }
+ if (offset >= stream->to_offset) {
+ GST_DEBUG_OBJECT (parse, "Stream %d after playback section",
+ stream->index);
+ gst_buffer_unref (sub);
+ goto eos;
+ }
+
+ /* handle gaps, e.g. non-zero start-time, or an cue index entry
+ * that landed us with timestamps not quite intended */
+ if (GST_CLOCK_TIME_IS_VALID (parse->segment.last_stop) &&
+ parse->segment.rate > 0.0) {
+ GstClockTimeDiff diff;
+
+ /* only send newsegments with increasing start times,
+ * otherwise if these go back and forth downstream (sinks) increase
+ * accumulated time and running_time */
+ diff = GST_CLOCK_DIFF (parse->segment.last_stop, lace_time);
+ if (diff > 2 * GST_SECOND && lace_time > parse->segment.start &&
+ (!GST_CLOCK_TIME_IS_VALID (parse->segment.stop) ||
+ lace_time < parse->segment.stop)) {
+ GST_DEBUG_OBJECT (parse,
+ "Gap of %" G_GINT64_FORMAT " ns detected in"
+ "stream %d (%" GST_TIME_FORMAT " -> %" GST_TIME_FORMAT "). "
+ "Sending updated NEWSEGMENT events", diff,
+ stream->index, GST_TIME_ARGS (stream->pos),
+ GST_TIME_ARGS (lace_time));
+ /* send newsegment events such that the gap is not accounted in
+ * accum time, hence running_time */
+ /* close ahead of gap */
+ gst_matroska_parse_send_event (parse,
+ gst_event_new_new_segment (TRUE, parse->segment.rate,
+ parse->segment.format, parse->segment.last_stop,
+ parse->segment.last_stop, parse->segment.last_stop));
+ /* skip gap */
+ gst_matroska_parse_send_event (parse,
+ gst_event_new_new_segment (FALSE, parse->segment.rate,
+ parse->segment.format, lace_time, parse->segment.stop,
+ lace_time));
+ /* align segment view with downstream,
+ * prevents double-counting accum when closing segment */
+ gst_segment_set_newsegment (&parse->segment, FALSE,
+ parse->segment.rate, parse->segment.format, lace_time,
+ parse->segment.stop, lace_time);
+ parse->segment.last_stop = lace_time;
+ }
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (parse->segment.last_stop)
+ || parse->segment.last_stop < lace_time) {
+ parse->segment.last_stop = lace_time;
+ }
+
+ last_stop_end = lace_time;
+ if (duration) {
+ GST_BUFFER_DURATION (sub) = duration / laces;
+ last_stop_end += GST_BUFFER_DURATION (sub);
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (parse->last_stop_end) ||
+ parse->last_stop_end < last_stop_end)
+ parse->last_stop_end = last_stop_end;
+
+ if (parse->segment.duration == -1 ||
+ parse->segment.duration < lace_time) {
+ gst_segment_set_duration (&parse->segment, GST_FORMAT_TIME,
+ last_stop_end);
+ gst_element_post_message (GST_ELEMENT_CAST (parse),
+ gst_message_new_duration (GST_OBJECT_CAST (parse),
+ GST_FORMAT_TIME, GST_CLOCK_TIME_NONE));
+ }
+ }
+
+ stream->pos = lace_time;
+
+ gst_matroska_parse_sync_streams (parse);
+
+ if (stream->set_discont) {
+ GST_DEBUG_OBJECT (parse, "marking DISCONT");
+ GST_BUFFER_FLAG_SET (sub, GST_BUFFER_FLAG_DISCONT);
+ stream->set_discont = FALSE;
+ }
+
+ /* reverse playback book-keeping */
+ if (!GST_CLOCK_TIME_IS_VALID (stream->from_time))
+ stream->from_time = lace_time;
+ if (stream->from_offset == -1)
+ stream->from_offset = offset;
+
+ GST_DEBUG_OBJECT (parse,
+ "Pushing lace %d, data of size %d for stream %d, time=%"
+ GST_TIME_FORMAT " and duration=%" GST_TIME_FORMAT, n,
+ GST_BUFFER_SIZE (sub), stream_num,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (sub)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (sub)));
+
+ if (parse->element_index) {
+ if (stream->index_writer_id == -1)
+ gst_index_get_writer_id (parse->element_index,
+ GST_OBJECT (stream->pad), &stream->index_writer_id);
+
+ GST_LOG_OBJECT (parse, "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT " for writer id %d",
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (sub)), cluster_offset,
+ stream->index_writer_id);
+ gst_index_add_association (parse->element_index,
+ stream->index_writer_id, GST_BUFFER_FLAG_IS_SET (sub,
+ GST_BUFFER_FLAG_DELTA_UNIT) ? 0 : GST_ASSOCIATION_FLAG_KEY_UNIT,
+ GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (sub), GST_FORMAT_BYTES,
+ cluster_offset, NULL);
+ }
+
+ gst_buffer_set_caps (sub, GST_PAD_CAPS (parse->srcpad));
+
+ /* Postprocess the buffers depending on the codec used */
+ if (stream->postprocess_frame) {
+ GST_LOG_OBJECT (parse, "running post process");
+ ret = stream->postprocess_frame (GST_ELEMENT (parse), stream, &sub);
+ }
+
+ ret = gst_pad_push (stream->pad, sub);
+ if (parse->segment.rate < 0) {
+ if (lace_time > parse->segment.stop && ret == GST_FLOW_EOS) {
+ /* In reverse playback we can get a GST_FLOW_EOS when
+ * we are at the end of the segment, so we just need to jump
+ * back to the previous section. */
+ GST_DEBUG_OBJECT (parse, "downstream has reached end of segment");
+ ret = GST_FLOW_OK;
+ }
+ }
+ /* combine flows */
+ ret = gst_matroska_parse_combine_flows (parse, stream, ret);
+#endif
+
+ next_lace:
+ /* advance past this lace; derive next lace's timestamp from duration */
+ size -= lace_size[n];
+ if (lace_time != GST_CLOCK_TIME_NONE && duration)
+ lace_time += duration / laces;
+ else
+ lace_time = GST_CLOCK_TIME_NONE;
+ }
+ }
+
+done:
+ if (buf) {
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+ }
+ g_free (lace_size);
+
+ return ret;
+
+ /* EXITS */
+invalid_lacing:
+ {
+ GST_ELEMENT_WARNING (parse, STREAM, DEMUX, (NULL), ("Invalid lacing size"));
+ /* non-fatal, try next block(group) */
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+data_error:
+ {
+ GST_ELEMENT_WARNING (parse, STREAM, DEMUX, (NULL), ("Data error"));
+ /* non-fatal, try next block(group) */
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+}
+
+/* return FALSE if block(group) should be skipped (due to a seek) */
+/* seek_block holds a countdown of blocks to skip after a seek; each call
+ * decrements it, returning TRUE only once it reaches zero (or when no seek
+ * skip is pending at all). */
+static inline gboolean
+gst_matroska_parse_seek_block (GstMatroskaParse * parse)
+{
+ if (G_UNLIKELY (parse->seek_block)) {
+ if (!(--parse->seek_block)) {
+ return TRUE;
+ } else {
+ GST_LOG_OBJECT (parse, "should skip block due to seek");
+ return FALSE;
+ }
+ } else {
+ return TRUE;
+ }
+}
+
+/* Parses one SeekHead Seek entry (SeekID + SeekPosition pair). For known
+ * top-level element ids the position is validated against upstream length;
+ * only the Cues location is actually remembered (parse->index_offset), so
+ * the index can be fetched later when a push-mode seek is requested. */
+static GstFlowReturn
+gst_matroska_parse_parse_contents_seekentry (GstMatroskaParse * parse,
+ GstEbmlRead * ebml)
+{
+ GstFlowReturn ret;
+ guint64 seek_pos = (guint64) - 1;
+ guint32 seek_id = 0;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (parse, ebml, "Seek");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (parse, ebml, "Seek", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_SEEKID:
+ {
+ guint64 t;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &t)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (parse, "SeekID: %" G_GUINT64_FORMAT, t);
+ seek_id = t;
+ break;
+ }
+
+ case GST_MATROSKA_ID_SEEKPOSITION:
+ {
+ guint64 t;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &t)) != GST_FLOW_OK)
+ break;
+
+ if (t > G_MAXINT64) {
+ GST_WARNING_OBJECT (parse,
+ "Too large SeekPosition %" G_GUINT64_FORMAT, t);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (parse, "SeekPosition: %" G_GUINT64_FORMAT, t);
+ seek_pos = t;
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&parse->common, ebml,
+ "SeekHead", id);
+ break;
+ }
+ }
+
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
+ return ret;
+
+ /* both halves of the pair are required for a usable entry */
+ if (!seek_id || seek_pos == (guint64) - 1) {
+ GST_WARNING_OBJECT (parse, "Incomplete seekhead entry (0x%x/%"
+ G_GUINT64_FORMAT ")", seek_id, seek_pos);
+ return GST_FLOW_OK;
+ }
+
+ switch (seek_id) {
+ case GST_MATROSKA_ID_SEEKHEAD:
+ {
+ /* intentional fall through to the shared validation below */
+ }
+ case GST_MATROSKA_ID_CUES:
+ case GST_MATROSKA_ID_TAGS:
+ case GST_MATROSKA_ID_TRACKS:
+ case GST_MATROSKA_ID_SEGMENTINFO:
+ case GST_MATROSKA_ID_ATTACHMENTS:
+ case GST_MATROSKA_ID_CHAPTERS:
+ {
+ guint64 length;
+
+ /* remember */
+ length = gst_matroska_read_common_get_length (&parse->common);
+
+ if (length == (guint64) - 1) {
+ GST_DEBUG_OBJECT (parse, "no upstream length, skipping SeakHead entry");
+ break;
+ }
+
+ /* check for validity */
+ if (seek_pos + parse->common.ebml_segment_start + 12 >= length) {
+ GST_WARNING_OBJECT (parse,
+ "SeekHead reference lies outside file!" " (%"
+ G_GUINT64_FORMAT "+%" G_GUINT64_FORMAT "+12 >= %"
+ G_GUINT64_FORMAT ")", seek_pos, parse->common.ebml_segment_start,
+ length);
+ break;
+ }
+
+ /* only pick up index location when streaming */
+ if (seek_id == GST_MATROSKA_ID_CUES) {
+ parse->index_offset = seek_pos + parse->common.ebml_segment_start;
+ GST_DEBUG_OBJECT (parse, "Cues located at offset %" G_GUINT64_FORMAT,
+ parse->index_offset);
+ }
+ break;
+ }
+
+ default:
+ GST_DEBUG_OBJECT (parse, "Ignoring Seek entry for ID=0x%x", seek_id);
+ break;
+ }
+ DEBUG_ELEMENT_STOP (parse, ebml, "Seek", ret);
+
+ return ret;
+}
+
+/* Parses a SeekHead master element: each Seek child is parsed by
+ * gst_matroska_parse_parse_contents_seekentry() (whose errors/EOS are
+ * deliberately ignored so a bad entry doesn't abort parsing); unknown
+ * children are skipped. */
+static GstFlowReturn
+gst_matroska_parse_parse_contents (GstMatroskaParse * parse, GstEbmlRead * ebml)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (parse, ebml, "SeekHead");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (parse, ebml, "SeekHead", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_SEEKENTRY:
+ {
+ ret = gst_matroska_parse_parse_contents_seekentry (parse, ebml);
+ /* Ignore EOS and errors here */
+ if (ret != GST_FLOW_OK) {
+ GST_DEBUG_OBJECT (parse, "Ignoring %s", gst_flow_get_name (ret));
+ ret = GST_FLOW_OK;
+ }
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (&parse->common, ebml,
+ "SeekHead", id);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (parse, ebml, "SeekHead", ret);
+
+ return ret;
+}
+
+/* element-private flow return used to signal an oversized read attempt */
+#define GST_FLOW_OVERFLOW GST_FLOW_CUSTOM_ERROR
+
+/* upper bound for a single non-recursed element read (15 MiB) */
+#define MAX_BLOCK_SIZE (15 * 1024 * 1024)
+
+/* Rejects reads larger than MAX_BLOCK_SIZE: posts a fatal element error
+ * (cannot step over such data in streaming mode) and returns
+ * GST_FLOW_ERROR; otherwise GST_FLOW_OK. */
+static inline GstFlowReturn
+gst_matroska_parse_check_read_size (GstMatroskaParse * parse, guint64 bytes)
+{
+ if (G_UNLIKELY (bytes > MAX_BLOCK_SIZE)) {
+ /* only a few blocks are expected/allowed to be large,
+ * and will be recursed into, whereas others will be read and must fit */
+ /* fatal in streaming case, as we can't step over easily */
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX, (NULL),
+ ("reading large block of size %" G_GUINT64_FORMAT " not supported; "
+ "file might be corrupt.", bytes));
+ return GST_FLOW_ERROR;
+ } else {
+ return GST_FLOW_OK;
+ }
+}
+
+#if 0
+/* returns TRUE if we truly are in error state, and should give up */
+/* NOTE: compiled out (#if 0) — depended on the also-disabled
+ * gst_matroska_parse_search_cluster() above. Last-resort resync: scan for
+ * the next cluster after the current offset and resume there if found. */
+static inline gboolean
+gst_matroska_parse_check_parse_error (GstMatroskaParse * parse)
+{
+ gint64 pos;
+
+ /* sigh, one last attempt above and beyond call of duty ...;
+ * search for cluster mark following current pos */
+ pos = parse->common.offset;
+ GST_WARNING_OBJECT (parse, "parse error, looking for next cluster");
+ if (gst_matroska_parse_search_cluster (parse, &pos) != GST_FLOW_OK) {
+ /* did not work, give up */
+ return TRUE;
+ } else {
+ GST_DEBUG_OBJECT (parse, "... found at %" G_GUINT64_FORMAT, pos);
+ /* try that position */
+ parse->common.offset = pos;
+ return FALSE;
+ }
+}
+#endif
+
+/* initializes @ebml with @bytes from input stream at current offset.
+ * Returns EOS if insufficient available,
+ * ERROR if too much was attempted to read. */
+static inline GstFlowReturn
+gst_matroska_parse_take (GstMatroskaParse * parse, guint64 bytes,
+ GstEbmlRead * ebml)
+{
+ GstBuffer *buffer = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ GST_LOG_OBJECT (parse, "taking %" G_GUINT64_FORMAT " bytes for parsing",
+ bytes);
+ ret = gst_matroska_parse_check_read_size (parse, bytes);
+ if (G_UNLIKELY (ret != GST_FLOW_OK)) {
+ /* otherwise fatal */
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+ /* not enough buffered yet: return EOS so the chain function waits for
+ * more data instead of erroring out */
+ if (gst_adapter_available (parse->common.adapter) < bytes)
+ return GST_FLOW_EOS;
+
+ buffer = gst_adapter_take_buffer (parse->common.adapter, bytes);
+ if (G_LIKELY (buffer)) {
+ /* hand the taken buffer to the ebml reader (it owns it from here on)
+ * and advance our running stream offset past the consumed bytes */
+ gst_ebml_read_init (ebml, GST_ELEMENT_CAST (parse), buffer,
+ parse->common.offset);
+ parse->common.offset += bytes;
+ } else {
+ ret = GST_FLOW_ERROR;
+ }
+exit:
+
+ return ret;
+}
+
+/* gst_matroska_parse_check_seekability:
+ * Query the upstream peer for BYTES seekability and store the outcome in
+ * parse->seekable.  If the peer claims seekability but cannot report a
+ * sensible byte range, treat the stream as non-seekable in practice. */
+static void
+gst_matroska_parse_check_seekability (GstMatroskaParse * parse)
+{
+ GstQuery *query;
+ gboolean seekable = FALSE;
+ gint64 start = -1, stop = -1;
+
+ query = gst_query_new_seeking (GST_FORMAT_BYTES);
+ if (!gst_pad_peer_query (parse->common.sinkpad, query)) {
+ GST_DEBUG_OBJECT (parse, "seeking query failed");
+ goto done;
+ }
+
+ gst_query_parse_seeking (query, NULL, &seekable, &start, &stop);
+
+ /* try harder to query upstream size if we didn't get it the first time */
+ if (seekable && stop == -1) {
+ GST_DEBUG_OBJECT (parse, "doing duration query to fix up unset stop");
+ gst_pad_peer_query_duration (parse->common.sinkpad, GST_FORMAT_BYTES,
+ &stop);
+ }
+
+ /* if upstream doesn't know the size, it's likely that it's not seekable in
+ * practice even if it technically may be seekable */
+ if (seekable && (start != 0 || stop <= start)) {
+ GST_DEBUG_OBJECT (parse, "seekable but unknown start/stop -> disable");
+ seekable = FALSE;
+ }
+
+done:
+ /* start/stop are gint64, so use the signed 64-bit format macro: with the
+ * unsigned macro a failed query (-1) would print as 18446744073709551615
+ * and the format/argument types would not match */
+ GST_INFO_OBJECT (parse, "seekable: %d (%" G_GINT64_FORMAT " - %"
+ G_GINT64_FORMAT ")", seekable, start, stop);
+ parse->seekable = seekable;
+
+ gst_query_unref (query);
+}
+
+#if 0
+/* Disabled pull-mode helper: when a Cluster shows up before the Tracks
+ * element, scan forward for Tracks, parse it, then restore the original
+ * read offset. */
+static GstFlowReturn
+gst_matroska_parse_find_tracks (GstMatroskaParse * parse)
+{
+ guint32 id;
+ guint64 before_pos;
+ guint64 length;
+ guint needed;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ GST_WARNING_OBJECT (parse,
+ "Found Cluster element before Tracks, searching Tracks");
+
+ /* remember */
+ before_pos = parse->common.offset;
+
+ /* Search Tracks element */
+ while (TRUE) {
+ ret = gst_matroska_read_common_peek_id_length_pull (&parse->common,
+ GST_ELEMENT_CAST (parse), &id, &length, &needed);
+ if (ret != GST_FLOW_OK)
+ break;
+
+ if (id != GST_MATROSKA_ID_TRACKS) {
+ /* we may be skipping large cluster here, so forego size check etc */
+ /* ... but we can't skip undefined size; force error */
+ if (length == G_MAXUINT64) {
+ ret = gst_matroska_parse_check_read_size (parse, length);
+ break;
+ } else {
+ /* the read position lives in the shared common state;
+ * GstMatroskaParse itself has no 'offset' member */
+ parse->common.offset += needed;
+ parse->common.offset += length;
+ }
+ continue;
+ }
+
+ /* will lead to track parsing ... */
+ ret = gst_matroska_parse_parse_id (parse, id, length, needed);
+ break;
+ }
+
+ /* seek back */
+ parse->common.offset = before_pos;
+
+ return ret;
+}
+#endif
+
+/* GST_READ_CHECK:
+ * Evaluate @stmt (a GstFlowReturn expression) into the local 'ret'; on
+ * anything but GST_FLOW_OK jump to the enclosing 'read_error' label.
+ * GST_FLOW_OVERFLOW is downgraded to GST_FLOW_OK first, so an oversized
+ * element aborts the parse without raising an error. */
+#define GST_READ_CHECK(stmt) \
+G_STMT_START { \
+ if (G_UNLIKELY ((ret = (stmt)) != GST_FLOW_OK)) { \
+ if (ret == GST_FLOW_OVERFLOW) { \
+ ret = GST_FLOW_OK; \
+ } \
+ goto read_error; \
+ } \
+} G_STMT_END
+
+/* gst_matroska_parse_accumulate_streamheader:
+ * Append @buffer to the streamheader being collected for the src caps.
+ * Takes its own ref on @buffer; the caller keeps ownership. */
+static void
+gst_matroska_parse_accumulate_streamheader (GstMatroskaParse * parse,
+ GstBuffer * buffer)
+{
+ if (parse->pushed_headers) {
+ GST_WARNING_OBJECT (parse,
+ "Accumulating headers, but headers are already pushed");
+ }
+
+ if (parse->streamheader) {
+ parse->streamheader = gst_buffer_append (parse->streamheader,
+ gst_buffer_ref (buffer));
+ } else {
+ parse->streamheader = gst_buffer_ref (buffer);
+ }
+
+ GST_DEBUG ("%" G_GSIZE_FORMAT, gst_buffer_get_size (parse->streamheader));
+}
+
+/* gst_matroska_parse_output:
+ * Push @buffer on the src pad, marking it as a delta unit unless
+ * @keyframe.  On the first call it also sets src caps (including the
+ * accumulated streamheader), sends a pending segment event and pushes
+ * the streamheader itself.  Takes a ref on @buffer for the push; the
+ * caller keeps its own reference. */
+static GstFlowReturn
+gst_matroska_parse_output (GstMatroskaParse * parse, GstBuffer * buffer,
+ gboolean keyframe)
+{
+ GstFlowReturn ret;
+
+ if (!parse->pushed_headers) {
+ GstCaps *caps;
+ GstStructure *s;
+ GValue streamheader = { 0 };
+ GValue bufval = { 0 };
+ GstBuffer *buf;
+
+ caps = gst_pad_get_current_caps (parse->common.sinkpad);
+ if (caps == NULL) {
+ /* no caps negotiated on the sink pad: forge minimal ones from the
+ * stream's EBML type */
+ caps = gst_matroska_parse_forge_caps (parse->common.is_webm,
+ parse->common.has_video);
+ } else
+ caps = gst_caps_make_writable (caps);
+
+ /* wrap a copy of the accumulated headers in a one-element GstArray
+ * and attach it to the caps as "streamheader" */
+ s = gst_caps_get_structure (caps, 0);
+ g_value_init (&streamheader, GST_TYPE_ARRAY);
+ g_value_init (&bufval, GST_TYPE_BUFFER);
+ buf = gst_buffer_copy (parse->streamheader);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+ gst_value_set_buffer (&bufval, buf);
+ gst_buffer_unref (buf);
+ gst_value_array_append_value (&streamheader, &bufval);
+ g_value_unset (&bufval);
+ gst_structure_set_value (s, "streamheader", &streamheader);
+ g_value_unset (&streamheader);
+ //gst_caps_replace (parse->caps, caps);
+ gst_pad_set_caps (parse->srcpad, caps);
+
+ if (parse->need_newsegment) {
+ gst_pad_push_event (parse->srcpad,
+ gst_event_new_segment (&parse->common.segment));
+ parse->need_newsegment = FALSE;
+ }
+
+ /* also push the headers as the first buffer downstream */
+ buf = gst_buffer_copy (parse->streamheader);
+ gst_caps_unref (caps);
+
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER);
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ ret = gst_pad_push (parse->srcpad, buf);
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (parse, "Failed to push buffer");
+ return ret;
+ }
+
+ parse->pushed_headers = TRUE;
+ }
+
+ if (!keyframe) {
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+ } else {
+ GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+ }
+ /* remember the last valid timestamp and reuse it for buffers that
+ * carry none, so downstream always sees timestamped buffers */
+ if (GST_BUFFER_TIMESTAMP (buffer) != GST_CLOCK_TIME_NONE) {
+ parse->last_timestamp = GST_BUFFER_TIMESTAMP (buffer);
+ } else {
+ GST_BUFFER_TIMESTAMP (buffer) = parse->last_timestamp;
+ }
+
+ return gst_pad_push (parse->srcpad, gst_buffer_ref (buffer));
+}
+
+/* gst_matroska_parse_parse_id:
+ * Central element dispatcher: consume the element with @id / @length /
+ * @needed (prefix size) from the adapter and act on it according to the
+ * current read state.  Headers are accumulated for the src caps; cluster
+ * data is pushed downstream via gst_matroska_parse_output().
+ * Returns GST_FLOW_EOS when more data is needed, GST_FLOW_ERROR on
+ * fatal parse problems. */
+static GstFlowReturn
+gst_matroska_parse_parse_id (GstMatroskaParse * parse, guint32 id,
+ guint64 length, guint needed)
+{
+ GstEbmlRead ebml = { 0, };
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint64 read;
+ //GstBuffer *buffer;
+
+ GST_DEBUG_OBJECT (parse, "Parsing Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", prefix %d", id, length, needed);
+
+#if 0
+ if (gst_adapter_available (parse->adapter) >= length + needed) {
+ buffer = gst_adapter_take_buffer (parse->adapter, length + needed);
+ gst_pad_push (parse->srcpad, buffer);
+ } else {
+ ret = GST_FLOW_EOS;
+ }
+ //GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+
+ return ret;
+#endif
+
+
+
+ /* if we plan to read and parse this element, we need prefix (id + length)
+ * and the contents */
+ /* mind about overflow wrap-around when dealing with undefined size */
+ read = length;
+ if (G_LIKELY (length != G_MAXUINT64))
+ read += needed;
+
+ switch (parse->common.state) {
+ case GST_MATROSKA_READ_STATE_START:
+ /* only the EBML header is acceptable before anything else */
+ switch (id) {
+ case GST_EBML_ID_HEADER:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ ret = gst_matroska_read_common_parse_header (&parse->common, &ebml);
+ if (ret != GST_FLOW_OK)
+ goto parse_failed;
+ parse->common.state = GST_MATROSKA_READ_STATE_SEGMENT;
+ gst_matroska_parse_check_seekability (parse);
+ gst_matroska_parse_accumulate_streamheader (parse, ebml.buf);
+ break;
+ default:
+ goto invalid_header;
+ break;
+ }
+ break;
+ case GST_MATROSKA_READ_STATE_SEGMENT:
+ switch (id) {
+ case GST_MATROSKA_ID_SEGMENT:
+ /* eat segment prefix */
+ GST_READ_CHECK (gst_matroska_parse_take (parse, needed, &ebml));
+ GST_DEBUG_OBJECT (parse,
+ "Found Segment start at offset %" G_GUINT64_FORMAT " with size %"
+ G_GUINT64_FORMAT, parse->common.offset, length);
+ /* seeks are from the beginning of the segment,
+ * after the segment ID/length */
+ parse->common.ebml_segment_start = parse->common.offset;
+ if (length == 0)
+ length = G_MAXUINT64;
+ parse->common.ebml_segment_length = length;
+ parse->common.state = GST_MATROSKA_READ_STATE_HEADER;
+ gst_matroska_parse_accumulate_streamheader (parse, ebml.buf);
+ break;
+ default:
+ GST_WARNING_OBJECT (parse,
+ "Expected a Segment ID (0x%x), but received 0x%x!",
+ GST_MATROSKA_ID_SEGMENT, id);
+ GST_READ_CHECK (gst_matroska_parse_take (parse, needed, &ebml));
+ gst_matroska_parse_accumulate_streamheader (parse, ebml.buf);
+ break;
+ }
+ break;
+ case GST_MATROSKA_READ_STATE_SCANNING:
+ if (id != GST_MATROSKA_ID_CLUSTER &&
+ id != GST_MATROSKA_ID_CLUSTERTIMECODE) {
+ /* we need to skip byte per byte if we are scanning for a new cluster */
+ read = 1;
+ goto skip;
+ } else {
+ GST_LOG_OBJECT (parse, "Resync done, new cluster found!");
+ parse->common.start_resync_offset = -1;
+ parse->common.state = parse->common.state_to_restore;
+ }
+ /* fall-through */
+ case GST_MATROSKA_READ_STATE_HEADER:
+ case GST_MATROSKA_READ_STATE_DATA:
+ case GST_MATROSKA_READ_STATE_SEEK:
+ switch (id) {
+ case GST_MATROSKA_ID_SEGMENTINFO:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ if (!parse->common.segmentinfo_parsed) {
+ ret = gst_matroska_read_common_parse_info (&parse->common,
+ GST_ELEMENT_CAST (parse), &ebml);
+ if (ret == GST_FLOW_OK)
+ gst_matroska_parse_send_tags (parse);
+ }
+ gst_matroska_parse_accumulate_streamheader (parse, ebml.buf);
+ break;
+ case GST_MATROSKA_ID_TRACKS:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ if (!parse->tracks_parsed) {
+ ret = gst_matroska_parse_parse_tracks (parse, &ebml);
+ }
+ gst_matroska_parse_accumulate_streamheader (parse, ebml.buf);
+ break;
+ case GST_MATROSKA_ID_CLUSTER:
+ if (G_UNLIKELY (!parse->tracks_parsed)) {
+ GST_DEBUG_OBJECT (parse, "Cluster before Track");
+ goto not_streamable;
+ }
+ /* first cluster marks the transition from header to data */
+ if (G_UNLIKELY (parse->common.state
+ == GST_MATROSKA_READ_STATE_HEADER)) {
+ parse->common.state = GST_MATROSKA_READ_STATE_DATA;
+ parse->first_cluster_offset = parse->common.offset;
+ GST_DEBUG_OBJECT (parse, "signaling no more pads");
+ }
+ parse->cluster_time = GST_CLOCK_TIME_NONE;
+ parse->cluster_offset = parse->common.offset;
+ if (G_UNLIKELY (!parse->seek_first && parse->seek_block)) {
+ GST_DEBUG_OBJECT (parse, "seek target block %" G_GUINT64_FORMAT
+ " not found in Cluster, trying next Cluster's first block instead",
+ parse->seek_block);
+ parse->seek_block = 0;
+ }
+ parse->seek_first = FALSE;
+ /* record next cluster for recovery */
+ if (read != G_MAXUINT64)
+ parse->next_cluster_offset = parse->cluster_offset + read;
+ /* eat cluster prefix */
+ GST_READ_CHECK (gst_matroska_parse_take (parse, needed, &ebml));
+ ret = gst_matroska_parse_output (parse, ebml.buf, TRUE);
+ //gst_matroska_parse_accumulate_streamheader (parse, ebml.buf);
+ break;
+ case GST_MATROSKA_ID_CLUSTERTIMECODE:
+ {
+ guint64 num;
+
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ if ((ret = gst_ebml_read_uint (&ebml, &id, &num)) != GST_FLOW_OK)
+ goto parse_failed;
+ GST_DEBUG_OBJECT (parse, "ClusterTimeCode: %" G_GUINT64_FORMAT, num);
+ parse->cluster_time = num;
+#if 0
+ if (parse->common.element_index) {
+ if (parse->common.element_index_writer_id == -1)
+ gst_index_get_writer_id (parse->common.element_index,
+ GST_OBJECT (parse), &parse->common.element_index_writer_id);
+ GST_LOG_OBJECT (parse, "adding association %" GST_TIME_FORMAT "-> %"
+ G_GUINT64_FORMAT " for writer id %d",
+ GST_TIME_ARGS (parse->cluster_time), parse->cluster_offset,
+ parse->common.element_index_writer_id);
+ gst_index_add_association (parse->common.element_index,
+ parse->common.element_index_writer_id,
+ GST_ASSOCIATION_FLAG_KEY_UNIT,
+ GST_FORMAT_TIME, parse->cluster_time,
+ GST_FORMAT_BYTES, parse->cluster_offset, NULL);
+ }
+#endif
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ }
+ case GST_MATROSKA_ID_BLOCKGROUP:
+ if (!gst_matroska_parse_seek_block (parse))
+ goto skip;
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ DEBUG_ELEMENT_START (parse, &ebml, "BlockGroup");
+ if ((ret = gst_ebml_read_master (&ebml, &id)) == GST_FLOW_OK) {
+ ret = gst_matroska_parse_parse_blockgroup_or_simpleblock (parse,
+ &ebml, parse->cluster_time, parse->cluster_offset, FALSE);
+ }
+ DEBUG_ELEMENT_STOP (parse, &ebml, "BlockGroup", ret);
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ case GST_MATROSKA_ID_SIMPLEBLOCK:
+ if (!gst_matroska_parse_seek_block (parse))
+ goto skip;
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ DEBUG_ELEMENT_START (parse, &ebml, "SimpleBlock");
+ ret = gst_matroska_parse_parse_blockgroup_or_simpleblock (parse,
+ &ebml, parse->cluster_time, parse->cluster_offset, TRUE);
+ DEBUG_ELEMENT_STOP (parse, &ebml, "SimpleBlock", ret);
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ case GST_MATROSKA_ID_ATTACHMENTS:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ if (!parse->common.attachments_parsed) {
+ ret = gst_matroska_read_common_parse_attachments (&parse->common,
+ GST_ELEMENT_CAST (parse), &ebml);
+ if (ret == GST_FLOW_OK)
+ gst_matroska_parse_send_tags (parse);
+ }
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ case GST_MATROSKA_ID_TAGS:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ ret = gst_matroska_read_common_parse_metadata (&parse->common,
+ GST_ELEMENT_CAST (parse), &ebml);
+ if (ret == GST_FLOW_OK)
+ gst_matroska_parse_send_tags (parse);
+ gst_matroska_parse_accumulate_streamheader (parse, ebml.buf);
+ break;
+ case GST_MATROSKA_ID_CHAPTERS:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ ret = gst_matroska_read_common_parse_chapters (&parse->common, &ebml);
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ case GST_MATROSKA_ID_SEEKHEAD:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ ret = gst_matroska_parse_parse_contents (parse, &ebml);
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ case GST_MATROSKA_ID_CUES:
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ if (!parse->common.index_parsed) {
+ ret = gst_matroska_read_common_parse_index (&parse->common, &ebml);
+ /* only push based; delayed index building */
+ if (ret == GST_FLOW_OK
+ && parse->common.state == GST_MATROSKA_READ_STATE_SEEK) {
+ GstEvent *event;
+
+ GST_OBJECT_LOCK (parse);
+ event = parse->seek_event;
+ parse->seek_event = NULL;
+ GST_OBJECT_UNLOCK (parse);
+
+ g_assert (event);
+ /* unlikely to fail, since we managed to seek to this point */
+ if (!gst_matroska_parse_handle_seek_event (parse, NULL, event))
+ goto seek_failed;
+ /* resume data handling, main thread clear to seek again */
+ GST_OBJECT_LOCK (parse);
+ parse->common.state = GST_MATROSKA_READ_STATE_DATA;
+ GST_OBJECT_UNLOCK (parse);
+ }
+ }
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ case GST_MATROSKA_ID_POSITION:
+ case GST_MATROSKA_ID_PREVSIZE:
+ case GST_MATROSKA_ID_ENCRYPTEDBLOCK:
+ case GST_MATROSKA_ID_SILENTTRACKS:
+ GST_DEBUG_OBJECT (parse,
+ "Skipping Cluster subelement 0x%x - ignoring", id);
+ /* fall-through */
+ default:
+ skip:
+ /* unknown/ignored element: consume it but still forward the raw
+ * bytes downstream so the output stream stays complete */
+ GST_DEBUG_OBJECT (parse, "skipping Element 0x%x", id);
+ GST_READ_CHECK (gst_matroska_parse_take (parse, read, &ebml));
+ gst_matroska_parse_output (parse, ebml.buf, FALSE);
+ break;
+ }
+ break;
+ }
+
+ if (ret == GST_FLOW_PARSE)
+ goto parse_failed;
+
+exit:
+ gst_ebml_read_clear (&ebml);
+ return ret;
+
+ /* ERRORS */
+read_error:
+ {
+ /* simply exit, maybe not enough data yet */
+ /* no ebml to clear if read error */
+ return ret;
+ }
+parse_failed:
+ {
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX, (NULL),
+ ("Failed to parse Element 0x%x", id));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+not_streamable:
+ {
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX, (NULL),
+ ("File layout does not permit streaming"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+#if 0
+no_tracks:
+ {
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX, (NULL),
+ ("No Tracks element found"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+#endif
+invalid_header:
+ {
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX, (NULL), ("Invalid header"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+seek_failed:
+ {
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX, (NULL), ("Failed to seek"));
+ ret = GST_FLOW_ERROR;
+ goto exit;
+ }
+}
+
+#if 0
+/* NOTE: disabled pull-mode task loop; the element runs push-based via
+ * gst_matroska_parse_chain().  Kept for reference. */
+static void
+gst_matroska_parse_loop (GstPad * pad)
+{
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (GST_PAD_PARENT (pad));
+ GstFlowReturn ret;
+ guint32 id;
+ guint64 length;
+ guint needed;
+
+ /* If we have to close a segment, send a new segment to do this now */
+ if (G_LIKELY (parse->common.state == GST_MATROSKA_READ_STATE_DATA)) {
+ if (G_UNLIKELY (parse->close_segment)) {
+ gst_matroska_parse_send_event (parse, parse->close_segment);
+ parse->close_segment = NULL;
+ }
+ if (G_UNLIKELY (parse->new_segment)) {
+ gst_matroska_parse_send_event (parse, parse->new_segment);
+ parse->new_segment = NULL;
+ }
+ }
+
+ ret = gst_matroska_read_common_peek_id_length_pull (&parse->common,
+ GST_ELEMENT_CAST (parse), &id, &length, &needed);
+ if (ret == GST_FLOW_EOS)
+ goto eos;
+ if (ret != GST_FLOW_OK) {
+ if (gst_matroska_parse_check_parse_error (parse))
+ goto pause;
+ else
+ return;
+ }
+
+ GST_LOG_OBJECT (parse, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", needed %d", parse->offset, id,
+ length, needed);
+
+ ret = gst_matroska_parse_parse_id (parse, id, length, needed);
+ if (ret == GST_FLOW_EOS)
+ goto eos;
+ if (ret != GST_FLOW_OK)
+ goto pause;
+
+ /* check if we're at the end of a configured segment */
+ if (G_LIKELY (parse->src->len)) {
+ guint i;
+
+ g_assert (parse->num_streams == parse->src->len);
+ for (i = 0; i < parse->src->len; i++) {
+ GstMatroskaTrackContext *context = g_ptr_array_index (parse->src, i);
+ GST_DEBUG_OBJECT (context->pad, "pos %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (context->pos));
+ if (context->eos == FALSE)
+ goto next;
+ }
+
+ GST_INFO_OBJECT (parse, "All streams are EOS");
+ ret = GST_FLOW_EOS;
+ goto eos;
+ }
+
+next:
+ if (G_UNLIKELY (parse->offset ==
+ gst_matroska_read_common_get_length (&parse->common))) {
+ GST_LOG_OBJECT (parse, "Reached end of stream");
+ ret = GST_FLOW_EOS;
+ goto eos;
+ }
+
+ return;
+
+ /* ERRORS */
+eos:
+ {
+ if (parse->segment.rate < 0.0) {
+ ret = gst_matroska_parse_seek_to_previous_keyframe (parse);
+ if (ret == GST_FLOW_OK)
+ return;
+ }
+ /* fall-through */
+ }
+pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+ gboolean push_eos = FALSE;
+
+ GST_LOG_OBJECT (parse, "pausing task, reason %s", reason);
+ parse->segment_running = FALSE;
+ gst_pad_pause_task (parse->common.sinkpad);
+
+ if (ret == GST_FLOW_EOS) {
+ /* perform EOS logic */
+
+ /* Close the segment, i.e. update segment stop with the duration
+ * if no stop was set */
+ if (GST_CLOCK_TIME_IS_VALID (parse->last_stop_end) &&
+ !GST_CLOCK_TIME_IS_VALID (parse->segment.stop)) {
+ GstEvent *event =
+ gst_event_new_new_segment_full (TRUE, parse->segment.rate,
+ parse->segment.applied_rate, parse->segment.format,
+ parse->segment.start,
+ MAX (parse->last_stop_end, parse->segment.start),
+ parse->segment.time);
+ gst_matroska_parse_send_event (parse, event);
+ }
+
+ if (parse->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gint64 stop;
+
+ /* for segment playback we need to post when (in stream time)
+ * we stopped, this is either stop (when set) or the duration. */
+ if ((stop = parse->segment.stop) == -1)
+ stop = parse->last_stop_end;
+
+ GST_LOG_OBJECT (parse, "Sending segment done, at end of segment");
+ gst_element_post_message (GST_ELEMENT (parse),
+ gst_message_new_segment_done (GST_OBJECT (parse), GST_FORMAT_TIME,
+ stop));
+ gst_matroska_parse_send_event (parse,
+ gst_event_new_segment_done (GST_FORMAT_TIME, stop));
+ } else {
+ push_eos = TRUE;
+ }
+ } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+ /* for fatal errors we post an error message */
+ GST_ELEMENT_FLOW_ERROR (parse, ret);
+ push_eos = TRUE;
+ }
+ if (push_eos) {
+ /* send EOS, and prevent hanging if no streams yet */
+ GST_LOG_OBJECT (parse, "Sending EOS, at end of stream");
+ if (!gst_matroska_parse_send_event (parse, gst_event_new_eos ()) &&
+ (ret == GST_FLOW_EOS)) {
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ }
+ }
+ return;
+ }
+}
+#endif
+
+/*
+ * Create and push a flushing seek event upstream
+ */
+static gboolean
+perform_seek_to_offset (GstMatroskaParse * parse, guint64 offset)
+{
+ GstEvent *seek_event;
+ gboolean result;
+
+ GST_DEBUG_OBJECT (parse, "Seeking to %" G_GUINT64_FORMAT, offset);
+
+ /* build a flushing, accurate byte seek to the requested position */
+ seek_event = gst_event_new_seek (1.0, GST_FORMAT_BYTES,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, offset,
+ GST_SEEK_TYPE_NONE, -1);
+
+ result = gst_pad_push_event (parse->common.sinkpad, seek_event);
+
+ /* newsegment event will update offset */
+ return result;
+}
+
+/*
+ * Forge empty default caps when all we know is the stream's EBML
+ * type and whether it has video or not.
+ *
+ * FIXME: Do something with video/x-matroska-3d if possible
+ */
+static GstCaps *
+gst_matroska_parse_forge_caps (gboolean is_webm, gboolean has_video)
+{
+ const gchar *media_type;
+
+ /* pick the media type name from the EBML doctype and the presence of
+ * video, then build empty caps for it */
+ if (is_webm)
+ media_type = has_video ? "video/webm" : "audio/webm";
+ else
+ media_type = has_video ? "video/x-matroska" : "audio/x-matroska";
+
+ return gst_caps_new_empty_simple (media_type);
+}
+
+/* gst_matroska_parse_chain:
+ * Sink pad chain function: buffers incoming data in the adapter and
+ * repeatedly peeks/consumes complete EBML elements via
+ * gst_matroska_parse_parse_id() until more data is needed. */
+static GstFlowReturn
+gst_matroska_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (parent);
+ guint available;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint needed = 0;
+ guint32 id;
+ guint64 length;
+
+ if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (buffer))) {
+ /* discontinuity: drop buffered bytes and reset per-stream state */
+ GST_DEBUG_OBJECT (parse, "got DISCONT");
+ gst_adapter_clear (parse->common.adapter);
+ GST_OBJECT_LOCK (parse);
+ gst_matroska_read_common_reset_streams (&parse->common,
+ GST_CLOCK_TIME_NONE, FALSE);
+ GST_OBJECT_UNLOCK (parse);
+ }
+
+ gst_adapter_push (parse->common.adapter, buffer);
+ buffer = NULL;
+
+next:
+ available = gst_adapter_available (parse->common.adapter);
+
+ ret = gst_matroska_read_common_peek_id_length_push (&parse->common,
+ GST_ELEMENT_CAST (parse), &id, &length, &needed);
+ if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)) {
+ if (parse->common.ebml_segment_length != G_MAXUINT64
+ && parse->common.offset >=
+ parse->common.ebml_segment_start + parse->common.ebml_segment_length) {
+ return GST_FLOW_EOS;
+ } else {
+ /*
+ * parsing error: we need to flush a byte from the adapter if the id is
+ * not a cluster and so on until we found a new cluster or the
+ * INVALID_DATA_THRESHOLD is exceeded, we reuse gst_matroska_parse_parse_id
+ * setting the state to GST_MATROSKA_READ_STATE_SCANNING so the bytes
+ * are skipped until a new cluster is found
+ */
+ gint64 bytes_scanned;
+ if (parse->common.start_resync_offset == -1) {
+ parse->common.start_resync_offset = parse->common.offset;
+ parse->common.state_to_restore = parse->common.state;
+ }
+ bytes_scanned = parse->common.offset - parse->common.start_resync_offset;
+ if (bytes_scanned <= INVALID_DATA_THRESHOLD) {
+ GST_WARNING_OBJECT (parse,
+ "parse error, looking for next cluster, actual offset %"
+ G_GUINT64_FORMAT ", start resync offset %" G_GUINT64_FORMAT,
+ parse->common.offset, parse->common.start_resync_offset);
+ parse->common.state = GST_MATROSKA_READ_STATE_SCANNING;
+ ret = GST_FLOW_OK;
+ } else {
+ GST_WARNING_OBJECT (parse,
+ "unrecoverable parse error, next cluster not found and threshold "
+ "exceeded, bytes scanned %" G_GINT64_FORMAT, bytes_scanned);
+ return ret;
+ }
+ }
+ }
+
+ GST_LOG_OBJECT (parse, "Offset %" G_GUINT64_FORMAT ", Element id 0x%x, "
+ "size %" G_GUINT64_FORMAT ", needed %d, available %d",
+ parse->common.offset, id, length, needed, available);
+
+ /* not even the element prefix is buffered yet: wait for more data */
+ if (needed > available)
+ return GST_FLOW_OK;
+
+ ret = gst_matroska_parse_parse_id (parse, id, length, needed);
+ if (ret == GST_FLOW_EOS) {
+ /* need more data */
+ return GST_FLOW_OK;
+ } else if (ret != GST_FLOW_OK) {
+ return ret;
+ } else
+ goto next;
+}
+
+/* gst_matroska_parse_handle_sink_event:
+ * Sink pad event handler.  BYTE segments (e.g. after an upstream seek)
+ * are consumed and translated into internal state; EOS is validated
+ * against header/stream state; FLUSH_STOP clears buffered data before
+ * default handling. */
+static gboolean
+gst_matroska_parse_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ gboolean res = TRUE;
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (GST_PAD_PARENT (pad));
+
+ GST_DEBUG_OBJECT (parse,
+ "have event type %s: %p on sink pad", GST_EVENT_TYPE_NAME (event), event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ {
+ const GstSegment *segment;
+
+ /* some debug output */
+ gst_event_parse_segment (event, &segment);
+ GST_DEBUG_OBJECT (parse,
+ "received format %d newsegment %" GST_SEGMENT_FORMAT,
+ segment->format, segment);
+
+ if (parse->common.state < GST_MATROSKA_READ_STATE_DATA) {
+ GST_DEBUG_OBJECT (parse, "still starting");
+ goto exit;
+ }
+
+ /* we only expect a BYTE segment, e.g. following a seek */
+ if (segment->format != GST_FORMAT_BYTES) {
+ GST_DEBUG_OBJECT (parse, "unsupported segment format, ignoring");
+ goto exit;
+ }
+
+ GST_DEBUG_OBJECT (parse, "clearing segment state");
+ /* clear current segment leftover */
+ gst_adapter_clear (parse->common.adapter);
+ /* and some streaming setup */
+ parse->common.offset = segment->start;
+ /* do not know where we are;
+ * need to come across a cluster and generate newsegment */
+ parse->common.segment.position = GST_CLOCK_TIME_NONE;
+ parse->cluster_time = GST_CLOCK_TIME_NONE;
+ parse->cluster_offset = 0;
+ parse->need_newsegment = TRUE;
+ /* but keep some of the upstream segment */
+ parse->common.segment.rate = segment->rate;
+ exit:
+ /* chain will send initial newsegment after pads have been added,
+ * or otherwise come up with one */
+ GST_DEBUG_OBJECT (parse, "eating event");
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ case GST_EVENT_EOS:
+ {
+ /* EOS before headers or without streams is a demux error;
+ * otherwise forward it */
+ if (parse->common.state != GST_MATROSKA_READ_STATE_DATA
+ && parse->common.state != GST_MATROSKA_READ_STATE_SCANNING) {
+ gst_event_unref (event);
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX,
+ (NULL), ("got eos and didn't receive a complete header object"));
+ } else if (parse->common.num_streams == 0) {
+ GST_ELEMENT_ERROR (parse, STREAM, DEMUX,
+ (NULL), ("got eos but no streams (yet)"));
+ } else {
+ gst_matroska_parse_send_event (parse, event);
+ }
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ {
+ /* drop buffered data and reset position state, then let the
+ * default handler forward the event */
+ gst_adapter_clear (parse->common.adapter);
+ GST_OBJECT_LOCK (parse);
+ gst_matroska_read_common_reset_streams (&parse->common,
+ GST_CLOCK_TIME_NONE, TRUE);
+ GST_OBJECT_UNLOCK (parse);
+ parse->common.segment.position = GST_CLOCK_TIME_NONE;
+ parse->cluster_time = GST_CLOCK_TIME_NONE;
+ parse->cluster_offset = 0;
+ /* fall-through */
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return res;
+}
+
+#if 0
+/* NOTE: disabled GstIndex support; kept for reference */
+static void
+gst_matroska_parse_set_index (GstElement * element, GstIndex * index)
+{
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (element);
+
+ GST_OBJECT_LOCK (parse);
+ if (parse->common.element_index)
+ gst_object_unref (parse->common.element_index);
+ parse->common.element_index = index ? gst_object_ref (index) : NULL;
+ GST_OBJECT_UNLOCK (parse);
+ GST_DEBUG_OBJECT (parse, "Set index %" GST_PTR_FORMAT,
+ parse->common.element_index);
+}
+
+/* returns a new ref to the element index, or NULL if none is set
+ * (part of the disabled GstIndex support above) */
+static GstIndex *
+gst_matroska_parse_get_index (GstElement * element)
+{
+ GstIndex *result = NULL;
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (element);
+
+ GST_OBJECT_LOCK (parse);
+ if (parse->common.element_index)
+ result = gst_object_ref (parse->common.element_index);
+ GST_OBJECT_UNLOCK (parse);
+
+ GST_DEBUG_OBJECT (parse, "Returning index %" GST_PTR_FORMAT, result);
+
+ return result;
+}
+#endif
+
+/* gst_matroska_parse_change_state:
+ * GstElement state-change vfunc; resets all parser state when going
+ * from PAUSED to READY. */
+static GstStateChangeReturn
+gst_matroska_parse_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstMatroskaParse *parse = GST_MATROSKA_PARSE (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ /* handle upwards state changes here */
+ switch (transition) {
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* handle downwards state changes */
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_matroska_parse_reset (GST_ELEMENT (parse));
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
diff --git a/gst/matroska/matroska-parse.h b/gst/matroska/matroska-parse.h
new file mode 100644
index 0000000000..4a28fb86da
--- /dev/null
+++ b/gst/matroska/matroska-parse.h
@@ -0,0 +1,103 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2011 Debarshi Ray <rishi@gnu.org>
+ *
+ * matroska-parse.h: matroska file/stream parser definition
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MATROSKA_PARSE_H__
+#define __GST_MATROSKA_PARSE_H__
+
+#include <gst/gst.h>
+
+#include "ebml-read.h"
+#include "matroska-ids.h"
+#include "matroska-read-common.h"
+
+G_BEGIN_DECLS
+
+/* standard GObject cast and type-check boilerplate for GstMatroskaParse */
+#define GST_TYPE_MATROSKA_PARSE \
+ (gst_matroska_parse_get_type ())
+#define GST_MATROSKA_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MATROSKA_PARSE, GstMatroskaParse))
+#define GST_MATROSKA_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MATROSKA_PARSE, GstMatroskaParseClass))
+#define GST_IS_MATROSKA_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MATROSKA_PARSE))
+#define GST_IS_MATROSKA_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MATROSKA_PARSE))
+
+typedef struct _GstMatroskaParse {
+ GstElement parent;
+
+ /* < private > */
+
+ /* state shared with the matroska demuxer: sinkpad, adapter, offset,
+ * segment, read state machine, ... */
+ GstMatroskaReadCommon common;
+
+ /* pads */
+ GstPad *srcpad; /* single src pad the re-framed stream is pushed on */
+ GstClock *clock; /* NOTE(review): not used in the visible code - confirm */
+ guint num_v_streams; /* presumably per-type stream counters - verify */
+ guint num_a_streams;
+ guint num_t_streams;
+
+ GstBuffer *streamheader; /* header bytes accumulated for the src caps */
+ gboolean pushed_headers; /* TRUE once caps + headers went downstream */
+ GstClockTime last_timestamp; /* last valid buffer timestamp; reused for
+ * buffers that carry none */
+
+ /* state */
+ //gboolean streaming;
+ guint64 seek_block;
+ gboolean seek_first;
+
+ /* did we parse cues/tracks/segmentinfo already? */
+ gboolean tracks_parsed;
+ GList *seek_parsed;
+
+ /* keeping track of playback position */
+ gboolean segment_running;
+ GstClockTime last_stop_end;
+
+ GstEvent *close_segment;
+ GstEvent *new_segment;
+
+ /* some state saving */
+ GstClockTime cluster_time; /* timecode of the cluster being parsed */
+ guint64 cluster_offset; /* byte offset of the current cluster */
+ guint64 first_cluster_offset;
+ guint64 next_cluster_offset; /* recorded for error recovery/resync */
+
+ /* index stuff */
+ gboolean seekable; /* result of the upstream seekability check */
+ gboolean building_index;
+ guint64 index_offset;
+ GstEvent *seek_event; /* pending seek, handled once Cues are parsed */
+ gboolean need_newsegment;
+
+ /* reverse playback */
+ GArray *seek_index;
+ gint seek_entry;
+} GstMatroskaParse;
+
+/* class structure: no virtuals of its own beyond GstElementClass */
+typedef struct _GstMatroskaParseClass {
+ GstElementClass parent;
+} GstMatroskaParseClass;
+
+G_END_DECLS
+
+#endif /* __GST_MATROSKA_PARSE_H__ */
diff --git a/gst/matroska/matroska-read-common.c b/gst/matroska/matroska-read-common.c
new file mode 100644
index 0000000000..062044f641
--- /dev/null
+++ b/gst/matroska/matroska-read-common.c
@@ -0,0 +1,3411 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2006 Tim-Philipp Müller <tim centricular net>
+ * (c) 2008 Sebastian Dröge <slomo@circular-chaos.org>
+ * (c) 2011 Debarshi Ray <rishi@gnu.org>
+ *
+ * matroska-read-common.c: shared by matroska file/stream demuxer and parser
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdio.h>
+#include <string.h>
+
+#ifdef HAVE_ZLIB
+#include <zlib.h>
+#endif
+
+#ifdef HAVE_BZ2
+#include <bzlib.h>
+#endif
+
+#include <gst/tag/tag.h>
+#include <gst/base/gsttypefindhelper.h>
+#include <gst/base/gstbytewriter.h>
+
+#include "lzo.h"
+
+#include "ebml-read.h"
+#include "matroska-read-common.h"
+#include "matroska-ids.h"
+
+GST_DEBUG_CATEGORY (matroskareadcommon_debug);
+#define GST_CAT_DEFAULT matroskareadcommon_debug
+
+/* Trace helpers: log start/end of parsing a named master element */
+#define DEBUG_ELEMENT_START(common, ebml, element) \
+    GST_DEBUG_OBJECT (common->sinkpad, "Parsing " element " element at offset %" \
+        G_GUINT64_FORMAT, gst_ebml_read_get_pos (ebml))
+
+#define DEBUG_ELEMENT_STOP(common, ebml, element, ret) \
+    GST_DEBUG_OBJECT (common->sinkpad, "Parsing " element " element " \
+        " finished with '%s'", gst_flow_get_name (ret))
+
+/* Prefixes used when synthesizing TOC entry UIDs */
+#define GST_MATROSKA_TOC_UID_CHAPTER "chapter"
+#define GST_MATROSKA_TOC_UID_EDITION "edition"
+#define GST_MATROSKA_TOC_UID_EMPTY "empty"
+
+/* Accumulator used while matching TagTargets against TOC/track UIDs.
+ * NOTE(review): the fields are populated by code outside this chunk —
+ * confirm their semantics against the tag-parsing helpers. */
+typedef struct
+{
+  GstTagList *result;
+  guint64 target_type_value;
+  gchar *target_type;
+  gboolean audio_only;
+} TargetTypeContext;
+
+
+/* Decompress (or header-strip) the buffer in *data_out according to @algo.
+ * On success a newly allocated buffer is returned through *data_out and its
+ * size through *size_out; the input buffer is NOT freed here, the caller
+ * keeps ownership of it.  On failure *data_out / *size_out are set to
+ * NULL / 0 and FALSE is returned. */
+static gboolean
+gst_matroska_decompress_data (GstMatroskaTrackEncoding * enc,
+    gpointer * data_out, gsize * size_out,
+    GstMatroskaTrackCompressionAlgorithm algo)
+{
+  guint8 *new_data = NULL;
+  guint new_size = 0;
+  guint8 *data = *data_out;
+  /* NOTE(review): gsize is narrowed to guint here — buffers larger than
+   * G_MAXUINT would be truncated */
+  guint size = *size_out;
+  gboolean ret = TRUE;
+
+  if (algo == GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_ZLIB) {
+#ifdef HAVE_ZLIB
+    /* zlib encoded data: inflate, growing the output buffer in 4 KiB
+     * steps until the whole input is consumed */
+    z_stream zstream;
+    guint orig_size;
+    int result;
+
+    orig_size = size;
+    zstream.zalloc = (alloc_func) 0;
+    zstream.zfree = (free_func) 0;
+    zstream.opaque = (voidpf) 0;
+    if (inflateInit (&zstream) != Z_OK) {
+      GST_WARNING ("zlib initialization failed.");
+      ret = FALSE;
+      goto out;
+    }
+    zstream.next_in = (Bytef *) data;
+    zstream.avail_in = orig_size;
+    new_size = orig_size;
+    new_data = g_malloc (new_size);
+    zstream.avail_out = new_size;
+    zstream.next_out = (Bytef *) new_data;
+
+    do {
+      result = inflate (&zstream, Z_NO_FLUSH);
+      if (result == Z_STREAM_END) {
+        break;
+      } else if (result != Z_OK) {
+        GST_WARNING ("inflate() returned %d", result);
+        break;
+      }
+
+      new_size += 4096;
+      new_data = g_realloc (new_data, new_size);
+      zstream.next_out = (Bytef *) (new_data + zstream.total_out);
+      zstream.avail_out += 4096;
+    } while (zstream.avail_in > 0);
+
+    /* only a clean Z_STREAM_END counts as success */
+    if (result != Z_STREAM_END) {
+      ret = FALSE;
+      g_free (new_data);
+    } else {
+      new_size = zstream.total_out;
+    }
+    inflateEnd (&zstream);
+
+#else
+    GST_WARNING ("zlib encoded tracks not supported.");
+    ret = FALSE;
+    goto out;
+#endif
+  } else if (algo == GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_BZLIB) {
+#ifdef HAVE_BZ2
+    /* bzip2 encoded data: same grow-and-retry loop as the zlib path */
+    bz_stream bzstream;
+    guint orig_size;
+    int result;
+
+    bzstream.bzalloc = NULL;
+    bzstream.bzfree = NULL;
+    bzstream.opaque = NULL;
+    orig_size = size;
+
+    if (BZ2_bzDecompressInit (&bzstream, 0, 0) != BZ_OK) {
+      GST_WARNING ("bzip2 initialization failed.");
+      ret = FALSE;
+      goto out;
+    }
+
+    bzstream.next_in = (char *) data;
+    bzstream.avail_in = orig_size;
+    new_size = orig_size;
+    new_data = g_malloc (new_size);
+    bzstream.avail_out = new_size;
+    bzstream.next_out = (char *) new_data;
+
+    do {
+      result = BZ2_bzDecompress (&bzstream);
+      if (result == BZ_STREAM_END) {
+        break;
+      } else if (result != BZ_OK) {
+        GST_WARNING ("BZ2_bzDecompress() returned %d", result);
+        break;
+      }
+
+      new_size += 4096;
+      new_data = g_realloc (new_data, new_size);
+      bzstream.next_out = (char *) (new_data + bzstream.total_out_lo32);
+      bzstream.avail_out += 4096;
+    } while (bzstream.avail_in > 0);
+
+    if (result != BZ_STREAM_END) {
+      ret = FALSE;
+      g_free (new_data);
+    } else {
+      /* NOTE(review): only the low 32 bits of the output size are used */
+      new_size = bzstream.total_out_lo32;
+    }
+    BZ2_bzDecompressEnd (&bzstream);
+
+#else
+    GST_WARNING ("bzip2 encoded tracks not supported.");
+    ret = FALSE;
+    goto out;
+#endif
+  } else if (algo == GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_LZO1X) {
+    /* lzo encoded data: retry with a larger buffer while the decoder
+     * reports LZO_OUTPUT_FULL and input remains */
+    int result;
+    int orig_size, out_size;
+
+    orig_size = size;
+    out_size = size;
+    new_size = size;
+    new_data = g_malloc (new_size);
+
+    do {
+      orig_size = size;
+      out_size = new_size;
+
+      result = lzo1x_decode (new_data, &out_size, data, &orig_size);
+
+      if (orig_size > 0) {
+        new_size += 4096;
+        new_data = g_realloc (new_data, new_size);
+      }
+    } while (orig_size > 0 && result == LZO_OUTPUT_FULL);
+
+    /* out_size holds the unused tail of the buffer after decoding */
+    new_size -= out_size;
+
+    if (result != LZO_OUTPUT_FULL) {
+      GST_WARNING ("lzo decompression failed");
+      g_free (new_data);
+
+      ret = FALSE;
+      goto out;
+    }
+
+  } else if (algo == GST_MATROSKA_TRACK_COMPRESSION_ALGORITHM_HEADERSTRIP) {
+    /* header stripped encoded data: prepend the stored header bytes.
+     * NOTE(review): with comp_settings_length == 0 this falls through with
+     * ret == TRUE but new_data == NULL, so NULL/0 is returned as "success" —
+     * callers appear to guard against zero-length settings. */
+    if (enc->comp_settings_length > 0) {
+      new_data = g_malloc (size + enc->comp_settings_length);
+      new_size = size + enc->comp_settings_length;
+
+      memcpy (new_data, enc->comp_settings, enc->comp_settings_length);
+      memcpy (new_data + enc->comp_settings_length, data, size);
+    }
+  } else {
+    GST_ERROR ("invalid compression algorithm %d", algo);
+    ret = FALSE;
+  }
+
+out:
+
+  if (!ret) {
+    *data_out = NULL;
+    *size_out = 0;
+  } else {
+    *data_out = new_data;
+    *size_out = new_size;
+  }
+
+  return ret;
+}
+
+/* Decompress the ContentCompSettings of every encoding whose scope says it
+ * applies to the next ContentEncoding, replacing the stored settings with
+ * the decoded form.  Returns GST_FLOW_ERROR on malformed or undecodable
+ * encodings, GST_FLOW_OK otherwise (including for a NULL array). */
+GstFlowReturn
+gst_matroska_decode_content_encodings (GArray * encodings)
+{
+  gint idx;
+
+  if (encodings == NULL)
+    return GST_FLOW_OK;
+
+  for (idx = 0; idx < encodings->len; idx++) {
+    GstMatroskaTrackEncoding *enc;
+    gpointer settings;
+    gsize settings_len;
+
+    enc = &g_array_index (encodings, GstMatroskaTrackEncoding, idx);
+
+    /* only encodings scoped to the next ContentEncoding are relevant */
+    if (!(enc->scope & GST_MATROSKA_TRACK_ENCODING_SCOPE_NEXT_CONTENT_ENCODING))
+      continue;
+
+    /* Other than ENCODING_COMPRESSION not handled here */
+    if (enc->type != GST_MATROSKA_ENCODING_COMPRESSION)
+      continue;
+
+    /* there must actually be a following encoding for this scope */
+    if (idx + 1 >= encodings->len)
+      return GST_FLOW_ERROR;
+
+    if (enc->comp_settings_length == 0)
+      continue;
+
+    settings = enc->comp_settings;
+    settings_len = enc->comp_settings_length;
+
+    if (!gst_matroska_decompress_data (enc, &settings, &settings_len,
+            enc->comp_algo))
+      return GST_FLOW_ERROR;
+
+    /* swap in the decompressed settings, releasing the compressed form */
+    g_free (enc->comp_settings);
+    enc->comp_settings = settings;
+    enc->comp_settings_length = settings_len;
+  }
+
+  return GST_FLOW_OK;
+}
+
+/* Run the buffer in *data_out through every compression encoding whose
+ * scope matches @scope, in array order.  @free says whether the caller's
+ * original buffer may be freed once it has been superseded; intermediate
+ * buffers produced along the chain are always freed.  On failure *data_out
+ * and *size_out are reset to NULL / 0. */
+gboolean
+gst_matroska_decode_data (GArray * encodings, gpointer * data_out,
+    gsize * size_out, GstMatroskaTrackEncodingScope scope, gboolean free)
+{
+  gpointer data;
+  gsize size;
+  gboolean ret = TRUE;
+  gint i;
+
+  g_return_val_if_fail (encodings != NULL, FALSE);
+  g_return_val_if_fail (data_out != NULL && *data_out != NULL, FALSE);
+  g_return_val_if_fail (size_out != NULL, FALSE);
+
+  data = *data_out;
+  size = *size_out;
+
+  for (i = 0; i < encodings->len; i++) {
+    GstMatroskaTrackEncoding *enc =
+        &g_array_index (encodings, GstMatroskaTrackEncoding, i);
+    gpointer new_data = NULL;
+    gsize new_size = 0;
+
+    if ((enc->scope & scope) == 0)
+      continue;
+
+    /* Encryption not handled here; stop the chain, keeping what we have */
+    if (enc->type != GST_MATROSKA_ENCODING_COMPRESSION) {
+      ret = TRUE;
+      break;
+    }
+
+    new_data = data;
+    new_size = size;
+
+    ret =
+        gst_matroska_decompress_data (enc, &new_data, &new_size,
+        enc->comp_algo);
+
+    if (!ret)
+      break;
+
+    /* free the input of this stage: always if it was an intermediate
+     * buffer, but the caller's original only when @free allows it */
+    if ((data == *data_out && free) || (data != *data_out))
+      g_free (data);
+
+    data = new_data;
+    size = new_size;
+  }
+
+  if (!ret) {
+    /* same ownership rule on the failure path */
+    if ((data == *data_out && free) || (data != *data_out))
+      g_free (data);
+
+    *data_out = NULL;
+    *size_out = 0;
+  } else {
+    *data_out = data;
+    *size_out = size;
+  }
+
+  return ret;
+}
+
+/* This function parses the protection info of Block/SimpleBlock and extracts the
+ * IV and partitioning format (subsample) information.
+ * Set those parsed information into protection info structure @info_protect which
+ * will be added in protection metadata of the Gstbuffer.
+ * The subsamples format follows the same pssh box format in Common Encryption spec:
+ * subsample number + clear subsample size (16bit bigendian) | encrypted subsample size (32bit bigendian) | ...
+ * @encrypted is an output argument: TRUE if the current Block/SimpleBlock is encrypted else FALSE
+ */
+/* Parse the WebM signal byte, IV and optional subsample partition table at
+ * the head of a Block/SimpleBlock, filling "iv", "subsample_count" and
+ * "subsamples" into @info_protect and advancing *data_out / *size_out past
+ * the protection header to the actual payload.  Returns FALSE on malformed
+ * input. */
+gboolean
+gst_matroska_parse_protection_meta (gpointer * data_out, gsize * size_out,
+    GstStructure * info_protect, gboolean * encrypted)
+{
+  guint8 *data;
+  GstBuffer *buf_iv;
+  guint8 *data_iv;
+  guint8 *subsamples;
+  guint8 signal_byte;
+  gint i;
+  GstByteReader reader;
+
+  g_return_val_if_fail (data_out != NULL && *data_out != NULL, FALSE);
+  g_return_val_if_fail (size_out != NULL, FALSE);
+  g_return_val_if_fail (info_protect != NULL, FALSE);
+  g_return_val_if_fail (encrypted != NULL, FALSE);
+
+  *encrypted = FALSE;
+  data = *data_out;
+  gst_byte_reader_init (&reader, data, *size_out);
+
+  /* WebM spec:
+   * 4.7 Signal Byte Format
+   *  0 1 2 3 4 5 6 7
+   * +-+-+-+-+-+-+-+-+
+   * |X|   RSV   |P|E|
+   * +-+-+-+-+-+-+-+-+
+   *
+   * Extension bit (X)
+   * If set, another signal byte will follow this byte. Reserved for future expansion (currently MUST be set to 0).
+   * RSV bits (RSV)
+   * Bits reserved for future use. MUST be set to 0 and MUST be ignored.
+   * Encrypted bit (E)
+   * If set, the Block MUST contain an IV immediately followed by an encrypted frame. If not set, the Block MUST NOT include an IV and the frame MUST be unencrypted. The unencrypted frame MUST immediately follow the Signal Byte.
+   * Partitioned bit (P)
+   * Used to indicate that the sample has subsample partitions. If set, the IV will be followed by a num_partitions byte, and num_partitions * 32-bit partition offsets. This bit can only be set if the E bit is also set.
+   */
+  if (!gst_byte_reader_get_uint8 (&reader, &signal_byte)) {
+    GST_ERROR ("Error reading the signal byte");
+    return FALSE;
+  }
+
+  /* Unencrypted buffer */
+  if (!(signal_byte & GST_MATROSKA_BLOCK_ENCRYPTED)) {
+    return TRUE;
+  }
+
+  /* Encrypted buffer */
+  *encrypted = TRUE;
+  /* Create IV buffer (fixed 8 bytes per the spec) */
+  if (!gst_byte_reader_dup_data (&reader, sizeof (guint64), &data_iv)) {
+    GST_ERROR ("Error reading the IV data");
+    return FALSE;
+  }
+  buf_iv = gst_buffer_new_wrapped ((gpointer) data_iv, sizeof (guint64));
+  gst_structure_set (info_protect, "iv", GST_TYPE_BUFFER, buf_iv, NULL);
+  gst_buffer_unref (buf_iv);
+
+  /* Partitioned in subsample */
+  if (signal_byte & GST_MATROSKA_BLOCK_PARTITIONED) {
+    guint nb_subsample;
+    guint32 offset = 0;
+    guint32 offset_prev;
+    guint32 encrypted_bytes = 0;
+    guint16 clear_bytes = 0;
+    GstBuffer *buf_sub_sample;
+    guint8 nb_part;
+    GstByteWriter writer;
+
+    /* Read the number of partitions (1 byte) */
+    if (!gst_byte_reader_get_uint8 (&reader, &nb_part)) {
+      GST_ERROR ("Error reading the partition number");
+      return FALSE;
+    }
+
+    if (nb_part == 0) {
+      GST_ERROR ("Partitioned, but the subsample number equal to zero");
+      return FALSE;
+    }
+
+    /* nb_part partitions delimit nb_part+1 sections; each clear+encrypted
+     * pair becomes one subsample entry (rounded up) */
+    nb_subsample = (nb_part + 2) >> 1;
+
+    gst_structure_set (info_protect, "subsample_count", G_TYPE_UINT,
+        nb_subsample, NULL);
+
+    /* WebM Spec:
+     *
+     * 4.6 Subsample Encrypted Block Format
+     *
+     * The Subsample Encrypted Block format extends the Full-sample format by setting a "partitioned" (P) bit in the Signal Byte.
+     * If this bit is set, the EncryptedBlock header shall include an
+     * 8-bit integer indicating the number of sample partitions (dividers between clear/encrypted sections),
+     * and a series of 32-bit integers in big-endian encoding indicating the byte offsets of such partitions.
+     *
+     *  0                   1                   2                   3
+     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |  Signal Byte  |                                               |
+     * +-+-+-+-+-+-+-+-+             IV                                |
+     * |                                                               |
+     * |               +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |               | num_partition |     Partition 0 offset ->     |
+     * |-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-|
+     * |     -> Partition 0 offset     |              ...              |
+     * |-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-|
+     * |             ...               |     Partition n-1 offset ->   |
+     * |-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-|
+     * |     -> Partition n-1 offset   |                               |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               |
+     * |                    Clear/encrypted sample data                |
+     * |                                                               |
+     * |                                                               |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     *
+     * 4.6.1 SAMPLE PARTITIONS
+     *
+     * The samples shall be partitioned into alternating clear and encrypted sections,
+     * always starting with a clear section.
+     * Generally for n clear/encrypted sections there shall be n-1 partition offsets.
+     * However, if it is required that the first section be encrypted, then the first partition shall be at byte offset 0
+     * (indicating a zero-size clear section), and there shall be n partition offsets.
+     * Please refer to the "Sample Encryption" description of the "Common Encryption"
+     * section of the VP Codec ISO Media File Format Binding Specification for more
+     * detail on how subsample encryption is implemented.
+     */
+    subsamples =
+        g_malloc (nb_subsample * (sizeof (guint16) + sizeof (guint32)));
+
+    gst_byte_writer_init_with_data (&writer, subsamples,
+        nb_subsample * (sizeof (guint16) + sizeof (guint32)), FALSE);
+
+    for (i = 0; i <= nb_part; i++) {
+      offset_prev = offset;
+      if (i == nb_part) {
+        /* the final section runs to the end of the block */
+        offset = gst_byte_reader_get_remaining (&reader);
+      } else {
+        if (!gst_byte_reader_get_uint32_be (&reader, &offset)) {
+          GST_ERROR ("Error reading the partition offset");
+          goto release_err;
+        }
+      }
+
+      if (offset < offset_prev) {
+        GST_ERROR ("Partition offsets should not decrease");
+        goto release_err;
+      }
+
+      if (i % 2 == 0) {
+        /* even-numbered sections are clear */
+        if ((offset - offset_prev) & 0xFFFF0000) {
+          GST_ERROR
+              ("The Clear Partition exceed 64KB in encrypted subsample format");
+          goto release_err;
+        }
+        /* We set the Clear partition size in 16 bits, in order to
+         * follow the same format of the box PSSH in CENC spec */
+        clear_bytes = offset - offset_prev;
+        if (i == nb_part)
+          encrypted_bytes = 0;
+      } else {
+        /* odd-numbered sections are encrypted */
+        encrypted_bytes = offset - offset_prev;
+      }
+
+      if ((i % 2 == 1) || (i == nb_part)) {
+        if (clear_bytes == 0 && encrypted_bytes == 0) {
+          GST_ERROR ("Found 2 partitions with the same offsets.");
+          goto release_err;
+        }
+        if (!gst_byte_writer_put_uint16_be (&writer, clear_bytes)) {
+          GST_ERROR ("Error writing the number of clear bytes");
+          goto release_err;
+        }
+        if (!gst_byte_writer_put_uint32_be (&writer, encrypted_bytes)) {
+          GST_ERROR ("Error writing the number of encrypted bytes");
+          goto release_err;
+        }
+      }
+    }
+
+    buf_sub_sample =
+        gst_buffer_new_wrapped (subsamples,
+        nb_subsample * (sizeof (guint16) + sizeof (guint32)));
+    gst_structure_set (info_protect, "subsamples", GST_TYPE_BUFFER,
+        buf_sub_sample, NULL);
+    gst_buffer_unref (buf_sub_sample);
+  } else {
+    gst_structure_set (info_protect, "subsample_count", G_TYPE_UINT, 0, NULL);
+  }
+
+  /* hand back the remainder: the actual (possibly encrypted) payload */
+  gst_byte_reader_get_data (&reader, 0, (const guint8 **) data_out);
+  *size_out = gst_byte_reader_get_remaining (&reader);
+  return TRUE;
+
+release_err:
+  g_free (subsamples);
+  return FALSE;
+}
+
+/* Order index entries primarily by timestamp, then by block number. */
+static gint
+gst_matroska_index_compare (GstMatroskaIndex * i1, GstMatroskaIndex * i2)
+{
+  if (i1->time != i2->time)
+    return (i1->time < i2->time) ? -1 : 1;
+
+  if (i1->block != i2->block)
+    return (i1->block < i2->block) ? -1 : 1;
+
+  return 0;
+}
+
+/* Three-way comparison of an index entry's timestamp against *time, for
+ * use with gst_util_array_binary_search(). */
+gint
+gst_matroska_index_seek_find (GstMatroskaIndex * i1, GstClockTime * time,
+    gpointer user_data)
+{
+  if (i1->time == *time)
+    return 0;
+
+  return (i1->time < *time) ? -1 : 1;
+}
+
+/* Find the index entry for @seek_pos, preferring @track's own index table
+ * over the global one.  @snap_dir picks the snapping direction when there
+ * is no exact match; out-of-range positions are clamped to the first/last
+ * entry.  Optionally returns the array searched and the entry's position
+ * in it.  Returns NULL only when no usable index exists. */
+GstMatroskaIndex *
+gst_matroska_read_common_do_index_seek (GstMatroskaReadCommon * common,
+    GstMatroskaTrackContext * track, gint64 seek_pos, GArray ** _index,
+    gint * _entry_index, GstSearchMode snap_dir)
+{
+  GstMatroskaIndex *entry = NULL;
+  GArray *index;
+
+  /* find entry just before or at the requested position */
+  if (track && track->index_table)
+    index = track->index_table;
+  else
+    index = common->index;
+
+  if (!index || !index->len)
+    return NULL;
+
+  entry =
+      gst_util_array_binary_search (index->data, index->len,
+      sizeof (GstMatroskaIndex),
+      (GCompareDataFunc) gst_matroska_index_seek_find, snap_dir, &seek_pos,
+      NULL);
+
+  if (entry == NULL) {
+    if (snap_dir == GST_SEARCH_MODE_AFTER) {
+      /* Can only happen with a reverse seek past the end */
+      entry = &g_array_index (index, GstMatroskaIndex, index->len - 1);
+    } else {
+      /* Can only happen with a forward seek before the start */
+      entry = &g_array_index (index, GstMatroskaIndex, 0);
+    }
+  }
+
+  if (_index)
+    *_index = index;
+  if (_entry_index)
+    *_entry_index = entry - (GstMatroskaIndex *) index->data;
+
+  return entry;
+}
+
+/* Comparator on ContentEncodingOrder.  Note the operands are compared
+ * b-against-a: a larger order sorts earlier. */
+static gint
+gst_matroska_read_common_encoding_cmp (GstMatroskaTrackEncoding * a,
+    GstMatroskaTrackEncoding * b)
+{
+  /* yields 1 when b->order > a->order, -1 when smaller, 0 when equal */
+  return (a->order < b->order) - (a->order > b->order);
+}
+
+/* TRUE when no encoding in @encodings already uses @order. */
+static gboolean
+gst_matroska_read_common_encoding_order_unique (GArray * encodings, guint64
+    order)
+{
+  gint n;
+
+  if (encodings == NULL || encodings->len == 0)
+    return TRUE;
+
+  for (n = 0; n < encodings->len; n++) {
+    GstMatroskaTrackEncoding *enc =
+        &g_array_index (encodings, GstMatroskaTrackEncoding, n);
+
+    if (enc->order == order)
+      return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* takes ownership of taglist */
+void
+gst_matroska_read_common_found_global_tag (GstMatroskaReadCommon * common,
+    GstElement * el, GstTagList * taglist)
+{
+  if (common->global_tags == NULL) {
+    /* first global tag list: adopt it as-is */
+    common->global_tags = taglist;
+  } else {
+    /* merge into the existing list and drop the passed-in reference */
+    gst_tag_list_insert (common->global_tags, taglist, GST_TAG_MERGE_APPEND);
+    gst_tag_list_unref (taglist);
+  }
+  common->global_tags_changed = TRUE;
+}
+
+/* Query upstream for the stream length in bytes; -1 when unknown. */
+gint64
+gst_matroska_read_common_get_length (GstMatroskaReadCommon * common)
+{
+  gint64 total = -1;
+
+  if (!gst_pad_peer_query_duration (common->sinkpad, GST_FORMAT_BYTES, &total)
+      || total < 0)
+    GST_DEBUG_OBJECT (common->sinkpad, "no upstream length");
+
+  return total;
+}
+
+/* determine track to seek in */
+GstMatroskaTrackContext *
+gst_matroska_read_common_get_seek_track (GstMatroskaReadCommon * common,
+    GstMatroskaTrackContext * track)
+{
+  gint n;
+
+  /* a video track is already the preferred choice */
+  if (track && track->type == GST_MATROSKA_TRACK_TYPE_VIDEO)
+    return track;
+
+  /* otherwise fall back to the last video stream with its own index table */
+  for (n = 0; n < common->src->len; n++) {
+    GstMatroskaTrackContext *candidate;
+
+    candidate = g_ptr_array_index (common->src, n);
+    if (candidate->type == GST_MATROSKA_TRACK_TYPE_VIDEO &&
+        candidate->index_table)
+      track = candidate;
+  }
+
+  return track;
+}
+
+/* skip unknown or alike element */
+GstFlowReturn
+gst_matroska_read_common_parse_skip (GstMatroskaReadCommon * common,
+    GstEbmlRead * ebml, const gchar * parent_name, guint id)
+{
+  switch (id) {
+    case GST_EBML_ID_VOID:
+      GST_DEBUG_OBJECT (common->sinkpad, "Skipping EBML Void element");
+      break;
+    case GST_EBML_ID_CRC32:
+      GST_DEBUG_OBJECT (common->sinkpad, "Skipping EBML CRC32 element");
+      break;
+    default:
+      GST_WARNING_OBJECT (common->sinkpad,
+          "Unknown %s subelement 0x%x - ignoring", parent_name, id);
+      break;
+  }
+
+  return gst_ebml_read_skip (ebml);
+}
+
+/* Parse one AttachedFile master element and, when it carries a usable
+ * filename/mimetype/payload triple, append it to @taglist as either a
+ * GST_TAG_IMAGE (for recognized image attachments) or GST_TAG_ATTACHMENT
+ * sample. */
+static GstFlowReturn
+gst_matroska_read_common_parse_attached_file (GstMatroskaReadCommon * common,
+    GstEbmlRead * ebml, GstTagList * taglist)
+{
+  guint32 id;
+  GstFlowReturn ret;
+  gchar *description = NULL;
+  gchar *filename = NULL;
+  gchar *mimetype = NULL;
+  guint8 *data = NULL;
+  guint64 datalen = 0;
+
+  DEBUG_ELEMENT_START (common, ebml, "AttachedFile");
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "AttachedFile", ret);
+    return ret;
+  }
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    /* read all sub-entries */
+
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_FILEDESCRIPTION:
+        if (description) {
+          GST_WARNING_OBJECT (common->sinkpad,
+              "FileDescription can only appear once");
+          break;
+        }
+
+        ret = gst_ebml_read_utf8 (ebml, &id, &description);
+        GST_DEBUG_OBJECT (common->sinkpad, "FileDescription: %s",
+            GST_STR_NULL (description));
+        break;
+      case GST_MATROSKA_ID_FILENAME:
+        if (filename) {
+          GST_WARNING_OBJECT (common->sinkpad, "FileName can only appear once");
+          break;
+        }
+
+        ret = gst_ebml_read_utf8 (ebml, &id, &filename);
+
+        GST_DEBUG_OBJECT (common->sinkpad, "FileName: %s",
+            GST_STR_NULL (filename));
+        break;
+      case GST_MATROSKA_ID_FILEMIMETYPE:
+        if (mimetype) {
+          GST_WARNING_OBJECT (common->sinkpad,
+              "FileMimeType can only appear once");
+          break;
+        }
+
+        ret = gst_ebml_read_ascii (ebml, &id, &mimetype);
+        GST_DEBUG_OBJECT (common->sinkpad, "FileMimeType: %s",
+            GST_STR_NULL (mimetype));
+        break;
+      case GST_MATROSKA_ID_FILEDATA:
+        if (data) {
+          GST_WARNING_OBJECT (common->sinkpad, "FileData can only appear once");
+          break;
+        }
+
+        ret = gst_ebml_read_binary (ebml, &id, &data, &datalen);
+        GST_DEBUG_OBJECT (common->sinkpad,
+            "FileData of size %" G_GUINT64_FORMAT, datalen);
+        break;
+
+      default:
+        ret = gst_matroska_read_common_parse_skip (common, ebml,
+            "AttachedFile", id);
+        break;
+        /* note: FileUID is deliberately skipped without an "unknown" warning */
+      case GST_MATROSKA_ID_FILEUID:
+        ret = gst_ebml_read_skip (ebml);
+        break;
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (common, ebml, "AttachedFile", ret);
+
+  if (filename && mimetype && data && datalen > 0 && datalen < G_MAXUINT) {
+    GstTagImageType image_type = GST_TAG_IMAGE_TYPE_NONE;
+    GstBuffer *tagbuffer = NULL;
+    GstSample *tagsample = NULL;
+    GstStructure *info = NULL;
+    GstCaps *caps = NULL;
+    gchar *filename_lc = g_utf8_strdown (filename, -1);
+
+    GST_DEBUG_OBJECT (common->sinkpad, "Creating tag for attachment with "
+        "filename '%s', mimetype '%s', description '%s', "
+        "size %" G_GUINT64_FORMAT, filename, mimetype,
+        GST_STR_NULL (description), datalen);
+
+    /* TODO: better heuristics for different image types */
+    if (strstr (filename_lc, "cover")) {
+      if (strstr (filename_lc, "back"))
+        image_type = GST_TAG_IMAGE_TYPE_BACK_COVER;
+      else
+        image_type = GST_TAG_IMAGE_TYPE_FRONT_COVER;
+    } else if (g_str_has_prefix (mimetype, "image/") ||
+        g_str_has_suffix (filename_lc, "png") ||
+        g_str_has_suffix (filename_lc, "jpg") ||
+        g_str_has_suffix (filename_lc, "jpeg") ||
+        g_str_has_suffix (filename_lc, "gif") ||
+        g_str_has_suffix (filename_lc, "bmp")) {
+      image_type = GST_TAG_IMAGE_TYPE_UNDEFINED;
+    }
+    g_free (filename_lc);
+
+    /* First try to create an image tag buffer from this */
+    if (image_type != GST_TAG_IMAGE_TYPE_NONE) {
+      tagsample =
+          gst_tag_image_data_to_image_sample (data, datalen, image_type);
+
+      if (!tagsample) {
+        image_type = GST_TAG_IMAGE_TYPE_NONE;
+      } else {
+        tagbuffer = gst_buffer_ref (gst_sample_get_buffer (tagsample));
+        caps = gst_caps_ref (gst_sample_get_caps (tagsample));
+        info = gst_structure_copy (gst_sample_get_info (tagsample));
+        gst_sample_unref (tagsample);
+      }
+    }
+
+    /* if this failed create an attachment buffer */
+    if (!tagbuffer) {
+      tagbuffer = gst_buffer_new_memdup (data, datalen);
+
+      /* typefind the payload; fall back to the declared mimetype */
+      caps = gst_type_find_helper_for_buffer (NULL, tagbuffer, NULL);
+      if (caps == NULL)
+        caps = gst_caps_new_empty_simple (mimetype);
+    }
+
+    /* Set filename and description in the info */
+    if (info == NULL)
+      info = gst_structure_new_empty ("GstTagImageInfo");
+
+    gst_structure_set (info, "filename", G_TYPE_STRING, filename, NULL);
+    if (description)
+      gst_structure_set (info, "description", G_TYPE_STRING, description, NULL);
+
+    tagsample = gst_sample_new (tagbuffer, caps, NULL, info);
+
+    gst_buffer_unref (tagbuffer);
+    gst_caps_unref (caps);
+
+    GST_DEBUG_OBJECT (common->sinkpad,
+        "Created attachment sample: %" GST_PTR_FORMAT, tagsample);
+
+    /* and append to the tag list */
+    if (image_type != GST_TAG_IMAGE_TYPE_NONE)
+      gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_IMAGE, tagsample,
+          NULL);
+    else
+      gst_tag_list_add (taglist, GST_TAG_MERGE_APPEND, GST_TAG_ATTACHMENT,
+          tagsample, NULL);
+
+    /* the list adds it own ref */
+    gst_sample_unref (tagsample);
+  }
+
+  g_free (filename);
+  g_free (mimetype);
+  g_free (data);
+  g_free (description);
+
+  return ret;
+}
+
+/* Parse the Attachments master element: collect every AttachedFile into a
+ * global-scope tag list and publish it via the common global tags.  Marks
+ * attachments_parsed even on error so the element is not re-parsed. */
+GstFlowReturn
+gst_matroska_read_common_parse_attachments (GstMatroskaReadCommon * common,
+    GstElement * el, GstEbmlRead * ebml)
+{
+  guint32 id;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstTagList *taglist;
+
+  DEBUG_ELEMENT_START (common, ebml, "Attachments");
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "Attachments", ret);
+    return ret;
+  }
+
+  taglist = gst_tag_list_new_empty ();
+  gst_tag_list_set_scope (taglist, GST_TAG_SCOPE_GLOBAL);
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_ATTACHEDFILE:
+        ret = gst_matroska_read_common_parse_attached_file (common, ebml,
+            taglist);
+        break;
+
+      default:
+        ret = gst_matroska_read_common_parse_skip (common, ebml,
+            "Attachments", id);
+        break;
+    }
+  }
+  DEBUG_ELEMENT_STOP (common, ebml, "Attachments", ret);
+
+  if (gst_tag_list_n_tags (taglist) > 0) {
+    /* hand the collected attachments over; takes ownership of taglist */
+    GST_DEBUG_OBJECT (common->sinkpad, "Storing attachment tags");
+    gst_matroska_read_common_found_global_tag (common, el, taglist);
+  } else {
+    GST_DEBUG_OBJECT (common->sinkpad, "No valid attachments found");
+    gst_tag_list_unref (taglist);
+  }
+
+  common->attachments_parsed = TRUE;
+
+  return ret;
+}
+
+/* Recursively merge @tags into every TOC entry whose internal UID matches
+ * one of the target UIDs (edition targets for edition entries, chapter
+ * targets otherwise).  A target UID of 0 matches every entry.  @entry and
+ * @internal_entry are parallel trees walked in lockstep. */
+static void
+gst_matroska_read_common_parse_toc_tag (GstTocEntry * entry,
+    GstTocEntry * internal_entry, GArray * edition_targets,
+    GArray * chapter_targets, GstTagList * tags)
+{
+  gchar *uid;
+  guint i;
+  guint64 tgt;
+  GArray *targets;
+  GList *cur, *internal_cur;
+  GstTagList *etags;
+
+  /* pick the UID list appropriate for this entry type */
+  targets =
+      (gst_toc_entry_get_entry_type (entry) ==
+      GST_TOC_ENTRY_TYPE_EDITION) ? edition_targets : chapter_targets;
+
+  etags = gst_tag_list_new_empty ();
+
+  for (i = 0; i < targets->len; ++i) {
+    tgt = g_array_index (targets, guint64, i);
+
+    if (tgt == 0)
+      gst_tag_list_insert (etags, tags, GST_TAG_MERGE_APPEND);
+    else {
+      /* internal entries carry the numeric Matroska UID as their string UID */
+      uid = g_strdup_printf ("%" G_GUINT64_FORMAT, tgt);
+      if (g_strcmp0 (gst_toc_entry_get_uid (internal_entry), uid) == 0)
+        gst_tag_list_insert (etags, tags, GST_TAG_MERGE_APPEND);
+      g_free (uid);
+    }
+  }
+
+  gst_toc_entry_merge_tags (entry, etags, GST_TAG_MERGE_APPEND);
+  gst_tag_list_unref (etags);
+
+  /* recurse into both trees in parallel */
+  cur = gst_toc_entry_get_sub_entries (entry);
+  internal_cur = gst_toc_entry_get_sub_entries (internal_entry);
+  while (cur != NULL && internal_cur != NULL) {
+    gst_matroska_read_common_parse_toc_tag (cur->data, internal_cur->data,
+        edition_targets, chapter_targets, tags);
+    cur = cur->next;
+    internal_cur = internal_cur->next;
+  }
+}
+
+/* Parse a TagTargets master element, appending chapter/edition/track UIDs
+ * to the given arrays and returning the TargetTypeValue (defaults to 50,
+ * the spec's track/album level) and TargetType string (NULL when absent,
+ * transferred to the caller). */
+static GstFlowReturn
+gst_matroska_read_common_parse_metadata_targets (GstMatroskaReadCommon * common,
+    GstEbmlRead * ebml, GArray * edition_targets, GArray * chapter_targets,
+    GArray * track_targets, guint64 * target_type_value, gchar ** target_type)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint32 id;
+  guint64 uid;
+  guint64 tmp;
+  gchar *str;
+
+  DEBUG_ELEMENT_START (common, ebml, "TagTargets");
+
+  /* defaults when the element carries no explicit type */
+  *target_type_value = 50;
+  *target_type = NULL;
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "TagTargets", ret);
+    return ret;
+  }
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_TARGETCHAPTERUID:
+        if ((ret = gst_ebml_read_uint (ebml, &id, &uid)) == GST_FLOW_OK)
+          g_array_append_val (chapter_targets, uid);
+        break;
+
+      case GST_MATROSKA_ID_TARGETEDITIONUID:
+        if ((ret = gst_ebml_read_uint (ebml, &id, &uid)) == GST_FLOW_OK)
+          g_array_append_val (edition_targets, uid);
+        break;
+
+      case GST_MATROSKA_ID_TARGETTRACKUID:
+        if ((ret = gst_ebml_read_uint (ebml, &id, &uid)) == GST_FLOW_OK)
+          g_array_append_val (track_targets, uid);
+        break;
+
+      case GST_MATROSKA_ID_TARGETTYPEVALUE:
+        if ((ret = gst_ebml_read_uint (ebml, &id, &tmp)) == GST_FLOW_OK)
+          *target_type_value = tmp;
+        break;
+
+      case GST_MATROSKA_ID_TARGETTYPE:
+        if ((ret = gst_ebml_read_ascii (ebml, &id, &str)) == GST_FLOW_OK) {
+          /* last occurrence wins; free a previously read value */
+          g_free (*target_type);
+          *target_type = str;
+        }
+        break;
+
+      default:
+        ret =
+            gst_matroska_read_common_parse_skip (common, ebml, "TagTargets",
+            id);
+        break;
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (common, ebml, "TagTargets", ret);
+
+  return ret;
+}
+
+/* Walk a level of TOC entries filling in missing stop times: editions span
+ * the whole track [0, max]; a chapter without an explicit stop ends where
+ * the next sibling starts, or at @max for the last one.  Recurses into
+ * sub-entries with the computed bounds. */
+static void
+gst_matroska_read_common_postprocess_toc_entries (GList * toc_entries,
+    guint64 max, const gchar * parent_uid)
+{
+  GstTocEntry *cur_info, *prev_info, *next_info;
+  GList *cur_list, *prev_list, *next_list;
+  gint64 cur_start, prev_start, stop;
+
+  cur_list = toc_entries;
+  while (cur_list != NULL) {
+    cur_info = cur_list->data;
+
+    switch (gst_toc_entry_get_entry_type (cur_info)) {
+      case GST_TOC_ENTRY_TYPE_ANGLE:
+      case GST_TOC_ENTRY_TYPE_VERSION:
+      case GST_TOC_ENTRY_TYPE_EDITION:
+        /* in Matroska terms edition has duration of full track */
+        gst_toc_entry_set_start_stop_times (cur_info, 0, max);
+
+        gst_matroska_read_common_postprocess_toc_entries
+            (gst_toc_entry_get_sub_entries (cur_info), max,
+            gst_toc_entry_get_uid (cur_info));
+        break;
+
+      case GST_TOC_ENTRY_TYPE_TITLE:
+      case GST_TOC_ENTRY_TYPE_TRACK:
+      case GST_TOC_ENTRY_TYPE_CHAPTER:
+        prev_list = cur_list->prev;
+        next_list = cur_list->next;
+
+        if (prev_list != NULL)
+          prev_info = prev_list->data;
+        else
+          prev_info = NULL;
+
+        if (next_list != NULL)
+          next_info = next_list->data;
+        else
+          next_info = NULL;
+
+        /* updated stop time in previous chapter and it's subchapters */
+        if (prev_info != NULL) {
+          gst_toc_entry_get_start_stop_times (prev_info, &prev_start, &stop);
+          gst_toc_entry_get_start_stop_times (cur_info, &cur_start, &stop);
+
+          /* previous chapter ends where this one begins */
+          stop = cur_start;
+          gst_toc_entry_set_start_stop_times (prev_info, prev_start, stop);
+
+          gst_matroska_read_common_postprocess_toc_entries
+              (gst_toc_entry_get_sub_entries (prev_info), cur_start,
+              gst_toc_entry_get_uid (prev_info));
+        }
+
+        /* updated stop time in current chapter and it's subchapters */
+        if (next_info == NULL) {
+          gst_toc_entry_get_start_stop_times (cur_info, &cur_start, &stop);
+
+          /* last sibling: close an open-ended chapter at @max */
+          if (stop == -1) {
+            stop = max;
+            gst_toc_entry_set_start_stop_times (cur_info, cur_start, stop);
+          }
+
+          gst_matroska_read_common_postprocess_toc_entries
+              (gst_toc_entry_get_sub_entries (cur_info), stop,
+              gst_toc_entry_get_uid (cur_info));
+        }
+        break;
+      case GST_TOC_ENTRY_TYPE_INVALID:
+        break;
+    }
+    cur_list = cur_list->next;
+  }
+}
+
+/* Parse a ChapterDisplay master element, adding its ChapString as a TITLE
+ * tag to @titles.
+ * NOTE(review): if ChapString appears more than once the earlier string
+ * looks like it would be overwritten without being freed — verify against
+ * gst_ebml_read_utf8()'s output semantics. */
+static GstFlowReturn
+gst_matroska_read_common_parse_chapter_titles (GstMatroskaReadCommon * common,
+    GstEbmlRead * ebml, GstTagList * titles)
+{
+  guint32 id;
+  gchar *title = NULL;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  DEBUG_ELEMENT_START (common, ebml, "ChaptersTitles");
+
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "ChaptersTitles", ret);
+    return ret;
+  }
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_CHAPSTRING:
+        ret = gst_ebml_read_utf8 (ebml, &id, &title);
+        break;
+
+      default:
+        ret =
+            gst_matroska_read_common_parse_skip (common, ebml, "ChaptersTitles",
+            id);
+        break;
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (common, ebml, "ChaptersTitles", ret);
+
+  /* only publish the title if parsing finished cleanly */
+  if (title != NULL && ret == GST_FLOW_OK)
+    gst_tag_list_add (titles, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, title, NULL);
+
+  g_free (title);
+  return ret;
+}
+
/* Parse one ChapterAtom master element (recursing for nested atoms) and
 * append a GST_TOC_ENTRY_TYPE_CHAPTER entry to both output lists.
 *
 * Two parallel entries are built per chapter:
 *  - one appended to *subentries, keyed by the ChapterStringUID when
 *    present (falling back to the numeric ChapterUID), carrying the
 *    chapter's tags and start/stop times;
 *  - one appended to *internal_subentries, always keyed by the numeric
 *    Matroska UID.
 * Both lists are appended to in lockstep, so they stay the same length.
 *
 * Hidden, disabled or start-time-less chapters are fully parsed but then
 * dropped instead of being appended.
 *
 * Returns: GST_FLOW_OK on success, or the error from the EBML reader.
 */
static GstFlowReturn
gst_matroska_read_common_parse_chapter_element (GstMatroskaReadCommon * common,
    GstEbmlRead * ebml, GList ** subentries, GList ** internal_subentries)
{
  guint32 id;
  /* (guint64) -1 marks "element not seen"; ChapterTimeStart is mandatory */
  guint64 start_time = -1, stop_time = -1;
  guint64 is_hidden = 0, is_enabled = 1, uid = 0;
  GstFlowReturn ret = GST_FLOW_OK;
  GstTocEntry *chapter_info, *internal_chapter_info;
  GstTagList *tags;
  gchar *uid_str, *string_uid = NULL;
  GList *subsubentries = NULL, *internal_subsubentries = NULL, *l, *il;

  DEBUG_ELEMENT_START (common, ebml, "ChaptersElement");

  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
    DEBUG_ELEMENT_STOP (common, ebml, "ChaptersElement", ret);
    return ret;
  }

  tags = gst_tag_list_new_empty ();

  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
      break;

    switch (id) {
      case GST_MATROSKA_ID_CHAPTERUID:
        ret = gst_ebml_read_uint (ebml, &id, &uid);
        break;

      case GST_MATROSKA_ID_CHAPTERSTRINGUID:
        ret = gst_ebml_read_utf8 (ebml, &id, &string_uid);
        break;

      case GST_MATROSKA_ID_CHAPTERTIMESTART:
        ret = gst_ebml_read_uint (ebml, &id, &start_time);
        break;

      case GST_MATROSKA_ID_CHAPTERTIMESTOP:
        ret = gst_ebml_read_uint (ebml, &id, &stop_time);
        break;

      case GST_MATROSKA_ID_CHAPTERATOM:
        /* nested chapter: recurse, collecting into the sub-sub lists */
        ret = gst_matroska_read_common_parse_chapter_element (common, ebml,
            &subsubentries, &internal_subsubentries);
        break;

      case GST_MATROSKA_ID_CHAPTERDISPLAY:
        /* chapter title ends up as a GST_TAG_TITLE in tags */
        ret =
            gst_matroska_read_common_parse_chapter_titles (common, ebml, tags);
        break;

      case GST_MATROSKA_ID_CHAPTERFLAGHIDDEN:
        ret = gst_ebml_read_uint (ebml, &id, &is_hidden);
        break;

      case GST_MATROSKA_ID_CHAPTERFLAGENABLED:
        ret = gst_ebml_read_uint (ebml, &id, &is_enabled);
        break;

      default:
        ret =
            gst_matroska_read_common_parse_skip (common, ebml,
            "ChaptersElement", id);
        break;
    }
  }

  /* ChapterUID is optional in practice; synthesize a random one if missing */
  if (uid == 0)
    uid = (((guint64) g_random_int ()) << 32) | g_random_int ();
  uid_str = g_strdup_printf ("%" G_GUINT64_FORMAT, uid);
  if (string_uid != NULL) {
    /* init toc with provided String UID */
    chapter_info = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_CHAPTER, string_uid);
    g_free (string_uid);
  } else {
    /* No String UID provided => use the internal UID instead */
    chapter_info = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_CHAPTER, uid_str);
  }
  /* init internal toc with internal UID */
  internal_chapter_info = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_CHAPTER,
      uid_str);
  g_free (uid_str);

  /* NOTE(review): tags is not unreffed here, so gst_toc_entry_set_tags()
   * presumably takes ownership — confirm against the GstToc API docs */
  gst_toc_entry_set_tags (chapter_info, tags);
  gst_toc_entry_set_start_stop_times (chapter_info, start_time, stop_time);

  /* hand the nested entries over to the new chapter entries; the two
   * lists were filled in lockstep, so iterate them together */
  for (l = subsubentries, il = internal_subsubentries;
      l && il; l = l->next, il = il->next) {
    gst_toc_entry_append_sub_entry (chapter_info, l->data);
    gst_toc_entry_append_sub_entry (internal_chapter_info, il->data);
  }
  g_list_free (subsubentries);
  g_list_free (internal_subsubentries);

  DEBUG_ELEMENT_STOP (common, ebml, "ChaptersElement", ret);

  /* start time is mandatory and has no default value,
   * so we should skip chapters without it */
  if (is_hidden == 0 && is_enabled > 0 &&
      start_time != -1 && ret == GST_FLOW_OK) {
    *subentries = g_list_append (*subentries, chapter_info);
    *internal_subentries = g_list_append (*internal_subentries,
        internal_chapter_info);
  } else {
    /* dropped: unref both entries (this also releases the nested entries
     * and tags they own) */
    gst_toc_entry_unref (chapter_info);
    gst_toc_entry_unref (internal_chapter_info);
  }

  return ret;
}
+
+static GstFlowReturn
+gst_matroska_read_common_parse_chapter_edition (GstMatroskaReadCommon * common,
+ GstEbmlRead * ebml, GstToc * toc, GstToc * internal_toc)
+{
+ guint32 id;
+ guint64 is_hidden = 0, uid = 0;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstTocEntry *edition_info, *internal_edition_info;
+ GList *subentries = NULL, *internal_subentries = NULL, *l, *il;
+ gchar *uid_str;
+
+ DEBUG_ELEMENT_START (common, ebml, "ChaptersEdition");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (common, ebml, "ChaptersEdition", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_EDITIONUID:
+ ret = gst_ebml_read_uint (ebml, &id, &uid);
+ break;
+
+ case GST_MATROSKA_ID_CHAPTERATOM:
+ ret = gst_matroska_read_common_parse_chapter_element (common, ebml,
+ &subentries, &internal_subentries);
+ break;
+
+ case GST_MATROSKA_ID_EDITIONFLAGHIDDEN:
+ ret = gst_ebml_read_uint (ebml, &id, &is_hidden);
+ break;
+
+ default:
+ ret =
+ gst_matroska_read_common_parse_skip (common, ebml,
+ "ChaptersEdition", id);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (common, ebml, "ChaptersEdition", ret);
+
+ if (uid == 0)
+ uid = (((guint64) g_random_int ()) << 32) | g_random_int ();
+ uid_str = g_strdup_printf ("%" G_GUINT64_FORMAT, uid);
+ edition_info = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_EDITION, uid_str);
+ gst_toc_entry_set_start_stop_times (edition_info, -1, -1);
+ internal_edition_info = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_EDITION,
+ uid_str);
+ g_free (uid_str);
+
+ for (l = subentries, il = internal_subentries; l && il;
+ l = l->next, il = il->next) {
+ gst_toc_entry_append_sub_entry (edition_info, l->data);
+ gst_toc_entry_append_sub_entry (internal_edition_info, il->data);
+ }
+ g_list_free (subentries);
+ g_list_free (internal_subentries);
+
+ if (is_hidden == 0 && subentries != NULL && ret == GST_FLOW_OK) {
+ gst_toc_append_entry (toc, edition_info);
+ gst_toc_append_entry (internal_toc, internal_edition_info);
+ } else {
+ GST_DEBUG_OBJECT (common->sinkpad,
+ "Skipping empty or hidden edition in the chapters TOC");
+ gst_toc_entry_unref (edition_info);
+ gst_toc_entry_unref (internal_edition_info);
+ }
+
+ return ret;
+}
+
+GstFlowReturn
+gst_matroska_read_common_parse_chapters (GstMatroskaReadCommon * common,
+ GstEbmlRead * ebml)
+{
+ guint32 id;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstToc *toc, *internal_toc;
+
+ DEBUG_ELEMENT_START (common, ebml, "Chapters");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (common, ebml, "Chapters", ret);
+ return ret;
+ }
+
+ /* FIXME: create CURRENT toc as well */
+ toc = gst_toc_new (GST_TOC_SCOPE_GLOBAL);
+ internal_toc = gst_toc_new (GST_TOC_SCOPE_GLOBAL);
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_EDITIONENTRY:
+ ret = gst_matroska_read_common_parse_chapter_edition (common, ebml,
+ toc, internal_toc);
+ break;
+
+ default:
+ ret =
+ gst_matroska_read_common_parse_skip (common, ebml, "Chapters", id);
+ break;
+ }
+ }
+
+ if (gst_toc_get_entries (toc) != NULL) {
+ gst_matroska_read_common_postprocess_toc_entries (gst_toc_get_entries (toc),
+ common->segment.duration, "");
+ /* no need to postprocess internal_toc as we don't need to keep track
+ * of start / end and tags (only UIDs) */
+
+ common->toc = toc;
+ common->internal_toc = internal_toc;
+ } else {
+ gst_toc_unref (toc);
+ gst_toc_unref (internal_toc);
+ }
+
+ common->chapters_parsed = TRUE;
+
+ DEBUG_ELEMENT_STOP (common, ebml, "Chapters", ret);
+ return ret;
+}
+
+GstFlowReturn
+gst_matroska_read_common_parse_header (GstMatroskaReadCommon * common,
+ GstEbmlRead * ebml)
+{
+ GstFlowReturn ret;
+ gchar *doctype;
+ guint version;
+ guint32 id;
+
+ /* this function is the first to be called */
+
+ /* default init */
+ doctype = NULL;
+ version = 1;
+
+ ret = gst_ebml_peek_id (ebml, &id);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ GST_DEBUG_OBJECT (common->sinkpad, "id: %08x", id);
+
+ if (id != GST_EBML_ID_HEADER) {
+ GST_ERROR_OBJECT (common->sinkpad, "Failed to read header");
+ goto exit;
+ }
+
+ ret = gst_ebml_read_master (ebml, &id);
+ if (ret != GST_FLOW_OK)
+ return ret;
+
+ while (gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ ret = gst_ebml_peek_id (ebml, &id);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+
+ switch (id) {
+ /* is our read version up-to-date? */
+ case GST_EBML_ID_EBMLREADVERSION:{
+ guint64 num;
+
+ ret = gst_ebml_read_uint (ebml, &id, &num);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+ if (num != GST_EBML_VERSION) {
+ GST_ERROR_OBJECT (common->sinkpad,
+ "Unsupported EBML version %" G_GUINT64_FORMAT, num);
+ goto exit_error;
+ }
+
+ GST_DEBUG_OBJECT (common->sinkpad,
+ "EbmlReadVersion: %" G_GUINT64_FORMAT, num);
+ break;
+ }
+
+ /* we only handle 8 byte lengths at max */
+ case GST_EBML_ID_EBMLMAXSIZELENGTH:{
+ guint64 num;
+
+ ret = gst_ebml_read_uint (ebml, &id, &num);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+ if (num > sizeof (guint64)) {
+ GST_ERROR_OBJECT (common->sinkpad,
+ "Unsupported EBML maximum size %" G_GUINT64_FORMAT, num);
+ return GST_FLOW_ERROR;
+ }
+ GST_DEBUG_OBJECT (common->sinkpad,
+ "EbmlMaxSizeLength: %" G_GUINT64_FORMAT, num);
+ break;
+ }
+
+ /* we handle 4 byte IDs at max */
+ case GST_EBML_ID_EBMLMAXIDLENGTH:{
+ guint64 num;
+
+ ret = gst_ebml_read_uint (ebml, &id, &num);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+ if (num > sizeof (guint32)) {
+ GST_ERROR_OBJECT (common->sinkpad,
+ "Unsupported EBML maximum ID %" G_GUINT64_FORMAT, num);
+ return GST_FLOW_ERROR;
+ }
+ GST_DEBUG_OBJECT (common->sinkpad,
+ "EbmlMaxIdLength: %" G_GUINT64_FORMAT, num);
+ break;
+ }
+
+ case GST_EBML_ID_DOCTYPE:{
+ gchar *text;
+
+ ret = gst_ebml_read_ascii (ebml, &id, &text);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+
+ GST_DEBUG_OBJECT (common->sinkpad, "EbmlDocType: %s",
+ GST_STR_NULL (text));
+
+ g_free (doctype);
+ doctype = text;
+ break;
+ }
+
+ case GST_EBML_ID_DOCTYPEREADVERSION:{
+ guint64 num;
+
+ ret = gst_ebml_read_uint (ebml, &id, &num);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+ version = num;
+ GST_DEBUG_OBJECT (common->sinkpad,
+ "EbmlReadVersion: %" G_GUINT64_FORMAT, num);
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (common, ebml,
+ "EBML header", id);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+ break;
+
+ /* we ignore these two, as they don't tell us anything we care about */
+ case GST_EBML_ID_EBMLVERSION:
+ case GST_EBML_ID_DOCTYPEVERSION:
+ ret = gst_ebml_read_skip (ebml);
+ if (ret != GST_FLOW_OK)
+ goto exit_error;
+ break;
+ }
+ }
+
+exit:
+
+ if ((doctype != NULL && !strcmp (doctype, GST_MATROSKA_DOCTYPE_MATROSKA)) ||
+ (doctype != NULL && !strcmp (doctype, GST_MATROSKA_DOCTYPE_WEBM)) ||
+ (doctype == NULL)) {
+ if (version <= 2) {
+ if (doctype) {
+ GST_INFO_OBJECT (common->sinkpad, "Input is %s version %d", doctype,
+ version);
+ if (!strcmp (doctype, GST_MATROSKA_DOCTYPE_WEBM))
+ common->is_webm = TRUE;
+ } else {
+ GST_WARNING_OBJECT (common->sinkpad,
+ "Input is EBML without doctype, assuming " "matroska (version %d)",
+ version);
+ }
+ ret = GST_FLOW_OK;
+ } else {
+ GST_ELEMENT_ERROR (common, STREAM, DEMUX, (NULL),
+ ("Demuxer version (2) is too old to read %s version %d",
+ GST_STR_NULL (doctype), version));
+ ret = GST_FLOW_ERROR;
+ }
+ } else {
+ GST_ELEMENT_ERROR (common, STREAM, WRONG_TYPE, (NULL),
+ ("Input is not a matroska stream (doctype=%s)", doctype));
+ ret = GST_FLOW_ERROR;
+ }
+
+exit_error:
+
+ g_free (doctype);
+
+ return ret;
+}
+
/* Parse one CueTrackPositions master element and, if it carries a valid
 * track number and cluster position, append a GstMatroskaIndex entry to
 * common->index, bumping *nentries.
 *
 * The entry's time is left as GST_CLOCK_TIME_NONE here; the caller (the
 * CuePoint parser) patches it in once the CueTime is known.
 *
 * Returns: GST_FLOW_OK on success, or the error from the EBML reader.
 */
static GstFlowReturn
gst_matroska_read_common_parse_index_cuetrack (GstMatroskaReadCommon * common,
    GstEbmlRead * ebml, guint * nentries)
{
  guint32 id;
  GstFlowReturn ret;
  GstMatroskaIndex idx;

  /* pos == (guint64) -1 and track == 0 mark the entry as invalid/unset */
  idx.pos = (guint64) - 1;
  idx.track = 0;
  idx.time = GST_CLOCK_TIME_NONE;
  idx.block = 1;

  DEBUG_ELEMENT_START (common, ebml, "CueTrackPositions");

  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
    DEBUG_ELEMENT_STOP (common, ebml, "CueTrackPositions", ret);
    return ret;
  }

  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
      break;

    switch (id) {
        /* track number */
      case GST_MATROSKA_ID_CUETRACK:
      {
        guint64 num;

        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
          break;

        if (num == 0) {
          /* track numbers are 1-based; 0 keeps the entry invalid */
          idx.track = 0;
          GST_WARNING_OBJECT (common->sinkpad, "Invalid CueTrack 0");
          break;
        }

        GST_DEBUG_OBJECT (common->sinkpad, "CueTrack: %" G_GUINT64_FORMAT, num);
        idx.track = num;
        break;
      }

        /* position in file */
      case GST_MATROSKA_ID_CUECLUSTERPOSITION:
      {
        guint64 num;

        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
          break;

        if (num > G_MAXINT64) {
          /* position must fit into a signed 64-bit byte offset */
          GST_WARNING_OBJECT (common->sinkpad,
              "CueClusterPosition %" G_GUINT64_FORMAT " too large", num);
          break;
        }

        idx.pos = num;
        break;
      }

        /* number of block in the cluster */
      case GST_MATROSKA_ID_CUEBLOCKNUMBER:
      {
        guint64 num;

        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
          break;

        if (num == 0) {
          GST_WARNING_OBJECT (common->sinkpad, "Invalid CueBlockNumber 0");
          break;
        }

        GST_DEBUG_OBJECT (common->sinkpad, "CueBlockNumber: %" G_GUINT64_FORMAT,
            num);
        idx.block = num;

        /* mild sanity check, disregard strange cases ... */
        if (idx.block > G_MAXUINT16) {
          GST_DEBUG_OBJECT (common->sinkpad, "... looks suspicious, ignoring");
          idx.block = 1;
        }
        break;
      }

      default:
        ret = gst_matroska_read_common_parse_skip (common, ebml,
            "CueTrackPositions", id);
        break;

        /* known but unused sub-elements: skip without logging */
      case GST_MATROSKA_ID_CUECODECSTATE:
      case GST_MATROSKA_ID_CUEREFERENCE:
        ret = gst_ebml_read_skip (ebml);
        break;
    }
  }

  DEBUG_ELEMENT_STOP (common, ebml, "CueTrackPositions", ret);

  /* (e.g.) lavf typically creates entries without a block number,
   * which is bogus and leads to contradictory information.
   * NOTE(review): idx.time is still GST_CLOCK_TIME_NONE (G_MAXUINT64)
   * at this point, so "idx.time > last_idx->time" holds whenever the
   * previous entry has a valid time — i.e. any entry repeating the
   * previous pos/block/track is discarded; confirm this is intended. */
  if (common->index->len) {
    GstMatroskaIndex *last_idx;

    last_idx = &g_array_index (common->index, GstMatroskaIndex,
        common->index->len - 1);
    if (last_idx->block == idx.block && last_idx->pos == idx.pos &&
        last_idx->track == idx.track && idx.time > last_idx->time) {
      GST_DEBUG_OBJECT (common->sinkpad, "Cue entry refers to same location, "
          "but has different time than previous entry; discarding");
      /* track == 0 invalidates the entry so it is not appended below */
      idx.track = 0;
    }
  }

  if ((ret == GST_FLOW_OK || ret == GST_FLOW_EOS)
      && idx.pos != (guint64) - 1 && idx.track > 0) {
    g_array_append_val (common->index, idx);
    (*nentries)++;
  } else if (ret == GST_FLOW_OK || ret == GST_FLOW_EOS) {
    GST_DEBUG_OBJECT (common->sinkpad,
        "CueTrackPositions without valid content");
  }

  return ret;
}
+
+static GstFlowReturn
+gst_matroska_read_common_parse_index_pointentry (GstMatroskaReadCommon *
+ common, GstEbmlRead * ebml)
+{
+ guint32 id;
+ GstFlowReturn ret;
+ GstClockTime time = GST_CLOCK_TIME_NONE;
+ guint nentries = 0;
+
+ DEBUG_ELEMENT_START (common, ebml, "CuePoint");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (common, ebml, "CuePoint", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* one single index entry ('point') */
+ case GST_MATROSKA_ID_CUETIME:
+ {
+ if ((ret = gst_ebml_read_uint (ebml, &id, &time)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (common->sinkpad, "CueTime: %" G_GUINT64_FORMAT, time);
+ time = time * common->time_scale;
+ break;
+ }
+
+ /* position in the file + track to which it belongs */
+ case GST_MATROSKA_ID_CUETRACKPOSITIONS:
+ {
+ ret = gst_matroska_read_common_parse_index_cuetrack (common, ebml,
+ &nentries);
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (common, ebml, "CuePoint",
+ id);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (common, ebml, "CuePoint", ret);
+
+ if (nentries > 0) {
+ if (time == GST_CLOCK_TIME_NONE) {
+ GST_WARNING_OBJECT (common->sinkpad, "CuePoint without valid time");
+ g_array_remove_range (common->index, common->index->len - nentries,
+ nentries);
+ } else {
+ gint i;
+
+ for (i = common->index->len - nentries; i < common->index->len; i++) {
+ GstMatroskaIndex *idx =
+ &g_array_index (common->index, GstMatroskaIndex, i);
+
+ idx->time = time;
+ GST_DEBUG_OBJECT (common->sinkpad, "Index entry: pos=%" G_GUINT64_FORMAT
+ ", time=%" GST_TIME_FORMAT ", track=%u, block=%u", idx->pos,
+ GST_TIME_ARGS (idx->time), (guint) idx->track, (guint) idx->block);
+ }
+ }
+ } else {
+ GST_DEBUG_OBJECT (common->sinkpad, "Empty CuePoint");
+ }
+
+ return ret;
+}
+
+gint
+gst_matroska_read_common_stream_from_num (GstMatroskaReadCommon * common,
+ guint track_num)
+{
+ guint n;
+
+ g_assert (common->src->len == common->num_streams);
+ for (n = 0; n < common->src->len; n++) {
+ GstMatroskaTrackContext *context = g_ptr_array_index (common->src, n);
+
+ if (context->num == track_num) {
+ return n;
+ }
+ }
+
+ if (n == common->num_streams)
+ GST_WARNING_OBJECT (common->sinkpad,
+ "Failed to find corresponding pad for tracknum %d", track_num);
+
+ return -1;
+}
+
/* Parse the Cues (seek index) master element into common->index.
 *
 * After parsing, the index is sorted by time and every entry with a
 * resolvable track is also copied into that stream's per-track
 * index_table.  An empty index is normalized to common->index == NULL.
 *
 * Returns: GST_FLOW_OK on success, or the error from the EBML reader.
 */
GstFlowReturn
gst_matroska_read_common_parse_index (GstMatroskaReadCommon * common,
    GstEbmlRead * ebml)
{
  guint32 id;
  GstFlowReturn ret = GST_FLOW_OK;
  guint i;

  /* re-parsing replaces any previously built index */
  if (common->index)
    g_array_free (common->index, TRUE);
  common->index =
      g_array_sized_new (FALSE, FALSE, sizeof (GstMatroskaIndex), 128);

  DEBUG_ELEMENT_START (common, ebml, "Cues");

  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
    DEBUG_ELEMENT_STOP (common, ebml, "Cues", ret);
    return ret;
  }

  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
      break;

    switch (id) {
        /* one single index entry ('point') */
      case GST_MATROSKA_ID_POINTENTRY:
        ret = gst_matroska_read_common_parse_index_pointentry (common, ebml);
        break;

      default:
        ret = gst_matroska_read_common_parse_skip (common, ebml, "Cues", id);
        break;
    }
  }
  DEBUG_ELEMENT_STOP (common, ebml, "Cues", ret);

  /* Sort index by time, smallest time first, for easier searching */
  g_array_sort (common->index, (GCompareFunc) gst_matroska_index_compare);

  /* Now sort the track specific index entries into their own arrays */
  for (i = 0; i < common->index->len; i++) {
    GstMatroskaIndex *idx = &g_array_index (common->index, GstMatroskaIndex,
        i);
    gint track_num;
    GstMatroskaTrackContext *ctx;

    /* legacy GstIndex (element_index) integration, kept disabled */
#if 0
    if (common->element_index) {
      gint writer_id;

      if (idx->track != 0 &&
          (track_num =
              gst_matroska_read_common_stream_from_num (common,
                  idx->track)) != -1) {
        ctx = g_ptr_array_index (common->src, track_num);

        if (ctx->index_writer_id == -1)
          gst_index_get_writer_id (common->element_index,
              GST_OBJECT (ctx->pad), &ctx->index_writer_id);
        writer_id = ctx->index_writer_id;
      } else {
        if (common->element_index_writer_id == -1)
          gst_index_get_writer_id (common->element_index, GST_OBJECT (common),
              &common->element_index_writer_id);
        writer_id = common->element_index_writer_id;
      }

      GST_LOG_OBJECT (common->sinkpad,
          "adding association %" GST_TIME_FORMAT "-> %" G_GUINT64_FORMAT
          " for writer id %d", GST_TIME_ARGS (idx->time), idx->pos, writer_id);
      gst_index_add_association (common->element_index, writer_id,
          GST_ASSOCIATION_FLAG_KEY_UNIT, GST_FORMAT_TIME, idx->time,
          GST_FORMAT_BYTES, idx->pos + common->ebml_segment_start, NULL);
    }
#endif

    /* track == 0 marks an entry that was invalidated during parsing */
    if (idx->track == 0)
      continue;

    track_num = gst_matroska_read_common_stream_from_num (common, idx->track);
    if (track_num == -1)
      continue;

    ctx = g_ptr_array_index (common->src, track_num);

    if (ctx->index_table == NULL)
      ctx->index_table =
          g_array_sized_new (FALSE, FALSE, sizeof (GstMatroskaIndex), 128);

    g_array_append_vals (ctx->index_table, idx, 1);
  }

  common->index_parsed = TRUE;

  /* sanity check; empty index normalizes to no index */
  if (common->index->len == 0) {
    g_array_free (common->index, TRUE);
    common->index = NULL;
  }

  return ret;
}
+
+GstFlowReturn
+gst_matroska_read_common_parse_info (GstMatroskaReadCommon * common,
+ GstElement * el, GstEbmlRead * ebml)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ gdouble dur_f = -1.0;
+ guint32 id;
+
+ DEBUG_ELEMENT_START (common, ebml, "SegmentInfo");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (common, ebml, "SegmentInfo", ret);
+ return ret;
+ }
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ /* cluster timecode */
+ case GST_MATROSKA_ID_TIMECODESCALE:{
+ guint64 num;
+
+ if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+ break;
+
+
+ GST_DEBUG_OBJECT (common->sinkpad, "TimeCodeScale: %" G_GUINT64_FORMAT,
+ num);
+ common->time_scale = num;
+ break;
+ }
+
+ case GST_MATROSKA_ID_DURATION:{
+ if ((ret = gst_ebml_read_float (ebml, &id, &dur_f)) != GST_FLOW_OK)
+ break;
+
+ if (dur_f <= 0.0) {
+ GST_WARNING_OBJECT (common->sinkpad, "Invalid duration %lf", dur_f);
+ break;
+ }
+
+ GST_DEBUG_OBJECT (common->sinkpad, "Duration: %lf", dur_f);
+ break;
+ }
+
+ case GST_MATROSKA_ID_WRITINGAPP:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (common->sinkpad, "WritingApp: %s",
+ GST_STR_NULL (text));
+ common->writing_app = text;
+ break;
+ }
+
+ case GST_MATROSKA_ID_MUXINGAPP:{
+ gchar *text;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (common->sinkpad, "MuxingApp: %s",
+ GST_STR_NULL (text));
+ common->muxing_app = text;
+ break;
+ }
+
+ case GST_MATROSKA_ID_DATEUTC:{
+ gint64 time;
+ GstDateTime *datetime;
+ GstTagList *taglist;
+
+ if ((ret = gst_ebml_read_date (ebml, &id, &time)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (common->sinkpad, "DateUTC: %" G_GINT64_FORMAT, time);
+ common->created = time;
+ datetime =
+ gst_date_time_new_from_unix_epoch_utc_usecs (time / GST_USECOND);
+ taglist = gst_tag_list_new (GST_TAG_DATE_TIME, datetime, NULL);
+ gst_tag_list_set_scope (taglist, GST_TAG_SCOPE_GLOBAL);
+ gst_matroska_read_common_found_global_tag (common, el, taglist);
+ gst_date_time_unref (datetime);
+ break;
+ }
+
+ case GST_MATROSKA_ID_TITLE:{
+ gchar *text;
+ GstTagList *taglist;
+
+ if ((ret = gst_ebml_read_utf8 (ebml, &id, &text)) != GST_FLOW_OK)
+ break;
+
+ GST_DEBUG_OBJECT (common->sinkpad, "Title: %s", GST_STR_NULL (text));
+ taglist = gst_tag_list_new (GST_TAG_TITLE, text, NULL);
+ gst_tag_list_set_scope (taglist, GST_TAG_SCOPE_GLOBAL);
+ gst_matroska_read_common_found_global_tag (common, el, taglist);
+ g_free (text);
+ break;
+ }
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (common, ebml,
+ "SegmentInfo", id);
+ break;
+
+ /* fall through */
+ case GST_MATROSKA_ID_SEGMENTUID:
+ case GST_MATROSKA_ID_SEGMENTFILENAME:
+ case GST_MATROSKA_ID_PREVUID:
+ case GST_MATROSKA_ID_PREVFILENAME:
+ case GST_MATROSKA_ID_NEXTUID:
+ case GST_MATROSKA_ID_NEXTFILENAME:
+ case GST_MATROSKA_ID_SEGMENTFAMILY:
+ case GST_MATROSKA_ID_CHAPTERTRANSLATE:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ if (dur_f > 0.0) {
+ GstClockTime dur_u;
+
+ dur_u = gst_gdouble_to_guint64 (dur_f *
+ gst_guint64_to_gdouble (common->time_scale));
+ if (GST_CLOCK_TIME_IS_VALID (dur_u) && dur_u <= G_MAXINT64)
+ common->segment.duration = dur_u;
+ }
+
+ DEBUG_ELEMENT_STOP (common, ebml, "SegmentInfo", ret);
+
+ common->segmentinfo_parsed = TRUE;
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_matroska_read_common_parse_metadata_id_simple_tag (GstMatroskaReadCommon *
+ common, GstEbmlRead * ebml, GstTagList ** p_taglist, gchar * parent)
+{
+ /* FIXME: check if there are more useful mappings */
+ static const struct
+ {
+ const gchar *matroska_tagname;
+ const gchar *gstreamer_tagname;
+ }
+
+ /* *INDENT-OFF* */
+ tag_conv[] = {
+ {
+ /* The following list has the _same_ order as the one in Matroska spec. Please, don't mess it up. */
+ /* TODO: Nesting information:
+ ORIGINAL A special tag that is meant to have other tags inside (using nested tags) to describe the original work of art that this item is based on. All tags in this list can be used "under" the ORIGINAL tag like LYRICIST, PERFORMER, etc.
+ SAMPLE A tag that contains other tags to describe a sample used in the targeted item taken from another work of art. All tags in this list can be used "under" the SAMPLE tag like TITLE, ARTIST, DATE_RELEASED, etc.
+ COUNTRY The name of the country (biblio ISO-639-2) that is meant to have other tags inside (using nested tags) to country specific information about the item. All tags in this list can be used "under" the COUNTRY_SPECIFIC tag like LABEL, PUBLISH_RATING, etc.
+ */
+
+ /* Organizational Information */
+ GST_MATROSKA_TAG_ID_TOTAL_PARTS, GST_TAG_TRACK_COUNT}, {
+ GST_MATROSKA_TAG_ID_PART_NUMBER, GST_TAG_TRACK_NUMBER}, {
+ /* TODO: PART_OFFSET A number to add to PART_NUMBER when the parts at that level don't start at 1. (e.g. if TargetType is TRACK, the track number of the second audio CD) */
+
+ /* Titles */
+ GST_MATROSKA_TAG_ID_SUBTITLE, GST_TAG_TITLE}, { /* Sub Title of the entity. Since we're concat'ing all title-like entities anyway, might as well add the sub-title. */
+ GST_MATROSKA_TAG_ID_TITLE, GST_TAG_TITLE}, {
+ GST_MATROSKA_TAG_ID_ALBUM, GST_TAG_ALBUM}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
+
+ /* TODO: Nested Information:
+ URL URL corresponding to the tag it's included in.
+ SORT_WITH A child element to indicate what alternative value the parent tag can have to be sorted, for example "Pet Shop Boys" instead of "The Pet Shop Boys". Or "Marley Bob" and "Marley Ziggy" (no comma needed).
+ INSTRUMENTS The instruments that are being used/played, separated by a comma. It should be a child of the following tags: ARTIST, LEAD_PERFORMER or ACCOMPANIMENT.
+ EMAIL Email corresponding to the tag it's included in.
+ ADDRESS The physical address of the entity. The address should include a country code. It can be useful for a recording label.
+ FAX The fax number corresponding to the tag it's included in. It can be useful for a recording label.
+ PHONE The phone number corresponding to the tag it's included in. It can be useful for a recording label.
+ */
+
+ /* Entities */
+ GST_MATROSKA_TAG_ID_ARTIST, GST_TAG_ARTIST}, {
+ GST_MATROSKA_TAG_ID_LEAD_PERFORMER, GST_TAG_PERFORMER}, {
+ GST_MATROSKA_TAG_ID_ACCOMPANIMENT, GST_TAG_PERFORMER}, { /* Band/orchestra/accompaniment/musician. This is akin to the TPE2 tag in ID3. */
+ GST_MATROSKA_TAG_ID_COMPOSER, GST_TAG_COMPOSER}, {
+ /* ARRANGER The person who arranged the piece, e.g., Ravel. */
+ GST_MATROSKA_TAG_ID_LYRICS, GST_TAG_LYRICS}, { /* The lyrics corresponding to a song (in case audio synchronization is not known or as a doublon to a subtitle track). Editing this value when subtitles are found should also result in editing the subtitle track for more consistency. */
+ /* LYRICIST The person who wrote the lyrics for a musical item. This is akin to the TEXT tag in ID3. */
+ GST_MATROSKA_TAG_ID_CONDUCTOR, GST_TAG_PERFORMER}, { /* Conductor/performer refinement. This is akin to the TPE3 tag in ID3. */
+ /* DIRECTOR This is akin to the IART tag in RIFF. */
+ GST_MATROSKA_TAG_ID_AUTHOR, GST_TAG_ARTIST}, {
+ /* ASSISTANT_DIRECTOR The name of the assistant director. */
+ /* DIRECTOR_OF_PHOTOGRAPHY The name of the director of photography, also known as cinematographer. This is akin to the ICNM tag in Extended RIFF. */
+ /* SOUND_ENGINEER The name of the sound engineer or sound recordist. */
+ /* ART_DIRECTOR The person who oversees the artists and craftspeople who build the sets. */
+ /* PRODUCTION_DESIGNER Artist responsible for designing the overall visual appearance of a movie. */
+ /* CHOREGRAPHER The name of the choregrapher */
+ /* COSTUME_DESIGNER The name of the costume designer */
+ /* ACTOR An actor or actress playing a role in this movie. This is the person's real name, not the character's name the person is playing. */
+ /* CHARACTER The name of the character an actor or actress plays in this movie. This should be a sub-tag of an ACTOR tag in order not to cause ambiguities. */
+ /* WRITTEN_BY The author of the story or script (used for movies and TV shows). */
+ /* SCREENPLAY_BY The author of the screenplay or scenario (used for movies and TV shows). */
+ /* EDITED_BY This is akin to the IEDT tag in Extended RIFF. */
+ /* PRODUCER Produced by. This is akin to the IPRO tag in Extended RIFF. */
+ /* COPRODUCER The name of a co-producer. */
+ /* EXECUTIVE_PRODUCER The name of an executive producer. */
+ /* DISTRIBUTED_BY This is akin to the IDST tag in Extended RIFF. */
+ /* MASTERED_BY The engineer who mastered the content for a physical medium or for digital distribution. */
+ GST_MATROSKA_TAG_ID_ENCODED_BY, GST_TAG_ENCODED_BY}, { /* This is akin to the TENC tag in ID3. */
+ /* MIXED_BY DJ mix by the artist specified */
+ /* REMIXED_BY Interpreted, remixed, or otherwise modified by. This is akin to the TPE4 tag in ID3. */
+ /* PRODUCTION_STUDIO This is akin to the ISTD tag in Extended RIFF. */
+ /* THANKS_TO A very general tag for everyone else that wants to be listed. */
+ /* PUBLISHER This is akin to the TPUB tag in ID3. */
+ /* LABEL The record label or imprint on the disc. */
+ /* Search / Classification */
+ GST_MATROSKA_TAG_ID_GENRE, GST_TAG_GENRE}, {
+ /* MOOD Intended to reflect the mood of the item with a few keywords, e.g. "Romantic", "Sad" or "Uplifting". The format follows that of the TMOO tag in ID3. */
+ /* ORIGINAL_MEDIA_TYPE Describes the original type of the media, such as, "DVD", "CD", "computer image," "drawing," "lithograph," and so forth. This is akin to the TMED tag in ID3. */
+ /* CONTENT_TYPE The type of the item. e.g. Documentary, Feature Film, Cartoon, Music Video, Music, Sound FX, ... */
+ /* SUBJECT Describes the topic of the file, such as "Aerial view of Seattle." */
+ GST_MATROSKA_TAG_ID_DESCRIPTION, GST_TAG_DESCRIPTION}, { /* A short description of the content, such as "Two birds flying." */
+ GST_MATROSKA_TAG_ID_KEYWORDS, GST_TAG_KEYWORDS}, { /* Keywords to the item separated by a comma, used for searching. */
+ /* SUMMARY A plot outline or a summary of the story. */
+ /* SYNOPSIS A description of the story line of the item. */
+ /* INITIAL_KEY The initial key that a musical track starts in. The format is identical to ID3. */
+ /* PERIOD Describes the period that the piece is from or about. For example, "Renaissance". */
+ /* LAW_RATING Depending on the country it's the format of the rating of a movie (P, R, X in the USA, an age in other countries or a URI defining a logo). */
+ /* ICRA The ICRA content rating for parental control. (Previously RSACi) */
+
+ /* Temporal Information */
+ GST_MATROSKA_TAG_ID_DATE_RELEASED, GST_TAG_DATE}, { /* The time that the item was originally released. This is akin to the TDRL tag in ID3. */
+ GST_MATROSKA_TAG_ID_DATE_RECORDED, GST_TAG_DATE}, { /* The time that the recording began. This is akin to the TDRC tag in ID3. */
+ GST_MATROSKA_TAG_ID_DATE_ENCODED, GST_TAG_DATE}, { /* The time that the encoding of this item was completed began. This is akin to the TDEN tag in ID3. */
+ GST_MATROSKA_TAG_ID_DATE_TAGGED, GST_TAG_DATE}, { /* The time that the tags were done for this item. This is akin to the TDTG tag in ID3. */
+ GST_MATROSKA_TAG_ID_DATE_DIGITIZED, GST_TAG_DATE}, { /* The time that the item was transferred to a digital medium. This is akin to the IDIT tag in RIFF. */
+ GST_MATROSKA_TAG_ID_DATE_WRITTEN, GST_TAG_DATE}, { /* The time that the writing of the music/script began. */
+ GST_MATROSKA_TAG_ID_DATE_PURCHASED, GST_TAG_DATE}, { /* Information on when the file was purchased (see also purchase tags). */
+ GST_MATROSKA_TAG_ID_DATE, GST_TAG_DATE}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
+
+ /* Spacial Information */
+ GST_MATROSKA_TAG_ID_RECORDING_LOCATION, GST_TAG_GEO_LOCATION_NAME}, { /* The location where the item was recorded. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
+ /* COMPOSITION_LOCATION Location that the item was originally designed/written. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. This code is followed by a comma, then more detailed information such as state/province, another comma, and then city. For example, "US, Texas, Austin". This will allow for easy sorting. It is okay to only store the country, or the country and the state/province. More detailed information can be added after the city through the use of additional commas. In cases where the province/state is unknown, but you want to store the city, simply leave a space between the two commas. For example, "US, , Austin". */
+ /* COMPOSER_NATIONALITY Nationality of the main composer of the item, mostly for classical music. The countries corresponding to the string, same 2 octets as in Internet domains, or possibly ISO-3166. */
+
+ /* Personal */
+ GST_MATROSKA_TAG_ID_COMMENT, GST_TAG_COMMENT}, { /* Any comment related to the content. */
+ GST_MATROSKA_TAG_ID_COMMENTS, GST_TAG_COMMENT}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
+ /* PLAY_COUNTER The number of time the item has been played. */
+ /* TODO: RATING A numeric value defining how much a person likes the song/movie. The number is between 0 and 5 with decimal values possible (e.g. 2.7), 5(.0) being the highest possible rating. Other rating systems with different ranges will have to be scaled. */
+
+ /* Technical Information */
+ GST_MATROSKA_TAG_ID_ENCODER, GST_TAG_ENCODER}, {
+ /* ENCODER_SETTINGS A list of the settings used for encoding this item. No specific format. */
+ GST_MATROSKA_TAG_ID_BPS, GST_TAG_BITRATE}, {
+ GST_MATROSKA_TAG_ID_BITSPS, GST_TAG_BITRATE}, { /* Matroska spec does NOT have this tag! Dunno what it was doing here, probably for compatibility. */
+ /* WONTFIX (already handled in another way): FPS The average frames per second of the specified item. This is typically the average number of Blocks per second. In the event that lacing is used, each laced chunk is to be counted as a separate frame. */
+ GST_MATROSKA_TAG_ID_BPM, GST_TAG_BEATS_PER_MINUTE}, {
+ /* MEASURE In music, a measure is a unit of time in Western music like "4/4". It represents a regular grouping of beats, a meter, as indicated in musical notation by the time signature.. The majority of the contemporary rock and pop music you hear on the radio these days is written in the 4/4 time signature. */
+ /* TUNING It is saved as a frequency in hertz to allow near-perfect tuning of instruments to the same tone as the musical piece (e.g. "441.34" in Hertz). The default value is 440.0 Hz. */
+ /* TODO: REPLAYGAIN_GAIN The gain to apply to reach 89dB SPL on playback. This is based on the Replay Gain standard. Note that ReplayGain information can be found at all TargetType levels (track, album, etc). */
+ /* TODO: REPLAYGAIN_PEAK The maximum absolute peak value of the item. This is based on the Replay Gain standard. */
+
+ /* Identifiers */
+ GST_MATROSKA_TAG_ID_ISRC, GST_TAG_ISRC}, {
+ /* MCDI This is a binary dump of the TOC of the CDROM that this item was taken from. This holds the same information as the MCDI in ID3. */
+ /* ISBN International Standard Book Number */
+ /* BARCODE EAN-13 (European Article Numbering) or UPC-A (Universal Product Code) bar code identifier */
+ /* CATALOG_NUMBER A label-specific string used to identify the release (TIC 01 for example). */
+ /* LABEL_CODE A 4-digit or 5-digit number to identify the record label, typically printed as (LC) xxxx or (LC) 0xxxx on CDs medias or covers (only the number is stored). */
+ /* LCCN Library of Congress Control Number */
+
+ /* Commercial */
+ /* PURCHASE_ITEM URL to purchase this file. This is akin to the WPAY tag in ID3. */
+ /* PURCHASE_INFO Information on where to purchase this album. This is akin to the WCOM tag in ID3. */
+ /* PURCHASE_OWNER Information on the person who purchased the file. This is akin to the TOWN tag in ID3. */
+ /* PURCHASE_PRICE The amount paid for entity. There should only be a numeric value in here. Only numbers, no letters or symbols other than ".". For instance, you would store "15.59" instead of "$15.59USD". */
+ /* PURCHASE_CURRENCY The currency type used to pay for the entity. Use ISO-4217 for the 3 letter currency code. */
+
+ /* Legal */
+ GST_MATROSKA_TAG_ID_COPYRIGHT, GST_TAG_COPYRIGHT}, {
+ GST_MATROSKA_TAG_ID_PRODUCTION_COPYRIGHT, GST_TAG_COPYRIGHT}, { /* The copyright information as per the production copyright holder. This is akin to the TPRO tag in ID3. */
+ GST_MATROSKA_TAG_ID_LICENSE, GST_TAG_LICENSE}, { /* The license applied to the content (like Creative Commons variants). */
+ GST_MATROSKA_TAG_ID_TERMS_OF_USE, GST_TAG_LICENSE}
+ };
+ /* *INDENT-ON* */
+ static const struct
+ {
+ const gchar *matroska_tagname;
+ const gchar *gstreamer_tagname;
+ }
+
+ /* *INDENT-OFF* */
+ child_tag_conv[] = {
+ {
+ "TITLE/SORT_WITH=", GST_TAG_TITLE_SORTNAME}, {
+ "ARTIST/SORT_WITH=", GST_TAG_ARTIST_SORTNAME}, {
+ /* ALBUM-stuff is handled elsewhere */
+ "COMPOSER/SORT_WITH=", GST_TAG_TITLE_SORTNAME}, {
+ "ORIGINAL/URL=", GST_TAG_LOCATION}, {
+ /* EMAIL, PHONE, FAX all can be mapped to GST_TAG_CONTACT, there is special
+ * code for that later.
+ */
+ "TITLE/URL=", GST_TAG_HOMEPAGE}, {
+ "ARTIST/URL=", GST_TAG_HOMEPAGE}, {
+ "COPYRIGHT/URL=", GST_TAG_COPYRIGHT_URI}, {
+ "LICENSE/URL=", GST_TAG_LICENSE_URI}, {
+ "LICENSE/URL=", GST_TAG_LICENSE_URI}
+ };
+ /* *INDENT-ON* */
+ GstFlowReturn ret;
+ guint32 id;
+ gchar *value = NULL;
+ gchar *tag = NULL;
+ gchar *name_with_parent = NULL;
+ GstTagList *child_taglist = NULL;
+
+ DEBUG_ELEMENT_START (common, ebml, "SimpleTag");
+
+ if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+ DEBUG_ELEMENT_STOP (common, ebml, "SimpleTag", ret);
+ return ret;
+ }
+
+ if (parent)
+ child_taglist = *p_taglist;
+ else
+ child_taglist = gst_tag_list_new_empty ();
+
+ while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+ /* read all sub-entries */
+
+ if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+ break;
+
+ switch (id) {
+ case GST_MATROSKA_ID_TAGNAME:
+ g_free (tag);
+ tag = NULL;
+ ret = gst_ebml_read_ascii (ebml, &id, &tag);
+ GST_DEBUG_OBJECT (common->sinkpad, "TagName: %s", GST_STR_NULL (tag));
+ g_free (name_with_parent);
+ if (parent != NULL)
+ name_with_parent = g_strdup_printf ("%s/%s", parent, tag);
+ else
+ name_with_parent = g_strdup (tag);
+ break;
+
+ case GST_MATROSKA_ID_TAGSTRING:
+ g_free (value);
+ value = NULL;
+ ret = gst_ebml_read_utf8 (ebml, &id, &value);
+ GST_DEBUG_OBJECT (common->sinkpad, "TagString: %s",
+ GST_STR_NULL (value));
+ break;
+
+ case GST_MATROSKA_ID_SIMPLETAG:
+ /* Recursive SimpleTag */
+ /* This implementation requires tag name of _this_ tag to be known
+ * in order to read its children. It's not in the spec, just the way
+ * the code is written.
+ */
+ if (name_with_parent != NULL) {
+ ret = gst_matroska_read_common_parse_metadata_id_simple_tag (common,
+ ebml, &child_taglist, name_with_parent);
+ break;
+ }
+ /* fall-through */
+
+ default:
+ ret = gst_matroska_read_common_parse_skip (common, ebml, "SimpleTag",
+ id);
+ break;
+
+ case GST_MATROSKA_ID_TAGLANGUAGE:
+ case GST_MATROSKA_ID_TAGDEFAULT:
+ case GST_MATROSKA_ID_TAGBINARY:
+ ret = gst_ebml_read_skip (ebml);
+ break;
+ }
+ }
+
+ DEBUG_ELEMENT_STOP (common, ebml, "SimpleTag", ret);
+
+ if (parent && tag && value && *value != '\0') {
+ /* Don't bother mapping children tags - parent will do that */
+ gchar *key_val;
+ /* TODO: read LANGUAGE sub-tag, and use "key[lc]=val" form */
+ key_val = g_strdup_printf ("%s=%s", name_with_parent, value);
+ gst_tag_list_add (*p_taglist, GST_TAG_MERGE_APPEND,
+ GST_TAG_EXTENDED_COMMENT, key_val, NULL);
+ g_free (key_val);
+ } else if (tag && value && *value != '\0') {
+ gboolean matched = FALSE;
+ guint i;
+
+ for (i = 0; !matched && i < G_N_ELEMENTS (tag_conv); i++) {
+ const gchar *tagname_gst = tag_conv[i].gstreamer_tagname;
+
+ const gchar *tagname_mkv = tag_conv[i].matroska_tagname;
+
+ if (strcmp (tagname_mkv, tag) == 0) {
+ GValue dest = { 0, };
+ GType dest_type = gst_tag_get_type (tagname_gst);
+
+ /* Ensure that any date string is complete */
+ if (dest_type == G_TYPE_DATE) {
+ guint year = 1901, month = 1, day = 1;
+
+ /* Dates can be yyyy-MM-dd, yyyy-MM or yyyy, but we need
+ * the first type */
+ if (sscanf (value, "%04u-%02u-%02u", &year, &month, &day) != 0) {
+ g_free (value);
+ value = g_strdup_printf ("%04u-%02u-%02u", year, month, day);
+ }
+ }
+
+ g_value_init (&dest, dest_type);
+ if (gst_value_deserialize (&dest, value)) {
+ gst_tag_list_add_values (*p_taglist, GST_TAG_MERGE_APPEND,
+ tagname_gst, &dest, NULL);
+ } else {
+ GST_WARNING_OBJECT (common->sinkpad, "Can't transform tag '%s' with "
+ "value '%s' to target type '%s'", tag, value,
+ g_type_name (dest_type));
+ }
+ g_value_unset (&dest);
+ matched = TRUE;
+ }
+ }
+ if (!matched) {
+ gchar *key_val;
+ /* TODO: read LANGUAGE sub-tag, and use "key[lc]=val" form */
+ key_val = g_strdup_printf ("%s=%s", tag, value);
+ gst_tag_list_add (*p_taglist, GST_TAG_MERGE_APPEND,
+ GST_TAG_EXTENDED_COMMENT, key_val, NULL);
+ g_free (key_val);
+ }
+ }
+
+ if (!parent) {
+ /* Map children tags. This only supports top-anchored mapping. That is,
+ * we start at toplevel tag (this tag), and see how its combinations
+ * with its children can be mapped. Which means that grandchildren
+ * are also combined here, with _this_ tag taken into consideration.
+ * If grandchildren can be combined only with children, that combination
+ * will not happen.
+ */
+ gint child_tags_n = gst_tag_list_n_tags (child_taglist);
+ if (child_tags_n > 0) {
+ gint i;
+ for (i = 0; i < child_tags_n; i++) {
+ gint j;
+ const gchar *child_name = gst_tag_list_nth_tag_name (child_taglist, i);
+ guint taglen = gst_tag_list_get_tag_size (child_taglist, child_name);
+ for (j = 0; j < taglen; j++) {
+ gchar *val;
+ gboolean matched = FALSE;
+ gchar *val_pre, *val_post;
+ gint k;
+
+ if (!gst_tag_list_get_string_index (child_taglist, child_name,
+ j, &val))
+ continue;
+ if (!strchr (val, '=')) {
+ g_free (val);
+ continue;
+ }
+ val_post = g_strdup (strchr (val, '=') + 1);
+ val_pre = g_strdup (val);
+ *(strchr (val_pre, '=') + 1) = '\0';
+
+ for (k = 0; !matched && k < G_N_ELEMENTS (child_tag_conv); k++) {
+ const gchar *tagname_gst = child_tag_conv[k].gstreamer_tagname;
+
+ const gchar *tagname_mkv = child_tag_conv[k].matroska_tagname;
+
+ /* TODO: Once "key[lc]=value" form support is implemented,
+ * strip [lc] here. It can't be used in combined tags.
+ * If a tag is not combined, leave [lc] as it is.
+ */
+ if (strcmp (tagname_mkv, val_pre) == 0) {
+ GValue dest = { 0, };
+ GType dest_type = gst_tag_get_type (tagname_gst);
+
+ g_value_init (&dest, dest_type);
+ if (gst_value_deserialize (&dest, val_post)) {
+ gst_tag_list_add_values (*p_taglist, GST_TAG_MERGE_APPEND,
+ tagname_gst, &dest, NULL);
+ } else {
+ GST_WARNING_OBJECT (common->sinkpad,
+ "Can't transform complex tag '%s' " "to target type '%s'",
+ val, g_type_name (dest_type));
+ }
+ g_value_unset (&dest);
+ matched = TRUE;
+ }
+ }
+ if (!matched) {
+ gchar *last_slash = strrchr (val_pre, '/');
+ if (last_slash) {
+ last_slash++;
+ if (strcmp (last_slash, "EMAIL=") == 0 ||
+ strcmp (last_slash, "PHONE=") == 0 ||
+ strcmp (last_slash, "ADDRESS=") == 0 ||
+ strcmp (last_slash, "FAX=") == 0) {
+ gst_tag_list_add (*p_taglist, GST_TAG_MERGE_APPEND,
+ GST_TAG_CONTACT, val_post, NULL);
+ matched = TRUE;
+ }
+ }
+ }
+ if (!matched)
+ gst_tag_list_add (*p_taglist, GST_TAG_MERGE_APPEND,
+ GST_TAG_EXTENDED_COMMENT, val, NULL);
+ g_free (val_post);
+ g_free (val_pre);
+ g_free (val);
+ }
+ }
+ }
+ gst_tag_list_unref (child_taglist);
+ }
+
+ g_free (tag);
+ g_free (value);
+ g_free (name_with_parent);
+
+ return ret;
+}
+
+
+/* Count the demuxer's known tracks by kind.  Results are written through
+ * the three out-parameters: a = audio, v = video, s = subtitle. */
+static void
+gst_matroska_read_common_count_streams (GstMatroskaReadCommon * common,
+    gint * a, gint * v, gint * s)
+{
+  gint n_audio = 0, n_video = 0, n_subtitle = 0;
+  guint idx;
+
+  for (idx = 0; idx < common->src->len; idx++) {
+    GstMatroskaTrackContext *track = g_ptr_array_index (common->src, idx);
+
+    switch (track->type) {
+      case GST_MATROSKA_TRACK_TYPE_VIDEO:
+        n_video++;
+        break;
+      case GST_MATROSKA_TRACK_TYPE_AUDIO:
+        n_audio++;
+        break;
+      case GST_MATROSKA_TRACK_TYPE_SUBTITLE:
+        n_subtitle++;
+        break;
+      default:
+        /* other track types (logo, buttons, ...) are not counted */
+        break;
+    }
+  }
+
+  *a = n_audio;
+  *v = n_video;
+  *s = n_subtitle;
+}
+
+
+/* GstTagForeachFunc: copy every value of one tag from the raw per-Tag list
+ * into ctx->result, remapping some tags according to the Tag element's
+ * TargetTypeValue.  Per the Matroska tagging spec, TargetTypeValue 50 is
+ * the track/movie/episode level, 60 the season/volume level and 70 the
+ * collection/show level; for content that is not audio-only the higher
+ * levels are remapped to the GST_TAG_SHOW_* tags.  Tags with no special
+ * handling (or below the relevant threshold) are copied through unchanged. */
+static void
+gst_matroska_read_common_apply_target_type_foreach (const GstTagList * list,
+    const gchar * tag, gpointer user_data)
+{
+  guint vallen;
+  guint i;
+  TargetTypeContext *ctx = (TargetTypeContext *) user_data;
+
+  vallen = gst_tag_list_get_tag_size (list, tag);
+  if (vallen == 0)
+    return;
+
+  for (i = 0; i < vallen; i++) {
+    const GValue *val_ref;
+
+    val_ref = gst_tag_list_get_value_index (list, tag, i);
+    if (val_ref == NULL)
+      continue;
+
+    /* TODO: use the optional ctx->target_type somehow */
+    if (strcmp (tag, GST_TAG_TITLE) == 0) {
+      /* collection-level title of a video file names the show */
+      if (ctx->target_type_value >= 70 && !ctx->audio_only) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_SHOW_NAME, val_ref);
+        continue;
+      } else if (ctx->target_type_value >= 50) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_TITLE, val_ref);
+        continue;
+      }
+    } else if (strcmp (tag, GST_TAG_TITLE_SORTNAME) == 0) {
+      if (ctx->target_type_value >= 70 && !ctx->audio_only) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_SHOW_SORTNAME, val_ref);
+        continue;
+      } else if (ctx->target_type_value >= 50) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_TITLE_SORTNAME, val_ref);
+        continue;
+      }
+    } else if (strcmp (tag, GST_TAG_ARTIST) == 0) {
+      if (ctx->target_type_value >= 50) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_ARTIST, val_ref);
+        continue;
+      }
+    } else if (strcmp (tag, GST_TAG_ARTIST_SORTNAME) == 0) {
+      if (ctx->target_type_value >= 50) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_ARTIST_SORTNAME, val_ref);
+        continue;
+      }
+    } else if (strcmp (tag, GST_TAG_TRACK_COUNT) == 0) {
+      /* at the season/volume level a "track count" counts volumes */
+      if (ctx->target_type_value >= 60) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_ALBUM_VOLUME_COUNT, val_ref);
+        continue;
+      }
+    } else if (strcmp (tag, GST_TAG_TRACK_NUMBER) == 0) {
+      /* video: level 60 -> season number, level 50 -> episode number;
+       * audio-only: level 50 -> album volume number */
+      if (ctx->target_type_value >= 60 && !ctx->audio_only) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_SHOW_SEASON_NUMBER, val_ref);
+        continue;
+      } else if (ctx->target_type_value >= 50 && !ctx->audio_only) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_SHOW_EPISODE_NUMBER, val_ref);
+        continue;
+      } else if (ctx->target_type_value >= 50) {
+        gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND,
+            GST_TAG_ALBUM_VOLUME_NUMBER, val_ref);
+        continue;
+      }
+    }
+    /* default: pass the value through under its original tag name */
+    gst_tag_list_add_value (ctx->result, GST_TAG_MERGE_APPEND, tag, val_ref);
+  }
+}
+
+
+/* Run the TargetTypeValue remapping over @taglist and return a newly
+ * allocated result list.  Takes ownership of @taglist (it is unreffed
+ * here); @target_type may be NULL. */
+static GstTagList *
+gst_matroska_read_common_apply_target_type (GstMatroskaReadCommon * common,
+    GstTagList * taglist, guint64 target_type_value, gchar * target_type)
+{
+  TargetTypeContext ctx;
+  gint n_audio = 0, n_video = 0, n_subs = 0;
+
+  gst_matroska_read_common_count_streams (common, &n_audio, &n_video, &n_subs);
+
+  ctx.result = gst_tag_list_new_empty ();
+  ctx.target_type = target_type;
+  ctx.target_type_value = target_type_value;
+  /* "audio only" means at least one audio track and nothing else */
+  ctx.audio_only = (n_audio > 0) && (n_video == 0) && (n_subs == 0);
+
+  gst_tag_list_foreach (taglist,
+      gst_matroska_read_common_apply_target_type_foreach, &ctx);
+
+  gst_tag_list_unref (taglist);
+
+  return ctx.result;
+}
+
+
+/* Parse one Tag master element: collect its SimpleTags into a temporary
+ * list, apply the Targets' TargetTypeValue remapping, and then attach the
+ * result either to the matching chapters/editions in the TOC, to the
+ * matching tracks (by track UID), or - when the Tag has no targets - to
+ * the global list at *p_taglist. */
+static GstFlowReturn
+gst_matroska_read_common_parse_metadata_id_tag (GstMatroskaReadCommon * common,
+    GstEbmlRead * ebml, GstTagList ** p_taglist)
+{
+  guint32 id;
+  GstFlowReturn ret;
+  GArray *chapter_targets, *edition_targets, *track_targets;
+  GstTagList *taglist;
+  GList *cur, *internal_cur;
+  guint64 target_type_value = 50;   /* spec default TargetTypeValue */
+  gchar *target_type = NULL;
+
+  DEBUG_ELEMENT_START (common, ebml, "Tag");
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "Tag", ret);
+    return ret;
+  }
+
+  edition_targets = g_array_new (FALSE, FALSE, sizeof (guint64));
+  chapter_targets = g_array_new (FALSE, FALSE, sizeof (guint64));
+  track_targets = g_array_new (FALSE, FALSE, sizeof (guint64));
+  taglist = gst_tag_list_new_empty ();
+  target_type = NULL;
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    /* read all sub-entries */
+
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_SIMPLETAG:
+        /* NULL parent: this is a top-level SimpleTag */
+        ret = gst_matroska_read_common_parse_metadata_id_simple_tag (common,
+            ebml, &taglist, NULL);
+        break;
+
+      case GST_MATROSKA_ID_TARGETS:
+        /* reset to defaults before parsing; a later Targets element
+         * replaces an earlier one */
+        g_free (target_type);
+        target_type = NULL;
+        target_type_value = 50;
+        ret = gst_matroska_read_common_parse_metadata_targets (common, ebml,
+            edition_targets, chapter_targets, track_targets,
+            &target_type_value, &target_type);
+        break;
+
+      default:
+        ret = gst_matroska_read_common_parse_skip (common, ebml, "Tag", id);
+        break;
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (common, ebml, "Tag", ret);
+
+  /* consumes the old taglist and returns the remapped one */
+  taglist = gst_matroska_read_common_apply_target_type (common, taglist,
+      target_type_value, target_type);
+  g_free (target_type);
+
+  /* if tag is chapter/edition specific - try to find that entry */
+  if (G_UNLIKELY (chapter_targets->len > 0 || edition_targets->len > 0 ||
+          track_targets->len > 0)) {
+    gint i;
+    if (chapter_targets->len > 0 || edition_targets->len > 0) {
+      if (common->toc == NULL)
+        GST_WARNING_OBJECT (common->sinkpad,
+            "Found chapter/edition specific tag, but TOC is not present");
+      else {
+        /* walk the public and internal TOCs in lock-step; they are
+         * expected to have the same entry order */
+        cur = gst_toc_get_entries (common->toc);
+        internal_cur = gst_toc_get_entries (common->internal_toc);
+        while (cur != NULL && internal_cur != NULL) {
+          gst_matroska_read_common_parse_toc_tag (cur->data, internal_cur->data,
+              edition_targets, chapter_targets, taglist);
+          cur = cur->next;
+          internal_cur = internal_cur->next;
+        }
+        common->toc_updated = TRUE;
+      }
+    }
+    for (i = 0; i < track_targets->len; i++) {
+      gint j;
+      gboolean found = FALSE;
+      guint64 tgt = g_array_index (track_targets, guint64, i);
+
+      for (j = 0; j < common->src->len; j++) {
+        GstMatroskaTrackContext *stream = g_ptr_array_index (common->src, j);
+
+        if (stream->uid == tgt) {
+          gst_tag_list_insert (stream->tags, taglist, GST_TAG_MERGE_REPLACE);
+          stream->tags_changed = TRUE;
+          found = TRUE;
+        }
+      }
+      if (!found) {
+        /* Cache the track taglist: possibly belongs to a track that will be parsed
+           later in gst_matroska_demux.c:gst_matroska_demux_add_stream (...) */
+        /* NOTE(review): GUINT_TO_POINTER takes a guint, so the 64-bit track
+         * UID is truncated here on platforms where guint is 32-bit - confirm
+         * whether the lookup side truncates identically */
+        gpointer track_uid = GUINT_TO_POINTER (tgt);
+        GstTagList *cached_taglist =
+            g_hash_table_lookup (common->cached_track_taglists, track_uid);
+        if (cached_taglist)
+          gst_tag_list_insert (cached_taglist, taglist, GST_TAG_MERGE_REPLACE);
+        else {
+          /* the hash table takes its own reference */
+          gst_tag_list_ref (taglist);
+          g_hash_table_insert (common->cached_track_taglists, track_uid,
+              taglist);
+        }
+        GST_DEBUG_OBJECT (common->sinkpad,
+            "Found track-specific tag(s), but track %" G_GUINT64_FORMAT
+            " is not known yet, caching", tgt);
+      }
+    }
+  } else
+    /* no targets at all: the tags are global */
+    gst_tag_list_insert (*p_taglist, taglist, GST_TAG_MERGE_APPEND);
+
+  gst_tag_list_unref (taglist);
+  g_array_unref (chapter_targets);
+  g_array_unref (edition_targets);
+  g_array_unref (track_targets);
+
+  return ret;
+}
+
+/* Parse a Tags master element: each contained Tag is handed to
+ * _parse_metadata_id_tag(), and any resulting global tags are posted via
+ * gst_matroska_read_common_found_global_tag().  A Tags element at a byte
+ * offset that was already parsed is skipped to avoid double-posting. */
+GstFlowReturn
+gst_matroska_read_common_parse_metadata (GstMatroskaReadCommon * common,
+    GstElement * el, GstEbmlRead * ebml)
+{
+  GstTagList *taglist;
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint32 id;
+  GList *l;
+  guint64 curpos;
+
+  /* Make sure we don't parse a tags element twice and
+   * post its tags twice */
+  curpos = gst_ebml_read_get_pos (ebml);
+  for (l = common->tags_parsed; l; l = l->next) {
+    guint64 *pos = l->data;
+
+    if (*pos == curpos) {
+      GST_DEBUG_OBJECT (common->sinkpad,
+          "Skipping already parsed Tags at offset %" G_GUINT64_FORMAT, curpos);
+      return GST_FLOW_OK;
+    }
+  }
+
+  /* remember this offset as parsed */
+  common->tags_parsed =
+      g_list_prepend (common->tags_parsed, g_slice_new (guint64));
+  *((guint64 *) common->tags_parsed->data) = curpos;
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "Tags", ret);
+    return ret;
+  }
+
+  taglist = gst_tag_list_new_empty ();
+  gst_tag_list_set_scope (taglist, GST_TAG_SCOPE_GLOBAL);
+  common->toc_updated = FALSE;
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_TAG:
+        ret = gst_matroska_read_common_parse_metadata_id_tag (common, ebml,
+            &taglist);
+        break;
+
+      default:
+        ret = gst_matroska_read_common_parse_skip (common, ebml, "Tags", id);
+        break;
+        /* FIXME: Use to limit the tags to specific pads */
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (common, ebml, "Tags", ret);
+
+  /* only post if something global actually accumulated */
+  if (G_LIKELY (!gst_tag_list_is_empty (taglist)))
+    gst_matroska_read_common_found_global_tag (common, el, taglist);
+  else
+    gst_tag_list_unref (taglist);
+
+  return ret;
+}
+
+/* Peek @peek bytes from the push-mode adapter.  On success *data points
+ * at the mapped bytes and the caller must gst_adapter_unmap() afterwards;
+ * returns GST_FLOW_EOS when not enough data is buffered. */
+static GstFlowReturn
+gst_matroska_read_common_peek_adapter (GstMatroskaReadCommon * common, guint
+    peek, const guint8 ** data)
+{
+  const guint8 *mapped = gst_adapter_map (common->adapter, peek);
+
+  *data = mapped;
+
+  return (mapped != NULL) ? GST_FLOW_OK : GST_FLOW_EOS;
+}
+
+/*
+ * Calls pull_range for (offset,size) without advancing our offset.
+ *
+ * On success, *p_buf (if non-NULL) receives a newly created sub-buffer the
+ * caller owns, and *bytes (if non-NULL) points into the internal cached
+ * buffer's mapping, valid until the cache is replaced.
+ *
+ * NOTE(review): the `offset` parameter is never read; every access below
+ * uses common->offset.  Callers presumably pass common->offset - confirm.
+ */
+GstFlowReturn
+gst_matroska_read_common_peek_bytes (GstMatroskaReadCommon * common, guint64
+    offset, guint size, GstBuffer ** p_buf, guint8 ** bytes)
+{
+  GstFlowReturn ret;
+
+  /* Caching here actually makes much less difference than one would expect.
+   * We do it mainly to avoid pulling buffers of 1 byte all the time */
+  if (common->cached_buffer) {
+    guint64 cache_offset = GST_BUFFER_OFFSET (common->cached_buffer);
+    gsize cache_size = gst_buffer_get_size (common->cached_buffer);
+
+    /* fully satisfied from the cache? */
+    if (cache_offset <= common->offset &&
+        (common->offset + size) <= (cache_offset + cache_size)) {
+      if (p_buf)
+        *p_buf = gst_buffer_copy_region (common->cached_buffer,
+            GST_BUFFER_COPY_ALL, common->offset - cache_offset, size);
+      if (bytes) {
+        /* map lazily and keep the mapping until the cache is dropped */
+        if (!common->cached_data) {
+          gst_buffer_map (common->cached_buffer, &common->cached_map,
+              GST_MAP_READ);
+          common->cached_data = common->cached_map.data;
+        }
+        *bytes = common->cached_data + common->offset - cache_offset;
+      }
+      return GST_FLOW_OK;
+    }
+    /* not enough data in the cache, free cache and get a new one */
+    if (common->cached_data) {
+      gst_buffer_unmap (common->cached_buffer, &common->cached_map);
+      common->cached_data = NULL;
+    }
+    gst_buffer_unref (common->cached_buffer);
+    common->cached_buffer = NULL;
+  }
+
+  /* refill the cache: over-read up to 64 KiB to amortize small peeks */
+  ret = gst_pad_pull_range (common->sinkpad, common->offset,
+      MAX (size, 64 * 1024), &common->cached_buffer);
+  if (ret != GST_FLOW_OK) {
+    common->cached_buffer = NULL;
+    return ret;
+  }
+
+  if (gst_buffer_get_size (common->cached_buffer) >= size) {
+    if (p_buf)
+      *p_buf = gst_buffer_copy_region (common->cached_buffer,
+          GST_BUFFER_COPY_ALL, 0, size);
+    if (bytes) {
+      gst_buffer_map (common->cached_buffer, &common->cached_map, GST_MAP_READ);
+      common->cached_data = common->cached_map.data;
+      *bytes = common->cached_data;
+    }
+    return GST_FLOW_OK;
+  }
+
+  /* Not possible to get enough data, try a last time with
+   * requesting exactly the size we need */
+  gst_buffer_unref (common->cached_buffer);
+  common->cached_buffer = NULL;
+
+  ret =
+      gst_pad_pull_range (common->sinkpad, common->offset, size,
+      &common->cached_buffer);
+  if (ret != GST_FLOW_OK) {
+    GST_DEBUG_OBJECT (common->sinkpad, "pull_range returned %d", ret);
+    if (p_buf)
+      *p_buf = NULL;
+    if (bytes)
+      *bytes = NULL;
+    return ret;
+  }
+
+  if (gst_buffer_get_size (common->cached_buffer) < size) {
+    /* even the exact-size request came back short: treat as end of stream */
+    GST_WARNING_OBJECT (common->sinkpad, "Dropping short buffer at offset %"
+        G_GUINT64_FORMAT ": wanted %u bytes, got %" G_GSIZE_FORMAT " bytes",
+        common->offset, size, gst_buffer_get_size (common->cached_buffer));
+
+    gst_buffer_unref (common->cached_buffer);
+    common->cached_buffer = NULL;
+    if (p_buf)
+      *p_buf = NULL;
+    if (bytes)
+      *bytes = NULL;
+    return GST_FLOW_EOS;
+  }
+
+  if (p_buf)
+    *p_buf = gst_buffer_copy_region (common->cached_buffer,
+        GST_BUFFER_COPY_ALL, 0, size);
+  if (bytes) {
+    gst_buffer_map (common->cached_buffer, &common->cached_map, GST_MAP_READ);
+    common->cached_data = common->cached_map.data;
+    *bytes = common->cached_data;
+  }
+
+  return GST_FLOW_OK;
+}
+
+/* Pull-mode peek helper: read @peek bytes at the current offset without
+ * advancing it.  Only the byte pointer is requested, no GstBuffer. */
+static GstFlowReturn
+gst_matroska_read_common_peek_pull (GstMatroskaReadCommon * common, guint peek,
+    guint8 ** data)
+{
+  GstFlowReturn res;
+
+  res = gst_matroska_read_common_peek_bytes (common, common->offset, peek,
+      NULL, data);
+
+  return res;
+}
+
+/* Peek the next EBML element id and length (pull mode) without consuming
+ * any data; *_needed receives the number of header bytes. */
+GstFlowReturn
+gst_matroska_read_common_peek_id_length_pull (GstMatroskaReadCommon * common,
+    GstElement * el, guint32 * _id, guint64 * _length, guint * _needed)
+{
+  GstPeekData peek_func = (GstPeekData) gst_matroska_read_common_peek_pull;
+
+  return gst_ebml_peek_id_length (_id, _length, _needed, peek_func,
+      (gpointer) common, el, common->offset);
+}
+
+/* Peek the next EBML element id and length (push mode).  The adapter is
+ * mapped by the peek callback and unconditionally unmapped again here
+ * before returning. */
+GstFlowReturn
+gst_matroska_read_common_peek_id_length_push (GstMatroskaReadCommon * common,
+    GstElement * el, guint32 * _id, guint64 * _length, guint * _needed)
+{
+  GstFlowReturn res;
+
+  res = gst_ebml_peek_id_length (_id, _length, _needed,
+      (GstPeekData) gst_matroska_read_common_peek_adapter, (gpointer) common,
+      el, common->offset);
+
+  /* release the mapping taken by peek_adapter */
+  gst_adapter_unmap (common->adapter);
+
+  return res;
+}
+
+/* Parse one ContentEncoding master element (compression or encryption
+ * description for a track) and, if valid, append the resulting
+ * GstMatroskaTrackEncoding to context->encodings.  Encryption entries are
+ * only accepted for WebM files; for those, a protection event carrying the
+ * key ID is queued on the track and context->protection_info is filled in. */
+static GstFlowReturn
+gst_matroska_read_common_read_track_encoding (GstMatroskaReadCommon * common,
+    GstEbmlRead * ebml, GstMatroskaTrackContext * context)
+{
+  GstMatroskaTrackEncoding enc = { 0, };
+  GstFlowReturn ret;
+  guint32 id;
+
+  DEBUG_ELEMENT_START (common, ebml, "ContentEncoding");
+  /* Set default values */
+  enc.scope = 1;
+  /* All other default values are 0 */
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "ContentEncoding", ret);
+    return ret;
+  }
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_CONTENTENCODINGORDER:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        /* the order must be unique among this track's encodings */
+        if (!gst_matroska_read_common_encoding_order_unique (context->encodings,
+                num)) {
+          GST_ERROR_OBJECT (common->sinkpad,
+              "ContentEncodingOrder %" G_GUINT64_FORMAT
+              "is not unique for track %" G_GUINT64_FORMAT, num, context->num);
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+
+        GST_DEBUG_OBJECT (common->sinkpad,
+            "ContentEncodingOrder: %" G_GUINT64_FORMAT, num);
+        enc.order = num;
+        break;
+      }
+      case GST_MATROSKA_ID_CONTENTENCODINGSCOPE:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        /* scope is a non-empty 3-bit bitfield; values outside 1..7 are
+         * invalid (presumably 1=frames, 2=codec data, 4=next encoding,
+         * per the Matroska spec - confirm) */
+        if (num > 7 || num == 0) {
+          GST_ERROR_OBJECT (common->sinkpad, "Invalid ContentEncodingScope %"
+              G_GUINT64_FORMAT, num);
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+
+        GST_DEBUG_OBJECT (common->sinkpad,
+            "ContentEncodingScope: %" G_GUINT64_FORMAT, num);
+        enc.scope = num;
+
+        break;
+      }
+      case GST_MATROSKA_ID_CONTENTENCODINGTYPE:{
+        guint64 num;
+
+        if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK)
+          break;
+
+        /* only 0 (compression) and 1 (encryption) are defined */
+        if (num > 1) {
+          GST_ERROR_OBJECT (common->sinkpad, "Invalid ContentEncodingType %"
+              G_GUINT64_FORMAT, num);
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+
+        if ((!common->is_webm) && (num == GST_MATROSKA_ENCODING_ENCRYPTION)) {
+          GST_ERROR_OBJECT (common->sinkpad,
+              "Encrypted tracks are supported only in WebM");
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+        GST_DEBUG_OBJECT (common->sinkpad,
+            "ContentEncodingType: %" G_GUINT64_FORMAT, num);
+        enc.type = num;
+        break;
+      }
+      case GST_MATROSKA_ID_CONTENTCOMPRESSION:{
+
+        DEBUG_ELEMENT_START (common, ebml, "ContentCompression");
+
+        if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+          break;
+
+        while (ret == GST_FLOW_OK &&
+            gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+          if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+            break;
+
+          switch (id) {
+            case GST_MATROSKA_ID_CONTENTCOMPALGO:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK) {
+                break;
+              }
+              /* only algorithms 0..3 are defined (presumably zlib, bzlib,
+               * lzo1x and header stripping - confirm against spec) */
+              if (num > 3) {
+                GST_ERROR_OBJECT (common->sinkpad, "Invalid ContentCompAlgo %"
+                    G_GUINT64_FORMAT, num);
+                ret = GST_FLOW_ERROR;
+                break;
+              }
+              GST_DEBUG_OBJECT (common->sinkpad,
+                  "ContentCompAlgo: %" G_GUINT64_FORMAT, num);
+              enc.comp_algo = num;
+
+              break;
+            }
+            case GST_MATROSKA_ID_CONTENTCOMPSETTINGS:{
+              guint8 *data;
+              guint64 size;
+
+              /* enc takes ownership of the allocated settings blob */
+              if ((ret =
+                      gst_ebml_read_binary (ebml, &id, &data,
+                          &size)) != GST_FLOW_OK) {
+                break;
+              }
+              enc.comp_settings = data;
+              enc.comp_settings_length = size;
+              GST_DEBUG_OBJECT (common->sinkpad,
+                  "ContentCompSettings of size %" G_GUINT64_FORMAT, size);
+              break;
+            }
+            default:
+              GST_WARNING_OBJECT (common->sinkpad,
+                  "Unknown ContentCompression subelement 0x%x - ignoring", id);
+              ret = gst_ebml_read_skip (ebml);
+              break;
+          }
+        }
+        DEBUG_ELEMENT_STOP (common, ebml, "ContentCompression", ret);
+        break;
+      }
+
+      case GST_MATROSKA_ID_CONTENTENCRYPTION:{
+
+        DEBUG_ELEMENT_START (common, ebml, "ContentEncryption");
+
+        /* only valid if a preceding ContentEncodingType declared encryption */
+        if (enc.type != GST_MATROSKA_ENCODING_ENCRYPTION) {
+          GST_WARNING_OBJECT (common->sinkpad,
+              "Unexpected to have Content Encryption because it isn't encryption type");
+          ret = GST_FLOW_ERROR;
+          break;
+        }
+
+        if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+          break;
+
+        while (ret == GST_FLOW_OK &&
+            gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+          if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+            break;
+
+          switch (id) {
+            case GST_MATROSKA_ID_CONTENTENCALGO:{
+              guint64 num;
+
+              if ((ret = gst_ebml_read_uint (ebml, &id, &num)) != GST_FLOW_OK) {
+                break;
+              }
+
+              if (num > GST_MATROSKA_TRACK_ENCRYPTION_ALGORITHM_AES) {
+                GST_ERROR_OBJECT (common->sinkpad, "Invalid ContentEncAlgo %"
+                    G_GUINT64_FORMAT, num);
+                ret = GST_FLOW_ERROR;
+                break;
+              }
+              GST_DEBUG_OBJECT (common->sinkpad,
+                  "ContentEncAlgo: %" G_GUINT64_FORMAT, num);
+              enc.enc_algo = num;
+
+              break;
+            }
+            case GST_MATROSKA_ID_CONTENTENCAESSETTINGS:{
+
+              DEBUG_ELEMENT_START (common, ebml, "ContentEncAESSettings");
+
+              if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK)
+                break;
+
+              while (ret == GST_FLOW_OK &&
+                  gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+                if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+                  break;
+
+                switch (id) {
+                  case GST_MATROSKA_ID_AESSETTINGSCIPHERMODE:{
+                    guint64 num;
+
+                    if ((ret =
+                            gst_ebml_read_uint (ebml, &id,
+                                &num)) != GST_FLOW_OK) {
+                      break;
+                    }
+                    if (num > 3) {
+                      GST_ERROR_OBJECT (common->sinkpad, "Invalid Cipher Mode %"
+                          G_GUINT64_FORMAT, num);
+                      ret = GST_FLOW_ERROR;
+                      break;
+                    }
+                    GST_DEBUG_OBJECT (common->sinkpad,
+                        "ContentEncAESSettings: %" G_GUINT64_FORMAT, num);
+                    enc.enc_cipher_mode = num;
+                    break;
+                  }
+                  default:
+                    GST_WARNING_OBJECT (common->sinkpad,
+                        "Unknown ContentEncAESSettings subelement 0x%x - ignoring",
+                        id);
+                    ret = gst_ebml_read_skip (ebml);
+                    break;
+                }
+              }
+              DEBUG_ELEMENT_STOP (common, ebml, "ContentEncAESSettings", ret);
+              break;
+            }
+
+            case GST_MATROSKA_ID_CONTENTENCKEYID:{
+              guint8 *data;
+              guint64 size;
+              GstBuffer *keyId_buf;
+              GstEvent *event;
+
+              if ((ret =
+                      gst_ebml_read_binary (ebml, &id, &data,
+                          &size)) != GST_FLOW_OK) {
+                break;
+              }
+              GST_DEBUG_OBJECT (common->sinkpad,
+                  "ContentEncrypt KeyID length : %" G_GUINT64_FORMAT, size);
+              /* the buffer takes ownership of the binary data */
+              keyId_buf = gst_buffer_new_wrapped (data, size);
+
+              /* Push an event containing the Key ID into the queues of all streams. */
+              /* system_id field is set to GST_PROTECTION_UNSPECIFIED_SYSTEM_ID because it isn't specified neither in WebM nor in Matroska spec. */
+              event =
+                  gst_event_new_protection
+                  (GST_PROTECTION_UNSPECIFIED_SYSTEM_ID, keyId_buf,
+                  "matroskademux");
+              GST_TRACE_OBJECT (common->sinkpad,
+                  "adding protection event for stream %d", context->index);
+              g_queue_push_tail (&context->protection_event_queue, event);
+
+              context->protection_info =
+                  gst_structure_new ("application/x-cenc", "iv_size",
+                  G_TYPE_UINT, 8, "encrypted", G_TYPE_BOOLEAN, TRUE, "kid",
+                  GST_TYPE_BUFFER, keyId_buf, NULL);
+
+              gst_buffer_unref (keyId_buf);
+              break;
+            }
+            default:
+              GST_WARNING_OBJECT (common->sinkpad,
+                  "Unknown ContentEncryption subelement 0x%x - ignoring", id);
+              ret = gst_ebml_read_skip (ebml);
+              break;
+          }
+        }
+        DEBUG_ELEMENT_STOP (common, ebml, "ContentEncryption", ret);
+        break;
+      }
+      default:
+        GST_WARNING_OBJECT (common->sinkpad,
+            "Unknown ContentEncoding subelement 0x%x - ignoring", id);
+        ret = gst_ebml_read_skip (ebml);
+        break;
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (common, ebml, "ContentEncoding", ret);
+  if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
+    return ret;
+
+  /* TODO: Check if the combination of values is valid */
+
+  g_array_append_val (context->encodings, enc);
+
+  return ret;
+}
+
+/* Parse a ContentEncodings master element for @context: reads each
+ * ContentEncoding child into context->encodings, sorts the entries by
+ * their declared order, then sets up decoding of the content encodings.
+ * Returns GST_FLOW_OK on success or an error flow return otherwise. */
+GstFlowReturn
+gst_matroska_read_common_read_track_encodings (GstMatroskaReadCommon * common,
+    GstEbmlRead * ebml, GstMatroskaTrackContext * context)
+{
+  GstFlowReturn ret;
+  guint32 id;
+
+  DEBUG_ELEMENT_START (common, ebml, "ContentEncodings");
+
+  if ((ret = gst_ebml_read_master (ebml, &id)) != GST_FLOW_OK) {
+    DEBUG_ELEMENT_STOP (common, ebml, "ContentEncodings", ret);
+    return ret;
+  }
+
+  /* most tracks have at most one encoding, so pre-size for 1 */
+  context->encodings =
+      g_array_sized_new (FALSE, FALSE, sizeof (GstMatroskaTrackEncoding), 1);
+
+  while (ret == GST_FLOW_OK && gst_ebml_read_has_remaining (ebml, 1, TRUE)) {
+    if ((ret = gst_ebml_peek_id (ebml, &id)) != GST_FLOW_OK)
+      break;
+
+    switch (id) {
+      case GST_MATROSKA_ID_CONTENTENCODING:
+        ret = gst_matroska_read_common_read_track_encoding (common, ebml,
+            context);
+        break;
+      default:
+        /* unknown subelements are skipped, not fatal */
+        GST_WARNING_OBJECT (common->sinkpad,
+            "Unknown ContentEncodings subelement 0x%x - ignoring", id);
+        ret = gst_ebml_read_skip (ebml);
+        break;
+    }
+  }
+
+  DEBUG_ELEMENT_STOP (common, ebml, "ContentEncodings", ret);
+  if (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)
+    return ret;
+
+  /* Sort encodings according to their order */
+  g_array_sort (context->encodings,
+      (GCompareFunc) gst_matroska_read_common_encoding_cmp);
+
+  return gst_matroska_decode_content_encodings (context->encodings);
+}
+
+/* GFunc helper used on the tags_parsed list: frees one slice-allocated
+ * guint64 element offset. @user_data is unused. */
+void
+gst_matroska_read_common_free_parsed_el (gpointer mem, gpointer user_data)
+{
+  g_slice_free (guint64, mem);
+}
+
+/* One-time initialisation of the read-common context; counterpart of
+ * gst_matroska_read_common_finalize(). Allocates the push-mode adapter
+ * and the track-taglist cache; everything else starts out NULL/FALSE. */
+void
+gst_matroska_read_common_init (GstMatroskaReadCommon * ctx)
+{
+  ctx->src = NULL;
+  ctx->writing_app = NULL;
+  ctx->muxing_app = NULL;
+  ctx->index = NULL;
+  ctx->global_tags = NULL;
+  ctx->adapter = gst_adapter_new ();
+  ctx->toc = NULL;
+  ctx->internal_toc = NULL;
+  ctx->toc_updated = FALSE;
+  /* direct-hashed table; values own a GstTagList ref that is released
+   * automatically on removal/destroy */
+  ctx->cached_track_taglists =
+      g_hash_table_new_full (NULL, NULL, NULL,
+      (GDestroyNotify) gst_tag_list_unref);
+}
+
+/* Release everything owned by the read-common context. Counterpart of
+ * gst_matroska_read_common_init(); must not be called twice. */
+void
+gst_matroska_read_common_finalize (GstMatroskaReadCommon * ctx)
+{
+  if (ctx->src) {
+    g_ptr_array_free (ctx->src, TRUE);
+    ctx->src = NULL;
+  }
+
+  if (ctx->global_tags) {
+    gst_tag_list_unref (ctx->global_tags);
+    ctx->global_tags = NULL;
+  }
+
+  if (ctx->toc) {
+    gst_toc_unref (ctx->toc);
+    ctx->toc = NULL;
+  }
+  if (ctx->internal_toc) {
+    gst_toc_unref (ctx->internal_toc);
+    ctx->internal_toc = NULL;
+  }
+
+  ctx->toc_updated = FALSE;
+
+  g_object_unref (ctx->adapter);
+  /* NOTE(review): remove_all before unref is redundant (the final unref
+   * already runs the value destroy notify) but harmless */
+  g_hash_table_remove_all (ctx->cached_track_taglists);
+  g_hash_table_unref (ctx->cached_track_taglists);
+
+}
+
+/* Return the demuxer/parser to a clean just-created state: drop all
+ * streams and their pads, clear media info, indexes, parse flags, tags,
+ * the cached pull-mode buffer and the TOCs. */
+void
+gst_matroska_read_common_reset (GstElement * element,
+    GstMatroskaReadCommon * ctx)
+{
+  guint i;
+
+  GST_LOG_OBJECT (ctx->sinkpad, "resetting read context");
+
+  /* reset input */
+  ctx->state = GST_MATROSKA_READ_STATE_START;
+
+  /* clean up existing streams if any */
+  if (ctx->src) {
+    g_assert (ctx->src->len == ctx->num_streams);
+    for (i = 0; i < ctx->src->len; i++) {
+      GstMatroskaTrackContext *context = g_ptr_array_index (ctx->src, i);
+
+      /* the source pad belongs to @element; remove it before freeing
+       * the track context that owns it */
+      if (context->pad != NULL)
+        gst_element_remove_pad (element, context->pad);
+
+      gst_matroska_track_free (context);
+    }
+    g_ptr_array_free (ctx->src, TRUE);
+  }
+  ctx->src = g_ptr_array_new ();
+  ctx->num_streams = 0;
+
+  /* reset media info */
+  g_free (ctx->writing_app);
+  ctx->writing_app = NULL;
+  g_free (ctx->muxing_app);
+  ctx->muxing_app = NULL;
+
+  /* reset stream type */
+  ctx->is_webm = FALSE;
+  ctx->has_video = FALSE;
+
+  /* reset indexes */
+  if (ctx->index) {
+    g_array_free (ctx->index, TRUE);
+    ctx->index = NULL;
+  }
+
+  /* reset timers; 1000000 is the Matroska default TimecodeScale
+   * (nanoseconds per timestamp tick) */
+  ctx->time_scale = 1000000;
+  ctx->created = G_MININT64;    /* sentinel: creation date not known yet */
+
+  /* cues/tracks/segmentinfo */
+  ctx->index_parsed = FALSE;
+  ctx->segmentinfo_parsed = FALSE;
+  ctx->attachments_parsed = FALSE;
+  ctx->chapters_parsed = FALSE;
+
+  /* tags */
+  ctx->global_tags_changed = FALSE;
+  g_list_foreach (ctx->tags_parsed,
+      (GFunc) gst_matroska_read_common_free_parsed_el, NULL);
+  g_list_free (ctx->tags_parsed);
+  ctx->tags_parsed = NULL;
+  if (ctx->global_tags) {
+    gst_tag_list_unref (ctx->global_tags);
+  }
+  ctx->global_tags = gst_tag_list_new_empty ();
+  gst_tag_list_set_scope (ctx->global_tags, GST_TAG_SCOPE_GLOBAL);
+
+  gst_segment_init (&ctx->segment, GST_FORMAT_TIME);
+  ctx->offset = 0;
+  ctx->start_resync_offset = -1;
+  ctx->state_to_restore = -1;
+
+  /* drop the pull-mode cached buffer, unmapping it first if still mapped */
+  if (ctx->cached_buffer) {
+    if (ctx->cached_data) {
+      gst_buffer_unmap (ctx->cached_buffer, &ctx->cached_map);
+      ctx->cached_data = NULL;
+    }
+    gst_buffer_unref (ctx->cached_buffer);
+    ctx->cached_buffer = NULL;
+  }
+
+  /* free chapters TOC if any */
+  if (ctx->toc) {
+    gst_toc_unref (ctx->toc);
+    ctx->toc = NULL;
+  }
+  if (ctx->internal_toc) {
+    gst_toc_unref (ctx->internal_toc);
+    ctx->internal_toc = NULL;
+  }
+  ctx->toc_updated = FALSE;
+}
+
+/* call with object lock held */
+/* Reset per-stream playback state (position, discont flag, EOS marker)
+ * of every track to @time, e.g. around a seek or segment restart.
+ * NOTE(review): the 'full' parameter is currently unused in this body. */
+void
+gst_matroska_read_common_reset_streams (GstMatroskaReadCommon * common,
+    GstClockTime time, gboolean full)
+{
+  gint i;   /* NOTE(review): signed counter compared to guint src->len;
+             * fine for realistic stream counts */
+
+  GST_DEBUG_OBJECT (common->sinkpad, "resetting stream state");
+
+  g_assert (common->src->len == common->num_streams);
+  for (i = 0; i < common->src->len; i++) {
+    GstMatroskaTrackContext *context = g_ptr_array_index (common->src, i);
+    context->pos = time;
+    context->set_discont = TRUE;
+    context->eos = FALSE;
+    context->from_time = GST_CLOCK_TIME_NONE;
+    if (context->type == GST_MATROSKA_TRACK_TYPE_VIDEO) {
+      GstMatroskaTrackVideoContext *videocontext =
+          (GstMatroskaTrackVideoContext *) context;
+      /* demux object lock held by caller */
+      videocontext->earliest_time = GST_CLOCK_TIME_NONE;
+    }
+  }
+}
+
+/* Returns TRUE if no already-registered track uses track number @num,
+ * i.e. @num is safe to assign to a new track. Linear scan over streams. */
+gboolean
+gst_matroska_read_common_tracknumber_unique (GstMatroskaReadCommon * common,
+    guint64 num)
+{
+  gint i;   /* NOTE(review): signed counter vs guint src->len, as elsewhere */
+
+  g_assert (common->src->len == common->num_streams);
+  for (i = 0; i < common->src->len; i++) {
+    GstMatroskaTrackContext *context = g_ptr_array_index (common->src, i);
+
+    if (context->num == num)
+      return FALSE;
+  }
+
+  return TRUE;
+}
diff --git a/gst/matroska/matroska-read-common.h b/gst/matroska/matroska-read-common.h
new file mode 100644
index 0000000000..98cfc2451b
--- /dev/null
+++ b/gst/matroska/matroska-read-common.h
@@ -0,0 +1,175 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ * (c) 2011 Debarshi Ray <rishi@gnu.org>
+ *
+ * matroska-read-common.h: shared by matroska file/stream demuxer and parser
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MATROSKA_READ_COMMON_H__
+#define __GST_MATROSKA_READ_COMMON_H__
+
+#include <glib.h>
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#include "matroska-ids.h"
+
+G_BEGIN_DECLS
+
+GST_DEBUG_CATEGORY_EXTERN(matroskareadcommon_debug);
+
+/* High-level parsing states shared by the Matroska demuxer and parser. */
+typedef enum {
+  GST_MATROSKA_READ_STATE_START,
+  GST_MATROSKA_READ_STATE_SEGMENT,
+  GST_MATROSKA_READ_STATE_HEADER,
+  GST_MATROSKA_READ_STATE_DATA,
+  GST_MATROSKA_READ_STATE_SEEK,
+  GST_MATROSKA_READ_STATE_SCANNING
+} GstMatroskaReadState;
+
+/* State shared between the file/stream demuxer and the parser. */
+typedef struct _GstMatroskaReadCommon {
+#if 0
+  GstIndex *element_index;
+  gint element_index_writer_id;
+#endif
+
+  /* pads */
+  GstPad *sinkpad;
+  GPtrArray *src;               /* of GstMatroskaTrackContext*; len must
+                                 * always equal num_streams */
+  guint num_streams;
+
+  /* metadata */
+  gchar *muxing_app;
+  gchar *writing_app;
+  gint64 created;               /* creation date; G_MININT64 = unknown */
+
+  /* state */
+  GstMatroskaReadState state;
+
+  /* stream type */
+  gboolean is_webm;
+  gboolean has_video;
+
+  /* did we parse cues/tracks/segmentinfo already? */
+  gboolean index_parsed;
+  gboolean segmentinfo_parsed;
+  gboolean attachments_parsed;
+  gboolean chapters_parsed;
+  GList *tags_parsed;           /* of slice-allocated guint64*, freed with
+                                 * gst_matroska_read_common_free_parsed_el() */
+
+  /* chapters stuff */
+  /* Internal toc is used to keep track of the internal UID
+   * which are different from the external StringUID used
+   * in the user toc */
+  GstToc *toc;
+  GstToc *internal_toc;
+  gboolean toc_updated;
+
+  /* start-of-segment and length */
+  guint64 ebml_segment_start;
+  guint64 ebml_segment_length;
+
+  /* a cue (index) table */
+  GArray *index;
+
+  /* timescale in the file */
+  guint64 time_scale;
+
+  /* keeping track of playback position */
+  GstSegment segment;
+
+  GstTagList *global_tags;
+  gboolean global_tags_changed;
+
+  /* pull mode caching */
+  GstBuffer *cached_buffer;
+  guint8 *cached_data;          /* non-NULL while cached_buffer is mapped */
+  GstMapInfo cached_map;
+
+  /* push and pull mode */
+  guint64 offset;
+
+  guint64 start_resync_offset;
+
+  /* state to restore after scanning for invalid data */
+  gint state_to_restore;
+
+  /* push based mode usual suspects */
+  GstAdapter *adapter;
+
+  /* cache for track tags that forward-reference their tracks */
+  GHashTable *cached_track_taglists ;
+
+} GstMatroskaReadCommon;
+
+GstFlowReturn gst_matroska_decode_content_encodings (GArray * encodings);
+gboolean gst_matroska_decode_data (GArray * encodings, gpointer * data_out,
+ gsize * size_out, GstMatroskaTrackEncodingScope scope, gboolean free);
+gboolean
+gst_matroska_parse_protection_meta (gpointer * data_out, gsize * size_out,
+ GstStructure * info_protect, gboolean * encrypted);
+gint gst_matroska_index_seek_find (GstMatroskaIndex * i1, GstClockTime * time,
+ gpointer user_data);
+GstMatroskaIndex * gst_matroska_read_common_do_index_seek (
+ GstMatroskaReadCommon * common, GstMatroskaTrackContext * track, gint64
+ seek_pos, GArray ** _index, gint * _entry_index, GstSearchMode snap_dir);
+void gst_matroska_read_common_found_global_tag (GstMatroskaReadCommon * common,
+ GstElement * el, GstTagList * taglist);
+gint64 gst_matroska_read_common_get_length (GstMatroskaReadCommon * common);
+GstMatroskaTrackContext * gst_matroska_read_common_get_seek_track (
+ GstMatroskaReadCommon * common, GstMatroskaTrackContext * track);
+GstFlowReturn gst_matroska_read_common_parse_index (GstMatroskaReadCommon *
+ common, GstEbmlRead * ebml);
+GstFlowReturn gst_matroska_read_common_parse_info (GstMatroskaReadCommon *
+ common, GstElement * el, GstEbmlRead * ebml);
+GstFlowReturn gst_matroska_read_common_parse_attachments (
+ GstMatroskaReadCommon * common, GstElement * el, GstEbmlRead * ebml);
+GstFlowReturn gst_matroska_read_common_parse_chapters (GstMatroskaReadCommon *
+ common, GstEbmlRead * ebml);
+GstFlowReturn gst_matroska_read_common_parse_header (GstMatroskaReadCommon *
+ common, GstEbmlRead * ebml);
+GstFlowReturn gst_matroska_read_common_parse_metadata (GstMatroskaReadCommon *
+ common, GstElement * el, GstEbmlRead * ebml);
+GstFlowReturn gst_matroska_read_common_parse_skip (GstMatroskaReadCommon *
+ common, GstEbmlRead * ebml, const gchar * parent_name, guint id);
+GstFlowReturn gst_matroska_read_common_peek_bytes (GstMatroskaReadCommon *
+ common, guint64 offset, guint size, GstBuffer ** p_buf, guint8 ** bytes);
+GstFlowReturn gst_matroska_read_common_peek_id_length_pull (GstMatroskaReadCommon *
+ common, GstElement * el, guint32 * _id, guint64 * _length, guint *
+ _needed);
+GstFlowReturn gst_matroska_read_common_peek_id_length_push (GstMatroskaReadCommon *
+ common, GstElement * el, guint32 * _id, guint64 * _length, guint *
+ _needed);
+gint gst_matroska_read_common_stream_from_num (GstMatroskaReadCommon * common,
+ guint track_num);
+GstFlowReturn gst_matroska_read_common_read_track_encodings (
+ GstMatroskaReadCommon * common, GstEbmlRead * ebml,
+ GstMatroskaTrackContext * context);
+void gst_matroska_read_common_reset_streams (GstMatroskaReadCommon * common,
+ GstClockTime time, gboolean full);
+void gst_matroska_read_common_free_parsed_el (gpointer mem, gpointer user_data);
+void gst_matroska_read_common_init (GstMatroskaReadCommon * ctx);
+void gst_matroska_read_common_finalize (GstMatroskaReadCommon * ctx);
+void gst_matroska_read_common_reset (GstElement * element,
+ GstMatroskaReadCommon * ctx);
+gboolean gst_matroska_read_common_tracknumber_unique (GstMatroskaReadCommon *
+ common, guint64 num);
+
+G_END_DECLS
+
+#endif /* __GST_MATROSKA_READ_COMMON_H__ */
diff --git a/gst/matroska/matroska.c b/gst/matroska/matroska.c
new file mode 100644
index 0000000000..f73168747e
--- /dev/null
+++ b/gst/matroska/matroska.c
@@ -0,0 +1,45 @@
+/* GStreamer Matroska muxer/demuxer
+ * (c) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
+ *
+ * matroska.c: plugin loader
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstmatroskaelements.h"
+
+/* Plugin entry point: registers all Matroska/WebM elements. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean ret = FALSE;
+
+  /* |= so the plugin is considered loaded if at least one element
+   * registered successfully */
+  ret |= GST_ELEMENT_REGISTER (matroskademux, plugin);
+  ret |= GST_ELEMENT_REGISTER (matroskaparse, plugin);
+  ret |= GST_ELEMENT_REGISTER (matroskamux, plugin);
+  ret |= GST_ELEMENT_REGISTER (webmmux, plugin);
+
+  return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    matroska,
+    "Matroska and WebM stream handling",
+    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/matroska/meson.build b/gst/matroska/meson.build
new file mode 100644
index 0000000000..d8a6a961fc
--- /dev/null
+++ b/gst/matroska/meson.build
@@ -0,0 +1,30 @@
+# Sources for the combined Matroska/WebM mux+demux plugin.
+matroska_sources = [
+  'ebml-read.c',
+  'ebml-write.c',
+  'gstmatroskaelement.c',
+  'matroska.c',
+  'matroska-demux.c',
+  'matroska-parse.c',
+  'matroska-ids.c',
+  'matroska-mux.c',
+  'matroska-read-common.c',
+  'webm-mux.c',
+  'lzo.c',
+]
+
+# bz2 is optional; HAVE_BZ2 is only set when both the library and its
+# header are present.
+bz2_dep = cc.find_library('bz2', required : get_option('bz2'))
+cdata.set('HAVE_BZ2', bz2_dep.found() and cc.has_header('bzlib.h'))
+
+gstmatroska = library('gstmatroska',
+  matroska_sources,
+  c_args : gst_plugins_good_args,
+  link_args : noseh_link_args,
+  include_directories : [configinc],
+  dependencies : [gstpbutils_dep, gstaudio_dep, gstriff_dep,
+                  gstvideo_dep, gsttag_dep, gstbase_dep,
+                  gst_dep, zlib_dep, bz2_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstmatroska, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstmatroska]
diff --git a/gst/matroska/webm-mux.c b/gst/matroska/webm-mux.c
new file mode 100644
index 0000000000..0738d5a4fe
--- /dev/null
+++ b/gst/matroska/webm-mux.c
@@ -0,0 +1,104 @@
+/* GStreamer WebM muxer
+ * Copyright (c) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-webmmux
+ * @title: webmmux
+ *
+ * webmmux muxes VP8 video and Vorbis audio streams into a WebM file.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 webmmux name=mux ! filesink location=newfile.webm \
+ * uridecodebin uri=file:///path/to/somefile.ogv name=demux \
+ * demux. ! videoconvert ! vp8enc ! queue ! mux.video_0 \
+ * demux. ! progressreport ! audioconvert ! audiorate ! vorbisenc ! queue ! mux.audio_0
+ * ]| This pipeline re-encodes a video file of any format into a WebM file.
+ * |[
+ * gst-launch-1.0 webmmux name=mux ! filesink location=test.webm \
+ * videotestsrc num-buffers=250 ! video/x-raw,framerate=25/1 ! videoconvert ! vp8enc ! queue ! mux.video_0 \
+ * audiotestsrc samplesperbuffer=44100 num-buffers=10 ! audio/x-raw,rate=44100 ! vorbisenc ! queue ! mux.audio_0
+ * ]| This pipeline muxes a test video and a sine wave into a WebM file.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstmatroskaelements.h"
+#include "webm-mux.h"
+
+#define COMMON_VIDEO_CAPS \
+ "width = (int) [ 16, MAX ], " \
+ "height = (int) [ 16, MAX ], " \
+ "framerate = (fraction) [ 0, MAX ]"
+
+#define COMMON_AUDIO_CAPS \
+ "channels = (int) [ 1, MAX ], " \
+ "rate = (int) [ 1, MAX ]"
+
+/* GstWebMMux is a thin subclass of GstMatroskaMux that restricts the
+ * doctype to WebM and the pad caps to WebM-compatible codecs. */
+G_DEFINE_TYPE (GstWebMMux, gst_webm_mux, GST_TYPE_MATROSKA_MUX);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (webmmux, "webmmux",
+    GST_RANK_PRIMARY, GST_TYPE_WEBM_MUX, matroska_element_init (plugin));
+
+static GstStaticPadTemplate webm_src_templ = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/webm; audio/webm")
+    );
+
+/* WebM video is limited to VP8/VP9/AV1 */
+static GstStaticPadTemplate webm_videosink_templ =
+    GST_STATIC_PAD_TEMPLATE ("video_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("video/x-vp8, " COMMON_VIDEO_CAPS ";"
+        "video/x-vp9, " COMMON_VIDEO_CAPS ";" "video/x-av1, " COMMON_VIDEO_CAPS)
+    );
+
+/* WebM audio is limited to Vorbis/Opus */
+static GstStaticPadTemplate webm_audiosink_templ =
+    GST_STATIC_PAD_TEMPLATE ("audio_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("audio/x-vorbis, " COMMON_AUDIO_CAPS ";"
+        "audio/x-opus, " COMMON_AUDIO_CAPS)
+    );
+
+static void
+gst_webm_mux_class_init (GstWebMMuxClass * klass)
+{
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &webm_videosink_templ);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &webm_audiosink_templ);
+  gst_element_class_add_static_pad_template (gstelement_class, &webm_src_templ);
+  gst_element_class_set_static_metadata (gstelement_class, "WebM muxer",
+      "Codec/Muxer",
+      "Muxes video and audio streams into a WebM stream",
+      "GStreamer maintainers <gstreamer-devel@lists.freedesktop.org>");
+}
+
+static void
+gst_webm_mux_init (GstWebMMux * mux)
+{
+  /* all real muxing work happens in the GstMatroskaMux base class; we
+   * only flip it into WebM mode */
+  GST_MATROSKA_MUX (mux)->doctype = GST_MATROSKA_DOCTYPE_WEBM;
+  GST_MATROSKA_MUX (mux)->is_webm = TRUE;
+}
diff --git a/gst/matroska/webm-mux.h b/gst/matroska/webm-mux.h
new file mode 100644
index 0000000000..6fee844633
--- /dev/null
+++ b/gst/matroska/webm-mux.h
@@ -0,0 +1,49 @@
+/* GStreamer WebM muxer
+ * Copyright (c) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_WEBM_MUX_H__
+#define __GST_WEBM_MUX_H__
+
+#include "matroska-mux.h"
+
+/* Standard GObject boilerplate for the WebM muxer type. */
+#define GST_TYPE_WEBM_MUX \
+  (gst_webm_mux_get_type ())
+#define GST_WEBM_MUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_WEBM_MUX, GstWebMMux))
+#define GST_WEBM_MUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_WEBM_MUX, GstWebMMuxClass))
+#define GST_IS_WEBM_MUX(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_WEBM_MUX))
+#define GST_IS_WEBM_MUX_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_WEBM_MUX))
+
+typedef struct _GstWebMMux GstWebMMux;
+typedef struct _GstWebMMuxClass GstWebMMuxClass;
+
+/* No extra state: WebM mode is configured on the parent in _init() */
+struct _GstWebMMux {
+  GstMatroskaMux matroskamux;
+};
+
+struct _GstWebMMuxClass {
+  GstMatroskaMuxClass matroskamuxclass;
+};
+
+GType gst_webm_mux_get_type (void);
+
+#endif /* __GST_WEBM_MUX_H__ */
diff --git a/gst/meson.build b/gst/meson.build
new file mode 100644
index 0000000000..cbc28caf2d
--- /dev/null
+++ b/gst/meson.build
@@ -0,0 +1,13 @@
+# Build each plugin subdirectory unless the corresponding feature option
+# was explicitly disabled.
+foreach plugin : ['alpha', 'apetag', 'audiofx', 'audioparsers', 'auparse',
+                  'autodetect', 'avi', 'cutter', 'debugutils', 'deinterlace',
+                  'dtmf', 'effectv', 'equalizer', 'flv', 'flx', 'goom',
+                  'goom2k1', 'icydemux', 'id3demux', 'imagefreeze',
+                  'interleave', 'isomp4', 'law', 'level', 'matroska',
+                  'monoscope', 'multifile', 'multipart', 'replaygain', 'rtp',
+                  'rtpmanager', 'rtsp', 'shapewipe', 'smpte', 'spectrum',
+                  'udp', 'videobox', 'videocrop', 'videofilter', 'videomixer',
+                  'wavenc', 'wavparse', 'y4m']
+  if not get_option(plugin).disabled()
+    subdir(plugin)
+  endif
+endforeach
diff --git a/gst/monoscope/README b/gst/monoscope/README
new file mode 100644
index 0000000000..1aff0c0469
--- /dev/null
+++ b/gst/monoscope/README
@@ -0,0 +1,9 @@
+This is a visualization based on the monoscope output plugin from
+alsaplayer.
+
+The monoscope convolution matching code was written by Ralph Loader.
+
+The monoscope.c and monoscope.h files are under the BSD license (without advertising clause).
+
+This implementation is taken from alsaplayer version 0.99.54, at
+http://www.alsaplayer.org/
diff --git a/gst/monoscope/convolve.c b/gst/monoscope/convolve.c
new file mode 100644
index 0000000000..a63e797d84
--- /dev/null
+++ b/gst/monoscope/convolve.c
@@ -0,0 +1,363 @@
+/* Karatsuba convolution
+ *
+ * Copyright (C) 1999 Ralph Loader <suckfish@ihug.co.nz>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ *
+ * Note: 7th December 2004: This file used to be licensed under the GPL,
+ * but we got permission from Ralph Loader to relicense it to LGPL.
+ *
+ * $Id$
+ *
+ */
+
+/* The algorithm is based on the following. For the convolution of a pair
+ * of pairs, (a,b) * (c,d) = (0, a.c, a.d+b.c, b.d), we can reduce the four
+ * multiplications to three, by the formulae a.d+b.c = (a+b).(c+d) - a.c -
+ * b.d. A similar relation enables us to compute a 2n by 2n convolution
+ * using 3 n by n convolutions, and thus a 2^n by 2^n convolution using 3^n
+ * multiplications (as opposed to the 4^n that the quadratic algorithm
+ * takes. */
+
+/* For large n, this is slower than the O(n log n) that the FFT method
+ * takes, but we avoid using complex numbers, and we only have to compute
+ * one convolution, as opposed to 3 FFTs. We have good locality-of-
+ * reference as well, which will help on CPUs with tiny caches. */
+
+/* E.g., for a 512 x 512 convolution, the FFT method takes 55 * 512 = 28160
+ * (real) multiplications, as opposed to 3^9 = 19683 for the Karatsuba
+ * algorithm. We actually want 257 outputs of a 256 x 512 convolution;
+ * that doesn't appear to give an easy advantage for the FFT algorithm, but
+ * for the Karatsuba algorithm, it's easy to use two 256 x 256
+ * convolutions, taking 2 x 3^8 = 12312 multiplications. [This difference
+ * is that the FFT method "wraps" the arrays, doing a 2^n x 2^n -> 2^n,
+ * while the Karatsuba algorithm pads with zeros, doing 2^n x 2^n -> 2.2^n
+ * - 1]. */
+
+/* There's a big lie above, actually... for a 4x4 convolution, it's quicker
+ * to do it using 16 multiplications than the more complex Karatsuba
+ * algorithm... So the recursion bottoms out at 4x4s. This increases the
+ * number of multiplications by a factor of 16/9, but reduces the overheads
+ * dramatically. */
+
+/* The convolution algorithm is implemented as a stack machine. We have a
+ * stack of commands, each in one of the forms "do a 2^n x 2^n
+ * convolution", or "combine these three length 2^n outputs into one
+ * 2^{n+1} output." */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include "convolve.h"
+
+/* One stack-machine instruction: either a convolution to perform (v)
+ * or a combine step (b). b.null doubles as the end-of-stack marker. */
+typedef union stack_entry_s
+{
+  struct
+  {
+    const double *left, *right;
+    double *out;
+  }
+  v;
+  struct
+  {
+    double *main, *null;
+  }
+  b;
+
+}
+stack_entry;
+
+/* Opaque workspace created by convolve_init() and released by
+ * convolve_close(). */
+struct _struct_convolve_state
+{
+  int depth, small, big, stack_size;    /* small = 2^depth, big = 2*small */
+  double *left;
+  double *right;
+  double *scratch;
+  stack_entry *stack;
+};
+
+/*
+ * Initialisation routine - sets up tables and space to work in.
+ * Returns a pointer to internal state, to be used when performing calls.
+ * On error, returns NULL.
+ * The pointer should be freed when it is finished with, by convolve_close().
+ */
+convolve_state *
+convolve_init (int depth)
+{
+  convolve_state *state;
+
+  /* NOTE(review): malloc/calloc results are unchecked, so the documented
+   * "returns NULL on error" contract is not met - an allocation failure
+   * here leads to a NULL dereference later. Consider adding checks with
+   * cleanup of the already-allocated members. */
+  state = malloc (sizeof (convolve_state));
+  state->depth = depth;
+  state->small = (1 << depth);
+  state->big = (2 << depth);
+  state->stack_size = depth * 3;
+  state->left = calloc (state->big, sizeof (double));
+  state->right = calloc (state->small * 3, sizeof (double));
+  state->scratch = calloc (state->small * 3, sizeof (double));
+  state->stack = calloc (state->stack_size + 1, sizeof (stack_entry));
+  return state;
+}
+
+/*
+ * Free the state allocated with convolve_init().
+ * Frees each buffer before the state struct itself; safe to call once
+ * on a fully-initialised state.
+ */
+void
+convolve_close (convolve_state * state)
+{
+  free (state->left);
+  free (state->right);
+  free (state->scratch);
+  free (state->stack);
+  free (state);
+}
+
+/* Base case of the Karatsuba recursion: a direct 4x4 -> 7 convolution
+ * using all 16 multiplications (cheaper than recursing further). */
+static void
+convolve_4 (double *out, const double *left, const double *right)
+/* This does a 4x4 -> 7 convolution. For what it's worth, the slightly odd
+ * ordering gives about a 1% speed up on my Pentium II. */
+{
+  double l0, l1, l2, l3, r0, r1, r2, r3;
+  double a;
+
+  l0 = left[0];
+  r0 = right[0];
+  a = l0 * r0;
+  l1 = left[1];
+  r1 = right[1];
+  out[0] = a;
+  a = (l0 * r1) + (l1 * r0);
+  l2 = left[2];
+  r2 = right[2];
+  out[1] = a;
+  a = (l0 * r2) + (l1 * r1) + (l2 * r0);
+  l3 = left[3];
+  r3 = right[3];
+  out[2] = a;
+
+  out[3] = (l0 * r3) + (l1 * r2) + (l2 * r1) + (l3 * r0);
+  out[4] = (l1 * r3) + (l2 * r2) + (l3 * r1);
+  out[5] = (l2 * r3) + (l3 * r2);
+  out[6] = l3 * r3;
+}
+
+/* Execute the Karatsuba stack machine: repeatedly split "convolve"
+ * entries until size 4, run the base cases, then fold the results back
+ * together via the "combine" entries. */
+static void
+convolve_run (stack_entry * top, unsigned size, double *scratch)
+/* Interpret a stack of commands. The stack starts with two entries; the
+ * convolution to do, and an illegal entry used to mark the stack top. The
+ * size is the number of entries in each input, and must be a power of 2,
+ * and at least 8. It is OK to have out equal to left and/or right.
+ * scratch must have length 3*size. The number of stack entries needed is
+ * 3n-4 where size=2^n. */
+{
+  do {
+    const double *left;
+    const double *right;
+    double *out;
+
+    /* When we get here, the stack top is always a convolve,
+     * with size > 4. So we will split it. We repeatedly split
+     * the top entry until we get to size = 4. */
+
+    left = top->v.left;
+    right = top->v.right;
+    out = top->v.out;
+    top++;
+
+    do {
+      double *s_left, *s_right;
+      int i;
+
+      /* Halve the size. */
+      size >>= 1;
+
+      /* Allocate the scratch areas. */
+      s_left = scratch + size * 3;
+      /* s_right is a length 2*size buffer also used for
+       * intermediate output. */
+      s_right = scratch + size * 4;
+
+      /* Create the intermediate factors: (a+b) and (c+d) halves. */
+      for (i = 0; i < size; i++) {
+        double l = left[i] + left[i + size];
+        double r = right[i] + right[i + size];
+
+        s_left[i + size] = r;
+        s_left[i] = l;
+      }
+
+      /* Push the combine entry onto the stack. */
+      top -= 3;
+      top[2].b.main = out;
+      top[2].b.null = NULL;
+
+      /* Push the low entry onto the stack. This must be
+       * the last of the three sub-convolutions, because
+       * it may overwrite the arguments. */
+      top[1].v.left = left;
+      top[1].v.right = right;
+      top[1].v.out = out;
+
+      /* Push the mid entry onto the stack. */
+      top[0].v.left = s_left;
+      top[0].v.right = s_right;
+      top[0].v.out = s_right;
+
+      /* Leave the high entry in variables. */
+      left += size;
+      right += size;
+      out += size * 2;
+
+    } while (size > 4);
+
+    /* When we get here, the stack top is a group of 3
+     * convolves, with size = 4, followed by some combines. */
+    convolve_4 (out, left, right);
+    convolve_4 (top[0].v.out, top[0].v.left, top[0].v.right);
+    convolve_4 (top[1].v.out, top[1].v.left, top[1].v.right);
+    top += 2;
+
+    /* Now process combines. */
+    do {
+      /* b.main is the output buffer, mid is the middle
+       * part which needs to be adjusted in place, and
+       * then folded back into the output. We do this in
+       * a slightly strange way, so as to avoid having
+       * two loops. */
+      double *out = top->b.main;
+      double *mid = scratch + size * 4;
+      unsigned int i;
+
+      top++;
+      out[size * 2 - 1] = 0;
+      for (i = 0; i < size - 1; i++) {
+        double lo;
+        double hi;
+
+        /* Karatsuba identity: mid' = (a+b)(c+d) - a.c - b.d */
+        lo = mid[0] - (out[0] + out[2 * size]) + out[size];
+        hi = mid[size] - (out[size] + out[3 * size]) + out[2 * size];
+        out[size] = lo;
+        out[2 * size] = hi;
+        out++;
+        mid++;
+      }
+      size <<= 1;
+    } while (top->b.null == NULL);
+  } while (top->b.main != NULL);
+}
+
+/*
+ * convolve_match:
+ * @lastchoice: an array of size SMALL.
+ * @input: an array of size BIG (2*SMALL)
+ * @state: a (non-NULL) pointer returned by convolve_init.
+ *
+ * We find the contiguous SMALL-size sub-array of input that best matches
+ * lastchoice. A measure of how good a sub-array is compared with the lastchoice
+ * is given by the sum of the products of each pair of entries. We maximise
+ * that, by taking an appropriate convolution, and then finding the maximum
+ * entry in the convolutions.
+ *
+ * Returns: the position of the best match
+ */
+int
+convolve_match (const int *lastchoice, const short *input,
+    convolve_state * state)
+{
+  double avg = 0;
+  double best;
+  int p = 0;
+  int i;
+  double *left = state->left;
+  double *right = state->right;
+  double *scratch = state->scratch;
+  stack_entry *top = state->stack + (state->stack_size - 1);
+
+  for (i = 0; i < state->big; i++)
+    left[i] = input[i];
+
+  /* reverse lastchoice so that the convolution computes correlations */
+  for (i = 0; i < state->small; i++) {
+    double a = lastchoice[(state->small - 1) - i];
+
+    right[i] = a;
+    avg += a;
+  }
+
+  /* We adjust the smaller of the two input arrays to have average
+   * value 0. This makes the eventual result insensitive to both
+   * constant offsets and positive multipliers of the inputs. */
+  avg /= state->small;
+  for (i = 0; i < state->small; i++)
+    right[i] -= avg;
+  /* End-of-stack marker. */
+  top[1].b.null = scratch;
+  top[1].b.main = NULL;
+  /* The low (small x small) part, of which we want the high outputs. */
+  top->v.left = left;
+  top->v.right = right;
+  top->v.out = right + state->small;
+  convolve_run (top, state->small, scratch);
+
+  /* The high (small x small) part, of which we want the low outputs. */
+  top->v.left = left + state->small;
+  top->v.right = right;
+  top->v.out = right;
+  convolve_run (top, state->small, scratch);
+
+  /* Now find the best position amongst these. Apart from the first
+   * and last, the required convolution outputs are formed by adding
+   * outputs from the two convolutions above. */
+  best = right[state->big - 1];
+  right[state->big + state->small - 1] = 0;
+  p = -1;
+  for (i = 0; i < state->small; i++) {
+    double a = right[i] + right[i + state->big];
+
+    if (a > best) {
+      best = a;
+      p = i;
+    }
+  }
+  p++;
+
+#if 0
+  {
+    /* This is some debugging code... */
+    best = 0;
+    for (i = 0; i < state->small; i++)
+      best += ((double) input[i + p]) * ((double) lastchoice[i] - avg);
+
+    for (i = 0; i <= state->small; i++) {
+      double tot = 0;
+      unsigned int j;
+
+      for (j = 0; j < state->small; j++)
+        tot += ((double) input[i + j]) * ((double) lastchoice[j] - avg);
+      if (tot > best)
+        printf ("(%i)", i);
+      if (tot != left[i + (state->small - 1)])
+        printf ("!");
+    }
+
+    printf ("%i\n", p);
+  }
+#endif
+
+  return p;
+}
diff --git a/gst/monoscope/convolve.h b/gst/monoscope/convolve.h
new file mode 100644
index 0000000000..d6a7f05cca
--- /dev/null
+++ b/gst/monoscope/convolve.h
@@ -0,0 +1,48 @@
+/* convolve.h: Header for convolutions.
+ *
+ * Copyright (C) 1999 Ralph Loader <suckfish@ihug.co.nz>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ *
+ * Note: 7th December 2004: This file used to be licensed under the GPL,
+ * but we got permission from Ralph Loader to relicense it to LGPL.
+ *
+ *
+ */
+
+#ifndef CONVOLVE_H
+#define CONVOLVE_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Convolution state */
+typedef struct _struct_convolve_state convolve_state;
+
+/* Allocate convolution state for input windows of 2^depth samples.
+ * Free with convolve_close(). NOTE(review): behaviour on allocation
+ * failure is defined in convolve.c, not visible here — presumably
+ * returns NULL; callers should check. */
+convolve_state *convolve_init (int depth);
+/* Release state previously returned by convolve_init(). */
+void convolve_close (convolve_state * state);
+
+/* Find the offset into 'input' that best matches 'lastchoice'
+ * (cross-correlation peak), used to stabilise the scope display.
+ * Returns the chosen offset. */
+int convolve_match (const int * lastchoice,
+ const short int * input,
+ convolve_state * state);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/gst/monoscope/gstmonoscope.c b/gst/monoscope/gstmonoscope.c
new file mode 100644
index 0000000000..867e7ec0e0
--- /dev/null
+++ b/gst/monoscope/gstmonoscope.c
@@ -0,0 +1,598 @@
+/* gstmonoscope.c: implementation of monoscope drawing element
+ * Copyright (C) <2002> Richard Boulton <richard@tartarus.org>
+ * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2006> Wim Taymans <wim at fluendo dot com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-monoscope
+ * @title: monoscope
+ * @see_also: goom
+ *
+ * Monoscope is an audio visualisation element. It creates a coloured
+ * curve of the audio signal like on an oscilloscope.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! audioconvert ! monoscope ! videoconvert ! ximagesink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/video/video.h>
+#include <gst/audio/audio.h>
+#include <string.h>
+#include "gstmonoscope.h"
+#include "monoscope.h"
+
+GST_DEBUG_CATEGORY_STATIC (monoscope_debug);
+#define GST_CAT_DEFAULT monoscope_debug
+
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+#define RGB_ORDER "xRGB"
+#else
+#define RGB_ORDER "BGRx"
+#endif
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) " RGB_ORDER ", "
+ "width = " G_STRINGIFY (scope_width) ", "
+ "height = " G_STRINGIFY (scope_height) ", "
+ "framerate = " GST_VIDEO_FPS_RANGE)
+ );
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (S16) ", "
+ "rate = (int) [ 8000, 96000 ], "
+ "channels = (int) 1, " "layout = (string) interleaved")
+ );
+
+
+#define gst_monoscope_parent_class parent_class
+G_DEFINE_TYPE (GstMonoscope, gst_monoscope, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (monoscope, "monoscope",
+ GST_RANK_NONE, GST_TYPE_MONOSCOPE,
+ GST_DEBUG_CATEGORY_INIT (monoscope_debug, "monoscope", 0,
+ "monoscope element"););
+
+static void gst_monoscope_finalize (GObject * object);
+static GstFlowReturn gst_monoscope_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_monoscope_src_setcaps (GstMonoscope * mono, GstCaps * caps);
+static gboolean gst_monoscope_sink_setcaps (GstMonoscope * mono,
+ GstCaps * caps);
+static void gst_monoscope_reset (GstMonoscope * monoscope);
+static gboolean gst_monoscope_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_monoscope_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstStateChangeReturn gst_monoscope_change_state (GstElement * element,
+ GstStateChange transition);
+
+/* Class setup: install the finalize and state-change vfuncs, register
+ * the static src/sink pad templates and the element metadata. */
+static void
+gst_monoscope_class_init (GstMonoscopeClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->finalize = gst_monoscope_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_monoscope_change_state);
+
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_set_static_metadata (gstelement_class, "Monoscope",
+ "Visualization",
+ "Displays a highly stabilised waveform of audio input",
+ "Richard Boulton <richard@tartarus.org>");
+}
+
+/* Instance setup: create and add the sink pad (chain + event handlers)
+ * and src pad (event handler), create the sample adapter and seed the
+ * default audio/video state. */
+static void
+gst_monoscope_init (GstMonoscope * monoscope)
+{
+ monoscope->sinkpad =
+ gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_chain_function (monoscope->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_monoscope_chain));
+ gst_pad_set_event_function (monoscope->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_monoscope_sink_event));
+ gst_element_add_pad (GST_ELEMENT (monoscope), monoscope->sinkpad);
+
+ monoscope->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+ gst_pad_set_event_function (monoscope->srcpad,
+ GST_DEBUG_FUNCPTR (gst_monoscope_src_event));
+ gst_element_add_pad (GST_ELEMENT (monoscope), monoscope->srcpad);
+
+ monoscope->adapter = gst_adapter_new ();
+ monoscope->next_ts = GST_CLOCK_TIME_NONE;
+ /* one channel of S16: 2 bytes per sample */
+ monoscope->bps = sizeof (gint16);
+
+ /* reset the initial video state */
+ monoscope->width = scope_width;
+ monoscope->height = scope_height;
+ monoscope->fps_num = 25; /* desired frame rate */
+ monoscope->fps_denom = 1;
+ monoscope->visstate = NULL;
+
+ /* reset the initial audio state */
+ monoscope->rate = GST_AUDIO_DEF_RATE;
+}
+
+/* Free the visualisation state (if any) and the adapter, then chain up.
+ * visstate may be NULL when negotiation never happened. */
+static void
+gst_monoscope_finalize (GObject * object)
+{
+ GstMonoscope *monoscope = GST_MONOSCOPE (object);
+
+ if (monoscope->visstate)
+ monoscope_close (monoscope->visstate);
+
+ g_object_unref (monoscope->adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Reset streaming state: drop buffered samples, invalidate timestamps
+ * and the pending segment, and reset QoS bookkeeping (QoS fields are
+ * shared with the src event handler, hence the object lock). */
+static void
+gst_monoscope_reset (GstMonoscope * monoscope)
+{
+ monoscope->next_ts = GST_CLOCK_TIME_NONE;
+
+ gst_adapter_clear (monoscope->adapter);
+ gst_segment_init (&monoscope->segment, GST_FORMAT_UNDEFINED);
+ monoscope->segment_pending = FALSE;
+
+ GST_OBJECT_LOCK (monoscope);
+ monoscope->proportion = 1.0;
+ monoscope->earliest_time = -1;
+ GST_OBJECT_UNLOCK (monoscope);
+}
+
+/* Handle new audio caps on the sink pad: only the sample rate is
+ * needed (format/channels are fixed by the pad template). Always
+ * succeeds; 'rate' keeps its previous value if the field is absent. */
+static gboolean
+gst_monoscope_sink_setcaps (GstMonoscope * monoscope, GstCaps * caps)
+{
+ GstStructure *structure;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ gst_structure_get_int (structure, "rate", &monoscope->rate);
+
+ GST_DEBUG_OBJECT (monoscope, "sample rate = %d", monoscope->rate);
+ return TRUE;
+}
+
+/* Apply negotiated video caps: derive output frame size, per-frame
+ * duration and samples-per-frame, (re)create the visualiser state and
+ * push the caps downstream. Returns FALSE if setting the caps failed
+ * or the visualiser could not be initialised. */
+static gboolean
+gst_monoscope_src_setcaps (GstMonoscope * monoscope, GstCaps * caps)
+{
+ GstStructure *structure;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ gst_structure_get_int (structure, "width", &monoscope->width);
+ gst_structure_get_int (structure, "height", &monoscope->height);
+ gst_structure_get_fraction (structure, "framerate", &monoscope->fps_num,
+ &monoscope->fps_denom);
+
+ /* 4 bytes per pixel (xRGB/BGRx) */
+ monoscope->outsize = monoscope->width * monoscope->height * 4;
+ monoscope->frame_duration = gst_util_uint64_scale_int (GST_SECOND,
+ monoscope->fps_denom, monoscope->fps_num);
+ /* audio samples consumed per video frame */
+ monoscope->spf =
+ gst_util_uint64_scale_int (monoscope->rate, monoscope->fps_denom,
+ monoscope->fps_num);
+
+ GST_DEBUG_OBJECT (monoscope, "dimension %dx%d, framerate %d/%d, spf %d",
+ monoscope->width, monoscope->height, monoscope->fps_num,
+ monoscope->fps_denom, monoscope->spf);
+
+ /* recreate the visualiser state for the new dimensions */
+ if (monoscope->visstate) {
+ monoscope_close (monoscope->visstate);
+ monoscope->visstate = NULL;
+ }
+
+ monoscope->visstate = monoscope_init (monoscope->width, monoscope->height);
+
+ res = gst_pad_set_caps (monoscope->srcpad, caps);
+
+ return res && (monoscope->visstate != NULL);
+}
+
+/* gst_monoscope_src_negotiate:
+ *
+ * Negotiate output video caps with the downstream peer: intersect the
+ * peer caps with our template, fixate towards 320x240 @ 25/1, apply
+ * them via gst_monoscope_src_setcaps(), then set up a buffer pool for
+ * output frames (the peer's proposed pool if it offers one, otherwise
+ * a newly created default pool).
+ *
+ * Returns: TRUE on success, FALSE if the intersection of template and
+ * peer caps is empty.
+ */
+static gboolean
+gst_monoscope_src_negotiate (GstMonoscope * monoscope)
+{
+ GstCaps *othercaps, *target;
+ GstStructure *structure;
+ GstCaps *templ;
+ GstQuery *query;
+ GstBufferPool *pool;
+ GstStructure *config;
+ guint size, min, max;
+
+ templ = gst_pad_get_pad_template_caps (monoscope->srcpad);
+
+ GST_DEBUG_OBJECT (monoscope, "performing negotiation");
+
+ /* see what the peer can do */
+ othercaps = gst_pad_peer_query_caps (monoscope->srcpad, NULL);
+ if (othercaps) {
+ target = gst_caps_intersect (othercaps, templ);
+ gst_caps_unref (othercaps);
+ gst_caps_unref (templ);
+
+ if (gst_caps_is_empty (target))
+ goto no_format;
+
+ target = gst_caps_truncate (target);
+ } else {
+ /* no peer or no answer: fall back to our template caps */
+ target = templ;
+ }
+
+ /* fixate the remaining fields towards sensible defaults */
+ target = gst_caps_make_writable (target);
+ structure = gst_caps_get_structure (target, 0);
+ gst_structure_fixate_field_nearest_int (structure, "width", 320);
+ gst_structure_fixate_field_nearest_int (structure, "height", 240);
+ gst_structure_fixate_field_nearest_fraction (structure, "framerate", 25, 1);
+ if (gst_structure_has_field (structure, "pixel-aspect-ratio"))
+ gst_structure_fixate_field_nearest_fraction (structure,
+ "pixel-aspect-ratio", 1, 1);
+ target = gst_caps_fixate (target);
+
+ gst_monoscope_src_setcaps (monoscope, target);
+
+ /* try to get a bufferpool now */
+ /* find a pool for the negotiated caps now */
+ query = gst_query_new_allocation (target, TRUE);
+
+ if (!gst_pad_peer_query (monoscope->srcpad, query)) {
+ /* not a problem; we use the query defaults below */
+ }
+
+ if (gst_query_get_n_allocation_pools (query) > 0) {
+ /* we got configuration from our peer, parse them */
+ gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
+ } else {
+ pool = NULL;
+ size = monoscope->outsize;
+ min = max = 0;
+ }
+
+ if (pool == NULL) {
+ /* we did not get a pool, make one ourselves then */
+ pool = gst_buffer_pool_new ();
+ }
+
+ config = gst_buffer_pool_get_config (pool);
+ gst_buffer_pool_config_set_params (config, target, size, min, max);
+ gst_buffer_pool_set_config (pool, config);
+
+ if (monoscope->pool) {
+ /* deactivate (not activate) the old pool before dropping our ref so
+ * its buffers are drained and released; matches the teardown in
+ * gst_monoscope_change_state() */
+ gst_buffer_pool_set_active (monoscope->pool, FALSE);
+ gst_object_unref (monoscope->pool);
+ }
+ monoscope->pool = pool;
+
+ /* and activate */
+ gst_buffer_pool_set_active (pool, TRUE);
+
+ gst_query_unref (query);
+ gst_caps_unref (target);
+
+ return TRUE;
+
+no_format:
+ {
+ gst_caps_unref (target);
+ return FALSE;
+ }
+}
+
+/* make sure we are negotiated */
+/* Renegotiate when the src pad is marked for reconfiguration or has no
+ * caps yet. On failure, re-mark the pad so the next buffer retries, and
+ * map the failure to FLUSHING vs NOT_NEGOTIATED depending on pad state. */
+static GstFlowReturn
+ensure_negotiated (GstMonoscope * monoscope)
+{
+ gboolean reconfigure;
+
+ reconfigure = gst_pad_check_reconfigure (monoscope->srcpad);
+
+ /* we don't know an output format yet, pick one */
+ if (reconfigure || !gst_pad_has_current_caps (monoscope->srcpad)) {
+ if (!gst_monoscope_src_negotiate (monoscope)) {
+ gst_pad_mark_reconfigure (monoscope->srcpad);
+ if (GST_PAD_IS_FLUSHING (monoscope->srcpad))
+ return GST_FLOW_FLUSHING;
+ else
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+ }
+ return GST_FLOW_OK;
+}
+
+/* gst_monoscope_chain:
+ *
+ * Sink pad chain function. Accumulates incoming S16 mono samples in the
+ * adapter and, for every 'spf' samples, renders one video frame via
+ * monoscope_update() and pushes it downstream. Forwards a deferred
+ * segment event first so caps are always sent before the segment.
+ * Takes ownership of @inbuf.
+ */
+static GstFlowReturn
+gst_monoscope_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
+{
+ GstFlowReturn flow_ret = GST_FLOW_OK;
+ GstMonoscope *monoscope;
+
+ monoscope = GST_MONOSCOPE (parent);
+
+ /* no sink caps seen yet: we cannot compute samples-per-frame */
+ if (monoscope->rate == 0) {
+ gst_buffer_unref (inbuf);
+ flow_ret = GST_FLOW_NOT_NEGOTIATED;
+ goto out;
+ }
+
+ /* Make sure have an output format */
+ flow_ret = ensure_negotiated (monoscope);
+ if (flow_ret != GST_FLOW_OK) {
+ gst_buffer_unref (inbuf);
+ goto out;
+ }
+
+ /* send the segment deferred in the sink event handler, now that caps
+ * have been negotiated and sent */
+ if (monoscope->segment_pending) {
+ gst_pad_push_event (monoscope->srcpad,
+ gst_event_new_segment (&monoscope->segment));
+ monoscope->segment_pending = FALSE;
+ }
+
+ /* don't try to combine samples from discont buffer */
+ if (GST_BUFFER_FLAG_IS_SET (inbuf, GST_BUFFER_FLAG_DISCONT)) {
+ gst_adapter_clear (monoscope->adapter);
+ monoscope->next_ts = GST_CLOCK_TIME_NONE;
+ }
+
+ /* Match timestamps from the incoming audio */
+ if (GST_BUFFER_TIMESTAMP (inbuf) != GST_CLOCK_TIME_NONE)
+ monoscope->next_ts = GST_BUFFER_TIMESTAMP (inbuf);
+
+ GST_LOG_OBJECT (monoscope,
+ "in buffer has %" G_GSIZE_FORMAT " samples, ts=%" GST_TIME_FORMAT,
+ gst_buffer_get_size (inbuf) / monoscope->bps,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (inbuf)));
+
+ gst_adapter_push (monoscope->adapter, inbuf);
+ inbuf = NULL;
+
+ /* Collect samples until we have enough for an output frame */
+ while (flow_ret == GST_FLOW_OK) {
+ gint16 *samples;
+ GstBuffer *outbuf = NULL;
+ guint32 *pixels, avail, bytesperframe;
+
+ avail = gst_adapter_available (monoscope->adapter);
+ GST_LOG_OBJECT (monoscope, "bytes avail now %u", avail);
+
+ bytesperframe = monoscope->spf * monoscope->bps;
+ if (avail < bytesperframe)
+ break;
+
+ /* FIXME: something is wrong with QoS, we are skipping way too much
+ * stuff even with very low CPU loads */
+#if 0
+ if (monoscope->next_ts != -1) {
+ gboolean need_skip;
+ gint64 qostime;
+
+ qostime = gst_segment_to_running_time (&monoscope->segment,
+ GST_FORMAT_TIME, monoscope->next_ts);
+
+ GST_OBJECT_LOCK (monoscope);
+ /* check for QoS, don't compute buffers that are known to be late */
+ need_skip =
+ GST_CLOCK_TIME_IS_VALID (monoscope->earliest_time) &&
+ qostime <= monoscope->earliest_time;
+ GST_OBJECT_UNLOCK (monoscope);
+
+ if (need_skip) {
+ GST_WARNING_OBJECT (monoscope,
+ "QoS: skip ts: %" GST_TIME_FORMAT ", earliest: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (qostime), GST_TIME_ARGS (monoscope->earliest_time));
+ goto skip;
+ }
+ }
+#endif
+
+ samples = (gint16 *) gst_adapter_map (monoscope->adapter, bytesperframe);
+
+ if (monoscope->spf < convolver_big) {
+ /* fewer samples than the visualiser wants: stretch what we have */
+ gint16 in_data[convolver_big], i;
+ gdouble scale = (gdouble) monoscope->spf / (gdouble) convolver_big;
+
+ for (i = 0; i < convolver_big; ++i) {
+ gdouble off = (gdouble) i * scale;
+ /* clamp to spf - 1: only spf samples were mapped, so samples[spf]
+ * would read one past the end of the mapped region */
+ in_data[i] = samples[MIN ((guint) off, monoscope->spf - 1)];
+ }
+ pixels = monoscope_update (monoscope->visstate, in_data);
+ } else {
+ /* not really correct, but looks much prettier */
+ pixels = monoscope_update (monoscope->visstate, samples);
+ }
+
+ GST_LOG_OBJECT (monoscope, "allocating output buffer");
+ flow_ret = gst_buffer_pool_acquire_buffer (monoscope->pool, &outbuf, NULL);
+ if (flow_ret != GST_FLOW_OK) {
+ gst_adapter_unmap (monoscope->adapter);
+ goto out;
+ }
+
+ gst_buffer_fill (outbuf, 0, pixels, monoscope->outsize);
+
+ GST_BUFFER_TIMESTAMP (outbuf) = monoscope->next_ts;
+ GST_BUFFER_DURATION (outbuf) = monoscope->frame_duration;
+
+ flow_ret = gst_pad_push (monoscope->srcpad, outbuf);
+
+#if 0
+ skip:
+#endif
+
+ if (GST_CLOCK_TIME_IS_VALID (monoscope->next_ts))
+ monoscope->next_ts += monoscope->frame_duration;
+
+ gst_adapter_flush (monoscope->adapter, bytesperframe);
+ }
+
+out:
+
+ return flow_ret;
+}
+
+/* Sink pad event handler. SEGMENT is recorded and deferred (forwarded
+ * from the chain function after caps negotiation, to preserve event
+ * order); CAPS is consumed locally; FLUSH_STOP also resets internal
+ * state; everything else is forwarded downstream. */
+static gboolean
+gst_monoscope_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstMonoscope *monoscope;
+ gboolean res;
+
+ monoscope = GST_MONOSCOPE (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_START:
+ res = gst_pad_push_event (monoscope->srcpad, event);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ gst_monoscope_reset (monoscope);
+ res = gst_pad_push_event (monoscope->srcpad, event);
+ break;
+ case GST_EVENT_SEGMENT:
+ {
+ /* the newsegment values are used to clip the input samples
+ * and to convert the incoming timestamps to running time so
+ * we can do QoS */
+ gst_event_copy_segment (event, &monoscope->segment);
+
+ /* We forward the event from the chain function after caps are
+ * negotiated. Otherwise we would potentially break the event order and
+ * send the segment event before the caps event */
+ monoscope->segment_pending = TRUE;
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_monoscope_sink_setcaps (monoscope, caps);
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_push_event (monoscope->srcpad, event);
+ break;
+ }
+
+ return res;
+}
+
+/* Src pad event handler. QOS events update the proportion/earliest-time
+ * bookkeeping (under the object lock, shared with the chain function)
+ * before being forwarded upstream; all other events are forwarded. */
+static gboolean
+gst_monoscope_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstMonoscope *monoscope;
+ gboolean res;
+
+ monoscope = GST_MONOSCOPE (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_QOS:{
+ gdouble proportion;
+ GstClockTimeDiff diff;
+ GstClockTime timestamp;
+
+ gst_event_parse_qos (event, NULL, &proportion, &diff, &timestamp);
+
+ /* save stuff for the _chain() function */
+ GST_OBJECT_LOCK (monoscope);
+ monoscope->proportion = proportion;
+ if (diff >= 0)
+ /* we're late, this is a good estimate for next displayable
+ * frame (see part-qos.txt) */
+ monoscope->earliest_time =
+ timestamp + 2 * diff + monoscope->frame_duration;
+ else
+ monoscope->earliest_time = timestamp + diff;
+ GST_OBJECT_UNLOCK (monoscope);
+
+ res = gst_pad_push_event (monoscope->sinkpad, event);
+ break;
+ }
+ default:
+ res = gst_pad_push_event (monoscope->sinkpad, event);
+ break;
+ }
+
+ return res;
+}
+
+/* State-change handler: reset stream state when going READY->PAUSED,
+ * and after the parent handled the downward transition, deactivate and
+ * drop the buffer pool on PAUSED->READY. */
+static GstStateChangeReturn
+gst_monoscope_change_state (GstElement * element, GstStateChange transition)
+{
+ GstMonoscope *monoscope = GST_MONOSCOPE (element);
+ GstStateChangeReturn ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_monoscope_reset (monoscope);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* release pool buffers and our reference; negotiation creates a
+ * fresh pool next time */
+ if (monoscope->pool) {
+ gst_buffer_pool_set_active (monoscope->pool, FALSE);
+ gst_object_replace ((GstObject **) & monoscope->pool, NULL);
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/* Plugin entry point: register the single "monoscope" element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ return GST_ELEMENT_REGISTER (monoscope, plugin);
+
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ monoscope,
+ "Monoscope visualization",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/monoscope/gstmonoscope.h b/gst/monoscope/gstmonoscope.h
new file mode 100644
index 0000000000..f8f8211483
--- /dev/null
+++ b/gst/monoscope/gstmonoscope.h
@@ -0,0 +1,86 @@
+/* GStreamer monoscope visualisation element
+ * Copyright (C) <2002> Richard Boulton <richard@tartarus.org>
+ * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) <2006> Wim Taymans <wim at fluendo dot com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MONOSCOPE__
+#define __GST_MONOSCOPE__
+
+/* NOTE(review): G_BEGIN_DECLS is used before any GLib header is
+ * included here; this only compiles because including .c files pull in
+ * <gst/gst.h> first — consider moving the includes above it. */
+G_BEGIN_DECLS
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#define GST_TYPE_MONOSCOPE (gst_monoscope_get_type())
+#define GST_MONOSCOPE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MONOSCOPE,GstMonoscope))
+#define GST_MONOSCOPE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MONOSCOPE,GstMonoscopeClass))
+#define GST_IS_MONOSCOPE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MONOSCOPE))
+#define GST_IS_MONOSCOPE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MONOSCOPE))
+
+typedef struct _GstMonoscope GstMonoscope;
+typedef struct _GstMonoscopeClass GstMonoscopeClass;
+
+/* Instance structure for the monoscope visualisation element. */
+struct _GstMonoscope
+{
+ GstElement element;
+
+ /* pads */
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ /* accumulates incoming audio until a full video frame's worth of
+ * samples is available */
+ GstAdapter *adapter;
+
+ guint64 next_ts; /* expected timestamp of the next frame */
+ guint64 frame_duration; /* video frame duration */
+ gint rate; /* sample rate */
+ guint bps; /* bytes per sample */
+ guint spf; /* samples per video frame */
+ GstBufferPool *pool; /* output buffer pool, set up in negotiation */
+
+ GstSegment segment;
+ gboolean segment_pending; /* segment event deferred until after caps */
+
+ /* QoS stuff *//* with LOCK */
+ gdouble proportion;
+ GstClockTime earliest_time;
+
+ /* video state */
+ gint fps_num;
+ gint fps_denom;
+ gint width;
+ gint height;
+ guint outsize; /* output frame size in bytes (width*height*4) */
+
+ /* visualisation state */
+ struct monoscope_state *visstate;
+};
+
+struct _GstMonoscopeClass
+{
+ GstElementClass parent_class;
+};
+
+GType gst_monoscope_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (monoscope);
+
+G_END_DECLS
+
+#endif /* __GST_MONOSCOPE__ */
+
+
diff --git a/gst/monoscope/meson.build b/gst/monoscope/meson.build
new file mode 100644
index 0000000000..a10ccc937f
--- /dev/null
+++ b/gst/monoscope/meson.build
@@ -0,0 +1,12 @@
+# Build the monoscope visualisation plugin from the element wrapper
+# plus the original scope/convolution implementation.
+gstmonoscope = library('gstmonoscope',
+ 'gstmonoscope.c',
+ 'monoscope.c',
+ 'convolve.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstaudio_dep, gstvideo_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstmonoscope, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstmonoscope]
diff --git a/gst/monoscope/monoscope.c b/gst/monoscope/monoscope.c
new file mode 100644
index 0000000000..69ad0b0033
--- /dev/null
+++ b/gst/monoscope/monoscope.c
@@ -0,0 +1,168 @@
+/* monoscope.cpp
+ * Copyright (C) 2002 Richard Boulton <richard@tartarus.org>
+ * Copyright (C) 1998-2001 Andy Lo A Foe <andy@alsaplayer.org>
+ * Original code by Tinic Uro
+ *
+ * This code is copied from Alsaplayer. The original code was by Tinic Uro and under
+ * the BSD license without an advertising clause. Andy Lo A Foe then relicensed the
+ * code when he used it for Alsaplayer to GPL with Tinic's permission. Richard Boulton
+ * then took this code and made a GPL plugin out of it.
+ *
+ * 7th December 2004 Christian Schaller: Richard Boulton and Andy Lo A Foe gave
+ * permission to relicense their changes under BSD license so we were able to restore the
+ * code to Tinic's original BSD license.
+ *
+ * This file is under what is known as the BSD license:
+ *
+ * Redistribution and use in source and binary forms, with or without modification, i
+ * are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice, this list of
+ * conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ * of conditions and the following disclaimer in the documentation and/or other materials
+ * provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
+ * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+ * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
+ * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "monoscope.h"
+
+#include <string.h>
+#include <stdlib.h>
+
+/* Fill the palette used for the scope bars: a gradient running green ->
+ * yellow over the first quarter of the height and yellow -> red over
+ * the second quarter, plus a dark gray for the grid at the midline.
+ * 'colors' must hold scope_height / 2 entries. */
+static void
+colors_init (guint32 * colors)
+{
+ int i;
+ int hq = (scope_height / 4);
+ int hq1 = hq - 1;
+ int hh1 = (scope_height / 2) - 1;
+ double scl = (256.0 / (double) hq);
+
+ for (i = 0; i < hq; i++) {
+ /* green to yellow */
+ colors[i] = ((int) (i * scl) << 16) + (255 << 8);
+ /* yellow to red */
+ colors[i + hq1] = (255 << 16) + ((int) ((hq1 - i) * scl) << 8);
+ }
+ /* grid color */
+ colors[hh1] = (40 << 16) + (75 << 8);
+}
+
+/* Allocate and initialise the visualiser state. Only the fixed
+ * scope_width x scope_height resolution is supported; returns NULL on
+ * mismatch or allocation failure.
+ * NOTE(review): the convolve_init() result is not checked here — if it
+ * can return NULL on OOM, cstate would be NULL when used later;
+ * verify against convolve.c. */
+struct monoscope_state *
+monoscope_init (guint32 resx, guint32 resy)
+{
+ struct monoscope_state *stateptr;
+
+ /* I didn't program monoscope to only do 256*128, but it works that way */
+ g_return_val_if_fail (resx == scope_width, 0);
+ g_return_val_if_fail (resy == scope_height, 0);
+ stateptr = calloc (1, sizeof (struct monoscope_state));
+ if (stateptr == 0)
+ return 0;
+ stateptr->cstate = convolve_init (convolver_depth);
+ colors_init (stateptr->colors);
+ return stateptr;
+}
+
+/* Free state created by monoscope_init(). 'stateptr' must be non-NULL
+ * (callers guard against NULL before calling). */
+void
+monoscope_close (struct monoscope_state *stateptr)
+{
+ convolve_close (stateptr->cstate);
+ free (stateptr);
+}
+
+/* Render one frame of the scope from convolver_big input samples.
+ * Uses convolve_match() to pick the input offset that best lines up
+ * with the running average waveform (stabilising the display), updates
+ * the running average and auto-gain, draws vertical bars per column and
+ * overlays a grid. Returns a pointer to the internal xRGB pixel buffer
+ * (scope_width * scope_height), valid until the next call. */
+guint32 *
+monoscope_update (struct monoscope_state *stateptr, gint16 data[convolver_big])
+{
+ /* Really, we want samples evenly spread over the available data.
+ * Just taking a continuous chunk will do for now, though. */
+ int i;
+ int foo, bar;
+ int avg;
+ int h;
+ int hh = (scope_height / 2);
+ int hh1 = hh - 1;
+ guint32 *loc;
+
+ double factor;
+ int max = 1;
+ short *thisEq = stateptr->copyEq;
+
+ memcpy (thisEq, data, sizeof (short) * convolver_big);
+ /* advance to the best-matching window within the copied data */
+ thisEq += convolve_match (stateptr->avgEq, thisEq, stateptr->cstate);
+
+ memset (stateptr->display, 0, scope_width * scope_height * sizeof (guint32));
+ /* blend the chosen window into the running average and track the peak */
+ for (i = 0; i < convolver_small; i++) {
+ avg = (thisEq[i] + stateptr->avgEq[i]) >> 1;
+ stateptr->avgEq[i] = avg;
+ avg = abs (avg);
+ max = MAX (max, avg);
+ }
+ /* running average, 4 values is enough to make it follow volume changes
+ * if this value is too large it will converge slowly
+ */
+ stateptr->avgMax += (max / 4) - (stateptr->avgMax / 4);
+
+ /* input is +/- avgMax, output is +/- hh */
+ if (stateptr->avgMax) {
+ factor = (gdouble) hh / stateptr->avgMax;
+ } else {
+ factor = 1.0;
+ }
+
+ for (i = 0; i < scope_width; i++) {
+ /* scale 16bit signed audio values to scope_height */
+ foo = stateptr->avgEq[i] * factor;
+ foo = CLAMP (foo, -hh1, hh1);
+ bar = (i + ((foo + hh) * scope_width));
+ if ((bar > 0) && (bar < (scope_width * scope_height))) {
+ loc = stateptr->display + bar;
+ /* draw up / down bars */
+ if (foo < 0) {
+ for (h = 0; h <= (-foo); h++) {
+ *loc = stateptr->colors[h];
+ loc += scope_width;
+ }
+ } else {
+ for (h = 0; h <= foo; h++) {
+ *loc = stateptr->colors[h];
+ loc -= scope_width;
+ }
+ }
+ }
+ }
+
+ /* Draw grid. */
+ {
+ guint32 gray = stateptr->colors[hh1];
+
+ /* horizontal lines every 16 rows (solid at the midline) */
+ for (i = 16; i < scope_height; i += 16) {
+ for (h = 0; h < scope_width; h += 2) {
+ stateptr->display[(i * scope_width) + h] = gray;
+ if (i == hh)
+ stateptr->display[(i * scope_width) + h + 1] = gray;
+ }
+ }
+ /* vertical lines every 16 columns */
+ for (i = 16; i < scope_width; i += 16) {
+ for (h = 0; h < scope_height; h += 2) {
+ stateptr->display[i + (h * scope_width)] = gray;
+ }
+ }
+ }
+ return stateptr->display;
+}
diff --git a/gst/monoscope/monoscope.h b/gst/monoscope/monoscope.h
new file mode 100644
index 0000000000..1f84dc928a
--- /dev/null
+++ b/gst/monoscope/monoscope.h
@@ -0,0 +1,27 @@
+#ifndef _MONOSCOPE_H
+#define _MONOSCOPE_H
+
+#include <glib.h>
+#include "convolve.h"
+
+/* convolver window sizes: small = 2^depth = 256, big = 2*small = 512 */
+#define convolver_depth 8
+#define convolver_small (1 << convolver_depth)
+#define convolver_big (2 << convolver_depth)
+/* fixed output resolution of the scope */
+#define scope_width 256
+#define scope_height 128
+
+struct monoscope_state {
+ short copyEq[convolver_big]; /* working copy of the input samples */
+ int avgEq[convolver_small]; /* a running average of the last few. */
+ int avgMax; /* running average of max sample. */
+ guint32 display[scope_width * scope_height]; /* xRGB output frame */
+
+ convolve_state *cstate; /* state for convolve_match() */
+ guint32 colors[scope_height / 2]; /* bar gradient + grid palette */
+};
+
+struct monoscope_state * monoscope_init (guint32 resx, guint32 resy);
+guint32 * monoscope_update (struct monoscope_state * stateptr, gint16 data [convolver_big]);
+void monoscope_close (struct monoscope_state * stateptr);
+
+#endif
diff --git a/gst/multifile/gstimagesequencesrc.c b/gst/multifile/gstimagesequencesrc.c
new file mode 100644
index 0000000000..ce289b7219
--- /dev/null
+++ b/gst/multifile/gstimagesequencesrc.c
@@ -0,0 +1,659 @@
+/* GStreamer
+ * Copyright (C) 2006 David A. Schleef ds@schleef.org
+ * Copyright (C) 2019 Cesar Fabian Orccon Chipana
+ * Copyright (C) 2020 Thibault Saunier <tsaunier@igalia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+
+/**
+ * SECTION:element-imagesequencesrc
+ *
+ * Stream image sequences from image files.
+ *
+ * ```
+ * gst-launch-1.0 imagesequencesrc location=image-%05d.jpg start-index=1 stop-index=50 framerate=24/1 ! decodebin ! videoconvert ! autovideosink
+ * ```
+ *
+ * This elements implements the #GstURIHandler interface meaning that you can use it with playbin,
+ * (make sure to quote the URI for the filename pattern, like: `%2505d` instead of the `%05d` you would use
+ * when dealing with the location).
+ *
+ * Note that you can pass the #imagesequencesrc:framerate, #imagesequencesrc:start-index and #imagesequencesrc:stop-index
+ * properties directly in the URI using its 'query' component, for example:
+ *
+ * ```
+ * gst-launch-1.0 playbin uri="imagesequence://path/to/image-%2505d.jpeg?start-index=0&framerate=30/1"
+ * ```
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gsttypefindhelper.h>
+
+#include "gstimagesequencesrc.h"
+
+#define LOCK(self) (g_rec_mutex_lock (&self->fields_lock))
+#define UNLOCK(self) (g_rec_mutex_unlock (&self->fields_lock))
+
+static GstFlowReturn gst_image_sequence_src_create (GstPushSrc * src,
+ GstBuffer ** buffer);
+
+
+static void gst_image_sequence_src_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_image_sequence_src_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static GstCaps *gst_image_sequence_src_getcaps (GstBaseSrc * src,
+ GstCaps * filter);
+static gboolean gst_image_sequence_src_query (GstBaseSrc * src,
+ GstQuery * query);
+static void gst_image_sequence_src_set_caps (GstImageSequenceSrc * self,
+ GstCaps * caps);
+static void gst_image_sequence_src_set_duration (GstImageSequenceSrc * self);
+static gint gst_image_sequence_src_count_frames (GstImageSequenceSrc * self,
+ gboolean can_read);
+
+
+static GstStaticPadTemplate gst_image_sequence_src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GST_DEBUG_CATEGORY_STATIC (gst_image_sequence_src_debug);
+#define GST_CAT_DEFAULT gst_image_sequence_src_debug
+
+enum
+{
+ PROP_0,
+ PROP_LOCATION,
+ PROP_START_INDEX,
+ PROP_STOP_INDEX,
+ PROP_FRAMERATE
+};
+
+#define DEFAULT_LOCATION "%05d"
+#define DEFAULT_START_INDEX 0
+#define DEFAULT_STOP_INDEX -1
+#define DEFAULT_FRAMERATE 30
+
+/* Replace the filename pattern. Call with LOCK taken. Always succeeds. */
+static gboolean
+gst_image_sequence_src_set_location (GstImageSequenceSrc * self,
+ const gchar * location)
+{
+ g_free (self->path);
+ /* g_strdup() maps NULL to NULL, which covers the "unset" case. */
+ self->path = g_strdup (location);
+
+ return TRUE;
+}
+
+/*** GSTURIHANDLER INTERFACE *************************************************/
+
+static GstURIType
+gst_image_sequence_src_uri_get_type (GType type)
+{
+ /* This URI handler only ever acts as a source. */
+ return GST_URI_SRC;
+}
+
+static const gchar *const *
+gst_image_sequence_src_uri_get_protocols (GType type)
+{
+ /* NULL-terminated list of URI schemes handled by this element. */
+ static const gchar *protocols[] = { "imagesequence", NULL };
+
+ return protocols;
+}
+
+static gchar *
+gst_image_sequence_src_uri_get_uri (GstURIHandler * handler)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (handler);
+ gchar *uri = NULL;
+
+ LOCK (self);
+ /* Prefer the URI the caller handed us; otherwise synthesize one from
+ * the location pattern. Returns NULL when neither is set. */
+ if (self->uri)
+ uri = gst_uri_to_string (self->uri);
+ else if (self->path)
+ /* NOTE(review): gst_uri_construct() is deprecated upstream in favor
+ * of building a GstUri — consider migrating; confirm availability. */
+ uri = gst_uri_construct ("imagesequence", self->path);
+ UNLOCK (self);
+
+ return uri;
+}
+
+static gboolean
+gst_image_sequence_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** err)
+{
+ gchar *hostname = NULL, *location = NULL, *tmp;
+ gboolean ret = FALSE;
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (handler);
+ GstUri *ruri = gst_uri_from_string (uri);
+ GHashTable *query = NULL;
+
+ if (!ruri) {
+ g_set_error (err, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "imagesequencesrc URI is invalid: '%s'", uri);
+ goto beach;
+ }
+
+
+ LOCK (self);
+ /* Take ownership of the parsed URI, replacing any previous one. */
+ g_clear_pointer (&self->uri, gst_uri_unref);
+ self->uri = ruri;
+ /* Round-trip the path through a file:// URI to normalize it and
+ * percent-decode escaped characters (e.g. %2505d -> %05d). */
+ tmp = gst_filename_to_uri (gst_uri_get_path (ruri), err);
+ location = g_filename_from_uri (tmp, &hostname, err);
+ g_free (tmp);
+ query = gst_uri_get_query_table (ruri);
+ if (!location || (err != NULL && *err != NULL)) {
+ GST_WARNING_OBJECT (self, "Invalid URI '%s' for imagesequencesrc: %s", uri,
+ (err != NULL && *err != NULL) ? (*err)->message : "unknown error");
+ goto beach;
+ }
+
+ if (hostname && strcmp (hostname, "localhost")) {
+ /* Only 'localhost' is permitted */
+ GST_WARNING_OBJECT (self, "Invalid hostname '%s' for filesrc", hostname);
+ g_set_error (err, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "File URI with invalid hostname '%s'", hostname);
+ goto beach;
+ }
+#ifdef G_OS_WIN32
+ /* Unfortunately, g_filename_from_uri() doesn't handle some UNC paths
+ * correctly on windows, it leaves them with an extra backslash
+ * at the start if they're of the mozilla-style file://///host/path/file
+ * form. Correct this.
+ */
+ if (location[0] == '\\' && location[1] == '\\' && location[2] == '\\')
+ memmove (location, location + 1, strlen (location + 1) + 1);
+#endif
+
+ ret = gst_image_sequence_src_set_location (self, location);
+
+ /* Apply URI query parameters (e.g. ?start-index=0&framerate=30/1) as
+ * object properties; unknown keys are warned about by GObject. */
+ if (query) {
+ GHashTableIter iter;
+ gpointer key, value;
+
+ g_hash_table_iter_init (&iter, query);
+ while (g_hash_table_iter_next (&iter, &key, &value)) {
+ GST_INFO_OBJECT (self, "Setting property from URI: %s=%s", (gchar *) key,
+ (gchar *) value);
+ gst_util_set_object_arg (G_OBJECT (self), key, value);
+ }
+ }
+
+beach:
+ /* NOTE(review): UNLOCK here pairs with the LOCK above; the !ruri early
+ * jump reaches this without having taken the lock — verify GRecMutex
+ * usage on that path. */
+ UNLOCK (self);
+
+ g_free (location);
+ g_free (hostname);
+ g_clear_pointer (&query, g_hash_table_unref);
+
+ return ret;
+}
+
+/* Wire up the GstURIHandler vtable for the "imagesequence" scheme. */
+static void
+gst_image_sequence_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+ GstURIHandlerInterface *iface = g_iface;
+
+ iface->set_uri = gst_image_sequence_src_uri_set_uri;
+ iface->get_uri = gst_image_sequence_src_uri_get_uri;
+ iface->get_protocols = gst_image_sequence_src_uri_get_protocols;
+ iface->get_type = gst_image_sequence_src_uri_get_type;
+}
+
+#define gst_image_sequence_src_parent_class parent_class
+#define _do_init \
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_image_sequence_src_uri_handler_init); \
+ GST_DEBUG_CATEGORY_INIT (gst_image_sequence_src_debug, "imagesequencesrc", \
+ 0, "imagesequencesrc element");
+G_DEFINE_TYPE_WITH_CODE (GstImageSequenceSrc, gst_image_sequence_src,
+ GST_TYPE_PUSH_SRC, _do_init);
+GST_ELEMENT_REGISTER_DEFINE (imagesequencesrc, "imagesequencesrc",
+ GST_RANK_NONE, gst_image_sequence_src_get_type ());
+
+/* Seeking becomes possible once the frame count and framerate are known. */
+static gboolean
+is_seekable (GstBaseSrc * src)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (src);
+
+ return (self->n_frames != 0) && (self->fps_n != 0) && (self->fps_d != 0);
+}
+
+
+/* Translate a time seek into a frame index. */
+static gboolean
+do_seek (GstBaseSrc * bsrc, GstSegment * segment)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (bsrc);
+ gboolean reverse = segment->rate < 0;
+
+ self->reverse = reverse;
+ if (reverse)
+ segment->time = segment->start;
+
+ /* Map the requested stream position onto the file index range. */
+ self->index = self->start_index
+ + segment->position * self->fps_n / (self->fps_d * GST_SECOND);
+
+ return TRUE;
+}
+
+static void
+gst_image_sequence_src_finalize (GObject * object)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (object);
+
+ /* Free plain-memory fields; ref-counted ones are dropped in dispose(). */
+ g_clear_pointer (&self->path, g_free);
+ g_rec_mutex_clear (&self->fields_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_image_sequence_src_dispose (GObject * object)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (object);
+
+ /* Drop ref-counted members; dispose may run more than once, and both
+ * helpers are NULL-safe. */
+ gst_clear_caps (&self->caps);
+ g_clear_pointer (&self->uri, gst_uri_unref);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+static void
+gst_image_sequence_src_class_init (GstImageSequenceSrcClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
+ GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
+
+ gobject_class->set_property = gst_image_sequence_src_set_property;
+ gobject_class->get_property = gst_image_sequence_src_get_property;
+
+
+ g_object_class_install_property (gobject_class, PROP_LOCATION,
+ g_param_spec_string ("location", "File Location",
+ "Pattern to create file names of input files. File names are "
+ "created by calling sprintf() with the pattern and the current "
+ "index.", DEFAULT_LOCATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_START_INDEX,
+ g_param_spec_int ("start-index", "Start Index",
+ "Start value of index. The initial value of index can be set "
+ "either by setting index or start-index. When the end of the loop "
+ "is reached, the index will be set to the value start-index.",
+ 0, INT_MAX, DEFAULT_START_INDEX,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_STOP_INDEX,
+ g_param_spec_int ("stop-index", "Stop Index",
+ "Stop value of index. The special value -1 means no stop.",
+ -1, INT_MAX, DEFAULT_STOP_INDEX,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /* NOTE(review): framerate is registered write-only even though
+ * get_property implements a PROP_FRAMERATE case — confirm intent. */
+ g_object_class_install_property (gobject_class, PROP_FRAMERATE,
+ gst_param_spec_fraction ("framerate", "Framerate",
+ "The output framerate.",
+ 1, 1, G_MAXINT, 1, DEFAULT_FRAMERATE, 1,
+ G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
+
+ gobject_class->finalize = gst_image_sequence_src_finalize;
+ gobject_class->dispose = gst_image_sequence_src_dispose;
+
+ gstbasesrc_class->get_caps = gst_image_sequence_src_getcaps;
+ gstbasesrc_class->query = gst_image_sequence_src_query;
+ gstbasesrc_class->is_seekable = is_seekable;
+ gstbasesrc_class->do_seek = do_seek;
+
+ gstpushsrc_class->create = gst_image_sequence_src_create;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_image_sequence_src_pad_template);
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Image Sequence Source", "Source/File/Video",
+ "Create a video stream from a sequence of image files",
+ "Cesar Fabian Orccon Chipana <cfoch.fabian@gmail.com>\n"
+ "Thibault Saunier <tsaunier@igalia.com>");
+}
+
+/* Instance initializer: set time format and property defaults. */
+static void
+gst_image_sequence_src_init (GstImageSequenceSrc * self)
+{
+ GstBaseSrc *bsrc = GST_BASE_SRC (self);
+
+ /* The debug category is already initialized by _do_init at type
+ * registration; the duplicate per-instance init was removed. */
+
+ gst_base_src_set_format (bsrc, GST_FORMAT_TIME);
+
+ g_rec_mutex_init (&self->fields_lock);
+ self->start_index = DEFAULT_START_INDEX;
+ self->index = 0;
+ self->stop_index = DEFAULT_STOP_INDEX;
+ self->path = NULL;
+ self->caps = NULL;
+ self->n_frames = 0;
+ /* Keep the runtime default in sync with the "framerate" property. */
+ self->fps_n = DEFAULT_FRAMERATE;
+ self->fps_d = 1;
+}
+
+/* Report our caps: ANY until the first image has been typefound, then the
+ * detected caps, intersected with the peer's filter when one is given. */
+static GstCaps *
+gst_image_sequence_src_getcaps (GstBaseSrc * src, GstCaps * filter)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (src);
+
+ GST_DEBUG_OBJECT (self, "returning %" GST_PTR_FORMAT, self->caps);
+
+ if (filter == NULL)
+ return gst_caps_new_any ();
+
+ if (self->caps != NULL)
+ return gst_caps_intersect_full (filter, self->caps,
+ GST_CAPS_INTERSECT_FIRST);
+
+ return gst_caps_ref (filter);
+}
+
+static gboolean
+gst_image_sequence_src_query (GstBaseSrc * bsrc, GstQuery * query)
+{
+ gboolean ret;
+ GstImageSequenceSrc *self;
+
+ self = GST_IMAGE_SEQUENCE_SRC (bsrc);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ switch (format) {
+ case GST_FORMAT_TIME:
+ LOCK (self);
+ /* Lazily compute frame count / duration on first TIME query.
+ * can_read=FALSE: only use configured indices, don't scan disk. */
+ if (self->n_frames <= 0) {
+ gst_image_sequence_src_count_frames (self, FALSE);
+ gst_image_sequence_src_set_duration (self);
+ }
+
+ /* Answer only when a frame count is actually known. */
+ if (self->n_frames > 0)
+ gst_query_set_duration (query, format, self->duration);
+ UNLOCK (self);
+
+ ret = TRUE;
+ break;
+ default:
+ /* Other formats: defer to the base class. */
+ ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
+ }
+ break;
+ }
+ default:
+ ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
+ break;
+ }
+
+ return ret;
+}
+
+static void
+gst_image_sequence_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (object);
+
+ LOCK (self);
+ switch (prop_id) {
+ case PROP_LOCATION:
+ gst_image_sequence_src_set_location (self, g_value_get_string (value));
+ break;
+ case PROP_START_INDEX:
+ self->start_index = g_value_get_int (value);
+ /* Index range changed: refresh frame count (no disk scan here). */
+ gst_image_sequence_src_count_frames (self, FALSE);
+ break;
+ case PROP_STOP_INDEX:
+ self->stop_index = g_value_get_int (value);
+ gst_image_sequence_src_count_frames (self, FALSE);
+ break;
+ case PROP_FRAMERATE:
+ self->fps_n = gst_value_get_fraction_numerator (value);
+ self->fps_d = gst_value_get_fraction_denominator (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ UNLOCK (self);
+}
+
+static void
+gst_image_sequence_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstImageSequenceSrc *self = GST_IMAGE_SEQUENCE_SRC (object);
+
+ LOCK (self);
+ switch (prop_id) {
+ case PROP_LOCATION:
+ g_value_set_string (value, self->path);
+ break;
+ case PROP_START_INDEX:
+ g_value_set_int (value, self->start_index);
+ break;
+ case PROP_STOP_INDEX:
+ g_value_set_int (value, self->stop_index);
+ break;
+ case PROP_FRAMERATE:
+ /* BUG FIX: this case used to *read* the fraction out of the
+ * caller-supplied out-GValue (copy/pasted from set_property),
+ * clobbering fps_n/fps_d with garbage. A getter must write the
+ * current framerate into the out-value instead. (NOTE(review):
+ * the property is registered G_PARAM_WRITABLE only, so GObject
+ * normally rejects reads — consider making it READWRITE.) */
+ gst_value_set_fraction (value, self->fps_n, self->fps_d);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ UNLOCK (self);
+}
+
+/* Call with LOCK */
+static gint
+gst_image_sequence_src_count_frames (GstImageSequenceSrc * self,
+ gboolean can_read)
+{
+ /* When allowed to touch the filesystem and no stop index is configured,
+ * probe successive filenames until the first missing file. */
+ if (can_read && self->stop_index < 0 && self->path) {
+ gint i;
+
+ for (i = self->start_index;; i++) {
+ gchar *filename = g_strdup_printf (self->path, i);
+
+ if (!g_file_test (filename, G_FILE_TEST_IS_REGULAR)) {
+ /* Step back to the last index that did exist. */
+ i--;
+ g_free (filename);
+ break;
+ }
+
+ g_free (filename);
+ }
+ /* Only adopt the probed stop index if at least two files exist;
+ * otherwise stop_index stays -1 (unbounded). */
+ if (i > self->start_index)
+ self->stop_index = i;
+ }
+
+ if (self->stop_index >= self->start_index)
+ self->n_frames = self->stop_index - self->start_index + 1;
+ return self->n_frames;
+}
+
+/* Install 'caps' (plus our framerate) as the source caps. */
+static void
+gst_image_sequence_src_set_caps (GstImageSequenceSrc * self, GstCaps * caps)
+{
+ GstCaps *new_caps;
+
+ g_assert (caps != NULL);
+ new_caps = gst_caps_copy (caps);
+
+ /* Advertise the configured framerate once the frame count is known. */
+ if (self->n_frames > 0) {
+ GValue fps = G_VALUE_INIT;
+ g_value_init (&fps, GST_TYPE_FRACTION);
+ gst_value_set_fraction (&fps, self->fps_n, self->fps_d);
+ gst_caps_set_value (new_caps, "framerate", &fps);
+ g_value_unset (&fps);
+ }
+
+ gst_caps_replace (&self->caps, new_caps);
+ gst_pad_set_caps (GST_BASE_SRC_PAD (self), new_caps);
+
+ GST_DEBUG_OBJECT (self, "Setting new caps: %" GST_PTR_FORMAT, new_caps);
+
+ /* BUG FIX: gst_caps_copy() handed us a reference; gst_caps_replace()
+ * and gst_pad_set_caps() each take their own, so drop ours here to
+ * avoid leaking the caps on every renegotiation. */
+ gst_caps_unref (new_caps);
+}
+
+/* Call with LOCK */
+static void
+gst_image_sequence_src_set_duration (GstImageSequenceSrc * self)
+{
+ GstClockTime old_duration = self->duration;
+
+ if (self->n_frames <= 0)
+ return;
+
+ /* Calculate duration */
+ self->duration =
+ gst_util_uint64_scale (GST_SECOND * self->n_frames, self->fps_d,
+ self->fps_n);
+
+ if (self->duration != old_duration) {
+ /* Drop the lock while posting: presumably to avoid deadlocking a
+ * synchronous bus handler that calls back into us — confirm. */
+ UNLOCK (self);
+ gst_element_post_message (GST_ELEMENT (self),
+ gst_message_new_duration_changed (GST_OBJECT (self)));
+ LOCK (self);
+ }
+}
+
+/* Call with LOCK. Expand the location pattern for the current index;
+ * the caller owns (and frees) the returned string. */
+static gchar *
+gst_image_sequence_src_get_filename (GstImageSequenceSrc * self)
+{
+ GST_DEBUG ("Reading filename at index %d.", self->index);
+
+ return g_strdup_printf (self->path, self->index);
+}
+
+/* Produce one buffer: read the file for the current index, typefind and
+ * set caps on the first buffer, timestamp, then advance the index. */
+static GstFlowReturn
+gst_image_sequence_src_create (GstPushSrc * src, GstBuffer ** buffer)
+{
+ GstImageSequenceSrc *self;
+ gsize size;
+ gchar *data;
+ gchar *filename;
+ GstBuffer *buf;
+ gboolean ret;
+ GError *error = NULL;
+ gint fps_n, fps_d, start_index, stop_index;
+
+ self = GST_IMAGE_SEQUENCE_SRC (src);
+
+ LOCK (self);
+ start_index = self->start_index;
+ stop_index = self->stop_index;
+ /* Past the configured stop index: end of stream. */
+ if (self->index > stop_index && stop_index > 0) {
+ UNLOCK (self);
+
+ return GST_FLOW_EOS;
+ }
+
+ if (self->index < self->start_index)
+ self->index = self->start_index;
+
+ g_assert (start_index <= self->index &&
+ (self->index <= stop_index || stop_index <= 0));
+
+ filename = gst_image_sequence_src_get_filename (self);
+ fps_n = self->fps_n;
+ fps_d = self->fps_d;
+ UNLOCK (self);
+
+ if (!filename)
+ goto handle_error;
+
+ ret = g_file_get_contents (filename, &data, &size, &error);
+ if (!ret)
+ goto handle_error;
+
+ /* The buffer takes ownership of 'data' and releases it with g_free(). */
+ buf = gst_buffer_new_wrapped_full (0, data, size, 0, size, NULL, g_free);
+
+ if (!self->caps) {
+ GstCaps *caps;
+ caps = gst_type_find_helper_for_buffer (NULL, buf, NULL);
+ if (!caps) {
+ GST_ELEMENT_ERROR (self, STREAM, TYPE_NOT_FOUND, (NULL),
+ ("Could not determine image type."));
+
+ /* BUG FIX: release the buffer and the filename before bailing
+ * out; previously both leaked on this error path. */
+ gst_buffer_unref (buf);
+ g_free (filename);
+ return GST_FLOW_NOT_SUPPORTED;
+ }
+
+ /* First buffer: now we may scan the disk for the real frame count. */
+ LOCK (self);
+ gst_image_sequence_src_count_frames (self, TRUE);
+ gst_image_sequence_src_set_duration (self);
+ UNLOCK (self);
+
+ gst_image_sequence_src_set_caps (self, caps);
+ gst_caps_unref (caps);
+ }
+
+ GST_BUFFER_PTS (buf) =
+ gst_util_uint64_scale_ceil ((self->index - start_index) * GST_SECOND,
+ fps_d, fps_n);
+ GST_BUFFER_DURATION (buf) = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
+ GST_BUFFER_OFFSET (buf) = self->index - start_index;
+ GST_LOG_OBJECT (self, "index: %d, %s - %" GST_PTR_FORMAT, self->index,
+ filename, buf);
+
+ g_free (filename);
+ *buffer = buf;
+
+ /* Advance to the next (or previous, in reverse playback) frame. */
+ self->index += self->reverse ? -1 : 1;
+ return GST_FLOW_OK;
+
+handle_error:
+ {
+ if (error != NULL) {
+ GST_ELEMENT_ERROR (self, RESOURCE, READ,
+ ("Error while reading from file \"%s\".", filename),
+ ("%s", error->message));
+ g_error_free (error);
+ } else {
+ GST_ELEMENT_ERROR (self, RESOURCE, READ,
+ ("Error while reading from file \"%s\".", filename),
+ ("%s", g_strerror (errno)));
+ }
+ g_free (filename);
+ return GST_FLOW_ERROR;
+ }
+}
diff --git a/gst/multifile/gstimagesequencesrc.h b/gst/multifile/gstimagesequencesrc.h
new file mode 100644
index 0000000000..1fb95a273d
--- /dev/null
+++ b/gst/multifile/gstimagesequencesrc.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) 2019 Cesar Fabian Orccon Chipana
+ * Copyright (C) 2020 Thibault Saunier <tsaunier@igalia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
+ * Boston, MA 02110-1335, USA.
+ */
+
+#ifndef __GST_IMAGESEQUENCESRC_H__
+#define __GST_IMAGESEQUENCESRC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstpushsrc.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_IMAGE_SEQUENCE_SRC (gst_image_sequence_src_get_type())
+G_DECLARE_FINAL_TYPE(GstImageSequenceSrc, gst_image_sequence_src, GST, IMAGE_SEQUENCE_SRC, GstPushSrc)
+
+struct _GstImageSequenceSrc
+{
+ GstPushSrc parent;
+
+ GRecMutex fields_lock; /* guards the fields below (LOCK/UNLOCK macros) */
+ gchar* path; /* printf-style filename pattern ("location") */
+ GstUri *uri; /* URI set via the GstURIHandler interface, or NULL */
+ gint start_index; /* first frame index */
+ gint stop_index; /* last frame index, -1 = unbounded */
+ gint index; /* index of the next frame to produce */
+ gint n_frames; /* total frame count, 0 while unknown */
+
+ guint64 duration; /* stream duration in nanoseconds */
+ gboolean reverse; /* TRUE when playing with a negative rate */
+
+ GstCaps *caps; /* caps detected from the first image, or NULL */
+
+ gint fps_n, fps_d; /* output framerate as a fraction */
+};
+
+GST_ELEMENT_REGISTER_DECLARE (imagesequencesrc);
+
+G_END_DECLS
+
+#endif /* __GST_IMAGESEQUENCESRC_H__ */
diff --git a/gst/multifile/gstmultifile.c b/gst/multifile/gstmultifile.c
new file mode 100644
index 0000000000..d8fcb978fe
--- /dev/null
+++ b/gst/multifile/gstmultifile.c
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2006 Wim Taymans <wim@fluendo.com>
+ * 2006 David A. Schleef <ds@schleef.org>
+ *
+ * gstmultifilesink.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "gstmultifilesink.h"
+#include "gstmultifilesrc.h"
+#include "gstsplitfilesrc.h"
+#include "gstsplitmuxsink.h"
+#include "gstsplitmuxsrc.h"
+#include "gstimagesequencesrc.h"
+/* Register every element of the multifile plugin; succeeds when at least
+ * one registration worked. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean res;
+
+ res = GST_ELEMENT_REGISTER (multifilesrc, plugin);
+ res |= GST_ELEMENT_REGISTER (multifilesink, plugin);
+ res |= GST_ELEMENT_REGISTER (splitfilesrc, plugin);
+ res |= GST_ELEMENT_REGISTER (imagesequencesrc, plugin);
+ res |= GST_ELEMENT_REGISTER (splitmuxsink, plugin);
+ res |= GST_ELEMENT_REGISTER (splitmuxsrc, plugin);
+
+ return res;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ multifile,
+ "Reads/Writes buffers from/to sequentially named files",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/multifile/gstmultifilesink.c b/gst/multifile/gstmultifilesink.c
new file mode 100644
index 0000000000..8e8d0ece7a
--- /dev/null
+++ b/gst/multifile/gstmultifilesink.c
@@ -0,0 +1,1082 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2006 Wim Taymans <wim@fluendo.com>
+ * 2006 David A. Schleef <ds@schleef.org>
+ * 2011 Collabora Ltd. <tim.muller@collabora.co.uk>
+ * 2015 Tim-Philipp Müller <tim@centricular.com>
+ *
+ * gstmultifilesink.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-multifilesink
+ * @title: multifilesink
+ * @see_also: #GstFileSrc
+ *
+ * Write incoming data to a series of sequentially-named files.
+ *
+ * This element is usually used with data where each buffer is an
+ * independent unit of data in its own right (e.g. raw video buffers or
+ * encoded JPEG or PNG images) or with streamable container formats such
+ * as MPEG-TS or MPEG-PS.
+ *
+ * It is not possible to use this element to create independently playable
+ * mp4 files, use the splitmuxsink element for that instead.
+ *
+ * The filename property should contain a string with a \%d placeholder that will
+ * be substituted with the index for each filename.
+ *
+ * If the #GstMultiFileSink:post-messages property is %TRUE, it sends an application
+ * message named `GstMultiFileSink` after writing each buffer.
+ *
+ * The message's structure contains these fields:
+ *
+ * * #gchararray `filename`: the filename where the buffer was written.
+ * * #gint `index`: index of the buffer.
+ * * #GstClockTime `timestamp`: the timestamp of the buffer.
+ * * #GstClockTime `stream-time`: the stream time of the buffer.
+ * * #GstClockTime running-time`: the running_time of the buffer.
+ * * #GstClockTime `duration`: the duration of the buffer.
+ * * #guint64 `offset`: the offset of the buffer that triggered the message.
+ * * #guint64 `offset-end`: the offset-end of the buffer that triggered the message.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc ! multifilesink
+ * gst-launch-1.0 videotestsrc ! multifilesink post-messages=true location="frame%d"
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+#include <gst/base/gstbasetransform.h>
+#include <gst/video/video.h>
+#include <glib/gstdio.h>
+#include "gstmultifilesink.h"
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GST_DEBUG_CATEGORY_STATIC (gst_multi_file_sink_debug);
+#define GST_CAT_DEFAULT gst_multi_file_sink_debug
+
+#define DEFAULT_LOCATION "%05d"
+#define DEFAULT_INDEX 0
+#define DEFAULT_POST_MESSAGES FALSE
+#define DEFAULT_NEXT_FILE GST_MULTI_FILE_SINK_NEXT_BUFFER
+#define DEFAULT_MAX_FILES 0
+#define DEFAULT_MAX_FILE_SIZE G_GUINT64_CONSTANT(2*1024*1024*1024)
+#define DEFAULT_MAX_FILE_DURATION GST_CLOCK_TIME_NONE
+#define DEFAULT_AGGREGATE_GOPS FALSE
+
+enum
+{
+ PROP_0,
+ PROP_LOCATION,
+ PROP_INDEX,
+ PROP_POST_MESSAGES,
+ PROP_NEXT_FILE,
+ PROP_MAX_FILES,
+ PROP_MAX_FILE_SIZE,
+ PROP_MAX_FILE_DURATION,
+ PROP_AGGREGATE_GOPS
+};
+
+static void gst_multi_file_sink_finalize (GObject * object);
+
+static void gst_multi_file_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_multi_file_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_multi_file_sink_start (GstBaseSink * bsink);
+static gboolean gst_multi_file_sink_stop (GstBaseSink * sink);
+static GstFlowReturn gst_multi_file_sink_render (GstBaseSink * sink,
+ GstBuffer * buffer);
+static GstFlowReturn gst_multi_file_sink_render_list (GstBaseSink * sink,
+ GstBufferList * buffer_list);
+static gboolean gst_multi_file_sink_set_caps (GstBaseSink * sink,
+ GstCaps * caps);
+static gboolean gst_multi_file_sink_open_next_file (GstMultiFileSink *
+ multifilesink);
+static void gst_multi_file_sink_close_file (GstMultiFileSink * multifilesink,
+ GstBuffer * buffer);
+static void gst_multi_file_sink_add_old_file (GstMultiFileSink * multifilesink,
+ gchar * fn);
+static void gst_multi_file_sink_ensure_max_files (GstMultiFileSink *
+ multifilesink);
+static gboolean gst_multi_file_sink_event (GstBaseSink * sink,
+ GstEvent * event);
+
+#define GST_TYPE_MULTI_FILE_SINK_NEXT (gst_multi_file_sink_next_get_type ())
+static GType
+gst_multi_file_sink_next_get_type (void)
+{
+ static GType multi_file_sink_next_type = 0;
+ static const GEnumValue next_types[] = {
+ {GST_MULTI_FILE_SINK_NEXT_BUFFER, "New file for each buffer", "buffer"},
+ {GST_MULTI_FILE_SINK_NEXT_DISCONT, "New file after each discontinuity",
+ "discont"},
+ {GST_MULTI_FILE_SINK_NEXT_KEY_FRAME, "New file at each key frame "
+ "(Useful for MPEG-TS segmenting)", "key-frame"},
+ {GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT,
+ "New file after a force key unit event", "key-unit-event"},
+ {GST_MULTI_FILE_SINK_NEXT_MAX_SIZE, "New file when the configured maximum "
+ "file size would be exceeded with the next buffer or buffer list",
+ "max-size"},
+ {GST_MULTI_FILE_SINK_NEXT_MAX_DURATION,
+ "New file when the configured maximum "
+ "file duration would be exceeded with the next buffer or buffer list",
+ "max-duration"},
+ {0, NULL, NULL}
+ };
+
+ /* Lazy one-time registration of the "next-file" enum type.
+ * NOTE(review): the check-then-register is not locked here; presumably
+ * only reached from class_init under the GType lock — confirm. */
+ if (!multi_file_sink_next_type) {
+ multi_file_sink_next_type =
+ g_enum_register_static ("GstMultiFileSinkNext", next_types);
+ }
+
+ return multi_file_sink_next_type;
+}
+
+#define gst_multi_file_sink_parent_class parent_class
+G_DEFINE_TYPE (GstMultiFileSink, gst_multi_file_sink, GST_TYPE_BASE_SINK);
+GST_ELEMENT_REGISTER_DEFINE (multifilesink, "multifilesink", GST_RANK_NONE,
+ gst_multi_file_sink_get_type ());
+
+static void
+gst_multi_file_sink_class_init (GstMultiFileSinkClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GstBaseSinkClass *gstbasesink_class = GST_BASE_SINK_CLASS (klass);
+
+ gobject_class->set_property = gst_multi_file_sink_set_property;
+ gobject_class->get_property = gst_multi_file_sink_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_LOCATION,
+ g_param_spec_string ("location", "File Location",
+ "Location of the file to write", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_INDEX,
+ g_param_spec_int ("index", "Index",
+ "Index to use with location property to create file names. The "
+ "index is incremented by one for each buffer written.",
+ 0, G_MAXINT, DEFAULT_INDEX,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstMultiFileSink:post-messages:
+ *
+ * Post a message on the GstBus for each file.
+ */
+ g_object_class_install_property (gobject_class, PROP_POST_MESSAGES,
+ g_param_spec_boolean ("post-messages", "Post Messages",
+ "Post a message for each file with information of the buffer",
+ DEFAULT_POST_MESSAGES, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstMultiFileSink:next-file:
+ *
+ * When to start a new file.
+ */
+ g_object_class_install_property (gobject_class, PROP_NEXT_FILE,
+ g_param_spec_enum ("next-file", "Next File",
+ "When to start a new file",
+ GST_TYPE_MULTI_FILE_SINK_NEXT, DEFAULT_NEXT_FILE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+
+ /**
+ * GstMultiFileSink:max-files:
+ *
+ * Maximum number of files to keep on disk. Once the maximum is reached, old
+ * files start to be deleted to make room for new ones.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_FILES,
+ g_param_spec_uint ("max-files", "Max files",
+ "Maximum number of files to keep on disk. Once the maximum is reached,"
+ "old files start to be deleted to make room for new ones.",
+ 0, G_MAXUINT, DEFAULT_MAX_FILES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstMultiFileSink:max-file-size:
+ *
+ * Maximum file size before starting a new file in max-size mode.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_FILE_SIZE,
+ g_param_spec_uint64 ("max-file-size", "Maximum File Size",
+ "Maximum file size before starting a new file in max-size mode",
+ 0, G_MAXUINT64, DEFAULT_MAX_FILE_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstMultiFileSink:max-file-duration:
+ *
+ * Maximum file duration before starting a new file in max-duration mode.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_FILE_DURATION,
+ g_param_spec_uint64 ("max-file-duration", "Maximum File Duration",
+ "Maximum file duration before starting a new file in max-duration mode "
+ "(in nanoseconds)", 0, G_MAXUINT64, DEFAULT_MAX_FILE_DURATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstMultiFileSink:aggregate-gops:
+ *
+ * Whether to aggregate complete GOPs before doing any processing. Set this
+ * to TRUE to make sure each new file starts with a keyframe. This requires
+ * the upstream element to flag buffers containing key units and delta
+ * units correctly. At least the MPEG-PS and MPEG-TS muxers should be doing
+ * this.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_AGGREGATE_GOPS,
+ g_param_spec_boolean ("aggregate-gops", "Aggregate GOPs",
+ "Whether to aggregate GOPs and process them as a whole without "
+ "splitting", DEFAULT_AGGREGATE_GOPS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gobject_class->finalize = gst_multi_file_sink_finalize;
+
+ gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_multi_file_sink_start);
+ gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_multi_file_sink_stop);
+ gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_multi_file_sink_render);
+ gstbasesink_class->render_list =
+ GST_DEBUG_FUNCPTR (gst_multi_file_sink_render_list);
+ gstbasesink_class->set_caps =
+ GST_DEBUG_FUNCPTR (gst_multi_file_sink_set_caps);
+ gstbasesink_class->event = GST_DEBUG_FUNCPTR (gst_multi_file_sink_event);
+
+ GST_DEBUG_CATEGORY_INIT (gst_multi_file_sink_debug, "multifilesink", 0,
+ "multifilesink element");
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sinktemplate);
+ gst_element_class_set_static_metadata (gstelement_class, "Multi-File Sink",
+ "Sink/File",
+ "Write buffers to a sequentially named set of files",
+ "David Schleef <ds@schleef.org>");
+
+ /* Expose the enum type to documentation / introspection tooling. */
+ gst_type_mark_as_plugin_api (GST_TYPE_MULTI_FILE_SINK_NEXT, 0);
+}
+
/* GObject instance init — set every property field to its declared default
 * and pre-set the per-run state that render() reads before start() runs. */
static void
gst_multi_file_sink_init (GstMultiFileSink * multifilesink)
{
  multifilesink->filename = g_strdup (DEFAULT_LOCATION);
  multifilesink->index = DEFAULT_INDEX;
  multifilesink->post_messages = DEFAULT_POST_MESSAGES;
  multifilesink->max_files = DEFAULT_MAX_FILES;
  multifilesink->max_file_size = DEFAULT_MAX_FILE_SIZE;
  multifilesink->max_file_duration = DEFAULT_MAX_FILE_DURATION;

  multifilesink->aggregate_gops = DEFAULT_AGGREGATE_GOPS;
  multifilesink->gop_adapter = NULL;

  /* write files as fast as buffers arrive; no clock synchronisation */
  gst_base_sink_set_sync (GST_BASE_SINK (multifilesink), FALSE);

  /* next_segment is primed lazily from the first timestamped buffer in
   * key-frame mode; -1 means "no force-key-unit event seen yet" */
  multifilesink->next_segment = GST_CLOCK_TIME_NONE;
  multifilesink->force_key_unit_count = -1;
}
+
/* GObject finalize — releases the filename pattern; the per-run streaming
 * state (open file, cached headers, GOP adapter) is freed in stop(). */
static void
gst_multi_file_sink_finalize (GObject * object)
{
  GstMultiFileSink *sink = GST_MULTI_FILE_SINK (object);

  g_free (sink->filename);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
/* Stores a copy of the printf-style filename pattern (one %d expected).
 * Always succeeds; the pattern is only interpreted later by
 * g_strdup_printf() when a file is actually opened. */
static gboolean
gst_multi_file_sink_set_location (GstMultiFileSink * sink,
    const gchar * location)
{
  g_free (sink->filename);
  /* FIXME: validate location to have just one %d */
  sink->filename = g_strdup (location);

  return TRUE;
}
+
/* GObject property setter; see the property blurbs in class_init. No
 * locking is done here — properties are expected to be configured before
 * streaming starts. */
static void
gst_multi_file_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstMultiFileSink *sink = GST_MULTI_FILE_SINK (object);

  switch (prop_id) {
    case PROP_LOCATION:
      gst_multi_file_sink_set_location (sink, g_value_get_string (value));
      break;
    case PROP_INDEX:
      sink->index = g_value_get_int (value);
      break;
    case PROP_POST_MESSAGES:
      sink->post_messages = g_value_get_boolean (value);
      break;
    case PROP_NEXT_FILE:
      sink->next_file = g_value_get_enum (value);
      break;
    case PROP_MAX_FILES:
      sink->max_files = g_value_get_uint (value);
      break;
    case PROP_MAX_FILE_SIZE:
      sink->max_file_size = g_value_get_uint64 (value);
      break;
    case PROP_MAX_FILE_DURATION:
      sink->max_file_duration = g_value_get_uint64 (value);
      break;
    case PROP_AGGREGATE_GOPS:
      sink->aggregate_gops = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* GObject property getter — mirrors set_property field for field. */
static void
gst_multi_file_sink_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstMultiFileSink *sink = GST_MULTI_FILE_SINK (object);

  switch (prop_id) {
    case PROP_LOCATION:
      g_value_set_string (value, sink->filename);
      break;
    case PROP_INDEX:
      g_value_set_int (value, sink->index);
      break;
    case PROP_POST_MESSAGES:
      g_value_set_boolean (value, sink->post_messages);
      break;
    case PROP_NEXT_FILE:
      g_value_set_enum (value, sink->next_file);
      break;
    case PROP_MAX_FILES:
      g_value_set_uint (value, sink->max_files);
      break;
    case PROP_MAX_FILE_SIZE:
      g_value_set_uint64 (value, sink->max_file_size);
      break;
    case PROP_MAX_FILE_DURATION:
      g_value_set_uint64 (value, sink->max_file_duration);
      break;
    case PROP_AGGREGATE_GOPS:
      g_value_set_boolean (value, sink->aggregate_gops);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* GstBaseSink::start — allocate per-run state before streaming begins. */
static gboolean
gst_multi_file_sink_start (GstBaseSink * bsink)
{
  GstMultiFileSink *sink = GST_MULTI_FILE_SINK (bsink);

  /* the adapter is only needed when GOP aggregation was requested */
  if (sink->aggregate_gops)
    sink->gop_adapter = gst_adapter_new ();
  sink->potential_next_gop = NULL;
  sink->file_pts = GST_CLOCK_TIME_NONE;

  /* names of already-written files, used for max-files pruning */
  g_queue_init (&sink->old_files);

  return TRUE;
}
+
/* GstBaseSink::stop — closes any open file and frees all per-run state
 * created in start(), set_caps() and the render path. */
static gboolean
gst_multi_file_sink_stop (GstBaseSink * sink)
{
  GstMultiFileSink *multifilesink;
  int i;

  multifilesink = GST_MULTI_FILE_SINK (sink);

  if (multifilesink->file != NULL) {
    fclose (multifilesink->file);
    multifilesink->file = NULL;
  }

  /* drop the streamheader buffers cached from the caps */
  if (multifilesink->streamheaders) {
    for (i = 0; i < multifilesink->n_streamheaders; i++) {
      gst_buffer_unref (multifilesink->streamheaders[i]);
    }
    g_free (multifilesink->streamheaders);
    multifilesink->streamheaders = NULL;
  }

  if (multifilesink->gop_adapter != NULL) {
    g_object_unref (multifilesink->gop_adapter);
    multifilesink->gop_adapter = NULL;
  }

  /* header buffers speculatively queued for a GOP that never completed */
  if (multifilesink->potential_next_gop != NULL) {
    g_list_free_full (multifilesink->potential_next_gop,
        (GDestroyNotify) gst_buffer_unref);
    multifilesink->potential_next_gop = NULL;
  }

  multifilesink->force_key_unit_count = -1;

  /* only the queued names are freed; the files themselves stay on disk */
  g_queue_foreach (&multifilesink->old_files, (GFunc) g_free, NULL);
  g_queue_clear (&multifilesink->old_files);

  return TRUE;
}
+
+
/* Posts a "GstMultiFileSink" element message for @filename on the bus,
 * carrying the current index plus all timing/offset fields. Callers pass
 * GST_CLOCK_TIME_NONE / -1 for values they do not know. No-op when the
 * post-messages property is FALSE. */
static void
gst_multi_file_sink_post_message_full (GstMultiFileSink * multifilesink,
    GstClockTime timestamp, GstClockTime duration, GstClockTime offset,
    GstClockTime offset_end, GstClockTime running_time,
    GstClockTime stream_time, const char *filename)
{
  GstStructure *s;

  if (!multifilesink->post_messages)
    return;

  s = gst_structure_new ("GstMultiFileSink",
      "filename", G_TYPE_STRING, filename,
      "index", G_TYPE_INT, multifilesink->index,
      "timestamp", G_TYPE_UINT64, timestamp,
      "stream-time", G_TYPE_UINT64, stream_time,
      "running-time", G_TYPE_UINT64, running_time,
      "duration", G_TYPE_UINT64, duration,
      "offset", G_TYPE_UINT64, offset,
      "offset-end", G_TYPE_UINT64, offset_end, NULL);

  gst_element_post_message (GST_ELEMENT_CAST (multifilesink),
      gst_message_new_element (GST_OBJECT_CAST (multifilesink), s));
}
+
/* Posts a file message from an explicit timestamp/duration; used when no
 * buffer is available (e.g. at EOS). Byte offsets are unknown in this
 * path, so -1 is reported for both. */
static void
gst_multi_file_sink_post_message_from_time (GstMultiFileSink * multifilesink,
    GstClockTime timestamp, GstClockTime duration, const char *filename)
{
  GstClockTime running_time, stream_time;
  guint64 offset, offset_end;
  GstSegment *segment;
  GstFormat format;

  if (!multifilesink->post_messages)
    return;

  segment = &GST_BASE_SINK (multifilesink)->segment;
  format = segment->format;

  offset = -1;
  offset_end = -1;

  running_time = gst_segment_to_running_time (segment, format, timestamp);
  stream_time = gst_segment_to_stream_time (segment, format, timestamp);

  gst_multi_file_sink_post_message_full (multifilesink, timestamp, duration,
      offset, offset_end, running_time, stream_time, filename);
}
+
/* Posts a file message with timestamp, duration and offsets taken from
 * @buffer, converting the timestamp to running/stream time through the
 * sink's current segment. */
static void
gst_multi_file_sink_post_message (GstMultiFileSink * multifilesink,
    GstBuffer * buffer, const char *filename)
{
  GstClockTime duration, timestamp;
  GstClockTime running_time, stream_time;
  guint64 offset, offset_end;
  GstSegment *segment;
  GstFormat format;

  if (!multifilesink->post_messages)
    return;

  segment = &GST_BASE_SINK (multifilesink)->segment;
  format = segment->format;

  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  offset = GST_BUFFER_OFFSET (buffer);
  offset_end = GST_BUFFER_OFFSET_END (buffer);

  running_time = gst_segment_to_running_time (segment, format, timestamp);
  stream_time = gst_segment_to_stream_time (segment, format, timestamp);

  gst_multi_file_sink_post_message_full (multifilesink, timestamp, duration,
      offset, offset_end, running_time, stream_time, filename);
}
+
+static gboolean
+gst_multi_file_sink_write_stream_headers (GstMultiFileSink * sink)
+{
+ int i;
+
+ if (sink->streamheaders == NULL)
+ return TRUE;
+
+ /* we want to write these at the beginning */
+ g_assert (sink->cur_file_size == 0);
+
+ for (i = 0; i < sink->n_streamheaders; i++) {
+ GstBuffer *hdr;
+ GstMapInfo map;
+ int ret;
+
+ hdr = sink->streamheaders[i];
+ gst_buffer_map (hdr, &map, GST_MAP_READ);
+ ret = fwrite (map.data, map.size, 1, sink->file);
+ gst_buffer_unmap (hdr, &map);
+
+ if (ret != 1)
+ return FALSE;
+
+ sink->cur_file_size += map.size;
+ }
+
+ return TRUE;
+}
+
/* Writes one (possibly GOP-aggregated) buffer to disk, opening and closing
 * files according to the configured next-file mode.
 *
 * Returns GST_FLOW_OK, or GST_FLOW_ERROR after posting an element error
 * (distinguishing disk-full from generic write failures). */
static GstFlowReturn
gst_multi_file_sink_write_buffer (GstMultiFileSink * multifilesink,
    GstBuffer * buffer)
{
  GstMapInfo map;
  gchar *filename;
  gboolean ret;                 /* NOTE(review): also receives fwrite()'s
                                 * size_t result; works because the item
                                 * count is 1, but size_t would be cleaner */
  GError *error = NULL;
  gboolean first_file = TRUE;

  gst_buffer_map (buffer, &map, GST_MAP_READ);

  switch (multifilesink->next_file) {
    case GST_MULTI_FILE_SINK_NEXT_BUFFER:
      /* one file per buffer, written in one shot via GLib */
      gst_multi_file_sink_ensure_max_files (multifilesink);

      filename = g_strdup_printf (multifilesink->filename,
          multifilesink->index);
      ret = g_file_set_contents (filename, (char *) map.data, map.size, &error);
      if (!ret)
        goto write_error;

      gst_multi_file_sink_post_message (multifilesink, buffer, filename);

      /* takes ownership of filename */
      gst_multi_file_sink_add_old_file (multifilesink, filename);

      multifilesink->index++;

      break;
    case GST_MULTI_FILE_SINK_NEXT_DISCONT:
      /* start a new file whenever upstream flags a discontinuity */
      if (GST_BUFFER_IS_DISCONT (buffer)) {
        if (multifilesink->file)
          gst_multi_file_sink_close_file (multifilesink, buffer);
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_FRAME:
      /* split on the first key frame after each 10-second boundary;
       * next_segment is primed from the first valid timestamp */
      if (multifilesink->next_segment == GST_CLOCK_TIME_NONE) {
        if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
          multifilesink->next_segment = GST_BUFFER_TIMESTAMP (buffer) +
              10 * GST_SECOND;
        }
      }

      if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer) &&
          GST_BUFFER_TIMESTAMP (buffer) >= multifilesink->next_segment &&
          !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
        if (multifilesink->file) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
        multifilesink->next_segment += 10 * GST_SECOND;
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        /* the very first file gets the headers from the stream itself */
        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);
      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT:
      /* the actual file rotation happens in the event handler */
      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        /* we don't need to write stream headers here, they will be inserted in
         * the stream by upstream elements if key unit events have
         * all_headers=true set
         */
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      break;
    case GST_MULTI_FILE_SINK_NEXT_MAX_SIZE:{
      guint64 new_size;

      /* rotate before the write that would exceed max-file-size */
      new_size = multifilesink->cur_file_size + map.size;
      if (new_size > multifilesink->max_file_size) {

        GST_INFO_OBJECT (multifilesink, "current size: %" G_GUINT64_FORMAT
            ", new_size: %" G_GUINT64_FORMAT ", max. size %" G_GUINT64_FORMAT,
            multifilesink->cur_file_size, new_size,
            multifilesink->max_file_size);

        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      multifilesink->cur_file_size += map.size;
      break;
    }
    case GST_MULTI_FILE_SINK_NEXT_MAX_DURATION:{
      GstClockTime new_duration = 0;

      if (GST_BUFFER_PTS_IS_VALID (buffer)
          && GST_CLOCK_TIME_IS_VALID (multifilesink->file_pts)) {
        /* The new duration will extend to this new buffer pts ... */
        new_duration = GST_BUFFER_PTS (buffer) - multifilesink->file_pts;
        /* ... and duration (if it has one) */
        if (GST_BUFFER_DURATION_IS_VALID (buffer))
          new_duration += GST_BUFFER_DURATION (buffer);
      }

      if (new_duration > multifilesink->max_file_duration) {

        GST_INFO_OBJECT (multifilesink,
            "new_duration: %" G_GUINT64_FORMAT ", max. duration %"
            G_GUINT64_FORMAT, new_duration, multifilesink->max_file_duration);

        if (multifilesink->file != NULL) {
          first_file = FALSE;
          gst_multi_file_sink_close_file (multifilesink, buffer);
        }
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;

        /* remember the PTS the new file starts at for duration tracking */
        multifilesink->file_pts = GST_BUFFER_PTS (buffer);
        if (!first_file)
          gst_multi_file_sink_write_stream_headers (multifilesink);
      }

      ret = fwrite (map.data, map.size, 1, multifilesink->file);

      if (ret != 1)
        goto stdio_write_error;

      break;
    }
    default:
      g_assert_not_reached ();
  }

  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_OK;

  /* ERRORS */
write_error:
  {
    switch (error->code) {
      case G_FILE_ERROR_NOSPC:{
        GST_ELEMENT_ERROR (multifilesink, RESOURCE, NO_SPACE_LEFT, (NULL),
            (NULL));
        break;
      }
      default:{
        /* NOTE(review): errno may be stale here since the failure came via
         * GError; error->message would be the more reliable detail string */
        GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
            ("Error while writing to file \"%s\".", filename),
            ("%s", g_strerror (errno)));
      }
    }
    g_error_free (error);
    g_free (filename);

    gst_buffer_unmap (buffer, &map);
    return GST_FLOW_ERROR;
  }
stdio_write_error:
  switch (errno) {
    case ENOSPC:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, NO_SPACE_LEFT,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
      break;
    default:
      GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
          ("Error while writing to file."), ("%s", g_strerror (errno)));
  }
  gst_buffer_unmap (buffer, &map);
  return GST_FLOW_ERROR;
}
+
/* GstBaseSink::render — either writes the buffer straight out, or, in
 * aggregate-gops mode, accumulates buffers in an adapter until the next
 * key unit marks the previous GOP as complete, then writes that GOP out
 * as a single buffer. */
static GstFlowReturn
gst_multi_file_sink_render (GstBaseSink * bsink, GstBuffer * buffer)
{
  GstMultiFileSink *sink = GST_MULTI_FILE_SINK (bsink);
  GstFlowReturn flow = GST_FLOW_OK;
  gboolean key_unit, header;

  header = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER);
  key_unit = !GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  if (sink->aggregate_gops) {
    GstBuffer *gop_buffer = NULL;
    guint avail;

    avail = gst_adapter_available (sink->gop_adapter);

    GST_LOG_OBJECT (sink, "aggregate GOP: received %s%s unit buffer: "
        "%" GST_PTR_FORMAT,
        (key_unit) ? "key" : "delta", (header) ? " header" : "", buffer);

    /* If it's a header buffer, it might potentially be for the next GOP */
    if (header) {
      GST_LOG_OBJECT (sink, "Accumulating buffer to potential next GOP");
      sink->potential_next_gop =
          g_list_append (sink->potential_next_gop, gst_buffer_ref (buffer));
    } else {
      /* a key unit closes the GOP accumulated so far: take it out of the
       * adapter, restoring the timestamps of its first bytes */
      if (key_unit && avail > 0) {
        GstClockTime pts, dts;
        GST_LOG_OBJECT (sink, "Grabbing pending completed GOP");
        pts = gst_adapter_prev_pts_at_offset (sink->gop_adapter, 0, NULL);
        dts = gst_adapter_prev_dts_at_offset (sink->gop_adapter, 0, NULL);
        gop_buffer = gst_adapter_take_buffer (sink->gop_adapter, avail);
        GST_BUFFER_PTS (gop_buffer) = pts;
        GST_BUFFER_DTS (gop_buffer) = dts;
      }

      /* just accumulate the buffer */
      if (sink->potential_next_gop) {
        GList *tmp;
        GST_LOG_OBJECT (sink,
            "Carrying over pending next GOP data into adapter");
        /* If we have pending data, put that first in the adapter */
        for (tmp = sink->potential_next_gop; tmp; tmp = tmp->next) {
          GstBuffer *tmpb = (GstBuffer *) tmp->data;
          gst_adapter_push (sink->gop_adapter, tmpb);
        }
        g_list_free (sink->potential_next_gop);
        sink->potential_next_gop = NULL;
      }
      GST_LOG_OBJECT (sink, "storing buffer in adapter");
      gst_adapter_push (sink->gop_adapter, gst_buffer_ref (buffer));

      if (gop_buffer != NULL) {
        GST_DEBUG_OBJECT (sink, "writing out pending GOP, %u bytes", avail);
        GST_DEBUG_OBJECT (sink,
            "gop buffer pts:%" GST_TIME_FORMAT " dts:%" GST_TIME_FORMAT
            " duration:%" GST_TIME_FORMAT,
            GST_TIME_ARGS (GST_BUFFER_PTS (gop_buffer)),
            GST_TIME_ARGS (GST_BUFFER_DTS (gop_buffer)),
            GST_TIME_ARGS (GST_BUFFER_DURATION (gop_buffer)));
        flow = gst_multi_file_sink_write_buffer (sink, gop_buffer);
        gst_buffer_unref (gop_buffer);
      }
    }
  } else {
    flow = gst_multi_file_sink_write_buffer (sink, buffer);
  }
  return flow;
}
+
/* GstBufferList foreach callback: appends every memory block of *buf to
 * the destination buffer passed via @data, copying the buffer metadata
 * (flags, timestamps) from the first buffer only. Always returns TRUE so
 * iteration continues over the whole list. */
static gboolean
buffer_list_copy_data (GstBuffer ** buf, guint idx, gpointer data)
{
  GstBuffer *dest = data;
  guint num, i;

  if (idx == 0)
    gst_buffer_copy_into (dest, *buf, GST_BUFFER_COPY_METADATA, 0, -1);

  num = gst_buffer_n_memory (*buf);
  for (i = 0; i < num; ++i) {
    GstMemory *mem;

    /* get_memory refs the GstMemory; append_memory takes that ref over */
    mem = gst_buffer_get_memory (*buf, i);
    gst_buffer_append_memory (dest, mem);
  }

  return TRUE;
}
+
+/* Our assumption for now is that the buffers in a buffer list should always
+ * end up in the same file. If someone wants different behaviour, they'll just
+ * have to add a property for that. */
+static GstFlowReturn
+gst_multi_file_sink_render_list (GstBaseSink * sink, GstBufferList * list)
+{
+ GstBuffer *buf;
+ guint size;
+
+ size = gst_buffer_list_calculate_size (list);
+ GST_LOG_OBJECT (sink, "total size of buffer list %p: %u", list, size);
+
+ /* copy all buffers in the list into one single buffer, so we can use
+ * the normal render function (FIXME: optimise to avoid the memcpy) */
+ buf = gst_buffer_new ();
+ gst_buffer_list_foreach (list, buffer_list_copy_data, buf);
+ g_assert (gst_buffer_get_size (buf) == size);
+
+ gst_multi_file_sink_render (sink, buf);
+ gst_buffer_unref (buf);
+
+ return GST_FLOW_OK;
+}
+
/* GstBaseSink::set_caps — caches the "streamheader" buffer array from the
 * caps, replacing any previously cached set; the headers are re-written at
 * the start of each new file by write_stream_headers(). Caps without a
 * streamheader field leave the existing cache untouched. */
static gboolean
gst_multi_file_sink_set_caps (GstBaseSink * sink, GstCaps * caps)
{
  GstMultiFileSink *multifilesink;
  GstStructure *structure;

  multifilesink = GST_MULTI_FILE_SINK (sink);

  structure = gst_caps_get_structure (caps, 0);
  if (structure) {
    const GValue *value;

    value = gst_structure_get_value (structure, "streamheader");

    if (GST_VALUE_HOLDS_ARRAY (value)) {
      int i;

      /* release the previously cached headers, if any */
      if (multifilesink->streamheaders) {
        for (i = 0; i < multifilesink->n_streamheaders; i++) {
          gst_buffer_unref (multifilesink->streamheaders[i]);
        }
        g_free (multifilesink->streamheaders);
      }

      multifilesink->n_streamheaders = gst_value_array_get_size (value);
      multifilesink->streamheaders =
          g_malloc (sizeof (GstBuffer *) * multifilesink->n_streamheaders);

      for (i = 0; i < multifilesink->n_streamheaders; i++) {
        multifilesink->streamheaders[i] =
            gst_buffer_ref (gst_value_get_buffer (gst_value_array_get_value
                (value, i)));
      }
    }
  }

  return TRUE;
}
+
+/* Takes ownership of the filename string */
+static void
+gst_multi_file_sink_add_old_file (GstMultiFileSink * multifilesink, gchar * fn)
+{
+ /* Only add file to the list if a max_files limit is set, otherwise we never
+ * prune the list and memory just builds up until the pipeline is stopped. */
+ if (multifilesink->max_files > 0) {
+ g_queue_push_tail (&multifilesink->old_files, fn);
+ } else {
+ g_free (fn);
+ }
+}
+
+static void
+gst_multi_file_sink_ensure_max_files (GstMultiFileSink * multifilesink)
+{
+ guint max_files = multifilesink->max_files;
+
+ if (max_files == 0)
+ return;
+
+ while (g_queue_get_length (&multifilesink->old_files) >= max_files) {
+ gchar *filename;
+
+ filename = g_queue_pop_head (&multifilesink->old_files);
+ g_remove (filename);
+ g_free (filename);
+ }
+}
+
/* GstBaseSink::event — rotates files on downstream force-key-unit events
 * (next-file=key-unit-event mode) and, on EOS, flushes any pending GOP
 * data and closes the final file. All events are then forwarded to the
 * base class. */
static gboolean
gst_multi_file_sink_event (GstBaseSink * sink, GstEvent * event)
{
  GstMultiFileSink *multifilesink;
  gchar *filename;              /* NOTE(review): shadowed by the inner
                                 * 'filename' in the EOS branch below */

  multifilesink = GST_MULTI_FILE_SINK (sink);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      GstClockTime timestamp, duration;
      GstClockTime running_time, stream_time;
      guint64 offset, offset_end;
      gboolean all_headers;
      guint count;

      if (multifilesink->next_file != GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT ||
          !gst_video_event_is_force_key_unit (event))
        goto out;

      gst_video_event_parse_downstream_force_key_unit (event, &timestamp,
          &stream_time, &running_time, &all_headers, &count);

      /* ignore a duplicate of the force-key-unit event we already handled */
      if (multifilesink->force_key_unit_count != -1 &&
          multifilesink->force_key_unit_count == count)
        goto out;

      multifilesink->force_key_unit_count = count;

      if (multifilesink->file) {
        /* offsets/duration are unknown for event-driven rotation */
        duration = GST_CLOCK_TIME_NONE;
        offset = offset_end = -1;
        filename = g_strdup_printf (multifilesink->filename,
            multifilesink->index);

        gst_multi_file_sink_close_file (multifilesink, NULL);

        gst_multi_file_sink_post_message_full (multifilesink, timestamp,
            duration, offset, offset_end, running_time, stream_time, filename);
        g_free (filename);
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
      }

      break;
    }
    case GST_EVENT_EOS:
      if (multifilesink->aggregate_gops) {
        GstBuffer *buf = gst_buffer_new ();

        /* push key unit buffer to force writing out the pending GOP data */
        GST_INFO_OBJECT (sink, "EOS, write pending GOP data");
        GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
        gst_multi_file_sink_render (sink, buf);
        gst_buffer_unref (buf);
      }
      if (multifilesink->file) {
        gchar *filename;

        filename = g_strdup_printf (multifilesink->filename,
            multifilesink->index);

        gst_multi_file_sink_close_file (multifilesink, NULL);

        /* report the final file using the segment position as timestamp */
        gst_multi_file_sink_post_message_from_time (multifilesink,
            GST_BASE_SINK (multifilesink)->segment.position, -1, filename);
        g_free (filename);
      }
      break;
    default:
      break;
  }

out:
  return GST_BASE_SINK_CLASS (parent_class)->event (sink, event);

  /* ERRORS */
stdio_write_error:
  {
    GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
        ("Error while writing to file."), (NULL));
    gst_event_unref (event);
    return FALSE;
  }
}
+
/* Opens the file for the current index for binary writing. Prunes old
 * files first so the max-files limit is respected, then registers the new
 * name in the old-files queue (which takes ownership of the string).
 * Returns FALSE if the file could not be created; the caller posts the
 * element error. */
static gboolean
gst_multi_file_sink_open_next_file (GstMultiFileSink * multifilesink)
{
  char *filename;

  g_return_val_if_fail (multifilesink->file == NULL, FALSE);

  gst_multi_file_sink_ensure_max_files (multifilesink);

  filename = g_strdup_printf (multifilesink->filename, multifilesink->index);
  multifilesink->file = g_fopen (filename, "wb");
  if (multifilesink->file == NULL) {
    g_free (filename);
    return FALSE;
  }

  GST_INFO_OBJECT (multifilesink, "opening file %s", filename);

  /* takes ownership of filename */
  gst_multi_file_sink_add_old_file (multifilesink, filename);

  multifilesink->cur_file_size = 0;
  return TRUE;
}
+
/* Closes the current file and advances the file index. When @buffer is
 * non-NULL, a bus message with timing taken from it is posted for the
 * closed file; pass NULL when no buffer context exists (events, EOS). */
static void
gst_multi_file_sink_close_file (GstMultiFileSink * multifilesink,
    GstBuffer * buffer)
{
  char *filename;

  fclose (multifilesink->file);
  multifilesink->file = NULL;

  if (buffer) {
    filename = g_strdup_printf (multifilesink->filename, multifilesink->index);
    gst_multi_file_sink_post_message (multifilesink, buffer, filename);
    g_free (filename);
  }

  multifilesink->index++;
}
diff --git a/gst/multifile/gstmultifilesink.h b/gst/multifile/gstmultifilesink.h
new file mode 100644
index 0000000000..1478341032
--- /dev/null
+++ b/gst/multifile/gstmultifilesink.h
@@ -0,0 +1,120 @@
+/* GStreamer
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000 Wim Taymans <wtay@chello.be>
+ * 2006 Wim Taymans <wim@fluendo.com>
+ * 2006 David A. Schleef <ds@schleef.org>
+ *
+ * gstmultifilesink.h:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MULTIFILESINK_H__
+#define __GST_MULTIFILESINK_H__
+
+#include <gst/gst.h>
+#include <gst/base/base.h>
+#include <errno.h>
+#include <stdio.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#ifdef HAVE_UNISTD_H
+#include <unistd.h>
+#endif
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MULTI_FILE_SINK \
+ (gst_multi_file_sink_get_type())
+#define GST_MULTI_FILE_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULTI_FILE_SINK,GstMultiFileSink))
+#define GST_MULTI_FILE_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULTI_FILE_SINK,GstMultiFileSinkClass))
+#define GST_IS_MULTI_FILE_SINK(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULTI_FILE_SINK))
+#define GST_IS_MULTI_FILE_SINK_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULTI_FILE_SINK))
+
+typedef struct _GstMultiFileSink GstMultiFileSink;
+typedef struct _GstMultiFileSinkClass GstMultiFileSinkClass;
+
+/**
+ * GstMultiFileSinkNext:
+ * @GST_MULTI_FILE_SINK_NEXT_BUFFER: New file for each buffer
+ * @GST_MULTI_FILE_SINK_NEXT_DISCONT: New file after each discontinuity
+ * @GST_MULTI_FILE_SINK_NEXT_KEY_FRAME: New file at each key frame
+ * (Useful for MPEG-TS segmenting)
+ * @GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT: New file after a force key unit
+ * event
+ * @GST_MULTI_FILE_SINK_NEXT_MAX_SIZE: New file when the configured maximum file
+ * size would be exceeded with the next buffer or buffer list
+ * @GST_MULTI_FILE_SINK_NEXT_MAX_DURATION: New file when the configured maximum duration
+ * would be exceeded with the next buffer or buffer list
+ *
+ * File splitting modes.
+ */
+typedef enum {
+ GST_MULTI_FILE_SINK_NEXT_BUFFER,
+ GST_MULTI_FILE_SINK_NEXT_DISCONT,
+ GST_MULTI_FILE_SINK_NEXT_KEY_FRAME,
+ GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT,
+ GST_MULTI_FILE_SINK_NEXT_MAX_SIZE,
+ GST_MULTI_FILE_SINK_NEXT_MAX_DURATION
+} GstMultiFileSinkNext;
+
struct _GstMultiFileSink
{
  GstBaseSink parent;

  gchar *filename;              /* printf-style pattern, substituted with index */
  gint index;                   /* index of the file currently being written */
  gboolean post_messages;       /* post a bus message per finished file */
  GstMultiFileSinkNext next_file;       /* file splitting mode */
  FILE *file;                   /* currently open file, or NULL */

  guint max_files;              /* 0 = unlimited */
  GQueue old_files;             /* keep track of old files for max_files handling */

  gint64 next_segment;          /* next split point (ns) in key-frame mode */

  int n_streamheaders;
  GstBuffer **streamheaders;    /* cached from caps, rewritten per new file */
  guint force_key_unit_count;   /* NOTE(review): assigned/compared against -1,
                                 * which wraps to G_MAXUINT for a guint;
                                 * works, but gint would be clearer */

  guint64 cur_file_size;        /* bytes written to the current file */
  guint64 max_file_size;

  GstClockTime file_pts;        /* PTS at which the current file started */
  GstClockTime max_file_duration;

  gboolean aggregate_gops;
  GstAdapter *gop_adapter;      /* to aggregate GOPs */
  GList *potential_next_gop;    /* To detect false-positives */
};
+
+struct _GstMultiFileSinkClass
+{
+ GstBaseSinkClass parent_class;
+};
+
+GType gst_multi_file_sink_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (multifilesink);
+
+G_END_DECLS
+
+#endif /* __GST_MULTIFILESINK_H__ */
diff --git a/gst/multifile/gstmultifilesrc.c b/gst/multifile/gstmultifilesrc.c
new file mode 100644
index 0000000000..b2fabc878e
--- /dev/null
+++ b/gst/multifile/gstmultifilesrc.c
@@ -0,0 +1,573 @@
+/* GStreamer
+ * Copyright (C) 2006 David A. Schleef <ds@schleef.org>
+ *
+ * gstmultifilesrc.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-multifilesrc
+ * @title: multifilesrc
+ * @see_also: #GstFileSrc
+ *
+ * Reads buffers from sequentially named files. If used together with an image
+ * decoder, one needs to use the #GstMultiFileSrc:caps property or a capsfilter
+ * to force to caps containing a framerate. Otherwise image decoders send EOS
+ * after the first picture. We also need a videorate element to set timestamps
+ * on all buffers after the first one in accordance with the framerate.
+ *
+ * File names are created by replacing "\%d" with the index using `printf()`.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 multifilesrc location="img.%04d.png" index=0 caps="image/png,framerate=\(fraction\)12/1" ! \
+ * pngdec ! videoconvert ! videorate ! theoraenc ! oggmux ! \
+ * filesink location="images.ogg"
+ * ]| This pipeline creates a video file "images.ogg" by joining multiple PNG
+ * files named img.0000.png, img.0001.png, etc.
+ *
+*/
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstmultifilesrc.h"
+
+
+static GstFlowReturn gst_multi_file_src_create (GstPushSrc * src,
+ GstBuffer ** buffer);
+
+static void gst_multi_file_src_dispose (GObject * object);
+
+static void gst_multi_file_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_multi_file_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static GstCaps *gst_multi_file_src_getcaps (GstBaseSrc * src, GstCaps * filter);
+static gboolean gst_multi_file_src_query (GstBaseSrc * src, GstQuery * query);
+static void gst_multi_file_src_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+
+
+static GstStaticPadTemplate gst_multi_file_src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GST_DEBUG_CATEGORY_STATIC (gst_multi_file_src_debug);
+#define GST_CAT_DEFAULT gst_multi_file_src_debug
+
+enum
+{
+ PROP_0,
+ PROP_LOCATION,
+ PROP_INDEX,
+ PROP_START_INDEX,
+ PROP_STOP_INDEX,
+ PROP_CAPS,
+ PROP_LOOP
+};
+
+#define DEFAULT_LOCATION "%05d"
+#define DEFAULT_INDEX 0
+
+#define gst_multi_file_src_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstMultiFileSrc, gst_multi_file_src, GST_TYPE_PUSH_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_multi_file_src_uri_handler_init));
+GST_ELEMENT_REGISTER_DEFINE (multifilesrc, "multifilesrc", GST_RANK_NONE,
+ gst_multi_file_src_get_type ());
+
+/* GstBaseSrc::is_seekable: seeking is only possible when a framerate is
+ * known (taken from the configured caps), since that is what maps a time
+ * position to a file index in do_seek(). */
+static gboolean
+is_seekable (GstBaseSrc * src)
+{
+  GstMultiFileSrc *mfs = GST_MULTI_FILE_SRC (src);
+
+  return mfs->fps_n != -1;
+}
+
+/* GstBaseSrc::do_seek: translate the requested time position into a file
+ * index using the framerate from the configured caps.  Reverse playback is
+ * not implemented. */
+static gboolean
+do_seek (GstBaseSrc * bsrc, GstSegment * segment)
+{
+  gboolean reverse;
+  GstClockTime position;
+  GstMultiFileSrc *src;
+
+  src = GST_MULTI_FILE_SRC (bsrc);
+
+  segment->time = segment->start;
+  position = segment->position;
+  reverse = segment->rate < 0;
+
+  if (reverse) {
+    GST_FIXME_OBJECT (src, "Handle reverse playback");
+
+    return FALSE;
+  }
+
+  /* now move to the position indicated */
+  if (src->fps_n) {
+    /* NOTE(review): this condition is also true for fps_n == -1 ("unset");
+     * in practice is_seekable() should prevent seeks in that case -- confirm
+     * that basesrc never calls do_seek() on a non-seekable source. */
+    src->index = gst_util_uint64_scale (position,
+        src->fps_n, src->fps_d * GST_SECOND);
+  } else {
+    src->index = 0;
+    GST_WARNING_OBJECT (src, "No FPS set, can not seek");
+
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* Class initialisation: installs properties, wires up the GstBaseSrc /
+ * GstPushSrc vfuncs and registers pad template and element metadata. */
+static void
+gst_multi_file_src_class_init (GstMultiFileSrcClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+  GstPushSrcClass *gstpushsrc_class = GST_PUSH_SRC_CLASS (klass);
+  GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
+
+  gobject_class->set_property = gst_multi_file_src_set_property;
+  gobject_class->get_property = gst_multi_file_src_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_LOCATION,
+      g_param_spec_string ("location", "File Location",
+          "Pattern to create file names of input files. File names are "
+          "created by calling sprintf() with the pattern and the current "
+          "index.", DEFAULT_LOCATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_INDEX,
+      g_param_spec_int ("index", "File Index",
+          "Index to use with location property to create file names. The "
+          "index is incremented by one for each buffer read.",
+          0, INT_MAX, DEFAULT_INDEX,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_START_INDEX,
+      g_param_spec_int ("start-index", "Start Index",
+          "Start value of index. The initial value of index can be set "
+          "either by setting index or start-index. When the end of the loop "
+          "is reached, the index will be set to the value start-index.",
+          0, INT_MAX, DEFAULT_INDEX,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_STOP_INDEX,
+      g_param_spec_int ("stop-index", "Stop Index",
+          "Stop value of index. The special value -1 means no stop.",
+          -1, INT_MAX, DEFAULT_INDEX,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_CAPS,
+      g_param_spec_boxed ("caps", "Caps",
+          "Caps describing the format of the data.",
+          GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_LOOP,
+      g_param_spec_boolean ("loop", "Loop",
+          "Whether to repeat from the beginning when all files have been read.",
+          FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gobject_class->dispose = gst_multi_file_src_dispose;
+
+  /* base-class virtuals: caps negotiation, queries and seeking */
+  gstbasesrc_class->get_caps = gst_multi_file_src_getcaps;
+  gstbasesrc_class->query = gst_multi_file_src_query;
+  gstbasesrc_class->is_seekable = is_seekable;
+  gstbasesrc_class->do_seek = do_seek;
+
+  /* push-mode buffer production */
+  gstpushsrc_class->create = gst_multi_file_src_create;
+
+  GST_DEBUG_CATEGORY_INIT (gst_multi_file_src_debug, "multifilesrc", 0,
+      "multifilesrc element");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_multi_file_src_pad_template);
+  gst_element_class_set_static_metadata (gstelement_class, "Multi-File Source",
+      "Source/File", "Read a sequentially named set of files into buffers",
+      "David Schleef <ds@schleef.org>");
+}
+
+/* Instance initialisation: set property defaults; fps is unknown (-1)
+ * until caps with a framerate are configured. */
+static void
+gst_multi_file_src_init (GstMultiFileSrc * multifilesrc)
+{
+  multifilesrc->start_index = DEFAULT_INDEX;
+  multifilesrc->index = DEFAULT_INDEX;
+  multifilesrc->stop_index = -1;
+  multifilesrc->filename = g_strdup (DEFAULT_LOCATION);
+  multifilesrc->successful_read = FALSE;
+  multifilesrc->fps_n = multifilesrc->fps_d = -1;
+
+}
+
+/* GObject::dispose: release the filename pattern and configured caps. */
+static void
+gst_multi_file_src_dispose (GObject * object)
+{
+  GstMultiFileSrc *src = GST_MULTI_FILE_SRC (object);
+
+  g_free (src->filename);
+  src->filename = NULL;
+  if (src->caps)
+    gst_caps_unref (src->caps);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GstBaseSrc::get_caps: return the caps configured via the "caps" property,
+ * intersected with the downstream filter caps if one is supplied; when no
+ * caps are configured, fall back to the filter caps or ANY. */
+static GstCaps *
+gst_multi_file_src_getcaps (GstBaseSrc * src, GstCaps * filter)
+{
+  GstMultiFileSrc *mfsrc = GST_MULTI_FILE_SRC (src);
+
+  GST_DEBUG_OBJECT (src, "returning %" GST_PTR_FORMAT, mfsrc->caps);
+
+  if (mfsrc->caps == NULL)
+    return (filter != NULL) ? gst_caps_ref (filter) : gst_caps_new_any ();
+
+  if (filter != NULL)
+    return gst_caps_intersect_full (filter, mfsrc->caps,
+        GST_CAPS_INTERSECT_FIRST);
+
+  return gst_caps_ref (mfsrc->caps);
+}
+
+/* GstBaseSrc::query: answer POSITION queries in BUFFERS/DEFAULT format as
+ * the number of files read so far; delegate everything else to basesrc. */
+static gboolean
+gst_multi_file_src_query (GstBaseSrc * src, GstQuery * query)
+{
+  gboolean res;
+  GstMultiFileSrc *mfsrc;
+
+  mfsrc = GST_MULTI_FILE_SRC (src);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      GstFormat format;
+
+      gst_query_parse_position (query, &format, NULL);
+      switch (format) {
+        case GST_FORMAT_BUFFERS:
+        case GST_FORMAT_DEFAULT:
+          /* position = number of files consumed since start_index */
+          gst_query_set_position (query, format,
+              mfsrc->index - mfsrc->start_index);
+          res = TRUE;
+          break;
+        default:
+          res = GST_BASE_SRC_CLASS (parent_class)->query (src, query);
+          break;
+      }
+      break;
+    }
+    default:
+      res = GST_BASE_SRC_CLASS (parent_class)->query (src, query);
+      break;
+  }
+  return res;
+}
+
+/* Replaces the configured filename pattern.  g_strdup(NULL) returns NULL,
+ * so the NULL and non-NULL cases of the original collapse into one call.
+ * Always succeeds. */
+static gboolean
+gst_multi_file_src_set_location (GstMultiFileSrc * src, const gchar * location)
+{
+  g_free (src->filename);
+  src->filename = g_strdup (location);
+
+  return TRUE;
+}
+
+/* GObject::set_property handler.  Note that setting "caps" also updates
+ * fps_n/fps_d from the caps' framerate field (used for seeking). */
+static void
+gst_multi_file_src_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMultiFileSrc *src = GST_MULTI_FILE_SRC (object);
+
+  switch (prop_id) {
+    case PROP_LOCATION:
+      gst_multi_file_src_set_location (src, g_value_get_string (value));
+      break;
+    case PROP_INDEX:
+      GST_OBJECT_LOCK (src);
+      /* index was really meant to be read-only, but for backwards-compatibility
+       * we set start_index to make it work as it used to */
+      if (!GST_OBJECT_FLAG_IS_SET (src, GST_BASE_SRC_FLAG_STARTED))
+        src->start_index = g_value_get_int (value);
+      else
+        src->index = g_value_get_int (value);
+      GST_OBJECT_UNLOCK (src);
+      break;
+    case PROP_START_INDEX:
+      src->start_index = g_value_get_int (value);
+      break;
+    case PROP_STOP_INDEX:
+      src->stop_index = g_value_get_int (value);
+      break;
+    case PROP_CAPS:
+    {
+      GstStructure *st = NULL;
+      const GstCaps *caps = gst_value_get_caps (value);
+      GstCaps *new_caps;
+
+      if (caps == NULL) {
+        new_caps = gst_caps_new_any ();
+      } else {
+        new_caps = gst_caps_copy (caps);
+      }
+      /* NOTE(review): gst_caps_replace() takes its own reference on
+       * new_caps, and the local reference from copy/new_any is never
+       * dropped here -- looks like a caps refcount leak; confirm against
+       * GstCaps ownership rules. */
+      gst_caps_replace (&src->caps, new_caps);
+      gst_pad_set_caps (GST_BASE_SRC_PAD (src), new_caps);
+
+      /* cache the framerate (if any) for time-based seeking */
+      if (new_caps && gst_caps_get_size (new_caps) == 1 &&
+          (st = gst_caps_get_structure (new_caps, 0))
+          && gst_structure_get_fraction (st, "framerate", &src->fps_n,
+              &src->fps_d)) {
+        GST_INFO_OBJECT (src, "Setting framerate to %d/%d", src->fps_n,
+            src->fps_d);
+      } else {
+        src->fps_n = -1;
+        src->fps_d = -1;
+      }
+    }
+      break;
+    case PROP_LOOP:
+      src->loop = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property handler: plain read-back of the stored fields. */
+static void
+gst_multi_file_src_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMultiFileSrc *src = GST_MULTI_FILE_SRC (object);
+
+  switch (prop_id) {
+    case PROP_LOCATION:
+      g_value_set_string (value, src->filename);
+      break;
+    case PROP_INDEX:
+      g_value_set_int (value, src->index);
+      break;
+    case PROP_START_INDEX:
+      g_value_set_int (value, src->start_index);
+      break;
+    case PROP_STOP_INDEX:
+      g_value_set_int (value, src->stop_index);
+      break;
+    case PROP_CAPS:
+      gst_value_set_caps (value, src->caps);
+      break;
+    case PROP_LOOP:
+      g_value_set_boolean (value, src->loop);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Builds the file name for the current index by expanding the user-supplied
+ * printf-style pattern.  Caller owns the returned string (g_free).
+ * NOTE(review): the pattern is a non-literal format string taken from the
+ * "location" property; a malformed pattern (e.g. "%s") would be undefined --
+ * presumably accepted as documented behaviour, verify. */
+static gchar *
+gst_multi_file_src_get_filename (GstMultiFileSrc * multifilesrc)
+{
+  gchar *filename;
+
+  GST_DEBUG ("%d", multifilesrc->index);
+  filename = g_strdup_printf (multifilesrc->filename, multifilesrc->index);
+
+  return filename;
+}
+
+/* GstPushSrc::create: read the file for the current index into a buffer.
+ * Handles start/stop index clamping, looping, and turns a missing file into
+ * EOS once at least one file was read successfully; a failure on the very
+ * first file is reported as an error instead. */
+static GstFlowReturn
+gst_multi_file_src_create (GstPushSrc * src, GstBuffer ** buffer)
+{
+  GstMultiFileSrc *multifilesrc;
+  gsize size;
+  gchar *data;
+  gchar *filename;
+  GstBuffer *buf;
+  gboolean ret;
+  GError *error = NULL;
+
+  multifilesrc = GST_MULTI_FILE_SRC (src);
+
+  /* clamp index into [start_index, stop_index] */
+  if (multifilesrc->index < multifilesrc->start_index) {
+    multifilesrc->index = multifilesrc->start_index;
+  }
+
+  if (multifilesrc->stop_index != -1 &&
+      multifilesrc->index > multifilesrc->stop_index) {
+    if (multifilesrc->loop)
+      multifilesrc->index = multifilesrc->start_index;
+    else
+      return GST_FLOW_EOS;
+  }
+
+  filename = gst_multi_file_src_get_filename (multifilesrc);
+
+  GST_DEBUG_OBJECT (multifilesrc, "reading from file \"%s\".", filename);
+
+  ret = g_file_get_contents (filename, &data, &size, &error);
+  if (!ret) {
+    if (multifilesrc->successful_read) {
+      /* If we've read at least one buffer successfully, not finding the
+       * next file is EOS. */
+      g_free (filename);
+      if (error != NULL)
+        g_error_free (error);
+
+      if (multifilesrc->loop) {
+        /* wrap around to start_index and retry once */
+        error = NULL;
+        multifilesrc->index = multifilesrc->start_index;
+
+        filename = gst_multi_file_src_get_filename (multifilesrc);
+        ret = g_file_get_contents (filename, &data, &size, &error);
+        if (!ret) {
+          g_free (filename);
+          if (error != NULL)
+            g_error_free (error);
+
+          return GST_FLOW_EOS;
+        }
+      } else {
+        return GST_FLOW_EOS;
+      }
+    } else {
+      goto handle_error;
+    }
+  }
+
+  multifilesrc->successful_read = TRUE;
+  multifilesrc->index++;
+
+  /* hand the malloc'ed contents to the buffer zero-copy; g_free is the
+   * destroy notify so the buffer owns the memory from here on */
+  buf = gst_buffer_new ();
+  gst_buffer_append_memory (buf,
+      gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));
+  GST_BUFFER_OFFSET (buf) = multifilesrc->offset;
+  GST_BUFFER_OFFSET_END (buf) = multifilesrc->offset + size;
+  multifilesrc->offset += size;
+
+  GST_DEBUG_OBJECT (multifilesrc, "read file \"%s\".", filename);
+
+  g_free (filename);
+  *buffer = buf;
+  return GST_FLOW_OK;
+
+handle_error:
+  {
+    if (error != NULL) {
+      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,
+          ("Error while reading from file \"%s\".", filename),
+          ("%s", error->message));
+      g_error_free (error);
+    } else {
+      GST_ELEMENT_ERROR (multifilesrc, RESOURCE, READ,
+          ("Error while reading from file \"%s\".", filename),
+          ("%s", g_strerror (errno)));
+    }
+    g_free (filename);
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* GstURIHandler: this element is a source-side URI handler. */
+static GstURIType
+gst_multi_file_src_uri_get_type (GType type)
+{
+  return GST_URI_SRC;
+}
+
+/* GstURIHandler: advertise the "multifile://" URI scheme. */
+static const gchar *const *
+gst_multi_file_src_uri_get_protocols (GType type)
+{
+  static const gchar *protocols[] = { "multifile", NULL };
+
+  return (const gchar * const *) protocols;
+}
+
+/* GstURIHandler::get_uri: serialise the current location as a
+ * multifile:// URI, or return NULL if no location is set.  Caller owns
+ * the returned string. */
+static gchar *
+gst_multi_file_src_uri_get_uri (GstURIHandler * handler)
+{
+  GstMultiFileSrc *src = GST_MULTI_FILE_SRC (handler);
+  gchar *ret;
+
+  GST_OBJECT_LOCK (src);
+  if (src->filename != NULL) {
+    /* Fixed typo: the scheme must be "multifile", matching the protocol
+     * advertised by gst_multi_file_src_uri_get_protocols(); the original
+     * "multifle" produced URIs this element itself would not accept. */
+    GstUri *uri = gst_uri_new ("multifile", NULL, NULL, GST_URI_NO_PORT,
+        src->filename, NULL, NULL);
+
+    ret = gst_uri_to_string (uri);
+    gst_uri_unref (uri);
+  } else {
+    ret = NULL;
+  }
+  GST_OBJECT_UNLOCK (src);
+
+  return ret;
+}
+
+/* GstURIHandler::set_uri: parse a multifile:// URI and install its
+ * (unescaped) path as the location pattern. */
+static gboolean
+gst_multi_file_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+    GError ** error)
+{
+  GstMultiFileSrc *src = GST_MULTI_FILE_SRC (handler);
+  GstUri *gsturi;
+  gchar *path;
+
+  gsturi = gst_uri_from_string (uri);
+  if (gsturi == NULL)
+    goto invalid_uri;
+
+  /* This should get us the unescaped path */
+  path = gst_uri_get_path (gsturi);
+  if (path == NULL)
+    goto invalid_uri;
+
+  GST_OBJECT_LOCK (src);
+  gst_multi_file_src_set_location (src, path);
+  GST_OBJECT_UNLOCK (src);
+
+  g_free (path);
+  gst_uri_unref (gsturi);
+
+  return TRUE;
+
+/* ERRORS */
+invalid_uri:
+  {
+    GST_WARNING_OBJECT (src, "Invalid multifile URI '%s'", uri);
+    g_set_error (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+        "Invalid multifile URI");
+    if (gsturi)
+      gst_uri_unref (gsturi);
+    return FALSE;
+  }
+}
+
+/* GstURIHandler interface init: hook up the four vfuncs above. */
+static void
+gst_multi_file_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+  GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+  iface->get_type = gst_multi_file_src_uri_get_type;
+  iface->get_protocols = gst_multi_file_src_uri_get_protocols;
+  iface->get_uri = gst_multi_file_src_uri_get_uri;
+  iface->set_uri = gst_multi_file_src_uri_set_uri;
+}
diff --git a/gst/multifile/gstmultifilesrc.h b/gst/multifile/gstmultifilesrc.h
new file mode 100644
index 0000000000..81ab9e83eb
--- /dev/null
+++ b/gst/multifile/gstmultifilesrc.h
@@ -0,0 +1,74 @@
+/* GStreamer
+ * Copyright (C) 2006 David A. Schleef <ds@schleef.org>
+ *
+ * gstmultifilesrc.c:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MULTIFILESRC_H__
+#define __GST_MULTIFILESRC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstpushsrc.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MULTI_FILE_SRC \
+ (gst_multi_file_src_get_type())
+#define GST_MULTI_FILE_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULTI_FILE_SRC,GstMultiFileSrc))
+#define GST_MULTI_FILE_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULTI_FILE_SRC,GstMultiFileSrcClass))
+#define GST_IS_MULTI_FILE_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULTI_FILE_SRC))
+#define GST_IS_MULTI_FILE_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULTI_FILE_SRC))
+
+typedef struct _GstMultiFileSrc GstMultiFileSrc;
+typedef struct _GstMultiFileSrcClass GstMultiFileSrcClass;
+
+struct _GstMultiFileSrc
+{
+  GstPushSrc parent;
+
+  gchar *filename;              /* printf-style pattern for input file names */
+  int start_index;              /* first index to read */
+  int stop_index;               /* last index to read, -1 = no limit */
+  int index;                    /* next index to read */
+
+  /* running byte offset written into buffer OFFSET/OFFSET_END fields.
+   * NOTE(review): plain 'int' will overflow past 2 GiB of cumulative
+   * data -- presumably should be a 64-bit type; verify ABI impact. */
+  int offset;
+
+  gboolean loop;                /* restart at start_index after the last file */
+
+  GstCaps *caps;                /* caps configured via the "caps" property */
+  gboolean successful_read;     /* at least one file was read; missing file => EOS */
+
+  gint fps_n, fps_d;            /* framerate from caps, -1/-1 when unset */
+};
+
+struct _GstMultiFileSrcClass
+{
+  GstPushSrcClass parent_class;
+};
+
+GType gst_multi_file_src_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (multifilesrc);
+
+G_END_DECLS
+
+#endif /* __GST_MULTIFILESRC_H__ */
diff --git a/gst/multifile/gstsplitfilesrc.c b/gst/multifile/gstsplitfilesrc.c
new file mode 100644
index 0000000000..f147e1d558
--- /dev/null
+++ b/gst/multifile/gstsplitfilesrc.c
@@ -0,0 +1,600 @@
+/* GStreamer Split File Source
+ * Copyright (C) 2011 Collabora Ltd. <tim.muller@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-splitfilesrc
+ * @title: splitfilesrc
+ * @see_also: #GstFileSrc, #GstMultiFileSrc
+ *
+ * Reads data from multiple files, presenting those files as one continuous
+ * file to downstream elements. This is useful for reading a large file that
+ * had to be split into multiple parts due to filesystem file size limitations,
+ * for example.
+ *
+ * The files to select are chosen via the location property, which supports
+ * (and expects) shell-style wildcards (but only for the filename, not for
+ * directories). The results will be sorted.
+ *
+ * ## Example launch lines
+ * |[
+ * gst-launch-1.0 splitfilesrc location="/path/to/part-*.mpg" ! decodebin ! ...
+ * ]| Plays the different parts as if they were one single MPEG file.
+ * |[
+ * gst-launch-1.0 playbin uri="splitfile://path/to/foo.avi.*"
+ * ]| Plays the different parts as if they were one single AVI file.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstsplitfilesrc.h"
+#include "gstsplitutils.h"
+
+#include <string.h>
+
+enum
+{
+ PROP_LOCATION = 1
+};
+
+#define DEFAULT_LOCATION NULL
+
+static void gst_split_file_src_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+static void gst_split_file_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_split_file_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_split_file_src_finalize (GObject * obj);
+
+static gboolean gst_split_file_src_start (GstBaseSrc * basesrc);
+static gboolean gst_split_file_src_stop (GstBaseSrc * basesrc);
+static gboolean gst_split_file_src_can_seek (GstBaseSrc * basesrc);
+static gboolean gst_split_file_src_get_size (GstBaseSrc * basesrc, guint64 * s);
+static gboolean gst_split_file_src_unlock (GstBaseSrc * basesrc);
+static GstFlowReturn gst_split_file_src_create (GstBaseSrc * basesrc,
+ guint64 offset, guint size, GstBuffer ** buffer);
+
+static GstStaticPadTemplate gst_split_file_src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+GST_DEBUG_CATEGORY_STATIC (splitfilesrc_debug);
+#define GST_CAT_DEFAULT splitfilesrc_debug
+
+
+G_DEFINE_TYPE_WITH_CODE (GstSplitFileSrc, gst_split_file_src, GST_TYPE_BASE_SRC,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER,
+ gst_split_file_src_uri_handler_init));
+GST_ELEMENT_REGISTER_DEFINE (splitfilesrc, "splitfilesrc", GST_RANK_NONE,
+ gst_split_file_src_get_type ());
+
+#ifdef G_OS_WIN32
+#define WIN32_BLURB " Location string must be in UTF-8 encoding (on Windows)."
+#else
+#define WIN32_BLURB /* nothing */
+#endif
+
+/* Class initialisation: install the "location" property and the GstBaseSrc
+ * vfuncs that implement random-access reading over multiple file parts. */
+static void
+gst_split_file_src_class_init (GstSplitFileSrcClass * klass)
+{
+  GstBaseSrcClass *gstbasesrc_class = GST_BASE_SRC_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+  gobject_class->set_property = gst_split_file_src_set_property;
+  gobject_class->get_property = gst_split_file_src_get_property;
+  gobject_class->finalize = gst_split_file_src_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_LOCATION,
+      g_param_spec_string ("location", "File Location",
+          "Wildcard pattern to match file names of the input files. If "
+          "the location is an absolute path or contains directory components, "
+          "only the base file name part will be considered for pattern "
+          "matching. The results will be sorted." WIN32_BLURB,
+          DEFAULT_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gstbasesrc_class->start = GST_DEBUG_FUNCPTR (gst_split_file_src_start);
+  gstbasesrc_class->stop = GST_DEBUG_FUNCPTR (gst_split_file_src_stop);
+  gstbasesrc_class->create = GST_DEBUG_FUNCPTR (gst_split_file_src_create);
+  gstbasesrc_class->get_size = GST_DEBUG_FUNCPTR (gst_split_file_src_get_size);
+  gstbasesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_split_file_src_unlock);
+  gstbasesrc_class->is_seekable =
+      GST_DEBUG_FUNCPTR (gst_split_file_src_can_seek);
+
+  GST_DEBUG_CATEGORY_INIT (splitfilesrc_debug, "splitfilesrc", 0,
+      "splitfilesrc element");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_split_file_src_pad_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "Split-File Source",
+      "Source/File",
+      "Read a sequentially named set of files as if it was one large file",
+      "Tim-Philipp Müller <tim.muller@collabora.co.uk>");
+}
+
+/* Instance initialisation: nothing to do, all fields start zeroed. */
+static void
+gst_split_file_src_init (GstSplitFileSrc * splitfilesrc)
+{
+}
+
+/* GObject::finalize: release the location string. */
+static void
+gst_split_file_src_finalize (GObject * obj)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (obj);
+
+  g_free (src->location);
+  src->location = NULL;
+
+  G_OBJECT_CLASS (gst_split_file_src_parent_class)->finalize (obj);
+}
+
+/* GstBaseSrc::is_seekable: the combined file is always byte-seekable. */
+static gboolean
+gst_split_file_src_can_seek (GstBaseSrc * basesrc)
+{
+  return TRUE;
+}
+
+/* GstBaseSrc::unlock: intentionally a no-op (see comment below). */
+static gboolean
+gst_split_file_src_unlock (GstBaseSrc * basesrc)
+{
+  /* This is not actually that useful, since all normal file
+   * operations are fully blocking anyway */
+#if 0
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (basesrc);
+
+  GST_DEBUG_OBJECT (src, "cancelling pending I/O operation if there is one");
+  /* g_cancellable_cancel (src->cancellable); */
+  GST_DEBUG_OBJECT (src, "done");
+#endif
+
+  return TRUE;
+}
+
+/* GstBaseSrc::get_size: total size is one past the stop offset of the last
+ * part.  NOTE(review): assumes parts/num_parts are valid, i.e. that basesrc
+ * only queries size after a successful start() -- confirm. */
+static gboolean
+gst_split_file_src_get_size (GstBaseSrc * basesrc, guint64 * size)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (basesrc);
+
+  *size = src->parts[src->num_parts - 1].stop + 1;
+  return TRUE;
+}
+
+/* Stores the wildcard location, stripping a "splitfile://" prefix if the
+ * caller passed a full URI.  Takes the object lock since get_property and
+ * the URI handler read src->location concurrently. */
+static void
+gst_split_file_src_set_location (GstSplitFileSrc * src, const char *location)
+{
+  GST_OBJECT_LOCK (src);
+  g_free (src->location);
+
+  if (location != NULL && g_str_has_prefix (location, "splitfile://"))
+    src->location = gst_uri_get_location (location);
+  else
+    src->location = g_strdup (location);
+#ifdef G_OS_WIN32
+  /* NOTE(review): src->location may be NULL here (location == NULL);
+   * g_utf8_validate on NULL looks unsafe -- verify on Windows builds. */
+  if (!g_utf8_validate (src->location, -1, NULL)) {
+    g_warning ("splitfilesrc 'location' property must be in UTF-8 "
+        "encoding on Windows");
+  }
+#endif
+  GST_OBJECT_UNLOCK (src);
+}
+
+/* GObject::set_property handler: only "location" is settable. */
+static void
+gst_split_file_src_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (object);
+
+  switch (prop_id) {
+    case PROP_LOCATION:
+      gst_split_file_src_set_location (src, g_value_get_string (value));
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property handler: read location under the object lock. */
+static void
+gst_split_file_src_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (object);
+
+  switch (prop_id) {
+    case PROP_LOCATION:
+      GST_OBJECT_LOCK (src);
+      g_value_set_string (value, src->location);
+      GST_OBJECT_UNLOCK (src);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstBaseSrc::start: find all files matching the location pattern, open an
+ * input stream for each, and build the part table mapping cumulative byte
+ * ranges [start, stop] to file parts.  Returns FALSE (with an element
+ * error posted) if no files match or any part cannot be opened. */
+static gboolean
+gst_split_file_src_start (GstBaseSrc * basesrc)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (basesrc);
+  GCancellable *cancel;
+  gboolean ret = FALSE;
+  guint64 offset;
+  GError *err = NULL;
+  gchar *basename = NULL;
+  gchar *dirname = NULL;
+  gchar **files;
+  guint i;
+
+  GST_OBJECT_LOCK (src);
+  if (src->location != NULL && src->location[0] != '\0') {
+    basename = g_path_get_basename (src->location);
+    dirname = g_path_get_dirname (src->location);
+  }
+  GST_OBJECT_UNLOCK (src);
+
+  files = gst_split_util_find_files (dirname, basename, &err);
+
+  /* the finder may return an empty list without setting err, so the
+   * no_files error path must not assume err is non-NULL */
+  if (files == NULL || *files == NULL)
+    goto no_files;
+
+  src->num_parts = g_strv_length (files);
+  src->parts = g_new0 (GstFilePart, src->num_parts);
+
+  /* NULL on first start; GIO accepts a NULL cancellable */
+  cancel = src->cancellable;
+
+  offset = 0;
+  for (i = 0; i < src->num_parts; ++i) {
+    GFileInputStream *stream;
+    GFileInfo *info;
+    goffset size;
+    GFile *file;
+
+    file = g_file_new_for_path (files[i]);
+    stream = g_file_read (file, cancel, &err);
+    g_object_unref (file);
+
+    if (err != NULL)
+      goto open_read_error;
+
+    info = g_file_input_stream_query_info (stream, "standard::*", NULL, &err);
+    if (err != NULL) {
+      g_object_unref (stream);
+      goto query_info_error;
+    }
+
+    size = g_file_info_get_size (info);
+    g_object_unref (info);
+
+    /* record the part's cumulative byte range (both ends inclusive) */
+    src->parts[i].stream = stream;
+    src->parts[i].path = g_strdup (files[i]);
+    src->parts[i].start = offset;
+    src->parts[i].stop = offset + size - 1;
+
+    GST_DEBUG ("[%010" G_GUINT64_FORMAT "-%010" G_GUINT64_FORMAT "] %s",
+        src->parts[i].start, src->parts[i].stop, src->parts[i].path);
+
+    offset += size;
+  }
+
+  GST_INFO ("Successfully opened %u file parts for reading", src->num_parts);
+
+  src->cur_part = 0;
+
+  src->cancellable = g_cancellable_new ();
+
+  ret = TRUE;
+
+done:
+  if (err != NULL)
+    g_error_free (err);
+  g_strfreev (files);
+  g_free (basename);
+  g_free (dirname);
+  return ret;
+
+/* ERRORS */
+no_files:
+  {
+    /* Fix: err may legitimately be NULL here (empty result without an
+     * error); the original dereferenced err->code / err->message and
+     * would crash in that case. */
+    if (err != NULL && err->code == G_IO_ERROR_CANCELLED)
+      goto cancelled;
+
+    GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ,
+        ("%s", (err != NULL) ? err->message : "Found no matching files"),
+        ("Failed to find files in '%s' for pattern '%s'",
+            GST_STR_NULL (dirname), GST_STR_NULL (basename)));
+    goto done;
+  }
+open_read_error:
+  {
+    if (err->code == G_IO_ERROR_CANCELLED)
+      goto cancelled;
+
+    GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, ("%s", err->message),
+        ("Failed to open file '%s' for reading", files[i]));
+    goto done;
+  }
+query_info_error:
+  {
+    if (err->code == G_IO_ERROR_CANCELLED)
+      goto cancelled;
+
+    GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, ("%s", err->message),
+        ("Failed to query info for file '%s'", files[i]));
+    goto done;
+  }
+cancelled:
+  {
+    GST_DEBUG_OBJECT (src, "I/O operation cancelled from another thread");
+    goto done;
+  }
+}
+
+/* GstBaseSrc::stop: close all file part streams and free the part table. */
+static gboolean
+gst_split_file_src_stop (GstBaseSrc * basesrc)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (basesrc);
+  guint i;
+
+  for (i = 0; i < src->num_parts; ++i) {
+    if (src->parts[i].stream != NULL)
+      g_object_unref (src->parts[i].stream);
+    g_free (src->parts[i].path);
+  }
+  g_free (src->parts);
+  src->parts = NULL;
+  src->num_parts = 0;
+
+  /* NULL-safe unref-and-clear: start() only creates the cancellable on
+   * success, so a plain g_object_unref() could warn on a NULL pointer */
+  g_clear_object (&src->cancellable);
+
+  return TRUE;
+}
+
+/* Comparison callback for the binary search over the part table: orders a
+ * target byte offset against a part's inclusive [start, stop] range. */
+static gint
+gst_split_file_src_part_search (GstFilePart * part, guint64 * offset,
+    gpointer user_data)
+{
+  if (*offset > part->stop)
+    return -1;                  /* The target is after this part */
+  else if (*offset < part->start)
+    return 1;                   /* The target is before this part */
+  else
+    return 0;                   /* This is the target part */
+}
+
+/* Binary-searches the part table for the part containing @offset; writes
+ * its index to @part_number and returns TRUE, or FALSE if out of range. */
+static gboolean
+gst_split_file_src_find_part_for_offset (GstSplitFileSrc * src, guint64 offset,
+    guint * part_number)
+{
+  gboolean res = TRUE;
+  GstFilePart *part;
+
+  part =
+      gst_util_array_binary_search (src->parts, src->num_parts,
+      sizeof (GstFilePart),
+      (GCompareDataFunc) gst_split_file_src_part_search,
+      GST_SEARCH_MODE_AFTER, &offset, NULL);
+
+  if (part)
+    *part_number = part - src->parts;
+  else
+    res = FALSE;
+
+  return res;
+}
+
+/* GstBaseSrc::create: fill a buffer of @size bytes starting at the logical
+ * byte @offset of the combined file, reading from and crossing as many file
+ * parts as needed.  A short read on the last part truncates the buffer; a
+ * short read on any other part is treated as a fatal error (the file must
+ * have changed under us). */
+static GstFlowReturn
+gst_split_file_src_create (GstBaseSrc * basesrc, guint64 offset, guint size,
+    GstBuffer ** buffer)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (basesrc);
+  GstFilePart cur_part;
+  GInputStream *stream;
+  GCancellable *cancel;
+  GSeekable *seekable;
+  GstBuffer *buf;
+  GError *err = NULL;
+  guint64 read_offset;
+  GstMapInfo map;
+  guint8 *data;
+  guint to_read;
+
+  /* locate the part containing the requested offset, starting from the
+   * part used last (the common sequential-read case needs no search) */
+  cur_part = src->parts[src->cur_part];
+  if (offset < cur_part.start || offset > cur_part.stop) {
+    if (!gst_split_file_src_find_part_for_offset (src, offset, &src->cur_part))
+      return GST_FLOW_EOS;
+    cur_part = src->parts[src->cur_part];
+  }
+
+  GST_LOG_OBJECT (src, "current part: %u (%" G_GUINT64_FORMAT " - "
+      "%" G_GUINT64_FORMAT ", %s)", src->cur_part, cur_part.start,
+      cur_part.stop, cur_part.path);
+
+  buf = gst_buffer_new_allocate (NULL, size, NULL);
+
+  GST_BUFFER_OFFSET (buf) = offset;
+
+  gst_buffer_map (buf, &map, GST_MAP_WRITE);
+  data = map.data;
+
+  cancel = src->cancellable;
+
+  while (size > 0) {
+    guint64 bytes_to_end_of_part;
+    gsize read = 0;
+
+    /* we want the offset into the file part */
+    read_offset = offset - cur_part.start;
+
+    GST_LOG ("Reading part %03u from offset %" G_GUINT64_FORMAT " (%s)",
+        src->cur_part, read_offset, cur_part.path);
+
+    /* FIXME: only seek when needed (hopefully gio is smart) */
+    seekable = G_SEEKABLE (cur_part.stream);
+    if (!g_seekable_seek (seekable, read_offset, G_SEEK_SET, cancel, &err))
+      goto seek_failed;
+
+    GST_LOG_OBJECT (src, "now: %" G_GUINT64_FORMAT, g_seekable_tell (seekable));
+
+    bytes_to_end_of_part = (cur_part.stop - cur_part.start) + 1 - read_offset;
+    to_read = MIN (size, bytes_to_end_of_part);
+
+    GST_LOG_OBJECT (src, "reading %u bytes from part %u (bytes to end of "
+        "part: %u)", to_read, src->cur_part, (guint) bytes_to_end_of_part);
+
+    stream = G_INPUT_STREAM (cur_part.stream);
+
+    /* NB: we won't try to read beyond EOF */
+    if (!g_input_stream_read_all (stream, data, to_read, &read, cancel, &err))
+      goto read_failed;
+
+    GST_LOG_OBJECT (src, "read %u bytes", (guint) read);
+
+    data += read;
+    size -= read;
+    offset += read;
+
+    /* are we done? */
+    if (size == 0)
+      break;
+
+    GST_LOG_OBJECT (src, "%u bytes left to read for this chunk", size);
+
+    /* corner case, this should never really happen (assuming basesrc clips
+     * requests beyond the file size) */
+    if (read < to_read) {
+      if (src->cur_part == src->num_parts - 1) {
+        /* last file part, stop reading and truncate buffer */
+        gst_buffer_set_size (buf, offset - GST_BUFFER_OFFSET (buf));
+        break;
+      } else {
+        goto file_part_changed;
+      }
+    }
+
+    /* this part is exhausted, continue with the next one */
+    ++src->cur_part;
+    cur_part = src->parts[src->cur_part];
+  }
+
+  GST_BUFFER_OFFSET_END (buf) = offset;
+
+  gst_buffer_unmap (buf, &map);
+
+  *buffer = buf;
+  GST_LOG_OBJECT (src, "read %" G_GSIZE_FORMAT " bytes into buf %p",
+      gst_buffer_get_size (buf), buf);
+  return GST_FLOW_OK;
+
+/* ERRORS */
+seek_failed:
+  {
+    if (err->code == G_IO_ERROR_CANCELLED)
+      goto cancelled;
+
+    GST_ELEMENT_ERROR (src, RESOURCE, SEEK, (NULL),
+        ("Seek to %" G_GUINT64_FORMAT " in %s failed", read_offset,
+            cur_part.path));
+    g_error_free (err);
+    gst_buffer_unref (buf);
+    return GST_FLOW_ERROR;
+  }
+read_failed:
+  {
+    if (err->code == G_IO_ERROR_CANCELLED)
+      goto cancelled;
+
+    GST_ELEMENT_ERROR (src, RESOURCE, READ, ("%s", err->message),
+        ("Read from %" G_GUINT64_FORMAT " in %s failed", read_offset,
+            cur_part.path));
+    g_error_free (err);
+    gst_buffer_unref (buf);
+    return GST_FLOW_ERROR;
+  }
+file_part_changed:
+  {
+    GST_ELEMENT_ERROR (src, RESOURCE, READ,
+        ("Read error while reading file part %s", cur_part.path),
+        ("Short read in file part, file may have been modified since start"));
+    gst_buffer_unref (buf);
+    return GST_FLOW_ERROR;
+  }
+cancelled:
+  {
+    GST_DEBUG_OBJECT (src, "I/O operation cancelled from another thread");
+    g_error_free (err);
+    gst_buffer_unref (buf);
+    return GST_FLOW_FLUSHING;
+  }
+}
+
+/* GstURIHandler: this element is a source-side URI handler. */
+static GstURIType
+gst_split_file_src_uri_get_type (GType type)
+{
+  return GST_URI_SRC;
+}
+
+/* GstURIHandler: advertise the "splitfile://" URI scheme. */
+static const gchar *const *
+gst_split_file_src_uri_get_protocols (GType type)
+{
+  static const gchar *protocols[] = { "splitfile", NULL };
+
+  return (const gchar * const *) protocols;
+}
+
+/* GstURIHandler::get_uri: serialise the location as a splitfile:// URI,
+ * or NULL if no location is set.  Caller owns the returned string. */
+static gchar *
+gst_split_file_src_uri_get_uri (GstURIHandler * handler)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (handler);
+  gchar *ret;
+
+  GST_OBJECT_LOCK (src);
+  if (src->location != NULL)
+    ret = g_strdup_printf ("splitfile://%s", src->location);
+  else
+    ret = NULL;
+  GST_OBJECT_UNLOCK (src);
+
+  return ret;
+}
+
+/* GstURIHandler::set_uri: accept any URI; set_location() strips the
+ * "splitfile://" prefix itself.  Always succeeds. */
+static gboolean
+gst_split_file_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+    GError ** error)
+{
+  GstSplitFileSrc *src = GST_SPLIT_FILE_SRC (handler);
+
+  gst_split_file_src_set_location (src, uri);
+
+  return TRUE;
+}
+
+/* GstURIHandler interface init: hook up the four vfuncs above. */
+static void
+gst_split_file_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+  GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+  iface->get_type = gst_split_file_src_uri_get_type;
+  iface->get_protocols = gst_split_file_src_uri_get_protocols;
+  iface->get_uri = gst_split_file_src_uri_get_uri;
+  iface->set_uri = gst_split_file_src_uri_set_uri;
+}
diff --git a/gst/multifile/gstsplitfilesrc.h b/gst/multifile/gstsplitfilesrc.h
new file mode 100644
index 0000000000..e3af184d00
--- /dev/null
+++ b/gst/multifile/gstsplitfilesrc.h
@@ -0,0 +1,76 @@
+/* GStreamer Split File Source
+ * Copyright (C) 2011 Collabora Ltd. <tim.muller@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef __GST_SPLIT_FILE_SRC_H__
+#define __GST_SPLIT_FILE_SRC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasesrc.h>
+#include <gio/gio.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_SPLIT_FILE_SRC \
+ (gst_split_file_src_get_type())
+#define GST_SPLIT_FILE_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SPLIT_FILE_SRC,GstSplitFileSrc))
+#define GST_SPLIT_FILE_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SPLIT_FILE_SRC,GstSplitFileSrcClass))
+#define GST_IS_SPLIT_FILE_SRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SPLIT_FILE_SRC))
+#define GST_IS_SPLIT_FILE_SRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SPLIT_FILE_SRC))
+
+typedef struct _GstFilePart GstFilePart;
+typedef struct _GstSplitFileSrc GstSplitFileSrc;
+typedef struct _GstSplitFileSrcClass GstSplitFileSrcClass;
+
+/* One physical file making up a slice of the virtual combined stream.
+ * start/stop are byte offsets of this part within the overall stream. */
+struct _GstFilePart
+{
+  GFileInputStream *stream;     /* input stream for reading this part */
+  gchar *path;                  /* file path, used in error messages */
+  guint64 start; /* inclusive */
+  guint64 stop; /* inclusive */
+};
+
+/* Source element that exposes a set of files as one contiguous stream */
+struct _GstSplitFileSrc
+{
+  GstBaseSrc parent;
+
+  gchar *location; /* OBJECT_LOCK */
+
+  GstFilePart *parts;           /* array of num_parts file parts */
+  guint num_parts;
+
+  guint cur_part; /* part used last (likely also to be used next) */
+
+  GCancellable *cancellable; /* so we can interrupt blocking operations */
+};
+
+/* Class structure: no vfuncs added beyond GstBaseSrc */
+struct _GstSplitFileSrcClass
+{
+  GstBaseSrcClass parent_class;
+};
+
+GType gst_split_file_src_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (splitfilesrc);
+
+G_END_DECLS
+
+#endif /* __GST_SPLIT_FILE_SRC_H__ */
diff --git a/gst/multifile/gstsplitmuxpartreader.c b/gst/multifile/gstsplitmuxpartreader.c
new file mode 100644
index 0000000000..77a1745c7f
--- /dev/null
+++ b/gst/multifile/gstsplitmuxpartreader.c
@@ -0,0 +1,1377 @@
+/* GStreamer Split Demuxer bin that recombines files created by
+ * the splitmuxsink element.
+ *
+ * Copyright (C) <2014> Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include "gstsplitmuxsrc.h"
+
+GST_DEBUG_CATEGORY_STATIC (splitmux_part_debug);
+#define GST_CAT_DEFAULT splitmux_part_debug
+
+#define SPLITMUX_PART_LOCK(p) g_mutex_lock(&(p)->lock)
+#define SPLITMUX_PART_UNLOCK(p) g_mutex_unlock(&(p)->lock)
+#define SPLITMUX_PART_WAIT(p) g_cond_wait (&(p)->inactive_cond, &(p)->lock)
+#define SPLITMUX_PART_BROADCAST(p) g_cond_broadcast (&(p)->inactive_cond)
+
+#define SPLITMUX_PART_TYPE_LOCK(p) g_mutex_lock(&(p)->type_lock)
+#define SPLITMUX_PART_TYPE_UNLOCK(p) g_mutex_unlock(&(p)->type_lock)
+
+#define SPLITMUX_PART_MSG_LOCK(p) g_mutex_lock(&(p)->msg_lock)
+#define SPLITMUX_PART_MSG_UNLOCK(p) g_mutex_unlock(&(p)->msg_lock)
+
+/* Internal sink pad: receives one demuxed stream of a single part file
+ * and buffers it in a data queue for the splitmuxsrc output pad. */
+typedef struct _GstSplitMuxPartPad
+{
+  GstPad parent;
+
+  /* Reader we belong to */
+  GstSplitMuxPartReader *reader;
+  /* Output splitmuxsrc source pad */
+  GstPad *target;
+
+  GstDataQueue *queue;          /* buffers/events queued for the output pad */
+
+  gboolean is_eos;              /* EOS seen on this pad */
+  gboolean flushing;            /* between FLUSH_START and FLUSH_STOP */
+  gboolean seen_buffer;         /* at least one buffer arrived (preroll check) */
+
+  gboolean is_sparse;           /* stream had GST_STREAM_FLAG_SPARSE */
+  GstClockTime max_ts;          /* largest end timestamp measured so far */
+  GstSegment segment;           /* segment rewritten into the global timeline */
+
+  GstSegment orig_segment;      /* segment exactly as received from the demuxer */
+  GstClockTime initial_ts_offset;       /* running-time offset of first segment */
+} GstSplitMuxPartPad;
+
+/* Class structure: no vfuncs added beyond GstPad */
+typedef struct _GstSplitMuxPartPadClass
+{
+  GstPadClass parent;
+} GstSplitMuxPartPadClass;
+
+static GType gst_splitmux_part_pad_get_type (void);
+#define SPLITMUX_TYPE_PART_PAD gst_splitmux_part_pad_get_type()
+#define SPLITMUX_PART_PAD_CAST(p) ((GstSplitMuxPartPad *)(p))
+
+static void splitmux_part_pad_constructed (GObject * pad);
+static void splitmux_part_pad_finalize (GObject * pad);
+static void handle_buffer_measuring (GstSplitMuxPartReader * reader,
+ GstSplitMuxPartPad * part_pad, GstBuffer * buf);
+
+static gboolean splitmux_data_queue_is_full_cb (GstDataQueue * queue,
+ guint visible, guint bytes, guint64 time, gpointer checkdata);
+static void type_found (GstElement * typefind, guint probability,
+ GstCaps * caps, GstSplitMuxPartReader * reader);
+static void check_if_pads_collected (GstSplitMuxPartReader * reader);
+
+static void
+gst_splitmux_part_reader_finish_measuring_streams (GstSplitMuxPartReader *
+ reader);
+
+/* Called with reader lock held.
+ * Returns TRUE if any proxy pad is either EOS or has drained its data
+ * queue - i.e. at least one stream can accept more data right now. */
+static gboolean
+have_empty_queue (GstSplitMuxPartReader * reader)
+{
+  GList *walk;
+
+  for (walk = reader->pads; walk != NULL; walk = walk->next) {
+    GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (walk->data);
+
+    if (part_pad->is_eos) {
+      GST_LOG_OBJECT (part_pad, "Pad is EOS");
+      return TRUE;
+    }
+    if (gst_data_queue_is_empty (part_pad->queue)) {
+      GST_LOG_OBJECT (part_pad, "Queue is empty");
+      return TRUE;
+    }
+  }
+
+  return FALSE;
+}
+
+/* Called with reader lock held.
+ * Blocks (releasing the lock while waiting) until the reader is flushing,
+ * stops running, or becomes active with at least one drainable queue.
+ * Returns TRUE when the caller may push data downstream, FALSE when it
+ * should give up (inactive or flushing). */
+static gboolean
+block_until_can_push (GstSplitMuxPartReader * reader)
+{
+  while (reader->running) {
+    if (reader->flushing)
+      goto out;
+    if (reader->active && have_empty_queue (reader))
+      goto out;
+
+    GST_LOG_OBJECT (reader,
+        "Waiting for activation or empty queue on reader %s", reader->path);
+    SPLITMUX_PART_WAIT (reader);
+  }
+
+  GST_LOG_OBJECT (reader, "Done waiting on reader %s active %d flushing %d",
+      reader->path, reader->active, reader->flushing);
+out:
+  return reader->active && !reader->flushing;
+}
+
+/* Called with the part lock held while in one of the two PREPARING states.
+ * Does not consume @buf - the caller keeps ownership.  Tracks the largest
+ * end timestamp per pad in part_pad->max_ts so the overall part duration
+ * can be measured, then checks whether all pads have been collected. */
+static void
+handle_buffer_measuring (GstSplitMuxPartReader * reader,
+    GstSplitMuxPartPad * part_pad, GstBuffer * buf)
+{
+  GstClockTimeDiff ts = GST_CLOCK_STIME_NONE;
+  GstClockTimeDiff offset;
+
+  if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS &&
+      !part_pad->seen_buffer) {
+    /* If this is the first buffer on the pad in the collect_streams state,
+     * then calculate initial offset based on running time of this segment */
+    part_pad->initial_ts_offset =
+        part_pad->orig_segment.start + part_pad->orig_segment.base -
+        part_pad->orig_segment.time;
+    GST_DEBUG_OBJECT (reader,
+        "Initial TS offset for pad %" GST_PTR_FORMAT " now %" GST_TIME_FORMAT,
+        part_pad, GST_TIME_ARGS (part_pad->initial_ts_offset));
+  }
+  part_pad->seen_buffer = TRUE;
+
+  /* Adjust buffer timestamps */
+  offset = reader->start_offset + part_pad->segment.base;
+  offset -= part_pad->initial_ts_offset;
+  /* We don't add the ts_offset here, because we
+   * want to measure the logical length of the stream,
+   * not to generate output timestamps */
+
+  /* Update the stored max duration on the pad,
+   * always preferring making DTS contiguous
+   * where possible */
+  if (GST_BUFFER_DTS_IS_VALID (buf))
+    ts = GST_BUFFER_DTS (buf) + offset;
+  else if (GST_BUFFER_PTS_IS_VALID (buf))
+    ts = GST_BUFFER_PTS (buf) + offset;
+
+  GST_DEBUG_OBJECT (reader, "Pad %" GST_PTR_FORMAT
+      " incoming DTS %" GST_TIME_FORMAT
+      " PTS %" GST_TIME_FORMAT " offset by %" GST_STIME_FORMAT
+      " to %" GST_STIME_FORMAT, part_pad,
+      GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
+      GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
+      GST_STIME_ARGS (offset), GST_STIME_ARGS (ts));
+
+  if (GST_CLOCK_STIME_IS_VALID (ts)) {
+    /* max_ts tracks the *end* of the buffer, so include its duration */
+    if (GST_BUFFER_DURATION_IS_VALID (buf))
+      ts += GST_BUFFER_DURATION (buf);
+
+    if (GST_CLOCK_STIME_IS_VALID (ts)
+        && ts > (GstClockTimeDiff) part_pad->max_ts) {
+      part_pad->max_ts = ts;
+      GST_LOG_OBJECT (reader,
+          "pad %" GST_PTR_FORMAT " max TS now %" GST_TIME_FORMAT, part_pad,
+          GST_TIME_ARGS (part_pad->max_ts));
+    }
+  }
+  /* Is it time to move to measuring state yet? */
+  check_if_pads_collected (reader);
+}
+
+/* GstDataQueue full-check callback: consider a queue full once it holds
+ * more than 20 seconds of data.  Arbitrary safety limit - if we ever hit
+ * it, playback is likely to stall. */
+static gboolean
+splitmux_data_queue_is_full_cb (GstDataQueue * queue,
+    guint visible, guint bytes, guint64 time, gpointer checkdata)
+{
+  return (time > 20 * GST_SECOND);
+}
+
+/* Drop the queued buffer/event and free the item wrapper itself */
+static void
+splitmux_part_free_queue_item (GstDataQueueItem * item)
+{
+  gst_mini_object_unref (item->object);
+  g_slice_free (GstDataQueueItem, item);
+}
+
+/* Chain function for proxy pads.  While measuring, buffers are only
+ * inspected and dropped; once playing, timestamps are shifted into the
+ * global timeline and the buffer is queued for the output pad.  Takes
+ * ownership of @buf.  May block in block_until_can_push(). */
+static GstFlowReturn
+splitmux_part_pad_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (pad);
+  GstSplitMuxPartReader *reader = part_pad->reader;
+  GstDataQueueItem *item;
+  GstClockTimeDiff offset;
+
+  GST_LOG_OBJECT (reader, "Pad %" GST_PTR_FORMAT " %" GST_PTR_FORMAT, pad, buf);
+  SPLITMUX_PART_LOCK (reader);
+
+  if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS ||
+      reader->prep_state == PART_STATE_PREPARING_MEASURE_STREAMS) {
+    handle_buffer_measuring (reader, part_pad, buf);
+    gst_buffer_unref (buf);
+    SPLITMUX_PART_UNLOCK (reader);
+    return GST_FLOW_OK;
+  }
+
+  if (!block_until_can_push (reader)) {
+    /* Flushing */
+    SPLITMUX_PART_UNLOCK (reader);
+    gst_buffer_unref (buf);
+    return GST_FLOW_FLUSHING;
+  }
+
+  /* Adjust buffer timestamps */
+  offset = reader->start_offset + part_pad->segment.base;
+  offset -= part_pad->initial_ts_offset;
+  offset += reader->ts_offset;
+
+  if (GST_BUFFER_PTS_IS_VALID (buf))
+    GST_BUFFER_PTS (buf) += offset;
+  if (GST_BUFFER_DTS_IS_VALID (buf))
+    GST_BUFFER_DTS (buf) += offset;
+
+  /* We are active, and one queue is empty, place this buffer in
+   * the dataqueue */
+  GST_LOG_OBJECT (reader, "Enqueueing buffer %" GST_PTR_FORMAT, buf);
+  item = g_slice_new (GstDataQueueItem);
+  item->destroy = (GDestroyNotify) splitmux_part_free_queue_item;
+  item->object = GST_MINI_OBJECT (buf);
+  item->size = gst_buffer_get_size (buf);
+  item->duration = GST_BUFFER_DURATION (buf);
+  if (item->duration == GST_CLOCK_TIME_NONE)
+    item->duration = 0;
+  item->visible = TRUE;
+
+  /* Keep the pad alive across the push, which runs without the part lock
+   * held (gst_data_queue_push may block until there is room) */
+  gst_object_ref (part_pad);
+
+  SPLITMUX_PART_UNLOCK (reader);
+
+  if (!gst_data_queue_push (part_pad->queue, item)) {
+    splitmux_part_free_queue_item (item);
+    gst_object_unref (part_pad);
+    return GST_FLOW_FLUSHING;
+  }
+
+  gst_object_unref (part_pad);
+  return GST_FLOW_OK;
+}
+
+/* Called with splitmux part lock held.
+ * TRUE only once every proxy pad on this part has gone EOS. */
+static gboolean
+splitmux_part_is_eos_locked (GstSplitMuxPartReader * part)
+{
+  GList *walk;
+
+  for (walk = part->pads; walk != NULL; walk = walk->next) {
+    if (!SPLITMUX_PART_PAD_CAST (walk->data)->is_eos)
+      return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* Called with splitmux part lock held.
+ * A part counts as prerolled once every proxy pad has seen a buffer. */
+static gboolean
+splitmux_part_is_prerolled_locked (GstSplitMuxPartReader * part)
+{
+  GList *walk;
+
+  GST_LOG_OBJECT (part, "Checking for preroll");
+  for (walk = part->pads; walk != NULL; walk = walk->next) {
+    GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (walk->data);
+
+    if (!part_pad->seen_buffer) {
+      GST_LOG_OBJECT (part, "Part pad %" GST_PTR_FORMAT " is not prerolled",
+          part_pad);
+      return FALSE;
+    }
+  }
+  GST_LOG_OBJECT (part, "Part is prerolled");
+  return TRUE;
+}
+
+
+/* Public helper: thread-safe check whether this part has fully hit EOS */
+gboolean
+gst_splitmux_part_is_eos (GstSplitMuxPartReader * reader)
+{
+  gboolean res;
+
+  SPLITMUX_PART_LOCK (reader);
+  res = splitmux_part_is_eos_locked (reader);
+  SPLITMUX_PART_UNLOCK (reader);
+
+  return res;
+}
+
+/* Called with splitmux part lock held.
+ * TRUE while at least one proxy pad is still flushing. */
+static gboolean
+splitmux_is_flushing (GstSplitMuxPartReader * reader)
+{
+  GList *walk;
+
+  for (walk = reader->pads; walk != NULL; walk = walk->next) {
+    if (SPLITMUX_PART_PAD_CAST (walk->data)->flushing)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Sink event handler for proxy pads.  During the PREPARING states it only
+ * gathers information (segment parameters, per-pad EOS); once READY it
+ * rewrites segments into the overall timeline and queues events in-band
+ * with the buffers.  Takes the part lock and may block in
+ * block_until_can_push().  Takes ownership of @event. */
+static gboolean
+splitmux_part_pad_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (pad);
+  GstSplitMuxPartReader *reader = part_pad->reader;
+  gboolean ret = TRUE;
+  SplitMuxSrcPad *target;
+  GstDataQueueItem *item;
+
+  SPLITMUX_PART_LOCK (reader);
+
+  target = gst_object_ref (part_pad->target);
+
+  GST_LOG_OBJECT (reader, "Pad %" GST_PTR_FORMAT " event %" GST_PTR_FORMAT, pad,
+      event);
+
+  /* While flushing, only FLUSH_STOP may get through */
+  if (part_pad->flushing && GST_EVENT_TYPE (event) != GST_EVENT_FLUSH_STOP)
+    goto drop_event;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_STREAM_START:{
+      GstStreamFlags flags;
+      gst_event_parse_stream_flags (event, &flags);
+      part_pad->is_sparse = (flags & GST_STREAM_FLAG_SPARSE);
+      break;
+    }
+    case GST_EVENT_SEGMENT:{
+      GstSegment *seg = &part_pad->segment;
+
+      GST_LOG_OBJECT (pad, "Received segment %" GST_PTR_FORMAT, event);
+
+      gst_event_copy_segment (event, seg);
+      gst_event_copy_segment (event, &part_pad->orig_segment);
+
+      if (seg->format != GST_FORMAT_TIME)
+        goto wrong_segment;
+
+      /* Adjust segment */
+      /* Adjust start/stop so the overall file is 0 + start_offset based,
+       * adding a fixed offset so that DTS is never negative */
+      if (seg->stop != -1) {
+        seg->stop -= seg->start;
+        seg->stop += seg->time + reader->start_offset + reader->ts_offset;
+      }
+      seg->start = seg->time + reader->start_offset + reader->ts_offset;
+      seg->time += reader->start_offset;
+      seg->position += reader->start_offset;
+
+      /* Replace event */
+      gst_event_unref (event);
+      event = gst_event_new_segment (seg);
+
+      GST_LOG_OBJECT (pad, "Adjusted segment now %" GST_PTR_FORMAT, event);
+
+      if (reader->prep_state != PART_STATE_PREPARING_COLLECT_STREAMS
+          && reader->prep_state != PART_STATE_PREPARING_MEASURE_STREAMS)
+        break;                  /* Only do further stuff with segments during initial measuring */
+
+      /* Take the first segment from the first part */
+      if (target->segment.format == GST_FORMAT_UNDEFINED) {
+        gst_segment_copy_into (seg, &target->segment);
+        GST_DEBUG_OBJECT (reader,
+            "Target pad segment now %" GST_SEGMENT_FORMAT, &target->segment);
+      }
+
+      if (seg->stop != -1 && target->segment.stop != -1) {
+        GstClockTime stop = seg->base + seg->stop;
+        if (stop > target->segment.stop) {
+          target->segment.stop = stop;
+          GST_DEBUG_OBJECT (reader,
+              "Adjusting segment stop by %" GST_TIME_FORMAT
+              " output now %" GST_SEGMENT_FORMAT,
+              GST_TIME_ARGS (reader->start_offset), &target->segment);
+        }
+      }
+      GST_LOG_OBJECT (pad, "Forwarding segment %" GST_PTR_FORMAT, event);
+      break;
+    }
+    case GST_EVENT_EOS:{
+
+      GST_DEBUG_OBJECT (part_pad,
+          "State %u EOS event. MaxTS seen %" GST_TIME_FORMAT,
+          reader->prep_state, GST_TIME_ARGS (part_pad->max_ts));
+
+      if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS ||
+          reader->prep_state == PART_STATE_PREPARING_MEASURE_STREAMS) {
+        /* Mark this pad as EOS */
+        part_pad->is_eos = TRUE;
+        if (splitmux_part_is_eos_locked (reader)) {
+          /* Finished measuring things, set state and tell the state change func
+           * so it can seek back to the start */
+          GST_LOG_OBJECT (reader,
+              "EOS while measuring streams. Resetting for ready");
+          reader->prep_state = PART_STATE_PREPARING_RESET_FOR_READY;
+
+          gst_element_call_async (GST_ELEMENT_CAST (reader),
+              (GstElementCallAsyncFunc)
+              gst_splitmux_part_reader_finish_measuring_streams, NULL, NULL);
+        }
+        goto drop_event;
+      }
+      break;
+    }
+    case GST_EVENT_FLUSH_START:
+      reader->flushing = TRUE;
+      part_pad->flushing = TRUE;
+      GST_LOG_OBJECT (reader, "Pad %" GST_PTR_FORMAT " flushing dataqueue",
+          part_pad);
+      gst_data_queue_set_flushing (part_pad->queue, TRUE);
+      SPLITMUX_PART_BROADCAST (reader);
+      break;
+    case GST_EVENT_FLUSH_STOP:{
+      gst_data_queue_set_flushing (part_pad->queue, FALSE);
+      gst_data_queue_flush (part_pad->queue);
+      part_pad->seen_buffer = FALSE;
+      part_pad->flushing = FALSE;
+      part_pad->is_eos = FALSE;
+
+      /* The reader stays flushing until every pad saw FLUSH_STOP */
+      reader->flushing = splitmux_is_flushing (reader);
+      GST_LOG_OBJECT (reader,
+          "%s pad %" GST_PTR_FORMAT " flush_stop. Overall flushing=%d",
+          reader->path, pad, reader->flushing);
+      SPLITMUX_PART_BROADCAST (reader);
+      break;
+    }
+    default:
+      break;
+  }
+
+  /* Don't send events downstream while preparing */
+  if (reader->prep_state != PART_STATE_READY)
+    goto drop_event;
+
+  /* Don't pass flush events - those are done by the parent */
+  if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START ||
+      GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
+    goto drop_event;
+
+  if (!block_until_can_push (reader))
+    goto drop_event;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_GAP:{
+      /* FIXME: Drop initial gap (if any) in each segment, not all GAPs */
+      goto drop_event;
+    }
+    default:
+      break;
+  }
+
+  /* We are active, and one queue is empty, place this buffer in
+   * the dataqueue */
+  /* Keep the queue alive across the push, which runs unlocked */
+  gst_object_ref (part_pad->queue);
+  SPLITMUX_PART_UNLOCK (reader);
+
+  GST_LOG_OBJECT (reader, "Enqueueing event %" GST_PTR_FORMAT, event);
+  item = g_slice_new (GstDataQueueItem);
+  item->destroy = (GDestroyNotify) splitmux_part_free_queue_item;
+  item->object = GST_MINI_OBJECT (event);
+  item->size = 0;
+  item->duration = 0;
+  /* NOTE(review): duration was set to 0 just above, so this check is dead */
+  if (item->duration == GST_CLOCK_TIME_NONE)
+    item->duration = 0;
+  item->visible = FALSE;
+
+  if (!gst_data_queue_push (part_pad->queue, item)) {
+    splitmux_part_free_queue_item (item);
+    ret = FALSE;
+  }
+
+  gst_object_unref (part_pad->queue);
+  gst_object_unref (target);
+
+  return ret;
+wrong_segment:
+  gst_event_unref (event);
+  gst_object_unref (target);
+  SPLITMUX_PART_UNLOCK (reader);
+  GST_ELEMENT_ERROR (reader, STREAM, FAILED, (NULL),
+      ("Received non-time segment - reader %s pad %" GST_PTR_FORMAT,
+          reader->path, pad));
+  return FALSE;
+drop_event:
+  GST_LOG_OBJECT (pad, "Dropping event %" GST_PTR_FORMAT
+      " from %" GST_PTR_FORMAT " on %" GST_PTR_FORMAT, event, pad, target);
+  gst_event_unref (event);
+  gst_object_unref (target);
+  SPLITMUX_PART_UNLOCK (reader);
+  return TRUE;
+}
+
+/* Sink query handler for proxy pads: forward queries to the splitmuxsrc
+ * output pad, but only while this part is the active reader; queries on
+ * inactive parts simply fail. */
+static gboolean
+splitmux_part_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (pad);
+  GstSplitMuxPartReader *reader = part_pad->reader;
+  GstPad *target;
+  gboolean ret = FALSE;
+  gboolean active;
+
+  SPLITMUX_PART_LOCK (reader);
+  target = gst_object_ref (part_pad->target);
+  active = reader->active;
+  SPLITMUX_PART_UNLOCK (reader);
+
+  if (active) {
+    GST_LOG_OBJECT (pad, "Forwarding query %" GST_PTR_FORMAT
+        " from %" GST_PTR_FORMAT " on %" GST_PTR_FORMAT, query, pad, target);
+
+    ret = gst_pad_query (target, query);
+  }
+
+  gst_object_unref (target);
+
+  return ret;
+}
+
+/* GObject boilerplate for the internal proxy pad subclass */
+G_DEFINE_TYPE (GstSplitMuxPartPad, gst_splitmux_part_pad, GST_TYPE_PAD);
+
+/* Install the pad functions once construct-time properties are set */
+static void
+splitmux_part_pad_constructed (GObject * pad)
+{
+  gst_pad_set_chain_function (GST_PAD (pad),
+      GST_DEBUG_FUNCPTR (splitmux_part_pad_chain));
+  gst_pad_set_event_function (GST_PAD (pad),
+      GST_DEBUG_FUNCPTR (splitmux_part_pad_event));
+  gst_pad_set_query_function (GST_PAD (pad),
+      GST_DEBUG_FUNCPTR (splitmux_part_pad_query));
+
+  G_OBJECT_CLASS (gst_splitmux_part_pad_parent_class)->constructed (pad);
+}
+
+/* Class init: hook up construction and teardown */
+static void
+gst_splitmux_part_pad_class_init (GstSplitMuxPartPadClass * klass)
+{
+  GObjectClass *gobject_klass = (GObjectClass *) (klass);
+
+  gobject_klass->constructed = splitmux_part_pad_constructed;
+  gobject_klass->finalize = splitmux_part_pad_finalize;
+}
+
+/* Instance init: create the per-pad data queue (time-limited by
+ * splitmux_data_queue_is_full_cb) and clear both segments */
+static void
+gst_splitmux_part_pad_init (GstSplitMuxPartPad * pad)
+{
+  pad->queue = gst_data_queue_new (splitmux_data_queue_is_full_cb,
+      NULL, NULL, pad);
+  gst_segment_init (&pad->segment, GST_FORMAT_UNDEFINED);
+  gst_segment_init (&pad->orig_segment, GST_FORMAT_UNDEFINED);
+}
+
+/* Finalize: drain and release the pad's data queue before chaining up */
+static void
+splitmux_part_pad_finalize (GObject * obj)
+{
+  GstSplitMuxPartPad *pad = (GstSplitMuxPartPad *) (obj);
+
+  GST_DEBUG_OBJECT (obj, "finalize");
+  gst_data_queue_set_flushing (pad->queue, TRUE);
+  gst_data_queue_flush (pad->queue);
+  gst_object_unref (GST_OBJECT_CAST (pad->queue));
+  pad->queue = NULL;
+
+  G_OBJECT_CLASS (gst_splitmux_part_pad_parent_class)->finalize (obj);
+}
+
+static void
+new_decoded_pad_added_cb (GstElement * element, GstPad * pad,
+ GstSplitMuxPartReader * part);
+static void no_more_pads (GstElement * element, GstSplitMuxPartReader * reader);
+static GstStateChangeReturn
+gst_splitmux_part_reader_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_splitmux_part_reader_send_event (GstElement * element,
+ GstEvent * event);
+static void gst_splitmux_part_reader_set_flushing_locked (GstSplitMuxPartReader
+ * part, gboolean flushing);
+static void bus_handler (GstBin * bin, GstMessage * msg);
+static void splitmux_part_reader_dispose (GObject * object);
+static void splitmux_part_reader_finalize (GObject * object);
+static void splitmux_part_reader_reset (GstSplitMuxPartReader * reader);
+
+#define gst_splitmux_part_reader_parent_class parent_class
+G_DEFINE_TYPE (GstSplitMuxPartReader, gst_splitmux_part_reader,
+ GST_TYPE_PIPELINE);
+
+/* Class init for the per-file reader pipeline: overrides state changes,
+ * event sending, bus message handling and teardown */
+static void
+gst_splitmux_part_reader_class_init (GstSplitMuxPartReaderClass * klass)
+{
+  GObjectClass *gobject_klass = (GObjectClass *) (klass);
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBinClass *gstbin_class = (GstBinClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (splitmux_part_debug, "splitmuxpartreader", 0,
+      "Split File Demuxing Source helper");
+
+  gobject_klass->dispose = splitmux_part_reader_dispose;
+  gobject_klass->finalize = splitmux_part_reader_finalize;
+
+  gstelement_class->change_state = gst_splitmux_part_reader_change_state;
+  gstelement_class->send_event = gst_splitmux_part_reader_send_event;
+
+  gstbin_class->handle_message = bus_handler;
+}
+
+/* Instance init: build the filesrc ! typefind front of the pipeline; the
+ * demuxer is added later from the "have-type" callback (type_found).
+ * NOTE(review): if element creation or linking fails, the error is only
+ * logged and the reader is left without a working pipeline - presumably
+ * a later state change surfaces the failure; confirm with callers. */
+static void
+gst_splitmux_part_reader_init (GstSplitMuxPartReader * reader)
+{
+  GstElement *typefind;
+
+  reader->active = FALSE;
+  reader->duration = GST_CLOCK_TIME_NONE;
+
+  g_cond_init (&reader->inactive_cond);
+  g_mutex_init (&reader->lock);
+  g_mutex_init (&reader->type_lock);
+  g_mutex_init (&reader->msg_lock);
+
+  /* FIXME: Create elements on a state change */
+  reader->src = gst_element_factory_make ("filesrc", NULL);
+  if (reader->src == NULL) {
+    GST_ERROR_OBJECT (reader, "Failed to create filesrc element");
+    return;
+  }
+  gst_bin_add (GST_BIN_CAST (reader), reader->src);
+
+  typefind = gst_element_factory_make ("typefind", NULL);
+  if (!typefind) {
+    GST_ERROR_OBJECT (reader,
+        "Failed to create typefind element - check your installation");
+    return;
+  }
+
+  gst_bin_add (GST_BIN_CAST (reader), typefind);
+  reader->typefind = typefind;
+
+  if (!gst_element_link_pads (reader->src, NULL, typefind, "sink")) {
+    GST_ERROR_OBJECT (reader,
+        "Failed to link typefind element - check your installation");
+    return;
+  }
+
+  g_signal_connect (reader->typefind, "have-type", G_CALLBACK (type_found),
+      reader);
+}
+
+/* Dispose: drop all proxy pads (reset is safe to run repeatedly since it
+ * clears the pad list) and chain up */
+static void
+splitmux_part_reader_dispose (GObject * object)
+{
+  GstSplitMuxPartReader *reader = (GstSplitMuxPartReader *) object;
+
+  splitmux_part_reader_reset (reader);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* Finalize: release synchronisation primitives and the file path */
+static void
+splitmux_part_reader_finalize (GObject * object)
+{
+  GstSplitMuxPartReader *reader = (GstSplitMuxPartReader *) object;
+
+  g_cond_clear (&reader->inactive_cond);
+  g_mutex_clear (&reader->lock);
+  g_mutex_clear (&reader->type_lock);
+  g_mutex_clear (&reader->msg_lock);
+
+  g_free (reader->path);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Post async-start through the bin's message handler and remember that a
+ * matching async-done is now outstanding.  Serialised by the msg lock. */
+static void
+do_async_start (GstSplitMuxPartReader * reader)
+{
+  SPLITMUX_PART_MSG_LOCK (reader);
+  reader->async_pending = TRUE;
+
+  GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST (reader),
+      gst_message_new_async_start (GST_OBJECT_CAST (reader)));
+  SPLITMUX_PART_MSG_UNLOCK (reader);
+}
+
+/* Post async-done if an async-start is still outstanding; otherwise a
+ * no-op.  Serialised by the msg lock. */
+static void
+do_async_done (GstSplitMuxPartReader * reader)
+{
+  SPLITMUX_PART_MSG_LOCK (reader);
+  if (reader->async_pending) {
+    GstMessage *msg =
+        gst_message_new_async_done (GST_OBJECT_CAST (reader),
+        GST_CLOCK_TIME_NONE);
+
+    GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST (reader), msg);
+    reader->async_pending = FALSE;
+  }
+  SPLITMUX_PART_MSG_UNLOCK (reader);
+}
+
+/* Deactivate, unref and forget every proxy pad on this reader */
+static void
+splitmux_part_reader_reset (GstSplitMuxPartReader * reader)
+{
+  GList *walk;
+
+  SPLITMUX_PART_LOCK (reader);
+  for (walk = reader->pads; walk != NULL; walk = walk->next) {
+    GstPad *pad = GST_PAD_CAST (walk->data);
+
+    gst_pad_set_active (pad, FALSE);
+    gst_object_unref (GST_OBJECT_CAST (pad));
+  }
+
+  g_list_free (reader->pads);
+  reader->pads = NULL;
+  SPLITMUX_PART_UNLOCK (reader);
+}
+
+/* Create and activate a proxy sink pad mirroring @target (the splitmuxsrc
+ * output pad), named after it.
+ * NOTE(review): @target and @reader are stored without taking a ref here -
+ * presumably both outlive the pad; confirm ownership with the callers. */
+static GstSplitMuxPartPad *
+gst_splitmux_part_reader_new_proxy_pad (GstSplitMuxPartReader * reader,
+    GstPad * target)
+{
+  GstSplitMuxPartPad *pad = g_object_new (SPLITMUX_TYPE_PART_PAD,
+      "name", GST_PAD_NAME (target),
+      "direction", GST_PAD_SINK,
+      NULL);
+  pad->target = target;
+  pad->reader = reader;
+
+  gst_pad_set_active (GST_PAD_CAST (pad), TRUE);
+
+  return pad;
+}
+
+/* "pad-added" callback from the demuxer: look up the matching splitmuxsrc
+ * output pad via get_pad_cb and attach a freshly-created proxy pad to
+ * capture this stream.  Streams with no output target are ignored. */
+static void
+new_decoded_pad_added_cb (GstElement * element, GstPad * pad,
+    GstSplitMuxPartReader * reader)
+{
+  GstPad *out_pad = NULL;
+  GstSplitMuxPartPad *proxy_pad;
+  GstCaps *caps;
+  GstPadLinkReturn link_ret;
+
+  /* Caps are only needed for the debug output below.  The pad may not
+   * have negotiated caps yet, in which case this returns NULL - guard
+   * the unref, since gst_caps_unref() must not be called on NULL */
+  caps = gst_pad_get_current_caps (pad);
+
+  GST_DEBUG_OBJECT (reader, "file %s new decoded pad %" GST_PTR_FORMAT
+      " caps %" GST_PTR_FORMAT, reader->path, pad, caps);
+
+  if (caps != NULL)
+    gst_caps_unref (caps);
+
+  /* Look up or create the output pad */
+  if (reader->get_pad_cb)
+    out_pad = reader->get_pad_cb (reader, pad, reader->cb_data);
+  if (out_pad == NULL) {
+    GST_DEBUG_OBJECT (reader,
+        "No output pad for %" GST_PTR_FORMAT ". Ignoring", pad);
+    return;
+  }
+
+  /* Create our proxy pad to interact with this new pad */
+  proxy_pad = gst_splitmux_part_reader_new_proxy_pad (reader, out_pad);
+  GST_DEBUG_OBJECT (reader,
+      "created proxy pad %" GST_PTR_FORMAT " for target %" GST_PTR_FORMAT,
+      proxy_pad, out_pad);
+
+  link_ret = gst_pad_link (pad, GST_PAD (proxy_pad));
+  if (link_ret != GST_PAD_LINK_OK) {
+    gst_object_unref (proxy_pad);
+    GST_ELEMENT_ERROR (reader, STREAM, FAILED, (NULL),
+        ("Failed to link proxy pad for stream part %s pad %" GST_PTR_FORMAT
+            " ret %d", reader->path, pad, link_ret));
+    return;
+  }
+  GST_DEBUG_OBJECT (reader,
+      "new decoded pad %" GST_PTR_FORMAT " linked to %" GST_PTR_FORMAT,
+      pad, proxy_pad);
+
+  SPLITMUX_PART_LOCK (reader);
+  reader->pads = g_list_prepend (reader->pads, proxy_pad);
+  SPLITMUX_PART_UNLOCK (reader);
+}
+
+/* GstElement send_event vfunc: push the event into the peer (demuxer
+ * source pad) of the first proxy pad we have.  Consumes @event even when
+ * there is no pad yet. */
+static gboolean
+gst_splitmux_part_reader_send_event (GstElement * element, GstEvent * event)
+{
+  GstSplitMuxPartReader *reader = (GstSplitMuxPartReader *) element;
+  gboolean ret = FALSE;
+  GstPad *pad = NULL;
+
+  /* Send event to the first source pad we found */
+  SPLITMUX_PART_LOCK (reader);
+  if (reader->pads) {
+    GstPad *proxy_pad = GST_PAD_CAST (reader->pads->data);
+    pad = gst_pad_get_peer (proxy_pad);
+  }
+  SPLITMUX_PART_UNLOCK (reader);
+
+  if (pad) {
+    ret = gst_pad_send_event (pad, event);
+    gst_object_unref (pad);
+  } else {
+    gst_event_unref (event);
+  }
+
+  return ret;
+}
+
+/* Called with lock held. Seeks to an 'internal' time from 0 to length of
+ * this piece.  The lock is dropped for the duration of the (flushing,
+ * accurate) seek itself and re-taken afterwards; we then wait until the
+ * flush completes so all stale data is gone before returning. */
+static void
+gst_splitmux_part_reader_seek_to_time_locked (GstSplitMuxPartReader * reader,
+    GstClockTime time)
+{
+  SPLITMUX_PART_UNLOCK (reader);
+  GST_DEBUG_OBJECT (reader, "Seeking to time %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (time));
+  gst_element_seek (GST_ELEMENT_CAST (reader), 1.0, GST_FORMAT_TIME,
+      GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE, GST_SEEK_TYPE_SET, time,
+      GST_SEEK_TYPE_END, 0);
+
+  SPLITMUX_PART_LOCK (reader);
+
+  /* Wait for flush to finish, so old data is gone */
+  while (reader->flushing) {
+    GST_LOG_OBJECT (reader, "%s Waiting for flush to finish", reader->path);
+    SPLITMUX_PART_WAIT (reader);
+  }
+}
+
+/* Map the passed segment to 'internal' time from 0 to length of this piece
+ * and seek. Lock cannot be held.
+ * Returns the result of gst_element_seek(). */
+static gboolean
+gst_splitmux_part_reader_seek_to_segment (GstSplitMuxPartReader * reader,
+    GstSegment * target_seg, GstSeekFlags extra_flags)
+{
+  GstSeekFlags flags;
+  GstClockTime start = 0, stop = GST_CLOCK_TIME_NONE;
+
+  flags = target_seg->flags | GST_SEEK_FLAG_FLUSH | extra_flags;
+
+  SPLITMUX_PART_LOCK (reader);
+  /* Clamp the global segment start to this part's local 0 */
+  if (target_seg->start >= reader->start_offset)
+    start = target_seg->start - reader->start_offset;
+  /* If the segment stop is within this part, don't play to the end */
+  if (target_seg->stop != -1 &&
+      target_seg->stop < reader->start_offset + reader->duration)
+    stop = target_seg->stop - reader->start_offset;
+
+  SPLITMUX_PART_UNLOCK (reader);
+
+  GST_DEBUG_OBJECT (reader,
+      "Seeking rate %f format %d flags 0x%x start %" GST_TIME_FORMAT " stop %"
+      GST_TIME_FORMAT, target_seg->rate, target_seg->format, flags,
+      GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+  return gst_element_seek (GST_ELEMENT_CAST (reader), target_seg->rate,
+      target_seg->format, flags, GST_SEEK_TYPE_SET, start, GST_SEEK_TYPE_SET,
+      stop);
+}
+
+/* Dispatched via gst_element_call_async from check_if_pads_collected.
+ * Takes the part lock itself (NOTE(review): the previous comment claimed
+ * the lock was already held on entry, which would deadlock with the
+ * SPLITMUX_PART_LOCK below). */
+static void
+gst_splitmux_part_reader_measure_streams (GstSplitMuxPartReader * reader)
+{
+  SPLITMUX_PART_LOCK (reader);
+  /* Trigger a flushing seek to near the end of the file and run each stream
+   * to EOS in order to find the smallest end timestamp to start the next
+   * file from
+   */
+  if (GST_CLOCK_TIME_IS_VALID (reader->duration)
+      && reader->duration > GST_SECOND) {
+    GstClockTime seek_ts = reader->duration - (0.5 * GST_SECOND);
+    gst_splitmux_part_reader_seek_to_time_locked (reader, seek_ts);
+  }
+  SPLITMUX_PART_UNLOCK (reader);
+}
+
+/* Dispatched via gst_element_call_async once every pad hit EOS while
+ * measuring: move to READY and complete the pending async state change. */
+static void
+gst_splitmux_part_reader_finish_measuring_streams (GstSplitMuxPartReader *
+    reader)
+{
+  SPLITMUX_PART_LOCK (reader);
+  if (reader->prep_state == PART_STATE_PREPARING_RESET_FOR_READY) {
+    /* Fire the prepared signal and go to READY state */
+    GST_DEBUG_OBJECT (reader,
+        "Stream measuring complete. File %s is now ready", reader->path);
+    reader->prep_state = PART_STATE_READY;
+    SPLITMUX_PART_UNLOCK (reader);
+    do_async_done (reader);
+  } else {
+    SPLITMUX_PART_UNLOCK (reader);
+  }
+}
+
+/* Instantiate the highest-ranked demuxer element whose sink caps can
+ * handle @caps.  Returns a new element (caller owns it) or NULL. */
+static GstElement *
+find_demuxer (GstCaps * caps)
+{
+  GstElement *element = NULL;
+  GList *factories;
+  GList *compat_elements;
+
+  factories =
+      gst_element_factory_list_get_elements (GST_ELEMENT_FACTORY_TYPE_DEMUXER,
+      GST_RANK_MARGINAL);
+  if (factories == NULL)
+    return NULL;
+
+  compat_elements =
+      gst_element_factory_list_filter (factories, caps, GST_PAD_SINK, TRUE);
+  if (compat_elements != NULL) {
+    /* Just take the first (highest ranked) option */
+    element = gst_element_factory_create (GST_ELEMENT_FACTORY_CAST
+        (compat_elements->data), NULL);
+    gst_plugin_feature_list_free (compat_elements);
+  }
+
+  gst_plugin_feature_list_free (factories);
+
+  return element;
+}
+
+/* "have-type" callback from typefind: create a demuxer for the detected
+ * caps, add it to the bin, link it after typefind and sync its state.
+ * NOTE(review): failure to find a demuxer is only logged here - presumably
+ * the surrounding state change eventually reports the error; confirm. */
+static void
+type_found (GstElement * typefind, guint probability,
+    GstCaps * caps, GstSplitMuxPartReader * reader)
+{
+  GstElement *demux;
+
+  GST_INFO_OBJECT (reader, "Got type %" GST_PTR_FORMAT, caps);
+
+  /* typefind found a type. Look for the demuxer to handle it */
+  demux = reader->demux = find_demuxer (caps);
+  if (reader->demux == NULL) {
+    GST_ERROR_OBJECT (reader, "Failed to create demuxer element");
+    return;
+  }
+
+  /* Connect to demux signals */
+  g_signal_connect (demux,
+      "pad-added", G_CALLBACK (new_decoded_pad_added_cb), reader);
+  g_signal_connect (demux, "no-more-pads", G_CALLBACK (no_more_pads), reader);
+
+  /* Lock the demuxer's state while adding so we control when it starts */
+  gst_element_set_locked_state (demux, TRUE);
+  gst_bin_add (GST_BIN_CAST (reader), demux);
+  gst_element_link_pads (reader->typefind, "src", demux, NULL);
+  gst_element_set_state (reader->demux, GST_STATE_TARGET (reader));
+  gst_element_set_locked_state (demux, FALSE);
+}
+
+/* Check whether stream collection has finished: the demuxer signalled
+ * no-more-pads and every pad has prerolled. If so, advance to the
+ * stream-measuring phase asynchronously (so the seek doesn't run in
+ * this context). NOTE(review): the visible caller (no_more_pads) holds
+ * SPLITMUX_PART_LOCK - confirm all other call sites do too. */
+static void
+check_if_pads_collected (GstSplitMuxPartReader * reader)
+{
+  if (reader->prep_state == PART_STATE_PREPARING_COLLECT_STREAMS) {
+    /* Check we have all pads and each pad has seen a buffer */
+    if (reader->no_more_pads && splitmux_part_is_prerolled_locked (reader)) {
+      GST_DEBUG_OBJECT (reader,
+          "no more pads - file %s. Measuring stream length", reader->path);
+      reader->prep_state = PART_STATE_PREPARING_MEASURE_STREAMS;
+      gst_element_call_async (GST_ELEMENT_CAST (reader),
+          (GstElementCallAsyncFunc) gst_splitmux_part_reader_measure_streams,
+          NULL, NULL);
+    }
+  }
+}
+
+/* Demuxer "no-more-pads" callback: all streams are now exposed.
+ * Store this part's duration as the minimum duration reported by any
+ * pad (duration starts at GST_CLOCK_TIME_NONE, the maximum GstClockTime,
+ * so the first successful query always replaces it), then re-check
+ * whether preparation can advance. */
+static void
+no_more_pads (GstElement * element, GstSplitMuxPartReader * reader)
+{
+  GstClockTime duration = GST_CLOCK_TIME_NONE;
+  GList *cur;
+  /* Query the minimum duration of any pad in this piece and store it.
+   * FIXME: Only consider audio and video */
+  SPLITMUX_PART_LOCK (reader);
+  for (cur = g_list_first (reader->pads); cur != NULL; cur = g_list_next (cur)) {
+    GstPad *target = GST_PAD_CAST (cur->data);
+    if (target) {
+      gint64 cur_duration;
+      if (gst_pad_peer_query_duration (target, GST_FORMAT_TIME, &cur_duration)) {
+        GST_INFO_OBJECT (reader,
+            "file %s pad %" GST_PTR_FORMAT " duration %" GST_TIME_FORMAT,
+            reader->path, target, GST_TIME_ARGS (cur_duration));
+        /* NOTE(review): gint64 compared against guint64 GstClockTime -
+         * assumes the demuxer never reports a negative duration */
+        if (cur_duration < duration)
+          duration = cur_duration;
+      }
+    }
+  }
+  GST_INFO_OBJECT (reader, "file %s duration %" GST_TIME_FORMAT,
+      reader->path, GST_TIME_ARGS (duration));
+  reader->duration = (GstClockTime) duration;
+
+  reader->no_more_pads = TRUE;
+
+  check_if_pads_collected (reader);
+  SPLITMUX_PART_UNLOCK (reader);
+}
+
+/* Proxy a query from an exposed output pad to the matching internal
+ * part pad. For POSITION answers in TIME format, shift the result by
+ * this part's start offset so it is expressed in overall stream time.
+ * Returns TRUE if the query was answered. */
+gboolean
+gst_splitmux_part_reader_src_query (GstSplitMuxPartReader * part,
+    GstPad * src_pad, GstQuery * query)
+{
+  GstPad *target = NULL;
+  gboolean ret;
+  GList *cur;
+
+  SPLITMUX_PART_LOCK (part);
+  /* Find the pad corresponding to the visible output target pad */
+  for (cur = g_list_first (part->pads); cur != NULL; cur = g_list_next (cur)) {
+    GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (cur->data);
+    if (part_pad->target == src_pad) {
+      target = gst_object_ref (GST_OBJECT_CAST (part_pad));
+      break;
+    }
+  }
+  SPLITMUX_PART_UNLOCK (part);
+
+  if (target == NULL)
+    return FALSE;
+
+  ret = gst_pad_peer_query (target, query);
+
+  if (ret == FALSE)
+    goto out;
+
+  /* Post-massaging of queries */
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:{
+      GstFormat fmt;
+      gint64 position;
+
+      gst_query_parse_position (query, &fmt, &position);
+      if (fmt != GST_FORMAT_TIME) {
+        /* Was `return FALSE` - that leaked the ref taken on target */
+        ret = FALSE;
+        goto out;
+      }
+      SPLITMUX_PART_LOCK (part);
+      position += part->start_offset;
+      GST_LOG_OBJECT (part, "Position %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (position));
+      SPLITMUX_PART_UNLOCK (part);
+
+      gst_query_set_position (query, fmt, position);
+      break;
+    }
+    default:
+      break;
+  }
+
+out:
+  gst_object_unref (target);
+  return ret;
+}
+
+/* GstElement state-change handling for the part reader.
+ * READY->PAUSED is forced to return ASYNC: completion is signalled
+ * later via do_async_done() once streams are collected and measured. */
+static GstStateChangeReturn
+gst_splitmux_part_reader_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstSplitMuxPartReader *reader = (GstSplitMuxPartReader *) element;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:{
+      break;
+    }
+    case GST_STATE_CHANGE_READY_TO_PAUSED:{
+      SPLITMUX_PART_LOCK (reader);
+      /* Point the file source at this part's path before it starts */
+      g_object_set (reader->src, "location", reader->path, NULL);
+      reader->prep_state = PART_STATE_PREPARING_COLLECT_STREAMS;
+      gst_splitmux_part_reader_set_flushing_locked (reader, FALSE);
+      reader->running = TRUE;
+      SPLITMUX_PART_UNLOCK (reader);
+
+      /* we go to PAUSED asynchronously once all streams have been collected
+       * and seeks to measure the stream lengths are done */
+      do_async_start (reader);
+      break;
+    }
+    case GST_STATE_CHANGE_READY_TO_NULL:
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      SPLITMUX_PART_LOCK (reader);
+      /* Flush queues and wake any threads waiting on the part cond so
+       * they can bail out */
+      gst_splitmux_part_reader_set_flushing_locked (reader, TRUE);
+      reader->running = FALSE;
+      SPLITMUX_PART_BROADCAST (reader);
+      SPLITMUX_PART_UNLOCK (reader);
+      break;
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      SPLITMUX_PART_LOCK (reader);
+      reader->active = FALSE;
+      gst_splitmux_part_reader_set_flushing_locked (reader, TRUE);
+      SPLITMUX_PART_BROADCAST (reader);
+      SPLITMUX_PART_UNLOCK (reader);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (ret == GST_STATE_CHANGE_FAILURE) {
+    /* Don't leave a pending async-start dangling on failure */
+    do_async_done (reader);
+    goto beach;
+  }
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* Completion is reported from finish_measuring_streams() */
+      ret = GST_STATE_CHANGE_ASYNC;
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      do_async_done (reader);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      SPLITMUX_PART_LOCK (reader);
+      gst_splitmux_part_reader_set_flushing_locked (reader, FALSE);
+      reader->active = TRUE;
+      SPLITMUX_PART_BROADCAST (reader);
+      SPLITMUX_PART_UNLOCK (reader);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      reader->prep_state = PART_STATE_NULL;
+      splitmux_part_reader_reset (reader);
+      break;
+    default:
+      break;
+  }
+
+beach:
+  return ret;
+}
+
+/* Kick off asynchronous preparation of this part by setting it to
+ * PAUSED. Returns FALSE if the state change failed immediately. */
+gboolean
+gst_splitmux_part_reader_prepare (GstSplitMuxPartReader * part)
+{
+  GstStateChangeReturn sret =
+      gst_element_set_state (GST_ELEMENT_CAST (part), GST_STATE_PAUSED);
+
+  return (sret != GST_STATE_CHANGE_FAILURE);
+}
+
+/* Tear the part reader back down to NULL state */
+void
+gst_splitmux_part_reader_unprepare (GstSplitMuxPartReader * part)
+{
+  gst_element_set_state (GST_ELEMENT_CAST (part), GST_STATE_NULL);
+}
+
+/* Store the file path this part reader will read from.
+ * Frees any previously-set path so repeated calls don't leak. */
+void
+gst_splitmux_part_reader_set_location (GstSplitMuxPartReader * reader,
+    const gchar * path)
+{
+  g_free (reader->path);
+  reader->path = g_strdup (path);
+}
+
+/* Activate this part for playback: seek it to the requested segment
+ * (with any extra seek flags) and set it to PLAYING.
+ * Returns FALSE if either the seek or the state change fails. */
+gboolean
+gst_splitmux_part_reader_activate (GstSplitMuxPartReader * reader,
+    GstSegment * seg, GstSeekFlags extra_flags)
+{
+  GST_DEBUG_OBJECT (reader, "Activating part reader");
+
+  if (!gst_splitmux_part_reader_seek_to_segment (reader, seg, extra_flags)) {
+    GST_ERROR_OBJECT (reader, "Failed to seek part to %" GST_SEGMENT_FORMAT,
+        seg);
+    return FALSE;
+  }
+  if (gst_element_set_state (GST_ELEMENT_CAST (reader),
+          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
+    GST_ERROR_OBJECT (reader, "Failed to set state to PLAYING");
+    return FALSE;
+  }
+  return TRUE;
+}
+
+/* Thread-safe read of the part's active flag */
+gboolean
+gst_splitmux_part_reader_is_active (GstSplitMuxPartReader * part)
+{
+  gboolean is_active;
+
+  SPLITMUX_PART_LOCK (part);
+  is_active = part->active;
+  SPLITMUX_PART_UNLOCK (part);
+
+  return is_active;
+}
+
+/* Drop the part back to PAUSED when it's no longer the playing part */
+void
+gst_splitmux_part_reader_deactivate (GstSplitMuxPartReader * reader)
+{
+  GST_DEBUG_OBJECT (reader, "Deactivating reader");
+  gst_element_set_state (GST_ELEMENT_CAST (reader), GST_STATE_PAUSED);
+}
+
+/* Set or clear the flushing state on every pad's data queue.
+ * The _locked suffix means the caller must hold the part lock.
+ * When flushing, queued items are also discarded so streaming threads
+ * blocked in a queue operation wake up. */
+void
+gst_splitmux_part_reader_set_flushing_locked (GstSplitMuxPartReader * reader,
+    gboolean flushing)
+{
+  GList *cur;
+
+  GST_LOG_OBJECT (reader, "%s dataqueues",
+      flushing ? "Flushing" : "Done flushing");
+  for (cur = g_list_first (reader->pads); cur != NULL; cur = g_list_next (cur)) {
+    GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (cur->data);
+    gst_data_queue_set_flushing (part_pad->queue, flushing);
+    if (flushing)
+      gst_data_queue_flush (part_pad->queue);
+  }
+}
+
+/* Install the callback (and its user data) used to map a demuxer pad
+ * to the output pad it should feed. No locking here - NOTE(review):
+ * appears intended to be called before the reader starts; confirm. */
+void
+gst_splitmux_part_reader_set_callbacks (GstSplitMuxPartReader * reader,
+    gpointer cb_data, GstSplitMuxPartReaderPadCb get_pad_cb)
+{
+  reader->cb_data = cb_data;
+  reader->get_pad_cb = get_pad_cb;
+}
+
+/* Return the smallest max_ts seen on any non-sparse pad, i.e. the time
+ * up to which every non-sparse stream of this part has produced data.
+ * ret starts at GST_CLOCK_TIME_NONE (the maximum value), so with no
+ * qualifying pads NONE is returned. */
+GstClockTime
+gst_splitmux_part_reader_get_end_offset (GstSplitMuxPartReader * reader)
+{
+  GList *cur;
+  GstClockTime ret = GST_CLOCK_TIME_NONE;
+
+  SPLITMUX_PART_LOCK (reader);
+  for (cur = g_list_first (reader->pads); cur != NULL; cur = g_list_next (cur)) {
+    GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (cur->data);
+    if (!part_pad->is_sparse && part_pad->max_ts < ret)
+      ret = part_pad->max_ts;
+  }
+
+  SPLITMUX_PART_UNLOCK (reader);
+
+  return ret;
+}
+
+/* Record where this part begins in overall playback time (time_offset)
+ * and the timestamp adjustment applied to its buffers (ts_offset) */
+void
+gst_splitmux_part_reader_set_start_offset (GstSplitMuxPartReader * reader,
+    GstClockTime time_offset, GstClockTime ts_offset)
+{
+  SPLITMUX_PART_LOCK (reader);
+  reader->start_offset = time_offset;
+  reader->ts_offset = ts_offset;
+  GST_INFO_OBJECT (reader, "Time offset now %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (time_offset));
+  SPLITMUX_PART_UNLOCK (reader);
+}
+
+/* Thread-safe accessor for the part's start offset */
+GstClockTime
+gst_splitmux_part_reader_get_start_offset (GstSplitMuxPartReader * reader)
+{
+  GstClockTime offset;
+
+  SPLITMUX_PART_LOCK (reader);
+  offset = reader->start_offset;
+  SPLITMUX_PART_UNLOCK (reader);
+
+  return offset;
+}
+
+/* Thread-safe accessor for the part's measured duration */
+GstClockTime
+gst_splitmux_part_reader_get_duration (GstSplitMuxPartReader * reader)
+{
+  GstClockTime duration;
+
+  SPLITMUX_PART_LOCK (reader);
+  duration = reader->duration;
+  SPLITMUX_PART_UNLOCK (reader);
+
+  return duration;
+}
+
+/* Find the internal part pad whose exposed target is @target.
+ * Returns: (transfer full) (nullable): a new ref on the matching pad,
+ * or NULL if no pad wraps that target. */
+GstPad *
+gst_splitmux_part_reader_lookup_pad (GstSplitMuxPartReader * reader,
+    GstPad * target)
+{
+  GstPad *result = NULL;
+  GList *cur;
+
+  SPLITMUX_PART_LOCK (reader);
+  for (cur = g_list_first (reader->pads); cur != NULL; cur = g_list_next (cur)) {
+    GstSplitMuxPartPad *part_pad = SPLITMUX_PART_PAD_CAST (cur->data);
+    if (part_pad->target == target) {
+      result = (GstPad *) gst_object_ref (part_pad);
+      break;
+    }
+  }
+  SPLITMUX_PART_UNLOCK (reader);
+
+  return result;
+}
+
+/* Pop one queued item for @pad into @item.
+ * The part lock is deliberately dropped around the (blocking)
+ * gst_data_queue_pop() so a flush can wake us - a ref on the queue is
+ * held across the unlocked region. Returns GST_FLOW_ERROR if the
+ * reader already failed preparation, GST_FLOW_FLUSHING if the pop was
+ * interrupted, GST_FLOW_OK otherwise. A popped EOS event marks the
+ * pad as EOS. */
+GstFlowReturn
+gst_splitmux_part_reader_pop (GstSplitMuxPartReader * reader, GstPad * pad,
+    GstDataQueueItem ** item)
+{
+  GstSplitMuxPartPad *part_pad = (GstSplitMuxPartPad *) (pad);
+  GstDataQueue *q;
+  GstFlowReturn ret;
+
+  /* Get one item from the appropriate dataqueue */
+  SPLITMUX_PART_LOCK (reader);
+  if (reader->prep_state == PART_STATE_FAILED) {
+    SPLITMUX_PART_UNLOCK (reader);
+    return GST_FLOW_ERROR;
+  }
+
+  q = gst_object_ref (part_pad->queue);
+
+  /* Have to drop the lock around pop, so we can be woken up for flush */
+  SPLITMUX_PART_UNLOCK (reader);
+  if (!gst_data_queue_pop (q, item) || (*item == NULL)) {
+    ret = GST_FLOW_FLUSHING;
+    goto out;
+  }
+
+  SPLITMUX_PART_LOCK (reader);
+
+  /* Wake anyone waiting for queue space / progress */
+  SPLITMUX_PART_BROADCAST (reader);
+  if (GST_IS_EVENT ((*item)->object)) {
+    GstEvent *e = (GstEvent *) ((*item)->object);
+    /* Mark this pad as EOS */
+    if (GST_EVENT_TYPE (e) == GST_EVENT_EOS)
+      part_pad->is_eos = TRUE;
+  }
+
+  SPLITMUX_PART_UNLOCK (reader);
+
+  ret = GST_FLOW_OK;
+out:
+  gst_object_unref (q);
+  return ret;
+}
+
+/* GstBin message handler: on an ERROR from any child, mark the reader
+ * as failed, wake up any waiters and complete a pending async state
+ * change, then forward the message to the parent class. */
+static void
+bus_handler (GstBin * bin, GstMessage * message)
+{
+  GstSplitMuxPartReader *reader = (GstSplitMuxPartReader *) bin;
+
+  switch (GST_MESSAGE_TYPE (message)) {
+    case GST_MESSAGE_ERROR:
+      /* Make sure to set the state to failed and wake up the listener
+       * on error */
+      SPLITMUX_PART_LOCK (reader);
+      GST_ERROR_OBJECT (reader, "Got error message from child %" GST_PTR_FORMAT
+          " marking this reader as failed", GST_MESSAGE_SRC (message));
+      reader->prep_state = PART_STATE_FAILED;
+      SPLITMUX_PART_BROADCAST (reader);
+      SPLITMUX_PART_UNLOCK (reader);
+      do_async_done (reader);
+      break;
+    default:
+      break;
+  }
+
+  GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+}
diff --git a/gst/multifile/gstsplitmuxpartreader.h b/gst/multifile/gstsplitmuxpartreader.h
new file mode 100644
index 0000000000..78ecc6eb5a
--- /dev/null
+++ b/gst/multifile/gstsplitmuxpartreader.h
@@ -0,0 +1,122 @@
+/* GStreamer Split Muxed File Source - Part reader
+ * Copyright (C) 2014 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef __GST_SPLITMUX_PART_READER_H__
+#define __GST_SPLITMUX_PART_READER_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstdataqueue.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_SPLITMUX_PART_READER \
+  (gst_splitmux_part_reader_get_type())
+/* Cast macros previously targeted GstSplitMuxSrc(Class) - a copy/paste
+ * slip; the instance type of this GType is GstSplitMuxPartReader */
+#define GST_SPLITMUX_PART_READER(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SPLITMUX_PART_READER,GstSplitMuxPartReader))
+#define GST_SPLITMUX_PART_READER_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SPLITMUX_PART_READER,GstSplitMuxPartReaderClass))
+#define GST_IS_SPLITMUX_PART_READER(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SPLITMUX_PART_READER))
+#define GST_IS_SPLITMUX_PART_READER_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SPLITMUX_PART_READER))
+
+typedef struct _GstSplitMuxPartReader GstSplitMuxPartReader;
+typedef struct _GstSplitMuxPartReaderClass GstSplitMuxPartReaderClass;
+typedef struct _SplitMuxSrcPad SplitMuxSrcPad;
+typedef struct _SplitMuxSrcPadClass SplitMuxSrcPadClass;
+
+/* Preparation state machine for a part reader */
+typedef enum
+{
+  PART_STATE_NULL,                      /* not prepared */
+  PART_STATE_PREPARING_COLLECT_STREAMS, /* waiting for pads to appear and preroll */
+  PART_STATE_PREPARING_MEASURE_STREAMS, /* seeking to measure stream lengths */
+  PART_STATE_PREPARING_RESET_FOR_READY, /* resetting before signalling ready */
+  PART_STATE_READY,                     /* prepared and usable */
+  PART_STATE_FAILED,                    /* an error occurred */
+} GstSplitMuxPartState;
+
+/* Callback mapping a demuxer source pad of the part to an output pad */
+typedef GstPad *(*GstSplitMuxPartReaderPadCb)(GstSplitMuxPartReader *reader, GstPad *src_pad, gpointer cb_data);
+
+/* A pipeline that reads a single file part of a split recording */
+struct _GstSplitMuxPartReader
+{
+  GstPipeline parent;
+
+  GstSplitMuxPartState prep_state;  /* preparation state machine */
+
+  gchar *path;                      /* file this part reads (owned) */
+
+  /* Internal elements: file source -> typefind -> demuxer */
+  GstElement *src;
+  GstElement *typefind;
+  GstElement *demux;
+
+  gboolean async_pending;           /* async state change outstanding */
+  gboolean active;                  /* currently the playing part */
+  gboolean running;
+  gboolean prepared;
+  gboolean flushing;
+  gboolean no_more_pads;            /* demuxer exposed all its pads */
+
+  GstClockTime duration;            /* measured duration of this part */
+  GstClockTime start_offset;        /* start position in whole-stream time */
+  GstClockTime ts_offset;           /* timestamp adjustment for buffers */
+
+  GList *pads;                      /* internal part pads */
+
+  GCond inactive_cond;
+  GMutex lock;
+  GMutex type_lock;
+  GMutex msg_lock;
+
+  /* Callback mapping internal pads to output pads (see _set_callbacks) */
+  GstSplitMuxPartReaderPadCb get_pad_cb;
+  gpointer cb_data;
+};
+
+struct _GstSplitMuxPartReaderClass
+{
+  GstPipelineClass parent_class;
+
+  /* Class vfuncs / signal slots */
+  void (*prepared) (GstSplitMuxPartReader *reader);
+  void (*end_of_part) (GstSplitMuxPartReader *reader);
+};
+
+GType gst_splitmux_part_reader_get_type (void);
+
+/* Configuration - set up before preparing the part */
+void gst_splitmux_part_reader_set_callbacks (GstSplitMuxPartReader *reader,
+    gpointer cb_data, GstSplitMuxPartReaderPadCb get_pad_cb);
+gboolean gst_splitmux_part_reader_prepare (GstSplitMuxPartReader *part);
+void gst_splitmux_part_reader_unprepare (GstSplitMuxPartReader *part);
+void gst_splitmux_part_reader_set_location (GstSplitMuxPartReader *reader,
+    const gchar *path);
+gboolean gst_splitmux_part_is_eos (GstSplitMuxPartReader *reader);
+
+/* Activation / playback control */
+gboolean gst_splitmux_part_reader_activate (GstSplitMuxPartReader *part, GstSegment *seg, GstSeekFlags extra_flags);
+void gst_splitmux_part_reader_deactivate (GstSplitMuxPartReader *part);
+gboolean gst_splitmux_part_reader_is_active (GstSplitMuxPartReader *part);
+
+/* Queries and timing */
+gboolean gst_splitmux_part_reader_src_query (GstSplitMuxPartReader *part, GstPad *src_pad, GstQuery * query);
+void gst_splitmux_part_reader_set_start_offset (GstSplitMuxPartReader *part, GstClockTime time_offset, GstClockTime ts_offset);
+GstClockTime gst_splitmux_part_reader_get_start_offset (GstSplitMuxPartReader *part);
+GstClockTime gst_splitmux_part_reader_get_end_offset (GstSplitMuxPartReader *part);
+GstClockTime gst_splitmux_part_reader_get_duration (GstSplitMuxPartReader * reader);
+
+/* Pad lookup and data flow */
+GstPad *gst_splitmux_part_reader_lookup_pad (GstSplitMuxPartReader *reader, GstPad *target);
+GstFlowReturn gst_splitmux_part_reader_pop (GstSplitMuxPartReader *reader, GstPad *part_pad, GstDataQueueItem ** item);
+
+G_END_DECLS
+
+#endif
diff --git a/gst/multifile/gstsplitmuxsink.c b/gst/multifile/gstsplitmuxsink.c
new file mode 100644
index 0000000000..0d4a6a3f79
--- /dev/null
+++ b/gst/multifile/gstsplitmuxsink.c
@@ -0,0 +1,3819 @@
+/* GStreamer Muxer bin that splits output stream by size/time
+ * Copyright (C) <2014-2019> Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-splitmuxsink
+ * @title: splitmuxsink
+ * @short_description: Muxer wrapper for splitting output stream by size or time
+ *
+ * This element wraps a muxer and a sink, and starts a new file when the mux
+ * contents are about to cross a threshold of maximum size or maximum time,
+ * splitting at video keyframe boundaries. Exactly one input video stream
+ * can be muxed, with as many accompanying audio and subtitle streams as
+ * desired.
+ *
+ * By default, it uses mp4mux and filesink, but they can be changed via
+ * the 'muxer' and 'sink' properties.
+ *
+ * The minimum file size is 1 GOP, however - so limits may be overrun if the
+ * distance between any 2 keyframes is larger than the limits.
+ *
+ * If a video stream is available, the splitting process is driven by the video
+ * stream contents, and the video stream must contain closed GOPs for the output
+ * file parts to be played individually correctly. In the absence of a video
+ * stream, the first available stream is used as reference for synchronization.
+ *
+ * In the async-finalize mode, when the threshold is crossed, the old muxer
+ * and sink is disconnected from the pipeline and left to finish the file
+ * asynchronously, and a new muxer and sink is created to continue with the
+ * next fragment. For that reason, instead of muxer and sink objects, the
+ * muxer-factory and sink-factory properties are used to construct the new
+ * objects, together with muxer-properties and sink-properties.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 -e v4l2src num-buffers=500 ! video/x-raw,width=320,height=240 ! videoconvert ! queue ! timeoverlay ! x264enc key-int-max=10 ! h264parse ! splitmuxsink location=video%02d.mov max-size-time=10000000000 max-size-bytes=1000000
+ * ]|
+ * Records a video stream captured from a v4l2 device and muxes it into
+ * ISO mp4 files, splitting as needed to limit size/duration to 10 seconds
+ * and 1MB maximum size.
+ *
+ * |[
+ * gst-launch-1.0 -e v4l2src num-buffers=500 ! video/x-raw,width=320,height=240 ! videoconvert ! queue ! timeoverlay ! x264enc key-int-max=10 ! h264parse ! splitmuxsink location=video%02d.mkv max-size-time=10000000000 muxer-factory=matroskamux muxer-properties="properties,streamable=true"
+ * ]|
+ * Records a video stream captured from a v4l2 device and muxes it into
+ * streamable Matroska files, splitting as needed to limit size/duration to 10
+ * seconds. Each file will finalize asynchronously.
+ *
+ * |[
+ * gst-launch-1.0 videotestsrc num-buffers=10 ! jpegenc ! .video splitmuxsink muxer=qtmux muxer-pad-map=x-pad-map,video=video_1 location=test%05d.mp4 -v
+ * ]|
+ * Records 10 frames to an mp4 file, using a muxer-pad-map to make explicit mappings between the splitmuxsink sink pad and the corresponding muxer pad
+ * it will deliver to.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <glib/gstdio.h>
+#include <gst/video/video.h>
+#include "gstsplitmuxsink.h"
+
+GST_DEBUG_CATEGORY_STATIC (splitmux_debug);
+#define GST_CAT_DEFAULT splitmux_debug
+
+#define GST_SPLITMUX_STATE_LOCK(s) g_mutex_lock(&(s)->state_lock)
+#define GST_SPLITMUX_STATE_UNLOCK(s) g_mutex_unlock(&(s)->state_lock)
+
+#define GST_SPLITMUX_LOCK(s) g_mutex_lock(&(s)->lock)
+#define GST_SPLITMUX_UNLOCK(s) g_mutex_unlock(&(s)->lock)
+#define GST_SPLITMUX_WAIT_INPUT(s) g_cond_wait (&(s)->input_cond, &(s)->lock)
+#define GST_SPLITMUX_BROADCAST_INPUT(s) g_cond_broadcast (&(s)->input_cond)
+
+#define GST_SPLITMUX_WAIT_OUTPUT(s) g_cond_wait (&(s)->output_cond, &(s)->lock)
+#define GST_SPLITMUX_BROADCAST_OUTPUT(s) g_cond_broadcast (&(s)->output_cond)
+
+static void split_now (GstSplitMuxSink * splitmux);
+static void split_after (GstSplitMuxSink * splitmux);
+static void split_at_running_time (GstSplitMuxSink * splitmux,
+ GstClockTime split_time);
+
+enum
+{
+ PROP_0,
+ PROP_LOCATION,
+ PROP_START_INDEX,
+ PROP_MAX_SIZE_TIME,
+ PROP_MAX_SIZE_BYTES,
+ PROP_MAX_SIZE_TIMECODE,
+ PROP_SEND_KEYFRAME_REQUESTS,
+ PROP_MAX_FILES,
+ PROP_MUXER_OVERHEAD,
+ PROP_USE_ROBUST_MUXING,
+ PROP_ALIGNMENT_THRESHOLD,
+ PROP_MUXER,
+ PROP_SINK,
+ PROP_RESET_MUXER,
+ PROP_ASYNC_FINALIZE,
+ PROP_MUXER_FACTORY,
+ PROP_MUXER_PRESET,
+ PROP_MUXER_PROPERTIES,
+ PROP_SINK_FACTORY,
+ PROP_SINK_PRESET,
+ PROP_SINK_PROPERTIES,
+ PROP_MUXERPAD_MAP
+};
+
+#define DEFAULT_MAX_SIZE_TIME 0
+#define DEFAULT_MAX_SIZE_BYTES 0
+#define DEFAULT_MAX_FILES 0
+#define DEFAULT_MUXER_OVERHEAD 0.02
+#define DEFAULT_SEND_KEYFRAME_REQUESTS FALSE
+#define DEFAULT_ALIGNMENT_THRESHOLD 0
+#define DEFAULT_MUXER "mp4mux"
+#define DEFAULT_SINK "filesink"
+#define DEFAULT_USE_ROBUST_MUXING FALSE
+#define DEFAULT_RESET_MUXER TRUE
+#define DEFAULT_ASYNC_FINALIZE FALSE
+#define DEFAULT_START_INDEX 0
+
+/* Context for the asynchronous EOS push: which stream context and
+ * which pad the EOS event should be delivered to */
+typedef struct _AsyncEosHelper
+{
+  MqStreamCtx *ctx;
+  GstPad *pad;
+} AsyncEosHelper;
+
+enum
+{
+ SIGNAL_FORMAT_LOCATION,
+ SIGNAL_FORMAT_LOCATION_FULL,
+ SIGNAL_SPLIT_NOW,
+ SIGNAL_SPLIT_AFTER,
+ SIGNAL_SPLIT_AT_RUNNING_TIME,
+ SIGNAL_MUXER_ADDED,
+ SIGNAL_SINK_ADDED,
+ SIGNAL_LAST
+};
+
+static guint signals[SIGNAL_LAST];
+
+static GstStaticPadTemplate video_sink_template =
+GST_STATIC_PAD_TEMPLATE ("video",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS_ANY);
+static GstStaticPadTemplate video_aux_sink_template =
+GST_STATIC_PAD_TEMPLATE ("video_aux_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS_ANY);
+static GstStaticPadTemplate audio_sink_template =
+GST_STATIC_PAD_TEMPLATE ("audio_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS_ANY);
+static GstStaticPadTemplate subtitle_sink_template =
+GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS_ANY);
+static GstStaticPadTemplate caption_sink_template =
+GST_STATIC_PAD_TEMPLATE ("caption_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS_ANY);
+
+static GQuark PAD_CONTEXT;
+static GQuark EOS_FROM_US;
+static GQuark RUNNING_TIME;
+/* EOS_FROM_US is only valid in async-finalize mode. We need to know whether
+ * to forward an incoming EOS message, but we cannot rely on the state of the
+ * splitmux anymore, so we set this qdata on the sink instead.
+ * The muxer and sink must be destroyed after both of these things have
+ * finished:
+ * 1) The EOS message has been sent when the fragment is ending
+ * 2) The muxer has been unlinked and relinked
+ * Therefore, EOS_FROM_US can have these two values:
+ * 0: EOS was not requested from us. Forward the message. The muxer and the
+ * sink will be destroyed together with the rest of the bin.
+ * 1: EOS was requested from us, but the other of the two tasks hasn't
+ * finished. Set EOS_FROM_US to 2 and do your stuff.
+ * 2: EOS was requested from us and the other of the two tasks has finished.
+ * Now we can destroy the muxer and the sink.
+ */
+
+/* One-time init run from the G_DEFINE_TYPE_EXTENDED expansion: intern
+ * the qdata quarks and register the debug category. */
+static void
+_do_init (void)
+{
+  PAD_CONTEXT = g_quark_from_static_string ("pad-context");
+  EOS_FROM_US = g_quark_from_static_string ("eos-from-us");
+  RUNNING_TIME = g_quark_from_static_string ("running-time");
+  GST_DEBUG_CATEGORY_INIT (splitmux_debug, "splitmuxsink", 0,
+      "Split File Muxing Sink");
+}
+
+#define gst_splitmux_sink_parent_class parent_class
+G_DEFINE_TYPE_EXTENDED (GstSplitMuxSink, gst_splitmux_sink, GST_TYPE_BIN, 0,
+ _do_init ());
+GST_ELEMENT_REGISTER_DEFINE (splitmuxsink, "splitmuxsink", GST_RANK_NONE,
+ GST_TYPE_SPLITMUX_SINK);
+
+static gboolean create_muxer (GstSplitMuxSink * splitmux);
+static gboolean create_sink (GstSplitMuxSink * splitmux);
+static void gst_splitmux_sink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_splitmux_sink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_splitmux_sink_dispose (GObject * object);
+static void gst_splitmux_sink_finalize (GObject * object);
+
+static GstPad *gst_splitmux_sink_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_splitmux_sink_release_pad (GstElement * element, GstPad * pad);
+
+static GstStateChangeReturn gst_splitmux_sink_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void bus_handler (GstBin * bin, GstMessage * msg);
+static void set_next_filename (GstSplitMuxSink * splitmux, MqStreamCtx * ctx);
+static GstFlowReturn start_next_fragment (GstSplitMuxSink * splitmux,
+ MqStreamCtx * ctx);
+static void mq_stream_ctx_free (MqStreamCtx * ctx);
+static void grow_blocked_queues (GstSplitMuxSink * splitmux);
+
+static void gst_splitmux_sink_ensure_max_files (GstSplitMuxSink * splitmux);
+static GstElement *create_element (GstSplitMuxSink * splitmux,
+ const gchar * factory, const gchar * name, gboolean locked);
+
+static void do_async_done (GstSplitMuxSink * splitmux);
+static void gst_splitmux_reset_timecode (GstSplitMuxSink * splitmux);
+
+/* Allocate a zeroed per-buffer bookkeeping record */
+static MqStreamBuf *
+mq_stream_buf_new (void)
+{
+  return g_slice_new0 (MqStreamBuf);
+}
+
+/* Release a record allocated by mq_stream_buf_new() */
+static void
+mq_stream_buf_free (MqStreamBuf * data)
+{
+  g_slice_free (MqStreamBuf, data);
+}
+
+/* Allocate a zeroed output-command record */
+static SplitMuxOutputCommand *
+out_cmd_buf_new (void)
+{
+  return g_slice_new0 (SplitMuxOutputCommand);
+}
+
+/* Release a record allocated by out_cmd_buf_new() */
+static void
+out_cmd_buf_free (SplitMuxOutputCommand * data)
+{
+  g_slice_free (SplitMuxOutputCommand, data);
+}
+
+static void
+gst_splitmux_sink_class_init (GstSplitMuxSinkClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBinClass *gstbin_class = (GstBinClass *) klass;
+
+ gobject_class->set_property = gst_splitmux_sink_set_property;
+ gobject_class->get_property = gst_splitmux_sink_get_property;
+ gobject_class->dispose = gst_splitmux_sink_dispose;
+ gobject_class->finalize = gst_splitmux_sink_finalize;
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Split Muxing Bin", "Generic/Bin/Muxer",
+ "Convenience bin that muxes incoming streams into multiple time/size limited files",
+ "Jan Schmidt <jan@centricular.com>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_aux_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &audio_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &subtitle_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &caption_sink_template);
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_splitmux_sink_change_state);
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_splitmux_sink_request_new_pad);
+ gstelement_class->release_pad =
+ GST_DEBUG_FUNCPTR (gst_splitmux_sink_release_pad);
+
+ gstbin_class->handle_message = bus_handler;
+
+ g_object_class_install_property (gobject_class, PROP_LOCATION,
+ g_param_spec_string ("location", "File Output Pattern",
+ "Format string pattern for the location of the files to write (e.g. video%05d.mp4)",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MUXER_OVERHEAD,
+ g_param_spec_double ("mux-overhead", "Muxing Overhead",
+ "Extra size overhead of muxing (0.02 = 2%)", 0.0, 1.0,
+ DEFAULT_MUXER_OVERHEAD,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_SIZE_TIME,
+ g_param_spec_uint64 ("max-size-time", "Max. size (ns)",
+ "Max. amount of time per file (in ns, 0=disable)", 0, G_MAXUINT64,
+ DEFAULT_MAX_SIZE_TIME,
+ G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
+ G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MAX_SIZE_BYTES,
+ g_param_spec_uint64 ("max-size-bytes", "Max. size bytes",
+ "Max. amount of data per file (in bytes, 0=disable)", 0, G_MAXUINT64,
+ DEFAULT_MAX_SIZE_BYTES,
+ G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
+ G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MAX_SIZE_TIMECODE,
+ g_param_spec_string ("max-size-timecode", "Maximum timecode difference",
+ "Maximum difference in timecode between first and last frame. "
+ "Separator is assumed to be \":\" everywhere (e.g. 01:00:00:00). "
+ "Will only be effective if a timecode track is present.", NULL,
+ G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
+ G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SEND_KEYFRAME_REQUESTS,
+ g_param_spec_boolean ("send-keyframe-requests",
+ "Request keyframes at max-size-time",
+ "Request a keyframe every max-size-time ns to try splitting at that point. "
+ "Needs max-size-bytes to be 0 in order to be effective.",
+ DEFAULT_SEND_KEYFRAME_REQUESTS,
+ G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
+ G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MAX_FILES,
+ g_param_spec_uint ("max-files", "Max files",
+ "Maximum number of files to keep on disk. Once the maximum is reached,"
+ "old files start to be deleted to make room for new ones.", 0,
+ G_MAXUINT, DEFAULT_MAX_FILES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_ALIGNMENT_THRESHOLD,
+ g_param_spec_uint64 ("alignment-threshold", "Alignment threshold (ns)",
+ "Allow non-reference streams to be that many ns before the reference"
+ " stream", 0, G_MAXUINT64, DEFAULT_ALIGNMENT_THRESHOLD,
+ G_PARAM_READWRITE | GST_PARAM_MUTABLE_READY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MUXER,
+ g_param_spec_object ("muxer", "Muxer",
+ "The muxer element to use (NULL = default mp4mux). "
+ "Valid only for async-finalize = FALSE",
+ GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SINK,
+ g_param_spec_object ("sink", "Sink",
+ "The sink element (or element chain) to use (NULL = default filesink). "
+ "Valid only for async-finalize = FALSE",
+ GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_USE_ROBUST_MUXING,
+ g_param_spec_boolean ("use-robust-muxing",
+ "Support robust-muxing mode of some muxers",
+ "Check if muxers support robust muxing via the reserved-max-duration and "
+ "reserved-duration-remaining properties and use them if so. "
+ "(Only present on qtmux and mp4mux for now). splitmuxsink may then also "
+ " create new fragments if the reserved header space is about to overflow. "
+ "Note that for mp4mux and qtmux, reserved-moov-update-period must be set "
+ "manually by the app to a non-zero value for robust muxing to have an effect.",
+ DEFAULT_USE_ROBUST_MUXING,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RESET_MUXER,
+ g_param_spec_boolean ("reset-muxer",
+ "Reset Muxer",
+ "Reset the muxer after each segment. Disabling this will not work for most muxers.",
+ DEFAULT_RESET_MUXER, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_ASYNC_FINALIZE,
+ g_param_spec_boolean ("async-finalize",
+ "Finalize fragments asynchronously",
+ "Finalize each fragment asynchronously and start a new one",
+ DEFAULT_ASYNC_FINALIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MUXER_FACTORY,
+ g_param_spec_string ("muxer-factory", "Muxer factory",
+ "The muxer element factory to use (default = mp4mux). "
+ "Valid only for async-finalize = TRUE",
+ "mp4mux", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstSplitMuxSink:muxer-preset
+ *
+ * An optional #GstPreset name to use for the muxer. This only has an effect
+ * in `async-finalize=TRUE` mode.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_MUXER_PRESET,
+ g_param_spec_string ("muxer-preset", "Muxer preset",
+ "The muxer preset to use. "
+ "Valid only for async-finalize = TRUE",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_MUXER_PROPERTIES,
+ g_param_spec_boxed ("muxer-properties", "Muxer properties",
+ "The muxer element properties to use. "
+ "Example: {properties,boolean-prop=true,string-prop=\"hi\"}. "
+ "Valid only for async-finalize = TRUE",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SINK_FACTORY,
+ g_param_spec_string ("sink-factory", "Sink factory",
+ "The sink element factory to use (default = filesink). "
+ "Valid only for async-finalize = TRUE",
+ "filesink", G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstSplitMuxSink:sink-preset
+ *
+ * An optional #GstPreset name to use for the sink. This only has an effect
+ * in `async-finalize=TRUE` mode.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_SINK_PRESET,
+ g_param_spec_string ("sink-preset", "Sink preset",
+ "The sink preset to use. "
+ "Valid only for async-finalize = TRUE",
+ NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SINK_PROPERTIES,
+ g_param_spec_boxed ("sink-properties", "Sink properties",
+ "The sink element properties to use. "
+ "Example: {properties,boolean-prop=true,string-prop=\"hi\"}. "
+ "Valid only for async-finalize = TRUE",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_START_INDEX,
+ g_param_spec_int ("start-index", "Start Index",
+ "Start value of fragment index.",
+ 0, G_MAXINT, DEFAULT_START_INDEX,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSplitMuxSink::muxer-pad-map
+ *
+ * An optional GstStructure that provides a map from splitmuxsink sinkpad
+ * names to muxer pad names they should feed. Splitmuxsink has some default
+ * mapping behaviour to link video to video pads and audio to audio pads
+ * that usually works fine. This property is useful if you need to ensure
+ * a particular mapping to muxed streams.
+ *
+ * The GstStructure contains string fields like so:
+ * splitmuxsink muxer-pad-map=x-pad-map,video=video_1
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MUXERPAD_MAP,
+ g_param_spec_boxed ("muxer-pad-map", "Muxer pad map",
+ "A GstStructure specifies the mapping from splitmuxsink sink pads to muxer pads",
+ GST_TYPE_STRUCTURE,
+ (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
+
+ /**
+ * GstSplitMuxSink::format-location:
+ * @splitmux: the #GstSplitMuxSink
+ * @fragment_id: the sequence number of the file to be created
+ *
+ * Returns: the location to be used for the next output file. This must be
+ * a newly-allocated string which will be freed with g_free() by the
+ * splitmuxsink element when it no longer needs it, so use g_strdup() or
+ * g_strdup_printf() or similar functions to allocate it.
+ */
+ signals[SIGNAL_FORMAT_LOCATION] =
+ g_signal_new ("format-location", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_STRING, 1, G_TYPE_UINT);
+
+ /**
+ * GstSplitMuxSink::format-location-full:
+ * @splitmux: the #GstSplitMuxSink
+ * @fragment_id: the sequence number of the file to be created
+ * @first_sample: A #GstSample containing the first buffer
+ * from the reference stream in the new file
+ *
+ * Returns: the location to be used for the next output file. This must be
+ * a newly-allocated string which will be freed with g_free() by the
+ * splitmuxsink element when it no longer needs it, so use g_strdup() or
+ * g_strdup_printf() or similar functions to allocate it.
+ *
+ * Since: 1.12
+ */
+ signals[SIGNAL_FORMAT_LOCATION_FULL] =
+ g_signal_new ("format-location-full", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_STRING, 2, G_TYPE_UINT,
+ GST_TYPE_SAMPLE);
+
+ /**
+ * GstSplitMuxSink::split-now:
+ * @splitmux: the #GstSplitMuxSink
+ *
+ * When called by the user, this action signal splits the video file (and begins a new one) immediately.
+ * The current GOP will be output to the new file.
+ *
+ * Since: 1.14
+ */
+ signals[SIGNAL_SPLIT_NOW] =
+ g_signal_new ("split-now", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (GstSplitMuxSinkClass, split_now), NULL, NULL, NULL,
+ G_TYPE_NONE, 0);
+
+ /**
+ * GstSplitMuxSink::split-after:
+ * @splitmux: the #GstSplitMuxSink
+ *
+ * When called by the user, this action signal splits the video file (and begins a new one) immediately.
+ * Unlike the 'split-now' signal, with 'split-after', the current GOP will be output to the old file.
+ *
+ * Since: 1.16
+ */
+ signals[SIGNAL_SPLIT_AFTER] =
+ g_signal_new ("split-after", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (GstSplitMuxSinkClass, split_after), NULL, NULL, NULL,
+ G_TYPE_NONE, 0);
+
+ /**
+ * GstSplitMuxSink::split-at-running-time:
+ * @splitmux: the #GstSplitMuxSink
+ *
+ * When called by the user, this action signal splits the video file (and
+ * begins a new one) as soon as the given running time is reached. If this
+ * action signal is called multiple times, running times are queued up and
+ * processed in the order they were given.
+ *
+ * Note that this is prone to race conditions, where said running time is
+ * reached and surpassed before we had a chance to split. The file will
+ * still split immediately, but in order to make sure that the split doesn't
+ * happen too late, it is recommended to call this action signal from
+ * something that will prevent further buffers from flowing into
+ * splitmuxsink before the split is completed, such as a pad probe before
+ * splitmuxsink.
+ *
+ *
+ * Since: 1.16
+ */
+ signals[SIGNAL_SPLIT_AT_RUNNING_TIME] =
+ g_signal_new ("split-at-running-time", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (GstSplitMuxSinkClass, split_at_running_time), NULL, NULL,
+ NULL, G_TYPE_NONE, 1, G_TYPE_UINT64);
+
+ /**
+ * GstSplitMuxSink::muxer-added:
+ * @splitmux: the #GstSplitMuxSink
+ * @muxer: the newly added muxer element
+ *
+ * Since: 1.14
+ */
+ signals[SIGNAL_MUXER_ADDED] =
+ g_signal_new ("muxer-added", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_ELEMENT);
+
+ /**
+ * GstSplitMuxSink::sink-added:
+ * @splitmux: the #GstSplitMuxSink
+ * @sink: the newly added sink element
+ *
+ * Since: 1.14
+ */
+ signals[SIGNAL_SINK_ADDED] =
+ g_signal_new ("sink-added", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_ELEMENT);
+
+ klass->split_now = split_now;
+ klass->split_after = split_after;
+ klass->split_at_running_time = split_at_running_time;
+}
+
+static void
+gst_splitmux_sink_init (GstSplitMuxSink * splitmux)
+{
+  /* Instance init: create the locks/conditions shared by the input and
+   * output sides, the output command queue, and set property defaults. */
+  g_mutex_init (&splitmux->lock);
+  g_mutex_init (&splitmux->state_lock);
+  g_cond_init (&splitmux->input_cond);
+  g_cond_init (&splitmux->output_cond);
+  g_queue_init (&splitmux->out_cmd_q);
+
+  /* Property defaults */
+  splitmux->mux_overhead = DEFAULT_MUXER_OVERHEAD;
+  splitmux->threshold_time = DEFAULT_MAX_SIZE_TIME;
+  splitmux->threshold_bytes = DEFAULT_MAX_SIZE_BYTES;
+  splitmux->max_files = DEFAULT_MAX_FILES;
+  splitmux->send_keyframe_requests = DEFAULT_SEND_KEYFRAME_REQUESTS;
+  splitmux->alignment_threshold = DEFAULT_ALIGNMENT_THRESHOLD;
+  splitmux->use_robust_muxing = DEFAULT_USE_ROBUST_MUXING;
+  splitmux->reset_muxer = DEFAULT_RESET_MUXER;
+
+  /* Clear the timecode string first: the reset helper below clears the
+   * timecode state that is derived from it */
+  splitmux->threshold_timecode_str = NULL;
+  gst_splitmux_reset_timecode (splitmux);
+
+  splitmux->async_finalize = DEFAULT_ASYNC_FINALIZE;
+  splitmux->muxer_factory = g_strdup (DEFAULT_MUXER);
+  splitmux->muxer_properties = NULL;
+  splitmux->sink_factory = g_strdup (DEFAULT_SINK);
+  splitmux->sink_properties = NULL;
+
+  /* Mark this bin as a sink element for the pipeline */
+  GST_OBJECT_FLAG_SET (splitmux, GST_ELEMENT_FLAG_SINK);
+  splitmux->split_requested = FALSE;
+  splitmux->do_split_next_gop = FALSE;
+  /* Queue of pending user-requested split times (8-byte entries) */
+  splitmux->times_to_split = gst_queue_array_new_for_struct (8, 8);
+  splitmux->next_fku_time = GST_CLOCK_TIME_NONE;
+}
+
+static void
+gst_splitmux_reset_elements (GstSplitMuxSink * splitmux)
+{
+  /* Shut down and remove the current muxer and sink chain (if any),
+   * then forget all child pointers so a fresh set can be created. */
+  GstElement *to_remove[2];
+  guint n_remove = 0, i;
+
+  if (splitmux->muxer)
+    to_remove[n_remove++] = splitmux->muxer;
+  if (splitmux->active_sink)
+    to_remove[n_remove++] = splitmux->active_sink;
+
+  for (i = 0; i < n_remove; i++) {
+    GstElement *child = to_remove[i];
+
+    /* Lock the state so the bin won't touch it while we shut it down */
+    gst_element_set_locked_state (child, TRUE);
+    gst_element_set_state (child, GST_STATE_NULL);
+    gst_bin_remove (GST_BIN (splitmux), child);
+  }
+
+  splitmux->sink = splitmux->active_sink = splitmux->muxer = NULL;
+}
+
+static void
+gst_splitmux_reset_timecode (GstSplitMuxSink * splitmux)
+{
+  /* Free any cached timecodes and invalidate the next fragment-start
+   * timecode time. */
+  if (splitmux->in_tc != NULL) {
+    gst_video_time_code_free (splitmux->in_tc);
+    splitmux->in_tc = NULL;
+  }
+  if (splitmux->fragment_start_tc != NULL) {
+    gst_video_time_code_free (splitmux->fragment_start_tc);
+    splitmux->fragment_start_tc = NULL;
+  }
+  if (splitmux->gop_start_tc != NULL) {
+    gst_video_time_code_free (splitmux->gop_start_tc);
+    splitmux->gop_start_tc = NULL;
+  }
+  splitmux->next_fragment_start_tc_time = GST_CLOCK_TIME_NONE;
+}
+
+static void
+gst_splitmux_sink_dispose (GObject * object)
+{
+  /* GObject dispose: drop our cached child pointers before chaining up,
+   * since the parent (GstBin) dispose releases the children. */
+  GstSplitMuxSink *splitmux = GST_SPLITMUX_SINK (object);
+
+  /* Calling parent dispose invalidates all child pointers */
+  splitmux->sink = splitmux->active_sink = splitmux->muxer = NULL;
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject finalize: release all remaining resources. The NULL checks
+ * around g_free() were redundant (g_free(NULL) is a no-op) and have been
+ * dropped; checks remain only for destructors that require a non-NULL
+ * argument (gst_structure_free, gst_object_unref, etc.). */
+static void
+gst_splitmux_sink_finalize (GObject * object)
+{
+  GstSplitMuxSink *splitmux = GST_SPLITMUX_SINK (object);
+
+  g_cond_clear (&splitmux->input_cond);
+  g_cond_clear (&splitmux->output_cond);
+  g_mutex_clear (&splitmux->lock);
+  g_mutex_clear (&splitmux->state_lock);
+  /* Drop any queued output commands */
+  g_queue_foreach (&splitmux->out_cmd_q, (GFunc) out_cmd_buf_free, NULL);
+  g_queue_clear (&splitmux->out_cmd_q);
+
+  if (splitmux->muxerpad_map)
+    gst_structure_free (splitmux->muxerpad_map);
+
+  /* App-provided sink/muxer we took a ref on in set_property */
+  if (splitmux->provided_sink)
+    gst_object_unref (splitmux->provided_sink);
+  if (splitmux->provided_muxer)
+    gst_object_unref (splitmux->provided_muxer);
+
+  /* g_free() is NULL-safe, so no guards needed */
+  g_free (splitmux->muxer_factory);
+  g_free (splitmux->muxer_preset);
+  if (splitmux->muxer_properties)
+    gst_structure_free (splitmux->muxer_properties);
+  g_free (splitmux->sink_factory);
+  g_free (splitmux->sink_preset);
+  if (splitmux->sink_properties)
+    gst_structure_free (splitmux->sink_properties);
+
+  g_free (splitmux->threshold_timecode_str);
+  if (splitmux->tc_interval)
+    gst_video_time_code_interval_free (splitmux->tc_interval);
+
+  if (splitmux->times_to_split)
+    gst_queue_array_free (splitmux->times_to_split);
+
+  g_free (splitmux->location);
+
+  /* Make sure to free any un-released contexts. There should not be any,
+   * because the dispose will have freed all request pads though */
+  g_list_foreach (splitmux->contexts, (GFunc) mq_stream_ctx_free, NULL);
+  g_list_free (splitmux->contexts);
+  gst_splitmux_reset_timecode (splitmux);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/*
+ * Set any time threshold to the muxer, if it has
+ * reserved-max-duration and reserved-duration-remaining
+ * properties. Called when creating/claiming the muxer
+ * in create_elements() */
+static void
+update_muxer_properties (GstSplitMuxSink * sink)
+{
+  GObjectClass *klass;
+  GstClockTime threshold_time;
+
+  sink->muxer_has_reserved_props = FALSE;
+  if (sink->muxer == NULL)
+    return;
+  /* Only muxers exposing both reserved-* properties support this */
+  klass = G_OBJECT_GET_CLASS (sink->muxer);
+  if (g_object_class_find_property (klass, "reserved-max-duration") == NULL)
+    return;
+  if (g_object_class_find_property (klass,
+          "reserved-duration-remaining") == NULL)
+    return;
+  sink->muxer_has_reserved_props = TRUE;
+
+  /* Snapshot the threshold under the object lock, and log the value we
+   * actually read (the previous code logged an unlocked re-read). */
+  GST_OBJECT_LOCK (sink);
+  threshold_time = sink->threshold_time;
+  GST_OBJECT_UNLOCK (sink);
+
+  GST_LOG_OBJECT (sink, "Setting muxer reserved time to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (threshold_time));
+
+  if (threshold_time > 0) {
+    /* Tell the muxer how much space to reserve */
+    GstClockTime muxer_threshold = threshold_time;
+    g_object_set (sink->muxer, "reserved-max-duration", muxer_threshold, NULL);
+  }
+}
+
+/* GObject property setter. Scalar and string properties are guarded by
+ * the object lock; the muxer-pad-map uses the splitmux lock because it is
+ * read from the streaming threads. */
+static void
+gst_splitmux_sink_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstSplitMuxSink *splitmux = GST_SPLITMUX_SINK (object);
+
+  switch (prop_id) {
+    case PROP_LOCATION:{
+      GST_OBJECT_LOCK (splitmux);
+      g_free (splitmux->location);
+      splitmux->location = g_value_dup_string (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    }
+    case PROP_START_INDEX:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->start_index = g_value_get_int (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_SIZE_BYTES:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->threshold_bytes = g_value_get_uint64 (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_SIZE_TIME:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->threshold_time = g_value_get_uint64 (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_SIZE_TIMECODE:
+      /* Replace the timecode interval string and re-derive the parsed
+       * interval; on a parse failure the string is discarded too. */
+      GST_OBJECT_LOCK (splitmux);
+      g_free (splitmux->threshold_timecode_str);
+      /* will be calculated later */
+      g_clear_pointer (&splitmux->tc_interval,
+          gst_video_time_code_interval_free);
+      gst_splitmux_reset_timecode (splitmux);
+
+      splitmux->threshold_timecode_str = g_value_dup_string (value);
+      if (splitmux->threshold_timecode_str) {
+        splitmux->tc_interval =
+            gst_video_time_code_interval_new_from_string
+            (splitmux->threshold_timecode_str);
+        if (!splitmux->tc_interval) {
+          g_warning ("Wrong timecode string %s",
+              splitmux->threshold_timecode_str);
+          g_free (splitmux->threshold_timecode_str);
+          splitmux->threshold_timecode_str = NULL;
+        }
+      }
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SEND_KEYFRAME_REQUESTS:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->send_keyframe_requests = g_value_get_boolean (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_FILES:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->max_files = g_value_get_uint (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_OVERHEAD:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->mux_overhead = g_value_get_double (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_USE_ROBUST_MUXING:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->use_robust_muxing = g_value_get_boolean (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      /* NOTE(review): use_robust_muxing is re-read here after the lock
+       * is dropped — racy in principle; verify whether this matters */
+      if (splitmux->use_robust_muxing)
+        update_muxer_properties (splitmux);
+      break;
+    case PROP_ALIGNMENT_THRESHOLD:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->alignment_threshold = g_value_get_uint64 (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK:
+      /* Takes ownership of the floating ref (ref_sink) on the new sink */
+      GST_OBJECT_LOCK (splitmux);
+      gst_clear_object (&splitmux->provided_sink);
+      splitmux->provided_sink = g_value_get_object (value);
+      if (splitmux->provided_sink)
+        gst_object_ref_sink (splitmux->provided_sink);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER:
+      /* Takes ownership of the floating ref (ref_sink) on the new muxer */
+      GST_OBJECT_LOCK (splitmux);
+      gst_clear_object (&splitmux->provided_muxer);
+      splitmux->provided_muxer = g_value_get_object (value);
+      if (splitmux->provided_muxer)
+        gst_object_ref_sink (splitmux->provided_muxer);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_RESET_MUXER:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->reset_muxer = g_value_get_boolean (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_ASYNC_FINALIZE:
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->async_finalize = g_value_get_boolean (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_FACTORY:
+      GST_OBJECT_LOCK (splitmux);
+      if (splitmux->muxer_factory)
+        g_free (splitmux->muxer_factory);
+      splitmux->muxer_factory = g_value_dup_string (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_PRESET:
+      GST_OBJECT_LOCK (splitmux);
+      if (splitmux->muxer_preset)
+        g_free (splitmux->muxer_preset);
+      splitmux->muxer_preset = g_value_dup_string (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_PROPERTIES:
+      /* Deep-copy the boxed structure; NULL clears the property */
+      GST_OBJECT_LOCK (splitmux);
+      if (splitmux->muxer_properties)
+        gst_structure_free (splitmux->muxer_properties);
+      if (gst_value_get_structure (value))
+        splitmux->muxer_properties =
+            gst_structure_copy (gst_value_get_structure (value));
+      else
+        splitmux->muxer_properties = NULL;
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK_FACTORY:
+      GST_OBJECT_LOCK (splitmux);
+      if (splitmux->sink_factory)
+        g_free (splitmux->sink_factory);
+      splitmux->sink_factory = g_value_dup_string (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK_PRESET:
+      GST_OBJECT_LOCK (splitmux);
+      if (splitmux->sink_preset)
+        g_free (splitmux->sink_preset);
+      splitmux->sink_preset = g_value_dup_string (value);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK_PROPERTIES:
+      /* Deep-copy the boxed structure; NULL clears the property */
+      GST_OBJECT_LOCK (splitmux);
+      if (splitmux->sink_properties)
+        gst_structure_free (splitmux->sink_properties);
+      if (gst_value_get_structure (value))
+        splitmux->sink_properties =
+            gst_structure_copy (gst_value_get_structure (value));
+      else
+        splitmux->sink_properties = NULL;
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXERPAD_MAP:
+    {
+      /* Guarded by the splitmux lock, not the object lock, because the
+       * pad map is consulted from streaming-thread code */
+      const GstStructure *s = gst_value_get_structure (value);
+      GST_SPLITMUX_LOCK (splitmux);
+      if (splitmux->muxerpad_map) {
+        gst_structure_free (splitmux->muxerpad_map);
+      }
+      if (s)
+        splitmux->muxerpad_map = gst_structure_copy (s);
+      else
+        splitmux->muxerpad_map = NULL;
+      GST_SPLITMUX_UNLOCK (splitmux);
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter: mirrors the setter's locking — object lock
+ * for everything except the muxer-pad-map, which uses the splitmux lock. */
+static void
+gst_splitmux_sink_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstSplitMuxSink *splitmux = GST_SPLITMUX_SINK (object);
+
+  switch (prop_id) {
+    case PROP_LOCATION:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_string (value, splitmux->location);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_START_INDEX:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_int (value, splitmux->start_index);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_SIZE_BYTES:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_uint64 (value, splitmux->threshold_bytes);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_SIZE_TIME:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_uint64 (value, splitmux->threshold_time);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_SIZE_TIMECODE:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_string (value, splitmux->threshold_timecode_str);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SEND_KEYFRAME_REQUESTS:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_boolean (value, splitmux->send_keyframe_requests);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MAX_FILES:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_uint (value, splitmux->max_files);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_OVERHEAD:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_double (value, splitmux->mux_overhead);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_USE_ROBUST_MUXING:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_boolean (value, splitmux->use_robust_muxing);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_ALIGNMENT_THRESHOLD:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_uint64 (value, splitmux->alignment_threshold);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_object (value, splitmux->provided_sink);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_object (value, splitmux->provided_muxer);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_RESET_MUXER:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_boolean (value, splitmux->reset_muxer);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_ASYNC_FINALIZE:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_boolean (value, splitmux->async_finalize);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_FACTORY:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_string (value, splitmux->muxer_factory);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_PRESET:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_string (value, splitmux->muxer_preset);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXER_PROPERTIES:
+      GST_OBJECT_LOCK (splitmux);
+      gst_value_set_structure (value, splitmux->muxer_properties);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK_FACTORY:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_string (value, splitmux->sink_factory);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK_PRESET:
+      GST_OBJECT_LOCK (splitmux);
+      g_value_set_string (value, splitmux->sink_preset);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_SINK_PROPERTIES:
+      GST_OBJECT_LOCK (splitmux);
+      gst_value_set_structure (value, splitmux->sink_properties);
+      GST_OBJECT_UNLOCK (splitmux);
+      break;
+    case PROP_MUXERPAD_MAP:
+      GST_SPLITMUX_LOCK (splitmux);
+      gst_value_set_structure (value, splitmux->muxerpad_map);
+      GST_SPLITMUX_UNLOCK (splitmux);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Convenience function: convert @val to a signed running time using
+ * @segment. Returns GST_CLOCK_STIME_NONE if @val is invalid. */
+static inline GstClockTimeDiff
+my_segment_to_running_time (GstSegment * segment, GstClockTime val)
+{
+  GstClockTimeDiff res = GST_CLOCK_STIME_NONE;
+
+  if (GST_CLOCK_TIME_IS_VALID (val)) {
+    /* gst_segment_to_running_time_full() returns a tri-state sign
+     * (positive, 0 or negative), so use a gint, not a gboolean */
+    gint sign =
+        gst_segment_to_running_time_full (segment, GST_FORMAT_TIME, val, &val);
+    if (sign > 0)
+      res = val;
+    else if (sign < 0)
+      res = -val;
+  }
+  return res;
+}
+
+static void
+mq_stream_ctx_reset (MqStreamCtx * ctx)
+{
+  /* Return a stream context to its pristine state: undefined in/out
+   * segments, invalid running times, and an empty buffer queue. */
+  gst_segment_init (&ctx->in_segment, GST_FORMAT_UNDEFINED);
+  gst_segment_init (&ctx->out_segment, GST_FORMAT_UNDEFINED);
+  ctx->in_running_time = ctx->out_running_time = GST_CLOCK_STIME_NONE;
+  g_queue_foreach (&ctx->queued_bufs, (GFunc) mq_stream_buf_free, NULL);
+  g_queue_clear (&ctx->queued_bufs);
+}
+
+/* Allocate a zero-filled per-stream context bound to @splitmux, with an
+ * empty buffer queue and reset segment/timing state. Caller frees with
+ * mq_stream_ctx_free(). */
+static MqStreamCtx *
+mq_stream_ctx_new (GstSplitMuxSink * splitmux)
+{
+  MqStreamCtx *new_ctx = g_new0 (MqStreamCtx, 1);
+
+  new_ctx->splitmux = splitmux;
+  g_queue_init (&new_ctx->queued_bufs);
+  mq_stream_ctx_reset (new_ctx);
+
+  return new_ctx;
+}
+
+/* Free a stream context: tear down and remove its queue element (if it
+ * is still our child), drop the pad refs, and free any queued buffer
+ * descriptors. */
+static void
+mq_stream_ctx_free (MqStreamCtx * ctx)
+{
+  if (ctx->q) {
+    GstObject *parent = gst_object_get_parent (GST_OBJECT (ctx->q));
+
+    g_signal_handler_disconnect (ctx->q, ctx->q_overrun_id);
+
+    if (parent == GST_OBJECT_CAST (ctx->splitmux)) {
+      gst_element_set_locked_state (ctx->q, TRUE);
+      gst_element_set_state (ctx->q, GST_STATE_NULL);
+      gst_bin_remove (GST_BIN (ctx->splitmux), ctx->q);
+    }
+    /* gst_object_get_parent() returned a new ref (or NULL): release it
+     * unconditionally. The old code only unreffed it inside the branch
+     * above, leaking the ref if the queue had some other parent. */
+    if (parent)
+      gst_object_unref (parent);
+    gst_object_unref (ctx->q);
+  }
+  gst_object_unref (ctx->sinkpad);
+  gst_object_unref (ctx->srcpad);
+  g_queue_foreach (&ctx->queued_bufs, (GFunc) mq_stream_buf_free, NULL);
+  g_queue_clear (&ctx->queued_bufs);
+  g_free (ctx);
+}
+
+/* Post a splitmuxsink-fragment-opened / -closed element message on the
+ * bus, carrying the fragment location (if the sink has one), the
+ * reference stream's output running time, and the sink element. */
+static void
+send_fragment_opened_closed_msg (GstSplitMuxSink * splitmux, gboolean opened,
+    GstElement * sink)
+{
+  gchar *location = NULL;
+  GstMessage *msg;
+  const gchar *msg_name = opened ?
+      "splitmuxsink-fragment-opened" : "splitmuxsink-fragment-closed";
+  GstClockTime running_time = GST_CLOCK_TIME_NONE;
+
+  /* If it's in the middle of a teardown, the reference_ctx might have
+   * become NULL - don't dereference it unchecked (the old code did and
+   * could crash here before the guard below was reached) */
+  if (splitmux->reference_ctx)
+    running_time = splitmux->reference_ctx->out_running_time;
+
+  if (!opened) {
+    /* For 'closed' messages, prefer the running time stashed on the sink */
+    GstClockTime *rtime = g_object_get_qdata (G_OBJECT (sink), RUNNING_TIME);
+    if (rtime)
+      running_time = *rtime;
+  }
+
+  if (g_object_class_find_property (G_OBJECT_GET_CLASS (sink),
+          "location") != NULL)
+    g_object_get (sink, "location", &location, NULL);
+
+  GST_DEBUG_OBJECT (splitmux,
+      "Sending %s message. Running time %" GST_TIME_FORMAT " location %s",
+      msg_name, GST_TIME_ARGS (running_time), GST_STR_NULL (location));
+
+  /* Only post while we still have a reference context */
+  if (splitmux->reference_ctx) {
+    msg = gst_message_new_element (GST_OBJECT (splitmux),
+        gst_structure_new (msg_name,
+            "location", G_TYPE_STRING, location,
+            "running-time", GST_TYPE_CLOCK_TIME, running_time,
+            "sink", GST_TYPE_ELEMENT, sink, NULL));
+    gst_element_post_message (GST_ELEMENT_CAST (splitmux), msg);
+  }
+
+  g_free (location);
+}
+
+/* Deliver an EOS event to @helper->pad (or, if no pad was stashed, to
+ * the peer of the context's srcpad). Invoked via gst_element_call_async()
+ * from eos_context_async(), i.e. outside the streaming thread. Consumes
+ * the pad ref and frees @helper. */
+static void
+send_eos_async (GstSplitMuxSink * splitmux, AsyncEosHelper * helper)
+{
+  GstEvent *eos;
+  GstPad *pad;
+  MqStreamCtx *ctx;
+
+  eos = gst_event_new_eos ();
+  pad = helper->pad;
+  ctx = helper->ctx;
+
+  /* Resolve the peer pad under the splitmux lock if none was provided */
+  GST_SPLITMUX_LOCK (splitmux);
+  if (!pad)
+    pad = gst_pad_get_peer (ctx->srcpad);
+  GST_SPLITMUX_UNLOCK (splitmux);
+
+  gst_pad_send_event (pad, eos);
+  GST_INFO_OBJECT (splitmux, "Sent async EOS on %" GST_PTR_FORMAT, pad);
+
+  gst_object_unref (pad);
+  g_free (helper);
+}
+
+/* Called with lock held, drops the lock to send EOS to the
+ * pad
+ */
+static void
+send_eos (GstSplitMuxSink * splitmux, MqStreamCtx * ctx)
+{
+  GstEvent *eos;
+  GstPad *pad;
+
+  eos = gst_event_new_eos ();
+  pad = gst_pad_get_peer (ctx->srcpad);
+
+  /* Mark the context EOS before releasing the lock, so other threads
+   * observing the context see a consistent state while we're unlocked */
+  ctx->out_eos = TRUE;
+
+  GST_INFO_OBJECT (splitmux, "Sending EOS on %" GST_PTR_FORMAT, pad);
+  /* gst_pad_send_event() can block/re-enter, so do it unlocked */
+  GST_SPLITMUX_UNLOCK (splitmux);
+  gst_pad_send_event (pad, eos);
+  GST_SPLITMUX_LOCK (splitmux);
+
+  gst_object_unref (pad);
+}
+
+/* Called with lock held. Schedules an EOS event to the ctx pad
+ * to happen in another thread (via gst_element_call_async() and
+ * send_eos_async(), which takes ownership of the helper and the
+ * sinkpad reference). */
+static void
+eos_context_async (MqStreamCtx * ctx, GstSplitMuxSink * splitmux)
+{
+  AsyncEosHelper *helper = g_new0 (AsyncEosHelper, 1);
+  GstPad *srcpad, *sinkpad;
+
+  srcpad = ctx->srcpad;
+  sinkpad = gst_pad_get_peer (srcpad);
+
+  helper->ctx = ctx;
+  helper->pad = sinkpad;        /* Takes the reference */
+
+  /* Flag the context now so all_contexts_are_async_eos() counts it */
+  ctx->out_eos_async_done = TRUE;
+
+  /* There used to be a bug here, where we had to explicitly remove
+   * the SINK flag so that GstBin would ignore it for EOS purposes.
+   * That fixed a race where if splitmuxsink really reaches EOS
+   * before an asynchronous background element has finished, then
+   * the bin wouldn't actually send EOS to the pipeline. Even after
+   * finishing and removing the old element, the bin didn't re-check
+   * EOS status on removing a SINK element. That bug was fixed
+   * in core. */
+  GST_DEBUG_OBJECT (splitmux, "scheduled EOS to pad %" GST_PTR_FORMAT " ctx %p",
+      sinkpad, ctx);
+
+  g_assert_nonnull (helper->pad);
+  gst_element_call_async (GST_ELEMENT (splitmux),
+      (GstElementCallAsyncFunc) send_eos_async, helper, NULL);
+}
+
+/* Called with lock held. TRUE iff every context has a pending (or
+ * already delivered) async EOS event; trivially TRUE with no contexts. */
+static gboolean
+all_contexts_are_async_eos (GstSplitMuxSink * splitmux)
+{
+  GList *walk;
+
+  for (walk = splitmux->contexts; walk != NULL; walk = walk->next) {
+    MqStreamCtx *ctx = walk->data;
+
+    if (!ctx->out_eos_async_done)
+      return FALSE;
+  }
+  return TRUE;
+}
+
+/* Called with splitmux lock held to check if this output
+ * context needs to sleep to wait for the release of the
+ * next GOP, or to send EOS to close out the current file.
+ *
+ * Loops until the context may output its pending data (GST_FLOW_OK),
+ * is flushing/stopped (GST_FLOW_FLUSHING), or a fragment restart fails.
+ * Several branches drop and re-take the splitmux lock.
+ */
+static GstFlowReturn
+complete_or_wait_on_out (GstSplitMuxSink * splitmux, MqStreamCtx * ctx)
+{
+  /* Caps changes are passed through without gating */
+  if (ctx->caps_change)
+    return GST_FLOW_OK;
+
+  do {
+    /* When first starting up, the reference stream has to output
+     * the first buffer to prepare the muxer and sink */
+    gboolean can_output = (ctx->is_reference || splitmux->ready_for_output);
+    GstClockTimeDiff my_max_out_running_time = splitmux->max_out_running_time;
+
+    /* Apply the alignment threshold (when meaningful) to the limit this
+     * context compares against */
+    if (!(splitmux->max_out_running_time == 0 ||
+            splitmux->max_out_running_time == GST_CLOCK_STIME_NONE ||
+            splitmux->alignment_threshold == 0 ||
+            splitmux->max_out_running_time < splitmux->alignment_threshold)) {
+      my_max_out_running_time -= splitmux->alignment_threshold;
+      GST_LOG_OBJECT (ctx->srcpad,
+          "Max out running time currently %" GST_STIME_FORMAT
+          ", with threshold applied it is %" GST_STIME_FORMAT,
+          GST_STIME_ARGS (splitmux->max_out_running_time),
+          GST_STIME_ARGS (my_max_out_running_time));
+    }
+
+    if (ctx->flushing
+        || splitmux->output_state == SPLITMUX_OUTPUT_STATE_STOPPED)
+      return GST_FLOW_FLUSHING;
+
+    GST_LOG_OBJECT (ctx->srcpad,
+        "Checking running time %" GST_STIME_FORMAT " against max %"
+        GST_STIME_FORMAT, GST_STIME_ARGS (ctx->out_running_time),
+        GST_STIME_ARGS (my_max_out_running_time));
+
+    if (can_output) {
+      /* Within the currently released output window: go ahead */
+      if (splitmux->max_out_running_time == GST_CLOCK_STIME_NONE ||
+          ctx->out_running_time < my_max_out_running_time) {
+        return GST_FLOW_OK;
+      }
+
+      switch (splitmux->output_state) {
+        case SPLITMUX_OUTPUT_STATE_OUTPUT_GOP:
+          /* We only get here if we've finished outputting a GOP and need to know
+           * what to do next */
+          splitmux->output_state = SPLITMUX_OUTPUT_STATE_AWAITING_COMMAND;
+          GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+          continue;
+
+        case SPLITMUX_OUTPUT_STATE_ENDING_FILE:
+        case SPLITMUX_OUTPUT_STATE_ENDING_STREAM:
+          /* We've reached the max out running_time to get here, so end this file now */
+          if (ctx->out_eos == FALSE) {
+            if (splitmux->async_finalize) {
+              /* We must set EOS asynchronously at this point. We cannot defer
+               * it, because we need all contexts to wake up, for the
+               * reference context to eventually give us something at
+               * START_NEXT_FILE. Otherwise, collectpads might choose another
+               * context to give us the first buffer, and format-location-full
+               * will not contain a valid sample. */
+              g_object_set_qdata ((GObject *) splitmux->sink, EOS_FROM_US,
+                  GINT_TO_POINTER (1));
+              eos_context_async (ctx, splitmux);
+              if (all_contexts_are_async_eos (splitmux)) {
+                GST_INFO_OBJECT (splitmux,
+                    "All contexts are async_eos. Moving to the next file.");
+                /* We can start the next file once we've asked each pad to go EOS */
+                splitmux->output_state = SPLITMUX_OUTPUT_STATE_START_NEXT_FILE;
+                GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+                continue;
+              }
+            } else {
+              /* Synchronous path: send_eos() drops/re-takes the lock */
+              send_eos (splitmux, ctx);
+              continue;
+            }
+          } else {
+            GST_INFO_OBJECT (splitmux,
+                "At end-of-file state, but context %p is already EOS", ctx);
+          }
+          break;
+        case SPLITMUX_OUTPUT_STATE_START_NEXT_FILE:
+          if (ctx->is_reference) {
+            GstFlowReturn ret = GST_FLOW_OK;
+
+            /* Special handling on the reference ctx to start new fragments
+             * and collect commands from the command queue */
+            /* drops the splitmux lock briefly: */
+            /* We must have reference ctx in order for format-location-full to
+             * have a sample */
+            ret = start_next_fragment (splitmux, ctx);
+            if (ret != GST_FLOW_OK)
+              return ret;
+
+            continue;
+          }
+          break;
+        case SPLITMUX_OUTPUT_STATE_AWAITING_COMMAND:{
+          /* Pop queued output commands until one changes the state, or
+           * sleep until a command arrives */
+          do {
+            SplitMuxOutputCommand *cmd =
+                g_queue_pop_tail (&splitmux->out_cmd_q);
+            if (cmd != NULL) {
+              /* If we pop the last command, we need to make our queues bigger */
+              if (g_queue_get_length (&splitmux->out_cmd_q) == 0)
+                grow_blocked_queues (splitmux);
+
+              if (cmd->start_new_fragment) {
+                if (splitmux->muxed_out_bytes > 0) {
+                  GST_DEBUG_OBJECT (splitmux, "Got cmd to start new fragment");
+                  splitmux->output_state = SPLITMUX_OUTPUT_STATE_ENDING_FILE;
+                } else {
+                  GST_DEBUG_OBJECT (splitmux,
+                      "Got cmd to start new fragment, but fragment is empty - ignoring.");
+                }
+              } else {
+                GST_DEBUG_OBJECT (splitmux,
+                    "Got new output cmd for time %" GST_STIME_FORMAT,
+                    GST_STIME_ARGS (cmd->max_output_ts));
+
+                /* Extend the output range immediately */
+                splitmux->max_out_running_time = cmd->max_output_ts;
+                splitmux->output_state = SPLITMUX_OUTPUT_STATE_OUTPUT_GOP;
+              }
+              GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+
+              out_cmd_buf_free (cmd);
+              break;
+            } else {
+              GST_SPLITMUX_WAIT_OUTPUT (splitmux);
+            }
+          } while (!ctx->flushing && splitmux->output_state ==
+              SPLITMUX_OUTPUT_STATE_AWAITING_COMMAND);
+          /* loop and re-check the state */
+          continue;
+        }
+        case SPLITMUX_OUTPUT_STATE_STOPPED:
+          return GST_FLOW_FLUSHING;
+      }
+    } else {
+      GST_LOG_OBJECT (ctx->srcpad, "Not yet ready for output");
+    }
+
+    /* Nothing to do yet: sleep until the output window or state changes */
+    GST_INFO_OBJECT (ctx->srcpad,
+        "Sleeping for running time %"
+        GST_STIME_FORMAT " (max %" GST_STIME_FORMAT ") or state change.",
+        GST_STIME_ARGS (ctx->out_running_time),
+        GST_STIME_ARGS (splitmux->max_out_running_time));
+    GST_SPLITMUX_WAIT_OUTPUT (splitmux);
+    GST_INFO_OBJECT (ctx->srcpad,
+        "Woken for new max running time %" GST_STIME_FORMAT,
+        GST_STIME_ARGS (splitmux->max_out_running_time));
+  }
+  while (1);
+
+  return GST_FLOW_OK;
+}
+
/* Compute the running time at which the next timecode-based fragment split
 * should occur: cur_tc advanced by the configured tc-interval, expressed as
 * a running time relative to 'running_time'.
 *
 * Returns GST_CLOCK_TIME_NONE if there is no current timecode / no interval
 * configured, or if the target timecode could not be computed.
 * If next_tc is non-NULL, ownership of the computed target timecode is
 * transferred to the caller (who must free it); otherwise it is freed here.
 */
static GstClockTime
calculate_next_max_timecode (GstSplitMuxSink * splitmux,
    const GstVideoTimeCode * cur_tc, GstClockTime running_time,
    GstVideoTimeCode ** next_tc)
{
  GstVideoTimeCode *target_tc;
  GstClockTime cur_tc_time, target_tc_time, next_max_tc_time;

  if (cur_tc == NULL || splitmux->tc_interval == NULL)
    return GST_CLOCK_TIME_NONE;

  target_tc = gst_video_time_code_add_interval (cur_tc, splitmux->tc_interval);
  if (!target_tc) {
    GST_ELEMENT_ERROR (splitmux,
        STREAM, FAILED, (NULL), ("Couldn't calculate target timecode"));
    return GST_CLOCK_TIME_NONE;
  }

  /* Convert to ns */
  target_tc_time = gst_video_time_code_nsec_since_daily_jam (target_tc);
  cur_tc_time = gst_video_time_code_nsec_since_daily_jam (cur_tc);

  /* Add running_time, accounting for wraparound. */
  if (target_tc_time >= cur_tc_time) {
    next_max_tc_time = target_tc_time - cur_tc_time + running_time;
  } else {
    /* The target timecode wrapped past midnight: the distance to it is
     * (one day - cur) + target */
    GstClockTime day_in_ns = 24 * 60 * 60 * GST_SECOND;

    if ((cur_tc->config.flags & GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME) &&
        (cur_tc->config.fps_d == 1001)) {
      /* Checking fps_d is probably unneeded, but better safe than sorry
       * (e.g. someone accidentally set a flag) */
      GstVideoTimeCode *tc_for_offset;

      /* Here, the duration of the 24:00:00;00 timecode isn't exactly one day,
       * but slightly less. Calculate that duration from a fake timecode. The
       * problem is that 24:00:00;00 isn't a valid timecode, so the workaround
       * is to add one frame to 23:59:59;29 */
      tc_for_offset =
          gst_video_time_code_new (cur_tc->config.fps_n, cur_tc->config.fps_d,
          NULL, cur_tc->config.flags, 23, 59, 59,
          cur_tc->config.fps_n / cur_tc->config.fps_d, 0);
      day_in_ns =
          gst_video_time_code_nsec_since_daily_jam (tc_for_offset) +
          gst_util_uint64_scale (GST_SECOND, cur_tc->config.fps_d,
          cur_tc->config.fps_n);
      gst_video_time_code_free (tc_for_offset);
    }
    next_max_tc_time = day_in_ns - cur_tc_time + target_tc_time + running_time;
  }

  GST_INFO_OBJECT (splitmux, "Next max TC time: %" GST_TIME_FORMAT
      " from ref TC: %" GST_TIME_FORMAT, GST_TIME_ARGS (next_max_tc_time),
      GST_TIME_ARGS (cur_tc_time));
  if (next_tc)
    *next_tc = target_tc;
  else
    gst_video_time_code_free (target_tc);

  return next_max_tc_time;
}
+
/* Ask upstream (via a force-key-unit event on the reference sinkpad) to
 * produce a keyframe at the next fragment boundary, so splits land exactly
 * on GOP edges.
 *
 * The boundary is derived either from the timecode interval (if configured
 * and a valid input timecode is available) or from threshold-time. A small
 * tolerance (5us) compensates for timecode -> running-time rounding.
 * Requests that would land before the already-expected next keyframe are
 * suppressed to avoid spamming the encoder.
 *
 * Returns TRUE if no request was needed or the event was pushed
 * successfully, FALSE if pushing the event failed.
 * NOTE(review): reads/writes splitmux->next_fku_time - presumably called
 * with the splitmux lock held; confirm at call sites. */
static gboolean
request_next_keyframe (GstSplitMuxSink * splitmux, GstBuffer * buffer,
    GstClockTime running_time)
{
  GstEvent *ev;
  GstClockTime target_time;
  gboolean timecode_based = FALSE;
  GstClockTime max_tc_time = GST_CLOCK_TIME_NONE;
  GstClockTime next_max_tc_time = GST_CLOCK_TIME_NONE;
  GstClockTime next_fku_time = GST_CLOCK_TIME_NONE;
  GstClockTime tc_rounding_error = 5 * GST_USECOND;

  if (!splitmux->send_keyframe_requests)
    return TRUE;

  if (splitmux->tc_interval) {
    if (splitmux->in_tc && gst_video_time_code_is_valid (splitmux->in_tc)) {
      GstVideoTimeCode *next_tc = NULL;
      max_tc_time =
          calculate_next_max_timecode (splitmux, splitmux->in_tc,
          running_time, &next_tc);

      /* calculate the next expected keyframe time to prevent too early fku
       * event */
      if (GST_CLOCK_TIME_IS_VALID (max_tc_time) && next_tc) {
        next_max_tc_time =
            calculate_next_max_timecode (splitmux, next_tc, max_tc_time, NULL);
      }
      if (next_tc)
        gst_video_time_code_free (next_tc);

      timecode_based = GST_CLOCK_TIME_IS_VALID (max_tc_time) &&
          GST_CLOCK_TIME_IS_VALID (next_max_tc_time);
    } else {
      /* This can happen in the presence of GAP events that trigger
       * a new fragment start */
      GST_WARNING_OBJECT (splitmux,
          "No buffer available to calculate next timecode");
    }
  }

  /* No way to pick a target: neither a time threshold nor a usable
   * timecode, or splitting is byte-based */
  if ((splitmux->threshold_time == 0 && !timecode_based)
      || splitmux->threshold_bytes != 0)
    return TRUE;

  if (timecode_based) {
    /* We might have rounding errors: aim slightly earlier */
    if (max_tc_time >= tc_rounding_error) {
      target_time = max_tc_time - tc_rounding_error;
    } else {
      /* unreliable target time */
      GST_DEBUG_OBJECT (splitmux, "tc time %" GST_TIME_FORMAT
          " is smaller than allowed rounding error, set it to zero",
          GST_TIME_ARGS (max_tc_time));
      target_time = 0;
    }

    if (next_max_tc_time >= tc_rounding_error) {
      next_fku_time = next_max_tc_time - tc_rounding_error;
    } else {
      /* unreliable target time */
      GST_DEBUG_OBJECT (splitmux, "next tc time %" GST_TIME_FORMAT
          " is smaller than allowed rounding error, set it to zero",
          GST_TIME_ARGS (next_max_tc_time));
      next_fku_time = 0;
    }
  } else {
    target_time = running_time + splitmux->threshold_time;
  }

  /* Suppress requests that precede the keyframe we already expect */
  if (GST_CLOCK_TIME_IS_VALID (splitmux->next_fku_time)) {
    GstClockTime allowed_time = splitmux->next_fku_time;

    if (timecode_based) {
      if (allowed_time >= tc_rounding_error) {
        allowed_time -= tc_rounding_error;
      } else {
        /* unreliable next force key unit time */
        GST_DEBUG_OBJECT (splitmux, "expected next force key unit time %"
            GST_TIME_FORMAT
            " is smaller than allowed rounding error, set it to zero",
            GST_TIME_ARGS (splitmux->next_fku_time));
        allowed_time = 0;
      }
    }

    if (target_time < allowed_time) {
      GST_LOG_OBJECT (splitmux, "Target time %" GST_TIME_FORMAT
          " is smaller than expected next keyframe time %" GST_TIME_FORMAT
          ", rounding error compensated next keyframe time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (target_time),
          GST_TIME_ARGS (splitmux->next_fku_time),
          GST_TIME_ARGS (allowed_time));

      return TRUE;
    } else if (allowed_time != splitmux->next_fku_time &&
        target_time < splitmux->next_fku_time) {
      GST_DEBUG_OBJECT (splitmux, "Target time %" GST_TIME_FORMAT
          " is smaller than expected next keyframe time %" GST_TIME_FORMAT
          ", but the difference is smaller than allowed rounding error",
          GST_TIME_ARGS (target_time), GST_TIME_ARGS (splitmux->next_fku_time));
    }
  }

  if (!timecode_based) {
    next_fku_time = target_time + splitmux->threshold_time;
  }

  splitmux->next_fku_time = next_fku_time;

  ev = gst_video_event_new_upstream_force_key_unit (target_time, TRUE, 0);
  GST_INFO_OBJECT (splitmux, "Requesting keyframe at %" GST_TIME_FORMAT
      ", the next expected keyframe is %" GST_TIME_FORMAT,
      GST_TIME_ARGS (target_time), GST_TIME_ARGS (next_fku_time));
  return gst_pad_push_event (splitmux->reference_ctx->sinkpad, ev);
}
+
+static GstPadProbeReturn
+handle_mq_output (GstPad * pad, GstPadProbeInfo * info, MqStreamCtx * ctx)
+{
+ GstSplitMuxSink *splitmux = ctx->splitmux;
+ MqStreamBuf *buf_info = NULL;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ GST_LOG_OBJECT (pad, "Fired probe type 0x%x", info->type);
+
+ /* FIXME: Handle buffer lists, until then make it clear they won't work */
+ if (info->type & GST_PAD_PROBE_TYPE_BUFFER_LIST) {
+ g_warning ("Buffer list handling not implemented");
+ return GST_PAD_PROBE_DROP;
+ }
+ if (info->type & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM ||
+ info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
+ GstEvent *event = gst_pad_probe_info_get_event (info);
+ gboolean locked = FALSE, wait = !ctx->is_reference;
+
+ GST_LOG_OBJECT (pad, "Event %" GST_PTR_FORMAT, event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ gst_event_copy_segment (event, &ctx->out_segment);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ GST_SPLITMUX_LOCK (splitmux);
+ locked = TRUE;
+ gst_segment_init (&ctx->out_segment, GST_FORMAT_UNDEFINED);
+ g_queue_foreach (&ctx->queued_bufs, (GFunc) mq_stream_buf_free, NULL);
+ g_queue_clear (&ctx->queued_bufs);
+ g_queue_clear (&ctx->queued_bufs);
+ /* If this is the reference context, we just threw away any queued keyframes */
+ if (ctx->is_reference)
+ splitmux->queued_keyframes = 0;
+ ctx->flushing = FALSE;
+ wait = FALSE;
+ break;
+ case GST_EVENT_FLUSH_START:
+ GST_SPLITMUX_LOCK (splitmux);
+ locked = TRUE;
+ GST_LOG_OBJECT (pad, "Flush start");
+ ctx->flushing = TRUE;
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+ GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+ break;
+ case GST_EVENT_EOS:
+ GST_SPLITMUX_LOCK (splitmux);
+ locked = TRUE;
+ if (splitmux->output_state == SPLITMUX_OUTPUT_STATE_STOPPED)
+ goto beach;
+ ctx->out_eos = TRUE;
+
+ if (ctx == splitmux->reference_ctx) {
+ splitmux->output_state = SPLITMUX_OUTPUT_STATE_ENDING_STREAM;
+ GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+ }
+
+ GST_INFO_OBJECT (splitmux,
+ "Have EOS event at pad %" GST_PTR_FORMAT " ctx %p", pad, ctx);
+ break;
+ case GST_EVENT_GAP:{
+ GstClockTime gap_ts;
+ GstClockTimeDiff rtime;
+
+ gst_event_parse_gap (event, &gap_ts, NULL);
+ if (gap_ts == GST_CLOCK_TIME_NONE)
+ break;
+
+ GST_SPLITMUX_LOCK (splitmux);
+ locked = TRUE;
+
+ if (splitmux->output_state == SPLITMUX_OUTPUT_STATE_STOPPED)
+ goto beach;
+
+ /* When we get a gap event on the
+ * reference stream and we're trying to open a
+ * new file, we need to store it until we get
+ * the buffer afterwards
+ */
+ if (ctx->is_reference &&
+ (splitmux->output_state != SPLITMUX_OUTPUT_STATE_OUTPUT_GOP)) {
+ GST_DEBUG_OBJECT (pad, "Storing GAP event until buffer arrives");
+ gst_event_replace (&ctx->pending_gap, event);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ return GST_PAD_PROBE_HANDLED;
+ }
+
+ rtime = my_segment_to_running_time (&ctx->out_segment, gap_ts);
+
+ GST_LOG_OBJECT (pad, "Have GAP w/ ts %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (rtime));
+
+ if (rtime != GST_CLOCK_STIME_NONE) {
+ ctx->out_running_time = rtime;
+ complete_or_wait_on_out (splitmux, ctx);
+ }
+ break;
+ }
+ case GST_EVENT_CUSTOM_DOWNSTREAM:{
+ const GstStructure *s;
+ GstClockTimeDiff ts = 0;
+
+ s = gst_event_get_structure (event);
+ if (!gst_structure_has_name (s, "splitmuxsink-unblock"))
+ break;
+
+ gst_structure_get_int64 (s, "timestamp", &ts);
+
+ GST_SPLITMUX_LOCK (splitmux);
+ locked = TRUE;
+
+ if (splitmux->output_state == SPLITMUX_OUTPUT_STATE_STOPPED)
+ goto beach;
+ ctx->out_running_time = ts;
+ if (!ctx->is_reference)
+ ret = complete_or_wait_on_out (splitmux, ctx);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ GST_PAD_PROBE_INFO_FLOW_RETURN (info) = ret;
+ return GST_PAD_PROBE_DROP;
+ }
+ case GST_EVENT_CAPS:{
+ GstPad *peer;
+
+ if (!ctx->is_reference)
+ break;
+
+ peer = gst_pad_get_peer (pad);
+ if (peer) {
+ gboolean ok = gst_pad_send_event (peer, gst_event_ref (event));
+
+ gst_object_unref (peer);
+
+ if (ok)
+ break;
+
+ } else {
+ break;
+ }
+ /* This is in the case the muxer doesn't allow this change of caps */
+ GST_SPLITMUX_LOCK (splitmux);
+ locked = TRUE;
+ ctx->caps_change = TRUE;
+
+ if (splitmux->output_state != SPLITMUX_OUTPUT_STATE_START_NEXT_FILE) {
+ GST_DEBUG_OBJECT (splitmux,
+ "New caps were not accepted. Switching output file");
+ if (ctx->out_eos == FALSE) {
+ splitmux->output_state = SPLITMUX_OUTPUT_STATE_ENDING_FILE;
+ GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+ }
+ }
+
+ /* Lets it fall through, if it fails again, then the muxer just can't
+ * support this format, but at least we have a closed file.
+ */
+ break;
+ }
+ default:
+ break;
+ }
+
+ /* We need to make sure events aren't passed
+ * until the muxer / sink are ready for it */
+ if (!locked)
+ GST_SPLITMUX_LOCK (splitmux);
+ if (wait)
+ ret = complete_or_wait_on_out (splitmux, ctx);
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ /* Don't try to forward sticky events before the next buffer is there
+ * because it would cause a new file to be created without the first
+ * buffer being available.
+ */
+ GST_PAD_PROBE_INFO_FLOW_RETURN (info) = ret;
+ if (ctx->caps_change && GST_EVENT_IS_STICKY (event)) {
+ gst_event_unref (event);
+ return GST_PAD_PROBE_HANDLED;
+ } else {
+ return GST_PAD_PROBE_PASS;
+ }
+ }
+
+ /* Allow everything through until the configured next stopping point */
+ GST_SPLITMUX_LOCK (splitmux);
+
+ buf_info = g_queue_pop_tail (&ctx->queued_bufs);
+ if (buf_info == NULL) {
+ /* Can only happen due to a poorly timed flush */
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+
+ /* If we have popped a keyframe, decrement the queued_gop count */
+ if (buf_info->keyframe && splitmux->queued_keyframes > 0)
+ splitmux->queued_keyframes--;
+
+ ctx->out_running_time = buf_info->run_ts;
+ ctx->cur_out_buffer = gst_pad_probe_info_get_buffer (info);
+
+ GST_LOG_OBJECT (splitmux,
+ "Pad %" GST_PTR_FORMAT " buffer with run TS %" GST_STIME_FORMAT
+ " size %" G_GUINT64_FORMAT,
+ pad, GST_STIME_ARGS (ctx->out_running_time), buf_info->buf_size);
+
+ ctx->caps_change = FALSE;
+
+ ret = complete_or_wait_on_out (splitmux, ctx);
+
+ splitmux->muxed_out_bytes += buf_info->buf_size;
+
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ GstBuffer *buf = gst_pad_probe_info_get_buffer (info);
+ GST_LOG_OBJECT (pad, "Returning to pass buffer %" GST_PTR_FORMAT
+ " run ts %" GST_STIME_FORMAT, buf,
+ GST_STIME_ARGS (ctx->out_running_time));
+ }
+#endif
+
+ ctx->cur_out_buffer = NULL;
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ /* pending_gap is protected by the STREAM lock */
+ if (ctx->pending_gap) {
+ /* If we previously stored a gap event, send it now */
+ GstPad *peer = gst_pad_get_peer (ctx->srcpad);
+
+ GST_DEBUG_OBJECT (splitmux,
+ "Pad %" GST_PTR_FORMAT " sending pending GAP event", ctx->srcpad);
+
+ gst_pad_send_event (peer, ctx->pending_gap);
+ ctx->pending_gap = NULL;
+
+ gst_object_unref (peer);
+ }
+
+ mq_stream_buf_free (buf_info);
+
+ GST_PAD_PROBE_INFO_FLOW_RETURN (info) = ret;
+ return GST_PAD_PROBE_PASS;
+
+beach:
+ GST_SPLITMUX_UNLOCK (splitmux);
+ GST_PAD_PROBE_INFO_FLOW_RETURN (info) = ret;
+ return GST_PAD_PROBE_DROP;
+}
+
+static gboolean
+resend_sticky (GstPad * pad, GstEvent ** event, GstPad * peer)
+{
+ return gst_pad_send_event (peer, gst_event_ref (*event));
+}
+
+static void
+unlock_context (MqStreamCtx * ctx, GstSplitMuxSink * splitmux)
+{
+ if (ctx->fragment_block_id > 0) {
+ gst_pad_remove_probe (ctx->srcpad, ctx->fragment_block_id);
+ ctx->fragment_block_id = 0;
+ }
+}
+
+static void
+restart_context (MqStreamCtx * ctx, GstSplitMuxSink * splitmux)
+{
+ GstPad *peer = gst_pad_get_peer (ctx->srcpad);
+
+ gst_pad_sticky_events_foreach (ctx->srcpad,
+ (GstPadStickyEventsForeachFunction) (resend_sticky), peer);
+
+ /* Clear EOS flag if not actually EOS */
+ ctx->out_eos = GST_PAD_IS_EOS (ctx->srcpad);
+ ctx->out_eos_async_done = ctx->out_eos;
+
+ gst_object_unref (peer);
+}
+
+static void
+relink_context (MqStreamCtx * ctx, GstSplitMuxSink * splitmux)
+{
+ GstPad *sinkpad, *srcpad, *newpad;
+ GstPadTemplate *templ;
+
+ srcpad = ctx->srcpad;
+ sinkpad = gst_pad_get_peer (srcpad);
+
+ templ = sinkpad->padtemplate;
+ newpad =
+ gst_element_request_pad (splitmux->muxer, templ,
+ GST_PAD_NAME (sinkpad), NULL);
+
+ GST_DEBUG_OBJECT (splitmux, "Relinking ctx %p to pad %" GST_PTR_FORMAT, ctx,
+ newpad);
+ if (!gst_pad_unlink (srcpad, sinkpad)) {
+ gst_object_unref (sinkpad);
+ goto fail;
+ }
+ if (gst_pad_link_full (srcpad, newpad,
+ GST_PAD_LINK_CHECK_NO_RECONFIGURE) != GST_PAD_LINK_OK) {
+ gst_element_release_request_pad (splitmux->muxer, newpad);
+ gst_object_unref (sinkpad);
+ gst_object_unref (newpad);
+ goto fail;
+ }
+ gst_object_unref (newpad);
+ gst_object_unref (sinkpad);
+ return;
+
+fail:
+ GST_ELEMENT_ERROR (splitmux, RESOURCE, SETTINGS,
+ ("Could not create the new muxer/sink"), NULL);
+}
+
+static GstPadProbeReturn
+_block_pad (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+{
+ return GST_PAD_PROBE_OK;
+}
+
+static void
+block_context (MqStreamCtx * ctx, GstSplitMuxSink * splitmux)
+{
+ ctx->fragment_block_id =
+ gst_pad_add_probe (ctx->srcpad, GST_PAD_PROBE_TYPE_BLOCK, _block_pad,
+ NULL, NULL);
+}
+
+static gboolean
+_set_property_from_structure (GQuark field_id, const GValue * value,
+ gpointer user_data)
+{
+ const gchar *property_name = g_quark_to_string (field_id);
+ GObject *element = G_OBJECT (user_data);
+
+ g_object_set_property (element, property_name, value);
+
+ return TRUE;
+}
+
+static void
+_lock_and_set_to_null (GstElement * element, GstSplitMuxSink * splitmux)
+{
+ gst_element_set_locked_state (element, TRUE);
+ gst_element_set_state (element, GST_STATE_NULL);
+ GST_LOG_OBJECT (splitmux, "Removing old element %" GST_PTR_FORMAT, element);
+ gst_bin_remove (GST_BIN (splitmux), element);
+}
+
+
+static void
+_send_event (const GValue * value, gpointer user_data)
+{
+ GstPad *pad = g_value_get_object (value);
+ GstEvent *ev = user_data;
+
+ gst_pad_send_event (pad, gst_event_ref (ev));
+}
+
/* Called with the splitmux lock held when a fragment reaches EOS and it is
 * time to start a new one. Temporarily drops the splitmux lock to take the
 * state lock (avoiding racy state changes elsewhere), then either builds a
 * brand-new muxer/sink pair (async-finalize mode) or resets/flushes the
 * existing pair, picks the next filename and brings everything back to the
 * bin's target state.
 * Returns GST_FLOW_OK on success, GST_FLOW_FLUSHING on shutdown, or
 * GST_FLOW_ERROR on failure. On return the splitmux lock is held again and
 * the state lock released. */
static GstFlowReturn
start_next_fragment (GstSplitMuxSink * splitmux, MqStreamCtx * ctx)
{
  GstElement *muxer, *sink;

  /* Only the reference context may drive fragment switches */
  g_assert (ctx->is_reference);

  /* 1 change to new file */
  splitmux->switching_fragment = TRUE;

  /* We need to drop the splitmux lock to acquire the state lock
   * here and ensure there's no racy state change going on elsewhere */
  muxer = gst_object_ref (splitmux->muxer);
  sink = gst_object_ref (splitmux->active_sink);

  GST_SPLITMUX_UNLOCK (splitmux);
  GST_SPLITMUX_STATE_LOCK (splitmux);

  if (splitmux->shutdown) {
    GST_DEBUG_OBJECT (splitmux,
        "Shutdown requested. Aborting fragment switch.");
    GST_SPLITMUX_LOCK (splitmux);
    GST_SPLITMUX_STATE_UNLOCK (splitmux);
    gst_object_unref (muxer);
    gst_object_unref (sink);
    return GST_FLOW_FLUSHING;
  }

  if (splitmux->async_finalize) {
    /* Async-finalize: create a fresh muxer+sink pair for the new fragment
     * and let the old pair drain/finalize in the background */
    if (splitmux->muxed_out_bytes > 0
        || splitmux->fragment_id != splitmux->start_index) {
      gchar *newname;
      GstElement *new_sink, *new_muxer;

      GST_DEBUG_OBJECT (splitmux, "Starting fragment %u",
          splitmux->fragment_id);
      g_list_foreach (splitmux->contexts, (GFunc) block_context, splitmux);
      newname = g_strdup_printf ("sink_%u", splitmux->fragment_id);
      GST_SPLITMUX_LOCK (splitmux);
      if ((splitmux->sink =
              create_element (splitmux, splitmux->sink_factory, newname,
                  TRUE)) == NULL)
        goto fail;
      if (splitmux->sink_preset && GST_IS_PRESET (splitmux->sink))
        gst_preset_load_preset (GST_PRESET (splitmux->sink),
            splitmux->sink_preset);
      if (splitmux->sink_properties)
        gst_structure_foreach (splitmux->sink_properties,
            _set_property_from_structure, splitmux->sink);
      splitmux->active_sink = splitmux->sink;
      g_signal_emit (splitmux, signals[SIGNAL_SINK_ADDED], 0, splitmux->sink);
      g_free (newname);
      newname = g_strdup_printf ("muxer_%u", splitmux->fragment_id);
      if ((splitmux->muxer =
              create_element (splitmux, splitmux->muxer_factory, newname,
                  TRUE)) == NULL)
        goto fail;
      if (g_object_class_find_property (G_OBJECT_GET_CLASS (splitmux->sink),
              "async") != NULL) {
        /* async child elements are causing state change races and weird
         * failures, so let's try and turn that off */
        g_object_set (splitmux->sink, "async", FALSE, NULL);
      }
      if (splitmux->muxer_preset && GST_IS_PRESET (splitmux->muxer))
        gst_preset_load_preset (GST_PRESET (splitmux->muxer),
            splitmux->muxer_preset);
      if (splitmux->muxer_properties)
        gst_structure_foreach (splitmux->muxer_properties,
            _set_property_from_structure, splitmux->muxer);
      g_signal_emit (splitmux, signals[SIGNAL_MUXER_ADDED], 0, splitmux->muxer);
      g_free (newname);
      new_sink = splitmux->sink;
      new_muxer = splitmux->muxer;
      GST_SPLITMUX_UNLOCK (splitmux);
      g_list_foreach (splitmux->contexts, (GFunc) relink_context, splitmux);
      gst_element_link (new_muxer, new_sink);

      /* EOS_FROM_US qdata of 2 means the old pair already saw our EOS and
       * can be torn down now; otherwise mark it so the EOS handler in
       * bus_handler () tears it down later - whichever happens second */
      if (g_object_get_qdata ((GObject *) sink, EOS_FROM_US)) {
        if (GPOINTER_TO_INT (g_object_get_qdata ((GObject *) sink,
                    EOS_FROM_US)) == 2) {
          _lock_and_set_to_null (muxer, splitmux);
          _lock_and_set_to_null (sink, splitmux);
        } else {
          g_object_set_qdata ((GObject *) sink, EOS_FROM_US,
              GINT_TO_POINTER (2));
        }
      }
      /* Swap our local refs over to the new pair */
      gst_object_unref (muxer);
      gst_object_unref (sink);
      muxer = new_muxer;
      sink = new_sink;
      gst_object_ref (muxer);
      gst_object_ref (sink);
    }
  } else {
    /* Synchronous mode: reuse the same muxer/sink. The sink is always
     * reset to NULL; the muxer is either reset too or merely flushed,
     * depending on the reset-muxer property. */
    gst_element_set_locked_state (muxer, TRUE);
    gst_element_set_locked_state (sink, TRUE);
    gst_element_set_state (sink, GST_STATE_NULL);

    if (splitmux->reset_muxer) {
      gst_element_set_state (muxer, GST_STATE_NULL);
    } else {
      /* Flush the muxer's sink pads instead of resetting it. Both flush
       * events carry the same seqnum so they pair up correctly. */
      GstIterator *it = gst_element_iterate_sink_pads (muxer);
      GstEvent *ev;
      guint32 seqnum;

      ev = gst_event_new_flush_start ();
      seqnum = gst_event_get_seqnum (ev);
      while (gst_iterator_foreach (it, _send_event, ev) == GST_ITERATOR_RESYNC);
      gst_event_unref (ev);

      gst_iterator_resync (it);

      ev = gst_event_new_flush_stop (TRUE);
      gst_event_set_seqnum (ev, seqnum);
      while (gst_iterator_foreach (it, _send_event, ev) == GST_ITERATOR_RESYNC);
      gst_event_unref (ev);

      gst_iterator_free (it);
    }
  }

  GST_SPLITMUX_LOCK (splitmux);
  set_next_filename (splitmux, ctx);
  splitmux->muxed_out_bytes = 0;
  GST_SPLITMUX_UNLOCK (splitmux);

  /* Bring sink first, then muxer, up to the bin's target state */
  if (gst_element_set_state (sink,
          GST_STATE_TARGET (splitmux)) == GST_STATE_CHANGE_FAILURE) {
    gst_element_set_state (sink, GST_STATE_NULL);
    gst_element_set_locked_state (muxer, FALSE);
    gst_element_set_locked_state (sink, FALSE);

    goto fail_output;
  }

  if (gst_element_set_state (muxer,
          GST_STATE_TARGET (splitmux)) == GST_STATE_CHANGE_FAILURE) {
    gst_element_set_state (muxer, GST_STATE_NULL);
    gst_element_set_state (sink, GST_STATE_NULL);
    gst_element_set_locked_state (muxer, FALSE);
    gst_element_set_locked_state (sink, FALSE);
    goto fail_muxer;
  }

  gst_element_set_locked_state (muxer, FALSE);
  gst_element_set_locked_state (sink, FALSE);

  /* NOTE(review): 'sink' is unreffed here but still used below in
   * send_fragment_opened_closed_msg () - presumably safe because the bin
   * retains a ref; confirm this cannot race with async removal */
  gst_object_unref (sink);
  gst_object_unref (muxer);

  GST_SPLITMUX_LOCK (splitmux);
  GST_SPLITMUX_STATE_UNLOCK (splitmux);
  splitmux->switching_fragment = FALSE;
  do_async_done (splitmux);

  splitmux->ready_for_output = TRUE;

  g_list_foreach (splitmux->contexts, (GFunc) unlock_context, splitmux);
  g_list_foreach (splitmux->contexts, (GFunc) restart_context, splitmux);

  send_fragment_opened_closed_msg (splitmux, TRUE, sink);

  /* FIXME: Is this always the correct next state? */
  GST_LOG_OBJECT (splitmux, "Resetting state to AWAITING_COMMAND");
  splitmux->output_state = SPLITMUX_OUTPUT_STATE_AWAITING_COMMAND;
  GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
  return GST_FLOW_OK;

fail:
  /* NOTE(review): all 'goto fail' sites already hold the splitmux lock
   * (taken before create_element) yet it is taken again here - looks like
   * a double-lock; verify GST_SPLITMUX_LOCK is recursive or fix callers */
  gst_object_unref (sink);
  gst_object_unref (muxer);

  GST_SPLITMUX_LOCK (splitmux);
  GST_SPLITMUX_STATE_UNLOCK (splitmux);
  GST_ELEMENT_ERROR (splitmux, RESOURCE, SETTINGS,
      ("Could not create the new muxer/sink"), NULL);
  return GST_FLOW_ERROR;

fail_output:
  GST_ELEMENT_ERROR (splitmux, RESOURCE, SETTINGS,
      ("Could not start new output sink"), NULL);
  gst_object_unref (sink);
  gst_object_unref (muxer);

  GST_SPLITMUX_LOCK (splitmux);
  GST_SPLITMUX_STATE_UNLOCK (splitmux);
  splitmux->switching_fragment = FALSE;
  return GST_FLOW_ERROR;

fail_muxer:
  GST_ELEMENT_ERROR (splitmux, RESOURCE, SETTINGS,
      ("Could not start new muxer"), NULL);
  gst_object_unref (sink);
  gst_object_unref (muxer);

  GST_SPLITMUX_LOCK (splitmux);
  GST_SPLITMUX_STATE_UNLOCK (splitmux);
  splitmux->switching_fragment = FALSE;
  return GST_FLOW_ERROR;
}
+
+static void
+bus_handler (GstBin * bin, GstMessage * message)
+{
+ GstSplitMuxSink *splitmux = GST_SPLITMUX_SINK (bin);
+
+ switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_EOS:{
+ /* If the state is draining out the current file, drop this EOS */
+ GstElement *sink;
+
+ sink = GST_ELEMENT (GST_MESSAGE_SRC (message));
+ GST_SPLITMUX_LOCK (splitmux);
+
+ send_fragment_opened_closed_msg (splitmux, FALSE, sink);
+
+ if (splitmux->async_finalize) {
+
+ if (g_object_get_qdata ((GObject *) sink, EOS_FROM_US)) {
+ if (GPOINTER_TO_INT (g_object_get_qdata ((GObject *) sink,
+ EOS_FROM_US)) == 2) {
+ GstElement *muxer;
+ GstPad *sinksink, *muxersrc;
+
+ sinksink = gst_element_get_static_pad (sink, "sink");
+ muxersrc = gst_pad_get_peer (sinksink);
+ muxer = gst_pad_get_parent_element (muxersrc);
+ gst_object_unref (sinksink);
+ gst_object_unref (muxersrc);
+
+ gst_element_call_async (muxer,
+ (GstElementCallAsyncFunc) _lock_and_set_to_null,
+ gst_object_ref (splitmux), gst_object_unref);
+ gst_element_call_async (sink,
+ (GstElementCallAsyncFunc) _lock_and_set_to_null,
+ gst_object_ref (splitmux), gst_object_unref);
+ gst_object_unref (muxer);
+ } else {
+ g_object_set_qdata ((GObject *) sink, EOS_FROM_US,
+ GINT_TO_POINTER (2));
+ }
+ GST_DEBUG_OBJECT (splitmux,
+ "Caught async EOS from previous muxer+sink. Dropping.");
+ /* We forward the EOS so that it gets aggregated as normal. If the sink
+ * finishes and is removed before the end, it will be de-aggregated */
+ gst_message_unref (message);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ return;
+ }
+ } else if (splitmux->output_state == SPLITMUX_OUTPUT_STATE_ENDING_STREAM) {
+ GST_DEBUG_OBJECT (splitmux,
+ "Passing EOS message. Output state %d max_out_running_time %"
+ GST_STIME_FORMAT, splitmux->output_state,
+ GST_STIME_ARGS (splitmux->max_out_running_time));
+ } else {
+ GST_DEBUG_OBJECT (splitmux, "Caught EOS at end of fragment, dropping");
+ splitmux->output_state = SPLITMUX_OUTPUT_STATE_START_NEXT_FILE;
+ GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+
+ gst_message_unref (message);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ return;
+ }
+ GST_SPLITMUX_UNLOCK (splitmux);
+ break;
+ }
+ case GST_MESSAGE_ASYNC_START:
+ case GST_MESSAGE_ASYNC_DONE:
+ /* Ignore state changes from our children while switching */
+ GST_SPLITMUX_LOCK (splitmux);
+ if (splitmux->switching_fragment) {
+ if (GST_MESSAGE_SRC (message) == (GstObject *) splitmux->active_sink
+ || GST_MESSAGE_SRC (message) == (GstObject *) splitmux->muxer) {
+ GST_LOG_OBJECT (splitmux,
+ "Ignoring state change from child %" GST_PTR_FORMAT
+ " while switching", GST_MESSAGE_SRC (message));
+ gst_message_unref (message);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ return;
+ }
+ }
+ GST_SPLITMUX_UNLOCK (splitmux);
+ break;
+ case GST_MESSAGE_WARNING:
+ {
+ GError *gerror = NULL;
+
+ gst_message_parse_warning (message, &gerror, NULL);
+
+ if (g_error_matches (gerror, GST_STREAM_ERROR, GST_STREAM_ERROR_FORMAT)) {
+ GList *item;
+ gboolean caps_change = FALSE;
+
+ GST_SPLITMUX_LOCK (splitmux);
+
+ for (item = splitmux->contexts; item; item = item->next) {
+ MqStreamCtx *ctx = item->data;
+
+ if (ctx->caps_change) {
+ caps_change = TRUE;
+ break;
+ }
+ }
+
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ if (caps_change) {
+ GST_LOG_OBJECT (splitmux,
+ "Ignoring warning change from child %" GST_PTR_FORMAT
+ " while switching caps", GST_MESSAGE_SRC (message));
+ gst_message_unref (message);
+ return;
+ }
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+}
+
+static void
+ctx_set_unblock (MqStreamCtx * ctx)
+{
+ ctx->need_unblock = TRUE;
+}
+
/* Decide whether the newly gathered GOP must start a new fragment.
 * queued_time / queued_gop_time / queued_bytes describe the data pending
 * for the current fragment including this GOP.
 * Checks, in order: something already muxed on the reference stream, an
 * explicit split-now request, user-requested split running times, the byte
 * and time thresholds, the timecode interval (with 5us rounding tolerance),
 * and robust-muxing header headroom.
 * Takes the object lock internally for the threshold/split-time state;
 * NOTE(review): appears to assume the splitmux lock is held by the caller
 * - confirm at call sites. */
static gboolean
need_new_fragment (GstSplitMuxSink * splitmux,
    GstClockTime queued_time, GstClockTime queued_gop_time,
    guint64 queued_bytes)
{
  guint64 thresh_bytes;
  GstClockTime thresh_time;
  gboolean check_robust_muxing;
  GstClockTime time_to_split = GST_CLOCK_TIME_NONE;
  GstClockTime *ptr_to_time;

  /* Snapshot the property-backed state under the object lock */
  GST_OBJECT_LOCK (splitmux);
  thresh_bytes = splitmux->threshold_bytes;
  thresh_time = splitmux->threshold_time;
  ptr_to_time = (GstClockTime *)
      gst_queue_array_peek_head_struct (splitmux->times_to_split);
  if (ptr_to_time)
    time_to_split = *ptr_to_time;
  check_robust_muxing = splitmux->use_robust_muxing
      && splitmux->muxer_has_reserved_props;
  GST_OBJECT_UNLOCK (splitmux);

  /* Have we muxed at least one thing from the reference
   * stream into the file? If not, no other streams can have
   * either */
  if (splitmux->fragment_reference_bytes <= 0) {
    GST_TRACE_OBJECT (splitmux,
        "Not ready to split - nothing muxed on the reference stream");
    return FALSE;
  }

  /* User told us to split now */
  if (g_atomic_int_get (&(splitmux->do_split_next_gop)) == TRUE) {
    GST_TRACE_OBJECT (splitmux, "Forcing because split_next_gop is set");
    return TRUE;
  }

  /* User told us to split at this running time */
  if (splitmux->gop_start_time >= time_to_split) {
    GST_OBJECT_LOCK (splitmux);
    /* Dequeue running time */
    gst_queue_array_pop_head_struct (splitmux->times_to_split);
    /* Empty any running times after this that are past now */
    ptr_to_time = gst_queue_array_peek_head_struct (splitmux->times_to_split);
    while (ptr_to_time) {
      time_to_split = *ptr_to_time;
      if (splitmux->gop_start_time < time_to_split) {
        break;
      }
      gst_queue_array_pop_head_struct (splitmux->times_to_split);
      ptr_to_time = gst_queue_array_peek_head_struct (splitmux->times_to_split);
    }
    GST_TRACE_OBJECT (splitmux,
        "GOP start time %" GST_STIME_FORMAT " is after requested split point %"
        GST_STIME_FORMAT, GST_STIME_ARGS (splitmux->gop_start_time),
        GST_STIME_ARGS (time_to_split));
    GST_OBJECT_UNLOCK (splitmux);
    return TRUE;
  }

  if (thresh_bytes > 0 && queued_bytes > thresh_bytes) {
    GST_TRACE_OBJECT (splitmux,
        "queued bytes %" G_GUINT64_FORMAT " overruns byte limit", queued_bytes);
    return TRUE;                /* Would overrun byte limit */
  }

  if (thresh_time > 0 && queued_time > thresh_time) {
    GST_TRACE_OBJECT (splitmux,
        "queued time %" GST_STIME_FORMAT " overruns time limit",
        GST_STIME_ARGS (queued_time));
    return TRUE;                /* Would overrun time limit */
  }

  /* Timecode-interval splitting: allow a 5us tolerance for rounding in
   * the timecode -> running-time conversion */
  if (splitmux->tc_interval &&
      GST_CLOCK_TIME_IS_VALID (splitmux->next_fragment_start_tc_time) &&
      splitmux->reference_ctx->in_running_time >
      splitmux->next_fragment_start_tc_time + 5 * GST_USECOND) {
    GST_TRACE_OBJECT (splitmux,
        "in running time %" GST_STIME_FORMAT " overruns time limit %"
        GST_TIME_FORMAT,
        GST_STIME_ARGS (splitmux->reference_ctx->in_running_time),
        GST_TIME_ARGS (splitmux->next_fragment_start_tc_time));
    return TRUE;
  }

  if (check_robust_muxing) {
    GstClockTime mux_reserved_remain;

    g_object_get (splitmux->muxer,
        "reserved-duration-remaining", &mux_reserved_remain, NULL);

    GST_LOG_OBJECT (splitmux,
        "Muxer robust muxing report - %" G_GUINT64_FORMAT
        " remaining. New GOP would enqueue %" G_GUINT64_FORMAT,
        mux_reserved_remain, queued_gop_time);

    if (queued_gop_time >= mux_reserved_remain) {
      GST_INFO_OBJECT (splitmux,
          "File is about to run out of header room - %" G_GUINT64_FORMAT
          " remaining. New GOP would enqueue %" G_GUINT64_FORMAT
          ". Switching to new file", mux_reserved_remain, queued_gop_time);
      return TRUE;
    }
  }

  /* Continue and mux this GOP */
  return FALSE;
}
+
+/* probably we want to add this API? */
+static void
+video_time_code_replace (GstVideoTimeCode ** old_tc, GstVideoTimeCode * new_tc)
+{
+ GstVideoTimeCode *timecode = NULL;
+
+ g_return_if_fail (old_tc != NULL);
+
+ if (*old_tc == new_tc)
+ return;
+
+ if (new_tc)
+ timecode = gst_video_time_code_copy (new_tc);
+
+ if (*old_tc)
+ gst_video_time_code_free (*old_tc);
+
+ *old_tc = timecode;
+}
+
+/* Called with splitmux lock held */
+/* Called when entering ProcessingCompleteGop state
+ * Assess if mq contents overflowed the current file
+ * -> If yes, need to switch to new file
+ * -> if no, set max_out_running_time to let this GOP in and
+ * go to COLLECTING_GOP_START state
+ */
+static void
+handle_gathered_gop (GstSplitMuxSink * splitmux)
+{
+ guint64 queued_bytes;
+ GstClockTimeDiff queued_time = 0;
+ GstClockTimeDiff queued_gop_time = 0;
+ /* Cut-off TS up to which this GOP will be released to the output side */
+ GstClockTimeDiff new_out_ts = splitmux->reference_ctx->in_running_time;
+ SplitMuxOutputCommand *cmd;
+
+ /* Assess if the multiqueue contents overflowed the current file */
+ /* When considering if a newly gathered GOP overflows
+ * the time limit for the file, only consider the running time of the
+ * reference stream. Other streams might have run ahead a little bit,
+ * but extra pieces won't be released to the muxer beyond the reference
+ * stream cut-off anyway - so it forms the limit. */
+ queued_bytes = splitmux->fragment_total_bytes + splitmux->gop_total_bytes;
+ queued_time = splitmux->reference_ctx->in_running_time;
+ /* queued_gop_time tracks how much unwritten data there is waiting to
+ * be written to this fragment including this GOP */
+ if (splitmux->reference_ctx->out_running_time != GST_CLOCK_STIME_NONE)
+ queued_gop_time =
+ splitmux->reference_ctx->in_running_time -
+ splitmux->reference_ctx->out_running_time;
+ else
+ queued_gop_time =
+ splitmux->reference_ctx->in_running_time - splitmux->gop_start_time;
+
+ GST_LOG_OBJECT (splitmux, " queued_bytes %" G_GUINT64_FORMAT, queued_bytes);
+ GST_LOG_OBJECT (splitmux, "mq at TS %" GST_STIME_FORMAT
+ " bytes %" G_GUINT64_FORMAT " in running time %" GST_STIME_FORMAT
+ " gop start time %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (queued_time), queued_bytes,
+ GST_STIME_ARGS (splitmux->reference_ctx->in_running_time),
+ GST_STIME_ARGS (splitmux->gop_start_time));
+
+ /* Negative durations mean input timestamps went backwards - error out */
+ if (queued_gop_time < 0)
+ goto error_gop_duration;
+
+ if (queued_time < splitmux->fragment_start_time)
+ goto error_queued_time;
+
+ /* Make queued_time relative to the start of this fragment */
+ queued_time -= splitmux->fragment_start_time;
+ if (queued_time < queued_gop_time)
+ queued_gop_time = queued_time;
+
+ /* Expand queued bytes estimate by muxer overhead */
+ queued_bytes += (queued_bytes * splitmux->mux_overhead);
+
+ /* Check for overrun - have we output at least one byte and overrun
+ * either threshold? */
+ if (need_new_fragment (splitmux, queued_time, queued_gop_time, queued_bytes)) {
+ if (splitmux->async_finalize) {
+ /* Stash the reference stream's output position on the old sink so the
+ * async finalization code can retrieve it later via qdata */
+ GstClockTime *sink_running_time = g_new (GstClockTime, 1);
+ *sink_running_time = splitmux->reference_ctx->out_running_time;
+ g_object_set_qdata_full (G_OBJECT (splitmux->sink),
+ RUNNING_TIME, sink_running_time, g_free);
+ }
+ /* Any pending manual split request is satisfied by this rollover */
+ g_atomic_int_set (&(splitmux->do_split_next_gop), FALSE);
+ /* Tell the output side to start a new fragment */
+ GST_INFO_OBJECT (splitmux,
+ "This GOP (dur %" GST_STIME_FORMAT
+ ") would overflow the fragment, Sending start_new_fragment cmd",
+ GST_STIME_ARGS (splitmux->reference_ctx->in_running_time -
+ splitmux->gop_start_time));
+ cmd = out_cmd_buf_new ();
+ cmd->start_new_fragment = TRUE;
+ g_queue_push_head (&splitmux->out_cmd_q, cmd);
+ GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+
+ /* Reset per-fragment accounting; the new fragment begins at this GOP */
+ new_out_ts = splitmux->reference_ctx->in_running_time;
+ splitmux->fragment_start_time = splitmux->gop_start_time;
+ splitmux->fragment_total_bytes = 0;
+ splitmux->fragment_reference_bytes = 0;
+
+ if (splitmux->tc_interval) {
+ /* Restart timecode tracking from this fragment's first GOP */
+ video_time_code_replace (&splitmux->fragment_start_tc,
+ splitmux->gop_start_tc);
+ splitmux->next_fragment_start_tc_time =
+ calculate_next_max_timecode (splitmux, splitmux->fragment_start_tc,
+ splitmux->fragment_start_time, NULL);
+ if (!GST_CLOCK_TIME_IS_VALID (splitmux->next_fragment_start_tc_time)) {
+ GST_WARNING_OBJECT (splitmux,
+ "Couldn't calculate next fragment start time for timecode mode");
+ /* shouldn't happen, but reset all and try again with next buffers */
+ gst_splitmux_reset_timecode (splitmux);
+ }
+ }
+ }
+
+ /* And set up to collect the next GOP */
+ if (!splitmux->reference_ctx->in_eos) {
+ splitmux->input_state = SPLITMUX_INPUT_STATE_COLLECTING_GOP_START;
+ splitmux->gop_start_time = new_out_ts;
+ if (splitmux->tc_interval)
+ video_time_code_replace (&splitmux->gop_start_tc, splitmux->in_tc);
+ } else {
+ /* This is probably already the current state, but just in case: */
+ splitmux->input_state = SPLITMUX_INPUT_STATE_FINISHING_UP;
+ new_out_ts = GST_CLOCK_STIME_NONE; /* EOS runs until forever */
+ }
+
+ /* And wake all input contexts to send a wake-up event */
+ g_list_foreach (splitmux->contexts, (GFunc) ctx_set_unblock, NULL);
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+
+ /* Now either way - either there was no overflow, or we requested a new fragment: release this GOP */
+ splitmux->fragment_total_bytes += splitmux->gop_total_bytes;
+ splitmux->fragment_reference_bytes += splitmux->gop_reference_bytes;
+
+ if (splitmux->gop_total_bytes > 0) {
+ GST_LOG_OBJECT (splitmux,
+ "Releasing GOP to output. Bytes in fragment now %" G_GUINT64_FORMAT
+ " time %" GST_STIME_FORMAT,
+ splitmux->fragment_total_bytes, GST_STIME_ARGS (queued_time));
+
+ /* Send this GOP to the output command queue */
+ cmd = out_cmd_buf_new ();
+ cmd->start_new_fragment = FALSE;
+ cmd->max_output_ts = new_out_ts;
+ GST_LOG_OBJECT (splitmux, "Sending GOP cmd to output for TS %"
+ GST_STIME_FORMAT, GST_STIME_ARGS (new_out_ts));
+ g_queue_push_head (&splitmux->out_cmd_q, cmd);
+
+ GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+ }
+
+ /* Start accumulating the next GOP from zero */
+ splitmux->gop_total_bytes = 0;
+ splitmux->gop_reference_bytes = 0;
+ return;
+
+error_gop_duration:
+ GST_ELEMENT_ERROR (splitmux,
+ STREAM, FAILED, ("Timestamping error on input streams"),
+ ("Queued GOP time is negative %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (queued_gop_time)));
+ return;
+error_queued_time:
+ GST_ELEMENT_ERROR (splitmux,
+ STREAM, FAILED, ("Timestamping error on input streams"),
+ ("Queued time is negative. Input went backwards. queued_time - %"
+ GST_STIME_FORMAT, GST_STIME_ARGS (queued_time)));
+ return;
+}
+
+/* Called with splitmux lock held */
+/* Called from each input pad when it is has all the pieces
+ * for a GOP or EOS, starting with the reference pad which has set the
+ * splitmux->max_in_running_time
+ */
+static void
+check_completed_gop (GstSplitMuxSink * splitmux, MqStreamCtx * ctx)
+{
+ GList *cur;
+ GstEvent *event;
+
+ /* On ENDING_FILE, the reference stream sends a command to start a new
+ * fragment, then releases the GOP for output in the new fragment.
+ * If some streams received no buffer during the last GOP that overran,
+ * because its next buffer has a timestamp bigger than
+ * ctx->max_in_running_time, its queue is empty. In that case the only
+ * way to wakeup the output thread is by injecting an event in the
+ * queue. This usually happen with subtitle streams.
+ * See https://bugzilla.gnome.org/show_bug.cgi?id=763711. */
+ if (ctx->need_unblock) {
+ GST_LOG_OBJECT (ctx->sinkpad, "Sending splitmuxsink-unblock event");
+ event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM |
+ GST_EVENT_TYPE_SERIALIZED,
+ gst_structure_new ("splitmuxsink-unblock", "timestamp",
+ G_TYPE_INT64, splitmux->max_in_running_time, NULL));
+
+ /* The lock must be dropped while the serialized event is pushed into
+ * the stream's queue */
+ GST_SPLITMUX_UNLOCK (splitmux);
+ gst_pad_send_event (ctx->sinkpad, event);
+ GST_SPLITMUX_LOCK (splitmux);
+
+ ctx->need_unblock = FALSE;
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+ /* state may have changed while we were unlocked. Loop again if so */
+ if (splitmux->input_state != SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT)
+ return;
+ }
+
+ do {
+ if (splitmux->input_state == SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT) {
+ gboolean ready = TRUE;
+
+ /* Iterate each pad, and check that the input running time is at least
+ * up to the reference running time, and if so handle the collected GOP */
+ GST_LOG_OBJECT (splitmux, "Checking GOP collected, Max in running time %"
+ GST_STIME_FORMAT " ctx %p",
+ GST_STIME_ARGS (splitmux->max_in_running_time), ctx);
+ for (cur = g_list_first (splitmux->contexts); cur != NULL;
+ cur = g_list_next (cur)) {
+ MqStreamCtx *tmpctx = (MqStreamCtx *) (cur->data);
+
+ GST_LOG_OBJECT (splitmux,
+ "Context %p sink pad %" GST_PTR_FORMAT " @ TS %" GST_STIME_FORMAT
+ " EOS %d", tmpctx, tmpctx->sinkpad,
+ GST_STIME_ARGS (tmpctx->in_running_time), tmpctx->in_eos);
+
+ /* A pad that is at EOS can never catch up further - treat it as
+ * having reached the cut-off point */
+ if (splitmux->max_in_running_time != GST_CLOCK_STIME_NONE &&
+ tmpctx->in_running_time < splitmux->max_in_running_time &&
+ !tmpctx->in_eos) {
+ GST_LOG_OBJECT (splitmux,
+ "Context %p sink pad %" GST_PTR_FORMAT " not ready. We'll sleep",
+ tmpctx, tmpctx->sinkpad);
+ ready = FALSE;
+ break;
+ }
+ }
+ if (ready) {
+ GST_DEBUG_OBJECT (splitmux,
+ "Collected GOP is complete. Processing (ctx %p)", ctx);
+ /* All pads have a complete GOP, release it into the multiqueue */
+ handle_gathered_gop (splitmux);
+
+ /* The user has requested a split, we can split now that the previous GOP
+ * has been collected to the correct location */
+ if (g_atomic_int_compare_and_exchange (&(splitmux->split_requested),
+ TRUE, FALSE)) {
+ g_atomic_int_set (&(splitmux->do_split_next_gop), TRUE);
+ }
+ }
+ }
+
+ /* If upstream reached EOS we are not expecting more data, no need to wait
+ * here. */
+ if (ctx->in_eos)
+ return;
+
+ /* This pad has already reached the cut-off TS but the GOP is not fully
+ * collected (or is being pushed) - sleep until woken by another pad or
+ * by the output side */
+ if (splitmux->input_state == SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT &&
+ !ctx->flushing &&
+ (ctx->in_running_time >= splitmux->max_in_running_time) &&
+ (splitmux->max_in_running_time != GST_CLOCK_STIME_NONE)) {
+ /* Some pad is not yet ready, or GOP is being pushed
+ * either way, sleep and wait to get woken */
+ GST_LOG_OBJECT (splitmux, "Sleeping for GOP collection (ctx %p)", ctx);
+ GST_SPLITMUX_WAIT_INPUT (splitmux);
+ GST_LOG_OBJECT (splitmux, "Done waiting for complete GOP (ctx %p)", ctx);
+ } else {
+ /* This pad is not ready or the state changed - break out and get another
+ * buffer / event */
+ break;
+ }
+ } while (splitmux->input_state == SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT);
+}
+
+/* Sink-pad probe installed on each per-stream queue. Runs in the input
+ * streaming thread: tracks the stream's running time from SEGMENT events
+ * and buffer timestamps, drives the input GOP-collection state machine
+ * on the reference stream, and accounts queued bytes/keyframes before
+ * letting the data into the queue. */
+static GstPadProbeReturn
+handle_mq_input (GstPad * pad, GstPadProbeInfo * info, MqStreamCtx * ctx)
+{
+ GstSplitMuxSink *splitmux = ctx->splitmux;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *buf;
+ MqStreamBuf *buf_info = NULL;
+ GstClockTime ts;
+ gboolean loop_again;
+ gboolean keyframe = FALSE;
+
+ GST_LOG_OBJECT (pad, "Fired probe type 0x%x", info->type);
+
+ /* FIXME: Handle buffer lists, until then make it clear they won't work */
+ if (info->type & GST_PAD_PROBE_TYPE_BUFFER_LIST) {
+ g_warning ("Buffer list handling not implemented");
+ return GST_PAD_PROBE_DROP;
+ }
+ if (info->type & GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM ||
+ info->type & GST_PAD_PROBE_TYPE_EVENT_FLUSH) {
+ GstEvent *event = gst_pad_probe_info_get_event (info);
+
+ GST_LOG_OBJECT (pad, "Event %" GST_PTR_FORMAT, event);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ /* Remember the segment for TS -> running-time conversion */
+ gst_event_copy_segment (event, &ctx->in_segment);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ GST_SPLITMUX_LOCK (splitmux);
+ gst_segment_init (&ctx->in_segment, GST_FORMAT_UNDEFINED);
+ ctx->in_eos = FALSE;
+ ctx->in_running_time = GST_CLOCK_STIME_NONE;
+ GST_SPLITMUX_UNLOCK (splitmux);
+ break;
+ case GST_EVENT_EOS:
+ GST_SPLITMUX_LOCK (splitmux);
+ ctx->in_eos = TRUE;
+
+ if (splitmux->input_state == SPLITMUX_INPUT_STATE_STOPPED) {
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+
+ if (ctx->is_reference) {
+ GST_INFO_OBJECT (splitmux, "Got Reference EOS. Finishing up");
+ /* check_completed_gop will act as if this is a new keyframe with infinite timestamp */
+ splitmux->input_state = SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT;
+ /* Wake up other input pads to collect this GOP */
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+ check_completed_gop (splitmux, ctx);
+ } else if (splitmux->input_state ==
+ SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT) {
+ /* If we are waiting for a GOP to be completed (ie, for aux
+ * pads to catch up), then this pad is complete, so check
+ * if the whole GOP is.
+ */
+ check_completed_gop (splitmux, ctx);
+ }
+ GST_SPLITMUX_UNLOCK (splitmux);
+ break;
+ case GST_EVENT_GAP:{
+ GstClockTime gap_ts;
+ GstClockTimeDiff rtime;
+
+ gst_event_parse_gap (event, &gap_ts, NULL);
+ if (gap_ts == GST_CLOCK_TIME_NONE)
+ break;
+
+ GST_SPLITMUX_LOCK (splitmux);
+
+ if (splitmux->input_state == SPLITMUX_INPUT_STATE_STOPPED) {
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+ rtime = my_segment_to_running_time (&ctx->in_segment, gap_ts);
+
+ GST_LOG_OBJECT (pad, "Have GAP w/ ts %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (rtime));
+
+ /* A GAP on the reference stream can establish the very first
+ * fragment start time, just like a first buffer would */
+ if (ctx->is_reference
+ && splitmux->fragment_start_time == GST_CLOCK_STIME_NONE) {
+ splitmux->gop_start_time = splitmux->fragment_start_time = rtime;
+ GST_LOG_OBJECT (splitmux, "Mux start time now %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (splitmux->fragment_start_time));
+ /* Also take this as the first start time when starting up,
+ * so that we start counting overflow from the first frame */
+ if (!GST_CLOCK_STIME_IS_VALID (splitmux->max_in_running_time))
+ splitmux->max_in_running_time = splitmux->fragment_start_time;
+ }
+
+ GST_SPLITMUX_UNLOCK (splitmux);
+ break;
+ }
+ default:
+ break;
+ }
+ return GST_PAD_PROBE_PASS;
+ } else if (info->type & GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM) {
+ switch (GST_QUERY_TYPE (GST_QUERY (info->data))) {
+ case GST_QUERY_ALLOCATION:
+ return GST_PAD_PROBE_DROP;
+ default:
+ return GST_PAD_PROBE_PASS;
+ }
+ }
+
+ /* From here on we are handling a data buffer */
+ buf = gst_pad_probe_info_get_buffer (info);
+ buf_info = mq_stream_buf_new ();
+
+ /* Prefer PTS; fall back to DTS when the buffer has no PTS */
+ if (GST_BUFFER_PTS_IS_VALID (buf))
+ ts = GST_BUFFER_PTS (buf);
+ else
+ ts = GST_BUFFER_DTS (buf);
+
+ GST_LOG_OBJECT (pad, "Buffer TS is %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+
+ GST_SPLITMUX_LOCK (splitmux);
+
+ if (splitmux->input_state == SPLITMUX_INPUT_STATE_STOPPED) {
+ ret = GST_FLOW_FLUSHING;
+ goto beach;
+ }
+
+ /* If this buffer has a timestamp, advance the input timestamp of the
+ * stream */
+ if (GST_CLOCK_TIME_IS_VALID (ts)) {
+ GstClockTimeDiff running_time =
+ my_segment_to_running_time (&ctx->in_segment, ts);
+
+ GST_LOG_OBJECT (pad, "Buffer running TS is %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (running_time));
+
+ /* in_running_time only ever moves forward */
+ if (GST_CLOCK_STIME_IS_VALID (running_time)
+ && running_time > ctx->in_running_time)
+ ctx->in_running_time = running_time;
+ }
+
+ /* Try to make sure we have a valid running time */
+ if (!GST_CLOCK_STIME_IS_VALID (ctx->in_running_time)) {
+ ctx->in_running_time =
+ my_segment_to_running_time (&ctx->in_segment, ctx->in_segment.start);
+ }
+
+ GST_LOG_OBJECT (pad, "in running time now %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (ctx->in_running_time));
+
+ buf_info->run_ts = ctx->in_running_time;
+ buf_info->buf_size = gst_buffer_get_size (buf);
+ buf_info->duration = GST_BUFFER_DURATION (buf);
+
+ if (ctx->is_reference) {
+ /* initialize fragment_start_time */
+ if (splitmux->fragment_start_time == GST_CLOCK_STIME_NONE) {
+ splitmux->gop_start_time = splitmux->fragment_start_time =
+ buf_info->run_ts;
+ GST_LOG_OBJECT (splitmux, "Mux start time now %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (splitmux->fragment_start_time));
+
+ /* Also take this as the first start time when starting up,
+ * so that we start counting overflow from the first frame */
+ if (!GST_CLOCK_STIME_IS_VALID (splitmux->max_in_running_time))
+ splitmux->max_in_running_time = splitmux->fragment_start_time;
+ }
+
+ if (splitmux->tc_interval) {
+ /* Track the latest timecode seen on the reference stream for
+ * timecode-based splitting */
+ GstVideoTimeCodeMeta *tc_meta = gst_buffer_get_video_time_code_meta (buf);
+ if (tc_meta) {
+ video_time_code_replace (&splitmux->in_tc, &tc_meta->tc);
+
+ if (!splitmux->fragment_start_tc) {
+ /* also initialize fragment_start_tc */
+ video_time_code_replace (&splitmux->gop_start_tc, &tc_meta->tc);
+ video_time_code_replace (&splitmux->fragment_start_tc, &tc_meta->tc);
+
+ splitmux->next_fragment_start_tc_time =
+ calculate_next_max_timecode (splitmux, splitmux->in_tc,
+ ctx->in_running_time, NULL);
+ GST_DEBUG_OBJECT (splitmux, "Initialize next fragment start tc time %"
+ GST_TIME_FORMAT,
+ GST_TIME_ARGS (splitmux->next_fragment_start_tc_time));
+ }
+ }
+ }
+
+ /* Check whether we need to request next keyframe depending on
+ * current running time */
+ if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT) &&
+ request_next_keyframe (splitmux, buf, ctx->in_running_time) == FALSE) {
+ GST_WARNING_OBJECT (splitmux,
+ "Could not request a keyframe. Files may not split at the exact location they should");
+ }
+ }
+
+ GST_DEBUG_OBJECT (pad, "Buf TS %" GST_STIME_FORMAT
+ " total GOP bytes %" G_GUINT64_FORMAT,
+ GST_STIME_ARGS (buf_info->run_ts), splitmux->gop_total_bytes);
+
+ /* Run the input state machine until this buffer may be queued. The loop
+ * can block (GST_SPLITMUX_WAIT_INPUT) while other pads catch up. */
+ loop_again = TRUE;
+ do {
+ if (ctx->flushing)
+ break;
+
+ switch (splitmux->input_state) {
+ case SPLITMUX_INPUT_STATE_COLLECTING_GOP_START:
+ if (ctx->is_releasing) {
+ /* The pad belonging to this context is being released */
+ GST_WARNING_OBJECT (pad, "Pad is being released while the muxer is "
+ "running. Data might not drain correctly");
+ loop_again = FALSE;
+ } else if (ctx->is_reference) {
+ /* This is the reference context. If it's a keyframe,
+ * it marks the start of a new GOP and we should wait in
+ * check_completed_gop before continuing, but either way
+ * (keyframe or no, we'll pass this buffer through after
+ * so set loop_again to FALSE */
+ loop_again = FALSE;
+
+ if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+ /* Allow other input pads to catch up to here too */
+ splitmux->max_in_running_time = ctx->in_running_time;
+ GST_LOG_OBJECT (splitmux,
+ "Max in running time now %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (splitmux->max_in_running_time));
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+ break;
+ }
+ GST_INFO_OBJECT (pad,
+ "Have keyframe with running time %" GST_STIME_FORMAT,
+ GST_STIME_ARGS (ctx->in_running_time));
+ keyframe = TRUE;
+ splitmux->input_state = SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT;
+ splitmux->max_in_running_time = ctx->in_running_time;
+ GST_LOG_OBJECT (splitmux, "Max in running time now %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (splitmux->max_in_running_time));
+ /* Wake up other input pads to collect this GOP */
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+ check_completed_gop (splitmux, ctx);
+ } else {
+ /* Pass this buffer if the reference ctx is far enough ahead */
+ if (ctx->in_running_time < splitmux->max_in_running_time) {
+ loop_again = FALSE;
+ break;
+ }
+
+ /* We're still waiting for a keyframe on the reference pad, sleep */
+ GST_LOG_OBJECT (pad, "Sleeping for GOP start");
+ GST_SPLITMUX_WAIT_INPUT (splitmux);
+ GST_LOG_OBJECT (pad,
+ "Done sleeping for GOP start input state now %d",
+ splitmux->input_state);
+ }
+ break;
+ case SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT:{
+ /* We're collecting a GOP. If this is the reference context,
+ * we need to check if this is a keyframe that marks the start
+ * of the next GOP. If it is, it marks the end of the GOP we're
+ * collecting, so sleep and wait until all the other pads also
+ * reach that timestamp - at which point, we have an entire GOP
+ * and either go to ENDING_FILE or release this GOP to the muxer and
+ * go back to COLLECT_GOP_START. */
+
+ /* If we overran the target timestamp, it might be time to process
+ * the GOP, otherwise bail out for more data
+ */
+ GST_LOG_OBJECT (pad,
+ "Checking TS %" GST_STIME_FORMAT " against max %"
+ GST_STIME_FORMAT, GST_STIME_ARGS (ctx->in_running_time),
+ GST_STIME_ARGS (splitmux->max_in_running_time));
+
+ if (ctx->in_running_time < splitmux->max_in_running_time) {
+ loop_again = FALSE;
+ break;
+ }
+
+ GST_LOG_OBJECT (pad,
+ "Collected last packet of GOP. Checking other pads");
+ check_completed_gop (splitmux, ctx);
+ break;
+ }
+ case SPLITMUX_INPUT_STATE_FINISHING_UP:
+ loop_again = FALSE;
+ break;
+ default:
+ loop_again = FALSE;
+ break;
+ }
+ }
+ while (loop_again);
+
+ if (keyframe) {
+ /* Keyframe count on the reference stream; consulted by the queue
+ * overrun handling to decide whether queues may grow */
+ splitmux->queued_keyframes++;
+ buf_info->keyframe = TRUE;
+ }
+
+ /* Update total input byte counter for overflow detect */
+ splitmux->gop_total_bytes += buf_info->buf_size;
+ if (ctx->is_reference) {
+ splitmux->gop_reference_bytes += buf_info->buf_size;
+ }
+
+ /* Now add this buffer to the queue just before returning */
+ g_queue_push_head (&ctx->queued_bufs, buf_info);
+
+ GST_LOG_OBJECT (pad, "Returning to queue buffer %" GST_PTR_FORMAT
+ " run ts %" GST_STIME_FORMAT, buf, GST_STIME_ARGS (ctx->in_running_time));
+
+ GST_SPLITMUX_UNLOCK (splitmux);
+ GST_PAD_PROBE_INFO_FLOW_RETURN (info) = ret;
+ return GST_PAD_PROBE_PASS;
+
+beach:
+ /* Error/flushing exit: drop any accounting record we created */
+ GST_SPLITMUX_UNLOCK (splitmux);
+ if (buf_info)
+ mq_stream_buf_free (buf_info);
+ GST_PAD_PROBE_INFO_FLOW_RETURN (info) = ret;
+ return GST_PAD_PROBE_PASS;
+}
+
+/* Walk all stream contexts and enlarge any queue that has filled to its
+ * buffer limit, so its blocked upstream thread can make progress.
+ * Called with the splitmux lock held. */
+static void
+grow_blocked_queues (GstSplitMuxSink * splitmux)
+{
+ GList *walk;
+
+ /* Scan other queues for full-ness and grow them */
+ for (walk = splitmux->contexts; walk != NULL; walk = walk->next) {
+ MqStreamCtx *stream = (MqStreamCtx *) (walk->data);
+ guint limit;
+ guint len = g_queue_get_length (&stream->queued_bufs);
+
+ g_object_get (stream->q, "max-size-buffers", &limit, NULL);
+ GST_LOG_OBJECT (stream->q, "Queue len %u", len);
+
+ if (len < limit)
+ continue;
+
+ limit = len + 1;
+ GST_DEBUG_OBJECT (stream->q,
+ "Queue overflowed and needs enlarging. Growing to %u buffers",
+ limit);
+ g_object_set (stream->q, "max-size-buffers", limit, NULL);
+ }
+}
+
+/* "underrun" signal handler for a stream queue: a queue drained, so any
+ * other queue that is blocked at its limit gets more room. */
+static void
+handle_q_underrun (GstElement * q, gpointer user_data)
+{
+ MqStreamCtx *stream_ctx = (MqStreamCtx *) (user_data);
+ GstSplitMuxSink *sink = stream_ctx->splitmux;
+
+ GST_SPLITMUX_LOCK (sink);
+ GST_DEBUG_OBJECT (q,
+ "Queue reported underrun with %d keyframes and %d cmds enqueued",
+ sink->queued_keyframes, g_queue_get_length (&sink->out_cmd_q));
+ grow_blocked_queues (sink);
+ GST_SPLITMUX_UNLOCK (sink);
+}
+
+/* "overrun" signal handler for a stream queue: decide whether this queue
+ * may grow by one buffer. Growth is allowed while less than a full GOP
+ * (< 2 keyframes) is queued, while the output side has no pending
+ * commands, or when some other stream's queue is starved. */
+static void
+handle_q_overrun (GstElement * q, gpointer user_data)
+{
+ MqStreamCtx *stream_ctx = (MqStreamCtx *) (user_data);
+ GstSplitMuxSink *sink = stream_ctx->splitmux;
+ gboolean allow_grow = FALSE;
+
+ GST_SPLITMUX_LOCK (sink);
+ GST_DEBUG_OBJECT (q,
+ "Queue reported overrun with %d keyframes and %d cmds enqueued",
+ sink->queued_keyframes, g_queue_get_length (&sink->out_cmd_q));
+
+ if (sink->queued_keyframes < 2
+ || g_queue_get_length (&sink->out_cmd_q) < 1) {
+ /* Less than a full GOP queued, or no commands pending for output */
+ allow_grow = TRUE;
+ } else {
+ /* If another queue is starved, grow */
+ GList *walk;
+
+ for (walk = sink->contexts; walk != NULL; walk = walk->next) {
+ MqStreamCtx *other = (MqStreamCtx *) (walk->data);
+
+ if (other != stream_ctx && g_queue_get_length (&other->queued_bufs) < 1)
+ allow_grow = TRUE;
+ }
+ }
+ GST_SPLITMUX_UNLOCK (sink);
+
+ if (allow_grow) {
+ guint cur_limit;
+
+ g_object_get (q, "max-size-buffers", &cur_limit, NULL);
+ cur_limit++;
+
+ GST_DEBUG_OBJECT (q,
+ "Queue overflowed and needs enlarging. Growing to %u buffers",
+ cur_limit);
+
+ g_object_set (q, "max-size-buffers", cur_limit, NULL);
+ }
+}
+
+/* Called with SPLITMUX lock held */
+static const gchar *
+lookup_muxer_pad (GstSplitMuxSink * splitmux, const gchar * sinkpad_name)
+{
+ const gchar *mapped;
+
+ /* No pad map configured - nothing to look up */
+ if (splitmux->muxerpad_map == NULL)
+ return NULL;
+
+ if (sinkpad_name == NULL) {
+ GST_WARNING_OBJECT (splitmux,
+ "Can't look up request pad in pad map without providing a pad name");
+ return NULL;
+ }
+
+ mapped = gst_structure_get_string (splitmux->muxerpad_map, sinkpad_name);
+ if (mapped == NULL)
+ return NULL;
+
+ GST_INFO_OBJECT (splitmux, "Sink pad %s maps to muxer pad %s", sinkpad_name,
+ mapped);
+ /* NOTE: despite the const return type this is a newly-allocated copy;
+ * the caller takes ownership and must g_free() it */
+ return g_strdup (mapped);
+}
+
+/* GstElement::request_new_pad implementation: creates the internal muxer
+ * (if needed), finds or requests a matching muxer sink pad, inserts a
+ * per-stream queue in front of it and exposes a ghost pad with input and
+ * output probes attached. Returns NULL on failure. */
+static GstPad *
+gst_splitmux_sink_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
+{
+ GstSplitMuxSink *splitmux = (GstSplitMuxSink *) element;
+ GstPadTemplate *mux_template = NULL;
+ GstPad *ret = NULL, *muxpad = NULL;
+ GstElement *q;
+ GstPad *q_sink = NULL, *q_src = NULL;
+ gchar *gname, *qname;
+ gboolean is_primary_video = FALSE, is_video = FALSE,
+ muxer_is_requestpad = FALSE;
+ MqStreamCtx *ctx;
+ const gchar *muxer_padname = NULL;
+
+ GST_DEBUG_OBJECT (splitmux, "templ:%s, name:%s", templ->name_template, name);
+
+ GST_SPLITMUX_LOCK (splitmux);
+ if (!create_muxer (splitmux))
+ goto fail;
+ g_signal_emit (splitmux, signals[SIGNAL_MUXER_ADDED], 0, splitmux->muxer);
+
+ /* Only one primary "video" pad is allowed; "video_aux_*" pads are
+ * additional video streams */
+ if (g_str_equal (templ->name_template, "video") ||
+ g_str_has_prefix (templ->name_template, "video_aux_")) {
+ is_primary_video = g_str_equal (templ->name_template, "video");
+ if (is_primary_video && splitmux->have_video)
+ goto already_have_video;
+ is_video = TRUE;
+ }
+
+ /* See if there's a pad map and it lists this pad */
+ muxer_padname = lookup_muxer_pad (splitmux, name);
+
+ if (muxer_padname == NULL) {
+ /* No explicit mapping: search the muxer's pad templates, trying the
+ * most specific names first and falling back to generic ones */
+ if (is_video) {
+ /* FIXME: Look for a pad template with matching caps, rather than by name */
+ GST_DEBUG_OBJECT (element,
+ "searching for pad-template with name 'video_%%u'");
+ mux_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (splitmux->muxer), "video_%u");
+
+ /* Fallback to find sink pad templates named 'video' (flvmux) */
+ if (!mux_template) {
+ GST_DEBUG_OBJECT (element,
+ "searching for pad-template with name 'video'");
+ mux_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (splitmux->muxer), "video");
+ }
+ name = NULL;
+ } else {
+ GST_DEBUG_OBJECT (element, "searching for pad-template with name '%s'",
+ templ->name_template);
+ mux_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (splitmux->muxer), templ->name_template);
+
+ /* Fallback to find sink pad templates named 'audio' (flvmux) */
+ if (!mux_template && g_str_has_prefix (templ->name_template, "audio_")) {
+ GST_DEBUG_OBJECT (element,
+ "searching for pad-template with name 'audio'");
+ mux_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (splitmux->muxer), "audio");
+ name = NULL;
+ }
+ }
+
+ if (mux_template == NULL) {
+ GST_DEBUG_OBJECT (element,
+ "searching for pad-template with name 'sink_%%d'");
+ mux_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (splitmux->muxer), "sink_%d");
+ name = NULL;
+ }
+ if (mux_template == NULL) {
+ GST_DEBUG_OBJECT (element, "searching for pad-template with name 'sink'");
+ mux_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS
+ (splitmux->muxer), "sink");
+ name = NULL;
+ }
+
+ if (mux_template == NULL) {
+ GST_ERROR_OBJECT (element,
+ "unable to find a suitable sink pad-template on the muxer");
+ goto fail;
+ }
+ GST_DEBUG_OBJECT (element, "found sink pad-template '%s' on the muxer",
+ mux_template->name_template);
+
+ /* Obtain the actual pad: request it for request templates, or just
+ * take the static pad for always templates */
+ if (mux_template->presence == GST_PAD_REQUEST) {
+ GST_DEBUG_OBJECT (element, "requesting pad from pad-template");
+
+ muxpad =
+ gst_element_request_pad (splitmux->muxer, mux_template, name, caps);
+ muxer_is_requestpad = TRUE;
+ } else if (mux_template->presence == GST_PAD_ALWAYS) {
+ GST_DEBUG_OBJECT (element, "accessing always pad from pad-template");
+
+ muxpad =
+ gst_element_get_static_pad (splitmux->muxer,
+ mux_template->name_template);
+ } else {
+ GST_ERROR_OBJECT (element,
+ "unexpected pad presence %d", mux_template->presence);
+ goto fail;
+ }
+ } else {
+ /* Have a muxer pad name */
+ if (!(muxpad = gst_element_get_static_pad (splitmux->muxer, muxer_padname))) {
+ if ((muxpad =
+ gst_element_request_pad_simple (splitmux->muxer, muxer_padname)))
+ muxer_is_requestpad = TRUE;
+ }
+ /* lookup_muxer_pad returned an allocated copy - release it */
+ g_free ((gchar *) muxer_padname);
+ muxer_padname = NULL;
+ }
+
+ /* One way or another, we must have a muxer pad by now */
+ if (muxpad == NULL)
+ goto fail;
+
+ if (is_primary_video)
+ gname = g_strdup ("video");
+ else if (name == NULL)
+ gname = gst_pad_get_name (muxpad);
+ else
+ gname = g_strdup (name);
+
+ /* Create the per-stream queue that sits between the ghost pad and the
+ * muxer pad */
+ qname = g_strdup_printf ("queue_%s", gname);
+ if ((q = create_element (splitmux, "queue", qname, FALSE)) == NULL) {
+ g_free (qname);
+ goto fail;
+ }
+ g_free (qname);
+
+ gst_element_set_state (q, GST_STATE_TARGET (splitmux));
+
+ /* Limit by buffer count only; byte/time limits disabled */
+ g_object_set (q, "max-size-bytes", 0, "max-size-time", (guint64) (0),
+ "max-size-buffers", 5, NULL);
+
+ q_sink = gst_element_get_static_pad (q, "sink");
+ q_src = gst_element_get_static_pad (q, "src");
+
+ if (gst_pad_link (q_src, muxpad) != GST_PAD_LINK_OK) {
+ if (muxer_is_requestpad)
+ gst_element_release_request_pad (splitmux->muxer, muxpad);
+ gst_object_unref (GST_OBJECT (muxpad));
+ goto fail;
+ }
+
+ gst_object_unref (GST_OBJECT (muxpad));
+
+ ctx = mq_stream_ctx_new (splitmux);
+ /* Context holds a ref: */
+ ctx->q = gst_object_ref (q);
+ ctx->srcpad = q_src;
+ ctx->sinkpad = q_sink;
+ ctx->q_overrun_id =
+ g_signal_connect (q, "overrun", (GCallback) handle_q_overrun, ctx);
+ g_signal_connect (q, "underrun", (GCallback) handle_q_underrun, ctx);
+
+ /* Output-side probe on the queue's src pad */
+ ctx->src_pad_block_id =
+ gst_pad_add_probe (q_src,
+ GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH,
+ (GstPadProbeCallback) handle_mq_output, ctx, NULL);
+ /* The primary video pad (or the first pad added) becomes the reference
+ * stream that GOP splitting decisions are based on */
+ if (is_primary_video && splitmux->reference_ctx != NULL) {
+ splitmux->reference_ctx->is_reference = FALSE;
+ splitmux->reference_ctx = NULL;
+ }
+ if (splitmux->reference_ctx == NULL) {
+ splitmux->reference_ctx = ctx;
+ ctx->is_reference = TRUE;
+ }
+
+ ret = gst_ghost_pad_new_from_template (gname, q_sink, templ);
+ g_object_set_qdata ((GObject *) (ret), PAD_CONTEXT, ctx);
+
+ /* Input-side probe on the queue's sink pad */
+ ctx->sink_pad_block_id =
+ gst_pad_add_probe (q_sink,
+ GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM | GST_PAD_PROBE_TYPE_EVENT_FLUSH |
+ GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM,
+ (GstPadProbeCallback) handle_mq_input, ctx, NULL);
+
+ GST_DEBUG_OBJECT (splitmux, "splitmuxsink pad %" GST_PTR_FORMAT
+ " feeds queue pad %" GST_PTR_FORMAT, ret, q_sink);
+
+ splitmux->contexts = g_list_append (splitmux->contexts, ctx);
+
+ g_free (gname);
+
+ if (is_primary_video)
+ splitmux->have_video = TRUE;
+
+ gst_pad_set_active (ret, TRUE);
+ gst_element_add_pad (GST_ELEMENT (splitmux), ret);
+
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ return ret;
+fail:
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ if (q_sink)
+ gst_object_unref (q_sink);
+ if (q_src)
+ gst_object_unref (q_src);
+ return NULL;
+already_have_video:
+ GST_DEBUG_OBJECT (splitmux, "video sink pad already requested");
+ GST_SPLITMUX_UNLOCK (splitmux);
+ return NULL;
+}
+
+/* GstElement::release_pad implementation: tears down the per-stream
+ * context, removes its probes (unlocked, so blocked probes can return),
+ * releases the matching muxer request pad and removes the ghost pad. */
+static void
+gst_splitmux_sink_release_pad (GstElement * element, GstPad * pad)
+{
+ GstSplitMuxSink *splitmux = (GstSplitMuxSink *) element;
+ GstPad *muxpad = NULL;
+ MqStreamCtx *ctx =
+ (MqStreamCtx *) (g_object_get_qdata ((GObject *) (pad), PAD_CONTEXT));
+
+ GST_SPLITMUX_LOCK (splitmux);
+
+ if (splitmux->muxer == NULL)
+ goto fail; /* Elements don't exist yet - nothing to release */
+
+ GST_INFO_OBJECT (pad, "releasing request pad");
+
+ muxpad = gst_pad_get_peer (ctx->srcpad);
+
+ /* Remove the context from our consideration */
+ splitmux->contexts = g_list_remove (splitmux->contexts, ctx);
+
+ /* Drop the lock while removing probes: the probe callbacks take the
+ * same lock and may currently be blocked in it */
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ if (ctx->sink_pad_block_id) {
+ gst_pad_remove_probe (ctx->sinkpad, ctx->sink_pad_block_id);
+ gst_pad_send_event (ctx->sinkpad, gst_event_new_flush_start ());
+ }
+
+ if (ctx->src_pad_block_id)
+ gst_pad_remove_probe (ctx->srcpad, ctx->src_pad_block_id);
+
+ GST_SPLITMUX_LOCK (splitmux);
+
+ ctx->is_releasing = TRUE;
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+
+ /* Can release the context now */
+ mq_stream_ctx_free (ctx);
+ /* NOTE(review): ctx is already freed here; the check below only compares
+ * the pointer value, but clearing reference_ctx before freeing would be
+ * safer - confirm */
+ if (ctx == splitmux->reference_ctx)
+ splitmux->reference_ctx = NULL;
+
+ /* Release and free the muxer input */
+ if (muxpad) {
+ gst_element_release_request_pad (splitmux->muxer, muxpad);
+ gst_object_unref (muxpad);
+ }
+
+ if (GST_PAD_PAD_TEMPLATE (pad) &&
+ g_str_equal (GST_PAD_TEMPLATE_NAME_TEMPLATE (GST_PAD_PAD_TEMPLATE
+ (pad)), "video"))
+ splitmux->have_video = FALSE;
+
+ gst_element_remove_pad (element, pad);
+
+ /* Reset the internal elements only after all request pads are released */
+ if (splitmux->contexts == NULL)
+ gst_splitmux_reset_elements (splitmux);
+
+ /* Wake up other input streams to check if the completion conditions have
+ * changed */
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+
+fail:
+ GST_SPLITMUX_UNLOCK (splitmux);
+}
+
+/* Create an element of type @factory named @name and add it to the
+ * splitmuxsink bin. When @locked is set the element starts state-locked
+ * in NULL (the filename handling code unlocks it later).
+ * Returns the element (owned by the bin) or NULL after a warning. */
+static GstElement *
+create_element (GstSplitMuxSink * splitmux,
+ const gchar * factory, const gchar * name, gboolean locked)
+{
+ GstElement *elem;
+
+ elem = gst_element_factory_make (factory, name);
+ if (elem == NULL) {
+ g_warning ("Failed to create %s - splitmuxsink will not work", name);
+ return NULL;
+ }
+
+ if (locked) {
+ /* Ensure the sink starts in locked state and NULL - it will be changed
+ * by the filename setting code */
+ gst_element_set_locked_state (elem, TRUE);
+ gst_element_set_state (elem, GST_STATE_NULL);
+ }
+
+ if (!gst_bin_add (GST_BIN (splitmux), elem)) {
+ g_warning ("Could not add %s element - splitmuxsink will not work", name);
+ gst_object_unref (elem);
+ return NULL;
+ }
+
+ return elem;
+}
+
+/* Ensure splitmux->muxer exists, creating it on first use. Three paths:
+ * a default/factory-made muxer, an async-finalize factory muxer (with
+ * optional preset/properties), or a user-provided muxer element.
+ * Returns TRUE on success, FALSE on failure. */
+static gboolean
+create_muxer (GstSplitMuxSink * splitmux)
+{
+ /* Create internal elements */
+ if (splitmux->muxer == NULL) {
+ GstElement *provided_muxer = NULL;
+
+ GST_OBJECT_LOCK (splitmux);
+ if (splitmux->provided_muxer != NULL)
+ provided_muxer = gst_object_ref (splitmux->provided_muxer);
+ GST_OBJECT_UNLOCK (splitmux);
+
+ /* No user-provided muxer (or async mode without a factory name):
+ * instantiate from the configured factory, or the default */
+ if ((!splitmux->async_finalize && provided_muxer == NULL) ||
+ (splitmux->async_finalize && splitmux->muxer_factory == NULL)) {
+ if ((splitmux->muxer =
+ create_element (splitmux,
+ splitmux->muxer_factory ? splitmux->
+ muxer_factory : DEFAULT_MUXER, "muxer", FALSE)) == NULL)
+ goto fail;
+ } else if (splitmux->async_finalize) {
+ /* Async finalize mode: always build a fresh muxer from the factory
+ * and apply the configured preset/properties */
+ if ((splitmux->muxer =
+ create_element (splitmux, splitmux->muxer_factory, "muxer",
+ FALSE)) == NULL)
+ goto fail;
+ if (splitmux->muxer_preset && GST_IS_PRESET (splitmux->muxer))
+ gst_preset_load_preset (GST_PRESET (splitmux->muxer),
+ splitmux->muxer_preset);
+ if (splitmux->muxer_properties)
+ gst_structure_foreach (splitmux->muxer_properties,
+ _set_property_from_structure, splitmux->muxer);
+ } else {
+ /* Ensure it's not in locked state (we might be reusing an old element) */
+ gst_element_set_locked_state (provided_muxer, FALSE);
+ if (!gst_bin_add (GST_BIN (splitmux), provided_muxer)) {
+ g_warning ("Could not add muxer element - splitmuxsink will not work");
+ gst_object_unref (provided_muxer);
+ goto fail;
+ }
+
+ /* The bin now holds the ref; drop the one taken above */
+ splitmux->muxer = provided_muxer;
+ gst_object_unref (provided_muxer);
+ }
+
+ if (splitmux->use_robust_muxing) {
+ update_muxer_properties (splitmux);
+ }
+ }
+
+ return TRUE;
+fail:
+ return FALSE;
+}
+
+/* Locate the element that accepts a "location" property inside @e.
+ * If @e is not a bin, or itself has a "location" property, @e is
+ * returned directly; otherwise the bin's sink elements are iterated
+ * and the first child exposing "location" is returned (no ref taken).
+ * Returns NULL if no such element is found. */
+static GstElement *
+find_sink (GstElement * e)
+{
+ GstElement *res = NULL;
+ GstIterator *iter;
+ gboolean done = FALSE;
+ GValue data = { 0, };
+
+ if (!GST_IS_BIN (e))
+ return e;
+
+ if (g_object_class_find_property (G_OBJECT_GET_CLASS (e), "location") != NULL)
+ return e;
+
+ iter = gst_bin_iterate_sinks (GST_BIN (e));
+ while (!done) {
+ switch (gst_iterator_next (iter, &data)) {
+ case GST_ITERATOR_OK:
+ {
+ GstElement *child = g_value_get_object (&data);
+ if (g_object_class_find_property (G_OBJECT_GET_CLASS (child),
+ "location") != NULL) {
+ res = child;
+ done = TRUE;
+ }
+ g_value_reset (&data);
+ break;
+ }
+ case GST_ITERATOR_RESYNC:
+ gst_iterator_resync (iter);
+ break;
+ case GST_ITERATOR_DONE:
+ done = TRUE;
+ break;
+ case GST_ITERATOR_ERROR:
+ g_assert_not_reached ();
+ break;
+ }
+ }
+ g_value_unset (&data);
+ gst_iterator_free (iter);
+
+ return res;
+}
+
+/* Instantiate splitmux->active_sink / splitmux->sink if not created
+ * yet, mirroring create_muxer(): create DEFAULT_SINK, create from
+ * sink_factory (async-finalize mode, with optional preset/properties),
+ * or adopt the user-provided sink bin (locating the child with the
+ * "location" property via find_sink()). Finally links the muxer to
+ * the active sink. Returns TRUE on success. */
+static gboolean
+create_sink (GstSplitMuxSink * splitmux)
+{
+ GstElement *provided_sink = NULL;
+
+ if (splitmux->active_sink == NULL) {
+
+ /* Take a temporary ref on the provided sink under the object lock */
+ GST_OBJECT_LOCK (splitmux);
+ if (splitmux->provided_sink != NULL)
+ provided_sink = gst_object_ref (splitmux->provided_sink);
+ GST_OBJECT_UNLOCK (splitmux);
+
+ if ((!splitmux->async_finalize && provided_sink == NULL) ||
+ (splitmux->async_finalize && splitmux->sink_factory == NULL)) {
+ if ((splitmux->sink =
+ create_element (splitmux, DEFAULT_SINK, "sink", TRUE)) == NULL)
+ goto fail;
+ splitmux->active_sink = splitmux->sink;
+ } else if (splitmux->async_finalize) {
+ if ((splitmux->sink =
+ create_element (splitmux, splitmux->sink_factory, "sink",
+ TRUE)) == NULL)
+ goto fail;
+ if (splitmux->sink_preset && GST_IS_PRESET (splitmux->sink))
+ gst_preset_load_preset (GST_PRESET (splitmux->sink),
+ splitmux->sink_preset);
+ if (splitmux->sink_properties)
+ gst_structure_foreach (splitmux->sink_properties,
+ _set_property_from_structure, splitmux->sink);
+ splitmux->active_sink = splitmux->sink;
+ } else {
+ /* Ensure the sink starts in locked state and NULL - it will be changed
+ * by the filename setting code */
+ gst_element_set_locked_state (provided_sink, TRUE);
+ gst_element_set_state (provided_sink, GST_STATE_NULL);
+ if (!gst_bin_add (GST_BIN (splitmux), provided_sink)) {
+ g_warning ("Could not add sink elements - splitmuxsink will not work");
+ gst_object_unref (provided_sink);
+ goto fail;
+ }
+
+ splitmux->active_sink = provided_sink;
+
+ /* The bin holds a ref now, we can drop our tmp ref */
+ gst_object_unref (provided_sink);
+
+ /* Find the sink element */
+ splitmux->sink = find_sink (splitmux->active_sink);
+ if (splitmux->sink == NULL) {
+ g_warning
+ ("Could not locate sink element in provided sink - splitmuxsink will not work");
+ goto fail;
+ }
+ }
+
+#if 1
+ if (g_object_class_find_property (G_OBJECT_GET_CLASS (splitmux->sink),
+ "async") != NULL) {
+ /* async child elements are causing state change races and weird
+ * failures, so let's try and turn that off */
+ g_object_set (splitmux->sink, "async", FALSE, NULL);
+ }
+#endif
+
+ if (!gst_element_link (splitmux->muxer, splitmux->active_sink)) {
+ g_warning ("Failed to link muxer and sink- splitmuxsink will not work");
+ goto fail;
+ }
+ }
+
+ return TRUE;
+fail:
+ return FALSE;
+}
+
+#ifdef __GNUC__
+/* splitmux->location is a user-supplied printf pattern, so the
+ * non-literal format warning is expected here */
+#pragma GCC diagnostic ignored "-Wformat-nonliteral"
+#endif
+/* Compute and set the filename for the next fragment on the sink's
+ * "location" property. Resolution order: the format-location-full
+ * signal (with a sample of the first buffer), then the older
+ * format-location signal, then the location printf pattern.
+ * Increments fragment_id afterwards. */
+static void
+set_next_filename (GstSplitMuxSink * splitmux, MqStreamCtx * ctx)
+{
+ gchar *fname = NULL;
+ GstSample *sample;
+ GstCaps *caps;
+
+ gst_splitmux_sink_ensure_max_files (splitmux);
+
+ if (ctx->cur_out_buffer == NULL) {
+ GST_WARNING_OBJECT (splitmux, "Starting next file without buffer");
+ }
+
+ caps = gst_pad_get_current_caps (ctx->srcpad);
+ sample = gst_sample_new (ctx->cur_out_buffer, caps, &ctx->out_segment, NULL);
+ g_signal_emit (splitmux, signals[SIGNAL_FORMAT_LOCATION_FULL], 0,
+ splitmux->fragment_id, sample, &fname);
+ gst_sample_unref (sample);
+ if (caps)
+ gst_caps_unref (caps);
+
+ if (fname == NULL) {
+ /* Fallback to the old signal if the new one returned nothing */
+ g_signal_emit (splitmux, signals[SIGNAL_FORMAT_LOCATION], 0,
+ splitmux->fragment_id, &fname);
+ }
+
+ if (!fname)
+ fname = splitmux->location ?
+ g_strdup_printf (splitmux->location, splitmux->fragment_id) : NULL;
+
+ if (fname) {
+ GST_INFO_OBJECT (splitmux, "Setting file to %s", fname);
+ if (g_object_class_find_property (G_OBJECT_GET_CLASS (splitmux->sink),
+ "location") != NULL)
+ g_object_set (splitmux->sink, "location", fname, NULL);
+ g_free (fname);
+ }
+
+ splitmux->fragment_id++;
+}
+
+/* called with GST_SPLITMUX_LOCK */
+/* Post an async-start message if one is needed (need_async_start set).
+ * The splitmux lock is released while the message is handed to the
+ * parent bin's handle_message - presumably to avoid re-entrancy /
+ * lock-order problems while message handling runs (TODO confirm). */
+static void
+do_async_start (GstSplitMuxSink * splitmux)
+{
+ GstMessage *message;
+
+ if (!splitmux->need_async_start) {
+ GST_INFO_OBJECT (splitmux, "no async_start needed");
+ return;
+ }
+
+ splitmux->async_pending = TRUE;
+
+ GST_INFO_OBJECT (splitmux, "Sending async_start message");
+ message = gst_message_new_async_start (GST_OBJECT_CAST (splitmux));
+
+ GST_SPLITMUX_UNLOCK (splitmux);
+ GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST
+ (splitmux), message);
+ GST_SPLITMUX_LOCK (splitmux);
+}
+
+/* called with GST_SPLITMUX_LOCK */
+/* Post an async-done message if an async-start is pending, dropping
+ * the splitmux lock around the parent handle_message call (same
+ * pattern as do_async_start). Always clears need_async_start. */
+static void
+do_async_done (GstSplitMuxSink * splitmux)
+{
+ GstMessage *message;
+
+ if (splitmux->async_pending) {
+ GST_INFO_OBJECT (splitmux, "Sending async_done message");
+ splitmux->async_pending = FALSE;
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ message =
+ gst_message_new_async_done (GST_OBJECT_CAST (splitmux),
+ GST_CLOCK_TIME_NONE);
+ GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST
+ (splitmux), message);
+ GST_SPLITMUX_LOCK (splitmux);
+ }
+
+ splitmux->need_async_start = FALSE;
+}
+
+/* Reset all per-run tracking state: running times, byte counters,
+ * split request flags, queued split times, per-stream contexts and
+ * the pending output command queue. Used when (re)starting and on
+ * shutdown. Caller holds GST_SPLITMUX_LOCK (see change_state). */
+static void
+gst_splitmux_sink_reset (GstSplitMuxSink * splitmux)
+{
+ splitmux->max_in_running_time = GST_CLOCK_STIME_NONE;
+ splitmux->gop_start_time = splitmux->fragment_start_time =
+ GST_CLOCK_STIME_NONE;
+ splitmux->max_out_running_time = 0;
+ splitmux->fragment_total_bytes = 0;
+ splitmux->fragment_reference_bytes = 0;
+ splitmux->gop_total_bytes = 0;
+ splitmux->gop_reference_bytes = 0;
+ splitmux->muxed_out_bytes = 0;
+ splitmux->ready_for_output = FALSE;
+
+ g_atomic_int_set (&(splitmux->split_requested), FALSE);
+ g_atomic_int_set (&(splitmux->do_split_next_gop), FALSE);
+
+ splitmux->next_fku_time = GST_CLOCK_TIME_NONE;
+ gst_queue_array_clear (splitmux->times_to_split);
+
+ g_list_foreach (splitmux->contexts, (GFunc) mq_stream_ctx_reset, NULL);
+ splitmux->queued_keyframes = 0;
+
+ g_queue_foreach (&splitmux->out_cmd_q, (GFunc) out_cmd_buf_free, NULL);
+ g_queue_clear (&splitmux->out_cmd_q);
+}
+
+/* GstElement state change handler.
+ * NULL->READY creates the muxer/sink and emits muxer-added/sink-added;
+ * READY->PAUSED resets tracking state and later goes ASYNC until the
+ * (state-locked) sink is configured; transitions towards READY/NULL
+ * set the shutdown flag and wake any blocked input/output threads. */
+static GstStateChangeReturn
+gst_splitmux_sink_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstSplitMuxSink *splitmux = (GstSplitMuxSink *) element;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:{
+ GST_SPLITMUX_LOCK (splitmux);
+ if (!create_muxer (splitmux) || !create_sink (splitmux)) {
+ ret = GST_STATE_CHANGE_FAILURE;
+ GST_SPLITMUX_UNLOCK (splitmux);
+ goto beach;
+ }
+ /* Emit signals outside the failure path so apps can configure
+ * the freshly created elements */
+ g_signal_emit (splitmux, signals[SIGNAL_MUXER_ADDED], 0, splitmux->muxer);
+ g_signal_emit (splitmux, signals[SIGNAL_SINK_ADDED], 0, splitmux->sink);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ /* First fragment number starts from the configured start-index */
+ splitmux->fragment_id = splitmux->start_index;
+ break;
+ }
+ case GST_STATE_CHANGE_READY_TO_PAUSED:{
+ GST_SPLITMUX_LOCK (splitmux);
+ /* Make sure contexts and tracking times are cleared, in case we're being reused */
+ gst_splitmux_sink_reset (splitmux);
+ /* Start by collecting one input on each pad */
+ splitmux->input_state = SPLITMUX_INPUT_STATE_COLLECTING_GOP_START;
+ splitmux->output_state = SPLITMUX_OUTPUT_STATE_START_NEXT_FILE;
+
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ GST_SPLITMUX_STATE_LOCK (splitmux);
+ splitmux->shutdown = FALSE;
+ GST_SPLITMUX_STATE_UNLOCK (splitmux);
+ break;
+ }
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ case GST_STATE_CHANGE_READY_TO_READY:
+ g_atomic_int_set (&(splitmux->split_requested), FALSE);
+ g_atomic_int_set (&(splitmux->do_split_next_gop), FALSE);
+ /* Fall through */
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ GST_SPLITMUX_STATE_LOCK (splitmux);
+ splitmux->shutdown = TRUE;
+ GST_SPLITMUX_STATE_UNLOCK (splitmux);
+
+ GST_SPLITMUX_LOCK (splitmux);
+ gst_splitmux_sink_reset (splitmux);
+ splitmux->output_state = SPLITMUX_OUTPUT_STATE_STOPPED;
+ splitmux->input_state = SPLITMUX_INPUT_STATE_STOPPED;
+ /* Wake up any blocked threads */
+ GST_LOG_OBJECT (splitmux,
+ "State change -> NULL or READY. Waking threads");
+ GST_SPLITMUX_BROADCAST_INPUT (splitmux);
+ GST_SPLITMUX_BROADCAST_OUTPUT (splitmux);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto beach;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ splitmux->need_async_start = TRUE;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:{
+ /* Change state async, because our child sink might not
+ * be ready to do that for us yet if it's state is still locked */
+
+ splitmux->need_async_start = TRUE;
+ /* we want to go async to PAUSED until we managed to configure and add the
+ * sink */
+ GST_SPLITMUX_LOCK (splitmux);
+ do_async_start (splitmux);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ ret = GST_STATE_CHANGE_ASYNC;
+ break;
+ }
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ GST_SPLITMUX_LOCK (splitmux);
+ splitmux->fragment_id = 0;
+ /* Reset internal elements only if no pad contexts are using them */
+ if (splitmux->contexts == NULL)
+ gst_splitmux_reset_elements (splitmux);
+ do_async_done (splitmux);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+
+beach:
+ if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
+ /* Cleanup elements on failed transition out of NULL */
+ gst_splitmux_reset_elements (splitmux);
+ GST_SPLITMUX_LOCK (splitmux);
+ do_async_done (splitmux);
+ GST_SPLITMUX_UNLOCK (splitmux);
+ }
+ if (transition == GST_STATE_CHANGE_READY_TO_READY) {
+ /* READY to READY transition only happens when we're already
+ * in READY state, but a child element is in NULL, which
+ * happens when there's an error changing the state of the sink.
+ * We need to make sure not to fail the state transition, or
+ * the core won't transition us back to NULL successfully */
+ ret = GST_STATE_CHANGE_SUCCESS;
+ }
+ return ret;
+}
+
+/* Wrap fragment_id back to 0 once it reaches max-files (0 = no limit),
+ * so filename patterns cycle over a bounded set of files */
+static void
+gst_splitmux_sink_ensure_max_files (GstSplitMuxSink * splitmux)
+{
+ if (splitmux->max_files && splitmux->fragment_id >= splitmux->max_files) {
+ splitmux->fragment_id = 0;
+ }
+}
+
+/* Action signal handler: request a split at the next GOP boundary */
+static void
+split_now (GstSplitMuxSink * splitmux)
+{
+ g_atomic_int_set (&(splitmux->do_split_next_gop), TRUE);
+}
+
+/* Action signal handler: request a split after the current GOP */
+static void
+split_after (GstSplitMuxSink * splitmux)
+{
+ g_atomic_int_set (&(splitmux->split_requested), TRUE);
+}
+
+/* Action signal handler: queue a split at the given running time.
+ * If send-keyframe-requests is enabled, also push an upstream
+ * force-key-unit event on the reference stream so an encoder can
+ * place a keyframe at the requested split point. */
+static void
+split_at_running_time (GstSplitMuxSink * splitmux, GstClockTime split_time)
+{
+ gboolean send_keyframe_requests;
+
+ GST_SPLITMUX_LOCK (splitmux);
+ gst_queue_array_push_tail_struct (splitmux->times_to_split, &split_time);
+ send_keyframe_requests = splitmux->send_keyframe_requests;
+ GST_SPLITMUX_UNLOCK (splitmux);
+
+ if (send_keyframe_requests) {
+ GstEvent *ev =
+ gst_video_event_new_upstream_force_key_unit (split_time, TRUE, 0);
+ GST_INFO_OBJECT (splitmux, "Requesting next keyframe at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (split_time));
+ if (!gst_pad_push_event (splitmux->reference_ctx->sinkpad, ev)) {
+ GST_WARNING_OBJECT (splitmux,
+ "Could not request keyframe at %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (split_time));
+ }
+ }
+}
diff --git a/gst/multifile/gstsplitmuxsink.h b/gst/multifile/gstsplitmuxsink.h
new file mode 100644
index 0000000000..e268cb4783
--- /dev/null
+++ b/gst/multifile/gstsplitmuxsink.h
@@ -0,0 +1,225 @@
+/* GStreamer split muxer bin
+ * Copyright (C) 2014-2019 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_SPLITMUXSINK_H__
+#define __GST_SPLITMUXSINK_H__
+
+#include <gst/gst.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/base/base.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_SPLITMUX_SINK (gst_splitmux_sink_get_type())
+#define GST_SPLITMUX_SINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SPLITMUX_SINK,GstSplitMuxSink))
+#define GST_SPLITMUX_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SPLITMUX_SINK,GstSplitMuxSinkClass))
+#define GST_IS_SPLITMUX_SINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SPLITMUX_SINK))
+#define GST_IS_SPLITMUX_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SPLITMUX_SINK))
+typedef struct _GstSplitMuxSink GstSplitMuxSink;
+typedef struct _GstSplitMuxSinkClass GstSplitMuxSinkClass;
+
+GType gst_splitmux_sink_get_type (void);
+
+/* State machine for the input side (GOP collection) */
+typedef enum _SplitMuxInputState
+{
+ SPLITMUX_INPUT_STATE_STOPPED,
+ SPLITMUX_INPUT_STATE_COLLECTING_GOP_START, /* Waiting for the next ref ctx keyframe */
+ SPLITMUX_INPUT_STATE_WAITING_GOP_COLLECT, /* Waiting for all streams to collect GOP */
+ SPLITMUX_INPUT_STATE_FINISHING_UP /* Got EOS from reference ctx, send everything */
+} SplitMuxInputState;
+
+/* State machine for the output side (feeding the muxer/sink) */
+typedef enum _SplitMuxOutputState
+{
+ SPLITMUX_OUTPUT_STATE_STOPPED,
+ SPLITMUX_OUTPUT_STATE_AWAITING_COMMAND, /* Waiting first command packet from input */
+ SPLITMUX_OUTPUT_STATE_OUTPUT_GOP, /* Outputting a collected GOP */
+ SPLITMUX_OUTPUT_STATE_ENDING_FILE, /* Finishing the current fragment */
+ SPLITMUX_OUTPUT_STATE_ENDING_STREAM, /* Finishing up the entire stream due to input EOS */
+ SPLITMUX_OUTPUT_STATE_START_NEXT_FILE /* Restarting after ENDING_FILE */
+} SplitMuxOutputState;
+
+/* Command passed from the input side to the output thread (queued in
+ * out_cmd_q) */
+typedef struct _SplitMuxOutputCommand
+{
+ gboolean start_new_fragment; /* Whether to start a new fragment before advancing output ts */
+ GstClockTimeDiff max_output_ts; /* Set the limit to stop GOP output */
+} SplitMuxOutputCommand;
+
+/* Bookkeeping for a queued buffer - presumably one entry per buffer in
+ * a stream context's queued_bufs (TODO confirm against queue code) */
+typedef struct _MqStreamBuf
+{
+ gboolean keyframe; /* TRUE if the buffer starts a keyframe */
+ GstClockTimeDiff run_ts; /* running-time of the buffer */
+ guint64 buf_size; /* size in bytes */
+ GstClockTime duration; /* buffer duration */
+} MqStreamBuf;
+
+/* Per-input-stream context: queue element, pads, pad probe ids,
+ * in/out segments and running times, and EOS/flush tracking flags */
+typedef struct _MqStreamCtx
+{
+ GstSplitMuxSink *splitmux;
+
+ /* Signal handler / pad probe ids */
+ guint q_overrun_id;
+ guint sink_pad_block_id;
+ guint src_pad_block_id;
+ gulong fragment_block_id;
+
+ /* TRUE for the reference stream used to drive fragment decisions */
+ gboolean is_reference;
+
+ gboolean flushing;
+ gboolean in_eos;
+ gboolean out_eos;
+ gboolean out_eos_async_done;
+ gboolean need_unblock;
+ gboolean caps_change;
+ gboolean is_releasing;
+
+ GstSegment in_segment;
+ GstSegment out_segment;
+
+ GstClockTimeDiff in_running_time;
+ GstClockTimeDiff out_running_time;
+
+ /* Internal queue element and its buffer bookkeeping */
+ GstElement *q;
+ GQueue queued_bufs;
+
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ GstBuffer *cur_out_buffer;
+ GstEvent *pending_gap;
+} MqStreamCtx;
+
+/* Instance structure for splitmuxsink: a GstBin containing per-stream
+ * queues feeding a (possibly user-provided) muxer and sink, with state
+ * tracking for GOP collection and fragment switching. The main mutex
+ * is `lock`; `state_lock` guards the shutdown flag. */
+struct _GstSplitMuxSink
+{
+ GstBin parent;
+
+ GMutex state_lock;
+ gboolean shutdown;
+
+ GMutex lock;
+
+ GCond input_cond;
+ GCond output_cond;
+
+ gdouble mux_overhead;
+
+ GstClockTime threshold_time;
+ guint64 threshold_bytes;
+ guint max_files;
+ gboolean send_keyframe_requests;
+ gchar *threshold_timecode_str;
+ /* created from threshold_timecode_str */
+ GstVideoTimeCodeInterval *tc_interval;
+ GstClockTime alignment_threshold;
+ /* expected running time of next force keyframe unit event */
+ GstClockTime next_fku_time;
+
+ gboolean reset_muxer;
+
+ GstElement *muxer;
+ GstElement *sink;
+
+ GstElement *provided_muxer;
+
+ GstElement *provided_sink;
+ GstElement *active_sink;
+
+ gboolean ready_for_output;
+
+ gchar *location;
+ guint fragment_id;
+ guint start_index;
+ GList *contexts;
+
+ SplitMuxInputState input_state;
+ GstClockTimeDiff max_in_running_time;
+ /* Number of bytes sent to the
+ * current fragment */
+ guint64 fragment_total_bytes;
+ /* Number of bytes for the reference
+ * stream in this fragment */
+ guint64 fragment_reference_bytes;
+
+ /* Number of bytes we've collected into
+ * the GOP that's being collected */
+ guint64 gop_total_bytes;
+ /* Number of bytes from the reference context
+ * that we've collected into the current GOP */
+ guint64 gop_reference_bytes;
+ /* Start time of the current fragment */
+ GstClockTimeDiff fragment_start_time;
+ /* Start time of the current GOP */
+ GstClockTimeDiff gop_start_time;
+ /* The last timecode we have */
+ GstVideoTimeCode *in_tc;
+ /* Start timecode of the current fragment */
+ GstVideoTimeCode *fragment_start_tc;
+ /* Start timecode of the current GOP */
+ GstVideoTimeCode *gop_start_tc;
+ /* expected running time of next fragment in timecode mode */
+ GstClockTime next_fragment_start_tc_time;
+
+ GQueue out_cmd_q; /* Queue of commands for output thread */
+
+ SplitMuxOutputState output_state;
+ GstClockTimeDiff max_out_running_time;
+
+ guint64 muxed_out_bytes;
+
+ MqStreamCtx *reference_ctx;
+ /* Count of queued keyframes in the reference ctx */
+ guint queued_keyframes;
+
+ gboolean switching_fragment;
+
+ gboolean have_video;
+
+ gboolean need_async_start;
+ gboolean async_pending;
+
+ gboolean use_robust_muxing;
+ gboolean muxer_has_reserved_props;
+
+ /* Split request flags, accessed atomically */
+ gboolean split_requested;
+ gboolean do_split_next_gop;
+ GstQueueArray *times_to_split;
+
+ /* Async finalize options */
+ gboolean async_finalize;
+ gchar *muxer_factory;
+ gchar *muxer_preset;
+ GstStructure *muxer_properties;
+ gchar *sink_factory;
+ gchar *sink_preset;
+ GstStructure *sink_properties;
+
+ GstStructure *muxerpad_map;
+};
+
+/* Class structure: vfuncs backing the split-now / split-after /
+ * split-at-running-time action signals */
+struct _GstSplitMuxSinkClass
+{
+ GstBinClass parent_class;
+
+ /* actions */
+ void (*split_now) (GstSplitMuxSink * splitmux);
+ void (*split_after) (GstSplitMuxSink * splitmux);
+ void (*split_at_running_time) (GstSplitMuxSink * splitmux, GstClockTime split_time);
+};
+
+GST_ELEMENT_REGISTER_DECLARE (splitmuxsink);
+
+G_END_DECLS
+#endif /* __GST_SPLITMUXSINK_H__ */
diff --git a/gst/multifile/gstsplitmuxsrc.c b/gst/multifile/gstsplitmuxsrc.c
new file mode 100644
index 0000000000..6f220ed9b8
--- /dev/null
+++ b/gst/multifile/gstsplitmuxsrc.c
@@ -0,0 +1,1544 @@
+/* GStreamer Split Demuxer bin that recombines files created by
+ * the splitmuxsink element.
+ *
+ * Copyright (C) <2014> Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-splitmuxsrc
+ * @title: splitmuxsrc
+ * @short_description: Split Demuxer bin that recombines files created by
+ * the splitmuxsink element.
+ *
+ * This element reads a set of input files created by the splitmuxsink element
+ * containing contiguous elementary streams split across multiple files.
+ *
+ * This element is similar to splitfilesrc, except that it recombines the
+ * streams in each file part at the demuxed elementary level, rather than
+ * as a single larger bytestream.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 splitmuxsrc location=video*.mov ! decodebin ! xvimagesink
+ * ]| Demux each file part and output the video stream as one continuous stream
+ * |[
+ * gst-launch-1.0 playbin uri="splitmux://path/to/foo.mp4.*"
+ * ]| Play back a set of files created by splitmuxsink
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include "gstsplitmuxsrc.h"
+#include "gstsplitutils.h"
+
+#include "../../gst-libs/gst/gst-i18n-plugin.h"
+
+GST_DEBUG_CATEGORY (splitmux_debug);
+#define GST_CAT_DEFAULT splitmux_debug
+
+#define FIXED_TS_OFFSET (1000*GST_SECOND)
+
+enum
+{
+ PROP_0,
+ PROP_LOCATION
+};
+
+enum
+{
+ SIGNAL_FORMAT_LOCATION,
+ SIGNAL_LAST
+};
+
+static guint signals[SIGNAL_LAST];
+
+static GstStaticPadTemplate video_src_template =
+GST_STATIC_PAD_TEMPLATE ("video",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate video_aux_src_template =
+GST_STATIC_PAD_TEMPLATE ("video_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate audio_src_template =
+GST_STATIC_PAD_TEMPLATE ("audio_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate subtitle_src_template =
+GST_STATIC_PAD_TEMPLATE ("subtitle_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+static GstStateChangeReturn gst_splitmux_src_change_state (GstElement *
+ element, GstStateChange transition);
+static void gst_splitmux_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_splitmux_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_splitmux_src_dispose (GObject * object);
+static void gst_splitmux_src_finalize (GObject * object);
+static gboolean gst_splitmux_src_start (GstSplitMuxSrc * splitmux);
+static gboolean gst_splitmux_src_stop (GstSplitMuxSrc * splitmux);
+static void splitmux_src_pad_constructed (GObject * pad);
+static gboolean splitmux_src_pad_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean splitmux_src_pad_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static void splitmux_src_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+
+
+static GstPad *gst_splitmux_find_output_pad (GstSplitMuxPartReader * part,
+ GstPad * pad, GstSplitMuxSrc * splitmux);
+static gboolean gst_splitmux_end_of_part (GstSplitMuxSrc * splitmux,
+ SplitMuxSrcPad * pad);
+static gboolean gst_splitmux_check_new_caps (SplitMuxSrcPad * splitpad,
+ GstEvent * event);
+static gboolean gst_splitmux_src_prepare_next_part (GstSplitMuxSrc * splitmux);
+static gboolean gst_splitmux_src_activate_part (GstSplitMuxSrc * splitmux,
+ guint part, GstSeekFlags extra_flags);
+
+#define _do_init \
+ G_IMPLEMENT_INTERFACE(GST_TYPE_URI_HANDLER, splitmux_src_uri_handler_init); \
+ GST_DEBUG_CATEGORY_INIT (splitmux_debug, "splitmuxsrc", 0, "Split File Demuxing Source");
+#define gst_splitmux_src_parent_class parent_class
+
+G_DEFINE_TYPE_EXTENDED (GstSplitMuxSrc, gst_splitmux_src, GST_TYPE_BIN, 0,
+ _do_init);
+GST_ELEMENT_REGISTER_DEFINE (splitmuxsrc, "splitmuxsrc", GST_RANK_NONE,
+ GST_TYPE_SPLITMUX_SRC);
+
+/* GstURIHandler: splitmuxsrc is a source-side URI handler */
+static GstURIType
+splitmux_src_uri_get_type (GType type)
+{
+ return GST_URI_SRC;
+}
+
+/* GstURIHandler: only the "splitmux" protocol is handled */
+static const gchar *const *
+splitmux_src_uri_get_protocols (GType type)
+{
+ static const gchar *protocols[] = { "splitmux", NULL };
+
+ return protocols;
+}
+
+/* GstURIHandler: build "splitmux://<location>" from the current
+ * location property, or NULL if none is set. Caller owns the string. */
+static gchar *
+splitmux_src_uri_get_uri (GstURIHandler * handler)
+{
+ GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (handler);
+ gchar *ret = NULL;
+
+ GST_OBJECT_LOCK (splitmux);
+ if (splitmux->location)
+ ret = g_strdup_printf ("splitmux://%s", splitmux->location);
+ GST_OBJECT_UNLOCK (splitmux);
+ return ret;
+}
+
+/* GstURIHandler: parse a "splitmux://<glob>" URI and store the glob
+ * pattern as the location. Posts an element error and sets @err on a
+ * non-splitmux URI. Returns TRUE on success. */
+static gboolean
+splitmux_src_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** err)
+{
+ GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (handler);
+ gchar *protocol, *location;
+
+ protocol = gst_uri_get_protocol (uri);
+ if (protocol == NULL || !g_str_equal (protocol, "splitmux"))
+ goto wrong_uri;
+ g_free (protocol);
+
+ location = gst_uri_get_location (uri);
+ GST_OBJECT_LOCK (splitmux);
+ g_free (splitmux->location);
+ splitmux->location = location;
+ GST_OBJECT_UNLOCK (splitmux);
+
+ return TRUE;
+
+wrong_uri:
+ g_free (protocol);
+ GST_ELEMENT_ERROR (splitmux, RESOURCE, READ, (NULL),
+ ("Error parsing uri %s", uri));
+ g_set_error_literal (err, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Could not parse splitmux URI");
+ return FALSE;
+}
+
+/* Wire up the GstURIHandler interface vtable */
+static void
+splitmux_src_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+ GstURIHandlerInterface *iface = (GstURIHandlerInterface *) (g_iface);
+
+ iface->get_type = splitmux_src_uri_get_type;
+ iface->get_protocols = splitmux_src_uri_get_protocols;
+ iface->set_uri = splitmux_src_uri_set_uri;
+ iface->get_uri = splitmux_src_uri_get_uri;
+}
+
+
+/* Class init: install vfuncs, element metadata, the four sometimes
+ * src pad templates, the location property and the format-location
+ * signal */
+static void
+gst_splitmux_src_class_init (GstSplitMuxSrcClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_splitmux_src_set_property;
+ gobject_class->get_property = gst_splitmux_src_get_property;
+ gobject_class->dispose = gst_splitmux_src_dispose;
+ gobject_class->finalize = gst_splitmux_src_finalize;
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Split File Demuxing Bin", "Generic/Bin/Demuxer",
+ "Source that reads a set of files created by splitmuxsink",
+ "Jan Schmidt <jan@centricular.com>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &video_aux_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &audio_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &subtitle_src_template);
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_splitmux_src_change_state);
+
+ g_object_class_install_property (gobject_class, PROP_LOCATION,
+ g_param_spec_string ("location", "File Input Pattern",
+ "Glob pattern for the location of the files to read", NULL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstSplitMuxSrc::format-location:
+ * @splitmux: the #GstSplitMuxSrc
+ *
+ * Returns: A NULL-terminated sorted array of strings containing the
+ * filenames of the input files. The array will be freed internally
+ * using g_strfreev()
+ *
+ * Since: 1.8
+ */
+ signals[SIGNAL_FORMAT_LOCATION] =
+ g_signal_new ("format-location", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_STRV, 0);
+}
+
+/* Instance init: set up the main lock, the pads rwlock, an unknown
+ * total duration and a TIME-format play segment */
+static void
+gst_splitmux_src_init (GstSplitMuxSrc * splitmux)
+{
+ g_mutex_init (&splitmux->lock);
+ g_rw_lock_init (&splitmux->pads_rwlock);
+ splitmux->total_duration = GST_CLOCK_TIME_NONE;
+ gst_segment_init (&splitmux->play_segment, GST_FORMAT_TIME);
+}
+
+/* Dispose: remove all exposed src pads from the element and free the
+ * pad list, under the pads write lock */
+static void
+gst_splitmux_src_dispose (GObject * object)
+{
+ GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (object);
+ GList *cur;
+
+ SPLITMUX_SRC_PADS_WLOCK (splitmux);
+
+ for (cur = g_list_first (splitmux->pads);
+ cur != NULL; cur = g_list_next (cur)) {
+ GstPad *pad = GST_PAD (cur->data);
+ gst_element_remove_pad (GST_ELEMENT (splitmux), pad);
+ }
+ g_list_free (splitmux->pads);
+ splitmux->n_pads = 0;
+ splitmux->pads = NULL;
+ SPLITMUX_SRC_PADS_WUNLOCK (splitmux);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* Finalize: release the locks and the location string */
+static void
+gst_splitmux_src_finalize (GObject * object)
+{
+ GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (object);
+ g_mutex_clear (&splitmux->lock);
+ g_rw_lock_clear (&splitmux->pads_rwlock);
+ g_free (splitmux->location);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject property setter: location (glob pattern), guarded by the
+ * object lock */
+static void
+gst_splitmux_src_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:{
+ GST_OBJECT_LOCK (splitmux);
+ g_free (splitmux->location);
+ splitmux->location = g_value_dup_string (value);
+ GST_OBJECT_UNLOCK (splitmux);
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter: location, guarded by the object lock */
+static void
+gst_splitmux_src_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ GST_OBJECT_LOCK (splitmux);
+ g_value_set_string (value, splitmux->location);
+ GST_OBJECT_UNLOCK (splitmux);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Post an async-start message via the parent bin, marking async as
+ * pending. Serialized against do_async_done via the MSG lock. */
+static void
+do_async_start (GstSplitMuxSrc * splitmux)
+{
+ GstMessage *message;
+
+ SPLITMUX_SRC_MSG_LOCK (splitmux);
+ splitmux->async_pending = TRUE;
+
+ message = gst_message_new_async_start (GST_OBJECT_CAST (splitmux));
+ GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST (splitmux),
+ message);
+ SPLITMUX_SRC_MSG_UNLOCK (splitmux);
+}
+
+/* Post an async-done message via the parent bin if an async-start is
+ * still pending. Safe to call multiple times; a no-op otherwise. */
+static void
+do_async_done (GstSplitMuxSrc * splitmux)
+{
+ GstMessage *message;
+
+ SPLITMUX_SRC_MSG_LOCK (splitmux);
+ if (splitmux->async_pending) {
+ message =
+ gst_message_new_async_done (GST_OBJECT_CAST (splitmux),
+ GST_CLOCK_TIME_NONE);
+ GST_BIN_CLASS (parent_class)->handle_message (GST_BIN_CAST (splitmux),
+ message);
+
+ splitmux->async_pending = FALSE;
+ }
+ SPLITMUX_SRC_MSG_UNLOCK (splitmux);
+}
+
+/* GstElement state change handler. READY->PAUSED posts async-start,
+ * kicks off gst_splitmux_src_start() and returns ASYNC (async-done is
+ * posted once part preparation completes, see the bus handler);
+ * transitions towards READY/NULL stop the element. */
+static GstStateChangeReturn
+gst_splitmux_src_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstSplitMuxSrc *splitmux = (GstSplitMuxSrc *) element;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:{
+ break;
+ }
+ case GST_STATE_CHANGE_READY_TO_PAUSED:{
+ do_async_start (splitmux);
+
+ if (!gst_splitmux_src_start (splitmux)) {
+ /* Undo the async-start before failing the transition */
+ do_async_done (splitmux);
+ return GST_STATE_CHANGE_FAILURE;
+ }
+ break;
+ }
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ /* Make sure the element will shut down */
+ if (!gst_splitmux_src_stop (splitmux))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ do_async_done (splitmux);
+ return ret;
+ }
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* Stay async until all file parts are prepared */
+ ret = GST_STATE_CHANGE_ASYNC;
+ break;
+ default:
+ break;
+ }
+
+
+ return ret;
+}
+
+/* Activate part 0 for playback once all parts are prepared. Invoked
+ * via gst_element_call_async from the part bus handler; only acts if
+ * the element is still running. */
+static void
+gst_splitmux_src_activate_first_part (GstSplitMuxSrc * splitmux)
+{
+ SPLITMUX_SRC_LOCK (splitmux);
+ if (splitmux->running) {
+ if (!gst_splitmux_src_activate_part (splitmux, 0, GST_SEEK_FLAG_NONE)) {
+ GST_ELEMENT_ERROR (splitmux, RESOURCE, OPEN_READ, (NULL),
+ ("Failed to activate first part for playback"));
+ }
+ }
+ SPLITMUX_SRC_UNLOCK (splitmux);
+}
+
+/* Sync bus handler installed on every part reader's bus.
+ * Drives the preparation sequence: each ASYNC_DONE from a part adds its
+ * duration to the totals and triggers preparation of the next part; when
+ * all parts are prepared (or one fails), async-done is posted and the
+ * first part is activated. ERROR from a part that is still preparing
+ * truncates the set of playable parts rather than failing outright. */
+static GstBusSyncReply
+gst_splitmux_part_bus_handler (GstBus * bus, GstMessage * msg,
+    gpointer user_data)
+{
+  GstSplitMuxSrc *splitmux = user_data;
+
+  switch (GST_MESSAGE_TYPE (msg)) {
+    case GST_MESSAGE_ASYNC_DONE:{
+      guint idx = splitmux->num_prepared_parts;
+      gboolean need_no_more_pads;
+
+      if (idx >= splitmux->num_parts) {
+        /* Shouldn't really happen! */
+        do_async_done (splitmux);
+        g_warn_if_reached ();
+        break;
+      }
+
+      GST_DEBUG_OBJECT (splitmux, "Prepared file part %s (%u)",
+          splitmux->parts[idx]->path, idx);
+
+      /* signal no-more-pads as we have all pads at this point now */
+      SPLITMUX_SRC_LOCK (splitmux);
+      need_no_more_pads = !splitmux->pads_complete;
+      splitmux->pads_complete = TRUE;
+      SPLITMUX_SRC_UNLOCK (splitmux);
+
+      if (need_no_more_pads) {
+        GST_DEBUG_OBJECT (splitmux, "Signalling no-more-pads");
+        gst_element_no_more_pads (GST_ELEMENT_CAST (splitmux));
+      }
+
+      /* Extend our total duration to cover this part */
+      GST_OBJECT_LOCK (splitmux);
+      splitmux->total_duration +=
+          gst_splitmux_part_reader_get_duration (splitmux->parts[idx]);
+      splitmux->play_segment.duration = splitmux->total_duration;
+      GST_OBJECT_UNLOCK (splitmux);
+
+      /* The next part's timestamps continue from this offset */
+      splitmux->end_offset =
+          gst_splitmux_part_reader_get_end_offset (splitmux->parts[idx]);
+
+      GST_DEBUG_OBJECT (splitmux,
+          "Duration %" GST_TIME_FORMAT ", total duration now: %" GST_TIME_FORMAT
+          " and end offset %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (gst_splitmux_part_reader_get_duration (splitmux->parts
+                  [idx])), GST_TIME_ARGS (splitmux->total_duration),
+          GST_TIME_ARGS (splitmux->end_offset));
+
+      splitmux->num_prepared_parts++;
+
+      /* If we're done or preparing the next part fails, finish here */
+      if (splitmux->num_prepared_parts >= splitmux->num_parts
+          || !gst_splitmux_src_prepare_next_part (splitmux)) {
+        /* Store how many parts we actually prepared in the end */
+        splitmux->num_parts = splitmux->num_prepared_parts;
+        do_async_done (splitmux);
+
+        /* All done preparing, activate the first part. Done through
+         * call_async since we are inside a sync bus handler here */
+        GST_INFO_OBJECT (splitmux,
+            "All parts prepared. Total duration %" GST_TIME_FORMAT
+            " Activating first part", GST_TIME_ARGS (splitmux->total_duration));
+        gst_element_call_async (GST_ELEMENT_CAST (splitmux),
+            (GstElementCallAsyncFunc) gst_splitmux_src_activate_first_part,
+            NULL, NULL);
+      }
+
+      break;
+    }
+    case GST_MESSAGE_ERROR:{
+      GST_ERROR_OBJECT (splitmux,
+          "Got error message from part %" GST_PTR_FORMAT ": %" GST_PTR_FORMAT,
+          GST_MESSAGE_SRC (msg), msg);
+      if (splitmux->num_prepared_parts < splitmux->num_parts) {
+        /* Error during preparation: limit playback to the parts prepared
+         * so far. Failure on the very first part is fatal. */
+        guint idx = splitmux->num_prepared_parts;
+
+        if (idx == 0) {
+          GST_ERROR_OBJECT (splitmux,
+              "Failed to prepare first file part %s for playback",
+              splitmux->parts[idx]->path);
+          GST_ELEMENT_ERROR (splitmux, RESOURCE, OPEN_READ, (NULL),
+              ("Failed to prepare first file part %s for playback",
+                  splitmux->parts[idx]->path));
+        } else {
+          GST_WARNING_OBJECT (splitmux,
+              "Failed to prepare file part %s. Cannot play past there.",
+              splitmux->parts[idx]->path);
+          GST_ELEMENT_WARNING (splitmux, RESOURCE, READ, (NULL),
+              ("Failed to prepare file part %s. Cannot play past there.",
+                  splitmux->parts[idx]->path));
+        }
+
+        /* Store how many parts we actually prepared in the end */
+        splitmux->num_parts = splitmux->num_prepared_parts;
+        do_async_done (splitmux);
+
+        if (idx > 0) {
+          /* All done preparing, activate the first part */
+          GST_INFO_OBJECT (splitmux,
+              "All parts prepared. Total duration %" GST_TIME_FORMAT
+              " Activating first part",
+              GST_TIME_ARGS (splitmux->total_duration));
+          gst_element_call_async (GST_ELEMENT_CAST (splitmux),
+              (GstElementCallAsyncFunc) gst_splitmux_src_activate_first_part,
+              NULL, NULL);
+        }
+      } else {
+        /* Need to update the message source so that it's part of the element
+         * hierarchy the application would expect */
+        msg = gst_message_copy (msg);
+        gst_object_replace ((GstObject **) & msg->src, (GstObject *) splitmux);
+        gst_element_post_message (GST_ELEMENT_CAST (splitmux), msg);
+      }
+      break;
+    }
+    default:
+      break;
+  }
+
+  return GST_BUS_PASS;
+}
+
+/* Construct a part reader for @filename, wiring its pad callback to
+ * gst_splitmux_find_output_pad() and its bus messages (synchronously)
+ * into gst_splitmux_part_bus_handler(). */
+static GstSplitMuxPartReader *
+gst_splitmux_part_create (GstSplitMuxSrc * splitmux, char *filename)
+{
+  GstSplitMuxPartReader *reader =
+      g_object_new (GST_TYPE_SPLITMUX_PART_READER, NULL);
+  GstBus *part_bus;
+
+  gst_splitmux_part_reader_set_callbacks (reader, splitmux,
+      (GstSplitMuxPartReaderPadCb) gst_splitmux_find_output_pad);
+  gst_splitmux_part_reader_set_location (reader, filename);
+
+  part_bus = gst_element_get_bus (GST_ELEMENT_CAST (reader));
+  gst_bus_set_sync_handler (part_bus, gst_splitmux_part_bus_handler,
+      splitmux, NULL);
+  gst_object_unref (part_bus);
+
+  return reader;
+}
+
+/* Returns TRUE if the CAPS event on @splitpad carries caps that really
+ * differ from the pad's current caps, ignoring the framerate field
+ * (which demuxers often report inconsistently across file parts). */
+static gboolean
+gst_splitmux_check_new_caps (SplitMuxSrcPad * splitpad, GstEvent * event)
+{
+  GstCaps *curcaps = gst_pad_get_current_caps ((GstPad *) (splitpad));
+  GstCaps *newcaps;
+  GstCaps *new_nofr;
+  GstCaps *cur_nofr;
+  GstStructure *s;
+  gboolean changed;
+
+  gst_event_parse_caps (event, &newcaps);
+
+  GST_LOG_OBJECT (splitpad, "Comparing caps %" GST_PTR_FORMAT
+      " and %" GST_PTR_FORMAT, curcaps, newcaps);
+
+  /* No current caps yet: anything counts as new */
+  if (curcaps == NULL)
+    return TRUE;
+
+  /* Cheap test first: exact equality means nothing changed */
+  if (gst_caps_is_equal (curcaps, newcaps)) {
+    gst_caps_unref (curcaps);
+    return FALSE;
+  }
+
+  /* Deeper test: compare copies of both caps with framerate stripped */
+  new_nofr = gst_caps_copy (newcaps);
+  s = gst_caps_get_structure (new_nofr, 0);
+  gst_structure_remove_field (s, "framerate");
+
+  cur_nofr = gst_caps_copy (curcaps);
+  gst_caps_unref (curcaps);
+  s = gst_caps_get_structure (cur_nofr, 0);
+  gst_structure_remove_field (s, "framerate");
+
+  changed = !gst_caps_is_equal (cur_nofr, new_nofr);
+  if (!changed)
+    GST_INFO_OBJECT (splitpad, "Ignoring framerate-only caps change");
+
+  gst_caps_unref (new_nofr);
+  gst_caps_unref (cur_nofr);
+  return changed;
+}
+
+/* Process one event popped from a part reader before pushing it out on
+ * @splitpad. Deduplicates stream-start/caps across parts, rewrites
+ * segment events into the combined timeline, and converts a part's EOS
+ * into a transition to the next part when one exists.
+ * Takes ownership of @event. */
+static void
+gst_splitmux_handle_event (GstSplitMuxSrc * splitmux,
+    SplitMuxSrcPad * splitpad, GstPad * part_pad, GstEvent * event)
+{
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_STREAM_START:{
+      /* Only the first part's stream-start is forwarded downstream */
+      if (splitpad->sent_stream_start)
+        goto drop_event;
+      splitpad->sent_stream_start = TRUE;
+      break;
+    }
+    case GST_EVENT_EOS:{
+      if (gst_splitmux_end_of_part (splitmux, splitpad))
+        // Continuing to next part, drop the EOS
+        goto drop_event;
+      if (splitmux->segment_seqnum) {
+        event = gst_event_make_writable (event);
+        gst_event_set_seqnum (event, splitmux->segment_seqnum);
+      }
+      break;
+    }
+    case GST_EVENT_SEGMENT:{
+      GstClockTime duration;
+      GstSegment seg;
+
+      gst_event_copy_segment (event, &seg);
+
+      splitpad->segment.position = seg.position;
+
+      if (splitpad->sent_segment)
+        goto drop_event;        /* We already forwarded a segment event */
+
+      /* Calculate output segment */
+      GST_LOG_OBJECT (splitpad, "Pad seg %" GST_SEGMENT_FORMAT
+          " got seg %" GST_SEGMENT_FORMAT
+          " play seg %" GST_SEGMENT_FORMAT,
+          &splitpad->segment, &seg, &splitmux->play_segment);
+
+      /* If playing forward, take the stop time from the overall
+       * seg or play_segment */
+      if (splitmux->play_segment.rate > 0.0) {
+        if (splitmux->play_segment.stop != -1)
+          seg.stop = splitmux->play_segment.stop + FIXED_TS_OFFSET;
+        else
+          seg.stop = splitpad->segment.stop;
+      } else {
+        /* Reverse playback from stop time to start time */
+        /* See if an end point was requested in the seek */
+        if (splitmux->play_segment.start != -1) {
+          seg.start = splitmux->play_segment.start + FIXED_TS_OFFSET;
+          seg.time = splitmux->play_segment.time;
+        } else {
+          seg.start = splitpad->segment.start;
+          seg.time = splitpad->segment.time;
+        }
+      }
+
+      GST_OBJECT_LOCK (splitmux);
+      duration = splitmux->total_duration;
+      GST_OBJECT_UNLOCK (splitmux);
+
+      if (duration > 0)
+        seg.duration = duration;
+      else
+        seg.duration = GST_CLOCK_TIME_NONE;
+
+      GST_INFO_OBJECT (splitpad,
+          "Forwarding segment %" GST_SEGMENT_FORMAT, &seg);
+
+      /* Replace the part-local segment event with the rewritten one */
+      gst_event_unref (event);
+      event = gst_event_new_segment (&seg);
+      if (splitmux->segment_seqnum)
+        gst_event_set_seqnum (event, splitmux->segment_seqnum);
+      splitpad->sent_segment = TRUE;
+      break;
+    }
+    case GST_EVENT_CAPS:{
+      /* Suppress caps events that don't actually change anything */
+      if (!gst_splitmux_check_new_caps (splitpad, event))
+        goto drop_event;
+      splitpad->sent_caps = TRUE;
+      break;
+    }
+    default:
+      break;
+  }
+
+  gst_pad_push_event ((GstPad *) (splitpad), event);
+  return;
+drop_event:
+  gst_event_unref (event);
+  return;
+}
+
+/* Push one buffer downstream on @splitpad, first applying any pending
+ * DISCONT flag adjustment requested for a part transition. */
+static GstFlowReturn
+gst_splitmux_handle_buffer (GstSplitMuxSrc * splitmux,
+    SplitMuxSrcPad * splitpad, GstBuffer * buf)
+{
+  GstFlowReturn result;
+
+  if (splitpad->clear_next_discont) {
+    GST_LOG_OBJECT (splitpad, "Clearing discont flag on buffer");
+    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+    splitpad->clear_next_discont = FALSE;
+  }
+
+  if (splitpad->set_next_discont) {
+    GST_LOG_OBJECT (splitpad, "Setting discont flag on buffer");
+    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+    splitpad->set_next_discont = FALSE;
+  }
+
+  result = gst_pad_push (GST_PAD_CAST (splitpad), buf);
+
+  GST_LOG_OBJECT (splitpad, "Pad push returned %d", result);
+  return result;
+}
+
+/* Count how many of our source pads last returned GST_FLOW_NOT_LINKED.
+ * Caller holds the pads rwlock. */
+static guint
+count_not_linked (GstSplitMuxSrc * splitmux)
+{
+  guint n_unlinked = 0;
+  GList *walk;
+
+  for (walk = splitmux->pads; walk != NULL; walk = walk->next) {
+    SplitMuxSrcPad *splitpad = (SplitMuxSrcPad *) (walk->data);
+
+    if (GST_PAD_LAST_FLOW_RETURN (splitpad) == GST_FLOW_NOT_LINKED)
+      n_unlinked++;
+  }
+
+  return n_unlinked;
+}
+
+/* Streaming task for each output pad: pops one item (buffer or event)
+ * from the current part reader and pushes it downstream. Pauses itself
+ * when flushing, on fatal flow returns, or when no part pad is attached. */
+static void
+gst_splitmux_pad_loop (GstPad * pad)
+{
+  /* Get one event/buffer from the associated part and push */
+  SplitMuxSrcPad *splitpad = (SplitMuxSrcPad *) (pad);
+  GstSplitMuxSrc *splitmux = (GstSplitMuxSrc *) gst_pad_get_parent (pad);
+  GstDataQueueItem *item = NULL;
+  GstSplitMuxPartReader *reader;
+  GstPad *part_pad;
+  GstFlowReturn ret;
+
+  GST_OBJECT_LOCK (splitpad);
+  if (splitpad->part_pad == NULL) {
+    GST_DEBUG_OBJECT (splitmux,
+        "Pausing task because part reader is not present");
+    GST_OBJECT_UNLOCK (splitpad);
+    gst_pad_pause_task (pad);
+    gst_object_unref (splitmux);
+    return;
+  }
+  /* Keep a ref on the part pad so it stays valid after dropping the lock */
+  part_pad = gst_object_ref (splitpad->part_pad);
+  reader = splitpad->reader;
+  GST_OBJECT_UNLOCK (splitpad);
+
+  GST_LOG_OBJECT (splitpad, "Popping data queue item from %" GST_PTR_FORMAT
+      " pad %" GST_PTR_FORMAT, reader, part_pad);
+  ret = gst_splitmux_part_reader_pop (reader, part_pad, &item);
+  if (ret == GST_FLOW_ERROR)
+    goto error;
+  if (ret == GST_FLOW_FLUSHING || item == NULL)
+    goto flushing;
+
+  GST_DEBUG_OBJECT (splitpad, "Got data queue item %" GST_PTR_FORMAT,
+      item->object);
+
+  if (GST_IS_EVENT (item->object)) {
+    GstEvent *event = (GstEvent *) (item->object);
+    /* handle_event() takes ownership of the event */
+    gst_splitmux_handle_event (splitmux, splitpad, part_pad, event);
+  } else {
+    GstBuffer *buf = (GstBuffer *) (item->object);
+    GstFlowReturn ret = gst_splitmux_handle_buffer (splitmux, splitpad, buf);
+    if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_EOS)) {
+      /* Stop immediately on error or flushing */
+      GST_INFO_OBJECT (splitpad, "Stopping due to pad_push() result %d", ret);
+      gst_pad_pause_task (pad);
+      if (ret < GST_FLOW_EOS) {
+        GST_ELEMENT_FLOW_ERROR (splitmux, ret);
+      } else if (ret == GST_FLOW_NOT_LINKED) {
+        gboolean post_error;
+        guint n_notlinked;
+
+        /* Only post not-linked if all pads are not-linked */
+        SPLITMUX_SRC_PADS_RLOCK (splitmux);
+        n_notlinked = count_not_linked (splitmux);
+        post_error = (splitmux->pads_complete
+            && n_notlinked == splitmux->n_pads);
+        SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+
+        if (post_error)
+          GST_ELEMENT_FLOW_ERROR (splitmux, ret);
+      }
+    }
+  }
+  g_slice_free (GstDataQueueItem, item);
+
+  gst_object_unref (part_pad);
+  gst_object_unref (splitmux);
+  return;
+
+error:
+  /* Fall through */
+  GST_ELEMENT_ERROR (splitmux, RESOURCE, OPEN_READ, (NULL),
+      ("Error reading part file %s", GST_STR_NULL (reader->path)));
+flushing:
+  gst_pad_pause_task (pad);
+  gst_object_unref (part_pad);
+  gst_object_unref (splitmux);
+  return;
+}
+
+/* Activate file part @part: start its reader with the current play
+ * segment, point every output pad at the matching pad of that part,
+ * and (re)start the per-pad streaming tasks.
+ * Called with the SPLITMUX_SRC lock held. */
+static gboolean
+gst_splitmux_src_activate_part (GstSplitMuxSrc * splitmux, guint part,
+    GstSeekFlags extra_flags)
+{
+  GList *cur;
+
+  GST_DEBUG_OBJECT (splitmux, "Activating part %d", part);
+
+  splitmux->cur_part = part;
+  if (!gst_splitmux_part_reader_activate (splitmux->parts[part],
+          &splitmux->play_segment, extra_flags))
+    return FALSE;
+
+  SPLITMUX_SRC_PADS_RLOCK (splitmux);
+  for (cur = g_list_first (splitmux->pads);
+      cur != NULL; cur = g_list_next (cur)) {
+    SplitMuxSrcPad *splitpad = (SplitMuxSrcPad *) (cur->data);
+    GST_OBJECT_LOCK (splitpad);
+    splitpad->cur_part = part;
+    splitpad->reader = splitmux->parts[splitpad->cur_part];
+    /* Swap the pad's part pad reference for the new part's pad */
+    if (splitpad->part_pad)
+      gst_object_unref (splitpad->part_pad);
+    splitpad->part_pad =
+        gst_splitmux_part_reader_lookup_pad (splitpad->reader,
+        (GstPad *) (splitpad));
+    GST_OBJECT_UNLOCK (splitpad);
+
+    /* Make sure we start with a DISCONT */
+    splitpad->set_next_discont = TRUE;
+    splitpad->clear_next_discont = FALSE;
+
+    gst_pad_start_task (GST_PAD (splitpad),
+        (GstTaskFunction) gst_splitmux_pad_loop, splitpad, NULL);
+  }
+  SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+
+  return TRUE;
+}
+
+/* Begin asynchronous preparation of the next unprepared file part.
+ * On failure, posts a warning, discards the part and returns FALSE,
+ * limiting playback to the parts prepared so far.
+ * Must only be called while an unprepared part remains. */
+static gboolean
+gst_splitmux_src_prepare_next_part (GstSplitMuxSrc * splitmux)
+{
+  guint idx = splitmux->num_prepared_parts;
+  GstSplitMuxPartReader *part;
+
+  g_assert (idx < splitmux->num_parts);
+  part = splitmux->parts[idx];
+
+  GST_DEBUG_OBJECT (splitmux, "Preparing file part %s (%u)", part->path, idx);
+
+  /* This part's timestamps continue where the previous part ended */
+  gst_splitmux_part_reader_set_start_offset (part, splitmux->end_offset,
+      FIXED_TS_OFFSET);
+
+  if (gst_splitmux_part_reader_prepare (part))
+    return TRUE;
+
+  GST_WARNING_OBJECT (splitmux,
+      "Failed to prepare file part %s. Cannot play past there.", part->path);
+  GST_ELEMENT_WARNING (splitmux, RESOURCE, READ, (NULL),
+      ("Failed to prepare file part %s. Cannot play past there.",
+          part->path));
+  gst_splitmux_part_reader_unprepare (part);
+  g_object_unref (part);
+  splitmux->parts[idx] = NULL;
+  return FALSE;
+}
+
+/* Called on READY->PAUSED: collect the list of part files (from the
+ * format-location signal, falling back to globbing the location
+ * property), create a part reader for each, and kick off asynchronous
+ * preparation of the first one.
+ * Returns FALSE (after posting an error) if no part could be opened. */
+static gboolean
+gst_splitmux_src_start (GstSplitMuxSrc * splitmux)
+{
+  gboolean ret = FALSE;
+  GError *err = NULL;
+  gchar *basename = NULL;
+  gchar *dirname = NULL;
+  gchar **files;
+  guint i;
+
+  SPLITMUX_SRC_LOCK (splitmux);
+  if (splitmux->running) {
+    /* splitmux is still running / stopping. We can't start again yet */
+    SPLITMUX_SRC_UNLOCK (splitmux);
+    return FALSE;
+  }
+  SPLITMUX_SRC_UNLOCK (splitmux);
+
+  GST_DEBUG_OBJECT (splitmux, "Starting");
+
+  /* First ask the application for an explicit list of files */
+  g_signal_emit (splitmux, signals[SIGNAL_FORMAT_LOCATION], 0, &files);
+
+  if (files == NULL || *files == NULL) {
+    /* Fall back to treating the location property as a glob pattern */
+    GST_OBJECT_LOCK (splitmux);
+    if (splitmux->location != NULL && splitmux->location[0] != '\0') {
+      basename = g_path_get_basename (splitmux->location);
+      dirname = g_path_get_dirname (splitmux->location);
+    }
+    GST_OBJECT_UNLOCK (splitmux);
+
+    g_strfreev (files);
+    files = gst_split_util_find_files (dirname, basename, &err);
+
+    if (files == NULL || *files == NULL)
+      goto no_files;
+  }
+
+  SPLITMUX_SRC_LOCK (splitmux);
+  splitmux->pads_complete = FALSE;
+  splitmux->running = TRUE;
+  SPLITMUX_SRC_UNLOCK (splitmux);
+
+  splitmux->num_parts = g_strv_length (files);
+
+  splitmux->parts = g_new0 (GstSplitMuxPartReader *, splitmux->num_parts);
+
+  /* Create all part pipelines */
+  for (i = 0; i < splitmux->num_parts; i++) {
+    splitmux->parts[i] = gst_splitmux_part_create (splitmux, files[i]);
+    if (splitmux->parts[i] == NULL)
+      break;
+  }
+
+  /* Store how many parts we actually created */
+  splitmux->num_created_parts = splitmux->num_parts = i;
+  splitmux->num_prepared_parts = 0;
+
+  /* Update total_duration state variable */
+  GST_OBJECT_LOCK (splitmux);
+  splitmux->total_duration = 0;
+  splitmux->end_offset = 0;
+  GST_OBJECT_UNLOCK (splitmux);
+
+  /* Then start the first: it will asynchronously go to PAUSED
+   * or error out and then we can proceed with the next one.
+   * The emptiness check MUST come first: prepare_next_part() asserts
+   * that an unprepared part exists and would abort on an empty list. */
+  if (splitmux->num_parts < 1 || !gst_splitmux_src_prepare_next_part (splitmux))
+    goto failed_part;
+
+  /* All good now: we have to wait for all parts to be asynchronously
+   * prepared to know the total duration we can play */
+  ret = TRUE;
+
+done:
+  if (err != NULL)
+    g_error_free (err);
+  g_strfreev (files);
+  g_free (basename);
+  g_free (dirname);
+
+  return ret;
+
+/* ERRORS */
+no_files:
+  {
+    /* err is NULL when the pattern simply matched no files, so don't
+     * dereference it unconditionally */
+    GST_ELEMENT_ERROR (splitmux, RESOURCE, OPEN_READ,
+        ("%s", err ? err->message : "No files found"),
+        ("Failed to find files in '%s' for pattern '%s'",
+            GST_STR_NULL (dirname), GST_STR_NULL (basename)));
+    goto done;
+  }
+failed_part:
+  {
+    GST_ELEMENT_ERROR (splitmux, RESOURCE, OPEN_READ, (NULL),
+        ("Failed to open any files for reading"));
+    goto done;
+  }
+}
+
+/* Tear everything down: unprepare and free all part readers, stop the
+ * pad streaming tasks, remove the source pads and reset playback state.
+ * Returns TRUE, including when already stopped. */
+static gboolean
+gst_splitmux_src_stop (GstSplitMuxSrc * splitmux)
+{
+  gboolean ret = TRUE;
+  guint i;
+  GList *cur, *pads_list;
+
+  SPLITMUX_SRC_LOCK (splitmux);
+  if (!splitmux->running)
+    goto out;
+  splitmux->running = FALSE;
+  GST_DEBUG_OBJECT (splitmux, "Stopping");
+
+  SPLITMUX_SRC_UNLOCK (splitmux);
+
+  /* Stop and destroy all parts. We don't need the lock here,
+   * because all parts were created in _start() */
+  for (i = 0; i < splitmux->num_created_parts; i++) {
+    if (splitmux->parts[i] == NULL)
+      continue;
+    gst_splitmux_part_reader_unprepare (splitmux->parts[i]);
+    g_object_unref (splitmux->parts[i]);
+    splitmux->parts[i] = NULL;
+  }
+  SPLITMUX_SRC_LOCK (splitmux);
+
+  /* Detach the pads list under the write lock... */
+  SPLITMUX_SRC_PADS_WLOCK (splitmux);
+  pads_list = splitmux->pads;
+  splitmux->pads = NULL;
+  SPLITMUX_SRC_PADS_WUNLOCK (splitmux);
+
+  /* ...then stop and remove the pads with the main lock released */
+  SPLITMUX_SRC_UNLOCK (splitmux);
+  for (cur = g_list_first (pads_list); cur != NULL; cur = g_list_next (cur)) {
+    SplitMuxSrcPad *tmp = (SplitMuxSrcPad *) (cur->data);
+    gst_pad_stop_task (GST_PAD (tmp));
+    gst_element_remove_pad (GST_ELEMENT (splitmux), GST_PAD (tmp));
+  }
+  g_list_free (pads_list);
+  SPLITMUX_SRC_LOCK (splitmux);
+
+  g_free (splitmux->parts);
+  splitmux->parts = NULL;
+  splitmux->num_parts = 0;
+  splitmux->num_prepared_parts = 0;
+  splitmux->num_created_parts = 0;
+  splitmux->total_duration = GST_CLOCK_TIME_NONE;
+  /* Reset playback segment */
+  gst_segment_init (&splitmux->play_segment, GST_FORMAT_TIME);
+out:
+  SPLITMUX_SRC_UNLOCK (splitmux);
+  return ret;
+}
+
+/* Context passed to handle_sticky_events() when replaying a part pad's
+ * sticky events onto a newly created output pad */
+typedef struct
+{
+  GstSplitMuxSrc *splitmux;
+  SplitMuxSrcPad *splitpad;
+} SplitMuxAndPad;
+
+/* GstPadStickyEventsForeachFunction: re-dispatch each sticky event from
+ * a part pad through gst_splitmux_handle_event() so that a freshly
+ * created output pad receives stream-start/caps/segment with the usual
+ * adjustments applied. Always continues iteration. */
+static gboolean
+handle_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+  SplitMuxAndPad *ctx = user_data;
+
+  GST_DEBUG_OBJECT (ctx->splitpad, "handle sticky event %" GST_PTR_FORMAT,
+      *event);
+
+  /* gst_splitmux_handle_event() consumes a reference */
+  gst_event_ref (*event);
+  gst_splitmux_handle_event (ctx->splitmux, ctx->splitpad, pad, *event);
+
+  return TRUE;
+}
+
+/* Part reader pad callback: map a pad of a part reader onto the output
+ * pad with the same name, creating that output pad when it is the first
+ * part exposing it (only allowed before pads_complete is set). Newly
+ * created pads get the part pad's sticky events replayed onto them.
+ * Returns NULL (and posts an error) for an unknown pad in a later part. */
+static GstPad *
+gst_splitmux_find_output_pad (GstSplitMuxPartReader * part, GstPad * pad,
+    GstSplitMuxSrc * splitmux)
+{
+  GList *cur;
+  gchar *pad_name = gst_pad_get_name (pad);
+  GstPad *target = NULL;
+  gboolean is_new_pad = FALSE;
+
+  SPLITMUX_SRC_LOCK (splitmux);
+  SPLITMUX_SRC_PADS_WLOCK (splitmux);
+  for (cur = g_list_first (splitmux->pads);
+      cur != NULL; cur = g_list_next (cur)) {
+    GstPad *tmp = (GstPad *) (cur->data);
+    if (g_str_equal (GST_PAD_NAME (tmp), pad_name)) {
+      target = tmp;
+      break;
+    }
+  }
+
+  if (target == NULL && !splitmux->pads_complete) {
+    SplitMuxAndPad splitmux_and_pad;
+
+    /* No pad found, create one */
+    target = g_object_new (SPLITMUX_TYPE_SRC_PAD,
+        "name", pad_name, "direction", GST_PAD_SRC, NULL);
+    splitmux->pads = g_list_prepend (splitmux->pads, target);
+    splitmux->n_pads++;
+
+    gst_pad_set_active (target, TRUE);
+
+    splitmux_and_pad.splitmux = splitmux;
+    splitmux_and_pad.splitpad = (SplitMuxSrcPad *) target;
+    gst_pad_sticky_events_foreach (pad, handle_sticky_events,
+        &splitmux_and_pad);
+    is_new_pad = TRUE;
+  }
+  SPLITMUX_SRC_PADS_WUNLOCK (splitmux);
+  SPLITMUX_SRC_UNLOCK (splitmux);
+
+  g_free (pad_name);
+
+  if (target == NULL)
+    goto pad_not_found;
+
+  /* Expose the new pad outside the locks */
+  if (is_new_pad)
+    gst_element_add_pad (GST_ELEMENT_CAST (splitmux), target);
+
+  return target;
+
+pad_not_found:
+  GST_ELEMENT_ERROR (splitmux, STREAM, FAILED, (NULL),
+      ("Stream part %s contains extra unknown pad %" GST_PTR_FORMAT,
+          part->path, pad));
+  return NULL;
+}
+
+/* Push event @e to every source pad, optionally stamping @seqnum on it
+ * first (when nonzero). Consumes the caller's reference to @e. */
+static void
+gst_splitmux_push_event (GstSplitMuxSrc * splitmux, GstEvent * e,
+    guint32 seqnum)
+{
+  GList *walk;
+
+  if (seqnum) {
+    e = gst_event_make_writable (e);
+    gst_event_set_seqnum (e, seqnum);
+  }
+
+  SPLITMUX_SRC_PADS_RLOCK (splitmux);
+  for (walk = splitmux->pads; walk != NULL; walk = walk->next) {
+    GstPad *pad = GST_PAD_CAST (walk->data);
+
+    gst_event_ref (e);
+    gst_pad_push_event (pad, e);
+  }
+  SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+
+  gst_event_unref (e);
+}
+
+/* Send a flush-stop (stamped with @seqnum when nonzero) to every source
+ * pad, and reset each pad's sticky-event bookkeeping so the next part
+ * re-sends stream-start, caps and segment. */
+static void
+gst_splitmux_push_flush_stop (GstSplitMuxSrc * splitmux, guint32 seqnum)
+{
+  GstEvent *e = gst_event_new_flush_stop (TRUE);
+  GList *walk;
+
+  if (seqnum) {
+    e = gst_event_make_writable (e);
+    gst_event_set_seqnum (e, seqnum);
+  }
+
+  SPLITMUX_SRC_PADS_RLOCK (splitmux);
+  for (walk = splitmux->pads; walk != NULL; walk = walk->next) {
+    SplitMuxSrcPad *target = (SplitMuxSrcPad *) (walk->data);
+
+    gst_event_ref (e);
+    gst_pad_push_event (GST_PAD_CAST (target), e);
+    target->sent_caps = FALSE;
+    target->sent_stream_start = FALSE;
+    target->sent_segment = FALSE;
+  }
+  SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+
+  gst_event_unref (e);
+}
+
+/* Callback for when a part finishes and we need to move to the next.
+ * Decides, based on play direction and the configured play segment,
+ * whether another part should be played; if so, repoints @splitpad at
+ * it, activating the new part's reader when this pad is the first one
+ * to arrive there. Returns TRUE if the EOS should be swallowed because
+ * playback continues in another part. */
+static gboolean
+gst_splitmux_end_of_part (GstSplitMuxSrc * splitmux, SplitMuxSrcPad * splitpad)
+{
+  gint next_part = -1;
+  gint cur_part = splitpad->cur_part;
+  gboolean res = FALSE;
+
+  if (splitmux->play_segment.rate >= 0.0) {
+    if (cur_part + 1 < splitmux->num_parts)
+      next_part = cur_part + 1;
+    /* Make sure the transition is seamless */
+    splitpad->set_next_discont = FALSE;
+    splitpad->clear_next_discont = TRUE;
+  } else {
+    /* Reverse play - move to previous segment */
+    if (cur_part > 0) {
+      next_part = cur_part - 1;
+      /* Non-seamless transition in reverse */
+      splitpad->set_next_discont = TRUE;
+      splitpad->clear_next_discont = FALSE;
+    }
+  }
+
+  SPLITMUX_SRC_LOCK (splitmux);
+
+  /* If all pads are done with this part, deactivate it */
+  if (gst_splitmux_part_is_eos (splitmux->parts[splitpad->cur_part]))
+    gst_splitmux_part_reader_deactivate (splitmux->parts[cur_part]);
+
+  /* Don't continue if the requested play range ends inside this part */
+  if (splitmux->play_segment.rate >= 0.0) {
+    if (splitmux->play_segment.stop != -1) {
+      GstClockTime part_end =
+          gst_splitmux_part_reader_get_end_offset (splitmux->parts[cur_part]);
+      if (part_end >= splitmux->play_segment.stop) {
+        GST_DEBUG_OBJECT (splitmux,
+            "Stop position was within that part. Finishing");
+        next_part = -1;
+      }
+    }
+  } else {
+    if (splitmux->play_segment.start != -1) {
+      GstClockTime part_start =
+          gst_splitmux_part_reader_get_start_offset (splitmux->parts[cur_part]);
+      if (part_start <= splitmux->play_segment.start) {
+        GST_DEBUG_OBJECT (splitmux,
+            "Start position %" GST_TIME_FORMAT
+            " was within that part. Finishing",
+            GST_TIME_ARGS (splitmux->play_segment.start));
+        next_part = -1;
+      }
+    }
+  }
+
+  if (next_part != -1) {
+    GST_DEBUG_OBJECT (splitmux, "At EOS on pad %" GST_PTR_FORMAT
+        " moving to part %d", splitpad, next_part);
+    splitpad->cur_part = next_part;
+    splitpad->reader = splitmux->parts[splitpad->cur_part];
+    if (splitpad->part_pad)
+      gst_object_unref (splitpad->part_pad);
+    splitpad->part_pad =
+        gst_splitmux_part_reader_lookup_pad (splitpad->reader,
+        (GstPad *) (splitpad));
+
+    if (splitmux->cur_part != next_part) {
+      if (!gst_splitmux_part_reader_is_active (splitpad->reader)) {
+        GstSegment tmp;
+        /* If moving backward into a new part, set stop
+         * to -1 to ensure we play the entire file - workaround
+         * a bug in qtdemux that misses bits at the end */
+        gst_segment_copy_into (&splitmux->play_segment, &tmp);
+        if (tmp.rate < 0)
+          tmp.stop = -1;
+
+        /* This is the first pad to move to the new part, activate it */
+        GST_DEBUG_OBJECT (splitpad,
+            "First pad to change part. Activating part %d with seg %"
+            GST_SEGMENT_FORMAT, next_part, &tmp);
+        if (!gst_splitmux_part_reader_activate (splitpad->reader, &tmp,
+                GST_SEEK_FLAG_NONE))
+          goto error;
+      }
+      splitmux->cur_part = next_part;
+    }
+    res = TRUE;
+  }
+
+  SPLITMUX_SRC_UNLOCK (splitmux);
+  return res;
+error:
+  SPLITMUX_SRC_UNLOCK (splitmux);
+  GST_ELEMENT_ERROR (splitmux, RESOURCE, READ, (NULL),
+      ("Failed to activate part %d", splitmux->cur_part));
+  return FALSE;
+}
+
+/* GstPad subclass carrying per-stream part-tracking state */
+G_DEFINE_TYPE (SplitMuxSrcPad, splitmux_src_pad, GST_TYPE_PAD);
+
+/* GObject::constructed vfunc: install our event and query handlers on
+ * the pad, then chain up to the parent implementation. */
+static void
+splitmux_src_pad_constructed (GObject * pad)
+{
+  GstPad *gpad = GST_PAD (pad);
+
+  gst_pad_set_event_function (gpad,
+      GST_DEBUG_FUNCPTR (splitmux_src_pad_event));
+  gst_pad_set_query_function (gpad,
+      GST_DEBUG_FUNCPTR (splitmux_src_pad_query));
+
+  G_OBJECT_CLASS (splitmux_src_pad_parent_class)->constructed (pad);
+}
+
+/* GObject::dispose vfunc: drop our reference to the underlying part
+ * pad, if any, then chain up. */
+static void
+gst_splitmux_src_pad_dispose (GObject * object)
+{
+  SplitMuxSrcPad *splitpad = (SplitMuxSrcPad *) (object);
+
+  GST_OBJECT_LOCK (splitpad);
+  if (splitpad->part_pad != NULL) {
+    gst_object_unref (splitpad->part_pad);
+    splitpad->part_pad = NULL;
+  }
+  GST_OBJECT_UNLOCK (splitpad);
+
+  G_OBJECT_CLASS (splitmux_src_pad_parent_class)->dispose (object);
+}
+
+/* Class init: wire up the GObject vfuncs for SplitMuxSrcPad */
+static void
+splitmux_src_pad_class_init (SplitMuxSrcPadClass * klass)
+{
+  GObjectClass *gobject_klass = G_OBJECT_CLASS (klass);
+
+  gobject_klass->constructed = splitmux_src_pad_constructed;
+  gobject_klass->dispose = gst_splitmux_src_pad_dispose;
+}
+
+/* Instance init: all fields start zeroed by GObject; nothing to do. */
+static void
+splitmux_src_pad_init (SplitMuxSrcPad * pad)
+{
+}
+
+/* Event handler for source pads. Proxy events into the child
+ * parts as needed.
+ *
+ * SEEK (flushing, TIME only): flush all pads, deactivate all parts,
+ * stop the pad tasks, update play_segment, then re-activate the part
+ * containing the new position. RECONFIGURE restarts this pad's task.
+ * Everything else is dropped. Takes ownership of @event.
+ */
+static gboolean
+splitmux_src_pad_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (parent);
+  gboolean ret = FALSE;
+
+  GST_DEBUG_OBJECT (parent, "event %" GST_PTR_FORMAT
+      " on %" GST_PTR_FORMAT, event, pad);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:{
+      GstFormat format;
+      gdouble rate;
+      GstSeekFlags flags;
+      GstSeekType start_type, stop_type;
+      gint64 start, stop;
+      guint32 seqnum;
+      gint i;
+      GstClockTime part_start, position;
+      GList *cur;
+      GstSegment tmp;
+
+      gst_event_parse_seek (event, &rate, &format, &flags,
+          &start_type, &start, &stop_type, &stop);
+
+      if (format != GST_FORMAT_TIME) {
+        GST_DEBUG_OBJECT (splitmux, "can only seek on TIME");
+        goto error;
+      }
+      /* FIXME: Support non-flushing seeks, which might never wake up */
+      if (!(flags & GST_SEEK_FLAG_FLUSH)) {
+        GST_DEBUG_OBJECT (splitmux, "Only flushing seeks supported");
+        goto error;
+      }
+      seqnum = gst_event_get_seqnum (event);
+
+      SPLITMUX_SRC_LOCK (splitmux);
+      if (!splitmux->running || splitmux->num_parts < 1) {
+        /* Not started yet */
+        SPLITMUX_SRC_UNLOCK (splitmux);
+        goto error;
+      }
+      if (splitmux->segment_seqnum == seqnum) {
+        GST_DEBUG_OBJECT (splitmux, "Ignoring duplicate seek event");
+        SPLITMUX_SRC_UNLOCK (splitmux);
+        ret = TRUE;
+        goto done;
+      }
+
+      gst_segment_copy_into (&splitmux->play_segment, &tmp);
+
+      if (!gst_segment_do_seek (&tmp, rate,
+              format, flags, start_type, start, stop_type, stop, NULL)) {
+        /* Invalid seek requested, ignore it */
+        SPLITMUX_SRC_UNLOCK (splitmux);
+        goto error;
+      }
+      position = tmp.position;
+
+      GST_DEBUG_OBJECT (splitmux, "Performing seek with seg %"
+          GST_SEGMENT_FORMAT, &tmp);
+
+      GST_DEBUG_OBJECT (splitmux,
+          "Handling flushing seek. Sending flush start");
+
+      /* Send flush_start */
+      gst_splitmux_push_event (splitmux, gst_event_new_flush_start (), seqnum);
+
+      /* Stop all parts, which will work because of the flush */
+      SPLITMUX_SRC_PADS_RLOCK (splitmux);
+      SPLITMUX_SRC_UNLOCK (splitmux);
+      for (cur = g_list_first (splitmux->pads);
+          cur != NULL; cur = g_list_next (cur)) {
+        SplitMuxSrcPad *target = (SplitMuxSrcPad *) (cur->data);
+        GstSplitMuxPartReader *reader = splitmux->parts[target->cur_part];
+        gst_splitmux_part_reader_deactivate (reader);
+      }
+
+      /* Shut down pad tasks */
+      GST_DEBUG_OBJECT (splitmux, "Pausing pad tasks");
+      for (cur = g_list_first (splitmux->pads);
+          cur != NULL; cur = g_list_next (cur)) {
+        GstPad *splitpad = (GstPad *) (cur->data);
+        gst_pad_pause_task (GST_PAD (splitpad));
+      }
+      SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+      SPLITMUX_SRC_LOCK (splitmux);
+
+      /* Send flush stop */
+      GST_DEBUG_OBJECT (splitmux, "Sending flush stop");
+      gst_splitmux_push_flush_stop (splitmux, seqnum);
+
+      /* Everything is stopped, so update the play_segment */
+      gst_segment_copy_into (&tmp, &splitmux->play_segment);
+      splitmux->segment_seqnum = seqnum;
+
+      /* Work out where to start from now */
+      for (i = 0; i < splitmux->num_parts; i++) {
+        GstSplitMuxPartReader *reader = splitmux->parts[i];
+        GstClockTime part_end =
+            gst_splitmux_part_reader_get_end_offset (reader);
+
+        if (part_end > position)
+          break;
+      }
+      if (i == splitmux->num_parts)
+        i = splitmux->num_parts - 1;
+
+      part_start =
+          gst_splitmux_part_reader_get_start_offset (splitmux->parts[i]);
+
+      GST_DEBUG_OBJECT (splitmux,
+          "Seek to time %" GST_TIME_FORMAT " landed in part %d offset %"
+          GST_TIME_FORMAT, GST_TIME_ARGS (position),
+          i, GST_TIME_ARGS (position - part_start));
+
+      ret = gst_splitmux_src_activate_part (splitmux, i, flags);
+      SPLITMUX_SRC_UNLOCK (splitmux);
+      /* Explicit break: this used to fall through into the RECONFIGURE
+       * case and needlessly restart this pad's task after every seek */
+      break;
+    }
+    case GST_EVENT_RECONFIGURE:{
+      GST_DEBUG_OBJECT (splitmux, "reconfigure event on pad %" GST_PTR_FORMAT,
+          pad);
+
+      SPLITMUX_SRC_PADS_RLOCK (splitmux);
+      /* Restart the task on this pad */
+      gst_pad_start_task (GST_PAD (pad),
+          (GstTaskFunction) gst_splitmux_pad_loop, pad, NULL);
+      SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+      break;
+    }
+    default:
+      break;
+  }
+
+done:
+  gst_event_unref (event);
+  return ret;
+
+/* ERRORS */
+error:
+  /* We own the event here too: drop it instead of leaking it */
+  gst_event_unref (event);
+  return FALSE;
+}
+
+/* Query handler for source pads. CAPS and POSITION are proxied into the
+ * current part reader; DURATION, SEEKING and SEGMENT are answered from
+ * our own aggregated state. Returns FALSE for unhandled queries. */
+static gboolean
+splitmux_src_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstSplitMuxSrc *splitmux = GST_SPLITMUX_SRC (parent);
+  gboolean ret = FALSE;
+
+  GST_LOG_OBJECT (parent, "query %" GST_PTR_FORMAT
+      " on %" GST_PTR_FORMAT, query, pad);
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CAPS:
+    case GST_QUERY_POSITION:{
+      GstSplitMuxPartReader *part;
+      SplitMuxSrcPad *anypad;
+
+      SPLITMUX_SRC_LOCK (splitmux);
+      SPLITMUX_SRC_PADS_RLOCK (splitmux);
+      if (splitmux->pads == NULL) {
+        /* No pads yet (not started or already stopped): nothing to
+         * proxy the query to, so don't dereference an empty list */
+        SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+        SPLITMUX_SRC_UNLOCK (splitmux);
+        break;
+      }
+      anypad = (SplitMuxSrcPad *) (splitmux->pads->data);
+      part = splitmux->parts[anypad->cur_part];
+      ret = gst_splitmux_part_reader_src_query (part, pad, query);
+      SPLITMUX_SRC_PADS_RUNLOCK (splitmux);
+      SPLITMUX_SRC_UNLOCK (splitmux);
+      break;
+    }
+    case GST_QUERY_DURATION:{
+      GstClockTime duration;
+      GstFormat fmt;
+
+      gst_query_parse_duration (query, &fmt, NULL);
+      if (fmt != GST_FORMAT_TIME)
+        break;
+
+      GST_OBJECT_LOCK (splitmux);
+      duration = splitmux->total_duration;
+      GST_OBJECT_UNLOCK (splitmux);
+
+      if (duration > 0 && duration != GST_CLOCK_TIME_NONE) {
+        gst_query_set_duration (query, GST_FORMAT_TIME, duration);
+        ret = TRUE;
+      }
+      break;
+    }
+    case GST_QUERY_SEEKING:{
+      GstFormat format;
+
+      gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
+      if (format != GST_FORMAT_TIME)
+        break;
+
+      GST_OBJECT_LOCK (splitmux);
+      gst_query_set_seeking (query, GST_FORMAT_TIME, TRUE, 0,
+          splitmux->total_duration);
+      ret = TRUE;
+      GST_OBJECT_UNLOCK (splitmux);
+
+      break;
+    }
+    case GST_QUERY_SEGMENT:{
+      GstFormat format;
+      gint64 start, stop;
+
+      SPLITMUX_SRC_LOCK (splitmux);
+      format = splitmux->play_segment.format;
+
+      start =
+          gst_segment_to_stream_time (&splitmux->play_segment, format,
+          splitmux->play_segment.start);
+      if (splitmux->play_segment.stop == GST_CLOCK_TIME_NONE) {
+        if (splitmux->play_segment.duration == GST_CLOCK_TIME_NONE)
+          stop = GST_CLOCK_TIME_NONE;
+        else
+          stop = start + splitmux->play_segment.duration;
+      } else {
+        stop = gst_segment_to_stream_time (&splitmux->play_segment, format,
+            splitmux->play_segment.stop);
+      }
+
+      gst_query_set_segment (query, splitmux->play_segment.rate, format, start,
+          stop);
+      ret = TRUE;
+
+      SPLITMUX_SRC_UNLOCK (splitmux);
+      /* End the case explicitly instead of falling into default */
+      break;
+    }
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/multifile/gstsplitmuxsrc.h b/gst/multifile/gstsplitmuxsrc.h
new file mode 100644
index 0000000000..f13ee08729
--- /dev/null
+++ b/gst/multifile/gstsplitmuxsrc.h
@@ -0,0 +1,123 @@
+/* GStreamer Split Muxed File Source
+ * Copyright (C) 2014 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef __GST_SPLITMUX_SRC_H__
+#define __GST_SPLITMUX_SRC_H__
+
+#include <gst/gst.h>
+
+#include "gstsplitmuxpartreader.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_SPLITMUX_SRC \
+  (gst_splitmux_src_get_type())
+#define GST_SPLITMUX_SRC(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SPLITMUX_SRC,GstSplitMuxSrc))
+#define GST_SPLITMUX_SRC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SPLITMUX_SRC,GstSplitMuxSrcClass))
+#define GST_IS_SPLITMUX_SRC(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SPLITMUX_SRC))
+#define GST_IS_SPLITMUX_SRC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SPLITMUX_SRC))
+
+typedef struct _GstSplitMuxSrc GstSplitMuxSrc;
+typedef struct _GstSplitMuxSrcClass GstSplitMuxSrcClass;
+
+/* Bin element that plays back a sequence of splitmux part files
+ * through one GstSplitMuxPartReader per part. */
+struct _GstSplitMuxSrc
+{
+  GstBin parent;
+
+  GMutex lock;                  /* main state lock, taken via SPLITMUX_SRC_LOCK */
+  GMutex msg_lock;              /* taken via SPLITMUX_SRC_MSG_LOCK; NOTE(review):
+                                 * presumably serializes bus-message posting —
+                                 * confirm against gstsplitmuxsrc.c */
+  gboolean running;
+
+  gchar *location;              /* OBJECT_LOCK */
+
+  GstSplitMuxPartReader **parts;        /* one reader per part file; indexed by
+                                         * SplitMuxSrcPad::cur_part */
+  guint num_parts;
+  guint num_prepared_parts;
+  guint num_created_parts;
+  guint cur_part;
+
+  gboolean async_pending;
+  gboolean pads_complete;
+
+  GRWLock pads_rwlock;          /* protects the pad bookkeeping fields below */
+  GList *pads;                  /* pads_lock */
+  guint n_pads;
+  guint n_notlinked;
+
+  GstClockTime total_duration;  /* reported by DURATION / SEEKING queries
+                                 * (read under OBJECT_LOCK there) */
+  GstClockTime end_offset;
+  GstSegment play_segment;      /* configured playback segment; reported by
+                                 * the SEGMENT query under SPLITMUX_SRC_LOCK */
+  guint32 segment_seqnum;
+};
+
+struct _GstSplitMuxSrcClass
+{
+  GstBinClass parent_class;
+};
+
+GType splitmux_src_pad_get_type (void);
+#define SPLITMUX_TYPE_SRC_PAD splitmux_src_pad_get_type()
+#define SPLITMUX_SRC_PAD_CAST(p) ((SplitMuxSrcPad *)(p))
+#define SPLITMUX_SRC_PAD(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),SPLITMUX_TYPE_SRC_PAD,SplitMuxSrcPad))
+
+/* Source pad subclass that remembers which part file is currently
+ * feeding it and which part-reader pad it pulls from. */
+struct _SplitMuxSrcPad
+{
+  GstPad parent;
+
+  guint cur_part;               /* index into GstSplitMuxSrc::parts */
+  GstSplitMuxPartReader *reader;
+  GstPad *part_pad;
+
+  GstSegment segment;
+
+  gboolean set_next_discont;
+  gboolean clear_next_discont;
+
+  gboolean sent_stream_start;   /* NOTE(review): names suggest sticky-event
+                                 * bookkeeping for this pad — confirm in .c */
+  gboolean sent_caps;
+  gboolean sent_segment;
+};
+
+struct _SplitMuxSrcPadClass
+{
+  GstPadClass parent;
+};
+
+GType gst_splitmux_src_get_type (void);
+
+#define SPLITMUX_SRC_LOCK(s) g_mutex_lock(&(s)->lock)
+#define SPLITMUX_SRC_UNLOCK(s) g_mutex_unlock(&(s)->lock)
+
+#define SPLITMUX_SRC_MSG_LOCK(s) g_mutex_lock(&(s)->msg_lock)
+#define SPLITMUX_SRC_MSG_UNLOCK(s) g_mutex_unlock(&(s)->msg_lock)
+
+#define SPLITMUX_SRC_PADS_WLOCK(s) g_rw_lock_writer_lock(&(s)->pads_rwlock)
+#define SPLITMUX_SRC_PADS_WUNLOCK(s) g_rw_lock_writer_unlock(&(s)->pads_rwlock)
+#define SPLITMUX_SRC_PADS_RLOCK(s) g_rw_lock_reader_lock(&(s)->pads_rwlock)
+#define SPLITMUX_SRC_PADS_RUNLOCK(s) g_rw_lock_reader_unlock(&(s)->pads_rwlock)
+
+GST_ELEMENT_REGISTER_DECLARE (splitmuxsrc);
+
+G_END_DECLS
+
+#endif /* __GST_SPLITMUX_SRC_H__ */
diff --git a/gst/multifile/gstsplitutils.c b/gst/multifile/gstsplitutils.c
new file mode 100644
index 0000000000..9b088a5f77
--- /dev/null
+++ b/gst/multifile/gstsplitutils.c
@@ -0,0 +1,105 @@
+/* GStreamer Split Source Utility Functions
+ * Copyright (C) 2011 Collabora Ltd. <tim.muller@collabora.co.uk>
+ * Copyright (C) 2014 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include "gstsplitutils.h"
+#include "patternspec.h"
+
+/* GCompareFunc-compatible helper: order two filename entries of a
+ * GPtrArray lexicographically by their string contents. */
+static int
+gst_split_util_array_sortfunc (gchar ** a, gchar ** b)
+{
+  const gchar *left = *a;
+  const gchar *right = *b;
+
+  return strcmp (left, right);
+}
+
+/* Scan @dirname for entries matching the glob-style pattern @basename
+ * ('*' and '?' wildcards) and return them as a sorted, NULL-terminated
+ * array of newly-allocated full paths (free with g_strfreev()).
+ * Returns NULL with @err set on invalid arguments, unreadable directory,
+ * non-UTF-8 pattern in UTF-8 mode, or when nothing matches. */
+gchar **
+gst_split_util_find_files (const gchar * dirname,
+    const gchar * basename, GError ** err)
+{
+  PatternSpec *pspec;
+  GPtrArray *files;
+  const gchar *name;
+  GDir *dir;
+
+  if (dirname == NULL || basename == NULL)
+    goto invalid_location;
+
+  GST_INFO ("checking in directory '%s' for pattern '%s'", dirname, basename);
+
+  dir = g_dir_open (dirname, 0, err);
+  if (dir == NULL)
+    return NULL;
+
+  if (DEFAULT_PATTERN_MATCH_MODE == MATCH_MODE_UTF8 &&
+      !g_utf8_validate (basename, -1, NULL)) {
+    goto not_utf8;
+  }
+
+  /* mode will be AUTO on linux/unix and UTF8 on win32 */
+  pspec = pattern_spec_new (basename, DEFAULT_PATTERN_MATCH_MODE);
+
+  files = g_ptr_array_new ();
+
+  /* collect a full path for every directory entry that matches */
+  while ((name = g_dir_read_name (dir))) {
+    GST_TRACE ("check: %s", name);
+    if (pattern_match_string (pspec, name)) {
+      GST_DEBUG ("match: %s", name);
+      g_ptr_array_add (files, g_build_filename (dirname, name, NULL));
+    }
+  }
+
+  if (files->len == 0)
+    goto no_matches;
+
+  /* sort into filename order, then NULL-terminate for strv-style use */
+  g_ptr_array_sort (files, (GCompareFunc) gst_split_util_array_sortfunc);
+  g_ptr_array_add (files, NULL);
+
+  pattern_spec_free (pspec);
+  g_dir_close (dir);
+
+  /* free the array shell only; ownership of the strings moves to caller */
+  return (gchar **) g_ptr_array_free (files, FALSE);
+
+/* ERRORS */
+invalid_location:
+  {
+    g_set_error_literal (err, G_FILE_ERROR, G_FILE_ERROR_INVAL,
+        "No filename specified.");
+    return NULL;
+  }
+not_utf8:
+  {
+    g_dir_close (dir);
+    g_set_error_literal (err, G_FILE_ERROR, G_FILE_ERROR_INVAL,
+        "Filename pattern must be UTF-8 on Windows.");
+    return NULL;
+  }
+no_matches:
+  {
+    pattern_spec_free (pspec);
+    g_dir_close (dir);
+    g_set_error_literal (err, G_FILE_ERROR, G_FILE_ERROR_NOENT,
+        "Found no files matching the pattern.");
+    return NULL;
+  }
+}
diff --git a/gst/multifile/gstsplitutils.h b/gst/multifile/gstsplitutils.h
new file mode 100644
index 0000000000..2c78b233f1
--- /dev/null
+++ b/gst/multifile/gstsplitutils.h
@@ -0,0 +1,40 @@
+/* GStreamer Split Source Utility Functions
+ * Copyright (C) 2011 Collabora Ltd. <tim.muller@collabora.co.uk>
+ * Copyright (C) 2014 Jan Schmidt <jan@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_SPLITUTILS_H__
+#define __GST_SPLITUTILS_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#ifdef G_OS_WIN32
+#define DEFAULT_PATTERN_MATCH_MODE MATCH_MODE_UTF8
+#else
+#define DEFAULT_PATTERN_MATCH_MODE MATCH_MODE_AUTO
+#endif
+
+gchar **
+gst_split_util_find_files (const gchar * dirname,
+ const gchar * basename, GError ** err);
+
+G_END_DECLS
+
+#endif
diff --git a/gst/multifile/meson.build b/gst/multifile/meson.build
new file mode 100644
index 0000000000..b7215f301b
--- /dev/null
+++ b/gst/multifile/meson.build
@@ -0,0 +1,40 @@
+# Plugin sources: multifile src/sink, splitfile source, splitmux sink/src
+# with their part-reader and pattern-matching helpers, and the image
+# sequence source.
+multifile_sources = [
+  'gstmultifilesink.c',
+  'gstmultifilesrc.c',
+  'gstmultifile.c',
+  'gstsplitfilesrc.c',
+  'gstsplitmuxpartreader.c',
+  'gstsplitmuxsink.c',
+  'gstsplitmuxsrc.c',
+  'gstsplitutils.c',
+  'patternspec.c',
+  'gstimagesequencesrc.c',
+]
+
+gstmultifile = library('gstmultifile',
+  multifile_sources,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc, libsinc],
+  dependencies : [gstvideo_dep, gstbase_dep,
+    gstpbutils_dep, gio_dep],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstmultifile, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstmultifile]
+
+# Standalone manual test binary for the splitmux part reader; built from
+# the relevant plugin sources directly and never installed.
+test_splitmuxpartreader_sources = [
+  'test-splitmuxpartreader.c',
+  'gstsplitmuxpartreader.c',
+  'gstsplitmuxsrc.c',
+  'gstsplitutils.c',
+  'patternspec.c',
+]
+
+executable('test-splitmuxpartreader',
+  test_splitmuxpartreader_sources,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc, libsinc],
+  dependencies : [gsttag_dep, gstbase_dep],
+  install : false,
+)
diff --git a/gst/multifile/patternspec.c b/gst/multifile/patternspec.c
new file mode 100644
index 0000000000..1833601620
--- /dev/null
+++ b/gst/multifile/patternspec.c
@@ -0,0 +1,334 @@
+/* GPattern copy that supports raw (non-utf8) matching
+ * based on: GLIB - Library of useful routines for C programming
+ * Copyright (C) 1995-1997, 1999 Peter Mattis, Red Hat, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "patternspec.h"
+#include <string.h>
+
+typedef enum
+{
+ MATCH_ALL, /* "*A?A*" */
+ MATCH_ALL_TAIL, /* "*A?AA" */
+ MATCH_HEAD, /* "AAAA*" */
+ MATCH_TAIL, /* "*AAAA" */
+ MATCH_EXACT, /* "AAAAA" */
+ MATCH_LAST
+} MatchType;
+
+struct _PatternSpec
+{
+ MatchMode match_mode;
+ MatchType match_type;
+ guint pattern_length;
+ guint min_length;
+ guint max_length;
+ gchar *pattern;
+};
+
+/* Return a newly-allocated, byte-wise reversed copy of the first @size
+ * bytes of @str.  Intentionally not UTF-8 aware: used for raw-mode
+ * (non-UTF-8) tail matching. */
+static inline gchar *
+raw_strreverse (const gchar * str, gssize size)
+{
+  gchar *copy;
+
+  g_assert (size > 0);
+
+  copy = g_strndup (str, size);
+  return g_strreverse (copy);
+}
+
+/* Core '*' / '?' matcher (adapted from GLib's GPattern).  Walks
+ * @match_pattern and @match_string in lockstep: '?' consumes one UTF-8
+ * character in MATCH_MODE_UTF8 and one raw byte otherwise; '*' matches
+ * any run and triggers recursive backtracking.  *wildcard_reached_p is
+ * set as soon as a '*' is consumed so the recursive caller can stop
+ * retrying once the sub-pattern up to the next wildcard matched but the
+ * remainder failed.  Returns TRUE only if the whole string is consumed
+ * by the whole pattern. */
+static inline gboolean
+pattern_ph_match (const gchar * match_pattern, MatchMode match_mode,
+    const gchar * match_string, gboolean * wildcard_reached_p)
+{
+  register const gchar *pattern, *string;
+  register gchar ch;
+
+  pattern = match_pattern;
+  string = match_string;
+
+  ch = *pattern;
+  pattern++;
+  while (ch) {
+    switch (ch) {
+      case '?':
+        if (!*string)
+          return FALSE;
+        if (match_mode == MATCH_MODE_UTF8)
+          string = g_utf8_next_char (string);
+        else
+          ++string;
+        break;
+
+      case '*':
+        *wildcard_reached_p = TRUE;
+        /* collapse any following '*' / '?' run; each '?' still has to
+         * consume one character of the string */
+        do {
+          ch = *pattern;
+          pattern++;
+          if (ch == '?') {
+            if (!*string)
+              return FALSE;
+            if (match_mode == MATCH_MODE_UTF8)
+              string = g_utf8_next_char (string);
+            else
+              ++string;
+          }
+        }
+        while (ch == '*' || ch == '?');
+        /* pattern ended in wildcards: everything remaining matches */
+        if (!ch)
+          return TRUE;
+        /* try each occurrence of ch as the anchor for the rest pattern */
+        do {
+          gboolean next_wildcard_reached = FALSE;
+          while (ch != *string) {
+            if (!*string)
+              return FALSE;
+            if (match_mode == MATCH_MODE_UTF8)
+              string = g_utf8_next_char (string);
+            else
+              ++string;
+          }
+          string++;
+          if (pattern_ph_match (pattern, match_mode, string,
+                  &next_wildcard_reached))
+            return TRUE;
+          if (next_wildcard_reached)
+            /* the forthcoming pattern substring up to the next wildcard has
+             * been matched, but a mismatch occurred for the rest of the
+             * pattern, following the next wildcard.
+             * there's no need to advance the current match position any
+             * further if the rest pattern will not match.
+             */
+            return FALSE;
+        }
+        while (*string);
+        break;
+
+      default:
+        /* literal byte must match exactly */
+        if (ch == *string)
+          string++;
+        else
+          return FALSE;
+        break;
+    }
+
+    ch = *pattern;
+    pattern++;
+  }
+
+  return *string == 0;
+}
+
+/* Match @string (of byte length @string_length) against the compiled
+ * @pspec, dispatching on the specialized match type chosen by
+ * pattern_spec_new().  @string_reversed may be NULL; it is only used
+ * for MATCH_ALL_TAIL and is computed on the fly when absent. */
+static gboolean
+pattern_match (PatternSpec * pspec, guint string_length,
+    const gchar * string, const gchar * string_reversed)
+{
+  MatchMode match_mode;
+
+  g_assert (pspec != NULL);
+  g_assert (string != NULL);
+
+  /* quick reject using the length bounds precomputed from the pattern */
+  if (string_length < pspec->min_length || string_length > pspec->max_length)
+    return FALSE;
+
+  match_mode = pspec->match_mode;
+  if (match_mode == MATCH_MODE_AUTO) {
+    /* AUTO: match byte-wise unless the string is valid UTF-8 */
+    if (!g_utf8_validate (string, string_length, NULL))
+      match_mode = MATCH_MODE_RAW;
+    else
+      match_mode = MATCH_MODE_UTF8;
+  }
+
+  switch (pspec->match_type) {
+      gboolean dummy;
+    case MATCH_ALL:
+      return pattern_ph_match (pspec->pattern, match_mode, string, &dummy);
+    case MATCH_ALL_TAIL:
+      /* pattern was stored reversed, so match against a reversed string */
+      if (string_reversed)
+        return pattern_ph_match (pspec->pattern, match_mode, string_reversed,
+            &dummy);
+      else {
+        gboolean result;
+        gchar *tmp;
+        if (match_mode == MATCH_MODE_UTF8) {
+          tmp = g_utf8_strreverse (string, string_length);
+        } else {
+          tmp = raw_strreverse (string, string_length);
+        }
+        result = pattern_ph_match (pspec->pattern, match_mode, tmp, &dummy);
+        g_free (tmp);
+        return result;
+      }
+    case MATCH_HEAD:
+      if (pspec->pattern_length == string_length)
+        return memcmp (pspec->pattern, string, string_length) == 0;
+      else if (pspec->pattern_length)
+        return memcmp (pspec->pattern, string, pspec->pattern_length) == 0;
+      else
+        return TRUE;
+    case MATCH_TAIL:
+      if (pspec->pattern_length)
+        /* compare incl. NUL terminator */
+        return memcmp (pspec->pattern,
+            string + (string_length - pspec->pattern_length),
+            pspec->pattern_length + 1) == 0;
+      else
+        return TRUE;
+    case MATCH_EXACT:
+      if (pspec->pattern_length != string_length)
+        return FALSE;
+      else
+        return memcmp (pspec->pattern, string, string_length) == 0;
+    default:
+      g_return_val_if_fail (pspec->match_type < MATCH_LAST, FALSE);
+      return FALSE;
+  }
+}
+
+/* Compile @pattern into a PatternSpec.  Canonicalizes the pattern
+ * (collapses runs of '*'; defers '?' jokers so they land after any
+ * adjacent '*'), records the min/max byte lengths a matching string can
+ * have, and picks the cheapest specialized match type: EXACT / HEAD /
+ * TAIL for wildcard-free or single-edge-'*' patterns, otherwise a full
+ * scan (ALL or ALL_TAIL).  For MATCH_ALL_TAIL the canonical pattern is
+ * stored reversed so matching runs over a reversed string. */
+PatternSpec *
+pattern_spec_new (const gchar * pattern, MatchMode match_mode)
+{
+  PatternSpec *pspec;
+  gboolean seen_joker = FALSE, seen_wildcard = FALSE, more_wildcards = FALSE;
+  gint hw_pos = -1, tw_pos = -1, hj_pos = -1, tj_pos = -1;
+  gboolean follows_wildcard = FALSE;
+  guint pending_jokers = 0;
+  const gchar *s;
+  gchar *d;
+  guint i;
+
+  g_assert (pattern != NULL);
+  g_assert (match_mode != MATCH_MODE_UTF8
+      || g_utf8_validate (pattern, -1, NULL));
+
+  /* canonicalize pattern and collect necessary stats */
+  pspec = g_new (PatternSpec, 1);
+  pspec->match_mode = match_mode;
+  pspec->pattern_length = strlen (pattern);
+  pspec->min_length = 0;
+  pspec->max_length = 0;
+  pspec->pattern = g_new (gchar, pspec->pattern_length + 1);
+
+  if (pspec->match_mode == MATCH_MODE_AUTO) {
+    /* a pattern that is not valid UTF-8 can only match byte-wise */
+    if (!g_utf8_validate (pattern, -1, NULL))
+      pspec->match_mode = MATCH_MODE_RAW;
+  }
+
+  /* copy pattern into pspec->pattern; i tracks the write position,
+   * hw/tw and hj/tj record head/tail positions of wildcards and jokers */
+  d = pspec->pattern;
+  for (i = 0, s = pattern; *s != 0; s++) {
+    switch (*s) {
+      case '*':
+        if (follows_wildcard) { /* compress multiple wildcards */
+          pspec->pattern_length--;
+          continue;
+        }
+        follows_wildcard = TRUE;
+        if (hw_pos < 0)
+          hw_pos = i;
+        tw_pos = i;
+        break;
+      case '?':
+        /* jokers are buffered and flushed before the next literal */
+        pending_jokers++;
+        pspec->min_length++;
+        if (pspec->match_mode == MATCH_MODE_RAW) {
+          pspec->max_length += 1;
+        } else {
+          pspec->max_length += 4; /* maximum UTF-8 character length */
+        }
+        continue;
+      default:
+        for (; pending_jokers; pending_jokers--, i++) {
+          *d++ = '?';
+          if (hj_pos < 0)
+            hj_pos = i;
+          tj_pos = i;
+        }
+        follows_wildcard = FALSE;
+        pspec->min_length++;
+        pspec->max_length++;
+        break;
+    }
+    *d++ = *s;
+    i++;
+  }
+  /* flush trailing jokers and NUL-terminate the canonical pattern */
+  for (; pending_jokers; pending_jokers--) {
+    *d++ = '?';
+    if (hj_pos < 0)
+      hj_pos = i;
+    tj_pos = i;
+  }
+  *d++ = 0;
+  seen_joker = hj_pos >= 0;
+  seen_wildcard = hw_pos >= 0;
+  more_wildcards = seen_wildcard && hw_pos != tw_pos;
+  if (seen_wildcard)
+    pspec->max_length = G_MAXUINT;
+
+  /* special case sole head/tail wildcard or exact matches */
+  if (!seen_joker && !more_wildcards) {
+    if (pspec->pattern[0] == '*') {
+      pspec->match_type = MATCH_TAIL;
+      memmove (pspec->pattern, pspec->pattern + 1, --pspec->pattern_length);
+      pspec->pattern[pspec->pattern_length] = 0;
+      return pspec;
+    }
+    if (pspec->pattern_length > 0 &&
+        pspec->pattern[pspec->pattern_length - 1] == '*') {
+      pspec->match_type = MATCH_HEAD;
+      pspec->pattern[--pspec->pattern_length] = 0;
+      return pspec;
+    }
+    if (!seen_wildcard) {
+      pspec->match_type = MATCH_EXACT;
+      return pspec;
+    }
+  }
+
+  /* now just need to distinguish between head or tail match start */
+  tw_pos = pspec->pattern_length - 1 - tw_pos;  /* last pos to tail distance */
+  tj_pos = pspec->pattern_length - 1 - tj_pos;  /* last pos to tail distance */
+  if (seen_wildcard)
+    pspec->match_type = tw_pos > hw_pos ? MATCH_ALL_TAIL : MATCH_ALL;
+  else  /* seen_joker */
+    pspec->match_type = tj_pos > hj_pos ? MATCH_ALL_TAIL : MATCH_ALL;
+  if (pspec->match_type == MATCH_ALL_TAIL) {
+    gchar *tmp = pspec->pattern;
+
+    if (pspec->match_mode == MATCH_MODE_RAW) {
+      pspec->pattern = raw_strreverse (pspec->pattern, pspec->pattern_length);
+    } else {
+      pspec->pattern =
+          g_utf8_strreverse (pspec->pattern, pspec->pattern_length);
+    }
+    g_free (tmp);
+  }
+  return pspec;
+}
+
+/* Release a compiled PatternSpec and its canonicalized pattern string. */
+void
+pattern_spec_free (PatternSpec * pspec)
+{
+  g_assert (pspec != NULL);
+
+  g_clear_pointer (&pspec->pattern, g_free);
+  g_free (pspec);
+}
+
+/* Convenience wrapper: match a NUL-terminated @string against @pspec
+ * without supplying a precomputed reversed copy. */
+gboolean
+pattern_match_string (PatternSpec * pspec, const gchar * string)
+{
+  guint len = strlen (string);
+
+  return pattern_match (pspec, len, string, NULL);
+}
diff --git a/gst/multifile/patternspec.h b/gst/multifile/patternspec.h
new file mode 100644
index 0000000000..5bb9b403c5
--- /dev/null
+++ b/gst/multifile/patternspec.h
@@ -0,0 +1,47 @@
+/* GPattern copy that supports raw (non-utf8) matching
+ * based on: GLIB - Library of useful routines for C programming
+ * Copyright (C) 1995-1997, 1999 Peter Mattis, Red Hat, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __PATTERN_SPEC_H__
+#define __PATTERN_SPEC_H__
+
+#include <glib.h>
+
+G_BEGIN_DECLS
+
+/* How pattern and string bytes are interpreted while matching. */
+typedef enum
+{
+  MATCH_MODE_AUTO = 0,          /* per-string: UTF8 if valid UTF-8, else RAW */
+  MATCH_MODE_UTF8,
+  MATCH_MODE_RAW
+} MatchMode;
+
+/* Opaque compiled pattern; see patternspec.c */
+typedef struct _PatternSpec PatternSpec;
+
+/* Compile a glob-style ('*' / '?') pattern; free with pattern_spec_free() */
+PatternSpec * pattern_spec_new (const gchar * pattern,
+                                MatchMode match_mode);
+
+void pattern_spec_free (PatternSpec * pspec);
+
+/* Returns TRUE if NUL-terminated @string matches @pspec */
+gboolean pattern_match_string (PatternSpec * pspec,
+                               const gchar * string);
+
+G_END_DECLS
+
+#endif /* __PATTERN_SPEC_H__ */
diff --git a/gst/multifile/test-splitmuxpartreader.c b/gst/multifile/test-splitmuxpartreader.c
new file mode 100644
index 0000000000..18756a6ee9
--- /dev/null
+++ b/gst/multifile/test-splitmuxpartreader.c
@@ -0,0 +1,104 @@
+#include <gst/gst.h>
+#include "gstsplitmuxpartreader.h"
+#include "gstsplitmuxsrc.h"
+
+GST_DEBUG_CATEGORY_EXTERN (splitmux_debug);
+
+static const gchar *const path = "out001.mp4";
+
+typedef struct _CustomData
+{
+ GstSplitMuxPartReader *reader;
+ GMainLoop *main_loop;
+ GstBus *bus;
+} CustomData;
+
+/* "prepared" signal handler: log that async preparation finished. */
+static void
+part_prepared (GstSplitMuxPartReader * reader)
+{
+  g_print ("Part prepared\n");
+}
+
+/* Bus watch: report errors and end-of-stream, quitting the main loop on
+ * either.  Always returns TRUE so the watch stays installed. */
+static gboolean
+handle_message (GstBus * bus, GstMessage * msg, CustomData * data)
+{
+  GError *err = NULL;
+  gchar *debug_info = NULL;
+  GstMessageType type = GST_MESSAGE_TYPE (msg);
+
+  if (type == GST_MESSAGE_ERROR) {
+    gst_message_parse_error (msg, &err, &debug_info);
+    g_print ("Error received from element %s: %s\n",
+        GST_OBJECT_NAME (msg->src), err->message);
+    g_print ("Debugging information: %s\n", debug_info ? debug_info : "none");
+    g_clear_error (&err);
+    g_free (debug_info);
+    g_main_loop_quit (data->main_loop);
+  } else if (type == GST_MESSAGE_EOS) {
+    g_print ("End-Of-Stream reached.\n");
+    g_main_loop_quit (data->main_loop);
+  }
+
+  return TRUE;
+}
+
+/* One-shot idle callback: kick off asynchronous preparation of the part
+ * reader once the main loop is running, then remove ourselves. */
+static gboolean
+start_reader (CustomData * data)
+{
+  GstSplitMuxPartReader *reader = data->reader;
+
+  g_print ("Preparing part reader for %s\n", path);
+  gst_splitmux_part_reader_prepare (reader);
+
+  return G_SOURCE_REMOVE;
+}
+
+/* Pad callback: hand the part reader a dummy SplitMuxSrcPad mirroring
+ * the name and direction of the pad it discovered. */
+static GstPad *
+handle_get_pad (GstSplitMuxPartReader * reader, GstPad * src_pad,
+    CustomData * data)
+{
+  const gchar *pad_name = GST_PAD_NAME (src_pad);
+  GstPad *dummy;
+
+  g_print ("Creating new dummy pad %s\n", pad_name);
+
+  dummy = g_object_new (SPLITMUX_TYPE_SRC_PAD,
+      "name", pad_name, "direction", GST_PAD_SRC, NULL);
+
+  return dummy;
+}
+
+/* Standalone manual test: prepare a single splitmux part file
+ * ("out001.mp4"), log bus errors / EOS via a main loop, then tear
+ * everything down again. */
+int
+main (int argc, char **argv)
+{
+  CustomData data;
+
+  gst_init (&argc, &argv);
+
+  data.main_loop = g_main_loop_new (NULL, FALSE);
+
+  data.reader = g_object_new (GST_TYPE_SPLITMUX_PART_READER, NULL);
+  data.bus = gst_element_get_bus (GST_ELEMENT_CAST (data.reader));
+
+  /* Listen for bus messages */
+  gst_bus_add_watch (data.bus, (GstBusFunc) handle_message, &data);
+
+  gst_splitmux_part_reader_set_location (data.reader, path);
+
+  /* Connect to prepare signal */
+  g_signal_connect (data.reader, "prepared", (GCallback) part_prepared, &data);
+  gst_splitmux_part_reader_set_callbacks (data.reader, &data,
+      (GstSplitMuxPartReaderPadCb) handle_get_pad);
+
+  /* defer preparation until the main loop is actually running */
+  g_idle_add ((GSourceFunc) start_reader, &data);
+
+  /* Run mainloop */
+  g_main_loop_run (data.main_loop);
+
+  gst_splitmux_part_reader_unprepare (data.reader);
+
+  g_main_loop_unref (data.main_loop);
+  gst_object_unref (data.bus);
+  g_object_unref (data.reader);
+
+  return 0;
+}
diff --git a/gst/multipart/meson.build b/gst/multipart/meson.build
new file mode 100644
index 0000000000..1c9f6bad0c
--- /dev/null
+++ b/gst/multipart/meson.build
@@ -0,0 +1,12 @@
+# multipart plugin: multipart stream mux/demux elements
+gstmultipart = library('gstmultipart',
+  'multipart.c',
+  'multipartdemux.c',
+  'multipartmux.c',
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc],
+  dependencies : [gstbase_dep],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstmultipart, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstmultipart]
new file mode 100644
index 0000000000..ac1638478d
--- /dev/null
+++ b/gst/multipart/multipart.c
@@ -0,0 +1,44 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "multipartdemux.h"
+#include "multipartmux.h"
+
+/* Register both multipart elements; the plugin loads successfully if at
+ * least one of them registered. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean have_demux = GST_ELEMENT_REGISTER (multipartdemux, plugin);
+  gboolean have_mux = GST_ELEMENT_REGISTER (multipartmux, plugin);
+
+  return have_demux || have_mux;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ multipart,
+ "multipart stream manipulation",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/multipart/multipartdemux.c b/gst/multipart/multipartdemux.c
new file mode 100644
index 0000000000..28bee57092
--- /dev/null
+++ b/gst/multipart/multipartdemux.c
@@ -0,0 +1,802 @@
+/* GStreamer
+ * Copyright (C) 2006 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
+ *
+ * gstmultipartdemux.c: multipart stream demuxer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-multipartdemux
+ * @title: multipartdemux
+ * @see_also: #GstMultipartMux
+ *
+ * MultipartDemux uses the Content-type field of incoming buffers to demux and
+ * push data to dynamic source pads. Most of the time multipart streams are
+ * sequential JPEG frames generated from a live source such as a network source
+ * or a camera.
+ *
+ * The output buffers of the multipartdemux typically have no timestamps and are
+ * usually played as fast as possible (at the rate that the source provides the
+ * data).
+ *
+ * The content in multipart streams is separated by a boundary string, which
+ * can be configured explicitly with the #GstMultipartDemux:boundary property;
+ * otherwise it will be autodetected.
+ *
+ * ## Sample pipelines
+ * |[
+ * gst-launch-1.0 filesrc location=/tmp/test.multipart ! multipartdemux ! image/jpeg,framerate=\(fraction\)5/1 ! jpegparse ! jpegdec ! videoconvert ! autovideosink
+ * ]| a simple pipeline to demux a multipart file muxed with #GstMultipartMux
+ * containing JPEG frames.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "multipartdemux.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_multipart_demux_debug);
+#define GST_CAT_DEFAULT gst_multipart_demux_debug
+
+#define DEFAULT_BOUNDARY NULL
+#define DEFAULT_SINGLE_STREAM FALSE
+
+enum
+{
+ PROP_0,
+ PROP_BOUNDARY,
+ PROP_SINGLE_STREAM
+};
+
+static GstStaticPadTemplate multipart_demux_src_template_factory =
+GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate multipart_demux_sink_template_factory =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("multipart/x-mixed-replace")
+ );
+
+typedef struct
+{
+ const gchar *key;
+ const gchar *val;
+} GstNamesMap;
+
+/* convert from mime types to gst structure names. Add more when needed. The
+ * mime-type is stored as lowercase */
+static const GstNamesMap gstnames[] = {
+ /* RFC 2046 says audio/basic is mulaw, mono, 8000Hz */
+ {"audio/basic", "audio/x-mulaw, channels=1, rate=8000"},
+ {"audio/g726-16",
+ "audio/x-adpcm, bitrate=16000, layout=g726, channels=1, rate=8000"},
+ {"audio/g726-24",
+ "audio/x-adpcm, bitrate=24000, layout=g726, channels=1, rate=8000"},
+ {"audio/g726-32",
+ "audio/x-adpcm, bitrate=32000, layout=g726, channels=1, rate=8000"},
+ {"audio/g726-40",
+ "audio/x-adpcm, bitrate=40000, layout=g726, channels=1, rate=8000"},
+ /* Panasonic Network Cameras non-standard types */
+ {"audio/g726",
+ "audio/x-adpcm, bitrate=32000, layout=g726, channels=1, rate=8000"},
+ {NULL, NULL}
+};
+
+
+static GstFlowReturn gst_multipart_demux_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buf);
+static gboolean gst_multipart_demux_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+
+static GstStateChangeReturn gst_multipart_demux_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_multipart_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static void gst_multipart_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static void gst_multipart_demux_dispose (GObject * object);
+
+#define gst_multipart_demux_parent_class parent_class
+G_DEFINE_TYPE (GstMultipartDemux, gst_multipart_demux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (multipartdemux, "multipartdemux",
+ GST_RANK_PRIMARY, GST_TYPE_MULTIPART_DEMUX,
+ GST_DEBUG_CATEGORY_INIT (gst_multipart_demux_debug, "multipartdemux", 0,
+ "multipart demuxer"));
+
+/* Class initialisation: hook up GObject vmethods, install the "boundary"
+ * and "single-stream" properties, build the class-wide mime-type ->
+ * caps-string lookup table, and register pad templates and metadata. */
+static void
+gst_multipart_demux_class_init (GstMultipartDemuxClass * klass)
+{
+  int i;
+
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+  gobject_class->dispose = gst_multipart_demux_dispose;
+  gobject_class->set_property = gst_multipart_set_property;
+  gobject_class->get_property = gst_multipart_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_BOUNDARY,
+      g_param_spec_string ("boundary", "Boundary",
+          "The boundary string separating data, automatic if NULL",
+          DEFAULT_BOUNDARY,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstMultipartDemux:single-stream:
+   *
+   * Assume that there is only one stream whose content-type will
+   * not change and emit no-more-pads as soon as the first boundary
+   * content is parsed, decoded, and pads are linked.
+   */
+  g_object_class_install_property (gobject_class, PROP_SINGLE_STREAM,
+      g_param_spec_boolean ("single-stream", "Single Stream",
+          "Assume that there is only one stream whose content-type will not change and emit no-more-pads as soon as the first boundary content is parsed, decoded, and pads are linked",
+          DEFAULT_SINGLE_STREAM, G_PARAM_READWRITE));
+
+  /* populate gst names and mime types pairs; the table is class-scoped and
+   * shared by all instances, so it is never freed */
+  klass->gstnames = g_hash_table_new (g_str_hash, g_str_equal);
+  for (i = 0; gstnames[i].key; i++) {
+    g_hash_table_insert (klass->gstnames, (gpointer) gstnames[i].key,
+        (gpointer) gstnames[i].val);
+  }
+
+  gstelement_class->change_state = gst_multipart_demux_change_state;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &multipart_demux_sink_template_factory);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &multipart_demux_src_template_factory);
+  gst_element_class_set_static_metadata (gstelement_class, "Multipart demuxer",
+      "Codec/Demuxer", "demux multipart streams",
+      "Wim Taymans <wim.taymans@gmail.com>, Sjoerd Simons <sjoerd@luon.net>");
+}
+
+/* Instance initialisation: create the always sink pad, install the chain
+ * and event functions, and reset all parser state to its defaults. */
+static void
+gst_multipart_demux_init (GstMultipartDemux * multipart)
+{
+  /* create the sink pad */
+  multipart->sinkpad =
+      gst_pad_new_from_static_template (&multipart_demux_sink_template_factory,
+      "sink");
+  gst_element_add_pad (GST_ELEMENT_CAST (multipart), multipart->sinkpad);
+  gst_pad_set_chain_function (multipart->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_multipart_demux_chain));
+  gst_pad_set_event_function (multipart->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_multipart_demux_event));
+
+  multipart->adapter = gst_adapter_new ();
+  multipart->boundary = DEFAULT_BOUNDARY;      /* NULL: autodetect */
+  multipart->mime_type = NULL;
+  multipart->content_length = -1;      /* -1 means unknown */
+  multipart->header_completed = FALSE;
+  multipart->scanpos = 0;
+  multipart->singleStream = DEFAULT_SINGLE_STREAM;
+  multipart->have_group_id = FALSE;
+  multipart->group_id = G_MAXUINT;
+}
+
+/* Remove and free all dynamically created source pads.  The pad reference
+ * itself is owned by the element, so removing the pad from the element is
+ * enough; only the GstMultipartPad wrapper and its mime string are freed. */
+static void
+gst_multipart_demux_remove_src_pads (GstMultipartDemux * demux)
+{
+  while (demux->srcpads != NULL) {
+    GstMultipartPad *mppad = demux->srcpads->data;
+
+    gst_element_remove_pad (GST_ELEMENT (demux), mppad->pad);
+    g_free (mppad->mime);
+    g_free (mppad);
+    demux->srcpads = g_slist_delete_link (demux->srcpads, demux->srcpads);
+  }
+  demux->srcpads = NULL;
+  demux->numpads = 0;
+}
+
+/* GObject dispose: drop the adapter, free parser strings and remove the
+ * source pads.  Pointers are NULLed because dispose may run more than once. */
+static void
+gst_multipart_demux_dispose (GObject * object)
+{
+  GstMultipartDemux *demux = GST_MULTIPART_DEMUX (object);
+
+  if (demux->adapter != NULL)
+    g_object_unref (demux->adapter);
+  demux->adapter = NULL;
+  g_free (demux->boundary);
+  demux->boundary = NULL;
+  g_free (demux->mime_type);
+  demux->mime_type = NULL;
+  gst_multipart_demux_remove_src_pads (demux);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* Map a (lowercased) mime type to a GStreamer caps string via the class
+ * lookup table; falls back to the mime type itself when there is no
+ * mapping.  Returns a borrowed pointer (either into the table or the
+ * caller's @mimetype) — do not free. */
+static const gchar *
+gst_multipart_demux_get_gstname (GstMultipartDemux * demux, gchar * mimetype)
+{
+  GstMultipartDemuxClass *klass;
+  const gchar *gstname;
+
+  klass = GST_MULTIPART_DEMUX_GET_CLASS (demux);
+
+  /* use hashtable to convert to gst name */
+  gstname = g_hash_table_lookup (klass->gstnames, mimetype);
+  if (gstname == NULL) {
+    /* no gst name mapping, use mime type */
+    gstname = mimetype;
+  }
+  GST_DEBUG_OBJECT (demux, "gst name for %s is %s", mimetype, gstname);
+  return gstname;
+}
+
+/* Combine the flow return of @pad with the state of the other source pads:
+ * NOT_LINKED is only propagated upstream when *all* pads are not linked,
+ * so one unlinked branch does not stop the whole stream.  Any other return
+ * value (OK or a real error) is passed through unchanged. */
+static GstFlowReturn
+gst_multipart_combine_flows (GstMultipartDemux * demux, GstMultipartPad * pad,
+    GstFlowReturn ret)
+{
+  GSList *walk;
+
+  /* store the value */
+  pad->last_ret = ret;
+
+  /* any other error that is not-linked can be returned right
+   * away */
+  if (ret != GST_FLOW_NOT_LINKED)
+    goto done;
+
+  /* only return NOT_LINKED if all other pads returned NOT_LINKED */
+  for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
+    GstMultipartPad *opad = (GstMultipartPad *) walk->data;
+
+    ret = opad->last_ret;
+    /* some other return value (must be SUCCESS but we can return
+     * other values as well) */
+    if (ret != GST_FLOW_NOT_LINKED)
+      goto done;
+  }
+  /* if we get here, all other pads were unlinked and we return
+   * NOT_LINKED then */
+done:
+  return ret;
+}
+
+/* Find the source pad handling @mime, creating it (plus stream-start,
+ * caps, and optionally no-more-pads) if it does not exist yet.  @created,
+ * when non-NULL, is set to TRUE for a newly created pad so the caller can
+ * push the initial segment/tag events.  The event/caps ordering below is
+ * deliberate: stream-start must be stored before caps are set and before
+ * the pad is added to the element. */
+static GstMultipartPad *
+gst_multipart_find_pad_by_mime (GstMultipartDemux * demux, gchar * mime,
+    gboolean * created)
+{
+  GSList *walk;
+
+  walk = demux->srcpads;
+  while (walk) {
+    GstMultipartPad *pad = (GstMultipartPad *) walk->data;
+
+    if (!strcmp (pad->mime, mime)) {
+      if (created) {
+        *created = FALSE;
+      }
+      return pad;
+    }
+
+    walk = walk->next;
+  }
+  /* pad not found, create it */
+  {
+    GstPad *pad;
+    GstMultipartPad *mppad;
+    gchar *name;
+    const gchar *capsname;
+    GstCaps *caps;
+    gchar *stream_id;
+    GstEvent *event;
+
+    mppad = g_new0 (GstMultipartPad, 1);
+
+    GST_DEBUG_OBJECT (demux, "creating pad with mime: %s", mime);
+
+    name = g_strdup_printf ("src_%u", demux->numpads);
+    pad =
+        gst_pad_new_from_static_template (&multipart_demux_src_template_factory,
+        name);
+    g_free (name);
+
+    mppad->pad = pad;
+    mppad->mime = g_strdup (mime);
+    mppad->last_ret = GST_FLOW_OK;
+    mppad->last_ts = GST_CLOCK_TIME_NONE;
+    mppad->discont = TRUE;
+
+    demux->srcpads = g_slist_prepend (demux->srcpads, mppad);
+    demux->numpads++;
+
+    gst_pad_use_fixed_caps (pad);
+    gst_pad_set_active (pad, TRUE);
+
+    /* prepare and send stream-start; reuse the upstream group-id when the
+     * sink pad carries one, otherwise allocate a fresh one once */
+    if (!demux->have_group_id) {
+      event = gst_pad_get_sticky_event (demux->sinkpad,
+          GST_EVENT_STREAM_START, 0);
+
+      if (event) {
+        demux->have_group_id =
+            gst_event_parse_group_id (event, &demux->group_id);
+        gst_event_unref (event);
+      } else if (!demux->have_group_id) {
+        demux->have_group_id = TRUE;
+        demux->group_id = gst_util_group_id_next ();
+      }
+    }
+
+    stream_id = gst_pad_create_stream_id (pad,
+        GST_ELEMENT_CAST (demux), demux->mime_type);
+
+    event = gst_event_new_stream_start (stream_id);
+    if (demux->have_group_id)
+      gst_event_set_group_id (event, demux->group_id);
+
+    gst_pad_store_sticky_event (pad, event);
+    g_free (stream_id);
+    gst_event_unref (event);
+
+    /* take the mime type, convert it to the caps name */
+    capsname = gst_multipart_demux_get_gstname (demux, mime);
+    caps = gst_caps_from_string (capsname);
+    GST_DEBUG_OBJECT (demux, "caps for pad: %s", capsname);
+    gst_pad_set_caps (pad, caps);
+    gst_element_add_pad (GST_ELEMENT_CAST (demux), pad);
+    gst_caps_unref (caps);
+
+    if (created) {
+      *created = TRUE;
+    }
+
+    /* in single-stream mode the first pad is also the last one */
+    if (demux->singleStream) {
+      gst_element_no_more_pads (GST_ELEMENT_CAST (demux));
+    }
+
+    return mppad;
+  }
+}
+
+/* Find the end of the current line in [data, dataend).  On success sets
+ * *end to one past the last content byte (excluding an optional '\r'
+ * before the '\n') and *next to the first byte of the following line,
+ * and returns TRUE.  Returns FALSE when no '\n' is found, i.e. the line
+ * is not yet complete.  Note: only a '\r' immediately tracked by the scan
+ * is stripped — the check is on having seen any '\r', which matches
+ * well-formed CRLF input. */
+static gboolean
+get_line_end (const guint8 * data, const guint8 * dataend, guint8 ** end,
+    guint8 ** next)
+{
+  guint8 *x;
+  gboolean foundr = FALSE;
+
+  for (x = (guint8 *) data; x < dataend; x++) {
+    if (*x == '\r') {
+      foundr = TRUE;
+    } else if (*x == '\n') {
+      *end = x - (foundr ? 1 : 0);
+      *next = x + 1;
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/* Return the length of the mime type starting at @data: scan at most
+ * @maxlen bytes, stopping early at NUL, CR, LF or ';' (parameters after
+ * the first ';' are not handled).  The previous implementation ignored
+ * @maxlen entirely and relied on the caller guaranteeing a terminator
+ * inside the buffer; honour the bound so the scan can never run past
+ * the mapped adapter data. */
+static guint
+get_mime_len (const guint8 * data, guint maxlen)
+{
+  guint8 *x;
+  const guint8 *bufend = data + maxlen;
+
+  x = (guint8 *) data;
+  while (x < bufend && *x != '\0' && *x != '\r' && *x != '\n' && *x != ';') {
+    x++;
+  }
+  return x - data;
+}
+
+/* Parse one multipart header section from the start of the adapter.
+ *
+ * Expected layout: optional leading whitespace, "--<boundary>" on its own
+ * line, then header lines ("Content-type:", "Content-length:") terminated
+ * by an empty line.  On the first call the boundary is learned and stored;
+ * subsequent headers must carry the same boundary, and "--<boundary>--"
+ * signals end of stream.  Updates multipart->mime_type and
+ * multipart->content_length as headers are seen.
+ *
+ * Returns the number of bytes consumed by the header (the payload offset
+ * in the adapter), or a negative code: MULTIPART_NEED_MORE_DATA,
+ * MULTIPART_DATA_ERROR (an element error has been posted) or
+ * MULTIPART_DATA_EOS. */
+static gint
+multipart_parse_header (GstMultipartDemux * multipart)
+{
+  const guint8 *data;
+  const guint8 *dataend;
+  gchar *boundary;
+  int boundary_len;
+  int datalen;
+  guint8 *pos;
+  guint8 *end, *next;
+
+  datalen = gst_adapter_available (multipart->adapter);
+  data = gst_adapter_map (multipart->adapter, datalen);
+  dataend = data + datalen;
+
+  /* Skip leading whitespace, pos endposition should at least leave space for
+   * the boundary and a \n */
+  for (pos = (guint8 *) data; pos < dataend - 4 && g_ascii_isspace (*pos);
+      pos++);
+
+  if (pos >= dataend - 4)
+    goto need_more_data;
+
+  if (G_UNLIKELY (pos[0] != '-' || pos[1] != '-')) {
+    GST_DEBUG_OBJECT (multipart, "No boundary available");
+    goto wrong_header;
+  }
+
+  /* First the boundary */
+  if (!get_line_end (pos, dataend, &end, &next))
+    goto need_more_data;
+
+  /* Ignore the leading -- */
+  boundary_len = end - pos - 2;
+  boundary = (gchar *) pos + 2;
+  if (boundary_len < 1) {
+    GST_DEBUG_OBJECT (multipart, "No boundary available");
+    goto wrong_header;
+  }
+
+  if (G_UNLIKELY (multipart->boundary == NULL)) {
+    /* First time we see the boundary, copy it */
+    multipart->boundary = g_strndup (boundary, boundary_len);
+    multipart->boundary_len = boundary_len;
+  } else if (G_UNLIKELY (boundary_len != multipart->boundary_len)) {
+    /* Something odd is going on, either the boundary indicated EOS or it's
+     * invalid */
+    if (G_UNLIKELY (boundary_len == multipart->boundary_len + 2 &&
+            !strncmp (boundary, multipart->boundary, multipart->boundary_len) &&
+            !strncmp (boundary + multipart->boundary_len, "--", 2)))
+      goto eos;
+
+    GST_DEBUG_OBJECT (multipart,
+        "Boundary length doesn't match detected boundary (%d <> %d)",
+        boundary_len, multipart->boundary_len);
+    goto wrong_header;
+  } else if (G_UNLIKELY (strncmp (boundary, multipart->boundary, boundary_len))) {
+    GST_DEBUG_OBJECT (multipart, "Boundary doesn't match previous boundary");
+    goto wrong_header;
+  }
+
+  pos = next;
+  while (get_line_end (pos, dataend, &end, &next)) {
+    guint len = end - pos;
+
+    if (len == 0) {
+      /* empty line, data starts behind us */
+      GST_DEBUG_OBJECT (multipart,
+          "Parsed the header - boundary: %s, mime-type: %s, content-length: %d",
+          multipart->boundary, multipart->mime_type, multipart->content_length);
+      gst_adapter_unmap (multipart->adapter);
+      return next - data;
+    }
+
+    if (len >= 14 && !g_ascii_strncasecmp ("content-type:", (gchar *) pos, 13)) {
+      guint mime_len;
+
+      /* only take the mime type up to the first ; if any. After ; there can be
+       * properties that we don't handle yet. */
+      /* NOTE(review): pos + 14 assumes exactly one byte (e.g. a space) after
+       * the colon — a header without that separator loses its first byte */
+      mime_len = get_mime_len (pos + 14, len - 14);
+
+      g_free (multipart->mime_type);
+      multipart->mime_type = g_ascii_strdown ((gchar *) pos + 14, mime_len);
+    } else if (len >= 15 &&
+        !g_ascii_strncasecmp ("content-length:", (gchar *) pos, 15)) {
+      multipart->content_length =
+          g_ascii_strtoull ((gchar *) pos + 15, NULL, 10);
+    }
+    pos = next;
+  }
+
+  /* fell out of the loop: the empty line terminating the header has not
+   * arrived yet */
+need_more_data:
+  GST_DEBUG_OBJECT (multipart, "Need more data for the header");
+  gst_adapter_unmap (multipart->adapter);
+
+  return MULTIPART_NEED_MORE_DATA;
+
+wrong_header:
+  {
+    GST_ELEMENT_ERROR (multipart, STREAM, DEMUX, (NULL),
+        ("Boundary not found in the multipart header"));
+    gst_adapter_unmap (multipart->adapter);
+    return MULTIPART_DATA_ERROR;
+  }
+eos:
+  {
+    GST_DEBUG_OBJECT (multipart, "we are EOS");
+    gst_adapter_unmap (multipart->adapter);
+    return MULTIPART_DATA_EOS;
+  }
+}
+
+/* Locate the next boundary in the adapter and report the payload size.
+ *
+ * Fast path: when a Content-length header was seen, trust it and only peek
+ * one byte past the payload to account for the trailing newline.  Slow
+ * path: scan byte-by-byte for "--<boundary>", remembering the scan
+ * position across calls so data is not rescanned.
+ *
+ * On success *datalen is set to the payload length (newline before the
+ * boundary excluded) and the offset of the boundary start is returned;
+ * otherwise MULTIPART_NEED_MORE_DATA.
+ *
+ * NOTE(review): in the slow path, if fewer than boundary_len + 2 bytes are
+ * available, dataend - boundary_len - 2 points before the mapped data;
+ * the loop condition then fails immediately, but the pointer arithmetic
+ * itself is technically out of bounds — confirm upstream. */
+static gint
+multipart_find_boundary (GstMultipartDemux * multipart, gint * datalen)
+{
+  /* Adaptor is positioned at the start of the data */
+  const guint8 *data, *pos;
+  const guint8 *dataend;
+  gint len;
+
+  if (multipart->content_length >= 0) {
+    /* fast path, known content length :) */
+    len = multipart->content_length;
+    if (gst_adapter_available (multipart->adapter) >= len + 2) {
+      *datalen = len;
+      data = gst_adapter_map (multipart->adapter, len + 1);
+
+      /* If data[len] contains \r then assume a newline is \r\n */
+      if (data[len] == '\r')
+        len += 2;
+      else if (data[len] == '\n')
+        len += 1;
+
+      gst_adapter_unmap (multipart->adapter);
+      /* Don't check if boundary is actually there, but let the header parsing
+       * bail out if it isn't */
+      return len;
+    } else {
+      /* need more data */
+      return MULTIPART_NEED_MORE_DATA;
+    }
+  }
+
+  len = gst_adapter_available (multipart->adapter);
+  if (len == 0)
+    return MULTIPART_NEED_MORE_DATA;
+  data = gst_adapter_map (multipart->adapter, len);
+  dataend = data + len;
+
+  for (pos = data + multipart->scanpos;
+      pos <= dataend - multipart->boundary_len - 2; pos++) {
+    if (*pos == '-' && pos[1] == '-' &&
+        !strncmp ((gchar *) pos + 2,
+            multipart->boundary, multipart->boundary_len)) {
+      /* Found the boundary! Check if there was a newline before the boundary */
+      len = pos - data;
+      if (pos - 2 > data && pos[-2] == '\r')
+        len -= 2;
+      else if (pos - 1 > data && pos[-1] == '\n')
+        len -= 1;
+      *datalen = len;
+
+      gst_adapter_unmap (multipart->adapter);
+      multipart->scanpos = 0;
+      return pos - data;
+    }
+  }
+  gst_adapter_unmap (multipart->adapter);
+  /* remember how far we scanned so the next call resumes here */
+  multipart->scanpos = pos - data;
+  return MULTIPART_NEED_MORE_DATA;
+}
+
+/* Sink pad event handler.  EOS before any source pad was created means no
+ * valid multipart content was ever parsed, which is reported as an element
+ * error instead of being forwarded; everything else takes the default
+ * event handling. */
+static gboolean
+gst_multipart_demux_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstMultipartDemux *multipart;
+
+  multipart = GST_MULTIPART_DEMUX (parent);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_EOS:
+      if (!multipart->srcpads) {
+        GST_ELEMENT_ERROR (multipart, STREAM, WRONG_TYPE,
+            ("This stream contains no valid streams."),
+            ("Got EOS before adding any pads"));
+        gst_event_unref (event);
+        return FALSE;
+      } else {
+        return gst_pad_event_default (pad, parent, event);
+      }
+      break;
+    default:
+      return gst_pad_event_default (pad, parent, event);
+  }
+}
+
+/* Sink pad chain function: accumulate input in the adapter and loop,
+ * alternating header parsing and boundary search, pushing each complete
+ * payload on the pad matching its mime type.  Negative parser results
+ * (need-more-data / error / EOS) break out via the nodata label, where
+ * the sticky error codes are mapped to flow returns. */
+static GstFlowReturn
+gst_multipart_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstMultipartDemux *multipart;
+  GstAdapter *adapter;
+  gint size = 1;
+  GstFlowReturn res;
+
+  multipart = GST_MULTIPART_DEMUX (parent);
+  adapter = multipart->adapter;
+
+  res = GST_FLOW_OK;
+
+  /* a discont invalidates buffered data: drop the adapter contents and
+   * remember to flag the next buffer on every source pad */
+  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
+    GSList *l;
+
+    for (l = multipart->srcpads; l != NULL; l = l->next) {
+      GstMultipartPad *srcpad = l->data;
+
+      srcpad->discont = TRUE;
+    }
+    gst_adapter_clear (adapter);
+  }
+  gst_adapter_push (adapter, buf);
+
+  while (gst_adapter_available (adapter) > 0) {
+    GstMultipartPad *srcpad;
+    GstBuffer *outbuf;
+    gboolean created;
+    gint datalen;
+
+    if (G_UNLIKELY (!multipart->header_completed)) {
+      if ((size = multipart_parse_header (multipart)) < 0) {
+        goto nodata;
+      } else {
+        gst_adapter_flush (adapter, size);
+        multipart->header_completed = TRUE;
+      }
+    }
+    if ((size = multipart_find_boundary (multipart, &datalen)) < 0) {
+      goto nodata;
+    }
+
+    /* Invalidate header info */
+    multipart->header_completed = FALSE;
+    multipart->content_length = -1;
+
+    if (G_UNLIKELY (datalen <= 0)) {
+      GST_DEBUG_OBJECT (multipart, "skipping empty content.");
+      gst_adapter_flush (adapter, size - datalen);
+    } else if (G_UNLIKELY (!multipart->mime_type)) {
+      GST_DEBUG_OBJECT (multipart, "content has no MIME type.");
+      gst_adapter_flush (adapter, size - datalen);
+    } else {
+      GstClockTime ts;
+
+      srcpad =
+          gst_multipart_find_pad_by_mime (multipart,
+          multipart->mime_type, &created);
+
+      /* take the payload, then flush the trailing newline bytes between
+       * payload end and the next boundary */
+      ts = gst_adapter_prev_pts (adapter, NULL);
+      outbuf = gst_adapter_take_buffer (adapter, datalen);
+      gst_adapter_flush (adapter, size - datalen);
+
+      if (created) {
+        GstTagList *tags;
+        GstSegment segment;
+
+        gst_segment_init (&segment, GST_FORMAT_TIME);
+
+        /* Push new segment, first buffer has 0 timestamp */
+        gst_pad_push_event (srcpad->pad, gst_event_new_segment (&segment));
+
+        tags = gst_tag_list_new (GST_TAG_CONTAINER_FORMAT, "Multipart", NULL);
+        gst_tag_list_set_scope (tags, GST_TAG_SCOPE_GLOBAL);
+        gst_pad_push_event (srcpad->pad, gst_event_new_tag (tags));
+      }
+
+      outbuf = gst_buffer_make_writable (outbuf);
+      /* never emit two buffers with the same timestamp; repeats get NONE */
+      if (srcpad->last_ts == GST_CLOCK_TIME_NONE || srcpad->last_ts != ts) {
+        GST_BUFFER_TIMESTAMP (outbuf) = ts;
+        srcpad->last_ts = ts;
+      } else {
+        GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE;
+      }
+
+      if (srcpad->discont) {
+        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+        srcpad->discont = FALSE;
+      } else {
+        GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DISCONT);
+      }
+
+      GST_DEBUG_OBJECT (multipart,
+          "pushing buffer with timestamp %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
+      res = gst_pad_push (srcpad->pad, outbuf);
+      res = gst_multipart_combine_flows (multipart, srcpad, res);
+      if (res != GST_FLOW_OK)
+        break;
+    }
+  }
+
+nodata:
+  /* map parser sentinel codes to flow returns; NEED_MORE_DATA simply
+   * returns the last flow result and waits for the next buffer */
+  if (G_UNLIKELY (size == MULTIPART_DATA_ERROR))
+    return GST_FLOW_ERROR;
+  if (G_UNLIKELY (size == MULTIPART_DATA_EOS))
+    return GST_FLOW_EOS;
+
+  return res;
+}
+
+/* State change handler: after chaining up, reset all parser state (header
+ * flags, boundary, mime type, adapter contents, dynamic pads, group id)
+ * on PAUSED->READY so a restarted pipeline parses from scratch. */
+static GstStateChangeReturn
+gst_multipart_demux_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstMultipartDemux *multipart;
+  GstStateChangeReturn ret;
+
+  multipart = GST_MULTIPART_DEMUX (element);
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (ret == GST_STATE_CHANGE_FAILURE)
+    return ret;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      multipart->header_completed = FALSE;
+      g_free (multipart->boundary);
+      multipart->boundary = NULL;
+      g_free (multipart->mime_type);
+      multipart->mime_type = NULL;
+      gst_adapter_clear (multipart->adapter);
+      multipart->content_length = -1;
+      multipart->scanpos = 0;
+      gst_multipart_demux_remove_src_pads (multipart);
+      multipart->have_group_id = FALSE;
+      multipart->group_id = G_MAXUINT;
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+
+/* GObject property setter for "boundary" and "single-stream". */
+static void
+gst_multipart_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMultipartDemux *filter;
+
+  filter = GST_MULTIPART_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_BOUNDARY:
+      /* Not really that useful anymore as we can reliably autoscan */
+      g_free (filter->boundary);
+      filter->boundary = g_value_dup_string (value);
+      /* keep the cached length in sync with the string */
+      if (filter->boundary != NULL) {
+        filter->boundary_len = strlen (filter->boundary);
+      }
+      break;
+    case PROP_SINGLE_STREAM:
+      filter->singleStream = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter for "boundary" and "single-stream". */
+static void
+gst_multipart_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstMultipartDemux *filter;
+
+  filter = GST_MULTIPART_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_BOUNDARY:
+      g_value_set_string (value, filter->boundary);
+      break;
+    case PROP_SINGLE_STREAM:
+      g_value_set_boolean (value, filter->singleStream);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
diff --git a/gst/multipart/multipartdemux.h b/gst/multipart/multipartdemux.h
new file mode 100644
index 0000000000..b450b950bb
--- /dev/null
+++ b/gst/multipart/multipartdemux.h
@@ -0,0 +1,110 @@
+/* GStreamer
+ * Copyright (C) 2006 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
+ *
+ * gstmultipartdemux.h: multipart stream demuxer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MULTIPART_DEMUX__
+#define __GST_MULTIPART_DEMUX__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#include <string.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MULTIPART_DEMUX (gst_multipart_demux_get_type())
+#define GST_MULTIPART_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULTIPART_DEMUX, GstMultipartDemux))
+#define GST_MULTIPART_DEMUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULTIPART_DEMUX, GstMultipartDemux))
+#define GST_MULTIPART_DEMUX_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_MULTIPART_DEMUX, GstMultipartDemuxClass))
+#define GST_IS_MULTIPART_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULTIPART_DEMUX))
+#define GST_IS_MULTIPART_DEMUX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULTIPART_DEMUX))
+
+typedef struct _GstMultipartDemux GstMultipartDemux;
+typedef struct _GstMultipartDemuxClass GstMultipartDemuxClass;
+
+#define MULTIPART_NEED_MORE_DATA -1
+#define MULTIPART_DATA_ERROR -2
+#define MULTIPART_DATA_EOS -3
+
+/* all information needed for one multipart stream */
+typedef struct
+{
+  GstPad *pad;                  /* reference for this pad is held by element we belong to */
+
+  gchar *mime;                  /* owned, lowercased mime type this pad carries */
+
+  GstClockTime last_ts;         /* last timestamp to make sure we don't send
+                                 * two buffers with the same timestamp */
+  GstFlowReturn last_ret;       /* last flow return, for combine_flows */
+
+  gboolean discont;             /* set on upstream discont; next buffer gets
+                                 * the DISCONT flag */
+}
+GstMultipartPad;
+
+/**
+ * GstMultipartDemux:
+ *
+ * The opaque #GstMultipartDemux structure.
+ */
+struct _GstMultipartDemux
+{
+  GstElement element;
+
+  /* pad */
+  GstPad *sinkpad;
+
+  GSList *srcpads;              /* list of GstMultipartPad, one per mime type */
+  guint numpads;                /* number of created pads, used for pad names */
+
+  GstAdapter *adapter;          /* accumulates incoming data for parsing */
+
+  /* Header information of the current frame */
+  gboolean header_completed;
+  gchar *boundary;              /* owned; NULL until set or autodetected */
+  guint boundary_len;
+  gchar *mime_type;             /* owned, lowercased; NULL if none seen */
+  gint content_length;          /* -1 when unknown */
+
+  /* Index inside the current data when manually looking for the boundary */
+  gint scanpos;
+
+  gboolean singleStream;        /* "single-stream" property */
+
+  /* to handle stream-start */
+  gboolean have_group_id;
+  guint group_id;
+};
+
+struct _GstMultipartDemuxClass
+{
+  GstElementClass parent_class;
+
+  GHashTable *gstnames;         /* mime type -> caps string map, filled once
+                                 * in class_init and shared by all instances */
+};
+
+GType gst_multipart_demux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (multipartdemux);
+
+G_END_DECLS
+
+#endif /* __GST_MULTIPART_DEMUX__ */
+
diff --git a/gst/multipart/multipartmux.c b/gst/multipart/multipartmux.c
new file mode 100644
index 0000000000..f8d7689a61
--- /dev/null
+++ b/gst/multipart/multipartmux.c
@@ -0,0 +1,689 @@
+/* multipart muxer plugin for GStreamer
+ * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-multipartmux
+ * @title: multipartmux
+ *
+ * MultipartMux uses the #GstCaps of the sink pad as the Content-type field for
+ * incoming buffers when muxing them to a multipart stream. Most of the time
+ * multipart streams are sequential JPEG frames.
+ *
+ * ## Sample pipelines
+ * |[
+ * gst-launch-1.0 videotestsrc ! video/x-raw, framerate='(fraction)'5/1 ! jpegenc ! multipartmux ! filesink location=/tmp/test.multipart
+ * ]| a pipeline to mux 5 JPEG frames per second into a multipart stream
+ * stored to a file.
+ *
+ */
+
+/* FIXME: drop/merge tag events, or at least send them delayed after stream-start */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "multipartmux.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_multipart_mux_debug);
+#define GST_CAT_DEFAULT gst_multipart_mux_debug
+
+#define DEFAULT_BOUNDARY "ThisRandomString"
+
+enum
+{
+ PROP_0,
+ PROP_BOUNDARY
+ /* FILL ME */
+};
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("multipart/x-mixed-replace")
+ );
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS_ANY /* we can take anything, really */
+ );
+
+typedef struct
+{
+ const gchar *key;
+ const gchar *val;
+} MimeTypeMap;
+
+/* convert from gst structure names to mime types. Add more when needed. */
+static const MimeTypeMap mimetypes[] = {
+ {"audio/x-mulaw", "audio/basic"},
+ {NULL, NULL}
+};
+
+static void gst_multipart_mux_finalize (GObject * object);
+
+static gboolean gst_multipart_mux_handle_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static GstPad *gst_multipart_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static GstStateChangeReturn gst_multipart_mux_change_state (GstElement *
+ element, GstStateChange transition);
+
+static gboolean gst_multipart_mux_sink_event (GstCollectPads * pads,
+ GstCollectData * pad, GstEvent * event, GstMultipartMux * mux);
+static GstFlowReturn gst_multipart_mux_collected (GstCollectPads * pads,
+ GstMultipartMux * mux);
+
+static void gst_multipart_mux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_multipart_mux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+#define gst_multipart_mux_parent_class parent_class
+G_DEFINE_TYPE (GstMultipartMux, gst_multipart_mux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (multipartmux, "multipartmux",
+ GST_RANK_NONE, GST_TYPE_MULTIPART_MUX,
+ GST_DEBUG_CATEGORY_INIT (gst_multipart_mux_debug, "multipartmux", 0,
+ "multipart muxer"));
+
+static void
+gst_multipart_mux_class_init (GstMultipartMuxClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ gint i;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->finalize = gst_multipart_mux_finalize;
+ gobject_class->get_property = gst_multipart_mux_get_property;
+ gobject_class->set_property = gst_multipart_mux_set_property;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BOUNDARY,
+ g_param_spec_string ("boundary", "Boundary", "Boundary string",
+ DEFAULT_BOUNDARY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->request_new_pad = gst_multipart_mux_request_new_pad;
+ gstelement_class->change_state = gst_multipart_mux_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+ gst_element_class_set_static_metadata (gstelement_class, "Multipart muxer",
+ "Codec/Muxer", "mux multipart streams", "Wim Taymans <wim@fluendo.com>");
+
+ /* populate mime types */
+ klass->mimetypes = g_hash_table_new (g_str_hash, g_str_equal);
+ for (i = 0; mimetypes[i].key; i++) {
+ g_hash_table_insert (klass->mimetypes, (gpointer) mimetypes[i].key,
+ (gpointer) mimetypes[i].val);
+ }
+}
+
+static void
+gst_multipart_mux_init (GstMultipartMux * multipart_mux)
+{
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (multipart_mux);
+
+ multipart_mux->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+ "src"), "src");
+ gst_pad_set_event_function (multipart_mux->srcpad,
+ gst_multipart_mux_handle_src_event);
+ gst_element_add_pad (GST_ELEMENT (multipart_mux), multipart_mux->srcpad);
+
+ multipart_mux->boundary = g_strdup (DEFAULT_BOUNDARY);
+
+ multipart_mux->collect = gst_collect_pads_new ();
+ gst_collect_pads_set_event_function (multipart_mux->collect,
+ (GstCollectPadsEventFunction)
+ GST_DEBUG_FUNCPTR (gst_multipart_mux_sink_event), multipart_mux);
+ gst_collect_pads_set_function (multipart_mux->collect,
+ (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_multipart_mux_collected),
+ multipart_mux);
+}
+
+static void
+gst_multipart_mux_finalize (GObject * object)
+{
+ GstMultipartMux *multipart_mux;
+
+ multipart_mux = GST_MULTIPART_MUX (object);
+
+ g_free (multipart_mux->boundary);
+
+ if (multipart_mux->collect)
+ gst_object_unref (multipart_mux->collect);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static GstPad *
+gst_multipart_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+ GstMultipartMux *multipart_mux;
+ GstPad *newpad;
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
+ gchar *name;
+
+ if (templ != gst_element_class_get_pad_template (klass, "sink_%u"))
+ goto wrong_template;
+
+ multipart_mux = GST_MULTIPART_MUX (element);
+
+ /* create new pad with the name */
+ name = g_strdup_printf ("sink_%u", multipart_mux->numpads);
+ newpad = gst_pad_new_from_template (templ, name);
+ g_free (name);
+
+ /* construct our own wrapper data structure for the pad to
+ * keep track of its status */
+ {
+ GstMultipartPadData *multipartpad;
+
+ multipartpad = (GstMultipartPadData *)
+ gst_collect_pads_add_pad (multipart_mux->collect, newpad,
+ sizeof (GstMultipartPadData), NULL, TRUE);
+
+ /* save a pointer to our data in the pad */
+ multipartpad->pad = newpad;
+ gst_pad_set_element_private (newpad, multipartpad);
+ multipart_mux->numpads++;
+ }
+
+ /* add the pad to the element */
+ gst_element_add_pad (element, newpad);
+
+ return newpad;
+
+ /* ERRORS */
+wrong_template:
+ {
+ g_warning ("multipart_mux: this is not our template!");
+ return NULL;
+ }
+}
+
+/* handle events */
+static gboolean
+gst_multipart_mux_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstEventType type;
+
+ type = event ? GST_EVENT_TYPE (event) : GST_EVENT_UNKNOWN;
+
+ switch (type) {
+ case GST_EVENT_SEEK:
+ /* disable seeking for now */
+ return FALSE;
+ default:
+ break;
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+}
+
+static const gchar *
+gst_multipart_mux_get_mime (GstMultipartMux * mux, GstStructure * s)
+{
+ GstMultipartMuxClass *klass;
+ const gchar *mime;
+ const gchar *name;
+ gint rate;
+ gint channels;
+ gint bitrate = 0;
+
+ klass = GST_MULTIPART_MUX_GET_CLASS (mux);
+
+ name = gst_structure_get_name (s);
+
+ /* use hashtable to convert to mime type */
+ mime = g_hash_table_lookup (klass->mimetypes, name);
+ if (mime == NULL) {
+ if (!strcmp (name, "audio/x-adpcm"))
+ gst_structure_get_int (s, "bitrate", &bitrate);
+
+ switch (bitrate) {
+ case 16000:
+ mime = "audio/G726-16";
+ break;
+ case 24000:
+ mime = "audio/G726-24";
+ break;
+ case 32000:
+ mime = "audio/G726-32";
+ break;
+ case 40000:
+ mime = "audio/G726-40";
+ break;
+ default:
+ /* no mime type mapping, use name */
+ mime = name;
+ break;
+ }
+ }
+ /* RFC2046 requires audio/basic to be mulaw 8000Hz mono */
+ if (g_ascii_strcasecmp (mime, "audio/basic") == 0) {
+ if (gst_structure_get_int (s, "rate", &rate) &&
+ gst_structure_get_int (s, "channels", &channels)) {
+ if (rate != 8000 || channels != 1) {
+ mime = name;
+ }
+ } else {
+ mime = name;
+ }
+ }
+ return mime;
+}
+
+/*
+ * Given two pads, compare the buffers queued on it and return 0 if they have
+ * an equal priority, 1 if the new pad is better, -1 if the old pad is better
+ */
+static gint
+gst_multipart_mux_compare_pads (GstMultipartMux * multipart_mux,
+ GstMultipartPadData * old, GstMultipartPadData * new)
+{
+ guint64 oldtime, newtime;
+
+ /* if the old pad doesn't contain anything or is even NULL, return
+ * the new pad as best candidate and vice versa */
+ if (old == NULL || old->buffer == NULL)
+ return 1;
+ if (new == NULL || new->buffer == NULL)
+ return -1;
+
+ if (GST_CLOCK_TIME_IS_VALID (old->dts_timestamp) &&
+ GST_CLOCK_TIME_IS_VALID (new->dts_timestamp)) {
+ oldtime = old->dts_timestamp;
+ newtime = new->dts_timestamp;
+ } else {
+ oldtime = old->pts_timestamp;
+ newtime = new->pts_timestamp;
+ }
+
+ /* no timestamp on old buffer, it must go first */
+ if (oldtime == GST_CLOCK_TIME_NONE)
+ return -1;
+
+ /* no timestamp on new buffer, it must go first */
+ if (newtime == GST_CLOCK_TIME_NONE)
+ return 1;
+
+ /* old buffer has higher timestamp, new one should go first */
+ if (newtime < oldtime)
+ return 1;
+ /* new buffer has higher timestamp, old one should go first */
+ else if (newtime > oldtime)
+ return -1;
+
+ /* same priority if all of the above failed */
+ return 0;
+}
+
+/* make sure a buffer is queued on all pads, returns a pointer to an multipartpad
+ * that holds the best buffer or NULL when no pad was usable */
+static GstMultipartPadData *
+gst_multipart_mux_queue_pads (GstMultipartMux * mux)
+{
+ GSList *walk = NULL;
+ GstMultipartPadData *bestpad = NULL;
+
+ g_return_val_if_fail (GST_IS_MULTIPART_MUX (mux), NULL);
+
+ /* try to make sure we have a buffer from each usable pad first */
+ walk = mux->collect->data;
+ while (walk) {
+ GstCollectData *data = (GstCollectData *) walk->data;
+ GstMultipartPadData *pad = (GstMultipartPadData *) data;
+
+ walk = g_slist_next (walk);
+
+ /* try to get a new buffer for this pad if needed and possible */
+ if (pad->buffer == NULL) {
+ GstBuffer *buf = NULL;
+
+ buf = gst_collect_pads_pop (mux->collect, data);
+
+ /* Store timestamps with segment_start and preroll */
+ if (buf && GST_BUFFER_PTS_IS_VALID (buf)) {
+ pad->pts_timestamp =
+ gst_segment_to_running_time (&data->segment, GST_FORMAT_TIME,
+ GST_BUFFER_PTS (buf));
+ } else {
+ pad->pts_timestamp = GST_CLOCK_TIME_NONE;
+ }
+ if (buf && GST_BUFFER_DTS_IS_VALID (buf)) {
+ pad->dts_timestamp =
+ gst_segment_to_running_time (&data->segment, GST_FORMAT_TIME,
+ GST_BUFFER_DTS (buf));
+ } else {
+ pad->dts_timestamp = GST_CLOCK_TIME_NONE;
+ }
+
+
+ pad->buffer = buf;
+ }
+
+ /* we should have a buffer now, see if it is the best stream to
+ * pull on */
+ if (pad->buffer != NULL) {
+ if (gst_multipart_mux_compare_pads (mux, bestpad, pad) > 0) {
+ bestpad = pad;
+ }
+ }
+ }
+
+ return bestpad;
+}
+
+static gboolean
+gst_multipart_mux_sink_event (GstCollectPads * pads, GstCollectData * data,
+ GstEvent * event, GstMultipartMux * mux)
+{
+ gboolean ret;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ {
+ mux->need_segment = TRUE;
+ break;
+ }
+ default:
+ break;
+ }
+
+ ret = gst_collect_pads_event_default (pads, data, event, FALSE);
+
+ return ret;
+}
+
+/* basic idea:
+ *
+ * 1) find a pad to pull on, this is done by pulling on all pads and
+ * looking at the buffers to decide which one should be muxed first.
+ * 2) create a new buffer for the header
+ * 3) push both buffers on best pad, go to 1
+ */
+static GstFlowReturn
+gst_multipart_mux_collected (GstCollectPads * pads, GstMultipartMux * mux)
+{
+ GstMultipartPadData *best;
+ GstFlowReturn ret = GST_FLOW_OK;
+ gchar *header = NULL;
+ size_t headerlen;
+ GstBuffer *headerbuf = NULL;
+ GstBuffer *footerbuf = NULL;
+ GstBuffer *databuf = NULL;
+ GstStructure *structure = NULL;
+ GstCaps *caps;
+ const gchar *mime;
+
+ GST_DEBUG_OBJECT (mux, "all pads are collected");
+
+ if (mux->need_stream_start) {
+ gchar s_id[32];
+
+ /* stream-start (FIXME: create id based on input ids) */
+ g_snprintf (s_id, sizeof (s_id), "multipartmux-%08x", g_random_int ());
+ gst_pad_push_event (mux->srcpad, gst_event_new_stream_start (s_id));
+
+ mux->need_stream_start = FALSE;
+ }
+
+ /* queue buffers on all pads; find a buffer with the lowest timestamp */
+ best = gst_multipart_mux_queue_pads (mux);
+ if (!best)
+ /* EOS */
+ goto eos;
+ else if (!best->buffer)
+ goto buffer_error;
+
+ /* If not negotiated yet set caps on src pad */
+ if (!mux->negotiated) {
+ GstCaps *newcaps;
+
+ newcaps = gst_caps_new_simple ("multipart/x-mixed-replace",
+ "boundary", G_TYPE_STRING, mux->boundary, NULL);
+
+ if (!gst_pad_set_caps (mux->srcpad, newcaps)) {
+ gst_caps_unref (newcaps);
+ goto nego_error;
+ }
+
+ gst_caps_unref (newcaps);
+ mux->negotiated = TRUE;
+ }
+
+ /* see if we need to push a segment */
+ if (mux->need_segment) {
+ GstClockTime time;
+ GstSegment segment;
+
+ if (best->dts_timestamp != GST_CLOCK_TIME_NONE) {
+ time = best->dts_timestamp;
+ } else if (best->pts_timestamp != GST_CLOCK_TIME_NONE) {
+ time = best->pts_timestamp;
+ } else {
+ time = 0;
+ }
+
+ /* for the segment, we take the first timestamp we see, we don't know the
+ * length and the position is 0 */
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ segment.start = time;
+
+ gst_pad_push_event (mux->srcpad, gst_event_new_segment (&segment));
+
+ mux->need_segment = FALSE;
+ }
+
+ caps = gst_pad_get_current_caps (best->pad);
+ if (caps == NULL)
+ goto no_caps;
+
+ structure = gst_caps_get_structure (caps, 0);
+ if (!structure) {
+ gst_caps_unref (caps);
+ goto no_caps;
+ }
+
+ /* get the mime type for the structure */
+ mime = gst_multipart_mux_get_mime (mux, structure);
+ gst_caps_unref (caps);
+
+ header = g_strdup_printf ("--%s\r\nContent-Type: %s\r\n"
+ "Content-Length: %" G_GSIZE_FORMAT "\r\n\r\n",
+ mux->boundary, mime, gst_buffer_get_size (best->buffer));
+ headerlen = strlen (header);
+
+ headerbuf = gst_buffer_new_allocate (NULL, headerlen, NULL);
+ gst_buffer_fill (headerbuf, 0, header, headerlen);
+ g_free (header);
+
+ /* the header has the same timestamps as the data buffer (which we will push
+ * below) and has a duration of 0 */
+ GST_BUFFER_PTS (headerbuf) = best->pts_timestamp;
+ GST_BUFFER_DTS (headerbuf) = best->dts_timestamp;
+ GST_BUFFER_DURATION (headerbuf) = 0;
+ GST_BUFFER_OFFSET (headerbuf) = mux->offset;
+ mux->offset += headerlen;
+ GST_BUFFER_OFFSET_END (headerbuf) = mux->offset;
+
+ GST_DEBUG_OBJECT (mux, "pushing %" G_GSIZE_FORMAT " bytes header buffer",
+ headerlen);
+ ret = gst_pad_push (mux->srcpad, headerbuf);
+ if (ret != GST_FLOW_OK)
+ /* push always takes ownership of the buffer, even after an error, so we
+ * don't need to unref headerbuf here. */
+ goto beach;
+
+ /* take best->buffer, we don't need to unref it later as we will push it
+ * now. */
+ databuf = gst_buffer_make_writable (best->buffer);
+ best->buffer = NULL;
+
+ /* we need to updated the timestamps to match the running_time */
+ GST_BUFFER_PTS (databuf) = best->pts_timestamp;
+ GST_BUFFER_DTS (databuf) = best->dts_timestamp;
+ GST_BUFFER_OFFSET (databuf) = mux->offset;
+ mux->offset += gst_buffer_get_size (databuf);
+ GST_BUFFER_OFFSET_END (databuf) = mux->offset;
+ GST_BUFFER_FLAG_SET (databuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ GST_DEBUG_OBJECT (mux, "pushing %" G_GSIZE_FORMAT " bytes data buffer",
+ gst_buffer_get_size (databuf));
+ ret = gst_pad_push (mux->srcpad, databuf);
+ if (ret != GST_FLOW_OK)
+ /* push always takes ownership of the buffer, even after an error, so we
+ * don't need to unref headerbuf here. */
+ goto beach;
+
+ footerbuf = gst_buffer_new_allocate (NULL, 2, NULL);
+ gst_buffer_fill (footerbuf, 0, "\r\n", 2);
+
+ /* the footer has the same timestamps as the data buffer and has a
+ * duration of 0 */
+ GST_BUFFER_PTS (footerbuf) = best->pts_timestamp;
+ GST_BUFFER_DTS (footerbuf) = best->dts_timestamp;
+ GST_BUFFER_DURATION (footerbuf) = 0;
+ GST_BUFFER_OFFSET (footerbuf) = mux->offset;
+ mux->offset += 2;
+ GST_BUFFER_OFFSET_END (footerbuf) = mux->offset;
+ GST_BUFFER_FLAG_SET (footerbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ GST_DEBUG_OBJECT (mux, "pushing 2 bytes footer buffer");
+ ret = gst_pad_push (mux->srcpad, footerbuf);
+
+beach:
+ if (best && best->buffer) {
+ gst_buffer_unref (best->buffer);
+ best->buffer = NULL;
+ }
+ return ret;
+
+ /* ERRORS */
+buffer_error:
+ {
+ /* There is a best but no buffer, this is not quite right.. */
+ GST_ELEMENT_ERROR (mux, STREAM, FAILED, (NULL), ("internal muxing error"));
+ ret = GST_FLOW_ERROR;
+ goto beach;
+ }
+eos:
+ {
+ GST_DEBUG_OBJECT (mux, "Pushing EOS");
+ gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
+ ret = GST_FLOW_EOS;
+ goto beach;
+ }
+nego_error:
+ {
+ GST_WARNING_OBJECT (mux, "failed to set caps");
+ GST_ELEMENT_ERROR (mux, CORE, NEGOTIATION, (NULL), (NULL));
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto beach;
+ }
+no_caps:
+ {
+ GST_WARNING_OBJECT (mux, "no caps on the incoming buffer %p", best->buffer);
+ GST_ELEMENT_ERROR (mux, CORE, NEGOTIATION, (NULL), (NULL));
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto beach;
+ }
+}
+
+static void
+gst_multipart_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstMultipartMux *mux;
+
+ mux = GST_MULTIPART_MUX (object);
+
+ switch (prop_id) {
+ case PROP_BOUNDARY:
+ g_value_set_string (value, mux->boundary);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_multipart_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstMultipartMux *mux;
+
+ mux = GST_MULTIPART_MUX (object);
+
+ switch (prop_id) {
+ case PROP_BOUNDARY:
+ g_free (mux->boundary);
+ mux->boundary = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstStateChangeReturn
+gst_multipart_mux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstMultipartMux *multipart_mux;
+ GstStateChangeReturn ret;
+
+ multipart_mux = GST_MULTIPART_MUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ multipart_mux->offset = 0;
+ multipart_mux->negotiated = FALSE;
+ multipart_mux->need_segment = TRUE;
+ multipart_mux->need_stream_start = TRUE;
+ GST_DEBUG_OBJECT (multipart_mux, "starting collect pads");
+ gst_collect_pads_start (multipart_mux->collect);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ GST_DEBUG_OBJECT (multipart_mux, "stopping collect pads");
+ gst_collect_pads_stop (multipart_mux->collect);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ return ret;
+
+ switch (transition) {
+ default:
+ break;
+ }
+
+ return ret;
+}
diff --git a/gst/multipart/multipartmux.h b/gst/multipart/multipartmux.h
new file mode 100644
index 0000000000..9ff94e5832
--- /dev/null
+++ b/gst/multipart/multipartmux.h
@@ -0,0 +1,98 @@
+/* GStreamer
+ * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
+ *
+ * gstmultipartmux.h: multipart stream muxer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MULTIPART_MUX__
+#define __GST_MULTIPART_MUX__
+
+#include <gst/gst.h>
+#include <gst/base/gstcollectpads.h>
+
+#include <string.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MULTIPART_MUX (gst_multipart_mux_get_type())
+#define GST_MULTIPART_MUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULTIPART_MUX, GstMultipartMux))
+#define GST_MULTIPART_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULTIPART_MUX, GstMultipartMux))
+#define GST_MULTIPART_MUX_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_MULTIPART_MUX, GstMultipartMuxClass))
+#define GST_IS_MULTIPART_MUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULTIPART_MUX))
+#define GST_IS_MULTIPART_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULTIPART_MUX))
+
+typedef struct _GstMultipartMux GstMultipartMux;
+typedef struct _GstMultipartMuxClass GstMultipartMuxClass;
+
+/* all information needed for one multipart stream */
+typedef struct
+{
+ GstCollectData collect; /* we extend the CollectData */
+
+ GstBuffer *buffer; /* the queued buffer for this pad */
+ GstClockTime pts_timestamp; /* its pts timestamp, converted to running_time so that we can
+ correctly sort over multiple segments. */
+ GstClockTime dts_timestamp; /* its dts timestamp, converted to running_time so that we can
+ correctly sort over multiple segments. */
+ GstPad *pad;
+}
+GstMultipartPadData;
+
+/**
+ * GstMultipartMux:
+ *
+ * The opaque #GstMultipartMux structure.
+ */
+struct _GstMultipartMux
+{
+ GstElement element;
+
+ /* pad */
+ GstPad *srcpad;
+
+ /* sinkpads */
+ GstCollectPads *collect;
+
+ gint numpads;
+
+ /* offset in stream */
+ guint64 offset;
+
+ /* boundary string */
+ gchar *boundary;
+
+ gboolean negotiated;
+ gboolean need_segment;
+ gboolean need_stream_start;
+};
+
+struct _GstMultipartMuxClass
+{
+ GstElementClass parent_class;
+
+ GHashTable *mimetypes;
+};
+
+GType gst_multipart_mux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (multipartmux);
+
+G_END_DECLS
+
+#endif /* __GST_MULTIPART_MUX__ */
+
diff --git a/gst/replaygain/gstrganalysis.c b/gst/replaygain/gstrganalysis.c
new file mode 100644
index 0000000000..7274749073
--- /dev/null
+++ b/gst/replaygain/gstrganalysis.c
@@ -0,0 +1,705 @@
+/* GStreamer ReplayGain analysis
+ *
+ * Copyright (C) 2006 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrganalysis.c: Element that performs the ReplayGain analysis
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+/**
+ * SECTION:element-rganalysis
+ * @title: rganalysis
+ * @see_also: #GstRgVolume
+ *
+ * This element analyzes raw audio sample data in accordance with the proposed
+ * [ReplayGain standard](https://wiki.hydrogenaud.io/index.php?title=ReplayGain) for
+ * calculating the ideal replay gain for music tracks and albums. The element
+ * is designed as a pass-through filter that never modifies any data. As it
+ * receives an EOS event, it finalizes the ongoing analysis and generates a tag
+ * list containing the results. It is sent downstream with a tag event and
+ * posted on the message bus with a tag message. The EOS event is forwarded as
+ * normal afterwards. Result tag lists at least contain the tags
+ * #GST_TAG_TRACK_GAIN, #GST_TAG_TRACK_PEAK and #GST_TAG_REFERENCE_LEVEL.
+ *
+ * Because the generated metadata tags become available at the end of streams,
+ * downstream muxer and encoder elements are normally unable to save them in
+ * their output since they generally save metadata in the file header.
+ * Therefore, it is often necessary that applications read the results in a bus
+ * event handler for the tag message. Obtaining the values this way is always
+ * needed for album processing (see #GstRgAnalysis:num-tracks property) since
+ * the album gain and peak values need to be associated with all tracks of an
+ * album, not just the last one.
+ *
+ * ## Example launch lines
+ * |[
+ * gst-launch-1.0 -t audiotestsrc wave=sine num-buffers=512 ! rganalysis ! fakesink
+ * ]| Analyze a simple test waveform
+ * |[
+ * gst-launch-1.0 -t filesrc location=filename.ext ! decodebin \
+ * ! audioconvert ! audioresample ! rganalysis ! fakesink
+ * ]| Analyze a given file
+ * |[
+ * gst-launch-1.0 -t gnomevfssrc location=http://replaygain.hydrogenaudio.org/ref_pink.wav \
+ * ! wavparse ! rganalysis ! fakesink
+ * ]| Analyze the pink noise reference file
+ *
+ * The above launch line yields a result gain of +6 dB (instead of the expected
+ * +0 dB). This is not in error, refer to the #GstRgAnalysis:reference-level
+ * property documentation for more information.
+ *
+ * ## Acknowledgements
+ *
+ * This element is based on code used in the [vorbisgain](https://sjeng.org/vorbisgain.html)
+ * program and many others. The relevant parts are copyrighted by David Robinson, Glen Sawyer
+ * and Frank Klemm.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+#include <gst/audio/audio.h>
+
+#include "gstrganalysis.h"
+#include "replaygain.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rg_analysis_debug);
+#define GST_CAT_DEFAULT gst_rg_analysis_debug
+
+/* Default property value. */
+#define FORCED_DEFAULT TRUE
+#define DEFAULT_MESSAGE FALSE
+
+enum
+{
+ PROP_0,
+ PROP_NUM_TRACKS,
+ PROP_FORCED,
+ PROP_REFERENCE_LEVEL,
+ PROP_MESSAGE
+};
+
+/* The ReplayGain algorithm is intended for use with mono and stereo
+ * audio. The used implementation has filter coefficients for the
+ * "usual" sample rates in the 8000 to 48000 Hz range. */
+#define REPLAY_GAIN_CAPS "audio/x-raw," \
+ "format = (string) { "GST_AUDIO_NE(F32)","GST_AUDIO_NE(S16)" }, " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 1, " \
+ "rate = (int) { 8000, 11025, 12000, 16000, 22050, 24000, 32000, " \
+ "44100, 48000 }; " \
+ "audio/x-raw," \
+ "format = (string) { "GST_AUDIO_NE(F32)","GST_AUDIO_NE(S16)" }, " \
+ "layout = (string) interleaved, " \
+ "channels = (int) 2, " \
+ "channel-mask = (bitmask) 0x3, " \
+ "rate = (int) { 8000, 11025, 12000, 16000, 22050, 24000, 32000, " \
+ "44100, 48000 }"
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (REPLAY_GAIN_CAPS));
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (REPLAY_GAIN_CAPS));
+
+#define gst_rg_analysis_parent_class parent_class
+G_DEFINE_TYPE (GstRgAnalysis, gst_rg_analysis, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (rganalysis, "rganalysis", GST_RANK_NONE,
+ GST_TYPE_RG_ANALYSIS);
+
+static void gst_rg_analysis_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rg_analysis_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_rg_analysis_start (GstBaseTransform * base);
+static gboolean gst_rg_analysis_set_caps (GstBaseTransform * base,
+ GstCaps * incaps, GstCaps * outcaps);
+static GstFlowReturn gst_rg_analysis_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+static gboolean gst_rg_analysis_sink_event (GstBaseTransform * base,
+ GstEvent * event);
+static gboolean gst_rg_analysis_stop (GstBaseTransform * base);
+
+static void gst_rg_analysis_handle_tags (GstRgAnalysis * filter,
+ const GstTagList * tag_list);
+static void gst_rg_analysis_handle_eos (GstRgAnalysis * filter);
+static gboolean gst_rg_analysis_track_result (GstRgAnalysis * filter,
+ GstTagList ** tag_list);
+static gboolean gst_rg_analysis_album_result (GstRgAnalysis * filter,
+ GstTagList ** tag_list);
+
+static void
+gst_rg_analysis_class_init (GstRgAnalysisClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+  GstBaseTransformClass *trans_class;
+
+  gobject_class = (GObjectClass *) klass;
+  element_class = (GstElementClass *) klass;
+
+  gobject_class->set_property = gst_rg_analysis_set_property;
+  gobject_class->get_property = gst_rg_analysis_get_property;
+
+  /**
+   * GstRgAnalysis:num-tracks:
+   *
+   * Number of remaining album tracks.
+   *
+   * Analyzing several streams sequentially and assigning them a common result
+   * gain is known as "album processing".  If this gain is used during playback
+   * (by switching to "album mode"), all tracks of an album receive the same
+   * amplification.  This keeps the relative volume levels between the tracks
+   * intact.  To enable this, set this property to the number of streams that
+   * will be processed as album tracks.
+   *
+   * Every time an EOS event is received, the value of this property is
+   * decremented by one.  As it reaches zero, it is assumed that the last track
+   * of the album finished.  The tag list for the final stream will contain the
+   * additional tags #GST_TAG_ALBUM_GAIN and #GST_TAG_ALBUM_PEAK.  All other
+   * streams just get the two track tags posted because the values for the album
+   * tags are not known before all tracks are analyzed.  Applications need to
+   * ensure that the album gain and peak values are also associated with the
+   * other tracks when storing the results.
+   *
+   * If the total number of album tracks is unknown beforehand, just ensure that
+   * the value is greater than 1 before each track starts.  Then before the end
+   * of the last track, set it to the value 1.
+   *
+   * To perform album processing, the element has to preserve data between
+   * streams.  This cannot survive a state change to the NULL or READY state.
+   * If you change your pipeline's state to NULL or READY between tracks, lock
+   * the element's state using gst_element_set_locked_state() when it is in
+   * PAUSED or PLAYING.
+   */
+  g_object_class_install_property (gobject_class, PROP_NUM_TRACKS,
+      g_param_spec_int ("num-tracks", "Number of album tracks",
+          "Number of remaining album tracks", 0, G_MAXINT, 0,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstRgAnalysis:forced:
+   *
+   * Whether to analyze streams even when ReplayGain tags exist.
+   *
+   * For assisting transcoder/converter applications, the element can silently
+   * skip the processing of streams that already contain the necessary tags.
+   * Data will flow as usual but the element will not consume CPU time and will
+   * not generate result tags.  To enable possible skipping, set this property
+   * to %FALSE.
+   *
+   * If used in conjunction with <link linkend="GstRgAnalysis--num-tracks">album
+   * processing</link>, the element will skip the number of remaining album
+   * tracks if a full set of tags is found for the first track.  If a subsequent
+   * track of the album is missing tags, processing cannot start again.  If this
+   * is undesired, the application has to scan all files beforehand and enable
+   * forcing of processing if needed.
+   */
+  g_object_class_install_property (gobject_class, PROP_FORCED,
+      g_param_spec_boolean ("forced", "Forced",
+          "Analyze even if ReplayGain tags exist",
+          FORCED_DEFAULT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstRgAnalysis:reference-level:
+   *
+   * Reference level [dB].
+   *
+   * Analyzing the ReplayGain pink noise reference waveform computes a result of
+   * +6 dB instead of the expected 0 dB.  This is because the default reference
+   * level is 89 dB.  To obtain values as lined out in the original proposal of
+   * ReplayGain, set this property to 83.
+   *
+   * Almost all software uses 89 dB as a reference however, and this value has
+   * become the new official value, and that change has been acclaimed by the
+   * original author of the ReplayGain proposal.
+   *
+   * The value was changed because the original proposal recommends a default
+   * pre-amp value of +6 dB for playback.  This seemed a bit odd, as it means
+   * that the algorithm has the general tendency to produce adjustment values
+   * that are 6 dB too low.  Bumping the reference level by 6 dB compensated for
+   * this.
+   *
+   * The problem of the reference level being ambiguous for lack of concise
+   * standardization is to be solved by adopting the #GST_TAG_REFERENCE_LEVEL
+   * tag, which allows to store the used value alongside the gain values.
+   */
+  g_object_class_install_property (gobject_class, PROP_REFERENCE_LEVEL,
+      g_param_spec_double ("reference-level", "Reference level",
+          "Reference level [dB]", 0.0, 150., RG_REFERENCE_LEVEL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /* Fixed typo in the blurb ("statics" -> "statistics") and use the
+   * gobject_class variable for consistency with the installs above. */
+  g_object_class_install_property (gobject_class, PROP_MESSAGE,
+      g_param_spec_boolean ("message", "Message",
+          "Post statistics messages",
+          DEFAULT_MESSAGE,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+  trans_class = (GstBaseTransformClass *) klass;
+  trans_class->start = GST_DEBUG_FUNCPTR (gst_rg_analysis_start);
+  trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_rg_analysis_set_caps);
+  trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_rg_analysis_transform_ip);
+  trans_class->sink_event = GST_DEBUG_FUNCPTR (gst_rg_analysis_sink_event);
+  trans_class->stop = GST_DEBUG_FUNCPTR (gst_rg_analysis_stop);
+  trans_class->passthrough_on_same_caps = TRUE;
+
+  gst_element_class_add_static_pad_template (element_class, &src_factory);
+  gst_element_class_add_static_pad_template (element_class, &sink_factory);
+  gst_element_class_set_static_metadata (element_class, "ReplayGain analysis",
+      "Filter/Analyzer/Audio",
+      "Perform the ReplayGain analysis",
+      "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+  GST_DEBUG_CATEGORY_INIT (gst_rg_analysis_debug, "rganalysis", 0,
+      "ReplayGain analysis element");
+}
+
+static void
+gst_rg_analysis_init (GstRgAnalysis * filter)
+{
+  GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
+
+  /* Let GAP buffers reach transform_ip so that silence runs through the
+   * analysis as well. */
+  gst_base_transform_set_gap_aware (base, TRUE);
+
+  /* Property defaults. */
+  filter->forced = FORCED_DEFAULT;
+  filter->message = DEFAULT_MESSAGE;
+  filter->reference_level = RG_REFERENCE_LEVEL;
+  filter->num_tracks = 0;
+
+  /* No analysis context or analyze callback until start()/set_caps(). */
+  filter->analyze = NULL;
+  filter->ctx = NULL;
+}
+
+static void
+gst_rg_analysis_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRgAnalysis *self = GST_RG_ANALYSIS (object);
+
+  /* Properties can be changed from any thread; serialize with object lock. */
+  GST_OBJECT_LOCK (self);
+  switch (prop_id) {
+    case PROP_NUM_TRACKS:
+      self->num_tracks = g_value_get_int (value);
+      break;
+    case PROP_FORCED:
+      self->forced = g_value_get_boolean (value);
+      break;
+    case PROP_REFERENCE_LEVEL:
+      self->reference_level = g_value_get_double (value);
+      break;
+    case PROP_MESSAGE:
+      self->message = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (self);
+}
+
+static void
+gst_rg_analysis_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRgAnalysis *self = GST_RG_ANALYSIS (object);
+
+  /* Mirror of set_property: read fields under the object lock. */
+  GST_OBJECT_LOCK (self);
+  switch (prop_id) {
+    case PROP_NUM_TRACKS:
+      g_value_set_int (value, self->num_tracks);
+      break;
+    case PROP_FORCED:
+      g_value_set_boolean (value, self->forced);
+      break;
+    case PROP_REFERENCE_LEVEL:
+      g_value_set_double (value, self->reference_level);
+      break;
+    case PROP_MESSAGE:
+      g_value_set_boolean (value, self->message);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (self);
+}
+
+/* Silence-detection callback: posts an element message with the interim
+ * gain level, but only if the "message" property is enabled. */
+static void
+gst_rg_analysis_post_message (gpointer rganalysis, GstClockTime timestamp,
+    GstClockTime duration, gdouble rglevel)
+{
+  GstRgAnalysis *self = GST_RG_ANALYSIS (rganalysis);
+  GstStructure *s;
+
+  if (!self->message)
+    return;
+
+  s = gst_structure_new ("rganalysis",
+      "timestamp", G_TYPE_UINT64, timestamp,
+      "duration", G_TYPE_UINT64, duration,
+      "rglevel", G_TYPE_DOUBLE, rglevel, NULL);
+
+  gst_element_post_message (GST_ELEMENT_CAST (rganalysis),
+      gst_message_new_element (GST_OBJECT_CAST (rganalysis), s));
+}
+
+
+/* Called when the element goes to PAUSED: resets the per-stream skipping
+ * state and creates a fresh analysis context (freed again in stop()). */
+static gboolean
+gst_rg_analysis_start (GstBaseTransform * base)
+{
+  GstRgAnalysis *self = GST_RG_ANALYSIS (base);
+
+  self->ignore_tags = FALSE;
+  self->skip = FALSE;
+  self->has_track_gain = FALSE;
+  self->has_track_peak = FALSE;
+  self->has_album_gain = FALSE;
+  self->has_album_peak = FALSE;
+
+  self->ctx = rg_analysis_new ();
+  /* The callback reads the "message" property, hence the lock. */
+  GST_OBJECT_LOCK (self);
+  rg_analysis_init_silence_detection (self->ctx, gst_rg_analysis_post_message,
+      self);
+  GST_OBJECT_UNLOCK (self);
+  self->analyze = NULL;
+
+  GST_LOG_OBJECT (self, "started");
+
+  return TRUE;
+}
+
+/* Negotiation: validate the incoming audio format and pick the matching
+ * analysis callback.  Returns FALSE (with an element error) for anything
+ * outside mono/stereo F32/S16 at a supported sample rate. */
+static gboolean
+gst_rg_analysis_set_caps (GstBaseTransform * base, GstCaps * in_caps,
+    GstCaps * out_caps)
+{
+  GstRgAnalysis *filter = GST_RG_ANALYSIS (base);
+  GstAudioInfo info;
+  gint rate, channels;
+
+  /* start() must have run already so that the analysis context exists. */
+  g_return_val_if_fail (filter->ctx != NULL, FALSE);
+
+  GST_DEBUG_OBJECT (filter,
+      "set_caps in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT,
+      in_caps, out_caps);
+
+  if (!gst_audio_info_from_caps (&info, in_caps))
+    goto invalid_format;
+
+  rate = GST_AUDIO_INFO_RATE (&info);
+
+  /* The analysis library only has filter coefficients for a fixed set of
+   * sample rates; this rejects anything else. */
+  if (!rg_analysis_set_sample_rate (filter->ctx, rate))
+    goto invalid_format;
+
+  channels = GST_AUDIO_INFO_CHANNELS (&info);
+
+  /* ReplayGain analysis is defined for mono and stereo only. */
+  if (channels < 1 || channels > 2)
+    goto invalid_format;
+
+  switch (GST_AUDIO_INFO_FORMAT (&info)) {
+    case GST_AUDIO_FORMAT_F32:
+      /* The depth is not variable for float formats of course.  It just
+       * makes the transform function nice and simple if the
+       * rg_analysis_analyze_* functions have a common signature. */
+      filter->depth = sizeof (gfloat) * 8;
+
+      if (channels == 1)
+        filter->analyze = rg_analysis_analyze_mono_float;
+      else
+        filter->analyze = rg_analysis_analyze_stereo_float;
+
+      break;
+    case GST_AUDIO_FORMAT_S16:
+      filter->depth = sizeof (gint16) * 8;
+
+      if (channels == 1)
+        filter->analyze = rg_analysis_analyze_mono_int16;
+      else
+        filter->analyze = rg_analysis_analyze_stereo_int16;
+      break;
+    default:
+      goto invalid_format;
+  }
+
+  return TRUE;
+
+  /* Errors. */
+invalid_format:
+  {
+    /* Clear the callback so transform_ip refuses data until renegotiation. */
+    filter->analyze = NULL;
+    GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION,
+        ("Invalid incoming caps: %" GST_PTR_FORMAT, in_caps), (NULL));
+    return FALSE;
+  }
+}
+
+/* Feed every buffer into the analysis context.  The data itself is never
+ * modified, so a read-only map is sufficient. */
+static GstFlowReturn
+gst_rg_analysis_transform_ip (GstBaseTransform * base, GstBuffer * buf)
+{
+  GstRgAnalysis *self = GST_RG_ANALYSIS (base);
+  GstMapInfo map;
+
+  g_return_val_if_fail (self->ctx != NULL, GST_FLOW_FLUSHING);
+  g_return_val_if_fail (self->analyze != NULL, GST_FLOW_NOT_NEGOTIATED);
+
+  /* While skipping (sufficient tags already seen), pass data through. */
+  if (self->skip)
+    return GST_FLOW_OK;
+
+  gst_buffer_map (buf, &map, GST_MAP_READ);
+  GST_LOG_OBJECT (self, "processing buffer of size %" G_GSIZE_FORMAT,
+      map.size);
+
+  rg_analysis_start_buffer (self->ctx, GST_BUFFER_TIMESTAMP (buf));
+  self->analyze (self->ctx, map.data, map.size, self->depth);
+  gst_buffer_unmap (buf, &map);
+
+  return GST_FLOW_OK;
+}
+
+/* Intercepts EOS (to finalize the analysis and push result tags) and TAG
+ * events (to decide whether processing can be skipped), then forwards the
+ * event via the parent class. */
+static gboolean
+gst_rg_analysis_sink_event (GstBaseTransform * base, GstEvent * event)
+{
+  GstRgAnalysis *self = GST_RG_ANALYSIS (base);
+
+  g_return_val_if_fail (self->ctx != NULL, TRUE);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_EOS:
+      GST_LOG_OBJECT (self, "received EOS event");
+      gst_rg_analysis_handle_eos (self);
+      GST_LOG_OBJECT (self, "passing on EOS event");
+      break;
+    case GST_EVENT_TAG:{
+      GstTagList *tags;
+
+      /* gst_event_parse_tag() only borrows the list; no unref needed. */
+      gst_event_parse_tag (event, &tags);
+      gst_rg_analysis_handle_tags (self, tags);
+      break;
+    }
+    default:
+      break;
+  }
+
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (base, event);
+}
+
+/* Counterpart to start(): releases the analysis context. */
+static gboolean
+gst_rg_analysis_stop (GstBaseTransform * base)
+{
+  GstRgAnalysis *self = GST_RG_ANALYSIS (base);
+
+  g_return_val_if_fail (self->ctx != NULL, FALSE);
+
+  rg_analysis_destroy (self->ctx);
+  self->ctx = NULL;
+
+  GST_LOG_OBJECT (self, "stopped");
+
+  return TRUE;
+}
+
+/* FIXME: handle global vs. stream-tags? */
+static void
+gst_rg_analysis_handle_tags (GstRgAnalysis * filter,
+ const GstTagList * tag_list)
+{
+ gboolean album_processing = (filter->num_tracks > 0);
+ gdouble dummy;
+
+ if (!album_processing)
+ filter->ignore_tags = FALSE;
+
+ if (filter->skip && album_processing) {
+ GST_DEBUG_OBJECT (filter, "ignoring tag event: skipping album");
+ return;
+ } else if (filter->skip) {
+ GST_DEBUG_OBJECT (filter, "ignoring tag event: skipping track");
+ return;
+ } else if (filter->ignore_tags) {
+ GST_DEBUG_OBJECT (filter, "ignoring tag event: cannot skip anyways");
+ return;
+ }
+
+ filter->has_track_gain |= gst_tag_list_get_double (tag_list,
+ GST_TAG_TRACK_GAIN, &dummy);
+ filter->has_track_peak |= gst_tag_list_get_double (tag_list,
+ GST_TAG_TRACK_PEAK, &dummy);
+ filter->has_album_gain |= gst_tag_list_get_double (tag_list,
+ GST_TAG_ALBUM_GAIN, &dummy);
+ filter->has_album_peak |= gst_tag_list_get_double (tag_list,
+ GST_TAG_ALBUM_PEAK, &dummy);
+
+ if (!(filter->has_track_gain && filter->has_track_peak)) {
+ GST_DEBUG_OBJECT (filter, "track tags not complete yet");
+ return;
+ }
+
+ if (album_processing && !(filter->has_album_gain && filter->has_album_peak)) {
+ GST_DEBUG_OBJECT (filter, "album tags not complete yet");
+ return;
+ }
+
+ if (filter->forced) {
+ GST_DEBUG_OBJECT (filter,
+ "existing tags are sufficient, but processing anyway (forced)");
+ return;
+ }
+
+ filter->skip = TRUE;
+ rg_analysis_reset (filter->ctx);
+
+ if (!album_processing) {
+ GST_DEBUG_OBJECT (filter,
+ "existing tags are sufficient, will not process this track");
+ } else {
+ GST_DEBUG_OBJECT (filter,
+ "existing tags are sufficient, will not process this album");
+ }
+}
+
+/* End-of-stream bookkeeping: emits a tag event with the track (and, on the
+ * last album track, album) results, resets per-track state, and counts
+ * down the num-tracks property in album mode. */
+static void
+gst_rg_analysis_handle_eos (GstRgAnalysis * filter)
+{
+  gboolean album_processing = (filter->num_tracks > 0);
+  gboolean album_finished = (filter->num_tracks == 1);
+  gboolean album_skipping = album_processing && filter->skip;
+
+  /* Track-level tag presence is always reset per stream. */
+  filter->has_track_gain = FALSE;
+  filter->has_track_peak = FALSE;
+
+  if (album_finished) {
+    /* Album done: full reset of the skipping machinery. */
+    filter->ignore_tags = FALSE;
+    filter->skip = FALSE;
+    filter->has_album_gain = FALSE;
+    filter->has_album_peak = FALSE;
+  } else if (!album_skipping) {
+    /* Keep skipping across an album, but not across single tracks. */
+    filter->skip = FALSE;
+  }
+
+  /* We might have just fully processed a track because it has
+   * incomplete tags.  If we do album processing and allow skipping
+   * (not forced), prevent switching to skipping if a later track with
+   * full tags comes along: */
+  if (!filter->forced && album_processing && !album_finished)
+    filter->ignore_tags = TRUE;
+
+  if (!filter->skip) {
+    GstTagList *tag_list = NULL;
+    gboolean track_success;
+    gboolean album_success = FALSE;
+
+    track_success = gst_rg_analysis_track_result (filter, &tag_list);
+
+    if (album_finished)
+      album_success = gst_rg_analysis_album_result (filter, &tag_list);
+    else if (!album_processing)
+      rg_analysis_reset_album (filter->ctx);
+
+    if (track_success || album_success) {
+      GST_LOG_OBJECT (filter, "posting tag list with results");
+      /* Always record which reference level the results are based on. */
+      gst_tag_list_add (tag_list, GST_TAG_MERGE_APPEND,
+          GST_TAG_REFERENCE_LEVEL, filter->reference_level, NULL);
+      /* This takes ownership of our reference to the list */
+      gst_pad_push_event (GST_BASE_TRANSFORM_SRC_PAD (filter),
+          gst_event_new_tag (tag_list));
+      tag_list = NULL;
+    }
+  }
+
+  if (album_processing) {
+    filter->num_tracks--;
+
+    if (!album_finished) {
+      GST_DEBUG_OBJECT (filter, "album not finished yet (num-tracks is now %u)",
+          filter->num_tracks);
+    } else {
+      GST_DEBUG_OBJECT (filter, "album finished (num-tracks is now 0)");
+    }
+  }
+
+  /* num_tracks was just decremented above; let property watchers know. */
+  if (album_processing)
+    g_object_notify (G_OBJECT (filter), "num-tracks");
+}
+
+/* FIXME: return tag list (lists?) based on input tags.. */
+static gboolean
+gst_rg_analysis_track_result (GstRgAnalysis * filter, GstTagList ** tag_list)
+{
+ gboolean track_success;
+ gdouble track_gain, track_peak;
+
+ track_success = rg_analysis_track_result (filter->ctx, &track_gain,
+ &track_peak);
+
+ if (track_success) {
+ track_gain += filter->reference_level - RG_REFERENCE_LEVEL;
+ GST_INFO_OBJECT (filter, "track gain is %+.2f dB, peak %.6f", track_gain,
+ track_peak);
+ } else {
+ GST_INFO_OBJECT (filter, "track was too short to analyze");
+ }
+
+ if (track_success) {
+ if (*tag_list == NULL)
+ *tag_list = gst_tag_list_new_empty ();
+ gst_tag_list_add (*tag_list, GST_TAG_MERGE_APPEND,
+ GST_TAG_TRACK_PEAK, track_peak, GST_TAG_TRACK_GAIN, track_gain, NULL);
+ }
+
+ return track_success;
+}
+
+/* Album counterpart of gst_rg_analysis_track_result(): appends
+ * GST_TAG_ALBUM_GAIN/PEAK on success. */
+static gboolean
+gst_rg_analysis_album_result (GstRgAnalysis * filter, GstTagList ** tag_list)
+{
+  gdouble gain, peak;
+
+  if (!rg_analysis_album_result (filter->ctx, &gain, &peak)) {
+    GST_INFO_OBJECT (filter, "album was too short to analyze");
+    return FALSE;
+  }
+
+  /* Shift from RG_REFERENCE_LEVEL to the configured reference level. */
+  gain += filter->reference_level - RG_REFERENCE_LEVEL;
+  GST_INFO_OBJECT (filter, "album gain is %+.2f dB, peak %.6f", gain, peak);
+
+  if (*tag_list == NULL)
+    *tag_list = gst_tag_list_new_empty ();
+  gst_tag_list_add (*tag_list, GST_TAG_MERGE_APPEND,
+      GST_TAG_ALBUM_PEAK, peak, GST_TAG_ALBUM_GAIN, gain, NULL);
+
+  return TRUE;
+}
diff --git a/gst/replaygain/gstrganalysis.h b/gst/replaygain/gstrganalysis.h
new file mode 100644
index 0000000000..0d68e6309b
--- /dev/null
+++ b/gst/replaygain/gstrganalysis.h
@@ -0,0 +1,86 @@
+/* GStreamer ReplayGain analysis
+ *
+ * Copyright (C) 2006 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrganalysis.h: Element that performs the ReplayGain analysis
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+#ifndef __GST_RG_ANALYSIS_H__
+#define __GST_RG_ANALYSIS_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+
+#include "rganalysis.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RG_ANALYSIS \
+ (gst_rg_analysis_get_type())
+#define GST_RG_ANALYSIS(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RG_ANALYSIS,GstRgAnalysis))
+#define GST_RG_ANALYSIS_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RG_ANALYSIS,GstRgAnalysisClass))
+#define GST_IS_RG_ANALYSIS(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RG_ANALYSIS))
+#define GST_IS_RG_ANALYSIS_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RG_ANALYSIS))
+typedef struct _GstRgAnalysis GstRgAnalysis;
+typedef struct _GstRgAnalysisClass GstRgAnalysisClass;
+
+/**
+ * GstRgAnalysis:
+ *
+ * Opaque data structure.
+ */
+struct _GstRgAnalysis
+{
+  GstBaseTransform element;
+
+  /*< private >*/
+
+  /* Analysis context; created in start(), destroyed in stop(). */
+  RgAnalysisCtx *ctx;
+  /* Format-specific analysis callback chosen in set_caps(); NULL until
+   * caps are negotiated. */
+  void (*analyze) (RgAnalysisCtx * ctx, gconstpointer data, gsize size,
+      guint depth);
+  /* Bits per sample of the negotiated format (16 for S16, 32 for F32). */
+  gint depth;
+
+  /* Property values. */
+  guint num_tracks;             /* remaining album tracks; 0 = no album mode */
+  gdouble reference_level;      /* reference level in dB */
+  gboolean forced;              /* analyze even if RG tags already exist */
+  gboolean message;             /* post "rganalysis" element messages */
+
+  /* State machinery for skipping. */
+  gboolean ignore_tags;         /* latch: don't enable skipping mid-album */
+  gboolean skip;                /* pass data through without analyzing */
+  gboolean has_track_gain;      /* GST_TAG_TRACK_GAIN seen on this stream */
+  gboolean has_track_peak;      /* GST_TAG_TRACK_PEAK seen on this stream */
+  gboolean has_album_gain;      /* GST_TAG_ALBUM_GAIN seen */
+  gboolean has_album_peak;      /* GST_TAG_ALBUM_PEAK seen */
+};
+
+struct _GstRgAnalysisClass
+{
+  GstBaseTransformClass parent_class;
+};
+
+GType gst_rg_analysis_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RG_ANALYSIS_H__ */
diff --git a/gst/replaygain/gstrglimiter.c b/gst/replaygain/gstrglimiter.c
new file mode 100644
index 0000000000..f55073adf8
--- /dev/null
+++ b/gst/replaygain/gstrglimiter.c
@@ -0,0 +1,201 @@
+/* GStreamer ReplayGain limiter
+ *
+ * Copyright (C) 2007 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrglimiter.c: Element to apply signal compression to raw audio data
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+/**
+ * SECTION:element-rglimiter
+ * @title: rglimiter
+ * @see_also: #GstRgVolume
+ *
+ * This element applies signal compression/limiting to raw audio data. It
+ * performs strict hard limiting with soft-knee characteristics, using a
+ * threshold of -6 dB. This type of filter is mentioned in the proposed
+ * [ReplayGain standard](https://wiki.hydrogenaud.io/index.php?title=ReplayGain).
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=filename.ext ! decodebin ! audioconvert \
+ * ! rgvolume pre-amp=6.0 headroom=10.0 ! rglimiter \
+ * ! audioconvert ! audioresample ! alsasink
+ * ]|Playback of a file
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include <math.h>
+#include <gst/audio/audio.h>
+
+#include "gstrglimiter.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rg_limiter_debug);
+#define GST_CAT_DEFAULT gst_rg_limiter_debug
+
+enum
+{
+ PROP_0,
+ PROP_ENABLED,
+};
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "channels = (int) [1, MAX], " "rate = (int) [1, MAX]"));
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " GST_AUDIO_NE (F32) ", "
+ "layout = (string) { interleaved, non-interleaved}, "
+ "channels = (int) [1, MAX], " "rate = (int) [1, MAX]"));
+
+#define gst_rg_limiter_parent_class parent_class
+G_DEFINE_TYPE (GstRgLimiter, gst_rg_limiter, GST_TYPE_BASE_TRANSFORM);
+GST_ELEMENT_REGISTER_DEFINE (rglimiter, "rglimiter", GST_RANK_NONE,
+ GST_TYPE_RG_LIMITER);
+
+static void gst_rg_limiter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rg_limiter_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstFlowReturn gst_rg_limiter_transform_ip (GstBaseTransform * base,
+ GstBuffer * buf);
+
+static void
+gst_rg_limiter_class_init (GstRgLimiterClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *element_class = (GstElementClass *) klass;
+  GstBaseTransformClass *trans_class;
+
+  gobject_class->set_property = gst_rg_limiter_set_property;
+  gobject_class->get_property = gst_rg_limiter_get_property;
+
+  /* Single boolean knob: disabling switches the element to passthrough. */
+  g_object_class_install_property (gobject_class, PROP_ENABLED,
+      g_param_spec_boolean ("enabled", "Enabled", "Enable processing", TRUE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  trans_class = GST_BASE_TRANSFORM_CLASS (klass);
+  trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_rg_limiter_transform_ip);
+  trans_class->passthrough_on_same_caps = FALSE;
+
+  gst_element_class_add_static_pad_template (element_class, &src_factory);
+  gst_element_class_add_static_pad_template (element_class, &sink_factory);
+  gst_element_class_set_static_metadata (element_class, "ReplayGain limiter",
+      "Filter/Effect/Audio",
+      "Apply signal compression to raw audio data",
+      "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+  GST_DEBUG_CATEGORY_INIT (gst_rg_limiter_debug, "rglimiter", 0,
+      "ReplayGain limiter element");
+}
+
+static void
+gst_rg_limiter_init (GstRgLimiter * filter)
+{
+  GstBaseTransform *base = GST_BASE_TRANSFORM (filter);
+
+  /* Start out actively processing; GAP buffers are let through untouched
+   * (see transform_ip). */
+  gst_base_transform_set_passthrough (base, FALSE);
+  gst_base_transform_set_gap_aware (base, TRUE);
+
+  filter->enabled = TRUE;
+}
+
+static void
+gst_rg_limiter_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRgLimiter *self = GST_RG_LIMITER (object);
+
+  switch (prop_id) {
+    case PROP_ENABLED:
+      self->enabled = g_value_get_boolean (value);
+      /* Keep basetransform's passthrough mode in sync: disabled means
+       * buffers bypass transform_ip entirely. */
+      gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (self),
+          !self->enabled);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_rg_limiter_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRgLimiter *self = GST_RG_LIMITER (object);
+
+  switch (prop_id) {
+    case PROP_ENABLED:
+      g_value_set_boolean (value, self->enabled);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+#define LIMIT 1.0               /* absolute output ceiling */
+#define THRES 0.5               /* ca. -6 dB */
+#define COMPL 0.5               /* LIMIT - THRES */
+
+/* In-place soft-knee limiter for F32 samples: values whose magnitude
+ * exceeds THRES (-6 dB) are compressed with tanh so the output never
+ * exceeds LIMIT (1.0).  GAP buffers and disabled state pass through. */
+static GstFlowReturn
+gst_rg_limiter_transform_ip (GstBaseTransform * base, GstBuffer * buf)
+{
+  GstRgLimiter *filter = GST_RG_LIMITER (base);
+  gfloat *input;
+  GstMapInfo map;
+  guint count;
+  guint i;
+
+  if (!filter->enabled)
+    return GST_FLOW_OK;
+
+  /* GAP buffers carry silence; nothing to limit. */
+  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_GAP))
+    return GST_FLOW_OK;
+
+  /* The samples are modified in place, so the buffer must be mapped for
+   * writing as well; mapping with GST_MAP_READ only could silently drop
+   * our changes for non-writable memory. */
+  if (!gst_buffer_map (buf, &map, GST_MAP_READWRITE))
+    return GST_FLOW_ERROR;
+
+  input = (gfloat *) map.data;
+  count = map.size / sizeof (gfloat);
+
+  for (i = count; i--;) {
+    if (*input > THRES)
+      *input = tanhf ((*input - THRES) / COMPL) * COMPL + THRES;
+    else if (*input < -THRES)
+      *input = tanhf ((*input + THRES) / COMPL) * COMPL - THRES;
+    input++;
+  }
+
+  gst_buffer_unmap (buf, &map);
+
+  return GST_FLOW_OK;
+}
diff --git a/gst/replaygain/gstrglimiter.h b/gst/replaygain/gstrglimiter.h
new file mode 100644
index 0000000000..19c8457eb0
--- /dev/null
+++ b/gst/replaygain/gstrglimiter.h
@@ -0,0 +1,66 @@
+/* GStreamer ReplayGain limiter
+ *
+ * Copyright (C) 2007 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrglimiter.h: Element to apply signal compression to raw audio data
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+#ifndef __GST_RG_LIMITER_H__
+#define __GST_RG_LIMITER_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RG_LIMITER \
+  (gst_rg_limiter_get_type())
+#define GST_RG_LIMITER(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RG_LIMITER,GstRgLimiter))
+#define GST_RG_LIMITER_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RG_LIMITER,GstRgLimiterClass))
+#define GST_IS_RG_LIMITER(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RG_LIMITER))
+#define GST_IS_RG_LIMITER_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RG_LIMITER))
+
+typedef struct _GstRgLimiter GstRgLimiter;
+typedef struct _GstRgLimiterClass GstRgLimiterClass;
+
+/**
+ * GstRgLimiter:
+ *
+ * Opaque data structure.
+ */
+struct _GstRgLimiter
+{
+  GstBaseTransform element;
+
+  /*< private >*/
+
+  gboolean enabled;             /* FALSE switches the element to passthrough */
+};
+
+struct _GstRgLimiterClass
+{
+  GstBaseTransformClass parent_class;
+};
+
+GType gst_rg_limiter_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rglimiter);
+
+G_END_DECLS
+
+#endif /* __GST_RG_LIMITER_H__ */
diff --git a/gst/replaygain/gstrgvolume.c b/gst/replaygain/gstrgvolume.c
new file mode 100644
index 0000000000..1b8ec9bd82
--- /dev/null
+++ b/gst/replaygain/gstrgvolume.c
@@ -0,0 +1,687 @@
+/* GStreamer ReplayGain volume adjustment
+ *
+ * Copyright (C) 2007 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrgvolume.c: Element to apply ReplayGain volume adjustment
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+/**
+ * SECTION:element-rgvolume
+ * @title: rgvolume
+ * @see_also: #GstRgLimiter, #GstRgAnalysis
+ *
+ * This element applies volume changes to streams as lined out in the proposed
+ * [ReplayGain standard](https://wiki.hydrogenaud.io/index.php?title=ReplayGain).
+ * It interprets the ReplayGain meta data tags and carries out the adjustment
+ * (by using a volume element internally).
+ *
+ * The relevant tags are:
+ * * #GST_TAG_TRACK_GAIN
+ * * #GST_TAG_TRACK_PEAK
+ * * #GST_TAG_ALBUM_GAIN
+ * * #GST_TAG_ALBUM_PEAK
+ * * #GST_TAG_REFERENCE_LEVEL
+ *
+ * The information carried by these tags must have been calculated beforehand by
+ * performing the ReplayGain analysis. This is implemented by the <link
+ * linkend="GstRgAnalysis">rganalysis</link> element.
+ *
+ * The signal compression/limiting recommendations outlined in the proposed
+ * standard are not implemented by this element. This has to be handled by
+ * separate elements because applications might want to have additional filters
+ * between the volume adjustment and the limiting stage. A basic limiter is
+ * included with this plugin: The <link linkend="GstRgLimiter">rglimiter</link>
+ * element applies -6 dB hard limiting as mentioned in the ReplayGain standard.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=filename.ext ! decodebin ! audioconvert \
+ * ! rgvolume ! audioconvert ! audioresample ! alsasink
+ * ]| Playback of a file
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/audio/audio.h>
+#include <math.h>
+
+#include "gstrgvolume.h"
+#include "replaygain.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rg_volume_debug);
+#define GST_CAT_DEFAULT gst_rg_volume_debug
+
+enum
+{
+ PROP_0,
+ PROP_ALBUM_MODE,
+ PROP_HEADROOM,
+ PROP_PRE_AMP,
+ PROP_FALLBACK_GAIN,
+ PROP_TARGET_GAIN,
+ PROP_RESULT_GAIN
+};
+
+#define DEFAULT_ALBUM_MODE TRUE
+#define DEFAULT_HEADROOM 0.0
+#define DEFAULT_PRE_AMP 0.0
+#define DEFAULT_FALLBACK_GAIN 0.0
+
+#define DB_TO_LINEAR(x) pow (10., (x) / 20.)
+#define LINEAR_TO_DB(x) (20. * log10 (x))
+
+#define GAIN_FORMAT "+.02f dB"
+#define PEAK_FORMAT ".06f"
+
+#define VALID_GAIN(x) ((x) > -60.00 && (x) < 60.00)
+#define VALID_PEAK(x) ((x) > 0.)
+
+/* Same template caps as GstVolume, for I don't like having just ANY caps. */
+
+#define FORMAT "{ "GST_AUDIO_NE(F32)","GST_AUDIO_NE(S16)" }"
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]"));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) " FORMAT ", "
+ "layout = (string) { interleaved, non-interleaved }, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]"));
+
+#define gst_rg_volume_parent_class parent_class
+G_DEFINE_TYPE (GstRgVolume, gst_rg_volume, GST_TYPE_BIN);
+GST_ELEMENT_REGISTER_DEFINE (rgvolume, "rgvolume", GST_RANK_NONE,
+ GST_TYPE_RG_VOLUME);
+
+static void gst_rg_volume_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rg_volume_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rg_volume_dispose (GObject * object);
+
+static GstStateChangeReturn gst_rg_volume_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_rg_volume_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+static GstEvent *gst_rg_volume_tag_event (GstRgVolume * self, GstEvent * event);
+static void gst_rg_volume_reset (GstRgVolume * self);
+static void gst_rg_volume_update_gain (GstRgVolume * self);
+static inline void gst_rg_volume_determine_gain (GstRgVolume * self,
+ gdouble * target_gain, gdouble * result_gain);
+
+/* Class initialization: installs the GObject properties, overrides the
+ * state-change vfunc, disables external gst_bin_add/_remove access and
+ * registers the static pad templates and element metadata. */
+static void
+gst_rg_volume_class_init (GstRgVolumeClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+  GstBinClass *bin_class;
+
+  gobject_class = (GObjectClass *) klass;
+
+  gobject_class->set_property = gst_rg_volume_set_property;
+  gobject_class->get_property = gst_rg_volume_get_property;
+  gobject_class->dispose = gst_rg_volume_dispose;
+
+  /**
+   * GstRgVolume:album-mode:
+   *
+   * Whether to prefer album gain over track gain.
+   *
+   * If set to %TRUE, use album gain instead of track gain if both are
+   * available. This keeps the relative loudness levels of tracks from the same
+   * album intact.
+   *
+   * If set to %FALSE, track mode is used instead. This effectively leads to
+   * more extensive normalization.
+   *
+   * If album mode is enabled but the album gain tag is absent in the stream,
+   * the track gain is used instead. If both gain tags are missing, the value
+   * of the #GstRgVolume:fallback-gain property is used instead.
+   */
+  g_object_class_install_property (gobject_class, PROP_ALBUM_MODE,
+      g_param_spec_boolean ("album-mode", "Album mode",
+          "Prefer album over track gain", DEFAULT_ALBUM_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstRgVolume:headroom:
+   *
+   * Extra headroom [dB]. This controls the amount by which the output can
+   * exceed digital full scale.
+   *
+   * Only set this to a value greater than 0.0 if signal compression/limiting of
+   * a suitable form is applied to the output (or output is brought into the
+   * correct range by some other transformation).
+   *
+   * This element internally uses a volume element, which also supports
+   * operating on integer audio formats. These formats do not allow exceeding
+   * digital full scale. If extra headroom is used, make sure that the raw
+   * audio data format is floating point (F32). Otherwise,
+   * clipping distortion might be introduced as part of the volume adjustment
+   * itself.
+   */
+  g_object_class_install_property (gobject_class, PROP_HEADROOM,
+      g_param_spec_double ("headroom", "Headroom", "Extra headroom [dB]",
+          0., 60., DEFAULT_HEADROOM,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstRgVolume:pre-amp:
+   *
+   * Additional gain to apply globally [dB]. This controls the trade-off
+   * between uniformity of normalization and utilization of available dynamic
+   * range.
+   *
+   * Note that the default value is 0 dB because the ReplayGain reference value
+   * was adjusted by +6 dB (from 83 to 89 dB). The original proposal stated
+   * that a proper default pre-amp value is +6 dB, this translates to the used 0
+   * dB.
+   */
+  g_object_class_install_property (gobject_class, PROP_PRE_AMP,
+      g_param_spec_double ("pre-amp", "Pre-amp", "Extra gain [dB]",
+          -60., 60., DEFAULT_PRE_AMP,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstRgVolume:fallback-gain:
+   *
+   * Fallback gain [dB] for streams missing ReplayGain tags.
+   */
+  g_object_class_install_property (gobject_class, PROP_FALLBACK_GAIN,
+      g_param_spec_double ("fallback-gain", "Fallback gain",
+          "Gain for streams missing tags [dB]",
+          -60., 60., DEFAULT_FALLBACK_GAIN,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstRgVolume:result-gain:
+   *
+   * Applied gain [dB]. This gain is applied to processed buffer data.
+   *
+   * This is set to the #GstRgVolume:target-gain if amplification by that amount
+   * can be applied safely. "Safely" means that the volume adjustment does not
+   * inflict clipping distortion. Should this not be the case, the result gain
+   * is set to an appropriately reduced value (by applying peak normalization).
+   * The proposed standard calls this "clipping prevention".
+   *
+   * The difference between target and result gain reflects the necessary amount
+   * of reduction. Applications can make use of this information to temporarily
+   * reduce the #GstRgVolume:pre-amp for subsequent streams, as recommended by
+   * the ReplayGain standard.
+   *
+   * Note that target and result gain differing for a great majority of streams
+   * indicates a problem: What happens in this case is that most streams receive
+   * peak normalization instead of amplification by the ideal replay gain. To
+   * prevent this, the #GstRgVolume:pre-amp has to be lowered and/or a limiter
+   * has to be used which facilitates the use of #GstRgVolume:headroom.
+   */
+  g_object_class_install_property (gobject_class, PROP_RESULT_GAIN,
+      g_param_spec_double ("result-gain", "Result-gain", "Applied gain [dB]",
+          -120., 120., 0., G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstRgVolume:target-gain:
+   *
+   * Applicable gain [dB]. This gain is supposed to be applied.
+   *
+   * Depending on the value of the #GstRgVolume:album-mode property and the
+   * presence of ReplayGain tags in the stream, this is set according to one of
+   * these simple formulas:
+   *
+   *
+   * * #GstRgVolume:pre-amp + album gain of the stream
+   * * #GstRgVolume:pre-amp + track gain of the stream
+   * * #GstRgVolume:pre-amp + #GstRgVolume:fallback-gain
+   *
+   */
+  g_object_class_install_property (gobject_class, PROP_TARGET_GAIN,
+      g_param_spec_double ("target-gain", "Target-gain",
+          "Applicable gain [dB]", -120., 120., 0.,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  element_class = (GstElementClass *) klass;
+  element_class->change_state = GST_DEBUG_FUNCPTR (gst_rg_volume_change_state);
+
+  bin_class = (GstBinClass *) klass;
+  /* Setting these to NULL makes gst_bin_add and _remove refuse to let anyone
+   * mess with our internals. */
+  bin_class->add_element = NULL;
+  bin_class->remove_element = NULL;
+
+  gst_element_class_add_static_pad_template (element_class, &src_template);
+  gst_element_class_add_static_pad_template (element_class, &sink_template);
+  gst_element_class_set_static_metadata (element_class, "ReplayGain volume",
+      "Filter/Effect/Audio",
+      "Apply ReplayGain volume adjustment",
+      "Ren\xc3\xa9 Stadler <mail@renestadler.de>");
+
+  GST_DEBUG_CATEGORY_INIT (gst_rg_volume_debug, "rgvolume", 0,
+      "ReplayGain volume element");
+}
+
+/* Instance initialization: sets property defaults, creates the internal
+ * "volume" element, proxies its pads with ghost pads and records the
+ * maximum volume value the child accepts.  If the child cannot be created
+ * the element ends up without pads and later refuses NULL->READY. */
+static void
+gst_rg_volume_init (GstRgVolume * self)
+{
+  GObjectClass *volume_class;
+  GstPad *volume_pad, *ghost_pad;
+
+  self->album_mode = DEFAULT_ALBUM_MODE;
+  self->headroom = DEFAULT_HEADROOM;
+  self->pre_amp = DEFAULT_PRE_AMP;
+  self->fallback_gain = DEFAULT_FALLBACK_GAIN;
+  self->target_gain = 0.0;
+  self->result_gain = 0.0;
+
+  self->volume_element = gst_element_factory_make ("volume", "rgvolume-volume");
+  if (G_UNLIKELY (self->volume_element == NULL)) {
+    GstMessage *msg;
+
+    GST_WARNING_OBJECT (self, "could not create volume element");
+    msg = gst_missing_element_message_new (GST_ELEMENT_CAST (self), "volume");
+    gst_element_post_message (GST_ELEMENT_CAST (self), msg);
+
+    /* Nothing else to do, we will refuse the state change from NULL to READY to
+     * indicate that something went very wrong. It is doubtful that someone
+     * attempts changing our state though, since we end up having no pads! */
+    return;
+  }
+
+  /* Upper bound of the child's "volume" property; used to clamp the
+   * result gain in _update_gain. */
+  volume_class = G_OBJECT_GET_CLASS (G_OBJECT (self->volume_element));
+  self->max_volume = G_PARAM_SPEC_DOUBLE
+      (g_object_class_find_property (volume_class, "volume"))->maximum;
+
+  /* Call the parent class' add_element directly: ours was set to NULL in
+   * class_init to lock out external modifications. */
+  GST_BIN_CLASS (parent_class)->add_element (GST_BIN_CAST (self),
+      self->volume_element);
+
+  volume_pad = gst_element_get_static_pad (self->volume_element, "sink");
+  ghost_pad = gst_ghost_pad_new_from_template ("sink", volume_pad,
+      GST_PAD_PAD_TEMPLATE (volume_pad));
+  gst_object_unref (volume_pad);
+  /* Intercept sink events so ReplayGain tags can be consumed. */
+  gst_pad_set_event_function (ghost_pad, gst_rg_volume_sink_event);
+  gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
+
+  volume_pad = gst_element_get_static_pad (self->volume_element, "src");
+  ghost_pad = gst_ghost_pad_new_from_template ("src", volume_pad,
+      GST_PAD_PAD_TEMPLATE (volume_pad));
+  gst_object_unref (volume_pad);
+  gst_element_add_pad (GST_ELEMENT_CAST (self), ghost_pad);
+}
+
+/* GObject set_property vfunc.  Every writable property influences the
+ * gain computation, so the applied gain is refreshed after each write. */
+static void
+gst_rg_volume_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRgVolume *self = GST_RG_VOLUME (object);
+
+  if (prop_id == PROP_ALBUM_MODE)
+    self->album_mode = g_value_get_boolean (value);
+  else if (prop_id == PROP_HEADROOM)
+    self->headroom = g_value_get_double (value);
+  else if (prop_id == PROP_PRE_AMP)
+    self->pre_amp = g_value_get_double (value);
+  else if (prop_id == PROP_FALLBACK_GAIN)
+    self->fallback_gain = g_value_get_double (value);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+
+  gst_rg_volume_update_gain (self);
+}
+
+/* GObject get_property vfunc: reads back the stored property values,
+ * including the read-only target-gain and result-gain results. */
+static void
+gst_rg_volume_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRgVolume *self = GST_RG_VOLUME (object);
+
+  if (prop_id == PROP_ALBUM_MODE)
+    g_value_set_boolean (value, self->album_mode);
+  else if (prop_id == PROP_HEADROOM)
+    g_value_set_double (value, self->headroom);
+  else if (prop_id == PROP_PRE_AMP)
+    g_value_set_double (value, self->pre_amp);
+  else if (prop_id == PROP_FALLBACK_GAIN)
+    g_value_set_double (value, self->fallback_gain);
+  else if (prop_id == PROP_TARGET_GAIN)
+    g_value_set_double (value, self->target_gain);
+  else if (prop_id == PROP_RESULT_GAIN)
+    g_value_set_double (value, self->result_gain);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject dispose: releases the internal volume element.  Plain
+ * gst_bin_remove cannot be used because remove_element was nulled out in
+ * class_init, so the parent class implementation is invoked directly. */
+static void
+gst_rg_volume_dispose (GObject * object)
+{
+  GstRgVolume *self = GST_RG_VOLUME (object);
+
+  if (self->volume_element != NULL) {
+    /* Manually remove our child using the bin implementation of remove_element.
+     * This is needed because we prevent gst_bin_remove from working, which the
+     * parent dispose handler would use if we had any children left. */
+    GST_BIN_CLASS (parent_class)->remove_element (GST_BIN_CAST (self),
+        self->volume_element);
+    self->volume_element = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GstElement state-change vfunc: refuses to leave NULL if the internal
+ * volume element is missing and clears stale tag data when (re)starting. */
+static GstStateChangeReturn
+gst_rg_volume_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRgVolume *self = GST_RG_VOLUME (element);
+
+  if (transition == GST_STATE_CHANGE_NULL_TO_READY) {
+    /* Creating our child volume element in _init failed. */
+    if (G_UNLIKELY (self->volume_element == NULL))
+      return GST_STATE_CHANGE_FAILURE;
+  } else if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
+    gst_rg_volume_reset (self);
+  }
+
+  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+}
+
+/* Event function for the ghost sink pad.  Tag events are filtered through
+ * gst_rg_volume_tag_event (which may consume them entirely); EOS resets
+ * the collected tag state.  Everything else is forwarded untouched. */
+static gboolean
+gst_rg_volume_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRgVolume *self = GST_RG_VOLUME (parent);
+  GstEvent *forward = event;
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
+    GST_LOG_OBJECT (self, "received tag event");
+
+    forward = gst_rg_volume_tag_event (self, event);
+
+    if (forward == NULL)
+      GST_LOG_OBJECT (self, "all tags handled, dropping event");
+  } else if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+    gst_rg_volume_reset (self);
+  }
+
+  if (forward == NULL)
+    return TRUE;
+
+  return gst_pad_event_default (pad, parent, forward);
+}
+
+/* Handles a tag event: extracts the ReplayGain tags, sanitizes them,
+ * updates the applied gain and strips the consumed tags from the list.
+ * Returns the (possibly new) event to forward downstream, or NULL if the
+ * stripped tag list ended up empty and the event was fully consumed. */
+static GstEvent *
+gst_rg_volume_tag_event (GstRgVolume * self, GstEvent * event)
+{
+  GstTagList *tag_list;
+  gboolean has_track_gain, has_track_peak, has_album_gain, has_album_peak;
+  gboolean has_ref_level;
+
+  g_return_val_if_fail (event != NULL, NULL);
+  g_return_val_if_fail (GST_EVENT_TYPE (event) == GST_EVENT_TAG, event);
+
+  gst_event_parse_tag (event, &tag_list);
+
+  if (gst_tag_list_is_empty (tag_list))
+    return event;
+
+  has_track_gain = gst_tag_list_get_double (tag_list, GST_TAG_TRACK_GAIN,
+      &self->track_gain);
+  has_track_peak = gst_tag_list_get_double (tag_list, GST_TAG_TRACK_PEAK,
+      &self->track_peak);
+  has_album_gain = gst_tag_list_get_double (tag_list, GST_TAG_ALBUM_GAIN,
+      &self->album_gain);
+  has_album_peak = gst_tag_list_get_double (tag_list, GST_TAG_ALBUM_PEAK,
+      &self->album_peak);
+  has_ref_level = gst_tag_list_get_double (tag_list, GST_TAG_REFERENCE_LEVEL,
+      &self->reference_level);
+
+  /* No ReplayGain information at all: pass the event on unmodified. */
+  if (!has_track_gain && !has_track_peak && !has_album_gain && !has_album_peak)
+    return event;
+
+  /* Translate gains analyzed against a non-standard reference level to
+   * the RG_REFERENCE_LEVEL base. */
+  if (has_ref_level && (has_track_gain || has_album_gain)
+      && (ABS (self->reference_level - RG_REFERENCE_LEVEL) > 1.e-6)) {
+    /* Log a message stating the amount of adjustment that is applied below. */
+    GST_DEBUG_OBJECT (self,
+        "compensating for reference level difference by %" GAIN_FORMAT,
+        RG_REFERENCE_LEVEL - self->reference_level);
+  }
+  if (has_track_gain) {
+    self->track_gain += RG_REFERENCE_LEVEL - self->reference_level;
+  }
+  if (has_album_gain) {
+    self->album_gain += RG_REFERENCE_LEVEL - self->reference_level;
+  }
+
+  /* Ignore values that are obviously invalid. */
+  if (G_UNLIKELY (has_track_gain && !VALID_GAIN (self->track_gain))) {
+    GST_DEBUG_OBJECT (self,
+        "ignoring bogus track gain value %" GAIN_FORMAT, self->track_gain);
+    has_track_gain = FALSE;
+  }
+  if (G_UNLIKELY (has_track_peak && !VALID_PEAK (self->track_peak))) {
+    GST_DEBUG_OBJECT (self,
+        "ignoring bogus track peak value %" PEAK_FORMAT, self->track_peak);
+    has_track_peak = FALSE;
+  }
+  if (G_UNLIKELY (has_album_gain && !VALID_GAIN (self->album_gain))) {
+    GST_DEBUG_OBJECT (self,
+        "ignoring bogus album gain value %" GAIN_FORMAT, self->album_gain);
+    has_album_gain = FALSE;
+  }
+  if (G_UNLIKELY (has_album_peak && !VALID_PEAK (self->album_peak))) {
+    GST_DEBUG_OBJECT (self,
+        "ignoring bogus album peak value %" PEAK_FORMAT, self->album_peak);
+    has_album_peak = FALSE;
+  }
+
+  /* Clamp peaks >1.0. Float based decoders can produce spurious samples >1.0,
+   * cutting these files back to 1.0 should not cause any audible distortion.
+   * This is most often seen with Vorbis files. */
+  if (has_track_peak && self->track_peak > 1.) {
+    GST_DEBUG_OBJECT (self,
+        "clamping track peak %" PEAK_FORMAT " to 1.0", self->track_peak);
+    self->track_peak = 1.0;
+  }
+  if (has_album_peak && self->album_peak > 1.) {
+    GST_DEBUG_OBJECT (self,
+        "clamping album peak %" PEAK_FORMAT " to 1.0", self->album_peak);
+    self->album_peak = 1.0;
+  }
+
+  /* OR-accumulate: tag information can arrive spread over several events. */
+  self->has_track_gain |= has_track_gain;
+  self->has_track_peak |= has_track_peak;
+  self->has_album_gain |= has_album_gain;
+  self->has_album_peak |= has_album_peak;
+
+  /* Strip the consumed tags so downstream does not apply the gain again. */
+  tag_list = gst_tag_list_copy (tag_list);
+  gst_tag_list_remove_tag (tag_list, GST_TAG_TRACK_GAIN);
+  gst_tag_list_remove_tag (tag_list, GST_TAG_TRACK_PEAK);
+  gst_tag_list_remove_tag (tag_list, GST_TAG_ALBUM_GAIN);
+  gst_tag_list_remove_tag (tag_list, GST_TAG_ALBUM_PEAK);
+  gst_tag_list_remove_tag (tag_list, GST_TAG_REFERENCE_LEVEL);
+
+  gst_rg_volume_update_gain (self);
+
+  gst_event_unref (event);
+  if (gst_tag_list_is_empty (tag_list)) {
+    gst_tag_list_unref (tag_list);
+    return NULL;
+  }
+
+  return gst_event_new_tag (tag_list);
+}
+
+/* Forgets all tag information collected for the current stream and
+ * re-applies the resulting (fallback) gain. */
+static void
+gst_rg_volume_reset (GstRgVolume * self)
+{
+  self->has_track_gain = FALSE;
+  self->has_track_peak = FALSE;
+  self->has_album_gain = FALSE;
+  self->has_album_peak = FALSE;
+  self->reference_level = RG_REFERENCE_LEVEL;
+
+  gst_rg_volume_update_gain (self);
+}
+
+/* Recomputes target and result gain from the stored tag data and the
+ * current property values, programs the child volume element and emits
+ * notify signals for the read-only properties that changed. */
+static void
+gst_rg_volume_update_gain (GstRgVolume * self)
+{
+  gdouble target_gain, result_gain, result_volume;
+  gboolean target_gain_changed, result_gain_changed;
+
+  gst_rg_volume_determine_gain (self, &target_gain, &result_gain);
+
+  result_volume = DB_TO_LINEAR (result_gain);
+
+  /* Ensure that the result volume is within the range that the volume element
+   * can handle. Currently, the limit is 10. (+20 dB), which should not be
+   * restrictive. */
+  if (G_UNLIKELY (result_volume > self->max_volume)) {
+    GST_INFO_OBJECT (self,
+        "cannot handle result gain of %" GAIN_FORMAT " (%0.6f), adjusting",
+        result_gain, result_volume);
+
+    result_volume = self->max_volume;
+    result_gain = LINEAR_TO_DB (result_volume);
+  }
+
+  /* Direct comparison is OK in this case. */
+  if (target_gain == result_gain) {
+    GST_INFO_OBJECT (self,
+        "result gain is %" GAIN_FORMAT " (%0.6f), matching target",
+        result_gain, result_volume);
+  } else {
+    GST_INFO_OBJECT (self,
+        "result gain is %" GAIN_FORMAT " (%0.6f), target is %" GAIN_FORMAT,
+        result_gain, result_volume, target_gain);
+  }
+
+  target_gain_changed = (self->target_gain != target_gain);
+  result_gain_changed = (self->result_gain != result_gain);
+
+  self->target_gain = target_gain;
+  self->result_gain = result_gain;
+
+  g_object_set (self->volume_element, "volume", result_volume, NULL);
+
+  if (target_gain_changed)
+    g_object_notify ((GObject *) self, "target-gain");
+  if (result_gain_changed)
+    g_object_notify ((GObject *) self, "result-gain");
+}
+
+/* Picks the gain/peak pair to use (album, track or fallback, depending on
+ * album-mode and which tags were seen) and computes the target gain plus
+ * the result gain after clipping prevention. */
+static inline void
+gst_rg_volume_determine_gain (GstRgVolume * self, gdouble * target_gain,
+    gdouble * result_gain)
+{
+  gdouble gain, peak;
+
+  if (!self->has_track_gain && !self->has_album_gain) {
+
+    GST_DEBUG_OBJECT (self, "using fallback gain");
+    gain = self->fallback_gain;
+    peak = 1.0;
+
+  } else if ((self->album_mode && self->has_album_gain)
+      || (!self->album_mode && !self->has_track_gain)) {
+
+    gain = self->album_gain;
+    if (G_LIKELY (self->has_album_peak)) {
+      peak = self->album_peak;
+    } else {
+      GST_DEBUG_OBJECT (self, "album peak missing, assuming 1.0");
+      peak = 1.0;
+    }
+    /* Falling back from track to album gain shouldn't really happen. */
+    if (G_UNLIKELY (!self->album_mode))
+      GST_INFO_OBJECT (self, "falling back to album gain");
+
+  } else {
+    /* !album_mode && !has_album_gain || album_mode && has_track_gain */
+
+    gain = self->track_gain;
+    if (G_LIKELY (self->has_track_peak)) {
+      peak = self->track_peak;
+    } else {
+      GST_DEBUG_OBJECT (self, "track peak missing, assuming 1.0");
+      peak = 1.0;
+    }
+    if (self->album_mode)
+      GST_INFO_OBJECT (self, "falling back to track gain");
+  }
+
+  gain += self->pre_amp;
+
+  *target_gain = gain;
+  *result_gain = gain;
+
+  /* Clipping prevention: if amplifying by the full gain would push the
+   * peak beyond the allowed headroom, reduce to peak normalization. */
+  if (LINEAR_TO_DB (peak) + gain > self->headroom) {
+    *result_gain = LINEAR_TO_DB (1. / peak) + self->headroom;
+  }
+}
diff --git a/gst/replaygain/gstrgvolume.h b/gst/replaygain/gstrgvolume.h
new file mode 100644
index 0000000000..f63da9d5ae
--- /dev/null
+++ b/gst/replaygain/gstrgvolume.h
@@ -0,0 +1,90 @@
+/* GStreamer ReplayGain volume adjustment
+ *
+ * Copyright (C) 2007 Rene Stadler <mail@renestadler.de>
+ *
+ * gstrgvolume.h: Element to apply ReplayGain volume adjustment
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+#ifndef __GST_RG_VOLUME_H__
+#define __GST_RG_VOLUME_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RG_VOLUME \
+  (gst_rg_volume_get_type())
+#define GST_RG_VOLUME(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RG_VOLUME,GstRgVolume))
+#define GST_RG_VOLUME_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RG_VOLUME,GstRgVolumeClass))
+#define GST_IS_RG_VOLUME(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RG_VOLUME))
+#define GST_IS_RG_VOLUME_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RG_VOLUME))
+
+typedef struct _GstRgVolume GstRgVolume;
+typedef struct _GstRgVolumeClass GstRgVolumeClass;
+
+/**
+ * GstRgVolume:
+ *
+ * Opaque data structure.
+ */
+struct _GstRgVolume
+{
+  GstBin bin;
+
+  /*< private >*/
+
+  GstElement *volume_element;   /* internal "volume" child element */
+  gdouble max_volume;           /* upper bound of the child's volume property */
+
+  /* Writable property values. */
+  gboolean album_mode;
+  gdouble headroom;
+  gdouble pre_amp;
+  gdouble fallback_gain;
+
+  /* Read-only result properties. */
+  gdouble target_gain;
+  gdouble result_gain;
+
+  /* Gain [dB] and peak [linear] values collected from stream tags. */
+  gdouble track_gain;
+  gdouble track_peak;
+  gdouble album_gain;
+  gdouble album_peak;
+
+  /* Which of the tag values above have been seen for this stream. */
+  gboolean has_track_gain;
+  gboolean has_track_peak;
+  gboolean has_album_gain;
+  gboolean has_album_peak;
+
+  /* Reference level from the stream tags, if any. */
+  gdouble reference_level;
+};
+
+struct _GstRgVolumeClass
+{
+  GstBinClass parent_class;
+};
+
+GType gst_rg_volume_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rgvolume);
+
+G_END_DECLS
+
+#endif /* __GST_RG_VOLUME_H__ */
diff --git a/gst/replaygain/meson.build b/gst/replaygain/meson.build
new file mode 100644
index 0000000000..79eb002af5
--- /dev/null
+++ b/gst/replaygain/meson.build
@@ -0,0 +1,19 @@
+# Sources of the ReplayGain plugin: the rganalysis, rglimiter and
+# rgvolume elements plus the shared plugin entry point.
+replaygain_sources = [
+  'gstrganalysis.c',
+  'gstrglimiter.c',
+  'gstrgvolume.c',
+  'replaygain.c',
+  'rganalysis.c',
+]
+
+gstreplaygain = library('gstreplaygain',
+  replaygain_sources,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc],
+  # libm is linked for the math functions used by the elements (tanhf,
+  # pow, log10, ...).
+  dependencies : [gst_dep, gstbase_dep, gstpbutils_dep, gstaudio_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstreplaygain, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstreplaygain]
+
diff --git a/gst/replaygain/replaygain.c b/gst/replaygain/replaygain.c
new file mode 100644
index 0000000000..481544229f
--- /dev/null
+++ b/gst/replaygain/replaygain.c
@@ -0,0 +1,47 @@
+/* GStreamer ReplayGain plugin
+ *
+ * Copyright (C) 2006 Rene Stadler <mail@renestadler.de>
+ *
+ * replaygain.c: Plugin providing ReplayGain related elements
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/gst.h>
+
+#include "gstrganalysis.h"
+#include "gstrglimiter.h"
+#include "gstrgvolume.h"
+
+/* Plugin entry point: registers the three ReplayGain elements.  The
+ * plugin loads successfully if at least one registration succeeded. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean ret;
+
+  ret = GST_ELEMENT_REGISTER (rganalysis, plugin);
+  ret = GST_ELEMENT_REGISTER (rglimiter, plugin) || ret;
+  ret = GST_ELEMENT_REGISTER (rgvolume, plugin) || ret;
+
+  return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR, replaygain,
+ "ReplayGain volume normalization", plugin_init, VERSION, GST_LICENSE,
+ GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/replaygain/replaygain.h b/gst/replaygain/replaygain.h
new file mode 100644
index 0000000000..b7c5938112
--- /dev/null
+++ b/gst/replaygain/replaygain.h
@@ -0,0 +1,36 @@
+/* GStreamer ReplayGain plugin
+ *
+ * Copyright (C) 2006 Rene Stadler <mail@renestadler.de>
+ *
+ * replaygain.h: Plugin providing ReplayGain related elements
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+#ifndef __REPLAYGAIN_H__
+#define __REPLAYGAIN_H__
+
+G_BEGIN_DECLS
+
+/* Reference level (in dBSPL). The 2001 proposal specifies 83. This was
+ * changed later in all implementations to 89, which is the new, official value:
+ * David Robinson acknowledged the change but didn't update the website yet. */
+
+/* Shared by the analysis element (tag writing) and the volume element
+ * (compensation for streams tagged against a different reference). */
+#define RG_REFERENCE_LEVEL 89.
+
+G_END_DECLS
+
+#endif /* __REPLAYGAIN_H__ */
diff --git a/gst/replaygain/rganalysis.c b/gst/replaygain/rganalysis.c
new file mode 100644
index 0000000000..304037699f
--- /dev/null
+++ b/gst/replaygain/rganalysis.c
@@ -0,0 +1,824 @@
+/* GStreamer ReplayGain analysis
+ *
+ * Copyright (C) 2006 Rene Stadler <mail@renestadler.de>
+ * Copyright (C) 2001 David Robinson <David@Robinson.org>
+ * Glen Sawyer <glensawyer@hotmail.com>
+ *
+ * rganalysis.c: Analyze raw audio data in accordance with ReplayGain
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+/* Based on code with Copyright (C) 2001 David Robinson
+ * <David@Robinson.org> and Glen Sawyer <glensawyer@hotmail.com>,
+ * which is distributed under the LGPL as part of the vorbisgain
+ * program. The original code also mentions Frank Klemm
+ * (http://www.uni-jena.de/~pfk/mpp/) for having contributed lots of
+ * good code. Specifically, this is based on the file
+ * "gain_analysis.c" from vorbisgain version 0.34.
+ */
+
+/* Room for future improvement: Mono data is currently in fact copied
+ * to two channels which get processed normally. This means that mono
+ * input data is processed twice.
+ */
+
+/* Helpful information for understanding this code: The two IIR
+ * filters depend on previous input _and_ previous output samples (up
+ * to the filter's order number of samples). This explains the whole
+ * lot of memcpy'ing done in rg_analysis_analyze and why the context
+ * holds so many buffers.
+ */
+
+#include <math.h>
+#include <string.h>
+#include <glib.h>
+
+#include "rganalysis.h"
+
+#define YULE_ORDER 10
+#define BUTTER_ORDER 2
+/* Percentile which is louder than the proposed level: */
+#define RMS_PERCENTILE 95
+/* Duration of RMS window in milliseconds: */
+#define RMS_WINDOW_MSECS 50
+/* Histogram array elements per dB: */
+#define STEPS_PER_DB 100
+/* Histogram upper bound in dB (normal max. values in the wild are
+ * assumed to be around 70, 80 dB): */
+#define MAX_DB 120
+/* Calibration value: */
+#define PINK_REF 64.82 /* 298640883795 */
+
+#define MAX_ORDER MAX (BUTTER_ORDER, YULE_ORDER)
+#define MAX_SAMPLE_RATE 48000
+/* The + 999 has the effect of ceil()ing: */
+#define MAX_SAMPLE_WINDOW (guint) \
+ ((MAX_SAMPLE_RATE * RMS_WINDOW_MSECS + 999) / 1000)
+
+/* Analysis result accumulator. */
+
+struct _RgAnalysisAcc
+{
+  /* Loudness histogram: one bin per 1/STEPS_PER_DB dB, covering 0..MAX_DB
+   * dB; each completed RMS window increments exactly one bin. */
+  guint32 histogram[STEPS_PER_DB * MAX_DB];
+  /* Largest absolute sample amplitude seen, relative to full scale
+   * (1.0 corresponds to 0 dBFS; see the int16 entry points dividing by
+   * 1 << 15). */
+  gdouble peak;
+};
+
+typedef struct _RgAnalysisAcc RgAnalysisAcc;
+
+/* Analysis context. */
+
+struct _RgAnalysisCtx
+{
+  /* Filter buffers for left channel.  The bare pointers are set (in
+   * rg_analysis_new) to point MAX_ORDER elements into their backing
+   * buffer, so the filters may read negative indices holding the
+   * history of the previous chunk. */
+  gfloat inprebuf_l[MAX_ORDER * 2];
+  gfloat *inpre_l;
+  gfloat stepbuf_l[MAX_SAMPLE_WINDOW + MAX_ORDER];
+  gfloat *step_l;
+  gfloat outbuf_l[MAX_SAMPLE_WINDOW + MAX_ORDER];
+  gfloat *out_l;
+  /* Filter buffers for right channel. */
+  gfloat inprebuf_r[MAX_ORDER * 2];
+  gfloat *inpre_r;
+  gfloat stepbuf_r[MAX_SAMPLE_WINDOW + MAX_ORDER];
+  gfloat *step_r;
+  gfloat outbuf_r[MAX_SAMPLE_WINDOW + MAX_ORDER];
+  gfloat *out_r;
+
+  /* Number of samples to reach duration of the RMS window: */
+  guint window_n_samples;
+  /* Progress of the running window: */
+  guint window_n_samples_done;
+  gdouble window_square_sum;
+
+  /* Configured rate (0 until rg_analysis_set_sample_rate succeeds). */
+  gint sample_rate;
+  /* Row index into the AYule/BYule/AButter/BButter coefficient tables. */
+  gint sample_rate_index;
+
+  /* Per-track and whole-album accumulators. */
+  RgAnalysisAcc track;
+  RgAnalysisAcc album;
+  /* Callback used to report per-window loudness (silence detection). */
+  void (*post_message) (gpointer analysis,
+      GstClockTime timestamp, GstClockTime duration, gdouble rglevel);
+  gpointer analysis;
+  /* The timestamp of the current incoming buffer. */
+  GstClockTime buffer_timestamp;
+  /* Number of samples processed in current buffer, during emit_signal,
+     this will always be on an RMS window boundary. */
+  guint buffer_n_samples_done;
+};
+
+/* Filter coefficients for the IIR filters that form the equal
+ * loudness filter. XFilter[ctx->sample_rate_index] gives the array
+ * of the X coefficients (A or B) for the configured sample rate. */
+
+#ifdef _MSC_VER
+/* Disable double-to-float warning: */
+/* A better solution would be to append 'f' to each constant, but that
+ * makes the code ugly. */
+#pragma warning ( disable : 4305 )
+#endif
+
+/* Denominator (feedback) coefficients of the 10th-order Yule-Walker
+ * equal loudness filter, one row per supported sample rate (see
+ * rg_analysis_set_sample_rate for the row ordering: 48000 .. 8000). */
+static const gfloat AYule[9][11] = {
+  {1., -3.84664617118067, 7.81501653005538, -11.34170355132042,
+      13.05504219327545, -12.28759895145294, 9.48293806319790,
+      -5.87257861775999, 2.75465861874613, -0.86984376593551,
+      0.13919314567432},
+  {1., -3.47845948550071, 6.36317777566148, -8.54751527471874, 9.47693607801280,
+      -8.81498681370155, 6.85401540936998, -4.39470996079559,
+      2.19611684890774, -0.75104302451432, 0.13149317958808},
+  {1., -2.37898834973084, 2.84868151156327, -2.64577170229825, 2.23697657451713,
+      -1.67148153367602, 1.00595954808547, -0.45953458054983,
+      0.16378164858596, -0.05032077717131, 0.02347897407020},
+  {1., -1.61273165137247, 1.07977492259970, -0.25656257754070,
+      -0.16276719120440, -0.22638893773906, 0.39120800788284,
+      -0.22138138954925, 0.04500235387352, 0.02005851806501,
+      0.00302439095741},
+  {1., -1.49858979367799, 0.87350271418188, 0.12205022308084, -0.80774944671438,
+      0.47854794562326, -0.12453458140019, -0.04067510197014,
+      0.08333755284107, -0.04237348025746, 0.02977207319925},
+  {1., -0.62820619233671, 0.29661783706366, -0.37256372942400, 0.00213767857124,
+      -0.42029820170918, 0.22199650564824, 0.00613424350682, 0.06747620744683,
+      0.05784820375801, 0.03222754072173},
+  {1., -1.04800335126349, 0.29156311971249, -0.26806001042947, 0.00819999645858,
+      0.45054734505008, -0.33032403314006, 0.06739368333110,
+      -0.04784254229033, 0.01639907836189, 0.01807364323573},
+  {1., -0.51035327095184, -0.31863563325245, -0.20256413484477,
+      0.14728154134330, 0.38952639978999, -0.23313271880868,
+      -0.05246019024463, -0.02505961724053, 0.02442357316099,
+      0.01818801111503},
+  {1., -0.25049871956020, -0.43193942311114, -0.03424681017675,
+      -0.04678328784242, 0.26408300200955, 0.15113130533216,
+      -0.17556493366449, -0.18823009262115, 0.05477720428674,
+      0.04704409688120}
+};
+
+/* Numerator (feed-forward) coefficients of the Yule-Walker filter; rows
+ * correspond to the same sample rates as AYule. */
+static const gfloat BYule[9][11] = {
+  {0.03857599435200, -0.02160367184185, -0.00123395316851, -0.00009291677959,
+      -0.01655260341619, 0.02161526843274, -0.02074045215285,
+      0.00594298065125, 0.00306428023191, 0.00012025322027, 0.00288463683916},
+  {0.05418656406430, -0.02911007808948, -0.00848709379851, -0.00851165645469,
+      -0.00834990904936, 0.02245293253339, -0.02596338512915,
+      0.01624864962975, -0.00240879051584, 0.00674613682247,
+      -0.00187763777362},
+  {0.15457299681924, -0.09331049056315, -0.06247880153653, 0.02163541888798,
+      -0.05588393329856, 0.04781476674921, 0.00222312597743, 0.03174092540049,
+      -0.01390589421898, 0.00651420667831, -0.00881362733839},
+  {0.30296907319327, -0.22613988682123, -0.08587323730772, 0.03282930172664,
+      -0.00915702933434, -0.02364141202522, -0.00584456039913,
+      0.06276101321749, -0.00000828086748, 0.00205861885564,
+      -0.02950134983287},
+  {0.33642304856132, -0.25572241425570, -0.11828570177555, 0.11921148675203,
+      -0.07834489609479, -0.00469977914380, -0.00589500224440,
+      0.05724228140351, 0.00832043980773, -0.01635381384540,
+      -0.01760176568150},
+  {0.44915256608450, -0.14351757464547, -0.22784394429749, -0.01419140100551,
+      0.04078262797139, -0.12398163381748, 0.04097565135648, 0.10478503600251,
+      -0.01863887810927, -0.03193428438915, 0.00541907748707},
+  {0.56619470757641, -0.75464456939302, 0.16242137742230, 0.16744243493672,
+      -0.18901604199609, 0.30931782841830, -0.27562961986224,
+      0.00647310677246, 0.08647503780351, -0.03788984554840,
+      -0.00588215443421},
+  {0.58100494960553, -0.53174909058578, -0.14289799034253, 0.17520704835522,
+      0.02377945217615, 0.15558449135573, -0.25344790059353, 0.01628462406333,
+      0.06920467763959, -0.03721611395801, -0.00749618797172},
+  {0.53648789255105, -0.42163034350696, -0.00275953611929, 0.04267842219415,
+      -0.10214864179676, 0.14590772289388, -0.02459864859345,
+      -0.11202315195388, -0.04060034127000, 0.04788665548180,
+      -0.02217936801134}
+};
+
+/* Denominator coefficients of the 2nd-order Butterworth high-pass stage;
+ * same per-sample-rate row ordering. */
+static const gfloat AButter[9][3] = {
+  {1., -1.97223372919527, 0.97261396931306},
+  {1., -1.96977855582618, 0.97022847566350},
+  {1., -1.95835380975398, 0.95920349965459},
+  {1., -1.95002759149878, 0.95124613669835},
+  {1., -1.94561023566527, 0.94705070426118},
+  {1., -1.92783286977036, 0.93034775234268},
+  {1., -1.91858953033784, 0.92177618768381},
+  {1., -1.91542108074780, 0.91885558323625},
+  {1., -1.88903307939452, 0.89487434461664}
+};
+
+/* Numerator coefficients of the Butterworth stage. */
+static const gfloat BButter[9][3] = {
+  {0.98621192462708, -1.97242384925416, 0.98621192462708},
+  {0.98500175787242, -1.97000351574484, 0.98500175787242},
+  {0.97938932735214, -1.95877865470428, 0.97938932735214},
+  {0.97531843204928, -1.95063686409857, 0.97531843204928},
+  {0.97316523498161, -1.94633046996323, 0.97316523498161},
+  {0.96454515552826, -1.92909031105652, 0.96454515552826},
+  {0.96009142950541, -1.92018285901082, 0.96009142950541},
+  {0.95856916599601, -1.91713833199203, 0.95856916599601},
+  {0.94597685600279, -1.89195371200558, 0.94597685600279}
+};
+
+#ifdef _MSC_VER
+#pragma warning ( default : 4305 )
+#endif
+
+/* Filter functions. These access elements with negative indices of
+ * the input and output arrays (up to the filter's order). */
+
+/* For much better performance, the function below has been
+ * implemented by unrolling the inner loop for our two use cases. */
+
+/*
+ * static inline void
+ * apply_filter (const gfloat * input, gfloat * output, guint n_samples,
+ * const gfloat * a, const gfloat * b, guint order)
+ * {
+ * gfloat y;
+ * gint i, k;
+ *
+ * for (i = 0; i < n_samples; i++) {
+ * y = input[i] * b[0];
+ * for (k = 1; k <= order; k++)
+ * y += input[i - k] * b[k] - output[i - k] * a[k];
+ * output[i] = y;
+ * }
+ * }
+ */
+
+static inline void
+yule_filter (const gfloat * input, gfloat * output,
+    const gfloat * a, const gfloat * b)
+{
+  /* One step of the 10th-order IIR filter: reads input[-10..0] and
+   * output[-10..-1], so the caller must guarantee that much history
+   * before both pointers (see the inpre/step/out buffer layout). */
+
+  /* 1e-10 is added below to avoid running into denormals when operating on
+   * near silence. */
+
+  output[0] = 1e-10 + input[0] * b[0]
+      + input[-1] * b[1] - output[-1] * a[1]
+      + input[-2] * b[2] - output[-2] * a[2]
+      + input[-3] * b[3] - output[-3] * a[3]
+      + input[-4] * b[4] - output[-4] * a[4]
+      + input[-5] * b[5] - output[-5] * a[5]
+      + input[-6] * b[6] - output[-6] * a[6]
+      + input[-7] * b[7] - output[-7] * a[7]
+      + input[-8] * b[8] - output[-8] * a[8]
+      + input[-9] * b[9] - output[-9] * a[9]
+      + input[-10] * b[10] - output[-10] * a[10];
+}
+
+static inline void
+butter_filter (const gfloat * input, gfloat * output,
+    const gfloat * a, const gfloat * b)
+{
+  /* One step of the 2nd-order IIR filter: reads input[-2..0] and
+   * output[-2..-1]; the caller provides the history. */
+  output[0] = input[0] * b[0]
+      + input[-1] * b[1] - output[-1] * a[1]
+      + input[-2] * b[2] - output[-2] * a[2];
+}
+
+/* Because butter_filter and yule_filter are inlined, this function is
+ * a bit blown-up (code-size wise), but not inlining gives a ca. 40%
+ * performance penalty. */
+
+static inline void
+apply_filters (const RgAnalysisCtx * ctx, const gfloat * input_l,
+    const gfloat * input_r, guint n_samples)
+{
+  /* Run the equal loudness filter (Yule stage followed by Butterworth
+   * stage) over n_samples of both channels, appending results to the
+   * current RMS window at offset ctx->window_n_samples_done. */
+  const gfloat *ayule = AYule[ctx->sample_rate_index];
+  const gfloat *byule = BYule[ctx->sample_rate_index];
+  const gfloat *abutter = AButter[ctx->sample_rate_index];
+  const gfloat *bbutter = BButter[ctx->sample_rate_index];
+  gint pos = ctx->window_n_samples_done;
+  gint i;
+
+  for (i = 0; i < n_samples; i++, pos++) {
+    /* The Butterworth stage consumes the Yule stage's output. */
+    yule_filter (input_l + i, ctx->step_l + pos, ayule, byule);
+    butter_filter (ctx->step_l + pos, ctx->out_l + pos, abutter, bbutter);
+
+    yule_filter (input_r + i, ctx->step_r + pos, ayule, byule);
+    butter_filter (ctx->step_r + pos, ctx->out_r + pos, abutter, bbutter);
+  }
+}
+
+/* Clear filter buffer state and current RMS window. */
+
+static void
+reset_filters (RgAnalysisCtx * ctx)
+{
+  gint i;
+
+  /* Only the first MAX_ORDER elements (the history area read through
+   * negative indices) need clearing; the rest of each buffer is
+   * overwritten before it is read. */
+  for (i = 0; i < MAX_ORDER; i++) {
+
+    ctx->inprebuf_l[i] = 0.;
+    ctx->stepbuf_l[i] = 0.;
+    ctx->outbuf_l[i] = 0.;
+
+    ctx->inprebuf_r[i] = 0.;
+    ctx->stepbuf_r[i] = 0.;
+    ctx->outbuf_r[i] = 0.;
+  }
+
+  /* Drop any partially accumulated RMS window. */
+  ctx->window_square_sum = 0.;
+  ctx->window_n_samples_done = 0;
+}
+
+/* Accumulator functions. */
+
+/* Add two accumulators in-place. The sum is defined as the result of
+ * the vector sum of the histogram array and the maximum value of the
+ * peak field. Thus "adding" the accumulators for all tracks yields
+ * the correct result for obtaining the album gain and peak. */
+
+static void
+accumulator_add (RgAnalysisAcc * acc, const RgAnalysisAcc * acc_other)
+{
+  gint i;
+
+  /* Histograms add element-wise; the peak is the maximum of the two. */
+  for (i = 0; i < G_N_ELEMENTS (acc->histogram); i++)
+    acc->histogram[i] += acc_other->histogram[i];
+
+  acc->peak = MAX (acc->peak, acc_other->peak);
+}
+
+/* Reset an accumulator to zero. */
+
+static void
+accumulator_clear (RgAnalysisAcc * acc)
+{
+  /* Zero histogram bins and reset the running peak. */
+  memset (acc->histogram, 0, sizeof (acc->histogram));
+  acc->peak = 0.;
+}
+
+/* Obtain final analysis result from an accumulator. Returns TRUE on
+ * success, FALSE on error (if accumulator is still zero). */
+
+static gboolean
+accumulator_result (const RgAnalysisAcc * acc, gdouble * result_gain,
+    gdouble * result_peak)
+{
+  guint32 sum = 0;
+  guint32 upper;
+  guint i;
+
+  /* Total number of accumulated RMS windows. */
+  for (i = 0; i < G_N_ELEMENTS (acc->histogram); i++)
+    sum += acc->histogram[i];
+
+  if (sum == 0)
+    /* All entries are 0: We got less than 50ms of data. */
+    return FALSE;
+
+  /* Number of windows making up the loudest (100 - RMS_PERCENTILE)
+   * percent; scan down from the top of the histogram until that many
+   * windows have been counted. */
+  upper = (guint32) ceil (sum * (1. - (gdouble) (RMS_PERCENTILE / 100.)));
+
+  for (i = G_N_ELEMENTS (acc->histogram); i--;) {
+    if (upper <= acc->histogram[i])
+      break;
+    upper -= acc->histogram[i];
+  }
+
+  if (result_peak != NULL)
+    *result_peak = acc->peak;
+  if (result_gain != NULL)
+    /* i indexes the percentile loudness bin; convert the bin back to dB
+     * and subtract from the pink noise calibration reference. */
+    *result_gain = PINK_REF - (gdouble) i / STEPS_PER_DB;
+
+  return TRUE;
+}
+
+/* Functions that operate on contexts, for external usage. */
+
+/* Create a new context. Before it can be used, a sample rate must be
+ * configured using rg_analysis_set_sample_rate. */
+
+RgAnalysisCtx *
+rg_analysis_new (void)
+{
+  RgAnalysisCtx *ctx;
+
+  /* g_new0 instead of g_new: without zeroing, post_message, analysis,
+   * buffer_timestamp, the window counters and the filter buffers hold
+   * indeterminate heap contents until rg_analysis_set_sample_rate /
+   * rg_analysis_init_silence_detection are called.  The analysis entry
+   * points already guard on ctx->sample_rate != 0, but deterministic
+   * contents make any accidental early use reproducible. */
+  ctx = g_new0 (RgAnalysisCtx, 1);
+
+  /* Point the working pointers MAX_ORDER elements into their backing
+   * buffers so the filters may read negative (history) indices. */
+  ctx->inpre_l = ctx->inprebuf_l + MAX_ORDER;
+  ctx->step_l = ctx->stepbuf_l + MAX_ORDER;
+  ctx->out_l = ctx->outbuf_l + MAX_ORDER;
+
+  ctx->inpre_r = ctx->inprebuf_r + MAX_ORDER;
+  ctx->step_r = ctx->stepbuf_r + MAX_ORDER;
+  ctx->out_r = ctx->outbuf_r + MAX_ORDER;
+
+  /* No rate configured yet; rg_analysis_set_sample_rate must be called
+   * before analyzing. */
+  ctx->sample_rate = 0;
+
+  /* Same initial state reset_silence_detection() establishes (it is
+   * defined further down and not yet visible here). */
+  ctx->buffer_timestamp = GST_CLOCK_TIME_NONE;
+  ctx->buffer_n_samples_done = 0;
+
+  accumulator_clear (&ctx->track);
+  accumulator_clear (&ctx->album);
+
+  return ctx;
+}
+
+static void
+reset_silence_detection (RgAnalysisCtx * ctx)
+{
+  /* Invalidate the buffer timestamp and per-buffer sample counter used
+   * for the per-window post_message callbacks. */
+  ctx->buffer_timestamp = GST_CLOCK_TIME_NONE;
+  ctx->buffer_n_samples_done = 0;
+}
+
+/* Adapt to given sample rate. Does nothing if already the current
+ * rate (returns TRUE then). Returns FALSE only if given sample rate
+ * is not supported. If the configured rate changes, the last
+ * unprocessed incomplete 50ms chunk of data is dropped because the
+ * filters are reset. */
+
+gboolean
+rg_analysis_set_sample_rate (RgAnalysisCtx * ctx, gint sample_rate)
+{
+  g_return_val_if_fail (ctx != NULL, FALSE);
+
+  if (ctx->sample_rate == sample_rate)
+    return TRUE;
+
+  /* The index selects the row in the AYule/BYule/AButter/BButter
+   * coefficient tables. */
+  switch (sample_rate) {
+    case 48000:
+      ctx->sample_rate_index = 0;
+      break;
+    case 44100:
+      ctx->sample_rate_index = 1;
+      break;
+    case 32000:
+      ctx->sample_rate_index = 2;
+      break;
+    case 24000:
+      ctx->sample_rate_index = 3;
+      break;
+    case 22050:
+      ctx->sample_rate_index = 4;
+      break;
+    case 16000:
+      ctx->sample_rate_index = 5;
+      break;
+    case 12000:
+      ctx->sample_rate_index = 6;
+      break;
+    case 11025:
+      ctx->sample_rate_index = 7;
+      break;
+    case 8000:
+      ctx->sample_rate_index = 8;
+      break;
+    default:
+      /* Unsupported rate; context state is left untouched. */
+      return FALSE;
+  }
+
+  ctx->sample_rate = sample_rate;
+  /* The + 999 has the effect of ceil()ing: */
+  ctx->window_n_samples = (guint) ((sample_rate * RMS_WINDOW_MSECS + 999)
+      / 1000);
+
+  /* Changing the rate invalidates filter history and the partial window. */
+  reset_filters (ctx);
+  reset_silence_detection (ctx);
+
+  return TRUE;
+}
+
+void
+rg_analysis_init_silence_detection (RgAnalysisCtx * ctx,
+    void (*post_message) (gpointer analysis, GstClockTime timestamp,
+        GstClockTime duration, gdouble rglevel), gpointer analysis)
+{
+  /* Install the callback invoked once per completed RMS window with that
+   * window's timestamp, duration and loudness level. */
+  ctx->post_message = post_message;
+  ctx->analysis = analysis;
+  reset_silence_detection (ctx);
+}
+
+void
+rg_analysis_start_buffer (RgAnalysisCtx * ctx, GstClockTime buffer_timestamp)
+{
+  /* Record the incoming buffer's timestamp; per-window timestamps are
+   * derived from it in rg_analysis_analyze. */
+  ctx->buffer_timestamp = buffer_timestamp;
+  ctx->buffer_n_samples_done = 0;
+}
+
+void
+rg_analysis_destroy (RgAnalysisCtx * ctx)
+{
+  /* The context owns no external resources; freeing the struct suffices. */
+  g_free (ctx);
+}
+
+/* Entry points for analyzing sample data in common raw data formats.
+ * The stereo format functions expect interleaved frames. It is
+ * possible to pass data in different formats for the same context,
+ * there are no restrictions. All functions have the same signature;
+ * the depth argument for the float functions is not variable and must
+ * be given the value 32. */
+
+void
+rg_analysis_analyze_mono_float (RgAnalysisCtx * ctx, gconstpointer data,
+    gsize size, guint depth)
+{
+  gfloat conv_samples[512];
+  const gfloat *samples = (gfloat *) data;
+  guint n_samples = size / sizeof (gfloat);
+  gint i;
+
+  /* depth is fixed at 32 for the float entry points. */
+  g_return_if_fail (depth == 32);
+  g_return_if_fail (size % sizeof (gfloat) == 0);
+
+  /* Process in 512-sample chunks through a scratch buffer. */
+  while (n_samples) {
+    gint n = MIN (n_samples, G_N_ELEMENTS (conv_samples));
+
+    n_samples -= n;
+    memcpy (conv_samples, samples, n * sizeof (gfloat));
+    for (i = 0; i < n; i++) {
+      /* Track the peak on the raw value, then scale to the +/-32768.0
+       * reference amplitude expected by rg_analysis_analyze. */
+      ctx->track.peak = MAX (ctx->track.peak, fabs (conv_samples[i]));
+      conv_samples[i] *= 32768.;
+    }
+    samples += n;
+    rg_analysis_analyze (ctx, conv_samples, NULL, n);
+  }
+}
+
+void
+rg_analysis_analyze_stereo_float (RgAnalysisCtx * ctx, gconstpointer data,
+    gsize size, guint depth)
+{
+  gfloat conv_samples_l[256];
+  gfloat conv_samples_r[256];
+  const gfloat *samples = (gfloat *) data;
+  guint n_frames = size / (sizeof (gfloat) * 2);
+  gint i;
+
+  /* depth is fixed at 32 for the float entry points. */
+  g_return_if_fail (depth == 32);
+  g_return_if_fail (size % (sizeof (gfloat) * 2) == 0);
+
+  /* De-interleave in 256-frame chunks, tracking the peak on the raw
+   * values and scaling to the +/-32768.0 reference amplitude. */
+  while (n_frames) {
+    gint n = MIN (n_frames, G_N_ELEMENTS (conv_samples_l));
+
+    n_frames -= n;
+    for (i = 0; i < n; i++) {
+      gfloat old_sample;
+
+      old_sample = samples[2 * i];
+      ctx->track.peak = MAX (ctx->track.peak, fabs (old_sample));
+      conv_samples_l[i] = old_sample * 32768.;
+
+      old_sample = samples[2 * i + 1];
+      ctx->track.peak = MAX (ctx->track.peak, fabs (old_sample));
+      conv_samples_r[i] = old_sample * 32768.;
+    }
+    samples += 2 * n;
+    rg_analysis_analyze (ctx, conv_samples_l, conv_samples_r, n);
+  }
+}
+
+void
+rg_analysis_analyze_mono_int16 (RgAnalysisCtx * ctx, gconstpointer data,
+    gsize size, guint depth)
+{
+  gfloat conv_samples[512];
+  gint32 peak_sample = 0;
+  const gint16 *samples = (gint16 *) data;
+  guint n_samples = size / sizeof (gint16);
+  /* Multiplier that left-aligns depth-bit samples to the full 16-bit
+   * range (1 for depth == 16). */
+  gint shift = 1 << (sizeof (gint16) * 8 - depth);
+  gint i;
+
+  g_return_if_fail (depth <= (sizeof (gint16) * 8));
+  g_return_if_fail (size % sizeof (gint16) == 0);
+
+  /* Process in 512-sample chunks through a scratch buffer. */
+  while (n_samples) {
+    gint n = MIN (n_samples, G_N_ELEMENTS (conv_samples));
+
+    n_samples -= n;
+    for (i = 0; i < n; i++) {
+      gint16 old_sample = samples[i] * shift;
+
+      peak_sample = MAX (peak_sample, ABS ((gint32) old_sample));
+      conv_samples[i] = (gfloat) old_sample;
+    }
+    samples += n;
+    rg_analysis_analyze (ctx, conv_samples, NULL, n);
+  }
+  /* Normalize the integer peak to full scale (1.0 = 0 dBFS). */
+  ctx->track.peak = MAX (ctx->track.peak,
+      (gdouble) peak_sample / ((gdouble) (1u << 15)));
+}
+
+void
+rg_analysis_analyze_stereo_int16 (RgAnalysisCtx * ctx, gconstpointer data,
+    gsize size, guint depth)
+{
+  gfloat conv_samples_l[256];
+  gfloat conv_samples_r[256];
+  gint32 peak_sample = 0;
+  const gint16 *samples = (gint16 *) data;
+  guint n_frames = size / (sizeof (gint16) * 2);
+  /* Multiplier that left-aligns depth-bit samples to the full 16-bit
+   * range (1 for depth == 16). */
+  gint shift = 1 << (sizeof (gint16) * 8 - depth);
+  gint i;
+
+  g_return_if_fail (depth <= (sizeof (gint16) * 8));
+  g_return_if_fail (size % (sizeof (gint16) * 2) == 0);
+
+  /* De-interleave in 256-frame chunks. */
+  while (n_frames) {
+    gint n = MIN (n_frames, G_N_ELEMENTS (conv_samples_l));
+
+    n_frames -= n;
+    for (i = 0; i < n; i++) {
+      gint16 old_sample;
+
+      old_sample = samples[2 * i] * shift;
+      peak_sample = MAX (peak_sample, ABS ((gint32) old_sample));
+      conv_samples_l[i] = (gfloat) old_sample;
+
+      old_sample = samples[2 * i + 1] * shift;
+      peak_sample = MAX (peak_sample, ABS ((gint32) old_sample));
+      conv_samples_r[i] = (gfloat) old_sample;
+    }
+    samples += 2 * n;
+    rg_analysis_analyze (ctx, conv_samples_l, conv_samples_r, n);
+  }
+  /* Normalize the integer peak to full scale (1.0 = 0 dBFS). */
+  ctx->track.peak = MAX (ctx->track.peak,
+      (gdouble) peak_sample / ((gdouble) (1u << 15)));
+}
+
+/* Analyze the given chunk of samples. The sample data is given in
+ * floating point format but should be scaled such that the values
+ * +/-32768.0 correspond to the -0dBFS reference amplitude.
+ *
+ * samples_l: Buffer with sample data for the left channel or of the
+ * mono channel.
+ *
+ * samples_r: Buffer with sample data for the right channel or NULL
+ * for mono.
+ *
+ * n_samples: Number of samples passed in each buffer.
+ */
+
+void
+rg_analysis_analyze (RgAnalysisCtx * ctx, const gfloat * samples_l,
+    const gfloat * samples_r, guint n_samples)
+{
+  const gfloat *input_l, *input_r;
+  guint n_samples_done;
+  gint i;
+
+  g_return_if_fail (ctx != NULL);
+  g_return_if_fail (samples_l != NULL);
+  /* rg_analysis_set_sample_rate must have succeeded before analyzing. */
+  g_return_if_fail (ctx->sample_rate != 0);
+
+  if (n_samples == 0)
+    return;
+
+  if (samples_r == NULL)
+    /* Mono. */
+    samples_r = samples_l;
+
+  /* Copy the head of the new data behind the saved filter history, so the
+   * first MAX_ORDER samples are read from inpre_* whose negative indices
+   * hold the tail of the previous chunk. */
+  memcpy (ctx->inpre_l, samples_l,
+      MIN (n_samples, MAX_ORDER) * sizeof (gfloat));
+  memcpy (ctx->inpre_r, samples_r,
+      MIN (n_samples, MAX_ORDER) * sizeof (gfloat));
+
+  n_samples_done = 0;
+  while (n_samples_done < n_samples) {
+    /* Limit number of samples to be processed in this iteration to
+     * the number needed to complete the next window: */
+    guint n_samples_current = MIN (n_samples - n_samples_done,
+        ctx->window_n_samples - ctx->window_n_samples_done);
+
+    if (n_samples_done < MAX_ORDER) {
+      /* Still inside the history-backed prefix. */
+      input_l = ctx->inpre_l + n_samples_done;
+      input_r = ctx->inpre_r + n_samples_done;
+      n_samples_current = MIN (n_samples_current, MAX_ORDER - n_samples_done);
+    } else {
+      input_l = samples_l + n_samples_done;
+      input_r = samples_r + n_samples_done;
+    }
+
+    apply_filters (ctx, input_l, input_r, n_samples_current);
+
+    /* Update the square sum. */
+    for (i = 0; i < n_samples_current; i++)
+      ctx->window_square_sum += ctx->out_l[ctx->window_n_samples_done + i]
+          * ctx->out_l[ctx->window_n_samples_done + i]
+          + ctx->out_r[ctx->window_n_samples_done + i]
+          * ctx->out_r[ctx->window_n_samples_done + i];
+
+    ctx->window_n_samples_done += n_samples_current;
+    ctx->buffer_n_samples_done += n_samples_current;
+
+    g_return_if_fail (ctx->window_n_samples_done <= ctx->window_n_samples);
+
+    if (ctx->window_n_samples_done == ctx->window_n_samples) {
+      /* Get the Root Mean Square (RMS) for this set of samples. */
+      gdouble val = STEPS_PER_DB * 10. * log10 (ctx->window_square_sum /
+          ctx->window_n_samples * 0.5 + 1.e-37);
+      gint ival = CLAMP ((gint) val, 0,
+          (gint) G_N_ELEMENTS (ctx->track.histogram) - 1);
+      /* Compute the per-window gain */
+      const gdouble gain = PINK_REF - (gdouble) ival / STEPS_PER_DB;
+      /* Timestamp of the start of the window that just completed. */
+      const GstClockTime timestamp = ctx->buffer_timestamp
+          + gst_util_uint64_scale_int_ceil (GST_SECOND,
+          ctx->buffer_n_samples_done,
+          ctx->sample_rate)
+          - RMS_WINDOW_MSECS * GST_MSECOND;
+
+      /* NOTE(review): post_message is invoked without a NULL check, so
+       * rg_analysis_init_silence_detection() must have been called before
+       * any data is analyzed -- confirm all callers guarantee this. */
+      ctx->post_message (ctx->analysis, timestamp,
+          RMS_WINDOW_MSECS * GST_MSECOND, -gain);
+
+
+      ctx->track.histogram[ival]++;
+      ctx->window_square_sum = 0.;
+      ctx->window_n_samples_done = 0;
+
+      /* No need for memmove here, the areas never overlap: Even for
+       * the smallest sample rate, the number of samples needed for
+       * the window is greater than MAX_ORDER. */
+
+      memcpy (ctx->stepbuf_l, ctx->stepbuf_l + ctx->window_n_samples,
+          MAX_ORDER * sizeof (gfloat));
+      memcpy (ctx->outbuf_l, ctx->outbuf_l + ctx->window_n_samples,
+          MAX_ORDER * sizeof (gfloat));
+
+      memcpy (ctx->stepbuf_r, ctx->stepbuf_r + ctx->window_n_samples,
+          MAX_ORDER * sizeof (gfloat));
+      memcpy (ctx->outbuf_r, ctx->outbuf_r + ctx->window_n_samples,
+          MAX_ORDER * sizeof (gfloat));
+    }
+
+    n_samples_done += n_samples_current;
+  }
+
+  /* Save the last MAX_ORDER input samples as history for the next call. */
+  if (n_samples >= MAX_ORDER) {
+
+    memcpy (ctx->inprebuf_l, samples_l + n_samples - MAX_ORDER,
+        MAX_ORDER * sizeof (gfloat));
+
+    memcpy (ctx->inprebuf_r, samples_r + n_samples - MAX_ORDER,
+        MAX_ORDER * sizeof (gfloat));
+
+  } else {
+
+    /* Fewer new samples than the filter order: shift the old history
+     * down and append all of the new data behind it. */
+    memmove (ctx->inprebuf_l, ctx->inprebuf_l + n_samples,
+        (MAX_ORDER - n_samples) * sizeof (gfloat));
+    memcpy (ctx->inprebuf_l + MAX_ORDER - n_samples, samples_l,
+        n_samples * sizeof (gfloat));
+
+    memmove (ctx->inprebuf_r, ctx->inprebuf_r + n_samples,
+        (MAX_ORDER - n_samples) * sizeof (gfloat));
+    memcpy (ctx->inprebuf_r + MAX_ORDER - n_samples, samples_r,
+        n_samples * sizeof (gfloat));
+
+  }
+}
+
+/* Obtain track gain and peak. Returns TRUE on success. Can fail if
+ * not enough samples have been processed. Updates album accumulator.
+ * Resets track accumulator. */
+
+gboolean
+rg_analysis_track_result (RgAnalysisCtx * ctx, gdouble * gain, gdouble * peak)
+{
+  gboolean result;
+
+  g_return_val_if_fail (ctx != NULL, FALSE);
+
+  /* Fold the finished track into the album accumulator before reading
+   * and clearing the track accumulator. */
+  accumulator_add (&ctx->album, &ctx->track);
+  result = accumulator_result (&ctx->track, gain, peak);
+  accumulator_clear (&ctx->track);
+
+  /* Start the next track with clean filter history. */
+  reset_filters (ctx);
+  reset_silence_detection (ctx);
+
+  return result;
+}
+
+/* Obtain album gain and peak. Returns TRUE on success. Can fail if
+ * not enough samples have been processed. Resets album
+ * accumulator. */
+
+gboolean
+rg_analysis_album_result (RgAnalysisCtx * ctx, gdouble * gain, gdouble * peak)
+{
+  gboolean result;
+
+  g_return_val_if_fail (ctx != NULL, FALSE);
+
+  /* Read and then clear the album accumulator. */
+  result = accumulator_result (&ctx->album, gain, peak);
+  accumulator_clear (&ctx->album);
+
+  return result;
+}
+
+void
+rg_analysis_reset_album (RgAnalysisCtx * ctx)
+{
+  /* Discard accumulated album data without touching the track state. */
+  accumulator_clear (&ctx->album);
+}
+
+/* Reset internal buffers as well as track and album accumulators.
+ * Configured sample rate is kept intact. */
+
+void
+rg_analysis_reset (RgAnalysisCtx * ctx)
+{
+  g_return_if_fail (ctx != NULL);
+
+  /* Clear filter history, both accumulators and the silence detection
+   * state; the configured sample rate is kept. */
+  reset_filters (ctx);
+  accumulator_clear (&ctx->track);
+  accumulator_clear (&ctx->album);
+  reset_silence_detection (ctx);
+}
diff --git a/gst/replaygain/rganalysis.h b/gst/replaygain/rganalysis.h
new file mode 100644
index 0000000000..67e915017b
--- /dev/null
+++ b/gst/replaygain/rganalysis.h
@@ -0,0 +1,65 @@
+/* GStreamer ReplayGain analysis
+ *
+ * Copyright (C) 2006 Rene Stadler <mail@renestadler.de>
+ * Copyright (C) 2001 David Robinson <David@Robinson.org>
+ * Glen Sawyer <glensawyer@hotmail.com>
+ *
+ * rganalysis.h: Analyze raw audio data in accordance with ReplayGain
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful, but
+ * WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
+ * 02110-1301 USA
+ */
+
+#ifndef __RG_ANALYSIS_H__
+#define __RG_ANALYSIS_H__
+
+#include <glib.h>
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef struct _RgAnalysisCtx RgAnalysisCtx;
+
+RgAnalysisCtx *rg_analysis_new (void);
+gboolean rg_analysis_set_sample_rate (RgAnalysisCtx * ctx, gint sample_rate);
+void rg_analysis_analyze_mono_float (RgAnalysisCtx * ctx, gconstpointer data,
+ gsize size, guint depth);
+void rg_analysis_analyze_stereo_float (RgAnalysisCtx * ctx, gconstpointer data,
+ gsize size, guint depth);
+void rg_analysis_analyze_mono_int16 (RgAnalysisCtx * ctx, gconstpointer data,
+ gsize size, guint depth);
+void rg_analysis_analyze_stereo_int16 (RgAnalysisCtx * ctx, gconstpointer data,
+ gsize size, guint depth);
+void rg_analysis_analyze (RgAnalysisCtx * ctx, const gfloat * samples_l,
+ const gfloat * samples_r, guint n_samples);
+gboolean rg_analysis_track_result (RgAnalysisCtx * ctx, gdouble * gain,
+ gdouble * peak);
+gboolean rg_analysis_album_result (RgAnalysisCtx * ctx, gdouble * gain,
+ gdouble * peak);
+void rg_analysis_init_silence_detection (
+ RgAnalysisCtx * ctx,
+ void (*post_message) (gpointer analysis, GstClockTime timestamp, GstClockTime duration, gdouble rglevel),
+ gpointer analysis);
+void rg_analysis_start_buffer (RgAnalysisCtx * ctx,
+ GstClockTime buffer_timestamp);
+void rg_analysis_reset_album (RgAnalysisCtx * ctx);
+void rg_analysis_reset (RgAnalysisCtx * ctx);
+void rg_analysis_destroy (RgAnalysisCtx * ctx);
+
+GST_ELEMENT_REGISTER_DECLARE (rganalysis);
+
+G_END_DECLS
+
+#endif /* __RG_ANALYSIS_H__ */
diff --git a/gst/rtp/README b/gst/rtp/README
new file mode 100644
index 0000000000..a518598d41
--- /dev/null
+++ b/gst/rtp/README
@@ -0,0 +1,398 @@
+This directory contains some RTP payloaders/depayloaders for different payload
+types. Use one payloader/depayloader pair per payload. If several payloads can be
+payloaded/depayloaded by the same element, make different copies of it, one for
+each payload.
+
+The application/x-rtp mime type
+-------------------------------
+
+For valid RTP packets encapsulated in GstBuffers, we use the caps with
+mime type application/x-rtp.
+
+The following fields can or must (*) be specified in the structure:
+
+ * media: (String) [ "audio", "video", "application", "data", "control" ]
+ Defined in RFC 2327 in the SDP media announcement field.
+ Converted to lower case.
+
+ * payload: (int) [0, 127]
+ For audio and video, these will normally be a media payload type as
+ defined in the RTP Audio/Video Profile. For dynamically allocated
+ payload types, this value will be >= 96 and the encoding-name must be
+ set.
+
+ * clock-rate: (int) [0 - MAXINT]
+ The RTP clock rate.
+
+ encoding-name: (String) ANY
+ typically second part of the mime type. ex. MP4V-ES. only required if
+ payload type >= 96. Converted to upper case.
+
+ encoding-params: (String) ANY
+ extra encoding parameters (as in the SDP a=rtpmap: field). only required
+ if different from the default of the encoding-name.
+ Converted to lower-case.
+
+ ssrc: (uint) [0 - MAXINT]
+ The ssrc value currently in use. (default = the SSRC of the first RTP
+ packet)
+
+ timestamp-offset: (uint) [0 - MAXINT]
+ The RTP time representing time npt-start. (default = rtptime of first RTP
+ packet).
+
+ seqnum-offset: (uint) [0 - MAXINT]
+ The RTP sequence number representing the first rtp packet. When this
+ parameter is given, all sequence numbers below this seqnum should be
+ ignored. (default = seqnum of first RTP packet).
+
+ npt-start: (uint64) [0 - MAXINT]
+ The Normal Play Time for clock-base. This is the position in the stream and
+ is between 0 and the duration of the stream. This value is expressed in
+ nanoseconds GstClockTime. (default = 0)
+
+ npt-stop: (uint64) [0 - MAXINT]
+ The last position in the stream. This value is expressed in nanoseconds
+ GstClockTime. (default = -1, stop unknown)
+
+ play-speed: (gdouble) [-MIN - MAX]
+ The intended playback speed of the stream. The client is delivered data at
+ the adjusted speed. The client should adjust its playback speed with this
+ value and thus corresponds to the GStreamer rate field in the NEWSEGMENT
+ event. (default = 1.0)
+
+ play-scale: (gdouble) [-MIN - MAX]
+ The rate already applied to the stream. The client is delivered a stream
+ that is scaled by this amount. This value is used to adjust position
+ reporting and corresponds to the GStreamer applied-rate field in the
+ NEWSEGMENT event. (default = 1.0)
+
+ maxptime: (uint) [0, MAX]
+ The maxptime as defined in RFC 4566, this defines the maximum size of a
+ packet. It overrides the max-ptime property of payloaders.
+
+ Optional parameters as key/value pairs, media type specific. The value type
+ should be of type G_TYPE_STRING. The key is converted to lower-case. The
+ value is left in its original case.
+ A parameter with no value is converted to <param>=1.
+
+ Example:
+
+ "application/x-rtp",
+ "media", G_TYPE_STRING, "audio", -.
+ "payload", G_TYPE_INT, 96, | - required
+ "clock-rate", G_TYPE_INT, 8000, -'
+ "encoding-name", G_TYPE_STRING, "AMR", -. - required since payload >= 96
+ "encoding-params", G_TYPE_STRING, "1", -' - optional param for AMR
+ "octet-align", G_TYPE_STRING, "1", -.
+ "crc", G_TYPE_STRING, "0", |
+ "robust-sorting", G_TYPE_STRING, "0", | AMR specific params.
+ "interleaving", G_TYPE_STRING, "0", -'
+
+ Mapping of caps to and from SDP fields:
+
+ m=<media> <udp port> RTP/AVP <payload> -] media and payload from caps
+ a=rtpmap:<payload> <encoding-name>/<clock-rate>[/<encoding-params>]
+ -> when <payload> >= 96
+ a=fmtp:<payload> <param>=<value>;...
+
+ For above caps:
+
+ m=audio <udp port> RTP/AVP 96
+ a=rtpmap:96 AMR/8000/1
+ a=fmtp:96 octet-align=1;crc=0;robust-sorting=0;interleaving=0
+
+ Attributes are converted as follows:
+
+ IANA registered attribute names are prepended with 'a-' before putting them in
+ the caps. Unregistered keys (starting with 'x-') are copied directly into the
+ caps.
+
+ in RTSP, the SSRC is also sent.
+
+ The optional parameters in the SDP fields are case insensitive. In the caps we
+ always use the lowercase names so that the SDP -> caps mapping remains
+ possible.
+
+ Mapping of caps to NEWSEGMENT:
+
+ rate: <play-speed>
+ applied-rate: <play-scale>
+ format: GST_FORMAT_TIME
+ start: <clock-base> * GST_SECOND / <clock-rate>
+ stop: if <npt-stop> != -1
+ <npt-stop> - <npt-start> + start
+ else
+ -1
+ time: <npt-start>
+
+
+Timestamping
+------------
+
+RTP in GStreamer uses a combination of the RTP timestamps and GStreamer buffer
+timestamps to ensure proper synchronisation at the sender and the receiver end.
+
+In RTP applications, the synchronisation is most complex at the receiver side.
+
+At the sender side, the RTP timestamps are generated in the payloaders based on
+GStreamer timestamps. At the receiver, GStreamer timestamps are reconstructed
+from the RTP timestamps and the GStreamer timestamps in the jitterbuffer. This
+process is explained in more detail below.
+
+= synchronisation at the sender
+
+Individual streams at the sender are synchronised using GStreamer timestamps.
+The payloader at the sender will convert the GStreamer timestamp into an RTP
+timestamp using the following formula:
+
+ RTP = ((RT - RT-base) * clock-rate / GST_SECOND) + RTP-offset
+
+ RTP: the RTP timestamp for the stream. This value is truncated to
+ 32 bits.
+ RT: the GStreamer running time corresponding to the timestamp of the
+ packet to payload
+ RT-base: the GStreamer running time of the first packet encoded
+ clock-rate: the clock-rate of the stream
+ RTP-offset: a random RTP offset
+
+The RTP timestamp corresponding to RT-base is the clock-base (see caps above).
+
+In addition to setting an RTP timestamp in the RTP packet, the payloader is also
+responsible for putting the GStreamer timestamp on the resulting output buffer.
+This timestamp is used for further synchronisation at the sender pipeline, such
+as for sending out the packet on the network.
+
+Notice that the absolute timing information is lost; if the sender is sending
+multiple streams, the RTP timestamps in the packets do not contain enough
+information to synchronize them in the receiver. The receiver can however use
+the RTP timestamps to reconstruct the timing of the stream as it was created by
+the sender according to the sender's clock.
+
+Because the payloaded packet contains both an RTP timestamp and a GStreamer
+timestamp, it is possible for an RTP session manager to derive the relation
+between the RTP and GST timestamps. This information is used by a session
+manager to create SR reports. The NTP time in the report will contain the
+running time converted to NTP time and the corresponding RTP timestamp.
+
+Note that at the sender side, the RTP and GStreamer timestamp both increment at
+the same rate, the sender rate. This rate depends on the global pipeline clock
+of the sender.
+
+Some pipelines to illustrate the process:
+
+ gst-launch-1.0 v4l2src ! videoconvert ! avenc_h263p ! rtph263ppay ! udpsink
+
+ v4l2src puts a GStreamer timestamp on the video frames based on the current
+ running_time. The encoder encodes and passes the timestamp on. The payloader
+ generates an RTP timestamp using the above formula and puts it in the RTP
+ packet. It also copies the incoming GStreamer timestamp on the output RTP
+ packet. udpsink synchronizes on the gstreamer timestamp before pushing out the
+ packet.
+
+
+= synchronisation at the receiver
+
+The receiver is responsible for timestamping the received RTP packet with the
+running_time of the clock at the time the packet was received. This GStreamer
+timestamp reflects the receiver rate and depends on the global pipeline clock of
+the receiver. The gstreamer timestamp of the received RTP packet contains a
+certain amount of jitter introduced by the network.
+
+The most simple option for the receiver is to depayload the RTP packet and play
+it back as soon as possible, this is with the timestamp when it was received
+from the network. For the above sender pipeline this would be done with the
+following pipeline:
+
+ gst-launch-1.0 udpsrc caps="application/x-rtp, media=(string)video,
+ clock-rate=(int)90000, encoding-name=(string)H263-1998" ! rtph263pdepay !
+ avdec_h263 ! autovideosink
+
+It is important that the depayloader copies the incoming GStreamer timestamp
+directly to the depayloaded output buffer. It should never attempt to perform
+any logic with the RTP timestamp, this task is for the jitterbuffer as we will
+see next.
+
+The above pipeline does not attempt to deal with reordered packets or network
+jitter, which could result in jerky playback in the case of high jitter or
+corrupted video in the case of packet loss or reordering. This functionality is
+performed by the gstrtpjitterbuffer in GStreamer.
+
+The task of the gstrtpjitterbuffer element is to:
+
+ - deal with reordered packets based on the seqnum
+ - calculate the drift between the sender and receiver clocks using the
+ GStreamer timestamps (receiver clock rate) and RTP timestamps (sender clock
+ rate).
+
+To deal with reordered packet, the jitterbuffer holds on to the received RTP
+packets in a queue for a configurable amount of time, called the latency.
+
+The jitterbuffer also eliminates network jitter and then tracks the drift
+between the local clock (as expressed in the GStreamer timestamps) and the
+remote clock (as expressed in the RTP timestamps). It will remove the jitter
+and will apply the drift correction to the GStreamer timestamp before pushing
+the buffer downstream. The result is that the depayloader receives a smoothed
+GStreamer timestamp on the RTP packet, which is copied to the depayloaded data.
+
+The following pipeline illustrates a receiver with a jitterbuffer.
+
+ gst-launch-1.0 udpsrc caps="application/x-rtp, media=(string)video,
+ clock-rate=(int)90000, encoding-name=(string)H263-1998" !
+ rtpjitterbuffer latency=100 ! rtph263pdepay ! avdec_h263 ! autovideosink
+
+The latency property on the jitterbuffer controls the amount of delay (in
+milliseconds) to apply to the outgoing packets. A higher latency will produce
+smoother playback in networks with high jitter but cause a higher latency.
+Choosing a good value for the latency is a tradeoff between the quality and
+latency. The better the network, the lower the latency can be set.
+
+
+usage with UDP
+--------------
+
+To correctly and completely use the RTP payloaders on the sender and the
+receiver you need to write an application. It is not possible to write a full
+blown RTP server with a single gst-launch-1.0 line.
+
+That said, it is possible to do something functional with a few gst-launch
+lines. The biggest problem when constructing a correct gst-launch-1.0 line lies at
+the receiver end.
+
+The receiver needs to know about the type of the RTP data along with a set of
+RTP configuration parameters. This information is usually transmitted to the
+client using some sort of session description language (SDP) over some reliable
+channel (HTTP/RTSP/...).
+
+All of the required parameters to connect and use the RTP session on the
+server can be found in the caps on the server end. The client receives this
+information in some way (caps are converted to and from SDP, as explained above,
+for example).
+
+Some gst-launch-1.0 lines:
+
+ gst-launch-1.0 -v videotestsrc ! videoconvert ! avenc_h263p ! rtph263ppay ! udpsink
+
+ Setting pipeline to PAUSED ...
+ /pipeline0/videotestsrc0.src: caps = video/x-raw, format=(string)I420,
+ width=(int)320, height=(int)240, framerate=(fraction)30/1
+ Pipeline is PREROLLING ...
+ ....
+ /pipeline0/udpsink0.sink: caps = application/x-rtp, media=(string)video,
+ payload=(int)96, clock-rate=(int)90000, encoding-name=(string)H263-1998,
+ ssrc=(guint)527842345, clock-base=(guint)1150776941, seqnum-base=(guint)30982
+ ....
+ Pipeline is PREROLLED ...
+ Setting pipeline to PLAYING ...
+ New clock: GstSystemClock
+
+ Write down the caps on the udpsink and set them as the caps of the UDP
+ receiver:
+
+ gst-launch-1.0 -v udpsrc caps="application/x-rtp, media=(string)video,
+ payload=(int)96, clock-rate=(int)90000, encoding-name=(string)H263-1998,
+ ssrc=(guint)527842345, clock-base=(guint)1150776941, seqnum-base=(guint)30982"
+ ! rtph263pdepay ! avdec_h263 ! autovideosink
+
+ The receiver now displays an h263 image. Since there is no jitterbuffer in the
+ pipeline, frames will be displayed at the time when they are received. This can
+ result in jerky playback in the case of high network jitter or corrupted video
+ when packets are dropped or reordered.
+
+ Stream a quicktime file with mpeg4 video and AAC audio on port 5000 and port
+ 5002.
+
+ gst-launch-1.0 -v filesrc location=~/data/sincity.mp4 ! qtdemux name=d ! queue ! rtpmp4vpay ! udpsink port=5000
+ d. ! queue ! rtpmp4gpay ! udpsink port=5002
+ ....
+ /pipeline0/udpsink0.sink: caps = application/x-rtp, media=(string)video,
+ payload=(int)96, clock-rate=(int)90000, encoding-name=(string)MP4V-ES,
+ ssrc=(guint)1162703703, clock-base=(guint)816135835, seqnum-base=(guint)9294,
+ profile-level-id=(string)3, config=(string)000001b003000001b50900000100000001200086c5d4c307d314043c1463000001b25876694430303334
+ /pipeline0/udpsink1.sink: caps = application/x-rtp, media=(string)audio,
+ payload=(int)96, clock-rate=(int)44100, encoding-name=(string)MPEG4-GENERIC,
+ ssrc=(guint)3246149898, clock-base=(guint)4134514058, seqnum-base=(guint)57633,
+ encoding-params=(string)2, streamtype=(string)5, profile-level-id=(string)1,
+ mode=(string)aac-hbr, config=(string)1210, sizelength=(string)13,
+ indexlength=(string)3, indexdeltalength=(string)3
+ ....
+
+ Again copy the caps on both sinks to the receiver launch line
+
+ gst-launch-1.0
+ udpsrc port=5000 caps="application/x-rtp, media=(string)video, payload=(int)96,
+ clock-rate=(int)90000, encoding-name=(string)MP4V-ES, ssrc=(guint)1162703703,
+ clock-base=(guint)816135835, seqnum-base=(guint)9294, profile-level-id=(string)3,
+ config=(string)000001b003000001b50900000100000001200086c5d4c307d314043c1463000001b25876694430303334"
+ ! rtpmp4vdepay ! ffdec_mpeg4 ! autovideosink sync=false
+ udpsrc port=5002 caps="application/x-rtp, media=(string)audio, payload=(int)96,
+ clock-rate=(int)44100, encoding-name=(string)MPEG4-GENERIC, ssrc=(guint)3246149898,
+ clock-base=(guint)4134514058, seqnum-base=(guint)57633, encoding-params=(string)2,
+ streamtype=(string)5, profile-level-id=(string)1, mode=(string)aac-hbr,
+ config=(string)1210, sizelength=(string)13, indexlength=(string)3,
+ indexdeltalength=(string)3"
+ ! rtpmp4gdepay ! faad ! alsasink sync=false
+
+ The caps on the udpsinks can be retrieved when the server pipeline prerolled to
+ PAUSED.
+
+ The above pipeline sets sync=false on the audio and video sink which means that
+ no synchronisation will be performed in the sinks, they play the data when it
+ arrives. If you want to enable synchronisation in the sinks it is highly
+ recommended to use a gstrtpjitterbuffer after the udpsrc elements.
+
+ Even when sync is enabled, the two different streams will not play synchronised
+ against each other because the receiver does not have enough information to
+ perform this task. For this you need to add the rtpbin element in both the
+ sender and receiver pipeline and use additional sources and sinks to transmit
+ RTCP packets used for inter-stream synchronisation.
+
+ The caps on the receiver side can be set on the UDP source elements when the
+ pipeline went to PAUSED. In that state no data is received from the UDP sources
+ as they are live sources and only produce data in PLAYING.
+
+
+Relevant RFCs
+-------------
+
+3550 RTP: A Transport Protocol for Real-Time Applications. ( 1889 Obsolete )
+
+2198 RTP Payload for Redundant Audio Data.
+3119 A More Loss-Tolerant RTP Payload Format for MP3 Audio.
+
+2793 RTP Payload for Text Conversation.
+
+2032 RTP Payload Format for H.261 Video Streams.
+2190 RTP Payload Format for H.263 Video Streams.
+2250 RTP Payload Format for MPEG1/MPEG2 Video.
+2343 RTP Payload Format for Bundled MPEG.
+2429 RTP Payload Format for the 1998 Version of ITU-T Rec. H.263 Video
+2431 RTP Payload Format for BT.656 Video Encoding.
+2435 RTP Payload Format for JPEG-compressed Video.
+3016 RTP Payload Format for MPEG-4 Audio/Visual Streams.
+3047 RTP Payload Format for ITU-T Recommendation G.722.1.
+3189 RTP Payload Format for DV (IEC 61834) Video.
+3190 RTP Payload Format for 12-bit DAT Audio and 20- and 24-bit Linear Sampled Audio.
+3389 Real-time Transport Protocol (RTP) Payload for Comfort Noise (CN)
+2733 An RTP Payload Format for Generic Forward Error Correction.
+2833 RTP Payload for DTMF Digits, Telephony Tones and Telephony
+ Signals.
+2862 RTP Payload Format for Real-Time Pointers.
+3351 RTP Profile for Audio and Video Conferences with Minimal Control. ( 1890 Obsolete )
+3555 MIME Type Registration of RTP Payload Formats.
+
+2508 Compressing IP/UDP/RTP Headers for Low-Speed Serial Links.
+1305 Network Time Protocol (Version 3) Specification, Implementation and Analysis.
+3339 Date and Time on the Internet: Timestamps.
+2246 The TLS Protocol Version 1.0
+3546 Transport Layer Security (TLS) Extensions. ( Updates 2246 )
+
+do we care?
+-----------
+
+2029 RTP Payload Format of Sun's CellB Video Encoding.
+
+useful
+------
+
+http://www.iana.org/assignments/rtp-parameters
diff --git a/gst/rtp/TODO b/gst/rtp/TODO
new file mode 100644
index 0000000000..8065a9ce31
--- /dev/null
+++ b/gst/rtp/TODO
@@ -0,0 +1,15 @@
+* MPEG4 header
+ - ffmpeg mpeg4 decoder gives error message when sending only the config
+ string, parsing is OK, error just means no picture was found in the
+ stream.
+
+* compare H263 encoders and H263+
+
+* better RTP packetizing for h263
+
+* bitrate tuning in ffmpeg
+ - fixed the qmax values so we can quantize more.
+
+* make ffmpeg negotiate only with accepted framerates
+
+
diff --git a/gst/rtp/dboolhuff.LICENSE b/gst/rtp/dboolhuff.LICENSE
new file mode 100644
index 0000000000..83e4e6f6d7
--- /dev/null
+++ b/gst/rtp/dboolhuff.LICENSE
@@ -0,0 +1,29 @@
+Copyright (c) 2010, Google Inc. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in
+ the documentation and/or other materials provided with the
+ distribution.
+
+ * Neither the name of Google nor the names of its contributors may
+ be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/gst/rtp/dboolhuff.c b/gst/rtp/dboolhuff.c
new file mode 100644
index 0000000000..3d0fc8f040
--- /dev/null
+++ b/gst/rtp/dboolhuff.c
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the dboolhuff.LICENSE file in this directory.
+ * See the libvpx original distribution for more information,
+ * including patent information, and author information.
+ */
+
+
+#include "dboolhuff.h"
+
+#ifdef _MSC_VER
+__declspec (align (16))
+ const unsigned char vp8_norm[256] = {
+#else
+const unsigned char vp8_norm[256] __attribute__ ((aligned (16))) = {
+#endif
+0, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
+
+int
+vp8dx_start_decode (BOOL_DECODER * br,
+ const unsigned char *source, unsigned int source_sz)
+{
+ br->user_buffer_end = source + source_sz;
+ br->user_buffer = source;
+ br->value = 0;
+ br->count = -8;
+ br->range = 255;
+
+ if (source_sz && !source)
+ return 1;
+
+ /* Populate the buffer */
+ vp8dx_bool_decoder_fill (br);
+
+ return 0;
+}
+
+
+void
+vp8dx_bool_decoder_fill (BOOL_DECODER * br)
+{
+ const unsigned char *bufptr;
+ const unsigned char *bufend;
+ VP8_BD_VALUE value;
+ int count;
+ bufend = br->user_buffer_end;
+ bufptr = br->user_buffer;
+ value = br->value;
+ count = br->count;
+
+ VP8DX_BOOL_DECODER_FILL (count, value, bufptr, bufend);
+
+ br->user_buffer = bufptr;
+ br->value = value;
+ br->count = count;
+}
diff --git a/gst/rtp/dboolhuff.h b/gst/rtp/dboolhuff.h
new file mode 100644
index 0000000000..e0a45a2236
--- /dev/null
+++ b/gst/rtp/dboolhuff.h
@@ -0,0 +1,155 @@
+/*
+ * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the dboolhuff.LICENSE file in this directory.
+ * See the libvpx original distribution for more information,
+ * including patent information, and author information.
+ */
+
+
+#ifndef DBOOLHUFF_H
+#define DBOOLHUFF_H
+#include <stddef.h>
+#include <limits.h>
+#include <glib.h>
+
+typedef size_t VP8_BD_VALUE;
+
+# define VP8_BD_VALUE_SIZE ((int)sizeof(VP8_BD_VALUE)*CHAR_BIT)
+/*This is meant to be a large, positive constant that can still be efficiently
+ loaded as an immediate (on platforms like ARM, for example).
+ Even relatively modest values like 100 would work fine.*/
+# define VP8_LOTS_OF_BITS (0x40000000)
+
+typedef struct
+{
+ const unsigned char *user_buffer_end;
+ const unsigned char *user_buffer;
+ VP8_BD_VALUE value;
+ int count;
+ unsigned int range;
+} BOOL_DECODER;
+
+#ifdef _MSC_VER
+__declspec(align(16)) extern const unsigned char vp8_norm[256];
+#else
+extern const unsigned char vp8_norm[256] __attribute__((aligned(16)));
+#endif
+
+int vp8dx_start_decode(BOOL_DECODER *br,
+ const unsigned char *source,
+ unsigned int source_sz);
+
+void vp8dx_bool_decoder_fill(BOOL_DECODER *br);
+
+/*The refill loop is used in several places, so define it in a macro to make
+ sure they're all consistent.
+ An inline function would be cleaner, but has a significant penalty, because
+ multiple BOOL_DECODER fields must be modified, and the compiler is not smart
+ enough to eliminate the stores to those fields and the subsequent reloads
+ from them when inlining the function.*/
+#define VP8DX_BOOL_DECODER_FILL(_count,_value,_bufptr,_bufend) \
+ do \
+ { \
+ int shift = VP8_BD_VALUE_SIZE - 8 - ((_count) + 8); \
+ int loop_end, x; \
+ size_t bits_left = ((_bufend)-(_bufptr))*CHAR_BIT; \
+ \
+ x = shift + CHAR_BIT - bits_left; \
+ loop_end = 0; \
+ if(x >= 0) \
+ { \
+ (_count) += VP8_LOTS_OF_BITS; \
+ loop_end = x; \
+ if(!bits_left) break; \
+ } \
+ while(shift >= loop_end) \
+ { \
+ (_count) += CHAR_BIT; \
+ (_value) |= (VP8_BD_VALUE)*(_bufptr)++ << shift; \
+ shift -= CHAR_BIT; \
+ } \
+ } \
+ while(0) \
+
+
+static int vp8dx_decode_bool(BOOL_DECODER *br, int probability) {
+ unsigned int bit = 0;
+ VP8_BD_VALUE value;
+ unsigned int split;
+ VP8_BD_VALUE bigsplit;
+ int count;
+ unsigned int range;
+
+ split = 1 + (((br->range - 1) * probability) >> 8);
+
+ if(br->count < 0)
+ vp8dx_bool_decoder_fill(br);
+
+ value = br->value;
+ count = br->count;
+
+ bigsplit = (VP8_BD_VALUE)split << (VP8_BD_VALUE_SIZE - 8);
+
+ range = split;
+
+ if (value >= bigsplit)
+ {
+ range = br->range - split;
+ value = value - bigsplit;
+ bit = 1;
+ }
+
+ {
+ register unsigned int shift = vp8_norm[range];
+ range <<= shift;
+ value <<= shift;
+ count -= shift;
+ }
+ br->value = value;
+ br->count = count;
+ br->range = range;
+
+ return bit;
+}
+
+static G_GNUC_UNUSED int vp8_decode_value(BOOL_DECODER *br, int bits)
+{
+ int z = 0;
+ int bit;
+
+ for (bit = bits - 1; bit >= 0; bit--)
+ {
+ z |= (vp8dx_decode_bool(br, 0x80) << bit);
+ }
+
+ return z;
+}
+
+static G_GNUC_UNUSED int vp8dx_bool_error(BOOL_DECODER *br)
+{
+ /* Check if we have reached the end of the buffer.
+ *
+ * Variable 'count' stores the number of bits in the 'value' buffer, minus
+ * 8. The top byte is part of the algorithm, and the remainder is buffered
+ * to be shifted into it. So if count == 8, the top 16 bits of 'value' are
+ * occupied, 8 for the algorithm and 8 in the buffer.
+ *
+ * When reading a byte from the user's buffer, count is filled with 8 and
+ * one byte is filled into the value buffer. When we reach the end of the
+ * data, count is additionally filled with VP8_LOTS_OF_BITS. So when
+ * count == VP8_LOTS_OF_BITS - 1, the user's data has been exhausted.
+ */
+ if ((br->count > VP8_BD_VALUE_SIZE) && (br->count < VP8_LOTS_OF_BITS))
+ {
+ /* We have tried to decode bits after the end of
+ * stream was encountered.
+ */
+ return 1;
+ }
+
+ /* No error. */
+ return 0;
+}
+#endif
diff --git a/gst/rtp/fnv1hash.c b/gst/rtp/fnv1hash.c
new file mode 100644
index 0000000000..9885bb2688
--- /dev/null
+++ b/gst/rtp/fnv1hash.c
@@ -0,0 +1,63 @@
+/* GStreamer
+ * Copyright (C) 2007 Thomas Vander Stichele <thomas at apestaart dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <glib.h>
+
+#include "fnv1hash.h"
+
+/* This file implements FNV-1 hashing used in the Ogg payload encoders
+ * to generate the 24-bit ident value based on the header pages.
+ * See http://isthe.com/chongo/tech/comp/fnv/
+ */
+
+#define MASK_24 (((guint32) 1 << 24) -1)
+
+#define FNV1_HASH_32_INIT ((guint32) 0x811C9DC5L)
+//2166136261L)
+#define FNV1_HASH_32_PRIME 16777619
+
+guint32
+fnv1_hash_32_new (void)
+{
+ return FNV1_HASH_32_INIT;
+}
+
+guint32
+fnv1_hash_32_update (guint32 hash, const guchar * data, guint length)
+{
+ guint i;
+ const guchar *p = data;
+
+ for (i = 0; i < length; ++i, ++p) {
+ hash *= FNV1_HASH_32_PRIME;
+ hash ^= *p;
+ }
+
+ return hash;
+}
+
+guint32
+fnv1_hash_32_to_24 (guint32 hash)
+{
+ return (hash >> 24) ^ (hash & MASK_24);
+}
diff --git a/gst/rtp/fnv1hash.h b/gst/rtp/fnv1hash.h
new file mode 100644
index 0000000000..7047067ff3
--- /dev/null
+++ b/gst/rtp/fnv1hash.h
@@ -0,0 +1,36 @@
+/* GStreamer
+ * Copyright (C) 2007 Thomas Vander Stichele <thomas at apestaart dot org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_FNV1_HASH_H__
+#define __GST_FNV1_HASH_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+guint32 fnv1_hash_32_new (void);
+guint32 fnv1_hash_32_update (guint32 hash, const guchar *data, guint length);
+guint32 fnv1_hash_32_to_24 (guint32 hash);
+
+G_END_DECLS
+
+#endif /* __GST_FNV1_HASH_H__ */
+
diff --git a/gst/rtp/gstasteriskh263.c b/gst/rtp/gstasteriskh263.c
new file mode 100644
index 0000000000..f22c33f316
--- /dev/null
+++ b/gst/rtp/gstasteriskh263.c
@@ -0,0 +1,226 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include "gstrtpelements.h"
+#include "gstasteriskh263.h"
+
+/* Size in bytes of the Asterisk H263 header actually written on the
+ * wire: 4-byte timestamp + 2-byte length. */
+#define GST_ASTERISKH263_HEADER_LEN 6
+
+/* Layout of the Asterisk H263 frame header. Both fields are written in
+ * network byte order by the chain function below. */
+typedef struct _GstAsteriskH263Header
+{
+ guint32 timestamp; /* Timestamp */
+ guint16 length; /* Length */
+} GstAsteriskH263Header;
+
+/* Accessors that overlay the header struct on a raw byte pointer.
+ * NOTE(review): this relies on `length` sitting at offset 4 so that the
+ * used portion fits in GST_ASTERISKH263_HEADER_LEN (6) bytes, and on the
+ * target tolerating potentially unaligned stores — confirm on all
+ * supported platforms. */
+#define GST_ASTERISKH263_HEADER_TIMESTAMP(data) (((GstAsteriskH263Header *)(data))->timestamp)
+#define GST_ASTERISKH263_HEADER_LENGTH(data) (((GstAsteriskH263Header *)(data))->length)
+
+static GstStaticPadTemplate gst_asteriskh263_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-asteriskh263")
+ );
+
+static GstStaticPadTemplate gst_asteriskh263_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) [ 96, 127 ], "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-1998\"")
+ );
+
+static void gst_asteriskh263_finalize (GObject * object);
+
+static GstFlowReturn gst_asteriskh263_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+
+static GstStateChangeReturn gst_asteriskh263_change_state (GstElement *
+ element, GstStateChange transition);
+
+#define gst_asteriskh263_parent_class parent_class
+G_DEFINE_TYPE (GstAsteriskh263, gst_asteriskh263, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (asteriskh263, "asteriskh263",
+ GST_RANK_NONE, GST_TYPE_ASTERISK_H263, rtp_element_init (plugin));
+
+/* Class initialisation: install the finalize and state-change vfuncs,
+ * register both static pad templates and the element metadata. */
+static void
+gst_asteriskh263_class_init (GstAsteriskh263Class * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->finalize = gst_asteriskh263_finalize;
+
+ gstelement_class->change_state = gst_asteriskh263_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_asteriskh263_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_asteriskh263_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP Asterisk H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263 video from RTP and encodes in Asterisk H263 format",
+ "Neil Stratford <neils@vipadia.com>");
+}
+
+/* Instance initialisation: create the always-present src and sink pads
+ * from the static templates, hook up the chain function on the sink,
+ * and allocate the adapter (released again in finalize). */
+static void
+gst_asteriskh263_init (GstAsteriskh263 * asteriskh263)
+{
+ asteriskh263->srcpad =
+ gst_pad_new_from_static_template (&gst_asteriskh263_src_template, "src");
+ gst_element_add_pad (GST_ELEMENT (asteriskh263), asteriskh263->srcpad);
+
+ asteriskh263->sinkpad =
+ gst_pad_new_from_static_template (&gst_asteriskh263_sink_template,
+ "sink");
+ gst_pad_set_chain_function (asteriskh263->sinkpad, gst_asteriskh263_chain);
+ gst_element_add_pad (GST_ELEMENT (asteriskh263), asteriskh263->sinkpad);
+
+ asteriskh263->adapter = gst_adapter_new ();
+}
+
+/* GObject finalize: drop our reference on the adapter created in
+ * _init and chain up to the parent class. */
+static void
+gst_asteriskh263_finalize (GObject * object)
+{
+ GstAsteriskh263 *asteriskh263;
+
+ asteriskh263 = GST_ASTERISK_H263 (object);
+
+ g_object_unref (asteriskh263->adapter);
+ asteriskh263->adapter = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Sink pad chain function: converts one RTP H263 packet into an
+ * Asterisk H263 frame (6-byte header followed by the raw RTP payload)
+ * and pushes it downstream. Takes ownership of @buf in all paths. */
+static GstFlowReturn
+gst_asteriskh263_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+ GstAsteriskh263 *asteriskh263;
+ GstBuffer *outbuf;
+ GstFlowReturn ret;
+
+ asteriskh263 = GST_ASTERISK_H263 (parent);
+
+ {
+ gint payload_len;
+ guint8 *payload;
+ gboolean M;
+ guint32 timestamp;
+ guint32 samples;
+ guint16 asterisk_len;
+ GstRTPBuffer rtp = { NULL };
+ GstMapInfo map;
+
+ if (!gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp))
+ goto bad_packet;
+
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+
+ M = gst_rtp_buffer_get_marker (&rtp);
+ timestamp = gst_rtp_buffer_get_timestamp (&rtp);
+
+ /* NOTE(review): `payload` points into the mapping released here, yet
+ * it is still read by the memcpy below — works only while the buffer
+ * memory stays alive/mapped underneath; consider unmapping after the
+ * copy instead. */
+ gst_rtp_buffer_unmap (&rtp);
+
+ outbuf = gst_buffer_new_and_alloc (payload_len +
+ GST_ASTERISKH263_HEADER_LEN);
+
+ /* build the asterisk header: length with the RTP marker bit folded
+ * into the top bit, and the timestamp delta since the previous
+ * packet (first packet produces a delta of 0; a genuine RTP
+ * timestamp of 0 is indistinguishable from "no previous packet"). */
+ asterisk_len = payload_len;
+ if (M)
+ asterisk_len |= 0x8000;
+ if (!asteriskh263->lastts)
+ asteriskh263->lastts = timestamp;
+ samples = timestamp - asteriskh263->lastts;
+ asteriskh263->lastts = timestamp;
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ GST_ASTERISKH263_HEADER_TIMESTAMP (map.data) = g_htonl (samples);
+ GST_ASTERISKH263_HEADER_LENGTH (map.data) = g_htons (asterisk_len);
+
+ /* copy the data into place */
+ memcpy (map.data + GST_ASTERISKH263_HEADER_LEN, payload, payload_len);
+
+ gst_buffer_unmap (outbuf, &map);
+
+ /* raw RTP timestamp is used as PTS; set caps lazily from the src
+ * template on the first pushed buffer */
+ GST_BUFFER_PTS (outbuf) = timestamp;
+ if (!gst_pad_has_current_caps (asteriskh263->srcpad)) {
+ GstCaps *caps;
+
+ caps = gst_pad_get_pad_template_caps (asteriskh263->srcpad);
+ gst_pad_set_caps (asteriskh263->srcpad, caps);
+ gst_caps_unref (caps);
+ }
+
+ ret = gst_pad_push (asteriskh263->srcpad, outbuf);
+
+ gst_buffer_unref (buf);
+ }
+
+ return ret;
+
+bad_packet:
+ {
+ GST_DEBUG ("Packet does not validate");
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+}
+
+/* Element state-change vfunc: flush any pending adapter data when
+ * going READY -> PAUSED, then chain up to the parent implementation. */
+static GstStateChangeReturn
+gst_asteriskh263_change_state (GstElement * element, GstStateChange transition)
+{
+ GstAsteriskh263 *asteriskh263;
+ GstStateChangeReturn ret;
+
+ asteriskh263 = GST_ASTERISK_H263 (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_adapter_clear (asteriskh263->adapter);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /*
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ */
+ return ret;
+}
diff --git a/gst/rtp/gstasteriskh263.h b/gst/rtp/gstasteriskh263.h
new file mode 100644
index 0000000000..f0416b561f
--- /dev/null
+++ b/gst/rtp/gstasteriskh263.h
@@ -0,0 +1,63 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_ASTERISK_H263_H__
+#define __GST_ASTERISK_H263_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_ASTERISK_H263 \
+ (gst_asteriskh263_get_type())
+#define GST_ASTERISK_H263(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ASTERISK_H263,GstAsteriskh263))
+#define GST_ASTERISK_H263_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ASTERISK_H263,GstAsteriskh263Class))
+#define GST_IS_ASTERISK_H263(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ASTERISK_H263))
+#define GST_IS_ASTERISK_H263_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ASTERISK_H263))
+
+typedef struct _GstAsteriskh263 GstAsteriskh263;
+typedef struct _GstAsteriskh263Class GstAsteriskh263Class;
+
+struct _GstAsteriskh263
+{
+ GstElement element;
+
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ GstAdapter *adapter;
+
+ guint32 lastts;
+};
+
+struct _GstAsteriskh263Class
+{
+ GstElementClass parent_class;
+};
+
+GType gst_asteriskh263_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_ASTERISK_H263_H__ */
diff --git a/gst/rtp/gstbuffermemory.c b/gst/rtp/gstbuffermemory.c
new file mode 100644
index 0000000000..d6fd6477f3
--- /dev/null
+++ b/gst/rtp/gstbuffermemory.c
@@ -0,0 +1,120 @@
+/* GStreamer
+ * Copyright (C) 2020 Ognyan Tonchev <ognyan at axis dot com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "gstbuffermemory.h"
+
+/* Initialise @map over @buffer and map its first GstMemory block for
+ * reading. On success, map->data/map->size expose the first block and
+ * map->offset/map->total_size track overall progress. Returns FALSE if
+ * the buffer has no memory blocks or mapping fails; @buffer is not
+ * reffed — the caller must keep it alive while the map is in use. */
+gboolean
+gst_buffer_memory_map (GstBuffer * buffer, GstBufferMemoryMap * map)
+{
+ GstMemory *mem;
+
+ g_return_val_if_fail (GST_IS_BUFFER (buffer), FALSE);
+ g_return_val_if_fail (map != NULL, FALSE);
+
+ if (gst_buffer_n_memory (buffer) == 0) {
+ GST_DEBUG ("no memory blocks in buffer");
+ return FALSE;
+ }
+
+ mem = gst_buffer_get_memory (buffer, 0);
+
+ if (!gst_memory_map (mem, &map->map, GST_MAP_READ)) {
+ GST_ERROR ("failed to map memory");
+ gst_memory_unref (mem);
+ return FALSE;
+ }
+
+ map->buf = buffer;
+ map->mem = mem;
+ map->data = map->map.data;
+ map->size = map->map.size;
+ map->index = 0;
+ map->total_size = gst_buffer_get_size (buffer);
+ map->offset = 0;
+
+ return TRUE;
+}
+
+/* Unmap and release the currently mapped GstMemory and map the next
+ * block of the buffer in its place. Returns FALSE when no memory is
+ * currently mapped, when the buffer has no further blocks, or when
+ * mapping the next block fails; in all failure cases map->mem is left
+ * NULL so a later unmap is a no-op. */
+static gboolean
+buffer_memory_map_next (GstBufferMemoryMap * map)
+{
+ if (!map->mem)
+ return FALSE;
+
+ gst_memory_unmap (map->mem, &map->map);
+ gst_memory_unref (map->mem);
+ map->mem = NULL;
+ map->data = NULL;
+ map->size = 0;
+
+ map->index++;
+
+ if (map->index >= gst_buffer_n_memory (map->buf)) {
+ GST_DEBUG ("no more memory blocks in buffer");
+ return FALSE;
+ }
+
+ map->mem = gst_buffer_get_memory (map->buf, map->index);
+
+ if (!gst_memory_map (map->mem, &map->map, GST_MAP_READ)) {
+ GST_ERROR ("failed to map memory");
+ gst_memory_unref (map->mem);
+ map->mem = NULL;
+ return FALSE;
+ }
+
+ map->data = map->map.data;
+ map->size = map->map.size;
+
+ return TRUE;
+}
+
+/* Advance the map's read position by @size bytes, transparently
+ * switching to (and mapping) the next GstMemory block whenever the
+ * current one is exhausted. On success map->data/map->size reflect the
+ * new position. NOTE(review): advancing exactly to the end of the LAST
+ * block also returns FALSE (the loop insists on mapping a next block),
+ * so callers cannot distinguish "consumed everything" from error —
+ * confirm callers only advance when more data is expected. */
+gboolean
+gst_buffer_memory_advance_bytes (GstBufferMemoryMap * map, gsize size)
+{
+ gsize offset = size;
+
+ g_return_val_if_fail (map != NULL, FALSE);
+
+ map->offset += size;
+
+ while (offset >= map->size) {
+ offset -= map->size;
+ GST_DEBUG ("switching memory");
+ if (!buffer_memory_map_next (map))
+ return FALSE;
+ }
+
+ map->data += offset;
+ map->size -= offset;
+
+ return TRUE;
+}
+
+/* Release the currently mapped GstMemory, if any. Safe to call more
+ * than once: map->mem is cleared so a second call is a no-op. */
+void
+gst_buffer_memory_unmap (GstBufferMemoryMap * map)
+{
+ g_return_if_fail (map != NULL);
+
+ if (map->mem) {
+ gst_memory_unmap (map->mem, &map->map);
+ gst_memory_unref (map->mem);
+ map->mem = NULL;
+ }
+}
diff --git a/gst/rtp/gstbuffermemory.h b/gst/rtp/gstbuffermemory.h
new file mode 100644
index 0000000000..3b78429898
--- /dev/null
+++ b/gst/rtp/gstbuffermemory.h
@@ -0,0 +1,66 @@
+/* GStreamer
+ * Copyright (C) 2020 Ognyan Tonchev <ognyan at axis dot com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_BUFFER_MEMORY_H__
+#define __GST_BUFFER_MEMORY_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+/* Cursor for reading sequentially through all GstMemory blocks of a
+ * GstBuffer without mapping the whole buffer at once. */
+struct _GstBufferMemoryMap
+{
+ /* private data */
+
+ GstBuffer *buf;
+ GstMemory *mem;
+ GstMapInfo map;
+ guint index;
+ gsize total_size;
+
+ /* public data */
+
+ /* data of the currently mapped memory */
+ const guint8 *data;
+ guint offset;
+
+ /* size of the currently mapped memory */
+ gsize size;
+
+ /* When advancing through the data with gst_buffer_memory_advance_bytes ()
+ * the data field is also advanced and the size field decreased by the
+ * corresponding number of bytes. If all the bytes from the currently mapped
+ * GstMemory have been consumed then a new GstMemory will be mapped and the
+ * data and size fields will be updated.
+ * */
+};
+typedef struct _GstBufferMemoryMap GstBufferMemoryMap;
+
+G_GNUC_INTERNAL
+gboolean gst_buffer_memory_map (GstBuffer * buffer, GstBufferMemoryMap * map);
+
+G_GNUC_INTERNAL
+gboolean gst_buffer_memory_advance_bytes (GstBufferMemoryMap * map, gsize size);
+
+G_GNUC_INTERNAL
+void gst_buffer_memory_unmap (GstBufferMemoryMap * map);
+
+G_END_DECLS
+
+#endif /* __GST_BUFFER_MEMORY_H__ */
diff --git a/gst/rtp/gstrtp.c b/gst/rtp/gstrtp.c
new file mode 100644
index 0000000000..9528ffb10c
--- /dev/null
+++ b/gst/rtp/gstrtp.c
@@ -0,0 +1,139 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/tag/tag.h>
+
+#include "gstrtpelements.h"
+
+
+/* Plugin entry point: register every RTP payloader/depayloader element
+ * provided by this plugin. `ret` is OR-accumulated, so the plugin loads
+ * successfully if at least one element registers. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (rtpac3depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpac3pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpbvdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpbvpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpceltdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpceltpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpdvdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpdvpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpgstdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpgstpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpilbcpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpilbcdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg722depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg722pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg723depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg723pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg726depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg726pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg729depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpg729pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpgsmdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpgsmpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpamrdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpamrpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtppcmadepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtppcmudepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtppcmupay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtppcmapay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmpadepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmpapay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmparobustdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmpvdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmpvpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpopusdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpopuspay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph261pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph261depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph263ppay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph263pdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph263depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph263pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph264depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph264pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph265depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtph265pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpj2kdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpj2kpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpjpegdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpjpegpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpklvdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpklvpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpL8pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpL8depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpL16pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpL16depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpL24pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpL24depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpldacpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (asteriskh263, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp1sdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp2tdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp2tpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp4vpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp4vdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp4apay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp4adepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp4gdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpmp4gpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpqcelpdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpqdm2depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpsbcdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpsbcpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpsirenpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpsirendepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpspeexpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpspeexdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpsv3vdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtptheoradepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtptheorapay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvorbisdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvorbispay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvp8depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvp8pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvp9depay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvp9pay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvrawdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpvrawpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpstreampay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpstreamdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpisacpay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpisacdepay, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpredenc, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpreddec, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpulpfecdec, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpulpfecenc, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtpstorage, plugin);
+ ret |= GST_ELEMENT_REGISTER (rtphdrextcolorspace, plugin);
+
+ return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ rtp,
+ "Real-time protocol plugins",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/rtp/gstrtpL16depay.c b/gst/rtp/gstrtpL16depay.c
new file mode 100644
index 0000000000..aa7e5d03da
--- /dev/null
+++ b/gst/rtp/gstrtpL16depay.c
@@ -0,0 +1,296 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpL16depay
+ * @title: rtpL16depay
+ * @see_also: rtpL16pay
+ *
+ * Extract raw audio from RTP packets according to RFC 3551.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3551.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 udpsrc caps='application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L16, encoding-params=(string)1, channels=(int)1, payload=(int)96' ! rtpL16depay ! pulsesink
+ * ]| This example pipeline will depayload an RTP raw audio stream. Refer to
+ * the rtpL16pay example to create the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpL16depay.h"
+#include "gstrtpchannels.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpL16depay_debug);
+#define GST_CAT_DEFAULT (rtpL16depay_debug)
+
+static GstStaticPadTemplate gst_rtp_L16_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) S16BE, "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_L16_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", " "clock-rate = (int) [ 1, MAX ], "
+ /* "channels = (int) [1, MAX]" */
+ /* "emphasis = (string) ANY" */
+ /* "channel-order = (string) ANY" */
+ "encoding-name = (string) \"L16\";"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) { " GST_RTP_PAYLOAD_L16_STEREO_STRING ", "
+ GST_RTP_PAYLOAD_L16_MONO_STRING " }," "clock-rate = (int) [ 1, MAX ]"
+ /* "channels = (int) [1, MAX]" */
+ /* "emphasis = (string) ANY" */
+ /* "channel-order = (string) ANY" */
+ )
+ );
+
+#define gst_rtp_L16_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL16Depay, gst_rtp_L16_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL16depay, "rtpL16depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_L16_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_L16_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_L16_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+/* Class initialisation: hook the set_caps and process_rtp_packet
+ * vfuncs into the base depayloader, add the pad templates and element
+ * metadata, and set up the debug category. */
+static void
+gst_rtp_L16_depay_class_init (GstRtpL16DepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_L16_depay_setcaps;
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_L16_depay_process;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L16_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L16_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts raw audio from RTP packets",
+ "Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpL16depay_debug, "rtpL16depay", 0,
+ "Raw Audio RTP Depayloader");
+}
+
+/* Instance initialisation: nothing to set up here — all per-stream
+ * state (audio info, channel order) is configured in setcaps. */
+static void
+gst_rtp_L16_depay_init (GstRtpL16Depay * rtpL16depay)
+{
+}
+
+/* Read an integer-valued caps @field that may be typed either as a
+ * string or as an int (SDP-derived caps often carry numbers as
+ * strings). Returns @def when the field is absent. */
+static gint
+gst_rtp_L16_depay_parse_int (GstStructure * structure, const gchar * field,
+ gint def)
+{
+ const gchar *str;
+ gint res;
+
+ if ((str = gst_structure_get_string (structure, field)))
+ return atoi (str);
+
+ if (gst_structure_get_int (structure, field, &res))
+ return res;
+
+ return def;
+}
+
+/* set_caps vfunc: derive clock-rate, channel count and channel order
+ * from the sink caps, fill in the S16BE GstAudioInfo used by _process,
+ * and set the corresponding audio/x-raw caps on the src pad. Returns
+ * FALSE when no clock-rate can be determined. */
+static gboolean
+gst_rtp_L16_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ GstRtpL16Depay *rtpL16depay;
+ gint clock_rate, payload;
+ gint channels;
+ GstCaps *srccaps;
+ gboolean res;
+ const gchar *channel_order;
+ const GstRTPChannelOrder *order;
+ GstAudioInfo *info;
+
+ rtpL16depay = GST_RTP_L16_DEPAY (depayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* static payload types 10/11 imply fixed channel/rate values per
+ * RFC 3551; anything else (dynamic PT) must supply clock-rate */
+ payload = 96;
+ gst_structure_get_int (structure, "payload", &payload);
+ switch (payload) {
+ case GST_RTP_PAYLOAD_L16_STEREO:
+ channels = 2;
+ clock_rate = 44100;
+ break;
+ case GST_RTP_PAYLOAD_L16_MONO:
+ channels = 1;
+ clock_rate = 44100;
+ break;
+ default:
+ /* no fixed mapping, we need clock-rate */
+ channels = 0;
+ clock_rate = 0;
+ break;
+ }
+
+ /* caps can overwrite defaults */
+ clock_rate =
+ gst_rtp_L16_depay_parse_int (structure, "clock-rate", clock_rate);
+ if (clock_rate == 0)
+ goto no_clockrate;
+
+ /* channel count: encoding-params takes precedence, then channels,
+ * else default to mono */
+ channels =
+ gst_rtp_L16_depay_parse_int (structure, "encoding-params", channels);
+ if (channels == 0) {
+ channels = gst_rtp_L16_depay_parse_int (structure, "channels", channels);
+ if (channels == 0) {
+ /* channels defaults to 1 otherwise */
+ channels = 1;
+ }
+ }
+
+ depayload->clock_rate = clock_rate;
+
+ info = &rtpL16depay->info;
+ gst_audio_info_init (info);
+ info->finfo = gst_audio_format_get_info (GST_AUDIO_FORMAT_S16BE);
+ info->rate = clock_rate;
+ info->channels = channels;
+ info->bpf = (info->finfo->width / 8) * channels;
+
+ /* add channel positions */
+ channel_order = gst_structure_get_string (structure, "channel-order");
+
+ order = gst_rtp_channels_get_by_order (channels, channel_order);
+ rtpL16depay->order = order;
+ if (order) {
+ memcpy (info->position, order->pos,
+ sizeof (GstAudioChannelPosition) * channels);
+ gst_audio_channel_positions_to_valid_order (info->position, info->channels);
+ } else {
+ GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
+ (NULL), ("Unknown channel order '%s' for %d channels",
+ GST_STR_NULL (channel_order), channels));
+ /* create default NONE layout */
+ gst_rtp_channels_create_default (channels, info->position);
+ info->flags |= GST_AUDIO_FLAG_UNPOSITIONED;
+ }
+
+ srccaps = gst_audio_info_to_caps (info);
+ res = gst_pad_set_caps (depayload->srcpad, srccaps);
+ gst_caps_unref (srccaps);
+
+ return res;
+
+ /* ERRORS */
+no_clockrate:
+ {
+ GST_ERROR_OBJECT (depayload, "no clock-rate specified");
+ return FALSE;
+ }
+}
+
+/* process_rtp_packet vfunc: extract the raw S16BE payload from one RTP
+ * packet as an output buffer. The RTP marker bit maps to the RESYNC
+ * buffer flag (start of a talk spurt); the payload size must be a
+ * multiple of bytes-per-frame, and channels are reordered to the valid
+ * GStreamer order when an explicit channel-order was negotiated.
+ * Returns NULL (dropping the packet) on any validation failure. */
+static GstBuffer *
+gst_rtp_L16_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpL16Depay *rtpL16depay;
+ GstBuffer *outbuf;
+ gint payload_len;
+ gboolean marker;
+ GstAudioInfo *info;
+
+ rtpL16depay = GST_RTP_L16_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (payload_len <= 0)
+ goto empty_packet;
+
+ GST_DEBUG_OBJECT (rtpL16depay, "got payload of %d bytes", payload_len);
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ if (marker) {
+ /* mark talk spurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ outbuf = gst_buffer_make_writable (outbuf);
+ info = &rtpL16depay->info;
+
+ /* info->bpf was set in setcaps; a partial frame means a broken or
+ * misdeclared stream */
+ if (payload_len % info->bpf != 0)
+ goto wrong_payload_size;
+
+ if (rtpL16depay->order &&
+ !gst_audio_buffer_reorder_channels (outbuf,
+ info->finfo->format, info->channels,
+ info->position, rtpL16depay->order->pos)) {
+ goto reorder_failed;
+ }
+
+ /* strip non-audio metas copied from the RTP buffer */
+ gst_rtp_drop_non_audio_meta (rtpL16depay, outbuf);
+
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
+ ("Empty Payload."), (NULL));
+ return NULL;
+ }
+wrong_payload_size:
+ {
+ GST_ELEMENT_WARNING (rtpL16depay, STREAM, DECODE,
+ ("Wrong Payload Size."), (NULL));
+ gst_buffer_unref (outbuf);
+ return NULL;
+ }
+reorder_failed:
+ {
+ GST_ELEMENT_ERROR (rtpL16depay, STREAM, DECODE,
+ ("Channel reordering failed."), (NULL));
+ gst_buffer_unref (outbuf);
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpL16depay.h b/gst/rtp/gstrtpL16depay.h
new file mode 100644
index 0000000000..fac933bb1f
--- /dev/null
+++ b/gst/rtp/gstrtpL16depay.h
@@ -0,0 +1,65 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_L16_DEPAY_H__
+#define __GST_RTP_L16_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpchannels.h"
+
+G_BEGIN_DECLS
+
+/* Standard macros for defining types for this element. */
+#define GST_TYPE_RTP_L16_DEPAY \
+ (gst_rtp_L16_depay_get_type())
+#define GST_RTP_L16_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_L16_DEPAY,GstRtpL16Depay))
+#define GST_RTP_L16_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_L16_DEPAY,GstRtpL16DepayClass))
+#define GST_IS_RTP_L16_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_L16_DEPAY))
+#define GST_IS_RTP_L16_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_L16_DEPAY))
+
+typedef struct _GstRtpL16Depay GstRtpL16Depay;
+typedef struct _GstRtpL16DepayClass GstRtpL16DepayClass;
+
+/* Definition of structure storing data for this element. */
+struct _GstRtpL16Depay
+{
+ GstRTPBaseDepayload depayload;
+
+ GstAudioInfo info;
+ const GstRTPChannelOrder *order;
+};
+
+/* Standard definition defining a class for this element. */
+struct _GstRtpL16DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_L16_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_L16_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpL16pay.c b/gst/rtp/gstrtpL16pay.c
new file mode 100644
index 0000000000..41a7c0544f
--- /dev/null
+++ b/gst/rtp/gstrtpL16pay.c
@@ -0,0 +1,259 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpL16pay
+ * @title: rtpL16pay
+ * @see_also: rtpL16depay
+ *
+ * Payload raw audio into RTP packets according to RFC 3551.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3551.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! audioconvert ! rtpL16pay ! udpsink
+ * ]| This example pipeline will payload raw audio. Refer to
+ * the rtpL16depay example to depayload and play the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/audio/audio.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpL16pay.h"
+#include "gstrtpchannels.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpL16pay_debug);
+#define GST_CAT_DEFAULT (rtpL16pay_debug)
+
+static GstStaticPadTemplate gst_rtp_L16_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) S16BE, "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_L16_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) [ 96, 127 ], "
+ "clock-rate = (int) [ 1, MAX ], "
+ "encoding-name = (string) \"L16\", "
+ "channels = (int) [ 1, MAX ];"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "encoding-name = (string) \"L16\", "
+ "payload = (int) " GST_RTP_PAYLOAD_L16_STEREO_STRING ", "
+ "clock-rate = (int) 44100;"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "encoding-name = (string) \"L16\", "
+ "payload = (int) " GST_RTP_PAYLOAD_L16_MONO_STRING ", "
+ "clock-rate = (int) 44100")
+ );
+
+static gboolean gst_rtp_L16_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+static GstCaps *gst_rtp_L16_pay_getcaps (GstRTPBasePayload * rtppayload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn
+gst_rtp_L16_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer);
+
+#define gst_rtp_L16_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL16Pay, gst_rtp_L16_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL16pay, "rtpL16pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_L16_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_L16_pay_class_init (GstRtpL16PayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_L16_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_L16_pay_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_L16_pay_handle_buffer;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L16_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L16_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encode Raw audio into RTP packets (RFC 3551)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpL16pay_debug, "rtpL16pay", 0,
+ "L16 RTP Payloader");
+}
+
+static void
+gst_rtp_L16_pay_init (GstRtpL16Pay * rtpL16pay)
+{
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpL16pay);
+
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
+}
+
+static gboolean
+gst_rtp_L16_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+ GstRtpL16Pay *rtpL16pay;
+ gboolean res;
+ gchar *params;
+ GstAudioInfo *info;
+ const GstRTPChannelOrder *order;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);
+ rtpL16pay = GST_RTP_L16_PAY (basepayload);
+
+ info = &rtpL16pay->info;
+ gst_audio_info_init (info);
+ if (!gst_audio_info_from_caps (info, caps))
+ goto invalid_caps;
+
+ order = gst_rtp_channels_get_by_pos (info->channels, info->position);
+ rtpL16pay->order = order;
+
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "L16",
+ info->rate);
+ params = g_strdup_printf ("%d", info->channels);
+
+ if (!order && info->channels > 2) {
+ GST_ELEMENT_WARNING (rtpL16pay, STREAM, DECODE,
+ (NULL), ("Unknown channel order for %d channels", info->channels));
+ }
+
+ if (order && order->name) {
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
+ info->channels, "channel-order", G_TYPE_STRING, order->name, NULL);
+ } else {
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
+ info->channels, NULL);
+ }
+
+ g_free (params);
+
+  /* octets per sample: 2 * channels for L16 */
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload,
+ 2 * info->channels);
+
+ return res;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (rtpL16pay, "invalid caps");
+ return FALSE;
+ }
+}
+
+static GstCaps *
+gst_rtp_L16_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *otherpadcaps;
+ GstCaps *caps;
+
+ caps = gst_pad_get_pad_template_caps (pad);
+
+ otherpadcaps = gst_pad_get_allowed_caps (rtppayload->srcpad);
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ GstStructure *structure;
+ gint channels;
+ gint pt;
+ gint rate;
+
+ structure = gst_caps_get_structure (otherpadcaps, 0);
+ caps = gst_caps_make_writable (caps);
+
+ if (gst_structure_get_int (structure, "channels", &channels)) {
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, channels, NULL);
+ } else if (gst_structure_get_int (structure, "payload", &pt)) {
+ if (pt == GST_RTP_PAYLOAD_L16_STEREO)
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, 2, NULL);
+ else if (pt == GST_RTP_PAYLOAD_L16_MONO)
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, 1, NULL);
+ }
+
+ if (gst_structure_get_int (structure, "clock-rate", &rate)) {
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, rate, NULL);
+ } else if (gst_structure_get_int (structure, "payload", &pt)) {
+ if (pt == GST_RTP_PAYLOAD_L16_STEREO || pt == GST_RTP_PAYLOAD_L16_MONO)
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, 44100, NULL);
+ }
+
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
+ return caps;
+}
+
+static GstFlowReturn
+gst_rtp_L16_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpL16Pay *rtpL16pay;
+
+ rtpL16pay = GST_RTP_L16_PAY (basepayload);
+ buffer = gst_buffer_make_writable (buffer);
+
+ if (rtpL16pay->order &&
+ !gst_audio_buffer_reorder_channels (buffer, rtpL16pay->info.finfo->format,
+ rtpL16pay->info.channels, rtpL16pay->info.position,
+ rtpL16pay->order->pos)) {
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload,
+ buffer);
+}
diff --git a/gst/rtp/gstrtpL16pay.h b/gst/rtp/gstrtpL16pay.h
new file mode 100644
index 0000000000..b3078db05e
--- /dev/null
+++ b/gst/rtp/gstrtpL16pay.h
@@ -0,0 +1,61 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_L16_PAY_H__
+#define __GST_RTP_L16_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+#include "gstrtpchannels.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_L16_PAY \
+ (gst_rtp_L16_pay_get_type())
+#define GST_RTP_L16_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_L16_PAY,GstRtpL16Pay))
+#define GST_RTP_L16_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_L16_PAY,GstRtpL16PayClass))
+#define GST_IS_RTP_L16_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_L16_PAY))
+#define GST_IS_RTP_L16_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_L16_PAY))
+
+typedef struct _GstRtpL16Pay GstRtpL16Pay;
+typedef struct _GstRtpL16PayClass GstRtpL16PayClass;
+
+struct _GstRtpL16Pay
+{
+ GstRTPBaseAudioPayload payload;
+
+ GstAudioInfo info;
+ const GstRTPChannelOrder *order;
+};
+
+struct _GstRtpL16PayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_L16_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_L16_PAY_H__ */
diff --git a/gst/rtp/gstrtpL24depay.c b/gst/rtp/gstrtpL24depay.c
new file mode 100644
index 0000000000..e39fe7c2f4
--- /dev/null
+++ b/gst/rtp/gstrtpL24depay.c
@@ -0,0 +1,261 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpL24depay
+ * @title: rtpL24depay
+ * @see_also: rtpL24pay
+ *
+ * Extract raw audio from RTP packets according to RFC 3190, section 4.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3190.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 udpsrc caps='application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L24, encoding-params=(string)1, channels=(int)1, payload=(int)96' ! rtpL24depay ! pulsesink
+ * ]| This example pipeline will depayload an RTP raw audio stream. Refer to
+ * the rtpL24pay example to create the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpL24depay.h"
+#include "gstrtpchannels.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpL24depay_debug);
+#define GST_CAT_DEFAULT (rtpL24depay_debug)
+
+static GstStaticPadTemplate gst_rtp_L24_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) S24BE, "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_L24_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", " "clock-rate = (int) [ 1, MAX ], "
+ "encoding-name = (string) \"L24\"")
+ );
+
+#define gst_rtp_L24_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL24Depay, gst_rtp_L24_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL24depay, "rtpL24depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_L24_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_L24_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_L24_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static void
+gst_rtp_L24_depay_class_init (GstRtpL24DepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_L24_depay_setcaps;
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_L24_depay_process;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L24_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L24_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts raw 24-bit audio from RTP packets",
+ "Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>,"
+ "David Holroyd <dave@badgers-in-foil.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpL24depay_debug, "rtpL24depay", 0,
+ "Raw Audio RTP Depayloader");
+}
+
+static void
+gst_rtp_L24_depay_init (GstRtpL24Depay * rtpL24depay)
+{
+}
+
+static gint
+gst_rtp_L24_depay_parse_int (GstStructure * structure, const gchar * field,
+ gint def)
+{
+ const gchar *str;
+ gint res;
+
+ if ((str = gst_structure_get_string (structure, field)))
+ return atoi (str);
+
+ if (gst_structure_get_int (structure, field, &res))
+ return res;
+
+ return def;
+}
+
+static gboolean
+gst_rtp_L24_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ GstRtpL24Depay *rtpL24depay;
+ gint clock_rate, payload;
+ gint channels;
+ GstCaps *srccaps;
+ gboolean res;
+ const gchar *channel_order;
+ const GstRTPChannelOrder *order;
+ GstAudioInfo *info;
+
+ rtpL24depay = GST_RTP_L24_DEPAY (depayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ payload = 96;
+ gst_structure_get_int (structure, "payload", &payload);
+ /* no fixed mapping, we need clock-rate */
+ channels = 0;
+ clock_rate = 0;
+
+ /* caps can overwrite defaults */
+ clock_rate =
+ gst_rtp_L24_depay_parse_int (structure, "clock-rate", clock_rate);
+ if (clock_rate == 0)
+ goto no_clockrate;
+
+ channels =
+ gst_rtp_L24_depay_parse_int (structure, "encoding-params", channels);
+ if (channels == 0) {
+ channels = gst_rtp_L24_depay_parse_int (structure, "channels", channels);
+ if (channels == 0) {
+ /* channels defaults to 1 otherwise */
+ channels = 1;
+ }
+ }
+
+ depayload->clock_rate = clock_rate;
+
+ info = &rtpL24depay->info;
+ gst_audio_info_init (info);
+ info->finfo = gst_audio_format_get_info (GST_AUDIO_FORMAT_S24BE);
+ info->rate = clock_rate;
+ info->channels = channels;
+ info->bpf = (info->finfo->width / 8) * channels;
+
+ /* add channel positions */
+ channel_order = gst_structure_get_string (structure, "channel-order");
+
+ order = gst_rtp_channels_get_by_order (channels, channel_order);
+ rtpL24depay->order = order;
+ if (order) {
+ memcpy (info->position, order->pos,
+ sizeof (GstAudioChannelPosition) * channels);
+ gst_audio_channel_positions_to_valid_order (info->position, info->channels);
+ } else {
+ GST_ELEMENT_WARNING (rtpL24depay, STREAM, DECODE,
+ (NULL), ("Unknown channel order '%s' for %d channels",
+ GST_STR_NULL (channel_order), channels));
+ /* create default NONE layout */
+ gst_rtp_channels_create_default (channels, info->position);
+ info->flags |= GST_AUDIO_FLAG_UNPOSITIONED;
+ }
+
+ srccaps = gst_audio_info_to_caps (info);
+ res = gst_pad_set_caps (depayload->srcpad, srccaps);
+ gst_caps_unref (srccaps);
+
+ return res;
+
+ /* ERRORS */
+no_clockrate:
+ {
+ GST_ERROR_OBJECT (depayload, "no clock-rate specified");
+ return FALSE;
+ }
+}
+
+static GstBuffer *
+gst_rtp_L24_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpL24Depay *rtpL24depay;
+ GstBuffer *outbuf;
+ gint payload_len;
+ gboolean marker;
+
+ rtpL24depay = GST_RTP_L24_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (payload_len <= 0)
+ goto empty_packet;
+
+ GST_DEBUG_OBJECT (rtpL24depay, "got payload of %d bytes", payload_len);
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ if (marker) {
+ /* mark talk spurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ outbuf = gst_buffer_make_writable (outbuf);
+ if (outbuf) {
+ gst_rtp_drop_non_audio_meta (rtpL24depay, outbuf);
+ }
+ if (rtpL24depay->order &&
+ !gst_audio_buffer_reorder_channels (outbuf,
+ rtpL24depay->info.finfo->format, rtpL24depay->info.channels,
+ rtpL24depay->info.position, rtpL24depay->order->pos)) {
+ goto reorder_failed;
+ }
+
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpL24depay, STREAM, DECODE,
+ ("Empty Payload."), (NULL));
+ return NULL;
+ }
+reorder_failed:
+ {
+ GST_ELEMENT_ERROR (rtpL24depay, STREAM, DECODE,
+ ("Channel reordering failed."), (NULL));
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpL24depay.h b/gst/rtp/gstrtpL24depay.h
new file mode 100644
index 0000000000..411adf9107
--- /dev/null
+++ b/gst/rtp/gstrtpL24depay.h
@@ -0,0 +1,65 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_L24_DEPAY_H__
+#define __GST_RTP_L24_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpchannels.h"
+
+G_BEGIN_DECLS
+
+/* Standard macros for defining types for this element. */
+#define GST_TYPE_RTP_L24_DEPAY \
+ (gst_rtp_L24_depay_get_type())
+#define GST_RTP_L24_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_L24_DEPAY,GstRtpL24Depay))
+#define GST_RTP_L24_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_L24_DEPAY,GstRtpL24DepayClass))
+#define GST_IS_RTP_L24_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_L24_DEPAY))
+#define GST_IS_RTP_L24_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_L24_DEPAY))
+
+typedef struct _GstRtpL24Depay GstRtpL24Depay;
+typedef struct _GstRtpL24DepayClass GstRtpL24DepayClass;
+
+/* Definition of structure storing data for this element. */
+struct _GstRtpL24Depay
+{
+ GstRTPBaseDepayload depayload;
+
+ GstAudioInfo info;
+ const GstRTPChannelOrder *order;
+};
+
+/* Standard definition defining a class for this element. */
+struct _GstRtpL24DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_L24_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_L24_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpL24pay.c b/gst/rtp/gstrtpL24pay.c
new file mode 100644
index 0000000000..aa8fc22734
--- /dev/null
+++ b/gst/rtp/gstrtpL24pay.c
@@ -0,0 +1,240 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpL24pay
+ * @title: rtpL24pay
+ * @see_also: rtpL24depay
+ *
+ * Payload raw 24-bit audio into RTP packets according to RFC 3190, section 4.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3190.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! audioconvert ! rtpL24pay ! udpsink
+ * ]| This example pipeline will payload raw audio. Refer to
+ * the rtpL24depay example to depayload and play the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/audio/audio.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpL24pay.h"
+#include "gstrtpchannels.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpL24pay_debug);
+#define GST_CAT_DEFAULT (rtpL24pay_debug)
+
+static GstStaticPadTemplate gst_rtp_L24_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) S24BE, "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_L24_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) [ 96, 127 ], "
+ "clock-rate = (int) [ 1, MAX ], "
+ "encoding-name = (string) \"L24\", " "channels = (int) [ 1, MAX ];")
+ );
+
+static gboolean gst_rtp_L24_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+static GstCaps *gst_rtp_L24_pay_getcaps (GstRTPBasePayload * rtppayload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn
+gst_rtp_L24_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer);
+
+#define gst_rtp_L24_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL24Pay, gst_rtp_L24_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL24pay, "rtpL24pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_L24_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_L24_pay_class_init (GstRtpL24PayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_L24_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_L24_pay_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_L24_pay_handle_buffer;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L24_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_L24_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encode Raw 24-bit audio into RTP packets (RFC 3190)",
+ "Wim Taymans <wim.taymans@gmail.com>,"
+ "David Holroyd <dave@badgers-in-foil.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpL24pay_debug, "rtpL24pay", 0,
+ "L24 RTP Payloader");
+}
+
+static void
+gst_rtp_L24_pay_init (GstRtpL24Pay * rtpL24pay)
+{
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpL24pay);
+
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
+}
+
+static gboolean
+gst_rtp_L24_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+ GstRtpL24Pay *rtpL24pay;
+ gboolean res;
+ gchar *params;
+ GstAudioInfo *info;
+ const GstRTPChannelOrder *order;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);
+ rtpL24pay = GST_RTP_L24_PAY (basepayload);
+
+ info = &rtpL24pay->info;
+ gst_audio_info_init (info);
+ if (!gst_audio_info_from_caps (info, caps))
+ goto invalid_caps;
+
+ order = gst_rtp_channels_get_by_pos (info->channels, info->position);
+ rtpL24pay->order = order;
+
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "L24",
+ info->rate);
+ params = g_strdup_printf ("%d", info->channels);
+
+ if (!order && info->channels > 2) {
+ GST_ELEMENT_WARNING (rtpL24pay, STREAM, DECODE,
+ (NULL), ("Unknown channel order for %d channels", info->channels));
+ }
+
+ if (order && order->name) {
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
+ info->channels, "channel-order", G_TYPE_STRING, order->name, NULL);
+ } else {
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
+ info->channels, NULL);
+ }
+
+ g_free (params);
+
+  /* octets per sample: 3 * channels for L24 */
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload,
+ 3 * info->channels);
+
+ return res;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (rtpL24pay, "invalid caps");
+ return FALSE;
+ }
+}
+
+static GstCaps *
+gst_rtp_L24_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *otherpadcaps;
+ GstCaps *caps;
+
+ caps = gst_pad_get_pad_template_caps (pad);
+
+ otherpadcaps = gst_pad_get_allowed_caps (rtppayload->srcpad);
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ GstStructure *structure;
+ gint channels;
+ gint rate;
+
+ structure = gst_caps_get_structure (otherpadcaps, 0);
+ caps = gst_caps_make_writable (caps);
+
+ if (gst_structure_get_int (structure, "channels", &channels)) {
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, channels, NULL);
+ }
+
+ if (gst_structure_get_int (structure, "clock-rate", &rate)) {
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, rate, NULL);
+ }
+
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
+ return caps;
+}
+
+static GstFlowReturn
+gst_rtp_L24_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpL24Pay *rtpL24pay;
+
+ rtpL24pay = GST_RTP_L24_PAY (basepayload);
+ buffer = gst_buffer_make_writable (buffer);
+
+ if (rtpL24pay->order &&
+ !gst_audio_buffer_reorder_channels (buffer, rtpL24pay->info.finfo->format,
+ rtpL24pay->info.channels, rtpL24pay->info.position,
+ rtpL24pay->order->pos)) {
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload,
+ buffer);
+}
diff --git a/gst/rtp/gstrtpL24pay.h b/gst/rtp/gstrtpL24pay.h
new file mode 100644
index 0000000000..4dd64dbef7
--- /dev/null
+++ b/gst/rtp/gstrtpL24pay.h
@@ -0,0 +1,61 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_L24_PAY_H__
+#define __GST_RTP_L24_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+#include "gstrtpchannels.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_L24_PAY \
+ (gst_rtp_L24_pay_get_type())
+#define GST_RTP_L24_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_L24_PAY,GstRtpL24Pay))
+#define GST_RTP_L24_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_L24_PAY,GstRtpL24PayClass))
+#define GST_IS_RTP_L24_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_L24_PAY))
+#define GST_IS_RTP_L24_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_L24_PAY))
+
+typedef struct _GstRtpL24Pay GstRtpL24Pay;
+typedef struct _GstRtpL24PayClass GstRtpL24PayClass;
+
+struct _GstRtpL24Pay
+{
+ GstRTPBaseAudioPayload payload;
+
+ GstAudioInfo info;
+ const GstRTPChannelOrder *order;
+};
+
+struct _GstRtpL24PayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_L24_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_L24_PAY_H__ */
diff --git a/gst/rtp/gstrtpL8depay.c b/gst/rtp/gstrtpL8depay.c
new file mode 100644
index 0000000000..f1c0fe3024
--- /dev/null
+++ b/gst/rtp/gstrtpL8depay.c
@@ -0,0 +1,265 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2015> GE Intelligent Platforms Embedded Systems, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpL8depay
+ * @see_also: rtpL8pay
+ *
+ * Extract raw audio from RTP packets according to RFC 3551.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3551.txt
+ *
+ * ## Example pipeline
+ *
+ * |[
+ * gst-launch-1.0 udpsrc caps='application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)L8, encoding-params=(string)1, channels=(int)1, payload=(int)96' ! rtpL8depay ! pulsesink
+ * ]| This example pipeline will depayload an RTP raw audio stream. Refer to
+ * the rtpL8pay example to create the RTP stream.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpL8depay.h"
+#include "gstrtpchannels.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpL8depay_debug);
+#define GST_CAT_DEFAULT (rtpL8depay_debug)
+
+/* src pad: interleaved unsigned 8-bit raw audio, any rate/channel count */
+static GstStaticPadTemplate gst_rtp_L8_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) U8, "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+/* sink pad: RTP packets carrying the L8 encoding (RFC 3551) */
+static GstStaticPadTemplate gst_rtp_L8_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) audio, clock-rate = (int) [ 1, MAX ], "
+ /* "channels = (int) [1, MAX]" */
+ /* "emphasis = (string) ANY" */
+ /* "channel-order = (string) ANY" */
+ "encoding-name = (string) L8;")
+ );
+
+#define gst_rtp_L8_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL8Depay, gst_rtp_L8_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+
+
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL8depay, "rtpL8depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_L8_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_L8_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_L8_depay_process (GstRTPBaseDepayload * depayload,
+ GstBuffer * buf);
+
+/* GObject class init: install the set_caps/process vmethods, pad
+ * templates and element metadata, and set up the debug category. */
+static void
+gst_rtp_L8_depay_class_init (GstRtpL8DepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_L8_depay_setcaps;
+ gstrtpbasedepayload_class->process = gst_rtp_L8_depay_process;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L8_depay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L8_depay_sink_template));
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts raw audio from RTP packets",
+ "Zeeshan Ali <zak147@yahoo.com>," "Wim Taymans <wim.taymans@gmail.com>, "
+ "GE Intelligent Platforms Embedded Systems, Inc.");
+
+ GST_DEBUG_CATEGORY_INIT (rtpL8depay_debug, "rtpL8depay", 0,
+ "Raw Audio RTP Depayloader");
+}
+
+/* Instance init: no per-instance setup needed; all state is filled in
+ * by setcaps (required by G_DEFINE_TYPE). */
+static void
+gst_rtp_L8_depay_init (GstRtpL8Depay * rtpL8depay)
+{
+}
+
+/* Read @field from @structure as an integer, accepting either a string
+ * field (parsed with atoi) or a native int field; returns @def when the
+ * field is absent.  SDP-derived caps often carry numbers as strings. */
+static gint
+gst_rtp_L8_depay_parse_int (GstStructure * structure, const gchar * field,
+ gint def)
+{
+ const gchar *str;
+ gint res;
+
+ /* string form takes precedence over the int form */
+ if ((str = gst_structure_get_string (structure, field)))
+ return atoi (str);
+
+ if (gst_structure_get_int (structure, field, &res))
+ return res;
+
+ return def;
+}
+
+/* Negotiate from the RTP sink caps: require clock-rate, derive the
+ * channel count (encoding-params, then channels, default 1), build a
+ * U8 GstAudioInfo with channel positions from the optional
+ * channel-order field, and set the resulting caps on the src pad.
+ * Returns FALSE when no clock-rate is given or the src caps are
+ * rejected. */
+static gboolean
+gst_rtp_L8_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ GstRtpL8Depay *rtpL8depay;
+ gint clock_rate;
+ gint channels;
+ GstCaps *srccaps;
+ gboolean res;
+ const gchar *channel_order;
+ const GstRTPChannelOrder *order;
+ GstAudioInfo *info;
+
+ rtpL8depay = GST_RTP_L8_DEPAY (depayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* no fixed mapping, we need clock-rate */
+ channels = 0;
+ clock_rate = 0;
+
+ /* caps can overwrite defaults */
+ clock_rate = gst_rtp_L8_depay_parse_int (structure, "clock-rate", clock_rate);
+ if (clock_rate == 0)
+ goto no_clockrate;
+
+ /* encoding-params (RFC 3551) wins over an explicit channels field */
+ channels =
+ gst_rtp_L8_depay_parse_int (structure, "encoding-params", channels);
+ if (channels == 0) {
+ channels = gst_rtp_L8_depay_parse_int (structure, "channels", channels);
+ if (channels == 0) {
+ /* channels defaults to 1 otherwise */
+ channels = 1;
+ }
+ }
+
+ depayload->clock_rate = clock_rate;
+
+ /* for L8 the sample rate equals the RTP clock rate, one byte/sample */
+ info = &rtpL8depay->info;
+ gst_audio_info_init (info);
+ info->finfo = gst_audio_format_get_info (GST_AUDIO_FORMAT_U8);
+ info->rate = clock_rate;
+ info->channels = channels;
+ info->bpf = (info->finfo->width / 8) * channels;
+
+ /* add channel positions */
+ channel_order = gst_structure_get_string (structure, "channel-order");
+
+ order = gst_rtp_channels_get_by_order (channels, channel_order);
+ rtpL8depay->order = order;
+ if (order) {
+ memcpy (info->position, order->pos,
+ sizeof (GstAudioChannelPosition) * channels);
+ gst_audio_channel_positions_to_valid_order (info->position, info->channels);
+ } else {
+ /* warn but keep going with an unpositioned layout */
+ GST_ELEMENT_WARNING (rtpL8depay, STREAM, DECODE,
+ (NULL), ("Unknown channel order '%s' for %d channels",
+ GST_STR_NULL (channel_order), channels));
+ /* create default NONE layout */
+ gst_rtp_channels_create_default (channels, info->position);
+ info->flags |= GST_AUDIO_FLAG_UNPOSITIONED;
+ }
+
+ srccaps = gst_audio_info_to_caps (info);
+ res = gst_pad_set_caps (depayload->srcpad, srccaps);
+ gst_caps_unref (srccaps);
+
+ return res;
+
+ /* ERRORS */
+no_clockrate:
+ {
+ GST_ERROR_OBJECT (depayload, "no clock-rate specified");
+ return FALSE;
+ }
+}
+
+/* Depayload one RTP packet: extract the payload as a raw audio buffer,
+ * flag the start of a talk spurt (RTP marker) with RESYNC, and reorder
+ * channels from the negotiated RFC 3551 order to the GStreamer order
+ * when one was negotiated in setcaps.  Returns NULL (dropping the
+ * packet) on an empty payload or a reordering failure. */
+static GstBuffer *
+gst_rtp_L8_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
+{
+ GstRtpL8Depay *rtpL8depay;
+ GstBuffer *outbuf;
+ gint payload_len;
+ gboolean marker;
+ GstRTPBuffer rtp = { NULL };
+
+ rtpL8depay = GST_RTP_L8_DEPAY (depayload);
+
+ gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+
+ if (payload_len <= 0)
+ goto empty_packet;
+
+ GST_DEBUG_OBJECT (rtpL8depay, "got payload of %d bytes", payload_len);
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (&rtp);
+ marker = gst_rtp_buffer_get_marker (&rtp);
+
+ if (marker) {
+ /* mark talk spurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ /* reordering modifies the data in place, so make it writable first */
+ outbuf = gst_buffer_make_writable (outbuf);
+ if (rtpL8depay->order &&
+ !gst_audio_buffer_reorder_channels (outbuf,
+ rtpL8depay->info.finfo->format, rtpL8depay->info.channels,
+ rtpL8depay->info.position, rtpL8depay->order->pos)) {
+ goto reorder_failed;
+ }
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpL8depay, STREAM, DECODE,
+ ("Empty Payload."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
+ return NULL;
+ }
+reorder_failed:
+ {
+ GST_ELEMENT_ERROR (rtpL8depay, STREAM, DECODE,
+ ("Channel reordering failed."), (NULL));
+ gst_rtp_buffer_unmap (&rtp);
+ /* fix: release the writable payload copy, previously leaked here */
+ gst_buffer_unref (outbuf);
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpL8depay.h b/gst/rtp/gstrtpL8depay.h
new file mode 100644
index 0000000000..589e9fb27f
--- /dev/null
+++ b/gst/rtp/gstrtpL8depay.h
@@ -0,0 +1,63 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2015> GE Intelligent Platforms Embedded Systems, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_L8_DEPAY_H__
+#define __GST_RTP_L8_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpchannels.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_L8_DEPAY \
+ (gst_rtp_L8_depay_get_type())
+#define GST_RTP_L8_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_L8_DEPAY,GstRtpL8Depay))
+#define GST_RTP_L8_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_L8_DEPAY,GstRtpL8DepayClass))
+#define GST_IS_RTP_L8_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_L8_DEPAY))
+#define GST_IS_RTP_L8_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_L8_DEPAY))
+
+typedef struct _GstRtpL8Depay GstRtpL8Depay;
+typedef struct _GstRtpL8DepayClass GstRtpL8DepayClass;
+
+/* Instance state of the L8 RTP depayloader. */
+struct _GstRtpL8Depay
+{
+ GstRTPBaseDepayload depayload;
+
+ /* audio format derived from the RTP caps in setcaps */
+ GstAudioInfo info;
+ /* negotiated RFC 3551 channel order, or NULL when unknown */
+ const GstRTPChannelOrder *order;
+};
+
+struct _GstRtpL8DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_L8_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_L8_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpL8pay.c b/gst/rtp/gstrtpL8pay.c
new file mode 100644
index 0000000000..b7a39e89f6
--- /dev/null
+++ b/gst/rtp/gstrtpL8pay.c
@@ -0,0 +1,241 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2015> GE Intelligent Platforms Embedded Systems, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpL8pay
+ * @see_also: rtpL8depay
+ *
+ * Payload raw audio into RTP packets according to RFC 3551.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3551.txt
+ *
+ * ## Example pipeline
+ *
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! audioconvert ! rtpL8pay ! udpsink
+ * ]| This example pipeline will payload raw audio. Refer to
+ * the rtpL8depay example to depayload and play the RTP stream.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/audio/audio.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpL8pay.h"
+#include "gstrtpchannels.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpL8pay_debug);
+#define GST_CAT_DEFAULT (rtpL8pay_debug)
+
+/* sink pad: interleaved unsigned 8-bit raw audio to be payloaded */
+static GstStaticPadTemplate gst_rtp_L8_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-raw, "
+ "format = (string) U8, "
+ "layout = (string) interleaved, "
+ "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
+ );
+
+/* src pad: dynamic-payload RTP with the L8 encoding (RFC 3551) */
+static GstStaticPadTemplate gst_rtp_L8_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) audio, "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [ 1, MAX ], "
+ "encoding-name = (string) L8, " "channels = (int) [ 1, MAX ];")
+ );
+
+static gboolean gst_rtp_L8_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+static GstCaps *gst_rtp_L8_pay_getcaps (GstRTPBasePayload * rtppayload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn
+gst_rtp_L8_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer);
+
+#define gst_rtp_L8_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpL8Pay, gst_rtp_L8_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+
+
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpL8pay, "rtpL8pay", GST_RANK_SECONDARY,
+ GST_TYPE_RTP_L8_PAY, rtp_element_init (plugin));
+
+/* GObject class init: install set_caps/get_caps/handle_buffer vmethods,
+ * pad templates and element metadata, and set up the debug category. */
+static void
+gst_rtp_L8_pay_class_init (GstRtpL8PayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_L8_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_L8_pay_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_L8_pay_handle_buffer;
+
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L8_pay_src_template));
+ gst_element_class_add_pad_template (gstelement_class,
+ gst_static_pad_template_get (&gst_rtp_L8_pay_sink_template));
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encode Raw audio into RTP packets (RFC 3551)",
+ "Wim Taymans <wim.taymans@gmail.com>, "
+ "GE Intelligent Platforms Embedded Systems, Inc.");
+
+ GST_DEBUG_CATEGORY_INIT (rtpL8pay_debug, "rtpL8pay", 0, "L8 RTP Payloader");
+}
+
+/* Instance init: configure the base audio payloader for sample-based
+ * (as opposed to frame-based) packetization. */
+static void
+gst_rtp_L8_pay_init (GstRtpL8Pay * rtpL8pay)
+{
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpL8pay);
+
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
+}
+
+/* Negotiate from the raw-audio sink caps: cache the GstAudioInfo, look
+ * up the matching RFC 3551 channel order, configure the RTP options
+ * (encoding L8, clock rate = sample rate) and set the output caps,
+ * advertising channel-order only when a named order was found.
+ * Returns FALSE on unparsable caps. */
+static gboolean
+gst_rtp_L8_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+ GstRtpL8Pay *rtpL8pay;
+ gboolean res;
+ gchar *params;
+ GstAudioInfo *info;
+ const GstRTPChannelOrder *order;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);
+ rtpL8pay = GST_RTP_L8_PAY (basepayload);
+
+ info = &rtpL8pay->info;
+ gst_audio_info_init (info);
+ if (!gst_audio_info_from_caps (info, caps))
+ goto invalid_caps;
+
+ order = gst_rtp_channels_get_by_pos (info->channels, info->position);
+ rtpL8pay->order = order;
+
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "L8",
+ info->rate);
+ params = g_strdup_printf ("%d", info->channels);
+
+ /* mono/stereo need no explicit order; warn for larger unknown layouts */
+ if (!order && info->channels > 2) {
+ GST_ELEMENT_WARNING (rtpL8pay, STREAM, DECODE,
+ (NULL), ("Unknown channel order for %d channels", info->channels));
+ }
+
+ if (order && order->name) {
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
+ info->channels, "channel-order", G_TYPE_STRING, order->name, NULL);
+ } else {
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
+ info->channels, NULL);
+ }
+
+ g_free (params);
+
+ /* octet-per-sample is # channels for L8 */
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload,
+ info->channels);
+
+ return res;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_DEBUG_OBJECT (rtpL8pay, "invalid caps");
+ return FALSE;
+ }
+}
+
+/* Compute the caps the sink pad can accept: start from the template
+ * caps and narrow rate/channels using what the downstream (src) peer
+ * allows, then intersect with @filter if given.
+ * NOTE(review): only the first structure of the allowed caps is
+ * consulted -- presumably sufficient for typical peers; verify. */
+static GstCaps *
+gst_rtp_L8_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *otherpadcaps;
+ GstCaps *caps;
+
+ caps = gst_pad_get_pad_template_caps (pad);
+
+ otherpadcaps = gst_pad_get_allowed_caps (rtppayload->srcpad);
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ GstStructure *structure;
+ gint channels;
+ gint rate;
+
+ structure = gst_caps_get_structure (otherpadcaps, 0);
+ caps = gst_caps_make_writable (caps);
+
+ /* fixed downstream channels/clock-rate constrain our raw caps */
+ if (gst_structure_get_int (structure, "channels", &channels)) {
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, channels, NULL);
+ }
+
+ if (gst_structure_get_int (structure, "clock-rate", &rate)) {
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, rate, NULL);
+ }
+
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
+ return caps;
+}
+
+/* Reorder the channels of the incoming raw audio buffer to the
+ * negotiated RFC 3551 order (in place, hence make_writable) and hand
+ * it to the base audio payloader for packetization. */
+static GstFlowReturn
+gst_rtp_L8_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpL8Pay *rtpL8pay;
+
+ rtpL8pay = GST_RTP_L8_PAY (basepayload);
+ buffer = gst_buffer_make_writable (buffer);
+
+ if (rtpL8pay->order &&
+ !gst_audio_buffer_reorder_channels (buffer, rtpL8pay->info.finfo->format,
+ rtpL8pay->info.channels, rtpL8pay->info.position,
+ rtpL8pay->order->pos)) {
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (basepayload,
+ buffer);
+}
diff --git a/gst/rtp/gstrtpL8pay.h b/gst/rtp/gstrtpL8pay.h
new file mode 100644
index 0000000000..bebd724f71
--- /dev/null
+++ b/gst/rtp/gstrtpL8pay.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2015> GE Intelligent Platforms Embedded Systems, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_L8_PAY_H__
+#define __GST_RTP_L8_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+#include "gstrtpchannels.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_L8_PAY \
+ (gst_rtp_L8_pay_get_type())
+#define GST_RTP_L8_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_L8_PAY,GstRtpL8Pay))
+#define GST_RTP_L8_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_L8_PAY,GstRtpL8PayClass))
+#define GST_IS_RTP_L8_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_L8_PAY))
+#define GST_IS_RTP_L8_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_L8_PAY))
+
+typedef struct _GstRtpL8Pay GstRtpL8Pay;
+typedef struct _GstRtpL8PayClass GstRtpL8PayClass;
+
+/* Instance state of the L8 RTP payloader. */
+struct _GstRtpL8Pay
+{
+ GstRTPBaseAudioPayload payload;
+
+ /* audio format negotiated from the sink caps */
+ GstAudioInfo info;
+ /* RFC 3551 channel order matching info, or NULL if unknown */
+ const GstRTPChannelOrder *order;
+};
+
+struct _GstRtpL8PayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_L8_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_L8_PAY_H__ */
diff --git a/gst/rtp/gstrtpac3depay.c b/gst/rtp/gstrtpac3depay.c
new file mode 100644
index 0000000000..68c23e2768
--- /dev/null
+++ b/gst/rtp/gstrtpac3depay.c
@@ -0,0 +1,176 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpac3depay
+ * @title: rtpac3depay
+ * @see_also: rtpac3pay
+ *
+ * Extract AC3 audio from RTP packets according to RFC 4184.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc4184.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 udpsrc caps='application/x-rtp, media=(string)audio, clock-rate=(int)44100, encoding-name=(string)AC3, payload=(int)96' ! rtpac3depay ! a52dec ! pulsesink
+ * ]| This example pipeline will depayload and decode an RTP AC3 stream. Refer to
+ * the rtpac3pay example to create the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpac3depay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpac3depay_debug);
+#define GST_CAT_DEFAULT (rtpac3depay_debug)
+
+/* src pad: raw AC3 elementary stream */
+static GstStaticPadTemplate gst_rtp_ac3_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/ac3")
+ );
+
+/* sink pad: RTP packets carrying AC3 (RFC 4184) */
+static GstStaticPadTemplate gst_rtp_ac3_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) { 32000, 44100, 48000 }, "
+ "encoding-name = (string) \"AC3\"")
+ );
+
+G_DEFINE_TYPE (GstRtpAC3Depay, gst_rtp_ac3_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpac3depay, "rtpac3depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_AC3_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_ac3_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_ac3_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+/* GObject class init: install pad templates, element metadata and the
+ * set_caps/process_rtp_packet vmethods; set up the debug category. */
+static void
+gst_rtp_ac3_depay_class_init (GstRtpAC3DepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_ac3_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_ac3_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP AC3 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts AC3 audio from RTP packets (RFC 4184)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_ac3_depay_setcaps;
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_ac3_depay_process;
+
+ GST_DEBUG_CATEGORY_INIT (rtpac3depay_debug, "rtpac3depay", 0,
+ "AC3 Audio RTP Depayloader");
+}
+
+/* Instance init: stateless element, nothing to set up. */
+static void
+gst_rtp_ac3_depay_init (GstRtpAC3Depay * rtpac3depay)
+{
+ /* needed because of G_DEFINE_TYPE */
+}
+
+/* Negotiate from the RTP sink caps: read clock-rate (defaulting to the
+ * RTP video/AC3 default of 90000) and set plain audio/ac3 src caps. */
+static gboolean
+gst_rtp_ac3_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ gint clock_rate;
+ GstCaps *srccaps;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000; /* default */
+ depayload->clock_rate = clock_rate;
+
+ srccaps = gst_caps_new_empty_simple ("audio/ac3");
+ res = gst_pad_set_caps (depayload->srcpad, srccaps);
+ gst_caps_unref (srccaps);
+
+ return res;
+}
+
+/* Depayload one RTP AC3 packet: skip the 2-byte RFC 4184 payload header
+ * and return the remaining AC3 data as a new buffer, or NULL on a
+ * too-short payload.  FT/NF are only logged; fragmented frames
+ * (FT != 0 per RFC 4184 -- TODO confirm) are not reassembled. */
+static GstBuffer *
+gst_rtp_ac3_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpAC3Depay *rtpac3depay;
+ GstBuffer *outbuf;
+ guint8 *payload;
+ guint16 FT, NF;
+
+ rtpac3depay = GST_RTP_AC3_DEPAY (depayload);
+
+ /* need at least the 2-byte payload header */
+ if (gst_rtp_buffer_get_payload_len (rtp) < 2)
+ goto empty_packet;
+
+ payload = gst_rtp_buffer_get_payload (rtp);
+
+ /* strip off header
+ *
+ * 0 1
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | FT| NF |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ FT = payload[0] & 0x3;
+ NF = payload[1];
+
+ GST_DEBUG_OBJECT (rtpac3depay, "FT: %d, NF: %d", FT, NF);
+
+ /* We don't bother with fragmented packets yet */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, 2, -1);
+
+ if (outbuf) {
+ /* non-audio metas make no sense on a depayloaded audio buffer */
+ gst_rtp_drop_non_audio_meta (rtpac3depay, outbuf);
+ GST_DEBUG_OBJECT (rtpac3depay, "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
+ }
+
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpac3depay, STREAM, DECODE,
+ ("Empty Payload."), (NULL));
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpac3depay.h b/gst/rtp/gstrtpac3depay.h
new file mode 100644
index 0000000000..62478f9c82
--- /dev/null
+++ b/gst/rtp/gstrtpac3depay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_AC3_DEPAY_H__
+#define __GST_RTP_AC3_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_AC3_DEPAY \
+ (gst_rtp_ac3_depay_get_type())
+#define GST_RTP_AC3_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_AC3_DEPAY,GstRtpAC3Depay))
+#define GST_RTP_AC3_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_AC3_DEPAY,GstRtpAC3DepayClass))
+#define GST_IS_RTP_AC3_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_AC3_DEPAY))
+#define GST_IS_RTP_AC3_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_AC3_DEPAY))
+
+typedef struct _GstRtpAC3Depay GstRtpAC3Depay;
+typedef struct _GstRtpAC3DepayClass GstRtpAC3DepayClass;
+
+/* Instance state of the AC3 RTP depayloader (stateless beyond base). */
+struct _GstRtpAC3Depay
+{
+ GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpAC3DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_ac3_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_AC3_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpac3pay.c b/gst/rtp/gstrtpac3pay.c
new file mode 100644
index 0000000000..efce5b54b2
--- /dev/null
+++ b/gst/rtp/gstrtpac3pay.c
@@ -0,0 +1,475 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpac3pay
+ * @title: rtpac3pay
+ * @see_also: rtpac3depay
+ *
+ * Payload AC3 audio into RTP packets according to RFC 4184.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc4184.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! avenc_ac3 ! rtpac3pay ! udpsink
+ * ]| This example pipeline will encode and payload AC3 stream. Refer to
+ * the rtpac3depay example to depayload and decode the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpac3pay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpac3pay_debug);
+#define GST_CAT_DEFAULT (rtpac3pay_debug)
+
+/* Accepts raw AC-3 elementary streams on the sink pad. */
+static GstStaticPadTemplate gst_rtp_ac3_pay_sink_template =
+    GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("audio/ac3; " "audio/x-ac3; ")
+    );
+
+/* Produces RTP packets with the AC3 encoding-name (RFC 4184). */
+static GstStaticPadTemplate gst_rtp_ac3_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-rtp, "
+        "media = (string) \"audio\", "
+        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+        "clock-rate = (int) { 32000, 44100, 48000 }, "
+        "encoding-name = (string) \"AC3\"")
+    );
+
+static void gst_rtp_ac3_pay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_ac3_pay_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_rtp_ac3_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static gboolean gst_rtp_ac3_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+static GstFlowReturn gst_rtp_ac3_pay_flush (GstRtpAC3Pay * rtpac3pay);
+static GstFlowReturn gst_rtp_ac3_pay_handle_buffer (GstRTPBasePayload * payload,
+ GstBuffer * buffer);
+
+#define gst_rtp_ac3_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAC3Pay, gst_rtp_ac3_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpac3pay, "rtpac3pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_AC3_PAY, rtp_element_init (plugin));
+
+/* Class initialisation: wire up vfuncs, pad templates and metadata. */
+static void
+gst_rtp_ac3_pay_class_init (GstRtpAC3PayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  /* Fixed copy-paste from the depayloader: this is the payloader's
+   * debug category description. */
+  GST_DEBUG_CATEGORY_INIT (rtpac3pay_debug, "rtpac3pay", 0,
+      "AC3 Audio RTP Payloader");
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_ac3_pay_finalize;
+
+  gstelement_class->change_state = gst_rtp_ac3_pay_change_state;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_ac3_pay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_ac3_pay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP AC3 audio payloader", "Codec/Payloader/Network/RTP",
+      "Payload AC3 audio as RTP packets (RFC 4184)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_ac3_pay_setcaps;
+  gstrtpbasepayload_class->sink_event = gst_rtp_ac3_pay_sink_event;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_ac3_pay_handle_buffer;
+}
+
+/* Instance init: create the adapter that accumulates AC-3 frames
+ * until they are flushed out as RTP packets. */
+static void
+gst_rtp_ac3_pay_init (GstRtpAC3Pay * rtpac3pay)
+{
+  rtpac3pay->adapter = gst_adapter_new ();
+}
+
+/* Release the adapter created in _init, then chain up. */
+static void
+gst_rtp_ac3_pay_finalize (GObject * object)
+{
+  GstRtpAC3Pay *rtpac3pay;
+
+  rtpac3pay = GST_RTP_AC3_PAY (object);
+
+  g_object_unref (rtpac3pay->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Drop all pending data and timing state; used on DISCONT, flush-stop
+ * and state changes. */
+static void
+gst_rtp_ac3_pay_reset (GstRtpAC3Pay * pay)
+{
+  pay->first_ts = -1;
+  pay->duration = 0;
+  gst_adapter_clear (pay->adapter);
+  /* message fixed: this element is the payloader, not the depayloader */
+  GST_DEBUG_OBJECT (pay, "reset payloader");
+}
+
+/* Configure the RTP session from the sink caps: use the AC-3 sample
+ * rate as the RTP clock-rate (RFC 4184 section 4.1). */
+static gboolean
+gst_rtp_ac3_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  gboolean res;
+  gint rate;
+  GstStructure *structure;
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (!gst_structure_get_int (structure, "rate", &rate))
+    rate = 90000;               /* default; NOTE(review): 90000 is a
+                                 * video-style clock while the src caps
+                                 * advertise 32000/44100/48000 — confirm */
+
+  gst_rtp_base_payload_set_options (payload, "audio", TRUE, "AC3", rate);
+  res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+  return res;
+}
+
+/* Intercept serialized sink events: drain pending data on EOS, reset
+ * state on flush-stop, then forward the event to the base class. */
+static gboolean
+gst_rtp_ac3_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+  gboolean res;
+  GstRtpAC3Pay *rtpac3pay;
+
+  rtpac3pay = GST_RTP_AC3_PAY (payload);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_EOS:
+      /* make sure we push the last packets in the adapter on EOS */
+      gst_rtp_ac3_pay_flush (rtpac3pay);
+      break;
+    case GST_EVENT_FLUSH_STOP:
+      gst_rtp_ac3_pay_reset (rtpac3pay);
+      break;
+    default:
+      break;
+  }
+
+  res = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+
+  return res;
+}
+
+/* One row per frmsizecod value: nominal bitrate (kbit/s) plus the frame
+ * length in 16-bit words for each of the three fscod sample rates
+ * (presumably 48, 44.1 and 32 kHz in that order — confirm against
+ * ATSC A/52 Table 5.18; the parser multiplies by 2 to get bytes). */
+struct frmsize_s
+{
+  guint16 bit_rate;
+  guint16 frm_size[3];
+};
+
+static const struct frmsize_s frmsizecod_tbl[] = {
+  {32, {64, 69, 96}},
+  {32, {64, 70, 96}},
+  {40, {80, 87, 120}},
+  {40, {80, 88, 120}},
+  {48, {96, 104, 144}},
+  {48, {96, 105, 144}},
+  {56, {112, 121, 168}},
+  {56, {112, 122, 168}},
+  {64, {128, 139, 192}},
+  {64, {128, 140, 192}},
+  {80, {160, 174, 240}},
+  {80, {160, 175, 240}},
+  {96, {192, 208, 288}},
+  {96, {192, 209, 288}},
+  {112, {224, 243, 336}},
+  {112, {224, 244, 336}},
+  {128, {256, 278, 384}},
+  {128, {256, 279, 384}},
+  {160, {320, 348, 480}},
+  {160, {320, 349, 480}},
+  {192, {384, 417, 576}},
+  {192, {384, 418, 576}},
+  {224, {448, 487, 672}},
+  {224, {448, 488, 672}},
+  {256, {512, 557, 768}},
+  {256, {512, 558, 768}},
+  {320, {640, 696, 960}},
+  {320, {640, 697, 960}},
+  {384, {768, 835, 1152}},
+  {384, {768, 836, 1152}},
+  {448, {896, 975, 1344}},
+  {448, {896, 976, 1344}},
+  {512, {1024, 1114, 1536}},
+  {512, {1024, 1115, 1536}},
+  {576, {1152, 1253, 1728}},
+  {576, {1152, 1254, 1728}},
+  {640, {1280, 1393, 1920}},
+  {640, {1280, 1394, 1920}}
+};
+
+/* Drain the adapter into one or more RTP packets.  Each packet gets the
+ * 2-byte AC-3 payload header (MBZ/FT/NF, RFC 4184 section 4.2) and the
+ * data is fragmented over several packets when it exceeds the MTU.
+ * Returns the flow result of the last push. */
+static GstFlowReturn
+gst_rtp_ac3_pay_flush (GstRtpAC3Pay * rtpac3pay)
+{
+  guint avail, FT, NF, mtu;
+  GstBuffer *outbuf;
+  GstFlowReturn ret;
+
+  /* the data available in the adapter is either smaller
+   * than the MTU or bigger. In the case it is smaller, the complete
+   * adapter contents can be put in one packet. In the case the
+   * adapter has more than one MTU, we need to split the AC3 data
+   * over multiple packets. */
+  avail = gst_adapter_available (rtpac3pay->adapter);
+
+  ret = GST_FLOW_OK;
+
+  FT = 0;
+  /* number of frames, counted by handle_buffer while scanning syncframes */
+  NF = rtpac3pay->NF;
+
+  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpac3pay);
+
+  GST_LOG_OBJECT (rtpac3pay, "flushing %u bytes", avail);
+
+  while (avail > 0) {
+    guint towrite;
+    guint8 *payload;
+    guint payload_len;
+    guint packet_len;
+    GstRTPBuffer rtp = { NULL, };
+    GstBuffer *payload_buffer;
+
+    /* this will be the total length of the packet: RTP header plus the
+     * 2-byte AC-3 payload header plus the remaining data */
+    packet_len = gst_rtp_buffer_calc_packet_len (2 + avail, 0, 0);
+
+    /* fill one MTU or all available bytes */
+    towrite = MIN (packet_len, mtu);
+
+    /* this is the payload length */
+    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
+
+    /* create buffer to hold the 2-byte payload header; the frame data is
+     * appended from the adapter below without copying */
+    outbuf =
+        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+        (rtpac3pay), 2, 0, 0);
+
+    if (FT == 0) {
+      /* first iteration: check if it all fits */
+      if (towrite < packet_len) {
+        guint maxlen;
+
+        GST_LOG_OBJECT (rtpac3pay, "we need to fragment");
+        /* check if we will be able to put at least 5/8th of the total
+         * frame in this first fragment (distinguishes FT 1 from FT 2). */
+        if ((avail * 5) / 8 >= (payload_len - 2))
+          FT = 1;
+        else
+          FT = 2;
+        /* check how many fragments we will need; NF becomes the fragment
+         * count instead of the frame count */
+        maxlen = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);
+        NF = (avail + maxlen - 1) / maxlen;
+      }
+    } else if (FT != 3) {
+      /* remaining fragment */
+      FT = 3;
+    }
+
+    /*
+     * Payload header layout (RFC 4184):
+     *
+     *  0                   1
+     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |    MBZ    | FT|      NF       |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     *
+     * FT: 0: one or more complete frames
+     *     1: initial 5/8 fragment
+     *     2: initial fragment not 5/8
+     *     3: other fragment
+     * NF: amount of frames if FT = 0, else number of fragments.
+     */
+    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+    GST_LOG_OBJECT (rtpac3pay, "FT %u, NF %u", FT, NF);
+    payload = gst_rtp_buffer_get_payload (&rtp);
+    payload[0] = (FT & 3);
+    payload[1] = NF;
+    payload_len -= 2;
+
+    /* last packet of this flush: set the RTP marker bit */
+    if (avail == payload_len)
+      gst_rtp_buffer_set_marker (&rtp, TRUE);
+    gst_rtp_buffer_unmap (&rtp);
+
+    payload_buffer =
+        gst_adapter_take_buffer_fast (rtpac3pay->adapter, payload_len);
+
+    gst_rtp_copy_audio_meta (rtpac3pay, outbuf, payload_buffer);
+
+    outbuf = gst_buffer_append (outbuf, payload_buffer);
+
+    avail -= payload_len;
+
+    /* all packets of one flush share the timing of the first buffer */
+    GST_BUFFER_PTS (outbuf) = rtpac3pay->first_ts;
+    GST_BUFFER_DURATION (outbuf) = rtpac3pay->duration;
+
+    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpac3pay), outbuf);
+  }
+
+  return ret;
+}
+
+/* Validate that the incoming buffer contains complete AC-3 syncframes,
+ * flush the adapter first if adding the buffer would overflow the MTU or
+ * ptime, then queue the buffer in the adapter and update the pending
+ * frame count and duration. */
+static GstFlowReturn
+gst_rtp_ac3_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstRtpAC3Pay *rtpac3pay;
+  GstFlowReturn ret;
+  gsize avail, left, NF;
+  GstMapInfo map;
+  guint8 *p;
+  guint packet_len;
+  GstClockTime duration, timestamp;
+
+  rtpac3pay = GST_RTP_AC3_PAY (basepayload);
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  duration = GST_BUFFER_DURATION (buffer);
+  timestamp = GST_BUFFER_PTS (buffer);
+
+  if (GST_BUFFER_IS_DISCONT (buffer)) {
+    GST_DEBUG_OBJECT (rtpac3pay, "DISCONT");
+    gst_rtp_ac3_pay_reset (rtpac3pay);
+  }
+
+  /* count the number of complete AC-3 frames in the buffer */
+  NF = 0;
+  left = map.size;
+  p = map.data;
+  while (TRUE) {
+    guint bsid, fscod, frmsizecod, frame_size;
+
+    /* need the fixed syncinfo/bsi header bytes we inspect below */
+    if (left < 6)
+      break;
+
+    /* 0x0b77 is the AC-3 syncword */
+    if (p[0] != 0x0b || p[1] != 0x77)
+      break;
+
+    /* standard AC-3 bitstreams have bsid <= 8; anything larger is not
+     * plain AC-3 (presumably E-AC-3/reserved — confirm against A/52) */
+    bsid = p[5] >> 3;
+    if (bsid > 8)
+      break;
+
+    frmsizecod = p[4] & 0x3f;
+    fscod = p[4] >> 6;
+
+    GST_DEBUG_OBJECT (rtpac3pay, "fscod %u, %u", fscod, frmsizecod);
+
+    /* fscod 3 and frmsizecod >= 38 are out of table range */
+    if (fscod >= 3 || frmsizecod >= 38)
+      break;
+
+    /* table entries are in 16-bit words, hence the *2 for bytes */
+    frame_size = frmsizecod_tbl[frmsizecod].frm_size[fscod] * 2;
+    if (frame_size > left)
+      break;
+
+    NF++;
+    GST_DEBUG_OBJECT (rtpac3pay, "found frame %" G_GSIZE_FORMAT " of size %u",
+        NF, frame_size);
+
+    p += frame_size;
+    left -= frame_size;
+  }
+  gst_buffer_unmap (buffer, &map);
+  if (NF == 0)
+    goto no_frames;
+
+  avail = gst_adapter_available (rtpac3pay->adapter);
+
+  /* get packet length of previous data and this new data,
+   * payload length includes the 2 byte AC-3 payload header */
+  packet_len = gst_rtp_buffer_calc_packet_len (2 + avail + map.size, 0, 0);
+
+  /* if this buffer is going to overflow the packet, flush what we
+   * have. */
+  if (gst_rtp_base_payload_is_filled (basepayload,
+          packet_len, rtpac3pay->duration + duration)) {
+    ret = gst_rtp_ac3_pay_flush (rtpac3pay);
+    avail = 0;
+  } else {
+    ret = GST_FLOW_OK;
+  }
+
+  /* adapter is empty: this buffer starts a new packet, so remember its
+   * timestamp and restart the duration/frame accounting */
+  if (avail == 0) {
+    GST_DEBUG_OBJECT (rtpac3pay,
+        "first packet, save timestamp %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (timestamp));
+    rtpac3pay->first_ts = timestamp;
+    rtpac3pay->duration = 0;
+    rtpac3pay->NF = 0;
+  }
+
+  gst_adapter_push (rtpac3pay->adapter, buffer);
+  rtpac3pay->duration += duration;
+  rtpac3pay->NF += NF;
+
+  return ret;
+
+  /* ERRORS */
+no_frames:
+  {
+    /* not fatal: the buffer is dropped and the stream continues */
+    GST_WARNING_OBJECT (rtpac3pay, "no valid AC3 frames found");
+    return GST_FLOW_OK;
+  }
+}
+
+/* Reset the payloader state when entering PAUSED and again when going
+ * back to READY, chaining up to the parent in between. */
+static GstStateChangeReturn
+gst_rtp_ac3_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRtpAC3Pay *rtpac3pay;
+  GstStateChangeReturn ret;
+
+  rtpac3pay = GST_RTP_AC3_PAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_rtp_ac3_pay_reset (rtpac3pay);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_rtp_ac3_pay_reset (rtpac3pay);
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpac3pay.h b/gst/rtp/gstrtpac3pay.h
new file mode 100644
index 0000000000..918a250e49
--- /dev/null
+++ b/gst/rtp/gstrtpac3pay.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_AC3_PAY_H__
+#define __GST_RTP_AC3_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_AC3_PAY \
+ (gst_rtp_ac3_pay_get_type())
+#define GST_RTP_AC3_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_AC3_PAY,GstRtpAC3Pay))
+#define GST_RTP_AC3_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_AC3_PAY,GstRtpAC3PayClass))
+#define GST_IS_RTP_AC3_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_AC3_PAY))
+#define GST_IS_RTP_AC3_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_AC3_PAY))
+
+typedef struct _GstRtpAC3Pay GstRtpAC3Pay;
+typedef struct _GstRtpAC3PayClass GstRtpAC3PayClass;
+
+struct _GstRtpAC3Pay
+{
+  GstRTPBasePayload payload;
+
+  GstAdapter *adapter;          /* pending AC-3 frame data not yet packetized */
+  GstClockTime first_ts;        /* PTS of the first buffer in the adapter */
+  GstClockTime duration;        /* accumulated duration of the queued data */
+  guint NF;                     /* number of complete frames in the adapter */
+};
+
+struct _GstRtpAC3PayClass
+{
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_ac3_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_AC3_PAY_H__ */
diff --git a/gst/rtp/gstrtpamrdepay.c b/gst/rtp/gstrtpamrdepay.c
new file mode 100644
index 0000000000..08e64e5180
--- /dev/null
+++ b/gst/rtp/gstrtpamrdepay.c
@@ -0,0 +1,474 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpamrdepay
+ * @title: rtpamrdepay
+ * @see_also: rtpamrpay
+ *
+ * Extract AMR audio from RTP packets according to RFC 3267.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3267.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 udpsrc caps='application/x-rtp, media=(string)audio, clock-rate=(int)8000, encoding-name=(string)AMR, encoding-params=(string)1, octet-align=(string)1, payload=(int)96' ! rtpamrdepay ! amrnbdec ! pulsesink
+ * ]| This example pipeline will depayload and decode an RTP AMR stream. Refer to
+ * the rtpamrpay example to create the RTP stream.
+ *
+ */
+
+/*
+ * RFC 3267 - Real-Time Transport Protocol (RTP) Payload Format and File
+ * Storage Format for the Adaptive Multi-Rate (AMR) and Adaptive Multi-Rate
+ * Wideband (AMR-WB) Audio Codecs.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include <stdlib.h>
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpamrdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpamrdepay_debug);
+#define GST_CAT_DEFAULT (rtpamrdepay_debug)
+
+/* RtpAMRDepay signals and args: none defined yet, these enums are
+ * placeholders for future signals/properties. */
+enum
+{
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+enum
+{
+  PROP_0
+};
+
+/* input is an RTP packet
+ *
+ * params see RFC 3267, section 8.1
+ */
+/* input is an RTP packet
+ *
+ * params see RFC 3267, section 8.1
+ */
+static GstStaticPadTemplate gst_rtp_amr_depay_sink_template =
+    GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-rtp, "
+        "media = (string) \"audio\", "
+        "clock-rate = (int) 8000, " "encoding-name = (string) \"AMR\", "
+        /* This is the default, so the peer doesn't have to specify it
+         * "encoding-params = (string) \"1\", " */
+        /* NOTE that all values must be strings in order to be able to do SDP <->
+         * GstCaps mapping. */
+        "octet-align = (string) \"1\";"
+        /* following options are not needed for a decoder
+         *
+         "crc = (string) { \"0\", \"1\" }, "
+         "robust-sorting = (string) \"0\", "
+         "interleaving = (string) \"0\";"
+         "mode-set = (int) [ 0, 7 ], "
+         "mode-change-period = (int) [ 1, MAX ], "
+         "mode-change-neighbor = (boolean) { TRUE, FALSE }, "
+         "maxptime = (int) [ 20, MAX ], "
+         "ptime = (int) [ 20, MAX ]"
+         */
+        "application/x-rtp, "
+        "media = (string) \"audio\", "
+        "clock-rate = (int) 16000, " "encoding-name = (string) \"AMR-WB\", "
+        /* This is the default, so the peer doesn't have to specify it
+         * "encoding-params = (string) \"1\", " */
+        /* NOTE that all values must be strings in order to be able to do SDP <->
+         * GstCaps mapping. */
+        "octet-align = (string) \"1\";"
+        /* following options are not needed for a decoder
+         *
+         "crc = (string) { \"0\", \"1\" }, "
+         "robust-sorting = (string) \"0\", "
+         "interleaving = (string) \"0\""
+         "mode-set = (int) [ 0, 7 ], "
+         "mode-change-period = (int) [ 1, MAX ], "
+         "mode-change-neighbor = (boolean) { TRUE, FALSE }, "
+         "maxptime = (int) [ 20, MAX ], "
+         "ptime = (int) [ 20, MAX ]"
+         */
+    )
+    );
+
+/* output is a mono AMR (8 kHz) or AMR-WB (16 kHz) elementary stream */
+static GstStaticPadTemplate gst_rtp_amr_depay_src_template =
+    GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("audio/AMR, " "channels = (int) 1," "rate = (int) 8000;"
+        "audio/AMR-WB, " "channels = (int) 1," "rate = (int) 16000")
+    );
+
+static gboolean gst_rtp_amr_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_amr_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+#define gst_rtp_amr_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRDepay, gst_rtp_amr_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpamrdepay, "rtpamrdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_AMR_DEPAY, rtp_element_init (plugin));
+
+/* Class initialisation: wire up vfuncs, pad templates and metadata. */
+static void
+gst_rtp_amr_depay_class_init (GstRtpAMRDepayClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_amr_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_amr_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP AMR depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts AMR or AMR-WB audio from RTP packets (RFC 3267)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_amr_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_amr_depay_setcaps;
+
+  GST_DEBUG_CATEGORY_INIT (rtpamrdepay_debug, "rtpamrdepay", 0,
+      "AMR/AMR-WB RTP Depayloader");
+}
+
+/* Instance init: the src caps are fully determined by setcaps, so use
+ * fixed caps on the src pad. */
+static void
+gst_rtp_amr_depay_init (GstRtpAMRDepay * rtpamrdepay)
+{
+  GstRTPBaseDepayload *depayload;
+
+  depayload = GST_RTP_BASE_DEPAYLOAD (rtpamrdepay);
+
+  gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
+}
+
+/* Parse the RFC 3267 SDP parameters from the sink caps, reject the
+ * combinations this depayloader does not implement (bandwidth-efficient
+ * mode, CRC, robust sorting, interleaving, multichannel) and set the
+ * matching audio/AMR or audio/AMR-WB src caps. */
+static gboolean
+gst_rtp_amr_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstCaps *srccaps;
+  GstRtpAMRDepay *rtpamrdepay;
+  const gchar *params;
+  const gchar *str, *type;
+  gint clock_rate, need_clock_rate;
+  gboolean res;
+
+  rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* figure out the mode first and set the clock rates */
+  if ((str = gst_structure_get_string (structure, "encoding-name"))) {
+    if (strcmp (str, "AMR") == 0) {
+      rtpamrdepay->mode = GST_RTP_AMR_DP_MODE_NB;
+      need_clock_rate = 8000;
+      type = "audio/AMR";
+    } else if (strcmp (str, "AMR-WB") == 0) {
+      rtpamrdepay->mode = GST_RTP_AMR_DP_MODE_WB;
+      need_clock_rate = 16000;
+      type = "audio/AMR-WB";
+    } else
+      goto invalid_mode;
+  } else
+    goto invalid_mode;
+
+  /* the SDP parameters below are strings per RFC 3267 section 8.1 */
+  if (!(str = gst_structure_get_string (structure, "octet-align")))
+    rtpamrdepay->octet_align = FALSE;
+  else
+    rtpamrdepay->octet_align = (atoi (str) == 1);
+
+  if (!(str = gst_structure_get_string (structure, "crc")))
+    rtpamrdepay->crc = FALSE;
+  else
+    rtpamrdepay->crc = (atoi (str) == 1);
+
+  if (rtpamrdepay->crc) {
+    /* crc mode implies octet aligned mode */
+    rtpamrdepay->octet_align = TRUE;
+  }
+
+  if (!(str = gst_structure_get_string (structure, "robust-sorting")))
+    rtpamrdepay->robust_sorting = FALSE;
+  else
+    rtpamrdepay->robust_sorting = (atoi (str) == 1);
+
+  if (rtpamrdepay->robust_sorting) {
+    /* robust_sorting mode implies octet aligned mode */
+    rtpamrdepay->octet_align = TRUE;
+  }
+
+  if (!(str = gst_structure_get_string (structure, "interleaving")))
+    rtpamrdepay->interleaving = FALSE;
+  else
+    rtpamrdepay->interleaving = (atoi (str) == 1);
+
+  if (rtpamrdepay->interleaving) {
+    /* interleaving mode implies octet aligned mode */
+    rtpamrdepay->octet_align = TRUE;
+  }
+
+  if (!(params = gst_structure_get_string (structure, "encoding-params")))
+    rtpamrdepay->channels = 1;
+  else {
+    rtpamrdepay->channels = atoi (params);
+  }
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = need_clock_rate;
+  depayload->clock_rate = clock_rate;
+
+  /* we require 1 channel, 8000 Hz, octet aligned, no CRC,
+   * no robust sorting, no interleaving for now */
+  if (rtpamrdepay->channels != 1)
+    return FALSE;
+  if (clock_rate != need_clock_rate)
+    return FALSE;
+  if (rtpamrdepay->octet_align != TRUE)
+    return FALSE;
+  if (rtpamrdepay->robust_sorting != FALSE)
+    return FALSE;
+  if (rtpamrdepay->interleaving != FALSE)
+    return FALSE;
+
+  srccaps = gst_caps_new_simple (type,
+      "channels", G_TYPE_INT, rtpamrdepay->channels,
+      "rate", G_TYPE_INT, clock_rate, NULL);
+  res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+
+  /* ERRORS */
+invalid_mode:
+  {
+    GST_ERROR_OBJECT (rtpamrdepay, "invalid encoding-name");
+    return FALSE;
+  }
+}
+
+/* Speech-data bytes per frame, indexed by the 4-bit FT field of the
+ * octet-aligned table-of-contents entry; -1 marks reserved/invalid FT
+ * values and 0 marks empty (NO_DATA/SID-style) frames — see RFC 3267. */
+static const gint nb_frame_size[16] = {
+  12, 13, 15, 17, 19, 20, 26, 31,
+  5, -1, -1, -1, -1, -1, -1, 0
+};
+
+static const gint wb_frame_size[16] = {
+  17, 23, 32, 36, 40, 46, 50, 58,
+  60, 5, -1, -1, -1, -1, -1, 0
+};
+
+/* Depayload one octet-aligned AMR/AMR-WB RTP packet (RFC 3267 section
+ * 4.4): strip the CMR byte, walk the table of contents to count frames
+ * and validate lengths, then repack each TOC entry followed by its
+ * speech data into an output buffer for the decoder.  Returns NULL on
+ * malformed packets (non-fatal). */
+static GstBuffer *
+gst_rtp_amr_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpAMRDepay *rtpamrdepay;
+  const gint *frame_size;
+  GstBuffer *outbuf = NULL;
+  gint payload_len;
+  GstMapInfo map;
+
+  rtpamrdepay = GST_RTP_AMR_DEPAY (depayload);
+
+  /* setup frame size pointer */
+  if (rtpamrdepay->mode == GST_RTP_AMR_DP_MODE_NB)
+    frame_size = nb_frame_size;
+  else
+    frame_size = wb_frame_size;
+
+  /* when we get here, 1 channel, 8000/16000 Hz, octet aligned, no CRC,
+   * no robust sorting, no interleaving data is to be depayloaded */
+  {
+    guint8 *payload, *p, *dp;
+    gint i, num_packets, num_nonempty_packets;
+    gint amr_len;
+    gint ILL, ILP;
+
+    payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+    /* need at least 2 bytes for the header */
+    if (payload_len < 2)
+      goto too_small;
+
+    payload = gst_rtp_buffer_get_payload (rtp);
+
+    /* depay CMR. The CMR is used by the sender to request
+     * a new encoding mode.
+     *
+     *  0 1 2 3 4 5 6 7
+     * +-+-+-+-+-+-+-+-+
+     * |  CMR  |R|R|R|R|
+     * +-+-+-+-+-+-+-+-+
+     */
+    /* CMR = (payload[0] & 0xf0) >> 4; */
+
+    /* strip CMR header now, pack FT and the data for the decoder */
+    payload_len -= 1;
+    payload += 1;
+
+    GST_DEBUG_OBJECT (rtpamrdepay, "payload len %d", payload_len);
+
+    /* NOTE(review): setcaps rejects interleaving, so this branch is
+     * currently unreachable; kept for completeness */
+    if (rtpamrdepay->interleaving) {
+      ILL = (payload[0] & 0xf0) >> 4;
+      ILP = (payload[0] & 0x0f);
+
+      payload_len -= 1;
+      payload += 1;
+
+      if (ILP > ILL)
+        goto wrong_interleaving;
+    }
+
+    /* table-of-contents entry:
+     *
+     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6
+     * +-+-+-+-+-+-+-+-+..
+     * |F|  FT   |Q|P|P| more FT..
+     * +-+-+-+-+-+-+-+-+..
+     */
+    /* count number of packets by counting the FTs. Also
+     * count number of amr data bytes and number of non-empty
+     * packets (this is also the number of CRCs if present). */
+    amr_len = 0;
+    num_nonempty_packets = 0;
+    num_packets = 0;
+    for (i = 0; i < payload_len; i++) {
+      gint fr_size;
+      guint8 FT;
+
+      FT = (payload[i] & 0x78) >> 3;
+
+      fr_size = frame_size[FT];
+      GST_DEBUG_OBJECT (rtpamrdepay, "frame size %d", fr_size);
+      if (fr_size == -1)
+        goto wrong_framesize;
+
+      if (fr_size > 0) {
+        amr_len += fr_size;
+        num_nonempty_packets++;
+      }
+      num_packets++;
+
+      /* F bit clear: this was the last TOC entry */
+      if ((payload[i] & 0x80) == 0)
+        break;
+    }
+
+    if (rtpamrdepay->crc) {
+      /* data len + CRC len + header bytes must not exceed payload_len */
+      if (num_packets + num_nonempty_packets + amr_len > payload_len)
+        goto wrong_length_1;
+    } else {
+      /* data len + header bytes must not exceed payload_len */
+      if (num_packets + amr_len > payload_len)
+        goto wrong_length_2;
+    }
+
+    outbuf = gst_buffer_new_and_alloc (payload_len);
+
+    /* point to destination */
+    gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+
+    /* point to first data packet: the speech data follows the TOC
+     * (and the CRCs, when present) */
+    p = map.data;
+    dp = payload + num_packets;
+    if (rtpamrdepay->crc) {
+      /* skip CRC if present */
+      dp += num_nonempty_packets;
+    }
+
+    for (i = 0; i < num_packets; i++) {
+      gint fr_size;
+
+      /* copy FT, clear F bit */
+      *p++ = payload[i] & 0x7f;
+
+      fr_size = frame_size[(payload[i] & 0x78) >> 3];
+      if (fr_size > 0) {
+        /* copy data packet, FIXME, calc CRC here. */
+        memcpy (p, dp, fr_size);
+
+        p += fr_size;
+        dp += fr_size;
+      }
+    }
+    gst_buffer_unmap (outbuf, &map);
+
+    /* we can set the duration because each packet is 20 milliseconds */
+    GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;
+
+    if (gst_rtp_buffer_get_marker (rtp)) {
+      /* marker bit marks a buffer after a talkspurt. */
+      GST_DEBUG_OBJECT (depayload, "marker bit was set");
+      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+    }
+
+    GST_DEBUG_OBJECT (depayload, "pushing buffer of size %" G_GSIZE_FORMAT,
+        gst_buffer_get_size (outbuf));
+
+    gst_rtp_copy_audio_meta (rtpamrdepay, outbuf, rtp->buffer);
+  }
+
+  return outbuf;
+
+  /* ERRORS */
+too_small:
+  {
+    GST_ELEMENT_WARNING (rtpamrdepay, STREAM, DECODE,
+        (NULL), ("AMR RTP payload too small (%d)", payload_len));
+    goto bad_packet;
+  }
+wrong_interleaving:
+  {
+    GST_ELEMENT_WARNING (rtpamrdepay, STREAM, DECODE,
+        (NULL), ("AMR RTP wrong interleaving"));
+    goto bad_packet;
+  }
+wrong_framesize:
+  {
+    GST_ELEMENT_WARNING (rtpamrdepay, STREAM, DECODE,
+        (NULL), ("AMR RTP frame size == -1"));
+    goto bad_packet;
+  }
+wrong_length_1:
+  {
+    GST_ELEMENT_WARNING (rtpamrdepay, STREAM, DECODE,
+        (NULL), ("AMR RTP wrong length 1"));
+    goto bad_packet;
+  }
+wrong_length_2:
+  {
+    GST_ELEMENT_WARNING (rtpamrdepay, STREAM, DECODE,
+        (NULL), ("AMR RTP wrong length 2"));
+    goto bad_packet;
+  }
+bad_packet:
+  {
+    /* no fatal error */
+    return NULL;
+  }
+}
diff --git a/gst/rtp/gstrtpamrdepay.h b/gst/rtp/gstrtpamrdepay.h
new file mode 100644
index 0000000000..b1ebe4f8f0
--- /dev/null
+++ b/gst/rtp/gstrtpamrdepay.h
@@ -0,0 +1,75 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_AMR_DEPAY_H__
+#define __GST_RTP_AMR_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_AMR_DEPAY \
+ (gst_rtp_amr_depay_get_type())
+#define GST_RTP_AMR_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_AMR_DEPAY,GstRtpAMRDepay))
+#define GST_RTP_AMR_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_AMR_DEPAY,GstRtpAMRDepayClass))
+#define GST_IS_RTP_AMR_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_AMR_DEPAY))
+#define GST_IS_RTP_AMR_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_AMR_DEPAY))
+
+typedef struct _GstRtpAMRDepay GstRtpAMRDepay;
+typedef struct _GstRtpAMRDepayClass GstRtpAMRDepayClass;
+
+typedef enum {
+ GST_RTP_AMR_DP_MODE_INVALID = 0,
+ GST_RTP_AMR_DP_MODE_NB = 1,
+ GST_RTP_AMR_DP_MODE_WB = 2
+} GstRtpAMRDepayMode;
+
+struct _GstRtpAMRDepay
+{
+ GstRTPBaseDepayload depayload;
+
+ GstRtpAMRDepayMode mode;
+
+ gboolean octet_align;
+ guint8 mode_set;
+ gint mode_change_period;
+ gboolean mode_change_neighbor;
+ gint maxptime;
+ gboolean crc;
+ gboolean robust_sorting;
+ gboolean interleaving;
+ gint ptime;
+ gint channels;
+};
+
+struct _GstRtpAMRDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_amr_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_AMR_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpamrpay.c b/gst/rtp/gstrtpamrpay.c
new file mode 100644
index 0000000000..bf92a5483b
--- /dev/null
+++ b/gst/rtp/gstrtpamrpay.c
@@ -0,0 +1,461 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpamrpay
+ * @title: rtpamrpay
+ * @see_also: rtpamrdepay
+ *
+ * Payload AMR audio into RTP packets according to RFC 3267.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc3267.txt
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! amrnbenc ! rtpamrpay ! udpsink
+ * ]| This example pipeline will encode and payload an AMR stream. Refer to
+ * the rtpamrdepay example to depayload and decode the RTP stream.
+ *
+ */
+
+/* references:
+ *
+ * RFC 3267 - Real-Time Transport Protocol (RTP) Payload Format and File
+ * Storage Format for the Adaptive Multi-Rate (AMR) and Adaptive
+ * Multi-Rate Wideband (AMR-WB) Audio Codecs.
+ *
+ * ETSI TS 126 201 V6.0.0 (2004-12) - Digital cellular telecommunications system (Phase 2+);
+ * Universal Mobile Telecommunications System (UMTS);
+ * AMR speech codec, wideband;
+ * Frame structure
+ * (3GPP TS 26.201 version 6.0.0 Release 6)
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpamrpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpamrpay_debug);
+#define GST_CAT_DEFAULT (rtpamrpay_debug)
+
+static GstStaticPadTemplate gst_rtp_amr_pay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/AMR, channels=(int)1, rate=(int)8000; "
+ "audio/AMR-WB, channels=(int)1, rate=(int)16000")
+ );
+
+static GstStaticPadTemplate gst_rtp_amr_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"AMR\", "
+ "encoding-params = (string) \"1\", "
+ "octet-align = (string) \"1\", "
+ "crc = (string) \"0\", "
+ "robust-sorting = (string) \"0\", "
+ "interleaving = (string) \"0\", "
+ "mode-set = (int) [ 0, 7 ], "
+ "mode-change-period = (int) [ 1, MAX ], "
+ "mode-change-neighbor = (string) { \"0\", \"1\" }, "
+ "maxptime = (int) [ 20, MAX ], " "ptime = (int) [ 20, MAX ];"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 16000, "
+ "encoding-name = (string) \"AMR-WB\", "
+ "encoding-params = (string) \"1\", "
+ "octet-align = (string) \"1\", "
+ "crc = (string) \"0\", "
+ "robust-sorting = (string) \"0\", "
+ "interleaving = (string) \"0\", "
+ "mode-set = (int) [ 0, 7 ], "
+ "mode-change-period = (int) [ 1, MAX ], "
+ "mode-change-neighbor = (string) { \"0\", \"1\" }, "
+ "maxptime = (int) [ 20, MAX ], " "ptime = (int) [ 20, MAX ]")
+ );
+
+static gboolean gst_rtp_amr_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_amr_pay_handle_buffer (GstRTPBasePayload * pad,
+ GstBuffer * buffer);
+
+static GstStateChangeReturn
+gst_rtp_amr_pay_change_state (GstElement * element, GstStateChange transition);
+
+#define gst_rtp_amr_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpAMRPay, gst_rtp_amr_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpamrpay, "rtpamrpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_AMR_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_amr_pay_class_init (GstRtpAMRPayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gstelement_class->change_state = gst_rtp_amr_pay_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_amr_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_amr_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP AMR payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode AMR or AMR-WB audio into RTP packets (RFC 3267)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_amr_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_amr_pay_handle_buffer;
+
+ GST_DEBUG_CATEGORY_INIT (rtpamrpay_debug, "rtpamrpay", 0,
+ "AMR/AMR-WB RTP Payloader");
+}
+
+static void
+gst_rtp_amr_pay_init (GstRtpAMRPay * rtpamrpay)
+{
+}
+
+static void
+gst_rtp_amr_pay_reset (GstRtpAMRPay * pay)
+{
+ pay->next_rtp_time = 0;
+ pay->first_ts = GST_CLOCK_TIME_NONE;
+ pay->first_rtp_time = 0;
+}
+
+static gboolean
+gst_rtp_amr_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+ GstRtpAMRPay *rtpamrpay;
+ gboolean res;
+ const GstStructure *s;
+ const gchar *str;
+
+ rtpamrpay = GST_RTP_AMR_PAY (basepayload);
+
+ /* figure out the mode Narrow or Wideband */
+ s = gst_caps_get_structure (caps, 0);
+ if ((str = gst_structure_get_name (s))) {
+ if (strcmp (str, "audio/AMR") == 0)
+ rtpamrpay->mode = GST_RTP_AMR_P_MODE_NB;
+ else if (strcmp (str, "audio/AMR-WB") == 0)
+ rtpamrpay->mode = GST_RTP_AMR_P_MODE_WB;
+ else
+ goto wrong_type;
+ } else
+ goto wrong_type;
+
+ if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "AMR", 8000);
+ else
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "AMR-WB",
+ 16000);
+
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "encoding-params", G_TYPE_STRING, "1", "octet-align", G_TYPE_STRING, "1",
+ /* don't set the defaults
+ *
+ * "crc", G_TYPE_STRING, "0",
+ * "robust-sorting", G_TYPE_STRING, "0",
+ * "interleaving", G_TYPE_STRING, "0",
+ */
+ NULL);
+
+ return res;
+
+ /* ERRORS */
+wrong_type:
+ {
+ GST_ERROR_OBJECT (rtpamrpay, "unsupported media type '%s'",
+ GST_STR_NULL (str));
+ return FALSE;
+ }
+}
+
+static void
+gst_rtp_amr_pay_recalc_rtp_time (GstRtpAMRPay * rtpamrpay,
+ GstClockTime timestamp)
+{
+ /* re-sync rtp time */
+ if (GST_CLOCK_TIME_IS_VALID (rtpamrpay->first_ts) &&
+ GST_CLOCK_TIME_IS_VALID (timestamp) && timestamp >= rtpamrpay->first_ts) {
+ GstClockTime diff;
+ guint32 rtpdiff;
+
+ /* interpolate to reproduce gap from start, rather than intermediate
+ * intervals to avoid roundup accumulation errors */
+ diff = timestamp - rtpamrpay->first_ts;
+ rtpdiff = ((diff / GST_MSECOND) * 8) <<
+ (rtpamrpay->mode == GST_RTP_AMR_P_MODE_WB);
+ rtpamrpay->next_rtp_time = rtpamrpay->first_rtp_time + rtpdiff;
+ GST_DEBUG_OBJECT (rtpamrpay,
+ "elapsed time %" GST_TIME_FORMAT ", rtp %" G_GUINT32_FORMAT ", "
+ "new offset %" G_GUINT32_FORMAT, GST_TIME_ARGS (diff), rtpdiff,
+ rtpamrpay->next_rtp_time);
+ }
+}
+
+/* -1 is invalid */
+static const gint nb_frame_size[16] = {
+ 12, 13, 15, 17, 19, 20, 26, 31,
+ 5, -1, -1, -1, -1, -1, -1, 0
+};
+
+static const gint wb_frame_size[16] = {
+ 17, 23, 32, 36, 40, 46, 50, 58,
+ 60, 5, -1, -1, -1, -1, -1, 0
+};
+
+static GstFlowReturn
+gst_rtp_amr_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpAMRPay *rtpamrpay;
+ const gint *frame_size;
+ GstFlowReturn ret;
+ guint payload_len;
+ GstMapInfo map;
+ GstBuffer *outbuf;
+ guint8 *payload, *ptr, *payload_amr;
+ GstClockTime timestamp, duration;
+ guint packet_len, mtu;
+ gint i, num_packets, num_nonempty_packets;
+ gint amr_len;
+ gboolean sid = FALSE;
+ GstRTPBuffer rtp = { NULL };
+
+ rtpamrpay = GST_RTP_AMR_PAY (basepayload);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpamrpay);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ timestamp = GST_BUFFER_PTS (buffer);
+ duration = GST_BUFFER_DURATION (buffer);
+
+ /* setup frame size pointer */
+ if (rtpamrpay->mode == GST_RTP_AMR_P_MODE_NB)
+ frame_size = nb_frame_size;
+ else
+ frame_size = wb_frame_size;
+
+ GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", map.size);
+
+ /* FIXME, only
+ * octet aligned, no interleaving, single channel, no CRC,
+ * no robust-sorting. To fix this you need to implement the downstream
+ * negotiation function. */
+
+ /* first count number of packets and total amr frame size */
+ amr_len = num_packets = num_nonempty_packets = 0;
+ for (i = 0; i < map.size; i++) {
+ guint8 FT;
+ gint fr_size;
+
+ FT = (map.data[i] & 0x78) >> 3;
+
+ fr_size = frame_size[FT];
+ GST_DEBUG_OBJECT (basepayload, "frame type %d, frame size %d", FT, fr_size);
+ /* FIXME, we don't handle this yet.. */
+ if (fr_size <= 0)
+ goto wrong_size;
+
+ if (fr_size == 5)
+ sid = TRUE;
+
+ amr_len += fr_size;
+ num_nonempty_packets++;
+ num_packets++;
+ i += fr_size;
+ }
+ if (amr_len > map.size)
+ goto incomplete_frame;
+
+ /* we need one extra byte for the CMR, the ToC is in the input
+ * data */
+ payload_len = map.size + 1;
+
+ /* get packet len to check against MTU */
+ packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
+ if (packet_len > mtu)
+ goto too_big;
+
+ /* now alloc output buffer */
+ outbuf =
+ gst_rtp_base_payload_allocate_output_buffer (basepayload, payload_len, 0,
+ 0);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+ /* copy timestamp */
+ GST_BUFFER_PTS (outbuf) = timestamp;
+
+ if (duration != GST_CLOCK_TIME_NONE)
+ GST_BUFFER_DURATION (outbuf) = duration;
+ else {
+ GST_BUFFER_DURATION (outbuf) = num_packets * 20 * GST_MSECOND;
+ }
+
+ if (GST_BUFFER_IS_DISCONT (buffer)) {
+ GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+ gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
+ }
+
+ if (G_UNLIKELY (sid)) {
+ gst_rtp_amr_pay_recalc_rtp_time (rtpamrpay, timestamp);
+ }
+
+ /* perfect rtptime */
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (rtpamrpay->first_ts))) {
+ rtpamrpay->first_ts = timestamp;
+ rtpamrpay->first_rtp_time = rtpamrpay->next_rtp_time;
+ }
+ GST_BUFFER_OFFSET (outbuf) = rtpamrpay->next_rtp_time;
+ rtpamrpay->next_rtp_time +=
+ (num_packets * 160) << (rtpamrpay->mode == GST_RTP_AMR_P_MODE_WB);
+
+ /* get payload, this is now writable */
+ payload = gst_rtp_buffer_get_payload (&rtp);
+
+ /* 0 1 2 3 4 5 6 7
+ * +-+-+-+-+-+-+-+-+
+ * | CMR |R|R|R|R|
+ * +-+-+-+-+-+-+-+-+
+ */
+ payload[0] = 0xF0; /* CMR, no specific mode requested */
+
+ /* this is where we copy the AMR data, after num_packets FTs and the
+ * CMR. */
+ payload_amr = payload + num_packets + 1;
+
+ /* copy data in payload, first we copy all the FTs then all
+ * the AMR data. The last FT has to have the F flag cleared. */
+ ptr = map.data;
+ for (i = 1; i <= num_packets; i++) {
+ guint8 FT;
+ gint fr_size;
+
+ /* 0 1 2 3 4 5 6 7
+ * +-+-+-+-+-+-+-+-+
+ * |F| FT |Q|P|P| more FT...
+ * +-+-+-+-+-+-+-+-+
+ */
+ FT = (*ptr & 0x78) >> 3;
+
+ fr_size = frame_size[FT];
+
+ if (i == num_packets)
+ /* last packet, clear F flag */
+ payload[i] = *ptr & 0x7f;
+ else
+ /* set F flag */
+ payload[i] = *ptr | 0x80;
+
+ memcpy (payload_amr, &ptr[1], fr_size);
+
+ /* all sizes are > 0 since we checked for that above */
+ ptr += fr_size + 1;
+ payload_amr += fr_size;
+ }
+
+ gst_buffer_unmap (buffer, &map);
+ gst_rtp_buffer_unmap (&rtp);
+
+ gst_rtp_copy_audio_meta (rtpamrpay, outbuf, buffer);
+
+ gst_buffer_unref (buffer);
+
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
+
+ return ret;
+
+ /* ERRORS */
+wrong_size:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
+ (NULL), ("received AMR frame with size <= 0"));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return GST_FLOW_ERROR;
+ }
+incomplete_frame:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
+ (NULL), ("received incomplete AMR frames"));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return GST_FLOW_ERROR;
+ }
+too_big:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, FORMAT,
+ (NULL), ("received too many AMR frames for MTU"));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+ return GST_FLOW_ERROR;
+ }
+}
+
+static GstStateChangeReturn
+gst_rtp_amr_pay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ /* handle upwards state changes here */
+ switch (transition) {
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* handle downwards state changes */
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_amr_pay_reset (GST_RTP_AMR_PAY (element));
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
diff --git a/gst/rtp/gstrtpamrpay.h b/gst/rtp/gstrtpamrpay.h
new file mode 100644
index 0000000000..b6e21483e2
--- /dev/null
+++ b/gst/rtp/gstrtpamrpay.h
@@ -0,0 +1,68 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_AMR_PAY_H__
+#define __GST_RTP_AMR_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_AMR_PAY \
+ (gst_rtp_amr_pay_get_type())
+#define GST_RTP_AMR_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_AMR_PAY,GstRtpAMRPay))
+#define GST_RTP_AMR_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_AMR_PAY,GstRtpAMRPayClass))
+#define GST_IS_RTP_AMR_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_AMR_PAY))
+#define GST_IS_RTP_AMR_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_AMR_PAY))
+
+typedef struct _GstRtpAMRPay GstRtpAMRPay;
+typedef struct _GstRtpAMRPayClass GstRtpAMRPayClass;
+
+typedef enum {
+ GST_RTP_AMR_P_MODE_INVALID = 0,
+ GST_RTP_AMR_P_MODE_NB = 1,
+ GST_RTP_AMR_P_MODE_WB = 2
+} GstRtpAMRPayMode;
+
+struct _GstRtpAMRPay
+{
+ GstRTPBasePayload payload;
+
+ GstRtpAMRPayMode mode;
+ GstClockTime first_ts;
+ guint32 first_rtp_time;
+ guint32 next_rtp_time;
+};
+
+struct _GstRtpAMRPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_amr_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_AMR_PAY_H__ */
diff --git a/gst/rtp/gstrtpbvdepay.c b/gst/rtp/gstrtpbvdepay.c
new file mode 100644
index 0000000000..98cff92fd1
--- /dev/null
+++ b/gst/rtp/gstrtpbvdepay.c
@@ -0,0 +1,187 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpbvdepay
+ * @title: rtpbvdepay
+ * @see_also: rtpbvpay
+ *
+ * Extract BroadcomVoice audio from RTP packets according to RFC 4298.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc4298.txt
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpbvdepay.h"
+#include "gstrtputils.h"
+
+static GstStaticPadTemplate gst_rtp_bv_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"BV16\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) 16000, " "encoding-name = (string) \"BV32\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_bv_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-bv, " "mode = (int) { 16, 32 }")
+ );
+
+static GstBuffer *gst_rtp_bv_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_bv_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+#define gst_rtp_bv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVDepay, gst_rtp_bv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpbvdepay, "rtpbvdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_BV_DEPAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_bv_depay_class_init (GstRTPBVDepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_bv_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_bv_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP BroadcomVoice depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts BroadcomVoice audio from RTP packets (RFC 4298)",
+ "Wim Taymans <wim.taymans@collabora.co.uk>");
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_bv_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_bv_depay_setcaps;
+}
+
+static void
+gst_rtp_bv_depay_init (GstRTPBVDepay * rtpbvdepay)
+{
+ rtpbvdepay->mode = -1;
+}
+
+static gboolean
+gst_rtp_bv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstRTPBVDepay *rtpbvdepay = GST_RTP_BV_DEPAY (depayload);
+ GstCaps *srccaps;
+ GstStructure *structure;
+ const gchar *mode_str = NULL;
+ gint mode, clock_rate, expected_rate;
+ gboolean ret;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ mode_str = gst_structure_get_string (structure, "encoding-name");
+ if (!mode_str)
+ goto no_mode;
+
+ if (!strcmp (mode_str, "BV16")) {
+ mode = 16;
+ expected_rate = 8000;
+ } else if (!strcmp (mode_str, "BV32")) {
+ mode = 32;
+ expected_rate = 16000;
+ } else
+ goto invalid_mode;
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = expected_rate;
+ else if (clock_rate != expected_rate)
+ goto wrong_rate;
+
+ depayload->clock_rate = clock_rate;
+ rtpbvdepay->mode = mode;
+
+ srccaps = gst_caps_new_simple ("audio/x-bv",
+ "mode", G_TYPE_INT, rtpbvdepay->mode, NULL);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+ GST_DEBUG ("set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
+ gst_caps_unref (srccaps);
+
+ return ret;
+
+ /* ERRORS */
+no_mode:
+ {
+ GST_ERROR_OBJECT (rtpbvdepay, "did not receive an encoding-name");
+ return FALSE;
+ }
+invalid_mode:
+ {
+ GST_ERROR_OBJECT (rtpbvdepay,
+ "invalid encoding-name, expected BV16 or BV32, got %s", mode_str);
+ return FALSE;
+ }
+wrong_rate:
+ {
+ GST_ERROR_OBJECT (rtpbvdepay, "invalid clock-rate, expected %d, got %d",
+ expected_rate, clock_rate);
+ return FALSE;
+ }
+}
+
+static GstBuffer *
+gst_rtp_bv_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstBuffer *outbuf;
+ gboolean marker;
+
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (rtp->buffer), marker,
+ gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+
+ if (marker && outbuf) {
+ /* mark start of talkspurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ if (outbuf) {
+ gst_rtp_drop_non_audio_meta (depayload, outbuf);
+ }
+
+ return outbuf;
+}
diff --git a/gst/rtp/gstrtpbvdepay.h b/gst/rtp/gstrtpbvdepay.h
new file mode 100644
index 0000000000..9a9ea7c407
--- /dev/null
+++ b/gst/rtp/gstrtpbvdepay.h
@@ -0,0 +1,58 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_BV_DEPAY_H__
+#define __GST_RTP_BV_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPBVDepay GstRTPBVDepay;
+typedef struct _GstRTPBVDepayClass GstRTPBVDepayClass;
+
+#define GST_TYPE_RTP_BV_DEPAY \
+ (gst_rtp_bv_depay_get_type())
+#define GST_RTP_BV_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_BV_DEPAY,GstRTPBVDepay))
+#define GST_RTP_BV_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_BV_DEPAY,GstRTPBVDepayClass))
+#define GST_IS_RTP_BV_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_BV_DEPAY))
+#define GST_IS_RTP_BV_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_BV_DEPAY))
+
+struct _GstRTPBVDepay
+{
+ GstRTPBaseDepayload depayload;
+
+ gint mode;
+};
+
+struct _GstRTPBVDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_bv_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_BV_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpbvpay.c b/gst/rtp/gstrtpbvpay.c
new file mode 100644
index 0000000000..a70f3c2379
--- /dev/null
+++ b/gst/rtp/gstrtpbvpay.c
@@ -0,0 +1,236 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpbvpay
+ * @title: rtpbvpay
+ * @see_also: rtpbvdepay
+ *
+ * Payload BroadcomVoice audio into RTP packets according to RFC 4298.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc4298.txt
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include "gstrtpelements.h"
+#include "gstrtpbvpay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpbvpay_debug);
+#define GST_CAT_DEFAULT (rtpbvpay_debug)
+
+static GstStaticPadTemplate gst_rtp_bv_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-bv, " "mode = (int) {16, 32}")
+ );
+
+static GstStaticPadTemplate gst_rtp_bv_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"BV16\";"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 16000, " "encoding-name = (string) \"BV32\"")
+ );
+
+
+static GstCaps *gst_rtp_bv_pay_sink_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_rtp_bv_pay_sink_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+
+#define gst_rtp_bv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPBVPay, gst_rtp_bv_pay, GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpbvpay, "rtpbvpay", GST_RANK_SECONDARY,
+ GST_TYPE_RTP_BV_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_bv_pay_class_init (GstRTPBVPayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpbvpay_debug, "rtpbvpay", 0,
+ "BroadcomVoice audio RTP payloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_bv_pay_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_bv_pay_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP BV Payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize BroadcomVoice audio streams into RTP packets (RFC 4298)",
+ "Wim Taymans <wim.taymans@collabora.co.uk>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_bv_pay_sink_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_bv_pay_sink_getcaps;
+}
+
+static void
+gst_rtp_bv_pay_init (GstRTPBVPay * rtpbvpay)
+{
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbvpay);
+
+ rtpbvpay->mode = -1;
+
+ /* tell rtpbaseaudiopayload that this is a frame based codec */
+ gst_rtp_base_audio_payload_set_frame_based (rtpbaseaudiopayload);
+}
+
+static gboolean
+gst_rtp_bv_pay_sink_setcaps (GstRTPBasePayload * rtpbasepayload, GstCaps * caps)
+{
+ GstRTPBVPay *rtpbvpay;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+ gint mode;
+ GstStructure *structure;
+ const char *payload_name;
+
+ rtpbvpay = GST_RTP_BV_PAY (rtpbasepayload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbasepayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ payload_name = gst_structure_get_name (structure);
+ if (g_ascii_strcasecmp ("audio/x-bv", payload_name))
+ goto wrong_caps;
+
+ if (!gst_structure_get_int (structure, "mode", &mode))
+ goto no_mode;
+
+ if (mode != 16 && mode != 32)
+ goto wrong_mode;
+
+ if (mode == 16) {
+ gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "BV16",
+ 8000);
+ rtpbasepayload->clock_rate = 8000;
+ } else {
+ gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "BV32",
+ 16000);
+ rtpbasepayload->clock_rate = 16000;
+ }
+
+ /* set options for this frame based audio codec */
+ gst_rtp_base_audio_payload_set_frame_options (rtpbaseaudiopayload,
+ mode, mode == 16 ? 10 : 20);
+
+ if (mode != rtpbvpay->mode && rtpbvpay->mode != -1)
+ goto mode_changed;
+
+ rtpbvpay->mode = mode;
+
+ return TRUE;
+
+ /* ERRORS */
+wrong_caps:
+ {
+ GST_ERROR_OBJECT (rtpbvpay, "expected audio/x-bv, received %s",
+ payload_name);
+ return FALSE;
+ }
+no_mode:
+ {
+ GST_ERROR_OBJECT (rtpbvpay, "did not receive a mode");
+ return FALSE;
+ }
+wrong_mode:
+ {
+ GST_ERROR_OBJECT (rtpbvpay, "mode must be 16 or 32, received %d", mode);
+ return FALSE;
+ }
+mode_changed:
+ {
+ GST_ERROR_OBJECT (rtpbvpay, "Mode has changed from %d to %d! "
+ "Mode cannot change while streaming", rtpbvpay->mode, mode);
+ return FALSE;
+ }
+}
+
+/* we return the padtemplate caps with the mode field fixated to a value if we
+ * can */
+static GstCaps *
+gst_rtp_bv_pay_sink_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *otherpadcaps;
+ GstCaps *caps;
+
+ caps = gst_pad_get_pad_template_caps (pad);
+
+ otherpadcaps = gst_pad_get_allowed_caps (rtppayload->srcpad);
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ GstStructure *structure;
+ const gchar *mode_str;
+ gint mode;
+
+ structure = gst_caps_get_structure (otherpadcaps, 0);
+
+ /* construct mode, if we can */
+ mode_str = gst_structure_get_string (structure, "encoding-name");
+ if (mode_str) {
+ if (!strcmp (mode_str, "BV16"))
+ mode = 16;
+ else if (!strcmp (mode_str, "BV32"))
+ mode = 32;
+ else
+ mode = -1;
+
+ if (mode == 16 || mode == 32) {
+ caps = gst_caps_make_writable (caps);
+ structure = gst_caps_get_structure (caps, 0);
+ gst_structure_set (structure, "mode", G_TYPE_INT, mode, NULL);
+ }
+ }
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+ if (filter) {
+ GstCaps *tmp;
+
+ GST_DEBUG_OBJECT (rtppayload, "Intersect %" GST_PTR_FORMAT " and filter %"
+ GST_PTR_FORMAT, caps, filter);
+ tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = tmp;
+ }
+
+ return caps;
+}
diff --git a/gst/rtp/gstrtpbvpay.h b/gst/rtp/gstrtpbvpay.h
new file mode 100644
index 0000000000..afb3485b98
--- /dev/null
+++ b/gst/rtp/gstrtpbvpay.h
@@ -0,0 +1,58 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_BV_PAY_H__
+#define __GST_RTP_BV_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_BV_PAY \
+ (gst_rtp_bv_pay_get_type())
+#define GST_RTP_BV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_BV_PAY,GstRTPBVPay))
+#define GST_RTP_BV_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_BV_PAY,GstRTPBVPayClass))
+#define GST_IS_RTP_BV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_BV_PAY))
+#define GST_IS_RTP_BV_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_BV_PAY))
+
+typedef struct _GstRTPBVPay GstRTPBVPay;
+typedef struct _GstRTPBVPayClass GstRTPBVPayClass;
+
+struct _GstRTPBVPay
+{
+ GstRTPBaseAudioPayload audiopayload;
+
+ gint mode;
+};
+
+struct _GstRTPBVPayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_bv_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_BV_PAY_H__ */
diff --git a/gst/rtp/gstrtpceltdepay.c b/gst/rtp/gstrtpceltdepay.c
new file mode 100644
index 0000000000..9054af7c66
--- /dev/null
+++ b/gst/rtp/gstrtpceltdepay.c
@@ -0,0 +1,271 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpceltdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpceltdepay_debug);
+#define GST_CAT_DEFAULT (rtpceltdepay_debug)
+
+/* RtpCELTDepay signals and args */
+
+#define DEFAULT_FRAMESIZE 480
+#define DEFAULT_CHANNELS 1
+#define DEFAULT_CLOCKRATE 32000
+
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static GstStaticPadTemplate gst_rtp_celt_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) [32000, 48000], "
+ "encoding-name = (string) \"CELT\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_celt_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-celt")
+ );
+
+static GstBuffer *gst_rtp_celt_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_celt_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+#define gst_rtp_celt_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTDepay, gst_rtp_celt_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpceltdepay, "rtpceltdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_CELT_DEPAY, rtp_element_init (plugin));
+/* Class setup: init the debug category, register the static pad templates
+ * and element metadata, and hook up the depayloader vfuncs
+ * (set_caps / process_rtp_packet). */
+static void
+gst_rtp_celt_depay_class_init (GstRtpCELTDepayClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  GST_DEBUG_CATEGORY_INIT (rtpceltdepay_debug, "rtpceltdepay", 0,
+      "CELT RTP Depayloader");
+
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_celt_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_celt_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP CELT depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts CELT audio from RTP packets",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_celt_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_celt_depay_setcaps;
+}
+
+/* Instance init: nothing to set up here; frame_size starts zeroed by
+ * GObject and is filled in from the caps in setcaps. */
+static void
+gst_rtp_celt_depay_init (GstRtpCELTDepay * rtpceltdepay)
+{
+}
+
+/* Synthetic CELT comment packet pushed to the decoder:
+ * len 4 bytes LE (045 octal = 37),
+ * vendor string (len bytes),
+ * user_len 4 (0) bytes LE
+ */
+static const gchar gst_rtp_celt_comment[] =
+    "\045\0\0\0Depayloaded with GStreamer celtdepay\0\0\0\0";
+
+/* Parse the RTP sink caps, configure the src caps and push a minimal CELT
+ * ident header plus the comment packet that the downstream decoder expects
+ * before any audio frames.
+ *
+ * Returns TRUE when the src caps could be set, FALSE when the mandatory
+ * clock-rate field is missing. */
+static gboolean
+gst_rtp_celt_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstRtpCELTDepay *rtpceltdepay;
+  gint clock_rate, nb_channels = 0, frame_size = 0;
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *ptr;
+  const gchar *params;
+  GstCaps *srccaps;
+  gboolean res;
+
+  rtpceltdepay = GST_RTP_CELT_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* clock-rate is mandatory in the RTP caps */
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    goto no_clockrate;
+  depayload->clock_rate = clock_rate;
+
+  /* optional string fields, fall back to the RFC defaults when absent */
+  if ((params = gst_structure_get_string (structure, "encoding-params")))
+    nb_channels = atoi (params);
+  if (!nb_channels)
+    nb_channels = DEFAULT_CHANNELS;
+
+  if ((params = gst_structure_get_string (structure, "frame-size")))
+    frame_size = atoi (params);
+  if (!frame_size)
+    frame_size = DEFAULT_FRAMESIZE;
+  rtpceltdepay->frame_size = frame_size;
+
+  GST_DEBUG_OBJECT (depayload, "clock-rate=%d channels=%d frame-size=%d",
+      clock_rate, nb_channels, frame_size);
+
+  /* construct minimal header and comment packet for the decoder */
+  buf = gst_buffer_new_and_alloc (60);
+  gst_buffer_map (buf, &map, GST_MAP_WRITE);
+  ptr = map.data;
+  /* 8-byte magic, space padded; the literal is 9 bytes (incl. NUL) so the
+   * 8-byte copy stays within bounds ("CELT " was only 6 bytes and the
+   * memcpy read past the end of the literal) */
+  memcpy (ptr, "CELT    ", 8);
+  ptr += 8;
+  /* version string field is 20 bytes wide, only 7 are meaningful */
+  memcpy (ptr, "1.1.12", 7);
+  ptr += 20;
+  GST_WRITE_UINT32_LE (ptr, 0x80000006);        /* version */
+  ptr += 4;
+  GST_WRITE_UINT32_LE (ptr, 56);        /* header_size */
+  ptr += 4;
+  GST_WRITE_UINT32_LE (ptr, clock_rate);        /* rate */
+  ptr += 4;
+  GST_WRITE_UINT32_LE (ptr, nb_channels);       /* channels */
+  ptr += 4;
+  GST_WRITE_UINT32_LE (ptr, frame_size);        /* frame-size */
+  ptr += 4;
+  GST_WRITE_UINT32_LE (ptr, -1);        /* overlap */
+  ptr += 4;
+  GST_WRITE_UINT32_LE (ptr, -1);        /* bytes_per_packet */
+  ptr += 4;
+  GST_WRITE_UINT32_LE (ptr, 0); /* extra headers */
+  gst_buffer_unmap (buf, &map);
+
+  srccaps = gst_caps_new_empty_simple ("audio/x-celt");
+  res = gst_pad_set_caps (depayload->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  /* push the ident header, then the canned comment packet */
+  gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpceltdepay), buf);
+
+  buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_celt_comment));
+  gst_buffer_fill (buf, 0, gst_rtp_celt_comment, sizeof (gst_rtp_celt_comment));
+
+  gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpceltdepay), buf);
+
+  return res;
+
+  /* ERRORS */
+no_clockrate:
+  {
+    GST_ERROR_OBJECT (depayload, "no clock-rate specified");
+    return FALSE;
+  }
+}
+
+/* Split one RTP packet into the CELT frames it carries and push each frame
+ * downstream. The payload layout is: a run of size headers (each size is a
+ * sequence of bytes summed until a byte != 0xff), followed by the frame
+ * data back to back. Always returns NULL because frames are pushed
+ * directly from here. */
+static GstBuffer *
+gst_rtp_celt_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstBuffer *outbuf = NULL;
+  guint8 *payload;
+  guint offset, pos, payload_len, total_size, size;
+  guint8 s;
+  gint clock_rate = 0, frame_size = 0;
+  GstClockTime framesize_ns = 0, timestamp;
+  guint n = 0;
+  GstRtpCELTDepay *rtpceltdepay;
+
+  rtpceltdepay = GST_RTP_CELT_DEPAY (depayload);
+  clock_rate = depayload->clock_rate;
+  frame_size = rtpceltdepay->frame_size;
+  /* duration of one frame in nanoseconds */
+  framesize_ns = gst_util_uint64_scale_int (frame_size, GST_SECOND, clock_rate);
+
+  timestamp = GST_BUFFER_PTS (rtp->buffer);
+
+  GST_LOG_OBJECT (depayload,
+      "got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+      gst_buffer_get_size (rtp->buffer), gst_rtp_buffer_get_marker (rtp),
+      gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+  GST_LOG_OBJECT (depayload, "got clock-rate=%d, frame_size=%d, "
+      "_ns=%" GST_TIME_FORMAT ", timestamp=%" GST_TIME_FORMAT, clock_rate,
+      frame_size, GST_TIME_ARGS (framesize_ns), GST_TIME_ARGS (timestamp));
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+  /* first count how many bytes are consumed by the size headers and make offset
+   * point to the first data byte.
+   * NOTE(review): this trusts the sender; a malformed packet whose size
+   * headers overrun payload_len would read past the payload — confirm
+   * upstream validation or add a bounds check. */
+  total_size = 0;
+  offset = 0;
+  while (total_size < payload_len) {
+    do {
+      s = payload[offset++];
+      total_size += s + 1;
+    } while (s == 0xff);
+  }
+
+  /* offset is now pointing to the payload; second pass extracts each frame */
+  total_size = 0;
+  pos = 0;
+  while (total_size < payload_len) {
+    n++;
+    size = 0;
+    do {
+      s = payload[pos++];
+      size += s;
+      total_size += s + 1;
+    } while (s == 0xff);
+
+    outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, offset, size);
+    offset += size;
+
+    if (frame_size != -1 && clock_rate != -1) {
+      /* n starts at 1, so the first frame is stamped one frame duration
+       * after the packet PTS — presumably intentional spacing; verify */
+      GST_BUFFER_PTS (outbuf) = timestamp + framesize_ns * n;
+      GST_BUFFER_DURATION (outbuf) = framesize_ns;
+    }
+    GST_LOG_OBJECT (depayload, "push timestamp=%"
+        GST_TIME_FORMAT ", duration=%" GST_TIME_FORMAT,
+        GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)),
+        GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
+
+    gst_rtp_drop_non_audio_meta (depayload, outbuf);
+
+    gst_rtp_base_depayload_push (depayload, outbuf);
+  }
+
+  return NULL;
+}
diff --git a/gst/rtp/gstrtpceltdepay.h b/gst/rtp/gstrtpceltdepay.h
new file mode 100644
index 0000000000..eb4bc30999
--- /dev/null
+++ b/gst/rtp/gstrtpceltdepay.h
@@ -0,0 +1,52 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more
+ */
+
+#ifndef __GST_RTP_CELT_DEPAY_H__
+#define __GST_RTP_CELT_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpCELTDepay GstRtpCELTDepay;
+typedef struct _GstRtpCELTDepayClass GstRtpCELTDepayClass;
+
+#define GST_TYPE_RTP_CELT_DEPAY \
+  (gst_rtp_celt_depay_get_type())
+#define GST_RTP_CELT_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_CELT_DEPAY,GstRtpCELTDepay))
+#define GST_RTP_CELT_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_CELT_DEPAY,GstRtpCELTDepayClass))
+#define GST_IS_RTP_CELT_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_CELT_DEPAY))
+#define GST_IS_RTP_CELT_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_CELT_DEPAY))
+
+/* CELT RTP depayloader instance. */
+struct _GstRtpCELTDepay
+{
+  GstRTPBaseDepayload depayload;
+  /* frame size in samples, taken from the "frame-size" caps field in
+   * setcaps (defaults to 480 there) */
+  gint frame_size;
+};
+
+struct _GstRtpCELTDepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_celt_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_CELT_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpceltpay.c b/gst/rtp/gstrtpceltpay.c
new file mode 100644
index 0000000000..18ef556402
--- /dev/null
+++ b/gst/rtp/gstrtpceltpay.c
@@ -0,0 +1,500 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpceltpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpceltpay_debug);
+#define GST_CAT_DEFAULT (rtpceltpay_debug)
+
+static GstStaticPadTemplate gst_rtp_celt_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-celt, "
+ "rate = (int) [ 32000, 64000 ], "
+ "channels = (int) [1, 2], " "frame-size = (int) [ 64, 512 ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_celt_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [ 32000, 48000 ], "
+ "encoding-name = (string) \"CELT\"")
+ );
+
+static void gst_rtp_celt_pay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_celt_pay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static gboolean gst_rtp_celt_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstCaps *gst_rtp_celt_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn gst_rtp_celt_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+#define gst_rtp_celt_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpCELTPay, gst_rtp_celt_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpceltpay, "rtpceltpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_CELT_PAY, rtp_element_init (plugin));
+
+/* Class setup: init the debug category, install finalize/change_state,
+ * register the pad templates and metadata, and hook up the payloader
+ * vfuncs (set_caps / get_caps / handle_buffer). */
+static void
+gst_rtp_celt_pay_class_init (GstRtpCELTPayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  GST_DEBUG_CATEGORY_INIT (rtpceltpay_debug, "rtpceltpay", 0,
+      "CELT RTP Payloader");
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_celt_pay_finalize;
+
+  gstelement_class->change_state = gst_rtp_celt_pay_change_state;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_celt_pay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_celt_pay_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP CELT payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payload-encodes CELT audio into a RTP packet",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_celt_pay_setcaps;
+  gstrtpbasepayload_class->get_caps = gst_rtp_celt_pay_getcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_celt_pay_handle_buffer;
+}
+
+/* Instance init: create the queue that accumulates CELT frames until a
+ * full RTP packet can be flushed. */
+static void
+gst_rtp_celt_pay_init (GstRtpCELTPay * rtpceltpay)
+{
+  rtpceltpay->queue = g_queue_new ();
+}
+
+/* Dispose of the frame queue.
+ * NOTE(review): any buffers still queued here would leak; change_state
+ * clears the queue on PAUSED->READY, so the queue is presumably empty by
+ * the time finalize runs — confirm. */
+static void
+gst_rtp_celt_pay_finalize (GObject * object)
+{
+  GstRtpCELTPay *rtpceltpay;
+
+  rtpceltpay = GST_RTP_CELT_PAY (object);
+
+  g_queue_free (rtpceltpay->queue);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Drop all queued frames and reset the byte/duration accounting. */
+static void
+gst_rtp_celt_pay_clear_queued (GstRtpCELTPay * rtpceltpay)
+{
+  GstBuffer *buf;
+
+  while ((buf = g_queue_pop_head (rtpceltpay->queue)))
+    gst_buffer_unref (buf);
+
+  rtpceltpay->bytes = 0;
+  rtpceltpay->sbytes = 0;
+  rtpceltpay->qduration = 0;
+}
+
+/* Append @buffer to the queue (takes ownership) and update the running
+ * totals: @ssize bytes of size-header, @size bytes of frame data and
+ * @duration of audio. qduration becomes -1 (unknown) as soon as one
+ * buffer without a valid duration is added. */
+static void
+gst_rtp_celt_pay_add_queued (GstRtpCELTPay * rtpceltpay, GstBuffer * buffer,
+    guint ssize, guint size, GstClockTime duration)
+{
+  g_queue_push_tail (rtpceltpay->queue, buffer);
+  rtpceltpay->sbytes += ssize;
+  rtpceltpay->bytes += size;
+  /* only add durations when we have a valid previous duration */
+  if (rtpceltpay->qduration != -1) {
+    if (duration != -1)
+      /* only add valid durations */
+      rtpceltpay->qduration += duration;
+    else
+      /* if we add a buffer without valid duration, our total queued duration
+       * becomes unknown */
+      rtpceltpay->qduration = -1;
+  }
+}
+
+/* Sink caps are accepted unconditionally: the real stream parameters come
+ * from the first (ident) packet, parsed in parse_ident. */
+static gboolean
+gst_rtp_celt_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  /* don't configure yet, we wait for the ident packet */
+  return TRUE;
+}
+
+
+/* Compute the sink caps we can accept by narrowing the template caps with
+ * whatever the downstream RTP peer allows (clock-rate -> rate,
+ * frame-size, encoding-params -> channels), then intersect with @filter. */
+static GstCaps *
+gst_rtp_celt_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+    GstCaps * filter)
+{
+  GstCaps *otherpadcaps;
+  GstCaps *caps;
+  const gchar *params;
+
+  caps = gst_pad_get_pad_template_caps (pad);
+
+  otherpadcaps = gst_pad_get_allowed_caps (payload->srcpad);
+  if (otherpadcaps) {
+    if (!gst_caps_is_empty (otherpadcaps)) {
+      GstStructure *ps;
+      GstStructure *s;
+      gint clock_rate = 0, frame_size = 0, channels = 1;
+
+      caps = gst_caps_make_writable (caps);
+
+      ps = gst_caps_get_structure (otherpadcaps, 0);
+      s = gst_caps_get_structure (caps, 0);
+
+      if (gst_structure_get_int (ps, "clock-rate", &clock_rate)) {
+        gst_structure_fixate_field_nearest_int (s, "rate", clock_rate);
+      }
+
+      /* frame-size and encoding-params are string fields in RTP caps */
+      if ((params = gst_structure_get_string (ps, "frame-size")))
+        frame_size = atoi (params);
+      if (frame_size)
+        gst_structure_set (s, "frame-size", G_TYPE_INT, frame_size, NULL);
+
+      if ((params = gst_structure_get_string (ps, "encoding-params"))) {
+        channels = atoi (params);
+        gst_structure_fixate_field_nearest_int (s, "channels", channels);
+      }
+
+      GST_DEBUG_OBJECT (payload, "clock-rate=%d frame-size=%d channels=%d",
+          clock_rate, frame_size, channels);
+    }
+    gst_caps_unref (otherpadcaps);
+  }
+
+  if (filter) {
+    GstCaps *tmp;
+
+    GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
+        GST_PTR_FORMAT, caps, filter);
+    tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (caps);
+    caps = tmp;
+  }
+
+  return caps;
+}
+
+/* Parse the first (ident) CELT packet and configure the RTP src caps from
+ * it: rate, channel count (encoding-params) and frame-size.
+ *
+ * @data/@size is the mapped ident packet. Returns TRUE when the outcaps
+ * could be set, FALSE on a malformed or truncated header. */
+static gboolean
+gst_rtp_celt_pay_parse_ident (GstRtpCELTPay * rtpceltpay,
+    const guint8 * data, guint size)
+{
+  guint32 version, header_size, rate, nb_channels, frame_size, overlap;
+  guint32 bytes_per_packet;
+  GstRTPBasePayload *payload;
+  gchar *cstr, *fsstr;
+  gboolean res;
+
+  /* we need the header string (8), the version string (20), the version
+   * and the header length. */
+  if (size < 36)
+    goto too_small;
+
+  if (!g_str_has_prefix ((const gchar *) data, "CELT "))
+    goto wrong_header;
+
+  /* skip header and version string */
+  data += 28;
+
+  version = GST_READ_UINT32_LE (data);
+  GST_DEBUG_OBJECT (rtpceltpay, "version %08x", version);
+#if 0
+  if (version != 1)
+    goto wrong_version;
+#endif
+
+  data += 4;
+  /* ensure sizes */
+  header_size = GST_READ_UINT32_LE (data);
+  if (header_size < 56)
+    goto header_too_small;
+
+  if (size < header_size)
+    goto payload_too_small;
+
+  data += 4;
+  rate = GST_READ_UINT32_LE (data);
+  data += 4;
+  nb_channels = GST_READ_UINT32_LE (data);
+  data += 4;
+  frame_size = GST_READ_UINT32_LE (data);
+  data += 4;
+  overlap = GST_READ_UINT32_LE (data);
+  data += 4;
+  bytes_per_packet = GST_READ_UINT32_LE (data);
+
+  GST_DEBUG_OBJECT (rtpceltpay, "rate %d, nb_channels %d, frame_size %d",
+      rate, nb_channels, frame_size);
+  GST_DEBUG_OBJECT (rtpceltpay, "overlap %d, bytes_per_packet %d",
+      overlap, bytes_per_packet);
+
+  payload = GST_RTP_BASE_PAYLOAD (rtpceltpay);
+
+  /* channel count and frame size travel as string fields in RTP caps */
+  gst_rtp_base_payload_set_options (payload, "audio", FALSE, "CELT", rate);
+  cstr = g_strdup_printf ("%d", nb_channels);
+  fsstr = g_strdup_printf ("%d", frame_size);
+  res = gst_rtp_base_payload_set_outcaps (payload, "encoding-params",
+      G_TYPE_STRING, cstr, "frame-size", G_TYPE_STRING, fsstr, NULL);
+  g_free (cstr);
+  g_free (fsstr);
+
+  return res;
+
+  /* ERRORS */
+too_small:
+  {
+    /* message now matches the actual "size < 36" check above */
+    GST_DEBUG_OBJECT (rtpceltpay,
+        "ident packet too small, need at least 36 bytes");
+    return FALSE;
+  }
+wrong_header:
+  {
+    GST_DEBUG_OBJECT (rtpceltpay,
+        "ident packet does not start with \"CELT \"");
+    return FALSE;
+  }
+#if 0
+wrong_version:
+  {
+    GST_DEBUG_OBJECT (rtpceltpay, "can only handle version 1, have version %d",
+        version);
+    return FALSE;
+  }
+#endif
+header_too_small:
+  {
+    /* message now matches the actual "header_size < 56" check above */
+    GST_DEBUG_OBJECT (rtpceltpay,
+        "header size too small, need at least 56 bytes, " "got only %d",
+        header_size);
+    return FALSE;
+  }
+payload_too_small:
+  {
+    GST_DEBUG_OBJECT (rtpceltpay,
+        "payload too small, need at least %d bytes, got only %d", header_size,
+        size);
+    return FALSE;
+  }
+}
+
+/* Pack all queued CELT frames into one RTP packet and push it.
+ * Layout: all the size headers first (sbytes bytes), then the frame data
+ * back to back (bytes bytes). The output PTS is taken from the first
+ * queued buffer; the accounting is reset afterwards. */
+static GstFlowReturn
+gst_rtp_celt_pay_flush_queued (GstRtpCELTPay * rtpceltpay)
+{
+  GstFlowReturn ret;
+  GstBuffer *buf, *outbuf;
+  guint8 *payload, *spayload;
+  guint payload_len;
+  GstClockTime duration;
+  GstRTPBuffer rtp = { NULL, };
+
+  payload_len = rtpceltpay->bytes + rtpceltpay->sbytes;
+  duration = rtpceltpay->qduration;
+
+  GST_DEBUG_OBJECT (rtpceltpay, "flushing out %u, duration %" GST_TIME_FORMAT,
+      payload_len, GST_TIME_ARGS (rtpceltpay->qduration));
+
+  /* get a big enough packet for the sizes + payloads */
+  outbuf =
+      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+      (rtpceltpay), payload_len, 0, 0);
+
+  GST_BUFFER_DURATION (outbuf) = duration;
+
+  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+  /* point to the payload for size headers and data */
+  spayload = gst_rtp_buffer_get_payload (&rtp);
+  payload = spayload + rtpceltpay->sbytes;
+
+  while ((buf = g_queue_pop_head (rtpceltpay->queue))) {
+    guint size;
+
+    /* copy first timestamp to output */
+    if (GST_BUFFER_PTS (outbuf) == -1)
+      GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);
+
+    /* write the size to the header: 0xff continuation bytes, then the
+     * remainder (< 0xff) terminates the size field */
+    size = gst_buffer_get_size (buf);
+    while (size > 0xff) {
+      *spayload++ = 0xff;
+      size -= 0xff;
+    }
+    *spayload++ = size;
+
+    /* copy payload */
+    size = gst_buffer_get_size (buf);
+    gst_buffer_extract (buf, 0, payload, size);
+    payload += size;
+
+    gst_rtp_copy_audio_meta (rtpceltpay, outbuf, buf);
+
+    gst_buffer_unref (buf);
+  }
+  gst_rtp_buffer_unmap (&rtp);
+
+  /* we consumed it all */
+  rtpceltpay->bytes = 0;
+  rtpceltpay->sbytes = 0;
+  rtpceltpay->qduration = 0;
+
+  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpceltpay), outbuf);
+
+  return ret;
+}
+
+/* Handle one input buffer. Packet 0 is the ident header (parsed for caps),
+ * packet 1 the comment packet (ignored); everything else is a CELT frame
+ * that gets queued, flushing the queue first when adding it would exceed
+ * the configured MTU/latency. Takes ownership of @buffer. */
+static GstFlowReturn
+gst_rtp_celt_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstFlowReturn ret;
+  GstRtpCELTPay *rtpceltpay;
+  gsize payload_len;
+  GstMapInfo map;
+  GstClockTime duration, packet_dur;
+  guint i, ssize, packet_len;
+
+  rtpceltpay = GST_RTP_CELT_PAY (basepayload);
+
+  ret = GST_FLOW_OK;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+  switch (rtpceltpay->packet) {
+    case 0:
+      /* ident packet. We need to parse the headers to construct the RTP
+       * properties. */
+      if (!gst_rtp_celt_pay_parse_ident (rtpceltpay, map.data, map.size))
+        goto parse_error;
+
+      goto cleanup;
+    case 1:
+      /* comment packet, we ignore it */
+      goto cleanup;
+    default:
+      /* other packets go in the payload */
+      break;
+  }
+  /* map.size stays valid after unmap: GstMapInfo is a by-value copy */
+  gst_buffer_unmap (buffer, &map);
+
+  duration = GST_BUFFER_DURATION (buffer);
+
+  GST_LOG_OBJECT (rtpceltpay,
+      "got buffer of duration %" GST_TIME_FORMAT ", size %" G_GSIZE_FORMAT,
+      GST_TIME_ARGS (duration), map.size);
+
+  /* calculate the size of the size field and the payload */
+  ssize = 1;
+  for (i = map.size; i > 0xff; i -= 0xff)
+    ssize++;
+
+  GST_DEBUG_OBJECT (rtpceltpay, "bytes for size %u", ssize);
+
+  /* calculate what the new size and duration would be of the packet */
+  payload_len = ssize + map.size + rtpceltpay->bytes + rtpceltpay->sbytes;
+  if (rtpceltpay->qduration != -1 && duration != -1)
+    packet_dur = rtpceltpay->qduration + duration;
+  else
+    packet_dur = 0;
+
+  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
+
+  if (gst_rtp_base_payload_is_filled (basepayload, packet_len, packet_dur)) {
+    /* size or duration would overflow the packet, flush the queued data */
+    ret = gst_rtp_celt_pay_flush_queued (rtpceltpay);
+  }
+
+  /* queue the packet; the queue takes the buffer reference */
+  gst_rtp_celt_pay_add_queued (rtpceltpay, buffer, ssize, map.size, duration);
+
+done:
+  rtpceltpay->packet++;
+
+  return ret;
+
+  /* ERRORS */
+cleanup:
+  {
+    gst_buffer_unmap (buffer, &map);
+    goto done;
+  }
+parse_error:
+  {
+    GST_ELEMENT_ERROR (rtpceltpay, STREAM, DECODE, (NULL),
+        ("Error parsing first identification packet."));
+    gst_buffer_unmap (buffer, &map);
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* State handling: reset the packet counter on READY->PAUSED (so the next
+ * stream's ident packet is parsed again) and drop any queued frames on
+ * PAUSED->READY. */
+static GstStateChangeReturn
+gst_rtp_celt_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRtpCELTPay *rtpceltpay;
+  GstStateChangeReturn ret;
+
+  rtpceltpay = GST_RTP_CELT_PAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      rtpceltpay->packet = 0;
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_rtp_celt_pay_clear_queued (rtpceltpay);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpceltpay.h b/gst/rtp/gstrtpceltpay.h
new file mode 100644
index 0000000000..452c124523
--- /dev/null
+++ b/gst/rtp/gstrtpceltpay.h
@@ -0,0 +1,60 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more
+ */
+
+
+#ifndef __GST_RTP_CELT_PAY_H__
+#define __GST_RTP_CELT_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpCELTPay GstRtpCELTPay;
+typedef struct _GstRtpCELTPayClass GstRtpCELTPayClass;
+
+#define GST_TYPE_RTP_CELT_PAY \
+  (gst_rtp_celt_pay_get_type())
+#define GST_RTP_CELT_PAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_CELT_PAY,GstRtpCELTPay))
+#define GST_RTP_CELT_PAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_CELT_PAY,GstRtpCELTPayClass))
+#define GST_IS_RTP_CELT_PAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_CELT_PAY))
+#define GST_IS_RTP_CELT_PAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_CELT_PAY))
+
+/* CELT RTP payloader instance. */
+struct _GstRtpCELTPay
+{
+  GstRTPBasePayload payload;
+
+  /* number of input buffers seen; 0 = ident header, 1 = comment packet */
+  guint64 packet;
+
+  /* queue to hold packets */
+  GQueue *queue;
+  guint sbytes;                 /* bytes queued for sizes */
+  guint bytes;                  /* bytes queued for data */
+  GstClockTime qduration;       /* queued duration */
+};
+
+struct _GstRtpCELTPayClass
+{
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_celt_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_CELT_PAY_H__ */
diff --git a/gst/rtp/gstrtpchannels.c b/gst/rtp/gstrtpchannels.c
new file mode 100644
index 0000000000..9921293fd4
--- /dev/null
+++ b/gst/rtp/gstrtpchannels.c
@@ -0,0 +1,310 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <string.h>
+#include <stdlib.h>
+
+#include "gstrtpchannels.h"
+
+/*
+ * RTP channel positions as discussed in RFC 3551 and also RFC 3555
+ *
+ * We can't really represent the described channel positions in GStreamer but we
+ * implement a (very rough) approximation here.
+ */
+
+/* Channel layout tables, pos_<channels>_<variant>, approximating the
+ * orders named in RFC 3551/3555. */
+static const GstAudioChannelPosition pos_4_1[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT
+};
+
+static const GstAudioChannelPosition pos_4_2[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_LFE1
+};
+
+/* NOTE(review): identical to pos_4_2 — presumably the rough approximation
+ * mentioned in the file comment, but worth confirming. */
+static const GstAudioChannelPosition pos_4_3[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_LFE1
+};
+
+static const GstAudioChannelPosition pos_5_1[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER
+};
+
+static const GstAudioChannelPosition pos_6_1[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_LFE1
+};
+
+static const GstAudioChannelPosition pos_6_2[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_LFE1,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT
+};
+
+static const GstAudioChannelPosition pos_8_1[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_LFE1,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT
+};
+
+static const GstAudioChannelPosition pos_8_2[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_LFE1,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT
+};
+
+static const GstAudioChannelPosition pos_8_3[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_LFE1,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT
+};
+
+/* Fallback layouts used when the caps carry no named channel-order. */
+static const GstAudioChannelPosition pos_def_1[] = {
+  GST_AUDIO_CHANNEL_POSITION_MONO
+};
+
+static const GstAudioChannelPosition pos_def_2[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT
+};
+
+static const GstAudioChannelPosition pos_def_3[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER
+};
+
+static const GstAudioChannelPosition pos_def_4[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_LFE1
+};
+
+static const GstAudioChannelPosition pos_def_5[] = {
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT
+};
+
+static const GstAudioChannelPosition pos_def_6[] = {
+  GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+  GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+  GST_AUDIO_CHANNEL_POSITION_LFE1
+};
+
+/* Master table of known orders: {name, channel count, layout}. Named
+ * entries come first; NULL-named entries are the per-count defaults; the
+ * {NULL, 0, NULL} entry terminates iteration (loops test .pos). */
+const GstRTPChannelOrder gst_rtp_channel_orders[] = {
+  /* 4 channels */
+  {"DV.LRLsRs", 4, pos_4_1},
+  {"DV.LRCS", 4, pos_4_2},
+  {"DV.LRCWo", 4, pos_4_3},
+  /* 5 channels */
+  {"DV.LRLsRsC", 5, pos_5_1},
+  /* 6 channels */
+  {"DV.LRLsRsCS", 6, pos_6_1},
+  {"DV.LmixRmixTWoQ1Q2", 6, pos_6_2},
+  /* 8 channels */
+  {"DV.LRCWoLsRsLmixRmix", 8, pos_8_1},
+  {"DV.LRCWoLs1Rs1Ls2Rs2", 8, pos_8_2},
+  {"DV.LRCWoLsRsLcRc", 8, pos_8_3},
+
+  /* default layouts */
+  {NULL, 1, pos_def_1},
+  {NULL, 2, pos_def_2},
+  {NULL, 3, pos_def_3},
+  {NULL, 4, pos_def_4},
+  {NULL, 5, pos_def_5},
+  {NULL, 6, pos_def_6},
+
+  /* terminator, invalid entry */
+  {NULL, 0, NULL},
+};
+
+/* Return TRUE when every position in @pos occurs somewhere in @order->pos
+ * (set containment, order-insensitive). Duplicate positions are not
+ * counted, so multisets are not distinguished. */
+static gboolean
+check_channels (const GstRTPChannelOrder * order,
+    const GstAudioChannelPosition * pos)
+{
+  gint i, j;
+  gboolean res = TRUE;
+
+  for (i = 0; i < order->channels; i++) {
+    for (j = 0; j < order->channels; j++) {
+      if (order->pos[j] == pos[i])
+        break;
+    }
+    /* pos[i] not found in the order's layout */
+    if (j == order->channels)
+      return FALSE;
+  }
+  return res;
+}
+
+/**
+ * gst_rtp_channels_get_by_pos:
+ * @channels: the amount of channels
+ * @pos: a channel layout
+ *
+ * Find the first known channel order with @channels channels whose layout
+ * contains all positions of @pos.
+ *
+ * Returns: a #GstRTPChannelOrder with the channel information or NULL when @pos
+ * is not a valid layout.
+ */
+const GstRTPChannelOrder *
+gst_rtp_channels_get_by_pos (gint channels, const GstAudioChannelPosition * pos)
+{
+  gint i;
+  const GstRTPChannelOrder *res = NULL;
+
+  g_return_val_if_fail (pos != NULL, NULL);
+
+  /* channel_orders aliases gst_rtp_channel_orders (see gstrtpchannels.h) */
+  for (i = 0; channel_orders[i].pos; i++) {
+    if (channel_orders[i].channels != channels)
+      continue;
+
+    if (check_channels (&channel_orders[i], pos)) {
+      res = &channel_orders[i];
+      break;
+    }
+  }
+  return res;
+}
+
+/**
+ * gst_rtp_channels_get_by_order:
+ * @channels: the amount of channels
+ * @order: a channel order name, or NULL for the default layout
+ *
+ * Get the channel order info for @order and @channels.
+ *
+ * Returns: a #GstRTPChannelOrder with the channel information or NULL when
+ * @order is not a known layout for @channels.
+ */
+const GstRTPChannelOrder *
+gst_rtp_channels_get_by_order (gint channels, const gchar * order)
+{
+  gint i;
+  const GstRTPChannelOrder *res = NULL;
+
+  for (i = 0; channel_orders[i].pos; i++) {
+    if (channel_orders[i].channels != channels)
+      continue;
+
+    /* no name but channels match, continue */
+    if (!channel_orders[i].name || !order) {
+      res = &channel_orders[i];
+      break;
+    }
+
+    /* compare names case-insensitively; g_ascii_strcasecmp() returns 0 on
+     * a match, so it must be negated — without the '!' this picked the
+     * first NON-matching entry */
+    if (!g_ascii_strcasecmp (channel_orders[i].name, order)) {
+      res = &channel_orders[i];
+      break;
+    }
+  }
+  return res;
+}
+
+/**
+ * gst_rtp_channels_get_by_index:
+ * @channels: the amount of channels
+ * @idx: the channel index to retrieve
+ *
+ * Get the allowed channel order descriptions for @channels. @idx can be used to
+ * retrieve the desired index.
+ *
+ * Returns: a #GstRTPChannelOrder at @idx, NULL when there are no valid channel
+ * orders.
+ */
+const GstRTPChannelOrder *
+gst_rtp_channels_get_by_index (gint channels, guint idx)
+{
+  gint i;
+  const GstRTPChannelOrder *res = NULL;
+
+  for (i = 0; channel_orders[i].pos; i++) {
+    if (channel_orders[i].channels != channels)
+      continue;
+
+    /* idx counts only entries that match @channels */
+    if (idx == 0) {
+      res = &channel_orders[i];
+      break;
+    }
+    idx--;
+  }
+  return res;
+}
+
+
+/**
+ * gst_rtp_channels_create_default:
+ * @channels: the amount of channels
+ * @posn: (out): an array of at least @channels positions, filled by this
+ *   function
+ *
+ * Create a default "none" channel mapping for @channels: every entry of
+ * @posn is set to #GST_AUDIO_CHANNEL_POSITION_NONE.
+ */
+void
+gst_rtp_channels_create_default (gint channels, GstAudioChannelPosition * posn)
+{
+  gint i;
+
+  g_return_if_fail (channels > 0);
+
+  for (i = 0; i < channels; i++)
+    posn[i] = GST_AUDIO_CHANNEL_POSITION_NONE;
+}
diff --git a/gst/rtp/gstrtpchannels.h b/gst/rtp/gstrtpchannels.h
new file mode 100644
index 0000000000..6d3f530902
--- /dev/null
+++ b/gst/rtp/gstrtpchannels.h
@@ -0,0 +1,46 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/audio/audio.h>
+
+#ifndef __GST_RTP_CHANNELS_H__
+#define __GST_RTP_CHANNELS_H__
+
+/* one known channel layout: a named RTP channel order for a given number of
+ * channels together with the GStreamer positions it maps to */
+typedef struct
+{
+  const gchar *name;            /* RTP order name; NULL entries match any order */
+  gint channels;                /* number of channels this order applies to */
+  const GstAudioChannelPosition *pos;   /* per-channel positions; NULL terminates the table */
+} GstRTPChannelOrder;
+
+#define channel_orders gst_rtp_channel_orders
+G_GNUC_INTERNAL extern const GstRTPChannelOrder gst_rtp_channel_orders[];
+
+const GstRTPChannelOrder * gst_rtp_channels_get_by_pos (gint channels,
+ const GstAudioChannelPosition *pos);
+const GstRTPChannelOrder * gst_rtp_channels_get_by_order (gint channels,
+ const gchar *order);
+const GstRTPChannelOrder * gst_rtp_channels_get_by_index (gint channels, guint idx);
+
+void gst_rtp_channels_create_default (gint channels, GstAudioChannelPosition *pos);
+
+#endif /* __GST_RTP_CHANNELS_H__ */
diff --git a/gst/rtp/gstrtpdvdepay.c b/gst/rtp/gstrtpdvdepay.c
new file mode 100644
index 0000000000..6558dddd19
--- /dev/null
+++ b/gst/rtp/gstrtpdvdepay.c
@@ -0,0 +1,421 @@
+/* Farsight
+ * Copyright (C) 2006 Marcel Moreaux <marcelm@spacelabs.nl>
+ * (C) 2008 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * RTP DV depayloader.
+ *
+ * Important note for NTSC-users:
+ *
+ * Because the author uses PAL video, and he does not have proper DV
+ * documentation (the DV format specification is not freely available),
+ * this code may very well contain PAL-specific assumptions.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/gst.h>
+
+#include "gstrtpdvdepay.h"
+
+#include "gstrtpelements.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY (rtpdvdepay_debug);
+#define GST_CAT_DEFAULT (rtpdvdepay_debug)
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+};
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-dv")
+ );
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) { \"video\", \"audio\" },"
+ "encoding-name = (string) \"DV\", "
+ "clock-rate = (int) 90000,"
+ "encode = (string) { \"SD-VCR/525-60\", \"SD-VCR/625-50\", \"HD-VCR/1125-60\","
+ "\"HD-VCR/1250-50\", \"SDL-VCR/525-60\", \"SDL-VCR/625-50\","
+ "\"306M/525-60\", \"306M/625-50\", \"314M-25/525-60\","
+ "\"314M-25/625-50\", \"314M-50/525-60\", \"314M-50/625-50\" }"
+ /* optional parameters can't go in the template
+ * "audio = (string) { \"bundled\", \"none\" }"
+ */
+ )
+ );
+
+static GstStateChangeReturn
+gst_rtp_dv_depay_change_state (GstElement * element, GstStateChange transition);
+
+static GstBuffer *gst_rtp_dv_depay_process (GstRTPBaseDepayload * base,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_dv_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+#define gst_rtp_dv_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVDepay, gst_rtp_dv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpdvdepay, "rtpdvdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_DV_DEPAY, rtp_element_init (plugin));
+
+/* class init: wire up the state-change handler, pad templates, element
+ * metadata and the RTP base depayloader vmethods (per-packet processing
+ * and caps negotiation) */
+static void
+gst_rtp_dv_depay_class_init (GstRTPDVDepayClass * klass)
+{
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class =
+      (GstRTPBaseDepayloadClass *) klass;
+
+  GST_DEBUG_CATEGORY_INIT (rtpdvdepay_debug, "rtpdvdepay", 0,
+      "DV RTP Depayloader");
+
+  /* we need change_state to reset the accumulator on READY<->PAUSED */
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_change_state);
+
+  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP DV Depayloader",
+      "Codec/Depayloader/Network/RTP",
+      "Depayloads DV from RTP packets (RFC 3189)",
+      "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasedepayload_class->process_rtp_packet =
+      GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_process);
+  gstrtpbasedepayload_class->set_caps =
+      GST_DEBUG_FUNCPTR (gst_rtp_dv_depay_setcaps);
+}
+
+/* initialize the new element
+ * instantiate pads and add them to element
+ * set functions
+ * initialize structure
+ */
+/* instance init: nothing to do here; fields are zero-initialized by GObject
+ * and the real setup happens in the READY->PAUSED reset and in setcaps */
+static void
+gst_rtp_dv_depay_init (GstRTPDVDepay * filter)
+{
+}
+
+/* map the RFC 3189 "encode" caps string onto frame size, dimensions and
+ * framerate; returns FALSE for encodings we do not handle */
+static gboolean
+parse_encode (GstRTPDVDepay * rtpdvdepay, const gchar * encode)
+{
+  static const struct
+  {
+    const gchar *encode;
+    guint frame_size;
+    gint height;
+    gint rate_num;
+    gint rate_denom;
+  } formats[] = {
+    {"314M-25/525-60", 240000, 480, 30000, 1001},
+    {"SD-VCR/525-60", 120000, 480, 30000, 1001},
+    {"314M-50/625-50", 288000, 576, 25, 1},
+    {"SD-VCR/625-50", 144000, 576, 25, 1},
+    {"314M-25/625-50", 144000, 576, 25, 1},
+  };
+  guint i;
+
+  /* all supported formats are 720 pixels wide */
+  rtpdvdepay->width = 720;
+  /* -1 (wraps in the guint field) marks an unknown encoding */
+  rtpdvdepay->frame_size = -1;
+
+  for (i = 0; i < G_N_ELEMENTS (formats); i++) {
+    if (strcmp (encode, formats[i].encode) == 0) {
+      rtpdvdepay->frame_size = formats[i].frame_size;
+      rtpdvdepay->height = formats[i].height;
+      rtpdvdepay->rate_num = formats[i].rate_num;
+      rtpdvdepay->rate_denom = formats[i].rate_denom;
+      break;
+    }
+  }
+  return rtpdvdepay->frame_size != -1;
+}
+
+/* parse the RTP caps, size and clear the frame accumulator and set the
+ * video/x-dv source caps; returns FALSE on missing/unknown caps fields */
+static gboolean
+gst_rtp_dv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstRTPDVDepay *rtpdvdepay;
+  GstCaps *srccaps;
+  gint clock_rate;
+  gboolean systemstream, ret;
+  const gchar *encode, *media;
+
+  rtpdvdepay = GST_RTP_DV_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 90000;         /* default */
+  depayload->clock_rate = clock_rate;
+
+  /* we really need the encode property to figure out the frame size, it's also
+   * required by the spec */
+  if (!(encode = gst_structure_get_string (structure, "encode")))
+    goto no_encode;
+
+  /* figure out the size of one frame */
+  if (!parse_encode (rtpdvdepay, encode))
+    goto unknown_encode;
+
+  /* check the media, this tells us that the stream has video or not */
+  if (!(media = gst_structure_get_string (structure, "media")))
+    goto no_media;
+
+  systemstream = FALSE;
+
+  if (!strcmp (media, "audio")) {
+    /* we need a demuxer for audio only */
+    systemstream = TRUE;
+  } else if (!strcmp (media, "video")) {
+    const gchar *audio;
+
+    /* check the optional audio field, if it's present and set to bundled, we
+     * are dealing with a system stream. */
+    if ((audio = gst_structure_get_string (structure, "audio"))) {
+      if (!strcmp (audio, "bundled"))
+        systemstream = TRUE;
+    }
+  }
+
+  /* allocate accumulator; drop any previous one first so that a caps
+   * renegotiation does not leak the old buffer */
+  if (rtpdvdepay->acc)
+    gst_buffer_unref (rtpdvdepay->acc);
+  rtpdvdepay->acc = gst_buffer_new_and_alloc (rtpdvdepay->frame_size);
+
+  /* Initialize the new accumulator frame.
+   * If the previous frame exists, copy that into the accumulator frame.
+   * This way, missing packets in the stream won't show up badly. */
+  gst_buffer_memset (rtpdvdepay->acc, 0, 0, rtpdvdepay->frame_size);
+
+  srccaps = gst_caps_new_simple ("video/x-dv",
+      "systemstream", G_TYPE_BOOLEAN, systemstream,
+      "width", G_TYPE_INT, rtpdvdepay->width,
+      "height", G_TYPE_INT, rtpdvdepay->height,
+      "framerate", GST_TYPE_FRACTION, rtpdvdepay->rate_num,
+      rtpdvdepay->rate_denom, NULL);
+  ret = gst_pad_set_caps (depayload->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  return ret;
+
+  /* ERRORS */
+no_encode:
+  {
+    GST_ERROR_OBJECT (rtpdvdepay, "required encode property not found in caps");
+    return FALSE;
+  }
+unknown_encode:
+  {
+    GST_ERROR_OBJECT (rtpdvdepay, "unknown encode property %s found", encode);
+    return FALSE;
+  }
+no_media:
+  {
+    GST_ERROR_OBJECT (rtpdvdepay, "required media property not found in caps");
+    return FALSE;
+  }
+}
+
+/* A DV frame consists of a bunch of 80-byte DIF blocks.
+ * Each DIF block contains a 3-byte header telling where in the DV frame the
+ * DIF block should go. We use this information to calculate its position.
+ */
+/* decode the 3-byte DIF block header into the block's position (in units of
+ * 80-byte blocks) inside the DV frame.
+ * NOTE: the return type is guint but unknown block types yield -1, which
+ * wraps to G_MAXUINT; the caller compares against -1 the same way, so the
+ * sentinel round-trips correctly. */
+static guint
+calculate_difblock_location (guint8 * block)
+{
+  gint block_type, dif_sequence, dif_block;
+  guint location;
+
+  block_type = block[0] >> 5;   /* top 3 bits of byte 0 */
+  dif_sequence = block[1] >> 4; /* top 4 bits of byte 1 */
+  dif_block = block[2];
+
+  /* each DIF sequence holds 150 blocks */
+  location = dif_sequence * 150;
+
+  switch (block_type) {
+    case 0:                    /* Header block, no offset */
+      break;
+    case 1:                    /* Subcode block */
+      location += (1 + dif_block);
+      break;
+    case 2:                    /* VAUX block */
+      location += (3 + dif_block);
+      break;
+    case 3:                    /* Audio block */
+      location += (6 + dif_block * 16);
+      break;
+    case 4:                    /* Video block */
+      location += (7 + (dif_block / 15) + dif_block);
+      break;
+    default:                   /* Something bogus */
+      GST_DEBUG ("UNKNOWN BLOCK");
+      location = -1;
+      break;
+  }
+  return location;
+}
+
+/* gst_buffer_foreach_meta callback: setting *meta to NULL removes every meta
+ * from the buffer; always returns TRUE to keep iterating */
+static gboolean
+foreach_metadata_drop (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
+{
+  *meta = NULL;
+  return TRUE;
+}
+
+/* Process one RTP packet. Accumulate RTP payload in the proper place in a DV
+ * frame, and return that frame if we detect a new frame, or NULL otherwise.
+ * We assume a DV frame is 144000 bytes. That should accommodate PAL as well as
+ * NTSC.
+ */
+static GstBuffer *
+gst_rtp_dv_depay_process (GstRTPBaseDepayload * base, GstRTPBuffer * rtp)
+{
+  GstBuffer *out = NULL;
+  guint8 *payload;
+  guint32 rtp_ts;
+  guint payload_len, location;
+  GstRTPDVDepay *dvdepay = GST_RTP_DV_DEPAY (base);
+  gboolean marker;
+  GstMapInfo map;
+
+  marker = gst_rtp_buffer_get_marker (rtp);
+
+  /* Check if the received packet contains (the start of) a new frame, we do
+   * this by checking the RTP timestamp. */
+  rtp_ts = gst_rtp_buffer_get_timestamp (rtp);
+
+  /* we cannot copy the packet yet if the marker is set, we will do that below
+   * after taking out the data */
+  if (dvdepay->prev_ts != -1 && rtp_ts != dvdepay->prev_ts && !marker) {
+    /* the timestamp changed */
+    GST_DEBUG_OBJECT (dvdepay, "new frame with ts %u, old ts %u", rtp_ts,
+        dvdepay->prev_ts);
+
+    /* return copy of accumulator; the accumulator itself is kept so that
+     * blocks missing from the next frame retain the previous frame's data */
+    out = gst_buffer_copy (dvdepay->acc);
+    gst_buffer_foreach_meta (dvdepay->acc, foreach_metadata_drop, NULL);
+  }
+
+  /* Extract the payload */
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+  payload = gst_rtp_buffer_get_payload (rtp);
+
+  /* copy all DIF chunks in their place; partial trailing chunks (< 80 bytes)
+   * are ignored by the loop condition */
+  gst_buffer_map (dvdepay->acc, &map, GST_MAP_READWRITE);
+  while (payload_len >= 80) {
+    guint offset;
+
+    /* Calculate where in the frame the payload should go */
+    location = calculate_difblock_location (payload);
+
+    /* locations 0-5 are the header DIF blocks of sequence 0; track them in
+     * header_mask so we only push frames with a complete header (0x3f) */
+    if (location < 6) {
+      /* part of a header, set the flag to mark that we have the header. */
+      dvdepay->header_mask |= (1 << location);
+      GST_LOG_OBJECT (dvdepay, "got header at location %d, now %02x", location,
+          dvdepay->header_mask);
+    } else {
+      GST_LOG_OBJECT (dvdepay, "got block at location %d", location);
+    }
+
+    /* -1 (wrapped to G_MAXUINT) flags an unknown block type */
+    if (location != -1) {
+      /* get the byte offset of the dif block */
+      offset = location * 80;
+
+      /* And copy it in, provided the location is sane. */
+      if (offset <= dvdepay->frame_size - 80) {
+        memcpy (map.data + offset, payload, 80);
+        gst_rtp_copy_meta (GST_ELEMENT_CAST (dvdepay), dvdepay->acc,
+            rtp->buffer, 0);
+      }
+    }
+
+    payload += 80;
+    payload_len -= 80;
+  }
+  gst_buffer_unmap (dvdepay->acc, &map);
+
+  if (marker) {
+    GST_DEBUG_OBJECT (dvdepay, "marker bit complete frame %u", rtp_ts);
+    /* only copy the frame when we have a complete header */
+    if (dvdepay->header_mask == 0x3f) {
+      /* The marker marks the end of a frame that we need to push. The next frame
+       * will change the timestamp but we won't copy the accumulator again because
+       * we set the prev_ts to -1. */
+      out = gst_buffer_copy (dvdepay->acc);
+      gst_buffer_foreach_meta (dvdepay->acc, foreach_metadata_drop, NULL);
+    } else {
+      GST_WARNING_OBJECT (dvdepay, "waiting for frame headers %02x",
+          dvdepay->header_mask);
+    }
+    dvdepay->prev_ts = -1;
+  } else {
+    /* save last timestamp */
+    dvdepay->prev_ts = rtp_ts;
+  }
+  return out;
+}
+
+/* drop the partially accumulated frame and return the depayloader to its
+ * pristine state (no previous timestamp, no header blocks seen) */
+static void
+gst_rtp_dv_depay_reset (GstRTPDVDepay * depay)
+{
+  /* unrefs the old accumulator (if any) and stores NULL */
+  gst_buffer_replace (&depay->acc, NULL);
+
+  depay->prev_ts = -1;
+  depay->header_mask = 0;
+}
+
+/* reset the accumulator state when entering PAUSED (before the parent's
+ * transition) and again when going back to READY (after it) */
+static GstStateChangeReturn
+gst_rtp_dv_depay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstRTPDVDepay *depay = GST_RTP_DV_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_rtp_dv_depay_reset (depay);
+      break;
+    default:
+      break;
+  }
+
+  /* chain up; GST_STATE_CHANGE_FAILURE if the parent has no change_state */
+  ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
+      (element, transition), GST_STATE_CHANGE_FAILURE);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_rtp_dv_depay_reset (depay);
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpdvdepay.h b/gst/rtp/gstrtpdvdepay.h
new file mode 100644
index 0000000000..3cd9214600
--- /dev/null
+++ b/gst/rtp/gstrtpdvdepay.h
@@ -0,0 +1,64 @@
+/* Farsight
+ * Copyright (C) 2006 Marcel Moreaux <marcelm@spacelabs.nl>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GSTRTPDVDEPAY_H__
+#define __GSTRTPDVDEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+/* #define's don't like whitespacey bits */
+#define GST_TYPE_RTP_DV_DEPAY (gst_rtp_dv_depay_get_type())
+#define GST_RTP_DV_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DV_DEPAY,GstRTPDVDepay))
+#define GST_RTP_DV_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DV_DEPAY,GstRTPDVDepay))
+#define GST_IS_RTP_DV_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DV_DEPAY))
+/* parameter must be named klass: the expansion references (klass), so the
+ * previous (obj) parameter made the macro fail to compile when used */
+#define GST_IS_RTP_DV_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DV_DEPAY))
+
+typedef struct _GstRTPDVDepay GstRTPDVDepay;
+typedef struct _GstRTPDVDepayClass GstRTPDVDepayClass;
+
+struct _GstRTPDVDepay
+{
+  GstRTPBaseDepayload parent;
+
+  GstBuffer *acc;               /* accumulator for the DV frame being rebuilt */
+  guint frame_size;             /* bytes in one DV frame, from the "encode" caps */
+  guint32 prev_ts;              /* RTP timestamp of the previous packet, -1 when unset */
+  guint8 header_mask;           /* bitmask of header DIF blocks seen (0x3f = complete) */
+
+  gint width, height;           /* video dimensions parsed from "encode" */
+  gint rate_num, rate_denom;    /* framerate fraction parsed from "encode" */
+};
+
+struct _GstRTPDVDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_dv_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GSTRTPDVDEPAY_H__ */
diff --git a/gst/rtp/gstrtpdvpay.c b/gst/rtp/gstrtpdvpay.c
new file mode 100644
index 0000000000..2a0e92e66d
--- /dev/null
+++ b/gst/rtp/gstrtpdvpay.c
@@ -0,0 +1,397 @@
+/* Farsight
+ * Copyright (C) 2006 Marcel Moreaux <marcelm@spacelabs.nl>
+ * (C) 2008 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpdvpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY (rtpdvpay_debug);
+#define GST_CAT_DEFAULT (rtpdvpay_debug)
+
+#define DEFAULT_MODE GST_DV_PAY_MODE_VIDEO
+enum
+{
+ PROP_0,
+ PROP_MODE
+};
+
+/* takes both system and non-system streams */
+static GstStaticPadTemplate gst_rtp_dv_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-dv")
+ );
+
+static GstStaticPadTemplate gst_rtp_dv_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) { \"video\", \"audio\" } ,"
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "encoding-name = (string) \"DV\", "
+ "clock-rate = (int) 90000,"
+ "encode = (string) { \"SD-VCR/525-60\", \"SD-VCR/625-50\", \"HD-VCR/1125-60\","
+ "\"HD-VCR/1250-50\", \"SDL-VCR/525-60\", \"SDL-VCR/625-50\","
+ "\"306M/525-60\", \"306M/625-50\", \"314M-25/525-60\","
+ "\"314M-25/625-50\", \"314M-50/525-60\", \"314M-50/625-50\" }"
+ /* optional parameters can't go in the template
+ * "audio = (string) { \"bundled\", \"none\" }"
+ */
+ )
+ );
+
+static gboolean gst_rtp_dv_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * payload,
+ GstBuffer * buffer);
+
+#define GST_TYPE_DV_PAY_MODE (gst_dv_pay_mode_get_type())
+/* lazily register the GstDVPayMode enum GType.
+ * Uses g_once_init_enter()/leave() so concurrent first callers cannot race
+ * and register the type twice (the plain static flag was not thread-safe). */
+static GType
+gst_dv_pay_mode_get_type (void)
+{
+  static gsize dv_pay_mode_type = 0;
+  static const GEnumValue dv_pay_modes[] = {
+    {GST_DV_PAY_MODE_VIDEO, "Video only", "video"},
+    {GST_DV_PAY_MODE_BUNDLED, "Video and Audio bundled", "bundled"},
+    {GST_DV_PAY_MODE_AUDIO, "Audio only", "audio"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&dv_pay_mode_type)) {
+    GType type = g_enum_register_static ("GstDVPayMode", dv_pay_modes);
+    g_once_init_leave (&dv_pay_mode_type, type);
+  }
+  return (GType) dv_pay_mode_type;
+}
+
+
+static void gst_dv_pay_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_dv_pay_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+#define gst_rtp_dv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDVPay, gst_rtp_dv_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpdvpay, "rtpdvpay", GST_RANK_SECONDARY,
+ GST_TYPE_RTP_DV_PAY, rtp_element_init (plugin));
+
+/* class init: install the "mode" property, pad templates, element metadata
+ * and the RTP base payloader vmethods (caps negotiation and per-buffer
+ * payloading) */
+static void
+gst_rtp_dv_pay_class_init (GstRTPDVPayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  GST_DEBUG_CATEGORY_INIT (rtpdvpay_debug, "rtpdvpay", 0, "DV RTP Payloader");
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gobject_class->set_property = gst_dv_pay_set_property;
+  gobject_class->get_property = gst_dv_pay_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_MODE,
+      g_param_spec_enum ("mode", "Mode",
+          "The payload mode of payloading",
+          GST_TYPE_DV_PAY_MODE, DEFAULT_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_dv_pay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_dv_pay_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP DV Payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payloads DV into RTP packets (RFC 3189)",
+      "Marcel Moreaux <marcelm@spacelabs.nl>, Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_dv_pay_setcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_dv_pay_handle_buffer;
+
+  /* expose the enum in the plugin API so bindings/docs pick it up */
+  gst_type_mark_as_plugin_api (GST_TYPE_DV_PAY_MODE, 0);
+}
+
+/* instance init: nothing to do; negotiated starts FALSE and mode keeps the
+ * property default until changed */
+static void
+gst_rtp_dv_pay_init (GstRTPDVPay * rtpdvpay)
+{
+}
+
+/* GObject property setter: only "mode" is writable */
+static void
+gst_dv_pay_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstRTPDVPay *rtpdvpay = GST_RTP_DV_PAY (object);
+
+  if (prop_id == PROP_MODE)
+    rtpdvpay->mode = g_value_get_enum (value);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject property getter: only "mode" is readable */
+static void
+gst_dv_pay_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstRTPDVPay *rtpdvpay = GST_RTP_DV_PAY (object);
+
+  if (prop_id == PROP_MODE)
+    g_value_set_enum (value, rtpdvpay->mode);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* accept any video/x-dv sink caps; actual output caps negotiation is
+ * deferred to gst_dv_pay_negotiate() when the first frame is seen */
+static gboolean
+gst_rtp_dv_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  /* We don't do anything here, but we could check if it's a system stream and if
+   * it's not, default to sending the video only. We will negotiate downstream
+   * caps when we get to see the first frame. */
+
+  return TRUE;
+}
+
+/* inspect the first DV frame's header bytes to pick the RFC 3189 "encode"
+ * string, then set the output RTP caps accordingly.
+ * @size is currently unused; callers guarantee at least one full frame.
+ * NOTE(review): the byte/bit tests below (DSF flag in byte 3, sampling bits
+ * at offset 80*5+48, APT bits in byte 5) come from the DV header layout —
+ * verify against SMPTE 314M/IEC 61834 when touching them. */
+static gboolean
+gst_dv_pay_negotiate (GstRTPDVPay * rtpdvpay, guint8 * data, gsize size)
+{
+  const gchar *encode, *media;
+  gboolean audio_bundled, res;
+
+  if ((data[3] & 0x80) == 0) {  /* DSF flag */
+    /* it's an NTSC format */
+    if ((data[80 * 5 + 48 + 3] & 0x4) && (data[80 * 5 + 48] == 0x60)) { /* 4:2:2 sampling */
+      /* NTSC 50Mbps */
+      encode = "314M-25/525-60";
+    } else {                    /* 4:1:1 sampling */
+      /* NTSC 25Mbps */
+      encode = "SD-VCR/525-60";
+    }
+  } else {
+    /* it's a PAL format */
+    if ((data[80 * 5 + 48 + 3] & 0x4) && (data[80 * 5 + 48] == 0x60)) { /* 4:2:2 sampling */
+      /* PAL 50Mbps */
+      encode = "314M-50/625-50";
+    } else if ((data[5] & 0x07) == 0) { /* APT flag */
+      /* PAL 25Mbps 4:2:0 */
+      encode = "SD-VCR/625-50";
+    } else
+      /* PAL 25Mbps 4:1:1 */
+      encode = "314M-25/625-50";
+  }
+
+  media = "video";
+  audio_bundled = FALSE;
+
+  /* the configured mode decides the RTP "media" field and whether audio is
+   * advertised as bundled */
+  switch (rtpdvpay->mode) {
+    case GST_DV_PAY_MODE_AUDIO:
+      media = "audio";
+      break;
+    case GST_DV_PAY_MODE_BUNDLED:
+      audio_bundled = TRUE;
+      break;
+    default:
+      break;
+  }
+  gst_rtp_base_payload_set_options (GST_RTP_BASE_PAYLOAD (rtpdvpay), media,
+      TRUE, "DV", 90000);
+
+  if (audio_bundled) {
+    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpdvpay),
+        "encode", G_TYPE_STRING, encode,
+        "audio", G_TYPE_STRING, "bundled", NULL);
+  } else {
+    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpdvpay),
+        "encode", G_TYPE_STRING, encode, NULL);
+  }
+  return res;
+}
+
+/* decide whether a DIF block should be payloaded given the configured mode:
+ * header/subcode/VAUX blocks always go out, audio blocks are dropped in
+ * video-only mode, video blocks in audio-only mode, unknown types always */
+static gboolean
+include_dif (GstRTPDVPay * rtpdvpay, guint8 * data)
+{
+  /* the top three bits of the first header byte give the block type */
+  switch (data[0] >> 5) {
+    case 0:                    /* Header block */
+    case 1:                    /* Subcode block */
+    case 2:                    /* VAUX block */
+      return TRUE;
+    case 3:                    /* Audio block */
+      return rtpdvpay->mode != GST_DV_PAY_MODE_VIDEO;
+    case 4:                    /* Video block */
+      return rtpdvpay->mode != GST_DV_PAY_MODE_AUDIO;
+    default:                   /* Something bogus, just ignore */
+      return FALSE;
+  }
+}
+
+/* Get a DV frame, chop it up in pieces, and push the pieces to the RTP layer.
+ */
+static GstFlowReturn
+gst_rtp_dv_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstRTPDVPay *rtpdvpay;
+  guint max_payload_size;
+  GstBuffer *outbuf;
+  GstFlowReturn ret = GST_FLOW_OK;
+  gint hdrlen;
+  gsize size;
+  GstMapInfo map;
+  guint8 *data;
+  guint8 *dest;
+  guint filled;
+  GstRTPBuffer rtp = { NULL, };
+
+  rtpdvpay = GST_RTP_DV_PAY (basepayload);
+
+  hdrlen = gst_rtp_buffer_calc_header_len (0);
+  /* DV frames are made up from a bunch of DIF blocks. DIF blocks are 80 bytes
+   * each, and we should put an integral number of them in each RTP packet.
+   * Therefore, we round the available room down to the nearest multiple of 80.
+   *
+   * The available room is just the packet MTU, minus the RTP header length. */
+  max_payload_size = ((GST_RTP_BASE_PAYLOAD_MTU (rtpdvpay) - hdrlen) / 80) * 80;
+
+  /* The length of the buffer to transmit. */
+  if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
+    GST_ELEMENT_ERROR (rtpdvpay, CORE, FAILED,
+        (NULL), ("Failed to map buffer"));
+    gst_buffer_unref (buffer);
+    return GST_FLOW_ERROR;
+  }
+  data = map.data;
+  size = map.size;
+
+  GST_DEBUG_OBJECT (rtpdvpay,
+      "DV RTP payloader got buffer of %" G_GSIZE_FORMAT
+      " bytes, splitting in %u byte " "payload fragments, at time %"
+      GST_TIME_FORMAT, size, max_payload_size,
+      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
+
+  /* first frame: derive the "encode" string and set output caps.
+   * NOTE(review): the return value of gst_dv_pay_negotiate() is ignored, so
+   * a failed outcaps negotiation is not reported here — confirm intended. */
+  if (!rtpdvpay->negotiated) {
+    gst_dv_pay_negotiate (rtpdvpay, data, size);
+    /* if we have not yet scanned the stream for its type, do so now */
+    rtpdvpay->negotiated = TRUE;
+  }
+
+  outbuf = NULL;
+  dest = NULL;
+  filled = 0;
+
+  /* while we have a complete DIF chunks left */
+  while (size >= 80) {
+    /* Allocate a new buffer, set the timestamp */
+    if (outbuf == NULL) {
+      outbuf =
+          gst_rtp_base_payload_allocate_output_buffer (basepayload,
+          max_payload_size, 0, 0);
+      GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buffer);
+
+      if (!gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp)) {
+        gst_buffer_unref (outbuf);
+        GST_ELEMENT_ERROR (rtpdvpay, CORE, FAILED,
+            (NULL), ("Failed to map RTP buffer"));
+        ret = GST_FLOW_ERROR;
+        goto beach;
+      }
+      dest = gst_rtp_buffer_get_payload (&rtp);
+      filled = 0;
+    }
+
+    /* inspect the DIF chunk, if we don't need to include it, skip to the next one. */
+    if (include_dif (rtpdvpay, data)) {
+      /* copy data in packet */
+      memcpy (dest, data, 80);
+
+      dest += 80;
+      filled += 80;
+    }
+
+    /* go to next dif chunk */
+    size -= 80;
+    data += 80;
+
+    /* push out the buffer if the next one would exceed the max packet size or
+     * when we are pushing the last packet */
+    if (filled + 80 > max_payload_size || size < 80) {
+      /* NOTE(review): the marker is set when fewer than 160 bytes remain,
+       * which can also hit the second-to-last packet when a full chunk is
+       * still pending — confirm against RFC 3189 marker semantics */
+      if (size < 160) {
+        guint hlen;
+
+        /* set marker */
+        gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+        /* shrink buffer to last packet */
+        hlen = gst_rtp_buffer_get_header_len (&rtp);
+        gst_rtp_buffer_set_packet_len (&rtp, hlen + filled);
+      }
+
+      /* Push out the created piece, and check for errors. */
+      gst_rtp_buffer_unmap (&rtp);
+      gst_rtp_copy_meta (GST_ELEMENT_CAST (basepayload), outbuf, buffer, 0);
+      ret = gst_rtp_base_payload_push (basepayload, outbuf);
+      if (ret != GST_FLOW_OK)
+        break;
+
+      outbuf = NULL;
+    }
+  }
+
+beach:
+  gst_buffer_unmap (buffer, &map);
+  gst_buffer_unref (buffer);
+
+  return ret;
+}
diff --git a/gst/rtp/gstrtpdvpay.h b/gst/rtp/gstrtpdvpay.h
new file mode 100644
index 0000000000..def525b2ca
--- /dev/null
+++ b/gst/rtp/gstrtpdvpay.h
@@ -0,0 +1,67 @@
+/* Farsight
+ * Copyright (C) 2006 Marcel Moreaux <marcelm@spacelabs.nl>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GSTRTPDVPAY_H__
+#define __GSTRTPDVPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPDVPay GstRTPDVPay;
+typedef struct _GstRTPDVPayClass GstRTPDVPayClass;
+
+#define GST_TYPE_RTP_DV_PAY \
+ (gst_rtp_dv_pay_get_type())
+#define GST_RTP_DV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DV_PAY,GstRTPDVPay))
+#define GST_RTP_DV_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DV_PAY,GstRTPDVPay))
+#define GST_IS_RTP_DV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DV_PAY))
+/* parameter must be named klass: the expansion references (klass), so the
+ * previous (obj) parameter made the macro fail to compile when used */
+#define GST_IS_RTP_DV_PAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DV_PAY))
+
+typedef enum
+{
+ GST_DV_PAY_MODE_VIDEO,
+ GST_DV_PAY_MODE_BUNDLED,
+ GST_DV_PAY_MODE_AUDIO
+} GstDVPayMode;
+
+struct _GstRTPDVPay
+{
+  GstRTPBasePayload payload;
+
+  gboolean negotiated;          /* TRUE once the first frame was scanned and outcaps set */
+  GstDVPayMode mode;            /* which DIF blocks to payload: video, audio or bundled */
+};
+
+struct _GstRTPDVPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_dv_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GSTRTPDVPAY_H__ */
diff --git a/gst/rtp/gstrtpelement.c b/gst/rtp/gstrtpelement.c
new file mode 100644
index 0000000000..cd6d883b0a
--- /dev/null
+++ b/gst/rtp/gstrtpelement.c
@@ -0,0 +1,46 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Julian Bouzas <julian.bouzas@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/tag/tag.h>
+#include <gst/video/video.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtputils.h"
+
+
+/* one-time shared initialization for all RTP element register functions:
+ * registers the tag image type and interns the video/audio meta-tag quarks
+ * used by gstrtputils; guarded by g_once so it runs exactly once */
+void
+rtp_element_init (GstPlugin * plugin)
+{
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+    gst_tag_image_type_get_type ();
+    rtp_quark_meta_tag_video =
+        g_quark_from_static_string (GST_META_TAG_VIDEO_STR);
+    rtp_quark_meta_tag_audio =
+        g_quark_from_static_string (GST_META_TAG_AUDIO_STR);
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/gst/rtp/gstrtpelements.h b/gst/rtp/gstrtpelements.h
new file mode 100644
index 0000000000..9321d3530a
--- /dev/null
+++ b/gst/rtp/gstrtpelements.h
@@ -0,0 +1,134 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Julian Bouzas <julian.bouzas@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_ELEMENTS_H__
#define __GST_RTP_ELEMENTS_H__

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gst/gst.h>

G_BEGIN_DECLS

/* Shared one-shot plugin initialization; every element's register function
 * runs this first (see gstrtpelement.c). */
void rtp_element_init (GstPlugin * plugin);

/* Per-element register-function declarations for every payloader,
 * depayloader and helper element in this plugin. */
GST_ELEMENT_REGISTER_DECLARE (rtpac3depay);
GST_ELEMENT_REGISTER_DECLARE (rtpac3pay);
GST_ELEMENT_REGISTER_DECLARE (rtpbvdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpbvpay);
GST_ELEMENT_REGISTER_DECLARE (rtpceltdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpceltpay);
GST_ELEMENT_REGISTER_DECLARE (rtpdvdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpdvpay);
GST_ELEMENT_REGISTER_DECLARE (rtpgstdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpgstpay);
GST_ELEMENT_REGISTER_DECLARE (rtpilbcpay);
GST_ELEMENT_REGISTER_DECLARE (rtpilbcdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpg722depay);
GST_ELEMENT_REGISTER_DECLARE (rtpg722pay);
GST_ELEMENT_REGISTER_DECLARE (rtpg723depay);
GST_ELEMENT_REGISTER_DECLARE (rtpg723pay);
GST_ELEMENT_REGISTER_DECLARE (rtpg726depay);
GST_ELEMENT_REGISTER_DECLARE (rtpg726pay);
GST_ELEMENT_REGISTER_DECLARE (rtpg729depay);
GST_ELEMENT_REGISTER_DECLARE (rtpg729pay);
GST_ELEMENT_REGISTER_DECLARE (rtpgsmdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpgsmpay);
GST_ELEMENT_REGISTER_DECLARE (rtpamrdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpamrpay);
GST_ELEMENT_REGISTER_DECLARE (rtppcmadepay);
GST_ELEMENT_REGISTER_DECLARE (rtppcmudepay);
GST_ELEMENT_REGISTER_DECLARE (rtppcmupay);
GST_ELEMENT_REGISTER_DECLARE (rtppcmapay);
GST_ELEMENT_REGISTER_DECLARE (rtpmpadepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmpapay);
GST_ELEMENT_REGISTER_DECLARE (rtpmparobustdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmpvdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmpvpay);
GST_ELEMENT_REGISTER_DECLARE (rtpopusdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpopuspay);
GST_ELEMENT_REGISTER_DECLARE (rtph261pay);
GST_ELEMENT_REGISTER_DECLARE (rtph261depay);
GST_ELEMENT_REGISTER_DECLARE (rtph263ppay);
GST_ELEMENT_REGISTER_DECLARE (rtph263pdepay);
GST_ELEMENT_REGISTER_DECLARE (rtph263depay);
GST_ELEMENT_REGISTER_DECLARE (rtph263pay);
GST_ELEMENT_REGISTER_DECLARE (rtph264depay);
GST_ELEMENT_REGISTER_DECLARE (rtph264pay);
GST_ELEMENT_REGISTER_DECLARE (rtph265depay);
GST_ELEMENT_REGISTER_DECLARE (rtph265pay);
GST_ELEMENT_REGISTER_DECLARE (rtpj2kdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpj2kpay);
GST_ELEMENT_REGISTER_DECLARE (rtpjpegdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpjpegpay);
GST_ELEMENT_REGISTER_DECLARE (rtpklvdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpklvpay);
GST_ELEMENT_REGISTER_DECLARE (rtpL8pay);
GST_ELEMENT_REGISTER_DECLARE (rtpL8depay);
GST_ELEMENT_REGISTER_DECLARE (rtpL16pay);
GST_ELEMENT_REGISTER_DECLARE (rtpL16depay);
GST_ELEMENT_REGISTER_DECLARE (rtpL24pay);
GST_ELEMENT_REGISTER_DECLARE (rtpL24depay);
GST_ELEMENT_REGISTER_DECLARE (rtpldacpay);
GST_ELEMENT_REGISTER_DECLARE (asteriskh263);
GST_ELEMENT_REGISTER_DECLARE (rtpmp1sdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp2tdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp2tpay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp4vpay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp4vdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp4apay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp4adepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp4gdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpmp4gpay);
GST_ELEMENT_REGISTER_DECLARE (rtpqcelpdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpqdm2depay);
GST_ELEMENT_REGISTER_DECLARE (rtpsbcdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpsbcpay);
GST_ELEMENT_REGISTER_DECLARE (rtpsirenpay);
GST_ELEMENT_REGISTER_DECLARE (rtpsirendepay);
GST_ELEMENT_REGISTER_DECLARE (rtpspeexpay);
GST_ELEMENT_REGISTER_DECLARE (rtpspeexdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpsv3vdepay);
GST_ELEMENT_REGISTER_DECLARE (rtptheoradepay);
GST_ELEMENT_REGISTER_DECLARE (rtptheorapay);
GST_ELEMENT_REGISTER_DECLARE (rtpvorbisdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpvorbispay);
GST_ELEMENT_REGISTER_DECLARE (rtpvp8depay);
GST_ELEMENT_REGISTER_DECLARE (rtpvp8pay);
GST_ELEMENT_REGISTER_DECLARE (rtpvp9depay);
GST_ELEMENT_REGISTER_DECLARE (rtpvp9pay);
GST_ELEMENT_REGISTER_DECLARE (rtpvrawdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpvrawpay);
GST_ELEMENT_REGISTER_DECLARE (rtpstreampay);
GST_ELEMENT_REGISTER_DECLARE (rtpstreamdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpisacpay);
GST_ELEMENT_REGISTER_DECLARE (rtpisacdepay);
GST_ELEMENT_REGISTER_DECLARE (rtpredenc);
GST_ELEMENT_REGISTER_DECLARE (rtpreddec);
GST_ELEMENT_REGISTER_DECLARE (rtpulpfecdec);
GST_ELEMENT_REGISTER_DECLARE (rtpulpfecenc);
GST_ELEMENT_REGISTER_DECLARE (rtpstorage);
GST_ELEMENT_REGISTER_DECLARE (rtphdrextcolorspace);

G_END_DECLS

#endif /* __GST_RTP_ELEMENTS_H__ */
diff --git a/gst/rtp/gstrtpg722depay.c b/gst/rtp/gstrtpg722depay.c
new file mode 100644
index 0000000000..060c7e1c8c
--- /dev/null
+++ b/gst/rtp/gstrtpg722depay.c
@@ -0,0 +1,258 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpg722depay.h"
+#include "gstrtpchannels.h"
+#include "gstrtputils.h"
+
GST_DEBUG_CATEGORY_STATIC (rtpg722depay_debug);
#define GST_CAT_DEFAULT (rtpg722depay_debug)

/* Output: raw G722 audio; concrete rate/channels are fixed in set_caps. */
static GstStaticPadTemplate gst_rtp_g722_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/G722, "
        "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, MAX ]")
    );

/* Input: RTP selected either by encoding-name "G722", or by the static
 * payload number from RFC 3551. */
static GstStaticPadTemplate gst_rtp_g722_depay_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", " "clock-rate = (int) 8000, "
        /* "channels = (int) [1, MAX]" */
        /* "channel-order = (string) ANY" */
        "encoding-name = (string) \"G722\";"
        "application/x-rtp, "
        "media = (string) \"audio\", "
        "payload = (int) " GST_RTP_PAYLOAD_G722_STRING ", "
        "clock-rate = (int) [ 1, MAX ]"
        /* "channels = (int) [1, MAX]" */
        /* "emphasis = (string) ANY" */
        /* "channel-order = (string) ANY" */
    )
    );

#define gst_rtp_g722_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpG722Depay, gst_rtp_g722_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
/* rtp_element_init() runs first so shared plugin state exists. */
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg722depay, "rtpg722depay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_G722_DEPAY, rtp_element_init (plugin));

static gboolean gst_rtp_g722_depay_setcaps (GstRTPBaseDepayload * depayload,
    GstCaps * caps);
static GstBuffer *gst_rtp_g722_depay_process (GstRTPBaseDepayload * depayload,
    GstRTPBuffer * rtp);
+
+static void
+gst_rtp_g722_depay_class_init (GstRtpG722DepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg722depay_debug, "rtpg722depay", 0,
+ "G722 RTP Depayloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_g722_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_g722_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G722 audio from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_g722_depay_setcaps;
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_g722_depay_process;
+}
+
static void
gst_rtp_g722_depay_init (GstRtpG722Depay * rtpg722depay)
{
  /* nothing to do: the per-instance state (rate, channels) is only filled
   * in by the set_caps handler */
}
+
+static gint
+gst_rtp_g722_depay_parse_int (GstStructure * structure, const gchar * field,
+ gint def)
+{
+ const gchar *str;
+ gint res;
+
+ if ((str = gst_structure_get_string (structure, field)))
+ return atoi (str);
+
+ if (gst_structure_get_int (structure, field, &res))
+ return res;
+
+ return def;
+}
+
/* set_caps vfunc: derive the output audio caps (sample rate, channels)
 * from the negotiated RTP caps and configure the depayloader clock rate.
 *
 * Note the G722 quirk from RFC 3551: the RTP clock rate is 8000 Hz even
 * though the actual audio sampling rate is 16000 Hz, so the two are
 * tracked separately below.
 *
 * Returns FALSE when no clock-rate can be determined or the downstream
 * caps cannot be set.
 */
static gboolean
gst_rtp_g722_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  GstRtpG722Depay *rtpg722depay;
  gint clock_rate, payload, samplerate;
  gint channels;
  GstCaps *srccaps;
  gboolean res;
#if 0
  const gchar *channel_order;
  const GstRTPChannelOrder *order;
#endif

  rtpg722depay = GST_RTP_G722_DEPAY (depayload);

  structure = gst_caps_get_structure (caps, 0);

  /* default to a dynamic payload number if none is given */
  payload = 96;
  gst_structure_get_int (structure, "payload", &payload);
  switch (payload) {
    case GST_RTP_PAYLOAD_G722:
      /* static payload type: parameters are fixed by RFC 3551 */
      channels = 1;
      clock_rate = 8000;
      samplerate = 16000;
      break;
    default:
      /* no fixed mapping, we need clock-rate */
      channels = 0;
      clock_rate = 0;
      samplerate = 0;
      break;
  }

  /* caps can overwrite defaults */
  clock_rate =
      gst_rtp_g722_depay_parse_int (structure, "clock-rate", clock_rate);
  if (clock_rate == 0)
    goto no_clockrate;

  /* RFC 3551 erratum: clock-rate 8000 really means 16 kHz audio */
  if (clock_rate == 8000)
    samplerate = 16000;

  if (samplerate == 0)
    samplerate = clock_rate;

  /* channels come from "encoding-params" (SDP style), then "channels",
   * then finally default to mono */
  channels =
      gst_rtp_g722_depay_parse_int (structure, "encoding-params", channels);
  if (channels == 0) {
    channels = gst_rtp_g722_depay_parse_int (structure, "channels", channels);
    if (channels == 0) {
      /* channels defaults to 1 otherwise */
      channels = 1;
    }
  }

  depayload->clock_rate = clock_rate;
  rtpg722depay->rate = samplerate;
  rtpg722depay->channels = channels;

  srccaps = gst_caps_new_simple ("audio/G722",
      "rate", G_TYPE_INT, samplerate, "channels", G_TYPE_INT, channels, NULL);

  /* FIXME: Do something with the channel order */
#if 0
  /* add channel positions */
  channel_order = gst_structure_get_string (structure, "channel-order");

  order = gst_rtp_channels_get_by_order (channels, channel_order);
  if (order) {
    gst_audio_set_channel_positions (gst_caps_get_structure (srccaps, 0),
        order->pos);
  } else {
    GstAudioChannelPosition *pos;

    GST_ELEMENT_WARNING (rtpg722depay, STREAM, DECODE,
        (NULL), ("Unknown channel order '%s' for %d channels",
            GST_STR_NULL (channel_order), channels));
    /* create default NONE layout */
    pos = gst_rtp_channels_create_default (channels);
    gst_audio_set_channel_positions (gst_caps_get_structure (srccaps, 0), pos);
    g_free (pos);
  }
#endif

  res = gst_pad_set_caps (depayload->srcpad, srccaps);
  gst_caps_unref (srccaps);

  return res;

  /* ERRORS */
no_clockrate:
  {
    GST_ERROR_OBJECT (depayload, "no clock-rate specified");
    return FALSE;
  }
}
+
+static GstBuffer *
+gst_rtp_g722_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpG722Depay *rtpg722depay;
+ GstBuffer *outbuf;
+ gint payload_len;
+ gboolean marker;
+
+ rtpg722depay = GST_RTP_G722_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (payload_len <= 0)
+ goto empty_packet;
+
+ GST_DEBUG_OBJECT (rtpg722depay, "got payload of %d bytes", payload_len);
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ if (marker && outbuf) {
+ /* mark talk spurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ if (outbuf) {
+ gst_rtp_drop_non_audio_meta (rtpg722depay, outbuf);
+ }
+
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpg722depay, STREAM, DECODE,
+ ("Empty Payload."), (NULL));
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpg722depay.h b/gst/rtp/gstrtpg722depay.h
new file mode 100644
index 0000000000..2acdf28b2e
--- /dev/null
+++ b/gst/rtp/gstrtpg722depay.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_G722_DEPAY_H__
#define __GST_RTP_G722_DEPAY_H__

#include <gst/gst.h>
#include <gst/rtp/gstrtpbasedepayload.h>

G_BEGIN_DECLS

/* Standard macros for defining types for this element. */
#define GST_TYPE_RTP_G722_DEPAY \
  (gst_rtp_g722_depay_get_type())
#define GST_RTP_G722_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G722_DEPAY,GstRtpG722Depay))
#define GST_RTP_G722_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G722_DEPAY,GstRtpG722DepayClass))
#define GST_IS_RTP_G722_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G722_DEPAY))
#define GST_IS_RTP_G722_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G722_DEPAY))

typedef struct _GstRtpG722Depay GstRtpG722Depay;
typedef struct _GstRtpG722DepayClass GstRtpG722DepayClass;

/* Definition of structure storing data for this element. */
struct _GstRtpG722Depay
{
  GstRTPBaseDepayload depayload;

  /* output audio parameters, filled in by the set_caps handler */
  guint rate;
  guint channels;
};

/* Standard definition defining a class for this element. */
struct _GstRtpG722DepayClass
{
  GstRTPBaseDepayloadClass parent_class;
};

GType gst_rtp_g722_depay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_G722_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpg722pay.c b/gst/rtp/gstrtpg722pay.c
new file mode 100644
index 0000000000..8afc2ebe0e
--- /dev/null
+++ b/gst/rtp/gstrtpg722pay.c
@@ -0,0 +1,232 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/audio/audio.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpg722pay.h"
+#include "gstrtpchannels.h"
+
GST_DEBUG_CATEGORY_STATIC (rtpg722pay_debug);
#define GST_CAT_DEFAULT (rtpg722pay_debug)

/* Input: raw G722 audio, fixed to mono/16 kHz (the only configuration
 * defined for the RFC 3551 static payload type). */
static GstStaticPadTemplate gst_rtp_g722_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/G722, " "rate = (int) 16000, " "channels = (int) 1")
    );

/* Output: RTP with either the static G722 payload number or a dynamic
 * one; clock-rate is 8000 in both cases (RFC 3551 quirk). */
static GstStaticPadTemplate gst_rtp_g722_pay_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "encoding-name = (string) \"G722\", "
        "payload = (int) " GST_RTP_PAYLOAD_G722_STRING ", "
        "encoding-params = (string) 1, "
        "clock-rate = (int) 8000; "
        "application/x-rtp, "
        "media = (string) \"audio\", "
        "encoding-name = (string) \"G722\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "encoding-params = (string) 1, " "clock-rate = (int) 8000")
    );

static gboolean gst_rtp_g722_pay_setcaps (GstRTPBasePayload * basepayload,
    GstCaps * caps);
static GstCaps *gst_rtp_g722_pay_getcaps (GstRTPBasePayload * rtppayload,
    GstPad * pad, GstCaps * filter);

#define gst_rtp_g722_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpG722Pay, gst_rtp_g722_pay,
    GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
/* rtp_element_init() runs first so shared plugin state exists. */
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg722pay, "rtpg722pay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_G722_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_g722_pay_class_init (GstRtpG722PayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg722pay_debug, "rtpg722pay", 0,
+ "G722 RTP Payloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_g722_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_g722_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encode Raw audio into RTP packets (RFC 3551)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_g722_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_g722_pay_getcaps;
+}
+
static void
gst_rtp_g722_pay_init (GstRtpG722Pay * rtpg722pay)
{
  GstRTPBaseAudioPayload *rtpbaseaudiopayload;

  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpg722pay);

  /* default to the static RFC 3551 payload number; the application may
   * still override the pt property */
  GST_RTP_BASE_PAYLOAD (rtpg722pay)->pt = GST_RTP_PAYLOAD_G722;

  /* tell rtpbaseaudiopayload that this is a sample based codec */
  gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
}
+
/* set_caps vfunc: read rate/channels from the raw audio caps, configure
 * the RTP session options and output caps, and tell the base audio
 * payloader how to slice sample-based data.
 *
 * Returns FALSE when the input caps lack rate or channels, or when the
 * output caps cannot be set.
 */
static gboolean
gst_rtp_g722_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
{
  GstRtpG722Pay *rtpg722pay;
  GstStructure *structure;
  gint rate, channels, clock_rate;
  gboolean res;
  gchar *params;
#if 0
  GstAudioChannelPosition *pos;
  const GstRTPChannelOrder *order;
#endif
  GstRTPBaseAudioPayload *rtpbaseaudiopayload;

  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (basepayload);
  rtpg722pay = GST_RTP_G722_PAY (basepayload);

  structure = gst_caps_get_structure (caps, 0);

  /* first parse input caps */
  if (!gst_structure_get_int (structure, "rate", &rate))
    goto no_rate;

  if (!gst_structure_get_int (structure, "channels", &channels))
    goto no_channels;

  /* FIXME: Do something with the channel positions */
#if 0
  /* get the channel order */
  pos = gst_audio_get_channel_positions (structure);
  if (pos)
    order = gst_rtp_channels_get_by_pos (channels, pos);
  else
    order = NULL;
#endif

  /* Clock rate is always 8000 Hz for G722 according to
   * RFC 3551 although the sampling rate is 16000 Hz */
  clock_rate = 8000;

  /* second argument: TRUE means a dynamic payload type is in use */
  gst_rtp_base_payload_set_options (basepayload, "audio",
      basepayload->pt != GST_RTP_PAYLOAD_G722, "G722", clock_rate);
  params = g_strdup_printf ("%d", channels);

#if 0
  if (!order && channels > 2) {
    GST_ELEMENT_WARNING (rtpg722pay, STREAM, DECODE,
        (NULL), ("Unknown channel order for %d channels", channels));
  }

  if (order && order->name) {
    res = gst_rtp_base_payload_set_outcaps (basepayload,
        "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
        channels, "channel-order", G_TYPE_STRING, order->name, NULL);
  } else {
#endif
    res = gst_rtp_base_payload_set_outcaps (basepayload,
        "encoding-params", G_TYPE_STRING, params, "channels", G_TYPE_INT,
        channels, NULL);
#if 0
  }
#endif

  g_free (params);
#if 0
  g_free (pos);
#endif

  rtpg722pay->rate = rate;
  rtpg722pay->channels = channels;

  /* bits-per-sample is 4 * channels for G722, but as the RTP clock runs at
   * half speed (8 instead of 16 khz), pretend it's 8 bits per sample
   * channels. */
  gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
      8 * rtpg722pay->channels);

  return res;

  /* ERRORS */
no_rate:
  {
    GST_DEBUG_OBJECT (rtpg722pay, "no rate given");
    return FALSE;
  }
no_channels:
  {
    GST_DEBUG_OBJECT (rtpg722pay, "no channels given");
    return FALSE;
  }
}
+
+static GstCaps *
+gst_rtp_g722_pay_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *otherpadcaps;
+ GstCaps *caps;
+
+ otherpadcaps = gst_pad_get_allowed_caps (rtppayload->srcpad);
+ caps = gst_pad_get_pad_template_caps (pad);
+
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ caps = gst_caps_make_writable (caps);
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, 1, NULL);
+ gst_caps_set_simple (caps, "rate", G_TYPE_INT, 16000, NULL);
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+ if (filter) {
+ GstCaps *tmp;
+
+ GST_DEBUG_OBJECT (rtppayload, "Intersect %" GST_PTR_FORMAT " and filter %"
+ GST_PTR_FORMAT, caps, filter);
+ tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = tmp;
+ }
+
+ return caps;
+}
diff --git a/gst/rtp/gstrtpg722pay.h b/gst/rtp/gstrtpg722pay.h
new file mode 100644
index 0000000000..1211ca0a0d
--- /dev/null
+++ b/gst/rtp/gstrtpg722pay.h
@@ -0,0 +1,59 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_G722_PAY_H__
#define __GST_RTP_G722_PAY_H__

#include <gst/gst.h>
#include <gst/rtp/gstrtpbaseaudiopayload.h>

G_BEGIN_DECLS

/* Standard GObject type macros for this element. */
#define GST_TYPE_RTP_G722_PAY \
  (gst_rtp_g722_pay_get_type())
#define GST_RTP_G722_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G722_PAY,GstRtpG722Pay))
#define GST_RTP_G722_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G722_PAY,GstRtpG722PayClass))
#define GST_IS_RTP_G722_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G722_PAY))
#define GST_IS_RTP_G722_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G722_PAY))

typedef struct _GstRtpG722Pay GstRtpG722Pay;
typedef struct _GstRtpG722PayClass GstRtpG722PayClass;

struct _GstRtpG722Pay
{
  GstRTPBaseAudioPayload payload;

  /* input audio parameters, filled in by the set_caps handler */
  gint rate;
  gint channels;
};

struct _GstRtpG722PayClass
{
  GstRTPBaseAudioPayloadClass parent_class;
};

GType gst_rtp_g722_pay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_G722_PAY_H__ */
diff --git a/gst/rtp/gstrtpg723depay.c b/gst/rtp/gstrtpg723depay.c
new file mode 100644
index 0000000000..e4f416ea26
--- /dev/null
+++ b/gst/rtp/gstrtpg723depay.c
@@ -0,0 +1,219 @@
+/* GStreamer
+ *
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include <stdlib.h>
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpg723depay.h"
+
GST_DEBUG_CATEGORY_STATIC (rtpg723depay_debug);
#define GST_CAT_DEFAULT (rtpg723depay_debug)


/* references:
 *
 * RFC 3551 (4.5.3)
 */

/* Boilerplate signal/property enums; this element defines none of either. */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

enum
{
  PROP_0
};

/* input is an RTP packet
 *
 * Selected either by encoding-name "G723" or by the static payload number
 * from RFC 3551; the clock rate is always 8000 Hz.
 */
static GstStaticPadTemplate gst_rtp_g723_depay_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "clock-rate = (int) 8000, "
        "encoding-name = (string) \"G723\"; "
        "application/x-rtp, "
        "media = (string) \"audio\", "
        "payload = (int) " GST_RTP_PAYLOAD_G723_STRING ", "
        "clock-rate = (int) 8000")
    );

/* Output: raw G.723, always mono at 8 kHz. */
static GstStaticPadTemplate gst_rtp_g723_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/G723, " "channels = (int) 1," "rate = (int) 8000")
    );

static gboolean gst_rtp_g723_depay_setcaps (GstRTPBaseDepayload * depayload,
    GstCaps * caps);
static GstBuffer *gst_rtp_g723_depay_process (GstRTPBaseDepayload * depayload,
    GstRTPBuffer * rtp);

#define gst_rtp_g723_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpG723Depay, gst_rtp_g723_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
/* rtp_element_init() runs first so shared plugin state exists. */
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg723depay, "rtpg723depay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_G723_DEPAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_g723_depay_class_init (GstRtpG723DepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpg723depay_debug, "rtpg723depay", 0,
+ "G.723 RTP Depayloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_g723_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_g723_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP G.723 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts G.723 audio from RTP packets (RFC 3551)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_g723_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_g723_depay_setcaps;
+}
+
+static void
+gst_rtp_g723_depay_init (GstRtpG723Depay * rtpg723depay)
+{
+ GstRTPBaseDepayload *depayload;
+
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtpg723depay);
+
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
+}
+
+static gboolean
+gst_rtp_g723_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ GstCaps *srccaps;
+ GstRtpG723Depay *rtpg723depay;
+ const gchar *params;
+ gint clock_rate, channels;
+ gboolean ret;
+
+ rtpg723depay = GST_RTP_G723_DEPAY (depayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!(params = gst_structure_get_string (structure, "encoding-params")))
+ channels = 1;
+ else {
+ channels = atoi (params);
+ }
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 8000;
+
+ if (channels != 1)
+ goto wrong_channels;
+
+ if (clock_rate != 8000)
+ goto wrong_clock_rate;
+
+ depayload->clock_rate = clock_rate;
+
+ srccaps = gst_caps_new_simple ("audio/G723",
+ "channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, clock_rate, NULL);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ gst_caps_unref (srccaps);
+
+ return ret;
+
+ /* ERRORS */
+wrong_channels:
+ {
+ GST_DEBUG_OBJECT (rtpg723depay, "expected 1 channel, got %d", channels);
+ return FALSE;
+ }
+wrong_clock_rate:
+ {
+ GST_DEBUG_OBJECT (rtpg723depay, "expected 8000 clock-rate, got %d",
+ clock_rate);
+ return FALSE;
+ }
+}
+
+
+static GstBuffer *
+gst_rtp_g723_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpG723Depay *rtpg723depay;
+ GstBuffer *outbuf = NULL;
+ gint payload_len;
+ gboolean marker;
+
+ rtpg723depay = GST_RTP_G723_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ /* At least 4 bytes */
+ if (payload_len < 4)
+ goto too_small;
+
+ GST_LOG_OBJECT (rtpg723depay, "payload len %d", payload_len);
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ if (marker) {
+ /* marker bit starts talkspurt */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ GST_LOG_OBJECT (depayload, "pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
+
+ return outbuf;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ELEMENT_WARNING (rtpg723depay, STREAM, DECODE,
+ (NULL), ("G723 RTP payload too small (%d)", payload_len));
+ goto bad_packet;
+ }
+bad_packet:
+ {
+ /* no fatal error */
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpg723depay.h b/gst/rtp/gstrtpg723depay.h
new file mode 100644
index 0000000000..673a137660
--- /dev/null
+++ b/gst/rtp/gstrtpg723depay.h
@@ -0,0 +1,57 @@
+/* GStreamer
+ *
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_G723_DEPAY_H__
#define __GST_RTP_G723_DEPAY_H__

#include <gst/gst.h>
#include <gst/rtp/gstrtpbasedepayload.h>

G_BEGIN_DECLS

/* Standard GObject type macros for this element. */
#define GST_TYPE_RTP_G723_DEPAY \
  (gst_rtp_g723_depay_get_type())
#define GST_RTP_G723_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G723_DEPAY,GstRtpG723Depay))
#define GST_RTP_G723_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G723_DEPAY,GstRtpG723DepayClass))
#define GST_IS_RTP_G723_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G723_DEPAY))
#define GST_IS_RTP_G723_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G723_DEPAY))

typedef struct _GstRtpG723Depay GstRtpG723Depay;
typedef struct _GstRtpG723DepayClass GstRtpG723DepayClass;

/* No per-instance state beyond the base depayloader. */
struct _GstRtpG723Depay
{
  GstRTPBaseDepayload depayload;
};

struct _GstRtpG723DepayClass
{
  GstRTPBaseDepayloadClass parent_class;
};

GType gst_rtp_g723_depay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_G723_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpg723pay.c b/gst/rtp/gstrtpg723pay.c
new file mode 100644
index 0000000000..ebe7343d95
--- /dev/null
+++ b/gst/rtp/gstrtpg723pay.c
@@ -0,0 +1,303 @@
+/* GStreamer
+ * Copyright (C) <2007> Nokia Corporation
+ * Copyright (C) <2007> Collabora Ltd
+ * @author: Olivier Crete <olivier.crete@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/base/gstadapter.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpg723pay.h"
+#include "gstrtputils.h"
+
+#define G723_FRAME_DURATION (30 * GST_MSECOND)
+
+static gboolean gst_rtp_g723_pay_set_caps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_g723_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buf);
+
+static GstStaticPadTemplate gst_rtp_g723_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/G723, " /* according to RFC 3551 */
+ "channels = (int) 1, " "rate = (int) 8000")
+ );
+
+static GstStaticPadTemplate gst_rtp_g723_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_G723_STRING ", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"G723\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"G723\"")
+ );
+
+static void gst_rtp_g723_pay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_g723_pay_change_state (GstElement * element,
+ GstStateChange transition);
+
+#define gst_rtp_g723_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG723Pay, gst_rtp_g723_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg723pay, "rtpg723pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_G723_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_g723_pay_class_init (GstRTPG723PayClass * klass)
+{
+  /* Wire up vmethods, pad templates and element metadata for the
+   * G.723 payloader. */
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstRTPBasePayloadClass *base_class = GST_RTP_BASE_PAYLOAD_CLASS (klass);
+
+  object_class->finalize = gst_rtp_g723_pay_finalize;
+
+  element_class->change_state = gst_rtp_g723_pay_change_state;
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_g723_pay_sink_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_g723_pay_src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP G.723 payloader", "Codec/Payloader/Network/RTP",
+      "Packetize G.723 audio into RTP packets",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  base_class->set_caps = gst_rtp_g723_pay_set_caps;
+  base_class->handle_buffer = gst_rtp_g723_pay_handle_buffer;
+}
+
+static void
+gst_rtp_g723_pay_init (GstRTPG723Pay * pay)
+{
+  /* Incoming G.723 frames are queued in an adapter until a full RTP
+   * packet can be flushed; default to the static G.723 payload type. */
+  pay->adapter = gst_adapter_new ();
+
+  GST_RTP_BASE_PAYLOAD (pay)->pt = GST_RTP_PAYLOAD_G723;
+}
+
+static void
+gst_rtp_g723_pay_finalize (GObject * object)
+{
+  GstRTPG723Pay *self = GST_RTP_G723_PAY (object);
+
+  /* Drop our reference to the frame adapter and clear the pointer. */
+  g_clear_object (&self->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+static gboolean
+gst_rtp_g723_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  /* Configure the RTP side as "G723"/8000; the payload type is marked
+   * dynamic unless the static G.723 payload number is in use. */
+  gboolean dynamic_pt = (payload->pt != GST_RTP_PAYLOAD_G723);
+
+  gst_rtp_base_payload_set_options (payload, "audio", dynamic_pt,
+      "G723", 8000);
+
+  return gst_rtp_base_payload_set_outcaps (payload, NULL);
+}
+
+/* Build one RTP packet from all the G.723 frames queued in the adapter and
+ * push it downstream.  The accumulated PTS/duration from handle_buffer is
+ * attached to the output buffer and then reset; a pending discont sets both
+ * the DISCONT flag and the RTP marker bit. */
+static GstFlowReturn
+gst_rtp_g723_pay_flush (GstRTPG723Pay * pay)
+{
+  GstBuffer *outbuf, *payload_buf;
+  GstFlowReturn ret;
+  guint avail;
+  GstRTPBuffer rtp = { NULL };
+
+  avail = gst_adapter_available (pay->adapter);
+
+  /* header-only RTP buffer; the payload data is appended below */
+  outbuf =
+      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD (pay),
+      0, 0, 0);
+  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+  GST_BUFFER_PTS (outbuf) = pay->timestamp;
+  GST_BUFFER_DURATION (outbuf) = pay->duration;
+
+  /* copy G723 data as payload */
+  payload_buf = gst_adapter_take_buffer_fast (pay->adapter, avail);
+
+  /* reset accumulation state for the next packet */
+  pay->timestamp = GST_CLOCK_TIME_NONE;
+  pay->duration = 0;
+
+  /* set discont and marker */
+  if (pay->discont) {
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+    gst_rtp_buffer_set_marker (&rtp, TRUE);
+    pay->discont = FALSE;
+  }
+  /* unmap before appending: the append invalidates the mapping */
+  gst_rtp_buffer_unmap (&rtp);
+  gst_rtp_copy_audio_meta (pay, outbuf, payload_buf);
+
+  outbuf = gst_buffer_append (outbuf, payload_buf);
+
+  ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (pay), outbuf);
+
+  return ret;
+}
+
+/* G.723 frame size in bytes, indexed by the two least-significant bits of
+ * the first octet of a frame:
+ * 00 high-rate speech (6.3 kb/s) 24
+ * 01 low-rate speech (5.3 kb/s) 20
+ * 10 SID frame 4
+ * 11 reserved 0 */
+static const guint size_tab[4] = {
+  24, 20, 4, 0
+};
+
+/* Accumulate one G.723 frame into the adapter, flushing a packet whenever
+ * the configured MTU or min-ptime limits are reached.  Buffers with a size
+ * that does not match one of the three legal G.723 frame sizes, or whose
+ * header bits disagree with the buffer size, are dropped with a warning. */
+static GstFlowReturn
+gst_rtp_g723_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buf)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstMapInfo map;
+  guint8 HDR;
+  GstRTPG723Pay *pay;
+  GstClockTime packet_dur, timestamp;
+  guint payload_len, packet_len;
+
+  pay = GST_RTP_G723_PAY (payload);
+
+  gst_buffer_map (buf, &map, GST_MAP_READ);
+  timestamp = GST_BUFFER_PTS (buf);
+
+  if (GST_BUFFER_IS_DISCONT (buf)) {
+    /* flush everything on discont */
+    gst_adapter_clear (pay->adapter);
+    pay->timestamp = GST_CLOCK_TIME_NONE;
+    pay->duration = 0;
+    pay->discont = TRUE;
+  }
+
+  /* should be one of these sizes */
+  if (map.size != 4 && map.size != 20 && map.size != 24)
+    goto invalid_size;
+
+  /* check size by looking at the header bits */
+  HDR = map.data[0] & 0x3;
+  if (size_tab[HDR] != map.size)
+    goto wrong_size;
+
+  /* calculate packet size and duration */
+  payload_len = gst_adapter_available (pay->adapter) + map.size;
+  packet_dur = pay->duration + G723_FRAME_DURATION;
+  packet_len = gst_rtp_buffer_calc_packet_len (payload_len, 0, 0);
+
+  if (gst_rtp_base_payload_is_filled (payload, packet_len, packet_dur)) {
+    /* size or duration would overflow the packet, flush the queued data */
+    ret = gst_rtp_g723_pay_flush (pay);
+  }
+
+  /* update timestamp, we keep the timestamp for the first packet in the adapter
+   * but are able to calculate it from next packets. */
+  if (timestamp != GST_CLOCK_TIME_NONE && pay->timestamp == GST_CLOCK_TIME_NONE) {
+    if (timestamp > pay->duration)
+      pay->timestamp = timestamp - pay->duration;
+    else
+      pay->timestamp = 0;
+  }
+  gst_buffer_unmap (buf, &map);
+
+  /* add packet to the queue; ownership of buf passes to the adapter */
+  gst_adapter_push (pay->adapter, buf);
+  /* NOTE(review): packet_dur was computed before the possible flush above,
+   * which resets pay->duration to 0; assigning packet_dur here therefore
+   * carries the pre-flush duration forward — TODO confirm this accumulation
+   * is intended rather than a single frame duration. */
+  pay->duration = packet_dur;
+
+  /* check if we can flush now */
+  if (pay->duration >= payload->min_ptime) {
+    ret = gst_rtp_g723_pay_flush (pay);
+  }
+
+  return ret;
+
+  /* WARNINGS */
+invalid_size:
+  {
+    GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
+        ("Invalid input buffer size"),
+        ("Input size should be 4, 20 or 24, got %" G_GSIZE_FORMAT, map.size));
+    gst_buffer_unmap (buf, &map);
+    gst_buffer_unref (buf);
+    return GST_FLOW_OK;
+  }
+wrong_size:
+  {
+    GST_ELEMENT_WARNING (pay, STREAM, WRONG_TYPE,
+        ("Wrong input buffer size"),
+        ("Expected input buffer size %u but got %" G_GSIZE_FORMAT,
+            size_tab[HDR], map.size));
+    gst_buffer_unmap (buf, &map);
+    gst_buffer_unref (buf);
+    return GST_FLOW_OK;
+  }
+}
+
+static GstStateChangeReturn
+gst_rtp_g723_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRTPG723Pay *self = GST_RTP_G723_PAY (element);
+  GstStateChangeReturn res;
+
+  /* Reset the packetizer state when (re)starting. */
+  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
+    gst_adapter_clear (self->adapter);
+    self->timestamp = GST_CLOCK_TIME_NONE;
+    self->duration = 0;
+    self->discont = TRUE;
+  }
+
+  res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* Drop any queued frames when shutting down. */
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_adapter_clear (self->adapter);
+
+  return res;
+}
diff --git a/gst/rtp/gstrtpg723pay.h b/gst/rtp/gstrtpg723pay.h
new file mode 100644
index 0000000000..03bff50025
--- /dev/null
+++ b/gst/rtp/gstrtpg723pay.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2007> Nokia Corporation
+ * Copyright (C) <2007> Collabora Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_G723_PAY_H__
+#define __GST_RTP_G723_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_G723_PAY \
+ (gst_rtp_g723_pay_get_type())
+#define GST_RTP_G723_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G723_PAY,GstRTPG723Pay))
+#define GST_RTP_G723_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G723_PAY,GstRTPG723PayClass))
+#define GST_IS_RTP_G723_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G723_PAY))
+#define GST_IS_RTP_G723_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G723_PAY))
+
+typedef struct _GstRTPG723Pay GstRTPG723Pay;
+typedef struct _GstRTPG723PayClass GstRTPG723PayClass;
+
+/* RTP G.723 payloader instance. */
+struct _GstRTPG723Pay
+{
+  GstRTPBasePayload payload;
+
+  GstAdapter *adapter;          /* queued G.723 frames awaiting a flush */
+  GstClockTime duration;        /* accumulated duration of queued frames */
+  GstClockTime timestamp;       /* PTS for the first queued frame */
+  gboolean discont;             /* set DISCONT + RTP marker on next flush */
+};
+
+/* Class structure; no virtual methods are added over the base class. */
+struct _GstRTPG723PayClass
+{
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_g723_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_G723_PAY_H__ */
diff --git a/gst/rtp/gstrtpg726depay.c b/gst/rtp/gstrtpg726depay.c
new file mode 100644
index 0000000000..a3c5de321a
--- /dev/null
+++ b/gst/rtp/gstrtpg726depay.c
@@ -0,0 +1,389 @@
+/* GStreamer
+ * Copyright (C) 1999 Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2005 Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) 2005 Zeeshan Ali <zeenix@gmail.com>
+ * Copyright (C) 2008 Axis Communications <dev-gstreamer@axis.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpg726depay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpg726depay_debug);
+#define GST_CAT_DEFAULT (rtpg726depay_debug)
+
+#define DEFAULT_BIT_RATE 32000
+#define DEFAULT_BLOCK_ALIGN 4
+#define SAMPLE_RATE 8000
+#define LAYOUT_G726 "g726"
+
+/* RtpG726Depay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+#define DEFAULT_FORCE_AAL2 TRUE
+
+enum
+{
+ PROP_0,
+ PROP_FORCE_AAL2
+};
+
+static GstStaticPadTemplate gst_rtp_g726_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "encoding-name = (string) { \"G726\", \"G726-16\", \"G726-24\", \"G726-32\", \"G726-40\", "
+ "\"AAL2-G726-16\", \"AAL2-G726-24\", \"AAL2-G726-32\", \"AAL2-G726-40\" }, "
+ "clock-rate = (int) 8000;")
+ );
+
+static GstStaticPadTemplate gst_rtp_g726_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-adpcm, "
+ "channels = (int) 1, "
+ "rate = (int) 8000, "
+ "bitrate = (int) { 16000, 24000, 32000, 40000 }, "
+ "block_align = (int) { 2, 3, 4, 5 }, " "layout = (string) \"g726\"")
+ );
+
+static void gst_rtp_g726_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_g726_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static GstBuffer *gst_rtp_g726_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_g726_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+#define gst_rtp_g726_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Depay, gst_rtp_g726_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg726depay, "rtpg726depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_G726_DEPAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_g726_depay_class_init (GstRtpG726DepayClass * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstRTPBaseDepayloadClass *depay_class = GST_RTP_BASE_DEPAYLOAD_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (rtpg726depay_debug, "rtpg726depay", 0,
+      "G.726 RTP Depayloader");
+
+  object_class->set_property = gst_rtp_g726_depay_set_property;
+  object_class->get_property = gst_rtp_g726_depay_get_property;
+
+  /* force-aal2: always decode with AAL2 bit packing, for interoperability
+   * with payloaders that mislabel their packing. */
+  g_object_class_install_property (object_class, PROP_FORCE_AAL2,
+      g_param_spec_boolean ("force-aal2", "Force AAL2",
+          "Force AAL2 decoding for compatibility with bad payloaders",
+          DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_g726_depay_src_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_g726_depay_sink_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP G.726 depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts G.726 audio from RTP packets",
+      "Axis Communications <dev-gstreamer@axis.com>");
+
+  depay_class->process_rtp_packet = gst_rtp_g726_depay_process;
+  depay_class->set_caps = gst_rtp_g726_depay_setcaps;
+}
+
+static void
+gst_rtp_g726_depay_init (GstRtpG726Depay * self)
+{
+  GstRTPBaseDepayload *base = GST_RTP_BASE_DEPAYLOAD (self);
+
+  /* Source caps are fully determined in setcaps, so fix them on the pad. */
+  gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (base));
+
+  self->force_aal2 = DEFAULT_FORCE_AAL2;
+}
+
+/* set_caps vfunc: derive the G.726 bitrate, block size and AAL2 flag from
+ * the RTP caps and configure matching audio/x-adpcm caps on the src pad.
+ * A plain "G726" (or missing) encoding-name means the default 32 kbit/s;
+ * an "AAL2-" prefix selects AAL2 bit packing (no reshuffle in process()).
+ * Returns FALSE if the encoding-name is not a recognized G.726 variant. */
+static gboolean
+gst_rtp_g726_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstCaps *srccaps;
+  GstStructure *structure;
+  gboolean ret;
+  gint clock_rate;
+  const gchar *encoding_name;
+  GstRtpG726Depay *depay;
+
+  depay = GST_RTP_G726_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 8000;          /* default */
+  depayload->clock_rate = clock_rate;
+
+  depay->aal2 = FALSE;
+  encoding_name = gst_structure_get_string (structure, "encoding-name");
+  if (encoding_name == NULL || g_ascii_strcasecmp (encoding_name, "G726") == 0) {
+    depay->bitrate = DEFAULT_BIT_RATE;
+    depay->block_align = DEFAULT_BLOCK_ALIGN;
+  } else {
+    if (g_str_has_prefix (encoding_name, "AAL2-")) {
+      depay->aal2 = TRUE;
+      encoding_name += 5;       /* strip the prefix, then match the variant */
+    }
+    if (g_ascii_strcasecmp (encoding_name, "G726-16") == 0) {
+      depay->bitrate = 16000;
+      depay->block_align = 2;
+    } else if (g_ascii_strcasecmp (encoding_name, "G726-24") == 0) {
+      depay->bitrate = 24000;
+      depay->block_align = 3;
+    } else if (g_ascii_strcasecmp (encoding_name, "G726-32") == 0) {
+      depay->bitrate = 32000;
+      depay->block_align = 4;
+    } else if (g_ascii_strcasecmp (encoding_name, "G726-40") == 0) {
+      depay->bitrate = 40000;
+      depay->block_align = 5;
+    } else
+      goto unknown_encoding;
+  }
+
+  /* fix: debug messages must not carry a trailing newline */
+  GST_DEBUG ("RTP G.726 depayloader, bitrate set to %d", depay->bitrate);
+
+  srccaps = gst_caps_new_simple ("audio/x-adpcm",
+      "channels", G_TYPE_INT, 1,
+      "rate", G_TYPE_INT, clock_rate,
+      "bitrate", G_TYPE_INT, depay->bitrate,
+      "block_align", G_TYPE_INT, depay->block_align,
+      "layout", G_TYPE_STRING, LAYOUT_G726, NULL);
+
+  ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+  gst_caps_unref (srccaps);
+
+  return ret;
+
+  /* ERRORS */
+unknown_encoding:
+  {
+    GST_WARNING ("Could not determine bitrate from encoding-name (%s)",
+        encoding_name);
+    return FALSE;
+  }
+}
+
+
+/* process_rtp_packet vfunc: turn one RTP packet into a G.726 audio buffer.
+ * For AAL2 packing (or when force-aal2 is set) the payload bytes pass
+ * through unchanged; otherwise the RFC 3551 bit packing is reshuffled
+ * byte-by-byte — presumably into the AAL2 codeword order the downstream
+ * decoder expects (TODO confirm against the decoder's layout).  Returns
+ * NULL if the packet carries no payload. */
+static GstBuffer *
+gst_rtp_g726_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpG726Depay *depay;
+  GstBuffer *outbuf = NULL;
+  gboolean marker;
+
+  depay = GST_RTP_G726_DEPAY (depayload);
+
+  marker = gst_rtp_buffer_get_marker (rtp);
+
+  GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+      gst_buffer_get_size (rtp->buffer), marker,
+      gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+  if (depay->aal2 || depay->force_aal2) {
+    /* AAL2, we can just copy the bytes */
+    outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+    if (!outbuf)
+      goto bad_len;
+    gst_rtp_drop_non_audio_meta (depay, outbuf);
+  } else {
+    guint8 *in, *out, tmp;
+    guint len;
+    GstMapInfo map;
+
+    in = gst_rtp_buffer_get_payload (rtp);
+    len = gst_rtp_buffer_get_payload_len (rtp);
+
+    /* the reshuffle below writes exactly as many bytes as it reads, so the
+     * payload copy can be rewritten in place once made writable */
+    outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+    if (!outbuf)
+      goto bad_len;
+    outbuf = gst_buffer_make_writable (outbuf);
+
+    gst_rtp_drop_non_audio_meta (depay, outbuf);
+
+    gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+    out = map.data;
+
+    /* we need to reshuffle the bytes, input is always of the form
+     * A B C D ... with the number of bits depending on the bitrate. */
+    switch (depay->bitrate) {
+      case 16000:
+      {
+        /* 2 bits per codeword, 4 codewords per byte:
+         * 0
+         * 0 1 2 3 4 5 6 7
+         * +-+-+-+-+-+-+-+-+-
+         * |D D|C C|B B|A A| ...
+         * |0 1|0 1|0 1|0 1|
+         * +-+-+-+-+-+-+-+-+-
+         */
+        while (len > 0) {
+          tmp = *in++;
+          *out++ = ((tmp & 0xc0) >> 6) |
+              ((tmp & 0x30) >> 2) | ((tmp & 0x0c) << 2) | ((tmp & 0x03) << 6);
+          len--;
+        }
+        break;
+      }
+      case 24000:
+      {
+        /* 3 bits per codeword, 8 codewords per 3 bytes:
+         * 0 1 2
+         * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * |C C|B B B|A A A|F|E E E|D D D|C|H H H|G G G|F F| ...
+         * |1 2|0 1 2|0 1 2|2|0 1 2|0 1 2|0|0 1 2|0 1 2|0 1|
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * NOTE(review): a trailing 1-2 bytes (len not a multiple of 3) is
+         * left unshuffled — assumes payloads are whole 3-byte groups. */
+        while (len > 2) {
+          tmp = *in++;
+          *out++ = ((tmp & 0xe0) >> 5) |
+              ((tmp & 0x1c) << 1) | ((tmp & 0x03) << 6);
+          tmp = *in++;
+          *out++ = ((tmp & 0x80) >> 7) |
+              ((tmp & 0x70) >> 3) | ((tmp & 0x0e) << 4) | ((tmp & 0x01) << 7);
+          tmp = *in++;
+          *out++ = ((tmp & 0xc0) >> 6) |
+              ((tmp & 0x38) >> 1) | ((tmp & 0x07) << 5);
+          len -= 3;
+        }
+        break;
+      }
+      case 32000:
+      {
+        /* 4 bits per codeword, two per byte — swap the nibbles:
+         * 0 1
+         * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * |B B B B|A A A A|D D D D|C C C C| ...
+         * |0 1 2 3|0 1 2 3|0 1 2 3|0 1 2 3|
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         */
+        while (len > 0) {
+          tmp = *in++;
+          *out++ = ((tmp & 0xf0) >> 4) | ((tmp & 0x0f) << 4);
+          len--;
+        }
+        break;
+      }
+      case 40000:
+      {
+        /* 5 bits per codeword, 8 codewords per 5 bytes:
+         * 0 1 2 3 4
+         * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * |B B B|A A A A A|D|C C C C C|B B|E E E E|D D D D|G G|F F F F F|E|H H H H H|G G G|
+         * |2 3 4|0 1 2 3 4|4|0 1 2 3 4|0 1|1 2 3 4|0 1 2 3|3 4|0 1 2 3 4|0|0 1 2 3 4|0 1 2|
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * NOTE(review): a trailing 1-4 bytes (len not a multiple of 5) is
+         * left unshuffled — assumes payloads are whole 5-byte groups. */
+        while (len > 4) {
+          tmp = *in++;
+          *out++ = ((tmp & 0xf8) >> 3) | ((tmp & 0x07) << 5);
+          tmp = *in++;
+          *out++ = ((tmp & 0xc0) >> 6) |
+              ((tmp & 0x3e) << 1) | ((tmp & 0x01) << 7);
+          tmp = *in++;
+          *out++ = ((tmp & 0xf0) >> 4) | ((tmp & 0x0f) << 4);
+          tmp = *in++;
+          *out++ = ((tmp & 0x80) >> 7) |
+              ((tmp & 0x7c) >> 1) | ((tmp & 0x03) << 6);
+          tmp = *in++;
+          *out++ = ((tmp & 0xe0) >> 5) | ((tmp & 0x1f) << 3);
+          len -= 5;
+        }
+        break;
+      }
+    }
+    gst_buffer_unmap (outbuf, &map);
+  }
+
+  if (marker) {
+    /* mark start of talkspurt with RESYNC */
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+  }
+
+  return outbuf;
+
+bad_len:
+  {
+    /* no payload data in this packet; nothing to push */
+    return NULL;
+  }
+}
+
+static void
+gst_rtp_g726_depay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpG726Depay *self = GST_RTP_G726_DEPAY (object);
+
+  /* Only one property is exposed; anything else is a programming error. */
+  if (prop_id == PROP_FORCE_AAL2)
+    self->force_aal2 = g_value_get_boolean (value);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+static void
+gst_rtp_g726_depay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpG726Depay *self = GST_RTP_G726_DEPAY (object);
+
+  /* Only one property is exposed; anything else is a programming error. */
+  if (prop_id == PROP_FORCE_AAL2)
+    g_value_set_boolean (value, self->force_aal2);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/gst/rtp/gstrtpg726depay.h b/gst/rtp/gstrtpg726depay.h
new file mode 100644
index 0000000000..04ceec2f8d
--- /dev/null
+++ b/gst/rtp/gstrtpg726depay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) 2005 Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) 2008 Axis Communications AB <dev-gstreamer@axis.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_G726_DEPAY_H__
+#define __GST_RTP_G726_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpG726Depay GstRtpG726Depay;
+typedef struct _GstRtpG726DepayClass GstRtpG726DepayClass;
+
+#define GST_TYPE_RTP_G726_DEPAY \
+ (gst_rtp_g726_depay_get_type())
+#define GST_RTP_G726_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G726_DEPAY,GstRtpG726Depay))
+#define GST_RTP_G726_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G726_DEPAY,GstRtpG726DepayClass))
+#define GST_IS_RTP_G726_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G726_DEPAY))
+#define GST_IS_RTP_G726_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G726_DEPAY))
+
+/* RTP G.726 depayloader instance; the fields below are configured in
+ * setcaps from the negotiated encoding-name. */
+struct _GstRtpG726Depay
+{
+  GstRTPBaseDepayload depayload;
+
+  gboolean aal2;                /* caps negotiated AAL2 bit packing */
+  gboolean force_aal2;          /* "force-aal2" property: always assume AAL2 */
+  gint bitrate;                 /* 16000/24000/32000/40000 bit/s */
+  guint block_align;            /* bytes per 8-sample group (2..5) */
+};
+
+/* Class structure; no virtual methods are added over the base class. */
+struct _GstRtpG726DepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_g726_depay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_G726_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpg726pay.c b/gst/rtp/gstrtpg726pay.c
new file mode 100644
index 0000000000..76dad40535
--- /dev/null
+++ b/gst/rtp/gstrtpg726pay.c
@@ -0,0 +1,418 @@
+/* GStreamer
+ * Copyright (C) 1999 Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2005 Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) 2005 Nokia Corporation <kai.vehmanen@nokia.com>
+ * Copyright (C) 2007,2008 Axis Communications <dev-gstreamer@axis.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpg726pay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpg726pay_debug);
+#define GST_CAT_DEFAULT (rtpg726pay_debug)
+
+#define DEFAULT_FORCE_AAL2 TRUE
+
+enum
+{
+ PROP_0,
+ PROP_FORCE_AAL2
+};
+
+static GstStaticPadTemplate gst_rtp_g726_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-adpcm, "
+ "channels = (int) 1, "
+ "rate = (int) 8000, "
+ "bitrate = (int) { 16000, 24000, 32000, 40000 }, "
+ "layout = (string) \"g726\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_g726_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) { \"G726-16\", \"G726-24\", \"G726-32\", \"G726-40\", "
+ " \"AAL2-G726-16\", \"AAL2-G726-24\", \"AAL2-G726-32\", \"AAL2-G726-40\" } ")
+ );
+
+static void gst_rtp_g726_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_g726_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static gboolean gst_rtp_g726_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_g726_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+#define gst_rtp_g726_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG726Pay, gst_rtp_g726_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg726pay, "rtpg726pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_G726_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_g726_pay_class_init (GstRtpG726PayClass * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstRTPBasePayloadClass *base_class = GST_RTP_BASE_PAYLOAD_CLASS (klass);
+
+  object_class->set_property = gst_rtp_g726_pay_set_property;
+  object_class->get_property = gst_rtp_g726_pay_get_property;
+
+  /* force-aal2: emit AAL2 bit packing even when negotiation selected the
+   * plain variant, for interoperability with broken depayloaders. */
+  g_object_class_install_property (object_class, PROP_FORCE_AAL2,
+      g_param_spec_boolean ("force-aal2", "Force AAL2",
+          "Force AAL2 encoding for compatibility with bad depayloaders",
+          DEFAULT_FORCE_AAL2, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_g726_pay_sink_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_g726_pay_src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP G.726 payloader", "Codec/Payloader/Network/RTP",
+      "Payload-encodes G.726 audio into a RTP packet",
+      "Axis Communications <dev-gstreamer@axis.com>");
+
+  base_class->set_caps = gst_rtp_g726_pay_setcaps;
+  base_class->handle_buffer = gst_rtp_g726_pay_handle_buffer;
+
+  GST_DEBUG_CATEGORY_INIT (rtpg726pay_debug, "rtpg726pay", 0,
+      "G.726 RTP Payloader");
+}
+
+static void
+gst_rtp_g726_pay_init (GstRtpG726Pay * pay)
+{
+  GstRTPBaseAudioPayload *audio_payload = GST_RTP_BASE_AUDIO_PAYLOAD (pay);
+
+  GST_RTP_BASE_PAYLOAD (pay)->clock_rate = 8000;
+
+  pay->force_aal2 = DEFAULT_FORCE_AAL2;
+
+  /* G.726 is a fixed-bits-per-sample codec, so let the base audio
+   * payloader packetize it sample based. */
+  gst_rtp_base_audio_payload_set_sample_based (audio_payload);
+}
+
+/* set_caps vfunc: pick the RTP encoding-name from the sink bitrate, decide
+ * between plain and AAL2 packing by intersecting with downstream caps, and
+ * configure the payloader accordingly.  encoding_name is heap-allocated and
+ * owned by this function until the final g_free. */
+static gboolean
+gst_rtp_g726_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  gchar *encoding_name;
+  GstStructure *structure;
+  GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+  GstRtpG726Pay *pay;
+  GstCaps *peercaps;
+  gboolean res;
+
+  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (payload);
+  pay = GST_RTP_G726_PAY (payload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* default to 32 kbit/s when the sink caps do not say */
+  if (!gst_structure_get_int (structure, "bitrate", &pay->bitrate))
+    pay->bitrate = 32000;
+
+  GST_DEBUG_OBJECT (payload, "using bitrate %d", pay->bitrate);
+
+  pay->aal2 = FALSE;
+
+  /* first see what we can do with the bitrate; the second argument to
+   * set_samplebits_options is the number of bits per G.726 sample */
+  switch (pay->bitrate) {
+    case 16000:
+      encoding_name = g_strdup ("G726-16");
+      gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
+          2);
+      break;
+    case 24000:
+      encoding_name = g_strdup ("G726-24");
+      gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
+          3);
+      break;
+    case 32000:
+      encoding_name = g_strdup ("G726-32");
+      gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
+          4);
+      break;
+    case 40000:
+      encoding_name = g_strdup ("G726-40");
+      gst_rtp_base_audio_payload_set_samplebits_options (rtpbaseaudiopayload,
+          5);
+      break;
+    default:
+      goto invalid_bitrate;
+  }
+
+  GST_DEBUG_OBJECT (payload, "selected base encoding %s", encoding_name);
+
+  /* now see if we need to produce AAL2 or not */
+  peercaps = gst_pad_peer_query_caps (payload->srcpad, NULL);
+  if (peercaps) {
+    GstCaps *filter, *intersect;
+    gchar *capsstr;
+
+    GST_DEBUG_OBJECT (payload, "have peercaps %" GST_PTR_FORMAT, peercaps);
+
+    /* offer both the plain and the AAL2-prefixed variant and let the
+     * intersection with the peer decide */
+    capsstr = g_strdup_printf ("application/x-rtp, "
+        "media = (string) \"audio\", "
+        "clock-rate = (int) 8000, "
+        "encoding-name = (string) %s; "
+        "application/x-rtp, "
+        "media = (string) \"audio\", "
+        "clock-rate = (int) 8000, "
+        "encoding-name = (string) AAL2-%s", encoding_name, encoding_name);
+    filter = gst_caps_from_string (capsstr);
+    g_free (capsstr);
+    g_free (encoding_name);
+
+    /* intersect to filter */
+    intersect = gst_caps_intersect (peercaps, filter);
+    gst_caps_unref (peercaps);
+    gst_caps_unref (filter);
+
+    GST_DEBUG_OBJECT (payload, "intersected to %" GST_PTR_FORMAT, intersect);
+
+    if (!intersect)
+      goto no_format;
+    if (gst_caps_is_empty (intersect)) {
+      gst_caps_unref (intersect);
+      goto no_format;
+    }
+
+    structure = gst_caps_get_structure (intersect, 0);
+
+    /* now see what encoding name we settled on, we need to dup because the
+     * string goes away when we unref the intersection below. */
+    encoding_name =
+        g_strdup (gst_structure_get_string (structure, "encoding-name"));
+
+    /* if we managed to negotiate to AAL2, we definitely are going to do AAL2
+     * encoding. Else we only encode AAL2 when explicitly set by the
+     * property. */
+    if (g_str_has_prefix (encoding_name, "AAL2-"))
+      pay->aal2 = TRUE;
+    else
+      pay->aal2 = pay->force_aal2;
+
+    GST_DEBUG_OBJECT (payload, "final encoding %s, AAL2 %d", encoding_name,
+        pay->aal2);
+
+    gst_caps_unref (intersect);
+  } else {
+    /* downstream can do anything but we prefer the better supported non-AAL2 */
+    pay->aal2 = pay->force_aal2;
+    GST_DEBUG_OBJECT (payload, "no peer caps, AAL2 %d", pay->aal2);
+  }
+
+  gst_rtp_base_payload_set_options (payload, "audio", TRUE, encoding_name,
+      8000);
+  res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+  g_free (encoding_name);
+
+  return res;
+
+  /* ERRORS */
+invalid_bitrate:
+  {
+    GST_ERROR_OBJECT (payload, "invalid bitrate %d specified", pay->bitrate);
+    return FALSE;
+  }
+no_format:
+  {
+    /* encoding_name was already freed before the intersection */
+    GST_ERROR_OBJECT (payload, "could not negotiate format");
+    return FALSE;
+  }
+}
+
+/* Payload one buffer of G.726 audio.
+ *
+ * When AAL2 packing is in effect the codewords are passed through
+ * unchanged.  Otherwise each group of codewords is reshuffled in place
+ * (after making the buffer writable) according to the per-bitrate bit
+ * layouts drawn below, and the buffer is then handed to the base audio
+ * payloader for packetization. */
+static GstFlowReturn
+gst_rtp_g726_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
+{
+  GstFlowReturn res;
+  GstRtpG726Pay *pay;
+
+  pay = GST_RTP_G726_PAY (payload);
+
+  if (!pay->aal2) {
+    GstMapInfo map;
+    guint8 *data, tmp;
+    gsize size;
+
+    /* for non AAL2, we need to reshuffle the bytes, we can do this in-place
+     * when the buffer is writable. */
+    buffer = gst_buffer_make_writable (buffer);
+
+    gst_buffer_map (buffer, &map, GST_MAP_READWRITE);
+    data = map.data;
+    size = map.size;
+
+    GST_LOG_OBJECT (pay, "packing %" G_GSIZE_FORMAT " bytes of data", map.size);
+
+    /* we need to reshuffle the bytes, output is of the form:
+     * A B C D .. with the number of bits depending on the bitrate. */
+    switch (pay->bitrate) {
+      case 16000:
+      {
+        /* 2-bit codewords: swap the bit order of the four samples within
+         * each byte.
+         * 0
+         * 0 1 2 3 4 5 6 7
+         * +-+-+-+-+-+-+-+-+-
+         * |D D|C C|B B|A A| ...
+         * |0 1|0 1|0 1|0 1|
+         * +-+-+-+-+-+-+-+-+-
+         */
+        while (size > 0) {
+          tmp = *data;
+          *data++ = ((tmp & 0xc0) >> 6) |
+              ((tmp & 0x30) >> 2) | ((tmp & 0x0c) << 2) | ((tmp & 0x03) << 6);
+          size--;
+        }
+        break;
+      }
+      case 24000:
+      {
+        /* 3-bit codewords: reshuffle per 3-byte (8-sample) group; a
+         * trailing partial group of 1-2 bytes is left untouched.
+         * 0                   1                   2
+         * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * |C C|B B B|A A A|F|E E E|D D D|C|H H H|G G G|F F| ...
+         * |1 2|0 1 2|0 1 2|2|0 1 2|0 1 2|0|0 1 2|0 1 2|0 1|
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         */
+        while (size > 2) {
+          tmp = *data;
+          *data++ = ((tmp & 0xc0) >> 6) |
+              ((tmp & 0x38) >> 1) | ((tmp & 0x07) << 5);
+          tmp = *data;
+          *data++ = ((tmp & 0x80) >> 7) |
+              ((tmp & 0x70) >> 3) | ((tmp & 0x0e) << 4) | ((tmp & 0x01) << 7);
+          tmp = *data;
+          *data++ = ((tmp & 0xe0) >> 5) |
+              ((tmp & 0x1c) >> 2) | ((tmp & 0x03) << 6);
+          size -= 3;
+        }
+        break;
+      }
+      case 32000:
+      {
+        /* 4-bit codewords: swap the two nibbles of every byte.
+         * 0                   1
+         * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * |B B B B|A A A A|D D D D|C C C C| ...
+         * |0 1 2 3|0 1 2 3|0 1 2 3|0 1 2 3|
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         */
+        while (size > 0) {
+          tmp = *data;
+          *data++ = ((tmp & 0xf0) >> 4) | ((tmp & 0x0f) << 4);
+          size--;
+        }
+        break;
+      }
+      case 40000:
+      {
+        /* 5-bit codewords: reshuffle per 5-byte (8-sample) group; a
+         * trailing partial group of 1-4 bytes is left untouched.
+         * 0                   1                   2                   3                   4
+         * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         * |B B B|A A A A A|D|C C C C C|B B|E E E E|D D D D|G G|F F F F F|E|H H H H H|G G G|
+         * |2 3 4|0 1 2 3 4|4|0 1 2 3 4|0 1|1 2 3 4|0 1 2 3|3 4|0 1 2 3 4|0|0 1 2 3 4|0 1 2|
+         * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-
+         */
+        while (size > 4) {
+          tmp = *data;
+          *data++ = ((tmp & 0xe0) >> 5) | ((tmp & 0x1f) << 3);
+          tmp = *data;
+          *data++ = ((tmp & 0x80) >> 7) |
+              ((tmp & 0x7c) >> 2) | ((tmp & 0x03) << 6);
+          tmp = *data;
+          *data++ = ((tmp & 0xf0) >> 4) | ((tmp & 0x0f) << 4);
+          tmp = *data;
+          *data++ = ((tmp & 0xc0) >> 6) |
+              ((tmp & 0x3e) << 2) | ((tmp & 0x01) << 7);
+          tmp = *data;
+          *data++ = ((tmp & 0xf8) >> 3) | ((tmp & 0x07) << 5);
+          size -= 5;
+        }
+        break;
+      }
+      /* no default: pay->bitrate was validated against the supported set
+       * when the caps were negotiated */
+    }
+    gst_buffer_unmap (buffer, &map);
+  }
+
+  /* hand the (possibly repacked) buffer to the base audio payloader */
+  res =
+      GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->handle_buffer (payload,
+      buffer);
+
+  return res;
+}
+
+/* Standard GObject property setter; only "force-aal2" is writable. */
+static void
+gst_rtp_g726_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpG726Pay *pay = GST_RTP_G726_PAY (object);
+
+  switch (prop_id) {
+    case PROP_FORCE_AAL2:
+      pay->force_aal2 = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Standard GObject property getter; only "force-aal2" is readable. */
+static void
+gst_rtp_g726_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpG726Pay *pay = GST_RTP_G726_PAY (object);
+
+  switch (prop_id) {
+    case PROP_FORCE_AAL2:
+      g_value_set_boolean (value, pay->force_aal2);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
diff --git a/gst/rtp/gstrtpg726pay.h b/gst/rtp/gstrtpg726pay.h
new file mode 100644
index 0000000000..fd3078c2fe
--- /dev/null
+++ b/gst/rtp/gstrtpg726pay.h
@@ -0,0 +1,53 @@
+/* GStreamer
+ * Copyright (C) 2005 Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) 2007,2008 Axis Communications <dev-gstreamer@axis.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more
+ */
+
+#ifndef __GST_RTP_G726_PAY_H__
+#define __GST_RTP_G726_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS typedef struct _GstRtpG726Pay GstRtpG726Pay;
+typedef struct _GstRtpG726PayClass GstRtpG726PayClass;
+
+#define GST_TYPE_RTP_G726_PAY \
+  (gst_rtp_g726_pay_get_type())
+#define GST_RTP_G726_PAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G726_PAY,GstRtpG726Pay))
+#define GST_RTP_G726_PAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G726_PAY,GstRtpG726PayClass))
+#define GST_IS_RTP_G726_PAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G726_PAY))
+#define GST_IS_RTP_G726_PAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G726_PAY))
+
+/* RTP payloader for G.726 ADPCM audio. */
+struct _GstRtpG726Pay
+{
+  GstRTPBaseAudioPayload audiopayload;
+
+  /* TRUE when AAL2 codeword packing is used for the current stream */
+  gboolean aal2;
+  /* "force-aal2" property: use AAL2 packing even when not negotiated */
+  gboolean force_aal2;
+  /* negotiated G.726 bitrate in bits/s (16000/24000/32000/40000) */
+  gint bitrate;
+};
+
+struct _GstRtpG726PayClass
+{
+  GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_g726_pay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_G726_PAY_H__ */
diff --git a/gst/rtp/gstrtpg729depay.c b/gst/rtp/gstrtpg729depay.c
new file mode 100644
index 0000000000..0fb0bcd137
--- /dev/null
+++ b/gst/rtp/gstrtpg729depay.c
@@ -0,0 +1,221 @@
+/* GStreamer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include <stdlib.h>
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpg729depay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpg729depay_debug);
+#define GST_CAT_DEFAULT (rtpg729depay_debug)
+
+
+/* references:
+ *
+ * RFC 3551 (4.5.6)
+ */
+
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+/* input is an RTP packet
+ *
+ */
+static GstStaticPadTemplate gst_rtp_g729_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"G729\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_G729_STRING ", "
+ "clock-rate = (int) 8000")
+ );
+
+static GstStaticPadTemplate gst_rtp_g729_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/G729, " "channels = (int) 1," "rate = (int) 8000")
+ );
+
+static gboolean gst_rtp_g729_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_g729_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+#define gst_rtp_g729_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpG729Depay, gst_rtp_g729_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg729depay, "rtpg729depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_G729_DEPAY, rtp_element_init (plugin));
+
+/* Register pad templates, element metadata and the depayloader vfuncs. */
+static void
+gst_rtp_g729_depay_class_init (GstRtpG729DepayClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  GST_DEBUG_CATEGORY_INIT (rtpg729depay_debug, "rtpg729depay", 0,
+      "G.729 RTP Depayloader");
+
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_g729_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_g729_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP G.729 depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts G.729 audio from RTP packets (RFC 3551)",
+      "Laurent Glayal <spglegle@yahoo.fr>");
+
+  /* per-packet processing and caps negotiation hooks */
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_g729_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_g729_depay_setcaps;
+}
+
+/* Instance setup: the src caps are fully determined by the template, so
+ * mark the pad as having fixed caps. */
+static void
+gst_rtp_g729_depay_init (GstRtpG729Depay * rtpg729depay)
+{
+  gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD
+      (GST_RTP_BASE_DEPAYLOAD (rtpg729depay)));
+}
+
+/* Validate the negotiated RTP caps and set the output caps.
+ *
+ * Only mono G.729 at an 8000 Hz clock rate is accepted; channel count is
+ * taken from the optional "encoding-params" field (defaults to 1).
+ * Returns FALSE when the caps describe an unsupported stream. */
+static gboolean
+gst_rtp_g729_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstCaps *srccaps;
+  GstRtpG729Depay *rtpg729depay;
+  const gchar *params;
+  gint clock_rate, channels;
+  gboolean ret;
+
+  rtpg729depay = GST_RTP_G729_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* "encoding-params" carries the channel count as a string; missing or
+   * non-numeric values fall back to mono */
+  if (!(params = gst_structure_get_string (structure, "encoding-params")))
+    channels = 1;
+  else {
+    channels = atoi (params);
+  }
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 8000;
+
+  if (channels != 1)
+    goto wrong_channels;
+
+  if (clock_rate != 8000)
+    goto wrong_clock_rate;
+
+  depayload->clock_rate = clock_rate;
+
+  srccaps = gst_caps_new_simple ("audio/G729",
+      "channels", G_TYPE_INT, channels, "rate", G_TYPE_INT, clock_rate, NULL);
+  ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+  gst_caps_unref (srccaps);
+
+  return ret;
+
+  /* ERRORS */
+wrong_channels:
+  {
+    GST_DEBUG_OBJECT (rtpg729depay, "expected 1 channel, got %d", channels);
+    return FALSE;
+  }
+wrong_clock_rate:
+  {
+    GST_DEBUG_OBJECT (rtpg729depay, "expected 8000 clock-rate, got %d",
+        clock_rate);
+    return FALSE;
+  }
+}
+
+/* Extract the G.729 payload from one RTP packet.
+ *
+ * Rejects payloads shorter than 2 bytes (the size of a G.729 Annex B CNG
+ * frame).  The marker bit is mapped to GST_BUFFER_FLAG_RESYNC to signal
+ * the start of a talkspurt.  Returns NULL on a bad packet (non-fatal). */
+static GstBuffer *
+gst_rtp_g729_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpG729Depay *rtpg729depay;
+  GstBuffer *outbuf = NULL;
+  gint payload_len;
+  gboolean marker;
+
+  rtpg729depay = GST_RTP_G729_DEPAY (depayload);
+
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+  /* At least 2 bytes (CNG from G729 Annex B) */
+  if (payload_len < 2) {
+    GST_ELEMENT_WARNING (rtpg729depay, STREAM, DECODE,
+        (NULL), ("G729 RTP payload too small (%d)", payload_len));
+    goto bad_packet;
+  }
+
+  GST_LOG_OBJECT (rtpg729depay, "payload len %d", payload_len);
+
+  /* N full 10-byte frames plus a trailing 2-byte frame means the packet
+   * ends in a comfort-noise frame */
+  if ((payload_len % 10) == 2) {
+    GST_LOG_OBJECT (rtpg729depay, "G729 payload contains CNG frame");
+  }
+
+  outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+  marker = gst_rtp_buffer_get_marker (rtp);
+
+  if (marker) {
+    /* marker bit starts talkspurt */
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+  }
+
+  gst_rtp_drop_non_audio_meta (depayload, outbuf);
+
+  GST_LOG_OBJECT (depayload, "pushing buffer of size %" G_GSIZE_FORMAT,
+      gst_buffer_get_size (outbuf));
+
+  return outbuf;
+
+  /* ERRORS */
+bad_packet:
+  {
+    /* no fatal error */
+    return NULL;
+  }
+}
diff --git a/gst/rtp/gstrtpg729depay.h b/gst/rtp/gstrtpg729depay.h
new file mode 100644
index 0000000000..355ee50682
--- /dev/null
+++ b/gst/rtp/gstrtpg729depay.h
@@ -0,0 +1,59 @@
+/* GStreamer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_G729_DEPAY_H__
+#define __GST_RTP_G729_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_G729_DEPAY \
+  (gst_rtp_g729_depay_get_type())
+
+#define GST_RTP_G729_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G729_DEPAY,GstRtpG729Depay))
+
+#define GST_RTP_G729_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G729_DEPAY,GstRtpG729DepayClass))
+
+#define GST_IS_RTP_G729_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G729_DEPAY))
+
+#define GST_IS_RTP_G729_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G729_DEPAY))
+
+typedef struct _GstRtpG729Depay GstRtpG729Depay;
+typedef struct _GstRtpG729DepayClass GstRtpG729DepayClass;
+
+/* RTP depayloader for G.729 audio; keeps no state beyond the base class. */
+struct _GstRtpG729Depay
+{
+  GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpG729DepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_g729_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_G729_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpg729pay.c b/gst/rtp/gstrtpg729pay.c
new file mode 100644
index 0000000000..18e2bad8ec
--- /dev/null
+++ b/gst/rtp/gstrtpg729pay.c
@@ -0,0 +1,394 @@
+/* GStreamer
+ * Copyright (C) <2007> Nokia Corporation
+ * Copyright (C) <2007> Collabora Ltd
+ * @author: Olivier Crete <olivier.crete@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This payloader assumes that the data will ALWAYS come as zero or more
+ * 10 bytes frame of audio followed by 0 or 1 2 byte frame of silence.
+ * Any other buffer format won't work
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/base/gstadapter.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpg729pay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpg729pay_debug);
+#define GST_CAT_DEFAULT (rtpg729pay_debug)
+
+#define G729_FRAME_SIZE 10
+#define G729B_CN_FRAME_SIZE 2
+#define G729_FRAME_DURATION (10 * GST_MSECOND)
+#define G729_FRAME_DURATION_MS (10)
+
+static gboolean
+gst_rtp_g729_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps);
+static GstFlowReturn
+gst_rtp_g729_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buf);
+
+static GstStateChangeReturn
+gst_rtp_g729_pay_change_state (GstElement * element, GstStateChange transition);
+
+static GstStaticPadTemplate gst_rtp_g729_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/G729, " /* according to RFC 3555 */
+ "channels = (int) 1, " "rate = (int) 8000")
+ );
+
+static GstStaticPadTemplate gst_rtp_g729_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_G729_STRING ", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"G729\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"G729\"")
+ );
+
+#define gst_rtp_g729_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPG729Pay, gst_rtp_g729_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpg729pay, "rtpg729pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_G729_PAY, rtp_element_init (plugin));
+
+/* Release the input adapter before chaining up to the parent finalize. */
+static void
+gst_rtp_g729_pay_finalize (GObject * object)
+{
+  GstRTPG729Pay *pay = GST_RTP_G729_PAY (object);
+
+  /* g_clear_object() unrefs and NULLs the pointer, guarding against an
+   * accidental use-after-free should finalization ever be extended */
+  g_clear_object (&pay->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Register pad templates, element metadata and the payloader vfuncs. */
+static void
+gst_rtp_g729_pay_class_init (GstRTPG729PayClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstRTPBasePayloadClass *payload_class = GST_RTP_BASE_PAYLOAD_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (rtpg729pay_debug, "rtpg729pay", 0,
+      "G.729 RTP Payloader");
+
+  /* finalize frees the adapter; change_state resets timing state */
+  gobject_class->finalize = gst_rtp_g729_pay_finalize;
+
+  gstelement_class->change_state = gst_rtp_g729_pay_change_state;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_g729_pay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_g729_pay_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP G.729 payloader", "Codec/Payloader/Network/RTP",
+      "Packetize G.729 audio into RTP packets",
+      "Olivier Crete <olivier.crete@collabora.co.uk>");
+
+  payload_class->set_caps = gst_rtp_g729_pay_set_caps;
+  payload_class->handle_buffer = gst_rtp_g729_pay_handle_buffer;
+}
+
+/* Instance setup: default to the static G729 payload type and create the
+ * adapter that accumulates input frames until a full payload is formed. */
+static void
+gst_rtp_g729_pay_init (GstRTPG729Pay * pay)
+{
+  GST_RTP_BASE_PAYLOAD (pay)->pt = GST_RTP_PAYLOAD_G729;
+
+  pay->adapter = gst_adapter_new ();
+}
+
+/* Drop any pending input and forget all timing state. */
+static void
+gst_rtp_g729_pay_reset (GstRTPG729Pay * pay)
+{
+  gst_adapter_clear (pay->adapter);
+  pay->first_ts = GST_CLOCK_TIME_NONE;
+  pay->first_rtp_time = 0;
+  pay->next_rtp_time = 0;
+  pay->discont = FALSE;
+}
+
+/* Configure the RTP options for G729 and negotiate the output caps.  The
+ * payload type only needs to be advertised dynamically when it differs
+ * from the static G729 payload number. */
+static gboolean
+gst_rtp_g729_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  gboolean dynamic_pt = payload->pt != GST_RTP_PAYLOAD_G729;
+
+  gst_rtp_base_payload_set_options (payload, "audio", dynamic_pt, "G729",
+      8000);
+
+  return gst_rtp_base_payload_set_outcaps (payload, NULL);
+}
+
+/* Wrap @buf (zero or more 10-byte G.729 frames, optionally followed by one
+ * 2-byte CN frame) in an RTP packet and push it downstream.
+ *
+ * Takes ownership of @buf.  Updates next_ts and next_rtp_time by the
+ * pushed duration, and converts a pending discont into a DISCONT flag plus
+ * the RTP marker bit. */
+static GstFlowReturn
+gst_rtp_g729_pay_push (GstRTPG729Pay * rtpg729pay, GstBuffer * buf)
+{
+  GstRTPBasePayload *basepayload;
+  GstClockTime duration;
+  guint frames;
+  GstBuffer *outbuf;
+  GstFlowReturn ret;
+  GstRTPBuffer rtp = { NULL };
+  guint payload_len = gst_buffer_get_size (buf);
+
+  basepayload = GST_RTP_BASE_PAYLOAD (rtpg729pay);
+
+  GST_DEBUG_OBJECT (rtpg729pay, "Pushing %d bytes ts %" GST_TIME_FORMAT,
+      payload_len, GST_TIME_ARGS (rtpg729pay->next_ts));
+
+  /* create buffer to hold the payload */
+  outbuf =
+      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+      (rtpg729pay), 0, 0, 0);
+
+  gst_rtp_buffer_map (outbuf, GST_MAP_READWRITE, &rtp);
+
+  /* set metadata: each full 10-byte frame counts as one frame, and a
+   * trailing 2-byte CN frame (remainder >> 1) counts as one more */
+  frames =
+      (payload_len / G729_FRAME_SIZE) + ((payload_len % G729_FRAME_SIZE) >> 1);
+  duration = frames * G729_FRAME_DURATION;
+  GST_BUFFER_PTS (outbuf) = rtpg729pay->next_ts;
+  GST_BUFFER_DURATION (outbuf) = duration;
+  GST_BUFFER_OFFSET (outbuf) = rtpg729pay->next_rtp_time;
+  rtpg729pay->next_ts += duration;
+  /* 80 RTP ticks per 10 ms frame at the 8000 Hz clock */
+  rtpg729pay->next_rtp_time += frames * 80;
+
+  if (G_UNLIKELY (rtpg729pay->discont)) {
+    GST_DEBUG_OBJECT (basepayload, "discont, setting marker bit");
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+    gst_rtp_buffer_set_marker (&rtp, TRUE);
+    rtpg729pay->discont = FALSE;
+  }
+  gst_rtp_buffer_unmap (&rtp);
+
+  /* append payload */
+  gst_rtp_copy_audio_meta (basepayload, outbuf, buf);
+  outbuf = gst_buffer_append (outbuf, buf);
+
+  ret = gst_rtp_base_payload_push (basepayload, outbuf);
+
+  return ret;
+}
+
+/* Re-anchor next_rtp_time from the elapsed stream time since the first
+ * buffer, at 8 RTP ticks per millisecond (8000 Hz clock).  Does nothing
+ * until a first timestamp has been recorded or when @time is invalid or
+ * runs backwards. */
+static void
+gst_rtp_g729_pay_recalc_rtp_time (GstRTPG729Pay * rtpg729pay, GstClockTime time)
+{
+  if (GST_CLOCK_TIME_IS_VALID (rtpg729pay->first_ts)
+      && GST_CLOCK_TIME_IS_VALID (time) && time >= rtpg729pay->first_ts) {
+    GstClockTime diff;
+    guint32 rtpdiff;
+
+    diff = time - rtpg729pay->first_ts;
+    rtpdiff = (diff / GST_MSECOND) * 8;
+    rtpg729pay->next_rtp_time = rtpg729pay->first_rtp_time + rtpdiff;
+    GST_DEBUG_OBJECT (rtpg729pay,
+        "elapsed time %" GST_TIME_FORMAT ", rtp %" G_GUINT32_FORMAT ", "
+        "new offset %" G_GUINT32_FORMAT, GST_TIME_ARGS (diff), rtpdiff,
+        rtpg729pay->next_rtp_time);
+  }
+}
+
+/* Accumulate incoming G.729 frames and emit RTP packets.
+ *
+ * Input buffers must be a whole number of 10-byte frames with at most one
+ * trailing 2-byte CN frame (see the file-top comment).  Packet sizes are
+ * bounded by the MTU and the ptime/min-ptime/max-ptime properties; data is
+ * collected in the adapter until at least min_payload_len is available. */
+static GstFlowReturn
+gst_rtp_g729_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buf)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstRTPG729Pay *rtpg729pay = GST_RTP_G729_PAY (payload);
+  GstAdapter *adapter = NULL;
+  guint payload_len;
+  guint available;
+  guint maxptime_octets = G_MAXUINT;
+  guint minptime_octets = 0;
+  guint min_payload_len;
+  guint max_payload_len;
+  gsize size;
+  GstClockTime timestamp;
+
+  size = gst_buffer_get_size (buf);
+
+  /* only N*10 or N*10+2 byte buffers are valid input */
+  if (size % G729_FRAME_SIZE != 0 &&
+      size % G729_FRAME_SIZE != G729B_CN_FRAME_SIZE)
+    goto invalid_size;
+
+  /* max number of bytes based on given ptime, has to be multiple of
+   * frame_duration */
+  if (payload->max_ptime != -1) {
+    guint ptime_ms = payload->max_ptime / GST_MSECOND;
+
+    maxptime_octets = G729_FRAME_SIZE *
+        (int) (ptime_ms / G729_FRAME_DURATION_MS);
+
+    if (maxptime_octets < G729_FRAME_SIZE) {
+      GST_WARNING_OBJECT (payload, "Given ptime %" G_GINT64_FORMAT
+          " is smaller than minimum %d ns, overwriting to minimum",
+          payload->max_ptime, G729_FRAME_DURATION_MS);
+      maxptime_octets = G729_FRAME_SIZE;
+    }
+  }
+
+  /* upper bound: whichever is smaller of what fits in the MTU (rounded
+   * down to whole frames) and the max-ptime limit */
+  max_payload_len = MIN (
+      /* MTU max */
+      (int) (gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU
+              (payload), 0, 0) / G729_FRAME_SIZE)
+      * G729_FRAME_SIZE,
+      /* ptime max */
+      maxptime_octets);
+
+  /* min number of bytes based on a given ptime, has to be a multiple
+     of frame duration */
+  {
+    guint64 min_ptime = payload->min_ptime;
+
+    min_ptime = min_ptime / GST_MSECOND;
+    minptime_octets = G729_FRAME_SIZE *
+        (int) (min_ptime / G729_FRAME_DURATION_MS);
+  }
+
+  min_payload_len = MAX (minptime_octets, G729_FRAME_SIZE);
+
+  if (min_payload_len > max_payload_len) {
+    min_payload_len = max_payload_len;
+  }
+
+  /* If the ptime is specified in the caps, tried to adhere to it exactly */
+  if (payload->ptime) {
+    guint64 ptime = payload->ptime / GST_MSECOND;
+    guint ptime_in_bytes = G729_FRAME_SIZE *
+        (guint) (ptime / G729_FRAME_DURATION_MS);
+
+    /* clip to computed min and max lengths */
+    ptime_in_bytes = MAX (min_payload_len, ptime_in_bytes);
+    ptime_in_bytes = MIN (max_payload_len, ptime_in_bytes);
+
+    min_payload_len = max_payload_len = ptime_in_bytes;
+  }
+
+  GST_LOG_OBJECT (payload,
+      "Calculated min_payload_len %u and max_payload_len %u",
+      min_payload_len, max_payload_len);
+
+  adapter = rtpg729pay->adapter;
+  available = gst_adapter_available (adapter);
+
+  timestamp = GST_BUFFER_PTS (buf);
+
+  /* resync rtp time on discont or a discontinuous cn packet */
+  if (GST_BUFFER_IS_DISCONT (buf)) {
+    /* flush remainder */
+    if (available > 0) {
+      /* NOTE(review): the flow return of this flush push is discarded;
+       * a downstream error here is only noticed on the next push */
+      gst_rtp_g729_pay_push (rtpg729pay,
+          gst_adapter_take_buffer_fast (adapter, available));
+      available = 0;
+    }
+    rtpg729pay->discont = TRUE;
+    gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
+  }
+
+  /* a lone CN frame (< one full frame) also re-anchors the RTP time */
+  if (size < G729_FRAME_SIZE)
+    gst_rtp_g729_pay_recalc_rtp_time (rtpg729pay, timestamp);
+
+  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (rtpg729pay->first_ts))) {
+    rtpg729pay->first_ts = timestamp;
+    rtpg729pay->first_rtp_time = rtpg729pay->next_rtp_time;
+  }
+
+  /* let's reset the base timestamp when the adapter is empty */
+  if (available == 0)
+    rtpg729pay->next_ts = timestamp;
+
+  /* fast path: push the buffer directly when nothing is queued and it
+   * already fits the computed bounds (avoids the adapter copy) */
+  if (available == 0 && size >= min_payload_len && size <= max_payload_len) {
+    ret = gst_rtp_g729_pay_push (rtpg729pay, buf);
+    return ret;
+  }
+
+  gst_adapter_push (adapter, buf);
+  available = gst_adapter_available (adapter);
+
+  /* as long as we have full frames */
+  /* this loop will push all available buffers till the last frame */
+  while (available >= min_payload_len ||
+      available % G729_FRAME_SIZE == G729B_CN_FRAME_SIZE) {
+    /* We send as much as we can */
+    if (available <= max_payload_len) {
+      payload_len = available;
+    } else {
+      payload_len = MIN (max_payload_len,
+          (available / G729_FRAME_SIZE) * G729_FRAME_SIZE);
+    }
+
+    ret = gst_rtp_g729_pay_push (rtpg729pay,
+        gst_adapter_take_buffer_fast (adapter, payload_len));
+    available -= payload_len;
+  }
+
+  return ret;
+
+  /* ERRORS */
+invalid_size:
+  {
+    GST_ELEMENT_ERROR (payload, STREAM, WRONG_TYPE,
+        ("Invalid input buffer size"),
+        ("Invalid buffer size, should be a multiple of"
+            " G729_FRAME_SIZE(10) with an optional G729B_CN_FRAME_SIZE(2)"
+            " added to it, but it is %" G_GSIZE_FORMAT, size));
+    gst_buffer_unref (buf);
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* State-change handler: chain up first (no upward transition needs special
+ * handling), then clear pending data and timing state when streaming
+ * stops. */
+static GstStateChangeReturn
+gst_rtp_g729_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_rtp_g729_pay_reset (GST_RTP_G729_PAY (element));
+
+  return ret;
+}
diff --git a/gst/rtp/gstrtpg729pay.h b/gst/rtp/gstrtpg729pay.h
new file mode 100644
index 0000000000..c9e76c27b3
--- /dev/null
+++ b/gst/rtp/gstrtpg729pay.h
@@ -0,0 +1,64 @@
+/* GStreamer
+ * Copyright (C) <2007> Nokia Corporation
+ * Copyright (C) <2007> Collabora Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_G729_PAY_H__
+#define __GST_RTP_G729_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_G729_PAY \
+  (gst_rtp_g729_pay_get_type())
+#define GST_RTP_G729_PAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_G729_PAY,GstRTPG729Pay))
+#define GST_RTP_G729_PAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_G729_PAY,GstRTPG729PayClass))
+#define GST_IS_RTP_G729_PAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_G729_PAY))
+#define GST_IS_RTP_G729_PAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_G729_PAY))
+
+typedef struct _GstRTPG729Pay GstRTPG729Pay;
+typedef struct _GstRTPG729PayClass GstRTPG729PayClass;
+
+/* RTP payloader for G.729 audio. */
+struct _GstRTPG729Pay
+{
+  GstRTPBasePayload payload;
+
+  /* queues incoming frames until a full RTP payload can be formed */
+  GstAdapter *adapter;
+  /* PTS for the next outgoing RTP buffer */
+  GstClockTime next_ts;
+  /* RTP timestamp offset for the next outgoing buffer */
+  guint32 next_rtp_time;
+  /* timestamp/RTP-time anchor used to recompute next_rtp_time */
+  GstClockTime first_ts;
+  guint32 first_rtp_time;
+  /* TRUE when the next packet must carry DISCONT and the marker bit */
+  gboolean discont;
+};
+
+struct _GstRTPG729PayClass
+{
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_g729_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_G729_PAY_H__ */
diff --git a/gst/rtp/gstrtpgsmdepay.c b/gst/rtp/gstrtpgsmdepay.c
new file mode 100644
index 0000000000..313acb545d
--- /dev/null
+++ b/gst/rtp/gstrtpgsmdepay.c
@@ -0,0 +1,148 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2005> Zeeshan Ali <zeenix@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpgsmdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpgsmdepay_debug);
+#define GST_CAT_DEFAULT (rtpgsmdepay_debug)
+
+/* RTPGSMDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+static GstStaticPadTemplate gst_rtp_gsm_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-gsm, " "rate = (int) 8000, " "channels = 1")
+ );
+
+static GstStaticPadTemplate gst_rtp_gsm_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"GSM\";"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_GSM_STRING ", "
+ "clock-rate = (int) 8000")
+ );
+
+static GstBuffer *gst_rtp_gsm_depay_process (GstRTPBaseDepayload * _depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_gsm_depay_setcaps (GstRTPBaseDepayload * _depayload,
+ GstCaps * caps);
+
+#define gst_rtp_gsm_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMDepay, gst_rtp_gsm_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgsmdepay, "rtpgsmdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_GSM_DEPAY, rtp_element_init (plugin));
+
+/* Register pad templates, element metadata and the depayloader vfuncs. */
+static void
+gst_rtp_gsm_depay_class_init (GstRTPGSMDepayClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbase_depayload_class;
+
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbase_depayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_gsm_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_gsm_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP GSM depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts GSM audio from RTP packets", "Zeeshan Ali <zeenix@gmail.com>");
+
+  /* per-packet processing and caps negotiation hooks */
+  gstrtpbase_depayload_class->process_rtp_packet = gst_rtp_gsm_depay_process;
+  gstrtpbase_depayload_class->set_caps = gst_rtp_gsm_depay_setcaps;
+
+  GST_DEBUG_CATEGORY_INIT (rtpgsmdepay_debug, "rtpgsmdepay", 0,
+      "GSM Audio RTP Depayloader");
+}
+
+static void
+gst_rtp_gsm_depay_init (GstRTPGSMDepay * rtpgsmdepay)
+{
+  /* nothing to initialize; all state lives in the base depayloader */
+}
+
+/* Negotiate the output caps from the RTP caps.  The clock rate defaults
+ * to 8000 Hz when absent; GSM output is always mono. */
+static gboolean
+gst_rtp_gsm_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *s = gst_caps_get_structure (caps, 0);
+  GstCaps *srccaps;
+  gboolean res;
+  gint rate;
+
+  if (!gst_structure_get_int (s, "clock-rate", &rate))
+    rate = 8000;                /* default */
+  depayload->clock_rate = rate;
+
+  srccaps = gst_caps_new_simple ("audio/x-gsm",
+      "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, rate, NULL);
+  res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+}
+
+/* Extract the GSM payload from one RTP packet; the marker bit is mapped
+ * to GST_BUFFER_FLAG_RESYNC to mark the start of a talkspurt. */
+static GstBuffer *
+gst_rtp_gsm_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  gboolean marker = gst_rtp_buffer_get_marker (rtp);
+  GstBuffer *outbuf;
+
+  GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+      gst_buffer_get_size (rtp->buffer), marker,
+      gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+  outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+  if (outbuf != NULL) {
+    if (marker) {
+      /* mark start of talkspurt with RESYNC */
+      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+    }
+    gst_rtp_drop_non_audio_meta (depayload, outbuf);
+  }
+
+  return outbuf;
+}
diff --git a/gst/rtp/gstrtpgsmdepay.h b/gst/rtp/gstrtpgsmdepay.h
new file mode 100644
index 0000000000..166f558691
--- /dev/null
+++ b/gst/rtp/gstrtpgsmdepay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_GSM_DEPAY_H__
+#define __GST_RTP_GSM_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPGSMDepay GstRTPGSMDepay;
+typedef struct _GstRTPGSMDepayClass GstRTPGSMDepayClass;
+
+#define GST_TYPE_RTP_GSM_DEPAY \
+ (gst_rtp_gsm_depay_get_type())
+#define GST_RTP_GSM_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_GSM_DEPAY,GstRTPGSMDepay))
+#define GST_RTP_GSM_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_GSM_DEPAY,GstRTPGSMDepayClass))
+#define GST_IS_RTP_GSM_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_GSM_DEPAY))
+#define GST_IS_RTP_GSM_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_GSM_DEPAY))
+
+struct _GstRTPGSMDepay
+{
+ GstRTPBaseDepayload _depayload;
+};
+
+struct _GstRTPGSMDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_gsm_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_GSM_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpgsmpay.c b/gst/rtp/gstrtpgsmpay.c
new file mode 100644
index 0000000000..b3197cb31a
--- /dev/null
+++ b/gst/rtp/gstrtpgsmpay.c
@@ -0,0 +1,177 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2005> Zeeshan Ali <zeenix@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpgsmpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpgsmpay_debug);
+#define GST_CAT_DEFAULT (rtpgsmpay_debug)
+
+static GstStaticPadTemplate gst_rtp_gsm_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-gsm, " "rate = (int) 8000, " "channels = (int) 1")
+ );
+
+static GstStaticPadTemplate gst_rtp_gsm_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_GSM_STRING ", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"GSM\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"GSM\"")
+ );
+
+static gboolean gst_rtp_gsm_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_gsm_pay_handle_buffer (GstRTPBasePayload * payload,
+ GstBuffer * buffer);
+
+#define gst_rtp_gsm_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPGSMPay, gst_rtp_gsm_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgsmpay, "rtpgsmpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_GSM_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_gsm_pay_class_init (GstRTPGSMPayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpgsmpay_debug, "rtpgsmpay", 0,
+ "GSM Audio RTP Payloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_gsm_pay_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_gsm_pay_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP GSM payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes GSM audio into a RTP packet",
+ "Zeeshan Ali <zeenix@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_gsm_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_gsm_pay_handle_buffer;
+}
+
+static void
+gst_rtp_gsm_pay_init (GstRTPGSMPay * rtpgsmpay)
+{
+ GST_RTP_BASE_PAYLOAD (rtpgsmpay)->clock_rate = 8000;
+ GST_RTP_BASE_PAYLOAD_PT (rtpgsmpay) = GST_RTP_PAYLOAD_GSM;
+}
+
+static gboolean
+gst_rtp_gsm_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ const char *stname;
+ GstStructure *structure;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ stname = gst_structure_get_name (structure);
+
+ if (strcmp ("audio/x-gsm", stname))
+ goto invalid_type;
+
+ gst_rtp_base_payload_set_options (payload, "audio",
+ payload->pt != GST_RTP_PAYLOAD_GSM, "GSM", 8000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+ return res;
+
+ /* ERRORS */
+invalid_type:
+ {
+ GST_WARNING_OBJECT (payload, "invalid media type received");
+ return FALSE;
+ }
+}
+
+static GstFlowReturn
+gst_rtp_gsm_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRTPGSMPay *rtpgsmpay;
+ guint payload_len;
+ GstBuffer *outbuf;
+ GstClockTime timestamp, duration;
+ GstFlowReturn ret;
+
+ rtpgsmpay = GST_RTP_GSM_PAY (basepayload);
+
+ timestamp = GST_BUFFER_PTS (buffer);
+ duration = GST_BUFFER_DURATION (buffer);
+
+ /* FIXME, only one GSM frame per RTP packet for now */
+ payload_len = gst_buffer_get_size (buffer);
+
+ /* FIXME, just error out for now */
+ if (payload_len > GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay))
+ goto too_big;
+
+ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
+
+ /* copy timestamp and duration */
+ GST_BUFFER_PTS (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = duration;
+
+ gst_rtp_copy_audio_meta (rtpgsmpay, outbuf, buffer);
+
+ /* append payload */
+ outbuf = gst_buffer_append (outbuf, buffer);
+
+ GST_DEBUG ("gst_rtp_gsm_pay_chain: pushing buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (outbuf));
+
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
+
+ return ret;
+
+ /* ERRORS */
+too_big:
+ {
+ GST_ELEMENT_ERROR (rtpgsmpay, STREAM, ENCODE, (NULL),
+ ("payload_len %u > mtu %u", payload_len,
+ GST_RTP_BASE_PAYLOAD_MTU (rtpgsmpay)));
+ return GST_FLOW_ERROR;
+ }
+}
diff --git a/gst/rtp/gstrtpgsmpay.h b/gst/rtp/gstrtpgsmpay.h
new file mode 100644
index 0000000000..357a7c6955
--- /dev/null
+++ b/gst/rtp/gstrtpgsmpay.h
@@ -0,0 +1,58 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_RTP_GSM_PAY_H__
+#define __GST_RTP_GSM_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPGSMPay GstRTPGSMPay;
+typedef struct _GstRTPGSMPayClass GstRTPGSMPayClass;
+
+#define GST_TYPE_RTP_GSM_PAY \
+ (gst_rtp_gsm_pay_get_type())
+#define GST_RTP_GSM_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_GSM_PAY,GstRTPGSMPay))
+#define GST_RTP_GSM_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_GSM_PAY,GstRTPGSMPayClass))
+#define GST_IS_RTP_GSM_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_GSM_PAY))
+#define GST_IS_RTP_GSM_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_GSM_PAY))
+
+struct _GstRTPGSMPay
+{
+ GstRTPBasePayload payload;
+};
+
+struct _GstRTPGSMPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_gsm_pay_get_type (void);
+
+
+G_END_DECLS
+
+#endif /* __GST_RTP_GSM_PAY_H__ */
diff --git a/gst/rtp/gstrtpgstdepay.c b/gst/rtp/gstrtpgstdepay.c
new file mode 100644
index 0000000000..ebf8382960
--- /dev/null
+++ b/gst/rtp/gstrtpgstdepay.c
@@ -0,0 +1,607 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpgstdepay.h"
+#include "gstrtputils.h"
+
+#include <gst/video/video.h>
+
+GST_DEBUG_CATEGORY_STATIC (rtpgstdepay_debug);
+#define GST_CAT_DEFAULT (rtpgstdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_gst_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_rtp_gst_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"application\", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"X-GST\"")
+ );
+
+#define gst_rtp_gst_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTDepay, gst_rtp_gst_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgstdepay, "rtpgstdepay",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_GST_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_gst_depay_finalize (GObject * object);
+
+static gboolean gst_rtp_gst_depay_handle_event (GstRTPBaseDepayload * depay,
+ GstEvent * event);
+static GstStateChangeReturn gst_rtp_gst_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_rtp_gst_depay_reset (GstRtpGSTDepay * rtpgstdepay, gboolean
+ full);
+static gboolean gst_rtp_gst_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_gst_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static void
+gst_rtp_gst_depay_class_init (GstRtpGSTDepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpgstdepay_debug, "rtpgstdepay", 0,
+ "Gstreamer RTP Depayloader");
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_gst_depay_finalize;
+
+ gstelement_class->change_state = gst_rtp_gst_depay_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_gst_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_gst_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "GStreamer depayloader", "Codec/Depayloader/Network",
+ "Extracts GStreamer buffers from RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->handle_event = gst_rtp_gst_depay_handle_event;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_gst_depay_setcaps;
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_gst_depay_process;
+}
+
+static void
+gst_rtp_gst_depay_init (GstRtpGSTDepay * rtpgstdepay)
+{
+ rtpgstdepay->adapter = gst_adapter_new ();
+}
+
+static void
+gst_rtp_gst_depay_finalize (GObject * object)
+{
+ GstRtpGSTDepay *rtpgstdepay;
+
+ rtpgstdepay = GST_RTP_GST_DEPAY (object);
+
+ gst_rtp_gst_depay_reset (rtpgstdepay, TRUE);
+ g_object_unref (rtpgstdepay->adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_rtp_gst_depay_reset (GstRtpGSTDepay * rtpgstdepay, gboolean full)
+{
+ gst_adapter_clear (rtpgstdepay->adapter);
+ if (full) {
+ rtpgstdepay->current_CV = 0;
+ gst_caps_replace (&rtpgstdepay->current_caps, NULL);
+ g_free (rtpgstdepay->stream_id);
+ rtpgstdepay->stream_id = NULL;
+ if (rtpgstdepay->tags)
+ gst_tag_list_unref (rtpgstdepay->tags);
+ rtpgstdepay->tags = NULL;
+ }
+}
+
+static gboolean
+gst_rtp_gst_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstRtpGSTDepay *rtpgstdepay;
+ GstStructure *structure;
+ gint clock_rate;
+ gboolean res;
+ const gchar *capsenc;
+
+ rtpgstdepay = GST_RTP_GST_DEPAY (depayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000;
+ depayload->clock_rate = clock_rate;
+
+ capsenc = gst_structure_get_string (structure, "caps");
+ if (capsenc) {
+ GstCaps *outcaps;
+ gsize out_len;
+ gchar *capsstr;
+ const gchar *capsver;
+ guint CV;
+
+ /* decode caps */
+ capsstr = (gchar *) g_base64_decode (capsenc, &out_len);
+ outcaps = gst_caps_from_string (capsstr);
+ g_free (capsstr);
+
+ /* parse version */
+ capsver = gst_structure_get_string (structure, "capsversion");
+ if (capsver) {
+ CV = atoi (capsver);
+ } else {
+ /* no version, assume 0 */
+ CV = 0;
+ }
+ /* store in cache */
+ rtpgstdepay->current_CV = CV;
+ gst_caps_replace (&rtpgstdepay->current_caps, outcaps);
+
+ res = gst_pad_set_caps (depayload->srcpad, outcaps);
+ gst_caps_unref (outcaps);
+ } else {
+ GST_WARNING_OBJECT (depayload, "no caps given");
+ rtpgstdepay->current_CV = -1;
+ gst_caps_replace (&rtpgstdepay->current_caps, NULL);
+ res = TRUE;
+ }
+
+ return res;
+}
+
+static gboolean
+read_length (GstRtpGSTDepay * rtpgstdepay, guint8 * data, guint size,
+ guint * length, guint * skip)
+{
+ guint b, len, offset;
+
+ /* start reading the length, we need this to skip to the data later */
+ len = offset = 0;
+ do {
+ if (offset >= size)
+ return FALSE;
+ b = data[offset++];
+ len = (len << 7) | (b & 0x7f);
+ } while (b & 0x80);
+
+ /* check remaining buffer size */
+ if (size - offset < len)
+ return FALSE;
+
+ *length = len;
+ *skip = offset;
+
+ return TRUE;
+}
+
+static GstCaps *
+read_caps (GstRtpGSTDepay * rtpgstdepay, GstBuffer * buf, guint * skip)
+{
+ guint offset, length;
+ GstCaps *caps;
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ GST_DEBUG_OBJECT (rtpgstdepay, "buffer size %" G_GSIZE_FORMAT, map.size);
+
+ if (!read_length (rtpgstdepay, map.data, map.size, &length, &offset))
+ goto too_small;
+
+ if (length == 0 || map.data[offset + length - 1] != '\0')
+ goto invalid_buffer;
+
+ GST_DEBUG_OBJECT (rtpgstdepay, "parsing caps %s", &map.data[offset]);
+
+ /* parse and store in cache */
+ caps = gst_caps_from_string ((gchar *) & map.data[offset]);
+ gst_buffer_unmap (buf, &map);
+
+ *skip = length + offset;
+
+ return caps;
+
+too_small:
+ {
+ GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
+ ("Buffer too small."), (NULL));
+ gst_buffer_unmap (buf, &map);
+ return NULL;
+ }
+invalid_buffer:
+ {
+ GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
+ ("caps string not 0-terminated."), (NULL));
+ gst_buffer_unmap (buf, &map);
+ return NULL;
+ }
+}
+
+static GstEvent *
+read_event (GstRtpGSTDepay * rtpgstdepay, guint type,
+ GstBuffer * buf, guint * skip)
+{
+ guint offset, length;
+ GstStructure *s;
+ GstEvent *event;
+ GstEventType etype;
+ gchar *end;
+ GstMapInfo map;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ GST_DEBUG_OBJECT (rtpgstdepay, "buffer size %" G_GSIZE_FORMAT, map.size);
+
+ if (!read_length (rtpgstdepay, map.data, map.size, &length, &offset))
+ goto too_small;
+
+ if (length == 0)
+ goto invalid_buffer;
+ /* backward compatibility: the old payloader did not append a 0 byte */
+ if (map.data[offset + length - 1] != '\0'
+ && map.data[offset + length - 1] != ';')
+ goto invalid_buffer;
+
+ GST_DEBUG_OBJECT (rtpgstdepay, "parsing event %s", &map.data[offset]);
+
+ /* parse */
+ s = gst_structure_from_string ((gchar *) & map.data[offset], &end);
+ gst_buffer_unmap (buf, &map);
+
+ if (s == NULL)
+ goto parse_failed;
+
+ switch (type) {
+ case 1:
+ etype = GST_EVENT_TAG;
+ break;
+ case 2:
+ etype = GST_EVENT_CUSTOM_DOWNSTREAM;
+ break;
+ case 3:
+ etype = GST_EVENT_CUSTOM_BOTH;
+ break;
+ case 4:
+ etype = GST_EVENT_STREAM_START;
+ break;
+ default:
+ goto unknown_event;
+ }
+ event = gst_event_new_custom (etype, s);
+
+ *skip = length + offset;
+
+ return event;
+
+too_small:
+ {
+ GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
+ ("Buffer too small."), (NULL));
+ gst_buffer_unmap (buf, &map);
+ return NULL;
+ }
+invalid_buffer:
+ {
+ GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
+ ("event string not 0-terminated."), (NULL));
+ gst_buffer_unmap (buf, &map);
+ return NULL;
+ }
+parse_failed:
+ {
+ GST_WARNING_OBJECT (rtpgstdepay, "could not parse event");
+ return NULL;
+ }
+unknown_event:
+ {
+ GST_DEBUG_OBJECT (rtpgstdepay, "unknown event type");
+ gst_structure_free (s);
+ return NULL;
+ }
+}
+
+static void
+store_event (GstRtpGSTDepay * rtpgstdepay, GstEvent * event)
+{
+ gboolean do_push = FALSE;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TAG:
+ {
+ GstTagList *old, *tags;
+
+ gst_event_parse_tag (event, &tags);
+
+ old = rtpgstdepay->tags;
+ if (!old || !gst_tag_list_is_equal (old, tags)) {
+ do_push = TRUE;
+ if (old)
+ gst_tag_list_unref (old);
+ rtpgstdepay->tags = gst_tag_list_ref (tags);
+ }
+ break;
+ }
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ case GST_EVENT_CUSTOM_BOTH:
+ /* always push custom events */
+ do_push = TRUE;
+ break;
+ case GST_EVENT_STREAM_START:
+ {
+ gchar *old;
+ const gchar *stream_id = NULL;
+
+ gst_event_parse_stream_start (event, &stream_id);
+
+ old = rtpgstdepay->stream_id;
+ if (!old || g_strcmp0 (old, stream_id)) {
+ do_push = TRUE;
+ g_free (old);
+ rtpgstdepay->stream_id = g_strdup (stream_id);
+ }
+ break;
+ }
+ default:
+ /* unknown event, don't push */
+ break;
+ }
+ if (do_push)
+ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD (rtpgstdepay)->srcpad, event);
+ else
+ gst_event_unref (event);
+}
+
+static GstBuffer *
+gst_rtp_gst_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpGSTDepay *rtpgstdepay;
+ GstBuffer *subbuf, *outbuf = NULL;
+ gint payload_len;
+ guint8 *payload;
+ guint CV, frag_offset, avail, offset;
+
+ rtpgstdepay = GST_RTP_GST_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (payload_len <= 8)
+ goto empty_packet;
+
+ if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+ GST_WARNING_OBJECT (rtpgstdepay, "DISCONT, clear adapter");
+ gst_adapter_clear (rtpgstdepay->adapter);
+ }
+
+ payload = gst_rtp_buffer_get_payload (rtp);
+
+ /* strip off header
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |C| CV |D|0|0|0| ETYPE | MBZ |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Frag_offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ frag_offset =
+ (payload[4] << 24) | (payload[5] << 16) | (payload[6] << 8) | payload[7];
+
+ avail = gst_adapter_available (rtpgstdepay->adapter);
+ if (avail != frag_offset)
+ goto wrong_frag;
+
+ /* subbuffer skipping the 8 header bytes */
+ subbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, 8, -1);
+ gst_adapter_push (rtpgstdepay->adapter, subbuf);
+
+ offset = 0;
+ if (gst_rtp_buffer_get_marker (rtp)) {
+ guint avail;
+ GstCaps *outcaps;
+
+ /* take the buffer */
+ avail = gst_adapter_available (rtpgstdepay->adapter);
+ outbuf = gst_adapter_take_buffer (rtpgstdepay->adapter, avail);
+
+ CV = (payload[0] >> 4) & 0x7;
+
+ if (payload[0] & 0x80) {
+ guint size;
+
+ /* C bit, we have inline caps */
+ outcaps = read_caps (rtpgstdepay, outbuf, &size);
+ if (outcaps == NULL)
+ goto no_caps;
+
+ GST_DEBUG_OBJECT (rtpgstdepay,
+ "inline caps %u, length %u, %" GST_PTR_FORMAT, CV, size, outcaps);
+
+ if (!rtpgstdepay->current_caps
+ || !gst_caps_is_strictly_equal (rtpgstdepay->current_caps, outcaps))
+ gst_pad_set_caps (depayload->srcpad, outcaps);
+ gst_caps_replace (&rtpgstdepay->current_caps, outcaps);
+ gst_caps_unref (outcaps);
+ rtpgstdepay->current_CV = CV;
+
+ /* skip caps */
+ offset += size;
+ avail -= size;
+ }
+ if (payload[1]) {
+ guint size;
+ GstEvent *event;
+
+ /* we have an event */
+ event = read_event (rtpgstdepay, payload[1], outbuf, &size);
+ if (event == NULL)
+ goto no_event;
+
+ GST_DEBUG_OBJECT (rtpgstdepay,
+ "inline event, length %u, %" GST_PTR_FORMAT, size, event);
+
+ store_event (rtpgstdepay, event);
+
+ /* no buffer after event */
+ avail = 0;
+ }
+
+ if (avail) {
+ if (offset != 0) {
+ GstBuffer *temp;
+
+ GST_DEBUG_OBJECT (rtpgstdepay, "sub buffer: offset %u, size %u", offset,
+ avail);
+
+ temp =
+ gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_ALL, offset, avail);
+
+ gst_buffer_unref (outbuf);
+ outbuf = temp;
+ }
+
+ /* see what caps we need */
+ if (CV != rtpgstdepay->current_CV) {
+ /* we need to switch caps but didn't receive the new caps yet */
+ gst_caps_replace (&rtpgstdepay->current_caps, NULL);
+ goto missing_caps;
+ }
+
+ if (payload[0] & 0x8)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ } else {
+ gst_buffer_unref (outbuf);
+ outbuf = NULL;
+ }
+ }
+
+ if (outbuf) {
+ gst_rtp_drop_meta (GST_ELEMENT_CAST (rtpgstdepay), outbuf, 0);
+ }
+
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpgstdepay, STREAM, DECODE,
+ ("Empty Payload."), (NULL));
+ return NULL;
+ }
+wrong_frag:
+ {
+ gst_adapter_clear (rtpgstdepay->adapter);
+ GST_LOG_OBJECT (rtpgstdepay, "wrong fragment, skipping");
+ return NULL;
+ }
+no_caps:
+ {
+ GST_WARNING_OBJECT (rtpgstdepay, "failed to parse caps");
+ gst_buffer_unref (outbuf);
+ return NULL;
+ }
+no_event:
+ {
+ GST_WARNING_OBJECT (rtpgstdepay, "failed to parse event");
+ gst_buffer_unref (outbuf);
+ return NULL;
+ }
+missing_caps:
+ {
+ GST_INFO_OBJECT (rtpgstdepay, "No caps received yet %u", CV);
+ gst_buffer_unref (outbuf);
+
+ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (rtpgstdepay),
+ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
+ TRUE, 0));
+
+ return NULL;
+ }
+}
+
+static gboolean
+gst_rtp_gst_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event)
+{
+ GstRtpGSTDepay *rtpgstdepay;
+
+ rtpgstdepay = GST_RTP_GST_DEPAY (depay);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ gst_rtp_gst_depay_reset (rtpgstdepay, FALSE);
+ break;
+ default:
+ break;
+ }
+
+ return
+ GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, event);
+}
+
+
+static GstStateChangeReturn
+gst_rtp_gst_depay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpGSTDepay *rtpgstdepay;
+ GstStateChangeReturn ret;
+
+ rtpgstdepay = GST_RTP_GST_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtp_gst_depay_reset (rtpgstdepay, TRUE);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_gst_depay_reset (rtpgstdepay, TRUE);
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtpgstdepay.h b/gst/rtp/gstrtpgstdepay.h
new file mode 100644
index 0000000000..41528792ea
--- /dev/null
+++ b/gst/rtp/gstrtpgstdepay.h
@@ -0,0 +1,64 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_GST_DEPAY_H__
+#define __GST_RTP_GST_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_GST_DEPAY \
+ (gst_rtp_gst_depay_get_type())
+#define GST_RTP_GST_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_GST_DEPAY,GstRtpGSTDepay))
+#define GST_RTP_GST_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_GST_DEPAY,GstRtpGSTDepayClass))
+#define GST_IS_RTP_GST_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_GST_DEPAY))
+#define GST_IS_RTP_GST_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_GST_DEPAY))
+
+typedef struct _GstRtpGSTDepay GstRtpGSTDepay;
+typedef struct _GstRtpGSTDepayClass GstRtpGSTDepayClass;
+
+struct _GstRtpGSTDepay
+{
+ GstRTPBaseDepayload depayload;
+
+ GstAdapter *adapter;
+ guint current_CV;
+ GstCaps *current_caps;
+
+ GstTagList *tags;
+ gchar *stream_id;
+};
+
+struct _GstRtpGSTDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_gst_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_GST_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpgstpay.c b/gst/rtp/gstrtpgstpay.c
new file mode 100644
index 0000000000..81ff4a4d46
--- /dev/null
+++ b/gst/rtp/gstrtpgstpay.c
@@ -0,0 +1,699 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpgstpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_pay_debug);
+#define GST_CAT_DEFAULT gst_rtp_pay_debug
+
+/*
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |C| CV |D|0|0|0| ETYPE | MBZ |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Frag_offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ *
+ * C: caps inlined flag
+ * When C set, first part of payload contains caps definition. Caps definition
+ * starts with variable-length length prefix and then a string of that length.
+ * the length is encoded in big endian 7 bit chunks, the top 1 bit of a byte
+ * is the continuation marker and the 7 next bits the data. A continuation
+ * marker of 1 means that the next byte contains more data.
+ *
+ * CV: caps version, 0 = caps from SDP, 1 - 7 inlined caps
+ * D: delta unit buffer
+ * ETYPE: type of event. Payload contains the event, prefixed with a
+ * variable length field.
+ * 0 = NO event
+ * 1 = GST_EVENT_TAG
+ * 2 = GST_EVENT_CUSTOM_DOWNSTREAM
+ * 3 = GST_EVENT_CUSTOM_BOTH
+ * 4 = GST_EVENT_STREAM_START
+ */
+
+static GstStaticPadTemplate gst_rtp_gst_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_rtp_gst_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"application\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"X-GST\"")
+ );
+
+enum
+{
+ PROP_0,
+ PROP_CONFIG_INTERVAL
+};
+
+#define DEFAULT_CONFIG_INTERVAL 0
+
+static void gst_rtp_gst_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_gst_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_gst_pay_finalize (GObject * obj);
+static GstStateChangeReturn gst_rtp_gst_pay_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_rtp_gst_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_gst_pay_handle_buffer (GstRTPBasePayload * payload,
+ GstBuffer * buffer);
+static gboolean gst_rtp_gst_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+static gboolean gst_rtp_gst_pay_src_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+
+#define gst_rtp_gst_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpGSTPay, gst_rtp_gst_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpgstpay, "rtpgstpay", GST_RANK_NONE,
+ GST_TYPE_RTP_GST_PAY, rtp_element_init (plugin));
+
+/* GObject class setup: installs the "config-interval" property, registers
+ * the pad templates and element metadata, and wires the GstElement
+ * state-change handler plus the GstRTPBasePayload virtual methods
+ * (set_caps, handle_buffer, sink_event, src_event). */
+static void
+gst_rtp_gst_pay_class_init (GstRtpGSTPayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->set_property = gst_rtp_gst_pay_set_property;
+ gobject_class->get_property = gst_rtp_gst_pay_get_property;
+ gobject_class->finalize = gst_rtp_gst_pay_finalize;
+
+ /* 0 disables periodic config; otherwise caps/tags are re-sent at most
+ * once per interval (seconds), see gst_rtp_gst_pay_handle_buffer. */
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_CONFIG_INTERVAL,
+ g_param_spec_uint ("config-interval",
+ "Caps/Tags Send Interval",
+ "Interval for sending caps and TAG events in seconds (0 = disabled)",
+ 0, 3600, DEFAULT_CONFIG_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gstelement_class->change_state = gst_rtp_gst_pay_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_gst_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_gst_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP GStreamer payloader", "Codec/Payloader/Network/RTP",
+ "Payload GStreamer buffers as RTP packets",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_gst_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_gst_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_gst_pay_sink_event;
+ gstrtpbasepayload_class->src_event = gst_rtp_gst_pay_src_event;
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_pay_debug, "rtpgstpay", 0,
+ "rtpgstpay element");
+}
+
+/* Instance init: create the payload adapter, advertise the static RTP
+ * session options (media "application", encoding "X-GST", 90 kHz clock)
+ * and set all state fields to their defaults. */
+static void
+gst_rtp_gst_pay_init (GstRtpGSTPay * rtpgstpay)
+{
+ rtpgstpay->adapter = gst_adapter_new ();
+ rtpgstpay->pending_buffers = NULL;
+ gst_rtp_base_payload_set_options (GST_RTP_BASE_PAYLOAD (rtpgstpay),
+ "application", TRUE, "X-GST", 90000);
+ rtpgstpay->last_config = GST_CLOCK_TIME_NONE;
+ rtpgstpay->taglist = NULL;
+ rtpgstpay->config_interval = DEFAULT_CONFIG_INTERVAL;
+}
+
+/* Drop all pending payload state. With @full == FALSE (flush-stop) only the
+ * adapter, per-packet flags and queued-but-unpushed packet lists are
+ * cleared; with @full == TRUE (state change / finalize) the cached tags,
+ * stream-id and caps-version counters are reset as well. */
+static void
+gst_rtp_gst_pay_reset (GstRtpGSTPay * rtpgstpay, gboolean full)
+{
+ rtpgstpay->last_config = GST_CLOCK_TIME_NONE;
+ gst_adapter_clear (rtpgstpay->adapter);
+ /* 0x70 keeps the CV bits (6-4) and clears the caps (C) and delta (D)
+ * flags; see the payload header diagram at the top of this file. */
+ rtpgstpay->flags &= 0x70;
+ rtpgstpay->etype = 0;
+ if (rtpgstpay->pending_buffers)
+ g_list_free_full (rtpgstpay->pending_buffers,
+ (GDestroyNotify) gst_buffer_list_unref);
+ rtpgstpay->pending_buffers = NULL;
+ if (full) {
+ if (rtpgstpay->taglist)
+ gst_tag_list_unref (rtpgstpay->taglist);
+ rtpgstpay->taglist = NULL;
+ g_free (rtpgstpay->stream_id);
+ rtpgstpay->stream_id = NULL;
+ rtpgstpay->current_CV = 0;
+ rtpgstpay->next_CV = 0;
+ }
+}
+
+/* GObject finalize: release all payloader state (full reset) and the
+ * adapter, then chain up. */
+static void
+gst_rtp_gst_pay_finalize (GObject * obj)
+{
+ GstRtpGSTPay *rtpgstpay;
+
+ rtpgstpay = GST_RTP_GST_PAY (obj);
+
+ gst_rtp_gst_pay_reset (rtpgstpay, TRUE);
+
+ g_object_unref (rtpgstpay->adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (obj);
+}
+
+/* Standard GObject property setter; only "config-interval" exists. */
+static void
+gst_rtp_gst_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpGSTPay *rtpgstpay;
+
+ rtpgstpay = GST_RTP_GST_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ rtpgstpay->config_interval = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Standard GObject property getter; only "config-interval" exists. */
+static void
+gst_rtp_gst_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpGSTPay *rtpgstpay;
+
+ rtpgstpay = GST_RTP_GST_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ g_value_set_uint (value, rtpgstpay->config_interval);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstElement::change_state. Fully resets the payloader both when entering
+ * PAUSED (before chaining up) and when leaving it (after chaining up), so
+ * a restart always begins with fresh caps-version counters and no stale
+ * pending packets. */
+static GstStateChangeReturn
+gst_rtp_gst_pay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpGSTPay *rtpgstpay;
+ GstStateChangeReturn ret;
+
+ rtpgstpay = GST_RTP_GST_PAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtp_gst_pay_reset (rtpgstpay, TRUE);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_gst_pay_reset (rtpgstpay, TRUE);
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
+
+#define RTP_HEADER_LEN 12
+
+/* Packetize everything currently in the adapter (inlined caps/event data
+ * and/or buffer payload) into a GstBufferList of RTP packets. Each packet
+ * carries the 8-byte payload header (flags, ETYPE, 32-bit fragment offset;
+ * see the diagram at the top of this file) and is capped at the session
+ * MTU; the RTP marker bit is set on the last fragment. The finished list
+ * is appended to pending_buffers — nothing is pushed downstream here.
+ * Returns FALSE if the adapter was empty, TRUE otherwise. */
+static gboolean
+gst_rtp_gst_pay_create_from_adapter (GstRtpGSTPay * rtpgstpay,
+ GstClockTime timestamp)
+{
+ guint avail, mtu;
+ guint frag_offset;
+ GstBufferList *list;
+
+ avail = gst_adapter_available (rtpgstpay->adapter);
+ if (avail == 0)
+ return FALSE;
+
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpgstpay);
+
+ /* pre-size the list for the expected number of fragments */
+ list = gst_buffer_list_new_sized ((avail / (mtu - (RTP_HEADER_LEN + 8))) + 1);
+ frag_offset = 0;
+
+ while (avail) {
+ guint towrite;
+ guint8 *payload;
+ guint payload_len;
+ guint packet_len;
+ GstBuffer *outbuf;
+ GstRTPBuffer rtp = { NULL };
+ GstBuffer *paybuf;
+
+
+ /* this will be the total length of the packet */
+ packet_len = gst_rtp_buffer_calc_packet_len (8 + avail, 0, 0);
+
+ /* fill one MTU or all available bytes */
+ towrite = MIN (packet_len, mtu);
+
+ /* this is the payload length */
+ payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
+
+ /* create buffer to hold the header */
+ outbuf =
+ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+ (rtpgstpay), 8, 0, 0);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+
+ GST_DEBUG_OBJECT (rtpgstpay, "new packet len %u, frag %u", packet_len,
+ frag_offset);
+
+ /*
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |C| CV |D|0|0|0| ETYPE | MBZ |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Frag_offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ payload[0] = rtpgstpay->flags;
+ payload[1] = rtpgstpay->etype;
+ payload[2] = payload[3] = 0;
+ /* fragment offset, big endian */
+ payload[4] = frag_offset >> 24;
+ payload[5] = frag_offset >> 16;
+ payload[6] = frag_offset >> 8;
+ payload[7] = frag_offset & 0xff;
+
+ payload += 8;
+ payload_len -= 8;
+
+ frag_offset += payload_len;
+ avail -= payload_len;
+
+ /* last fragment of this unit: set the marker bit */
+ if (avail == 0)
+ gst_rtp_buffer_set_marker (&rtp, TRUE);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ /* create a new buf to hold the payload */
+ GST_DEBUG_OBJECT (rtpgstpay, "take %u bytes from adapter", payload_len);
+ paybuf = gst_adapter_take_buffer_fast (rtpgstpay->adapter, payload_len);
+
+ if (GST_BUFFER_FLAG_IS_SET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT))
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ /* create a new group to hold the rtp header and the payload */
+ gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpgstpay), outbuf, paybuf, 0);
+ outbuf = gst_buffer_append (outbuf, paybuf);
+
+ GST_BUFFER_PTS (outbuf) = timestamp;
+
+ /* and add to list */
+ gst_buffer_list_insert (list, -1, outbuf);
+ }
+
+ /* keep CV, clear the caps (C) and delta (D) flags for the next unit */
+ rtpgstpay->flags &= 0x70;
+ rtpgstpay->etype = 0;
+ rtpgstpay->pending_buffers = g_list_append (rtpgstpay->pending_buffers, list);
+
+ return TRUE;
+}
+
+/* Packetize whatever is left in the adapter, then push every queued
+ * GstBufferList downstream in order. On a flow error the current list has
+ * already been consumed by push_list; the remaining lists stay queued for
+ * a later flush. */
+static GstFlowReturn
+gst_rtp_gst_pay_flush (GstRtpGSTPay * rtpgstpay, GstClockTime timestamp)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GList *iter;
+
+ gst_rtp_gst_pay_create_from_adapter (rtpgstpay, timestamp);
+
+ iter = rtpgstpay->pending_buffers;
+ while (iter) {
+ GstBufferList *list = iter->data;
+
+ /* unlink before pushing; push_list takes ownership of the list */
+ rtpgstpay->pending_buffers = iter =
+ g_list_delete_link (rtpgstpay->pending_buffers, iter);
+
+ /* push the whole buffer list at once */
+ ret = gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpgstpay),
+ list);
+ if (ret != GST_FLOW_OK)
+ break;
+ }
+
+ return ret;
+}
+
+/* Copy @size bytes of @data into a new buffer, prefixed with the length
+ * encoded as a big-endian base-128 varint: 7 payload bits per byte, the
+ * top bit set on every byte except the last (continuation marker) — the
+ * format described for the C flag at the top of this file. The @rtpgstpay
+ * argument is unused, kept for call-site symmetry. */
+static GstBuffer *
+make_data_buffer (GstRtpGSTPay * rtpgstpay, gchar * data, guint size)
+{
+ guint plen;
+ guint8 *ptr;
+ GstBuffer *outbuf;
+ GstMapInfo map;
+
+ /* calculate length: number of 7-bit groups needed to hold @size */
+ plen = 1;
+ while (size >> (7 * plen))
+ plen++;
+
+ outbuf = gst_buffer_new_allocate (NULL, plen + size, NULL);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ ptr = map.data;
+
+ /* write length, most significant 7-bit group first */
+ while (plen) {
+ plen--;
+ *ptr++ = ((plen > 0) ? 0x80 : 0) | ((size >> (7 * plen)) & 0x7f);
+ }
+ /* copy data */
+ memcpy (ptr, data, size);
+ gst_buffer_unmap (outbuf, &map);
+
+ return outbuf;
+}
+
+/* Queue a serialized caps packet (C flag set, caps version @cv) in the
+ * adapter. It is not flushed here: a following buffer may be merged into
+ * the same packet. Duplicate requests for the version already pending are
+ * ignored; if different caps are pending they are packetized standalone
+ * first so two caps can never share a packet. */
+static void
+gst_rtp_gst_pay_send_caps (GstRtpGSTPay * rtpgstpay, guint8 cv, GstCaps * caps)
+{
+ gchar *capsstr;
+ guint capslen;
+ GstBuffer *outbuf;
+
+ if (rtpgstpay->flags == ((1 << 7) | (cv << 4))) {
+ /* If caps for the current CV are pending in the adapter already, do
+ * nothing at all here
+ */
+ return;
+ } else if (rtpgstpay->flags & (1 << 7)) {
+ /* Create a new standalone caps packet if caps were already pending.
+ * The next caps are going to be merged with the following buffer or
+ * sent standalone if another event is sent first */
+ gst_rtp_gst_pay_create_from_adapter (rtpgstpay, GST_CLOCK_TIME_NONE);
+ }
+
+ capsstr = gst_caps_to_string (caps);
+ capslen = strlen (capsstr);
+ /* for 0 byte */
+ capslen++;
+
+ GST_DEBUG_OBJECT (rtpgstpay, "sending caps=%s", capsstr);
+
+ /* make a data buffer of it */
+ outbuf = make_data_buffer (rtpgstpay, capsstr, capslen);
+ g_free (capsstr);
+
+ /* store in adapter, we don't flush yet, buffer might follow */
+ rtpgstpay->flags = (1 << 7) | (cv << 4);
+ gst_adapter_push (rtpgstpay->adapter, outbuf);
+}
+
+/* GstRTPBasePayload::set_caps. Advances the caps version counter (CV wraps
+ * at 8), queues the caps for inline transmission in the RTP stream, and
+ * also advertises them base64-encoded in the output caps so they end up in
+ * the SDP ("caps" / "capsversion" fields). */
+static gboolean
+gst_rtp_gst_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ GstRtpGSTPay *rtpgstpay;
+ gboolean res;
+ gchar *capsstr, *capsenc, *capsver;
+ guint capslen;
+
+ rtpgstpay = GST_RTP_GST_PAY (payload);
+
+ capsstr = gst_caps_to_string (caps);
+ capslen = strlen (capsstr);
+
+ /* encode without 0 byte */
+ capsenc = g_base64_encode ((guchar *) capsstr, capslen);
+ GST_DEBUG_OBJECT (payload, "caps=%s, caps(base64)=%s", capsstr, capsenc);
+ g_free (capsstr);
+
+ /* Send the new caps */
+ rtpgstpay->current_CV = rtpgstpay->next_CV;
+ rtpgstpay->next_CV = (rtpgstpay->next_CV + 1) & 0x7;
+ gst_rtp_gst_pay_send_caps (rtpgstpay, rtpgstpay->current_CV, caps);
+
+ /* make caps for SDP */
+ capsver = g_strdup_printf ("%d", rtpgstpay->current_CV);
+ res =
+ gst_rtp_base_payload_set_outcaps (payload, "caps", G_TYPE_STRING, capsenc,
+ "capsversion", G_TYPE_STRING, capsver, NULL);
+ g_free (capsenc);
+ g_free (capsver);
+
+ return res;
+}
+
+/* Serialize @event's structure into a standalone packet with ETYPE @etype.
+ * Any pending caps data is packetized first, and the event itself is
+ * packetized immediately afterwards, so an event never shares a packet
+ * with caps or buffer data. Nothing is pushed here; the caller flushes. */
+static void
+gst_rtp_gst_pay_send_event (GstRtpGSTPay * rtpgstpay, guint etype,
+ GstEvent * event)
+{
+ const GstStructure *s;
+ gchar *estr;
+ guint elen;
+ GstBuffer *outbuf;
+
+ /* Create the standalone caps packet if an inlined caps was pending */
+ gst_rtp_gst_pay_create_from_adapter (rtpgstpay, GST_CLOCK_TIME_NONE);
+
+ s = gst_event_get_structure (event);
+
+ estr = gst_structure_to_string (s);
+ elen = strlen (estr);
+ /* for 0 byte */
+ elen++;
+ outbuf = make_data_buffer (rtpgstpay, estr, elen);
+ GST_DEBUG_OBJECT (rtpgstpay, "sending event=%s", estr);
+ g_free (estr);
+
+ rtpgstpay->etype = etype;
+ gst_adapter_push (rtpgstpay->adapter, outbuf);
+ /* Create the event packet now to avoid conflict with data/caps packets */
+ gst_rtp_gst_pay_create_from_adapter (rtpgstpay, GST_CLOCK_TIME_NONE);
+}
+
+/* GstRTPBasePayload::sink_event. The event is first forwarded to the base
+ * class (with an extra ref, released at the end), then serializable events
+ * are mapped to the ETYPE values documented at the top of this file:
+ * 1 = tag, 2 = custom-downstream, 3 = custom-both, 4 = stream-start.
+ * Stream tags and the stream-id are also cached for periodic
+ * re-transmission by send_config. */
+static gboolean
+gst_rtp_gst_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ gboolean ret;
+ GstRtpGSTPay *rtpgstpay;
+ guint etype = 0;
+
+ rtpgstpay = GST_RTP_GST_PAY (payload);
+
+ /* request a config (caps/tags) resend on the next buffer */
+ if (gst_video_event_is_force_key_unit (event)) {
+ g_atomic_int_set (&rtpgstpay->force_config, TRUE);
+ }
+
+ ret =
+ GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload,
+ gst_event_ref (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ gst_rtp_gst_pay_reset (rtpgstpay, FALSE);
+ break;
+ case GST_EVENT_TAG:{
+ GstTagList *tags;
+
+ gst_event_parse_tag (event, &tags);
+
+ if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
+ GstTagList *old;
+
+ GST_DEBUG_OBJECT (rtpgstpay, "storing stream tags %" GST_PTR_FORMAT,
+ tags);
+ if ((old = rtpgstpay->taglist))
+ gst_tag_list_unref (old);
+ rtpgstpay->taglist = gst_tag_list_ref (tags);
+ }
+ etype = 1;
+ break;
+ }
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ etype = 2;
+ break;
+ case GST_EVENT_CUSTOM_BOTH:
+ etype = 3;
+ break;
+ case GST_EVENT_STREAM_START:{
+ const gchar *stream_id = NULL;
+
+ /* a new stream invalidates previously cached stream tags */
+ if (rtpgstpay->taglist)
+ gst_tag_list_unref (rtpgstpay->taglist);
+ rtpgstpay->taglist = NULL;
+
+ gst_event_parse_stream_start (event, &stream_id);
+ if (stream_id) {
+ g_free (rtpgstpay->stream_id);
+ rtpgstpay->stream_id = g_strdup (stream_id);
+ }
+ etype = 4;
+ break;
+ }
+ default:
+ GST_LOG_OBJECT (rtpgstpay, "no event for %s",
+ GST_EVENT_TYPE_NAME (event));
+ break;
+ }
+ if (etype) {
+ GST_DEBUG_OBJECT (rtpgstpay, "make event type %d for %s",
+ etype, GST_EVENT_TYPE_NAME (event));
+ gst_rtp_gst_pay_send_event (rtpgstpay, etype, event);
+ /* Do not send stream-start right away since caps/new-segment were not yet
+ sent, so our data would be considered invalid */
+ if (etype != 4) {
+ /* flush the adapter immediately */
+ gst_rtp_gst_pay_flush (rtpgstpay, GST_CLOCK_TIME_NONE);
+ }
+ }
+
+ gst_event_unref (event);
+
+ return ret;
+}
+
+/* GstRTPBasePayload::src_event. Upstream force-key-unit requests also
+ * trigger a config (caps/tags) resend on the next buffer; everything is
+ * then handed to the base class. */
+static gboolean
+gst_rtp_gst_pay_src_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ GstRtpGSTPay *rtpgstpay;
+
+ rtpgstpay = GST_RTP_GST_PAY (payload);
+
+ if (gst_video_event_is_force_key_unit (event)) {
+ g_atomic_int_set (&rtpgstpay->force_config, TRUE);
+ }
+
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->src_event (payload, event);
+}
+
+/* Re-send the configuration data in-band: the cached stream tags (preceded
+ * by a stream-start to clear old tags on the receiver, when a stream-id is
+ * known) and the current sink caps. Called from handle_buffer when the
+ * config-interval elapses or a config was forced. */
+static void
+gst_rtp_gst_pay_send_config (GstRtpGSTPay * rtpgstpay,
+ GstClockTime running_time)
+{
+ GstPad *pad = GST_RTP_BASE_PAYLOAD_SINKPAD (rtpgstpay);
+ GstCaps *caps = NULL;
+ GstEvent *tag = NULL;
+ GstEvent *stream_start = NULL;
+
+ GST_DEBUG_OBJECT (rtpgstpay, "time to send config");
+ /* Send tags */
+ if (rtpgstpay->taglist && !gst_tag_list_is_empty (rtpgstpay->taglist))
+ tag = gst_event_new_tag (gst_tag_list_ref (rtpgstpay->taglist));
+ if (tag) {
+ /* Send start-stream to clear tags */
+ if (rtpgstpay->stream_id)
+ stream_start = gst_event_new_stream_start (rtpgstpay->stream_id);
+ if (stream_start) {
+ gst_rtp_gst_pay_send_event (rtpgstpay, 4, stream_start);
+ gst_event_unref (stream_start);
+ }
+ gst_rtp_gst_pay_send_event (rtpgstpay, 1, tag);
+ gst_event_unref (tag);
+ }
+ /* send caps */
+ caps = gst_pad_get_current_caps (pad);
+ if (caps) {
+ gst_rtp_gst_pay_send_caps (rtpgstpay, rtpgstpay->current_CV, caps);
+ gst_caps_unref (caps);
+ }
+ rtpgstpay->last_config = running_time;
+}
+
+/* GstRTPBasePayload::handle_buffer. Optionally re-sends config data
+ * (caps/tags) when config-interval has elapsed or a resend was forced,
+ * marks delta units with the D flag, then pushes the buffer through the
+ * adapter and flushes it as RTP packets with the buffer's PTS. */
+static GstFlowReturn
+gst_rtp_gst_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstFlowReturn ret;
+ GstRtpGSTPay *rtpgstpay;
+ GstClockTime timestamp, running_time;
+
+ rtpgstpay = GST_RTP_GST_PAY (basepayload);
+
+ timestamp = GST_BUFFER_PTS (buffer);
+ running_time =
+ gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+ timestamp);
+
+ /* check if we need to send the caps and taglist now */
+ if (rtpgstpay->config_interval > 0
+ || g_atomic_int_compare_and_exchange (&rtpgstpay->force_config, TRUE,
+ FALSE)) {
+ GST_DEBUG_OBJECT (rtpgstpay,
+ "running time %" GST_TIME_FORMAT ", last config %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time), GST_TIME_ARGS (rtpgstpay->last_config));
+
+ if (running_time != GST_CLOCK_TIME_NONE &&
+ rtpgstpay->last_config != GST_CLOCK_TIME_NONE) {
+ guint64 diff;
+
+ /* calculate diff between last SPS/PPS in milliseconds */
+ if (running_time > rtpgstpay->last_config)
+ diff = running_time - rtpgstpay->last_config;
+ else
+ diff = 0;
+
+ GST_DEBUG_OBJECT (rtpgstpay,
+ "interval since last config %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
+
+ /* bigger than interval, queue SPS/PPS */
+ if (GST_TIME_AS_SECONDS (diff) >= rtpgstpay->config_interval)
+ gst_rtp_gst_pay_send_config (rtpgstpay, running_time);
+ } else {
+ /* no valid time reference yet: send the config unconditionally */
+ gst_rtp_gst_pay_send_config (rtpgstpay, running_time);
+ }
+ }
+
+ /* caps always from SDP for now */
+ if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT))
+ rtpgstpay->flags |= (1 << 3); /* set the D (delta unit) flag */
+
+ gst_adapter_push (rtpgstpay->adapter, buffer);
+ ret = gst_rtp_gst_pay_flush (rtpgstpay, timestamp);
+
+ return ret;
+}
diff --git a/gst/rtp/gstrtpgstpay.h b/gst/rtp/gstrtpgstpay.h
new file mode 100644
index 0000000000..2294e174ef
--- /dev/null
+++ b/gst/rtp/gstrtpgstpay.h
@@ -0,0 +1,71 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_GST_PAY_H__
+#define __GST_RTP_GST_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_GST_PAY \
+ (gst_rtp_gst_pay_get_type())
+#define GST_RTP_GST_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_GST_PAY,GstRtpGSTPay))
+#define GST_RTP_GST_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_GST_PAY,GstRtpGSTPayClass))
+#define GST_IS_RTP_GST_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_GST_PAY))
+#define GST_IS_RTP_GST_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_GST_PAY))
+
+typedef struct _GstRtpGSTPay GstRtpGSTPay;
+typedef struct _GstRtpGSTPayClass GstRtpGSTPayClass;
+
+/* Instance struct of the rtpgstpay element. Payload data is staged in
+ * @adapter, packetized into GstBufferLists queued on @pending_buffers, and
+ * each packet carries @flags/@etype in its 8-byte payload header. */
+struct _GstRtpGSTPay
+{
+  GstRTPBasePayload payload;
+
+ GList *pending_buffers; /* GstBufferList */
+ GstAdapter *adapter; /* staged caps/event/buffer payload */
+ guint8 flags; /* C/CV/D bits for the next packet header */
+ guint8 etype; /* ETYPE for the next packet header (0 = none) */
+
+ guint8 current_CV; /* CV field of incoming caps*/
+ guint8 next_CV; /* CV to assign to the next caps (wraps at 8) */
+
+ gchar *stream_id; /* cached stream-id for config resends */
+ GstTagList *taglist; /* cached stream tags for config resends */
+ guint config_interval; /* "config-interval" property, seconds */
+ GstClockTime last_config; /* running time of the last config resend */
+ gboolean force_config; /* atomically set to force a config resend */
+};
+
+struct _GstRtpGSTPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_gst_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_GST_PAY_H__ */
diff --git a/gst/rtp/gstrtph261depay.c b/gst/rtp/gstrtph261depay.c
new file mode 100644
index 0000000000..23a888a4ea
--- /dev/null
+++ b/gst/rtp/gstrtph261depay.c
@@ -0,0 +1,289 @@
+/* GStreamer
+ *
+ * Copyright (C) <2014> Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-rtph261depay
+ * @title: rtph261depay
+ * @see_also: rtph261pay
+ *
+ * Extract encoded H.261 video frames from RTP packets according to RFC 4587.
+ * For detailed information see: https://www.rfc-editor.org/rfc/rfc4587.txt
+ *
+ * The depayloader takes an RTP packet and extracts its H.261 stream. It
+ * aggregates the extracted stream until a complete frame is received before
+ * it pushes it downstream.
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 udpsrc caps='application/x-rtp, payload=31' ! rtph261depay ! avdec_h261 ! autovideosink
+ * ]| This example pipeline will depayload and decode an RTP H.261 video stream.
+ * Refer to the rtph261pay example to create the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "gstrtph261depay.h"
+#include "gstrtph261pay.h" /* GstRtpH261PayHeader */
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtph261depay_debug);
+#define GST_CAT_DEFAULT (rtph261depay_debug)
+
+static const guint8 NO_LEFTOVER = 0xFF;
+
+static GstStaticPadTemplate gst_rtp_h261_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h261")
+ );
+
+static GstStaticPadTemplate gst_rtp_h261_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_H261_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H261\"; "
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H261\"")
+ );
+#define parent_class gst_rtp_h261_depay_parent_class
+G_DEFINE_TYPE (GstRtpH261Depay, gst_rtp_h261_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph261depay, "rtph261depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H261_DEPAY, rtp_element_init (plugin));
+
+/* GstRTPBaseDepayload::process_rtp_packet for H.261 (RFC 4587).
+ * Strips the 4-byte H.261 payload header, merges partial bytes across
+ * packets via the SBIT/EBIT fields, and aggregates the bitstream in the
+ * adapter until the RTP marker bit ends the frame, at which point one
+ * buffer with the complete frame is returned (NULL otherwise).
+ * Fix vs. the original: the 32-bit picture-start-code read is now guarded
+ * by the payload length check, so short payloads (1-3 bytes) can no longer
+ * cause an out-of-bounds read. */
+static GstBuffer *
+gst_rtp_h261_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpH261Depay *depay;
+ GstBuffer *outbuf = NULL;
+ gint payload_len;
+ guint8 *payload;
+ const guint header_len = GST_RTP_H261_PAYLOAD_HEADER_LEN;
+ gboolean marker;
+ GstRtpH261PayHeader *header;
+
+ depay = GST_RTP_H261_DEPAY (depayload);
+
+ if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+ GST_DEBUG_OBJECT (depay, "Discont buffer, flushing adapter");
+ gst_adapter_clear (depay->adapter);
+ depay->leftover = NO_LEFTOVER;
+ depay->start = FALSE;
+ }
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+ payload = gst_rtp_buffer_get_payload (rtp);
+
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ if (payload_len < header_len + 1) {
+ /* Must have at least one byte payload */
+ GST_WARNING_OBJECT (depay, "Dropping packet with invalid payload length");
+ return NULL;
+ }
+
+ header = (GstRtpH261PayHeader *) payload;
+
+ GST_DEBUG_OBJECT (depay,
+ "payload_len: %d, header_len: %d, sbit: %d, ebit: %d, marker %d",
+ payload_len, header_len, header->sbit, header->ebit, marker);
+
+ payload += header_len;
+ payload_len -= header_len;
+
+ if (!depay->start) {
+ /* Look for the 20-bit picture start code (PSC, 0x00010) after skipping
+ * the sbit padding bits. The length check MUST come first: reading the
+ * 32-bit word unconditionally (as the original did) touches memory
+ * past the end of payloads shorter than 4 bytes. */
+ if (payload_len > 4
+ && (GST_READ_UINT32_BE (payload) << header->sbit) >> 12 == 0x10) {
+ GST_DEBUG_OBJECT (depay, "Found picture start code");
+ depay->start = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (depay, "No picture start code yet, skipping payload");
+ goto skip;
+ }
+ }
+
+ if (header->sbit != 0) {
+ /* Take the leftover from previous packet and merge it at the beginning */
+ payload[0] &= 0xFF >> header->sbit;
+ if (depay->leftover != NO_LEFTOVER) {
+ /* Happens if sbit is set for first packet in frame. Then previous byte
+ * has already been flushed. */
+ payload[0] |= depay->leftover;
+ }
+ depay->leftover = NO_LEFTOVER;
+ }
+
+ if (header->ebit == 0) {
+ /* H.261 stream ends on byte boundary, take entire packet */
+ gst_adapter_push (depay->adapter,
+ gst_rtp_buffer_get_payload_subbuffer (rtp, header_len, payload_len));
+ } else {
+ /* Take the entire buffer except for the last byte, which will be kept to
+ * merge with next packet */
+ gst_adapter_push (depay->adapter,
+ gst_rtp_buffer_get_payload_subbuffer (rtp, header_len,
+ payload_len - 1));
+ depay->leftover = payload[payload_len - 1] & (0xFF << header->ebit);
+ }
+
+skip:
+ if (marker) {
+ if (depay->start) {
+ guint avail;
+
+ if (depay->leftover != NO_LEFTOVER) {
+ /* flush the final partial byte before completing the frame */
+ GstBuffer *buf = gst_buffer_new_and_alloc (1);
+ gst_buffer_memset (buf, 0, depay->leftover, 1);
+ gst_adapter_push (depay->adapter, buf);
+ depay->leftover = NO_LEFTOVER;
+ }
+
+ avail = gst_adapter_available (depay->adapter);
+ outbuf = gst_adapter_take_buffer (depay->adapter, avail);
+ gst_rtp_drop_non_video_meta (depay, outbuf);
+
+ /* Note that the I flag does not mean intra frame, but that the entire
+ * stream is intra coded. */
+ if (header->i)
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ GST_DEBUG_OBJECT (depay, "Pushing out a buffer of %u bytes", avail);
+ depay->start = FALSE;
+ } else {
+ /* marker before any PSC: the next packet starts a new frame */
+ depay->start = TRUE;
+ }
+ }
+
+ return outbuf;
+}
+
+/* GstRTPBaseDepayload::set_caps. The output format is always the raw
+ * video/x-h261 bitstream, regardless of the input RTP caps. */
+static gboolean
+gst_rtp_h261_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
+{
+ GstCaps *srccaps;
+
+ srccaps = gst_caps_new_empty_simple ("video/x-h261");
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter), srccaps);
+ gst_caps_unref (srccaps);
+
+ return TRUE;
+}
+
+/* GstElement::change_state. Clears frame-reassembly state when entering
+ * PAUSED. NOTE(review): `leftover` is not reset here, only on discont in
+ * process() and in init() — looks like it could carry a stale byte across
+ * a PAUSED->READY->PAUSED cycle; confirm whether the first buffer after
+ * restart is always a discont. */
+static GstStateChangeReturn
+gst_rtp_h261_depay_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstRtpH261Depay *depay;
+ GstStateChangeReturn ret;
+
+ depay = GST_RTP_H261_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_adapter_clear (depay->adapter);
+ depay->start = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
+
+/* GObject dispose: drop the adapter reference (guarded and NULLed because
+ * dispose may run more than once), then chain up. */
+static void
+gst_rtp_h261_depay_dispose (GObject * object)
+{
+ GstRtpH261Depay *depay;
+
+ depay = GST_RTP_H261_DEPAY (object);
+
+ if (depay->adapter) {
+ gst_object_unref (depay->adapter);
+ depay->adapter = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject class setup: registers pad templates and element metadata and
+ * wires the depayload virtual methods (process_rtp_packet, set_caps),
+ * the dispose handler and the state-change handler. */
+static void
+gst_rtp_h261_depay_class_init (GstRtpH261DepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+ gstrtpbasedepayload_class = GST_RTP_BASE_DEPAYLOAD_CLASS (klass);
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h261_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h261_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP H261 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H261 video from RTP packets (RFC 4587)",
+ "Stian Selnes <stian@pexip.com>");
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_h261_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_h261_depay_setcaps;
+
+ gobject_class->dispose = gst_rtp_h261_depay_dispose;
+
+ gstelement_class->change_state = gst_rtp_h261_depay_change_state;
+
+ GST_DEBUG_CATEGORY_INIT (rtph261depay_debug, "rtph261depay", 0,
+ "H261 Video RTP Depayloader");
+}
+
+/* Instance init: create the frame-reassembly adapter and mark that no
+ * partial byte is carried over from a previous packet. */
+static void
+gst_rtp_h261_depay_init (GstRtpH261Depay * depay)
+{
+ depay->adapter = gst_adapter_new ();
+ depay->leftover = NO_LEFTOVER;
+}
diff --git a/gst/rtp/gstrtph261depay.h b/gst/rtp/gstrtph261depay.h
new file mode 100644
index 0000000000..821eff993a
--- /dev/null
+++ b/gst/rtp/gstrtph261depay.h
@@ -0,0 +1,58 @@
+/* GStreamer
+ * Copyright (C) <2014> Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTP_H261_DEPAY_H__
+#define __GST_RTP_H261_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_H261_DEPAY \
+ (gst_rtp_h261_depay_get_type())
+#define GST_RTP_H261_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H261_DEPAY,GstRtpH261Depay))
+#define GST_RTP_H261_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H261_DEPAY,GstRtpH261DepayClass))
+#define GST_IS_RTP_H261_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H261_DEPAY))
+#define GST_IS_RTP_H261_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H261_DEPAY))
+typedef struct _GstRtpH261Depay GstRtpH261Depay;
+typedef struct _GstRtpH261DepayClass GstRtpH261DepayClass;
+
struct _GstRtpH261Depay
{
  GstRTPBaseDepayload depayload;

  GstAdapter *adapter;          /* accumulates payload fragments of the current frame */
  gboolean start;               /* cleared on READY->NULL; presumably TRUE once a frame
                                 * start has been seen -- confirm in the .c file */
  guint8 leftover;              /* partial byte carried between packets; NO_LEFTOVER
                                 * sentinel when nothing is pending */
};
+
+struct _GstRtpH261DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_h261_depay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_H261_DEPAY_H__ */
diff --git a/gst/rtp/gstrtph261pay.c b/gst/rtp/gstrtph261pay.c
new file mode 100644
index 0000000000..18143252ce
--- /dev/null
+++ b/gst/rtp/gstrtph261pay.c
@@ -0,0 +1,1068 @@
+/* GStreamer
+ * Copyright (C) <2014> Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ *
+ */
+
+/**
+ * SECTION:element-rtph261pay
+ * @title: rtph261pay
+ * @see_also: rtph261depay
+ *
+ * Payload encoded H.261 video frames into RTP packets according to RFC 4587.
+ * For detailed information see: https://www.rfc-editor.org/rfc/rfc4587.txt
+ *
 * The payloader takes an H.261 frame, parses it and splits it into fragments
+ * on MB boundaries in order to match configured MTU size. For each fragment
+ * an RTP packet is constructed with an RTP packet header followed by the
+ * fragment. In addition the payloader will make sure the packetized H.261
+ * stream appears as a continuous bit-stream after depacketization by shifting
+ * the encoded bit-stream of a frame to align with the last significant bit of
+ * the previous frame. This helps interoperability in the case where the
+ * encoder does not produce a continuous bit-stream but the decoder requires
+ * it.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 videotestsrc ! avenc_h261 ! rtph261pay ! udpsink
+ * ]| This will encode a test video and payload it. Refer to the rtph261depay
+ * example to depayload and play the RTP stream.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtph261pay.h"
+#include "gstrtputils.h"
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include <gst/base/gstbitreader.h>
+#include <string.h>
+
+GST_DEBUG_CATEGORY_STATIC (rtph261pay_debug);
+#define GST_CAT_DEFAULT (rtph261pay_debug)
+
+#define GST_RTP_HEADER_LEN 12
+#define GST_RTP_H261_PAYLOAD_HEADER_LEN 4
+
+static GstStaticPadTemplate gst_rtp_h261_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h261")
+ );
+
+static GstStaticPadTemplate gst_rtp_h261_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_H261_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H261\"; "
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H261\"")
+ );
+
+#define parent_class gst_rtp_h261_pay_parent_class
+G_DEFINE_TYPE (GstRtpH261Pay, gst_rtp_h261_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph261pay, "rtph261pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H261_PAY, rtp_element_init (plugin));
+
/* Parser state for a single H.261 macroblock. */
typedef struct
{
  guint32 mba;                  /* absolute macroblock address within the GOB (1-33) */
  guint32 mtype;                /* set of MTYPE_* flags decoded for this MB */
  guint32 quant;                /* quantizer in effect (from GQUANT or MQUANT) */
  gint mvx;                     /* horizontal motion vector component */
  gint mvy;                     /* vertical motion vector component */
  guint endpos;                 /* bit position just past this MB in the frame */
  gint gobn;                    /* group-of-blocks number this MB belongs to */
} Macroblock;
+
/* Per-GOB bookkeeping used while splitting a frame into RTP packets. */
typedef struct
{
  Macroblock last;              /* last macroblock parsed within this GOB */
  guint startpos;               /* bit position of the GOB start code (GBSC) */
  guint endpos;                 /* bit position where this GOB ends */
  guint32 gn;                   /* group number from the GOB header */
  guint32 gquant;               /* GQUANT from the GOB header */
} Gob;
+
+#define PSC_LEN 20
+#define TR_LEN 5
+#define PTYPE_LEN 6
+#define GBSC_LEN 16
+#define GN_LEN 4
+#define GQUANT_LEN 5
+#define GEI_LEN 1
+#define GSPARE_LEN 8
+#define MQUANT_LEN 5
+#define MAX_NUM_GOB 12
+
/* Result codes of the bitstream parsing helpers.  Values < PARSE_OK abort
 * processing of the current frame; the others indicate parsing progress. */
typedef enum
{
  PARSE_END_OF_BUFFER = -2,     /* bit reader ran out of data */
  PARSE_ERROR = -1,             /* no VLC table entry matched the input */
  PARSE_OK = 0,                 /* element decoded successfully */
  PARSE_END_OF_FRAME,           /* reached the zero padding appended after the frame */
  PARSE_END_OF_GOB,             /* reached a GOB start code (not consumed) */
} ParseReturn;
+
+
+#define SKIP_BITS(br,nbits) G_STMT_START { \
+ if (!gst_bit_reader_skip (br, nbits)) \
+ return PARSE_END_OF_BUFFER; \
+ } G_STMT_END
+
+#define GET_BITS(br,val,nbits) G_STMT_START { \
+ if (!gst_bit_reader_get_bits_uint32 (br, val, nbits)) \
+ return PARSE_END_OF_BUFFER; \
+ } G_STMT_END
+
+/* Unchecked since we peek outside the buffer. Ok because of padding. */
+#define PEEK_BITS(br,val,nbits) G_STMT_START { \
+ *val = gst_bit_reader_peek_bits_uint16_unchecked (br, nbits); \
+ } G_STMT_END
+
+
+#define MBA_STUFFING 34
+#define MBA_START_CODE 35
+#define MBA_LEN 35
+#define MBA_WID 4
+/* [code, mask, nbits, mba] */
+static const guint16 mba_table[MBA_LEN][MBA_WID] = {
+ {0x8000, 0x8000, 1, 1},
+ {0x6000, 0xe000, 3, 2},
+ {0x4000, 0xe000, 3, 3},
+ {0x3000, 0xf000, 4, 4},
+ {0x2000, 0xf000, 4, 5},
+ {0x1800, 0xf800, 5, 6},
+ {0x1000, 0xf800, 5, 7},
+ {0x0e00, 0xfe00, 7, 8},
+ {0x0c00, 0xfe00, 7, 9},
+ {0x0b00, 0xff00, 8, 10},
+ {0x0a00, 0xff00, 8, 11},
+ {0x0900, 0xff00, 8, 12},
+ {0x0800, 0xff00, 8, 13},
+ {0x0700, 0xff00, 8, 14},
+ {0x0600, 0xff00, 8, 15},
+ {0x05c0, 0xffc0, 10, 16},
+ {0x0580, 0xffc0, 10, 17},
+ {0x0540, 0xffc0, 10, 18},
+ {0x0500, 0xffc0, 10, 19},
+ {0x04c0, 0xffc0, 10, 20},
+ {0x0480, 0xffc0, 10, 21},
+ {0x0460, 0xffe0, 11, 22},
+ {0x0440, 0xffe0, 11, 23},
+ {0x0420, 0xffe0, 11, 24},
+ {0x0400, 0xffe0, 11, 25},
+ {0x03e0, 0xffe0, 11, 26},
+ {0x03c0, 0xffe0, 11, 27},
+ {0x03a0, 0xffe0, 11, 28},
+ {0x0380, 0xffe0, 11, 29},
+ {0x0360, 0xffe0, 11, 30},
+ {0x0340, 0xffe0, 11, 31},
+ {0x0320, 0xffe0, 11, 32},
+ {0x0300, 0xffe0, 11, 33},
+ {0x01e0, 0xffe0, 11, MBA_STUFFING},
+ {0x0001, 0xffff, 16, MBA_START_CODE},
+};
+
+#define MTYPE_INTRA (1 << 0)
+#define MTYPE_INTER (1 << 1)
+#define MTYPE_MC (1 << 2)
+#define MTYPE_FIL (1 << 3)
+#define MTYPE_MQUANT (1 << 4)
+#define MTYPE_MVD (1 << 5)
+#define MTYPE_CBP (1 << 6)
+#define MTYPE_TCOEFF (1 << 7)
+#define MTYPE_LEN 10
+#define MTYPE_WID 4
+/* [code, mask, nbits, flags] */
+static const guint16 mtype_table[MTYPE_LEN][MTYPE_WID] = {
+ {0x8000, 0x8000, 1, MTYPE_INTER | MTYPE_CBP | MTYPE_TCOEFF},
+ {0x4000, 0xc000, 2,
+ MTYPE_INTER | MTYPE_MC | MTYPE_FIL | MTYPE_MVD | MTYPE_CBP |
+ MTYPE_TCOEFF},
+ {0x2000, 0xe000, 3, MTYPE_INTER | MTYPE_MC | MTYPE_FIL | MTYPE_MVD},
+ {0x1000, 0xf000, 4, MTYPE_INTRA | MTYPE_TCOEFF},
+ {0x0800, 0xf800, 5, MTYPE_INTER | MTYPE_MQUANT | MTYPE_CBP | MTYPE_TCOEFF},
+ {0x0400, 0xfc00, 6,
+ MTYPE_INTER | MTYPE_MC | MTYPE_FIL | MTYPE_MQUANT | MTYPE_MVD |
+ MTYPE_CBP | MTYPE_TCOEFF},
+ {0x0200, 0xfe00, 7, MTYPE_INTRA | MTYPE_MQUANT | MTYPE_TCOEFF},
+ {0x0100, 0xff00, 8,
+ MTYPE_INTER | MTYPE_MC | MTYPE_MVD | MTYPE_CBP | MTYPE_TCOEFF},
+ {0x0080, 0xff80, 9, MTYPE_INTER | MTYPE_MC | MTYPE_MVD},
+ {0x0040, 0xffc0, 10,
+ MTYPE_INTER | MTYPE_MC | MTYPE_MQUANT | MTYPE_MVD | MTYPE_CBP |
+ MTYPE_TCOEFF},
+};
+
+#define MVD_LEN 32
+#define MVD_WID 5
+/* [code, mask, nbits, mvd1, mvd2] */
+static const guint16 mvd_table[MVD_LEN][MVD_WID] = {
+ {0x8000, 0x8000, 1, 0, 0},
+ {0x6000, 0xe000, 3, -1, -1},
+ {0x4000, 0xe000, 3, 1, 1},
+ {0x3000, 0xf000, 4, -2, 30},
+ {0x2000, 0xf000, 4, 2, -30},
+ {0x1800, 0xf800, 5, -3, 29},
+ {0x1000, 0xf800, 5, 3, -29},
+ {0x0e00, 0xfe00, 7, -4, 28},
+ {0x0c00, 0xfe00, 7, 4, -28},
+ {0x0700, 0xff00, 8, -7, 25},
+ {0x0900, 0xff00, 8, -6, 26},
+ {0x0b00, 0xff00, 8, -5, 27},
+ {0x0a00, 0xff00, 8, 5, -27},
+ {0x0800, 0xff00, 8, 6, -26},
+ {0x0600, 0xff00, 8, 7, -25},
+ {0x04c0, 0xffc0, 10, -10, 22},
+ {0x0540, 0xffc0, 10, -9, 23},
+ {0x05c0, 0xffc0, 10, -8, 24},
+ {0x0580, 0xffc0, 10, 8, -24},
+ {0x0500, 0xffc0, 10, 9, -23},
+ {0x0480, 0xffc0, 10, 10, -22},
+ {0x0320, 0xffe0, 11, -16, 16},
+ {0x0360, 0xffe0, 11, -15, 17},
+ {0x03a0, 0xffe0, 11, -14, 18},
+ {0x03e0, 0xffe0, 11, -13, 19},
+ {0x0420, 0xffe0, 11, -12, 20},
+ {0x0460, 0xffe0, 11, -11, 21},
+ {0x0440, 0xffe0, 11, 11, -21},
+ {0x0400, 0xffe0, 11, 12, -20},
+ {0x03c0, 0xffe0, 11, 13, -19},
+ {0x0380, 0xffe0, 11, 14, -18},
+ {0x0340, 0xffe0, 11, 15, -17},
+};
+
+#define CBP_LEN 63
+/* [code, mask, nbits, cbp] */
+static const guint16 cbp_table[CBP_LEN][4] = {
+ {0xe000, 0xe000, 3, 60},
+ {0xd000, 0xf000, 4, 4},
+ {0xc000, 0xf000, 4, 8},
+ {0xb000, 0xf000, 4, 16},
+ {0xa000, 0xf000, 4, 32},
+ {0x9800, 0xf800, 5, 12},
+ {0x9000, 0xf800, 5, 48},
+ {0x8800, 0xf800, 5, 20},
+ {0x8000, 0xf800, 5, 40},
+ {0x7800, 0xf800, 5, 28},
+ {0x7000, 0xf800, 5, 44},
+ {0x6800, 0xf800, 5, 52},
+ {0x6000, 0xf800, 5, 56},
+ {0x5800, 0xf800, 5, 1},
+ {0x5000, 0xf800, 5, 61},
+ {0x4800, 0xf800, 5, 2},
+ {0x4000, 0xf800, 5, 62},
+ {0x3c00, 0xfc00, 6, 24},
+ {0x3800, 0xfc00, 6, 36},
+ {0x3400, 0xfc00, 6, 3},
+ {0x3000, 0xfc00, 6, 63},
+ {0x2e00, 0xfe00, 7, 5},
+ {0x2c00, 0xfe00, 7, 9},
+ {0x2a00, 0xfe00, 7, 17},
+ {0x2800, 0xfe00, 7, 33},
+ {0x2600, 0xfe00, 7, 6},
+ {0x2400, 0xfe00, 7, 10},
+ {0x2200, 0xfe00, 7, 18},
+ {0x2000, 0xfe00, 7, 34},
+ {0x1f00, 0xff00, 8, 7},
+ {0x1e00, 0xff00, 8, 11},
+ {0x1d00, 0xff00, 8, 19},
+ {0x1c00, 0xff00, 8, 35},
+ {0x1b00, 0xff00, 8, 13},
+ {0x1a00, 0xff00, 8, 49},
+ {0x1900, 0xff00, 8, 21},
+ {0x1800, 0xff00, 8, 41},
+ {0x1700, 0xff00, 8, 14},
+ {0x1600, 0xff00, 8, 50},
+ {0x1500, 0xff00, 8, 22},
+ {0x1400, 0xff00, 8, 42},
+ {0x1300, 0xff00, 8, 15},
+ {0x1200, 0xff00, 8, 51},
+ {0x1100, 0xff00, 8, 23},
+ {0x1000, 0xff00, 8, 43},
+ {0x0f00, 0xff00, 8, 25},
+ {0x0e00, 0xff00, 8, 37},
+ {0x0d00, 0xff00, 8, 26},
+ {0x0c00, 0xff00, 8, 38},
+ {0x0b00, 0xff00, 8, 29},
+ {0x0a00, 0xff00, 8, 45},
+ {0x0900, 0xff00, 8, 53},
+ {0x0800, 0xff00, 8, 57},
+ {0x0700, 0xff00, 8, 30},
+ {0x0600, 0xff00, 8, 46},
+ {0x0500, 0xff00, 8, 54},
+ {0x0400, 0xff00, 8, 58},
+ {0x0380, 0xff80, 9, 31},
+ {0x0300, 0xff80, 9, 47},
+ {0x0280, 0xff80, 9, 55},
+ {0x0200, 0xff80, 9, 59},
+ {0x0180, 0xff80, 9, 27},
+ {0x0100, 0xff80, 9, 39},
+};
+
+#define TCOEFF_EOB 0xffff
+#define TCOEFF_ESC 0xfffe
+#define TCOEFF_LEN 65
+/* [code, mask, nbits, run, level] */
+static const guint16 tcoeff_table[TCOEFF_LEN][5] = {
+ {0x8000, 0xc000, 2, TCOEFF_EOB, 0}, /* Not available for first coeff */
+ /* {0x8000, 0x8000, 2, 0, 1}, *//* Available only for first Inter coeff */
+ {0xc000, 0xc000, 3, 0, 1}, /* Not available for first coeff */
+ {0x6000, 0xe000, 4, 1, 1},
+ {0x4000, 0xf000, 5, 0, 2},
+ {0x5000, 0xf000, 5, 2, 1},
+ {0x2800, 0xf800, 6, 0, 3},
+ {0x3800, 0xf800, 6, 3, 1},
+ {0x3000, 0xf800, 6, 4, 1},
+ {0x0400, 0xfc00, 6, TCOEFF_ESC, 0},
+ {0x1800, 0xfc00, 7, 1, 2},
+ {0x1c00, 0xfc00, 7, 5, 1},
+ {0x1400, 0xfc00, 7, 6, 1},
+ {0x1000, 0xfc00, 7, 7, 1},
+ {0x0c00, 0xfe00, 8, 0, 4},
+ {0x0800, 0xfe00, 8, 2, 2},
+ {0x0e00, 0xfe00, 8, 8, 1},
+ {0x0a00, 0xfe00, 8, 9, 1},
+ {0x2600, 0xff00, 9, 0, 5},
+ {0x2100, 0xff00, 9, 0, 6},
+ {0x2500, 0xff00, 9, 1, 3},
+ {0x2400, 0xff00, 9, 3, 2},
+ {0x2700, 0xff00, 9, 10, 1},
+ {0x2300, 0xff00, 9, 11, 1},
+ {0x2200, 0xff00, 9, 12, 1},
+ {0x2000, 0xff00, 9, 13, 1},
+ {0x0280, 0xffc0, 11, 0, 7},
+ {0x0300, 0xffc0, 11, 1, 4},
+ {0x02c0, 0xffc0, 11, 2, 3},
+ {0x03c0, 0xffc0, 11, 4, 2},
+ {0x0240, 0xffc0, 11, 5, 2},
+ {0x0380, 0xffc0, 11, 14, 1},
+ {0x0340, 0xffc0, 11, 15, 1},
+ {0x0200, 0xffc0, 11, 16, 1},
+ {0x01d0, 0xfff0, 13, 0, 8},
+ {0x0180, 0xfff0, 13, 0, 9},
+ {0x0130, 0xfff0, 13, 0, 10},
+ {0x0100, 0xfff0, 13, 0, 11},
+ {0x01b0, 0xfff0, 13, 1, 5},
+ {0x0140, 0xfff0, 13, 2, 4},
+ {0x01c0, 0xfff0, 13, 3, 3},
+ {0x0120, 0xfff0, 13, 4, 3},
+ {0x01e0, 0xfff0, 13, 6, 2},
+ {0x0150, 0xfff0, 13, 7, 2},
+ {0x0110, 0xfff0, 13, 8, 2},
+ {0x01f0, 0xfff0, 13, 17, 1},
+ {0x01a0, 0xfff0, 13, 18, 1},
+ {0x0190, 0xfff0, 13, 19, 1},
+ {0x0170, 0xfff0, 13, 20, 1},
+ {0x0160, 0xfff0, 13, 21, 1},
+ {0x00d0, 0xfff8, 14, 0, 12},
+ {0x00c8, 0xfff8, 14, 0, 13},
+ {0x00c0, 0xfff8, 14, 0, 14},
+ {0x00b8, 0xfff8, 14, 0, 15},
+ {0x00b0, 0xfff8, 14, 1, 6},
+ {0x00a8, 0xfff8, 14, 1, 7},
+ {0x00a0, 0xfff8, 14, 2, 5},
+ {0x0098, 0xfff8, 14, 3, 4},
+ {0x0090, 0xfff8, 14, 5, 3},
+ {0x0088, 0xfff8, 14, 9, 2},
+ {0x0080, 0xfff8, 14, 10, 2},
+ {0x00f8, 0xfff8, 14, 22, 1},
+ {0x00f0, 0xfff8, 14, 23, 1},
+ {0x00e8, 0xfff8, 14, 24, 1},
+ {0x00e0, 0xfff8, 14, 25, 1},
+ {0x00d8, 0xfff8, 14, 26, 1},
+};
+
/* Decode the next macroblock address (MBA) VLC from @br.
 *
 * MBA stuffing codes are consumed and skipped.  A start code returns
 * PARSE_END_OF_GOB *without* advancing the reader, so the caller can
 * re-parse the GOB header.  An all-zero 16-bit peek means we have run into
 * the zero padding appended after the frame (PARSE_END_OF_FRAME). */
static ParseReturn
decode_mba (GstBitReader * br, gint * mba)
{
  gint i;
  guint16 code;

  *mba = -1;
  do {
    PEEK_BITS (br, &code, 16);
    for (i = 0; i < MBA_LEN; i++) {
      if ((code & mba_table[i][1]) == mba_table[i][0]) {
        *mba = mba_table[i][3];

        if (*mba == MBA_START_CODE)
          return PARSE_END_OF_GOB;
        SKIP_BITS (br, mba_table[i][2]);
        if (*mba != MBA_STUFFING)
          return PARSE_OK;
        /* Stuffing was skipped; the inner loop keeps scanning with the now
         * stale `code`, but no later table entry can match a stuffing
         * pattern, so the outer loop re-peeks fresh bits. */
      }
    }
  } while (*mba == MBA_STUFFING);

  /* 0x0 indicates end of frame since we appended 0-bytes */
  if (code == 0x0)
    return PARSE_END_OF_FRAME;

  return PARSE_ERROR;
}
+
+static ParseReturn
+decode_mtype (GstBitReader * br, guint * mtype)
+{
+ gint i;
+ guint16 code;
+
+ PEEK_BITS (br, &code, 16);
+ for (i = 0; i < MTYPE_LEN; i++) {
+ if ((code & mtype_table[i][1]) == mtype_table[i][0]) {
+ SKIP_BITS (br, mtype_table[i][2]);
+ *mtype = mtype_table[i][3];
+ return PARSE_OK;
+ }
+ }
+
+ return PARSE_ERROR;
+}
+
+static ParseReturn
+decode_mvd (GstBitReader * br, gint * mvd1, gint * mvd2)
+{
+ gint i;
+ guint16 code;
+
+ PEEK_BITS (br, &code, 16);
+ for (i = 0; i < MVD_LEN; i++) {
+ if ((code & mvd_table[i][1]) == mvd_table[i][0]) {
+ SKIP_BITS (br, mvd_table[i][2]);
+ *mvd1 = (gint16) mvd_table[i][3];
+ *mvd2 = (gint16) mvd_table[i][4];
+ return PARSE_OK;
+ }
+ }
+
+ return PARSE_ERROR;
+}
+
+static ParseReturn
+decode_cbp (GstBitReader * br, guint * cbp)
+{
+ gint i;
+ guint16 code;
+
+ PEEK_BITS (br, &code, 16);
+ for (i = 0; i < CBP_LEN; i++) {
+ if ((code & cbp_table[i][1]) == cbp_table[i][0]) {
+ SKIP_BITS (br, cbp_table[i][2]);
+ *cbp = cbp_table[i][3];
+ return PARSE_OK;
+ }
+ }
+
+ return PARSE_ERROR;
+}
+
/* Skip over the transform coefficients (TCOEFF) of one block.
 *
 * The payloader never needs the coefficient values; it only has to consume
 * the right number of bits so that macroblock boundaries can be found.
 * The first coefficient uses special codes depending on @mtype; the rest
 * are regular run/level VLCs terminated by an EOB code. */
static ParseReturn
decode_tcoeff (GstBitReader * br, guint mtype)
{
  gint i;
  guint16 code;
  gboolean eob;

  /* Special handling of first coeff */
  if (mtype & MTYPE_INTER) {
    /* Inter, different vlc since EOB is not allowed */
    PEEK_BITS (br, &code, 16);
    if (code & 0x8000) {
      SKIP_BITS (br, 2);
      GST_TRACE ("tcoeff first inter special");
    } else {
      /* Fallthrough. Let the first coeff be handled like other coeffs since
       * the vlc is the same as long as the first bit is not set. */
    }
  } else {
    /* Intra, first coeff is fixed 8-bit */
    GST_TRACE ("tcoeff first intra special");
    SKIP_BITS (br, 8);
  }

  /* Block must end with EOB. */
  eob = FALSE;
  while (!eob) {
    PEEK_BITS (br, &code, 16);
    for (i = 0; i < TCOEFF_LEN; i++) {
      if ((code & tcoeff_table[i][1]) == tcoeff_table[i][0]) {
        GST_TRACE ("tcoeff vlc[%d], run=%d, level=%d", i, tcoeff_table[i][3],
            tcoeff_table[i][4]);
        SKIP_BITS (br, tcoeff_table[i][2]);
        if (tcoeff_table[i][3] == TCOEFF_EOB) {
          eob = TRUE;
        } else if (tcoeff_table[i][3] == TCOEFF_ESC) {
#if 0
          guint16 val;
          val = gst_bit_reader_peek_bits_uint16_unchecked (br, 6 + 8);
          GST_TRACE ("esc run=%d, level=%d", val >> 8, (gint8) (val & 0xff));
#endif
          /* Escape code: fixed-length 6-bit run + 8-bit level follow. */
          SKIP_BITS (br, 6 + 8);
        }
        break;
      }
    }
    if (i == TCOEFF_LEN)
      /* No matching VLC */
      return PARSE_ERROR;
  }

  return PARSE_OK;
}
+
+static gint
+find_picture_header_offset (const guint8 * data, gsize size)
+{
+ gint i;
+ guint32 val;
+
+ if (size < 4)
+ return -1;
+
+ val = GST_READ_UINT32_BE (data);
+ for (i = 0; i < 8; i++) {
+ if ((val >> (12 - i)) == 0x10)
+ return i;
+ }
+
+ return -1;
+}
+
+static ParseReturn
+parse_picture_header (GstRtpH261Pay * pay, GstBitReader * br, gint * num_gob)
+{
+ guint32 val;
+
+ GET_BITS (br, &val, PSC_LEN);
+ if (val != 0x10)
+ return PARSE_ERROR;
+ SKIP_BITS (br, TR_LEN);
+ GET_BITS (br, &val, PTYPE_LEN);
+ *num_gob = (val & 0x04) == 0 ? 3 : 12;
+
+ return PARSE_OK;
+}
+
+static ParseReturn
+parse_gob_header (GstRtpH261Pay * pay, GstBitReader * br, Gob * gob)
+{
+ guint32 val;
+
+ GET_BITS (br, &val, GBSC_LEN);
+ if (val != 0x01)
+ return PARSE_ERROR;
+ GET_BITS (br, &gob->gn, GN_LEN);
+ GST_TRACE_OBJECT (pay, "Parsing GOB %d", gob->gn);
+
+ GET_BITS (br, &gob->gquant, GQUANT_LEN);
+ GST_TRACE_OBJECT (pay, "GQUANT %d", gob->gquant);
+ GET_BITS (br, &val, GEI_LEN);
+ while (val != 0) {
+ SKIP_BITS (br, GSPARE_LEN);
+ GET_BITS (br, &val, GEI_LEN);
+ }
+
+ return PARSE_OK;
+}
+
/* Parse one macroblock starting at the reader's current position.
 *
 * @prev is the previously parsed macroblock of the same GOB (mba == 0 means
 * none yet); it supplies the MBA base, the inherited quantizer and the
 * motion vector predictor.  On PARSE_OK, @mb is fully filled in and
 * mb->endpos marks the bit just past the macroblock. */
static ParseReturn
parse_mb (GstRtpH261Pay * pay, GstBitReader * br, const Macroblock * prev,
    Macroblock * mb)
{
  gint mba_diff;
  guint cbp;
  ParseReturn ret;

  /* Default CBP: all six blocks coded (used when MTYPE has no CBP field). */
  cbp = 0x3f;
  /* Quantizer is inherited unless MQUANT overrides it below. */
  mb->quant = prev->quant;

  if ((ret = decode_mba (br, &mba_diff)) != PARSE_OK)
    return ret;
  /* MBA is coded differentially against the previous MB of the GOB. */
  mb->mba = prev->mba == 0 ? mba_diff : prev->mba + mba_diff;
  GST_TRACE_OBJECT (pay, "Parse MBA %d (mba_diff %d)", mb->mba, mba_diff);

  if ((ret = decode_mtype (br, &mb->mtype)) != PARSE_OK)
    return ret;
  GST_TRACE_OBJECT (pay,
      "MTYPE: inter %d, mc %d, fil %d, mquant %d, mvd %d, cbp %d, tcoeff %d",
      (mb->mtype & MTYPE_INTER) != 0, (mb->mtype & MTYPE_MC) != 0,
      (mb->mtype & MTYPE_FIL) != 0, (mb->mtype & MTYPE_MQUANT) != 0,
      (mb->mtype & MTYPE_MVD) != 0, (mb->mtype & MTYPE_CBP) != 0,
      (mb->mtype & MTYPE_TCOEFF) != 0);

  if (mb->mtype & MTYPE_MQUANT) {
    GET_BITS (br, &mb->quant, MQUANT_LEN);
    GST_TRACE_OBJECT (pay, "MQUANT: %d", mb->quant);
  }

  if (mb->mtype & MTYPE_MVD) {
    gint i, pmv[2], mv[2];

    /* The motion vector predictor is reset at the start of each GOB row
     * (MBA 1, 12, 23), after a gap in MBAs, or when the previous MB was
     * not inter coded; otherwise it is the previous MB's vector. */
    if (mb->mba == 1 || mb->mba == 12 || mb->mba == 23 || mba_diff != 1 ||
        (prev->mtype & MTYPE_INTER) == 0) {
      pmv[0] = 0;
      pmv[1] = 0;
    } else {
      pmv[0] = prev->mvx;
      pmv[1] = prev->mvy;
    }
    for (i = 0; i < 2; i++) {
      gint mvd1, mvd2;
      if ((ret = decode_mvd (br, &mvd1, &mvd2)) != PARSE_OK)
        return ret;
      /* Pick the candidate difference that keeps the vector within the
       * legal [-15, 15] range. */
      if (ABS (pmv[i] + mvd1) <= 15)
        mv[i] = pmv[i] + mvd1;
      else
        mv[i] = pmv[i] + mvd2;
    }
    mb->mvx = mv[0];
    mb->mvy = mv[1];
  } else {
    mb->mvx = 0;
    mb->mvy = 0;
  }

  if (mb->mtype & MTYPE_CBP) {
    if ((ret = decode_cbp (br, &cbp)) != PARSE_OK)
      return ret;
  }

  /* Block layer */
  if (mb->mtype & MTYPE_TCOEFF) {
    gint block;
    for (block = 0; block < 6; block++) {
      if (cbp & (1 << (5 - block))) {
        GST_TRACE_OBJECT (pay, "Decode TCOEFF for block %d", block);
        if ((ret = decode_tcoeff (br, mb->mtype)) != PARSE_OK)
          return ret;
      }
    }
  }

  mb->endpos = gst_bit_reader_get_pos (br);

  return ret;
}
+
/* Parse macroblocks until the next MB that exceeds maxpos. At least one MB is
 * included even if it exceeds maxpos. Returns endpos of last included MB.
 *
 * @endpos is in/out: on entry it holds the maximum allowed bit position,
 * on return the actual end of the last included macroblock.  gob->last is
 * updated to the last included MB so the next packet's payload header can
 * be filled in. */
static ParseReturn
parse_mb_until_pos (GstRtpH261Pay * pay, GstBitReader * br, Gob * gob,
    guint * endpos)
{
  ParseReturn ret;
  gint count = 0;
  gboolean stop = FALSE;
  guint maxpos = *endpos;
  Macroblock mb;

  GST_LOG_OBJECT (pay, "Parse until pos %u, start at pos %u, gobn %d, mba %d",
      maxpos, gst_bit_reader_get_pos (br), gob->gn, gob->last.mba);

  while (!stop) {
    ret = parse_mb (pay, br, &gob->last, &mb);

    switch (ret) {
      case PARSE_OK:
        if (mb.endpos > maxpos && count > 0) {
          /* Don't include current MB */
          stop = TRUE;
        } else {
          /* Update to include current MB */
          *endpos = mb.endpos;
          gob->last = mb;
          count++;
        }
        break;

      case PARSE_END_OF_FRAME:
        *endpos = gst_bit_reader_get_pos (br);
        GST_DEBUG_OBJECT (pay, "End of frame at pos %u (last GOBN %d MBA %d)",
            *endpos, gob->gn, gob->last.mba);
        stop = TRUE;
        break;

      case PARSE_END_OF_GOB:
        /* Note that a GOB can contain nothing, so we may get here on the first
         * iteration. */
        *endpos = gob->last.mba == 0 ?
            gob->startpos : gst_bit_reader_get_pos (br);
        GST_DEBUG_OBJECT (pay, "End of gob at pos %u (last GOBN %d MBA %d)",
            *endpos, gob->gn, gob->last.mba);
        stop = TRUE;
        break;

      case PARSE_END_OF_BUFFER:
      case PARSE_ERROR:
        GST_WARNING_OBJECT (pay, "Failed to parse stream (reason %d)", ret);
        return ret;
        break;

      default:
        g_assert_not_reached ();
        break;
    }
  }
  gob->last.gobn = gob->gn;

  if (ret == PARSE_OK) {
    /* We over-read one MB to find the split point; rewind to the end of
     * the last MB that is actually included in this packet. */
    GST_DEBUG_OBJECT (pay,
        "Split GOBN %d after MBA %d (endpos %u, maxpos %u, nextpos %u)",
        gob->gn, gob->last.mba, *endpos, maxpos, mb.endpos);
    gst_bit_reader_set_pos (br, *endpos);
  }

  return ret;
}
+
+static guint
+bitrange_to_bytes (guint first, guint last)
+{
+ return (GST_ROUND_UP_8 (last) - GST_ROUND_DOWN_8 (first)) / 8;
+}
+
/* Find next 16-bit GOB start code (0x0001), which may not be byte aligned.
 * Returns the bit offset of the first bit of GBSC, or -1 if none is found.
 *
 * Strategy: a GBSC always contains a full zero byte.  For each zero byte,
 * `msb` locates the single '1' bit in the following byte (giving the
 * candidate start position offset*8 - msb), and `lsb` verifies that the
 * trailing bits of the preceding byte -- the bits that would belong to the
 * start code -- are all zero (lsb >= msb). */
static gssize
find_gob (GstRtpH261Pay * pay, const guint8 * data, guint size, guint pos)
{
  gssize ret = -1;
  guint offset;

  GST_LOG_OBJECT (pay, "Search for GOB from pos %u", pos);

  for (offset = pos / 8; offset < size - 1; offset++) {
    if (data[offset] == 0x0) {
      gint msb = g_bit_nth_msf (data[offset + 1], 8);
      gint lsb = offset > 0 ? g_bit_nth_lsf (data[offset - 1], -1) : 0;
      if (lsb == -1)
        lsb = 8;                /* preceding byte is all zero */
      if (msb >= 0 && lsb >= msb) {
        ret = offset * 8 - msb;
        GST_LOG_OBJECT (pay, "Found GOB start code at bitpos %"
            G_GSSIZE_FORMAT " (%02x %02x %02x)", ret,
            offset > 0 ? data[offset - 1] : 0, data[offset], data[offset + 1]);
        break;
      }
    }
  }

  return ret;
}
+
/* Scans after all GOB start codes and initializes the GOB structure with start
 * and end positions.
 *
 * Each GOB's endpos is the next GOB's startpos; the last GOB ends at the end
 * of the buffer.  Fails with PARSE_ERROR if fewer than @num_gobs start codes
 * are found. */
static ParseReturn
gst_rtp_h261_pay_init_gobs (GstRtpH261Pay * pay, Gob * gobs, gint num_gobs,
    const guint8 * bits, gint len, guint pos)
{
  gint i;

  for (i = 0; i < num_gobs; i++) {
    gssize gobpos = find_gob (pay, bits, len, pos);
    if (gobpos == -1) {
      GST_WARNING_OBJECT (pay, "Found only %d of %d GOBs", i, num_gobs);
      return PARSE_ERROR;
    }
    GST_LOG_OBJECT (pay, "Found GOB %d at pos %" G_GSSIZE_FORMAT, i, gobpos);
    /* Resume the search just past this start code. */
    pos = gobpos + GBSC_LEN;

    gobs[i].startpos = gobpos;
    if (i > 0)
      gobs[i - 1].endpos = gobpos;
  }
  gobs[num_gobs - 1].endpos = len * 8;

  return PARSE_OK;
}
+
/* Build and push one RTP packet containing the bit range [start, end) of the
 * (shifted) frame @bits, preceded by the RFC 4587 H.261 payload header.
 *
 * @last_mb_in_previous_packet supplies GOBN/MBAP/QUANT/MV header fields so a
 * decoder can resume mid-GOB; they stay zero when the fragment starts at a
 * GOB boundary.  Side effect: pay->offset is set to the bit offset within the
 * last byte where this fragment ended, which the next frame is shifted to. */
static GstFlowReturn
gst_rtp_h261_pay_fragment_push (GstRtpH261Pay * pay, GstBuffer * buffer,
    const guint8 * bits, guint start, guint end,
    const Macroblock * last_mb_in_previous_packet, gboolean marker)
{
  GstBuffer *outbuf;
  guint8 *payload;
  GstRtpH261PayHeader *header;
  gint nbytes;
  const Macroblock *last = last_mb_in_previous_packet;
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

  nbytes = bitrange_to_bytes (start, end);

  outbuf =
      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD (pay),
      nbytes + GST_RTP_H261_PAYLOAD_HEADER_LEN, 0, 0);
  gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
  payload = gst_rtp_buffer_get_payload (&rtp);
  header = (GstRtpH261PayHeader *) payload;

  memset (header, 0, GST_RTP_H261_PAYLOAD_HEADER_LEN);
  header->v = 1;
  /* Number of ignored bits at the start/end of the payload. */
  header->sbit = start & 7;
  header->ebit = (8 - (end & 7)) & 7;

  if (last != NULL && last->mba != 0 && last->mba != 33) {
    /* NOTE: MVD assumes that we're running on 2's complement architecture */
    guint mbap = last->mba - 1;
    header->gobn = last->gobn;
    header->mbap1 = mbap >> 1;
    header->mbap2 = mbap & 1;
    header->quant = last->quant;
    header->hmvd1 = last->mvx >> 3;
    header->hmvd2 = last->mvx & 7;
    header->vmvd = last->mvy;
  }

  memcpy (payload + GST_RTP_H261_PAYLOAD_HEADER_LEN,
      bits + GST_ROUND_DOWN_8 (start) / 8, nbytes);

  GST_BUFFER_TIMESTAMP (outbuf) = pay->timestamp;
  /* Marker bit signals the last fragment of the frame. */
  gst_rtp_buffer_set_marker (&rtp, marker);
  pay->offset = end & 7;

  GST_DEBUG_OBJECT (pay,
      "Push fragment, bytes %d, sbit %d, ebit %d, gobn %d, mbap %d, marker %d",
      nbytes, header->sbit, header->ebit, last != NULL ? last->gobn : 0,
      last != NULL ? MAX (last->mba - 1, 0) : 0, marker);

  gst_rtp_buffer_unmap (&rtp);

  gst_rtp_copy_video_meta (pay, outbuf, buffer);

  return gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD_CAST (pay), outbuf);
}
+
/* Split one (shifted) H.261 frame into MTU-sized RTP packets and push them.
 *
 * Packets are split only on macroblock or GOB boundaries, which requires
 * parsing the bitstream.  On parse failure the frame is silently dropped
 * (GST_FLOW_OK is still returned) rather than erroring the pipeline. */
static GstFlowReturn
gst_rtp_h261_packetize_and_push (GstRtpH261Pay * pay, GstBuffer * buffer,
    const guint8 * bits, gsize len)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstBitReader br_;
  GstBitReader *br = &br_;
  guint max_payload_size =
      gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU (pay) -
      GST_RTP_H261_PAYLOAD_HEADER_LEN, 0, 0);
  guint startpos;
  gint num_gobs = 0;
  Gob gobs[MAX_NUM_GOB];
  Gob *gob;
  Macroblock last_mb_in_previous_packet = { 0 };
  gboolean marker;
  ParseReturn result;

  gst_bit_reader_init (br, bits, len);
  /* The frame was shifted so it starts pay->offset bits into the buffer. */
  gst_bit_reader_set_pos (br, pay->offset);
  startpos = pay->offset;

  if (parse_picture_header (pay, br, &num_gobs) < PARSE_OK) {
    GST_WARNING_OBJECT (pay, "Failed to parse picture header");
    goto beach;
  }

  if (gst_rtp_h261_pay_init_gobs (pay, gobs, num_gobs, bits, len,
          gst_bit_reader_get_pos (br)) < PARSE_OK)
    goto beach;

  /* Split, create and push packets */
  gob = gobs;
  marker = FALSE;
  while (marker == FALSE && ret == GST_FLOW_OK) {
    guint endpos;

    /* Check if there is wrap around because of extremely high MTU */
    endpos = GST_ROUND_DOWN_8 (startpos) + max_payload_size * 8;
    if (endpos < startpos)
      endpos = G_MAXUINT;

    GST_LOG_OBJECT (pay, "Next packet startpos %u maxpos %u", startpos, endpos);

    /* Find the last GOB that does not completely fit in packet */
    for (; gob < &gobs[num_gobs - 1]; gob++) {
      if (bitrange_to_bytes (startpos, gob->endpos) > max_payload_size) {
        GST_LOG_OBJECT (pay, "Split gob (start %u, end %u)",
            gob->startpos, gob->endpos);
        break;
      }
    }

    if (startpos <= gob->startpos) {
      /* Fast-forward until start of GOB */
      gst_bit_reader_set_pos (br, gob->startpos);
      if (parse_gob_header (pay, br, gob) < PARSE_OK) {
        GST_WARNING_OBJECT (pay, "Failed to parse GOB header");
        goto beach;
      }
      /* Fresh GOB: no macroblocks parsed yet. */
      gob->last.mba = 0;
      gob->last.gobn = gob->gn;
      gob->last.quant = gob->gquant;
    }

    /* Parse MBs to find position where to split. Can only be done on after MB
     * or at GOB boundary. */
    result = parse_mb_until_pos (pay, br, gob, &endpos);
    if (result < PARSE_OK)
      goto beach;

    marker = result == PARSE_END_OF_FRAME;
    ret = gst_rtp_h261_pay_fragment_push (pay, buffer, bits, startpos, endpos,
        &last_mb_in_previous_packet, marker);

    last_mb_in_previous_packet = gob->last;
    if (endpos == gob->endpos)
      gob++;
    startpos = endpos;
  }

beach:
  return ret;
}
+
/* Shift buffer to packetize a continuous stream of bits (not bytes). Some
 * depayloaders/decoders are very picky about correct sbit/ebit for frames.
 *
 * @offset > 0 shifts right (frame starts @offset bits later, output grows by
 * one byte); @offset < 0 shifts left.  Returns a newly g_malloc'd buffer
 * (caller frees) whose payload length is stored in @newsize; the allocation
 * additionally carries 4 zero padding bytes so the VLC parsers may peek past
 * the end safely. */
static guint8 *
gst_rtp_h261_pay_shift_buffer (GstRtpH261Pay * pay, const guint8 * data,
    gsize size, gint offset, gsize * newsize)
{
  /* In order to read variable length codes at the very end of the buffer
   * without peeking into possibly unallocated data, we pad with extra 0's
   * which will generate an invalid code at the end of the buffer. */
  guint pad = 4;
  gsize allocsize = size + pad;
  guint8 *bits = g_malloc (allocsize);
  gint i;

  if (offset == 0) {
    memcpy (bits, data, size);
    *newsize = size;
  } else if (offset > 0) {
    /* Shift right: each output byte mixes the tail of one input byte with
     * the head of the next; bits[i + 1] stays within bounds via padding. */
    bits[0] = 0;
    for (i = 0; i < size; i++) {
      bits[i] |= data[i] >> offset;
      bits[i + 1] = data[i] << (8 - offset);
    }
    *newsize = size + 1;
  } else {
    /* Shift left: drop the first -offset bits of the frame. */
    gint shift = -offset;
    for (i = 0; i < size - 1; i++)
      bits[i] = (data[i] << shift) | (data[i + 1] >> (8 - shift));
    bits[i] = data[i] << shift;
    *newsize = size;
  }
  /* Zero the padding (and any bits past *newsize). */
  for (i = *newsize; i < allocsize; i++)
    bits[i] = 0;

  return bits;
}
+
+static GstFlowReturn
+gst_rtp_h261_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstRtpH261Pay *pay = GST_RTP_H261_PAY (payload);
+ gsize len;
+ guint8 *bits;
+ gint psc_offset, shift;
+ GstMapInfo map;
+
+ GST_DEBUG_OBJECT (pay, "Handle buffer of size %" G_GSIZE_FORMAT,
+ gst_buffer_get_size (buffer));
+
+ pay->timestamp = GST_BUFFER_TIMESTAMP (buffer);
+
+ if (!gst_buffer_map (buffer, &map, GST_MAP_READ) || !map.data) {
+ GST_WARNING_OBJECT (pay, "Failed to map buffer");
+ return GST_FLOW_ERROR;
+ }
+
+ psc_offset = find_picture_header_offset (map.data, map.size);
+ if (psc_offset < 0) {
+ GST_WARNING_OBJECT (pay, "Failed to find picture header offset");
+ goto beach;
+ } else {
+ GST_DEBUG_OBJECT (pay, "Picture header offset: %d", psc_offset);
+ }
+
+ shift = pay->offset - psc_offset;
+ bits = gst_rtp_h261_pay_shift_buffer (pay, map.data, map.size, shift, &len);
+ ret = gst_rtp_h261_packetize_and_push (pay, buffer, bits, len);
+ g_free (bits);
+
+beach:
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return ret;
+}
+
+
+static gboolean
+gst_rtp_h261_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+
+ gst_rtp_base_payload_set_options (payload, "video",
+ payload->pt != GST_RTP_PAYLOAD_H261, "H261", 90000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+ return res;
+}
+
+static void
+gst_rtp_h261_pay_init (GstRtpH261Pay * pay)
+{
+ GstRTPBasePayload *payload = GST_RTP_BASE_PAYLOAD (pay);
+ payload->pt = GST_RTP_PAYLOAD_H261;
+ pay->offset = 0;
+}
+
+static void
+gst_rtp_h261_pay_class_init (GstRtpH261PayClass * klass)
+{
+ GstElementClass *element_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ element_class = GST_ELEMENT_CLASS (klass);
+ gstrtpbasepayload_class = GST_RTP_BASE_PAYLOAD_CLASS (klass);
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_h261_pay_src_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_h261_pay_sink_template);
+
+ gst_element_class_set_static_metadata (element_class,
+ "RTP H261 packet payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes H261 video in RTP packets (RFC 4587)",
+ "Stian Selnes <stian@pexip.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_h261_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_h261_pay_handle_buffer;
+
+ GST_DEBUG_CATEGORY_INIT (rtph261pay_debug, "rtph261pay", 0,
+ "H261 RTP Payloader");
+}
diff --git a/gst/rtp/gstrtph261pay.h b/gst/rtp/gstrtph261pay.h
new file mode 100644
index 0000000000..1052d01be3
--- /dev/null
+++ b/gst/rtp/gstrtph261pay.h
@@ -0,0 +1,98 @@
+/* GStreamer
+ * Copyright (C) <2014> Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ *
+ * Author: Dejan Sakelsak sahel@kiberpipa.org
+ */
+
+#ifndef __GST_RTP_H261_PAY_H__
+#define __GST_RTP_H261_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_H261_PAY \
+ (gst_rtp_h261_pay_get_type())
+#define GST_RTP_H261_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H261_PAY,GstRtpH261Pay))
+#define GST_RTP_H261_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H261_PAY,GstRtpH261PayClass))
+#define GST_IS_RTP_H261_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H261_PAY))
+#define GST_IS_RTP_H261_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H261_PAY))
+typedef struct _GstRtpH261PayClass GstRtpH261PayClass;
+typedef struct _GstRtpH261Pay GstRtpH261Pay;
+
+struct _GstRtpH261Pay
+{
+ GstRTPBasePayload payload;
+
+ GstAdapter *adapter;
+ gint offset;
+ GstClockTime timestamp;
+};
+
+struct _GstRtpH261PayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+typedef struct _GstRtpH261PayHeader
+{
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ unsigned int v:1; /* Motion vector flag */
+ unsigned int i:1; /* Intra encoded data */
+ unsigned int ebit:3; /* End position */
+ unsigned int sbit:3; /* Start position */
+
+ unsigned int mbap1:4; /* MB address predictor - part1 */
+ unsigned int gobn:4; /* GOB number */
+
+ unsigned int hmvd1:2; /* Horizontal motion vector data - part1 */
+ unsigned int quant:5; /* Quantizer */
+ unsigned int mbap2:1; /* MB address predictor - part2 */
+
+ unsigned int vmvd:5; /* Horizontal motion vector data - part1 */
+ unsigned int hmvd2:3; /* Vertical motion vector data */
+#elif G_BYTE_ORDER == G_BIG_ENDIAN
+ unsigned int sbit:3; /* Start position */
+ unsigned int ebit:3; /* End position */
+ unsigned int i:1; /* Intra encoded data */
+ unsigned int v:1; /* Motion vector flag */
+
+ unsigned int gobn:4; /* GOB number */
+ unsigned int mbap1:4; /* MB address predictor - part1 */
+
+ unsigned int mbap2:1; /* MB address predictor - part2 */
+ unsigned int quant:5; /* Quantizer */
+ unsigned int hmvd1:2; /* Horizontal motion vector data - part1 */
+
+ unsigned int hmvd2:3; /* Vertical motion vector data */
+ unsigned int vmvd:5; /* Horizontal motion vector data - part1 */
+#else
+#error "G_BYTE_ORDER should be big or little endian."
+#endif
+} GstRtpH261PayHeader;
+#define GST_RTP_H261_PAYLOAD_HEADER_LEN 4
+
+GType gst_rtp_h261_pay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_H261_PAY_H__ */
diff --git a/gst/rtp/gstrtph263depay.c b/gst/rtp/gstrtph263depay.c
new file mode 100644
index 0000000000..f6b41a5bc9
--- /dev/null
+++ b/gst/rtp/gstrtph263depay.c
@@ -0,0 +1,443 @@
+/* GStreamer
+ *
+ * Copyright 2007 Nokia Corporation
+ * Copyright 2007 Collabora Ltd,
+ * @author: Philippe Kalaf <philippe.kalaf@collabora.co.uk>
+ *
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ * <2007> Edward Hervey <bilboed@bilboed.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "gstrtph263depay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtph263depay_debug);
+#define GST_CAT_DEFAULT (rtph263depay_debug)
+
+#define GST_RFC2190A_HEADER_LEN 4
+#define GST_RFC2190B_HEADER_LEN 8
+#define GST_RFC2190C_HEADER_LEN 12
+
+static GstStaticPadTemplate gst_rtp_h263_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h263, "
+ "variant = (string) \"itu\", " "h263version = (string) \"h263\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_h263_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_H263_STRING ", "
+ "clock-rate = (int) 90000; "
+ /* optional SDP attribute:
+ * "a-framesize = (string) \"1234-1234\", "
+ */
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H263\""
+ /* optional SDP attribute:
+ * "a-framesize = (string) \"1234-1234\", "
+ */
+ )
+ );
+
+#define gst_rtp_h263_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263Depay, gst_rtp_h263_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263depay, "rtph263depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H263_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_h263_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_h263_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static GstBuffer *gst_rtp_h263_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+gboolean gst_rtp_h263_depay_setcaps (GstRTPBaseDepayload * filter,
+ GstCaps * caps);
+
+static void
+gst_rtp_h263_depay_class_init (GstRtpH263DepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtph263depay_debug, "rtph263depay", 0,
+ "H263 Video RTP Depayloader");
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_h263_depay_finalize;
+
+ gstelement_class->change_state = gst_rtp_h263_depay_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h263_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h263_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H263 video from RTP packets (RFC 2190)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
+ "Edward Hervey <bilboed@bilboed.com>");
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_h263_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_h263_depay_setcaps;
+}
+
+static void
+gst_rtp_h263_depay_init (GstRtpH263Depay * rtph263depay)
+{
+ rtph263depay->adapter = gst_adapter_new ();
+
+ rtph263depay->offset = 0;
+ rtph263depay->leftover = 0;
+}
+
+static void
+gst_rtp_h263_depay_finalize (GObject * object)
+{
+ GstRtpH263Depay *rtph263depay;
+
+ rtph263depay = GST_RTP_H263_DEPAY (object);
+
+ g_object_unref (rtph263depay->adapter);
+ rtph263depay->adapter = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_rtp_h263_parse_framesize (GstRTPBaseDepayload * filter,
+ const gchar * media_attr, GstCaps * srccaps)
+{
+ gchar *dimension, *endptr;
+ gint width, height;
+ GstStructure *d;
+
+ width = g_ascii_strtoull (media_attr, &endptr, 10);
+ if (width <= 0) {
+ GST_ERROR_OBJECT (filter,
+ "Framesize media attribute width out of valid range");
+ return FALSE;
+ } else if (*endptr != '-') {
+ GST_ERROR_OBJECT (filter,
+ "Framesize media attribute has invalid dimension separator");
+ return FALSE;
+ }
+
+ dimension = endptr + 1;
+ height = g_ascii_strtoull (dimension, &endptr, 10);
+ if (height <= 0) {
+ GST_ERROR_OBJECT (filter,
+ "Framesize media attribute height out of valid range");
+ return FALSE;
+ } else if (*endptr != '\0') {
+ GST_ERROR_OBJECT (filter,
+ "Framesize media attribute unexpectedly has trailing characters");
+ return FALSE;
+ }
+
+ d = gst_caps_get_structure (srccaps, 0);
+ gst_structure_set (d, "width", G_TYPE_INT, width, "height", G_TYPE_INT,
+ height, NULL);
+
+ return TRUE;
+}
+
+gboolean
+gst_rtp_h263_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ GstStructure *structure = gst_caps_get_structure (caps, 0);
+ gint clock_rate;
+ const gchar *framesize;
+
+ srccaps = gst_caps_new_simple ("video/x-h263",
+ "variant", G_TYPE_STRING, "itu",
+ "h263version", G_TYPE_STRING, "h263", NULL);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000; /* default */
+ filter->clock_rate = clock_rate;
+
+ framesize = gst_structure_get_string (structure, "a-framesize");
+ if (framesize != NULL) {
+ if (!gst_rtp_h263_parse_framesize (filter, framesize, srccaps)) {
+ return FALSE;
+ }
+ }
+
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter), srccaps);
+ gst_caps_unref (srccaps);
+
+ return TRUE;
+}
+
+static GstBuffer *
+gst_rtp_h263_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpH263Depay *rtph263depay;
+ GstBuffer *outbuf;
+ gint payload_len;
+ guint8 *payload;
+ guint header_len;
+ guint SBIT, EBIT;
+ gboolean F, P, M;
+ gboolean I;
+
+ rtph263depay = GST_RTP_H263_DEPAY (depayload);
+
+ /* flush remaining data on discont */
+ if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+ GST_LOG_OBJECT (depayload, "Discont buffer, flushing adapter");
+ gst_adapter_clear (rtph263depay->adapter);
+ rtph263depay->offset = 0;
+ rtph263depay->leftover = 0;
+ rtph263depay->start = FALSE;
+ }
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+ payload = gst_rtp_buffer_get_payload (rtp);
+
+ M = gst_rtp_buffer_get_marker (rtp);
+
+ if (payload_len < 1)
+ goto too_small;
+
+ /* Let's see what mode we are using */
+ F = (payload[0] & 0x80) == 0x80;
+ P = (payload[0] & 0x40) == 0x40;
+
+ /* Bit shifting */
+ SBIT = (payload[0] & 0x38) >> 3;
+ EBIT = (payload[0] & 0x07);
+
+ /* Figure out header length and I-flag */
+ if (F == 0) {
+ /* F == 0 and P == 0 or 1
+ * mode A */
+ header_len = GST_RFC2190A_HEADER_LEN;
+ GST_LOG ("Mode A");
+
+ /* 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |F|P|SBIT |EBIT | SRC |I|U|S|A|R |DBQ| TRB | TR |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ if (payload_len <= header_len)
+ goto too_small;
+ I = (payload[1] & 0x10) == 0x10;
+ } else {
+ if (P == 0) {
+ /* F == 1 and P == 0
+ * mode B */
+ header_len = GST_RFC2190B_HEADER_LEN;
+ GST_LOG ("Mode B");
+
+ /* 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |F|P|SBIT |EBIT | SRC | QUANT | GOBN | MBA |R |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |I|U|S|A| HMV1 | VMV1 | HMV2 | VMV2 |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ if (payload_len <= header_len)
+ goto too_small;
+ I = (payload[4] & 0x80) == 0x80;
+ } else {
+ /* F == 1 and P == 1
+ * mode C */
+ header_len = GST_RFC2190C_HEADER_LEN;
+ GST_LOG ("Mode C");
+
+ /* 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |F|P|SBIT |EBIT | SRC | QUANT | GOBN | MBA |R |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |I|U|S|A| HMV1 | VMV1 | HMV2 | VMV2 |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | RR |DBQ| TRB | TR |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ if (payload_len <= header_len)
+ goto too_small;
+ I = (payload[4] & 0x80) == 0x80;
+ }
+ }
+
+ GST_LOG ("F/P/M/I : %d/%d/%d/%d", F, P, M, I);
+ GST_LOG ("SBIT : %d , EBIT : %d", SBIT, EBIT);
+ GST_LOG ("payload_len : %d, header_len : %d , leftover : 0x%x",
+ payload_len, header_len, rtph263depay->leftover);
+
+ /* skip header */
+ payload += header_len;
+ payload_len -= header_len;
+
+ if (!rtph263depay->start) {
+ /* only mode A should be used when there is a picture start code, but
+ * buggy payloaders may send mode B/C in start of frame */
+ if (payload_len > 4 && (GST_READ_UINT32_BE (payload) >> 10 == 0x20)) {
+ GST_DEBUG ("Mode %c with PSC => frame start", "ABC"[F + P]);
+ rtph263depay->start = TRUE;
+ if ((! !(payload[4] & 0x02)) != I) {
+ GST_DEBUG ("Wrong Picture Coding Type Flag in rtp header");
+ I = !I;
+ }
+ rtph263depay->psc_I = I;
+ } else {
+ GST_DEBUG ("no frame start yet, skipping payload");
+ goto skip;
+ }
+ }
+
+ /* only trust I info from Mode A starting packet
+ * from buggy payloaders or hw */
+ I = rtph263depay->psc_I;
+
+ if (SBIT) {
+ /* take the leftover and merge it at the beginning, FIXME make the buffer
+ * data writable. */
+ GST_LOG ("payload[0] : 0x%x", payload[0]);
+ payload[0] &= 0xFF >> SBIT;
+ GST_LOG ("payload[0] : 0x%x", payload[0]);
+ payload[0] |= rtph263depay->leftover;
+ GST_LOG ("payload[0] : 0x%x", payload[0]);
+ rtph263depay->leftover = 0;
+ rtph263depay->offset = 0;
+ }
+
+ if (!EBIT) {
+ GstBuffer *tmp;
+
+ /* Take the entire buffer */
+ tmp = gst_rtp_buffer_get_payload_subbuffer (rtp, header_len, payload_len);
+ gst_adapter_push (rtph263depay->adapter, tmp);
+ } else {
+ GstBuffer *tmp;
+
+ /* Take the entire buffer except for the last byte */
+ tmp = gst_rtp_buffer_get_payload_subbuffer (rtp, header_len,
+ payload_len - 1);
+ gst_adapter_push (rtph263depay->adapter, tmp);
+
+ /* Put the last byte into the leftover */
+ GST_DEBUG ("payload[payload_len - 1] : 0x%x", payload[payload_len - 1]);
+ GST_DEBUG ("mask : 0x%x", 0xFF << EBIT);
+ rtph263depay->leftover = (payload[payload_len - 1] >> EBIT) << EBIT;
+ rtph263depay->offset = 1;
+ GST_DEBUG ("leftover : 0x%x", rtph263depay->leftover);
+ }
+
+skip:
+ if (M) {
+ if (rtph263depay->start) {
+ /* frame is completed */
+ guint avail;
+
+ if (rtph263depay->offset) {
+ /* push in the leftover */
+ GstBuffer *buf = gst_buffer_new_and_alloc (1);
+
+ GST_DEBUG ("Pushing leftover in adapter");
+ gst_buffer_fill (buf, 0, &rtph263depay->leftover, 1);
+ gst_adapter_push (rtph263depay->adapter, buf);
+ }
+
+ avail = gst_adapter_available (rtph263depay->adapter);
+ outbuf = gst_adapter_take_buffer (rtph263depay->adapter, avail);
+
+ if (I)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ GST_DEBUG ("Pushing out a buffer of %d bytes", avail);
+
+ gst_rtp_drop_non_video_meta (rtph263depay, outbuf);
+
+ gst_rtp_base_depayload_push (depayload, outbuf);
+ rtph263depay->offset = 0;
+ rtph263depay->leftover = 0;
+ rtph263depay->start = FALSE;
+ } else {
+ rtph263depay->start = TRUE;
+ }
+ }
+
+ return NULL;
+
+too_small:
+ {
+ GST_ELEMENT_WARNING (rtph263depay, STREAM, DECODE,
+ ("Packet payload was too small"), (NULL));
+ return NULL;
+ }
+}
+
+static GstStateChangeReturn
+gst_rtp_h263_depay_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstRtpH263Depay *rtph263depay;
+ GstStateChangeReturn ret;
+
+ rtph263depay = GST_RTP_H263_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_adapter_clear (rtph263depay->adapter);
+ rtph263depay->start = TRUE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtph263depay.h b/gst/rtp/gstrtph263depay.h
new file mode 100644
index 0000000000..485fc9dfaa
--- /dev/null
+++ b/gst/rtp/gstrtph263depay.h
@@ -0,0 +1,64 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H263_DEPAY_H__
+#define __GST_RTP_H263_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_H263_DEPAY \
+ (gst_rtp_h263_depay_get_type())
+#define GST_RTP_H263_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H263_DEPAY,GstRtpH263Depay))
+#define GST_RTP_H263_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H263_DEPAY,GstRtpH263DepayClass))
+#define GST_IS_RTP_H263_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H263_DEPAY))
+#define GST_IS_RTP_H263_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H263_DEPAY))
+
+typedef struct _GstRtpH263Depay GstRtpH263Depay;
+typedef struct _GstRtpH263DepayClass GstRtpH263DepayClass;
+
+struct _GstRtpH263Depay
+{
+ GstRTPBaseDepayload depayload;
+
+ guint8 offset; /* offset to apply to next payload */
+ guint8 leftover; /* leftover from previous payload (if offset != 0) */
+ gboolean psc_I; /* Picture-Coding-Type == I from Picture Start Code packet */
+ GstAdapter *adapter;
+ gboolean start;
+};
+
+struct _GstRtpH263DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_h263_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_H263_DEPAY_H__ */
+
diff --git a/gst/rtp/gstrtph263pay.c b/gst/rtp/gstrtph263pay.c
new file mode 100644
index 0000000000..b7239f49ba
--- /dev/null
+++ b/gst/rtp/gstrtph263pay.c
@@ -0,0 +1,1870 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2008> Dejan Sakelsak <dejan.sakelsak@marand.si>
+ * Copyright (C) <2009> Janin Kolenc <janin.kolenc@marand.si>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "gstrtph263pay.h"
+#include "gstrtputils.h"
+
+typedef enum
+{
+ GST_H263_FRAME_TYPE_I = 0,
+ GST_H263_FRAME_TYPE_P = 1,
+ GST_H263_FRAME_TYPE_PB = 2
+} GstRtpH263PayFrameType;
+
+typedef enum
+{
+ GST_RTP_H263_PAYLOAD_PICTURE_FORMAT_RES1 = 0,
+ GST_RTP_H263_PAYLOAD_PICTURE_FORMAT_SQCIF = 1,
+ GST_RTP_H263_PAYLOAD_PICTURE_FORMAT_QCIF = 2,
+ GST_RTP_H263_PAYLOAD_PICTURE_FORMAT_CIF = 3,
+ GST_RTP_H263_PAYLOAD_PICTURE_FORMAT_4CIF = 4,
+ GST_RTP_H263_PAYLOAD_PICTURE_FORMAT_16CIF = 5,
+ GST_RTP_H263_PAYLOAD_PICTURE_FORMAT_RES2 = 6,
+ GST_H263_PAYLOAD_PICTURE_FORMAT_PLUS = 7
+} GstRtpH263PayPictureFormat;
+
+static const guint format_props[8][2] = { {254, 254},
+{6, 8},
+{9, 11},
+{18, 22},
+{18, 88},
+{18, 352},
+{254, 254},
+{255, 255}
+};
+
+/*
+ * I-frame MCBPC table: code, mask, nbits, cb, cr, mb type -> 10 = undefined (because we have guint)
+ */
+#define MCBPC_I_LEN 9
+#define MCBPC_I_WID 6
+static const guint32 mcbpc_I[9][6] = {
+ {0x8000, 0x8000, 1, 0, 0, 3},
+ {0x2000, 0xe000, 3, 0, 1, 3},
+ {0x4000, 0xe000, 3, 1, 0, 3},
+ {0x6000, 0xe000, 3, 1, 1, 3},
+ {0x1000, 0xf000, 4, 0, 0, 4},
+ {0x0400, 0xfc00, 6, 0, 1, 4},
+ {0x0800, 0xfc00, 6, 1, 0, 4},
+ {0x0c00, 0xfc00, 6, 1, 1, 4},
+ {0x0080, 0xff80, 9, 10, 10, 10}
+};
+
+/*
+ * P-frame MCBPC table: code, mask, nbits, cb, cr, mb type -> 10 = undefined (because we have guint)
+ */
+#define MCBPC_P_LEN 21
+#define MCBPC_P_WID 6
+static const guint16 mcbpc_P[21][6] = {
+ {0x8000, 0x8000, 1, 0, 0, 0},
+ {0x3000, 0xf000, 4, 0, 1, 0},
+ {0x2000, 0xf000, 4, 1, 0, 0},
+ {0x1400, 0xfc00, 6, 1, 1, 0},
+ {0x6000, 0xe000, 3, 0, 0, 1},
+ {0x0e00, 0xfe00, 7, 0, 1, 1},
+ {0x0c00, 0xfe00, 7, 1, 0, 1},
+ {0x0280, 0xff80, 9, 1, 1, 1},
+ {0x4000, 0xe000, 3, 0, 0, 2},
+ {0x0a00, 0xfe00, 7, 0, 1, 2},
+ {0x0800, 0xfe00, 7, 1, 0, 2},
+ {0x0500, 0xff00, 8, 1, 1, 2},
+ {0x1800, 0xf800, 5, 0, 0, 3},
+ {0x0400, 0xff00, 8, 0, 1, 3},
+ {0x0300, 0xff00, 8, 1, 0, 3},
+ {0x0600, 0xfe00, 7, 1, 1, 3},
+ {0x1000, 0xfc00, 6, 0, 0, 4},
+ {0x0200, 0xff80, 9, 0, 1, 4},
+ {0x0180, 0xff80, 9, 1, 0, 4},
+ {0x0100, 0xff80, 9, 1, 1, 4},
+ {0x0080, 0xff80, 9, 10, 10, 10}
+};
+
+/*
+ * I-frame CBPY (code, mask, nbits, Y0, Y1, Y2, Y3)
+ */
+#define CBPY_LEN 16
+#define CBPY_WID 7
+static const guint8 cbpy_I[16][7] = {
+ {0x30, 0xf0, 4, 0, 0, 0, 0},
+ {0x28, 0xf8, 5, 0, 0, 0, 1},
+ {0x20, 0xf8, 5, 0, 0, 1, 0},
+ {0x90, 0xf0, 4, 0, 0, 1, 1},
+ {0x18, 0xf8, 5, 0, 1, 0, 0},
+ {0x70, 0xf0, 4, 0, 1, 0, 1},
+ {0x08, 0xfc, 6, 0, 1, 1, 0},
+ {0xb0, 0xf0, 4, 0, 1, 1, 1},
+ {0x10, 0xf8, 5, 1, 0, 0, 0},
+ {0x0c, 0xfc, 6, 1, 0, 0, 1},
+ {0x50, 0xf0, 4, 1, 0, 1, 0},
+ {0xa0, 0xf0, 4, 1, 0, 1, 1},
+ {0x40, 0xf0, 4, 1, 1, 0, 0},
+ {0x80, 0xf0, 4, 1, 1, 0, 1},
+ {0x60, 0xf0, 4, 1, 1, 1, 0},
+ {0xc0, 0xc0, 2, 1, 1, 1, 1}
+};
+
+/*
+ * P-frame CBPY (code, mask, nbits, Y0, Y1, Y2, Y3)
+ */
+static const guint8 cbpy_P[16][7] = {
+ {0x30, 0xf0, 4, 1, 1, 1, 1},
+ {0x28, 0xf8, 5, 1, 1, 1, 0},
+ {0x20, 0xf8, 5, 1, 1, 0, 1},
+ {0x90, 0xf0, 4, 1, 1, 0, 0},
+ {0x18, 0xf8, 5, 1, 0, 1, 1},
+ {0x70, 0xf0, 4, 1, 0, 1, 0},
+ {0x08, 0xfc, 6, 1, 0, 0, 1},
+ {0xb0, 0xf0, 4, 1, 0, 0, 0},
+ {0x10, 0xf8, 5, 0, 1, 1, 1},
+ {0x0c, 0xfc, 6, 0, 1, 1, 0},
+ {0x50, 0xf0, 4, 0, 1, 0, 1},
+ {0xa0, 0xf0, 4, 0, 1, 0, 0},
+ {0x40, 0xf0, 4, 0, 0, 1, 1},
+ {0x80, 0xf0, 4, 0, 0, 1, 0},
+ {0x60, 0xf0, 4, 0, 0, 0, 1},
+ {0xc0, 0xc0, 2, 0, 0, 0, 0}
+};
+
+/*
+ * Block TCOEF table (code, mask, nbits, LAST, RUN, LEVEL)
+ */
+#define TCOEF_LEN 103
+#define TCOEF_WID 6
+static const guint16 tcoef[103][6] = {
+ {0x8000, 0xc000, 3, 0, 0, 1},
+ {0xf000, 0xf000, 5, 0, 0, 2},
+ {0x5400, 0xfc00, 7, 0, 0, 3},
+ {0x2e00, 0xfe00, 8, 0, 0, 4},
+ {0x1f00, 0xff00, 9, 0, 0, 5},
+ {0x1280, 0xff80, 10, 0, 0, 6},
+ {0x1200, 0xff80, 10, 0, 0, 7},
+ {0x0840, 0xffc0, 11, 0, 0, 8},
+ {0x0800, 0xffc0, 11, 0, 0, 9},
+ {0x00e0, 0xffe0, 12, 0, 0, 10}, //10
+ {0x00c0, 0xffe0, 12, 0, 0, 11},
+ {0x0400, 0xffe0, 12, 0, 0, 12},
+ {0xc000, 0xe000, 4, 0, 1, 1},
+ {0x5000, 0xfc00, 7, 0, 1, 2},
+ {0x1e00, 0xff00, 9, 0, 1, 3},
+ {0x03c0, 0xffc0, 11, 0, 1, 4},
+ {0x0420, 0xffe0, 12, 0, 1, 5},
+ {0x0500, 0xfff0, 13, 0, 1, 6},
+ {0xe000, 0xf000, 5, 0, 2, 1},
+ {0x1d00, 0xff00, 9, 0, 2, 2}, //20
+ {0x0380, 0xffc0, 11, 0, 2, 3},
+ {0x0510, 0xfff0, 13, 0, 2, 4},
+ {0x6800, 0xf800, 6, 0, 3, 1},
+ {0x1180, 0xff80, 10, 0, 3, 2},
+ {0x0340, 0xffc0, 11, 0, 3, 3},
+ {0x6000, 0xf800, 6, 0, 4, 1},
+ {0x1100, 0xff80, 10, 0, 4, 2},
+ {0x0520, 0xfff0, 13, 0, 4, 3},
+ {0x5800, 0xf800, 6, 0, 5, 1},
+ {0x0300, 0xffc0, 11, 0, 5, 2}, // 30
+ {0x0530, 0xfff0, 13, 0, 5, 3},
+ {0x4c00, 0xfc00, 7, 0, 6, 1},
+ {0x02c0, 0xffc0, 11, 0, 6, 2},
+ {0x0540, 0xfff0, 13, 0, 6, 3},
+ {0x4800, 0xfc00, 7, 0, 7, 1},
+ {0x0280, 0xffc0, 11, 0, 7, 2},
+ {0x4400, 0xfc00, 7, 0, 8, 1},
+ {0x0240, 0xffc0, 11, 0, 8, 2},
+ {0x4000, 0xfc00, 7, 0, 9, 1},
+ {0x0200, 0xffc0, 11, 0, 9, 2}, // 40
+ {0x2c00, 0xfe00, 8, 0, 10, 1},
+ {0x0550, 0xfff0, 13, 0, 10, 2},
+ {0x2a00, 0xfe00, 8, 0, 11, 1},
+ {0x2800, 0xfe00, 8, 0, 12, 1},
+ {0x1c00, 0xff00, 9, 0, 13, 1},
+ {0x1b00, 0xff00, 9, 0, 14, 1},
+ {0x1080, 0xff80, 10, 0, 15, 1},
+ {0x1000, 0xff80, 10, 0, 16, 1},
+ {0x0f80, 0xff80, 10, 0, 17, 1},
+ {0x0f00, 0xff80, 10, 0, 18, 1}, // 50
+ {0x0e80, 0xff80, 10, 0, 19, 1},
+ {0x0e00, 0xff80, 10, 0, 20, 1},
+ {0x0d80, 0xff80, 10, 0, 21, 1},
+ {0x0d00, 0xff80, 10, 0, 22, 1},
+ {0x0440, 0xffe0, 12, 0, 23, 1},
+ {0x0460, 0xffe0, 12, 0, 24, 1},
+ {0x0560, 0xfff0, 13, 0, 25, 1},
+ {0x0570, 0xfff0, 13, 0, 26, 1},
+ {0x7000, 0xf000, 5, 1, 0, 1},
+ {0x0c80, 0xff80, 10, 1, 0, 2}, // 60
+ {0x00a0, 0xffe0, 12, 1, 0, 3},
+ {0x3c00, 0xfc00, 7, 1, 1, 1},
+ {0x0080, 0xffe0, 12, 1, 1, 2},
+ {0x3800, 0xfc00, 7, 1, 2, 1},
+ {0x3400, 0xfc00, 7, 1, 3, 1},
+ {0x3000, 0xfc00, 7, 1, 4, 1},
+ {0x2600, 0xfe00, 8, 1, 5, 1},
+ {0x2400, 0xfe00, 8, 1, 6, 1},
+ {0x2200, 0xfe00, 8, 1, 7, 1},
+ {0x2000, 0xfe00, 8, 1, 8, 1}, // 70
+ {0x1a00, 0xff00, 9, 1, 9, 1},
+ {0x1900, 0xff00, 9, 1, 10, 1},
+ {0x1800, 0xff00, 9, 1, 11, 1},
+ {0x1700, 0xff00, 9, 1, 12, 1},
+ {0x1600, 0xff00, 9, 1, 13, 1},
+ {0x1500, 0xff00, 9, 1, 14, 1},
+ {0x1400, 0xff00, 9, 1, 15, 1},
+ {0x1300, 0xff00, 9, 1, 16, 1},
+ {0x0c00, 0xff80, 10, 1, 17, 1},
+ {0x0b80, 0xff80, 10, 1, 18, 1}, // 80
+ {0x0b00, 0xff80, 10, 1, 19, 1},
+ {0x0a80, 0xff80, 10, 1, 20, 1},
+ {0x0a00, 0xff80, 10, 1, 21, 1},
+ {0x0980, 0xff80, 10, 1, 22, 1},
+ {0x0900, 0xff80, 10, 1, 23, 1},
+ {0x0880, 0xff80, 10, 1, 24, 1},
+ {0x01c0, 0xffc0, 11, 1, 25, 1},
+ {0x0180, 0xffc0, 11, 1, 26, 1},
+ {0x0140, 0xffc0, 11, 1, 27, 1},
+ {0x0100, 0xffc0, 11, 1, 28, 1}, // 90
+ {0x0480, 0xffe0, 12, 1, 29, 1},
+ {0x04a0, 0xffe0, 12, 1, 30, 1},
+ {0x04c0, 0xffe0, 12, 1, 31, 1},
+ {0x04e0, 0xffe0, 12, 1, 32, 1},
+ {0x0580, 0xfff0, 13, 1, 33, 1},
+ {0x0590, 0xfff0, 13, 1, 34, 1},
+ {0x05a0, 0xfff0, 13, 1, 35, 1},
+ {0x05b0, 0xfff0, 13, 1, 36, 1},
+ {0x05c0, 0xfff0, 13, 1, 37, 1},
+ {0x05d0, 0xfff0, 13, 1, 38, 1}, // 100
+ {0x05e0, 0xfff0, 13, 1, 39, 1},
+ {0x05f0, 0xfff0, 13, 1, 40, 1},
+ {0x0600, 0xfe00, 7, 0, 0xffff, 0xffff}
+};
+
+/*
+ * Motion vector code table (Code, mask, nbits, vector (halfpixel, two's complement), diff (halfpixel, two's complement))
+ */
+#define MVD_LEN 64
+#define MVD_WID 5
+static const guint16 mvd[64][5] = {
+ {0x0028, 0xfff8, 13, 0x0060, 0x0020},
+ {0x0038, 0xfff8, 13, 0x0061, 0x0021},
+ {0x0050, 0xfff0, 12, 0x0062, 0x0022},
+ {0x0070, 0xfff0, 12, 0x0063, 0x0023},
+ {0x0090, 0xfff0, 12, 0x0064, 0x0024},
+ {0x00b0, 0xfff0, 12, 0x0065, 0x0025},
+ {0x00d0, 0xfff0, 12, 0x0066, 0x0026},
+ {0x00f0, 0xfff0, 12, 0x0067, 0x0027},
+ {0x0120, 0xffe0, 11, 0x0068, 0x0028},
+ {0x0160, 0xffe0, 11, 0x0069, 0x0029},
+ {0x01a0, 0xffe0, 11, 0x006a, 0x002a},
+ {0x01e0, 0xffe0, 11, 0x006b, 0x002b},
+ {0x0220, 0xffe0, 11, 0x006c, 0x002c},
+ {0x0260, 0xffe0, 11, 0x006d, 0x002d},
+ {0x02a0, 0xffe0, 11, 0x006e, 0x002e},
+ {0x02e0, 0xffe0, 11, 0x006f, 0x002f},
+ {0x0320, 0xffe0, 11, 0x0070, 0x0030},
+ {0x0360, 0xffe0, 11, 0x0071, 0x0031},
+ {0x03a0, 0xffe0, 11, 0x0072, 0x0032},
+ {0x03e0, 0xffe0, 11, 0x0073, 0x0033},
+ {0x0420, 0xffe0, 11, 0x0074, 0x0034},
+ {0x0460, 0xffe0, 11, 0x0075, 0x0035},
+ {0x04c0, 0xffc0, 10, 0x0076, 0x0036},
+ {0x0540, 0xffc0, 10, 0x0077, 0x0037},
+ {0x05c0, 0xffc0, 10, 0x0078, 0x0038},
+ {0x0700, 0xff00, 8, 0x0079, 0x0039},
+ {0x0900, 0xff00, 8, 0x007a, 0x003a},
+ {0x0b00, 0xff00, 8, 0x007b, 0x003b},
+ {0x0e00, 0xfe00, 7, 0x007c, 0x003c},
+ {0x1800, 0xf800, 5, 0x007d, 0x003d},
+ {0x3000, 0xf000, 4, 0x007e, 0x003e},
+ {0x6000, 0xe000, 3, 0x007f, 0x003f},
+ {0x8000, 0x8000, 1, 0x0000, 0x0000},
+ {0x4000, 0xe000, 3, 0x0001, 0x0041},
+ {0x2000, 0xf000, 4, 0x0002, 0x0042},
+ {0x1000, 0xf800, 5, 0x0003, 0x0043},
+ {0x0c00, 0xfe00, 7, 0x0004, 0x0044},
+ {0x0a00, 0xff00, 8, 0x0005, 0x0045},
+ {0x0800, 0xff00, 8, 0x0006, 0x0046},
+ {0x0600, 0xff00, 8, 0x0007, 0x0047},
+ {0x0580, 0xffc0, 10, 0x0008, 0x0048},
+ {0x0500, 0xffc0, 10, 0x0009, 0x0049},
+ {0x0480, 0xffc0, 10, 0x000a, 0x004a},
+ {0x0440, 0xffe0, 11, 0x000b, 0x004b},
+ {0x0400, 0xffe0, 11, 0x000c, 0x004c},
+ {0x03c0, 0xffe0, 11, 0x000d, 0x004d},
+ {0x0380, 0xffe0, 11, 0x000e, 0x004e},
+ {0x0340, 0xffe0, 11, 0x000f, 0x004f},
+ {0x0300, 0xffe0, 11, 0x0010, 0x0050},
+ {0x02c0, 0xffe0, 11, 0x0011, 0x0051},
+ {0x0280, 0xffe0, 11, 0x0012, 0x0052},
+ {0x0240, 0xffe0, 11, 0x0013, 0x0053},
+ {0x0200, 0xffe0, 11, 0x0014, 0x0054},
+ {0x01c0, 0xffe0, 11, 0x0015, 0x0055},
+ {0x0180, 0xffe0, 11, 0x0016, 0x0056},
+ {0x0140, 0xffe0, 11, 0x0017, 0x0057},
+ {0x0100, 0xffe0, 11, 0x0018, 0x0058},
+ {0x00e0, 0xfff0, 12, 0x0019, 0x0059},
+ {0x00c0, 0xfff0, 12, 0x001a, 0x005a},
+ {0x00a0, 0xfff0, 12, 0x001b, 0x005b},
+ {0x0080, 0xfff0, 12, 0x001c, 0x005c},
+ {0x0060, 0xfff0, 12, 0x001d, 0x005d},
+ {0x0040, 0xfff0, 12, 0x001e, 0x005e},
+ {0x0030, 0xfff8, 13, 0x001f, 0x005f}
+};
+
+GST_DEBUG_CATEGORY_STATIC (rtph263pay_debug);
+#define GST_CAT_DEFAULT (rtph263pay_debug)
+
+#define GST_RTP_HEADER_LEN 12
+
+enum
+{
+ PROP_0,
+ PROP_MODE_A_ONLY
+};
+
+static GstStaticPadTemplate gst_rtp_h263_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h263, "
+ "variant = (string) \"itu\", " "h263version = (string) \"h263\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_h263_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_H263_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H263\"; "
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H263\"")
+ );
+
+static void gst_rtp_h263_pay_finalize (GObject * object);
+
+static gboolean gst_rtp_h263_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static void gst_rtp_h263_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_h263_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static GstFlowReturn gst_rtp_h263_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+static void gst_rtp_h263_pay_boundry_init (GstRtpH263PayBoundry * boundry,
+ guint8 * start, guint8 * end, guint8 sbit, guint8 ebit);
+static GstRtpH263PayGob *gst_rtp_h263_pay_gob_new (GstRtpH263PayBoundry *
+ boundry, guint gobn);
+static GstRtpH263PayMB *gst_rtp_h263_pay_mb_new (GstRtpH263PayBoundry * boundry,
+ guint mba);
+static GstRtpH263PayPackage *gst_rtp_h263_pay_package_new_empty ();
+static GstRtpH263PayPackage *gst_rtp_h263_pay_package_new (guint8 * start,
+ guint8 * end, guint length, guint8 sbit, guint8 ebit, GstBuffer * outbuf,
+ gboolean marker);
+
+static void gst_rtp_h263_pay_mb_destroy (GstRtpH263PayMB * mb);
+static void gst_rtp_h263_pay_gob_destroy (GstRtpH263PayGob * gob, guint ind);
+static void gst_rtp_h263_pay_context_destroy (GstRtpH263PayContext * context,
+ guint ind);
+static void gst_rtp_h263_pay_package_destroy (GstRtpH263PayPackage * pack);
+
+#define gst_rtp_h263_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263Pay, gst_rtp_h263_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263pay, "rtph263pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H263_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_h263_pay_class_init (GstRtpH263PayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_h263_pay_finalize;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_h263_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_h263_pay_handle_buffer;
+ gobject_class->set_property = gst_rtp_h263_pay_set_property;
+ gobject_class->get_property = gst_rtp_h263_pay_get_property;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_MODE_A_ONLY, g_param_spec_boolean ("modea-only",
+ "Fragment packets in mode A Only",
+ "Disable packetization modes B and C", DEFAULT_MODE_A,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h263_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h263_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP H263 packet payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes H263 video in RTP packets (RFC 2190)",
+ "Neil Stratford <neils@vipadia.com>"
+ "Dejan Sakelsak <dejan.sakelsak@marand.si>");
+
+ GST_DEBUG_CATEGORY_INIT (rtph263pay_debug, "rtph263pay", 0,
+ "H263 RTP Payloader");
+}
+
+static void
+gst_rtp_h263_pay_init (GstRtpH263Pay * rtph263pay)
+{
+ GST_RTP_BASE_PAYLOAD_PT (rtph263pay) = GST_RTP_PAYLOAD_H263;
+ rtph263pay->prop_payload_mode = DEFAULT_MODE_A;
+}
+
+static void
+gst_rtp_h263_pay_finalize (GObject * object)
+{
+ GstRtpH263Pay *rtph263pay;
+
+ rtph263pay = GST_RTP_H263_PAY (object);
+
+ gst_buffer_replace (&rtph263pay->current_buffer, NULL);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_rtp_h263_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ gint width, height;
+ gchar *framesize = NULL;
+ gboolean res;
+
+ if (gst_structure_has_field (s, "width") &&
+ gst_structure_has_field (s, "height")) {
+ if (!gst_structure_get_int (s, "width", &width) || width <= 0) {
+ goto invalid_dimension;
+ }
+
+ if (!gst_structure_get_int (s, "height", &height) || height <= 0) {
+ goto invalid_dimension;
+ }
+
+ framesize = g_strdup_printf ("%d-%d", width, height);
+ }
+
+ gst_rtp_base_payload_set_options (payload, "video",
+ payload->pt != GST_RTP_PAYLOAD_H263, "H263", 90000);
+
+ if (framesize != NULL) {
+ res = gst_rtp_base_payload_set_outcaps (payload,
+ "a-framesize", G_TYPE_STRING, framesize, NULL);
+ } else {
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+ }
+ g_free (framesize);
+
+ return res;
+
+ /* ERRORS */
+invalid_dimension:
+ {
+ GST_ERROR_OBJECT (payload, "Invalid width/height from caps");
+ return FALSE;
+ }
+}
+
+static void
+gst_rtp_h263_pay_context_destroy (GstRtpH263PayContext * context, guint ind)
+{
+ if (!context)
+ return;
+
+ if (context->gobs) {
+ guint i;
+
+ for (i = 0; i < format_props[ind][0]; i++) {
+ if (context->gobs[i]) {
+ gst_rtp_h263_pay_gob_destroy (context->gobs[i], ind);
+ }
+ }
+
+ g_free (context->gobs);
+ }
+
+ g_free (context);
+}
+
+static void
+gst_rtp_h263_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpH263Pay *rtph263pay;
+
+ rtph263pay = GST_RTP_H263_PAY (object);
+
+ switch (prop_id) {
+ case PROP_MODE_A_ONLY:
+ rtph263pay->prop_payload_mode = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_h263_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpH263Pay *rtph263pay;
+
+ rtph263pay = GST_RTP_H263_PAY (object);
+
+ switch (prop_id) {
+ case PROP_MODE_A_ONLY:
+ g_value_set_boolean (value, rtph263pay->prop_payload_mode);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstRtpH263PayPackage *
+gst_rtp_h263_pay_package_new_empty (void)
+{
+ return (GstRtpH263PayPackage *) g_malloc0 (sizeof (GstRtpH263PayPackage));
+}
+
+static GstRtpH263PayPackage *
+gst_rtp_h263_pay_package_new (guint8 * start, guint8 * end, guint length,
+ guint8 sbit, guint8 ebit, GstBuffer * outbuf, gboolean marker)
+{
+
+ GstRtpH263PayPackage *package;
+
+ package = gst_rtp_h263_pay_package_new_empty ();
+
+ package->payload_start = start;
+ package->payload_end = end;
+ package->payload_len = length;
+ package->sbit = sbit;
+ package->ebit = ebit;
+ package->outbuf = outbuf;
+ package->marker = marker;
+
+ return package;
+}
+
+static void
+gst_rtp_h263_pay_package_destroy (GstRtpH263PayPackage * pack)
+{
+ if (pack)
+ g_free (pack);
+}
+
+static void
+gst_rtp_h263_pay_boundry_init (GstRtpH263PayBoundry * boundry,
+ guint8 * start, guint8 * end, guint8 sbit, guint8 ebit)
+{
+ boundry->start = start;
+ boundry->end = end;
+ boundry->sbit = sbit;
+ boundry->ebit = ebit;
+}
+
/* Fill an RFC 2190 mode A payload header in place at @header.
 * sbit/ebit come from the package; SRC/I/U/S/A are copied from the
 * parsed picture layer; every other field is written as zero. */
static void
gst_rtp_h263_pay_splat_header_A (guint8 * header,
    GstRtpH263PayPackage * package, GstRtpH263PayPic * piclayer)
{

  GstRtpH263PayAHeader *a_header;

  a_header = (GstRtpH263PayAHeader *) header;

  a_header->f = 0;              /* F=0 selects the mode A header layout */
  a_header->p = 0;
  a_header->sbit = package->sbit;
  a_header->ebit = package->ebit;
  a_header->src = GST_H263_PICTURELAYER_PLSRC (piclayer);
  a_header->i = GST_H263_PICTURELAYER_PLTYPE (piclayer);
  a_header->u = GST_H263_PICTURELAYER_PLUMV (piclayer);
  a_header->s = GST_H263_PICTURELAYER_PLSAC (piclayer);
  a_header->a = GST_H263_PICTURELAYER_PLAP (piclayer);
  a_header->r1 = 0;             /* reserved */
  a_header->r2 = 0;             /* reserved */
  a_header->dbq = 0;            /* DBQ/TRB/TR zeroed — PB-frame fields unused here */
  a_header->trb = 0;
  a_header->tr = 0;

}
+
/* Fill an RFC 2190 mode B payload header in place at @header.
 * Besides the picture-layer bits, mode B carries the GOB number, the
 * macroblock address (split across two bitfields), the quantizer and —
 * for inter macroblocks — the first macroblock's motion vector
 * predictors packed into the hmv/vmv bitfields. */
static void
gst_rtp_h263_pay_splat_header_B (guint8 * header,
    GstRtpH263PayPackage * package, GstRtpH263PayPic * piclayer)
{

  GstRtpH263PayBHeader *b_header;

  b_header = (GstRtpH263PayBHeader *) header;

  b_header->f = 1;              /* F=1 selects the mode B/C header layout */
  b_header->p = 0;              /* P=0: mode B, not mode C */
  b_header->sbit = package->sbit;
  b_header->ebit = package->ebit;
  b_header->src = GST_H263_PICTURELAYER_PLSRC (piclayer);
  b_header->quant = package->quant;
  b_header->gobn = package->gobn;
  /* 9-bit MBA is split 3 + 6 across a byte boundary in the header. */
  b_header->mba1 = package->mba >> 6;
  b_header->mba2 = package->mba & 0x003f;
  b_header->r = 0;
  b_header->i = GST_H263_PICTURELAYER_PLTYPE (piclayer);
  b_header->u = GST_H263_PICTURELAYER_PLUMV (piclayer);
  b_header->s = GST_H263_PICTURELAYER_PLSAC (piclayer);
  b_header->a = GST_H263_PICTURELAYER_PLAP (piclayer);

  /* Motion vector fields default to zero (intra or no MVs present). */
  b_header->hmv11 = 0;
  b_header->hmv12 = 0;
  b_header->vmv11 = 0;
  b_header->vmv12 = 0;
  b_header->hmv21 = 0;
  b_header->hmv22 = 0;
  b_header->vmv21 = 0;

  if (package->nmvd > 0) {
    /* Each 7-bit vector component straddles a byte boundary, hence the
     * high/low split of every value. */
    //mvd[0]
    b_header->hmv11 = (package->mvd[0] & 0x7f) >> 3;
    b_header->hmv12 = (package->mvd[0] & 0x07);
    //mvd[1]
    b_header->vmv11 = (package->mvd[1] & 0x07f) >> 2;
    b_header->vmv12 = (package->mvd[1] & 0x03);

    if (package->nmvd == 8) {
      /* Four-vector macroblock: also emit the second vector pair. */
      //mvd[4]
      b_header->hmv21 = (package->mvd[4] & 0x7f) >> 1;
      b_header->hmv22 = (package->mvd[4] & 0x01);
      //mvd[5]
      b_header->vmv21 = (package->mvd[5] & 0x7f);
    }
  }

}
+
+static gboolean
+gst_rtp_h263_pay_gobfinder (GstRtpH263Pay * rtph263pay,
+ GstRtpH263PayBoundry * boundry)
+{
+ guint8 *current;
+ guint range;
+ guint i;
+
+ current = boundry->end + 1;
+ range = (rtph263pay->data - current) + rtph263pay->available_data;
+
+
+ GST_DEBUG_OBJECT (rtph263pay,
+ "Searching for next GOB, data:%p, len:%u, payload_len:%p,"
+ " current:%p, range:%u", rtph263pay->data, rtph263pay->available_data,
+ boundry->end + 1, current, range);
+
+ /* If we are past the end, stop */
+ if (current >= rtph263pay->data + rtph263pay->available_data)
+ return FALSE;
+
+ for (i = 3; i < range - 3; i++) {
+ if ((current[i] == 0x0) &&
+ (current[i + 1] == 0x0) && (current[i + 2] >> 7 == 0x1)) {
+ GST_LOG_OBJECT (rtph263pay, "GOB end found at: %p start: %p len: %u",
+ current + i - 1, boundry->end + 1,
+ (guint) (current + i - boundry->end + 2));
+ gst_rtp_h263_pay_boundry_init (boundry, boundry->end + 1, current + i - 1,
+ 0, 0);
+
+ return TRUE;
+ }
+ }
+
+ GST_DEBUG_OBJECT (rtph263pay,
+ "Couldn't find any new GBSC in this frame, range:%u", range);
+
+ gst_rtp_h263_pay_boundry_init (boundry, boundry->end + 1,
+ (guint8 *) (rtph263pay->data + rtph263pay->available_data - 1), 0, 0);
+
+ return TRUE;
+}
+
+static GstRtpH263PayGob *
+gst_rtp_h263_pay_gob_new (GstRtpH263PayBoundry * boundry, guint gobn)
+{
+ GstRtpH263PayGob *gob;
+
+ gob = (GstRtpH263PayGob *) g_malloc0 (sizeof (GstRtpH263PayGob));
+
+ gob->start = boundry->start;
+ gob->end = boundry->end;
+ gob->length = boundry->end - boundry->start + 1;
+ gob->ebit = boundry->ebit;
+ gob->sbit = boundry->sbit;
+ gob->gobn = gobn;
+ gob->quant = 0;
+ gob->macroblocks = NULL;
+ gob->nmacroblocs = 0;
+
+ return gob;
+}
+
+static void
+gst_rtp_h263_pay_gob_destroy (GstRtpH263PayGob * gob, guint ind)
+{
+
+ if (!gob)
+ return;
+
+ if (gob->macroblocks) {
+
+ guint i;
+
+ for (i = 0; i < gob->nmacroblocs; i++) {
+ gst_rtp_h263_pay_mb_destroy (gob->macroblocks[i]);
+ }
+
+ g_free (gob->macroblocks);
+ }
+
+ g_free (gob);
+}
+
+/*
+ * decode MCBPC for I frames and return index in table or -1 if not found
+ */
+static gint
+gst_rtp_h263_pay_decode_mcbpc_I (GstRtpH263Pay * rtph263pay, guint32 value)
+{
+
+ gint i;
+ guint16 code;
+
+ code = value >> 16;
+
+ GST_TRACE_OBJECT (rtph263pay, "value:0x%08x, code:0x%04x", value, code);
+
+ for (i = 0; i < MCBPC_I_LEN; i++) {
+ if ((code & mcbpc_I[i][1]) == mcbpc_I[i][0]) {
+ return i;
+ }
+ }
+
+ GST_WARNING_OBJECT (rtph263pay, "Couldn't find code, returning -1");
+
+ return -1;
+}
+
+/*
+ * decode MCBPC for P frames and return index in table or -1 if not found
+ */
+static gint
+gst_rtp_h263_pay_decode_mcbpc_P (GstRtpH263Pay * rtph263pay, guint32 value)
+{
+
+ gint i;
+ guint16 code;
+
+ code = value >> 16;
+
+ GST_TRACE_OBJECT (rtph263pay, "value:0x%08x, code:0x%04x", value, code);
+
+ for (i = 0; i < MCBPC_P_LEN; i++) {
+ if ((code & mcbpc_P[i][1]) == mcbpc_P[i][0]) {
+ return i;
+ }
+ }
+
+ GST_WARNING_OBJECT (rtph263pay, "Couldn't find code, returning -1");
+
+ return -1;
+}
+
+/*
+ * decode CBPY and return index in table or -1 if not found
+ */
+static gint
+gst_rtp_h263_pay_decode_cbpy (GstRtpH263Pay * rtph263pay, guint32 value,
+ const guint8 cbpy_table[16][7])
+{
+
+ gint i;
+ guint8 code;
+
+ code = value >> 24;
+
+ GST_TRACE_OBJECT (rtph263pay, "value:0x%08x, code:0x%04x", value, code);
+
+ for (i = 0; i < CBPY_LEN; i++) {
+ if ((code & cbpy_table[i][1]) == cbpy_table[i][0]) {
+ return i;
+ }
+ }
+
+ GST_WARNING_OBJECT (rtph263pay, "Couldn't find code, returning -1");
+
+ return -1;
+}
+
+/*
+ * decode MVD and return index in table or -1 if not found
+ */
+static gint
+gst_rtp_h263_pay_decode_mvd (GstRtpH263Pay * rtph263pay, guint32 value)
+{
+
+ gint i;
+ guint16 code;
+
+ code = value >> 16;
+
+ GST_TRACE_OBJECT (rtph263pay, "value:0x%08x, code:0x%04x", value, code);
+
+ for (i = 0; i < MVD_LEN; i++) {
+ if ((code & mvd[i][1]) == mvd[i][0]) {
+ return i;
+ }
+ }
+
+ GST_WARNING_OBJECT (rtph263pay, "Couldn't find code, returning -1");
+
+ return -1;
+}
+
+/*
+ * decode TCOEF and return index in table or -1 if not found
+ */
+static gint
+gst_rtp_h263_pay_decode_tcoef (GstRtpH263Pay * rtph263pay, guint32 value)
+{
+
+ gint i;
+ guint16 code;
+
+ code = value >> 16;
+
+ GST_TRACE_OBJECT (rtph263pay, "value:0x%08x, code:0x%04x", value, code);
+
+ for (i = 0; i < TCOEF_LEN; i++) {
+ if ((code & tcoef[i][1]) == tcoef[i][0]) {
+ GST_TRACE_OBJECT (rtph263pay, "tcoef is %d", i);
+ return i;
+ }
+ }
+
+ GST_WARNING_OBJECT (rtph263pay, "Couldn't find code, returning -1");
+
+ return -1;
+}
+
+/*
+ * the 32-bit register is like a window that we move right for "move_bits" to get the next v "data" h263 field
+ * "rest_bits" tells how many bits in the "data" byte address are still not used
+ */
+
+static gint
+gst_rtp_h263_pay_move_window_right (GstRtpH263Pay * rtph263pay,
+ GstRtpH263PayContext * context, guint n, guint rest_bits,
+ guint8 ** orig_data, guint8 ** data_end)
+{
+
+ GST_TRACE_OBJECT (rtph263pay,
+ "Moving window: 0x%08x from: %p for %d bits, rest_bits: %d, data_end %p",
+ context->window, context->win_end, n, rest_bits, *data_end);
+
+ if (n == 0)
+ return rest_bits;
+
+ while (n != 0 || context->win_end == ((*data_end) + 1)) {
+ guint8 b = context->win_end <= *data_end ? *context->win_end : 0;
+ if (rest_bits == 0) {
+ if (n > 8) {
+ context->window = (context->window << 8) | b;
+ n -= 8;
+ } else {
+ context->window = (context->window << n) | (b >> (8 - n));
+ rest_bits = 8 - n;
+ if (rest_bits == 0)
+ context->win_end++;
+ break;
+ }
+ } else {
+ if (n > rest_bits) {
+ context->window = (context->window << rest_bits) |
+ (b & (((guint) pow (2.0, (double) rest_bits)) - 1));
+ n -= rest_bits;
+ rest_bits = 0;
+ } else {
+ context->window = (context->window << n) |
+ ((b & (((guint) pow (2.0, (double) rest_bits)) - 1)) >>
+ (rest_bits - n));
+ rest_bits -= n;
+ if (rest_bits == 0)
+ context->win_end++;
+ break;
+ }
+ }
+
+ context->win_end++;
+ }
+
+ *orig_data = context->win_end - 4;
+
+ GST_TRACE_OBJECT (rtph263pay,
+ "Window moved to %p with value: 0x%08x and orig_data: %p rest_bits: %d",
+ context->win_end, context->window, *orig_data, rest_bits);
+ return rest_bits;
+}
+
+/*
+ * Find the start of the next MB (end of the current MB)
+ * returns the number of excess bits and stores the end of the MB in end
+ * data must be placed on first MB byte
+ */
/*
 * Parse one macroblock of @gob, starting right after @macroblock, by
 * walking the MB-layer VLCs (MCBPC, CBPY, DQUANT, MVD, INTRADC/TCOEF)
 * through the sliding bit window in @context.  Returns a freshly
 * allocated descriptor for the macroblock (caller owns it), or NULL if
 * any VLC lookup fails.
 *
 * NOTE(review): mb_type_index/cbpy_type_index/tcoef_type_index are guint
 * but compared against -1; this works because the decoders' gint -1
 * converts to G_MAXUINT on assignment and the literal -1 is promoted the
 * same way in the comparison — gint would state the intent better.
 */
static GstRtpH263PayMB *
gst_rtp_h263_pay_B_mbfinder (GstRtpH263Pay * rtph263pay,
    GstRtpH263PayContext * context, GstRtpH263PayGob * gob,
    GstRtpH263PayMB * macroblock, guint mba)
{
  guint mb_type_index;
  guint cbpy_type_index;
  guint tcoef_type_index;
  GstRtpH263PayMB *mac;
  GstRtpH263PayBoundry boundry;

  /* The new MB starts on the byte where the previous one ended; its sbit
   * is whatever the previous MB's ebit left unconsumed. */
  gst_rtp_h263_pay_boundry_init (&boundry, macroblock->end,
      macroblock->end, 8 - macroblock->ebit, macroblock->ebit);
  mac = gst_rtp_h263_pay_mb_new (&boundry, mba);

  if (mac->sbit == 8) {
    /* Previous MB ended exactly on a byte boundary: begin on the next byte. */
    mac->start++;
//  mac->end++;
    mac->sbit = 0;
  }

  GST_LOG_OBJECT (rtph263pay,
      "current_pos:%p, end:%p, rest_bits:%d, window:0x%08x", mac->start,
      mac->end, macroblock->ebit, context->window);

  if (context->piclayer->ptype_pictype == 0) {
    //We have an I frame
    gint i;
    guint last;
    guint ind;

    //Step 2 decode MCBPC I
    mb_type_index =
        gst_rtp_h263_pay_decode_mcbpc_I (rtph263pay, context->window);

    GST_TRACE_OBJECT (rtph263pay, "MCBPC index: %d", mb_type_index);
    if (mb_type_index == -1) {
      GST_ERROR_OBJECT (rtph263pay, "MB index shouldn't be -1 in window: %08x",
          context->window);
      goto beach;
    }

    mac->ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context,
        mcbpc_I[mb_type_index][2], mac->ebit, &mac->end, &gob->end);

    mac->mb_type = mcbpc_I[mb_type_index][5];

    if (mb_type_index == 8) {
      /* Stuffing code: no further MB-layer fields follow. */
      GST_TRACE_OBJECT (rtph263pay, "Stuffing skipping rest of MB header");
      return mac;
    }
    //Step 3 decode CBPY I
    cbpy_type_index =
        gst_rtp_h263_pay_decode_cbpy (rtph263pay, context->window, cbpy_I);

    GST_TRACE_OBJECT (rtph263pay, "CBPY index: %d", cbpy_type_index);
    if (cbpy_type_index == -1) {
      GST_ERROR_OBJECT (rtph263pay,
          "CBPY index shouldn't be -1 in window: %08x", context->window);
      goto beach;
    }

    mac->ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context,
        cbpy_I[cbpy_type_index][2], mac->ebit, &mac->end, &gob->end);

    //Step 4 decode rest of MB
    //MB type 1 and 4 have DQUANT - we store it for packaging purposes
    if (mcbpc_I[mb_type_index][5] == 4) {
      GST_TRACE_OBJECT (rtph263pay, "Shifting DQUANT");

      /* DQUANT is the top 2 bits of the window. */
      mac->quant = (context->window >> 30);

      mac->ebit =
          gst_rtp_h263_pay_move_window_right (rtph263pay, context, 2, mac->ebit,
          &mac->end, &gob->end);
    }
    //Step 5 go trough the blocks - decode DC and TCOEF
    last = 0;
    for (i = 0; i < N_BLOCKS; i++) {

      /* Every I-frame block carries an 8-bit INTRADC value. */
      GST_TRACE_OBJECT (rtph263pay, "Decoding INTRADC and TCOEF, i:%d", i);
      mac->ebit =
          gst_rtp_h263_pay_move_window_right (rtph263pay, context, 8, mac->ebit,
          &mac->end, &gob->end);

      /* Coded-block flag: chroma blocks come from MCBPC, luma from CBPY. */
      if (i > 3) {
        ind = mcbpc_I[mb_type_index][i - 1];
      } else {
        ind = cbpy_I[cbpy_type_index][i + 3];
      }

      if (ind == 1) {
        /* Consume TCOEF events until one flagged LAST ends the block. */
        while (last == 0) {
          tcoef_type_index =
              gst_rtp_h263_pay_decode_tcoef (rtph263pay, context->window);

          GST_TRACE_OBJECT (rtph263pay, "TCOEF index: %d", tcoef_type_index);
          if (tcoef_type_index == -1) {
            GST_ERROR_OBJECT (rtph263pay,
                "TCOEF index shouldn't be -1 in window: %08x", context->window);
            goto beach;
          }
          mac->ebit =
              gst_rtp_h263_pay_move_window_right (rtph263pay, context,
              tcoef[tcoef_type_index][2], mac->ebit, &mac->end, &gob->end);

          last = tcoef[tcoef_type_index][3];
          if (tcoef_type_index == 102) {
            /* ESCAPE event: LAST is the explicit next bit, then skip the
             * remaining 15 bits of the fixed-length code. */
            if ((context->window & 0x80000000) == 0x80000000)
              last = 1;
            else
              last = 0;

            mac->ebit =
                gst_rtp_h263_pay_move_window_right (rtph263pay, context, 15,
                mac->ebit, &mac->end, &gob->end);
          }
        }
        last = 0;
      }
    }

  } else {
    //We have a P frame
    guint i;
    guint last;
    guint ind;

    //Step 1 check COD
    GST_TRACE_OBJECT (rtph263pay, "Checking for COD");
    if ((context->window & 0x80000000) == 0x80000000) {
      //The MB is not coded
      mac->ebit =
          gst_rtp_h263_pay_move_window_right (rtph263pay, context, 1, mac->ebit,
          &mac->end, &gob->end);
      GST_TRACE_OBJECT (rtph263pay, "COOOOOOOOOOOD = 1");

      return mac;
    } else {
      //The MB is coded
      mac->ebit =
          gst_rtp_h263_pay_move_window_right (rtph263pay, context, 1, mac->ebit,
          &mac->end, &gob->end);
    }

    //Step 2 decode MCBPC P
    mb_type_index =
        gst_rtp_h263_pay_decode_mcbpc_P (rtph263pay, context->window);

    GST_TRACE_OBJECT (rtph263pay, "MCBPC index: %d", mb_type_index);
    if (mb_type_index == -1) {
      GST_ERROR_OBJECT (rtph263pay, "MB index shouldn't be -1 in window: %08x",
          context->window);
      goto beach;
    }
    mac->ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context,
        mcbpc_P[mb_type_index][2], mac->ebit, &mac->end, &gob->end);

    mac->mb_type = mcbpc_P[mb_type_index][5];

    if (mb_type_index == 20) {
      /* Stuffing code: no further MB-layer fields follow. */
      GST_TRACE_OBJECT (rtph263pay, "Stuffing skipping rest of MB header");
      return mac;
    }
    //Step 3 decode CBPY P
    cbpy_type_index =
        gst_rtp_h263_pay_decode_cbpy (rtph263pay, context->window, cbpy_P);

    GST_TRACE_OBJECT (rtph263pay, "CBPY index: %d", cbpy_type_index);
    if (cbpy_type_index == -1) {
      GST_ERROR_OBJECT (rtph263pay,
          "CBPY index shouldn't be -1 in window: %08x", context->window);
      goto beach;
    }
    mac->ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context,
        cbpy_P[cbpy_type_index][2], mac->ebit, &mac->end, &gob->end);

    //MB type 1 and 4 have DQUANT - we add it to MB object and jump over
    if (mcbpc_P[mb_type_index][5] == 4 || mcbpc_P[mb_type_index][5] == 1) {
      GST_TRACE_OBJECT (rtph263pay, "Shifting DQUANT");

      mac->quant = context->window >> 30;

      mac->ebit =
          gst_rtp_h263_pay_move_window_right (rtph263pay, context, 2, mac->ebit,
          &mac->end, &gob->end);
    }
    //MB types < 3 have MVD1-4
    if (mcbpc_P[mb_type_index][5] < 3) {

      guint nmvd;
      gint j;

      /* Type 2 macroblocks carry 4 vectors (8 components), others 1 (2). */
      nmvd = 2;
      if (mcbpc_P[mb_type_index][5] == 2)
        nmvd = 8;

      for (j = 0; j < nmvd; j++) {
        guint mvd_type;

        mvd_type = gst_rtp_h263_pay_decode_mvd (rtph263pay, context->window);

        if (mvd_type == -1) {
          GST_ERROR_OBJECT (rtph263pay,
              "MVD1-4 index shouldn't be -1 in window: %08x", context->window);
          goto beach;
        }
        //set the MB mvd values
        mac->mvd[j] = mvd[mvd_type][3];

        mac->ebit =
            gst_rtp_h263_pay_move_window_right (rtph263pay, context,
            mvd[mvd_type][2], mac->ebit, &mac->end, &gob->end);
      }


    }
    //Step 5 go trough the blocks - decode DC and TCOEF
    last = 0;
    for (i = 0; i < N_BLOCKS; i++) {

      //if MB type 3 or 4 then INTRADC coef are present in blocks
      if (mcbpc_P[mb_type_index][5] > 2) {
        GST_TRACE_OBJECT (rtph263pay, "INTRADC coef: %d", i);
        mac->ebit =
            gst_rtp_h263_pay_move_window_right (rtph263pay, context, 8,
            mac->ebit, &mac->end, &gob->end);
      } else {
        GST_TRACE_OBJECT (rtph263pay, "INTRADC coef is not present");
      }

      //check if the block has TCOEF
      if (i > 3) {
        ind = mcbpc_P[mb_type_index][i - 1];
      } else {
        /* Intra-coded P macroblocks use the I-frame CBPY table. */
        if (mcbpc_P[mb_type_index][5] > 2) {
          ind = cbpy_I[cbpy_type_index][i + 3];
        } else {
          ind = cbpy_P[cbpy_type_index][i + 3];
        }
      }

      if (ind == 1) {
        /* Consume TCOEF events until one flagged LAST ends the block. */
        while (last == 0) {
          tcoef_type_index =
              gst_rtp_h263_pay_decode_tcoef (rtph263pay, context->window);

          GST_TRACE_OBJECT (rtph263pay, "TCOEF index: %d", tcoef_type_index);
          if (tcoef_type_index == -1) {
            GST_ERROR_OBJECT (rtph263pay,
                "TCOEF index shouldn't be -1 in window: %08x", context->window);
            goto beach;
          }

          mac->ebit =
              gst_rtp_h263_pay_move_window_right (rtph263pay, context,
              tcoef[tcoef_type_index][2], mac->ebit, &mac->end, &gob->end);

          last = tcoef[tcoef_type_index][3];
          if (tcoef_type_index == 102) {
            /* ESCAPE event: explicit LAST bit, then 15 more fixed bits. */
            if ((context->window & 0x80000000) == 0x80000000)
              last = 1;
            else
              last = 0;

            mac->ebit =
                gst_rtp_h263_pay_move_window_right (rtph263pay, context, 15,
                mac->ebit, &mac->end, &gob->end);
          }
        }
        last = 0;
      }
    }
  }

  mac->length = mac->end - mac->start + 1;

  return mac;

beach:
  gst_rtp_h263_pay_mb_destroy (mac);
  return NULL;
}
+
+static GstRtpH263PayMB *
+gst_rtp_h263_pay_mb_new (GstRtpH263PayBoundry * boundry, guint mba)
+{
+ GstRtpH263PayMB *mb;
+ gint i;
+
+ mb = (GstRtpH263PayMB *) g_malloc0 (sizeof (GstRtpH263PayMB));
+
+ mb->start = boundry->start;
+ mb->end = boundry->end;
+ mb->length = boundry->end - boundry->start + 1;
+ mb->sbit = boundry->sbit;
+ mb->ebit = boundry->ebit;
+ mb->mba = mba;
+
+ for (i = 0; i < 5; i++)
+ mb->mvd[i] = 0;
+
+ return mb;
+}
+
+static void
+gst_rtp_h263_pay_mb_destroy (GstRtpH263PayMB * mb)
+{
+ if (!mb)
+ return;
+
+ g_free (mb);
+}
+
+static GstFlowReturn
+gst_rtp_h263_pay_push (GstRtpH263Pay * rtph263pay,
+ GstRtpH263PayContext * context, GstRtpH263PayPackage * package)
+{
+
+ /*
+ * Splat the payload header values
+ */
+ guint8 *header;
+ GstFlowReturn ret;
+ GstRTPBuffer rtp = { NULL };
+
+ gst_rtp_buffer_map (package->outbuf, GST_MAP_WRITE, &rtp);
+
+ header = gst_rtp_buffer_get_payload (&rtp);
+
+ switch (package->mode) {
+ case GST_RTP_H263_PAYLOAD_HEADER_MODE_A:
+ GST_LOG_OBJECT (rtph263pay, "Pushing A packet");
+ gst_rtp_h263_pay_splat_header_A (header, package, context->piclayer);
+ break;
+ case GST_RTP_H263_PAYLOAD_HEADER_MODE_B:
+ GST_LOG_OBJECT (rtph263pay, "Pushing B packet");
+ gst_rtp_h263_pay_splat_header_B (header, package, context->piclayer);
+ break;
+ case GST_RTP_H263_PAYLOAD_HEADER_MODE_C:
+ //gst_rtp_h263_pay_splat_header_C(header, package, context->piclayer);
+ //break;
+ default:
+ return GST_FLOW_ERROR;
+ }
+
+ /*
+ * timestamp the buffer
+ */
+ GST_BUFFER_PTS (package->outbuf) = rtph263pay->first_ts;
+
+ gst_rtp_buffer_set_marker (&rtp, package->marker);
+ if (package->marker)
+ GST_DEBUG_OBJECT (rtph263pay, "Marker set!");
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ /*
+ * Copy the payload data in the buffer
+ */
+ GST_DEBUG_OBJECT (rtph263pay, "Copying memory");
+ gst_buffer_copy_into (package->outbuf, rtph263pay->current_buffer,
+ GST_BUFFER_COPY_MEMORY, package->payload_start - rtph263pay->map.data,
+ package->payload_len);
+ gst_rtp_copy_video_meta (rtph263pay, package->outbuf,
+ rtph263pay->current_buffer);
+
+ ret =
+ gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263pay),
+ package->outbuf);
+ GST_DEBUG_OBJECT (rtph263pay, "Package pushed, returning");
+
+ gst_rtp_h263_pay_package_destroy (package);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_rtp_h263_pay_A_fragment_push (GstRtpH263Pay * rtph263pay,
+ GstRtpH263PayContext * context, guint first, guint last)
+{
+
+ GstRtpH263PayPackage *pack;
+
+ pack = gst_rtp_h263_pay_package_new_empty ();
+
+ pack->payload_start = context->gobs[first]->start;
+ pack->sbit = context->gobs[first]->sbit;
+ pack->ebit = context->gobs[last]->ebit;
+ pack->payload_len =
+ (context->gobs[last]->end - context->gobs[first]->start) + 1;
+ pack->marker = FALSE;
+
+ if (last == context->no_gobs - 1) {
+ pack->marker = TRUE;
+ }
+
+ pack->gobn = context->gobs[first]->gobn;
+ pack->mode = GST_RTP_H263_PAYLOAD_HEADER_MODE_A;
+ pack->outbuf =
+ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+ (rtph263pay), pack->mode, 0, 0);
+
+ GST_DEBUG_OBJECT (rtph263pay, "Sending len:%d data to push function",
+ pack->payload_len);
+
+ return gst_rtp_h263_pay_push (rtph263pay, context, pack);
+}
+
/*
 * Build and push one mode B packet covering macroblocks [first, last] of
 * @gob.  The package start/sbit come from the first MB (or from the GOB
 * header boundry when the packet begins the GOB), the quantizer from the
 * GOB or first MB, and the first MB's motion vectors are copied in so
 * the mode B header can advertise the predictors.  The marker bit is set
 * only for the last MB of the frame's last GOB.
 */
static GstFlowReturn
gst_rtp_h263_pay_B_fragment_push (GstRtpH263Pay * rtph263pay,
    GstRtpH263PayContext * context, GstRtpH263PayGob * gob, guint first,
    guint last, GstRtpH263PayBoundry * boundry)
{

  GstRtpH263PayPackage *pack;
  guint mv;

  pack = gst_rtp_h263_pay_package_new_empty ();

  pack->payload_start = gob->macroblocks[first]->start;
  pack->sbit = gob->macroblocks[first]->sbit;
  if (first == 0) {
    /* Packet starts the GOB: include the GOB header, use PQUANT/GQUANT. */
    pack->payload_start = boundry->start;
    pack->sbit = boundry->sbit;
    pack->quant = gob->quant;
  } else {
    pack->quant = gob->macroblocks[first]->quant;
  }
  pack->payload_end = gob->macroblocks[last]->end;

  pack->ebit = gob->macroblocks[last]->ebit;
  pack->mba = gob->macroblocks[first]->mba;
  pack->gobn = gob->gobn;
  pack->mode = GST_RTP_H263_PAYLOAD_HEADER_MODE_B;
  pack->nmvd = 0;

  /* Inter MB (type < 3): copy 2 components for one vector, 8 for the
   * four-vector case (type 2). */
  if (gob->macroblocks[first]->mb_type < 3) {
    if (gob->macroblocks[first]->mb_type == 2)
      pack->nmvd = 8;
    else if (gob->macroblocks[first]->mb_type < 2)
      pack->nmvd = 2;

    for (mv = 0; mv < pack->nmvd; mv++)
      pack->mvd[mv] = gob->macroblocks[first]->mvd[mv];
  }

  pack->marker = FALSE;
  if (last == gob->nmacroblocs - 1) {
    /* Packet ends on the GOB boundary, so no trailing bits remain. */
    pack->ebit = 0;
  }

  /* Marker only on the last MB of the frame's last GOB. */
  if ((format_props[context->piclayer->ptype_srcformat][0] - 1 == gob->gobn)
      && (last == gob->nmacroblocs - 1)) {
    pack->marker = TRUE;
  }

  pack->payload_len = pack->payload_end - pack->payload_start + 1;
  pack->outbuf =
      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
      (rtph263pay), pack->mode, 0, 0);

  return gst_rtp_h263_pay_push (rtph263pay, context, pack);
}
+
+
/*
 * Fragment one GOB in RFC 2190 mode B: parse its header (picture header
 * for GOB 0) and every macroblock via gst_rtp_h263_pay_B_mbfinder(),
 * then greedily pack consecutive macroblocks into packets that fit the
 * MTU and push them.  Returns FALSE on any parse or push error.
 */
static gboolean
gst_rtp_h263_pay_mode_B_fragment (GstRtpH263Pay * rtph263pay,
    GstRtpH263PayContext * context, GstRtpH263PayGob * gob)
{


  /*---------- MODE B MODE FRAGMENTATION ----------*/
  GstRtpH263PayMB *mac, *mac0;
  guint max_payload_size;
  GstRtpH263PayBoundry boundry;
  guint mb;
  guint8 ebit;

  guint first = 0;
  guint payload_len;

  max_payload_size =
      context->mtu - GST_RTP_H263_PAYLOAD_HEADER_MODE_B - GST_RTP_HEADER_LEN;

  gst_rtp_h263_pay_boundry_init (&boundry, gob->start, gob->start, gob->sbit,
      0);

  gob->macroblocks =
      (GstRtpH263PayMB **) g_malloc0 (sizeof (GstRtpH263PayMB *) *
      format_props[context->piclayer->ptype_srcformat][1]);

  GST_LOG_OBJECT (rtph263pay, "GOB isn't PB frame, applying mode B");

  //initializing window
  context->win_end = boundry.end;
  if (gst_rtp_h263_pay_move_window_right (rtph263pay, context, 32, boundry.ebit,
          &boundry.end, &gob->end) != 0) {
    GST_ERROR_OBJECT (rtph263pay,
        "The rest of the bits should be 0, exiting, because something bad happend");
    goto decode_error;
  }
  //The first GOB of a frame "has no" actual header - PICTURE header is his header
  if (gob->gobn == 0) {
    guint shift;
    GST_LOG_OBJECT (rtph263pay, "Initial GOB");
    /* Skip the fixed 43 bits of the picture header up to PQUANT. */
    shift = 43;

    boundry.ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context, shift,
        boundry.ebit, &boundry.end, &gob->end);

    //We need PQUANT for mode B packages - so we store it
    gob->quant = context->window >> 27;

    //if PCM == 1, then PSBI is present - header has 51 bits
    //shift for PQUANT (5) and PCM (1) = 6 bits
    shift = 6;
    if (context->cpm == 1)
      shift += 2;
    boundry.ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context, shift,
        boundry.ebit, &boundry.end, &gob->end);

    GST_TRACE_OBJECT (rtph263pay, "window: 0x%08x", context->window);

    //Shifting the PEI and PSPARE fields
    while ((context->window & 0x80000000) == 0x80000000) {
      boundry.ebit =
          gst_rtp_h263_pay_move_window_right (rtph263pay, context, 9,
          boundry.ebit, &boundry.end, &gob->end);
      GST_TRACE_OBJECT (rtph263pay, "window: 0x%08x", context->window);
    }

    //shift the last PEI field
    boundry.ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context, 1,
        boundry.ebit, &boundry.end, &gob->end);

  } else {
    //skipping GOBs 24 header bits + 5 GQUANT
    guint shift = 24;

    GST_TRACE_OBJECT (rtph263pay, "INTER GOB");

    //if CPM == 1, there are 2 more bits in the header - GSBI header is 31 bits long
    if (context->cpm == 1)
      shift += 2;

    GST_TRACE_OBJECT (rtph263pay, "window: 0x%08x", context->window);
    boundry.ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context, shift,
        boundry.ebit, &boundry.end, &gob->end);

    //We need GQUANT for mode B packages - so we store it
    gob->quant = context->window >> 27;

    shift = 5;
    boundry.ebit =
        gst_rtp_h263_pay_move_window_right (rtph263pay, context, shift,
        boundry.ebit, &boundry.end, &gob->end);

    GST_TRACE_OBJECT (rtph263pay, "window: 0x%08x", context->window);
  }

  GST_TRACE_OBJECT (rtph263pay, "GQUANT IS: %08x", gob->quant);

  // We are on MB layer

  /* mac0 is a dummy seed MB so the first mbfinder call has a
   * predecessor; it is freed once parsing finishes. */
  mac = mac0 = gst_rtp_h263_pay_mb_new (&boundry, 0);
  for (mb = 0; mb < format_props[context->piclayer->ptype_srcformat][1]; mb++) {

    GST_TRACE_OBJECT (rtph263pay,
        "================ START MB %d =================", mb);

    //Find next macroblock boundaries
    ebit = mac->ebit;
    if (!(mac =
            gst_rtp_h263_pay_B_mbfinder (rtph263pay, context, gob, mac, mb))) {

      GST_LOG_OBJECT (rtph263pay, "Error decoding MB - sbit: %d", 8 - ebit);
      GST_ERROR_OBJECT (rtph263pay, "Error decoding in GOB");

      gst_rtp_h263_pay_mb_destroy (mac0);
      goto decode_error;
    }

    /* Store macroblock for further processing and delete old MB if any */
    gst_rtp_h263_pay_mb_destroy (gob->macroblocks[mb]);
    gob->macroblocks[mb] = mac;

    //If mb_type == stuffing, don't increment the mb address
    if (mac->mb_type == 10) {
      mb--;
      continue;
    } else {
      gob->nmacroblocs++;
    }

    if (mac->end >= gob->end) {
      GST_LOG_OBJECT (rtph263pay, "No more MBs in this GOB");
      if (!mac->ebit) {
        mac->end--;
      }
      gob->end = mac->end;
      break;
    }
    GST_DEBUG_OBJECT (rtph263pay,
        "Found MB: mba: %d start: %p end: %p len: %d sbit: %d ebit: %d",
        mac->mba, mac->start, mac->end, mac->length, mac->sbit, mac->ebit);
    GST_TRACE_OBJECT (rtph263pay,
        "================ END MB %d =================", mb);
  }
  gst_rtp_h263_pay_mb_destroy (mac0);

  /* Greedily group consecutive MBs into MTU-sized mode B packets. */
  mb = 0;
  first = 0;
  payload_len = boundry.end - boundry.start + 1;
  GST_DEBUG_OBJECT (rtph263pay,
      "------------------------- NEW PACKAGE ----------------------");
  while (mb < gob->nmacroblocs) {
    if (payload_len + gob->macroblocks[mb]->length < max_payload_size) {

      //FIXME: payload_len is not the real length -> ignoring sbit/ebit
      payload_len += gob->macroblocks[mb]->length;
      mb++;

    } else {
      //FIXME: we should include the last few bits of the GOB in the package - do we do that now?
      //GST_DEBUG_OBJECT (rtph263pay, "Pushing GOBS %d to %d because payload size is %d", first,
      //    first == mb - 1, payload_len);

      // FIXME: segfault if mb == 0 (first MB is larger than max_payload_size)
      GST_DEBUG_OBJECT (rtph263pay, "Push B mode fragment from mb %d to %d",
          first, mb - 1);
      if (gst_rtp_h263_pay_B_fragment_push (rtph263pay, context, gob, first,
              mb - 1, &boundry)) {
        GST_ERROR_OBJECT (rtph263pay, "Oooops, there was an error sending");
        goto decode_error;
      }

      payload_len = 0;
      first = mb;
      GST_DEBUG_OBJECT (rtph263pay,
          "------------------------- END PACKAGE ----------------------");
      GST_DEBUG_OBJECT (rtph263pay,
          "------------------------- NEW PACKAGE ----------------------");
    }
  }

  /* Push rest */
  GST_DEBUG_OBJECT (rtph263pay, "Remainder first: %d, MB: %d", first, mb);
  if (payload_len != 0) {
    GST_DEBUG_OBJECT (rtph263pay, "Push B mode fragment from mb %d to %d",
        first, mb - 1);
    if (gst_rtp_h263_pay_B_fragment_push (rtph263pay, context, gob, first,
            mb - 1, &boundry)) {
      GST_ERROR_OBJECT (rtph263pay, "Oooops, there was an error sending!");
      goto decode_error;
    }
  }

  /*---------- END OF MODE B FRAGMENTATION ----------*/

  return TRUE;

decode_error:
  return FALSE;
}
+
+/* Fallback/simple path: payload the whole picture as a single mode A
+ * packet.  Ownership of the package is handed to gst_rtp_h263_pay_push().
+ * The final TRUE argument presumably sets the RTP marker (end of frame)
+ * — confirm against gst_rtp_h263_pay_package_new(). */
+static GstFlowReturn
+gst_rtp_h263_send_entire_frame (GstRtpH263Pay * rtph263pay,
+    GstRtpH263PayContext * context)
+{
+  GstRtpH263PayPackage *pack;
+  /* Package covers all of the mapped frame data; sbit/ebit are 0 since
+   * the frame starts and ends on byte boundaries. */
+  pack =
+      gst_rtp_h263_pay_package_new (rtph263pay->data,
+      rtph263pay->data + rtph263pay->available_data,
+      rtph263pay->available_data, 0, 0, NULL, TRUE);
+  pack->mode = GST_RTP_H263_PAYLOAD_HEADER_MODE_A;
+
+  GST_DEBUG_OBJECT (rtph263pay, "Available data: %d",
+      rtph263pay->available_data);
+
+  /* Mode enum value doubles as the payload header size in bytes */
+  pack->outbuf =
+      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+      (rtph263pay), GST_RTP_H263_PAYLOAD_HEADER_MODE_A, 0, 0);
+
+  return gst_rtp_h263_pay_push (rtph263pay, context, pack);
+}
+
+/* Payload the frame stored in rtph263pay->current_buffer and push the
+ * resulting RTP packets.  The whole picture is sent as one mode A packet
+ * when it fits in the MTU; otherwise the frame is split per GOB (mode A)
+ * and, for GOBs larger than the MTU, per macroblock (mode B).  Mode C
+ * (PB-frames) is not implemented.  Consumes current_buffer. */
+static GstFlowReturn
+gst_rtp_h263_pay_flush (GstRtpH263Pay * rtph263pay)
+{
+
+  /*
+   * FIXME: GSTUF bits are ignored right now,
+   * - not using EBIT/SBIT payload header fields in mode A fragmentation - ffmpeg doesn't need them, but others?
+   */
+
+  GstFlowReturn ret;
+  GstRtpH263PayContext *context;
+  gint i;
+
+  ret = 0;
+  context = (GstRtpH263PayContext *) g_malloc0 (sizeof (GstRtpH263PayContext));
+  /* Usable payload size: reserve room for the RTP header and the largest
+   * (mode C) payload header, plus a safety margin. */
+  context->mtu =
+      rtph263pay->payload.mtu - (MTU_SECURITY_OFFSET + GST_RTP_HEADER_LEN +
+      GST_RTP_H263_PAYLOAD_HEADER_MODE_C);
+
+  GST_DEBUG_OBJECT (rtph263pay, "MTU: %d", context->mtu);
+  rtph263pay->available_data = gst_buffer_get_size (rtph263pay->current_buffer);
+  if (rtph263pay->available_data == 0) {
+    ret = GST_FLOW_OK;
+    goto end;
+  }
+
+  /* Get a pointer to all the data for the frame */
+  gst_buffer_map (rtph263pay->current_buffer, &rtph263pay->map, GST_MAP_READ);
+  rtph263pay->data = (guint8 *) rtph263pay->map.data;
+
+  /* Picture header */
+  context->piclayer = (GstRtpH263PayPic *) rtph263pay->data;
+
+  if (context->piclayer->ptype_pictype == 0)
+    GST_DEBUG_OBJECT (rtph263pay, "We got an I-frame");
+  else
+    GST_DEBUG_OBJECT (rtph263pay, "We got a P-frame");
+
+  /* CPM (Continuous Presence Multipoint) is the top bit of byte 6 */
+  context->cpm = rtph263pay->data[6] >> 7;
+
+  GST_DEBUG_OBJECT (rtph263pay, "CPM: %d", context->cpm);
+
+  GST_DEBUG_OBJECT (rtph263pay, "Payload length is: %d",
+      rtph263pay->available_data);
+
+  /*
+   * - MODE A - If normal, I and P frames, -> mode A
+   * - GOB layer fragmentation
+   * - MODE B - If normal, I and P frames, but GOBs > mtu
+   * - MB layer fragmentation
+   * - MODE C - For P frames with PB option, but GOBs > mtu
+   * - MB layer fragmentation
+   */
+  if (rtph263pay->available_data + GST_RTP_H263_PAYLOAD_HEADER_MODE_A +
+      GST_RTP_HEADER_LEN < context->mtu) {
+    ret = gst_rtp_h263_send_entire_frame (rtph263pay, context);
+  } else {
+
+    /*---------- FRAGMENTING THE FRAME BECAUSE TOO LARGE TO FIT IN MTU ----------*/
+    GstRtpH263PayBoundry bound;
+    gint first;
+    guint payload_len;
+    gboolean forcea = FALSE;
+
+    GST_DEBUG_OBJECT (rtph263pay, "Frame too large for MTU");
+    /*
+     * Let's go through all the data and fragment it until end is reached
+     */
+
+    /* 'end' starts one byte before the data so the first GOB search
+     * begins at data[0] — presumably; confirm against gobfinder. */
+    gst_rtp_h263_pay_boundry_init (&bound, NULL, rtph263pay->data - 1, 0, 0);
+    context->gobs =
+        (GstRtpH263PayGob **) g_malloc0 (format_props[context->piclayer->
+        ptype_srcformat][0] * sizeof (GstRtpH263PayGob *));
+
+
+    for (i = 0; i < format_props[context->piclayer->ptype_srcformat][0]; i++) {
+      GST_DEBUG_OBJECT (rtph263pay, "Searching for gob %d", i);
+      if (!gst_rtp_h263_pay_gobfinder (rtph263pay, &bound)) {
+        if (i <= 1) {
+          GST_WARNING_OBJECT (rtph263pay,
+              "No GOB's were found in data stream! Please enable RTP mode in encoder. Forcing mode A for now.");
+          ret = gst_rtp_h263_send_entire_frame (rtph263pay, context);
+          goto end;
+        } else {
+          /* try to send fragments corresponding to found GOBs */
+          forcea = TRUE;
+          break;
+        }
+      }
+
+      context->gobs[i] = gst_rtp_h263_pay_gob_new (&bound, i);
+      //FIXME - encoders may generate an EOS gob that has to be processed
+      GST_DEBUG_OBJECT (rtph263pay,
+          "Gob values are: gobn: %d, start: %p len: %d ebit: %d sbit: %d", i,
+          context->gobs[i]->start, context->gobs[i]->length,
+          context->gobs[i]->ebit, context->gobs[i]->sbit);
+    }
+    /* NOTE some places may still assume this to be the max possible */
+    context->no_gobs = i;
+    GST_DEBUG_OBJECT (rtph263pay, "Found %d GOBS of maximum %d",
+        context->no_gobs, format_props[context->piclayer->ptype_srcformat][0]);
+
+    // Make packages smaller than MTU
+    // A mode
+    // - if ( GOB > MTU) -> B mode || C mode
+    // Push packages
+
+    first = 0;
+    payload_len = 0;
+    i = 0;
+    while (i < context->no_gobs) {
+
+      if (context->gobs[i]->length >= context->mtu) {
+        if (payload_len == 0) {
+
+          GST_DEBUG_OBJECT (rtph263pay, "GOB len > MTU");
+          if (rtph263pay->prop_payload_mode || forcea) {
+            payload_len = context->gobs[i]->length;
+            goto force_a;
+          }
+          if (!context->piclayer->ptype_pbmode) {
+            GST_DEBUG_OBJECT (rtph263pay, "MODE B on GOB %d needed", i);
+            if (!gst_rtp_h263_pay_mode_B_fragment (rtph263pay, context,
+                    context->gobs[i])) {
+              GST_ERROR_OBJECT (rtph263pay,
+                  "There was an error fragmenting in mode B");
+              ret = GST_FLOW_ERROR;
+              goto end;
+            }
+          } else {
+            //IMPLEMENT C mode
+            GST_ERROR_OBJECT (rtph263pay,
+                "MODE C on GOB %d needed, but not supported yet", i);
+            /*if(!gst_rtp_h263_pay_mode_C_fragment(rtph263pay, context, context->gobs[i])) {
+               ret = GST_FLOW_OK;
+               GST_ERROR("There was an error fragmenting in mode C");
+               goto decode_error;
+               } */
+            goto decode_error;
+          }
+        decode_error:
+          /* Reached both by falling through after a successful mode B
+           * fragmentation and via the mode C goto above: in either case
+           * this oversized GOB is consumed and scanning continues. */
+          i++;
+          first = i;
+          continue;
+
+        } else {
+          goto payload_a_push;
+        }
+      }
+
+      if (payload_len + context->gobs[i]->length < context->mtu) {
+        GST_DEBUG_OBJECT (rtph263pay, "GOB %d fills mtu", i);
+        payload_len += context->gobs[i]->length;
+        i++;
+        if (i == context->no_gobs) {
+          GST_DEBUG_OBJECT (rtph263pay, "LAST GOB %d", i);
+          goto payload_a_push;
+        }
+
+      } else {
+      payload_a_push:
+        GST_DEBUG_OBJECT (rtph263pay,
+            "Pushing GOBS %d to %d because payload size is %d", first,
+            first == i ? i : i - 1, payload_len);
+        gst_rtp_h263_pay_A_fragment_push (rtph263pay, context, first,
+            first == i ? i : i - 1);
+        payload_len = 0;
+        first = i;
+      }
+      continue;
+
+    force_a:
+      GST_DEBUG_OBJECT (rtph263pay,
+          "Pushing GOBS %d to %d because payload size is %d", first, i,
+          payload_len);
+      gst_rtp_h263_pay_A_fragment_push (rtph263pay, context, first, i);
+      payload_len = 0;
+      i++;
+      first = i;
+    }
+
+
+  }/*---------- END OF FRAGMENTATION ----------*/
+
+  /* Flush the input buffer data */
+
+end:
+  /* NOTE(review): when available_data == 0 we arrive here with
+   * context->piclayer still NULL (dereferenced just below) and with
+   * current_buffer never mapped (unmapped below) — looks unsafe; confirm
+   * this early-exit path. */
+  gst_rtp_h263_pay_context_destroy (context,
+      context->piclayer->ptype_srcformat);
+  gst_buffer_unmap (rtph263pay->current_buffer, &rtph263pay->map);
+  gst_buffer_replace (&rtph263pay->current_buffer, NULL);
+
+  return ret;
+}
+
+/* GstRTPBasePayload::handle_buffer implementation: takes ownership of a
+ * complete H.263 picture and payloads/pushes it immediately. */
+static GstFlowReturn
+gst_rtp_h263_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
+{
+
+  GstRtpH263Pay *rtph263pay;
+  GstFlowReturn ret;
+
+  rtph263pay = GST_RTP_H263_PAY (payload);
+  GST_DEBUG_OBJECT (rtph263pay,
+      "-------------------- NEW FRAME ---------------");
+
+  rtph263pay->first_ts = GST_BUFFER_PTS (buffer);
+
+  /* gst_buffer_replace() takes its own ref; drop the one passed in since
+   * this vmethod owns the incoming buffer */
+  gst_buffer_replace (&rtph263pay->current_buffer, buffer);
+  gst_buffer_unref (buffer);
+
+  /* we always encode and flush a full picture */
+  ret = gst_rtp_h263_pay_flush (rtph263pay);
+  GST_DEBUG_OBJECT (rtph263pay,
+      "-------------------- END FRAME ---------------");
+
+  return ret;
+}
diff --git a/gst/rtp/gstrtph263pay.h b/gst/rtp/gstrtph263pay.h
new file mode 100644
index 0000000000..c77e6ac15a
--- /dev/null
+++ b/gst/rtp/gstrtph263pay.h
@@ -0,0 +1,413 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ * Author: Dejan Sakelsak sahel@kiberpipa.org
+ */
+
+#ifndef __GST_RTP_H263_PAY_H__
+#define __GST_RTP_H263_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_H263_PAY \
+ (gst_rtp_h263_pay_get_type())
+#define GST_RTP_H263_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H263_PAY,GstRtpH263Pay))
+#define GST_RTP_H263_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H263_PAY,GstRtpH263PayClass))
+#define GST_IS_RTP_H263_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H263_PAY))
+#define GST_IS_RTP_H263_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H263_PAY))
+// blocks per macroblock
+#define N_BLOCKS 6
+#define DEFAULT_MODE_A FALSE
+#define MTU_SECURITY_OFFSET 50
+ /* RFC 2190 payload header modes; each value doubles as the payload
+  * header length in bytes for that mode (used directly as a size when
+  * allocating output buffers and computing the usable MTU). */
+ typedef enum _GstRtpH263PayHeaderMode
+{
+  GST_RTP_H263_PAYLOAD_HEADER_MODE_A = 4,
+  GST_RTP_H263_PAYLOAD_HEADER_MODE_B = 8,
+  GST_RTP_H263_PAYLOAD_HEADER_MODE_C = 12
+} GstRtpH263PayHeaderMode;
+
+typedef struct _GstRtpH263PayContext GstRtpH263PayContext;
+typedef struct _GstRtpH263PayPic GstRtpH263PayPic;
+typedef struct _GstRtpH263PayClass GstRtpH263PayClass;
+typedef struct _GstRtpH263Pay GstRtpH263Pay;
+typedef struct _GstRtpH263PayBoundry GstRtpH263PayBoundry;
+typedef struct _GstRtpH263PayMB GstRtpH263PayMB;
+typedef struct _GstRtpH263PayGob GstRtpH263PayGob;
+typedef struct _GstRtpH263PayPackage GstRtpH263PayPackage;
+
+//typedef enum _GstRtpH263PayHeaderMode GstRtpH263PayHeaderMode;
+
+/* Instance structure for the H.263 (RFC 2190) RTP payloader element. */
+struct _GstRtpH263Pay
+{
+  GstRTPBasePayload payload;
+
+  GstBuffer *current_buffer;    /* whole input frame currently being payloaded */
+  GstMapInfo map;               /* mapping of current_buffer during flush */
+
+  GstClockTime first_ts;        /* PTS of the frame being processed */
+  gboolean prop_payload_mode;   /* when set, oversized GOBs are still pushed
+                                 * as mode A fragments instead of mode B */
+  guint8 *data;                 /* mapped frame data (map.data) */
+  guint available_data;         /* size of the mapped frame in bytes */
+
+};
+
+/* Per-frame payloading state, allocated at the start of each flush and
+ * destroyed at the end. */
+struct _GstRtpH263PayContext
+{
+  GstRtpH263PayPic *piclayer;   /* picture layer header at start of frame data */
+
+  guint mtu;                    /* usable payload size: element MTU minus RTP
+                                 * header, mode C header and safety margin */
+  guint window;
+  guint8 *win_end;
+  guint8 cpm;                   /* CPM bit extracted from the picture header */
+
+  guint no_gobs;                /* number of GOBs actually found in the frame */
+  GstRtpH263PayGob **gobs;      /* per-GOB parse results, owned by the context */
+
+};
+
+struct _GstRtpH263PayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+typedef struct _GstRtpH263PayAHeader
+{
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ unsigned int ebit:3; /* End position */
+ unsigned int sbit:3; /* Start position */
+ unsigned int p:1; /* PB-frames mode */
+ unsigned int f:1; /* flag bit */
+
+ unsigned int r1:1; /* Reserved */
+ unsigned int a:1; /* Advanced Prediction */
+ unsigned int s:1; /* syntax based arithmetic coding */
+ unsigned int u:1; /* Unrestricted motion vector */
+ unsigned int i:1; /* Picture coding type */
+ unsigned int src:3; /* Source format */
+
+ unsigned int trb:3; /* Temporal ref for B frame */
+ unsigned int dbq:2; /* Differential Quantisation parameter */
+ unsigned int r2:3; /* Reserved */
+#elif G_BYTE_ORDER == G_BIG_ENDIAN
+ unsigned int f:1; /* flag bit */
+ unsigned int p:1; /* PB-frames mode */
+ unsigned int sbit:3; /* Start position */
+ unsigned int ebit:3; /* End position */
+
+ unsigned int src:3; /* Source format */
+ unsigned int i:1; /* Picture coding type */
+ unsigned int u:1; /* Unrestricted motion vector */
+ unsigned int s:1; /* syntax based arithmetic coding */
+ unsigned int a:1; /* Advanced Prediction */
+ unsigned int r1:1; /* Reserved */
+
+ unsigned int r2:3; /* Reserved */
+ unsigned int dbq:2; /* Differential Quantisation parameter */
+ unsigned int trb:3; /* Temporal ref for B frame */
+#else
+#error "G_BYTE_ORDER should be big or little endian."
+#endif
+ unsigned int tr:8; /* Temporal ref for P frame */
+} GstRtpH263PayAHeader;
+
+typedef struct _GstRtpH263PayBHeader
+{
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ unsigned int ebit:3; /* End position */
+ unsigned int sbit:3; /* Start position */
+ unsigned int p:1; /* PB-frames mode */
+ unsigned int f:1; /* flag bit */
+
+ unsigned int quant:5; /* Quantization value for first MB */
+ unsigned int src:3; /* Source format */
+
+ unsigned int mba1:3; /* Address of first MB starting count from 0 - part1 */
+ unsigned int gobn:5; /* GOB number in effect at start of packet */
+
+ unsigned int r:2; /* Reserved */
+ unsigned int mba2:6; /* Address of first MB starting count from 0 - part2 */
+
+ unsigned int hmv11:4; /* horizontal motion vector predictor for MB 1 - part 1 */
+ unsigned int a:1; /* Advanced Prediction */
+ unsigned int s:1; /* syntax based arithmetic coding */
+ unsigned int u:1; /* Unrestricted motion vector */
+ unsigned int i:1; /* Picture coding type */
+
+ unsigned int vmv11:5; /* vertical motion vector predictor for MB 1 - part 1 */
+ unsigned int hmv12:3; /* horizontal motion vector predictor for MB 1 - part 2 */
+
+ unsigned int hmv21:6; /* horizontal motion vector predictor for MB 3 - part 1 */
+ unsigned int vmv12:2; /* vertical motion vector predictor for MB 1 - part 2 */
+
+ unsigned int vmv21:7; /* vertical motion vector predictor for MB 3 */
+ unsigned int hmv22:1; /* horizontal motion vector predictor for MB 3 - part 2 */
+
+#elif G_BYTE_ORDER == G_BIG_ENDIAN
+ unsigned int f:1; /* flag bit */
+ unsigned int p:1; /* PB-frames mode */
+ unsigned int sbit:3; /* Start position */
+ unsigned int ebit:3; /* End position */
+
+ unsigned int src:3; /* Source format */
+ unsigned int quant:5; /* Quantization value for first MB */
+
+ unsigned int gobn:5; /* GOB number in effect at start of packet */
+ unsigned int mba1:3; /* Address of first MB starting count from 0 - part1 */
+
+ unsigned int mba2:6; /* Address of first MB starting count from 0 - part2 */
+ unsigned int r:2; /* Reserved */
+
+ unsigned int i:1; /* Picture coding type */
+ unsigned int u:1; /* Unrestricted motion vector */
+ unsigned int s:1; /* syntax based arithmetic coding */
+ unsigned int a:1; /* Advanced Prediction */
+ unsigned int hmv11:4; /* horizontal motion vector predictor for MB 1 - part 1 */
+
+ unsigned int hmv12:3; /* horizontal motion vector predictor for MB 1 - part 2 */
+ unsigned int vmv11:5; /* vertical motion vector predictor for MB 1 - part 1 */
+
+ unsigned int vmv12:2; /* vertical motion vector predictor for MB 1 - part 2 */
+ unsigned int hmv21:6; /* horizontal motion vector predictor for MB 3 - part 1 */
+
+ unsigned int hmv22:1; /* horizontal motion vector predictor for MB 3 - part 2 */
+ unsigned int vmv21:7; /* vertical motion vector predictor for MB 3 */
+#else
+#error "G_BYTE_ORDER should be big or little endian."
+#endif
+} GstRtpH263PayBHeader;
+
+typedef struct _GstRtpH263PayCHeader
+{
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ unsigned int ebit:3; /* End position */
+ unsigned int sbit:3; /* Start position */
+ unsigned int p:1; /* PB-frames mode */
+ unsigned int f:1; /* flag bit */
+
+ unsigned int quant:5; /* Quantization value for first MB */
+ unsigned int src:3; /* Source format */
+
+ unsigned int mba1:3; /* Address of first MB starting count from 0 - part1 */
+ unsigned int gobn:5; /* GOB number in effect at start of packet */
+
+ unsigned int r:2; /* Reserved */
+ unsigned int mba2:6; /* Address of first MB starting count from 0 - part2 */
+
+ unsigned int hmv11:4; /* horizontal motion vector predictor for MB 1 - part 1 */
+ unsigned int a:1; /* Advanced Prediction */
+ unsigned int s:1; /* syntax based arithmetic coding */
+ unsigned int u:1; /* Unrestricted motion vector */
+ unsigned int i:1; /* Picture coding type */
+
+ unsigned int vmv11:5; /* vertical motion vector predictor for MB 1 - part 1 */
+ unsigned int hmv12:3; /* horizontal motion vector predictor for MB 1 - part 2 */
+
+ unsigned int hmv21:6; /* horizontal motion vector predictor for MB 3 - part 1 */
+ unsigned int vmv12:2; /* vertical motion vector predictor for MB 1 - part 2 */
+
+ unsigned int vmv21:7; /* vertical motion vector predictor for MB 3 */
+ unsigned int hmv22:1; /* horizontal motion vector predictor for MB 3 - part 2 */
+
+ unsigned int rr1:8; /* reserved */
+
+ unsigned int rr2:8; /* reserved */
+
+ unsigned int trb:3; /* Temporal Reference for the B */
+ unsigned int dbq:2; /* Differential quantization parameter */
+ unsigned int rr3:3; /* reserved */
+
+ unsigned int tr:8; /* Temporal Reference for the P frame */
+
+#elif G_BYTE_ORDER == G_BIG_ENDIAN
+ unsigned int f:1; /* flag bit */
+ unsigned int p:1; /* PB-frames mode */
+ unsigned int sbit:3; /* Start position */
+ unsigned int ebit:3; /* End position */
+
+ unsigned int src:3; /* Source format */
+ unsigned int quant:5; /* Quantization value for first MB */
+
+ unsigned int gobn:5; /* GOB number in effect at start of packet */
+ unsigned int mba1:3; /* Address of first MB starting count from 0 - part1 */
+
+ unsigned int mba2:6; /* Address of first MB starting count from 0 - part2 */
+ unsigned int r:2; /* Reserved */
+
+ unsigned int i:1; /* Picture coding type */
+ unsigned int u:1; /* Unrestricted motion vector */
+ unsigned int s:1; /* syntax based arithmetic coding */
+ unsigned int a:1; /* Advanced Prediction */
+ unsigned int hmv11:4; /* horizontal motion vector predictor for MB 1 - part 1 */
+
+ unsigned int hmv12:3; /* horizontal motion vector predictor for MB 1 - part 2 */
+ unsigned int vmv11:5; /* vertical motion vector predictor for MB 1 - part 1 */
+
+ unsigned int vmv12:2; /* vertical motion vector predictor for MB 1 - part 2 */
+ unsigned int hmv21:6; /* horizontal motion vector predictor for MB 3 - part 1 */
+
+ unsigned int hmv22:1; /* horizontal motion vector predictor for MB 3 - part 2 */
+ unsigned int vmv21:7; /* vertical motion vector predictor for MB 3 */
+ unsigned int rr1:8; /* reserved */
+ unsigned int rr2:8; /* reserved */
+
+ unsigned int rr3:3; /* reserved */
+ unsigned int dbq:2; /* Differential quantization parameter */
+ unsigned int trb:3; /* Temporal Reference for the B */
+
+ unsigned int tr:8; /* Temporal Reference for the P frame */
+#else
+#error "G_BYTE_ORDER should be big or little endian."
+#endif
+} GstRtpH263PayCHeader;
+
+/* Bitfield overlay for the start of the H.263 picture layer header
+ * (PSC, TR, PTYPE, PQUANT), with per-endianness field ordering.
+ * NOTE(review): the little-endian variant has separate 1-bit
+ * ptype_263/ptype_start fields while the big-endian variant folds them
+ * into ptype_start:2 with no ptype_263 — confirm the BE layout. */
+struct _GstRtpH263PayPic
+{
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+  unsigned int psc1:16;
+
+  unsigned int tr1:2;
+  unsigned int psc2:6;
+
+  unsigned int ptype_263:1;
+  unsigned int ptype_start:1;
+  unsigned int tr2:6;
+
+  unsigned int ptype_umvmode:1;
+  unsigned int ptype_pictype:1;
+  unsigned int ptype_srcformat:3;
+  unsigned int ptype_freeze:1;
+  unsigned int ptype_camera:1;
+  unsigned int ptype_split:1;
+
+  unsigned int pquant:5;
+  unsigned int ptype_pbmode:1;
+  unsigned int ptype_apmode:1;
+  unsigned int ptype_sacmode:1;
+
+#elif G_BYTE_ORDER == G_BIG_ENDIAN
+  unsigned int psc1:16;
+
+  unsigned int psc2:6;
+  unsigned int tr1:2;
+
+  unsigned int tr2:6;
+  unsigned int ptype_start:2;
+
+  unsigned int ptype_split:1;
+  unsigned int ptype_camera:1;
+  unsigned int ptype_freeze:1;
+  unsigned int ptype_srcformat:3;
+  unsigned int ptype_pictype:1;
+  unsigned int ptype_umvmode:1;
+
+  unsigned int ptype_sacmode:1;
+  unsigned int ptype_apmode:1;
+  unsigned int ptype_pbmode:1;
+  unsigned int pquant:5;
+
+#else
+#error "G_BYTE_ORDER should be big or little endian."
+#endif
+};
+
+/* Byte range of a parsed bitstream unit; sbit/ebit count the unused
+ * bits at the start of *start and at the end of *end respectively. */
+struct _GstRtpH263PayBoundry
+{
+
+  guint8 *start;
+  guint8 *end;
+  guint8 sbit;
+  guint8 ebit;
+
+};
+
+/* One parsed macroblock within a GOB. */
+struct _GstRtpH263PayMB
+{
+  guint8 *start;                /* first byte of the macroblock */
+  guint8 *end;                  /* last byte of the macroblock */
+  guint8 sbit;                  /* unused leading bits in *start */
+  guint8 ebit;                  /* unused trailing bits in *end */
+  guint length;                 /* length in bytes */
+
+  guint8 mb_type;               /* macroblock type; 10 == stuffing (such MBs
+                                 * do not advance the MB address) */
+  guint quant;
+
+  guint mba;                    /* macroblock address */
+  guint8 mvd[10];               /* motion vector data — presumably the
+                                 * predictors for the mode B header; confirm */
+};
+
+/* One parsed Group Of Blocks and, when mode B fragmentation was needed,
+ * its per-macroblock breakdown. */
+struct _GstRtpH263PayGob
+{
+  guint8 *start;                /* first byte of the GOB */
+  guint8 *end;                  /* last byte of the GOB */
+  guint length;                 /* length in bytes */
+  guint8 sbit;                  /* unused leading bits in *start */
+  guint8 ebit;                  /* unused trailing bits in *end */
+
+  guint gobn;                   /* GOB number */
+  guint quant;
+
+  GstRtpH263PayMB **macroblocks;        /* filled by mode B fragmentation */
+  guint nmacroblocs;            /* number of valid entries in macroblocks */
+};
+
+/* One outgoing RTP packet under construction: the payload byte range,
+ * partial start/end bits, and the extra mode B/C header fields. */
+struct _GstRtpH263PayPackage
+{
+  guint8 *payload_start;
+  guint8 *payload_end;
+  guint payload_len;
+  guint8 sbit;                  /* unused bits at start of first byte */
+  guint8 ebit;                  /* unused bits at end of last byte */
+  GstBuffer *outbuf;            /* allocated output RTP buffer */
+  gboolean marker;              /* RTP marker bit (last packet of the frame) */
+
+  GstRtpH263PayHeaderMode mode; /* payload header mode (A/B/C) */
+
+  /*
+   * mode B,C data
+   */
+
+  guint16 mba;                  /* first macroblock address in the packet */
+  guint nmvd;
+  guint8 mvd[10];
+  guint gobn;                   /* GOB number in effect at packet start */
+  guint quant;
+};
+
+#define GST_H263_PICTURELAYER_PLSRC(buf) (((GstRtpH263PayPic *)(buf))->ptype_srcformat)
+#define GST_H263_PICTURELAYER_PLTYPE(buf) (((GstRtpH263PayPic *)(buf))->ptype_pictype)
+#define GST_H263_PICTURELAYER_PLUMV(buf) (((GstRtpH263PayPic *)(buf))->ptype_umvmode)
+#define GST_H263_PICTURELAYER_PLSAC(buf) (((GstRtpH263PayPic *)(buf))->ptype_sacmode)
+#define GST_H263_PICTURELAYER_PLAP(buf) (((GstRtpH263PayPic *)(buf))->ptype_apmode)
+
+/*
+ * TODO: PB frame relevant tables
+ */
+
+/* End pointer of a byte range given its start and length. */
+#define GST_RTP_H263_PAY_END(start, len) (((guint8 *)start) + ((guint)len))
+/* GOB number: bits 2..6 of the third byte following the GOB start code.
+ * Fixed: the original expansion had an extra unmatched '(' (five opens,
+ * four closes), which would be a syntax error at any use site. */
+#define GST_RTP_H263_PAY_GOBN(gob) ((((guint8 *) gob)[2] >> 2) & 0x1f)
+
+GType gst_rtp_h263_pay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_H263_PAY_H__ */
diff --git a/gst/rtp/gstrtph263pdepay.c b/gst/rtp/gstrtph263pdepay.c
new file mode 100644
index 0000000000..8b371ba92f
--- /dev/null
+++ b/gst/rtp/gstrtph263pdepay.c
@@ -0,0 +1,493 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "gstrtph263pdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtph263pdepay_debug);
+#define GST_CAT_DEFAULT (rtph263pdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_h263p_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h263, " "variant = (string) \"itu\" ")
+ );
+
+static GstStaticPadTemplate gst_rtp_h263p_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) [1, MAX], "
+ "encoding-name = (string) \"H263-1998\"; "
+ /* optional params */
+ /* NOTE all optional SDP params must be strings in the caps */
+ /*
+ "sqcif = (string) [1, 32], "
+ "qcif = (string) [1, 32], "
+ "cif = (string) [1, 32], "
+ "cif4 = (string) [1, 32], "
+ "cif16 = (string) [1, 32], "
+ "custom = (string) ANY, "
+ "f = (string) {0, 1},"
+ "i = (string) {0, 1},"
+ "j = (string) {0, 1},"
+ "t = (string) {0, 1},"
+ "k = (string) {1, 2, 3, 4},"
+ "n = (string) {1, 2, 3, 4},"
+ "p = (string) ANY,"
+ "par = (string) ANY, "
+ "cpcf = (string) ANY, "
+ "bpp = (string) [0, 65536], "
+ "hrd = (string) {0, 1}; "
+ */
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) [1, MAX], "
+ "encoding-name = (string) \"H263-2000\" "
+ /* optional params */
+ /* NOTE all optional SDP params must be strings in the caps */
+ /*
+ "profile = (string) [0, 10], "
+ "level = (string) {10, 20, 30, 40, 45, 50, 60, 70}, "
+ "interlace = (string) {0, 1};"
+ */
+ )
+ );
+
+#define gst_rtp_h263p_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263PDepay, gst_rtp_h263p_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263pdepay, "rtph263pdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_h263p_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_h263p_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static GstBuffer *gst_rtp_h263p_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+gboolean gst_rtp_h263p_depay_setcaps (GstRTPBaseDepayload * filter,
+ GstCaps * caps);
+
+/* GObject class init: wire vmethods (finalize, change_state, RTP
+ * depayload hooks), pad templates and element metadata. */
+static void
+gst_rtp_h263p_depay_class_init (GstRtpH263PDepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_h263p_depay_finalize;
+
+  gstelement_class->change_state = gst_rtp_h263p_depay_change_state;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_h263p_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_h263p_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP H263 depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts H263/+/++ video from RTP packets (RFC 4629)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_h263p_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_h263p_depay_setcaps;
+
+  GST_DEBUG_CATEGORY_INIT (rtph263pdepay_debug, "rtph263pdepay", 0,
+      "H263+ Video RTP Depayloader");
+}
+
+static void
+gst_rtp_h263p_depay_init (GstRtpH263PDepay * rtph263pdepay)
+{
+  /* adapter accumulates payload fragments until the RTP marker bit
+   * signals the end of a frame (released in _finalize) */
+  rtph263pdepay->adapter = gst_adapter_new ();
+}
+
+/* GObject finalize: release the fragment adapter created in _init and
+ * chain up to the parent class. */
+static void
+gst_rtp_h263p_depay_finalize (GObject * object)
+{
+  GstRtpH263PDepay *rtph263pdepay;
+
+  rtph263pdepay = GST_RTP_H263P_DEPAY (object);
+
+  g_object_unref (rtph263pdepay->adapter);
+  rtph263pdepay->adapter = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GstRTPBaseDepayload::set_caps: derive the src caps variant
+ * (h263 / h263p / h263pp) from the encoding-name and the optional SDP
+ * parameters.  Returns FALSE on missing or unknown encoding-name.
+ * NOTE(review): not declared static (matches the non-static forward
+ * declaration above) — confirm external linkage is intended. */
+gboolean
+gst_rtp_h263p_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
+{
+  GstCaps *srccaps = NULL;
+  GstStructure *structure = gst_caps_get_structure (caps, 0);
+  gint clock_rate;
+  const gchar *encoding_name = NULL;
+  gboolean res;
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 90000;         /* default */
+  filter->clock_rate = clock_rate;
+
+  encoding_name = gst_structure_get_string (structure, "encoding-name");
+  if (encoding_name == NULL)
+    goto no_encoding_name;
+
+  if (g_ascii_strcasecmp (encoding_name, "H263-2000") == 0) {
+    /* always h263++ */
+    srccaps = gst_caps_new_simple ("video/x-h263",
+        "variant", G_TYPE_STRING, "itu",
+        "h263version", G_TYPE_STRING, "h263pp", NULL);
+  } else if (g_ascii_strcasecmp (encoding_name, "H263-1998") == 0) {
+    /* this can be H263 or H263+ depending on defined appendixes in the optional
+     * SDP params */
+    const gchar *F, *I, *J, *T, *K, *N, *P;
+    gboolean is_h263p = FALSE;
+
+    /* F/I/J/T imply H.263+ only when set to "1"; the mere presence of
+     * K, N or P already implies H.263+ */
+    F = gst_structure_get_string (structure, "f");
+    if (F)
+      if (g_ascii_strcasecmp (F, "1") == 0)
+        is_h263p = TRUE;
+    I = gst_structure_get_string (structure, "i");
+    if (I)
+      if (g_ascii_strcasecmp (I, "1") == 0)
+        is_h263p = TRUE;
+    J = gst_structure_get_string (structure, "j");
+    if (J)
+      if (g_ascii_strcasecmp (J, "1") == 0)
+        is_h263p = TRUE;
+    T = gst_structure_get_string (structure, "t");
+    if (T)
+      if (g_ascii_strcasecmp (T, "1") == 0)
+        is_h263p = TRUE;
+    K = gst_structure_get_string (structure, "k");
+    if (K)
+      is_h263p = TRUE;
+    N = gst_structure_get_string (structure, "n");
+    if (N)
+      is_h263p = TRUE;
+    P = gst_structure_get_string (structure, "p");
+    if (P)
+      is_h263p = TRUE;
+
+    if (is_h263p) {
+      srccaps = gst_caps_new_simple ("video/x-h263",
+          "variant", G_TYPE_STRING, "itu",
+          "h263version", G_TYPE_STRING, "h263p", NULL);
+    } else {
+      srccaps = gst_caps_new_simple ("video/x-h263",
+          "variant", G_TYPE_STRING, "itu",
+          "h263version", G_TYPE_STRING, "h263", NULL);
+    }
+  }
+  if (!srccaps)
+    goto no_caps;
+
+  res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (filter), srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+
+  /* ERRORS */
+no_encoding_name:
+  {
+    GST_ERROR_OBJECT (filter, "no encoding-name");
+    return FALSE;
+  }
+no_caps:
+  {
+    GST_ERROR_OBJECT (filter, "invalid encoding-name");
+    return FALSE;
+  }
+}
+
+/* Inspect the picture header at the start of the assembled frame and
+ * set/clear GST_BUFFER_FLAG_DELTA_UNIT so downstream can tell I-frames
+ * (keyframes) from P-frames.  Leaves the flags untouched when the header
+ * cannot be parsed. */
+static void
+gst_rtp_h263p_depay_decorate_output_buffer (GstRtpH263PDepay * rtph263pdepay,
+    GstBuffer * outbuf)
+{
+  gboolean is_intra = FALSE;
+  GstBitReader bits;
+  guint8 pic_hdr[16];
+  gsize pic_hdr_len = 0;
+  guint32 psc, ptype, mpptype;
+  guint8 ufep;
+
+  /* may extract fewer than 16 bytes for tiny buffers; the bit reader is
+   * bounded by the actual length below */
+  pic_hdr_len = gst_buffer_extract (outbuf, 0, pic_hdr, sizeof (pic_hdr));
+
+  GST_MEMDUMP_OBJECT (rtph263pdepay, "pic_hdr", pic_hdr, pic_hdr_len);
+
+#if 0
+  if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) >= GST_LEVEL_MEMDUMP) {
+    gchar bit_str[1 + sizeof (pic_hdr) * 8] = { 0, };
+    guint8 b;
+
+    gst_bit_reader_init (&bits, pic_hdr, pic_hdr_len);
+    while ((gst_bit_reader_get_bits_uint8 (&bits, &b, 1))) {
+      g_strlcat (bit_str, b ? "1" : "0", sizeof (bit_str));
+    }
+    GST_TRACE_OBJECT (rtph263pdepay, "pic_hdr bits: %s", bit_str);
+  }
+#endif
+
+  gst_bit_reader_init (&bits, pic_hdr, pic_hdr_len);
+
+  /* PSC - Picture Start Code: 22 bits: 0000 0000 0000 0000 10 0000 */
+  if (!gst_bit_reader_get_bits_uint32 (&bits, &psc, 22) || psc != 0x20) {
+    GST_WARNING_OBJECT (rtph263pdepay, "No picture start code");
+    return;
+  }
+
+  /* TR - Temporal Reference: 8 bits */
+  if (!gst_bit_reader_skip (&bits, 8)) {
+    GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no TR");
+    return;
+  }
+
+  /* PTYPE (first 8 bits); the top two bits must be '10' */
+  if (!gst_bit_reader_get_bits_uint32 (&bits, &ptype, 8) || (ptype >> 6) != 2) {
+    GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no PTYPE");
+    return;
+  }
+
+  /* PTYPE: check for extended PTYPE (bits 6-8 = 111) */
+  if ((ptype & 7) != 7) {
+    /* No extended PTYPE, read remaining 5 bits */
+    if (!gst_bit_reader_get_bits_uint32 (&bits, &ptype, 5)) {
+      GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no PTYPE");
+      return;
+    }
+    /* picture coding type bit: 0 = INTRA */
+    is_intra = (ptype & 0x10) == 0;
+    goto done;
+  }
+
+  /* UFEP - Update Full Extended PTYPE */
+  ufep = 0;
+  if (!gst_bit_reader_get_bits_uint8 (&bits, &ufep, 3) || ufep > 1) {
+    GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no PLUSPTYPE, %d",
+        ufep);
+    return;
+  }
+
+  /* Skip optional part of PLUSPTYPE (OPPTYPE) */
+  if (ufep == 1 && !gst_bit_reader_skip (&bits, 18)) {
+    GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no OPPTYPE");
+    return;
+  }
+
+  /* Mandatory part of PLUSPTYPE (MPPTYPE); last three bits must be '001' */
+  if (!gst_bit_reader_get_bits_uint32 (&bits, &mpptype, 9)
+      || (mpptype & 7) != 1) {
+    GST_WARNING_OBJECT (rtph263pdepay, "Short picture header: no MPPTYPE");
+    return;
+  }
+
+  /* picture type code 0 = INTRA */
+  is_intra = (mpptype >> 6) == 0;
+
+done:
+
+  if (is_intra) {
+    GST_LOG_OBJECT (rtph263pdepay, "I-frame");
+    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+  } else {
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+  }
+}
+
+/* Depayload one RTP packet (RFC 4629).  Fragments are collected in the
+ * adapter; when the marker bit signals the end of a frame the assembled
+ * buffer is returned, otherwise NULL. */
+static GstBuffer *
+gst_rtp_h263p_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp)
+{
+  GstRtpH263PDepay *rtph263pdepay;
+  GstBuffer *outbuf;
+  gint payload_len;
+  guint8 *payload;
+  gboolean P, V, M;
+  guint header_len;
+  guint8 PLEN, PEBIT;
+
+  rtph263pdepay = GST_RTP_H263P_DEPAY (depayload);
+
+  /* flush remaining data on discont */
+  if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+    GST_LOG_OBJECT (depayload, "DISCONT, flushing adapter");
+    gst_adapter_clear (rtph263pdepay->adapter);
+    rtph263pdepay->wait_start = TRUE;
+  }
+
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+  header_len = 2;
+
+  if (payload_len < header_len)
+    goto too_small;
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+
+  M = gst_rtp_buffer_get_marker (rtp);
+
+  /* 0                   1
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |   RR    |P|V|   PLEN    |PEBIT|
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   */
+  P = (payload[0] & 0x04) == 0x04;      /* start code prefix elided by payloader */
+  V = (payload[0] & 0x02) == 0x02;      /* VRC byte follows */
+  PLEN = ((payload[0] & 0x1) << 5) | (payload[1] >> 3);  /* extra picture hdr len */
+  PEBIT = payload[1] & 0x7;
+
+  GST_LOG_OBJECT (depayload, "P %d, V %d, PLEN %d, PEBIT %d", P, V, PLEN,
+      PEBIT);
+
+  if (V) {
+    header_len++;
+  }
+  if (PLEN) {
+    header_len += PLEN;
+  }
+
+  if ((!P && payload_len < header_len) || (P && payload_len < header_len - 2))
+    goto too_small;
+
+  if (P) {
+    /* picture (or GOB) start: the payloader dropped the first two zero
+     * bytes of the start code; they are restored via the memset below */
+    rtph263pdepay->wait_start = FALSE;
+    header_len -= 2;
+  }
+
+  if (rtph263pdepay->wait_start)
+    goto waiting_start;
+
+  if (payload_len < header_len)
+    goto too_small;
+
+  /* FIXME do not ignore the VRC header (See RFC 2429 section 4.2) */
+  /* FIXME actually use the RTP picture header when it is lost in the network */
+  /* for now strip off header */
+  payload_len -= header_len;
+
+  if (M) {
+    /* frame is completed: append to previous, push it out */
+    guint len, padlen;
+    guint avail;
+    GstBuffer *padbuf;
+
+    GST_LOG_OBJECT (depayload, "Frame complete");
+
+    outbuf =
+        gst_rtp_buffer_get_payload_subbuffer (rtp, header_len, payload_len);
+    if (P)
+      gst_buffer_memset (outbuf, 0, 0, 2);
+    gst_adapter_push (rtph263pdepay->adapter, outbuf);
+    outbuf = NULL;
+
+    avail = gst_adapter_available (rtph263pdepay->adapter);
+    /* NOTE(review): 'avail' already includes payload_len (pushed just
+     * above), so 'len' double-counts it, and padlen ends up in 4..7
+     * rather than the 0..3 needed for plain 32-bit alignment — confirm
+     * whether this extra zero padding is intentional. */
+    len = avail + payload_len;
+    padlen = (len % 4) + 4;
+
+    if (avail == 0)
+      goto empty_frame;
+
+    outbuf = gst_adapter_take_buffer (rtph263pdepay->adapter, avail);
+    if (padlen) {
+      /* append zero padding after the frame data */
+      padbuf = gst_buffer_new_and_alloc (padlen);
+      gst_buffer_memset (padbuf, 0, 0, padlen);
+      outbuf = gst_buffer_append (outbuf, padbuf);
+    }
+
+    gst_rtp_drop_non_video_meta (rtph263pdepay, outbuf);
+
+    /* mark the buffer as keyframe/delta from its picture header */
+    gst_rtp_h263p_depay_decorate_output_buffer (rtph263pdepay, outbuf);
+
+    return outbuf;
+  } else {
+    /* frame not completed: store in adapter */
+    GST_LOG_OBJECT (depayload, "Frame incomplete, storing %d", payload_len);
+
+    outbuf =
+        gst_rtp_buffer_get_payload_subbuffer (rtp, header_len, payload_len);
+    if (P)
+      gst_buffer_memset (outbuf, 0, 0, 2);
+    gst_adapter_push (rtph263pdepay->adapter, outbuf);
+  }
+  return NULL;
+
+too_small:
+  {
+    GST_ELEMENT_WARNING (rtph263pdepay, STREAM, DECODE,
+        ("Packet payload was too small"), (NULL));
+    return NULL;
+  }
+waiting_start:
+  {
+    GST_DEBUG_OBJECT (rtph263pdepay, "waiting for picture start");
+    return NULL;
+  }
+empty_frame:
+  {
+    GST_WARNING_OBJECT (rtph263pdepay, "Depayloaded frame is empty, dropping");
+    return NULL;
+  }
+}
+
+/* GstElement::change_state: reset the fragment adapter and the
+ * wait-for-picture-start flag when going READY->PAUSED, then chain up. */
+static GstStateChangeReturn
+gst_rtp_h263p_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpH263PDepay *rtph263pdepay;
+  GstStateChangeReturn ret;
+
+  rtph263pdepay = GST_RTP_H263P_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_adapter_clear (rtph263pdepay->adapter);
+      rtph263pdepay->wait_start = TRUE;
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtph263pdepay.h b/gst/rtp/gstrtph263pdepay.h
new file mode 100644
index 0000000000..bdcb826a99
--- /dev/null
+++ b/gst/rtp/gstrtph263pdepay.h
@@ -0,0 +1,60 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H263P_DEPAY_H__
+#define __GST_RTP_H263P_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_H263P_DEPAY \
+ (gst_rtp_h263p_depay_get_type())
+#define GST_RTP_H263P_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H263P_DEPAY,GstRtpH263PDepay))
+#define GST_RTP_H263P_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H263P_DEPAY,GstRtpH263PDepayClass))
+#define GST_IS_RTP_H263P_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H263P_DEPAY))
+#define GST_IS_RTP_H263P_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H263P_DEPAY))
+
+typedef struct _GstRtpH263PDepay GstRtpH263PDepay;
+typedef struct _GstRtpH263PDepayClass GstRtpH263PDepayClass;
+
+struct _GstRtpH263PDepay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* accumulates payload fragments until a complete frame (RTP marker)
+   * has been received */
+  GstAdapter *adapter;
+  /* TRUE while waiting for the first picture start; incoming fragments
+   * are discarded until then (reset on READY -> PAUSED) */
+  gboolean wait_start;
+};
+
+struct _GstRtpH263PDepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_h263p_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_H263P_DEPAY_H__ */
diff --git a/gst/rtp/gstrtph263ppay.c b/gst/rtp/gstrtph263ppay.c
new file mode 100644
index 0000000000..5e4aac0550
--- /dev/null
+++ b/gst/rtp/gstrtph263ppay.c
@@ -0,0 +1,814 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <stdio.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "gstrtph263ppay.h"
+#include "gstrtputils.h"
+
+#define DEFAULT_FRAGMENTATION_MODE GST_FRAGMENTATION_MODE_NORMAL
+
+enum
+{
+ PROP_0,
+ PROP_FRAGMENTATION_MODE
+};
+
+#define GST_TYPE_FRAGMENTATION_MODE (gst_fragmentation_mode_get_type())
+/* Lazily register and return the GstFragmentationMode enum GType.
+ *
+ * Uses g_once_init_enter()/g_once_init_leave() so the one-time
+ * registration is thread-safe; the original plain check of a static
+ * variable could race if two threads called this concurrently. */
+static GType
+gst_fragmentation_mode_get_type (void)
+{
+  static gsize fragmentation_mode_type = 0;
+  static const GEnumValue fragmentation_mode[] = {
+    {GST_FRAGMENTATION_MODE_NORMAL, "Normal", "normal"},
+    {GST_FRAGMENTATION_MODE_SYNC, "Fragment at sync points", "sync"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&fragmentation_mode_type)) {
+    GType tmp =
+        g_enum_register_static ("GstFragmentationMode", fragmentation_mode);
+    g_once_init_leave (&fragmentation_mode_type, tmp);
+  }
+  return (GType) fragmentation_mode_type;
+}
+
+
+GST_DEBUG_CATEGORY_STATIC (rtph263ppay_debug);
+#define GST_CAT_DEFAULT rtph263ppay_debug
+
+static GstStaticPadTemplate gst_rtp_h263p_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-h263, variant = (string) itu")
+ );
+
+/*
+ * We also return these in getcaps() as required by the SDP caps
+ *
+ * width = (int) [16, 4096]
+ * height = (int) [16, 4096]
+ * "annex-f = (boolean) {true, false},"
+ * "annex-i = (boolean) {true, false},"
+ * "annex-j = (boolean) {true, false},"
+ * "annex-l = (boolean) {true, false},"
+ * "annex-t = (boolean) {true, false},"
+ * "annex-v = (boolean) {true, false}")
+ */
+
+
+static GstStaticPadTemplate gst_rtp_h263p_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-1998\"; "
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H263-2000\"")
+ );
+
+static void gst_rtp_h263p_pay_finalize (GObject * object);
+
+static void gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstCaps *gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+#define gst_rtp_h263p_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH263PPay, gst_rtp_h263p_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph263ppay, "rtph263ppay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H263P_PAY, rtp_element_init (plugin));
+
+/* Class init: wire up GObject vfuncs, payloader vfuncs, the
+ * fragmentation-mode property, pad templates and element metadata. */
+static void
+gst_rtp_h263p_pay_class_init (GstRtpH263PPayClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+  GstRTPBasePayloadClass *gstrtpbasepayload_class =
+      GST_RTP_BASE_PAYLOAD_CLASS (klass);
+
+  gobject_class->finalize = gst_rtp_h263p_pay_finalize;
+  gobject_class->set_property = gst_rtp_h263p_pay_set_property;
+  gobject_class->get_property = gst_rtp_h263p_pay_get_property;
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_h263p_pay_setcaps;
+  gstrtpbasepayload_class->get_caps = gst_rtp_h263p_pay_sink_getcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_h263p_pay_handle_buffer;
+
+  /* controls whether large frames are split with follow-on packets or at
+   * GOB sync points (see gst_rtp_h263p_pay_flush) */
+  g_object_class_install_property (gobject_class,
+      PROP_FRAGMENTATION_MODE, g_param_spec_enum ("fragmentation-mode",
+          "Fragmentation Mode",
+          "Packet Fragmentation Mode", GST_TYPE_FRAGMENTATION_MODE,
+          DEFAULT_FRAGMENTATION_MODE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_h263p_pay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_h263p_pay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP H263 payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payload-encodes H263/+/++ video in RTP packets (RFC 4629)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtph263ppay_debug, "rtph263ppay",
+      0, "rtph263ppay (RFC 4629)");
+
+  gst_type_mark_as_plugin_api (GST_TYPE_FRAGMENTATION_MODE, 0);
+}
+
+/* Instance init: create the adapter used to assemble/packetize frames
+ * and apply the default fragmentation mode. */
+static void
+gst_rtp_h263p_pay_init (GstRtpH263PPay * rtph263ppay)
+{
+  rtph263ppay->adapter = gst_adapter_new ();
+
+  rtph263ppay->fragmentation_mode = DEFAULT_FRAGMENTATION_MODE;
+}
+
+/* GObject finalize: release the adapter, then chain up.
+ * g_clear_object() unrefs and NULLs the pointer in one idiomatic step,
+ * replacing the manual g_object_unref() + NULL assignment. */
+static void
+gst_rtp_h263p_pay_finalize (GObject * object)
+{
+  GstRtpH263PPay *rtph263ppay = GST_RTP_H263P_PAY (object);
+
+  g_clear_object (&rtph263ppay->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* set_caps vfunc: negotiate the output encoding-name.
+ *
+ * Queries the downstream peer caps, intersects them with our source pad
+ * template and, if the peer expressed a preference, picks its
+ * encoding-name (H263-1998 or H263-2000); otherwise defaults to
+ * H263-1998.  Returns TRUE if the output caps could be set. */
+static gboolean
+gst_rtp_h263p_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  gboolean res;
+  GstCaps *peercaps;
+  gchar *encoding_name = NULL;
+
+  g_return_val_if_fail (gst_caps_is_fixed (caps), FALSE);
+
+  peercaps =
+      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
+  if (peercaps) {
+    GstCaps *tcaps =
+        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
+    GstCaps *intersect = gst_caps_intersect (peercaps, tcaps);
+    gst_caps_unref (tcaps);
+
+    gst_caps_unref (peercaps);
+    if (!gst_caps_is_empty (intersect)) {
+      /* first structure of the intersection wins */
+      GstStructure *s = gst_caps_get_structure (intersect, 0);
+      encoding_name = g_strdup (gst_structure_get_string (s, "encoding-name"));
+    }
+    gst_caps_unref (intersect);
+  }
+
+  /* no peer preference (or no encoding-name in it): default */
+  if (!encoding_name)
+    encoding_name = g_strdup ("H263-1998");
+
+  gst_rtp_base_payload_set_options (payload, "video", TRUE,
+      (gchar *) encoding_name, 90000);
+  res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+  g_free (encoding_name);
+
+  return res;
+}
+
+/* Append a copy of @in_s restricted to a maximum picture size of
+ * @x × @y and the frame rate implied by the minimum picture interval
+ * @mpi (RFC 4629: framerate <= 30000 / (1001 * mpi)) to @caps.
+ *
+ * @in_s is not consumed (it is copied).  If @in_s is NULL or @mpi is
+ * outside the valid range 1..32, @caps is returned unchanged.
+ * Returns the (possibly updated) caps; ownership follows
+ * gst_caps_merge_structure(). */
+static GstCaps *
+caps_append (GstCaps * caps, GstStructure * in_s, guint x, guint y, guint mpi)
+{
+  GstStructure *s;
+
+  if (!in_s)
+    return caps;
+
+  if (mpi < 1 || mpi > 32)
+    return caps;
+
+  s = gst_structure_copy (in_s);
+
+  gst_structure_set (s,
+      "width", GST_TYPE_INT_RANGE, 1, x,
+      "height", GST_TYPE_INT_RANGE, 1, y,
+      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001 * mpi, NULL);
+
+  caps = gst_caps_merge_structure (caps, s);
+
+  return caps;
+}
+
+
+/* get_caps vfunc: compute the sink caps we can accept from what the RTP
+ * peer (typically SDP-derived caps downstream) advertises, per RFC 4629.
+ *
+ * For H263-2000 the profile/level pair is mapped to the corresponding
+ * H.263 Annex X annex set and picture-size/framerate limits; for
+ * H263-1998 the individual f/i/j/t annex flags and the
+ * sqcif/qcif/cif/4cif/16cif/custom size parameters are honoured.
+ *
+ * Fix versus the original: the CUSTOM framesize check accepted only
+ * sizes NOT divisible by 4 (`xmax % 4 && ymax % 4`), while RFC 4629
+ * requires the custom width and height to be divisible by 4.  The
+ * condition is now `xmax % 4 == 0 && ymax % 4 == 0`. */
+static GstCaps *
+gst_rtp_h263p_pay_sink_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+    GstCaps * filter)
+{
+  GstRtpH263PPay *rtph263ppay;
+  GstCaps *caps = NULL, *templ;
+  GstCaps *peercaps = NULL;
+  GstCaps *intersect = NULL;
+  guint i;
+
+  rtph263ppay = GST_RTP_H263P_PAY (payload);
+
+  peercaps =
+      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
+
+  /* if we're just outputting to udpsink or fakesink or so, we should also
+   * accept any input compatible with our sink template caps */
+  if (!peercaps || gst_caps_is_any (peercaps)) {
+    if (peercaps)
+      gst_caps_unref (peercaps);
+    caps =
+        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
+    goto done;
+  }
+
+  /* We basically need to differentiate two use-cases here: One where there's
+   * a capsfilter after the payloader with caps created from an SDP; in this
+   * case the filter caps are fixed and we want to signal to an encoder what
+   * we want it to produce. The second case is simply payloader ! depayloader
+   * where we are dealing with the depayloader's template caps. In this case
+   * we should accept any input compatible with our sink template caps. */
+  if (!gst_caps_is_fixed (peercaps)) {
+    gst_caps_unref (peercaps);
+    caps =
+        gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
+    goto done;
+  }
+
+  templ = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
+  intersect = gst_caps_intersect (peercaps, templ);
+  gst_caps_unref (peercaps);
+  gst_caps_unref (templ);
+
+  if (gst_caps_is_empty (intersect))
+    return intersect;
+
+  caps = gst_caps_new_empty ();
+  for (i = 0; i < gst_caps_get_size (intersect); i++) {
+    GstStructure *s = gst_caps_get_structure (intersect, i);
+    const gchar *encoding_name = gst_structure_get_string (s, "encoding-name");
+
+    if (!strcmp (encoding_name, "H263-2000")) {
+      const gchar *profile_str = gst_structure_get_string (s, "profile");
+      const gchar *level_str = gst_structure_get_string (s, "level");
+      int profile = 0;
+      int level = 0;
+
+      if (profile_str && level_str) {
+        /* NOTE: these annex flags shadow the outer loop index `i`; the
+         * loop index is not used inside this branch */
+        gboolean i = FALSE, j = FALSE, l = FALSE, t = FALSE, f = FALSE,
+            v = FALSE;
+        GstStructure *new_s = gst_structure_new ("video/x-h263",
+            "variant", G_TYPE_STRING, "itu",
+            NULL);
+
+        profile = atoi (profile_str);
+        level = atoi (level_str);
+
+        /* These profiles are defined in the H.263 Annex X */
+        switch (profile) {
+          case 0:
+            /* The Baseline Profile (Profile 0) */
+            break;
+          case 1:
+            /* H.320 Coding Efficiency Version 2 Backward-Compatibility Profile
+             * (Profile 1)
+             * Baseline + Annexes I, J, L.4 and T
+             */
+            i = j = l = t = TRUE;
+            break;
+          case 2:
+            /* Version 1 Backward-Compatibility Profile (Profile 2)
+             * Baseline + Annex F
+             */
+            i = j = l = t = f = TRUE;
+            break;
+          case 3:
+            /* Version 2 Interactive and Streaming Wireless Profile
+             * Baseline + Annexes I, J, T
+             */
+            i = j = t = TRUE;
+            break;
+          case 4:
+            /* Version 3 Interactive and Streaming Wireless Profile (Profile 4)
+             * Baseline + Annexes I, J, T, V, W.6.3.8,
+             */
+            /* Missing W.6.3.8 */
+            i = j = t = v = TRUE;
+            break;
+          case 5:
+            /* Conversational High Compression Profile (Profile 5)
+             * Baseline + Annexes F, I, J, L.4, T, D, U
+             */
+            /* Missing D, U */
+            f = i = j = l = t = TRUE;
+            break;
+          case 6:
+            /* Conversational Internet Profile (Profile 6)
+             * Baseline + Annexes F, I, J, L.4, T, D, U and
+             * K with arbitrary slice ordering
+             */
+            /* Missing D, U, K with arbitrary slice ordering */
+            f = i = j = l = t = TRUE;
+            break;
+          case 7:
+            /* Conversational Interlace Profile (Profile 7)
+             * Baseline + Annexes F, I, J, L.4, T, D, U, W.6.3.11
+             */
+            /* Missing D, U, W.6.3.11 */
+            f = i = j = l = t = TRUE;
+            break;
+          case 8:
+            /* High Latency Profile (Profile 8)
+             * Baseline + Annexes F, I, J, L.4, T, D, U, P.5, O.1.1 and
+             * K with arbitrary slice ordering
+             */
+            /* Missing D, U, P.5, O.1.1 */
+            f = i = j = l = t = TRUE;
+            break;
+        }
+
+
+        if (f || i || j || t || l || v) {
+          GValue list = { 0 };
+          GValue vstr = { 0 };
+
+          g_value_init (&list, GST_TYPE_LIST);
+          g_value_init (&vstr, G_TYPE_STRING);
+
+          g_value_set_static_string (&vstr, "h263");
+          gst_value_list_append_value (&list, &vstr);
+          g_value_set_static_string (&vstr, "h263p");
+          gst_value_list_append_value (&list, &vstr);
+
+          if (l || v) {
+            g_value_set_static_string (&vstr, "h263pp");
+            gst_value_list_append_value (&list, &vstr);
+          }
+          g_value_unset (&vstr);
+
+          gst_structure_set_value (new_s, "h263version", &list);
+          g_value_unset (&list);
+        } else {
+          gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
+        }
+
+
+        if (!f)
+          gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!i)
+          gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!j)
+          gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!t)
+          gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!l)
+          gst_structure_set (new_s, "annex-l", G_TYPE_BOOLEAN, FALSE, NULL);
+        if (!v)
+          gst_structure_set (new_s, "annex-v", G_TYPE_BOOLEAN, FALSE, NULL);
+
+
+        /* map the level to maximum picture size / framerate limits */
+        if (level <= 10 || level == 45) {
+          gst_structure_set (new_s,
+              "width", GST_TYPE_INT_RANGE, 1, 176,
+              "height", GST_TYPE_INT_RANGE, 1, 144,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
+          caps = gst_caps_merge_structure (caps, new_s);
+        } else if (level <= 20) {
+          GstStructure *s_copy = gst_structure_copy (new_s);
+
+          gst_structure_set (new_s,
+              "width", GST_TYPE_INT_RANGE, 1, 352,
+              "height", GST_TYPE_INT_RANGE, 1, 288,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 2002, NULL);
+          caps = gst_caps_merge_structure (caps, new_s);
+
+          gst_structure_set (s_copy,
+              "width", GST_TYPE_INT_RANGE, 1, 176,
+              "height", GST_TYPE_INT_RANGE, 1, 144,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
+          caps = gst_caps_merge_structure (caps, s_copy);
+        } else if (level <= 40) {
+
+          gst_structure_set (new_s,
+              "width", GST_TYPE_INT_RANGE, 1, 352,
+              "height", GST_TYPE_INT_RANGE, 1, 288,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 30000, 1001, NULL);
+          caps = gst_caps_merge_structure (caps, new_s);
+        } else if (level <= 50) {
+          GstStructure *s_copy = gst_structure_copy (new_s);
+
+          gst_structure_set (new_s,
+              "width", GST_TYPE_INT_RANGE, 1, 352,
+              "height", GST_TYPE_INT_RANGE, 1, 288,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
+          caps = gst_caps_merge_structure (caps, new_s);
+
+          gst_structure_set (s_copy,
+              "width", GST_TYPE_INT_RANGE, 1, 352,
+              "height", GST_TYPE_INT_RANGE, 1, 240,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
+          caps = gst_caps_merge_structure (caps, s_copy);
+        } else if (level <= 60) {
+          GstStructure *s_copy = gst_structure_copy (new_s);
+
+          gst_structure_set (new_s,
+              "width", GST_TYPE_INT_RANGE, 1, 720,
+              "height", GST_TYPE_INT_RANGE, 1, 288,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
+          caps = gst_caps_merge_structure (caps, new_s);
+
+          gst_structure_set (s_copy,
+              "width", GST_TYPE_INT_RANGE, 1, 720,
+              "height", GST_TYPE_INT_RANGE, 1, 240,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
+          caps = gst_caps_merge_structure (caps, s_copy);
+        } else if (level <= 70) {
+          GstStructure *s_copy = gst_structure_copy (new_s);
+
+          gst_structure_set (new_s,
+              "width", GST_TYPE_INT_RANGE, 1, 720,
+              "height", GST_TYPE_INT_RANGE, 1, 576,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 50, 1, NULL);
+          caps = gst_caps_merge_structure (caps, new_s);
+
+          gst_structure_set (s_copy,
+              "width", GST_TYPE_INT_RANGE, 1, 720,
+              "height", GST_TYPE_INT_RANGE, 1, 480,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 60000, 1001, NULL);
+          caps = gst_caps_merge_structure (caps, s_copy);
+        } else {
+          caps = gst_caps_merge_structure (caps, new_s);
+        }
+
+      } else {
+        GstStructure *new_s = gst_structure_new ("video/x-h263",
+            "variant", G_TYPE_STRING, "itu",
+            "h263version", G_TYPE_STRING, "h263",
+            NULL);
+
+        GST_DEBUG_OBJECT (rtph263ppay, "No profile or level specified"
+            " for H263-2000, defaulting to baseline H263");
+
+        caps = gst_caps_merge_structure (caps, new_s);
+      }
+    } else {
+      gboolean f = FALSE, i = FALSE, j = FALSE, t = FALSE;
+      /* FIXME: ffmpeg support the Appendix K too, how do we express it ?
+       * guint k;
+       */
+      const gchar *str;
+      GstStructure *new_s = gst_structure_new ("video/x-h263",
+          "variant", G_TYPE_STRING, "itu",
+          NULL);
+      gboolean added = FALSE;
+
+      str = gst_structure_get_string (s, "f");
+      if (str && !strcmp (str, "1"))
+        f = TRUE;
+
+      str = gst_structure_get_string (s, "i");
+      if (str && !strcmp (str, "1"))
+        i = TRUE;
+
+      str = gst_structure_get_string (s, "j");
+      if (str && !strcmp (str, "1"))
+        j = TRUE;
+
+      str = gst_structure_get_string (s, "t");
+      if (str && !strcmp (str, "1"))
+        t = TRUE;
+
+      if (f || i || j || t) {
+        GValue list = { 0 };
+        GValue vstr = { 0 };
+
+        g_value_init (&list, GST_TYPE_LIST);
+        g_value_init (&vstr, G_TYPE_STRING);
+
+        g_value_set_static_string (&vstr, "h263");
+        gst_value_list_append_value (&list, &vstr);
+        g_value_set_static_string (&vstr, "h263p");
+        gst_value_list_append_value (&list, &vstr);
+        g_value_unset (&vstr);
+
+        gst_structure_set_value (new_s, "h263version", &list);
+        g_value_unset (&list);
+      } else {
+        gst_structure_set (new_s, "h263version", G_TYPE_STRING, "h263", NULL);
+      }
+
+      if (!f)
+        gst_structure_set (new_s, "annex-f", G_TYPE_BOOLEAN, FALSE, NULL);
+      if (!i)
+        gst_structure_set (new_s, "annex-i", G_TYPE_BOOLEAN, FALSE, NULL);
+      if (!j)
+        gst_structure_set (new_s, "annex-j", G_TYPE_BOOLEAN, FALSE, NULL);
+      if (!t)
+        gst_structure_set (new_s, "annex-t", G_TYPE_BOOLEAN, FALSE, NULL);
+
+
+      str = gst_structure_get_string (s, "custom");
+      if (str) {
+        unsigned int xmax, ymax, mpi;
+        if (sscanf (str, "%u,%u,%u", &xmax, &ymax, &mpi) == 3) {
+          /* RFC 4629: custom picture width and height must be divisible
+           * by 4 and MPI must be in [1, 32]; the original condition
+           * (`xmax % 4 && ymax % 4`) was inverted and accepted only
+           * sizes NOT divisible by 4 */
+          if (xmax % 4 == 0 && ymax % 4 == 0 && mpi >= 1 && mpi <= 32) {
+            caps = caps_append (caps, new_s, xmax, ymax, mpi);
+            added = TRUE;
+          } else {
+            GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI"
+                " %u x %u at %u, ignoring", xmax, ymax, mpi);
+          }
+        } else {
+          GST_WARNING_OBJECT (rtph263ppay, "Invalid custom framesize/MPI: %s,"
+              " ignoring", str);
+        }
+      }
+
+      str = gst_structure_get_string (s, "16cif");
+      if (str) {
+        int mpi = atoi (str);
+        caps = caps_append (caps, new_s, 1408, 1152, mpi);
+        added = TRUE;
+      }
+
+      str = gst_structure_get_string (s, "4cif");
+      if (str) {
+        int mpi = atoi (str);
+        caps = caps_append (caps, new_s, 704, 576, mpi);
+        added = TRUE;
+      }
+
+      str = gst_structure_get_string (s, "cif");
+      if (str) {
+        int mpi = atoi (str);
+        caps = caps_append (caps, new_s, 352, 288, mpi);
+        added = TRUE;
+      }
+
+      str = gst_structure_get_string (s, "qcif");
+      if (str) {
+        int mpi = atoi (str);
+        caps = caps_append (caps, new_s, 176, 144, mpi);
+        added = TRUE;
+      }
+
+      str = gst_structure_get_string (s, "sqcif");
+      if (str) {
+        int mpi = atoi (str);
+        caps = caps_append (caps, new_s, 128, 96, mpi);
+        added = TRUE;
+      }
+
+      /* caps_append() copies new_s, so free it if any size was appended;
+       * otherwise merge it (which takes ownership) */
+      if (added)
+        gst_structure_free (new_s);
+      else
+        caps = gst_caps_merge_structure (caps, new_s);
+    }
+  }
+
+  gst_caps_unref (intersect);
+
+done:
+
+  if (filter) {
+    GstCaps *tmp;
+
+    GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
+        GST_PTR_FORMAT, caps, filter);
+    tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (caps);
+    caps = tmp;
+  }
+
+  return caps;
+}
+
+
+/* GObject property setter for #GstRtpH263PPay. */
+static void
+gst_rtp_h263p_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpH263PPay *self = GST_RTP_H263P_PAY (object);
+
+  switch (prop_id) {
+    case PROP_FRAGMENTATION_MODE:
+      self->fragmentation_mode = g_value_get_enum (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter for #GstRtpH263PPay. */
+static void
+gst_rtp_h263p_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpH263PPay *self = GST_RTP_H263P_PAY (object);
+
+  switch (prop_id) {
+    case PROP_FRAGMENTATION_MODE:
+      g_value_set_enum (value, self->fragmentation_mode);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Packetize everything currently queued in the adapter into one or more
+ * RTP packets (RFC 4629) and push them downstream.
+ *
+ * The logic is order-sensitive (adapter flush offsets, scan offsets and
+ * the P-bit/marker handling depend on each other), so this function is
+ * documented but otherwise left untouched. */
+static GstFlowReturn
+gst_rtp_h263p_pay_flush (GstRtpH263PPay * rtph263ppay)
+{
+  guint avail;
+  GstBufferList *list = NULL;
+  GstBuffer *outbuf = NULL;
+  GstFlowReturn ret;
+  gboolean fragmented = FALSE;
+
+  avail = gst_adapter_available (rtph263ppay->adapter);
+  if (avail == 0)
+    return GST_FLOW_OK;
+
+  fragmented = FALSE;
+  /* This algorithm assumes the H263/+/++ encoder sends complete frames in each
+   * buffer */
+  /* With Fragmentation Mode at GST_FRAGMENTATION_MODE_NORMAL:
+   * This algorithm implements the Follow-on packets method for packetization.
+   * This assumes low packet loss network.
+   * With Fragmentation Mode at GST_FRAGMENTATION_MODE_SYNC:
+   * This algorithm separates large frames at synchronisation points (Segments)
+   * (See RFC 4629 section 6). It would be interesting to have a property such as network
+   * quality to select between both packetization methods */
+  /* TODO Add VRC support (See RFC 4629 section 5.2) */
+
+  while (avail > 0) {
+    guint towrite;
+    guint8 *payload;
+    gint header_len;
+    guint next_gop = 0;
+    gboolean found_gob = FALSE;
+    GstRTPBuffer rtp = { NULL };
+    GstBuffer *payload_buf;
+
+    if (rtph263ppay->fragmentation_mode == GST_FRAGMENTATION_MODE_SYNC) {
+      /* start after 1st gop possible */
+
+      /* Check if we have a gob or eos , eossbs */
+      /* FIXME EOS and EOSSBS packets should never contain any gobs and vice-versa */
+      /* scan for the 17-bit GOB startcode (0000 0000 0000 0000 1) at the
+       * current read position */
+      next_gop =
+          gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
+          0x00008000, 0, avail);
+      if (next_gop == 0) {
+        GST_DEBUG_OBJECT (rtph263ppay, " Found GOB header");
+        found_gob = TRUE;
+      }
+
+      /* Find next and cut the packet accordingly */
+      /* TODO we should get as many gobs as possible until MTU is reached, this
+       * code seems to just get one GOB per packet */
+      if (next_gop == 0 && avail > 3)
+        next_gop =
+            gst_adapter_masked_scan_uint32 (rtph263ppay->adapter, 0xffff8000,
+            0x00008000, 3, avail - 3);
+      GST_DEBUG_OBJECT (rtph263ppay, " Next GOB Detected at : %d", next_gop);
+      if (next_gop == -1)
+        next_gop = 0;
+    }
+
+    /* for picture start frames (non-fragmented), we need to remove the first
+     * two 0x00 bytes and set P=1 */
+    /* NOTE(review): assumes at least 2 bytes are available here; a
+     * malformed < 2-byte input would underflow avail — confirm callers
+     * always queue complete pictures */
+    if (!fragmented || found_gob) {
+      gst_adapter_flush (rtph263ppay->adapter, 2);
+      avail -= 2;
+    }
+    header_len = 2;
+
+    /* packet payload is limited by the MTU minus our 2-byte H.263 header */
+    towrite = MIN (avail, gst_rtp_buffer_calc_payload_len
+        (GST_RTP_BASE_PAYLOAD_MTU (rtph263ppay) - header_len, 0, 0));
+
+    if (next_gop > 0)
+      towrite = MIN (next_gop, towrite);
+
+    outbuf =
+        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+        (rtph263ppay), header_len, 0, 0);
+
+    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+    /* last fragment gets the marker bit set */
+    gst_rtp_buffer_set_marker (&rtp, avail > towrite ? 0 : 1);
+
+    payload = gst_rtp_buffer_get_payload (&rtp);
+
+    /* 0                   1
+     * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |   RR    |P|V|   PLEN    |PEBIT|
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     */
+    /* if fragmented or gop header , write p bit =1 */
+    payload[0] = (fragmented && !found_gob) ? 0x00 : 0x04;
+    payload[1] = 0;
+
+    /* every packet of this picture carries the picture's timestamp */
+    GST_BUFFER_PTS (outbuf) = rtph263ppay->first_timestamp;
+    GST_BUFFER_DURATION (outbuf) = rtph263ppay->first_duration;
+    gst_rtp_buffer_unmap (&rtp);
+
+    payload_buf = gst_adapter_take_buffer_fast (rtph263ppay->adapter, towrite);
+    gst_rtp_copy_video_meta (rtph263ppay, outbuf, payload_buf);
+    outbuf = gst_buffer_append (outbuf, payload_buf);
+    avail -= towrite;
+
+    /* If more data is available and this is our first iteration,
+     * we create a buffer list and remember that we're fragmented.
+     *
+     * If we're fragmented already, add buffers to the previously
+     * created buffer list.
+     *
+     * Otherwise fragmented will be FALSE and we just push the single output
+     * buffer, and no list is allocated.
+     */
+    if (avail && !fragmented) {
+      fragmented = TRUE;
+      list = gst_buffer_list_new ();
+      gst_buffer_list_add (list, outbuf);
+    } else if (fragmented) {
+      gst_buffer_list_add (list, outbuf);
+    }
+  }
+
+  /* push everything in one go when fragmented, else the single buffer */
+  if (fragmented) {
+    ret =
+        gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtph263ppay),
+        list);
+  } else {
+    ret =
+        gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtph263ppay), outbuf);
+  }
+
+  return ret;
+}
+
+/* handle_buffer vfunc: queue one encoder buffer (assumed to be a
+ * complete picture) and immediately packetize it. */
+static GstFlowReturn
+gst_rtp_h263p_pay_handle_buffer (GstRTPBasePayload * payload,
+    GstBuffer * buffer)
+{
+  GstRtpH263PPay *self = GST_RTP_H263P_PAY (payload);
+
+  /* remember the picture's timing; flush stamps every output packet
+   * with these values */
+  self->first_timestamp = GST_BUFFER_PTS (buffer);
+  self->first_duration = GST_BUFFER_DURATION (buffer);
+
+  /* we always encode and flush a full picture */
+  gst_adapter_push (self->adapter, buffer);
+  return gst_rtp_h263p_pay_flush (self);
+}
diff --git a/gst/rtp/gstrtph263ppay.h b/gst/rtp/gstrtph263ppay.h
new file mode 100644
index 0000000000..7197b86e5b
--- /dev/null
+++ b/gst/rtp/gstrtph263ppay.h
@@ -0,0 +1,68 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H263P_PAY_H__
+#define __GST_RTP_H263P_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_H263P_PAY \
+ (gst_rtp_h263p_pay_get_type())
+#define GST_RTP_H263P_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H263P_PAY,GstRtpH263PPay))
+#define GST_RTP_H263P_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H263P_PAY,GstRtpH263PPayClass))
+#define GST_IS_RTP_H263P_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H263P_PAY))
+#define GST_IS_RTP_H263P_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H263P_PAY))
+
+typedef struct _GstRtpH263PPay GstRtpH263PPay;
+typedef struct _GstRtpH263PPayClass GstRtpH263PPayClass;
+
+/* How large frames are split across RTP packets (see
+ * gst_rtp_h263p_pay_flush) */
+typedef enum
+{
+  GST_FRAGMENTATION_MODE_NORMAL = 0,
+  GST_FRAGMENTATION_MODE_SYNC = 1
+} GstFragmentationMode;
+
+struct _GstRtpH263PPay
+{
+  GstRTPBasePayload payload;
+
+  /* queues the incoming picture data until it is packetized */
+  GstAdapter *adapter;
+  /* PTS of the picture currently being packetized; copied onto every
+   * output RTP buffer */
+  GstClockTime first_timestamp;
+  /* duration of the picture currently being packetized */
+  GstClockTime first_duration;
+  /* "fragmentation-mode" property value */
+  GstFragmentationMode fragmentation_mode;
+};
+
+struct _GstRtpH263PPayClass
+{
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_h263p_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_H263P_PAY_H__ */
diff --git a/gst/rtp/gstrtph264depay.c b/gst/rtp/gstrtph264depay.c
new file mode 100644
index 0000000000..9cef347c21
--- /dev/null
+++ b/gst/rtp/gstrtph264depay.c
@@ -0,0 +1,1512 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdio.h>
+#include <string.h>
+
+#include <gst/base/gstbitreader.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "gstrtph264depay.h"
+#include "gstrtputils.h"
+
GST_DEBUG_CATEGORY_STATIC (rtph264depay_debug);
#define GST_CAT_DEFAULT (rtph264depay_debug)

/* This is what we'll default to when downstream hasn't
 * expressed a restriction or preference via caps */
#define DEFAULT_BYTE_STREAM TRUE
#define DEFAULT_ACCESS_UNIT FALSE
#define DEFAULT_WAIT_FOR_KEYFRAME FALSE
#define DEFAULT_REQUEST_KEYFRAME FALSE

/* GObject property IDs */
enum
{
  PROP_0,
  PROP_WAIT_FOR_KEYFRAME,
  PROP_REQUEST_KEYFRAME,
};


/* Annex B start code (3 zero bytes followed by 1), prepended to every NAL
 * in byte-stream output mode */
static const guint8 sync_bytes[] = { 0, 0, 0, 1 };

/* Output caps: AVC requires AU alignment; byte-stream may be NAL or AU
 * aligned (see gst_rtp_h264_depay_negotiate) */
static GstStaticPadTemplate gst_rtp_h264_depay_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-h264, "
        "stream-format = (string) avc, alignment = (string) au; "
        "video/x-h264, "
        "stream-format = (string) byte-stream, alignment = (string) { nal, au }")
    );

static GstStaticPadTemplate gst_rtp_h264_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"H264\"")
    /* optional parameters */
    /* "profile-level-id = (string) ANY, " */
    /* "max-mbps = (string) ANY, " */
    /* "max-fs = (string) ANY, " */
    /* "max-cpb = (string) ANY, " */
    /* "max-dpb = (string) ANY, " */
    /* "max-br = (string) ANY, " */
    /* "redundant-pic-cap = (string) { \"0\", \"1\" }, " */
    /* "sprop-parameter-sets = (string) ANY, " */
    /* "parameter-add = (string) { \"0\", \"1\" }, " */
    /* "packetization-mode = (string) { \"0\", \"1\", \"2\" }, " */
    /* "sprop-interleaving-depth = (string) ANY, " */
    /* "sprop-deint-buf-req = (string) ANY, " */
    /* "deint-buf-cap = (string) ANY, " */
    /* "sprop-init-buf-time = (string) ANY, " */
    /* "sprop-max-don-diff = (string) ANY, " */
    /* "max-rcmd-nalu-size = (string) ANY " */
    );

#define gst_rtp_h264_depay_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstRtpH264Depay, gst_rtp_h264_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD, GST_DEBUG_CATEGORY_INIT (rtph264depay_debug,
        "rtph264depay", 0, "H264 Video RTP Depayloader"));
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph264depay, "rtph264depay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_H264_DEPAY, rtp_element_init (plugin));

/* forward declarations for the vfuncs / helpers wired up in class_init */
static void gst_rtp_h264_depay_finalize (GObject * object);

static GstStateChangeReturn gst_rtp_h264_depay_change_state (GstElement *
    element, GstStateChange transition);

static GstBuffer *gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload,
    GstRTPBuffer * rtp);
static gboolean gst_rtp_h264_depay_setcaps (GstRTPBaseDepayload * filter,
    GstCaps * caps);
static gboolean gst_rtp_h264_depay_handle_event (GstRTPBaseDepayload * depay,
    GstEvent * event);
static GstBuffer *gst_rtp_h264_complete_au (GstRtpH264Depay * rtph264depay,
    GstClockTime * out_timestamp, gboolean * out_keyframe);
static void gst_rtp_h264_depay_push (GstRtpH264Depay * rtph264depay,
    GstBuffer * outbuf, gboolean keyframe, GstClockTime timestamp,
    gboolean marker);
+
+static void
+gst_rtp_h264_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpH264Depay *self = GST_RTP_H264_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_WAIT_FOR_KEYFRAME:
+ self->wait_for_keyframe = g_value_get_boolean (value);
+ break;
+ case PROP_REQUEST_KEYFRAME:
+ self->request_keyframe = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_h264_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpH264Depay *self = GST_RTP_H264_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_WAIT_FOR_KEYFRAME:
+ g_value_set_boolean (value, self->wait_for_keyframe);
+ break;
+ case PROP_REQUEST_KEYFRAME:
+ g_value_set_boolean (value, self->request_keyframe);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
/* Class initializer: installs properties, pad templates, element metadata
 * and the GstRTPBaseDepayload virtual functions. */
static void
gst_rtp_h264_depay_class_init (GstRtpH264DepayClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;

  gobject_class->finalize = gst_rtp_h264_depay_finalize;
  gobject_class->set_property = gst_rtp_h264_depay_set_property;
  gobject_class->get_property = gst_rtp_h264_depay_get_property;

  /**
   * GstRtpH264Depay:wait-for-keyframe:
   *
   * Wait for the next keyframe after packet loss,
   * meaningful only when outputting access units
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_WAIT_FOR_KEYFRAME,
      g_param_spec_boolean ("wait-for-keyframe", "Wait for Keyframe",
          "Wait for the next keyframe after packet loss, meaningful only when "
          "outputting access units",
          DEFAULT_WAIT_FOR_KEYFRAME,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /**
   * GstRtpH264Depay:request-keyframe:
   *
   * Request new keyframe when packet loss is detected
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_REQUEST_KEYFRAME,
      g_param_spec_boolean ("request-keyframe", "Request Keyframe",
          "Request new keyframe when packet loss is detected",
          DEFAULT_REQUEST_KEYFRAME,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h264_depay_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_h264_depay_sink_template);

  gst_element_class_set_static_metadata (gstelement_class,
      "RTP H264 depayloader", "Codec/Depayloader/Network/RTP",
      "Extracts H264 video from RTP packets (RFC 3984)",
      "Wim Taymans <wim.taymans@gmail.com>");
  gstelement_class->change_state = gst_rtp_h264_depay_change_state;

  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_h264_depay_process;
  gstrtpbasedepayload_class->set_caps = gst_rtp_h264_depay_setcaps;
  gstrtpbasedepayload_class->handle_event = gst_rtp_h264_depay_handle_event;
}
+
+static void
+gst_rtp_h264_depay_init (GstRtpH264Depay * rtph264depay)
+{
+ rtph264depay->adapter = gst_adapter_new ();
+ rtph264depay->picture_adapter = gst_adapter_new ();
+ rtph264depay->byte_stream = DEFAULT_BYTE_STREAM;
+ rtph264depay->merge = DEFAULT_ACCESS_UNIT;
+ rtph264depay->sps = g_ptr_array_new_with_free_func (
+ (GDestroyNotify) gst_buffer_unref);
+ rtph264depay->pps = g_ptr_array_new_with_free_func (
+ (GDestroyNotify) gst_buffer_unref);
+ rtph264depay->wait_for_keyframe = DEFAULT_WAIT_FOR_KEYFRAME;
+ rtph264depay->request_keyframe = DEFAULT_REQUEST_KEYFRAME;
+}
+
+static void
+gst_rtp_h264_depay_reset (GstRtpH264Depay * rtph264depay, gboolean hard)
+{
+ gst_adapter_clear (rtph264depay->adapter);
+ rtph264depay->wait_start = TRUE;
+ rtph264depay->waiting_for_keyframe = rtph264depay->wait_for_keyframe;
+ gst_adapter_clear (rtph264depay->picture_adapter);
+ rtph264depay->picture_start = FALSE;
+ rtph264depay->last_keyframe = FALSE;
+ rtph264depay->last_ts = 0;
+ rtph264depay->current_fu_type = 0;
+ rtph264depay->new_codec_data = FALSE;
+ g_ptr_array_set_size (rtph264depay->sps, 0);
+ g_ptr_array_set_size (rtph264depay->pps, 0);
+
+ if (hard) {
+ if (rtph264depay->allocator != NULL) {
+ gst_object_unref (rtph264depay->allocator);
+ rtph264depay->allocator = NULL;
+ }
+ gst_allocation_params_init (&rtph264depay->params);
+ }
+}
+
+static void
+gst_rtp_h264_depay_drain (GstRtpH264Depay * rtph264depay)
+{
+ GstClockTime timestamp;
+ gboolean keyframe;
+ GstBuffer *outbuf;
+
+ if (!rtph264depay->picture_start)
+ return;
+
+ outbuf = gst_rtp_h264_complete_au (rtph264depay, &timestamp, &keyframe);
+ if (outbuf)
+ gst_rtp_h264_depay_push (rtph264depay, outbuf, keyframe, timestamp, FALSE);
+}
+
+static void
+gst_rtp_h264_depay_finalize (GObject * object)
+{
+ GstRtpH264Depay *rtph264depay;
+
+ rtph264depay = GST_RTP_H264_DEPAY (object);
+
+ if (rtph264depay->codec_data)
+ gst_buffer_unref (rtph264depay->codec_data);
+
+ g_object_unref (rtph264depay->adapter);
+ g_object_unref (rtph264depay->picture_adapter);
+
+ g_ptr_array_free (rtph264depay->sps, TRUE);
+ g_ptr_array_free (rtph264depay->pps, TRUE);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_rtp_h264_depay_negotiate (GstRtpH264Depay * rtph264depay)
+{
+ GstCaps *caps;
+ gint byte_stream = -1;
+ gint merge = -1;
+
+ caps =
+ gst_pad_get_allowed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph264depay));
+
+ GST_DEBUG_OBJECT (rtph264depay, "allowed caps: %" GST_PTR_FORMAT, caps);
+
+ if (caps) {
+ if (gst_caps_get_size (caps) > 0) {
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ const gchar *str = NULL;
+
+ if ((str = gst_structure_get_string (s, "stream-format"))) {
+ if (strcmp (str, "avc") == 0) {
+ byte_stream = FALSE;
+ } else if (strcmp (str, "byte-stream") == 0) {
+ byte_stream = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (rtph264depay, "unknown stream-format: %s", str);
+ }
+ }
+
+ if ((str = gst_structure_get_string (s, "alignment"))) {
+ if (strcmp (str, "au") == 0) {
+ merge = TRUE;
+ } else if (strcmp (str, "nal") == 0) {
+ merge = FALSE;
+ } else {
+ GST_DEBUG_OBJECT (rtph264depay, "unknown alignment: %s", str);
+ }
+ }
+ }
+ gst_caps_unref (caps);
+ }
+
+ if (byte_stream != -1) {
+ GST_DEBUG_OBJECT (rtph264depay, "downstream requires byte-stream %d",
+ byte_stream);
+ rtph264depay->byte_stream = byte_stream;
+ } else {
+ GST_DEBUG_OBJECT (rtph264depay, "defaulting to byte-stream %d",
+ DEFAULT_BYTE_STREAM);
+ rtph264depay->byte_stream = DEFAULT_BYTE_STREAM;
+ }
+ if (merge != -1) {
+ GST_DEBUG_OBJECT (rtph264depay, "downstream requires merge %d", merge);
+ rtph264depay->merge = merge;
+ } else {
+ GST_DEBUG_OBJECT (rtph264depay, "defaulting to merge %d",
+ DEFAULT_ACCESS_UNIT);
+ rtph264depay->merge = DEFAULT_ACCESS_UNIT;
+ }
+}
+
+static gboolean
+parse_sps (GstMapInfo * map, guint32 * sps_id)
+{
+ GstBitReader br = GST_BIT_READER_INIT (map->data + 4,
+ map->size - 4);
+
+ if (map->size < 5)
+ return FALSE;
+
+ if (!gst_rtp_read_golomb (&br, sps_id))
+ return FALSE;
+
+ return TRUE;
+}
+
+static gboolean
+parse_pps (GstMapInfo * map, guint32 * sps_id, guint32 * pps_id)
+{
+ GstBitReader br = GST_BIT_READER_INIT (map->data + 1,
+ map->size - 1);
+
+ if (map->size < 2)
+ return FALSE;
+
+ if (!gst_rtp_read_golomb (&br, pps_id))
+ return FALSE;
+ if (!gst_rtp_read_golomb (&br, sps_id))
+ return FALSE;
+
+ return TRUE;
+}
+
+static gboolean
+gst_rtp_h264_depay_set_output_caps (GstRtpH264Depay * rtph264depay,
+ GstCaps * caps)
+{
+ GstAllocationParams params;
+ GstAllocator *allocator = NULL;
+ GstPad *srcpad;
+ gboolean res;
+
+ gst_allocation_params_init (&params);
+
+ srcpad = GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph264depay);
+ res = gst_pad_set_caps (srcpad, caps);
+ if (res) {
+ GstQuery *query;
+
+ query = gst_query_new_allocation (caps, TRUE);
+ if (!gst_pad_peer_query (srcpad, query)) {
+ GST_DEBUG_OBJECT (rtph264depay, "downstream ALLOCATION query failed");
+ }
+
+ if (gst_query_get_n_allocation_params (query) > 0) {
+ gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+ }
+
+ gst_query_unref (query);
+ }
+
+ if (rtph264depay->allocator)
+ gst_object_unref (rtph264depay->allocator);
+
+ rtph264depay->allocator = allocator;
+ rtph264depay->params = params;
+
+ return res;
+}
+
/* (Re)configure the src caps from the current output mode and the collected
 * SPS/PPS. In AVC mode an avcC codec_data blob is built and attached to the
 * caps; in byte-stream mode the parameter sets are instead queued in
 * self->codec_data to be prepended to the next pushed buffer.
 * Returns FALSE if setting the caps downstream failed. */
static gboolean
gst_rtp_h264_set_src_caps (GstRtpH264Depay * rtph264depay)
{
  gboolean res = TRUE;
  GstCaps *srccaps;
  GstCaps *old_caps;
  GstPad *srcpad;

  /* In AVC mode we can only produce caps once we have fresh codec data with
   * at least one SPS and one PPS; until then report success and wait. */
  if (!rtph264depay->byte_stream &&
      (!rtph264depay->new_codec_data ||
          rtph264depay->sps->len == 0 || rtph264depay->pps->len == 0))
    return TRUE;

  srccaps = gst_caps_new_simple ("video/x-h264",
      "stream-format", G_TYPE_STRING,
      rtph264depay->byte_stream ? "byte-stream" : "avc",
      "alignment", G_TYPE_STRING, rtph264depay->merge ? "au" : "nal", NULL);

  if (!rtph264depay->byte_stream) {
    /* AVC mode: serialize the parameter sets into an
     * AVCDecoderConfigurationRecord (ISO/IEC 14496-15 "avcC"). */
    GstBuffer *codec_data;
    GstMapInfo map;
    GstMapInfo nalmap;
    guint8 *data;
    guint len;
    guint new_size;
    guint i, first_sps, num_sps, first_pps, num_pps;
    guchar level = 0;
    guchar profile_compat = G_MAXUINT8;

    /* start with 7 bytes header */
    len = 7;
    /* count sps & pps: each is prefixed by a 16-bit length */
    for (i = 0; i < rtph264depay->sps->len; i++)
      len += 2 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->sps, i));
    for (i = 0; i < rtph264depay->pps->len; i++)
      len += 2 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->pps, i));

    codec_data = gst_buffer_new_and_alloc (len);
    gst_buffer_map (codec_data, &map, GST_MAP_READWRITE);
    data = map.data;

    /* 8 bits version == 1 */
    *data++ = 1;

    /* According to: ISO/IEC 14496-15:2004(E) section 5.2.4.1
     * The level is the max level of all SPSes
     * A profile compat bit can only be set if all SPSes include that bit
     */
    for (i = 0; i < rtph264depay->sps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->sps, i), &nalmap,
          GST_MAP_READ);
      profile_compat &= nalmap.data[2];
      level = MAX (level, nalmap.data[3]);
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, i), &nalmap);
    }

    /* Assume all SPSes use the same profile, so extract from the first SPS */
    gst_buffer_map (g_ptr_array_index (rtph264depay->sps, 0), &nalmap,
        GST_MAP_READ);
    *data++ = nalmap.data[1];
    gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, 0), &nalmap);
    *data++ = profile_compat;
    *data++ = level;

    /* 6 bits reserved | 2 bits lengthSizeMinusOne (always 4-byte lengths) */
    *data++ = 0xff;

    /* the avcC SPS count field is only 5 bits wide */
    if (rtph264depay->sps->len > 31) {
      GST_WARNING_OBJECT (rtph264depay,
          "Too many SPS to put in codec_data. Sending the most recent 31");
      num_sps = 31;
      first_sps = rtph264depay->sps->len - 31;
    } else {
      num_sps = rtph264depay->sps->len;
      first_sps = 0;
    }

    /* 3 bits reserved | 5 bits numOfSequenceParameterSets */
    *data++ = 0xe0 | (num_sps & 0x1f);

    /* copy all SPS */
    for (i = first_sps; i < rtph264depay->sps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->sps, i), &nalmap,
          GST_MAP_READ);

      GST_DEBUG_OBJECT (rtph264depay, "copy SPS %d of length %u", i,
          (guint) nalmap.size);
      GST_WRITE_UINT16_BE (data, nalmap.size);
      data += 2;
      memcpy (data, nalmap.data, nalmap.size);
      data += nalmap.size;
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, i), &nalmap);
    }

    /* the avcC PPS count field is 8 bits wide */
    if (rtph264depay->pps->len > 255) {
      GST_WARNING_OBJECT (rtph264depay,
          "Too many PPS to put in codec_data. Sending the most recent 255");
      num_pps = 255;
      first_pps = rtph264depay->pps->len - 255;
    } else {
      num_pps = rtph264depay->pps->len;
      first_pps = 0;
    }

    /* 8 bits numOfPictureParameterSets */
    *data++ = num_pps;

    /* copy all PPS */
    for (i = first_pps; i < rtph264depay->pps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->pps, i), &nalmap,
          GST_MAP_READ);

      GST_DEBUG_OBJECT (rtph264depay, "copy PPS %d of length %u", i,
          (guint) nalmap.size);
      GST_WRITE_UINT16_BE (data, nalmap.size);
      data += 2;
      memcpy (data, nalmap.data, nalmap.size);
      data += nalmap.size;
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->pps, i), &nalmap);
    }

    /* we over-allocated (the loops above may have skipped early SPS/PPS),
     * shrink the buffer to the bytes actually written */
    new_size = data - map.data;
    gst_buffer_unmap (codec_data, &map);
    gst_buffer_set_size (codec_data, new_size);

    gst_caps_set_simple (srccaps,
        "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
    gst_buffer_unref (codec_data);
  }

  /* Set profile and level from SPS */
  {
    gint i;
    GstBuffer *max_level_sps = NULL;
    gint level = 0;
    GstMapInfo nalmap;

    /* Get the SPS with the highest level. We assume
     * all SPS have the same profile */
    for (i = 0; i < rtph264depay->sps->len; i++) {
      gst_buffer_map (g_ptr_array_index (rtph264depay->sps, i), &nalmap,
          GST_MAP_READ);
      if (level == 0 || level < nalmap.data[3]) {
        max_level_sps = g_ptr_array_index (rtph264depay->sps, i);
        level = nalmap.data[3];
      }
      gst_buffer_unmap (g_ptr_array_index (rtph264depay->sps, i), &nalmap);
    }

    if (max_level_sps) {
      gst_buffer_map (max_level_sps, &nalmap, GST_MAP_READ);
      gst_codec_utils_h264_caps_set_level_and_profile (srccaps, nalmap.data + 1,
          nalmap.size - 1);
      gst_buffer_unmap (max_level_sps, &nalmap);
    }
  }

  srcpad = GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph264depay);

  old_caps = gst_pad_get_current_caps (srcpad);

  /* only renegotiate when the caps actually changed */
  if (old_caps == NULL || !gst_caps_is_equal (srccaps, old_caps)) {
    res = gst_rtp_h264_depay_set_output_caps (rtph264depay, srccaps);
  }

  gst_clear_caps (&old_caps);
  gst_caps_unref (srccaps);

  /* Insert SPS and PPS into the stream on next opportunity (if bytestream) */
  if (rtph264depay->byte_stream
      && (rtph264depay->sps->len > 0 || rtph264depay->pps->len > 0)) {
    gint i;
    GstBuffer *codec_data;
    GstMapInfo map;
    guint8 *data;
    guint len = 0;

    /* 4 bytes start code / length prefix per NAL */
    for (i = 0; i < rtph264depay->sps->len; i++) {
      len += 4 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->sps, i));
    }

    for (i = 0; i < rtph264depay->pps->len; i++) {
      len += 4 + gst_buffer_get_size (g_ptr_array_index (rtph264depay->pps, i));
    }

    codec_data = gst_buffer_new_and_alloc (len);
    gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
    data = map.data;

    for (i = 0; i < rtph264depay->sps->len; i++) {
      GstBuffer *sps_buf = g_ptr_array_index (rtph264depay->sps, i);
      guint sps_size = gst_buffer_get_size (sps_buf);

      /* byte_stream is always TRUE in this branch, so the start code is
       * used; the length-prefix arm is kept for symmetry */
      if (rtph264depay->byte_stream)
        memcpy (data, sync_bytes, sizeof (sync_bytes));
      else
        GST_WRITE_UINT32_BE (data, sps_size);
      gst_buffer_extract (sps_buf, 0, data + 4, -1);
      data += 4 + sps_size;
    }

    for (i = 0; i < rtph264depay->pps->len; i++) {
      GstBuffer *pps_buf = g_ptr_array_index (rtph264depay->pps, i);
      guint pps_size = gst_buffer_get_size (pps_buf);

      if (rtph264depay->byte_stream)
        memcpy (data, sync_bytes, sizeof (sync_bytes));
      else
        GST_WRITE_UINT32_BE (data, pps_size);
      gst_buffer_extract (pps_buf, 0, data + 4, -1);
      data += 4 + pps_size;
    }

    gst_buffer_unmap (codec_data, &map);
    /* queue for prepending in gst_rtp_h264_depay_push */
    if (rtph264depay->codec_data)
      gst_buffer_unref (rtph264depay->codec_data);
    rtph264depay->codec_data = codec_data;
  }

  if (res)
    rtph264depay->new_codec_data = FALSE;

  return res;
}
+
+gboolean
+gst_rtp_h264_add_sps_pps (GstElement * rtph264, GPtrArray * sps_array,
+ GPtrArray * pps_array, GstBuffer * nal)
+{
+ GstMapInfo map;
+ guchar type;
+ guint i;
+
+ gst_buffer_map (nal, &map, GST_MAP_READ);
+
+ type = map.data[0] & 0x1f;
+
+ if (type == 7) {
+ guint32 sps_id;
+
+ if (!parse_sps (&map, &sps_id)) {
+ GST_WARNING_OBJECT (rtph264, "Invalid SPS,"
+ " can't parse seq_parameter_set_id");
+ goto drop;
+ }
+
+ for (i = 0; i < sps_array->len; i++) {
+ GstBuffer *sps = g_ptr_array_index (sps_array, i);
+ GstMapInfo spsmap;
+ guint32 tmp_sps_id;
+
+ gst_buffer_map (sps, &spsmap, GST_MAP_READ);
+ parse_sps (&spsmap, &tmp_sps_id);
+
+ if (sps_id == tmp_sps_id) {
+ /* If this is already the most recent SPS and unchanged, nothing to do */
+ if (i == (sps_array->len - 1) && map.size == spsmap.size &&
+ memcmp (map.data, spsmap.data, spsmap.size) == 0) {
+ GST_LOG_OBJECT (rtph264,
+ "Unchanged SPS %u already most recent, not updating", sps_id);
+ gst_buffer_unmap (sps, &spsmap);
+ goto drop;
+ } else {
+ gst_buffer_unmap (sps, &spsmap);
+ g_ptr_array_remove_index (sps_array, i);
+ g_ptr_array_add (sps_array, nal);
+ GST_LOG_OBJECT (rtph264, "Modified SPS %u, replacing", sps_id);
+ goto done;
+ }
+ }
+ gst_buffer_unmap (sps, &spsmap);
+ }
+ GST_LOG_OBJECT (rtph264, "Adding new SPS %u", sps_id);
+ g_ptr_array_add (sps_array, nal);
+ } else if (type == 8) {
+ guint32 sps_id;
+ guint32 pps_id;
+
+ if (!parse_pps (&map, &sps_id, &pps_id)) {
+ GST_WARNING_OBJECT (rtph264, "Invalid PPS,"
+ " can't parse seq_parameter_set_id or pic_parameter_set_id");
+ goto drop;
+ }
+
+ for (i = 0; i < pps_array->len; i++) {
+ GstBuffer *pps = g_ptr_array_index (pps_array, i);
+ GstMapInfo ppsmap;
+ guint32 tmp_sps_id;
+ guint32 tmp_pps_id;
+
+
+ gst_buffer_map (pps, &ppsmap, GST_MAP_READ);
+ parse_pps (&ppsmap, &tmp_sps_id, &tmp_pps_id);
+
+ if (pps_id == tmp_pps_id) {
+ /* If this is already the most recent PPS and unchanged, nothing to do */
+ if (i == (pps_array->len - 1) && map.size == ppsmap.size &&
+ memcmp (map.data, ppsmap.data, ppsmap.size) == 0) {
+ GST_LOG_OBJECT (rtph264,
+ "Unchanged PPS %u:%u already most recent, not updating", sps_id,
+ pps_id);
+ gst_buffer_unmap (pps, &ppsmap);
+ goto drop;
+ } else {
+ gst_buffer_unmap (pps, &ppsmap);
+ g_ptr_array_remove_index (pps_array, i);
+ g_ptr_array_add (pps_array, nal);
+ GST_LOG_OBJECT (rtph264, "Modified PPS %u:%u, replacing",
+ sps_id, pps_id);
+ goto done;
+ }
+ }
+ gst_buffer_unmap (pps, &ppsmap);
+ }
+ GST_LOG_OBJECT (rtph264, "Adding new PPS %u:%i", sps_id, pps_id);
+ g_ptr_array_add (pps_array, nal);
+ } else {
+ goto drop;
+ }
+
+done:
+ gst_buffer_unmap (nal, &map);
+
+ return TRUE;
+
+drop:
+ gst_buffer_unmap (nal, &map);
+ gst_buffer_unref (nal);
+
+ return FALSE;
+}
+
+
+static void
+gst_rtp_h264_depay_add_sps_pps (GstRtpH264Depay * rtph264depay, GstBuffer * nal)
+{
+ if (gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264depay),
+ rtph264depay->sps, rtph264depay->pps, nal))
+ rtph264depay->new_codec_data = TRUE;
+}
+
/* set_caps vfunc: read clock-rate and the base64 "sprop-parameter-sets"
 * from the RTP caps, negotiate the output mode, and stash the parameter
 * sets (as a pending byte-stream blob, or in the SPS/PPS arrays for AVC).
 * Returns TRUE even when the caps are incomplete; src caps are set later
 * once in-band SPS/PPS arrive. */
static gboolean
gst_rtp_h264_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  gint clock_rate;
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  GstRtpH264Depay *rtph264depay;
  const gchar *ps;
  GstBuffer *codec_data;
  GstMapInfo map;
  guint8 *ptr;

  rtph264depay = GST_RTP_H264_DEPAY (depayload);

  /* 90 kHz is the RFC 6184 default for H.264 */
  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;
  depayload->clock_rate = clock_rate;

  /* Base64 encoded, comma separated config NALs */
  ps = gst_structure_get_string (structure, "sprop-parameter-sets");

  /* negotiate with downstream w.r.t. output format and alignment */
  gst_rtp_h264_depay_negotiate (rtph264depay);

  if (rtph264depay->byte_stream && ps != NULL) {
    /* for bytestream we only need the parameter sets but we don't error out
     * when they are not there, we assume they are in the stream. */
    gchar **params;
    guint len, total;
    gint i;

    params = g_strsplit (ps, ",", 0);

    /* count total number of bytes in base64. Also include the sync bytes in
     * front of the params. */
    len = 0;
    for (i = 0; params[i]; i++) {
      len += strlen (params[i]);
      len += sizeof (sync_bytes);
    }
    /* we seriously overshoot the length, but it's fine. */
    codec_data = gst_buffer_new_and_alloc (len);

    gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
    ptr = map.data;
    total = 0;
    for (i = 0; params[i]; i++) {
      guint save = 0;
      gint state = 0;

      GST_DEBUG_OBJECT (depayload, "decoding param %d (%s)", i, params[i]);
      /* start code, then the decoded NAL */
      memcpy (ptr, sync_bytes, sizeof (sync_bytes));
      ptr += sizeof (sync_bytes);
      len =
          g_base64_decode_step (params[i], strlen (params[i]), ptr, &state,
          &save);
      GST_DEBUG_OBJECT (depayload, "decoded %d bytes", len);
      total += len + sizeof (sync_bytes);
      ptr += len;
    }
    gst_buffer_unmap (codec_data, &map);
    /* trim the over-allocation down to what was actually written */
    gst_buffer_resize (codec_data, 0, total);
    g_strfreev (params);

    /* keep the codec_data, we need to send it as the first buffer. We cannot
     * push it in the adapter because the adapter might be flushed on discont.
     */
    if (rtph264depay->codec_data)
      gst_buffer_unref (rtph264depay->codec_data);
    rtph264depay->codec_data = codec_data;
  } else if (!rtph264depay->byte_stream) {
    gchar **params;
    gint i;

    /* AVC output needs the parameter sets up front to build codec_data */
    if (ps == NULL)
      goto incomplete_caps;

    params = g_strsplit (ps, ",", 0);

    GST_DEBUG_OBJECT (depayload, "we have %d params", g_strv_length (params));

    /* start with 7 bytes header */
    for (i = 0; params[i]; i++) {
      GstBuffer *nal;
      GstMapInfo nalmap;
      gsize nal_len;
      guint save = 0;
      gint state = 0;

      nal_len = strlen (params[i]);
      if (nal_len == 0) {
        GST_WARNING_OBJECT (depayload, "empty param '%s' (#%d)", params[i], i);
        continue;
      }
      nal = gst_buffer_new_and_alloc (nal_len);
      gst_buffer_map (nal, &nalmap, GST_MAP_READWRITE);

      nal_len =
          g_base64_decode_step (params[i], nal_len, nalmap.data, &state, &save);

      GST_DEBUG_OBJECT (depayload, "adding param %d as %s", i,
          ((nalmap.data[0] & 0x1f) == 7) ? "SPS" : "PPS");

      gst_buffer_unmap (nal, &nalmap);
      /* base64 shrinks: set the real decoded size */
      gst_buffer_set_size (nal, nal_len);

      /* ownership of nal transfers here */
      gst_rtp_h264_depay_add_sps_pps (rtph264depay, nal);
    }
    g_strfreev (params);

    if (rtph264depay->sps->len == 0 || rtph264depay->pps->len == 0)
      goto incomplete_caps;
  }

  return gst_rtp_h264_set_src_caps (rtph264depay);

  /* ERRORS */
incomplete_caps:
  {
    GST_DEBUG_OBJECT (depayload, "we have incomplete caps,"
        " doing setcaps later");
    return TRUE;
  }
}
+
+static GstBuffer *
+gst_rtp_h264_depay_allocate_output_buffer (GstRtpH264Depay * depay, gsize size)
+{
+ GstBuffer *buffer = NULL;
+
+ g_return_val_if_fail (size > 0, NULL);
+
+ GST_LOG_OBJECT (depay, "want output buffer of %u bytes", (guint) size);
+
+ buffer = gst_buffer_new_allocate (depay->allocator, size, &depay->params);
+ if (buffer == NULL) {
+ GST_INFO_OBJECT (depay, "couldn't allocate output buffer");
+ buffer = gst_buffer_new_allocate (NULL, size, NULL);
+ }
+
+ return buffer;
+}
+
+static GstBuffer *
+gst_rtp_h264_complete_au (GstRtpH264Depay * rtph264depay,
+ GstClockTime * out_timestamp, gboolean * out_keyframe)
+{
+ GstBufferList *list;
+ GstMapInfo outmap;
+ GstBuffer *outbuf;
+ guint outsize, offset = 0;
+ gint b, n_bufs, m, n_mem;
+
+ /* we had a picture in the adapter and we completed it */
+ GST_DEBUG_OBJECT (rtph264depay, "taking completed AU");
+ outsize = gst_adapter_available (rtph264depay->picture_adapter);
+
+ outbuf = gst_rtp_h264_depay_allocate_output_buffer (rtph264depay, outsize);
+
+ if (outbuf == NULL)
+ return NULL;
+
+ if (!gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE))
+ return NULL;
+
+ list = gst_adapter_take_buffer_list (rtph264depay->picture_adapter, outsize);
+
+ n_bufs = gst_buffer_list_length (list);
+ for (b = 0; b < n_bufs; ++b) {
+ GstBuffer *buf = gst_buffer_list_get (list, b);
+
+ n_mem = gst_buffer_n_memory (buf);
+ for (m = 0; m < n_mem; ++m) {
+ GstMemory *mem = gst_buffer_peek_memory (buf, m);
+ gsize mem_size = gst_memory_get_sizes (mem, NULL, NULL);
+ GstMapInfo mem_map;
+
+ if (gst_memory_map (mem, &mem_map, GST_MAP_READ)) {
+ memcpy (outmap.data + offset, mem_map.data, mem_size);
+ gst_memory_unmap (mem, &mem_map);
+ } else {
+ memset (outmap.data + offset, 0, mem_size);
+ }
+ offset += mem_size;
+ }
+
+ gst_rtp_copy_video_meta (rtph264depay, outbuf, buf);
+ }
+ gst_buffer_list_unref (list);
+ gst_buffer_unmap (outbuf, &outmap);
+
+ *out_timestamp = rtph264depay->last_ts;
+ *out_keyframe = rtph264depay->last_keyframe;
+
+ rtph264depay->last_keyframe = FALSE;
+ rtph264depay->picture_start = FALSE;
+
+ return outbuf;
+}
+
+static void
+gst_rtp_h264_depay_push (GstRtpH264Depay * rtph264depay, GstBuffer * outbuf,
+ gboolean keyframe, GstClockTime timestamp, gboolean marker)
+{
+ /* prepend codec_data */
+ if (rtph264depay->codec_data) {
+ GST_DEBUG_OBJECT (rtph264depay, "prepending codec_data");
+ gst_rtp_copy_video_meta (rtph264depay, rtph264depay->codec_data, outbuf);
+ outbuf = gst_buffer_append (rtph264depay->codec_data, outbuf);
+ rtph264depay->codec_data = NULL;
+ keyframe = TRUE;
+ }
+ outbuf = gst_buffer_make_writable (outbuf);
+
+ gst_rtp_drop_non_video_meta (rtph264depay, outbuf);
+
+ GST_BUFFER_PTS (outbuf) = timestamp;
+
+ if (keyframe)
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (marker)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER);
+
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtph264depay), outbuf);
+}
+
/* SPS/PPS/IDR considered key, all others DELTA;
 * so downstream waiting for keyframe can pick up at SPS/PPS/IDR */
#define NAL_TYPE_IS_KEY(nt) (((nt) == 5) || ((nt) == 7) || ((nt) == 8))

/* Handle one reassembled NAL (prefixed by a 4-byte start code or length).
 * In AVC mode, SPS/PPS are absorbed into codec_data instead of being
 * pushed. In AU-merge mode the NAL is accumulated in the picture adapter
 * and a completed AU is pushed on an AU boundary; otherwise the NAL itself
 * is pushed. Takes ownership of @nal. */
static void
gst_rtp_h264_depay_handle_nal (GstRtpH264Depay * rtph264depay, GstBuffer * nal,
    GstClockTime in_timestamp, gboolean marker)
{
  GstRTPBaseDepayload *depayload = GST_RTP_BASE_DEPAYLOAD (rtph264depay);
  gint nal_type;
  GstMapInfo map;
  GstBuffer *outbuf = NULL;
  GstClockTime out_timestamp;
  gboolean keyframe, out_keyframe;

  gst_buffer_map (nal, &map, GST_MAP_READ);
  /* need the 4-byte prefix plus at least the NAL header byte */
  if (G_UNLIKELY (map.size < 5))
    goto short_nal;

  /* nal_unit_type: low 5 bits of the byte after the prefix */
  nal_type = map.data[4] & 0x1f;
  GST_DEBUG_OBJECT (rtph264depay, "handle NAL type %d", nal_type);

  keyframe = NAL_TYPE_IS_KEY (nal_type);

  out_keyframe = keyframe;
  out_timestamp = in_timestamp;

  if (!rtph264depay->byte_stream) {
    if (nal_type == 7 || nal_type == 8) {
      /* AVC: parameter sets go into codec_data, not the stream; the copy
       * strips the 4-byte prefix */
      gst_rtp_h264_depay_add_sps_pps (rtph264depay,
          gst_buffer_copy_region (nal, GST_BUFFER_COPY_ALL,
              4, gst_buffer_get_size (nal) - 4));
      gst_buffer_unmap (nal, &map);
      gst_buffer_unref (nal);
      return;
    } else if (rtph264depay->sps->len == 0 || rtph264depay->pps->len == 0) {
      /* Don't push down any buffer in non-bytestream mode if the SPS/PPS
       * haven't gone through yet; instead ask upstream for a key unit */
      gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
          gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
              gst_structure_new ("GstForceKeyUnit",
                  "all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
      gst_buffer_unmap (nal, &map);
      gst_buffer_unref (nal);
      return;
    }

    /* parameter sets changed: refresh the src caps / codec_data */
    if (rtph264depay->new_codec_data &&
        rtph264depay->sps->len > 0 && rtph264depay->pps->len > 0)
      gst_rtp_h264_set_src_caps (rtph264depay);
  }


  if (rtph264depay->merge) {
    gboolean start = FALSE, complete = FALSE;

    /* consider a coded slices (IDR or not) to start a picture,
     * (so ending the previous one) if first_mb_in_slice == 0
     * (non-0 is part of previous one) */
    /* NOTE this is not entirely according to Access Unit specs in 7.4.1.2.4,
     * but in practice it works in sane cases, needs not much parsing,
     * and also works with broken frame_num in NAL (where spec-wise would fail) */
    /* FIXME: this code isn't correct for interlaced content as AUs should be
     * constructed with pairs of fields and the guess here will just push out
     * AUs with a single field in it */
    if (nal_type == 1 || nal_type == 2 || nal_type == 5) {
      /* we have a picture start */
      start = TRUE;
      if (map.data[5] & 0x80) {
        /* first_mb_in_slice == 0 completes a picture */
        complete = TRUE;
      }
    } else if (nal_type >= 6 && nal_type <= 9) {
      /* SEI, SPS, PPS, AU terminate picture */
      complete = TRUE;
    }
    GST_DEBUG_OBJECT (depayload, "start %d, complete %d", start, complete);

    /* marker bit isn't mandatory so in the following code we try to guess
     * an AU boundary by detecting a new picture start */
    if (!marker) {
      if (complete && rtph264depay->picture_start)
        outbuf = gst_rtp_h264_complete_au (rtph264depay, &out_timestamp,
            &out_keyframe);
    }
    /* add to adapter */
    gst_buffer_unmap (nal, &map);

    /* a keyframe starting a new picture ends any wait-for-keyframe state */
    if (!rtph264depay->picture_start && start && out_keyframe)
      rtph264depay->waiting_for_keyframe = FALSE;

    GST_DEBUG_OBJECT (depayload, "adding NAL to picture adapter");
    gst_adapter_push (rtph264depay->picture_adapter, nal);
    rtph264depay->last_ts = in_timestamp;
    rtph264depay->last_keyframe |= keyframe;
    rtph264depay->picture_start |= start;

    if (marker)
      outbuf = gst_rtp_h264_complete_au (rtph264depay, &out_timestamp,
          &out_keyframe);
  } else {
    /* no merge, output is input nal */
    GST_DEBUG_OBJECT (depayload, "using NAL as output");
    outbuf = nal;
    gst_buffer_unmap (nal, &map);
  }

  if (outbuf) {
    if (!rtph264depay->waiting_for_keyframe) {
      gst_rtp_h264_depay_push (rtph264depay, outbuf, out_keyframe,
          out_timestamp, marker);
    } else {
      GST_LOG_OBJECT (depayload,
          "Dropping %" GST_PTR_FORMAT ", we are waiting for a keyframe",
          outbuf);
      gst_buffer_unref (outbuf);
    }
  }

  return;

  /* ERRORS */
short_nal:
  {
    GST_WARNING_OBJECT (depayload, "dropping short NAL");
    gst_buffer_unmap (nal, &map);
    gst_buffer_unref (nal);
    return;
  }
}
+
+/* Take all FU-A/FU-B fragments collected so far in the adapter, fill in
+ * the NAL prefix (start code in byte-stream mode, 4-byte big-endian size
+ * in AVC mode) and hand the reassembled NAL unit to
+ * gst_rtp_h264_depay_handle_nal().  Clears current_fu_type so a new
+ * fragmentation unit may start afterwards. */
+static void
+gst_rtp_h264_finish_fragmentation_unit (GstRtpH264Depay * rtph264depay)
+{
+  guint outsize;
+  GstMapInfo map;
+  GstBuffer *outbuf;
+
+  outsize = gst_adapter_available (rtph264depay->adapter);
+  outbuf = gst_adapter_take_buffer (rtph264depay->adapter, outsize);
+
+  gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+  GST_DEBUG_OBJECT (rtph264depay, "output %d bytes", outsize);
+
+  /* the first sizeof (sync_bytes) bytes were reserved when the FU start
+   * fragment was pushed; fill them in now that the total size is known */
+  if (rtph264depay->byte_stream) {
+    memcpy (map.data, sync_bytes, sizeof (sync_bytes));
+  } else {
+    /* AVC output: 32-bit NAL size, excluding the 4 size bytes themselves */
+    outsize -= 4;
+    map.data[0] = (outsize >> 24);
+    map.data[1] = (outsize >> 16);
+    map.data[2] = (outsize >> 8);
+    map.data[3] = (outsize);
+  }
+  gst_buffer_unmap (outbuf, &map);
+
+  /* FU complete; allow a new fragmented NAL to start */
+  rtph264depay->current_fu_type = 0;
+
+  gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf,
+      rtph264depay->fu_timestamp, rtph264depay->fu_marker);
+}
+
+/* Main depayload callback: parse one RTP packet and dispatch on the NAL
+ * unit type (RFC 3984): single NAL unit packets (1-23) are forwarded
+ * directly, STAP-A/STAP-B aggregates are split into individual NALs, and
+ * FU-A/FU-B fragments are reassembled through the adapter.  All output
+ * flows through gst_rtp_h264_depay_handle_nal(), so this function itself
+ * always returns NULL.  NOTE(review): the dead "break" after
+ * "goto not_implemented" below is unreachable. */
+static GstBuffer *
+gst_rtp_h264_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpH264Depay *rtph264depay;
+  GstBuffer *outbuf = NULL;
+  guint8 nal_unit_type;
+
+  rtph264depay = GST_RTP_H264_DEPAY (depayload);
+
+  /* keyframe-waiting only applies when we assemble access units */
+  if (!rtph264depay->merge)
+    rtph264depay->waiting_for_keyframe = FALSE;
+
+  /* flush remaining data on discont */
+  if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+    gst_adapter_clear (rtph264depay->adapter);
+    rtph264depay->wait_start = TRUE;
+    rtph264depay->current_fu_type = 0;
+    rtph264depay->last_fu_seqnum = 0;
+
+    if (rtph264depay->merge && rtph264depay->wait_for_keyframe) {
+      rtph264depay->waiting_for_keyframe = TRUE;
+    }
+
+
+    /* optionally ask upstream for a new keyframe after data loss */
+    if (rtph264depay->request_keyframe)
+      gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
+          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
+              TRUE, 0));
+  }
+
+  {
+    gint payload_len;
+    guint8 *payload;
+    guint header_len;
+    guint8 nal_ref_idc;
+    GstMapInfo map;
+    guint outsize, nalu_size;
+    GstClockTime timestamp;
+    gboolean marker;
+
+    timestamp = GST_BUFFER_PTS (rtp->buffer);
+
+    payload_len = gst_rtp_buffer_get_payload_len (rtp);
+    payload = gst_rtp_buffer_get_payload (rtp);
+    marker = gst_rtp_buffer_get_marker (rtp);
+
+    GST_DEBUG_OBJECT (rtph264depay, "receiving %d bytes", payload_len);
+
+    if (payload_len == 0)
+      goto empty_packet;
+
+    /* +---------------+
+     * |0|1|2|3|4|5|6|7|
+     * +-+-+-+-+-+-+-+-+
+     * |F|NRI|  Type   |
+     * +---------------+
+     *
+     * F must be 0.
+     */
+    nal_ref_idc = (payload[0] & 0x60) >> 5;
+    nal_unit_type = payload[0] & 0x1f;
+
+    /* at least one byte header with type */
+    header_len = 1;
+
+    GST_DEBUG_OBJECT (rtph264depay, "NRI %d, Type %d %s", nal_ref_idc,
+        nal_unit_type, marker ? "marker" : "");
+
+    /* If FU unit was being processed, but the current nal is of a different
+     * type.  Assume that the remote payloader is buggy (didn't set the end bit
+     * when the FU ended) and send out what we gathered thusfar */
+    if (G_UNLIKELY (rtph264depay->current_fu_type != 0 &&
+            nal_unit_type != rtph264depay->current_fu_type))
+      gst_rtp_h264_finish_fragmentation_unit (rtph264depay);
+
+    switch (nal_unit_type) {
+      case 0:
+      case 30:
+      case 31:
+        /* undefined */
+        goto undefined_type;
+      case 25:
+        /* STAP-B    Single-time aggregation packet     5.7.1 */
+        /* 2 byte extra header for DON */
+        header_len += 2;
+        /* fallthrough */
+      case 24:
+      {
+        /* strip headers */
+        payload += header_len;
+        payload_len -= header_len;
+
+        rtph264depay->wait_start = FALSE;
+
+
+        /* STAP-A    Single-time aggregation packet     5.7.1 */
+        while (payload_len > 2) {
+          gboolean last = FALSE;
+
+          /*                      1
+           *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+           * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+           * |          NALU Size            |
+           * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+           */
+          nalu_size = (payload[0] << 8) | payload[1];
+
+          /* don't include nalu_size */
+          if (nalu_size > (payload_len - 2))
+            nalu_size = payload_len - 2;
+
+          outsize = nalu_size + sizeof (sync_bytes);
+          outbuf = gst_buffer_new_and_alloc (outsize);
+
+          gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+          if (rtph264depay->byte_stream) {
+            memcpy (map.data, sync_bytes, sizeof (sync_bytes));
+          } else {
+            /* AVC mode: reuse the 16-bit STAP size as a 32-bit NAL size */
+            map.data[0] = map.data[1] = 0;
+            map.data[2] = payload[0];
+            map.data[3] = payload[1];
+          }
+
+          /* strip NALU size */
+          payload += 2;
+          payload_len -= 2;
+
+          memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+          gst_buffer_unmap (outbuf, &map);
+
+          gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);
+
+          /* last NALU of the STAP: the RTP marker travels with it */
+          if (payload_len - nalu_size <= 2)
+            last = TRUE;
+
+          gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp,
+              marker && last);
+
+          payload += nalu_size;
+          payload_len -= nalu_size;
+        }
+        break;
+      }
+      case 26:
+        /* MTAP16    Multi-time aggregation packet      5.7.2 */
+        // header_len = 5;
+        /* fallthrough, not implemented */
+      case 27:
+        /* MTAP24    Multi-time aggregation packet      5.7.2 */
+        // header_len = 6;
+        goto not_implemented;
+        break;
+      case 28:
+      case 29:
+      {
+        /* FU-A      Fragmentation unit                 5.8 */
+        /* FU-B      Fragmentation unit                 5.8 */
+        gboolean S, E;
+
+        /* +---------------+
+         * |0|1|2|3|4|5|6|7|
+         * +-+-+-+-+-+-+-+-+
+         * |S|E|R|  Type   |
+         * +---------------+
+         *
+         * R is reserved and always 0
+         */
+        S = (payload[1] & 0x80) == 0x80;
+        E = (payload[1] & 0x40) == 0x40;
+
+        GST_DEBUG_OBJECT (rtph264depay, "S %d, E %d", S, E);
+
+        if (rtph264depay->wait_start && !S)
+          goto waiting_start;
+
+        if (S) {
+          /* NAL unit starts here */
+          guint8 nal_header;
+
+          /* If a new FU unit started, while still processing an older one.
+           * Assume that the remote payloader is buggy (doesn't set the end
+           * bit) and send out what we've gathered thusfar */
+          if (G_UNLIKELY (rtph264depay->current_fu_type != 0))
+            gst_rtp_h264_finish_fragmentation_unit (rtph264depay);
+
+          rtph264depay->current_fu_type = nal_unit_type;
+          rtph264depay->fu_timestamp = timestamp;
+          rtph264depay->last_fu_seqnum = gst_rtp_buffer_get_seq (rtp);
+
+          rtph264depay->wait_start = FALSE;
+
+          /* reconstruct NAL header */
+          nal_header = (payload[0] & 0xe0) | (payload[1] & 0x1f);
+
+          /* strip type header, keep FU header, we'll reuse it to reconstruct
+           * the NAL header. */
+          payload += 1;
+          payload_len -= 1;
+
+          /* sizeof (sync_bytes) bytes stay reserved; they are filled in by
+           * gst_rtp_h264_finish_fragmentation_unit() when the FU completes */
+          nalu_size = payload_len;
+          outsize = nalu_size + sizeof (sync_bytes);
+          outbuf = gst_buffer_new_and_alloc (outsize);
+
+          gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+          memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+          map.data[sizeof (sync_bytes)] = nal_header;
+          gst_buffer_unmap (outbuf, &map);
+
+          gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);
+
+          GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);
+
+          /* and assemble in the adapter */
+          gst_adapter_push (rtph264depay->adapter, outbuf);
+        } else {
+          if (rtph264depay->current_fu_type == 0) {
+            /* previous FU packet missing start bit? */
+            GST_WARNING_OBJECT (rtph264depay, "missing FU start bit on an "
+                "earlier packet. Dropping.");
+            gst_adapter_clear (rtph264depay->adapter);
+            return NULL;
+          }
+          if (gst_rtp_buffer_compare_seqnum (rtph264depay->last_fu_seqnum,
+                  gst_rtp_buffer_get_seq (rtp)) != 1) {
+            /* jump in sequence numbers within an FU is cause for discarding */
+            GST_WARNING_OBJECT (rtph264depay, "Jump in sequence numbers from "
+                "%u to %u within Fragmentation Unit. Data was lost, dropping "
+                "stored.", rtph264depay->last_fu_seqnum,
+                gst_rtp_buffer_get_seq (rtp));
+            gst_adapter_clear (rtph264depay->adapter);
+            return NULL;
+          }
+          rtph264depay->last_fu_seqnum = gst_rtp_buffer_get_seq (rtp);
+
+          /* strip off FU indicator and FU header bytes */
+          payload += 2;
+          payload_len -= 2;
+
+          outsize = payload_len;
+          outbuf = gst_buffer_new_and_alloc (outsize);
+          gst_buffer_fill (outbuf, 0, payload, outsize);
+
+          gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);
+
+          GST_DEBUG_OBJECT (rtph264depay, "queueing %d bytes", outsize);
+
+          /* and assemble in the adapter */
+          gst_adapter_push (rtph264depay->adapter, outbuf);
+        }
+
+        /* ownership was handed to the adapter above; nothing to return yet */
+        outbuf = NULL;
+        rtph264depay->fu_marker = marker;
+
+        /* if NAL unit ends, flush the adapter */
+        if (E)
+          gst_rtp_h264_finish_fragmentation_unit (rtph264depay);
+        break;
+      }
+      default:
+      {
+        rtph264depay->wait_start = FALSE;
+
+        /* 1-23   NAL unit  Single NAL unit packet per H.264   5.6 */
+        /* the entire payload is the output buffer */
+        nalu_size = payload_len;
+        outsize = nalu_size + sizeof (sync_bytes);
+        outbuf = gst_buffer_new_and_alloc (outsize);
+
+        gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+        if (rtph264depay->byte_stream) {
+          memcpy (map.data, sync_bytes, sizeof (sync_bytes));
+        } else {
+          map.data[0] = map.data[1] = 0;
+          map.data[2] = nalu_size >> 8;
+          map.data[3] = nalu_size & 0xff;
+        }
+        memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+        gst_buffer_unmap (outbuf, &map);
+
+        gst_rtp_copy_video_meta (rtph264depay, outbuf, rtp->buffer);
+
+        gst_rtp_h264_depay_handle_nal (rtph264depay, outbuf, timestamp, marker);
+        break;
+      }
+    }
+  }
+
+  return NULL;
+
+  /* ERRORS */
+empty_packet:
+  {
+    GST_DEBUG_OBJECT (rtph264depay, "empty packet");
+    return NULL;
+  }
+undefined_type:
+  {
+    GST_ELEMENT_WARNING (rtph264depay, STREAM, DECODE,
+        (NULL), ("Undefined packet type"));
+    return NULL;
+  }
+waiting_start:
+  {
+    GST_DEBUG_OBJECT (rtph264depay, "waiting for start");
+    return NULL;
+  }
+not_implemented:
+  {
+    GST_ELEMENT_ERROR (rtph264depay, STREAM, FORMAT,
+        (NULL), ("NAL unit type %d not supported yet", nal_unit_type));
+    return NULL;
+  }
+}
+
+/* Sink event interception: keep depayloader state consistent with the
+ * stream - a flush-stop soft-resets the element, EOS drains whatever
+ * picture data is still pending.  The event is always forwarded to the
+ * base class handler afterwards. */
+static gboolean
+gst_rtp_h264_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event)
+{
+  GstRtpH264Depay *self = GST_RTP_H264_DEPAY (depay);
+  GstEventType etype = GST_EVENT_TYPE (event);
+
+  if (etype == GST_EVENT_FLUSH_STOP)
+    gst_rtp_h264_depay_reset (self, FALSE);
+  else if (etype == GST_EVENT_EOS)
+    gst_rtp_h264_depay_drain (self);
+
+  return
+      GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, event);
+}
+
+/* GstElement state change: hard-reset the depayloader when entering
+ * PAUSED from READY (before chaining up) and when leaving PAUSED for
+ * READY (after chaining up).  All other transitions are pass-through. */
+static GstStateChangeReturn
+gst_rtp_h264_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpH264Depay *self = GST_RTP_H264_DEPAY (element);
+  GstStateChangeReturn ret;
+
+  /* upward: make sure we start from a clean slate */
+  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
+    gst_rtp_h264_depay_reset (self, TRUE);
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* downward: release the streaming state again */
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_rtp_h264_depay_reset (self, TRUE);
+
+  return ret;
+}
diff --git a/gst/rtp/gstrtph264depay.h b/gst/rtp/gstrtph264depay.h
new file mode 100644
index 0000000000..8ca7381beb
--- /dev/null
+++ b/gst/rtp/gstrtph264depay.h
@@ -0,0 +1,92 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H264_DEPAY_H__
+#define __GST_RTP_H264_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+/* standard GObject type boilerplate for the rtph264depay element */
+#define GST_TYPE_RTP_H264_DEPAY \
+  (gst_rtp_h264_depay_get_type())
+#define GST_RTP_H264_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H264_DEPAY,GstRtpH264Depay))
+#define GST_RTP_H264_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H264_DEPAY,GstRtpH264DepayClass))
+#define GST_IS_RTP_H264_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H264_DEPAY))
+#define GST_IS_RTP_H264_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H264_DEPAY))
+
+typedef struct _GstRtpH264Depay GstRtpH264Depay;
+typedef struct _GstRtpH264DepayClass GstRtpH264DepayClass;
+
+struct _GstRtpH264Depay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* TRUE: output byte-stream (Annex B start codes); FALSE: output AVC
+   * (4-byte big-endian length before each NAL) */
+  gboolean byte_stream;
+
+  GstBuffer *codec_data;        /* codec_data for the src caps; NOTE(review):
+                                 * presumably avcC built from SPS/PPS - confirm
+                                 * at the caps-setting site */
+  GstAdapter *adapter;          /* accumulates FU-A/FU-B fragments */
+  gboolean wait_start;          /* drop FU fragments until a start (S) bit */
+
+  /* nal merging */
+  gboolean merge;               /* assemble NALs into access units */
+  GstAdapter *picture_adapter;  /* NALs of the picture being assembled */
+  gboolean picture_start;       /* a picture is currently being assembled */
+  GstClockTime last_ts;         /* timestamp of the AU being assembled */
+  gboolean last_keyframe;       /* AU being assembled contains a keyframe */
+
+  /* Work around broken payloaders wrt. FU-A & FU-B */
+  guint8 current_fu_type;       /* 0 when no fragmentation unit in progress */
+  guint16 last_fu_seqnum;       /* seqnum of last FU fragment, for gap checks */
+  GstClockTime fu_timestamp;    /* timestamp of the FU being assembled */
+  gboolean fu_marker;           /* RTP marker of the FU being assembled */
+
+  /* misc */
+  GPtrArray *sps;               /* collected SPS NALs (GstBuffer *) */
+  GPtrArray *pps;               /* collected PPS NALs (GstBuffer *) */
+  gboolean new_codec_data;      /* SPS/PPS changed; src caps need updating */
+
+  /* downstream allocator */
+  GstAllocator *allocator;
+  GstAllocationParams params;
+
+  gboolean wait_for_keyframe;   /* arm waiting_for_keyframe on discont */
+  gboolean request_keyframe;    /* push force-key-unit upstream on discont */
+  gboolean waiting_for_keyframe;        /* currently dropping until keyframe */
+};
+
+struct _GstRtpH264DepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_h264_depay_get_type (void);
+
+/* Shared with the payloader: add @nal to the @sps/@pps caches when it is a
+ * new or updated parameter set; callers pass ownership of @nal. */
+gboolean gst_rtp_h264_add_sps_pps (GstElement * rtph264, GPtrArray * sps,
+    GPtrArray * pps, GstBuffer * nal);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_H264_DEPAY_H__ */
diff --git a/gst/rtp/gstrtph264pay.c b/gst/rtp/gstrtph264pay.c
new file mode 100644
index 0000000000..49471d8678
--- /dev/null
+++ b/gst/rtp/gstrtph264pay.c
@@ -0,0 +1,1814 @@
+/* ex: set tabstop=2 shiftwidth=2 expandtab: */
+/* GStreamer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+
+/* Included to not duplicate gst_rtp_h264_add_sps_pps () */
+#include "gstrtph264depay.h"
+
+#include "gstrtpelements.h"
+#include "gstrtph264pay.h"
+#include "gstrtputils.h"
+#include "gstbuffermemory.h"
+
+
+#define IDR_TYPE_ID 5
+#define SPS_TYPE_ID 7
+#define PPS_TYPE_ID 8
+#define AUD_TYPE_ID 9
+#define STAP_A_TYPE_ID 24
+#define FU_A_TYPE_ID 28
+
+GST_DEBUG_CATEGORY_STATIC (rtph264pay_debug);
+#define GST_CAT_DEFAULT (rtph264pay_debug)
+
+#define GST_TYPE_RTP_H264_AGGREGATE_MODE \
+ (gst_rtp_h264_aggregate_mode_get_type ())
+
+
+/* Register (once) and return the GEnum type backing the aggregate-mode
+ * property. */
+static GType
+gst_rtp_h264_aggregate_mode_get_type (void)
+{
+  static GType gtype;           /* zero until first registration */
+  static const GEnumValue modes[] = {
+    {GST_RTP_H264_AGGREGATE_NONE, "Do not aggregate NAL units", "none"},
+    {GST_RTP_H264_AGGREGATE_ZERO_LATENCY,
+        "Aggregate NAL units until a VCL unit is included", "zero-latency"},
+    {GST_RTP_H264_AGGREGATE_MAX_STAP,
+        "Aggregate all NAL units with the same timestamp (adds one frame of"
+        " latency)", "max-stap"},
+    {0, NULL, NULL},
+  };
+
+  if (gtype == 0)
+    gtype = g_enum_register_static ("GstRtpH264AggregateMode", modes);
+
+  return gtype;
+}
+
+
+
+/* references:
+*
+ * RFC 3984
+ */
+
+/* sink template: AVC aligned to access units, or byte-stream aligned to
+ * NAL units or access units */
+static GstStaticPadTemplate gst_rtp_h264_pay_sink_template =
+    GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/x-h264, "
+        "stream-format = (string) avc, alignment = (string) au;"
+        "video/x-h264, "
+        "stream-format = (string) byte-stream, alignment = (string) { nal, au }")
+    );
+
+/* src template: dynamic-payload RTP at the 90 kHz H264 clock rate */
+static GstStaticPadTemplate gst_rtp_h264_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-rtp, "
+        "media = (string) \"video\", "
+        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+        "clock-rate = (int) 90000, " "encoding-name = (string) \"H264\"")
+    );
+
+/* property defaults */
+#define DEFAULT_SPROP_PARAMETER_SETS NULL
+#define DEFAULT_CONFIG_INTERVAL 0
+#define DEFAULT_AGGREGATE_MODE GST_RTP_H264_AGGREGATE_NONE
+
+/* GObject property ids */
+enum
+{
+  PROP_0,
+  PROP_SPROP_PARAMETER_SETS,
+  PROP_CONFIG_INTERVAL,
+  PROP_AGGREGATE_MODE,
+};
+
+/* forward declarations for the vfuncs and helpers implemented below */
+static void gst_rtp_h264_pay_finalize (GObject * object);
+
+static void gst_rtp_h264_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_rtp_h264_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+static GstCaps *gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload,
+    GstPad * pad, GstCaps * filter);
+static gboolean gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload,
+    GstCaps * caps);
+static GstFlowReturn gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * pad,
+    GstBuffer * buffer);
+static gboolean gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload,
+    GstEvent * event);
+static GstStateChangeReturn gst_rtp_h264_pay_change_state (GstElement *
+    element, GstStateChange transition);
+static gboolean gst_rtp_h264_pay_src_query (GstPad * pad, GstObject * parent,
+    GstQuery * query);
+
+static void gst_rtp_h264_pay_reset_bundle (GstRtpH264Pay * rtph264pay);
+
+/* GObject type registration and plugin element registration */
+#define gst_rtp_h264_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH264Pay, gst_rtp_h264_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph264pay, "rtph264pay",
+    GST_RANK_SECONDARY, GST_TYPE_RTP_H264_PAY, rtp_element_init (plugin));
+
+/* Class init: install the sprop-parameter-sets, config-interval and
+ * aggregate-mode properties, register pad templates and element metadata,
+ * and hook up the GstElement / GstRTPBasePayload virtual methods. */
+static void
+gst_rtp_h264_pay_class_init (GstRtpH264PayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gobject_class->set_property = gst_rtp_h264_pay_set_property;
+  gobject_class->get_property = gst_rtp_h264_pay_get_property;
+
+  /* deprecated: parameter sets are normally taken from the stream/caps */
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_SPROP_PARAMETER_SETS, g_param_spec_string ("sprop-parameter-sets",
+          "sprop-parameter-sets",
+          "The base64 sprop-parameter-sets to set in out caps (set to NULL to "
+          "extract from stream)",
+          DEFAULT_SPROP_PARAMETER_SETS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_CONFIG_INTERVAL,
+      g_param_spec_int ("config-interval",
+          "SPS PPS Send Interval",
+          "Send SPS and PPS Insertion Interval in seconds (sprop parameter sets "
+          "will be multiplexed in the data stream when detected.) "
+          "(0 = disabled, -1 = send with every IDR frame)",
+          -1, 3600, DEFAULT_CONFIG_INTERVAL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+      );
+
+  /**
+   * GstRtpH264Pay:aggregate-mode
+   *
+   * Bundle suitable SPS/PPS NAL units into STAP-A aggregate packets.
+   *
+   * This can potentially reduce RTP packetization overhead but not all
+   * RTP implementations handle it correctly.
+   *
+   * For best compatibility, it is recommended to set this to "none" (the
+   * default) for RTSP and for WebRTC to "zero-latency".
+   *
+   * Since: 1.18
+   */
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_AGGREGATE_MODE,
+      g_param_spec_enum ("aggregate-mode",
+          "Attempt to use aggregate packets",
+          "Bundle suitable SPS/PPS NAL units into STAP-A "
+          "aggregate packets",
+          GST_TYPE_RTP_H264_AGGREGATE_MODE,
+          DEFAULT_AGGREGATE_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+      );
+
+  gobject_class->finalize = gst_rtp_h264_pay_finalize;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_h264_pay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_h264_pay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP H264 payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payload-encode H264 video into RTP packets (RFC 3984)",
+      "Laurent Glayal <spglegle@yahoo.fr>");
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_h264_pay_change_state);
+
+  gstrtpbasepayload_class->get_caps = gst_rtp_h264_pay_getcaps;
+  gstrtpbasepayload_class->set_caps = gst_rtp_h264_pay_setcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_h264_pay_handle_buffer;
+  gstrtpbasepayload_class->sink_event = gst_rtp_h264_pay_sink_event;
+
+  GST_DEBUG_CATEGORY_INIT (rtph264pay_debug, "rtph264pay", 0,
+      "H264 RTP Payloader");
+
+  /* make the custom enum discoverable as part of the plugin API */
+  gst_type_mark_as_plugin_api (GST_TYPE_RTP_H264_AGGREGATE_MODE, 0);
+}
+
+/* Instance init: allocate helper structures and apply property defaults. */
+static void
+gst_rtp_h264_pay_init (GstRtpH264Pay * rtph264pay)
+{
+  rtph264pay->queue = g_array_new (FALSE, FALSE, sizeof (guint));
+  rtph264pay->profile = 0;
+  /* SPS/PPS caches own their buffers: removal unrefs them */
+  rtph264pay->sps = g_ptr_array_new_with_free_func (
+      (GDestroyNotify) gst_buffer_unref);
+  rtph264pay->pps = g_ptr_array_new_with_free_func (
+      (GDestroyNotify) gst_buffer_unref);
+  /* -1: no SPS/PPS sent in-band yet (TODO confirm units at the send site) */
+  rtph264pay->last_spspps = -1;
+  rtph264pay->spspps_interval = DEFAULT_CONFIG_INTERVAL;
+  rtph264pay->aggregate_mode = DEFAULT_AGGREGATE_MODE;
+  rtph264pay->delta_unit = FALSE;
+  rtph264pay->discont = FALSE;
+
+  rtph264pay->adapter = gst_adapter_new ();
+
+  /* answer LATENCY queries ourselves (aggregation may add a frame) */
+  gst_pad_set_query_function (GST_RTP_BASE_PAYLOAD_SRCPAD (rtph264pay),
+      gst_rtp_h264_pay_src_query);
+}
+
+/* Drop every cached parameter set; the arrays' free function unrefs the
+ * buffers as they are removed. */
+static void
+gst_rtp_h264_pay_clear_sps_pps (GstRtpH264Pay * rtph264pay)
+{
+  g_ptr_array_set_size (rtph264pay->pps, 0);
+  g_ptr_array_set_size (rtph264pay->sps, 0);
+}
+
+/* GObject finalize: release every payloader-owned resource, then chain up. */
+static void
+gst_rtp_h264_pay_finalize (GObject * object)
+{
+  GstRtpH264Pay *rtph264pay;
+
+  rtph264pay = GST_RTP_H264_PAY (object);
+
+  g_array_free (rtph264pay->queue, TRUE);
+
+  /* element free funcs (gst_buffer_unref) release the cached SPS/PPS */
+  g_ptr_array_free (rtph264pay->sps, TRUE);
+  g_ptr_array_free (rtph264pay->pps, TRUE);
+
+  g_free (rtph264pay->sprop_parameter_sets);
+
+  g_object_unref (rtph264pay->adapter);
+  gst_rtp_h264_pay_reset_bundle (rtph264pay);
+
+  /* chain up last so the instance stays valid during our cleanup */
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* All H.264 level identifier strings, ordered lowest to highest; used by
+ * getcaps to offer every level up to (and including) a downstream-imposed
+ * one. */
+static const gchar all_levels[][4] = {
+  "1",
+  "1b",
+  "1.1",
+  "1.2",
+  "1.3",
+  "2",
+  "2.1",
+  "2.2",
+  "3",
+  "3.1",
+  "3.2",
+  "4",
+  "4.1",
+  "4.2",
+  "5",
+  "5.1"
+};
+
+/* Compute the sink caps given downstream restrictions: each downstream
+ * profile-level-id (fmtp hex triplet, see RFC 3984) is translated into
+ * video/x-h264 profile/level fields, the result is intersected with the
+ * sink template and the optional @filter.  Returns a new caps ref, or
+ * NULL when the peer cannot be queried. */
+static GstCaps *
+gst_rtp_h264_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+    GstCaps * filter)
+{
+  GstCaps *template_caps;
+  GstCaps *allowed_caps;
+  GstCaps *caps, *icaps;
+  gboolean append_unrestricted;
+  guint i;
+
+  allowed_caps =
+      gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
+
+  if (allowed_caps == NULL)
+    return NULL;
+
+  template_caps =
+      gst_static_pad_template_get_caps (&gst_rtp_h264_pay_sink_template);
+
+  if (gst_caps_is_any (allowed_caps)) {
+    caps = gst_caps_ref (template_caps);
+    goto done;
+  }
+
+  if (gst_caps_is_empty (allowed_caps)) {
+    caps = gst_caps_ref (allowed_caps);
+    goto done;
+  }
+
+  caps = gst_caps_new_empty ();
+
+  append_unrestricted = FALSE;
+  for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
+    GstStructure *s = gst_caps_get_structure (allowed_caps, i);
+    GstStructure *new_s = gst_structure_new_empty ("video/x-h264");
+    const gchar *profile_level_id;
+
+    profile_level_id = gst_structure_get_string (s, "profile-level-id");
+
+    /* profile-level-id is 3 bytes as 6 hex digits */
+    if (profile_level_id && strlen (profile_level_id) == 6) {
+      const gchar *profile;
+      const gchar *level;
+      long int spsint;
+      guint8 sps[3];
+
+      /* decode into the three SPS header bytes */
+      spsint = strtol (profile_level_id, NULL, 16);
+      sps[0] = spsint >> 16;
+      sps[1] = spsint >> 8;
+      sps[2] = spsint;
+
+      profile = gst_codec_utils_h264_get_profile (sps, 3);
+      level = gst_codec_utils_h264_get_level (sps, 3);
+
+      if (profile && level) {
+        GST_LOG_OBJECT (payload, "In caps, have profile %s and level %s",
+            profile, level);
+
+        if (!strcmp (profile, "constrained-baseline"))
+          gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
+        else {
+          GValue val = { 0, };
+          GValue profiles = { 0, };
+
+          g_value_init (&profiles, GST_TYPE_LIST);
+          g_value_init (&val, G_TYPE_STRING);
+
+          /* offer constrained-baseline alongside the advertised profile */
+          g_value_set_static_string (&val, profile);
+          gst_value_list_append_value (&profiles, &val);
+
+          g_value_set_static_string (&val, "constrained-baseline");
+          gst_value_list_append_value (&profiles, &val);
+
+          gst_structure_take_value (new_s, "profile", &profiles);
+        }
+
+        if (!strcmp (level, "1"))
+          gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
+        else {
+          GValue levels = { 0, };
+          GValue val = { 0, };
+          int j;
+
+          g_value_init (&levels, GST_TYPE_LIST);
+          g_value_init (&val, G_TYPE_STRING);
+
+          /* every level up to and including the advertised one is offered
+           * (prepending yields highest-first ordering) */
+          for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
+            g_value_set_static_string (&val, all_levels[j]);
+            gst_value_list_prepend_value (&levels, &val);
+            if (!strcmp (level, all_levels[j]))
+              break;
+          }
+          gst_structure_take_value (new_s, "level", &levels);
+        }
+      } else {
+        /* Invalid profile-level-id means baseline */
+
+        gst_structure_set (new_s,
+            "profile", G_TYPE_STRING, "constrained-baseline", NULL);
+      }
+    } else {
+      /* No profile-level-id means baseline or unrestricted */
+
+      gst_structure_set (new_s,
+          "profile", G_TYPE_STRING, "constrained-baseline", NULL);
+      append_unrestricted = TRUE;
+    }
+
+    caps = gst_caps_merge_structure (caps, new_s);
+  }
+
+  if (append_unrestricted) {
+    caps =
+        gst_caps_merge_structure (caps, gst_structure_new ("video/x-h264", NULL,
+            NULL));
+  }
+
+  icaps = gst_caps_intersect (caps, template_caps);
+  gst_caps_unref (caps);
+  caps = icaps;
+
+done:
+  if (filter) {
+    GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
+        GST_PTR_FORMAT, caps, filter);
+    icaps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (caps);
+    caps = icaps;
+  }
+
+  gst_caps_unref (template_caps);
+  gst_caps_unref (allowed_caps);
+
+  GST_LOG_OBJECT (payload, "returning caps %" GST_PTR_FORMAT, caps);
+  return caps;
+}
+
+/* Src pad query handler: for LATENCY queries, augment the default result
+ * with one frame of latency when max-stap aggregation of non-AU-aligned
+ * input has to wait for the next frame boundary.  Everything else is
+ * delegated to the default handler. */
+static gboolean
+gst_rtp_h264_pay_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (parent);
+
+  if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
+    gboolean retval;
+    gboolean live;
+    GstClockTime min_latency, max_latency;
+
+    retval = gst_pad_query_default (pad, parent, query);
+    if (!retval)
+      return retval;
+
+    /* stream format / alignment not negotiated yet: can't answer reliably */
+    if (rtph264pay->stream_format == GST_H264_STREAM_FORMAT_UNKNOWN ||
+        rtph264pay->alignment == GST_H264_ALIGNMENT_UNKNOWN)
+      return FALSE;
+
+    gst_query_parse_latency (query, &live, &min_latency, &max_latency);
+
+    if (rtph264pay->aggregate_mode == GST_RTP_H264_AGGREGATE_MAX_STAP &&
+        rtph264pay->alignment != GST_H264_ALIGNMENT_AU && rtph264pay->fps_num) {
+      /* duration of one frame from the negotiated framerate */
+      GstClockTime one_frame = gst_util_uint64_scale_int (GST_SECOND,
+          rtph264pay->fps_denum, rtph264pay->fps_num);
+
+      min_latency += one_frame;
+      max_latency += one_frame;
+      gst_query_set_latency (query, live, min_latency, max_latency);
+    }
+    return TRUE;
+  }
+
+  return gst_pad_query_default (pad, parent, query);
+}
+
+
+/* take the currently configured SPS and PPS lists and set them on the caps as
+ * sprop-parameter-sets: each parameter set base64-encoded, comma-separated
+ * (RFC 3984 fmtp format), together with packetization-mode and - when the
+ * profile is known - profile-level-id.  Returns the result of
+ * gst_rtp_base_payload_set_outcaps(). */
+static gboolean
+gst_rtp_h264_pay_set_sps_pps (GstRTPBasePayload * basepayload)
+{
+  GstRtpH264Pay *payloader = GST_RTP_H264_PAY (basepayload);
+  gchar *profile;
+  gchar *set;
+  GString *sprops;
+  guint count;
+  gboolean res;
+  GstMapInfo map;
+  guint i;
+
+  sprops = g_string_new ("");
+  count = 0;
+
+  /* build the sprop-parameter-sets */
+  for (i = 0; i < payloader->sps->len; i++) {
+    GstBuffer *sps_buf =
+        GST_BUFFER_CAST (g_ptr_array_index (payloader->sps, i));
+
+    gst_buffer_map (sps_buf, &map, GST_MAP_READ);
+    set = g_base64_encode (map.data, map.size);
+    gst_buffer_unmap (sps_buf, &map);
+
+    /* comma-separate all entries after the first */
+    g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
+    g_free (set);
+    count++;
+  }
+  for (i = 0; i < payloader->pps->len; i++) {
+    GstBuffer *pps_buf =
+        GST_BUFFER_CAST (g_ptr_array_index (payloader->pps, i));
+
+    gst_buffer_map (pps_buf, &map, GST_MAP_READ);
+    set = g_base64_encode (map.data, map.size);
+    gst_buffer_unmap (pps_buf, &map);
+
+    g_string_append_printf (sprops, "%s%s", count ? "," : "", set);
+    g_free (set);
+    count++;
+  }
+
+  if (G_LIKELY (count)) {
+    if (payloader->profile != 0) {
+      /* profile is 24 bit. Force it to respect the limit */
+      profile = g_strdup_printf ("%06x", payloader->profile & 0xffffff);
+      /* combine into output caps */
+      res = gst_rtp_base_payload_set_outcaps (basepayload,
+          "packetization-mode", G_TYPE_STRING, "1",
+          "profile-level-id", G_TYPE_STRING, profile,
+          "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL);
+      g_free (profile);
+    } else {
+      res = gst_rtp_base_payload_set_outcaps (basepayload,
+          "packetization-mode", G_TYPE_STRING, "1",
+          "sprop-parameter-sets", G_TYPE_STRING, sprops->str, NULL);
+    }
+
+  } else {
+    /* no parameter sets collected yet: set plain output caps */
+    res = gst_rtp_base_payload_set_outcaps (basepayload, NULL);
+  }
+  g_string_free (sprops, TRUE);
+
+  return res;
+}
+
+
+static gboolean
+gst_rtp_h264_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+ GstRtpH264Pay *rtph264pay;
+ GstStructure *str;
+ const GValue *value;
+ GstMapInfo map;
+ guint8 *data;
+ gsize size;
+ GstBuffer *buffer;
+ const gchar *alignment, *stream_format;
+
+ rtph264pay = GST_RTP_H264_PAY (basepayload);
+
+ str = gst_caps_get_structure (caps, 0);
+
+ /* we can only set the output caps when we found the sprops and profile
+ * NALs */
+ gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "H264", 90000);
+
+ rtph264pay->alignment = GST_H264_ALIGNMENT_UNKNOWN;
+ alignment = gst_structure_get_string (str, "alignment");
+ if (alignment) {
+ if (g_str_equal (alignment, "au"))
+ rtph264pay->alignment = GST_H264_ALIGNMENT_AU;
+ if (g_str_equal (alignment, "nal"))
+ rtph264pay->alignment = GST_H264_ALIGNMENT_NAL;
+ }
+
+ rtph264pay->stream_format = GST_H264_STREAM_FORMAT_UNKNOWN;
+ stream_format = gst_structure_get_string (str, "stream-format");
+ if (stream_format) {
+ if (g_str_equal (stream_format, "avc"))
+ rtph264pay->stream_format = GST_H264_STREAM_FORMAT_AVC;
+ if (g_str_equal (stream_format, "byte-stream"))
+ rtph264pay->stream_format = GST_H264_STREAM_FORMAT_BYTESTREAM;
+ }
+
+ if (!gst_structure_get_fraction (str, "framerate", &rtph264pay->fps_num,
+ &rtph264pay->fps_denum))
+ rtph264pay->fps_num = rtph264pay->fps_denum = 0;
+
+ /* packetized AVC video has a codec_data */
+ if ((value = gst_structure_get_value (str, "codec_data"))) {
+ guint num_sps, num_pps;
+ gint i, nal_size;
+
+ GST_DEBUG_OBJECT (rtph264pay, "have packetized h264");
+
+ buffer = gst_value_get_buffer (value);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ /* parse the avcC data */
+ if (size < 7)
+ goto avcc_too_small;
+ /* parse the version, this must be 1 */
+ if (data[0] != 1)
+ goto wrong_version;
+
+ /* AVCProfileIndication */
+ /* profile_compat */
+ /* AVCLevelIndication */
+ rtph264pay->profile = (data[1] << 16) | (data[2] << 8) | data[3];
+ GST_DEBUG_OBJECT (rtph264pay, "profile %06x", rtph264pay->profile);
+
+ /* 6 bits reserved | 2 bits lengthSizeMinusOne */
+ /* this is the number of bytes in front of the NAL units to mark their
+ * length */
+ rtph264pay->nal_length_size = (data[4] & 0x03) + 1;
+ GST_DEBUG_OBJECT (rtph264pay, "nal length %u", rtph264pay->nal_length_size);
+ /* 3 bits reserved | 5 bits numOfSequenceParameterSets */
+ num_sps = data[5] & 0x1f;
+ GST_DEBUG_OBJECT (rtph264pay, "num SPS %u", num_sps);
+
+ data += 6;
+ size -= 6;
+
+ /* create the sprop-parameter-sets */
+ for (i = 0; i < num_sps; i++) {
+ GstBuffer *sps_buf;
+
+ if (size < 2)
+ goto avcc_error;
+
+ nal_size = (data[0] << 8) | data[1];
+ data += 2;
+ size -= 2;
+
+ GST_LOG_OBJECT (rtph264pay, "SPS %d size %d", i, nal_size);
+
+ if (size < nal_size)
+ goto avcc_error;
+
+ /* make a buffer out of it and add to SPS list */
+ sps_buf = gst_buffer_new_and_alloc (nal_size);
+ gst_buffer_fill (sps_buf, 0, data, nal_size);
+ gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
+ rtph264pay->pps, sps_buf);
+ data += nal_size;
+ size -= nal_size;
+ }
+ if (size < 1)
+ goto avcc_error;
+
+ /* 8 bits numOfPictureParameterSets */
+ num_pps = data[0];
+ data += 1;
+ size -= 1;
+
+ GST_DEBUG_OBJECT (rtph264pay, "num PPS %u", num_pps);
+ for (i = 0; i < num_pps; i++) {
+ GstBuffer *pps_buf;
+
+ if (size < 2)
+ goto avcc_error;
+
+ nal_size = (data[0] << 8) | data[1];
+ data += 2;
+ size -= 2;
+
+ GST_LOG_OBJECT (rtph264pay, "PPS %d size %d", i, nal_size);
+
+ if (size < nal_size)
+ goto avcc_error;
+
+ /* make a buffer out of it and add to PPS list */
+ pps_buf = gst_buffer_new_and_alloc (nal_size);
+ gst_buffer_fill (pps_buf, 0, data, nal_size);
+ gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
+ rtph264pay->pps, pps_buf);
+
+ data += nal_size;
+ size -= nal_size;
+ }
+
+ /* and update the caps with the collected data */
+ if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
+ goto set_sps_pps_failed;
+
+ gst_buffer_unmap (buffer, &map);
+ } else {
+ GST_DEBUG_OBJECT (rtph264pay, "have bytestream h264");
+ }
+
+ return TRUE;
+
+avcc_too_small:
+ {
+ GST_ERROR_OBJECT (rtph264pay, "avcC size %" G_GSIZE_FORMAT " < 7", size);
+ goto error;
+ }
+wrong_version:
+ {
+ GST_ERROR_OBJECT (rtph264pay, "wrong avcC version");
+ goto error;
+ }
+avcc_error:
+ {
+ GST_ERROR_OBJECT (rtph264pay, "avcC too small ");
+ goto error;
+ }
+set_sps_pps_failed:
+ {
+ GST_ERROR_OBJECT (rtph264pay, "failed to set sps/pps");
+ goto error;
+ }
+error:
+ {
+ gst_buffer_unmap (buffer, &map);
+ return FALSE;
+ }
+}
+
+/* Rebuild the SPS/PPS tables from the comma-separated, base64-encoded
+ * "sprop-parameter-sets" property string. Tokens that decode to nothing
+ * are silently skipped. */
+static void
+gst_rtp_h264_pay_parse_sprop_parameter_sets (GstRtpH264Pay * rtph264pay)
+{
+  const gchar *sprops;
+  gchar **tokens;
+  guint n_tokens;
+  gint idx;
+
+  sprops = rtph264pay->sprop_parameter_sets;
+  if (sprops == NULL)
+    return;
+
+  /* start from a clean parameter-set table before re-filling it */
+  gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
+
+  tokens = g_strsplit (sprops, ",", 0);
+  n_tokens = g_strv_length (tokens);
+
+  GST_DEBUG_OBJECT (rtph264pay, "we have %d params", n_tokens);
+
+  for (idx = 0; tokens[idx]; idx++) {
+    GstBuffer *nal_buf;
+    GstMapInfo info;
+    gsize decoded_len;
+    guint b64_save = 0;
+    gint b64_state = 0;
+
+    /* the decoded NAL can never be longer than the base64 text */
+    decoded_len = strlen (tokens[idx]);
+    nal_buf = gst_buffer_new_and_alloc (decoded_len);
+
+    gst_buffer_map (nal_buf, &info, GST_MAP_WRITE);
+    decoded_len =
+        g_base64_decode_step (tokens[idx], decoded_len, info.data, &b64_state,
+        &b64_save);
+    gst_buffer_unmap (nal_buf, &info);
+    gst_buffer_resize (nal_buf, 0, decoded_len);
+
+    /* drop tokens that decoded to an empty payload */
+    if (decoded_len == 0) {
+      gst_buffer_unref (nal_buf);
+      continue;
+    }
+
+    gst_rtp_h264_add_sps_pps (GST_ELEMENT (rtph264pay), rtph264pay->sps,
+        rtph264pay->pps, nal_buf);
+  }
+  g_strfreev (tokens);
+}
+
+/* Return the offset of the first byte of the next 0x00 0x00 0x01 start code
+ * in @data, or @size when no start code is found.
+ *
+ * Degenerate Boyer-Moore scan: the pattern contains only the byte values
+ * 0 and 1, so each mismatch allows a fixed forward jump. */
+static guint
+next_start_code (const guint8 * data, guint size)
+{
+  /* candidate position of the trailing 0x01 byte */
+  guint pos = 2;
+
+  while (pos < size) {
+    const guint8 b = data[pos];
+
+    if (b == 1) {
+      /* possible match: verify the two preceding bytes are zero */
+      if (data[pos - 1] == 0 && data[pos - 2] == 0)
+        return pos - 2;
+      /* the 1 just seen cannot be inside a later start code's zeros */
+      pos += 3;
+    } else if (b == 0) {
+      /* maybe the next byte is the 1 completing a start code */
+      pos++;
+    } else {
+      /* any other byte rules out a start code ending within 3 bytes */
+      pos += 3;
+    }
+  }
+
+  GST_DEBUG ("Cannot find next NAL start code. returning %u", size);
+
+  return size;
+}
+
+/* Inspect one NAL unit; when it is an SPS or PPS, store it (minus trailing
+ * zero bytes) in the payloader's parameter-set tables and remember the
+ * running time it was seen at. Returns TRUE when the stored sets changed
+ * and the output caps need updating. */
+static gboolean
+gst_rtp_h264_pay_decode_nal (GstRtpH264Pay * payloader,
+    const guint8 * data, guint size, GstClockTime dts, GstClockTime pts)
+{
+  guint8 nal_header = data[0];
+  guint8 nal_type = nal_header & 0x1f;
+  gboolean caps_changed = FALSE;
+
+  GST_DEBUG ("NAL payload len=%u", size);
+
+  /* We record the timestamp of the last SPS/PPS so
+   * that we can insert them at regular intervals and when needed. */
+  if (nal_type == SPS_TYPE_ID || nal_type == PPS_TYPE_ID) {
+    GstBuffer *ps_buf;
+
+    /* trailing 0x0 are not part of the SPS/PPS */
+    while (size > 0 && data[size - 1] == 0x0)
+      size--;
+
+    GST_DEBUG ("Found %s %x %x %x Len=%u",
+        nal_type == SPS_TYPE_ID ? "SPS" : "PPS",
+        (nal_header >> 7), (nal_header >> 5) & 3, nal_type, size);
+
+    ps_buf = gst_buffer_new_allocate (NULL, size, NULL);
+    gst_buffer_fill (ps_buf, 0, data, size);
+
+    caps_changed = gst_rtp_h264_add_sps_pps (GST_ELEMENT (payloader),
+        payloader->sps, payloader->pps, ps_buf);
+
+    /* remember when we last saw SPS */
+    if (pts != -1)
+      payloader->last_spspps =
+          gst_segment_to_running_time (&GST_RTP_BASE_PAYLOAD_CAST
+          (payloader)->segment, GST_FORMAT_TIME, pts);
+  } else {
+    GST_DEBUG ("NAL: %x %x %x Len = %u", (nal_header >> 7),
+        (nal_header >> 5) & 3, nal_type, size);
+  }
+
+  return caps_changed;
+}
+
+/* Forward declarations: gst_rtp_h264_pay_payload_nal () dispatches to the
+ * single/fragment/bundle helpers below, and the bundle helper falls back to
+ * the fragment helper, so all prototypes are needed before the bodies. */
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont);
+
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal_single (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont);
+
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal_fragment (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont, guint8 nal_header);
+
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal_bundle (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont, guint8 nal_header);
+
+/* Push all stored SPS and PPS NALs downstream, e.g. before an IDR frame or
+ * when a forced key unit requested all headers.
+ *
+ * @delta_unit / @discont apply only to the very first packet actually sent;
+ * every following parameter-set packet is a delta unit without the DISCONT
+ * flag. (Previously the PPS loop hardcoded delta_unit=TRUE/discont=FALSE,
+ * which dropped the caller's flags whenever the SPS table was empty, and the
+ * SPS loop never downgraded the flags, so a second SPS was also sent
+ * non-delta.)
+ *
+ * On success the running time of this burst is stored in last_spspps so the
+ * config-interval logic knows when to re-send. */
+static GstFlowReturn
+gst_rtp_h264_pay_send_sps_pps (GstRTPBasePayload * basepayload,
+    GstClockTime dts, GstClockTime pts, gboolean delta_unit, gboolean discont)
+{
+  GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (basepayload);
+  GstFlowReturn ret = GST_FLOW_OK;
+  gboolean sent_all_sps_pps = TRUE;
+  guint i;
+
+  for (i = 0; i < rtph264pay->sps->len; i++) {
+    GstBuffer *sps_buf =
+        GST_BUFFER_CAST (g_ptr_array_index (rtph264pay->sps, i));
+
+    GST_DEBUG_OBJECT (rtph264pay, "inserting SPS in the stream");
+    /* resend SPS */
+    ret = gst_rtp_h264_pay_payload_nal (basepayload, gst_buffer_ref (sps_buf),
+        dts, pts, FALSE, delta_unit, discont);
+    /* Not critical here; but throw a warning */
+    if (ret != GST_FLOW_OK) {
+      sent_all_sps_pps = FALSE;
+      GST_WARNING_OBJECT (basepayload, "Problem pushing SPS");
+    }
+    /* only the first packet of the burst carries the caller's flags */
+    delta_unit = TRUE;
+    discont = FALSE;
+  }
+  for (i = 0; i < rtph264pay->pps->len; i++) {
+    GstBuffer *pps_buf =
+        GST_BUFFER_CAST (g_ptr_array_index (rtph264pay->pps, i));
+
+    GST_DEBUG_OBJECT (rtph264pay, "inserting PPS in the stream");
+    /* resend PPS */
+    ret = gst_rtp_h264_pay_payload_nal (basepayload, gst_buffer_ref (pps_buf),
+        dts, pts, FALSE, delta_unit, discont);
+    /* Not critical here; but throw a warning */
+    if (ret != GST_FLOW_OK) {
+      sent_all_sps_pps = FALSE;
+      GST_WARNING_OBJECT (basepayload, "Problem pushing PPS");
+    }
+    delta_unit = TRUE;
+    discont = FALSE;
+  }
+
+  if (pts != -1 && sent_all_sps_pps)
+    rtph264pay->last_spspps =
+        gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+        pts);
+
+  return ret;
+}
+
+/* Payload a single NAL unit, taking ownership of @paybuf, and decide whether
+ * stored SPS/PPS must be (re)sent first, based on the config-interval
+ * property and pending forced-key-unit requests.
+ *
+ * @delta_unit: if %FALSE the first packet sent won't have the
+ * GST_BUFFER_FLAG_DELTA_UNIT flag.
+ * @discont: if %TRUE the first packet sent will have the
+ * GST_BUFFER_FLAG_DISCONT flag.
+ *
+ * Dispatches to the STAP-A bundle path when aggregation is enabled,
+ * otherwise to the single/FU-A fragment path.
+ */
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont)
+{
+  GstRtpH264Pay *rtph264pay;
+  guint8 nal_header, nal_type;
+  gboolean send_spspps;
+  guint size;
+
+  rtph264pay = GST_RTP_H264_PAY (basepayload);
+  size = gst_buffer_get_size (paybuf);
+
+  gst_buffer_extract (paybuf, 0, &nal_header, 1);
+  nal_type = nal_header & 0x1f;
+
+  /* These payload type are reserved for STAP-A, STAP-B, MTAP16, and MTAP24
+   * as internally used NAL types */
+  switch (nal_type) {
+    case 24:
+    case 25:
+    case 26:
+    case 27:
+      GST_WARNING_OBJECT (rtph264pay, "Ignoring reserved NAL TYPE=%d",
+          nal_type);
+      gst_buffer_unref (paybuf);
+      return GST_FLOW_OK;
+    default:
+      break;
+  }
+
+  GST_DEBUG_OBJECT (rtph264pay,
+      "payloading NAL Unit: datasize=%u type=%d pts=%" GST_TIME_FORMAT,
+      size, nal_type, GST_TIME_ARGS (pts));
+
+  /* should set src caps before pushing stuff,
+   * and if we did not see enough SPS/PPS, that may not be the case */
+  if (G_UNLIKELY (!gst_pad_has_current_caps (GST_RTP_BASE_PAYLOAD_SRCPAD
+              (basepayload))))
+    gst_rtp_h264_pay_set_sps_pps (basepayload);
+
+  send_spspps = FALSE;
+
+  /* check if we need to emit an SPS/PPS now */
+  if (nal_type == IDR_TYPE_ID && rtph264pay->spspps_interval > 0) {
+    if (rtph264pay->last_spspps != -1) {
+      guint64 diff;
+      GstClockTime running_time =
+          gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+          pts);
+
+      GST_LOG_OBJECT (rtph264pay,
+          "now %" GST_TIME_FORMAT ", last SPS/PPS %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (running_time),
+          GST_TIME_ARGS (rtph264pay->last_spspps));
+
+      /* calculate diff between last SPS/PPS in milliseconds */
+      if (running_time > rtph264pay->last_spspps)
+        diff = running_time - rtph264pay->last_spspps;
+      else
+        diff = 0;
+
+      GST_DEBUG_OBJECT (rtph264pay,
+          "interval since last SPS/PPS %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (diff));
+
+      /* bigger than interval, queue SPS/PPS */
+      if (GST_TIME_AS_SECONDS (diff) >= rtph264pay->spspps_interval) {
+        GST_DEBUG_OBJECT (rtph264pay, "time to send SPS/PPS");
+        send_spspps = TRUE;
+      }
+    } else {
+      /* no know previous SPS/PPS time, send now */
+      GST_DEBUG_OBJECT (rtph264pay, "no previous SPS/PPS time, send now");
+      send_spspps = TRUE;
+    }
+  } else if (nal_type == IDR_TYPE_ID && rtph264pay->spspps_interval == -1) {
+    GST_DEBUG_OBJECT (rtph264pay, "sending SPS/PPS before current IDR frame");
+    /* send SPS/PPS before every IDR frame */
+    send_spspps = TRUE;
+  }
+
+  if (send_spspps || rtph264pay->send_spspps) {
+    /* we need to send SPS/PPS now first. FIXME, don't use the pts for
+     * checking when we need to send SPS/PPS but convert to running_time first. */
+    GstFlowReturn ret;
+
+    rtph264pay->send_spspps = FALSE;
+
+    ret = gst_rtp_h264_pay_send_sps_pps (basepayload, dts, pts, delta_unit,
+        discont);
+    if (ret != GST_FLOW_OK) {
+      gst_buffer_unref (paybuf);
+      return ret;
+    }
+
+    /* the SPS/PPS burst consumed the first-packet flags */
+    delta_unit = TRUE;
+    discont = FALSE;
+  }
+
+  if (rtph264pay->aggregate_mode != GST_RTP_H264_AGGREGATE_NONE)
+    return gst_rtp_h264_pay_payload_nal_bundle (basepayload, paybuf, dts, pts,
+        end_of_au, delta_unit, discont, nal_header);
+
+  return gst_rtp_h264_pay_payload_nal_fragment (basepayload, paybuf, dts, pts,
+      end_of_au, delta_unit, discont, nal_header);
+}
+
+/* Send one NAL unit, splitting it into FU-A fragments (RFC 6184 5.8) when
+ * the whole unit does not fit within the configured MTU. Takes ownership of
+ * @paybuf. @nal_header is the first byte of the NAL; its F bit and NRI bits
+ * are copied into the FU indicator of every fragment. */
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal_fragment (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont, guint8 nal_header)
+{
+  GstRtpH264Pay *rtph264pay;
+  guint mtu, size, max_fragment_size, max_fragments, ii, pos;
+  GstBuffer *outbuf;
+  guint8 *payload;
+  GstBufferList *list = NULL;
+  GstRTPBuffer rtp = { NULL };
+
+  rtph264pay = GST_RTP_H264_PAY (basepayload);
+  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph264pay);
+  size = gst_buffer_get_size (paybuf);
+
+  if (gst_rtp_buffer_calc_packet_len (size, 0, 0) <= mtu) {
+    /* We don't need to fragment this packet */
+    GST_DEBUG_OBJECT (rtph264pay,
+        "sending NAL Unit: datasize=%u mtu=%u", size, mtu);
+    return gst_rtp_h264_pay_payload_nal_single (basepayload, paybuf, dts, pts,
+        end_of_au, delta_unit, discont);
+  }
+
+  GST_DEBUG_OBJECT (basepayload,
+      "using FU-A fragmentation for NAL Unit: datasize=%u mtu=%u", size, mtu);
+
+  /* We keep 2 bytes for FU indicator and FU Header */
+  max_fragment_size = gst_rtp_buffer_calc_payload_len (mtu - 2, 0, 0);
+  /* ceil ((size - 1) / max_fragment_size): the 1-byte NAL header is carried
+   * in the FU headers, not in the fragment payloads */
+  max_fragments = (size + max_fragment_size - 2) / max_fragment_size;
+  list = gst_buffer_list_new_sized (max_fragments);
+
+  /* Start at the NALU payload */
+  for (pos = 1, ii = 0; pos < size; pos += max_fragment_size, ii++) {
+    guint remaining, fragment_size;
+    gboolean first_fragment, last_fragment;
+
+    remaining = size - pos;
+    fragment_size = MIN (remaining, max_fragment_size);
+    first_fragment = (pos == 1);
+    last_fragment = (remaining <= max_fragment_size);
+
+    GST_DEBUG_OBJECT (basepayload,
+        "creating FU-A packet %u/%u, size %u",
+        ii + 1, max_fragments, fragment_size);
+
+    /* use buffer lists
+     * create buffer without payload containing only the RTP header
+     * (memory block at index 0) */
+    outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 2, 0, 0);
+
+    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+    GST_BUFFER_DTS (outbuf) = dts;
+    GST_BUFFER_PTS (outbuf) = pts;
+    payload = gst_rtp_buffer_get_payload (&rtp);
+
+    /* If it's the last fragment and the end of this au, mark the end of
+     * slice */
+    gst_rtp_buffer_set_marker (&rtp, last_fragment && end_of_au);
+
+    /* FU indicator: F + NRI from the original NAL, type = FU-A (28) */
+    payload[0] = (nal_header & 0x60) | FU_A_TYPE_ID;
+
+    /* FU Header: S and E bits plus the original NAL type */
+    payload[1] = (first_fragment << 7) | (last_fragment << 6) |
+        (nal_header & 0x1f);
+
+    gst_rtp_buffer_unmap (&rtp);
+
+    /* insert payload memory block */
+    gst_rtp_copy_video_meta (rtph264pay, outbuf, paybuf);
+    gst_buffer_copy_into (outbuf, paybuf, GST_BUFFER_COPY_MEMORY, pos,
+        fragment_size);
+
+    if (!delta_unit)
+      /* Only the first packet sent should not have the flag */
+      delta_unit = TRUE;
+    else
+      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+    if (discont) {
+      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+      /* Only the first packet sent should have the flag */
+      discont = FALSE;
+    }
+
+    /* add the buffer to the buffer list */
+    gst_buffer_list_add (list, outbuf);
+  }
+
+  GST_DEBUG_OBJECT (rtph264pay,
+      "sending FU-A fragments: n=%u datasize=%u mtu=%u", ii, size, mtu);
+
+  gst_buffer_unref (paybuf);
+  return gst_rtp_base_payload_push_list (basepayload, list);
+}
+
+/* Send one NAL unit that fits in a single RTP packet. Takes ownership of
+ * @paybuf and appends it as payload behind a freshly allocated RTP header. */
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal_single (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont)
+{
+  GstRtpH264Pay *pay = GST_RTP_H264_PAY (basepayload);
+  GstRTPBuffer rtp = { NULL };
+  GstBuffer *header_buf;
+
+  /* RTP header only; the NAL data is appended as extra memory below */
+  header_buf =
+      gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
+
+  /* the marker bit flags the end of an access unit */
+  gst_rtp_buffer_map (header_buf, GST_MAP_WRITE, &rtp);
+  gst_rtp_buffer_set_marker (&rtp, end_of_au);
+  gst_rtp_buffer_unmap (&rtp);
+
+  /* timestamp the outbuffer */
+  GST_BUFFER_PTS (header_buf) = pts;
+  GST_BUFFER_DTS (header_buf) = dts;
+
+  if (delta_unit)
+    GST_BUFFER_FLAG_SET (header_buf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+  if (discont)
+    GST_BUFFER_FLAG_SET (header_buf, GST_BUFFER_FLAG_DISCONT);
+
+  /* insert payload memory block */
+  gst_rtp_copy_video_meta (pay, header_buf, paybuf);
+  header_buf = gst_buffer_append (header_buf, paybuf);
+
+  /* push the buffer to the next element */
+  return gst_rtp_base_payload_push (basepayload, header_buf);
+}
+
+/* Drop any partially collected STAP-A aggregate and reset its bookkeeping. */
+static void
+gst_rtp_h264_pay_reset_bundle (GstRtpH264Pay * rtph264pay)
+{
+  rtph264pay->bundle_size = 0;
+  rtph264pay->bundle_contains_vcl = FALSE;
+  g_clear_pointer (&rtph264pay->bundle, gst_buffer_list_unref);
+}
+
+/* Flush the pending aggregate: a single collected NAL is sent as-is, two or
+ * more are packed into one STAP-A packet (1-byte STAP header followed by
+ * 16-bit-size-prefixed NALs). The first bundled buffer provides timestamps
+ * and the DELTA_UNIT/DISCONT flags of the outgoing packet. */
+static GstFlowReturn
+gst_rtp_h264_pay_send_bundle (GstRtpH264Pay * rtph264pay, gboolean end_of_au)
+{
+  GstRTPBasePayload *basepayload;
+  GstBufferList *bundle;
+  guint length, bundle_size;
+  GstBuffer *first, *outbuf;
+  GstClockTime dts, pts;
+  gboolean delta, discont;
+
+  bundle_size = rtph264pay->bundle_size;
+
+  if (bundle_size == 0) {
+    GST_DEBUG_OBJECT (rtph264pay, "no bundle, nothing to send");
+    return GST_FLOW_OK;
+  }
+
+  basepayload = GST_RTP_BASE_PAYLOAD (rtph264pay);
+  bundle = rtph264pay->bundle;
+  length = gst_buffer_list_length (bundle);
+
+  first = gst_buffer_list_get (bundle, 0);
+  dts = GST_BUFFER_DTS (first);
+  pts = GST_BUFFER_PTS (first);
+  delta = GST_BUFFER_FLAG_IS_SET (first, GST_BUFFER_FLAG_DELTA_UNIT);
+  discont = GST_BUFFER_FLAG_IS_SET (first, GST_BUFFER_FLAG_DISCONT);
+
+  if (length == 1) {
+    /* Push unaggregated NALU */
+    outbuf = gst_buffer_ref (first);
+
+    /* bundle_size counted a 2-byte size prefix that is not sent here */
+    GST_DEBUG_OBJECT (rtph264pay,
+        "sending NAL Unit unaggregated: datasize=%u", bundle_size - 2);
+  } else {
+    guint8 stap_header;
+    guint i;
+
+    outbuf = gst_buffer_new_allocate (NULL, sizeof stap_header, NULL);
+    stap_header = STAP_A_TYPE_ID;
+
+    for (i = 0; i < length; i++) {
+      GstBuffer *buf = gst_buffer_list_get (bundle, i);
+      guint8 nal_header;
+      GstMemory *size_header;
+      GstMapInfo map;
+
+      gst_buffer_extract (buf, 0, &nal_header, sizeof nal_header);
+
+      /* Propagate F bit */
+      if ((nal_header & 0x80))
+        stap_header |= 0x80;
+
+      /* Select highest nal_ref_idc */
+      if ((nal_header & 0x60) > (stap_header & 0x60))
+        stap_header = (stap_header & 0x9f) | (nal_header & 0x60);
+
+      /* append NALU size */
+      size_header = gst_allocator_alloc (NULL, 2, NULL);
+      gst_memory_map (size_header, &map, GST_MAP_WRITE);
+      GST_WRITE_UINT16_BE (map.data, gst_buffer_get_size (buf));
+      gst_memory_unmap (size_header, &map);
+      gst_buffer_append_memory (outbuf, size_header);
+
+      /* append NALU data */
+      outbuf = gst_buffer_append (outbuf, gst_buffer_ref (buf));
+    }
+
+    gst_buffer_fill (outbuf, 0, &stap_header, sizeof stap_header);
+
+    GST_DEBUG_OBJECT (rtph264pay,
+        "sending STAP-A bundle: n=%u header=%02x datasize=%u",
+        length, stap_header, bundle_size);
+  }
+
+  gst_rtp_h264_pay_reset_bundle (rtph264pay);
+  return gst_rtp_h264_pay_payload_nal_single (basepayload, outbuf, dts, pts,
+      end_of_au, delta, discont);
+}
+
+/* Aggregation path: collect NAL units into a STAP-A bundle and flush the
+ * bundle when an AU boundary is detected, when adding the unit would
+ * overflow the MTU, or when @end_of_au is set. Takes ownership of @paybuf.
+ *
+ * Fix: this was declared "static gboolean" while returning GstFlowReturn
+ * values — conflicting with the forward declaration above and truncating
+ * flow returns. The return type is GstFlowReturn. */
+static GstFlowReturn
+gst_rtp_h264_pay_payload_nal_bundle (GstRTPBasePayload * basepayload,
+    GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean end_of_au,
+    gboolean delta_unit, gboolean discont, guint8 nal_header)
+{
+  GstRtpH264Pay *rtph264pay;
+  GstFlowReturn ret;
+  guint mtu, pay_size, bundle_size;
+  GstBufferList *bundle;
+  guint8 nal_type;
+  gboolean start_of_au;
+
+  rtph264pay = GST_RTP_H264_PAY (basepayload);
+  nal_type = nal_header & 0x1f;
+  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph264pay);
+  /* 2-byte STAP-A size prefix plus the NAL itself */
+  pay_size = 2 + gst_buffer_get_size (paybuf);
+  bundle = rtph264pay->bundle;
+  start_of_au = FALSE;
+
+  if (bundle) {
+    GstBuffer *first = gst_buffer_list_get (bundle, 0);
+
+    if (nal_type == AUD_TYPE_ID) {
+      GST_DEBUG_OBJECT (rtph264pay, "found access delimiter");
+      start_of_au = TRUE;
+    } else if (discont) {
+      GST_DEBUG_OBJECT (rtph264pay, "found discont");
+      start_of_au = TRUE;
+    } else if (GST_BUFFER_PTS (first) != pts || GST_BUFFER_DTS (first) != dts) {
+      GST_DEBUG_OBJECT (rtph264pay, "found timestamp mismatch");
+      start_of_au = TRUE;
+    }
+  }
+
+  if (start_of_au) {
+    GST_DEBUG_OBJECT (rtph264pay, "sending bundle before start of AU");
+
+    ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
+    if (ret != GST_FLOW_OK)
+      goto out;
+
+    bundle = NULL;
+  }
+
+  /* 1-byte STAP-A header plus this NAL's prefixed size */
+  bundle_size = 1 + pay_size;
+
+  if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
+    GST_DEBUG_OBJECT (rtph264pay, "NAL Unit cannot fit in a bundle");
+
+    ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
+    if (ret != GST_FLOW_OK)
+      goto out;
+
+    /* paybuf ownership moves to the fragment path */
+    return gst_rtp_h264_pay_payload_nal_fragment (basepayload, paybuf, dts, pts,
+        end_of_au, delta_unit, discont, nal_header);
+  }
+
+  bundle_size = rtph264pay->bundle_size + pay_size;
+
+  if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
+    GST_DEBUG_OBJECT (rtph264pay,
+        "bundle overflows, sending: bundlesize=%u datasize=2+%u mtu=%u",
+        rtph264pay->bundle_size, pay_size - 2, mtu);
+
+    ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
+    if (ret != GST_FLOW_OK)
+      goto out;
+
+    bundle = NULL;
+  }
+
+  if (!bundle) {
+    GST_DEBUG_OBJECT (rtph264pay, "creating new STAP-A aggregate");
+    bundle = rtph264pay->bundle = gst_buffer_list_new ();
+    bundle_size = rtph264pay->bundle_size = 1;
+    rtph264pay->bundle_contains_vcl = FALSE;
+  }
+
+  GST_DEBUG_OBJECT (rtph264pay,
+      "bundling NAL Unit: bundlesize=%u datasize=2+%u mtu=%u",
+      rtph264pay->bundle_size, pay_size - 2, mtu);
+
+  paybuf = gst_buffer_make_writable (paybuf);
+  GST_BUFFER_PTS (paybuf) = pts;
+  GST_BUFFER_DTS (paybuf) = dts;
+
+  if (delta_unit)
+    GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT);
+  else
+    GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+  if (discont)
+    GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DISCONT);
+  else
+    GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DISCONT);
+
+  gst_buffer_list_add (bundle, gst_buffer_ref (paybuf));
+  rtph264pay->bundle_size += pay_size;
+  ret = GST_FLOW_OK;
+
+  /* VCL NAL types (1-5) and the extension types 14, 20-23 */
+  if ((nal_type >= 1 && nal_type <= 5) || nal_type == 14 ||
+      (nal_type >= 20 && nal_type <= 23))
+    rtph264pay->bundle_contains_vcl = TRUE;
+
+  if (end_of_au) {
+    GST_DEBUG_OBJECT (rtph264pay, "sending bundle at end of AU");
+    ret = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
+  }
+
+out:
+  gst_buffer_unref (paybuf);
+  return ret;
+}
+
+/* Main buffer handler (also called with @buffer == NULL to drain on EOS).
+ *
+ * AVC mode: each input buffer holds length-prefixed NALs and is walked
+ * directly, one payload_nal call per NAL.
+ * Byte-stream mode: input is accumulated in an adapter, scanned for
+ * 00 00 01 start codes in a first pass (also harvesting SPS/PPS for the
+ * caps), then payloaded NAL-by-NAL in a second pass. */
+static GstFlowReturn
+gst_rtp_h264_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstRtpH264Pay *rtph264pay;
+  GstFlowReturn ret;
+  gsize size;
+  guint nal_len, i;
+  const guint8 *data;
+  GstClockTime dts, pts;
+  GArray *nal_queue;
+  gboolean avc;
+  GstBuffer *paybuf = NULL;
+  gsize skip;
+  gboolean delayed_not_delta_unit = FALSE;
+  gboolean delayed_discont = FALSE;
+  gboolean marker = FALSE;
+  gboolean draining = (buffer == NULL);
+
+  rtph264pay = GST_RTP_H264_PAY (basepayload);
+
+  /* the input buffer contains one or more NAL units */
+
+  avc = rtph264pay->stream_format == GST_H264_STREAM_FORMAT_AVC;
+
+  if (avc) {
+    /* In AVC mode, there is no adapter, so nothing to drain */
+    if (draining)
+      return GST_FLOW_OK;
+  } else {
+    if (buffer) {
+      if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT)) {
+        if (gst_adapter_available (rtph264pay->adapter) == 0)
+          rtph264pay->delta_unit = FALSE;
+        else
+          /* This buffer contains a key frame but the adapter isn't empty. So
+           * we'll purge it first by sending a first packet and then the second
+           * one won't have the DELTA_UNIT flag. */
+          delayed_not_delta_unit = TRUE;
+      }
+
+      if (GST_BUFFER_IS_DISCONT (buffer)) {
+        if (gst_adapter_available (rtph264pay->adapter) == 0)
+          rtph264pay->discont = TRUE;
+        else
+          /* This buffer has the DISCONT flag but the adapter isn't empty. So
+           * we'll purge it first by sending a first packet and then the second
+           * one will have the DISCONT flag set. */
+          delayed_discont = TRUE;
+      }
+
+      marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
+      gst_adapter_push (rtph264pay->adapter, buffer);
+      buffer = NULL;
+    }
+
+    /* We want to use the first TS used to construct the following NAL */
+    dts = gst_adapter_prev_dts (rtph264pay->adapter, NULL);
+    pts = gst_adapter_prev_pts (rtph264pay->adapter, NULL);
+
+    size = gst_adapter_available (rtph264pay->adapter);
+    /* Nothing to do here if the adapter is empty, e.g. on EOS */
+    if (size == 0)
+      return GST_FLOW_OK;
+    data = gst_adapter_map (rtph264pay->adapter, size);
+    GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", size);
+  }
+
+  ret = GST_FLOW_OK;
+
+  /* now loop over all NAL units and put them in a packet */
+  if (avc) {
+    GstBufferMemoryMap memory;
+    gsize remaining_buffer_size;
+    guint nal_length_size;
+    gsize offset = 0;
+
+    gst_buffer_memory_map (buffer, &memory);
+    remaining_buffer_size = gst_buffer_get_size (buffer);
+
+    pts = GST_BUFFER_PTS (buffer);
+    dts = GST_BUFFER_DTS (buffer);
+    rtph264pay->delta_unit = GST_BUFFER_FLAG_IS_SET (buffer,
+        GST_BUFFER_FLAG_DELTA_UNIT);
+    rtph264pay->discont = GST_BUFFER_IS_DISCONT (buffer);
+    marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
+    GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes",
+        remaining_buffer_size);
+
+    /* length-prefix size (1-4 bytes) taken from the avcC codec_data */
+    nal_length_size = rtph264pay->nal_length_size;
+
+    while (remaining_buffer_size > nal_length_size) {
+      gint i;
+      gboolean end_of_au = FALSE;
+
+      /* read the big-endian NAL length prefix byte by byte */
+      nal_len = 0;
+      for (i = 0; i < nal_length_size; i++) {
+        nal_len = (nal_len << 8) + *memory.data;
+        if (!gst_buffer_memory_advance_bytes (&memory, 1))
+          break;
+      }
+
+      offset += nal_length_size;
+      remaining_buffer_size -= nal_length_size;
+
+      if (remaining_buffer_size >= nal_len) {
+        GST_DEBUG_OBJECT (basepayload, "got NAL of size %u", nal_len);
+      } else {
+        nal_len = remaining_buffer_size;
+        GST_DEBUG_OBJECT (basepayload, "got incomplete NAL of size %u",
+            nal_len);
+      }
+
+      /* If we're at the end of the buffer, then we're at the end of the
+       * access unit
+       */
+      if (remaining_buffer_size - nal_len <= nal_length_size) {
+        if (rtph264pay->alignment == GST_H264_ALIGNMENT_AU || marker)
+          end_of_au = TRUE;
+      }
+
+      paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset,
+          nal_len);
+      ret =
+          gst_rtp_h264_pay_payload_nal (basepayload, paybuf, dts, pts,
+          end_of_au, rtph264pay->delta_unit, rtph264pay->discont);
+
+      if (!rtph264pay->delta_unit)
+        /* Only the first outgoing packet doesn't have the DELTA_UNIT flag */
+        rtph264pay->delta_unit = TRUE;
+
+      if (rtph264pay->discont)
+        /* Only the first outgoing packet have the DISCONT flag */
+        rtph264pay->discont = FALSE;
+
+      if (ret != GST_FLOW_OK)
+        break;
+
+      /* Skip current nal. If it is split over multiple GstMemory
+       * advance_bytes () will switch to the correct GstMemory. The payloader
+       * does not access those bytes directly but uses gst_buffer_copy_region ()
+       * to create a sub-buffer referencing the nal instead */
+      if (!gst_buffer_memory_advance_bytes (&memory, nal_len))
+        break;
+
+      offset += nal_len;
+      remaining_buffer_size -= nal_len;
+    }
+
+    gst_buffer_memory_unmap (&memory);
+    gst_buffer_unref (buffer);
+  } else {
+    guint next;
+    gboolean update = FALSE;
+
+    /* get offset of first start code */
+    next = next_start_code (data, size);
+
+    /* skip to start code, if no start code is found, next will be size and we
+     * will not collect data. */
+    data += next;
+    size -= next;
+    nal_queue = rtph264pay->queue;
+    skip = next;
+
+    /* array must be empty when we get here */
+    g_assert (nal_queue->len == 0);
+
+    GST_DEBUG_OBJECT (basepayload,
+        "found first start at %u, bytes left %" G_GSIZE_FORMAT, next, size);
+
+    /* first pass to locate NALs and parse SPS/PPS */
+    while (size > 4) {
+      /* skip start code */
+      data += 3;
+      size -= 3;
+
+      /* use next_start_code() to scan buffer.
+       * next_start_code() returns the offset in data,
+       * starting from zero to the first byte of 0.0.0.1
+       * If no start code is found, it returns the value of the
+       * 'size' parameter.
+       * data is unchanged by the call to next_start_code()
+       */
+      next = next_start_code (data, size);
+
+      /* nal or au aligned input needs no delaying until next time */
+      if (next == size && !draining &&
+          rtph264pay->alignment == GST_H264_ALIGNMENT_UNKNOWN) {
+        /* Didn't find the start of next NAL and it's not EOS,
+         * handle it next time */
+        break;
+      }
+
+      /* nal length is distance to next start code */
+      nal_len = next;
+
+      GST_DEBUG_OBJECT (basepayload, "found next start at %u of size %u", next,
+          nal_len);
+
+      if (rtph264pay->sprop_parameter_sets != NULL) {
+        /* explicitly set profile and sprop, use those */
+        if (rtph264pay->update_caps) {
+          if (!gst_rtp_base_payload_set_outcaps (basepayload,
+                  "sprop-parameter-sets", G_TYPE_STRING,
+                  rtph264pay->sprop_parameter_sets, NULL))
+            goto caps_rejected;
+
+          /* parse SPS and PPS from provided parameter set (for insertion) */
+          gst_rtp_h264_pay_parse_sprop_parameter_sets (rtph264pay);
+
+          rtph264pay->update_caps = FALSE;
+
+          GST_DEBUG ("outcaps update: sprop-parameter-sets=%s",
+              rtph264pay->sprop_parameter_sets);
+        }
+      } else {
+        /* We know our stream is a valid H264 NAL packet,
+         * go parse it for SPS/PPS to enrich the caps */
+        /* order: make sure to check nal */
+        update =
+            gst_rtp_h264_pay_decode_nal (rtph264pay, data, nal_len, dts, pts)
+            || update;
+      }
+      /* move to next NAL packet */
+      data += nal_len;
+      size -= nal_len;
+
+      g_array_append_val (nal_queue, nal_len);
+    }
+
+    /* if has new SPS & PPS, update the output caps */
+    if (G_UNLIKELY (update))
+      if (!gst_rtp_h264_pay_set_sps_pps (basepayload))
+        goto caps_rejected;
+
+    /* second pass to payload and push */
+
+    if (nal_queue->len != 0)
+      gst_adapter_flush (rtph264pay->adapter, skip);
+
+    for (i = 0; i < nal_queue->len; i++) {
+      guint size;
+      gboolean end_of_au = FALSE;
+
+      nal_len = g_array_index (nal_queue, guint, i);
+      /* skip start code */
+      gst_adapter_flush (rtph264pay->adapter, 3);
+
+      /* Trim the end unless we're the last NAL in the stream.
+       * In case we're not at the end of the buffer we know the next block
+       * starts with 0x000001 so all the 0x00 bytes at the end of this one are
+       * trailing 0x0 that can be discarded */
+      size = nal_len;
+      data = gst_adapter_map (rtph264pay->adapter, size);
+      if (i + 1 != nal_queue->len || !draining)
+        for (; size > 1 && data[size - 1] == 0x0; size--)
+          /* skip */ ;
+
+
+      /* If it's the last nal unit we have in non-bytestream mode, we can
+       * assume it's the end of an access-unit
+       *
+       * FIXME: We need to wait until the next packet or EOS to
+       * actually payload the NAL so we can know if the current NAL is
+       * the last one of an access unit or not if we are in bytestream mode
+       */
+      if (i == nal_queue->len - 1) {
+        if (rtph264pay->alignment == GST_H264_ALIGNMENT_AU ||
+            marker || draining)
+          end_of_au = TRUE;
+      }
+      paybuf = gst_adapter_take_buffer (rtph264pay->adapter, size);
+      g_assert (paybuf);
+
+      /* put the data in one or more RTP packets */
+      ret =
+          gst_rtp_h264_pay_payload_nal (basepayload, paybuf, dts, pts,
+          end_of_au, rtph264pay->delta_unit, rtph264pay->discont);
+
+      if (delayed_not_delta_unit) {
+        rtph264pay->delta_unit = FALSE;
+        delayed_not_delta_unit = FALSE;
+      } else {
+        /* Only the first outgoing packet doesn't have the DELTA_UNIT flag */
+        rtph264pay->delta_unit = TRUE;
+      }
+
+      if (delayed_discont) {
+        rtph264pay->discont = TRUE;
+        delayed_discont = FALSE;
+      } else {
+        /* Only the first outgoing packet have the DISCONT flag */
+        rtph264pay->discont = FALSE;
+      }
+
+      if (ret != GST_FLOW_OK) {
+        break;
+      }
+
+      /* move to next NAL packet */
+      /* Skips the trailing zeros */
+      gst_adapter_flush (rtph264pay->adapter, nal_len - size);
+    }
+    g_array_set_size (nal_queue, 0);
+  }
+
+  /* in zero-latency aggregation mode, flush the bundle as soon as it holds
+   * at least one VCL NAL so no picture data is held back */
+  if (ret == GST_FLOW_OK && rtph264pay->bundle_size > 0 &&
+      rtph264pay->aggregate_mode == GST_RTP_H264_AGGREGATE_ZERO_LATENCY &&
+      rtph264pay->bundle_contains_vcl) {
+    GST_DEBUG_OBJECT (rtph264pay, "sending bundle at end incoming packet");
+    ret = gst_rtp_h264_pay_send_bundle (rtph264pay, FALSE);
+  }
+
+
+done:
+  if (!avc) {
+    gst_adapter_unmap (rtph264pay->adapter);
+  }
+
+  return ret;
+
+caps_rejected:
+  {
+    GST_WARNING_OBJECT (basepayload, "Could not set outcaps");
+    g_array_set_size (nal_queue, 0);
+    ret = GST_FLOW_NOT_NEGOTIATED;
+    goto done;
+  }
+}
+
+/* Sink pad event handler: flush internal state on FLUSH_STOP, honour
+ * forced-key-unit requests, and drain adapter plus aggregate on EOS and
+ * STREAM_START before chaining up to the base class. */
+static gboolean
+gst_rtp_h264_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+  GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (payload);
+  GstFlowReturn flow = GST_FLOW_OK;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_STOP:
+      /* discard pending byte-stream data and any half-built aggregate */
+      gst_adapter_clear (rtph264pay->adapter);
+      gst_rtp_h264_pay_reset_bundle (rtph264pay);
+      break;
+    case GST_EVENT_CUSTOM_DOWNSTREAM:{
+      const GstStructure *s = gst_event_get_structure (event);
+      gboolean resend_codec_data;
+
+      if (gst_structure_has_name (s, "GstForceKeyUnit") &&
+          gst_structure_get_boolean (s, "all-headers", &resend_codec_data) &&
+          resend_codec_data)
+        rtph264pay->send_spspps = TRUE;
+      break;
+    }
+    case GST_EVENT_EOS:
+      /* call handle_buffer with NULL to flush last NAL from adapter
+       * in byte-stream mode */
+      gst_rtp_h264_pay_handle_buffer (payload, NULL);
+      flow = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
+      break;
+    case GST_EVENT_STREAM_START:
+      GST_DEBUG_OBJECT (rtph264pay, "New stream detected => Clear SPS and PPS");
+      gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
+      flow = gst_rtp_h264_pay_send_bundle (rtph264pay, TRUE);
+      break;
+    default:
+      break;
+  }
+
+  if (flow != GST_FLOW_OK)
+    return FALSE;
+
+  return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+}
+
+/* Element state handling: reset streaming state going READY->PAUSED and
+ * drop collected parameter sets going PAUSED->READY. */
+static GstStateChangeReturn
+gst_rtp_h264_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRtpH264Pay *rtph264pay = GST_RTP_H264_PAY (element);
+  GstStateChangeReturn result;
+
+  /* upward setup, before chaining to the parent */
+  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
+    rtph264pay->send_spspps = FALSE;
+    gst_adapter_clear (rtph264pay->adapter);
+    gst_rtp_h264_pay_reset_bundle (rtph264pay);
+  }
+
+  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* downward teardown, after the parent handled the transition */
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
+    rtph264pay->last_spspps = -1;
+    gst_rtp_h264_pay_clear_sps_pps (rtph264pay);
+  }
+
+  return result;
+}
+
+/* GObject property setter. */
+static void
+gst_rtp_h264_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpH264Pay *self = GST_RTP_H264_PAY (object);
+
+  switch (prop_id) {
+    case PROP_SPROP_PARAMETER_SETS:
+      /* replace any previously configured parameter sets and force a caps
+       * update so the new sprops get advertised downstream */
+      g_free (self->sprop_parameter_sets);
+      self->sprop_parameter_sets = g_value_dup_string (value);
+      self->update_caps = TRUE;
+      break;
+    case PROP_CONFIG_INTERVAL:
+      self->spspps_interval = g_value_get_int (value);
+      break;
+    case PROP_AGGREGATE_MODE:
+      self->aggregate_mode = g_value_get_enum (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter. */
+static void
+gst_rtp_h264_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpH264Pay *self = GST_RTP_H264_PAY (object);
+
+  switch (prop_id) {
+    case PROP_SPROP_PARAMETER_SETS:
+      g_value_set_string (value, self->sprop_parameter_sets);
+      break;
+    case PROP_CONFIG_INTERVAL:
+      g_value_set_int (value, self->spspps_interval);
+      break;
+    case PROP_AGGREGATE_MODE:
+      g_value_set_enum (value, self->aggregate_mode);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
diff --git a/gst/rtp/gstrtph264pay.h b/gst/rtp/gstrtph264pay.h
new file mode 100644
index 0000000000..47cc896e1d
--- /dev/null
+++ b/gst/rtp/gstrtph264pay.h
@@ -0,0 +1,109 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H264_PAY_H__
+#define __GST_RTP_H264_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject boilerplate for the rtph264pay element type. */
+#define GST_TYPE_RTP_H264_PAY \
+ (gst_rtp_h264_pay_get_type())
+#define GST_RTP_H264_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H264_PAY,GstRtpH264Pay))
+#define GST_RTP_H264_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H264_PAY,GstRtpH264PayClass))
+#define GST_IS_RTP_H264_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H264_PAY))
+#define GST_IS_RTP_H264_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H264_PAY))
+
+typedef struct _GstRtpH264Pay GstRtpH264Pay;
+typedef struct _GstRtpH264PayClass GstRtpH264PayClass;
+
+/* How the incoming H.264 stream is framed (negotiated via sink caps). */
+typedef enum
+{
+ GST_H264_STREAM_FORMAT_UNKNOWN,
+ GST_H264_STREAM_FORMAT_BYTESTREAM,
+ GST_H264_STREAM_FORMAT_AVC
+} GstH264StreamFormat;
+
+/* Granularity of incoming buffers: single NALs or whole access units. */
+typedef enum
+{
+ GST_H264_ALIGNMENT_UNKNOWN,
+ GST_H264_ALIGNMENT_NAL,
+ GST_H264_ALIGNMENT_AU
+} GstH264Alignment;
+
+/* Policy for bundling NALs into STAP-A aggregate packets. */
+typedef enum
+{
+ GST_RTP_H264_AGGREGATE_NONE,
+ GST_RTP_H264_AGGREGATE_ZERO_LATENCY,
+ GST_RTP_H264_AGGREGATE_MAX_STAP,
+} GstRTPH264AggregateMode;
+
+struct _GstRtpH264Pay
+{
+ GstRTPBasePayload payload;
+
+ /* presumably profile_idc taken from the SPS — see gstrtph264pay.c */
+ guint profile;
+ /* collected parameter-set NALs (GstBuffer *), owned by the arrays */
+ GPtrArray *sps, *pps;
+
+ /* input framing/alignment as negotiated on the sink pad */
+ GstH264StreamFormat stream_format;
+ GstH264Alignment alignment;
+ /* size in bytes of the NAL length prefix for AVC input */
+ guint nal_length_size;
+ GArray *queue;
+
+ /* "sprop-parameter-sets" property; update_caps set when it changes */
+ gchar *sprop_parameter_sets;
+ gboolean update_caps;
+
+ GstAdapter *adapter;
+
+ /* "config-interval" property and SPS/PPS (re)send bookkeeping */
+ gint spspps_interval;
+ gboolean send_spspps;
+ GstClockTime last_spspps;
+
+ gint fps_num;
+ gint fps_denum;
+
+ /* TRUE if the next NALU processed should have the DELTA_UNIT flag */
+ gboolean delta_unit;
+ /* TRUE if the next NALU processed should have the DISCONT flag */
+ gboolean discont;
+
+ /* aggregate buffers with STAP-A */
+ GstBufferList *bundle;
+ guint bundle_size;
+ gboolean bundle_contains_vcl;
+ GstRTPH264AggregateMode aggregate_mode;
+};
+
+struct _GstRtpH264PayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_h264_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_H264_PAY_H__ */
diff --git a/gst/rtp/gstrtph265depay.c b/gst/rtp/gstrtph265depay.c
new file mode 100644
index 0000000000..41d2762ffe
--- /dev/null
+++ b/gst/rtp/gstrtph265depay.c
@@ -0,0 +1,1639 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2014> Jurgen Slowack <jurgenslowack@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdio.h>
+#include <string.h>
+
+#include <gst/base/gstbitreader.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "gstrtph265depay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtph265depay_debug);
+#define GST_CAT_DEFAULT (rtph265depay_debug)
+
+/* This is what we'll default to when downstream hasn't
+ * expressed a restriction or preference via caps */
+#define DEFAULT_STREAM_FORMAT GST_H265_STREAM_FORMAT_BYTESTREAM
+#define DEFAULT_ACCESS_UNIT FALSE
+
+/* 4-byte Annex B start code (three zero bytes followed by 0x01),
+ * prepended to each NAL when outputting byte-stream format */
+static const guint8 sync_bytes[] = { 0, 0, 0, 1 };
+
+/* Source pad: hvc1 with AU alignment, or byte-stream with NAL/AU
+ * alignment. hev1 output is not implemented yet (see FIXME). */
+static GstStaticPadTemplate gst_rtp_h265_depay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS
+ ("video/x-h265, stream-format=(string)hvc1, alignment=(string)au; "
+ /* FIXME: hev1 format is not supported yet */
+ /* "video/x-h265, "
+ "stream-format = (string) hev1, alignment = (string) au; " */
+ "video/x-h265, "
+ "stream-format = (string) byte-stream, alignment = (string) { nal, au }")
+ );
+
+/* Sink pad: H265 RTP payload per RFC 7798; the many optional SDP
+ * parameters are listed below for reference but not matched on. */
+static GstStaticPadTemplate gst_rtp_h265_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H265\"")
+ /* optional parameters */
+ /* "profile-space = (int) [ 0, 3 ], " */
+ /* "profile-id = (int) [ 0, 31 ], " */
+ /* "tier-flag = (int) [ 0, 1 ], " */
+ /* "level-id = (int) [ 0, 255 ], " */
+ /* "interop-constraints = (string) ANY, " */
+ /* "profile-compatibility-indicator = (string) ANY, " */
+ /* "sprop-sub-layer-id = (int) [ 0, 6 ], " */
+ /* "recv-sub-layer-id = (int) [ 0, 6 ], " */
+ /* "max-recv-level-id = (int) [ 0, 255 ], " */
+ /* "tx-mode = (string) {MST , SST}, " */
+ /* "sprop-vps = (string) ANY, " */
+ /* "sprop-sps = (string) ANY, " */
+ /* "sprop-pps = (string) ANY, " */
+ /* "sprop-sei = (string) ANY, " */
+ /* "max-lsr = (int) ANY, " *//* MUST be in the range of MaxLumaSR to 16 * MaxLumaSR, inclusive */
+ /* "max-lps = (int) ANY, " *//* MUST be in the range of MaxLumaPS to 16 * MaxLumaPS, inclusive */
+ /* "max-cpb = (int) ANY, " *//* MUST be in the range of MaxCPB to 16 * MaxCPB, inclusive */
+ /* "max-dpb = (int) [1, 16], " */
+ /* "max-br = (int) ANY, " *//* MUST be in the range of MaxBR to 16 * MaxBR, inclusive, for the highest level */
+ /* "max-tr = (int) ANY, " *//* MUST be in the range of MaxTileRows to 16 * MaxTileRows, inclusive, for the highest level */
+ /* "max-tc = (int) ANY, " *//* MUST be in the range of MaxTileCols to 16 * MaxTileCols, inclusive, for the highest level */
+ /* "max-fps = (int) ANY, " */
+ /* "sprop-max-don-diff = (int) [0, 32767], " */
+ /* "sprop-depack-buf-nalus = (int) [0, 32767], " */
+ /* "sprop-depack-buf-nalus = (int) [0, 4294967295], " */
+ /* "depack-buf-cap = (int) [1, 4294967295], " */
+ /* "sprop-segmentation-id = (int) [0, 3], " */
+ /* "sprop-spatial-segmentation-idc = (string) ANY, " */
+ /* "dec-parallel-cap = (string) ANY, " */
+ );
+
+#define gst_rtp_h265_depay_parent_class parent_class
+/* Register the GType together with its debug category. */
+G_DEFINE_TYPE_WITH_CODE (GstRtpH265Depay, gst_rtp_h265_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD, GST_DEBUG_CATEGORY_INIT (rtph265depay_debug,
+ "rtph265depay", 0, "H265 Video RTP Depayloader"));
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph265depay, "rtph265depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H265_DEPAY, rtp_element_init (plugin));
+
+/* Forward declarations for GObject/GstElement vfuncs and helpers. */
+static void gst_rtp_h265_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_h265_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static GstBuffer *gst_rtp_h265_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_h265_depay_setcaps (GstRTPBaseDepayload * filter,
+ GstCaps * caps);
+static gboolean gst_rtp_h265_depay_handle_event (GstRTPBaseDepayload * depay,
+ GstEvent * event);
+static GstBuffer *gst_rtp_h265_complete_au (GstRtpH265Depay * rtph265depay,
+ GstClockTime * out_timestamp, gboolean * out_keyframe);
+static void gst_rtp_h265_depay_push (GstRtpH265Depay * rtph265depay,
+ GstBuffer * outbuf, gboolean keyframe, GstClockTime timestamp,
+ gboolean marker);
+
+
+/* Class initialisation: wire up vfuncs, pad templates and metadata. */
+static void
+gst_rtp_h265_depay_class_init (GstRtpH265DepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_h265_depay_finalize;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h265_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h265_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP H265 depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts H265 video from RTP packets (RFC 7798)",
+ "Jurgen Slowack <jurgenslowack@gmail.com>");
+ gstelement_class->change_state = gst_rtp_h265_depay_change_state;
+
+ /* base-depayload vfuncs doing the actual work */
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_h265_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_h265_depay_setcaps;
+ gstrtpbasedepayload_class->handle_event = gst_rtp_h265_depay_handle_event;
+}
+
+/* Instance initialisation: create the NAL/AU adapters and the
+ * parameter-set stores, and select the compile-time output defaults. */
+static void
+gst_rtp_h265_depay_init (GstRtpH265Depay * depay)
+{
+  GDestroyNotify buf_unref = (GDestroyNotify) gst_buffer_unref;
+
+  depay->adapter = gst_adapter_new ();
+  depay->picture_adapter = gst_adapter_new ();
+
+  depay->output_format = DEFAULT_STREAM_FORMAT;
+  depay->byte_stream =
+      (DEFAULT_STREAM_FORMAT == GST_H265_STREAM_FORMAT_BYTESTREAM);
+  depay->stream_format = NULL;
+  depay->merge = DEFAULT_ACCESS_UNIT;
+
+  /* the arrays own their parameter-set buffers */
+  depay->vps = g_ptr_array_new_with_free_func (buf_unref);
+  depay->sps = g_ptr_array_new_with_free_func (buf_unref);
+  depay->pps = g_ptr_array_new_with_free_func (buf_unref);
+}
+
+/* Reset decoding state: flush both adapters and drop all stored
+ * parameter sets. When @hard is TRUE (full shutdown) also release the
+ * negotiated downstream allocator and reset the allocation params. */
+static void
+gst_rtp_h265_depay_reset (GstRtpH265Depay * rtph265depay, gboolean hard)
+{
+ gst_adapter_clear (rtph265depay->adapter);
+ rtph265depay->wait_start = TRUE;
+ gst_adapter_clear (rtph265depay->picture_adapter);
+ rtph265depay->picture_start = FALSE;
+ rtph265depay->last_keyframe = FALSE;
+ rtph265depay->last_ts = 0;
+ rtph265depay->current_fu_type = 0;
+ rtph265depay->new_codec_data = FALSE;
+ /* free func of the arrays unrefs the stored buffers */
+ g_ptr_array_set_size (rtph265depay->vps, 0);
+ g_ptr_array_set_size (rtph265depay->sps, 0);
+ g_ptr_array_set_size (rtph265depay->pps, 0);
+
+ if (hard) {
+ if (rtph265depay->allocator != NULL) {
+ gst_object_unref (rtph265depay->allocator);
+ rtph265depay->allocator = NULL;
+ }
+ gst_allocation_params_init (&rtph265depay->params);
+ }
+}
+
+/* Push out any partially accumulated access unit so buffered data is
+ * not lost; a no-op when no picture has been started. */
+static void
+gst_rtp_h265_depay_drain (GstRtpH265Depay * rtph265depay)
+{
+  GstBuffer *outbuf;
+  GstClockTime timestamp;
+  gboolean keyframe;
+
+  if (!rtph265depay->picture_start)
+    return;
+
+  outbuf = gst_rtp_h265_complete_au (rtph265depay, &timestamp, &keyframe);
+  if (outbuf != NULL)
+    gst_rtp_h265_depay_push (rtph265depay, outbuf, keyframe, timestamp, FALSE);
+}
+
+/* GObject finalize: release codec_data, both adapters and the
+ * parameter-set arrays (which unref their buffers via the free func). */
+static void
+gst_rtp_h265_depay_finalize (GObject * object)
+{
+ GstRtpH265Depay *rtph265depay;
+
+ rtph265depay = GST_RTP_H265_DEPAY (object);
+
+ if (rtph265depay->codec_data)
+ gst_buffer_unref (rtph265depay->codec_data);
+
+ g_object_unref (rtph265depay->adapter);
+ g_object_unref (rtph265depay->picture_adapter);
+
+ g_ptr_array_free (rtph265depay->vps, TRUE);
+ g_ptr_array_free (rtph265depay->sps, TRUE);
+ g_ptr_array_free (rtph265depay->pps, TRUE);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Map a GstH265StreamFormat enum value to its caps nick string;
+ * unrecognised values yield "unknown". */
+static inline const gchar *
+stream_format_get_nick (GstH265StreamFormat fmt)
+{
+  if (fmt == GST_H265_STREAM_FORMAT_BYTESTREAM)
+    return "byte-stream";
+  if (fmt == GST_H265_STREAM_FORMAT_HVC1)
+    return "hvc1";
+  if (fmt == GST_H265_STREAM_FORMAT_HEV1)
+    return "hev1";
+  return "unknown";
+}
+
+/* Inspect downstream's allowed caps and pick the output stream-format
+ * (byte-stream/hvc1/hev1) and alignment (nal/au). Falls back to the
+ * compile-time defaults when downstream expresses no preference.
+ * Updates output_format, byte_stream, stream_format and merge. */
+static void
+gst_rtp_h265_depay_negotiate (GstRtpH265Depay * rtph265depay)
+{
+ GstH265StreamFormat stream_format = GST_H265_STREAM_FORMAT_UNKNOWN;
+ GstCaps *caps;
+ gint merge = -1;
+
+ caps =
+ gst_pad_get_allowed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph265depay));
+
+ GST_DEBUG_OBJECT (rtph265depay, "allowed caps: %" GST_PTR_FORMAT, caps);
+
+ if (caps) {
+ if (gst_caps_get_size (caps) > 0) {
+ /* only the first (most preferred) structure is considered */
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ const gchar *str = NULL;
+
+ if ((str = gst_structure_get_string (s, "stream-format"))) {
+ rtph265depay->stream_format = g_intern_string (str);
+
+ if (strcmp (str, "hev1") == 0) {
+ stream_format = GST_H265_STREAM_FORMAT_HEV1;
+ } else if (strcmp (str, "hvc1") == 0) {
+ stream_format = GST_H265_STREAM_FORMAT_HVC1;
+ } else if (strcmp (str, "byte-stream") == 0) {
+ stream_format = GST_H265_STREAM_FORMAT_BYTESTREAM;
+ } else {
+ GST_DEBUG_OBJECT (rtph265depay, "unknown stream-format: %s", str);
+ }
+ }
+
+ if ((str = gst_structure_get_string (s, "alignment"))) {
+ if (strcmp (str, "au") == 0) {
+ merge = TRUE;
+ } else if (strcmp (str, "nal") == 0) {
+ merge = FALSE;
+ } else {
+ GST_DEBUG_OBJECT (rtph265depay, "unknown alignment: %s", str);
+ }
+ }
+ }
+ gst_caps_unref (caps);
+ }
+
+ if (stream_format != GST_H265_STREAM_FORMAT_UNKNOWN) {
+ GST_DEBUG_OBJECT (rtph265depay, "downstream wants stream-format %s",
+ stream_format_get_nick (stream_format));
+ rtph265depay->output_format = stream_format;
+ } else {
+ GST_DEBUG_OBJECT (rtph265depay, "defaulting to output stream-format %s",
+ stream_format_get_nick (DEFAULT_STREAM_FORMAT));
+ rtph265depay->stream_format =
+ stream_format_get_nick (DEFAULT_STREAM_FORMAT);
+ rtph265depay->output_format = DEFAULT_STREAM_FORMAT;
+ }
+ rtph265depay->byte_stream =
+ (rtph265depay->output_format == GST_H265_STREAM_FORMAT_BYTESTREAM);
+
+ if (merge != -1) {
+ GST_DEBUG_OBJECT (rtph265depay, "downstream requires merge %d", merge);
+ rtph265depay->merge = merge;
+ } else {
+ GST_DEBUG_OBJECT (rtph265depay, "defaulting to merge %d",
+ DEFAULT_ACCESS_UNIT);
+ rtph265depay->merge = DEFAULT_ACCESS_UNIT;
+ }
+}
+
+/* Parse sps_seq_parameter_set_id from an SPS NAL (@map covers the NAL
+ * starting at the 2-byte header; offset 15 skips the fixed header +
+ * profile_tier_level bytes preceding the id — TODO confirm against the
+ * H.265 SPS syntax for multi-layer streams).
+ *
+ * Returns FALSE if the NAL is too short or the Exp-Golomb read fails. */
+static gboolean
+parse_sps (GstMapInfo * map, guint32 * sps_id)
+{
+  GstBitReader br;
+
+  GST_MEMDUMP ("SPS", map->data, map->size);
+
+  /* Bounds-check BEFORE building the reader: the previous code computed
+   * map->data + 15 / map->size - 15 first, which for short NALs
+   * underflows the unsigned size and forms an out-of-range pointer. */
+  if (map->size < 16)
+    return FALSE;
+
+  gst_bit_reader_init (&br, map->data + 15, map->size - 15);
+
+  if (!gst_rtp_read_golomb (&br, sps_id))
+    return FALSE;
+
+  return TRUE;
+}
+
+/* Parse pps_pic_parameter_set_id and pps_seq_parameter_set_id from a
+ * PPS NAL; offset 2 skips the NAL header.
+ *
+ * Returns FALSE if the NAL is too short or an Exp-Golomb read fails. */
+static gboolean
+parse_pps (GstMapInfo * map, guint32 * sps_id, guint32 * pps_id)
+{
+  GstBitReader br;
+
+  GST_MEMDUMP ("PPS", map->data, map->size);
+
+  /* Bounds-check BEFORE building the reader: the previous code computed
+   * map->data + 2 / map->size - 2 first, which for short NALs
+   * underflows the unsigned size and forms an out-of-range pointer. */
+  if (map->size < 3)
+    return FALSE;
+
+  gst_bit_reader_init (&br, map->data + 2, map->size - 2);
+
+  if (!gst_rtp_read_golomb (&br, pps_id))
+    return FALSE;
+  if (!gst_rtp_read_golomb (&br, sps_id))
+    return FALSE;
+
+  return TRUE;
+}
+
+/* Set @caps on the source pad and, on success, run a downstream
+ * ALLOCATION query to pick up an allocator and allocation params for
+ * output buffers. Any previously negotiated allocator is replaced.
+ * Returns the result of gst_pad_set_caps(). */
+static gboolean
+gst_rtp_h265_depay_set_output_caps (GstRtpH265Depay * rtph265depay,
+ GstCaps * caps)
+{
+ GstAllocationParams params;
+ GstAllocator *allocator = NULL;
+ GstPad *srcpad;
+ gboolean res;
+
+ gst_allocation_params_init (&params);
+
+ srcpad = GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph265depay);
+
+ res = gst_pad_set_caps (srcpad, caps);
+
+ if (res) {
+ GstQuery *query;
+
+ query = gst_query_new_allocation (caps, TRUE);
+ if (!gst_pad_peer_query (srcpad, query)) {
+ /* not fatal: we fall back to the default allocator */
+ GST_DEBUG_OBJECT (rtph265depay, "downstream ALLOCATION query failed");
+ }
+
+ if (gst_query_get_n_allocation_params (query) > 0) {
+ gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
+ }
+
+ gst_query_unref (query);
+ }
+
+ if (rtph265depay->allocator)
+ gst_object_unref (rtph265depay->allocator);
+
+ rtph265depay->allocator = allocator;
+ rtph265depay->params = params;
+
+ return res;
+}
+
+/* (Re)configure the source pad caps from the negotiated output format
+ * and the collected VPS/SPS/PPS. For hvc1 output this builds an
+ * HEVCDecoderConfigurationRecord codec_data from the stored parameter
+ * sets; caps are only re-pushed when they differ from the current ones.
+ * For byte-stream output a codec_data buffer holding the SPS/PPS with
+ * start codes is kept aside to be sent in-stream later.
+ * Returns FALSE when caps could not be set or codec_data could not be
+ * built (e.g. no SPS yet for hvc1). */
+static gboolean
+gst_rtp_h265_set_src_caps (GstRtpH265Depay * rtph265depay)
+{
+ gboolean res;
+ GstCaps *old_caps;
+ GstCaps *srccaps;
+ GstPad *srcpad;
+
+ /* nothing to do yet for hvc1/hev1 until we have new, complete
+ * parameter sets */
+ if (!rtph265depay->byte_stream &&
+ (!rtph265depay->new_codec_data ||
+ rtph265depay->vps->len == 0 || rtph265depay->sps->len == 0
+ || rtph265depay->pps->len == 0))
+ return TRUE;
+
+ srccaps = gst_caps_new_simple ("video/x-h265",
+ "stream-format", G_TYPE_STRING, rtph265depay->stream_format,
+ "alignment", G_TYPE_STRING, rtph265depay->merge ? "au" : "nal", NULL);
+
+ /* hvc1: build the HEVCDecoderConfigurationRecord codec_data */
+ if (!rtph265depay->byte_stream) {
+ GstBuffer *codec_data;
+ gint i = 0;
+ gint len;
+ guint num_vps = rtph265depay->vps->len;
+ guint num_sps = rtph265depay->sps->len;
+ guint num_pps = rtph265depay->pps->len;
+ GstMapInfo map, nalmap;
+ guint8 *data;
+ guint8 num_arrays = 0;
+ guint new_size;
+ GstBitReader br;
+ guint32 tmp;
+ guint8 tmp8 = 0;
+ guint32 max_sub_layers_minus1, temporal_id_nesting_flag, chroma_format_idc,
+ bit_depth_luma_minus8, bit_depth_chroma_minus8,
+ min_spatial_segmentation_idc;
+
+ /* Fixme: Current implementation is not embedding SEI in codec_data */
+
+ if (num_sps == 0)
+ return FALSE;
+
+ /* start with 23 bytes header */
+ len = 23;
+
+ num_arrays = (num_vps > 0) + (num_sps > 0) + (num_pps > 0);
+ len += 3 * num_arrays;
+
+ /* add size of vps, sps & pps */
+ for (i = 0; i < num_vps; i++)
+ len += 2 + gst_buffer_get_size (g_ptr_array_index (rtph265depay->vps, i));
+ for (i = 0; i < num_sps; i++)
+ len += 2 + gst_buffer_get_size (g_ptr_array_index (rtph265depay->sps, i));
+ for (i = 0; i < num_pps; i++)
+ len += 2 + gst_buffer_get_size (g_ptr_array_index (rtph265depay->pps, i));
+
+ GST_DEBUG_OBJECT (rtph265depay,
+ "constructing codec_data: num_vps =%d num_sps=%d, num_pps=%d", num_vps,
+ num_sps, num_pps);
+
+ codec_data = gst_buffer_new_and_alloc (len);
+ gst_buffer_map (codec_data, &map, GST_MAP_READWRITE);
+ data = map.data;
+
+ memset (data, 0, map.size);
+
+ /* Parsing sps to get the info required further on */
+
+ gst_buffer_map (g_ptr_array_index (rtph265depay->sps, 0), &nalmap,
+ GST_MAP_READ);
+
+ max_sub_layers_minus1 = ((nalmap.data[2]) >> 1) & 0x07;
+ temporal_id_nesting_flag = nalmap.data[2] & 0x01;
+
+ gst_bit_reader_init (&br, nalmap.data + 15, nalmap.size - 15);
+
+ gst_rtp_read_golomb (&br, &tmp); /* sps_seq_parameter_set_id */
+ gst_rtp_read_golomb (&br, &chroma_format_idc); /* chroma_format_idc */
+
+ if (chroma_format_idc == 3)
+ gst_bit_reader_get_bits_uint8 (&br, &tmp8, 1); /* separate_colour_plane_flag */
+
+ gst_rtp_read_golomb (&br, &tmp); /* pic_width_in_luma_samples */
+ gst_rtp_read_golomb (&br, &tmp); /* pic_height_in_luma_samples */
+
+ gst_bit_reader_get_bits_uint8 (&br, &tmp8, 1); /* conformance_window_flag */
+ if (tmp8) {
+ gst_rtp_read_golomb (&br, &tmp); /* conf_win_left_offset */
+ gst_rtp_read_golomb (&br, &tmp); /* conf_win_right_offset */
+ gst_rtp_read_golomb (&br, &tmp); /* conf_win_top_offset */
+ gst_rtp_read_golomb (&br, &tmp); /* conf_win_bottom_offset */
+ }
+
+ gst_rtp_read_golomb (&br, &bit_depth_luma_minus8); /* bit_depth_luma_minus8 */
+ gst_rtp_read_golomb (&br, &bit_depth_chroma_minus8); /* bit_depth_chroma_minus8 */
+
+ GST_DEBUG_OBJECT (rtph265depay,
+ "Ignoring min_spatial_segmentation for now (assuming zero)");
+
+ min_spatial_segmentation_idc = 0; /* NOTE - we ignore this for now, but in a perfect world, we should continue parsing to obtain the real value */
+
+ gst_buffer_unmap (g_ptr_array_index (rtph265depay->sps, 0), &nalmap);
+
+ /* HEVCDecoderConfigurationVersion = 1 */
+ data[0] = 1;
+
+ /* Copy from profile_tier_level (Rec. ITU-T H.265 (04/2013) section 7.3.3
+ *
+ * profile_space | tier_flat | profile_idc |
+ * profile_compatibility_flags | constraint_indicator_flags |
+ * level_idc | progressive_source_flag | interlaced_source_flag
+ * non_packed_constraint_flag | frame_only_constraint_flag
+ * reserved_zero_44bits | level_idc */
+ gst_buffer_map (g_ptr_array_index (rtph265depay->sps, 0), &nalmap,
+ GST_MAP_READ);
+ for (i = 0; i < 12; i++)
+ data[i + 1] = nalmap.data[i];
+ gst_buffer_unmap (g_ptr_array_index (rtph265depay->sps, 0), &nalmap);
+
+ /* min_spatial_segmentation_idc */
+ GST_WRITE_UINT16_BE (data + 13, min_spatial_segmentation_idc);
+ data[13] |= 0xf0;
+ data[15] = 0xfc; /* keeping parrallelismType as zero (unknown) */
+ data[16] = 0xfc | chroma_format_idc;
+ data[17] = 0xf8 | bit_depth_luma_minus8;
+ data[18] = 0xf8 | bit_depth_chroma_minus8;
+ data[19] = 0x00; /* keep avgFrameRate as unspecified */
+ data[20] = 0x00; /* keep avgFrameRate as unspecified */
+ /* constFrameRate(2 bits): 0, stream may or may not be of constant framerate
+ * numTemporalLayers (3 bits): number of temporal layers, value from SPS
+ * TemporalIdNested (1 bit): sps_temporal_id_nesting_flag from SPS
+ * lengthSizeMinusOne (2 bits): plus 1 indicates the length of the NALUnitLength */
+ /* we always output NALs with 4-byte nal unit length markers (or sync code) */
+ data[21] = rtph265depay->byte_stream ? 0x00 : 0x03;
+ data[21] |= ((max_sub_layers_minus1 + 1) << 3);
+ data[21] |= (temporal_id_nesting_flag << 2);
+ GST_WRITE_UINT8 (data + 22, num_arrays); /* numOfArrays */
+
+ data += 23;
+
+ /* copy all VPS */
+ if (num_vps > 0) {
+ /* array_completeness | reserved_zero bit | nal_unit_type */
+ data[0] = 0x00 | 0x20;
+ data++;
+
+ GST_WRITE_UINT16_BE (data, num_vps);
+ data += 2;
+
+ for (i = 0; i < num_vps; i++) {
+ gsize nal_size =
+ gst_buffer_get_size (g_ptr_array_index (rtph265depay->vps, i));
+ GST_WRITE_UINT16_BE (data, nal_size);
+ gst_buffer_extract (g_ptr_array_index (rtph265depay->vps, i), 0,
+ data + 2, nal_size);
+ data += 2 + nal_size;
+ GST_DEBUG_OBJECT (rtph265depay, "Copied VPS %d of length %u", i,
+ (guint) nal_size);
+ }
+ }
+
+ /* copy all SPS */
+ if (num_sps > 0) {
+ /* array_completeness | reserved_zero bit | nal_unit_type */
+ data[0] = 0x00 | 0x21;
+ data++;
+
+ GST_WRITE_UINT16_BE (data, num_sps);
+ data += 2;
+
+ for (i = 0; i < num_sps; i++) {
+ gsize nal_size =
+ gst_buffer_get_size (g_ptr_array_index (rtph265depay->sps, i));
+ GST_WRITE_UINT16_BE (data, nal_size);
+ gst_buffer_extract (g_ptr_array_index (rtph265depay->sps, i), 0,
+ data + 2, nal_size);
+ data += 2 + nal_size;
+ GST_DEBUG_OBJECT (rtph265depay, "Copied SPS %d of length %u", i,
+ (guint) nal_size);
+ }
+ }
+
+ /* copy all PPS */
+ if (num_pps > 0) {
+ /* array_completeness | reserved_zero bit | nal_unit_type */
+ data[0] = 0x00 | 0x22;
+ data++;
+
+ GST_WRITE_UINT16_BE (data, num_pps);
+ data += 2;
+
+ for (i = 0; i < num_pps; i++) {
+ gsize nal_size =
+ gst_buffer_get_size (g_ptr_array_index (rtph265depay->pps, i));
+ GST_WRITE_UINT16_BE (data, nal_size);
+ gst_buffer_extract (g_ptr_array_index (rtph265depay->pps, i), 0,
+ data + 2, nal_size);
+ data += 2 + nal_size;
+ GST_DEBUG_OBJECT (rtph265depay, "Copied PPS %d of length %u", i,
+ (guint) nal_size);
+ }
+ }
+
+ /* shrink the buffer to the bytes actually written */
+ new_size = data - map.data;
+ gst_buffer_unmap (codec_data, &map);
+ gst_buffer_set_size (codec_data, new_size);
+
+ gst_caps_set_simple (srccaps,
+ "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
+ gst_buffer_unref (codec_data);
+ }
+
+ srcpad = GST_RTP_BASE_DEPAYLOAD_SRCPAD (rtph265depay);
+ old_caps = gst_pad_get_current_caps (srcpad);
+
+ if (old_caps == NULL || !gst_caps_is_equal (srccaps, old_caps)) {
+ res = gst_rtp_h265_depay_set_output_caps (rtph265depay, srccaps);
+ } else {
+ res = TRUE;
+ }
+
+ gst_caps_unref (srccaps);
+
+ /* Insert SPS and PPS into the stream on next opportunity */
+ if (rtph265depay->output_format != GST_H265_STREAM_FORMAT_HVC1
+ && (rtph265depay->sps->len > 0 || rtph265depay->pps->len > 0)) {
+ gint i;
+ GstBuffer *codec_data;
+ GstMapInfo map;
+ guint8 *data;
+ guint len = 0;
+
+ for (i = 0; i < rtph265depay->sps->len; i++) {
+ len += 4 + gst_buffer_get_size (g_ptr_array_index (rtph265depay->sps, i));
+ }
+
+ for (i = 0; i < rtph265depay->pps->len; i++) {
+ len += 4 + gst_buffer_get_size (g_ptr_array_index (rtph265depay->pps, i));
+ }
+
+ codec_data = gst_buffer_new_and_alloc (len);
+ gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
+ data = map.data;
+
+ /* each set is prefixed by a start code (byte-stream) or its
+ * 4-byte big-endian size */
+ for (i = 0; i < rtph265depay->sps->len; i++) {
+ GstBuffer *sps_buf = g_ptr_array_index (rtph265depay->sps, i);
+ guint sps_size = gst_buffer_get_size (sps_buf);
+
+ if (rtph265depay->byte_stream)
+ memcpy (data, sync_bytes, sizeof (sync_bytes));
+ else
+ GST_WRITE_UINT32_BE (data, sps_size);
+ gst_buffer_extract (sps_buf, 0, data + 4, -1);
+ data += 4 + sps_size;
+ }
+
+ for (i = 0; i < rtph265depay->pps->len; i++) {
+ GstBuffer *pps_buf = g_ptr_array_index (rtph265depay->pps, i);
+ guint pps_size = gst_buffer_get_size (pps_buf);
+
+ if (rtph265depay->byte_stream)
+ memcpy (data, sync_bytes, sizeof (sync_bytes));
+ else
+ GST_WRITE_UINT32_BE (data, pps_size);
+ gst_buffer_extract (pps_buf, 0, data + 4, -1);
+ data += 4 + pps_size;
+ }
+
+ gst_buffer_unmap (codec_data, &map);
+ if (rtph265depay->codec_data)
+ gst_buffer_unref (rtph265depay->codec_data);
+ rtph265depay->codec_data = codec_data;
+ }
+
+ if (res)
+ rtph265depay->new_codec_data = FALSE;
+
+ return res;
+}
+
+/* Store the VPS/SPS/PPS NAL @nal in the matching parameter-set array,
+ * replacing a previously stored set with the same id.
+ *
+ * Takes ownership of @nal: on a TRUE return the buffer is kept in one
+ * of the arrays; on FALSE (duplicate, unparsable, or not a
+ * parameter-set NAL) it is unreffed. A TRUE return means the set of
+ * parameter sets changed and codec_data must be regenerated. */
+gboolean
+gst_rtp_h265_add_vps_sps_pps (GstElement * rtph265, GPtrArray * vps_array,
+    GPtrArray * sps_array, GPtrArray * pps_array, GstBuffer * nal)
+{
+  GstMapInfo map;
+  guchar type;
+  guint i;
+
+  gst_buffer_map (nal, &map, GST_MAP_READ);
+
+  /* nal_unit_type lives in bits 1-6 of the first NAL header byte */
+  type = (map.data[0] >> 1) & 0x3f;
+
+  if (type == GST_H265_VPS_NUT) {
+    guint32 vps_id = (map.data[2] >> 4) & 0x0f;
+
+    for (i = 0; i < vps_array->len; i++) {
+      GstBuffer *vps = g_ptr_array_index (vps_array, i);
+      GstMapInfo vpsmap;
+      guint32 tmp_vps_id;
+
+      gst_buffer_map (vps, &vpsmap, GST_MAP_READ);
+      tmp_vps_id = (vpsmap.data[2] >> 4) & 0x0f;
+
+      if (vps_id == tmp_vps_id) {
+        if (map.size == vpsmap.size &&
+            memcmp (map.data, vpsmap.data, vpsmap.size) == 0) {
+          GST_LOG_OBJECT (rtph265, "Unchanged VPS %u, not updating", vps_id);
+          gst_buffer_unmap (vps, &vpsmap);
+          goto drop;
+        } else {
+          gst_buffer_unmap (vps, &vpsmap);
+          g_ptr_array_remove_index_fast (vps_array, i);
+          g_ptr_array_add (vps_array, nal);
+          GST_LOG_OBJECT (rtph265, "Modified VPS %u, replacing", vps_id);
+          goto done;
+        }
+      }
+      gst_buffer_unmap (vps, &vpsmap);
+    }
+    GST_LOG_OBJECT (rtph265, "Adding new VPS %u", vps_id);
+    g_ptr_array_add (vps_array, nal);
+  } else if (type == GST_H265_SPS_NUT) {
+    guint32 sps_id;
+
+    if (!parse_sps (&map, &sps_id)) {
+      GST_WARNING_OBJECT (rtph265, "Invalid SPS,"
+          " can't parse seq_parameter_set_id");
+      goto drop;
+    }
+
+    for (i = 0; i < sps_array->len; i++) {
+      GstBuffer *sps = g_ptr_array_index (sps_array, i);
+      GstMapInfo spsmap;
+      guint32 tmp_sps_id;
+
+      gst_buffer_map (sps, &spsmap, GST_MAP_READ);
+      /* Fix: check the parse result here — the previous code ignored it
+       * and compared against a potentially uninitialized tmp_sps_id. */
+      if (!parse_sps (&spsmap, &tmp_sps_id)) {
+        gst_buffer_unmap (sps, &spsmap);
+        continue;
+      }
+
+      if (sps_id == tmp_sps_id) {
+        if (map.size == spsmap.size &&
+            memcmp (map.data, spsmap.data, spsmap.size) == 0) {
+          GST_LOG_OBJECT (rtph265, "Unchanged SPS %u, not updating", sps_id);
+          gst_buffer_unmap (sps, &spsmap);
+          goto drop;
+        } else {
+          gst_buffer_unmap (sps, &spsmap);
+          g_ptr_array_remove_index_fast (sps_array, i);
+          g_ptr_array_add (sps_array, nal);
+          GST_LOG_OBJECT (rtph265, "Modified SPS %u, replacing", sps_id);
+          goto done;
+        }
+      }
+      gst_buffer_unmap (sps, &spsmap);
+    }
+    GST_LOG_OBJECT (rtph265, "Adding new SPS %u", sps_id);
+    g_ptr_array_add (sps_array, nal);
+  } else if (type == GST_H265_PPS_NUT) {
+    guint32 sps_id;
+    guint32 pps_id;
+
+    if (!parse_pps (&map, &sps_id, &pps_id)) {
+      GST_WARNING_OBJECT (rtph265, "Invalid PPS,"
+          " can't parse seq_parameter_set_id or pic_parameter_set_id");
+      goto drop;
+    }
+
+    for (i = 0; i < pps_array->len; i++) {
+      GstBuffer *pps = g_ptr_array_index (pps_array, i);
+      GstMapInfo ppsmap;
+      guint32 tmp_sps_id;
+      guint32 tmp_pps_id;
+
+      gst_buffer_map (pps, &ppsmap, GST_MAP_READ);
+      /* Fix: same uninitialized-id hazard as in the SPS loop above. */
+      if (!parse_pps (&ppsmap, &tmp_sps_id, &tmp_pps_id)) {
+        gst_buffer_unmap (pps, &ppsmap);
+        continue;
+      }
+
+      if (pps_id == tmp_pps_id) {
+        if (map.size == ppsmap.size &&
+            memcmp (map.data, ppsmap.data, ppsmap.size) == 0) {
+          GST_LOG_OBJECT (rtph265, "Unchanged PPS %u:%u, not updating", sps_id,
+              pps_id);
+          gst_buffer_unmap (pps, &ppsmap);
+          goto drop;
+        } else {
+          gst_buffer_unmap (pps, &ppsmap);
+          g_ptr_array_remove_index_fast (pps_array, i);
+          g_ptr_array_add (pps_array, nal);
+          GST_LOG_OBJECT (rtph265, "Modified PPS %u:%u, replacing",
+              sps_id, pps_id);
+          goto done;
+        }
+      }
+      gst_buffer_unmap (pps, &ppsmap);
+    }
+    /* use %u like the sibling messages: pps_id is a guint32 */
+    GST_LOG_OBJECT (rtph265, "Adding new PPS %u:%u", sps_id, pps_id);
+    g_ptr_array_add (pps_array, nal);
+  } else {
+    goto drop;
+  }
+
+done:
+  gst_buffer_unmap (nal, &map);
+
+  return TRUE;
+
+drop:
+  gst_buffer_unmap (nal, &map);
+  gst_buffer_unref (nal);
+
+  return FALSE;
+}
+
+
+/* Convenience wrapper: stash @nal in this depayloader's parameter-set
+ * arrays and, if anything changed, flag that codec_data needs to be
+ * rebuilt. Ownership of @nal is transferred. */
+static void
+gst_rtp_h265_depay_add_vps_sps_pps (GstRtpH265Depay * rtph265depay,
+    GstBuffer * nal)
+{
+  gboolean changed;
+
+  changed = gst_rtp_h265_add_vps_sps_pps (GST_ELEMENT (rtph265depay),
+      rtph265depay->vps, rtph265depay->sps, rtph265depay->pps, nal);
+
+  if (changed)
+    rtph265depay->new_codec_data = TRUE;
+}
+
+/* RTP caps handler: read sprop-vps/sps/pps from the SDP-derived caps,
+ * negotiate the output format with downstream and set source caps.
+ * For byte-stream output the base64 parameter sets become a start-code
+ * prefixed codec_data buffer sent in-stream later; for hvc1 they are
+ * stored individually so a decoder config record can be built.
+ * Returns TRUE also when caps are incomplete (setcaps is retried when
+ * in-band parameter sets arrive). */
+static gboolean
+gst_rtp_h265_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ gint clock_rate;
+ GstStructure *structure = gst_caps_get_structure (caps, 0);
+ GstRtpH265Depay *rtph265depay;
+ const gchar *vps;
+ const gchar *sps;
+ const gchar *pps;
+ gchar *ps;
+ GstMapInfo map;
+ guint8 *ptr;
+
+ rtph265depay = GST_RTP_H265_DEPAY (depayload);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000;
+ depayload->clock_rate = clock_rate;
+
+ /* Base64 encoded, comma separated config NALs */
+ vps = gst_structure_get_string (structure, "sprop-vps");
+ sps = gst_structure_get_string (structure, "sprop-sps");
+ pps = gst_structure_get_string (structure, "sprop-pps");
+ if (vps == NULL || sps == NULL || pps == NULL) {
+ ps = NULL;
+ } else {
+ ps = g_strdup_printf ("%s,%s,%s", vps, sps, pps);
+ }
+
+ /* negotiate with downstream w.r.t. output format and alignment */
+ gst_rtp_h265_depay_negotiate (rtph265depay);
+
+ if (rtph265depay->byte_stream && ps != NULL) {
+ /* for bytestream we only need the parameter sets but we don't error out
+ * when they are not there, we assume they are in the stream. */
+ gchar **params;
+ GstBuffer *codec_data;
+ guint len, total;
+ gint i;
+
+ params = g_strsplit (ps, ",", 0);
+
+ /* count total number of bytes in base64. Also include the sync bytes in
+ * front of the params. */
+ len = 0;
+ for (i = 0; params[i]; i++) {
+ len += strlen (params[i]);
+ len += sizeof (sync_bytes);
+ }
+ /* we seriously overshoot the length, but it's fine. */
+ codec_data = gst_buffer_new_and_alloc (len);
+
+ gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
+ ptr = map.data;
+ total = 0;
+ for (i = 0; params[i]; i++) {
+ guint save = 0;
+ gint state = 0;
+
+ GST_DEBUG_OBJECT (depayload, "decoding param %d (%s)", i, params[i]);
+ memcpy (ptr, sync_bytes, sizeof (sync_bytes));
+ ptr += sizeof (sync_bytes);
+ len =
+ g_base64_decode_step (params[i], strlen (params[i]), ptr, &state,
+ &save);
+ GST_DEBUG_OBJECT (depayload, "decoded %d bytes", len);
+ total += len + sizeof (sync_bytes);
+ ptr += len;
+ }
+ gst_buffer_unmap (codec_data, &map);
+ /* trim to what was actually decoded */
+ gst_buffer_resize (codec_data, 0, total);
+ g_strfreev (params);
+
+ /* keep the codec_data, we need to send it as the first buffer. We cannot
+ * push it in the adapter because the adapter might be flushed on discont.
+ */
+ if (rtph265depay->codec_data)
+ gst_buffer_unref (rtph265depay->codec_data);
+ rtph265depay->codec_data = codec_data;
+ } else if (!rtph265depay->byte_stream) {
+ gchar **params;
+ gint i;
+
+ if (ps == NULL)
+ goto incomplete_caps;
+
+ params = g_strsplit (ps, ",", 0);
+
+ GST_DEBUG_OBJECT (depayload, "we have %d params", g_strv_length (params));
+
+ /* decode each base64 parameter set into its own buffer and store it */
+ for (i = 0; params[i]; i++) {
+ GstBuffer *nal;
+ GstMapInfo nalmap;
+ gsize nal_len;
+ guint save = 0;
+ gint state = 0;
+
+ nal_len = strlen (params[i]);
+ if (nal_len == 0) {
+ GST_WARNING_OBJECT (depayload, "empty param '%s' (#%d)", params[i], i);
+ continue;
+ }
+ nal = gst_buffer_new_and_alloc (nal_len);
+ gst_buffer_map (nal, &nalmap, GST_MAP_READWRITE);
+
+ nal_len =
+ g_base64_decode_step (params[i], nal_len, nalmap.data, &state, &save);
+
+ GST_DEBUG_OBJECT (depayload, "adding param %d as %s", i,
+ (((nalmap.data[0] >> 1) & 0x3f) ==
+ 32) ? "VPS" : (((nalmap.data[0] >> 1) & 0x3f) ==
+ 33) ? "SPS" : "PPS");
+
+ gst_buffer_unmap (nal, &nalmap);
+ gst_buffer_set_size (nal, nal_len);
+
+ gst_rtp_h265_depay_add_vps_sps_pps (rtph265depay, nal);
+ }
+ g_strfreev (params);
+
+ if (rtph265depay->vps->len == 0 || rtph265depay->sps->len == 0 ||
+ rtph265depay->pps->len == 0) {
+ goto incomplete_caps;
+ }
+ }
+
+ g_free (ps);
+
+ return gst_rtp_h265_set_src_caps (rtph265depay);
+
+ /* ERRORS */
+incomplete_caps:
+ {
+ GST_DEBUG_OBJECT (depayload, "we have incomplete caps,"
+ " doing setcaps later");
+ g_free (ps);
+ return TRUE;
+ }
+}
+
+static GstBuffer *
+gst_rtp_h265_depay_allocate_output_buffer (GstRtpH265Depay * depay, gsize size)
+{
+ GstBuffer *buffer = NULL;
+
+ g_return_val_if_fail (size > 0, NULL);
+
+ GST_LOG_OBJECT (depay, "want output buffer of %u bytes", (guint) size);
+
+ buffer = gst_buffer_new_allocate (depay->allocator, size, &depay->params);
+ if (buffer == NULL) {
+ GST_INFO_OBJECT (depay, "couldn't allocate output buffer");
+ buffer = gst_buffer_new_allocate (NULL, size, NULL);
+ }
+
+ return buffer;
+}
+
+static GstBuffer *
+gst_rtp_h265_complete_au (GstRtpH265Depay * rtph265depay,
+ GstClockTime * out_timestamp, gboolean * out_keyframe)
+{
+ GstBufferList *list;
+ GstMapInfo outmap;
+ GstBuffer *outbuf;
+ guint outsize, offset = 0;
+ gint b, n_bufs, m, n_mem;
+
+ /* we had a picture in the adapter and we completed it */
+ GST_DEBUG_OBJECT (rtph265depay, "taking completed AU");
+ outsize = gst_adapter_available (rtph265depay->picture_adapter);
+
+ outbuf = gst_rtp_h265_depay_allocate_output_buffer (rtph265depay, outsize);
+
+ if (outbuf == NULL)
+ return NULL;
+
+ if (!gst_buffer_map (outbuf, &outmap, GST_MAP_WRITE))
+ return NULL;
+
+ list = gst_adapter_take_buffer_list (rtph265depay->picture_adapter, outsize);
+
+ n_bufs = gst_buffer_list_length (list);
+ for (b = 0; b < n_bufs; ++b) {
+ GstBuffer *buf = gst_buffer_list_get (list, b);
+
+ n_mem = gst_buffer_n_memory (buf);
+ for (m = 0; m < n_mem; ++m) {
+ GstMemory *mem = gst_buffer_peek_memory (buf, m);
+ gsize mem_size = gst_memory_get_sizes (mem, NULL, NULL);
+ GstMapInfo mem_map;
+
+ if (gst_memory_map (mem, &mem_map, GST_MAP_READ)) {
+ memcpy (outmap.data + offset, mem_map.data, mem_size);
+ gst_memory_unmap (mem, &mem_map);
+ } else {
+ memset (outmap.data + offset, 0, mem_size);
+ }
+ offset += mem_size;
+ }
+
+ gst_rtp_copy_video_meta (rtph265depay, outbuf, buf);
+ }
+ gst_buffer_list_unref (list);
+ gst_buffer_unmap (outbuf, &outmap);
+
+ *out_timestamp = rtph265depay->last_ts;
+ *out_keyframe = rtph265depay->last_keyframe;
+
+ rtph265depay->last_keyframe = FALSE;
+ rtph265depay->picture_start = FALSE;
+
+ return outbuf;
+}
+
+/* VPS/SPS/PPS/RADL/TSA/RASL/IDR/CRA is considered key, all others DELTA;
+ * so downstream waiting for keyframe can pick up at VPS/SPS/PPS/IDR */
+
+#define NAL_TYPE_IS_PARAMETER_SET(nt) ( ((nt) == GST_H265_VPS_NUT)\
+ || ((nt) == GST_H265_SPS_NUT)\
+ || ((nt) == GST_H265_PPS_NUT) )
+
+#define NAL_TYPE_IS_CODED_SLICE_SEGMENT(nt) ( ((nt) == GST_H265_NAL_SLICE_TRAIL_N)\
+ || ((nt) == GST_H265_NAL_SLICE_TRAIL_R)\
+ || ((nt) == GST_H265_NAL_SLICE_TSA_N)\
+ || ((nt) == GST_H265_NAL_SLICE_TSA_R)\
+ || ((nt) == GST_H265_NAL_SLICE_STSA_N)\
+ || ((nt) == GST_H265_NAL_SLICE_STSA_R)\
+ || ((nt) == GST_H265_NAL_SLICE_RASL_N)\
+ || ((nt) == GST_H265_NAL_SLICE_RASL_R)\
+ || ((nt) == GST_H265_NAL_SLICE_BLA_W_LP)\
+ || ((nt) == GST_H265_NAL_SLICE_BLA_W_RADL)\
+ || ((nt) == GST_H265_NAL_SLICE_BLA_N_LP)\
+ || ((nt) == GST_H265_NAL_SLICE_IDR_W_RADL)\
+ || ((nt) == GST_H265_NAL_SLICE_IDR_N_LP)\
+ || ((nt) == GST_H265_NAL_SLICE_CRA_NUT) )
+
+/* Intra random access point */
+#define NAL_TYPE_IS_IRAP(nt) (((nt) == GST_H265_NAL_SLICE_BLA_W_LP) \
+ || ((nt) == GST_H265_NAL_SLICE_BLA_W_RADL) \
+ || ((nt) == GST_H265_NAL_SLICE_BLA_N_LP) \
+ || ((nt) == GST_H265_NAL_SLICE_IDR_W_RADL) \
+ || ((nt) == GST_H265_NAL_SLICE_IDR_N_LP) \
+ || ((nt) == GST_H265_NAL_SLICE_CRA_NUT))
+
+#define NAL_TYPE_IS_KEY(nt) (NAL_TYPE_IS_PARAMETER_SET(nt) || NAL_TYPE_IS_IRAP(nt))
+
+static void
+gst_rtp_h265_depay_push (GstRtpH265Depay * rtph265depay, GstBuffer * outbuf,
+ gboolean keyframe, GstClockTime timestamp, gboolean marker)
+{
+ /* prepend codec_data */
+ if (rtph265depay->codec_data) {
+ GST_DEBUG_OBJECT (rtph265depay, "prepending codec_data");
+ gst_rtp_copy_video_meta (rtph265depay, rtph265depay->codec_data, outbuf);
+ outbuf = gst_buffer_append (rtph265depay->codec_data, outbuf);
+ rtph265depay->codec_data = NULL;
+ keyframe = TRUE;
+ }
+ outbuf = gst_buffer_make_writable (outbuf);
+
+ gst_rtp_drop_non_video_meta (rtph265depay, outbuf);
+
+ GST_BUFFER_PTS (outbuf) = timestamp;
+
+ if (keyframe)
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (marker)
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER);
+
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtph265depay), outbuf);
+}
+
+static void
+gst_rtp_h265_depay_handle_nal (GstRtpH265Depay * rtph265depay, GstBuffer * nal,
+ GstClockTime in_timestamp, gboolean marker)
+{
+ GstRTPBaseDepayload *depayload = GST_RTP_BASE_DEPAYLOAD (rtph265depay);
+ gint nal_type;
+ GstMapInfo map;
+ GstBuffer *outbuf = NULL;
+ GstClockTime out_timestamp;
+ gboolean keyframe, out_keyframe;
+
+ gst_buffer_map (nal, &map, GST_MAP_READ);
+ if (G_UNLIKELY (map.size < 5))
+ goto short_nal;
+
+ nal_type = (map.data[4] >> 1) & 0x3f;
+ GST_DEBUG_OBJECT (rtph265depay, "handle NAL type %d (RTP marker bit %d)",
+ nal_type, marker);
+
+ keyframe = NAL_TYPE_IS_KEY (nal_type);
+
+ out_keyframe = keyframe;
+ out_timestamp = in_timestamp;
+
+ if (!rtph265depay->byte_stream) {
+ if (NAL_TYPE_IS_PARAMETER_SET (nal_type)) {
+ gst_rtp_h265_depay_add_vps_sps_pps (rtph265depay,
+ gst_buffer_copy_region (nal, GST_BUFFER_COPY_ALL,
+ 4, gst_buffer_get_size (nal) - 4));
+ gst_buffer_unmap (nal, &map);
+ gst_buffer_unref (nal);
+ return;
+ } else if (rtph265depay->sps->len == 0 || rtph265depay->pps->len == 0) {
+ /* Down push down any buffer in non-bytestream mode if the SPS/PPS haven't
+ * go through yet
+ */
+ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
+ gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+ gst_structure_new ("GstForceKeyUnit",
+ "all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
+ gst_buffer_unmap (nal, &map);
+ gst_buffer_unref (nal);
+ return;
+ }
+
+ if (rtph265depay->new_codec_data &&
+ rtph265depay->sps->len > 0 && rtph265depay->pps->len > 0)
+ gst_rtp_h265_set_src_caps (rtph265depay);
+ }
+
+ if (rtph265depay->merge) {
+ gboolean start = FALSE, complete = FALSE;
+
+ /* marker bit isn't mandatory so in the following code we try to detect
+ * an AU boundary (see H.265 spec section 7.4.2.4.4) */
+ if (!marker) {
+ if (NAL_TYPE_IS_CODED_SLICE_SEGMENT (nal_type)) {
+ /* A NAL unit (X) ends an access unit if the next-occurring VCL NAL unit (Y) has the high-order bit of the first byte after its NAL unit header equal to 1 */
+ start = TRUE;
+ if (((map.data[6] >> 7) & 0x01) == 1) {
+ complete = TRUE;
+ }
+ } else if ((nal_type >= 32 && nal_type <= 35)
+ || nal_type == 39 || (nal_type >= 41 && nal_type <= 44)
+ || (nal_type >= 48 && nal_type <= 55)) {
+ /* VPS, SPS, PPS, SEI, ... terminate an access unit */
+ complete = TRUE;
+ }
+ GST_DEBUG_OBJECT (depayload, "start %d, complete %d", start, complete);
+
+ if (complete && rtph265depay->picture_start)
+ outbuf = gst_rtp_h265_complete_au (rtph265depay, &out_timestamp,
+ &out_keyframe);
+ }
+ /* add to adapter */
+ gst_buffer_unmap (nal, &map);
+
+ GST_DEBUG_OBJECT (depayload, "adding NAL to picture adapter");
+ gst_adapter_push (rtph265depay->picture_adapter, nal);
+ rtph265depay->last_ts = in_timestamp;
+ rtph265depay->last_keyframe |= keyframe;
+ rtph265depay->picture_start |= start;
+
+ if (marker)
+ outbuf = gst_rtp_h265_complete_au (rtph265depay, &out_timestamp,
+ &out_keyframe);
+ } else {
+ /* no merge, output is input nal */
+ GST_DEBUG_OBJECT (depayload, "using NAL as output");
+ outbuf = nal;
+ gst_buffer_unmap (nal, &map);
+ }
+
+ if (outbuf) {
+ gst_rtp_h265_depay_push (rtph265depay, outbuf, out_keyframe, out_timestamp,
+ marker);
+ }
+
+ return;
+
+ /* ERRORS */
+short_nal:
+ {
+ GST_WARNING_OBJECT (depayload, "dropping short NAL");
+ gst_buffer_unmap (nal, &map);
+ gst_buffer_unref (nal);
+ return;
+ }
+}
+
+static void
+gst_rtp_h265_finish_fragmentation_unit (GstRtpH265Depay * rtph265depay)
+{
+ guint outsize;
+ GstMapInfo map;
+ GstBuffer *outbuf;
+
+ outsize = gst_adapter_available (rtph265depay->adapter);
+ g_assert (outsize >= 4);
+
+ outbuf = gst_adapter_take_buffer (rtph265depay->adapter, outsize);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ GST_DEBUG_OBJECT (rtph265depay, "output %d bytes", outsize);
+
+ if (rtph265depay->byte_stream) {
+ memcpy (map.data, sync_bytes, sizeof (sync_bytes));
+ } else {
+ GST_WRITE_UINT32_BE (map.data, outsize - 4);
+ }
+ gst_buffer_unmap (outbuf, &map);
+
+ rtph265depay->current_fu_type = 0;
+
+ gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf,
+ rtph265depay->fu_timestamp, rtph265depay->fu_marker);
+}
+
+static GstBuffer *
+gst_rtp_h265_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpH265Depay *rtph265depay;
+ GstBuffer *outbuf = NULL;
+ guint8 nal_unit_type;
+
+ rtph265depay = GST_RTP_H265_DEPAY (depayload);
+
+ /* flush remaining data on discont */
+ if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+ gst_adapter_clear (rtph265depay->adapter);
+ rtph265depay->wait_start = TRUE;
+ rtph265depay->current_fu_type = 0;
+ rtph265depay->last_fu_seqnum = 0;
+ }
+
+ {
+ gint payload_len;
+ guint8 *payload;
+ guint header_len;
+ GstMapInfo map;
+ guint outsize, nalu_size;
+ GstClockTime timestamp;
+ gboolean marker;
+ guint8 nuh_layer_id, nuh_temporal_id_plus1;
+ guint8 S, E;
+ guint16 nal_header;
+#if 0
+ gboolean donl_present = FALSE;
+#endif
+
+ timestamp = GST_BUFFER_PTS (rtp->buffer);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+ payload = gst_rtp_buffer_get_payload (rtp);
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ GST_DEBUG_OBJECT (rtph265depay, "receiving %d bytes", payload_len);
+
+ if (payload_len == 0)
+ goto empty_packet;
+
+ /* +---------------+---------------+
+ * |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * |F| Type | LayerId | TID |
+ * +-------------+-----------------+
+ *
+ * F must be 0.
+ *
+ */
+ nal_unit_type = (payload[0] >> 1) & 0x3f;
+ nuh_layer_id = ((payload[0] & 0x01) << 5) | (payload[1] >> 3); /* should be zero for now but this could change in future HEVC extensions */
+ nuh_temporal_id_plus1 = payload[1] & 0x03;
+
+ /* At least two byte header with type */
+ header_len = 2;
+
+ GST_DEBUG_OBJECT (rtph265depay,
+ "NAL header nal_unit_type %d, nuh_temporal_id_plus1 %d", nal_unit_type,
+ nuh_temporal_id_plus1);
+
+ GST_FIXME_OBJECT (rtph265depay, "Assuming DONL field is not present");
+
+ /* FIXME - assuming DONL field is not present for now */
+ /*donl_present = (tx-mode == "MST") || (sprop-max-don-diff > 0); */
+
+ /* If FU unit was being processed, but the current nal is of a different
+ * type. Assume that the remote payloader is buggy (didn't set the end bit
+ * when the FU ended) and send out what we gathered thusfar */
+ if (G_UNLIKELY (rtph265depay->current_fu_type != 0 &&
+ nal_unit_type != rtph265depay->current_fu_type))
+ gst_rtp_h265_finish_fragmentation_unit (rtph265depay);
+
+ switch (nal_unit_type) {
+ case 48:
+ {
+ GST_DEBUG_OBJECT (rtph265depay, "Processing aggregation packet");
+
+ /* Aggregation packet (section 4.7) */
+
+ /* An example of an AP packet containing two aggregation units
+ without the DONL and DOND fields
+
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | RTP Header |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | PayloadHdr (Type=48) | NALU 1 Size |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | NALU 1 HDR | |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ NALU 1 Data |
+ | . . . |
+ | |
+ + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | . . . | NALU 2 Size | NALU 2 HDR |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | NALU 2 HDR | |
+ +-+-+-+-+-+-+-+-+ NALU 2 Data |
+ | . . . |
+ | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | :...OPTIONAL RTP padding |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+
+ /* strip headers */
+ payload += header_len;
+ payload_len -= header_len;
+
+ rtph265depay->wait_start = FALSE;
+
+#if 0
+ if (donl_present)
+ goto not_implemented_donl_present;
+#endif
+
+ while (payload_len > 2) {
+ gboolean last = FALSE;
+
+ nalu_size = (payload[0] << 8) | payload[1];
+
+ /* don't include nalu_size */
+ if (nalu_size > (payload_len - 2))
+ nalu_size = payload_len - 2;
+
+ outsize = nalu_size + sizeof (sync_bytes);
+ outbuf = gst_buffer_new_and_alloc (outsize);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ if (rtph265depay->byte_stream) {
+ memcpy (map.data, sync_bytes, sizeof (sync_bytes));
+ } else {
+ GST_WRITE_UINT32_BE (map.data, nalu_size);
+ }
+
+ /* strip NALU size */
+ payload += 2;
+ payload_len -= 2;
+
+ memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+ gst_buffer_unmap (outbuf, &map);
+
+ gst_rtp_copy_video_meta (rtph265depay, outbuf, rtp->buffer);
+
+ if (payload_len - nalu_size <= 2)
+ last = TRUE;
+
+ gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf, timestamp,
+ marker && last);
+
+ payload += nalu_size;
+ payload_len -= nalu_size;
+ }
+ break;
+ }
+ case 49:
+ {
+ GST_DEBUG_OBJECT (rtph265depay, "Processing Fragmentation Unit");
+
+ /* Fragmentation units (FUs) Section 4.8 */
+
+ /* The structure of a Fragmentation Unit (FU)
+ *
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | PayloadHdr (Type=49) | FU header | DONL (cond) |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-|
+ | DONL (cond) | |
+ |-+-+-+-+-+-+-+-+ |
+ | FU payload |
+ | |
+ | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | :...OPTIONAL RTP padding |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ *
+ *
+ */
+
+ /* strip headers */
+ payload += header_len;
+ payload_len -= header_len;
+
+ /* processing FU header */
+ S = (payload[0] & 0x80) == 0x80;
+ E = (payload[0] & 0x40) == 0x40;
+
+ GST_DEBUG_OBJECT (rtph265depay,
+ "FU header with S %d, E %d, nal_unit_type %d", S, E,
+ payload[0] & 0x3f);
+
+ if (rtph265depay->wait_start && !S)
+ goto waiting_start;
+
+#if 0
+ if (donl_present)
+ goto not_implemented_donl_present;
+#endif
+
+ if (S) {
+
+ GST_DEBUG_OBJECT (rtph265depay, "Start of Fragmentation Unit");
+
+ /* If a new FU unit started, while still processing an older one.
+ * Assume that the remote payloader is buggy (doesn't set the end
+ * bit) and send out what we've gathered thusfar */
+ if (G_UNLIKELY (rtph265depay->current_fu_type != 0))
+ gst_rtp_h265_finish_fragmentation_unit (rtph265depay);
+
+ rtph265depay->current_fu_type = nal_unit_type;
+ rtph265depay->fu_timestamp = timestamp;
+ rtph265depay->last_fu_seqnum = gst_rtp_buffer_get_seq (rtp);
+
+ rtph265depay->wait_start = FALSE;
+
+ /* reconstruct NAL header */
+ nal_header =
+ ((payload[0] & 0x3f) << 9) | (nuh_layer_id << 3) |
+ nuh_temporal_id_plus1;
+
+ /* go back one byte so we can copy the payload + two bytes more in the front which
+ * will be overwritten by the nal_header
+ */
+ payload -= 1;
+ payload_len += 1;
+
+ nalu_size = payload_len;
+ outsize = nalu_size + sizeof (sync_bytes);
+ outbuf = gst_buffer_new_and_alloc (outsize);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ if (rtph265depay->byte_stream) {
+ GST_WRITE_UINT32_BE (map.data, 0x00000001);
+ } else {
+ /* will be fixed up in finish_fragmentation_unit() */
+ GST_WRITE_UINT32_BE (map.data, 0xffffffff);
+ }
+ memcpy (map.data + sizeof (sync_bytes), payload, nalu_size);
+ map.data[4] = nal_header >> 8;
+ map.data[5] = nal_header & 0xff;
+ gst_buffer_unmap (outbuf, &map);
+
+ gst_rtp_copy_video_meta (rtph265depay, outbuf, rtp->buffer);
+
+ GST_DEBUG_OBJECT (rtph265depay, "queueing %d bytes", outsize);
+
+ /* and assemble in the adapter */
+ gst_adapter_push (rtph265depay->adapter, outbuf);
+ } else {
+ if (rtph265depay->current_fu_type == 0) {
+ /* previous FU packet missing start bit? */
+ GST_WARNING_OBJECT (rtph265depay, "missing FU start bit on an "
+ "earlier packet. Dropping.");
+ gst_adapter_clear (rtph265depay->adapter);
+ return NULL;
+ }
+ if (gst_rtp_buffer_compare_seqnum (rtph265depay->last_fu_seqnum,
+ gst_rtp_buffer_get_seq (rtp)) != 1) {
+ /* jump in sequence numbers within an FU is cause for discarding */
+ GST_WARNING_OBJECT (rtph265depay, "Jump in sequence numbers from "
+ "%u to %u within Fragmentation Unit. Data was lost, dropping "
+ "stored.", rtph265depay->last_fu_seqnum,
+ gst_rtp_buffer_get_seq (rtp));
+ gst_adapter_clear (rtph265depay->adapter);
+ return NULL;
+ }
+ rtph265depay->last_fu_seqnum = gst_rtp_buffer_get_seq (rtp);
+
+ GST_DEBUG_OBJECT (rtph265depay,
+ "Following part of Fragmentation Unit");
+
+ /* strip off FU header byte */
+ payload += 1;
+ payload_len -= 1;
+
+ outsize = payload_len;
+ outbuf = gst_buffer_new_and_alloc (outsize);
+ gst_buffer_fill (outbuf, 0, payload, outsize);
+
+ gst_rtp_copy_video_meta (rtph265depay, outbuf, rtp->buffer);
+
+ GST_DEBUG_OBJECT (rtph265depay, "queueing %d bytes", outsize);
+
+ /* and assemble in the adapter */
+ gst_adapter_push (rtph265depay->adapter, outbuf);
+ }
+
+ outbuf = NULL;
+ rtph265depay->fu_marker = marker;
+
+ /* if NAL unit ends, flush the adapter */
+ if (E) {
+ gst_rtp_h265_finish_fragmentation_unit (rtph265depay);
+ GST_DEBUG_OBJECT (rtph265depay, "End of Fragmentation Unit");
+ }
+ break;
+ }
+ case 50:
+ goto not_implemented; /* PACI packets Section 4.9 */
+ default:
+ {
+ rtph265depay->wait_start = FALSE;
+
+ /* All other cases: Single NAL unit packet Section 4.6 */
+ /* the entire payload is the output buffer */
+
+#if 0
+ if (donl_present)
+ goto not_implemented_donl_present;
+#endif
+
+ nalu_size = payload_len;
+ outsize = nalu_size + sizeof (sync_bytes);
+ outbuf = gst_buffer_new_and_alloc (outsize);
+
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ if (rtph265depay->byte_stream) {
+ memcpy (map.data, sync_bytes, sizeof (sync_bytes));
+ } else {
+ GST_WRITE_UINT32_BE (map.data, nalu_size);
+ }
+ memcpy (map.data + 4, payload, nalu_size);
+ gst_buffer_unmap (outbuf, &map);
+
+ gst_rtp_copy_video_meta (rtph265depay, outbuf, rtp->buffer);
+
+ gst_rtp_h265_depay_handle_nal (rtph265depay, outbuf, timestamp, marker);
+ break;
+ }
+ }
+ }
+
+ return NULL;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_DEBUG_OBJECT (rtph265depay, "empty packet");
+ return NULL;
+ }
+waiting_start:
+ {
+ GST_DEBUG_OBJECT (rtph265depay, "waiting for start");
+ return NULL;
+ }
+#if 0
+not_implemented_donl_present:
+ {
+ GST_ELEMENT_ERROR (rtph265depay, STREAM, FORMAT,
+ (NULL), ("DONL field present not supported yet"));
+ return NULL;
+ }
+#endif
+not_implemented:
+ {
+ GST_ELEMENT_ERROR (rtph265depay, STREAM, FORMAT,
+ (NULL), ("NAL unit type %d not supported yet", nal_unit_type));
+ return NULL;
+ }
+}
+
+static gboolean
+gst_rtp_h265_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event)
+{
+ GstRtpH265Depay *rtph265depay;
+
+ rtph265depay = GST_RTP_H265_DEPAY (depay);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ gst_rtp_h265_depay_reset (rtph265depay, FALSE);
+ break;
+ case GST_EVENT_EOS:
+ gst_rtp_h265_depay_drain (rtph265depay);
+ break;
+ default:
+ break;
+ }
+
+ return
+ GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, event);
+}
+
+static GstStateChangeReturn
+gst_rtp_h265_depay_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstRtpH265Depay *rtph265depay;
+ GstStateChangeReturn ret;
+
+ rtph265depay = GST_RTP_H265_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtp_h265_depay_reset (rtph265depay, TRUE);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_h265_depay_reset (rtph265depay, TRUE);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtph265depay.h b/gst/rtp/gstrtph265depay.h
new file mode 100644
index 0000000000..505bdb5cd1
--- /dev/null
+++ b/gst/rtp/gstrtph265depay.h
@@ -0,0 +1,115 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2014> Jurgen Slowack <jurgenslowack@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H265_DEPAY_H__
+#define __GST_RTP_H265_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+#include "gstrtph265types.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_H265_DEPAY \
+ (gst_rtp_h265_depay_get_type())
+#define GST_RTP_H265_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H265_DEPAY,GstRtpH265Depay))
+#define GST_RTP_H265_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H265_DEPAY,GstRtpH265DepayClass))
+#define GST_IS_RTP_H265_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H265_DEPAY))
+#define GST_IS_RTP_H265_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H265_DEPAY))
+typedef struct _GstRtpH265Depay GstRtpH265Depay;
+typedef struct _GstRtpH265DepayClass GstRtpH265DepayClass;
+
+#define GST_H265_VPS_NUT 32
+#define GST_H265_SPS_NUT 33
+#define GST_H265_PPS_NUT 34
+
+typedef enum
+{
+ GST_H265_STREAM_FORMAT_UNKNOWN,
+ GST_H265_STREAM_FORMAT_BYTESTREAM,
+ GST_H265_STREAM_FORMAT_HVC1,
+ GST_H265_STREAM_FORMAT_HEV1
+} GstH265StreamFormat;
+
+struct _GstRtpH265Depay
+{
+ GstRTPBaseDepayload depayload;
+
+ const gchar *stream_format;
+ GstH265StreamFormat output_format; /* bytestream, hvc1 or hev1 */
+ gboolean byte_stream;
+
+ GstBuffer *codec_data;
+ GstAdapter *adapter;
+ gboolean wait_start;
+
+ /* nal merging */
+ gboolean merge;
+ GstAdapter *picture_adapter;
+ gboolean picture_start;
+ GstClockTime last_ts;
+ gboolean last_keyframe;
+
+ /* Work around broken payloaders wrt. Fragmentation Units */
+ guint8 current_fu_type;
+ guint16 last_fu_seqnum;
+ GstClockTime fu_timestamp;
+ gboolean fu_marker;
+
+ /* misc */
+ GPtrArray *vps;
+ GPtrArray *sps;
+ GPtrArray *pps;
+ gboolean new_codec_data;
+
+ /* downstream allocator */
+ GstAllocator *allocator;
+ GstAllocationParams params;
+};
+
+struct _GstRtpH265DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+typedef struct
+{
+ GstElement *element;
+ GstBuffer *outbuf;
+ GQuark copy_tag;
+} CopyMetaData;
+
+typedef struct
+{
+ GstElement *element;
+ GQuark keep_tag;
+} DropMetaData;
+
+GType gst_rtp_h265_depay_get_type (void);
+
+gboolean gst_rtp_h265_add_vps_sps_pps (GstElement * rtph265, GPtrArray * vps,
+ GPtrArray * sps, GPtrArray * pps, GstBuffer * nal);
+
+G_END_DECLS
+#endif /* __GST_RTP_H265_DEPAY_H__ */
diff --git a/gst/rtp/gstrtph265pay.c b/gst/rtp/gstrtph265pay.c
new file mode 100644
index 0000000000..9c195152bc
--- /dev/null
+++ b/gst/rtp/gstrtph265pay.c
@@ -0,0 +1,1826 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2014> Jurgen Slowack <jurgenslowack@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/pbutils/pbutils.h>
+#include <gst/video/video.h>
+
+/* Included to not duplicate gst_rtp_h265_add_vps_sps_pps () */
+#include "gstrtph265depay.h"
+
+#include "gstrtpelements.h"
+#include "gstrtph265pay.h"
+#include "gstrtputils.h"
+#include "gstbuffermemory.h"
+
+#define AP_TYPE_ID 48
+#define FU_TYPE_ID 49
+
+GST_DEBUG_CATEGORY_STATIC (rtph265pay_debug);
+#define GST_CAT_DEFAULT (rtph265pay_debug)
+
+#define GST_TYPE_RTP_H265_AGGREGATE_MODE \
+ (gst_rtp_h265_aggregate_mode_get_type ())
+
+
+/* Lazily register the GEnum backing the "aggregate-mode" property.
+ * The previous plain "if (!type)" check was racy if two threads hit the
+ * first property lookup concurrently; g_once_init_enter()/leave() is the
+ * GLib idiom for thread-safe one-time initialisation. */
+static GType
+gst_rtp_h265_aggregate_mode_get_type (void)
+{
+  static gsize type = 0;
+  static const GEnumValue values[] = {
+    {GST_RTP_H265_AGGREGATE_NONE, "Do not aggregate NAL units", "none"},
+    {GST_RTP_H265_AGGREGATE_ZERO_LATENCY,
+        "Aggregate NAL units until a VCL or suffix unit is included",
+        "zero-latency"},
+    {GST_RTP_H265_AGGREGATE_MAX,
+        "Aggregate all NAL units with the same timestamp (adds one frame of"
+        " latency)", "max"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&type)) {
+    GType tmp = g_enum_register_static ("GstRtpH265AggregateMode", values);
+    g_once_init_leave (&type, tmp);
+  }
+  return (GType) type;
+}
+
+
+
+/* references:
+ *
+ * RFC 7798 - RTP Payload Format for High Efficiency Video Coding (HEVC),
+ * the published successor of the draft-ietf-payload-rtp-h265 drafts this
+ * implementation was originally written against.
+ *
+ */
+
+static GstStaticPadTemplate gst_rtp_h265_pay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (
+ /* only hvc1 and byte-stream formats supported for now */
+ "video/x-h265, stream-format = (string) hvc1, alignment = (string) au; "
+ /* "video/x-h265, "
+ "stream-format = (string) hev1, alignment = (string) au; " */
+ "video/x-h265, stream-format = (string) byte-stream, "
+ "alignment = (string) { nal, au }")
+ );
+
+static GstStaticPadTemplate gst_rtp_h265_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"H265\"")
+ /* optional parameters */
+ /* "profile-space = (int) [ 0, 3 ], " */
+ /* "profile-id = (int) [ 0, 31 ], " */
+ /* "tier-flag = (int) [ 0, 1 ], " */
+ /* "level-id = (int) [ 0, 255 ], " */
+ /* "interop-constraints = (string) ANY, " */
+ /* "profile-compatibility-indicator = (string) ANY, " */
+ /* "sprop-sub-layer-id = (int) [ 0, 6 ], " */
+ /* "recv-sub-layer-id = (int) [ 0, 6 ], " */
+ /* "max-recv-level-id = (int) [ 0, 255 ], " */
+ /* "tx-mode = (string) {MST , SST}, " */
+ /* "sprop-vps = (string) ANY, " */
+ /* "sprop-sps = (string) ANY, " */
+ /* "sprop-pps = (string) ANY, " */
+ /* "sprop-sei = (string) ANY, " */
+ /* "max-lsr = (int) ANY, " *//* MUST be in the range of MaxLumaSR to 16 * MaxLumaSR, inclusive */
+ /* "max-lps = (int) ANY, " *//* MUST be in the range of MaxLumaPS to 16 * MaxLumaPS, inclusive */
+ /* "max-cpb = (int) ANY, " *//* MUST be in the range of MaxCPB to 16 * MaxCPB, inclusive */
+ /* "max-dpb = (int) [1, 16], " */
+ /* "max-br = (int) ANY, " *//* MUST be in the range of MaxBR to 16 * MaxBR, inclusive, for the highest level */
+ /* "max-tr = (int) ANY, " *//* MUST be in the range of MaxTileRows to 16 * MaxTileRows, inclusive, for the highest level */
+ /* "max-tc = (int) ANY, " *//* MUST be in the range of MaxTileCols to 16 * MaxTileCols, inclusive, for the highest level */
+ /* "max-fps = (int) ANY, " */
+ /* "sprop-max-don-diff = (int) [0, 32767], " */
+ /* "sprop-depack-buf-nalus = (int) [0, 32767], " */
+ /* "sprop-depack-buf-nalus = (int) [0, 4294967295], " */
+ /* "depack-buf-cap = (int) [1, 4294967295], " */
+ /* "sprop-segmentation-id = (int) [0, 3], " */
+ /* "sprop-spatial-segmentation-idc = (string) ANY, " */
+ /* "dec-parallel-cap = (string) ANY, " */
+ );
+
+#define DEFAULT_CONFIG_INTERVAL 0
+#define DEFAULT_AGGREGATE_MODE GST_RTP_H265_AGGREGATE_NONE
+
+enum
+{
+ PROP_0,
+ PROP_CONFIG_INTERVAL,
+ PROP_AGGREGATE_MODE,
+};
+
+static void gst_rtp_h265_pay_finalize (GObject * object);
+
+static void gst_rtp_h265_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_h265_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstCaps *gst_rtp_h265_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_rtp_h265_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_h265_pay_handle_buffer (GstRTPBasePayload * pad,
+ GstBuffer * buffer);
+static gboolean gst_rtp_h265_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+static GstStateChangeReturn gst_rtp_h265_pay_change_state (GstElement *
+ element, GstStateChange transition);
+static gboolean gst_rtp_h265_pay_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+static void gst_rtp_h265_pay_reset_bundle (GstRtpH265Pay * rtph265pay);
+
+#define gst_rtp_h265_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpH265Pay, gst_rtp_h265_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtph265pay, "rtph265pay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_H265_PAY, rtp_element_init (plugin));
+
+/* Class initialisation: install properties, pad templates, element metadata
+ * and the GstRTPBasePayload virtual methods. */
+static void
+gst_rtp_h265_pay_class_init (GstRtpH265PayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->set_property = gst_rtp_h265_pay_set_property;
+ gobject_class->get_property = gst_rtp_h265_pay_get_property;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_CONFIG_INTERVAL,
+ g_param_spec_int ("config-interval",
+ "VPS SPS PPS Send Interval",
+ "Send VPS, SPS and PPS Insertion Interval in seconds (sprop parameter sets "
+ "will be multiplexed in the data stream when detected.) "
+ "(0 = disabled, -1 = send with every IDR frame)",
+ -1, 3600, DEFAULT_CONFIG_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ /**
+ * GstRtpH265Pay:aggregate-mode
+ *
+ * Bundle suitable VPS/SPS/PPS NAL units into aggregation packets (APs,
+ * RFC 7798 section 4.4.2).
+ *
+ * This can potentially reduce RTP packetization overhead but not all
+ * RTP implementations handle it correctly.
+ *
+ * For best compatibility, it is recommended to set this to "none" (the
+ * default) for RTSP and for WebRTC to "zero-latency".
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_AGGREGATE_MODE,
+ g_param_spec_enum ("aggregate-mode",
+ "Attempt to use aggregate packets",
+ "Bundle suitable SPS/PPS NAL units into aggregate packets.",
+ GST_TYPE_RTP_H265_AGGREGATE_MODE,
+ DEFAULT_AGGREGATE_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gobject_class->finalize = gst_rtp_h265_pay_finalize;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h265_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_h265_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP H265 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encode H265 video into RTP packets (RFC 7798)",
+ "Jurgen Slowack <jurgenslowack@gmail.com>");
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_rtp_h265_pay_change_state);
+
+ gstrtpbasepayload_class->get_caps = gst_rtp_h265_pay_getcaps;
+ gstrtpbasepayload_class->set_caps = gst_rtp_h265_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_h265_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_h265_pay_sink_event;
+
+ GST_DEBUG_CATEGORY_INIT (rtph265pay_debug, "rtph265pay", 0,
+ "H265 RTP Payloader");
+
+ /* expose the enum so gst-inspect / bindings can introspect it */
+ gst_type_mark_as_plugin_api (GST_TYPE_RTP_H265_AGGREGATE_MODE, 0);
+}
+
+/* Instance init: allocate the NAL-size queue, the parameter-set arrays
+ * (each owning a ref on its buffers via the free func) and the input
+ * adapter; hook the src pad latency query. */
+static void
+gst_rtp_h265_pay_init (GstRtpH265Pay * rtph265pay)
+{
+ rtph265pay->queue = g_array_new (FALSE, FALSE, sizeof (guint));
+ rtph265pay->sps = g_ptr_array_new_with_free_func (
+ (GDestroyNotify) gst_buffer_unref);
+ rtph265pay->pps = g_ptr_array_new_with_free_func (
+ (GDestroyNotify) gst_buffer_unref);
+ rtph265pay->vps = g_ptr_array_new_with_free_func (
+ (GDestroyNotify) gst_buffer_unref);
+ rtph265pay->last_vps_sps_pps = -1; /* "never sent yet" sentinel */
+ rtph265pay->vps_sps_pps_interval = DEFAULT_CONFIG_INTERVAL;
+ rtph265pay->aggregate_mode = DEFAULT_AGGREGATE_MODE;
+
+ rtph265pay->adapter = gst_adapter_new ();
+
+ gst_pad_set_query_function (GST_RTP_BASE_PAYLOAD_SRCPAD (rtph265pay),
+ gst_rtp_h265_pay_src_query);
+}
+
+/* Drop all stored parameter sets; truncating to 0 unrefs every buffer via
+ * the arrays' free func. */
+static void
+gst_rtp_h265_pay_clear_vps_sps_pps (GstRtpH265Pay * rtph265pay)
+{
+ g_ptr_array_set_size (rtph265pay->vps, 0);
+ g_ptr_array_set_size (rtph265pay->sps, 0);
+ g_ptr_array_set_size (rtph265pay->pps, 0);
+}
+
+/* GObject finalize: release every container allocated in _init() plus any
+ * pending aggregation bundle, then chain up. */
+static void
+gst_rtp_h265_pay_finalize (GObject * object)
+{
+ GstRtpH265Pay *rtph265pay;
+
+ rtph265pay = GST_RTP_H265_PAY (object);
+
+ g_array_free (rtph265pay->queue, TRUE);
+
+ g_ptr_array_free (rtph265pay->sps, TRUE);
+ g_ptr_array_free (rtph265pay->pps, TRUE);
+ g_ptr_array_free (rtph265pay->vps, TRUE);
+
+ g_object_unref (rtph265pay->adapter);
+
+ gst_rtp_h265_pay_reset_bundle (rtph265pay);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Src pad query handler.  For LATENCY queries, add one frame worth of
+ * latency when aggregate-mode=max holds NALs until the next timestamp;
+ * all other queries fall through to the default handler. */
+static gboolean
+gst_rtp_h265_pay_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstRtpH265Pay *rtph265pay = GST_RTP_H265_PAY (parent);
+
+ if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
+ gboolean retval;
+ gboolean live;
+ GstClockTime min_latency, max_latency;
+
+ retval = gst_pad_query_default (pad, parent, query);
+ if (!retval)
+ return retval;
+
+ /* NOTE(review): deliberately fails the query until setcaps has filled
+ * in stream format and alignment; callers are expected to retry */
+ if (rtph265pay->stream_format == GST_H265_STREAM_FORMAT_UNKNOWN ||
+ rtph265pay->alignment == GST_H265_ALIGNMENT_UNKNOWN)
+ return FALSE;
+
+ gst_query_parse_latency (query, &live, &min_latency, &max_latency);
+
+ if (rtph265pay->aggregate_mode == GST_RTP_H265_AGGREGATE_MAX &&
+ rtph265pay->alignment != GST_H265_ALIGNMENT_AU && rtph265pay->fps_num) {
+ GstClockTime one_frame = gst_util_uint64_scale_int (GST_SECOND,
+ rtph265pay->fps_denum, rtph265pay->fps_num);
+
+ min_latency += one_frame;
+ max_latency += one_frame;
+ gst_query_set_latency (query, live, min_latency, max_latency);
+ }
+ return TRUE;
+ }
+
+ return gst_pad_query_default (pad, parent, query);
+}
+
+
+/* Every H.265 level, ascending; getcaps uses this to expand a peer's
+ * level-id into "that level and every lower one". */
+static const gchar all_levels[][4] = {
+ "1",
+ "2",
+ "2.1",
+ "3",
+ "3.1",
+ "4",
+ "4.1",
+ "5",
+ "5.1",
+ "5.2",
+ "6",
+ "6.1",
+ "6.2"
+};
+
+/* Parse the string value of @field in @s as a base-10 integer into @result.
+ * Returns FALSE when the field is missing, empty, has trailing garbage or
+ * falls outside [@min, @max]. */
+static gboolean
+parse_field (GstStructure * s, const gchar * field, gulong min, gulong max,
+ guint8 * result)
+{
+ const gchar *str;
+
+ g_assert (result != NULL);
+
+ str = gst_structure_get_string (s, field);
+ if (str != NULL && *str != '\0') {
+ gulong value;
+ gchar *end;
+
+ value = strtoul (str, &end, 10);
+ if (*end == '\0' && value >= min && value <= max) {
+ *result = (guint8) value;
+ } else {
+ return FALSE;
+ }
+ } else {
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/* Sink pad caps query: translate the RTP caps fields the peer advertises
+ * (profile-id, tier-flag, level-id) into video/x-h265 restrictions, then
+ * intersect with the sink template and the optional @filter. */
+static GstCaps *
+gst_rtp_h265_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *template_caps;
+ GstCaps *allowed_caps;
+ GstCaps *caps;
+ GstCaps *icaps;
+ guint i;
+
+ allowed_caps =
+ gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload), NULL);
+
+ if (allowed_caps == NULL)
+ return NULL;
+
+ template_caps =
+ gst_static_pad_template_get_caps (&gst_rtp_h265_pay_sink_template);
+
+ if (gst_caps_is_any (allowed_caps)) {
+ caps = gst_caps_ref (template_caps);
+ goto done;
+ }
+
+ if (gst_caps_is_empty (allowed_caps)) {
+ caps = gst_caps_ref (allowed_caps);
+ goto done;
+ }
+
+ caps = gst_caps_new_empty ();
+ for (i = 0; i < gst_caps_get_size (allowed_caps); i++) {
+ GstStructure *s = gst_caps_get_structure (allowed_caps, i);
+ GstStructure *new_s = gst_structure_new_empty ("video/x-h265");
+ /* minimal profile_tier_level blob rebuilt from the RTP fields so the
+ * pbutils helpers can map it to profile/tier/level strings */
+ guint8 ptl[12] = { 0, };
+ guint8 value;
+
+ if (parse_field (s, "profile-id", 0, 31, &value)) {
+ const gchar *profile;
+
+ ptl[0] = value;
+ profile = gst_codec_utils_h265_get_profile (ptl, sizeof (ptl));
+ if (profile != NULL) {
+ GST_DEBUG_OBJECT (payload, "profile %s", profile);
+ gst_structure_set (new_s, "profile", G_TYPE_STRING, profile, NULL);
+ } else {
+ GST_WARNING_OBJECT (payload, "invalid profile-id %d in caps", value);
+ }
+ } else {
+ GST_DEBUG_OBJECT (payload, "no valid profile-id in caps");
+ }
+
+ if (parse_field (s, "tier-flag", 0, 1, &value)) {
+ const gchar *tier;
+
+ ptl[0] |= value << 5;
+ tier = gst_codec_utils_h265_get_tier (ptl, sizeof (ptl));
+ GST_DEBUG_OBJECT (payload, "tier %s", tier);
+ gst_structure_set (new_s, "tier", G_TYPE_STRING, tier, NULL);
+ } else {
+ GST_DEBUG_OBJECT (payload, "no valid tier-flag in caps");
+ }
+
+ if (parse_field (s, "level-id", 0, 255, &value)) {
+ const gchar *level;
+
+ ptl[11] = value;
+ level = gst_codec_utils_h265_get_level (ptl, sizeof (ptl));
+ if (level != NULL) {
+ GST_DEBUG_OBJECT (payload, "level %s", level);
+ if (strcmp (level, "1") == 0) {
+ gst_structure_set (new_s, "level", G_TYPE_STRING, level, NULL);
+ } else {
+ /* accept the advertised level or any lower one */
+ GValue levels = { 0, };
+ GValue val = { 0, };
+ int j;
+
+ g_value_init (&levels, GST_TYPE_LIST);
+ g_value_init (&val, G_TYPE_STRING);
+
+ for (j = 0; j < G_N_ELEMENTS (all_levels); j++) {
+ g_value_set_static_string (&val, all_levels[j]);
+ gst_value_list_prepend_value (&levels, &val);
+ if (!strcmp (level, all_levels[j]))
+ break;
+ }
+ gst_structure_take_value (new_s, "level", &levels);
+ }
+ } else {
+ GST_WARNING_OBJECT (payload, "invalid level-id %d in caps", value);
+ }
+ } else {
+ GST_DEBUG_OBJECT (payload, "no valid level-id in caps");
+ }
+
+ caps = gst_caps_merge_structure (caps, new_s);
+ }
+
+ icaps = gst_caps_intersect (caps, template_caps);
+ gst_caps_unref (caps);
+ caps = icaps;
+
+done:
+
+ if (filter) {
+ GstCaps *tmp;
+
+ GST_DEBUG_OBJECT (payload, "Intersect %" GST_PTR_FORMAT " and filter %"
+ GST_PTR_FORMAT, caps, filter);
+ tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = tmp;
+ }
+
+ gst_caps_unref (template_caps);
+ gst_caps_unref (allowed_caps);
+
+ GST_LOG_OBJECT (payload, "returning caps %" GST_PTR_FORMAT, caps);
+ return caps;
+}
+
+/* take the currently configured VPS, SPS and PPS lists, base64-encode every
+ * NAL and publish them as the sprop-vps/sprop-sps/sprop-pps fields of the
+ * output caps; with no parameter sets yet, set plain outcaps instead */
+static gboolean
+gst_rtp_h265_pay_set_vps_sps_pps (GstRTPBasePayload * basepayload)
+{
+ GstRtpH265Pay *payloader = GST_RTP_H265_PAY (basepayload);
+ gchar *set;
+ GString *vps;
+ GString *sps;
+ GString *pps;
+ guint count;
+ gboolean res;
+ GstMapInfo map;
+ guint i;
+
+ vps = g_string_new ("");
+ sps = g_string_new ("");
+ pps = g_string_new ("");
+ count = 0;
+
+ for (i = 0; i < payloader->vps->len; i++) {
+ GstBuffer *vps_buf =
+ GST_BUFFER_CAST (g_ptr_array_index (payloader->vps, i));
+
+ gst_buffer_map (vps_buf, &map, GST_MAP_READ);
+ set = g_base64_encode (map.data, map.size);
+ gst_buffer_unmap (vps_buf, &map);
+
+ g_string_append_printf (vps, "%s%s", i ? "," : "", set);
+ g_free (set);
+ count++;
+ }
+ for (i = 0; i < payloader->sps->len; i++) {
+ GstBuffer *sps_buf =
+ GST_BUFFER_CAST (g_ptr_array_index (payloader->sps, i));
+
+ gst_buffer_map (sps_buf, &map, GST_MAP_READ);
+ set = g_base64_encode (map.data, map.size);
+ gst_buffer_unmap (sps_buf, &map);
+
+ g_string_append_printf (sps, "%s%s", i ? "," : "", set);
+ g_free (set);
+ count++;
+ }
+ for (i = 0; i < payloader->pps->len; i++) {
+ GstBuffer *pps_buf =
+ GST_BUFFER_CAST (g_ptr_array_index (payloader->pps, i));
+
+ gst_buffer_map (pps_buf, &map, GST_MAP_READ);
+ set = g_base64_encode (map.data, map.size);
+ gst_buffer_unmap (pps_buf, &map);
+
+ g_string_append_printf (pps, "%s%s", i ? "," : "", set);
+ g_free (set);
+ count++;
+ }
+
+ if (G_LIKELY (count)) {
+ /* combine into output caps */
+ res = gst_rtp_base_payload_set_outcaps (basepayload,
+ "sprop-vps", G_TYPE_STRING, vps->str,
+ "sprop-sps", G_TYPE_STRING, sps->str,
+ "sprop-pps", G_TYPE_STRING, pps->str, NULL);
+ } else {
+ res = gst_rtp_base_payload_set_outcaps (basepayload, NULL);
+ }
+ g_string_free (vps, TRUE);
+ g_string_free (sps, TRUE);
+ g_string_free (pps, TRUE);
+
+ return res;
+}
+
+
+/* Negotiate input caps: record alignment, stream-format and framerate and,
+ * for packetized (hvc1) input, parse the hevcC codec_data to extract the
+ * NAL length size and the VPS/SPS/PPS parameter sets.
+ *
+ * Fixes over the previous revision:
+ *  - the "hevcC too small" error now quotes the real minimum size (23);
+ *  - the 1-byte array header (NAL type) consumed before each parameter-set
+ *    array is subtracted from @size, and reading the 3-byte array header is
+ *    bounds-checked, so a truncated hevcC can no longer be over-read;
+ *  - typo fixes in the parallelismType debug message. */
+static gboolean
+gst_rtp_h265_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+  GstRtpH265Pay *rtph265pay;
+  GstStructure *str;
+  const GValue *value;
+  GstMapInfo map;
+  guint8 *data;
+  gsize size;
+  GstBuffer *buffer;
+  const gchar *alignment, *stream_format;
+  guint8 num_arrays;
+
+  rtph265pay = GST_RTP_H265_PAY (basepayload);
+
+  str = gst_caps_get_structure (caps, 0);
+
+  /* we can only set the output caps when we found the sprops and profile
+   * NALs */
+  gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "H265", 90000);
+
+  rtph265pay->alignment = GST_H265_ALIGNMENT_UNKNOWN;
+  alignment = gst_structure_get_string (str, "alignment");
+  if (alignment) {
+    if (g_str_equal (alignment, "au"))
+      rtph265pay->alignment = GST_H265_ALIGNMENT_AU;
+    if (g_str_equal (alignment, "nal"))
+      rtph265pay->alignment = GST_H265_ALIGNMENT_NAL;
+  }
+
+  rtph265pay->stream_format = GST_H265_STREAM_FORMAT_UNKNOWN;
+  stream_format = gst_structure_get_string (str, "stream-format");
+  if (stream_format) {
+    if (g_str_equal (stream_format, "hvc1"))
+      rtph265pay->stream_format = GST_H265_STREAM_FORMAT_HVC1;
+    if (g_str_equal (stream_format, "hev1"))
+      rtph265pay->stream_format = GST_H265_STREAM_FORMAT_HEV1;
+    if (g_str_equal (stream_format, "byte-stream"))
+      rtph265pay->stream_format = GST_H265_STREAM_FORMAT_BYTESTREAM;
+  }
+
+  if (!gst_structure_get_fraction (str, "framerate", &rtph265pay->fps_num,
+          &rtph265pay->fps_denum))
+    rtph265pay->fps_num = rtph265pay->fps_denum = 0;
+
+  /* packetized HEVC video has a codec_data */
+  if ((value = gst_structure_get_value (str, "codec_data"))) {
+    guint num_vps, num_sps, num_pps;
+    gint i, j, nal_size;
+
+    GST_DEBUG_OBJECT (rtph265pay, "have packetized h265");
+
+    buffer = gst_value_get_buffer (value);
+
+    gst_buffer_map (buffer, &map, GST_MAP_READ);
+    data = map.data;
+    size = map.size;
+
+    /* parse the hevcC data: 22 fixed header bytes plus numOfArrays */
+    if (size < 23)
+      goto hevcc_too_small;
+    /* HEVCDecoderConfigurationVersion (must be 1) */
+    if (data[0] != 1)
+      goto wrong_version;
+
+    /* profile_space | tier_flag | profile_idc */
+    GST_DEBUG_OBJECT (rtph265pay, "profile %06x", data[1]);
+
+    /* profile_compatibility_flags */
+    for (i = 2; i < 6; i++) {
+      for (j = 7; j >= 0; j--) {
+        GST_DEBUG_OBJECT (rtph265pay, "profile_compatibility_flag %06x",
+            (data[i] >> j) & 1);
+      }
+    }
+
+    GST_DEBUG_OBJECT (rtph265pay, "progressive_source_flag %06x",
+        (data[6] >> 7) & 1);
+    GST_DEBUG_OBJECT (rtph265pay, "interlaced_source_flag %06x",
+        (data[6] >> 6) & 1);
+    GST_DEBUG_OBJECT (rtph265pay, "non_packed_constraint_flag %06x",
+        (data[6] >> 5) & 1);
+    GST_DEBUG_OBJECT (rtph265pay, "frame_only_constraint_flag %06x",
+        (data[6] >> 4) & 1);
+
+    GST_DEBUG_OBJECT (rtph265pay, "level_idc %06x", data[12]);
+
+    GST_DEBUG_OBJECT (rtph265pay, "min_spatial_segmentation_idc %06x",
+        ((data[13] ^ 0xf0) << 8) + data[14]);
+    GST_DEBUG_OBJECT (rtph265pay,
+        "parallelismType %06x (ignored by payloader)", data[15]);
+
+    GST_DEBUG_OBJECT (rtph265pay, "sps_chroma_format_idc %06x",
+        data[16] ^ 0xfc);
+    GST_DEBUG_OBJECT (rtph265pay, "bit_depth_luma_minus8 %06x",
+        data[17] ^ 0xf8);
+    GST_DEBUG_OBJECT (rtph265pay, "bit_depth_chroma_minus8 %06x",
+        data[18] ^ 0xf8);
+    GST_DEBUG_OBJECT (rtph265pay, "avgFrameRate %06x", data[19]);
+    GST_DEBUG_OBJECT (rtph265pay, "avgFrameRate %06x", data[20]);
+
+    /* constFrameRate(2 bits): 0, stream may or may not be of constant framerate
+     * numTemporalLayers (3 bits): number of temporal layers, value from SPS
+     * TemporalIdNested (1 bit): sps_temporal_id_nesting_flag from SPS
+     * lengthSizeMinusOne (2 bits): plus 1 indicates the length of the NALUnitLength */
+    GST_DEBUG_OBJECT (rtph265pay, "constFrameRate %06x",
+        (data[21] >> 6) & 0x03);
+    GST_DEBUG_OBJECT (rtph265pay, "numTemporalLayers %06x",
+        (data[21] >> 3) & 0x07);
+    GST_DEBUG_OBJECT (rtph265pay, "temporal_id_nesting_flag %06x",
+        (data[21] >> 2) & 0x01);
+
+    rtph265pay->nal_length_size = (data[21] & 0x3) + 1;
+    GST_DEBUG_OBJECT (rtph265pay, "nal length %u", rtph265pay->nal_length_size);
+
+    num_arrays = GST_READ_UINT8 (data + 22);
+
+    data += 23;
+    size -= 23;
+
+    if (num_arrays > 0) {
+      /* each array starts with a 1-byte NAL type and a 2-byte NAL count */
+      if (size < 3)
+        goto hevcc_error;
+      if ((data[0] & 0x3f) == 0x20) {   /* VPS */
+        data++;
+        size--;
+        num_vps = data[0] << 8 | data[1];
+        data += 2;
+        size -= 2;
+
+        for (i = 0; i < num_vps; i++) {
+          GstBuffer *vps_buf;
+
+          if (size < 2)
+            goto hevcc_error;
+
+          nal_size = (data[0] << 8) | data[1];
+          data += 2;
+          size -= 2;
+
+          GST_LOG_OBJECT (rtph265pay, "VPS %d size %d", i, nal_size);
+
+          if (size < nal_size)
+            goto hevcc_error;
+
+          /* make a buffer out of it and add to VPS list */
+          vps_buf = gst_buffer_new_and_alloc (nal_size);
+          gst_buffer_fill (vps_buf, 0, data, nal_size);
+          gst_rtp_h265_add_vps_sps_pps (GST_ELEMENT (rtph265pay),
+              rtph265pay->vps, rtph265pay->sps, rtph265pay->pps, vps_buf);
+          data += nal_size;
+          size -= nal_size;
+        }
+      }
+      --num_arrays;
+    }
+
+    if (num_arrays > 0) {
+      if (size < 3)
+        goto hevcc_error;
+      if ((data[0] & 0x3f) == 0x21) {   /* SPS */
+        data++;
+        size--;
+        num_sps = data[0] << 8 | data[1];
+        data += 2;
+        size -= 2;
+
+        for (i = 0; i < num_sps; i++) {
+          GstBuffer *sps_buf;
+
+          if (size < 2)
+            goto hevcc_error;
+
+          nal_size = (data[0] << 8) | data[1];
+          data += 2;
+          size -= 2;
+
+          GST_LOG_OBJECT (rtph265pay, "SPS %d size %d", i, nal_size);
+
+          if (size < nal_size)
+            goto hevcc_error;
+
+          /* make a buffer out of it and add to SPS list */
+          sps_buf = gst_buffer_new_and_alloc (nal_size);
+          gst_buffer_fill (sps_buf, 0, data, nal_size);
+          gst_rtp_h265_add_vps_sps_pps (GST_ELEMENT (rtph265pay),
+              rtph265pay->vps, rtph265pay->sps, rtph265pay->pps, sps_buf);
+          data += nal_size;
+          size -= nal_size;
+        }
+      }
+      --num_arrays;
+    }
+
+    if (num_arrays > 0) {
+      if (size < 3)
+        goto hevcc_error;
+      if ((data[0] & 0x3f) == 0x22) {   /* PPS */
+        data++;
+        size--;
+        num_pps = data[0] << 8 | data[1];
+        data += 2;
+        size -= 2;
+
+        for (i = 0; i < num_pps; i++) {
+          GstBuffer *pps_buf;
+
+          if (size < 2)
+            goto hevcc_error;
+
+          nal_size = (data[0] << 8) | data[1];
+          data += 2;
+          size -= 2;
+
+          GST_LOG_OBJECT (rtph265pay, "PPS %d size %d", i, nal_size);
+
+          if (size < nal_size)
+            goto hevcc_error;
+
+          /* make a buffer out of it and add to PPS list */
+          pps_buf = gst_buffer_new_and_alloc (nal_size);
+          gst_buffer_fill (pps_buf, 0, data, nal_size);
+          gst_rtp_h265_add_vps_sps_pps (GST_ELEMENT (rtph265pay),
+              rtph265pay->vps, rtph265pay->sps, rtph265pay->pps, pps_buf);
+          data += nal_size;
+          size -= nal_size;
+        }
+      }
+      --num_arrays;
+    }
+
+    /* and update the caps with the collected data */
+    if (!gst_rtp_h265_pay_set_vps_sps_pps (basepayload))
+      goto set_vps_sps_pps_failed;
+
+    GST_DEBUG_OBJECT (rtph265pay, "Caps have been set");
+
+    gst_buffer_unmap (buffer, &map);
+  } else {
+    GST_DEBUG_OBJECT (rtph265pay, "have bytestream h265");
+  }
+
+  return TRUE;
+
+hevcc_too_small:
+  {
+    GST_ERROR_OBJECT (rtph265pay, "hevcC size %" G_GSIZE_FORMAT " < 23", size);
+    goto error;
+  }
+wrong_version:
+  {
+    GST_ERROR_OBJECT (rtph265pay, "wrong hevcC version");
+    goto error;
+  }
+hevcc_error:
+  {
+    GST_ERROR_OBJECT (rtph265pay, "hevcC too small ");
+    goto error;
+  }
+set_vps_sps_pps_failed:
+  {
+    GST_ERROR_OBJECT (rtph265pay, "failed to set vps/sps/pps");
+    goto error;
+  }
+error:
+  {
+    gst_buffer_unmap (buffer, &map);
+    return FALSE;
+  }
+}
+
+/* Return the offset of the next 00 00 01 start code in @data (offset of the
+ * first zero byte), or @size when no start code is found. */
+static guint
+next_start_code (const guint8 * data, guint size)
+{
+ /* Boyer-Moore string matching algorithm, in a degenerative
+ * sense because our search 'alphabet' is binary - 0 & 1 only.
+ * This allow us to simplify the general BM algorithm to a very
+ * simple form. */
+ /* assume 1 is in the 3rd byte */
+ guint offset = 2;
+
+ while (offset < size) {
+ if (1 == data[offset]) {
+ unsigned int shift = offset;
+
+ if (0 == data[--shift]) {
+ if (0 == data[--shift]) {
+ /* matched 00 00 01; shift indexes the first zero byte */
+ return shift;
+ }
+ }
+ /* The jump is always 3 because of the 1 previously matched.
+ * All the 0's must be after this '1' matched at offset */
+ offset += 3;
+ } else if (0 == data[offset]) {
+ /* maybe next byte is 1? */
+ offset++;
+ } else {
+ /* can jump 3 bytes forward */
+ offset += 3;
+ }
+ /* at each iteration, we rescan in a backward manner until
+ * we match 0.0.1 in reverse order. Since our search string
+ * has only 2 'alphabets' (i.e. 0 & 1), we know that any
+ * mismatch will force us to shift a fixed number of steps */
+ }
+ GST_DEBUG ("Cannot find next NAL start code. returning %u", size);
+
+ return size;
+}
+
+/* Inspect the NAL at @data (first byte = NAL unit header): when it is a
+ * VPS/SPS/PPS, strip trailing zero padding, store a copy in the payloader's
+ * parameter-set lists and record the running time it was seen at.
+ * Returns TRUE only when the stored parameter sets actually changed. */
+static gboolean
+gst_rtp_h265_pay_decode_nal (GstRtpH265Pay * payloader,
+ const guint8 * data, guint size, GstClockTime dts, GstClockTime pts)
+{
+ guint8 type;
+ gboolean updated;
+
+ /* default is no update */
+ updated = FALSE;
+
+ GST_DEBUG_OBJECT (payloader, "NAL payload size %u", size);
+
+ type = (data[0] >> 1) & 0x3f;
+
+ /* We record the timestamp of the last SPS/PPS so
+ * that we can insert them at regular intervals and when needed. */
+ if (GST_H265_NAL_VPS == type || GST_H265_NAL_SPS == type
+ || GST_H265_NAL_PPS == type) {
+ GstBuffer *nal;
+
+ /* trailing 0x0 are not part of the VPS/SPS/PPS */
+ while (size > 0 && data[size - 1] == 0x0)
+ size--;
+
+ /* encode the entire NAL in base64 */
+ GST_DEBUG_OBJECT (payloader, "found %s (type 0x%x), size %u",
+ type == GST_H265_NAL_VPS ? "VPS" : type == GST_H265_NAL_SPS ?
+ "SPS" : "PPS", type, size);
+
+ nal = gst_buffer_new_allocate (NULL, size, NULL);
+ gst_buffer_fill (nal, 0, data, size);
+
+ /* takes ownership of @nal */
+ updated = gst_rtp_h265_add_vps_sps_pps (GST_ELEMENT (payloader),
+ payloader->vps, payloader->sps, payloader->pps, nal);
+
+ /* remember when we last saw VPS */
+ if (pts != -1)
+ payloader->last_vps_sps_pps =
+ gst_segment_to_running_time (&GST_RTP_BASE_PAYLOAD_CAST
+ (payloader)->segment, GST_FORMAT_TIME, pts);
+ } else {
+ GST_DEBUG_OBJECT (payloader, "NALU type 0x%x, size %u", type, size);
+ }
+
+ return updated;
+}
+
+static GstFlowReturn gst_rtp_h265_pay_payload_nal (GstRTPBasePayload *
+ basepayload, GPtrArray * paybufs, GstClockTime dts, GstClockTime pts);
+static GstFlowReturn gst_rtp_h265_pay_payload_nal_single (GstRTPBasePayload *
+ basepayload, GstBuffer * paybuf, GstClockTime dts, GstClockTime pts,
+ gboolean marker);
+static GstFlowReturn gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload *
+ basepayload, GstBuffer * paybuf, GstClockTime dts, GstClockTime pts,
+ gboolean marker, guint mtu, guint8 nal_type, const guint8 * nal_header,
+ int size);
+static GstFlowReturn gst_rtp_h265_pay_payload_nal_bundle (GstRTPBasePayload *
+ basepayload, GstBuffer * paybuf, GstClockTime dts, GstClockTime pts,
+ gboolean marker, guint8 nal_type, const guint8 * nal_header, int size);
+
+/* Push the stored VPS/SPS/PPS NALs into the stream ahead of the current
+ * frame; on full success, update last_vps_sps_pps with the running time so
+ * the config-interval logic restarts from here. */
+static GstFlowReturn
+gst_rtp_h265_pay_send_vps_sps_pps (GstRTPBasePayload * basepayload,
+ GstRtpH265Pay * rtph265pay, GstClockTime dts, GstClockTime pts)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ gboolean sent_all_vps_sps_pps = TRUE;
+ guint i;
+ GPtrArray *bufs;
+
+ bufs = g_ptr_array_new ();
+
+ for (i = 0; i < rtph265pay->vps->len; i++) {
+ GstBuffer *vps_buf =
+ GST_BUFFER_CAST (g_ptr_array_index (rtph265pay->vps, i));
+
+ GST_DEBUG_OBJECT (rtph265pay, "inserting VPS in the stream");
+ g_ptr_array_add (bufs, gst_buffer_ref (vps_buf));
+ }
+ for (i = 0; i < rtph265pay->sps->len; i++) {
+ GstBuffer *sps_buf =
+ GST_BUFFER_CAST (g_ptr_array_index (rtph265pay->sps, i));
+
+ GST_DEBUG_OBJECT (rtph265pay, "inserting SPS in the stream");
+ g_ptr_array_add (bufs, gst_buffer_ref (sps_buf));
+ }
+ for (i = 0; i < rtph265pay->pps->len; i++) {
+ GstBuffer *pps_buf =
+ GST_BUFFER_CAST (g_ptr_array_index (rtph265pay->pps, i));
+
+ GST_DEBUG_OBJECT (rtph265pay, "inserting PPS in the stream");
+ g_ptr_array_add (bufs, gst_buffer_ref (pps_buf));
+ }
+
+ /* payload_nal takes ownership of @bufs and of the refs added above */
+ ret = gst_rtp_h265_pay_payload_nal (basepayload, bufs, dts, pts);
+ if (ret != GST_FLOW_OK) {
+ /* not critical but warn */
+ GST_WARNING_OBJECT (basepayload, "failed pushing VPS/SPS/PPS");
+
+ sent_all_vps_sps_pps = FALSE;
+ }
+
+ if (pts != -1 && sent_all_vps_sps_pps)
+ rtph265pay->last_vps_sps_pps =
+ gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+ pts);
+
+ return ret;
+}
+
+/* Discard any partially accumulated aggregation-packet (AP) state. */
+static void
+gst_rtp_h265_pay_reset_bundle (GstRtpH265Pay * rtph265pay)
+{
+  rtph265pay->bundle_size = 0;
+  rtph265pay->bundle_contains_vcl_or_suffix = FALSE;
+  g_clear_pointer (&rtph265pay->bundle, gst_buffer_list_unref);
+}
+
+/* Payload a batch of NAL units sharing @dts/@pts.  Takes ownership of
+ * @paybufs (the array and every buffer in it).  For each NAL, decide
+ * whether the parameter sets must be re-sent first (config-interval
+ * logic), then hand the NAL to the aggregation or fragmentation path. */
+static GstFlowReturn
+gst_rtp_h265_pay_payload_nal (GstRTPBasePayload * basepayload,
+ GPtrArray * paybufs, GstClockTime dts, GstClockTime pts)
+{
+ GstRtpH265Pay *rtph265pay;
+ guint mtu;
+ GstFlowReturn ret;
+ gint i;
+ gboolean sent_ps;
+
+ rtph265pay = GST_RTP_H265_PAY (basepayload);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph265pay);
+
+ /* should set src caps before pushing stuff,
+ * and if we did not see enough VPS/SPS/PPS, that may not be the case */
+ if (G_UNLIKELY (!gst_pad_has_current_caps (GST_RTP_BASE_PAYLOAD_SRCPAD
+ (basepayload))))
+ gst_rtp_h265_pay_set_vps_sps_pps (basepayload);
+
+ ret = GST_FLOW_OK;
+ sent_ps = FALSE;
+ for (i = 0; i < paybufs->len; i++) {
+ guint8 nal_header[2];
+ guint8 nal_type;
+ GstBuffer *paybuf;
+ gboolean send_ps;
+ guint size;
+ gboolean marker;
+
+ paybuf = g_ptr_array_index (paybufs, i);
+
+ if (ret != GST_FLOW_OK) {
+ /* unref buffers that will not be payloaded after a flow error */
+ gst_buffer_unref (paybuf);
+ continue;
+ }
+
+ marker = GST_BUFFER_FLAG_IS_SET (paybuf, GST_BUFFER_FLAG_MARKER);
+
+ size = gst_buffer_get_size (paybuf);
+ gst_buffer_extract (paybuf, 0, nal_header, 2);
+ nal_type = (nal_header[0] >> 1) & 0x3f;
+
+ GST_DEBUG_OBJECT (rtph265pay, "payloading NAL Unit: datasize=%u type=%d"
+ " pts=%" GST_TIME_FORMAT, size, nal_type, GST_TIME_ARGS (pts));
+
+ send_ps = FALSE;
+
+ /* check if we need to emit an VPS/SPS/PPS now; only VCL slice types
+ * can trigger a parameter-set re-send */
+ if ((nal_type == GST_H265_NAL_SLICE_TRAIL_N)
+ || (nal_type == GST_H265_NAL_SLICE_TRAIL_R)
+ || (nal_type == GST_H265_NAL_SLICE_TSA_N)
+ || (nal_type == GST_H265_NAL_SLICE_TSA_R)
+ || (nal_type == GST_H265_NAL_SLICE_STSA_N)
+ || (nal_type == GST_H265_NAL_SLICE_STSA_R)
+ || (nal_type == GST_H265_NAL_SLICE_RASL_N)
+ || (nal_type == GST_H265_NAL_SLICE_RASL_R)
+ || (nal_type == GST_H265_NAL_SLICE_BLA_W_LP)
+ || (nal_type == GST_H265_NAL_SLICE_BLA_W_RADL)
+ || (nal_type == GST_H265_NAL_SLICE_BLA_N_LP)
+ || (nal_type == GST_H265_NAL_SLICE_IDR_W_RADL)
+ || (nal_type == GST_H265_NAL_SLICE_IDR_N_LP)
+ || (nal_type == GST_H265_NAL_SLICE_CRA_NUT)) {
+ if (rtph265pay->vps_sps_pps_interval > 0) {
+ if (rtph265pay->last_vps_sps_pps != -1) {
+ guint64 diff;
+ GstClockTime running_time =
+ gst_segment_to_running_time (&basepayload->segment,
+ GST_FORMAT_TIME, pts);
+
+ GST_LOG_OBJECT (rtph265pay,
+ "now %" GST_TIME_FORMAT ", last VPS/SPS/PPS %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time),
+ GST_TIME_ARGS (rtph265pay->last_vps_sps_pps));
+
+ /* calculate diff between last SPS/PPS in milliseconds */
+ if (running_time > rtph265pay->last_vps_sps_pps)
+ diff = running_time - rtph265pay->last_vps_sps_pps;
+ else
+ diff = 0;
+
+ GST_DEBUG_OBJECT (rtph265pay,
+ "interval since last VPS/SPS/PPS %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (diff));
+
+ /* bigger than interval, queue SPS/PPS */
+ if (GST_TIME_AS_SECONDS (diff) >= rtph265pay->vps_sps_pps_interval) {
+ GST_DEBUG_OBJECT (rtph265pay, "time to send VPS/SPS/PPS");
+ send_ps = TRUE;
+ }
+ } else {
+ /* no known previous SPS/PPS time, send now */
+ GST_DEBUG_OBJECT (rtph265pay,
+ "no previous VPS/SPS/PPS time, send now");
+ send_ps = TRUE;
+ }
+ } else if (rtph265pay->vps_sps_pps_interval == -1
+ && (nal_type == GST_H265_NAL_SLICE_IDR_W_RADL
+ || nal_type == GST_H265_NAL_SLICE_IDR_N_LP)) {
+ /* send VPS/SPS/PPS before every IDR frame */
+ send_ps = TRUE;
+ }
+ }
+
+ if (!sent_ps && (send_ps || rtph265pay->send_vps_sps_pps)) {
+ /* we need to send SPS/PPS now first. FIXME, don't use the pts for
+ * checking when we need to send SPS/PPS but convert to running_time
+ * first */
+ rtph265pay->send_vps_sps_pps = FALSE;
+ sent_ps = TRUE;
+ GST_DEBUG_OBJECT (rtph265pay, "sending VPS/SPS/PPS before current frame");
+ ret =
+ gst_rtp_h265_pay_send_vps_sps_pps (basepayload, rtph265pay, dts, pts);
+ if (ret != GST_FLOW_OK) {
+ gst_buffer_unref (paybuf);
+ continue;
+ }
+ }
+
+ if (rtph265pay->aggregate_mode != GST_RTP_H265_AGGREGATE_NONE)
+ ret = gst_rtp_h265_pay_payload_nal_bundle (basepayload, paybuf, dts, pts,
+ marker, nal_type, nal_header, size);
+ else
+ ret = gst_rtp_h265_pay_payload_nal_fragment (basepayload, paybuf, dts,
+ pts, marker, mtu, nal_type, nal_header, size);
+ }
+
+ g_ptr_array_free (paybufs, TRUE);
+
+ return ret;
+}
+
+/* Payload one complete NAL unit into a single RTP packet.
+ *
+ * Ownership of @paybuf is transferred: it is appended to the output buffer
+ * (gst_buffer_append takes the reference). The RTP marker bit is set from
+ * @marker and the packet inherits @pts/@dts. Returns the flow result of
+ * pushing the one-element buffer list downstream. */
+static GstFlowReturn
+gst_rtp_h265_pay_payload_nal_single (GstRTPBasePayload * basepayload,
+ GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean marker)
+{
+ GstBufferList *outlist;
+ GstBuffer *outbuf;
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+
+ /* use buffer lists
+ * create buffer without payload containing only the RTP header
+ * (memory block at index 0) */
+ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+ /* Mark the end of a frame */
+ gst_rtp_buffer_set_marker (&rtp, marker);
+
+ /* timestamp the outbuffer */
+ GST_BUFFER_PTS (outbuf) = pts;
+ GST_BUFFER_DTS (outbuf) = dts;
+
+ /* insert payload memory block; the NAL data is appended zero-copy after
+ * the RTP header memory rather than memcpy'd */
+ gst_rtp_copy_video_meta (basepayload, outbuf, paybuf);
+ outbuf = gst_buffer_append (outbuf, paybuf);
+
+ outlist = gst_buffer_list_new ();
+
+ /* add the buffer to the buffer list */
+ gst_buffer_list_add (outlist, outbuf);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ /* push the list to the next element in the pipe */
+ return gst_rtp_base_payload_push_list (basepayload, outlist);
+}
+
+/* Payload one NAL unit, fragmenting it into FU packets (type 49) when it
+ * does not fit in @mtu; if it fits, delegates to payload_nal_single.
+ *
+ * @nal_header holds the first two bytes of the NAL unit (already extracted
+ * by the caller); the payload body is taken from @paybuf starting at offset
+ * 2 because the 2-byte NAL header is reconstructed inside the FU PayloadHdr.
+ * Consumes the @paybuf reference on the fragmentation path. */
+static GstFlowReturn
+gst_rtp_h265_pay_payload_nal_fragment (GstRTPBasePayload * basepayload,
+ GstBuffer * paybuf, GstClockTime dts, GstClockTime pts, gboolean marker,
+ guint mtu, guint8 nal_type, const guint8 * nal_header, int size)
+{
+ GstRtpH265Pay *rtph265pay = (GstRtpH265Pay *) basepayload;
+ GstFlowReturn ret;
+ guint max_fragment_size, ii, pos;
+ GstBuffer *outbuf;
+ GstBufferList *outlist = NULL;
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+ guint8 *payload;
+
+ if (gst_rtp_buffer_calc_packet_len (size, 0, 0) < mtu) {
+ GST_DEBUG_OBJECT (rtph265pay,
+ "NAL Unit fit in one packet datasize=%d mtu=%d", size, mtu);
+ /* will fit in one packet; paybuf ownership passes to _single () */
+ return gst_rtp_h265_pay_payload_nal_single (basepayload, paybuf, dts, pts,
+ marker);
+ }
+
+ GST_DEBUG_OBJECT (basepayload,
+ "NAL Unit DOES NOT fit in one packet datasize=%d mtu=%d", size, mtu);
+
+ GST_DEBUG_OBJECT (basepayload, "Using FU fragmentation for data size=%d",
+ size - 2);
+
+ /* We keep 3 bytes for PayloadHdr and FU Header */
+ max_fragment_size = gst_rtp_buffer_calc_payload_len (mtu - 3, 0, 0);
+
+ outlist = gst_buffer_list_new ();
+
+ /* pos starts at 2: the NAL's own 2-byte header is not sent as payload */
+ for (pos = 2, ii = 0; pos < size; pos += max_fragment_size, ii++) {
+ guint remaining, fragment_size;
+ gboolean first_fragment, last_fragment;
+
+ remaining = size - pos;
+ fragment_size = MIN (remaining, max_fragment_size);
+ first_fragment = (pos == 2);
+ last_fragment = (remaining <= max_fragment_size);
+
+ GST_DEBUG_OBJECT (basepayload,
+ "Inside FU fragmentation fragment_size=%u iteration=%d %s%s",
+ fragment_size, ii, first_fragment ? "first" : "",
+ last_fragment ? "last" : "");
+
+ /* use buffer lists
+ * create buffer without payload containing only the RTP header
+ * (memory block at index 0), and with space for PayloadHdr and FU header */
+ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 3, 0, 0);
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+ GST_BUFFER_DTS (outbuf) = dts;
+ GST_BUFFER_PTS (outbuf) = pts;
+ payload = gst_rtp_buffer_get_payload (&rtp);
+
+ /* PayloadHdr (type = FU_TYPE_ID (49)); 0x81 keeps the original F bit
+ * and the MSB of LayerId, the 6-bit type field is replaced */
+ payload[0] = (nal_header[0] & 0x81) | (FU_TYPE_ID << 1);
+ payload[1] = nal_header[1];
+
+ /* If it's the last fragment and the end of this au, mark the end of
+ * slice */
+ gst_rtp_buffer_set_marker (&rtp, last_fragment && marker);
+
+ /* FU Header: S bit, E bit, original 6-bit NAL type */
+ payload[2] = (first_fragment << 7) | (last_fragment << 6) |
+ (nal_type & 0x3f);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ /* insert payload memory block (zero-copy region of paybuf) */
+ gst_rtp_copy_video_meta (rtph265pay, outbuf, paybuf);
+ gst_buffer_copy_into (outbuf, paybuf, GST_BUFFER_COPY_MEMORY, pos,
+ fragment_size);
+ /* add the buffer to the buffer list */
+ gst_buffer_list_add (outlist, outbuf);
+ }
+
+ ret = gst_rtp_base_payload_push_list (basepayload, outlist);
+ gst_buffer_unref (paybuf);
+
+ return ret;
+}
+
+/* Send the currently accumulated aggregation bundle, if any.
+ *
+ * A single queued NALU is pushed unaggregated; two or more are wrapped in
+ * an Aggregation Packet (type 48): a 2-byte PayloadHdr followed by
+ * (16-bit size, NALU) pairs. Per RFC 7798 the AP header carries the OR of
+ * the bundled NALUs' F bits and the minimum LayerId/TID seen.
+ * Always resets the bundle state before pushing. Returns the push result,
+ * or GST_FLOW_OK when there was nothing to send.
+ *
+ * Fixes vs. previous revision: ap_header[] was read (|= of the F bit)
+ * before ever being written — an uninitialized read — and the collected
+ * F bit was then discarded by the unconditional assignment after the loop;
+ * the LayerId MSB was also masked into bit 5 (inside the type field)
+ * instead of bit 0 of the first header byte. */
+static GstFlowReturn
+gst_rtp_h265_pay_send_bundle (GstRtpH265Pay * rtph265pay, gboolean marker)
+{
+ GstRTPBasePayload *basepayload;
+ GstBufferList *bundle;
+ guint length, bundle_size;
+ GstBuffer *first, *outbuf;
+ GstClockTime dts, pts;
+
+ bundle_size = rtph265pay->bundle_size;
+
+ if (bundle_size == 0) {
+ GST_DEBUG_OBJECT (rtph265pay, "no bundle, nothing to send");
+ return GST_FLOW_OK;
+ }
+
+ basepayload = GST_RTP_BASE_PAYLOAD (rtph265pay);
+ bundle = rtph265pay->bundle;
+ length = gst_buffer_list_length (bundle);
+
+ /* the bundle inherits its timestamps from the first queued NALU */
+ first = gst_buffer_list_get (bundle, 0);
+ dts = GST_BUFFER_DTS (first);
+ pts = GST_BUFFER_PTS (first);
+
+ if (length == 1) {
+ /* Push unaggregated NALU */
+ outbuf = gst_buffer_ref (first);
+
+ GST_DEBUG_OBJECT (rtph265pay,
+ "sending NAL Unit unaggregated: datasize=%u", bundle_size - 2);
+ } else {
+ /* zero-initialized so the F bit can be OR-ed in while scanning; the
+ * remaining header fields are filled in after the loop */
+ guint8 ap_header[2] = { 0, 0 };
+ guint i;
+ guint8 layer_id = 0xFF;
+ guint8 temporal_id = 0xFF;
+
+ outbuf = gst_buffer_new_allocate (NULL, sizeof ap_header, NULL);
+
+ for (i = 0; i < length; i++) {
+ GstBuffer *buf = gst_buffer_list_get (bundle, i);
+ guint8 nal_header[2];
+ GstMemory *size_header;
+ GstMapInfo map;
+ guint8 nal_layer_id;
+ guint8 nal_temporal_id;
+
+ gst_buffer_extract (buf, 0, &nal_header, sizeof nal_header);
+
+ /* Propagate F bit */
+ if ((nal_header[0] & 0x80))
+ ap_header[0] |= 0x80;
+
+ /* Select lowest layer_id & temporal_id */
+ nal_layer_id = ((nal_header[0] & 0x01) << 5) |
+ ((nal_header[1] >> 3) & 0x1F);
+ nal_temporal_id = nal_header[1] & 0x7;
+ layer_id = MIN (layer_id, nal_layer_id);
+ temporal_id = MIN (temporal_id, nal_temporal_id);
+
+ /* append NALU size */
+ size_header = gst_allocator_alloc (NULL, 2, NULL);
+ gst_memory_map (size_header, &map, GST_MAP_WRITE);
+ GST_WRITE_UINT16_BE (map.data, gst_buffer_get_size (buf));
+ gst_memory_unmap (size_header, &map);
+ gst_buffer_append_memory (outbuf, size_header);
+
+ /* append NALU data */
+ outbuf = gst_buffer_append (outbuf, gst_buffer_ref (buf));
+ }
+
+ /* OR into the F bit collected above; the MSB of the 6-bit layer_id
+ * lands in bit 0 of the first PayloadHdr byte (RFC 7798) */
+ ap_header[0] |= (AP_TYPE_ID << 1) | ((layer_id >> 5) & 0x01);
+ ap_header[1] = ((layer_id & 0x1F) << 3) | (temporal_id & 0x07);
+
+ gst_buffer_fill (outbuf, 0, &ap_header, sizeof ap_header);
+
+ GST_DEBUG_OBJECT (rtph265pay,
+ "sending AP bundle: n=%u header=%02x%02x datasize=%u",
+ length, ap_header[0], ap_header[1], bundle_size);
+ }
+
+ gst_rtp_h265_pay_reset_bundle (rtph265pay);
+ return gst_rtp_h265_pay_payload_nal_single (basepayload, outbuf, dts, pts,
+ marker);
+}
+
+/* Queue one NAL unit into the current AP bundle, flushing the bundle first
+ * when an access-unit boundary, a discont, a timestamp change, or an MTU
+ * overflow is detected. Consumes the @paybuf reference on all paths.
+ *
+ * NOTE(review): declared gboolean but every return value here is a
+ * GstFlowReturn, and the caller assigns the result to a GstFlowReturn —
+ * the return type should likely be GstFlowReturn (update the forward
+ * declaration accordingly); confirm against the prototype at the top of
+ * the file. */
+static gboolean
+gst_rtp_h265_pay_payload_nal_bundle (GstRTPBasePayload * basepayload,
+ GstBuffer * paybuf, GstClockTime dts, GstClockTime pts,
+ gboolean marker, guint8 nal_type, const guint8 * nal_header, int size)
+{
+ GstRtpH265Pay *rtph265pay;
+ GstFlowReturn ret;
+ guint pay_size, bundle_size;
+ GstBufferList *bundle;
+ gboolean start_of_au;
+ guint mtu;
+
+ rtph265pay = GST_RTP_H265_PAY (basepayload);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtph265pay);
+ /* +2 accounts for the per-NALU 16-bit size field written by
+ * gst_rtp_h265_pay_send_bundle () */
+ pay_size = 2 + gst_buffer_get_size (paybuf);
+ bundle = rtph265pay->bundle;
+ start_of_au = FALSE;
+
+ if (bundle) {
+ GstBuffer *first = gst_buffer_list_get (bundle, 0);
+
+ if (nal_type == GST_H265_NAL_AUD) {
+ GST_DEBUG_OBJECT (rtph265pay, "found access delimiter");
+ start_of_au = TRUE;
+ } else if (GST_BUFFER_IS_DISCONT (paybuf)) {
+ GST_DEBUG_OBJECT (rtph265pay, "found discont");
+ start_of_au = TRUE;
+ } else if (GST_BUFFER_PTS (first) != pts || GST_BUFFER_DTS (first) != dts) {
+ GST_DEBUG_OBJECT (rtph265pay, "found timestamp mismatch");
+ start_of_au = TRUE;
+ }
+ }
+
+ if (start_of_au) {
+ GST_DEBUG_OBJECT (rtph265pay, "sending bundle before start of AU");
+
+ ret = gst_rtp_h265_pay_send_bundle (rtph265pay, TRUE);
+ if (ret != GST_FLOW_OK)
+ goto out;
+
+ bundle = NULL;
+ }
+
+ /* +2 more for the AP PayloadHdr itself: can this NALU ever fit in an
+ * aggregate at all? */
+ bundle_size = 2 + pay_size;
+
+ if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
+ GST_DEBUG_OBJECT (rtph265pay, "NAL Unit cannot fit in a bundle");
+
+ ret = gst_rtp_h265_pay_send_bundle (rtph265pay, FALSE);
+ if (ret != GST_FLOW_OK)
+ goto out;
+
+ /* too big to aggregate: send fragmented; consumes paybuf */
+ return gst_rtp_h265_pay_payload_nal_fragment (basepayload, paybuf, dts, pts,
+ marker, mtu, nal_type, nal_header, size);
+ }
+
+ bundle_size = rtph265pay->bundle_size + pay_size;
+
+ if (gst_rtp_buffer_calc_packet_len (bundle_size, 0, 0) > mtu) {
+ GST_DEBUG_OBJECT (rtph265pay,
+ "bundle overflows, sending: bundlesize=%u datasize=2+%u mtu=%u",
+ rtph265pay->bundle_size, pay_size - 2, mtu);
+
+ ret = gst_rtp_h265_pay_send_bundle (rtph265pay, FALSE);
+ if (ret != GST_FLOW_OK)
+ goto out;
+
+ bundle = NULL;
+ }
+
+ if (!bundle) {
+ GST_DEBUG_OBJECT (rtph265pay, "creating new AP aggregate");
+ /* bundle_size starts at 2 for the AP PayloadHdr */
+ bundle = rtph265pay->bundle = gst_buffer_list_new ();
+ bundle_size = rtph265pay->bundle_size = 2;
+ rtph265pay->bundle_contains_vcl_or_suffix = FALSE;
+ }
+
+ GST_DEBUG_OBJECT (rtph265pay,
+ "bundling NAL Unit: bundlesize=%u datasize=2+%u mtu=%u",
+ rtph265pay->bundle_size, pay_size - 2, mtu);
+
+ paybuf = gst_buffer_make_writable (paybuf);
+ GST_BUFFER_PTS (paybuf) = pts;
+ GST_BUFFER_DTS (paybuf) = dts;
+
+ gst_buffer_list_add (bundle, gst_buffer_ref (paybuf));
+ rtph265pay->bundle_size += pay_size;
+ ret = GST_FLOW_OK;
+
+ /* In H.265, all VCL NAL units are < 32 */
+ if (nal_type < 32 || nal_type == GST_H265_NAL_EOS ||
+ nal_type == GST_H265_NAL_EOB || nal_type == GST_H265_NAL_SUFFIX_SEI ||
+ (nal_type >= 45 && nal_type <= 47) || (nal_type >= 56 && nal_type < 63))
+ rtph265pay->bundle_contains_vcl_or_suffix = TRUE;
+
+ if (marker) {
+ GST_DEBUG_OBJECT (rtph265pay, "sending bundle at marker");
+ ret = gst_rtp_h265_pay_send_bundle (rtph265pay, TRUE);
+ }
+
+out:
+ gst_buffer_unref (paybuf);
+ return ret;
+}
+
+/* Main chain/drain entry point of the payloader.
+ *
+ * @buffer may be NULL, which means "drain" (flush the last NAL queued in
+ * the adapter in byte-stream mode). Two input modes are handled:
+ *  - hev1/hvc1: NALUs are length-prefixed (nal_length_size bytes); the
+ *    buffer is walked in place and zero-copy sub-buffers are collected;
+ *  - byte-stream: input is accumulated in the adapter, scanned for
+ *    0x000001 start codes in a first pass (also parsing VPS/SPS/PPS to
+ *    update caps), then payloaded in a second pass.
+ * Collected sub-buffers are handed to gst_rtp_h265_pay_payload_nal (),
+ * which takes ownership of the array and its buffers. */
+static GstFlowReturn
+gst_rtp_h265_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpH265Pay *rtph265pay;
+ GstFlowReturn ret;
+ gsize size;
+ guint nal_len, i;
+ const guint8 *data;
+ GstClockTime dts, pts;
+ GArray *nal_queue;
+ gboolean hevc;
+ GstBuffer *paybuf = NULL;
+ gsize skip;
+ gboolean marker = FALSE;
+ gboolean discont = FALSE;
+ gboolean draining = (buffer == NULL);
+
+ rtph265pay = GST_RTP_H265_PAY (basepayload);
+
+ /* the input buffer contains one or more NAL units */
+
+ hevc = (rtph265pay->stream_format == GST_H265_STREAM_FORMAT_HEV1)
+ || (rtph265pay->stream_format == GST_H265_STREAM_FORMAT_HVC1);
+
+ if (hevc) {
+ /* In hevc mode, there is no adapter, so nothing to drain */
+ if (draining)
+ return GST_FLOW_OK;
+ } else {
+ if (buffer) {
+ /* only take DISCONT from the buffer that starts a fresh adapter run */
+ if (gst_adapter_available (rtph265pay->adapter) == 0)
+ discont = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+ marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
+ gst_adapter_push (rtph265pay->adapter, buffer);
+ buffer = NULL;
+ }
+
+ /* We want to use the first TS used to construct the following NAL */
+ dts = gst_adapter_prev_dts (rtph265pay->adapter, NULL);
+ pts = gst_adapter_prev_pts (rtph265pay->adapter, NULL);
+
+ size = gst_adapter_available (rtph265pay->adapter);
+ /* Nothing to do here if the adapter is empty, e.g. on EOS */
+ if (size == 0)
+ return GST_FLOW_OK;
+ data = gst_adapter_map (rtph265pay->adapter, size);
+ GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes", size);
+ }
+
+ ret = GST_FLOW_OK;
+
+ /* now loop over all NAL units and put them in a packet */
+ if (hevc) {
+ GstBufferMemoryMap memory;
+ gsize remaining_buffer_size;
+ guint nal_length_size;
+ gsize offset = 0;
+ GPtrArray *paybufs;
+
+ paybufs = g_ptr_array_new ();
+ nal_length_size = rtph265pay->nal_length_size;
+
+ gst_buffer_memory_map (buffer, &memory);
+ remaining_buffer_size = gst_buffer_get_size (buffer);
+
+ pts = GST_BUFFER_PTS (buffer);
+ dts = GST_BUFFER_DTS (buffer);
+ marker = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_MARKER);
+ GST_DEBUG_OBJECT (basepayload, "got %" G_GSIZE_FORMAT " bytes",
+ remaining_buffer_size);
+
+ while (remaining_buffer_size > nal_length_size) {
+ /* NOTE(review): this inner gint i shadows the outer guint i */
+ gint i;
+
+ /* read the big-endian nal_length_size-byte length prefix */
+ nal_len = 0;
+ for (i = 0; i < nal_length_size; i++) {
+ nal_len = (nal_len << 8) + *memory.data;
+ if (!gst_buffer_memory_advance_bytes (&memory, 1))
+ break;
+ }
+
+ offset += nal_length_size;
+ remaining_buffer_size -= nal_length_size;
+
+ if (remaining_buffer_size >= nal_len) {
+ GST_DEBUG_OBJECT (basepayload, "got NAL of size %u", nal_len);
+ } else {
+ /* truncated input: clamp to what is actually available */
+ nal_len = remaining_buffer_size;
+ GST_DEBUG_OBJECT (basepayload, "got incomplete NAL of size %u",
+ nal_len);
+ }
+
+ paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset,
+ nal_len);
+ g_ptr_array_add (paybufs, paybuf);
+
+ /* If we're at the end of the buffer, then we're at the end of the
+ * access unit
+ */
+ GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_MARKER);
+ if (remaining_buffer_size - nal_len <= nal_length_size) {
+ if (rtph265pay->alignment == GST_H265_ALIGNMENT_AU || marker)
+ GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_MARKER);
+ }
+
+ /* propagate DISCONT onto the first collected NALU only */
+ GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DISCONT);
+ if (discont) {
+ GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DISCONT);
+ discont = FALSE;
+ }
+
+ /* Skip current nal. If it is split over multiple GstMemory
+ * advance_bytes () will switch to the correct GstMemory. The payloader
+ * does not access those bytes directly but uses gst_buffer_copy_region ()
+ * to create a sub-buffer referencing the nal instead */
+ if (!gst_buffer_memory_advance_bytes (&memory, nal_len))
+ break;
+ offset += nal_len;
+ remaining_buffer_size -= nal_len;
+ }
+ ret = gst_rtp_h265_pay_payload_nal (basepayload, paybufs, dts, pts);
+
+ gst_buffer_memory_unmap (&memory);
+ gst_buffer_unref (buffer);
+ } else {
+ guint next;
+ gboolean update = FALSE;
+ GPtrArray *paybufs;
+
+ /* get offset of first start code */
+ next = next_start_code (data, size);
+
+ /* skip to start code, if no start code is found, next will be size and we
+ * will not collect data. */
+ data += next;
+ size -= next;
+ nal_queue = rtph265pay->queue;
+ skip = next;
+
+ /* array must be empty when we get here */
+ g_assert (nal_queue->len == 0);
+
+ GST_DEBUG_OBJECT (basepayload,
+ "found first start at %u, bytes left %" G_GSIZE_FORMAT, next, size);
+
+ paybufs = g_ptr_array_new ();
+
+ /* first pass to locate NALs and parse VPS/SPS/PPS */
+ while (size > 4) {
+ /* skip start code */
+ data += 3;
+ size -= 3;
+
+ /* use next_start_code() to scan buffer.
+ * next_start_code() returns the offset in data,
+ * starting from zero to the first byte of 0.0.0.1
+ * If no start code is found, it returns the value of the
+ * 'size' parameter.
+ * data is unchanged by the call to next_start_code()
+ */
+ next = next_start_code (data, size);
+
+ /* nal or au aligned input needs no delaying until next time */
+ if (next == size && !draining &&
+ rtph265pay->alignment == GST_H265_ALIGNMENT_UNKNOWN) {
+ /* Didn't find the start of next NAL and it's not EOS,
+ * handle it next time */
+ break;
+ }
+
+ /* nal length is distance to next start code */
+ nal_len = next;
+
+ GST_DEBUG_OBJECT (basepayload, "found next start at %u of size %u", next,
+ nal_len);
+
+ /* We know our stream is a valid H265 NAL packet,
+ * go parse it for VPS/SPS/PPS to enrich the caps */
+ /* order: make sure to check nal */
+ update = gst_rtp_h265_pay_decode_nal (rtph265pay, data, nal_len, dts, pts)
+ || update;
+
+ /* move to next NAL packet */
+ data += nal_len;
+ size -= nal_len;
+
+ g_array_append_val (nal_queue, nal_len);
+ }
+
+ /* if has new VPS, SPS & PPS, update the output caps */
+ if (G_UNLIKELY (update))
+ if (!gst_rtp_h265_pay_set_vps_sps_pps (basepayload))
+ goto caps_rejected;
+
+ /* second pass to payload and push */
+
+ if (nal_queue->len != 0)
+ gst_adapter_flush (rtph265pay->adapter, skip);
+
+ for (i = 0; i < nal_queue->len; i++) {
+ /* shadows the outer gsize size on purpose: per-NAL trimmed length */
+ guint size;
+
+ nal_len = g_array_index (nal_queue, guint, i);
+ /* skip start code */
+ gst_adapter_flush (rtph265pay->adapter, 3);
+
+ /* Trim the end unless we're the last NAL in the stream.
+ * In case we're not at the end of the buffer we know the next block
+ * starts with 0x000001 so all the 0x00 bytes at the end of this one are
+ * trailing 0x0 that can be discarded */
+ size = nal_len;
+ data = gst_adapter_map (rtph265pay->adapter, size);
+ if (i + 1 != nal_queue->len || !draining)
+ for (; size > 2 && data[size - 1] == 0x0; size--)
+ /* skip */ ;
+
+ paybuf = gst_adapter_take_buffer (rtph265pay->adapter, size);
+ g_assert (paybuf);
+ g_ptr_array_add (paybufs, paybuf);
+
+ /* If it's the last nal unit we have in non-bytestream mode, we can
+ * assume it's the end of an access-unit */
+ GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_MARKER);
+ if (i == nal_queue->len - 1) {
+ if (rtph265pay->alignment == GST_H265_ALIGNMENT_AU ||
+ marker || draining)
+ GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_MARKER);
+ }
+
+ GST_BUFFER_FLAG_UNSET (paybuf, GST_BUFFER_FLAG_DISCONT);
+ if (discont) {
+ GST_BUFFER_FLAG_SET (paybuf, GST_BUFFER_FLAG_DISCONT);
+ discont = FALSE;
+ }
+
+ /* move to next NAL packet */
+ /* Skips the trailing zeros */
+ gst_adapter_flush (rtph265pay->adapter, nal_len - size);
+ }
+ /* put the data in one or more RTP packets */
+ ret = gst_rtp_h265_pay_payload_nal (basepayload, paybufs, dts, pts);
+ g_array_set_size (nal_queue, 0);
+ }
+
+ /* in zero-latency aggregation, flush the bundle once per input packet as
+ * soon as it contains something a decoder can act on */
+ if (ret == GST_FLOW_OK && rtph265pay->bundle_size > 0 &&
+ rtph265pay->aggregate_mode == GST_RTP_H265_AGGREGATE_ZERO_LATENCY &&
+ rtph265pay->bundle_contains_vcl_or_suffix) {
+ GST_DEBUG_OBJECT (rtph265pay, "sending bundle at end incoming packet");
+ ret = gst_rtp_h265_pay_send_bundle (rtph265pay, FALSE);
+ }
+
+done:
+ if (!hevc) {
+ gst_adapter_unmap (rtph265pay->adapter);
+ }
+
+ return ret;
+
+caps_rejected:
+ {
+ GST_WARNING_OBJECT (basepayload, "Could not set outcaps");
+ g_array_set_size (nal_queue, 0);
+ ret = GST_FLOW_NOT_NEGOTIATED;
+ goto done;
+ }
+}
+
+/* Sink event handler: flushes internal state on FLUSH_STOP, honours
+ * GstForceKeyUnit's "all-headers" to re-send VPS/SPS/PPS, drains the
+ * adapter and the AP bundle on EOS, and clears cached parameter sets on
+ * STREAM_START, before chaining up to the base class.
+ *
+ * NOTE(review): when the EOS bundle flush fails (ret != GST_FLOW_OK) this
+ * returns FALSE without chaining up, so the event is neither forwarded nor
+ * unreffed here — verify event ownership against the base class contract. */
+static gboolean
+gst_rtp_h265_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ gboolean res;
+ const GstStructure *s;
+ GstRtpH265Pay *rtph265pay = GST_RTP_H265_PAY (payload);
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ gst_adapter_clear (rtph265pay->adapter);
+ gst_rtp_h265_pay_reset_bundle (rtph265pay);
+ break;
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ s = gst_event_get_structure (event);
+ if (gst_structure_has_name (s, "GstForceKeyUnit")) {
+ gboolean resend_codec_data;
+
+ if (gst_structure_get_boolean (s, "all-headers",
+ &resend_codec_data) && resend_codec_data)
+ rtph265pay->send_vps_sps_pps = TRUE;
+ }
+ break;
+ case GST_EVENT_EOS:
+ {
+ /* call handle_buffer with NULL to flush last NAL from adapter
+ * in byte-stream mode
+ */
+ gst_rtp_h265_pay_handle_buffer (payload, NULL);
+ ret = gst_rtp_h265_pay_send_bundle (rtph265pay, TRUE);
+
+ break;
+ }
+ case GST_EVENT_STREAM_START:
+ GST_DEBUG_OBJECT (rtph265pay,
+ "New stream detected => Clear VPS, SPS and PPS");
+ gst_rtp_h265_pay_clear_vps_sps_pps (rtph265pay);
+ break;
+ default:
+ break;
+ }
+
+ if (ret != GST_FLOW_OK)
+ return FALSE;
+
+ res = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+
+ return res;
+}
+
+/* Element state handling: reset per-stream payloading state when going
+ * READY->PAUSED, and drop cached parameter sets after PAUSED->READY.
+ * The parent class transition runs in between, as required. */
+static GstStateChangeReturn
+gst_rtp_h265_pay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpH265Pay *self = GST_RTP_H265_PAY (element);
+ GstStateChangeReturn result;
+
+ /* pre-transition: start PAUSED with a clean adapter and empty bundle */
+ if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
+ self->send_vps_sps_pps = FALSE;
+ gst_adapter_clear (self->adapter);
+ gst_rtp_h265_pay_reset_bundle (self);
+ }
+
+ result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* post-transition: forget VPS/SPS/PPS once streaming has stopped */
+ if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
+ self->last_vps_sps_pps = -1;
+ gst_rtp_h265_pay_clear_vps_sps_pps (self);
+ }
+
+ return result;
+}
+
+/* GObject property setter for config-interval and aggregate-mode. */
+static void
+gst_rtp_h265_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpH265Pay *self = GST_RTP_H265_PAY (object);
+
+ if (prop_id == PROP_CONFIG_INTERVAL) {
+ self->vps_sps_pps_interval = g_value_get_int (value);
+ } else if (prop_id == PROP_AGGREGATE_MODE) {
+ self->aggregate_mode = g_value_get_enum (value);
+ } else {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+/* GObject property getter mirroring gst_rtp_h265_pay_set_property. */
+static void
+gst_rtp_h265_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpH265Pay *self = GST_RTP_H265_PAY (object);
+
+ if (prop_id == PROP_CONFIG_INTERVAL) {
+ g_value_set_int (value, self->vps_sps_pps_interval);
+ } else if (prop_id == PROP_AGGREGATE_MODE) {
+ g_value_set_enum (value, self->aggregate_mode);
+ } else {
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
diff --git a/gst/rtp/gstrtph265pay.h b/gst/rtp/gstrtph265pay.h
new file mode 100644
index 0000000000..af80c72bef
--- /dev/null
+++ b/gst/rtp/gstrtph265pay.h
@@ -0,0 +1,91 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2014> Jurgen Slowack <jurgenslowack@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H265_PAY_H__
+#define __GST_RTP_H265_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include "gstrtph265types.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_H265_PAY \
+ (gst_rtp_h265_pay_get_type())
+#define GST_RTP_H265_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_H265_PAY,GstRtpH265Pay))
+#define GST_RTP_H265_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_H265_PAY,GstRtpH265PayClass))
+#define GST_IS_RTP_H265_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_H265_PAY))
+#define GST_IS_RTP_H265_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_H265_PAY))
+typedef struct _GstRtpH265Pay GstRtpH265Pay;
+typedef struct _GstRtpH265PayClass GstRtpH265PayClass;
+
+/* How the incoming stream is aligned: unknown (NALs must be delayed until
+ * the next start code is seen), one NAL per buffer, or one full access
+ * unit per buffer (last NAL of each buffer gets the RTP marker). */
+typedef enum
+{
+ GST_H265_ALIGNMENT_UNKNOWN,
+ GST_H265_ALIGNMENT_NAL,
+ GST_H265_ALIGNMENT_AU
+} GstH265Alignment;
+
+/* Aggregation-packet policy: NONE disables AP bundling entirely;
+ * ZERO_LATENCY flushes the bundle at the end of each incoming packet once
+ * it contains a VCL or suffix NALU; MAX presumably bundles until the MTU
+ * or the marker forces a send — confirm against the property docs. */
+typedef enum
+{
+ GST_RTP_H265_AGGREGATE_NONE,
+ GST_RTP_H265_AGGREGATE_ZERO_LATENCY,
+ GST_RTP_H265_AGGREGATE_MAX,
+} GstRTPH265AggregateMode;
+
+struct _GstRtpH265Pay
+{
+ GstRTPBasePayload payload;
+
+ /* cached parameter-set NALUs collected from the stream */
+ GPtrArray *sps, *pps, *vps;
+
+ GstH265StreamFormat stream_format;
+ GstH265Alignment alignment;
+ gint fps_num;
+ gint fps_denum;
+ /* byte count of the NALU length prefix in hev1/hvc1 input */
+ guint nal_length_size;
+ /* guint NAL lengths queued between the scan and payload passes */
+ GArray *queue;
+
+ /* accumulates byte-stream input across buffers */
+ GstAdapter *adapter;
+
+ /* seconds between VPS/SPS/PPS re-sends; -1 = before every IDR */
+ gint vps_sps_pps_interval;
+ gboolean send_vps_sps_pps;
+ /* running time of the last parameter-set send, or -1 if never */
+ GstClockTime last_vps_sps_pps;
+
+ /* aggregate buffers with AP */
+ GstBufferList *bundle;
+ /* bytes the pending AP would occupy (2-byte header + entries) */
+ guint bundle_size;
+ gboolean bundle_contains_vcl_or_suffix;
+ GstRTPH265AggregateMode aggregate_mode;
+};
+
+/* Class structure; no class-level additions beyond the parent. */
+struct _GstRtpH265PayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_h265_pay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_H265_PAY_H__ */
diff --git a/gst/rtp/gstrtph265types.h b/gst/rtp/gstrtph265types.h
new file mode 100644
index 0000000000..b2692e9942
--- /dev/null
+++ b/gst/rtp/gstrtph265types.h
@@ -0,0 +1,76 @@
+/* GStreamer H.265 parser types
+ * Copyright (C) 2013 Intel Corporation
+ * Copyright (C) 2013 Sreerenj Balachandran <sreerenj.balachandran@intel.com>
+ *
+ * Contact: Sreerenj Balachandran <sreerenj.balachandran@intel.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_H265_TYPES_H__
+#define __GST_RTP_H265_TYPES_H__
+
+#include <glib.h>
+
+G_BEGIN_DECLS
+
+/* H.265 nal_unit_type codes used by the payloader: VCL slice types
+ * (0-21, with gaps reserved), followed by the non-VCL parameter-set,
+ * delimiter, end-of-stream/bitstream, filler and SEI types (32-40). */
+typedef enum
+{
+ GST_H265_NAL_SLICE_TRAIL_N = 0,
+ GST_H265_NAL_SLICE_TRAIL_R = 1,
+ GST_H265_NAL_SLICE_TSA_N = 2,
+ GST_H265_NAL_SLICE_TSA_R = 3,
+ GST_H265_NAL_SLICE_STSA_N = 4,
+ GST_H265_NAL_SLICE_STSA_R = 5,
+ GST_H265_NAL_SLICE_RADL_N = 6,
+ GST_H265_NAL_SLICE_RADL_R = 7,
+ GST_H265_NAL_SLICE_RASL_N = 8,
+ GST_H265_NAL_SLICE_RASL_R = 9,
+ GST_H265_NAL_SLICE_BLA_W_LP = 16,
+ GST_H265_NAL_SLICE_BLA_W_RADL = 17,
+ GST_H265_NAL_SLICE_BLA_N_LP = 18,
+ GST_H265_NAL_SLICE_IDR_W_RADL = 19,
+ GST_H265_NAL_SLICE_IDR_N_LP = 20,
+ GST_H265_NAL_SLICE_CRA_NUT = 21,
+ GST_H265_NAL_VPS = 32,
+ GST_H265_NAL_SPS = 33,
+ GST_H265_NAL_PPS = 34,
+ GST_H265_NAL_AUD = 35,
+ GST_H265_NAL_EOS = 36,
+ GST_H265_NAL_EOB = 37,
+ GST_H265_NAL_FD = 38,
+ GST_H265_NAL_PREFIX_SEI = 39,
+ GST_H265_NAL_SUFFIX_SEI = 40
+} GstH265NalUnitType;
+
+#define RESERVED_NON_IRAP_SUBLAYER_NAL_TYPE_MIN 10
+#define RESERVED_NON_IRAP_SUBLAYER_NAL_TYPE_MAX 15
+
+#define RESERVED_IRAP_NAL_TYPE_MIN 22
+#define RESERVED_IRAP_NAL_TYPE_MAX 23
+
+#define RESERVED_NON_IRAP_NAL_TYPE_MIN 24
+#define RESERVED_NON_IRAP_NAL_TYPE_MAX 31
+
+#define RESERVED_NON_VCL_NAL_TYPE_MIN 41
+#define RESERVED_NON_VCL_NAL_TYPE_MAX 47
+
+#define UNSPECIFIED_NON_VCL_NAL_TYPE_MIN 48
+#define UNSPECIFIED_NON_VCL_NAL_TYPE_MAX 63
+
+G_END_DECLS
+
+#endif
diff --git a/gst/rtp/gstrtphdrext-colorspace.c b/gst/rtp/gstrtphdrext-colorspace.c
new file mode 100644
index 0000000000..eded3bed60
--- /dev/null
+++ b/gst/rtp/gstrtphdrext-colorspace.c
@@ -0,0 +1,465 @@
+/* GStreamer
+ * Copyright (C) 2020-2021 Collabora Ltd.
+ * @author: Jakub Adam <jakub.adam@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:rtphdrextcolorspace
+ * @title: GstRtphdrext-Colorspace
+ * @short_description: Helper methods for dealing with Color Space RTP header
+ * extension as defined in http://www.webrtc.org/experiments/rtp-hdrext/color-space
+ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload
+ *
+ * Since: 1.20
+ */
+
+#include "gstrtphdrext-colorspace.h"
+
+#include "gstrtpelements.h"
+
+#include <gst/base/gstbytereader.h>
+#include <gst/video/video-color.h>
+#include <gst/video/video-hdr.h>
+
+GST_DEBUG_CATEGORY_STATIC (rtphdrext_colorspace_debug);
+#define GST_CAT_DEFAULT (rtphdrext_colorspace_debug)
+
+/**
+ * GstRTPHeaderExtensionColorspace:
+ * @parent: the parent #GstRTPHeaderExtension
+ *
+ * Instance struct for Color Space RTP header extension.
+ *
+ * http://www.webrtc.org/experiments/rtp-hdrext/color-space
+ */
+struct _GstRTPHeaderExtensionColorspace
+{
+ GstRTPHeaderExtension parent;
+
+ /* Cached color state: written from sink caps (payloader side) or from
+ * received extension data (depayloader side), and read back when
+ * writing the extension / updating src caps. */
+ GstVideoColorimetry colorimetry;
+ GstVideoChromaSite chroma_site;
+ /* HDR metadata; only meaningful while has_hdr_meta is TRUE, which also
+ * selects the 28-byte wire format over the 4-byte one. */
+ GstVideoMasteringDisplayInfo mdi;
+ GstVideoContentLightLevel cll;
+ gboolean has_hdr_meta;
+};
+
+/* Register the GType and its debug category, then register the element
+ * so it can be auto-plugged by name. */
+G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionColorspace,
+ gst_rtp_header_extension_colorspace, GST_TYPE_RTP_HEADER_EXTENSION,
+ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrextcolorspace", 0,
+ "RTP Color Space Header Extension");
+ );
+GST_ELEMENT_REGISTER_DEFINE (rtphdrextcolorspace, "rtphdrextcolorspace",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_HEADER_EXTENSION_COLORSPACE);
+
+/* Instance init: every member starts zeroed by GObject, which is the
+ * wanted initial state (unknown colorimetry, no HDR metadata). */
+static void
+gst_rtp_header_extension_colorspace_init (GstRTPHeaderExtensionColorspace *
+    self)
+{
+}
+
+/* When HDR metadata is present the payload needs the larger two-byte
+ * extension header form; otherwise the compact one-byte form is used. */
+static GstRTPHeaderExtensionFlags
+gst_rtp_header_extension_colorspace_get_supported_flags (GstRTPHeaderExtension *
+    ext)
+{
+  GstRTPHeaderExtensionColorspace *self =
+      GST_RTP_HEADER_EXTENSION_COLORSPACE (ext);
+
+  if (self->has_hdr_meta)
+    return GST_RTP_HEADER_EXTENSION_TWO_BYTE;
+
+  return GST_RTP_HEADER_EXTENSION_ONE_BYTE;
+}
+
+/* Worst-case number of bytes write() may produce for the next packet:
+ * 28 with HDR metadata, 4 without. */
+static gsize
+gst_rtp_header_extension_colorspace_get_max_size (GstRTPHeaderExtension * ext,
+    const GstBuffer * buffer)
+{
+  GstRTPHeaderExtensionColorspace *self =
+      GST_RTP_HEADER_EXTENSION_COLORSPACE (ext);
+
+  if (self->has_hdr_meta)
+    return GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE;
+
+  return GST_RTP_HDREXT_COLORSPACE_SIZE;
+}
+
+/* Serializes the cached color information into the extension data of an
+ * outgoing packet.  Writes either the 4-byte base form or, when HDR
+ * metadata is present, the 28-byte form.  Returns the number of bytes
+ * written, 0 when nothing should be written for this packet, or -1 on
+ * precondition failure. */
+static gssize
+gst_rtp_header_extension_colorspace_write (GstRTPHeaderExtension * ext,
+ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags,
+ GstBuffer * output, guint8 * data, gsize size)
+{
+ GstRTPHeaderExtensionColorspace *self =
+ GST_RTP_HEADER_EXTENSION_COLORSPACE (ext);
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+ gboolean is_frame_last_buffer;
+ guint8 *ptr = data;
+ guint8 horizontal_site;
+ guint8 vertical_site;
+
+ g_return_val_if_fail (size >=
+ gst_rtp_header_extension_colorspace_get_max_size (ext, NULL), -1);
+ g_return_val_if_fail (write_flags &
+ gst_rtp_header_extension_colorspace_get_supported_flags (ext), -1);
+
+ /* Skip writing entirely while the colorimetry is still fully unknown. */
+ if (self->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_UNKNOWN &&
+ self->colorimetry.primaries == GST_VIDEO_COLOR_PRIMARIES_UNKNOWN &&
+ self->colorimetry.range == GST_VIDEO_COLOR_RANGE_UNKNOWN &&
+ self->colorimetry.transfer == GST_VIDEO_TRANSFER_UNKNOWN) {
+ /* Nothing to write. */
+ return 0;
+ }
+
+ /* The RTP marker bit flags the last packet of a video frame. */
+ gst_rtp_buffer_map (output, GST_MAP_READ, &rtp);
+ is_frame_last_buffer = gst_rtp_buffer_get_marker (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+
+ if (!is_frame_last_buffer) {
+ /* Only a video frame's final packet should carry color space info. */
+ return 0;
+ }
+
+ /* Bytes 0-2: primaries, transfer and matrix as ISO/IEC 23001-8 codes. */
+ *ptr++ = gst_video_color_primaries_to_iso (self->colorimetry.primaries);
+ *ptr++ = gst_video_transfer_function_to_iso (self->colorimetry.transfer);
+ *ptr++ = gst_video_color_matrix_to_iso (self->colorimetry.matrix);
+
+ /* Chroma siting is coded per axis: 1 = co-sited, 2 = not co-sited
+ * (GST_VIDEO_CHROMA_SITE_NONE), 0 = unspecified. */
+ if (self->chroma_site & GST_VIDEO_CHROMA_SITE_H_COSITED) {
+ horizontal_site = 1;
+ } else if (self->chroma_site & GST_VIDEO_CHROMA_SITE_NONE) {
+ horizontal_site = 2;
+ } else {
+ horizontal_site = 0;
+ }
+
+ if (self->chroma_site & GST_VIDEO_CHROMA_SITE_V_COSITED) {
+ vertical_site = 1;
+ } else if (self->chroma_site & GST_VIDEO_CHROMA_SITE_NONE) {
+ vertical_site = 2;
+ } else {
+ vertical_site = 0;
+ }
+
+ /* Byte 3: range in the high nibble, then 2 bits per siting axis. */
+ *ptr++ =
+ (self->colorimetry.range << 4) + (horizontal_site << 2) + vertical_site;
+
+ if (self->has_hdr_meta) {
+ guint i;
+
+ /* HDR block, all fields big-endian 16-bit.  Max mastering luminance is
+ * transmitted scaled down by 10000; min is transmitted as-is. */
+ GST_WRITE_UINT16_BE (ptr,
+ self->mdi.max_display_mastering_luminance / 10000);
+ ptr += 2;
+ GST_WRITE_UINT16_BE (ptr, self->mdi.min_display_mastering_luminance);
+ ptr += 2;
+
+ /* Three display primaries (x, y pairs), then the white point. */
+ for (i = 0; i < 3; ++i) {
+ GST_WRITE_UINT16_BE (ptr, self->mdi.display_primaries[i].x);
+ ptr += 2;
+ GST_WRITE_UINT16_BE (ptr, self->mdi.display_primaries[i].y);
+ ptr += 2;
+ }
+
+ GST_WRITE_UINT16_BE (ptr, self->mdi.white_point.x);
+ ptr += 2;
+ GST_WRITE_UINT16_BE (ptr, self->mdi.white_point.y);
+ ptr += 2;
+
+ GST_WRITE_UINT16_BE (ptr, self->cll.max_content_light_level);
+ ptr += 2;
+ GST_WRITE_UINT16_BE (ptr, self->cll.max_frame_average_light_level);
+ ptr += 2;
+ }
+
+ return ptr - data;
+}
+
+/* Parses the 4-byte base form of the color-space extension from @reader
+ * into @colorimetry and @chroma_site (the inverse of the first four
+ * bytes produced by write above).  Returns FALSE on short data. */
+static gboolean
+parse_colorspace (GstByteReader * reader, GstVideoColorimetry * colorimetry,
+ GstVideoChromaSite * chroma_site)
+{
+ guint8 val;
+
+ g_return_val_if_fail (reader != NULL, FALSE);
+ g_return_val_if_fail (colorimetry != NULL, FALSE);
+ g_return_val_if_fail (chroma_site != NULL, FALSE);
+
+ if (gst_byte_reader_get_remaining (reader) < GST_RTP_HDREXT_COLORSPACE_SIZE) {
+ return FALSE;
+ }
+
+ /* Bytes 0-2: primaries, transfer, matrix as ISO/IEC 23001-8 codes. */
+ if (!gst_byte_reader_get_uint8 (reader, &val)) {
+ return FALSE;
+ }
+ colorimetry->primaries = gst_video_color_primaries_from_iso (val);
+
+ if (!gst_byte_reader_get_uint8 (reader, &val)) {
+ return FALSE;
+ }
+ colorimetry->transfer = gst_video_transfer_function_from_iso (val);
+
+ if (!gst_byte_reader_get_uint8 (reader, &val)) {
+ return FALSE;
+ }
+ colorimetry->matrix = gst_video_color_matrix_from_iso (val);
+
+ *chroma_site = GST_VIDEO_CHROMA_SITE_UNKNOWN;
+
+ /* Byte 3: range in the high nibble, horizontal siting in bits 3-2,
+ * vertical siting in bits 1-0 (1 = co-sited, 2 = not co-sited). */
+ if (!gst_byte_reader_get_uint8 (reader, &val)) {
+ return FALSE;
+ }
+ switch ((val >> 2) & 0x03) {
+ case 1:
+ *chroma_site |= GST_VIDEO_CHROMA_SITE_H_COSITED;
+ break;
+ case 2:
+ *chroma_site |= GST_VIDEO_CHROMA_SITE_NONE;
+ break;
+ }
+
+ switch (val & 0x03) {
+ case 1:
+ *chroma_site |= GST_VIDEO_CHROMA_SITE_V_COSITED;
+ break;
+ case 2:
+ *chroma_site |= GST_VIDEO_CHROMA_SITE_NONE;
+ break;
+ }
+
+ colorimetry->range = val >> 4;
+
+ return TRUE;
+}
+
+/* Parses the 28-byte form: the 4-byte base color-space block followed by
+ * big-endian 16-bit HDR fields (mastering display info, then content
+ * light level).  Returns FALSE on short data. */
+static gboolean
+parse_colorspace_with_hdr_meta (GstByteReader * reader,
+ GstVideoColorimetry * colorimetry,
+ GstVideoChromaSite * chroma_site,
+ GstVideoMasteringDisplayInfo * mastering_display_info,
+ GstVideoContentLightLevel * content_light_level)
+{
+ guint i;
+ guint16 val16;
+
+ g_return_val_if_fail (reader != NULL, FALSE);
+ g_return_val_if_fail (mastering_display_info != NULL, FALSE);
+ g_return_val_if_fail (content_light_level != NULL, FALSE);
+
+ if (gst_byte_reader_get_remaining (reader) <
+ GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE) {
+ return FALSE;
+ }
+
+ if (!parse_colorspace (reader, colorimetry, chroma_site)) {
+ return FALSE;
+ }
+
+ /* Max mastering luminance travels scaled down by 10000 (see write). */
+ if (!gst_byte_reader_get_uint16_be (reader, &val16)) {
+ return FALSE;
+ }
+ mastering_display_info->max_display_mastering_luminance = val16 * 10000;
+
+ if (!gst_byte_reader_get_uint16_be (reader, &val16)) {
+ return FALSE;
+ }
+ mastering_display_info->min_display_mastering_luminance = val16;
+
+ /* Three display primaries as (x, y) pairs, then the white point. */
+ for (i = 0; i < 3; ++i) {
+ if (!gst_byte_reader_get_uint16_be (reader,
+ &mastering_display_info->display_primaries[i].x)) {
+ return FALSE;
+ }
+
+ if (!gst_byte_reader_get_uint16_be (reader,
+ &mastering_display_info->display_primaries[i].y)) {
+ return FALSE;
+ }
+ }
+
+ if (!gst_byte_reader_get_uint16_be (reader,
+ &mastering_display_info->white_point.x)) {
+ return FALSE;
+ }
+ if (!gst_byte_reader_get_uint16_be (reader,
+ &mastering_display_info->white_point.y)) {
+ return FALSE;
+ }
+
+ if (!gst_byte_reader_get_uint16_be (reader,
+ &content_light_level->max_content_light_level)) {
+ return FALSE;
+ }
+ if (!gst_byte_reader_get_uint16_be (reader,
+ &content_light_level->max_frame_average_light_level)) {
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/* Parses a received Color Space extension (4- or 28-byte form, selected
+ * by @size), updates the cached state, and requests a non-RTP src caps
+ * update when anything changed.  Returns FALSE on malformed data.
+ *
+ * Fixes vs. previous revision: caps_update_needed was read without ever
+ * being initialized when no field changed (undefined behavior), and on a
+ * parse failure the partially-uninitialized locals were still compared
+ * against and copied into the instance state. */
+static gboolean
+gst_rtp_header_extension_colorspace_read (GstRTPHeaderExtension * ext,
+    GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size,
+    GstBuffer * buffer)
+{
+  GstRTPHeaderExtensionColorspace *self =
+      GST_RTP_HEADER_EXTENSION_COLORSPACE (ext);
+  gboolean has_hdr_meta;
+  GstByteReader *reader;
+  GstVideoColorimetry colorimetry;
+  GstVideoChromaSite chroma_site;
+  GstVideoMasteringDisplayInfo mdi;
+  GstVideoContentLightLevel cll;
+  gboolean caps_update_needed = FALSE;
+  gboolean result;
+
+  if (size != GST_RTP_HDREXT_COLORSPACE_SIZE &&
+      size != GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE) {
+    GST_WARNING_OBJECT (ext, "Invalid Color Space header extension size %"
+        G_GSIZE_FORMAT, size);
+    return FALSE;
+  }
+
+  has_hdr_meta = size == GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE;
+
+  reader = gst_byte_reader_new (data, size);
+
+  if (has_hdr_meta) {
+    result = parse_colorspace_with_hdr_meta (reader, &colorimetry, &chroma_site,
+        &mdi, &cll);
+  } else {
+    result = parse_colorspace (reader, &colorimetry, &chroma_site);
+  }
+
+  g_clear_pointer (&reader, gst_byte_reader_free);
+
+  if (!result) {
+    /* Bail out before touching the locals: on failure they may be only
+     * partially filled in and must not reach the instance state. */
+    return FALSE;
+  }
+
+  if (!gst_video_colorimetry_is_equal (&self->colorimetry, &colorimetry)) {
+    caps_update_needed = TRUE;
+    self->colorimetry = colorimetry;
+  }
+
+  if (self->chroma_site != chroma_site) {
+    caps_update_needed = TRUE;
+    self->chroma_site = chroma_site;
+  }
+
+  if (self->has_hdr_meta != has_hdr_meta) {
+    caps_update_needed = TRUE;
+    self->has_hdr_meta = has_hdr_meta;
+  }
+
+  if (has_hdr_meta) {
+    if (!gst_video_mastering_display_info_is_equal (&self->mdi, &mdi)) {
+      caps_update_needed = TRUE;
+      self->mdi = mdi;
+    }
+    if (!gst_video_content_light_level_is_equal (&self->cll, &cll)) {
+      caps_update_needed = TRUE;
+      self->cll = cll;
+    }
+  }
+
+  if (caps_update_needed) {
+    gst_rtp_header_extension_set_wants_update_non_rtp_src_caps (ext, TRUE);
+  }
+
+  return TRUE;
+}
+
+/* Seeds the extension's cached state from the payloader's negotiated
+ * video caps: colorimetry, chroma siting and (optionally) HDR metadata. */
+static gboolean
+    gst_rtp_header_extension_colorspace_set_non_rtp_sink_caps
+    (GstRTPHeaderExtension * ext, const GstCaps * caps)
+{
+  GstRTPHeaderExtensionColorspace *self =
+      GST_RTP_HEADER_EXTENSION_COLORSPACE (ext);
+  GstStructure *s = gst_caps_get_structure (caps, 0);
+  const gchar *field;
+
+  field = gst_structure_get_string (s, "colorimetry");
+  if (field != NULL) {
+    gst_video_colorimetry_from_string (&self->colorimetry, field);
+
+    /* HDR metadata is only picked up together with colorimetry; its
+     * presence decides which wire format write() will use. */
+    self->has_hdr_meta =
+        gst_video_mastering_display_info_from_caps (&self->mdi, caps);
+
+    gst_video_content_light_level_from_caps (&self->cll, caps);
+  }
+
+  field = gst_structure_get_string (s, "chroma-site");
+  if (field != NULL)
+    self->chroma_site = gst_video_chroma_from_string (field);
+
+  return TRUE;
+}
+
+/* Augments the depayloader's src caps with the color information that
+ * was received in-band through the header extension. */
+static gboolean
+    gst_rtp_header_extension_colorspace_update_non_rtp_src_caps
+    (GstRTPHeaderExtension * ext, GstCaps * caps)
+{
+  GstRTPHeaderExtensionColorspace *self =
+      GST_RTP_HEADER_EXTENSION_COLORSPACE (ext);
+  gchar *colorimetry_str;
+
+  /* Drop any stale HDR fields first; they are re-added below only when
+   * we currently hold HDR metadata. */
+  gst_structure_remove_fields (gst_caps_get_structure (caps, 0),
+      "mastering-display-info", "content-light-level", NULL);
+
+  colorimetry_str = gst_video_colorimetry_to_string (&self->colorimetry);
+  if (colorimetry_str != NULL) {
+    gst_caps_set_simple (caps, "colorimetry", G_TYPE_STRING, colorimetry_str,
+        NULL);
+    g_free (colorimetry_str);
+  }
+
+  if (self->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN)
+    gst_caps_set_simple (caps, "chroma-site", G_TYPE_STRING,
+        gst_video_chroma_to_string (self->chroma_site), NULL);
+
+  if (self->has_hdr_meta) {
+    gst_video_mastering_display_info_add_to_caps (&self->mdi, caps);
+    gst_video_content_light_level_add_to_caps (&self->cll, caps);
+  }
+
+  return TRUE;
+}
+
+/* Class init: wires the GstRTPHeaderExtension vtable to the functions
+ * above, sets element metadata and the extension's identifying URI. */
+static void
+ gst_rtp_header_extension_colorspace_class_init
+ (GstRTPHeaderExtensionColorspaceClass * klass)
+{
+ GstRTPHeaderExtensionClass *rtp_hdr_class =
+ GST_RTP_HEADER_EXTENSION_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ rtp_hdr_class->get_supported_flags =
+ gst_rtp_header_extension_colorspace_get_supported_flags;
+ rtp_hdr_class->get_max_size =
+ gst_rtp_header_extension_colorspace_get_max_size;
+ rtp_hdr_class->write = gst_rtp_header_extension_colorspace_write;
+ rtp_hdr_class->read = gst_rtp_header_extension_colorspace_read;
+ rtp_hdr_class->set_non_rtp_sink_caps =
+ gst_rtp_header_extension_colorspace_set_non_rtp_sink_caps;
+ rtp_hdr_class->update_non_rtp_src_caps =
+ gst_rtp_header_extension_colorspace_update_non_rtp_src_caps;
+ /* SDP attribute handling uses the stock "simple" helpers. */
+ rtp_hdr_class->set_attributes_from_caps =
+ gst_rtp_header_extension_set_attributes_from_caps_simple_sdp;
+ rtp_hdr_class->set_caps_from_attributes =
+ gst_rtp_header_extension_set_caps_from_attributes_simple_sdp;
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Color Space", GST_RTP_HDREXT_ELEMENT_CLASS,
+ "Extends RTP packets with color space and high dynamic range (HDR) information.",
+ "Jakub Adam <jakub.adam@collabora.com>");
+ gst_rtp_header_extension_class_set_uri (rtp_hdr_class,
+ GST_RTP_HDREXT_COLORSPACE_URI);
+}
diff --git a/gst/rtp/gstrtphdrext-colorspace.h b/gst/rtp/gstrtphdrext-colorspace.h
new file mode 100644
index 0000000000..c451cc63f9
--- /dev/null
+++ b/gst/rtp/gstrtphdrext-colorspace.h
@@ -0,0 +1,41 @@
+/* GStreamer
+ * Copyright (C) 2020-2021 Collabora Ltd.
+ * @author: Jakub Adam <jakub.adam@collabora.com>
+ *
+ * gstrtphdrext-colorspace.h: Color Space RTP header extension
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTPHDREXT_COLORSPACE_H__
+#define __GST_RTPHDREXT_COLORSPACE_H__
+
+#include <gst/rtp/gstrtphdrext.h>
+
+G_BEGIN_DECLS
+
+/* Payload sizes of the two wire formats: 4 bytes for the base
+ * colorimetry block, 28 bytes when HDR metadata is appended. */
+#define GST_RTP_HDREXT_COLORSPACE_SIZE 4
+#define GST_RTP_HDREXT_COLORSPACE_WITH_HDR_META_SIZE 28
+/* Extension URI used for SDP extmap negotiation. */
+#define GST_RTP_HDREXT_COLORSPACE_URI "http://www.webrtc.org/experiments/rtp-hdrext/color-space"
+
+#define GST_TYPE_RTP_HEADER_EXTENSION_COLORSPACE (gst_rtp_header_extension_colorspace_get_type())
+
+G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionColorspace, gst_rtp_header_extension_colorspace,
+ GST, RTP_HEADER_EXTENSION_COLORSPACE, GstRTPHeaderExtension)
+
+G_END_DECLS
+
+#endif /* __GST_RTPHDREXT_COLORSPACE_H__ */
diff --git a/gst/rtp/gstrtpilbcdepay.c b/gst/rtp/gstrtpilbcdepay.c
new file mode 100644
index 0000000000..043e065d7d
--- /dev/null
+++ b/gst/rtp/gstrtpilbcdepay.c
@@ -0,0 +1,235 @@
+/* GStreamer
+ * Copyright (C) <2006> Philippe Khalaf <burger@speedy.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpilbcdepay.h"
+#include "gstrtputils.h"
+
+/* RtpiLBCDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+#define DEFAULT_MODE GST_ILBC_MODE_30
+
+/* GObject property IDs. */
+enum
+{
+ PROP_0,
+ PROP_MODE
+};
+
+/* FIXME, mode should be string because it is a parameter in SDP fmtp */
+static GstStaticPadTemplate gst_rtp_ilbc_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"ILBC\"")
+ /* "mode = (string) { \"20\", \"30\" }" */
+ );
+
+/* Depayloaded output: raw iLBC frames, 20 or 30 ms per frame. */
+static GstStaticPadTemplate gst_rtp_ilbc_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-iLBC, " "mode = (int) { 20, 30 }")
+ );
+
+static void gst_ilbc_depay_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_ilbc_depay_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+static GstBuffer *gst_rtp_ilbc_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_ilbc_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+#define gst_rtp_ilbc_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPiLBCDepay, gst_rtp_ilbc_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpilbcdepay, "rtpilbcdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_ILBC_DEPAY, rtp_element_init (plugin));
+
+#define GST_TYPE_ILBC_MODE (gst_ilbc_mode_get_type())
+/* Registers (once) the iLBC frame-mode enum GType.
+ *
+ * Fix: the previous lazy "if (!type) type = register" on a plain static
+ * is racy when two threads hit it concurrently; use the GLib
+ * g_once_init_enter/leave idiom, which is the documented thread-safe
+ * pattern for one-shot GType registration. */
+static GType
+gst_ilbc_mode_get_type (void)
+{
+  static gsize ilbc_mode_type = 0;
+  static const GEnumValue ilbc_modes[] = {
+    {GST_ILBC_MODE_20, "20ms frames", "20ms"},
+    {GST_ILBC_MODE_30, "30ms frames", "30ms"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&ilbc_mode_type)) {
+    GType type = g_enum_register_static ("iLBCMode", ilbc_modes);
+    g_once_init_leave (&ilbc_mode_type, type);
+  }
+
+  return (GType) ilbc_mode_type;
+}
+
+/* Class init: installs the "mode" property, pad templates, element
+ * metadata and the depayloader vfuncs. */
+static void
+gst_rtp_ilbc_depay_class_init (GstRTPiLBCDepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gobject_class->set_property = gst_ilbc_depay_set_property;
+ gobject_class->get_property = gst_ilbc_depay_get_property;
+
+ /* FIXME, mode is in the caps */
+ g_object_class_install_property (gobject_class, PROP_MODE,
+ g_param_spec_enum ("mode", "Mode", "iLBC frame mode",
+ GST_TYPE_ILBC_MODE, DEFAULT_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_ilbc_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_ilbc_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP iLBC depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts iLBC audio from RTP packets (RFC 3952)",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_ilbc_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_ilbc_depay_setcaps;
+
+ /* Expose the enum in the documentation / introspection. */
+ gst_type_mark_as_plugin_api (GST_TYPE_ILBC_MODE, 0);
+}
+
+/* Instance init: start from the default 30 ms frame mode. */
+static void
+gst_rtp_ilbc_depay_init (GstRTPiLBCDepay * rtpilbcdepay)
+{
+ /* Set default mode */
+ rtpilbcdepay->mode = DEFAULT_MODE;
+}
+
+/* Negotiates src caps from incoming RTP caps: picks up clock-rate and
+ * the optional SDP fmtp "mode" (20 or 30 ms frames). */
+static gboolean
+gst_rtp_ilbc_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstRTPiLBCDepay *self = GST_RTP_ILBC_DEPAY (depayload);
+  GstStructure *structure = gst_caps_get_structure (caps, 0);
+  const gchar *mode_str;
+  gint mode = self->mode;
+  gint clock_rate;
+  GstCaps *srccaps;
+  gboolean res;
+
+  /* iLBC over RTP always uses an 8 kHz clock (default when absent). */
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 8000;
+  depayload->clock_rate = clock_rate;
+
+  /* "mode" arrives as a string; anything other than 20/30 keeps the
+   * previously configured mode. */
+  mode_str = gst_structure_get_string (structure, "mode");
+  if (mode_str != NULL) {
+    mode = strtol (mode_str, NULL, 10);
+    if (mode != 20 && mode != 30)
+      mode = self->mode;
+  }
+
+  self->mode = mode;
+
+  srccaps = gst_caps_new_simple ("audio/x-iLBC",
+      "mode", G_TYPE_INT, self->mode, NULL);
+  res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+  GST_DEBUG ("set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, res);
+  gst_caps_unref (srccaps);
+
+  return res;
+}
+
+/* Extracts the iLBC payload of one RTP packet as an output buffer.
+ * Returns NULL when no payload buffer could be obtained. */
+static GstBuffer *
+gst_rtp_ilbc_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstBuffer *payload;
+  gboolean marker = gst_rtp_buffer_get_marker (rtp);
+
+  GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+      gst_buffer_get_size (rtp->buffer), marker,
+      gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+  payload = gst_rtp_buffer_get_payload_buffer (rtp);
+  if (payload == NULL)
+    return NULL;
+
+  if (marker) {
+    /* mark start of talkspurt with RESYNC */
+    GST_BUFFER_FLAG_SET (payload, GST_BUFFER_FLAG_RESYNC);
+  }
+
+  gst_rtp_drop_non_audio_meta (depayload, payload);
+
+  return payload;
+}
+
+/* GObject property setter; "mode" is the only settable property. */
+static void
+gst_ilbc_depay_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstRTPiLBCDepay *self = GST_RTP_ILBC_DEPAY (object);
+
+  if (prop_id == PROP_MODE)
+    self->mode = g_value_get_enum (value);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject property getter; "mode" is the only readable property. */
+static void
+gst_ilbc_depay_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstRTPiLBCDepay *self = GST_RTP_ILBC_DEPAY (object);
+
+  if (prop_id == PROP_MODE)
+    g_value_set_enum (value, self->mode);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/gst/rtp/gstrtpilbcdepay.h b/gst/rtp/gstrtpilbcdepay.h
new file mode 100644
index 0000000000..b016004e4a
--- /dev/null
+++ b/gst/rtp/gstrtpilbcdepay.h
@@ -0,0 +1,63 @@
+/* GStreamer
+ * Copyright (C) <2006> Philippe Khalaf <burger@speedy.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_ILBC_DEPAY_H__
+#define __GST_RTP_ILBC_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPiLBCDepay GstRTPiLBCDepay;
+typedef struct _GstRTPiLBCDepayClass GstRTPiLBCDepayClass;
+
+/* Standard GObject cast/check boilerplate for the depayloader type. */
+#define GST_TYPE_RTP_ILBC_DEPAY \
+ (gst_rtp_ilbc_depay_get_type())
+#define GST_RTP_ILBC_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_ILBC_DEPAY,GstRTPiLBCDepay))
+#define GST_RTP_ILBC_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_ILBC_DEPAY,GstRTPiLBCDepayClass))
+#define GST_IS_RTP_ILBC_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_ILBC_DEPAY))
+#define GST_IS_RTP_ILBC_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_ILBC_DEPAY))
+
+/* iLBC frame duration in milliseconds; the enum value doubles as the
+ * numeric "mode" used in caps. */
+typedef enum {
+ GST_ILBC_MODE_20 = 20,
+ GST_ILBC_MODE_30 = 30
+} GstiLBCMode;
+
+struct _GstRTPiLBCDepay
+{
+ GstRTPBaseDepayload depayload;
+
+ /* Currently negotiated/configured frame mode. */
+ GstiLBCMode mode;
+};
+
+struct _GstRTPiLBCDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_ilbc_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_ILBC_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpilbcpay.c b/gst/rtp/gstrtpilbcpay.c
new file mode 100644
index 0000000000..0048045aec
--- /dev/null
+++ b/gst/rtp/gstrtpilbcpay.c
@@ -0,0 +1,224 @@
+/* GStreamer
+ * Copyright (C) <2006> Philippe Khalaf <burger@speedy.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include "gstrtpelements.h"
+#include "gstrtpilbcpay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpilbcpay_debug);
+#define GST_CAT_DEFAULT (rtpilbcpay_debug)
+
+/* Input: raw iLBC frames, 20 or 30 ms per frame. */
+static GstStaticPadTemplate gst_rtp_ilbc_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-iLBC, " "mode = (int) {20, 30}")
+ );
+
+/* Output: RTP packets per RFC 3952 (dynamic payload type, 8 kHz clock,
+ * mode advertised as a string for SDP fmtp). */
+static GstStaticPadTemplate gst_rtp_ilbc_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"ILBC\", "
+ "mode = (string) { \"20\", \"30\" }")
+ );
+
+
+static GstCaps *gst_rtp_ilbc_pay_sink_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_rtp_ilbc_pay_sink_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+
+#define gst_rtp_ilbc_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPILBCPay, gst_rtp_ilbc_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpilbcpay, "rtpilbcpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_ILBC_PAY, rtp_element_init (plugin));
+
+/* Class init: pad templates, element metadata and payloader vfuncs. */
+static void
+gst_rtp_ilbc_pay_class_init (GstRTPILBCPayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpilbcpay_debug, "rtpilbcpay", 0,
+ "iLBC audio RTP payloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_ilbc_pay_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_ilbc_pay_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP iLBC Payloader",
+ "Codec/Payloader/Network/RTP",
+ "Packetize iLBC audio streams into RTP packets",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_ilbc_pay_sink_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_ilbc_pay_sink_getcaps;
+}
+
+/* Instance init: fixed 8 kHz clock, frame-based payloading, mode not yet
+ * negotiated (-1 sentinel until set_caps runs). */
+static void
+gst_rtp_ilbc_pay_init (GstRTPILBCPay * rtpilbcpay)
+{
+ GstRTPBasePayload *rtpbasepayload;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbasepayload = GST_RTP_BASE_PAYLOAD (rtpilbcpay);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpilbcpay);
+
+ /* we don't set the payload type, it should be set by the application using
+ * the pt property or the default 96 will be used */
+ rtpbasepayload->clock_rate = 8000;
+
+ rtpilbcpay->mode = -1;
+
+ /* tell rtpbaseaudiopayload that this is a frame based codec */
+ gst_rtp_base_audio_payload_set_frame_based (rtpbaseaudiopayload);
+}
+
+/* Accepts audio/x-iLBC sink caps, validates "mode" (20 or 30), configures
+ * the base audio payloader and sets the outgoing RTP caps.  The mode may
+ * not change once streaming has started. */
+static gboolean
+gst_rtp_ilbc_pay_sink_setcaps (GstRTPBasePayload * rtpbasepayload,
+ GstCaps * caps)
+{
+ GstRTPILBCPay *rtpilbcpay;
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+ gboolean ret;
+ gint mode;
+ gchar *mode_str;
+ GstStructure *structure;
+ const char *payload_name;
+
+ rtpilbcpay = GST_RTP_ILBC_PAY (rtpbasepayload);
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbasepayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ payload_name = gst_structure_get_name (structure);
+ if (g_ascii_strcasecmp ("audio/x-iLBC", payload_name))
+ goto wrong_caps;
+
+ if (!gst_structure_get_int (structure, "mode", &mode))
+ goto no_mode;
+
+ if (mode != 20 && mode != 30)
+ goto wrong_mode;
+
+ gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "ILBC",
+ 8000);
+ /* set options for this frame based audio codec */
+ /* Frame duration in ms and frame size in bytes (30 ms -> 50, 20 ms -> 38). */
+ gst_rtp_base_audio_payload_set_frame_options (rtpbaseaudiopayload,
+ mode, mode == 30 ? 50 : 38);
+
+ /* SDP fmtp carries mode as a string, so re-encode the int. */
+ mode_str = g_strdup_printf ("%d", mode);
+ ret =
+ gst_rtp_base_payload_set_outcaps (rtpbasepayload, "mode", G_TYPE_STRING,
+ mode_str, NULL);
+ g_free (mode_str);
+
+ /* NOTE(review): the mode-change check happens after outcaps were already
+ * set above — confirm this ordering is intentional. */
+ if (mode != rtpilbcpay->mode && rtpilbcpay->mode != -1)
+ goto mode_changed;
+
+ rtpilbcpay->mode = mode;
+
+ return ret;
+
+ /* ERRORS */
+wrong_caps:
+ {
+ GST_ERROR_OBJECT (rtpilbcpay, "expected audio/x-iLBC, received %s",
+ payload_name);
+ return FALSE;
+ }
+no_mode:
+ {
+ GST_ERROR_OBJECT (rtpilbcpay, "did not receive a mode");
+ return FALSE;
+ }
+wrong_mode:
+ {
+ GST_ERROR_OBJECT (rtpilbcpay, "mode must be 20 or 30, received %d", mode);
+ return FALSE;
+ }
+mode_changed:
+ {
+ GST_ERROR_OBJECT (rtpilbcpay, "Mode has changed from %d to %d! "
+ "Mode cannot change while streaming", rtpilbcpay->mode, mode);
+ return FALSE;
+ }
+}
+
+/* we return the padtemplate caps with the mode field fixated to a value if we
+ * can */
+static GstCaps *
+gst_rtp_ilbc_pay_sink_getcaps (GstRTPBasePayload * rtppayload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *otherpadcaps;
+ GstCaps *caps;
+
+ /* Peek at what the downstream RTP peer allows to learn its "mode". */
+ otherpadcaps = gst_pad_get_allowed_caps (rtppayload->srcpad);
+ caps = gst_pad_get_pad_template_caps (pad);
+
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ GstStructure *structure;
+ const gchar *mode_str;
+ gint mode;
+
+ structure = gst_caps_get_structure (otherpadcaps, 0);
+
+ /* parse mode, if we can */
+ mode_str = gst_structure_get_string (structure, "mode");
+ if (mode_str) {
+ mode = strtol (mode_str, NULL, 10);
+ if (mode == 20 || mode == 30) {
+ /* Fixate the sink caps' integer "mode" to the peer's value. */
+ caps = gst_caps_make_writable (caps);
+ structure = gst_caps_get_structure (caps, 0);
+ gst_structure_set (structure, "mode", G_TYPE_INT, mode, NULL);
+ }
+ }
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+ if (filter) {
+ GstCaps *tmp;
+
+ GST_DEBUG_OBJECT (rtppayload, "Intersect %" GST_PTR_FORMAT " and filter %"
+ GST_PTR_FORMAT, caps, filter);
+ tmp = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = tmp;
+ }
+
+ return caps;
+}
diff --git a/gst/rtp/gstrtpilbcpay.h b/gst/rtp/gstrtpilbcpay.h
new file mode 100644
index 0000000000..d30c11239b
--- /dev/null
+++ b/gst/rtp/gstrtpilbcpay.h
@@ -0,0 +1,58 @@
+/* GStreamer
+ * Copyright (C) <2006> Philippe Khalaf <burger@speedy.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_ILBC_PAY_H__
+#define __GST_RTP_ILBC_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_ILBC_PAY \
+ (gst_rtp_ilbc_pay_get_type())
+#define GST_RTP_ILBC_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_ILBC_PAY,GstRTPILBCPay))
+#define GST_RTP_ILBC_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_ILBC_PAY,GstRTPILBCPayClass))
+#define GST_IS_RTP_ILBC_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_ILBC_PAY))
+#define GST_IS_RTP_ILBC_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_ILBC_PAY))
+
+typedef struct _GstRTPILBCPay GstRTPILBCPay;
+typedef struct _GstRTPILBCPayClass GstRTPILBCPayClass;
+
+struct _GstRTPILBCPay
+{
+  GstRTPBaseAudioPayload audiopayload;
+
+  gint mode;                    /* iLBC frame mode in ms, 20 or 30; -1 until negotiated */
+};
+
+struct _GstRTPILBCPayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_ilbc_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_ILBC_PAY_H__ */
diff --git a/gst/rtp/gstrtpisacdepay.c b/gst/rtp/gstrtpisacdepay.c
new file mode 100644
index 0000000000..bac1fa048f
--- /dev/null
+++ b/gst/rtp/gstrtpisacdepay.c
@@ -0,0 +1,147 @@
+/* GStreamer
+ * Copyright (C) 2020 Collabora Ltd.
+ * Author: Guillaume Desmottes <guillaume.desmottes@collabora.com>, Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpisacdepay
+ * @title: rtpisacdepay
+ * @short_description: iSAC RTP Depayloader
+ *
+ * Since: 1.20
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpisacdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpisacdepay_debug);
+#define GST_CAT_DEFAULT (rtpisacdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_isac_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) { 16000, 32000 }, "
+ "encoding-name = (string) \"ISAC\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_isac_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/isac, "
+ "rate = (int) { 16000, 32000 }, " "channels = (int) 1")
+ );
+
+struct _GstRtpIsacDepay
+{
+  /*< private > */
+  GstRTPBaseDepayload parent;
+
+  guint64 packet;               /* packet counter; not used in the code visible here */
+};
+
+#define gst_rtp_isac_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpIsacDepay, gst_rtp_isac_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpisacdepay, "rtpisacdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_ISAC_DEPAY, rtp_element_init (plugin));
+
+/* Negotiate the source caps from the RTP sink caps: for iSAC the RTP
+ * clock-rate doubles as the audio sample rate. */
+static gboolean
+gst_rtp_isac_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstCaps *srccaps;
+  gboolean res;
+  gint clock_rate;
+
+  GST_DEBUG_OBJECT (depayload, "sink caps: %" GST_PTR_FORMAT, caps);
+
+  structure = gst_caps_get_structure (caps, 0);
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate)) {
+    GST_ERROR_OBJECT (depayload, "Missing 'clock-rate' in caps");
+    return FALSE;
+  }
+
+  srccaps = gst_caps_new_simple ("audio/isac",
+      "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, clock_rate, NULL);
+  res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+  GST_DEBUG_OBJECT (depayload,
+      "set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, res);
+  gst_caps_unref (srccaps);
+
+  return res;
+}
+
+/* iSAC frames are carried verbatim in the RTP payload, so depayloading is
+ * just extracting the payload buffer. */
+static GstBuffer *
+gst_rtp_isac_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp_buffer)
+{
+  GstBuffer *payload = gst_rtp_buffer_get_payload_buffer (rtp_buffer);
+
+  /* only audio-related metas make sense on the decoded stream */
+  gst_rtp_drop_non_audio_meta (depayload, payload);
+
+  return payload;
+}
+
+/* Class setup: install depayloader vfuncs, pad templates and element
+ * metadata, and create the debug category. */
+static void
+gst_rtp_isac_depay_class_init (GstRtpIsacDepayClass * klass)
+{
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstRTPBaseDepayloadClass *depayload_class =
+      (GstRTPBaseDepayloadClass *) klass;
+
+  /* caps negotiation and per-packet processing */
+  depayload_class->set_caps = gst_rtp_isac_depay_setcaps;
+  depayload_class->process_rtp_packet = gst_rtp_isac_depay_process;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_isac_depay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_isac_depay_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP iSAC depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts iSAC audio from RTP packets",
+      "Guillaume Desmottes <guillaume.desmottes@collabora.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpisacdepay_debug, "rtpisacdepay", 0,
+      "iSAC RTP Depayloader");
+}
+
+/* Instance init: nothing to set up, the depayloader keeps no extra state. */
+static void
+gst_rtp_isac_depay_init (GstRtpIsacDepay * rtpisacdepay)
+{
+}
diff --git a/gst/rtp/gstrtpisacdepay.h b/gst/rtp/gstrtpisacdepay.h
new file mode 100644
index 0000000000..f5ab289543
--- /dev/null
+++ b/gst/rtp/gstrtpisacdepay.h
@@ -0,0 +1,31 @@
+/* GStreamer
+ * Copyright (C) 2020 Collabora Ltd.
+ * Author: Guillaume Desmottes <guillaume.desmottes@collabora.com>, Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more
+ */
+
+
+#ifndef __GST_RTP_ISAC_DEPAY_H__
+#define __GST_RTP_ISAC_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_ISAC_DEPAY gst_rtp_isac_depay_get_type ()
+
+G_DECLARE_FINAL_TYPE (GstRtpIsacDepay, gst_rtp_isac_depay, GST, RTP_ISAC_DEPAY,
+ GstRTPBaseDepayload);
+
+G_END_DECLS
+#endif /* __GST_RTP_ISAC_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpisacpay.c b/gst/rtp/gstrtpisacpay.c
new file mode 100644
index 0000000000..ad03a190f1
--- /dev/null
+++ b/gst/rtp/gstrtpisacpay.c
@@ -0,0 +1,183 @@
+/* GStreamer
+ * Copyright (C) 2020 Collabora Ltd.
+ * Author: Guillaume Desmottes <guillaume.desmottes@collabora.com>, Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpisacpay
+ * @title: rtpisacpay
+ * @short_description: iSAC RTP Payloader
+ *
+ * Since: 1.20
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpisacpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpisacpay_debug);
+#define GST_CAT_DEFAULT (rtpisacpay_debug)
+
+static GstStaticPadTemplate gst_rtp_isac_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/isac, "
+ "rate = (int) { 16000, 32000 }, " "channels = (int) 1")
+ );
+
+static GstStaticPadTemplate gst_rtp_isac_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) { 16000, 32000 }, "
+ "encoding-name = (string) \"ISAC\", "
+ "encoding-params = (string) \"1\"")
+ );
+
+struct _GstRtpIsacPay
+{
+  /*< private > */
+  GstRTPBasePayload parent;     /* no extra per-instance state is needed */
+};
+
+#define gst_rtp_isac_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpIsacPay, gst_rtp_isac_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpisacpay, "rtpisacpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_ISAC_PAY, rtp_element_init (plugin));
+
+/* Return the sink pad template caps, with the "rate" field narrowed to the
+ * clock-rate the downstream RTP peer can accept, if known. */
+static GstCaps *
+gst_rtp_isac_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+    GstCaps * filter)
+{
+  GstCaps *peercaps;
+  GstCaps *result;
+
+  peercaps = gst_pad_get_allowed_caps (payload->srcpad);
+  result = gst_pad_get_pad_template_caps (pad);
+
+  if (peercaps != NULL) {
+    if (!gst_caps_is_empty (peercaps)) {
+      GstStructure *peer_s, *our_s;
+      const GValue *clock_rate;
+
+      peer_s = gst_caps_get_structure (peercaps, 0);
+      result = gst_caps_make_writable (result);
+      our_s = gst_caps_get_structure (result, 0);
+
+      /* mirror the peer's RTP clock-rate into our audio rate field */
+      clock_rate = gst_structure_get_value (peer_s, "clock-rate");
+      if (clock_rate != NULL)
+        gst_structure_set_value (our_s, "rate", clock_rate);
+    }
+    gst_caps_unref (peercaps);
+  }
+
+  if (filter != NULL) {
+    GstCaps *unfiltered = result;
+
+    result = gst_caps_intersect_full (filter, unfiltered,
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (unfiltered);
+  }
+
+  GST_DEBUG_OBJECT (payload, "%" GST_PTR_FORMAT, result);
+
+  return result;
+}
+
+/* Configure the RTP session from the input audio caps; the iSAC sample
+ * rate becomes the RTP clock-rate. */
+static gboolean
+gst_rtp_isac_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  GstStructure *structure;
+  gint rate;
+
+  GST_DEBUG_OBJECT (payload, "%" GST_PTR_FORMAT, caps);
+
+  structure = gst_caps_get_structure (caps, 0);
+  if (!gst_structure_get_int (structure, "rate", &rate)) {
+    GST_ERROR_OBJECT (payload, "Missing 'rate' in caps");
+    return FALSE;
+  }
+
+  gst_rtp_base_payload_set_options (payload, "audio", TRUE, "ISAC", rate);
+
+  return gst_rtp_base_payload_set_outcaps (payload, NULL);
+}
+
+/* Wrap one input iSAC frame in an RTP packet and push it downstream. */
+static GstFlowReturn
+gst_rtp_isac_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstBuffer *outbuf;
+  GstClockTime pts, dts, duration;
+
+  /* save the timestamps now: gst_buffer_append() below takes ownership of
+   * the input buffer */
+  pts = GST_BUFFER_PTS (buffer);
+  dts = GST_BUFFER_DTS (buffer);
+  duration = GST_BUFFER_DURATION (buffer);
+
+  /* RTP header only (0 payload bytes); the frame is appended afterwards */
+  outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
+
+  gst_rtp_copy_audio_meta (basepayload, outbuf, buffer);
+
+  /* consumes both buffers and returns the combined packet */
+  outbuf = gst_buffer_append (outbuf, buffer);
+
+  GST_BUFFER_PTS (outbuf) = pts;
+  GST_BUFFER_DTS (outbuf) = dts;
+  GST_BUFFER_DURATION (outbuf) = duration;
+
+  return gst_rtp_base_payload_push (basepayload, outbuf);
+}
+
+/* Class setup: install payloader vfuncs, pad templates and element
+ * metadata, and create the debug category. */
+static void
+gst_rtp_isac_pay_class_init (GstRtpIsacPayClass * klass)
+{
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstRTPBasePayloadClass *payload_class = (GstRTPBasePayloadClass *) klass;
+
+  /* caps negotiation in both directions plus per-buffer payloading */
+  payload_class->get_caps = gst_rtp_isac_pay_getcaps;
+  payload_class->set_caps = gst_rtp_isac_pay_setcaps;
+  payload_class->handle_buffer = gst_rtp_isac_pay_handle_buffer;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_isac_pay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_isac_pay_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP iSAC payloader", "Codec/Payloader/Network/RTP",
+      "Payload-encodes iSAC audio into a RTP packet",
+      "Guillaume Desmottes <guillaume.desmottes@collabora.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpisacpay_debug, "rtpisacpay", 0,
+      "iSAC RTP Payloader");
+}
+
+/* Instance init: nothing to set up, the payloader keeps no extra state. */
+static void
+gst_rtp_isac_pay_init (GstRtpIsacPay * rtpisacpay)
+{
+}
diff --git a/gst/rtp/gstrtpisacpay.h b/gst/rtp/gstrtpisacpay.h
new file mode 100644
index 0000000000..82c072bddf
--- /dev/null
+++ b/gst/rtp/gstrtpisacpay.h
@@ -0,0 +1,31 @@
+/* GStreamer
+ * Copyright (C) 2020 Collabora Ltd.
+ * Author: Guillaume Desmottes <guillaume.desmottes@collabora.com>, Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more
+ */
+
+
+#ifndef __GST_RTP_ISAC_PAY_H__
+#define __GST_RTP_ISAC_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_ISAC_PAY gst_rtp_isac_pay_get_type ()
+
+G_DECLARE_FINAL_TYPE(GstRtpIsacPay, gst_rtp_isac_pay, GST, RTP_ISAC_PAY, GstRTPBasePayload);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_ISAC_PAY_H__ */
diff --git a/gst/rtp/gstrtpj2kcommon.h b/gst/rtp/gstrtpj2kcommon.h
new file mode 100644
index 0000000000..09f1bbbf7c
--- /dev/null
+++ b/gst/rtp/gstrtpj2kcommon.h
@@ -0,0 +1,102 @@
+/* GStreamer
+* Copyright (C) 2009 Wim Taymans <wim.taymans@gmail.com>
+*
+* This library is free software; you can redistribute it and/or
+* modify it under the terms of the GNU Library General Public
+* License as published by the Free Software Foundation; either
+* version 2 of the License, or (at your option) any later version.
+*
+* This library is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+* Library General Public License for more details.
+*
+* You should have received a copy of the GNU Library General Public
+* License along with this library; if not, write to the
+* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+* Boston, MA 02110-1301, USA.
+*/
+
+
+#ifndef __GST_RTP_J2K_COMMON_H__
+#define __GST_RTP_J2K_COMMON_H__
+
+
+
+/* Sampling values from RFC 5371 for JPEG 2000 over RTP : https://datatracker.ietf.org/doc/rfc5371/C
+
+RGB: standard Red, Green, Blue color space.
+
+BGR: standard Blue, Green, Red color space.
+
+RGBA: standard Red, Green, Blue, Alpha color space.
+
+BGRA: standard Blue, Green, Red, Alpha color space.
+
+YCbCr-4:4:4: standard YCbCr color space; no subsampling.
+
+YCbCr-4:2:2: standard YCbCr color space; Cb and Cr are subsampled horizontally by 1/2.
+
+YCbCr-4:2:0: standard YCbCr color space; Cb and Cr are subsampled horizontally and vertically by 1/2.
+
+YCbCr-4:1:1: standard YCbCr color space; Cb and Cr are subsampled vertically by 1/4.
+
+GRAYSCALE: basically, a single component image of just multilevels of grey.
+*/
+
+
+#define GST_RTP_J2K_RGB "RGB"
+#define GST_RTP_J2K_BGR "BGR"
+#define GST_RTP_J2K_RGBA "RGBA"
+#define GST_RTP_J2K_BGRA "BGRA"
+#define GST_RTP_J2K_YBRA "YCbCrA"
+#define GST_RTP_J2K_YBR444 "YCbCr-4:4:4"
+#define GST_RTP_J2K_YBR422 "YCbCr-4:2:2"
+#define GST_RTP_J2K_YBR420 "YCbCr-4:2:0"
+#define GST_RTP_J2K_YBR410 "YCbCr-4:1:0"
+#define GST_RTP_J2K_GRAYSCALE "GRAYSCALE"
+
+#define GST_RTP_J2K_SAMPLING_LIST "sampling = (string) {\"RGB\", \"BGR\", \"RGBA\", \"BGRA\", \"YCbCrA\", \"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", \"YCbCr-4:1:1\", \"GRAYSCALE\"}"
+
+/* Enum counterparts of the RFC 5371 sampling strings defined above. */
+typedef enum
+{
+
+  GST_RTP_SAMPLING_NONE,        /* sampling unknown / not specified */
+  GST_RTP_SAMPLING_RGB,
+  GST_RTP_SAMPLING_BGR,
+  GST_RTP_SAMPLING_RGBA,
+  GST_RTP_SAMPLING_BGRA,
+  GST_RTP_SAMPLING_YBRA,
+  GST_RTP_SAMPLING_YBR444,
+  GST_RTP_SAMPLING_YBR422,
+  GST_RTP_SAMPLING_YBR420,
+  GST_RTP_SAMPLING_YBR410,
+  GST_RTP_SAMPLING_GRAYSCALE
+} GstRtpSampling;
+
+
+/*
+* GstRtpJ2KMarker:
+* @GST_J2K_MARKER: Prefix for JPEG 2000 marker
+* @GST_J2K_MARKER_SOC: Start of Codestream
+* @GST_J2K_MARKER_SOT: Start of tile
+* @GST_J2K_MARKER_EOC: End of Codestream
+*
+* Identifiers for markers in JPEG 2000 code streams
+*/
+typedef enum
+{
+  GST_J2K_MARKER = 0xFF,        /* first byte of every JPEG 2000 marker */
+  GST_J2K_MARKER_SOC = 0x4F,    /* Start of Codestream */
+  GST_J2K_MARKER_SOT = 0x90,    /* Start of Tile-part */
+  GST_J2K_MARKER_SOP = 0x91,    /* Start of Packet */
+  GST_J2K_MARKER_EPH = 0x92,    /* End of Packet Header */
+  GST_J2K_MARKER_SOD = 0x93,    /* Start of Data */
+  GST_J2K_MARKER_EOC = 0xD9     /* End of Codestream */
+} GstRtpJ2KMarker;
+
+
+#define GST_RTP_J2K_HEADER_SIZE 8
+
+
+#endif /* __GST_RTP_J2K_COMMON_H__ */
diff --git a/gst/rtp/gstrtpj2kdepay.c b/gst/rtp/gstrtpj2kdepay.c
new file mode 100644
index 0000000000..4456b3dd89
--- /dev/null
+++ b/gst/rtp/gstrtpj2kdepay.c
@@ -0,0 +1,664 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+ /**
+ * SECTION:element-rtpj2kdepay
+ * @title: rtpj2kdepay
+ *
+ * Depayload an RTP-payloaded JPEG 2000 image into RTP packets according to RFC 5371
+ * and RFC 5372.
+ * For detailed information see: https://datatracker.ietf.org/doc/rfc5371/
+ * and https://datatracker.ietf.org/doc/rfc5372/
+ */
+
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpj2kcommon.h"
+#include "gstrtpj2kdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpj2kdepay_debug);
+#define GST_CAT_DEFAULT (rtpj2kdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_j2k_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("image/x-jpc, "
+ "colorspace = (string) { sRGB, sYUV, GRAY }")
+ );
+
+static GstStaticPadTemplate gst_rtp_j2k_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", " "clock-rate = (int) 90000, "
+ GST_RTP_J2K_SAMPLING_LIST ","
+ "encoding-name = (string) \"JPEG2000\";"
+ "application/x-rtp, "
+ "media = (string) \"video\", " "clock-rate = (int) 90000, "
+ "colorspace = (string) { sRGB, sYUV, GRAY }, "
+ "encoding-name = (string) \"JPEG2000\";")
+ );
+
+enum
+{
+ PROP_0,
+ PROP_LAST
+};
+
+#define gst_rtp_j2k_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJ2KDepay, gst_rtp_j2k_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpj2kdepay, "rtpj2kdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_J2K_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_j2k_depay_finalize (GObject * object);
+
+static void gst_rtp_j2k_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_j2k_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstStateChangeReturn
+gst_rtp_j2k_depay_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_rtp_j2k_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_j2k_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+/* Class setup: install GObject and depayloader vfuncs, pad templates and
+ * element metadata, and create the debug category. */
+static void
+gst_rtp_j2k_depay_class_init (GstRtpJ2KDepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_j2k_depay_finalize;
+
+  /* property hooks are installed although no properties are registered in
+   * the visible code (the prop enum only has PROP_0/PROP_LAST) */
+  gobject_class->set_property = gst_rtp_j2k_depay_set_property;
+  gobject_class->get_property = gst_rtp_j2k_depay_get_property;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_j2k_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_j2k_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP JPEG 2000 depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts JPEG 2000 video from RTP packets (RFC 5371)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstelement_class->change_state = gst_rtp_j2k_depay_change_state;
+
+  gstrtpbasedepayload_class->set_caps = gst_rtp_j2k_depay_setcaps;
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_j2k_depay_process;
+
+  GST_DEBUG_CATEGORY_INIT (rtpj2kdepay_debug, "rtpj2kdepay", 0,
+      "J2K Video RTP Depayloader");
+}
+
+/* Instance init: allocate the three reassembly adapters.
+ * pu_adapter collects the current packing unit, t_adapter the current
+ * tile, and f_adapter the whole frame/codestream being assembled. */
+static void
+gst_rtp_j2k_depay_init (GstRtpJ2KDepay * rtpj2kdepay)
+{
+  rtpj2kdepay->pu_adapter = gst_adapter_new ();
+  rtpj2kdepay->t_adapter = gst_adapter_new ();
+  rtpj2kdepay->f_adapter = gst_adapter_new ();
+}
+
+/* Replace the cached main header at @idx, releasing any previous one.
+ * Takes ownership of @buf; @buf may be NULL to just clear the slot. */
+static void
+store_mheader (GstRtpJ2KDepay * rtpj2kdepay, guint idx, GstBuffer * buf)
+{
+  GstBuffer *prev;
+
+  GST_DEBUG_OBJECT (rtpj2kdepay, "storing main header %p at index %u", buf,
+      idx);
+
+  prev = rtpj2kdepay->MH[idx];
+  if (prev != NULL)
+    gst_buffer_unref (prev);
+  rtpj2kdepay->MH[idx] = buf;
+}
+
+/* Drop every cached main header (mh_id is a 3-bit field, so there are at
+ * most 8 slots). */
+static void
+clear_mheaders (GstRtpJ2KDepay * rtpj2kdepay)
+{
+  guint i;
+
+  /* iterate over the actual array length instead of a magic constant, so
+   * this stays correct if the MH array ever changes size */
+  for (i = 0; i < G_N_ELEMENTS (rtpj2kdepay->MH); i++)
+    store_mheader (rtpj2kdepay, i, NULL);
+}
+
+/* Return the depayloader to its initial state: drop the cached main
+ * headers and any partially assembled packing-unit, tile and frame data. */
+static void
+gst_rtp_j2k_depay_reset (GstRtpJ2KDepay * rtpj2kdepay)
+{
+  clear_mheaders (rtpj2kdepay);
+  gst_adapter_clear (rtpj2kdepay->pu_adapter);
+  gst_adapter_clear (rtpj2kdepay->t_adapter);
+  gst_adapter_clear (rtpj2kdepay->f_adapter);
+  rtpj2kdepay->next_frag = 0;
+}
+
+/* GObject finalize: free the cached main headers and the three adapters,
+ * then chain up. */
+static void
+gst_rtp_j2k_depay_finalize (GObject * object)
+{
+  GstRtpJ2KDepay *rtpj2kdepay;
+
+  rtpj2kdepay = GST_RTP_J2K_DEPAY (object);
+
+  clear_mheaders (rtpj2kdepay);
+
+  g_object_unref (rtpj2kdepay->pu_adapter);
+  g_object_unref (rtpj2kdepay->t_adapter);
+  g_object_unref (rtpj2kdepay->f_adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Configure output caps from the RTP sink caps.
+ * The colorspace is derived from the RFC 5371 "sampling" field when
+ * present; otherwise we fall back to a legacy "colorspace" field. */
+static gboolean
+gst_rtp_j2k_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure = NULL;
+  gint clock_rate;
+  GstCaps *outcaps = NULL;
+  gboolean res = FALSE;
+  const gchar *colorspace = NULL;
+  const gchar *sampling = NULL;
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* RFC 5371 mandates a 90 kHz clock; default to it when absent */
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 90000;
+  depayload->clock_rate = clock_rate;
+
+  sampling = gst_structure_get_string (structure, "sampling");
+  if (sampling) {
+    if (!strcmp (sampling, GST_RTP_J2K_RGB) ||
+        !strcmp (sampling, GST_RTP_J2K_RGBA) ||
+        !strcmp (sampling, GST_RTP_J2K_BGR) ||
+        !strcmp (sampling, GST_RTP_J2K_BGRA))
+      colorspace = "sRGB";
+    else if (!strcmp (sampling, GST_RTP_J2K_GRAYSCALE))
+      colorspace = "GRAY";
+    else
+      colorspace = "sYUV";
+  } else {
+    GST_ELEMENT_WARNING (depayload, STREAM, DEMUX, NULL,
+        ("Non-compliant stream: sampling field missing. Frames may appear incorrect"));
+    colorspace = gst_structure_get_string (structure, "colorspace");
+    /* fix: the colorspace field may also be missing; the original code
+     * dereferenced NULL in strcmp() here. Refuse to negotiate instead. */
+    if (colorspace == NULL) {
+      GST_ERROR_OBJECT (depayload,
+          "Non-compliant stream: both sampling and colorspace fields missing");
+      return FALSE;
+    }
+    if (!strcmp (colorspace, "GRAY")) {
+      sampling = GST_RTP_J2K_GRAYSCALE;
+    }
+  }
+
+  outcaps = gst_caps_new_simple ("image/x-jpc",
+      "framerate", GST_TYPE_FRACTION, 0, 1,
+      "fields", G_TYPE_INT, 1, "colorspace", G_TYPE_STRING, colorspace, NULL);
+
+  if (sampling)
+    gst_caps_set_simple (outcaps, "sampling", G_TYPE_STRING, sampling, NULL);
+
+  res = gst_pad_set_caps (depayload->srcpad, outcaps);
+
+  gst_caps_unref (outcaps);
+
+  return res;
+}
+
+/* Drop the pending packing-unit data and lose sync; a new SOC/SOT/SOP
+ * marker is needed before we accumulate data again. */
+static void
+gst_rtp_j2k_depay_clear_pu (GstRtpJ2KDepay * rtpj2kdepay)
+{
+  gst_adapter_clear (rtpj2kdepay->pu_adapter);
+  rtpj2kdepay->have_sync = FALSE;
+}
+
+/* Flush the accumulated packing unit (PU).
+ * If the PU carried main-header data (pu_MHF != 0) it is cached under the
+ * current mh_id for reuse; otherwise its packets are moved into the tile
+ * adapter. Always clears have_sync. */
+static GstFlowReturn
+gst_rtp_j2k_depay_flush_pu (GstRTPBaseDepayload * depayload)
+{
+  GstRtpJ2KDepay *rtpj2kdepay;
+  GstBuffer *mheader;
+  guint avail, MHF, mh_id;
+
+  rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
+
+  /* take all available buffers */
+  avail = gst_adapter_available (rtpj2kdepay->pu_adapter);
+  if (avail == 0)
+    goto done;
+
+  MHF = rtpj2kdepay->pu_MHF;
+  mh_id = rtpj2kdepay->last_mh_id;
+
+  GST_DEBUG_OBJECT (rtpj2kdepay, "flushing PU of size %u", avail);
+
+  if (MHF == 0) {
+    GList *packets, *walk;
+
+    /* ordinary PU: move it packet by packet into the tile adapter,
+     * preserving buffer boundaries */
+    packets = gst_adapter_take_list (rtpj2kdepay->pu_adapter, avail);
+    /* append packets */
+    for (walk = packets; walk; walk = g_list_next (walk)) {
+      GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+      GST_DEBUG_OBJECT (rtpj2kdepay,
+          "append pu packet of size %" G_GSIZE_FORMAT,
+          gst_buffer_get_size (buf));
+      gst_adapter_push (rtpj2kdepay->t_adapter, buf);
+    }
+    g_list_free (packets);
+  } else {
+    /* we have a header */
+    GST_DEBUG_OBJECT (rtpj2kdepay, "keeping header %u", mh_id);
+    /* we managed to see the start and end of the header, take all from
+     * adapter and keep in header */
+    mheader = gst_adapter_take_buffer (rtpj2kdepay->pu_adapter, avail);
+
+    store_mheader (rtpj2kdepay, mh_id, mheader);
+  }
+
+done:
+  rtpj2kdepay->have_sync = FALSE;
+
+  return GST_FLOW_OK;
+}
+
+/* Flush the accumulated tile data into the frame adapter.
+ * Pushes the cached main header first if the frame is empty, and rewrites
+ * the SOT marker's Psot length field so it matches the actual tile size. */
+static GstFlowReturn
+gst_rtp_j2k_depay_flush_tile (GstRTPBaseDepayload * depayload)
+{
+  GstRtpJ2KDepay *rtpj2kdepay;
+  guint avail, mh_id;
+  GList *packets, *walk;
+  guint8 end[2];
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstMapInfo map;
+  GstBuffer *buf;
+
+  rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
+
+  /* flush pending PU */
+  gst_rtp_j2k_depay_flush_pu (depayload);
+
+  /* take all available buffers */
+  avail = gst_adapter_available (rtpj2kdepay->t_adapter);
+  if (avail == 0)
+    goto done;
+
+  mh_id = rtpj2kdepay->last_mh_id;
+
+  GST_DEBUG_OBJECT (rtpj2kdepay, "flushing tile of size %u", avail);
+
+  if (gst_adapter_available (rtpj2kdepay->f_adapter) == 0) {
+    GstBuffer *mheader;
+
+    /* we need a header now */
+    if ((mheader = rtpj2kdepay->MH[mh_id]) == NULL)
+      goto waiting_header;
+
+    /* push header in the adapter */
+    GST_DEBUG_OBJECT (rtpj2kdepay, "pushing header %u", mh_id);
+    gst_adapter_push (rtpj2kdepay->f_adapter, gst_buffer_ref (mheader));
+  }
+
+  /* check for last bytes */
+  /* NOTE(review): assumes avail >= 2 here; a 1-byte tile would make
+   * avail - 2 underflow (guint) — confirm callers can't produce that */
+  gst_adapter_copy (rtpj2kdepay->t_adapter, end, avail - 2, 2);
+
+  /* now append the tile packets to the frame */
+  packets = gst_adapter_take_list (rtpj2kdepay->t_adapter, avail);
+  for (walk = packets; walk; walk = g_list_next (walk)) {
+    buf = GST_BUFFER_CAST (walk->data);
+
+    if (walk == packets) {
+      /* first buffer should contain the SOT */
+      gst_buffer_map (buf, &map, GST_MAP_READ);
+
+      if (map.size < 12)
+        goto invalid_tile;
+
+      if (map.data[0] == GST_J2K_MARKER && map.data[1] == GST_J2K_MARKER_SOT) {
+        guint Psot, nPsot;
+
+        /* a trailing EOC does not count towards the tile length */
+        if (end[0] == GST_J2K_MARKER && end[1] == GST_J2K_MARKER_EOC)
+          nPsot = avail - 2;
+        else
+          nPsot = avail;
+
+        Psot = GST_READ_UINT32_BE (&map.data[6]);
+        if (Psot != nPsot && Psot != 0) {
+          /* Psot must match the size of the tile */
+          GST_DEBUG_OBJECT (rtpj2kdepay, "set Psot from %u to %u", Psot, nPsot);
+          gst_buffer_unmap (buf, &map);
+
+          buf = gst_buffer_make_writable (buf);
+
+          gst_buffer_map (buf, &map, GST_MAP_WRITE);
+          GST_WRITE_UINT32_BE (&map.data[6], nPsot);
+        }
+      }
+      gst_buffer_unmap (buf, &map);
+    }
+
+    GST_DEBUG_OBJECT (rtpj2kdepay, "append pu packet of size %" G_GSIZE_FORMAT,
+        gst_buffer_get_size (buf));
+    gst_adapter_push (rtpj2kdepay->f_adapter, buf);
+  }
+  g_list_free (packets);
+
+done:
+  rtpj2kdepay->last_tile = -1;
+
+  return ret;
+
+  /* errors */
+waiting_header:
+  {
+    GST_DEBUG_OBJECT (rtpj2kdepay, "waiting for header %u", mh_id);
+    gst_adapter_clear (rtpj2kdepay->t_adapter);
+    rtpj2kdepay->last_tile = -1;
+    return ret;
+  }
+invalid_tile:
+  {
+    GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE, ("Invalid tile"), (NULL));
+    gst_buffer_unmap (buf, &map);
+    gst_adapter_clear (rtpj2kdepay->t_adapter);
+    rtpj2kdepay->last_tile = -1;
+    return ret;
+  }
+}
+
+/* Flush the assembled frame downstream, appending an EOC marker if the
+ * codestream doesn't already end in one, then reset per-frame state. */
+static GstFlowReturn
+gst_rtp_j2k_depay_flush_frame (GstRTPBaseDepayload * depayload)
+{
+  GstRtpJ2KDepay *rtpj2kdepay;
+  guint8 end[2];
+  guint avail;
+
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
+
+  /* flush pending tile */
+  gst_rtp_j2k_depay_flush_tile (depayload);
+
+  /* last buffer take all data out of the adapter */
+  avail = gst_adapter_available (rtpj2kdepay->f_adapter);
+  if (avail == 0)
+    goto done;
+
+  if (avail > 2) {
+    GstBuffer *outbuf;
+
+    /* take the last bytes of the JPEG 2000 data to see if there is an EOC
+     * marker */
+    gst_adapter_copy (rtpj2kdepay->f_adapter, end, avail - 2, 2);
+
+    /* NOTE(review): this only appends an EOC when BOTH bytes differ from
+     * the marker; a trailing 0xFF,<non-D9> or <non-FF>,0xD9 pair is left
+     * alone — looks like this should be a logical OR, confirm intent */
+    if (end[0] != GST_J2K_MARKER && end[1] != GST_J2K_MARKER_EOC) {
+      end[0] = GST_J2K_MARKER;
+      end[1] = GST_J2K_MARKER_EOC;
+
+      GST_DEBUG_OBJECT (rtpj2kdepay, "no EOC marker, adding one");
+
+      /* no EOI marker, add one */
+      outbuf = gst_buffer_new_and_alloc (2);
+      gst_buffer_fill (outbuf, 0, end, 2);
+
+      gst_adapter_push (rtpj2kdepay->f_adapter, outbuf);
+      avail += 2;
+    }
+
+    GST_DEBUG_OBJECT (rtpj2kdepay, "pushing buffer of %u bytes", avail);
+    outbuf = gst_adapter_take_buffer (rtpj2kdepay->f_adapter, avail);
+    gst_rtp_drop_non_video_meta (depayload, outbuf);
+    ret = gst_rtp_base_depayload_push (depayload, outbuf);
+  } else {
+    GST_WARNING_OBJECT (rtpj2kdepay, "empty packet");
+    gst_adapter_clear (rtpj2kdepay->f_adapter);
+  }
+
+  /* we accept any mh_id now */
+  rtpj2kdepay->last_mh_id = -1;
+
+  /* reset state */
+  rtpj2kdepay->next_frag = 0;
+  rtpj2kdepay->have_sync = FALSE;
+
+done:
+  /* we can't keep headers with mh_id of 0 */
+  store_mheader (rtpj2kdepay, 0, NULL);
+
+  return ret;
+}
+
+/* RFC 5371 depayload entry point, called once per RTP packet.
+ * Parses the 8-byte JPEG 2000 payload header, tracks the main-header id,
+ * (re)synchronizes on SOC/SOT/SOP markers and accumulates packetization
+ * units in the pu adapter.  Assembled data is pushed downstream by the
+ * flush helpers, so this function itself always returns NULL. */
+static GstBuffer *
+gst_rtp_j2k_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpJ2KDepay *rtpj2kdepay;
+  guint8 *payload;
+  guint MHF, mh_id, frag_offset, tile, payload_len, j2klen;
+  gint gap;
+  guint32 rtptime;
+
+  rtpj2kdepay = GST_RTP_J2K_DEPAY (depayload);
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+  /* we need at least a header */
+  if (payload_len < GST_RTP_J2K_HEADER_SIZE)
+    goto empty_packet;
+
+  rtptime = gst_rtp_buffer_get_timestamp (rtp);
+
+  /* new timestamp marks new frame */
+  if (rtpj2kdepay->last_rtptime != rtptime) {
+    rtpj2kdepay->last_rtptime = rtptime;
+    /* flush pending frame */
+    gst_rtp_j2k_depay_flush_frame (depayload);
+  }
+
+  /*
+   *  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |tp |MHF|mh_id|T|     priority  |           tile number         |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |reserved       |             fragment offset                   |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   */
+  MHF = (payload[0] & 0x30) >> 4;
+  mh_id = (payload[0] & 0xe) >> 1;
+
+  /* -1 means "accept any id"; otherwise all packets of a frame must share
+   * the same main-header id */
+  if (rtpj2kdepay->last_mh_id == -1)
+    rtpj2kdepay->last_mh_id = mh_id;
+  else if (rtpj2kdepay->last_mh_id != mh_id)
+    goto wrong_mh_id;
+
+  tile = (payload[2] << 8) | payload[3];
+  frag_offset = (payload[5] << 16) | (payload[6] << 8) | payload[7];
+  j2klen = payload_len - GST_RTP_J2K_HEADER_SIZE;
+
+  GST_DEBUG_OBJECT (rtpj2kdepay, "MHF %u, tile %u, frag %u, expected %u", MHF,
+      tile, frag_offset, rtpj2kdepay->next_frag);
+
+  /* calculate the gap between expected frag */
+  gap = frag_offset - rtpj2kdepay->next_frag;
+  /* calculate next frag */
+  rtpj2kdepay->next_frag = frag_offset + j2klen;
+
+  if (gap != 0) {
+    GST_DEBUG_OBJECT (rtpj2kdepay, "discont of %d, clear PU", gap);
+    /* discont, clear pu adapter and resync */
+    gst_rtp_j2k_depay_clear_pu (rtpj2kdepay);
+  }
+
+  /* check for sync code */
+  if (j2klen > 2 && payload[GST_RTP_J2K_HEADER_SIZE] == GST_J2K_MARKER) {
+    guint marker = payload[GST_RTP_J2K_HEADER_SIZE + 1];
+
+    /* packets must start with SOC, SOT or SOP */
+    switch (marker) {
+      case GST_J2K_MARKER_SOC:
+        GST_DEBUG_OBJECT (rtpj2kdepay, "found SOC packet");
+        /* flush the previous frame, should have happened when the timestamp
+         * changed above. */
+        gst_rtp_j2k_depay_flush_frame (depayload);
+        rtpj2kdepay->have_sync = TRUE;
+        break;
+      case GST_J2K_MARKER_SOT:
+        /* flush the previous tile */
+        gst_rtp_j2k_depay_flush_tile (depayload);
+        GST_DEBUG_OBJECT (rtpj2kdepay, "found SOT packet");
+        rtpj2kdepay->have_sync = TRUE;
+        /* we sync on the tile now */
+        rtpj2kdepay->last_tile = tile;
+        break;
+      case GST_J2K_MARKER_SOP:
+        GST_DEBUG_OBJECT (rtpj2kdepay, "found SOP packet");
+        /* flush the previous PU */
+        gst_rtp_j2k_depay_flush_pu (depayload);
+        if (rtpj2kdepay->last_tile != tile) {
+          /* wrong tile, we lose sync and we need a new SOT or SOC to regain
+           * sync. First flush out the previous tile if we have one. */
+          if (rtpj2kdepay->last_tile != -1)
+            gst_rtp_j2k_depay_flush_tile (depayload);
+          /* now we have no more valid tile and no sync */
+          rtpj2kdepay->last_tile = -1;
+          rtpj2kdepay->have_sync = FALSE;
+        } else {
+          rtpj2kdepay->have_sync = TRUE;
+        }
+        break;
+      default:
+        /* fixed format specifier: marker is a byte code, print it in hex to
+         * match the "0x" prefix (was "0x%02d") */
+        GST_DEBUG_OBJECT (rtpj2kdepay, "no sync packet 0x%02x", marker);
+        break;
+    }
+  }
+
+  if (rtpj2kdepay->have_sync) {
+    GstBuffer *pu_frag;
+
+    if (gst_adapter_available (rtpj2kdepay->pu_adapter) == 0) {
+      /* first part of pu, record state */
+      GST_DEBUG_OBJECT (rtpj2kdepay, "first PU");
+      rtpj2kdepay->pu_MHF = MHF;
+    }
+    /* and push in pu adapter; skip the payload header, use the named
+     * constant instead of a magic 8 for consistency with the checks above */
+    GST_DEBUG_OBJECT (rtpj2kdepay, "push pu of size %u in adapter", j2klen);
+    pu_frag =
+        gst_rtp_buffer_get_payload_subbuffer (rtp, GST_RTP_J2K_HEADER_SIZE,
+        -1);
+    gst_adapter_push (rtpj2kdepay->pu_adapter, pu_frag);
+
+    if (MHF & 2) {
+      /* last part of main header received, we can flush it */
+      GST_DEBUG_OBJECT (rtpj2kdepay, "header end, flush pu");
+      gst_rtp_j2k_depay_flush_pu (depayload);
+    }
+  } else {
+    GST_DEBUG_OBJECT (rtpj2kdepay, "discard packet, no sync");
+  }
+
+  /* marker bit finishes the frame */
+  if (gst_rtp_buffer_get_marker (rtp)) {
+    GST_DEBUG_OBJECT (rtpj2kdepay, "marker set, last buffer");
+    /* then flush frame */
+    gst_rtp_j2k_depay_flush_frame (depayload);
+  }
+
+  return NULL;
+
+  /* ERRORS */
+empty_packet:
+  {
+    GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE,
+        ("Empty Payload."), (NULL));
+    return NULL;
+  }
+wrong_mh_id:
+  {
+    GST_ELEMENT_WARNING (rtpj2kdepay, STREAM, DECODE,
+        ("Invalid mh_id %u, expected %u", mh_id, rtpj2kdepay->last_mh_id),
+        (NULL));
+    gst_rtp_j2k_depay_clear_pu (rtpj2kdepay);
+    return NULL;
+  }
+}
+
+/* GObject property setter.  This element installs no properties, so every
+ * id we receive is invalid by definition. */
+static void
+gst_rtp_j2k_depay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  /* no properties installed: warn unconditionally */
+  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject property getter.  This element installs no properties, so every
+ * id we receive is invalid by definition. */
+static void
+gst_rtp_j2k_depay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  /* no properties installed: warn unconditionally */
+  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* Element state-change handler.  The depayloader state is reset both when
+ * going READY->PAUSED (before chaining up, so the element starts clean) and
+ * when going PAUSED->READY (after chaining up, so streaming has stopped
+ * before adapters and cached headers are dropped). */
+static GstStateChangeReturn
+gst_rtp_j2k_depay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRtpJ2KDepay *rtpj2kdepay;
+  GstStateChangeReturn ret;
+
+  rtpj2kdepay = GST_RTP_J2K_DEPAY (element);
+
+  /* upward transitions: reset before the parent starts streaming */
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_rtp_j2k_depay_reset (rtpj2kdepay);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* downward transitions: reset after the parent has torn down streaming */
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_rtp_j2k_depay_reset (rtpj2kdepay);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpj2kdepay.h b/gst/rtp/gstrtpj2kdepay.h
new file mode 100644
index 0000000000..5f499393d7
--- /dev/null
+++ b/gst/rtp/gstrtpj2kdepay.h
@@ -0,0 +1,71 @@
+/* GStreamer
+ * Copyright (C) <2009> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_J2K_DEPAY_H__
+#define __GST_RTP_J2K_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_J2K_DEPAY \
+ (gst_rtp_j2k_depay_get_type())
+#define GST_RTP_J2K_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_J2K_DEPAY,GstRtpJ2KDepay))
+#define GST_RTP_J2K_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_J2K_DEPAY,GstRtpJ2KDepayClass))
+#define GST_IS_RTP_J2K_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_J2K_DEPAY))
+#define GST_IS_RTP_J2K_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_J2K_DEPAY))
+typedef struct _GstRtpJ2KDepay GstRtpJ2KDepay;
+typedef struct _GstRtpJ2KDepayClass GstRtpJ2KDepayClass;
+
+struct _GstRtpJ2KDepay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* RTP timestamp of the frame currently being assembled; a change marks a
+   * new frame */
+  guint64 last_rtptime;
+  /* main-header id of the current frame; compared against -1 in the .c to
+   * mean "accept any id" (stored in a guint via wraparound) */
+  guint last_mh_id;
+  /* tile number we are synchronized on; -1 when no valid tile */
+  guint last_tile;
+
+  /* cached main headers; presumably indexed by mh_id (0..7) — the store
+   * helper is outside this view, confirm against gstrtpj2kdepay.c */
+  GstBuffer *MH[8];
+
+  /* MHF flags recorded at the start of the current packetization unit */
+  guint pu_MHF;
+  /* accumulation adapters: packetization unit, tile, and whole frame */
+  GstAdapter *pu_adapter;
+  GstAdapter *t_adapter;
+  GstAdapter *f_adapter;
+
+  /* expected fragment offset of the next RTP packet (discont detection) */
+  guint next_frag;
+  /* TRUE once we saw an SOC/SOT/SOP and may accumulate data */
+  gboolean have_sync;
+
+  gint width, height;
+};
+
+struct _GstRtpJ2KDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_j2k_depay_get_type (void);
+
+
+G_END_DECLS
+#endif /* __GST_RTP_J2K_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpj2kpay.c b/gst/rtp/gstrtpj2kpay.c
new file mode 100644
index 0000000000..1fb35164cd
--- /dev/null
+++ b/gst/rtp/gstrtpj2kpay.c
@@ -0,0 +1,567 @@
+/* GStreamer
+ * Copyright (C) 2009 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-rtpj2kpay
+ * @title: rtpj2kpay
+ *
+ * Payload encode JPEG 2000 images into RTP packets according to RFC 5371
+ * and RFC 5372.
+ * For detailed information see: https://datatracker.ietf.org/doc/rfc5371/
+ * and https://datatracker.ietf.org/doc/rfc5372/
+ *
+ * The payloader takes a JPEG 2000 image, scans it for "packetization
+ * units" and constructs the RTP packet header followed by the JPEG 2000
+ * codestream. A "packetization unit" is defined as either a JPEG 2000 main header,
+ * a JPEG 2000 tile-part header, or a JPEG 2000 packet.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "gstrtpj2kcommon.h"
+#include "gstrtpj2kpay.h"
+#include "gstrtputils.h"
+
+static GstStaticPadTemplate gst_rtp_j2k_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("image/x-jpc, " GST_RTP_J2K_SAMPLING_LIST)
+ );
+
+
+static GstStaticPadTemplate gst_rtp_j2k_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ " media = (string) \"video\", "
+ " payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ " clock-rate = (int) 90000, "
+ GST_RTP_J2K_SAMPLING_LIST "," " encoding-name = (string) \"JPEG2000\"")
+ );
+
+GST_DEBUG_CATEGORY_STATIC (rtpj2kpay_debug);
+#define GST_CAT_DEFAULT (rtpj2kpay_debug)
+
+
+enum
+{
+ PROP_0,
+ PROP_LAST
+};
+
+/* In-memory representation of the RFC 5371 payload header (see the bit
+ * diagram in gst_rtp_j2k_pay_handle_buffer). */
+typedef struct
+{
+  guint tp:2;                   /* type: progressive / odd / even field */
+  guint MHF:2;                  /* Main Header Flag */
+  guint mh_id:3;                /* Main Header identification */
+  guint T:1;                    /* Tile field invalidation flag */
+  guint priority:8;             /* payload priority */
+  guint tile:16;                /* tile number of the payload */
+  guint offset:24;              /* fragment offset into the codestream */
+} RtpJ2KHeader;
+
+static void gst_rtp_j2k_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_j2k_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_rtp_j2k_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+
+static GstFlowReturn gst_rtp_j2k_pay_handle_buffer (GstRTPBasePayload * pad,
+ GstBuffer * buffer);
+
+#define gst_rtp_j2k_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJ2KPay, gst_rtp_j2k_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpj2kpay, "rtpj2kpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_J2K_PAY, rtp_element_init (plugin));
+
+/* Class initializer: wires up property accessors, pad templates, element
+ * metadata and the base-payloader vmethods (set_caps / handle_buffer). */
+static void
+gst_rtp_j2k_pay_class_init (GstRtpJ2KPayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gobject_class->set_property = gst_rtp_j2k_pay_set_property;
+  gobject_class->get_property = gst_rtp_j2k_pay_get_property;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_j2k_pay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_j2k_pay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP JPEG 2000 payloader", "Codec/Payloader/Network/RTP",
+      "Payload-encodes JPEG 2000 pictures into RTP packets (RFC 5371)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_j2k_pay_setcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_j2k_pay_handle_buffer;
+
+  GST_DEBUG_CATEGORY_INIT (rtpj2kpay_debug, "rtpj2kpay", 0,
+      "JPEG 2000 RTP Payloader");
+}
+
+/* Instance initializer: nothing to set up — all per-buffer state lives in a
+ * stack-allocated RtpJ2KState inside handle_buffer. */
+static void
+gst_rtp_j2k_pay_init (GstRtpJ2KPay * pay)
+{
+}
+
+/* set_caps vmethod: configure the RTP output caps from the sink caps.
+ * "sampling" is required by RFC 5371/5372; width/height are forwarded when
+ * present.  Returns FALSE when the caps cannot be accepted. */
+static gboolean
+gst_rtp_j2k_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+  GstStructure *caps_structure = gst_caps_get_structure (caps, 0);
+  gboolean res;
+  gint width = 0, height = 0;
+  const gchar *sampling = NULL;
+
+  gboolean has_width = gst_structure_get_int (caps_structure, "width", &width);
+  gboolean has_height =
+      gst_structure_get_int (caps_structure, "height", &height);
+
+
+  /* sampling is a required field: reject caps that lack it instead of
+   * silently forwarding a NULL string into the output caps */
+  sampling = gst_structure_get_string (caps_structure, "sampling");
+  if (sampling == NULL) {
+    GST_ERROR_OBJECT (basepayload, "no sampling field in sink caps");
+    return FALSE;
+  }
+
+  gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "JPEG2000",
+      90000);
+
+  if (has_width && has_height)
+    res = gst_rtp_base_payload_set_outcaps (basepayload,
+        "sampling", G_TYPE_STRING, sampling, "width", G_TYPE_INT, width,
+        "height", G_TYPE_INT, height, NULL);
+  else
+    res =
+        gst_rtp_base_payload_set_outcaps (basepayload, "sampling",
+        G_TYPE_STRING, sampling, NULL);
+  return res;
+}
+
+
+/* Read the big-endian 16-bit marker-segment length stored at @offset. */
+static guint
+gst_rtp_j2k_pay_header_size (const guint8 * data, guint offset)
+{
+  guint high = data[offset];
+  guint low = data[offset + 1];
+
+  return (high << 8) | low;
+}
+
+
+/* Scan forward from *offset for the next 0xff (GST_J2K_MARKER) byte and
+ * return the marker code following it, advancing *offset past the code.
+ * Returns GST_J2K_MARKER_EOC when the end of @data is reached before a
+ * complete two-byte marker is found.  Note the increment-then-test order:
+ * callers must guarantee *offset < size on entry. */
+static GstRtpJ2KMarker
+gst_rtp_j2k_pay_scan_marker (const guint8 * data, guint size, guint * offset)
+{
+  while ((data[(*offset)++] != GST_J2K_MARKER) && ((*offset) < size));
+
+  if (G_UNLIKELY ((*offset) >= size)) {
+    /* ran off the end: treat as end-of-codestream */
+    return GST_J2K_MARKER_EOC;
+  } else {
+    guint8 marker = data[(*offset)++];
+    return (GstRtpJ2KMarker) marker;
+  }
+}
+
+/* Scanner state threaded through find_pu_end while splitting a codestream
+ * into packetization units. */
+typedef struct
+{
+  RtpJ2KHeader header;          /* RTP payload header being built */
+  gboolean multi_tile;          /* several tiles seen in one RTP packet */
+  gboolean bitstream;           /* TRUE while inside tile bitstream (after SOD) */
+  guint next_sot;               /* byte offset of the next expected SOT */
+  gboolean force_packet;        /* close the current RTP packet now */
+} RtpJ2KState;
+
+
+/* Note: The standard recommends that headers be put in their own RTP packets, so we follow
+ * this recommendation in the code. Also, this method groups together all J2K packets
+ * for a tile part and treats this group as a packetization unit. According to the RFC,
+ * only an individual J2K packet is considered a packetization unit.
+ */
+
+/* Scan @data from @offset for the end of the next packetization unit and
+ * return the offset one past it, updating @state (header flags, tile
+ * tracking, bitstream mode, force_packet).  Two modes: header parsing
+ * (SOC/SOT/SOD/EOC) and bitstream parsing (SOP/EPH) entered after SOD. */
+static guint
+find_pu_end (GstRtpJ2KPay * pay, const guint8 * data, guint size,
+    guint offset, RtpJ2KState * state)
+{
+  gboolean cut_sop = FALSE;
+  GstRtpJ2KMarker marker;
+
+  /* parse the j2k header for 'start of codestream' */
+  GST_LOG_OBJECT (pay, "checking from offset %u", offset);
+  while (offset < size) {
+    marker = gst_rtp_j2k_pay_scan_marker (data, size, &offset);
+
+    if (state->bitstream) {
+      /* parsing bitstream, only look for SOP */
+      switch (marker) {
+        case GST_J2K_MARKER_SOP:
+          GST_LOG_OBJECT (pay, "found SOP at %u", offset);
+          /* cut before the second SOP we encounter: one J2K packet per PU */
+          if (cut_sop)
+            return offset - 2;
+          cut_sop = TRUE;
+          break;
+        case GST_J2K_MARKER_EPH:
+          /* just skip over EPH */
+          GST_LOG_OBJECT (pay, "found EPH at %u", offset);
+          break;
+        default:
+          if (offset >= state->next_sot) {
+            GST_LOG_OBJECT (pay, "reached next SOT at %u", offset);
+            state->bitstream = FALSE;
+            state->force_packet = TRUE;
+            if (marker == GST_J2K_MARKER_EOC && state->next_sot + 2 <= size)
+              /* include EOC but never go past the max size */
+              return state->next_sot + 2;
+            else
+              return state->next_sot;
+          }
+          break;
+      }
+    } else {
+      switch (marker) {
+        case GST_J2K_MARKER_SOC:
+          GST_LOG_OBJECT (pay, "found SOC at %u", offset);
+          /* start off by assuming that we will fit the entire header
+             into the RTP payload */
+          state->header.MHF = 3;
+          break;
+        case GST_J2K_MARKER_SOT:
+        {
+          guint len, Psot, tile;
+
+          GST_LOG_OBJECT (pay, "found SOT at %u", offset);
+          /* SOT for first tile part in code stream:
+             force close of current RTP packet, so that it
+             only contains main header */
+          if (state->header.MHF) {
+            state->force_packet = TRUE;
+            return offset - 2;
+          }
+
+          /* parse SOT but do some sanity checks first */
+          len = gst_rtp_j2k_pay_header_size (data, offset);
+          GST_LOG_OBJECT (pay, "SOT length %u", len);
+          if (len < 8)
+            return size;
+          if (offset + len >= size)
+            return size;
+
+          /* Isot */
+          tile = GST_READ_UINT16_BE (&data[offset + 2]);
+
+          if (!state->multi_tile) {
+            /* we have detected multiple tiles in this rtp packet : tile bit is now invalid */
+            if (state->header.T == 0 && state->header.tile != tile) {
+              state->header.T = 1;
+              state->multi_tile = TRUE;
+            } else {
+              state->header.T = 0;
+            }
+          }
+          state->header.tile = tile;
+
+          /* Note: Tile parts from multiple tiles in single RTP packet
+             will make T invalid.
+             This cannot happen in our case since we always
+             send tile headers in their own RTP packets, so we cannot mix
+             tile parts in a single RTP packet */
+
+          /* Psot: offset of next tile. If it's 0, next tile goes all the way
+             to the end of the data */
+          Psot = GST_READ_UINT32_BE (&data[offset + 4]);
+          if (Psot == 0)
+            state->next_sot = size;
+          else
+            state->next_sot = offset - 2 + Psot;
+
+          offset += len;
+          GST_LOG_OBJECT (pay, "Isot %u, Psot %u, next %u", state->header.tile,
+              Psot, state->next_sot);
+          break;
+        }
+        case GST_J2K_MARKER_SOD:
+          GST_LOG_OBJECT (pay, "found SOD at %u", offset);
+          /* go to bitstream parsing */
+          state->bitstream = TRUE;
+          /* cut at the next SOP or else include all data */
+          cut_sop = TRUE;
+          /* force a new packet when we see SOP, this can be optional but the
+           * spec recommends packing headers separately */
+          state->force_packet = TRUE;
+          break;
+        case GST_J2K_MARKER_EOC:
+          GST_LOG_OBJECT (pay, "found EOC at %u", offset);
+          return offset;
+        default:
+        {
+          /* any other marker segment: skip its body using the embedded
+           * big-endian length */
+          guint len = gst_rtp_j2k_pay_header_size (data, offset);
+          GST_LOG_OBJECT (pay, "skip 0x%02x len %u", marker, len);
+          offset += len;
+          break;
+        }
+      }
+    }
+  }
+  GST_DEBUG_OBJECT (pay, "reached end of data");
+  return size;
+}
+
+/* handle_buffer vmethod: split one JPEG 2000 image into packetization units
+ * (via find_pu_end), fragment each unit into MTU-sized RTP packets with an
+ * RFC 5371 payload header, and push the whole packet list downstream.
+ * Takes ownership of @buffer. */
+static GstFlowReturn
+gst_rtp_j2k_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstRtpJ2KPay *pay;
+  GstClockTime timestamp;
+  GstFlowReturn ret = GST_FLOW_ERROR;
+  RtpJ2KState state;
+  GstBufferList *list = NULL;
+  GstMapInfo map;
+  guint mtu, max_size;
+  guint offset;
+  guint end, pos;
+
+  pay = GST_RTP_J2K_PAY (basepayload);
+  mtu = GST_RTP_BASE_PAYLOAD_MTU (pay);
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+  timestamp = GST_BUFFER_PTS (buffer);
+  /* offset = start of data not yet packetized, pos = last scan position,
+   * end = end of the scanned packetization unit */
+  offset = pos = end = 0;
+
+  GST_LOG_OBJECT (pay,
+      "got buffer size %" G_GSIZE_FORMAT ", timestamp %" GST_TIME_FORMAT,
+      map.size, GST_TIME_ARGS (timestamp));
+
+  /* do some header defaults first */
+  state.header.tp = 0;          /* only progressive scan */
+  state.header.MHF = 0;         /* no header */
+  state.header.mh_id = 0;       /* always 0 for now */
+  state.header.T = 1;           /* invalid tile, because we always begin with the main header */
+  state.header.priority = 255;  /* always 255 for now */
+  state.header.tile = 0xffff;   /* no tile number */
+  state.header.offset = 0;      /* offset of 0 */
+  state.multi_tile = FALSE;
+  state.bitstream = FALSE;
+  state.next_sot = 0;
+  state.force_packet = FALSE;
+
+  /* get max packet length */
+  max_size =
+      gst_rtp_buffer_calc_payload_len (mtu - GST_RTP_J2K_HEADER_SIZE, 0, 0);
+
+  list = gst_buffer_list_new_sized ((mtu / max_size) + 1);
+
+  do {
+    GstBuffer *outbuf;
+    guint8 *header;
+    guint payload_size;
+    guint pu_size;
+    GstRTPBuffer rtp = { NULL };
+
+    /* try to pack as much as we can */
+    do {
+      /* see how much we have scanned already */
+      pu_size = end - offset;
+      GST_DEBUG_OBJECT (pay, "scanned pu size %u", pu_size);
+
+      /* we need to make a new packet */
+      if (state.force_packet) {
+        GST_DEBUG_OBJECT (pay, "need to force a new packet");
+        state.force_packet = FALSE;
+        pos = end;
+        break;
+      }
+
+      /* else see if we have enough */
+      if (pu_size > max_size) {
+        if (pos != offset)
+          /* the packet became too large, use previous scanpos */
+          pu_size = pos - offset;
+        else
+          /* the already scanned data was already too big, make sure we start
+           * scanning from the last searched position */
+          pos = end;
+
+        GST_DEBUG_OBJECT (pay, "max size exceeded pu_size %u", pu_size);
+        break;
+      }
+
+      pos = end;
+
+      /* exit when finished */
+      if (pos == map.size)
+        break;
+
+      /* scan next packetization unit and fill in the header */
+      end = find_pu_end (pay, map.data, map.size, pos, &state);
+    } while (TRUE);
+
+    /* fragment the scanned unit into MTU-sized RTP packets */
+    while (pu_size > 0) {
+      guint packet_size, data_size;
+      GstBuffer *paybuf;
+
+      /* calculate the packet size */
+      packet_size =
+          gst_rtp_buffer_calc_packet_len (pu_size + GST_RTP_J2K_HEADER_SIZE, 0,
+          0);
+
+      if (packet_size > mtu) {
+        GST_DEBUG_OBJECT (pay, "needed packet size %u clamped to MTU %u",
+            packet_size, mtu);
+        packet_size = mtu;
+      } else {
+        GST_DEBUG_OBJECT (pay, "needed packet size %u fits in MTU %u",
+            packet_size, mtu);
+      }
+
+      /* get total payload size and data size */
+      payload_size = gst_rtp_buffer_calc_payload_len (packet_size, 0, 0);
+      data_size = payload_size - GST_RTP_J2K_HEADER_SIZE;
+
+      /* make buffer for header */
+      outbuf =
+          gst_rtp_base_payload_allocate_output_buffer (basepayload,
+          GST_RTP_J2K_HEADER_SIZE, 0, 0);
+
+      GST_BUFFER_PTS (outbuf) = timestamp;
+
+      gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+      /* get pointer to header */
+      header = gst_rtp_buffer_get_payload (&rtp);
+
+      pu_size -= data_size;
+
+      /* reached the end of a packetization unit */
+      if (pu_size == 0 && end >= map.size) {
+        /* last packet of the frame: set the RTP marker bit */
+        gst_rtp_buffer_set_marker (&rtp, TRUE);
+      }
+      /* If we were processing a header, see if all fits in one RTP packet
+         or if we have to fragment it */
+      if (state.header.MHF) {
+        switch (state.header.MHF) {
+          case 3:
+            /* whole header assumed; more data left means it is fragmented */
+            if (pu_size > 0)
+              state.header.MHF = 1;
+            break;
+          case 1:
+            /* continuation; no data left means this is the last fragment */
+            if (pu_size == 0)
+              state.header.MHF = 2;
+            break;
+          default:
+            break;
+        }
+      }
+
+      /*
+       * RtpJ2KHeader:
+       * @tp: type (0 progressive, 1 odd field, 2 even field)
+       * @MHF: Main Header Flag
+       * @mh_id: Main Header Identification
+       * @T: Tile field invalidation flag
+       * @priority: priority
+       * @tile number: the tile number of the payload
+       * @reserved: set to 0
+       * @fragment offset: the byte offset of the current payload
+       *
+       *  0                   1                   2                   3
+       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       * |tp |MHF|mh_id|T|     priority  |           tile number         |
+       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       * |reserved       |             fragment offset                   |
+       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       */
+      header[0] = (state.header.tp << 6) | (state.header.MHF << 4) |
+          (state.header.mh_id << 1) | state.header.T;
+      header[1] = state.header.priority;
+      header[2] = state.header.tile >> 8;
+      header[3] = state.header.tile & 0xff;
+      header[4] = 0;
+      header[5] = state.header.offset >> 16;
+      header[6] = (state.header.offset >> 8) & 0xff;
+      header[7] = state.header.offset & 0xff;
+
+      gst_rtp_buffer_unmap (&rtp);
+
+      /* make subbuffer of j2k data */
+      paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL,
+          offset, data_size);
+      gst_rtp_copy_video_meta (basepayload, outbuf, paybuf);
+      outbuf = gst_buffer_append (outbuf, paybuf);
+
+      gst_buffer_list_add (list, outbuf);
+
+      /* reset multi_tile */
+      state.multi_tile = FALSE;
+
+
+      /* set MHF to zero if there is no more main header to process */
+      if (state.header.MHF & 2)
+        state.header.MHF = 0;
+
+      /* tile is valid, if there is no more header to process */
+      if (!state.header.MHF)
+        state.header.T = 0;
+
+
+      offset += data_size;
+      state.header.offset = offset;
+    }
+    offset = pos;
+  } while (offset < map.size);
+
+  gst_buffer_unmap (buffer, &map);
+  gst_buffer_unref (buffer);
+
+  /* push the whole buffer list at once */
+  ret = gst_rtp_base_payload_push_list (basepayload, list);
+
+  return ret;
+}
+
+/* GObject property setter.  No properties are installed on this element,
+ * so every id is invalid. */
+static void
+gst_rtp_j2k_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  /* nothing to set: warn unconditionally */
+  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject property getter.  No properties are installed on this element,
+ * so every id is invalid. */
+static void
+gst_rtp_j2k_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  /* nothing to get: warn unconditionally */
+  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/gst/rtp/gstrtpj2kpay.h b/gst/rtp/gstrtpj2kpay.h
new file mode 100644
index 0000000000..e5474938bf
--- /dev/null
+++ b/gst/rtp/gstrtpj2kpay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) 2009 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_J2K_PAY_H__
+#define __GST_RTP_J2K_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_J2K_PAY \
+ (gst_rtp_j2k_pay_get_type())
+#define GST_RTP_J2K_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_J2K_PAY,GstRtpJ2KPay))
+#define GST_RTP_J2K_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_J2K_PAY,GstRtpJ2KPayClass))
+#define GST_IS_RTP_J2K_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_J2K_PAY))
+#define GST_IS_RTP_J2K_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_J2K_PAY))
+typedef struct _GstRtpJ2KPay GstRtpJ2KPay;
+typedef struct _GstRtpJ2KPayClass GstRtpJ2KPayClass;
+
+struct _GstRtpJ2KPay
+{
+  GstRTPBasePayload payload;
+
+  /* NOTE(review): never written by the visible .c code (setcaps forwards
+   * width/height without caching them) — possibly vestigial, confirm */
+  gint height;
+  gint width;
+};
+
+GType gst_rtp_j2k_pay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_J2K_PAY_H__ */
diff --git a/gst/rtp/gstrtpjpegdepay.c b/gst/rtp/gstrtpjpegdepay.c
new file mode 100644
index 0000000000..02209d53b7
--- /dev/null
+++ b/gst/rtp/gstrtpjpegdepay.c
@@ -0,0 +1,795 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include <math.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpjpegdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpjpegdepay_debug);
+#define GST_CAT_DEFAULT (rtpjpegdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_jpeg_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("image/jpeg")
+ );
+
+static GstStaticPadTemplate gst_rtp_jpeg_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"JPEG\"; "
+ /* optional SDP attributes */
+ /*
+ * "a-framerate = (string) 0.00, "
+ * "x-framerate = (string) 0.00, "
+ * "x-dimensions = (string) \"1234,1234\", "
+ */
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_JPEG_STRING ", "
+ "clock-rate = (int) 90000"
+ /* optional SDP attributes */
+ /*
+ * "a-framerate = (string) 0.00, "
+ * "x-framerate = (string) 0.00, "
+ * "x-dimensions = (string) \"1234,1234\""
+ */
+ )
+ );
+
+#define gst_rtp_jpeg_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJPEGDepay, gst_rtp_jpeg_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpjpegdepay, "rtpjpegdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_JPEG_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_jpeg_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_jpeg_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static gboolean gst_rtp_jpeg_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_jpeg_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+/* Class initializer: wires up finalize, pad templates, element metadata,
+ * the state-change handler and the base-depayloader vmethods. */
+static void
+gst_rtp_jpeg_depay_class_init (GstRtpJPEGDepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_jpeg_depay_finalize;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_jpeg_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_jpeg_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP JPEG depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts JPEG video from RTP packets (RFC 2435)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstelement_class->change_state = gst_rtp_jpeg_depay_change_state;
+
+  gstrtpbasedepayload_class->set_caps = gst_rtp_jpeg_depay_setcaps;
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_jpeg_depay_process;
+
+  GST_DEBUG_CATEGORY_INIT (rtpjpegdepay_debug, "rtpjpegdepay", 0,
+      "JPEG Video RTP Depayloader");
+}
+
+/* Instance initializer: create the adapter used to accumulate JPEG
+ * fragments (released in finalize). */
+static void
+gst_rtp_jpeg_depay_init (GstRtpJPEGDepay * rtpjpegdepay)
+{
+  rtpjpegdepay->adapter = gst_adapter_new ();
+}
+
+/* Reset all stream state: dimensions, framerate, discont flag, cached
+ * quantization tables and any pending adapter data. */
+static void
+gst_rtp_jpeg_depay_reset (GstRtpJPEGDepay * depay)
+{
+  gint i;
+
+  depay->width = 0;
+  depay->height = 0;
+  depay->media_width = 0;
+  depay->media_height = 0;
+  depay->frate_num = 0;
+  depay->frate_denom = 1;
+  depay->discont = TRUE;
+
+  /* free cached per-Q quantization tables; loop bound assumes qtables has
+   * 255 entries in the header — TODO confirm against gstrtpjpegdepay.h */
+  for (i = 0; i < 255; i++) {
+    g_free (depay->qtables[i]);
+    depay->qtables[i] = NULL;
+  }
+
+  gst_adapter_clear (depay->adapter);
+}
+
+/* GObject finalize: release the cached tables via reset, then drop the
+ * adapter and chain up. */
+static void
+gst_rtp_jpeg_depay_finalize (GObject * object)
+{
+  GstRtpJPEGDepay *rtpjpegdepay;
+
+  rtpjpegdepay = GST_RTP_JPEG_DEPAY (object);
+
+  gst_rtp_jpeg_depay_reset (rtpjpegdepay);
+
+  g_object_unref (rtpjpegdepay->adapter);
+  rtpjpegdepay->adapter = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* JPEG zigzag scan order: maps a coefficient index (0..63) to its position
+ * in the 8x8 block, used when building quantization tables below. */
+static const int zigzag[] = {
+  0, 1, 8, 16, 9, 2, 3, 10,
+  17, 24, 32, 25, 18, 11, 4, 5,
+  12, 19, 26, 33, 40, 48, 41, 34,
+  27, 20, 13, 6, 7, 14, 21, 28,
+  35, 42, 49, 56, 57, 50, 43, 36,
+  29, 22, 15, 23, 30, 37, 44, 51,
+  58, 59, 52, 45, 38, 31, 39, 46,
+  53, 60, 61, 54, 47, 55, 62, 63
+};
+
+/*
+ * Table K.1 from JPEG spec.
+ */
+static const int jpeg_luma_quantizer[64] = {
+ 16, 11, 10, 16, 24, 40, 51, 61,
+ 12, 12, 14, 19, 26, 58, 60, 55,
+ 14, 13, 16, 24, 40, 57, 69, 56,
+ 14, 17, 22, 29, 51, 87, 80, 62,
+ 18, 22, 37, 56, 68, 109, 103, 77,
+ 24, 35, 55, 64, 81, 104, 113, 92,
+ 49, 64, 78, 87, 103, 121, 120, 101,
+ 72, 92, 95, 98, 112, 100, 103, 99
+};
+
+/*
+ * Table K.2 from JPEG spec.
+ */
+static const int jpeg_chroma_quantizer[64] = {
+ 17, 18, 24, 47, 99, 99, 99, 99,
+ 18, 21, 26, 66, 99, 99, 99, 99,
+ 24, 26, 56, 99, 99, 99, 99, 99,
+ 47, 66, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99,
+ 99, 99, 99, 99, 99, 99, 99, 99
+};
+
+/* Build the 128-byte quantization table (64 luma then 64 chroma entries,
+ * zigzag order) for quality factor @Q, following the reference algorithm
+ * from RFC 2435 Appendix A. */
+static void
+MakeTables (GstRtpJPEGDepay * rtpjpegdepay, gint Q, guint8 qtable[128])
+{
+  guint clamped = CLAMP (Q, 1, 99);
+  gint scale, idx;
+
+  /* map quality factor to a scaling percentage */
+  if (Q < 50)
+    scale = 5000 / clamped;
+  else
+    scale = 200 - clamped * 2;
+
+  for (idx = 0; idx < 64; idx++) {
+    gint pos = zigzag[idx];
+    gint luma = (jpeg_luma_quantizer[pos] * scale + 50) / 100;
+    gint chroma = (jpeg_chroma_quantizer[pos] * scale + 50) / 100;
+
+    /* Limit the quantizers to 1 <= q <= 255 */
+    qtable[idx] = CLAMP (luma, 1, 255);
+    qtable[idx + 64] = CLAMP (chroma, 1, 255);
+  }
+}
+
+/* Fixed Huffman tables (code-length counts followed by symbol values) for
+ * the luma/chroma DC and AC tables, as listed in RFC 2435 appendix B.
+ * These are emitted verbatim into the DHT segments of the reconstructed
+ * JPEG headers. */
+static const guint8 lum_dc_codelens[] = {
+  0, 1, 5, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0
+};
+
+static const guint8 lum_dc_symbols[] = {
+  0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11
+};
+
+static const guint8 lum_ac_codelens[] = {
+  0, 2, 1, 3, 3, 2, 4, 3, 5, 5, 4, 4, 0, 0, 1, 0x7d
+};
+
+static const guint8 lum_ac_symbols[] = {
+  0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12,
+  0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07,
+  0x22, 0x71, 0x14, 0x32, 0x81, 0x91, 0xa1, 0x08,
+  0x23, 0x42, 0xb1, 0xc1, 0x15, 0x52, 0xd1, 0xf0,
+  0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0a, 0x16,
+  0x17, 0x18, 0x19, 0x1a, 0x25, 0x26, 0x27, 0x28,
+  0x29, 0x2a, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39,
+  0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49,
+  0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59,
+  0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69,
+  0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79,
+  0x7a, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89,
+  0x8a, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98,
+  0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7,
+  0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6,
+  0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3, 0xc4, 0xc5,
+  0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2, 0xd3, 0xd4,
+  0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xe1, 0xe2,
+  0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea,
+  0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+  0xf9, 0xfa
+};
+
+static const guint8 chm_dc_codelens[] = {
+  0, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0
+};
+
+static const guint8 chm_dc_symbols[] = {
+  0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11
+};
+
+static const guint8 chm_ac_codelens[] = {
+  0, 2, 1, 2, 4, 4, 3, 4, 7, 5, 4, 4, 0, 1, 2, 0x77
+};
+
+static const guint8 chm_ac_symbols[] = {
+  0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21,
+  0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71,
+  0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91,
+  0xa1, 0xb1, 0xc1, 0x09, 0x23, 0x33, 0x52, 0xf0,
+  0x15, 0x62, 0x72, 0xd1, 0x0a, 0x16, 0x24, 0x34,
+  0xe1, 0x25, 0xf1, 0x17, 0x18, 0x19, 0x1a, 0x26,
+  0x27, 0x28, 0x29, 0x2a, 0x35, 0x36, 0x37, 0x38,
+  0x39, 0x3a, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48,
+  0x49, 0x4a, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58,
+  0x59, 0x5a, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68,
+  0x69, 0x6a, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78,
+  0x79, 0x7a, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87,
+  0x88, 0x89, 0x8a, 0x92, 0x93, 0x94, 0x95, 0x96,
+  0x97, 0x98, 0x99, 0x9a, 0xa2, 0xa3, 0xa4, 0xa5,
+  0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xb2, 0xb3, 0xb4,
+  0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xc2, 0xc3,
+  0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xd2,
+  0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda,
+  0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9,
+  0xea, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8,
+  0xf9, 0xfa
+};
+
+/* Append a DQT (Define Quantization Table) marker segment at p carrying
+ * 'size' bytes of table data from qt, tagged with table id 'tableNo'
+ * (precision nibble left at 0). Returns the write position just past the
+ * segment. Callers pass size 64 or 128 depending on the RTP precision
+ * bits; the caller must guarantee enough room in the output buffer. */
+static guint8 *
+MakeQuantHeader (guint8 * p, guint8 * qt, gint size, gint tableNo)
+{
+  *p++ = 0xff;
+  *p++ = 0xdb;                  /* DQT */
+  *p++ = 0;                     /* length msb */
+  *p++ = size + 3;              /* length lsb */
+  *p++ = tableNo;
+  memcpy (p, qt, size);
+
+  return (p + size);
+}
+
+/* Append a DHT (Define Huffman Table) marker segment at p: the 16 code
+ * length counts from 'codelens' followed by 'nsymbols' symbol values,
+ * tagged with table id 'tableNo' and class 'tableClass' (0 = DC, 1 = AC).
+ * Returns the write position just past the segment. */
+static guint8 *
+MakeHuffmanHeader (guint8 * p, const guint8 * codelens, int ncodes,
+    const guint8 * symbols, int nsymbols, int tableNo, int tableClass)
+{
+  *p++ = 0xff;
+  *p++ = 0xc4;                  /* DHT */
+  *p++ = 0;                     /* length msb */
+  *p++ = 3 + ncodes + nsymbols; /* length lsb */
+  *p++ = (tableClass << 4) | tableNo;
+  memcpy (p, codelens, ncodes);
+  p += ncodes;
+  memcpy (p, symbols, nsymbols);
+  p += nsymbols;
+
+  return (p);
+}
+
+/* Append a DRI (Define Restart Interval) marker segment at p with the
+ * 16-bit restart interval 'dri' in big-endian byte order. Returns the
+ * write position just past the segment. */
+static guint8 *
+MakeDRIHeader (guint8 * p, guint16 dri)
+{
+  *p++ = 0xff;
+  *p++ = 0xdd;                  /* DRI */
+  *p++ = 0x0;                   /* length msb */
+  *p++ = 4;                     /* length lsb */
+  *p++ = dri >> 8;              /* dri msb */
+  *p++ = dri & 0xff;            /* dri lsb */
+
+  return (p);
+}
+
+/*
+ * Arguments:
+ * type, width, height: as supplied in RTP/JPEG header
+ * qt: quantization tables as either derived from
+ * the Q field using MakeTables() or as specified
+ * in section 4.2.
+ * dri: restart interval in MCUs, or 0 if no restarts.
+ *
+ * p: pointer to return area
+ *
+ * Return value:
+ * The length of the generated headers.
+ *
+ * Generate a frame and scan headers that can be prepended to the
+ * RTP/JPEG data payload to produce a JPEG compressed image in
+ * interchange format (except for possible trailing garbage and
+ * absence of an EOI marker to terminate the scan).
+ */
+static guint
+MakeHeaders (guint8 * p, int type, int width, int height, guint8 * qt,
+    guint precision, guint16 dri)
+{
+  guint8 *start = p;
+  gint size;
+
+  *p++ = 0xff;
+  *p++ = 0xd8;                  /* SOI */
+
+  /* one DQT segment per quant table; each precision bit selects a
+   * 16-bit (128 byte) instead of an 8-bit (64 byte) table */
+  size = ((precision & 1) ? 128 : 64);
+  p = MakeQuantHeader (p, qt, size, 0);
+  qt += size;
+
+  size = ((precision & 2) ? 128 : 64);
+  p = MakeQuantHeader (p, qt, size, 1);
+  qt += size;
+
+  if (dri != 0)
+    p = MakeDRIHeader (p, dri);
+
+  *p++ = 0xff;
+  *p++ = 0xc0;                  /* SOF */
+  *p++ = 0;                     /* length msb */
+  *p++ = 17;                    /* length lsb */
+  *p++ = 8;                     /* 8-bit precision */
+  *p++ = height >> 8;           /* height msb */
+  *p++ = height;                /* height lsb */
+  *p++ = width >> 8;            /* width msb */
+  *p++ = width;                 /* width lsb */
+  *p++ = 3;                     /* number of components */
+  *p++ = 0;                     /* comp 0 */
+  /* RTP/JPEG types 0/64 are 4:2:2, types 1/65 are 4:2:0 (RFC 2435 3.1.3) */
+  if ((type & 0x3f) == 0)
+    *p++ = 0x21;                /* hsamp = 2, vsamp = 1 */
+  else
+    *p++ = 0x22;                /* hsamp = 2, vsamp = 2 */
+  *p++ = 0;                     /* quant table 0 */
+  *p++ = 1;                     /* comp 1 */
+  *p++ = 0x11;                  /* hsamp = 1, vsamp = 1 */
+  *p++ = 1;                     /* quant table 1 */
+  *p++ = 2;                     /* comp 2 */
+  *p++ = 0x11;                  /* hsamp = 1, vsamp = 1 */
+  *p++ = 1;                     /* quant table 1 */
+
+  p = MakeHuffmanHeader (p, lum_dc_codelens,
+      sizeof (lum_dc_codelens), lum_dc_symbols, sizeof (lum_dc_symbols), 0, 0);
+  p = MakeHuffmanHeader (p, lum_ac_codelens,
+      sizeof (lum_ac_codelens), lum_ac_symbols, sizeof (lum_ac_symbols), 0, 1);
+  p = MakeHuffmanHeader (p, chm_dc_codelens,
+      sizeof (chm_dc_codelens), chm_dc_symbols, sizeof (chm_dc_symbols), 1, 0);
+  p = MakeHuffmanHeader (p, chm_ac_codelens,
+      sizeof (chm_ac_codelens), chm_ac_symbols, sizeof (chm_ac_symbols), 1, 1);
+
+  *p++ = 0xff;
+  *p++ = 0xda;                  /* SOS */
+  *p++ = 0;                     /* length msb */
+  *p++ = 12;                    /* length lsb */
+  *p++ = 3;                     /* 3 components */
+  *p++ = 0;                     /* comp 0 */
+  *p++ = 0;                     /* huffman table 0 */
+  *p++ = 1;                     /* comp 1 */
+  *p++ = 0x11;                  /* huffman table 1 */
+  *p++ = 2;                     /* comp 2 */
+  *p++ = 0x11;                  /* huffman table 1 */
+  *p++ = 0;                     /* first DCT coeff */
+  *p++ = 63;                    /* last DCT coeff */
+  *p++ = 0;                     /* successive approx. */
+
+  return (p - start);
+}                               /* fix: dropped stray ';' after the closing
+                                 * brace — a file-scope empty declaration is
+                                 * not valid ISO C (pre-C23) */
+
+/* Handle new input caps: record the clock rate (default 90000) and pick up
+ * the optional SDP attributes "x-dimensions" (frame sizes beyond the 2040
+ * pixel limit of the 8-bit RTP header fields) and "a-framerate" /
+ * "x-framerate". Always succeeds. */
+static gboolean
+gst_rtp_jpeg_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstRtpJPEGDepay *rtpjpegdepay;
+  GstStructure *structure;
+  gint clock_rate;
+  const gchar *media_attr;
+
+  rtpjpegdepay = GST_RTP_JPEG_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+  GST_DEBUG_OBJECT (rtpjpegdepay, "Caps set: %" GST_PTR_FORMAT, caps);
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 90000;
+  depayload->clock_rate = clock_rate;
+
+  /* reset defaults */
+  rtpjpegdepay->width = 0;
+  rtpjpegdepay->height = 0;
+  rtpjpegdepay->media_width = 0;
+  rtpjpegdepay->media_height = 0;
+  rtpjpegdepay->frate_num = 0;
+  rtpjpegdepay->frate_denom = 1;
+
+  /* check for optional SDP attributes */
+  if ((media_attr = gst_structure_get_string (structure, "x-dimensions"))) {
+    gint w, h;
+
+    if (sscanf (media_attr, "%d,%d", &w, &h) == 2) {
+      rtpjpegdepay->media_width = w;
+      rtpjpegdepay->media_height = h;
+    }
+  }
+
+  /* try to get a framerate */
+  media_attr = gst_structure_get_string (structure, "a-framerate");
+  if (!media_attr)
+    media_attr = gst_structure_get_string (structure, "x-framerate");
+
+  if (media_attr) {
+    GValue src = { 0 };
+    GValue dest = { 0 };
+    gchar *s;
+
+    /* canonicalise floating point string so we can handle framerate strings
+     * in the form "24.930" or "24,930" irrespective of the current locale */
+    s = g_strdup (media_attr);
+    g_strdelimit (s, ",", '.');
+
+    /* convert the float to a fraction */
+    g_value_init (&src, G_TYPE_DOUBLE);
+    g_value_set_double (&src, g_ascii_strtod (s, NULL));
+    g_value_init (&dest, GST_TYPE_FRACTION);
+    g_value_transform (&src, &dest);
+
+    rtpjpegdepay->frate_num = gst_value_get_fraction_numerator (&dest);
+    rtpjpegdepay->frate_denom = gst_value_get_fraction_denominator (&dest);
+
+    g_free (s);
+  }
+
+  return TRUE;
+}
+
+/* Depayload one RTP packet (RFC 2435): on the first fragment of a frame,
+ * reconstruct the JPEG interchange-format headers (quant tables, Huffman
+ * tables, SOF/SOS) and push them into the adapter; then append the scan
+ * data of every fragment. When the RTP marker bit signals the last
+ * fragment, return the complete parsed image/jpeg buffer (appending an
+ * EOI marker if missing), otherwise return NULL. */
+static GstBuffer *
+gst_rtp_jpeg_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpJPEGDepay *rtpjpegdepay;
+  GstBuffer *outbuf;
+  gint payload_len, header_len;
+  guint8 *payload;
+  guint frag_offset;
+  gint Q;
+  guint type, width, height;
+  guint16 dri, precision, length;
+  guint8 *qtable;
+
+  rtpjpegdepay = GST_RTP_JPEG_DEPAY (depayload);
+
+  if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+    GST_DEBUG_OBJECT (depayload, "DISCONT, reset adapter");
+    gst_adapter_clear (rtpjpegdepay->adapter);
+    rtpjpegdepay->discont = TRUE;
+  }
+
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+  /* the main JPEG header is 8 bytes */
+  if (payload_len < 8)
+    goto empty_packet;
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+  header_len = 0;
+
+  /*  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * | Type-specific |              Fragment Offset                  |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |      Type     |       Q       |     Width     |     Height    |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   */
+  frag_offset = (payload[1] << 16) | (payload[2] << 8) | payload[3];
+  type = payload[4];
+  Q = payload[5];
+  /* width/height are transmitted in units of 8 pixels */
+  width = payload[6] * 8;
+  height = payload[7] * 8;
+
+  /* saw a packet with fragment offset > 0 and we don't already have data queued
+   * up (most importantly, we don't have a header for this data) -- drop it
+   * XXX: maybe we can check if the jpeg is progressive and salvage the data?
+   * XXX: not implemented yet because jpegenc can't create progressive jpegs */
+  if (frag_offset > 0 && gst_adapter_available (rtpjpegdepay->adapter) == 0)
+    goto no_header_packet;
+
+  /* allow frame dimensions > 2040, passed in SDP session or media attributes
+   * from gstrtspsrc.c (gst_rtspsrc_sdp_attributes_to_caps), or in caps */
+  if (!width)
+    width = rtpjpegdepay->media_width;
+
+  if (!height)
+    height = rtpjpegdepay->media_height;
+
+  if (width == 0 || height == 0)
+    goto invalid_dimension;
+
+  GST_DEBUG_OBJECT (rtpjpegdepay, "frag %u, type %u, Q %d, width %u, height %u",
+      frag_offset, type, Q, width, height);
+
+  header_len += 8;
+  payload += 8;
+  payload_len -= 8;
+
+  dri = 0;
+  /* types 64..127 carry an extra restart marker header (RFC 2435 3.1.7) */
+  if (type > 63) {
+    if (payload_len < 4)
+      goto empty_packet;
+
+    /*  0                   1                   2                   3
+     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |       Restart Interval        |F|L|       Restart Count       |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     */
+    dri = (payload[0] << 8) | payload[1];
+
+    GST_DEBUG_OBJECT (rtpjpegdepay, "DRI %" G_GUINT16_FORMAT, dri);
+
+    payload += 4;
+    header_len += 4;
+    payload_len -= 4;
+  }
+
+  /* Q >= 128 means in-band quantization tables in the first fragment
+   * (RFC 2435 3.1.8) */
+  if (Q >= 128 && frag_offset == 0) {
+    if (payload_len < 4)
+      goto empty_packet;
+
+    /*  0                   1                   2                   3
+     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |      MBZ      |   Precision   |             Length            |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |                    Quantization Table Data                    |
+     * |                              ...                              |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     */
+    precision = payload[1];
+    length = (payload[2] << 8) | payload[3];
+
+    GST_DEBUG_OBJECT (rtpjpegdepay, "precision %04x, length %" G_GUINT16_FORMAT,
+        precision, length);
+
+    /* Q == 255 with length 0 is reserved/invalid */
+    if (Q == 255 && length == 0)
+      goto empty_packet;
+
+    payload += 4;
+    header_len += 4;
+    payload_len -= 4;
+
+    if (length > payload_len)
+      goto empty_packet;
+
+    if (length > 0)
+      qtable = payload;
+    else
+      /* length 0: reuse the table transmitted earlier for this Q */
+      qtable = rtpjpegdepay->qtables[Q];
+
+    payload += length;
+    header_len += length;
+    payload_len -= length;
+  } else {
+    length = 0;
+    qtable = NULL;
+    precision = 0;
+  }
+
+  if (frag_offset == 0) {
+    GstMapInfo map;
+    guint size;
+
+    /* renegotiate output caps when the frame size changes */
+    if (rtpjpegdepay->width != width || rtpjpegdepay->height != height) {
+      GstCaps *outcaps;
+
+      outcaps =
+          gst_caps_new_simple ("image/jpeg", "parsed", G_TYPE_BOOLEAN, TRUE,
+          "framerate", GST_TYPE_FRACTION, rtpjpegdepay->frate_num,
+          rtpjpegdepay->frate_denom, "width", G_TYPE_INT, width,
+          "height", G_TYPE_INT, height, NULL);
+      gst_pad_set_caps (depayload->srcpad, outcaps);
+      gst_caps_unref (outcaps);
+
+      rtpjpegdepay->width = width;
+      rtpjpegdepay->height = height;
+    }
+
+    GST_LOG_OBJECT (rtpjpegdepay, "first packet, length %" G_GUINT16_FORMAT,
+        length);
+
+    /* first packet */
+    if (length == 0) {
+      if (Q < 128) {
+        /* no quant table, see if we have one cached */
+        qtable = rtpjpegdepay->qtables[Q];
+        if (!qtable) {
+          GST_DEBUG_OBJECT (rtpjpegdepay, "making Q %d table", Q);
+          /* make and cache the table */
+          qtable = g_new (guint8, 128);
+          MakeTables (rtpjpegdepay, Q, qtable);
+          rtpjpegdepay->qtables[Q] = qtable;
+        } else {
+          GST_DEBUG_OBJECT (rtpjpegdepay, "using cached table for Q %d", Q);
+        }
+        /* all 8 bit quantizers */
+        precision = 0;
+      } else {
+        if (!qtable)
+          goto no_qtable;
+      }
+    }
+
+    /* I think we can get here with a NULL qtable, so make sure we don't
+       go dereferencing it in MakeHeaders if we do */
+    if (!qtable)
+      goto no_qtable;
+
+    /* max header length, should be big enough */
+    outbuf = gst_buffer_new_and_alloc (1000);
+    gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+    size = MakeHeaders (map.data, type, width, height, qtable, precision, dri);
+    gst_buffer_unmap (outbuf, &map);
+    gst_buffer_resize (outbuf, 0, size);
+
+    GST_DEBUG_OBJECT (rtpjpegdepay, "pushing %u bytes of header", size);
+
+    gst_adapter_push (rtpjpegdepay->adapter, outbuf);
+  }
+
+  /* take JPEG data, push in the adapter */
+  GST_DEBUG_OBJECT (rtpjpegdepay, "pushing data at offset %d", header_len);
+  outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, header_len, -1);
+  gst_adapter_push (rtpjpegdepay->adapter, outbuf);
+  outbuf = NULL;
+
+  if (gst_rtp_buffer_get_marker (rtp)) {
+    guint avail;
+    guint8 end[2];
+    GstMapInfo map;
+
+    /* last buffer take all data out of the adapter */
+    avail = gst_adapter_available (rtpjpegdepay->adapter);
+    GST_DEBUG_OBJECT (rtpjpegdepay, "marker set, last buffer");
+
+    if (avail < 2)
+      goto invalid_packet;
+
+    /* take the last bytes of the jpeg data to see if there is an EOI
+     * marker */
+    gst_adapter_copy (rtpjpegdepay->adapter, end, avail - 2, 2);
+
+    /* fix: an EOI is present only if the data ends in exactly ff d9, so a
+     * mismatch in EITHER byte means it is missing. The previous '&&' test
+     * skipped appending the EOI whenever just one of the two bytes
+     * happened to match (e.g. trailing "ff 00" or "xx d9"). */
+    if (end[0] != 0xff || end[1] != 0xd9) {
+      GST_DEBUG_OBJECT (rtpjpegdepay, "no EOI marker, adding one");
+
+      /* no EOI marker, add one */
+      outbuf = gst_buffer_new_and_alloc (2);
+      gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+      map.data[0] = 0xff;
+      map.data[1] = 0xd9;
+      gst_buffer_unmap (outbuf, &map);
+
+      gst_adapter_push (rtpjpegdepay->adapter, outbuf);
+      avail += 2;
+    }
+    outbuf = gst_adapter_take_buffer (rtpjpegdepay->adapter, avail);
+
+    if (rtpjpegdepay->discont) {
+      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+      rtpjpegdepay->discont = FALSE;
+    }
+
+    gst_rtp_drop_non_video_meta (rtpjpegdepay, outbuf);
+
+    GST_DEBUG_OBJECT (rtpjpegdepay, "returning %u bytes", avail);
+  }
+
+  return outbuf;
+
+  /* ERRORS */
+empty_packet:
+  {
+    GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, DECODE,
+        ("Empty Payload."), (NULL));
+    return NULL;
+  }
+invalid_dimension:
+  {
+    GST_ELEMENT_WARNING (rtpjpegdepay, STREAM, FORMAT,
+        ("Invalid Dimension %dx%d.", width, height), (NULL));
+    return NULL;
+  }
+no_qtable:
+  {
+    GST_WARNING_OBJECT (rtpjpegdepay, "no qtable");
+    return NULL;
+  }
+invalid_packet:
+  {
+    GST_WARNING_OBJECT (rtpjpegdepay, "invalid packet");
+    gst_adapter_flush (rtpjpegdepay->adapter,
+        gst_adapter_available (rtpjpegdepay->adapter));
+    return NULL;
+  }
+no_header_packet:
+  {
+    GST_WARNING_OBJECT (rtpjpegdepay,
+        "discarding data packets received when we have no header");
+    return NULL;
+  }
+}
+
+
+/* Element state-change handler: flush the adapter and cached state via
+ * gst_rtp_jpeg_depay_reset() when going READY -> PAUSED, then chain up.
+ * The downward PAUSED -> READY case is intentionally a no-op (cached
+ * qtables are only freed in finalize). */
+static GstStateChangeReturn
+gst_rtp_jpeg_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpJPEGDepay *rtpjpegdepay;
+  GstStateChangeReturn ret;
+
+  rtpjpegdepay = GST_RTP_JPEG_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_rtp_jpeg_depay_reset (rtpjpegdepay);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpjpegdepay.h b/gst/rtp/gstrtpjpegdepay.h
new file mode 100644
index 0000000000..3f7aea219b
--- /dev/null
+++ b/gst/rtp/gstrtpjpegdepay.h
@@ -0,0 +1,69 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_JPEG_DEPAY_H__
+#define __GST_RTP_JPEG_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_JPEG_DEPAY \
+ (gst_rtp_jpeg_depay_get_type())
+#define GST_RTP_JPEG_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_JPEG_DEPAY,GstRtpJPEGDepay))
+#define GST_RTP_JPEG_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_JPEG_DEPAY,GstRtpJPEGDepayClass))
+#define GST_IS_RTP_JPEG_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_JPEG_DEPAY))
+#define GST_IS_RTP_JPEG_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_JPEG_DEPAY))
+
+typedef struct _GstRtpJPEGDepay GstRtpJPEGDepay;
+typedef struct _GstRtpJPEGDepayClass GstRtpJPEGDepayClass;
+
+struct _GstRtpJPEGDepay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* accumulates reconstructed headers + scan fragments until the RTP
+   * marker bit completes a frame */
+  GstAdapter *adapter;
+  /* TRUE when the next output buffer must carry GST_BUFFER_FLAG_DISCONT */
+  gboolean discont;
+
+  /* cached quant tables, built lazily per Q value */
+  guint8 * qtables[255];
+  /* framerate parsed from the SDP a-framerate/x-framerate attribute
+   * (0/1 when unknown) */
+  gint frate_num;
+  gint frate_denom;
+  /* out-of-band dimensions from the SDP x-dimensions attribute, used when
+   * the 8-bit RTP header fields cannot express frames wider than 2040 px */
+  gint media_width;
+  gint media_height;
+  /* dimensions of the currently negotiated output caps */
+  gint width, height;
+};
+
+struct _GstRtpJPEGDepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_jpeg_depay_get_type (void);
+
+
+G_END_DECLS
+
+#endif /* __GST_RTP_JPEG_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpjpegpay.c b/gst/rtp/gstrtpjpegpay.c
new file mode 100644
index 0000000000..0e32e16fb2
--- /dev/null
+++ b/gst/rtp/gstrtpjpegpay.c
@@ -0,0 +1,1053 @@
+/* GStreamer
+ * Copyright (C) 2008 Axis Communications <dev-gstreamer@axis.com>
+ * @author Bjorn Ostby <bjorn.ostby@axis.com>
+ * @author Peter Kjellerstedt <peter.kjellerstedt@axis.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpjpegpay
+ * @title: rtpjpegpay
+ *
+ * Payload encode JPEG pictures into RTP packets according to RFC 2435.
+ * For detailed information see: http://www.rfc-editor.org/rfc/rfc2435.txt
+ *
+ * The payloader takes a JPEG picture, scans the header for quantization
+ * tables (if needed) and constructs the RTP packet header followed by
+ * the actual JPEG entropy scan.
+ *
+ * The payloader assumes that correct width and height is found in the caps.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpjpegpay.h"
+#include "gstrtputils.h"
+#include "gstbuffermemory.h"
+
+static GstStaticPadTemplate gst_rtp_jpeg_pay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("image/jpeg; " "video/x-jpeg")
+ );
+
+static GstStaticPadTemplate gst_rtp_jpeg_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ " media = (string) \"video\", "
+ " payload = (int) " GST_RTP_PAYLOAD_JPEG_STRING ", "
+ " clock-rate = (int) 90000, "
+ " encoding-name = (string) \"JPEG\", "
+ " width = (int) [ 1, 65536 ], " " height = (int) [ 1, 65536 ]; "
+ " application/x-rtp, "
+ " media = (string) \"video\", "
+ " payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ " clock-rate = (int) 90000, "
+ " encoding-name = (string) \"JPEG\", "
+ " width = (int) [ 1, 65536 ], " " height = (int) [ 1, 65536 ]")
+ );
+
+GST_DEBUG_CATEGORY_STATIC (rtpjpegpay_debug);
+#define GST_CAT_DEFAULT (rtpjpegpay_debug)
+
+/*
+ * QUANT_PREFIX_LEN:
+ *
+ * Prefix length in the header before the quantization tables:
+ * Two size bytes and one byte for precision
+ */
+#define QUANT_PREFIX_LEN 3
+
+
+typedef enum _RtpJpegMarker RtpJpegMarker;
+
+/*
+ * RtpJpegMarker:
+ * @JPEG_MARKER: Prefix for JPEG marker
+ * @JPEG_MARKER_SOI: Start of Image marker
+ * @JPEG_MARKER_JFIF: JFIF marker
+ * @JPEG_MARKER_CMT: Comment marker
+ * @JPEG_MARKER_DQT: Define Quantization Table marker
+ * @JPEG_MARKER_SOF: Start of Frame marker
+ * @JPEG_MARKER_DHT: Define Huffman Table marker
+ * @JPEG_MARKER_SOS: Start of Scan marker
+ * @JPEG_MARKER_EOI: End of Image marker
+ * @JPEG_MARKER_DRI: Define Restart Interval marker
+ * @JPEG_MARKER_H264: H264 marker
+ *
+ * Identifiers for markers in JPEG header
+ */
+enum _RtpJpegMarker
+{
+ JPEG_MARKER = 0xFF,
+ JPEG_MARKER_SOI = 0xD8,
+ JPEG_MARKER_JFIF = 0xE0,
+ JPEG_MARKER_CMT = 0xFE,
+ JPEG_MARKER_DQT = 0xDB,
+ JPEG_MARKER_SOF = 0xC0,
+ JPEG_MARKER_DHT = 0xC4,
+ JPEG_MARKER_JPG = 0xC8,
+ JPEG_MARKER_SOS = 0xDA,
+ JPEG_MARKER_EOI = 0xD9,
+ JPEG_MARKER_DRI = 0xDD,
+ JPEG_MARKER_APP0 = 0xE0,
+ JPEG_MARKER_H264 = 0xE4, /* APP4 */
+ JPEG_MARKER_APP15 = 0xEF,
+ JPEG_MARKER_JPG0 = 0xF0,
+ JPEG_MARKER_JPG13 = 0xFD
+};
+
+#define DEFAULT_JPEG_QUANT 255
+
+#define DEFAULT_JPEG_QUALITY 255
+#define DEFAULT_JPEG_TYPE 1
+
+enum
+{
+ PROP_0,
+ PROP_JPEG_QUALITY,
+ PROP_JPEG_TYPE
+};
+
+enum
+{
+ Q_TABLE_0 = 0,
+ Q_TABLE_1,
+ Q_TABLE_MAX /* only support for two tables at the moment */
+};
+
+typedef struct _RtpJpegHeader RtpJpegHeader;
+
+/*
+ * RtpJpegHeader:
+ * @type_spec: type specific
+ * @offset: fragment offset
+ * @type: type field
+ * @q: quantization table for this frame
+ * @width: width of image in 8-pixel multiples
+ * @height: height of image in 8-pixel multiples
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Type-specific | Fragment Offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Type | Q | Width | Height |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+struct _RtpJpegHeader
+{
+ guint type_spec:8;
+ guint offset:24;
+ guint8 type;
+ guint8 q;
+ guint8 width;
+ guint8 height;
+};
+
+/*
+ * RtpQuantHeader
+ * @mbz: must be zero
+ * @precision: specify size of quantization tables
+ * @length: length of quantization data
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | Precision | Length |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Quantization Table Data |
+ * | ... |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+typedef struct
+{
+ guint8 mbz;
+ guint8 precision;
+ guint16 length;
+} RtpQuantHeader;
+
+typedef struct
+{
+ guint8 size;
+ const guint8 *data;
+} RtpQuantTable;
+
+/*
+ * RtpRestartMarkerHeader:
+ * @restartInterval: number of MCUs that appear between restart markers
+ * @restartFirstLastCount: a combination of the first packet mark in the chunk
+ * last packet mark in the chunk and the position of the
+ * first restart interval in the current "chunk"
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Restart Interval |F|L| Restart Count |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ *
+ * The restart marker header is implemented according to the following
+ * methodology specified in section 3.1.7 of rfc2435.txt.
+ *
+ * "If the restart intervals in a frame are not guaranteed to be aligned
+ * with packet boundaries, the F (first) and L (last) bits MUST be set
+ * to 1 and the Restart Count MUST be set to 0x3FFF. This indicates
+ * that a receiver MUST reassemble the entire frame before decoding it."
+ *
+ */
+
+typedef struct
+{
+ guint16 restart_interval;
+ guint16 restart_count;
+} RtpRestartMarkerHeader;
+
+typedef struct
+{
+ guint8 id;
+ guint8 samp;
+ guint8 qt;
+} CompInfo;
+
+/* FIXME: restart marker header currently unsupported */
+
+static void gst_rtp_jpeg_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static void gst_rtp_jpeg_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_rtp_jpeg_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+
+static GstFlowReturn gst_rtp_jpeg_pay_handle_buffer (GstRTPBasePayload * pad,
+ GstBuffer * buffer);
+
+#define gst_rtp_jpeg_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpJPEGPay, gst_rtp_jpeg_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpjpegpay, "rtpjpegpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_JPEG_PAY, rtp_element_init (plugin));
+
+/* Class initializer: wire up property accessors, register the static pad
+ * templates and element metadata, install the "quality" and "type"
+ * properties and the payloader vfuncs (set_caps / handle_buffer). */
+static void
+gst_rtp_jpeg_pay_class_init (GstRtpJPEGPayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gobject_class->set_property = gst_rtp_jpeg_pay_set_property;
+  gobject_class->get_property = gst_rtp_jpeg_pay_get_property;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_jpeg_pay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_jpeg_pay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP JPEG payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payload-encodes JPEG pictures into RTP packets (RFC 2435)",
+      "Axis Communications <dev-gstreamer@axis.com>");
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_jpeg_pay_setcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_jpeg_pay_handle_buffer;
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_JPEG_QUALITY,
+      g_param_spec_int ("quality", "Quality",
+          "Quality factor on JPEG data (unused)", 0, 255, 255,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_JPEG_TYPE,
+      g_param_spec_int ("type", "Type",
+          "Default JPEG Type, overwritten by SOF when present", 0, 255,
+          DEFAULT_JPEG_TYPE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  GST_DEBUG_CATEGORY_INIT (rtpjpegpay_debug, "rtpjpegpay", 0,
+      "Motion JPEG RTP Payloader");
+}
+
+/* Instance initializer: seed quality/quant/type defaults, mark the frame
+ * dimensions as unknown (-1) until caps arrive, and select the static
+ * JPEG payload type. */
+static void
+gst_rtp_jpeg_pay_init (GstRtpJPEGPay * pay)
+{
+  pay->quality = DEFAULT_JPEG_QUALITY;
+  pay->quant = DEFAULT_JPEG_QUANT;
+  pay->type = DEFAULT_JPEG_TYPE;
+  pay->width = -1;
+  pay->height = -1;
+
+  GST_RTP_BASE_PAYLOAD_PT (pay) = GST_RTP_PAYLOAD_JPEG;
+}
+
+/* Negotiate input caps: validate width/height (mandatory) and framerate
+ * (optional), store the dimensions in units of 8 pixels (or 0 when either
+ * exceeds 2040, in which case an "x-dimensions" attribute is added to the
+ * output caps as RFC 2435 cannot carry such sizes in-band), and set the
+ * RTP output caps with optional a-framerate/x-dimensions fields. */
+static gboolean
+gst_rtp_jpeg_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+  GstStructure *caps_structure = gst_caps_get_structure (caps, 0);
+  GstRtpJPEGPay *pay;
+  gboolean res;
+  gint width = -1, height = -1;
+  gint num = 0, denom;
+  gchar *rate = NULL;
+  gchar *dim = NULL;
+
+  pay = GST_RTP_JPEG_PAY (basepayload);
+
+  /* these properties are mandatory, but they might be adjusted by the SOF, if there
+   * is one. */
+  if (!gst_structure_get_int (caps_structure, "height", &height) || height <= 0) {
+    goto invalid_dimension;
+  }
+
+  if (!gst_structure_get_int (caps_structure, "width", &width) || width <= 0) {
+    goto invalid_dimension;
+  }
+
+  /* denom is only read when get_fraction succeeded (short-circuit), which
+   * also initializes it */
+  if (gst_structure_get_fraction (caps_structure, "framerate", &num, &denom) &&
+      (num < 0 || denom <= 0)) {
+    goto invalid_framerate;
+  }
+
+  /* RTP/JPEG headers carry dimensions as 8-pixel blocks in one byte, so
+   * anything above 2040 must go out-of-band via x-dimensions */
+  if (height > 2040 || width > 2040) {
+    pay->height = 0;
+    pay->width = 0;
+  } else {
+    pay->height = GST_ROUND_UP_8 (height) / 8;
+    pay->width = GST_ROUND_UP_8 (width) / 8;
+  }
+
+  gst_rtp_base_payload_set_options (basepayload, "video",
+      basepayload->pt != GST_RTP_PAYLOAD_JPEG, "JPEG", 90000);
+
+  if (num > 0) {
+    gdouble framerate;
+    gst_util_fraction_to_double (num, denom, &framerate);
+    rate = g_strdup_printf ("%f", framerate);
+  }
+
+  if (pay->width == 0) {
+    GST_DEBUG_OBJECT (pay,
+        "width or height are greater than 2040, adding x-dimensions to caps");
+    dim = g_strdup_printf ("%d,%d", width, height);
+  }
+
+  if (rate != NULL && dim != NULL) {
+    res = gst_rtp_base_payload_set_outcaps (basepayload, "a-framerate",
+        G_TYPE_STRING, rate, "x-dimensions", G_TYPE_STRING, dim, NULL);
+  } else if (rate != NULL && dim == NULL) {
+    res = gst_rtp_base_payload_set_outcaps (basepayload, "a-framerate",
+        G_TYPE_STRING, rate, NULL);
+  } else if (rate == NULL && dim != NULL) {
+    res = gst_rtp_base_payload_set_outcaps (basepayload, "x-dimensions",
+        G_TYPE_STRING, dim, NULL);
+  } else {
+    res = gst_rtp_base_payload_set_outcaps (basepayload, NULL);
+  }
+
+  g_free (dim);
+  g_free (rate);
+
+  return res;
+
+  /* ERRORS */
+invalid_dimension:
+  {
+    GST_ERROR_OBJECT (pay, "Invalid width/height from caps");
+    return FALSE;
+  }
+invalid_framerate:
+  {
+    GST_ERROR_OBJECT (pay, "Invalid framerate from caps");
+    return FALSE;
+  }
+}
+
+/*
+ * get uint16 value from current position in mapped memory.
+ * the memory offset will be increased with 2.
+ */
+/*
+ * get uint16 value from current position in mapped memory.
+ * the memory offset will be increased with 2.
+ * Reads big-endian; returns 0 without advancing if fewer than two bytes
+ * remain (guarded by g_return_val_if_fail).
+ */
+static guint
+parse_mem_inc_offset_guint16 (GstBufferMemoryMap * memory)
+{
+  guint data;
+
+  g_return_val_if_fail (memory->total_size > (memory->offset + 1), 0);
+
+  data = ((guint) * memory->data) << 8;
+  gst_buffer_memory_advance_bytes (memory, 1);
+  data = data | (*memory->data);
+  gst_buffer_memory_advance_bytes (memory, 1);
+
+  return data;
+}
+
+/*
+ * get uint8 value from current position in mapped memory.
+ * the memory offset will be increased with 1.
+ */
+/*
+ * get uint8 value from current position in mapped memory.
+ * the memory offset will be increased with 1.
+ * Returns 0 without advancing if no byte remains (guarded by
+ * g_return_val_if_fail).
+ */
+static guint
+parse_mem_inc_offset_guint8 (GstBufferMemoryMap * memory)
+{
+  guint data;
+
+  g_return_val_if_fail (memory->total_size > memory->offset, 0);
+
+  data = (*memory->data);
+  gst_buffer_memory_advance_bytes (memory, 1);
+
+  return data;
+}
+
+/* Parse a DQT marker segment (the memory cursor points just past the DQT
+ * marker, at the 16-bit segment length). For each table in the segment,
+ * record its size (64 or 128 bytes per the precision nibble) and a pointer
+ * into the mapped data in tables[id]. The cursor is left after the last
+ * table read. Malformed segments are logged and parsing stops, leaving
+ * any tables read so far in place. */
+static void
+gst_rtp_jpeg_pay_read_quant_table (GstBufferMemoryMap * memory,
+    RtpQuantTable tables[])
+{
+  guint quant_size, tab_size;
+  guint8 prec;
+  guint8 id;
+
+  if (memory->total_size <= (memory->offset + 1))
+    goto too_small;
+
+  quant_size = parse_mem_inc_offset_guint16 (memory);
+  if (quant_size < 2)
+    goto small_quant_size;
+
+  /* clamp to available data */
+  if (memory->offset + quant_size > memory->total_size)
+    quant_size = memory->total_size - memory->offset;
+
+  /* quant_size now counts only payload bytes (length field excluded) */
+  quant_size -= 2;
+
+  while (quant_size > 0) {
+    guint8 data;
+    /* not enough to read the id */
+    if (memory->offset + 1 > memory->total_size)
+      break;
+
+    /* one byte: precision in the high nibble, table id in the low nibble */
+    data = parse_mem_inc_offset_guint8 (memory);
+    id = data & 0x0f;
+    if (id == 15)
+      /* invalid id received - corrupt data */
+      goto invalid_id;
+
+    prec = (data & 0xf0) >> 4;
+    if (prec)
+      tab_size = 128;
+    else
+      tab_size = 64;
+
+    /* there is not enough for the table */
+    if (quant_size < tab_size + 1)
+      goto no_table;
+
+    GST_LOG ("read quant table %d, tab_size %d, prec %02x", id, tab_size, prec);
+
+    /* store a pointer into the mapped buffer, no copy — the buffer must
+     * stay mapped for as long as tables[] is used */
+    tables[id].size = tab_size;
+    tables[id].data = memory->data;
+
+    quant_size -= (tab_size + 1);
+    if (!gst_buffer_memory_advance_bytes (memory, tab_size)) {
+      goto too_small;
+    }
+  }
+done:
+  return;
+
+  /* ERRORS */
+too_small:
+  {
+    GST_WARNING ("not enough data");
+    return;
+  }
+small_quant_size:
+  {
+    GST_WARNING ("quant_size too small (%u < 2)", quant_size);
+    return;
+  }
+invalid_id:
+  {
+    GST_WARNING ("invalid id");
+    goto done;
+  }
+no_table:
+  {
+    GST_WARNING ("not enough data for table (%u < %u)", quant_size,
+        tab_size + 1);
+    goto done;
+  }
+}
+
/*
 * Parse a SOF (start of frame) segment: check the sample precision, store
 * the frame dimensions in pay->width/height (in units of 8-pixel blocks, or
 * 0 when a dimension exceeds what the RTP/JPEG header can carry), and read
 * the three component descriptors into @info. Sets pay->type from the luma
 * subsampling. Returns FALSE (after posting an element warning) on
 * malformed or unsupported input.
 */
static gboolean
gst_rtp_jpeg_pay_read_sof (GstRtpJPEGPay * pay, GstBufferMemoryMap * memory,
    CompInfo info[], RtpQuantTable tables[], gulong tables_elements)
{
  guint sof_size, off;
  guint width, height, infolen;
  CompInfo elem;
  gint i, j;

  off = memory->offset;

  /* we need at least 17 bytes for the SOF */
  if (off + 17 > memory->total_size)
    goto wrong_size;

  sof_size = parse_mem_inc_offset_guint16 (memory);
  if (sof_size < 17)
    goto wrong_length;

  /* precision should be 8 */
  if (parse_mem_inc_offset_guint8 (memory) != 8)
    goto bad_precision;

  /* read dimensions */
  height = parse_mem_inc_offset_guint16 (memory);
  width = parse_mem_inc_offset_guint16 (memory);

  GST_LOG_OBJECT (pay, "got dimensions %ux%u", height, width);

  if (height == 0) {
    goto invalid_dimension;
  }
  /* the RTP/JPEG header stores each dimension / 8 in a single byte, so
   * anything above 2040 pixels cannot be represented and is stored as 0 */
  if (height > 2040) {
    height = 0;
  }
  if (width == 0) {
    goto invalid_dimension;
  }
  if (width > 2040) {
    width = 0;
  }

  if (height == 0 || width == 0) {
    pay->height = 0;
    pay->width = 0;
  } else {
    pay->height = GST_ROUND_UP_8 (height) / 8;
    pay->width = GST_ROUND_UP_8 (width) / 8;
  }

  /* we only support 3 components */
  if (parse_mem_inc_offset_guint8 (memory) != 3)
    goto bad_components;

  infolen = 0;
  for (i = 0; i < 3; i++) {
    elem.id = parse_mem_inc_offset_guint8 (memory);
    elem.samp = parse_mem_inc_offset_guint8 (memory);
    elem.qt = parse_mem_inc_offset_guint8 (memory);
    GST_LOG_OBJECT (pay, "got comp %d, samp %02x, qt %d", elem.id, elem.samp,
        elem.qt);
    /* insertion sort from the last element to the first */
    /* NOTE(review): the loop bound is j > 1, so info[0] is never displaced;
     * this relies on the first listed component having the smallest id —
     * confirm this is intentional */
    for (j = infolen; j > 1; j--) {
      if (G_LIKELY (info[j - 1].id < elem.id))
        break;
      info[j] = info[j - 1];
    }
    info[j] = elem;
    infolen++;
  }

  /* see that the components are supported: samp bytes below encode
   * horizontal/vertical sampling factors in the two nibbles */
  if (info[0].samp == 0x21)
    pay->type = 0;
  else if (info[0].samp == 0x22)
    pay->type = 1;
  else
    goto invalid_comp;

  if (!(info[1].samp == 0x11))
    goto invalid_comp;

  if (!(info[2].samp == 0x11))
    goto invalid_comp;

  return TRUE;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT,
        ("Wrong size %u (needed %u).", (guint) memory->total_size, off + 17),
        (NULL));
    return FALSE;
  }
wrong_length:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT,
        ("Wrong SOF length %u.", sof_size), (NULL));
    return FALSE;
  }
bad_precision:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT,
        ("Wrong precision, expecting 8."), (NULL));
    return FALSE;
  }
invalid_dimension:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT,
        ("Wrong dimension, size %ux%u", width, height), (NULL));
    return FALSE;
  }
bad_components:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT,
        ("Wrong number of components"), (NULL));
    return FALSE;
  }
invalid_comp:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT, ("Invalid component"), (NULL));
    return FALSE;
  }
}
+
/*
 * Parse a DRI (define restart interval) segment into @dri. Fields are
 * stored in network byte order, ready to be copied into the RTP payload
 * header. Returns TRUE only when a non-zero restart interval was read;
 * the cursor is advanced past the segment on success.
 */
static gboolean
gst_rtp_jpeg_pay_read_dri (GstRtpJPEGPay * pay, GstBufferMemoryMap * memory,
    RtpRestartMarkerHeader * dri)
{
  guint dri_size, restart_interval;

  /* we need at least 4 bytes for the DRI */
  if (memory->offset + 4 > memory->total_size)
    goto wrong_size;

  dri_size = parse_mem_inc_offset_guint16 (memory);
  if (dri_size < 4)
    goto wrong_length;

  restart_interval = parse_mem_inc_offset_guint16 (memory);
  dri->restart_interval = g_htons (restart_interval);
  /* all count bits set — restart markers may appear anywhere */
  dri->restart_count = g_htons (0xFFFF);
  /* skip any extra bytes the segment may carry beyond the 4 we parsed */
  if (!gst_buffer_memory_advance_bytes (memory, dri_size - 4)) {
    goto wrong_size;
  }

  return dri->restart_interval > 0;

wrong_size:
  {
    GST_WARNING ("not enough data for DRI");
    return FALSE;
  }
wrong_length:
  {
    GST_WARNING ("DRI size too small (%u)", dri_size);
    /* offset got incremented by two when dri_size was parsed. */
    if (dri_size > 2)
      gst_buffer_memory_advance_bytes (memory, dri_size - 2);
    return FALSE;
  }
}
+
+static void
+gst_rtp_jpeg_pay_skipping_marker (GstBufferMemoryMap * memory)
+{
+ guint skip;
+
+ if (G_UNLIKELY (((memory->offset + 1) >= memory->total_size))) {
+ goto wrong_size;
+ }
+ skip = parse_mem_inc_offset_guint16 (memory);
+
+ if (G_UNLIKELY (((skip - 2 + memory->offset) > memory->total_size))) {
+ goto wrong_size;
+ }
+ if (skip > 2) {
+ gst_buffer_memory_advance_bytes (memory, skip - 2);
+ }
+ return;
+
+wrong_size:
+ {
+ GST_WARNING ("not enough data");
+ }
+}
+
+static RtpJpegMarker
+gst_rtp_jpeg_pay_scan_marker (GstBufferMemoryMap * memory)
+{
+ guint8 marker = parse_mem_inc_offset_guint8 (memory);
+
+ while (marker != JPEG_MARKER && ((memory->offset) < memory->total_size)) {
+ marker = parse_mem_inc_offset_guint8 (memory);
+ }
+
+ if (G_UNLIKELY ((memory->offset) >= memory->total_size)) {
+ GST_LOG ("found EOI marker");
+ return JPEG_MARKER_EOI;
+ } else {
+ marker = parse_mem_inc_offset_guint8 (memory);
+ return marker;
+ }
+}
+
+#define RTP_HEADER_LEN 12
+
/*
 * Payload one JPEG frame according to RFC 2435: scan the JFIF headers up to
 * the start-of-scan marker, then split the entropy-coded data over as many
 * RTP packets as the MTU requires. The first packet additionally carries
 * the quantization tables (when pay->quant > 127, i.e. in-band tables) and
 * every packet carries the restart marker header when a DRI segment was
 * found. Unsupported or malformed frames are dropped with a warning and
 * GST_FLOW_OK so the stream keeps running.
 */
static GstFlowReturn
gst_rtp_jpeg_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRtpJPEGPay *pay;
  GstClockTime timestamp;
  GstFlowReturn ret = GST_FLOW_ERROR;
  RtpJpegHeader jpeg_header;
  RtpQuantHeader quant_header;
  RtpRestartMarkerHeader restart_marker_header;
  RtpQuantTable tables[15] = { {0, NULL}, };
  CompInfo info[3] = { {0,}, };
  guint quant_data_size;
  guint mtu, max_payload_size;
  guint bytes_left;
  guint jpeg_header_size = 0;
  guint offset;
  gboolean frame_done;
  gboolean sos_found, sof_found, dqt_found, dri_found;
  gint i;
  GstBufferList *list = NULL;
  gboolean discont;
  GstBufferMemoryMap memory;

  pay = GST_RTP_JPEG_PAY (basepayload);
  mtu = GST_RTP_BASE_PAYLOAD_MTU (pay);

  gst_buffer_memory_map (buffer, &memory);

  timestamp = GST_BUFFER_PTS (buffer);
  discont = GST_BUFFER_IS_DISCONT (buffer);

  GST_LOG_OBJECT (pay, "got buffer size %" G_GSIZE_FORMAT
      " , timestamp %" GST_TIME_FORMAT, memory.total_size,
      GST_TIME_ARGS (timestamp));

  /* parse the jpeg header for 'start of scan' and read quant tables if needed */
  sos_found = FALSE;
  dqt_found = FALSE;
  sof_found = FALSE;
  dri_found = FALSE;

  while (!sos_found && (memory.offset < memory.total_size)) {
    gint marker;

    GST_LOG_OBJECT (pay, "checking from offset %u", memory.offset);
    marker = gst_rtp_jpeg_pay_scan_marker (&memory);
    switch (marker) {
      case JPEG_MARKER_JFIF:
      case JPEG_MARKER_CMT:
      case JPEG_MARKER_DHT:
      case JPEG_MARKER_H264:
        GST_LOG_OBJECT (pay, "skipping marker");
        gst_rtp_jpeg_pay_skipping_marker (&memory);
        break;
      case JPEG_MARKER_SOF:
        if (!gst_rtp_jpeg_pay_read_sof (pay, &memory, info, tables,
                G_N_ELEMENTS (tables)))
          goto invalid_format;
        sof_found = TRUE;
        break;
      case JPEG_MARKER_DQT:
        GST_LOG ("DQT found");
        gst_rtp_jpeg_pay_read_quant_table (&memory, tables);
        dqt_found = TRUE;
        break;
      case JPEG_MARKER_SOS:
        sos_found = TRUE;
        GST_LOG_OBJECT (pay, "SOS found");
        jpeg_header_size = memory.offset;
        /* Do not re-combine into single statement with previous line! */
        jpeg_header_size += parse_mem_inc_offset_guint16 (&memory);
        break;
      case JPEG_MARKER_EOI:
        GST_WARNING_OBJECT (pay, "EOI reached before SOS!");
        break;
      case JPEG_MARKER_SOI:
        GST_LOG_OBJECT (pay, "SOI found");
        break;
      case JPEG_MARKER_DRI:
        GST_LOG_OBJECT (pay, "DRI found");
        if (gst_rtp_jpeg_pay_read_dri (pay, &memory, &restart_marker_header))
          dri_found = TRUE;
        break;
      default:
        if (marker == JPEG_MARKER_JPG ||
            (marker >= JPEG_MARKER_JPG0 && marker <= JPEG_MARKER_JPG13) ||
            (marker >= JPEG_MARKER_APP0 && marker <= JPEG_MARKER_APP15)) {
          GST_LOG_OBJECT (pay, "skipping marker");
          gst_rtp_jpeg_pay_skipping_marker (&memory);
        } else {
          /* no need to do anything, gst_rtp_jpeg_pay_scan_marker will go on */
          GST_FIXME_OBJECT (pay, "unhandled marker 0x%02x", marker);
        }
        break;
    }
  }
  if (!dqt_found || !sof_found)
    goto unsupported_jpeg;

  /* by now we should either have negotiated the width/height or the SOF header
   * should have filled us in */
  if (pay->width < 0 || pay->height < 0) {
    goto no_dimension;
  }

  GST_LOG_OBJECT (pay, "header size %u", jpeg_header_size);

  offset = 0;

  /* types 64-127 indicate the presence of a restart marker header */
  if (dri_found)
    pay->type += 64;

  /* prepare stuff for the jpeg header */
  jpeg_header.type_spec = 0;
  jpeg_header.type = pay->type;
  jpeg_header.q = pay->quant;
  jpeg_header.width = pay->width;
  jpeg_header.height = pay->height;
  /* collect the quant headers sizes */
  quant_header.mbz = 0;
  quant_header.precision = 0;
  quant_header.length = 0;
  quant_data_size = 0;

  if (pay->quant > 127) {
    /* for the Y and U component, look up the quant table and its size. quant
     * tables for U and V should be the same */
    for (i = 0; i < 2; i++) {
      guint qsize;
      guint qt;

      qt = info[i].qt;
      if (qt >= G_N_ELEMENTS (tables))
        goto invalid_quant;

      qsize = tables[qt].size;
      if (qsize == 0)
        goto invalid_quant;

      quant_header.precision |= (qsize == 64 ? 0 : (1 << i));
      quant_data_size += qsize;
    }
    quant_header.length = g_htons (quant_data_size);
    quant_data_size += sizeof (quant_header);
  }

  GST_LOG_OBJECT (pay, "quant_data size %u", quant_data_size);

  /* everything that still has to go out: headers plus scan data */
  bytes_left =
      sizeof (jpeg_header) + quant_data_size + memory.total_size -
      jpeg_header_size;

  if (dri_found)
    bytes_left += sizeof (restart_marker_header);

  max_payload_size = mtu - (RTP_HEADER_LEN + sizeof (jpeg_header));
  list = gst_buffer_list_new_sized ((bytes_left / max_payload_size) + 1);

  /* fragmentation loop: one RTP packet per iteration */
  frame_done = FALSE;
  do {
    GstBuffer *outbuf;
    guint8 *payload;
    guint payload_size;
    guint header_size;
    GstBuffer *paybuf;
    GstRTPBuffer rtp = { NULL };
    guint rtp_header_size = gst_rtp_buffer_calc_header_len (0);

    /* The available room is the packet MTU, minus the RTP header length. */
    payload_size =
        (bytes_left < (mtu - rtp_header_size) ? bytes_left :
        (mtu - rtp_header_size));

    header_size = sizeof (jpeg_header) + quant_data_size;
    if (dri_found)
      header_size += sizeof (restart_marker_header);

    outbuf =
        gst_rtp_base_payload_allocate_output_buffer (basepayload, header_size,
        0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

    if (payload_size == bytes_left) {
      GST_LOG_OBJECT (pay, "last packet of frame");
      frame_done = TRUE;
      /* RTP marker bit flags the last packet of the frame */
      gst_rtp_buffer_set_marker (&rtp, 1);
    }

    payload = gst_rtp_buffer_get_payload (&rtp);

    /* update offset: the fragment offset field is a 24-bit big-endian
     * value inside the header struct */
#if (G_BYTE_ORDER == G_LITTLE_ENDIAN)
    jpeg_header.offset = ((offset & 0x0000FF) << 16) |
        ((offset & 0xFF0000) >> 16) | (offset & 0x00FF00);
#else
    jpeg_header.offset = offset;
#endif
    memcpy (payload, &jpeg_header, sizeof (jpeg_header));
    payload += sizeof (jpeg_header);
    payload_size -= sizeof (jpeg_header);

    if (dri_found) {
      memcpy (payload, &restart_marker_header, sizeof (restart_marker_header));
      payload += sizeof (restart_marker_header);
      payload_size -= sizeof (restart_marker_header);
    }

    /* only send quant table with first packet */
    if (G_UNLIKELY (quant_data_size > 0)) {
      memcpy (payload, &quant_header, sizeof (quant_header));
      payload += sizeof (quant_header);

      /* copy the quant tables for luma and chrominance */
      for (i = 0; i < 2; i++) {
        guint qsize;
        guint qt;

        qt = info[i].qt;
        qsize = tables[qt].size;
        memcpy (payload, tables[qt].data, qsize);

        GST_LOG_OBJECT (pay, "component %d using quant %d, size %d", i, qt,
            qsize);

        payload += qsize;
      }
      payload_size -= quant_data_size;
      bytes_left -= quant_data_size;
      /* zeroed so subsequent packets skip this branch */
      quant_data_size = 0;
    }
    GST_LOG_OBJECT (pay, "sending payload size %d", payload_size);
    gst_rtp_buffer_unmap (&rtp);

    /* create a new buf to hold the payload */
    paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL,
        jpeg_header_size + offset, payload_size);

    /* join memory parts */
    gst_rtp_copy_video_meta (pay, outbuf, paybuf);
    outbuf = gst_buffer_append (outbuf, paybuf);

    GST_BUFFER_PTS (outbuf) = timestamp;

    if (discont) {
      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
      /* Only the first outputted buffer has the DISCONT flag */
      discont = FALSE;
    }

    /* and add to list */
    gst_buffer_list_insert (list, -1, outbuf);

    bytes_left -= payload_size;
    offset += payload_size;
  }
  while (!frame_done);
  /* push the whole buffer list at once */
  ret = gst_rtp_base_payload_push_list (basepayload, list);

  gst_buffer_memory_unmap (&memory);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unsupported_jpeg:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT, ("Unsupported JPEG"), (NULL));
    gst_buffer_memory_unmap (&memory);
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
no_dimension:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT, ("No size given"), (NULL));
    gst_buffer_memory_unmap (&memory);
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
invalid_format:
  {
    /* error was posted */
    gst_buffer_memory_unmap (&memory);
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
invalid_quant:
  {
    GST_ELEMENT_WARNING (pay, STREAM, FORMAT, ("Invalid quant tables"), (NULL));
    gst_buffer_memory_unmap (&memory);
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
+
+static void
+gst_rtp_jpeg_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpJPEGPay *rtpjpegpay;
+
+ rtpjpegpay = GST_RTP_JPEG_PAY (object);
+
+ switch (prop_id) {
+ case PROP_JPEG_QUALITY:
+ rtpjpegpay->quality = g_value_get_int (value);
+ GST_DEBUG_OBJECT (object, "quality = %d", rtpjpegpay->quality);
+ break;
+ case PROP_JPEG_TYPE:
+ rtpjpegpay->type = g_value_get_int (value);
+ GST_DEBUG_OBJECT (object, "type = %d", rtpjpegpay->type);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_jpeg_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpJPEGPay *rtpjpegpay;
+
+ rtpjpegpay = GST_RTP_JPEG_PAY (object);
+
+ switch (prop_id) {
+ case PROP_JPEG_QUALITY:
+ g_value_set_int (value, rtpjpegpay->quality);
+ break;
+ case PROP_JPEG_TYPE:
+ g_value_set_int (value, rtpjpegpay->type);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/rtp/gstrtpjpegpay.h b/gst/rtp/gstrtpjpegpay.h
new file mode 100644
index 0000000000..696dc39e35
--- /dev/null
+++ b/gst/rtp/gstrtpjpegpay.h
@@ -0,0 +1,61 @@
+/* GStreamer
+ * Copyright (C) 2008 Axis Communications <dev-gstreamer@axis.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_JPEG_PAY_H__
#define __GST_RTP_JPEG_PAY_H__

#include <gst/gst.h>
#include <gst/rtp/gstrtpbasepayload.h>

G_BEGIN_DECLS
#define GST_TYPE_RTP_JPEG_PAY \
  (gst_rtp_jpeg_pay_get_type())
#define GST_RTP_JPEG_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_JPEG_PAY,GstRtpJPEGPay))
#define GST_RTP_JPEG_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_JPEG_PAY,GstRtpJPEGPayClass))
#define GST_IS_RTP_JPEG_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_JPEG_PAY))
#define GST_IS_RTP_JPEG_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_JPEG_PAY))
typedef struct _GstRtpJPEGPay GstRtpJPEGPay;
typedef struct _GstRtpJPEGPayClass GstRtpJPEGPayClass;

/* RTP payloader element for JPEG video (RFC 2435) */
struct _GstRtpJPEGPay
{
  GstRTPBasePayload payload;

  guint8 quality;               /* "quality" property */
  guint8 type;                  /* "type" property / RTP type field */

  gint height;                  /* frame height in 8-pixel blocks, -1 if unknown */
  gint width;                   /* frame width in 8-pixel blocks, -1 if unknown */

  guint8 quant;                 /* RTP Q value; > 127 means in-band quant tables */
};

struct _GstRtpJPEGPayClass
{
  GstRTPBasePayloadClass parent_class;
};

GType gst_rtp_jpeg_pay_get_type (void);

G_END_DECLS
#endif /* __GST_RTP_JPEG_PAY_H__ */
diff --git a/gst/rtp/gstrtpklvdepay.c b/gst/rtp/gstrtpklvdepay.c
new file mode 100644
index 0000000000..1cb8bc3ab4
--- /dev/null
+++ b/gst/rtp/gstrtpklvdepay.c
@@ -0,0 +1,393 @@
+/* GStreamer RTP KLV Depayloader
+ * Copyright (C) 2014-2015 Tim-Philipp Müller <tim@centricular.com>
+ * Copyright (C) 2014-2015 Centricular Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpklvdepay
+ * @title: rtpklvdepay
+ * @see_also: rtpklvpay
+ *
+ * Extract KLV metadata from RTP packets according to RFC 6597.
+ * For detailed information see: http://tools.ietf.org/html/rfc6597
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 udpsrc caps='application/x-rtp, media=(string)application, clock-rate=(int)90000, encoding-name=(string)SMPTE336M' ! rtpklvdepay ! fakesink dump=true
+ * ]| This example pipeline will depayload an RTP KLV stream and display
+ * a hexdump of the KLV data on stdout.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtpklvdepay.h"
+
+#include <string.h>
+
GST_DEBUG_CATEGORY_STATIC (klvdepay_debug);
#define GST_CAT_DEFAULT (klvdepay_debug)

/* output: always parsed KLV units */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("meta/x-klv, parsed = (bool) true"));

/* input: RTP packets with the RFC 6597 SMPTE336M encoding name */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) application, clock-rate = (int) [1, MAX], "
        "encoding-name = (string) SMPTE336M")
    );

#define gst_rtp_klv_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpKlvDepay, gst_rtp_klv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpklvdepay, "rtpklvdepay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_KLV_DEPAY, rtp_element_init (plugin));

static void gst_rtp_klv_depay_finalize (GObject * object);

static GstStateChangeReturn gst_rtp_klv_depay_change_state (GstElement *
    element, GstStateChange transition);
static gboolean gst_rtp_klv_depay_setcaps (GstRTPBaseDepayload * depayload,
    GstCaps * caps);
static GstBuffer *gst_rtp_klv_depay_process (GstRTPBaseDepayload * depayload,
    GstRTPBuffer * rtp);
static gboolean gst_rtp_klv_depay_handle_event (GstRTPBaseDepayload * depay,
    GstEvent * ev);

static void gst_rtp_klv_depay_reset (GstRtpKlvDepay * klvdepay);
+
+static void
+gst_rtp_klv_depay_class_init (GstRtpKlvDepayClass * klass)
+{
+ GstElementClass *element_class = (GstElementClass *) klass;
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstRTPBaseDepayloadClass *rtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (klvdepay_debug, "klvdepay", 0,
+ "RTP KLV Depayloader");
+
+ gobject_class->finalize = gst_rtp_klv_depay_finalize;
+
+ element_class->change_state = gst_rtp_klv_depay_change_state;
+
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+
+ gst_element_class_set_static_metadata (element_class,
+ "RTP KLV Depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts KLV (SMPTE ST 336) metadata from RTP packets",
+ "Tim-Philipp Müller <tim@centricular.com>");
+
+ rtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ rtpbasedepayload_class->set_caps = gst_rtp_klv_depay_setcaps;
+ rtpbasedepayload_class->process_rtp_packet = gst_rtp_klv_depay_process;
+ rtpbasedepayload_class->handle_event = gst_rtp_klv_depay_handle_event;
+}
+
static void
gst_rtp_klv_depay_init (GstRtpKlvDepay * klvdepay)
{
  /* adapter accumulates payload fragments until a whole KLV unit is seen */
  klvdepay->adapter = gst_adapter_new ();
}
+
/* free the adapter and pending data when the object is destroyed */
static void
gst_rtp_klv_depay_finalize (GObject * object)
{
  GstRtpKlvDepay *klvdepay;

  klvdepay = GST_RTP_KLV_DEPAY (object);

  /* reset first so the adapter is empty before it is unreffed */
  gst_rtp_klv_depay_reset (klvdepay);
  g_object_unref (klvdepay->adapter);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
/* drop any partially assembled unit and wait for the next unit start */
static void
gst_rtp_klv_depay_reset (GstRtpKlvDepay * klvdepay)
{
  GST_DEBUG_OBJECT (klvdepay, "resetting");
  gst_adapter_clear (klvdepay->adapter);
  klvdepay->resync = TRUE;
  klvdepay->last_rtp_ts = -1;
}
+
+static gboolean
+gst_rtp_klv_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * ev)
+{
+ switch (GST_EVENT_TYPE (ev)) {
+ case GST_EVENT_STREAM_START:{
+ GstStreamFlags flags;
+
+ ev = gst_event_make_writable (ev);
+ gst_event_parse_stream_flags (ev, &flags);
+ gst_event_set_stream_flags (ev, flags | GST_STREAM_FLAG_SPARSE);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (depay, ev);
+}
+
+static gboolean
+gst_rtp_klv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *s;
+ GstCaps *src_caps;
+ gboolean res;
+ gint clock_rate;
+
+ s = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (s, "clock-rate", &clock_rate))
+ return FALSE;
+
+ depayload->clock_rate = clock_rate;
+
+ src_caps = gst_static_pad_template_get_caps (&src_template);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), src_caps);
+ gst_caps_unref (src_caps);
+
+ return res;
+}
+
+static gboolean
+klv_get_vlen (const guint8 * data, guint data_len, guint64 * v_len,
+ gsize * len_size)
+{
+ guint8 first_byte, len_len;
+ guint64 len;
+
+ g_assert (data_len > 0);
+
+ first_byte = *data++;
+
+ if ((first_byte & 0x80) == 0) {
+ *v_len = first_byte & 0x7f;
+ *len_size = 1;
+ return TRUE;
+ }
+
+ len_len = first_byte & 0x7f;
+
+ if (len_len == 0 || len_len > 8)
+ return FALSE;
+
+ if ((1 + len_len) > data_len)
+ return FALSE;
+
+ *len_size = 1 + len_len;
+
+ len = 0;
+ while (len_len > 0) {
+ len = len << 8 | *data++;
+ --len_len;
+ }
+
+ *v_len = len;
+
+ return TRUE;
+}
+
/*
 * Try to turn the bytes collected in the adapter into one complete KLV
 * unit. Returns NULL when the unit is still incomplete (adapter kept) or
 * when the data is inconsistent (depayloader state is reset).
 */
static GstBuffer *
gst_rtp_klv_depay_process_data (GstRtpKlvDepay * klvdepay)
{
  gsize avail, data_len, len_size;
  GstBuffer *outbuf;
  guint8 data[1 + 8];
  guint64 v_len;

  avail = gst_adapter_available (klvdepay->adapter);

  GST_TRACE_OBJECT (klvdepay, "%" G_GSIZE_FORMAT " bytes in adapter", avail);

  if (avail == 0)
    return NULL;

  /* need at least 16 bytes of UL key plus 1 byte of length */
  if (avail < 16 + 1)
    goto bad_klv_packet;

  /* check if the declared KLV unit size matches actual bytes available */
  /* peek only the BER length field, which follows the 16-byte key and is
   * at most 1 + 8 bytes long */
  data_len = MIN (avail - 16, 1 + 8);
  gst_adapter_copy (klvdepay->adapter, data, 16, data_len);
  if (!klv_get_vlen (data, data_len, &v_len, &len_size))
    goto bad_klv_packet;

  GST_LOG_OBJECT (klvdepay, "want %" G_GUINT64_FORMAT " bytes, "
      "have %" G_GSIZE_FORMAT " bytes", 16 + len_size + v_len, avail);

  if (avail < 16 + len_size + v_len)
    goto incomplete_klv_packet;

  /* something is wrong, this shouldn't ever happen */
  if (avail > 16 + len_size + v_len)
    goto bad_klv_packet;

  outbuf = gst_adapter_take_buffer (klvdepay->adapter, avail);

  /* Mark buffers as key unit to signal this is the start of a KLV unit
   * (for now all buffers will be flagged like this, since all buffers are
   * self-contained KLV units, but in future that might change) */
  outbuf = gst_buffer_make_writable (outbuf);
  GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

  return outbuf;

/* ERRORS */
bad_klv_packet:
  {
    GST_WARNING_OBJECT (klvdepay, "bad KLV packet, dropping");
    gst_rtp_klv_depay_reset (klvdepay);
    return NULL;
  }
incomplete_klv_packet:
  {
    GST_DEBUG_OBJECT (klvdepay, "partial KLV packet: have %u bytes, want %u",
        (guint) avail, (guint) (16 + len_size + v_len));
    return NULL;
  }
}
+
/* We're trying to be pragmatic here, not quite as strict as the spec wants
 * us to be with regard to marker bits and resyncing after packet loss */
static GstBuffer *
gst_rtp_klv_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
{
  GstRtpKlvDepay *klvdepay = GST_RTP_KLV_DEPAY (depayload);
  GstBuffer *payload, *outbuf = NULL;
  gboolean marker, start = FALSE, maybe_start;
  guint32 rtp_ts;
  guint16 seq;
  guint payload_len;

  /* Ignore DISCONT on first buffer and on buffers following a discont */
  if (GST_BUFFER_IS_DISCONT (rtp->buffer) && klvdepay->last_rtp_ts != -1) {
    GST_WARNING_OBJECT (klvdepay, "DISCONT, need to resync");
    gst_rtp_klv_depay_reset (klvdepay);
  }

  payload_len = gst_rtp_buffer_get_payload_len (rtp);

  /* marker bit signals last fragment of a KLV unit */
  marker = gst_rtp_buffer_get_marker (rtp);

  seq = gst_rtp_buffer_get_seq (rtp);

  /* packet directly after one with marker bit set => start */
  start = klvdepay->last_marker_seq != -1
      && gst_rtp_buffer_compare_seqnum (klvdepay->last_marker_seq, seq) == 1;

  /* deduce start of new KLV unit in case sender doesn't set marker bits
   * (it's not like the spec is ambiguous about that, but what can you do) */
  rtp_ts = gst_rtp_buffer_get_timestamp (rtp);

  /* a timestamp change means the packet belongs to a different unit */
  maybe_start = klvdepay->last_rtp_ts == -1 || klvdepay->last_rtp_ts != rtp_ts;

  klvdepay->last_rtp_ts = rtp_ts;

  /* fallback to detect self-contained single KLV unit (usual case) */
  if ((!start || !marker || maybe_start) && payload_len > 16) {
    const guint8 *data;
    guint64 v_len;
    gsize len_size;

    data = gst_rtp_buffer_get_payload (rtp);
    /* 0x060e2b34 is the fixed first four bytes of a SMPTE UL key */
    if (GST_READ_UINT32_BE (data) == 0x060e2b34 &&
        klv_get_vlen (data + 16, payload_len - 16, &v_len, &len_size)) {
      if (16 + len_size + v_len == payload_len) {
        GST_LOG_OBJECT (klvdepay, "Looks like a self-contained KLV unit");
        marker = TRUE;
        start = TRUE;
      } else if (16 + len_size + v_len > payload_len) {
        GST_LOG_OBJECT (klvdepay,
            "Looks like the start of a fragmented KLV unit");
        start = TRUE;
      }
    }
  }

  /* If this is the first packet and looks like a start, clear resync flag */
  if (klvdepay->resync && klvdepay->last_marker_seq == -1 && start)
    klvdepay->resync = FALSE;

  if (marker)
    klvdepay->last_marker_seq = seq;

  GST_LOG_OBJECT (klvdepay, "payload of %u bytes, marker=%d, start=%d",
      payload_len, marker, start);

  if (klvdepay->resync && !start) {
    GST_DEBUG_OBJECT (klvdepay, "Dropping buffer, waiting to resync");

    if (marker)
      klvdepay->resync = FALSE;

    goto done;
  }

  /* a new unit starts here: flush whatever was collected for the old one */
  if (start && !marker)
    outbuf = gst_rtp_klv_depay_process_data (klvdepay);

  payload = gst_rtp_buffer_get_payload_buffer (rtp);
  gst_adapter_push (klvdepay->adapter, payload);

  if (marker)
    outbuf = gst_rtp_klv_depay_process_data (klvdepay);

done:

  return outbuf;
}
+
+static GstStateChangeReturn
+gst_rtp_klv_depay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpKlvDepay *klvdepay;
+ GstStateChangeReturn ret;
+
+ klvdepay = GST_RTP_KLV_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtp_klv_depay_reset (klvdepay);
+ klvdepay->last_marker_seq = -1;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_klv_depay_reset (klvdepay);
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtpklvdepay.h b/gst/rtp/gstrtpklvdepay.h
new file mode 100644
index 0000000000..e1042a7361
--- /dev/null
+++ b/gst/rtp/gstrtpklvdepay.h
@@ -0,0 +1,63 @@
+/* GStreamer RTP KLV Depayloader
+ * Copyright (C) 2014-2015 Tim-Philipp Müller <tim@centricular.com>
+ * Copyright (C) 2014-2015 Centricular Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_KLV_DEPAY_H__
#define __GST_RTP_KLV_DEPAY_H__

#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/rtp/gstrtpbasedepayload.h>

G_BEGIN_DECLS

#define GST_TYPE_RTP_KLV_DEPAY \
  (gst_rtp_klv_depay_get_type())
#define GST_RTP_KLV_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_KLV_DEPAY,GstRtpKlvDepay))
#define GST_RTP_KLV_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_KLV_DEPAY,GstRtpKlvDepayClass))
#define GST_IS_RTP_KLV_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_KLV_DEPAY))
#define GST_IS_RTP_KLV_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_KLV_DEPAY))

typedef struct _GstRtpKlvDepay GstRtpKlvDepay;
typedef struct _GstRtpKlvDepayClass GstRtpKlvDepayClass;

/* RTP depayloader for KLV (SMPTE ST 336) metadata, RFC 6597 */
struct _GstRtpKlvDepay
{
  GstRTPBaseDepayload depayload;

  GstAdapter *adapter;          /* collects fragments of the current KLV unit */
  gboolean resync;              /* drop input until the start of a new unit */
  gint last_marker_seq;         /* -1 if unset, otherwise 0-G_MAXUINT16 */
  gint64 last_rtp_ts;           /* -1 if unset, otherwise 0-G_MAXUINT32 */
};

struct _GstRtpKlvDepayClass
{
  GstRTPBaseDepayloadClass parent_class;
};

G_GNUC_INTERNAL GType gst_rtp_klv_depay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_KLV_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpklvpay.c b/gst/rtp/gstrtpklvpay.c
new file mode 100644
index 0000000000..03a59a9160
--- /dev/null
+++ b/gst/rtp/gstrtpklvpay.c
@@ -0,0 +1,200 @@
+/* GStreamer RTP KLV Payloader
+ * Copyright (C) 2014-2015 Tim-Philipp Müller <tim@centricular.com>
+ * Copyright (C) 2014-2015 Centricular Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpklvpay
+ * @title: rtpklvpay
+ * @see_also: rtpklvdepay
+ *
+ * Payloads KLV metadata into RTP packets according to RFC 6597.
+ * For detailed information see: http://tools.ietf.org/html/rfc6597
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 filesrc location=video-with-klv.ts ! tsdemux ! rtpklvpay ! udpsink
+ * ]| This example pipeline will payload an RTP KLV stream extracted from an
+ * MPEG-TS stream and send it via UDP to an RTP receiver.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtpklvpay.h"
+#include "gstrtputils.h"
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY_STATIC (klvpay_debug);
+#define GST_CAT_DEFAULT (klvpay_debug)
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) application, clock-rate = (int) [1, MAX], "
+ "encoding-name = (string) SMPTE336M")
+ );
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("meta/x-klv, parsed = (bool) true"));
+
+#define gst_rtp_klv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpKlvPay, gst_rtp_klv_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpklvpay, "rtpklvpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_KLV_PAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_klv_pay_setcaps (GstRTPBasePayload * pay,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_klv_pay_handle_buffer (GstRTPBasePayload * pay,
+ GstBuffer * buf);
+
+static void
+gst_rtp_klv_pay_class_init (GstRtpKlvPayClass * klass)
+{
+ GstElementClass *element_class = (GstElementClass *) klass;
+ GstRTPBasePayloadClass *rtpbasepay_class;
+
+ GST_DEBUG_CATEGORY_INIT (klvpay_debug, "klvpay", 0, "RTP KLV Payloader");
+
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+
+ gst_element_class_set_static_metadata (element_class,
+ "RTP KLV Payloader", "Codec/Payloader/Network/RTP",
+ "Payloads KLV (SMPTE ST 336) metadata as RTP packets",
+ "Tim-Philipp Müller <tim@centricular.com>");
+
+ rtpbasepay_class = (GstRTPBasePayloadClass *) klass;
+
+ rtpbasepay_class->set_caps = gst_rtp_klv_pay_setcaps;
+ rtpbasepay_class->handle_buffer = gst_rtp_klv_pay_handle_buffer;
+}
+
+static void
+gst_rtp_klv_pay_init (GstRtpKlvPay * klvpay)
+{
+ /* nothing to do here yet */
+}
+
+static gboolean
+gst_rtp_klv_pay_setcaps (GstRTPBasePayload * pay, GstCaps * caps)
+{
+ /* FIXME: allow other clock rates */
+ gst_rtp_base_payload_set_options (pay, "application", TRUE, "SMPTE336M",
+ 90000);
+
+ return gst_rtp_base_payload_set_outcaps (pay, NULL);
+}
+
+static GstFlowReturn
+gst_rtp_klv_pay_handle_buffer (GstRTPBasePayload * basepayload, GstBuffer * buf)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBufferList *list = NULL;
+ GstRtpKlvPay *pay;
+ GstMapInfo map;
+ GstBuffer *outbuf = NULL;
+ gsize offset;
+ guint mtu, rtp_header_size, max_payload_size;
+
+ pay = GST_RTP_KLV_PAY (basepayload);
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (basepayload);
+
+ rtp_header_size = gst_rtp_buffer_calc_header_len (0);
+ max_payload_size = mtu - rtp_header_size;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+
+ if (map.size == 0)
+ goto done;
+
+ /* KLV coding shall use and only use a fixed 16-byte SMPTE-administered
+ * Universal Label, according to SMPTE 298M as Key (Rec. ITU R-BT.1653-1) */
+ if (map.size < 16 || GST_READ_UINT32_BE (map.data) != 0x060E2B34)
+ goto bad_input;
+
+ if (map.size > max_payload_size)
+ list = gst_buffer_list_new ();
+
+ GST_LOG_OBJECT (pay, "%" G_GSIZE_FORMAT " bytes of data to payload",
+ map.size);
+
+ offset = 0;
+ while (offset < map.size) {
+ GstBuffer *payloadbuf;
+ GstRTPBuffer rtp = { NULL };
+ guint payload_size;
+ guint bytes_left;
+
+ bytes_left = map.size - offset;
+ payload_size = MIN (bytes_left, max_payload_size);
+
+ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
+
+ if (payload_size == bytes_left) {
+ GST_LOG_OBJECT (pay, "last packet of KLV unit");
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_marker (&rtp, 1);
+ gst_rtp_buffer_unmap (&rtp);
+ }
+
+ GST_LOG_OBJECT (pay, "packet with payload size %u", payload_size);
+
+ gst_rtp_copy_meta (GST_ELEMENT_CAST (pay), outbuf, buf, 0);
+
+ payloadbuf = gst_buffer_copy_region (buf, GST_BUFFER_COPY_MEMORY,
+ offset, payload_size);
+
+ /* join rtp header + payload memory parts */
+ outbuf = gst_buffer_append (outbuf, payloadbuf);
+
+ GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (buf);
+ GST_BUFFER_DTS (outbuf) = GST_BUFFER_DTS (buf);
+
+ /* and add to list */
+ if (list != NULL)
+ gst_buffer_list_insert (list, -1, outbuf);
+
+ offset += payload_size;
+ }
+
+done:
+
+ gst_buffer_unmap (buf, &map);
+ gst_buffer_unref (buf);
+
+ if (list != NULL)
+ ret = gst_rtp_base_payload_push_list (basepayload, list);
+ else if (outbuf != NULL)
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
+
+ return ret;
+
+/* ERRORS */
+bad_input:
+ {
+ GST_ERROR_OBJECT (pay, "Input doesn't look like a KLV packet, ignoring");
+ goto done;
+ }
+}
diff --git a/gst/rtp/gstrtpklvpay.h b/gst/rtp/gstrtpklvpay.h
new file mode 100644
index 0000000000..41187d6cd9
--- /dev/null
+++ b/gst/rtp/gstrtpklvpay.h
@@ -0,0 +1,58 @@
+/* GStreamer RTP KLV Payloader
+ * Copyright (C) 2014-2015 Tim-Philipp Müller <tim@centricular.com>
+ * Copyright (C) 2014-2015 Centricular Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_KLV_PAY_H__
+#define __GST_RTP_KLV_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/rtp.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_KLV_PAY \
+ (gst_rtp_klv_pay_get_type())
+#define GST_RTP_KLV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_KLV_PAY,GstRtpKlvPay))
+#define GST_RTP_KLV_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_KLV_PAY,GstRtpKlvPayClass))
+#define GST_IS_RTP_KLV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_KLV_PAY))
+#define GST_IS_RTP_KLV_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_KLV_PAY))
+
+typedef struct _GstRtpKlvPay GstRtpKlvPay;
+typedef struct _GstRtpKlvPayClass GstRtpKlvPayClass;
+
+struct _GstRtpKlvPay
+{
+ GstRTPBasePayload rtpbasepayload;
+};
+
+struct _GstRtpKlvPayClass
+{
+ GstRTPBasePayloadClass rtpbasepayload_class;
+};
+
+G_GNUC_INTERNAL GType gst_rtp_klv_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_KLV_PAY_H__ */
diff --git a/gst/rtp/gstrtpldacpay.c b/gst/rtp/gstrtpldacpay.c
new file mode 100644
index 0000000000..2b14b746fe
--- /dev/null
+++ b/gst/rtp/gstrtpldacpay.c
@@ -0,0 +1,171 @@
+/* GStreamer RTP LDAC payloader
+ * Copyright (C) 2020 Asymptotic <sanchayan@asymptotic.io>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpldacpay
+ * @title: rtpldacpay
+ *
+ * Payload LDAC encoded audio into RTP packets.
+ *
+ * LDAC does not have a public specification and concerns itself only with
+ * bluetooth transmission. Due to the unavailability of a specification, we
+ * consider the encoding-name as X-GST-LDAC.
+ *
+ * The best reference is [libldac](https://android.googlesource.com/platform/external/libldac/)
+ * and the A2DP LDAC implementation in Android's bluetooth stack [Fluoride]
+ * (https://android.googlesource.com/platform/system/bt/+/refs/heads/master/stack/a2dp/a2dp_vendor_ldac_encoder.cc).
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! ldacenc ! rtpldacpay mtu=679 ! avdtpsink
+ * ]| This example pipeline will payload LDAC encoded audio.
+ *
+ * Since: 1.20
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpldacpay.h"
+#include "gstrtputils.h"
+
+#define GST_RTP_HEADER_LENGTH 12
+/* MTU size required for LDAC A2DP streaming */
+#define GST_LDAC_MTU_REQUIRED 679
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_ldac_pay_debug);
+#define GST_CAT_DEFAULT gst_rtp_ldac_pay_debug
+
+#define parent_class gst_rtp_ldac_pay_parent_class
+G_DEFINE_TYPE (GstRtpLdacPay, gst_rtp_ldac_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpldacpay, "rtpldacpay", GST_RANK_NONE,
+ GST_TYPE_RTP_LDAC_PAY, rtp_element_init (plugin));
+
+static GstStaticPadTemplate gst_rtp_ldac_pay_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-ldac, "
+ "channels = (int) [ 1, 2 ], "
+ "rate = (int) { 44100, 48000, 88200, 96000 }")
+ );
+
+static GstStaticPadTemplate gst_rtp_ldac_pay_src_factory =
+GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) audio,"
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) { 44100, 48000, 88200, 96000 },"
+ "encoding-name = (string) \"X-GST-LDAC\"")
+ );
+
+static gboolean gst_rtp_ldac_pay_set_caps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_ldac_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+static void
+gst_rtp_ldac_pay_class_init (GstRtpLdacPayClass * klass)
+{
+ GstRTPBasePayloadClass *payload_class = GST_RTP_BASE_PAYLOAD_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ payload_class->set_caps = GST_DEBUG_FUNCPTR (gst_rtp_ldac_pay_set_caps);
+ payload_class->handle_buffer =
+ GST_DEBUG_FUNCPTR (gst_rtp_ldac_pay_handle_buffer);
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_ldac_pay_sink_factory);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_ldac_pay_src_factory);
+
+ gst_element_class_set_static_metadata (element_class, "RTP packet payloader",
+ "Codec/Payloader/Network", "Payload LDAC audio as RTP packets",
+ "Sanchayan Maity <sanchayan@asymptotic.io>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_ldac_pay_debug, "rtpldacpay", 0,
+ "RTP LDAC payloader");
+}
+
+static void
+gst_rtp_ldac_pay_init (GstRtpLdacPay * self)
+{
+
+}
+
+static gboolean
+gst_rtp_ldac_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ GstRtpLdacPay *ldacpay = GST_RTP_LDAC_PAY (payload);
+ GstStructure *structure;
+ gint rate;
+
+ if (GST_RTP_BASE_PAYLOAD_MTU (ldacpay) < GST_LDAC_MTU_REQUIRED) {
+ GST_ERROR_OBJECT (ldacpay, "Invalid MTU %d, should be >= %d",
+ GST_RTP_BASE_PAYLOAD_MTU (ldacpay), GST_LDAC_MTU_REQUIRED);
+ return FALSE;
+ }
+
+ structure = gst_caps_get_structure (caps, 0);
+ if (!gst_structure_get_int (structure, "rate", &rate)) {
+ GST_ERROR_OBJECT (ldacpay, "Failed to get audio rate from caps");
+ return FALSE;
+ }
+
+ gst_rtp_base_payload_set_options (payload, "audio", TRUE, "X-GST-LDAC", rate);
+
+ return gst_rtp_base_payload_set_outcaps (payload, NULL);
+}
+
+/*
+ * LDAC encoder does not handle split frames. Currently, the encoder will
+ * always emit 660 bytes worth of payload encapsulating multiple LDAC frames.
+ * This is as per eqmid and GST_LDAC_MTU_REQUIRED passed for configuring the
+ * encoder upstream. Since the encoder always emit full frames and we do not
+ * need to handle frame splitting, we do not use an adapter and also push out
+ * the buffer as it is received.
+ */
+static GstFlowReturn
+gst_rtp_ldac_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
+{
+ GstRtpLdacPay *ldacpay = GST_RTP_LDAC_PAY (payload);
+ GstBuffer *outbuf;
+ GstClockTime outbuf_frame_duration, outbuf_pts;
+ gsize buf_sz;
+
+ outbuf =
+ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+ (ldacpay), GST_RTP_HEADER_LENGTH, 0, 0);
+
+ outbuf_pts = GST_BUFFER_PTS (buffer);
+ outbuf_frame_duration = GST_BUFFER_DURATION (buffer);
+ buf_sz = gst_buffer_get_size (buffer);
+
+ gst_rtp_copy_audio_meta (ldacpay, outbuf, buffer);
+ outbuf = gst_buffer_append (outbuf, buffer);
+
+ GST_BUFFER_PTS (outbuf) = outbuf_pts;
+ GST_BUFFER_DURATION (outbuf) = outbuf_frame_duration;
+ GST_DEBUG_OBJECT (ldacpay,
+ "Pushing %" G_GSIZE_FORMAT " bytes: %" GST_TIME_FORMAT, buf_sz,
+ GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)));
+
+ return gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (ldacpay), outbuf);
+}
diff --git a/gst/rtp/gstrtpldacpay.h b/gst/rtp/gstrtpldacpay.h
new file mode 100644
index 0000000000..0865ce7ade
--- /dev/null
+++ b/gst/rtp/gstrtpldacpay.h
@@ -0,0 +1,55 @@
+/* GStreamer RTP LDAC payloader
+ * Copyright (C) 2020 Asymptotic <sanchayan@asymptotic.io>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_LDAC_PAY \
+ (gst_rtp_ldac_pay_get_type())
+#define GST_RTP_LDAC_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_LDAC_PAY,\
+ GstRtpLdacPay))
+#define GST_RTP_LDAC_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_LDAC_PAY,\
+ GstRtpLdacPayClass))
+#define GST_IS_RTP_LDAC_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_LDAC_PAY))
+#define GST_IS_RTP_LDAC_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_LDAC_PAY))
+
+typedef struct _GstRtpLdacPay GstRtpLdacPay;
+typedef struct _GstRtpLdacPayClass GstRtpLdacPayClass;
+
+struct _GstRtpLdacPay {
+ GstRTPBasePayload base;
+};
+
+struct _GstRtpLdacPayClass {
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_ldac_pay_get_type(void);
+
+gboolean gst_rtp_ldac_pay_plugin_init (GstPlugin * plugin);
+
+G_END_DECLS
diff --git a/gst/rtp/gstrtpmp1sdepay.c b/gst/rtp/gstrtpmp1sdepay.c
new file mode 100644
index 0000000000..e07dc2cd3d
--- /dev/null
+++ b/gst/rtp/gstrtpmp1sdepay.c
@@ -0,0 +1,139 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpmp1sdepay.h"
+#include "gstrtputils.h"
+
+/* RtpMP1SDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_LAST
+};
+
+static GstStaticPadTemplate gst_rtp_mp1s_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg,systemstream=(boolean)true")
+ );
+
+/* The spec says video/MP1S but I have seen streams with other/MP1S so we will
+ * allow them both */
+static GstStaticPadTemplate gst_rtp_mp1s_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"other\", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP1S\";"
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP1S\"")
+ );
+
+G_DEFINE_TYPE (GstRtpMP1SDepay, gst_rtp_mp1s_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp1sdepay, "rtpmp1sdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MP1S_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_mp1s_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_mp1s_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static void
+gst_rtp_mp1s_depay_class_init (GstRtpMP1SDepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_mp1s_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp1s_depay_setcaps;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp1s_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp1s_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG1 System Stream depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG1 System Streams from RTP packets (RFC 3555)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+}
+
+static void
+gst_rtp_mp1s_depay_init (GstRtpMP1SDepay * rtpmp1sdepay)
+{
+}
+
+static gboolean
+gst_rtp_mp1s_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ GstStructure *structure;
+ gint clock_rate;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000; /* default */
+ depayload->clock_rate = clock_rate;
+
+ srccaps = gst_caps_new_simple ("video/mpeg",
+ "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ gst_caps_unref (srccaps);
+
+ return res;
+}
+
+static GstBuffer *
+gst_rtp_mp1s_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstBuffer *outbuf;
+
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+
+ if (outbuf) {
+ GST_DEBUG ("gst_rtp_mp1s_depay_chain: pushing buffer of size %"
+ G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
+
+ gst_rtp_drop_meta (GST_ELEMENT_CAST (depayload), outbuf, 0);
+ }
+
+ return outbuf;
+}
diff --git a/gst/rtp/gstrtpmp1sdepay.h b/gst/rtp/gstrtpmp1sdepay.h
new file mode 100644
index 0000000000..e2e582c526
--- /dev/null
+++ b/gst/rtp/gstrtpmp1sdepay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MP1S_DEPAY_H__
+#define __GST_RTP_MP1S_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MP1S_DEPAY \
+ (gst_rtp_mp1s_depay_get_type())
+#define GST_RTP_MP1S_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP1S_DEPAY,GstRtpMP1SDepay))
+#define GST_RTP_MP1S_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP1S_DEPAY,GstRtpMP1SDepayClass))
+#define GST_IS_RTP_MP1S_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP1S_DEPAY))
+#define GST_IS_RTP_MP1S_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP1S_DEPAY))
+typedef struct _GstRtpMP1SDepay GstRtpMP1SDepay;
+typedef struct _GstRtpMP1SDepayClass GstRtpMP1SDepayClass;
+
+struct _GstRtpMP1SDepay
+{
+ GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpMP1SDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_mp1s_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MP1S_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpmp2tdepay.c b/gst/rtp/gstrtpmp2tdepay.c
new file mode 100644
index 0000000000..7acf3f819d
--- /dev/null
+++ b/gst/rtp/gstrtpmp2tdepay.c
@@ -0,0 +1,239 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpmp2tdepay.h"
+#include "gstrtputils.h"
+
+/* RtpMP2TDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+#define DEFAULT_SKIP_FIRST_BYTES 0
+
+enum
+{
+ PROP_0,
+ PROP_SKIP_FIRST_BYTES
+};
+
+static GstStaticPadTemplate gst_rtp_mp2t_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpegts,"
+ "packetsize=(int)188," "systemstream=(boolean)true")
+ );
+
+static GstStaticPadTemplate gst_rtp_mp2t_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) [1, MAX ], "
+ "encoding-name = (string) { MP2T, MP2T-ES } ;"
+ /* All optional parameters
+ *
+ * "profile-level-id=[1,MAX]"
+ * "config="
+ */
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_MP2T_STRING ", "
+ "clock-rate = (int) [1, MAX ]")
+ );
+
+G_DEFINE_TYPE (GstRtpMP2TDepay, gst_rtp_mp2t_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp2tdepay, "rtpmp2tdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MP2T_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_mp2t_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_mp2t_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static void gst_rtp_mp2t_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_mp2t_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static void
+gst_rtp_mp2t_depay_class_init (GstRtpMP2TDepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_mp2t_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp2t_depay_setcaps;
+
+ gobject_class->set_property = gst_rtp_mp2t_depay_set_property;
+ gobject_class->get_property = gst_rtp_mp2t_depay_get_property;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp2t_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp2t_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG Transport Stream depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG2 TS from RTP packets (RFC 2250)",
+ "Wim Taymans <wim.taymans@gmail.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>");
+
+ g_object_class_install_property (gobject_class, PROP_SKIP_FIRST_BYTES,
+ g_param_spec_uint ("skip-first-bytes",
+ "Skip first bytes",
+ "The amount of bytes that need to be skipped at the beginning of the payload",
+ 0, G_MAXUINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+}
+
+static void
+gst_rtp_mp2t_depay_init (GstRtpMP2TDepay * rtpmp2tdepay)
+{
+ rtpmp2tdepay->skip_first_bytes = DEFAULT_SKIP_FIRST_BYTES;
+}
+
+static gboolean
+gst_rtp_mp2t_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ GstStructure *structure;
+ gint clock_rate;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000; /* default */
+ depayload->clock_rate = clock_rate;
+
+ srccaps = gst_caps_new_simple ("video/mpegts",
+ "packetsize", G_TYPE_INT, 188,
+ "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ gst_caps_unref (srccaps);
+
+ return res;
+}
+
+static GstBuffer *
+gst_rtp_mp2t_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpMP2TDepay *rtpmp2tdepay;
+ GstBuffer *outbuf;
+ gint payload_len, leftover;
+
+ rtpmp2tdepay = GST_RTP_MP2T_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (G_UNLIKELY (payload_len <= rtpmp2tdepay->skip_first_bytes))
+ goto empty_packet;
+
+ payload_len -= rtpmp2tdepay->skip_first_bytes;
+
+ /* RFC 2250
+ *
+ * 2. Encapsulation of MPEG System and Transport Streams
+ *
+ * For MPEG2 Transport Streams the RTP payload will contain an integral
+ * number of MPEG transport packets.
+ */
+ leftover = payload_len % 188;
+ if (G_UNLIKELY (leftover)) {
+ GST_WARNING ("We don't have an integral number of buffers (leftover: %d)",
+ leftover);
+
+ payload_len -= leftover;
+ }
+
+ outbuf =
+ gst_rtp_buffer_get_payload_subbuffer (rtp,
+ rtpmp2tdepay->skip_first_bytes, payload_len);
+
+ if (outbuf) {
+ GST_DEBUG ("gst_rtp_mp2t_depay_chain: pushing buffer of size %"
+ G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
+
+ gst_rtp_drop_meta (GST_ELEMENT_CAST (depayload), outbuf, 0);
+ }
+
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpmp2tdepay, STREAM, DECODE,
+ (NULL), ("Packet was empty"));
+ return NULL;
+ }
+}
+
+static void
+gst_rtp_mp2t_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpMP2TDepay *rtpmp2tdepay;
+
+ rtpmp2tdepay = GST_RTP_MP2T_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_SKIP_FIRST_BYTES:
+ rtpmp2tdepay->skip_first_bytes = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_mp2t_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpMP2TDepay *rtpmp2tdepay;
+
+ rtpmp2tdepay = GST_RTP_MP2T_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_SKIP_FIRST_BYTES:
+ g_value_set_uint (value, rtpmp2tdepay->skip_first_bytes);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/rtp/gstrtpmp2tdepay.h b/gst/rtp/gstrtpmp2tdepay.h
new file mode 100644
index 0000000000..f1e5d0829b
--- /dev/null
+++ b/gst/rtp/gstrtpmp2tdepay.h
@@ -0,0 +1,58 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MP2T_DEPAY_H__
+#define __GST_RTP_MP2T_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MP2T_DEPAY \
+ (gst_rtp_mp2t_depay_get_type())
+#define GST_RTP_MP2T_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP2T_DEPAY,GstRtpMP2TDepay))
+#define GST_RTP_MP2T_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP2T_DEPAY,GstRtpMP2TDepayClass))
+#define GST_IS_RTP_MP2T_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP2T_DEPAY))
+#define GST_IS_RTP_MP2T_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP2T_DEPAY))
+typedef struct _GstRtpMP2TDepay GstRtpMP2TDepay;
+typedef struct _GstRtpMP2TDepayClass GstRtpMP2TDepayClass;
+
+struct _GstRtpMP2TDepay
+{
+ GstRTPBaseDepayload depayload;
+
+ guint8 skip_first_bytes;
+};
+
+struct _GstRtpMP2TDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_mp2t_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MP2T_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpmp2tpay.c b/gst/rtp/gstrtpmp2tpay.c
new file mode 100644
index 0000000000..ecde5a298a
--- /dev/null
+++ b/gst/rtp/gstrtpmp2tpay.c
@@ -0,0 +1,235 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpmp2tpay.h"
+#include "gstrtputils.h"
+
/* Accept only system-stream MPEG TS with the standard 188-byte packet size;
 * the payloader slices the stream on these packet boundaries. */
static GstStaticPadTemplate gst_rtp_mp2t_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/mpegts,"
        "packetsize=(int)188," "systemstream=(boolean)true")
    );

/* Output RTP with either the static MP2T payload type or a dynamic one;
 * the clock-rate is fixed at 90000 in both cases. */
static GstStaticPadTemplate gst_rtp_mp2t_pay_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_MP2T_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"MP2T\" ; "
        "application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"MP2T\"")
    );

static gboolean gst_rtp_mp2t_pay_setcaps (GstRTPBasePayload * payload,
    GstCaps * caps);
static GstFlowReturn gst_rtp_mp2t_pay_handle_buffer (GstRTPBasePayload *
    payload, GstBuffer * buffer);
static GstFlowReturn gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay);
static void gst_rtp_mp2t_pay_finalize (GObject * object);

#define gst_rtp_mp2t_pay_parent_class parent_class
G_DEFINE_TYPE (GstRTPMP2TPay, gst_rtp_mp2t_pay, GST_TYPE_RTP_BASE_PAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp2tpay, "rtpmp2tpay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_MP2T_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_mp2t_pay_class_init (GstRTPMP2TPayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_mp2t_pay_finalize;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_mp2t_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mp2t_pay_handle_buffer;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp2t_pay_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp2t_pay_src_template);
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG2 Transport Stream payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes MPEG2 TS into RTP packets (RFC 2250)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+}
+
+static void
+gst_rtp_mp2t_pay_init (GstRTPMP2TPay * rtpmp2tpay)
+{
+ GST_RTP_BASE_PAYLOAD (rtpmp2tpay)->clock_rate = 90000;
+ GST_RTP_BASE_PAYLOAD_PT (rtpmp2tpay) = GST_RTP_PAYLOAD_MP2T;
+
+ rtpmp2tpay->adapter = gst_adapter_new ();
+}
+
+static void
+gst_rtp_mp2t_pay_finalize (GObject * object)
+{
+ GstRTPMP2TPay *rtpmp2tpay;
+
+ rtpmp2tpay = GST_RTP_MP2T_PAY (object);
+
+ g_object_unref (rtpmp2tpay->adapter);
+ rtpmp2tpay->adapter = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_rtp_mp2t_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+
+ gst_rtp_base_payload_set_options (payload, "video",
+ payload->pt != GST_RTP_PAYLOAD_MP2T, "MP2T", 90000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+ return res;
+}
+
/* Drain the adapter: repeatedly take as many whole 188-byte TS packets as
 * fit in one MTU and push them as RTP packets.  A trailing partial TS
 * packet stays in the adapter for the next input buffer.  Returns the
 * first non-OK flow return, otherwise GST_FLOW_OK. */
static GstFlowReturn
gst_rtp_mp2t_pay_flush (GstRTPMP2TPay * rtpmp2tpay)
{
  guint avail, mtu;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *outbuf;

  avail = gst_adapter_available (rtpmp2tpay->adapter);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp2tpay);

  while (avail > 0 && (ret == GST_FLOW_OK)) {
    guint towrite;
    guint payload_len;
    guint packet_len;
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, mtu);

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
    /* round down to an integral number of TS packets */
    payload_len -= payload_len % 188;

    /* need whole packets */
    if (!payload_len)
      break;

    /* create buffer to hold the payload */
    outbuf =
        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
        (rtpmp2tpay), 0, 0, 0);

    /* get payload; take_buffer_fast avoids copying when possible */
    paybuf = gst_adapter_take_buffer_fast (rtpmp2tpay->adapter, payload_len);
    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmp2tpay), outbuf, paybuf, 0);
    outbuf = gst_buffer_append (outbuf, paybuf);
    avail -= payload_len;

    /* NOTE(review): every RTP packet produced by one flush carries the
     * same PTS and the full accumulated duration. */
    GST_BUFFER_PTS (outbuf) = rtpmp2tpay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpmp2tpay->duration;

    GST_DEBUG_OBJECT (rtpmp2tpay, "pushing buffer of size %u",
        (guint) gst_buffer_get_size (outbuf));

    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp2tpay), outbuf);
  }

  return ret;
}
+
/* Queue the incoming TS buffer in the adapter; flush full RTP packets
 * when the accumulated data would overflow the MTU/latency budget.
 * Large input buffers (>= 2 TS packets) trigger a second pass via the
 * `again` label so data is not held back unnecessarily. */
static GstFlowReturn
gst_rtp_mp2t_pay_handle_buffer (GstRTPBasePayload * basepayload,
    GstBuffer * buffer)
{
  GstRTPMP2TPay *rtpmp2tpay;
  guint size, avail, packet_len;
  GstClockTime timestamp, duration;
  GstFlowReturn ret;

  rtpmp2tpay = GST_RTP_MP2T_PAY (basepayload);

  size = gst_buffer_get_size (buffer);
  timestamp = GST_BUFFER_PTS (buffer);
  duration = GST_BUFFER_DURATION (buffer);

again:
  ret = GST_FLOW_OK;
  avail = gst_adapter_available (rtpmp2tpay->adapter);

  /* Initialize new RTP payload */
  if (avail == 0) {
    rtpmp2tpay->first_ts = timestamp;
    rtpmp2tpay->duration = duration;
  }

  /* get packet length of previous data and this new data */
  packet_len = gst_rtp_buffer_calc_packet_len (avail + size, 0, 0);

  /* if this buffer is going to overflow the packet, flush what we have,
   * or if upstream is handing us several packets, to keep latency low */
  if (!size || gst_rtp_base_payload_is_filled (basepayload,
          packet_len, rtpmp2tpay->duration + duration)) {
    ret = gst_rtp_mp2t_pay_flush (rtpmp2tpay);
    /* restart the timestamp/duration tracking from this buffer */
    rtpmp2tpay->first_ts = timestamp;
    rtpmp2tpay->duration = duration;

    /* keep filling the payload */
  } else {
    if (GST_CLOCK_TIME_IS_VALID (duration))
      rtpmp2tpay->duration += duration;
  }

  /* copy buffer to adapter; ownership passes to the adapter, so only do
   * this on the first pass (buffer is NULLed afterwards) */
  if (buffer) {
    gst_adapter_push (rtpmp2tpay->adapter, buffer);
    buffer = NULL;
  }

  /* with at least two TS packets in this buffer, loop once more with
   * size == 0 to force a flush of what was just queued */
  if (size >= (188 * 2)) {
    size = 0;
    goto again;
  }

  return ret;

}
diff --git a/gst/rtp/gstrtpmp2tpay.h b/gst/rtp/gstrtpmp2tpay.h
new file mode 100644
index 0000000000..9ed02d9982
--- /dev/null
+++ b/gst/rtp/gstrtpmp2tpay.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
#ifndef __GST_RTP_MP2T_PAY_H__
#define __GST_RTP_MP2T_PAY_H__

#include <gst/gst.h>
#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>

G_BEGIN_DECLS

typedef struct _GstRTPMP2TPay GstRTPMP2TPay;
typedef struct _GstRTPMP2TPayClass GstRTPMP2TPayClass;

/* Standard GObject type/cast/check boilerplate for the MPEG2 TS RTP
 * payloader element. */
#define GST_TYPE_RTP_MP2T_PAY \
  (gst_rtp_mp2t_pay_get_type())
#define GST_RTP_MP2T_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP2T_PAY,GstRTPMP2TPay))
#define GST_RTP_MP2T_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP2T_PAY,GstRTPMP2TPayClass))
#define GST_IS_RTP_MP2T_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP2T_PAY))
#define GST_IS_RTP_MP2T_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP2T_PAY))

struct _GstRTPMP2TPay
{
  GstRTPBasePayload payload;

  /* accumulates TS packets until a full RTP payload can be built */
  GstAdapter *adapter;
  /* PTS of the first buffer that went into the current adapter content */
  GstClockTime first_ts;
  /* accumulated duration of the current adapter content */
  GstClockTime duration;
};

struct _GstRTPMP2TPayClass
{
  GstRTPBasePayloadClass parent_class;
};

GType gst_rtp_mp2t_pay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_MP2T_PAY_H__ */
diff --git a/gst/rtp/gstrtpmp4adepay.c b/gst/rtp/gstrtpmp4adepay.c
new file mode 100644
index 0000000000..f278fc598c
--- /dev/null
+++ b/gst/rtp/gstrtpmp4adepay.c
@@ -0,0 +1,462 @@
+/* GStreamer
+ * Copyright (C) <2007> Nokia Corporation (contact <stefan.kost@nokia.com>)
+ * <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License version 2 as published by the Free Software Foundation.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/base/gstbitreader.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpmp4adepay.h"
+#include "gstrtputils.h"
+
GST_DEBUG_CATEGORY_STATIC (rtpmp4adepay_debug);
#define GST_CAT_DEFAULT (rtpmp4adepay_debug)

/* Output raw MPEG-4 AAC; "framed" starts out false and is switched to
 * true once a marker bit proves the stream is properly framed. */
static GstStaticPadTemplate gst_rtp_mp4a_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/mpeg,"
        "mpegversion = (int) 4," "framed = (boolean) { false, true }, "
        "stream-format = (string) raw")
    );

/* Accept MP4A-LATM RTP streams (RFC 3016). */
static GstStaticPadTemplate gst_rtp_mp4a_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "clock-rate = (int) [1, MAX ], "
        "encoding-name = (string) \"MP4A-LATM\""
        /* All optional parameters
         *
         * "profile-level-id=[1,MAX]"
         * "config="
         */
    )
    );

#define gst_rtp_mp4a_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4ADepay, gst_rtp_mp4a_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4adepay, "rtpmp4adepay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_MP4A_DEPAY, rtp_element_init (plugin));

static void gst_rtp_mp4a_depay_finalize (GObject * object);

static gboolean gst_rtp_mp4a_depay_setcaps (GstRTPBaseDepayload * depayload,
    GstCaps * caps);
static GstBuffer *gst_rtp_mp4a_depay_process (GstRTPBaseDepayload * depayload,
    GstRTPBuffer * rtp);

static GstStateChangeReturn gst_rtp_mp4a_depay_change_state (GstElement *
    element, GstStateChange transition);
+
+
+static void
+gst_rtp_mp4a_depay_class_init (GstRtpMP4ADepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_mp4a_depay_finalize;
+
+ gstelement_class->change_state = gst_rtp_mp4a_depay_change_state;
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_mp4a_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mp4a_depay_setcaps;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp4a_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp4a_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG4 audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG4 audio from RTP packets (RFC 3016)",
+ "Nokia Corporation (contact <stefan.kost@nokia.com>), "
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpmp4adepay_debug, "rtpmp4adepay", 0,
+ "MPEG4 audio RTP Depayloader");
+}
+
/* Instance initializer: create the adapter that accumulates the payload
 * fragments of one AudioMuxElement; the stream starts out unframed. */
static void
gst_rtp_mp4a_depay_init (GstRtpMP4ADepay * rtpmp4adepay)
{
  rtpmp4adepay->adapter = gst_adapter_new ();
  rtpmp4adepay->framed = FALSE;
}
+
+static void
+gst_rtp_mp4a_depay_finalize (GObject * object)
+{
+ GstRtpMP4ADepay *rtpmp4adepay;
+
+ rtpmp4adepay = GST_RTP_MP4A_DEPAY (object);
+
+ g_object_unref (rtpmp4adepay->adapter);
+ rtpmp4adepay->adapter = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
/* AAC sample rates indexed by the 4-bit samplingFrequencyIndex of the
 * AudioSpecificConfig (index 15 means the rate follows explicitly). */
static const guint aac_sample_rates[] = { 96000, 88200, 64000, 48000,
  44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350
};
+
/* Parse the sink caps: read clock-rate and the optional base16 "config"
 * field (a StreamMuxConfig), extract numSubFrames, object type, sample
 * rate and channel count from it, and set the resulting audio/mpeg caps
 * (with codec_data) on the src pad. */
static gboolean
gst_rtp_mp4a_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
  GstStructure *structure;
  GstRtpMP4ADepay *rtpmp4adepay;
  GstCaps *srccaps;
  const gchar *str;
  gint clock_rate;
  gint object_type;
  gint channels = 2;            /* default */
  gboolean res;

  rtpmp4adepay = GST_RTP_MP4A_DEPAY (depayload);

  /* re-detect framing on every caps change */
  rtpmp4adepay->framed = FALSE;

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;         /* default */
  depayload->clock_rate = clock_rate;

  /* NOTE(review): object_type is read from the caps here but never used
   * below; the object type actually applied comes from the config blob. */
  if (!gst_structure_get_int (structure, "object", &object_type))
    object_type = 2;            /* AAC LC default */

  srccaps = gst_caps_new_simple ("audio/mpeg",
      "mpegversion", G_TYPE_INT, 4,
      "framed", G_TYPE_BOOLEAN, FALSE, "channels", G_TYPE_INT, channels,
      "stream-format", G_TYPE_STRING, "raw", NULL);

  if ((str = gst_structure_get_string (structure, "config"))) {
    GValue v = { 0 };

    g_value_init (&v, GST_TYPE_BUFFER);
    if (gst_value_deserialize (&v, str)) {
      GstBuffer *buffer;
      GstMapInfo map;
      guint8 *data;
      gsize size;
      gint i;
      guint32 rate = 0;
      /* NOTE(review): this `channels` shadows the outer gint channels —
       * intentional here since the parsed value replaces the default,
       * but worth renaming to silence -Wshadow. */
      guint8 obj_type = 0, sr_idx = 0, channels = 0;
      GstBitReader br;

      buffer = gst_value_get_buffer (&v);
      gst_buffer_ref (buffer);
      g_value_unset (&v);

      gst_buffer_map (buffer, &map, GST_MAP_READ);
      data = map.data;
      size = map.size;

      if (size < 2) {
        GST_WARNING_OBJECT (depayload, "config too short (%d < 2)",
            (gint) size);
        goto bad_config;
      }

      /* Parse StreamMuxConfig according to ISO/IEC 14496-3:
       *
       * audioMuxVersion == 0 (1 bit)
       * allStreamsSameTimeFraming == 1 (1 bit)
       * numSubFrames == rtpmp4adepay->numSubFrames (6 bits)
       * numProgram == 0 (4 bits)
       * numLayer == 0 (3 bits)
       *
       * We only require audioMuxVersion == 0;
       *
       * The remaining bit of the second byte and the rest of the bits are used
       * for audioSpecificConfig which we need to set in codec_info.
       */
      if ((data[0] & 0x80) != 0x00) {
        GST_WARNING_OBJECT (depayload, "unknown audioMuxVersion 1");
        goto bad_config;
      }

      rtpmp4adepay->numSubFrames = (data[0] & 0x3F);

      GST_LOG_OBJECT (rtpmp4adepay, "numSubFrames %d",
          rtpmp4adepay->numSubFrames);

      /* shift rest of string 15 bits down */
      size -= 2;
      for (i = 0; i < size; i++) {
        data[i] = ((data[i + 1] & 1) << 7) | ((data[i + 2] & 0xfe) >> 1);
      }

      gst_bit_reader_init (&br, data, size);

      /* any object type is fine, we need to copy it to the profile-level-id field. */
      if (!gst_bit_reader_get_bits_uint8 (&br, &obj_type, 5))
        goto bad_config;
      if (obj_type == 0) {
        GST_WARNING_OBJECT (depayload, "invalid object type 0");
        goto bad_config;
      }

      if (!gst_bit_reader_get_bits_uint8 (&br, &sr_idx, 4))
        goto bad_config;
      if (sr_idx >= G_N_ELEMENTS (aac_sample_rates) && sr_idx != 15) {
        GST_WARNING_OBJECT (depayload, "invalid sample rate index %d", sr_idx);
        goto bad_config;
      }
      GST_LOG_OBJECT (rtpmp4adepay, "sample rate index %u", sr_idx);

      if (!gst_bit_reader_get_bits_uint8 (&br, &channels, 4))
        goto bad_config;
      if (channels > 7) {
        GST_WARNING_OBJECT (depayload, "invalid channels %u", (guint) channels);
        goto bad_config;
      }

      /* rtp rate depends on sampling rate of the audio */
      if (sr_idx == 15) {
        /* index of 15 means we get the rate in the next 24 bits */
        if (!gst_bit_reader_get_bits_uint32 (&br, &rate, 24))
          goto bad_config;
      } else if (sr_idx >= G_N_ELEMENTS (aac_sample_rates)) {
        /* unreachable: already rejected above, kept as a guard */
        goto bad_config;
      } else {
        /* else use the rate from the table */
        rate = aac_sample_rates[sr_idx];
      }

      rtpmp4adepay->frame_len = 1024;

      switch (obj_type) {
        case 1:
        case 2:
        case 3:
        case 4:
        case 6:
        case 7:
        {
          /* these object types may carry a frameLengthFlag selecting
           * 960-sample frames instead of the default 1024 */
          guint8 frameLenFlag = 0;

          if (gst_bit_reader_get_bits_uint8 (&br, &frameLenFlag, 1))
            if (frameLenFlag)
              rtpmp4adepay->frame_len = 960;
          break;
        }
        default:
          break;
      }

      /* ignore remaining bit, we're only interested in full bytes */
      gst_buffer_resize (buffer, 0, size);
      gst_buffer_unmap (buffer, &map);
      data = NULL;

      gst_caps_set_simple (srccaps,
          "channels", G_TYPE_INT, (gint) channels,
          "rate", G_TYPE_INT, (gint) rate,
          "codec_data", GST_TYPE_BUFFER, buffer, NULL);
      /* success falls through here on purpose: data is NULL then, so
       * only the buffer ref taken above is released */
    bad_config:
      if (data)
        gst_buffer_unmap (buffer, &map);
      gst_buffer_unref (buffer);
    } else {
      g_warning ("cannot convert config to buffer");
    }
  }
  res = gst_pad_set_caps (depayload->srcpad, srccaps);
  gst_caps_unref (srccaps);

  return res;
}
+
/* Process one RTP packet.  Until framing is proven, payloads are pushed
 * through unchanged; once the first marker bit is seen, payload fragments
 * are accumulated per AudioMuxElement and, on the marker packet, split
 * into numSubFrames+1 subframes that are pushed individually. */
static GstBuffer *
gst_rtp_mp4a_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
{
  GstRtpMP4ADepay *rtpmp4adepay;
  GstBuffer *outbuf;
  GstMapInfo map;

  rtpmp4adepay = GST_RTP_MP4A_DEPAY (depayload);

  /* flush remaining data on discont */
  if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
    gst_adapter_clear (rtpmp4adepay->adapter);
  }

  outbuf = gst_rtp_buffer_get_payload_buffer (rtp);

  if (!rtpmp4adepay->framed) {
    if (gst_rtp_buffer_get_marker (rtp)) {
      GstCaps *caps;

      /* first marker seen: from now on, treat the stream as framed and
       * advertise that on the src caps */
      rtpmp4adepay->framed = TRUE;

      gst_rtp_base_depayload_push (depayload, outbuf);

      caps = gst_pad_get_current_caps (depayload->srcpad);
      caps = gst_caps_make_writable (caps);
      gst_caps_set_simple (caps, "framed", G_TYPE_BOOLEAN, TRUE, NULL);
      gst_pad_set_caps (depayload->srcpad, caps);
      gst_caps_unref (caps);
      return NULL;
    } else {
      /* no framing proven yet: hand the payload to the base class as-is */
      return outbuf;
    }
  }

  outbuf = gst_buffer_make_writable (outbuf);
  GST_BUFFER_PTS (outbuf) = GST_BUFFER_PTS (rtp->buffer);
  gst_adapter_push (rtpmp4adepay->adapter, outbuf);

  /* RTP marker bit indicates the last packet of the AudioMuxElement => create
   * and push a buffer */
  if (gst_rtp_buffer_get_marker (rtp)) {
    guint avail;
    guint i;
    guint8 *data;
    guint pos;
    GstClockTime timestamp;

    avail = gst_adapter_available (rtpmp4adepay->adapter);
    timestamp = gst_adapter_prev_pts (rtpmp4adepay->adapter, NULL);

    GST_LOG_OBJECT (rtpmp4adepay, "have marker and %u available", avail);

    outbuf = gst_adapter_take_buffer (rtpmp4adepay->adapter, avail);
    gst_buffer_map (outbuf, &map, GST_MAP_READ);
    data = map.data;
    /* position in data we are at */
    pos = 0;

    /* looping through the number of sub-frames in the audio payload */
    for (i = 0; i <= rtpmp4adepay->numSubFrames; i++) {
      /* determine payload length and set buffer data pointer accordingly */
      guint skip;
      guint data_len;
      GstBuffer *tmp = NULL;

      /* each subframe starts with a variable length encoding:
       * 0xff bytes add 255 each, the first non-0xff byte terminates */
      data_len = 0;
      for (skip = 0; skip < avail; skip++) {
        data_len += data[skip];
        if (data[skip] != 0xff)
          break;
      }
      skip++;

      /* this can not be possible, we have not enough data or the length
       * decoding failed because we ran out of data. */
      if (skip + data_len > avail)
        goto wrong_size;

      GST_LOG_OBJECT (rtpmp4adepay,
          "subframe %u, header len %u, data len %u, left %u", i, skip, data_len,
          avail);

      /* take data out, skip the header */
      pos += skip;
      tmp = gst_buffer_copy_region (outbuf, GST_BUFFER_COPY_ALL, pos, data_len);

      /* skip data too */
      skip += data_len;
      pos += data_len;

      /* update our pointers with what we consumed */
      data += skip;
      avail -= skip;

      GST_BUFFER_PTS (tmp) = timestamp;
      gst_rtp_drop_non_audio_meta (depayload, tmp);
      gst_rtp_base_depayload_push (depayload, tmp);

      /* shift ts for next buffers; the != -1 compare relies on
       * GST_CLOCK_TIME_NONE being (guint64) -1 */
      if (rtpmp4adepay->frame_len && timestamp != -1
          && depayload->clock_rate != 0) {
        timestamp +=
            gst_util_uint64_scale_int (rtpmp4adepay->frame_len, GST_SECOND,
            depayload->clock_rate);
      }
    }

    /* just a check that lengths match */
    if (avail) {
      GST_ELEMENT_WARNING (depayload, STREAM, DECODE,
          ("Packet invalid"), ("Not all payload consumed: "
              "possible wrongly encoded packet."));
    }

    gst_buffer_unmap (outbuf, &map);
    gst_buffer_unref (outbuf);
  }
  return NULL;

  /* ERRORS */
wrong_size:
  {
    GST_ELEMENT_WARNING (rtpmp4adepay, STREAM, DECODE,
        ("Packet did not validate"), ("wrong packet size"));
    gst_buffer_unmap (outbuf, &map);
    gst_buffer_unref (outbuf);
    return NULL;
  }
}
+
+static GstStateChangeReturn
+gst_rtp_mp4a_depay_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstRtpMP4ADepay *rtpmp4adepay;
+ GstStateChangeReturn ret;
+
+ rtpmp4adepay = GST_RTP_MP4A_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_adapter_clear (rtpmp4adepay->adapter);
+ rtpmp4adepay->frame_len = 0;
+ rtpmp4adepay->numSubFrames = 0;
+ rtpmp4adepay->framed = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtpmp4adepay.h b/gst/rtp/gstrtpmp4adepay.h
new file mode 100644
index 0000000000..c5aaaa3496
--- /dev/null
+++ b/gst/rtp/gstrtpmp4adepay.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2007> Nokia Corporation (contact <stefan.kost@nokia.com>)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License version 2 as published by the Free Software Foundation.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_MP4A_DEPAY_H__
#define __GST_RTP_MP4A_DEPAY_H__

#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/rtp/gstrtpbasedepayload.h>

G_BEGIN_DECLS

/* Standard GObject type/cast/check boilerplate for the MP4A-LATM RTP
 * depayloader element. */
#define GST_TYPE_RTP_MP4A_DEPAY \
  (gst_rtp_mp4a_depay_get_type())
#define GST_RTP_MP4A_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP4A_DEPAY,GstRtpMP4ADepay))
#define GST_RTP_MP4A_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP4A_DEPAY,GstRtpMP4ADepayClass))
#define GST_IS_RTP_MP4A_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP4A_DEPAY))
#define GST_IS_RTP_MP4A_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP4A_DEPAY))

typedef struct _GstRtpMP4ADepay GstRtpMP4ADepay;
typedef struct _GstRtpMP4ADepayClass GstRtpMP4ADepayClass;

struct _GstRtpMP4ADepay
{
  GstRTPBaseDepayload depayload;
  /* accumulates fragments of one AudioMuxElement until the marker bit */
  GstAdapter *adapter;
  /* numSubFrames from the StreamMuxConfig (loop runs numSubFrames+1 times) */
  guint8 numSubFrames;
  /* samples per frame (1024, or 960 when frameLengthFlag is set) */
  guint frame_len;

  /* TRUE once a marker bit proved the stream is properly framed */
  gboolean framed;
};

struct _GstRtpMP4ADepayClass
{
  GstRTPBaseDepayloadClass parent_class;
};

GType gst_rtp_mp4a_depay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_MP4A_DEPAY_H__ */
+
diff --git a/gst/rtp/gstrtpmp4apay.c b/gst/rtp/gstrtpmp4apay.c
new file mode 100644
index 0000000000..bd37062287
--- /dev/null
+++ b/gst/rtp/gstrtpmp4apay.c
@@ -0,0 +1,461 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpmp4apay.h"
+#include "gstrtputils.h"
+
GST_DEBUG_CATEGORY_STATIC (rtpmp4apay_debug);
#define GST_CAT_DEFAULT (rtpmp4apay_debug)

/* FIXME: add framed=(boolean)true once our encoders have this field set
 * on their output caps */
static GstStaticPadTemplate gst_rtp_mp4a_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/mpeg, mpegversion=(int)4, "
        "stream-format=(string)raw")
    );

/* Output MP4A-LATM RTP (RFC 3016) with a dynamic payload type; the
 * clock-rate follows the audio sample rate. */
static GstStaticPadTemplate gst_rtp_mp4a_pay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) [1, MAX ], "
        "encoding-name = (string) \"MP4A-LATM\""
        /* All optional parameters
         *
         * "cpresent = (string) \"0\""
         * "config="
         */
    )
    );

static void gst_rtp_mp4a_pay_finalize (GObject * object);

static gboolean gst_rtp_mp4a_pay_setcaps (GstRTPBasePayload * payload,
    GstCaps * caps);
static GstFlowReturn gst_rtp_mp4a_pay_handle_buffer (GstRTPBasePayload *
    payload, GstBuffer * buffer);

#define gst_rtp_mp4a_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMP4APay, gst_rtp_mp4a_pay, GST_TYPE_RTP_BASE_PAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4apay, "rtpmp4apay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_MP4A_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_mp4a_pay_class_init (GstRtpMP4APayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_mp4a_pay_finalize;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_mp4a_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mp4a_pay_handle_buffer;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp4a_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp4a_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG4 audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG4 audio as RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpmp4apay_debug, "rtpmp4apay", 0,
+ "MP4A-LATM RTP Payloader");
+}
+
/* Instance initializer: default 90 kHz rate until a real sample rate is
 * parsed from codec_data, and profile "1" until the object type is known. */
static void
gst_rtp_mp4a_pay_init (GstRtpMP4APay * rtpmp4apay)
{
  rtpmp4apay->rate = 90000;
  rtpmp4apay->profile = g_strdup ("1");
}
+
+static void
+gst_rtp_mp4a_pay_finalize (GObject * object)
+{
+ GstRtpMP4APay *rtpmp4apay;
+
+ rtpmp4apay = GST_RTP_MP4A_PAY (object);
+
+ g_free (rtpmp4apay->params);
+ rtpmp4apay->params = NULL;
+
+ if (rtpmp4apay->config)
+ gst_buffer_unref (rtpmp4apay->config);
+ rtpmp4apay->config = NULL;
+
+ g_free (rtpmp4apay->profile);
+ rtpmp4apay->profile = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
/* Sample rates indexed by the 4-bit samplingFrequencyIndex of the
 * AudioSpecificConfig; index 15 means the rate is coded explicitly,
 * 13/14 are reserved (0 here). */
static const unsigned int sampling_table[16] = {
  96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
  16000, 12000, 11025, 8000, 7350, 0, 0, 0
};
+
/* Parse the AudioSpecificConfig in `buffer`: extract object type,
 * sampling-frequency index and channel configuration, and store the
 * derived rate, params (channels), streamtype and profile on the
 * payloader.  Returns FALSE (with an element error posted) on any
 * malformed or unsupported config. */
static gboolean
gst_rtp_mp4a_pay_parse_audio_config (GstRtpMP4APay * rtpmp4apay,
    GstBuffer * buffer)
{
  GstMapInfo map;
  guint8 *data;
  gsize size;
  guint8 objectType;
  guint8 samplingIdx;
  guint8 channelCfg;

  gst_buffer_map (buffer, &map, GST_MAP_READ);
  data = map.data;
  size = map.size;

  if (size < 2)
    goto too_short;

  /* any object type is fine, we need to copy it to the profile-level-id field. */
  /* audioObjectType: top 5 bits of the first byte */
  objectType = (data[0] & 0xf8) >> 3;
  if (objectType == 0)
    goto invalid_object;

  /* samplingFrequencyIndex: 4 bits straddling the first two bytes */
  samplingIdx = ((data[0] & 0x07) << 1) | ((data[1] & 0x80) >> 7);
  /* only fixed values for now */
  if (samplingIdx > 12 && samplingIdx != 15)
    goto wrong_freq;

  /* channelConfiguration: next 4 bits */
  channelCfg = ((data[1] & 0x78) >> 3);
  if (channelCfg > 7)
    goto wrong_channels;

  /* rtp rate depends on sampling rate of the audio */
  if (samplingIdx == 15) {
    if (size < 5)
      goto too_short;

    /* index of 15 means we get the rate in the next 24 bits */
    rtpmp4apay->rate = ((data[1] & 0x7f) << 17) |
        ((data[2]) << 9) | ((data[3]) << 1) | ((data[4] & 0x80) >> 7);
  } else {
    /* else use the rate from the table */
    rtpmp4apay->rate = sampling_table[samplingIdx];
  }
  /* extra rtp params contain the number of channels */
  g_free (rtpmp4apay->params);
  rtpmp4apay->params = g_strdup_printf ("%d", channelCfg);
  /* audio stream type */
  rtpmp4apay->streamtype = "5";
  /* profile */
  g_free (rtpmp4apay->profile);
  rtpmp4apay->profile = g_strdup_printf ("%d", objectType);

  GST_DEBUG_OBJECT (rtpmp4apay,
      "objectType: %d, samplingIdx: %d (%d), channelCfg: %d", objectType,
      samplingIdx, rtpmp4apay->rate, channelCfg);

  gst_buffer_unmap (buffer, &map);

  return TRUE;

  /* ERROR */
too_short:
  {
    GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
        (NULL),
        ("config string too short, expected 2 bytes, got %" G_GSIZE_FORMAT,
            size));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
invalid_object:
  {
    GST_ELEMENT_ERROR (rtpmp4apay, STREAM, FORMAT,
        (NULL), ("invalid object type 0"));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
wrong_freq:
  {
    GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
        (NULL), ("unsupported frequency index %d", samplingIdx));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
wrong_channels:
  {
    GST_ELEMENT_ERROR (rtpmp4apay, STREAM, NOT_IMPLEMENTED,
        (NULL), ("unsupported number of channels %d, must < 8", channelCfg));
    gst_buffer_unmap (buffer, &map);
    return FALSE;
  }
}
+
+static gboolean
+gst_rtp_mp4a_pay_new_caps (GstRtpMP4APay * rtpmp4apay)
+{
+ gchar *config;
+ GValue v = { 0 };
+ gboolean res;
+
+ g_value_init (&v, GST_TYPE_BUFFER);
+ gst_value_set_buffer (&v, rtpmp4apay->config);
+ config = gst_value_serialize (&v);
+
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4apay),
+ "cpresent", G_TYPE_STRING, "0", "config", G_TYPE_STRING, config, NULL);
+
+ g_value_unset (&v);
+ g_free (config);
+
+ return res;
+}
+
+/* Configure the payloader from the sink caps: validate that the AAC
+ * stream-format is 'raw', parse the codec_data (AudioSpecificConfig) to
+ * obtain rate/channels/profile, build the StreamMuxConfig blob that will
+ * be advertised in the SDP, and finally set the output caps. */
+static gboolean
+gst_rtp_mp4a_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  GstRtpMP4APay *rtpmp4apay;
+  GstStructure *structure;
+  const GValue *codec_data;
+  gboolean res, framed = TRUE;
+  const gchar *stream_format;
+
+  rtpmp4apay = GST_RTP_MP4A_PAY (payload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* this is already handled by the template caps, but it is better
+   * to leave here to have meaningful warning messages when linking
+   * fails */
+  stream_format = gst_structure_get_string (structure, "stream-format");
+  if (stream_format) {
+    if (strcmp (stream_format, "raw") != 0) {
+      GST_WARNING_OBJECT (rtpmp4apay, "AAC's stream-format must be 'raw', "
+          "%s is not supported", stream_format);
+      return FALSE;
+    }
+  } else {
+    GST_WARNING_OBJECT (rtpmp4apay, "AAC's stream-format not specified, "
+        "assuming 'raw'");
+  }
+
+  codec_data = gst_structure_get_value (structure, "codec_data");
+  if (codec_data) {
+    GST_LOG_OBJECT (rtpmp4apay, "got codec_data");
+    if (G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
+      GstBuffer *buffer, *cbuffer;
+      GstMapInfo map;
+      GstMapInfo cmap;
+      guint i;
+
+      buffer = gst_value_get_buffer (codec_data);
+      GST_LOG_OBJECT (rtpmp4apay, "configuring codec_data");
+
+      /* parse buffer; fills in rate/params/profile/streamtype */
+      res = gst_rtp_mp4a_pay_parse_audio_config (rtpmp4apay, buffer);
+
+      if (!res)
+        goto config_failed;
+
+      gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+      /* make the StreamMuxConfig, we need 15 bits for the header;
+       * map.size + 2 bytes is enough to hold the 15-bit header plus the
+       * original config shifted left by one bit */
+      cbuffer = gst_buffer_new_and_alloc (map.size + 2);
+      gst_buffer_map (cbuffer, &cmap, GST_MAP_WRITE);
+
+      memset (cmap.data, 0, map.size + 2);
+
+      /* Create StreamMuxConfig according to ISO/IEC 14496-3:
+       *
+       * audioMuxVersion           == 0 (1 bit)
+       * allStreamsSameTimeFraming == 1 (1 bit)
+       * numSubFrames              == numSubFrames (6 bits)
+       * numProgram                == 0 (4 bits)
+       * numLayer                  == 0 (3 bits)
+       */
+      cmap.data[0] = 0x40;
+      cmap.data[1] = 0x00;
+
+      /* append the config bits, shifting them 1 bit left so they follow
+       * directly after the 15-bit header written above */
+      for (i = 0; i < map.size; i++) {
+        cmap.data[i + 1] |= ((map.data[i] & 0x80) >> 7);
+        cmap.data[i + 2] |= ((map.data[i] & 0x7f) << 1);
+      }
+
+      gst_buffer_unmap (cbuffer, &cmap);
+      gst_buffer_unmap (buffer, &map);
+
+      /* now we can configure the buffer; drop any previous config */
+      if (rtpmp4apay->config)
+        gst_buffer_unref (rtpmp4apay->config);
+      rtpmp4apay->config = cbuffer;
+    }
+  }
+
+  /* NOTE(review): only a warning — unframed input is still accepted.
+   * Confirm upstream always delivers complete AUs per buffer. */
+  if (gst_structure_get_boolean (structure, "framed", &framed) && !framed) {
+    GST_WARNING_OBJECT (payload, "Need framed AAC data as input!");
+  }
+
+  gst_rtp_base_payload_set_options (payload, "audio", TRUE, "MP4A-LATM",
+      rtpmp4apay->rate);
+
+  res = gst_rtp_mp4a_pay_new_caps (rtpmp4apay);
+
+  return res;
+
+  /* ERRORS */
+config_failed:
+  {
+    GST_DEBUG_OBJECT (rtpmp4apay, "failed to parse config");
+    return FALSE;
+  }
+}
+
+#define RTP_HEADER_LEN 12
+
+/* we expect buffers as exactly one complete AU
+ *
+ * The AU is prefixed with a PayloadLengthInfo: its byte size encoded as a
+ * run of 0xff bytes plus a final remainder byte (each 0xff stands for 255
+ * more bytes). If header + AU do not fit the MTU the AU is fragmented
+ * over several RTP packets; only the first fragment carries the length
+ * header and only the packet completing the AU has the marker bit set.
+ */
+static GstFlowReturn
+gst_rtp_mp4a_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstRtpMP4APay *rtpmp4apay;
+  GstFlowReturn ret;
+  GstBufferList *list;
+  guint mtu;
+  guint offset;
+  gsize size;
+  gboolean fragmented;
+  GstClockTime timestamp;
+
+  ret = GST_FLOW_OK;
+
+  rtpmp4apay = GST_RTP_MP4A_PAY (basepayload);
+
+  offset = 0;
+  size = gst_buffer_get_size (buffer);
+
+  timestamp = GST_BUFFER_PTS (buffer);
+
+  fragmented = FALSE;
+  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp4apay);
+
+  /* NOTE(review): assumes mtu > RTP_HEADER_LEN; mtu <= 12 would divide
+   * by zero here — TODO confirm the base class guarantees a sane MTU */
+  list = gst_buffer_list_new_sized (size / (mtu - RTP_HEADER_LEN) + 1);
+
+  while (size > 0) {
+    guint towrite;
+    GstBuffer *outbuf;
+    guint payload_len;
+    guint packet_len;
+    guint header_len;
+    GstBuffer *paybuf;
+    GstRTPBuffer rtp = { NULL };
+
+    header_len = 0;
+    if (!fragmented) {
+      guint count;
+      /* first packet calculate space for the packet including the header:
+       * one byte per 255 bytes of payload plus the remainder byte */
+      count = size;
+      while (count >= 0xff) {
+        header_len++;
+        count -= 0xff;
+      }
+      header_len++;
+    }
+
+    packet_len = gst_rtp_buffer_calc_packet_len (header_len + size, 0, 0);
+    towrite = MIN (packet_len, mtu);
+    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
+    payload_len -= header_len;
+
+    GST_DEBUG_OBJECT (rtpmp4apay,
+        "avail %" G_GSIZE_FORMAT
+        ", header_len %d, packet_len %d, payload_len %d", size, header_len,
+        packet_len, payload_len);
+
+    /* create buffer to hold the payload. */
+    outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload,
+        header_len, 0, 0);
+
+    /* copy payload */
+    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+    if (!fragmented) {
+      guint8 *payload = gst_rtp_buffer_get_payload (&rtp);
+      guint count;
+
+      /* first packet write the header (run of 0xff + remainder) */
+      count = size;
+      while (count >= 0xff) {
+        *payload++ = 0xff;
+        count -= 0xff;
+      }
+      *payload++ = count;
+    }
+
+    /* marker only if the packet is complete */
+    gst_rtp_buffer_set_marker (&rtp, size == payload_len);
+
+    gst_rtp_buffer_unmap (&rtp);
+
+    /* create a new buf to hold the payload */
+    paybuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL,
+        offset, payload_len);
+
+    /* join memory parts */
+    gst_rtp_copy_audio_meta (rtpmp4apay, outbuf, paybuf);
+    outbuf = gst_buffer_append (outbuf, paybuf);
+    gst_buffer_list_add (list, outbuf);
+    offset += payload_len;
+    size -= payload_len;
+
+    /* copy incoming timestamp (if any) to outgoing buffers; every
+     * fragment of the same AU gets the same PTS */
+    GST_BUFFER_PTS (outbuf) = timestamp;
+
+    fragmented = TRUE;
+  }
+
+  ret =
+      gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmp4apay), list);
+
+  gst_buffer_unref (buffer);
+
+  return ret;
+}
diff --git a/gst/rtp/gstrtpmp4apay.h b/gst/rtp/gstrtpmp4apay.h
new file mode 100644
index 0000000000..997aa35415
--- /dev/null
+++ b/gst/rtp/gstrtpmp4apay.h
@@ -0,0 +1,63 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MP4A_PAY_H__
+#define __GST_RTP_MP4A_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MP4A_PAY \
+ (gst_rtp_mp4a_pay_get_type())
+#define GST_RTP_MP4A_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP4A_PAY,GstRtpMP4APay))
+#define GST_RTP_MP4A_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP4A_PAY,GstRtpMP4APayClass))
+#define GST_IS_RTP_MP4A_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP4A_PAY))
+#define GST_IS_RTP_MP4A_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP4A_PAY))
+
+typedef struct _GstRtpMP4APay GstRtpMP4APay;
+typedef struct _GstRtpMP4APayClass GstRtpMP4APayClass;
+
+struct _GstRtpMP4APay
+{
+  GstRTPBasePayload payload;
+
+  /* sample rate looked up from the AudioSpecificConfig sampling index */
+  gint rate;
+  /* extra fmtp params: number of channels, as a string */
+  gchar *params;
+  /* MPEG-4 audio object type, as a string */
+  gchar *profile;
+  /* MPEG-4 stream type; "5" means audio */
+  const gchar *streamtype;
+  /* StreamMuxConfig blob advertised in the output caps */
+  GstBuffer *config;
+};
+
+struct _GstRtpMP4APayClass
+{
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_mp4a_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MP4A_PAY_H__ */
diff --git a/gst/rtp/gstrtpmp4gdepay.c b/gst/rtp/gstrtpmp4gdepay.c
new file mode 100644
index 0000000000..8ee094d5bf
--- /dev/null
+++ b/gst/rtp/gstrtpmp4gdepay.c
@@ -0,0 +1,811 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpmp4gdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpmp4gdepay_debug);
+#define GST_CAT_DEFAULT (rtpmp4gdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_mp4g_depay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg,"
+ "mpegversion=(int) 4,"
+ "systemstream=(boolean)false;"
+ "audio/mpeg," "mpegversion=(int) 4, " "stream-format=(string)raw")
+ );
+
+static GstStaticPadTemplate gst_rtp_mp4g_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) { \"video\", \"audio\", \"application\" }, "
+ "clock-rate = (int) [1, MAX ], "
+ "encoding-name = (string) \"MPEG4-GENERIC\", "
+ /* required string params */
+ /* "streamtype = (string) { \"4\", \"5\" }, " Not set by Wowza 4 = video, 5 = audio */
+ /* "profile-level-id = (string) [1,MAX], " */
+ /* "config = (string) [1,MAX]" */
+ "mode = (string) { \"generic\", \"CELP-cbr\", \"CELP-vbr\", \"AAC-lbr\", \"AAC-hbr\", \"aac-hbr\" } "
+ /* Optional general parameters */
+ /* "objecttype = (string) [1,MAX], " */
+ /* "constantsize = (string) [1,MAX], " *//* constant size of each AU */
+ /* "constantduration = (string) [1,MAX], " *//* constant duration of each AU */
+ /* "maxdisplacement = (string) [1,MAX], " */
+ /* "de-interleavebuffersize = (string) [1,MAX], " */
+ /* Optional configuration parameters */
+ /* "sizelength = (string) [1, 32], " */
+ /* "indexlength = (string) [1, 32], " */
+ /* "indexdeltalength = (string) [1, 32], " */
+ /* "ctsdeltalength = (string) [1, 32], " */
+ /* "dtsdeltalength = (string) [1, 32], " */
+ /* "randomaccessindication = (string) {0, 1}, " */
+ /* "streamstateindication = (string) [0, 32], " */
+ /* "auxiliarydatasizelength = (string) [0, 32]" */ )
+ );
+
+/* simple bitstream parser, used to read the variable-width AU header
+ * fields (AU-size, AU-index, CTS/DTS deltas, ...) */
+typedef struct
+{
+  const guint8 *data;
+  const guint8 *end;
+  gint head;                    /* bitpos in the cache of next bit */
+  guint64 cache;                /* cached bytes */
+} GstBsParse;
+
+/* Point the parser at @size bytes starting at @data; no bits are
+ * consumed yet. The parser does not own @data. */
+static void
+gst_bs_parse_init (GstBsParse * bs, const guint8 * data, guint size)
+{
+  bs->data = data;
+  bs->end = data + size;
+  bs->head = 0;
+  bs->cache = 0xffffffff;
+}
+
+/* Read the next @n bits (n <= 32) from the bitstream and return them
+ * right-aligned in a guint32. When fewer than @n bits remain, only the
+ * remaining bits are produced. Returns 0 for n == 0. */
+static guint32
+gst_bs_parse_read (GstBsParse * bs, guint n)
+{
+  guint32 res = 0;
+  gint shift;
+
+  if (n == 0)
+    return res;
+
+  /* fill up the cache if we need to */
+  while (bs->head < n) {
+    if (bs->data >= bs->end) {
+      /* we're at the end, can't produce more than head number of bits */
+      n = bs->head;
+      break;
+    }
+    /* shift bytes in cache, moving the head bits of the cache left */
+    bs->cache = (bs->cache << 8) | *bs->data++;
+    bs->head += 8;
+  }
+
+  /* bring the required bits down and truncate */
+  if ((shift = bs->head - n) > 0)
+    res = bs->cache >> shift;
+  else
+    res = bs->cache;
+
+  /* mask out required bits; shift an unsigned 1 so that n == 31 does not
+   * left-shift into the sign bit of a signed int, which is undefined
+   * behaviour in C */
+  if (n < 32)
+    res &= ((guint32) 1 << n) - 1;
+
+  bs->head = shift;
+
+  return res;
+}
+
+
+#define gst_rtp_mp4g_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4GDepay, gst_rtp_mp4g_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4gdepay, "rtpmp4gdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MP4G_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_mp4g_depay_finalize (GObject * object);
+
+static gboolean gst_rtp_mp4g_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_mp4g_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_mp4g_depay_handle_event (GstRTPBaseDepayload * filter,
+ GstEvent * event);
+
+static GstStateChangeReturn gst_rtp_mp4g_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+
+/* Class initialisation: wire up vmethods (finalize, state change, RTP
+ * packet processing, caps negotiation, event handling), register the
+ * static pad templates and the element metadata. */
+static void
+gst_rtp_mp4g_depay_class_init (GstRtpMP4GDepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_mp4g_depay_finalize;
+
+  gstelement_class->change_state = gst_rtp_mp4g_depay_change_state;
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_mp4g_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_mp4g_depay_setcaps;
+  gstrtpbasedepayload_class->handle_event = gst_rtp_mp4g_depay_handle_event;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_mp4g_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_mp4g_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP MPEG4 ES depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts MPEG4 elementary streams from RTP packets (RFC 3640)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpmp4gdepay_debug, "rtpmp4gdepay", 0,
+      "MP4-generic RTP Depayloader");
+}
+
+/* Instance initialisation: create the adapter that accumulates AU
+ * fragments and the queue used to reorder interleaved AUs. */
+static void
+gst_rtp_mp4g_depay_init (GstRtpMP4GDepay * rtpmp4gdepay)
+{
+  rtpmp4gdepay->adapter = gst_adapter_new ();
+  rtpmp4gdepay->packets = g_queue_new ();
+}
+
+/* Release the adapter and the reorder queue. The queue is expected to be
+ * empty here (drained on PAUSED->READY); g_queue_free does not unref
+ * leftover buffers. */
+static void
+gst_rtp_mp4g_depay_finalize (GObject * object)
+{
+  GstRtpMP4GDepay *rtpmp4gdepay;
+
+  rtpmp4gdepay = GST_RTP_MP4G_DEPAY (object);
+
+  g_object_unref (rtpmp4gdepay->adapter);
+  rtpmp4gdepay->adapter = NULL;
+  g_queue_free (rtpmp4gdepay->packets);
+  rtpmp4gdepay->packets = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Read an integer-valued field from @structure that may be stored either
+ * as a string (the usual case for SDP-derived caps) or as a real
+ * G_TYPE_INT. Returns @def when the field is absent; like atoi, a
+ * non-numeric string yields 0. */
+static gint
+gst_rtp_mp4g_depay_parse_int (GstStructure * structure, const gchar * field,
+    gint def)
+{
+  const gchar *str;
+  gint res;
+
+  /* strtol instead of atoi: atoi has undefined behaviour when the value
+   * does not fit an int, strtol saturates instead */
+  if ((str = gst_structure_get_string (structure, field)))
+    return (gint) strtol (str, NULL, 10);
+
+  if (gst_structure_get_int (structure, field, &res))
+    return res;
+
+  return def;
+}
+
+/* Negotiate from the RTP sink caps: derive the src caps from the "media"
+ * field (audio -> raw AAC, video -> MPEG-4 video), read all optional
+ * RFC 3640 fmtp parameters into the instance, attach the "config" string
+ * as codec_data, and set the caps on the src pad. */
+static gboolean
+gst_rtp_mp4g_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstRtpMP4GDepay *rtpmp4gdepay;
+  GstCaps *srccaps = NULL;
+  const gchar *str;
+  gint clock_rate;
+  gint someint;
+  gboolean res;
+
+  rtpmp4gdepay = GST_RTP_MP4G_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 90000;         /* default */
+  depayload->clock_rate = clock_rate;
+
+  rtpmp4gdepay->check_adts = FALSE;
+
+  if ((str = gst_structure_get_string (structure, "media"))) {
+    if (strcmp (str, "audio") == 0) {
+      srccaps = gst_caps_new_simple ("audio/mpeg",
+          "mpegversion", G_TYPE_INT, 4, "stream-format", G_TYPE_STRING, "raw",
+          NULL);
+      /* some senders wrongly prepend ADTS headers; arm the detection */
+      rtpmp4gdepay->check_adts = TRUE;
+      rtpmp4gdepay->warn_adts = TRUE;
+    } else if (strcmp (str, "video") == 0) {
+      srccaps = gst_caps_new_simple ("video/mpeg",
+          "mpegversion", G_TYPE_INT, 4,
+          "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+    }
+  }
+  if (srccaps == NULL)
+    goto unknown_media;
+
+  /* these values are optional and have a default value of 0 (no header) */
+  rtpmp4gdepay->sizelength =
+      gst_rtp_mp4g_depay_parse_int (structure, "sizelength", 0);
+  rtpmp4gdepay->indexlength =
+      gst_rtp_mp4g_depay_parse_int (structure, "indexlength", 0);
+  rtpmp4gdepay->indexdeltalength =
+      gst_rtp_mp4g_depay_parse_int (structure, "indexdeltalength", 0);
+  rtpmp4gdepay->ctsdeltalength =
+      gst_rtp_mp4g_depay_parse_int (structure, "ctsdeltalength", 0);
+  rtpmp4gdepay->dtsdeltalength =
+      gst_rtp_mp4g_depay_parse_int (structure, "dtsdeltalength", 0);
+  someint =
+      gst_rtp_mp4g_depay_parse_int (structure, "randomaccessindication", 0);
+  /* normalise to a 0/1 flag */
+  rtpmp4gdepay->randomaccessindication = someint > 0 ? 1 : 0;
+  rtpmp4gdepay->streamstateindication =
+      gst_rtp_mp4g_depay_parse_int (structure, "streamstateindication", 0);
+  rtpmp4gdepay->auxiliarydatasizelength =
+      gst_rtp_mp4g_depay_parse_int (structure, "auxiliarydatasizelength", 0);
+  rtpmp4gdepay->constantSize =
+      gst_rtp_mp4g_depay_parse_int (structure, "constantsize", 0);
+  rtpmp4gdepay->constantDuration =
+      gst_rtp_mp4g_depay_parse_int (structure, "constantduration", 0);
+  rtpmp4gdepay->maxDisplacement =
+      gst_rtp_mp4g_depay_parse_int (structure, "maxdisplacement", 0);
+
+
+  /* get config string */
+  if ((str = gst_structure_get_string (structure, "config"))) {
+    GValue v = { 0 };
+
+    g_value_init (&v, GST_TYPE_BUFFER);
+    if (gst_value_deserialize (&v, str)) {
+      GstBuffer *buffer;
+
+      buffer = gst_value_get_buffer (&v);
+      gst_caps_set_simple (srccaps,
+          "codec_data", GST_TYPE_BUFFER, buffer, NULL);
+      g_value_unset (&v);
+    } else {
+      /* NOTE(review): v is not unset on this path; harmless since no
+       * buffer was set, but g_value_unset() would be cleaner */
+      g_warning ("cannot convert config to buffer");
+    }
+  }
+
+  res = gst_pad_set_caps (depayload->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+
+  /* ERRORS */
+unknown_media:
+  {
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "Unknown media type");
+    return FALSE;
+  }
+}
+
+/* Drop every buffer still waiting in the reorder queue. */
+static void
+gst_rtp_mp4g_depay_clear_queue (GstRtpMP4GDepay * rtpmp4gdepay)
+{
+  GstBuffer *buf;
+
+  for (buf = g_queue_pop_head (rtpmp4gdepay->packets); buf != NULL;
+      buf = g_queue_pop_head (rtpmp4gdepay->packets))
+    gst_buffer_unref (buf);
+}
+
+/* Reset all depayloading state: drop partially-assembled AU data, forget
+ * all AU-index tracking (-1 means "unknown") and empty the reorder
+ * queue. Called on flush and on state changes. */
+static void
+gst_rtp_mp4g_depay_reset (GstRtpMP4GDepay * rtpmp4gdepay)
+{
+  gst_adapter_clear (rtpmp4gdepay->adapter);
+  rtpmp4gdepay->max_AU_index = -1;
+  rtpmp4gdepay->next_AU_index = -1;
+  rtpmp4gdepay->prev_AU_index = -1;
+  rtpmp4gdepay->prev_rtptime = -1;
+  rtpmp4gdepay->last_AU_index = -1;
+  gst_rtp_mp4g_depay_clear_queue (rtpmp4gdepay);
+}
+
+/* Push one assembled AU downstream. Flags DISCONT when @AU_index is not
+ * the one we expected, strips RTP metas, and records the next expected
+ * AU index. */
+static void
+gst_rtp_mp4g_depay_push_outbuf (GstRtpMP4GDepay * rtpmp4gdepay,
+    GstBuffer * outbuf, guint AU_index)
+{
+  gboolean discont = FALSE;
+
+  if (AU_index != rtpmp4gdepay->next_AU_index) {
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "discont, expected AU_index %u",
+        rtpmp4gdepay->next_AU_index);
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+    discont = TRUE;
+  }
+
+  GST_DEBUG_OBJECT (rtpmp4gdepay, "pushing %sAU_index %u",
+      discont ? "" : "expected ", AU_index);
+
+  gst_rtp_drop_meta (GST_ELEMENT_CAST (rtpmp4gdepay), outbuf, 0);
+  gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmp4gdepay), outbuf);
+  rtpmp4gdepay->next_AU_index = AU_index + 1;
+}
+
+/* Push every queued buffer downstream in queue order; the AU index that
+ * was stashed in the buffer offset is used for discont tracking. */
+static void
+gst_rtp_mp4g_depay_flush_queue (GstRtpMP4GDepay * rtpmp4gdepay)
+{
+  GstBuffer *buf;
+
+  while ((buf = g_queue_pop_head (rtpmp4gdepay->packets)) != NULL) {
+    guint idx = GST_BUFFER_OFFSET (buf);
+
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "next available AU_index %u", idx);
+
+    gst_rtp_mp4g_depay_push_outbuf (rtpmp4gdepay, buf, idx);
+  }
+}
+
+/* Hand one assembled AU to the reordering machinery. If it is the
+ * expected one it is pushed immediately together with any directly
+ * following AUs from the queue; otherwise it is inserted into the queue
+ * sorted by AU index. The AU index travels in GST_BUFFER_OFFSET. */
+static void
+gst_rtp_mp4g_depay_queue (GstRtpMP4GDepay * rtpmp4gdepay, GstBuffer * outbuf)
+{
+  guint AU_index = GST_BUFFER_OFFSET (outbuf);
+
+  if (rtpmp4gdepay->next_AU_index == -1) {
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "Init AU counter %u", AU_index);
+    rtpmp4gdepay->next_AU_index = AU_index;
+  }
+
+  if (rtpmp4gdepay->next_AU_index == AU_index) {
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "pushing expected AU_index %u", AU_index);
+
+    /* we received the expected packet, push it and flush as much as we can from
+     * the queue */
+    gst_rtp_mp4g_depay_push_outbuf (rtpmp4gdepay, outbuf, AU_index);
+
+    while ((outbuf = g_queue_peek_head (rtpmp4gdepay->packets))) {
+      AU_index = GST_BUFFER_OFFSET (outbuf);
+
+      GST_DEBUG_OBJECT (rtpmp4gdepay, "next available AU_index %u", AU_index);
+
+      if (rtpmp4gdepay->next_AU_index == AU_index) {
+        outbuf = g_queue_pop_head (rtpmp4gdepay->packets);
+        gst_rtp_mp4g_depay_push_outbuf (rtpmp4gdepay, outbuf, AU_index);
+      } else {
+        GST_DEBUG_OBJECT (rtpmp4gdepay, "waiting for next AU_index %u",
+            rtpmp4gdepay->next_AU_index);
+        break;
+      }
+    }
+  } else {
+    GList *list;
+
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "queueing AU_index %u", AU_index);
+
+    /* loop the list to skip strictly smaller AU_index buffers */
+    for (list = rtpmp4gdepay->packets->head; list; list = g_list_next (list)) {
+      guint idx;
+      gint gap;
+
+      idx = GST_BUFFER_OFFSET (GST_BUFFER_CAST (list->data));
+
+      /* compare the new seqnum to the one in the buffer; the signed
+       * difference keeps the ordering correct across index wraparound */
+      gap = (gint) (idx - AU_index);
+
+      GST_DEBUG_OBJECT (rtpmp4gdepay, "compare with AU_index %u, gap %d", idx,
+          gap);
+
+      /* AU_index <= idx, we can stop looking */
+      if (G_LIKELY (gap > 0))
+        break;
+    }
+    if (G_LIKELY (list))
+      g_queue_insert_before (rtpmp4gdepay->packets, list, outbuf);
+    else
+      g_queue_push_tail (rtpmp4gdepay->packets, outbuf);
+  }
+}
+
+/* Process one RTP packet (RFC 3640). When AU headers are present
+ * (sizelength > 0) this parses each AU header, reconstructs AU indices
+ * (including the constantDuration heuristics for streams that always
+ * send AU-index 0), collects fragments in the adapter and, on the
+ * marker bit, emits complete AUs through the reorder queue. Without AU
+ * headers the payload is simply accumulated until the marker bit. */
+static GstBuffer *
+gst_rtp_mp4g_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpMP4GDepay *rtpmp4gdepay;
+  GstBuffer *outbuf = NULL;
+  GstClockTime timestamp;
+
+  rtpmp4gdepay = GST_RTP_MP4G_DEPAY (depayload);
+
+  /* flush remaining data on discont */
+  if (GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "received DISCONT");
+    gst_adapter_clear (rtpmp4gdepay->adapter);
+  }
+
+  timestamp = GST_BUFFER_PTS (rtp->buffer);
+
+  {
+    gint payload_len, payload_AU;
+    guint8 *payload;
+    guint32 rtptime;
+    guint AU_headers_len;
+    guint AU_size, AU_index, AU_index_delta, payload_AU_size;
+    gboolean M;
+
+    payload_len = gst_rtp_buffer_get_payload_len (rtp);
+    payload = gst_rtp_buffer_get_payload (rtp);
+
+    GST_DEBUG_OBJECT (rtpmp4gdepay, "received payload of %d", payload_len);
+
+    rtptime = gst_rtp_buffer_get_timestamp (rtp);
+    M = gst_rtp_buffer_get_marker (rtp);
+
+    if (rtpmp4gdepay->sizelength > 0) {
+      gint num_AU_headers, AU_headers_bytes, i;
+      GstBsParse bs;
+
+      if (payload_len < 2)
+        goto short_payload;
+
+      /* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
+       * |AU-headers-length|AU-header|AU-header|      |AU-header|padding|
+       * |                 |   (1)   |   (2)   |      |   (n)  * | bits |
+       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
+       *
+       * The length is 2 bytes and contains the length of the following
+       * AU-headers in bits.
+       */
+      AU_headers_len = (payload[0] << 8) | payload[1];
+      AU_headers_bytes = (AU_headers_len + 7) / 8;
+      /* NOTE(review): assumes each AU header is 16 bits; holds for the
+       * common AAC-hbr mode (13-bit size + 3-bit index) — TODO confirm
+       * for other sizelength/indexlength combinations */
+      num_AU_headers = AU_headers_len / 16;
+
+      GST_DEBUG_OBJECT (rtpmp4gdepay, "AU headers len %d, bytes %d, num %d",
+          AU_headers_len, AU_headers_bytes, num_AU_headers);
+
+      /* skip header */
+      payload += 2;
+      payload_len -= 2;
+
+      if (payload_len < AU_headers_bytes)
+        goto short_payload;
+
+      /* skip special headers, point to first payload AU */
+      payload_AU = 2 + AU_headers_bytes;
+      payload_AU_size = payload_len - AU_headers_bytes;
+
+      if (G_UNLIKELY (rtpmp4gdepay->auxiliarydatasizelength)) {
+        gint aux_size;
+
+        /* point the bitstream parser to the first auxiliary data bit */
+        gst_bs_parse_init (&bs, payload + AU_headers_bytes,
+            payload_len - AU_headers_bytes);
+        aux_size =
+            gst_bs_parse_read (&bs, rtpmp4gdepay->auxiliarydatasizelength);
+        /* convert to bytes */
+        aux_size = (aux_size + 7) / 8;
+        /* AU data then follows auxiliary data */
+        if (payload_AU_size < aux_size)
+          goto short_payload;
+        payload_AU += aux_size;
+        payload_AU_size -= aux_size;
+      }
+
+      /* point the bitstream parser to the first AU header bit */
+      gst_bs_parse_init (&bs, payload, payload_len);
+      AU_index = AU_index_delta = 0;
+
+      for (i = 0; i < num_AU_headers && payload_AU_size > 0; i++) {
+        /* parse AU header
+         *  +---------------------------------------+
+         *  |     AU-size                           |
+         *  +---------------------------------------+
+         *  |     AU-Index / AU-Index-delta         |
+         *  +---------------------------------------+
+         *  |     CTS-flag                          |
+         *  +---------------------------------------+
+         *  |     CTS-delta                         |
+         *  +---------------------------------------+
+         *  |     DTS-flag                          |
+         *  +---------------------------------------+
+         *  |     DTS-delta                         |
+         *  +---------------------------------------+
+         *  |     RAP-flag                          |
+         *  +---------------------------------------+
+         *  |     Stream-state                      |
+         *  +---------------------------------------+
+         */
+        AU_size = gst_bs_parse_read (&bs, rtpmp4gdepay->sizelength);
+
+        /* calculate the AU_index, which is only on the first AU of the packet
+         * and the AU_index_delta on the other AUs. This will be used to
+         * reconstruct the AU ordering when interleaving. */
+        if (i == 0) {
+          AU_index = gst_bs_parse_read (&bs, rtpmp4gdepay->indexlength);
+
+          GST_DEBUG_OBJECT (rtpmp4gdepay, "AU index %u", AU_index);
+
+          if (AU_index == 0 && rtpmp4gdepay->prev_AU_index == 0) {
+            gint diff;
+            gint cd;
+
+            /* if we see two consecutive packets with AU_index of 0, we can
+             * assume we have constantDuration packets. Since we don't have
+             * the index we must use the AU duration to calculate the
+             * index. Get the diff between the timestamps first, this can be
+             * positive or negative. */
+            if (rtpmp4gdepay->prev_rtptime <= rtptime)
+              diff = rtptime - rtpmp4gdepay->prev_rtptime;
+            else
+              diff = -(rtpmp4gdepay->prev_rtptime - rtptime);
+
+            /* if no constantDuration was given, make one */
+            if (rtpmp4gdepay->constantDuration != 0) {
+              cd = rtpmp4gdepay->constantDuration;
+              GST_DEBUG_OBJECT (depayload, "using constantDuration %d", cd);
+            } else if (rtpmp4gdepay->prev_AU_num > 0) {
+              /* use number of packets and of previous frame */
+              cd = diff / rtpmp4gdepay->prev_AU_num;
+              GST_DEBUG_OBJECT (depayload, "guessing constantDuration %d", cd);
+              if (!GST_BUFFER_IS_DISCONT (rtp->buffer)) {
+                /* rfc3640 - 3.2.3.2
+                 * if we see two consecutive packets with AU_index of 0 and
+                 * there has been no discontinuity, we must conclude that this
+                 * value of constantDuration is correct from now on. */
+                GST_DEBUG_OBJECT (depayload,
+                    "constantDuration of %d detected", cd);
+                rtpmp4gdepay->constantDuration = cd;
+              }
+            } else {
+              /* assume this frame has the same number of packets as the
+               * previous one */
+              cd = diff / num_AU_headers;
+              GST_DEBUG_OBJECT (depayload, "guessing constantDuration %d", cd);
+            }
+
+            if (cd > 0) {
+              /* get the number of packets by dividing with the duration */
+              diff /= cd;
+            } else {
+              diff = 0;
+            }
+
+            rtpmp4gdepay->last_AU_index += diff;
+            rtpmp4gdepay->prev_AU_index = AU_index;
+
+            AU_index = rtpmp4gdepay->last_AU_index;
+
+            GST_DEBUG_OBJECT (rtpmp4gdepay, "diff %d, AU index %u", diff,
+                AU_index);
+          } else {
+            rtpmp4gdepay->prev_AU_index = AU_index;
+            rtpmp4gdepay->last_AU_index = AU_index;
+          }
+
+          /* keep track of the highest AU_index */
+          if (rtpmp4gdepay->max_AU_index != -1
+              && rtpmp4gdepay->max_AU_index <= AU_index) {
+            GST_DEBUG_OBJECT (rtpmp4gdepay, "new interleave group, flushing");
+            /* a new interleave group started, flush */
+            gst_rtp_mp4g_depay_flush_queue (rtpmp4gdepay);
+          }
+          if (G_UNLIKELY (!rtpmp4gdepay->maxDisplacement &&
+                  rtpmp4gdepay->max_AU_index != -1
+                  && rtpmp4gdepay->max_AU_index >= AU_index)) {
+            GstBuffer *outbuf;
+
+            /* some broken non-interleaved streams have AU-index jumping around
+             * all over the place, apparently assuming receiver disregards */
+            GST_DEBUG_OBJECT (rtpmp4gdepay, "non-interleaved broken AU indices;"
+                " forcing continuous flush");
+            /* reset AU to avoid repeated DISCONT in such case */
+            outbuf = g_queue_peek_head (rtpmp4gdepay->packets);
+            if (G_LIKELY (outbuf)) {
+              rtpmp4gdepay->next_AU_index = GST_BUFFER_OFFSET (outbuf);
+              gst_rtp_mp4g_depay_flush_queue (rtpmp4gdepay);
+            }
+            /* rebase next_AU_index to current rtp's first AU_index */
+            rtpmp4gdepay->next_AU_index = AU_index;
+          }
+          rtpmp4gdepay->prev_rtptime = rtptime;
+          rtpmp4gdepay->prev_AU_num = num_AU_headers;
+        } else {
+          AU_index_delta =
+              gst_bs_parse_read (&bs, rtpmp4gdepay->indexdeltalength);
+          AU_index += AU_index_delta + 1;
+        }
+        /* keep track of highest AU_index */
+        if (rtpmp4gdepay->max_AU_index == -1
+            || AU_index > rtpmp4gdepay->max_AU_index)
+          rtpmp4gdepay->max_AU_index = AU_index;
+
+        /* the presentation time offset, a 2s-complement value, we need this to
+         * calculate the timestamp on the output packet. */
+        if (rtpmp4gdepay->ctsdeltalength > 0) {
+          if (gst_bs_parse_read (&bs, 1))
+            gst_bs_parse_read (&bs, rtpmp4gdepay->ctsdeltalength);
+        }
+        /* the decoding time offset, a 2s-complement value */
+        if (rtpmp4gdepay->dtsdeltalength > 0) {
+          if (gst_bs_parse_read (&bs, 1))
+            gst_bs_parse_read (&bs, rtpmp4gdepay->dtsdeltalength);
+        }
+        /* RAP-flag to indicate that the AU contains a keyframe */
+        if (rtpmp4gdepay->randomaccessindication)
+          gst_bs_parse_read (&bs, 1);
+        /* stream-state */
+        if (rtpmp4gdepay->streamstateindication > 0)
+          gst_bs_parse_read (&bs, rtpmp4gdepay->streamstateindication);
+
+        GST_DEBUG_OBJECT (rtpmp4gdepay, "size %d, index %d, delta %d", AU_size,
+            AU_index, AU_index_delta);
+
+        /* fragmented pakets have the AU_size set to the size of the
+         * unfragmented AU. */
+        if (AU_size > payload_AU_size)
+          AU_size = payload_AU_size;
+
+        /* collect stuff in the adapter, strip header from payload and push in
+         * the adapter */
+        outbuf =
+            gst_rtp_buffer_get_payload_subbuffer (rtp, payload_AU, AU_size);
+        gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
+
+        if (M) {
+          guint32 v = 0;
+          guint avail;
+
+          /* packet is complete, flush */
+          avail = gst_adapter_available (rtpmp4gdepay->adapter);
+
+          /* Some broken senders send ADTS headers (e.g. some Sony cameras).
+           * Try to detect those and skip them (still needs config set), but
+           * don't check every frame, only the first (unless we detect ADTS) */
+          if (rtpmp4gdepay->check_adts && avail >= 7) {
+            if (gst_adapter_masked_scan_uint32_peek (rtpmp4gdepay->adapter,
+                    0xfffe0000, 0xfff00000, 0, 4, &v) == 0) {
+              /* ADTS header is 9 bytes with CRC, 7 without */
+              guint adts_hdr_len = (((v >> 16) & 0x1) == 0) ? 9 : 7;
+              if (avail > adts_hdr_len) {
+                if (rtpmp4gdepay->warn_adts) {
+                  GST_WARNING_OBJECT (rtpmp4gdepay, "Detected ADTS header of "
+                      "%u bytes, skipping", adts_hdr_len);
+                  rtpmp4gdepay->warn_adts = FALSE;
+                }
+                gst_adapter_flush (rtpmp4gdepay->adapter, adts_hdr_len);
+                avail -= adts_hdr_len;
+              }
+            } else {
+              rtpmp4gdepay->check_adts = FALSE;
+              rtpmp4gdepay->warn_adts = TRUE;
+            }
+          }
+
+          outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
+
+          /* copy some of the fields we calculated above on the buffer. We also
+           * copy the AU_index so that we can sort the packets in our queue. */
+          GST_BUFFER_PTS (outbuf) = timestamp;
+          GST_BUFFER_OFFSET (outbuf) = AU_index;
+
+          if (rtpmp4gdepay->constantDuration != 0) {
+            /* if we have constantDuration, calculate timestamp for next AU
+             * in this RTP packet. */
+            timestamp += (rtpmp4gdepay->constantDuration * GST_SECOND) /
+                depayload->clock_rate;
+          } else {
+            /* otherwise, make sure we don't use the timestamp again for other
+             * AUs. */
+            timestamp = GST_CLOCK_TIME_NONE;
+          }
+
+          GST_DEBUG_OBJECT (depayload,
+              "pushing buffer of size %" G_GSIZE_FORMAT,
+              gst_buffer_get_size (outbuf));
+
+          gst_rtp_mp4g_depay_queue (rtpmp4gdepay, outbuf);
+
+        }
+        payload_AU += AU_size;
+        payload_AU_size -= AU_size;
+      }
+    } else {
+      /* push complete buffer in adapter */
+      outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, 0, payload_len);
+      gst_adapter_push (rtpmp4gdepay->adapter, outbuf);
+
+      /* if this was the last packet of the VOP, create and push a buffer */
+      if (M) {
+        guint avail;
+
+        avail = gst_adapter_available (rtpmp4gdepay->adapter);
+
+        outbuf = gst_adapter_take_buffer (rtpmp4gdepay->adapter, avail);
+
+        GST_DEBUG ("gst_rtp_mp4g_depay_chain: pushing buffer of size %"
+            G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
+
+        return outbuf;
+      }
+    }
+  }
+
+  return NULL;
+
+  /* ERRORS */
+short_payload:
+  {
+    GST_ELEMENT_WARNING (rtpmp4gdepay, STREAM, DECODE,
+        ("Packet payload was too short."), (NULL));
+    return NULL;
+  }
+}
+
+/* Event hook: reset all depayloader state on FLUSH_STOP before letting
+ * the base class handle the event. */
+static gboolean
+gst_rtp_mp4g_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
+{
+  GstRtpMP4GDepay *self = GST_RTP_MP4G_DEPAY (filter);
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
+    gst_rtp_mp4g_depay_reset (self);
+
+  return
+      GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter,
+      event);
+}
+
+/* State change: reset all depayloading state both when going up
+ * (READY->PAUSED, before the parent transition) and when going down
+ * (PAUSED->READY, after it), so the queue/adapter never carry stale
+ * data across a restart. */
+static GstStateChangeReturn
+gst_rtp_mp4g_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpMP4GDepay *rtpmp4gdepay;
+  GstStateChangeReturn ret;
+
+  rtpmp4gdepay = GST_RTP_MP4G_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_rtp_mp4g_depay_reset (rtpmp4gdepay);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_rtp_mp4g_depay_reset (rtpmp4gdepay);
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpmp4gdepay.h b/gst/rtp/gstrtpmp4gdepay.h
new file mode 100644
index 0000000000..a6a88a0d4b
--- /dev/null
+++ b/gst/rtp/gstrtpmp4gdepay.h
@@ -0,0 +1,87 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MP4G_DEPAY_H__
+#define __GST_RTP_MP4G_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MP4G_DEPAY \
+ (gst_rtp_mp4g_depay_get_type())
+#define GST_RTP_MP4G_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP4G_DEPAY,GstRtpMP4GDepay))
+#define GST_RTP_MP4G_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP4G_DEPAY,GstRtpMP4GDepayClass))
+#define GST_IS_RTP_MP4G_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP4G_DEPAY))
+#define GST_IS_RTP_MP4G_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP4G_DEPAY))
+
+typedef struct _GstRtpMP4GDepay GstRtpMP4GDepay;
+typedef struct _GstRtpMP4GDepayClass GstRtpMP4GDepayClass;
+
+/* Instance state for the MPEG4-generic (RFC 3640) depayloader. */
+struct _GstRtpMP4GDepay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* stream properties signalled in the caps */
+  gint profile_level_id;
+  gint streamtype;
+
+  /* AU packing parameters (constantsize / constantduration /
+   * maxdisplacement fmtp parameters) */
+  gint constantSize;
+  gint constantDuration;
+  gint maxDisplacement;
+
+  /* AU-header field sizes/flags as signalled in the fmtp parameters */
+  gint sizelength;
+  gint indexlength;
+  gint indexdeltalength;
+  gint ctsdeltalength;
+  gint dtsdeltalength;
+  gint randomaccessindication;
+  gint streamstateindication;
+  gint auxiliarydatasizelength;
+
+  /* interleaving / reordering bookkeeping */
+  guint max_AU_index;
+  guint prev_AU_index;
+  guint last_AU_index;
+  guint next_AU_index;
+  guint32 prev_rtptime;
+  guint prev_AU_num;
+
+  gboolean check_adts;                  /* check for ADTS headers */
+  gboolean warn_adts;                   /* warn about ADTS headers */
+
+  /* queue of buffered packets for de-interleaving */
+  GQueue *packets;
+
+  /* accumulates AU fragments until an AU is complete */
+  GstAdapter *adapter;
+};
+
+struct _GstRtpMP4GDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_mp4g_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MP4G_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpmp4gpay.c b/gst/rtp/gstrtpmp4gpay.c
new file mode 100644
index 0000000000..7e61e9f20d
--- /dev/null
+++ b/gst/rtp/gstrtpmp4gpay.c
@@ -0,0 +1,637 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/base/gstbitreader.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpmp4gpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpmp4gpay_debug);
+#define GST_CAT_DEFAULT (rtpmp4gpay_debug)
+
+static GstStaticPadTemplate gst_rtp_mp4g_pay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg,"
+ "mpegversion=(int) 4,"
+ "systemstream=(boolean)false;"
+ "audio/mpeg," "mpegversion=(int) 4, " "stream-format=(string) raw")
+ );
+
+static GstStaticPadTemplate gst_rtp_mp4g_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) { \"video\", \"audio\", \"application\" }, "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [1, MAX ], "
+ "encoding-name = (string) \"MPEG4-GENERIC\", "
+ /* required string params */
+ "streamtype = (string) { \"4\", \"5\" }, " /* 4 = video, 5 = audio */
+ /* "profile-level-id = (string) [1,MAX], " */
+ /* "config = (string) [1,MAX]" */
+ "mode = (string) { \"generic\", \"CELP-cbr\", \"CELP-vbr\", \"AAC-lbr\", \"AAC-hbr\" } "
+ /* Optional general parameters */
+ /* "objecttype = (string) [1,MAX], " */
+ /* "constantsize = (string) [1,MAX], " *//* constant size of each AU */
+ /* "constantduration = (string) [1,MAX], " *//* constant duration of each AU */
+ /* "maxdisplacement = (string) [1,MAX], " */
+ /* "de-interleavebuffersize = (string) [1,MAX], " */
+ /* Optional configuration parameters */
+ /* "sizelength = (string) [1, 16], " *//* max 16 bits, should be enough... */
+ /* "indexlength = (string) [1, 8], " */
+ /* "indexdeltalength = (string) [1, 8], " */
+ /* "ctsdeltalength = (string) [1, 64], " */
+ /* "dtsdeltalength = (string) [1, 64], " */
+ /* "randomaccessindication = (string) {0, 1}, " */
+ /* "streamstateindication = (string) [0, 64], " */
+ /* "auxiliarydatasizelength = (string) [0, 64]" */ )
+ );
+
+
+static void gst_rtp_mp4g_pay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_mp4g_pay_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_rtp_mp4g_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_mp4g_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+static gboolean gst_rtp_mp4g_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+
+#define gst_rtp_mp4g_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4GPay, gst_rtp_mp4g_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4gpay, "rtpmp4gpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MP4G_PAY, rtp_element_init (plugin));
+
+/* GObject class initialisation: install the vfuncs, pad templates and
+ * element metadata, and set up the debug category. */
+static void
+gst_rtp_mp4g_pay_class_init (GstRtpMP4GPayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_mp4g_pay_finalize;
+
+  gstelement_class->change_state = gst_rtp_mp4g_pay_change_state;
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_mp4g_pay_setcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_mp4g_pay_handle_buffer;
+  gstrtpbasepayload_class->sink_event = gst_rtp_mp4g_pay_sink_event;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_mp4g_pay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_mp4g_pay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP MPEG4 ES payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payload MPEG4 elementary streams as RTP packets (RFC 3640)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpmp4gpay_debug, "rtpmp4gpay", 0,
+      "MP4-generic RTP Payloader");
+}
+
+/* Instance init: create the adapter that collects pending AU data. */
+static void
+gst_rtp_mp4g_pay_init (GstRtpMP4GPay * rtpmp4gpay)
+{
+  rtpmp4gpay->adapter = gst_adapter_new ();
+}
+
+/* Drop any AU data still buffered in the adapter. */
+static void
+gst_rtp_mp4g_pay_reset (GstRtpMP4GPay * rtpmp4gpay)
+{
+  GST_DEBUG_OBJECT (rtpmp4gpay, "reset");
+
+  gst_adapter_clear (rtpmp4gpay->adapter);
+}
+
+/* Release all negotiated stream parameters on top of a reset.
+ * streamtype and mode point at static string literals elsewhere in this
+ * file, so they are only NULLed, never freed. */
+static void
+gst_rtp_mp4g_pay_cleanup (GstRtpMP4GPay * rtpmp4gpay)
+{
+  gst_rtp_mp4g_pay_reset (rtpmp4gpay);
+
+  g_free (rtpmp4gpay->params);
+  rtpmp4gpay->params = NULL;
+
+  if (rtpmp4gpay->config)
+    gst_buffer_unref (rtpmp4gpay->config);
+  rtpmp4gpay->config = NULL;
+
+  g_free (rtpmp4gpay->profile);
+  rtpmp4gpay->profile = NULL;
+
+  rtpmp4gpay->streamtype = NULL;
+  rtpmp4gpay->mode = NULL;
+
+  rtpmp4gpay->frame_len = 0;
+}
+
+/* GObject finalize: release all stream parameters and the adapter. */
+static void
+gst_rtp_mp4g_pay_finalize (GObject * object)
+{
+  GstRtpMP4GPay *rtpmp4gpay;
+
+  rtpmp4gpay = GST_RTP_MP4G_PAY (object);
+
+  gst_rtp_mp4g_pay_cleanup (rtpmp4gpay);
+
+  g_object_unref (rtpmp4gpay->adapter);
+  rtpmp4gpay->adapter = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* samplingFrequencyIndex → sample rate; indices 13/14 are reserved
+ * (0 here) and 15 is the escape value handled separately in
+ * gst_rtp_mp4g_pay_parse_audio_config(). */
+static const unsigned int sampling_table[16] = {
+  96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
+  16000, 12000, 11025, 8000, 7350, 0, 0, 0
+};
+
+/* Parse an AudioSpecificConfig (ISO/IEC 14496-3 layout: 5-bit
+ * audioObjectType, 4-bit samplingFrequencyIndex, 4-bit
+ * channelConfiguration) from codec_data and derive the RTP parameters
+ * for the audio case: clock rate, AAC frame length, channel count
+ * (encoding-params), streamtype "5", mode "AAC-hbr" and profile.
+ *
+ * Returns TRUE on success; on failure an element error is posted and
+ * FALSE is returned. The buffer is unmapped on every exit path. */
+static gboolean
+gst_rtp_mp4g_pay_parse_audio_config (GstRtpMP4GPay * rtpmp4gpay,
+    GstBuffer * buffer)
+{
+  GstMapInfo map;
+  guint8 objectType = 0;
+  guint8 samplingIdx = 0;
+  guint8 channelCfg = 0;
+  GstBitReader br;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+  gst_bit_reader_init (&br, map.data, map.size);
+
+  /* any object type is fine, we need to copy it to the profile-level-id field. */
+  if (!gst_bit_reader_get_bits_uint8 (&br, &objectType, 5))
+    goto too_short;
+  if (objectType == 0)
+    goto invalid_object;
+
+  if (!gst_bit_reader_get_bits_uint8 (&br, &samplingIdx, 4))
+    goto too_short;
+  /* only fixed values for now */
+  if (samplingIdx > 12 && samplingIdx != 15)
+    goto wrong_freq;
+
+  if (!gst_bit_reader_get_bits_uint8 (&br, &channelCfg, 4))
+    goto too_short;
+  if (channelCfg > 7)
+    goto wrong_channels;
+
+  /* rtp rate depends on sampling rate of the audio */
+  if (samplingIdx == 15) {
+    guint32 rate = 0;
+
+    /* index of 15 means we get the rate in the next 24 bits */
+    if (!gst_bit_reader_get_bits_uint32 (&br, &rate, 24))
+      goto too_short;
+
+    rtpmp4gpay->rate = rate;
+  } else {
+    /* else use the rate from the table */
+    rtpmp4gpay->rate = sampling_table[samplingIdx];
+  }
+
+  /* default AAC frame length; the flag read below may select 960 */
+  rtpmp4gpay->frame_len = 1024;
+
+  /* object types whose config carries a frame-length flag next */
+  switch (objectType) {
+    case 1:
+    case 2:
+    case 3:
+    case 4:
+    case 6:
+    case 7:
+    {
+      guint8 frameLenFlag = 0;
+
+      if (gst_bit_reader_get_bits_uint8 (&br, &frameLenFlag, 1))
+        if (frameLenFlag)
+          rtpmp4gpay->frame_len = 960;
+
+      break;
+    }
+    default:
+      break;
+  }
+
+  /* extra rtp params contain the number of channels */
+  g_free (rtpmp4gpay->params);
+  rtpmp4gpay->params = g_strdup_printf ("%d", channelCfg);
+  /* audio stream type */
+  rtpmp4gpay->streamtype = "5";
+  /* mode only high bitrate for now */
+  rtpmp4gpay->mode = "AAC-hbr";
+  /* profile */
+  g_free (rtpmp4gpay->profile);
+  rtpmp4gpay->profile = g_strdup_printf ("%d", objectType);
+
+  GST_DEBUG_OBJECT (rtpmp4gpay,
+      "objectType: %d, samplingIdx: %d (%d), channelCfg: %d, frame_len %d",
+      objectType, samplingIdx, rtpmp4gpay->rate, channelCfg,
+      rtpmp4gpay->frame_len);
+
+  gst_buffer_unmap (buffer, &map);
+  return TRUE;
+
+  /* ERROR */
+too_short:
+  {
+    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
+        (NULL), ("config string too short"));
+    gst_buffer_unmap (buffer, &map);
+    return FALSE;
+  }
+invalid_object:
+  {
+    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
+        (NULL), ("invalid object type"));
+    gst_buffer_unmap (buffer, &map);
+    return FALSE;
+  }
+wrong_freq:
+  {
+    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
+        (NULL), ("unsupported frequency index %d", samplingIdx));
+    gst_buffer_unmap (buffer, &map);
+    return FALSE;
+  }
+wrong_channels:
+  {
+    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, NOT_IMPLEMENTED,
+        (NULL), ("unsupported number of channels %d, must < 8", channelCfg));
+    gst_buffer_unmap (buffer, &map);
+    return FALSE;
+  }
+}
+
+#define VOS_STARTCODE 0x000001B0
+
+/* Parse an MPEG-4 video config blob from codec_data and derive the RTP
+ * parameters for the video case: profile (from the byte following a VOS
+ * startcode, or "1" if absent), fixed 90 kHz clock rate, streamtype "4"
+ * and mode "generic".
+ *
+ * Returns TRUE on success; FALSE (with an element error posted) when
+ * the config is too short to hold a startcode plus profile byte. */
+static gboolean
+gst_rtp_mp4g_pay_parse_video_config (GstRtpMP4GPay * rtpmp4gpay,
+    GstBuffer * buffer)
+{
+  GstMapInfo map;
+  guint32 code;
+
+  gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+  if (map.size < 5)
+    goto too_short;
+
+  code = GST_READ_UINT32_BE (map.data);
+
+  g_free (rtpmp4gpay->profile);
+  if (code == VOS_STARTCODE) {
+    /* get profile: the byte right after the startcode */
+    rtpmp4gpay->profile = g_strdup_printf ("%d", (gint) map.data[4]);
+  } else {
+    GST_ELEMENT_WARNING (rtpmp4gpay, STREAM, FORMAT,
+        (NULL), ("profile not found in config string, assuming \'1\'"));
+    rtpmp4gpay->profile = g_strdup ("1");
+  }
+
+  /* fixed rate */
+  rtpmp4gpay->rate = 90000;
+  /* video stream type */
+  rtpmp4gpay->streamtype = "4";
+  /* no params for video; free any value left over from a previous
+   * (audio) negotiation instead of leaking it */
+  g_free (rtpmp4gpay->params);
+  rtpmp4gpay->params = NULL;
+  /* mode */
+  rtpmp4gpay->mode = "generic";
+
+  GST_LOG_OBJECT (rtpmp4gpay, "profile %s", rtpmp4gpay->profile);
+
+  gst_buffer_unmap (buffer, &map);
+
+  return TRUE;
+
+  /* ERROR */
+too_short:
+  {
+    GST_ELEMENT_ERROR (rtpmp4gpay, STREAM, FORMAT,
+        (NULL), ("config string too short"));
+    gst_buffer_unmap (buffer, &map);
+    return FALSE;
+  }
+}
+
+/* Build and set the output RTP caps from the parsed stream parameters.
+ * The fixed sizelength/indexlength/indexdeltalength values (13/3/3)
+ * match the AU-header layout written in gst_rtp_mp4g_pay_flush().
+ * MP4GCAPS expands inside the set_outcaps vararg list; the serialized
+ * config string is freed again afterwards. */
+static gboolean
+gst_rtp_mp4g_pay_new_caps (GstRtpMP4GPay * rtpmp4gpay)
+{
+  gchar *config;
+  GValue v = { 0 };
+  gboolean res;
+
+#define MP4GCAPS						\
+  "streamtype", G_TYPE_STRING, rtpmp4gpay->streamtype, 	\
+  "profile-level-id", G_TYPE_STRING, rtpmp4gpay->profile,	\
+  "mode", G_TYPE_STRING, rtpmp4gpay->mode,			\
+  "config", G_TYPE_STRING, config,				\
+  "sizelength", G_TYPE_STRING, "13",				\
+  "indexlength", G_TYPE_STRING, "3",				\
+  "indexdeltalength", G_TYPE_STRING, "3",			\
+  NULL
+
+  /* serialize the config buffer into the "config" caps string */
+  g_value_init (&v, GST_TYPE_BUFFER);
+  gst_value_set_buffer (&v, rtpmp4gpay->config);
+  config = gst_value_serialize (&v);
+
+  /* hmm, silly */
+  if (rtpmp4gpay->params) {
+    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4gpay),
+        "encoding-params", G_TYPE_STRING, rtpmp4gpay->params, MP4GCAPS);
+  } else {
+    res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4gpay),
+        MP4GCAPS);
+  }
+
+  g_value_unset (&v);
+  g_free (config);
+
+#undef MP4GCAPS
+  return res;
+}
+
+/* Negotiate from the sink caps: dispatch codec_data to the audio or
+ * video config parser depending on the caps name, keep a copy of
+ * codec_data, then set the payloader options and output caps.
+ * Fails when codec_data is missing or cannot be parsed, since the
+ * clock rate and config string cannot be derived without it. */
+static gboolean
+gst_rtp_mp4g_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  GstRtpMP4GPay *rtpmp4gpay;
+  GstStructure *structure;
+  const GValue *codec_data;
+  const gchar *media_type = NULL;
+  gboolean res;
+
+  rtpmp4gpay = GST_RTP_MP4G_PAY (payload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  codec_data = gst_structure_get_value (structure, "codec_data");
+  if (codec_data) {
+    GST_LOG_OBJECT (rtpmp4gpay, "got codec_data");
+    if (G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
+      GstBuffer *buffer;
+      const gchar *name;
+
+      buffer = gst_value_get_buffer (codec_data);
+      GST_LOG_OBJECT (rtpmp4gpay, "configuring codec_data");
+
+      name = gst_structure_get_name (structure);
+
+      /* parse buffer */
+      if (!strcmp (name, "audio/mpeg")) {
+        res = gst_rtp_mp4g_pay_parse_audio_config (rtpmp4gpay, buffer);
+        media_type = "audio";
+      } else if (!strcmp (name, "video/mpeg")) {
+        res = gst_rtp_mp4g_pay_parse_video_config (rtpmp4gpay, buffer);
+        media_type = "video";
+      } else {
+        res = FALSE;
+      }
+      if (!res)
+        goto config_failed;
+
+      /* now we can configure the buffer */
+      if (rtpmp4gpay->config)
+        gst_buffer_unref (rtpmp4gpay->config);
+
+      rtpmp4gpay->config = gst_buffer_copy (buffer);
+    }
+  }
+  /* media_type is only set after a successful parse above */
+  if (media_type == NULL)
+    goto config_failed;
+
+  gst_rtp_base_payload_set_options (payload, media_type, TRUE, "MPEG4-GENERIC",
+      rtpmp4gpay->rate);
+
+  res = gst_rtp_mp4g_pay_new_caps (rtpmp4gpay);
+
+  return res;
+
+  /* ERRORS */
+config_failed:
+  {
+    GST_DEBUG_OBJECT (rtpmp4gpay, "failed to parse config");
+    return FALSE;
+  }
+}
+
+/* Drain the adapter into RTP packets. An AU larger than the MTU is
+ * fragmented over several packets; every packet carries a 4-byte
+ * AU-header section (2-byte AU-headers-length + one 16-bit AU-header)
+ * and repeats the timing of the AU. The RTP marker is set on the last
+ * fragment only. Returns the result of pushing the packets. */
+static GstFlowReturn
+gst_rtp_mp4g_pay_flush (GstRtpMP4GPay * rtpmp4gpay)
+{
+  guint avail, total;
+  GstBuffer *outbuf;
+  GstFlowReturn ret;
+  guint mtu;
+
+  /* the data available in the adapter is either smaller
+   * than the MTU or bigger. In the case it is smaller, the complete
+   * adapter contents can be put in one packet. In the case the
+   * adapter has more than one MTU, we need to fragment the MPEG data
+   * over multiple packets. */
+  total = avail = gst_adapter_available (rtpmp4gpay->adapter);
+
+  ret = GST_FLOW_OK;
+  mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp4gpay);
+
+  while (avail > 0) {
+    guint towrite;
+    guint8 *payload;
+    guint payload_len;
+    guint packet_len;
+    GstRTPBuffer rtp = { NULL };
+    GstBuffer *paybuf;
+
+    /* this will be the total length of the packet */
+    packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
+
+    /* fill one MTU or all available bytes, we need 4 spare bytes for
+     * the AU header. */
+    towrite = MIN (packet_len, mtu - 4);
+
+    /* this is the payload length */
+    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
+
+    GST_DEBUG_OBJECT (rtpmp4gpay,
+        "avail %d, towrite %d, packet_len %d, payload_len %d", avail, towrite,
+        packet_len, payload_len);
+
+    /* create buffer to hold the payload, also make room for the 4 header bytes. */
+    outbuf =
+        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+        (rtpmp4gpay), 4, 0, 0);
+    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+    /* copy payload */
+    payload = gst_rtp_buffer_get_payload (&rtp);
+
+    /* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
+     * |AU-headers-length|AU-header|AU-header|      |AU-header|padding|
+     * |                 |   (1)   |   (2)   |      |   (n)   | bits  |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- .. -+-+-+-+-+-+-+-+-+-+
+     */
+    /* AU-headers-length, we only have 1 AU-header */
+    payload[0] = 0x00;
+    payload[1] = 0x10;          /* we use 16 bits for the header */
+
+    /* +---------------------------------------+
+     * |     AU-size                           |
+     * +---------------------------------------+
+     * |     AU-Index / AU-Index-delta         |
+     * +---------------------------------------+
+     * |     CTS-flag                          |
+     * +---------------------------------------+
+     * |     CTS-delta                         |
+     * +---------------------------------------+
+     * |     DTS-flag                          |
+     * +---------------------------------------+
+     * |     DTS-delta                         |
+     * +---------------------------------------+
+     * |     RAP-flag                          |
+     * +---------------------------------------+
+     * |     Stream-state                      |
+     * +---------------------------------------+
+     */
+    /* The AU-header, no CTS, DTS, RAP, Stream-state
+     *
+     * AU-size is always the total size of the AU, not the fragmented size
+     */
+    /* NOTE(review): only 13 bits carry AU-size (sizelength=13 in the
+     * caps), so a total above 8191 bytes would be truncated here */
+    payload[2] = (total & 0x1fe0) >> 5;
+    payload[3] = (total & 0x1f) << 3;   /* we use 13 bits for the size, 3 bits index */
+
+    /* marker only if the packet is complete */
+    gst_rtp_buffer_set_marker (&rtp, avail <= payload_len);
+
+    gst_rtp_buffer_unmap (&rtp);
+
+    paybuf = gst_adapter_take_buffer_fast (rtpmp4gpay->adapter, payload_len);
+    gst_rtp_copy_meta (GST_ELEMENT_CAST (rtpmp4gpay), outbuf, paybuf, 0);
+    outbuf = gst_buffer_append (outbuf, paybuf);
+
+    GST_BUFFER_PTS (outbuf) = rtpmp4gpay->first_timestamp;
+    GST_BUFFER_DURATION (outbuf) = rtpmp4gpay->first_duration;
+
+    GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET_NONE;
+
+    if (rtpmp4gpay->discont) {
+      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
+      /* Only the first outputted buffer has the DISCONT flag */
+      rtpmp4gpay->discont = FALSE;
+    }
+
+    ret = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpmp4gpay), outbuf);
+
+    avail -= payload_len;
+  }
+
+  return ret;
+}
+
+/* Accept exactly one complete Access Unit per input buffer, remember
+ * its timing and discont state, and immediately packetize it. */
+static GstFlowReturn
+gst_rtp_mp4g_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstRtpMP4GPay *self = GST_RTP_MP4G_PAY (basepayload);
+
+  /* this AU's timing is applied to every RTP packet it is split into */
+  self->first_timestamp = GST_BUFFER_PTS (buffer);
+  self->first_duration = GST_BUFFER_DURATION (buffer);
+  self->discont = GST_BUFFER_IS_DISCONT (buffer);
+
+  /* the adapter takes ownership; flush drains it completely */
+  gst_adapter_push (self->adapter, buffer);
+  return gst_rtp_mp4g_pay_flush (self);
+}
+
+/* Flush pending AU data before SEGMENT/EOS so the last buffer always
+ * reaches the base payloader, and drop buffered state on FLUSH_STOP. */
+static gboolean
+gst_rtp_mp4g_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+  GstRtpMP4GPay *self = GST_RTP_MP4G_PAY (payload);
+  GstEventType type = GST_EVENT_TYPE (event);
+
+  GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
+
+  if (type == GST_EVENT_SEGMENT || type == GST_EVENT_EOS) {
+    /* This flush call makes sure that the last buffer is always pushed
+     * to the base payloader */
+    gst_rtp_mp4g_pay_flush (self);
+  } else if (type == GST_EVENT_FLUSH_STOP) {
+    gst_rtp_mp4g_pay_reset (self);
+  }
+
+  /* let parent handle event too */
+  return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+}
+
+/* Clean up all negotiated stream parameters when streaming starts
+ * (READY→PAUSED) and again when it stops (PAUSED→READY). */
+static GstStateChangeReturn
+gst_rtp_mp4g_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstRtpMP4GPay *rtpmp4gpay;
+
+  rtpmp4gpay = GST_RTP_MP4G_PAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_rtp_mp4g_pay_cleanup (rtpmp4gpay);
+      break;
+    default:
+      break;
+  }
+
+  /* chain up for the actual state change */
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_rtp_mp4g_pay_cleanup (rtpmp4gpay);
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
diff --git a/gst/rtp/gstrtpmp4gpay.h b/gst/rtp/gstrtpmp4gpay.h
new file mode 100644
index 0000000000..6e7a625849
--- /dev/null
+++ b/gst/rtp/gstrtpmp4gpay.h
@@ -0,0 +1,70 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MP4G_PAY_H__
+#define __GST_RTP_MP4G_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MP4G_PAY \
+ (gst_rtp_mp4g_pay_get_type())
+#define GST_RTP_MP4G_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP4G_PAY,GstRtpMP4GPay))
+#define GST_RTP_MP4G_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP4G_PAY,GstRtpMP4GPayClass))
+#define GST_IS_RTP_MP4G_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP4G_PAY))
+#define GST_IS_RTP_MP4G_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP4G_PAY))
+
+typedef struct _GstRtpMP4GPay GstRtpMP4GPay;
+typedef struct _GstRtpMP4GPayClass GstRtpMP4GPayClass;
+
+/* Instance state for the MPEG4-generic (RFC 3640) payloader. */
+struct _GstRtpMP4GPay
+{
+  GstRTPBasePayload payload;
+
+  /* pending AU data and the timing applied to its RTP packets */
+  GstAdapter *adapter;
+  GstClockTime first_timestamp;
+  GstClockTime first_duration;
+  gboolean discont;
+
+  /* parameters derived from codec_data in setcaps */
+  gint rate;
+  gchar *params;                /* owned; channel count string */
+  gchar *profile;               /* owned */
+  const gchar *streamtype;      /* static string literal, not owned */
+  const gchar *mode;            /* static string literal, not owned */
+  GstBuffer *config;            /* copy of codec_data */
+  guint frame_len;
+};
+
+struct _GstRtpMP4GPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_mp4g_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MP4G_PAY_H__ */
diff --git a/gst/rtp/gstrtpmp4vdepay.c b/gst/rtp/gstrtpmp4vdepay.c
new file mode 100644
index 0000000000..204828c478
--- /dev/null
+++ b/gst/rtp/gstrtpmp4vdepay.c
@@ -0,0 +1,223 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpmp4vdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpmp4vdepay_debug);
+#define GST_CAT_DEFAULT (rtpmp4vdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_mp4v_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg,"
+ "mpegversion=(int) 4," "systemstream=(boolean)false")
+ );
+
+static GstStaticPadTemplate gst_rtp_mp4v_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP4V-ES\""
+ /* All optional parameters
+ *
+ * "profile-level-id=[1,MAX]"
+ * "config="
+ */
+ )
+ );
+
+#define gst_rtp_mp4v_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4VDepay, gst_rtp_mp4v_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4vdepay, "rtpmp4vdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MP4V_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_mp4v_depay_finalize (GObject * object);
+
+static gboolean gst_rtp_mp4v_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_mp4v_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static GstStateChangeReturn gst_rtp_mp4v_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+/* GObject class initialisation: install the vfuncs, pad templates and
+ * element metadata, and set up the debug category. */
+static void
+gst_rtp_mp4v_depay_class_init (GstRtpMP4VDepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_mp4v_depay_finalize;
+
+  gstelement_class->change_state = gst_rtp_mp4v_depay_change_state;
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_mp4v_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_mp4v_depay_setcaps;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_mp4v_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_mp4v_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP MPEG4 video depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts MPEG4 video from RTP packets (RFC 3016)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpmp4vdepay_debug, "rtpmp4vdepay", 0,
+      "MPEG4 video RTP Depayloader");
+}
+
+/* Instance init: create the adapter that accumulates VOP fragments. */
+static void
+gst_rtp_mp4v_depay_init (GstRtpMP4VDepay * rtpmp4vdepay)
+{
+  rtpmp4vdepay->adapter = gst_adapter_new ();
+}
+
+/* GObject finalize: release the fragment adapter. */
+static void
+gst_rtp_mp4v_depay_finalize (GObject * object)
+{
+  GstRtpMP4VDepay *rtpmp4vdepay;
+
+  rtpmp4vdepay = GST_RTP_MP4V_DEPAY (object);
+
+  g_object_unref (rtpmp4vdepay->adapter);
+  rtpmp4vdepay->adapter = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Configure the depayloader from the RTP caps: pick up the clock-rate
+ * (defaulting to 90000 when absent) and, when a "config" parameter is
+ * present, deserialize it into a codec_data buffer on the src caps.
+ *
+ * Returns the result of setting the src caps. */
+static gboolean
+gst_rtp_mp4v_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstCaps *srccaps;
+  const gchar *str;
+  gint clock_rate;
+  gboolean res;
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 90000;         /* default */
+  depayload->clock_rate = clock_rate;
+
+  srccaps = gst_caps_new_simple ("video/mpeg",
+      "mpegversion", G_TYPE_INT, 4,
+      "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+
+  if ((str = gst_structure_get_string (structure, "config"))) {
+    GValue v = { 0 };
+
+    g_value_init (&v, GST_TYPE_BUFFER);
+    if (gst_value_deserialize (&v, str)) {
+      GstBuffer *buffer;
+
+      buffer = gst_value_get_buffer (&v);
+      gst_caps_set_simple (srccaps,
+          "codec_data", GST_TYPE_BUFFER, buffer, NULL);
+      /* caps takes ref */
+    } else {
+      g_warning ("cannot convert config to buffer");
+    }
+    /* unset unconditionally: the value was initialized even when
+     * deserialization failed, so this avoids leaking its contents */
+    g_value_unset (&v);
+  }
+  res = gst_pad_set_caps (depayload->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+}
+
+/* Assemble one MPEG-4 video frame (VOP) from RTP packets: payloads are
+ * collected in the adapter until a packet with the RTP marker bit
+ * arrives, then the accumulated data is returned as a single buffer.
+ * Returns NULL while a frame is still incomplete. */
+static GstBuffer *
+gst_rtp_mp4v_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpMP4VDepay *rtpmp4vdepay;
+  GstBuffer *pbuf, *outbuf = NULL;
+  gboolean marker;
+
+  rtpmp4vdepay = GST_RTP_MP4V_DEPAY (depayload);
+
+  /* flush remaining data on discont */
+  if (GST_BUFFER_IS_DISCONT (rtp->buffer))
+    gst_adapter_clear (rtpmp4vdepay->adapter);
+
+  pbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+  marker = gst_rtp_buffer_get_marker (rtp);
+
+  gst_adapter_push (rtpmp4vdepay->adapter, pbuf);
+
+  /* if this was the last packet of the VOP, create and push a buffer */
+  if (marker) {
+    guint avail;
+
+    avail = gst_adapter_available (rtpmp4vdepay->adapter);
+    outbuf = gst_adapter_take_buffer (rtpmp4vdepay->adapter, avail);
+
+    GST_DEBUG ("gst_rtp_mp4v_depay_chain: pushing buffer of size %"
+        G_GSIZE_FORMAT, gst_buffer_get_size (outbuf));
+    /* strip metas that do not apply to the assembled video buffer */
+    gst_rtp_drop_non_video_meta (rtpmp4vdepay, outbuf);
+  }
+
+  return outbuf;
+}
+
+/* Clear any partially assembled VOP when streaming starts
+ * (READY→PAUSED); nothing to do after the base-class transition. */
+static GstStateChangeReturn
+gst_rtp_mp4v_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpMP4VDepay *rtpmp4vdepay;
+  GstStateChangeReturn ret;
+
+  rtpmp4vdepay = GST_RTP_MP4V_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_adapter_clear (rtpmp4vdepay->adapter);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* placeholder: no post-transition work needed */
+  switch (transition) {
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpmp4vdepay.h b/gst/rtp/gstrtpmp4vdepay.h
new file mode 100644
index 0000000000..0eecdf07fd
--- /dev/null
+++ b/gst/rtp/gstrtpmp4vdepay.h
@@ -0,0 +1,59 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MP4V_DEPAY_H__
+#define __GST_RTP_MP4V_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MP4V_DEPAY \
+ (gst_rtp_mp4v_depay_get_type())
+#define GST_RTP_MP4V_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP4V_DEPAY,GstRtpMP4VDepay))
+#define GST_RTP_MP4V_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP4V_DEPAY,GstRtpMP4VDepayClass))
+#define GST_IS_RTP_MP4V_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP4V_DEPAY))
+#define GST_IS_RTP_MP4V_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP4V_DEPAY))
+
+typedef struct _GstRtpMP4VDepay GstRtpMP4VDepay;
+typedef struct _GstRtpMP4VDepayClass GstRtpMP4VDepayClass;
+
+/* Instance state for the MPEG-4 video (RFC 3016) depayloader. */
+struct _GstRtpMP4VDepay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* accumulates payload fragments until the RTP marker ends the VOP */
+  GstAdapter *adapter;
+};
+
+struct _GstRtpMP4VDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_mp4v_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MP4V_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpmp4vpay.c b/gst/rtp/gstrtpmp4vpay.c
new file mode 100644
index 0000000000..f6a4229703
--- /dev/null
+++ b/gst/rtp/gstrtpmp4vpay.c
@@ -0,0 +1,640 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpmp4vpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpmp4vpay_debug);
+#define GST_CAT_DEFAULT (rtpmp4vpay_debug)
+
+static GstStaticPadTemplate gst_rtp_mp4v_pay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg,"
+ "mpegversion=(int) 4, systemstream=(boolean)false;" "video/x-divx")
+ );
+
+static GstStaticPadTemplate gst_rtp_mp4v_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"MP4V-ES\""
+ /* two string params
+ *
+ "profile-level-id = (string) [1,MAX]"
+ "config = (string) [1,MAX]"
+ */
+ )
+ );
+
+#define DEFAULT_CONFIG_INTERVAL 0
+
+enum
+{
+ PROP_0,
+ PROP_CONFIG_INTERVAL
+};
+
+
+static void gst_rtp_mp4v_pay_finalize (GObject * object);
+
+static void gst_rtp_mp4v_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_mp4v_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_rtp_mp4v_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_mp4v_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+static gboolean gst_rtp_mp4v_pay_sink_event (GstRTPBasePayload * pay,
+ GstEvent * event);
+
+#define gst_rtp_mp4v_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMP4VPay, gst_rtp_mp4v_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+/* Note: This element is marked with a "+1" rank to make sure that
+ * auto-plugging of payloaders for MPEG-4 elementary streams doesn't
+ * end up using the 'rtpmp4gpay' element (generic MPEG-4), which isn't
+ * as well supported as this RFC 3016 payloader */
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmp4vpay, "rtpmp4vpay",
+ GST_RANK_SECONDARY + 1, GST_TYPE_RTP_MP4V_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_mp4v_pay_class_init (GstRtpMP4VPayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->set_property = gst_rtp_mp4v_pay_set_property;
+ gobject_class->get_property = gst_rtp_mp4v_pay_get_property;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp4v_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mp4v_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG4 Video payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG-4 video as RTP packets (RFC 3016)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_CONFIG_INTERVAL,
+ g_param_spec_int ("config-interval", "Config Send Interval",
+ "Send Config Insertion Interval in seconds (configuration headers "
+ "will be multiplexed in the data stream when detected.) "
+ "(0 = disabled, -1 = send with every IDR frame)",
+ -1, 3600, DEFAULT_CONFIG_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gobject_class->finalize = gst_rtp_mp4v_pay_finalize;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_mp4v_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mp4v_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_mp4v_pay_sink_event;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmp4vpay_debug, "rtpmp4vpay", 0,
+ "MP4 video RTP Payloader");
+}
+
+static void
+gst_rtp_mp4v_pay_init (GstRtpMP4VPay * rtpmp4vpay)
+{
+ rtpmp4vpay->adapter = gst_adapter_new ();
+ rtpmp4vpay->rate = 90000;
+ rtpmp4vpay->profile = 1;
+ rtpmp4vpay->need_config = TRUE;
+ rtpmp4vpay->config_interval = DEFAULT_CONFIG_INTERVAL;
+ rtpmp4vpay->last_config = -1;
+
+ rtpmp4vpay->config = NULL;
+}
+
+static void
+gst_rtp_mp4v_pay_finalize (GObject * object)
+{
+ GstRtpMP4VPay *rtpmp4vpay;
+
+ rtpmp4vpay = GST_RTP_MP4V_PAY (object);
+
+ if (rtpmp4vpay->config) {
+ gst_buffer_unref (rtpmp4vpay->config);
+ rtpmp4vpay->config = NULL;
+ }
+ g_object_unref (rtpmp4vpay->adapter);
+ rtpmp4vpay->adapter = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_rtp_mp4v_pay_new_caps (GstRtpMP4VPay * rtpmp4vpay)
+{
+ gchar *profile, *config;
+ GValue v = { 0 };
+ gboolean res;
+
+ profile = g_strdup_printf ("%d", rtpmp4vpay->profile);
+ g_value_init (&v, GST_TYPE_BUFFER);
+ gst_value_set_buffer (&v, rtpmp4vpay->config);
+ config = gst_value_serialize (&v);
+
+ res = gst_rtp_base_payload_set_outcaps (GST_RTP_BASE_PAYLOAD (rtpmp4vpay),
+ "profile-level-id", G_TYPE_STRING, profile,
+ "config", G_TYPE_STRING, config, NULL);
+
+ g_value_unset (&v);
+
+ g_free (profile);
+ g_free (config);
+
+ return res;
+}
+
+static gboolean
+gst_rtp_mp4v_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ GstRtpMP4VPay *rtpmp4vpay;
+ GstStructure *structure;
+ const GValue *codec_data;
+ gboolean res;
+
+ rtpmp4vpay = GST_RTP_MP4V_PAY (payload);
+
+ gst_rtp_base_payload_set_options (payload, "video", TRUE, "MP4V-ES",
+ rtpmp4vpay->rate);
+
+ res = TRUE;
+
+ structure = gst_caps_get_structure (caps, 0);
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data) {
+ GST_LOG_OBJECT (rtpmp4vpay, "got codec_data");
+ if (G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
+ GstBuffer *buffer;
+
+ buffer = gst_value_get_buffer (codec_data);
+
+ if (gst_buffer_get_size (buffer) < 5)
+ goto done;
+
+ gst_buffer_extract (buffer, 4, &rtpmp4vpay->profile, 1);
+ GST_LOG_OBJECT (rtpmp4vpay, "configuring codec_data, profile %d",
+ rtpmp4vpay->profile);
+
+ if (rtpmp4vpay->config)
+ gst_buffer_unref (rtpmp4vpay->config);
+ rtpmp4vpay->config = gst_buffer_copy (buffer);
+ res = gst_rtp_mp4v_pay_new_caps (rtpmp4vpay);
+ }
+ }
+
+done:
+ return res;
+}
+
+static void
+gst_rtp_mp4v_pay_empty (GstRtpMP4VPay * rtpmp4vpay)
+{
+ gst_adapter_clear (rtpmp4vpay->adapter);
+}
+
+#define RTP_HEADER_LEN 12
+
+static GstFlowReturn
+gst_rtp_mp4v_pay_flush (GstRtpMP4VPay * rtpmp4vpay)
+{
+ guint avail, mtu;
+ GstBuffer *outbuf;
+ GstBuffer *outbuf_data = NULL;
+ GstFlowReturn ret;
+ GstBufferList *list = NULL;
+
+ /* the data available in the adapter is either smaller
+ * than the MTU or bigger. In the case it is smaller, the complete
+ * adapter contents can be put in one packet. In the case the
+ * adapter has more than one MTU, we need to split the MP4V data
+ * over multiple packets. */
+ avail = gst_adapter_available (rtpmp4vpay->adapter);
+
+ if (rtpmp4vpay->config == NULL && rtpmp4vpay->need_config) {
+ /* when we don't have a config yet, flush things out */
+ gst_adapter_flush (rtpmp4vpay->adapter, avail);
+ avail = 0;
+ }
+
+ if (!avail)
+ return GST_FLOW_OK;
+
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (rtpmp4vpay);
+
+ /* Use buffer lists. Each frame will be put into a list
+ * of buffers and the whole list will be pushed downstream
+ * at once */
+ list = gst_buffer_list_new_sized ((avail / (mtu - RTP_HEADER_LEN)) + 1);
+
+ while (avail > 0) {
+ guint towrite;
+ guint payload_len;
+ guint packet_len;
+ GstRTPBuffer rtp = { NULL };
+
+ /* this will be the total length of the packet */
+ packet_len = gst_rtp_buffer_calc_packet_len (avail, 0, 0);
+
+ /* fill one MTU or all available bytes */
+ towrite = MIN (packet_len, mtu);
+
+ /* this is the payload length */
+ payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
+
+ /* create buffer without payload. The payload will be put
+ * in next buffer instead. Both buffers will be merged */
+ outbuf =
+ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+ (rtpmp4vpay), 0, 0, 0);
+
+ /* Take buffer with the payload from the adapter */
+ outbuf_data = gst_adapter_take_buffer_fast (rtpmp4vpay->adapter,
+ payload_len);
+
+ avail -= payload_len;
+
+ gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_marker (&rtp, avail == 0);
+ gst_rtp_buffer_unmap (&rtp);
+ gst_rtp_copy_video_meta (rtpmp4vpay, outbuf, outbuf_data);
+ outbuf = gst_buffer_append (outbuf, outbuf_data);
+
+ GST_BUFFER_PTS (outbuf) = rtpmp4vpay->first_timestamp;
+
+ /* add to list */
+ gst_buffer_list_insert (list, -1, outbuf);
+ }
+
+ /* push the whole buffer list at once */
+ ret =
+ gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmp4vpay), list);
+
+ return ret;
+}
+
+#define VOS_STARTCODE 0x000001B0
+#define VOS_ENDCODE 0x000001B1
+#define USER_DATA_STARTCODE 0x000001B2
+#define GOP_STARTCODE 0x000001B3
+#define VISUAL_OBJECT_STARTCODE 0x000001B5
+#define VOP_STARTCODE 0x000001B6
+
+static gboolean
+gst_rtp_mp4v_pay_depay_data (GstRtpMP4VPay * enc, guint8 * data, guint size,
+ gint * strip, gboolean * vopi)
+{
+ guint32 code;
+ gboolean result;
+ *vopi = FALSE;
+
+ *strip = 0;
+
+ if (size < 5)
+ return FALSE;
+
+ code = GST_READ_UINT32_BE (data);
+ GST_DEBUG_OBJECT (enc, "start code 0x%08x", code);
+
+ switch (code) {
+ case VOS_STARTCODE:
+ case 0x00000101:
+ {
+ gint i;
+ guint8 profile;
+ gboolean newprofile = FALSE;
+ gboolean equal;
+
+ if (code == VOS_STARTCODE) {
+ /* profile_and_level_indication */
+ profile = data[4];
+
+ GST_DEBUG_OBJECT (enc, "VOS profile 0x%08x", profile);
+
+ if (profile != enc->profile) {
+ newprofile = TRUE;
+ enc->profile = profile;
+ }
+ }
+
+ /* up to the next GOP_STARTCODE or VOP_STARTCODE is
+ * the config information */
+ code = 0xffffffff;
+ for (i = 5; i < size - 4; i++) {
+ code = (code << 8) | data[i];
+ if (code == GOP_STARTCODE || code == VOP_STARTCODE)
+ break;
+ }
+ i -= 3;
+ /* see if config changed */
+ equal = FALSE;
+ if (enc->config) {
+ if (gst_buffer_get_size (enc->config) == i) {
+ equal = gst_buffer_memcmp (enc->config, 0, data, i) == 0;
+ }
+ }
+ /* if config string changed or new profile, make new caps */
+ if (!equal || newprofile) {
+ if (enc->config)
+ gst_buffer_unref (enc->config);
+ enc->config = gst_buffer_new_and_alloc (i);
+
+ gst_buffer_fill (enc->config, 0, data, i);
+
+ gst_rtp_mp4v_pay_new_caps (enc);
+ }
+ *strip = i;
+ /* we need to flush out the current packet. */
+ result = TRUE;
+ break;
+ }
+ case VOP_STARTCODE:
+ GST_DEBUG_OBJECT (enc, "VOP");
+ /* VOP startcode, we don't have to flush the packet */
+ result = FALSE;
+ /* vop-coding-type == I-frame */
+ if (size > 4 && (data[4] >> 6 == 0)) {
+ GST_DEBUG_OBJECT (enc, "VOP-I");
+ *vopi = TRUE;
+ }
+ break;
+ case GOP_STARTCODE:
+ GST_DEBUG_OBJECT (enc, "GOP");
+ *vopi = TRUE;
+ result = TRUE;
+ break;
+ case 0x00000100:
+ enc->need_config = FALSE;
+ result = TRUE;
+ break;
+ default:
+ if (code >= 0x20 && code <= 0x2f) {
+ GST_DEBUG_OBJECT (enc, "short header");
+ result = FALSE;
+ } else {
+ GST_DEBUG_OBJECT (enc, "other startcode");
+ /* all other startcodes need a flush */
+ result = TRUE;
+ }
+ break;
+ }
+ return result;
+}
+
+/* we expect buffers starting on startcodes.
+ */
+static GstFlowReturn
+gst_rtp_mp4v_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpMP4VPay *rtpmp4vpay;
+ GstFlowReturn ret;
+ guint avail;
+ guint packet_len;
+ GstMapInfo map;
+ gsize size;
+ gboolean flush;
+ gint strip;
+ GstClockTime timestamp, duration;
+ gboolean vopi;
+ gboolean send_config;
+ GstClockTime running_time = GST_CLOCK_TIME_NONE;
+
+ ret = GST_FLOW_OK;
+ send_config = FALSE;
+
+ rtpmp4vpay = GST_RTP_MP4V_PAY (basepayload);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ size = map.size;
+ timestamp = GST_BUFFER_PTS (buffer);
+ duration = GST_BUFFER_DURATION (buffer);
+ avail = gst_adapter_available (rtpmp4vpay->adapter);
+
+ if (duration == -1)
+ duration = 0;
+
+ /* empty buffer, take timestamp */
+ if (avail == 0) {
+ rtpmp4vpay->first_timestamp = timestamp;
+ rtpmp4vpay->duration = 0;
+ }
+
+ /* depay incoming data and see if we need to start a new RTP
+ * packet */
+ flush =
+ gst_rtp_mp4v_pay_depay_data (rtpmp4vpay, map.data, size, &strip, &vopi);
+ gst_buffer_unmap (buffer, &map);
+
+ if (strip) {
+    /* strip the in-band config only when config-interval is 0 (resend
+     * disabled); keep it for periodic (>0) or per-IDR (-1) resending */
+ if (!(rtpmp4vpay->config_interval > 0)
+ && !(rtpmp4vpay->config_interval == -1)) {
+ GstBuffer *subbuf;
+
+ GST_LOG_OBJECT (rtpmp4vpay, "stripping config at %d, size %d", strip,
+ (gint) size - strip);
+
+ /* strip off header */
+ subbuf = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, strip,
+ size - strip);
+ GST_BUFFER_PTS (subbuf) = timestamp;
+ gst_buffer_unref (buffer);
+ buffer = subbuf;
+
+ size = gst_buffer_get_size (buffer);
+ } else {
+ running_time =
+ gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+ timestamp);
+
+ GST_LOG_OBJECT (rtpmp4vpay, "found config in stream");
+ rtpmp4vpay->last_config = running_time;
+ }
+ }
+
+ /* there is a config request, see if we need to insert it */
+ if (vopi && (rtpmp4vpay->config_interval > 0) && rtpmp4vpay->config) {
+ running_time =
+ gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+ timestamp);
+
+ if (rtpmp4vpay->last_config != -1) {
+ guint64 diff;
+
+ GST_LOG_OBJECT (rtpmp4vpay,
+ "now %" GST_TIME_FORMAT ", last VOP-I %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time),
+ GST_TIME_ARGS (rtpmp4vpay->last_config));
+
+      /* time elapsed since the last config (GstClockTime in ns, compared in seconds below) */
+ if (running_time > rtpmp4vpay->last_config) {
+ diff = running_time - rtpmp4vpay->last_config;
+ } else {
+ diff = 0;
+ }
+
+ GST_DEBUG_OBJECT (rtpmp4vpay,
+ "interval since last config %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
+
+ /* bigger than interval, queue config */
+ if (GST_TIME_AS_SECONDS (diff) >= rtpmp4vpay->config_interval) {
+ GST_DEBUG_OBJECT (rtpmp4vpay, "time to send config");
+ send_config = TRUE;
+ }
+ } else {
+ /* no known previous config time, send now */
+ GST_DEBUG_OBJECT (rtpmp4vpay, "no previous config time, send now");
+ send_config = TRUE;
+ }
+ }
+
+ if (vopi && (rtpmp4vpay->config_interval == -1)) {
+ GST_DEBUG_OBJECT (rtpmp4vpay, "sending config before current IDR frame");
+ /* send config before every IDR frame */
+ send_config = TRUE;
+ }
+
+ if (send_config) {
+ /* we need to send config now first */
+ GST_LOG_OBJECT (rtpmp4vpay, "inserting config in stream");
+
+ /* insert header */
+ buffer = gst_buffer_append (gst_buffer_ref (rtpmp4vpay->config), buffer);
+
+ GST_BUFFER_PTS (buffer) = timestamp;
+ size = gst_buffer_get_size (buffer);
+
+ if (running_time != -1) {
+ rtpmp4vpay->last_config = running_time;
+ }
+ }
+
+ /* if we need to flush, do so now */
+ if (flush) {
+ ret = gst_rtp_mp4v_pay_flush (rtpmp4vpay);
+ rtpmp4vpay->first_timestamp = timestamp;
+ rtpmp4vpay->duration = 0;
+ avail = 0;
+ }
+
+ /* get packet length of data and see if we exceeded MTU. */
+ packet_len = gst_rtp_buffer_calc_packet_len (avail + size, 0, 0);
+
+ if (gst_rtp_base_payload_is_filled (basepayload,
+ packet_len, rtpmp4vpay->duration + duration)) {
+ ret = gst_rtp_mp4v_pay_flush (rtpmp4vpay);
+ rtpmp4vpay->first_timestamp = timestamp;
+ rtpmp4vpay->duration = 0;
+ }
+
+ /* push new data */
+ gst_adapter_push (rtpmp4vpay->adapter, buffer);
+
+ rtpmp4vpay->duration += duration;
+
+ return ret;
+}
+
+static gboolean
+gst_rtp_mp4v_pay_sink_event (GstRTPBasePayload * pay, GstEvent * event)
+{
+ GstRtpMP4VPay *rtpmp4vpay;
+
+ rtpmp4vpay = GST_RTP_MP4V_PAY (pay);
+
+ GST_DEBUG ("Got event: %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ case GST_EVENT_EOS:
+ /* This flush call makes sure that the last buffer is always pushed
+ * to the base payloader */
+ gst_rtp_mp4v_pay_flush (rtpmp4vpay);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ gst_rtp_mp4v_pay_empty (rtpmp4vpay);
+ break;
+ default:
+ break;
+ }
+
+ /* let parent handle event too */
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (pay, event);
+}
+
+static void
+gst_rtp_mp4v_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpMP4VPay *rtpmp4vpay;
+
+ rtpmp4vpay = GST_RTP_MP4V_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ rtpmp4vpay->config_interval = g_value_get_int (value);
+ break;
+ default:
+ break;
+ }
+}
+
+static void
+gst_rtp_mp4v_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpMP4VPay *rtpmp4vpay;
+
+ rtpmp4vpay = GST_RTP_MP4V_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ g_value_set_int (value, rtpmp4vpay->config_interval);
+ break;
+ default:
+ break;
+ }
+}
diff --git a/gst/rtp/gstrtpmp4vpay.h b/gst/rtp/gstrtpmp4vpay.h
new file mode 100644
index 0000000000..1d906fd6f3
--- /dev/null
+++ b/gst/rtp/gstrtpmp4vpay.h
@@ -0,0 +1,72 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MP4V_PAY_H__
+#define __GST_RTP_MP4V_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MP4V_PAY \
+ (gst_rtp_mp4v_pay_get_type())
+#define GST_RTP_MP4V_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MP4V_PAY,GstRtpMP4VPay))
+#define GST_RTP_MP4V_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MP4V_PAY,GstRtpMP4VPayClass))
+#define GST_IS_RTP_MP4V_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MP4V_PAY))
+#define GST_IS_RTP_MP4V_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MP4V_PAY))
+
+typedef struct _GstRtpMP4VPay GstRtpMP4VPay;
+typedef struct _GstRtpMP4VPayClass GstRtpMP4VPayClass;
+
+struct _GstRtpMP4VPay
+{
+ GstRTPBasePayload payload;
+
+ GstAdapter *adapter;
+ GstClockTime first_timestamp;
+ GstClockTime duration;
+
+ gint rate;
+ gint profile;
+ GstBuffer *config;
+ gboolean send_config;
+ gboolean need_config;
+
+ /* naming might be confusing with send_config; but naming matches h264
+ * payloader */
+ gint config_interval;
+ GstClockTime last_config;
+};
+
+struct _GstRtpMP4VPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_mp4v_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MP4V_PAY_H__ */
diff --git a/gst/rtp/gstrtpmpadepay.c b/gst/rtp/gstrtpmpadepay.c
new file mode 100644
index 0000000000..afa852f329
--- /dev/null
+++ b/gst/rtp/gstrtpmpadepay.c
@@ -0,0 +1,177 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpmpadepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpmpadepay_debug);
+#define GST_CAT_DEFAULT (rtpmpadepay_debug)
+
+static GstStaticPadTemplate gst_rtp_mpa_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/mpeg, " "mpegversion = (int) 1")
+ );
+
+static GstStaticPadTemplate gst_rtp_mpa_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_MPA_STRING ", "
+ "clock-rate = (int) 90000 ;"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "encoding-name = (string) \"MPA\", clock-rate = (int) [1, MAX]")
+ );
+
+#define gst_rtp_mpa_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpMPADepay, gst_rtp_mpa_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpadepay, "rtpmpadepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_mpa_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_mpa_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static void
+gst_rtp_mpa_depay_class_init (GstRtpMPADepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmpadepay_debug, "rtpmpadepay", 0,
+ "MPEG Audio RTP Depayloader");
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mpa_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mpa_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts MPEG audio from RTP packets (RFC 2038)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasedepayload_class->set_caps = gst_rtp_mpa_depay_setcaps;
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_mpa_depay_process;
+}
+
+static void
+gst_rtp_mpa_depay_init (GstRtpMPADepay * rtpmpadepay)
+{
+}
+
+static gboolean
+gst_rtp_mpa_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ GstCaps *outcaps;
+ gint clock_rate;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000;
+ depayload->clock_rate = clock_rate;
+
+ outcaps =
+ gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 1, NULL);
+ res = gst_pad_set_caps (depayload->srcpad, outcaps);
+ gst_caps_unref (outcaps);
+
+ return res;
+}
+
+static GstBuffer *
+gst_rtp_mpa_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstRtpMPADepay *rtpmpadepay;
+ GstBuffer *outbuf;
+ gint payload_len;
+#if 0
+ guint8 *payload;
+ guint16 frag_offset;
+#endif
+ gboolean marker;
+
+ rtpmpadepay = GST_RTP_MPA_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (payload_len <= 4)
+ goto empty_packet;
+
+#if 0
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ /* strip off header
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | MBZ | Frag_offset |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ frag_offset = (payload[2] << 8) | payload[3];
+#endif
+
+ /* subbuffer skipping the 4 header bytes */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, 4, -1);
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ if (marker) {
+ /* mark start of talkspurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+ GST_DEBUG_OBJECT (rtpmpadepay,
+ "gst_rtp_mpa_depay_chain: pushing buffer of size %" G_GSIZE_FORMAT "",
+ gst_buffer_get_size (outbuf));
+
+ if (outbuf) {
+ gst_rtp_drop_non_audio_meta (rtpmpadepay, outbuf);
+ }
+
+ /* FIXME, we can push half mpeg frames when they are split over multiple
+ * RTP packets */
+ return outbuf;
+
+ /* ERRORS */
+empty_packet:
+ {
+ GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
+ ("Empty Payload."), (NULL));
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpmpadepay.h b/gst/rtp/gstrtpmpadepay.h
new file mode 100644
index 0000000000..9c06df345e
--- /dev/null
+++ b/gst/rtp/gstrtpmpadepay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MPA_DEPAY_H__
+#define __GST_RTP_MPA_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MPA_DEPAY \
+ (gst_rtp_mpa_depay_get_type())
+#define GST_RTP_MPA_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MPA_DEPAY,GstRtpMPADepay))
+#define GST_RTP_MPA_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MPA_DEPAY,GstRtpMPADepayClass))
+#define GST_IS_RTP_MPA_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MPA_DEPAY))
+#define GST_IS_RTP_MPA_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MPA_DEPAY))
+
+typedef struct _GstRtpMPADepay GstRtpMPADepay;
+typedef struct _GstRtpMPADepayClass GstRtpMPADepayClass;
+
+struct _GstRtpMPADepay
+{
+ GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpMPADepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_mpa_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MPA_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpmpapay.c b/gst/rtp/gstrtpmpapay.c
new file mode 100644
index 0000000000..87ff430fdc
--- /dev/null
+++ b/gst/rtp/gstrtpmpapay.c
@@ -0,0 +1,341 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpmpapay.h"
+#include "gstrtputils.h"
+
GST_DEBUG_CATEGORY_STATIC (rtpmpapay_debug);
#define GST_CAT_DEFAULT (rtpmpapay_debug)

/* Sink: accepts an MPEG-1 audio elementary stream. */
static GstStaticPadTemplate gst_rtp_mpa_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/mpeg, " "mpegversion = (int) 1")
    );

/* Src: RTP with either the static MPA payload type (14) or a dynamic
 * payload type with encoding-name MPA; clock-rate is fixed at 90 kHz. */
static GstStaticPadTemplate gst_rtp_mpa_pay_src_template =
    GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "payload = (int) " GST_RTP_PAYLOAD_MPA_STRING ", "
        "clock-rate = (int) 90000; "
        "application/x-rtp, "
        "media = (string) \"audio\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, " "encoding-name = (string) \"MPA\"")
    );

static void gst_rtp_mpa_pay_finalize (GObject * object);

static GstStateChangeReturn gst_rtp_mpa_pay_change_state (GstElement * element,
    GstStateChange transition);

static gboolean gst_rtp_mpa_pay_setcaps (GstRTPBasePayload * payload,
    GstCaps * caps);
static gboolean gst_rtp_mpa_pay_sink_event (GstRTPBasePayload * payload,
    GstEvent * event);
static GstFlowReturn gst_rtp_mpa_pay_flush (GstRtpMPAPay * rtpmpapay);
static GstFlowReturn gst_rtp_mpa_pay_handle_buffer (GstRTPBasePayload * payload,
    GstBuffer * buffer);

#define gst_rtp_mpa_pay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMPAPay, gst_rtp_mpa_pay, GST_TYPE_RTP_BASE_PAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpapay, "rtpmpapay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_mpa_pay_class_init (GstRtpMPAPayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ GST_DEBUG_CATEGORY_INIT (rtpmpapay_debug, "rtpmpapay", 0,
+ "MPEG Audio RTP Depayloader");
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_mpa_pay_finalize;
+
+ gstelement_class->change_state = gst_rtp_mpa_pay_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mpa_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_mpa_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP MPEG audio payloader", "Codec/Payloader/Network/RTP",
+ "Payload MPEG audio as RTP packets (RFC 2038)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_mpa_pay_setcaps;
+ gstrtpbasepayload_class->sink_event = gst_rtp_mpa_pay_sink_event;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_mpa_pay_handle_buffer;
+}
+
+static void
+gst_rtp_mpa_pay_init (GstRtpMPAPay * rtpmpapay)
+{
+ rtpmpapay->adapter = gst_adapter_new ();
+
+ GST_RTP_BASE_PAYLOAD (rtpmpapay)->pt = GST_RTP_PAYLOAD_MPA;
+}
+
+static void
+gst_rtp_mpa_pay_finalize (GObject * object)
+{
+ GstRtpMPAPay *rtpmpapay;
+
+ rtpmpapay = GST_RTP_MPA_PAY (object);
+
+ g_object_unref (rtpmpapay->adapter);
+ rtpmpapay->adapter = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_rtp_mpa_pay_reset (GstRtpMPAPay * pay)
+{
+ pay->first_ts = -1;
+ pay->duration = 0;
+ gst_adapter_clear (pay->adapter);
+ GST_DEBUG_OBJECT (pay, "reset depayloader");
+}
+
+static gboolean
+gst_rtp_mpa_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+
+ gst_rtp_base_payload_set_options (payload, "audio",
+ payload->pt != GST_RTP_PAYLOAD_MPA, "MPA", 90000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+ return res;
+}
+
+static gboolean
+gst_rtp_mpa_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ gboolean ret;
+ GstRtpMPAPay *rtpmpapay;
+
+ rtpmpapay = GST_RTP_MPA_PAY (payload);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ /* make sure we push the last packets in the adapter on EOS */
+ gst_rtp_mpa_pay_flush (rtpmpapay);
+ break;
+ case GST_EVENT_FLUSH_STOP:
+ gst_rtp_mpa_pay_reset (rtpmpapay);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+
+ return ret;
+}
+
#define RTP_HEADER_LEN 12

/* Packetizes everything currently queued in the adapter into one or more
 * RTP packets (fragmenting over the MTU if needed) and pushes them as a
 * buffer list. Each packet starts with the 4-byte RFC 2250 MPA header
 * carrying the fragmentation offset. Returns the flow result of the push. */
static GstFlowReturn
gst_rtp_mpa_pay_flush (GstRtpMPAPay * rtpmpapay)
{
  guint avail;
  GstBuffer *outbuf;
  GstFlowReturn ret;
  guint16 frag_offset;
  GstBufferList *list;

  /* the data available in the adapter is either smaller
   * than the MTU or bigger. In the case it is smaller, the complete
   * adapter contents can be put in one packet. In the case the
   * adapter has more than one MTU, we need to split the MPA data
   * over multiple packets. The frag_offset in each packet header
   * needs to be updated with the position in the MPA frame. */
  avail = gst_adapter_available (rtpmpapay->adapter);

  ret = GST_FLOW_OK;

  /* pre-size the list with the expected number of fragments */
  list =
      gst_buffer_list_new_sized (avail / (GST_RTP_BASE_PAYLOAD_MTU (rtpmpapay) -
          RTP_HEADER_LEN) + 1);

  frag_offset = 0;
  while (avail > 0) {
    guint towrite;
    guint8 *payload;
    guint payload_len;
    guint packet_len;
    GstRTPBuffer rtp = { NULL };
    GstBuffer *paybuf;

    /* this will be the total length of the packet */
    packet_len = gst_rtp_buffer_calc_packet_len (4 + avail, 0, 0);

    /* fill one MTU or all available bytes */
    towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpapay));

    /* this is the payload length */
    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);

    /* create buffer to hold the payload; only the 4-byte MPA header is
     * allocated here, the audio data is appended from the adapter below */
    outbuf =
        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
        (rtpmpapay), 4, 0, 0);

    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);

    /* account for the MPA header in the amount of audio data we take */
    payload_len -= 4;

    gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_PAYLOAD_MPA);

    /*
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |             MBZ               |          Frag_offset          |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    payload = gst_rtp_buffer_get_payload (&rtp);
    payload[0] = 0;
    payload[1] = 0;
    payload[2] = frag_offset >> 8;
    payload[3] = frag_offset & 0xff;

    avail -= payload_len;
    frag_offset += payload_len;

    /* marker bit on the packet that completes the frame */
    if (avail == 0)
      gst_rtp_buffer_set_marker (&rtp, TRUE);

    gst_rtp_buffer_unmap (&rtp);

    paybuf = gst_adapter_take_buffer_fast (rtpmpapay->adapter, payload_len);
    gst_rtp_copy_audio_meta (rtpmpapay, outbuf, paybuf);
    outbuf = gst_buffer_append (outbuf, paybuf);

    /* all fragments carry the timestamp/duration of the whole chunk */
    GST_BUFFER_PTS (outbuf) = rtpmpapay->first_ts;
    GST_BUFFER_DURATION (outbuf) = rtpmpapay->duration;
    gst_buffer_list_add (list, outbuf);
  }

  ret = gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmpapay), list);

  return ret;
}
+
+static GstFlowReturn
+gst_rtp_mpa_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpMPAPay *rtpmpapay;
+ GstFlowReturn ret;
+ guint size, avail;
+ guint packet_len;
+ GstClockTime duration, timestamp;
+
+ rtpmpapay = GST_RTP_MPA_PAY (basepayload);
+
+ size = gst_buffer_get_size (buffer);
+ duration = GST_BUFFER_DURATION (buffer);
+ timestamp = GST_BUFFER_PTS (buffer);
+
+ if (GST_BUFFER_IS_DISCONT (buffer)) {
+ GST_DEBUG_OBJECT (rtpmpapay, "DISCONT");
+ gst_rtp_mpa_pay_reset (rtpmpapay);
+ }
+
+ avail = gst_adapter_available (rtpmpapay->adapter);
+
+ /* get packet length of previous data and this new data,
+ * payload length includes a 4 byte header */
+ packet_len = gst_rtp_buffer_calc_packet_len (4 + avail + size, 0, 0);
+
+ /* if this buffer is going to overflow the packet, flush what we
+ * have. */
+ if (gst_rtp_base_payload_is_filled (basepayload,
+ packet_len, rtpmpapay->duration + duration)) {
+ ret = gst_rtp_mpa_pay_flush (rtpmpapay);
+ avail = 0;
+ } else {
+ ret = GST_FLOW_OK;
+ }
+
+ if (avail == 0) {
+ GST_DEBUG_OBJECT (rtpmpapay,
+ "first packet, save timestamp %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+ rtpmpapay->first_ts = timestamp;
+ rtpmpapay->duration = 0;
+ }
+
+ gst_adapter_push (rtpmpapay->adapter, buffer);
+ rtpmpapay->duration = duration;
+
+ return ret;
+}
+
+static GstStateChangeReturn
+gst_rtp_mpa_pay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpMPAPay *rtpmpapay;
+ GstStateChangeReturn ret;
+
+ rtpmpapay = GST_RTP_MPA_PAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtp_mpa_pay_reset (rtpmpapay);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_mpa_pay_reset (rtpmpapay);
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtpmpapay.h b/gst/rtp/gstrtpmpapay.h
new file mode 100644
index 0000000000..759ce87e81
--- /dev/null
+++ b/gst/rtp/gstrtpmpapay.h
@@ -0,0 +1,61 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_MPA_PAY_H__
#define __GST_RTP_MPA_PAY_H__

#include <gst/gst.h>
#include <gst/rtp/gstrtpbasepayload.h>
#include <gst/base/gstadapter.h>

G_BEGIN_DECLS

/* Standard GObject type boilerplate for the MPEG audio RTP payloader. */
#define GST_TYPE_RTP_MPA_PAY \
  (gst_rtp_mpa_pay_get_type())
#define GST_RTP_MPA_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MPA_PAY,GstRtpMPAPay))
#define GST_RTP_MPA_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MPA_PAY,GstRtpMPAPayClass))
#define GST_IS_RTP_MPA_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MPA_PAY))
#define GST_IS_RTP_MPA_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MPA_PAY))

typedef struct _GstRtpMPAPay GstRtpMPAPay;
typedef struct _GstRtpMPAPayClass GstRtpMPAPayClass;

struct _GstRtpMPAPay
{
  GstRTPBasePayload payload;

  /* pending input bytes not yet packetized */
  GstAdapter *adapter;
  /* PTS of the first buffer queued since the last flush */
  GstClockTime first_ts;
  /* accumulated duration of the queued buffers */
  GstClockTime duration;
};

struct _GstRtpMPAPayClass
{
  GstRTPBasePayloadClass parent_class;
};

GType gst_rtp_mpa_pay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_MPA_PAY_H__ */
diff --git a/gst/rtp/gstrtpmparobustdepay.c b/gst/rtp/gstrtpmparobustdepay.c
new file mode 100644
index 0000000000..ca7f1f19a3
--- /dev/null
+++ b/gst/rtp/gstrtpmparobustdepay.c
@@ -0,0 +1,808 @@
+/* GStreamer
+ * Copyright (C) <2010> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ * Copyright (C) <2010> Nokia Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include <stdio.h>
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpmparobustdepay.h"
+
GST_DEBUG_CATEGORY_STATIC (rtpmparobustdepay_debug);
#define GST_CAT_DEFAULT (rtpmparobustdepay_debug)

/* Src: produces an MPEG-1 audio elementary stream. */
static GstStaticPadTemplate gst_rtp_mpa_robust_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/mpeg, " "mpegversion = (int) 1")
    );

/* Sink: RFC 5219 MPA-ROBUST, plus the pre-standard draft variants. */
static GstStaticPadTemplate gst_rtp_mpa_robust_depay_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"audio\", "
        "clock-rate = (int) 90000, "
        "encoding-name = (string) \"MPA-ROBUST\" " "; "
        /* draft versions appear still in use out there */
        "application/x-rtp, "
        "media = (string) \"audio\", "
        "clock-rate = (int) [1, MAX], "
        "encoding-name = (string) { \"X-MP3-DRAFT-00\", \"X-MP3-DRAFT-01\", "
        " \"X-MP3-DRAFT-02\", \"X-MP3-DRAFT-03\", \"X-MP3-DRAFT-04\", "
        " \"X-MP3-DRAFT-05\", \"X-MP3-DRAFT-06\" }")
    );

/* One parsed ADU frame queued for ADU -> MP3 frame conversion. */
typedef struct _GstADUFrame
{
  guint32 header;               /* 32-bit MPEG frame header */
  gint size;                    /* total MP3 frame size implied by header */
  gint side_info;               /* header CRC + side info size in bytes */
  gint data_size;               /* size - 4 - side_info */
  gint layer;                   /* MPEG layer (1, 2 or 3) */
  gint backpointer;             /* main_data_begin back reference */

  GstBuffer *buffer;            /* owned ADU payload */
} GstADUFrame;

#define gst_rtp_mpa_robust_depay_parent_class parent_class
G_DEFINE_TYPE (GstRtpMPARobustDepay, gst_rtp_mpa_robust_depay,
    GST_TYPE_RTP_BASE_DEPAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmparobustdepay, "rtpmparobustdepay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_MPA_ROBUST_DEPAY,
    rtp_element_init (plugin));

static GstStateChangeReturn gst_rtp_mpa_robust_change_state (GstElement *
    element, GstStateChange transition);

static gboolean gst_rtp_mpa_robust_depay_setcaps (GstRTPBaseDepayload *
    depayload, GstCaps * caps);
static GstBuffer *gst_rtp_mpa_robust_depay_process (GstRTPBaseDepayload *
    depayload, GstRTPBuffer * rtp);
+
/* GObject finalize: release the adapter and the ADU frame queue, then
 * chain up.
 * NOTE(review): g_queue_free() does not free queued GstADUFrame entries;
 * presumably the queue is emptied on PAUSED->READY before finalize can
 * run — confirm against gst_rtp_mpa_robust_change_state. */
static void
gst_rtp_mpa_robust_depay_finalize (GObject * object)
{
  GstRtpMPARobustDepay *rtpmpadepay;

  rtpmpadepay = (GstRtpMPARobustDepay *) object;

  g_object_unref (rtpmpadepay->adapter);
  g_queue_free (rtpmpadepay->adu_frames);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
/* Class initializer: installs vmethods, pad templates and element
 * metadata for the robust (RFC 5219) MPEG audio depayloader. */
static void
gst_rtp_mpa_robust_depay_class_init (GstRtpMPARobustDepayClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;

  GST_DEBUG_CATEGORY_INIT (rtpmparobustdepay_debug, "rtpmparobustdepay", 0,
      "Robust MPEG Audio RTP Depayloader");

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;

  gobject_class->finalize = gst_rtp_mpa_robust_depay_finalize;

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_rtp_mpa_robust_change_state);

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_mpa_robust_depay_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_mpa_robust_depay_sink_template);

  gst_element_class_set_static_metadata (gstelement_class,
      "RTP MPEG audio depayloader", "Codec/Depayloader/Network/RTP",
      "Extracts MPEG audio from RTP packets (RFC 5219)",
      "Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");

  gstrtpbasedepayload_class->set_caps = gst_rtp_mpa_robust_depay_setcaps;
  gstrtpbasedepayload_class->process_rtp_packet =
      gst_rtp_mpa_robust_depay_process;
}
+
+static void
+gst_rtp_mpa_robust_depay_init (GstRtpMPARobustDepay * rtpmpadepay)
+{
+ rtpmpadepay->adapter = gst_adapter_new ();
+ rtpmpadepay->adu_frames = g_queue_new ();
+}
+
/* set_caps vmethod: reads the clock rate (defaulting to 90000) and the
 * encoding name; only the draft-00 variant has no ADU descriptor byte.
 * Sets plain MPEG-1 audio caps on the source pad. */
static gboolean
gst_rtp_mpa_robust_depay_setcaps (GstRTPBaseDepayload * depayload,
    GstCaps * caps)
{
  GstRtpMPARobustDepay *rtpmpadepay;
  GstStructure *structure;
  GstCaps *outcaps;
  gint clock_rate, draft;
  gboolean res;
  const gchar *encoding;

  rtpmpadepay = GST_RTP_MPA_ROBUST_DEPAY (depayload);

  structure = gst_caps_get_structure (caps, 0);

  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
    clock_rate = 90000;
  depayload->clock_rate = clock_rate;

  /* X-MP3-DRAFT-00 streams carry raw ADUs without the descriptor byte */
  rtpmpadepay->has_descriptor = TRUE;
  if ((encoding = gst_structure_get_string (structure, "encoding-name"))) {
    if (sscanf (encoding, "X-MP3-DRAFT-%d", &draft) && (draft == 0))
      rtpmpadepay->has_descriptor = FALSE;
  }

  outcaps =
      gst_caps_new_simple ("audio/mpeg", "mpegversion", G_TYPE_INT, 1, NULL);
  res = gst_pad_set_caps (depayload->srcpad, outcaps);
  gst_caps_unref (outcaps);

  return res;
}
+
+/* thanks again go to mp3parse ... */
+
/* Bitrates in kbit/s, indexed [lsf][layer - 1][bitrate index].
 * lsf 0 = MPEG-1, lsf 1 = MPEG-2/2.5; index 0 is "free format". */
static const guint mp3types_bitrates[2][3][16] = {
  {
        {0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448,},
        {0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384,},
        {0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320,}
      },
  {
        {0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256,},
        {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,},
        {0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160,}
      },
};

/* Sample rates in Hz, indexed [lsf + mpg25][samplerate index]:
 * row 0 = MPEG-1, row 1 = MPEG-2, row 2 = MPEG-2.5. */
static const guint mp3types_freqs[3][3] = { {44100, 48000, 32000},
{22050, 24000, 16000},
{11025, 12000, 8000}
};
+
/* Decodes a 32-bit MPEG audio frame header and returns the frame length
 * in bytes, or 0 for an unusable header (free-format bitrate).
 * Any of the put_* out parameters may be NULL when the caller does not
 * need that field. @mp3parse is only used as the debug object. */
static inline guint
mp3_type_frame_length_from_header (GstElement * mp3parse, guint32 header,
    guint * put_version, guint * put_layer, guint * put_channels,
    guint * put_bitrate, guint * put_samplerate, guint * put_mode,
    guint * put_crc)
{
  guint length;
  gulong mode, samplerate, bitrate, layer, channels, padding, crc;
  gulong version;
  gint lsf, mpg25;

  /* version bits: MPEG-1, MPEG-2 (lsf) or MPEG-2.5 (lsf + mpg25) */
  if (header & (1 << 20)) {
    lsf = (header & (1 << 19)) ? 0 : 1;
    mpg25 = 0;
  } else {
    lsf = 1;
    mpg25 = 1;
  }

  version = 1 + lsf + mpg25;

  layer = 4 - ((header >> 17) & 0x3);

  crc = (header >> 16) & 0x1;

  bitrate = (header >> 12) & 0xF;
  bitrate = mp3types_bitrates[lsf][layer - 1][bitrate] * 1000;
  /* bitrate index 0 means "free format"; we cannot derive a frame
   * length from it, so reject such headers */
  if (bitrate == 0) {
    GST_DEBUG_OBJECT (mp3parse, "invalid bitrate");
    return 0;
  }

  samplerate = (header >> 10) & 0x3;
  samplerate = mp3types_freqs[lsf + mpg25][samplerate];

  padding = (header >> 9) & 0x1;

  mode = (header >> 6) & 0x3;
  channels = (mode == 3) ? 1 : 2;

  /* frame length formula differs per layer */
  switch (layer) {
    case 1:
      length = 4 * ((bitrate * 12) / samplerate + padding);
      break;
    case 2:
      length = (bitrate * 144) / samplerate + padding;
      break;
    default:
    case 3:
      length = (bitrate * 144) / (samplerate << lsf) + padding;
      break;
  }

  GST_LOG_OBJECT (mp3parse, "Calculated mp3 frame length of %u bytes", length);
  GST_LOG_OBJECT (mp3parse, "samplerate = %lu, bitrate = %lu, version = %lu, "
      "layer = %lu, channels = %lu, mode = %lu", samplerate, bitrate, version,
      layer, channels, mode);

  if (put_version)
    *put_version = version;
  if (put_layer)
    *put_layer = layer;
  if (put_channels)
    *put_channels = channels;
  if (put_bitrate)
    *put_bitrate = bitrate;
  if (put_samplerate)
    *put_samplerate = samplerate;
  if (put_mode)
    *put_mode = mode;
  if (put_crc)
    *put_crc = crc;

  GST_LOG_OBJECT (mp3parse, "size = %u", length);
  return length;
}
+
/* generate empty/silent/dummy frame that mimics @frame,
 * except for rate, where maximum possible is selected */
static GstADUFrame *
gst_rtp_mpa_robust_depay_generate_dummy_frame (GstRtpMPARobustDepay *
    rtpmpadepay, GstADUFrame * frame)
{
  GstADUFrame *dummy;
  GstMapInfo map;

  dummy = g_slice_dup (GstADUFrame, frame);

  /* go for maximum bitrate: overwrite the bitrate index with 0xe */
  dummy->header = (frame->header & ~(0xf << 12)) | (0xe << 12);
  dummy->size =
      mp3_type_frame_length_from_header (GST_ELEMENT_CAST (rtpmpadepay),
      dummy->header, NULL, NULL, NULL, NULL, NULL, NULL, NULL);
  dummy->data_size = dummy->size - 4 - dummy->side_info;
  dummy->backpointer = 0;

  /* buffer holds only header + side info; the data part stays zeroed */
  dummy->buffer = gst_buffer_new_and_alloc (dummy->side_info + 4);

  gst_buffer_map (dummy->buffer, &map, GST_MAP_WRITE);
  memset (map.data, 0, map.size);
  GST_WRITE_UINT32_BE (map.data, dummy->header);
  gst_buffer_unmap (dummy->buffer, &map);

  /* inherit the timestamp of the frame being mimicked */
  GST_BUFFER_PTS (dummy->buffer) = GST_BUFFER_PTS (frame->buffer);

  return dummy;
}
+
/* validates and parses @buf, and queues for further transformation if valid,
 * otherwise discards @buf
 * Takes ownership of @buf.
 * Returns TRUE when the frame was queued, FALSE when it was discarded. */
static gboolean
gst_rtp_mpa_robust_depay_queue_frame (GstRtpMPARobustDepay * rtpmpadepay,
    GstBuffer * buf)
{
  GstADUFrame *frame = NULL;
  guint version, layer, channels, size;
  guint crc;
  GstMapInfo map;

  g_return_val_if_fail (buf != NULL, FALSE);

  gst_buffer_map (buf, &map, GST_MAP_READ);

  /* need at least header (4) + backpointer (2) bytes */
  if (map.size < 6)
    goto corrupt_frame;

  frame = g_slice_new0 (GstADUFrame);
  frame->header = GST_READ_UINT32_BE (map.data);

  size = mp3_type_frame_length_from_header (GST_ELEMENT_CAST (rtpmpadepay),
      frame->header, &version, &layer, &channels, NULL, NULL, NULL, &crc);
  if (!size)
    goto corrupt_frame;

  frame->size = size;
  frame->layer = layer;
  /* side info size depends on MPEG version and channel count */
  if (version == 1 && channels == 2)
    frame->side_info = 32;
  else if ((version == 1 && channels == 1) || (version >= 2 && channels == 2))
    frame->side_info = 17;
  else if (version >= 2 && channels == 1)
    frame->side_info = 9;
  else {
    g_assert_not_reached ();
    goto corrupt_frame;
  }

  /* backpointer: top 9 bits of the 16 bits following the header */
  if (layer == 3) {
    frame->backpointer = GST_READ_UINT16_BE (map.data + 4);
    frame->backpointer >>= 7;
    GST_LOG_OBJECT (rtpmpadepay, "backpointer: %d", frame->backpointer);
  }

  /* the CRC-protection word, when present, counts as side info here */
  if (!crc)
    frame->side_info += 2;

  GST_LOG_OBJECT (rtpmpadepay, "side info: %d", frame->side_info);
  frame->data_size = frame->size - 4 - frame->side_info;

  /* some size validation checks */
  if (4 + frame->side_info > map.size)
    goto corrupt_frame;

  /* ADU data would then extend past MP3 frame,
   * even using past byte reservoir */
  if (-frame->backpointer + (gint) (map.size) > frame->size)
    goto corrupt_frame;

  gst_buffer_unmap (buf, &map);

  /* ok, take buffer and queue */
  frame->buffer = buf;
  g_queue_push_tail (rtpmpadepay->adu_frames, frame);

  return TRUE;

  /* ERRORS */
corrupt_frame:
  {
    GST_DEBUG_OBJECT (rtpmpadepay, "frame is corrupt");
    gst_buffer_unmap (buf, &map);
    gst_buffer_unref (buf);
    if (frame)
      g_slice_free (GstADUFrame, frame);
    return FALSE;
  }
}
+
+static inline void
+gst_rtp_mpa_robust_depay_free_frame (GstADUFrame * frame)
+{
+ if (frame->buffer)
+ gst_buffer_unref (frame->buffer);
+ g_slice_free (GstADUFrame, frame);
+}
+
+static inline void
+gst_rtp_mpa_robust_depay_dequeue_frame (GstRtpMPARobustDepay * rtpmpadepay)
+{
+ GstADUFrame *head;
+
+ GST_LOG_OBJECT (rtpmpadepay, "dequeueing ADU frame");
+
+ if (rtpmpadepay->adu_frames->head == rtpmpadepay->cur_adu_frame)
+ rtpmpadepay->cur_adu_frame = NULL;
+
+ head = g_queue_pop_head (rtpmpadepay->adu_frames);
+ g_assert (head->buffer);
+ gst_rtp_mpa_robust_depay_free_frame (head);
+
+ return;
+}
+
/* returns TRUE if at least one new ADU frame was enqueued for MP3 conversion.
 * Takes ownership of @buf.
 * Non-interleaved streams (sync word 0x7ff, no interleaving seen yet) are
 * queued directly; otherwise frames are collected in the deinter[] table
 * and flushed in index order when the cycle count changes or an
 * interleave index repeats. */
static gboolean
gst_rtp_mpa_robust_depay_deinterleave (GstRtpMPARobustDepay * rtpmpadepay,
    GstBuffer * buf)
{
  gboolean ret = FALSE;
  GstMapInfo map;
  guint val, iindex, icc;

  /* top 11 bits: sync/interleave marker; next 8: index; next 3: cycle count */
  gst_buffer_map (buf, &map, GST_MAP_READ);
  val = GST_READ_UINT16_BE (map.data) >> 5;
  gst_buffer_unmap (buf, &map);

  iindex = val >> 3;
  icc = val & 0x7;

  GST_LOG_OBJECT (rtpmpadepay, "sync: 0x%x, index: %u, cycle count: %u",
      val, iindex, icc);

  /* basic case; no interleaving ever seen */
  if (val == 0x7ff && rtpmpadepay->last_icc < 0) {
    ret = gst_rtp_mpa_robust_depay_queue_frame (rtpmpadepay, buf);
  } else {
    if (G_UNLIKELY (rtpmpadepay->last_icc < 0)) {
      rtpmpadepay->last_icc = icc;
      rtpmpadepay->last_ii = iindex;
    }
    /* flush collected frames when the cycle changes or an index repeats */
    if (icc != rtpmpadepay->last_icc || iindex == rtpmpadepay->last_ii) {
      gint i;

      for (i = 0; i < 256; ++i) {
        if (rtpmpadepay->deinter[i] != NULL) {
          ret |= gst_rtp_mpa_robust_depay_queue_frame (rtpmpadepay,
              rtpmpadepay->deinter[i]);
          rtpmpadepay->deinter[i] = NULL;
        }
      }
    }
    /* rewrite buffer sync header: restore the plain 0x7ff sync word */
    gst_buffer_map (buf, &map, GST_MAP_READWRITE);
    val = GST_READ_UINT16_BE (map.data);
    val = (0x7ff << 5) | val;
    GST_WRITE_UINT16_BE (map.data, val);
    gst_buffer_unmap (buf, &map);
    /* store and keep track of last indices */
    rtpmpadepay->last_icc = icc;
    rtpmpadepay->last_ii = iindex;
    rtpmpadepay->deinter[iindex] = buf;
  }

  return ret;
}
+
/* Head ADU frame corresponds to mp3_frame (i.e. in header in side-info) that
 * is currently being written
 * cur_adu_frame refers to ADU frame whose data should be bytewritten next
 * (possibly starting from offset rather than start 0) (and is typicall tail
 * at time of last push round).
 * If at start, position where it should start writing depends on (data) sizes
 * of previous mp3 frames (corresponding to foregoing ADU frames) kept in size,
 * and its backpointer
 *
 * Converts queued ADU frames back into regular MP3 frames (reconstructing
 * the bit reservoir layout, inserting dummy frames where needed) and
 * pushes each completed frame downstream. Returns the last push result. */
static GstFlowReturn
gst_rtp_mpa_robust_depay_push_mp3_frames (GstRtpMPARobustDepay * rtpmpadepay)
{
  GstBuffer *buf;
  GstADUFrame *frame, *head;
  gint av;
  GstFlowReturn ret = GST_FLOW_OK;

  while (1) {
    GstMapInfo map;

    /* start a new round at the queue head if no current frame */
    if (G_UNLIKELY (!rtpmpadepay->cur_adu_frame)) {
      rtpmpadepay->cur_adu_frame = rtpmpadepay->adu_frames->head;
      rtpmpadepay->offset = 0;
      rtpmpadepay->size = 0;
    }

    /* queue empty: nothing to do */
    if (G_UNLIKELY (!rtpmpadepay->cur_adu_frame))
      break;

    frame = (GstADUFrame *) rtpmpadepay->cur_adu_frame->data;
    head = (GstADUFrame *) rtpmpadepay->adu_frames->head->data;

    /* special case: non-layer III are sent straight through */
    if (G_UNLIKELY (frame->layer != 3)) {
      GST_DEBUG_OBJECT (rtpmpadepay, "layer %d frame, sending as-is",
          frame->layer);
      gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmpadepay),
          frame->buffer);
      frame->buffer = NULL;
      /* and remove it from any further consideration */
      g_slice_free (GstADUFrame, frame);
      g_queue_delete_link (rtpmpadepay->adu_frames, rtpmpadepay->cur_adu_frame);
      rtpmpadepay->cur_adu_frame = NULL;
      continue;
    }

    /* current ADU fully consumed: advance to the next one, if any */
    if (rtpmpadepay->offset == gst_buffer_get_size (frame->buffer)) {
      if (g_list_next (rtpmpadepay->cur_adu_frame)) {
        rtpmpadepay->size += frame->data_size;
        rtpmpadepay->cur_adu_frame = g_list_next (rtpmpadepay->cur_adu_frame);
        frame = (GstADUFrame *) rtpmpadepay->cur_adu_frame->data;
        rtpmpadepay->offset = 0;
        GST_LOG_OBJECT (rtpmpadepay,
            "moving to next ADU frame, size %d, side_info %d, backpointer %d",
            frame->size, frame->side_info, frame->backpointer);
        /* layer I and II packets have no bitreservoir and must be sent as-is;
         * so flush any pending frame */
        if (G_UNLIKELY (frame->layer != 3 && rtpmpadepay->mp3_frame))
          goto flush;
      } else {
        break;
      }
    }

    /* start a fresh MP3 output frame based on the head ADU's header */
    if (G_UNLIKELY (!rtpmpadepay->mp3_frame)) {
      GST_LOG_OBJECT (rtpmpadepay,
          "setting up new MP3 frame of size %d, side_info %d",
          head->size, head->side_info);
      rtpmpadepay->mp3_frame = gst_byte_writer_new_with_size (head->size, TRUE);
      /* 0-fill possible gaps */
      gst_byte_writer_fill_unchecked (rtpmpadepay->mp3_frame, 0, head->size);
      gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, 0);
      /* bytewriter corresponds to head frame,
       * i.e. the header and the side info must match */
      g_assert (4 + head->side_info <= head->size);
      gst_buffer_map (head->buffer, &map, GST_MAP_READ);
      gst_byte_writer_put_data_unchecked (rtpmpadepay->mp3_frame,
          map.data, 4 + head->side_info);
      gst_buffer_unmap (head->buffer, &map);
    }

    buf = frame->buffer;
    av = gst_byte_writer_get_remaining (rtpmpadepay->mp3_frame);
    GST_LOG_OBJECT (rtpmpadepay, "current mp3 frame remaining: %d", av);
    GST_LOG_OBJECT (rtpmpadepay, "accumulated ADU frame data_size: %d",
        rtpmpadepay->size);

    if (rtpmpadepay->offset) {
      gst_buffer_map (buf, &map, GST_MAP_READ);
      /* no need to position, simply append */
      g_assert (map.size > rtpmpadepay->offset);
      av = MIN (av, map.size - rtpmpadepay->offset);
      GST_LOG_OBJECT (rtpmpadepay,
          "appending %d bytes from ADU frame at offset %d", av,
          rtpmpadepay->offset);
      gst_byte_writer_put_data_unchecked (rtpmpadepay->mp3_frame,
          map.data + rtpmpadepay->offset, av);
      rtpmpadepay->offset += av;
      gst_buffer_unmap (buf, &map);
    } else {
      gint pos, tpos;

      /* position writing according to ADU frame backpointer */
      pos = gst_byte_writer_get_pos (rtpmpadepay->mp3_frame);
      tpos = rtpmpadepay->size - frame->backpointer + 4 + head->side_info;
      GST_LOG_OBJECT (rtpmpadepay, "current MP3 frame at position %d, "
          "starting new ADU frame data at offset %d", pos, tpos);
      if (tpos < pos) {
        GstADUFrame *dummy;

        /* try to insert as few frames as possible,
         * so go for a reasonably large dummy frame size */
        GST_LOG_OBJECT (rtpmpadepay,
            "overlapping previous data; inserting dummy frame");
        dummy =
            gst_rtp_mpa_robust_depay_generate_dummy_frame (rtpmpadepay, frame);
        g_queue_insert_before (rtpmpadepay->adu_frames,
            rtpmpadepay->cur_adu_frame, dummy);
        /* offset is known to be zero, so we can shift current one */
        rtpmpadepay->cur_adu_frame = rtpmpadepay->cur_adu_frame->prev;
        if (!rtpmpadepay->size) {
          g_assert (rtpmpadepay->cur_adu_frame ==
              rtpmpadepay->adu_frames->head);
          GST_LOG_OBJECT (rtpmpadepay, "... which is new head frame");
          gst_byte_writer_free (rtpmpadepay->mp3_frame);
          rtpmpadepay->mp3_frame = NULL;
        }
        /* ... and continue adding that empty one immediately,
         * and then see if that provided enough extra space */
        continue;
      } else if (tpos >= pos + av) {
        /* ADU frame no longer needs current MP3 frame; move to its end */
        GST_LOG_OBJECT (rtpmpadepay, "passed current MP3 frame");
        gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, pos + av);
      } else {
        /* position and append */
        gst_buffer_map (buf, &map, GST_MAP_READ);
        GST_LOG_OBJECT (rtpmpadepay, "adding to current MP3 frame");
        gst_byte_writer_set_pos (rtpmpadepay->mp3_frame, tpos);
        av -= (tpos - pos);
        g_assert (map.size >= 4 + frame->side_info);
        av = MIN (av, map.size - 4 - frame->side_info);
        gst_byte_writer_put_data_unchecked (rtpmpadepay->mp3_frame,
            map.data + 4 + frame->side_info, av);
        rtpmpadepay->offset += av + 4 + frame->side_info;
        gst_buffer_unmap (buf, &map);
      }
    }

    /* if mp3 frame filled, send on its way */
    if (gst_byte_writer_get_remaining (rtpmpadepay->mp3_frame) == 0) {
    flush:
      buf = gst_byte_writer_free_and_get_buffer (rtpmpadepay->mp3_frame);
      rtpmpadepay->mp3_frame = NULL;
      GST_BUFFER_PTS (buf) = GST_BUFFER_PTS (head->buffer);
      /* no longer need head ADU frame header and side info */
      /* NOTE maybe head == current, then size and offset go off a bit,
       * but current gets reset to NULL, and then also offset and size */
      rtpmpadepay->size -= head->data_size;
      gst_rtp_mpa_robust_depay_dequeue_frame (rtpmpadepay);
      /* send */
      ret = gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpmpadepay),
          buf);
    }
  }

  return ret;
}
+
+/* Feed one ADU buffer (ownership transferred) into the depayloader:
+ * it is first deinterleaved, and any ADU frames that became available
+ * as a result are converted into regular MP3 frames and pushed
+ * downstream.
+ */
+static GstFlowReturn
+gst_rtp_mpa_robust_depay_submit_adu (GstRtpMPARobustDepay * rtpmpadepay,
+    GstBuffer * buf)
+{
+  if (!gst_rtp_mpa_robust_depay_deinterleave (rtpmpadepay, buf))
+    return GST_FLOW_OK;
+
+  return gst_rtp_mpa_robust_depay_push_mp3_frames (rtpmpadepay);
+}
+
+/* Parse one RTP packet (RFC 5219): split the payload into ADU
+ * descriptors and data, reassemble fragmented ADUs via the adapter,
+ * and submit each complete ADU for deinterleaving and MP3 frame
+ * reconstruction.  Never returns a buffer directly; output is pushed
+ * from the MP3 frame assembly code. */
+static GstBuffer *
+gst_rtp_mpa_robust_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp)
+{
+  GstRtpMPARobustDepay *rtpmpadepay;
+  gint payload_len, offset;
+  guint8 *payload;
+  gboolean cont, dtype;
+  guint av, size;
+  GstClockTime timestamp;
+  GstBuffer *buf;
+
+  rtpmpadepay = GST_RTP_MPA_ROBUST_DEPAY (depayload);
+
+  timestamp = GST_BUFFER_PTS (rtp->buffer);
+
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+  if (payload_len <= 1)
+    goto short_read;
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+  offset = 0;
+  GST_LOG_OBJECT (rtpmpadepay, "payload_len: %d", payload_len);
+
+  /* strip off descriptor
+   *
+   *  0                   1
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |C|T|            ADU size         |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   *
+   * C: if 1, data is continuation
+   * T: if 1, size is 14 bits, otherwise 6 bits
+   * ADU size: size of following packet (not including descriptor)
+   */
+  while (payload_len) {
+    if (G_LIKELY (rtpmpadepay->has_descriptor)) {
+      cont = ! !(payload[offset] & 0x80);
+      dtype = ! !(payload[offset] & 0x40);
+      if (dtype) {
+        /* T=1: 14-bit size, the descriptor occupies 2 bytes and we
+         * read payload[offset + 1], so at least 2 bytes must remain */
+        if (payload_len < 2)
+          goto short_read;
+        size = (payload[offset] & 0x3f) << 8 | payload[offset + 1];
+        payload_len -= 2;
+        offset += 2;
+      } else {
+        /* T=0: 6-bit size in a single descriptor byte */
+        size = (payload[offset] & 0x3f);
+        payload_len--;
+        offset++;
+      }
+    } else {
+      cont = FALSE;
+      dtype = -1;
+      size = payload_len;
+    }
+
+    GST_LOG_OBJECT (rtpmpadepay, "offset %d has cont: %d, dtype: %d, size: %d",
+        offset, cont, dtype, size);
+
+    /* clip to the data actually present in this packet */
+    buf = gst_rtp_buffer_get_payload_subbuffer (rtp, offset,
+        MIN (size, payload_len));
+
+    if (cont) {
+      /* continuation of an ADU started in a previous packet */
+      av = gst_adapter_available (rtpmpadepay->adapter);
+      if (G_UNLIKELY (!av)) {
+        GST_DEBUG_OBJECT (rtpmpadepay,
+            "discarding continuation fragment without prior fragment");
+        gst_buffer_unref (buf);
+      } else {
+        av += gst_buffer_get_size (buf);
+        gst_adapter_push (rtpmpadepay->adapter, buf);
+        if (av == size) {
+          /* ADU complete; stamp it with the PTS of its first fragment */
+          timestamp = gst_adapter_prev_pts (rtpmpadepay->adapter, NULL);
+          buf = gst_adapter_take_buffer (rtpmpadepay->adapter, size);
+          GST_BUFFER_PTS (buf) = timestamp;
+          gst_rtp_mpa_robust_depay_submit_adu (rtpmpadepay, buf);
+        } else if (av > size) {
+          GST_DEBUG_OBJECT (rtpmpadepay,
+              "assembled ADU size %d larger than expected %d; discarding",
+              av, size);
+          gst_adapter_clear (rtpmpadepay->adapter);
+        }
+      }
+      size = payload_len;
+    } else {
+      /* not continuation: whole ADU, one of several ADUs packed into
+       * this packet, or the first fragment of a larger ADU */
+      if (size <= (guint) payload_len) {
+        /* complete ADU (possibly followed by more data in this packet);
+         * previously only payload_len == size was handled, leaking the
+         * subbuffer and dropping ADUs when several shared a packet */
+        GST_BUFFER_PTS (buf) = timestamp;
+        gst_rtp_mpa_robust_depay_submit_adu (rtpmpadepay, buf);
+      } else {
+        /* first fragment */
+        gst_adapter_push (rtpmpadepay->adapter, buf);
+        size = payload_len;
+      }
+    }
+
+    offset += size;
+    payload_len -= size;
+
+    /* timestamp applies to first payload, no idea for subsequent ones */
+    timestamp = GST_CLOCK_TIME_NONE;
+  }
+
+  return NULL;
+
+  /* ERRORS */
+short_read:
+  {
+    GST_ELEMENT_WARNING (rtpmpadepay, STREAM, DECODE,
+        (NULL), ("Packet contains invalid data"));
+    return NULL;
+  }
+}
+
+/* Reset parsing state when (re)starting, and release all pending data
+ * when stopping. */
+static GstStateChangeReturn
+gst_rtp_mpa_robust_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstRtpMPARobustDepay *rtpmpadepay;
+
+  rtpmpadepay = GST_RTP_MPA_ROBUST_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      rtpmpadepay->last_ii = -1;
+      rtpmpadepay->last_icc = -1;
+      rtpmpadepay->size = 0;
+      rtpmpadepay->offset = 0;
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (ret != GST_STATE_CHANGE_SUCCESS)
+    return ret;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+    {
+      gint i;
+
+      gst_adapter_clear (rtpmpadepay->adapter);
+      for (i = 0; i < G_N_ELEMENTS (rtpmpadepay->deinter); i++) {
+        gst_buffer_replace (&rtpmpadepay->deinter[i], NULL);
+      }
+      rtpmpadepay->cur_adu_frame = NULL;
+      g_queue_foreach (rtpmpadepay->adu_frames,
+          (GFunc) gst_rtp_mpa_robust_depay_free_frame, NULL);
+      g_queue_clear (rtpmpadepay->adu_frames);
+      if (rtpmpadepay->mp3_frame) {
+        gst_byte_writer_free (rtpmpadepay->mp3_frame);
+        /* clear the dangling pointer: READY_TO_PAUSED does not reset
+         * mp3_frame, so a restart would otherwise use freed memory */
+        rtpmpadepay->mp3_frame = NULL;
+      }
+      break;
+    }
+    default:
+      break;
+  }
+
+  return ret;
+}
diff --git a/gst/rtp/gstrtpmparobustdepay.h b/gst/rtp/gstrtpmparobustdepay.h
new file mode 100644
index 0000000000..8a3f51bc5f
--- /dev/null
+++ b/gst/rtp/gstrtpmparobustdepay.h
@@ -0,0 +1,76 @@
+/* GStreamer
+ * Copyright (C) <2010> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
+ * Copyright (C) <2010> Nokia Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MPA_ROBUST_DEPAY_H__
+#define __GST_RTP_MPA_ROBUST_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstbytewriter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MPA_ROBUST_DEPAY \
+ (gst_rtp_mpa_robust_depay_get_type())
+#define GST_RTP_MPA_ROBUST_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MPA_ROBUST_DEPAY,GstRtpMPARobustDepay))
+#define GST_RTP_MPA_ROBUST_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MPA_ROBUST_DEPAY,GstRtpMPARobustDepayClass))
+#define GST_IS_RTP_MPA_ROBUST_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MPA_ROBUST_DEPAY))
+#define GST_IS_RTP_MPA_ROBUST_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MPA_ROBUST_DEPAY))
+
+typedef struct _GstRtpMPARobustDepay GstRtpMPARobustDepay;
+typedef struct _GstRtpMPARobustDepayClass GstRtpMPARobustDepayClass;
+
+struct _GstRtpMPARobustDepay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* reassembles ADUs fragmented across multiple RTP packets */
+  GstAdapter *adapter;
+  /* TRUE if payload carries 1-/2-byte ADU descriptors (see process()) */
+  gboolean has_descriptor;
+
+  /* last interleave index */
+  gint last_ii;
+  /* last interleave cycle count */
+  gint last_icc;
+  /* buffers pending deinterleaving */
+  GstBuffer *deinter[256];
+
+  /* ADU buffers pending MP3 transformation */
+  GQueue *adu_frames;
+  GList *cur_adu_frame;
+  /* byte bookkeeping used while rebuilding MP3 frames from ADUs */
+  gint offset;
+  gint size;
+  /* MP3 frame currently under construction, NULL if none */
+  GstByteWriter *mp3_frame;
+};
+
+struct _GstRtpMPARobustDepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_mpa_robust_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MPA_ROBUST_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpmpvdepay.c b/gst/rtp/gstrtpmpvdepay.c
new file mode 100644
index 0000000000..71c0712514
--- /dev/null
+++ b/gst/rtp/gstrtpmpvdepay.c
@@ -0,0 +1,194 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpmpvdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpmpvdepay_debug);
+#define GST_CAT_DEFAULT (rtpmpvdepay_debug)
+
+/* FIXME, we set the mpeg version to 2, we should ideally be looking at contents
+ * of the stream to figure out the version */
+static GstStaticPadTemplate gst_rtp_mpv_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS
+ ("video/mpeg, mpegversion = (int) 2, systemstream = (boolean) FALSE")
+ );
+
+static GstStaticPadTemplate gst_rtp_mpv_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"MPV\";"
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_MPV_STRING ", "
+ "clock-rate = (int) 90000")
+ );
+
+G_DEFINE_TYPE (GstRtpMPVDepay, gst_rtp_mpv_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpvdepay, "rtpmpvdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MPV_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_mpv_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_mpv_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static void
+gst_rtp_mpv_depay_class_init (GstRtpMPVDepayClass * klass)
+{
+  GstElementClass *element_class = (GstElementClass *) klass;
+  GstRTPBaseDepayloadClass *depayload_class =
+      (GstRTPBaseDepayloadClass *) klass;
+
+  /* wire up the depayloader virtual methods */
+  depayload_class->set_caps = gst_rtp_mpv_depay_setcaps;
+  depayload_class->process_rtp_packet = gst_rtp_mpv_depay_process;
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_mpv_depay_src_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_mpv_depay_sink_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP MPEG video depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts MPEG video from RTP packets (RFC 2250)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpmpvdepay_debug, "rtpmpvdepay", 0,
+      "MPEG Video RTP Depayloader");
+}
+
+static void
+gst_rtp_mpv_depay_init (GstRtpMPVDepay * rtpmpvdepay)
+{
+  /* no per-instance state to initialize */
+}
+
+static gboolean
+gst_rtp_mpv_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *s = gst_caps_get_structure (caps, 0);
+  GstCaps *srccaps;
+  gboolean result;
+  gint rate;
+
+  /* fall back to the 90 kHz media clock when not specified */
+  if (!gst_structure_get_int (s, "clock-rate", &rate))
+    rate = 90000;
+  depayload->clock_rate = rate;
+
+  srccaps = gst_caps_new_simple ("video/mpeg",
+      "mpegversion", G_TYPE_INT, 2,
+      "systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
+  result = gst_pad_set_caps (depayload->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  return result;
+}
+
+/* Strip the 4-byte MPEG video-specific header (RFC 2250, and its
+ * optional 4-byte MPEG-2 extension) from the RTP payload and return
+ * the remaining elementary-stream data. */
+static GstBuffer *
+gst_rtp_mpv_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpMPVDepay *rtpmpvdepay;
+  GstBuffer *outbuf = NULL;
+
+  rtpmpvdepay = GST_RTP_MPV_DEPAY (depayload);
+
+  {
+    gint payload_len, payload_header;
+    guint8 *payload;
+    guint8 T;
+
+    payload_len = gst_rtp_buffer_get_payload_len (rtp);
+    payload = gst_rtp_buffer_get_payload (rtp);
+    payload_header = 0;
+
+    /* need more than just the 4-byte video-specific header */
+    if (payload_len <= 4)
+      goto empty_packet;
+
+    /* 3.4 MPEG Video-specific header
+     *
+     *  0                   1                   2                   3
+     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     *                                  AN              FBV     FFV
+     */
+    /* T: MPEG-2 video-specific header extension follows */
+    T = (payload[0] & 0x04);
+
+    payload_len -= 4;
+    payload_header += 4;
+    payload += 4;
+
+    if (T) {
+      /*
+       * 3.4.1 MPEG-2 Video-specific header extension
+       *
+       *  0                   1                   2                   3
+       *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       * |X|E|f_[0,0]|f_[0,1]|f_[1,0]|f_[1,1]| DC| PS|T|P|C|Q|V|A|R|H|G|D|
+       * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       */
+      if (payload_len <= 4)
+        goto empty_packet;
+
+      /* the payload pointer itself is not read below; only
+       * payload_header is used to locate the ES data */
+      payload_len -= 4;
+      payload_header += 4;
+      payload += 4;
+    }
+
+    outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, payload_header, -1);
+
+    if (outbuf) {
+      GST_DEBUG_OBJECT (rtpmpvdepay,
+          "gst_rtp_mpv_depay_chain: pushing buffer of size %" G_GSIZE_FORMAT,
+          gst_buffer_get_size (outbuf));
+      gst_rtp_drop_non_video_meta (rtpmpvdepay, outbuf);
+
+    }
+  }
+
+  return outbuf;
+
+  /* ERRORS */
+empty_packet:
+  {
+    /* NOTE: also reached for short-but-nonempty (<= header size) packets */
+    GST_ELEMENT_WARNING (rtpmpvdepay, STREAM, DECODE,
+        (NULL), ("Empty payload."));
+    return NULL;
+  }
+}
diff --git a/gst/rtp/gstrtpmpvdepay.h b/gst/rtp/gstrtpmpvdepay.h
new file mode 100644
index 0000000000..158c2dcd40
--- /dev/null
+++ b/gst/rtp/gstrtpmpvdepay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MPV_DEPAY_H__
+#define __GST_RTP_MPV_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_MPV_DEPAY \
+ (gst_rtp_mpv_depay_get_type())
+#define GST_RTP_MPV_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MPV_DEPAY,GstRtpMPVDepay))
+#define GST_RTP_MPV_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MPV_DEPAY,GstRtpMPVDepayClass))
+#define GST_IS_RTP_MPV_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MPV_DEPAY))
+#define GST_IS_RTP_MPV_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MPV_DEPAY))
+
+typedef struct _GstRtpMPVDepay GstRtpMPVDepay;
+typedef struct _GstRtpMPVDepayClass GstRtpMPVDepayClass;
+
+/* Stateless depayloader: all bookkeeping lives in the base class */
+struct _GstRtpMPVDepay
+{
+  GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpMPVDepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_mpv_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MPV_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpmpvpay.c b/gst/rtp/gstrtpmpvpay.c
new file mode 100644
index 0000000000..ef8b4a654a
--- /dev/null
+++ b/gst/rtp/gstrtpmpvpay.c
@@ -0,0 +1,332 @@
+/* GStreamer
+ * Copyright (C) <2007> Thijs Vermeir <thijsvermeir@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpmpvpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpmpvpay_debug);
+#define GST_CAT_DEFAULT (rtpmpvpay_debug)
+
+static GstStaticPadTemplate gst_rtp_mpv_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg, "
+ "mpegversion = (int) 2, systemstream = (boolean) FALSE")
+ );
+
+static GstStaticPadTemplate gst_rtp_mpv_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_MPV_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"MPV\"; "
+ "application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"MPV\"")
+ );
+
+static GstStateChangeReturn gst_rtp_mpv_pay_change_state (GstElement * element,
+ GstStateChange transition);
+
+static void gst_rtp_mpv_pay_finalize (GObject * object);
+
+static GstFlowReturn gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay);
+static gboolean gst_rtp_mpv_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstFlowReturn gst_rtp_mpv_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+static gboolean gst_rtp_mpv_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+
+#define gst_rtp_mpv_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRTPMPVPay, gst_rtp_mpv_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpmpvpay, "rtpmpvpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_MPV_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_mpv_pay_class_init (GstRTPMPVPayClass * klass)
+{
+  GObjectClass *object_class = (GObjectClass *) klass;
+  GstElementClass *element_class = (GstElementClass *) klass;
+  GstRTPBasePayloadClass *payload_class = (GstRTPBasePayloadClass *) klass;
+
+  object_class->finalize = gst_rtp_mpv_pay_finalize;
+  element_class->change_state = gst_rtp_mpv_pay_change_state;
+
+  /* payloader virtual methods */
+  payload_class->set_caps = gst_rtp_mpv_pay_setcaps;
+  payload_class->handle_buffer = gst_rtp_mpv_pay_handle_buffer;
+  payload_class->sink_event = gst_rtp_mpv_pay_sink_event;
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_mpv_pay_sink_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_mpv_pay_src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP MPEG2 ES video payloader", "Codec/Payloader/Network/RTP",
+      "Payload-encodes MPEG2 ES into RTP packets (RFC 2250)",
+      "Thijs Vermeir <thijsvermeir@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpmpvpay_debug, "rtpmpvpay", 0,
+      "MPEG2 ES Video RTP Payloader");
+}
+
+static void
+gst_rtp_mpv_pay_init (GstRTPMPVPay * rtpmpvpay)
+{
+  /* RFC 2250 MPV: static payload type with a 90 kHz media clock */
+  GST_RTP_BASE_PAYLOAD (rtpmpvpay)->clock_rate = 90000;
+  GST_RTP_BASE_PAYLOAD_PT (rtpmpvpay) = GST_RTP_PAYLOAD_MPV;
+
+  /* accumulates input until enough data for MTU-sized packets */
+  rtpmpvpay->adapter = gst_adapter_new ();
+}
+
+static void
+gst_rtp_mpv_pay_finalize (GObject * object)
+{
+  GstRTPMPVPay *pay = GST_RTP_MPV_PAY (object);
+
+  /* release the pending-data adapter */
+  g_object_unref (pay->adapter);
+  pay->adapter = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Drop all pending data and restart timestamp/duration tracking. */
+static void
+gst_rtp_mpv_pay_reset (GstRTPMPVPay * pay)
+{
+  pay->first_ts = -1;
+  pay->duration = 0;
+  gst_adapter_clear (pay->adapter);
+  /* fixed copy-paste in log message: this element is a payloader */
+  GST_DEBUG_OBJECT (pay, "reset payloader");
+}
+
+static gboolean
+gst_rtp_mpv_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  /* the payload type is dynamic unless the static MPV PT is in use */
+  gboolean dynamic_pt = payload->pt != GST_RTP_PAYLOAD_MPV;
+
+  gst_rtp_base_payload_set_options (payload, "video", dynamic_pt, "MPV",
+      90000);
+  return gst_rtp_base_payload_set_outcaps (payload, NULL);
+}
+
+static gboolean
+gst_rtp_mpv_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+  GstRTPMPVPay *pay = GST_RTP_MPV_PAY (payload);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_EOS:
+      /* make sure we push the last packets in the adapter on EOS */
+      gst_rtp_mpv_pay_flush (pay);
+      break;
+    case GST_EVENT_FLUSH_STOP:
+      gst_rtp_mpv_pay_reset (pay);
+      break;
+    default:
+      break;
+  }
+
+  /* let the base class forward the event */
+  return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload,
+      event);
+}
+
+#define RTP_HEADER_LEN 12
+
+/* Drain the adapter into one or more MTU-sized RTP packets, each
+ * prefixed with the 4-byte MPEG video-specific header, and push them
+ * downstream as a single buffer list. */
+static GstFlowReturn
+gst_rtp_mpv_pay_flush (GstRTPMPVPay * rtpmpvpay)
+{
+  GstFlowReturn ret;
+  guint avail;
+  GstBufferList *list;
+  GstBuffer *outbuf;
+
+  guint8 *payload;
+
+  avail = gst_adapter_available (rtpmpvpay->adapter);
+
+  ret = GST_FLOW_OK;
+
+  GST_DEBUG_OBJECT (rtpmpvpay, "available %u", avail);
+  if (avail == 0)
+    return GST_FLOW_OK;
+
+  /* pre-size the list for the expected number of packets */
+  list =
+      gst_buffer_list_new_sized (avail / (GST_RTP_BASE_PAYLOAD_MTU (rtpmpvpay) -
+          RTP_HEADER_LEN) + 1);
+
+  while (avail > 0) {
+    guint towrite;
+    guint packet_len;
+    guint payload_len;
+    GstRTPBuffer rtp = { NULL };
+    GstBuffer *paybuf;
+
+    /* +4 accounts for the MPEG video-specific header */
+    packet_len = gst_rtp_buffer_calc_packet_len (avail + 4, 0, 0);
+
+    towrite = MIN (packet_len, GST_RTP_BASE_PAYLOAD_MTU (rtpmpvpay));
+
+    payload_len = gst_rtp_buffer_calc_payload_len (towrite, 0, 0);
+
+    /* allocate header buffer with 4 payload bytes for our header */
+    outbuf =
+        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+        (rtpmpvpay), 4, 0, 0);
+
+    /* payload_len now counts only the ES data appended below */
+    payload_len -= 4;
+
+    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+    payload = gst_rtp_buffer_get_payload (&rtp);
+    /* enable MPEG Video-specific header
+     *
+     *  0                   1                   2                   3
+     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     * |    MBZ  |T|         TR        | |N|S|B|E|  P  | | BFC | | FFC |
+     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     *                                  AN              FBV     FFV
+     */
+
+    /* fill in the MPEG Video-specific header
+     * data is set to 0x0 here
+     */
+    memset (payload, 0x0, 4);
+
+    avail -= payload_len;
+
+    /* marker on the last packet produced by this flush */
+    gst_rtp_buffer_set_marker (&rtp, avail == 0);
+    gst_rtp_buffer_unmap (&rtp);
+
+    paybuf = gst_adapter_take_buffer_fast (rtpmpvpay->adapter, payload_len);
+    gst_rtp_copy_video_meta (rtpmpvpay, outbuf, paybuf);
+    outbuf = gst_buffer_append (outbuf, paybuf);
+
+    GST_DEBUG_OBJECT (rtpmpvpay, "Adding buffer");
+
+    GST_BUFFER_PTS (outbuf) = rtpmpvpay->first_ts;
+    gst_buffer_list_add (list, outbuf);
+  }
+
+  ret = gst_rtp_base_payload_push_list (GST_RTP_BASE_PAYLOAD (rtpmpvpay), list);
+
+  return ret;
+}
+
+/* Queue incoming MPEG2 ES data in the adapter and flush once enough
+ * data/duration has accumulated to fill an RTP packet. */
+static GstFlowReturn
+gst_rtp_mpv_pay_handle_buffer (GstRTPBasePayload * basepayload,
+    GstBuffer * buffer)
+{
+  GstRTPMPVPay *rtpmpvpay;
+  guint avail, packet_len;
+  GstClockTime timestamp, duration;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  rtpmpvpay = GST_RTP_MPV_PAY (basepayload);
+
+  timestamp = GST_BUFFER_PTS (buffer);
+  duration = GST_BUFFER_DURATION (buffer);
+
+  if (GST_BUFFER_IS_DISCONT (buffer)) {
+    GST_DEBUG_OBJECT (rtpmpvpay, "DISCONT");
+    gst_rtp_mpv_pay_reset (rtpmpvpay);
+  }
+
+  avail = gst_adapter_available (rtpmpvpay->adapter);
+
+  /* unknown duration counts as zero towards the accumulated total */
+  if (duration == -1)
+    duration = 0;
+
+  /* (re)start timestamp tracking with the first queued buffer */
+  if (rtpmpvpay->first_ts == GST_CLOCK_TIME_NONE || avail == 0)
+    rtpmpvpay->first_ts = timestamp;
+
+  if (avail == 0) {
+    rtpmpvpay->duration = duration;
+  } else {
+    rtpmpvpay->duration += duration;
+  }
+
+  gst_adapter_push (rtpmpvpay->adapter, buffer);
+  avail = gst_adapter_available (rtpmpvpay->adapter);
+
+  /* get packet length of previous data and this new data,
+   * payload length includes a 4 byte MPEG video-specific header */
+  packet_len = gst_rtp_buffer_calc_packet_len (avail, 4, 0);
+  GST_LOG_OBJECT (rtpmpvpay, "available %d, rtp packet length %d", avail,
+      packet_len);
+
+  if (gst_rtp_base_payload_is_filled (basepayload,
+          packet_len, rtpmpvpay->duration)) {
+    ret = gst_rtp_mpv_pay_flush (rtpmpvpay);
+  } else {
+    /* NOTE(review): this overwrites first_ts with the newest buffer's
+     * PTS even though older data is still queued -- confirm intended */
+    rtpmpvpay->first_ts = timestamp;
+  }
+
+  return ret;
+}
+
+static GstStateChangeReturn
+gst_rtp_mpv_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRTPMPVPay *pay = GST_RTP_MPV_PAY (element);
+  GstStateChangeReturn res;
+
+  /* drop any stale state before starting ... */
+  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
+    gst_rtp_mpv_pay_reset (pay);
+
+  res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* ... and release pending data when stopping */
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_rtp_mpv_pay_reset (pay);
+
+  return res;
+}
diff --git a/gst/rtp/gstrtpmpvpay.h b/gst/rtp/gstrtpmpvpay.h
new file mode 100644
index 0000000000..a909ce92a0
--- /dev/null
+++ b/gst/rtp/gstrtpmpvpay.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2007> Thijs Vermeir <thijsvermeir@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_RTP_MPV_PAY_H__
+#define __GST_RTP_MPV_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPMPVPay GstRTPMPVPay;
+typedef struct _GstRTPMPVPayClass GstRTPMPVPayClass;
+
+#define GST_TYPE_RTP_MPV_PAY \
+ (gst_rtp_mpv_pay_get_type())
+#define GST_RTP_MPV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MPV_PAY,GstRTPMPVPay))
+#define GST_RTP_MPV_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MPV_PAY,GstRTPMPVPayClass))
+#define GST_IS_RTP_MPV_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MPV_PAY))
+#define GST_IS_RTP_MPV_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MPV_PAY))
+
+struct _GstRTPMPVPay
+{
+  GstRTPBasePayload payload;
+
+  /* pending ES data not yet payloaded */
+  GstAdapter *adapter;
+  /* PTS applied to the next outgoing packet(s) */
+  GstClockTime first_ts;
+  /* accumulated duration of the adapter contents */
+  GstClockTime duration;
+};
+
+struct _GstRTPMPVPayClass
+{
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_mpv_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_MPV_PAY_H__ */
diff --git a/gst/rtp/gstrtpopusdepay.c b/gst/rtp/gstrtpopusdepay.c
new file mode 100644
index 0000000000..26434e84f2
--- /dev/null
+++ b/gst/rtp/gstrtpopusdepay.c
@@ -0,0 +1,256 @@
+/*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpopusdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpopusdepay_debug);
+#define GST_CAT_DEFAULT (rtpopusdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_opus_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ","
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) { \"OPUS\", \"X-GST-OPUS-DRAFT-SPITTKA-00\", \"multiopus\" }")
+ );
+
+static GstStaticPadTemplate gst_rtp_opus_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus, channel-mapping-family = (int) [ 0, 1 ]")
+ );
+
+static GstBuffer *gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp_buffer);
+static gboolean gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+G_DEFINE_TYPE (GstRTPOpusDepay, gst_rtp_opus_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpopusdepay, "rtpopusdepay",
+ GST_RANK_PRIMARY, GST_TYPE_RTP_OPUS_DEPAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_opus_depay_class_init (GstRTPOpusDepayClass * klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstRTPBaseDepayloadClass *depayload_class =
+      (GstRTPBaseDepayloadClass *) klass;
+
+  /* virtual method overrides */
+  depayload_class->process_rtp_packet = gst_rtp_opus_depay_process;
+  depayload_class->set_caps = gst_rtp_opus_depay_setcaps;
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_opus_depay_sink_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_opus_depay_src_template);
+  gst_element_class_set_static_metadata (element_class,
+      "RTP Opus packet depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts Opus audio from RTP packets",
+      "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpopusdepay_debug, "rtpopusdepay", 0,
+      "Opus RTP Depayloader");
+}
+
+static void
+gst_rtp_opus_depay_init (GstRTPOpusDepay * rtpopusdepay)
+{
+  /* no per-instance state needed */
+}
+
+/* Translate the negotiated RTP caps (RFC 7587, plus the multiopus
+ * extension) into audio/x-opus source caps: channel count, stream
+ * counts, channel mapping and sample rate. */
+static gboolean
+gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstCaps *srccaps;
+  GstStructure *s;
+  gboolean ret;
+  const gchar *encoding_name;
+  const gchar *sprop_maxcapturerate;
+
+  srccaps = gst_caps_new_empty_simple ("audio/x-opus");
+
+  s = gst_caps_get_structure (caps, 0);
+
+  /* guard against caps lacking encoding-name instead of crashing in
+   * g_str_equal(); such caps are treated as plain (mono/stereo) Opus */
+  encoding_name = gst_structure_get_string (s, "encoding-name");
+  if (encoding_name && g_str_equal (encoding_name, "multiopus")) {
+    gint channels;
+    gint stream_count;
+    gint coupled_count;
+    const gchar *encoding_params;
+    const gchar *num_streams;
+    const gchar *coupled_streams;
+    const gchar *channel_mapping;
+    gchar *endptr;
+
+    if (!gst_structure_has_field_typed (s, "encoding-params", G_TYPE_STRING) ||
+        !gst_structure_has_field_typed (s, "num_streams", G_TYPE_STRING) ||
+        !gst_structure_has_field_typed (s, "coupled_streams", G_TYPE_STRING) ||
+        !gst_structure_has_field_typed (s, "channel_mapping", G_TYPE_STRING)) {
+      GST_WARNING_OBJECT (depayload, "Encoding name 'multiopus' requires "
+          "encoding-params, num_streams, coupled_streams and channel_mapping "
+          "as string fields in caps.");
+      goto reject_caps;
+    }
+
+    gst_caps_set_simple (srccaps, "channel-mapping-family", G_TYPE_INT, 1,
+        NULL);
+
+    encoding_params = gst_structure_get_string (s, "encoding-params");
+    channels = g_ascii_strtoull (encoding_params, &endptr, 10);
+    if (*endptr != '\0' || channels > 255) {
+      GST_WARNING_OBJECT (depayload, "Invalid encoding-params value '%s'",
+          encoding_params);
+      goto reject_caps;
+    }
+    gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, channels, NULL);
+
+    num_streams = gst_structure_get_string (s, "num_streams");
+    stream_count = g_ascii_strtoull (num_streams, &endptr, 10);
+    if (*endptr != '\0' || stream_count > channels) {
+      GST_WARNING_OBJECT (depayload, "Invalid num_streams value '%s'",
+          num_streams);
+      goto reject_caps;
+    }
+    gst_caps_set_simple (srccaps, "stream-count", G_TYPE_INT, stream_count,
+        NULL);
+
+    coupled_streams = gst_structure_get_string (s, "coupled_streams");
+    coupled_count = g_ascii_strtoull (coupled_streams, &endptr, 10);
+    if (*endptr != '\0' || coupled_count > stream_count) {
+      GST_WARNING_OBJECT (depayload, "Invalid coupled_streams value '%s'",
+          coupled_streams);
+      goto reject_caps;
+    }
+    gst_caps_set_simple (srccaps, "coupled-count", G_TYPE_INT, coupled_count,
+        NULL);
+
+    channel_mapping = gst_structure_get_string (s, "channel_mapping");
+    {
+      gchar **split;
+      gchar **ptr;
+      GValue mapping = G_VALUE_INIT;
+      GValue v = G_VALUE_INIT;
+
+      split = g_strsplit (channel_mapping, ",", -1);
+
+      g_value_init (&mapping, GST_TYPE_ARRAY);
+      g_value_init (&v, G_TYPE_INT);
+
+      for (ptr = split; *ptr; ++ptr) {
+        gint channel = g_ascii_strtoull (*ptr, &endptr, 10);
+        if (*endptr != '\0' || channel > channels) {
+          GST_WARNING_OBJECT (depayload, "Invalid channel_mapping value '%s'",
+              channel_mapping);
+          /* unset marks the mapping invalid; checked below */
+          g_value_unset (&mapping);
+          break;
+        }
+        g_value_set_int (&v, channel);
+        gst_value_array_append_value (&mapping, &v);
+      }
+
+      g_value_unset (&v);
+      g_strfreev (split);
+
+      if (G_IS_VALUE (&mapping)) {
+        gst_caps_set_value (srccaps, "channel-mapping", &mapping);
+        g_value_unset (&mapping);
+      } else {
+        goto reject_caps;
+      }
+    }
+  } else {
+    const gchar *sprop_stereo;
+
+    gst_caps_set_simple (srccaps, "channel-mapping-family", G_TYPE_INT, 0,
+        NULL);
+
+    if ((sprop_stereo = gst_structure_get_string (s, "sprop-stereo"))) {
+      if (strcmp (sprop_stereo, "0") == 0)
+        gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, 1, NULL);
+      else if (strcmp (sprop_stereo, "1") == 0)
+        gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, 2, NULL);
+      else
+        GST_WARNING_OBJECT (depayload, "Unknown sprop-stereo value '%s'",
+            sprop_stereo);
+    } else {
+      /* sprop-stereo defaults to mono as per RFC 7587. */
+      gst_caps_set_simple (srccaps, "channels", G_TYPE_INT, 1, NULL);
+    }
+  }
+
+  if ((sprop_maxcapturerate =
+          gst_structure_get_string (s, "sprop-maxcapturerate"))) {
+    gulong rate;
+    gchar *tailptr;
+
+    rate = strtoul (sprop_maxcapturerate, &tailptr, 10);
+    if (rate > INT_MAX || *tailptr != '\0') {
+      GST_WARNING_OBJECT (depayload,
+          "Failed to parse sprop-maxcapturerate value '%s'",
+          sprop_maxcapturerate);
+    } else {
+      /* cast: G_TYPE_INT varargs must receive an int, not a gulong */
+      gst_caps_set_simple (srccaps, "rate", G_TYPE_INT, (gint) rate, NULL);
+    }
+  }
+
+  ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+  GST_DEBUG_OBJECT (depayload,
+      "set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
+  gst_caps_unref (srccaps);
+
+  /* Opus on RTP always runs with a 48 kHz RTP clock (RFC 7587) */
+  depayload->clock_rate = 48000;
+
+  return ret;
+
+reject_caps:
+  gst_caps_unref (srccaps);
+
+  return FALSE;
+}
+
+static GstBuffer *
+gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp_buffer)
+{
+  /* the RTP payload already is a raw Opus packet: strip the RTP
+   * header and drop any meta that is not audio-related */
+  GstBuffer *out = gst_rtp_buffer_get_payload_buffer (rtp_buffer);
+
+  gst_rtp_drop_non_audio_meta (depayload, out);
+
+  return out;
+}
diff --git a/gst/rtp/gstrtpopusdepay.h b/gst/rtp/gstrtpopusdepay.h
new file mode 100644
index 0000000000..7890eb19d5
--- /dev/null
+++ b/gst/rtp/gstrtpopusdepay.h
@@ -0,0 +1,57 @@
+/*
+ * Opus Depayloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_OPUS_DEPAY_H__
+#define __GST_RTP_OPUS_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+/* NOTE(review): "G_BEGIN_DECLS typedef ..." on one line is a gst-indent
+ * formatting artifact; harmless, but could be split for readability. */
+G_BEGIN_DECLS typedef struct _GstRTPOpusDepay GstRTPOpusDepay;
+typedef struct _GstRTPOpusDepayClass GstRTPOpusDepayClass;
+
+/* Standard GObject boilerplate macros for the Opus RTP depayloader type. */
+#define GST_TYPE_RTP_OPUS_DEPAY \
+ (gst_rtp_opus_depay_get_type())
+#define GST_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepay))
+#define GST_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepayClass))
+#define GST_IS_RTP_OPUS_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_DEPAY))
+#define GST_IS_RTP_OPUS_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_DEPAY))
+
+
+/* Instance structure: no state beyond the base depayloader. */
+struct _GstRTPOpusDepay
+{
+ GstRTPBaseDepayload depayload;
+
+};
+
+struct _GstRTPOpusDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_opus_depay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_OPUS_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpopuspay.c b/gst/rtp/gstrtpopuspay.c
new file mode 100644
index 0000000000..0bcdc6a948
--- /dev/null
+++ b/gst/rtp/gstrtpopuspay.c
@@ -0,0 +1,421 @@
+/*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpopuspay
+ * @title: rtpopuspay
+ *
+ * rtpopuspay encapsulates Opus-encoded audio data into RTP packets following
+ * the payload format described in RFC 7587.
+ *
+ * In addition to the RFC, which assumes only mono and stereo payload,
+ * the element supports multichannel Opus audio streams using a non-standardized
+ * SDP config and "multiopus" codec developed by Google for libwebrtc. When the
+ * input data have more than 2 channels, rtpopuspay will add extra fields to
+ * output caps that can be used to generate SDP in the syntax understood by
+ * libwebrtc. For example in the case of 5.1 audio:
+ *
+ * |[
+ * a=rtpmap:96 multiopus/48000/6
+ * a=fmtp:96 num_streams=4;coupled_streams=2;channel_mapping=0,4,1,2,3,5
+ * ]|
+ *
+ * See https://webrtc-review.googlesource.com/c/src/+/129768 for more details on
+ * multichannel Opus in libwebrtc.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpopuspay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpopuspay_debug);
+#define GST_CAT_DEFAULT (rtpopuspay_debug)
+
+enum
+{
+ PROP_0,
+ PROP_DTX,
+};
+
+#define DEFAULT_DTX FALSE
+
+static GstStaticPadTemplate gst_rtp_opus_pay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-opus, channel-mapping-family = (int) 0;"
+ "audio/x-opus, channel-mapping-family = (int) 0, channels = (int) [1, 2];"
+ "audio/x-opus, channel-mapping-family = (int) 1, channels = (int) [3, 255]")
+ );
+
+static GstStaticPadTemplate gst_rtp_opus_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 48000, "
+ "encoding-name = (string) { \"OPUS\", \"X-GST-OPUS-DRAFT-SPITTKA-00\", \"multiopus\" }")
+ );
+
+static gboolean gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstCaps *gst_rtp_opus_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+G_DEFINE_TYPE (GstRtpOPUSPay, gst_rtp_opus_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpopuspay, "rtpopuspay",
+ GST_RANK_PRIMARY, GST_TYPE_RTP_OPUS_PAY, rtp_element_init (plugin));
+
+#define GST_RTP_OPUS_PAY_CAST(obj) ((GstRtpOPUSPay *)(obj))
+
+/* GObject property setter; "dtx" is the only settable property. */
+static void
+gst_rtp_opus_pay_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY (object);
+
+ switch (prop_id) {
+ case PROP_DTX:
+ self->dtx = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter; mirrors the setter above. */
+static void
+gst_rtp_opus_pay_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY (object);
+
+ switch (prop_id) {
+ case PROP_DTX:
+ g_value_set_boolean (value, self->dtx);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Element state-change hook.
+ *
+ * On READY->PAUSED the marker flag is (re)armed so the first buffer of a
+ * new streaming session carries the RTP MARKER flag (see handle_buffer).
+ */
+static GstStateChangeReturn
+gst_rtp_opus_pay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY (element);
+ GstStateChangeReturn ret;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ self->marker = TRUE;
+ break;
+ default:
+ break;
+ }
+
+ /* Chain up so the base classes perform the actual transition. */
+ ret =
+ GST_ELEMENT_CLASS (gst_rtp_opus_pay_parent_class)->change_state (element,
+ transition);
+
+ /* NOTE(review): this post-transition switch has only a default branch
+ * and does nothing; it appears to be scaffolding and could be removed. */
+ switch (transition) {
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/* Class initialization: wires up vfuncs, pad templates, the "dtx" property
+ * and the element metadata, and registers the debug category. */
+static void
+gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass)
+{
+ GstRTPBasePayloadClass *gstbasertppayload_class;
+ GstElementClass *element_class;
+ GObjectClass *gobject_class;
+
+ gstbasertppayload_class = (GstRTPBasePayloadClass *) klass;
+ element_class = GST_ELEMENT_CLASS (klass);
+ gobject_class = (GObjectClass *) klass;
+
+ element_class->change_state = gst_rtp_opus_pay_change_state;
+
+ gstbasertppayload_class->set_caps = gst_rtp_opus_pay_setcaps;
+ gstbasertppayload_class->get_caps = gst_rtp_opus_pay_getcaps;
+ gstbasertppayload_class->handle_buffer = gst_rtp_opus_pay_handle_buffer;
+
+ gobject_class->set_property = gst_rtp_opus_pay_set_property;
+ gobject_class->get_property = gst_rtp_opus_pay_get_property;
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_opus_pay_src_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_opus_pay_sink_template);
+
+ /**
+ * GstRtpOPUSPay:dtx:
+ *
+ * If enabled, the payloader will not transmit empty packets.
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_DTX,
+ g_param_spec_boolean ("dtx", "Discontinuous Transmission",
+ "If enabled, the payloader will not transmit empty packets",
+ DEFAULT_DTX,
+ G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
+ G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (element_class,
+ "RTP Opus payloader",
+ "Codec/Payloader/Network/RTP",
+ "Puts Opus audio in RTP packets",
+ "Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpopuspay_debug, "rtpopuspay", 0,
+ "Opus RTP Payloader");
+}
+
+/* Instance initialization: only the "dtx" property default is set here;
+ * the marker flag is armed in change_state on READY->PAUSED. */
+static void
+gst_rtp_opus_pay_init (GstRtpOPUSPay * rtpopuspay)
+{
+ rtpopuspay->dtx = DEFAULT_DTX;
+}
+
+/* Negotiate output RTP caps from the incoming audio/x-opus caps.
+ *
+ * Builds a structure of SDP-style fmtp fields (all G_TYPE_STRING, as they
+ * end up in SDP text): encoding-params, sprop-stereo/sprop-maxcapturerate
+ * for mono/stereo, or num_streams/coupled_streams/channel_mapping for the
+ * libwebrtc "multiopus" extension when channels > 2. Returns TRUE if the
+ * output caps were accepted downstream.
+ */
+static gboolean
+gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+ GstCaps *src_caps;
+ GstStructure *s, *outcaps;
+ const char *encoding_name = "OPUS";
+ gint channels = 2;
+ gint rate;
+ gchar *encoding_params;
+
+ /* Name is irrelevant; only the fields are merged into the out caps. */
+ outcaps = gst_structure_new_empty ("unused");
+
+ /* If the peer cannot accept "OPUS", fall back to the pre-RFC draft name. */
+ src_caps = gst_pad_get_allowed_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
+ if (src_caps) {
+ GstStructure *s;
+ const GValue *value;
+
+ s = gst_caps_get_structure (src_caps, 0);
+
+ if (gst_structure_has_field (s, "encoding-name")) {
+ GValue default_value = G_VALUE_INIT;
+
+ g_value_init (&default_value, G_TYPE_STRING);
+ g_value_set_static_string (&default_value, encoding_name);
+
+ value = gst_structure_get_value (s, "encoding-name");
+ if (!gst_value_can_intersect (&default_value, value))
+ encoding_name = "X-GST-OPUS-DRAFT-SPITTKA-00";
+ }
+ gst_caps_unref (src_caps);
+ }
+
+ s = gst_caps_get_structure (caps, 0);
+ if (gst_structure_get_int (s, "channels", &channels)) {
+ if (channels > 2) {
+ /* Implies channel-mapping-family = 1. */
+
+ gint stream_count, coupled_count;
+ const GValue *channel_mapping_array;
+
+ /* libwebrtc only supports "multiopus" when channels > 2. Mono and stereo
+ * sound must always be payloaded according to RFC 7587. */
+ encoding_name = "multiopus";
+
+ if (gst_structure_get_int (s, "stream-count", &stream_count)) {
+ char *num_streams = g_strdup_printf ("%d", stream_count);
+ gst_structure_set (outcaps, "num_streams", G_TYPE_STRING, num_streams,
+ NULL);
+ g_free (num_streams);
+ }
+ if (gst_structure_get_int (s, "coupled-count", &coupled_count)) {
+ char *coupled_streams = g_strdup_printf ("%d", coupled_count);
+ gst_structure_set (outcaps, "coupled_streams", G_TYPE_STRING,
+ coupled_streams, NULL);
+ g_free (coupled_streams);
+ }
+
+ /* Serialize the channel-mapping int array as a comma-separated string,
+ * matching the SDP channel_mapping syntax used by libwebrtc. */
+ channel_mapping_array = gst_structure_get_value (s, "channel-mapping");
+ if (GST_VALUE_HOLDS_ARRAY (channel_mapping_array)) {
+ GString *str = g_string_new (NULL);
+ guint i;
+
+ for (i = 0; i < gst_value_array_get_size (channel_mapping_array); ++i) {
+ if (i != 0) {
+ g_string_append_c (str, ',');
+ }
+ g_string_append_printf (str, "%d",
+ g_value_get_int (gst_value_array_get_value (channel_mapping_array,
+ i)));
+ }
+
+ gst_structure_set (outcaps, "channel_mapping", G_TYPE_STRING, str->str,
+ NULL);
+
+ g_string_free (str, TRUE);
+ }
+ } else {
+ gst_structure_set (outcaps, "sprop-stereo", G_TYPE_STRING,
+ (channels == 2) ? "1" : "0", NULL);
+ /* RFC 7587 requires the number of channels always be 2. */
+ channels = 2;
+ }
+ }
+
+ encoding_params = g_strdup_printf ("%d", channels);
+ gst_structure_set (outcaps, "encoding-params", G_TYPE_STRING,
+ encoding_params, NULL);
+ g_free (encoding_params);
+
+ if (gst_structure_get_int (s, "rate", &rate)) {
+ gchar *sprop_maxcapturerate = g_strdup_printf ("%d", rate);
+
+ gst_structure_set (outcaps, "sprop-maxcapturerate", G_TYPE_STRING,
+ sprop_maxcapturerate, NULL);
+
+ g_free (sprop_maxcapturerate);
+ }
+
+ /* Opus over RTP always advertises a 48000 Hz clock rate. */
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE,
+ encoding_name, 48000);
+
+ res = gst_rtp_base_payload_set_outcaps_structure (payload, outcaps);
+
+ gst_structure_free (outcaps);
+
+ return res;
+}
+
+/* Payload one Opus frame into an RTP packet.
+ *
+ * When the "dtx" property is set, frames of <= 2 bytes (DTX "empty" frames,
+ * header only) are dropped instead of transmitted, and the next real frame
+ * is flagged with the RTP MARKER bit to signal the start of a talkspurt.
+ */
+static GstFlowReturn
+gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpOPUSPay *self = GST_RTP_OPUS_PAY_CAST (basepayload);
+ GstBuffer *outbuf;
+ GstClockTime pts, dts, duration;
+
+ /* DTX packets are zero-length frames, with a 1 or 2-bytes header */
+ if (self->dtx && gst_buffer_get_size (buffer) <= 2) {
+ GST_LOG_OBJECT (self,
+ "discard empty buffer as DTX is enabled: %" GST_PTR_FORMAT, buffer);
+ self->marker = TRUE;
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+
+ /* Save the timestamps before appending; the append consumes `buffer`. */
+ pts = GST_BUFFER_PTS (buffer);
+ dts = GST_BUFFER_DTS (buffer);
+ duration = GST_BUFFER_DURATION (buffer);
+
+ /* Allocate an RTP header buffer (no payload/pad/CSRC) and append the
+ * Opus frame to it unchanged. */
+ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
+
+ gst_rtp_copy_audio_meta (basepayload, outbuf, buffer);
+
+ outbuf = gst_buffer_append (outbuf, buffer);
+
+ GST_BUFFER_PTS (outbuf) = pts;
+ GST_BUFFER_DTS (outbuf) = dts;
+ GST_BUFFER_DURATION (outbuf) = duration;
+
+ if (self->marker) {
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_MARKER);
+ self->marker = FALSE;
+ }
+
+ /* Push out */
+ return gst_rtp_base_payload_push (basepayload, outbuf);
+}
+
+/* Compute sink-pad caps constrained by the downstream peer.
+ *
+ * For a src-pad query this defers to the base class. For the sink pad it
+ * inspects the peer's "stereo" fmtp field and, if present, reorders the
+ * advertised channel counts so the preferred value (2 for stereo=1, 1 for
+ * stereo=0) comes first while still allowing the other.
+ */
+static GstCaps *
+gst_rtp_opus_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter)
+{
+ GstCaps *caps, *peercaps, *tcaps;
+ GstStructure *s;
+ const gchar *stereo;
+
+ if (pad == GST_RTP_BASE_PAYLOAD_SRCPAD (payload))
+ return
+ GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_opus_pay_parent_class)->get_caps
+ (payload, pad, filter);
+
+ /* Query the downstream peer, limited to our src template caps. */
+ tcaps = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
+ peercaps = gst_pad_peer_query_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload),
+ tcaps);
+ gst_caps_unref (tcaps);
+ if (!peercaps)
+ return
+ GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_opus_pay_parent_class)->get_caps
+ (payload, pad, filter);
+
+ if (gst_caps_is_empty (peercaps))
+ return peercaps;
+
+ caps = gst_pad_get_pad_template_caps (GST_RTP_BASE_PAYLOAD_SINKPAD (payload));
+
+ s = gst_caps_get_structure (peercaps, 0);
+ stereo = gst_structure_get_string (s, "stereo");
+ if (stereo != NULL) {
+ caps = gst_caps_make_writable (caps);
+
+ /* Put the peer's preferred channel count first in the result. */
+ if (!strcmp (stereo, "1")) {
+ GstCaps *caps2 = gst_caps_copy (caps);
+
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, 2, NULL);
+ gst_caps_set_simple (caps2, "channels", G_TYPE_INT, 1, NULL);
+ caps = gst_caps_merge (caps, caps2);
+ } else if (!strcmp (stereo, "0")) {
+ GstCaps *caps2 = gst_caps_copy (caps);
+
+ gst_caps_set_simple (caps, "channels", G_TYPE_INT, 1, NULL);
+ gst_caps_set_simple (caps2, "channels", G_TYPE_INT, 2, NULL);
+ caps = gst_caps_merge (caps, caps2);
+ }
+ }
+ gst_caps_unref (peercaps);
+
+ if (filter) {
+ GstCaps *tmp = gst_caps_intersect_full (caps, filter,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ caps = tmp;
+ }
+
+ GST_DEBUG_OBJECT (payload, "Returning caps: %" GST_PTR_FORMAT, caps);
+ return caps;
+}
diff --git a/gst/rtp/gstrtpopuspay.h b/gst/rtp/gstrtpopuspay.h
new file mode 100644
index 0000000000..b862913af4
--- /dev/null
+++ b/gst/rtp/gstrtpopuspay.h
@@ -0,0 +1,63 @@
+/*
+ * Opus Payloader Gst Element
+ *
+ * @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_OPUS_PAY_H__
+#define __GST_RTP_OPUS_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject boilerplate macros for the Opus RTP payloader type. */
+#define GST_TYPE_RTP_OPUS_PAY \
+ (gst_rtp_opus_pay_get_type())
+#define GST_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPay))
+#define GST_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPayClass))
+#define GST_IS_RTP_OPUS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_PAY))
+#define GST_IS_RTP_OPUS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_PAY))
+
+typedef struct _GstRtpOPUSPay GstRtpOPUSPay;
+typedef struct _GstRtpOPUSPayClass GstRtpOPUSPayClass;
+
+struct _GstRtpOPUSPay
+{
+ GstRTPBasePayload payload;
+
+ /* "dtx" property: when TRUE, empty (DTX) frames are dropped. */
+ gboolean dtx;
+
+ /* if the next produced buffer should have the MARKER flag */
+ gboolean marker;
+};
+
+struct _GstRtpOPUSPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_opus_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_OPUS_PAY_H__ */
diff --git a/gst/rtp/gstrtppcmadepay.c b/gst/rtp/gstrtppcmadepay.c
new file mode 100644
index 0000000000..82727fbe93
--- /dev/null
+++ b/gst/rtp/gstrtppcmadepay.c
@@ -0,0 +1,160 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) <2005> Zeeshan Ali <zeenix@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtppcmadepay.h"
+#include "gstrtputils.h"
+
+/* RtpPcmaDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static GstStaticPadTemplate gst_rtp_pcma_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_PCMA_STRING ", "
+ "clock-rate = (int) 8000;"
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) [1, MAX ], encoding-name = (string) \"PCMA\"")
+ );
+
+static GstStaticPadTemplate gst_rtp_pcma_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-alaw, channels = (int) 1, rate = (int) [1, MAX ]")
+ );
+
+static GstBuffer *gst_rtp_pcma_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_pcma_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+#define gst_rtp_pcma_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaDepay, gst_rtp_pcma_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmadepay, "rtppcmadepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMA_DEPAY, rtp_element_init (plugin));
+
+/* Class initialization: pad templates, element metadata, and the
+ * process/set_caps vfuncs of the base depayloader. */
+static void
+gst_rtp_pcma_depay_class_init (GstRtpPcmaDepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcma_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcma_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP PCMA depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts PCMA audio from RTP packets",
+ "Edgard Lima <edgard.lima@gmail.com>, Zeeshan Ali <zeenix@gmail.com>");
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_pcma_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_pcma_depay_setcaps;
+}
+
+/* Instance initialization: the src pad always outputs the same fixed
+ * audio/x-alaw caps, so mark it fixed-caps. */
+static void
+gst_rtp_pcma_depay_init (GstRtpPcmaDepay * rtppcmadepay)
+{
+ GstRTPBaseDepayload *depayload;
+
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtppcmadepay);
+
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
+}
+
+/* Accept RTP sink caps and configure the A-law output caps.
+ *
+ * The output sample rate equals the RTP clock rate (defaulting to the
+ * static-payload 8000 Hz when absent); output is always mono A-law.
+ */
+static gboolean
+gst_rtp_pcma_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ GstStructure *structure;
+ gboolean ret;
+ gint clock_rate;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 8000; /* default */
+ depayload->clock_rate = clock_rate;
+
+ srccaps = gst_caps_new_simple ("audio/x-alaw",
+ "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, clock_rate, NULL);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ gst_caps_unref (srccaps);
+
+ return ret;
+}
+
+/* Depayload one RTP packet into an A-law buffer.
+ *
+ * A-law is one byte per sample, so the buffer duration is derived directly
+ * from the payload length and the negotiated clock rate. The RTP marker bit
+ * (start of a talkspurt) is mapped to GST_BUFFER_FLAG_RESYNC.
+ */
+static GstBuffer *
+gst_rtp_pcma_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstBuffer *outbuf = NULL;
+ gboolean marker;
+ guint len;
+
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (rtp->buffer), marker,
+ gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+ len = gst_rtp_buffer_get_payload_len (rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+
+ if (outbuf) {
+ /* len bytes == len samples at clock_rate samples/second. */
+ GST_BUFFER_DURATION (outbuf) =
+ gst_util_uint64_scale_int (len, GST_SECOND, depayload->clock_rate);
+
+ if (marker) {
+ /* mark start of talkspurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ gst_rtp_drop_non_audio_meta (depayload, outbuf);
+ }
+
+ return outbuf;
+}
diff --git a/gst/rtp/gstrtppcmadepay.h b/gst/rtp/gstrtppcmadepay.h
new file mode 100644
index 0000000000..c74bd05311
--- /dev/null
+++ b/gst/rtp/gstrtppcmadepay.h
@@ -0,0 +1,51 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more
+ */
+
+#ifndef __GST_RTP_PCMA_DEPAY_H__
+#define __GST_RTP_PCMA_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpPcmaDepay GstRtpPcmaDepay;
+typedef struct _GstRtpPcmaDepayClass GstRtpPcmaDepayClass;
+
+/* Standard GObject boilerplate macros for the PCMA RTP depayloader type. */
+#define GST_TYPE_RTP_PCMA_DEPAY \
+ (gst_rtp_pcma_depay_get_type())
+#define GST_RTP_PCMA_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_PCMA_DEPAY,GstRtpPcmaDepay))
+#define GST_RTP_PCMA_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_PCMA_DEPAY,GstRtpPcmaDepayClass))
+#define GST_IS_RTP_PCMA_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_PCMA_DEPAY))
+#define GST_IS_RTP_PCMA_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_PCMA_DEPAY))
+
+/* Instance structure: no state beyond the base depayloader. */
+struct _GstRtpPcmaDepay
+{
+ GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpPcmaDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_pcma_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_PCMA_DEPAY_H__ */
diff --git a/gst/rtp/gstrtppcmapay.c b/gst/rtp/gstrtppcmapay.c
new file mode 100644
index 0000000000..c51639633c
--- /dev/null
+++ b/gst/rtp/gstrtppcmapay.c
@@ -0,0 +1,112 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) <2005> Nokia Corporation <kai.vehmanen@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtppcmapay.h"
+
+static GstStaticPadTemplate gst_rtp_pcma_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-alaw, channels=(int)1, rate=(int)8000")
+ );
+
+static GstStaticPadTemplate gst_rtp_pcma_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_PCMA_STRING ", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"PCMA\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"PCMA\"")
+ );
+
+static gboolean gst_rtp_pcma_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+
+#define gst_rtp_pcma_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmaPay, gst_rtp_pcma_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmapay, "rtppcmapay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMA_PAY, rtp_element_init (plugin));
+
+/* Class initialization: pad templates, element metadata, and the set_caps
+ * vfunc; packetization itself is handled by GstRTPBaseAudioPayload. */
+static void
+gst_rtp_pcma_pay_class_init (GstRtpPcmaPayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcma_pay_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcma_pay_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP PCMA payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes PCMA audio into a RTP packet",
+ "Edgard Lima <edgard.lima@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_pcma_pay_setcaps;
+}
+
+/* Instance initialization: static payload type 8 (PCMA) at 8000 Hz, and
+ * configure the audio base payloader for 1-byte samples. */
+static void
+gst_rtp_pcma_pay_init (GstRtpPcmaPay * rtppcmapay)
+{
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtppcmapay);
+
+ GST_RTP_BASE_PAYLOAD (rtppcmapay)->pt = GST_RTP_PAYLOAD_PCMA;
+ GST_RTP_BASE_PAYLOAD (rtppcmapay)->clock_rate = 8000;
+
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
+
+ /* octet-per-sample is 1 for PCM */
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload, 1);
+}
+
+/* Accept A-law sink caps and set the RTP output options.
+ *
+ * The payload type is advertised as dynamic only if it was overridden away
+ * from the static PCMA value (8); clock rate is always 8000 Hz.
+ */
+static gboolean
+gst_rtp_pcma_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+
+ gst_rtp_base_payload_set_options (payload, "audio",
+ payload->pt != GST_RTP_PAYLOAD_PCMA, "PCMA", 8000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+ return res;
+}
diff --git a/gst/rtp/gstrtppcmapay.h b/gst/rtp/gstrtppcmapay.h
new file mode 100644
index 0000000000..7f67dffcd1
--- /dev/null
+++ b/gst/rtp/gstrtppcmapay.h
@@ -0,0 +1,52 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more
+ */
+
+
+#ifndef __GST_RTP_PCMA_PAY_H__
+#define __GST_RTP_PCMA_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpPcmaPay GstRtpPcmaPay;
+typedef struct _GstRtpPcmaPayClass GstRtpPcmaPayClass;
+
+/* Standard GObject boilerplate macros for the PCMA RTP payloader type. */
+#define GST_TYPE_RTP_PCMA_PAY \
+ (gst_rtp_pcma_pay_get_type())
+#define GST_RTP_PCMA_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_PCMA_PAY,GstRtpPcmaPay))
+#define GST_RTP_PCMA_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_PCMA_PAY,GstRtpPcmaPayClass))
+#define GST_IS_RTP_PCMA_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_PCMA_PAY))
+#define GST_IS_RTP_PCMA_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_PCMA_PAY))
+
+/* Instance structure: no state beyond the audio base payloader. */
+struct _GstRtpPcmaPay
+{
+ GstRTPBaseAudioPayload audiopayload;
+};
+
+struct _GstRtpPcmaPayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_pcma_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_PCMA_PAY_H__ */
diff --git a/gst/rtp/gstrtppcmudepay.c b/gst/rtp/gstrtppcmudepay.c
new file mode 100644
index 0000000000..9bd3afb3d6
--- /dev/null
+++ b/gst/rtp/gstrtppcmudepay.c
@@ -0,0 +1,161 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) <2005> Zeeshan Ali <zeenix@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtppcmudepay.h"
+#include "gstrtputils.h"
+
+/* RtpPcmuDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static GstStaticPadTemplate gst_rtp_pcmu_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_PCMU_STRING ", "
+ "clock-rate = (int) 8000; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "encoding-name = (string) \"PCMU\", clock-rate = (int) [1, MAX ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_pcmu_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-mulaw, "
+ "channels = (int) 1, rate = (int) [1, MAX ]")
+ );
+
+static GstBuffer *gst_rtp_pcmu_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_pcmu_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+#define gst_rtp_pcmu_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuDepay, gst_rtp_pcmu_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmudepay, "rtppcmudepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMU_DEPAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_pcmu_depay_class_init (GstRtpPcmuDepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcmu_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcmu_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP PCMU depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts PCMU audio from RTP packets",
+ "Edgard Lima <edgard.lima@gmail.com>, Zeeshan Ali <zeenix@gmail.com>");
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_pcmu_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_pcmu_depay_setcaps;
+}
+
+static void
+gst_rtp_pcmu_depay_init (GstRtpPcmuDepay * rtppcmudepay)
+{
+ GstRTPBaseDepayload *depayload;
+
+ depayload = GST_RTP_BASE_DEPAYLOAD (rtppcmudepay);
+
+ gst_pad_use_fixed_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
+}
+
+static gboolean
+gst_rtp_pcmu_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ GstStructure *structure;
+ gboolean ret;
+ gint clock_rate;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 8000; /* default */
+ depayload->clock_rate = clock_rate;
+
+ srccaps = gst_caps_new_simple ("audio/x-mulaw",
+ "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, clock_rate, NULL);
+ ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ gst_caps_unref (srccaps);
+
+ return ret;
+}
+
+static GstBuffer *
+gst_rtp_pcmu_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+ GstBuffer *outbuf = NULL;
+ guint len;
+ gboolean marker;
+
+ marker = gst_rtp_buffer_get_marker (rtp);
+
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (rtp->buffer), marker,
+ gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+ len = gst_rtp_buffer_get_payload_len (rtp);
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+
+ if (outbuf) {
+ GST_BUFFER_DURATION (outbuf) =
+ gst_util_uint64_scale_int (len, GST_SECOND, depayload->clock_rate);
+
+ if (marker) {
+ /* mark start of talkspurt with RESYNC */
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
+ }
+
+ gst_rtp_drop_non_audio_meta (depayload, outbuf);
+ }
+
+ return outbuf;
+}
diff --git a/gst/rtp/gstrtppcmudepay.h b/gst/rtp/gstrtppcmudepay.h
new file mode 100644
index 0000000000..a3d017349a
--- /dev/null
+++ b/gst/rtp/gstrtppcmudepay.h
@@ -0,0 +1,51 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ */
+
+#ifndef __GST_RTP_PCMU_DEPAY_H__
+#define __GST_RTP_PCMU_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpPcmuDepay GstRtpPcmuDepay;
+typedef struct _GstRtpPcmuDepayClass GstRtpPcmuDepayClass;
+
+#define GST_TYPE_RTP_PCMU_DEPAY \
+ (gst_rtp_pcmu_depay_get_type())
+#define GST_RTP_PCMU_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_PCMU_DEPAY,GstRtpPcmuDepay))
+#define GST_RTP_PCMU_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_PCMU_DEPAY,GstRtpPcmuDepayClass))
+#define GST_IS_RTP_PCMU_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_PCMU_DEPAY))
+#define GST_IS_RTP_PCMU_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_PCMU_DEPAY))
+
+struct _GstRtpPcmuDepay
+{
+ GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpPcmuDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_pcmu_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_PCMU_DEPAY_H__ */
diff --git a/gst/rtp/gstrtppcmupay.c b/gst/rtp/gstrtppcmupay.c
new file mode 100644
index 0000000000..5662789cbc
--- /dev/null
+++ b/gst/rtp/gstrtppcmupay.c
@@ -0,0 +1,112 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ * Copyright (C) <2005> Nokia Corporation <kai.vehmanen@nokia.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "gstrtppcmupay.h"
+
+static GstStaticPadTemplate gst_rtp_pcmu_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-mulaw, channels=(int)1, rate=(int)8000")
+ );
+
+static GstStaticPadTemplate gst_rtp_pcmu_pay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_PCMU_STRING ", "
+ "clock-rate = (int) 8000, " "encoding-name = (string) \"PCMU\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"PCMU\"")
+ );
+
+static gboolean gst_rtp_pcmu_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+
+#define gst_rtp_pcmu_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPcmuPay, gst_rtp_pcmu_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtppcmupay, "rtppcmupay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_PCMU_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_pcmu_pay_class_init (GstRtpPcmuPayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcmu_pay_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_pcmu_pay_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP PCMU payloader",
+ "Codec/Payloader/Network/RTP",
+ "Payload-encodes PCMU audio into a RTP packet",
+ "Edgard Lima <edgard.lima@gmail.com>");
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_pcmu_pay_setcaps;
+}
+
+static void
+gst_rtp_pcmu_pay_init (GstRtpPcmuPay * rtppcmupay)
+{
+ GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+ rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtppcmupay);
+
+ GST_RTP_BASE_PAYLOAD (rtppcmupay)->pt = GST_RTP_PAYLOAD_PCMU;
+ GST_RTP_BASE_PAYLOAD (rtppcmupay)->clock_rate = 8000;
+
+ /* tell rtpbaseaudiopayload that this is a sample based codec */
+ gst_rtp_base_audio_payload_set_sample_based (rtpbaseaudiopayload);
+
+ /* octet-per-sample is 1 for PCM */
+ gst_rtp_base_audio_payload_set_sample_options (rtpbaseaudiopayload, 1);
+}
+
+static gboolean
+gst_rtp_pcmu_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ gboolean res;
+
+ gst_rtp_base_payload_set_options (payload, "audio",
+ payload->pt != GST_RTP_PAYLOAD_PCMU, "PCMU", 8000);
+ res = gst_rtp_base_payload_set_outcaps (payload, NULL);
+
+ return res;
+}
diff --git a/gst/rtp/gstrtppcmupay.h b/gst/rtp/gstrtppcmupay.h
new file mode 100644
index 0000000000..cdeb3a2bfa
--- /dev/null
+++ b/gst/rtp/gstrtppcmupay.h
@@ -0,0 +1,52 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ */
+
+
+#ifndef __GST_RTP_PCMU_PAY_H__
+#define __GST_RTP_PCMU_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpPcmuPay GstRtpPcmuPay;
+typedef struct _GstRtpPcmuPayClass GstRtpPcmuPayClass;
+
+#define GST_TYPE_RTP_PCMU_PAY \
+ (gst_rtp_pcmu_pay_get_type())
+#define GST_RTP_PCMU_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_PCMU_PAY,GstRtpPcmuPay))
+#define GST_RTP_PCMU_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_PCMU_PAY,GstRtpPcmuPayClass))
+#define GST_IS_RTP_PCMU_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_PCMU_PAY))
+#define GST_IS_RTP_PCMU_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_PCMU_PAY))
+
+struct _GstRtpPcmuPay
+{
+ GstRTPBaseAudioPayload audiopayload;
+};
+
+struct _GstRtpPcmuPayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_pcmu_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_PCMU_PAY_H__ */
diff --git a/gst/rtp/gstrtpqcelpdepay.c b/gst/rtp/gstrtpqcelpdepay.c
new file mode 100644
index 0000000000..949cf605ba
--- /dev/null
+++ b/gst/rtp/gstrtpqcelpdepay.c
@@ -0,0 +1,428 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include <stdlib.h>
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpqcelpdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpqcelpdepay_debug);
+#define GST_CAT_DEFAULT (rtpqcelpdepay_debug)
+
+/* references:
+ *
+ * RFC 2658 - RTP Payload Format for PureVoice(tm) Audio
+ */
+#define FRAME_DURATION (20 * GST_MSECOND)
+
+/* RtpQCELPDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static GstStaticPadTemplate gst_rtp_qcelp_depay_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) 8000, "
+ "encoding-name = (string) \"QCELP\"; "
+ "application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_QCELP_STRING ", "
+ "clock-rate = (int) 8000")
+ );
+
+static GstStaticPadTemplate gst_rtp_qcelp_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/qcelp, " "channels = (int) 1," "rate = (int) 8000")
+ );
+
+static void gst_rtp_qcelp_depay_finalize (GObject * object);
+
+static gboolean gst_rtp_qcelp_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_qcelp_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+#define gst_rtp_qcelp_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpQCELPDepay, gst_rtp_qcelp_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpqcelpdepay, "rtpqcelpdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_QCELP_DEPAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_qcelp_depay_class_init (GstRtpQCELPDepayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_qcelp_depay_finalize;
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_qcelp_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_qcelp_depay_setcaps;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_qcelp_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_qcelp_depay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP QCELP depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts QCELP (PureVoice) audio from RTP packets (RFC 2658)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpqcelpdepay_debug, "rtpqcelpdepay", 0,
+ "QCELP RTP Depayloader");
+}
+
+static void
+gst_rtp_qcelp_depay_init (GstRtpQCELPDepay * rtpqcelpdepay)
+{
+}
+
+static void
+gst_rtp_qcelp_depay_finalize (GObject * object)
+{
+ GstRtpQCELPDepay *depay;
+
+ depay = GST_RTP_QCELP_DEPAY (object);
+
+ if (depay->packets != NULL) {
+ g_ptr_array_foreach (depay->packets, (GFunc) gst_buffer_unref, NULL);
+ g_ptr_array_free (depay->packets, TRUE);
+ depay->packets = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+static gboolean
+gst_rtp_qcelp_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstCaps *srccaps;
+ gboolean res;
+
+ srccaps = gst_caps_new_simple ("audio/qcelp",
+ "channels", G_TYPE_INT, 1, "rate", G_TYPE_INT, 8000, NULL);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ gst_caps_unref (srccaps);
+
+ return res;
+}
+
+static const gint frame_size[16] = {
+ 1, 4, 8, 17, 35, -8, 0, 0,
+ 0, 0, 0, 0, 0, 0, 1, 0
+};
+
+/* get the frame length, 0 is invalid, negative values are invalid but can be
+ * recovered from. */
+static gint
+get_frame_len (GstRtpQCELPDepay * depay, guint8 frame_type)
+{
+ if (frame_type >= G_N_ELEMENTS (frame_size))
+ return 0;
+
+ return frame_size[frame_type];
+}
+
+static guint
+count_packets (GstRtpQCELPDepay * depay, guint8 * data, guint size)
+{
+ guint count = 0;
+
+ while (size > 0) {
+ gint frame_len;
+
+ frame_len = get_frame_len (depay, data[0]);
+
+ /* 0 is invalid and we throw away the remainder of the frames */
+ if (frame_len == 0)
+ break;
+
+ if (frame_len < 0)
+ frame_len = -frame_len;
+
+ if (frame_len > size)
+ break;
+
+ size -= frame_len;
+ data += frame_len;
+ count++;
+ }
+ return count;
+}
+
+static void
+flush_packets (GstRtpQCELPDepay * depay)
+{
+ guint i, size;
+
+ GST_DEBUG_OBJECT (depay, "flushing packets");
+
+ size = depay->packets->len;
+
+ for (i = 0; i < size; i++) {
+ GstBuffer *outbuf;
+
+ outbuf = g_ptr_array_index (depay->packets, i);
+ g_ptr_array_index (depay->packets, i) = NULL;
+
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (depay), outbuf);
+ }
+
+ /* and reset interleaving state */
+ depay->interleaved = FALSE;
+ depay->bundling = 0;
+}
+
+static void
+add_packet (GstRtpQCELPDepay * depay, guint LLL, guint NNN, guint index,
+ GstBuffer * outbuf)
+{
+ guint idx;
+ GstBuffer *old;
+
+ /* figure out the position in the array, note that index is never 0 because we
+ * push those packets immediately. */
+ idx = NNN + ((LLL + 1) * (index - 1));
+
+ GST_DEBUG_OBJECT (depay, "adding packet at index %u", idx);
+ /* free old buffer (should not happen) */
+ old = g_ptr_array_index (depay->packets, idx);
+ if (old)
+ gst_buffer_unref (old);
+
+ /* store new buffer */
+ g_ptr_array_index (depay->packets, idx) = outbuf;
+}
+
+static GstBuffer *
+create_erasure_buffer (GstRtpQCELPDepay * depay)
+{
+ GstBuffer *outbuf;
+ GstMapInfo map;
+
+ outbuf = gst_buffer_new_and_alloc (1);
+ gst_buffer_map (outbuf, &map, GST_MAP_WRITE);
+ map.data[0] = 14;
+ gst_buffer_unmap (outbuf, &map);
+
+ return outbuf;
+}
+
+static GstBuffer *
+gst_rtp_qcelp_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp)
+{
+ GstRtpQCELPDepay *depay;
+ GstBuffer *outbuf;
+ GstClockTime timestamp;
+ guint payload_len, offset, index;
+ guint8 *payload;
+ guint LLL, NNN;
+
+ depay = GST_RTP_QCELP_DEPAY (depayload);
+
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (payload_len < 2)
+ goto too_small;
+
+ timestamp = GST_BUFFER_PTS (rtp->buffer);
+
+ payload = gst_rtp_buffer_get_payload (rtp);
+
+ /* 0 1 2 3 4 5 6 7
+ * +-+-+-+-+-+-+-+-+
+ * |RR | LLL | NNN |
+ * +-+-+-+-+-+-+-+-+
+ */
+ /* RR = payload[0] >> 6; */
+ LLL = (payload[0] & 0x38) >> 3;
+ NNN = (payload[0] & 0x07);
+
+ payload_len--;
+ payload++;
+
+ GST_DEBUG_OBJECT (depay, "LLL %u, NNN %u", LLL, NNN);
+
+ if (LLL > 5)
+ goto invalid_lll;
+
+ if (NNN > LLL)
+ goto invalid_nnn;
+
+ if (LLL != 0) {
+ /* we are interleaved */
+ if (!depay->interleaved) {
+ guint size;
+
+ GST_DEBUG_OBJECT (depay, "starting interleaving group");
+ /* bundling is not allowed to change in one interleave group */
+ depay->bundling = count_packets (depay, payload, payload_len);
+ GST_DEBUG_OBJECT (depay, "got bundling of %u", depay->bundling);
+ /* we have one bundle where NNN goes from 0 to L, we don't store the index
+ * 0 frames, so L+1 packets. Each packet has 'bundling - 1' packets */
+ size = (depay->bundling - 1) * (LLL + 1);
+ /* create the array to hold the packets */
+ if (depay->packets == NULL)
+ depay->packets = g_ptr_array_sized_new (size);
+ GST_DEBUG_OBJECT (depay, "created packet array of size %u", size);
+ g_ptr_array_set_size (depay->packets, size);
+ /* we were previously not interleaved, figure out how much space we
+ * need to deinterleave */
+ depay->interleaved = TRUE;
+ }
+ } else {
+ /* we are not interleaved */
+ if (depay->interleaved) {
+ GST_DEBUG_OBJECT (depay, "stopping interleaving");
+ /* flush packets if we were previously interleaved */
+ flush_packets (depay);
+ }
+ depay->bundling = 0;
+ }
+
+ index = 0;
+ offset = 1;
+
+ while (payload_len > 0) {
+ gint frame_len;
+ gboolean do_erasure;
+
+ frame_len = get_frame_len (depay, payload[0]);
+ GST_DEBUG_OBJECT (depay, "got frame len %d", frame_len);
+
+ if (frame_len == 0)
+ goto invalid_frame;
+
+ if (frame_len < 0) {
+ /* need to add an erasure frame but we can recover */
+ frame_len = -frame_len;
+ do_erasure = TRUE;
+ } else {
+ do_erasure = FALSE;
+ }
+
+ if (frame_len > payload_len)
+ goto invalid_frame;
+
+ if (do_erasure) {
+ /* create erasure frame */
+ outbuf = create_erasure_buffer (depay);
+ } else {
+ /* each frame goes into its buffer */
+ outbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, offset, frame_len);
+ }
+
+ GST_BUFFER_PTS (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = FRAME_DURATION;
+
+ gst_rtp_drop_non_audio_meta (depayload, outbuf);
+
+ if (!depay->interleaved || index == 0) {
+ /* not interleaved or first frame in packet, just push */
+ gst_rtp_base_depayload_push (depayload, outbuf);
+
+ if (timestamp != -1)
+ timestamp += FRAME_DURATION;
+ } else {
+ /* put in interleave buffer */
+ add_packet (depay, LLL, NNN, index, outbuf);
+
+ if (timestamp != -1)
+ timestamp += (FRAME_DURATION * (LLL + 1));
+ }
+
+ payload_len -= frame_len;
+ payload += frame_len;
+ offset += frame_len;
+ index++;
+
+ /* discard excess packets */
+ if (depay->bundling > 0 && depay->bundling <= index)
+ break;
+ }
+ while (index < depay->bundling) {
+ GST_DEBUG_OBJECT (depay, "filling with erasure buffer");
+ /* fill remainder with erasure packets */
+ outbuf = create_erasure_buffer (depay);
+ add_packet (depay, LLL, NNN, index, outbuf);
+ index++;
+ }
+ if (depay->interleaved && LLL == NNN) {
+ GST_DEBUG_OBJECT (depay, "interleave group ended, flushing");
+ /* we have the complete interleave group, flush */
+ flush_packets (depay);
+ }
+
+ return NULL;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_ELEMENT_WARNING (depay, STREAM, DECODE,
+ (NULL), ("QCELP RTP payload too small (%d)", payload_len));
+ return NULL;
+ }
+invalid_lll:
+ {
+ GST_ELEMENT_WARNING (depay, STREAM, DECODE,
+ (NULL), ("QCELP RTP invalid LLL received (%d)", LLL));
+ return NULL;
+ }
+invalid_nnn:
+ {
+ GST_ELEMENT_WARNING (depay, STREAM, DECODE,
+ (NULL), ("QCELP RTP invalid NNN received (%d)", NNN));
+ return NULL;
+ }
+invalid_frame:
+ {
+ GST_ELEMENT_WARNING (depay, STREAM, DECODE,
+ (NULL), ("QCELP RTP invalid frame received"));
+ return NULL;
+ }
+}
diff --git a/gst/rtp/gstrtpqcelpdepay.h b/gst/rtp/gstrtpqcelpdepay.h
new file mode 100644
index 0000000000..f278a7339a
--- /dev/null
+++ b/gst/rtp/gstrtpqcelpdepay.h
@@ -0,0 +1,60 @@
+/* GStreamer
+ * Copyright (C) <2010> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_QCELP_DEPAY_H__
+#define __GST_RTP_QCELP_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_QCELP_DEPAY \
+ (gst_rtp_qcelp_depay_get_type())
+#define GST_RTP_QCELP_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_QCELP_DEPAY,GstRtpQCELPDepay))
+#define GST_RTP_QCELP_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_QCELP_DEPAY,GstRtpQCELPDepayClass))
+#define GST_IS_RTP_QCELP_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_QCELP_DEPAY))
+#define GST_IS_RTP_QCELP_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_QCELP_DEPAY))
+
+typedef struct _GstRtpQCELPDepay GstRtpQCELPDepay;
+typedef struct _GstRtpQCELPDepayClass GstRtpQCELPDepayClass;
+
+struct _GstRtpQCELPDepay
+{
+ GstRTPBaseDepayload depayload;
+
+ gboolean interleaved;
+ guint bundling;
+ GPtrArray *packets;
+};
+
+struct _GstRtpQCELPDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_qcelp_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_QCELP_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpqdmdepay.c b/gst/rtp/gstrtpqdmdepay.c
new file mode 100644
index 0000000000..3edfb46301
--- /dev/null
+++ b/gst/rtp/gstrtpqdmdepay.c
@@ -0,0 +1,411 @@
+/* GStreamer
+ * Copyright (C) <2009> Edward Hervey <bilboed@bilboed.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpqdmdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY (rtpqdm2depay_debug);
+#define GST_CAT_DEFAULT rtpqdm2depay_debug
+
+static GstStaticPadTemplate gst_rtp_qdm2_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-qdm2")
+ );
+
+static GstStaticPadTemplate gst_rtp_qdm2_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", " "encoding-name = (string)\"X-QDM\"")
+ );
+
+#define gst_rtp_qdm2_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpQDM2Depay, gst_rtp_qdm2_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+#define _do_init \
+ GST_DEBUG_CATEGORY_INIT (rtpqdm2depay_debug, "rtpqdm2depay", 0, \
+ "RTP QDM2 depayloader"); \
+ rtp_element_init (plugin)
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpqdm2depay, "rtpqdm2depay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_QDM2_DEPAY, _do_init);
+
+/* Fixed 20-byte prefix used to synthesize the codec_data buffer in
+ * gst_rtp_qdm2_depay_process(): the bytes spell out QuickTime-style atom
+ * data -- size 0xc + 'frma', 'QDM2', size 0x24 + 'QDCA' -- followed at
+ * runtime by 28 bytes taken from the in-band header packet. */
+static const guint8 headheader[20] = {
+  0x0, 0x0, 0x0, 0xc, 0x66, 0x72, 0x6d, 0x61,
+  0x51, 0x44, 0x4d, 0x32, 0x0, 0x0, 0x0, 0x24,
+  0x51, 0x44, 0x43, 0x41
+};
+
+static void gst_rtp_qdm2_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_qdm2_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static GstBuffer *gst_rtp_qdm2_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+gboolean gst_rtp_qdm2_depay_setcaps (GstRTPBaseDepayload * filter,
+ GstCaps * caps);
+
+/* Class init: wires up the process/set_caps vfuncs of the RTP base
+ * depayloader, the finalize/change_state overrides, the static pad
+ * templates and the element metadata. */
+static void
+gst_rtp_qdm2_depay_class_init (GstRtpQDM2DepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_qdm2_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_qdm2_depay_setcaps;
+
+  gobject_class->finalize = gst_rtp_qdm2_depay_finalize;
+
+  gstelement_class->change_state = gst_rtp_qdm2_depay_change_state;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_qdm2_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_qdm2_depay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP QDM2 depayloader",
+      "Codec/Depayloader/Network/RTP",
+      "Extracts QDM2 audio from RTP packets (no RFC)",
+      "Edward Hervey <bilboed@bilboed.com>");
+}
+
+/* Instance init: the adapter accumulates unscrambled QDM2 packets until
+ * they are pushed downstream as a single buffer. */
+static void
+gst_rtp_qdm2_depay_init (GstRtpQDM2Depay * rtpqdm2depay)
+{
+  rtpqdm2depay->adapter = gst_adapter_new ();
+}
+
+/* Releases the adapter created in _init.
+ * NOTE(review): the QDM2Packet structs (and any pending data buffers) in
+ * packets[] are never freed here -- looks like a leak; verify. */
+static void
+gst_rtp_qdm2_depay_finalize (GObject * object)
+{
+  GstRtpQDM2Depay *rtpqdm2depay;
+
+  rtpqdm2depay = GST_RTP_QDM2_DEPAY (object);
+
+  g_object_unref (rtpqdm2depay->adapter);
+  rtpqdm2depay->adapter = NULL;
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* only on the sink */
+/* Caps negotiation on the sink pad: only the clock-rate is taken from the
+ * incoming caps (44100 Hz when absent).  The src caps cannot be set yet --
+ * they depend on the in-band header packet parsed later in _process. */
+gboolean
+gst_rtp_qdm2_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
+{
+  GstStructure *structure = gst_caps_get_structure (caps, 0);
+  gint clock_rate;
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 44100;         /* default */
+  filter->clock_rate = clock_rate;
+
+  /* will set caps later */
+
+  return TRUE;
+}
+
+/* Pushes every unscrambled packet collected so far into the adapter.
+ *
+ * Any stale data already in the adapter is dropped first.  For each slot
+ * holding data, the 2- or 3-byte size prefix and the 16-bit additive
+ * checksum are written into the reserved leading bytes, then the buffer is
+ * handed (zero-copy, ownership transferred to g_free) to the adapter and
+ * the slot's data pointer is cleared so add_packet() reallocates it. */
+static void
+flush_data (GstRtpQDM2Depay * depay)
+{
+  guint i;
+  guint avail;
+
+  if ((avail = gst_adapter_available (depay->adapter)))
+    gst_adapter_flush (depay->adapter, avail);
+
+  GST_DEBUG ("Flushing %d packets", depay->nbpackets);
+
+  /* Iterate over every slot seen so far instead of stopping at the first
+   * NULL entry: packet ids arrive in arbitrary order, so packets[] may
+   * contain holes, and the previous `for (i = 0; depay->packets[i]; i++)'
+   * loop silently skipped everything after a hole. */
+  for (i = 0; i < depay->nbpackets; i++) {
+    QDM2Packet *pack = depay->packets[i];
+    guint32 crc = 0;
+    guint j;
+    GstBuffer *buf;
+    guint8 *data;
+
+    if (G_UNLIKELY (pack == NULL))
+      continue;
+
+    /* CRC is the sum of everything (including first bytes) */
+
+    data = pack->data;
+
+    if (G_UNLIKELY (data == NULL))
+      continue;
+
+    /* If the packet size is bigger than 0xff, we need 2 bytes to store the size */
+    if (depay->packetsize > 0xff) {
+      /* Expanded size 0x02 | 0x80 */
+      data[0] = 0x82;
+      GST_WRITE_UINT16_BE (data + 1, depay->packetsize - 3);
+    } else {
+      data[0] = 0x2;
+      data[1] = depay->packetsize - 2;
+    }
+
+    /* Calculate CRC over the whole packet.  Use a dedicated index: the
+     * previous code declared `int i' here, shadowing the outer counter. */
+    for (j = 0; j < depay->packetsize; j++)
+      crc += data[j];
+
+    GST_DEBUG ("CRC is 0x%x", crc);
+
+    /* Write CRC right after the (2- or 3-byte) size prefix */
+    if (depay->packetsize > 0xff)
+      GST_WRITE_UINT16_BE (data + 3, crc);
+    else
+      GST_WRITE_UINT16_BE (data + 2, crc);
+
+    GST_MEMDUMP ("Extracted packet", data, depay->packetsize);
+
+    buf = gst_buffer_new ();
+    gst_buffer_append_memory (buf,
+        gst_memory_new_wrapped (0, data, depay->packetsize, 0,
+            depay->packetsize, data, g_free));
+
+    gst_adapter_push (depay->adapter, buf);
+
+    pack->data = NULL;
+  }
+}
+
+/* Stores @len bytes of @data into the unscramble slot @pid.
+ *
+ * Does nothing until the in-band headers have configured packetsize.
+ * Both @pid and @len come straight off the network, so they are validated
+ * before being used: an out-of-range pid would index past packets[]
+ * (MAX_SCRAMBLED_PACKETS entries) and an oversized fragment would overrun
+ * the packetsize-byte slot buffer. */
+static void
+add_packet (GstRtpQDM2Depay * depay, guint32 pid, guint32 len, guint8 * data)
+{
+  QDM2Packet *packet;
+
+  if (G_UNLIKELY (!depay->configured))
+    return;
+
+  GST_DEBUG ("pid:%d, len:%d, data:%p", pid, len, data);
+
+  /* Reject invalid packet ids instead of writing out of bounds */
+  if (G_UNLIKELY (pid >= MAX_SCRAMBLED_PACKETS)) {
+    GST_WARNING ("Invalid packet id %u (max %u), dropping",
+        pid, MAX_SCRAMBLED_PACKETS - 1);
+    return;
+  }
+
+  if (G_UNLIKELY (depay->packets[pid] == NULL)) {
+    depay->packets[pid] = g_malloc0 (sizeof (QDM2Packet));
+    depay->nbpackets = MAX (depay->nbpackets, pid + 1);
+  }
+  packet = depay->packets[pid];
+
+  GST_DEBUG ("packet:%p", packet);
+  GST_DEBUG ("packet->data:%p", packet->data);
+
+  if (G_UNLIKELY (packet->data == NULL)) {
+    packet->data = g_malloc0 (depay->packetsize);
+    /* We leave space for the header/crc */
+    if (depay->packetsize > 0xff)
+      packet->offs = 5;
+    else
+      packet->offs = 4;
+  }
+
+  /* Reject fragments that would overflow the slot buffer */
+  if (G_UNLIKELY (len > depay->packetsize
+          || packet->offs > depay->packetsize - len)) {
+    GST_WARNING ("Too much data (%u bytes at offset %u) for packet %u, "
+        "dropping", len, packet->offs, pid);
+    return;
+  }
+
+  /* Finally copy the data over */
+  memcpy (packet->data + packet->offs, data, len);
+  packet->offs += len;
+}
+
+/* Main depayloading vfunc.
+ *
+ * The RTP payload is a sequence of chunks: 0x80 (unknown marker, 12 bytes
+ * skipped), 0xff (in-band headers: parsed once to build codec_data and set
+ * the src caps, then 40 bytes skipped) or a scrambled packet fragment
+ * (id/type/len header followed by data) stored via add_packet().
+ * Collected fragments are flushed into the adapter when a sequence-number
+ * gap is detected or when new headers arrive; whatever the adapter then
+ * holds is returned as one buffer timestamped with ptimestamp (the
+ * timestamp of the packets the data came from).
+ *
+ * NOTE(review): beyond the initial `payload_len < 3' check, the chunk
+ * parser reads payload[pos..] (header branch: up to 40 bytes; default
+ * branch: 3-4 header bytes plus packlen of data) without verifying that
+ * pos stays within payload_len -- out-of-bounds reads look possible with
+ * crafted input; verify against upstream hardening. */
+static GstBuffer *
+gst_rtp_qdm2_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpQDM2Depay *rtpqdm2depay;
+  GstBuffer *outbuf = NULL;
+  guint16 seq;
+
+  rtpqdm2depay = GST_RTP_QDM2_DEPAY (depayload);
+
+  {
+    gint payload_len;
+    guint8 *payload;
+    guint avail;
+    guint pos = 0;
+
+    payload_len = gst_rtp_buffer_get_payload_len (rtp);
+    if (payload_len < 3)
+      goto bad_packet;
+
+    payload = gst_rtp_buffer_get_payload (rtp);
+    seq = gst_rtp_buffer_get_seq (rtp);
+    /* A gap in sequence numbers means loss: flush what was collected so it
+     * gets pushed at the bottom, and restart collection. */
+    if (G_UNLIKELY (seq != rtpqdm2depay->nextseq)) {
+      GST_DEBUG ("GAP in sequence number, Resetting data !");
+      /* Flush previous data */
+      flush_data (rtpqdm2depay);
+      /* And store new timestamp */
+      rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
+      rtpqdm2depay->timestamp = GST_BUFFER_PTS (rtp->buffer);
+      /* And that previous data will be pushed at the bottom */
+    }
+    rtpqdm2depay->nextseq = seq + 1;
+
+    GST_DEBUG ("Payload size %d 0x%x sequence:%d", payload_len, payload_len,
+        seq);
+
+    GST_MEMDUMP ("Incoming payload", payload, payload_len);
+
+    while (pos < payload_len) {
+      switch (payload[pos]) {
+        case 0x80:{
+          GST_DEBUG ("Unrecognized 0x80 marker, skipping 12 bytes");
+          pos += 12;
+        }
+          break;
+        case 0xff:
+          /* HEADERS */
+          GST_DEBUG ("Headers");
+          /* Store the incoming timestamp */
+          rtpqdm2depay->ptimestamp = rtpqdm2depay->timestamp;
+          rtpqdm2depay->timestamp = GST_BUFFER_PTS (rtp->buffer);
+          /* flush the internal data if needed */
+          flush_data (rtpqdm2depay);
+          if (G_UNLIKELY (!rtpqdm2depay->configured)) {
+            guint8 *ourdata;
+            GstBuffer *codecdata;
+            GstMapInfo cmap;
+            GstCaps *caps;
+
+            /* First bytes are unknown */
+            GST_MEMDUMP ("Header", payload + pos, 32);
+            ourdata = payload + pos + 10;
+            pos += 10;
+            /* Stream parameters, big-endian, at fixed offsets inside the
+             * header chunk */
+            rtpqdm2depay->channs = GST_READ_UINT32_BE (payload + pos + 4);
+            rtpqdm2depay->samplerate = GST_READ_UINT32_BE (payload + pos + 8);
+            rtpqdm2depay->bitrate = GST_READ_UINT32_BE (payload + pos + 12);
+            rtpqdm2depay->blocksize = GST_READ_UINT32_BE (payload + pos + 16);
+            rtpqdm2depay->framesize = GST_READ_UINT32_BE (payload + pos + 20);
+            rtpqdm2depay->packetsize = GST_READ_UINT32_BE (payload + pos + 24);
+            /* 16 bit empty block (0x02 0x00) */
+            pos += 30;
+            GST_DEBUG
+                ("channs:%d, samplerate:%d, bitrate:%d, blocksize:%d, framesize:%d, packetsize:%d",
+                rtpqdm2depay->channs, rtpqdm2depay->samplerate,
+                rtpqdm2depay->bitrate, rtpqdm2depay->blocksize,
+                rtpqdm2depay->framesize, rtpqdm2depay->packetsize);
+
+            /* Caps: codec_data = 20 fixed bytes (headheader) + 28 bytes
+             * lifted from the header chunk */
+            codecdata = gst_buffer_new_and_alloc (48);
+            gst_buffer_map (codecdata, &cmap, GST_MAP_WRITE);
+            memcpy (cmap.data, headheader, 20);
+            memcpy (cmap.data + 20, ourdata, 28);
+            gst_buffer_unmap (codecdata, &cmap);
+
+            caps = gst_caps_new_simple ("audio/x-qdm2",
+                "samplesize", G_TYPE_INT, 16,
+                "rate", G_TYPE_INT, rtpqdm2depay->samplerate,
+                "channels", G_TYPE_INT, rtpqdm2depay->channs,
+                "codec_data", GST_TYPE_BUFFER, codecdata, NULL);
+            gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), caps);
+            gst_caps_unref (caps);
+            rtpqdm2depay->configured = TRUE;
+          } else {
+            GST_DEBUG ("Already configured, skipping headers");
+            pos += 40;
+          }
+          break;
+        default:{
+          /* Shuffled packet contents */
+          guint packetid = payload[pos++];
+          guint packettype = payload[pos++];
+          guint packlen = payload[pos++];
+          guint hsize = 2;
+
+          GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
+              packetid, packettype, packlen);
+
+          /* Packets bigger than 0xff bytes have a type with the high bit set */
+          if (G_UNLIKELY (packettype & 0x80)) {
+            packettype &= 0x7f;
+            packlen <<= 8;
+            packlen |= payload[pos++];
+            hsize = 3;
+            GST_DEBUG ("Packet id:%d, type:0x%x, len:%d",
+                packetid, packettype, packlen);
+          }
+
+          if (packettype > 0x7f) {
+            GST_ERROR ("HOUSTON WE HAVE A PROBLEM !!!!");
+          }
+          /* The fragment is stored including its header (hsize bytes) */
+          add_packet (rtpqdm2depay, packetid, packlen + hsize,
+              payload + pos - hsize);
+          pos += packlen;
+        }
+      }
+    }
+
+    GST_DEBUG ("final pos %d", pos);
+
+    avail = gst_adapter_available (rtpqdm2depay->adapter);
+    if (G_UNLIKELY (avail)) {
+      GST_DEBUG ("Pushing out %d bytes of collected data", avail);
+      outbuf = gst_adapter_take_buffer (rtpqdm2depay->adapter, avail);
+      GST_BUFFER_PTS (outbuf) = rtpqdm2depay->ptimestamp;
+      GST_DEBUG ("Outgoing buffer timestamp %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (rtpqdm2depay->ptimestamp));
+    }
+  }
+
+  return outbuf;
+
+  /* ERRORS */
+bad_packet:
+  {
+    GST_ELEMENT_WARNING (rtpqdm2depay, STREAM, DECODE,
+        (NULL), ("Packet was too short"));
+    return NULL;
+  }
+}
+
+/* State change: clears the adapter on READY->PAUSED so a (re)start begins
+ * from a clean slate; everything else is delegated to the parent class. */
+static GstStateChangeReturn
+gst_rtp_qdm2_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpQDM2Depay *rtpqdm2depay;
+  GstStateChangeReturn ret;
+
+  rtpqdm2depay = GST_RTP_QDM2_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_adapter_clear (rtpqdm2depay->adapter);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpqdmdepay.h b/gst/rtp/gstrtpqdmdepay.h
new file mode 100644
index 0000000000..d5d37566e7
--- /dev/null
+++ b/gst/rtp/gstrtpqdmdepay.h
@@ -0,0 +1,83 @@
+/* GStreamer
+ * Copyright (C) <2009> Edward Hervey <bilboed@bilboed.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_QDM2_DEPAY_H__
+#define __GST_RTP_QDM2_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_QDM2_DEPAY \
+ (gst_rtp_qdm2_depay_get_type())
+#define GST_RTP_QDM2_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_QDM2_DEPAY,GstRtpQDM2Depay))
+#define GST_RTP_QDM2_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_QDM2_DEPAY,GstRtpQDM2DepayClass))
+#define GST_IS_RTP_QDM2_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_QDM2_DEPAY))
+#define GST_IS_RTP_QDM2_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_QDM2_DEPAY))
+
+typedef struct _GstRtpQDM2Depay GstRtpQDM2Depay;
+typedef struct _GstRtpQDM2DepayClass GstRtpQDM2DepayClass;
+
+/* One unscramble slot: a packetsize-byte buffer being reassembled from
+ * fragments, NULL after it has been flushed downstream. */
+typedef struct _QDM2Packet {
+  guint8* data;
+  guint offs; /* Starts at 4 to give room for the prefix */
+} QDM2Packet;
+
+/* Number of unscramble slots (one per wire packet id) */
+#define MAX_SCRAMBLED_PACKETS 64
+
+struct _GstRtpQDM2Depay
+{
+  GstRTPBaseDepayload depayload;
+
+  GstAdapter *adapter;          /* collects unscrambled packets until push */
+
+  guint16 nextseq;              /* expected next RTP seqnum; a gap flushes */
+  gboolean configured;          /* TRUE once in-band headers were parsed */
+
+  GstClockTime timestamp; /* Timestamp of current incoming data */
+  GstClockTime ptimestamp; /* Timestamp of data stored in the adapter */
+
+  /* Stream parameters parsed from the in-band header packet */
+  guint32 channs;
+  guint32 samplerate;
+  guint32 bitrate;
+  guint32 blocksize;
+  guint32 framesize;
+  guint32 packetsize;
+
+  guint nbpackets; /* Number of packets to unscramble */
+
+  QDM2Packet *packets[MAX_SCRAMBLED_PACKETS];
+};
+
+struct _GstRtpQDM2DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_qdm2_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_QDM2_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpreddec.c b/gst/rtp/gstrtpreddec.c
new file mode 100644
index 0000000000..c9482cbefa
--- /dev/null
+++ b/gst/rtp/gstrtpreddec.c
@@ -0,0 +1,546 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+/**
+ * SECTION:element-rtpreddec
+ * @short_description: RTP Redundant Audio Data (RED) decoder
+ * @title: rtpreddec
+ *
+ * Decode Redundant Audio Data (RED) as per RFC 2198.
+ *
+ * This element is mostly provided for chrome webrtc compatibility:
+ * chrome will wrap ulpfec-protected streams in RED packets, and such
+ * streams need to be unwrapped by this element before being passed on
+ * to #GstRtpUlpFecDec.
+ *
+ * The #GstRtpRedDec:pt property should be set to the expected payload
+ * types of the RED packets.
+ *
+ * When using #GstRtpBin, this element should be inserted through the
+ * #GstRtpBin::request-aux-receiver signal.
+ *
+ * ## Example pipeline
+ *
+ * |[
+ * gst-launch-1.0 udpsrc port=8888 caps="application/x-rtp, payload=96, clock-rate=90000" ! rtpreddec pt=122 ! rtpstorage size-time=220000000 ! rtpssrcdemux ! application/x-rtp, payload=96, clock-rate=90000, media=video, encoding-name=H264 ! rtpjitterbuffer do-lost=1 latency=200 ! rtpulpfecdec pt=122 ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
+ * ]| This example will receive a stream with RED and ULP FEC and try to reconstruct the packets.
+ *
+ * See also: #GstRtpRedEnc, #GstWebRTCBin, #GstRtpBin
+ * Since: 1.14
+ */
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpelements.h"
+#include "rtpredcommon.h"
+#include "gstrtpreddec.h"
+#include "rtpulpfeccommon.h"
+
+#define RTP_HISTORY_MAX_SIZE (16)
+
+/* One entry of the newest-first (timestamp, seq) packet history used to
+ * decide whether a redundant block refers to a packet we actually lost. */
+typedef struct
+{
+  guint32 timestamp;
+  guint16 seq;
+} RTPHistItem;
+
+/* Accessors for GList data pointers; the argument is parenthesized so the
+ * cast also works for expression arguments (macro-hygiene fix). */
+#define RTP_HIST_ITEM_TIMESTAMP(p) (((RTPHistItem *)(p))->timestamp)
+#define RTP_HIST_ITEM_SEQ(p) (((RTPHistItem *)(p))->seq)
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp"));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp"));
+
+#define UNDEF_PT -1
+#define MIN_PT UNDEF_PT
+#define MAX_PT 127
+#define DEFAULT_PT UNDEF_PT
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_red_dec_debug);
+#define GST_CAT_DEFAULT gst_rtp_red_dec_debug
+
+G_DEFINE_TYPE (GstRtpRedDec, gst_rtp_red_dec, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpreddec, "rtpreddec", GST_RANK_NONE,
+ GST_TYPE_RTP_RED_DEC, rtp_element_init (plugin));
+
+enum
+{
+ PROP_0,
+ PROP_PT,
+ PROP_RECEIVED
+};
+
+/* Slice-pool allocator/deallocator for history entries. */
+static RTPHistItem *
+rtp_hist_item_alloc (void)
+{
+  return g_slice_new (RTPHistItem);
+}
+
+static void
+rtp_hist_item_free (gpointer item)
+{
+  g_slice_free (RTPHistItem, item);
+}
+
+/* GCompareFunc for g_list_find_custom: matches (returns 0 for) the first
+ * history item whose timestamp is <= @timestamp.  The subtraction is
+ * evaluated as signed 32-bit, making the comparison robust to RTP
+ * timestamp wraparound. */
+static gint
+gst_rtp_red_history_find_less_or_equal (gconstpointer item,
+    gconstpointer timestamp)
+{
+  guint32 t = GPOINTER_TO_UINT (timestamp);
+  gint32 diff = t - RTP_HIST_ITEM_TIMESTAMP (item);
+  return diff < 0;
+}
+
+/* Like _find_less_or_equal but matches only items whose timestamp is
+ * strictly less than @timestamp (wraparound-safe). */
+static gint
+gst_rtp_red_history_find_less (gconstpointer item, gconstpointer timestamp)
+{
+  guint32 t = GPOINTER_TO_UINT (timestamp);
+  gint32 diff = t - RTP_HIST_ITEM_TIMESTAMP (item);
+  return diff <= 0;
+}
+
+/* Records @rtp's (timestamp, seq) in the newest-first history queue,
+ * bounded at RTP_HISTORY_MAX_SIZE entries by recycling the oldest link. */
+static void
+gst_rtp_red_history_update (GstRtpRedDec * self, GstRTPBuffer * rtp)
+{
+  RTPHistItem *item;
+  GList *link, *sibling;
+
+  /* If we have not reached MAX number of elements in the history,
+   * allocate a new link and a new item,
+   * otherwise reuse the tail (the oldest data) without any reallocations
+   */
+  if (self->rtp_history->length < RTP_HISTORY_MAX_SIZE) {
+    item = rtp_hist_item_alloc ();
+    link = g_list_alloc ();
+    link->data = item;
+  } else {
+    link = g_queue_pop_tail_link (self->rtp_history);
+    item = link->data;
+  }
+
+  item->timestamp = gst_rtp_buffer_get_timestamp (rtp);
+  item->seq = gst_rtp_buffer_get_seq (rtp);
+
+  /* Looking for a place to insert new link.
+   * The queue has newest to oldest rtp timestamps, so in 99% cases
+   * it is inserted before the head of the queue */
+  sibling = g_list_find_custom (self->rtp_history->head,
+      GUINT_TO_POINTER (item->timestamp),
+      gst_rtp_red_history_find_less_or_equal);
+  g_queue_push_nth_link (self->rtp_history,
+      g_list_position (self->rtp_history->head, sibling), link);
+}
+
+/* Validates the RED block headers (RFC 2198) of @red_rtp: walks each
+ * header (4 bytes for redundant blocks, 1 byte for the final primary
+ * block), checking that every header and every advertised redundant
+ * payload fits inside the RTP payload and that at least one byte remains
+ * for the primary payload.  On success writes the offset of the first
+ * byte after the headers to @dst_first_red_payload_offset and returns
+ * TRUE; on failure logs a warning and returns FALSE. */
+static gboolean
+rtp_red_buffer_is_valid (GstRtpRedDec * self, GstRTPBuffer * red_rtp,
+    gsize * dst_first_red_payload_offset)
+{
+  guint8 *payload = gst_rtp_buffer_get_payload (red_rtp);
+  gsize payload_len = gst_rtp_buffer_get_payload_len (red_rtp);
+  gsize red_hdrs_offset = 0;
+  guint red_hdrs_checked = 0;
+  guint redundant_payload_len = 0;
+
+  while (TRUE) {
+    gpointer red_hdr = payload + red_hdrs_offset;
+    gsize red_hdr_len;
+    gboolean is_redundant;
+
+    ++red_hdrs_checked;
+
+    /* Can we address the first byte where F bit is located ? */
+    if (red_hdrs_offset + 1 > payload_len)
+      goto red_buffer_invalid;
+
+    is_redundant = rtp_red_block_is_redundant (red_hdr);
+
+    /* Is it the last block? */
+    if (is_redundant) {
+      red_hdr_len = rtp_red_block_header_get_length (TRUE);
+
+      /* Can we address all the other bytes in RED block header? */
+      if (red_hdrs_offset + red_hdr_len > payload_len)
+        goto red_buffer_invalid;
+
+      redundant_payload_len += rtp_red_block_get_payload_length (red_hdr);
+      red_hdrs_offset += red_hdr_len;
+    } else {
+      /* F bit clear: this is the primary block header, stop walking */
+      red_hdr_len = rtp_red_block_header_get_length (FALSE);
+      red_hdrs_offset += red_hdr_len;
+      break;
+    }
+  }
+
+  /* Do we have enough data to create redundant packets & main packet. Keep in
+   * mind that redundant_payload_len contains the length of redundant packets only.
+   */
+  if (red_hdrs_offset + redundant_payload_len >= payload_len)
+    goto red_buffer_invalid;
+
+  *dst_first_red_payload_offset = red_hdrs_offset;
+
+  GST_LOG_OBJECT (self, "RED packet has %u blocks", red_hdrs_checked);
+  return TRUE;
+
+red_buffer_invalid:
+  GST_WARNING_OBJECT (self, "Received invalid RED packet "
+      "ssrc=0x%08x pt=%u tstamp=%u seq=%u size=%u, "
+      "checked %u blocks",
+      gst_rtp_buffer_get_ssrc (red_rtp),
+      gst_rtp_buffer_get_payload_type (red_rtp),
+      gst_rtp_buffer_get_timestamp (red_rtp),
+      gst_rtp_buffer_get_seq (red_rtp),
+      gst_rtp_buffer_get_packet_len (red_rtp), red_hdrs_checked);
+  return FALSE;
+}
+
+/* Determines whether a redundant block with RTP timestamp @timestamp
+ * corresponds to a packet we actually lost, and if so reconstructs its
+ * sequence number into @dst_seq_num by linear interpolation between the
+ * two received packets surrounding that timestamp.
+ *
+ * Returns FALSE (with a warning where appropriate) when the history
+ * cannot prove a loss: the timestamp predates the history, is newer than
+ * everything received, matches a received packet exactly (nothing lost),
+ * or the seq/timestamp progression is inconsistent. */
+static gboolean
+gst_red_history_lost_seq_num_for_timestamp (GstRtpRedDec * self,
+    guint32 timestamp, guint16 * dst_seq_num)
+{
+  GList *older_sibling = g_list_find_custom (self->rtp_history->head,
+      GUINT_TO_POINTER (timestamp),
+      gst_rtp_red_history_find_less);
+  RTPHistItem *older;
+  RTPHistItem *newer;
+  guint32 timestamp_diff;
+  gint seq_diff, lost_packet_idx;
+
+  if (NULL == older_sibling) {
+    if (self->rtp_history->length == RTP_HISTORY_MAX_SIZE)
+      GST_WARNING_OBJECT (self, "History is too short. "
+          "Oldest rtp timestamp %u, looking for %u, size %u",
+          RTP_HIST_ITEM_TIMESTAMP (self->rtp_history->tail->data),
+          timestamp, self->rtp_history->length);
+    return FALSE;
+  }
+
+  if (NULL == older_sibling->prev) {
+    GST_WARNING_OBJECT (self, "RED block timestamp offset probably wrong. "
+        "Latest rtp timestamp %u, looking for %u, size %u",
+        RTP_HIST_ITEM_TIMESTAMP (self->rtp_history->head->data),
+        timestamp, self->rtp_history->length);
+    return FALSE;
+  }
+
+  older = older_sibling->data;
+  newer = older_sibling->prev->data;
+  /* We know for sure @older has lower timestamp than we are looking for,
+   * if @newer has the same timestamp, there is no packet loss and we
+   * don't need to use redundant data */
+  if (newer->timestamp == timestamp)
+    return FALSE;
+
+  seq_diff = gst_rtp_buffer_compare_seqnum (older->seq, newer->seq);
+  if (seq_diff <= 1) {
+    if (seq_diff == 1)
+      GST_WARNING_OBJECT (self, "RED block timestamp offset is wrong: "
+          "#%u,%u #%u,%u looking for %u",
+          older->seq, older->timestamp,
+          newer->seq, newer->timestamp, timestamp);
+    else
+      GST_WARNING_OBJECT (self, "RTP timestamps increasing while "
+          "sequence numbers decreasing: #%u,%u #%u,%u",
+          older->seq, older->timestamp, newer->seq, newer->timestamp);
+    return FALSE;
+  }
+
+  /* Assume evenly spaced timestamps over the seq gap and look for the
+   * interpolated position matching @timestamp exactly */
+  timestamp_diff = newer->timestamp - older->timestamp;
+  for (lost_packet_idx = 1; lost_packet_idx < seq_diff; ++lost_packet_idx) {
+    guint32 lost_timestamp = older->timestamp +
+        lost_packet_idx * timestamp_diff / seq_diff;
+    if (lost_timestamp == timestamp) {
+      *dst_seq_num = older->seq + lost_packet_idx;
+      return TRUE;
+    }
+  }
+
+  GST_WARNING_OBJECT (self, "Can't find RED block timestamp "
+      "#%u,%u #%u,%u looking for %u",
+      older->seq, older->timestamp, newer->seq, newer->timestamp, timestamp);
+  return FALSE;
+}
+
+/* Builds a standalone RTP packet from a slice of the RED payload: a fresh
+ * header carrying the given marker/pt/seq/timestamp with ssrc and csrcs
+ * copied from @red_rtp, followed by a zero-copy subbuffer of the RED
+ * payload.  Buffer timestamps/meta/flags are copied from the RED packet
+ * so they end up on the extracted packet. */
+static GstBuffer *
+gst_rtp_red_create_packet (GstRtpRedDec * self, GstRTPBuffer * red_rtp,
+    gboolean marker, guint8 pt, guint16 seq_num, guint32 timestamp,
+    gsize red_payload_subbuffer_start, gsize red_payload_subbuffer_len)
+{
+  guint csrc_count = gst_rtp_buffer_get_csrc_count (red_rtp);
+  GstBuffer *ret = gst_rtp_buffer_new_allocate (0, 0, csrc_count);
+  GstRTPBuffer ret_rtp = GST_RTP_BUFFER_INIT;
+  guint i;
+  /* Mapping a freshly allocated RTP buffer cannot fail */
+  if (!gst_rtp_buffer_map (ret, GST_MAP_WRITE, &ret_rtp))
+    g_assert_not_reached ();
+
+  gst_rtp_buffer_set_marker (&ret_rtp, marker);
+  gst_rtp_buffer_set_payload_type (&ret_rtp, pt);
+  gst_rtp_buffer_set_seq (&ret_rtp, seq_num);
+  gst_rtp_buffer_set_timestamp (&ret_rtp, timestamp);
+  gst_rtp_buffer_set_ssrc (&ret_rtp, gst_rtp_buffer_get_ssrc (red_rtp));
+  for (i = 0; i < csrc_count; ++i)
+    gst_rtp_buffer_set_csrc (&ret_rtp, i, gst_rtp_buffer_get_csrc (red_rtp, i));
+  gst_rtp_buffer_unmap (&ret_rtp);
+
+  ret = gst_buffer_append (ret,
+      gst_rtp_buffer_get_payload_subbuffer (red_rtp,
+          red_payload_subbuffer_start, red_payload_subbuffer_len));
+
+  /* Timestamps, meta, flags from the RED packet should go to main block packet */
+  gst_buffer_copy_into (ret, red_rtp->buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+  return ret;
+}
+
+/* Turns one redundant RED block into a recovered RTP packet, or returns
+ * NULL when the history shows no packet was lost at the reconstructed
+ * timestamp.  Recovered buffers are flagged GST_RTP_BUFFER_FLAG_REDUNDANT.
+ * Both offsets are always advanced past this block. */
+static GstBuffer *
+gst_rtp_red_create_from_redundant_block (GstRtpRedDec * self,
+    GstRTPBuffer * red_rtp, gsize * red_hdr_offset, gsize * red_payload_offset)
+{
+  guint8 *payload = gst_rtp_buffer_get_payload (red_rtp);
+  guint8 *red_hdr = payload + *red_hdr_offset;
+  /* The block header stores an offset relative to the RED packet's own
+   * timestamp */
+  guint32 lost_timestamp = gst_rtp_buffer_get_timestamp (red_rtp) -
+      rtp_red_block_get_timestamp_offset (red_hdr);
+
+  GstBuffer *ret = NULL;
+  guint16 lost_seq = 0;
+  if (gst_red_history_lost_seq_num_for_timestamp (self, lost_timestamp,
+          &lost_seq)) {
+    GST_LOG_OBJECT (self, "Recovering from RED packet pt=%u ts=%u seq=%u"
+        " len=%u present", rtp_red_block_get_payload_type (red_hdr),
+        lost_timestamp, lost_seq, rtp_red_block_get_payload_length (red_hdr));
+    ret =
+        gst_rtp_red_create_packet (self, red_rtp, FALSE,
+        rtp_red_block_get_payload_type (red_hdr), lost_seq, lost_timestamp,
+        *red_payload_offset, rtp_red_block_get_payload_length (red_hdr));
+    GST_BUFFER_FLAG_SET (ret, GST_RTP_BUFFER_FLAG_REDUNDANT);
+  } else {
+    GST_LOG_OBJECT (self, "Ignore RED packet pt=%u ts=%u len=%u because already"
+        " present", rtp_red_block_get_payload_type (red_hdr), lost_timestamp,
+        rtp_red_block_get_payload_length (red_hdr));
+  }
+
+  *red_hdr_offset += rtp_red_block_header_get_length (TRUE);
+  *red_payload_offset += rtp_red_block_get_payload_length (red_hdr);
+  return ret;
+}
+
+/* Extracts the primary (final, non-redundant) RED block as a regular RTP
+ * packet carrying the RED packet's own seq/timestamp/marker; consumes the
+ * remainder of the RED payload. */
+static GstBuffer *
+gst_rtp_red_create_from_main_block (GstRtpRedDec * self,
+    GstRTPBuffer * red_rtp, gsize red_hdr_offset, gsize * red_payload_offset)
+{
+  guint8 *payload = gst_rtp_buffer_get_payload (red_rtp);
+  GstBuffer *ret = gst_rtp_red_create_packet (self, red_rtp,
+      gst_rtp_buffer_get_marker (red_rtp),
+      rtp_red_block_get_payload_type (payload + red_hdr_offset),
+      gst_rtp_buffer_get_seq (red_rtp),
+      gst_rtp_buffer_get_timestamp (red_rtp),
+      *red_payload_offset, -1);
+  *red_payload_offset = gst_rtp_buffer_get_payload_len (red_rtp);
+  GST_LOG_OBJECT (self, "Extracting main payload from RED pt=%u seq=%u ts=%u"
+      " marker=%u", rtp_red_block_get_payload_type (payload + red_hdr_offset),
+      gst_rtp_buffer_get_seq (red_rtp), gst_rtp_buffer_get_timestamp (red_rtp),
+      gst_rtp_buffer_get_marker (red_rtp));
+
+  return ret;
+}
+
+/* Dispatches one RED block to the redundant- or main-block extractor,
+ * based on the F bit of its header. */
+static GstBuffer *
+gst_rtp_red_create_from_block (GstRtpRedDec * self, GstRTPBuffer * red_rtp,
+    gsize * red_hdr_offset, gsize * red_payload_offset)
+{
+  guint8 *payload = gst_rtp_buffer_get_payload (red_rtp);
+
+  if (rtp_red_block_is_redundant (payload + (*red_hdr_offset)))
+    return gst_rtp_red_create_from_redundant_block (self, red_rtp,
+        red_hdr_offset, red_payload_offset);
+
+  return gst_rtp_red_create_from_main_block (self, red_rtp, *red_hdr_offset,
+      red_payload_offset);
+}
+
+/* Splits a validated RED packet into individual RTP packets (recovered
+ * redundant blocks first, primary block last) and pushes each downstream,
+ * stopping at the first non-OK flow return. */
+static GstFlowReturn
+gst_rtp_red_process (GstRtpRedDec * self, GstRTPBuffer * red_rtp,
+    gsize first_red_payload_offset)
+{
+  gsize red_hdr_offset = 0;
+  gsize red_payload_offset = first_red_payload_offset;
+  gsize payload_len = gst_rtp_buffer_get_payload_len (red_rtp);
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  do {
+    /* NULL means the redundant block duplicated a packet we already have */
+    GstBuffer *buf =
+        gst_rtp_red_create_from_block (self, red_rtp, &red_hdr_offset,
+        &red_payload_offset);
+    if (buf)
+      ret = gst_pad_push (self->srcpad, buf);
+  } while (GST_FLOW_OK == ret && red_payload_offset < payload_len);
+
+  return ret;
+}
+
+/* Sink chain function.  Buffers are passed through untouched when no RED
+ * payload type is configured, the buffer cannot be mapped as RTP, or its
+ * payload type does not match; otherwise the RED packet is validated and
+ * decomposed.  The seq/timestamp history is updated for every mappable
+ * RTP packet either way. */
+static GstFlowReturn
+gst_rtp_red_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstRtpRedDec *self = GST_RTP_RED_DEC (parent);
+  GstRTPBuffer irtp = GST_RTP_BUFFER_INIT;
+  GstFlowReturn ret = GST_FLOW_OK;
+  gsize first_red_payload_offset = 0;
+
+  if (self->pt == UNDEF_PT)
+    return gst_pad_push (self->srcpad, buffer);
+
+  if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &irtp))
+    return gst_pad_push (self->srcpad, buffer);
+
+  gst_rtp_red_history_update (self, &irtp);
+
+  if (self->pt != gst_rtp_buffer_get_payload_type (&irtp)) {
+    GST_LOG_RTP_PACKET (self, "rtp header (incoming)", &irtp);
+
+    gst_rtp_buffer_unmap (&irtp);
+    return gst_pad_push (self->srcpad, buffer);
+  }
+
+  self->num_received++;
+
+  if (rtp_red_buffer_is_valid (self, &irtp, &first_red_payload_offset)) {
+    GST_DEBUG_RTP_PACKET (self, "rtp header (red)", &irtp);
+    ret = gst_rtp_red_process (self, &irtp, first_red_payload_offset);
+  }
+
+  /* The RED buffer itself is never forwarded: its content was re-pushed as
+   * individual packets (invalid RED packets are simply dropped). */
+  gst_rtp_buffer_unmap (&irtp);
+  gst_buffer_unref (buffer);
+  return ret;
+}
+
+/* Frees the packet history.
+ *
+ * GObject dispose can run more than once, so the queue pointer is cleared
+ * after freeing to keep repeated invocations from double-freeing. */
+static void
+gst_rtp_red_dec_dispose (GObject * obj)
+{
+  GstRtpRedDec *self = GST_RTP_RED_DEC (obj);
+
+  if (self->rtp_history) {
+    g_queue_free_full (self->rtp_history, rtp_hist_item_free);
+    self->rtp_history = NULL;
+  }
+
+  G_OBJECT_CLASS (gst_rtp_red_dec_parent_class)->dispose (obj);
+}
+
+/* Instance init: creates src/sink pads from the class templates, installs
+ * the chain function, proxies caps/allocation queries through, and
+ * initializes the property defaults and the packet history queue. */
+static void
+gst_rtp_red_dec_init (GstRtpRedDec * self)
+{
+  GstPadTemplate *pad_template;
+
+  pad_template =
+      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self), "src");
+  self->srcpad = gst_pad_new_from_template (pad_template, "src");
+  gst_element_add_pad (GST_ELEMENT_CAST (self), self->srcpad);
+
+  pad_template =
+      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self), "sink");
+  self->sinkpad = gst_pad_new_from_template (pad_template, "sink");
+  gst_pad_set_chain_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_red_dec_chain));
+  GST_PAD_SET_PROXY_CAPS (self->sinkpad);
+  GST_PAD_SET_PROXY_ALLOCATION (self->sinkpad);
+  gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+  self->pt = DEFAULT_PT;
+  self->num_received = 0;
+  self->rtp_history = g_queue_new ();
+}
+
+
+/* Standard GObject property setter ("pt" is the only writable property). */
+static void
+gst_rtp_red_dec_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpRedDec *self = GST_RTP_RED_DEC (object);
+
+  switch (prop_id) {
+    case PROP_PT:
+      self->pt = g_value_get_int (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Standard GObject property getter ("pt" and the read-only "received"
+ * packet counter). */
+static void
+gst_rtp_red_dec_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpRedDec *self = GST_RTP_RED_DEC (object);
+  switch (prop_id) {
+    case PROP_PT:
+      g_value_set_int (value, self->pt);
+      break;
+    case PROP_RECEIVED:
+      g_value_set_uint (value, self->num_received);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class init: pad templates, element metadata, property/dispose vfuncs,
+ * the "pt"/"received" properties and the debug category. */
+static void
+gst_rtp_red_dec_class_init (GstRtpRedDecClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&src_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&sink_template));
+
+  gst_element_class_set_metadata (element_class,
+      "Redundant Audio Data (RED) Decoder",
+      "Codec/Depayloader/Network/RTP",
+      "Decode Redundant Audio Data (RED)",
+      "Hani Mustafa <hani@pexip.com>, Mikhail Fludkov <misha@pexip.com>");
+
+  gobject_class->set_property =
+      GST_DEBUG_FUNCPTR (gst_rtp_red_dec_set_property);
+  gobject_class->get_property =
+      GST_DEBUG_FUNCPTR (gst_rtp_red_dec_get_property);
+  gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_rtp_red_dec_dispose);
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_PT,
+      g_param_spec_int ("pt", "payload type",
+          "Payload type FEC packets",
+          MIN_PT, MAX_PT, DEFAULT_PT,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_RECEIVED,
+      g_param_spec_uint ("received", "Received",
+          "Count of received packets",
+          0, G_MAXUINT32, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_red_dec_debug, "rtpreddec", 0,
+      "RTP RED Decoder");
+}
diff --git a/gst/rtp/gstrtpreddec.h b/gst/rtp/gstrtpreddec.h
new file mode 100644
index 0000000000..1ab864e2c9
--- /dev/null
+++ b/gst/rtp/gstrtpreddec.h
@@ -0,0 +1,61 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __GST_RTP_RED_DEC_H__
+#define __GST_RTP_RED_DEC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_RED_DEC \
+ (gst_rtp_red_dec_get_type())
+#define GST_RTP_RED_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_RED_DEC,GstRtpRedDec))
+#define GST_RTP_RED_DEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_RED_DEC,GstRtpRedDecClass))
+#define GST_IS_RTP_RED_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_RED_DEC))
+#define GST_IS_RTP_RED_DEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_RED_DEC))
+
+typedef struct _GstRtpRedDec GstRtpRedDec;
+typedef struct _GstRtpRedDecClass GstRtpRedDecClass;
+
+struct _GstRtpRedDecClass {
+ GstElementClass parent_class;
+};
+
+/* Instance structure of the RED decoder element. */
+struct _GstRtpRedDec {
+ GstElement parent;
+
+ GstPad *srcpad;   /* always-present source pad */
+ GstPad *sinkpad;  /* always-present sink pad */
+ gint pt;          /* payload type treated as RED ("pt" property) */
+ guint num_received; /* packet counter exposed as "received" */
+
+ GQueue *rtp_history; /* recently seen packets, used for de-duplication */
+};
+
+GType gst_rtp_red_dec_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_RED_DEC_H__ */
diff --git a/gst/rtp/gstrtpredenc.c b/gst/rtp/gstrtpredenc.c
new file mode 100644
index 0000000000..862f799cec
--- /dev/null
+++ b/gst/rtp/gstrtpredenc.c
@@ -0,0 +1,532 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+/**
+ * SECTION:element-rtpredenc
+ * @short_description: RTP Redundant Audio Data (RED) encoder
+ * @title: rtpredenc
+ *
+ * Encode Redundant Audio Data (RED) as per RFC 2198.
+ *
+ * This element is mostly provided for chrome webrtc compatibility:
+ * chrome expects protection packets generated by #GstRtpUlpFecEnc
+ * to be wrapped in RED packets for backward compatibility purposes,
+ * but does not actually make use of the redundant packets that could
+ * be encoded with this element.
+ *
+ * As such, when used for that purpose, only the #GstRtpRedEnc:pt property
+ * should be set to a payload type different from both the protected and
+ * protection packets' payload types.
+ *
+ * When using #GstRtpBin, this element should be inserted through the
+ * #GstRtpBin::request-fec-encoder signal.
+ *
+ * ## Example pipeline
+ *
+ * |[
+ * gst-launch-1.0 videotestsrc ! x264enc ! video/x-h264, profile=baseline ! rtph264pay pt=96 ! rtpulpfecenc percentage=100 pt=122 ! rtpredenc pt=122 distance=2 ! identity drop-probability=0.05 ! udpsink port=8888
+ * ]| This example will send a stream with RED and ULP FEC.
+ *
+ * See also: #GstRtpRedDec, #GstWebRTCBin, #GstRtpBin
+ * Since: 1.14
+ */
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <string.h>
+#include <stdio.h>
+
+#include "gstrtpelements.h"
+#include "rtpredcommon.h"
+#include "gstrtpredenc.h"
+
+typedef struct
+{
+ guint8 pt;
+ guint32 timestamp;
+ GstBuffer *payload;
+} RTPHistItem;
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp"));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp"));
+
+#define DEFAULT_PT (0)
+#define DEFAULT_DISTANCE (0)
+#define DEFAULT_ALLOW_NO_RED_BLOCKS (TRUE)
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_red_enc_debug);
+#define GST_CAT_DEFAULT (gst_rtp_red_enc_debug)
+G_DEFINE_TYPE (GstRtpRedEnc, gst_rtp_red_enc, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpredenc, "rtpredenc", GST_RANK_NONE,
+ GST_TYPE_RTP_RED_ENC, rtp_element_init (plugin));
+
+enum
+{
+ PROP_0,
+ PROP_PT,
+ PROP_SENT,
+ PROP_DISTANCE,
+ PROP_ALLOW_NO_RED_BLOCKS
+};
+
+/* Fill a history item from a mapped RTP buffer.
+ * Takes ownership of rtp_payload (unreffed by replace/free below). */
+static void
+rtp_hist_item_init (RTPHistItem * item, GstRTPBuffer * rtp,
+ GstBuffer * rtp_payload)
+{
+ item->pt = gst_rtp_buffer_get_payload_type (rtp);
+ item->timestamp = gst_rtp_buffer_get_timestamp (rtp);
+ item->payload = rtp_payload;
+}
+
+/* Allocate and initialise a new history item. */
+static RTPHistItem *
+rtp_hist_item_new (GstRTPBuffer * rtp, GstBuffer * rtp_payload)
+{
+ RTPHistItem *item = g_slice_new0 (RTPHistItem);
+ rtp_hist_item_init (item, rtp, rtp_payload);
+ return item;
+}
+
+/* Reuse an existing item: drop its old payload and store the new one. */
+static void
+rtp_hist_item_replace (RTPHistItem * item, GstRTPBuffer * rtp,
+ GstBuffer * rtp_payload)
+{
+ gst_buffer_unref (item->payload);
+ rtp_hist_item_init (item, rtp, rtp_payload);
+}
+
+/* GDestroyNotify: release an item's payload and the item itself. */
+static void
+rtp_hist_item_free (gpointer _item)
+{
+ RTPHistItem *item = _item;
+ gst_buffer_unref (item->payload);
+ g_slice_free (RTPHistItem, item);
+}
+
+/* Build a CAPS event equal to @caps but with the "payload" field replaced
+ * by @pt (the RED payload type). The caller keeps ownership of @caps. */
+static GstEvent *
+_create_caps_event (const GstCaps * caps, guint8 pt)
+{
+ GstEvent *ret;
+ GstCaps *new = gst_caps_copy (caps);
+ GstStructure *s = gst_caps_get_structure (new, 0);
+ gst_structure_set (s, "payload", G_TYPE_INT, pt, NULL);
+ GST_INFO ("sinkcaps %" GST_PTR_FORMAT ", srccaps %" GST_PTR_FORMAT,
+ caps, new);
+ ret = gst_event_new_caps (new);
+ gst_caps_unref (new);
+ return ret;
+}
+
+/* Allocate the RED packet and fill its RTP header plus the RED block
+ * headers (RFC 2198). The returned buffer contains only the headers;
+ * the caller appends the redundant and primary payloads afterwards.
+ *
+ * Layout: optional 4-byte "redundant" block header (F=1) followed by a
+ * 1-byte final block header (F=0) for the primary data. */
+static GstBuffer *
+_alloc_red_packet_and_fill_headers (GstRtpRedEnc * self,
+ RTPHistItem * redundant_block, GstRTPBuffer * inp_rtp)
+{
+ guint red_header_size = rtp_red_block_header_get_length (FALSE) +
+ (redundant_block ? rtp_red_block_header_get_length (TRUE) : 0);
+
+ guint32 timestamp = gst_rtp_buffer_get_timestamp (inp_rtp);
+ guint csrc_count = gst_rtp_buffer_get_csrc_count (inp_rtp);
+ GstBuffer *red = gst_rtp_buffer_new_allocate (red_header_size, 0, csrc_count);
+ guint8 *red_block_header;
+ GstRTPBuffer red_rtp = GST_RTP_BUFFER_INIT;
+ guint i;
+
+ if (!gst_rtp_buffer_map (red, GST_MAP_WRITE, &red_rtp))
+ g_assert_not_reached ();
+
+ /* Copying RTP header of incoming packet */
+ if (gst_rtp_buffer_get_extension (inp_rtp))
+ GST_WARNING_OBJECT (self, "FIXME: Ignoring RTP extension");
+
+ gst_rtp_buffer_set_marker (&red_rtp, gst_rtp_buffer_get_marker (inp_rtp));
+ gst_rtp_buffer_set_payload_type (&red_rtp, self->pt);
+ gst_rtp_buffer_set_seq (&red_rtp, gst_rtp_buffer_get_seq (inp_rtp));
+ gst_rtp_buffer_set_timestamp (&red_rtp, timestamp);
+ gst_rtp_buffer_set_ssrc (&red_rtp, gst_rtp_buffer_get_ssrc (inp_rtp));
+ for (i = 0; i != csrc_count; ++i)
+ gst_rtp_buffer_set_csrc (&red_rtp, i,
+ gst_rtp_buffer_get_csrc ((inp_rtp), i));
+
+ /* Filling RED block headers */
+ red_block_header = gst_rtp_buffer_get_payload (&red_rtp);
+ if (redundant_block) {
+ rtp_red_block_set_is_redundant (red_block_header, TRUE);
+ rtp_red_block_set_payload_type (red_block_header, redundant_block->pt);
+ /* 14-bit offset relative to the current packet's timestamp */
+ rtp_red_block_set_timestamp_offset (red_block_header,
+ timestamp - redundant_block->timestamp);
+ rtp_red_block_set_payload_length (red_block_header,
+ gst_buffer_get_size (redundant_block->payload));
+
+ red_block_header += rtp_red_block_header_get_length (TRUE);
+ }
+ rtp_red_block_set_is_redundant (red_block_header, FALSE);
+ rtp_red_block_set_payload_type (red_block_header,
+ gst_rtp_buffer_get_payload_type (inp_rtp));
+
+ gst_rtp_buffer_unmap (&red_rtp);
+
+ /* Carry over metadata (flags, timestamps, metas) from the input buffer */
+ gst_buffer_copy_into (red, inp_rtp->buffer, GST_BUFFER_COPY_METADATA, 0, -1);
+ return red;
+}
+
+/* Assemble the full RED packet: headers, then the (optional) redundant
+ * payload, then the primary payload. Refs are taken on both payloads;
+ * the caller keeps its own references. */
+static GstBuffer *
+_create_red_packet (GstRtpRedEnc * self,
+ GstRTPBuffer * rtp, RTPHistItem * redundant_block, GstBuffer * main_block)
+{
+ GstBuffer *red =
+ _alloc_red_packet_and_fill_headers (self, redundant_block, rtp);
+ if (redundant_block)
+ red = gst_buffer_append (red, gst_buffer_ref (redundant_block->payload));
+ red = gst_buffer_append (red, gst_buffer_ref (main_block));
+ return red;
+}
+
+/* Pick the history entry to embed as the redundant block, or NULL when
+ * no usable entry exists (empty history, distance 0, timestamp offset
+ * out of the 14-bit range or negative, or payload too large for the
+ * 10-bit RED length field). Returned item stays owned by the history. */
+static RTPHistItem *
+_red_history_get_redundant_block (GstRtpRedEnc * self,
+ guint32 current_timestamp, guint distance)
+{
+ RTPHistItem *item;
+ gint32 timestamp_offset;
+
+ if (0 == distance || 0 == self->rtp_history->length)
+ return NULL;
+
+ /* Oldest entry kept — at most 'distance' packets back */
+ item = self->rtp_history->tail->data;
+ timestamp_offset = current_timestamp - item->timestamp;
+ if (G_UNLIKELY (timestamp_offset > RED_BLOCK_TIMESTAMP_OFFSET_MAX)) {
+ GST_WARNING_OBJECT (self,
+ "Can't create redundant block with distance %u, "
+ "timestamp offset is too large %d (%u - %u) > %u",
+ distance, timestamp_offset, current_timestamp, item->timestamp,
+ RED_BLOCK_TIMESTAMP_OFFSET_MAX);
+ return NULL;
+ }
+
+ if (G_UNLIKELY (timestamp_offset < 0)) {
+ GST_WARNING_OBJECT (self,
+ "Can't create redundant block with distance %u, "
+ "timestamp offset is negative %d (%u - %u)",
+ distance, timestamp_offset, current_timestamp, item->timestamp);
+ return NULL;
+ }
+
+ if (G_UNLIKELY (gst_buffer_get_size (item->payload) > RED_BLOCK_LENGTH_MAX)) {
+ GST_WARNING_OBJECT (self,
+ "Can't create redundant block with distance %u, "
+ "red block is too large %u > %u",
+ distance, (guint) gst_buffer_get_size (item->payload),
+ RED_BLOCK_LENGTH_MAX);
+ return NULL;
+ }
+
+ /* _red_history_trim should take care it never happens */
+ g_assert_cmpint (self->rtp_history->length, <=, distance);
+
+ if (G_UNLIKELY (self->rtp_history->length < distance))
+ GST_DEBUG_OBJECT (self,
+ "Don't have enough buffers yet, "
+ "adding redundant block with distance %u and timestamp %u",
+ self->rtp_history->length, item->timestamp);
+ return item;
+}
+
+/* Push a packet's payload onto the history head, taking ownership of
+ * @rtp_payload. When the history is full the tail entry's GList link is
+ * recycled (payload swapped in place) to avoid an alloc/free per packet.
+ * With max_history_length == 0 the payload is simply dropped. */
+static void
+_red_history_prepend (GstRtpRedEnc * self,
+ GstRTPBuffer * rtp, GstBuffer * rtp_payload, guint max_history_length)
+{
+ GList *link;
+
+ if (0 == max_history_length) {
+ if (rtp_payload)
+ gst_buffer_unref (rtp_payload);
+ return;
+ }
+
+ g_assert (NULL != rtp_payload);
+
+ if (self->rtp_history->length >= max_history_length) {
+ link = g_queue_pop_tail_link (self->rtp_history);
+ rtp_hist_item_replace (link->data, rtp, rtp_payload);
+ } else {
+ link = g_list_alloc ();
+ link->data = rtp_hist_item_new (rtp, rtp_payload);
+ }
+ g_queue_push_head_link (self->rtp_history, link);
+}
+
+/* Drop oldest history entries until at most @max_history_length remain;
+ * needed when the "distance" property shrinks at runtime. */
+static void
+_red_history_trim (GstRtpRedEnc * self, guint max_history_length)
+{
+ while (max_history_length < self->rtp_history->length)
+ rtp_hist_item_free (g_queue_pop_tail (self->rtp_history));
+}
+
+/* Push @buffer on the src pad, first (re)announcing caps whenever the
+ * "red-ness" of the stream flips or a new pt was configured: RED output
+ * advertises self->pt, plain output re-uses the sink caps unchanged. */
+static GstFlowReturn
+_pad_push (GstRtpRedEnc * self, GstBuffer * buffer, gboolean is_red)
+{
+ if (self->send_caps || is_red != self->is_current_caps_red) {
+ GstEvent *event;
+ GstCaps *caps = gst_pad_get_current_caps (self->sinkpad);
+ if (is_red)
+ event = _create_caps_event (caps, self->pt);
+ else
+ event = gst_event_new_caps (caps);
+ gst_caps_unref (caps);
+
+ gst_pad_push_event (self->srcpad, event);
+ self->send_caps = FALSE;
+ self->is_current_caps_red = is_red;
+ }
+ return gst_pad_push (self->srcpad, buffer);
+}
+
+/* Forward @buffer unmodified (not RED-wrapped), still recording its
+ * payload in the history (when distance > 0) so later packets can use
+ * it as a redundant block. Unmaps @rtp before pushing. */
+static GstFlowReturn
+_push_nonred_packet (GstRtpRedEnc * self,
+ GstRTPBuffer * rtp, GstBuffer * buffer, guint distance)
+{
+ GstBuffer *main_block = distance > 0 ?
+ gst_rtp_buffer_get_payload_buffer (rtp) : NULL;
+ _red_history_prepend (self, rtp, main_block, distance);
+
+ gst_rtp_buffer_unmap (rtp);
+ return _pad_push (self, buffer, FALSE);
+}
+
+/* Wrap the input packet into a RED packet (optionally embedding
+ * @redundant_block), record its payload in the history, drop the
+ * original buffer and push the RED packet instead. */
+static GstFlowReturn
+_push_red_packet (GstRtpRedEnc * self,
+ GstRTPBuffer * rtp, GstBuffer * buffer, RTPHistItem * redundant_block,
+ guint distance)
+{
+ GstBuffer *main_block = gst_rtp_buffer_get_payload_buffer (rtp);
+ GstBuffer *red_buffer =
+ _create_red_packet (self, rtp, redundant_block, main_block);
+
+ /* history takes ownership of main_block; red_buffer holds its own ref */
+ _red_history_prepend (self, rtp, main_block, distance);
+ gst_rtp_buffer_unmap (rtp);
+ gst_buffer_unref (buffer);
+
+ self->num_sent++;
+ return _pad_push (self, red_buffer, TRUE);
+}
+
+/* Sink pad chain function: decides per packet whether to emit it as-is,
+ * or wrapped in RED with/without a redundant block, based on the
+ * "distance" and "allow-no-red-blocks" properties. */
+static GstFlowReturn
+gst_rtp_red_enc_chain (GstPad G_GNUC_UNUSED * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstRtpRedEnc *self = GST_RTP_RED_ENC (parent);
+ guint distance = self->distance;
+ guint only_with_redundant_data = !self->allow_no_red_blocks;
+ RTPHistItem *redundant_block;
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+
+ /* We need to "trim" the history if 'distance' property has changed */
+ _red_history_trim (self, distance);
+
+ if (0 == distance && only_with_redundant_data)
+ return _pad_push (self, buffer, FALSE);
+
+ /* Not a valid RTP packet: pass through without changing caps state */
+ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp))
+ return _pad_push (self, buffer, self->is_current_caps_red);
+
+ /* If can't get data for redundant block push the packet as is */
+ redundant_block = _red_history_get_redundant_block (self,
+ gst_rtp_buffer_get_timestamp (&rtp), distance);
+ if (NULL == redundant_block && only_with_redundant_data)
+ return _push_nonred_packet (self, &rtp, buffer, distance);
+
+ /* About to create RED packet with or without redundant data */
+ return _push_red_packet (self, &rtp, buffer, redundant_block, distance);
+}
+
+/* Sink pad event handler: rewrites CAPS events to advertise the RED
+ * payload type when the element is (or may start) producing RED output;
+ * everything else goes through the default handler. */
+static gboolean
+gst_rtp_red_enc_event_sink (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstRtpRedEnc *self = GST_RTP_RED_ENC (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ gboolean replace_with_red_caps =
+ self->is_current_caps_red || self->allow_no_red_blocks;
+
+ if (replace_with_red_caps) {
+ GstCaps *caps;
+ gst_event_parse_caps (event, &caps);
+ gst_event_take (&event, _create_caps_event (caps, self->pt));
+
+ self->is_current_caps_red = TRUE;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+}
+
+/* GObject dispose: release the RED payload history.
+ *
+ * dispose() may be invoked more than once on the same instance, so the
+ * queue is freed only if still set and the pointer is cleared afterwards
+ * to avoid a double free. */
+static void
+gst_rtp_red_enc_dispose (GObject * obj)
+{
+ GstRtpRedEnc *self = GST_RTP_RED_ENC (obj);
+
+ if (self->rtp_history) {
+ g_queue_free_full (self->rtp_history, rtp_hist_item_free);
+ self->rtp_history = NULL;
+ }
+
+ G_OBJECT_CLASS (gst_rtp_red_enc_parent_class)->dispose (obj);
+}
+
+/* Instance init: create the src/sink pads from the class templates,
+ * install the chain/event functions on the sink pad and set every
+ * property to its default. */
+static void
+gst_rtp_red_enc_init (GstRtpRedEnc * self)
+{
+ GstPadTemplate *pad_template;
+
+ pad_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self), "src");
+ self->srcpad = gst_pad_new_from_template (pad_template, "src");
+ gst_element_add_pad (GST_ELEMENT_CAST (self), self->srcpad);
+
+ pad_template =
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self), "sink");
+ self->sinkpad = gst_pad_new_from_template (pad_template, "sink");
+ gst_pad_set_chain_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtp_red_enc_chain));
+ gst_pad_set_event_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtp_red_enc_event_sink));
+ GST_PAD_SET_PROXY_CAPS (self->sinkpad);
+ GST_PAD_SET_PROXY_ALLOCATION (self->sinkpad);
+ gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+ self->pt = DEFAULT_PT;
+ self->distance = DEFAULT_DISTANCE;
+ self->allow_no_red_blocks = DEFAULT_ALLOW_NO_RED_BLOCKS;
+ self->num_sent = 0;
+ self->rtp_history = g_queue_new ();
+}
+
+
+/* GObject property setter. Changing "pt" while RED caps are active
+ * forces a caps re-announcement on the next pushed buffer. */
+static void
+gst_rtp_red_enc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpRedEnc *self = GST_RTP_RED_ENC (object);
+ switch (prop_id) {
+ case PROP_PT:
+ {
+ gint prev_pt = self->pt;
+ self->pt = g_value_get_int (value);
+ self->send_caps = self->pt != prev_pt && self->is_current_caps_red;
+ }
+ break;
+ case PROP_DISTANCE:
+ self->distance = g_value_get_uint (value);
+ break;
+ case PROP_ALLOW_NO_RED_BLOCKS:
+ self->allow_no_red_blocks = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter. */
+static void
+gst_rtp_red_enc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpRedEnc *self = GST_RTP_RED_ENC (object);
+ switch (prop_id) {
+ case PROP_PT:
+ g_value_set_int (value, self->pt);
+ break;
+ case PROP_SENT:
+ g_value_set_uint (value, self->num_sent);
+ break;
+ case PROP_DISTANCE:
+ g_value_set_uint (value, self->distance);
+ break;
+ case PROP_ALLOW_NO_RED_BLOCKS:
+ g_value_set_boolean (value, self->allow_no_red_blocks);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject class initialisation for the RED encoder element: registers
+ * pad templates, element metadata, property vfuncs and the "pt",
+ * "sent", "distance" and "allow-no-red-blocks" properties.
+ *
+ * Fix: the "pt" property blurb claimed "(-1 disable)" although the
+ * declared range is 0..127; the misleading suffix is removed. */
+static void
+gst_rtp_red_enc_class_init (GstRtpRedEncClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+
+ gst_element_class_set_metadata (element_class,
+ "Redundant Audio Data (RED) Encoder",
+ "Codec/Payloader/Network/RTP",
+ "Encode Redundant Audio Data (RED)",
+ "Hani Mustafa <hani@pexip.com>, Mikhail Fludkov <misha@pexip.com>");
+
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_red_enc_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_red_enc_get_property);
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_rtp_red_enc_dispose);
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_PT,
+ g_param_spec_int ("pt", "payload type",
+ "Payload type FEC packets",
+ 0, 127, DEFAULT_PT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_SENT,
+ g_param_spec_uint ("sent", "Sent",
+ "Count of sent packets",
+ 0, G_MAXUINT32, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DISTANCE,
+ g_param_spec_uint ("distance", "RED distance",
+ "Tells which media packet to use as a redundant block "
+ "(0 - no redundant blocks, 1 to use previous packet, "
+ "2 to use the packet before previous, etc.)",
+ 0, G_MAXUINT32, DEFAULT_DISTANCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_ALLOW_NO_RED_BLOCKS, g_param_spec_boolean ("allow-no-red-blocks",
+ "Allow no redundant blocks",
+ "true - can produce RED packets even without redundant blocks (distance==0) "
+ "false - RED packets will be produced only if distance>0",
+ DEFAULT_ALLOW_NO_RED_BLOCKS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_red_enc_debug, "rtpredenc", 0,
+ "RTP RED Encoder");
+}
diff --git a/gst/rtp/gstrtpredenc.h b/gst/rtp/gstrtpredenc.h
new file mode 100644
index 0000000000..dc2b1ebbc5
--- /dev/null
+++ b/gst/rtp/gstrtpredenc.h
@@ -0,0 +1,65 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __GST_RTP_RED_ENC_H__
+#define __GST_RTP_RED_ENC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_RED_ENC \
+ (gst_rtp_red_enc_get_type())
+#define GST_RTP_RED_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_RED_ENC,GstRtpRedEnc))
+#define GST_RTP_RED_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_RED_ENC,GstRtpRedEncClass))
+#define GST_IS_RTP_RED_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_RED_ENC))
+#define GST_IS_RTP_RED_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_RED_ENC))
+
+typedef struct _GstRtpRedEnc GstRtpRedEnc;
+typedef struct _GstRtpRedEncClass GstRtpRedEncClass;
+
+struct _GstRtpRedEncClass {
+ GstElementClass parent_class;
+};
+
+/* Instance structure of the RED encoder element. */
+struct _GstRtpRedEnc {
+ GstElement parent;
+ GstPad *srcpad;   /* always-present source pad */
+ GstPad *sinkpad;  /* always-present sink pad */
+
+ gint pt;          /* payload type for produced RED packets ("pt") */
+ guint num_sent;   /* packet counter exposed as "sent" */
+ guint distance;   /* how far back the redundant block is taken from */
+ gboolean allow_no_red_blocks; /* produce RED even without redundancy */
+
+ GQueue *rtp_history;        /* payloads of recently sent packets */
+ gboolean send_caps;         /* force caps re-announce on next push */
+ gboolean is_current_caps_red; /* whether downstream caps carry RED pt */
+};
+
+GType gst_rtp_red_enc_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_RED_ENC_H__ */
diff --git a/gst/rtp/gstrtpsbcdepay.c b/gst/rtp/gstrtpsbcdepay.c
new file mode 100644
index 0000000000..f5dec8b787
--- /dev/null
+++ b/gst/rtp/gstrtpsbcdepay.c
@@ -0,0 +1,391 @@
+/*
+ * GStreamer RTP SBC depayloader
+ *
+ * Copyright (C) 2012 Collabora Ltd.
+ * @author: Arun Raghavan <arun.raghavan@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpsbcdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpsbcdepay_debug);
+#define GST_CAT_DEFAULT (rtpsbcdepay_debug)
+
+static GstStaticPadTemplate gst_rtp_sbc_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-sbc, "
+ "rate = (int) { 16000, 32000, 44100, 48000 }, "
+ "channels = (int) [ 1, 2 ], "
+ "mode = (string) { mono, dual, stereo, joint }, "
+ "blocks = (int) { 4, 8, 12, 16 }, "
+ "subbands = (int) { 4, 8 }, "
+ "allocation-method = (string) { snr, loudness }, "
+ "bitpool = (int) [ 2, 64 ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_sbc_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) audio,"
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) { 16000, 32000, 44100, 48000 },"
+ "encoding-name = (string) SBC")
+ );
+
+enum
+{
+ PROP_0,
+ PROP_IGNORE_TIMESTAMPS,
+ PROP_LAST
+};
+
+#define DEFAULT_IGNORE_TIMESTAMPS FALSE
+
+#define gst_rtp_sbc_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSbcDepay, gst_rtp_sbc_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsbcdepay, "rtpsbcdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_SBC_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_sbc_depay_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_rtp_sbc_depay_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_rtp_sbc_depay_finalize (GObject * object);
+
+static gboolean gst_rtp_sbc_depay_setcaps (GstRTPBaseDepayload * base,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_sbc_depay_process (GstRTPBaseDepayload * base,
+ GstRTPBuffer * rtp);
+
+/* GObject class initialisation for the SBC depayloader: installs the
+ * "ignore-timestamps" property, the depayload vfuncs, pad templates and
+ * element metadata.
+ *
+ * Fix: the "ignore-timestamps" blurb read "Various statistics" — an
+ * obvious copy-paste error — and now describes the actual behaviour. */
+static void
+gst_rtp_sbc_depay_class_init (GstRtpSbcDepayClass * klass)
+{
+ GstRTPBaseDepayloadClass *gstbasertpdepayload_class =
+ GST_RTP_BASE_DEPAYLOAD_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+ gobject_class->finalize = gst_rtp_sbc_depay_finalize;
+ gobject_class->set_property = gst_rtp_sbc_depay_set_property;
+ gobject_class->get_property = gst_rtp_sbc_depay_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_IGNORE_TIMESTAMPS,
+ g_param_spec_boolean ("ignore-timestamps", "Ignore Timestamps",
+ "Ignore timestamps from the RTP packets and produce output "
+ "timestamps based on the amount of depayloaded audio",
+ DEFAULT_IGNORE_TIMESTAMPS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstbasertpdepayload_class->set_caps = gst_rtp_sbc_depay_setcaps;
+ gstbasertpdepayload_class->process_rtp_packet = gst_rtp_sbc_depay_process;
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_sbc_depay_src_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_sbc_depay_sink_template);
+
+ GST_DEBUG_CATEGORY_INIT (rtpsbcdepay_debug, "rtpsbcdepay", 0,
+ "SBC Audio RTP Depayloader");
+
+ gst_element_class_set_static_metadata (element_class,
+ "RTP SBC audio depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Extracts SBC audio from RTP packets",
+ "Arun Raghavan <arun.raghavan@collabora.co.uk>");
+}
+
+/* Instance init: create the fragment-reassembly adapter and the audio
+ * stream aligner (rate is corrected later in setcaps; 48000 is just an
+ * initial value with 40ms alignment threshold and 1s discont wait). */
+static void
+gst_rtp_sbc_depay_init (GstRtpSbcDepay * rtpsbcdepay)
+{
+ rtpsbcdepay->adapter = gst_adapter_new ();
+ rtpsbcdepay->stream_align =
+ gst_audio_stream_align_new (48000, 40 * GST_MSECOND, 1 * GST_SECOND);
+ rtpsbcdepay->ignore_timestamps = DEFAULT_IGNORE_TIMESTAMPS;
+}
+
+/* GObject finalize: free the stream aligner and the adapter created in
+ * init, then chain up. */
+static void
+gst_rtp_sbc_depay_finalize (GObject * object)
+{
+ GstRtpSbcDepay *depay = GST_RTP_SBC_DEPAY (object);
+
+ gst_audio_stream_align_free (depay->stream_align);
+ gst_object_unref (depay->adapter);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject property setter ("ignore-timestamps" only). */
+static void
+gst_rtp_sbc_depay_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstRtpSbcDepay *depay = GST_RTP_SBC_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_IGNORE_TIMESTAMPS:
+ depay->ignore_timestamps = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter ("ignore-timestamps" only). */
+static void
+gst_rtp_sbc_depay_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstRtpSbcDepay *depay = GST_RTP_SBC_DEPAY (object);
+
+ switch (prop_id) {
+ case PROP_IGNORE_TIMESTAMPS:
+ g_value_set_boolean (value, depay->ignore_timestamps);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* FIXME: This duplicates similar functionality rtpsbcpay, but there isn't a
+ * simple way to consolidate the two. This is best done by moving the function
+ * to the codec-utils library in gst-plugins-base when these elements move to
+ * GStreamer. */
+/* Parse an SBC frame header (first 3 bytes of @data) and report the
+ * total frame length and the number of PCM samples it decodes to.
+ * Returns 0 on success, -1 if @size < 3, -2 on bad syncword. */
+static int
+gst_rtp_sbc_depay_get_params (GstRtpSbcDepay * depay, const guint8 * data,
+ gint size, int *framelen, int *samples)
+{
+ int blocks, channel_mode, channels, subbands, bitpool;
+ int length;
+
+ if (size < 3) {
+ /* Not enough data for the header */
+ return -1;
+ }
+
+ /* Sanity check */
+ if (data[0] != 0x9c) {
+ GST_WARNING_OBJECT (depay, "Bad packet: couldn't find syncword");
+ return -2;
+ }
+
+ /* Bit-packed fields of the SBC frame header, byte 1 */
+ blocks = (data[1] >> 4) & 0x3;
+ blocks = (blocks + 1) * 4;
+ channel_mode = (data[1] >> 2) & 0x3;
+ channels = channel_mode ? 2 : 1;
+ subbands = (data[1] & 0x1);
+ subbands = (subbands + 1) * 4;
+ bitpool = data[2];
+
+ length = 4 + ((4 * subbands * channels) / 8);
+
+ /* NOTE(review): a ceiling division by 8 would normally add 7, not 4;
+  * confirm the "+ 4" below against the SBC frame-length formula in the
+  * A2DP specification before changing it. */
+ if (channel_mode == 0 || channel_mode == 1) {
+ /* Mono || Dual channel */
+ length += ((blocks * channels * bitpool)
+ + 4 /* round up */ ) / 8;
+ } else {
+ /* Stereo || Joint stereo */
+ gboolean joint = (channel_mode == 3);
+
+ length += ((joint * subbands) + (blocks * bitpool)
+ + 4 /* round up */ ) / 8;
+ }
+
+ *framelen = length;
+ *samples = blocks * subbands;
+
+ return 0;
+}
+
+/* RTP base depayload set_caps vfunc: extract the clock rate from the
+ * sink caps, publish "audio/x-sbc, rate=..." on the src pad, flush any
+ * half-assembled fragments when the caps actually changed, and re-seed
+ * the stream aligner with the new rate. */
+static gboolean
+gst_rtp_sbc_depay_setcaps (GstRTPBaseDepayload * base, GstCaps * caps)
+{
+ GstRtpSbcDepay *depay = GST_RTP_SBC_DEPAY (base);
+ GstStructure *structure;
+ GstCaps *outcaps, *oldcaps;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &depay->rate))
+ goto bad_caps;
+
+ outcaps = gst_caps_new_simple ("audio/x-sbc", "rate", G_TYPE_INT,
+ depay->rate, NULL);
+
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (base), outcaps);
+
+ oldcaps = gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SINKPAD (base));
+ if (oldcaps && !gst_caps_can_intersect (oldcaps, caps)) {
+ /* Caps have changed, flush old data */
+ gst_adapter_clear (depay->adapter);
+ }
+
+ gst_caps_unref (outcaps);
+ if (oldcaps)
+ gst_caps_unref (oldcaps);
+
+ /* Reset when the caps are changing */
+ gst_audio_stream_align_set_rate (depay->stream_align, depay->rate);
+
+ return TRUE;
+
+bad_caps:
+ GST_WARNING_OBJECT (depay, "Can't support the caps we got: %"
+ GST_PTR_FORMAT, caps);
+ return FALSE;
+}
+
+/* RTP base depayload process vfunc: turn one RTP packet into (part of)
+ * an SBC stream buffer. Whole-frame packets are forwarded directly;
+ * fragmented frames (F/S/L flag octet) are collected in the adapter
+ * until the last fragment arrives. Returns NULL when more fragments
+ * are needed or the packet is invalid.
+ *
+ * Fixes versus the original:
+ *  - payload[0] was read without checking payload_len >= 1 (out-of-
+ *    bounds read on an empty payload);
+ *  - the bad_packet path could return a dangling pointer (fragment
+ *    branch unreffed 'data' before jumping) or an un-validated
+ *    sub-buffer; it now always clears 'data';
+ *  - the inner 'gint samples' shadowed the function-scope variable,
+ *    so reassembled fragments reported 0 samples to the
+ *    ignore-timestamps path below;
+ *  - get_params is given the real header size (4) instead of
+ *    payload_len.
+ */
+static GstBuffer *
+gst_rtp_sbc_depay_process (GstRTPBaseDepayload * base, GstRTPBuffer * rtp)
+{
+ GstRtpSbcDepay *depay = GST_RTP_SBC_DEPAY (base);
+ GstBuffer *data = NULL;
+
+ gboolean fragment, start, last;
+ guint8 nframes;
+ guint8 *payload;
+ guint payload_len;
+ gint samples = 0;
+
+ GstClockTime timestamp;
+
+ GST_LOG_OBJECT (depay, "Got %" G_GSIZE_FORMAT " bytes",
+ gst_buffer_get_size (rtp->buffer));
+
+ if (gst_rtp_buffer_get_marker (rtp)) {
+ /* Marker isn't supposed to be set */
+ GST_WARNING_OBJECT (depay, "Marker bit was set");
+ goto bad_packet;
+ }
+
+ timestamp = GST_BUFFER_DTS_OR_PTS (rtp->buffer);
+ if (depay->ignore_timestamps && timestamp == GST_CLOCK_TIME_NONE) {
+ GstClockTime initial_timestamp;
+ guint64 n_samples;
+
+ initial_timestamp =
+ gst_audio_stream_align_get_timestamp_at_discont (depay->stream_align);
+ n_samples =
+ gst_audio_stream_align_get_samples_since_discont (depay->stream_align);
+
+ if (initial_timestamp == GST_CLOCK_TIME_NONE) {
+ GST_ERROR_OBJECT (depay,
+ "Can only ignore timestamps on streams without valid initial timestamp");
+ return NULL;
+ }
+
+ timestamp =
+ initial_timestamp + gst_util_uint64_scale (n_samples, GST_SECOND,
+ depay->rate);
+ }
+
+ payload = gst_rtp_buffer_get_payload (rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+ /* The payload must at least contain the one-octet SBC RTP header */
+ if (payload_len < 1) {
+ GST_WARNING_OBJECT (depay, "Empty RTP payload");
+ goto bad_packet;
+ }
+
+ fragment = payload[0] & 0x80;
+ start = payload[0] & 0x40;
+ last = payload[0] & 0x20;
+ nframes = payload[0] & 0x0f;
+
+ payload += 1;
+ payload_len -= 1;
+
+ data = gst_rtp_buffer_get_payload_subbuffer (rtp, 1, -1);
+
+ if (fragment) {
+ /* Got a packet with a fragment */
+ GST_LOG_OBJECT (depay, "Got fragment");
+
+ if (start && gst_adapter_available (depay->adapter)) {
+ GST_WARNING_OBJECT (depay, "Missing last fragment");
+ gst_adapter_clear (depay->adapter);
+
+ } else if (!start && !gst_adapter_available (depay->adapter)) {
+ GST_WARNING_OBJECT (depay, "Missing start fragment");
+ gst_buffer_unref (data);
+ data = NULL;
+ goto out;
+ }
+
+ gst_adapter_push (depay->adapter, data);
+
+ if (last) {
+ gint framelen;
+ guint8 header[4];
+
+ data = gst_adapter_take_buffer (depay->adapter,
+ gst_adapter_available (depay->adapter));
+ gst_rtp_drop_non_audio_meta (depay, data);
+
+ /* 'samples' is the function-scope variable on purpose, so the
+  * ignore-timestamps path sees the reassembled frame's count */
+ if (gst_buffer_extract (data, 0, header, 4) != 4 ||
+ gst_rtp_sbc_depay_get_params (depay, header,
+ 4, &framelen, &samples) < 0) {
+ goto bad_packet;
+ }
+ } else {
+ data = NULL;
+ }
+ } else {
+ /* !fragment */
+ gint framelen;
+
+ GST_LOG_OBJECT (depay, "Got %d frames", nframes);
+
+ if (gst_rtp_sbc_depay_get_params (depay, payload,
+ payload_len, &framelen, &samples) < 0) {
+ gst_adapter_clear (depay->adapter);
+ goto bad_packet;
+ }
+
+ samples *= nframes;
+
+ GST_LOG_OBJECT (depay, "Got payload of %d", payload_len);
+
+ if (nframes * framelen > (gint) payload_len) {
+ GST_WARNING_OBJECT (depay, "Short packet");
+ goto bad_packet;
+ } else if (nframes * framelen < (gint) payload_len) {
+ GST_WARNING_OBJECT (depay, "Junk at end of packet");
+ }
+ }
+
+ if (depay->ignore_timestamps && data) {
+ GstClockTime duration;
+
+ gst_audio_stream_align_process (depay->stream_align,
+ GST_BUFFER_IS_DISCONT (rtp->buffer), timestamp, samples, &timestamp,
+ &duration, NULL);
+
+ GST_BUFFER_PTS (data) = timestamp;
+ GST_BUFFER_DTS (data) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DURATION (data) = duration;
+ }
+
+out:
+ return data;
+
+bad_packet:
+ GST_ELEMENT_WARNING (depay, STREAM, DECODE,
+ ("Received invalid RTP payload, dropping"), (NULL));
+ /* Never hand an invalid or partially-built buffer upstream */
+ gst_buffer_replace (&data, NULL);
+ goto out;
+}
diff --git a/gst/rtp/gstrtpsbcdepay.h b/gst/rtp/gstrtpsbcdepay.h
new file mode 100644
index 0000000000..bc5a396724
--- /dev/null
+++ b/gst/rtp/gstrtpsbcdepay.h
@@ -0,0 +1,68 @@
+/*
+ *
+ * BlueZ - Bluetooth protocol stack for Linux
+ *
+ * Copyright (C) 2012 Collabora Ltd.
+ *
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+#ifndef __GST_RTP_SBC_DEPAY_H
+#define __GST_RTP_SBC_DEPAY_H
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_SBC_DEPAY \
+	(gst_rtp_sbc_depay_get_type())
+#define GST_RTP_SBC_DEPAY(obj) \
+	(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SBC_DEPAY,\
+		GstRtpSbcDepay))
+#define GST_RTP_SBC_DEPAY_CLASS(klass) \
+	(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SBC_DEPAY,\
+		GstRtpSbcDepayClass))
+#define GST_IS_RTP_SBC_DEPAY(obj) \
+	(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SBC_DEPAY))
+/* Fix: the parameter was named "obj" while the expansion used "klass",
+ * so any use of this macro failed to compile with an undefined
+ * identifier.  Name the parameter "klass" to match the expansion. */
+#define GST_IS_RTP_SBC_DEPAY_CLASS(klass) \
+	(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SBC_DEPAY))
+typedef struct _GstRtpSbcDepay GstRtpSbcDepay;
+typedef struct _GstRtpSbcDepayClass GstRtpSbcDepayClass;
+
+/* Instance structure of the SBC RTP depayloader element. */
+struct _GstRtpSbcDepay
+{
+  GstRTPBaseDepayload base;
+
+  int rate;                     /* clock/sample rate; NOTE(review): set outside
+                                 * this chunk (presumably in setcaps) — confirm */
+  GstAdapter *adapter;          /* reassembly buffer for fragmented SBC frames */
+  gboolean ignore_timestamps;   /* if TRUE, retime output from sample counts */
+
+  /* Timestamp tracking when ignoring input timestamps */
+  GstAudioStreamAlign *stream_align;
+};
+
+/* Class structure; nothing is added over the base depayloader class. */
+struct _GstRtpSbcDepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_sbc_depay_get_type (void);
+
+G_END_DECLS
+#endif
diff --git a/gst/rtp/gstrtpsbcpay.c b/gst/rtp/gstrtpsbcpay.c
new file mode 100644
index 0000000000..676416bc00
--- /dev/null
+++ b/gst/rtp/gstrtpsbcpay.c
@@ -0,0 +1,372 @@
+/* GStreamer RTP SBC payloader
+ * BlueZ - Bluetooth protocol stack for Linux
+ *
+ * Copyright (C) 2004-2010 Marcel Holtmann <marcel@holtmann.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpsbcpay.h"
+#include <math.h>
+#include <string.h>
+#include "gstrtputils.h"
+
+/* The RTP payload starts with a 1-byte SBC header carrying the frame
+ * count (ref A2DP v13, 4.3.4). */
+#define RTP_SBC_PAYLOAD_HEADER_SIZE 1
+/* Default for the "min-frames" property: send as soon as data is queued. */
+#define DEFAULT_MIN_FRAMES 0
+/* Fixed 12-byte RTP header plus the SBC payload header. */
+#define RTP_SBC_HEADER_TOTAL (12 + RTP_SBC_PAYLOAD_HEADER_SIZE)
+
+/* Object properties. */
+enum
+{
+  PROP_0,
+  PROP_MIN_FRAMES
+};
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_sbc_pay_debug);
+#define GST_CAT_DEFAULT gst_rtp_sbc_pay_debug
+
+#define parent_class gst_rtp_sbc_pay_parent_class
+G_DEFINE_TYPE (GstRtpSBCPay, gst_rtp_sbc_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsbcpay, "rtpsbcpay", GST_RANK_NONE,
+ GST_TYPE_RTP_SBC_PAY, rtp_element_init (plugin));
+
+static GstStaticPadTemplate gst_rtp_sbc_pay_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-sbc, "
+ "rate = (int) { 16000, 32000, 44100, 48000 }, "
+ "channels = (int) [ 1, 2 ], "
+ "channel-mode = (string) { mono, dual, stereo, joint }, "
+ "blocks = (int) { 4, 8, 12, 16 }, "
+ "subbands = (int) { 4, 8 }, "
+ "allocation-method = (string) { snr, loudness }, "
+ "bitpool = (int) [ 2, 64 ]")
+ );
+
+static GstStaticPadTemplate gst_rtp_sbc_pay_src_factory =
+GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) audio,"
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) { 16000, 32000, 44100, 48000 },"
+ "encoding-name = (string) SBC")
+ );
+
+static void gst_rtp_sbc_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_sbc_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static GstStateChangeReturn gst_rtp_sbc_pay_change_state (GstElement * element,
+ GstStateChange transition);
+
+/* Compute the size in bytes of a single encoded SBC frame for the
+ * given codec parameters.  Mirrors the A2DP/SBC frame length formula:
+ * 4-byte header, 4 bits of scale factors per subband and channel, then
+ * the bit-allocated audio payload rounded up to whole bytes. */
+static gint
+gst_rtp_sbc_pay_get_frame_len (gint subbands, gint channels,
+    gint blocks, gint bitpool, const gchar * channel_mode)
+{
+  gint frame_size = 4 + (4 * subbands * channels) / 8;
+
+  if (strcmp (channel_mode, "mono") != 0
+      && strcmp (channel_mode, "dual") != 0) {
+    /* stereo / joint stereo: one shared bitpool; joint stereo adds one
+     * join bit per subband. */
+    gint join_bits = (strcmp (channel_mode, "joint") == 0) ? subbands : 0;
+
+    frame_size += ((join_bits + blocks * bitpool) + 7) / 8;
+  } else {
+    /* mono / dual channel: an independent bitpool per channel. */
+    frame_size += ((blocks * channels * bitpool) + 7) / 8;
+  }
+
+  return frame_size;
+}
+
+/* Negotiate input caps: read the SBC codec parameters, derive the size
+ * and duration of one encoded frame, and configure the RTP output
+ * (encoding-name SBC, clock-rate == sample rate). */
+static gboolean
+gst_rtp_sbc_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+  GstRtpSBCPay *sbcpay;
+  gint rate, subbands, channels, blocks, bitpool;
+  gint frame_len;
+  const gchar *channel_mode;
+  GstStructure *structure;
+
+  sbcpay = GST_RTP_SBC_PAY (payload);
+
+  /* All of these fields are mandatory in the sink pad template, but
+   * each one is still checked individually. */
+  structure = gst_caps_get_structure (caps, 0);
+  if (!gst_structure_get_int (structure, "rate", &rate))
+    return FALSE;
+  if (!gst_structure_get_int (structure, "channels", &channels))
+    return FALSE;
+  if (!gst_structure_get_int (structure, "blocks", &blocks))
+    return FALSE;
+  if (!gst_structure_get_int (structure, "bitpool", &bitpool))
+    return FALSE;
+  if (!gst_structure_get_int (structure, "subbands", &subbands))
+    return FALSE;
+
+  channel_mode = gst_structure_get_string (structure, "channel-mode");
+  if (!channel_mode)
+    return FALSE;
+
+  /* One RTP packet may carry several frames, so remember the per-frame
+   * length and duration for the packetization loop. */
+  frame_len = gst_rtp_sbc_pay_get_frame_len (subbands, channels, blocks,
+      bitpool, channel_mode);
+
+  sbcpay->frame_length = frame_len;
+  /* One frame decodes to blocks * subbands samples per channel. */
+  sbcpay->frame_duration = ((blocks * subbands) * GST_SECOND) / rate;
+  sbcpay->last_timestamp = GST_CLOCK_TIME_NONE;
+
+  gst_rtp_base_payload_set_options (payload, "audio", TRUE, "SBC", rate);
+
+  GST_DEBUG_OBJECT (payload, "calculated frame length: %d ", frame_len);
+
+  return gst_rtp_base_payload_set_outcaps (payload, NULL);
+}
+
+/* Packetize as many whole SBC frames from the adapter as fit the MTU
+ * and push them downstream, repeating until less than one packet's
+ * worth of data remains or a push fails. */
+static GstFlowReturn
+gst_rtp_sbc_pay_drain_buffers (GstRtpSBCPay * sbcpay)
+{
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  guint available;
+  guint max_payload;
+  GstBuffer *outbuf, *paybuf;
+  guint8 *payload_data;
+  guint frame_count;
+  guint payload_length;
+  GstFlowReturn res;
+
+  /* frame_length is set from the negotiated caps in set_caps(); without
+   * it the adapter contents cannot be split into frames. */
+  if (sbcpay->frame_length == 0) {
+    GST_ERROR_OBJECT (sbcpay, "Frame length is 0");
+    return GST_FLOW_ERROR;
+  }
+
+  do {
+    available = gst_adapter_available (sbcpay->adapter);
+
+    /* Largest RTP payload that still fits the MTU once the 1-byte SBC
+     * payload header is accounted for. */
+    max_payload =
+        gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU (sbcpay) -
+        RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);
+
+    /* Only whole frames are sent; a trailing partial frame stays queued. */
+    max_payload = MIN (max_payload, available);
+    frame_count = max_payload / sbcpay->frame_length;
+    payload_length = frame_count * sbcpay->frame_length;
+    if (payload_length == 0)    /* Nothing to send */
+      return GST_FLOW_OK;
+
+    outbuf =
+        gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+        (sbcpay), RTP_SBC_PAYLOAD_HEADER_SIZE, 0, 0);
+
+    /* get payload */
+    gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtp);
+
+    gst_rtp_buffer_set_payload_type (&rtp, GST_RTP_BASE_PAYLOAD_PT (sbcpay));
+
+    /* write header and copy data into payload */
+    payload_data = gst_rtp_buffer_get_payload (&rtp);
+    /* upper 3 fragment bits not used, ref A2DP v13, 4.3.4 */
+    payload_data[0] = frame_count & 0x0f;
+
+    gst_rtp_buffer_unmap (&rtp);
+
+    /* Append the frame data zero-copy after the RTP/SBC headers. */
+    paybuf = gst_adapter_take_buffer_fast (sbcpay->adapter, payload_length);
+    gst_rtp_copy_audio_meta (sbcpay, outbuf, paybuf);
+    outbuf = gst_buffer_append (outbuf, paybuf);
+
+    /* Timestamps are derived from accumulated frame durations since the
+     * last discontinuity, not from the input buffers. */
+    GST_BUFFER_PTS (outbuf) = sbcpay->last_timestamp;
+    GST_BUFFER_DURATION (outbuf) = frame_count * sbcpay->frame_duration;
+    GST_DEBUG_OBJECT (sbcpay, "Pushing %d bytes: %" GST_TIME_FORMAT,
+        payload_length, GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)));
+
+    sbcpay->last_timestamp += frame_count * sbcpay->frame_duration;
+
+    res = gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (sbcpay), outbuf);
+
+    /* try to send another RTP buffer if available data exceeds MTU size */
+  } while (res == GST_FLOW_OK);
+
+  return res;
+}
+
+/* Queue an incoming SBC buffer and drain the adapter once either the
+ * MTU would be filled or the configured minimum frame count is
+ * exceeded. */
+static GstFlowReturn
+gst_rtp_sbc_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
+{
+  GstRtpSBCPay *sbcpay;
+  guint available;
+
+  /* FIXME check for negotiation */
+
+  sbcpay = GST_RTP_SBC_PAY (payload);
+
+  if (GST_BUFFER_IS_DISCONT (buffer)) {
+    /* Try to flush whatever's left */
+    gst_rtp_sbc_pay_drain_buffers (sbcpay);
+    /* Drop the rest */
+    gst_adapter_flush (sbcpay->adapter,
+        gst_adapter_available (sbcpay->adapter));
+    /* Reset timestamps */
+    sbcpay->last_timestamp = GST_CLOCK_TIME_NONE;
+  }
+
+  /* Start (or restart after a discont) output timing from this
+   * buffer's PTS. */
+  if (sbcpay->last_timestamp == GST_CLOCK_TIME_NONE)
+    sbcpay->last_timestamp = GST_BUFFER_PTS (buffer);
+
+  gst_adapter_push (sbcpay->adapter, buffer);
+
+  /* min_frames is stored unsigned (see the header), so the property's
+   * special -1 value makes the second condition always false and only
+   * the MTU test applies — i.e. pack as much as the MTU allows. */
+  available = gst_adapter_available (sbcpay->adapter);
+  if (available + RTP_SBC_HEADER_TOTAL >=
+      GST_RTP_BASE_PAYLOAD_MTU (sbcpay) ||
+      (available > (sbcpay->min_frames * sbcpay->frame_length)))
+    return gst_rtp_sbc_pay_drain_buffers (sbcpay);
+
+  return GST_FLOW_OK;
+}
+
+/* Serialized-event hook: drain queued frames before EOS or a new
+ * segment, discard them on a flush, then chain up to the base class. */
+static gboolean
+gst_rtp_sbc_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+  GstRtpSBCPay *pay = GST_RTP_SBC_PAY (payload);
+  GstEventType type = GST_EVENT_TYPE (event);
+
+  if (type == GST_EVENT_EOS || type == GST_EVENT_SEGMENT) {
+    /* Whatever is still queued belongs before this event. */
+    gst_rtp_sbc_pay_drain_buffers (pay);
+  } else if (type == GST_EVENT_FLUSH_STOP) {
+    /* Flushed data must not be sent later. */
+    gst_adapter_clear (pay->adapter);
+  }
+
+  return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+}
+
+/* State-change hook: chain up first, then drop any queued frames when
+ * leaving PAUSED for READY. */
+static GstStateChangeReturn
+gst_rtp_sbc_pay_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRtpSBCPay *pay = GST_RTP_SBC_PAY (element);
+  GstStateChangeReturn ret;
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
+    /* No point keeping unpayloaded frames across a stop. */
+    gst_adapter_clear (pay->adapter);
+  }
+
+  return ret;
+}
+
+/* GObject finalize: release the adapter created in _init() and chain
+ * up to the parent class. */
+static void
+gst_rtp_sbc_pay_finalize (GObject * object)
+{
+  GstRtpSBCPay *pay = GST_RTP_SBC_PAY (object);
+
+  g_object_unref (pay->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class initializer: wire up vmethods, install the "min-frames"
+ * property, register the pad templates and element metadata. */
+static void
+gst_rtp_sbc_pay_class_init (GstRtpSBCPayClass * klass)
+{
+  GstRTPBasePayloadClass *payload_class = GST_RTP_BASE_PAYLOAD_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+  gobject_class->finalize = gst_rtp_sbc_pay_finalize;
+  gobject_class->set_property = gst_rtp_sbc_pay_set_property;
+  gobject_class->get_property = gst_rtp_sbc_pay_get_property;
+
+  /* sink_event is overridden so queued frames are drained on
+   * EOS/SEGMENT before the base class forwards the event. */
+  payload_class->set_caps = GST_DEBUG_FUNCPTR (gst_rtp_sbc_pay_set_caps);
+  payload_class->handle_buffer =
+      GST_DEBUG_FUNCPTR (gst_rtp_sbc_pay_handle_buffer);
+  payload_class->sink_event = GST_DEBUG_FUNCPTR (gst_rtp_sbc_pay_sink_event);
+
+  element_class->change_state = gst_rtp_sbc_pay_change_state;
+
+  /* properties */
+  g_object_class_install_property (G_OBJECT_CLASS (klass),
+      PROP_MIN_FRAMES,
+      g_param_spec_int ("min-frames", "minimum frame number",
+          "Minimum quantity of frames to send in one packet "
+          "(-1 for maximum allowed by the mtu)",
+          -1, G_MAXINT, DEFAULT_MIN_FRAMES, G_PARAM_READWRITE));
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_sbc_pay_sink_factory);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_sbc_pay_src_factory);
+
+  gst_element_class_set_static_metadata (element_class, "RTP packet payloader",
+      "Codec/Payloader/Network", "Payload SBC audio as RTP packets",
+      "Thiago Sousa Santos <thiagoss@lcc.ufcg.edu.br>");
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_sbc_pay_debug, "rtpsbcpay", 0,
+      "RTP SBC payloader");
+}
+
+/* GObject property setter for the "min-frames" property. */
+static void
+gst_rtp_sbc_pay_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpSBCPay *pay = GST_RTP_SBC_PAY (object);
+
+  switch (prop_id) {
+    case PROP_MIN_FRAMES:
+      /* Frames to accumulate before a packet is sent. */
+      pay->min_frames = g_value_get_int (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter for the "min-frames" property. */
+static void
+gst_rtp_sbc_pay_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpSBCPay *pay = GST_RTP_SBC_PAY (object);
+
+  switch (prop_id) {
+    case PROP_MIN_FRAMES:
+      g_value_set_int (value, pay->min_frames);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Instance initializer: create the frame adapter and reset all
+ * negotiation-dependent state. */
+static void
+gst_rtp_sbc_pay_init (GstRtpSBCPay * self)
+{
+  self->adapter = gst_adapter_new ();
+
+  /* No caps negotiated yet: unknown frame length, no timestamp base. */
+  self->frame_length = 0;
+  self->min_frames = DEFAULT_MIN_FRAMES;
+  self->last_timestamp = GST_CLOCK_TIME_NONE;
+}
diff --git a/gst/rtp/gstrtpsbcpay.h b/gst/rtp/gstrtpsbcpay.h
new file mode 100644
index 0000000000..6f42c1e749
--- /dev/null
+++ b/gst/rtp/gstrtpsbcpay.h
@@ -0,0 +1,63 @@
+/* GStreamer RTP SBC payloader
+ * BlueZ - Bluetooth protocol stack for Linux
+ *
+ * Copyright (C) 2004-2010 Marcel Holtmann <marcel@holtmann.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ */
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_SBC_PAY \
+	(gst_rtp_sbc_pay_get_type())
+#define GST_RTP_SBC_PAY(obj) \
+	(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SBC_PAY,\
+		GstRtpSBCPay))
+#define GST_RTP_SBC_PAY_CLASS(klass) \
+	(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SBC_PAY,\
+		GstRtpSBCPayClass))
+#define GST_IS_RTP_SBC_PAY(obj) \
+	(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SBC_PAY))
+/* Fix: the parameter was named "obj" while the expansion used "klass",
+ * making any use of this macro an undefined-identifier compile error.
+ * Name the parameter "klass" to match the expansion. */
+#define GST_IS_RTP_SBC_PAY_CLASS(klass) \
+	(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SBC_PAY))
+
+typedef struct _GstRtpSBCPay GstRtpSBCPay;
+typedef struct _GstRtpSBCPayClass GstRtpSBCPayClass;
+
+/* Instance structure of the SBC RTP payloader element. */
+struct _GstRtpSBCPay {
+  GstRTPBasePayload base;
+
+  GstAdapter *adapter;          /* queued SBC frames not yet packetized */
+  GstClockTime last_timestamp;  /* PTS to stamp on the next outgoing packet */
+
+  guint frame_length;           /* bytes in one SBC frame (derived from caps) */
+  GstClockTime frame_duration;  /* duration of one SBC frame */
+
+  guint min_frames;             /* "min-frames" property (note: unsigned, so
+                                 * the -1 setting wraps to a huge value) */
+};
+
+/* Class structure; nothing is added over the base payloader class. */
+struct _GstRtpSBCPayClass {
+  GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_sbc_pay_get_type(void);
+
+G_END_DECLS
diff --git a/gst/rtp/gstrtpsirendepay.c b/gst/rtp/gstrtpsirendepay.c
new file mode 100644
index 0000000000..86a9dfffe1
--- /dev/null
+++ b/gst/rtp/gstrtpsirendepay.c
@@ -0,0 +1,121 @@
+/*
+ * Siren Depayloader Gst Element
+ *
+ * @author: Youness Alaoui <kakaroto@kakaroto.homelinux.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+#include "gstrtpelements.h"
+#include "gstrtpsirendepay.h"
+#include "gstrtputils.h"
+
+static GstStaticPadTemplate gst_rtp_siren_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) 16000, " "encoding-name = (string) \"SIREN\"")
+ /* This is the default, so the peer doesn't have to specify it */
+ /* " "dct-length = (int) 320") */
+ );
+
+ static GstStaticPadTemplate gst_rtp_siren_depay_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-siren, " "dct-length = (int) 320")
+ );
+
+ static GstBuffer *gst_rtp_siren_depay_process (GstRTPBaseDepayload *
+ depayload, GstRTPBuffer * rtp);
+ static gboolean gst_rtp_siren_depay_setcaps (GstRTPBaseDepayload *
+ depayload, GstCaps * caps);
+
+G_DEFINE_TYPE (GstRTPSirenDepay, gst_rtp_siren_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsirendepay, "rtpsirendepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_SIREN_DEPAY, rtp_element_init (plugin));
+
+/* Class initializer: hook up per-packet processing and caps
+ * negotiation, register pad templates and element metadata. */
+static void gst_rtp_siren_depay_class_init (GstRTPSirenDepayClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_siren_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_siren_depay_setcaps;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_siren_depay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_siren_depay_sink_template);
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP Siren packet depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts Siren audio from RTP packets",
+      "Philippe Kalaf <philippe.kalaf@collabora.co.uk>");
+}
+
+static void
+gst_rtp_siren_depay_init (GstRTPSirenDepay * rtpsirendepay)
+{
+  /* Nothing to initialize: the depayloader keeps no per-instance state. */
+}
+
+/* Negotiate caps: output is always audio/x-siren with dct-length 320
+ * (the only format the src template allows), at a fixed 16 kHz RTP
+ * clock rate. */
+static gboolean
+gst_rtp_siren_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstCaps *srccaps;
+  gboolean ret;
+
+  srccaps = gst_caps_new_simple ("audio/x-siren",
+      "dct-length", G_TYPE_INT, 320, NULL);
+  ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+
+  GST_DEBUG ("set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
+  gst_caps_unref (srccaps);
+
+  /* always fixed clock rate of 16000 */
+  depayload->clock_rate = 16000;
+
+  return ret;
+}
+
+/* Depayload one RTP packet: the payload is the raw Siren bitstream and
+ * is forwarded as-is, with non-audio metas stripped. */
+static GstBuffer *
+gst_rtp_siren_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp)
+{
+  GstBuffer *payload = gst_rtp_buffer_get_payload_buffer (rtp);
+
+  if (payload == NULL)
+    return NULL;
+
+  gst_rtp_drop_non_audio_meta (depayload, payload);
+
+  return payload;
+}
diff --git a/gst/rtp/gstrtpsirendepay.h b/gst/rtp/gstrtpsirendepay.h
new file mode 100644
index 0000000000..cdc108add9
--- /dev/null
+++ b/gst/rtp/gstrtpsirendepay.h
@@ -0,0 +1,57 @@
+/*
+ * Siren Depayloader Gst Element
+ *
+ * @author: Youness Alaoui <kakaroto@kakaroto.homelinux.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_SIREN_DEPAY_H__
+#define __GST_RTP_SIREN_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS typedef struct _GstRTPSirenDepay GstRTPSirenDepay;
+typedef struct _GstRTPSirenDepayClass GstRTPSirenDepayClass;
+
+#define GST_TYPE_RTP_SIREN_DEPAY \
+ (gst_rtp_siren_depay_get_type())
+#define GST_RTP_SIREN_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SIREN_DEPAY,GstRTPSirenDepay))
+#define GST_RTP_SIREN_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SIREN_DEPAY,GstRTPSirenDepayClass))
+#define GST_IS_RTP_SIREN_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SIREN_DEPAY))
+#define GST_IS_RTP_SIREN_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SIREN_DEPAY))
+
+
+struct _GstRTPSirenDepay
+{
+ GstRTPBaseDepayload depayload;
+
+};
+
+struct _GstRTPSirenDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_siren_depay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_SIREN_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpsirenpay.c b/gst/rtp/gstrtpsirenpay.c
new file mode 100644
index 0000000000..93385a43fb
--- /dev/null
+++ b/gst/rtp/gstrtpsirenpay.c
@@ -0,0 +1,143 @@
+/*
+ * Siren Payloader Gst Element
+ *
+ * @author: Youness Alaoui <kakaroto@kakaroto.homelinux.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtpsirenpay.h"
+#include <gst/rtp/gstrtpbuffer.h>
+
+GST_DEBUG_CATEGORY_STATIC (rtpsirenpay_debug);
+#define GST_CAT_DEFAULT (rtpsirenpay_debug)
+
+static GstStaticPadTemplate gst_rtp_siren_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-siren, " "dct-length = (int) 320")
+ );
+
+static GstStaticPadTemplate gst_rtp_siren_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 16000, "
+ "encoding-name = (string) \"SIREN\", "
+ "bitrate = (string) \"16000\", " "dct-length = (int) 320")
+ );
+
+static gboolean gst_rtp_siren_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+
+G_DEFINE_TYPE (GstRTPSirenPay, gst_rtp_siren_pay,
+ GST_TYPE_RTP_BASE_AUDIO_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsirenpay, "rtpsirenpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_SIREN_PAY, rtp_element_init (plugin));
+
+/* Class initializer: override set_caps (packetization itself is done
+ * by the frame-based base audio payloader), register pad templates and
+ * element metadata. */
+static void
+gst_rtp_siren_pay_class_init (GstRTPSirenPayClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_siren_pay_setcaps;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_siren_pay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_siren_pay_src_template);
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP Payloader for Siren Audio", "Codec/Payloader/Network/RTP",
+      "Packetize Siren audio streams into RTP packets",
+      "Youness Alaoui <kakaroto@kakaroto.homelinux.net>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpsirenpay_debug, "rtpsirenpay", 0,
+      "siren audio RTP payloader");
+}
+
+/* Instance initializer: Siren is a frame-based codec with a fixed
+ * 16 kHz RTP clock. */
+static void
+gst_rtp_siren_pay_init (GstRTPSirenPay * rtpsirenpay)
+{
+  GstRTPBasePayload *rtpbasepayload;
+  GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+
+  rtpbasepayload = GST_RTP_BASE_PAYLOAD (rtpsirenpay);
+  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpsirenpay);
+
+  /* we don't set the payload type, it should be set by the application using
+   * the pt property or the default 96 will be used */
+  rtpbasepayload->clock_rate = 16000;
+
+  /* tell rtpbaseaudiopayload that this is a frame based codec */
+  gst_rtp_base_audio_payload_set_frame_based (rtpbaseaudiopayload);
+}
+
+/* Negotiate input caps: require audio/x-siren with dct-length 320,
+ * then configure the RTP output (SIREN, 16 kHz clock) and the
+ * frame-based packetization options (20-ms frames of 40 bytes).
+ * Returns FALSE on any caps mismatch. */
+static gboolean
+gst_rtp_siren_pay_setcaps (GstRTPBasePayload * rtpbasepayload, GstCaps * caps)
+{
+  GstRTPSirenPay *rtpsirenpay;
+  GstRTPBaseAudioPayload *rtpbaseaudiopayload;
+  gint dct_length = 0;
+  GstStructure *structure;
+  const char *payload_name;
+
+  rtpsirenpay = GST_RTP_SIREN_PAY (rtpbasepayload);
+  rtpbaseaudiopayload = GST_RTP_BASE_AUDIO_PAYLOAD (rtpbasepayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* Fix: the return value of gst_structure_get_int() was ignored, so a
+   * structure without a "dct-length" field left dct_length
+   * uninitialized (undefined behaviour in the comparison and in the
+   * error message below). */
+  if (!gst_structure_get_int (structure, "dct-length", &dct_length) ||
+      dct_length != 320)
+    goto wrong_dct;
+
+  payload_name = gst_structure_get_name (structure);
+  if (g_ascii_strcasecmp ("audio/x-siren", payload_name))
+    goto wrong_caps;
+
+  gst_rtp_base_payload_set_options (rtpbasepayload, "audio", TRUE, "SIREN",
+      16000);
+  /* set options for this frame based audio codec */
+  gst_rtp_base_audio_payload_set_frame_options (rtpbaseaudiopayload, 20, 40);
+
+  return gst_rtp_base_payload_set_outcaps (rtpbasepayload, NULL);
+
+  /* ERRORS */
+wrong_dct:
+  {
+    GST_ERROR_OBJECT (rtpsirenpay, "dct-length must be 320, received %d",
+        dct_length);
+    return FALSE;
+  }
+wrong_caps:
+  {
+    GST_ERROR_OBJECT (rtpsirenpay, "expected audio/x-siren, received %s",
+        payload_name);
+    return FALSE;
+  }
+}
diff --git a/gst/rtp/gstrtpsirenpay.h b/gst/rtp/gstrtpsirenpay.h
new file mode 100644
index 0000000000..56fc664ba4
--- /dev/null
+++ b/gst/rtp/gstrtpsirenpay.h
@@ -0,0 +1,55 @@
+/*
+ * Siren Payloader Gst Element
+ *
+ * @author: Youness Alaoui <kakaroto@kakaroto.homelinux.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_SIREN_PAY_H__
+#define __GST_RTP_SIREN_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbaseaudiopayload.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_SIREN_PAY \
+ (gst_rtp_siren_pay_get_type())
+#define GST_RTP_SIREN_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SIREN_PAY,GstRTPSirenPay))
+#define GST_RTP_SIREN_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SIREN_PAY,GstRTPSirenPayClass))
+#define GST_IS_RTP_SIREN_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SIREN_PAY))
+#define GST_IS_RTP_SIREN_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SIREN_PAY))
+typedef struct _GstRTPSirenPay GstRTPSirenPay;
+typedef struct _GstRTPSirenPayClass GstRTPSirenPayClass;
+
+struct _GstRTPSirenPay
+{
+ GstRTPBaseAudioPayload audiopayload;
+};
+
+struct _GstRTPSirenPayClass
+{
+ GstRTPBaseAudioPayloadClass parent_class;
+};
+
+GType gst_rtp_siren_pay_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_RTP_SIREN_PAY_H__ */
diff --git a/gst/rtp/gstrtpspeexdepay.c b/gst/rtp/gstrtpspeexdepay.c
new file mode 100644
index 0000000000..ca70cd3e2a
--- /dev/null
+++ b/gst/rtp/gstrtpspeexdepay.c
@@ -0,0 +1,222 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+#include <stdlib.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpspeexdepay.h"
+#include "gstrtputils.h"
+
+/* RtpSPEEXDepay signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static GstStaticPadTemplate gst_rtp_speex_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) [6000, 48000], "
+ "encoding-name = (string) \"SPEEX\"")
+ /* "encoding-params = (string) \"1\"" */
+ );
+
+static GstStaticPadTemplate gst_rtp_speex_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-speex")
+ );
+
+static GstBuffer *gst_rtp_speex_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static gboolean gst_rtp_speex_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+
+G_DEFINE_TYPE (GstRtpSPEEXDepay, gst_rtp_speex_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpspeexdepay, "rtpspeexdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_SPEEX_DEPAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_speex_depay_class_init (GstRtpSPEEXDepayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+ gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_speex_depay_process;
+ gstrtpbasedepayload_class->set_caps = gst_rtp_speex_depay_setcaps;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_speex_depay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_speex_depay_sink_template);
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP Speex depayloader", "Codec/Depayloader/Network/RTP",
+ "Extracts Speex audio from RTP packets",
+ "Edgard Lima <edgard.lima@gmail.com>");
+}
+
+static void
+gst_rtp_speex_depay_init (GstRtpSPEEXDepay * rtpspeexdepay)
+{
+}
+
+static gint
+gst_rtp_speex_depay_get_mode (gint rate)
+{
+ if (rate > 25000)
+ return 2;
+ else if (rate > 12500)
+ return 1;
+ else
+ return 0;
+}
+
+/* len 4 bytes LE,
+ * vendor string (len bytes),
+ * user_len 4 (0) bytes LE
+ */
+static const gchar gst_rtp_speex_comment[] =
+ "\045\0\0\0Depayloaded with GStreamer speexdepay\0\0\0\0";
+
+static gboolean
+gst_rtp_speex_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ GstRtpSPEEXDepay *rtpspeexdepay;
+ gint clock_rate, nb_channels;
+ GstBuffer *buf;
+ GstMapInfo map;
+ guint8 *data;
+ const gchar *params;
+ GstCaps *srccaps;
+ gboolean res;
+
+ rtpspeexdepay = GST_RTP_SPEEX_DEPAY (depayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ goto no_clockrate;
+ depayload->clock_rate = clock_rate;
+
+ if (!(params = gst_structure_get_string (structure, "encoding-params")))
+ nb_channels = 1;
+ else {
+ nb_channels = atoi (params);
+ }
+
+ /* construct minimal header and comment packet for the decoder */
+ buf = gst_buffer_new_and_alloc (80);
+ gst_buffer_map (buf, &map, GST_MAP_WRITE);
+ data = map.data;
+ memcpy (data, "Speex ", 8);
+ data += 8;
+ memcpy (data, "1.1.12", 7);
+ data += 20;
+ GST_WRITE_UINT32_LE (data, 1); /* version */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 80); /* header_size */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, clock_rate); /* rate */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, gst_rtp_speex_depay_get_mode (clock_rate)); /* mode */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 4); /* mode_bitstream_version */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, nb_channels); /* nb_channels */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, -1); /* bitrate */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 0xa0); /* frame_size */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 0); /* VBR */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 1); /* frames_per_packet */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 0); /* extra_headers */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 0); /* reserved1 */
+ data += 4;
+ GST_WRITE_UINT32_LE (data, 0); /* reserved2 */
+ gst_buffer_unmap (buf, &map);
+
+ srccaps = gst_caps_new_empty_simple ("audio/x-speex");
+ res = gst_pad_set_caps (depayload->srcpad, srccaps);
+ gst_caps_unref (srccaps);
+
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpspeexdepay), buf);
+
+ buf = gst_buffer_new_and_alloc (sizeof (gst_rtp_speex_comment));
+ gst_buffer_fill (buf, 0, gst_rtp_speex_comment,
+ sizeof (gst_rtp_speex_comment));
+
+ gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpspeexdepay), buf);
+
+ return res;
+
+ /* ERRORS */
+no_clockrate:
+ {
+ GST_DEBUG_OBJECT (depayload, "no clock-rate specified");
+ return FALSE;
+ }
+}
+
+static GstBuffer *
+gst_rtp_speex_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp)
+{
+ GstBuffer *outbuf = NULL;
+
+ GST_DEBUG ("process : got %" G_GSIZE_FORMAT " bytes, mark %d ts %u seqn %d",
+ gst_buffer_get_size (rtp->buffer),
+ gst_rtp_buffer_get_marker (rtp),
+ gst_rtp_buffer_get_timestamp (rtp), gst_rtp_buffer_get_seq (rtp));
+
+ /* nothing special to be done */
+ outbuf = gst_rtp_buffer_get_payload_buffer (rtp);
+
+ if (outbuf) {
+ GST_BUFFER_DURATION (outbuf) = 20 * GST_MSECOND;
+ gst_rtp_drop_non_audio_meta (depayload, outbuf);
+ }
+
+ return outbuf;
+}
diff --git a/gst/rtp/gstrtpspeexdepay.h b/gst/rtp/gstrtpspeexdepay.h
new file mode 100644
index 0000000000..3961ef1cd7
--- /dev/null
+++ b/gst/rtp/gstrtpspeexdepay.h
@@ -0,0 +1,51 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ */
+
+#ifndef __GST_RTP_SPEEX_DEPAY_H__
+#define __GST_RTP_SPEEX_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpSPEEXDepay GstRtpSPEEXDepay;
+typedef struct _GstRtpSPEEXDepayClass GstRtpSPEEXDepayClass;
+
+#define GST_TYPE_RTP_SPEEX_DEPAY \
+ (gst_rtp_speex_depay_get_type())
+#define GST_RTP_SPEEX_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SPEEX_DEPAY,GstRtpSPEEXDepay))
+#define GST_RTP_SPEEX_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SPEEX_DEPAY,GstRtpSPEEXDepayClass))
+#define GST_IS_RTP_SPEEX_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SPEEX_DEPAY))
+#define GST_IS_RTP_SPEEX_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SPEEX_DEPAY))
+
+struct _GstRtpSPEEXDepay
+{
+ GstRTPBaseDepayload depayload;
+};
+
+struct _GstRtpSPEEXDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_speex_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_SPEEX_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpspeexpay.c b/gst/rtp/gstrtpspeexpay.c
new file mode 100644
index 0000000000..17b2aa59e3
--- /dev/null
+++ b/gst/rtp/gstrtpspeexpay.c
@@ -0,0 +1,346 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpspeexpay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpspeexpay_debug);
+#define GST_CAT_DEFAULT (rtpspeexpay_debug)
+
+static GstStaticPadTemplate gst_rtp_speex_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-speex, "
+ "rate = (int) [ 6000, 48000 ], " "channels = (int) 1")
+ );
+
+static GstStaticPadTemplate gst_rtp_speex_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [ 6000, 48000 ], "
+ "encoding-name = (string) \"SPEEX\", "
+ "encoding-params = (string) \"1\"")
+ );
+
+static GstStateChangeReturn gst_rtp_speex_pay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static gboolean gst_rtp_speex_pay_setcaps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+static GstCaps *gst_rtp_speex_pay_getcaps (GstRTPBasePayload * payload,
+ GstPad * pad, GstCaps * filter);
+static GstFlowReturn gst_rtp_speex_pay_handle_buffer (GstRTPBasePayload *
+ payload, GstBuffer * buffer);
+
+#define gst_rtp_speex_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSPEEXPay, gst_rtp_speex_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpspeexpay, "rtpspeexpay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_SPEEX_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_speex_pay_class_init (GstRtpSPEEXPayClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gstelement_class->change_state = gst_rtp_speex_pay_change_state;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_speex_pay_setcaps;
+ gstrtpbasepayload_class->get_caps = gst_rtp_speex_pay_getcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_speex_pay_handle_buffer;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_speex_pay_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_speex_pay_src_template);
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP Speex payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encodes Speex audio into a RTP packet",
+ "Edgard Lima <edgard.lima@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpspeexpay_debug, "rtpspeexpay", 0,
+ "Speex RTP Payloader");
+}
+
+static void
+gst_rtp_speex_pay_init (GstRtpSPEEXPay * rtpspeexpay)
+{
+ GST_RTP_BASE_PAYLOAD (rtpspeexpay)->clock_rate = 8000;
+ GST_RTP_BASE_PAYLOAD_PT (rtpspeexpay) = 110; /* Create String */
+}
+
+static gboolean
+gst_rtp_speex_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ /* don't configure yet, we wait for the ident packet */
+ return TRUE;
+}
+
+
+static GstCaps *
+gst_rtp_speex_pay_getcaps (GstRTPBasePayload * payload, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *otherpadcaps;
+ GstCaps *caps;
+
+ otherpadcaps = gst_pad_get_allowed_caps (payload->srcpad);
+ caps = gst_pad_get_pad_template_caps (pad);
+
+ if (otherpadcaps) {
+ if (!gst_caps_is_empty (otherpadcaps)) {
+ GstStructure *ps;
+ GstStructure *s;
+ gint clock_rate;
+
+ ps = gst_caps_get_structure (otherpadcaps, 0);
+ caps = gst_caps_make_writable (caps);
+ s = gst_caps_get_structure (caps, 0);
+
+ if (gst_structure_get_int (ps, "clock-rate", &clock_rate)) {
+ gst_structure_fixate_field_nearest_int (s, "rate", clock_rate);
+ }
+ }
+ gst_caps_unref (otherpadcaps);
+ }
+
+ if (filter) {
+ GstCaps *tcaps = caps;
+
+ caps = gst_caps_intersect_full (filter, tcaps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (tcaps);
+ }
+
+ return caps;
+}
+
+static gboolean
+gst_rtp_speex_pay_parse_ident (GstRtpSPEEXPay * rtpspeexpay,
+ const guint8 * data, guint size)
+{
+ guint32 version, header_size, rate, mode, nb_channels;
+ GstRTPBasePayload *payload;
+ gchar *cstr;
+ gboolean res;
+
+ /* we need the header string (8), the version string (20), the version
+ * and the header length. */
+ if (size < 36)
+ goto too_small;
+
+ if (!g_str_has_prefix ((const gchar *) data, "Speex "))
+ goto wrong_header;
+
+ /* skip header and version string */
+ data += 28;
+
+ version = GST_READ_UINT32_LE (data);
+ if (version != 1)
+ goto wrong_version;
+
+ data += 4;
+ /* ensure sizes */
+ header_size = GST_READ_UINT32_LE (data);
+ if (header_size < 80)
+ goto header_too_small;
+
+ if (size < header_size)
+ goto payload_too_small;
+
+ data += 4;
+ rate = GST_READ_UINT32_LE (data);
+ data += 4;
+ mode = GST_READ_UINT32_LE (data);
+ data += 8;
+ nb_channels = GST_READ_UINT32_LE (data);
+
+ GST_DEBUG_OBJECT (rtpspeexpay, "rate %d, mode %d, nb_channels %d",
+ rate, mode, nb_channels);
+
+ payload = GST_RTP_BASE_PAYLOAD (rtpspeexpay);
+
+ gst_rtp_base_payload_set_options (payload, "audio", FALSE, "SPEEX", rate);
+ cstr = g_strdup_printf ("%d", nb_channels);
+ res = gst_rtp_base_payload_set_outcaps (payload, "encoding-params",
+ G_TYPE_STRING, cstr, NULL);
+ g_free (cstr);
+
+ return res;
+
+ /* ERRORS */
+too_small:
+ {
+ GST_DEBUG_OBJECT (rtpspeexpay,
+ "ident packet too small, need at least 32 bytes");
+ return FALSE;
+ }
+wrong_header:
+ {
+ GST_DEBUG_OBJECT (rtpspeexpay,
+ "ident packet does not start with \"Speex \"");
+ return FALSE;
+ }
+wrong_version:
+ {
+ GST_DEBUG_OBJECT (rtpspeexpay, "can only handle version 1, have version %d",
+ version);
+ return FALSE;
+ }
+header_too_small:
+ {
+ GST_DEBUG_OBJECT (rtpspeexpay,
+ "header size too small, need at least 80 bytes, " "got only %d",
+ header_size);
+ return FALSE;
+ }
+payload_too_small:
+ {
+ GST_DEBUG_OBJECT (rtpspeexpay,
+ "payload too small, need at least %d bytes, got only %d", header_size,
+ size);
+ return FALSE;
+ }
+}
+
+static GstFlowReturn
+gst_rtp_speex_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpSPEEXPay *rtpspeexpay;
+ GstMapInfo map;
+ GstBuffer *outbuf;
+ GstClockTime timestamp, duration;
+ GstFlowReturn ret;
+
+ rtpspeexpay = GST_RTP_SPEEX_PAY (basepayload);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ switch (rtpspeexpay->packet) {
+ case 0:
+ /* ident packet. We need to parse the headers to construct the RTP
+ * properties. */
+ if (!gst_rtp_speex_pay_parse_ident (rtpspeexpay, map.data, map.size)) {
+ gst_buffer_unmap (buffer, &map);
+ goto parse_error;
+ }
+
+ ret = GST_FLOW_OK;
+ gst_buffer_unmap (buffer, &map);
+ goto done;
+ case 1:
+ /* comment packet, we ignore it */
+ ret = GST_FLOW_OK;
+ gst_buffer_unmap (buffer, &map);
+ goto done;
+ default:
+ /* other packets go in the payload */
+ break;
+ }
+ gst_buffer_unmap (buffer, &map);
+
+ if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_GAP)) {
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+
+ timestamp = GST_BUFFER_PTS (buffer);
+ duration = GST_BUFFER_DURATION (buffer);
+
+ /* FIXME, only one SPEEX frame per RTP packet for now */
+ outbuf = gst_rtp_base_payload_allocate_output_buffer (basepayload, 0, 0, 0);
+
+ /* FIXME, assert for now */
+ g_assert (gst_buffer_get_size (buffer) <=
+ GST_RTP_BASE_PAYLOAD_MTU (rtpspeexpay));
+
+ /* copy timestamp and duration */
+ GST_BUFFER_PTS (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = duration;
+
+ gst_rtp_copy_audio_meta (basepayload, outbuf, buffer);
+ outbuf = gst_buffer_append (outbuf, buffer);
+ buffer = NULL;
+
+ ret = gst_rtp_base_payload_push (basepayload, outbuf);
+
+done:
+ if (buffer)
+ gst_buffer_unref (buffer);
+
+ rtpspeexpay->packet++;
+
+ return ret;
+
+ /* ERRORS */
+parse_error:
+ {
+ GST_ELEMENT_ERROR (rtpspeexpay, STREAM, DECODE, (NULL),
+ ("Error parsing first identification packet."));
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+}
+
+static GstStateChangeReturn
+gst_rtp_speex_pay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpSPEEXPay *rtpspeexpay;
+ GstStateChangeReturn ret;
+
+ rtpspeexpay = GST_RTP_SPEEX_PAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ rtpspeexpay->packet = 0;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtpspeexpay.h b/gst/rtp/gstrtpspeexpay.h
new file mode 100644
index 0000000000..a89350e4c4
--- /dev/null
+++ b/gst/rtp/gstrtpspeexpay.h
@@ -0,0 +1,54 @@
+/* GStreamer
+ * Copyright (C) <2005> Edgard Lima <edgard.lima@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ */
+
+
+#ifndef __GST_RTP_SPEEX_PAY_H__
+#define __GST_RTP_SPEEX_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpSPEEXPay GstRtpSPEEXPay;
+typedef struct _GstRtpSPEEXPayClass GstRtpSPEEXPayClass;
+
+#define GST_TYPE_RTP_SPEEX_PAY \
+ (gst_rtp_speex_pay_get_type())
+#define GST_RTP_SPEEX_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SPEEX_PAY,GstRtpSPEEXPay))
+#define GST_RTP_SPEEX_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SPEEX_PAY,GstRtpSPEEXPayClass))
+#define GST_IS_RTP_SPEEX_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SPEEX_PAY))
+#define GST_IS_RTP_SPEEX_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SPEEX_PAY))
+
+struct _GstRtpSPEEXPay
+{
+ GstRTPBasePayload payload;
+
+ guint64 packet;
+};
+
+struct _GstRtpSPEEXPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_speex_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_SPEEX_PAY_H__ */
diff --git a/gst/rtp/gstrtpstorage.c b/gst/rtp/gstrtpstorage.c
new file mode 100644
index 0000000000..a3e1593ceb
--- /dev/null
+++ b/gst/rtp/gstrtpstorage.c
@@ -0,0 +1,221 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+/**
+ * SECTION:element-rtpstorage
+ * @short_description: RTP storage for forward error correction (FEC) in rtpbin
+ * @title: rtpstorage
+ *
+ * Helper element for storing packets to aid later packet recovery from packet
+ * loss using RED/FEC (Forward Error Correction).
+ *
+ * The purpose of this element is to store a moving window of packets which
+ * downstream elements such as #GstRtpUlpFecDec can request in order to perform
+ * recovery of lost packets upon receiving custom GstRtpPacketLost events,
+ * usually from #GstRtpJitterBuffer.
+ *
+ * As such, when building a pipeline manually, it should have the form:
+ *
+ * ```
+ * rtpstorage ! rtpjitterbuffer ! rtpulpfecdec
+ * ```
+ *
+ * where rtpulpfecdec get passed a reference to the object pointed to by
+ * the #GstRtpStorage:internal-storage property.
+ *
+ * The #GstRtpStorage:size-time property should be configured with a value
+ * equal to the #GstRtpJitterBuffer latency, plus some tolerance, in the order
+ * of milliseconds, for example in the example found at
+ * <https://github.com/sdroege/gstreamer-rs/blob/master/examples/src/bin/rtpfecclient.rs>,
+ * `size-time` is configured as 200 + 50 milliseconds (latency + tolerance).
+ *
+ * When using #GstRtpBin, a storage element is created automatically, and
+ * can be configured upon receiving the #GstRtpBin::new-storage signal.
+ *
+ * See also: #GstRtpBin, #GstRtpUlpFecDec
+ * Since: 1.14
+ */
+
+#include "gstrtpelements.h"
+#include "gstrtpstorage.h"
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+enum
+{
+ PROP_0,
+ PROP_SIZE_TIME,
+ PROP_INTERNAL_STORAGE,
+ N_PROPERTIES
+};
+
+static GParamSpec *klass_properties[N_PROPERTIES] = { NULL, };
+
+#define DEFAULT_SIZE_TIME (0)
+
+GST_DEBUG_CATEGORY (gst_rtp_storage_debug);
+#define GST_CAT_DEFAULT (gst_rtp_storage_debug)
+
+G_DEFINE_TYPE (GstRtpStorage, gst_rtp_storage, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpstorage, "rtpstorage", GST_RANK_NONE,
+ GST_TYPE_RTP_STORAGE, rtp_element_init (plugin));
+
+static GstFlowReturn
+gst_rtp_storage_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+ GstRtpStorage *self = GST_RTP_STORAGE (parent);;
+
+ if (rtp_storage_append_buffer (self->storage, buf))
+ return gst_pad_push (self->srcpad, buf);
+ return GST_FLOW_OK;
+}
+
+static void
+gst_rtp_storage_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpStorage *self = GST_RTP_STORAGE (object);
+
+ switch (prop_id) {
+ case PROP_SIZE_TIME:
+ GST_DEBUG_OBJECT (self, "RTP storage size set to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (g_value_get_uint64 (value)));
+ rtp_storage_set_size (self->storage, g_value_get_uint64 (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_storage_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpStorage *self = GST_RTP_STORAGE (object);
+ switch (prop_id) {
+ case PROP_SIZE_TIME:
+ g_value_set_uint64 (value, rtp_storage_get_size (self->storage));
+ break;
+ case PROP_INTERNAL_STORAGE:
+ {
+ g_value_set_object (value, self->storage);
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gboolean
+gst_rtp_storage_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstRtpStorage *self = GST_RTP_STORAGE (parent);
+
+ if (GST_QUERY_TYPE (query) == GST_QUERY_CUSTOM) {
+ GstStructure *s = gst_query_writable_structure (query);
+
+ if (gst_structure_has_name (s, "GstRtpStorage")) {
+ gst_structure_set (s, "storage", G_TYPE_OBJECT, self->storage, NULL);
+ return TRUE;
+ }
+ }
+
+ return gst_pad_query_default (pad, parent, query);
+}
+
+static void
+gst_rtp_storage_init (GstRtpStorage * self)
+{
+ self->srcpad = gst_pad_new_from_static_template (&srctemplate, "src");
+ self->sinkpad = gst_pad_new_from_static_template (&sinktemplate, "sink");
+ GST_PAD_SET_PROXY_CAPS (self->sinkpad);
+ GST_PAD_SET_PROXY_ALLOCATION (self->sinkpad);
+ gst_pad_set_chain_function (self->sinkpad, gst_rtp_storage_chain);
+
+ gst_pad_set_query_function (self->srcpad, gst_rtp_storage_src_query);
+
+ gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
+ gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+ self->storage = rtp_storage_new ();
+}
+
+static void
+gst_rtp_storage_dispose (GObject * obj)
+{
+ GstRtpStorage *self = GST_RTP_STORAGE (obj);
+ g_object_unref (self->storage);
+ G_OBJECT_CLASS (gst_rtp_storage_parent_class)->dispose (obj);
+}
+
+static void
+gst_rtp_storage_class_init (GstRtpStorageClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_storage_debug,
+ "rtpstorage", 0, "RTP Storage");
+ GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_storage_chain);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&srctemplate));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sinktemplate));
+
+ gst_element_class_set_static_metadata (element_class,
+ "RTP storage",
+ "Analyzer/RTP",
+ "Helper element for various purposes "
+ "(ex. recovering from packet loss using RED/FEC). "
+ "Saves given number of RTP packets. "
+ "Should be instantiated before jitterbuffer",
+ "Mikhail Fludkov <misha@pexip.com>");
+
+ gobject_class->set_property = gst_rtp_storage_set_property;
+ gobject_class->get_property = gst_rtp_storage_get_property;
+ gobject_class->dispose = gst_rtp_storage_dispose;
+
+ klass_properties[PROP_SIZE_TIME] =
+ g_param_spec_uint64 ("size-time", "Storage size (in ns)",
+ "The amount of data to keep in the storage (in ns, 0-disable)", 0,
+ G_MAXUINT64, DEFAULT_SIZE_TIME,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS);
+
+ klass_properties[PROP_INTERNAL_STORAGE] =
+ g_param_spec_object ("internal-storage", "Internal storage",
+ "Internal RtpStorage object", G_TYPE_OBJECT,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS);
+
+ g_object_class_install_properties (gobject_class, N_PROPERTIES,
+ klass_properties);
+}
diff --git a/gst/rtp/gstrtpstorage.h b/gst/rtp/gstrtpstorage.h
new file mode 100644
index 0000000000..3d5c166d67
--- /dev/null
+++ b/gst/rtp/gstrtpstorage.h
@@ -0,0 +1,59 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __GST_RTP_STORAGE_H__
+#define __GST_RTP_STORAGE_H__
+
+#include <gst/gst.h>
+#include "rtpstorage.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_STORAGE \
+ (gst_rtp_storage_get_type())
+#define GST_RTP_STORAGE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_STORAGE,GstRtpStorage))
+#define GST_RTP_STORAGE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_STORAGE,GstRtpStorageClass))
+#define RTP_IS_STORAGE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_STORAGE))
+#define RTP_IS_STORAGE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_STORAGE))
+
+typedef struct _GstRtpStorage GstRtpStorage;
+typedef struct _GstRtpStorageClass GstRtpStorageClass;
+
+struct _GstRtpStorageClass {
+ GstElementClass parent_class;
+};
+
+struct _GstRtpStorage {
+ GstElement parent;
+ GstPad *srcpad;
+ GstPad *sinkpad;
+
+ RtpStorage *storage;
+};
+
+GType gst_rtp_storage_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_STORAGE_H__ */
diff --git a/gst/rtp/gstrtpstreamdepay.c b/gst/rtp/gstrtpstreamdepay.c
new file mode 100644
index 0000000000..978c753019
--- /dev/null
+++ b/gst/rtp/gstrtpstreamdepay.c
@@ -0,0 +1,228 @@
+/* GStreamer
+ * Copyright (C) 2013 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpstreamdepay
+ * @title: rtpstreamdepay
+ *
+ * Implements stream depayloading of RTP and RTCP packets for connection-oriented
+ * transport protocols according to RFC4571.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc ! "audio/x-raw,rate=48000" ! vorbisenc ! rtpvorbispay config-interval=1 ! rtpstreampay ! tcpserversink port=5678
+ * gst-launch-1.0 tcpclientsrc port=5678 host=127.0.0.1 do-timestamp=true ! "application/x-rtp-stream,media=audio,clock-rate=48000,encoding-name=VORBIS" ! rtpstreamdepay ! rtpvorbisdepay ! decodebin ! audioconvert ! audioresample ! autoaudiosink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtpstreamdepay.h"
+
+GST_DEBUG_CATEGORY (gst_rtp_stream_depay_debug);
+#define GST_CAT_DEFAULT gst_rtp_stream_depay_debug
+
+/* Output: unframed RTP/RTCP (and secure variants) packets. */
+static GstStaticPadTemplate src_template =
+ GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp; application/x-rtcp;"
+ "application/x-srtp; application/x-srtcp")
+ );
+
+/* Input: RFC4571 length-prefixed ("-stream") framing of the same. */
+static GstStaticPadTemplate sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp-stream; application/x-rtcp-stream;"
+ "application/x-srtp-stream; application/x-srtcp-stream")
+ );
+
+#define parent_class gst_rtp_stream_depay_parent_class
+G_DEFINE_TYPE (GstRtpStreamDepay, gst_rtp_stream_depay, GST_TYPE_BASE_PARSE);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpstreamdepay, "rtpstreamdepay",
+ GST_RANK_NONE, GST_TYPE_RTP_STREAM_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_stream_depay_set_sink_caps (GstBaseParse * parse,
+ GstCaps * caps);
+static GstCaps *gst_rtp_stream_depay_get_sink_caps (GstBaseParse * parse,
+ GstCaps * filter);
+static GstFlowReturn gst_rtp_stream_depay_handle_frame (GstBaseParse * parse,
+ GstBaseParseFrame * frame, gint * skipsize);
+
+static gboolean gst_rtp_stream_depay_sink_activate (GstPad * pad,
+ GstObject * parent);
+
+/* GObject class setup: register pad templates, element metadata and the
+ * GstBaseParse virtual methods this depayloader implements. */
+static void
+gst_rtp_stream_depay_class_init (GstRtpStreamDepayClass * klass)
+{
+  GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_stream_depay_debug, "rtpstreamdepay", 0,
+      "RTP stream depayloader");
+
+  /* caps negotiation and frame parsing hooks */
+  parse_class->set_sink_caps =
+      GST_DEBUG_FUNCPTR (gst_rtp_stream_depay_set_sink_caps);
+  parse_class->get_sink_caps =
+      GST_DEBUG_FUNCPTR (gst_rtp_stream_depay_get_sink_caps);
+  parse_class->handle_frame =
+      GST_DEBUG_FUNCPTR (gst_rtp_stream_depay_handle_frame);
+
+  gst_element_class_add_static_pad_template (element_class, &sink_template);
+  gst_element_class_add_static_pad_template (element_class, &src_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP Stream Depayloading", "Codec/Depayloader/Network",
+      "Depayloads RTP/RTCP packets for streaming protocols according to RFC4571",
+      "Sebastian Dröge <sebastian@centricular.com>");
+}
+
+/* Instance init: the 2-byte RFC4571 length prefix is the minimum we can
+ * parse, and the sink pad is forced into push mode. */
+static void
+gst_rtp_stream_depay_init (GstRtpStreamDepay * self)
+{
+  gst_base_parse_set_min_frame_size (GST_BASE_PARSE (self), 2);
+
+  /* Force activation in push mode. We need to get a caps event from upstream
+   * to know the full RTP caps. */
+  gst_pad_set_activate_function (GST_BASE_PARSE_SINK_PAD (self),
+      gst_rtp_stream_depay_sink_activate);
+}
+
+/* Translate the framed sink caps into the corresponding packetized caps
+ * and set them on the source pad. Anything not explicitly recognized
+ * falls back to application/x-srtcp, as in the original if/else chain. */
+static gboolean
+gst_rtp_stream_depay_set_sink_caps (GstBaseParse * parse, GstCaps * caps)
+{
+  static const struct
+  {
+    const gchar *framed;
+    const gchar *packetized;
+  } rename[] = {
+    {"application/x-rtp-stream", "application/x-rtp"},
+    {"application/x-rtcp-stream", "application/x-rtcp"},
+    {"application/x-srtp-stream", "application/x-srtp"},
+  };
+  const gchar *newname = "application/x-srtcp";
+  GstCaps *srccaps;
+  GstStructure *s;
+  gboolean res;
+  guint i;
+
+  srccaps = gst_caps_copy (caps);
+  s = gst_caps_get_structure (srccaps, 0);
+
+  for (i = 0; i < G_N_ELEMENTS (rename); i++) {
+    if (gst_structure_has_name (s, rename[i].framed)) {
+      newname = rename[i].packetized;
+      break;
+    }
+  }
+  gst_structure_set_name (s, newname);
+
+  res = gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+}
+
+/* Compute the sink caps we can accept: query the downstream peer with a
+ * filter translated into the packetized namespace, rename the reply back
+ * to the "-stream" framed namespace and intersect with our template.
+ * Returns a caps reference owned by the caller. */
+static GstCaps *
+gst_rtp_stream_depay_get_sink_caps (GstBaseParse * parse, GstCaps * filter)
+{
+  GstCaps *peerfilter = NULL, *peercaps, *templ;
+  GstCaps *res;
+  GstStructure *structure;
+  guint i, n;
+
+  if (filter) {
+    /* Translate the filter into the peer's (unframed) namespace before
+     * forwarding the caps query downstream. */
+    peerfilter = gst_caps_copy (filter);
+    n = gst_caps_get_size (peerfilter);
+    for (i = 0; i < n; i++) {
+      structure = gst_caps_get_structure (peerfilter, i);
+
+      if (gst_structure_has_name (structure, "application/x-rtp-stream"))
+        gst_structure_set_name (structure, "application/x-rtp");
+      else if (gst_structure_has_name (structure, "application/x-rtcp-stream"))
+        gst_structure_set_name (structure, "application/x-rtcp");
+      else if (gst_structure_has_name (structure, "application/x-srtp-stream"))
+        gst_structure_set_name (structure, "application/x-srtp");
+      else
+        gst_structure_set_name (structure, "application/x-srtcp");
+    }
+  }
+
+  templ = gst_pad_get_pad_template_caps (GST_BASE_PARSE_SINK_PAD (parse));
+  peercaps =
+      gst_pad_peer_query_caps (GST_BASE_PARSE_SRC_PAD (parse), peerfilter);
+
+  if (peercaps) {
+    /* Rename structure names back to the framed namespace */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      structure = gst_caps_get_structure (peercaps, i);
+
+      if (gst_structure_has_name (structure, "application/x-rtp"))
+        gst_structure_set_name (structure, "application/x-rtp-stream");
+      else if (gst_structure_has_name (structure, "application/x-rtcp"))
+        gst_structure_set_name (structure, "application/x-rtcp-stream");
+      else if (gst_structure_has_name (structure, "application/x-srtp"))
+        gst_structure_set_name (structure, "application/x-srtp-stream");
+      else
+        gst_structure_set_name (structure, "application/x-srtcp-stream");
+    }
+
+    res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+    /* gst_pad_get_pad_template_caps() returned a reference we own; drop it
+     * now that the intersection holds the result (fixes a caps leak). */
+    gst_caps_unref (templ);
+  } else {
+    /* no peer: ownership of the template reference moves to 'res' */
+    res = templ;
+  }
+
+  if (filter) {
+    GstCaps *intersection;
+
+    intersection =
+        gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (res);
+    res = intersection;
+
+    gst_caps_unref (peerfilter);
+  }
+
+  return res;
+}
+
+/* Extract one RFC4571 frame: a 16-bit big-endian length prefix followed
+ * by that many payload bytes. Returns GST_FLOW_OK without finishing a
+ * frame when the full packet has not arrived yet. */
+static GstFlowReturn
+gst_rtp_stream_depay_handle_frame (GstBaseParse * parse,
+    GstBaseParseFrame * frame, gint * skipsize)
+{
+  gsize buf_size;
+  guint16 size;
+
+  /* read the 2-byte length prefix; min_frame_size is 2 so this should
+   * only fail on a genuinely broken buffer */
+  if (gst_buffer_extract (frame->buffer, 0, &size, 2) != 2)
+    return GST_FLOW_ERROR;
+
+  size = GUINT16_FROM_BE (size);
+  buf_size = gst_buffer_get_size (frame->buffer);
+
+  /* Need more data */
+  if (size + 2 > buf_size)
+    return GST_FLOW_OK;
+
+  /* strip the 2-byte prefix; the output is the bare RTP/RTCP packet */
+  frame->out_buffer =
+      gst_buffer_copy_region (frame->buffer, GST_BUFFER_COPY_ALL, 2, size);
+
+  return gst_base_parse_finish_frame (parse, frame, size + 2);
+}
+
+/* Sink activation override: always activate in push mode (see init(),
+ * we need the upstream caps event to learn the full RTP caps). */
+static gboolean
+gst_rtp_stream_depay_sink_activate (GstPad * pad, GstObject * parent)
+{
+  return gst_pad_activate_mode (pad, GST_PAD_MODE_PUSH, TRUE);
+}
diff --git a/gst/rtp/gstrtpstreamdepay.h b/gst/rtp/gstrtpstreamdepay.h
new file mode 100644
index 0000000000..32bd6c142b
--- /dev/null
+++ b/gst/rtp/gstrtpstreamdepay.h
@@ -0,0 +1,56 @@
+/* GStreamer
+ * Copyright (C) 2013 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_STREAM_DEPAY_H__
+#define __GST_RTP_STREAM_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbaseparse.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_STREAM_DEPAY \
+  (gst_rtp_stream_depay_get_type())
+#define GST_RTP_STREAM_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_STREAM_DEPAY,GstRtpStreamDepay))
+#define GST_RTP_STREAM_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_STREAM_DEPAY,GstRtpStreamDepayClass))
+#define GST_IS_RTP_STREAM_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_STREAM_DEPAY))
+#define GST_IS_RTP_STREAM_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_STREAM_DEPAY))
+
+typedef struct _GstRtpStreamDepay GstRtpStreamDepay;
+typedef struct _GstRtpStreamDepayClass GstRtpStreamDepayClass;
+
+/* RFC4571 stream depayloader; no instance state beyond the base parser,
+ * all parsing state lives in GstBaseParse. */
+struct _GstRtpStreamDepay
+{
+  GstBaseParse parent;
+};
+
+struct _GstRtpStreamDepayClass
+{
+  GstBaseParseClass parent_class;
+};
+
+GType gst_rtp_stream_depay_get_type (void);
+
+G_END_DECLS
+
+#endif
diff --git a/gst/rtp/gstrtpstreampay.c b/gst/rtp/gstrtpstreampay.c
new file mode 100644
index 0000000000..51206ffb9d
--- /dev/null
+++ b/gst/rtp/gstrtpstreampay.c
@@ -0,0 +1,282 @@
+/*
+ * GStreamer
+ * Copyright (C) 2013 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpstreampay
+ * @title: rtpstreampay
+ *
+ * Implements stream payloading of RTP and RTCP packets for connection-oriented
+ * transport protocols according to RFC4571.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 audiotestsrc ! "audio/x-raw,rate=48000" ! vorbisenc ! rtpvorbispay config-interval=1 ! rtpstreampay ! tcpserversink port=5678
+ * gst-launch-1.0 tcpclientsrc port=5678 host=127.0.0.1 do-timestamp=true ! "application/x-rtp-stream,media=audio,clock-rate=48000,encoding-name=VORBIS" ! rtpstreamdepay ! rtpvorbisdepay ! decodebin ! audioconvert ! audioresample ! autoaudiosink
+ * ]|
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtpstreampay.h"
+
+#define GST_CAT_DEFAULT gst_rtp_stream_pay_debug
+GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
+
+/* Input: unframed RTP/RTCP (and secure variants) packets. */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp; application/x-rtcp; "
+ "application/x-srtp; application/x-srtcp")
+ );
+
+/* Output: RFC4571 length-prefixed ("-stream") framing of the same. */
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp-stream; application/x-rtcp-stream; "
+ "application/x-srtp-stream; application/x-srtcp-stream")
+ );
+
+#define parent_class gst_rtp_stream_pay_parent_class
+G_DEFINE_TYPE (GstRtpStreamPay, gst_rtp_stream_pay, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpstreampay, "rtpstreampay",
+ GST_RANK_NONE, GST_TYPE_RTP_STREAM_PAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_stream_pay_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static GstFlowReturn gst_rtp_stream_pay_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * inbuf);
+static gboolean gst_rtp_stream_pay_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+/* GObject class setup: pad templates and element metadata; all actual
+ * behavior is installed on the pads in instance init. */
+static void
+gst_rtp_stream_pay_class_init (GstRtpStreamPayClass * klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_stream_pay_debug, "rtpstreampay", 0,
+      "RTP stream payloader");
+
+  gst_element_class_add_static_pad_template (element_class, &src_template);
+  gst_element_class_add_static_pad_template (element_class, &sink_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP Stream Payloading", "Codec/Payloader/Network",
+      "Payloads RTP/RTCP packets for streaming protocols according to RFC4571",
+      "Sebastian Dröge <sebastian@centricular.com>");
+}
+
+/* Instance init: create both always-pads and install the chain/event/
+ * query handlers on the sink pad. */
+static void
+gst_rtp_stream_pay_init (GstRtpStreamPay * self)
+{
+  self->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
+  gst_pad_set_chain_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_stream_pay_sink_chain));
+  gst_pad_set_event_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_stream_pay_sink_event));
+  gst_pad_set_query_function (self->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_stream_pay_sink_query));
+  gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+  /* src caps are fixed once set from the CAPS event on the sink pad */
+  self->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+  gst_pad_use_fixed_caps (self->srcpad);
+  gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
+}
+
+/* Compute acceptable sink caps: query the downstream peer with a filter
+ * translated into the framed namespace, rename the reply back to the
+ * packetized namespace and intersect with our sink template.
+ * Returns a caps reference owned by the caller. */
+static GstCaps *
+gst_rtp_stream_pay_sink_get_caps (GstRtpStreamPay * self, GstCaps * filter)
+{
+  GstCaps *peerfilter = NULL, *peercaps, *templ;
+  GstCaps *res;
+  GstStructure *structure;
+  guint i, n;
+
+  if (filter) {
+    /* Translate the filter into the peer's (framed) namespace before
+     * forwarding the caps query downstream. */
+    peerfilter = gst_caps_copy (filter);
+    n = gst_caps_get_size (peerfilter);
+    for (i = 0; i < n; i++) {
+      structure = gst_caps_get_structure (peerfilter, i);
+
+      if (gst_structure_has_name (structure, "application/x-rtp"))
+        gst_structure_set_name (structure, "application/x-rtp-stream");
+      else if (gst_structure_has_name (structure, "application/x-rtcp"))
+        gst_structure_set_name (structure, "application/x-rtcp-stream");
+      else if (gst_structure_has_name (structure, "application/x-srtp"))
+        gst_structure_set_name (structure, "application/x-srtp-stream");
+      else
+        gst_structure_set_name (structure, "application/x-srtcp-stream");
+    }
+  }
+
+  templ = gst_pad_get_pad_template_caps (self->sinkpad);
+  peercaps = gst_pad_peer_query_caps (self->srcpad, peerfilter);
+
+  if (peercaps) {
+    /* Rename structure names back to the packetized namespace */
+    peercaps = gst_caps_make_writable (peercaps);
+    n = gst_caps_get_size (peercaps);
+    for (i = 0; i < n; i++) {
+      structure = gst_caps_get_structure (peercaps, i);
+
+      if (gst_structure_has_name (structure, "application/x-rtp-stream"))
+        gst_structure_set_name (structure, "application/x-rtp");
+      else if (gst_structure_has_name (structure, "application/x-rtcp-stream"))
+        gst_structure_set_name (structure, "application/x-rtcp");
+      else if (gst_structure_has_name (structure, "application/x-srtp-stream"))
+        gst_structure_set_name (structure, "application/x-srtp");
+      else
+        gst_structure_set_name (structure, "application/x-srtcp");
+    }
+
+    res = gst_caps_intersect_full (peercaps, templ, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+    /* gst_pad_get_pad_template_caps() returned a reference we own; drop it
+     * now that the intersection holds the result (fixes a caps leak). */
+    gst_caps_unref (templ);
+  } else {
+    /* no peer: ownership of the template reference moves to 'res' */
+    res = templ;
+  }
+
+  if (filter) {
+    GstCaps *intersection;
+
+    intersection =
+        gst_caps_intersect_full (filter, res, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (res);
+    res = intersection;
+
+    gst_caps_unref (peerfilter);
+  }
+
+  return res;
+}
+
+/* Sink pad query handler: answer CAPS queries with the translated peer
+ * caps; everything else goes to the default handler. */
+static gboolean
+gst_rtp_stream_pay_sink_query (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  GstRtpStreamPay *self = GST_RTP_STREAM_PAY (parent);
+  gboolean ret;
+
+  GST_LOG_OBJECT (pad, "Handling query of type '%s'",
+      gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_query_parse_caps (query, &caps);
+      /* sink_get_caps returns a new reference; drop it after storing the
+       * result in the query */
+      caps = gst_rtp_stream_pay_sink_get_caps (self, caps);
+      gst_query_set_caps_result (query, caps);
+      gst_caps_unref (caps);
+      ret = TRUE;
+      break;
+    }
+    default:
+      ret = gst_pad_query_default (pad, parent, query);
+  }
+
+  return ret;
+}
+
+/* Translate the packetized sink caps into the corresponding "-stream"
+ * framed caps and set them on the source pad. Anything not explicitly
+ * recognized falls back to application/x-srtcp-stream, matching the
+ * original if/else chain. */
+static gboolean
+gst_rtp_stream_pay_sink_set_caps (GstRtpStreamPay * self, GstCaps * caps)
+{
+  static const struct
+  {
+    const gchar *packetized;
+    const gchar *framed;
+  } rename[] = {
+    {"application/x-rtp", "application/x-rtp-stream"},
+    {"application/x-rtcp", "application/x-rtcp-stream"},
+    {"application/x-srtp", "application/x-srtp-stream"},
+  };
+  const gchar *newname = "application/x-srtcp-stream";
+  GstCaps *srccaps;
+  GstStructure *s;
+  gboolean res;
+  guint i;
+
+  srccaps = gst_caps_copy (caps);
+  s = gst_caps_get_structure (srccaps, 0);
+
+  for (i = 0; i < G_N_ELEMENTS (rename); i++) {
+    if (gst_structure_has_name (s, rename[i].packetized)) {
+      newname = rename[i].framed;
+      break;
+    }
+  }
+  gst_structure_set_name (s, newname);
+
+  res = gst_pad_set_caps (self->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+}
+
+/* Sink pad event handler: intercept CAPS events to configure the source
+ * pad caps; everything else goes to the default handler. */
+static gboolean
+gst_rtp_stream_pay_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRtpStreamPay *self = GST_RTP_STREAM_PAY (parent);
+  gboolean ret;
+
+  GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_event_parse_caps (event, &caps);
+      ret = gst_rtp_stream_pay_sink_set_caps (self, caps);
+      /* event is consumed here (not forwarded); the src pad caps were
+       * updated instead */
+      gst_event_unref (event);
+      break;
+    }
+    default:
+      ret = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+
+  return ret;
+}
+
+/* Chain function: prepend the RFC4571 16-bit big-endian length prefix to
+ * each incoming packet and push the framed buffer downstream. Packets
+ * larger than 65535 bytes cannot be framed and are rejected. */
+static GstFlowReturn
+gst_rtp_stream_pay_sink_chain (GstPad * pad, GstObject * parent,
+    GstBuffer * inbuf)
+{
+  GstRtpStreamPay *self = GST_RTP_STREAM_PAY (parent);
+  GstBuffer *outbuf;
+  gsize size;
+  guint8 size16[2];
+
+  size = gst_buffer_get_size (inbuf);
+  if (size > G_MAXUINT16) {
+    GST_ELEMENT_ERROR (self, CORE, FAILED, (NULL),
+        ("Only buffers up to %d bytes supported, got %" G_GSIZE_FORMAT,
+            G_MAXUINT16, size));
+    gst_buffer_unref (inbuf);
+    return GST_FLOW_ERROR;
+  }
+
+  /* 2-byte buffer holding just the length prefix */
+  outbuf = gst_buffer_new_and_alloc (2);
+
+  GST_WRITE_UINT16_BE (size16, size);
+  gst_buffer_fill (outbuf, 0, size16, 2);
+
+  /* append the packet memory and copy timestamps/flags/metas onto the
+   * framed output buffer */
+  gst_buffer_copy_into (outbuf, inbuf, GST_BUFFER_COPY_ALL, 0, -1);
+
+  gst_buffer_unref (inbuf);
+
+  return gst_pad_push (self->srcpad, outbuf);
+}
diff --git a/gst/rtp/gstrtpstreampay.h b/gst/rtp/gstrtpstreampay.h
new file mode 100644
index 0000000000..b90165af2c
--- /dev/null
+++ b/gst/rtp/gstrtpstreampay.h
@@ -0,0 +1,52 @@
+/*
+ * GStreamer
+ * Copyright (C) 2013 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_STREAM_PAY_H__
+#define __GST_RTP_STREAM_PAY_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_STREAM_PAY (gst_rtp_stream_pay_get_type())
+#define GST_RTP_STREAM_PAY(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_STREAM_PAY,GstRtpStreamPay))
+#define GST_IS_RTP_STREAM_PAY(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_STREAM_PAY))
+#define GST_RTP_STREAM_PAY_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_RTP_STREAM_PAY,GstRtpStreamPayClass))
+#define GST_IS_RTP_STREAM_PAY_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_RTP_STREAM_PAY))
+#define GST_RTP_STREAM_PAY_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_RTP_STREAM_PAY,GstRtpStreamPayClass))
+
+typedef struct _GstRtpStreamPay GstRtpStreamPay;
+typedef struct _GstRtpStreamPayClass GstRtpStreamPayClass;
+
+/* RFC4571 stream payloader: plain GstElement with one always sink pad
+ * (packetized input) and one always src pad (framed output). */
+struct _GstRtpStreamPay {
+  GstElement parent;
+
+  GstPad *srcpad, *sinkpad;
+};
+
+struct _GstRtpStreamPayClass {
+  GstElementClass parent_class;
+};
+
+GType gst_rtp_stream_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_STREAM_PAY_H__ */
diff --git a/gst/rtp/gstrtpsv3vdepay.c b/gst/rtp/gstrtpsv3vdepay.c
new file mode 100644
index 0000000000..bac99458a1
--- /dev/null
+++ b/gst/rtp/gstrtpsv3vdepay.c
@@ -0,0 +1,316 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "gstrtpsv3vdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY (rtpsv3vdepay_debug);
+#define GST_CAT_DEFAULT rtpsv3vdepay_debug
+
+/* Output: Sorenson Video 3 (SVQ3) elementary stream. */
+static GstStaticPadTemplate gst_rtp_sv3v_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-svq, " "svqversion = (int) 3")
+ );
+
+/* Input: RTP with any of the encoding names historically used for
+ * Sorenson video (there is no RFC for this payload format). */
+static GstStaticPadTemplate gst_rtp_sv3v_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "clock-rate = (int) 90000, "
+ "encoding-name = (string) { \"X-SV3V-ES\", \"X-SORENSON-VIDEO\" , \"X-SORENSONVIDEO\" , \"X-SorensonVideo\" }")
+ );
+
+#define gst_rtp_sv3v_depay_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstRtpSV3VDepay, gst_rtp_sv3v_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD, GST_DEBUG_CATEGORY_INIT (rtpsv3vdepay_debug,
+ "rtpsv3vdepay", 0, "RTP SV3V depayloader"));
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpsv3vdepay, "rtpsv3vdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_SV3V_DEPAY, rtp_element_init (plugin));
+
+static void gst_rtp_sv3v_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_sv3v_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static GstBuffer *gst_rtp_sv3v_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+/* NOTE(review): setcaps is not static (and has no static prototype),
+ * so the symbol is exported — looks unintentional, verify before hiding. */
+gboolean gst_rtp_sv3v_depay_setcaps (GstRTPBaseDepayload * filter,
+ GstCaps * caps);
+
+/* GObject class setup: install the depayloader virtual methods, the
+ * finalize/change_state overrides, pad templates and metadata. */
+static void
+gst_rtp_sv3v_depay_class_init (GstRtpSV3VDepayClass * klass)
+{
+  GObjectClass *object_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstRTPBaseDepayloadClass *depay_class = (GstRTPBaseDepayloadClass *) klass;
+
+  object_class->finalize = gst_rtp_sv3v_depay_finalize;
+
+  element_class->change_state = gst_rtp_sv3v_depay_change_state;
+
+  depay_class->process_rtp_packet = gst_rtp_sv3v_depay_process;
+  depay_class->set_caps = gst_rtp_sv3v_depay_setcaps;
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_sv3v_depay_src_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_rtp_sv3v_depay_sink_template);
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP SVQ3 depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts SVQ3 video from RTP packets (no RFC)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+}
+
+/* Instance init: the adapter accumulates payload fragments until a
+ * marker bit signals the end of a frame (see process()). */
+static void
+gst_rtp_sv3v_depay_init (GstRtpSV3VDepay * rtpsv3vdepay)
+{
+  rtpsv3vdepay->adapter = gst_adapter_new ();
+}
+
+/* Finalize: release the fragment adapter, then chain up. */
+static void
+gst_rtp_sv3v_depay_finalize (GObject * object)
+{
+  GstRtpSV3VDepay *self = GST_RTP_SV3V_DEPAY (object);
+
+  /* unref the adapter and NULL the pointer in one step */
+  g_clear_object (&self->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* only on the sink */
+/* only on the sink */
+/* set_caps vmethod: only record the clock rate here; the output caps
+ * are derived later from the in-band configuration packet (see the C
+ * flag handling in process()). */
+gboolean
+gst_rtp_sv3v_depay_setcaps (GstRTPBaseDepayload * filter, GstCaps * caps)
+{
+  GstStructure *structure = gst_caps_get_structure (caps, 0);
+  gint clock_rate;
+
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    clock_rate = 90000;         /* default */
+  filter->clock_rate = clock_rate;
+
+  /* will set caps later */
+
+  return TRUE;
+}
+
+/* process_rtp_packet vmethod: reassemble SVQ3 frames from RTP payloads.
+ * Configuration packets (C flag) carry the resolution and codec data
+ * used to set the src caps; data packets are accumulated in the adapter
+ * until the RTP marker bit completes a frame. Returns the completed
+ * frame buffer, or NULL when more packets are needed. */
+static GstBuffer *
+gst_rtp_sv3v_depay_process (GstRTPBaseDepayload * depayload, GstRTPBuffer * rtp)
+{
+  GstRtpSV3VDepay *rtpsv3vdepay;
+  static struct
+  {
+    guint width, height;
+  } resolutions[7] = {
+    {
+    160, 128}, {
+    128, 96}, {
+    176, 144}, {
+    352, 288}, {
+    704, 576}, {
+    240, 180}, {
+    320, 240}
+  };
+  gint payload_len;
+  guint8 *payload;
+  gboolean M;
+  gboolean C, S, E;
+  GstBuffer *outbuf = NULL;
+  guint16 seq;
+
+  rtpsv3vdepay = GST_RTP_SV3V_DEPAY (depayload);
+
+
+  /* flush on sequence number gaps */
+  seq = gst_rtp_buffer_get_seq (rtp);
+
+  GST_DEBUG ("timestamp %" GST_TIME_FORMAT ", sequence number:%d",
+      GST_TIME_ARGS (GST_BUFFER_PTS (rtp->buffer)), seq);
+
+  if (seq != rtpsv3vdepay->nextseq) {
+    GST_DEBUG ("Sequence discontinuity, clearing adapter");
+    gst_adapter_clear (rtpsv3vdepay->adapter);
+  }
+  rtpsv3vdepay->nextseq = seq + 1;
+
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+  if (payload_len < 3)
+    goto bad_packet;
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+
+  M = gst_rtp_buffer_get_marker (rtp);
+
+  /* This is all a guess:
+   *                      1 1 1 1 1 1
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |0|C|S|E|0|0|0|0|0|0|0|0|0|0|0|0|
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   *
+   * C: config, packet contains config info
+   * S: start, packet contains start of frame
+   * E: end, packet contains end of frame
+   */
+  /* this seems to indicate a packet with a config string sent before each
+   * keyframe */
+  C = (payload[0] & 0x40) == 0x40;
+
+  /* redundant with the RTP marker bit */
+  S = (payload[0] & 0x20) == 0x20;
+  E = (payload[0] & 0x10) == 0x10;
+
+  GST_DEBUG ("M:%d, C:%d, S:%d, E:%d", M, C, S, E);
+
+  GST_MEMDUMP ("incoming buffer", payload, payload_len);
+
+  if (G_UNLIKELY (C)) {
+    GstCaps *caps;
+    GstBuffer *codec_data;
+    GstMapInfo cmap;
+    guint8 res;
+
+    GST_DEBUG ("Configuration packet");
+
+    /* if we already have caps, we don't need to do anything. FIXME, check if
+     * something changed. */
+    if (G_UNLIKELY (gst_pad_has_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD
+            (depayload)))) {
+      GST_DEBUG ("Already configured, skipping config parsing");
+      goto beach;
+    }
+
+    res = payload[2] >> 5;
+
+    /* width and height, according to http://wiki.multimedia.cx/index.php?title=Sorenson_Video_1#Stream_Format_And_Header */
+    if (G_LIKELY (res < 7)) {
+      rtpsv3vdepay->width = resolutions[res].width;
+      rtpsv3vdepay->height = resolutions[res].height;
+    } else {
+      /* extended width/height, they're contained in the following 24bit;
+       * make sure those bytes are actually present before reading them
+       * (only payload_len >= 3 was guaranteed above) */
+      if (payload_len < 6)
+        goto bad_packet;
+      rtpsv3vdepay->width = ((payload[2] & 0x1f) << 7) | (payload[3] >> 1);
+      rtpsv3vdepay->height =
+          (payload[3] & 0x1) << 11 | payload[4] << 3 | (payload[5] >> 5);
+    }
+
+    /* CodecData needs to be 'SEQH' + len (32bit) + data according to
+     * ffmpeg's libavcodec/svq3.c:svq3_decode_init */
+    codec_data = gst_buffer_new_and_alloc (payload_len + 6);
+    gst_buffer_map (codec_data, &cmap, GST_MAP_WRITE);
+    memcpy (cmap.data, "SEQH", 4);
+    GST_WRITE_UINT32_LE (cmap.data + 4, payload_len - 2);
+    memcpy (cmap.data + 8, payload + 2, payload_len - 2);
+    GST_MEMDUMP ("codec_data", cmap.data, gst_buffer_get_size (codec_data));
+    gst_buffer_unmap (codec_data, &cmap);
+
+    caps = gst_caps_new_simple ("video/x-svq",
+        "svqversion", G_TYPE_INT, 3,
+        "width", G_TYPE_INT, rtpsv3vdepay->width,
+        "height", G_TYPE_INT, rtpsv3vdepay->height,
+        "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
+    gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), caps);
+    gst_caps_unref (caps);
+
+    GST_DEBUG ("Depayloader now configured");
+
+    rtpsv3vdepay->configured = TRUE;
+
+    goto beach;
+  }
+
+  if (G_LIKELY (rtpsv3vdepay->configured)) {
+    GstBuffer *tmpbuf;
+
+    GST_DEBUG ("Storing incoming payload");
+    /* store data in adapter, strip off 2 bytes header */
+    tmpbuf = gst_rtp_buffer_get_payload_subbuffer (rtp, 2, -1);
+    gst_adapter_push (rtpsv3vdepay->adapter, tmpbuf);
+
+    if (G_UNLIKELY (M)) {
+      /* frame is completed: push contents of adapter */
+      guint avail;
+
+      avail = gst_adapter_available (rtpsv3vdepay->adapter);
+      GST_DEBUG ("Returning completed output buffer [%d bytes]", avail);
+      outbuf = gst_adapter_take_buffer (rtpsv3vdepay->adapter, avail);
+      gst_rtp_drop_non_video_meta (rtpsv3vdepay, outbuf);
+    }
+  }
+
+beach:
+  return outbuf;
+
+  /* ERRORS */
+bad_packet:
+  {
+    GST_ELEMENT_WARNING (rtpsv3vdepay, STREAM, DECODE,
+        (NULL), ("Packet was too short"));
+    return NULL;
+  }
+}
+
+/* change_state override: reset the fragment adapter when (re)starting,
+ * then chain up to the parent implementation. */
+static GstStateChangeReturn
+gst_rtp_sv3v_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpSV3VDepay *rtpsv3vdepay;
+  GstStateChangeReturn ret;
+
+  rtpsv3vdepay = GST_RTP_SV3V_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* drop any partially reassembled frame from a previous run */
+      gst_adapter_clear (rtpsv3vdepay->adapter);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpsv3vdepay.h b/gst/rtp/gstrtpsv3vdepay.h
new file mode 100644
index 0000000000..1f3fc7f9bb
--- /dev/null
+++ b/gst/rtp/gstrtpsv3vdepay.h
@@ -0,0 +1,65 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_SV3V_DEPAY_H__
+#define __GST_RTP_SV3V_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_SV3V_DEPAY \
+ (gst_rtp_sv3v_depay_get_type())
+#define GST_RTP_SV3V_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SV3V_DEPAY,GstRtpSV3VDepay))
+#define GST_RTP_SV3V_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SV3V_DEPAY,GstRtpSV3VDepayClass))
+#define GST_IS_RTP_SV3V_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SV3V_DEPAY))
+#define GST_IS_RTP_SV3V_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SV3V_DEPAY))
+
+typedef struct _GstRtpSV3VDepay GstRtpSV3VDepay;
+typedef struct _GstRtpSV3VDepayClass GstRtpSV3VDepayClass;
+
+struct _GstRtpSV3VDepay
+{
+  GstRTPBaseDepayload depayload;
+
+  /* accumulates payload pieces; flushed when the RTP marker ends a frame */
+  GstAdapter *adapter;
+
+  /* TRUE once src caps (width/height/codec_data) were set from a config
+   * packet; payload is dropped until then */
+  gboolean configured;
+
+  guint16 nextseq;
+  /* frame dimensions parsed from the SV3V configuration packet */
+  guint width;
+  guint height;
+};
+
+struct _GstRtpSV3VDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_sv3v_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_SV3V_DEPAY_H__ */
diff --git a/gst/rtp/gstrtptheoradepay.c b/gst/rtp/gstrtptheoradepay.c
new file mode 100644
index 0000000000..e7ff9e18c6
--- /dev/null
+++ b/gst/rtp/gstrtptheoradepay.c
@@ -0,0 +1,703 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/tag/tag.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtptheoradepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtptheoradepay_debug);
+#define GST_CAT_DEFAULT (rtptheoradepay_debug)
+
+/* Sink: RTP packets with THEORA encoding-name; the SDP-level parameters
+ * below are documented but not enforced by the template caps. */
+static GstStaticPadTemplate gst_rtp_theora_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-rtp, "
+        "media = (string) \"video\", "
+        "clock-rate = (int) 90000, " "encoding-name = (string) \"THEORA\""
+        /* All required parameters
+         *
+         * "sampling = (string) { "YCbCr-4:2:0", "YCbCr-4:2:2", "YCbCr-4:4:4" } "
+         * "width = (string) [1, 1048561] (multiples of 16) "
+         * "height = (string) [1, 1048561] (multiples of 16) "
+         * "delivery-method = (string) { inline, in_band, out_band/<specific_name> } "
+         * "configuration = (string) ANY"
+         */
+        /* All optional parameters
+         *
+         * "configuration-uri ="
+         */
+    )
+    );
+
+/* Src: raw Theora packets (headers are pushed before data packets) */
+static GstStaticPadTemplate gst_rtp_theora_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/x-theora")
+    );
+
+#define gst_rtp_theora_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpTheoraDepay, gst_rtp_theora_depay,
+    GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtptheoradepay, "rtptheoradepay",
+    GST_RANK_SECONDARY, GST_TYPE_RTP_THEORA_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_theora_depay_setcaps (GstRTPBaseDepayload * depayload,
+    GstCaps * caps);
+static GstBuffer *gst_rtp_theora_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp);
+static gboolean gst_rtp_theora_depay_packet_lost (GstRTPBaseDepayload *
+    depayload, GstEvent * event);
+
+static void gst_rtp_theora_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_theora_depay_change_state (GstElement *
+    element, GstStateChange transition);
+
+/* Wire up GObject/GstElement/GstRTPBaseDepayload vmethods, pad templates
+ * and element metadata for the Theora depayloader. */
+static void
+gst_rtp_theora_depay_class_init (GstRtpTheoraDepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_theora_depay_finalize;
+
+  gstelement_class->change_state = gst_rtp_theora_depay_change_state;
+
+  /* process_rtp_packet (not process) so we keep access to the GstRTPBuffer */
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_theora_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_theora_depay_setcaps;
+  gstrtpbasedepayload_class->packet_lost = gst_rtp_theora_depay_packet_lost;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_theora_depay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_theora_depay_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP Theora depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts Theora video from RTP packets (draft-01 of RFC XXXX)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtptheoradepay_debug, "rtptheoradepay", 0,
+      "Theora RTP Depayloader");
+}
+
+static void
+gst_rtp_theora_depay_init (GstRtpTheoraDepay * rtptheoradepay)
+{
+  /* adapter collects fragments of a fragmented Theora packet (F != 0);
+   * released in finalize */
+  rtptheoradepay->adapter = gst_adapter_new ();
+}
+
+/* Release one codebook configuration: every cached header buffer, then the
+ * record itself. */
+static void
+free_config (GstRtpTheoraConfig * config)
+{
+  GList *walk;
+
+  for (walk = config->headers; walk; walk = g_list_next (walk))
+    gst_buffer_unref (GST_BUFFER_CAST (walk->data));
+  g_list_free (config->headers);
+  g_free (config);
+}
+
+/* Drop all stored codebook configurations.  NOTE(review): the name looks
+ * like a typo for "free_idents" (configs are keyed by ident); kept as-is
+ * since it is called from change_state. */
+static void
+free_indents (GstRtpTheoraDepay * rtptheoradepay)
+{
+  g_list_free_full (rtptheoradepay->configs, (GDestroyNotify) free_config);
+  rtptheoradepay->configs = NULL;
+}
+
+static void
+gst_rtp_theora_depay_finalize (GObject * object)
+{
+  GstRtpTheoraDepay *self = GST_RTP_THEORA_DEPAY (object);
+
+  /* adapter was created in _init; stored configs are already released on the
+   * PAUSED->READY transition */
+  g_object_unref (self->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Parse an out-of-band "packed headers" configuration blob and append one
+ * GstRtpTheoraConfig (ident + list of header buffers) per packed header to
+ * rtptheoradepay->configs.  Takes ownership of confbuf and unrefs it on all
+ * paths.  Returns FALSE if the blob is truncated or inconsistent. */
+static gboolean
+gst_rtp_theora_depay_parse_configuration (GstRtpTheoraDepay * rtptheoradepay,
+    GstBuffer * confbuf)
+{
+  GstBuffer *buf;
+  guint32 num_headers;
+  GstMapInfo map;
+  guint8 *data;
+  gsize size;
+  gint i, j;
+
+  gst_buffer_map (confbuf, &map, GST_MAP_READ);
+  data = map.data;
+  size = map.size;
+
+  GST_DEBUG_OBJECT (rtptheoradepay, "config size %" G_GSIZE_FORMAT, size);
+
+  /* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                     Number of packed headers                  |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          Packed header                        |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          Packed header                        |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          ....                                 |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   */
+  if (size < 4)
+    goto too_small;
+
+  num_headers = GST_READ_UINT32_BE (data);
+  size -= 4;
+  data += 4;
+
+  GST_DEBUG_OBJECT (rtptheoradepay, "have %u headers", num_headers);
+
+  /*  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                   Ident                       | length       ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..              | n. of headers |    length1    |    length2   ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..              |             Identification Header            ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * .................................................................
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                    | Comment Header                          ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * .................................................................
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                         Comment Header                       |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          Setup Header                        ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * .................................................................
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                         Setup Header                         |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   */
+  /* NOTE(review): i is gint while num_headers is an untrusted guint32; a
+   * huge count relies on the size checks below to terminate — confirm. */
+  for (i = 0; i < num_headers; i++) {
+    guint32 ident;
+    guint16 length;
+    guint8 n_headers, b;
+    GstRtpTheoraConfig *conf;
+    guint *h_sizes;
+    /* 1 plus one per varint byte read below; used to accept length fields
+     * that also cover the size fields themselves */
+    guint extra = 1;
+
+    if (size < 6)
+      goto too_small;
+
+    /* 24-bit ident, 16-bit packed-header length, 8-bit header count */
+    ident = (data[0] << 16) | (data[1] << 8) | data[2];
+    length = (data[3] << 8) | data[4];
+    n_headers = data[5];
+    size -= 6;
+    data += 6;
+
+    GST_DEBUG_OBJECT (rtptheoradepay,
+        "header %d, ident 0x%08x, length %u, left %" G_GSIZE_FORMAT, i, ident,
+        length, size);
+
+    /* FIXME check if we already got this ident */
+
+    /* length might also include count of following size fields */
+    if (size < length && size + 1 != length)
+      goto too_small;
+
+    /* read header sizes we read 2 sizes, the third size (for which we allocate
+     * space) must be derived from the total packed header length. */
+    h_sizes = g_newa (guint, n_headers + 1);
+    for (j = 0; j < n_headers; j++) {
+      guint h_size;
+
+      /* base-128 varint, MSB of each byte flags continuation */
+      h_size = 0;
+      do {
+        if (size < 1)
+          goto too_small;
+        b = *data++;
+        size--;
+        extra++;
+        h_size = (h_size << 7) | (b & 0x7f);
+      } while (b & 0x80);
+      GST_DEBUG_OBJECT (rtptheoradepay, "headers %d: size: %u", j, h_size);
+      h_sizes[j] = h_size;
+      /* NOTE(review): guint16 subtraction; a bogus h_size > length would
+       * wrap around — presumably caught by the size checks below, confirm */
+      length -= h_size;
+    }
+    /* last header length is the remaining space */
+    GST_DEBUG_OBJECT (rtptheoradepay, "last header size: %u", length);
+    h_sizes[j] = length;
+
+    GST_DEBUG_OBJECT (rtptheoradepay, "preparing headers");
+    conf = g_new0 (GstRtpTheoraConfig, 1);
+    conf->ident = ident;
+
+    for (j = 0; j <= n_headers; j++) {
+      guint h_size;
+
+      h_size = h_sizes[j];
+      if (size < h_size) {
+        if (j != n_headers || size + extra != h_size) {
+          free_config (conf);
+          goto too_small;
+        } else {
+          /* otherwise means that overall length field contained total length,
+           * including extra fields */
+          h_size -= extra;
+        }
+      }
+
+      GST_DEBUG_OBJECT (rtptheoradepay, "reading header %d, size %u", j,
+          h_size);
+
+      /* zero-copy slice of the mapped config buffer */
+      buf =
+          gst_buffer_copy_region (confbuf, GST_BUFFER_COPY_ALL, data - map.data,
+          h_size);
+      conf->headers = g_list_append (conf->headers, buf);
+      data += h_size;
+      size -= h_size;
+    }
+    rtptheoradepay->configs = g_list_append (rtptheoradepay->configs, conf);
+  }
+
+  gst_buffer_unmap (confbuf, &map);
+  gst_buffer_unref (confbuf);
+
+  return TRUE;
+
+  /* ERRORS */
+too_small:
+  {
+    GST_DEBUG_OBJECT (rtptheoradepay, "configuration too small");
+    gst_buffer_unmap (confbuf, &map);
+    gst_buffer_unref (confbuf);
+    return FALSE;
+  }
+}
+
+/* Wrap an in-band configuration payload in the out-of-band "packed headers"
+ * framing (count=1, ident, length) and run it through the regular parser.
+ * configuration/size point at the raw in-band bytes; length is the packed
+ * header length taken from the packet.  Returns FALSE on parse failure. */
+static gboolean
+gst_rtp_theora_depay_parse_inband_configuration (GstRtpTheoraDepay *
+    rtptheoradepay, guint ident, guint8 * configuration, guint size,
+    guint length)
+{
+  GstBuffer *confbuf;
+  GstMapInfo map;
+
+  if (G_UNLIKELY (size < 4))
+    return FALSE;
+
+  /* transform inline to out-of-band and parse that one */
+  confbuf = gst_buffer_new_and_alloc (size + 9);
+  gst_buffer_map (confbuf, &map, GST_MAP_WRITE);
+  /* 1 header */
+  GST_WRITE_UINT32_BE (map.data, 1);
+  /* write Ident */
+  GST_WRITE_UINT24_BE (map.data + 4, ident);
+  /* write sort-of-length */
+  GST_WRITE_UINT16_BE (map.data + 7, length);
+  /* copy remainder */
+  memcpy (map.data + 9, configuration, size);
+  gst_buffer_unmap (confbuf, &map);
+
+  /* parse_configuration takes ownership of confbuf */
+  return gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf);
+}
+
+/* set_caps vmethod: decode the base64 "configuration" SDP parameter (if
+ * present) into codebook configs and set video/x-theora caps on the src
+ * pad.  Returns FALSE when the configuration cannot be parsed. */
+static gboolean
+gst_rtp_theora_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstRtpTheoraDepay *rtptheoradepay;
+  GstCaps *srccaps;
+  const gchar *configuration;
+  gboolean res;
+
+  rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
+
+  /* new stream config: no keyframe is pending yet */
+  rtptheoradepay->needs_keyframe = FALSE;
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* read and parse configuration string */
+  configuration = gst_structure_get_string (structure, "configuration");
+  if (configuration) {
+    GstBuffer *confbuf;
+    guint8 *data;
+    gsize size;
+
+    /* deserialize base64 to buffer */
+    data = g_base64_decode (configuration, &size);
+
+    /* wrap the decoded bytes zero-copy; g_free runs when the memory dies */
+    confbuf = gst_buffer_new ();
+    gst_buffer_append_memory (confbuf,
+        gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));
+
+    /* parse_configuration consumes confbuf on success and failure */
+    if (!gst_rtp_theora_depay_parse_configuration (rtptheoradepay, confbuf))
+      goto invalid_configuration;
+  }
+
+  /* set caps on pad and on header */
+  srccaps = gst_caps_new_empty_simple ("video/x-theora");
+  res = gst_pad_set_caps (depayload->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  /* Clock rate is always 90000 according to draft-barbato-avt-rtp-theora-01 */
+  depayload->clock_rate = 90000;
+
+  return res;
+
+  /* ERRORS */
+invalid_configuration:
+  {
+    GST_ERROR_OBJECT (rtptheoradepay, "invalid configuration specified");
+    return FALSE;
+  }
+}
+
+/* Make the config matching ident the active codebook and push its header
+ * buffers downstream so the decoder can (re)initialize.  Returns FALSE when
+ * no stored config has that ident. */
+static gboolean
+gst_rtp_theora_depay_switch_codebook (GstRtpTheoraDepay * rtptheoradepay,
+    guint32 ident)
+{
+  GList *walk;
+  gboolean res = FALSE;
+
+  /* NOTE(review): the loop keeps scanning after a match, so with duplicate
+   * idents the last one wins — presumably idents are unique, confirm */
+  for (walk = rtptheoradepay->configs; walk; walk = g_list_next (walk)) {
+    GstRtpTheoraConfig *conf = (GstRtpTheoraConfig *) walk->data;
+
+    if (conf->ident == ident) {
+      GList *headers;
+
+      /* FIXME, remove pads, create new pad.. */
+
+      /* push out all the headers */
+      for (headers = conf->headers; headers; headers = g_list_next (headers)) {
+        GstBuffer *header = GST_BUFFER_CAST (headers->data);
+
+        /* ref: push takes ownership but the config keeps its copy */
+        gst_buffer_ref (header);
+        gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtptheoradepay),
+            header);
+      }
+      /* remember the current config */
+      rtptheoradepay->config = conf;
+      res = TRUE;
+    }
+  }
+  if (!res) {
+    /* we don't know about the headers, figure out an alternative method for
+     * getting the codebooks. FIXME, fail for now. */
+  }
+  return res;
+}
+
+/* process_rtp_packet vmethod: unpack one Theora RTP packet (per
+ * draft-barbato-avt-rtp-theora-01), reassembling fragments through the
+ * adapter and pushing each contained Theora packet downstream itself.
+ * Always returns NULL; output goes through gst_rtp_base_depayload_push. */
+static GstBuffer *
+gst_rtp_theora_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp)
+{
+  GstRtpTheoraDepay *rtptheoradepay;
+  GstBuffer *outbuf;
+  GstFlowReturn ret;
+  gint payload_len;
+  GstMapInfo map;
+  GstBuffer *payload_buffer = NULL;
+  guint8 *payload;
+  guint32 header, ident;
+  guint8 F, TDT, packets;
+  guint length;
+
+  rtptheoradepay = GST_RTP_THEORA_DEPAY (depayload);
+
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+  GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
+
+  /* we need at least 4 bytes for the packet header */
+  if (G_UNLIKELY (payload_len < 4))
+    goto packet_short;
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+
+  header = GST_READ_UINT32_BE (payload);
+  /*
+   *  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                     Ident                     | F |TDT|# pkts.|
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   *
+   * F: Fragment type (0=none, 1=start, 2=cont, 3=end)
+   * TDT: Theora data type (0=theora, 1=config, 2=comment, 3=reserved)
+   * pkts: number of packets.
+   */
+  TDT = (header & 0x30) >> 4;
+  if (G_UNLIKELY (TDT == 3))
+    goto ignore_reserved;
+
+  ident = (header >> 8) & 0xffffff;
+  F = (header & 0xc0) >> 6;
+  packets = (header & 0xf);
+
+  GST_DEBUG_OBJECT (depayload, "ident: 0x%08x, F: %d, TDT: %d, packets: %d",
+      ident, F, TDT, packets);
+
+  if (TDT == 0) {
+    gboolean do_switch = FALSE;
+
+    /* we have a raw payload, find the codebook for the ident */
+    if (!rtptheoradepay->config) {
+      /* we don't have an active codebook, find the codebook and
+       * activate it */
+      do_switch = TRUE;
+    } else if (rtptheoradepay->config->ident != ident) {
+      /* codebook changed */
+      do_switch = TRUE;
+    }
+    if (do_switch) {
+      if (!gst_rtp_theora_depay_switch_codebook (rtptheoradepay, ident))
+        goto switch_failed;
+    }
+  }
+
+  /* fragmented packets, assemble */
+  if (F != 0) {
+    GstBuffer *vdata;
+
+    if (F == 1) {
+      /* if we start a packet, clear adapter and start assembling. */
+      gst_adapter_clear (rtptheoradepay->adapter);
+      GST_DEBUG_OBJECT (depayload, "start assemble");
+      rtptheoradepay->assembling = TRUE;
+    }
+
+    /* continuation/end fragment without a seen start: drop it */
+    if (!rtptheoradepay->assembling)
+      goto no_output;
+
+    /* skip header and length. */
+    vdata = gst_rtp_buffer_get_payload_subbuffer (rtp, 6, -1);
+
+    GST_DEBUG_OBJECT (depayload, "assemble theora packet");
+    gst_adapter_push (rtptheoradepay->adapter, vdata);
+
+    /* packet is not complete, we are done */
+    if (F != 3)
+      goto no_output;
+
+    /* construct assembled buffer; length is already known, so the
+     * per-packet length field is not re-read below */
+    length = gst_adapter_available (rtptheoradepay->adapter);
+    payload_buffer = gst_adapter_take_buffer (rtptheoradepay->adapter, length);
+  } else {
+    /* unfragmented: lengths are read per packet in the loop below */
+    length = 0;
+    payload_buffer = gst_rtp_buffer_get_payload_subbuffer (rtp, 4, -1);
+  }
+
+  GST_DEBUG_OBJECT (depayload, "assemble done, payload_len %d", payload_len);
+
+  gst_buffer_map (payload_buffer, &map, GST_MAP_READ);
+  payload = map.data;
+  payload_len = map.size;
+
+  /* we are not assembling anymore */
+  rtptheoradepay->assembling = FALSE;
+  gst_adapter_clear (rtptheoradepay->adapter);
+
+  /* payload now points to a length with that many theora data bytes.
+   * Iterate over the packets and send them out.
+   *
+   *  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |             length            |          theora data         ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                        theora data                           |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |            length             |   next theora packet data    ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                        theora data                           |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
+   */
+  while (payload_len >= 2) {
+    /* If length is not 0, we have a reassembled packet for which we
+     * calculated the length already and don't have to skip over the
+     * length field anymore
+     */
+    if (length == 0) {
+      length = GST_READ_UINT16_BE (payload);
+      payload += 2;
+      payload_len -= 2;
+    }
+
+    GST_DEBUG_OBJECT (depayload, "read length %u, avail: %d", length,
+        payload_len);
+
+    /* skip packet if something odd happens */
+    if (G_UNLIKELY (length > payload_len))
+      goto length_short;
+
+    /* handle in-band configuration */
+    if (G_UNLIKELY (TDT == 1)) {
+      GST_DEBUG_OBJECT (rtptheoradepay, "in-band configuration");
+      if (!gst_rtp_theora_depay_parse_inband_configuration (rtptheoradepay,
+              ident, payload, payload_len, length))
+        goto invalid_configuration;
+      goto no_output;
+    }
+
+    /* create buffer for packet */
+    outbuf =
+        gst_buffer_copy_region (payload_buffer, GST_BUFFER_COPY_ALL,
+        payload - map.data, length);
+
+    /* top two bits clear marks a keyframe — NOTE(review): per the Theora
+     * bitstream header byte, confirm against the spec */
+    if (payload_len > 0 && (payload[0] & 0xC0) == 0x0) {
+      rtptheoradepay->needs_keyframe = FALSE;
+    } else {
+      GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+    }
+
+    payload += length;
+    payload_len -= length;
+    /* make sure to read next length */
+    length = 0;
+
+    ret = gst_rtp_base_depayload_push (depayload, outbuf);
+    if (ret != GST_FLOW_OK)
+      break;
+  }
+
+  if (rtptheoradepay->needs_keyframe)
+    goto request_keyframe;
+
+out:
+no_output:
+
+  /* payload_buffer is NULL when we bailed out before mapping it */
+  if (payload_buffer) {
+    gst_buffer_unmap (payload_buffer, &map);
+    gst_buffer_unref (payload_buffer);
+  }
+
+  return NULL;
+
+  /* ERRORS */
+switch_failed:
+  {
+    GST_ELEMENT_WARNING (rtptheoradepay, STREAM, DECODE,
+        (NULL), ("Could not switch codebooks"));
+    goto request_config;
+  }
+packet_short:
+  {
+    GST_ELEMENT_WARNING (rtptheoradepay, STREAM, DECODE,
+        (NULL), ("Packet was too short (%d < 4)", payload_len));
+    goto request_keyframe;
+  }
+ignore_reserved:
+  {
+    GST_WARNING_OBJECT (rtptheoradepay, "reserved TDT ignored");
+    goto out;
+  }
+length_short:
+  {
+    GST_ELEMENT_WARNING (rtptheoradepay, STREAM, DECODE,
+        (NULL), ("Packet contains invalid data"));
+    goto request_keyframe;
+  }
+invalid_configuration:
+  {
+    /* fatal, as we otherwise risk carrying on without output */
+    GST_ELEMENT_ERROR (rtptheoradepay, STREAM, DECODE,
+        (NULL), ("Packet contains invalid configuration"));
+    goto request_config;
+  }
+request_config:
+  {
+    /* ask upstream for headers + keyframe */
+    gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
+        gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+            gst_structure_new ("GstForceKeyUnit",
+                "all-headers", G_TYPE_BOOLEAN, TRUE, NULL)));
+    goto out;
+  }
+request_keyframe:
+  {
+    rtptheoradepay->needs_keyframe = TRUE;
+    gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
+        gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+            gst_structure_new_empty ("GstForceKeyUnit")));
+    goto out;
+  }
+}
+
+static GstStateChangeReturn
+gst_rtp_theora_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpTheoraDepay *self = GST_RTP_THEORA_DEPAY (element);
+  GstStateChangeReturn ret;
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* Forget all cached codebook configs when leaving PAUSED; they are set up
+   * again via setcaps or in-band configuration on the next start.  No other
+   * transition needs element-specific work. */
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    free_indents (self);
+
+  return ret;
+}
+
+static gboolean
+gst_rtp_theora_depay_packet_lost (GstRTPBaseDepayload * depayload,
+    GstEvent * event)
+{
+  GstRtpTheoraDepay *self = GST_RTP_THEORA_DEPAY (depayload);
+  guint seqnum = 0;
+
+  /* the lost packet may have carried (part of) a frame; request a keyframe
+   * upstream so the decoder can resynchronize */
+  gst_structure_get_uint (gst_event_get_structure (event), "seqnum", &seqnum);
+  GST_LOG_OBJECT (depayload, "Requested keyframe because frame with seqnum %u"
+      " is missing", seqnum);
+  self->needs_keyframe = TRUE;
+
+  gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depayload),
+      gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+          gst_structure_new_empty ("GstForceKeyUnit")));
+
+  return TRUE;
+}
diff --git a/gst/rtp/gstrtptheoradepay.h b/gst/rtp/gstrtptheoradepay.h
new file mode 100644
index 0000000000..492d33dc04
--- /dev/null
+++ b/gst/rtp/gstrtptheoradepay.h
@@ -0,0 +1,70 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_THEORA_DEPAY_H__
+#define __GST_RTP_THEORA_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_THEORA_DEPAY \
+ (gst_rtp_theora_depay_get_type())
+#define GST_RTP_THEORA_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_THEORA_DEPAY,GstRtpTheoraDepay))
+#define GST_RTP_THEORA_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_THEORA_DEPAY,GstRtpTheoraDepayClass))
+#define GST_IS_RTP_THEORA_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_THEORA_DEPAY))
+#define GST_IS_RTP_THEORA_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_THEORA_DEPAY))
+
+typedef struct _GstRtpTheoraDepay GstRtpTheoraDepay;
+typedef struct _GstRtpTheoraDepayClass GstRtpTheoraDepayClass;
+
+/* One set of Theora codebook headers, keyed by the 24-bit ident carried in
+ * every RTP payload header. */
+typedef struct _GstRtpTheoraConfig {
+  guint32 ident;
+  GList *headers;
+} GstRtpTheoraConfig;
+
+struct _GstRtpTheoraDepay
+{
+  GstRTPBaseDepayload parent;
+
+  /* all known configs (GstRtpTheoraConfig *) and the currently active one */
+  GList *configs;
+  GstRtpTheoraConfig *config;
+
+  /* reassembly state for fragmented packets (payload header F != 0) */
+  GstAdapter *adapter;
+  gboolean assembling;
+
+  /* TRUE while waiting for a keyframe after packet loss or a decode error */
+  gboolean needs_keyframe;
+};
+
+struct _GstRtpTheoraDepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_theora_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_THEORA_DEPAY_H__ */
diff --git a/gst/rtp/gstrtptheorapay.c b/gst/rtp/gstrtptheorapay.c
new file mode 100644
index 0000000000..57807570e1
--- /dev/null
+++ b/gst/rtp/gstrtptheorapay.c
@@ -0,0 +1,981 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "fnv1hash.h"
+#include "gstrtptheorapay.h"
+#include "gstrtputils.h"
+
+#define THEORA_ID_LEN 42
+
+GST_DEBUG_CATEGORY_STATIC (rtptheorapay_debug);
+#define GST_CAT_DEFAULT (rtptheorapay_debug)
+
+/* references:
+ * http://svn.xiph.org/trunk/theora/doc/draft-ietf-avt-rtp-theora-01.txt
+ */
+
+static GstStaticPadTemplate gst_rtp_theora_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"video\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) 90000, " "encoding-name = (string) \"THEORA\""
+ /* All required parameters
+ *
+ * "sampling = (string) { "YCbCr-4:2:0", "YCbCr-4:2:2", "YCbCr-4:4:4" } "
+ * "width = (string) [1, 1048561] (multiples of 16) "
+ * "height = (string) [1, 1048561] (multiples of 16) "
+ * "configuration = (string) ANY"
+ */
+ /* All optional parameters
+ *
+ * "configuration-uri ="
+ * "delivery-method = (string) { inline, in_band, out_band/<specific_name> } "
+ */
+ )
+ );
+
+static GstStaticPadTemplate gst_rtp_theora_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-theora")
+ );
+
+#define DEFAULT_CONFIG_INTERVAL 0
+
+enum
+{
+ PROP_0,
+ PROP_CONFIG_INTERVAL
+};
+
+#define gst_rtp_theora_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpTheoraPay, gst_rtp_theora_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtptheorapay, "rtptheorapay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_THEORA_PAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_theora_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+static GstStateChangeReturn gst_rtp_theora_pay_change_state (GstElement *
+ element, GstStateChange transition);
+static GstFlowReturn gst_rtp_theora_pay_handle_buffer (GstRTPBasePayload * pad,
+ GstBuffer * buffer);
+static gboolean gst_rtp_theora_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+
+static gboolean gst_rtp_theora_pay_parse_id (GstRTPBasePayload * basepayload,
+ guint8 * data, guint size);
+static gboolean gst_rtp_theora_pay_finish_headers (GstRTPBasePayload *
+ basepayload);
+
+static void gst_rtp_theora_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_theora_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static void
+gst_rtp_theora_pay_class_init (GstRtpTheoraPayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gstelement_class->change_state = gst_rtp_theora_pay_change_state;
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_theora_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_theora_pay_handle_buffer;
+ gstrtpbasepayload_class->sink_event = gst_rtp_theora_pay_sink_event;
+
+ gobject_class->set_property = gst_rtp_theora_pay_set_property;
+ gobject_class->get_property = gst_rtp_theora_pay_get_property;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_theora_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_theora_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP Theora payloader", "Codec/Payloader/Network/RTP",
+ "Payload-encode Theora video into RTP packets (draft-01 RFC XXXX)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtptheorapay_debug, "rtptheorapay", 0,
+ "Theora RTP Payloader");
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_CONFIG_INTERVAL,
+ g_param_spec_uint ("config-interval", "Config Send Interval",
+ "Send Config Insertion Interval in seconds (configuration headers "
+ "will be multiplexed in the data stream when detected.) (0 = disabled)",
+ 0, 3600, DEFAULT_CONFIG_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+}
+
+static void
+gst_rtp_theora_pay_init (GstRtpTheoraPay * rtptheorapay)
+{
+ rtptheorapay->last_config = GST_CLOCK_TIME_NONE;
+}
+
+static void
+gst_rtp_theora_pay_clear_packet (GstRtpTheoraPay * rtptheorapay)
+{
+ if (rtptheorapay->packet)
+ gst_buffer_unref (rtptheorapay->packet);
+ rtptheorapay->packet = NULL;
+ g_list_free_full (rtptheorapay->packet_buffers,
+ (GDestroyNotify) gst_buffer_unref);
+ rtptheorapay->packet_buffers = NULL;
+}
+
+static void
+gst_rtp_theora_pay_cleanup (GstRtpTheoraPay * rtptheorapay)
+{
+ gst_rtp_theora_pay_clear_packet (rtptheorapay);
+ g_list_free_full (rtptheorapay->headers, (GDestroyNotify) gst_buffer_unref);
+ rtptheorapay->headers = NULL;
+ g_free (rtptheorapay->config_data);
+ rtptheorapay->config_data = NULL;
+ rtptheorapay->last_config = GST_CLOCK_TIME_NONE;
+}
+
+static gboolean
+gst_rtp_theora_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+ GstRtpTheoraPay *rtptheorapay;
+ GstStructure *s;
+ const GValue *array;
+ gint asize, i;
+ GstBuffer *buf;
+ GstMapInfo map;
+
+ rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
+
+ s = gst_caps_get_structure (caps, 0);
+
+ rtptheorapay->need_headers = TRUE;
+
+ if ((array = gst_structure_get_value (s, "streamheader")) == NULL)
+ goto done;
+
+ if (G_VALUE_TYPE (array) != GST_TYPE_ARRAY)
+ goto done;
+
+ if ((asize = gst_value_array_get_size (array)) < 3)
+ goto done;
+
+ for (i = 0; i < asize; i++) {
+ const GValue *value;
+
+ value = gst_value_array_get_value (array, i);
+ if ((buf = gst_value_get_buffer (value)) == NULL)
+ goto null_buffer;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ /* no data packets allowed */
+ if (map.size < 1)
+ goto invalid_streamheader;
+
+ /* we need packets with id 0x80, 0x81, 0x82 */
+ if (map.data[0] != 0x80 + i)
+ goto invalid_streamheader;
+
+ if (i == 0) {
+ /* identification, we need to parse this in order to get the clock rate. */
+ if (G_UNLIKELY (!gst_rtp_theora_pay_parse_id (basepayload, map.data,
+ map.size)))
+ goto parse_id_failed;
+ }
+ GST_DEBUG_OBJECT (rtptheorapay, "collecting header %d", i);
+ rtptheorapay->headers =
+ g_list_append (rtptheorapay->headers, gst_buffer_ref (buf));
+ gst_buffer_unmap (buf, &map);
+ }
+ if (!gst_rtp_theora_pay_finish_headers (basepayload))
+ goto finish_failed;
+
+done:
+ return TRUE;
+
+ /* ERRORS */
+null_buffer:
+ {
+ GST_WARNING_OBJECT (rtptheorapay, "streamheader with null buffer received");
+ return FALSE;
+ }
+invalid_streamheader:
+ {
+ GST_WARNING_OBJECT (rtptheorapay, "unable to parse initial header");
+ gst_buffer_unmap (buf, &map);
+ return FALSE;
+ }
+parse_id_failed:
+ {
+ GST_WARNING_OBJECT (rtptheorapay, "unable to parse initial header");
+ gst_buffer_unmap (buf, &map);
+ return FALSE;
+ }
+finish_failed:
+ {
+ GST_WARNING_OBJECT (rtptheorapay, "unable to finish headers");
+ return FALSE;
+ }
+}
+
+static void
+gst_rtp_theora_pay_reset_packet (GstRtpTheoraPay * rtptheorapay, guint8 TDT)
+{
+ guint payload_len;
+ GstRTPBuffer rtp = { NULL };
+
+ GST_DEBUG_OBJECT (rtptheorapay, "reset packet");
+
+ rtptheorapay->payload_pos = 4;
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+ rtptheorapay->payload_left = payload_len - 4;
+ rtptheorapay->payload_duration = 0;
+ rtptheorapay->payload_F = 0;
+ rtptheorapay->payload_TDT = TDT;
+ rtptheorapay->payload_pkts = 0;
+}
+
+static void
+gst_rtp_theora_pay_init_packet (GstRtpTheoraPay * rtptheorapay, guint8 TDT,
+ GstClockTime timestamp)
+{
+ GST_DEBUG_OBJECT (rtptheorapay, "starting new packet, TDT: %d", TDT);
+
+ gst_rtp_theora_pay_clear_packet (rtptheorapay);
+
+ /* new packet allocate max packet size */
+ rtptheorapay->packet =
+ gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+ (rtptheorapay), GST_RTP_BASE_PAYLOAD_MTU (rtptheorapay), 0, 0);
+ gst_rtp_theora_pay_reset_packet (rtptheorapay, TDT);
+
+ GST_BUFFER_PTS (rtptheorapay->packet) = timestamp;
+}
+
+static GstFlowReturn
+gst_rtp_theora_pay_flush_packet (GstRtpTheoraPay * rtptheorapay)
+{
+ GstFlowReturn ret;
+ guint8 *payload;
+ guint hlen;
+ GstRTPBuffer rtp = { NULL };
+ GList *l;
+
+ /* check for empty packet */
+ if (!rtptheorapay->packet || rtptheorapay->payload_pos <= 4)
+ return GST_FLOW_OK;
+
+ GST_DEBUG_OBJECT (rtptheorapay, "flushing packet");
+
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+
+ /* fix header */
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ /*
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Ident | F |TDT|# pkts.|
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ *
+ * F: Fragment type (0=none, 1=start, 2=cont, 3=end)
+ * TDT: Theora data type (0=theora, 1=config, 2=comment, 3=reserved)
+ * pkts: number of packets.
+ */
+ payload[0] = (rtptheorapay->payload_ident >> 16) & 0xff;
+ payload[1] = (rtptheorapay->payload_ident >> 8) & 0xff;
+ payload[2] = (rtptheorapay->payload_ident) & 0xff;
+ payload[3] = (rtptheorapay->payload_F & 0x3) << 6 |
+ (rtptheorapay->payload_TDT & 0x3) << 4 |
+ (rtptheorapay->payload_pkts & 0xf);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ /* shrink the buffer size to the last written byte */
+ hlen = gst_rtp_buffer_calc_header_len (0);
+ gst_buffer_resize (rtptheorapay->packet, 0, hlen + rtptheorapay->payload_pos);
+
+ GST_BUFFER_DURATION (rtptheorapay->packet) = rtptheorapay->payload_duration;
+
+ for (l = g_list_last (rtptheorapay->packet_buffers); l; l = l->prev) {
+ GstBuffer *buf = GST_BUFFER_CAST (l->data);
+ gst_rtp_copy_video_meta (rtptheorapay, rtptheorapay->packet, buf);
+ gst_buffer_unref (buf);
+ }
+ g_list_free (rtptheorapay->packet_buffers);
+ rtptheorapay->packet_buffers = NULL;
+
+ /* push, this gives away our ref to the packet, so clear it. */
+ ret =
+ gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtptheorapay),
+ rtptheorapay->packet);
+ rtptheorapay->packet = NULL;
+
+ return ret;
+}
+
+static gboolean
+gst_rtp_theora_pay_finish_headers (GstRTPBasePayload * basepayload)
+{
+ GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
+ GList *walk;
+ guint length, size, n_headers, configlen, extralen;
+ gchar *wstr, *hstr, *configuration;
+ guint8 *data, *config;
+ guint32 ident;
+ gboolean res;
+ const gchar *sampling = NULL;
+
+ GST_DEBUG_OBJECT (rtptheorapay, "finish headers");
+
+ if (!rtptheorapay->headers) {
+ GST_DEBUG_OBJECT (rtptheorapay, "We need 2 headers but have none");
+ goto no_headers;
+ }
+
+ /* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Number of packed headers |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Packed header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Packed header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | .... |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ *
+ * We only construct a config containing 1 packed header like this:
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Ident | length ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. | n. of headers | length1 | length2 ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. | Identification Header ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .................................................................
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. | Comment Header ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .................................................................
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. Comment Header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Setup Header ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .................................................................
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. Setup Header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+
+ /* we need 4 bytes for the number of headers (which is always 1), 3 bytes for
+ * the ident, 2 bytes for length, 1 byte for n. of headers. */
+ size = 4 + 3 + 2 + 1;
+
+ /* count the size of the headers first and update the hash */
+ length = 0;
+ n_headers = 0;
+ ident = fnv1_hash_32_new ();
+ extralen = 1;
+ for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
+ GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ GstMapInfo map;
+ guint bsize;
+
+ bsize = gst_buffer_get_size (buf);
+ length += bsize;
+ n_headers++;
+
+ /* count number of bytes needed for length fields, we don't need this for
+ * the last header. */
+ if (g_list_next (walk)) {
+ do {
+ size++;
+ extralen++;
+ bsize >>= 7;
+ } while (bsize);
+ }
+ /* update hash */
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ident = fnv1_hash_32_update (ident, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ }
+
+ /* packet length is header size + packet length */
+ configlen = size + length;
+ config = data = g_malloc (configlen);
+
+ /* number of packed headers, we only pack 1 header */
+ data[0] = 0;
+ data[1] = 0;
+ data[2] = 0;
+ data[3] = 1;
+
+ ident = fnv1_hash_32_to_24 (ident);
+ rtptheorapay->payload_ident = ident;
+ GST_DEBUG_OBJECT (rtptheorapay, "ident 0x%08x", ident);
+
+ /* take lower 3 bytes */
+ data[4] = (ident >> 16) & 0xff;
+ data[5] = (ident >> 8) & 0xff;
+ data[6] = ident & 0xff;
+
+ /* store length of all theora headers */
+ data[7] = ((length) >> 8) & 0xff;
+ data[8] = (length) & 0xff;
+
+ /* store number of headers minus one. */
+ data[9] = n_headers - 1;
+ data += 10;
+
+ /* store length for each header */
+ for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
+ GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ guint bsize, size, temp;
+ guint flag;
+
+ /* only need to store the length when it's not the last header */
+ if (!g_list_next (walk))
+ break;
+
+ bsize = gst_buffer_get_size (buf);
+
+ /* calc size */
+ size = 0;
+ do {
+ size++;
+ bsize >>= 7;
+ } while (bsize);
+ temp = size;
+
+ bsize = gst_buffer_get_size (buf);
+ /* write the size backwards */
+ flag = 0;
+ while (size) {
+ size--;
+ data[size] = (bsize & 0x7f) | flag;
+ bsize >>= 7;
+ flag = 0x80; /* Flag bit on all bytes of the length except the last */
+ }
+ data += temp;
+ }
+
+ /* copy header data */
+ for (walk = rtptheorapay->headers; walk; walk = g_list_next (walk)) {
+ GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+
+ gst_buffer_extract (buf, 0, data, gst_buffer_get_size (buf));
+ data += gst_buffer_get_size (buf);
+ }
+ rtptheorapay->need_headers = FALSE;
+
+ /* serialize to base64 */
+ configuration = g_base64_encode (config, configlen);
+
+ /* store for later re-sending */
+ g_free (rtptheorapay->config_data);
+ rtptheorapay->config_size = configlen - 4 - 3 - 2;
+ rtptheorapay->config_data = g_malloc (rtptheorapay->config_size);
+ rtptheorapay->config_extra_len = extralen;
+ memcpy (rtptheorapay->config_data, config + 4 + 3 + 2,
+ rtptheorapay->config_size);
+
+ g_free (config);
+
+ /* configure payloader settings */
+ switch (rtptheorapay->pixel_format) {
+ case 2:
+ sampling = "YCbCr-4:2:2";
+ break;
+ case 3:
+ sampling = "YCbCr-4:4:4";
+ break;
+ case 0:
+ default:
+ sampling = "YCbCr-4:2:0";
+ break;
+ }
+
+
+ wstr = g_strdup_printf ("%d", rtptheorapay->width);
+ hstr = g_strdup_printf ("%d", rtptheorapay->height);
+ gst_rtp_base_payload_set_options (basepayload, "video", TRUE, "THEORA",
+ 90000);
+ res =
+ gst_rtp_base_payload_set_outcaps (basepayload, "sampling", G_TYPE_STRING,
+ sampling, "width", G_TYPE_STRING, wstr, "height", G_TYPE_STRING,
+ hstr, "configuration", G_TYPE_STRING, configuration, "delivery-method",
+ G_TYPE_STRING, "inline",
+ /* don't set the other defaults
+ */
+ NULL);
+ g_free (wstr);
+ g_free (hstr);
+ g_free (configuration);
+
+ return res;
+
+ /* ERRORS */
+no_headers:
+ {
+ GST_DEBUG_OBJECT (rtptheorapay, "finish headers");
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_rtp_theora_pay_parse_id (GstRTPBasePayload * basepayload, guint8 * data,
+ guint size)
+{
+ GstRtpTheoraPay *rtptheorapay;
+ gint width, height, pixel_format;
+
+ rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
+
+ if (G_UNLIKELY (size < 42))
+ goto too_short;
+
+ if (G_UNLIKELY (memcmp (data, "\200theora", 7)))
+ goto invalid_start;
+ data += 7;
+
+ if (G_UNLIKELY (data[0] != 3))
+ goto invalid_version;
+ if (G_UNLIKELY (data[1] != 2))
+ goto invalid_version;
+ data += 3;
+
+ width = GST_READ_UINT16_BE (data) << 4;
+ data += 2;
+ height = GST_READ_UINT16_BE (data) << 4;
+ data += 29;
+
+ pixel_format = (GST_READ_UINT8 (data) >> 3) & 0x03;
+
+ /* store values */
+ rtptheorapay->pixel_format = pixel_format;
+ rtptheorapay->width = width;
+ rtptheorapay->height = height;
+
+ return TRUE;
+
+ /* ERRORS */
+too_short:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ (NULL),
+ ("Identification packet is too short, need at least 42, got %d", size));
+ return FALSE;
+ }
+invalid_start:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ (NULL), ("Invalid header start in identification packet"));
+ return FALSE;
+ }
+invalid_version:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ (NULL), ("Invalid version"));
+ return FALSE;
+ }
+}
+
+static GstFlowReturn
+gst_rtp_theora_pay_payload_buffer (GstRtpTheoraPay * rtptheorapay, guint8 TDT,
+ GstBuffer * buffer, guint8 * data, guint size, GstClockTime timestamp,
+ GstClockTime duration, guint not_in_length)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint newsize;
+ guint packet_len;
+ GstClockTime newduration;
+ gboolean flush;
+ guint plen;
+ guint8 *ppos, *payload;
+ gboolean fragmented;
+ GstRTPBuffer rtp = { NULL };
+
+ /* size increases with packet length and 2 bytes size header. */
+ newduration = rtptheorapay->payload_duration;
+ if (duration != GST_CLOCK_TIME_NONE)
+ newduration += duration;
+
+ newsize = rtptheorapay->payload_pos + 2 + size;
+ packet_len = gst_rtp_buffer_calc_packet_len (newsize, 0, 0);
+
+ /* check buffer filled against length and max latency */
+ flush = gst_rtp_base_payload_is_filled (GST_RTP_BASE_PAYLOAD (rtptheorapay),
+ packet_len, newduration);
+ /* we can store up to 15 theora packets in one RTP packet. */
+ flush |= (rtptheorapay->payload_pkts == 15);
+ /* flush if we have a new TDT */
+ if (rtptheorapay->packet)
+ flush |= (rtptheorapay->payload_TDT != TDT);
+ if (flush)
+ ret = gst_rtp_theora_pay_flush_packet (rtptheorapay);
+
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ /* create new packet if we must */
+ if (!rtptheorapay->packet) {
+ gst_rtp_theora_pay_init_packet (rtptheorapay, TDT, timestamp);
+ }
+
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ ppos = payload + rtptheorapay->payload_pos;
+ fragmented = FALSE;
+
+ /* put buffer in packet, it either fits completely or needs to be fragmented
+ * over multiple RTP packets. */
+ do {
+ plen = MIN (rtptheorapay->payload_left - 2, size);
+
+ GST_DEBUG_OBJECT (rtptheorapay, "append %u bytes", plen);
+
+ /* data is copied in the payload with a 2 byte length header */
+ ppos[0] = ((plen - not_in_length) >> 8) & 0xff;
+ ppos[1] = ((plen - not_in_length) & 0xff);
+ if (plen)
+ memcpy (&ppos[2], data, plen);
+
+ if (buffer) {
+ if (!rtptheorapay->packet_buffers
+ || rtptheorapay->packet_buffers->data != (gpointer) buffer)
+ rtptheorapay->packet_buffers =
+ g_list_prepend (rtptheorapay->packet_buffers,
+ gst_buffer_ref (buffer));
+ } else {
+ GList *l;
+
+ for (l = rtptheorapay->headers; l; l = l->next)
+ rtptheorapay->packet_buffers =
+ g_list_prepend (rtptheorapay->packet_buffers,
+ gst_buffer_ref (l->data));
+ }
+
+ /* only first (only) configuration cuts length field */
+ /* NOTE: spec (if any) is not clear on this ... */
+ not_in_length = 0;
+
+ size -= plen;
+ data += plen;
+
+ rtptheorapay->payload_pos += plen + 2;
+ rtptheorapay->payload_left -= plen + 2;
+
+ if (fragmented) {
+ if (size == 0)
+ /* last fragment, set F to 0x3. */
+ rtptheorapay->payload_F = 0x3;
+ else
+ /* fragment continues, set F to 0x2. */
+ rtptheorapay->payload_F = 0x2;
+ } else {
+ if (size > 0) {
+ /* fragmented packet starts, set F to 0x1, mark ourselves as
+ * fragmented. */
+ rtptheorapay->payload_F = 0x1;
+ fragmented = TRUE;
+ }
+ }
+ if (fragmented) {
+ gst_rtp_buffer_unmap (&rtp);
+ /* fragmented packets are always flushed and have pkts of 0 */
+ rtptheorapay->payload_pkts = 0;
+ ret = gst_rtp_theora_pay_flush_packet (rtptheorapay);
+
+ if (size > 0) {
+ /* start new packet and get pointers. TDT stays the same. */
+ gst_rtp_theora_pay_init_packet (rtptheorapay,
+ rtptheorapay->payload_TDT, timestamp);
+ gst_rtp_buffer_map (rtptheorapay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ ppos = payload + rtptheorapay->payload_pos;
+ }
+ } else {
+ /* unfragmented packet, update stats for next packet, size == 0 and we
+ * exit the while loop */
+ rtptheorapay->payload_pkts++;
+ if (duration != GST_CLOCK_TIME_NONE)
+ rtptheorapay->payload_duration += duration;
+ }
+ } while (size && ret == GST_FLOW_OK);
+
+ if (rtp.buffer)
+ gst_rtp_buffer_unmap (&rtp);
+done:
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_rtp_theora_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpTheoraPay *rtptheorapay;
+ GstFlowReturn ret;
+ GstMapInfo map;
+ gsize size;
+ guint8 *data;
+ GstClockTime duration, timestamp;
+ guint8 TDT;
+ gboolean keyframe = FALSE;
+
+ rtptheorapay = GST_RTP_THEORA_PAY (basepayload);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ duration = GST_BUFFER_DURATION (buffer);
+ timestamp = GST_BUFFER_PTS (buffer);
+
+ GST_DEBUG_OBJECT (rtptheorapay, "size %" G_GSIZE_FORMAT
+ ", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
+
+ /* find packet type */
+ if (size == 0) {
+ TDT = 0;
+ keyframe = FALSE;
+ } else if (data[0] & 0x80) {
+ /* header */
+ if (data[0] == 0x80) {
+ /* identification, we need to parse this in order to get the clock rate.
+ */
+ if (G_UNLIKELY (!gst_rtp_theora_pay_parse_id (basepayload, data, size)))
+ goto parse_id_failed;
+ TDT = 1;
+ } else if (data[0] == 0x81) {
+ /* comment */
+ TDT = 2;
+ } else if (data[0] == 0x82) {
+ /* setup */
+ TDT = 1;
+ } else
+ goto unknown_header;
+ } else {
+ /* data */
+ TDT = 0;
+ keyframe = ((data[0] & 0x40) == 0);
+ }
+
+ /* we need to collect the headers and construct a config string from them */
+ if (TDT != 0) {
+ GST_DEBUG_OBJECT (rtptheorapay, "collecting header, buffer %p", buffer);
+ /* append header to the list of headers */
+ gst_buffer_unmap (buffer, &map);
+ rtptheorapay->headers = g_list_append (rtptheorapay->headers, buffer);
+ ret = GST_FLOW_OK;
+ goto done;
+ } else if (rtptheorapay->headers && rtptheorapay->need_headers) {
+ if (!gst_rtp_theora_pay_finish_headers (basepayload))
+ goto header_error;
+ }
+
+ /* there is a config request, see if we need to insert it */
+ if (keyframe && (rtptheorapay->config_interval > 0) &&
+ rtptheorapay->config_data) {
+ gboolean send_config = FALSE;
+ GstClockTime running_time =
+ gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+ timestamp);
+
+ if (rtptheorapay->last_config != -1) {
+ guint64 diff;
+
+ GST_LOG_OBJECT (rtptheorapay,
+ "now %" GST_TIME_FORMAT ", last VOP-I %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time),
+ GST_TIME_ARGS (rtptheorapay->last_config));
+
+ /* calculate diff between last config in milliseconds */
+ if (running_time > rtptheorapay->last_config) {
+ diff = running_time - rtptheorapay->last_config;
+ } else {
+ diff = 0;
+ }
+
+ GST_DEBUG_OBJECT (rtptheorapay,
+ "interval since last config %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
+
+ /* bigger than interval, queue config */
+ if (GST_TIME_AS_SECONDS (diff) >= rtptheorapay->config_interval) {
+ GST_DEBUG_OBJECT (rtptheorapay, "time to send config");
+ send_config = TRUE;
+ }
+ } else {
+ /* no known previous config time, send now */
+ GST_DEBUG_OBJECT (rtptheorapay, "no previous config time, send now");
+ send_config = TRUE;
+ }
+
+ if (send_config) {
+ /* we need to send config now first */
+ /* different TDT type forces flush */
+ gst_rtp_theora_pay_payload_buffer (rtptheorapay, 1,
+ NULL, rtptheorapay->config_data, rtptheorapay->config_size,
+ timestamp, GST_CLOCK_TIME_NONE, rtptheorapay->config_extra_len);
+
+ if (running_time != -1) {
+ rtptheorapay->last_config = running_time;
+ }
+ }
+ }
+
+ ret =
+ gst_rtp_theora_pay_payload_buffer (rtptheorapay, TDT, buffer, data, size,
+ timestamp, duration, 0);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+done:
+ return ret;
+
+ /* ERRORS */
+parse_id_failed:
+ {
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+unknown_header:
+ {
+ GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
+ (NULL), ("Ignoring unknown header received"));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+header_error:
+ {
+ GST_ELEMENT_WARNING (rtptheorapay, STREAM, DECODE,
+ (NULL), ("Error initializing header config"));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+}
+
+static gboolean
+gst_rtp_theora_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ GstRtpTheoraPay *rtptheorapay = GST_RTP_THEORA_PAY (payload);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ gst_rtp_theora_pay_clear_packet (rtptheorapay);
+ break;
+ default:
+ break;
+ }
+ /* false to let parent handle event as well */
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+}
+
+static GstStateChangeReturn
+gst_rtp_theora_pay_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstRtpTheoraPay *rtptheorapay;
+
+ GstStateChangeReturn ret;
+
+ rtptheorapay = GST_RTP_THEORA_PAY (element);
+
+ switch (transition) {
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_theora_pay_cleanup (rtptheorapay);
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
+
+static void
+gst_rtp_theora_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpTheoraPay *rtptheorapay;
+
+ rtptheorapay = GST_RTP_THEORA_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ rtptheorapay->config_interval = g_value_get_uint (value);
+ break;
+ default:
+ break;
+ }
+}
+
+static void
+gst_rtp_theora_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpTheoraPay *rtptheorapay;
+
+ rtptheorapay = GST_RTP_THEORA_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ g_value_set_uint (value, rtptheorapay->config_interval);
+ break;
+ default:
+ break;
+ }
+}
diff --git a/gst/rtp/gstrtptheorapay.h b/gst/rtp/gstrtptheorapay.h
new file mode 100644
index 0000000000..d009364fe9
--- /dev/null
+++ b/gst/rtp/gstrtptheorapay.h
@@ -0,0 +1,84 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_THEORA_PAY_H__
+#define __GST_RTP_THEORA_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_THEORA_PAY \
+ (gst_rtp_theora_pay_get_type())
+#define GST_RTP_THEORA_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_THEORA_PAY,GstRtpTheoraPay))
+#define GST_RTP_THEORA_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_THEORA_PAY,GstRtpTheoraPayClass))
+#define GST_IS_RTP_THEORA_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_THEORA_PAY))
+#define GST_IS_RTP_THEORA_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_THEORA_PAY))
+
+typedef struct _GstRtpTheoraPay GstRtpTheoraPay;
+typedef struct _GstRtpTheoraPayClass GstRtpTheoraPayClass;
+
+struct _GstRtpTheoraPay
+{
+ GstRTPBasePayload payload;
+
+ /* the headers */
+ gboolean need_headers;
+ GList *headers;
+
+ /* queues of buffers along with some stats. */
+ GstBuffer *packet;
+ GList *packet_buffers;
+ guint payload_pos;
+ guint payload_left;
+ guint32 payload_ident;
+ guint8 payload_F;
+ guint8 payload_TDT;
+ guint payload_pkts;
+ GstClockTime payload_timestamp;
+ GstClockTime payload_duration;
+
+ /* config (re-sending) */
+ guint8 *config_data;
+ guint config_size;
+ guint config_extra_len;
+ guint config_interval;
+ GstClockTime last_config;
+
+ gint pixel_format;
+ gint width;
+ gint height;
+};
+
+struct _GstRtpTheoraPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_theora_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_THEORA_PAY_H__ */
diff --git a/gst/rtp/gstrtpulpfecdec.c b/gst/rtp/gstrtpulpfecdec.c
new file mode 100644
index 0000000000..cfc22294f3
--- /dev/null
+++ b/gst/rtp/gstrtpulpfecdec.c
@@ -0,0 +1,721 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+/**
+ * SECTION:element-rtpulpfecdec
+ * @short_description: Generic RTP Forward Error Correction (FEC) decoder
+ * @title: rtpulpfecdec
+ *
+ * Generic Forward Error Correction (FEC) decoder for Uneven Level
+ * Protection (ULP) as described in RFC 5109.
+ *
+ * It differs from the RFC in one important way, it multiplexes the
+ * FEC packets in the same sequence number as media packets. This is to be
+ * compatible with libwebrtc as used in Google Chrome and with Microsoft
+ * Lync / Skype for Business.
+ *
+ * This element will work in combination with an upstream #GstRtpStorage
+ * element and attempt to recover packets declared lost through custom
+ * 'GstRTPPacketLost' events, usually emitted by #GstRtpJitterBuffer.
+ *
+ * If no storage is provided using the #GstRtpUlpFecDec:storage
+ * property, it will try to get it from an element upstream.
+ *
+ * Additionally, the payload types of the protection packets *must* be
+ * provided to this element via its #GstRtpUlpFecDec:pt property.
+ *
+ * When using #GstRtpBin, this element should be inserted through the
+ * #GstRtpBin::request-fec-decoder signal.
+ *
+ * ## Example pipeline
+ *
+ * |[
+ * gst-launch-1.0 udpsrc port=8888 caps="application/x-rtp, payload=96, clock-rate=90000" ! rtpstorage size-time=220000000 ! rtpssrcdemux ! application/x-rtp, payload=96, clock-rate=90000, media=video, encoding-name=H264 ! rtpjitterbuffer do-lost=1 latency=200 ! rtpulpfecdec pt=122 ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
+ * ]| This example will receive a stream with FEC and try to reconstruct the packets.
+ *
+ * Example programs are available at
+ * <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecserver.rs>
+ * and
+ * <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecclient.rs>
+ *
+ * See also: #GstRtpUlpFecEnc, #GstRtpBin, #GstRtpStorage
+ * Since: 1.14
+ */
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/rtp/gstrtp-enumtypes.h>
+
+#include "gstrtpelements.h"
+#include "rtpulpfeccommon.h"
+#include "gstrtpulpfecdec.h"
+
+/* Plain RTP in, plain RTP out: the element is transparent for media packets
+ * and only intercepts packet-loss events. */
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+enum
+{
+ PROP_0,
+ PROP_PT,
+ PROP_STORAGE,
+ PROP_RECOVERED,
+ PROP_UNRECOVERED,
+ N_PROPERTIES
+};
+
+/* default FEC payload type; must be configured by the user via the "pt"
+ * property to match the stream's FEC packets */
+#define DEFAULT_FEC_PT 0
+
+static GParamSpec *klass_properties[N_PROPERTIES] = { NULL, };
+
+GST_DEBUG_CATEGORY (gst_rtp_ulpfec_dec_debug);
+#define GST_CAT_DEFAULT (gst_rtp_ulpfec_dec_debug)
+
+G_DEFINE_TYPE (GstRtpUlpFecDec, gst_rtp_ulpfec_dec, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpulpfecdec, "rtpulpfecdec",
+ GST_RANK_NONE, GST_TYPE_RTP_ULPFEC_DEC, rtp_element_init (plugin));
+
+/* Looks up the RtpUlpFecMapInfo stored at index @data (an integer packed
+ * with GUINT_TO_POINTER) in dec->info_arr. Indices rather than pointers are
+ * stored in info_fec/info_media because the array may be reallocated when
+ * it grows. */
+#define RTP_FEC_MAP_INFO_NTH(dec, data) (&g_array_index (\
+ ((GstRtpUlpFecDec *)dec)->info_arr, \
+ RtpUlpFecMapInfo, \
+ GPOINTER_TO_UINT(data)))
+
+/* GCompareDataFunc keeping self->info_media ordered by RTP sequence number.
+ * @a and @b are info_arr indices packed as pointers; @userdata is the
+ * GstRtpUlpFecDec. Uses gst_rtp_buffer_compare_seqnum so 16-bit seqnum
+ * wraparound is handled correctly. */
+static gint
+_compare_fec_map_info (gconstpointer a, gconstpointer b, gpointer userdata)
+{
+ guint16 aseq =
+ gst_rtp_buffer_get_seq (&RTP_FEC_MAP_INFO_NTH (userdata, a)->rtp);
+ guint16 bseq =
+ gst_rtp_buffer_get_seq (&RTP_FEC_MAP_INFO_NTH (userdata, b)->rtp);
+ return gst_rtp_buffer_compare_seqnum (bseq, aseq);
+}
+
+/* Begins one recovery attempt: maps every buffer from @buflist (as handed
+ * out by the upstream rtpstorage) into self->info_arr and partitions them
+ * by payload type into FEC packets (self->info_fec, valid ones only) and
+ * media packets (self->info_media, sorted by seqnum).
+ *
+ * If the "lost" packet with @lost_seq is itself found in the storage,
+ * self->lost_packet_from_storage is set and the loop stops early — no FEC
+ * decoding is needed in that case. Must be paired with
+ * gst_rtp_ulpfec_dec_stop(). */
+static void
+gst_rtp_ulpfec_dec_start (GstRtpUlpFecDec * self, GstBufferList * buflist,
+ guint8 fec_pt, guint16 lost_seq)
+{
+ guint fec_packets = 0;
+ gsize i;
+
+ /* previous attempt must have been cleaned up with _stop() */
+ g_assert (NULL == self->info_media);
+ g_assert (0 == self->info_fec->len);
+ g_assert (0 == self->info_arr->len);
+
+ g_array_set_size (self->info_arr, gst_buffer_list_length (buflist));
+
+ for (i = 0;
+ i < gst_buffer_list_length (buflist) && !self->lost_packet_from_storage;
+ ++i) {
+ GstBuffer *buffer = gst_buffer_list_get (buflist, i);
+ RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (self, i);
+
+ /* the ref is owned by info_arr and released by its clear func
+ * (rtp_ulpfec_map_info_unmap) when the array shrinks in _stop() */
+ if (!rtp_ulpfec_map_info_map (gst_buffer_ref (buffer), info))
+ g_assert_not_reached ();
+
+ if (fec_pt == gst_rtp_buffer_get_payload_type (&info->rtp)) {
+ GST_DEBUG_RTP_PACKET (self, "rtp header (fec)", &info->rtp);
+
+ ++fec_packets;
+ if (rtp_ulpfec_buffer_is_valid (&info->rtp)) {
+ GST_DEBUG_FEC_PACKET (self, &info->rtp);
+ g_ptr_array_add (self->info_fec, GUINT_TO_POINTER (i));
+ }
+ } else {
+ GST_LOG_RTP_PACKET (self, "rtp header (incoming)", &info->rtp);
+
+ if (lost_seq == gst_rtp_buffer_get_seq (&info->rtp)) {
+ GST_DEBUG_OBJECT (self, "Received lost packet from the storage");
+ g_list_free (self->info_media);
+ self->info_media = NULL;
+ self->lost_packet_from_storage = TRUE;
+ }
+ self->info_media =
+ g_list_insert_sorted_with_data (self->info_media,
+ GUINT_TO_POINTER (i), _compare_fec_map_info, self);
+ }
+ }
+ if (!self->lost_packet_from_storage) {
+ self->fec_packets_received += fec_packets;
+ /* rejected = seen with the FEC pt but failed rtp_ulpfec_buffer_is_valid */
+ self->fec_packets_rejected += fec_packets - self->info_fec->len;
+ }
+}
+
+/* Releases all per-recovery state built by gst_rtp_ulpfec_dec_start().
+ * Shrinking info_arr to 0 runs its clear func on each element, which unmaps
+ * and unrefs the buffers mapped in _start(). */
+static void
+gst_rtp_ulpfec_dec_stop (GstRtpUlpFecDec * self)
+{
+ g_array_set_size (self->info_arr, 0);
+ g_ptr_array_set_size (self->info_fec, 0);
+ g_list_free (self->info_media);
+ self->info_media = NULL;
+ self->lost_packet_from_storage = FALSE;
+ self->lost_packet_returned = FALSE;
+}
+
+/* Returns a 64-bit mask with a 1 bit for every media packet we currently
+ * hold, at the bit position corresponding to its seqnum offset from
+ * @fec_seq_base (same layout as the FEC packet's protection mask, so the
+ * two can be combined directly in _recover()). */
+static guint64
+gst_rtp_ulpfec_dec_get_media_buffers_mask (GstRtpUlpFecDec * self,
+ guint16 fec_seq_base)
+{
+ guint64 mask = 0;
+ GList *it;
+
+ for (it = self->info_media; it; it = it->next) {
+ RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (self, it->data);
+ mask |=
+ rtp_ulpfec_packet_mask_from_seqnum (gst_rtp_buffer_get_seq (&info->rtp),
+ fec_seq_base, TRUE);
+ }
+ return mask;
+}
+
+/* Sanity-checks the payload type of a freshly recovered packet: it is
+ * accepted if it matches the caps pt (@media_pt) or the pt of any media
+ * packet we have seen in this recovery round. Guards against garbage
+ * produced by XOR-ing the wrong packet set. */
+static gboolean
+gst_rtp_ulpfec_dec_is_recovered_pt_valid (GstRtpUlpFecDec * self, gint media_pt,
+ guint8 recovered_pt)
+{
+ GList *it;
+ if (media_pt == recovered_pt)
+ return TRUE;
+
+ for (it = self->info_media; it; it = it->next) {
+ RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (self, it->data);
+ if (gst_rtp_buffer_get_payload_type (&info->rtp) == recovered_pt)
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/* Recovers the single missing packet @seq protected by @info_fec by XOR-ing
+ * the FEC bitstring with the bitstrings of all present protected media
+ * packets (RFC 5109 recovery). On success returns the reconstructed RTP
+ * buffer (caller owns the ref), stores its payload type in @dst_pt, and also
+ * appends it to self->info_media so it can help recover further packets.
+ * Returns NULL if the result fails to map or has an implausible payload
+ * type. */
+static GstBuffer *
+gst_rtp_ulpfec_dec_recover_from_fec (GstRtpUlpFecDec * self,
+ RtpUlpFecMapInfo * info_fec, guint32 ssrc, gint media_pt, guint16 seq,
+ guint8 * dst_pt)
+{
+ guint64 fec_mask = rtp_ulpfec_buffer_get_mask (&info_fec->rtp);
+ gboolean fec_mask_long = rtp_ulpfec_buffer_get_fechdr (&info_fec->rtp)->L;
+ guint16 fec_seq_base = rtp_ulpfec_buffer_get_seq_base (&info_fec->rtp);
+ GstBuffer *ret;
+ GList *it;
+
+ /* seed the scratch bitstring with the FEC packet itself */
+ g_array_set_size (self->scratch_buf, 0);
+ rtp_buffer_to_ulpfec_bitstring (&info_fec->rtp, self->scratch_buf, TRUE,
+ fec_mask_long);
+
+ /* XOR in every protected media packet we hold; afterwards the scratch
+ * buffer contains the one missing packet's bitstring */
+ for (it = self->info_media; it; it = it->next) {
+ RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (self, it->data);
+ guint64 packet_mask =
+ rtp_ulpfec_packet_mask_from_seqnum (gst_rtp_buffer_get_seq (&info->rtp),
+ fec_seq_base, TRUE);
+
+ if (fec_mask & packet_mask) {
+ /* clear the bit so duplicates are not XOR-ed twice */
+ fec_mask ^= packet_mask;
+ rtp_buffer_to_ulpfec_bitstring (&info->rtp, self->scratch_buf, FALSE,
+ fec_mask_long);
+ }
+ }
+
+ ret =
+ rtp_ulpfec_bitstring_to_media_rtp_buffer (self->scratch_buf,
+ fec_mask_long, ssrc, seq);
+ if (ret) {
+ /* We are about to put recovered packet back in self->info_media to be able
+ * to reuse it later for recovery of other packets
+ **/
+ gint i = self->info_arr->len;
+ RtpUlpFecMapInfo *info;
+ guint8 recovered_pt;
+
+ g_array_set_size (self->info_arr, self->info_arr->len + 1);
+ info = RTP_FEC_MAP_INFO_NTH (self, i);
+
+ if (!rtp_ulpfec_map_info_map (gst_buffer_ref (ret), info)) {
+ GST_WARNING_OBJECT (self, "Invalid recovered packet");
+ goto recovered_packet_invalid;
+ }
+
+ recovered_pt = gst_rtp_buffer_get_payload_type (&info->rtp);
+ if (!gst_rtp_ulpfec_dec_is_recovered_pt_valid (self, media_pt,
+ recovered_pt)) {
+ GST_WARNING_OBJECT (self,
+ "Recovered packet has unexpected payload type (%u)", recovered_pt);
+ goto recovered_packet_invalid;
+ }
+
+ GST_DEBUG_RTP_PACKET (self, "rtp header (recovered)", &info->rtp);
+ self->info_media =
+ g_list_insert_sorted_with_data (self->info_media, GUINT_TO_POINTER (i),
+ _compare_fec_map_info, self);
+ *dst_pt = recovered_pt;
+ }
+ return ret;
+
+recovered_packet_invalid:
+ /* shrinking by one runs the clear func on the just-added entry */
+ g_array_set_size (self->info_arr, self->info_arr->len - 1);
+ gst_buffer_unref (ret);
+ return NULL;
+}
+
+/* Fast path taken when the "lost" packet was actually found in the storage
+ * (see _start()): return a ref to it directly instead of XOR-decoding.
+ * Returns it only once per recovery round (lost_packet_returned), NULL
+ * afterwards. */
+static GstBuffer *
+gst_rtp_ulpfec_dec_recover_from_storage (GstRtpUlpFecDec * self,
+ guint8 * dst_pt, guint16 * dst_seq)
+{
+ RtpUlpFecMapInfo *info;
+
+ if (self->lost_packet_returned)
+ return NULL;
+
+ /* _start() reduced info_media to exactly the found packet */
+ g_assert (g_list_length (self->info_media) == 1);
+
+ info = RTP_FEC_MAP_INFO_NTH (self, self->info_media->data);
+ *dst_seq = gst_rtp_buffer_get_seq (&info->rtp);
+ *dst_pt = gst_rtp_buffer_get_payload_type (&info->rtp);
+ self->lost_packet_returned = TRUE;
+ GST_DEBUG_RTP_PACKET (self, "rtp header (recovered)", &info->rtp);
+ return gst_buffer_ref (info->rtp.buffer);
+}
+
+/* Count-trailing-zeros for a 64-bit mask.
+ * __has_builtin only works with clang, so test compiler version for gcc
+ * (clang also defines __GNUC__ >= 4, so it takes the builtin path too).
+ * Intel compiler and MSVC probably have their own things as well.
+ * TODO: make sure we use builtin for clang as well */
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define rtp_ulpfec_ctz64 __builtin_ctzll
+#else
+/* Portable fallback. NOTE: returns -1 for mask == 0, whereas
+ * __builtin_ctzll(0) is undefined — callers must never pass 0 (and the
+ * callers in this file check the mask for non-zero first). */
+static inline gint
+rtp_ulpfec_ctz64_inline (guint64 mask)
+{
+ gint nth_bit = 0;
+
+ do {
+ if ((mask & 1))
+ return nth_bit;
+ mask = mask >> 1;
+ } while (++nth_bit < 64);
+
+ return -1; /* should not be reached, since mask must not be 0 */
+}
+
+#define rtp_ulpfec_ctz64 rtp_ulpfec_ctz64_inline
+#endif
+
+/* Attempts to recover one missing packet. Either returns the lost packet
+ * straight from the storage (fast path), or scans the collected FEC packets
+ * for one whose protection mask covers exactly one missing media packet —
+ * the only situation in which XOR recovery is possible — and reconstructs
+ * it. Returns a new buffer (seqnum in @dst_seq, pt in @dst_pt) or NULL. */
+static GstBuffer *
+gst_rtp_ulpfec_dec_recover (GstRtpUlpFecDec * self, guint32 ssrc, gint media_pt,
+ guint8 * dst_pt, guint16 * dst_seq)
+{
+ guint64 media_mask = 0;
+ gint media_mask_seq_base = -1;
+ gsize i;
+
+ if (self->lost_packet_from_storage)
+ return gst_rtp_ulpfec_dec_recover_from_storage (self, dst_pt, dst_seq);
+
+ /* Looking for a FEC packet which can be used for recovery */
+ for (i = 0; i < self->info_fec->len; ++i) {
+ RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (self,
+ g_ptr_array_index (self->info_fec, i));
+ guint16 seq_base = rtp_ulpfec_buffer_get_seq_base (&info->rtp);
+ guint64 fec_mask = rtp_ulpfec_buffer_get_mask (&info->rtp);
+ guint64 missing_packets_mask;
+
+ /* media_mask only depends on seq_base; recompute lazily */
+ if (media_mask_seq_base != (gint) seq_base) {
+ media_mask_seq_base = seq_base;
+ media_mask = gst_rtp_ulpfec_dec_get_media_buffers_mask (self, seq_base);
+ }
+
+ /* media_mask has 1s if packet exist.
+ * fec_mask is the mask of protected packets
+ * The statement below excludes existing packets from the protected. So
+ * we are left with 1s only for missing packets which can be recovered
+ * by this FEC packet. */
+ missing_packets_mask = fec_mask & (~media_mask);
+
+ /* Do we have any 1s? Checking if current FEC packet can be used for recovery */
+ if (0 != missing_packets_mask) {
+ guint trailing_zeros = rtp_ulpfec_ctz64 (missing_packets_mask);
+
+ /* Is it the only 1 in the mask? Only when exactly a single protected
+ * packet is missing can this FEC packet be used for recovery */
+ if (missing_packets_mask == (G_GUINT64_CONSTANT (1) << trailing_zeros)) {
+ GstBuffer *ret;
+
+ *dst_seq =
+ seq_base + (RTP_ULPFEC_SEQ_BASE_OFFSET_MAX (TRUE) - trailing_zeros);
+ ret =
+ gst_rtp_ulpfec_dec_recover_from_fec (self, info, ssrc, media_pt,
+ *dst_seq, dst_pt);
+ if (ret)
+ return ret;
+ }
+ }
+ }
+ return NULL;
+}
+
+/* Sink pad chain function. Every outgoing packet gets a locally generated,
+ * monotonically increasing seqnum (next_seqnum) — required because FEC
+ * packets are multiplexed into the same seqnum space as media, so upstream
+ * seqnums would have gaps. Also clears a pending DISCONT flag after a
+ * recovered packet was pushed (unset_discont_flag). If a previous push of
+ * a recovered packet failed, chain_return_val holds that error and incoming
+ * buffers are dropped. */
+static GstFlowReturn
+gst_rtp_ulpfec_dec_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+ GstRtpUlpFecDec *self = GST_RTP_ULPFEC_DEC (parent);
+
+ if (G_LIKELY (GST_FLOW_OK == self->chain_return_val)) {
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+ buf = gst_buffer_make_writable (buf);
+
+ if (G_UNLIKELY (self->unset_discont_flag)) {
+ self->unset_discont_flag = FALSE;
+ GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ gst_rtp_buffer_map (buf, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++);
+ gst_rtp_buffer_unmap (&rtp);
+
+ return gst_pad_push (self->srcpad, buf);
+ }
+
+ gst_buffer_unref (buf);
+ return self->chain_return_val;
+}
+
+/* Handles one lost packet (seqnum @seqnum): fetches candidate packets from
+ * the storage, runs the recovery loop until the lost packet itself is
+ * rebuilt (intermediate recovered packets are fed back into the storage),
+ * and pushes it downstream with the element's own seqnum.
+ *
+ * Returns TRUE if the loss event should still be forwarded (recovery
+ * failed), FALSE if the packet was recovered and pushed. */
+static gboolean
+gst_rtp_ulpfec_dec_handle_packet_loss (GstRtpUlpFecDec * self, guint16 seqnum,
+ GstClockTime timestamp, GstClockTime duration)
+{
+ gint caps_pt = self->have_caps_pt ? self->caps_pt : -1;
+ gboolean ret = TRUE;
+ GstBufferList *buflist =
+ rtp_storage_get_packets_for_recovery (self->storage, self->fec_pt,
+ self->caps_ssrc, seqnum);
+
+ if (buflist) {
+ GstBuffer *recovered_buffer = NULL;
+ guint16 recovered_seq = 0;
+ guint8 recovered_pt = 0;
+
+ gst_rtp_ulpfec_dec_start (self, buflist, self->fec_pt, seqnum);
+
+ /* keep recovering until nothing more can be rebuilt or we hit the
+ * packet we were asked for */
+ while (NULL != (recovered_buffer =
+ gst_rtp_ulpfec_dec_recover (self, self->caps_ssrc, caps_pt,
+ &recovered_pt, &recovered_seq))) {
+ if (seqnum == recovered_seq) {
+ GstBuffer *sent_buffer;
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+
+ recovered_buffer = gst_buffer_make_writable (recovered_buffer);
+ GST_BUFFER_PTS (recovered_buffer) = timestamp;
+ /* GST_BUFFER_DURATION (recovered_buffer) = duration;
+ * JB does not set the duration, so we will not too */
+
+ if (!self->lost_packet_from_storage)
+ rtp_storage_put_recovered_packet (self->storage,
+ recovered_buffer, recovered_pt, self->caps_ssrc, recovered_seq);
+
+ GST_DEBUG_OBJECT (self,
+ "Pushing recovered packet ssrc=0x%08x seq=%u %" GST_PTR_FORMAT,
+ self->caps_ssrc, seqnum, recovered_buffer);
+
+ /* the storage keeps recovered_buffer; push a deep copy so the
+ * stored packet keeps its original seqnum */
+ sent_buffer = gst_buffer_copy_deep (recovered_buffer);
+
+ if (self->lost_packet_from_storage)
+ gst_buffer_unref (recovered_buffer);
+
+ gst_rtp_buffer_map (sent_buffer, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_seq (&rtp, self->next_seqnum++);
+ gst_rtp_buffer_unmap (&rtp);
+
+ ret = FALSE;
+ self->unset_discont_flag = TRUE;
+ self->chain_return_val = gst_pad_push (self->srcpad, sent_buffer);
+ break;
+ }
+
+ /* a different protected packet was rebuilt: hand it to the storage so
+ * it can contribute to recovering the one we actually need */
+ if (!self->lost_packet_from_storage) {
+ rtp_storage_put_recovered_packet (self->storage,
+ recovered_buffer, recovered_pt, self->caps_ssrc, recovered_seq);
+ } else {
+ gst_buffer_unref (recovered_buffer);
+ }
+ }
+
+ gst_rtp_ulpfec_dec_stop (self);
+ gst_buffer_list_unref (buflist);
+ }
+
+ GST_DEBUG_OBJECT (self, "Packet lost ssrc=0x%08x seq=%u", self->caps_ssrc,
+ seqnum);
+
+ return ret;
+}
+
+/* Sink pad event handler.
+ *
+ * "GstRTPPacketLost" custom downstream events (from rtpjitterbuffer) trigger
+ * a recovery attempt from the upstream rtpstorage; when the packet is
+ * recovered the event is dropped (the packet was pushed instead), otherwise
+ * it is forwarded with a "might-have-been-fec" hint and its "seqnum" field
+ * removed. CAPS events are parsed for the stream ssrc / payload type.
+ *
+ * Fixes over the previous revision: the writable event was leaked on the
+ * "storage not found" error path (pad event functions own the event), and
+ * the query structure shadowed the event structure variable 's'. */
+static gboolean
+gst_rtp_ulpfec_dec_handle_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstRtpUlpFecDec *self = GST_RTP_ULPFEC_DEC (parent);
+ gboolean forward = TRUE;
+
+ GST_LOG_OBJECT (self, "Received event %" GST_PTR_FORMAT, event);
+
+ if (GST_FLOW_OK == self->chain_return_val &&
+ GST_EVENT_CUSTOM_DOWNSTREAM == GST_EVENT_TYPE (event) &&
+ gst_event_has_name (event, "GstRTPPacketLost")) {
+ guint seqnum;
+ GstClockTime timestamp, duration;
+ GstStructure *s;
+
+ event = gst_event_make_writable (event);
+ s = gst_event_writable_structure (event);
+
+ g_assert (self->have_caps_ssrc);
+
+ if (self->storage == NULL) {
+ /* lazily discover the storage via a custom query upstream */
+ GstQuery *q = gst_query_new_custom (GST_QUERY_CUSTOM,
+ gst_structure_new_empty ("GstRtpStorage"));
+
+ if (gst_pad_peer_query (self->sinkpad, q)) {
+ const GstStructure *qs = gst_query_get_structure (q);
+
+ if (gst_structure_has_field_typed (qs, "storage", G_TYPE_OBJECT)) {
+ gst_structure_get (qs, "storage", G_TYPE_OBJECT, &self->storage,
+ NULL);
+ }
+ }
+ gst_query_unref (q);
+ }
+
+ if (self->storage == NULL) {
+ GST_ELEMENT_WARNING (self, STREAM, FAILED, ("Internal storage not found"),
+ ("You need to add rtpstorage element upstream from rtpulpfecdec."));
+ /* we own the event; unref it instead of leaking it */
+ gst_event_unref (event);
+ return FALSE;
+ }
+
+ if (!gst_structure_get (s,
+ "seqnum", G_TYPE_UINT, &seqnum,
+ "timestamp", G_TYPE_UINT64, &timestamp,
+ "duration", G_TYPE_UINT64, &duration, NULL))
+ g_assert_not_reached ();
+
+ forward =
+ gst_rtp_ulpfec_dec_handle_packet_loss (self, seqnum, timestamp,
+ duration);
+
+ if (forward) {
+ /* tell downstream the gap might have been a FEC packet, and drop the
+ * seqnum since we rewrite seqnums anyway */
+ gst_structure_remove_field (s, "seqnum");
+ gst_structure_set (s, "might-have-been-fec", G_TYPE_BOOLEAN, TRUE, NULL);
+ ++self->packets_unrecovered;
+ } else {
+ ++self->packets_recovered;
+ }
+
+ GST_DEBUG_OBJECT (self, "Unrecovered / Recovered: %lu / %lu",
+ (gulong) self->packets_unrecovered, (gulong) self->packets_recovered);
+ } else if (GST_EVENT_CAPS == GST_EVENT_TYPE (event)) {
+ GstCaps *caps;
+ gboolean have_caps_pt = FALSE;
+ gboolean have_caps_ssrc = FALSE;
+ guint caps_ssrc = 0;
+ gint caps_pt = 0;
+
+ gst_event_parse_caps (event, &caps);
+ have_caps_ssrc =
+ gst_structure_get_uint (gst_caps_get_structure (caps, 0), "ssrc",
+ &caps_ssrc);
+ have_caps_pt =
+ gst_structure_get_int (gst_caps_get_structure (caps, 0), "payload",
+ &caps_pt);
+
+ if (self->have_caps_ssrc != have_caps_ssrc || self->caps_ssrc != caps_ssrc)
+ GST_DEBUG_OBJECT (self, "SSRC changed %u, 0x%08x -> %u, 0x%08x",
+ self->have_caps_ssrc, self->caps_ssrc, have_caps_ssrc, caps_ssrc);
+ if (self->have_caps_pt != have_caps_pt || self->caps_pt != caps_pt)
+ GST_DEBUG_OBJECT (self, "PT changed %u, %u -> %u, %u",
+ self->have_caps_pt, self->caps_pt, have_caps_pt, caps_pt);
+
+ self->have_caps_ssrc = have_caps_ssrc;
+ self->have_caps_pt = have_caps_pt;
+ self->caps_ssrc = caps_ssrc;
+ self->caps_pt = caps_pt;
+ }
+
+ if (forward)
+ return gst_pad_push_event (self->srcpad, event);
+ gst_event_unref (event);
+ return TRUE;
+}
+
+/* Instance init: creates the proxying sink/src pads, installs chain and
+ * event handlers, and allocates the recovery scratch structures.
+ * next_seqnum is randomized so the rewritten seqnum space does not collide
+ * predictably with the original one. */
+static void
+gst_rtp_ulpfec_dec_init (GstRtpUlpFecDec * self)
+{
+ self->srcpad = gst_pad_new_from_static_template (&srctemplate, "src");
+ self->sinkpad = gst_pad_new_from_static_template (&sinktemplate, "sink");
+ GST_PAD_SET_PROXY_CAPS (self->sinkpad);
+ GST_PAD_SET_PROXY_ALLOCATION (self->sinkpad);
+ gst_pad_set_chain_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_dec_chain));
+ gst_pad_set_event_function (self->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_dec_handle_sink_event));
+
+ gst_element_add_pad (GST_ELEMENT (self), self->srcpad);
+ gst_element_add_pad (GST_ELEMENT (self), self->sinkpad);
+
+ self->fec_pt = DEFAULT_FEC_PT;
+
+ self->next_seqnum = g_random_int_range (0, G_MAXINT16);
+
+ self->chain_return_val = GST_FLOW_OK;
+ self->have_caps_ssrc = FALSE;
+ self->caps_ssrc = 0;
+ self->info_fec = g_ptr_array_new ();
+ /* cleared entries automatically unmap/unref their buffers */
+ self->info_arr = g_array_new (FALSE, TRUE, sizeof (RtpUlpFecMapInfo));
+ g_array_set_clear_func (self->info_arr,
+ (GDestroyNotify) rtp_ulpfec_map_info_unmap);
+ self->scratch_buf = g_array_new (FALSE, TRUE, sizeof (guint8));
+}
+
+/* GObject dispose: logs final statistics and releases the storage and the
+ * recovery scratch structures.
+ *
+ * Per the GObject contract dispose may be invoked more than once, so every
+ * freed pointer is cleared (g_clear_object / NULL-guards) to make repeated
+ * invocations safe — the previous revision double-freed the arrays in that
+ * case. */
+static void
+gst_rtp_ulpfec_dec_dispose (GObject * obj)
+{
+ GstRtpUlpFecDec *self = GST_RTP_ULPFEC_DEC (obj);
+
+ GST_INFO_OBJECT (self,
+ " ssrc=0x%08x pt=%u"
+ " packets_recovered=%" G_GSIZE_FORMAT
+ " packets_unrecovered=%" G_GSIZE_FORMAT,
+ self->caps_ssrc, self->caps_pt,
+ self->packets_recovered, self->packets_unrecovered);
+
+ /* unrefs and NULLs in one step; safe on repeated dispose */
+ g_clear_object (&self->storage);
+
+ if (self->info_fec) {
+ /* any in-flight recovery must have been finished with _stop() */
+ g_assert (NULL == self->info_media);
+ g_assert (0 == self->info_fec->len);
+ g_assert (0 == self->info_arr->len);
+
+ if (self->fec_packets_received) {
+ GST_INFO_OBJECT (self,
+ " fec_packets_received=%" G_GSIZE_FORMAT
+ " fec_packets_rejected=%" G_GSIZE_FORMAT
+ " packets_rejected=%" G_GSIZE_FORMAT,
+ self->fec_packets_received,
+ self->fec_packets_rejected, self->packets_rejected);
+ }
+
+ g_ptr_array_free (self->info_fec, TRUE);
+ self->info_fec = NULL;
+ g_array_free (self->info_arr, TRUE);
+ self->info_arr = NULL;
+ g_array_free (self->scratch_buf, TRUE);
+ self->scratch_buf = NULL;
+ }
+
+ G_OBJECT_CLASS (gst_rtp_ulpfec_dec_parent_class)->dispose (obj);
+}
+
+/* Standard GObject property setter ("pt" and "storage"); "storage" takes a
+ * ref on the new object after dropping the previous one. */
+static void
+gst_rtp_ulpfec_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpUlpFecDec *self = GST_RTP_ULPFEC_DEC (object);
+
+ switch (prop_id) {
+ case PROP_PT:
+ self->fec_pt = g_value_get_uint (value);
+ break;
+ case PROP_STORAGE:
+ if (self->storage)
+ g_object_unref (self->storage);
+ self->storage = g_value_get_object (value);
+ if (self->storage)
+ g_object_ref (self->storage);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Standard GObject property getter; the gsize counters are exposed as
+ * guint properties, truncating on 32-bit overflow. */
+static void
+gst_rtp_ulpfec_dec_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpUlpFecDec *self = GST_RTP_ULPFEC_DEC (object);
+
+ switch (prop_id) {
+ case PROP_PT:
+ g_value_set_uint (value, self->fec_pt);
+ break;
+ case PROP_STORAGE:
+ g_value_set_object (value, self->storage);
+ break;
+ case PROP_RECOVERED:
+ g_value_set_uint (value, (guint) self->packets_recovered);
+ break;
+ case PROP_UNRECOVERED:
+ g_value_set_uint (value, (guint) self->packets_unrecovered);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Class init: registers pad templates, element metadata, GObject vfuncs and
+ * the four properties. The trailing asserts sanity-check the ctz64
+ * implementation selected at compile time (both boundary bits). */
+static void
+gst_rtp_ulpfec_dec_class_init (GstRtpUlpFecDecClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_ulpfec_dec_debug,
+ "rtpulpfecdec", 0, "RTP FEC Decoder");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&srctemplate));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sinktemplate));
+
+ gst_element_class_set_static_metadata (element_class,
+ "RTP FEC Decoder",
+ "Codec/Depayloader/Network/RTP",
+ "Decodes RTP FEC (RFC5109)", "Mikhail Fludkov <misha@pexip.com>");
+
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_dec_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_dec_get_property);
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_dec_dispose);
+
+ klass_properties[PROP_PT] = g_param_spec_uint ("pt", "pt",
+ "FEC packets payload type", 0, 127,
+ DEFAULT_FEC_PT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS);
+ klass_properties[PROP_STORAGE] =
+ g_param_spec_object ("storage", "RTP storage", "RTP storage",
+ G_TYPE_OBJECT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS);
+ klass_properties[PROP_RECOVERED] =
+ g_param_spec_uint ("recovered", "recovered",
+ "The number of recovered packets", 0, G_MAXUINT, 0,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS);
+ klass_properties[PROP_UNRECOVERED] =
+ g_param_spec_uint ("unrecovered", "unrecovered",
+ "The number of unrecovered packets", 0, G_MAXUINT, 0,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS);
+
+ g_object_class_install_properties (gobject_class, N_PROPERTIES,
+ klass_properties);
+
+ g_assert (rtp_ulpfec_ctz64 (G_GUINT64_CONSTANT (0x1)) == 0);
+ g_assert (rtp_ulpfec_ctz64 (G_GUINT64_CONSTANT (0x8000000000000000)) == 63);
+}
diff --git a/gst/rtp/gstrtpulpfecdec.h b/gst/rtp/gstrtpulpfecdec.h
new file mode 100644
index 0000000000..f9b10b0689
--- /dev/null
+++ b/gst/rtp/gstrtpulpfecdec.h
@@ -0,0 +1,84 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __GST_RTP_ULPFEC_DEC_H__
+#define __GST_RTP_ULPFEC_DEC_H__
+
+#include <gst/gst.h>
+
+#include "rtpstorage.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_ULPFEC_DEC \
+ (gst_rtp_ulpfec_dec_get_type())
+#define GST_RTP_ULPFEC_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_ULPFEC_DEC,GstRtpUlpFecDec))
+#define GST_RTP_ULPFEC_DEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_ULPFEC_DEC,GstRtpUlpFecDecClass))
+#define GST_IS_RTP_ULPFEC_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_ULPFEC_DEC))
+#define GST_IS_RTP_ULPFEC_DEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_ULPFEC_DEC))
+
+typedef struct _GstRtpUlpFecDec GstRtpUlpFecDec;
+typedef struct _GstRtpUlpFecDecClass GstRtpUlpFecDecClass;
+
+struct _GstRtpUlpFecDecClass {
+ GstElementClass parent_class;
+};
+
+/* ULP FEC (RFC 5109) decoder element instance. */
+struct _GstRtpUlpFecDec {
+ GstElement parent;
+ GstPad *srcpad;
+ GstPad *sinkpad;
+
+ /* properties */
+ guint8 fec_pt;                  /* payload type carrying FEC packets */
+ RtpStorage *storage;            /* upstream rtpstorage (owned ref) */
+ gsize packets_recovered;
+ gsize packets_unrecovered;
+
+ /* internal stuff */
+ GstFlowReturn chain_return_val; /* sticky error from pushing a recovered packet */
+ gboolean unset_discont_flag;    /* clear DISCONT on the next media buffer */
+ gboolean have_caps_ssrc;
+ gboolean have_caps_pt;
+ guint32 caps_ssrc;              /* ssrc / payload taken from caps */
+ guint8 caps_pt;
+ GList *info_media;              /* media-packet indices, sorted by seqnum */
+ GPtrArray *info_fec;            /* valid FEC-packet indices */
+ GArray *info_arr;               /* RtpUlpFecMapInfo per mapped buffer */
+ GArray *scratch_buf;            /* XOR workspace for recovery */
+ gboolean lost_packet_from_storage;
+ gboolean lost_packet_returned;
+ guint16 next_seqnum;            /* outgoing (rewritten) seqnum counter */
+
+ /* stats */
+ gsize fec_packets_received;
+ gsize fec_packets_rejected;
+ gsize packets_rejected;
+};
+
+GType gst_rtp_ulpfec_dec_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_ULPFEC_DEC_H__ */
diff --git a/gst/rtp/gstrtpulpfecenc.c b/gst/rtp/gstrtpulpfecenc.c
new file mode 100644
index 0000000000..3862f8f8d8
--- /dev/null
+++ b/gst/rtp/gstrtpulpfecenc.c
@@ -0,0 +1,714 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+/**
+ * SECTION:element-rtpulpfecenc
+ * @short_description: Generic RTP Forward Error Correction (FEC) encoder
+ * @title: rtpulpfecenc
+ *
+ * Generic Forward Error Correction (FEC) encoder using Uneven Level
+ * Protection (ULP) as described in RFC 5109.
+ *
+ * It differs from the RFC in one important way: it multiplexes the
+ * FEC packets into the same sequence number space as the media packets.
+ * This is to be compatible with libwebrtc as used in Google Chrome and
+ * with Microsoft Lync / Skype for Business.
+ *
+ * Be warned that after using this element, it is no longer possible to know if
+ * there is a gap in the media stream based on the sequence numbers as the FEC
+ * packets become interleaved with the media packets.
+ *
+ * This element will insert protection packets in any RTP stream, which
+ * can then be used on the receiving side to recover lost packets.
+ *
+ * This element rewrites packets' seqnums, which means that when combined
+ * with retransmission elements such as #GstRtpRtxSend, it *must* be
+ * placed upstream of those, otherwise retransmission requests will request
+ * incorrect seqnums.
+ *
+ * A payload type for the protection packets *must* be specified, different
+ * from the payload type of the protected packets, with the #GstRtpUlpFecEnc:pt
+ * property.
+ *
+ * The marker bit of RTP packets is used to determine sets of packets to
+ * protect as a unit, in order to modulate the level of protection. This
+ * behaviour can be disabled with #GstRtpUlpFecEnc:multipacket, but should
+ * be left enabled for video streams.
+ *
+ * The level of protection can be configured with two properties,
+ * #GstRtpUlpFecEnc:percentage and #GstRtpUlpFecEnc:percentage-important,
+ * the element will determine which percentage to use for a given set of
+ * packets based on the presence of the #GST_BUFFER_FLAG_NON_DROPPABLE
+ * flag, upstream payloaders are expected to set this flag on "important"
+ * packets such as those making up a keyframe.
+ *
+ * The percentage is expressed not in terms of bytes, but in terms of
+ * packets, this for implementation convenience. The drawback with this
+ * approach is that when using a percentage different from 100 %, and a
+ * low bitrate, entire frames may be contained in a single packet, leading
+ * to some packets not being protected, thus lowering the overall recovery
+ * rate on the receiving side.
+ *
+ * When using #GstRtpBin, this element should be inserted through the
+ * #GstRtpBin::request-fec-encoder signal.
+ *
+ * ## Example pipeline
+ *
+ * |[
+ * gst-launch-1.0 videotestsrc ! x264enc ! video/x-h264, profile=baseline ! rtph264pay pt=96 ! rtpulpfecenc percentage=100 pt=122 ! udpsink port=8888
+ * ]| This example will send a stream protected with FEC, allowing the receiver to recover lost packets.
+ *
+ * Example programs are available at
+ * <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecserver.rs>
+ * and
+ * <https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/blob/master/examples/src/bin/rtpfecclient.rs>
+ *
+ * See also: #GstRtpUlpFecDec, #GstRtpBin
+ * Since: 1.14
+ */
+
+#include <gst/rtp/gstrtp-enumtypes.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <string.h>
+
+#include "gstrtpelements.h"
+#include "rtpulpfeccommon.h"
+#include "gstrtpulpfecenc.h"
+
+static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-rtp"));
+
+static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-rtp"));
+
+/* fec->pt == UNDEF_PT means no FEC payload type was configured: the
+ * chain function then acts as a pass-through */
+#define UNDEF_PT 255
+
+#define DEFAULT_PT UNDEF_PT
+#define DEFAULT_PCT 0
+#define DEFAULT_PCT_IMPORTANT 0
+#define DEFAULT_MULTIPACKET TRUE
+
+/* Upper bound on buffered media packets: as many as one (long-mask) FEC
+ * packet can protect */
+#define PACKETS_BUF_MAX_LENGTH (RTP_ULPFEC_PROTECTED_PACKETS_MAX(TRUE))
+
+GST_DEBUG_CATEGORY (gst_rtp_ulpfec_enc_debug);
+#define GST_CAT_DEFAULT (gst_rtp_ulpfec_enc_debug)
+
+G_DEFINE_TYPE (GstRtpUlpFecEnc, gst_rtp_ulpfec_enc, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpulpfecenc, "rtpulpfecenc",
+    GST_RANK_NONE, GST_TYPE_RTP_ULPFEC_ENC, rtp_element_init (plugin));
+
+enum
+{
+  PROP_0,
+  PROP_PT,
+  PROP_MULTIPACKET,
+  PROP_PROTECTED,
+  PROP_PERCENTAGE,
+  PROP_PERCENTAGE_IMPORTANT,
+};
+
+/* Fetch the i-th RtpUlpFecMapInfo from a stream context's info_arr */
+#define RTP_FEC_MAP_INFO_NTH(ctx, data) (&g_array_index (\
+    ((GstRtpUlpFecEncStreamCtx *)ctx)->info_arr, \
+    RtpUlpFecMapInfo, \
+    GPOINTER_TO_UINT(data)))
+
+/* Map the buffered media packets in @packets into ctx->info_arr (oldest
+ * first) and arm the generation of @fec_packets FEC packets over them. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_start (GstRtpUlpFecEncStreamCtx * ctx,
+    GQueue * packets, guint fec_packets)
+{
+  GList *it = packets->tail;
+  guint i;
+
+  g_array_set_size (ctx->info_arr, packets->length);
+
+  /* The queue holds the newest packet at the head, so walk it from the
+   * tail to store the packets oldest-first. */
+  for (i = 0; i < packets->length; ++i) {
+    GstBuffer *buffer = it->data;
+    RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (ctx, i);
+
+    if (!rtp_ulpfec_map_info_map (gst_buffer_ref (buffer), info))
+      g_assert_not_reached ();
+
+    GST_LOG_RTP_PACKET (ctx->parent, "rtp header (incoming)", &info->rtp);
+
+    it = g_list_previous (it);
+  }
+
+  ctx->fec_packets = fec_packets;
+  ctx->fec_packet_idx = 0;
+}
+
+/* Drop all mapped packet info (info_arr's clear func unmaps each entry)
+ * and reset the FEC generation state. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_stop (GstRtpUlpFecEncStreamCtx * ctx)
+{
+  g_array_set_size (ctx->info_arr, 0);
+  g_array_set_size (ctx->scratch_buf, 0);
+
+  ctx->fec_packets = 0;
+  ctx->fec_packet_idx = 0;
+}
+
+/* Compute which slice of the buffered media packets the next FEC packet
+ * protects: the index range [@dst_start, @dst_end], the base seqnum and
+ * the protection mask relative to that base. */
+static void
+    gst_rtp_ulpfec_enc_stream_ctx_get_protection_parameters
+    (GstRtpUlpFecEncStreamCtx * ctx, guint16 * dst_seq_base, guint64 * dst_mask,
+    guint * dst_start, guint * dst_end)
+{
+  guint media_packets = ctx->info_arr->len;
+  /* Partition the media packets as evenly as possible across the
+   * requested number of FEC packets */
+  guint start = ctx->fec_packet_idx * media_packets / ctx->fec_packets;
+  guint end =
+      ((ctx->fec_packet_idx + 1) * media_packets + ctx->fec_packets -
+      1) / ctx->fec_packets - 1;
+  guint len = end - start + 1;
+  guint64 mask = 0;
+  guint16 seq_base = 0;
+  guint i;
+
+  /* Clamp to the maximum number of packets a (long) mask can protect */
+  len = MIN (len, RTP_ULPFEC_PROTECTED_PACKETS_MAX (TRUE));
+  end = start + len - 1;
+
+  for (i = start; i <= end; ++i) {
+    RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (ctx, i);
+    guint16 seq = gst_rtp_buffer_get_seq (&info->rtp);
+
+    if (mask) {
+      gint diff = gst_rtp_buffer_compare_seqnum (seq_base, seq);
+      /* @seq precedes the current base (seqnum wrap): rebase the mask on
+       * the earlier seqnum and shift what we have so far */
+      if (diff < 0) {
+        seq_base = seq;
+        mask = mask >> (-diff);
+      }
+      mask |= rtp_ulpfec_packet_mask_from_seqnum (seq, seq_base, TRUE);
+    } else {
+      seq_base = seq;
+      mask = rtp_ulpfec_packet_mask_from_seqnum (seq, seq_base, TRUE);
+    }
+  }
+
+  *dst_start = start;
+  *dst_end = end;
+  *dst_mask = mask;
+  *dst_seq_base = seq_base;
+}
+
+/* Build the next FEC packet over the slice of buffered media packets
+ * selected by ..._get_protection_parameters().  Returns a newly created
+ * FEC RTP buffer, or NULL once all requested FEC packets have been
+ * produced. */
+static GstBuffer *
+gst_rtp_ulpfec_enc_stream_ctx_protect (GstRtpUlpFecEncStreamCtx * ctx,
+    guint8 pt, guint16 seq, guint32 timestamp, guint32 ssrc)
+{
+  guint end = 0;
+  guint start = 0;
+  guint64 fec_mask = 0;
+  guint16 seq_base = 0;
+  GstBuffer *ret;
+  guint64 tmp_mask;
+  gboolean fec_mask_long;
+  guint i;
+
+  if (ctx->fec_packet_idx >= ctx->fec_packets)
+    return NULL;
+
+  g_array_set_size (ctx->scratch_buf, 0);
+  gst_rtp_ulpfec_enc_stream_ctx_get_protection_parameters (ctx, &seq_base,
+      &fec_mask, &start, &end);
+
+  /* Fold every packet covered by the mask into the scratch bitstring,
+   * clearing its mask bit as it is consumed */
+  tmp_mask = fec_mask;
+  fec_mask_long = rtp_ulpfec_mask_is_long (fec_mask);
+  for (i = start; i <= end; ++i) {
+    RtpUlpFecMapInfo *info = RTP_FEC_MAP_INFO_NTH (ctx, i);
+    guint64 packet_mask =
+        rtp_ulpfec_packet_mask_from_seqnum (gst_rtp_buffer_get_seq (&info->rtp),
+        seq_base,
+        TRUE);
+
+    if (tmp_mask & packet_mask) {
+      tmp_mask ^= packet_mask;
+      rtp_buffer_to_ulpfec_bitstring (&info->rtp, ctx->scratch_buf, FALSE,
+          fec_mask_long);
+    }
+  }
+
+  /* Every bit of the protection mask must have been consumed */
+  g_assert (tmp_mask == 0);
+  ret =
+      rtp_ulpfec_bitstring_to_fec_rtp_buffer (ctx->scratch_buf, seq_base,
+      fec_mask_long, fec_mask, FALSE, pt, seq, timestamp, ssrc);
+  ++ctx->fec_packet_idx;
+  return ret;
+}
+
+/* Trace the current FEC budget state. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_report_budget (GstRtpUlpFecEncStreamCtx * ctx)
+{
+  GST_TRACE_OBJECT (ctx->parent, "budget = %f budget_important = %f",
+      ctx->budget, ctx->budget_important);
+}
+
+/* Grow the FEC budget for one received media packet.  The budget is the
+ * fractional number of FEC packets currently owed to the stream; the
+ * important budget additionally grows for packets flagged
+ * GST_BUFFER_FLAG_NON_DROPPABLE. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_increment_budget (GstRtpUlpFecEncStreamCtx * ctx,
+    GstBuffer * buffer)
+{
+  /* FEC disabled: drain any positive leftover budget, and let a negative
+   * budget (overshoot from earlier pushes) recover towards zero */
+  if (ctx->percentage == 0 && ctx->percentage_important == 0) {
+    if (ctx->budget > 0) {
+      ctx->budget = 0;
+      ctx->budget_important = 0;
+    }
+    if (ctx->budget < 0)
+      ctx->budget += ctx->budget_inc;
+
+    return;
+  }
+  ctx->budget += ctx->budget_inc;
+
+  if (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_NON_DROPPABLE)) {
+    ctx->budget_important += ctx->budget_inc_important;
+  }
+
+  gst_rtp_ulpfec_enc_stream_ctx_report_budget (ctx);
+}
+
+/* Pay for @fec_packets_num FEC packets that were just produced. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_decrement_budget (GstRtpUlpFecEncStreamCtx * ctx,
+    guint fec_packets_num)
+{
+  if (ctx->budget_important >= 1.)
+    ctx->budget_important -= fec_packets_num;
+  ctx->budget -= fec_packets_num;
+
+  gst_rtp_ulpfec_enc_stream_ctx_report_budget (ctx);
+}
+
+/* Number of FEC packets to generate right now: the important budget
+ * takes precedence once it amounts to a whole packet. */
+static guint
+gst_rtp_ulpfec_enc_stream_ctx_get_fec_packets_num (GstRtpUlpFecEncStreamCtx *
+    ctx)
+{
+  g_assert_cmpfloat (ctx->budget_important, >=, 0.);
+
+  if (ctx->budget_important >= 1.)
+    return ctx->budget_important;
+  return ctx->budget > 0. ? (guint) ctx->budget : 0;
+}
+
+/* Drop all buffered media packets. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_free_packets_buf (GstRtpUlpFecEncStreamCtx * ctx)
+{
+  while (ctx->packets_buf.length)
+    gst_buffer_unref (g_queue_pop_tail (&ctx->packets_buf));
+}
+
+/* Store @rtp's buffer at the head (newest end) of the packet buffer,
+ * evicting the oldest packet when the buffer is full.  The evicted GList
+ * link is recycled as the new head to avoid an alloc/free pair. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_prepend_to_fec_buffer (GstRtpUlpFecEncStreamCtx *
+    ctx, GstRTPBuffer * rtp, guint buf_max_size)
+{
+  GList *new_head;
+  if (ctx->packets_buf.length == buf_max_size) {
+    new_head = g_queue_pop_tail_link (&ctx->packets_buf);
+  } else {
+    new_head = g_list_alloc ();
+  }
+
+  /* gst_buffer_replace() unrefs the evicted buffer, if any */
+  gst_buffer_replace ((GstBuffer **) & new_head->data, rtp->buffer);
+  g_queue_push_head_link (&ctx->packets_buf, new_head);
+
+  g_assert_cmpint (ctx->packets_buf.length, <=, buf_max_size);
+}
+
+/* Generate and push all currently-owed FEC packets over the buffered
+ * media packets.  FEC packets take consecutive seqnums starting at @seq
+ * and copy the newest media packet's timestamps; seqnum_offset grows by
+ * the number pushed so later media packets are shifted past them. */
+static GstFlowReturn
+gst_rtp_ulpfec_enc_stream_ctx_push_fec_packets (GstRtpUlpFecEncStreamCtx * ctx,
+    guint8 pt, guint16 seq, guint32 timestamp, guint32 ssrc)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  guint fec_packets_num =
+      gst_rtp_ulpfec_enc_stream_ctx_get_fec_packets_num (ctx);
+
+  if (fec_packets_num) {
+    guint fec_packets_pushed = 0;
+    GstBuffer *latest_packet = ctx->packets_buf.head->data;
+    GstBuffer *fec = NULL;
+
+    gst_rtp_ulpfec_enc_stream_ctx_start (ctx, &ctx->packets_buf,
+        fec_packets_num);
+
+    while (NULL != (fec =
+            gst_rtp_ulpfec_enc_stream_ctx_protect (ctx, pt,
+                seq + fec_packets_pushed, timestamp, ssrc))) {
+      /* FEC packets carry the GstBuffer timestamps of the newest media
+       * packet in the protected set */
+      gst_buffer_copy_into (fec, latest_packet, GST_BUFFER_COPY_TIMESTAMPS, 0,
+          -1);
+
+      ret = gst_pad_push (ctx->srcpad, fec);
+      if (GST_FLOW_OK == ret)
+        ++fec_packets_pushed;
+      else
+        break;
+    }
+
+    gst_rtp_ulpfec_enc_stream_ctx_stop (ctx);
+
+    g_assert_cmpint (fec_packets_pushed, <=, fec_packets_num);
+
+    ctx->num_packets_protected += ctx->packets_buf.length;
+    ctx->num_packets_fec += fec_packets_pushed;
+    ctx->seqnum_offset += fec_packets_pushed;
+    ctx->seqnum += fec_packets_pushed;
+  }
+
+  gst_rtp_ulpfec_enc_stream_ctx_decrement_budget (ctx, fec_packets_num);
+  return ret;
+}
+
+/* Record an incoming media packet and decide whether FEC is due.  Sets
+ * @dst_push_fec when FEC packets should follow this media packet, and
+ * @dst_empty_packet_buffer when the buffered set is complete and must be
+ * dropped afterwards. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_cache_packet (GstRtpUlpFecEncStreamCtx * ctx,
+    GstRTPBuffer * rtp, gboolean * dst_empty_packet_buffer,
+    gboolean * dst_push_fec)
+{
+  if (ctx->multipacket) {
+    /* Buffer packets until the RTP marker bit closes the set */
+    gst_rtp_ulpfec_enc_stream_ctx_prepend_to_fec_buffer (ctx, rtp,
+        PACKETS_BUF_MAX_LENGTH);
+    gst_rtp_ulpfec_enc_stream_ctx_increment_budget (ctx, rtp->buffer);
+
+    *dst_empty_packet_buffer = gst_rtp_buffer_get_marker (rtp);
+    *dst_push_fec = *dst_empty_packet_buffer;
+  } else {
+    gboolean push_fec;
+
+    /* Single-packet mode: one FEC packet protecting exactly one media
+     * packet, emitted every fec_nth received packets */
+    gst_rtp_ulpfec_enc_stream_ctx_prepend_to_fec_buffer (ctx, rtp, 1);
+
+    push_fec = ctx->fec_nth == 0 ? FALSE :
+        0 == (ctx->num_packets_received % ctx->fec_nth);
+
+    ctx->budget = push_fec ? 1 : 0;
+    ctx->budget_important = 0;
+
+    *dst_push_fec = push_fec;
+    *dst_empty_packet_buffer = FALSE;
+  }
+}
+
+/* Apply (or re-apply) the element's property values to a stream
+ * context, deriving the per-packet budget increments. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_configure (GstRtpUlpFecEncStreamCtx * ctx,
+    guint pt, guint percentage, guint percentage_important,
+    gboolean multipacket)
+{
+  ctx->pt = pt;
+  ctx->percentage = percentage;
+  ctx->percentage_important = percentage_important;
+  ctx->multipacket = multipacket;
+
+  /* Media packets between FEC packets in single-packet mode; 0 disables
+   * that path entirely */
+  ctx->fec_nth = percentage ? 100 / percentage : 0;
+  /* Assign unconditionally: when percentage is reconfigured to 0 the
+   * budget must stop growing, otherwise a stale budget_inc from a
+   * previous configuration would keep scheduling FEC packets */
+  ctx->budget_inc = percentage / 100.;
+  /* Important packets are never protected less than ordinary ones */
+  ctx->budget_inc_important = percentage > percentage_important ?
+      ctx->budget_inc : percentage_important / 100.;
+}
+
+/* Create a per-SSRC stream context.  @parent is borrowed for logging;
+ * free with gst_rtp_ulpfec_enc_stream_ctx_free(). */
+static GstRtpUlpFecEncStreamCtx *
+gst_rtp_ulpfec_enc_stream_ctx_new (guint ssrc,
+    GstElement * parent, GstPad * srcpad,
+    guint pt, guint percentage, guint percentage_important,
+    gboolean multipacket)
+{
+  GstRtpUlpFecEncStreamCtx *ctx = g_new0 (GstRtpUlpFecEncStreamCtx, 1);
+
+  ctx->ssrc = ssrc;
+  ctx->parent = parent;
+  ctx->srcpad = srcpad;
+
+  ctx->seqnum = g_random_int_range (0, G_MAXUINT16 / 2);
+
+  /* Each element owns a mapped GstRTPBuffer; the clear func unmaps it */
+  ctx->info_arr = g_array_new (FALSE, TRUE, sizeof (RtpUlpFecMapInfo));
+  g_array_set_clear_func (ctx->info_arr,
+      (GDestroyNotify) rtp_ulpfec_map_info_unmap);
+  ctx->scratch_buf = g_array_new (FALSE, TRUE, sizeof (guint8));
+  gst_rtp_ulpfec_enc_stream_ctx_configure (ctx, pt,
+      percentage, percentage_important, multipacket);
+
+  return ctx;
+}
+
+/* Destroy a stream context created by gst_rtp_ulpfec_enc_stream_ctx_new(),
+ * logging the achieved FEC overhead. */
+static void
+gst_rtp_ulpfec_enc_stream_ctx_free (GstRtpUlpFecEncStreamCtx * ctx)
+{
+  if (ctx->num_packets_received) {
+    GST_INFO_OBJECT (ctx->parent, "Actual FEC overhead is %4.2f%% (%u/%u)\n",
+        ctx->num_packets_fec * (double) 100. / ctx->num_packets_received,
+        ctx->num_packets_fec, ctx->num_packets_received);
+  }
+  gst_rtp_ulpfec_enc_stream_ctx_free_packets_buf (ctx);
+
+  g_assert (0 == ctx->info_arr->len);
+  g_array_free (ctx->info_arr, TRUE);
+  g_array_free (ctx->scratch_buf, TRUE);
+  /* ctx was allocated with g_new0(), so it must be released with
+   * g_free(); g_slice_free1() pairs with the slice allocator instead
+   * and is a mismatched allocator here */
+  g_free (ctx);
+}
+
+/* Handle one incoming media packet: shift its seqnum by the accumulated
+ * offset (making room for previously inserted FEC packets), push it, and
+ * push any FEC packets that are now due. */
+static GstFlowReturn
+gst_rtp_ulpfec_enc_stream_ctx_process (GstRtpUlpFecEncStreamCtx * ctx,
+    GstBuffer * buffer)
+{
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  GstFlowReturn ret;
+  gboolean push_fec = FALSE;
+  gboolean empty_packet_buffer = FALSE;
+
+  ctx->num_packets_received++;
+
+  if (ctx->seqnum_offset > 0) {
+    /* The seqnum is rewritten in place, so a writable map is needed */
+    buffer = gst_buffer_make_writable (buffer);
+    if (!gst_rtp_buffer_map (buffer,
+            GST_MAP_READWRITE | GST_RTP_BUFFER_MAP_FLAG_SKIP_PADDING, &rtp))
+      g_assert_not_reached ();
+    gst_rtp_buffer_set_seq (&rtp,
+        gst_rtp_buffer_get_seq (&rtp) + ctx->seqnum_offset);
+  } else {
+    if (!gst_rtp_buffer_map (buffer,
+            GST_MAP_READ | GST_RTP_BUFFER_MAP_FLAG_SKIP_PADDING, &rtp))
+      g_assert_not_reached ();
+  }
+
+  gst_rtp_ulpfec_enc_stream_ctx_cache_packet (ctx, &rtp, &empty_packet_buffer,
+      &push_fec);
+
+  if (push_fec) {
+    /* FEC packets directly follow this media packet in the seqnum space */
+    guint32 fec_timestamp = gst_rtp_buffer_get_timestamp (&rtp);
+    guint32 fec_ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+    guint16 fec_seq = gst_rtp_buffer_get_seq (&rtp) + 1;
+
+    gst_rtp_buffer_unmap (&rtp);
+
+    ret = gst_pad_push (ctx->srcpad, buffer);
+    if (GST_FLOW_OK == ret)
+      ret =
+          gst_rtp_ulpfec_enc_stream_ctx_push_fec_packets (ctx, ctx->pt, fec_seq,
+          fec_timestamp, fec_ssrc);
+  } else {
+    gst_rtp_buffer_unmap (&rtp);
+    ret = gst_pad_push (ctx->srcpad, buffer);
+  }
+
+  if (empty_packet_buffer)
+    gst_rtp_ulpfec_enc_stream_ctx_free_packets_buf (ctx);
+
+  return ret;
+}
+
+/* Look up the per-SSRC stream context, creating it on first use with the
+ * element's current property values.  The hash table is guarded by the
+ * object lock.  (Note: "aquire" misspelling kept, function is
+ * file-local.) */
+static GstRtpUlpFecEncStreamCtx *
+gst_rtp_ulpfec_enc_aquire_ctx (GstRtpUlpFecEnc * fec, guint ssrc)
+{
+  GstRtpUlpFecEncStreamCtx *ctx;
+
+  GST_OBJECT_LOCK (fec);
+  ctx = g_hash_table_lookup (fec->ssrc_to_ctx, GUINT_TO_POINTER (ssrc));
+  if (ctx == NULL) {
+    ctx =
+        gst_rtp_ulpfec_enc_stream_ctx_new (ssrc, GST_ELEMENT_CAST (fec),
+        fec->srcpad, fec->pt, fec->percentage,
+        fec->percentage_important, fec->multipacket);
+    g_hash_table_insert (fec->ssrc_to_ctx, GUINT_TO_POINTER (ssrc), ctx);
+  }
+  GST_OBJECT_UNLOCK (fec);
+
+  return ctx;
+}
+
+/* Sink pad chain function: dispatch the buffer to its per-SSRC stream
+ * context.  With no FEC payload type configured the element is a plain
+ * pass-through. */
+static GstFlowReturn
+gst_rtp_ulpfec_enc_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstRtpUlpFecEnc *fec = GST_RTP_ULPFEC_ENC (parent);
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  GstFlowReturn ret;
+  guint ssrc = 0;
+  GstRtpUlpFecEncStreamCtx *ctx;
+
+  if (fec->pt == UNDEF_PT)
+    return gst_pad_push (fec->srcpad, buffer);
+
+  /* FIXME: avoid this additional mapping of the buffer to get the
+     ssrc! */
+  if (!gst_rtp_buffer_map (buffer,
+          GST_MAP_READ | GST_RTP_BUFFER_MAP_FLAG_SKIP_PADDING, &rtp)) {
+    g_assert_not_reached ();
+  }
+  ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+  gst_rtp_buffer_unmap (&rtp);
+
+  ctx = gst_rtp_ulpfec_enc_aquire_ctx (fec, ssrc);
+
+  ret = gst_rtp_ulpfec_enc_stream_ctx_process (ctx, buffer);
+
+  /* FIXME: does not work for multiple ssrcs */
+  fec->num_packets_protected = ctx->num_packets_protected;
+
+  return ret;
+}
+
+/* GHashTable foreach callback: push the element's current property
+ * values down into one per-SSRC stream context. */
+static void
+gst_rtp_ulpfec_enc_configure_ctx (gpointer key, gpointer value,
+    gpointer user_data)
+{
+  GstRtpUlpFecEnc *fec = user_data;
+  GstRtpUlpFecEncStreamCtx *ctx = value;
+
+  gst_rtp_ulpfec_enc_stream_ctx_configure (ctx, fec->pt,
+      fec->percentage, fec->percentage_important, fec->multipacket);
+}
+
+/* GObject property setter.  Every change is propagated immediately to
+ * all existing per-SSRC contexts under the object lock. */
+static void
+gst_rtp_ulpfec_enc_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpUlpFecEnc *fec = GST_RTP_ULPFEC_ENC (object);
+
+  switch (prop_id) {
+    case PROP_PT:
+      fec->pt = g_value_get_uint (value);
+      break;
+    case PROP_MULTIPACKET:
+      fec->multipacket = g_value_get_boolean (value);
+      break;
+    case PROP_PERCENTAGE:
+      fec->percentage = g_value_get_uint (value);
+      break;
+    case PROP_PERCENTAGE_IMPORTANT:
+      fec->percentage_important = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+
+  GST_OBJECT_LOCK (fec);
+  g_hash_table_foreach (fec->ssrc_to_ctx, gst_rtp_ulpfec_enc_configure_ctx,
+      fec);
+  GST_OBJECT_UNLOCK (fec);
+}
+
+/* GObject property getter. */
+static void
+gst_rtp_ulpfec_enc_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpUlpFecEnc *fec = GST_RTP_ULPFEC_ENC (object);
+  switch (prop_id) {
+    case PROP_PT:
+      g_value_set_uint (value, fec->pt);
+      break;
+    case PROP_PROTECTED:
+      g_value_set_uint (value, fec->num_packets_protected);
+      break;
+    case PROP_PERCENTAGE:
+      g_value_set_uint (value, fec->percentage);
+      break;
+    case PROP_PERCENTAGE_IMPORTANT:
+      g_value_set_uint (value, fec->percentage_important);
+      break;
+    case PROP_MULTIPACKET:
+      g_value_set_boolean (value, fec->multipacket);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject dispose: drop the per-SSRC contexts (the hash table's value
+ * destroy notify frees each one).  dispose may run more than once, so
+ * clear the pointer to keep the destroy idempotent. */
+static void
+gst_rtp_ulpfec_enc_dispose (GObject * obj)
+{
+  GstRtpUlpFecEnc *fec = GST_RTP_ULPFEC_ENC (obj);
+
+  g_clear_pointer (&fec->ssrc_to_ctx, g_hash_table_destroy);
+
+  G_OBJECT_CLASS (gst_rtp_ulpfec_enc_parent_class)->dispose (obj);
+}
+
+/* Instance init: create the always src/sink pads and the per-SSRC
+ * context map. */
+static void
+gst_rtp_ulpfec_enc_init (GstRtpUlpFecEnc * fec)
+{
+  fec->srcpad = gst_pad_new_from_static_template (&srctemplate, "src");
+  gst_element_add_pad (GST_ELEMENT (fec), fec->srcpad);
+
+  fec->sinkpad = gst_pad_new_from_static_template (&sinktemplate, "sink");
+  GST_PAD_SET_PROXY_CAPS (fec->sinkpad);
+  GST_PAD_SET_PROXY_ALLOCATION (fec->sinkpad);
+  gst_pad_set_chain_function (fec->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_enc_chain));
+  gst_element_add_pad (GST_ELEMENT (fec), fec->sinkpad);
+
+  /* SSRCs are used directly as keys (NULL hash/equal == direct) */
+  fec->ssrc_to_ctx = g_hash_table_new_full (NULL, NULL, NULL,
+      (GDestroyNotify) gst_rtp_ulpfec_enc_stream_ctx_free);
+}
+
+/* Class init: register pad templates, element metadata, GObject vfuncs
+ * and the element's properties. */
+static void
+gst_rtp_ulpfec_enc_class_init (GstRtpUlpFecEncClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_ulpfec_enc_debug, "rtpulpfecenc", 0,
+      "FEC encoder element");
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&srctemplate));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&sinktemplate));
+
+  gst_element_class_set_static_metadata (element_class,
+      "RTP FEC Encoder",
+      "Codec/Payloader/Network/RTP",
+      "Encodes RTP FEC (RFC5109)", "Mikhail Fludkov <misha@pexip.com>");
+
+  gobject_class->set_property =
+      GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_enc_set_property);
+  gobject_class->get_property =
+      GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_enc_get_property);
+  gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_rtp_ulpfec_enc_dispose);
+
+  /* Default DEFAULT_PT (== UNDEF_PT, 255) leaves FEC disabled */
+  g_object_class_install_property (gobject_class, PROP_PT,
+      g_param_spec_uint ("pt", "payload type",
+          "The payload type of FEC packets", 0, 255, DEFAULT_PT,
+          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MULTIPACKET,
+      g_param_spec_boolean ("multipacket", "Multipacket",
+          "Apply FEC on multiple packets", DEFAULT_MULTIPACKET,
+          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_PERCENTAGE,
+      g_param_spec_uint ("percentage", "Percentage",
+          "FEC overhead percentage for the whole stream", 0, 100, DEFAULT_PCT,
+          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_PERCENTAGE_IMPORTANT,
+      g_param_spec_uint ("percentage-important", "Percentage important",
+          "FEC overhead percentage for important packets",
+          0, 100, DEFAULT_PCT_IMPORTANT,
+          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_PROTECTED,
+      g_param_spec_uint ("protected", "Protected",
+          "Count of protected packets", 0, G_MAXUINT32, 0,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+}
diff --git a/gst/rtp/gstrtpulpfecenc.h b/gst/rtp/gstrtpulpfecenc.h
new file mode 100644
index 0000000000..885c6ad61d
--- /dev/null
+++ b/gst/rtp/gstrtpulpfecenc.h
@@ -0,0 +1,99 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __GST_FEC_H__
+#define __GST_FEC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_ULPFEC_ENC \
+ (gst_rtp_ulpfec_enc_get_type())
+#define GST_RTP_ULPFEC_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_ULPFEC_ENC,GstRtpUlpFecEnc))
+#define GST_RTP_ULPFEC_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_ULPFEC_ENC,GstRtpUlpFecEncClass))
+#define GST_IS_RTP_ULPFEC_ENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_ULPFEC_ENC))
+#define GST_IS_RTP_ULPFEC_ENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_ULPFEC_ENC))
+
+typedef struct _GstRtpUlpFecEnc GstRtpUlpFecEnc;
+typedef struct _GstRtpUlpFecEncClass GstRtpUlpFecEncClass;
+
+struct _GstRtpUlpFecEncClass {
+  GstElementClass parent_class;
+};
+
+/* RFC 5109 ULP FEC encoder element instance */
+struct _GstRtpUlpFecEnc {
+  GstElement parent;
+  GstPad *srcpad;
+  GstPad *sinkpad;
+
+  /* SSRC -> GstRtpUlpFecEncStreamCtx; guarded by the object lock */
+  GHashTable *ssrc_to_ctx;
+
+  /* properties */
+  guint pt;
+  guint32 ssrc;
+  guint percentage;
+  guint percentage_important;
+  gboolean multipacket;
+  guint num_packets_protected;
+};
+
+/* Per-SSRC encoding state */
+typedef struct {
+  guint ssrc;
+
+  GstElement *parent;           /* owning element, borrowed, for logging */
+  GstPad *srcpad;               /* element's src pad, borrowed */
+
+  /* settings, mirrored from the element's properties */
+  guint pt;
+  guint percentage;
+  guint percentage_important;
+  gboolean multipacket;
+  gboolean mux_seq;             /* NOTE(review): unused in the .c — confirm */
+
+  guint num_packets_protected;
+  guint16 seqnum;               /* advanced per FEC packet pushed;
+                                 * NOTE(review): not used to set outgoing
+                                 * seqnums — confirm intent */
+  guint seqnum_offset;          /* shift applied to outgoing media seqnums */
+  guint num_packets_received;
+  guint num_packets_fec;
+  guint fec_nth;                /* single-packet mode: FEC every nth packet */
+  GQueue packets_buf;           /* buffered media packets, newest at head */
+
+  /* fractional FEC-packet budgets and their per-packet increments */
+  gdouble budget;
+  gdouble budget_inc;
+  gdouble budget_important;
+  gdouble budget_inc_important;
+
+  GArray *info_arr;             /* mapped RtpUlpFecMapInfo while encoding */
+  GArray *scratch_buf;          /* FEC bitstring scratch space */
+
+  guint fec_packets;            /* FEC packets to produce for current set */
+  guint fec_packet_idx;         /* index of the next FEC packet to build */
+} GstRtpUlpFecEncStreamCtx;
+
+GType gst_rtp_ulpfec_enc_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_FEC_H__ */
diff --git a/gst/rtp/gstrtputils.c b/gst/rtp/gstrtputils.c
new file mode 100644
index 0000000000..4d86fb2208
--- /dev/null
+++ b/gst/rtp/gstrtputils.c
@@ -0,0 +1,153 @@
+/* GStreamer
+ * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "gstrtputils.h"
+
+/* State for foreach_metadata_copy() */
+typedef struct
+{
+  GstElement *element;          /* for debug output only */
+  GstBuffer *outbuf;            /* destination buffer */
+  GQuark copy_tag;              /* the single tag allowed to be copied */
+} CopyMetaData;
+
+/* Tag quarks used to filter metas.
+ * NOTE(review): defined here but not initialized in this file —
+ * presumably set up at plugin init; confirm. */
+GQuark rtp_quark_meta_tag_video;
+GQuark rtp_quark_meta_tag_audio;
+
+/* gst_buffer_foreach_meta() callback: copy *meta into data->outbuf when
+ * it has a transform function and is either untagged or tagged with
+ * exactly the one data->copy_tag. */
+static gboolean
+foreach_metadata_copy (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
+{
+  CopyMetaData *data = user_data;
+  GstElement *element = data->element;
+  GstBuffer *outbuf = data->outbuf;
+  GQuark copy_tag = data->copy_tag;
+  const GstMetaInfo *info = (*meta)->info;
+  const gchar *const *tags = gst_meta_api_type_get_tags (info->api);
+
+  if (info->transform_func && (!tags || !tags[0] || (copy_tag != 0
+              && g_strv_length ((gchar **) tags) == 1
+              && gst_meta_api_type_has_tag (info->api, copy_tag)))) {
+    GstMetaTransformCopy copy_data = { FALSE, 0, -1 };
+    GST_DEBUG_OBJECT (element, "copy metadata %s", g_type_name (info->api));
+    /* simply copy then */
+    info->transform_func (outbuf, *meta, inbuf,
+        _gst_meta_transform_copy, &copy_data);
+  } else {
+    GST_DEBUG_OBJECT (element, "not copying metadata %s",
+        g_type_name (info->api));
+  }
+
+  return TRUE;
+}
+
+/* TODO: Should probably make copy_tag an array at some point */
+/* Copy metas from @inbuf to @outbuf: a meta is copied when it has a
+ * transform function and is either untagged or tagged with only
+ * @copy_tag. */
+void
+gst_rtp_copy_meta (GstElement * element, GstBuffer * outbuf, GstBuffer * inbuf,
+    GQuark copy_tag)
+{
+  CopyMetaData data = { element, outbuf, copy_tag };
+
+  gst_buffer_foreach_meta (inbuf, foreach_metadata_copy, &data);
+}
+
+/* Convenience wrapper: copy video-tagged (and untagged) metas. */
+void
+gst_rtp_copy_video_meta (gpointer element, GstBuffer * outbuf,
+    GstBuffer * inbuf)
+{
+  gst_rtp_copy_meta (element, outbuf, inbuf, rtp_quark_meta_tag_video);
+}
+
+/* Convenience wrapper: copy audio-tagged (and untagged) metas. */
+void
+gst_rtp_copy_audio_meta (gpointer element, GstBuffer * outbuf,
+    GstBuffer * inbuf)
+{
+  gst_rtp_copy_meta (element, outbuf, inbuf, rtp_quark_meta_tag_audio);
+}
+
+/* State for foreach_metadata_drop() */
+typedef struct
+{
+  GstElement *element;          /* for debug output only */
+  GQuark keep_tag;              /* the single tag allowed to survive */
+} DropMetaData;
+
+/* gst_buffer_foreach_meta() callback: keep a meta only when it is
+ * untagged or tagged with exactly the one @keep_tag; setting *meta to
+ * NULL removes it from the buffer. */
+static gboolean
+foreach_metadata_drop (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
+{
+  DropMetaData *data = user_data;
+  GstElement *element = data->element;
+  GQuark keep_tag = data->keep_tag;
+  const GstMetaInfo *info = (*meta)->info;
+  const gchar *const *tags = gst_meta_api_type_get_tags (info->api);
+
+  if (!tags || !tags[0] || (keep_tag != 0
+          && g_strv_length ((gchar **) tags) == 1
+          && gst_meta_api_type_has_tag (info->api, keep_tag))) {
+    GST_DEBUG_OBJECT (element, "keeping metadata %s", g_type_name (info->api));
+  } else {
+    GST_DEBUG_OBJECT (element, "dropping metadata %s", g_type_name (info->api));
+    *meta = NULL;
+  }
+
+  return TRUE;
+}
+
+/* TODO: Should probably make keep_tag an array at some point */
+/* Drop all metas from @buf except untagged ones and those tagged with
+ * only @keep_tag. */
+void
+gst_rtp_drop_meta (GstElement * element, GstBuffer * buf, GQuark keep_tag)
+{
+  DropMetaData data = { element, keep_tag };
+
+  gst_buffer_foreach_meta (buf, foreach_metadata_drop, &data);
+}
+
+/* Convenience wrapper: drop all but audio-tagged/untagged metas. */
+void
+gst_rtp_drop_non_audio_meta (gpointer element, GstBuffer * buf)
+{
+  gst_rtp_drop_meta (element, buf, rtp_quark_meta_tag_audio);
+}
+
+/* Convenience wrapper: drop all but video-tagged/untagged metas. */
+void
+gst_rtp_drop_non_video_meta (gpointer element, GstBuffer * buf)
+{
+  gst_rtp_drop_meta (element, buf, rtp_quark_meta_tag_video);
+}
+
+/* Stolen from bad/gst/mpegtsdemux/payloader_parsers.c */
+/* Variable-length Exp-Golomb parsing according to H.265 spec section 9.2.
+ * Returns FALSE when the reader runs out of bits. */
+gboolean
+gst_rtp_read_golomb (GstBitReader * br, guint32 * value)
+{
+  /* leading_zeros starts at (guint8) -1 == 255 and wraps to 0 on the
+   * iteration that consumes the terminating 1 bit, so after the loop it
+   * holds only the number of leading zero bits. */
+  guint8 b, leading_zeros = -1;
+  *value = 1;
+
+  /* Count leading zeros; *value accumulates 2^(leading_zeros + 1) */
+  for (b = 0; !b; leading_zeros++) {
+    if (!gst_bit_reader_get_bits_uint8 (br, &b, 1))
+      return FALSE;
+    *value *= 2;
+  }
+
+  /* Exp-Golomb: value = 2^leading_zeros - 1 + suffix bits */
+  *value = (*value >> 1) - 1;
+  if (leading_zeros > 0) {
+    guint32 tmp = 0;
+    if (!gst_bit_reader_get_bits_uint32 (br, &tmp, leading_zeros))
+      return FALSE;
+    *value += tmp;
+  }
+
+  return TRUE;
+}
diff --git a/gst/rtp/gstrtputils.h b/gst/rtp/gstrtputils.h
new file mode 100644
index 0000000000..d5195f2133
--- /dev/null
+++ b/gst/rtp/gstrtputils.h
@@ -0,0 +1,54 @@
+/* GStreamer
+ * Copyright (C) 2015 Sebastian Dröge <sebastian@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_UTILS_H__
+#define __GST_RTP_UTILS_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbitreader.h>
+
+G_BEGIN_DECLS
+
+G_GNUC_INTERNAL
+void gst_rtp_copy_meta (GstElement * element, GstBuffer *outbuf, GstBuffer *inbuf, GQuark copy_tag);
+
+G_GNUC_INTERNAL
+void gst_rtp_copy_audio_meta (gpointer element, GstBuffer *outbuf, GstBuffer *inbuf);
+
+G_GNUC_INTERNAL
+void gst_rtp_copy_video_meta (gpointer element, GstBuffer *outbuf, GstBuffer *inbuf);
+
+G_GNUC_INTERNAL
+void gst_rtp_drop_meta (GstElement * element, GstBuffer *buf, GQuark keep_tag);
+
+G_GNUC_INTERNAL
+void gst_rtp_drop_non_audio_meta (gpointer element, GstBuffer * buf);
+
+G_GNUC_INTERNAL
+void gst_rtp_drop_non_video_meta (gpointer element, GstBuffer * buf);
+
+G_GNUC_INTERNAL
+gboolean gst_rtp_read_golomb (GstBitReader * br, guint32 * value);
+
+G_GNUC_INTERNAL extern GQuark rtp_quark_meta_tag_video;
+G_GNUC_INTERNAL extern GQuark rtp_quark_meta_tag_audio;
+
+G_END_DECLS
+
+#endif /* __GST_RTP_UTILS_H__ */
diff --git a/gst/rtp/gstrtpvorbisdepay.c b/gst/rtp/gstrtpvorbisdepay.c
new file mode 100644
index 0000000000..053e647958
--- /dev/null
+++ b/gst/rtp/gstrtpvorbisdepay.c
@@ -0,0 +1,707 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/tag/tag.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include <string.h>
+#include "gstrtpelements.h"
+#include "gstrtpvorbisdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpvorbisdepay_debug);
+#define GST_CAT_DEFAULT (rtpvorbisdepay_debug)
+
+/* references:
+ * http://www.rfc-editor.org/rfc/rfc5215.txt
+ */
+
+static GstStaticPadTemplate gst_rtp_vorbis_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"VORBIS\""
+ /* All required parameters
+ *
+ * "encoding-params = (string) <num channels>"
+ * "configuration = (string) ANY"
+ */
+ )
+ );
+
+static GstStaticPadTemplate gst_rtp_vorbis_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-vorbis")
+ );
+
+#define gst_rtp_vorbis_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVorbisDepay, gst_rtp_vorbis_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvorbisdepay, "rtpvorbisdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_VORBIS_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_vorbis_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_vorbis_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+
+static void gst_rtp_vorbis_depay_finalize (GObject * object);
+
+static GstStateChangeReturn gst_rtp_vorbis_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+/* Class init: hook up finalize/change_state, the depayloader vfuncs
+ * (process_rtp_packet, set_caps), pad templates and element metadata. */
+static void
+gst_rtp_vorbis_depay_class_init (GstRtpVorbisDepayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;
+
+  gobject_class->finalize = gst_rtp_vorbis_depay_finalize;
+
+  gstelement_class->change_state = gst_rtp_vorbis_depay_change_state;
+
+  gstrtpbasedepayload_class->process_rtp_packet = gst_rtp_vorbis_depay_process;
+  gstrtpbasedepayload_class->set_caps = gst_rtp_vorbis_depay_setcaps;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_vorbis_depay_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_vorbis_depay_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP Vorbis depayloader", "Codec/Depayloader/Network/RTP",
+      "Extracts Vorbis Audio from RTP packets (RFC 5215)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpvorbisdepay_debug, "rtpvorbisdepay", 0,
+      "Vorbis RTP Depayloader");
+}
+
+/* Instance init: create the adapter used to reassemble fragmented
+ * Vorbis packets in the process vfunc. */
+static void
+gst_rtp_vorbis_depay_init (GstRtpVorbisDepay * rtpvorbisdepay)
+{
+  rtpvorbisdepay->adapter = gst_adapter_new ();
+}
+
+/* Free one stored configuration: unref all header buffers it owns,
+ * then the struct itself. */
+static void
+free_config (GstRtpVorbisConfig * conf)
+{
+  g_list_free_full (conf->headers, (GDestroyNotify) gst_buffer_unref);
+  g_free (conf);
+}
+
+/* Drop every stored configuration; called on PAUSED->READY.
+ * NOTE(review): the name presumably means "idents" (codebook idents). */
+static void
+free_indents (GstRtpVorbisDepay * rtpvorbisdepay)
+{
+  g_list_free_full (rtpvorbisdepay->configs, (GDestroyNotify) free_config);
+  rtpvorbisdepay->configs = NULL;
+}
+
+/* Finalize: release the reassembly adapter and chain up.
+ * (configs are already freed in change_state on PAUSED->READY.) */
+static void
+gst_rtp_vorbis_depay_finalize (GObject * object)
+{
+  GstRtpVorbisDepay *rtpvorbisdepay = GST_RTP_VORBIS_DEPAY (object);
+
+  g_object_unref (rtpvorbisdepay->adapter);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Return TRUE if a configuration with @ident has already been parsed
+ * and stored in the configs list. */
+static gboolean
+gst_rtp_vorbis_depay_has_ident (GstRtpVorbisDepay * rtpvorbisdepay,
+    guint32 ident)
+{
+  GList *walk;
+
+  for (walk = rtpvorbisdepay->configs; walk; walk = g_list_next (walk)) {
+    GstRtpVorbisConfig *conf = (GstRtpVorbisConfig *) walk->data;
+
+    if (conf->ident == ident)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Parse a packed-headers configuration blob and store one
+ * GstRtpVorbisConfig (ident + list of header buffers) per new ident.
+ * Takes ownership of @confbuf in all paths; returns FALSE when the
+ * blob is truncated. */
+static gboolean
+gst_rtp_vorbis_depay_parse_configuration (GstRtpVorbisDepay * rtpvorbisdepay,
+    GstBuffer * confbuf)
+{
+  GstBuffer *buf;
+  guint32 num_headers;
+  GstMapInfo map;
+  guint8 *data;
+  gsize size;
+  guint offset;                 /* byte offset into confbuf, used for copy_region */
+  gint i, j;
+
+  gst_buffer_map (confbuf, &map, GST_MAP_READ);
+  data = map.data;
+  size = map.size;
+
+  GST_DEBUG_OBJECT (rtpvorbisdepay, "config size %" G_GSIZE_FORMAT, size);
+
+  /* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                     Number of packed headers                  |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          Packed header                        |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          Packed header                        |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          ....                                 |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   */
+  if (size < 4)
+    goto too_small;
+
+  num_headers = GST_READ_UINT32_BE (data);
+  size -= 4;
+  data += 4;
+  offset = 4;
+
+  GST_DEBUG_OBJECT (rtpvorbisdepay, "have %u headers", num_headers);
+
+  /*  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                   Ident                       | length       ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..              | n. of headers |    length1    |    length2   ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..              |             Identification Header            ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * .................................................................
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                    |         Comment Header                 ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * .................................................................
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                        Comment Header                        |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                          Setup Header                        ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * .................................................................
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                         Setup Header                         |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   */
+  for (i = 0; i < num_headers; i++) {
+    guint32 ident;
+    guint16 length;
+    guint8 n_headers, b;
+    GstRtpVorbisConfig *conf;
+    guint *h_sizes;
+    guint extra = 1;            /* bytes consumed by the variable-length size fields */
+
+    if (size < 6)
+      goto too_small;
+
+    /* 24-bit ident, 16-bit packed-headers length, 8-bit header count */
+    ident = (data[0] << 16) | (data[1] << 8) | data[2];
+    length = (data[3] << 8) | data[4];
+    n_headers = data[5];
+    size -= 6;
+    data += 6;
+    offset += 6;
+
+    GST_DEBUG_OBJECT (rtpvorbisdepay,
+        "header %d, ident 0x%08x, length %u, left %" G_GSIZE_FORMAT, i, ident,
+        length, size);
+
+    /* FIXME check if we already got this ident */
+
+    /* length might also include count of following size fields */
+    if (size < length && size + 1 != length)
+      goto too_small;
+
+    /* already parsed this ident earlier: skip over its payload */
+    if (gst_rtp_vorbis_depay_has_ident (rtpvorbisdepay, ident)) {
+      size -= length;
+      data += length;
+      offset += length;
+      continue;
+    }
+
+    /* read header sizes we read 2 sizes, the third size (for which we allocate
+     * space) must be derived from the total packed header length. */
+    h_sizes = g_newa (guint, n_headers + 1);
+    for (j = 0; j < n_headers; j++) {
+      guint h_size;
+
+      /* base128 varint: 7 bits per byte, high bit set on continuation */
+      h_size = 0;
+      do {
+        if (size < 1)
+          goto too_small;
+        b = *data++;
+        offset++;
+        extra++;
+        size--;
+        h_size = (h_size << 7) | (b & 0x7f);
+      } while (b & 0x80);
+      GST_DEBUG_OBJECT (rtpvorbisdepay, "headers %d: size: %u", j, h_size);
+
+      if (length < h_size)
+        goto too_small;
+
+      h_sizes[j] = h_size;
+      length -= h_size;
+    }
+    /* last header length is the remaining space */
+    GST_DEBUG_OBJECT (rtpvorbisdepay, "last header size: %u", length);
+    h_sizes[j] = length;
+
+    GST_DEBUG_OBJECT (rtpvorbisdepay, "preparing headers");
+    conf = g_new0 (GstRtpVorbisConfig, 1);
+    conf->ident = ident;
+
+    for (j = 0; j <= n_headers; j++) {
+      guint h_size;
+
+      h_size = h_sizes[j];
+      if (size < h_size) {
+        if (j != n_headers || size + extra != h_size) {
+          free_config (conf);
+          goto too_small;
+        } else {
+          /* otherwise means that overall length field contained total length,
+           * including extra fields */
+          h_size -= extra;
+        }
+      }
+
+      GST_DEBUG_OBJECT (rtpvorbisdepay, "reading header %d, size %u", j,
+          h_size);
+
+      /* copy_region so each header buffer shares memory with confbuf */
+      buf = gst_buffer_copy_region (confbuf, GST_BUFFER_COPY_ALL, offset,
+          h_size);
+      conf->headers = g_list_append (conf->headers, buf);
+      offset += h_size;
+      size -= h_size;
+    }
+    rtpvorbisdepay->configs = g_list_append (rtpvorbisdepay->configs, conf);
+  }
+
+  gst_buffer_unmap (confbuf, &map);
+  gst_buffer_unref (confbuf);
+
+  return TRUE;
+
+  /* ERRORS */
+too_small:
+  {
+    GST_DEBUG_OBJECT (rtpvorbisdepay, "configuration too small");
+    gst_buffer_unmap (confbuf, &map);
+    gst_buffer_unref (confbuf);
+    return FALSE;
+  }
+}
+
+/* Wrap an in-band (VDT=1) configuration payload into the out-of-band
+ * packed-headers layout (count=1, ident, length prefix) and hand it to
+ * gst_rtp_vorbis_depay_parse_configuration(), which consumes the buffer. */
+static gboolean
+gst_rtp_vorbis_depay_parse_inband_configuration (GstRtpVorbisDepay *
+    rtpvorbisdepay, guint ident, guint8 * configuration, guint size,
+    guint length)
+{
+  GstBuffer *confbuf;
+  GstMapInfo map;
+
+  if (G_UNLIKELY (size < 4))
+    return FALSE;
+
+  /* transform inline to out-of-band and parse that one */
+  confbuf = gst_buffer_new_and_alloc (size + 9);
+  gst_buffer_map (confbuf, &map, GST_MAP_WRITE);
+  /* 1 header */
+  GST_WRITE_UINT32_BE (map.data, 1);
+  /* write Ident */
+  GST_WRITE_UINT24_BE (map.data + 4, ident);
+  /* write sort-of-length */
+  GST_WRITE_UINT16_BE (map.data + 7, length);
+  /* copy remainder */
+  memcpy (map.data + 9, configuration, size);
+  gst_buffer_unmap (confbuf, &map);
+
+  return gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf);
+}
+
+/* set_caps vfunc: require clock-rate, optionally parse the base64
+ * "configuration" field into codebook configs, then set audio/x-vorbis
+ * on the src pad. Returns FALSE on missing clock-rate or bad config. */
+static gboolean
+gst_rtp_vorbis_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+  GstStructure *structure;
+  GstRtpVorbisDepay *rtpvorbisdepay;
+  GstCaps *srccaps;
+  const gchar *configuration;
+  gint clock_rate;
+  gboolean res;
+
+  rtpvorbisdepay = GST_RTP_VORBIS_DEPAY (depayload);
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  /* get clockrate */
+  if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+    goto no_rate;
+
+  /* read and parse configuration string */
+  configuration = gst_structure_get_string (structure, "configuration");
+  if (configuration) {
+    GstBuffer *confbuf;
+    guint8 *data;
+    gsize size;
+
+    /* deserialize base64 to buffer */
+    data = g_base64_decode (configuration, &size);
+
+    /* wrap decoded bytes; g_free is the destroy notify for @data */
+    confbuf = gst_buffer_new ();
+    gst_buffer_append_memory (confbuf,
+        gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));
+    if (!gst_rtp_vorbis_depay_parse_configuration (rtpvorbisdepay, confbuf))
+      goto invalid_configuration;
+  } else {
+    /* not fatal: configuration may still arrive in-band (VDT=1) */
+    GST_WARNING_OBJECT (rtpvorbisdepay, "no configuration specified");
+  }
+
+  /* caps seem good, configure element */
+  depayload->clock_rate = clock_rate;
+
+  /* set caps on pad and on header */
+  srccaps = gst_caps_new_empty_simple ("audio/x-vorbis");
+  res = gst_pad_set_caps (depayload->srcpad, srccaps);
+  gst_caps_unref (srccaps);
+
+  return res;
+
+  /* ERRORS */
+invalid_configuration:
+  {
+    GST_ERROR_OBJECT (rtpvorbisdepay, "invalid configuration specified");
+    return FALSE;
+  }
+no_rate:
+  {
+    GST_ERROR_OBJECT (rtpvorbisdepay, "no clock-rate specified");
+    return FALSE;
+  }
+}
+
+/* Activate the configuration matching @ident: push its stored header
+ * buffers downstream and remember it as the current config.
+ * Returns FALSE when no stored config has that ident. */
+static gboolean
+gst_rtp_vorbis_depay_switch_codebook (GstRtpVorbisDepay * rtpvorbisdepay,
+    guint32 ident)
+{
+  GList *walk;
+  gboolean res = FALSE;
+
+  GST_DEBUG_OBJECT (rtpvorbisdepay, "Looking up code book ident 0x%08x", ident);
+  for (walk = rtpvorbisdepay->configs; walk; walk = g_list_next (walk)) {
+    GstRtpVorbisConfig *conf = (GstRtpVorbisConfig *) walk->data;
+
+    if (conf->ident == ident) {
+      GList *headers;
+
+      /* FIXME, remove pads, create new pad.. */
+
+      /* push out all the headers */
+      for (headers = conf->headers; headers; headers = g_list_next (headers)) {
+        GstBuffer *header = GST_BUFFER_CAST (headers->data);
+
+        /* ref so the config keeps its copy; push takes ownership */
+        gst_buffer_ref (header);
+        gst_rtp_base_depayload_push (GST_RTP_BASE_DEPAYLOAD (rtpvorbisdepay),
+            header);
+      }
+      /* remember the current config */
+      rtpvorbisdepay->config = conf;
+      res = TRUE;
+    }
+  }
+  if (!res) {
+    /* we don't know about the headers, figure out an alternative method for
+     * getting the codebooks. FIXME, fail for now. */
+  }
+  return res;
+}
+
+/* process_rtp_packet vfunc: parse the 4-byte Vorbis RTP payload header,
+ * switch codebooks when the ident changes, reassemble fragments via the
+ * adapter, then split the payload into individual Vorbis packets and push
+ * them. Always returns NULL (buffers are pushed directly). */
+static GstBuffer *
+gst_rtp_vorbis_depay_process (GstRTPBaseDepayload * depayload,
+    GstRTPBuffer * rtp)
+{
+  GstRtpVorbisDepay *rtpvorbisdepay;
+  GstBuffer *outbuf;
+  GstFlowReturn ret;
+  gint payload_len;
+  GstBuffer *payload_buffer = NULL;
+  guint8 *payload;
+  GstMapInfo map;
+  guint32 header, ident;
+  guint8 F, VDT, packets;
+  guint length;
+
+  rtpvorbisdepay = GST_RTP_VORBIS_DEPAY (depayload);
+
+  payload_len = gst_rtp_buffer_get_payload_len (rtp);
+
+  GST_DEBUG_OBJECT (depayload, "got RTP packet of size %d", payload_len);
+
+  /* we need at least 4 bytes for the packet header */
+  if (G_UNLIKELY (payload_len < 4))
+    goto packet_short;
+
+  payload = gst_rtp_buffer_get_payload (rtp);
+  header = GST_READ_UINT32_BE (payload);
+  /*
+   *  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                     Ident                     | F |VDT|# pkts.|
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   *
+   * F: Fragment type (0=none, 1=start, 2=cont, 3=end)
+   * VDT: Vorbis data type (0=vorbis, 1=config, 2=comment, 3=reserved)
+   * pkts: number of packets.
+   */
+  VDT = (header & 0x30) >> 4;
+  if (G_UNLIKELY (VDT == 3))
+    goto ignore_reserved;
+
+  GST_DEBUG_OBJECT (depayload, "header: 0x%08x", header);
+  ident = (header >> 8) & 0xffffff;
+  F = (header & 0xc0) >> 6;
+  packets = (header & 0xf);
+
+  if (VDT == 0) {
+    gboolean do_switch = FALSE;
+
+    /* we have a raw payload, find the codebook for the ident */
+    if (!rtpvorbisdepay->config) {
+      /* we don't have an active codebook, find the codebook and
+       * activate it */
+      GST_DEBUG_OBJECT (rtpvorbisdepay, "No active codebook, switching");
+      do_switch = TRUE;
+    } else if (rtpvorbisdepay->config->ident != ident) {
+      /* codebook changed */
+      GST_DEBUG_OBJECT (rtpvorbisdepay, "codebook changed, switching");
+      do_switch = TRUE;
+    }
+    if (do_switch) {
+      if (!gst_rtp_vorbis_depay_switch_codebook (rtpvorbisdepay, ident))
+        goto switch_failed;
+    }
+  }
+
+  GST_DEBUG_OBJECT (depayload, "ident: %u, F: %d, VDT: %d, packets: %d", ident,
+      F, VDT, packets);
+
+  /* fragmented packets, assemble */
+  if (F != 0) {
+    GstBuffer *vdata;
+
+    if (F == 1) {
+      /* if we start a packet, clear adapter and start assembling. */
+      gst_adapter_clear (rtpvorbisdepay->adapter);
+      GST_DEBUG_OBJECT (depayload, "start assemble");
+      rtpvorbisdepay->assembling = TRUE;
+    }
+
+    /* continuation/end fragment without a seen start: drop it */
+    if (!rtpvorbisdepay->assembling)
+      goto no_output;
+
+    /* skip header and length. */
+    vdata = gst_rtp_buffer_get_payload_subbuffer (rtp, 6, -1);
+
+    GST_DEBUG_OBJECT (depayload, "assemble vorbis packet");
+    gst_adapter_push (rtpvorbisdepay->adapter, vdata);
+
+    /* packet is not complete, we are done */
+    if (F != 3)
+      goto no_output;
+
+    /* construct assembled buffer */
+    length = gst_adapter_available (rtpvorbisdepay->adapter);
+    payload_buffer = gst_adapter_take_buffer (rtpvorbisdepay->adapter, length);
+  } else {
+    /* unfragmented: skip the 4-byte payload header, lengths follow inline */
+    payload_buffer = gst_rtp_buffer_get_payload_subbuffer (rtp, 4, -1);
+    length = 0;
+  }
+
+  GST_DEBUG_OBJECT (depayload, "assemble done");
+
+  gst_buffer_map (payload_buffer, &map, GST_MAP_READ);
+  payload = map.data;
+  payload_len = map.size;
+
+  /* we not assembling anymore now */
+  rtpvorbisdepay->assembling = FALSE;
+  gst_adapter_clear (rtpvorbisdepay->adapter);
+
+  /* payload now points to a length with that many vorbis data bytes.
+   * Iterate over the packets and send them out.
+   *
+   *  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |             length            |          vorbis data         ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                        vorbis data                           |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |            length             |   next vorbis packet data    ..
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * ..                        vorbis data                           |
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+*
+   */
+  while (payload_len > 2) {
+    /* If length is not 0, we have a reassembled packet for which we
+     * calculated the length already and don't have to skip over the
+     * length field anymore
+     */
+    if (length == 0) {
+      length = GST_READ_UINT16_BE (payload);
+      payload += 2;
+      payload_len -= 2;
+    }
+
+    GST_DEBUG_OBJECT (depayload, "read length %u, avail: %d", length,
+        payload_len);
+
+    /* skip packet if something odd happens */
+    if (G_UNLIKELY (length > payload_len))
+      goto length_short;
+
+    /* handle in-band configuration */
+    if (G_UNLIKELY (VDT == 1)) {
+      GST_DEBUG_OBJECT (rtpvorbisdepay, "in-band configuration");
+      if (!gst_rtp_vorbis_depay_parse_inband_configuration (rtpvorbisdepay,
+              ident, payload, payload_len, length))
+        goto invalid_configuration;
+      goto no_output;
+    }
+
+    /* create buffer for packet */
+    outbuf =
+        gst_buffer_copy_region (payload_buffer, GST_BUFFER_COPY_ALL,
+        payload - map.data, length);
+
+    payload += length;
+    payload_len -= length;
+    /* make sure to read next length */
+    length = 0;
+
+    ret = gst_rtp_base_depayload_push (depayload, outbuf);
+    if (ret != GST_FLOW_OK)
+      break;
+  }
+
+  gst_buffer_unmap (payload_buffer, &map);
+  gst_buffer_unref (payload_buffer);
+
+  return NULL;
+
+no_output:
+  {
+    /* payload_buffer is only mapped when we got past assembly */
+    if (payload_buffer) {
+      gst_buffer_unmap (payload_buffer, &map);
+      gst_buffer_unref (payload_buffer);
+    }
+    return NULL;
+  }
+  /* ERRORS */
+switch_failed:
+  {
+    GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
+        (NULL), ("Could not switch codebooks"));
+    return NULL;
+  }
+packet_short:
+  {
+    GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
+        (NULL), ("Packet was too short (%d < 4)", payload_len));
+    return NULL;
+  }
+ignore_reserved:
+  {
+    GST_WARNING_OBJECT (rtpvorbisdepay, "reserved VDT ignored");
+    return NULL;
+  }
+length_short:
+  {
+    GST_ELEMENT_WARNING (rtpvorbisdepay, STREAM, DECODE,
+        (NULL), ("Packet contains invalid data"));
+    if (payload_buffer) {
+      gst_buffer_unmap (payload_buffer, &map);
+      gst_buffer_unref (payload_buffer);
+    }
+    return NULL;
+  }
+invalid_configuration:
+  {
+    /* fatal, as we otherwise risk carrying on without output */
+    GST_ELEMENT_ERROR (rtpvorbisdepay, STREAM, DECODE,
+        (NULL), ("Packet contains invalid configuration"));
+    if (payload_buffer) {
+      gst_buffer_unmap (payload_buffer, &map);
+      gst_buffer_unref (payload_buffer);
+    }
+    return NULL;
+  }
+}
+
+/* change_state vfunc: chain up, then free all stored codebook
+ * configurations when leaving PAUSED for READY. */
+static GstStateChangeReturn
+gst_rtp_vorbis_depay_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpVorbisDepay *rtpvorbisdepay;
+  GstStateChangeReturn ret;
+
+  rtpvorbisdepay = GST_RTP_VORBIS_DEPAY (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      free_indents (rtpvorbisdepay);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return ret;
+}
diff --git a/gst/rtp/gstrtpvorbisdepay.h b/gst/rtp/gstrtpvorbisdepay.h
new file mode 100644
index 0000000000..40f8d5f5f1
--- /dev/null
+++ b/gst/rtp/gstrtpvorbisdepay.h
@@ -0,0 +1,68 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_VORBIS_DEPAY_H__
+#define __GST_RTP_VORBIS_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_VORBIS_DEPAY \
+  (gst_rtp_vorbis_depay_get_type())
+#define GST_RTP_VORBIS_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_VORBIS_DEPAY,GstRtpVorbisDepay))
+#define GST_RTP_VORBIS_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_VORBIS_DEPAY,GstRtpVorbisDepayClass))
+#define GST_IS_RTP_VORBIS_DEPAY(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_VORBIS_DEPAY))
+#define GST_IS_RTP_VORBIS_DEPAY_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_VORBIS_DEPAY))
+
+typedef struct _GstRtpVorbisDepay GstRtpVorbisDepay;
+typedef struct _GstRtpVorbisDepayClass GstRtpVorbisDepayClass;
+
+/* One decoder setup: a 24-bit codebook ident and the list of Vorbis
+ * header buffers that belong to it. */
+typedef struct _GstRtpVorbisConfig {
+  guint32 ident;
+  GList *headers;
+} GstRtpVorbisConfig;
+
+struct _GstRtpVorbisDepay
+{
+  GstRTPBaseDepayload parent;
+
+  GList *configs;               /* list of GstRtpVorbisConfig, one per ident */
+  GstRtpVorbisConfig *config;   /* currently active config, or NULL */
+
+  GstAdapter *adapter;          /* reassembly buffer for fragmented packets */
+  gboolean assembling;          /* TRUE between a start fragment and its end */
+};
+
+struct _GstRtpVorbisDepayClass
+{
+  GstRTPBaseDepayloadClass parent_class;
+};
+
+GType gst_rtp_vorbis_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_VORBIS_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpvorbispay.c b/gst/rtp/gstrtpvorbispay.c
new file mode 100644
index 0000000000..e54e2a7cde
--- /dev/null
+++ b/gst/rtp/gstrtpvorbispay.c
@@ -0,0 +1,1002 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/audio/audio.h>
+
+#include "gstrtpelements.h"
+#include "fnv1hash.h"
+#include "gstrtpvorbispay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpvorbispay_debug);
+#define GST_CAT_DEFAULT (rtpvorbispay_debug)
+
+/* references:
+ * http://www.rfc-editor.org/rfc/rfc5215.txt
+ */
+
+static GstStaticPadTemplate gst_rtp_vorbis_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) \"audio\", "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
+ "clock-rate = (int) [1, MAX ], " "encoding-name = (string) \"VORBIS\""
+ /* All required parameters
+ *
+ * "encoding-params = (string) <num channels>"
+ * "configuration = (string) ANY"
+ */
+ )
+ );
+
+static GstStaticPadTemplate gst_rtp_vorbis_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-vorbis")
+ );
+
+#define DEFAULT_CONFIG_INTERVAL 0
+
+enum
+{
+ PROP_0,
+ PROP_CONFIG_INTERVAL
+};
+
+#define gst_rtp_vorbis_pay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVorbisPay, gst_rtp_vorbis_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvorbispay, "rtpvorbispay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_VORBIS_PAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_vorbis_pay_setcaps (GstRTPBasePayload * basepayload,
+ GstCaps * caps);
+static GstStateChangeReturn gst_rtp_vorbis_pay_change_state (GstElement *
+ element, GstStateChange transition);
+static GstFlowReturn gst_rtp_vorbis_pay_handle_buffer (GstRTPBasePayload * pad,
+ GstBuffer * buffer);
+static gboolean gst_rtp_vorbis_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+
+static gboolean gst_rtp_vorbis_pay_parse_id (GstRTPBasePayload * basepayload,
+ guint8 * data, guint size);
+static gboolean gst_rtp_vorbis_pay_finish_headers (GstRTPBasePayload *
+ basepayload);
+
+static void gst_rtp_vorbis_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_vorbis_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+/* Class init: wire up state change, payloader vfuncs (set_caps,
+ * handle_buffer, sink_event), properties, pad templates and metadata. */
+static void
+gst_rtp_vorbis_pay_class_init (GstRtpVorbisPayClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstRTPBasePayloadClass *gstrtpbasepayload_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+  gstelement_class->change_state = gst_rtp_vorbis_pay_change_state;
+
+  gstrtpbasepayload_class->set_caps = gst_rtp_vorbis_pay_setcaps;
+  gstrtpbasepayload_class->handle_buffer = gst_rtp_vorbis_pay_handle_buffer;
+  gstrtpbasepayload_class->sink_event = gst_rtp_vorbis_pay_sink_event;
+
+  gobject_class->set_property = gst_rtp_vorbis_pay_set_property;
+  gobject_class->get_property = gst_rtp_vorbis_pay_get_property;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_vorbis_pay_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_rtp_vorbis_pay_sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP Vorbis payloader",
+      "Codec/Payloader/Network/RTP",
+      "Payload-encode Vorbis audio into RTP packets (RFC 5215)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (rtpvorbispay_debug, "rtpvorbispay", 0,
+      "Vorbis RTP Payloader");
+
+  /* periodic re-transmission of the config headers in the stream;
+   * 0 (the default) disables it */
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_CONFIG_INTERVAL,
+      g_param_spec_uint ("config-interval", "Config Send Interval",
+          "Send Config Insertion Interval in seconds (configuration headers "
+          "will be multiplexed in the data stream when detected.) (0 = disabled)",
+          0, 3600, DEFAULT_CONFIG_INTERVAL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+      );
+}
+
+/* Instance init: no config sent yet. */
+static void
+gst_rtp_vorbis_pay_init (GstRtpVorbisPay * rtpvorbispay)
+{
+  rtpvorbispay->last_config = GST_CLOCK_TIME_NONE;
+}
+
+/* Drop the partially built output packet and the list of input buffers
+ * whose metas would have been copied onto it. */
+static void
+gst_rtp_vorbis_pay_clear_packet (GstRtpVorbisPay * rtpvorbispay)
+{
+  if (rtpvorbispay->packet)
+    gst_buffer_unref (rtpvorbispay->packet);
+  rtpvorbispay->packet = NULL;
+  g_list_free_full (rtpvorbispay->packet_buffers,
+      (GDestroyNotify) gst_buffer_unref);
+  rtpvorbispay->packet_buffers = NULL;
+}
+
+/* Reset all payloader state: pending packet, collected headers and the
+ * serialized config blob. */
+static void
+gst_rtp_vorbis_pay_cleanup (GstRtpVorbisPay * rtpvorbispay)
+{
+  gst_rtp_vorbis_pay_clear_packet (rtpvorbispay);
+  g_list_free_full (rtpvorbispay->headers, (GDestroyNotify) gst_buffer_unref);
+  rtpvorbispay->headers = NULL;
+  g_free (rtpvorbispay->config_data);
+  rtpvorbispay->config_data = NULL;
+  rtpvorbispay->last_config = GST_CLOCK_TIME_NONE;
+}
+
+/* set_caps vfunc: collect the three-or-more Vorbis streamheader buffers
+ * (ids 1, 3, 5), parse the identification header for the clock rate, and
+ * build the packed configuration. Succeeds without headers too; they may
+ * still arrive in-band.
+ * NOTE(review): headers are appended without clearing a previous list;
+ * confirm repeated setcaps cannot accumulate stale headers. */
+static gboolean
+gst_rtp_vorbis_pay_setcaps (GstRTPBasePayload * basepayload, GstCaps * caps)
+{
+  GstRtpVorbisPay *rtpvorbispay;
+  GstStructure *s;
+  const GValue *array;
+  gint asize, i;
+  GstBuffer *buf;
+  GstMapInfo map;
+
+  rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
+
+  s = gst_caps_get_structure (caps, 0);
+
+  rtpvorbispay->need_headers = TRUE;
+
+  if ((array = gst_structure_get_value (s, "streamheader")) == NULL)
+    goto done;
+
+  if (G_VALUE_TYPE (array) != GST_TYPE_ARRAY)
+    goto done;
+
+  if ((asize = gst_value_array_get_size (array)) < 3)
+    goto done;
+
+  for (i = 0; i < asize; i++) {
+    const GValue *value;
+
+    value = gst_value_array_get_value (array, i);
+    if ((buf = gst_value_get_buffer (value)) == NULL)
+      goto null_buffer;
+
+    gst_buffer_map (buf, &map, GST_MAP_READ);
+    if (map.size < 1)
+      goto invalid_streamheader;
+
+    /* no data packets allowed */
+    if ((map.data[0] & 1) == 0)
+      goto invalid_streamheader;
+
+    /* we need packets with id 1, 3, 5 */
+    if (map.data[0] != (i * 2) + 1)
+      goto invalid_streamheader;
+
+    if (i == 0) {
+      /* identification, we need to parse this in order to get the clock rate. */
+      if (G_UNLIKELY (!gst_rtp_vorbis_pay_parse_id (basepayload, map.data,
+                  map.size)))
+        goto parse_id_failed;
+    }
+    GST_DEBUG_OBJECT (rtpvorbispay, "collecting header %d", i);
+    rtpvorbispay->headers =
+        g_list_append (rtpvorbispay->headers, gst_buffer_ref (buf));
+    gst_buffer_unmap (buf, &map);
+  }
+  if (!gst_rtp_vorbis_pay_finish_headers (basepayload))
+    goto finish_failed;
+
+done:
+  return TRUE;
+
+  /* ERRORS */
+null_buffer:
+  {
+    GST_WARNING_OBJECT (rtpvorbispay, "streamheader with null buffer received");
+    return FALSE;
+  }
+invalid_streamheader:
+  {
+    GST_WARNING_OBJECT (rtpvorbispay, "unable to parse initial header");
+    gst_buffer_unmap (buf, &map);
+    return FALSE;
+  }
+parse_id_failed:
+  {
+    GST_WARNING_OBJECT (rtpvorbispay, "unable to parse initial header");
+    gst_buffer_unmap (buf, &map);
+    return FALSE;
+  }
+finish_failed:
+  {
+    GST_WARNING_OBJECT (rtpvorbispay, "unable to finish headers");
+    return FALSE;
+  }
+}
+
+/* Rewind the packet write position to just after the 4-byte Vorbis
+ * payload header and reset the per-packet counters for a new VDT. */
+static void
+gst_rtp_vorbis_pay_reset_packet (GstRtpVorbisPay * rtpvorbispay, guint8 VDT)
+{
+  guint payload_len;
+  GstRTPBuffer rtp = { NULL };
+
+  GST_LOG_OBJECT (rtpvorbispay, "reset packet");
+
+  rtpvorbispay->payload_pos = 4;
+  gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_READ, &rtp);
+  payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+  gst_rtp_buffer_unmap (&rtp);
+  rtpvorbispay->payload_left = payload_len - 4;
+  rtpvorbispay->payload_duration = 0;
+  rtpvorbispay->payload_F = 0;
+  rtpvorbispay->payload_VDT = VDT;
+  rtpvorbispay->payload_pkts = 0;
+}
+
+/* Start a fresh output packet sized to the MTU, stamped with @timestamp,
+ * carrying payloads of type @VDT. Any pending packet is discarded first. */
+static void
+gst_rtp_vorbis_pay_init_packet (GstRtpVorbisPay * rtpvorbispay, guint8 VDT,
+    GstClockTime timestamp)
+{
+  guint len;
+
+  GST_LOG_OBJECT (rtpvorbispay, "starting new packet, VDT: %d", VDT);
+
+  gst_rtp_vorbis_pay_clear_packet (rtpvorbispay);
+
+  /* new packet allocate max packet size */
+  len = gst_rtp_buffer_calc_payload_len (GST_RTP_BASE_PAYLOAD_MTU
+      (rtpvorbispay), 0, 0);
+  rtpvorbispay->packet =
+      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD
+      (rtpvorbispay), len, 0, 0);
+  gst_rtp_vorbis_pay_reset_packet (rtpvorbispay, VDT);
+
+  GST_BUFFER_PTS (rtpvorbispay->packet) = timestamp;
+}
+
+/* Finalize and push the pending packet: write the 4-byte Vorbis payload
+ * header, trim the buffer to what was actually written, copy the audio
+ * metas from the contributing input buffers, and push downstream.
+ * No-op (GST_FLOW_OK) when the packet is absent or empty. */
+static GstFlowReturn
+gst_rtp_vorbis_pay_flush_packet (GstRtpVorbisPay * rtpvorbispay)
+{
+  GstFlowReturn ret;
+  guint8 *payload;
+  guint hlen;
+  GstRTPBuffer rtp = { NULL };
+  GList *l;
+
+  /* check for empty packet */
+  if (!rtpvorbispay->packet || rtpvorbispay->payload_pos <= 4)
+    return GST_FLOW_OK;
+
+  GST_LOG_OBJECT (rtpvorbispay, "flushing packet");
+
+  gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+
+  /* fix header */
+  payload = gst_rtp_buffer_get_payload (&rtp);
+  /*
+   *  0                   1                   2                   3
+   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   * |                     Ident                     | F |VDT|# pkts.|
+   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   *
+   * F: Fragment type (0=none, 1=start, 2=cont, 3=end)
+   * VDT: Vorbis data type (0=vorbis, 1=config, 2=comment, 3=reserved)
+   * pkts: number of packets.
+   */
+  payload[0] = (rtpvorbispay->payload_ident >> 16) & 0xff;
+  payload[1] = (rtpvorbispay->payload_ident >> 8) & 0xff;
+  payload[2] = (rtpvorbispay->payload_ident) & 0xff;
+  payload[3] = (rtpvorbispay->payload_F & 0x3) << 6 |
+      (rtpvorbispay->payload_VDT & 0x3) << 4 |
+      (rtpvorbispay->payload_pkts & 0xf);
+
+  gst_rtp_buffer_unmap (&rtp);
+
+  /* shrink the buffer size to the last written byte */
+  hlen = gst_rtp_buffer_calc_header_len (0);
+  gst_buffer_resize (rtpvorbispay->packet, 0, hlen + rtpvorbispay->payload_pos);
+
+  GST_BUFFER_DURATION (rtpvorbispay->packet) = rtpvorbispay->payload_duration;
+
+  /* walk the contributing buffers from last to first */
+  for (l = g_list_last (rtpvorbispay->packet_buffers); l; l = l->prev) {
+    GstBuffer *buf = GST_BUFFER_CAST (l->data);
+    gst_rtp_copy_audio_meta (rtpvorbispay, rtpvorbispay->packet, buf);
+    gst_buffer_unref (buf);
+  }
+  g_list_free (rtpvorbispay->packet_buffers);
+  rtpvorbispay->packet_buffers = NULL;
+
+  /* push, this gives away our ref to the packet, so clear it. */
+  ret =
+      gst_rtp_base_payload_push (GST_RTP_BASE_PAYLOAD (rtpvorbispay),
+      rtpvorbispay->packet);
+  rtpvorbispay->packet = NULL;
+
+  return ret;
+}
+
+/* Pack the collected Vorbis headers (identification, comment, setup) into
+ * the RFC 5215 packed-configuration form, base64-encode it into the
+ * "configuration" caps field, and set the output caps. The packed config is
+ * also kept in config_data for periodic in-band re-sending.
+ *
+ * Returns FALSE when no headers were collected or setting caps failed. */
+static gboolean
+gst_rtp_vorbis_pay_finish_headers (GstRTPBasePayload * basepayload)
+{
+ GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
+ GList *walk;
+ guint length, size, n_headers, configlen, extralen;
+ gchar *cstr, *configuration;
+ guint8 *data, *config;
+ guint32 ident;
+ gboolean res;
+
+ GST_DEBUG_OBJECT (rtpvorbispay, "finish headers");
+
+ if (!rtpvorbispay->headers)
+ goto no_headers;
+
+ /* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Number of packed headers |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Packed header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Packed header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | .... |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ *
+ * We only construct a config containing 1 packed header like this:
+ *
+ * 0 1 2 3
+ * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Ident | length ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. | n. of headers | length1 | length2 ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. | Identification Header ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .................................................................
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. | Comment Header ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .................................................................
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. Comment Header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * | Setup Header ..
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .................................................................
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ * .. Setup Header |
+ * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+
+ /* we need 4 bytes for the number of headers (which is always 1), 3 bytes for
+ * the ident, 2 bytes for length, 1 byte for n. of headers. */
+ size = 4 + 3 + 2 + 1;
+
+ /* count the size of the headers first and update the hash */
+ length = 0;
+ n_headers = 0;
+ ident = fnv1_hash_32_new ();
+ extralen = 1;
+ for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
+ GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ GstMapInfo map;
+ guint bsize;
+
+ bsize = gst_buffer_get_size (buf);
+ length += bsize;
+ n_headers++;
+
+ /* count number of bytes needed for length fields, we don't need this for
+ * the last header. */
+ if (g_list_next (walk)) {
+ do {
+ size++;
+ extralen++;
+ bsize >>= 7;
+ } while (bsize);
+ }
+ /* update hash */
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ ident = fnv1_hash_32_update (ident, map.data, map.size);
+ gst_buffer_unmap (buf, &map);
+ }
+
+ /* packet length is header size + packet length */
+ configlen = size + length;
+ config = data = g_malloc (configlen);
+
+ /* number of packed headers, we only pack 1 header */
+ data[0] = 0;
+ data[1] = 0;
+ data[2] = 0;
+ data[3] = 1;
+
+ ident = fnv1_hash_32_to_24 (ident);
+ rtpvorbispay->payload_ident = ident;
+ GST_DEBUG_OBJECT (rtpvorbispay, "ident 0x%08x", ident);
+
+ /* take lower 3 bytes */
+ data[4] = (ident >> 16) & 0xff;
+ data[5] = (ident >> 8) & 0xff;
+ data[6] = ident & 0xff;
+
+ /* store length of all vorbis headers */
+ data[7] = ((length) >> 8) & 0xff;
+ data[8] = (length) & 0xff;
+
+ /* store number of headers minus one. */
+ data[9] = n_headers - 1;
+ data += 10;
+
+ /* store length for each header */
+ for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
+ GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+ guint bsize, numlen, temp;
+ guint flag;
+
+ /* only need to store the length when it's not the last header */
+ if (!g_list_next (walk))
+ break;
+
+ bsize = gst_buffer_get_size (buf);
+
+ /* calc number of bytes in the variable-length length field */
+ numlen = 0;
+ do {
+ numlen++;
+ bsize >>= 7;
+ } while (bsize);
+ temp = numlen;
+
+ bsize = gst_buffer_get_size (buf);
+ /* write the size backwards */
+ flag = 0;
+ while (numlen) {
+ numlen--;
+ data[numlen] = (bsize & 0x7f) | flag;
+ bsize >>= 7;
+ flag = 0x80; /* Flag bit on all bytes of the length except the last */
+ }
+ data += temp;
+ }
+
+ /* copy header data */
+ for (walk = rtpvorbispay->headers; walk; walk = g_list_next (walk)) {
+ GstBuffer *buf = GST_BUFFER_CAST (walk->data);
+
+ gst_buffer_extract (buf, 0, data, gst_buffer_get_size (buf));
+ data += gst_buffer_get_size (buf);
+ }
+ rtpvorbispay->need_headers = FALSE;
+
+ /* serialize to base64 */
+ configuration = g_base64_encode (config, configlen);
+
+ /* store for later re-sending; skip the 4+3+2 byte preamble so config_data
+ * starts at the "n. of headers" byte */
+ g_free (rtpvorbispay->config_data);
+ rtpvorbispay->config_size = configlen - 4 - 3 - 2;
+ rtpvorbispay->config_data = g_malloc (rtpvorbispay->config_size);
+ rtpvorbispay->config_extra_len = extralen;
+ memcpy (rtpvorbispay->config_data, config + 4 + 3 + 2,
+ rtpvorbispay->config_size);
+
+ g_free (config);
+
+ /* configure payloader settings */
+ cstr = g_strdup_printf ("%d", rtpvorbispay->channels);
+ gst_rtp_base_payload_set_options (basepayload, "audio", TRUE, "VORBIS",
+ rtpvorbispay->rate);
+ res =
+ gst_rtp_base_payload_set_outcaps (basepayload, "encoding-params",
+ G_TYPE_STRING, cstr, "configuration", G_TYPE_STRING, configuration, NULL);
+ g_free (cstr);
+ g_free (configuration);
+
+ return res;
+
+ /* ERRORS */
+no_headers:
+ {
+ GST_DEBUG_OBJECT (rtpvorbispay, "no headers, cannot finish");
+ return FALSE;
+ }
+}
+
+/* Parse the Vorbis identification header ("\1vorbis" + version + channels +
+ * rate, little-endian per the Vorbis I spec) and store channels/rate on the
+ * payloader. Posts a STREAM/DECODE element error and returns FALSE on any
+ * malformed field. */
+static gboolean
+gst_rtp_vorbis_pay_parse_id (GstRTPBasePayload * basepayload, guint8 * data,
+ guint size)
+{
+ GstRtpVorbisPay *rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
+ guint8 channels;
+ gint32 rate, version;
+
+ /* need packet type, magic, version, channels and rate = 16 bytes */
+ if (G_UNLIKELY (size < 16))
+ goto too_short;
+
+ if (G_UNLIKELY (memcmp (data, "\001vorbis", 7)))
+ goto invalid_start;
+ data += 7;
+
+ if (G_UNLIKELY ((version = GST_READ_UINT32_LE (data)) != 0))
+ goto invalid_version;
+ data += 4;
+
+ if (G_UNLIKELY ((channels = *data++) < 1))
+ goto invalid_channels;
+
+ /* read as signed so out-of-range values show up as < 1 */
+ if (G_UNLIKELY ((rate = GST_READ_UINT32_LE (data)) < 1))
+ goto invalid_rate;
+
+ /* all fine, store the values */
+ rtpvorbispay->channels = channels;
+ rtpvorbispay->rate = rate;
+
+ return TRUE;
+
+ /* ERRORS */
+too_short:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ ("Identification packet is too short, need at least 16, got %d", size),
+ (NULL));
+ return FALSE;
+ }
+invalid_start:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ ("Invalid header start in identification packet"), (NULL));
+ return FALSE;
+ }
+invalid_version:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ ("Invalid version, expected 0, got %d", version), (NULL));
+ return FALSE;
+ }
+invalid_rate:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ ("Invalid rate %d", rate), (NULL));
+ return FALSE;
+ }
+invalid_channels:
+ {
+ GST_ELEMENT_ERROR (basepayload, STREAM, DECODE,
+ ("Invalid channels %d", channels), (NULL));
+ return FALSE;
+ }
+}
+
+/* Append one Vorbis packet (@data/@size, optionally backed by @buffer for
+ * meta copying; when @buffer is NULL the collected headers are the backing
+ * buffers) to the pending RTP packet, flushing and fragmenting across
+ * multiple RTP packets as needed.
+ *
+ * @not_in_length: number of trailing bytes excluded from the first written
+ * 2-byte length field (used when re-sending the packed config, whose extra
+ * per-header length bytes are not counted; see finish_headers). */
+static GstFlowReturn
+gst_rtp_vorbis_pay_payload_buffer (GstRtpVorbisPay * rtpvorbispay, guint8 VDT,
+ GstBuffer * buffer, guint8 * data, guint size, GstClockTime timestamp,
+ GstClockTime duration, guint not_in_length)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint newsize;
+ guint packet_len;
+ GstClockTime newduration;
+ gboolean flush;
+ guint plen;
+ guint8 *ppos, *payload;
+ gboolean fragmented;
+ GstRTPBuffer rtp = { NULL };
+
+ /* size increases with packet length and 2 bytes size eader. */
+ newduration = rtpvorbispay->payload_duration;
+ if (duration != GST_CLOCK_TIME_NONE)
+ newduration += duration;
+
+ newsize = rtpvorbispay->payload_pos + 2 + size;
+ packet_len = gst_rtp_buffer_calc_packet_len (newsize, 0, 0);
+
+ /* check buffer filled against length and max latency */
+ flush = gst_rtp_base_payload_is_filled (GST_RTP_BASE_PAYLOAD (rtpvorbispay),
+ packet_len, newduration);
+ /* we can store up to 15 vorbis packets in one RTP packet. */
+ flush |= (rtpvorbispay->payload_pkts == 15);
+ /* flush if we have a new VDT */
+ if (rtpvorbispay->packet)
+ flush |= (rtpvorbispay->payload_VDT != VDT);
+ if (flush)
+ ret = gst_rtp_vorbis_pay_flush_packet (rtpvorbispay);
+
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ /* create new packet if we must */
+ if (!rtpvorbispay->packet) {
+ gst_rtp_vorbis_pay_init_packet (rtpvorbispay, VDT, timestamp);
+ }
+
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ ppos = payload + rtpvorbispay->payload_pos;
+ fragmented = FALSE;
+
+ /* put buffer in packet, it either fits completely or needs to be fragmented
+ * over multiple RTP packets. */
+ do {
+ /* NOTE(review): assumes payload_left >= 2 here (fresh packets are
+ * MTU-sized) — a guint underflow would result otherwise; confirm */
+ plen = MIN (rtpvorbispay->payload_left - 2, size);
+
+ GST_LOG_OBJECT (rtpvorbispay, "append %u bytes", plen);
+
+ /* data is copied in the payload with a 2 byte length header */
+ ppos[0] = ((plen - not_in_length) >> 8) & 0xff;
+ ppos[1] = ((plen - not_in_length) & 0xff);
+ if (plen)
+ memcpy (&ppos[2], data, plen);
+
+ /* remember the buffers feeding this RTP packet so their metas can be
+ * copied at flush time; avoid duplicate consecutive entries */
+ if (buffer) {
+ if (!rtpvorbispay->packet_buffers
+ || rtpvorbispay->packet_buffers->data != (gpointer) buffer)
+ rtpvorbispay->packet_buffers =
+ g_list_prepend (rtpvorbispay->packet_buffers,
+ gst_buffer_ref (buffer));
+ } else {
+ GList *l;
+
+ for (l = rtpvorbispay->headers; l; l = l->next)
+ rtpvorbispay->packet_buffers =
+ g_list_prepend (rtpvorbispay->packet_buffers,
+ gst_buffer_ref (l->data));
+ }
+
+ /* only first (only) configuration cuts length field */
+ /* NOTE: spec (if any) is not clear on this ... */
+ not_in_length = 0;
+
+ size -= plen;
+ data += plen;
+
+ rtpvorbispay->payload_pos += plen + 2;
+ rtpvorbispay->payload_left -= plen + 2;
+
+ if (fragmented) {
+ if (size == 0)
+ /* last fragment, set F to 0x3. */
+ rtpvorbispay->payload_F = 0x3;
+ else
+ /* fragment continues, set F to 0x2. */
+ rtpvorbispay->payload_F = 0x2;
+ } else {
+ if (size > 0) {
+ /* fragmented packet starts, set F to 0x1, mark ourselves as
+ * fragmented. */
+ rtpvorbispay->payload_F = 0x1;
+ fragmented = TRUE;
+ }
+ }
+ if (fragmented) {
+ gst_rtp_buffer_unmap (&rtp);
+ /* fragmented packets are always flushed and have ptks of 0 */
+ rtpvorbispay->payload_pkts = 0;
+ ret = gst_rtp_vorbis_pay_flush_packet (rtpvorbispay);
+
+ if (size > 0) {
+ /* start new packet and get pointers. VDT stays the same. */
+ gst_rtp_vorbis_pay_init_packet (rtpvorbispay,
+ rtpvorbispay->payload_VDT, timestamp);
+ gst_rtp_buffer_map (rtpvorbispay->packet, GST_MAP_WRITE, &rtp);
+ payload = gst_rtp_buffer_get_payload (&rtp);
+ ppos = payload + rtpvorbispay->payload_pos;
+ }
+ } else {
+ /* unfragmented packet, update stats for next packet, size == 0 and we
+ * exit the while loop */
+ rtpvorbispay->payload_pkts++;
+ if (duration != GST_CLOCK_TIME_NONE)
+ rtpvorbispay->payload_duration += duration;
+ }
+ } while (size && ret == GST_FLOW_OK);
+
+ if (rtp.buffer)
+ gst_rtp_buffer_unmap (&rtp);
+
+done:
+
+ return ret;
+}
+
+/* Chain function: classify each incoming Vorbis packet (identification,
+ * comment, setup header or audio data), collect headers until complete,
+ * build the config caps once, periodically re-send the config in-band, and
+ * payload audio packets.
+ *
+ * Fix: the original set need_headers = TRUE *before* testing
+ * !rtpvorbispay->need_headers, which made the "replace existing headers"
+ * branch unreachable; the flag must be set after the check. */
+static GstFlowReturn
+gst_rtp_vorbis_pay_handle_buffer (GstRTPBasePayload * basepayload,
+ GstBuffer * buffer)
+{
+ GstRtpVorbisPay *rtpvorbispay;
+ GstFlowReturn ret;
+ GstMapInfo map;
+ gsize size;
+ guint8 *data;
+ GstClockTime duration, timestamp;
+ guint8 VDT;
+
+ rtpvorbispay = GST_RTP_VORBIS_PAY (basepayload);
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+ duration = GST_BUFFER_DURATION (buffer);
+ timestamp = GST_BUFFER_PTS (buffer);
+
+ GST_LOG_OBJECT (rtpvorbispay, "size %" G_GSIZE_FORMAT
+ ", duration %" GST_TIME_FORMAT, size, GST_TIME_ARGS (duration));
+
+ if (G_UNLIKELY (size < 1))
+ goto wrong_size;
+
+ /* find packet type: odd first byte means header packet */
+ if (data[0] & 1) {
+ /* header */
+ if (data[0] == 1) {
+ /* identification, we need to parse this in order to get the clock rate. */
+ if (G_UNLIKELY (!gst_rtp_vorbis_pay_parse_id (basepayload, data, size)))
+ goto parse_id_failed;
+ VDT = 1;
+ } else if (data[0] == 3) {
+ /* comment */
+ VDT = 2;
+ } else if (data[0] == 5) {
+ /* setup */
+ VDT = 1;
+ } else
+ goto unknown_header;
+ } else
+ /* data */
+ VDT = 0;
+
+ /* we need to collect the headers and construct a config string from them */
+ if (VDT != 0) {
+ /* a new identification header while the previous header set was already
+ * complete starts a new stream: drop the old headers. This must happen
+ * before need_headers is set below, otherwise it can never trigger. */
+ if (!rtpvorbispay->need_headers && VDT == 1) {
+ GST_INFO_OBJECT (rtpvorbispay, "getting new headers, replace existing");
+ g_list_free_full (rtpvorbispay->headers,
+ (GDestroyNotify) gst_buffer_unref);
+ rtpvorbispay->headers = NULL;
+ }
+ rtpvorbispay->need_headers = TRUE;
+ GST_DEBUG_OBJECT (rtpvorbispay, "collecting header");
+ /* append header to the list of headers, or replace
+ * if the same type of header was already in there.
+ *
+ * This prevents storing an infinite amount of e.g. comment headers, there
+ * must only be one */
+ gst_buffer_unmap (buffer, &map);
+
+ if (rtpvorbispay->headers) {
+ gboolean found = FALSE;
+ GList *l;
+ guint8 new_header_type;
+
+ gst_buffer_extract (buffer, 0, &new_header_type, 1);
+
+ for (l = rtpvorbispay->headers; l; l = l->next) {
+ GstBuffer *header = l->data;
+ guint8 header_type;
+
+ if (gst_buffer_extract (header, 0, &header_type, 1)
+ && header_type == new_header_type) {
+ found = TRUE;
+ gst_buffer_unref (header);
+ l->data = buffer;
+ break;
+ }
+ }
+ if (!found)
+ rtpvorbispay->headers = g_list_append (rtpvorbispay->headers, buffer);
+ } else {
+ rtpvorbispay->headers = g_list_append (rtpvorbispay->headers, buffer);
+ }
+
+ /* the header list took ownership of @buffer */
+ ret = GST_FLOW_OK;
+ goto done;
+ } else if (rtpvorbispay->headers && rtpvorbispay->need_headers) {
+ /* first data packet after collecting headers: build and set the caps */
+ if (!gst_rtp_vorbis_pay_finish_headers (basepayload))
+ goto header_error;
+ }
+
+ /* there is a config request, see if we need to insert it */
+ if (rtpvorbispay->config_interval > 0 && rtpvorbispay->config_data) {
+ gboolean send_config = FALSE;
+ GstClockTime running_time =
+ gst_segment_to_running_time (&basepayload->segment, GST_FORMAT_TIME,
+ timestamp);
+
+ if (rtpvorbispay->last_config != -1) {
+ guint64 diff;
+
+ GST_LOG_OBJECT (rtpvorbispay,
+ "now %" GST_TIME_FORMAT ", last config %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time),
+ GST_TIME_ARGS (rtpvorbispay->last_config));
+
+ /* calculate diff between last config in milliseconds */
+ if (running_time > rtpvorbispay->last_config) {
+ diff = running_time - rtpvorbispay->last_config;
+ } else {
+ diff = 0;
+ }
+
+ GST_DEBUG_OBJECT (rtpvorbispay,
+ "interval since last config %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
+
+ /* bigger than interval, queue config */
+ if (GST_TIME_AS_SECONDS (diff) >= rtpvorbispay->config_interval) {
+ GST_DEBUG_OBJECT (rtpvorbispay, "time to send config");
+ send_config = TRUE;
+ }
+ } else {
+ /* no known previous config time, send now */
+ GST_DEBUG_OBJECT (rtpvorbispay, "no previous config time, send now");
+ send_config = TRUE;
+ }
+
+ if (send_config) {
+ /* we need to send config now first */
+ /* different TDT type forces flush */
+ gst_rtp_vorbis_pay_payload_buffer (rtpvorbispay, 1,
+ NULL, rtpvorbispay->config_data, rtpvorbispay->config_size,
+ timestamp, GST_CLOCK_TIME_NONE, rtpvorbispay->config_extra_len);
+
+ if (running_time != -1) {
+ rtpvorbispay->last_config = running_time;
+ }
+ }
+ }
+
+ ret =
+ gst_rtp_vorbis_pay_payload_buffer (rtpvorbispay, VDT, buffer, data, size,
+ timestamp, duration, 0);
+
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+
+done:
+ return ret;
+
+ /* ERRORS */
+wrong_size:
+ {
+ GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
+ ("Invalid packet size (1 < %" G_GSIZE_FORMAT ")", size), (NULL));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+parse_id_failed:
+ {
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+unknown_header:
+ {
+ GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
+ (NULL), ("Ignoring unknown header received"));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+header_error:
+ {
+ GST_ELEMENT_WARNING (rtpvorbispay, STREAM, DECODE,
+ (NULL), ("Error initializing header config"));
+ gst_buffer_unmap (buffer, &map);
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+}
+
+/* Sink pad event handler: drop any half-built output packet on FLUSH_STOP
+ * so stale payload bytes cannot survive a flush, then chain up. */
+static gboolean
+gst_rtp_vorbis_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ GstRtpVorbisPay *vorbispay = GST_RTP_VORBIS_PAY (payload);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
+ gst_rtp_vorbis_pay_clear_packet (vorbispay);
+
+ /* always let the parent class process the event too */
+ return GST_RTP_BASE_PAYLOAD_CLASS (parent_class)->sink_event (payload, event);
+}
+
+/* Element state change: after chaining up, release the pending packet and
+ * collected headers when going PAUSED -> READY. */
+static GstStateChangeReturn
+gst_rtp_vorbis_pay_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstRtpVorbisPay *vorbispay = GST_RTP_VORBIS_PAY (element);
+ GstStateChangeReturn res;
+
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+ gst_rtp_vorbis_pay_cleanup (vorbispay);
+
+ return res;
+}
+
+/* GObject property setter for GstRtpVorbisPay.
+ *
+ * Fix: warn on unknown property ids in the default case, consistent with
+ * the other property handlers in this plugin and GObject convention. */
+static void
+gst_rtp_vorbis_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpVorbisPay *rtpvorbispay;
+
+ rtpvorbispay = GST_RTP_VORBIS_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ rtpvorbispay->config_interval = g_value_get_uint (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter for GstRtpVorbisPay.
+ *
+ * Fix: warn on unknown property ids in the default case, consistent with
+ * the other property handlers in this plugin and GObject convention. */
+static void
+gst_rtp_vorbis_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpVorbisPay *rtpvorbispay;
+
+ rtpvorbispay = GST_RTP_VORBIS_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CONFIG_INTERVAL:
+ g_value_set_uint (value, rtpvorbispay->config_interval);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/rtp/gstrtpvorbispay.h b/gst/rtp/gstrtpvorbispay.h
new file mode 100644
index 0000000000..e1625efb0b
--- /dev/null
+++ b/gst/rtp/gstrtpvorbispay.h
@@ -0,0 +1,83 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_VORBIS_PAY_H__
+#define __GST_RTP_VORBIS_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbasepayload.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_VORBIS_PAY \
+ (gst_rtp_vorbis_pay_get_type())
+#define GST_RTP_VORBIS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_VORBIS_PAY,GstRtpVorbisPay))
+#define GST_RTP_VORBIS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_VORBIS_PAY,GstRtpVorbisPayClass))
+#define GST_IS_RTP_VORBIS_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_VORBIS_PAY))
+#define GST_IS_RTP_VORBIS_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_VORBIS_PAY))
+
+typedef struct _GstRtpVorbisPay GstRtpVorbisPay;
+typedef struct _GstRtpVorbisPayClass GstRtpVorbisPayClass;
+
+/* Instance structure of the RTP Vorbis payloader element. */
+struct _GstRtpVorbisPay
+{
+ GstRTPBasePayload payload;
+
+ /* the headers */
+ gboolean need_headers;
+ GList *headers;
+
+ /* queues of buffers along with some stats. */
+ GstBuffer *packet; /* output RTP packet being filled */
+ GList *packet_buffers; /* input buffers backing @packet (for meta copy) */
+ guint payload_pos; /* write offset in the payload */
+ guint payload_left; /* bytes of payload space remaining */
+ guint32 payload_ident; /* 24-bit codebook ident (FNV-1 hash of headers) */
+ guint8 payload_F; /* fragment type field */
+ guint8 payload_VDT; /* Vorbis data type field */
+ guint payload_pkts; /* number of Vorbis packets in @packet */
+ GstClockTime payload_timestamp;
+ GstClockTime payload_duration;
+
+ /* config (re-sending) */
+ guint8 *config_data; /* packed headers for in-band re-send */
+ guint config_size;
+ guint config_extra_len; /* length-field bytes not counted on re-send */
+ guint config_interval; /* seconds between config re-sends, 0 = never */
+ GstClockTime last_config; /* running time of last config send */
+
+ /* from the identification header */
+ gint rate;
+ gint channels;
+};
+
+struct _GstRtpVorbisPayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+GType gst_rtp_vorbis_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_VORBIS_PAY_H__ */
diff --git a/gst/rtp/gstrtpvp8depay.c b/gst/rtp/gstrtpvp8depay.c
new file mode 100644
index 0000000000..bf97d0a7cc
--- /dev/null
+++ b/gst/rtp/gstrtpvp8depay.c
@@ -0,0 +1,563 @@
+/* gstrtpvp8depay.c - Source for GstRtpVP8Depay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2011 Collabora Ltd.
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtpvp8depay.h"
+#include "gstrtputils.h"
+
+#include <gst/video/video.h>
+
+#include <stdio.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_vp8_depay_debug);
+#define GST_CAT_DEFAULT gst_rtp_vp8_depay_debug
+
+static void gst_rtp_vp8_depay_dispose (GObject * object);
+static void gst_rtp_vp8_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_vp8_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static GstBuffer *gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static GstStateChangeReturn gst_rtp_vp8_depay_change_state (GstElement *
+ element, GstStateChange transition);
+static gboolean gst_rtp_vp8_depay_handle_event (GstRTPBaseDepayload * depay,
+ GstEvent * event);
+static gboolean gst_rtp_vp8_depay_packet_lost (GstRTPBaseDepayload * depay,
+ GstEvent * event);
+
+G_DEFINE_TYPE (GstRtpVP8Depay, gst_rtp_vp8_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp8depay, "rtpvp8depay",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_VP8_DEPAY, rtp_element_init (plugin));
+
+static GstStaticPadTemplate gst_rtp_vp8_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-vp8"));
+
+static GstStaticPadTemplate gst_rtp_vp8_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "clock-rate = (int) 90000,"
+ "media = (string) \"video\","
+ "encoding-name = (string) { \"VP8\", \"VP8-DRAFT-IETF-01\" }"));
+
+#define DEFAULT_WAIT_FOR_KEYFRAME FALSE
+#define DEFAULT_REQUEST_KEYFRAME FALSE
+
+enum
+{
+ PROP_0,
+ PROP_WAIT_FOR_KEYFRAME,
+ PROP_REQUEST_KEYFRAME,
+};
+
+#define PICTURE_ID_NONE (UINT_MAX)
+#define IS_PICTURE_ID_15BITS(pid) (((guint)(pid) & 0x8000) != 0)
+
+/* Instance init: create the reassembly adapter and set property defaults. */
+static void
+gst_rtp_vp8_depay_init (GstRtpVP8Depay * self)
+{
+ self->adapter = gst_adapter_new ();
+ self->started = FALSE;
+ self->wait_for_keyframe = DEFAULT_WAIT_FOR_KEYFRAME;
+ self->request_keyframe = DEFAULT_REQUEST_KEYFRAME;
+ self->last_pushed_was_lost_event = FALSE;
+}
+
+/* Class init: install pad templates, element metadata, properties and the
+ * depayloader/element vfuncs. */
+static void
+gst_rtp_vp8_depay_class_init (GstRtpVP8DepayClass * gst_rtp_vp8_depay_class)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (gst_rtp_vp8_depay_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (gst_rtp_vp8_depay_class);
+ GstRTPBaseDepayloadClass *depay_class =
+ (GstRTPBaseDepayloadClass *) (gst_rtp_vp8_depay_class);
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_vp8_depay_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_vp8_depay_src_template);
+
+ gst_element_class_set_static_metadata (element_class, "RTP VP8 depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Extracts VP8 video from RTP packets)",
+ "Sjoerd Simons <sjoerd@luon.net>");
+
+ object_class->dispose = gst_rtp_vp8_depay_dispose;
+ object_class->set_property = gst_rtp_vp8_depay_set_property;
+ object_class->get_property = gst_rtp_vp8_depay_get_property;
+
+ g_object_class_install_property (object_class, PROP_WAIT_FOR_KEYFRAME,
+ g_param_spec_boolean ("wait-for-keyframe", "Wait for Keyframe",
+ "Wait for the next keyframe after packet loss",
+ DEFAULT_WAIT_FOR_KEYFRAME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpVP8Depay:request-keyframe:
+ *
+ * Request new keyframe when packet loss is detected
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (object_class, PROP_REQUEST_KEYFRAME,
+ g_param_spec_boolean ("request-keyframe", "Request Keyframe",
+ "Request new keyframe when packet loss is detected",
+ DEFAULT_REQUEST_KEYFRAME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ element_class->change_state = gst_rtp_vp8_depay_change_state;
+
+ depay_class->process_rtp_packet = gst_rtp_vp8_depay_process;
+ depay_class->handle_event = gst_rtp_vp8_depay_handle_event;
+ depay_class->packet_lost = gst_rtp_vp8_depay_packet_lost;
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_vp8_depay_debug, "rtpvp8depay", 0,
+ "VP8 Video RTP Depayloader");
+}
+
+/* GObject dispose: drop the adapter reference and chain up.
+ *
+ * Uses g_clear_object instead of the manual if/unref/NULL sequence; it is
+ * equivalent and safe against the repeated dispose runs GObject allows. */
+static void
+gst_rtp_vp8_depay_dispose (GObject * object)
+{
+ GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (object);
+
+ g_clear_object (&self->adapter);
+
+ /* release any references held by the object here */
+
+ if (G_OBJECT_CLASS (gst_rtp_vp8_depay_parent_class)->dispose)
+ G_OBJECT_CLASS (gst_rtp_vp8_depay_parent_class)->dispose (object);
+}
+
+/* GObject property setter for GstRtpVP8Depay. */
+static void
+gst_rtp_vp8_depay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpVP8Depay *depay = GST_RTP_VP8_DEPAY (object);
+
+ if (prop_id == PROP_WAIT_FOR_KEYFRAME)
+ depay->wait_for_keyframe = g_value_get_boolean (value);
+ else if (prop_id == PROP_REQUEST_KEYFRAME)
+ depay->request_keyframe = g_value_get_boolean (value);
+ else
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject property getter for GstRtpVP8Depay. */
+static void
+gst_rtp_vp8_depay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpVP8Depay *depay = GST_RTP_VP8_DEPAY (object);
+
+ if (prop_id == PROP_WAIT_FOR_KEYFRAME)
+ g_value_set_boolean (value, depay->wait_for_keyframe);
+ else if (prop_id == PROP_REQUEST_KEYFRAME)
+ g_value_set_boolean (value, depay->request_keyframe);
+ else
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* Signed distance id1 - id0 of two VP8 picture ids with wrap-around:
+ * the ids are shifted so their significant bits occupy the top of a 16-bit
+ * value, then the subtraction is sign-extended back down. The effective
+ * width (7 or 15 bits) is taken from id1's stored marker bit. */
+static gint
+picture_id_compare (guint16 id0, guint16 id1)
+{
+ guint shift = 16 - (IS_PICTURE_ID_15BITS (id1) ? 15 : 7);
+ id0 = id0 << shift;
+ id1 = id1 << shift;
+ return ((gint16) (id1 - id0)) >> shift;
+}
+
+/* Forward the deferred lost event (held back while waiting to see the next
+ * picture id) to the base class, then clear it.
+ * NOTE(review): ERROR log level looks too high for this informational
+ * message — likely should be DEBUG; confirm against upstream intent. */
+static void
+send_last_lost_event (GstRtpVP8Depay * self)
+{
+ if (self->last_lost_event) {
+ GST_ERROR_OBJECT (self,
+ "Sending the last stopped lost event: %" GST_PTR_FORMAT,
+ self->last_lost_event);
+ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp8_depay_parent_class)
+ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self),
+ self->last_lost_event);
+ gst_event_replace (&self->last_lost_event, NULL);
+ self->last_pushed_was_lost_event = TRUE;
+ }
+}
+
+/* Build and push a fresh GstRTPPacketLost custom event (zero duration) at
+ * @timestamp through the base class packet_lost handler. @new_picture_id
+ * and @reason are used for logging only.
+ *
+ * Fix: corrected "timestmap" typo in the warning message. */
+static void
+send_new_lost_event (GstRtpVP8Depay * self, GstClockTime timestamp,
+ guint new_picture_id, const gchar * reason)
+{
+ GstEvent *event;
+
+ if (!GST_CLOCK_TIME_IS_VALID (timestamp)) {
+ GST_WARNING_OBJECT (self, "Can't create lost event with invalid timestamp");
+ return;
+ }
+
+ event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+ gst_structure_new ("GstRTPPacketLost",
+ "timestamp", G_TYPE_UINT64, timestamp,
+ "duration", G_TYPE_UINT64, 0, NULL));
+
+ GST_DEBUG_OBJECT (self, "Pushing lost event "
+ "(picids 0x%x 0x%x, reason \"%s\"): %" GST_PTR_FORMAT,
+ self->last_picture_id, new_picture_id, reason, event);
+
+ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp8_depay_parent_class)
+ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self), event);
+
+ gst_event_unref (event);
+}
+
+/* Decide whether the deferred lost event should be forwarded now that the
+ * next picture id @new_picture_id is known: drop it when the id is absent
+ * or narrower than before, send it when the id sequence shows a real gap
+ * (distance != 1). Always clears the stored event afterwards. */
+static void
+send_last_lost_event_if_needed (GstRtpVP8Depay * self, guint new_picture_id)
+{
+ if (self->last_picture_id == PICTURE_ID_NONE)
+ return;
+
+ if (self->last_lost_event) {
+ gboolean send_lost_event = FALSE;
+ if (new_picture_id == PICTURE_ID_NONE) {
+ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event "
+ "(picture id does not exist): %" GST_PTR_FORMAT,
+ self->last_lost_event);
+ } else if (IS_PICTURE_ID_15BITS (self->last_picture_id) &&
+ !IS_PICTURE_ID_15BITS (new_picture_id)) {
+ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event "
+ "(picture id has less bits than before): %" GST_PTR_FORMAT,
+ self->last_lost_event);
+ } else if (picture_id_compare (self->last_picture_id, new_picture_id) != 1) {
+ GstStructure *s = gst_event_writable_structure (self->last_lost_event);
+
+ GST_DEBUG_OBJECT (self, "Sending the last stopped lost event "
+ "(gap in picture id %u %u): %" GST_PTR_FORMAT,
+ self->last_picture_id, new_picture_id, self->last_lost_event);
+ send_lost_event = TRUE;
+ /* Prevent rtpbasedepayload from dropping the event now
+ * that we have made sure the lost packet was not FEC */
+ gst_structure_remove_field (s, "might-have-been-fec");
+ }
+ if (send_lost_event)
+ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp8_depay_parent_class)
+ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self),
+ self->last_lost_event);
+
+ gst_event_replace (&self->last_lost_event, NULL);
+ }
+}
+
/* Depayload one RTP packet: parse the VP8 payload descriptor (RFC 7741),
 * strip it, accumulate the payload bytes in the adapter and, on the packet
 * carrying the RTP marker, emit the completed frame.
 *
 * Returns the finished frame buffer, or NULL when more packets are needed
 * or the packet was dropped. */
static GstBuffer *
gst_rtp_vp8_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
{
  GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
  GstBuffer *payload;
  guint8 *data;
  guint hdrsize = 1;            /* the first descriptor byte is mandatory */
  guint picture_id = PICTURE_ID_NONE;
  guint size = gst_rtp_buffer_get_payload_len (rtp);
  guint s_bit;
  guint part_id;
  gboolean frame_start;
  gboolean sent_lost_event = FALSE;

  if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
    /* Packet loss upstream: anything gathered so far is unusable */
    GST_DEBUG_OBJECT (self, "Discontinuity, flushing adapter");
    gst_adapter_clear (self->adapter);
    self->started = FALSE;

    if (self->wait_for_keyframe)
      self->waiting_for_keyframe = TRUE;

    /* Optionally ask upstream for a fresh keyframe to recover quickly */
    if (self->request_keyframe)
      gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
          gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
              TRUE, 0));
  }

  /* At least one header and one vp8 byte */
  if (G_UNLIKELY (size < 2))
    goto too_small;

  data = gst_rtp_buffer_get_payload (rtp);

  /* Descriptor byte 0: S (start of partition) bit and partition index */
  s_bit = (data[0] >> 4) & 0x1;
  part_id = (data[0] >> 0) & 0x7;

  /* Check X optional header */
  if ((data[0] & 0x80) != 0) {
    hdrsize++;
    /* Check I optional header */
    if ((data[1] & 0x80) != 0) {
      if (G_UNLIKELY (size < 3))
        goto too_small;
      hdrsize++;
      /* Check for 16 bits PictureID */
      picture_id = data[2];
      if ((data[2] & 0x80) != 0) {
        if (G_UNLIKELY (size < 4))
          goto too_small;
        hdrsize++;
        picture_id = (picture_id << 8) | data[3];
      }
    }
    /* Check L optional header */
    if ((data[1] & 0x40) != 0)
      hdrsize++;
    /* Check T or K optional headers */
    if ((data[1] & 0x20) != 0 || (data[1] & 0x10) != 0)
      hdrsize++;
  }
  GST_LOG_OBJECT (depay,
      "hdrsize %u, size %u, picture id 0x%x, s %u, part_id %u", hdrsize, size,
      picture_id, s_bit, part_id);
  if (G_UNLIKELY (hdrsize >= size))
    goto too_small;

  /* A frame starts with the first packet (S=1) of partition 0 */
  frame_start = (s_bit == 1) && (part_id == 0);
  if (frame_start) {
    if (G_UNLIKELY (self->started)) {
      /* The previous frame never saw its marker packet: drop it and
       * report the loss downstream */
      GST_DEBUG_OBJECT (depay, "Incomplete frame, flushing adapter");
      gst_adapter_clear (self->adapter);
      self->started = FALSE;

      send_new_lost_event (self, GST_BUFFER_PTS (rtp->buffer), picture_id,
          "Incomplete frame detected");
      sent_lost_event = TRUE;
    }
  }

  if (!self->started) {
    if (G_UNLIKELY (!frame_start)) {
      GST_DEBUG_OBJECT (depay,
          "The frame is missing the first packet, ignoring the packet");
      if (self->stop_lost_events && !sent_lost_event) {
        send_last_lost_event (self);
        self->stop_lost_events = FALSE;
      }
      goto done;
    }

    GST_LOG_OBJECT (depay, "Found the start of the frame");

    /* Now that the new picture id is known, decide whether the parked
     * lost event corresponds to a real gap or to FEC */
    if (self->stop_lost_events && !sent_lost_event) {
      send_last_lost_event_if_needed (self, picture_id);
      self->stop_lost_events = FALSE;
    }

    self->started = TRUE;
  }

  /* Strip the descriptor and queue the raw VP8 bytes */
  payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
  gst_adapter_push (self->adapter, payload);
  self->last_picture_id = picture_id;

  /* Marker indicates that it was the last rtp packet for this frame */
  if (gst_rtp_buffer_get_marker (rtp)) {
    GstBuffer *out;
    guint8 header[10];

    GST_LOG_OBJECT (depay,
        "Found the end of the frame (%" G_GSIZE_FORMAT " bytes)",
        gst_adapter_available (self->adapter));
    if (gst_adapter_available (self->adapter) < 10)
      goto too_small;
    /* Peek the first 10 bytes: VP8 frame tag plus keyframe start code
     * and dimensions */
    gst_adapter_copy (self->adapter, &header, 0, 10);

    out = gst_adapter_take_buffer (self->adapter,
        gst_adapter_available (self->adapter));

    self->started = FALSE;

    /* mark keyframes */
    out = gst_buffer_make_writable (out);
    /* Filter away all metas that are not sensible to copy */
    gst_rtp_drop_non_video_meta (self, out);
    if ((header[0] & 0x01)) {
      /* Frame-tag bit 0 set: inter frame */
      GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);

      if (self->waiting_for_keyframe) {
        gst_buffer_unref (out);
        out = NULL;
        GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
        gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
            gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
                TRUE, 0));
      }
    } else {
      guint profile, width, height;

      GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);
      GST_DEBUG_OBJECT (self, "Processed keyframe");

      /* Keyframes carry profile and dimensions in the uncompressed chunk */
      profile = (header[0] & 0x0e) >> 1;
      width = GST_READ_UINT16_LE (header + 6) & 0x3fff;
      height = GST_READ_UINT16_LE (header + 8) & 0x3fff;

      /* Renegotiate caps only when something actually changed */
      if (G_UNLIKELY (self->last_width != width ||
              self->last_height != height || self->last_profile != profile)) {
        gchar profile_str[3];
        GstCaps *srccaps;

        snprintf (profile_str, 3, "%u", profile);
        srccaps = gst_caps_new_simple ("video/x-vp8",
            "framerate", GST_TYPE_FRACTION, 0, 1,
            "height", G_TYPE_INT, height,
            "width", G_TYPE_INT, width,
            "profile", G_TYPE_STRING, profile_str, NULL);

        gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
        gst_caps_unref (srccaps);

        self->last_width = width;
        self->last_height = height;
        self->last_profile = profile;
      }
      self->waiting_for_keyframe = FALSE;
    }

    /* From here on, hold back lost events until the next picture id shows
     * whether packets were really lost (they might have been FEC) */
    if (picture_id != PICTURE_ID_NONE)
      self->stop_lost_events = TRUE;

    self->last_pushed_was_lost_event = FALSE;

    return out;
  }

done:
  return NULL;

too_small:
  GST_DEBUG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
  gst_adapter_clear (self->adapter);
  self->started = FALSE;

  goto done;
}
+
+static GstStateChangeReturn
+gst_rtp_vp8_depay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ self->last_profile = -1;
+ self->last_height = -1;
+ self->last_width = -1;
+ self->waiting_for_keyframe = TRUE;
+ self->caps_sent = FALSE;
+ self->last_picture_id = PICTURE_ID_NONE;
+ gst_event_replace (&self->last_lost_event, NULL);
+ self->stop_lost_events = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return
+ GST_ELEMENT_CLASS (gst_rtp_vp8_depay_parent_class)->change_state (element,
+ transition);
+}
+
+static gboolean
+gst_rtp_vp8_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event)
+{
+ GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ self->last_profile = -1;
+ self->last_height = -1;
+ self->last_width = -1;
+ self->last_picture_id = PICTURE_ID_NONE;
+ gst_event_replace (&self->last_lost_event, NULL);
+ self->stop_lost_events = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return
+ GST_RTP_BASE_DEPAYLOAD_CLASS
+ (gst_rtp_vp8_depay_parent_class)->handle_event (depay, event);
+}
+
/* Intercept GstRTPPacketLost events from the base class.  While
 * stop_lost_events is set, an event that might correspond to a FEC packet
 * is parked in self->last_lost_event instead of being forwarded; otherwise,
 * when we are mid-picture, the "might-have-been-fec" hint is stripped so
 * the base depayloader does not drop the event. */
static gboolean
gst_rtp_vp8_depay_packet_lost (GstRTPBaseDepayload * depay, GstEvent * event)
{
  GstRtpVP8Depay *self = GST_RTP_VP8_DEPAY (depay);
  const GstStructure *s;
  gboolean might_have_been_fec;
  gboolean unref_event = FALSE;
  gboolean ret;

  s = gst_event_get_structure (event);

  if (self->stop_lost_events) {
    if (gst_structure_get_boolean (s, "might-have-been-fec",
            &might_have_been_fec)
        && might_have_been_fec) {
      GST_DEBUG_OBJECT (depay, "Stopping lost event %" GST_PTR_FORMAT, event);
      /* Park the event; it is sent or dropped once the next picture id
       * arrives (see send_last_lost_event_if_needed) */
      gst_event_replace (&self->last_lost_event, event);
      return TRUE;
    }
  } else if (self->last_picture_id != PICTURE_ID_NONE) {
    GstStructure *s;

    /* Need a writable event before touching its structure */
    if (!gst_event_is_writable (event)) {
      event = gst_event_copy (event);
      unref_event = TRUE;
    }

    s = gst_event_writable_structure (event);

    /* We are currently processing a picture, let's make sure the
     * base depayloader doesn't drop this lost event */
    gst_structure_remove_field (s, "might-have-been-fec");
  }

  self->last_pushed_was_lost_event = TRUE;

  ret =
      GST_RTP_BASE_DEPAYLOAD_CLASS
      (gst_rtp_vp8_depay_parent_class)->packet_lost (depay, event);

  /* Only unref if we made a private copy above */
  if (unref_event)
    gst_event_unref (event);

  return ret;
}
diff --git a/gst/rtp/gstrtpvp8depay.h b/gst/rtp/gstrtpvp8depay.h
new file mode 100644
index 0000000000..76be52adc7
--- /dev/null
+++ b/gst/rtp/gstrtpvp8depay.h
@@ -0,0 +1,81 @@
+/*
+ * gstrtpvp8depay.h - Header for GstRtpVP8Depay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef __GST_RTP_VP8_DEPAY_H__
+#define __GST_RTP_VP8_DEPAY_H__
+
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_VP8_DEPAY \
+ (gst_rtp_vp8_depay_get_type())
+#define GST_RTP_VP8_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_RTP_VP8_DEPAY, GstRtpVP8Depay))
+#define GST_RTP_VP8_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_RTP_VP8_DEPAY, \
+ GstRtpVP8DepayClass))
+#define GST_IS_RTP_VP8_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_RTP_VP8_DEPAY))
+#define GST_IS_RTP_VP8_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_RTP_VP8_DEPAY))
+#define GST_RTP_VP8_DEPAY_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_VP8_DEPAY, \
+ GstRtpVP8DepayClass))
+
+typedef struct _GstRtpVP8Depay GstRtpVP8Depay;
+typedef struct _GstRtpVP8DepayClass GstRtpVP8DepayClass;
+
/* Class structure: no class-level additions beyond the base depayloader. */
struct _GstRtpVP8DepayClass
{
  GstRTPBaseDepayloadClass parent_class;
};
+
struct _GstRtpVP8Depay
{
  GstRTPBaseDepayload parent;
  GstAdapter *adapter;          /* accumulates payload bytes of one frame */
  gboolean started;             /* inside a frame: start seen, marker not yet */

  gboolean caps_sent;
  /* In between pictures, we might store GstRTPPacketLost events instead
   * of forwarding them immediately, we check upon reception of a new
   * picture id whether a gap was introduced, in which case we do forward
   * the event. This is to avoid forwarding spurious lost events for FEC
   * packets.
   */
  gboolean stop_lost_events;    /* hold back lost events until next picture */
  GstEvent *last_lost_event;    /* the parked lost event, if any */
  gboolean waiting_for_keyframe; /* drop inter-frames until a keyframe */
  gint last_profile;            /* caps state; -1 until first keyframe */
  gint last_width;              /* last negotiated width (caps state) */
  gint last_height;             /* last negotiated height (caps state) */
  guint last_picture_id;        /* picture id of the last processed packet */

  /* NOTE(review): these two look like configuration read on DISCONT;
   * presumably set as element properties outside this chunk — confirm */
  gboolean wait_for_keyframe;
  gboolean request_keyframe;
  gboolean last_pushed_was_lost_event;
};
+
+GType gst_rtp_vp8_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* #ifndef __GST_RTP_VP8_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpvp8pay.c b/gst/rtp/gstrtpvp8pay.c
new file mode 100644
index 0000000000..61bfcd6b2b
--- /dev/null
+++ b/gst/rtp/gstrtpvp8pay.c
@@ -0,0 +1,727 @@
+/*
+ * gstrtpvp8pay.c - Source for GstRtpVP8Pay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2011 Collabora Ltd.
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/base/gstbitreader.h>
+#include <gst/rtp/gstrtppayloads.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "dboolhuff.h"
+#include "gstrtpvp8pay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_vp8_pay_debug);
+#define GST_CAT_DEFAULT gst_rtp_vp8_pay_debug
+
+#define DEFAULT_PICTURE_ID_MODE VP8_PAY_NO_PICTURE_ID
+#define DEFAULT_PICTURE_ID_OFFSET (-1)
+
+enum
+{
+ PROP_0,
+ PROP_PICTURE_ID_MODE,
+ PROP_PICTURE_ID_OFFSET
+};
+
#define GST_TYPE_RTP_VP8_PAY_PICTURE_ID_MODE (gst_rtp_vp8_pay_picture_id_mode_get_type())
/* Lazily register and return the GEnum type for the picture-id-mode
 * property (none / 7-bit / 15-bit). */
static GType
gst_rtp_vp8_pay_picture_id_mode_get_type (void)
{
  static GType mode_type = 0;
  static const GEnumValue modes[] = {
    {VP8_PAY_NO_PICTURE_ID, "No Picture ID", "none"},
    {VP8_PAY_PICTURE_ID_7BITS, "7-bit Picture ID", "7-bit"},
    {VP8_PAY_PICTURE_ID_15BITS, "15-bit Picture ID", "15-bit"},
    {0, NULL, NULL},
  };

  /* Register on first use only */
  if (!mode_type) {
    mode_type = g_enum_register_static ("GstVP8RTPPayMode", modes);
  }
  return mode_type;
}
+
+static void gst_rtp_vp8_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_vp8_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static GstFlowReturn gst_rtp_vp8_pay_handle_buffer (GstRTPBasePayload * payload,
+ GstBuffer * buffer);
+static gboolean gst_rtp_vp8_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+static gboolean gst_rtp_vp8_pay_set_caps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+
+G_DEFINE_TYPE (GstRtpVP8Pay, gst_rtp_vp8_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp8pay, "rtpvp8pay",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_VP8_PAY, rtp_element_init (plugin));
+
+static GstStaticPadTemplate gst_rtp_vp8_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ","
+ "clock-rate = (int) 90000, encoding-name = (string) { \"VP8\", \"VP8-DRAFT-IETF-01\" }"));
+
+static GstStaticPadTemplate gst_rtp_vp8_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-vp8"));
+
+static gint
+picture_id_field_len (PictureIDMode mode)
+{
+ if (VP8_PAY_NO_PICTURE_ID == mode)
+ return 0;
+ if (VP8_PAY_PICTURE_ID_7BITS == mode)
+ return 7;
+ return 15;
+}
+
+static void
+gst_rtp_vp8_pay_picture_id_reset (GstRtpVP8Pay * obj)
+{
+ gint nbits;
+
+ if (obj->picture_id_offset == -1)
+ obj->picture_id = g_random_int ();
+ else
+ obj->picture_id = obj->picture_id_offset;
+
+ nbits = picture_id_field_len (obj->picture_id_mode);
+ obj->picture_id &= (1 << nbits) - 1;
+}
+
+static void
+gst_rtp_vp8_pay_picture_id_increment (GstRtpVP8Pay * obj)
+{
+ gint nbits;
+
+ if (obj->picture_id_mode == VP8_PAY_NO_PICTURE_ID)
+ return;
+
+ nbits = picture_id_field_len (obj->picture_id_mode);
+ obj->picture_id++;
+ obj->picture_id &= (1 << nbits) - 1;
+}
+
+static void
+gst_rtp_vp8_pay_reset (GstRtpVP8Pay * obj)
+{
+ gst_rtp_vp8_pay_picture_id_reset (obj);
+ /* tl0picidx MAY start at a random value, but there's no point. Initialize
+ * so that first packet will use 0 for convenience */
+ obj->tl0picidx = -1;
+ obj->temporal_scalability_fields_present = FALSE;
+}
+
+static void
+gst_rtp_vp8_pay_init (GstRtpVP8Pay * obj)
+{
+ obj->picture_id_mode = DEFAULT_PICTURE_ID_MODE;
+ obj->picture_id_offset = DEFAULT_PICTURE_ID_OFFSET;
+ gst_rtp_vp8_pay_reset (obj);
+}
+
/* Class init: install the two properties, register the pad templates and
 * element metadata, and hook up the payloader vfuncs. */
static void
gst_rtp_vp8_pay_class_init (GstRtpVP8PayClass * gst_rtp_vp8_pay_class)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (gst_rtp_vp8_pay_class);
  GstElementClass *element_class = GST_ELEMENT_CLASS (gst_rtp_vp8_pay_class);
  GstRTPBasePayloadClass *pay_class =
      GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_vp8_pay_class);

  gobject_class->set_property = gst_rtp_vp8_pay_set_property;
  gobject_class->get_property = gst_rtp_vp8_pay_get_property;

  g_object_class_install_property (gobject_class, PROP_PICTURE_ID_MODE,
      g_param_spec_enum ("picture-id-mode", "Picture ID Mode",
          "The picture ID mode for payloading",
          GST_TYPE_RTP_VP8_PAY_PICTURE_ID_MODE, DEFAULT_PICTURE_ID_MODE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /**
   * rtpvp8pay:picture-id-offset:
   *
   * Offset to add to the initial picture-id (-1 = random)
   *
   * Since: 1.20
   */
  g_object_class_install_property (gobject_class, PROP_PICTURE_ID_OFFSET,
      g_param_spec_int ("picture-id-offset", "Picture ID offset",
          "Offset to add to the initial picture-id (-1 = random)",
          -1, 0x7FFF, DEFAULT_PICTURE_ID_OFFSET,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gst_element_class_add_static_pad_template (element_class,
      &gst_rtp_vp8_pay_sink_template);
  gst_element_class_add_static_pad_template (element_class,
      &gst_rtp_vp8_pay_src_template);

  gst_element_class_set_static_metadata (element_class, "RTP VP8 payloader",
      "Codec/Payloader/Network/RTP",
      "Puts VP8 video in RTP packets", "Sjoerd Simons <sjoerd@luon.net>");

  pay_class->handle_buffer = gst_rtp_vp8_pay_handle_buffer;
  pay_class->sink_event = gst_rtp_vp8_pay_sink_event;
  pay_class->set_caps = gst_rtp_vp8_pay_set_caps;

  GST_DEBUG_CATEGORY_INIT (gst_rtp_vp8_pay_debug, "rtpvp8pay", 0,
      "VP8 Video RTP Payloader");

  /* Expose the enum so introspection/docs tooling can see it */
  gst_type_mark_as_plugin_api (GST_TYPE_RTP_VP8_PAY_PICTURE_ID_MODE, 0);
}
+
+static void
+gst_rtp_vp8_pay_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstRtpVP8Pay *rtpvp8pay = GST_RTP_VP8_PAY (object);
+
+ switch (prop_id) {
+ case PROP_PICTURE_ID_MODE:
+ rtpvp8pay->picture_id_mode = g_value_get_enum (value);
+ gst_rtp_vp8_pay_picture_id_reset (rtpvp8pay);
+ break;
+ case PROP_PICTURE_ID_OFFSET:
+ rtpvp8pay->picture_id_offset = g_value_get_int (value);
+ gst_rtp_vp8_pay_picture_id_reset (rtpvp8pay);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_vp8_pay_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstRtpVP8Pay *rtpvp8pay = GST_RTP_VP8_PAY (object);
+
+ switch (prop_id) {
+ case PROP_PICTURE_ID_MODE:
+ g_value_set_enum (value, rtpvp8pay->picture_id_mode);
+ break;
+ case PROP_PICTURE_ID_OFFSET:
+ g_value_set_int (value, rtpvp8pay->picture_id_offset);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
/* Parse the VP8 frame header to discover the token-partition layout
 * (offsets and sizes stored in self->partition_offset/partition_size) and
 * whether the frame is a keyframe (self->is_keyframe).  Walks the first
 * part of the compressed header with the boolean decoder only to skip
 * fields preceding the partition-count bits.
 *
 * Returns FALSE on any malformed or truncated input. */
static gboolean
gst_rtp_vp8_pay_parse_frame (GstRtpVP8Pay * self, GstBuffer * buffer,
    gsize buffer_size)
{
  GstMapInfo map = GST_MAP_INFO_INIT;
  GstBitReader reader;
  guint8 *data;
  gsize size;
  int i;
  gboolean keyframe;
  guint32 partition0_size;
  guint8 version;
  guint8 tmp8 = 0;
  guint8 partitions;
  guint offset;
  BOOL_DECODER bc;
  guint8 *pdata;

  /* The 3-byte frame tag is the minimum parsable unit */
  if (G_UNLIKELY (buffer_size < 3))
    goto error;

  if (!gst_buffer_map (buffer, &map, GST_MAP_READ) || !map.data)
    goto error;

  data = map.data;
  size = map.size;

  gst_bit_reader_init (&reader, data, size);

  /* Frame tag bit 0: 0 = keyframe; bits 1-3: version */
  self->is_keyframe = keyframe = ((data[0] & 0x1) == 0);
  version = (data[0] >> 1) & 0x7;

  if (G_UNLIKELY (version > 3)) {
    GST_ERROR_OBJECT (self, "Unknown VP8 version %u", version);
    goto error;
  }

  /* keyframe, version and show_frame use 5 bits */
  partition0_size = data[2] << 11 | data[1] << 3 | (data[0] >> 5);

  /* Include the uncompressed data blob in the first partition */
  offset = keyframe ? 10 : 3;
  partition0_size += offset;

  /* Skip the 3-byte frame tag */
  if (!gst_bit_reader_skip (&reader, 24))
    goto error;

  if (keyframe) {
    /* check start tag: 0x9d 0x01 0x2a */
    if (!gst_bit_reader_get_bits_uint8 (&reader, &tmp8, 8) || tmp8 != 0x9d)
      goto error;

    if (!gst_bit_reader_get_bits_uint8 (&reader, &tmp8, 8) || tmp8 != 0x01)
      goto error;

    if (!gst_bit_reader_get_bits_uint8 (&reader, &tmp8, 8) || tmp8 != 0x2a)
      goto error;

    /* Skip horizontal size code (16 bits) vertical size code (16 bits) */
    if (!gst_bit_reader_skip (&reader, 32))
      goto error;
  }

  /* The compressed header starts after the uncompressed chunk; the bit
   * reader checks above guarantee size >= offset on the keyframe path */
  offset = keyframe ? 10 : 3;
  vp8dx_start_decode (&bc, data + offset, size - offset);

  if (keyframe) {
    /* color space (1 bit) and clamping type (1 bit) */
    vp8dx_decode_bool (&bc, 0x80);
    vp8dx_decode_bool (&bc, 0x80);
  }

  /* segmentation_enabled */
  if (vp8dx_decode_bool (&bc, 0x80)) {
    guint8 update_mb_segmentation_map = vp8dx_decode_bool (&bc, 0x80);
    guint8 update_segment_feature_data = vp8dx_decode_bool (&bc, 0x80);

    if (update_segment_feature_data) {
      /* skip segment feature mode */
      vp8dx_decode_bool (&bc, 0x80);

      /* quantizer update */
      for (i = 0; i < 4; i++) {
        /* skip flagged quantizer value (7 bits) and sign (1 bit) */
        if (vp8dx_decode_bool (&bc, 0x80))
          vp8_decode_value (&bc, 8);
      }

      /* loop filter update */
      for (i = 0; i < 4; i++) {
        /* skip flagged lf update value (6 bits) and sign (1 bit) */
        if (vp8dx_decode_bool (&bc, 0x80))
          vp8_decode_value (&bc, 7);
      }
    }

    if (update_mb_segmentation_map) {
      /* segment prob update */
      for (i = 0; i < 3; i++) {
        /* skip flagged segment prob */
        if (vp8dx_decode_bool (&bc, 0x80))
          vp8_decode_value (&bc, 8);
      }
    }
  }

  /* skip filter type (1 bit), loop filter level (6 bits) and
   * sharpness level (3 bits) */
  vp8_decode_value (&bc, 1);
  vp8_decode_value (&bc, 6);
  vp8_decode_value (&bc, 3);

  /* loop_filter_adj_enabled */
  if (vp8dx_decode_bool (&bc, 0x80)) {

    /* delta update */
    if (vp8dx_decode_bool (&bc, 0x80)) {

      for (i = 0; i < 8; i++) {
        /* 8 updates, 1 bit indicate whether there is one and if follow by a
         * 7 bit update */
        if (vp8dx_decode_bool (&bc, 0x80))
          vp8_decode_value (&bc, 7);
      }
    }
  }

  /* Bail out if the boolean decoder ran past the end of the data */
  if (vp8dx_bool_error (&bc))
    goto error;

  /* log2 of the token partition count */
  tmp8 = vp8_decode_value (&bc, 2);

  partitions = 1 << tmp8;

  /* Check if things are still sensible */
  if (partition0_size + (partitions - 1) * 3 >= size)
    goto error;

  /* partition data is right after the mode partition */
  pdata = data + partition0_size;

  /* Set up mapping: partition 0 also swallows the 3-byte-per-partition
   * size table of the token partitions */
  self->n_partitions = partitions + 1;
  self->partition_offset[0] = 0;
  self->partition_size[0] = partition0_size + (partitions - 1) * 3;

  self->partition_offset[1] = self->partition_size[0];
  for (i = 1; i < partitions; i++) {
    /* 24-bit little-endian partition size */
    guint psize = (pdata[2] << 16 | pdata[1] << 8 | pdata[0]);

    pdata += 3;
    self->partition_size[i] = psize;
    self->partition_offset[i + 1] = self->partition_offset[i] + psize;
  }

  /* Check that our partition offsets and sizes don't go outsize the buffer
   * size. */
  if (self->partition_offset[i] >= size)
    goto error;

  /* The last partition runs to the end of the buffer */
  self->partition_size[i] = size - self->partition_offset[i];

  self->partition_offset[i + 1] = size;

  gst_buffer_unmap (buffer, &map);

  if (keyframe)
    GST_DEBUG_OBJECT (self, "Parsed keyframe");

  return TRUE;

error:
  GST_DEBUG ("Failed to parse frame");
  /* map.memory is non-NULL only after a successful gst_buffer_map */
  if (map.memory != NULL) {
    gst_buffer_unmap (buffer, &map);
  }
  return FALSE;
}
+
+static guint
+gst_rtp_vp8_offset_to_partition (GstRtpVP8Pay * self, guint offset)
+{
+ int i;
+
+ for (i = 1; i < self->n_partitions; i++) {
+ if (offset < self->partition_offset[i])
+ return i - 1;
+ }
+
+ return i - 1;
+}
+
+static gsize
+gst_rtp_vp8_calc_header_len (GstRtpVP8Pay * self)
+{
+ gsize len;
+
+ switch (self->picture_id_mode) {
+ case VP8_PAY_PICTURE_ID_7BITS:
+ len = 1;
+ break;
+ case VP8_PAY_PICTURE_ID_15BITS:
+ len = 2;
+ break;
+ case VP8_PAY_NO_PICTURE_ID:
+ default:
+ len = 0;
+ break;
+ }
+
+ if (self->temporal_scalability_fields_present) {
+ /* Add on space for TL0PICIDX and TID/Y/KEYIDX */
+ len += 2;
+ }
+
+ if (len > 0) {
+ /* All fields above are extension, so allocate space for the ECB field */
+ len++;
+ }
+
+ return len + 1; /* computed + fixed size header */
+}
+
/* When growing the vp8 header keep max payload len calculation in sync */
/* Build one output RTP buffer containing only the VP8 payload descriptor
 * (RFC 7741 section 4.2); the payload bytes are appended by the caller.
 * @partid/@start/@mark set the PartID, S bit and RTP marker; @meta, when
 * present, supplies the temporal-layer information. */
static GstBuffer *
gst_rtp_vp8_create_header_buffer (GstRtpVP8Pay * self, guint8 partid,
    gboolean start, gboolean mark, GstBuffer * in, GstCustomMeta * meta)
{
  GstBuffer *out;
  guint8 *p;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;

  out =
      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD_CAST
      (self), gst_rtp_vp8_calc_header_len (self), 0, 0);
  gst_rtp_buffer_map (out, GST_MAP_READWRITE, &rtpbuffer);
  p = gst_rtp_buffer_get_payload (&rtpbuffer);

  /* X=0,R=0,N=0,S=start,PartID=partid */
  p[0] = (start << 4) | partid;
  if (GST_BUFFER_FLAG_IS_SET (in, GST_BUFFER_FLAG_DROPPABLE)) {
    /* Enable N=1 */
    p[0] |= 0x20;
  }

  if (self->picture_id_mode != VP8_PAY_NO_PICTURE_ID ||
      self->temporal_scalability_fields_present) {
    gint index;

    /* Enable X=1 */
    p[0] |= 0x80;

    /* X: I=0,L=0,T=0,K=0,RSV=0 */
    p[1] = 0x00;
    if (self->picture_id_mode != VP8_PAY_NO_PICTURE_ID) {
      /* Set I bit */
      p[1] |= 0x80;
    }
    if (self->temporal_scalability_fields_present) {
      /* Set L and T bits */
      p[1] |= 0x60;
    }

    /* Insert picture ID */
    if (self->picture_id_mode == VP8_PAY_PICTURE_ID_7BITS) {
      /* I: 7 bit picture_id */
      p[2] = self->picture_id & 0x7F;
      index = 3;
    } else if (self->picture_id_mode == VP8_PAY_PICTURE_ID_15BITS) {
      /* I: 15 bit picture_id */
      p[2] = 0x80 | ((self->picture_id & 0x7FFF) >> 8);
      p[3] = self->picture_id & 0xFF;
      index = 4;
    } else {
      index = 2;
    }

    /* Insert TL0PICIDX and TID/Y/KEYIDX */
    if (self->temporal_scalability_fields_present) {
      /* The meta contains tl0picidx from the encoder, but we need to ensure
       * that tl0picidx is increasing correctly. The encoder may reset it's
       * state and counter, but we cannot. Therefore, we cannot simply copy
       * the value into the header.*/
      guint temporal_layer = 0;
      gboolean layer_sync = FALSE;
      gboolean use_temporal_scaling = FALSE;

      if (meta) {
        GstStructure *s = gst_custom_meta_get_structure (meta);
        gst_structure_get_boolean (s, "use-temporal-scaling",
            &use_temporal_scaling);

        if (use_temporal_scaling)
          gst_structure_get (s, "layer-id", G_TYPE_UINT, &temporal_layer,
              "layer-sync", G_TYPE_BOOLEAN, &layer_sync, NULL);
      }

      /* FIXME: Support a prediction structure where higher layers don't
       * necessarily refer to the last base layer frame, ie they use an older
       * tl0picidx as signalled in the meta */
      if (temporal_layer == 0 && start)
        self->tl0picidx++;
      p[index] = self->tl0picidx & 0xFF;
      p[index + 1] = ((temporal_layer << 6) | (layer_sync << 5)) & 0xFF;
    }
  }

  gst_rtp_buffer_set_marker (&rtpbuffer, mark);

  gst_rtp_buffer_unmap (&rtpbuffer);

  /* The header buffer carries the timing of the input frame */
  GST_BUFFER_DURATION (out) = GST_BUFFER_DURATION (in);
  GST_BUFFER_PTS (out) = GST_BUFFER_PTS (in);

  return out;
}
+
+static gboolean
+foreach_metadata_drop (GstBuffer * buf, GstMeta ** meta, gpointer user_data)
+{
+ GstElement *element = user_data;
+ const GstMetaInfo *info = (*meta)->info;
+
+ if (gst_meta_info_is_custom (info) &&
+ gst_custom_meta_has_name ((GstCustomMeta *) * meta, "GstVP8Meta")) {
+ GST_DEBUG_OBJECT (element, "dropping GstVP8Meta");
+ *meta = NULL;
+ }
+
+ return TRUE;
+}
+
/* Strip the encoder-provided GstVP8Meta from @buf; it is only meaningful
 * inside this payloader and must not travel on the wire. */
static void
gst_rtp_vp8_drop_vp8_meta (gpointer element, GstBuffer * buf)
{
  gst_buffer_foreach_meta (buf, foreach_metadata_drop, element);
}
+
/* Emit one RTP packet for the frame slice starting at @offset: build the
 * payload descriptor, append up to @max_payload_len payload bytes, and add
 * the packet to @list.  Returns the number of payload bytes consumed. */
static guint
gst_rtp_vp8_payload_next (GstRtpVP8Pay * self, GstBufferList * list,
    guint offset, GstBuffer * buffer, gsize buffer_size, gsize max_payload_len,
    GstCustomMeta * meta)
{
  guint partition;
  GstBuffer *header;
  GstBuffer *sub;
  GstBuffer *out;
  gboolean mark;
  gboolean start;
  gsize remaining;
  gsize available;

  /* Take as much as fits in one packet, capped by what is left */
  remaining = buffer_size - offset;
  available = max_payload_len;
  if (available > remaining)
    available = remaining;

  if (meta) {
    /* If meta is present, then we have no partition offset information,
     * so always emit PID 0 and set the start bit for the first packet
     * of a frame only (c.f. RFC7741 $4.4)
     */
    partition = 0;
    start = (offset == 0);
  } else {
    partition = gst_rtp_vp8_offset_to_partition (self, offset);
    g_assert (partition < self->n_partitions);
    start = (offset == self->partition_offset[partition]);
  }

  /* The marker goes on the packet that carries the frame's last byte */
  mark = (remaining == available);
  /* whole set of partitions, payload them and done */
  header = gst_rtp_vp8_create_header_buffer (self, partition,
      start, mark, buffer, meta);
  sub = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, available);

  /* Keep video metas on the outgoing packet, but never the VP8 meta */
  gst_rtp_copy_video_meta (self, header, buffer);
  gst_rtp_vp8_drop_vp8_meta (self, header);

  out = gst_buffer_append (header, sub);

  gst_buffer_list_insert (list, -1, out);

  return available;
}
+
+
+static GstFlowReturn
+gst_rtp_vp8_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
+{
+ GstRtpVP8Pay *self = GST_RTP_VP8_PAY (payload);
+ GstFlowReturn ret;
+ GstBufferList *list;
+ GstCustomMeta *meta;
+ gsize size, max_paylen;
+ guint offset, mtu, vp8_hdr_len;
+
+ size = gst_buffer_get_size (buffer);
+ meta = gst_buffer_get_custom_meta (buffer, "GstVP8Meta");
+ if (G_UNLIKELY (!gst_rtp_vp8_pay_parse_frame (self, buffer, size))) {
+ GST_ELEMENT_ERROR (self, STREAM, ENCODE, (NULL),
+ ("Failed to parse VP8 frame"));
+ return GST_FLOW_ERROR;
+ }
+
+ if (meta) {
+ GstStructure *s = gst_custom_meta_get_structure (meta);
+ gboolean use_temporal_scaling;
+ /* For interop it's most likely better to keep the temporal scalability
+ * fields present if the stream previously had them present. Alternating
+ * whether these fields are present or not may confuse the receiver. */
+
+ gst_structure_get_boolean (s, "use-temporal-scaling",
+ &use_temporal_scaling);
+ if (use_temporal_scaling)
+ self->temporal_scalability_fields_present = TRUE;
+ }
+
+ mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);
+ vp8_hdr_len = gst_rtp_vp8_calc_header_len (self);
+ max_paylen = gst_rtp_buffer_calc_payload_len (mtu - vp8_hdr_len, 0,
+ gst_rtp_base_payload_get_source_count (payload, buffer));
+
+ list = gst_buffer_list_new_sized ((size / max_paylen) + 1);
+
+ offset = 0;
+ while (offset < size) {
+ offset +=
+ gst_rtp_vp8_payload_next (self, list, offset, buffer, size,
+ max_paylen, meta);
+ }
+
+ ret = gst_rtp_base_payload_push_list (payload, list);
+
+ gst_rtp_vp8_pay_picture_id_increment (self);
+
+ gst_buffer_unref (buffer);
+
+ return ret;
+}
+
+static gboolean
+gst_rtp_vp8_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ GstRtpVP8Pay *self = GST_RTP_VP8_PAY (payload);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) {
+ gst_rtp_vp8_pay_reset (self);
+ }
+
+ return GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_vp8_pay_parent_class)->sink_event
+ (payload, event);
+}
+
/* Negotiate src caps: prefer the standard "VP8" encoding-name, but fall
 * back to the legacy "VP8-DRAFT-IETF-01" when the downstream peer's caps
 * cannot intersect with "VP8". */
static gboolean
gst_rtp_vp8_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
{
  GstCaps *src_caps;
  const char *encoding_name = "VP8";

  src_caps = gst_pad_get_allowed_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  if (src_caps) {
    GstStructure *s;
    const GValue *value;

    s = gst_caps_get_structure (src_caps, 0);

    if (gst_structure_has_field (s, "encoding-name")) {
      GValue default_value = G_VALUE_INIT;

      g_value_init (&default_value, G_TYPE_STRING);
      g_value_set_static_string (&default_value, encoding_name);

      /* If the peer cannot accept "VP8", use the draft name instead */
      value = gst_structure_get_value (s, "encoding-name");
      if (!gst_value_can_intersect (&default_value, value))
        encoding_name = "VP8-DRAFT-IETF-01";
    }
    gst_caps_unref (src_caps);
  }

  gst_rtp_base_payload_set_options (payload, "video", TRUE,
      encoding_name, 90000);

  return gst_rtp_base_payload_set_outcaps (payload, NULL);
}
diff --git a/gst/rtp/gstrtpvp8pay.h b/gst/rtp/gstrtpvp8pay.h
new file mode 100644
index 0000000000..30ad99a67c
--- /dev/null
+++ b/gst/rtp/gstrtpvp8pay.h
@@ -0,0 +1,75 @@
+/*
+ * gstrtpvp8pay.h - Header for GstRtpVP8Pay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef __GST_RTP_VP8_PAY_H__
+#define __GST_RTP_VP8_PAY_H__
+
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_VP8_PAY \
+ (gst_rtp_vp8_pay_get_type())
+#define GST_RTP_VP8_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_RTP_VP8_PAY, GstRtpVP8Pay))
+#define GST_RTP_VP8_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_RTP_VP8_PAY, GstRtpVP8PayClass))
+#define GST_IS_RTP_VP8_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_RTP_VP8_PAY))
+#define GST_IS_RTP_VP8_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_RTP_VP8_PAY))
+#define GST_RTP_VP8_PAY_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_VP8_PAY, GstRtpVP8PayClass))
+
+typedef struct _GstRtpVP8Pay GstRtpVP8Pay;
+typedef struct _GstRtpVP8PayClass GstRtpVP8PayClass;
+typedef enum _PictureIDMode PictureIDMode;
+
+enum _PictureIDMode {
+ VP8_PAY_NO_PICTURE_ID,
+ VP8_PAY_PICTURE_ID_7BITS,
+ VP8_PAY_PICTURE_ID_15BITS,
+};
+
+struct _GstRtpVP8PayClass
+{
+ GstRTPBasePayloadClass parent_class;
+};
+
+struct _GstRtpVP8Pay
+{
+ GstRTPBasePayload parent;
+ gboolean is_keyframe;
+ gint n_partitions;
+ /* Treat frame header & tag & partition size block as the first partition,
+ * followed by max. 8 data partitions. last offset is the end of the buffer */
+ guint partition_offset[10];
+ guint partition_size[9];
+ PictureIDMode picture_id_mode;
+ gint picture_id_offset;
+ guint16 picture_id;
+ gboolean temporal_scalability_fields_present;
+ guint8 tl0picidx;
+};
+
+GType gst_rtp_vp8_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* #ifndef __GST_RTP_VP8_PAY_H__ */
diff --git a/gst/rtp/gstrtpvp9depay.c b/gst/rtp/gstrtpvp9depay.c
new file mode 100644
index 0000000000..ec271cf9ff
--- /dev/null
+++ b/gst/rtp/gstrtpvp9depay.c
@@ -0,0 +1,528 @@
+/* gstrtpvp9depay.c - Source for GstRtpVP9Depay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2011 Collabora Ltd.
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ * Copyright (C) 2015 Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include "gstrtpelements.h"
+#include "gstrtpvp9depay.h"
+#include "gstrtputils.h"
+
+#include <gst/video/video.h>
+
+#include <stdio.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_vp9_depay_debug);
+#define GST_CAT_DEFAULT gst_rtp_vp9_depay_debug
+
+static void gst_rtp_vp9_depay_dispose (GObject * object);
+static GstBuffer *gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depayload,
+ GstRTPBuffer * rtp);
+static GstStateChangeReturn gst_rtp_vp9_depay_change_state (GstElement *
+ element, GstStateChange transition);
+static gboolean gst_rtp_vp9_depay_handle_event (GstRTPBaseDepayload * depay,
+ GstEvent * event);
+static gboolean gst_rtp_vp9_depay_packet_lost (GstRTPBaseDepayload * depay,
+ GstEvent * event);
+
+G_DEFINE_TYPE (GstRtpVP9Depay, gst_rtp_vp9_depay, GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp9depay, "rtpvp9depay",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_VP9_DEPAY, rtp_element_init (plugin));
+
+static GstStaticPadTemplate gst_rtp_vp9_depay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-vp9"));
+
+static GstStaticPadTemplate gst_rtp_vp9_depay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "clock-rate = (int) 90000,"
+ "media = (string) \"video\","
+ "encoding-name = (string) { \"VP9\", \"VP9-DRAFT-IETF-01\" }"));
+
+#define PICTURE_ID_NONE (UINT_MAX)
+#define IS_PICTURE_ID_15BITS(pid) (((guint)(pid) & 0x8000) != 0)
+
+static void
+gst_rtp_vp9_depay_init (GstRtpVP9Depay * self)
+{
+ self->adapter = gst_adapter_new ();
+ self->started = FALSE;
+ self->inter_picture = FALSE;
+}
+
+static void
+gst_rtp_vp9_depay_class_init (GstRtpVP9DepayClass * gst_rtp_vp9_depay_class)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (gst_rtp_vp9_depay_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (gst_rtp_vp9_depay_class);
+ GstRTPBaseDepayloadClass *depay_class =
+ (GstRTPBaseDepayloadClass *) (gst_rtp_vp9_depay_class);
+
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_vp9_depay_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_vp9_depay_src_template);
+
+ gst_element_class_set_static_metadata (element_class, "RTP VP9 depayloader",
+ "Codec/Depayloader/Network/RTP",
+ "Extracts VP9 video from RTP packets)", "Stian Selnes <stian@pexip.com>");
+
+ object_class->dispose = gst_rtp_vp9_depay_dispose;
+
+ element_class->change_state = gst_rtp_vp9_depay_change_state;
+
+ depay_class->process_rtp_packet = gst_rtp_vp9_depay_process;
+ depay_class->handle_event = gst_rtp_vp9_depay_handle_event;
+ depay_class->packet_lost = gst_rtp_vp9_depay_packet_lost;
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_vp9_depay_debug, "rtpvp9depay", 0,
+ "VP9 Video RTP Depayloader");
+}
+
+static void
+gst_rtp_vp9_depay_dispose (GObject * object)
+{
+ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (object);
+
+ if (self->adapter != NULL)
+ g_object_unref (self->adapter);
+ self->adapter = NULL;
+
+ /* release any references held by the object here */
+
+ if (G_OBJECT_CLASS (gst_rtp_vp9_depay_parent_class)->dispose)
+ G_OBJECT_CLASS (gst_rtp_vp9_depay_parent_class)->dispose (object);
+}
+
+static gint
+picture_id_compare (guint16 id0, guint16 id1)
+{
+ guint shift = 16 - (IS_PICTURE_ID_15BITS (id1) ? 15 : 7);
+ id0 = id0 << shift;
+ id1 = id1 << shift;
+ return ((gint16) (id1 - id0)) >> shift;
+}
+
+static void
+send_last_lost_event (GstRtpVP9Depay * self)
+{
+ if (self->last_lost_event) {
+ GST_DEBUG_OBJECT (self,
+ "Sending the last stopped lost event: %" GST_PTR_FORMAT,
+ self->last_lost_event);
+ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp9_depay_parent_class)
+ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self),
+ self->last_lost_event);
+ gst_event_replace (&self->last_lost_event, NULL);
+ }
+}
+
+static void
+send_last_lost_event_if_needed (GstRtpVP9Depay * self, guint new_picture_id)
+{
+ if (self->last_picture_id == PICTURE_ID_NONE ||
+ self->last_picture_id == new_picture_id)
+ return;
+
+ if (self->last_lost_event) {
+ gboolean send_lost_event = FALSE;
+ if (new_picture_id == PICTURE_ID_NONE) {
+ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event "
+ "(picture id does not exist): %" GST_PTR_FORMAT,
+ self->last_lost_event);
+ } else if (IS_PICTURE_ID_15BITS (self->last_picture_id) &&
+ !IS_PICTURE_ID_15BITS (new_picture_id)) {
+ GST_DEBUG_OBJECT (self, "Dropping the last stopped lost event "
+ "(picture id has less bits than before): %" GST_PTR_FORMAT,
+ self->last_lost_event);
+ } else if (picture_id_compare (self->last_picture_id, new_picture_id) != 1) {
+ GstStructure *s = gst_event_writable_structure (self->last_lost_event);
+
+ GST_DEBUG_OBJECT (self, "Sending the last stopped lost event "
+ "(gap in picture id %u %u): %" GST_PTR_FORMAT,
+ self->last_picture_id, new_picture_id, self->last_lost_event);
+ send_lost_event = TRUE;
+ /* Prevent rtpbasedepayload from dropping the event now
+ * that we have made sure the lost packet was not FEC */
+ gst_structure_remove_field (s, "might-have-been-fec");
+ }
+ if (send_lost_event)
+ GST_RTP_BASE_DEPAYLOAD_CLASS (gst_rtp_vp9_depay_parent_class)
+ ->packet_lost (GST_RTP_BASE_DEPAYLOAD_CAST (self),
+ self->last_lost_event);
+
+ gst_event_replace (&self->last_lost_event, NULL);
+ }
+}
+
+static GstBuffer *
+gst_rtp_vp9_depay_process (GstRTPBaseDepayload * depay, GstRTPBuffer * rtp)
+{
+ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (depay);
+ GstBuffer *payload;
+ guint8 *data;
+ guint hdrsize = 1;
+ guint size;
+ gint spatial_layer = 0;
+ guint picture_id = PICTURE_ID_NONE;
+ gboolean i_bit, p_bit, l_bit, f_bit, b_bit, e_bit, v_bit, d_bit = 0;
+ gboolean is_start_of_picture;
+
+ if (G_UNLIKELY (GST_BUFFER_IS_DISCONT (rtp->buffer))) {
+ GST_LOG_OBJECT (self, "Discontinuity, flushing adapter");
+ gst_adapter_clear (self->adapter);
+ self->started = FALSE;
+ }
+
+ size = gst_rtp_buffer_get_payload_len (rtp);
+
+ /* Mandatory with at least one header and one vp9 byte */
+ if (G_UNLIKELY (size < hdrsize + 1))
+ goto too_small;
+
+ data = gst_rtp_buffer_get_payload (rtp);
+ i_bit = (data[0] & 0x80) != 0;
+ p_bit = (data[0] & 0x40) != 0;
+ l_bit = (data[0] & 0x20) != 0;
+ f_bit = (data[0] & 0x10) != 0;
+ b_bit = (data[0] & 0x08) != 0;
+ e_bit = (data[0] & 0x04) != 0;
+ v_bit = (data[0] & 0x02) != 0;
+
+ GST_TRACE_OBJECT (self, "IPLFBEV : %d%d%d%d%d%d%d", i_bit, p_bit, l_bit,
+ f_bit, b_bit, e_bit, v_bit);
+
+ /* Check I optional header Picture ID */
+ if (i_bit) {
+ hdrsize++;
+ if (G_UNLIKELY (size < hdrsize + 1))
+ goto too_small;
+ picture_id = data[1];
+ /* Check M for 15 bits PictureID */
+ if ((data[1] & 0x80) != 0) {
+ hdrsize++;
+ if (G_UNLIKELY (size < hdrsize + 1))
+ goto too_small;
+ picture_id = (picture_id << 8) | data[2];
+ }
+ }
+
+ /* Check L optional header layer indices */
+ if (l_bit) {
+ spatial_layer = (data[hdrsize] >> 1) & 0x07;
+ d_bit = (data[hdrsize] >> 0) & 0x01;
+ GST_TRACE_OBJECT (self, "TID=%d, U=%d, SID=%d, D=%d",
+ (data[hdrsize] >> 5) & 0x07, (data[hdrsize] >> 4) & 0x01,
+ (data[hdrsize] >> 1) & 0x07, (data[hdrsize] >> 0) & 0x01);
+
+ if (spatial_layer == 0 && d_bit != 0) {
+ /* Invalid according to draft-ietf-payload-vp9-06, but firefox 61 and
+ * chrome 66 send enhancement layers with SID=0, so let's not drop the
+ * packet. */
+ GST_LOG_OBJECT (self, "Invalid inter-layer dependency for base layer");
+ }
+
+ hdrsize++;
+ /* Check TL0PICIDX temporal layer zero index (non-flexible mode) */
+ if (!f_bit)
+ hdrsize++;
+ }
+
+ if (p_bit && f_bit) {
+ gint i;
+
+ /* At least one P_DIFF|N, up to three times */
+ for (i = 0; i < 3; i++) {
+ guint p_diff, n_bit;
+
+ if (G_UNLIKELY (size < hdrsize + 1))
+ goto too_small;
+
+ p_diff = data[hdrsize] >> 1;
+ n_bit = data[hdrsize] & 0x1;
+ GST_TRACE_OBJECT (self, "P_DIFF[%d]=%d", i, p_diff);
+ hdrsize++;
+
+ if (!n_bit)
+ break;
+ }
+ }
+
+ /* Check V optional Scalability Structure */
+ if (v_bit) {
+ guint n_s, y_bit, g_bit;
+ guint8 *ss = &data[hdrsize];
+ guint sssize = 1;
+
+ if (G_UNLIKELY (size < hdrsize + sssize + 1))
+ goto too_small;
+
+ n_s = (ss[0] & 0xe0) >> 5;
+ y_bit = (ss[0] & 0x10) != 0;
+ g_bit = (ss[0] & 0x08) != 0;
+
+ GST_TRACE_OBJECT (self, "SS header: N_S=%u, Y=%u, G=%u", n_s, y_bit, g_bit);
+
+ sssize += y_bit ? (n_s + 1) * 4 : 0;
+ if (G_UNLIKELY (size < hdrsize + sssize + 1))
+ goto too_small;
+
+ if (y_bit) {
+ guint i;
+ for (i = 0; i <= n_s; i++) {
+ /* For now, simply use the last layer specified for width and height */
+ self->ss_width = ss[1 + i * 4] * 256 + ss[2 + i * 4];
+ self->ss_height = ss[3 + i * 4] * 256 + ss[4 + i * 4];
+ GST_TRACE_OBJECT (self, "N_S[%d]: WIDTH=%u, HEIGHT=%u", i,
+ self->ss_width, self->ss_height);
+ }
+ }
+
+ if (g_bit) {
+ guint i, j;
+ guint n_g = ss[sssize];
+ sssize++;
+ if (G_UNLIKELY (size < hdrsize + sssize + 1))
+ goto too_small;
+ for (i = 0; i < n_g; i++) {
+ guint t = (ss[sssize] & 0xe0) >> 5;
+ guint u = (ss[sssize] & 0x10) >> 4;
+ guint r = (ss[sssize] & 0x0c) >> 2;
+ GST_TRACE_OBJECT (self, "N_G[%u]: 0x%02x -> T=%u, U=%u, R=%u", i,
+ ss[sssize], t, u, r);
+ for (j = 0; j < r; j++)
+ GST_TRACE_OBJECT (self, " R[%u]: P_DIFF=%u", j, ss[sssize + 1 + j]);
+ sssize += 1 + r;
+ if (G_UNLIKELY (size < hdrsize + sssize + 1))
+ goto too_small;
+ }
+ }
+ hdrsize += sssize;
+ }
+
+ GST_DEBUG_OBJECT (depay, "hdrsize %u, size %u, picture id 0x%x",
+ hdrsize, size, picture_id);
+
+ if (G_UNLIKELY (hdrsize >= size))
+ goto too_small;
+
+ is_start_of_picture = b_bit && (!l_bit || !d_bit);
+ /* If this is a start frame AND we are already processing a frame, we need to flush and wait for next start frame */
+ if (is_start_of_picture) {
+ if (G_UNLIKELY (self->started)) {
+ GST_DEBUG_OBJECT (depay, "Incomplete frame, flushing adapter");
+ gst_adapter_clear (self->adapter);
+ self->started = FALSE;
+ }
+ }
+
+ if (G_UNLIKELY (!self->started)) {
+ /* Check if this is the start of a VP9 layer frame, otherwise bail */
+ if (!b_bit) {
+ GST_DEBUG_OBJECT (depay,
+ "The layer is missing the first packets, ignoring the packet");
+ if (self->stop_lost_events) {
+ send_last_lost_event (self);
+ self->stop_lost_events = FALSE;
+ }
+ goto done;
+ }
+
+ GST_DEBUG_OBJECT (depay, "Found the start of the layer");
+ if (self->stop_lost_events) {
+ send_last_lost_event_if_needed (self, picture_id);
+ self->stop_lost_events = FALSE;
+ }
+ self->started = TRUE;
+ self->inter_picture = FALSE;
+ }
+
+ payload = gst_rtp_buffer_get_payload_subbuffer (rtp, hdrsize, -1);
+ if (GST_LEVEL_MEMDUMP <= gst_debug_category_get_threshold (GST_CAT_DEFAULT)) {
+ GstMapInfo map;
+ gst_buffer_map (payload, &map, GST_MAP_READ);
+ GST_MEMDUMP_OBJECT (self, "vp9 payload", map.data, 16);
+ gst_buffer_unmap (payload, &map);
+ }
+ gst_adapter_push (self->adapter, payload);
+ self->last_picture_id = picture_id;
+ self->inter_picture |= p_bit;
+
+ /* Marker indicates that it was the last rtp packet for this picture. Note
+ * that if spatial scalability is used, e_bit will be set for the last
+ * packet of a frame while the marker bit is not set until the last packet
+ * of the picture. */
+ if (gst_rtp_buffer_get_marker (rtp)) {
+ GstBuffer *out;
+
+ GST_DEBUG_OBJECT (depay,
+ "Found the end of the frame (%" G_GSIZE_FORMAT " bytes)",
+ gst_adapter_available (self->adapter));
+
+ if (gst_adapter_available (self->adapter) < 10)
+ goto too_small;
+
+ out = gst_adapter_take_buffer (self->adapter,
+ gst_adapter_available (self->adapter));
+
+ self->started = FALSE;
+
+ /* mark keyframes */
+ out = gst_buffer_make_writable (out);
+ /* Filter away all metas that are not sensible to copy */
+ gst_rtp_drop_non_video_meta (self, out);
+ if (self->inter_picture) {
+ GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (!self->caps_sent) {
+ gst_buffer_unref (out);
+ out = NULL;
+ GST_INFO_OBJECT (self, "Dropping inter-frame before intra-frame");
+ gst_pad_push_event (GST_RTP_BASE_DEPAYLOAD_SINKPAD (depay),
+ gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
+ TRUE, 0));
+ }
+ } else {
+ GST_BUFFER_FLAG_UNSET (out, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (self->last_width != self->ss_width ||
+ self->last_height != self->ss_height) {
+ GstCaps *srccaps;
+
+ /* Width and height are optional in the RTP header. Consider parsing
+ * the frame header as well if they are missing from the RTP header */
+ if (self->ss_width != 0 && self->ss_height != 0) {
+ srccaps = gst_caps_new_simple ("video/x-vp9",
+ "framerate", GST_TYPE_FRACTION, 0, 1,
+ "width", G_TYPE_INT, self->ss_width,
+ "height", G_TYPE_INT, self->ss_height, NULL);
+ } else {
+ srccaps = gst_caps_new_simple ("video/x-vp9",
+ "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
+ }
+
+ gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), srccaps);
+ gst_caps_unref (srccaps);
+
+ self->caps_sent = TRUE;
+ self->last_width = self->ss_width;
+ self->last_height = self->ss_height;
+ self->ss_width = 0;
+ self->ss_height = 0;
+ }
+ }
+
+ if (picture_id != PICTURE_ID_NONE)
+ self->stop_lost_events = TRUE;
+ return out;
+ }
+
+done:
+ return NULL;
+
+too_small:
+ GST_LOG_OBJECT (self, "Invalid rtp packet (too small), ignoring");
+ gst_adapter_clear (self->adapter);
+ self->started = FALSE;
+ goto done;
+}
+
+static GstStateChangeReturn
+gst_rtp_vp9_depay_change_state (GstElement * element, GstStateChange transition)
+{
+ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ self->last_width = -1;
+ self->last_height = -1;
+ self->caps_sent = FALSE;
+ self->last_picture_id = PICTURE_ID_NONE;
+ gst_event_replace (&self->last_lost_event, NULL);
+ self->stop_lost_events = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return
+ GST_ELEMENT_CLASS (gst_rtp_vp9_depay_parent_class)->change_state (element,
+ transition);
+}
+
+static gboolean
+gst_rtp_vp9_depay_handle_event (GstRTPBaseDepayload * depay, GstEvent * event)
+{
+ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (depay);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ self->last_width = -1;
+ self->last_height = -1;
+ self->last_picture_id = PICTURE_ID_NONE;
+ gst_event_replace (&self->last_lost_event, NULL);
+ self->stop_lost_events = FALSE;
+ break;
+ default:
+ break;
+ }
+
+ return
+ GST_RTP_BASE_DEPAYLOAD_CLASS
+ (gst_rtp_vp9_depay_parent_class)->handle_event (depay, event);
+}
+
+static gboolean
+gst_rtp_vp9_depay_packet_lost (GstRTPBaseDepayload * depay, GstEvent * event)
+{
+ GstRtpVP9Depay *self = GST_RTP_VP9_DEPAY (depay);
+ const GstStructure *s;
+ gboolean might_have_been_fec;
+
+ s = gst_event_get_structure (event);
+
+ if (self->stop_lost_events) {
+ if (gst_structure_get_boolean (s, "might-have-been-fec",
+ &might_have_been_fec)
+ && might_have_been_fec) {
+ GST_DEBUG_OBJECT (depay, "Stopping lost event %" GST_PTR_FORMAT, event);
+ gst_event_replace (&self->last_lost_event, event);
+ return TRUE;
+ }
+ } else if (self->last_picture_id != PICTURE_ID_NONE) {
+ GstStructure *s = gst_event_writable_structure (self->last_lost_event);
+
+ /* We are currently processing a picture, let's make sure the
+ * base depayloader doesn't drop this lost event */
+ gst_structure_remove_field (s, "might-have-been-fec");
+ }
+
+ return
+ GST_RTP_BASE_DEPAYLOAD_CLASS
+ (gst_rtp_vp9_depay_parent_class)->packet_lost (depay, event);
+}
diff --git a/gst/rtp/gstrtpvp9depay.h b/gst/rtp/gstrtpvp9depay.h
new file mode 100644
index 0000000000..96007bf919
--- /dev/null
+++ b/gst/rtp/gstrtpvp9depay.h
@@ -0,0 +1,79 @@
+/*
+ * gstrtpvp9depay.h - Header for GstRtpVP9Depay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2015 Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef __GST_RTP_VP9_DEPAY_H__
+#define __GST_RTP_VP9_DEPAY_H__
+
+#include <gst/base/gstadapter.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_VP9_DEPAY \
+ (gst_rtp_vp9_depay_get_type())
+#define GST_RTP_VP9_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_RTP_VP9_DEPAY, GstRtpVP9Depay))
+#define GST_RTP_VP9_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_RTP_VP9_DEPAY, \
+ GstRtpVP9DepayClass))
+#define GST_IS_RTP_VP9_DEPAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_RTP_VP9_DEPAY))
+#define GST_IS_RTP_VP9_DEPAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_RTP_VP9_DEPAY))
+#define GST_RTP_VP9_DEPAY_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_VP9_DEPAY, \
+ GstRtpVP9DepayClass))
+
+typedef struct _GstRtpVP9Depay GstRtpVP9Depay;
+typedef struct _GstRtpVP9DepayClass GstRtpVP9DepayClass;
+
+struct _GstRtpVP9DepayClass
+{
+ GstRTPBaseDepayloadClass parent_class;
+};
+
+struct _GstRtpVP9Depay
+{
+ GstRTPBaseDepayload parent;
+ GstAdapter *adapter;
+ gboolean started;
+
+ gint ss_width;
+ gint ss_height;
+ gint last_width;
+ gint last_height;
+ guint last_picture_id;
+ GstEvent *last_lost_event;
+ gboolean caps_sent;
+ /* In between pictures, we might store GstRTPPacketLost events instead
+ * of forwarding them immediately, we check upon reception of a new
+ * picture id whether a gap was introduced, in which case we do forward
+ * the event. This is to avoid forwarding spurious lost events for FEC
+ * packets.
+ */
+ gboolean stop_lost_events;
+ gboolean inter_picture;
+};
+
+GType gst_rtp_vp9_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* #ifndef __GST_RTP_VP9_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpvp9pay.c b/gst/rtp/gstrtpvp9pay.c
new file mode 100644
index 0000000000..447a429f16
--- /dev/null
+++ b/gst/rtp/gstrtpvp9pay.c
@@ -0,0 +1,563 @@
+/*
+ * gstrtpvp9pay.c - Source for GstRtpVP9Pay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2011 Collabora Ltd.
+ * Contact: Youness Alaoui <youness.alaoui@collabora.co.uk>
+ * Copyright (C) 2015 Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <gst/base/gstbitreader.h>
+#include <gst/rtp/gstrtppayloads.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+#include "gstrtpelements.h"
+#include "dboolhuff.h"
+#include "gstrtpvp9pay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_vp9_pay_debug);
+#define GST_CAT_DEFAULT gst_rtp_vp9_pay_debug
+
+#define DEFAULT_PICTURE_ID_MODE VP9_PAY_NO_PICTURE_ID
+
+enum
+{
+ PROP_0,
+ PROP_PICTURE_ID_MODE
+};
+
+#define GST_TYPE_RTP_VP9_PAY_PICTURE_ID_MODE (gst_rtp_vp9_pay_picture_id_mode_get_type())
+static GType
+gst_rtp_vp9_pay_picture_id_mode_get_type (void)
+{
+ static GType mode_type = 0;
+ static const GEnumValue modes[] = {
+ {VP9_PAY_NO_PICTURE_ID, "No Picture ID", "none"},
+ {VP9_PAY_PICTURE_ID_7BITS, "7-bit Picture ID", "7-bit"},
+ {VP9_PAY_PICTURE_ID_15BITS, "15-bit Picture ID", "15-bit"},
+ {0, NULL, NULL},
+ };
+
+ if (!mode_type) {
+ mode_type = g_enum_register_static ("GstVP9RTPPayMode", modes);
+ }
+ return mode_type;
+}
+
+static void gst_rtp_vp9_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_vp9_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static GstFlowReturn gst_rtp_vp9_pay_handle_buffer (GstRTPBasePayload * payload,
+ GstBuffer * buffer);
+static gboolean gst_rtp_vp9_pay_sink_event (GstRTPBasePayload * payload,
+ GstEvent * event);
+static gboolean gst_rtp_vp9_pay_set_caps (GstRTPBasePayload * payload,
+ GstCaps * caps);
+
+G_DEFINE_TYPE (GstRtpVP9Pay, gst_rtp_vp9_pay, GST_TYPE_RTP_BASE_PAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvp9pay, "rtpvp9pay",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_VP9_PAY, rtp_element_init (plugin));
+
+static GstStaticPadTemplate gst_rtp_vp9_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ","
+ "clock-rate = (int) 90000, encoding-name = (string) { \"VP9\", \"VP9-DRAFT-IETF-01\" }"));
+
+static GstStaticPadTemplate gst_rtp_vp9_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-vp9"));
+
+static void
+gst_rtp_vp9_pay_init (GstRtpVP9Pay * obj)
+{
+ obj->picture_id_mode = DEFAULT_PICTURE_ID_MODE;
+ if (obj->picture_id_mode == VP9_PAY_PICTURE_ID_7BITS)
+ obj->picture_id = g_random_int_range (0, G_MAXUINT8) & 0x7F;
+ else if (obj->picture_id_mode == VP9_PAY_PICTURE_ID_15BITS)
+ obj->picture_id = g_random_int_range (0, G_MAXUINT16) & 0x7FFF;
+}
+
+static void
+gst_rtp_vp9_pay_class_init (GstRtpVP9PayClass * gst_rtp_vp9_pay_class)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (gst_rtp_vp9_pay_class);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (gst_rtp_vp9_pay_class);
+ GstRTPBasePayloadClass *pay_class =
+ GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_vp9_pay_class);
+
+ gobject_class->set_property = gst_rtp_vp9_pay_set_property;
+ gobject_class->get_property = gst_rtp_vp9_pay_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_PICTURE_ID_MODE,
+ g_param_spec_enum ("picture-id-mode", "Picture ID Mode",
+ "The picture ID mode for payloading",
+ GST_TYPE_RTP_VP9_PAY_PICTURE_ID_MODE, DEFAULT_PICTURE_ID_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_vp9_pay_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_rtp_vp9_pay_src_template);
+
+ gst_element_class_set_static_metadata (element_class, "RTP VP9 payloader",
+ "Codec/Payloader/Network/RTP",
+ "Puts VP9 video in RTP packets)", "Stian Selnes <stian@pexip.com>");
+
+ pay_class->handle_buffer = gst_rtp_vp9_pay_handle_buffer;
+ pay_class->sink_event = gst_rtp_vp9_pay_sink_event;
+ pay_class->set_caps = gst_rtp_vp9_pay_set_caps;
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_vp9_pay_debug, "rtpvp9pay", 0,
+ "VP9 Video RTP Payloader");
+
+ gst_type_mark_as_plugin_api (GST_TYPE_RTP_VP9_PAY_PICTURE_ID_MODE, 0);
+}
+
+static void
+gst_rtp_vp9_pay_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstRtpVP9Pay *rtpvp9pay = GST_RTP_VP9_PAY (object);
+
+ switch (prop_id) {
+ case PROP_PICTURE_ID_MODE:
+ rtpvp9pay->picture_id_mode = g_value_get_enum (value);
+ if (rtpvp9pay->picture_id_mode == VP9_PAY_PICTURE_ID_7BITS)
+ rtpvp9pay->picture_id = g_random_int_range (0, G_MAXUINT8) & 0x7F;
+ else if (rtpvp9pay->picture_id_mode == VP9_PAY_PICTURE_ID_15BITS)
+ rtpvp9pay->picture_id = g_random_int_range (0, G_MAXUINT16) & 0x7FFF;
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_vp9_pay_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstRtpVP9Pay *rtpvp9pay = GST_RTP_VP9_PAY (object);
+
+ switch (prop_id) {
+ case PROP_PICTURE_ID_MODE:
+ g_value_set_enum (value, rtpvp9pay->picture_id_mode);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+#define VP9_PROFILE_0 0
+#define VP9_PROFILE_1 1
+#define VP9_PROFILE_2 2
+#define VP9_PROFILE_3 3
+#define VP9_FRAME_MARKER 0x2
+#define VPX_CS_SRGB 7
+
+static gboolean
+gst_rtp_vp9_pay_parse_frame (GstRtpVP9Pay * self, GstBuffer * buffer,
+ gsize buffer_size)
+{
+ GstMapInfo map = GST_MAP_INFO_INIT;
+ GstBitReader reader;
+ guint8 *data;
+ gsize size;
+ gboolean keyframe;
+ guint32 tmp, profile;
+
+ if (G_UNLIKELY (buffer_size < 3))
+ goto error;
+
+ if (!gst_buffer_map (buffer, &map, GST_MAP_READ) || !map.data)
+ goto error;
+
+ data = map.data;
+ size = map.size;
+
+ gst_bit_reader_init (&reader, data, size);
+
+
+ /* frame marker */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 2)
+ || tmp != VP9_FRAME_MARKER)
+ goto error;
+
+ /* profile, variable length */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &profile, 2))
+ goto error;
+ if (profile > 2) {
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 1))
+ goto error;
+ profile += tmp;
+ }
+
+ /* show existing frame */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 1))
+ goto error;
+ if (tmp) {
+ if (!gst_bit_reader_skip (&reader, 3))
+ goto error;
+ return TRUE;
+ }
+
+ /* frame type */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 1))
+ goto error;
+ self->is_keyframe = keyframe = (tmp == 0);
+
+ /* show frame and resilient mode */
+ if (!gst_bit_reader_skip (&reader, 2))
+ goto error;
+
+ if (keyframe) {
+ /* sync code */
+ const guint32 sync_code = 0x498342;
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 24))
+ goto error;
+ if (tmp != sync_code)
+ goto error;
+
+ if (profile >= VP9_PROFILE_2) {
+ /* bit depth */
+ if (!gst_bit_reader_skip (&reader, 1))
+ goto error;
+ }
+
+ /* color space */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 3))
+ goto error;
+ if (tmp != VPX_CS_SRGB) {
+ /* color range */
+ if (!gst_bit_reader_skip (&reader, 1))
+ goto error;
+ if (profile == VP9_PROFILE_1 || profile == VP9_PROFILE_3) {
+ /* subsampling + reserved bit */
+ if (!gst_bit_reader_skip (&reader, 2 + 1))
+ goto error;
+ }
+ } else {
+ if (profile == VP9_PROFILE_1 || profile == VP9_PROFILE_3)
+ /* reserved bit */
+ if (!gst_bit_reader_skip (&reader, 1))
+ goto error;
+ }
+
+ /* frame size */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 16))
+ goto error;
+ self->width = tmp + 1;
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 16))
+ goto error;
+ self->height = tmp + 1;
+
+ /* render size */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &tmp, 1))
+ goto error;
+ if (tmp) {
+ if (!gst_bit_reader_skip (&reader, 32))
+ goto error;
+ }
+
+ GST_INFO_OBJECT (self, "parsed width=%d height=%d", self->width,
+ self->height);
+ }
+
+
+ gst_buffer_unmap (buffer, &map);
+ return TRUE;
+
+error:
+ GST_DEBUG ("Failed to parse frame");
+ if (map.memory != NULL) {
+ gst_buffer_unmap (buffer, &map);
+ }
+ return FALSE;
+}
+
+static gsize
+gst_rtp_vp9_calc_header_len (GstRtpVP9Pay * self, gboolean start)
+{
+ gsize len = 1;
+
+ switch (self->picture_id_mode) {
+ case VP9_PAY_PICTURE_ID_7BITS:
+ len += 1;
+ break;
+ case VP9_PAY_PICTURE_ID_15BITS:
+ len += 2;
+ default:
+ break;
+ }
+
+ /* Assume non-flexible mode */
+ /* Assume L-bit not set, no L header */
+
+ if (self->is_keyframe && start) {
+ /* Assume V-bit set */
+ /* FIXME: SS depends on layers and prediction structure */
+ /* For now assume 1 spatial and 1 temporal layer. */
+ /* FIXME: Only for the first packet in the key frame */
+ len += 8;
+ }
+
+ return len;
+}
+
+/* VP9 RTP header, non-flexible mode:
+
+ 0 1 2 3 4 5 6 7
+ +-+-+-+-+-+-+-+-+
+ |I|P|L|F|B|E|V|-| (REQUIRED)
+ +-+-+-+-+-+-+-+-+
+ I: |M| PICTURE ID | (RECOMMENDED)
+ +-+-+-+-+-+-+-+-+
+ M: | EXTENDED PID | (RECOMMENDED)
+ +-+-+-+-+-+-+-+-+
+ L: | T |U| S |D| (CONDITIONALLY RECOMMENDED)
+ +-+-+-+-+-+-+-+-+ -\
+ P,F: | P_DIFF |X|N| (CONDITIONALLY RECOMMENDED) .
+ +-+-+-+-+-+-+-+-+ . - up to 3 times
+ X: |EXTENDED P_DIFF| (OPTIONAL) .
+ +-+-+-+-+-+-+-+-+ -/
+ V: | SS |
+ | .. |
+ +-+-+-+-+-+-+-+-+
+
+ Scalability structure (SS)
+ (from https://chromium.googlesource.com/external/webrtc/+/HEAD/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.cc
+ since latest draft is not up to date with chromium)
+
+ +-+-+-+-+-+-+-+-+
+ V: | N_S |Y|G|-|-|-|
+ +-+-+-+-+-+-+-+-+ -|
+ Y: | WIDTH | (OPTIONAL) .
+ + + .
+ | | (OPTIONAL) .
+ +-+-+-+-+-+-+-+-+ . N_S + 1 times
+ | HEIGHT | (OPTIONAL) .
+ + + .
+ | | (OPTIONAL) .
+ +-+-+-+-+-+-+-+-+ -|
+ G: | N_G | (OPTIONAL)
+ +-+-+-+-+-+-+-+-+ -|
+ N_G: | T |U| R |-|-| (OPTIONAL) .
+ +-+-+-+-+-+-+-+-+ -| . N_G times
+ | P_DIFF | (OPTIONAL) . R times .
+ +-+-+-+-+-+-+-+-+ -| -|
+
+**/
+
+/* When growing the vp9 header keep max payload len calculation in sync */
/* Allocate an RTP output buffer and fill in the VP9 payload descriptor
 * (see the layout diagram above).  The descriptor length must match
 * gst_rtp_vp9_calc_header_len() exactly (asserted below). */
static GstBuffer *
gst_rtp_vp9_create_header_buffer (GstRtpVP9Pay * self,
    gboolean start, gboolean mark, GstBuffer * in)
{
  GstBuffer *out;
  guint8 *p;
  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;
  guint off = 1;                /* write position; octet 0 is the flags octet */
  guint hdrlen = gst_rtp_vp9_calc_header_len (self, start);

  out =
      gst_rtp_base_payload_allocate_output_buffer (GST_RTP_BASE_PAYLOAD (self),
      hdrlen, 0, 0);
  gst_rtp_buffer_map (out, GST_MAP_READWRITE, &rtpbuffer);
  p = gst_rtp_buffer_get_payload (&rtpbuffer);
  p[0] = 0x0;

  if (self->picture_id_mode != VP9_PAY_NO_PICTURE_ID) {
    p[0] |= 0x80;               /* I: picture id present */
    if (self->picture_id_mode == VP9_PAY_PICTURE_ID_7BITS) {
      /* M=0 */
      p[off++] = self->picture_id & 0x7F;
    } else {
      /* M=1 */
      p[off++] = 0x80 | ((self->picture_id & 0x7FFF) >> 8);
      p[off++] = self->picture_id & 0xFF;
    }
  }

  if (!self->is_keyframe)
    p[0] |= 0x40;               /* P: inter-picture predicted frame */
  if (start)
    p[0] |= 0x08;               /* B: first packet of the frame */
  if (mark)
    p[0] |= 0x04;               /* E: last packet of the frame */

  if (self->is_keyframe && start) {
    p[0] |= 0x02;               /* V: scalability structure present */
    /* scalability structure, hard coded for now to be similar to chromium for
     * quick and dirty interop */
    p[off++] = 0x18;            /* N_S=0 Y=1 G=1 */
    p[off++] = self->width >> 8;
    p[off++] = self->width & 0xFF;
    p[off++] = self->height >> 8;
    p[off++] = self->height & 0xFF;
    p[off++] = 0x01;            /* N_G=1 */
    p[off++] = 0x04;            /* T=0, U=0, R=1 */
    p[off++] = 0x01;            /* P_DIFF=1 */
  }

  /* header length estimate and actual bytes written must agree */
  g_assert_cmpint (off, ==, hdrlen);

  gst_rtp_buffer_set_marker (&rtpbuffer, mark);

  gst_rtp_buffer_unmap (&rtpbuffer);

  /* carry the input buffer's timing over to the output packet */
  GST_BUFFER_DURATION (out) = GST_BUFFER_DURATION (in);
  GST_BUFFER_PTS (out) = GST_BUFFER_PTS (in);

  return out;
}
+
+static guint
+gst_rtp_vp9_payload_next (GstRtpVP9Pay * self, GstBufferList * list,
+ guint offset, GstBuffer * buffer, gsize buffer_size, gsize max_payload_len)
+{
+ GstBuffer *header;
+ GstBuffer *sub;
+ GstBuffer *out;
+ gboolean mark;
+ gsize remaining;
+ gsize available;
+
+ remaining = buffer_size - offset;
+ available = max_payload_len;
+ if (available > remaining)
+ available = remaining;
+
+ mark = (remaining == available);
+ header = gst_rtp_vp9_create_header_buffer (self, offset == 0, mark, buffer);
+ sub = gst_buffer_copy_region (buffer, GST_BUFFER_COPY_ALL, offset, available);
+
+ gst_rtp_copy_video_meta (self, header, buffer);
+
+ out = gst_buffer_append (header, sub);
+
+ gst_buffer_list_insert (list, -1, out);
+
+ return available;
+}
+
+
/* Split one VP9 frame into MTU-sized RTP packets and push them downstream
 * as a buffer list.  Takes ownership of @buffer. */
static GstFlowReturn
gst_rtp_vp9_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVP9Pay *self = GST_RTP_VP9_PAY (payload);
  GstFlowReturn ret;
  GstBufferList *list;
  gsize size, max_paylen;
  guint offset, mtu, vp9_hdr_len;

  size = gst_buffer_get_size (buffer);

  /* parse keyframe flag and frame dimensions used for the SS block */
  if (G_UNLIKELY (!gst_rtp_vp9_pay_parse_frame (self, buffer, size))) {
    GST_ELEMENT_ERROR (self, STREAM, ENCODE, (NULL),
        ("Failed to parse VP9 frame"));
    return GST_FLOW_ERROR;
  }

  /* worst-case descriptor length (start=TRUE) so every packet fits;
   * NOTE(review): assumes mtu > vp9_hdr_len — confirm base class enforces
   * a sane minimum MTU */
  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);
  vp9_hdr_len = gst_rtp_vp9_calc_header_len (self, TRUE);
  max_paylen = gst_rtp_buffer_calc_payload_len (mtu - vp9_hdr_len, 0, 0);

  list = gst_buffer_list_new_sized ((size / max_paylen) + 1);

  offset = 0;
  while (offset < size) {
    offset +=
        gst_rtp_vp9_payload_next (self, list, offset, buffer, size, max_paylen);
  }

  ret = gst_rtp_base_payload_push_list (payload, list);

  /* Increment and wrap the picture id if it overflows */
  if ((self->picture_id_mode == VP9_PAY_PICTURE_ID_7BITS &&
          ++self->picture_id >= 0x80) ||
      (self->picture_id_mode == VP9_PAY_PICTURE_ID_15BITS &&
          ++self->picture_id >= 0x8000))
    self->picture_id = 0;

  gst_buffer_unref (buffer);

  return ret;
}
+
+static gboolean
+gst_rtp_vp9_pay_sink_event (GstRTPBasePayload * payload, GstEvent * event)
+{
+ GstRtpVP9Pay *self = GST_RTP_VP9_PAY (payload);
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) {
+ if (self->picture_id_mode == VP9_PAY_PICTURE_ID_7BITS)
+ self->picture_id = g_random_int_range (0, G_MAXUINT8) & 0x7F;
+ else if (self->picture_id_mode == VP9_PAY_PICTURE_ID_15BITS)
+ self->picture_id = g_random_int_range (0, G_MAXUINT16) & 0x7FFF;
+ }
+
+ return GST_RTP_BASE_PAYLOAD_CLASS (gst_rtp_vp9_pay_parent_class)->sink_event
+ (payload, event);
+}
+
/* Negotiate output caps.  Prefers the standard "VP9" encoding-name but
 * falls back to "VP9-DRAFT-IETF-01" when the downstream peer cannot
 * intersect with "VP9". */
static gboolean
gst_rtp_vp9_pay_set_caps (GstRTPBasePayload * payload, GstCaps * caps)
{
  GstCaps *src_caps;
  const char *encoding_name = "VP9";

  src_caps = gst_pad_get_allowed_caps (GST_RTP_BASE_PAYLOAD_SRCPAD (payload));
  if (src_caps) {
    GstStructure *s;
    const GValue *value;

    s = gst_caps_get_structure (src_caps, 0);

    if (gst_structure_has_field (s, "encoding-name")) {
      GValue default_value = G_VALUE_INIT;

      g_value_init (&default_value, G_TYPE_STRING);
      g_value_set_static_string (&default_value, encoding_name);

      /* peer restricts encoding-name; use the draft name if "VP9" is
       * not acceptable */
      value = gst_structure_get_value (s, "encoding-name");
      if (!gst_value_can_intersect (&default_value, value))
        encoding_name = "VP9-DRAFT-IETF-01";
    }
    gst_caps_unref (src_caps);
  }

  gst_rtp_base_payload_set_options (payload, "video", TRUE,
      encoding_name, 90000);

  /* no extra caps fields beyond the standard options */
  return gst_rtp_base_payload_set_outcaps (payload, NULL);
}
diff --git a/gst/rtp/gstrtpvp9pay.h b/gst/rtp/gstrtpvp9pay.h
new file mode 100644
index 0000000000..407e3e08c4
--- /dev/null
+++ b/gst/rtp/gstrtpvp9pay.h
@@ -0,0 +1,70 @@
+/*
+ * gstrtpvp9pay.h - Header for GstRtpVP9Pay
+ * Copyright (C) 2011 Sjoerd Simons <sjoerd@luon.net>
+ * Copyright (C) 2015 Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#ifndef __GST_RTP_VP9_PAY_H__
+#define __GST_RTP_VP9_PAY_H__
+
+#include <gst/rtp/gstrtpbasepayload.h>
+
+G_BEGIN_DECLS
+
/* Standard GObject type boilerplate for the VP9 RTP payloader. */
#define GST_TYPE_RTP_VP9_PAY \
  (gst_rtp_vp9_pay_get_type())
#define GST_RTP_VP9_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_RTP_VP9_PAY, GstRtpVP9Pay))
#define GST_RTP_VP9_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_RTP_VP9_PAY, GstRtpVP9PayClass))
#define GST_IS_RTP_VP9_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_RTP_VP9_PAY))
#define GST_IS_RTP_VP9_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_RTP_VP9_PAY))
#define GST_RTP_VP9_PAY_GET_CLASS(obj) \
  (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_VP9_PAY, GstRtpVP9PayClass))

typedef struct _GstRtpVP9Pay GstRtpVP9Pay;
typedef struct _GstRtpVP9PayClass GstRtpVP9PayClass;
typedef enum _VP9PictureIDMode VP9PictureIDMode;

/* How (and whether) the picture id field is written into the VP9
 * payload descriptor. */
enum _VP9PictureIDMode {
  VP9_PAY_NO_PICTURE_ID,
  VP9_PAY_PICTURE_ID_7BITS,
  VP9_PAY_PICTURE_ID_15BITS,
};

struct _GstRtpVP9PayClass
{
  GstRTPBasePayloadClass parent_class;
};

struct _GstRtpVP9Pay
{
  GstRTPBasePayload parent;
  /* whether the current frame is a keyframe (set by frame parsing) */
  gboolean is_keyframe;
  /* frame dimensions parsed from the VP9 bitstream */
  guint width;
  guint height;
  /* picture-id mode and the running picture id counter */
  VP9PictureIDMode picture_id_mode;
  guint16 picture_id;
};

GType gst_rtp_vp9_pay_get_type (void);
+
+G_END_DECLS
+
+#endif /* #ifndef __GST_RTP_VP9_PAY_H__ */
diff --git a/gst/rtp/gstrtpvrawdepay.c b/gst/rtp/gstrtpvrawdepay.c
new file mode 100644
index 0000000000..d3bb5af05e
--- /dev/null
+++ b/gst/rtp/gstrtpvrawdepay.c
@@ -0,0 +1,663 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include <string.h>
+#include <stdlib.h>
+#include "gstrtpelements.h"
+#include "gstrtpvrawdepay.h"
+#include "gstrtputils.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpvrawdepay_debug);
+#define GST_CAT_DEFAULT (rtpvrawdepay_debug)
+
/* Source pad: any raw video; actual format is negotiated in setcaps. */
static GstStaticPadTemplate gst_rtp_vraw_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw")
    );

/* Sink pad: RFC 4175 raw-video RTP as produced by an rtpvrawpay peer. */
static GstStaticPadTemplate gst_rtp_vraw_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "clock-rate = (int) 90000, "
        "encoding-name = (string) \"RAW\", "
        "sampling = (string) { \"RGB\", \"RGBA\", \"BGR\", \"BGRA\", "
        "\"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", "
        "\"YCbCr-4:1:1\" },"
        /* we cannot express these as strings
         * "width = (string) [1 32767],"
         * "height = (string) [1 32767],"
         */
        "depth = (string) { \"8\", \"10\", \"12\", \"16\" }")
    );
+
+#define gst_rtp_vraw_depay_parent_class parent_class
+G_DEFINE_TYPE (GstRtpVRawDepay, gst_rtp_vraw_depay,
+ GST_TYPE_RTP_BASE_DEPAYLOAD);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvrawdepay, "rtpvrawdepay",
+ GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_DEPAY, rtp_element_init (plugin));
+
+static gboolean gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload,
+ GstCaps * caps);
+static GstBuffer *gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload *
+ depay, GstRTPBuffer * rtp);
+
+static GstStateChangeReturn gst_rtp_vraw_depay_change_state (GstElement *
+ element, GstStateChange transition);
+
+static gboolean gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter,
+ GstEvent * event);
+
/* Class setup: wire up vmethods, pad templates, element metadata and
 * the debug category. */
static void
gst_rtp_vraw_depay_class_init (GstRtpVRawDepayClass * klass)
{
  GstElementClass *gstelement_class;
  GstRTPBaseDepayloadClass *gstrtpbasedepayload_class;

  gstelement_class = (GstElementClass *) klass;
  gstrtpbasedepayload_class = (GstRTPBaseDepayloadClass *) klass;

  gstelement_class->change_state = gst_rtp_vraw_depay_change_state;

  gstrtpbasedepayload_class->set_caps = gst_rtp_vraw_depay_setcaps;
  gstrtpbasedepayload_class->process_rtp_packet =
      gst_rtp_vraw_depay_process_packet;
  gstrtpbasedepayload_class->handle_event = gst_rtp_vraw_depay_handle_event;

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_vraw_depay_src_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_rtp_vraw_depay_sink_template);

  gst_element_class_set_static_metadata (gstelement_class,
      "RTP Raw Video depayloader", "Codec/Depayloader/Network/RTP",
      "Extracts raw video from RTP packets (RFC 4175)",
      "Wim Taymans <wim.taymans@gmail.com>");

  GST_DEBUG_CATEGORY_INIT (rtpvrawdepay_debug, "rtpvrawdepay", 0,
      "raw video RTP Depayloader");
}
+
static void
gst_rtp_vraw_depay_init (GstRtpVRawDepay * rtpvrawdepay)
{
  /* nothing to do: GObject zero-initializes the instance and all state
   * is set up lazily in setcaps / process_packet */
}
+
+static void
+gst_rtp_vraw_depay_reset (GstRtpVRawDepay * rtpvrawdepay, gboolean full)
+{
+ if (rtpvrawdepay->outbuf) {
+ gst_video_frame_unmap (&rtpvrawdepay->frame);
+ gst_buffer_unref (rtpvrawdepay->outbuf);
+ rtpvrawdepay->outbuf = NULL;
+ }
+ rtpvrawdepay->timestamp = -1;
+
+ if (full && rtpvrawdepay->pool) {
+ gst_buffer_pool_set_active (rtpvrawdepay->pool, FALSE);
+ gst_object_unref (rtpvrawdepay->pool);
+ rtpvrawdepay->pool = NULL;
+ }
+}
+
/* Query downstream for allocation parameters and (re)configure our
 * buffer pool for @caps/@info.  Always returns GST_FLOW_OK. */
static GstFlowReturn
gst_rtp_vraw_depay_negotiate_pool (GstRtpVRawDepay * depay, GstCaps * caps,
    GstVideoInfo * info)
{
  GstQuery *query;
  GstBufferPool *pool = NULL;
  guint size, min, max;
  GstStructure *config;

  /* find a pool for the negotiated caps now */
  query = gst_query_new_allocation (caps, TRUE);

  if (!gst_pad_peer_query (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depay), query)) {
    /* not a problem, we use the defaults of query */
    GST_DEBUG_OBJECT (depay, "could not get downstream ALLOCATION hints");
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    /* we got configuration from our peer, parse them */
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  } else {
    GST_DEBUG_OBJECT (depay, "didn't get downstream pool hints");
    size = info->size;
    min = max = 0;
  }

  if (pool == NULL) {
    /* we did not get a pool, make one ourselves then */
    pool = gst_video_buffer_pool_new ();
  }

  /* replace any previously negotiated pool */
  if (depay->pool)
    gst_object_unref (depay->pool);
  depay->pool = pool;

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    /* just set the metadata, if the pool can support it we will transparently use
     * it through the video info API. We could also see if the pool support this
     * metadata and only activate it then. */
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);
  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

  gst_query_unref (query);

  return GST_FLOW_OK;
}
+
+static gboolean
+gst_rtp_vraw_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
+{
+ GstStructure *structure;
+ GstRtpVRawDepay *rtpvrawdepay;
+ gint clock_rate;
+ const gchar *str;
+ gint format, width, height, depth, pgroup, xinc, yinc;
+ GstCaps *srccaps;
+ gboolean res;
+ GstFlowReturn ret;
+
+ rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ xinc = yinc = 1;
+
+ if (!gst_structure_get_int (structure, "clock-rate", &clock_rate))
+ clock_rate = 90000; /* default */
+ depayload->clock_rate = clock_rate;
+
+ if (!(str = gst_structure_get_string (structure, "width")))
+ goto no_width;
+ width = atoi (str);
+
+ if (!(str = gst_structure_get_string (structure, "height")))
+ goto no_height;
+ height = atoi (str);
+
+ if (!(str = gst_structure_get_string (structure, "depth")))
+ goto no_depth;
+ depth = atoi (str);
+
+ /* optional interlace value but we don't handle interlaced
+ * formats yet */
+ if (gst_structure_get_string (structure, "interlace"))
+ goto interlaced;
+
+ if (!(str = gst_structure_get_string (structure, "sampling")))
+ goto no_sampling;
+
+ if (!strcmp (str, "RGB")) {
+ format = GST_VIDEO_FORMAT_RGB;
+ pgroup = 3;
+ } else if (!strcmp (str, "RGBA")) {
+ format = GST_VIDEO_FORMAT_RGBA;
+ pgroup = 4;
+ } else if (!strcmp (str, "BGR")) {
+ format = GST_VIDEO_FORMAT_BGR;
+ pgroup = 3;
+ } else if (!strcmp (str, "BGRA")) {
+ format = GST_VIDEO_FORMAT_BGRA;
+ pgroup = 4;
+ } else if (!strcmp (str, "YCbCr-4:4:4")) {
+ format = GST_VIDEO_FORMAT_AYUV;
+ pgroup = 3;
+ } else if (!strcmp (str, "YCbCr-4:2:2")) {
+ if (depth == 8) {
+ format = GST_VIDEO_FORMAT_UYVY;
+ pgroup = 4;
+ } else if (depth == 10) {
+ format = GST_VIDEO_FORMAT_UYVP;
+ pgroup = 5;
+ } else
+ goto unknown_format;
+ xinc = 2;
+ } else if (!strcmp (str, "YCbCr-4:2:0")) {
+ format = GST_VIDEO_FORMAT_I420;
+ pgroup = 6;
+ xinc = yinc = 2;
+ } else if (!strcmp (str, "YCbCr-4:1:1")) {
+ format = GST_VIDEO_FORMAT_Y41B;
+ pgroup = 6;
+ xinc = 4;
+ } else {
+ goto unknown_format;
+ }
+
+ gst_video_info_init (&rtpvrawdepay->vinfo);
+ gst_video_info_set_format (&rtpvrawdepay->vinfo, format, width, height);
+ GST_VIDEO_INFO_FPS_N (&rtpvrawdepay->vinfo) = 0;
+ GST_VIDEO_INFO_FPS_D (&rtpvrawdepay->vinfo) = 1;
+
+ rtpvrawdepay->pgroup = pgroup;
+ rtpvrawdepay->xinc = xinc;
+ rtpvrawdepay->yinc = yinc;
+
+ srccaps = gst_video_info_to_caps (&rtpvrawdepay->vinfo);
+ res = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
+ gst_caps_unref (srccaps);
+
+ GST_DEBUG_OBJECT (depayload, "width %d, height %d, format %d", width, height,
+ format);
+ GST_DEBUG_OBJECT (depayload, "xinc %d, yinc %d, pgroup %d",
+ xinc, yinc, pgroup);
+
+ /* negotiate a bufferpool */
+ if ((ret = gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, srccaps,
+ &rtpvrawdepay->vinfo)) != GST_FLOW_OK)
+ goto no_bufferpool;
+
+ return res;
+
+ /* ERRORS */
+no_width:
+ {
+ GST_ERROR_OBJECT (depayload, "no width specified");
+ return FALSE;
+ }
+no_height:
+ {
+ GST_ERROR_OBJECT (depayload, "no height specified");
+ return FALSE;
+ }
+no_depth:
+ {
+ GST_ERROR_OBJECT (depayload, "no depth specified");
+ return FALSE;
+ }
+interlaced:
+ {
+ GST_ERROR_OBJECT (depayload, "interlaced formats not supported yet");
+ return FALSE;
+ }
+no_sampling:
+ {
+ GST_ERROR_OBJECT (depayload, "no sampling specified");
+ return FALSE;
+ }
+unknown_format:
+ {
+ GST_ERROR_OBJECT (depayload, "unknown sampling format '%s'", str);
+ return FALSE;
+ }
+no_bufferpool:
+ {
+ GST_DEBUG_OBJECT (depayload, "no bufferpool");
+ return FALSE;
+ }
+}
+
/* Reassemble RFC 4175 scanline fragments into the current output video
 * frame.  One packet carries a list of (length, line, offset) headers
 * followed by the corresponding pixel data.  Returns the completed frame
 * when the RTP marker bit is seen, NULL otherwise (or on error). */
static GstBuffer *
gst_rtp_vraw_depay_process_packet (GstRTPBaseDepayload * depayload,
    GstRTPBuffer * rtp)
{
  GstRtpVRawDepay *rtpvrawdepay;
  guint8 *payload, *p0, *yp, *up, *vp, *headers;
  guint32 timestamp;
  guint cont, ystride, uvstride, pgroup, payload_len;
  gint width, height, xinc, yinc;
  GstVideoFrame *frame;
  gboolean marker;
  GstBuffer *outbuf = NULL;

  rtpvrawdepay = GST_RTP_VRAW_DEPAY (depayload);

  timestamp = gst_rtp_buffer_get_timestamp (rtp);

  /* a new RTP timestamp means a new video frame */
  if (timestamp != rtpvrawdepay->timestamp || rtpvrawdepay->outbuf == NULL) {
    GstBuffer *new_buffer;
    GstFlowReturn ret;

    GST_LOG_OBJECT (depayload, "new frame with timestamp %u", timestamp);
    /* new timestamp, flush old buffer and create new output buffer */
    if (rtpvrawdepay->outbuf) {
      gst_video_frame_unmap (&rtpvrawdepay->frame);
      gst_rtp_base_depayload_push (depayload, rtpvrawdepay->outbuf);
      rtpvrawdepay->outbuf = NULL;
    }

    /* renegotiate the pool if downstream requested reconfiguration */
    if (gst_pad_check_reconfigure (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload))) {
      GstCaps *caps;

      caps =
          gst_pad_get_current_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload));
      gst_rtp_vraw_depay_negotiate_pool (rtpvrawdepay, caps,
          &rtpvrawdepay->vinfo);
      gst_caps_unref (caps);
    }

    ret =
        gst_buffer_pool_acquire_buffer (rtpvrawdepay->pool, &new_buffer, NULL);

    if (G_UNLIKELY (ret != GST_FLOW_OK))
      goto alloc_failed;

    /* clear timestamp from alloc... */
    GST_BUFFER_PTS (new_buffer) = -1;

    if (!gst_video_frame_map (&rtpvrawdepay->frame, &rtpvrawdepay->vinfo,
            new_buffer, GST_MAP_WRITE | GST_VIDEO_FRAME_MAP_FLAG_NO_REF)) {
      gst_buffer_unref (new_buffer);
      goto invalid_frame;
    }

    rtpvrawdepay->outbuf = new_buffer;
    rtpvrawdepay->timestamp = timestamp;
  }

  frame = &rtpvrawdepay->frame;

  g_assert (frame->buffer != NULL);

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);

  pgroup = rtpvrawdepay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawdepay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawdepay->vinfo);
  xinc = rtpvrawdepay->xinc;
  yinc = rtpvrawdepay->yinc;

  payload = gst_rtp_buffer_get_payload (rtp);
  payload_len = gst_rtp_buffer_get_payload_len (rtp);

  if (payload_len < 3)
    goto short_packet;

  /* skip extended seqnum */
  payload += 2;
  payload_len -= 2;

  /* remember header position */
  headers = payload;

  gst_rtp_copy_video_meta (rtpvrawdepay, frame->buffer, rtp->buffer);

  /* find data start: skip over the 6-byte line headers; the continuation
   * bit (top bit of the offset field) marks "more headers follow" */
  do {
    if (payload_len < 6)
      goto short_packet;

    cont = payload[4] & 0x80;

    payload += 6;
    payload_len -= 6;
  } while (cont);

  while (TRUE) {
    guint length, line, offs, plen;
    guint8 *datap;

    /* stop when we run out of data */
    if (payload_len == 0)
      break;

    /* read length and cont. This should work because we iterated the headers
     * above. */
    length = (headers[0] << 8) | headers[1];
    line = ((headers[2] & 0x7f) << 8) | headers[3];
    offs = ((headers[4] & 0x7f) << 8) | headers[5];
    cont = headers[4] & 0x80;
    headers += 6;

    /* length must be a multiple of pgroup */
    if (length % pgroup != 0)
      goto wrong_length;

    if (length > payload_len)
      length = payload_len;

    /* sanity check */
    if (line > (height - yinc)) {
      GST_WARNING_OBJECT (depayload, "skipping line %d: out of range", line);
      goto next;
    }
    if (offs > (width - xinc)) {
      GST_WARNING_OBJECT (depayload, "skipping offset %d: out of range", offs);
      goto next;
    }

    /* calculate the maximum amount of bytes we can use per line */
    if (offs + ((length / pgroup) * xinc) > width) {
      plen = ((width - offs) * pgroup) / xinc;
      GST_WARNING_OBJECT (depayload, "clipping length %d, offset %d, plen %d",
          length, offs, plen);
    } else
      plen = length;

    GST_LOG_OBJECT (depayload,
        "writing length %u/%u, line %u, offset %u, remaining %u", plen, length,
        line, offs, payload_len);

    switch (GST_VIDEO_INFO_FORMAT (&rtpvrawdepay->vinfo)) {
      case GST_VIDEO_FORMAT_RGB:
      case GST_VIDEO_FORMAT_RGBA:
      case GST_VIDEO_FORMAT_BGR:
      case GST_VIDEO_FORMAT_BGRA:
      case GST_VIDEO_FORMAT_UYVY:
      case GST_VIDEO_FORMAT_UYVP:
        /* samples are packed just like gstreamer packs them */
        offs /= xinc;
        datap = p0 + (line * ystride) + (offs * pgroup);

        memcpy (datap, payload, plen);
        break;
      case GST_VIDEO_FORMAT_AYUV:
      {
        gint i;
        guint8 *p;

        datap = p0 + (line * ystride) + (offs * 4);
        p = payload;

        /* samples are packed in order Cb-Y-Cr for both interlaced and
         * progressive frames */
        for (i = 0; i < plen; i += pgroup) {
          *datap++ = 0;
          *datap++ = p[1];
          *datap++ = p[0];
          *datap++ = p[2];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_I420:
      {
        gint i;
        guint uvoff;
        guint8 *yd1p, *yd2p, *udp, *vdp, *p;

        yd1p = yp + (line * ystride) + (offs);
        yd2p = yd1p + ystride;
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* line 0/1: Y00-Y01-Y10-Y11-Cb00-Cr00 Y02-Y03-Y12-Y13-Cb01-Cr01 ... */
        for (i = 0; i < plen; i += pgroup) {
          *yd1p++ = p[0];
          *yd1p++ = p[1];
          *yd2p++ = p[2];
          *yd2p++ = p[3];
          *udp++ = p[4];
          *vdp++ = p[5];
          p += pgroup;
        }
        break;
      }
      case GST_VIDEO_FORMAT_Y41B:
      {
        gint i;
        guint uvoff;
        guint8 *ydp, *udp, *vdp, *p;

        ydp = yp + (line * ystride) + (offs);
        uvoff = (line / yinc * uvstride) + (offs / xinc);

        udp = up + uvoff;
        vdp = vp + uvoff;
        p = payload;

        /* Samples are packed in order Cb0-Y0-Y1-Cr0-Y2-Y3 for both interlaced
         * and progressive scan lines */
        for (i = 0; i < plen; i += pgroup) {
          *udp++ = p[0];
          *ydp++ = p[1];
          *ydp++ = p[2];
          *vdp++ = p[3];
          *ydp++ = p[4];
          *ydp++ = p[5];
          p += pgroup;
        }
        break;
      }
      default:
        goto unknown_sampling;
    }

  next:
    if (!cont)
      break;

    payload += length;
    payload_len -= length;
  }

  marker = gst_rtp_buffer_get_marker (rtp);

  /* the marker bit signals the last packet of the frame */
  if (marker) {
    GST_LOG_OBJECT (depayload, "marker, flushing frame");
    gst_video_frame_unmap (&rtpvrawdepay->frame);
    outbuf = rtpvrawdepay->outbuf;
    rtpvrawdepay->outbuf = NULL;
    rtpvrawdepay->timestamp = -1;
  }
  return outbuf;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (depayload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    return NULL;
  }
alloc_failed:
  {
    GST_WARNING_OBJECT (depayload, "failed to alloc output buffer");
    return NULL;
  }
invalid_frame:
  {
    GST_ERROR_OBJECT (depayload, "could not map video frame");
    return NULL;
  }
wrong_length:
  {
    GST_WARNING_OBJECT (depayload, "length not multiple of pgroup");
    return NULL;
  }
short_packet:
  {
    GST_WARNING_OBJECT (depayload, "short packet");
    return NULL;
  }
}
+
+static gboolean
+gst_rtp_vraw_depay_handle_event (GstRTPBaseDepayload * filter, GstEvent * event)
+{
+ gboolean ret;
+ GstRtpVRawDepay *rtpvrawdepay;
+
+ rtpvrawdepay = GST_RTP_VRAW_DEPAY (filter);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ gst_rtp_vraw_depay_reset (rtpvrawdepay, FALSE);
+ break;
+ default:
+ break;
+ }
+
+ ret =
+ GST_RTP_BASE_DEPAYLOAD_CLASS (parent_class)->handle_event (filter, event);
+
+ return ret;
+}
+
+static GstStateChangeReturn
+gst_rtp_vraw_depay_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstRtpVRawDepay *rtpvrawdepay;
+ GstStateChangeReturn ret;
+
+ rtpvrawdepay = GST_RTP_VRAW_DEPAY (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_vraw_depay_reset (rtpvrawdepay, TRUE);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return ret;
+}
diff --git a/gst/rtp/gstrtpvrawdepay.h b/gst/rtp/gstrtpvrawdepay.h
new file mode 100644
index 0000000000..736da75835
--- /dev/null
+++ b/gst/rtp/gstrtpvrawdepay.h
@@ -0,0 +1,69 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_VRAW_DEPAY_H__
+#define __GST_RTP_VRAW_DEPAY_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideometa.h>
+#include <gst/video/gstvideopool.h>
+#include <gst/rtp/gstrtpbasedepayload.h>
+
+G_BEGIN_DECLS
+
/* Standard GObject type boilerplate for the raw-video RTP depayloader. */
#define GST_TYPE_RTP_VRAW_DEPAY \
  (gst_rtp_vraw_depay_get_type())
#define GST_RTP_VRAW_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_VRAW_DEPAY,GstRtpVRawDepay))
#define GST_RTP_VRAW_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_VRAW_DEPAY,GstRtpVRawDepayClass))
#define GST_IS_RTP_VRAW_DEPAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_VRAW_DEPAY))
#define GST_IS_RTP_VRAW_DEPAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_VRAW_DEPAY))

typedef struct _GstRtpVRawDepay GstRtpVRawDepay;
typedef struct _GstRtpVRawDepayClass GstRtpVRawDepayClass;

struct _GstRtpVRawDepay
{
  GstRTPBaseDepayload payload;

  /* negotiated downstream buffer pool */
  GstBufferPool *pool;
  /* negotiated output video info */
  GstVideoInfo vinfo;

  /* frame currently being assembled (mapped view of outbuf) */
  GstVideoFrame frame;
  GstBuffer *outbuf;
  /* RTP timestamp of the frame being assembled; -1 when none */
  guint32 timestamp;

  /* bytes per pixel group on the wire */
  gint pgroup;
  /* horizontal/vertical subsampling increments */
  gint xinc, yinc;
};

struct _GstRtpVRawDepayClass
{
  GstRTPBaseDepayloadClass parent_class;
};

GType gst_rtp_vraw_depay_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_VRAW_DEPAY_H__ */
diff --git a/gst/rtp/gstrtpvrawpay.c b/gst/rtp/gstrtpvrawpay.c
new file mode 100644
index 0000000000..1b19cdba1c
--- /dev/null
+++ b/gst/rtp/gstrtpvrawpay.c
@@ -0,0 +1,661 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/video/video.h>
+
+#include "gstrtpelements.h"
+#include "gstrtpvrawpay.h"
+#include "gstrtputils.h"
+
/* GObject property ids */
enum
{
  PROP_CHUNKS_PER_FRAME = 1
};

/* default number of chunks (pushed buffer lists) per video frame */
#define DEFAULT_CHUNKS_PER_FRAME 10

GST_DEBUG_CATEGORY_STATIC (rtpvrawpay_debug);
#define GST_CAT_DEFAULT (rtpvrawpay_debug)

/* raw video formats this payloader can map onto RFC 4175 sampling names */
static GstStaticPadTemplate gst_rtp_vraw_pay_sink_template =
    GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/x-raw, "
        "format = (string) { RGB, RGBA, BGR, BGRA, AYUV, UYVY, I420, Y41B, UYVP }, "
        "width = (int) [ 1, 32767 ], " "height = (int) [ 1, 32767 ]; ")
    );

/* RFC 4175 output caps; width/height travel as strings in SDP and cannot
 * be expressed as ranges here (see comment below) */
static GstStaticPadTemplate gst_rtp_vraw_pay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp, "
        "media = (string) \"video\", "
        "payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
        "clock-rate = (int) 90000, "
        "encoding-name = (string) \"RAW\","
        "sampling = (string) { \"RGB\", \"RGBA\", \"BGR\", \"BGRA\", "
        "\"YCbCr-4:4:4\", \"YCbCr-4:2:2\", \"YCbCr-4:2:0\", "
        "\"YCbCr-4:1:1\" },"
        /* we cannot express these as strings
         * "width = (string) [1 32767],"
         * "height = (string) [1 32767],"
         */
        "depth = (string) { \"8\", \"10\", \"12\", \"16\" },"
        "colorimetry = (string) { \"BT601-5\", \"BT709-2\", \"SMPTE240M\" }"
        /* optional
         * interlace =
         * top-field-first =
         * chroma-position = (string)
         * gamma = (float)
         */
    )
    );

/* vmethod / property forward declarations */
static gboolean gst_rtp_vraw_pay_setcaps (GstRTPBasePayload * payload,
    GstCaps * caps);
static GstFlowReturn gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload *
    payload, GstBuffer * buffer);
static void gst_rtp_vraw_pay_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);
static void gst_rtp_vraw_pay_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);

G_DEFINE_TYPE (GstRtpVRawPay, gst_rtp_vraw_pay, GST_TYPE_RTP_BASE_PAYLOAD);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpvrawpay, "rtpvrawpay",
    GST_RANK_SECONDARY, GST_TYPE_RTP_VRAW_PAY, rtp_element_init (plugin));
+
+static void
+gst_rtp_vraw_pay_class_init (GstRtpVRawPayClass * klass)
+{
+ GstRTPBasePayloadClass *gstrtpbasepayload_class;
+ GstElementClass *gstelement_class;
+ GObjectClass *gobject_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstrtpbasepayload_class = (GstRTPBasePayloadClass *) klass;
+
+ gobject_class->set_property = gst_rtp_vraw_pay_set_property;
+ gobject_class->get_property = gst_rtp_vraw_pay_get_property;
+
+ g_object_class_install_property (gobject_class,
+ PROP_CHUNKS_PER_FRAME,
+ g_param_spec_int ("chunks-per-frame", "Chunks per Frame",
+ "Split and send out each frame in multiple chunks to reduce overhead",
+ 1, G_MAXINT, DEFAULT_CHUNKS_PER_FRAME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gstrtpbasepayload_class->set_caps = gst_rtp_vraw_pay_setcaps;
+ gstrtpbasepayload_class->handle_buffer = gst_rtp_vraw_pay_handle_buffer;
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_vraw_pay_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_vraw_pay_sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP Raw Video payloader", "Codec/Payloader/Network/RTP",
+ "Payload raw video as RTP packets (RFC 4175)",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ GST_DEBUG_CATEGORY_INIT (rtpvrawpay_debug, "rtpvrawpay", 0,
+ "Raw video RTP Payloader");
+}
+
/* Instance init: start with the default frame-chunking factor. */
static void
gst_rtp_vraw_pay_init (GstRtpVRawPay * rtpvrawpay)
{
  rtpvrawpay->chunks_per_frame = DEFAULT_CHUNKS_PER_FRAME;
}
+
+static gboolean
+gst_rtp_vraw_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
+{
+ GstRtpVRawPay *rtpvrawpay;
+ gboolean res;
+ gint pgroup, xinc, yinc;
+ const gchar *depthstr, *samplingstr, *colorimetrystr;
+ gchar *wstr, *hstr;
+ GstVideoInfo info;
+
+ rtpvrawpay = GST_RTP_VRAW_PAY (payload);
+
+ if (!gst_video_info_from_caps (&info, caps))
+ goto invalid_caps;
+
+ rtpvrawpay->vinfo = info;
+
+ if (gst_video_colorimetry_matches (&info.colorimetry,
+ GST_VIDEO_COLORIMETRY_BT601)) {
+ colorimetrystr = "BT601-5";
+ } else if (gst_video_colorimetry_matches (&info.colorimetry,
+ GST_VIDEO_COLORIMETRY_BT709)) {
+ colorimetrystr = "BT709-2";
+ } else if (gst_video_colorimetry_matches (&info.colorimetry,
+ GST_VIDEO_COLORIMETRY_SMPTE240M)) {
+ colorimetrystr = "SMPTE240M";
+ } else {
+ colorimetrystr = "SMPTE240M";
+ }
+
+ xinc = yinc = 1;
+
+ /* these values are the only thing we can do */
+ depthstr = "8";
+
+ switch (GST_VIDEO_INFO_FORMAT (&info)) {
+ case GST_VIDEO_FORMAT_RGBA:
+ samplingstr = "RGBA";
+ pgroup = 4;
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ samplingstr = "BGRA";
+ pgroup = 4;
+ break;
+ case GST_VIDEO_FORMAT_RGB:
+ samplingstr = "RGB";
+ pgroup = 3;
+ break;
+ case GST_VIDEO_FORMAT_BGR:
+ samplingstr = "BGR";
+ pgroup = 3;
+ break;
+ case GST_VIDEO_FORMAT_AYUV:
+ samplingstr = "YCbCr-4:4:4";
+ pgroup = 3;
+ break;
+ case GST_VIDEO_FORMAT_UYVY:
+ samplingstr = "YCbCr-4:2:2";
+ pgroup = 4;
+ xinc = 2;
+ break;
+ case GST_VIDEO_FORMAT_Y41B:
+ samplingstr = "YCbCr-4:1:1";
+ pgroup = 6;
+ xinc = 4;
+ break;
+ case GST_VIDEO_FORMAT_I420:
+ samplingstr = "YCbCr-4:2:0";
+ pgroup = 6;
+ xinc = yinc = 2;
+ break;
+ case GST_VIDEO_FORMAT_UYVP:
+ samplingstr = "YCbCr-4:2:2";
+ pgroup = 5;
+ xinc = 2;
+ depthstr = "10";
+ break;
+ default:
+ goto unknown_format;
+ break;
+ }
+
+ if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
+ yinc *= 2;
+ }
+
+ rtpvrawpay->pgroup = pgroup;
+ rtpvrawpay->xinc = xinc;
+ rtpvrawpay->yinc = yinc;
+
+ GST_DEBUG_OBJECT (payload, "width %d, height %d, sampling %s",
+ GST_VIDEO_INFO_WIDTH (&info), GST_VIDEO_INFO_HEIGHT (&info), samplingstr);
+ GST_DEBUG_OBJECT (payload, "xinc %d, yinc %d, pgroup %d", xinc, yinc, pgroup);
+
+ wstr = g_strdup_printf ("%d", GST_VIDEO_INFO_WIDTH (&info));
+ hstr = g_strdup_printf ("%d", GST_VIDEO_INFO_HEIGHT (&info));
+
+ gst_rtp_base_payload_set_options (payload, "video", TRUE, "RAW", 90000);
+ if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
+ res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
+ samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
+ wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
+ colorimetrystr, "interlace", G_TYPE_STRING, "true", NULL);
+ } else {
+ res = gst_rtp_base_payload_set_outcaps (payload, "sampling", G_TYPE_STRING,
+ samplingstr, "depth", G_TYPE_STRING, depthstr, "width", G_TYPE_STRING,
+ wstr, "height", G_TYPE_STRING, hstr, "colorimetry", G_TYPE_STRING,
+ colorimetrystr, NULL);
+ }
+ g_free (wstr);
+ g_free (hstr);
+
+ return res;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_ERROR_OBJECT (payload, "could not parse caps");
+ return FALSE;
+ }
+unknown_format:
+ {
+ GST_ERROR_OBJECT (payload, "unknown caps format");
+ return FALSE;
+ }
+}
+
/* Payload one raw video frame as a sequence of RFC 4175 RTP packets.
 *
 * Each packet is filled up to the MTU in two passes: first the 6-byte
 * per-span headers (length, line number, offset, continuation flag) are
 * written, then a second pass walks those headers and copies/repacks the
 * pixel data they describe.  Interlaced frames are sent as two packet
 * sequences (one per field), the second field's PTS advanced by half the
 * frame duration.  When chunks-per-frame splits the frame, packets are
 * collected in buffer lists and pushed per chunk to reduce push overhead.
 */
static GstFlowReturn
gst_rtp_vraw_pay_handle_buffer (GstRTPBasePayload * payload, GstBuffer * buffer)
{
  GstRtpVRawPay *rtpvrawpay;
  GstFlowReturn ret = GST_FLOW_OK;
  gfloat packets_per_packline;
  guint pgroups_per_packet;
  guint packlines_per_list, buffers_per_list;
  guint lines_delay;            /* after how many packed lines we push out a buffer list */
  guint last_line;              /* last pack line number we pushed out a buffer list */
  guint line, offset;
  guint8 *p0, *yp, *up, *vp;
  guint ystride, uvstride;
  guint xinc, yinc;
  guint pgroup;
  guint mtu;
  guint width, height;
  gint field, fields;
  GstVideoFormat format;
  GstVideoFrame frame;
  gint interlaced;
  gboolean use_buffer_lists;
  GstBufferList *list = NULL;
  GstRTPBuffer rtp = { NULL, };
  gboolean discont;

  rtpvrawpay = GST_RTP_VRAW_PAY (payload);

  if (!gst_video_frame_map (&frame, &rtpvrawpay->vinfo, buffer, GST_MAP_READ)) {
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  discont = GST_BUFFER_IS_DISCONT (buffer);

  GST_LOG_OBJECT (rtpvrawpay, "new frame of %" G_GSIZE_FORMAT " bytes",
      gst_buffer_get_size (buffer));

  /* get pointer and strides of the planes */
  p0 = GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
  yp = GST_VIDEO_FRAME_COMP_DATA (&frame, 0);
  up = GST_VIDEO_FRAME_COMP_DATA (&frame, 1);
  vp = GST_VIDEO_FRAME_COMP_DATA (&frame, 2);

  ystride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
  uvstride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);

  mtu = GST_RTP_BASE_PAYLOAD_MTU (payload);

  /* amount of bytes for one pixel */
  pgroup = rtpvrawpay->pgroup;
  width = GST_VIDEO_INFO_WIDTH (&rtpvrawpay->vinfo);
  height = GST_VIDEO_INFO_HEIGHT (&rtpvrawpay->vinfo);

  interlaced = GST_VIDEO_INFO_IS_INTERLACED (&rtpvrawpay->vinfo);

  format = GST_VIDEO_INFO_FORMAT (&rtpvrawpay->vinfo);

  yinc = rtpvrawpay->yinc;
  xinc = rtpvrawpay->xinc;

  /* after how many packed lines we push out a buffer list */
  lines_delay = GST_ROUND_UP_4 (height / rtpvrawpay->chunks_per_frame);

  /* calculate how many buffers we expect to store in a single buffer list
   * (12 = RTP header, 14 = extended seqnum + 2 span headers) */
  pgroups_per_packet = (mtu - (12 + 14)) / pgroup;
  packets_per_packline = width / (xinc * pgroups_per_packet * 1.0);
  packlines_per_list = height / (yinc * rtpvrawpay->chunks_per_frame);
  buffers_per_list = packlines_per_list * packets_per_packline;
  buffers_per_list = GST_ROUND_UP_8 (buffers_per_list);

  /* only bother with lists when a chunk actually holds multiple packets */
  use_buffer_lists = buffers_per_list > 1 &&
      (rtpvrawpay->chunks_per_frame < (height / yinc));

  fields = 1 + interlaced;

  /* start with line 0, offset 0 */
  for (field = 0; field < fields; field++) {
    line = field;
    offset = 0;
    last_line = 0;

    if (use_buffer_lists)
      list = gst_buffer_list_new_sized (buffers_per_list);

    /* write all lines */
    while (line < height) {
      guint left, pack_line;
      GstBuffer *out;
      guint8 *outdata, *headers;
      gboolean next_line, complete = FALSE;
      guint length, cont, pixels;

      /* get the max allowed payload length size, we try to fill the complete MTU */
      left = gst_rtp_buffer_calc_payload_len (mtu, 0, 0);
      out = gst_rtp_base_payload_allocate_output_buffer (payload, left, 0, 0);

      if (discont) {
        GST_BUFFER_FLAG_SET (out, GST_BUFFER_FLAG_DISCONT);
        /* Only the first outputted buffer has the DISCONT flag */
        discont = FALSE;
      }

      /* second field is presented half a frame duration later */
      if (field == 0) {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer);
      } else {
        GST_BUFFER_PTS (out) = GST_BUFFER_PTS (buffer) +
            GST_BUFFER_DURATION (buffer) / 2;
      }

      gst_rtp_buffer_map (out, GST_MAP_WRITE, &rtp);
      outdata = gst_rtp_buffer_get_payload (&rtp);

      GST_LOG_OBJECT (rtpvrawpay, "created buffer of size %u for MTU %u", left,
          mtu);

      /*
       *   0                   1                   2                   3
       *   0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |   Extended Sequence Number    |            Length             |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |F|          Line No            |C|           Offset            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |            Length             |F|          Line No            |
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       *  |C|           Offset            |                               .
       *  +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+                               .
       *  .                                                               .
       *  .                 Two (partial) lines of video data             .
       *  .                                                               .
       *  +---------------------------------------------------------------+
       */

      /* need 2 bytes for the extended sequence number */
      *outdata++ = 0;
      *outdata++ = 0;
      left -= 2;

      /* the headers start here */
      headers = outdata;

      /* make sure we can fit at least *one* header and pixel */
      if (!(left > (6 + pgroup))) {
        gst_rtp_buffer_unmap (&rtp);
        gst_buffer_unref (out);
        goto too_small;
      }

      /* while we can fit at least one header and one pixel */
      while (left > (6 + pgroup)) {
        /* we need a 6 bytes header */
        left -= 6;

        /* get how may bytes we need for the remaining pixels */
        pixels = width - offset;
        length = (pixels * pgroup) / xinc;

        if (left >= length) {
          /* pixels and header fit completely, we will write them and skip to the
           * next line. */
          next_line = TRUE;
        } else {
          /* line does not fit completely, see how many pixels fit */
          pixels = (left / pgroup) * xinc;
          length = (pixels * pgroup) / xinc;
          next_line = FALSE;
        }
        GST_LOG_OBJECT (rtpvrawpay, "filling %u bytes in %u pixels", length,
            pixels);
        left -= length;

        /* write length */
        *outdata++ = (length >> 8) & 0xff;
        *outdata++ = length & 0xff;

        /* write line no; high bit carries the field number */
        *outdata++ = ((line >> 8) & 0x7f) | ((field << 7) & 0x80);
        *outdata++ = line & 0xff;

        if (next_line) {
          /* go to next line we do this here to make the check below easier */
          line += yinc;
        }

        /* calculate continuation marker */
        cont = (left > (6 + pgroup) && line < height) ? 0x80 : 0x00;

        /* write offset and continuation marker */
        *outdata++ = ((offset >> 8) & 0x7f) | cont;
        *outdata++ = offset & 0xff;

        if (next_line) {
          /* reset offset */
          offset = 0;
          GST_LOG_OBJECT (rtpvrawpay, "go to next line %u", line);
        } else {
          offset += pixels;
          GST_LOG_OBJECT (rtpvrawpay, "next offset %u", offset);
        }

        if (!cont)
          break;
      }
      GST_LOG_OBJECT (rtpvrawpay, "consumed %u bytes",
          (guint) (outdata - headers));

      /* second pass, read headers and write the data */
      while (TRUE) {
        guint offs, lin;

        /* read length and cont */
        length = (headers[0] << 8) | headers[1];
        lin = ((headers[2] & 0x7f) << 8) | headers[3];
        offs = ((headers[4] & 0x7f) << 8) | headers[5];
        cont = headers[4] & 0x80;
        pixels = length / pgroup;
        headers += 6;

        GST_LOG_OBJECT (payload,
            "writing length %u, line %u, offset %u, cont %d", length, lin, offs,
            cont);

        switch (format) {
          case GST_VIDEO_FORMAT_RGB:
          case GST_VIDEO_FORMAT_RGBA:
          case GST_VIDEO_FORMAT_BGR:
          case GST_VIDEO_FORMAT_BGRA:
          case GST_VIDEO_FORMAT_UYVY:
          case GST_VIDEO_FORMAT_UYVP:
            /* packed formats: wire layout matches memory, plain copy */
            offs /= xinc;
            memcpy (outdata, p0 + (lin * ystride) + (offs * pgroup), length);
            outdata += length;
            break;
          case GST_VIDEO_FORMAT_AYUV:
          {
            gint i;
            guint8 *datap;

            datap = p0 + (lin * ystride) + (offs * 4);

            /* repack A,Y,U,V pixels as U,Y,V triplets, dropping alpha */
            for (i = 0; i < pixels; i++) {
              *outdata++ = datap[2];
              *outdata++ = datap[1];
              *outdata++ = datap[3];
              datap += 4;
            }
            break;
          }
          case GST_VIDEO_FORMAT_I420:
          {
            gint i;
            guint uvoff;
            guint8 *yd1p, *yd2p, *udp, *vdp;

            /* interleave two Y lines with the shared chroma line:
             * each 6-byte pgroup is Y00 Y01 Y10 Y11 Cb Cr for a 2x2 block */
            yd1p = yp + (lin * ystride) + (offs);
            yd2p = yd1p + ystride;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *yd1p++;
              *outdata++ = *yd1p++;
              *outdata++ = *yd2p++;
              *outdata++ = *yd2p++;
              *outdata++ = *udp++;
              *outdata++ = *vdp++;
            }
            break;
          }
          case GST_VIDEO_FORMAT_Y41B:
          {
            gint i;
            guint uvoff;
            guint8 *ydp, *udp, *vdp;

            /* 4:1:1 pgroup: Cb Y0 Y1 Cr Y2 Y3 */
            ydp = yp + (lin * ystride) + offs;
            uvoff = (lin / yinc * uvstride) + (offs / xinc);
            udp = up + uvoff;
            vdp = vp + uvoff;

            for (i = 0; i < pixels; i++) {
              *outdata++ = *udp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
              *outdata++ = *vdp++;
              *outdata++ = *ydp++;
              *outdata++ = *ydp++;
            }
            break;
          }
          default:
            gst_rtp_buffer_unmap (&rtp);
            gst_buffer_unref (out);
            goto unknown_sampling;
        }

        if (!cont)
          break;
      }

      if (line >= height) {
        GST_LOG_OBJECT (rtpvrawpay, "field/frame complete, set marker");
        gst_rtp_buffer_set_marker (&rtp, TRUE);
        complete = TRUE;
      }
      gst_rtp_buffer_unmap (&rtp);
      /* trim the unused tail of the packet */
      if (left > 0) {
        GST_LOG_OBJECT (rtpvrawpay, "we have %u bytes left", left);
        gst_buffer_resize (out, 0, gst_buffer_get_size (out) - left);
      }

      gst_rtp_copy_video_meta (rtpvrawpay, out, buffer);

      /* Now either push out the buffer directly */
      if (!use_buffer_lists) {
        ret = gst_rtp_base_payload_push (payload, out);
        continue;
      }

      /* or add the buffer to buffer list ... */
      gst_buffer_list_add (list, out);

      /* .. and check if we need to push out the list */
      pack_line = (line - field) / fields;
      if (complete || (pack_line > last_line && pack_line % lines_delay == 0)) {
        GST_LOG_OBJECT (rtpvrawpay, "pushing list of %u buffers up to pack "
            "line %u", gst_buffer_list_length (list), pack_line);
        ret = gst_rtp_base_payload_push_list (payload, list);
        list = NULL;
        if (!complete)
          list = gst_buffer_list_new_sized (buffers_per_list);
        last_line = pack_line;
      }
    }

  }

  gst_video_frame_unmap (&frame);
  gst_buffer_unref (buffer);

  return ret;

  /* ERRORS */
unknown_sampling:
  {
    GST_ELEMENT_ERROR (payload, STREAM, FORMAT,
        (NULL), ("unimplemented sampling"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
too_small:
  {
    GST_ELEMENT_ERROR (payload, RESOURCE, NO_SPACE_LEFT,
        (NULL), ("not enough space to send at least one pixel"));
    gst_video_frame_unmap (&frame);
    gst_buffer_unref (buffer);
    return GST_FLOW_NOT_SUPPORTED;
  }
}
+
+static void
+gst_rtp_vraw_pay_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpVRawPay *rtpvrawpay;
+
+ rtpvrawpay = GST_RTP_VRAW_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CHUNKS_PER_FRAME:
+ rtpvrawpay->chunks_per_frame = g_value_get_int (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtp_vraw_pay_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpVRawPay *rtpvrawpay;
+
+ rtpvrawpay = GST_RTP_VRAW_PAY (object);
+
+ switch (prop_id) {
+ case PROP_CHUNKS_PER_FRAME:
+ g_value_set_int (value, rtpvrawpay->chunks_per_frame);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/rtp/gstrtpvrawpay.h b/gst/rtp/gstrtpvrawpay.h
new file mode 100644
index 0000000000..008cbee93c
--- /dev/null
+++ b/gst/rtp/gstrtpvrawpay.h
@@ -0,0 +1,65 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_RTP_VRAW_PAY_H__
#define __GST_RTP_VRAW_PAY_H__

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/rtp/gstrtpbasepayload.h>

G_BEGIN_DECLS

/* standard GObject type/cast boilerplate for the raw video (RFC 4175)
 * payloader element */
#define GST_TYPE_RTP_VRAW_PAY \
  (gst_rtp_vraw_pay_get_type())
#define GST_RTP_VRAW_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_VRAW_PAY,GstRtpVRawPay))
#define GST_RTP_VRAW_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_VRAW_PAY,GstRtpVRawPayClass))
#define GST_IS_RTP_VRAW_PAY(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_VRAW_PAY))
#define GST_IS_RTP_VRAW_PAY_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_VRAW_PAY))

typedef struct _GstRtpVRawPay GstRtpVRawPay;
typedef struct _GstRtpVRawPayClass GstRtpVRawPayClass;

/* Instance state of the raw video payloader; pgroup/xinc/yinc are derived
 * from the negotiated format in setcaps. */
struct _GstRtpVRawPay
{
  GstRTPBasePayload payload;

  GstVideoInfo vinfo;           /* negotiated raw video info */

  gint pgroup;                  /* bytes per pixel group (RFC 4175 pgroup) */
  gint xinc, yinc;              /* pixels (x) and lines (y) covered per pgroup */

  /* properties */
  guint chunks_per_frame;       /* number of buffer lists a frame is pushed in */
};

struct _GstRtpVRawPayClass
{
  GstRTPBasePayloadClass parent_class;
};

GType gst_rtp_vraw_pay_get_type (void);

G_END_DECLS

#endif /* __GST_RTP_VRAW_PAY_H__ */
diff --git a/gst/rtp/meson.build b/gst/rtp/meson.build
new file mode 100644
index 0000000000..2710dcc1a0
--- /dev/null
+++ b/gst/rtp/meson.build
@@ -0,0 +1,128 @@
# Sources of the "rtp" plugin: one pair of payloader/depayloader files per
# format, plus shared helpers (gstrtputils, rtpulpfec*, rtpred*, rtpstorage*).
rtp_sources = [
  'dboolhuff.c',
  'fnv1hash.c',
  'gstbuffermemory.c',
  'gstrtpelement.c',
  'gstrtp.c',
  'gstrtpchannels.c',
  'gstrtpac3depay.c',
  'gstrtpac3pay.c',
  'gstrtpbvdepay.c',
  'gstrtpbvpay.c',
  'gstrtpceltdepay.c',
  'gstrtpceltpay.c',
  'gstrtpdvdepay.c',
  'gstrtpdvpay.c',
  'gstrtpgstdepay.c',
  'gstrtpgstpay.c',
  'gstrtpilbcdepay.c',
  'gstrtpilbcpay.c',
  'gstrtpklvdepay.c',
  'gstrtpklvpay.c',
  'gstrtpmpadepay.c',
  'gstrtpmpapay.c',
  'gstrtpmparobustdepay.c',
  'gstrtpmpvdepay.c',
  'gstrtpmpvpay.c',
  'gstrtpopuspay.c',
  'gstrtpopusdepay.c',
  'gstrtppcmadepay.c',
  'gstrtppcmudepay.c',
  'gstrtppcmupay.c',
  'gstrtppcmapay.c',
  'gstrtpg722depay.c',
  'gstrtpg722pay.c',
  'gstrtpg723depay.c',
  'gstrtpg723pay.c',
  'gstrtpg726pay.c',
  'gstrtpg726depay.c',
  'gstrtpg729pay.c',
  'gstrtpg729depay.c',
  'gstrtpgsmdepay.c',
  'gstrtpgsmpay.c',
  'gstrtpamrdepay.c',
  'gstrtpamrpay.c',
  'gstrtphdrext-colorspace.c',
  'gstrtph261depay.c',
  'gstrtph261pay.c',
  'gstrtph263pdepay.c',
  'gstrtph263ppay.c',
  'gstrtph263depay.c',
  'gstrtph263pay.c',
  'gstrtph264depay.c',
  'gstrtph264pay.c',
  'gstrtph265depay.c',
  'gstrtph265pay.c',
  'gstrtpj2kdepay.c',
  'gstrtpj2kpay.c',
  'gstrtpjpegdepay.c',
  'gstrtpjpegpay.c',
  'gstrtpL8depay.c',
  'gstrtpL8pay.c',
  'gstrtpL16depay.c',
  'gstrtpL16pay.c',
  'gstrtpL24depay.c',
  'gstrtpL24pay.c',
  'gstrtpldacpay.c',
  'gstasteriskh263.c',
  'gstrtpmp1sdepay.c',
  'gstrtpmp2tdepay.c',
  'gstrtpmp2tpay.c',
  'gstrtpmp4vdepay.c',
  'gstrtpmp4vpay.c',
  'gstrtpmp4gdepay.c',
  'gstrtpmp4gpay.c',
  'gstrtpmp4adepay.c',
  'gstrtpmp4apay.c',
  'gstrtpqcelpdepay.c',
  'gstrtpqdmdepay.c',
  'gstrtpsbcdepay.c',
  'gstrtpsbcpay.c',
  'gstrtpsirenpay.c',
  'gstrtpsirendepay.c',
  'gstrtpspeexdepay.c',
  'gstrtpspeexpay.c',
  'gstrtpsv3vdepay.c',
  'gstrtptheoradepay.c',
  'gstrtptheorapay.c',
  'gstrtpvorbisdepay.c',
  'gstrtpvorbispay.c',
  'gstrtpvp8depay.c',
  'gstrtpvp8pay.c',
  'gstrtpvp9depay.c',
  'gstrtpvp9pay.c',
  'gstrtpvrawdepay.c',
  'gstrtpvrawpay.c',
  'gstrtpstreampay.c',
  'gstrtpstreamdepay.c',
  'gstrtputils.c',
  'rtpulpfeccommon.c',
  'gstrtpulpfecdec.c',
  'gstrtpulpfecenc.c',
  'rtpredcommon.c',
  'gstrtpredenc.c',
  'gstrtpreddec.c',
  'rtpstorage.c',
  'rtpstoragestream.c',
  'gstrtpstorage.c',
  'gstrtpisacdepay.c',
  'gstrtpisacpay.c',
]

# Rename the bundled VP8 bool-decoder symbols so they cannot clash with a
# linked-in libvpx providing the same names.
rtp_args = [
  '-Dvp8_norm=gst_rtpvp8_vp8_norm',
  '-Dvp8dx_start_decode=gst_rtpvp8_vp8dx_start_decode',
  '-Dvp8dx_bool_decoder_fill=gst_rtpvp8_vp8dx_bool_decoder_fill',
]

gstrtp = library('gstrtp',
  rtp_sources,
  c_args : gst_plugins_good_args + rtp_args,
  include_directories : [configinc],
  dependencies : [gstbase_dep, gstaudio_dep, gstvideo_dep, gsttag_dep,
                  gstrtp_dep, gstpbutils_dep, libm],
  install : true,
  install_dir : plugins_install_dir,
)
pkgconfig.generate(gstrtp, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstrtp]
diff --git a/gst/rtp/rtpredcommon.c b/gst/rtp/rtpredcommon.c
new file mode 100644
index 0000000000..64362c53e1
--- /dev/null
+++ b/gst/rtp/rtpredcommon.c
@@ -0,0 +1,90 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#include "rtpredcommon.h"
+
+gsize
+rtp_red_block_header_get_length (gboolean is_redundant)
+{
+ return is_redundant ? sizeof (RedBlockHeader) : 1;
+}
+
+gboolean
+rtp_red_block_is_redundant (gpointer red_block)
+{
+ return ((RedBlockHeader *) red_block)->F;
+}
+
+guint8
+rtp_red_block_get_payload_type (gpointer red_block)
+{
+ return ((RedBlockHeader *) red_block)->pt;
+}
+
+guint16
+rtp_red_block_get_payload_length (gpointer red_block)
+{
+ RedBlockHeader *hdr = (RedBlockHeader *) red_block;
+ return (hdr->length_hi << 8) | hdr->length_lo;
+}
+
+guint16
+rtp_red_block_get_timestamp_offset (gpointer red_block)
+{
+ RedBlockHeader *hdr = (RedBlockHeader *) red_block;
+ return (hdr->timestamp_offset_hi << 6) | hdr->timestamp_offset_lo;
+}
+
+void
+rtp_red_block_set_payload_type (gpointer red_block, guint8 pt)
+{
+ ((RedBlockHeader *) red_block)->pt = pt;
+}
+
+void
+rtp_red_block_set_is_redundant (gpointer red_block, gboolean is_redundant)
+{
+ ((RedBlockHeader *) red_block)->F = is_redundant;
+}
+
+void
+rtp_red_block_set_timestamp_offset (gpointer red_block,
+ guint16 timestamp_offset)
+{
+ RedBlockHeader *hdr = (RedBlockHeader *) red_block;
+
+ g_assert (rtp_red_block_is_redundant (red_block));
+ g_assert_cmpint (timestamp_offset, <=, RED_BLOCK_TIMESTAMP_OFFSET_MAX);
+
+ hdr->timestamp_offset_lo = timestamp_offset & 0x3f;
+ hdr->timestamp_offset_hi = timestamp_offset >> 6;
+}
+
+void
+rtp_red_block_set_payload_length (gpointer red_block, guint16 length)
+{
+ RedBlockHeader *hdr = (RedBlockHeader *) red_block;
+
+ g_assert (rtp_red_block_is_redundant (red_block));
+ g_assert_cmpint (length, <=, RED_BLOCK_LENGTH_MAX);
+
+ hdr->length_lo = length & 0xff;
+ hdr->length_hi = length >> 8;
+}
diff --git a/gst/rtp/rtpredcommon.h b/gst/rtp/rtpredcommon.h
new file mode 100644
index 0000000000..80fc413816
--- /dev/null
+++ b/gst/rtp/rtpredcommon.h
@@ -0,0 +1,80 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
#ifndef __RTP_RED_COMMON_H__
#define __RTP_RED_COMMON_H__

#include <glib.h>

G_BEGIN_DECLS

typedef struct _RedBlockHeader RedBlockHeader;

/* RFC 2198 */
/*
  0                   1                    2                   3
  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
 |F|   block PT  |  timestamp offset         |   block length    |
 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/

/* Bit-exact layout of the 4-byte block header above.  The 14-bit
 * timestamp offset is split into 8 high + 6 low bits and the 10-bit
 * length into 2 high + 8 low bits so every bit-field stays within a
 * single octet; the accessors in rtpredcommon.c reassemble them. */
struct _RedBlockHeader {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
  guint pt:7;
  guint F:1;

  guint timestamp_offset_hi: 8;

  guint length_hi: 2;
  guint timestamp_offset_lo: 6;

  guint length_lo: 8;
#elif G_BYTE_ORDER == G_BIG_ENDIAN
  guint F:1;
  guint pt:7;

  guint timestamp_offset_hi: 8;

  guint timestamp_offset_lo: 6;
  guint length_hi: 2;

  guint length_lo: 8;
#else
#error "G_BYTE_ORDER should be big or little endian."
#endif
};

/* maxima implied by the 14-bit offset and 10-bit length fields */
#define RED_BLOCK_TIMESTAMP_OFFSET_MAX ((1<<14) - 1)
#define RED_BLOCK_LENGTH_MAX ((1<<10) - 1)

gsize rtp_red_block_header_get_length (gboolean is_redundant);
gboolean rtp_red_block_is_redundant (gpointer red_block);
void rtp_red_block_set_payload_type (gpointer red_block, guint8 pt);
void rtp_red_block_set_timestamp_offset (gpointer red_block, guint16 timestamp_offset);
void rtp_red_block_set_payload_length (gpointer red_block, guint16 length);
guint16 rtp_red_block_get_timestamp_offset (gpointer red_block);
guint8 rtp_red_block_get_payload_type (gpointer red_block);
void rtp_red_block_set_is_redundant (gpointer red_block, gboolean is_redundant);
guint16 rtp_red_block_get_payload_length (gpointer red_block);

G_END_DECLS

#endif
diff --git a/gst/rtp/rtpstorage.c b/gst/rtp/rtpstorage.c
new file mode 100644
index 0000000000..d83dabbfea
--- /dev/null
+++ b/gst/rtp/rtpstorage.c
@@ -0,0 +1,259 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "rtpstorage.h"
+#include "rtpstoragestream.h"
+
+#define GST_CAT_DEFAULT (gst_rtp_storage_debug)
+
+enum
+{
+ SIGNAL_PACKET_RECOVERED,
+ LAST_SIGNAL,
+};
+
+static guint rtp_storage_signals[LAST_SIGNAL] = { 0 };
+
+G_DEFINE_TYPE (RtpStorage, rtp_storage, G_TYPE_OBJECT);
+
+#define STORAGE_LOCK(s) g_mutex_lock (&(s)->streams_lock)
+#define STORAGE_UNLOCK(s) g_mutex_unlock (&(s)->streams_lock)
+#define DEFAULT_SIZE_TIME (0)
+
+static void
+rtp_storage_init (RtpStorage * self)
+{
+ self->size_time = DEFAULT_SIZE_TIME;
+ self->streams = g_hash_table_new_full (NULL, NULL, NULL,
+ (GDestroyNotify) rtp_storage_stream_free);
+ g_mutex_init (&self->streams_lock);
+}
+
+static void
+rtp_storage_dispose (GObject * obj)
+{
+ RtpStorage *self = RTP_STORAGE (obj);
+ STORAGE_LOCK (self);
+ g_hash_table_unref (self->streams);
+ self->streams = NULL;
+ STORAGE_UNLOCK (self);
+ g_mutex_clear (&self->streams_lock);
+ G_OBJECT_CLASS (rtp_storage_parent_class)->dispose (obj);
+}
+
+static void
+rtp_storage_class_init (RtpStorageClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+ rtp_storage_signals[SIGNAL_PACKET_RECOVERED] =
+ g_signal_new ("packet-recovered", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_BUFFER);
+
+ gobject_class->dispose = rtp_storage_dispose;
+}
+
+GstBufferList *
+rtp_storage_get_packets_for_recovery (RtpStorage * self, gint fec_pt,
+ guint32 ssrc, guint16 lost_seq)
+{
+ GstBufferList *ret = NULL;
+ RtpStorageStream *stream;
+
+ if (0 == self->size_time) {
+ GST_WARNING_OBJECT (self, "Received request for recovery RTP packets"
+ " around lost_seqnum=%u fec_pt=%u for ssrc=%08x, but size is 0",
+ lost_seq, fec_pt, ssrc);
+ return NULL;
+ }
+
+ STORAGE_LOCK (self);
+ stream = g_hash_table_lookup (self->streams, GUINT_TO_POINTER (ssrc));
+ STORAGE_UNLOCK (self);
+
+ if (NULL == stream) {
+ GST_ERROR_OBJECT (self, "Can't find ssrc = 0x08%x", ssrc);
+ } else {
+ STREAM_LOCK (stream);
+ if (stream->queue.length > 0) {
+ GST_LOG_OBJECT (self, "Looking for recovery packets for fec_pt=%u around"
+ " lost_seq=%u for ssrc=%08x", fec_pt, lost_seq, ssrc);
+ ret =
+ rtp_storage_stream_get_packets_for_recovery (stream, fec_pt,
+ lost_seq);
+ } else {
+ GST_DEBUG_OBJECT (self, "Empty RTP storage for ssrc=%08x", ssrc);
+ }
+ STREAM_UNLOCK (stream);
+ }
+
+ return ret;
+}
+
+GstBuffer *
+rtp_storage_get_redundant_packet (RtpStorage * self, guint32 ssrc,
+ guint16 lost_seq)
+{
+ GstBuffer *ret = NULL;
+ RtpStorageStream *stream;
+
+ if (0 == self->size_time) {
+ GST_WARNING_OBJECT (self, "Received request for redundant RTP packet with"
+ " seq=%u for ssrc=%08x, but size is 0", lost_seq, ssrc);
+ return NULL;
+ }
+
+ STORAGE_LOCK (self);
+ stream = g_hash_table_lookup (self->streams, GUINT_TO_POINTER (ssrc));
+ STORAGE_UNLOCK (self);
+
+ if (NULL == stream) {
+ GST_ERROR_OBJECT (self, "Can't find ssrc = 0x%x", ssrc);
+ } else {
+ STREAM_LOCK (stream);
+ if (stream->queue.length > 0) {
+ ret = rtp_storage_stream_get_redundant_packet (stream, lost_seq);
+ } else {
+ GST_DEBUG_OBJECT (self, "Empty RTP storage for ssrc=%08x", ssrc);
+ }
+ STREAM_UNLOCK (stream);
+ }
+
+ return ret;
+}
+
+static void
+rtp_storage_do_put_recovered_packet (RtpStorage * self,
+ GstBuffer * buffer, guint8 pt, guint32 ssrc, guint16 seq)
+{
+ RtpStorageStream *stream;
+
+ STORAGE_LOCK (self);
+ stream = g_hash_table_lookup (self->streams, GUINT_TO_POINTER (ssrc));
+ STORAGE_UNLOCK (self);
+
+ g_assert (stream);
+
+ GST_LOG_OBJECT (self,
+ "Storing recovered RTP packet with ssrc=%08x pt=%u seq=%u %"
+ GST_PTR_FORMAT, ssrc, pt, seq, buffer);
+
+ STREAM_LOCK (stream);
+ rtp_storage_stream_add_item (stream, buffer, pt, seq);
+ STREAM_UNLOCK (stream);
+}
+
/* Stores a recovered packet and then notifies listeners through the
 * "packet-recovered" signal. The storage takes ownership of @buffer
 * (rtp_storage_stream_add_item() keeps it without adding a reference;
 * it is released when the item is evicted or the stream is freed). */
void
rtp_storage_put_recovered_packet (RtpStorage * self,
    GstBuffer * buffer, guint8 pt, guint32 ssrc, guint16 seq)
{
  rtp_storage_do_put_recovered_packet (self, buffer, pt, ssrc, seq);
  g_signal_emit (self, rtp_storage_signals[SIGNAL_PACKET_RECOVERED], 0, buffer);
}
+
/* Stores a copy (reference) of the incoming RTP buffer @buf, creating a
 * per-SSRC stream on first sight. Returns TRUE if the caller should keep
 * processing/pushing @buf, FALSE for buffers flagged REDUNDANT, which the
 * storage consumes (the caller is expected to drop them downstream).
 * No-op (returns TRUE) when the storage is disabled or @buf is not a
 * valid RTP packet. */
gboolean
rtp_storage_append_buffer (RtpStorage * self, GstBuffer * buf)
{
  GstRTPBuffer rtpbuf = GST_RTP_BUFFER_INIT;
  RtpStorageStream *stream;
  guint32 ssrc;
  guint8 pt;
  guint16 seq;

  if (0 == self->size_time)
    return TRUE;

  /* We are about to keep the buffer in the queue, so take our own
   * reference before mapping it */
  gst_buffer_ref (buf);

  if (!gst_rtp_buffer_map (buf, GST_MAP_READ |
          GST_RTP_BUFFER_MAP_FLAG_SKIP_PADDING, &rtpbuf)) {
    /* Not a valid RTP packet: drop our reference and let it pass through */
    gst_buffer_unref (buf);
    return TRUE;
  }

  ssrc = gst_rtp_buffer_get_ssrc (&rtpbuf);
  pt = gst_rtp_buffer_get_payload_type (&rtpbuf);
  seq = gst_rtp_buffer_get_seq (&rtpbuf);

  STORAGE_LOCK (self);

  stream = g_hash_table_lookup (self->streams, GUINT_TO_POINTER (ssrc));
  if (NULL == stream) {
    GST_DEBUG_OBJECT (self,
        "New media stream (ssrc=0x%08x, pt=%u) detected", ssrc, pt);
    stream = rtp_storage_stream_new (ssrc);
    g_hash_table_insert (self->streams, GUINT_TO_POINTER (ssrc), stream);
  }

  STORAGE_UNLOCK (self);

  GST_LOG_OBJECT (self,
      "Storing RTP packet with ssrc=%08x pt=%u seq=%u %" GST_PTR_FORMAT,
      ssrc, pt, seq, buf);

  STREAM_LOCK (stream);

  /* Saving the buffer, now the storage owns it */
  rtp_storage_stream_resize_and_add_item (stream, self->size_time, buf, pt,
      seq);

  STREAM_UNLOCK (stream);

  gst_rtp_buffer_unmap (&rtpbuf);

  /* Redundant (RED) packets stop here: drop the caller's interest in the
   * buffer (the queue keeps the reference we took above) and report FALSE */
  if (GST_BUFFER_FLAG_IS_SET (buf, GST_RTP_BUFFER_FLAG_REDUNDANT)) {
    gst_buffer_unref (buf);
    return FALSE;
  }

  return TRUE;
}
+
+void
+rtp_storage_clear (RtpStorage * self)
+{
+ STORAGE_LOCK (self);
+ g_hash_table_remove_all (self->streams);
+ STORAGE_UNLOCK (self);
+}
+
+void
+rtp_storage_set_size (RtpStorage * self, GstClockTime size)
+{
+ self->size_time = size;
+ if (0 == self->size_time)
+ rtp_storage_clear (self);
+}
+
/* Returns the currently configured storage duration (0 = disabled). */
GstClockTime
rtp_storage_get_size (RtpStorage * self)
{
  return self->size_time;
}
+
+RtpStorage *
+rtp_storage_new (void)
+{
+ return g_object_new (RTP_TYPE_STORAGE, NULL);
+}
diff --git a/gst/rtp/rtpstorage.h b/gst/rtp/rtpstorage.h
new file mode 100644
index 0000000000..bd876326ad
--- /dev/null
+++ b/gst/rtp/rtpstorage.h
@@ -0,0 +1,69 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __RTP_STORAGE_H__
+#define __RTP_STORAGE_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#define RTP_TYPE_STORAGE \
+ (rtp_storage_get_type())
+#define RTP_STORAGE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),RTP_TYPE_STORAGE,RtpStorage))
+#define RTP_STORAGE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),RTP_TYPE_STORAGE,RtpStorageClass))
+#define GST_IS_RTP_STORAGE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),RTP_TYPE_STORAGE))
+#define GST_IS_RTP_STORAGE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),RTP_TYPE_RED_DEC))
+
+typedef struct _RtpStorage RtpStorage;
+typedef struct _RtpStorageClass RtpStorageClass;
+
+struct _RtpStorageClass {
+ GObjectClass parent_class;
+};
+
+struct _RtpStorage {
+ GObject parent;
+ GstClockTime size_time;
+ GHashTable *streams;
+ GMutex streams_lock;
+};
+
+GstBufferList * rtp_storage_get_packets_for_recovery (RtpStorage * self, gint fec_pt,
+ guint32 ssrc, guint16 lost_seq);
+void rtp_storage_put_recovered_packet (RtpStorage * self, GstBuffer * buffer,
+ guint8 pt, guint32 ssrc, guint16 seq);
+GstBuffer * rtp_storage_get_redundant_packet (RtpStorage * self, guint32 ssrc,
+ guint16 lost_seq);
+gboolean rtp_storage_append_buffer (RtpStorage *self, GstBuffer *buffer);
+void rtp_storage_clear (RtpStorage *self);
+RtpStorage * rtp_storage_new (void);
+void rtp_storage_set_size (RtpStorage *self, GstClockTime size);
+GstClockTime rtp_storage_get_size (RtpStorage *self);
+
+GType rtp_storage_get_type (void);
+
+G_END_DECLS
+
+#endif /* __RTP_STORAGE_H__ */
diff --git a/gst/rtp/rtpstoragestream.c b/gst/rtp/rtpstoragestream.c
new file mode 100644
index 0000000000..a0708859e4
--- /dev/null
+++ b/gst/rtp/rtpstoragestream.c
@@ -0,0 +1,277 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#include "rtpstoragestream.h"
+
+#define GST_CAT_DEFAULT (gst_rtp_storage_debug)
+
+static RtpStorageItem *
+rtp_storage_item_new (GstBuffer * buffer, guint8 pt, guint16 seq)
+{
+ RtpStorageItem *ret = g_slice_new0 (RtpStorageItem);
+ ret->buffer = buffer;
+ ret->pt = pt;
+ ret->seq = seq;
+ return ret;
+}
+
+static void
+rtp_storage_item_free (RtpStorageItem * item)
+{
+ g_assert (item->buffer != NULL);
+ gst_buffer_unref (item->buffer);
+ g_slice_free (RtpStorageItem, item);
+}
+
/* Search helper for g_queue_find_custom(): @a is the item being inserted,
 * @b an element already in the queue. Returns 0 ("match") for the first
 * element whose seqnum is not newer than @a's, i.e. the sibling before
 * which @a must be inserted to keep the queue ordered newest (head) to
 * oldest (tail); returns 1 ("no match") otherwise — a -1 is never needed.
 * NOTE(review): declared with a userdata parameter but cast to the
 * two-argument GCompareFunc at the call site; userdata is never passed. */
static gint
rtp_storage_item_compare (gconstpointer a, gconstpointer b, gpointer userdata)
{
  gint seq_diff = gst_rtp_buffer_compare_seqnum (
      ((RtpStorageItem const *) a)->seq, ((RtpStorageItem const *) b)->seq);

  if (seq_diff >= 0)
    return 0;

  return 1;
}
+
/* Evicts packets whose arrival time is more than @size_time older than the
 * newest arrival seen on this stream. Must be called with the stream lock
 * held and with max_arrival_time valid. */
static void
rtp_storage_stream_resize (RtpStorageStream * stream, GstClockTime size_time)
{
  GList *it;
  guint i, too_old_buffers_num = 0;

  g_assert (GST_CLOCK_TIME_IS_VALID (stream->max_arrival_time));
  g_assert (GST_CLOCK_TIME_IS_VALID (size_time));
  g_assert_cmpint (size_time, >, 0);

  /* Iterating from oldest sequence numbers to newest */
  for (i = 0, it = stream->queue.tail; it; it = it->prev, ++i) {
    RtpStorageItem *item = it->data;
    GstClockTime arrival_time = GST_BUFFER_DTS_OR_PTS (item->buffer);
    if (GST_CLOCK_TIME_IS_VALID (arrival_time)) {
      if (stream->max_arrival_time - arrival_time > size_time) {
        /* Still too old: everything up to and including this one goes. */
        too_old_buffers_num = i + 1;
      } else
        break;
    }
    /* Items without a timestamp are skipped here, but are still removed
     * if a younger timestamped item behind them proved to be too old. */
  }

  /* Two-pass on purpose: counting first, then popping, avoids mutating the
   * list while walking it. */
  for (i = 0; i < too_old_buffers_num; ++i) {
    RtpStorageItem *item = g_queue_pop_tail (&stream->queue);

    GST_TRACE ("Removing %u/%u buffers, pt=%d seq=%d for ssrc=%08x",
        i, too_old_buffers_num, item->pt, item->seq, stream->ssrc);

    rtp_storage_item_free (item);
  }
}
+
+/* This algorithm corresponds to rtp_jitter_buffer_get_seqnum_diff(),
+ * we want to keep the same number of packets in the worse case.
+ */
+
+static guint16
+rtp_storage_stream_get_seqnum_diff (RtpStorageStream * stream)
+{
+ guint32 high_seqnum, low_seqnum;
+ RtpStorageItem *high_item, *low_item;
+ guint16 result;
+
+
+ high_item = (RtpStorageItem *) g_queue_peek_head (&stream->queue);
+ low_item = (RtpStorageItem *) g_queue_peek_tail (&stream->queue);
+
+ if (!high_item || !low_item || high_item == low_item)
+ return 0;
+
+ high_seqnum = high_item->seq;
+ low_seqnum = low_item->seq;
+
+ /* it needs to work if seqnum wraps */
+ if (high_seqnum >= low_seqnum) {
+ result = (guint32) (high_seqnum - low_seqnum);
+ } else {
+ result = (guint32) (high_seqnum + G_MAXUINT16 + 1 - low_seqnum);
+ }
+ return result;
+}
+
/* Adds a packet to the stream, first trimming the queue: hard caps on
 * seqnum span / element count, then time-based eviction when the buffer
 * carries a usable arrival timestamp. Takes ownership of @buffer.
 * Must be called with the stream lock held. */
void
rtp_storage_stream_resize_and_add_item (RtpStorageStream * stream,
    GstClockTime size_time, GstBuffer * buffer, guint8 pt, guint16 seq)
{
  GstClockTime arrival_time = GST_BUFFER_DTS_OR_PTS (buffer);

  /* These limits match those of the jitterbuffer, we keep a couple more
   * packets to avoid races as it can be queried after the output of the
   * jitterbuffer.
   */
  if (rtp_storage_stream_get_seqnum_diff (stream) >= 32765 ||
      stream->queue.length > 10100) {
    RtpStorageItem *item = g_queue_pop_tail (&stream->queue);

    GST_WARNING ("Queue too big, removing pt=%d seq=%d for ssrc=%08x",
        item->pt, item->seq, stream->ssrc);

    rtp_storage_item_free (item);
  }

  if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (arrival_time))) {
    if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (stream->max_arrival_time)))
      stream->max_arrival_time = MAX (stream->max_arrival_time, arrival_time);
    else
      stream->max_arrival_time = arrival_time;

    rtp_storage_stream_resize (stream, size_time);
    rtp_storage_stream_add_item (stream, buffer, pt, seq);
  } else {
    /* No arrival timestamp: we cannot age-out, just store the packet. */
    rtp_storage_stream_add_item (stream, buffer, pt, seq);
  }
}
+
+RtpStorageStream *
+rtp_storage_stream_new (guint32 ssrc)
+{
+ RtpStorageStream *ret = g_slice_new0 (RtpStorageStream);
+ ret->max_arrival_time = GST_CLOCK_TIME_NONE;
+ ret->ssrc = ssrc;
+ g_mutex_init (&ret->stream_lock);
+ return ret;
+}
+
+void
+rtp_storage_stream_free (RtpStorageStream * stream)
+{
+ STREAM_LOCK (stream);
+ while (stream->queue.length)
+ rtp_storage_item_free (g_queue_pop_tail (&stream->queue));
+ STREAM_UNLOCK (stream);
+ g_mutex_clear (&stream->stream_lock);
+ g_slice_free (RtpStorageStream, stream);
+}
+
+void
+rtp_storage_stream_add_item (RtpStorageStream * stream, GstBuffer * buffer,
+ guint8 pt, guint16 seq)
+{
+ RtpStorageItem *item = rtp_storage_item_new (buffer, pt, seq);
+ GList *sibling = g_queue_find_custom (&stream->queue, item,
+ (GCompareFunc) rtp_storage_item_compare);
+
+ g_queue_insert_before (&stream->queue, sibling, item);
+}
+
/* Scans the stream (oldest to newest) for the chunk of media packets plus
 * trailing FEC packets (payload type @pt_fec) that could recover
 * @lost_seq. Returns a new GstBufferList in oldest-to-newest order, or
 * NULL when no suitable chunk exists. Caller holds the stream lock. */
GstBufferList *
rtp_storage_stream_get_packets_for_recovery (RtpStorageStream * stream,
    guint8 pt_fec, guint16 lost_seq)
{
  guint ret_length = 0;
  GList *end = NULL;
  GList *start = NULL;
  gboolean saw_fec = TRUE;      /* To initialize the start pointer in the loop below */
  GList *it;

  /* Looking for media stream chunk with FEC packets at the end, which could
   * can have the lost packet. For example:
   *
   * |#10 FEC| |#9 FEC| |#8| ... |#6| |#5 FEC| |#4 FEC| |#3 FEC| |#2| |#1| |#0|
   *
   * Say @lost_seq = 7. Want to return bufferlist with packets [#6 : #10]. Other
   * packets are not relevant for recovery of packet 7.
   *
   * Or the lost packet can be in the storage. In that case single packet is returned.
   * It can happen if:
   * - it could have arrived right after it was considered lost (more of a corner case)
   * - it was recovered together with the other lost packet (most likely)
   */
  for (it = stream->queue.tail; it; it = it->prev) {
    RtpStorageItem *item = it->data;
    gboolean found_end = FALSE;

    /* Is the buffer we lost in the storage? */
    if (item->seq == lost_seq) {
      start = it;
      end = it;
      ret_length = 1;
      break;
    }

    if (pt_fec == item->pt) {
      gint seq_diff = gst_rtp_buffer_compare_seqnum (lost_seq, item->seq);

      /* This FEC packet is at/after the lost seqnum; the chunk ends at the
       * last consecutive FEC packet (i.e. when the next-newer element is a
       * media packet, or the queue head is reached). */
      if (seq_diff >= 0) {
        if (it->prev) {
          gboolean media_next =
              pt_fec != ((RtpStorageItem *) it->prev->data)->pt;
          found_end = media_next;
        } else
          found_end = TRUE;
      }
      saw_fec = TRUE;
    } else if (saw_fec) {
      /* First media packet after a run of FEC packets: candidate chunk
       * start; reset the running length. */
      saw_fec = FALSE;
      start = it;
      ret_length = 0;
    }

    ++ret_length;
    if (found_end) {
      end = it;
      break;
    }
  }

  /* Chunk made only of FEC packets (no media seen before it). */
  if (end && !start)
    start = end;

  if (start && end) {
    GstBufferList *ret = gst_buffer_list_new_sized (ret_length);
    GList *it;

    GST_LOG ("Found %u buffers with lost seq=%d for ssrc=%08x, creating %"
        GST_PTR_FORMAT, ret_length, lost_seq, stream->ssrc, ret);

    /* Walk from start (oldest) towards the head, inclusive of end:
     * the loop bound end->prev makes end itself the last item added. */
    for (it = start; it != end->prev; it = it->prev)
      gst_buffer_list_add (ret,
          gst_buffer_ref (((RtpStorageItem *) it->data)->buffer));
    return ret;
  }

  return NULL;
}
+
+GstBuffer *
+rtp_storage_stream_get_redundant_packet (RtpStorageStream * stream,
+ guint16 lost_seq)
+{
+ GList *it;
+ for (it = stream->queue.head; it; it = it->next) {
+ RtpStorageItem *item = it->data;
+ if (item->seq == lost_seq) {
+ GST_LOG ("Found buffer pt=%u seq=%u for ssrc=%08x %" GST_PTR_FORMAT,
+ item->pt, item->seq, stream->ssrc, item->buffer);
+ return gst_buffer_ref (item->buffer);
+ }
+ }
+ GST_DEBUG ("Could not find packet with seq=%u for ssrc=%08x",
+ lost_seq, stream->ssrc);
+ return NULL;
+}
diff --git a/gst/rtp/rtpstoragestream.h b/gst/rtp/rtpstoragestream.h
new file mode 100644
index 0000000000..03049181a4
--- /dev/null
+++ b/gst/rtp/rtpstoragestream.h
@@ -0,0 +1,62 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __GST_RTP_STORAGE_ITEM_H__
+#define __GST_RTP_STORAGE_ITEM_H__
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+GST_DEBUG_CATEGORY_EXTERN (gst_rtp_storage_debug);
+
/* One stored RTP packet: the buffer (owned) plus the fields needed to
 * find it again without re-mapping. */
typedef struct {
  GstBuffer *buffer;       /* owned reference, unreffed on item free */
  guint16 seq;             /* RTP sequence number */
  guint8 pt;               /* RTP payload type */
} RtpStorageItem;

/* Per-SSRC history of packets, ordered newest (head) to oldest (tail). */
typedef struct {
  GQueue queue;            /* of RtpStorageItem, guarded by stream_lock */
  GMutex stream_lock;
  guint32 ssrc;
  GstClockTime max_arrival_time;  /* newest DTS/PTS seen, or NONE */
} RtpStorageStream;
+
+RtpStorageStream * rtp_storage_stream_new (guint32 ssrc);
+void rtp_storage_stream_free (RtpStorageStream * stream);
+void rtp_storage_stream_resize_and_add_item (RtpStorageStream * stream,
+ GstClockTime size_time,
+ GstBuffer *buffer,
+ guint8 pt,
+ guint16 seq);
+void rtp_storage_stream_add_item (RtpStorageStream * stream,
+ GstBuffer *buffer,
+ guint8 pt,
+ guint16 seq);
+GstBufferList * rtp_storage_stream_get_packets_for_recovery (RtpStorageStream *stream,
+ guint8 pt_fec,
+ guint16 lost_seq);
+GstBuffer * rtp_storage_stream_get_redundant_packet (RtpStorageStream *stream,
+ guint16 lost_seq);
+
+#endif /* __GST_RTP_STORAGE_ITEM_H__ */
+
diff --git a/gst/rtp/rtpulpfeccommon.c b/gst/rtp/rtpulpfeccommon.c
new file mode 100644
index 0000000000..25b2f00f14
--- /dev/null
+++ b/gst/rtp/rtpulpfeccommon.c
@@ -0,0 +1,447 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#include <string.h>
+#include "rtpulpfeccommon.h"
+
+#define MIN_RTP_HEADER_LEN 12
+
/* Minimal RTP fixed header (RFC 3550 §5.1) as bit-fields, used to stamp
 * the first 12 bytes of a recovered packet.
 * NOTE(review): bit-field order within a byte and the use of
 * larger-than-int bit-fields (e.g. :32) are implementation-defined;
 * this matches the GCC/Clang behavior the rest of the file relies on. */
typedef struct
{
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
  unsigned int csrc_count:4;    /* CSRC count */
  unsigned int extension:1;     /* header extension flag */
  unsigned int padding:1;       /* padding flag */
  unsigned int version:2;       /* protocol version */
  unsigned int payload_type:7;  /* payload type */
  unsigned int marker:1;        /* marker bit */
#elif G_BYTE_ORDER == G_BIG_ENDIAN
  unsigned int version:2;       /* protocol version */
  unsigned int padding:1;       /* padding flag */
  unsigned int extension:1;     /* header extension flag */
  unsigned int csrc_count:4;    /* CSRC count */
  unsigned int marker:1;        /* marker bit */
  unsigned int payload_type:7;  /* payload type */
#else
#error "G_BYTE_ORDER should be big or little endian."
#endif
  unsigned int seq:16;          /* sequence number (network order) */
  unsigned int timestamp:32;    /* timestamp (network order) */
  unsigned int ssrc:32;         /* synchronization source (network order) */
  guint8 csrclist[4];           /* optional CSRC list, 32 bits each */
} RtpHeader;
+
/* Size of the FEC level header: 8 bytes with the mask continuation
 * (L bit set), 4 bytes without it. */
static gsize
fec_level_hdr_get_size (gboolean l_bit)
{
  return sizeof (RtpUlpFecLevelHeader) - (l_bit ? 0 : 4);
}

/* Reassembles the protection mask in host order: 16 mask bits in the top
 * half, plus 32 continuation bits when @l_bit is set (48 bits total). */
static guint64
fec_level_hdr_get_mask (RtpUlpFecLevelHeader const *fec_lvl_hdr, gboolean l_bit)
{
  return ((guint64) g_ntohs (fec_lvl_hdr->mask) << 32) |
      (l_bit ? g_ntohl (fec_lvl_hdr->mask_continued) : 0);
}

/* Writes @mask back into the wire fields; the continuation word is only
 * touched (and only valid) when @l_bit is set. */
static void
fec_level_hdr_set_mask (RtpUlpFecLevelHeader * fec_lvl_hdr, gboolean l_bit,
    guint64 mask)
{
  fec_lvl_hdr->mask = g_htons (mask >> 32);
  if (l_bit)
    fec_lvl_hdr->mask_continued = g_htonl (mask);
}

/* Protection length field, converted from network to host order. */
static guint16
fec_level_hdr_get_protection_len (RtpUlpFecLevelHeader * fec_lvl_hdr)
{
  return g_ntohs (fec_lvl_hdr->protection_len);
}

/* Stores @len into the protection length field in network order. */
static void
fec_level_hdr_set_protection_len (RtpUlpFecLevelHeader * fec_lvl_hdr,
    guint16 len)
{
  fec_lvl_hdr->protection_len = g_htons (len);
}
+
/* The level header immediately follows the fixed FEC header in memory. */
static RtpUlpFecLevelHeader *
fec_hdr_get_level_hdr (RtpUlpFecHeader const *fec_hdr)
{
  return (RtpUlpFecLevelHeader *) (fec_hdr + 1);
}

/* Protection mask of the (single) FEC level, in host order. */
static guint64
fec_hdr_get_mask (RtpUlpFecHeader const *fec_hdr)
{
  return fec_level_hdr_get_mask (fec_hdr_get_level_hdr (fec_hdr), fec_hdr->L);
}

/* Sequence number base of the protected packets. For ULPFEC the field
 * carries the base directly; otherwise it is an offset subtracted from
 * the FEC packet's own seqnum @fec_seq. */
static guint16
fec_hdr_get_seq_base (RtpUlpFecHeader const *fec_hdr, gboolean is_ulpfec,
    guint16 fec_seq)
{
  guint16 seq = g_ntohs (fec_hdr->seq);
  if (is_ulpfec)
    return seq;
  return fec_seq - seq;
}
+
+static guint16
+fec_hdr_get_packets_len_recovery (RtpUlpFecHeader const *fec_hdr)
+{
+ return g_htons (fec_hdr->len);
+}
+
/* Timestamp recovery field of the FEC header, converted to host order. */
static guint32
fec_hdr_get_timestamp_recovery (RtpUlpFecHeader const *fec_hdr)
{
  return g_ntohl (fec_hdr->timestamp);
}
+
/* XORs @length bytes of @src into @dst. Processes 8 bytes at a time
 * through the unaligned-safe GST_READ/WRITE macros, then handles the
 * remaining tail bytes; by the time the tail loop runs, dst/src already
 * point past the 64-bit chunks, so indexing from 0 is correct. */
static void
_xor_mem (guint8 * restrict dst, const guint8 * restrict src, gsize length)
{
  guint i;

  for (i = 0; i < (length / sizeof (guint64)); ++i) {
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
    GST_WRITE_UINT64_LE (dst,
        GST_READ_UINT64_LE (dst) ^ GST_READ_UINT64_LE (src));
#else
    GST_WRITE_UINT64_BE (dst,
        GST_READ_UINT64_BE (dst) ^ GST_READ_UINT64_BE (src));
#endif
    dst += sizeof (guint64);
    src += sizeof (guint64);
  }
  /* Tail: the last length % 8 bytes, XORed one by one. */
  for (i = 0; i < (length % sizeof (guint64)); ++i)
    dst[i] ^= src[i];
}
+
/* Protection length of the FEC packet's level header, host order. */
guint16
rtp_ulpfec_hdr_get_protection_len (RtpUlpFecHeader const *fec_hdr)
{
  return fec_level_hdr_get_protection_len (fec_hdr_get_level_hdr (fec_hdr));
}

/* The FEC header lives at the start of the RTP payload. */
RtpUlpFecHeader *
rtp_ulpfec_buffer_get_fechdr (GstRTPBuffer * rtp)
{
  return (RtpUlpFecHeader *) gst_rtp_buffer_get_payload (rtp);
}

/* Protection mask of a mapped FEC packet, host order. */
guint64
rtp_ulpfec_buffer_get_mask (GstRTPBuffer * rtp)
{
  return fec_hdr_get_mask (rtp_ulpfec_buffer_get_fechdr (rtp));
}

/* Seqnum base of a mapped ULPFEC packet (field read directly). */
guint16
rtp_ulpfec_buffer_get_seq_base (GstRTPBuffer * rtp)
{
  return g_ntohs (rtp_ulpfec_buffer_get_fechdr (rtp)->seq);
}

/* Total size of FEC header + level header for the given mask length. */
guint
rtp_ulpfec_get_headers_len (gboolean fec_mask_long)
{
  return sizeof (RtpUlpFecHeader) + fec_level_hdr_get_size (fec_mask_long);
}
+
+#define ONE_64BIT G_GUINT64_CONSTANT(1)
+
+guint64
+rtp_ulpfec_packet_mask_from_seqnum (guint16 seq,
+ guint16 fec_seq_base, gboolean fec_mask_long)
+{
+ gint seq_delta = gst_rtp_buffer_compare_seqnum (fec_seq_base, seq);
+ if (seq_delta >= 0
+ && seq_delta <= RTP_ULPFEC_SEQ_BASE_OFFSET_MAX (fec_mask_long)) {
+ return ONE_64BIT << (RTP_ULPFEC_SEQ_BASE_OFFSET_MAX (TRUE) - seq_delta);
+ }
+ return 0;
+}
+
+gboolean
+rtp_ulpfec_mask_is_long (guint64 mask)
+{
+ return (mask & 0xffffffff) ? TRUE : FALSE;
+}
+
/* Validates a mapped FEC packet: the payload must be large enough for the
 * fixed + level headers, the extension (E) bit must be clear, and the
 * declared protection length must exactly account for the remaining
 * payload. Logs a warning and returns FALSE on any mismatch. */
gboolean
rtp_ulpfec_buffer_is_valid (GstRTPBuffer * rtp)
{
  guint payload_len = gst_rtp_buffer_get_payload_len (rtp);
  RtpUlpFecHeader *fec_hdr;
  guint fec_hdrs_len;
  guint fec_packet_len;

  if (payload_len < sizeof (RtpUlpFecHeader))
    goto toosmall;

  fec_hdr = rtp_ulpfec_buffer_get_fechdr (rtp);
  /* E (extension) bit is reserved and must be 0 per RFC 5109 */
  if (fec_hdr->E)
    goto invalidcontent;

  /* Header size depends on the L bit, so re-check after reading it. */
  fec_hdrs_len = rtp_ulpfec_get_headers_len (fec_hdr->L);
  if (payload_len < fec_hdrs_len)
    goto toosmall;

  fec_packet_len = fec_hdrs_len + rtp_ulpfec_hdr_get_protection_len (fec_hdr);
  if (fec_packet_len != payload_len)
    goto lengthmismatch;

  return TRUE;
toosmall:
  GST_WARNING ("FEC packet too small");
  return FALSE;

lengthmismatch:
  GST_WARNING ("invalid FEC packet (declared length %u, real length %u)",
      fec_packet_len, payload_len);
  return FALSE;

invalidcontent:
  GST_WARNING ("FEC Header contains invalid fields: %u", fec_hdr->E);
  return FALSE;
}
+
+
/* Folds a mapped RTP packet into the FEC bitstring accumulator @dst_arr.
 * A FEC packet's payload is copied verbatim (it is the starting point of
 * the XOR chain); a media packet has its first 8 header bytes XORed over
 * the start of the bitstring, its length XORed into the length-recovery
 * field, and its remaining bytes XORed past the FEC headers. The array
 * grows as needed but is never shrunk. */
void
rtp_buffer_to_ulpfec_bitstring (GstRTPBuffer * rtp, GArray * dst_arr,
    gboolean fec_buffer, gboolean fec_mask_long)
{
  if (G_UNLIKELY (fec_buffer)) {
    guint payload_len = gst_rtp_buffer_get_payload_len (rtp);
    g_array_set_size (dst_arr, MAX (payload_len, dst_arr->len));
    memcpy (dst_arr->data, gst_rtp_buffer_get_payload (rtp), payload_len);
  } else {
    const guint8 *src = rtp->data[0];
    guint len = gst_rtp_buffer_get_packet_len (rtp) - MIN_RTP_HEADER_LEN;
    guint dst_offset = rtp_ulpfec_get_headers_len (fec_mask_long);
    guint src_offset = MIN_RTP_HEADER_LEN;
    guint8 *dst;

    g_array_set_size (dst_arr, MAX (dst_offset + len, dst_arr->len));
    dst = (guint8 *) dst_arr->data;

    /* XOR the 8 "protected" RTP header bytes in one 64-bit operation.
     * NOTE(review): assumes the GArray data and the mapped RTP data are
     * suitably aligned for a guint64 access — confirm on strict-alignment
     * targets. */
    *((guint64 *) dst) ^= *((const guint64 *) src);
    ((RtpUlpFecHeader *) dst)->len ^= g_htons (len);
    _xor_mem (dst + dst_offset, src + src_offset, len);
  }
}
+
/* Builds a recovered media RTP packet from the XOR bitstring in @arr:
 * the recovered 8 header bytes are copied in, version is forced to 2, and
 * the known @seq / @ssrc are stamped over the recovered values. Returns
 * NULL when the recovered length field claims more payload than the
 * bitstring actually contains. Caller owns the returned buffer. */
GstBuffer *
rtp_ulpfec_bitstring_to_media_rtp_buffer (GArray * arr,
    gboolean fec_mask_long, guint32 ssrc, guint16 seq)
{
  guint fec_hdrs_len = rtp_ulpfec_get_headers_len (fec_mask_long);
  guint payload_len =
      fec_hdr_get_packets_len_recovery ((RtpUlpFecHeader *) arr->data);
  GstMapInfo ret_info = GST_MAP_INFO_INIT;
  GstMemory *ret_mem;
  GstBuffer *ret;

  if (payload_len > arr->len - fec_hdrs_len)
    return NULL;                /* Not enough data */

  ret_mem = gst_allocator_alloc (NULL, MIN_RTP_HEADER_LEN + payload_len, NULL);
  gst_memory_map (ret_mem, &ret_info, GST_MAP_READWRITE);

  /* Filling 12 bytes of RTP header */
  *((guint64 *) ret_info.data) = *((guint64 *) arr->data);
  ((RtpHeader *) ret_info.data)->version = 2;
  ((RtpHeader *) ret_info.data)->seq = g_htons (seq);
  ((RtpHeader *) ret_info.data)->ssrc = g_htonl (ssrc);
  /* Filling payload */
  memcpy (ret_info.data + MIN_RTP_HEADER_LEN,
      arr->data + fec_hdrs_len, payload_len);

  gst_memory_unmap (ret_mem, &ret_info);
  ret = gst_buffer_new ();
  gst_buffer_append_memory (ret, ret_mem);
  return ret;
}
+
/* Turns the XOR bitstring in @arr into a complete FEC RTP packet: fills
 * in the FEC header (E=0, L, seq base) and level header (protection
 * length, mask), then wraps everything in a freshly allocated RTP buffer
 * with the given marker/pt/seq/timestamp/ssrc. Caller owns the result. */
GstBuffer *
rtp_ulpfec_bitstring_to_fec_rtp_buffer (GArray * arr,
    guint16 seq_base, gboolean fec_mask_long, guint64 fec_mask,
    gboolean marker, guint8 pt, guint16 seq, guint32 timestamp, guint32 ssrc)
{
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  GstBuffer *ret;

  /* Filling FEC headers */
  {
    RtpUlpFecHeader *hdr = (RtpUlpFecHeader *) arr->data;
    RtpUlpFecLevelHeader *lvlhdr;
    hdr->E = 0;
    hdr->L = fec_mask_long;
    hdr->seq = g_htons (seq_base);

    lvlhdr = fec_hdr_get_level_hdr (hdr);
    /* Everything past the FEC headers is protected payload. */
    fec_level_hdr_set_protection_len (lvlhdr,
        arr->len - rtp_ulpfec_get_headers_len (fec_mask_long));
    fec_level_hdr_set_mask (lvlhdr, fec_mask_long, fec_mask);
  }

  /* Filling RTP header, copying payload */
  ret = gst_rtp_buffer_new_allocate (arr->len, 0, 0);
  /* Mapping a buffer we just allocated cannot fail. */
  if (!gst_rtp_buffer_map (ret, GST_MAP_READWRITE, &rtp))
    g_assert_not_reached ();

  gst_rtp_buffer_set_marker (&rtp, marker);
  gst_rtp_buffer_set_payload_type (&rtp, pt);
  gst_rtp_buffer_set_seq (&rtp, seq);
  gst_rtp_buffer_set_timestamp (&rtp, timestamp);
  gst_rtp_buffer_set_ssrc (&rtp, ssrc);

  memcpy (gst_rtp_buffer_get_payload (&rtp), arr->data, arr->len);

  gst_rtp_buffer_unmap (&rtp);

  return ret;
}
+
/**
 * rtp_ulpfec_map_info_map:
 * @buffer: (transfer full): #GstBuffer
 * @info: #RtpUlpFecMapInfo
 *
 * Maps the contents of @buffer into @info. If @buffer is made of many
 * #GstMemory objects, merges them together to create a new buffer made of
 * a single contiguous #GstMemory.
 *
 * Returns: %TRUE if @buffer could be mapped
 **/
gboolean
rtp_ulpfec_map_info_map (GstBuffer * buffer, RtpUlpFecMapInfo * info)
{
  /* We need to make sure we are working with a contiguous memory chunk.
   * If not, merge all memories together */
  if (gst_buffer_n_memory (buffer) > 1) {
    GstBuffer *new_buffer = gst_buffer_new ();
    GstMemory *mem = gst_buffer_get_all_memory (buffer);
    gst_buffer_append_memory (new_buffer, mem);

    /* We are supposed to own the old buffer, but we don't use it here, so unref */
    gst_buffer_unref (buffer);
    buffer = new_buffer;
  }

  if (!gst_rtp_buffer_map (buffer,
          GST_MAP_READ | GST_RTP_BUFFER_MAP_FLAG_SKIP_PADDING, &info->rtp)) {
    /* info->rtp.buffer = NULL is an indication for rtp_ulpfec_map_info_unmap()
     * that mapping has failed */
    g_assert (NULL == info->rtp.buffer);
    gst_buffer_unref (buffer);
    return FALSE;
  }
  return TRUE;
}
+
/**
 * rtp_ulpfec_map_info_unmap:
 * @info: #RtpUlpFecMapInfo
 *
 * Unmaps @info previously mapped with rtp_ulpfec_map_info_map() and unrefs
 * the buffer. For convenience it can be called even if
 * rtp_ulpfec_map_info_map() returned %FALSE (the NULL buffer makes it a
 * no-op).
 **/
void
rtp_ulpfec_map_info_unmap (RtpUlpFecMapInfo * info)
{
  GstBuffer *buffer = info->rtp.buffer;

  if (buffer) {
    gst_rtp_buffer_unmap (&info->rtp);
    gst_buffer_unref (buffer);
  }
}
+
+#ifndef GST_DISABLE_GST_DEBUG
/* Logs a one-line summary of a mapped RTP packet (flags, ssrc, pt,
 * timestamp, seqnum, total/header-less/payload sizes) to @cat at @level.
 * Cheap early-out when the category threshold filters the message. */
void
rtp_ulpfec_log_rtppacket (GstDebugCategory * cat, GstDebugLevel level,
    gpointer object, const gchar * name, GstRTPBuffer * rtp)
{
  guint seq;
  guint ssrc;
  guint timestamp;
  guint pt;

  if (level > gst_debug_category_get_threshold (cat))
    return;

  seq = gst_rtp_buffer_get_seq (rtp);
  ssrc = gst_rtp_buffer_get_ssrc (rtp);
  timestamp = gst_rtp_buffer_get_timestamp (rtp);
  pt = gst_rtp_buffer_get_payload_type (rtp);

  GST_CAT_LEVEL_LOG (cat, level, object,
      "%-22s: [%c%c%c%c] ssrc=0x%08x pt=%u tstamp=%u seq=%u size=%u(%u,%u)",
      name,
      gst_rtp_buffer_get_marker (rtp) ? 'M' : ' ',
      gst_rtp_buffer_get_extension (rtp) ? 'X' : ' ',
      gst_rtp_buffer_get_padding (rtp) ? 'P' : ' ',
      gst_rtp_buffer_get_csrc_count (rtp) > 0 ? 'C' : ' ',
      ssrc, pt, timestamp, seq,
      gst_rtp_buffer_get_packet_len (rtp),
      gst_rtp_buffer_get_packet_len (rtp) - MIN_RTP_HEADER_LEN,
      gst_rtp_buffer_get_payload_len (rtp));
}
+#endif /* GST_DISABLE_GST_DEBUG */
+
+#ifndef GST_DISABLE_GST_DEBUG
/* Logs the decoded FEC header and level header of a mapped FEC packet
 * (flag bits, recovered pt/timestamp/seq base, protection length, mask)
 * to @cat at @level. Early-out below the category threshold. */
void
rtp_ulpfec_log_fec_packet (GstDebugCategory * cat, GstDebugLevel level,
    gpointer object, GstRTPBuffer * fecrtp)
{
  RtpUlpFecHeader *fec_hdr;
  RtpUlpFecLevelHeader *fec_level_hdr;

  if (level > gst_debug_category_get_threshold (cat))
    return;

  fec_hdr = gst_rtp_buffer_get_payload (fecrtp);
  GST_CAT_LEVEL_LOG (cat, level, object,
      "%-22s: [%c%c%c%c%c%c] pt=%u tstamp=%u seq=%u recovery_len=%u",
      "fec header",
      fec_hdr->E ? 'E' : ' ',
      fec_hdr->L ? 'L' : ' ',
      fec_hdr->P ? 'P' : ' ',
      fec_hdr->X ? 'X' : ' ',
      fec_hdr->CC ? 'C' : ' ',
      fec_hdr->M ? 'M' : ' ',
      fec_hdr->pt,
      fec_hdr_get_timestamp_recovery (fec_hdr),
      fec_hdr_get_seq_base (fec_hdr, TRUE,
          gst_rtp_buffer_get_seq (fecrtp)),
      fec_hdr_get_packets_len_recovery (fec_hdr));

  fec_level_hdr = fec_hdr_get_level_hdr (fec_hdr);
  GST_CAT_LEVEL_LOG (cat, level, object,
      "%-22s: protection_len=%u mask=0x%012" G_GINT64_MODIFIER "x",
      "fec level header",
      g_ntohs (fec_level_hdr->protection_len),
      fec_level_hdr_get_mask (fec_level_hdr, fec_hdr->L));
}
+#endif /* GST_DISABLE_GST_DEBUG */
diff --git a/gst/rtp/rtpulpfeccommon.h b/gst/rtp/rtpulpfeccommon.h
new file mode 100644
index 0000000000..f44174a952
--- /dev/null
+++ b/gst/rtp/rtpulpfeccommon.h
@@ -0,0 +1,162 @@
+/* GStreamer plugin for forward error correction
+ * Copyright (C) 2017 Pexip
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Author: Mikhail Fludkov <misha@pexip.com>
+ */
+
+#ifndef __RTP_ULPFEC_COMMON_H__
+#define __RTP_ULPFEC_COMMON_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/rtp.h>
+
+G_BEGIN_DECLS
+
+#define GST_WARNING_RTP_PACKET(obj, name, pkt) rtp_ulpfec_log_rtppacket (GST_CAT_DEFAULT, GST_LEVEL_WARNING, obj, name, pkt)
+#define GST_LOG_RTP_PACKET(obj, name, pkt) rtp_ulpfec_log_rtppacket (GST_CAT_DEFAULT, GST_LEVEL_LOG, obj, name, pkt)
+#define GST_DEBUG_RTP_PACKET(obj, name, pkt) rtp_ulpfec_log_rtppacket (GST_CAT_DEFAULT, GST_LEVEL_DEBUG, obj, name, pkt)
+#define GST_INFO_RTP_PACKET(obj, name, pkt) rtp_ulpfec_log_rtppacket (GST_CAT_DEFAULT, GST_LEVEL_INFO, obj, name, pkt)
+#define GST_WARNING_FEC_PACKET(obj, pkt) rtp_ulpfec_log_fec_packet (GST_CAT_DEFAULT, GST_LEVEL_WARNING, obj, pkt)
+#define GST_DEBUG_FEC_PACKET(obj, pkt) rtp_ulpfec_log_fec_packet (GST_CAT_DEFAULT, GST_LEVEL_DEBUG, obj, pkt)
+#define GST_INFO_FEC_PACKET(obj, pkt) rtp_ulpfec_log_fec_packet (GST_CAT_DEFAULT, GST_LEVEL_INFO, obj, pkt)
+
+#define RTP_ULPFEC_PROTECTED_PACKETS_MAX(L) ((L) ? 48 : 16)
+#define RTP_ULPFEC_SEQ_BASE_OFFSET_MAX(L) (RTP_ULPFEC_PROTECTED_PACKETS_MAX(L) - 1)
+
+/**
+ * RtpUlpFecMapInfo: Helper wrapper around #GstRTPBuffer
+ *
+ * Mapped view of an RTP buffer; see rtp_ulpfec_map_info_map() and
+ * rtp_ulpfec_map_info_unmap().
+ *
+ * @rtp: mapped RTP buffer
+ **/
+typedef struct {
+ /* FIXME: this struct used to contain more fields; only the GstRTPBuffer is
+ * left, so it would be nicer to use GstRTPBuffer directly */
+ GstRTPBuffer rtp;
+} RtpUlpFecMapInfo;
+
+/* FIXME: parse/write these properly instead of relying in packed structs */
+#ifdef _MSC_VER
+#pragma pack(push, 1)
+#define ATTRIBUTE_PACKED
+#else
+#define ATTRIBUTE_PACKED __attribute__ ((packed))
+#endif
+
+/* RFC 5109 */
+/*
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ |E|L|P|X| CC |M| PT recovery | SN base |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | TS recovery |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | length recovery |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ Figure 3: FEC Header Format
+*/
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+/* The bit-field declaration order is reversed between the two variants so
+ * that the in-memory layout matches the big-endian wire format of the FEC
+ * header (Figure 3 above) on either host endianness. Multi-byte fields are
+ * kept in network byte order; use the fec_hdr_get_* accessors. */
+typedef struct {
+ guint8 CC:4;
+ guint8 X:1;
+ guint8 P:1;
+ guint8 L:1;
+ guint8 E:1;
+
+ guint8 pt:7; /* PT recovery */
+ guint8 M:1;
+
+ guint16 seq; /* SN base */
+ guint32 timestamp; /* TS recovery */
+ guint16 len; /* length recovery */
+} ATTRIBUTE_PACKED RtpUlpFecHeader;
+#else
+/* big-endian hosts: field order matches the wire format directly */
+typedef struct {
+ guint8 E:1;
+ guint8 L:1;
+ guint8 P:1;
+ guint8 X:1;
+ guint8 CC:4;
+
+ guint8 M:1;
+ guint8 pt:7; /* PT recovery */
+
+ guint16 seq; /* SN base */
+ guint32 timestamp; /* TS recovery */
+ guint16 len; /* length recovery */
+} ATTRIBUTE_PACKED RtpUlpFecHeader;
+#endif
+
+/*
+ 0 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | Protection Length | mask |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | mask cont. (present only when L = 1) |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ Figure 4: ULP Level Header Format
+*/
+/* ULP level header (Figure 4 above); all fields in network byte order */
+typedef struct
+{
+ guint16 protection_len; /* Protection Length */
+ guint16 mask; /* first 16 mask bits */
+ guint32 mask_continued; /* mask continuation, present only when L = 1 */
+} ATTRIBUTE_PACKED RtpUlpFecLevelHeader;
+
+#ifdef _MSC_VER
+#pragma pack(pop)
+#else
+#undef ATTRIBUTE_PACKED
+#endif
+
+gboolean rtp_ulpfec_map_info_map (GstBuffer *buffer, RtpUlpFecMapInfo *info);
+void rtp_ulpfec_map_info_unmap (RtpUlpFecMapInfo *info);
+void rtp_buffer_to_ulpfec_bitstring (GstRTPBuffer *rtp, GArray *dst_arr,
+ gboolean fec_buffer, gboolean fec_mask_long);
+GstBuffer * rtp_ulpfec_bitstring_to_media_rtp_buffer (GArray *arr,
+ gboolean fec_mask_long, guint32 ssrc, guint16 seq);
+GstBuffer * rtp_ulpfec_bitstring_to_fec_rtp_buffer (GArray *arr, guint16 seq_base, gboolean fec_mask_long,
+ guint64 fec_mask, gboolean marker, guint8 pt, guint16 seq,
+ guint32 timestamp, guint32 ssrc);
+
+#ifndef GST_DISABLE_GST_DEBUG
+void rtp_ulpfec_log_rtppacket (GstDebugCategory * cat, GstDebugLevel level,
+ gpointer object, const gchar *name,
+ GstRTPBuffer *rtp);
+
+void rtp_ulpfec_log_fec_packet (GstDebugCategory * cat, GstDebugLevel level,
+ gpointer object, GstRTPBuffer *fecrtp);
+#else
+#define rtp_ulpfec_log_rtppacket(cat,level,obj,name,rtp) /* NOOP */
+#define rtp_ulpfec_log_fec_packet(cat,level,obj,fecrtp) /* NOOP */
+#endif
+
+RtpUlpFecHeader * rtp_ulpfec_buffer_get_fechdr (GstRTPBuffer *rtp);
+guint rtp_ulpfec_get_headers_len (gboolean fec_mask_long);
+guint16 rtp_ulpfec_hdr_get_protection_len (RtpUlpFecHeader const *fec_hdr);
+guint64 rtp_ulpfec_packet_mask_from_seqnum (guint16 seq, guint16 fec_seq_base, gboolean fec_mask_long);
+guint64 rtp_ulpfec_buffer_get_mask (GstRTPBuffer *rtp);
+guint16 rtp_ulpfec_buffer_get_seq_base (GstRTPBuffer *rtp);
+gboolean rtp_ulpfec_mask_is_long (guint64 mask);
+gboolean rtp_ulpfec_buffer_is_valid (GstRTPBuffer * rtp);
+
+G_END_DECLS
+
+#endif
diff --git a/gst/rtpmanager/gstrtpbin.c b/gst/rtpmanager/gstrtpbin.c
new file mode 100644
index 0000000000..39596cc91a
--- /dev/null
+++ b/gst/rtpmanager/gstrtpbin.c
@@ -0,0 +1,5273 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpbin
+ * @title: rtpbin
+ * @see_also: rtpjitterbuffer, rtpsession, rtpptdemux, rtpssrcdemux
+ *
+ * RTP bin combines the functions of #GstRtpSession, #GstRtpSsrcDemux,
+ * #GstRtpJitterBuffer and #GstRtpPtDemux in one element. It allows for multiple
+ * RTP sessions that will be synchronized together using RTCP SR packets.
+ *
+ * #GstRtpBin is configured with a number of request pads that define the
+ * functionality that is activated, similar to the #GstRtpSession element.
+ *
+ * To use #GstRtpBin as an RTP receiver, request a recv_rtp_sink_\%u pad. The session
+ * number must be specified in the pad name.
+ * Data received on the recv_rtp_sink_\%u pad will be processed in the #GstRtpSession
+ * manager and after being validated forwarded on #GstRtpSsrcDemux element. Each
+ * RTP stream is demuxed based on the SSRC and send to a #GstRtpJitterBuffer. After
+ * the packets are released from the jitterbuffer, they will be forwarded to a
+ * #GstRtpPtDemux element. The #GstRtpPtDemux element will demux the packets based
+ * on the payload type and will create a unique pad recv_rtp_src_\%u_\%u_\%u on
+ * rtpbin with the session number, SSRC and payload type respectively as the pad
+ * name.
+ *
+ * To also use #GstRtpBin as an RTCP receiver, request a recv_rtcp_sink_\%u pad. The
+ * session number must be specified in the pad name.
+ *
+ * If you want the session manager to generate and send RTCP packets, request
+ * the send_rtcp_src_\%u pad with the session number in the pad name. Packet pushed
+ * on this pad contain SR/RR RTCP reports that should be sent to all participants
+ * in the session.
+ *
+ * To use #GstRtpBin as a sender, request a send_rtp_sink_\%u pad, which will
+ * automatically create a send_rtp_src_\%u pad. If the session number is not provided,
+ * the pad from the lowest available session will be returned. The session manager will modify the
+ * SSRC in the RTP packets to its own SSRC and will forward the packets on the
+ * send_rtp_src_\%u pad after updating its internal state.
+ *
+ * The session manager needs the clock-rate of the payload types it is handling
+ * and will signal the #GstRtpSession::request-pt-map signal when it needs such a
+ * mapping. One can clear the cached values with the #GstRtpSession::clear-pt-map
+ * signal.
+ *
+ * Access to the internal statistics of rtpbin is provided with the
+ * get-internal-session property. This action signal gives access to the
+ * RTPSession object which further provides action signals to retrieve the
+ * internal source and other sources.
+ *
+ * #GstRtpBin also has signals (#GstRtpBin::request-rtp-encoder,
+ * #GstRtpBin::request-rtp-decoder, #GstRtpBin::request-rtcp-encoder and
+ * #GstRtpBin::request-rtp-decoder) to dynamically request for RTP and RTCP encoders
+ * and decoders in order to support SRTP. The encoders must provide the pads
+ * rtp_sink_\%u and rtp_src_\%u for RTP and rtcp_sink_\%u and rtcp_src_\%u for
+ * RTCP. The session number will be used in the pad name. The decoders must provide
+ * rtp_sink and rtp_src for RTP and rtcp_sink and rtcp_src for RTCP. The decoders will
+ * be placed before the #GstRtpSession element, thus they must support SSRC demuxing
+ * internally.
+ *
+ * #GstRtpBin has signals (#GstRtpBin::request-aux-sender and
+ * #GstRtpBin::request-aux-receiver) to dynamically request an element that can be
+ * used to create or merge additional RTP streams. AUX elements are needed to
+ * implement FEC or retransmission (such as RFC 4588). An AUX sender must have one
+ * sink_\%u pad that matches the sessionid in the signal and it should have 1 or
+ * more src_\%u pads. For each src_\%u pad, a session will be made (if needed)
+ * and the pad will be linked to the session send_rtp_sink pad. Each session will
+ * then expose its source pad as send_rtp_src_\%u on #GstRtpBin.
+ * An AUX receiver has 1 src_\%u pad that must match the sessionid in the signal
+ * and 1 or more sink_\%u pads. A session will be made for each sink_\%u pad
+ * when the corresponding recv_rtp_sink_\%u pad is requested on #GstRtpBin.
+ * The #GstRtpBin::request-jitterbuffer signal can be used to provide a custom
+ * element to perform arrival time smoothing, reordering and optionally packet
+ * loss detection and retransmission requests.
+ *
+ * ## Example pipelines
+ *
+ * |[
+ * gst-launch-1.0 udpsrc port=5000 caps="application/x-rtp, ..." ! .recv_rtp_sink_0 \
+ * rtpbin ! rtptheoradepay ! theoradec ! xvimagesink
+ * ]| Receive RTP data from port 5000 and send to the session 0 in rtpbin.
+ * |[
+ * gst-launch-1.0 rtpbin name=rtpbin \
+ * v4l2src ! videoconvert ! ffenc_h263 ! rtph263ppay ! rtpbin.send_rtp_sink_0 \
+ * rtpbin.send_rtp_src_0 ! udpsink port=5000 \
+ * rtpbin.send_rtcp_src_0 ! udpsink port=5001 sync=false async=false \
+ * udpsrc port=5005 ! rtpbin.recv_rtcp_sink_0 \
+ * audiotestsrc ! amrnbenc ! rtpamrpay ! rtpbin.send_rtp_sink_1 \
+ * rtpbin.send_rtp_src_1 ! udpsink port=5002 \
+ * rtpbin.send_rtcp_src_1 ! udpsink port=5003 sync=false async=false \
+ * udpsrc port=5007 ! rtpbin.recv_rtcp_sink_1
+ * ]| Encode and payload H263 video captured from a v4l2src. Encode and payload AMR
+ * audio generated from audiotestsrc. The video is sent to session 0 in rtpbin
+ * and the audio is sent to session 1. Video packets are sent on UDP port 5000
+ * and audio packets on port 5002. The video RTCP packets for session 0 are sent
+ * on port 5001 and the audio RTCP packets for session 0 are sent on port 5003.
+ * RTCP packets for session 0 are received on port 5005 and RTCP for session 1
+ * is received on port 5007. Since RTCP packets from the sender should be sent
+ * as soon as possible and do not participate in preroll, sync=false and
+ * async=false is configured on udpsink
+ * |[
+ * gst-launch-1.0 -v rtpbin name=rtpbin \
+ * udpsrc caps="application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H263-1998" \
+ * port=5000 ! rtpbin.recv_rtp_sink_0 \
+ * rtpbin. ! rtph263pdepay ! ffdec_h263 ! xvimagesink \
+ * udpsrc port=5001 ! rtpbin.recv_rtcp_sink_0 \
+ * rtpbin.send_rtcp_src_0 ! udpsink port=5005 sync=false async=false \
+ * udpsrc caps="application/x-rtp,media=(string)audio,clock-rate=(int)8000,encoding-name=(string)AMR,encoding-params=(string)1,octet-align=(string)1" \
+ * port=5002 ! rtpbin.recv_rtp_sink_1 \
+ * rtpbin. ! rtpamrdepay ! amrnbdec ! alsasink \
+ * udpsrc port=5003 ! rtpbin.recv_rtcp_sink_1 \
+ * rtpbin.send_rtcp_src_1 ! udpsink port=5007 sync=false async=false
+ * ]| Receive H263 on port 5000, send it through rtpbin in session 0, depayload,
+ * decode and display the video.
+ * Receive AMR on port 5002, send it through rtpbin in session 1, depayload,
+ * decode and play the audio.
+ * Receive server RTCP packets for session 0 on port 5001 and RTCP packets for
+ * session 1 on port 5003. These packets will be used for session management and
+ * synchronisation.
+ * Send RTCP reports for session 0 on port 5005 and RTCP reports for session 1
+ * on port 5007.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <stdio.h>
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/rtp/gstrtcpbuffer.h>
+
+#include "gstrtpbin.h"
+#include "rtpsession.h"
+#include "gstrtpsession.h"
+#include "gstrtpjitterbuffer.h"
+
+#include <gst/glib-compat-private.h>
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_bin_debug);
+#define GST_CAT_DEFAULT gst_rtp_bin_debug
+
+/* sink pads */
+static GstStaticPadTemplate rtpbin_recv_rtp_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp;application/x-srtp")
+ );
+
+/**
+ * GstRtpBin!recv_fec_sink_%u_%u:
+ *
+ * Sink template for receiving Forward Error Correction packets,
+ * in the form recv_fec_sink_<session_idx>_<fec_stream_idx>
+ *
+ * See #GstRTPST_2022_1_FecDec for example usage
+ *
+ * Since: 1.20
+ */
+static GstStaticPadTemplate rtpbin_recv_fec_sink_template =
+GST_STATIC_PAD_TEMPLATE ("recv_fec_sink_%u_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+/**
+ * GstRtpBin!send_fec_src_%u_%u:
+ *
+ * Src template for sending Forward Error Correction packets,
+ * in the form send_fec_src_<session_idx>_<fec_stream_idx>
+ *
+ * See #GstRTPST_2022_1_FecEnc for example usage
+ *
+ * Since: 1.20
+ */
+static GstStaticPadTemplate rtpbin_send_fec_src_template =
+GST_STATIC_PAD_TEMPLATE ("send_fec_src_%u_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate rtpbin_recv_rtcp_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtcp;application/x-srtcp")
+ );
+
+static GstStaticPadTemplate rtpbin_send_rtp_sink_template =
+GST_STATIC_PAD_TEMPLATE ("send_rtp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+/* src pads */
+static GstStaticPadTemplate rtpbin_recv_rtp_src_template =
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_src_%u_%u_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate rtpbin_send_rtcp_src_template =
+ GST_STATIC_PAD_TEMPLATE ("send_rtcp_src_%u",
+ GST_PAD_SRC,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtcp;application/x-srtcp")
+ );
+
+static GstStaticPadTemplate rtpbin_send_rtp_src_template =
+ GST_STATIC_PAD_TEMPLATE ("send_rtp_src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp;application/x-srtp")
+ );
+
+#define GST_RTP_BIN_LOCK(bin) g_mutex_lock (&(bin)->priv->bin_lock)
+#define GST_RTP_BIN_UNLOCK(bin) g_mutex_unlock (&(bin)->priv->bin_lock)
+
+/* lock to protect dynamic callbacks, like pad-added and new ssrc. */
+#define GST_RTP_BIN_DYN_LOCK(bin) g_mutex_lock (&(bin)->priv->dyn_lock)
+#define GST_RTP_BIN_DYN_UNLOCK(bin) g_mutex_unlock (&(bin)->priv->dyn_lock)
+
+/* lock for shutdown */
+#define GST_RTP_BIN_SHUTDOWN_LOCK(bin,label) \
+G_STMT_START { \
+ if (g_atomic_int_get (&bin->priv->shutdown)) \
+ goto label; \
+ GST_RTP_BIN_DYN_LOCK (bin); \
+ if (g_atomic_int_get (&bin->priv->shutdown)) { \
+ GST_RTP_BIN_DYN_UNLOCK (bin); \
+ goto label; \
+ } \
+} G_STMT_END
+
+/* unlock for shutdown */
+#define GST_RTP_BIN_SHUTDOWN_UNLOCK(bin) \
+ GST_RTP_BIN_DYN_UNLOCK (bin); \
+
+/* Minimum time offset to apply. This compensates for rounding errors in NTP to
+ * RTP timestamp conversions */
+#define MIN_TS_OFFSET (4 * GST_MSECOND)
+
+struct _GstRtpBinPrivate
+{
+ /* bin-wide lock (GST_RTP_BIN_LOCK); guards e.g. the session list */
+ GMutex bin_lock;
+
+ /* lock protecting dynamic adding/removing */
+ GMutex dyn_lock;
+
+ /* if we are shutting down or not (accessed atomically) */
+ gint shutdown;
+
+ /* clear an SSRC from the demuxer when it times out or says BYE
+ * (see on_timeout / on_bye_timeout) */
+ gboolean autoremove;
+
+ /* NTP time in ns of last SR sync used */
+ guint64 last_ntpnstime;
+
+ /* list of extra elements added via bin_manage_element() */
+ GList *elements;
+};
+
+/* signals and args */
+/* indices into the gst_rtp_bin_signals array; LAST_SIGNAL must stay last
+ * since it is used as the array size */
+enum
+{
+ SIGNAL_REQUEST_PT_MAP,
+ SIGNAL_PAYLOAD_TYPE_CHANGE,
+ SIGNAL_CLEAR_PT_MAP,
+ SIGNAL_RESET_SYNC,
+ SIGNAL_GET_SESSION,
+ SIGNAL_GET_INTERNAL_SESSION,
+ SIGNAL_GET_STORAGE,
+ SIGNAL_GET_INTERNAL_STORAGE,
+ SIGNAL_CLEAR_SSRC,
+
+ SIGNAL_ON_NEW_SSRC,
+ SIGNAL_ON_SSRC_COLLISION,
+ SIGNAL_ON_SSRC_VALIDATED,
+ SIGNAL_ON_SSRC_ACTIVE,
+ SIGNAL_ON_SSRC_SDES,
+ SIGNAL_ON_BYE_SSRC,
+ SIGNAL_ON_BYE_TIMEOUT,
+ SIGNAL_ON_TIMEOUT,
+ SIGNAL_ON_SENDER_TIMEOUT,
+ SIGNAL_ON_NPT_STOP,
+
+ SIGNAL_REQUEST_RTP_ENCODER,
+ SIGNAL_REQUEST_RTP_DECODER,
+ SIGNAL_REQUEST_RTCP_ENCODER,
+ SIGNAL_REQUEST_RTCP_DECODER,
+
+ SIGNAL_REQUEST_FEC_DECODER,
+ SIGNAL_REQUEST_FEC_ENCODER,
+
+ SIGNAL_REQUEST_JITTERBUFFER,
+
+ SIGNAL_NEW_JITTERBUFFER,
+ SIGNAL_NEW_STORAGE,
+
+ SIGNAL_REQUEST_AUX_SENDER,
+ SIGNAL_REQUEST_AUX_RECEIVER,
+
+ SIGNAL_ON_NEW_SENDER_SSRC,
+ SIGNAL_ON_SENDER_SSRC_ACTIVE,
+
+ SIGNAL_ON_BUNDLED_SSRC,
+
+ LAST_SIGNAL
+};
+
+/* default values for the object properties below */
+#define DEFAULT_LATENCY_MS 200
+#define DEFAULT_DROP_ON_LATENCY FALSE
+#define DEFAULT_SDES NULL
+#define DEFAULT_DO_LOST FALSE
+#define DEFAULT_IGNORE_PT FALSE
+#define DEFAULT_NTP_SYNC FALSE
+#define DEFAULT_AUTOREMOVE FALSE
+#define DEFAULT_BUFFER_MODE RTP_JITTER_BUFFER_MODE_SLAVE
+#define DEFAULT_USE_PIPELINE_CLOCK FALSE
+#define DEFAULT_RTCP_SYNC GST_RTP_BIN_RTCP_SYNC_ALWAYS
+#define DEFAULT_RTCP_SYNC_INTERVAL 0
+#define DEFAULT_DO_SYNC_EVENT FALSE
+#define DEFAULT_DO_RETRANSMISSION FALSE
+#define DEFAULT_RTP_PROFILE GST_RTP_PROFILE_AVP
+#define DEFAULT_NTP_TIME_SOURCE GST_RTP_NTP_TIME_SOURCE_NTP
+#define DEFAULT_RTCP_SYNC_SEND_TIME TRUE
+#define DEFAULT_MAX_RTCP_RTP_TIME_DIFF 1000
+#define DEFAULT_MAX_DROPOUT_TIME 60000
+#define DEFAULT_MAX_MISORDER_TIME 2000
+#define DEFAULT_RFC7273_SYNC FALSE
+#define DEFAULT_MAX_STREAMS G_MAXUINT
+#define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0)
+#define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000)
+
+/* object property ids; PROP_0 is the GObject-reserved sentinel */
+enum
+{
+ PROP_0,
+ PROP_LATENCY,
+ PROP_DROP_ON_LATENCY,
+ PROP_SDES,
+ PROP_DO_LOST,
+ PROP_IGNORE_PT,
+ PROP_NTP_SYNC,
+ PROP_RTCP_SYNC,
+ PROP_RTCP_SYNC_INTERVAL,
+ PROP_AUTOREMOVE,
+ PROP_BUFFER_MODE,
+ PROP_USE_PIPELINE_CLOCK,
+ PROP_DO_SYNC_EVENT,
+ PROP_DO_RETRANSMISSION,
+ PROP_RTP_PROFILE,
+ PROP_NTP_TIME_SOURCE,
+ PROP_RTCP_SYNC_SEND_TIME,
+ PROP_MAX_RTCP_RTP_TIME_DIFF,
+ PROP_MAX_DROPOUT_TIME,
+ PROP_MAX_MISORDER_TIME,
+ PROP_RFC7273_SYNC,
+ PROP_MAX_STREAMS,
+ PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ PROP_MAX_TS_OFFSET,
+ PROP_FEC_DECODERS,
+ PROP_FEC_ENCODERS,
+};
+
+#define GST_RTP_BIN_RTCP_SYNC_TYPE (gst_rtp_bin_rtcp_sync_get_type())
+/* Register (once) and return the GstRTCPSync enum GType.
+ *
+ * Uses g_once_init_enter()/g_once_init_leave() so that concurrent first
+ * calls from different threads cannot race and register the enum twice;
+ * the previous plain "if (!type) type = ..." check was not thread-safe. */
+static GType
+gst_rtp_bin_rtcp_sync_get_type (void)
+{
+ static gsize rtcp_sync_type = 0;
+ static const GEnumValue rtcp_sync_types[] = {
+ {GST_RTP_BIN_RTCP_SYNC_ALWAYS, "always", "always"},
+ {GST_RTP_BIN_RTCP_SYNC_INITIAL, "initial", "initial"},
+ {GST_RTP_BIN_RTCP_SYNC_RTP, "rtp-info", "rtp-info"},
+ {0, NULL, NULL},
+ };
+
+ if (g_once_init_enter (&rtcp_sync_type)) {
+ GType tmp = g_enum_register_static ("GstRTCPSync", rtcp_sync_types);
+ g_once_init_leave (&rtcp_sync_type, tmp);
+ }
+ return (GType) rtcp_sync_type;
+}
+
+/* helper objects */
+typedef struct _GstRtpBinSession GstRtpBinSession;
+typedef struct _GstRtpBinStream GstRtpBinStream;
+typedef struct _GstRtpBinClient GstRtpBinClient;
+
+static guint gst_rtp_bin_signals[LAST_SIGNAL] = { 0 };
+
+static GstCaps *pt_map_requested (GstElement * element, guint pt,
+ GstRtpBinSession * session);
+static void payload_type_change (GstElement * element, guint pt,
+ GstRtpBinSession * session);
+static void remove_recv_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+static void remove_recv_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+static void remove_recv_fec (GstRtpBin * rtpbin, GstRtpBinSession * session);
+static void remove_send_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+static void remove_send_fec (GstRtpBin * rtpbin, GstRtpBinSession * session);
+static void remove_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session);
+static void free_client (GstRtpBinClient * client, GstRtpBin * bin);
+static void free_stream (GstRtpBinStream * stream, GstRtpBin * bin);
+static GstRtpBinSession *create_session (GstRtpBin * rtpbin, gint id);
+static GstPad *complete_session_sink (GstRtpBin * rtpbin,
+ GstRtpBinSession * session);
+static void
+complete_session_receiver (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid);
+static GstPad *complete_session_rtcp (GstRtpBin * rtpbin,
+ GstRtpBinSession * session, guint sessid);
+static GstElement *session_request_element (GstRtpBinSession * session,
+ guint signal);
+
+/* Manages the RTP stream for one SSRC.
+ *
+ * We pipe the stream (coming from the SSRC demuxer) into a jitterbuffer.
+ * If we see an SDES RTCP packet that links multiple SSRCs together based on a
+ * common CNAME, we create a GstRtpBinClient structure to group the SSRCs
+ * together (see below).
+ */
+struct _GstRtpBinStream
+{
+ /* the SSRC of this stream */
+ guint32 ssrc;
+
+ /* parent bin */
+ GstRtpBin *bin;
+
+ /* the session this SSRC belongs to */
+ GstRtpBinSession *session;
+
+ /* the jitterbuffer of the SSRC */
+ GstElement *buffer;
+ /* signal handler ids connected on the jitterbuffer */
+ gulong buffer_handlesync_sig;
+ gulong buffer_ptreq_sig;
+ gulong buffer_ntpstop_sig;
+ /* buffering percent (NOTE(review): presumably reported by the
+ * jitterbuffer; confirm against the users of this field) */
+ gint percent;
+
+ /* the PT demuxer of the SSRC */
+ GstElement *demux;
+ /* signal handler ids connected on the PT demuxer */
+ gulong demux_newpad_sig;
+ gulong demux_padremoved_sig;
+ gulong demux_ptreq_sig;
+ gulong demux_ptchange_sig;
+
+ /* if we have calculated a valid rt_delta for this stream */
+ gboolean have_sync;
+ /* mapping to local RTP and NTP time */
+ gint64 rt_delta;
+ gint64 rtp_delta;
+ /* base rtptime in gst time */
+ gint64 clock_base;
+};
+
+#define GST_RTP_SESSION_LOCK(sess)   g_mutex_lock (&(sess)->lock)
+#define GST_RTP_SESSION_UNLOCK(sess) g_mutex_unlock (&(sess)->lock)
+
+/* Manages the receiving end of the packets.
+ *
+ * There is one such structure for each RTP session (audio/video/...).
+ * We get the RTP/RTCP packets and stuff them into the session manager. From
+ * there they are pushed into an SSRC demuxer that splits the stream based on
+ * SSRC. Each of the SSRC streams go into their own jitterbuffer (managed with
+ * the GstRtpBinStream above).
+ *
+ * Before the SSRC demuxer, a storage element may be inserted for the purpose
+ * of Forward Error Correction.
+ */
+struct _GstRtpBinSession
+{
+ /* session id */
+ gint id;
+ /* the parent bin */
+ GstRtpBin *bin;
+ /* the session element */
+ GstElement *session;
+ /* the SSRC demuxer */
+ GstElement *demux;
+ /* signal handler ids connected on the SSRC demuxer */
+ gulong demux_newpad_sig;
+ gulong demux_padremoved_sig;
+
+ /* Fec support */
+ GstElement *storage;
+
+ /* session lock (GST_RTP_SESSION_LOCK); guards e.g. the streams list */
+ GMutex lock;
+
+ /* list of GstRtpBinStream */
+ GSList *streams;
+
+ /* list of elements */
+ GSList *elements;
+
+ /* mapping of payload type to caps */
+ GHashTable *ptmap;
+
+ /* the pads of the session */
+ GstPad *recv_rtp_sink;
+ GstPad *recv_rtp_sink_ghost;
+ GstPad *recv_rtp_src;
+ GstPad *recv_rtcp_sink;
+ GstPad *recv_rtcp_sink_ghost;
+ GstPad *sync_src;
+ GstPad *send_rtp_sink;
+ GstPad *send_rtp_sink_ghost;
+ GstPad *send_rtp_src_ghost;
+ GstPad *send_rtcp_src;
+ GstPad *send_rtcp_src_ghost;
+
+ /* FEC plumbing: recv_fec_sink request pads and their ghost pads */
+ GSList *recv_fec_sinks;
+ GSList *recv_fec_sink_ghosts;
+ GstElement *fec_decoder;
+
+ GSList *send_fec_src_ghosts;
+};
+
+/* Manages the RTP streams that come from one client and should therefore be
+ * synchronized.
+ */
+struct _GstRtpBinClient
+{
+ /* the common CNAME for the streams */
+ gchar *cname;
+ /* length of @cname */
+ guint cname_len;
+
+ /* the streams (GstRtpBinStream) belonging to this client */
+ guint nstreams;
+ GSList *streams;
+};
+
+/* find a session with the given id. Must be called with RTP_BIN_LOCK.
+ * Returns the matching GstRtpBinSession or NULL; no reference is taken. */
+static GstRtpBinSession *
+find_session_by_id (GstRtpBin * rtpbin, gint id)
+{
+ GSList *walk;
+
+ for (walk = rtpbin->sessions; walk; walk = g_slist_next (walk)) {
+ GstRtpBinSession *sess = (GstRtpBinSession *) walk->data;
+
+ if (sess->id == id)
+ return sess;
+ }
+ return NULL;
+}
+
+/* check whether @pad is one of the session's ghosted recv_fec_sink pads */
+static gboolean
+pad_is_recv_fec (GstRtpBinSession * session, GstPad * pad)
+{
+ return g_slist_find (session->recv_fec_sink_ghosts, pad) != NULL;
+}
+
+/* find a session with the given request pad. Must be called with RTP_BIN_LOCK.
+ * Matches against all ghosted request pads of each session (RTP/RTCP
+ * send/recv and FEC sinks). Returns NULL when no session owns @pad. */
+static GstRtpBinSession *
+find_session_by_pad (GstRtpBin * rtpbin, GstPad * pad)
+{
+ GSList *walk;
+
+ for (walk = rtpbin->sessions; walk; walk = g_slist_next (walk)) {
+ GstRtpBinSession *sess = (GstRtpBinSession *) walk->data;
+
+ if ((sess->recv_rtp_sink_ghost == pad) ||
+ (sess->recv_rtcp_sink_ghost == pad) ||
+ (sess->send_rtp_sink_ghost == pad) ||
+ (sess->send_rtcp_src_ghost == pad) || pad_is_recv_fec (sess, pad))
+ return sess;
+ }
+ return NULL;
+}
+
+/* Signal proxies: the rtpsession (and jitterbuffer, for on_npt_stop) signals
+ * below are re-emitted on the rtpbin with the session id prepended, so that
+ * applications can tell the sessions apart. */
+static void
+on_new_ssrc (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_NEW_SSRC], 0,
+ sess->id, ssrc);
+}
+
+static void
+on_ssrc_collision (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_COLLISION], 0,
+ sess->id, ssrc);
+}
+
+static void
+on_ssrc_validated (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_VALIDATED], 0,
+ sess->id, ssrc);
+}
+
+static void
+on_ssrc_active (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_ACTIVE], 0,
+ sess->id, ssrc);
+}
+
+static void
+on_ssrc_sdes (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SSRC_SDES], 0,
+ sess->id, ssrc);
+}
+
+static void
+on_bye_ssrc (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_BYE_SSRC], 0,
+ sess->id, ssrc);
+}
+
+static void
+on_bye_timeout (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_BYE_TIMEOUT], 0,
+ sess->id, ssrc);
+
+ /* optionally forget the SSRC in the demuxer so its resources go away */
+ if (sess->bin->priv->autoremove)
+ g_signal_emit_by_name (sess->demux, "clear-ssrc", ssrc, NULL);
+}
+
+static void
+on_timeout (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_TIMEOUT], 0,
+ sess->id, ssrc);
+
+ /* optionally forget the SSRC in the demuxer so its resources go away */
+ if (sess->bin->priv->autoremove)
+ g_signal_emit_by_name (sess->demux, "clear-ssrc", ssrc, NULL);
+}
+
+static void
+on_sender_timeout (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SENDER_TIMEOUT], 0,
+ sess->id, ssrc);
+}
+
+/* emitted by the jitterbuffer, not the session */
+static void
+on_npt_stop (GstElement * jbuf, GstRtpBinStream * stream)
+{
+ g_signal_emit (stream->bin, gst_rtp_bin_signals[SIGNAL_ON_NPT_STOP], 0,
+ stream->session->id, stream->ssrc);
+}
+
+static void
+on_new_sender_ssrc (GstElement * session, guint32 ssrc, GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_NEW_SENDER_SSRC], 0,
+ sess->id, ssrc);
+}
+
+static void
+on_sender_ssrc_active (GstElement * session, guint32 ssrc,
+ GstRtpBinSession * sess)
+{
+ g_signal_emit (sess->bin, gst_rtp_bin_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE],
+ 0, sess->id, ssrc);
+}
+
+/* must be called with the SESSION lock.
+ * Returns the GstRtpBinStream for @ssrc, or NULL if none exists. */
+static GstRtpBinStream *
+find_stream_by_ssrc (GstRtpBinSession * session, guint32 ssrc)
+{
+ GSList *walk;
+
+ for (walk = session->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) walk->data;
+
+ if (stream->ssrc == ssrc)
+ return stream;
+ }
+ return NULL;
+}
+
+/* pad-removed callback of the SSRC demuxer: drop and free the stream that
+ * belongs to @ssrc. Lock order is bin lock, then session lock; the stream
+ * is only unlinked from the list under the session lock and freed afterwards
+ * outside of it. */
+static void
+ssrc_demux_pad_removed (GstElement * element, guint ssrc, GstPad * pad,
+ GstRtpBinSession * session)
+{
+ GstRtpBinStream *stream = NULL;
+ GstRtpBin *rtpbin;
+
+ rtpbin = session->bin;
+
+ GST_RTP_BIN_LOCK (rtpbin);
+
+ GST_RTP_SESSION_LOCK (session);
+ if ((stream = find_stream_by_ssrc (session, ssrc)))
+ session->streams = g_slist_remove (session->streams, stream);
+ GST_RTP_SESSION_UNLOCK (session);
+
+ if (stream)
+ free_stream (stream, rtpbin);
+
+ GST_RTP_BIN_UNLOCK (rtpbin);
+}
+
+/* create a session with the given id. Must be called with RTP_BIN_LOCK.
+ * Builds the rtpsession + rtpssrcdemux + rtpstorage elements, hooks up the
+ * signal proxies, adds everything to the bin and syncs the elements to the
+ * bin's target state. Returns the new session (prepended to
+ * rtpbin->sessions), or NULL (after a g_warning) when one of the required
+ * element factories is missing. */
+static GstRtpBinSession *
+create_session (GstRtpBin * rtpbin, gint id)
+{
+ GstRtpBinSession *sess;
+ GstElement *session, *demux;
+ GstElement *storage = NULL;
+ GstState target;
+
+ if (!(session = gst_element_factory_make ("rtpsession", NULL)))
+ goto no_session;
+
+ if (!(demux = gst_element_factory_make ("rtpssrcdemux", NULL)))
+ goto no_demux;
+
+ if (!(storage = gst_element_factory_make ("rtpstorage", NULL)))
+ goto no_storage;
+
+ /* need to sink the storage or otherwise signal handlers from bindings will
+ * take ownership of it and we don't own it anymore */
+ gst_object_ref_sink (storage);
+ g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_STORAGE], 0, storage,
+ id);
+
+ sess = g_new0 (GstRtpBinSession, 1);
+ g_mutex_init (&sess->lock);
+ sess->id = id;
+ sess->bin = rtpbin;
+ sess->session = session;
+ sess->demux = demux;
+ sess->storage = storage;
+
+ /* pt -> caps mapping; values own a caps ref */
+ sess->ptmap = g_hash_table_new_full (NULL, NULL, NULL,
+ (GDestroyNotify) gst_caps_unref);
+ rtpbin->sessions = g_slist_prepend (rtpbin->sessions, sess);
+
+ /* configure SDES items */
+ GST_OBJECT_LOCK (rtpbin);
+ g_object_set (demux, "max-streams", rtpbin->max_streams, NULL);
+ g_object_set (session, "sdes", rtpbin->sdes, "rtp-profile",
+ rtpbin->rtp_profile, "rtcp-sync-send-time", rtpbin->rtcp_sync_send_time,
+ NULL);
+ if (rtpbin->use_pipeline_clock)
+ g_object_set (session, "use-pipeline-clock", rtpbin->use_pipeline_clock,
+ NULL);
+ else
+ g_object_set (session, "ntp-time-source", rtpbin->ntp_time_source, NULL);
+
+ g_object_set (session, "max-dropout-time", rtpbin->max_dropout_time,
+ "max-misorder-time", rtpbin->max_misorder_time, NULL);
+ GST_OBJECT_UNLOCK (rtpbin);
+
+ /* provide clock_rate to the session manager when needed */
+ g_signal_connect (session, "request-pt-map",
+ (GCallback) pt_map_requested, sess);
+
+ /* proxy the per-SSRC session signals to the bin (see on_* above) */
+ g_signal_connect (sess->session, "on-new-ssrc",
+ (GCallback) on_new_ssrc, sess);
+ g_signal_connect (sess->session, "on-ssrc-collision",
+ (GCallback) on_ssrc_collision, sess);
+ g_signal_connect (sess->session, "on-ssrc-validated",
+ (GCallback) on_ssrc_validated, sess);
+ g_signal_connect (sess->session, "on-ssrc-active",
+ (GCallback) on_ssrc_active, sess);
+ g_signal_connect (sess->session, "on-ssrc-sdes",
+ (GCallback) on_ssrc_sdes, sess);
+ g_signal_connect (sess->session, "on-bye-ssrc",
+ (GCallback) on_bye_ssrc, sess);
+ g_signal_connect (sess->session, "on-bye-timeout",
+ (GCallback) on_bye_timeout, sess);
+ g_signal_connect (sess->session, "on-timeout", (GCallback) on_timeout, sess);
+ g_signal_connect (sess->session, "on-sender-timeout",
+ (GCallback) on_sender_timeout, sess);
+ g_signal_connect (sess->session, "on-new-sender-ssrc",
+ (GCallback) on_new_sender_ssrc, sess);
+ g_signal_connect (sess->session, "on-sender-ssrc-active",
+ (GCallback) on_sender_ssrc_active, sess);
+
+ gst_bin_add (GST_BIN_CAST (rtpbin), session);
+ gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+ gst_bin_add (GST_BIN_CAST (rtpbin), storage);
+
+ /* unref the storage again, the bin has a reference now and
+ * we don't need it anymore */
+ gst_object_unref (storage);
+
+ GST_OBJECT_LOCK (rtpbin);
+ target = GST_STATE_TARGET (rtpbin);
+ GST_OBJECT_UNLOCK (rtpbin);
+
+ /* change state only to what's needed */
+ gst_element_set_state (demux, target);
+ gst_element_set_state (session, target);
+ gst_element_set_state (storage, target);
+
+ return sess;
+
+ /* ERRORS: drop the elements created so far and warn */
+no_session:
+ {
+ g_warning ("rtpbin: could not create rtpsession element");
+ return NULL;
+ }
+no_demux:
+ {
+ gst_object_unref (session);
+ g_warning ("rtpbin: could not create rtpssrcdemux element");
+ return NULL;
+ }
+no_storage:
+ {
+ gst_object_unref (session);
+ gst_object_unref (demux);
+ g_warning ("rtpbin: could not create rtpstorage element");
+ return NULL;
+ }
+}
+
+/* Add @element to @bin (once) and keep a per-request reference to it in
+ * priv->elements. An element may be requested several times; it needs an
+ * equal number of remove_bin_element() calls before it really leaves the
+ * bin. Returns FALSE only when gst_bin_add() fails. */
+static gboolean
+bin_manage_element (GstRtpBin * bin, GstElement * element)
+{
+ GstRtpBinPrivate *priv = bin->priv;
+
+ if (g_list_find (priv->elements, element)) {
+ GST_DEBUG_OBJECT (bin, "requested element %p already in bin", element);
+ } else {
+ GST_DEBUG_OBJECT (bin, "adding requested element %p", element);
+
+ /* sink a floating reference so ownership is well-defined before the
+ * element is handed to the bin */
+ if (g_object_is_floating (element))
+ element = gst_object_ref_sink (element);
+
+ if (!gst_bin_add (GST_BIN_CAST (bin), element))
+ goto add_failed;
+ if (!gst_element_sync_state_with_parent (element))
+ GST_WARNING_OBJECT (bin, "unable to sync element state with rtpbin");
+ }
+ /* we add the element multiple times, each time we need an equal number of
+ * removes to really remove the element from the bin */
+ priv->elements = g_list_prepend (priv->elements, element);
+
+ return TRUE;
+
+ /* ERRORS */
+add_failed:
+ {
+ GST_WARNING_OBJECT (bin, "unable to add element");
+ gst_object_unref (element);
+ return FALSE;
+ }
+}
+
+/* Drop one managed reference on @element (counterpart of
+ * bin_manage_element()). Only when no reference is left in priv->elements is
+ * the element actually removed from the bin and shut down to NULL. */
+static void
+remove_bin_element (GstElement * element, GstRtpBin * bin)
+{
+ GstRtpBinPrivate *priv = bin->priv;
+ GList *find;
+
+ find = g_list_find (priv->elements, element);
+ if (find) {
+ priv->elements = g_list_delete_link (priv->elements, find);
+
+ /* last managed reference gone: lock the element's state so the bin
+ * cannot change it anymore, unparent it and shut it down */
+ if (!g_list_find (priv->elements, element)) {
+ gst_element_set_locked_state (element, TRUE);
+ gst_bin_remove (GST_BIN_CAST (bin), element);
+ gst_element_set_state (element, GST_STATE_NULL);
+ }
+
+ gst_object_unref (element);
+ }
+}
+
+/* Tear down and free one session: lock and NULL the child elements, release
+ * all request pads, unparent the children, free managed elements and all
+ * streams. Called with RTP_BIN_LOCK */
+static void
+free_session (GstRtpBinSession * sess, GstRtpBin * bin)
+{
+ GST_DEBUG_OBJECT (bin, "freeing session %p", sess);
+
+ /* lock state first so the bin no longer manages these children, then
+ * shut them down before removing pads and unparenting */
+ gst_element_set_locked_state (sess->demux, TRUE);
+ gst_element_set_locked_state (sess->session, TRUE);
+ gst_element_set_locked_state (sess->storage, TRUE);
+
+ gst_element_set_state (sess->demux, GST_STATE_NULL);
+ gst_element_set_state (sess->session, GST_STATE_NULL);
+ gst_element_set_state (sess->storage, GST_STATE_NULL);
+
+ remove_recv_rtp (bin, sess);
+ remove_recv_rtcp (bin, sess);
+ remove_recv_fec (bin, sess);
+ remove_send_rtp (bin, sess);
+ remove_send_fec (bin, sess);
+ remove_rtcp (bin, sess);
+
+ gst_bin_remove (GST_BIN_CAST (bin), sess->session);
+ gst_bin_remove (GST_BIN_CAST (bin), sess->demux);
+ gst_bin_remove (GST_BIN_CAST (bin), sess->storage);
+
+ /* drop the per-request references on elements managed via
+ * bin_manage_element() */
+ g_slist_foreach (sess->elements, (GFunc) remove_bin_element, bin);
+ g_slist_free (sess->elements);
+ sess->elements = NULL;
+
+ g_slist_foreach (sess->streams, (GFunc) free_stream, bin);
+ g_slist_free (sess->streams);
+
+ g_mutex_clear (&sess->lock);
+ g_hash_table_destroy (sess->ptmap);
+
+ g_free (sess);
+}
+
+/* Get the payload type caps for the specific payload @pt in @session.
+ * Checks the per-session cache first, otherwise emits "request-pt-map" and
+ * caches the answer. Returns a reference to the caps or NULL when no
+ * mapping could be obtained. */
+static GstCaps *
+get_pt_map (GstRtpBinSession * session, guint pt)
+{
+ GstCaps *caps = NULL;
+ GstRtpBin *bin;
+ GValue ret = { 0 };
+ GValue args[3] = { {0}, {0}, {0} };
+
+ GST_DEBUG ("searching pt %u in cache", pt);
+
+ GST_RTP_SESSION_LOCK (session);
+
+ /* first look in the cache */
+ caps = g_hash_table_lookup (session->ptmap, GINT_TO_POINTER (pt));
+ if (caps) {
+ gst_caps_ref (caps);
+ goto done;
+ }
+
+ bin = session->bin;
+
+ GST_DEBUG ("emitting signal for pt %u in session %u", pt, session->id);
+
+ /* not in cache, send signal to request caps */
+ g_value_init (&args[0], GST_TYPE_ELEMENT);
+ g_value_set_object (&args[0], bin);
+ g_value_init (&args[1], G_TYPE_UINT);
+ g_value_set_uint (&args[1], session->id);
+ g_value_init (&args[2], G_TYPE_UINT);
+ g_value_set_uint (&args[2], pt);
+
+ g_value_init (&ret, GST_TYPE_CAPS);
+ g_value_set_boxed (&ret, NULL);
+
+ /* release the lock while emitting: application handlers may call back
+ * into the rtpbin */
+ GST_RTP_SESSION_UNLOCK (session);
+
+ g_signal_emitv (args, gst_rtp_bin_signals[SIGNAL_REQUEST_PT_MAP], 0, &ret);
+
+ GST_RTP_SESSION_LOCK (session);
+
+ g_value_unset (&args[0]);
+ g_value_unset (&args[1]);
+ g_value_unset (&args[2]);
+
+ /* look in the cache again because we let the lock go */
+ caps = g_hash_table_lookup (session->ptmap, GINT_TO_POINTER (pt));
+ if (caps) {
+ gst_caps_ref (caps);
+ g_value_unset (&ret);
+ goto done;
+ }
+
+ caps = (GstCaps *) g_value_dup_boxed (&ret);
+ g_value_unset (&ret);
+ if (!caps)
+ goto no_caps;
+
+ GST_DEBUG ("caching pt %u as %" GST_PTR_FORMAT, pt, caps);
+
+ /* store in cache, take additional ref (the cache owns one, the caller
+ * gets the other) */
+ g_hash_table_insert (session->ptmap, GINT_TO_POINTER (pt),
+ gst_caps_ref (caps));
+
+done:
+ GST_RTP_SESSION_UNLOCK (session);
+
+ return caps;
+
+ /* ERRORS */
+no_caps:
+ {
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_DEBUG ("no pt map could be obtained");
+ return NULL;
+ }
+}
+
+/* GHRFunc that matches every entry; used to empty a hash table with
+ * g_hash_table_foreach_remove() */
+static gboolean
+return_true (gpointer key, gpointer value, gpointer user_data)
+{
+ (void) key;
+ (void) value;
+ (void) user_data;
+
+ return TRUE;
+}
+
+/* Invalidate the lip-sync state of every stream of every client so that a
+ * new RTCP SR packet is required before synchronization is attempted again. */
+static void
+gst_rtp_bin_reset_sync (GstRtpBin * rtpbin)
+{
+ GSList *clients, *streams;
+
+ GST_DEBUG_OBJECT (rtpbin, "Reset sync on all clients");
+
+ GST_RTP_BIN_LOCK (rtpbin);
+ for (clients = rtpbin->clients; clients; clients = g_slist_next (clients)) {
+ GstRtpBinClient *client = (GstRtpBinClient *) clients->data;
+
+ /* reset sync on all streams for this client */
+ for (streams = client->streams; streams; streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+
+ /* make us require a new SR packet for this stream before we attempt new
+ * lip-sync */
+ stream->have_sync = FALSE;
+ stream->rt_delta = 0;
+ stream->rtp_delta = 0;
+ stream->clock_base = -100 * GST_SECOND;
+ }
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+}
+
+/* Flush all cached payload-type mappings: forward "clear-pt-map" to every
+ * rtpsession, jitterbuffer and ptdemux, empty the per-session caps cache and
+ * finally reset the sync state. */
+static void
+gst_rtp_bin_clear_pt_map (GstRtpBin * bin)
+{
+ GSList *sessions, *streams;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "clearing pt map");
+ for (sessions = bin->sessions; sessions; sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_DEBUG_OBJECT (bin, "clearing session %p", session);
+ g_signal_emit_by_name (session->session, "clear-pt-map", NULL);
+
+ GST_RTP_SESSION_LOCK (session);
+ g_hash_table_foreach_remove (session->ptmap, return_true, NULL);
+
+ for (streams = session->streams; streams; streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+
+ GST_DEBUG_OBJECT (bin, "clearing stream %p", stream);
+ /* a custom (requested) jitterbuffer may not implement clear-pt-map */
+ if (g_signal_lookup ("clear-pt-map", G_OBJECT_TYPE (stream->buffer)) != 0)
+ g_signal_emit_by_name (stream->buffer, "clear-pt-map", NULL);
+ if (stream->demux)
+ g_signal_emit_by_name (stream->demux, "clear-pt-map", NULL);
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+
+ /* reset sync too */
+ gst_rtp_bin_reset_sync (bin);
+}
+
+/* return a new reference to the rtpsession element with the given id, or
+ * NULL when no such session exists */
+static GstElement *
+gst_rtp_bin_get_session (GstRtpBin * bin, guint session_id)
+{
+ GstRtpBinSession *sess;
+ GstElement *element = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving GstRtpSession, index: %u", session_id);
+ if ((sess = find_session_by_id (bin, (gint) session_id)) != NULL)
+ element = gst_object_ref (sess->session);
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return element;
+}
+
+/* return the internal RTPSession object of the session with the given id
+ * (a new reference via g_object_get), or NULL when not found */
+static RTPSession *
+gst_rtp_bin_get_internal_session (GstRtpBin * bin, guint session_id)
+{
+ GstRtpBinSession *sess;
+ RTPSession *result = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal RTPSession object, index: %u",
+ session_id);
+ sess = find_session_by_id (bin, (gint) session_id);
+ if (sess != NULL)
+ g_object_get (sess->session, "internal-session", &result, NULL);
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return result;
+}
+
+/* return a new reference to the rtpstorage element of the session with the
+ * given id, or NULL when not found */
+static GstElement *
+gst_rtp_bin_get_storage (GstRtpBin * bin, guint session_id)
+{
+ GstRtpBinSession *sess;
+ GstElement *storage = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal storage object, index: %u",
+ session_id);
+ sess = find_session_by_id (bin, (gint) session_id);
+ if (sess != NULL && sess->storage != NULL)
+ storage = gst_object_ref (sess->storage);
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return storage;
+}
+
+/* return the internal storage object of the rtpstorage element of the
+ * session with the given id (new reference via g_object_get), or NULL */
+static GObject *
+gst_rtp_bin_get_internal_storage (GstRtpBin * bin, guint session_id)
+{
+ GstRtpBinSession *sess;
+ GObject *result = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "retrieving internal storage object, index: %u",
+ session_id);
+ sess = find_session_by_id (bin, (gint) session_id);
+ if (sess != NULL && sess->storage != NULL)
+ g_object_get (sess->storage, "internal-storage", &result, NULL);
+ GST_RTP_BIN_UNLOCK (bin);
+
+ return result;
+}
+
+/* Ask the ssrc-demuxer of @session_id to forget @ssrc. The demux reference
+ * is taken under the bin lock but the signal is emitted after releasing it. */
+static void
+gst_rtp_bin_clear_ssrc (GstRtpBin * bin, guint session_id, guint32 ssrc)
+{
+ GstRtpBinSession *session;
+ GstElement *demux = NULL;
+
+ GST_RTP_BIN_LOCK (bin);
+ GST_DEBUG_OBJECT (bin, "clearing ssrc %u for session %u", ssrc, session_id);
+ session = find_session_by_id (bin, (gint) session_id);
+ if (session)
+ demux = gst_object_ref (session->demux);
+ GST_RTP_BIN_UNLOCK (bin);
+
+ if (demux) {
+ g_signal_emit_by_name (demux, "clear-ssrc", ssrc, NULL);
+ gst_object_unref (demux);
+ }
+}
+
+/* default signal handler: rtpbin provides no RTP/RTCP encoder of its own,
+ * applications override the request signal to supply one */
+static GstElement *
+gst_rtp_bin_request_encoder (GstRtpBin * bin, guint session_id)
+{
+ GST_DEBUG_OBJECT (bin, "return NULL encoder");
+ return NULL;
+}
+
+/* default signal handler: rtpbin provides no RTP/RTCP decoder of its own,
+ * applications override the request signal to supply one */
+static GstElement *
+gst_rtp_bin_request_decoder (GstRtpBin * bin, guint session_id)
+{
+ GST_DEBUG_OBJECT (bin, "return NULL decoder");
+ return NULL;
+}
+
+/* default "request-jitterbuffer" handler: hand out a standard
+ * rtpjitterbuffer element (may be NULL if the factory is missing) */
+static GstElement *
+gst_rtp_bin_request_jitterbuffer (GstRtpBin * bin, guint session_id)
+{
+ GstElement *jitterbuffer;
+
+ jitterbuffer = gst_element_factory_make ("rtpjitterbuffer", NULL);
+ return jitterbuffer;
+}
+
+/* Set property @name to @value on the jitterbuffer of every stream in every
+ * session. Jitterbuffers that do not expose the property are skipped with a
+ * warning (custom requested jitterbuffers may have a reduced API). */
+static void
+gst_rtp_bin_propagate_property_to_jitterbuffer (GstRtpBin * bin,
+ const gchar * name, const GValue * value)
+{
+ GSList *sessions, *streams;
+
+ GST_RTP_BIN_LOCK (bin);
+ for (sessions = bin->sessions; sessions; sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_RTP_SESSION_LOCK (session);
+ for (streams = session->streams; streams; streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+ GObjectClass *jb_class;
+
+ jb_class = G_OBJECT_GET_CLASS (G_OBJECT (stream->buffer));
+ if (g_object_class_find_property (jb_class, name))
+ g_object_set_property (G_OBJECT (stream->buffer), name, value);
+ else
+ GST_WARNING_OBJECT (bin,
+ "Stream jitterbuffer does not expose property %s", name);
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+}
+
+/* set property @name to @value on the rtpsession element of every session */
+static void
+gst_rtp_bin_propagate_property_to_session (GstRtpBin * bin,
+ const gchar * name, const GValue * value)
+{
+ GSList *item;
+
+ GST_RTP_BIN_LOCK (bin);
+ for (item = bin->sessions; item != NULL; item = g_slist_next (item)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) item->data;
+
+ g_object_set_property (G_OBJECT (session->session), name, value);
+ }
+ GST_RTP_BIN_UNLOCK (bin);
+}
+
+/* Get the client with the given SDES CNAME (@data, @len), creating one when
+ * none exists yet. When @created is non-NULL it is set to TRUE if a new
+ * client was allocated, FALSE otherwise (previously it was left
+ * uninitialized, so callers read indeterminate stack data).
+ * Must be called with RTP_BIN_LOCK */
+static GstRtpBinClient *
+get_client (GstRtpBin * bin, guint8 len, guint8 * data, gboolean * created)
+{
+ GstRtpBinClient *result = NULL;
+ GSList *walk;
+
+ /* default: report "found existing" until we actually create one */
+ if (created)
+ *created = FALSE;
+
+ for (walk = bin->clients; walk; walk = g_slist_next (walk)) {
+ GstRtpBinClient *client = (GstRtpBinClient *) walk->data;
+
+ /* cheap length check before comparing the CNAME bytes */
+ if (len != client->cname_len)
+ continue;
+
+ if (!strncmp ((gchar *) data, client->cname, client->cname_len)) {
+ GST_DEBUG_OBJECT (bin, "found existing client %p with CNAME %s", client,
+ client->cname);
+ result = client;
+ break;
+ }
+ }
+
+ /* nothing found, create one */
+ if (result == NULL) {
+ result = g_new0 (GstRtpBinClient, 1);
+ result->cname = g_strndup ((gchar *) data, len);
+ result->cname_len = len;
+ bin->clients = g_slist_prepend (bin->clients, result);
+ if (created)
+ *created = TRUE;
+ GST_DEBUG_OBJECT (bin, "created new client %p with CNAME %s", result,
+ result->cname);
+ }
+ return result;
+}
+
+/* release everything a client owns; the streams themselves belong to their
+ * sessions, only the list links are freed here */
+static void
+free_client (GstRtpBinClient * client, GstRtpBin * bin)
+{
+ GST_DEBUG_OBJECT (bin, "freeing client %p", client);
+ g_free (client->cname);
+ g_slist_free (client->streams);
+ g_free (client);
+}
+
+/* Sample the pipeline's current running time and the corresponding NTP time
+ * (in nanoseconds), according to the bin's configured NTP time source.
+ * Either out parameter may be NULL. When the bin has no clock yet, both
+ * values are set to -1 (all-ones, i.e. GST_CLOCK_TIME_NONE). */
+static void
+get_current_times (GstRtpBin * bin, GstClockTime * running_time,
+ guint64 * ntpnstime)
+{
+ guint64 ntpns = -1;
+ GstClock *clock;
+ GstClockTime base_time, rt, clock_time;
+
+ GST_OBJECT_LOCK (bin);
+ if ((clock = GST_ELEMENT_CLOCK (bin))) {
+ base_time = GST_ELEMENT_CAST (bin)->base_time;
+ /* keep the clock alive after dropping the object lock */
+ gst_object_ref (clock);
+ GST_OBJECT_UNLOCK (bin);
+
+ /* get current clock time and convert to running time */
+ clock_time = gst_clock_get_time (clock);
+ rt = clock_time - base_time;
+
+ if (bin->use_pipeline_clock) {
+ ntpns = rt;
+ /* add constant to convert from 1970 based time to 1900 based time */
+ ntpns += (2208988800LL * GST_SECOND);
+ } else {
+ switch (bin->ntp_time_source) {
+ case GST_RTP_NTP_TIME_SOURCE_NTP:
+ case GST_RTP_NTP_TIME_SOURCE_UNIX:{
+ /* get current NTP time */
+ ntpns = g_get_real_time () * GST_USECOND;
+
+ /* add constant to convert from 1970 based time to 1900 based time */
+ if (bin->ntp_time_source == GST_RTP_NTP_TIME_SOURCE_NTP)
+ ntpns += (2208988800LL * GST_SECOND);
+ break;
+ }
+ case GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME:
+ ntpns = rt;
+ break;
+ case GST_RTP_NTP_TIME_SOURCE_CLOCK_TIME:
+ ntpns = clock_time;
+ break;
+ default:
+ ntpns = -1; /* Fix uninited compiler warning */
+ g_assert_not_reached ();
+ break;
+ }
+ }
+
+ gst_object_unref (clock);
+ } else {
+ GST_OBJECT_UNLOCK (bin);
+ rt = -1;
+ ntpns = -1;
+ }
+ if (running_time)
+ *running_time = rt;
+ if (ntpnstime)
+ *ntpnstime = ntpns;
+}
+
+/* Apply @ts_offset to the jitterbuffer of @stream. Changes smaller than
+ * @min_ts_offset are ignored; when @max_ts_offset > 0, offsets that are too
+ * large or (unless @allow_positive_ts_offset) positive are rejected as
+ * bogus. Jitterbuffers without a ts-offset property are skipped. */
+static void
+stream_set_ts_offset (GstRtpBin * bin, GstRtpBinStream * stream,
+ gint64 ts_offset, gint64 max_ts_offset, gint64 min_ts_offset,
+ gboolean allow_positive_ts_offset)
+{
+ gint64 prev_ts_offset;
+ GObjectClass *jb_class;
+
+ jb_class = G_OBJECT_GET_CLASS (G_OBJECT (stream->buffer));
+
+ if (!g_object_class_find_property (jb_class, "ts-offset")) {
+ GST_LOG_OBJECT (bin,
+ "stream's jitterbuffer does not expose ts-offset property");
+ return;
+ }
+
+ g_object_get (stream->buffer, "ts-offset", &prev_ts_offset, NULL);
+
+ /* delta changed, see how much */
+ if (prev_ts_offset != ts_offset) {
+ gint64 diff;
+
+ diff = prev_ts_offset - ts_offset;
+
+ GST_DEBUG_OBJECT (bin,
+ "ts-offset %" G_GINT64_FORMAT ", prev %" G_GINT64_FORMAT
+ ", diff: %" G_GINT64_FORMAT, ts_offset, prev_ts_offset, diff);
+
+ /* ignore minor offsets */
+ if (ABS (diff) < min_ts_offset) {
+ GST_DEBUG_OBJECT (bin, "offset too small, ignoring");
+ return;
+ }
+
+ /* sanity check offset */
+ if (max_ts_offset > 0) {
+ if (ts_offset > 0 && !allow_positive_ts_offset) {
+ GST_DEBUG_OBJECT (bin,
+ "offset is positive (clocks are out of sync), ignoring");
+ return;
+ }
+ if (ABS (ts_offset) > max_ts_offset) {
+ GST_DEBUG_OBJECT (bin, "offset too large, ignoring");
+ return;
+ }
+ }
+
+ g_object_set (stream->buffer, "ts-offset", ts_offset, NULL);
+ }
+ GST_DEBUG_OBJECT (bin, "stream SSRC %08x, delta %" G_GINT64_FORMAT,
+ stream->ssrc, ts_offset);
+}
+
+/* push a custom serialized GstRTCPSRReceived event on the jitterbuffer src
+ * pad, but only when the application enabled send-sync-event */
+static void
+gst_rtp_bin_send_sync_event (GstRtpBinStream * stream)
+{
+ GstEvent *event;
+ GstPad *srcpad;
+
+ if (!stream->bin->send_sync_event)
+ return;
+
+ GST_DEBUG_OBJECT (stream->bin,
+ "sending GstRTCPSRReceived event downstream");
+
+ event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+ gst_structure_new_empty ("GstRTCPSRReceived"));
+ srcpad = gst_element_get_static_pad (stream->buffer, "src");
+ gst_pad_push_event (srcpad, event);
+ gst_object_unref (srcpad);
+}
+
+/* Associate @stream with the client identified by CNAME @data/@len and, from
+ * the SR sender info (@ntptime, @last_extrtptime) and the jitterbuffer's
+ * timing snapshot (@base_rtptime, @base_time, @clock_rate, @rtp_clock_base),
+ * compute and apply per-stream ts-offsets so that all streams of the same
+ * CNAME are played in sync.
+ * Must be called with GST_RTP_BIN_LOCK */
+static void
+gst_rtp_bin_associate (GstRtpBin * bin, GstRtpBinStream * stream, guint8 len,
+ guint8 * data, guint64 ntptime, guint64 last_extrtptime,
+ guint64 base_rtptime, guint64 base_time, guint clock_rate,
+ gint64 rtp_clock_base)
+{
+ GstRtpBinClient *client;
+ gboolean created;
+ GSList *walk;
+ GstClockTime running_time, running_time_rtp;
+ guint64 ntpnstime;
+
+ /* first find or create the CNAME */
+ client = get_client (bin, len, data, &created);
+
+ /* find stream in the client */
+ for (walk = client->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) walk->data;
+
+ if (ostream == stream)
+ break;
+ }
+ /* not found, add it to the list */
+ if (walk == NULL) {
+ GST_DEBUG_OBJECT (bin,
+ "new association of SSRC %08x with client %p with CNAME %s",
+ stream->ssrc, client, client->cname);
+ client->streams = g_slist_prepend (client->streams, stream);
+ client->nstreams++;
+ } else {
+ GST_DEBUG_OBJECT (bin,
+ "found association of SSRC %08x with client %p with CNAME %s",
+ stream->ssrc, client, client->cname);
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (last_extrtptime)) {
+ GST_DEBUG_OBJECT (bin, "invalidated sync data");
+ if (bin->rtcp_sync == GST_RTP_BIN_RTCP_SYNC_RTP) {
+ /* we don't need that data, so carry on,
+ * but make some values look saner */
+ last_extrtptime = base_rtptime;
+ } else {
+ /* nothing we can do with this data in this case */
+ GST_DEBUG_OBJECT (bin, "bailing out");
+ return;
+ }
+ }
+
+ /* Take the extended rtptime we found in the SR packet and map it to the
+ * local rtptime. The local rtp time is used to construct timestamps on the
+ * buffers so we will calculate what running_time corresponds to the RTP
+ * timestamp in the SR packet. */
+ running_time_rtp = last_extrtptime - base_rtptime;
+
+ GST_DEBUG_OBJECT (bin,
+ "base %" G_GUINT64_FORMAT ", extrtptime %" G_GUINT64_FORMAT
+ ", local RTP %" G_GUINT64_FORMAT ", clock-rate %d, "
+ "clock-base %" G_GINT64_FORMAT, base_rtptime,
+ last_extrtptime, running_time_rtp, clock_rate, rtp_clock_base);
+
+ /* calculate local RTP time in gstreamer timestamp, we essentially perform the
+ * same conversion that a jitterbuffer would use to convert an rtp timestamp
+ * into a corresponding gstreamer timestamp. Note that the base_time also
+ * contains the drift between sender and receiver. */
+ running_time =
+ gst_util_uint64_scale_int (running_time_rtp, GST_SECOND, clock_rate);
+ running_time += base_time;
+
+ /* convert ntptime to nanoseconds (NTP fraction is 1/2^32 seconds) */
+ ntpnstime = gst_util_uint64_scale (ntptime, GST_SECOND,
+ (G_GINT64_CONSTANT (1) << 32));
+
+ stream->have_sync = TRUE;
+
+ GST_DEBUG_OBJECT (bin,
+ "SR RTP running time %" G_GUINT64_FORMAT ", SR NTP %" G_GUINT64_FORMAT,
+ running_time, ntpnstime);
+
+ /* recalc inter stream playout offset, but only if there is more than one
+ * stream or we're doing NTP sync. */
+ if (bin->ntp_sync) {
+ gint64 ntpdiff, rtdiff;
+ guint64 local_ntpnstime;
+ GstClockTime local_running_time;
+
+ /* For NTP sync we need to first get a snapshot of running_time and NTP
+ * time. We know at what running_time we play a certain RTP time, we also
+ * calculated when we would play the RTP time in the SR packet. Now we need
+ * to know how the running_time and the NTP time relate to each other. */
+ get_current_times (bin, &local_running_time, &local_ntpnstime);
+
+ /* see how far away the NTP time is. This is the difference between the
+ * current NTP time and the NTP time in the last SR packet. */
+ ntpdiff = local_ntpnstime - ntpnstime;
+ /* see how far away the running_time is. This is the difference between the
+ * current running_time and the running_time of the RTP timestamp in the
+ * last SR packet. */
+ rtdiff = local_running_time - running_time;
+
+ GST_DEBUG_OBJECT (bin,
+ "local NTP time %" G_GUINT64_FORMAT ", SR NTP time %" G_GUINT64_FORMAT,
+ local_ntpnstime, ntpnstime);
+ GST_DEBUG_OBJECT (bin,
+ "local running time %" G_GUINT64_FORMAT ", SR RTP running time %"
+ G_GUINT64_FORMAT, local_running_time, running_time);
+ GST_DEBUG_OBJECT (bin,
+ "NTP diff %" G_GINT64_FORMAT ", RT diff %" G_GINT64_FORMAT, ntpdiff,
+ rtdiff);
+
+ /* combine to get the final diff to apply to the running_time */
+ stream->rt_delta = rtdiff - ntpdiff;
+
+ stream_set_ts_offset (bin, stream, stream->rt_delta, bin->max_ts_offset,
+ 0, FALSE);
+ } else {
+ gint64 min, rtp_min, clock_base = stream->clock_base;
+ gboolean all_sync, use_rtp;
+ gboolean rtcp_sync = g_atomic_int_get (&bin->rtcp_sync);
+
+ /* calculate delta between server and receiver. ntpnstime is created by
+ * converting the ntptime in the last SR packet to a gstreamer timestamp. This
+ * delta expresses the difference to our timeline and the server timeline. The
+ * difference in itself doesn't mean much but we can combine the delta of
+ * multiple streams to create a stream specific offset. */
+ stream->rt_delta = ntpnstime - running_time;
+
+ /* calculate the min of all deltas, ignoring streams that did not yet have a
+ * valid rt_delta because we did not yet receive an SR packet for those
+ * streams.
+ * We calculate the minimum because we would like to only apply positive
+ * offsets to streams, delaying their playback instead of trying to speed up
+ * other streams (which might be impossible when we have to create negative
+ * latencies).
+ * The stream that has the smallest diff is selected as the reference stream,
+ * all other streams will have a positive offset to this difference. */
+
+ /* some alternative setting allow ignoring RTCP as much as possible,
+ * for servers generating bogus ntp timeline */
+ min = rtp_min = G_MAXINT64;
+ use_rtp = FALSE;
+ if (rtcp_sync == GST_RTP_BIN_RTCP_SYNC_RTP) {
+ guint64 ext_base;
+
+ use_rtp = TRUE;
+ /* signed version for convenience */
+ clock_base = base_rtptime;
+ /* deal with possible wrap-around */
+ ext_base = base_rtptime;
+ rtp_clock_base = gst_rtp_buffer_ext_timestamp (&ext_base, rtp_clock_base);
+ /* sanity check; base rtp and provided clock_base should be close */
+ if (rtp_clock_base >= clock_base) {
+ if (rtp_clock_base - clock_base < 10 * clock_rate) {
+ rtp_clock_base = base_time +
+ gst_util_uint64_scale_int (rtp_clock_base - clock_base,
+ GST_SECOND, clock_rate);
+ } else {
+ use_rtp = FALSE;
+ }
+ } else {
+ if (clock_base - rtp_clock_base < 10 * clock_rate) {
+ rtp_clock_base = base_time -
+ gst_util_uint64_scale_int (clock_base - rtp_clock_base,
+ GST_SECOND, clock_rate);
+ } else {
+ use_rtp = FALSE;
+ }
+ }
+ /* warn and bail out for clarity if no sane values */
+ if (!use_rtp) {
+ GST_WARNING_OBJECT (bin, "unable to sync to provided rtptime");
+ return;
+ }
+ /* store to track changes */
+ clock_base = rtp_clock_base;
+ /* generate a fake as before,
+ * now equating rtptime obtained from RTP-Info,
+ * where the large time represent the otherwise irrelevant npt/ntp time */
+ stream->rtp_delta = (GST_SECOND << 28) - rtp_clock_base;
+ } else {
+ clock_base = rtp_clock_base;
+ }
+
+ all_sync = TRUE;
+ for (walk = client->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) walk->data;
+
+ if (!ostream->have_sync) {
+ all_sync = FALSE;
+ continue;
+ }
+
+ /* change in current stream's base from previously init'ed value
+ * leads to reset of all stream's base */
+ if (stream != ostream && stream->clock_base >= 0 &&
+ (stream->clock_base != clock_base)) {
+ GST_DEBUG_OBJECT (bin, "reset upon clock base change");
+ ostream->clock_base = -100 * GST_SECOND;
+ ostream->rtp_delta = 0;
+ }
+
+ if (ostream->rt_delta < min)
+ min = ostream->rt_delta;
+ if (ostream->rtp_delta < rtp_min)
+ rtp_min = ostream->rtp_delta;
+ }
+
+ /* arrange to re-sync for each stream upon significant change,
+ * e.g. post-seek */
+ all_sync = all_sync && (stream->clock_base == clock_base);
+ stream->clock_base = clock_base;
+
+ /* may need init performed above later on, but nothing more to do now */
+ if (client->nstreams <= 1)
+ return;
+
+ GST_DEBUG_OBJECT (bin, "client %p min delta %" G_GINT64_FORMAT
+ " all sync %d", client, min, all_sync);
+ GST_DEBUG_OBJECT (bin, "rtcp sync mode %d, use_rtp %d", rtcp_sync, use_rtp);
+
+ switch (rtcp_sync) {
+ case GST_RTP_BIN_RTCP_SYNC_RTP:
+ if (!use_rtp)
+ break;
+ GST_DEBUG_OBJECT (bin, "using rtp generated reports; "
+ "client %p min rtp delta %" G_GINT64_FORMAT, client, rtp_min);
+ /* fall-through */
+ case GST_RTP_BIN_RTCP_SYNC_INITIAL:
+ /* if all have been synced already, do not bother further */
+ if (all_sync) {
+ GST_DEBUG_OBJECT (bin, "all streams already synced; done");
+ return;
+ }
+ break;
+ default:
+ break;
+ }
+
+ /* bail out if we adjusted recently enough */
+ if (all_sync && (ntpnstime - bin->priv->last_ntpnstime) <
+ bin->rtcp_sync_interval * GST_MSECOND) {
+ GST_DEBUG_OBJECT (bin, "discarding RTCP sender packet for sync; "
+ "previous sender info too recent "
+ "(previous NTP %" G_GUINT64_FORMAT ")", bin->priv->last_ntpnstime);
+ return;
+ }
+ bin->priv->last_ntpnstime = ntpnstime;
+
+ /* calculate offsets for each stream */
+ for (walk = client->streams; walk; walk = g_slist_next (walk)) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) walk->data;
+ gint64 ts_offset;
+
+ /* ignore streams for which we didn't receive an SR packet yet, we
+ * can't synchronize them yet. We can however sync other streams just
+ * fine. */
+ if (!ostream->have_sync)
+ continue;
+
+ /* calculate offset to our reference stream, this should always give a
+ * positive number. */
+ if (use_rtp)
+ ts_offset = ostream->rtp_delta - rtp_min;
+ else
+ ts_offset = ostream->rt_delta - min;
+
+ stream_set_ts_offset (bin, ostream, ts_offset, bin->max_ts_offset,
+ MIN_TS_OFFSET, TRUE);
+ }
+ }
+ gst_rtp_bin_send_sync_event (stream);
+
+ return;
+}
+
+/* iterate all RTCP packets in a mapped GstRTCPBuffer; (b) holds the loop
+ * condition, (packet) the current packet */
+#define GST_RTCP_BUFFER_FOR_PACKETS(b,buffer,packet) \
+ for ((b) = gst_rtcp_buffer_get_first_packet ((buffer), (packet)); (b); \
+ (b) = gst_rtcp_packet_move_to_next ((packet)))
+
+/* iterate all SDES items of an RTCP SDES packet */
+#define GST_RTCP_SDES_FOR_ITEMS(b,packet) \
+ for ((b) = gst_rtcp_packet_sdes_first_item ((packet)); (b); \
+ (b) = gst_rtcp_packet_sdes_next_item ((packet)))
+
+/* iterate all entries of the current SDES item */
+#define GST_RTCP_SDES_FOR_ENTRIES(b,packet) \
+ for ((b) = gst_rtcp_packet_sdes_first_entry ((packet)); (b); \
+ (b) = gst_rtcp_packet_sdes_next_entry ((packet)))
+
+/* Jitterbuffer "handle-sync" callback: parse the RTCP SR buffer captured by
+ * the jitterbuffer, extract the sender info of our SSRC and the matching
+ * SDES CNAME entry, and associate the stream with that CNAME for
+ * inter-stream synchronization. */
+static void
+gst_rtp_bin_handle_sync (GstElement * jitterbuffer, GstStructure * s,
+ GstRtpBinStream * stream)
+{
+ GstRtpBin *bin;
+ GstRTCPPacket packet;
+ guint32 ssrc;
+ guint64 ntptime;
+ gboolean have_sr, have_sdes;
+ gboolean more;
+ guint64 base_rtptime;
+ guint64 base_time;
+ guint clock_rate;
+ guint64 clock_base;
+ guint64 extrtptime;
+ GstBuffer *buffer;
+ GstRTCPBuffer rtcp = { NULL, };
+
+ bin = stream->bin;
+
+ GST_DEBUG_OBJECT (bin, "sync handler called");
+
+ /* get the last relation between the rtp timestamps and the gstreamer
+ * timestamps. We get this info directly from the jitterbuffer which
+ * constructs gstreamer timestamps from rtp timestamps and so it knows
+ * exactly what the current situation is. */
+ base_rtptime =
+ g_value_get_uint64 (gst_structure_get_value (s, "base-rtptime"));
+ base_time = g_value_get_uint64 (gst_structure_get_value (s, "base-time"));
+ clock_rate = g_value_get_uint (gst_structure_get_value (s, "clock-rate"));
+ clock_base = g_value_get_uint64 (gst_structure_get_value (s, "clock-base"));
+ extrtptime =
+ g_value_get_uint64 (gst_structure_get_value (s, "sr-ext-rtptime"));
+ buffer = gst_value_get_buffer (gst_structure_get_value (s, "sr-buffer"));
+
+ have_sr = FALSE;
+ have_sdes = FALSE;
+
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+
+ GST_RTCP_BUFFER_FOR_PACKETS (more, &rtcp, &packet) {
+ /* first packet must be SR or RR or else the validate would have failed */
+ switch (gst_rtcp_packet_get_type (&packet)) {
+ case GST_RTCP_TYPE_SR:
+ /* only parse first. There is only supposed to be one SR in the packet
+ * but we will deal with malformed packets gracefully */
+ if (have_sr)
+ break;
+ /* get NTP and RTP times */
+ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, &ntptime, NULL,
+ NULL, NULL);
+
+ GST_DEBUG_OBJECT (bin, "received sync packet from SSRC %08x", ssrc);
+ /* ignore SR that is not ours */
+ if (ssrc != stream->ssrc)
+ continue;
+
+ have_sr = TRUE;
+ break;
+ case GST_RTCP_TYPE_SDES:
+ {
+ gboolean more_items, more_entries;
+
+ /* only deal with first SDES, there is only supposed to be one SDES in
+ * the RTCP packet but we deal with bad packets gracefully. Also bail
+ * out if we have not seen an SR item yet. */
+ if (have_sdes || !have_sr)
+ break;
+
+ GST_RTCP_SDES_FOR_ITEMS (more_items, &packet) {
+ /* skip items that are not about the SSRC of the sender */
+ if (gst_rtcp_packet_sdes_get_ssrc (&packet) != ssrc)
+ continue;
+
+ /* find the CNAME entry */
+ GST_RTCP_SDES_FOR_ENTRIES (more_entries, &packet) {
+ GstRTCPSDESType type;
+ guint8 len;
+ guint8 *data;
+
+ gst_rtcp_packet_sdes_get_entry (&packet, &type, &len, &data);
+
+ if (type == GST_RTCP_SDES_CNAME) {
+ GST_RTP_BIN_LOCK (bin);
+ /* associate the stream to CNAME */
+ gst_rtp_bin_associate (bin, stream, len, data,
+ ntptime, extrtptime, base_rtptime, base_time, clock_rate,
+ clock_base);
+ GST_RTP_BIN_UNLOCK (bin);
+ }
+ }
+ }
+ have_sdes = TRUE;
+ break;
+ }
+ default:
+ /* we can ignore these packets */
+ break;
+ }
+ }
+ gst_rtcp_buffer_unmap (&rtcp);
+}
+
+/* create a new stream with @ssrc in @session. Must be called with
+ * RTP_SESSION_LOCK. */
+/* Create a new GstRtpBinStream for @ssrc in @session.
+ *
+ * Requests a jitterbuffer via the request-jitterbuffer signal machinery and,
+ * unless ignore-pt is set, creates an rtpptdemux.  The jitterbuffer is
+ * configured from the rtpbin's properties -- but only for the properties and
+ * signals the (possibly application-provided) element actually exposes,
+ * hence all the g_object_class_find_property()/g_signal_lookup() guards.
+ * The elements are linked and brought to the bin's current target state.
+ *
+ * Returns: a newly allocated GstRtpBinStream (owned by @session->streams),
+ * or NULL on failure (too many streams or element creation failed).
+ * NOTE(review): appears to expect the session/bin lock to be held by the
+ * caller -- confirm at call sites. */
+static GstRtpBinStream *
+create_stream (GstRtpBinSession * session, guint32 ssrc)
+{
+ GstElement *buffer, *demux = NULL;
+ GstRtpBinStream *stream;
+ GstRtpBin *rtpbin;
+ GstState target;
+ GObjectClass *jb_class;
+
+ rtpbin = session->bin;
+
+ /* refuse to create more streams than the max-streams property allows */
+ if (g_slist_length (session->streams) >= rtpbin->max_streams)
+ goto max_streams;
+
+ /* the jitterbuffer may be a custom element supplied by the application
+ * through the request-jitterbuffer signal */
+ if (!(buffer =
+ session_request_element (session, SIGNAL_REQUEST_JITTERBUFFER)))
+ goto no_jitterbuffer;
+
+ /* only demux by payload type when not ignoring PT */
+ if (!rtpbin->ignore_pt) {
+ if (!(demux = gst_element_factory_make ("rtpptdemux", NULL)))
+ goto no_demux;
+ }
+
+ stream = g_new0 (GstRtpBinStream, 1);
+ stream->ssrc = ssrc;
+ stream->bin = rtpbin;
+ stream->session = session;
+ stream->buffer = gst_object_ref (buffer);
+ stream->demux = demux;
+
+ stream->have_sync = FALSE;
+ stream->rt_delta = 0;
+ stream->rtp_delta = 0;
+ stream->percent = 100;
+ /* sentinel meaning "no clock-base known yet" */
+ stream->clock_base = -100 * GST_SECOND;
+ session->streams = g_slist_prepend (session->streams, stream);
+
+ jb_class = G_OBJECT_GET_CLASS (G_OBJECT (buffer));
+
+ /* custom jitterbuffers may lack these signals; only connect when present */
+ if (g_signal_lookup ("request-pt-map", G_OBJECT_TYPE (buffer)) != 0) {
+ /* provide clock_rate to the jitterbuffer when needed */
+ stream->buffer_ptreq_sig = g_signal_connect (buffer, "request-pt-map",
+ (GCallback) pt_map_requested, session);
+ }
+ if (g_signal_lookup ("on-npt-stop", G_OBJECT_TYPE (buffer)) != 0) {
+ stream->buffer_ntpstop_sig = g_signal_connect (buffer, "on-npt-stop",
+ (GCallback) on_npt_stop, stream);
+ }
+
+ /* allow later lookup of the session/stream from the jitterbuffer object */
+ g_object_set_data (G_OBJECT (buffer), "GstRTPBin.session", session);
+ g_object_set_data (G_OBJECT (buffer), "GstRTPBin.stream", stream);
+
+ /* configure latency and packet lost */
+ g_object_set (buffer, "latency", rtpbin->latency_ms, NULL);
+
+ /* forward rtpbin properties, guarding each one because a custom
+ * jitterbuffer is not required to implement all of them */
+ if (g_object_class_find_property (jb_class, "drop-on-latency"))
+ g_object_set (buffer, "drop-on-latency", rtpbin->drop_on_latency, NULL);
+ if (g_object_class_find_property (jb_class, "do-lost"))
+ g_object_set (buffer, "do-lost", rtpbin->do_lost, NULL);
+ if (g_object_class_find_property (jb_class, "mode"))
+ g_object_set (buffer, "mode", rtpbin->buffer_mode, NULL);
+ if (g_object_class_find_property (jb_class, "do-retransmission"))
+ g_object_set (buffer, "do-retransmission", rtpbin->do_retransmission, NULL);
+ if (g_object_class_find_property (jb_class, "max-rtcp-rtp-time-diff"))
+ g_object_set (buffer, "max-rtcp-rtp-time-diff",
+ rtpbin->max_rtcp_rtp_time_diff, NULL);
+ if (g_object_class_find_property (jb_class, "max-dropout-time"))
+ g_object_set (buffer, "max-dropout-time", rtpbin->max_dropout_time, NULL);
+ if (g_object_class_find_property (jb_class, "max-misorder-time"))
+ g_object_set (buffer, "max-misorder-time", rtpbin->max_misorder_time, NULL);
+ if (g_object_class_find_property (jb_class, "rfc7273-sync"))
+ g_object_set (buffer, "rfc7273-sync", rtpbin->rfc7273_sync, NULL);
+ if (g_object_class_find_property (jb_class, "max-ts-offset-adjustment"))
+ g_object_set (buffer, "max-ts-offset-adjustment",
+ rtpbin->max_ts_offset_adjustment, NULL);
+
+ /* let the application configure the freshly created jitterbuffer */
+ g_signal_emit (rtpbin, gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER], 0,
+ buffer, session->id, ssrc);
+
+ if (!rtpbin->ignore_pt)
+ gst_bin_add (GST_BIN_CAST (rtpbin), demux);
+
+ /* link stuff */
+ if (demux)
+ gst_element_link_pads_full (buffer, "src", demux, "sink",
+ GST_PAD_LINK_CHECK_NOTHING);
+
+ if (rtpbin->buffering) {
+ guint64 last_out;
+
+ /* while the bin is buffering, new jitterbuffers must start inactive;
+ * the set-active action signal may be absent on custom elements */
+ if (g_signal_lookup ("set-active", G_OBJECT_TYPE (buffer)) != 0) {
+ GST_INFO_OBJECT (rtpbin,
+ "bin is buffering, set jitterbuffer as not active");
+ g_signal_emit_by_name (buffer, "set-active", FALSE, (gint64) 0,
+ &last_out);
+ }
+ }
+
+
+ /* sync the new elements to the state the bin is heading for */
+ GST_OBJECT_LOCK (rtpbin);
+ target = GST_STATE_TARGET (rtpbin);
+ GST_OBJECT_UNLOCK (rtpbin);
+
+ /* from sink to source */
+ if (demux)
+ gst_element_set_state (demux, target);
+
+ gst_element_set_state (buffer, target);
+
+ return stream;
+
+ /* ERRORS */
+max_streams:
+ {
+ GST_WARNING_OBJECT (rtpbin, "stream exceeds maximum (%d)",
+ rtpbin->max_streams);
+ return NULL;
+ }
+no_jitterbuffer:
+ {
+ g_warning ("rtpbin: could not create rtpjitterbuffer element");
+ return NULL;
+ }
+no_demux:
+ {
+ /* the jitterbuffer was already obtained; drop our reference to it */
+ gst_object_unref (buffer);
+ g_warning ("rtpbin: could not create rtpptdemux element");
+ return NULL;
+ }
+}
+
+/* called with RTP_BIN_LOCK */
+/* Tear down and free @stream: shut down its jitterbuffer and (optional)
+ * ptdemux, disconnect all signal handlers, remove the elements from the
+ * bin/session bookkeeping, and detach the stream from any client it was
+ * associated with (freeing the client when this was its last stream).
+ * The teardown order matters: lock state first so the bin's state changes
+ * cannot resurrect the elements, then set NULL, then disconnect/remove. */
+static void
+free_stream (GstRtpBinStream * stream, GstRtpBin * bin)
+{
+ GstRtpBinSession *sess = stream->session;
+ GSList *clients, *next_client;
+
+ GST_DEBUG_OBJECT (bin, "freeing stream %p", stream);
+
+ /* prevent the bin from changing these elements' state behind our back */
+ gst_element_set_locked_state (stream->buffer, TRUE);
+ if (stream->demux)
+ gst_element_set_locked_state (stream->demux, TRUE);
+
+ gst_element_set_state (stream->buffer, GST_STATE_NULL);
+ if (stream->demux)
+ gst_element_set_state (stream->demux, GST_STATE_NULL);
+
+ /* demux handlers are connected unconditionally when a demux exists,
+ * so they can be disconnected without individual checks */
+ if (stream->demux) {
+ g_signal_handler_disconnect (stream->demux, stream->demux_newpad_sig);
+ g_signal_handler_disconnect (stream->demux, stream->demux_ptreq_sig);
+ g_signal_handler_disconnect (stream->demux, stream->demux_ptchange_sig);
+ g_signal_handler_disconnect (stream->demux, stream->demux_padremoved_sig);
+ }
+
+ /* jitterbuffer handlers are only connected when the element exposes the
+ * corresponding signal, so each id must be checked before disconnecting */
+ if (stream->buffer_handlesync_sig)
+ g_signal_handler_disconnect (stream->buffer, stream->buffer_handlesync_sig);
+ if (stream->buffer_ptreq_sig)
+ g_signal_handler_disconnect (stream->buffer, stream->buffer_ptreq_sig);
+ if (stream->buffer_ntpstop_sig)
+ g_signal_handler_disconnect (stream->buffer, stream->buffer_ntpstop_sig);
+
+ /* drop the session's and our own references to the jitterbuffer */
+ sess->elements = g_slist_remove (sess->elements, stream->buffer);
+ remove_bin_element (stream->buffer, bin);
+ gst_object_unref (stream->buffer);
+
+ if (stream->demux)
+ gst_bin_remove (GST_BIN_CAST (bin), stream->demux);
+
+ /* unlink this stream from whichever client (lip-sync group) holds it;
+ * safe-iteration via next_client/next_stream because we delete links */
+ for (clients = bin->clients; clients; clients = next_client) {
+ GstRtpBinClient *client = (GstRtpBinClient *) clients->data;
+ GSList *streams, *next_stream;
+
+ next_client = g_slist_next (clients);
+
+ for (streams = client->streams; streams; streams = next_stream) {
+ GstRtpBinStream *ostream = (GstRtpBinStream *) streams->data;
+
+ next_stream = g_slist_next (streams);
+
+ if (ostream == stream) {
+ client->streams = g_slist_delete_link (client->streams, streams);
+ /* If this was the last stream belonging to this client,
+ * clean up the client. */
+ if (--client->nstreams == 0) {
+ bin->clients = g_slist_delete_link (bin->clients, clients);
+ free_client (client, bin);
+ break;
+ }
+ }
+ }
+ }
+ g_free (stream);
+}
+
+/* Forward declarations of the vmethod implementations installed below in
+ * gst_rtp_bin_class_init(). */
+
+/* GObject vmethods */
+static void gst_rtp_bin_dispose (GObject * object);
+static void gst_rtp_bin_finalize (GObject * object);
+static void gst_rtp_bin_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_rtp_bin_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+/* GstElement vmethods */
+static GstStateChangeReturn gst_rtp_bin_change_state (GstElement * element,
+    GstStateChange transition);
+static GstPad *gst_rtp_bin_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_rtp_bin_release_pad (GstElement * element, GstPad * pad);
+static void gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message);
+
+/* GType boilerplate: define the type (with private instance data) and
+ * register the "rtpbin" element with the plugin system */
+#define gst_rtp_bin_parent_class parent_class
+G_DEFINE_TYPE_WITH_PRIVATE (GstRtpBin, gst_rtp_bin, GST_TYPE_BIN);
+GST_ELEMENT_REGISTER_DEFINE (rtpbin, "rtpbin", GST_RANK_NONE, GST_TYPE_RTP_BIN);
+
+/* Signal accumulator for element-returning request signals: store the
+ * handler's element in the accumulated return value and keep the emission
+ * going only while no handler has provided one yet. */
+static gboolean
+_gst_element_accumulator (GSignalInvocationHint * ihint,
+    GValue * return_accu, const GValue * handler_return, gpointer dummy)
+{
+ GstElement *provided = g_value_get_object (handler_return);
+
+ GST_DEBUG ("got element %" GST_PTR_FORMAT, provided);
+ g_value_set_object (return_accu, provided);
+
+ /* continue emission only when no element was returned */
+ return provided == NULL;
+}
+
+/* Signal accumulator for caps-returning request signals: store the
+ * handler's caps in the accumulated return value and stop the emission
+ * as soon as any handler supplies a non-NULL caps. */
+static gboolean
+_gst_caps_accumulator (GSignalInvocationHint * ihint,
+    GValue * return_accu, const GValue * handler_return, gpointer dummy)
+{
+ GstCaps *result = g_value_get_boxed (handler_return);
+
+ GST_DEBUG ("got caps %" GST_PTR_FORMAT, result);
+ g_value_set_boxed (return_accu, result);
+
+ /* continue emission only while no caps were produced */
+ return result == NULL;
+}
+
+static void
+gst_rtp_bin_class_init (GstRtpBinClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBinClass *gstbin_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbin_class = (GstBinClass *) klass;
+
+ gobject_class->dispose = gst_rtp_bin_dispose;
+ gobject_class->finalize = gst_rtp_bin_finalize;
+ gobject_class->set_property = gst_rtp_bin_set_property;
+ gobject_class->get_property = gst_rtp_bin_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_uint ("latency", "Buffer latency in ms",
+ "Default amount of ms to buffer in the jitterbuffers", 0,
+ G_MAXUINT, DEFAULT_LATENCY_MS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DROP_ON_LATENCY,
+ g_param_spec_boolean ("drop-on-latency",
+ "Drop buffers when maximum latency is reached",
+ "Tells the jitterbuffer to never exceed the given latency in size",
+ DEFAULT_DROP_ON_LATENCY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin::request-pt-map:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @pt: the pt
+ *
+ * Request the payload type as #GstCaps for @pt in @session.
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_PT_MAP] =
+ g_signal_new ("request-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, request_pt_map),
+ _gst_caps_accumulator, NULL, NULL, GST_TYPE_CAPS, 2, G_TYPE_UINT,
+ G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::payload-type-change:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @pt: the pt
+ *
+ * Signal that the current payload type changed to @pt in @session.
+ */
+ gst_rtp_bin_signals[SIGNAL_PAYLOAD_TYPE_CHANGE] =
+ g_signal_new ("payload-type-change", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, payload_type_change),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::clear-pt-map:
+ * @rtpbin: the object which received the signal
+ *
+ * Clear all previously cached pt-mapping obtained with
+ * #GstRtpBin::request-pt-map.
+ */
+ gst_rtp_bin_signals[SIGNAL_CLEAR_PT_MAP] =
+ g_signal_new ("clear-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ clear_pt_map), NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ /**
+ * GstRtpBin::reset-sync:
+ * @rtpbin: the object which received the signal
+ *
+ * Reset all currently configured lip-sync parameters and require new SR
+ * packets for all streams before lip-sync is attempted again.
+ */
+ gst_rtp_bin_signals[SIGNAL_RESET_SYNC] =
+ g_signal_new ("reset-sync", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ reset_sync), NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ /**
+ * GstRtpBin::get-session:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the related GstRtpSession as #GstElement related with session @id.
+ *
+ * Since: 1.8
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_SESSION] =
+ g_signal_new ("get-session", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_session), NULL, NULL, NULL, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::get-internal-session:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the internal RTPSession object as #GObject in session @id.
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_INTERNAL_SESSION] =
+ g_signal_new ("get-internal-session", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_internal_session), NULL, NULL, NULL, RTP_TYPE_SESSION, 1,
+ G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::get-internal-storage:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the internal RTPStorage object as #GObject in session @id. This
+ * is the internal storage used by the RTPStorage element, which is used to
+ * keep a backlog of received RTP packets for the session @id.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_INTERNAL_STORAGE] =
+ g_signal_new ("get-internal-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_internal_storage), NULL, NULL, NULL, G_TYPE_OBJECT, 1,
+ G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::get-storage:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ *
+ * Request the RTPStorage element as #GObject in session @id. This element
+ * is used to keep a backlog of received RTP packets for the session @id.
+ *
+ * Since: 1.16
+ */
+ gst_rtp_bin_signals[SIGNAL_GET_STORAGE] =
+ g_signal_new ("get-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ get_storage), NULL, NULL, NULL, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::clear-ssrc:
+ * @rtpbin: the object which received the signal
+ * @id: the session id
+ * @ssrc: the ssrc
+ *
+ * Remove all pads from rtpssrcdemux element associated with the specified
+ * ssrc. This delegate the action signal to the rtpssrcdemux element
+ * associated with the specified session.
+ *
+ * Since: 1.20
+ */
+ gst_rtp_bin_signals[SIGNAL_CLEAR_SSRC] =
+ g_signal_new ("clear-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRtpBinClass,
+ clear_ssrc), NULL, NULL, NULL, G_TYPE_NONE, 2,
+ G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-new-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a new SSRC that entered @session.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_NEW_SSRC] =
+ g_signal_new ("on-new-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_new_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-collision:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify when we have an SSRC collision
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_COLLISION] =
+ g_signal_new ("on-ssrc-collision", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_collision),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-validated:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a new SSRC that became validated.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_VALIDATED] =
+ g_signal_new ("on-ssrc-validated", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_validated),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-active:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a SSRC that is active, i.e., sending RTCP.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_ACTIVE] =
+ g_signal_new ("on-ssrc-active", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_active),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-ssrc-sdes:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a SSRC that is active, i.e., sending RTCP.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SSRC_SDES] =
+ g_signal_new ("on-ssrc-sdes", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_ssrc_sdes),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-bye-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that became inactive because of a BYE packet.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_BYE_SSRC] =
+ g_signal_new ("on-bye-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_bye_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-bye-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that has timed out because of BYE
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_BYE_TIMEOUT] =
+ g_signal_new ("on-bye-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_bye_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that has timed out
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_TIMEOUT] =
+ g_signal_new ("on-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-sender-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a sender SSRC that has timed out and became a receiver
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SENDER_TIMEOUT] =
+ g_signal_new ("on-sender-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_sender_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-npt-stop:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify that SSRC sender has sent data up to the configured NPT stop time.
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_NPT_STOP] =
+ g_signal_new ("on-npt-stop", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_npt_stop),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtp-encoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTP encoder element for the given @session. The encoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no encoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTP_ENCODER] =
+ g_signal_new ("request-rtp-encoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtp_encoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtp-decoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTP decoder element for the given @session. The decoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no encoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTP_DECODER] =
+ g_signal_new ("request-rtp-decoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtp_decoder), _gst_element_accumulator, NULL,
+ NULL, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtcp-encoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTCP encoder element for the given @session. The encoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no encoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTCP_ENCODER] =
+ g_signal_new ("request-rtcp-encoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtcp_encoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-rtcp-decoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an RTCP decoder element for the given @session. The decoder
+ * element will be added to the bin if not previously added.
+ *
+ * If no handler is connected, no encoder will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_RTCP_DECODER] =
+ g_signal_new ("request-rtcp-decoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_rtcp_decoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-jitterbuffer:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request a jitterbuffer element for the given @session.
+ *
+ * If no handler is connected, the default jitterbuffer will be used.
+ *
+ * Note: The provided element is expected to conform to the API exposed
+ * by the standard #GstRtpJitterBuffer. Runtime checks will be made to
+ * determine whether it exposes properties and signals before attempting
+ * to set, call or connect to them, and some functionalities of #GstRtpBin
+ * may not be available when that is not the case.
+ *
+ * This should be considered experimental API, as the standard jitterbuffer
+ * API is susceptible to change, provided elements will have to update their
+ * custom jitterbuffer's API to match the API of #GstRtpJitterBuffer if and
+ * when it changes.
+ *
+ * Since: 1.18
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_JITTERBUFFER] =
+ g_signal_new ("request-jitterbuffer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_jitterbuffer), _gst_element_accumulator, NULL,
+ g_cclosure_marshal_generic, GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::new-jitterbuffer:
+ * @rtpbin: the object which received the signal
+ * @jitterbuffer: the new jitterbuffer
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify that a new @jitterbuffer was created for @session and @ssrc.
+ * This signal can, for example, be used to configure @jitterbuffer.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_NEW_JITTERBUFFER] =
+ g_signal_new ("new-jitterbuffer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ new_jitterbuffer), NULL, NULL, NULL,
+ G_TYPE_NONE, 3, GST_TYPE_ELEMENT, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::new-storage:
+ * @rtpbin: the object which received the signal
+ * @storage: the new storage
+ * @session: the session
+ *
+ * Notify that a new @storage was created for @session.
+ * This signal can, for example, be used to configure @storage.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_NEW_STORAGE] =
+ g_signal_new ("new-storage", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ new_storage), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, GST_TYPE_ELEMENT, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-aux-sender:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an AUX sender element for the given @session. The AUX
+ * element will be added to the bin.
+ *
+ * If no handler is connected, no AUX element will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_AUX_SENDER] =
+ g_signal_new ("request-aux-sender", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_aux_sender), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-aux-receiver:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ *
+ * Request an AUX receiver element for the given @session. The AUX
+ * element will be added to the bin.
+ *
+ * If no handler is connected, no AUX element will be used.
+ *
+ * Since: 1.4
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_AUX_RECEIVER] =
+ g_signal_new ("request-aux-receiver", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_aux_receiver), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-fec-decoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session index
+ *
+ * Request a FEC decoder element for the given @session. The element
+ * will be added to the bin after the pt demuxer.
+ *
+ * If no handler is connected, no FEC decoder will be used.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_DECODER] =
+ g_signal_new ("request-fec-decoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_fec_decoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::request-fec-encoder:
+ * @rtpbin: the object which received the signal
+ * @session: the session index
+ *
+ * Request a FEC encoder element for the given @session. The element
+ * will be added to the bin after the RTPSession.
+ *
+ * If no handler is connected, no FEC encoder will be used.
+ *
+ * Since: 1.14
+ */
+ gst_rtp_bin_signals[SIGNAL_REQUEST_FEC_ENCODER] =
+ g_signal_new ("request-fec-encoder", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ request_fec_encoder), _gst_element_accumulator, NULL, NULL,
+ GST_TYPE_ELEMENT, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpBin::on-new-sender-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the sender SSRC
+ *
+ * Notify of a new sender SSRC that entered @session.
+ *
+ * Since: 1.8
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_NEW_SENDER_SSRC] =
+ g_signal_new ("on-new-sender-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass, on_new_sender_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRtpBin::on-sender-ssrc-active:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the sender SSRC
+ *
+ * Notify of a sender SSRC that is active, i.e., sending RTCP.
+ *
+ * Since: 1.8
+ */
+ gst_rtp_bin_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE] =
+ g_signal_new ("on-sender-ssrc-active", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpBinClass,
+ on_sender_ssrc_active), NULL, NULL, NULL,
+ G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
+ | GST_PARAM_DOC_SHOW_DEFAULT));
+
+ g_object_class_install_property (gobject_class, PROP_DO_LOST,
+ g_param_spec_boolean ("do-lost", "Do Lost",
+ "Send an event downstream when a packet is lost", DEFAULT_DO_LOST,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_AUTOREMOVE,
+ g_param_spec_boolean ("autoremove", "Auto Remove",
+ "Automatically remove timed out sources", DEFAULT_AUTOREMOVE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_IGNORE_PT,
+ g_param_spec_boolean ("ignore-pt", "Ignore PT",
+ "Do not demultiplex based on PT values", DEFAULT_IGNORE_PT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_USE_PIPELINE_CLOCK,
+ g_param_spec_boolean ("use-pipeline-clock", "Use pipeline clock",
+ "Use the pipeline running-time to set the NTP time in the RTCP SR messages "
+ "(DEPRECATED: Use ntp-time-source property)",
+ DEFAULT_USE_PIPELINE_CLOCK,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+ /**
+ * GstRtpBin:buffer-mode:
+ *
+ * Control the buffering and timestamping mode used by the jitterbuffer.
+ */
+ g_object_class_install_property (gobject_class, PROP_BUFFER_MODE,
+ g_param_spec_enum ("buffer-mode", "Buffer Mode",
+ "Control the buffering algorithm in use", RTP_TYPE_JITTER_BUFFER_MODE,
+ DEFAULT_BUFFER_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpBin:ntp-sync:
+ *
+ * Set the NTP time from the sender reports as the running-time on the
+ * buffers. When both the sender and receiver have sychronized
+ * running-time, i.e. when the clock and base-time is shared
+ * between the receivers and the and the senders, this option can be
+ * used to synchronize receivers on multiple machines.
+ */
+ g_object_class_install_property (gobject_class, PROP_NTP_SYNC,
+ g_param_spec_boolean ("ntp-sync", "Sync on NTP clock",
+ "Synchronize received streams to the NTP clock", DEFAULT_NTP_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:rtcp-sync:
+ *
+ * If not synchronizing (directly) to the NTP clock, determines how to sync
+ * the various streams.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTCP_SYNC,
+ g_param_spec_enum ("rtcp-sync", "RTCP Sync",
+ "Use of RTCP SR in synchronization", GST_RTP_BIN_RTCP_SYNC_TYPE,
+ DEFAULT_RTCP_SYNC, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:rtcp-sync-interval:
+ *
+ * Determines how often to sync streams using RTCP data.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTCP_SYNC_INTERVAL,
+ g_param_spec_uint ("rtcp-sync-interval", "RTCP Sync Interval",
+ "RTCP SR interval synchronization (ms) (0 = always)",
+ 0, G_MAXUINT, DEFAULT_RTCP_SYNC_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DO_SYNC_EVENT,
+ g_param_spec_boolean ("do-sync-event", "Do Sync Event",
+ "Send event downstream when a stream is synchronized to the sender",
+ DEFAULT_DO_SYNC_EVENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:do-retransmission:
+ *
+ * Enables RTP retransmission on all streams. To control retransmission on
+ * a per-SSRC basis, connect to the #GstRtpBin::new-jitterbuffer signal and
+ * set the #GstRtpJitterBuffer:do-retransmission property on the
+ * #GstRtpJitterBuffer object instead.
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RETRANSMISSION,
+ g_param_spec_boolean ("do-retransmission", "Do retransmission",
+ "Enable retransmission on all streams",
+ DEFAULT_DO_RETRANSMISSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:rtp-profile:
+ *
+ * Sets the default RTP profile of newly created RTP sessions. The
+ * profile can be changed afterwards on a per-session basis.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTP_PROFILE,
+ g_param_spec_enum ("rtp-profile", "RTP Profile",
+ "Default RTP profile of newly created sessions",
+ GST_TYPE_RTP_PROFILE, DEFAULT_RTP_PROFILE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NTP_TIME_SOURCE,
+ g_param_spec_enum ("ntp-time-source", "NTP Time Source",
+ "NTP time source for RTCP packets",
+ gst_rtp_ntp_time_source_get_type (), DEFAULT_NTP_TIME_SOURCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_SYNC_SEND_TIME,
+ g_param_spec_boolean ("rtcp-sync-send-time", "RTCP Sync Send Time",
+ "Use send time or capture time for RTCP sync "
+ "(TRUE = send time, FALSE = capture time)",
+ DEFAULT_RTCP_SYNC_SEND_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_RTCP_RTP_TIME_DIFF,
+ g_param_spec_int ("max-rtcp-rtp-time-diff", "Max RTCP RTP Time Diff",
+ "Maximum amount of time in ms that the RTP time in RTCP SRs "
+ "is allowed to be ahead (-1 disabled)", -1, G_MAXINT,
+ DEFAULT_MAX_RTCP_RTP_TIME_DIFF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_DROPOUT_TIME,
+ g_param_spec_uint ("max-dropout-time", "Max dropout time",
+ "The maximum time (milliseconds) of missing packets tolerated.",
+ 0, G_MAXUINT, DEFAULT_MAX_DROPOUT_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_MISORDER_TIME,
+ g_param_spec_uint ("max-misorder-time", "Max misorder time",
+ "The maximum time (milliseconds) of misordered packets tolerated.",
+ 0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RFC7273_SYNC,
+ g_param_spec_boolean ("rfc7273-sync", "Sync on RFC7273 clock",
+ "Synchronize received streams to the RFC7273 clock "
+ "(requires clock and offset to be provided)", DEFAULT_RFC7273_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_STREAMS,
+ g_param_spec_uint ("max-streams", "Max Streams",
+ "The maximum number of streams to create for one session",
+ 0, G_MAXUINT, DEFAULT_MAX_STREAMS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:max-ts-offset-adjustment:
+ *
+ * Syncing time stamps to NTP time adds a time offset. This parameter
+ * specifies the maximum number of nanoseconds per frame that this time offset
+ * may be adjusted with. This is used to avoid sudden large changes to time
+ * stamps.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ g_param_spec_uint64 ("max-ts-offset-adjustment",
+ "Max Timestamp Offset Adjustment",
+ "The maximum number of nanoseconds per frame that time stamp offsets "
+ "may be adjusted (0 = no limit).", 0, G_MAXUINT64,
+ DEFAULT_MAX_TS_OFFSET_ADJUSTMENT, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:max-ts-offset:
+ *
+ * Used to set an upper limit of how large a time offset may be. This
+ * is used to protect against unrealistic values as a result of either
+ * client,server or clock issues.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET,
+ g_param_spec_int64 ("max-ts-offset", "Max TS Offset",
+ "The maximum absolute value of the time offset in (nanoseconds). "
+ "Note, if the ntp-sync parameter is set the default value is "
+ "changed to 0 (no limit)", 0, G_MAXINT64, DEFAULT_MAX_TS_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:fec-decoders:
+ *
+ * Used to provide a factory used to build the FEC decoder for a
+ * given session, as a command line alternative to
+ * #GstRtpBin::request-fec-decoder.
+ *
+ * Expects a GstStructure in the form session_id (gint) -> factory (string)
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_FEC_DECODERS,
+ g_param_spec_boxed ("fec-decoders", "Fec Decoders",
+ "GstStructure mapping from session index to FEC decoder "
+ "factory, eg "
+ "fec-decoders='fec,0=\"rtpst2022-1-fecdec\\ size-time\\=1000000000\";'",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpBin:fec-encoders:
+ *
+ * Used to provide a factory used to build the FEC encoder for a
+ * given session, as a command line alternative to
+ * #GstRtpBin::request-fec-encoder.
+ *
+ * Expects a GstStructure in the form session_id (gint) -> factory (string)
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_FEC_ENCODERS,
+ g_param_spec_boxed ("fec-encoders", "Fec Encoders",
+ "GstStructure mapping from session index to FEC encoder "
+ "factory, eg "
+ "fec-encoders='fec,0=\"rtpst2022-1-fecenc\\ rows\\=5\\ columns\\=5\";'",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_rtp_bin_change_state);
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_request_new_pad);
+ gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_bin_release_pad);
+
+ /* sink pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_rtp_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_fec_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_rtcp_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_rtp_sink_template);
+
+ /* src pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_recv_rtp_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_rtcp_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_rtp_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &rtpbin_send_fec_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP Bin",
+ "Filter/Network/RTP",
+ "Real-Time Transport Protocol bin",
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ gstbin_class->handle_message = GST_DEBUG_FUNCPTR (gst_rtp_bin_handle_message);
+
+ klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_bin_clear_pt_map);
+ klass->reset_sync = GST_DEBUG_FUNCPTR (gst_rtp_bin_reset_sync);
+ klass->get_session = GST_DEBUG_FUNCPTR (gst_rtp_bin_get_session);
+ klass->get_internal_session =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_get_internal_session);
+ klass->get_storage = GST_DEBUG_FUNCPTR (gst_rtp_bin_get_storage);
+ klass->get_internal_storage =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_get_internal_storage);
+ klass->clear_ssrc = GST_DEBUG_FUNCPTR (gst_rtp_bin_clear_ssrc);
+ klass->request_rtp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder);
+ klass->request_rtp_decoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_decoder);
+ klass->request_rtcp_encoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_encoder);
+ klass->request_rtcp_decoder = GST_DEBUG_FUNCPTR (gst_rtp_bin_request_decoder);
+ klass->request_jitterbuffer =
+ GST_DEBUG_FUNCPTR (gst_rtp_bin_request_jitterbuffer);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_bin_debug, "rtpbin", 0, "RTP bin");
+
+ gst_type_mark_as_plugin_api (GST_RTP_BIN_RTCP_SYNC_TYPE, 0);
+}
+
+/* GObject instance init: set every property field to its compile-time
+ * default and create the default SDES structure plus the (initially empty)
+ * FEC decoder/encoder factory maps. */
+static void
+gst_rtp_bin_init (GstRtpBin * rtpbin)
+{
+ gchar *cname;
+
+ rtpbin->priv = gst_rtp_bin_get_instance_private (rtpbin);
+ g_mutex_init (&rtpbin->priv->bin_lock);
+ g_mutex_init (&rtpbin->priv->dyn_lock);
+
+ rtpbin->latency_ms = DEFAULT_LATENCY_MS;
+ rtpbin->latency_ns = DEFAULT_LATENCY_MS * GST_MSECOND;
+ rtpbin->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
+ rtpbin->do_lost = DEFAULT_DO_LOST;
+ rtpbin->ignore_pt = DEFAULT_IGNORE_PT;
+ rtpbin->ntp_sync = DEFAULT_NTP_SYNC;
+ rtpbin->rtcp_sync = DEFAULT_RTCP_SYNC;
+ rtpbin->rtcp_sync_interval = DEFAULT_RTCP_SYNC_INTERVAL;
+ rtpbin->priv->autoremove = DEFAULT_AUTOREMOVE;
+ rtpbin->buffer_mode = DEFAULT_BUFFER_MODE;
+ rtpbin->use_pipeline_clock = DEFAULT_USE_PIPELINE_CLOCK;
+ rtpbin->send_sync_event = DEFAULT_DO_SYNC_EVENT;
+ rtpbin->do_retransmission = DEFAULT_DO_RETRANSMISSION;
+ rtpbin->rtp_profile = DEFAULT_RTP_PROFILE;
+ rtpbin->ntp_time_source = DEFAULT_NTP_TIME_SOURCE;
+ rtpbin->rtcp_sync_send_time = DEFAULT_RTCP_SYNC_SEND_TIME;
+ rtpbin->max_rtcp_rtp_time_diff = DEFAULT_MAX_RTCP_RTP_TIME_DIFF;
+ rtpbin->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME;
+ rtpbin->max_misorder_time = DEFAULT_MAX_MISORDER_TIME;
+ rtpbin->rfc7273_sync = DEFAULT_RFC7273_SYNC;
+ rtpbin->max_streams = DEFAULT_MAX_STREAMS;
+ rtpbin->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+ rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ /* tracks whether the user explicitly set max-ts-offset; if not, the
+  * ntp-sync setter may override the default (see set_property) */
+ rtpbin->max_ts_offset_is_set = FALSE;
+
+ /* some default SDES entries */
+ /* randomized cname so independent rtpbin instances are distinguishable */
+ cname = g_strdup_printf ("user%u@host-%x", g_random_int (), g_random_int ());
+ rtpbin->sdes = gst_structure_new ("application/x-rtp-source-sdes",
+ "cname", G_TYPE_STRING, cname, "tool", G_TYPE_STRING, "GStreamer", NULL);
+ rtpbin->fec_decoders =
+ gst_structure_new_empty ("application/x-rtp-fec-decoders");
+ rtpbin->fec_encoders =
+ gst_structure_new_empty ("application/x-rtp-fec-encoders");
+ g_free (cname);
+}
+
+/* GObject dispose: free all sessions under the bin lock and chain up.
+ * The sessions list is reset to NULL so a repeated dispose is harmless. */
+static void
+gst_rtp_bin_dispose (GObject * object)
+{
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ GST_RTP_BIN_LOCK (rtpbin);
+ GST_DEBUG_OBJECT (object, "freeing sessions");
+ g_slist_foreach (rtpbin->sessions, (GFunc) free_session, rtpbin);
+ g_slist_free (rtpbin->sessions);
+ rtpbin->sessions = NULL;
+ GST_RTP_BIN_UNLOCK (rtpbin);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject finalize: release the SDES and FEC factory structures and the
+ * two mutexes created in init, then chain up. */
+static void
+gst_rtp_bin_finalize (GObject * object)
+{
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ if (rtpbin->sdes)
+ gst_structure_free (rtpbin->sdes);
+
+ if (rtpbin->fec_decoders)
+ gst_structure_free (rtpbin->fec_decoders);
+
+ if (rtpbin->fec_encoders)
+ gst_structure_free (rtpbin->fec_encoders);
+
+ g_mutex_clear (&rtpbin->priv->bin_lock);
+ g_mutex_clear (&rtpbin->priv->dyn_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+
+/* Replace the bin's SDES structure with a copy of @sdes (NULL is a no-op)
+ * and push the new value to every existing session.
+ * Note the lock order: bin lock outside, object lock inside — the object
+ * lock only protects the bin->sdes pointer swap. */
+static void
+gst_rtp_bin_set_sdes_struct (GstRtpBin * bin, const GstStructure * sdes)
+{
+ GSList *item;
+
+ if (sdes == NULL)
+ return;
+
+ GST_RTP_BIN_LOCK (bin);
+
+ GST_OBJECT_LOCK (bin);
+ if (bin->sdes)
+ gst_structure_free (bin->sdes);
+ bin->sdes = gst_structure_copy (sdes);
+ GST_OBJECT_UNLOCK (bin);
+
+ /* store in all sessions */
+ for (item = bin->sessions; item; item = g_slist_next (item)) {
+ GstRtpBinSession *session = item->data;
+ g_object_set (session->session, "sdes", sdes, NULL);
+ }
+
+ GST_RTP_BIN_UNLOCK (bin);
+}
+
+/* Replace the session-id -> FEC decoder factory map with a copy of
+ * @decoders (NULL is a no-op). Unlike the SDES setter, the new value is
+ * not pushed to existing sessions; it is consulted when a decoder is
+ * needed (see ensure_fec_decoder). */
+static void
+gst_rtp_bin_set_fec_decoders_struct (GstRtpBin * bin,
+ const GstStructure * decoders)
+{
+ if (decoders == NULL)
+ return;
+
+ GST_RTP_BIN_LOCK (bin);
+
+ GST_OBJECT_LOCK (bin);
+ if (bin->fec_decoders)
+ gst_structure_free (bin->fec_decoders);
+ bin->fec_decoders = gst_structure_copy (decoders);
+
+ GST_OBJECT_UNLOCK (bin);
+
+ GST_RTP_BIN_UNLOCK (bin);
+}
+
+/* Replace the session-id -> FEC encoder factory map with a copy of
+ * @encoders (NULL is a no-op). Mirrors the decoder setter above. */
+static void
+gst_rtp_bin_set_fec_encoders_struct (GstRtpBin * bin,
+ const GstStructure * encoders)
+{
+ if (encoders == NULL)
+ return;
+
+ GST_RTP_BIN_LOCK (bin);
+
+ GST_OBJECT_LOCK (bin);
+ if (bin->fec_encoders)
+ gst_structure_free (bin->fec_encoders);
+ bin->fec_encoders = gst_structure_copy (encoders);
+
+ GST_OBJECT_UNLOCK (bin);
+
+ GST_RTP_BIN_UNLOCK (bin);
+}
+
+/* Return a copy of the bin's SDES structure; caller owns the copy. */
+static GstStructure *
+gst_rtp_bin_get_sdes_struct (GstRtpBin * bin)
+{
+ GstStructure *result;
+
+ GST_OBJECT_LOCK (bin);
+ result = gst_structure_copy (bin->sdes);
+ GST_OBJECT_UNLOCK (bin);
+
+ return result;
+}
+
+/* Return a copy of the FEC decoder factory map; caller owns the copy. */
+static GstStructure *
+gst_rtp_bin_get_fec_decoders_struct (GstRtpBin * bin)
+{
+ GstStructure *result;
+
+ GST_OBJECT_LOCK (bin);
+ result = gst_structure_copy (bin->fec_decoders);
+ GST_OBJECT_UNLOCK (bin);
+
+ return result;
+}
+
+/* Return a copy of the FEC encoder factory map; caller owns the copy. */
+static GstStructure *
+gst_rtp_bin_get_fec_encoders_struct (GstRtpBin * bin)
+{
+ GstStructure *result;
+
+ GST_OBJECT_LOCK (bin);
+ result = gst_structure_copy (bin->fec_encoders);
+ GST_OBJECT_UNLOCK (bin);
+
+ return result;
+}
+
+/* GObject set_property: store the new value and, where the property maps
+ * onto child elements, propagate it to existing jitterbuffers and/or
+ * sessions. Properties that only matter at stream-creation time are just
+ * stored. NOTE(review): some fields are written under the bin lock and
+ * some without — presumably the unlocked ones are only read at stream
+ * setup; confirm before changing locking. */
+static void
+gst_rtp_bin_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ switch (prop_id) {
+ case PROP_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->latency_ms = g_value_get_uint (value);
+ rtpbin->latency_ns = rtpbin->latency_ms * GST_MSECOND;
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ /* propagate the property down to the jitterbuffer */
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, "latency", value);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->drop_on_latency = g_value_get_boolean (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ /* propagate the property down to the jitterbuffer */
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "drop-on-latency", value);
+ break;
+ case PROP_SDES:
+ gst_rtp_bin_set_sdes_struct (rtpbin, g_value_get_boxed (value));
+ break;
+ case PROP_DO_LOST:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->do_lost = g_value_get_boolean (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, "do-lost", value);
+ break;
+ case PROP_NTP_SYNC:
+ rtpbin->ntp_sync = g_value_get_boolean (value);
+ /* The default value of max_ts_offset depends on ntp_sync. If user
+ * hasn't set it then change default value */
+ if (!rtpbin->max_ts_offset_is_set) {
+ if (rtpbin->ntp_sync) {
+ rtpbin->max_ts_offset = 0;
+ } else {
+ rtpbin->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ }
+ }
+ break;
+ case PROP_RTCP_SYNC:
+ /* atomic: rtcp_sync is read from streaming threads without a lock */
+ g_atomic_int_set (&rtpbin->rtcp_sync, g_value_get_enum (value));
+ break;
+ case PROP_RTCP_SYNC_INTERVAL:
+ rtpbin->rtcp_sync_interval = g_value_get_uint (value);
+ break;
+ case PROP_IGNORE_PT:
+ rtpbin->ignore_pt = g_value_get_boolean (value);
+ break;
+ case PROP_AUTOREMOVE:
+ rtpbin->priv->autoremove = g_value_get_boolean (value);
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ {
+ GSList *sessions;
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->use_pipeline_clock = g_value_get_boolean (value);
+ /* forward to every existing session element */
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ g_object_set (G_OBJECT (session->session),
+ "use-pipeline-clock", rtpbin->use_pipeline_clock, NULL);
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ }
+ break;
+ case PROP_DO_SYNC_EVENT:
+ rtpbin->send_sync_event = g_value_get_boolean (value);
+ break;
+ case PROP_BUFFER_MODE:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->buffer_mode = g_value_get_enum (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ /* propagate the property down to the jitterbuffer */
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, "mode", value);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->do_retransmission = g_value_get_boolean (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "do-retransmission", value);
+ break;
+ case PROP_RTP_PROFILE:
+ rtpbin->rtp_profile = g_value_get_enum (value);
+ break;
+ case PROP_NTP_TIME_SOURCE:{
+ GSList *sessions;
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->ntp_time_source = g_value_get_enum (value);
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ g_object_set (G_OBJECT (session->session),
+ "ntp-time-source", rtpbin->ntp_time_source, NULL);
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ }
+ case PROP_RTCP_SYNC_SEND_TIME:{
+ GSList *sessions;
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->rtcp_sync_send_time = g_value_get_boolean (value);
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ g_object_set (G_OBJECT (session->session),
+ "rtcp-sync-send-time", rtpbin->rtcp_sync_send_time, NULL);
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ }
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->max_rtcp_rtp_time_diff = g_value_get_int (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-rtcp-rtp-time-diff", value);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->max_dropout_time = g_value_get_uint (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ /* this one is used by both jitterbuffers and sessions */
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-dropout-time", value);
+ gst_rtp_bin_propagate_property_to_session (rtpbin, "max-dropout-time",
+ value);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ GST_RTP_BIN_LOCK (rtpbin);
+ rtpbin->max_misorder_time = g_value_get_uint (value);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-misorder-time", value);
+ gst_rtp_bin_propagate_property_to_session (rtpbin, "max-misorder-time",
+ value);
+ break;
+ case PROP_RFC7273_SYNC:
+ rtpbin->rfc7273_sync = g_value_get_boolean (value);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "rfc7273-sync", value);
+ break;
+ case PROP_MAX_STREAMS:
+ rtpbin->max_streams = g_value_get_uint (value);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ rtpbin->max_ts_offset_adjustment = g_value_get_uint64 (value);
+ gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin,
+ "max-ts-offset-adjustment", value);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ rtpbin->max_ts_offset = g_value_get_int64 (value);
+ /* remember an explicit user setting so PROP_NTP_SYNC won't override it */
+ rtpbin->max_ts_offset_is_set = TRUE;
+ break;
+ case PROP_FEC_DECODERS:
+ gst_rtp_bin_set_fec_decoders_struct (rtpbin, g_value_get_boxed (value));
+ break;
+ case PROP_FEC_ENCODERS:
+ gst_rtp_bin_set_fec_encoders_struct (rtpbin, g_value_get_boxed (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject get_property: read back the stored values. Boxed properties
+ * (sdes, fec-decoders, fec-encoders) return freshly-allocated copies via
+ * g_value_take_boxed. */
+static void
+gst_rtp_bin_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (object);
+
+ switch (prop_id) {
+ case PROP_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_uint (value, rtpbin->latency_ms);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_boolean (value, rtpbin->drop_on_latency);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_SDES:
+ g_value_take_boxed (value, gst_rtp_bin_get_sdes_struct (rtpbin));
+ break;
+ case PROP_DO_LOST:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_boolean (value, rtpbin->do_lost);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_IGNORE_PT:
+ g_value_set_boolean (value, rtpbin->ignore_pt);
+ break;
+ case PROP_NTP_SYNC:
+ g_value_set_boolean (value, rtpbin->ntp_sync);
+ break;
+ case PROP_RTCP_SYNC:
+ g_value_set_enum (value, g_atomic_int_get (&rtpbin->rtcp_sync));
+ break;
+ case PROP_RTCP_SYNC_INTERVAL:
+ g_value_set_uint (value, rtpbin->rtcp_sync_interval);
+ break;
+ case PROP_AUTOREMOVE:
+ g_value_set_boolean (value, rtpbin->priv->autoremove);
+ break;
+ case PROP_BUFFER_MODE:
+ g_value_set_enum (value, rtpbin->buffer_mode);
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ g_value_set_boolean (value, rtpbin->use_pipeline_clock);
+ break;
+ case PROP_DO_SYNC_EVENT:
+ g_value_set_boolean (value, rtpbin->send_sync_event);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_boolean (value, rtpbin->do_retransmission);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_RTP_PROFILE:
+ g_value_set_enum (value, rtpbin->rtp_profile);
+ break;
+ case PROP_NTP_TIME_SOURCE:
+ g_value_set_enum (value, rtpbin->ntp_time_source);
+ break;
+ case PROP_RTCP_SYNC_SEND_TIME:
+ g_value_set_boolean (value, rtpbin->rtcp_sync_send_time);
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ GST_RTP_BIN_LOCK (rtpbin);
+ g_value_set_int (value, rtpbin->max_rtcp_rtp_time_diff);
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ g_value_set_uint (value, rtpbin->max_dropout_time);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ g_value_set_uint (value, rtpbin->max_misorder_time);
+ break;
+ case PROP_RFC7273_SYNC:
+ g_value_set_boolean (value, rtpbin->rfc7273_sync);
+ break;
+ case PROP_MAX_STREAMS:
+ g_value_set_uint (value, rtpbin->max_streams);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ g_value_set_uint64 (value, rtpbin->max_ts_offset_adjustment);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ g_value_set_int64 (value, rtpbin->max_ts_offset);
+ break;
+ case PROP_FEC_DECODERS:
+ g_value_take_boxed (value, gst_rtp_bin_get_fec_decoders_struct (rtpbin));
+ break;
+ case PROP_FEC_ENCODERS:
+ g_value_take_boxed (value, gst_rtp_bin_get_fec_encoders_struct (rtpbin));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GstBin::handle_message override.
+ *
+ * - ELEMENT messages carrying "application/x-rtp-source-sdes" get the
+ *   originating session id added to their structure before forwarding.
+ * - BUFFERING messages from individual jitterbuffers are aggregated: the
+ *   minimum percent across all streams of all sessions is computed and a
+ *   single replacement buffering message is posted. When the aggregate
+ *   crosses 100% (or drops below it), all jitterbuffers are activated
+ *   (resp. paused) via their "set-active" action signal, with a running-
+ *   time offset so output timestamps keep progressing.
+ * - everything else is forwarded unchanged. */
+static void
+gst_rtp_bin_handle_message (GstBin * bin, GstMessage * message)
+{
+ GstRtpBin *rtpbin;
+
+ rtpbin = GST_RTP_BIN (bin);
+
+ switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_ELEMENT:
+ {
+ const GstStructure *s = gst_message_get_structure (message);
+
+ /* we change the structure name and add the session ID to it */
+ if (gst_structure_has_name (s, "application/x-rtp-source-sdes")) {
+ GstRtpBinSession *sess;
+
+ /* find the session we set it as object data */
+ sess = g_object_get_data (G_OBJECT (GST_MESSAGE_SRC (message)),
+ "GstRTPBin.session");
+
+ if (G_LIKELY (sess)) {
+ message = gst_message_make_writable (message);
+ s = gst_message_get_structure (message);
+ gst_structure_set ((GstStructure *) s, "session", G_TYPE_UINT,
+ sess->id, NULL);
+ }
+ }
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ case GST_MESSAGE_BUFFERING:
+ {
+ gint percent;
+ gint min_percent = 100;
+ GSList *sessions, *streams;
+ GstRtpBinStream *stream;
+ gboolean change = FALSE, active = FALSE;
+ GstClockTime min_out_time;
+ GstBufferingMode mode;
+ gint avg_in, avg_out;
+ gint64 buffering_left;
+
+ gst_message_parse_buffering (message, &percent);
+ gst_message_parse_buffering_stats (message, &mode, &avg_in, &avg_out,
+ &buffering_left);
+
+ /* the jitterbuffer that posted this set itself as object data */
+ stream =
+ g_object_get_data (G_OBJECT (GST_MESSAGE_SRC (message)),
+ "GstRTPBin.stream");
+
+ GST_DEBUG_OBJECT (bin, "got percent %d from stream %p", percent, stream);
+
+ /* get the stream */
+ if (G_LIKELY (stream)) {
+ GST_RTP_BIN_LOCK (rtpbin);
+ /* fill in the percent */
+ stream->percent = percent;
+
+ /* calculate the min value for all streams */
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_RTP_SESSION_LOCK (session);
+ if (session->streams) {
+ for (streams = session->streams; streams;
+ streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+
+ GST_DEBUG_OBJECT (bin, "stream %p percent %d", stream,
+ stream->percent);
+
+ /* find min percent */
+ if (min_percent > stream->percent)
+ min_percent = stream->percent;
+ }
+ } else {
+ GST_INFO_OBJECT (bin,
+ "session has no streams, setting min_percent to 0");
+ min_percent = 0;
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_DEBUG_OBJECT (bin, "min percent %d", min_percent);
+
+ /* state machine: flip buffering flag only when the aggregate crosses
+ * the 100% boundary, and remember that we must (de)activate */
+ if (rtpbin->buffering) {
+ if (min_percent == 100) {
+ rtpbin->buffering = FALSE;
+ active = TRUE;
+ change = TRUE;
+ }
+ } else {
+ if (min_percent < 100) {
+ /* pause the streams */
+ rtpbin->buffering = TRUE;
+ active = FALSE;
+ change = TRUE;
+ }
+ }
+ GST_RTP_BIN_UNLOCK (rtpbin);
+
+ gst_message_unref (message);
+
+ /* make a new buffering message with the min value */
+ message =
+ gst_message_new_buffering (GST_OBJECT_CAST (bin), min_percent);
+ gst_message_set_buffering_stats (message, mode, avg_in, avg_out,
+ buffering_left);
+
+ if (G_UNLIKELY (change)) {
+ GstClock *clock;
+ guint64 running_time = 0;
+ guint64 offset = 0;
+
+ /* figure out the running time when we have a clock */
+ if (G_LIKELY ((clock =
+ gst_element_get_clock (GST_ELEMENT_CAST (bin))))) {
+ guint64 now, base_time;
+
+ now = gst_clock_get_time (clock);
+ base_time = gst_element_get_base_time (GST_ELEMENT_CAST (bin));
+ running_time = now - base_time;
+ gst_object_unref (clock);
+ }
+ GST_DEBUG_OBJECT (bin,
+ "running time now %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time));
+
+ GST_RTP_BIN_LOCK (rtpbin);
+
+ /* when we reactivate, calculate the offsets so that all streams have
+ * an output time that is at least as big as the running_time */
+ offset = 0;
+ if (active) {
+ if (running_time > rtpbin->buffer_start) {
+ offset = running_time - rtpbin->buffer_start;
+ /* subtract the configured latency once; anything shorter than
+ * the latency is absorbed rather than offset */
+ if (offset >= rtpbin->latency_ns)
+ offset -= rtpbin->latency_ns;
+ else
+ offset = 0;
+ }
+ }
+
+ /* pause all streams */
+ min_out_time = -1;
+ for (sessions = rtpbin->sessions; sessions;
+ sessions = g_slist_next (sessions)) {
+ GstRtpBinSession *session = (GstRtpBinSession *) sessions->data;
+
+ GST_RTP_SESSION_LOCK (session);
+ for (streams = session->streams; streams;
+ streams = g_slist_next (streams)) {
+ GstRtpBinStream *stream = (GstRtpBinStream *) streams->data;
+ GstElement *element = stream->buffer;
+ guint64 last_out = -1;
+
+ /* not every buffer element implements set-active, check first */
+ if (g_signal_lookup ("set-active", G_OBJECT_TYPE (element)) != 0) {
+ g_signal_emit_by_name (element, "set-active", active, offset,
+ &last_out);
+ }
+
+ if (!active) {
+ g_object_get (element, "percent", &stream->percent, NULL);
+
+ if (last_out == -1)
+ last_out = 0;
+ if (min_out_time == -1 || last_out < min_out_time)
+ min_out_time = last_out;
+ }
+
+ GST_DEBUG_OBJECT (bin,
+ "setting %p to %d, offset %" GST_TIME_FORMAT ", last %"
+ GST_TIME_FORMAT ", percent %d", element, active,
+ GST_TIME_ARGS (offset), GST_TIME_ARGS (last_out),
+ stream->percent);
+ }
+ GST_RTP_SESSION_UNLOCK (session);
+ }
+ GST_DEBUG_OBJECT (bin,
+ "min out time %" GST_TIME_FORMAT, GST_TIME_ARGS (min_out_time));
+
+ /* the buffer_start is the min out time of all paused jitterbuffers */
+ if (!active)
+ rtpbin->buffer_start = min_out_time;
+
+ GST_RTP_BIN_UNLOCK (rtpbin);
+ }
+ }
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ default:
+ {
+ GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+ break;
+ }
+ }
+}
+
+/* GstElement::change_state override: manage the shutdown flag around the
+ * READY<->PAUSED transitions so that dynamic pad callbacks racing with
+ * teardown see the flag and bail out (see GST_RTP_BIN_SHUTDOWN_LOCK). */
+static GstStateChangeReturn
+gst_rtp_bin_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn res;
+ GstRtpBin *rtpbin;
+ GstRtpBinPrivate *priv;
+
+ rtpbin = GST_RTP_BIN (element);
+ priv = rtpbin->priv;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ priv->last_ntpnstime = 0;
+ GST_LOG_OBJECT (rtpbin, "clearing shutdown flag");
+ g_atomic_int_set (&priv->shutdown, 0);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ GST_LOG_OBJECT (rtpbin, "setting shutdown flag");
+ g_atomic_int_set (&priv->shutdown, 1);
+ /* wait for all callbacks to end by taking the lock. No new callbacks will
+ * be able to happen as we set the shutdown flag. */
+ GST_RTP_BIN_DYN_LOCK (rtpbin);
+ GST_LOG_OBJECT (rtpbin, "dynamic lock taken, we can continue shutdown");
+ GST_RTP_BIN_DYN_UNLOCK (rtpbin);
+ break;
+ default:
+ break;
+ }
+
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ /* no downward-transition work needed currently; cases kept for clarity */
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+ return res;
+}
+
+/* Emit the request signal @signal for @session and, if the application
+ * returned an element, add it to the bin (bin_manage_element) and track it
+ * in session->elements. Returns NULL when nothing was provided or the
+ * element could not be managed (the element is unreffed in that case). */
+static GstElement *
+session_request_element (GstRtpBinSession * session, guint signal)
+{
+ GstElement *element = NULL;
+ GstRtpBin *bin = session->bin;
+
+ g_signal_emit (bin, gst_rtp_bin_signals[signal], 0, session->id, &element);
+
+ if (element) {
+ if (!bin_manage_element (bin, element))
+ goto manage_failed;
+ session->elements = g_slist_prepend (session->elements, element);
+ }
+ return element;
+
+ /* ERRORS */
+manage_failed:
+ {
+ GST_WARNING_OBJECT (bin, "unable to manage element");
+ gst_object_unref (element);
+ return NULL;
+ }
+}
+
+/* gst_pad_sticky_events_foreach callback: store each sticky event from the
+ * source pad onto the pad passed via @user_data. Always returns TRUE so
+ * iteration continues over all events. */
+static gboolean
+copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+ GstPad *gpad = GST_PAD_CAST (user_data);
+
+ GST_DEBUG_OBJECT (gpad, "store sticky event %" GST_PTR_FORMAT, *event);
+ gst_pad_store_sticky_event (gpad, *event);
+
+ return TRUE;
+}
+
+/* Make sure @session has a FEC decoder, creating one on demand.
+ *
+ * The decoder is looked up first in the fec-decoders property (a
+ * GstStructure mapping session id -> gst_parse bin description) and, when
+ * that yields nothing, requested via the request-fec-decoder signal.
+ * Returns TRUE when session->fec_decoder is non-NULL afterwards.
+ *
+ * Fixes over the previous version: on a parse failure the GError is now
+ * released, and bin_manage_element() / the elements list are only touched
+ * when a decoder was actually created (previously a NULL element was
+ * "managed" and prepended to session->elements). */
+static gboolean
+ensure_fec_decoder (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ const gchar *factory;
+ gchar *sess_id_str;
+
+ if (session->fec_decoder)
+ goto done;
+
+ sess_id_str = g_strdup_printf ("%u", session->id);
+ factory = gst_structure_get_string (rtpbin->fec_decoders, sess_id_str);
+ g_free (sess_id_str);
+
+ /* First try the property */
+ if (factory) {
+ GError *err = NULL;
+
+ session->fec_decoder =
+ gst_parse_bin_from_description_full (factory, TRUE, NULL,
+ GST_PARSE_FLAG_NO_SINGLE_ELEMENT_BINS | GST_PARSE_FLAG_FATAL_ERRORS,
+ &err);
+ if (!session->fec_decoder) {
+ GST_ERROR_OBJECT (rtpbin, "Failed to build decoder from factory: %s",
+ err->message);
+ g_clear_error (&err);
+ } else {
+ /* only manage and track the element when it actually exists */
+ bin_manage_element (session->bin, session->fec_decoder);
+ session->elements =
+ g_slist_prepend (session->elements, session->fec_decoder);
+ GST_INFO_OBJECT (rtpbin, "Built FEC decoder: %" GST_PTR_FORMAT
+ " for session %u", session->fec_decoder, session->id);
+ }
+ }
+
+ /* Fallback to the signal */
+ if (!session->fec_decoder)
+ session->fec_decoder =
+ session_request_element (session, SIGNAL_REQUEST_FEC_DECODER);
+
+done:
+ return session->fec_decoder != NULL;
+}
+
+/* Expose a receive pad for (session, ssrc, pt) on the rtpbin.
+ *
+ * When the session has a storage and a FEC decoder is available, the pad
+ * is first routed through the decoder and its src pad is exposed instead.
+ * The pad is then ghosted using the recv_rtp_src_%u_%u_%u template, the
+ * ghost pad is activated, sticky events are copied over, and the ghost is
+ * added to the bin. The ghost pad is remembered as object data on @pad so
+ * payload_pad_removed() can find and remove it later.
+ *
+ * Fix over the previous version: the sink/src failure warnings said
+ * "fec encoder" although the pads fetched here belong to the FEC decoder. */
+static void
+expose_recv_src_pad (GstRtpBin * rtpbin, GstPad * pad, GstRtpBinStream * stream,
+ guint8 pt)
+{
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ gchar *padname;
+ GstPad *gpad;
+
+ gst_object_ref (pad);
+
+ if (stream->session->storage && !stream->session->fec_decoder) {
+ if (ensure_fec_decoder (rtpbin, stream->session)) {
+ GstElement *fec_decoder = stream->session->fec_decoder;
+ GstPad *sinkpad, *srcpad;
+ GstPadLinkReturn ret;
+
+ sinkpad = gst_element_get_static_pad (fec_decoder, "sink");
+
+ if (!sinkpad)
+ goto fec_decoder_sink_failed;
+
+ ret = gst_pad_link (pad, sinkpad);
+ gst_object_unref (sinkpad);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto fec_decoder_link_failed;
+
+ srcpad = gst_element_get_static_pad (fec_decoder, "src");
+
+ if (!srcpad)
+ goto fec_decoder_src_failed;
+
+ /* continue with the decoder's src pad; carry the sticky events over */
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, srcpad);
+ gst_object_unref (pad);
+ pad = srcpad;
+ }
+ }
+
+ GST_RTP_BIN_SHUTDOWN_LOCK (rtpbin, shutdown);
+
+ /* ghost the pad to the parent */
+ klass = GST_ELEMENT_GET_CLASS (rtpbin);
+ templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%u_%u_%u");
+ padname = g_strdup_printf ("recv_rtp_src_%u_%u_%u",
+ stream->session->id, stream->ssrc, pt);
+ gpad = gst_ghost_pad_new_from_template (padname, pad, templ);
+ g_free (padname);
+ g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", gpad);
+
+ gst_pad_set_active (gpad, TRUE);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, gpad);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), gpad);
+
+done:
+ gst_object_unref (pad);
+
+ return;
+
+shutdown:
+ {
+ GST_DEBUG ("ignoring, we are shutting down");
+ goto done;
+ }
+fec_decoder_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get fec decoder sink pad for session %u",
+ stream->session->id);
+ goto done;
+ }
+fec_decoder_src_failed:
+ {
+ g_warning ("rtpbin: failed to get fec decoder src pad for session %u",
+ stream->session->id);
+ goto done;
+ }
+fec_decoder_link_failed:
+ {
+ g_warning ("rtpbin: failed to link fec decoder for session %u",
+ stream->session->id);
+ goto done;
+ }
+}
+
+/* a new pad (SSRC) was created in @session. This signal is emitted from the
+ * payload demuxer. */
+/* "new-payload-type" signal handler on rtpptdemux: expose the demuxed pad
+ * for payload type @pt as a ghost pad on the rtpbin. */
+static void
+new_payload_found (GstElement * element, guint pt, GstPad * pad,
+ GstRtpBinStream * stream)
+{
+ GstRtpBin *rtpbin;
+
+ rtpbin = stream->bin;
+
+ GST_DEBUG_OBJECT (rtpbin, "new payload pad %u", pt);
+
+ expose_recv_src_pad (rtpbin, pad, stream, pt);
+}
+
+/* "pad-removed" signal handler on the payload demuxer: find the ghost pad
+ * that expose_recv_src_pad() attached to @pad as object data, deactivate
+ * it and remove it from the bin. Runs under the dynamic lock so it cannot
+ * race with shutdown. */
+static void
+payload_pad_removed (GstElement * element, GstPad * pad,
+ GstRtpBinStream * stream)
+{
+ GstRtpBin *rtpbin;
+ GstPad *gpad;
+
+ rtpbin = stream->bin;
+
+ GST_DEBUG ("payload pad removed");
+
+ GST_RTP_BIN_DYN_LOCK (rtpbin);
+ if ((gpad = g_object_get_data (G_OBJECT (pad), "GstRTPBin.ghostpad"))) {
+ g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", NULL);
+
+ gst_pad_set_active (gpad, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), gpad);
+ }
+ GST_RTP_BIN_DYN_UNLOCK (rtpbin);
+}
+
+/* "request-pt-map" handler: resolve the caps for payload type @pt in
+ * @session via get_pt_map(). Returns NULL when no caps could be found;
+ * the caller (demuxer/jitterbuffer) handles that case. */
+static GstCaps *
+pt_map_requested (GstElement * element, guint pt, GstRtpBinSession * session)
+{
+ GstRtpBin *rtpbin;
+ GstCaps *caps;
+
+ rtpbin = session->bin;
+
+ GST_DEBUG_OBJECT (rtpbin, "payload map requested for pt %u in session %u", pt,
+ session->id);
+
+ caps = get_pt_map (session, pt);
+ if (!caps)
+ goto no_caps;
+
+ return caps;
+
+ /* ERRORS */
+no_caps:
+ {
+ GST_DEBUG_OBJECT (rtpbin, "could not get caps");
+ return NULL;
+ }
+}
+
+/* "request-pt-map" handler specifically for rtpptdemux: like
+ * pt_map_requested(), but when the resolved caps carry is-fec=true the
+ * payload type is additionally appended to the demuxer's
+ * "ignored-payload-types" array so FEC packets are not exposed as media
+ * streams. */
+static GstCaps *
+ptdemux_pt_map_requested (GstElement * element, guint pt,
+ GstRtpBinSession * session)
+{
+ GstCaps *ret = pt_map_requested (element, pt, session);
+
+ if (ret && gst_caps_get_size (ret) == 1) {
+ const GstStructure *s = gst_caps_get_structure (ret, 0);
+ gboolean is_fec;
+
+ if (gst_structure_get_boolean (s, "is-fec", &is_fec) && is_fec) {
+ GValue v = G_VALUE_INIT;
+ GValue v2 = G_VALUE_INIT;
+
+ GST_INFO_OBJECT (session->bin, "Will ignore FEC pt %u in session %u", pt,
+ session->id);
+ /* read-modify-write the GstValueArray property */
+ g_value_init (&v, GST_TYPE_ARRAY);
+ g_value_init (&v2, G_TYPE_INT);
+ g_object_get_property (G_OBJECT (element), "ignored-payload-types", &v);
+ g_value_set_int (&v2, pt);
+ gst_value_array_append_value (&v, &v2);
+ g_value_unset (&v2);
+ g_object_set_property (G_OBJECT (element), "ignored-payload-types", &v);
+ g_value_unset (&v);
+ }
+ }
+
+ return ret;
+}
+
+/* "payload-type-change" handler on the demuxer: forward the notification
+ * as the rtpbin's own payload-type-change signal, adding the session id. */
+static void
+payload_type_change (GstElement * element, guint pt, GstRtpBinSession * session)
+{
+ GST_DEBUG_OBJECT (session->bin,
+ "emitting signal for pt type changed to %u in session %u", pt,
+ session->id);
+
+ g_signal_emit (session->bin, gst_rtp_bin_signals[SIGNAL_PAYLOAD_TYPE_CHANGE],
+ 0, session->id, pt);
+}
+
+/* emitted when caps changed for the session */
+/* notify::caps handler: cache the pad's caps in the session's pt map,
+ * keyed by the "payload" field. The hash table takes ownership of the
+ * caps ref obtained from g_object_get. Caps without a payload field are
+ * ignored (and unreffed). */
+static void
+caps_changed (GstPad * pad, GParamSpec * pspec, GstRtpBinSession * session)
+{
+ GstRtpBin *bin;
+ GstCaps *caps;
+ gint payload;
+ const GstStructure *s;
+
+ bin = session->bin;
+
+ g_object_get (pad, "caps", &caps, NULL);
+
+ if (caps == NULL)
+ return;
+
+ GST_DEBUG_OBJECT (bin, "got caps %" GST_PTR_FORMAT, caps);
+
+ s = gst_caps_get_structure (caps, 0);
+
+ /* get payload, finish when it's not there */
+ if (!gst_structure_get_int (s, "payload", &payload)) {
+ gst_caps_unref (caps);
+ return;
+ }
+
+ GST_RTP_SESSION_LOCK (session);
+ GST_DEBUG_OBJECT (bin, "insert caps for payload %d", payload);
+ g_hash_table_insert (session->ptmap, GINT_TO_POINTER (payload), caps);
+ GST_RTP_SESSION_UNLOCK (session);
+}
+
+/* a new pad (SSRC) was created in @session: create a GstRtpBinStream for it,
+ * link the demuxer pad through the optional FEC decoder into the stream's
+ * jitterbuffer, hook up the jitterbuffer's RTCP and sync signals and, when
+ * there is no payload demuxer, expose the jitterbuffer src pad directly. */
+static void
+new_ssrc_pad_found (GstElement * element, guint ssrc, GstPad * pad,
+ GstRtpBinSession * session)
+{
+ GstRtpBin *rtpbin;
+ GstRtpBinStream *stream;
+ GstPad *sinkpad, *srcpad;
+ gchar *padname;
+
+ rtpbin = session->bin;
+
+ GST_DEBUG_OBJECT (rtpbin, "new SSRC pad %08x, %s:%s", ssrc,
+ GST_DEBUG_PAD_NAME (pad));
+
+ /* jumps to the shutdown label below when the bin is shutting down */
+ GST_RTP_BIN_SHUTDOWN_LOCK (rtpbin, shutdown);
+
+ GST_RTP_SESSION_LOCK (session);
+
+ /* create new stream */
+ stream = create_stream (session, ssrc);
+ if (!stream)
+ goto no_stream;
+
+ /* get pad and link */
+ GST_DEBUG_OBJECT (rtpbin, "linking jitterbuffer RTP");
+ padname = g_strdup_printf ("src_%u", ssrc);
+ srcpad = gst_element_get_static_pad (element, padname);
+ g_free (padname);
+
+ /* route through the FEC decoder first when one is configured */
+ if (session->fec_decoder) {
+ sinkpad = gst_element_get_static_pad (session->fec_decoder, "sink");
+ gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkpad);
+ gst_object_unref (srcpad);
+ srcpad = gst_element_get_static_pad (session->fec_decoder, "src");
+ }
+
+ sinkpad = gst_element_get_static_pad (stream->buffer, "sink");
+ gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkpad);
+ gst_object_unref (srcpad);
+
+ /* the RTCP sink is optional on the jitterbuffer element */
+ sinkpad = gst_element_request_pad_simple (stream->buffer, "sink_rtcp");
+ if (sinkpad) {
+ GST_DEBUG_OBJECT (rtpbin, "linking jitterbuffer RTCP");
+ padname = g_strdup_printf ("rtcp_src_%u", ssrc);
+ srcpad = gst_element_get_static_pad (element, padname);
+ g_free (padname);
+ gst_pad_link_full (srcpad, sinkpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkpad);
+ gst_object_unref (srcpad);
+ }
+
+ /* only connect when the buffer element actually provides the signal */
+ if (g_signal_lookup ("handle-sync", G_OBJECT_TYPE (stream->buffer)) != 0) {
+ /* connect to the RTCP sync signal from the jitterbuffer */
+ GST_DEBUG_OBJECT (rtpbin, "connecting sync signal");
+ stream->buffer_handlesync_sig = g_signal_connect (stream->buffer,
+ "handle-sync", (GCallback) gst_rtp_bin_handle_sync, stream);
+ }
+
+ if (stream->demux) {
+ /* connect to the new-pad signal of the payload demuxer, this will expose the
+ * new pad by ghosting it. */
+ stream->demux_newpad_sig = g_signal_connect (stream->demux,
+ "new-payload-type", (GCallback) new_payload_found, stream);
+ stream->demux_padremoved_sig = g_signal_connect (stream->demux,
+ "pad-removed", (GCallback) payload_pad_removed, stream);
+
+ /* connect to the request-pt-map signal. This signal will be emitted by the
+ * demuxer so that it can apply a proper caps on the buffers for the
+ * depayloaders. */
+ stream->demux_ptreq_sig = g_signal_connect (stream->demux,
+ "request-pt-map", (GCallback) ptdemux_pt_map_requested, session);
+ /* connect to the signal so it can be forwarded. */
+ stream->demux_ptchange_sig = g_signal_connect (stream->demux,
+ "payload-type-change", (GCallback) payload_type_change, session);
+
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+ } else {
+ /* add rtpjitterbuffer src pad to pads */
+ GstPad *pad;
+
+ pad = gst_element_get_static_pad (stream->buffer, "src");
+
+ /* both locks are dropped before exposing; exposing presumably fires
+ * pad-added into the application, which must not run under our locks */
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+
+ expose_recv_src_pad (rtpbin, pad, stream, 255);
+
+ gst_object_unref (pad);
+ }
+
+ return;
+
+ /* ERRORS */
+shutdown:
+ {
+ GST_DEBUG_OBJECT (rtpbin, "we are shutting down");
+ return;
+ }
+no_stream:
+ {
+ GST_RTP_SESSION_UNLOCK (session);
+ GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
+ GST_DEBUG_OBJECT (rtpbin, "could not create stream");
+ return;
+ }
+}
+
+/* Request the session's recv_rtp_sink pad, hook up the caps notification
+ * and optionally insert an application-provided RTP decoder in front of it.
+ * Returns the pad RTP should be fed into (a new ref: either the decoder's
+ * rtp_sink or the session's recv_rtp_sink), or NULL on failure. The request
+ * pad itself is stored in the session and released in remove_recv_rtp(). */
+static GstPad *
+complete_session_sink (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ guint sessid = session->id;
+ GstPad *recv_rtp_sink;
+ GstElement *decoder;
+
+ g_assert (!session->recv_rtp_sink);
+
+ /* get recv_rtp pad and store */
+ session->recv_rtp_sink =
+ gst_element_request_pad_simple (session->session, "recv_rtp_sink");
+ if (session->recv_rtp_sink == NULL)
+ goto pad_failed;
+
+ /* cache caps per payload as they appear on the sink pad */
+ g_signal_connect (session->recv_rtp_sink, "notify::caps",
+ (GCallback) caps_changed, session);
+
+ GST_DEBUG_OBJECT (rtpbin, "requesting RTP decoder");
+ decoder = session_request_element (session, SIGNAL_REQUEST_RTP_DECODER);
+ if (decoder) {
+ GstPad *decsrc, *decsink;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTP decoder");
+ decsink = gst_element_get_static_pad (decoder, "rtp_sink");
+ if (decsink == NULL)
+ goto dec_sink_failed;
+
+ recv_rtp_sink = decsink;
+
+ decsrc = gst_element_get_static_pad (decoder, "rtp_src");
+ if (decsrc == NULL)
+ goto dec_src_failed;
+
+ ret = gst_pad_link (decsrc, session->recv_rtp_sink);
+
+ gst_object_unref (decsrc);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto dec_link_failed;
+
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTP decoder given");
+ recv_rtp_sink = gst_object_ref (session->recv_rtp_sink);
+ }
+
+ return recv_rtp_sink;
+
+ /* ERRORS */
+pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session recv_rtp_sink pad");
+ return NULL;
+ }
+dec_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder sink pad for session %u", sessid);
+ return NULL;
+ }
+dec_src_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder src pad for session %u", sessid);
+ gst_object_unref (recv_rtp_sink);
+ return NULL;
+ }
+dec_link_failed:
+ {
+ g_warning ("rtpbin: failed to link rtp decoder for session %u", sessid);
+ gst_object_unref (recv_rtp_sink);
+ return NULL;
+ }
+}
+
+/* Complete the receive path of @session: take the session's recv_rtp_src
+ * pad, route it through an optional AUX receiver and an optional storage
+ * element, and terminate it on the SSRC demuxer whose pad signals drive
+ * new_ssrc_pad_found()/ssrc_demux_pad_removed(). Failures only warn. */
+static void
+complete_session_receiver (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid)
+{
+ GstElement *aux;
+ GstPad *recv_rtp_src;
+
+ g_assert (!session->recv_rtp_src);
+
+ session->recv_rtp_src =
+ gst_element_get_static_pad (session->session, "recv_rtp_src");
+ if (session->recv_rtp_src == NULL)
+ goto pad_failed;
+
+ /* find out if we need AUX elements */
+ aux = session_request_element (session, SIGNAL_REQUEST_AUX_RECEIVER);
+ if (aux) {
+ gchar *pname;
+ GstPad *auxsink;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking AUX receiver");
+
+ pname = g_strdup_printf ("sink_%u", sessid);
+ auxsink = gst_element_get_static_pad (aux, pname);
+ g_free (pname);
+ if (auxsink == NULL)
+ goto aux_sink_failed;
+
+ ret = gst_pad_link (session->recv_rtp_src, auxsink);
+ gst_object_unref (auxsink);
+ if (ret != GST_PAD_LINK_OK)
+ goto aux_link_failed;
+
+ /* this can be NULL when this AUX element is not to be linked any further */
+ pname = g_strdup_printf ("src_%u", sessid);
+ recv_rtp_src = gst_element_get_static_pad (aux, pname);
+ g_free (pname);
+ } else {
+ recv_rtp_src = gst_object_ref (session->recv_rtp_src);
+ }
+
+ /* Add a storage element if needed */
+ if (recv_rtp_src && session->storage) {
+ GstPadLinkReturn ret;
+ GstPad *sinkpad = gst_element_get_static_pad (session->storage, "sink");
+
+ ret = gst_pad_link (recv_rtp_src, sinkpad);
+
+ gst_object_unref (sinkpad);
+ gst_object_unref (recv_rtp_src);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto storage_link_failed;
+
+ /* continue downstream from the storage's src pad instead */
+ recv_rtp_src = gst_element_get_static_pad (session->storage, "src");
+ }
+
+ if (recv_rtp_src) {
+ GstPad *sinkdpad;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting demuxer RTP sink pad");
+ sinkdpad = gst_element_get_static_pad (session->demux, "sink");
+ GST_DEBUG_OBJECT (rtpbin, "linking demuxer RTP sink pad");
+ gst_pad_link_full (recv_rtp_src, sinkdpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkdpad);
+ gst_object_unref (recv_rtp_src);
+
+ /* connect to the new-ssrc-pad signal of the SSRC demuxer */
+ session->demux_newpad_sig = g_signal_connect (session->demux,
+ "new-ssrc-pad", (GCallback) new_ssrc_pad_found, session);
+ session->demux_padremoved_sig = g_signal_connect (session->demux,
+ "removed-ssrc-pad", (GCallback) ssrc_demux_pad_removed, session);
+ }
+
+ return;
+
+pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session recv_rtp_src pad");
+ return;
+ }
+aux_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get AUX sink pad for session %u", sessid);
+ return;
+ }
+aux_link_failed:
+ {
+ g_warning ("rtpbin: failed to link AUX pad to session %u", sessid);
+ return;
+ }
+storage_link_failed:
+ {
+ g_warning ("rtpbin: failed to link storage");
+ return;
+ }
+}
+
+/* Create a pad for receiving RTP for the session in @name. Must be called with
+ * RTP_BIN_LOCK. Parses the session id from "recv_rtp_sink_%u", creating the
+ * session on demand, and returns the (possibly already existing) ghost pad.
+ */
+static GstPad *
+create_recv_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name)
+{
+ guint sessid;
+ GstRtpBinSession *session;
+ GstPad *recv_rtp_sink;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "recv_rtp_sink_%u", &sessid) != 1)
+ goto no_name;
+
+ GST_DEBUG_OBJECT (rtpbin, "finding session %u", sessid);
+
+ /* get or create session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested: makes repeated requests idempotent */
+ if (session->recv_rtp_sink_ghost != NULL)
+ return session->recv_rtp_sink_ghost;
+
+ /* setup the session sink pad */
+ recv_rtp_sink = complete_session_sink (rtpbin, session);
+ if (!recv_rtp_sink)
+ goto session_sink_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "ghosting session sink pad");
+ session->recv_rtp_sink_ghost =
+ gst_ghost_pad_new_from_template (name, recv_rtp_sink, templ);
+ gst_object_unref (recv_rtp_sink);
+ gst_pad_set_active (session->recv_rtp_sink_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->recv_rtp_sink_ghost);
+
+ complete_session_receiver (rtpbin, session, sessid);
+
+ return session->recv_rtp_sink_ghost;
+
+ /* ERRORS */
+no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+session_sink_failed:
+ {
+ /* warning already done */
+ return NULL;
+ }
+}
+
+/* Tear down what create_recv_rtp()/complete_session_receiver() set up:
+ * disconnect the SSRC demuxer signals, drop the cached session pads and
+ * remove the ghost pad. Presumably called with RTP_BIN_LOCK held, like its
+ * create counterpart -- not verifiable from here. */
+static void
+remove_recv_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ if (session->demux_newpad_sig) {
+ g_signal_handler_disconnect (session->demux, session->demux_newpad_sig);
+ session->demux_newpad_sig = 0;
+ }
+ if (session->demux_padremoved_sig) {
+ g_signal_handler_disconnect (session->demux, session->demux_padremoved_sig);
+ session->demux_padremoved_sig = 0;
+ }
+ if (session->recv_rtp_src) {
+ gst_object_unref (session->recv_rtp_src);
+ session->recv_rtp_src = NULL;
+ }
+ if (session->recv_rtp_sink) {
+ /* this is a request pad: release it from the session element first */
+ gst_element_release_request_pad (session->session, session->recv_rtp_sink);
+ gst_object_unref (session->recv_rtp_sink);
+ session->recv_rtp_sink = NULL;
+ }
+ if (session->recv_rtp_sink_ghost) {
+ gst_pad_set_active (session->recv_rtp_sink_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->recv_rtp_sink_ghost);
+ session->recv_rtp_sink_ghost = NULL;
+ }
+}
+
+/* Request pad fec_%u on the session's FEC decoder (building the decoder
+ * first when needed) and remember it in recv_fec_sinks for later release.
+ * Returns the request pad, or NULL on failure. */
+static GstPad *
+complete_session_fec (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint fec_idx)
+{
+ gchar *padname;
+ GstPad *ret;
+
+ if (!ensure_fec_decoder (rtpbin, session))
+ goto no_decoder;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting FEC sink pad");
+ padname = g_strdup_printf ("fec_%u", fec_idx);
+ ret = gst_element_request_pad_simple (session->fec_decoder, padname);
+ g_free (padname);
+
+ if (ret == NULL)
+ goto pad_failed;
+
+ session->recv_fec_sinks = g_slist_prepend (session->recv_fec_sinks, ret);
+
+ return ret;
+
+pad_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder fec pad");
+ return NULL;
+ }
+no_decoder:
+ {
+ g_warning ("rtpbin: failed to build FEC decoder for session %u",
+ session->id);
+ return NULL;
+ }
+}
+
+/* Request the session's recv_rtcp_sink pad, optionally insert an
+ * application-provided RTCP decoder in front of it, and link the session's
+ * sync_src pad to the SSRC demuxer. Returns the pad RTCP should be fed
+ * into (the decoder's rtcp_sink or the session's recv_rtcp_sink, a new
+ * ref), or NULL on failure. */
+static GstPad *
+complete_session_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid)
+{
+ GstElement *decoder;
+ GstPad *sinkdpad;
+ GstPad *decsink = NULL;
+
+ /* get recv_rtp pad and store */
+ GST_DEBUG_OBJECT (rtpbin, "getting RTCP sink pad");
+ session->recv_rtcp_sink =
+ gst_element_request_pad_simple (session->session, "recv_rtcp_sink");
+ if (session->recv_rtcp_sink == NULL)
+ goto pad_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting RTCP decoder");
+ decoder = session_request_element (session, SIGNAL_REQUEST_RTCP_DECODER);
+ if (decoder) {
+ GstPad *decsrc;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTCP decoder");
+ decsink = gst_element_get_static_pad (decoder, "rtcp_sink");
+ if (decsink == NULL)
+ goto dec_sink_failed;
+
+ /* only fetch the src pad once the sink pad is known to exist, so a
+ * missing sink pad cannot leak a src pad reference */
+ decsrc = gst_element_get_static_pad (decoder, "rtcp_src");
+ if (decsrc == NULL)
+ goto dec_src_failed;
+
+ ret = gst_pad_link (decsrc, session->recv_rtcp_sink);
+
+ gst_object_unref (decsrc);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto dec_link_failed;
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTCP decoder given");
+ decsink = gst_object_ref (session->recv_rtcp_sink);
+ }
+
+ /* get srcpad, link to SSRCDemux */
+ GST_DEBUG_OBJECT (rtpbin, "getting sync src pad");
+ session->sync_src = gst_element_get_static_pad (session->session, "sync_src");
+ if (session->sync_src == NULL)
+ goto src_pad_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting demuxer RTCP sink pad");
+ sinkdpad = gst_element_get_static_pad (session->demux, "rtcp_sink");
+ gst_pad_link_full (session->sync_src, sinkdpad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (sinkdpad);
+
+ return decsink;
+
+pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session rtcp_sink pad");
+ return NULL;
+ }
+dec_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder sink pad for session %u", sessid);
+ return NULL;
+ }
+dec_src_failed:
+ {
+ g_warning ("rtpbin: failed to get decoder src pad for session %u", sessid);
+ goto cleanup;
+ }
+dec_link_failed:
+ {
+ g_warning ("rtpbin: failed to link rtcp decoder for session %u", sessid);
+ goto cleanup;
+ }
+src_pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session sync_src pad");
+ }
+
+cleanup:
+ /* decsink is always non-NULL when a path jumps here */
+ gst_object_unref (decsink);
+ return NULL;
+}
+
+/* Create a pad for receiving RTCP for the session in @name. Must be called with
+ * RTP_BIN_LOCK. Parses the session id from "recv_rtcp_sink_%u", creating the
+ * session on demand, and returns the (possibly already existing) ghost pad.
+ */
+static GstPad *
+create_recv_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ,
+ const gchar * name)
+{
+ guint sessid;
+ GstRtpBinSession *session;
+ GstPad *decsink = NULL;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "recv_rtcp_sink_%u", &sessid) != 1)
+ goto no_name;
+
+ GST_DEBUG_OBJECT (rtpbin, "finding session %u", sessid);
+
+ /* get or create the session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested: makes repeated requests idempotent */
+ if (session->recv_rtcp_sink_ghost != NULL)
+ return session->recv_rtcp_sink_ghost;
+
+ decsink = complete_session_rtcp (rtpbin, session, sessid);
+ if (!decsink)
+ goto create_error;
+
+ session->recv_rtcp_sink_ghost =
+ gst_ghost_pad_new_from_template (name, decsink, templ);
+ gst_object_unref (decsink);
+ gst_pad_set_active (session->recv_rtcp_sink_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin),
+ session->recv_rtcp_sink_ghost);
+
+ return session->recv_rtcp_sink_ghost;
+
+ /* ERRORS */
+no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+}
+
+/* Create a ghost pad for receiving FEC, parsing session id and FEC stream
+ * index from "recv_fec_sink_%u_%u". Only FEC indices 0 and 1 are accepted.
+ * The ghost is tracked in recv_fec_sink_ghosts for later removal. */
+static GstPad *
+create_recv_fec (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name)
+{
+ guint sessid, fec_idx;
+ GstRtpBinSession *session;
+ GstPad *decsink = NULL;
+ GstPad *ghost;
+
+ /* first get the session number */
+ if (name == NULL
+ || sscanf (name, "recv_fec_sink_%u_%u", &sessid, &fec_idx) != 2)
+ goto no_name;
+
+ /* at most two FEC streams (indices 0 and 1) per session */
+ if (fec_idx > 1)
+ goto invalid_idx;
+
+ GST_DEBUG_OBJECT (rtpbin, "finding session %u", sessid);
+
+ /* get or create the session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ decsink = complete_session_fec (rtpbin, session, fec_idx);
+ if (!decsink)
+ goto create_error;
+
+ ghost = gst_ghost_pad_new_from_template (name, decsink, templ);
+ session->recv_fec_sink_ghosts =
+ g_slist_prepend (session->recv_fec_sink_ghosts, ghost);
+ gst_object_unref (decsink);
+ gst_pad_set_active (ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), ghost);
+
+ return ghost;
+
+ /* ERRORS */
+no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+invalid_idx:
+ {
+ g_warning ("rtpbin: invalid FEC index: %s", GST_STR_NULL (name));
+ return NULL;
+ }
+create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+}
+
+/* Undo create_recv_rtcp(): remove the ghost pad, drop the sync_src ref and
+ * release the session's recv_rtcp_sink request pad. */
+static void
+remove_recv_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ if (session->recv_rtcp_sink_ghost) {
+ gst_pad_set_active (session->recv_rtcp_sink_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->recv_rtcp_sink_ghost);
+ session->recv_rtcp_sink_ghost = NULL;
+ }
+ if (session->sync_src) {
+ /* releasing the request pad should also unref the sync pad */
+ gst_object_unref (session->sync_src);
+ session->sync_src = NULL;
+ }
+ if (session->recv_rtcp_sink) {
+ gst_element_release_request_pad (session->session, session->recv_rtcp_sink);
+ gst_object_unref (session->recv_rtcp_sink);
+ session->recv_rtcp_sink = NULL;
+ }
+}
+
+/* Remove one recv FEC ghost pad: release the matching FEC decoder request
+ * pad (found via the ghost's target), drop both bookkeeping entries and
+ * remove the ghost from the bin. */
+static void
+remove_recv_fec_for_pad (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ GstPad * ghost)
+{
+ GSList *item;
+ GstPad *target;
+
+ target = gst_ghost_pad_get_target (GST_GHOST_PAD (ghost));
+
+ if (target) {
+ item = g_slist_find (session->recv_fec_sinks, target);
+ if (item) {
+ gst_element_release_request_pad (session->fec_decoder, item->data);
+ session->recv_fec_sinks =
+ g_slist_delete_link (session->recv_fec_sinks, item);
+ }
+ gst_object_unref (target);
+ }
+
+ item = g_slist_find (session->recv_fec_sink_ghosts, ghost);
+ if (item)
+ session->recv_fec_sink_ghosts =
+ g_slist_delete_link (session->recv_fec_sink_ghosts, item);
+
+ gst_pad_set_active (ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), ghost);
+}
+
+/* Remove every recv FEC ghost pad of @session. */
+static void
+remove_recv_fec (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ GSList *copy;
+ GSList *tmp;
+
+ /* iterate over a copy: remove_recv_fec_for_pad mutates the original list */
+ copy = g_slist_copy (session->recv_fec_sink_ghosts);
+
+ for (tmp = copy; tmp; tmp = tmp->next) {
+ remove_recv_fec_for_pad (rtpbin, session, (GstPad *) tmp->data);
+ }
+
+ g_slist_free (copy);
+}
+
+/* Expose the session's send_rtp_src pad, optionally routed through an
+ * application-provided RTP encoder, as ghost pad "send_rtp_src_%u" on the
+ * bin. Sticky events are copied onto the ghost. Returns TRUE on success;
+ * every failure path only warns and returns FALSE. */
+static gboolean
+complete_session_src (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ gchar *gname;
+ guint sessid = session->id;
+ GstPad *send_rtp_src;
+ GstElement *encoder;
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ gboolean ret = FALSE;
+
+ /* get srcpad */
+ send_rtp_src = gst_element_get_static_pad (session->session, "send_rtp_src");
+
+ if (send_rtp_src == NULL)
+ goto no_srcpad;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting RTP encoder");
+ encoder = session_request_element (session, SIGNAL_REQUEST_RTP_ENCODER);
+ if (encoder) {
+ gchar *ename;
+ GstPad *encsrc, *encsink;
+ /* NOTE(review): this inner ret shadows the outer gboolean ret */
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTP encoder");
+ ename = g_strdup_printf ("rtp_src_%u", sessid);
+ encsrc = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+
+ if (encsrc == NULL)
+ goto enc_src_failed;
+
+ ename = g_strdup_printf ("rtp_sink_%u", sessid);
+ encsink = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+ if (encsink == NULL)
+ goto enc_sink_failed;
+
+ ret = gst_pad_link (send_rtp_src, encsink);
+ gst_object_unref (encsink);
+ gst_object_unref (send_rtp_src);
+
+ /* from here on, the pad to ghost is the encoder's src pad */
+ send_rtp_src = encsrc;
+
+ if (ret != GST_PAD_LINK_OK)
+ goto enc_link_failed;
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTP encoder given");
+ }
+
+ /* ghost the new source pad */
+ klass = GST_ELEMENT_GET_CLASS (rtpbin);
+ gname = g_strdup_printf ("send_rtp_src_%u", sessid);
+ templ = gst_element_class_get_pad_template (klass, "send_rtp_src_%u");
+ session->send_rtp_src_ghost =
+ gst_ghost_pad_new_from_template (gname, send_rtp_src, templ);
+ gst_pad_set_active (session->send_rtp_src_ghost, TRUE);
+ gst_pad_sticky_events_foreach (send_rtp_src, copy_sticky_events,
+ session->send_rtp_src_ghost);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtp_src_ghost);
+ g_free (gname);
+
+ ret = TRUE;
+
+done:
+ if (send_rtp_src)
+ gst_object_unref (send_rtp_src);
+
+ return ret;
+
+ /* ERRORS */
+no_srcpad:
+ {
+ g_warning ("rtpbin: failed to get rtp source pad for session %u", sessid);
+ goto done;
+ }
+enc_src_failed:
+ {
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " src pad for session %u", encoder, sessid);
+ goto done;
+ }
+enc_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " sink pad for session %u", encoder, sessid);
+ goto done;
+ }
+enc_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ encoder, sessid);
+ goto done;
+ }
+}
+
+/* gst_iterator_fold callback for the AUX sender's src pads: for each pad
+ * named src_%u, find or create session %u, link the pad to that session's
+ * send_rtp_sink and complete the session's source side. Returns FALSE to
+ * abort the fold on fatal errors; pads with unparseable names and sessions
+ * that already have a sender are skipped with TRUE. */
+static gboolean
+setup_aux_sender_fold (const GValue * item, GValue * result, gpointer user_data)
+{
+ GstPad *pad;
+ gchar *name;
+ guint sessid;
+ GstRtpBinSession *session = user_data, *newsess;
+ GstRtpBin *rtpbin = session->bin;
+ GstPadLinkReturn ret;
+
+ pad = g_value_get_object (item);
+ name = gst_pad_get_name (pad);
+
+ if (name == NULL || sscanf (name, "src_%u", &sessid) != 1)
+ goto no_name;
+
+ g_free (name);
+
+ newsess = find_session_by_id (rtpbin, sessid);
+ if (newsess == NULL) {
+ /* create new session */
+ newsess = create_session (rtpbin, sessid);
+ if (newsess == NULL)
+ goto create_error;
+ } else if (newsess->send_rtp_sink != NULL)
+ goto existing_session;
+
+ /* get send_rtp pad and store */
+ newsess->send_rtp_sink =
+ gst_element_request_pad_simple (newsess->session, "send_rtp_sink");
+ if (newsess->send_rtp_sink == NULL)
+ goto pad_failed;
+
+ ret = gst_pad_link (pad, newsess->send_rtp_sink);
+ if (ret != GST_PAD_LINK_OK)
+ goto aux_link_failed;
+
+ if (!complete_session_src (rtpbin, newsess))
+ goto session_src_failed;
+
+ return TRUE;
+
+ /* ERRORS */
+no_name:
+ {
+ GST_WARNING ("ignoring invalid pad name %s", GST_STR_NULL (name));
+ g_free (name);
+ return TRUE;
+ }
+create_error:
+ {
+ /* create_session already warned */
+ return FALSE;
+ }
+existing_session:
+ {
+ /* %u, not %i: sessid is unsigned */
+ GST_DEBUG_OBJECT (rtpbin,
+ "skipping src_%u setup, since it is already configured.", sessid);
+ return TRUE;
+ }
+pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session pad for session %u", sessid);
+ return FALSE;
+ }
+aux_link_failed:
+ {
+ g_warning ("rtpbin: failed to link AUX for session %u", sessid);
+ return FALSE;
+ }
+session_src_failed:
+ {
+ g_warning ("rtpbin: failed to complete AUX for session %u", sessid);
+ return FALSE;
+ }
+}
+
+/* Run setup_aux_sender_fold over every src pad of the AUX sender element;
+ * TRUE when the whole iteration completed without a fatal error. */
+static gboolean
+setup_aux_sender (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ GstElement * aux)
+{
+ GstIterator *srcpads;
+ GValue fold_result = { 0, };
+ gboolean all_done;
+
+ srcpads = gst_element_iterate_src_pads (aux);
+ all_done =
+ gst_iterator_fold (srcpads, setup_aux_sender_fold, &fold_result,
+ session) == GST_ITERATOR_DONE;
+ gst_iterator_free (srcpads);
+
+ return all_done;
+}
+
+/* pad-added handler for the session's FEC encoder: every fec_%u pad it
+ * exposes is ghosted on the bin as send_fec_src_%u_%u (session id, FEC
+ * index) and tracked in send_fec_src_ghosts for later removal. */
+static void
+fec_encoder_pad_added_cb (GstElement * encoder, GstPad * pad,
+ GstRtpBinSession * session)
+{
+ GstElementClass *klass;
+ gchar *gname;
+ GstPadTemplate *templ;
+ guint fec_idx;
+ GstPad *ghost;
+
+ if (sscanf (GST_PAD_NAME (pad), "fec_%u", &fec_idx) != 1) {
+ GST_WARNING_OBJECT (session->bin,
+ "FEC encoder added pad with name not matching fec_%%u (%s)",
+ GST_PAD_NAME (pad));
+ goto done;
+ }
+
+ GST_INFO_OBJECT (session->bin, "FEC encoder for session %u exposed new pad",
+ session->id);
+
+ GST_RTP_BIN_LOCK (session->bin);
+ klass = GST_ELEMENT_GET_CLASS (session->bin);
+ gname = g_strdup_printf ("send_fec_src_%u_%u", session->id, fec_idx);
+ templ = gst_element_class_get_pad_template (klass, "send_fec_src_%u_%u");
+ ghost = gst_ghost_pad_new_from_template (gname, pad, templ);
+ session->send_fec_src_ghosts =
+ g_slist_prepend (session->send_fec_src_ghosts, ghost);
+ gst_pad_set_active (ghost, TRUE);
+ /* mirror the encoder pad's sticky events (caps, segment, ...) */
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, ghost);
+ gst_element_add_pad (GST_ELEMENT (session->bin), ghost);
+ g_free (gname);
+ GST_RTP_BIN_UNLOCK (session->bin);
+
+done:
+ return;
+}
+
+/* Build the FEC encoder for @sessid: first try the element description
+ * configured per-session in the fec-encoders property, then fall back to
+ * the request-fec-encoder signal. A property-built encoder is managed by
+ * the bin; any encoder gets its pad-added signal routed to
+ * fec_encoder_pad_added_cb(). Returns NULL when no encoder is available. */
+static GstElement *
+request_fec_encoder (GstRtpBin * rtpbin, GstRtpBinSession * session,
+ guint sessid)
+{
+ GstElement *ret = NULL;
+ const gchar *factory;
+ gchar *sess_id_str;
+
+ sess_id_str = g_strdup_printf ("%u", sessid);
+ factory = gst_structure_get_string (rtpbin->fec_encoders, sess_id_str);
+ g_free (sess_id_str);
+
+ /* First try the property */
+ if (factory) {
+ GError *err = NULL;
+
+ ret =
+ gst_parse_bin_from_description_full (factory, TRUE, NULL,
+ GST_PARSE_FLAG_NO_SINGLE_ELEMENT_BINS | GST_PARSE_FLAG_FATAL_ERRORS,
+ &err);
+ if (!ret) {
+ GST_ERROR_OBJECT (rtpbin, "Failed to build encoder from factory: %s",
+ err->message);
+ /* the GError must be freed after logging, or it leaks */
+ g_error_free (err);
+ goto done;
+ }
+
+ bin_manage_element (session->bin, ret);
+ session->elements = g_slist_prepend (session->elements, ret);
+ GST_INFO_OBJECT (rtpbin, "Built FEC encoder: %" GST_PTR_FORMAT
+ " for session %u", ret, sessid);
+ }
+
+ /* Fallback to the signal */
+ if (!ret)
+ ret = session_request_element (session, SIGNAL_REQUEST_FEC_ENCODER);
+
+ if (ret) {
+ g_signal_connect (ret, "pad-added", G_CALLBACK (fec_encoder_pad_added_cb),
+ session);
+ }
+
+done:
+ return ret;
+}
+
+/* Create a pad for sending RTP for the session in @name. Must be called with
+ * RTP_BIN_LOCK. Builds the send chain ghost -> [FEC encoder] -> [AUX sender]
+ * -> session send_rtp_sink, creating the session on demand, and returns the
+ * (possibly already existing) ghost pad.
+ */
+static GstPad *
+create_send_rtp (GstRtpBin * rtpbin, GstPadTemplate * templ, const gchar * name)
+{
+ gchar *pname;
+ guint sessid;
+ GstPad *send_rtp_sink;
+ GstElement *aux;
+ GstElement *encoder;
+ /* prev tracks the most-downstream element built so far (encoder, then aux) */
+ GstElement *prev = NULL;
+ GstRtpBinSession *session;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "send_rtp_sink_%u", &sessid) != 1)
+ goto no_name;
+
+ /* get or create session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested */
+ if (session->send_rtp_sink_ghost != NULL)
+ return session->send_rtp_sink_ghost;
+
+ /* check if we are already using this session as a sender */
+ if (session->send_rtp_sink != NULL)
+ goto existing_session;
+
+ encoder = request_fec_encoder (rtpbin, session, sessid);
+
+ if (encoder) {
+ GST_DEBUG_OBJECT (rtpbin, "Linking FEC encoder");
+
+ send_rtp_sink = gst_element_get_static_pad (encoder, "sink");
+
+ if (!send_rtp_sink)
+ goto enc_sink_failed;
+
+ prev = encoder;
+ }
+
+ /* NOTE(review): on the error paths below, the send_rtp_sink ref taken
+ * from the FEC encoder above is not released -- possible pad leak; verify */
+ GST_DEBUG_OBJECT (rtpbin, "getting RTP AUX sender");
+ aux = session_request_element (session, SIGNAL_REQUEST_AUX_SENDER);
+ if (aux) {
+ GstPad *sinkpad;
+ GST_DEBUG_OBJECT (rtpbin, "linking AUX sender");
+ if (!setup_aux_sender (rtpbin, session, aux))
+ goto aux_session_failed;
+
+ pname = g_strdup_printf ("sink_%u", sessid);
+ sinkpad = gst_element_get_static_pad (aux, pname);
+ g_free (pname);
+
+ if (sinkpad == NULL)
+ goto aux_sink_failed;
+
+ if (!prev) {
+ send_rtp_sink = sinkpad;
+ } else {
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPadLinkReturn ret;
+
+ ret = gst_pad_link (srcpad, sinkpad);
+ gst_object_unref (srcpad);
+ if (ret != GST_PAD_LINK_OK) {
+ goto aux_link_failed;
+ }
+ }
+ prev = aux;
+ } else {
+ /* get send_rtp pad and store */
+ session->send_rtp_sink =
+ gst_element_request_pad_simple (session->session, "send_rtp_sink");
+ if (session->send_rtp_sink == NULL)
+ goto pad_failed;
+
+ if (!complete_session_src (rtpbin, session))
+ goto session_src_failed;
+
+ if (!prev) {
+ send_rtp_sink = gst_object_ref (session->send_rtp_sink);
+ } else {
+ GstPad *srcpad = gst_element_get_static_pad (prev, "src");
+ GstPadLinkReturn ret;
+
+ ret = gst_pad_link (srcpad, session->send_rtp_sink);
+ gst_object_unref (srcpad);
+ if (ret != GST_PAD_LINK_OK)
+ goto session_link_failed;
+ }
+ }
+
+ session->send_rtp_sink_ghost =
+ gst_ghost_pad_new_from_template (name, send_rtp_sink, templ);
+ gst_object_unref (send_rtp_sink);
+ gst_pad_set_active (session->send_rtp_sink_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtp_sink_ghost);
+
+ return session->send_rtp_sink_ghost;
+
+ /* ERRORS */
+no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+existing_session:
+ {
+ g_warning ("rtpbin: session %u is already in use", sessid);
+ return NULL;
+ }
+aux_session_failed:
+ {
+ g_warning ("rtpbin: failed to get AUX sink pad for session %u", sessid);
+ return NULL;
+ }
+aux_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get AUX sink pad for session %u", sessid);
+ return NULL;
+ }
+aux_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ aux, sessid);
+ return NULL;
+ }
+pad_failed:
+ {
+ g_warning ("rtpbin: failed to get session pad for session %u", sessid);
+ return NULL;
+ }
+session_src_failed:
+ {
+ g_warning ("rtpbin: failed to setup source pads for session %u", sessid);
+ return NULL;
+ }
+session_link_failed:
+ {
+ g_warning ("rtpbin: failed to link %" GST_PTR_FORMAT " for session %u",
+ session, sessid);
+ return NULL;
+ }
+enc_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get %" GST_PTR_FORMAT
+ " sink pad for session %u", encoder, sessid);
+ return NULL;
+ }
+}
+
+/* Undo create_send_rtp(): remove the send_rtp_src ghost, release the
+ * session's send_rtp_sink request pad and remove the sink ghost. */
+static void
+remove_send_rtp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ if (session->send_rtp_src_ghost) {
+ gst_pad_set_active (session->send_rtp_src_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->send_rtp_src_ghost);
+ session->send_rtp_src_ghost = NULL;
+ }
+ if (session->send_rtp_sink) {
+ gst_element_release_request_pad (GST_ELEMENT_CAST (session->session),
+ session->send_rtp_sink);
+ gst_object_unref (session->send_rtp_sink);
+ session->send_rtp_sink = NULL;
+ }
+ if (session->send_rtp_sink_ghost) {
+ gst_pad_set_active (session->send_rtp_sink_ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+ session->send_rtp_sink_ghost);
+ session->send_rtp_sink_ghost = NULL;
+ }
+}
+
+/* Remove every send_fec_src ghost pad created by fec_encoder_pad_added_cb()
+ * and clear the bookkeeping list. */
+static void
+remove_send_fec (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+ GSList *tmp;
+
+ for (tmp = session->send_fec_src_ghosts; tmp; tmp = tmp->next) {
+ GstPad *ghost = GST_PAD (tmp->data);
+ gst_pad_set_active (ghost, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), ghost);
+ }
+
+ g_slist_free (session->send_fec_src_ghosts);
+ session->send_fec_src_ghosts = NULL;
+}
+
+/* Create a pad for sending RTCP for the session in @name. Must be called with
+ * RTP_BIN_LOCK. Requests the session's send_rtcp_src pad, optionally routes it
+ * through an application-provided RTCP encoder, and ghosts the result.
+ */
+static GstPad *
+create_send_rtcp (GstRtpBin * rtpbin, GstPadTemplate * templ,
+ const gchar * name)
+{
+ guint sessid;
+ GstPad *encsrc;
+ GstElement *encoder;
+ GstRtpBinSession *session;
+
+ /* first get the session number */
+ if (name == NULL || sscanf (name, "send_rtcp_src_%u", &sessid) != 1)
+ goto no_name;
+
+ /* get or create session */
+ session = find_session_by_id (rtpbin, sessid);
+ if (!session) {
+ GST_DEBUG_OBJECT (rtpbin, "creating session %u", sessid);
+ /* create session now */
+ session = create_session (rtpbin, sessid);
+ if (session == NULL)
+ goto create_error;
+ }
+
+ /* check if pad was requested: makes repeated requests idempotent */
+ if (session->send_rtcp_src_ghost != NULL)
+ return session->send_rtcp_src_ghost;
+
+ /* get rtcp_src pad and store; released again in remove_rtcp() */
+ session->send_rtcp_src =
+ gst_element_request_pad_simple (session->session, "send_rtcp_src");
+ if (session->send_rtcp_src == NULL)
+ goto pad_failed;
+
+ GST_DEBUG_OBJECT (rtpbin, "getting RTCP encoder");
+ encoder = session_request_element (session, SIGNAL_REQUEST_RTCP_ENCODER);
+ if (encoder) {
+ gchar *ename;
+ GstPad *encsink;
+ GstPadLinkReturn ret;
+
+ GST_DEBUG_OBJECT (rtpbin, "linking RTCP encoder");
+
+ ename = g_strdup_printf ("rtcp_src_%u", sessid);
+ encsrc = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+ if (encsrc == NULL)
+ goto enc_src_failed;
+
+ ename = g_strdup_printf ("rtcp_sink_%u", sessid);
+ encsink = gst_element_get_static_pad (encoder, ename);
+ g_free (ename);
+ if (encsink == NULL)
+ goto enc_sink_failed;
+
+ ret = gst_pad_link (session->send_rtcp_src, encsink);
+ gst_object_unref (encsink);
+
+ if (ret != GST_PAD_LINK_OK)
+ goto enc_link_failed;
+ } else {
+ GST_DEBUG_OBJECT (rtpbin, "no RTCP encoder given");
+ encsrc = gst_object_ref (session->send_rtcp_src);
+ }
+
+ session->send_rtcp_src_ghost =
+ gst_ghost_pad_new_from_template (name, encsrc, templ);
+ gst_object_unref (encsrc);
+ gst_pad_set_active (session->send_rtcp_src_ghost, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), session->send_rtcp_src_ghost);
+
+ return session->send_rtcp_src_ghost;
+
+ /* ERRORS */
+no_name:
+ {
+ g_warning ("rtpbin: cannot find session id for pad: %s",
+ GST_STR_NULL (name));
+ return NULL;
+ }
+create_error:
+ {
+ /* create_session already warned */
+ return NULL;
+ }
+pad_failed:
+ {
+ g_warning ("rtpbin: failed to get rtcp pad for session %u", sessid);
+ return NULL;
+ }
+enc_src_failed:
+ {
+ g_warning ("rtpbin: failed to get encoder src pad for session %u", sessid);
+ return NULL;
+ }
+enc_sink_failed:
+ {
+ g_warning ("rtpbin: failed to get encoder sink pad for session %u", sessid);
+ gst_object_unref (encsrc);
+ return NULL;
+ }
+enc_link_failed:
+ {
+ g_warning ("rtpbin: failed to link rtcp encoder for session %u", sessid);
+ gst_object_unref (encsrc);
+ return NULL;
+ }
+}
+
+/* Tear down the send_rtcp_src path of @session: remove the ghost pad from
+ * the bin and give the proxied request pad back to the internal rtpsession
+ * element. Both checks guard NULL, so this is safe when the pads were never
+ * requested. Caller (gst_rtp_bin_release_pad) holds GST_RTP_BIN_LOCK. */
+static void
+remove_rtcp (GstRtpBin * rtpbin, GstRtpBinSession * session)
+{
+  /* deactivate and drop the ghost pad exposed on the bin */
+  if (session->send_rtcp_src_ghost) {
+    gst_pad_set_active (session->send_rtcp_src_ghost, FALSE);
+    gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin),
+        session->send_rtcp_src_ghost);
+    session->send_rtcp_src_ghost = NULL;
+  }
+  /* release and unref the request pad on the rtpsession element */
+  if (session->send_rtcp_src) {
+    gst_element_release_request_pad (session->session, session->send_rtcp_src);
+    gst_object_unref (session->send_rtcp_src);
+    session->send_rtcp_src = NULL;
+  }
+}
+
+/* If the requested name is NULL we should create a name with
+ * the session number, assuming we want the lowest possible session
+ * with a free pad matching the template.
+ * Tries session numbers from 0 upwards and returns the first candidate
+ * name that no existing pad of @element uses.
+ * Returns: a newly allocated pad name; free with g_free(). */
+static gchar *
+gst_rtp_bin_get_free_pad_name (GstElement * element, GstPadTemplate * templ)
+{
+  gboolean name_found = FALSE;
+  gint session = 0;
+  GstIterator *pad_it = NULL;
+  gchar *pad_name = NULL;
+  GValue data = { 0, };
+
+  GST_DEBUG_OBJECT (element, "find a free pad name for template");
+  while (!name_found) {
+    gboolean done = FALSE;
+
+    /* candidate name for the next session number */
+    g_free (pad_name);
+    pad_name = g_strdup_printf (templ->name_template, session++);
+    pad_it = gst_element_iterate_pads (GST_ELEMENT (element));
+    /* assume the candidate is free until a clash is found */
+    name_found = TRUE;
+    while (!done) {
+      switch (gst_iterator_next (pad_it, &data)) {
+        case GST_ITERATOR_OK:
+        {
+          GstPad *pad;
+          gchar *name;
+
+          pad = g_value_get_object (&data);
+          name = gst_pad_get_name (pad);
+
+          if (strcmp (name, pad_name) == 0) {
+            /* candidate taken, stop scanning and try the next number */
+            done = TRUE;
+            name_found = FALSE;
+          }
+          g_free (name);
+          g_value_reset (&data);
+          break;
+        }
+        case GST_ITERATOR_ERROR:
+        case GST_ITERATOR_RESYNC:
+          /* the pad set changed under us: restart iteration from session 0 */
+          done = TRUE;
+          name_found = FALSE;
+          session = 0;
+          break;
+        case GST_ITERATOR_DONE:
+          done = TRUE;
+          break;
+      }
+    }
+    g_value_unset (&data);
+    gst_iterator_free (pad_it);
+  }
+
+  GST_DEBUG_OBJECT (element, "free pad name found: '%s'", pad_name);
+  return pad_name;
+}
+
+/* GstElement::request_new_pad implementation: pick (or build) the pad
+ * name, then dispatch on the pad template to create the matching
+ * session pad. */
+static GstPad *
+gst_rtp_bin_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
+{
+  GstRtpBin *rtpbin;
+  GstElementClass *klass;
+  GstPad *pad = NULL;
+  gchar *pname;
+
+  g_return_val_if_fail (templ != NULL, NULL);
+  g_return_val_if_fail (GST_IS_RTP_BIN (element), NULL);
+
+  rtpbin = GST_RTP_BIN (element);
+  klass = GST_ELEMENT_GET_CLASS (element);
+
+  GST_RTP_BIN_LOCK (rtpbin);
+
+  /* with no requested name, pick the lowest session number that still has
+   * a free pad for this template; otherwise take the name as given */
+  pname = (name == NULL) ?
+      gst_rtp_bin_get_free_pad_name (element, templ) : g_strdup (name);
+
+  GST_DEBUG_OBJECT (rtpbin, "Trying to request a pad with name %s", pname);
+
+  /* figure out which template was requested */
+  if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink_%u"))
+    pad = create_recv_rtp (rtpbin, templ, pname);
+  else if (templ == gst_element_class_get_pad_template (klass,
+          "recv_rtcp_sink_%u"))
+    pad = create_recv_rtcp (rtpbin, templ, pname);
+  else if (templ == gst_element_class_get_pad_template (klass,
+          "send_rtp_sink_%u"))
+    pad = create_send_rtp (rtpbin, templ, pname);
+  else if (templ == gst_element_class_get_pad_template (klass,
+          "send_rtcp_src_%u"))
+    pad = create_send_rtcp (rtpbin, templ, pname);
+  else if (templ == gst_element_class_get_pad_template (klass,
+          "recv_fec_sink_%u_%u"))
+    pad = create_recv_fec (rtpbin, templ, pname);
+  else
+    goto wrong_template;
+
+  g_free (pname);
+  GST_RTP_BIN_UNLOCK (rtpbin);
+
+  return pad;
+
+  /* ERRORS */
+wrong_template:
+  {
+    g_free (pname);
+    GST_RTP_BIN_UNLOCK (rtpbin);
+    g_warning ("rtpbin: this is not our template");
+    return NULL;
+  }
+}
+
+/* GstElement::release_pad implementation: tear down whichever ghost pad of
+ * the owning session matches @pad, and free the session itself once its
+ * last request pad is gone. */
+static void
+gst_rtp_bin_release_pad (GstElement * element, GstPad * pad)
+{
+  GstRtpBinSession *session;
+  GstRtpBin *rtpbin;
+
+  g_return_if_fail (GST_IS_GHOST_PAD (pad));
+  g_return_if_fail (GST_IS_RTP_BIN (element));
+
+  rtpbin = GST_RTP_BIN (element);
+
+  GST_RTP_BIN_LOCK (rtpbin);
+  GST_DEBUG_OBJECT (rtpbin, "Trying to release pad %s:%s",
+      GST_DEBUG_PAD_NAME (pad));
+
+  if (!(session = find_session_by_pad (rtpbin, pad)))
+    goto unknown_pad;
+
+  /* match the ghost pad against the session's request pads and remove
+   * the corresponding branch */
+  if (session->recv_rtp_sink_ghost == pad) {
+    remove_recv_rtp (rtpbin, session);
+  } else if (session->recv_rtcp_sink_ghost == pad) {
+    remove_recv_rtcp (rtpbin, session);
+  } else if (session->send_rtp_sink_ghost == pad) {
+    remove_send_rtp (rtpbin, session);
+  } else if (session->send_rtcp_src_ghost == pad) {
+    remove_rtcp (rtpbin, session);
+  } else if (pad_is_recv_fec (session, pad)) {
+    remove_recv_fec_for_pad (rtpbin, session, pad);
+  }
+
+  /* no more request pads, free the complete session */
+  if (session->recv_rtp_sink_ghost == NULL
+      && session->recv_rtcp_sink_ghost == NULL
+      && session->send_rtp_sink_ghost == NULL
+      && session->send_rtcp_src_ghost == NULL
+      && session->recv_fec_sink_ghosts == NULL) {
+    GST_DEBUG_OBJECT (rtpbin, "no more pads for session %p", session);
+    rtpbin->sessions = g_slist_remove (rtpbin->sessions, session);
+    free_session (session, rtpbin);
+  }
+  GST_RTP_BIN_UNLOCK (rtpbin);
+
+  return;
+
+  /* ERROR */
+unknown_pad:
+  {
+    GST_RTP_BIN_UNLOCK (rtpbin);
+    g_warning ("rtpbin: %s:%s is not one of our request pads",
+        GST_DEBUG_PAD_NAME (pad));
+    return;
+  }
+}
diff --git a/gst/rtpmanager/gstrtpbin.h b/gst/rtpmanager/gstrtpbin.h
new file mode 100644
index 0000000000..28c8d2b066
--- /dev/null
+++ b/gst/rtpmanager/gstrtpbin.h
@@ -0,0 +1,155 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_BIN_H__
+#define __GST_RTP_BIN_H__
+
+#include <gst/gst.h>
+
+#include "rtpsession.h"
+#include "gstrtpsession.h"
+#include "rtpjitterbuffer.h"
+
+#define GST_TYPE_RTP_BIN \
+ (gst_rtp_bin_get_type())
+#define GST_RTP_BIN(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_BIN,GstRtpBin))
+#define GST_RTP_BIN_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_BIN,GstRtpBinClass))
+#define GST_IS_RTP_BIN(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_BIN))
+#define GST_IS_RTP_BIN_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_BIN))
+
+typedef enum
+{
+ GST_RTP_BIN_RTCP_SYNC_ALWAYS,
+ GST_RTP_BIN_RTCP_SYNC_INITIAL,
+ GST_RTP_BIN_RTCP_SYNC_RTP
+} GstRTCPSync;
+
+typedef struct _GstRtpBin GstRtpBin;
+typedef struct _GstRtpBinClass GstRtpBinClass;
+typedef struct _GstRtpBinPrivate GstRtpBinPrivate;
+
+/* GstRtpBin instance structure.
+ * NOTE(review): the first group of fields presumably mirrors the element's
+ * GObject properties and is applied to each new session — confirm against
+ * the property install/getter code earlier in gstrtpbin.c. */
+struct _GstRtpBin {
+  GstBin bin;
+
+  /*< private >*/
+  /* default latency for sessions */
+  guint latency_ms;
+  guint64 latency_ns;
+  gboolean drop_on_latency;
+  gboolean do_lost;
+  gboolean ignore_pt;
+  gboolean ntp_sync;
+  gint rtcp_sync;
+  guint rtcp_sync_interval;
+  RTPJitterBufferMode buffer_mode;
+  gboolean buffering;
+  gboolean use_pipeline_clock;
+  GstRtpNtpTimeSource ntp_time_source;
+  gboolean send_sync_event;
+  GstClockTime buffer_start;
+  gboolean do_retransmission;
+  GstRTPProfile rtp_profile;
+  gboolean rtcp_sync_send_time;
+  gint max_rtcp_rtp_time_diff;
+  guint32 max_dropout_time;
+  guint32 max_misorder_time;
+  gboolean rfc7273_sync;
+  guint max_streams;
+  guint64 max_ts_offset_adjustment;
+  gint64 max_ts_offset;
+  gboolean max_ts_offset_is_set;
+
+  /* a list of sessions (GstRtpBinSession *) */
+  GSList *sessions;
+
+  /* a list of clients, these are streams with the same CNAME */
+  GSList *clients;
+
+  /* the default SDES items for sessions */
+  GstStructure *sdes;
+
+  /* the default FEC decoder factories for sessions */
+  GstStructure *fec_decoders;
+
+  /* the default FEC encoder factories for sessions */
+  GstStructure *fec_encoders;
+
+  /*< private >*/
+  GstRtpBinPrivate *priv;
+};
+
+/* GstRtpBin class structure: vfunc slots backing the element's action
+ * signals, notification signals and the request-element signals. */
+struct _GstRtpBinClass {
+  GstBinClass parent_class;
+
+  /* get the caps for pt */
+  GstCaps* (*request_pt_map) (GstRtpBin *rtpbin, guint session, guint pt);
+
+  void (*payload_type_change) (GstRtpBin *rtpbin, guint session, guint pt);
+
+  void (*new_jitterbuffer) (GstRtpBin *rtpbin, GstElement *jitterbuffer, guint session, guint32 ssrc);
+
+  void (*new_storage) (GstRtpBin *rtpbin, GstElement *jitterbuffer, guint session);
+
+  /* action signals */
+  void (*clear_pt_map) (GstRtpBin *rtpbin);
+  void (*reset_sync) (GstRtpBin *rtpbin);
+  GstElement* (*get_session) (GstRtpBin *rtpbin, guint session);
+  RTPSession* (*get_internal_session) (GstRtpBin *rtpbin, guint session);
+  GstElement* (*get_storage) (GstRtpBin *rtpbin, guint session);
+  GObject* (*get_internal_storage) (GstRtpBin *rtpbin, guint session);
+  void (*clear_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+
+  /* session manager signals */
+  void (*on_new_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_ssrc_collision) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_ssrc_validated) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_ssrc_active) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_ssrc_sdes) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_bye_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_bye_timeout) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_timeout) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_sender_timeout) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_npt_stop) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+
+  /* request custom encoder/decoder/aux/FEC elements for a session */
+  GstElement* (*request_rtp_encoder) (GstRtpBin *rtpbin, guint session);
+  GstElement* (*request_rtp_decoder) (GstRtpBin *rtpbin, guint session);
+  GstElement* (*request_rtcp_encoder) (GstRtpBin *rtpbin, guint session);
+  GstElement* (*request_rtcp_decoder) (GstRtpBin *rtpbin, guint session);
+
+  GstElement* (*request_aux_sender) (GstRtpBin *rtpbin, guint session);
+  GstElement* (*request_aux_receiver) (GstRtpBin *rtpbin, guint session);
+
+  GstElement* (*request_fec_encoder) (GstRtpBin *rtpbin, guint session);
+  GstElement* (*request_fec_decoder) (GstRtpBin *rtpbin, guint session);
+
+  GstElement* (*request_jitterbuffer) (GstRtpBin *rtpbin, guint session);
+
+  void (*on_new_sender_ssrc) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+  void (*on_sender_ssrc_active) (GstRtpBin *rtpbin, guint session, guint32 ssrc);
+};
+
+GType gst_rtp_bin_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpbin);
+
+#endif /* __GST_RTP_BIN_H__ */
diff --git a/gst/rtpmanager/gstrtpdtmfmux.c b/gst/rtpmanager/gstrtpdtmfmux.c
new file mode 100644
index 0000000000..b55c067249
--- /dev/null
+++ b/gst/rtpmanager/gstrtpdtmfmux.c
@@ -0,0 +1,228 @@
+/* RTP DTMF muxer element for GStreamer
+ *
+ * gstrtpdtmfmux.c:
+ *
+ * Copyright (C) <2007-2010> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) <2007-2010> Collabora Ltd
+ * Contact: Olivier Crete <olivier.crete@collabora.co.uk>
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000,2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpdtmfmux
+ * @title: rtpdtmfmux
+ * @see_also: rtpdtmfsrc, dtmfsrc, rtpmux
+ *
+ * The RTP "DTMF" Muxer muxes multiple RTP streams into a valid RTP
+ * stream. It does exactly what its parent (#rtpmux) does, except
+ * that it prevent buffers coming over a regular sink_\%u pad from going through
+ * for the duration of buffers that came in a priority_sink_\%u pad.
+ *
+ * This is especially useful if a discontinuous source like dtmfsrc or
+ * rtpdtmfsrc are connected to the priority sink pads. This way, the generated
+ * DTMF signal can replace the recorded audio while the tone is being sent.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <string.h>
+
+#include "gstrtpdtmfmux.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_dtmf_mux_debug);
+#define GST_CAT_DEFAULT gst_rtp_dtmf_mux_debug
+
+static GstStaticPadTemplate priority_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("priority_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp"));
+
+static GstPad *gst_rtp_dtmf_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static GstStateChangeReturn gst_rtp_dtmf_mux_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_rtp_dtmf_mux_accept_buffer_locked (GstRTPMux * rtp_mux,
+ GstRTPMuxPadPrivate * padpriv, GstRTPBuffer * rtpbuffer);
+static gboolean gst_rtp_dtmf_mux_src_event (GstRTPMux * rtp_mux,
+ GstEvent * event);
+
+G_DEFINE_TYPE_WITH_CODE (GstRTPDTMFMux, gst_rtp_dtmf_mux, GST_TYPE_RTP_MUX,
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_dtmf_mux_debug, "rtpdtmfmux", 0,
+ "rtp dtmf muxer"););
+GST_ELEMENT_REGISTER_DEFINE (rtpdtmfmux, "rtpdtmfmux", GST_RANK_NONE,
+ GST_TYPE_RTP_DTMF_MUX);
+
+/* Instance init: nothing to do here; last_priority_end is (re)set in the
+ * READY->PAUSED transition of gst_rtp_dtmf_mux_change_state(). */
+static void
+gst_rtp_dtmf_mux_init (GstRTPDTMFMux * mux)
+{
+}
+
+
+/* Class init: install the priority_sink_%u pad template and element
+ * metadata, and override request_new_pad/change_state plus the GstRTPMux
+ * vfuncs accept_buffer_locked and src_event. */
+static void
+gst_rtp_dtmf_mux_class_init (GstRTPDTMFMuxClass * klass)
+{
+  GstElementClass *gstelement_class;
+  GstRTPMuxClass *gstrtpmux_class;
+
+  gstelement_class = (GstElementClass *) klass;
+  gstrtpmux_class = (GstRTPMuxClass *) klass;
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &priority_sink_factory);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP muxer",
+      "Codec/Muxer",
+      "mixes RTP DTMF streams into other RTP streams",
+      "Zeeshan Ali <first.last@nokia.com>");
+
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_rtp_dtmf_mux_request_new_pad);
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_dtmf_mux_change_state);
+  gstrtpmux_class->accept_buffer_locked = gst_rtp_dtmf_mux_accept_buffer_locked;
+  gstrtpmux_class->src_event = gst_rtp_dtmf_mux_src_event;
+}
+
+/* GstRTPMuxClass::accept_buffer_locked implementation.
+ * Priority-pad buffers extend last_priority_end (running time up to which
+ * regular pads are muted); regular-pad buffers inside that window are
+ * rejected (return FALSE = drop). Buffers without a valid timestamp pass
+ * through. NOTE(review): per the "_locked" suffix this presumably runs with
+ * the mux object lock held — confirm in gstrtpmux.c. */
+static gboolean
+gst_rtp_dtmf_mux_accept_buffer_locked (GstRTPMux * rtp_mux,
+    GstRTPMuxPadPrivate * padpriv, GstRTPBuffer * rtpbuffer)
+{
+  GstRTPDTMFMux *mux = GST_RTP_DTMF_MUX (rtp_mux);
+  GstClockTime running_ts;
+
+  running_ts = GST_BUFFER_PTS (rtpbuffer->buffer);
+
+  if (GST_CLOCK_TIME_IS_VALID (running_ts)) {
+    if (padpriv && padpriv->segment.format == GST_FORMAT_TIME)
+      running_ts = gst_segment_to_running_time (&padpriv->segment,
+          GST_FORMAT_TIME, GST_BUFFER_PTS (rtpbuffer->buffer));
+
+    if (padpriv && padpriv->priority) {
+      /* fix: check the buffer DURATION (the value added below), not the
+       * PTS which was already validated above; an invalid duration would
+       * poison last_priority_end, and the else branch already warns about
+       * an invalid duration */
+      if (GST_BUFFER_DURATION_IS_VALID (rtpbuffer->buffer)) {
+        if (GST_CLOCK_TIME_IS_VALID (mux->last_priority_end))
+          mux->last_priority_end =
+              MAX (running_ts + GST_BUFFER_DURATION (rtpbuffer->buffer),
+              mux->last_priority_end);
+        else
+          mux->last_priority_end = running_ts +
+              GST_BUFFER_DURATION (rtpbuffer->buffer);
+        GST_LOG_OBJECT (mux, "Got buffer %p on priority pad, "
+            " blocking regular pads until %" GST_TIME_FORMAT, rtpbuffer->buffer,
+            GST_TIME_ARGS (mux->last_priority_end));
+      } else {
+        GST_WARNING_OBJECT (mux, "Buffer %p has an invalid duration,"
+            " not blocking other pad", rtpbuffer->buffer);
+      }
+    } else {
+      if (GST_CLOCK_TIME_IS_VALID (mux->last_priority_end) &&
+          running_ts < mux->last_priority_end) {
+        GST_LOG_OBJECT (mux, "Dropping buffer %p because running time"
+            " %" GST_TIME_FORMAT " < %" GST_TIME_FORMAT, rtpbuffer->buffer,
+            GST_TIME_ARGS (running_ts), GST_TIME_ARGS (mux->last_priority_end));
+        return FALSE;
+      }
+    }
+  } else {
+    GST_LOG_OBJECT (mux, "Buffer %p has an invalid timestamp,"
+        " letting through", rtpbuffer->buffer);
+  }
+
+  return TRUE;
+}
+
+
+/* GstElement::request_new_pad implementation: chain up to GstRTPMux, then
+ * mark the new pad's private data as priority when it was requested from
+ * the priority_sink_%u template. */
+static GstPad *
+gst_rtp_dtmf_mux_request_new_pad (GstElement * element, GstPadTemplate * templ,
+    const gchar * name, const GstCaps * caps)
+{
+  GstPad *pad;
+
+  pad =
+      GST_ELEMENT_CLASS (gst_rtp_dtmf_mux_parent_class)->request_new_pad
+      (element, templ, name, caps);
+
+  if (pad) {
+    GstRTPMuxPadPrivate *padpriv;
+
+    GST_OBJECT_LOCK (element);
+    padpriv = gst_pad_get_element_private (pad);
+
+    /* robustness: guard against a NULL pad private before dereferencing */
+    if (padpriv != NULL
+        && gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (element),
+            "priority_sink_%u") == GST_PAD_PAD_TEMPLATE (pad))
+      padpriv->priority = TRUE;
+    GST_OBJECT_UNLOCK (element);
+  }
+
+  return pad;
+}
+
+/* GstRTPMuxClass::src_event implementation: for upstream custom
+ * "dtmf-event" events, stamp the mux's last_stop running time into the
+ * event structure (field "last-stop") before chaining up to GstRTPMux. */
+static gboolean
+gst_rtp_dtmf_mux_src_event (GstRTPMux * rtp_mux, GstEvent * event)
+{
+  if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_UPSTREAM) {
+    const GstStructure *s = gst_event_get_structure (event);
+
+    if (s && gst_structure_has_name (s, "dtmf-event")) {
+      GST_OBJECT_LOCK (rtp_mux);
+      if (GST_CLOCK_TIME_IS_VALID (rtp_mux->last_stop)) {
+        /* the event may be shared; make it writable before modifying
+         * its structure */
+        event = (GstEvent *)
+            gst_mini_object_make_writable (GST_MINI_OBJECT_CAST (event));
+        s = gst_event_get_structure (event);
+        gst_structure_set ((GstStructure *) s,
+            "last-stop", G_TYPE_UINT64, rtp_mux->last_stop, NULL);
+      }
+      GST_OBJECT_UNLOCK (rtp_mux);
+    }
+  }
+
+  return GST_RTP_MUX_CLASS (gst_rtp_dtmf_mux_parent_class)->src_event (rtp_mux,
+      event);
+}
+
+
+/* GstElement::change_state implementation: reset the priority-end tracking
+ * time on READY->PAUSED, then chain up to the parent class. */
+static GstStateChangeReturn
+gst_rtp_dtmf_mux_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRTPDTMFMux *mux = GST_RTP_DTMF_MUX (element);
+
+  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED) {
+    GST_OBJECT_LOCK (mux);
+    mux->last_priority_end = GST_CLOCK_TIME_NONE;
+    GST_OBJECT_UNLOCK (mux);
+  }
+
+  return
+      GST_ELEMENT_CLASS (gst_rtp_dtmf_mux_parent_class)->change_state (element,
+      transition);
+}
diff --git a/gst/rtpmanager/gstrtpdtmfmux.h b/gst/rtpmanager/gstrtpdtmfmux.h
new file mode 100644
index 0000000000..df4eebd646
--- /dev/null
+++ b/gst/rtpmanager/gstrtpdtmfmux.h
@@ -0,0 +1,68 @@
+/* RTP muxer element for GStreamer
+ *
+ * gstrtpdtmfmux.h:
+ *
+ * Copyright (C) <2007> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000,2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_DTMF_MUX_H__
+#define __GST_RTP_DTMF_MUX_H__
+
+#include <gst/gst.h>
+#include "gstrtpmux.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_DTMF_MUX (gst_rtp_dtmf_mux_get_type())
+#define GST_RTP_DTMF_MUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DTMF_MUX, GstRTPDTMFMux))
+#define GST_RTP_DTMF_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DTMF_MUX, GstRTPDTMFMux))
+#define GST_IS_RTP_DTMF_MUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DTMF_MUX))
+#define GST_IS_RTP_DTMF_MUX_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DTMF_MUX))
+typedef struct _GstRTPDTMFMux GstRTPDTMFMux;
+typedef struct _GstRTPDTMFMuxClass GstRTPDTMFMuxClass;
+
+/**
+ * GstRTPDTMFMux:
+ *
+ * The opaque #GstRTPDTMFMux structure.
+ */
+struct _GstRTPDTMFMux
+{
+  GstRTPMux mux;
+
+  /* Protected by object lock */
+  /* running time until which regular (non-priority) pads are blocked;
+   * GST_CLOCK_TIME_NONE when no priority buffer is pending */
+  GstClockTime last_priority_end;
+};
+
+struct _GstRTPDTMFMuxClass
+{
+  GstRTPMuxClass parent_class;
+
+  /* signals */
+  /* NOTE(review): no g_signal_new() registration for these slots is visible
+   * in gstrtpdtmfmux.c — confirm whether they are still used anywhere */
+  void (*locking) (GstElement * element, GstPad * pad);
+  void (*unlocked) (GstElement * element, GstPad * pad);
+};
+
+GType gst_rtp_dtmf_mux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpdtmfmux);
+
+G_END_DECLS
+#endif /* __GST_RTP_DTMF_MUX_H__ */
diff --git a/gst/rtpmanager/gstrtpfunnel.c b/gst/rtpmanager/gstrtpfunnel.c
new file mode 100644
index 0000000000..67a3c23c11
--- /dev/null
+++ b/gst/rtpmanager/gstrtpfunnel.c
@@ -0,0 +1,690 @@
+/* RTP funnel element for GStreamer
+ *
+ * gstrtpfunnel.c:
+ *
+ * Copyright (C) <2017> Pexip.
+ * Contact: Havard Graff <havard@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+ /**
+ * SECTION:element-rtpfunnel
+ * @title: rtpfunnel
+ * @see_also: rtpbasepaylaoder, rtpsession
+ *
+ * RTP funnel is basically like a normal funnel with a few added
+ * functionalities to support bundling.
+ *
+ * Bundle is the concept of sending multiple streams in a single RTP session.
+ * These can be both audio and video streams, and several of both.
+ * One of the advantages with bundling is that you can get away with fewer
+ * ports for sending and receiving media. Also the RTCP traffic gets more
+ * compact if you can report on multiple streams in a single sender/receiver
+ * report.
+ *
+ * One of the reasons for a specialized RTP funnel is that some messages
+ * coming upstream want to find their way back to the right stream,
+ * and a normal funnel can't know which of its sinkpads it should send
+ * these messages to. The RTP funnel achieves this by keeping track of the
+ * SSRC of each stream on its sinkpad, and then uses the fact that upstream
+ * events are tagged inside rtpbin with the appropriate SSRC, so that upon
+ * receiving such an event, the RTP funnel can do a simple lookup for the
+ * right pad to forward the event to.
+ *
+ * A good example here is the KeyUnit event. If several video encoders are
+ * being bundled together using the RTP funnel, and one of the decoders on
+ * the receiving side asks for a KeyUnit, typically a RTCP PLI message will
+ * be sent from the receiver to the sender, and this will be transformed into
+ * a GstForceKeyUnit event inside GstRTPSession, and sent upstream. The
+ * RTP funnel can than make sure that this event hits the right encoder based
+ * on the SSRC embedded in the event.
+ *
+ * Another feature of the RTP funnel is that it will mux together TWCC
+ * (Transport-Wide Congestion Control) sequence-numbers. The point being that
+ * it should increment "transport-wide", meaning potentially several
+ * bundled streams. Note that not *all* streams being bundled needs to be
+ * affected by this. As an example Google WebRTC will use bundle with audio
+ * and video, but will only use TWCC sequence-numbers for the video-stream(s).
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/rtp/gstrtphdrext.h>
+
+#include "gstrtpfunnel.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_funnel_debug);
+#define GST_CAT_DEFAULT gst_rtp_funnel_debug
+
+/**************** GstRTPFunnelPad ****************/
+
+/* Class structure for GstRtpFunnelPad: no class-level state. */
+struct _GstRtpFunnelPadClass
+{
+  GstPadClass class;
+};
+
+/* Custom sink pad holding per-stream state learned from the CAPS event
+ * (see gst_rtp_funnel_sink_event). */
+struct _GstRtpFunnelPad
+{
+  GstPad pad;
+  /* value of the "ssrc" caps field, used for routing upstream events */
+  guint32 ssrc;
+  /* TRUE when this pad's caps carried a TWCC extmap attribute */
+  gboolean has_twcc;
+};
+
+G_DEFINE_TYPE (GstRtpFunnelPad, gst_rtp_funnel_pad, GST_TYPE_PAD);
+GST_ELEMENT_REGISTER_DEFINE (rtpfunnel, "rtpfunnel", GST_RANK_NONE,
+ GST_TYPE_RTP_FUNNEL);
+
+/* No class-level setup needed for the funnel pad subclass. */
+static void
+gst_rtp_funnel_pad_class_init (G_GNUC_UNUSED GstRtpFunnelPadClass * klass)
+{
+}
+
+/* No per-instance setup needed; ssrc/has_twcc start zeroed by GObject. */
+static void
+gst_rtp_funnel_pad_init (G_GNUC_UNUSED GstRtpFunnelPad * pad)
+{
+}
+
+/**************** GstRTPFunnel ****************/
+
+enum
+{
+ PROP_0,
+ PROP_COMMON_TS_OFFSET,
+};
+
+#define DEFAULT_COMMON_TS_OFFSET -1
+
+/* Class structure for GstRtpFunnel: no class-level state. */
+struct _GstRtpFunnelClass
+{
+  GstElementClass class;
+};
+
+/* GstRtpFunnel instance structure. */
+struct _GstRtpFunnel
+{
+  GstElement element;
+
+  GstPad *srcpad;
+  GstCaps *srccaps; /* protected by OBJECT_LOCK */
+  /* TRUE: re-push stream-start and caps downstream on the next chain */
+  gboolean send_sticky_events;
+  GHashTable *ssrc_to_pad; /* protected by OBJECT_LOCK */
+  /* The last pad data was chained on */
+  GstPad *current_pad;
+
+  guint twcc_pads; /* number of sinkpads with negotiated twcc */
+  GstRTPHeaderExtension *twcc_ext;
+
+  /* properties */
+  gint common_ts_offset;
+};
+
+#define RTP_CAPS "application/x-rtp"
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+#define gst_rtp_funnel_parent_class parent_class
+G_DEFINE_TYPE (GstRtpFunnel, gst_rtp_funnel, GST_TYPE_ELEMENT);
+
+
+/* Push @pad's stream-start event and a copy of our srccaps downstream,
+ * but only when send_sticky_events is armed; disarm it on success. */
+static void
+gst_rtp_funnel_send_sticky (GstRtpFunnel * funnel, GstPad * pad)
+{
+  GstEvent *ev;
+  GstCaps *caps;
+
+  if (!funnel->send_sticky_events)
+    return;
+
+  ev = gst_pad_get_sticky_event (pad, GST_EVENT_STREAM_START, 0);
+  if (ev && !gst_pad_push_event (funnel->srcpad, ev)) {
+    GST_ERROR_OBJECT (funnel, "Could not push stream start");
+    return;
+  }
+
+  /* We modify these caps in our sink pad event handlers, so make sure to
+   * send a copy downstream so that we can keep our internal caps writable */
+  GST_OBJECT_LOCK (funnel);
+  caps = gst_caps_copy (funnel->srccaps);
+  GST_OBJECT_UNLOCK (funnel);
+
+  ev = gst_event_new_caps (caps);
+  gst_caps_unref (caps);
+  if (ev && !gst_pad_push_event (funnel->srcpad, ev)) {
+    GST_ERROR_OBJECT (funnel, "Could not push caps");
+    return;
+  }
+
+  funnel->send_sticky_events = FALSE;
+}
+
+/* When the active input switches to a different pad, forward that pad's
+ * segment and custom sticky events downstream and remember it as current. */
+static void
+gst_rtp_funnel_forward_segment (GstRtpFunnel * funnel, GstPad * pad)
+{
+  GstEvent *ev;
+  guint idx;
+
+  if (pad == funnel->current_pad)
+    return;
+
+  ev = gst_pad_get_sticky_event (pad, GST_EVENT_SEGMENT, 0);
+  if (ev && !gst_pad_push_event (funnel->srcpad, ev)) {
+    GST_ERROR_OBJECT (funnel, "Could not push segment");
+    return;
+  }
+
+  /* forward every custom sticky event stored on this pad */
+  for (idx = 0;; idx++) {
+    ev = gst_pad_get_sticky_event (pad, GST_EVENT_CUSTOM_DOWNSTREAM_STICKY,
+        idx);
+    if (ev == NULL)
+      break;
+    if (!gst_pad_push_event (funnel->srcpad, ev))
+      GST_ERROR_OBJECT (funnel, "Could not push custom event");
+  }
+
+  funnel->current_pad = pad;
+}
+
+/* Rewrite the TWCC seqnum header extension on @buf so sequence numbers
+ * increase transport-wide across all funneled streams. @buf is made
+ * writable; no-op when no TWCC extension object exists or this pad did
+ * not negotiate TWCC. */
+static void
+gst_rtp_funnel_set_twcc_seqnum (GstRtpFunnel * funnel,
+    GstPad * pad, GstBuffer ** buf)
+{
+  GstRtpFunnelPad *fpad = GST_RTP_FUNNEL_PAD_CAST (pad);
+  guint8 twcc_seq[2] = { 0, };
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  guint ext_id;
+  guint8 *existing;
+  guint size;
+
+  if (!funnel->twcc_ext || !fpad->has_twcc)
+    return;
+
+  /* bug fix: query the extension id only after the NULL check above; the
+   * previous code called gst_rtp_header_extension_get_id() at declaration
+   * time on a possibly-NULL funnel->twcc_ext */
+  ext_id = gst_rtp_header_extension_get_id (funnel->twcc_ext);
+
+  *buf = gst_buffer_make_writable (*buf);
+
+  gst_rtp_header_extension_write (funnel->twcc_ext, *buf,
+      GST_RTP_HEADER_EXTENSION_ONE_BYTE, *buf, twcc_seq, sizeof (twcc_seq));
+
+  if (!gst_rtp_buffer_map (*buf, GST_MAP_READWRITE, &rtp))
+    goto map_failed;
+
+  /* patch the new seqnum into an already-present one-byte extension */
+  if (gst_rtp_buffer_get_extension_onebyte_header (&rtp, ext_id,
+          0, (gpointer) & existing, &size)) {
+    if (size >= gst_rtp_header_extension_get_max_size (funnel->twcc_ext, *buf)) {
+      existing[0] = twcc_seq[0];
+      existing[1] = twcc_seq[1];
+    }
+  }
+  /* TODO: two-byte variant */
+
+  gst_rtp_buffer_unmap (&rtp);
+
+  return;
+
+map_failed:
+  {
+    GST_ERROR ("failed to map buffer %p", *buf);
+  }
+}
+
+/* Common chain path for buffers and buffer lists: serialize on the srcpad
+ * stream lock, (re)send sticky events and @pad's segment when needed, then
+ * push downstream. TWCC seqnums are only rewritten for single buffers,
+ * not for buffer lists. */
+static GstFlowReturn
+gst_rtp_funnel_sink_chain_object (GstPad * pad, GstRtpFunnel * funnel,
+    gboolean is_list, GstMiniObject * obj)
+{
+  GstFlowReturn res;
+
+  GST_DEBUG_OBJECT (pad, "received %" GST_PTR_FORMAT, obj);
+
+  GST_PAD_STREAM_LOCK (funnel->srcpad);
+
+  gst_rtp_funnel_send_sticky (funnel, pad);
+  gst_rtp_funnel_forward_segment (funnel, pad);
+
+  if (is_list) {
+    res = gst_pad_push_list (funnel->srcpad, GST_BUFFER_LIST_CAST (obj));
+  } else {
+    GstBuffer *buf = GST_BUFFER_CAST (obj);
+    gst_rtp_funnel_set_twcc_seqnum (funnel, pad, &buf);
+    res = gst_pad_push (funnel->srcpad, buf);
+  }
+  GST_PAD_STREAM_UNLOCK (funnel->srcpad);
+
+  return res;
+}
+
+/* Buffer-list chain function: delegate to the common chain path. */
+static GstFlowReturn
+gst_rtp_funnel_sink_chain_list (GstPad * pad, GstObject * parent,
+    GstBufferList * list)
+{
+  return gst_rtp_funnel_sink_chain_object (pad, GST_RTP_FUNNEL_CAST (parent),
+      TRUE, GST_MINI_OBJECT_CAST (list));
+}
+
+/* Single-buffer chain function: delegate to the common chain path. */
+static GstFlowReturn
+gst_rtp_funnel_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  return gst_rtp_funnel_sink_chain_object (pad, GST_RTP_FUNNEL_CAST (parent),
+      FALSE, GST_MINI_OBJECT_CAST (buffer));
+}
+
+/* Record @twcc_ext_id as the TWCC extension id: advertise the extmap in
+ * srccaps, re-arm the sticky events and set the id on the extension object.
+ * Called with the funnel OBJECT_LOCK held (see the CAPS event handler).
+ * NOTE(review): "n-streams" is updated on every call, even when the id is
+ * unchanged — presumably so newly negotiated twcc pads are counted; confirm. */
+static void
+gst_rtp_funnel_set_twcc_ext_id (GstRtpFunnel * funnel, guint8 twcc_ext_id)
+{
+  gchar *name;
+  guint current_ext_id;
+
+  current_ext_id = gst_rtp_header_extension_get_id (funnel->twcc_ext);
+  g_object_set (funnel->twcc_ext, "n-streams", funnel->twcc_pads, NULL);
+
+  if (current_ext_id == twcc_ext_id)
+    return;
+
+  name = g_strdup_printf ("extmap-%u", twcc_ext_id);
+
+  gst_caps_set_simple (funnel->srccaps, name, G_TYPE_STRING,
+      gst_rtp_header_extension_get_uri (funnel->twcc_ext), NULL);
+
+  g_free (name);
+
+  /* make sure we update the sticky with the new caps */
+  funnel->send_sticky_events = TRUE;
+
+  gst_rtp_header_extension_set_id (funnel->twcc_ext, twcc_ext_id);
+}
+
+/* Scan the caps structure for a field named "extmap-<id>" whose string
+ * value equals @ext_name; return that id (valid range 1..14) or 0 when
+ * no matching attribute is present. */
+static guint8
+_get_extmap_id_for_attribute (const GstStructure * s, const gchar * ext_name)
+{
+  guint n_fields = gst_structure_n_fields (s);
+  guint idx;
+
+  for (idx = 0; idx < n_fields; idx++) {
+    const gchar *fname = gst_structure_nth_field_name (s, idx);
+    const gchar *value;
+    gint64 id;
+
+    if (!g_str_has_prefix (fname, "extmap-"))
+      continue;
+
+    value = gst_structure_get_string (s, fname);
+    if (value == NULL || g_strcmp0 (value, ext_name) != 0)
+      continue;
+
+    /* the id follows the "extmap-" prefix (7 characters) */
+    id = g_ascii_strtoll (fname + 7, NULL, 10);
+    if (id > 0 && id < 15)
+      return (guint8) id;
+  }
+
+  return 0;
+}
+
+#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"
+
+/* Sink pad event handler.
+ * STREAM_START and SEGMENT are swallowed here and re-sent per-pad from the
+ * chain path (send_sticky/forward_segment). CAPS events are absorbed: the
+ * "ssrc" field is recorded for upstream event routing, and a TWCC extmap
+ * attribute enables seqnum rewriting for this pad. */
+static gboolean
+gst_rtp_funnel_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRtpFunnel *funnel = GST_RTP_FUNNEL_CAST (parent);
+  GstRtpFunnelPad *fpad = GST_RTP_FUNNEL_PAD_CAST (pad);
+
+  gboolean forward = TRUE;
+  gboolean ret = TRUE;
+
+  GST_DEBUG_OBJECT (pad, "received event %" GST_PTR_FORMAT, event);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_STREAM_START:
+    case GST_EVENT_SEGMENT:
+      forward = FALSE;
+      break;
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+      GstStructure *s;
+      guint ssrc;
+      guint8 ext_id;
+
+      gst_event_parse_caps (event, &caps);
+
+      GST_OBJECT_LOCK (funnel);
+      /* incompatible upstream caps are treated as a programming error */
+      if (!gst_caps_can_intersect (funnel->srccaps, caps)) {
+        GST_ERROR_OBJECT (funnel, "Can't intersect with caps %" GST_PTR_FORMAT,
+            caps);
+        g_assert_not_reached ();
+      }
+
+      /* remember the ssrc so upstream events can be routed back here */
+      s = gst_caps_get_structure (caps, 0);
+      if (gst_structure_get_uint (s, "ssrc", &ssrc)) {
+        fpad->ssrc = ssrc;
+        GST_DEBUG_OBJECT (pad, "Got ssrc: %u", ssrc);
+        g_hash_table_insert (funnel->ssrc_to_pad, GUINT_TO_POINTER (ssrc), pad);
+      }
+
+      /* lazily create the shared TWCC header-extension object */
+      if (!funnel->twcc_ext)
+        funnel->twcc_ext =
+            gst_rtp_header_extension_create_from_uri (TWCC_EXTMAP_STR);
+
+      ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR);
+      if (ext_id > 0) {
+        fpad->has_twcc = TRUE;
+        funnel->twcc_pads++;
+        gst_rtp_funnel_set_twcc_ext_id (funnel, ext_id);
+      }
+      GST_OBJECT_UNLOCK (funnel);
+
+      forward = FALSE;
+      break;
+    }
+    default:
+      break;
+  }
+
+  if (forward) {
+    ret = gst_pad_event_default (pad, parent, event);
+  } else {
+    gst_event_unref (event);
+  }
+
+  return ret;
+}
+
+/* Sink-pad query handler: answers CAPS and ACCEPT_CAPS from the funnel's
+ * srccaps (held under the object lock); everything else is defaulted. */
+static gboolean
+gst_rtp_funnel_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstRtpFunnel *funnel = GST_RTP_FUNNEL_CAST (parent);
+  gboolean res = TRUE;
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CAPS:
+    {
+      GstCaps *filter_caps;
+      GstCaps *new_caps;
+
+      gst_query_parse_caps (query, &filter_caps);
+
+      GST_OBJECT_LOCK (funnel);
+      if (filter_caps) {
+        new_caps = gst_caps_intersect_full (funnel->srccaps, filter_caps,
+            GST_CAPS_INTERSECT_FIRST);
+      } else {
+        new_caps = gst_caps_copy (funnel->srccaps);
+      }
+      GST_OBJECT_UNLOCK (funnel);
+
+      /* -1 means the shared timestamp-offset feature is disabled.
+       * NOTE(review): common_ts_offset is read outside the object lock
+       * here — presumably fine since it only changes via g_object_set;
+       * confirm. */
+      if (funnel->common_ts_offset >= 0)
+        gst_caps_set_simple (new_caps, "timestamp-offset", G_TYPE_UINT,
+            (guint) funnel->common_ts_offset, NULL);
+
+      gst_query_set_caps_result (query, new_caps);
+      GST_DEBUG_OBJECT (pad, "Answering caps-query with caps: %"
+          GST_PTR_FORMAT, new_caps);
+      gst_caps_unref (new_caps);
+      break;
+    }
+    case GST_QUERY_ACCEPT_CAPS:
+    {
+      GstCaps *caps;
+      gboolean result;
+
+      gst_query_parse_accept_caps (query, &caps);
+
+      GST_OBJECT_LOCK (funnel);
+      result = gst_caps_is_subset (caps, funnel->srccaps);
+      if (!result) {
+        GST_ERROR_OBJECT (pad,
+            "caps: %" GST_PTR_FORMAT " were not compatible with: %"
+            GST_PTR_FORMAT, caps, funnel->srccaps);
+      }
+      GST_OBJECT_UNLOCK (funnel);
+
+      gst_query_set_accept_caps_result (query, result);
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, parent, query);
+      break;
+  }
+
+  return res;
+}
+
+/* Src-pad event handler: custom upstream events carrying an "ssrc" field
+ * are routed back to the single sinkpad that owns that ssrc; all other
+ * events take the default path.  Returns the result of the push/default
+ * handler. */
+static gboolean
+gst_rtp_funnel_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRtpFunnel *funnel = GST_RTP_FUNNEL_CAST (parent);
+  gboolean handled = FALSE;
+  gboolean ret = TRUE;
+
+  GST_DEBUG_OBJECT (pad, "received event %" GST_PTR_FORMAT, event);
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_CUSTOM_UPSTREAM) {
+    const GstStructure *s = gst_event_get_structure (event);
+    GstPad *fpad;
+    guint ssrc;
+    if (s && gst_structure_get_uint (s, "ssrc", &ssrc)) {
+      handled = TRUE;
+
+      /* take a ref under the lock so the pad cannot vanish while pushing */
+      GST_OBJECT_LOCK (funnel);
+      fpad = g_hash_table_lookup (funnel->ssrc_to_pad, GUINT_TO_POINTER (ssrc));
+      if (fpad)
+        gst_object_ref (fpad);
+      GST_OBJECT_UNLOCK (funnel);
+
+      if (fpad) {
+        GST_INFO_OBJECT (pad, "Sending %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
+            event, fpad);
+        ret = gst_pad_push_event (fpad, event);
+        gst_object_unref (fpad);
+      } else {
+        /* no pad registered for this ssrc: consume the event */
+        gst_event_unref (event);
+      }
+    }
+  }
+
+  if (!handled) {
+    /* FIX: propagate the default handler's result instead of discarding it
+     * and unconditionally returning TRUE */
+    ret = gst_pad_event_default (pad, parent, event);
+  }
+
+  return ret;
+}
+
+/* request_new_pad vfunc: creates a GstRtpFunnelPad sinkpad, wires up its
+ * chain/event/query functions, and activates/adds it to the element.
+ * Order matters: the pad must be fully configured before it is activated
+ * and exposed. */
+static GstPad *
+gst_rtp_funnel_request_new_pad (GstElement * element, GstPadTemplate * templ,
+    const gchar * name, const GstCaps * caps)
+{
+  GstPad *sinkpad;
+  /* caps are not used for pad creation */
+  (void) caps;
+
+  GST_DEBUG_OBJECT (element, "requesting pad");
+
+  sinkpad = GST_PAD_CAST (g_object_new (GST_TYPE_RTP_FUNNEL_PAD,
+          "name", name, "direction", templ->direction, "template", templ,
+          NULL));
+
+  gst_pad_set_chain_function (sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_sink_chain));
+  gst_pad_set_chain_list_function (sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_sink_chain_list));
+  gst_pad_set_event_function (sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_sink_event));
+  gst_pad_set_query_function (sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_sink_query));
+
+  /* proxy caps/allocation so negotiation flows through the funnel */
+  GST_OBJECT_FLAG_SET (sinkpad, GST_PAD_FLAG_PROXY_CAPS);
+  GST_OBJECT_FLAG_SET (sinkpad, GST_PAD_FLAG_PROXY_ALLOCATION);
+
+  gst_pad_set_active (sinkpad, TRUE);
+
+  gst_element_add_pad (element, sinkpad);
+
+  GST_DEBUG_OBJECT (element, "requested pad %s:%s",
+      GST_DEBUG_PAD_NAME (sinkpad));
+
+  return sinkpad;
+}
+
+/* GObject set_property vfunc */
+static void
+gst_rtp_funnel_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpFunnel *self = GST_RTP_FUNNEL_CAST (object);
+
+  switch (prop_id) {
+    case PROP_COMMON_TS_OFFSET:
+      /* -1 disables the shared timestamp offset */
+      self->common_ts_offset = g_value_get_int (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject get_property vfunc */
+static void
+gst_rtp_funnel_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstRtpFunnel *self = GST_RTP_FUNNEL_CAST (object);
+
+  switch (prop_id) {
+    case PROP_COMMON_TS_OFFSET:
+      g_value_set_int (value, self->common_ts_offset);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* change_state vfunc: chains up first, then re-arms sticky-event
+ * forwarding on PAUSED->READY so the next run resends caps etc. */
+static GstStateChangeReturn
+gst_rtp_funnel_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRtpFunnel *self = GST_RTP_FUNNEL_CAST (element);
+  GstStateChangeReturn result;
+
+  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    self->send_sticky_events = TRUE;
+
+  return result;
+}
+
+/* GHRFunc for g_hash_table_foreach_remove: TRUE removes the ssrc_to_pad
+ * entry whose value is the pad passed as @user_data. */
+static gboolean
+_remove_pad_func (gpointer key, gpointer value, gpointer user_data)
+{
+  (void) key;
+  return GST_PAD_CAST (value) == GST_PAD_CAST (user_data);
+}
+
+/* release_pad vfunc: drops any ssrc mapping that still points at the pad,
+ * then deactivates and removes it. */
+static void
+gst_rtp_funnel_release_pad (GstElement * element, GstPad * pad)
+{
+  GstRtpFunnel *self = GST_RTP_FUNNEL_CAST (element);
+
+  GST_DEBUG_OBJECT (self, "releasing pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+  g_hash_table_foreach_remove (self->ssrc_to_pad, _remove_pad_func, pad);
+
+  gst_pad_set_active (pad, FALSE);
+  gst_element_remove_pad (GST_ELEMENT_CAST (self), pad);
+}
+
+/* finalize vfunc: releases resources allocated in _init() and during
+ * caps negotiation, then chains up. */
+static void
+gst_rtp_funnel_finalize (GObject * object)
+{
+  GstRtpFunnel *self = GST_RTP_FUNNEL_CAST (object);
+
+  gst_caps_unref (self->srccaps);
+  g_hash_table_destroy (self->ssrc_to_pad);
+  gst_clear_object (&self->twcc_ext);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class init: wires up the GObject/GstElement vtables, registers pad
+ * templates, metadata and the "common-ts-offset" property. */
+static void
+gst_rtp_funnel_class_init (GstRtpFunnelClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_rtp_funnel_finalize);
+  gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_rtp_funnel_get_property);
+  gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_rtp_funnel_set_property);
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_request_new_pad);
+  gstelement_class->release_pad =
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_release_pad);
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_change_state);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP funnel",
+      "RTP Funneling",
+      "Funnel RTP buffers together for multiplexing",
+      "Havard Graff <havard@gstip.com>");
+
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+  /* -1 disables the shared timestamp offset; G_PARAM_CONSTRUCT applies
+   * the default at construction time */
+  g_object_class_install_property (gobject_class, PROP_COMMON_TS_OFFSET,
+      g_param_spec_int ("common-ts-offset", "Common Timestamp Offset",
+          "Use the same RTP timestamp offset for all sinkpads (-1 = disable)",
+          -1, G_MAXINT32, DEFAULT_COMMON_TS_OFFSET,
+          G_PARAM_CONSTRUCT | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_funnel_debug,
+      "gstrtpfunnel", 0, "funnel element");
+}
+
+/* Instance init: creates the fixed-caps srcpad and the per-instance
+ * state (sticky-event flag, base RTP caps, ssrc->pad map). */
+static void
+gst_rtp_funnel_init (GstRtpFunnel * funnel)
+{
+  funnel->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+  gst_pad_use_fixed_caps (funnel->srcpad);
+  gst_pad_set_event_function (funnel->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_funnel_src_event));
+  gst_element_add_pad (GST_ELEMENT (funnel), funnel->srcpad);
+
+  funnel->send_sticky_events = TRUE;
+  funnel->srccaps = gst_caps_new_empty_simple (RTP_CAPS);
+  /* keys are GUINT_TO_POINTER(ssrc), values are borrowed pad pointers,
+   * so no key/value destroy functions are needed */
+  funnel->ssrc_to_pad = g_hash_table_new (NULL, NULL);
+  funnel->current_pad = NULL;
+}
diff --git a/gst/rtpmanager/gstrtpfunnel.h b/gst/rtpmanager/gstrtpfunnel.h
new file mode 100644
index 0000000000..fb436734d9
--- /dev/null
+++ b/gst/rtpmanager/gstrtpfunnel.h
@@ -0,0 +1,50 @@
+/* RTP funnel element for GStreamer
+ *
+ * gstrtpfunnel.h:
+ *
+ * Copyright (C) <2017> Pexip.
+ * Contact: Havard Graff <havard@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#ifndef __GST_RTP_FUNNEL_H__
+#define __GST_RTP_FUNNEL_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRtpFunnelClass GstRtpFunnelClass;
+typedef struct _GstRtpFunnel GstRtpFunnel;
+
+#define GST_TYPE_RTP_FUNNEL (gst_rtp_funnel_get_type())
+#define GST_RTP_FUNNEL_CAST(obj) ((GstRtpFunnel *)(obj))
+
+GType gst_rtp_funnel_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpfunnel);
+
+typedef struct _GstRtpFunnelPadClass GstRtpFunnelPadClass;
+typedef struct _GstRtpFunnelPad GstRtpFunnelPad;
+
+#define GST_TYPE_RTP_FUNNEL_PAD (gst_rtp_funnel_pad_get_type())
+#define GST_RTP_FUNNEL_PAD_CAST(obj) ((GstRtpFunnelPad *)(obj))
+
+GType gst_rtp_funnel_pad_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_FUNNEL_H__ */
diff --git a/gst/rtpmanager/gstrtphdrext-rfc6464.c b/gst/rtpmanager/gstrtphdrext-rfc6464.c
new file mode 100644
index 0000000000..03f9e54360
--- /dev/null
+++ b/gst/rtpmanager/gstrtphdrext-rfc6464.c
@@ -0,0 +1,326 @@
+/* GStreamer
+ * Copyright (C) <2018> Havard Graff <havard.graff@gmail.com>
+ * Copyright (C) <2020-2021> Guillaume Desmottes <guillaume.desmottes@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ */
+
+/**
+ * SECTION:element-rtphdrextrfc6464
+ * @title: rtphdrextrfc6464
+ * @short_description: Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension
+ *
+ * Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension.
+ * The extension should be automatically created by payloader and depayloaders,
+ * if their `auto-header-extension` property is enabled, if the extension
+ * is part of the RTP caps.
+ *
+ * ## Example pipeline
+ * |[
+ * gst-launch-1.0 pulsesrc ! level audio-level-meta=true ! audioconvert !
+ * rtpL16pay ! application/x-rtp,
+ * extmap-1=(string)\< \"\", urn:ietf:params:rtp-hdrext:ssrc-audio-level,
+ * \"vad=on\" \> ! udpsink
+ * ]|
+ *
+ * Since: 1.20
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtphdrext-rfc6464.h"
+
+#include <gst/audio/audio.h>
+
+#define RFC6464_HDR_EXT_URI GST_RTP_HDREXT_BASE"ssrc-audio-level"
+
+GST_DEBUG_CATEGORY_STATIC (rtphdrrfc6464_twcc_debug);
+#define GST_CAT_DEFAULT (rtphdrrfc6464_twcc_debug)
+
+#define DEFAULT_VAD TRUE
+
+/* Property ids */
+enum
+{
+  PROP_0,
+  PROP_VAD,
+};
+
+struct _GstRTPHeaderExtensionRfc6464
+{
+  GstRTPHeaderExtension parent;
+
+  /* whether the "vad" (voice activity detection) SDP attribute is enabled */
+  gboolean vad;
+};
+
+G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionRfc6464,
+ gst_rtp_header_extension_rfc6464, GST_TYPE_RTP_HEADER_EXTENSION,
+ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrextrfc6464", 0,
+ "RTP RFC 6464 Header Extensions"););
+GST_ELEMENT_REGISTER_DEFINE (rtphdrextrfc6464, "rtphdrextrfc6464",
+ GST_RANK_MARGINAL, GST_TYPE_RTP_HEADER_EXTENSION_RFC6464);
+
+/* GObject get_property vfunc (the "vad" property is read-only) */
+static void
+gst_rtp_header_extension_rfc6464_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstRTPHeaderExtensionRfc6464 *ext =
+      GST_RTP_HEADER_EXTENSION_RFC6464 (object);
+
+  switch (prop_id) {
+    case PROP_VAD:
+      g_value_set_boolean (value, ext->vad);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* get_supported_flags vfunc: both one-byte and two-byte header forms
+ * are supported (the encoding is identical, see _write/_read). */
+static GstRTPHeaderExtensionFlags
+gst_rtp_header_extension_rfc6464_get_supported_flags (GstRTPHeaderExtension *
+    ext)
+{
+  return GST_RTP_HEADER_EXTENSION_ONE_BYTE | GST_RTP_HEADER_EXTENSION_TWO_BYTE;
+}
+
+/* get_max_size vfunc: worst case is 2 bytes — one level/VAD byte plus
+ * one padding byte for the two-byte form. */
+static gsize
+gst_rtp_header_extension_rfc6464_get_max_size (GstRTPHeaderExtension * ext,
+    const GstBuffer * input_meta)
+{
+  return 2;
+}
+
+/* Update the cached vad flag; "vad" is notified only on an actual change. */
+static void
+set_vad (GstRTPHeaderExtension * ext, gboolean vad)
+{
+  GstRTPHeaderExtensionRfc6464 *self = GST_RTP_HEADER_EXTENSION_RFC6464 (ext);
+
+  if (self->vad != vad) {
+    GST_DEBUG_OBJECT (ext, "vad: %d", vad);
+    self->vad = vad;
+    g_object_notify (G_OBJECT (self), "vad");
+  }
+}
+
+/* set_attributes_from_caps vfunc.  Two extmap caps forms are accepted:
+ *   - a plain URI string: vad falls back to DEFAULT_VAD;
+ *   - a 3-element array <direction, uri, "vad=on"|"vad=off">.
+ * Returns FALSE on a URI mismatch or unknown caps format. */
+static gboolean
+gst_rtp_header_extension_rfc6464_set_attributes_from_caps (GstRTPHeaderExtension
+    * ext, const GstCaps * caps)
+{
+  gchar *field_name = gst_rtp_header_extension_get_sdp_caps_field_name (ext);
+  GstStructure *s = gst_caps_get_structure (caps, 0);
+  const gchar *ext_uri;
+  const GValue *arr;
+
+  if (!field_name)
+    return FALSE;
+
+  if ((ext_uri = gst_structure_get_string (s, field_name))) {
+    if (g_strcmp0 (ext_uri, gst_rtp_header_extension_get_uri (ext)) != 0) {
+      /* incompatible extension uri for this instance */
+      goto error;
+    }
+    set_vad (ext, DEFAULT_VAD);
+  } else if ((arr = gst_structure_get_value (s, field_name))
+      && GST_VALUE_HOLDS_ARRAY (arr)
+      && gst_value_array_get_size (arr) == 3) {
+    const GValue *val;
+    const gchar *vad_attr;
+
+    /* array layout: [0] direction, [1] uri, [2] attributes */
+    val = gst_value_array_get_value (arr, 1);
+    if (!G_VALUE_HOLDS_STRING (val))
+      goto error;
+    if (g_strcmp0 (g_value_get_string (val),
+            gst_rtp_header_extension_get_uri (ext)) != 0)
+      goto error;
+
+    val = gst_value_array_get_value (arr, 2);
+    if (!G_VALUE_HOLDS_STRING (val))
+      goto error;
+
+    vad_attr = g_value_get_string (val);
+
+    if (g_str_equal (vad_attr, "vad=on"))
+      set_vad (ext, TRUE);
+    else if (g_str_equal (vad_attr, "vad=off"))
+      set_vad (ext, FALSE);
+    else {
+      GST_WARNING_OBJECT (ext, "Invalid attribute: %s", vad_attr);
+      goto error;
+    }
+  } else {
+    /* unknown caps format */
+    goto error;
+  }
+
+  g_free (field_name);
+  return TRUE;
+
+error:
+  g_free (field_name);
+  return FALSE;
+}
+
+/* set_caps_from_attributes vfunc: serializes the extension into the caps
+ * as a 3-element array <direction(""), uri, "vad=on"|"vad=off">. */
+static gboolean
+gst_rtp_header_extension_rfc6464_set_caps_from_attributes (GstRTPHeaderExtension
+    * ext, GstCaps * caps)
+{
+  GstRTPHeaderExtensionRfc6464 *self = GST_RTP_HEADER_EXTENSION_RFC6464 (ext);
+  gchar *field_name = gst_rtp_header_extension_get_sdp_caps_field_name (ext);
+  GstStructure *s = gst_caps_get_structure (caps, 0);
+  GValue arr = G_VALUE_INIT;
+  GValue val = G_VALUE_INIT;
+
+  if (!field_name)
+    return FALSE;
+
+  g_value_init (&arr, GST_TYPE_ARRAY);
+  g_value_init (&val, G_TYPE_STRING);
+
+  /* direction (empty = sendrecv) */
+  g_value_set_string (&val, "");
+  gst_value_array_append_value (&arr, &val);
+
+  /* uri */
+  g_value_set_string (&val, gst_rtp_header_extension_get_uri (ext));
+  gst_value_array_append_value (&arr, &val);
+
+  /* attributes */
+  if (self->vad)
+    g_value_set_string (&val, "vad=on");
+  else
+    g_value_set_string (&val, "vad=off");
+  gst_value_array_append_value (&arr, &val);
+
+  gst_structure_set_value (s, field_name, &arr);
+
+  GST_DEBUG_OBJECT (self, "%" GST_PTR_FORMAT, caps);
+
+  g_value_unset (&val);
+  g_value_unset (&arr);
+
+  g_free (field_name);
+  return TRUE;
+}
+
+/* write vfunc: encodes the GstAudioLevelMeta of @input_meta into @data.
+ * Returns 1 (one-byte form) or 2 (two-byte form, second byte padding),
+ * 0 when the buffer carries no audio-level meta, -1 on precondition
+ * failure. */
+static gssize
+gst_rtp_header_extension_rfc6464_write (GstRTPHeaderExtension * ext,
+    const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags,
+    GstBuffer * output, guint8 * data, gsize size)
+{
+  GstAudioLevelMeta *meta;
+  guint level;
+
+  g_return_val_if_fail (size >=
+      gst_rtp_header_extension_rfc6464_get_max_size (ext, NULL), -1);
+  g_return_val_if_fail (write_flags &
+      gst_rtp_header_extension_rfc6464_get_supported_flags (ext), -1);
+
+  meta = gst_buffer_get_audio_level_meta ((GstBuffer *) input_meta);
+  if (!meta) {
+    GST_LOG_OBJECT (ext, "no meta");
+    return 0;
+  }
+
+  /* RFC 6464 encodes the level in 7 bits (0..127, in -dBov) */
+  level = meta->level;
+  if (level > 127) {
+    GST_LOG_OBJECT (ext, "level from meta is higher than 127: %d, cropping",
+        meta->level);
+    level = 127;
+  }
+
+  GST_LOG_OBJECT (ext, "writing ext (level: %d voice: %d)", meta->level,
+      meta->voice_activity);
+
+  /* Both one & two byte use the same format, the second byte being padding.
+   * FIX: use the cropped |level| — the original wrote meta->level & 0x7F,
+   * which wraps values > 127 (e.g. 200 -> 72) instead of clamping to 127. */
+  data[0] = (level & 0x7F) | (meta->voice_activity << 7);
+  if (write_flags & GST_RTP_HEADER_EXTENSION_ONE_BYTE) {
+    return 1;
+  }
+  data[1] = 0;
+  return 2;
+}
+
+/* read vfunc: decodes the audio-level extension from @data and attaches
+ * a GstAudioLevelMeta to @buffer.  Returns TRUE on success. */
+static gboolean
+gst_rtp_header_extension_rfc6464_read (GstRTPHeaderExtension * ext,
+    GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size,
+    GstBuffer * buffer)
+{
+  guint8 level;
+  gboolean voice_activity;
+
+  /* FIX: this function returns gboolean, so fail with FALSE — the original
+   * returned -1, which is truthy when interpreted as a gboolean */
+  g_return_val_if_fail (read_flags &
+      gst_rtp_header_extension_rfc6464_get_supported_flags (ext), FALSE);
+  /* guard against an empty extension payload before touching data[0] */
+  g_return_val_if_fail (size >= 1, FALSE);
+
+  /* Both one & two byte use the same format, the second byte being padding */
+  level = data[0] & 0x7F;
+  voice_activity = (data[0] & 0x80) >> 7;
+
+  GST_LOG_OBJECT (ext, "reading ext (level: %d voice: %d)", level,
+      voice_activity);
+
+  gst_buffer_add_audio_level_meta (buffer, level, voice_activity);
+
+  return TRUE;
+}
+
+/* Class init: installs the read-only "vad" property and wires up the
+ * GstRTPHeaderExtension vtable (write/read and caps translation). */
+static void
+gst_rtp_header_extension_rfc6464_class_init (GstRTPHeaderExtensionRfc6464Class *
+    klass)
+{
+  GstRTPHeaderExtensionClass *rtp_hdr_class;
+  GstElementClass *gstelement_class;
+  GObjectClass *gobject_class;
+
+  rtp_hdr_class = GST_RTP_HEADER_EXTENSION_CLASS (klass);
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = GST_ELEMENT_CLASS (klass);
+
+  gobject_class->get_property = gst_rtp_header_extension_rfc6464_get_property;
+
+  /**
+   * rtphdrextrfc6464:vad:
+   *
+   * If the vad extension attribute is enabled or not, defaults to %TRUE
+   * (DEFAULT_VAD).
+   *
+   * Since: 1.20
+   */
+  g_object_class_install_property (gobject_class, PROP_VAD,
+      g_param_spec_boolean ("vad", "vad",
+          "If the vad extension attribute is enabled or not",
+          DEFAULT_VAD, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  rtp_hdr_class->get_supported_flags =
+      gst_rtp_header_extension_rfc6464_get_supported_flags;
+  rtp_hdr_class->get_max_size = gst_rtp_header_extension_rfc6464_get_max_size;
+  rtp_hdr_class->set_attributes_from_caps =
+      gst_rtp_header_extension_rfc6464_set_attributes_from_caps;
+  rtp_hdr_class->set_caps_from_attributes =
+      gst_rtp_header_extension_rfc6464_set_caps_from_attributes;
+  rtp_hdr_class->write = gst_rtp_header_extension_rfc6464_write;
+  rtp_hdr_class->read = gst_rtp_header_extension_rfc6464_read;
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension",
+      GST_RTP_HDREXT_ELEMENT_CLASS,
+      "Client-to-Mixer Audio Level Indication (RFC6464) RTP Header Extension",
+      "Guillaume Desmottes <guillaume.desmottes@collabora.com>");
+  gst_rtp_header_extension_class_set_uri (rtp_hdr_class, RFC6464_HDR_EXT_URI);
+}
+
+/* Instance init: vad starts at DEFAULT_VAD (TRUE) until caps say otherwise */
+static void
+gst_rtp_header_extension_rfc6464_init (GstRTPHeaderExtensionRfc6464 * self)
+{
+  GST_DEBUG_OBJECT (self, "creating element");
+  self->vad = DEFAULT_VAD;
+}
diff --git a/gst/rtpmanager/gstrtphdrext-rfc6464.h b/gst/rtpmanager/gstrtphdrext-rfc6464.h
new file mode 100644
index 0000000000..f2439a952d
--- /dev/null
+++ b/gst/rtpmanager/gstrtphdrext-rfc6464.h
@@ -0,0 +1,32 @@
+/* GStreamer
+ * Copyright (C) <2018> Havard Graff <havard.graff@gmail.com>
+ * Copyright (C) <2020-2021> Guillaume Desmottes <guillaume.desmottes@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ */
+
+#ifndef __GST_RTPHDREXT_RFC6464_H__
+#define __GST_RTPHDREXT_RFC6464_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtphdrext.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_RTP_HEADER_EXTENSION_RFC6464 (gst_rtp_header_extension_rfc6464_get_type())
+
+G_DECLARE_FINAL_TYPE (GstRTPHeaderExtensionRfc6464, gst_rtp_header_extension_rfc6464, GST, RTP_HEADER_EXTENSION_RFC6464, GstRTPHeaderExtension)
+
+GST_ELEMENT_REGISTER_DECLARE (rtphdrextrfc6464);
+
+G_END_DECLS
+
+#endif /* __GST_RTPHDREXT_RFC6464_H__ */
diff --git a/gst/rtpmanager/gstrtphdrext-twcc.c b/gst/rtpmanager/gstrtphdrext-twcc.c
new file mode 100644
index 0000000000..1382e7ca21
--- /dev/null
+++ b/gst/rtpmanager/gstrtphdrext-twcc.c
@@ -0,0 +1,234 @@
+/* GStreamer
+ * Copyright (C) <2020> Matthew Waters <matthew@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:rtphdrexttwcc
+ * @title: GstRtphdrext-TWCC
+ * @short_description: Helper methods for dealing with RTP header extensions
+ * in the Audio/Video RTP Profile for transport-wide-cc
+ * @see_also: #GstRTPHeaderExtension, #GstRTPBasePayload, #GstRTPBaseDepayload, gstrtpbuffer
+ *
+ * Since: 1.20
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtphdrext-twcc.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtphdrext_twcc_debug);
+#define GST_CAT_DEFAULT (rtphdrext_twcc_debug)
+
+#define gst_gl_base_filter_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstRTPHeaderExtensionTWCC,
+ gst_rtp_header_extension_twcc, GST_TYPE_RTP_HEADER_EXTENSION,
+ GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "rtphdrexttwcc", 0,
+ "RTP TWCC Header Extensions");
+ );
+GST_ELEMENT_REGISTER_DEFINE (rtphdrexttwcc, "rtphdrexttwcc", GST_RANK_MARGINAL,
+ GST_TYPE_RTP_HEADER_EXTENSION_TWCC);
+
+#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"
+
+static void gst_rtp_header_extension_twcc_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_rtp_header_extension_twcc_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+static GstRTPHeaderExtensionFlags
+gst_rtp_header_extension_twcc_get_supported_flags (GstRTPHeaderExtension * ext);
+static gsize gst_rtp_header_extension_twcc_get_max_size (GstRTPHeaderExtension *
+ ext, const GstBuffer * buffer);
+static gssize gst_rtp_header_extension_twcc_write (GstRTPHeaderExtension * ext,
+ const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags,
+ GstBuffer * output, guint8 * data, gsize size);
+static gboolean gst_rtp_header_extension_twcc_read (GstRTPHeaderExtension * ext,
+ GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size,
+ GstBuffer * buffer);
+
+enum
+{
+ PROP_0,
+ PROP_N_STREAMS,
+};
+
+/* Class init: installs the "n-streams" property and wires up the
+ * GstRTPHeaderExtension vtable; caps translation uses the simple-sdp
+ * helpers since TWCC has no extra SDP attributes. */
+static void
+gst_rtp_header_extension_twcc_class_init (GstRTPHeaderExtensionTWCCClass *
+    klass)
+{
+  GstRTPHeaderExtensionClass *rtp_hdr_class;
+  GstElementClass *gstelement_class;
+  GObjectClass *gobject_class;
+
+  rtp_hdr_class = (GstRTPHeaderExtensionClass *) klass;
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->set_property = gst_rtp_header_extension_twcc_set_property;
+  gobject_class->get_property = gst_rtp_header_extension_twcc_get_property;
+
+  /**
+   * rtphdrexttwcc:n-streams:
+   *
+   * The number of independent RTP streams that are being used for the transport
+   * wide counter for TWCC. If set to 1 (the default), then any existing
+   * transport wide counter is kept.
+   *
+   * Since: 1.20
+   */
+  g_object_class_install_property (gobject_class, PROP_N_STREAMS,
+      g_param_spec_uint ("n-streams", "N Streams",
+          "The number of separate RTP streams this header applies to",
+          1, G_MAXUINT32, 1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  rtp_hdr_class->get_supported_flags =
+      gst_rtp_header_extension_twcc_get_supported_flags;
+  rtp_hdr_class->get_max_size = gst_rtp_header_extension_twcc_get_max_size;
+  rtp_hdr_class->write = gst_rtp_header_extension_twcc_write;
+  rtp_hdr_class->read = gst_rtp_header_extension_twcc_read;
+  rtp_hdr_class->set_attributes_from_caps =
+      gst_rtp_header_extension_set_attributes_from_caps_simple_sdp;
+  rtp_hdr_class->set_caps_from_attributes =
+      gst_rtp_header_extension_set_caps_from_attributes_simple_sdp;
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "Transport Wide Congestion Control", GST_RTP_HDREXT_ELEMENT_CLASS,
+      "Extends RTP packets to add sequence number transport wide.",
+      "Matthew Waters <matthew@centricular.com>");
+  gst_rtp_header_extension_class_set_uri (rtp_hdr_class, TWCC_EXTMAP_STR);
+}
+
+/* Instance init: one stream by default, so an existing twcc-seqnum in the
+ * packet is preserved (see _write). */
+static void
+gst_rtp_header_extension_twcc_init (GstRTPHeaderExtensionTWCC * twcc)
+{
+  twcc->n_streams = 1;
+}
+
+/* GObject set_property vfunc */
+static void
+gst_rtp_header_extension_twcc_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRTPHeaderExtensionTWCC *self = GST_RTP_HEADER_EXTENSION_TWCC (object);
+
+  switch (prop_id) {
+    case PROP_N_STREAMS:
+      self->n_streams = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject get_property vfunc */
+static void
+gst_rtp_header_extension_twcc_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRTPHeaderExtensionTWCC *self = GST_RTP_HEADER_EXTENSION_TWCC (object);
+
+  switch (prop_id) {
+    case PROP_N_STREAMS:
+      g_value_set_uint (value, self->n_streams);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* get_supported_flags vfunc: only the one-byte header form is supported
+ * (see _write, which uses gst_rtp_buffer_get_extension_onebyte_header). */
+static GstRTPHeaderExtensionFlags
+gst_rtp_header_extension_twcc_get_supported_flags (GstRTPHeaderExtension * ext)
+{
+  return GST_RTP_HEADER_EXTENSION_ONE_BYTE;
+}
+
+/* get_max_size vfunc: the transport-wide sequence number is a 16-bit
+ * big-endian value, i.e. 2 bytes. */
+static gsize
+gst_rtp_header_extension_twcc_get_max_size (GstRTPHeaderExtension * ext,
+    const GstBuffer * buffer)
+{
+  return 2;
+}
+
+/* write vfunc: writes the transport-wide sequence number.
+ * Returns 2 when a fresh extension was written into @data, 0 when the
+ * packet already carried a twcc-seqnum (which is kept), -1 on failure.
+ * The internal counter twcc->seqnum is incremented on every call. */
+static gssize
+gst_rtp_header_extension_twcc_write (GstRTPHeaderExtension * ext,
+    const GstBuffer * input_meta, GstRTPHeaderExtensionFlags write_flags,
+    GstBuffer * output, guint8 * data, gsize size)
+{
+  GstRTPHeaderExtensionTWCC *twcc = GST_RTP_HEADER_EXTENSION_TWCC (ext);
+  GstRTPBuffer rtp = { NULL, };
+  gpointer ext_data;
+  guint ext_size;
+  gsize written = 0;
+
+  g_return_val_if_fail (size >= gst_rtp_header_extension_twcc_get_max_size (ext,
+          NULL), -1);
+  g_return_val_if_fail (write_flags &
+      gst_rtp_header_extension_twcc_get_supported_flags (ext), -1);
+
+  if (!gst_rtp_buffer_map (output, GST_MAP_READWRITE, &rtp))
+    goto map_failed;
+
+  /* if there already is a twcc-seqnum inside the packet */
+  if (gst_rtp_buffer_get_extension_onebyte_header (&rtp,
+          gst_rtp_header_extension_get_id (ext), 0, &ext_data, &ext_size)) {
+    if (ext_size < gst_rtp_header_extension_twcc_get_max_size (ext, NULL))
+      goto existing_too_small;
+
+    /* with only one stream, we read the twcc-seqnum */
+    if (twcc->n_streams == 1)
+      twcc->seqnum = GST_READ_UINT16_BE (ext_data);
+  } else {
+    /* with only one stream, we read the existing seqnum */
+    if (twcc->n_streams == 1)
+      twcc->seqnum = gst_rtp_buffer_get_seq (&rtp);
+
+    written = 2;
+  }
+  /* NOTE(review): data[] is written even when an extension already exists
+   * and 0 is returned — presumably the caller discards the bytes in that
+   * case; confirm against GstRTPHeaderExtension write semantics */
+  GST_WRITE_UINT16_BE (data, twcc->seqnum);
+
+  gst_rtp_buffer_unmap (&rtp);
+
+  twcc->seqnum++;
+
+  return written;
+
+  /* ERRORS */
+map_failed:
+  {
+    GST_ERROR ("failed to map buffer %p", output);
+    return -1;
+  }
+
+existing_too_small:
+  {
+    /* NOTE(review): the mapped rtp buffer is not unmapped on this path —
+     * looks like a leak of the map; confirm */
+    GST_ERROR ("Cannot rewrite twcc data of smaller size (%u)", ext_size);
+    return 0;
+  }
+}
+
+/* read vfunc: the twcc-seqnum is only meaningful to the sender/RTCP
+ * feedback loop, so nothing is attached to the buffer on read. */
+static gboolean
+gst_rtp_header_extension_twcc_read (GstRTPHeaderExtension * ext,
+    GstRTPHeaderExtensionFlags read_flags, const guint8 * data, gsize size,
+    GstBuffer * buffer)
+{
+  /* TODO: does this need an extra GstMeta? */
+  return TRUE;
+}
diff --git a/gst/rtpmanager/gstrtphdrext-twcc.h b/gst/rtpmanager/gstrtphdrext-twcc.h
new file mode 100644
index 0000000000..3df0249a68
--- /dev/null
+++ b/gst/rtpmanager/gstrtphdrext-twcc.h
@@ -0,0 +1,83 @@
+/* GStreamer
+ * Copyright (C) 2020 Matthew Waters <matthew@centricular.com>
+ *
+ * gstrtphdrexttwcc.h: transport-wide-cc RTP header extensions for the
+ * Audio/Video RTP Profile
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_RTPHDREXT_TWCC_H__
+#define __GST_RTPHDREXT_TWCC_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtphdrext.h>
+
+G_BEGIN_DECLS
+
+GType gst_rtp_header_extension_twcc_get_type (void);
+#define GST_TYPE_RTP_HEADER_EXTENSION_TWCC (gst_rtp_header_extension_twcc_get_type())
+#define GST_RTP_HEADER_EXTENSION_TWCC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_HEADER_EXTENSION_TWCC,GstRTPHeaderExtensionTWCC))
+#define GST_RTP_HEADER_EXTENSION_TWCC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_HEADER_EXTENSION_TWCC,GstRTPHeaderExtensionTWCCClass))
+#define GST_RTP_HEADER_EXTENSION_TWCC_GET_CLASS(obj) \
+ (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_RTP_HEADER_EXTENSION_TWCC,GstRTPHeaderExtensionTWCCClass))
+#define GST_IS_RTP_HEADER_EXTENSION_TWCC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_HEADER_EXTENSION_TWCC))
+#define GST_IS_RTP_HEADER_EXTENSION_TWCC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_HEADER_EXTENSION_TWCC))
+#define GST_RTP_HEADER_EXTENSION_TWCC_CAST(obj) ((GstRTPHeaderExtensionTWCC *)(obj))
+
+typedef struct _GstRTPHeaderExtensionTWCC GstRTPHeaderExtensionTWCC;
+typedef struct _GstRTPHeaderExtensionTWCCClass GstRTPHeaderExtensionTWCCClass;
+
/**
 * GstRTPHeaderExtensionTWCC:
 * @parent: the parent #GstRTPHeaderExtension
 *
 * Instance struct for a transport-wide-cc RTP Audio/Video header extension.
 *
 * http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01
 */
struct _GstRTPHeaderExtensionTWCC
{
  GstRTPHeaderExtension parent;

  /* next transport-wide sequence number to write; incremented once per
   * written packet */
  guint16 seqnum;
  /* number of streams sharing this extension; when exactly 1, the seqnum is
   * taken from the packet itself instead of our own counter (see write()) */
  guint n_streams;

  /*< private >*/
  gpointer _gst_reserved[GST_PADDING];
};
+
+/**
+ * GstRTPHeaderExtensionTWCCClass:
+ * @parent_class: the parent class
+ */
+struct _GstRTPHeaderExtensionTWCCClass
+{
+ GstRTPHeaderExtensionClass parent_class;
+
+ /*< private >*/
+ gpointer _gst_reserved[GST_PADDING];
+};
+
+GST_ELEMENT_REGISTER_DECLARE (rtphdrexttwcc);
+
+G_END_DECLS
+
+#endif /* __GST_RTPHDREXT_TWCC_H__ */
diff --git a/gst/rtpmanager/gstrtpjitterbuffer.c b/gst/rtpmanager/gstrtpjitterbuffer.c
new file mode 100644
index 0000000000..db43cc8de0
--- /dev/null
+++ b/gst/rtpmanager/gstrtpjitterbuffer.c
@@ -0,0 +1,4926 @@
+/*
+ * Farsight Voice+Video library
+ *
+ * Copyright 2007 Collabora Ltd,
+ * Copyright 2007 Nokia Corporation
+ * @author: Philippe Kalaf <philippe.kalaf@collabora.co.uk>.
+ * Copyright 2007 Wim Taymans <wim.taymans@gmail.com>
+ * Copyright 2015 Kurento (http://kurento.org/)
+ * @author: Miguel París <mparisdiaz@gmail.com>
+ * Copyright 2016 Pexip AS
+ * @author: Havard Graff <havard@pexip.com>
+ * @author: Stian Selnes <stian@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+/**
+ * SECTION:element-rtpjitterbuffer
+ * @title: rtpjitterbuffer
+ *
+ * This element reorders and removes duplicate RTP packets as they are received
+ * from a network source.
+ *
+ * The element needs the clock-rate of the RTP payload in order to estimate the
+ * delay. This information is obtained either from the caps on the sink pad or,
+ * when no caps are present, from the #GstRtpJitterBuffer::request-pt-map signal.
+ * To clear the previous pt-map use the #GstRtpJitterBuffer::clear-pt-map signal.
+ *
+ * The rtpjitterbuffer will wait for missing packets up to a configurable time
+ * limit using the #GstRtpJitterBuffer:latency property. Packets arriving too
+ * late are considered to be lost packets. If the #GstRtpJitterBuffer:do-lost
+ * property is set, lost packets will result in a custom serialized downstream
+ * event of name GstRTPPacketLost. The lost packet events are usually used by a
+ * depayloader or other element to create concealment data or some other logic
+ * to gracefully handle the missing packets.
+ *
+ * The jitterbuffer will use the DTS (or PTS if no DTS is set) of the incoming
+ * buffer and the rtptime inside the RTP packet to create a PTS on the outgoing
+ * buffer.
+ *
+ * The jitterbuffer can also be configured to send early retransmission events
+ * upstream by setting the #GstRtpJitterBuffer:do-retransmission property. In
+ * this mode, the jitterbuffer tries to estimate when a packet should arrive and
+ * sends a custom upstream event named GstRTPRetransmissionRequest when the
+ * packet is considered late. The initial expected packet arrival time is
+ * calculated as follows:
+ *
+ * - If seqnum N arrived at time T, seqnum N+1 is expected to arrive at
+ * T + packet-spacing + #GstRtpJitterBuffer:rtx-delay. The packet spacing is
+ * calculated from the DTS (or PTS is no DTS) of two consecutive RTP
+ * packets with different rtptime.
+ *
+ * - If seqnum N0 arrived at time T0 and seqnum Nm arrived at time Tm,
+ * seqnum Ni is expected at time Ti = T0 + i*(Tm - T0)/(Nm - N0). Any
+ * previously scheduled timeout is overwritten.
+ *
+ * - If seqnum N arrived, all seqnum older than
+ * N - #GstRtpJitterBuffer:rtx-delay-reorder are considered late
+ * immediately. This is to request fast feedback for abnormally reorder
+ * packets before any of the previous timeouts is triggered.
+ *
+ * A late packet triggers the GstRTPRetransmissionRequest custom upstream
+ * event. After the initial timeout expires and the retransmission event is
+ * sent, the timeout is scheduled for
+ * T + #GstRtpJitterBuffer:rtx-retry-timeout. If the missing packet did not
+ * arrive after #GstRtpJitterBuffer:rtx-retry-timeout, a new
+ * GstRTPRetransmissionRequest is sent upstream and the timeout is rescheduled
+ * again for T + #GstRtpJitterBuffer:rtx-retry-timeout. This repeats until
+ * #GstRtpJitterBuffer:rtx-retry-period elapsed, at which point no further
+ * retransmission requests are sent and the regular logic is performed to
+ * schedule a lost packet as discussed above.
+ *
+ * This element acts as a live element and so adds #GstRtpJitterBuffer:latency
+ * to the pipeline.
+ *
+ * This element will automatically be used inside rtpbin.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 rtspsrc location=rtsp://192.168.1.133:8554/mpeg1or2AudioVideoTest ! rtpjitterbuffer ! rtpmpvdepay ! mpeg2dec ! xvimagesink
+ * ]| Connect to a streaming server and decode the MPEG video. The jitterbuffer is
+ * inserted into the pipeline to smooth out network jitter and to reorder the
+ * out-of-order RTP packets.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/net/net.h>
+
+#include "gstrtpjitterbuffer.h"
+#include "rtpjitterbuffer.h"
+#include "rtpstats.h"
+#include "rtptimerqueue.h"
+
+#include <gst/glib-compat-private.h>
+
+GST_DEBUG_CATEGORY (rtpjitterbuffer_debug);
+#define GST_CAT_DEFAULT (rtpjitterbuffer_debug)
+
+/* RTPJitterBuffer signals and args */
+enum
+{
+ SIGNAL_REQUEST_PT_MAP,
+ SIGNAL_CLEAR_PT_MAP,
+ SIGNAL_HANDLE_SYNC,
+ SIGNAL_ON_NPT_STOP,
+ SIGNAL_SET_ACTIVE,
+ LAST_SIGNAL
+};
+
+#define DEFAULT_LATENCY_MS 200
+#define DEFAULT_DROP_ON_LATENCY FALSE
+#define DEFAULT_TS_OFFSET 0
+#define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT 0
+#define DEFAULT_DO_LOST FALSE
+#define DEFAULT_POST_DROP_MESSAGES FALSE
+#define DEFAULT_DROP_MESSAGES_INTERVAL_MS 200
+#define DEFAULT_MODE RTP_JITTER_BUFFER_MODE_SLAVE
+#define DEFAULT_PERCENT 0
+#define DEFAULT_DO_RETRANSMISSION FALSE
+#define DEFAULT_RTX_NEXT_SEQNUM TRUE
+#define DEFAULT_RTX_DELAY -1
+#define DEFAULT_RTX_MIN_DELAY 0
+#define DEFAULT_RTX_DELAY_REORDER 3
+#define DEFAULT_RTX_RETRY_TIMEOUT -1
+#define DEFAULT_RTX_MIN_RETRY_TIMEOUT -1
+#define DEFAULT_RTX_RETRY_PERIOD -1
+#define DEFAULT_RTX_MAX_RETRIES -1
+#define DEFAULT_RTX_DEADLINE -1
+#define DEFAULT_RTX_STATS_TIMEOUT 1000
+#define DEFAULT_MAX_RTCP_RTP_TIME_DIFF 1000
+#define DEFAULT_MAX_DROPOUT_TIME 60000
+#define DEFAULT_MAX_MISORDER_TIME 2000
+#define DEFAULT_RFC7273_SYNC FALSE
+#define DEFAULT_FASTSTART_MIN_PACKETS 0
+
+#define DEFAULT_AUTO_RTX_DELAY (20 * GST_MSECOND)
+#define DEFAULT_AUTO_RTX_TIMEOUT (40 * GST_MSECOND)
+
+enum
+{
+ PROP_0,
+ PROP_LATENCY,
+ PROP_DROP_ON_LATENCY,
+ PROP_TS_OFFSET,
+ PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ PROP_DO_LOST,
+ PROP_POST_DROP_MESSAGES,
+ PROP_DROP_MESSAGES_INTERVAL,
+ PROP_MODE,
+ PROP_PERCENT,
+ PROP_DO_RETRANSMISSION,
+ PROP_RTX_NEXT_SEQNUM,
+ PROP_RTX_DELAY,
+ PROP_RTX_MIN_DELAY,
+ PROP_RTX_DELAY_REORDER,
+ PROP_RTX_RETRY_TIMEOUT,
+ PROP_RTX_MIN_RETRY_TIMEOUT,
+ PROP_RTX_RETRY_PERIOD,
+ PROP_RTX_MAX_RETRIES,
+ PROP_RTX_DEADLINE,
+ PROP_RTX_STATS_TIMEOUT,
+ PROP_STATS,
+ PROP_MAX_RTCP_RTP_TIME_DIFF,
+ PROP_MAX_DROPOUT_TIME,
+ PROP_MAX_MISORDER_TIME,
+ PROP_RFC7273_SYNC,
+ PROP_FASTSTART_MIN_PACKETS
+};
+
/* All jitterbuffer state is protected by priv->jbuf_lock.  The macros below
 * implement lock/unlock plus condition-variable waits on top of it; the
 * *_CHECK / WAIT variants re-check priv->srcresult so that a non-OK flow
 * return (flush/shutdown) aborts the wait via "goto label". */
#define JBUF_LOCK(priv) G_STMT_START { \
  GST_TRACE("Locking from thread %p", g_thread_self()); \
  (g_mutex_lock (&(priv)->jbuf_lock)); \
  GST_TRACE("Locked from thread %p", g_thread_self()); \
  } G_STMT_END

#define JBUF_LOCK_CHECK(priv,label) G_STMT_START { \
  JBUF_LOCK (priv); \
  if (G_UNLIKELY (priv->srcresult != GST_FLOW_OK)) \
    goto label; \
} G_STMT_END
#define JBUF_UNLOCK(priv) G_STMT_START { \
  GST_TRACE ("Unlocking from thread %p", g_thread_self ()); \
  (g_mutex_unlock (&(priv)->jbuf_lock)); \
} G_STMT_END

/* wait/signal on the queue condition; waiting_queue counts waiters so the
 * signal side can skip g_cond_signal() when nobody waits */
#define JBUF_WAIT_QUEUE(priv)   G_STMT_START {            \
  GST_DEBUG ("waiting queue");                            \
  (priv)->waiting_queue++;                                \
  g_cond_wait (&(priv)->jbuf_queue, &(priv)->jbuf_lock);  \
  (priv)->waiting_queue--;                                \
  GST_DEBUG ("waiting queue done");                       \
} G_STMT_END
#define JBUF_SIGNAL_QUEUE(priv) G_STMT_START {            \
  if (G_UNLIKELY ((priv)->waiting_queue)) {               \
    GST_DEBUG ("signal queue, %d waiters", (priv)->waiting_queue); \
    g_cond_signal (&(priv)->jbuf_queue);                  \
  }                                                       \
} G_STMT_END

/* wait/signal used by the timer thread */
#define JBUF_WAIT_TIMER(priv)   G_STMT_START {            \
  GST_DEBUG ("waiting timer");                            \
  (priv)->waiting_timer++;                                \
  g_cond_wait (&(priv)->jbuf_timer, &(priv)->jbuf_lock);  \
  (priv)->waiting_timer--;                                \
  GST_DEBUG ("waiting timer done");                       \
} G_STMT_END
#define JBUF_SIGNAL_TIMER(priv) G_STMT_START {            \
  if (G_UNLIKELY ((priv)->waiting_timer)) {               \
    GST_DEBUG ("signal timer, %d waiters", (priv)->waiting_timer); \
    g_cond_signal (&(priv)->jbuf_timer);                  \
  }                                                       \
} G_STMT_END

/* wait for a serialized event to be consumed; aborts to @label when
 * srcresult turns non-OK before or during the wait */
#define JBUF_WAIT_EVENT(priv,label) G_STMT_START {       \
  if (G_UNLIKELY (priv->srcresult != GST_FLOW_OK))       \
    goto label;                                          \
  GST_DEBUG ("waiting event");                           \
  (priv)->waiting_event = TRUE;                          \
  g_cond_wait (&(priv)->jbuf_event, &(priv)->jbuf_lock); \
  (priv)->waiting_event = FALSE;                         \
  GST_DEBUG ("waiting event done");                      \
  if (G_UNLIKELY (priv->srcresult != GST_FLOW_OK))       \
    goto label;                                          \
} G_STMT_END
#define JBUF_SIGNAL_EVENT(priv) G_STMT_START {           \
  if (G_UNLIKELY ((priv)->waiting_event)) {              \
    GST_DEBUG ("signal event");                          \
    g_cond_signal (&(priv)->jbuf_event);                 \
  }                                                      \
} G_STMT_END

/* wait for a serialized query to be answered; the answer is carried in
 * priv->last_query by JBUF_SIGNAL_QUERY */
#define JBUF_WAIT_QUERY(priv,label) G_STMT_START {       \
  if (G_UNLIKELY (priv->srcresult != GST_FLOW_OK))       \
    goto label;                                          \
  GST_DEBUG ("waiting query");                           \
  (priv)->waiting_query = TRUE;                          \
  g_cond_wait (&(priv)->jbuf_query, &(priv)->jbuf_lock); \
  (priv)->waiting_query = FALSE;                         \
  GST_DEBUG ("waiting query done");                      \
  if (G_UNLIKELY (priv->srcresult != GST_FLOW_OK))       \
    goto label;                                          \
} G_STMT_END
#define JBUF_SIGNAL_QUERY(priv,res) G_STMT_START {       \
  (priv)->last_query = res;                              \
  if (G_UNLIKELY ((priv)->waiting_query)) {              \
    GST_DEBUG ("signal query");                          \
    g_cond_signal (&(priv)->jbuf_query);                 \
  }                                                      \
} G_STMT_END

/* TRUE when @buffer is flagged as a retransmitted packet */
#define GST_BUFFER_IS_RETRANSMISSION(buffer) \
  GST_BUFFER_FLAG_IS_SET (buffer, GST_RTP_BUFFER_FLAG_RETRANSMISSION)
+
+#if !GLIB_CHECK_VERSION(2, 60, 0)
+#define g_queue_clear_full queue_clear_full
+static void
+queue_clear_full (GQueue * queue, GDestroyNotify free_func)
+{
+ gpointer data;
+
+ while ((data = g_queue_pop_head (queue)) != NULL)
+ free_func (data);
+}
+#endif
+
/* Instance-private state of the element.  NOTE(review): fields appear to be
 * protected by jbuf_lock (see the JBUF_* macros) unless used only from one
 * streaming thread — confirm per field before relying on it. */
struct _GstRtpJitterBufferPrivate
{
  /* pads */
  GstPad *sinkpad, *srcpad;
  GstPad *rtcpsinkpad;

  /* the underlying jitterbuffer implementation and its lock */
  RTPJitterBuffer *jbuf;
  GMutex jbuf_lock;
  /* waiter bookkeeping for the JBUF_WAIT_*/JBUF_SIGNAL_* macros */
  guint waiting_queue;
  GCond jbuf_queue;
  guint waiting_timer;
  GCond jbuf_timer;
  gboolean waiting_event;
  GCond jbuf_event;
  gboolean waiting_query;
  GCond jbuf_query;
  /* result of the last serialized query (set by JBUF_SIGNAL_QUERY) */
  gboolean last_query;
  gboolean discont;
  gboolean ts_discont;
  gboolean active;
  guint64 out_offset;
  guint32 segment_seqnum;

  /* dedicated timer thread state */
  gboolean timer_running;
  GThread *timer_thread;

  /* properties */
  guint latency_ms;
  guint64 latency_ns;
  gboolean drop_on_latency;
  gint64 ts_offset;
  guint64 max_ts_offset_adjustment;
  gboolean do_lost;
  gboolean post_drop_messages;
  guint drop_messages_interval_ms;
  gboolean do_retransmission;
  gboolean rtx_next_seqnum;
  gint rtx_delay;
  guint rtx_min_delay;
  gint rtx_delay_reorder;
  gint rtx_retry_timeout;
  gint rtx_min_retry_timeout;
  gint rtx_retry_period;
  gint rtx_max_retries;
  guint rtx_stats_timeout;
  gint rtx_deadline_ms;
  gint max_rtcp_rtp_time_diff;
  guint32 max_dropout_time;
  guint32 max_misorder_time;
  guint faststart_min_packets;

  /* the last seqnum we pushed out */
  guint32 last_popped_seqnum;
  /* the next expected seqnum we push */
  guint32 next_seqnum;
  /* seqnum-base, if known */
  guint32 seqnum_base;
  /* last output time */
  GstClockTime last_out_time;
  /* last valid input timestamp and rtptime pair */
  GstClockTime ips_pts;
  guint64 ips_rtptime;
  GstClockTime packet_spacing;
  gint equidistant;

  GQueue gap_packets;

  /* the next expected seqnum we receive */
  GstClockTime last_in_pts;
  guint32 next_in_seqnum;

  /* "normal" timers */
  RtpTimerQueue *timers;
  /* timers used for RTX statistics backlog */
  RtpTimerQueue *rtx_stats_timers;

  /* start and stop ranges */
  GstClockTime npt_start;
  GstClockTime npt_stop;
  guint64 ext_timestamp;
  guint64 last_elapsed;
  guint64 estimated_eos;
  GstClockID eos_id;

  /* state */
  gboolean eos;
  guint last_percent;

  /* clock rate and rtp timestamp offset */
  gint last_pt;
  gint32 clock_rate;
  gint64 clock_base;
  gint64 ts_offset_remainder;

  /* when we are shutting down */
  GstFlowReturn srcresult;
  gboolean blocked;

  /* for sync */
  GstSegment segment;
  GstClockID clock_id;
  GstClockTime timer_timeout;
  guint16 timer_seqnum;
  /* the latency of the upstream peer, we have to take this into account when
   * synchronizing the buffers. */
  GstClockTime peer_latency;
  guint64 ext_rtptime;
  GstBuffer *last_sr;

  /* some accounting */
  guint64 num_pushed;
  guint64 num_lost;
  guint64 num_late;
  guint64 num_duplicates;
  guint64 num_rtx_requests;
  guint64 num_rtx_success;
  guint64 num_rtx_failed;
  gdouble avg_rtx_num;
  guint64 avg_rtx_rtt;
  RTPPacketRateCtx packet_rate_ctx;

  /* for the jitter */
  GstClockTime last_dts;
  GstClockTime last_pts;
  guint64 last_rtptime;
  GstClockTime avg_jitter;

  /* for dropped packet messages */
  GstClockTime last_drop_msg_timestamp;
  /* accumulators; reset every time a drop message is posted */
  guint num_too_late;
  guint num_drop_on_latency;
};
/* Why a packet was dropped; used for the per-reason accumulators above and
 * for the "drop-msg" bus message (see the post-drop-messages property). */
typedef enum
{
  REASON_TOO_LATE,              /* packet arrived too late */
  REASON_DROP_ON_LATENCY        /* dropped because drop-on-latency is set */
} DropMessageReason;
+
/* always sink pad: RTP packets to be reordered/deduplicated */
static GstStaticPadTemplate gst_rtp_jitter_buffer_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp"
        /* "clock-rate = (int) [ 1, 2147483647 ], "
         * "payload = (int) , "
         * "encoding-name = (string) "
         */ )
    );

/* optional request sink pad for RTCP, used for sync (SRs) */
static GstStaticPadTemplate gst_rtp_jitter_buffer_sink_rtcp_template =
GST_STATIC_PAD_TEMPLATE ("sink_rtcp",
    GST_PAD_SINK,
    GST_PAD_REQUEST,
    GST_STATIC_CAPS ("application/x-rtcp")
    );

/* always src pad: smoothed, in-order RTP output */
static GstStaticPadTemplate gst_rtp_jitter_buffer_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp"
        /* "payload = (int) , "
         * "clock-rate = (int) , "
         * "encoding-name = (string) "
         */ )
    );
+
+static guint gst_rtp_jitter_buffer_signals[LAST_SIGNAL] = { 0 };
+
+#define gst_rtp_jitter_buffer_parent_class parent_class
+G_DEFINE_TYPE_WITH_PRIVATE (GstRtpJitterBuffer, gst_rtp_jitter_buffer,
+ GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (rtpjitterbuffer, "rtpjitterbuffer", GST_RANK_NONE,
+ GST_TYPE_RTP_JITTER_BUFFER);
+
+/* object overrides */
+static void gst_rtp_jitter_buffer_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_rtp_jitter_buffer_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static void gst_rtp_jitter_buffer_finalize (GObject * object);
+
+/* element overrides */
+static GstStateChangeReturn gst_rtp_jitter_buffer_change_state (GstElement
+ * element, GstStateChange transition);
+static GstPad *gst_rtp_jitter_buffer_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * filter);
+static void gst_rtp_jitter_buffer_release_pad (GstElement * element,
+ GstPad * pad);
+static GstClock *gst_rtp_jitter_buffer_provide_clock (GstElement * element);
+static gboolean gst_rtp_jitter_buffer_set_clock (GstElement * element,
+ GstClock * clock);
+
+/* pad overrides */
+static GstCaps *gst_rtp_jitter_buffer_getcaps (GstPad * pad, GstCaps * filter);
+static GstIterator *gst_rtp_jitter_buffer_iterate_internal_links (GstPad * pad,
+ GstObject * parent);
+
+/* sinkpad overrides */
+static gboolean gst_rtp_jitter_buffer_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static GstFlowReturn gst_rtp_jitter_buffer_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+static GstFlowReturn gst_rtp_jitter_buffer_chain_list (GstPad * pad,
+ GstObject * parent, GstBufferList * buffer_list);
+
+static gboolean gst_rtp_jitter_buffer_sink_rtcp_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static GstFlowReturn gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+
+static gboolean gst_rtp_jitter_buffer_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+
+/* srcpad overrides */
+static gboolean gst_rtp_jitter_buffer_src_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_rtp_jitter_buffer_src_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+static void gst_rtp_jitter_buffer_loop (GstRtpJitterBuffer * jitterbuffer);
+static gboolean gst_rtp_jitter_buffer_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+
+static void
+gst_rtp_jitter_buffer_clear_pt_map (GstRtpJitterBuffer * jitterbuffer);
+static GstClockTime
+gst_rtp_jitter_buffer_set_active (GstRtpJitterBuffer * jitterbuffer,
+ gboolean active, guint64 base_time);
+static void do_handle_sync (GstRtpJitterBuffer * jitterbuffer);
+
+static void unschedule_current_timer (GstRtpJitterBuffer * jitterbuffer);
+
+static void wait_next_timeout (GstRtpJitterBuffer * jitterbuffer);
+
+static GstStructure *gst_rtp_jitter_buffer_create_stats (GstRtpJitterBuffer *
+ jitterbuffer);
+
+static void update_rtx_stats (GstRtpJitterBuffer * jitterbuffer,
+ const RtpTimer * timer, GstClockTime dts, gboolean success);
+
+static GstClockTime get_current_running_time (GstRtpJitterBuffer *
+ jitterbuffer);
+
+static void
+gst_rtp_jitter_buffer_class_init (GstRtpJitterBufferClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->finalize = gst_rtp_jitter_buffer_finalize;
+
+ gobject_class->set_property = gst_rtp_jitter_buffer_set_property;
+ gobject_class->get_property = gst_rtp_jitter_buffer_get_property;
+
+ /**
+ * GstRtpJitterBuffer:latency:
+ *
+ * The maximum latency of the jitterbuffer. Packets will be kept in the buffer
+ * for at most this time.
+ */
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_uint ("latency", "Buffer latency in ms",
+ "Amount of ms to buffer", 0, G_MAXUINT, DEFAULT_LATENCY_MS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:drop-on-latency:
+ *
+ * Drop oldest buffers when the queue is completely filled.
+ */
+ g_object_class_install_property (gobject_class, PROP_DROP_ON_LATENCY,
+ g_param_spec_boolean ("drop-on-latency",
+ "Drop buffers when maximum latency is reached",
+ "Tells the jitterbuffer to never exceed the given latency in size",
+ DEFAULT_DROP_ON_LATENCY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:ts-offset:
+ *
+ * Adjust GStreamer output buffer timestamps in the jitterbuffer with offset.
+ * This is mainly used to ensure interstream synchronisation.
+ */
+ g_object_class_install_property (gobject_class, PROP_TS_OFFSET,
+ g_param_spec_int64 ("ts-offset", "Timestamp Offset",
+ "Adjust buffer timestamps with offset in nanoseconds", G_MININT64,
+ G_MAXINT64, DEFAULT_TS_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:max-ts-offset-adjustment:
+ *
+ * The maximum number of nanoseconds per frame that time offset may be
+ * adjusted with. This is used to avoid sudden large changes to time stamps.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ g_param_spec_uint64 ("max-ts-offset-adjustment",
+ "Max Timestamp Offset Adjustment",
+ "The maximum number of nanoseconds per frame that time stamp "
+ "offsets may be adjusted (0 = no limit).", 0, G_MAXUINT64,
+ DEFAULT_MAX_TS_OFFSET_ADJUSTMENT, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:do-lost:
+ *
+ * Send out a GstRTPPacketLost event downstream when a packet is considered
+ * lost.
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_LOST,
+ g_param_spec_boolean ("do-lost", "Do Lost",
+ "Send an event downstream when a packet is lost", DEFAULT_DO_LOST,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:post-drop-messages:
+ *
+ * Post custom messages to the bus when a packet is dropped by the
+ * jitterbuffer due to arriving too late, being already considered lost,
+ * or being dropped due to the drop-on-latency property being enabled.
+ * Message is of type GST_MESSAGE_ELEMENT and contains a GstStructure named
+ * "drop-msg" with the following fields:
+ *
+ * * #guint `seqnum`: Seqnum of dropped packet.
+ * * #guint64 `timestamp`: PTS timestamp of dropped packet.
+ * * #GString `reason`: Reason for dropping the packet.
+ * * #guint `num-too-late`: Number of packets arriving too late since
+ * last drop message.
+ * * #guint `num-drop-on-latency`: Number of packets dropped due to the
+ * drop-on-latency property since last drop message.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_POST_DROP_MESSAGES,
+ g_param_spec_boolean ("post-drop-messages", "Post drop messages",
+ "Post a custom message to the bus when a packet is dropped by the jitterbuffer",
+ DEFAULT_POST_DROP_MESSAGES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:drop-messages-interval:
+ *
+ * Minimal time in milliseconds between posting dropped packet messages, if enabled
+ * by setting property by setting #GstRtpJitterBuffer:post-drop-messages to %TRUE.
+ * If interval is set to 0, every dropped packet will result in a drop message being posted.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_DROP_MESSAGES_INTERVAL,
+ g_param_spec_uint ("drop-messages-interval",
+ "Drop message interval",
+ "Minimal time between posting dropped packet messages", 0,
+ G_MAXUINT, DEFAULT_DROP_MESSAGES_INTERVAL_MS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:mode:
+ *
+ * Control the buffering and timestamping mode used by the jitterbuffer.
+ */
+ g_object_class_install_property (gobject_class, PROP_MODE,
+ g_param_spec_enum ("mode", "Mode",
+ "Control the buffering algorithm in use", RTP_TYPE_JITTER_BUFFER_MODE,
+ DEFAULT_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:percent:
+ *
+ * The percent of the jitterbuffer that is filled.
+ */
+ g_object_class_install_property (gobject_class, PROP_PERCENT,
+ g_param_spec_int ("percent", "percent",
+ "The buffer filled percent", 0, 100,
+ 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:do-retransmission:
+ *
+ * Send out a GstRTPRetransmission event upstream when a packet is considered
+ * late and should be retransmitted.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RETRANSMISSION,
+ g_param_spec_boolean ("do-retransmission", "Do Retransmission",
+ "Send retransmission events upstream when a packet is late",
+ DEFAULT_DO_RETRANSMISSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:rtx-next-seqnum
+ *
+ * Estimate when the next packet should arrive and schedule a retransmission
+ * request for it.
+ * This is, when packet N arrives, a GstRTPRetransmission event is schedule
+ * for packet N+1. So it will be requested if it does not arrive at the expected time.
+ * The expected time is calculated using the dts of N and the packet spacing.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_NEXT_SEQNUM,
+ g_param_spec_boolean ("rtx-next-seqnum", "RTX next seqnum",
+ "Estimate when the next packet should arrive and schedule a "
+ "retransmission request for it.",
+ DEFAULT_RTX_NEXT_SEQNUM, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:rtx-delay:
+ *
+ * When a packet did not arrive at the expected time, wait this extra amount
+ * of time before sending a retransmission event.
+ *
+ * When -1 is used, the max jitter will be used as extra delay.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_DELAY,
+ g_param_spec_int ("rtx-delay", "RTX Delay",
+ "Extra time in ms to wait before sending retransmission "
+ "event (-1 automatic)", -1, G_MAXINT, DEFAULT_RTX_DELAY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:rtx-min-delay:
+ *
+ * When a packet did not arrive at the expected time, wait at least this extra amount
+ * of time before sending a retransmission event.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_MIN_DELAY,
+ g_param_spec_uint ("rtx-min-delay", "Minimum RTX Delay",
+ "Minimum time in ms to wait before sending retransmission "
+ "event", 0, G_MAXUINT, DEFAULT_RTX_MIN_DELAY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:rtx-delay-reorder:
+ *
+ * Assume that a retransmission event should be sent when we see
+ * this much packet reordering.
+ *
+ * When -1 is used, the value will be estimated based on observed packet
+ * reordering. When 0 is used packet reordering alone will not cause a
+ * retransmission event (Since 1.10).
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_DELAY_REORDER,
+ g_param_spec_int ("rtx-delay-reorder", "RTX Delay Reorder",
+ "Sending retransmission event when this much reordering "
+ "(0 disable)",
+ -1, G_MAXINT, DEFAULT_RTX_DELAY_REORDER,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:rtx-retry-timeout:
+ *
+ * When no packet has been received after sending a retransmission event
+ * for this time, retry sending a retransmission event.
+ *
+ * When -1 is used, the value will be estimated based on observed round
+ * trip time.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_RETRY_TIMEOUT,
+ g_param_spec_int ("rtx-retry-timeout", "RTX Retry Timeout",
+ "Retry sending a transmission event after this timeout in "
+ "ms (-1 automatic)", -1, G_MAXINT, DEFAULT_RTX_RETRY_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:rtx-min-retry-timeout:
+ *
+ * The minimum amount of time between retry timeouts. When
+ * GstRtpJitterBuffer::rtx-retry-timeout is -1, this value ensures a
+ * minimum interval between retry timeouts.
+ *
+ * When -1 is used, the value will be estimated based on the
+ * packet spacing.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_MIN_RETRY_TIMEOUT,
+ g_param_spec_int ("rtx-min-retry-timeout", "RTX Min Retry Timeout",
+ "Minimum timeout between sending a transmission event in "
+ "ms (-1 automatic)", -1, G_MAXINT, DEFAULT_RTX_MIN_RETRY_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:rtx-retry-period:
+ *
+ * The amount of time to try to get a retransmission.
+ *
+ * When -1 is used, the value will be estimated based on the jitterbuffer
+ * latency and the observed round trip time.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_RETRY_PERIOD,
+ g_param_spec_int ("rtx-retry-period", "RTX Retry Period",
+ "Try to get a retransmission for this many ms "
+ "(-1 automatic)", -1, G_MAXINT, DEFAULT_RTX_RETRY_PERIOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:rtx-max-retries:
+ *
+ * The maximum number of retries to request a retransmission.
+ *
+ * This implies that as maximum (rtx-max-retries + 1) retransmissions will be requested.
+ * When -1 is used, the number of retransmission request will not be limited.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_MAX_RETRIES,
+ g_param_spec_int ("rtx-max-retries", "RTX Max Retries",
+ "The maximum number of retries to request a retransmission. "
+ "(-1 not limited)", -1, G_MAXINT, DEFAULT_RTX_MAX_RETRIES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:rtx-deadline:
+ *
+ * The deadline for a valid RTX request in ms.
+ *
+ * How long the RTX RTCP will be valid for.
+ * When -1 is used, the size of the jitterbuffer will be used.
+ *
+ * Since: 1.10
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_DEADLINE,
+ g_param_spec_int ("rtx-deadline", "RTX Deadline (ms)",
+ "The deadline for a valid RTX request in milliseconds. "
+ "(-1 automatic)", -1, G_MAXINT, DEFAULT_RTX_DEADLINE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+/**
+ * GstRtpJitterBuffer:rtx-stats-timeout:
+ *
+ * The time to wait for a retransmitted packet after it has been
+ * considered lost in order to collect RTX statistics.
+ *
+ * Since: 1.10
+ */
+ g_object_class_install_property (gobject_class, PROP_RTX_STATS_TIMEOUT,
+ g_param_spec_uint ("rtx-stats-timeout", "RTX Statistics Timeout",
+ "The time to wait for a retransmitted packet after it has been "
+ "considered lost in order to collect statistics (ms)",
+ 0, G_MAXUINT, DEFAULT_RTX_STATS_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_DROPOUT_TIME,
+ g_param_spec_uint ("max-dropout-time", "Max dropout time",
+ "The maximum time (milliseconds) of missing packets tolerated.",
+ 0, G_MAXINT32, DEFAULT_MAX_DROPOUT_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_MISORDER_TIME,
+ g_param_spec_uint ("max-misorder-time", "Max misorder time",
+ "The maximum time (milliseconds) of misordered packets tolerated.",
+ 0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRtpJitterBuffer:stats:
+ *
+ * Various jitterbuffer statistics. This property returns a GstStructure
+ * with name application/x-rtp-jitterbuffer-stats with the following fields:
+ *
+ * * #guint64 `num-pushed`: the number of packets pushed out.
+ * * #guint64 `num-lost`: the number of packets considered lost.
+ * * #guint64 `num-late`: the number of packets arriving too late.
+ * * #guint64 `num-duplicates`: the number of duplicate packets.
+ * * #guint64 `avg-jitter`: the average jitter in nanoseconds.
+ * * #guint64 `rtx-count`: the number of retransmissions requested.
+ * * #guint64 `rtx-success-count`: the number of successful retransmissions.
+ * * #gdouble `rtx-per-packet`: average number of RTX per packet.
+ * * #guint64 `rtx-rtt`: average round trip time per RTX.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_STATS,
+ g_param_spec_boxed ("stats", "Statistics",
+ "Various statistics", GST_TYPE_STRUCTURE,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:max-rtcp-rtp-time-diff:
+ *
+ * The maximum amount of time in ms that the RTP time in the RTCP SRs
+ * is allowed to be ahead of the last RTP packet we received. Use
+ * -1 to disable ignoring of RTCP packets.
+ *
+ * Since: 1.8
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_RTCP_RTP_TIME_DIFF,
+ g_param_spec_int ("max-rtcp-rtp-time-diff", "Max RTCP RTP Time Diff",
+ "Maximum amount of time in ms that the RTP time in RTCP SRs "
+ "is allowed to be ahead (-1 disabled)", -1, G_MAXINT,
+ DEFAULT_MAX_RTCP_RTP_TIME_DIFF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RFC7273_SYNC,
+ g_param_spec_boolean ("rfc7273-sync", "Sync on RFC7273 clock",
+ "Synchronize received streams to the RFC7273 clock "
+ "(requires clock and offset to be provided)", DEFAULT_RFC7273_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer:faststart-min-packets:
+ *
+ * The number of consecutive packets needed to start (set to 0 to
+ * disable faststart. The jitterbuffer will by default start after the
+ * latency has elapsed)
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_FASTSTART_MIN_PACKETS,
+ g_param_spec_uint ("faststart-min-packets", "Faststart minimum packets",
+ "The number of consecutive packets needed to start (set to 0 to "
+ "disable faststart. The jitterbuffer will by default start after "
+ "the latency has elapsed)",
+ 0, G_MAXUINT, DEFAULT_FASTSTART_MIN_PACKETS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRtpJitterBuffer::request-pt-map:
+ * @buffer: the object which received the signal
+ * @pt: the pt
+ *
+ * Request the payload type as #GstCaps for @pt.
+ */
+ gst_rtp_jitter_buffer_signals[SIGNAL_REQUEST_PT_MAP] =
+ g_signal_new ("request-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpJitterBufferClass,
+ request_pt_map), NULL, NULL, NULL, GST_TYPE_CAPS, 1, G_TYPE_UINT);
+ /**
+ * GstRtpJitterBuffer::handle-sync:
+ * @buffer: the object which received the signal
+ * @struct: a GstStructure containing sync values.
+ *
+ * Be notified of new sync values.
+ */
+ gst_rtp_jitter_buffer_signals[SIGNAL_HANDLE_SYNC] =
+ g_signal_new ("handle-sync", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpJitterBufferClass,
+ handle_sync), NULL, NULL, NULL,
+ G_TYPE_NONE, 1, GST_TYPE_STRUCTURE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRtpJitterBuffer::on-npt-stop:
+ * @buffer: the object which received the signal
+ *
+ * Signal that the jitterbuffer has pushed the RTP packet that corresponds to
+ * the npt-stop position.
+ */
+ gst_rtp_jitter_buffer_signals[SIGNAL_ON_NPT_STOP] =
+ g_signal_new ("on-npt-stop", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpJitterBufferClass,
+ on_npt_stop), NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ /**
+ * GstRtpJitterBuffer::clear-pt-map:
+ * @buffer: the object which received the signal
+ *
+ * Invalidate the clock-rate as obtained with the
+ * #GstRtpJitterBuffer::request-pt-map signal.
+ */
+ gst_rtp_jitter_buffer_signals[SIGNAL_CLEAR_PT_MAP] =
+ g_signal_new ("clear-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (GstRtpJitterBufferClass, clear_pt_map), NULL, NULL,
+ NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ /**
+ * GstRtpJitterBuffer::set-active:
+ * @buffer: the object which received the signal
+ *
+ * Start pushing out packets with the given base time. This signal is only
+ * useful in buffering mode.
+ *
+ * Returns: the time of the last pushed packet.
+ */
+ gst_rtp_jitter_buffer_signals[SIGNAL_SET_ACTIVE] =
+ g_signal_new ("set-active", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (GstRtpJitterBufferClass, set_active), NULL, NULL,
+ NULL, G_TYPE_UINT64, 2, G_TYPE_BOOLEAN, G_TYPE_UINT64);
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_change_state);
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_request_new_pad);
+ gstelement_class->release_pad =
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_release_pad);
+ gstelement_class->provide_clock =
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_provide_clock);
+ gstelement_class->set_clock =
+ GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_set_clock);
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_jitter_buffer_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_jitter_buffer_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_jitter_buffer_sink_rtcp_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP packet jitter-buffer", "Filter/Network/RTP",
+ "A buffer that deals with network jitter and other transmission faults",
+ "Philippe Kalaf <philippe.kalaf@collabora.co.uk>, "
+ "Wim Taymans <wim.taymans@gmail.com>");
+
+ klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_clear_pt_map);
+ klass->set_active = GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_set_active);
+
+ GST_DEBUG_CATEGORY_INIT
+ (rtpjitterbuffer_debug, "rtpjitterbuffer", 0, "RTP Jitter Buffer");
+ GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_jitter_buffer_chain_rtcp);
+
+ gst_type_mark_as_plugin_api (RTP_TYPE_JITTER_BUFFER_MODE, 0);
+}
+
+/* GObject instance init: set every property to its default, create the
+ * internal packet queue, the timer queues and the locking primitives, and
+ * add the always-present src and sink pads.  The RTCP sink pad is only
+ * created on request (see gst_rtp_jitter_buffer_request_new_pad()). */
+static void
+gst_rtp_jitter_buffer_init (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv;
+
+  priv = gst_rtp_jitter_buffer_get_instance_private (jitterbuffer);
+  jitterbuffer->priv = priv;
+
+  /* property defaults */
+  priv->latency_ms = DEFAULT_LATENCY_MS;
+  priv->latency_ns = priv->latency_ms * GST_MSECOND;
+  priv->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
+  priv->ts_offset = DEFAULT_TS_OFFSET;
+  priv->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+  priv->do_lost = DEFAULT_DO_LOST;
+  priv->post_drop_messages = DEFAULT_POST_DROP_MESSAGES;
+  priv->drop_messages_interval_ms = DEFAULT_DROP_MESSAGES_INTERVAL_MS;
+  priv->do_retransmission = DEFAULT_DO_RETRANSMISSION;
+  priv->rtx_next_seqnum = DEFAULT_RTX_NEXT_SEQNUM;
+  priv->rtx_delay = DEFAULT_RTX_DELAY;
+  priv->rtx_min_delay = DEFAULT_RTX_MIN_DELAY;
+  priv->rtx_delay_reorder = DEFAULT_RTX_DELAY_REORDER;
+  priv->rtx_retry_timeout = DEFAULT_RTX_RETRY_TIMEOUT;
+  priv->rtx_min_retry_timeout = DEFAULT_RTX_MIN_RETRY_TIMEOUT;
+  priv->rtx_retry_period = DEFAULT_RTX_RETRY_PERIOD;
+  priv->rtx_max_retries = DEFAULT_RTX_MAX_RETRIES;
+  priv->rtx_deadline_ms = DEFAULT_RTX_DEADLINE;
+  priv->rtx_stats_timeout = DEFAULT_RTX_STATS_TIMEOUT;
+  priv->max_rtcp_rtp_time_diff = DEFAULT_MAX_RTCP_RTP_TIME_DIFF;
+  priv->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME;
+  priv->max_misorder_time = DEFAULT_MAX_MISORDER_TIME;
+  priv->faststart_min_packets = DEFAULT_FASTSTART_MIN_PACKETS;
+
+  /* runtime state: "nothing received/pushed yet" */
+  priv->ts_offset_remainder = 0;
+  priv->last_dts = -1;
+  priv->last_pts = -1;
+  priv->last_rtptime = -1;
+  priv->avg_jitter = 0;
+  priv->last_drop_msg_timestamp = GST_CLOCK_TIME_NONE;
+  priv->num_too_late = 0;
+  priv->num_drop_on_latency = 0;
+  priv->segment_seqnum = GST_SEQNUM_INVALID;
+  /* internal queues, lock and the condition variables used to wake up the
+   * various waiters (queue space, timers, events, queries) */
+  priv->timers = rtp_timer_queue_new ();
+  priv->rtx_stats_timers = rtp_timer_queue_new ();
+  priv->jbuf = rtp_jitter_buffer_new ();
+  g_mutex_init (&priv->jbuf_lock);
+  g_cond_init (&priv->jbuf_queue);
+  g_cond_init (&priv->jbuf_timer);
+  g_cond_init (&priv->jbuf_event);
+  g_cond_init (&priv->jbuf_query);
+  g_queue_init (&priv->gap_packets);
+  gst_segment_init (&priv->segment, GST_FORMAT_TIME);
+
+  /* reset skew detection initially */
+  rtp_jitter_buffer_reset_skew (priv->jbuf);
+  rtp_jitter_buffer_set_delay (priv->jbuf, priv->latency_ns);
+  rtp_jitter_buffer_set_buffering (priv->jbuf, FALSE);
+  priv->active = TRUE;
+
+  priv->srcpad =
+      gst_pad_new_from_static_template (&gst_rtp_jitter_buffer_src_template,
+      "src");
+
+  gst_pad_set_activatemode_function (priv->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_src_activate_mode));
+  gst_pad_set_query_function (priv->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_src_query));
+  gst_pad_set_event_function (priv->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_src_event));
+
+  priv->sinkpad =
+      gst_pad_new_from_static_template (&gst_rtp_jitter_buffer_sink_template,
+      "sink");
+
+  gst_pad_set_chain_function (priv->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_chain));
+  gst_pad_set_chain_list_function (priv->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_chain_list));
+  gst_pad_set_event_function (priv->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_sink_event));
+  gst_pad_set_query_function (priv->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_jitter_buffer_sink_query));
+
+  gst_element_add_pad (GST_ELEMENT (jitterbuffer), priv->srcpad);
+  gst_element_add_pad (GST_ELEMENT (jitterbuffer), priv->sinkpad);
+
+  /* advertise that we can provide a clock (the system clock, see
+   * gst_rtp_jitter_buffer_provide_clock()) */
+  GST_OBJECT_FLAG_SET (jitterbuffer, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
+}
+
+/* Flush callback: frees @item, but first steals any sticky event the item
+ * carries and prepends it to the GList pointed to by @user_data, so the
+ * sticky events can be re-queued after the flush. */
+static void
+free_item_and_retain_sticky_events (RTPJitterBufferItem * item,
+    gpointer user_data)
+{
+  GList **sticky_events = user_data;
+  gboolean retain;
+
+  retain = item->data != NULL && item->type == ITEM_TYPE_EVENT
+      && GST_EVENT_IS_STICKY (item->data);
+
+  if (retain) {
+    /* transfer ownership of the event to the list */
+    *sticky_events = g_list_prepend (*sticky_events, item->data);
+    item->data = NULL;
+  }
+
+  rtp_jitter_buffer_free_item (item);
+}
+
+/* GObject finalize: release everything created in
+ * gst_rtp_jitter_buffer_init(). */
+static void
+gst_rtp_jitter_buffer_finalize (GObject * object)
+{
+  GstRtpJitterBuffer *jitterbuffer;
+  GstRtpJitterBufferPrivate *priv;
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER (object);
+  priv = jitterbuffer->priv;
+
+  g_object_unref (priv->timers);
+  g_object_unref (priv->rtx_stats_timers);
+  g_mutex_clear (&priv->jbuf_lock);
+  g_cond_clear (&priv->jbuf_queue);
+  g_cond_clear (&priv->jbuf_timer);
+  g_cond_clear (&priv->jbuf_event);
+  g_cond_clear (&priv->jbuf_query);
+
+  /* drop anything still queued in the jitterbuffer and the stashed
+   * gap packets before releasing the queue itself */
+  rtp_jitter_buffer_flush (priv->jbuf, NULL, NULL);
+  g_queue_foreach (&priv->gap_packets, (GFunc) gst_buffer_unref, NULL);
+  g_queue_clear (&priv->gap_packets);
+  g_object_unref (priv->jbuf);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* iterate-internal-links implementation: the RTP sink and src pads link to
+ * each other; the RTCP sink pad links to nothing. */
+static GstIterator *
+gst_rtp_jitter_buffer_iterate_internal_links (GstPad * pad, GstObject * parent)
+{
+  GstRtpJitterBufferPrivate *priv = GST_RTP_JITTER_BUFFER_CAST (parent)->priv;
+  GstPad *peer = NULL;
+  GstIterator *iter;
+  GValue item = { 0, };
+
+  /* the RTCP sink pad has no internal link at all */
+  if (pad == priv->rtcpsinkpad)
+    return gst_iterator_new_single (GST_TYPE_PAD, NULL);
+
+  if (pad == priv->sinkpad)
+    peer = priv->srcpad;
+  else if (pad == priv->srcpad)
+    peer = priv->sinkpad;
+
+  /* an unknown pad yields a single NULL entry, matching the original
+   * fall-through behaviour */
+  g_value_init (&item, GST_TYPE_PAD);
+  g_value_set_object (&item, peer);
+  iter = gst_iterator_new_single (GST_TYPE_PAD, &item);
+  g_value_unset (&item);
+
+  return iter;
+}
+
+/* Creates, activates and adds the request "sink_rtcp" pad.  Called from
+ * request_new_pad(); the new pad is stored in priv->rtcpsinkpad and also
+ * returned (element owns the pad). */
+static GstPad *
+create_rtcp_sink (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv;
+
+  priv = jitterbuffer->priv;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "creating RTCP sink pad");
+
+  priv->rtcpsinkpad =
+      gst_pad_new_from_static_template
+      (&gst_rtp_jitter_buffer_sink_rtcp_template, "sink_rtcp");
+  gst_pad_set_chain_function (priv->rtcpsinkpad,
+      gst_rtp_jitter_buffer_chain_rtcp);
+  gst_pad_set_event_function (priv->rtcpsinkpad,
+      (GstPadEventFunction) gst_rtp_jitter_buffer_sink_rtcp_event);
+  gst_pad_set_iterate_internal_links_function (priv->rtcpsinkpad,
+      gst_rtp_jitter_buffer_iterate_internal_links);
+  /* activate before adding so the pad is immediately usable */
+  gst_pad_set_active (priv->rtcpsinkpad, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (jitterbuffer), priv->rtcpsinkpad);
+
+  return priv->rtcpsinkpad;
+}
+
+/* Deactivates and removes the request RTCP sink pad again; clears
+ * priv->rtcpsinkpad so a new pad may be requested later. */
+static void
+remove_rtcp_sink (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "removing RTCP sink pad");
+
+  gst_pad_set_active (priv->rtcpsinkpad, FALSE);
+  gst_element_remove_pad (GST_ELEMENT_CAST (jitterbuffer), priv->rtcpsinkpad);
+  priv->rtcpsinkpad = NULL;
+}
+
+/* GstElement::request_new_pad implementation.  The only request pad this
+ * element exposes is the "sink_rtcp" pad, and at most one may exist at a
+ * time.  Returns the new pad or NULL (with a warning) on a bad request. */
+static GstPad *
+gst_rtp_jitter_buffer_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * name, const GstCaps * filter)
+{
+  GstRtpJitterBuffer *jitterbuffer;
+  GstElementClass *klass;
+  GstRtpJitterBufferPrivate *priv;
+
+  g_return_val_if_fail (templ != NULL, NULL);
+  g_return_val_if_fail (GST_IS_RTP_JITTER_BUFFER (element), NULL);
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER_CAST (element);
+  priv = jitterbuffer->priv;
+  klass = GST_ELEMENT_GET_CLASS (element);
+
+  GST_DEBUG_OBJECT (element, "requesting pad %s", GST_STR_NULL (name));
+
+  /* only the RTCP sink template is ours */
+  if (templ != gst_element_class_get_pad_template (klass, "sink_rtcp")) {
+    g_warning ("rtpjitterbuffer: this is not our template");
+    return NULL;
+  }
+
+  /* a single RTCP sink pad at most */
+  if (priv->rtcpsinkpad != NULL) {
+    g_warning ("rtpjitterbuffer: pad already requested");
+    return NULL;
+  }
+
+  return create_rtcp_sink (jitterbuffer);
+}
+
+/* GstElement::release_pad implementation.  Only the RTCP sink pad can be
+ * released; anything else triggers a warning and is ignored. */
+static void
+gst_rtp_jitter_buffer_release_pad (GstElement * element, GstPad * pad)
+{
+  GstRtpJitterBuffer *jitterbuffer;
+
+  g_return_if_fail (GST_IS_RTP_JITTER_BUFFER (element));
+  g_return_if_fail (GST_IS_PAD (pad));
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER_CAST (element);
+
+  GST_DEBUG_OBJECT (element, "releasing pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+  if (jitterbuffer->priv->rtcpsinkpad != pad) {
+    g_warning ("gstjitterbuffer: asked to release an unknown pad");
+    return;
+  }
+
+  remove_rtcp_sink (jitterbuffer);
+}
+
+/* GstElement::provide_clock implementation: we sync against the system
+ * clock, so hand out a new reference to it. */
+static GstClock *
+gst_rtp_jitter_buffer_provide_clock (GstElement * element)
+{
+  GstClock *sysclock;
+
+  sysclock = gst_system_clock_obtain ();
+
+  return sysclock;
+}
+
+/* GstElement::set_clock implementation: forward the selected pipeline clock
+ * to the internal jitterbuffer before chaining up. */
+static gboolean
+gst_rtp_jitter_buffer_set_clock (GstElement * element, GstClock * clock)
+{
+  GstRtpJitterBuffer *jitterbuffer = GST_RTP_JITTER_BUFFER (element);
+
+  rtp_jitter_buffer_set_pipeline_clock (jitterbuffer->priv->jbuf, clock);
+
+  return GST_ELEMENT_CLASS (parent_class)->set_clock (element, clock);
+}
+
+/* Default handler for the "clear-pt-map" action signal: invalidate the
+ * cached clock-rate (so the next packet re-triggers request-pt-map) and
+ * reset the skew estimation. */
+static void
+gst_rtp_jitter_buffer_clear_pt_map (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv;
+
+  priv = jitterbuffer->priv;
+
+  /* this will trigger a new pt-map request signal, FIXME, do something better. */
+
+  JBUF_LOCK (priv);
+  priv->clock_rate = -1;
+  /* do not clear current content, but refresh state for new arrival */
+  GST_DEBUG_OBJECT (jitterbuffer, "reset jitterbuffer");
+  rtp_jitter_buffer_reset_skew (priv->jbuf);
+  JBUF_UNLOCK (priv);
+}
+
+/* Default handler for the "set-active" action signal (buffering mode).
+ * When the active state changes, @offset becomes the output offset applied
+ * to outgoing timestamps; deactivating also puts the queue back into
+ * buffering.  Returns the timestamp of the packet at the head of the queue
+ * (plus ts-offset), or the last known output time when empty. */
+static GstClockTime
+gst_rtp_jitter_buffer_set_active (GstRtpJitterBuffer * jbuf, gboolean active,
+    guint64 offset)
+{
+  GstRtpJitterBufferPrivate *priv;
+  GstClockTime last_out;
+  RTPJitterBufferItem *item;
+
+  priv = jbuf->priv;
+
+  JBUF_LOCK (priv);
+  GST_DEBUG_OBJECT (jbuf, "setting active %d with offset %" GST_TIME_FORMAT,
+      active, GST_TIME_ARGS (offset));
+
+  if (active != priv->active) {
+    /* add the amount of time spent in paused to the output offset. All
+     * outgoing buffers will have this offset applied to their timestamps in
+     * order to make them arrive in time in the sink. */
+    priv->out_offset = offset;
+    GST_DEBUG_OBJECT (jbuf, "out offset %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (priv->out_offset));
+    priv->active = active;
+    /* wake up the loop so it notices the new active state */
+    JBUF_SIGNAL_EVENT (priv);
+  }
+  if (!active) {
+    rtp_jitter_buffer_set_buffering (priv->jbuf, TRUE);
+  }
+  if ((item = rtp_jitter_buffer_peek (priv->jbuf))) {
+    /* head buffer timestamp and offset gives our output time */
+    last_out = item->pts + priv->ts_offset;
+  } else {
+    /* use last known time when the buffer is empty */
+    last_out = priv->last_out_time;
+  }
+  JBUF_UNLOCK (priv);
+
+  return last_out;
+}
+
+/* Caps query helper: proxy the caps of the opposite pad's peer, constrained
+ * by our own pad template.  Returns a caps reference owned by the caller. */
+static GstCaps *
+gst_rtp_jitter_buffer_getcaps (GstPad * pad, GstCaps * filter)
+{
+  GstRtpJitterBuffer *jitterbuffer;
+  GstRtpJitterBufferPrivate *priv;
+  GstPad *other;
+  GstCaps *caps;
+  GstCaps *templ;
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER (gst_pad_get_parent (pad));
+  priv = jitterbuffer->priv;
+
+  other = (pad == priv->srcpad ? priv->sinkpad : priv->srcpad);
+
+  caps = gst_pad_peer_query_caps (other, filter);
+
+  templ = gst_pad_get_pad_template_caps (pad);
+  if (caps == NULL) {
+    GST_DEBUG_OBJECT (jitterbuffer, "use template");
+    /* no peer: ownership of the template caps ref transfers to caps */
+    caps = templ;
+  } else {
+    GstCaps *intersect;
+
+    GST_DEBUG_OBJECT (jitterbuffer, "intersect with template");
+
+    /* intersect returns a new ref; drop both inputs */
+    intersect = gst_caps_intersect (caps, templ);
+    gst_caps_unref (caps);
+    gst_caps_unref (templ);
+
+    caps = intersect;
+  }
+  /* balance the ref taken by gst_pad_get_parent() above */
+  gst_object_unref (jitterbuffer);
+
+  return caps;
+}
+
+/*
+ * Parses the negotiated sink caps and updates the negotiated state:
+ * payload type, clock-rate (mandatory), clock-base, seqnum-base,
+ * npt-start/stop and an optional RFC7273 reference clock
+ * (a-ts-refclk / a-mediaclk attributes).
+ *
+ * @pt is the payload type the caps were requested for, or -1 when unknown;
+ * a mismatch with the "payload" field in @caps is an error.
+ *
+ * Returns FALSE when the caps are unusable (no/invalid clock-rate or
+ * payload mismatch).
+ *
+ * Must be called with JBUF_LOCK held
+ */
+
+static gboolean
+gst_jitter_buffer_sink_parse_caps (GstRtpJitterBuffer * jitterbuffer,
+    GstCaps * caps, gint pt)
+{
+  GstRtpJitterBufferPrivate *priv;
+  GstStructure *caps_struct;
+  guint val;
+  gint payload = -1;
+  GstClockTime tval;
+  const gchar *ts_refclk, *mediaclk;
+
+  priv = jitterbuffer->priv;
+
+  /* first parse the caps */
+  caps_struct = gst_caps_get_structure (caps, 0);
+
+  GST_DEBUG_OBJECT (jitterbuffer, "got caps %" GST_PTR_FORMAT, caps);
+
+  if (gst_structure_get_int (caps_struct, "payload", &payload) && pt != -1
+      && payload != pt) {
+    GST_ERROR_OBJECT (jitterbuffer,
+        "Got caps with wrong payload type (got %d, expected %d)", pt, payload);
+    return FALSE;
+  }
+
+  if (payload != -1) {
+    GST_DEBUG_OBJECT (jitterbuffer, "Got payload type %d", payload);
+    priv->last_pt = payload;
+  }
+
+  /* we need a clock-rate to convert the rtp timestamps to GStreamer time and to
+   * measure the amount of data in the buffer */
+  if (!gst_structure_get_int (caps_struct, "clock-rate", &priv->clock_rate))
+    goto error;
+
+  if (priv->clock_rate <= 0)
+    goto wrong_rate;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "got clock-rate %d", priv->clock_rate);
+
+  rtp_jitter_buffer_set_clock_rate (priv->jbuf, priv->clock_rate);
+
+  gst_rtp_packet_rate_ctx_reset (&priv->packet_rate_ctx, priv->clock_rate);
+
+  /* The clock base is the RTP timestamp corresponding to the npt-start value. We
+   * can use this to track the amount of time elapsed on the sender. */
+  if (gst_structure_get_uint (caps_struct, "clock-base", &val))
+    priv->clock_base = val;
+  else
+    priv->clock_base = -1;
+
+  priv->ext_timestamp = priv->clock_base;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "got clock-base %" G_GINT64_FORMAT,
+      priv->clock_base);
+
+  if (gst_structure_get_uint (caps_struct, "seqnum-base", &val)) {
+    /* first expected seqnum, only update when we didn't have a previous base. */
+    if (priv->next_in_seqnum == -1)
+      priv->next_in_seqnum = val;
+    if (priv->next_seqnum == -1) {
+      priv->next_seqnum = val;
+      JBUF_SIGNAL_EVENT (priv);
+    }
+    priv->seqnum_base = val;
+  } else {
+    priv->seqnum_base = -1;
+  }
+
+  GST_DEBUG_OBJECT (jitterbuffer, "got seqnum-base %d", priv->next_in_seqnum);
+
+  /* the start and stop times. The seqnum-base corresponds to the start time. We
+   * will keep track of the seqnums on the output and when we reach the one
+   * corresponding to npt-stop, we emit the npt-stop-reached signal */
+  if (gst_structure_get_clock_time (caps_struct, "npt-start", &tval))
+    priv->npt_start = tval;
+  else
+    priv->npt_start = 0;
+
+  if (gst_structure_get_clock_time (caps_struct, "npt-stop", &tval))
+    priv->npt_stop = tval;
+  else
+    priv->npt_stop = -1;
+
+  GST_DEBUG_OBJECT (jitterbuffer,
+      "npt start/stop: %" GST_TIME_FORMAT "-%" GST_TIME_FORMAT,
+      GST_TIME_ARGS (priv->npt_start), GST_TIME_ARGS (priv->npt_stop));
+
+  if ((ts_refclk = gst_structure_get_string (caps_struct, "a-ts-refclk"))) {
+    GstClock *clock = NULL;
+    guint64 clock_offset = -1;
+
+    GST_DEBUG_OBJECT (jitterbuffer, "Have timestamp reference clock %s",
+        ts_refclk);
+
+    if (g_str_has_prefix (ts_refclk, "ntp=")) {
+      if (g_str_has_prefix (ts_refclk, "ntp=/traceable/")) {
+        GST_FIXME_OBJECT (jitterbuffer, "Can't handle traceable NTP clocks");
+      } else {
+        const gchar *host, *portstr;
+        gchar *hostname;
+        guint port;
+
+        /* "ntp=<host>[:port]", host possibly a bracketed IPv6 literal */
+        host = ts_refclk + sizeof ("ntp=") - 1;
+        if (host[0] == '[') {
+          /* IPv6 */
+          portstr = strchr (host, ']');
+          if (portstr && portstr[1] == ':')
+            portstr = portstr + 1;
+          else
+            portstr = NULL;
+        } else {
+          portstr = strrchr (host, ':');
+        }
+
+
+        if (!portstr || sscanf (portstr, ":%u", &port) != 1)
+          port = 123;
+
+        if (portstr)
+          hostname = g_strndup (host, (portstr - host));
+        else
+          hostname = g_strdup (host);
+
+        clock = gst_ntp_clock_new (NULL, hostname, port, 0);
+        g_free (hostname);
+      }
+    } else if (g_str_has_prefix (ts_refclk, "ptp=IEEE1588-2008:")) {
+      const gchar *domainstr =
+          ts_refclk + sizeof ("ptp=IEEE1588-2008:XX-XX-XX-XX-XX-XX-XX-XX") - 1;
+      guint domain;
+
+      /* optional ":<domain>" suffix after the clock identity */
+      if (domainstr[0] != ':' || sscanf (domainstr, ":%u", &domain) != 1)
+        domain = 0;
+
+      clock = gst_ptp_clock_new (NULL, domain);
+    } else {
+      GST_FIXME_OBJECT (jitterbuffer, "Unsupported timestamp reference clock");
+    }
+
+    if ((mediaclk = gst_structure_get_string (caps_struct, "a-mediaclk"))) {
+      GST_DEBUG_OBJECT (jitterbuffer, "Got media clock %s", mediaclk);
+
+      /* only "direct=<offset>" without a rate is supported */
+      if (!g_str_has_prefix (mediaclk, "direct=") ||
+          !g_ascii_string_to_unsigned (&mediaclk[7], 10, 0, G_MAXUINT64,
+              &clock_offset, NULL))
+        GST_FIXME_OBJECT (jitterbuffer, "Unsupported media clock");
+      if (strstr (mediaclk, "rate=") != NULL) {
+        GST_FIXME_OBJECT (jitterbuffer, "Rate property not supported");
+        clock_offset = -1;
+      }
+    }
+
+    rtp_jitter_buffer_set_media_clock (priv->jbuf, clock, clock_offset);
+  } else {
+    /* no reference clock: clear any previously set media clock */
+    rtp_jitter_buffer_set_media_clock (priv->jbuf, NULL, -1);
+  }
+
+  return TRUE;
+
+  /* ERRORS */
+error:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer, "No clock-rate in caps!");
+    return FALSE;
+  }
+wrong_rate:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer, "Invalid clock-rate %d", priv->clock_rate);
+    return FALSE;
+  }
+}
+
+/* Puts the element into flushing state: set srcresult to FLUSHING and wake
+ * up everything that may be waiting (loop, timer thread, queries, chain
+ * functions waiting for queue space). */
+static void
+gst_rtp_jitter_buffer_flush_start (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv;
+
+  priv = jitterbuffer->priv;
+
+  JBUF_LOCK (priv);
+  /* mark ourselves as flushing */
+  priv->srcresult = GST_FLOW_FLUSHING;
+  GST_DEBUG_OBJECT (jitterbuffer, "Disabling pop on queue");
+  /* this unblocks any waiting pops on the src pad task */
+  JBUF_SIGNAL_EVENT (priv);
+  JBUF_SIGNAL_QUERY (priv, FALSE);
+  JBUF_SIGNAL_QUEUE (priv);
+  JBUF_UNLOCK (priv);
+}
+
+/* Leaves the flushing state: reset srcresult to OK and bring all negotiated
+ * and runtime state back to its pristine "nothing received" values, dropping
+ * everything still queued. */
+static void
+gst_rtp_jitter_buffer_flush_stop (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv;
+
+  priv = jitterbuffer->priv;
+
+  JBUF_LOCK (priv);
+  GST_DEBUG_OBJECT (jitterbuffer, "Enabling pop on queue");
+  /* Mark as non flushing */
+  priv->srcresult = GST_FLOW_OK;
+  gst_segment_init (&priv->segment, GST_FORMAT_TIME);
+  /* reset all tracking state so the next packet is treated as the first */
+  priv->last_popped_seqnum = -1;
+  priv->last_out_time = GST_CLOCK_TIME_NONE;
+  priv->next_seqnum = -1;
+  priv->seqnum_base = -1;
+  priv->ips_rtptime = -1;
+  priv->ips_pts = GST_CLOCK_TIME_NONE;
+  priv->packet_spacing = 0;
+  priv->next_in_seqnum = -1;
+  priv->clock_rate = -1;
+  priv->last_pt = -1;
+  priv->eos = FALSE;
+  priv->estimated_eos = -1;
+  priv->last_elapsed = 0;
+  priv->ext_timestamp = -1;
+  priv->avg_jitter = 0;
+  priv->last_dts = -1;
+  priv->last_rtptime = -1;
+  priv->last_in_pts = 0;
+  priv->equidistant = 0;
+  priv->segment_seqnum = GST_SEQNUM_INVALID;
+  priv->last_drop_msg_timestamp = GST_CLOCK_TIME_NONE;
+  priv->num_too_late = 0;
+  priv->num_drop_on_latency = 0;
+  GST_DEBUG_OBJECT (jitterbuffer, "flush and reset jitterbuffer");
+  /* drop queued packets/events, pending timers and stashed gap packets */
+  rtp_jitter_buffer_flush (priv->jbuf, NULL, NULL);
+  rtp_jitter_buffer_disable_buffering (priv->jbuf, FALSE);
+  rtp_jitter_buffer_reset_skew (priv->jbuf);
+  rtp_timer_queue_remove_all (priv->timers);
+  g_queue_foreach (&priv->gap_packets, (GFunc) gst_buffer_unref, NULL);
+  g_queue_clear (&priv->gap_packets);
+  JBUF_UNLOCK (priv);
+}
+
+/* Activate-mode handler for the src pad.  Only push mode is supported:
+ * activation un-flushes the element and starts the streaming task that pops
+ * buffers from the queue; deactivation flushes and stops (joins) the task. */
+static gboolean
+gst_rtp_jitter_buffer_src_activate_mode (GstPad * pad, GstObject * parent,
+    GstPadMode mode, gboolean active)
+{
+  gboolean result;
+  GstRtpJitterBuffer *jitterbuffer = NULL;
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
+
+  switch (mode) {
+    case GST_PAD_MODE_PUSH:
+      if (active) {
+        /* allow data processing */
+        gst_rtp_jitter_buffer_flush_stop (jitterbuffer);
+
+        /* start pushing out buffers */
+        GST_DEBUG_OBJECT (jitterbuffer, "Starting task on srcpad");
+        result = gst_pad_start_task (jitterbuffer->priv->srcpad,
+            (GstTaskFunction) gst_rtp_jitter_buffer_loop, jitterbuffer, NULL);
+      } else {
+        /* make sure all data processing stops ASAP */
+        gst_rtp_jitter_buffer_flush_start (jitterbuffer);
+
+        /* NOTE this will hardlock if the state change is called from the src pad
+         * task thread because we will _join() the thread. */
+        GST_DEBUG_OBJECT (jitterbuffer, "Stopping task on srcpad");
+        result = gst_pad_stop_task (pad);
+      }
+      break;
+    default:
+      /* pull mode (or anything else) is not supported */
+      result = FALSE;
+      break;
+  }
+  return result;
+}
+
+/* GstElement::change_state implementation.  Manages the timer thread
+ * lifecycle and the "blocked" flag (streaming is held back while not in
+ * PLAYING).  Returns NO_PREROLL on downward/initial transitions to PAUSED
+ * because this is a live element. */
+static GstStateChangeReturn
+gst_rtp_jitter_buffer_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpJitterBuffer *jitterbuffer;
+  GstRtpJitterBufferPrivate *priv;
+  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER (element);
+  priv = jitterbuffer->priv;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      JBUF_LOCK (priv);
+      /* reset negotiated values */
+      priv->clock_rate = -1;
+      priv->clock_base = -1;
+      priv->peer_latency = 0;
+      priv->last_pt = -1;
+      /* block until we go to PLAYING */
+      priv->blocked = TRUE;
+      priv->timer_running = TRUE;
+      priv->srcresult = GST_FLOW_OK;
+      /* spawn the thread that waits for and fires timers */
+      priv->timer_thread =
+          g_thread_new ("timer", (GThreadFunc) wait_next_timeout, jitterbuffer);
+      JBUF_UNLOCK (priv);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      JBUF_LOCK (priv);
+      /* unblock to allow streaming in PLAYING */
+      priv->blocked = FALSE;
+      JBUF_SIGNAL_EVENT (priv);
+      JBUF_SIGNAL_TIMER (priv);
+      JBUF_UNLOCK (priv);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* we are a live element because we sync to the clock, which we can only
+       * do in the PLAYING state */
+      if (ret != GST_STATE_CHANGE_FAILURE)
+        ret = GST_STATE_CHANGE_NO_PREROLL;
+      break;
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      JBUF_LOCK (priv);
+      /* block to stop streaming when PAUSED */
+      priv->blocked = TRUE;
+      unschedule_current_timer (jitterbuffer);
+      JBUF_UNLOCK (priv);
+      if (ret != GST_STATE_CHANGE_FAILURE)
+        ret = GST_STATE_CHANGE_NO_PREROLL;
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      JBUF_LOCK (priv);
+      gst_buffer_replace (&priv->last_sr, NULL);
+      /* ask the timer thread to exit, wake up everything that may be
+       * waiting, then join the thread outside the lock */
+      priv->timer_running = FALSE;
+      priv->srcresult = GST_FLOW_FLUSHING;
+      unschedule_current_timer (jitterbuffer);
+      JBUF_SIGNAL_TIMER (priv);
+      JBUF_SIGNAL_QUERY (priv, FALSE);
+      JBUF_SIGNAL_QUEUE (priv);
+      JBUF_UNLOCK (priv);
+      g_thread_join (priv->timer_thread);
+      priv->timer_thread = NULL;
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* Event handler for the src pad (upstream events).  LATENCY events update
+ * the internal delay when running in buffering mode; everything, including
+ * LATENCY, is then forwarded to the sink pad. */
+static gboolean
+gst_rtp_jitter_buffer_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  gboolean ret = TRUE;
+  GstRtpJitterBuffer *jitterbuffer;
+  GstRtpJitterBufferPrivate *priv;
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER_CAST (parent);
+  priv = jitterbuffer->priv;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_LATENCY:
+    {
+      GstClockTime latency;
+
+      gst_event_parse_latency (event, &latency);
+
+      GST_DEBUG_OBJECT (jitterbuffer,
+          "configuring latency of %" GST_TIME_FORMAT, GST_TIME_ARGS (latency));
+
+      JBUF_LOCK (priv);
+      /* adjust the overall buffer delay to the total pipeline latency in
+       * buffering mode because if downstream consumes too fast (because of
+       * large latency or queues, we would start rebuffering again. */
+      if (rtp_jitter_buffer_get_mode (priv->jbuf) ==
+          RTP_JITTER_BUFFER_MODE_BUFFER) {
+        rtp_jitter_buffer_set_delay (priv->jbuf, latency);
+      }
+      JBUF_UNLOCK (priv);
+
+      ret = gst_pad_push_event (priv->sinkpad, event);
+      break;
+    }
+    default:
+      /* all other upstream events pass through unchanged */
+      ret = gst_pad_push_event (priv->sinkpad, event);
+      break;
+  }
+
+  return ret;
+}
+
+/* handles and stores the event in the jitterbuffer, must be called with
+ * LOCK.  CAPS events are parsed for negotiated parameters, non-TIME
+ * SEGMENT events are replaced by a fresh TIME segment (keeping the event
+ * seqnum), and EOS disables buffering.  The event is then appended to the
+ * queue; when it lands at the head (or on EOS) the loop is woken up.
+ * Always returns TRUE. */
+static gboolean
+queue_event (GstRtpJitterBuffer * jitterbuffer, GstEvent * event)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  gboolean head;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_event_parse_caps (event, &caps);
+      gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps, -1);
+      break;
+    }
+    case GST_EVENT_SEGMENT:
+    {
+      GstSegment segment;
+      gst_event_copy_segment (event, &segment);
+
+      priv->segment_seqnum = gst_event_get_seqnum (event);
+
+      /* we need time for now */
+      if (segment.format != GST_FORMAT_TIME) {
+        GST_DEBUG_OBJECT (jitterbuffer, "ignoring non-TIME newsegment");
+        gst_event_unref (event);
+
+        /* substitute an open-ended TIME segment with the original seqnum */
+        gst_segment_init (&segment, GST_FORMAT_TIME);
+        event = gst_event_new_segment (&segment);
+        gst_event_set_seqnum (event, priv->segment_seqnum);
+      }
+
+      priv->segment = segment;
+      break;
+    }
+    case GST_EVENT_EOS:
+      priv->eos = TRUE;
+      rtp_jitter_buffer_disable_buffering (priv->jbuf, TRUE);
+      break;
+    default:
+      break;
+  }
+
+  GST_DEBUG_OBJECT (jitterbuffer, "adding event");
+  head = rtp_jitter_buffer_append_event (priv->jbuf, event);
+  if (head || priv->eos)
+    JBUF_SIGNAL_EVENT (priv);
+
+  return TRUE;
+}
+
+/* Event handler for the RTP sink pad.  Flush events manage the streaming
+ * task directly; serialized events are queued (in order with the packets)
+ * via queue_event(); non-serialized events are pushed downstream
+ * immediately. */
+static gboolean
+gst_rtp_jitter_buffer_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  gboolean ret = TRUE;
+  GstRtpJitterBuffer *jitterbuffer;
+  GstRtpJitterBufferPrivate *priv;
+
+  jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
+  priv = jitterbuffer->priv;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_START:
+      ret = gst_pad_push_event (priv->srcpad, event);
+      gst_rtp_jitter_buffer_flush_start (jitterbuffer);
+      /* wait for the loop to go into PAUSED */
+      gst_pad_pause_task (priv->srcpad);
+      break;
+    case GST_EVENT_FLUSH_STOP:
+      ret = gst_pad_push_event (priv->srcpad, event);
+      /* reset state and restart the streaming task */
+      ret =
+          gst_rtp_jitter_buffer_src_activate_mode (priv->srcpad, parent,
+          GST_PAD_MODE_PUSH, TRUE);
+      break;
+    default:
+      if (GST_EVENT_IS_SERIALIZED (event)) {
+        /* serialized events go in the queue */
+        JBUF_LOCK (priv);
+        if (priv->srcresult != GST_FLOW_OK) {
+          /* Errors in sticky event pushing are no problem and ignored here
+           * as they will cause more meaningful errors during data flow.
+           * For EOS events, that are not followed by data flow, we still
+           * return FALSE here though.
+           */
+          if (!GST_EVENT_IS_STICKY (event) ||
+              GST_EVENT_TYPE (event) == GST_EVENT_EOS)
+            goto out_flow_error;
+        }
+        /* refuse more events on EOS */
+        if (priv->eos)
+          goto out_eos;
+        ret = queue_event (jitterbuffer, event);
+        JBUF_UNLOCK (priv);
+      } else {
+        /* non-serialized events are forwarded downstream immediately */
+        ret = gst_pad_push_event (priv->srcpad, event);
+      }
+      break;
+  }
+  return ret;
+
+  /* ERRORS */
+out_flow_error:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer,
+        "refusing event, we have a downstream flow error: %s",
+        gst_flow_get_name (priv->srcresult));
+    JBUF_UNLOCK (priv);
+    gst_event_unref (event);
+    return FALSE;
+  }
+out_eos:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer, "refusing event, we are EOS");
+    JBUF_UNLOCK (priv);
+    gst_event_unref (event);
+    return FALSE;
+  }
+}
+
+/* Event handler for the RTCP sink pad.
+ *
+ * Flush events are swallowed here (there is nothing to flush on the RTCP
+ * path and they must not propagate); everything else takes the default
+ * event handling. */
+static gboolean
+gst_rtp_jitter_buffer_sink_rtcp_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRtpJitterBuffer *jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
+  gboolean ret = TRUE;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "received %s", GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_START:
+    case GST_EVENT_FLUSH_STOP:
+      /* drop flush events on the RTCP pad */
+      gst_event_unref (event);
+      break;
+    default:
+      ret = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+
+  return ret;
+}
+
+/*
+ * Must be called with JBUF_LOCK held, will release the LOCK when emitting the
+ * signal. The function returns GST_FLOW_ERROR when a parsing error happened and
+ * GST_FLOW_FLUSHING when the element is shutting down. On success
+ * GST_FLOW_OK is returned.
+ *
+ * Emits the request-pt-map signal for @pt and feeds the returned caps to
+ * gst_jitter_buffer_sink_parse_caps() so the clock-rate (and friends) get
+ * configured.
+ */
+static GstFlowReturn
+gst_rtp_jitter_buffer_get_clock_rate (GstRtpJitterBuffer * jitterbuffer,
+    guint8 pt)
+{
+  GValue ret = { 0 };
+  GValue args[2] = { {0}, {0} };
+  GstCaps *caps;
+  gboolean res;
+
+  /* marshal (element, pt) as the signal arguments */
+  g_value_init (&args[0], GST_TYPE_ELEMENT);
+  g_value_set_object (&args[0], jitterbuffer);
+  g_value_init (&args[1], G_TYPE_UINT);
+  g_value_set_uint (&args[1], pt);
+
+  g_value_init (&ret, GST_TYPE_CAPS);
+  g_value_set_boxed (&ret, NULL);
+
+  /* drop the lock while calling out to the application; handlers may call
+   * back into the element and would otherwise deadlock */
+  JBUF_UNLOCK (jitterbuffer->priv);
+  g_signal_emitv (args, gst_rtp_jitter_buffer_signals[SIGNAL_REQUEST_PT_MAP], 0,
+      &ret);
+  JBUF_LOCK_CHECK (jitterbuffer->priv, out_flushing);
+
+  g_value_unset (&args[0]);
+  g_value_unset (&args[1]);
+  /* take our own ref to the caps before unsetting the return GValue */
+  caps = (GstCaps *) g_value_dup_boxed (&ret);
+  g_value_unset (&ret);
+  if (!caps)
+    goto no_caps;
+
+  res = gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps, pt);
+  gst_caps_unref (caps);
+
+  if (G_UNLIKELY (!res))
+    goto parse_failed;
+
+  return GST_FLOW_OK;
+
+  /* ERRORS */
+no_caps:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer, "could not get caps");
+    return GST_FLOW_ERROR;
+  }
+out_flushing:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer, "we are flushing");
+    return GST_FLOW_FLUSHING;
+  }
+parse_failed:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer, "parse failed");
+    return GST_FLOW_ERROR;
+  }
+}
+
+/* Build a buffering message for @percent when it differs from the last
+ * posted value.  Returns NULL when @percent is -1 (no buffering info) or
+ * unchanged.  Call with jbuf lock held. */
+static GstMessage *
+check_buffering_percent (GstRtpJitterBuffer * jitterbuffer, gint percent)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  GstMessage *message;
+
+  /* -1 means no buffering information; equal values are not re-posted */
+  if (percent == -1 || priv->last_percent == percent)
+    return NULL;
+
+  priv->last_percent = percent;
+
+  message = gst_message_new_buffering (GST_OBJECT_CAST (jitterbuffer), percent);
+  gst_message_set_buffering_stats (message, GST_BUFFERING_LIVE, -1, -1, -1);
+
+  return message;
+}
+
+/* Build an element message describing dropped packet(s), rate-limited by
+ * the drop-messages-interval property.  The per-reason counters always
+ * accumulate; they are flushed into (and reset by) the next message that
+ * actually gets created.  Returns NULL when the message is suppressed or
+ * @reason is unknown.  Call with jbuf lock held. */
+static GstMessage *
+new_drop_message (GstRtpJitterBuffer * jitterbuffer, guint seqnum,
+    GstClockTime timestamp, DropMessageReason reason)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  const gchar *reason_str;
+  GstClockTime now;
+  GstStructure *s;
+
+  switch (reason) {
+    case REASON_TOO_LATE:
+      priv->num_too_late++;
+      reason_str = "too-late";
+      break;
+    case REASON_DROP_ON_LATENCY:
+      priv->num_drop_on_latency++;
+      reason_str = "drop-on-latency";
+      break;
+    default:
+      GST_WARNING_OBJECT (jitterbuffer, "Invalid reason for drop message");
+      return NULL;
+  }
+
+  now = get_current_running_time (jitterbuffer);
+
+  /* rate-limit: suppress unless the configured interval has elapsed since
+   * the previous message, or this is the very first drop message */
+  if (priv->last_drop_msg_timestamp != GST_CLOCK_TIME_NONE &&
+      now - priv->last_drop_msg_timestamp <
+      priv->drop_messages_interval_ms * GST_MSECOND)
+    return NULL;
+
+  s = gst_structure_new ("drop-msg",
+      "seqnum", G_TYPE_UINT, seqnum,
+      "timestamp", GST_TYPE_CLOCK_TIME, timestamp,
+      "reason", G_TYPE_STRING, reason_str,
+      "num-too-late", G_TYPE_UINT, priv->num_too_late,
+      "num-drop-on-latency", G_TYPE_UINT, priv->num_drop_on_latency, NULL);
+
+  priv->last_drop_msg_timestamp = now;
+  priv->num_too_late = 0;
+  priv->num_drop_on_latency = 0;
+
+  return gst_message_new_element (GST_OBJECT (jitterbuffer), s);
+}
+
+
+/* Total offset (ts-offset + out-offset + latency) that is added to a
+ * packet PTS to produce a timer timeout. */
+static inline GstClockTimeDiff
+timeout_offset (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+  return priv->ts_offset + priv->out_offset + priv->latency_ns;
+}
+
+/* Recover the PTS a timer was scheduled for by removing its offset.
+ * Returns GST_CLOCK_TIME_NONE (-1) for an unscheduled timer. */
+static inline GstClockTime
+get_pts_timeout (const RtpTimer * timer)
+{
+  return (timer->timeout == -1) ? -1 : timer->timeout - timer->offset;
+}
+
+/* Add a signed @offset to the unsigned value @val, storing the sum in
+ * @res.  Returns FALSE (leaving @res untouched) when the result would be
+ * negative or would not fit in 64 bits.
+ *
+ * All arithmetic is done on unsigned types: the previous implementation
+ * computed "(gint64) val + offset" (signed overflow, undefined behavior,
+ * for val near G_MAXINT64 with a positive offset) and negated a gint64
+ * directly (undefined for G_MININT64). */
+static inline gboolean
+safe_add (guint64 * res, guint64 val, gint64 offset)
+{
+  if (offset >= 0) {
+    /* reject sums that would wrap past G_MAXUINT64 */
+    if (val > G_MAXUINT64 - (guint64) offset)
+      return FALSE;
+    *res = val + (guint64) offset;
+    return TRUE;
+  } else {
+    /* unsigned negation is well-defined, also for G_MININT64 */
+    guint64 abs_offset = -(guint64) offset;
+
+    /* reject sums that would go below zero */
+    if (val < abs_offset)
+      return FALSE;
+    *res = val - abs_offset;
+    return TRUE;
+  }
+}
+
+/* Re-apply the (possibly changed) global timeout offset to every timer in
+ * the queue, except EXPECTED (RTX) timers whose timeout is not PTS-based.
+ * Since the same offset is applied to all affected timers their relative
+ * order is preserved, so the queue itself needs no resorting.
+ * Call with jbuf lock held. */
+static void
+update_timer_offsets (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  RtpTimer *test = rtp_timer_queue_peek_earliest (priv->timers);
+  GstClockTimeDiff new_offset = timeout_offset (jitterbuffer);
+
+  while (test) {
+    if (test->type != RTP_TIMER_EXPECTED) {
+      /* recompute timeout from the timer's original PTS plus new offset */
+      GstClockTime pts = get_pts_timeout (test);
+      if (safe_add (&test->timeout, pts, new_offset)) {
+        test->offset = new_offset;
+      } else {
+        /* offset would push the timeout below zero: disable this timer */
+        GST_DEBUG_OBJECT (jitterbuffer,
+            "Invalidating timeout (pts lower than new offset)");
+        test->timeout = GST_CLOCK_TIME_NONE;
+        test->offset = 0;
+      }
+      /* as we apply the offset on all timers, the order of timers won't
+       * change and we can skip updating the timer queue */
+    }
+
+    test = rtp_timer_get_next (test);
+  }
+}
+
+/* Move ts_offset towards its target by consuming ts_offset_remainder,
+ * stepping by at most max_ts_offset_adjustment per call so large jumps
+ * are smoothed out, then propagate the new offset to all pending timers.
+ * Call with jbuf lock held. */
+static void
+update_offset (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  gint64 step;
+
+  if (priv->ts_offset_remainder == 0)
+    return;
+
+  GST_DEBUG ("adjustment %" G_GUINT64_FORMAT " remain %" G_GINT64_FORMAT
+      " off %" G_GINT64_FORMAT, priv->max_ts_offset_adjustment,
+      priv->ts_offset_remainder, priv->ts_offset);
+
+  if (ABS (priv->ts_offset_remainder) > priv->max_ts_offset_adjustment) {
+    /* clamp this step to the configured maximum, keeping the sign */
+    if (priv->ts_offset_remainder > 0)
+      step = priv->max_ts_offset_adjustment;
+    else
+      step = -(gint64) priv->max_ts_offset_adjustment;
+  } else {
+    /* remainder fits in one step: consume it completely */
+    step = priv->ts_offset_remainder;
+  }
+
+  priv->ts_offset += step;
+  priv->ts_offset_remainder -= step;
+
+  update_timer_offsets (jitterbuffer);
+}
+
+/* Translate a timestamp into the output timeline by applying the
+ * inter-stream sync offset (ts_offset) and the buffering offset
+ * (out_offset).  GST_CLOCK_TIME_NONE passes through unchanged; a sum
+ * that would go negative is clamped to 0. */
+static GstClockTime
+apply_offset (GstRtpJitterBuffer * jitterbuffer, GstClockTime timestamp)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+  if (timestamp == -1)
+    return -1;
+
+  /* apply the timestamp offset, this is used for inter stream sync */
+  if (!safe_add (&timestamp, timestamp, priv->ts_offset))
+    timestamp = 0;
+
+  /* add the offset, this is used when buffering */
+  return timestamp + priv->out_offset;
+}
+
+/* Cancel the clock wait the timer thread is currently blocked on, if any,
+ * forcing it to re-evaluate the timer queue. */
+static void
+unschedule_current_timer (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+  if (priv->clock_id == NULL)
+    return;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "unschedule current timer");
+  gst_clock_id_unschedule (priv->clock_id);
+  priv->clock_id = NULL;
+}
+
+/* Make sure the timer thread is waiting for the earliest timer in the
+ * queue, waking it up and/or cancelling its current clock wait as
+ * needed.  Call with jbuf lock held. */
+static void
+update_current_timer (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  RtpTimer *earliest = rtp_timer_queue_peek_earliest (priv->timers);
+
+  /* we never need to wakeup the timer thread when there is no more timers, if
+   * it was waiting on a clock id, it will simply do later and then wait on
+   * the conditions */
+  if (earliest == NULL) {
+    GST_DEBUG_OBJECT (jitterbuffer, "no more timers");
+    return;
+  }
+
+  GST_DEBUG_OBJECT (jitterbuffer, "waiting till %" GST_TIME_FORMAT
+      " and earliest timeout is at %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (priv->timer_timeout), GST_TIME_ARGS (earliest->timeout));
+
+  /* wakeup the timer thread in case the timer queue was empty */
+  JBUF_SIGNAL_TIMER (priv);
+
+  /* the wait in progress already covers this timeout, keep it */
+  if (earliest->timeout != -1 && earliest->timeout >= priv->timer_timeout)
+    return;
+
+  /* otherwise force the timer thread to reschedule */
+  unschedule_current_timer (jitterbuffer);
+}
+
+/* Compute the extra delay to wait before sending an RTX request.
+ *
+ * With rtx-delay == -1 the delay is derived from the measured jitter and
+ * packet spacing; otherwise the configured value is used.  rtx-min-delay
+ * always acts as a lower bound. */
+static GstClockTime
+get_rtx_delay (GstRtpJitterBufferPrivate * priv)
+{
+  GstClockTime delay;
+
+  if (priv->rtx_delay != -1) {
+    delay = priv->rtx_delay * GST_MSECOND;
+  } else {
+    /* The maximum delay for any RTX-packet is given by the latency, since
+     * anything after that is considered lost.  For various calculations
+     * (given large avg_jitter and/or packet_spacing), the resulting delay
+     * could exceed the configured latency, ending up issuing an
+     * RTX-request that would never arrive in time.  To help this we cap
+     * the delay for any RTX with the last possible time it could still
+     * arrive in time. */
+    GstClockTime cap = (priv->latency_ns > priv->avg_rtx_rtt) ?
+        priv->latency_ns - priv->avg_rtx_rtt : priv->latency_ns;
+
+    if (priv->avg_jitter == 0 && priv->packet_spacing == 0) {
+      /* no estimates yet: fall back to the fixed default */
+      delay = DEFAULT_AUTO_RTX_DELAY;
+    } else {
+      /* jitter is in nanoseconds, maximum of 2x jitter and half the
+       * packet spacing is a good margin */
+      delay = MAX (priv->avg_jitter * 2, priv->packet_spacing / 2);
+    }
+
+    delay = MIN (cap, delay);
+  }
+
+  if (priv->rtx_min_delay > 0)
+    delay = MAX (delay, priv->rtx_min_delay * GST_MSECOND);
+
+  return delay;
+}
+
+/* we just received a packet with seqnum and dts.
+ *
+ * First check for old seqnum that we are still expecting. If the gap with the
+ * current seqnum is too big, unschedule the timeouts.
+ *
+ * If we have a valid packet spacing estimate we can set a timer for when we
+ * should receive the next packet.
+ * If we don't have a valid estimate, we remove any timer we might have
+ * had for this packet.
+ *
+ * @timer, when non-NULL, is the timer that was associated with this seqnum;
+ * it may live in the rtx_stats_timers queue instead of the main timer
+ * queue, which changes how it may be reused below.
+ * Call with jbuf lock held. */
+static void
+update_rtx_timers (GstRtpJitterBuffer * jitterbuffer, guint16 seqnum,
+    GstClockTime dts, GstClockTime pts, gboolean do_next_seqnum,
+    gboolean is_rtx, RtpTimer * timer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  gboolean is_stats_timer = FALSE;
+
+  /* a timer found in the stats queue is only used for bookkeeping and must
+   * not be rescheduled or freed as a regular timer */
+  if (timer && rtp_timer_queue_find (priv->rtx_stats_timers, timer->seqnum))
+    is_stats_timer = TRUE;
+
+  /* schedule immediatly expected timer which exceed the maximum RTX delay
+   * reorder configuration */
+  if (priv->do_retransmission && priv->rtx_delay_reorder > 0) {
+    RtpTimer *test = rtp_timer_queue_peek_earliest (priv->timers);
+    while (test) {
+      gint gap;
+
+      /* filter the timer type to speed up this loop */
+      if (test->type != RTP_TIMER_EXPECTED) {
+        test = rtp_timer_get_next (test);
+        continue;
+      }
+
+      gap = gst_rtp_buffer_compare_seqnum (test->seqnum, seqnum);
+
+      GST_DEBUG_OBJECT (jitterbuffer, "%d, #%d<->#%d gap %d",
+          test->type, test->seqnum, seqnum, gap);
+
+      /* if this expected packet have a smaller gap then the configured one,
+       * then earlier timer are not expected to have bigger gap as the timer
+       * queue is ordered */
+      if (gap <= priv->rtx_delay_reorder)
+        break;
+
+      /* max gap, we exceeded the max reorder distance and we don't expect the
+       * missing packet to be this reordered */
+      if (test->num_rtx_retry == 0 && test->type == RTP_TIMER_EXPECTED)
+        rtp_timer_queue_update_timer (priv->timers, test, test->seqnum,
+            -1, 0, 0, FALSE);
+
+      test = rtp_timer_get_next (test);
+    }
+  }
+
+  /* only predict the next packet when we have a spacing estimate and the
+   * rtx-next-seqnum property allows it */
+  do_next_seqnum = do_next_seqnum && priv->packet_spacing > 0
+      && priv->rtx_next_seqnum;
+
+  if (timer && timer->type != RTP_TIMER_DEADLINE) {
+    if (timer->num_rtx_retry > 0) {
+      if (is_rtx) {
+        update_rtx_stats (jitterbuffer, timer, dts, TRUE);
+        /* don't try to estimate the next seqnum because this is a retransmitted
+         * packet and it probably did not arrive with the expected packet
+         * spacing. */
+        do_next_seqnum = FALSE;
+      }
+
+      if (!is_stats_timer && (!is_rtx || timer->num_rtx_retry > 1)) {
+        RtpTimer *stats_timer = rtp_timer_dup (timer);
+        /* Store timer in order to record stats when/if the retransmitted
+         * packet arrives. We should also store timer information if we've
+         * requested retransmission more than once since we may receive
+         * several retransmitted packets. For accuracy we should update the
+         * stats also when the redundant retransmitted packets arrives. */
+        stats_timer->timeout = pts + priv->rtx_stats_timeout * GST_MSECOND;
+        stats_timer->type = RTP_TIMER_EXPECTED;
+        rtp_timer_queue_insert (priv->rtx_stats_timers, stats_timer);
+      }
+    }
+  }
+
+  if (do_next_seqnum && pts != GST_CLOCK_TIME_NONE) {
+    GstClockTime next_expected_pts, delay;
+
+    /* calculate expected arrival time of the next seqnum */
+    next_expected_pts = pts + priv->packet_spacing;
+
+    delay = get_rtx_delay (priv);
+
+    /* and update/install timer for next seqnum */
+    GST_DEBUG_OBJECT (jitterbuffer, "Add RTX timer #%d, next_expected_pts %"
+        GST_TIME_FORMAT ", delay %" GST_TIME_FORMAT ", est packet duration %"
+        GST_TIME_FORMAT ", jitter %" GST_TIME_FORMAT, priv->next_in_seqnum,
+        GST_TIME_ARGS (next_expected_pts), GST_TIME_ARGS (delay),
+        GST_TIME_ARGS (priv->packet_spacing), GST_TIME_ARGS (priv->avg_jitter));
+
+    if (timer && !is_stats_timer) {
+      /* reuse this packet's timer for the next expected seqnum */
+      timer->type = RTP_TIMER_EXPECTED;
+      rtp_timer_queue_update_timer (priv->timers, timer, priv->next_in_seqnum,
+          next_expected_pts, delay, 0, TRUE);
+    } else {
+      rtp_timer_queue_set_expected (priv->timers, priv->next_in_seqnum,
+          next_expected_pts, delay, priv->packet_spacing);
+    }
+  } else if (timer && timer->type != RTP_TIMER_DEADLINE && !is_stats_timer) {
+    /* if we had a timer, remove it, we don't know when to expect the next
+     * packet. */
+    rtp_timer_queue_unschedule (priv->timers, timer);
+    rtp_timer_free (timer);
+  }
+}
+
+/* Update the estimated spacing between consecutive packets.
+ *
+ * Only consecutive seqnums with a changed RTP timestamp contribute; the
+ * estimate is a weighted average biased towards larger spacings so that
+ * packets arriving a little late do not trigger spurious retransmission
+ * requests.  Call with jbuf lock held. */
+static void
+calculate_packet_spacing (GstRtpJitterBuffer * jitterbuffer, guint32 rtptime,
+    GstClockTime pts)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+  /* same RTP timestamp as before: cannot estimate, wait for a change */
+  if (priv->ips_rtptime == rtptime)
+    return;
+
+  if (priv->ips_pts != -1 && pts != -1 && pts > priv->ips_pts) {
+    GstClockTime new_spacing = pts - priv->ips_pts;
+    GstClockTime old_spacing = priv->packet_spacing;
+
+    /* weights 3:1 favour whichever of old/new spacing is bigger */
+    if (old_spacing > new_spacing)
+      priv->packet_spacing = (new_spacing + 3 * old_spacing) / 4;
+    else if (old_spacing > 0)
+      priv->packet_spacing = (3 * new_spacing + old_spacing) / 4;
+    else
+      priv->packet_spacing = new_spacing;
+
+    GST_DEBUG_OBJECT (jitterbuffer,
+        "new packet spacing %" GST_TIME_FORMAT
+        " old packet spacing %" GST_TIME_FORMAT
+        " combined to %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (new_spacing),
+        GST_TIME_ARGS (old_spacing),
+        GST_TIME_ARGS (priv->packet_spacing));
+  }
+
+  priv->ips_rtptime = rtptime;
+  priv->ips_pts = pts;
+}
+
+/* Insert a lost item (and, when the do-lost property is set, a matching
+ * GstRTPPacketLost custom event) covering @lost_packets packets starting
+ * at @seqnum.  Also advances next_in_seqnum/last_in_pts past the lost
+ * range and updates the lost/RTX-failed counters.
+ * Call with jbuf lock held. */
+static void
+insert_lost_event (GstRtpJitterBuffer * jitterbuffer,
+    guint16 seqnum, guint lost_packets, GstClockTime timestamp,
+    GstClockTime duration, guint num_rtx_retry)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  GstEvent *event = NULL;
+  guint next_in_seqnum;
+
+  /* we had a gap and thus we lost some packets. Create an event for this. */
+  if (lost_packets > 1)
+    GST_DEBUG_OBJECT (jitterbuffer, "Packets #%d -> #%d lost", seqnum,
+        seqnum + lost_packets - 1);
+  else
+    GST_DEBUG_OBJECT (jitterbuffer, "Packet #%d lost", seqnum);
+
+  priv->num_lost += lost_packets;
+  priv->num_rtx_failed += num_rtx_retry;
+
+  /* seqnum following the lost range, with 16-bit wraparound */
+  next_in_seqnum = (seqnum + lost_packets) & 0xffff;
+
+  /* we now only accept seqnum bigger than this */
+  if (gst_rtp_buffer_compare_seqnum (priv->next_in_seqnum, next_in_seqnum) > 0) {
+    priv->next_in_seqnum = next_in_seqnum;
+    priv->last_in_pts = timestamp;
+  }
+
+  /* Avoid creating events if we don't need it. Note that we still need to create
+   * the lost *ITEM* since it will be used to notify the outgoing thread of
+   * lost items (so that we can set discont flags and such) */
+  if (priv->do_lost) {
+    /* create packet lost event */
+    if (duration == GST_CLOCK_TIME_NONE && priv->packet_spacing > 0)
+      duration = priv->packet_spacing;
+    event = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+        gst_structure_new ("GstRTPPacketLost",
+            "seqnum", G_TYPE_UINT, (guint) seqnum,
+            "timestamp", G_TYPE_UINT64, timestamp,
+            "duration", G_TYPE_UINT64, duration,
+            "retry", G_TYPE_UINT, num_rtx_retry, NULL));
+  }
+  /* wake the pushing thread when the lost item becomes the new head */
+  if (rtp_jitter_buffer_append_lost_event (priv->jbuf,
+          event, seqnum, lost_packets))
+    JBUF_SIGNAL_EVENT (priv);
+}
+
+/* Handle the gap of @gap missing packets between @missing_seqnum and the
+ * just-received @current_seqnum.
+ *
+ * With equidistant spacing, packets whose estimated PTS (plus offsets) is
+ * already in the past are reported at once through a single "multi-lost"
+ * event; the remaining, still-saveable packets get EXPECTED (RTX) timers
+ * when retransmission is enabled, or LOST timers otherwise.
+ * Call with jbuf lock held. */
+static void
+gst_rtp_jitter_buffer_handle_missing_packets (GstRtpJitterBuffer * jitterbuffer,
+    guint32 missing_seqnum, guint16 current_seqnum, GstClockTime pts, gint gap,
+    GstClockTime now)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  GstClockTime est_pkt_duration, est_pts;
+  gboolean equidistant = priv->equidistant > 0;
+  GstClockTime last_in_pts = priv->last_in_pts;
+  GstClockTimeDiff offset = timeout_offset (jitterbuffer);
+  GstClockTime rtx_delay = get_rtx_delay (priv);
+  guint16 remaining_gap;
+  GstClockTimeDiff remaining_duration;
+  GstClockTimeDiff remainder_duration;
+  guint i;
+
+  GST_DEBUG_OBJECT (jitterbuffer,
+      "Missing packets: (#%u->#%u), gap %d, pts %" GST_TIME_FORMAT
+      ", last-pts %" GST_TIME_FORMAT,
+      missing_seqnum, current_seqnum - 1, gap, GST_TIME_ARGS (pts),
+      GST_TIME_ARGS (last_in_pts));
+
+  if (equidistant) {
+    GstClockTimeDiff total_duration;
+    gboolean too_late;
+
+    /* the total duration spanned by the missing packets */
+    total_duration = MAX (0, GST_CLOCK_DIFF (last_in_pts, pts));
+
+    /* interpolate between the current time and the last time based on
+     * number of packets we are missing, this is the estimated duration
+     * for the missing packet based on equidistant packet spacing. */
+    est_pkt_duration = total_duration / (gap + 1);
+
+    /* if we have valid packet-spacing, use that */
+    if (total_duration > 0 && priv->packet_spacing) {
+      est_pkt_duration = priv->packet_spacing;
+    }
+
+    est_pts = last_in_pts + est_pkt_duration;
+    GST_DEBUG_OBJECT (jitterbuffer, "estimated missing packet pts %"
+        GST_TIME_FORMAT " and duration %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (est_pts), GST_TIME_ARGS (est_pkt_duration));
+
+    /* a packet is considered too late if our estimated pts plus all
+       applicable offsets are in the past */
+    too_late = now > (est_pts + offset);
+
+    /* Here we optimistically try to save any packets that could potentially
+       be saved by making sure we create lost/rtx timers for them, and for
+       the rest that could not possibly be saved, we create a "multi-lost"
+       event immediately containing the missing duration and sequence numbers */
+    if (too_late) {
+      guint lost_packets;
+      GstClockTime lost_duration;
+      GstClockTimeDiff gap_time;
+      guint max_saveable_packets = 0;
+      GstClockTime max_saveable_duration;
+      GstClockTime saveable_duration;
+
+      /* gap time represents the total duration of all missing packets */
+      gap_time = MAX (0, GST_CLOCK_DIFF (est_pts, pts));
+
+      /* based on the estimated packet duration, we
+         can figure out how many packets we could possibly save */
+      if (est_pkt_duration)
+        max_saveable_packets = offset / est_pkt_duration;
+
+      /* and say that the amount of lost packet is the sequence-number
+         gap minus these saveable packets, but at least 1 */
+      lost_packets = MAX (1, (gint) gap - (gint) max_saveable_packets);
+
+      /* now we know how many packets we can possibly save */
+      max_saveable_packets = gap - lost_packets;
+
+      /* we convert that to time */
+      max_saveable_duration = max_saveable_packets * est_pkt_duration;
+
+      /* determine the actual amount of time we can save */
+      saveable_duration = MIN (max_saveable_duration, gap_time);
+
+      /* and we now have the duration we need to fill */
+      lost_duration = GST_CLOCK_DIFF (saveable_duration, gap_time);
+
+      /* this multi-lost-packet event will be inserted directly into the packet-queue
+         for immediate processing */
+      if (lost_packets > 0) {
+        RtpTimer *timer;
+        GstClockTime timestamp = apply_offset (jitterbuffer, est_pts);
+
+        GST_INFO_OBJECT (jitterbuffer, "lost event for %d packet(s) (#%d->#%d) "
+            "for duration %" GST_TIME_FORMAT, lost_packets, missing_seqnum,
+            missing_seqnum + lost_packets - 1, GST_TIME_ARGS (lost_duration));
+
+        insert_lost_event (jitterbuffer, missing_seqnum, lost_packets,
+            timestamp, lost_duration, 0);
+
+        /* an existing (non-deadline) timer for the first lost seqnum is now
+         * obsolete: the packets have been reported lost */
+        timer = rtp_timer_queue_find (priv->timers, missing_seqnum);
+        if (timer && timer->type != RTP_TIMER_DEADLINE) {
+          if (timer->queued)
+            rtp_timer_queue_unschedule (priv->timers, timer);
+          GST_DEBUG_OBJECT (jitterbuffer, "removing timer for seqnum #%u",
+              missing_seqnum);
+          rtp_timer_free (timer);
+        }
+
+        /* continue below with the first still-saveable packet */
+        missing_seqnum += lost_packets;
+        est_pts += lost_duration;
+      }
+    }
+
+  } else {
+    /* If we cannot assume equidistant packet spacing, the only thing we now
+     * for sure is that the missing packets have expected pts not later than
+     * the last received pts. */
+    est_pkt_duration = 0;
+    est_pts = pts;
+  }
+
+  /* Figure out how many more packets we are missing. */
+  remaining_gap = current_seqnum - missing_seqnum;
+  /* and how much time these packets represent */
+  remaining_duration = MAX (0, GST_CLOCK_DIFF (est_pts, pts));
+  /* Given the calculated packet-duration (packet spacing when equidistant),
+     the remainder is what we are left with after subtracting the ideal time
+     for the gap */
+  remainder_duration =
+      MAX (0, GST_CLOCK_DIFF (est_pkt_duration * remaining_gap,
+          remaining_duration));
+
+  GST_DEBUG_OBJECT (jitterbuffer, "remaining gap of %u, with "
+      "duration %" GST_TIME_FORMAT " gives remainder duration %"
+      GST_STIME_FORMAT, remaining_gap, GST_TIME_ARGS (remaining_duration),
+      GST_STIME_ARGS (remainder_duration));
+
+  /* schedule one timer per remaining missing packet */
+  for (i = 0; i < remaining_gap; i++) {
+    GstClockTime duration = est_pkt_duration;
+    /* we add the remainder on the first packet */
+    if (i == 0)
+      duration += remainder_duration;
+
+    /* clip duration to what is actually left */
+    remaining_duration = MAX (0, GST_CLOCK_DIFF (est_pts, pts));
+    duration = MIN (duration, remaining_duration);
+
+    if (priv->do_retransmission) {
+      RtpTimer *timer = rtp_timer_queue_find (priv->timers, missing_seqnum);
+
+      /* if we had a timer for the missing packet, update it. */
+      if (timer && timer->type == RTP_TIMER_EXPECTED) {
+        timer->duration = duration;
+        /* only move a timer earlier, and never one that already fired RTX */
+        if (timer->timeout > (est_pts + rtx_delay) && timer->num_rtx_retry == 0) {
+          rtp_timer_queue_update_timer (priv->timers, timer, timer->seqnum,
+              est_pts, rtx_delay, 0, TRUE);
+          GST_DEBUG_OBJECT (jitterbuffer, "Update RTX timer(s) #%u, "
+              "pts %" GST_TIME_FORMAT ", delay %" GST_TIME_FORMAT
+              ", duration %" GST_TIME_FORMAT,
+              missing_seqnum, GST_TIME_ARGS (est_pts),
+              GST_TIME_ARGS (rtx_delay), GST_TIME_ARGS (duration));
+        }
+      } else {
+        GST_DEBUG_OBJECT (jitterbuffer, "Add RTX timer(s) #%u, "
+            "pts %" GST_TIME_FORMAT ", delay %" GST_TIME_FORMAT
+            ", duration %" GST_TIME_FORMAT,
+            missing_seqnum, GST_TIME_ARGS (est_pts),
+            GST_TIME_ARGS (rtx_delay), GST_TIME_ARGS (duration));
+        rtp_timer_queue_set_expected (priv->timers, missing_seqnum, est_pts,
+            rtx_delay, duration);
+      }
+    } else {
+      GST_INFO_OBJECT (jitterbuffer,
+          "Add Lost timer for #%u, pts %" GST_TIME_FORMAT
+          ", duration %" GST_TIME_FORMAT ", offset %" GST_STIME_FORMAT,
+          missing_seqnum, GST_TIME_ARGS (est_pts),
+          GST_TIME_ARGS (duration), GST_STIME_ARGS (offset));
+      rtp_timer_queue_set_lost (priv->timers, missing_seqnum, est_pts,
+          duration, offset);
+    }
+
+    missing_seqnum++;
+    est_pts += duration;
+  }
+}
+
+/* Update the running interarrival-jitter estimate (RFC 3550 style) from
+ * the arrival time @dts and the RTP timestamp @rtptime of a new packet.
+ * Also maintains the "equidistant" heuristic counter used elsewhere to
+ * decide whether packet spacing can be assumed uniform.
+ * Requires a valid dts and a known clock-rate; otherwise does nothing. */
+static void
+calculate_jitter (GstRtpJitterBuffer * jitterbuffer, GstClockTime dts,
+    guint32 rtptime)
+{
+  gint32 rtpdiff;
+  GstClockTimeDiff dtsdiff, rtpdiffns, diff;
+  GstRtpJitterBufferPrivate *priv;
+
+  priv = jitterbuffer->priv;
+
+  if (G_UNLIKELY (dts == GST_CLOCK_TIME_NONE) || priv->clock_rate <= 0)
+    goto no_time;
+
+  if (priv->last_dts != -1)
+    dtsdiff = dts - priv->last_dts;
+  else
+    dtsdiff = 0;
+
+  /* 32-bit subtraction so RTP timestamp wraparound is handled naturally */
+  if (priv->last_rtptime != -1)
+    rtpdiff = rtptime - (guint32) priv->last_rtptime;
+  else
+    rtpdiff = 0;
+
+  /* Guess whether stream currently uses equidistant packet spacing. If we
+   * often see identical timestamps it means the packets are not
+   * equidistant. */
+  if (rtptime == priv->last_rtptime)
+    priv->equidistant -= 2;
+  else
+    priv->equidistant += 1;
+  priv->equidistant = CLAMP (priv->equidistant, -7, 7);
+
+  priv->last_dts = dts;
+  priv->last_rtptime = rtptime;
+
+  /* scale RTP ticks to nanoseconds, keeping the sign */
+  if (rtpdiff > 0)
+    rtpdiffns =
+        gst_util_uint64_scale_int (rtpdiff, GST_SECOND, priv->clock_rate);
+  else
+    rtpdiffns =
+        -gst_util_uint64_scale_int (-rtpdiff, GST_SECOND, priv->clock_rate);
+
+  diff = ABS (dtsdiff - rtpdiffns);
+
+  /* jitter is stored in nanoseconds; exponential moving average with
+   * weight 1/16 for the new sample */
+  priv->avg_jitter = (diff + (15 * priv->avg_jitter)) >> 4;
+
+  GST_LOG_OBJECT (jitterbuffer,
+      "dtsdiff %" GST_STIME_FORMAT " rtptime %" GST_STIME_FORMAT
+      ", clock-rate %d, diff %" GST_STIME_FORMAT ", jitter: %" GST_TIME_FORMAT,
+      GST_STIME_ARGS (dtsdiff), GST_STIME_ARGS (rtpdiffns), priv->clock_rate,
+      GST_STIME_ARGS (diff), GST_TIME_ARGS (priv->avg_jitter));
+
+  return;
+
+  /* ERRORS */
+no_time:
+  {
+    GST_DEBUG_OBJECT (jitterbuffer,
+        "no dts or no clock-rate, can't calculate jitter");
+    return;
+  }
+}
+
+/* GCompareDataFunc ordering RTP buffers by ascending sequence number,
+ * with 16-bit wraparound handled by gst_rtp_buffer_compare_seqnum(). */
+static gint
+compare_buffer_seqnum (GstBuffer * a, GstBuffer * b, gpointer user_data)
+{
+  GstRTPBuffer map_a = GST_RTP_BUFFER_INIT;
+  GstRTPBuffer map_b = GST_RTP_BUFFER_INIT;
+  guint seq_a, seq_b;
+
+  gst_rtp_buffer_map (a, GST_MAP_READ, &map_a);
+  seq_a = gst_rtp_buffer_get_seq (&map_a);
+  gst_rtp_buffer_unmap (&map_a);
+
+  gst_rtp_buffer_map (b, GST_MAP_READ, &map_b);
+  seq_b = gst_rtp_buffer_get_seq (&map_b);
+  gst_rtp_buffer_unmap (&map_b);
+
+  /* arguments swapped so that smaller seqnums sort first */
+  return gst_rtp_buffer_compare_seqnum (seq_b, seq_a);
+}
+
+/* Handle a buffer whose seqnum gap exceeds max_dropout/max_misorder.
+ *
+ * Such buffers are collected in priv->gap_packets.  Once more than 3 are
+ * queued (i.e. 5 including the current one) and they all share payload
+ * type @pt with strictly consecutive seqnums, the stream is assumed to
+ * have genuinely jumped and TRUE is returned to request a reset.  A
+ * non-consecutive collection is discarded entirely.  Ownership of
+ * @buffer is always taken (queued or dropped).
+ * Call with jbuf lock held. */
+static gboolean
+handle_big_gap_buffer (GstRtpJitterBuffer * jitterbuffer, GstBuffer * buffer,
+    guint8 pt, guint16 seqnum, gint gap, guint max_dropout, guint max_misorder)
+{
+  GstRtpJitterBufferPrivate *priv;
+  guint gap_packets_length;
+  gboolean reset = FALSE;
+  gboolean future = gap > 0;
+
+  priv = jitterbuffer->priv;
+
+  if ((gap_packets_length = g_queue_get_length (&priv->gap_packets)) > 0) {
+    GList *l;
+    guint32 prev_gap_seq = -1;
+    gboolean all_consecutive = TRUE;
+
+    /* keep the collected gap packets sorted by seqnum */
+    g_queue_insert_sorted (&priv->gap_packets, buffer,
+        (GCompareDataFunc) compare_buffer_seqnum, NULL);
+
+    /* walk the sorted queue; the loop breaks out as soon as one buffer
+     * fails the payload-type or consecutive-seqnum check, so every buffer
+     * up to that point has been validated */
+    for (l = priv->gap_packets.head; l; l = l->next) {
+      GstBuffer *gap_buffer = l->data;
+      GstRTPBuffer gap_rtp = GST_RTP_BUFFER_INIT;
+      guint32 gap_seq;
+
+      gst_rtp_buffer_map (gap_buffer, GST_MAP_READ, &gap_rtp);
+
+      all_consecutive = (gst_rtp_buffer_get_payload_type (&gap_rtp) == pt);
+
+      gap_seq = gst_rtp_buffer_get_seq (&gap_rtp);
+      if (prev_gap_seq == -1)
+        prev_gap_seq = gap_seq;
+      else if (gst_rtp_buffer_compare_seqnum (gap_seq, prev_gap_seq) != -1)
+        all_consecutive = FALSE;
+      else
+        prev_gap_seq = gap_seq;
+
+      gst_rtp_buffer_unmap (&gap_rtp);
+      if (!all_consecutive)
+        break;
+    }
+
+    if (all_consecutive && gap_packets_length > 3) {
+      GST_DEBUG_OBJECT (jitterbuffer,
+          "buffer too %s %d < %d, got 5 consecutive ones - reset",
+          (future ? "new" : "old"), gap,
+          (future ? max_dropout : -max_misorder));
+      reset = TRUE;
+    } else if (!all_consecutive) {
+      /* mixed payload types or non-consecutive seqnums: throw it all away */
+      g_queue_foreach (&priv->gap_packets, (GFunc) gst_buffer_unref, NULL);
+      g_queue_clear (&priv->gap_packets);
+      GST_DEBUG_OBJECT (jitterbuffer,
+          "buffer too %s %d < %d, got no 5 consecutive ones - dropping",
+          (future ? "new" : "old"), gap,
+          (future ? max_dropout : -max_misorder));
+      buffer = NULL;
+    } else {
+      GST_DEBUG_OBJECT (jitterbuffer,
+          "buffer too %s %d < %d, got %u consecutive ones - waiting",
+          (future ? "new" : "old"), gap,
+          (future ? max_dropout : -max_misorder), gap_packets_length + 1);
+      buffer = NULL;
+    }
+  } else {
+    /* first out-of-range buffer: start collecting */
+    GST_DEBUG_OBJECT (jitterbuffer,
+        "buffer too %s %d < %d, first one - waiting", (future ? "new" : "old"),
+        gap, -max_misorder);
+    g_queue_push_tail (&priv->gap_packets, buffer);
+    buffer = NULL;
+  }
+
+  return reset;
+}
+
+/* Return the element's current running time (clock time minus base time,
+ * clamped to 0), or GST_CLOCK_TIME_NONE when no clock is set. */
+static GstClockTime
+get_current_running_time (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstElement *element = GST_ELEMENT_CAST (jitterbuffer);
+  GstClock *clock = gst_element_get_clock (element);
+  GstClockTime running_time = GST_CLOCK_TIME_NONE;
+
+  if (clock != NULL) {
+    GstClockTime base_time = gst_element_get_base_time (element);
+    GstClockTime clock_time = gst_clock_get_time (clock);
+
+    running_time = (clock_time > base_time) ? clock_time - base_time : 0;
+
+    gst_object_unref (clock);
+  }
+
+  return running_time;
+}
+
+/* Flush the jitterbuffer and restart it at @seqnum (or at the first
+ * collected gap packet), then re-chain all collected gap packets.
+ *
+ * Sticky events are retained across the flush and re-appended in order so
+ * STREAM_START/CAPS/SEGMENT are not lost.  Must be entered with JBUF_LOCK
+ * held; the lock is released before re-chaining the gap packets (chain
+ * takes it again) and is NOT re-taken before returning.
+ *
+ * Note: the duplicated reset of ips_rtptime/ips_pts that used to follow
+ * the g_queue_clear() was removed; nothing in between touched them. */
+static GstFlowReturn
+gst_rtp_jitter_buffer_reset (GstRtpJitterBuffer * jitterbuffer,
+    GstPad * pad, GstObject * parent, guint16 seqnum)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GList *events = NULL, *l;
+  GList *buffers;
+
+  GST_DEBUG_OBJECT (jitterbuffer, "flush and reset jitterbuffer");
+  rtp_jitter_buffer_flush (priv->jbuf,
+      (GFunc) free_item_and_retain_sticky_events, &events);
+  rtp_jitter_buffer_reset_skew (priv->jbuf);
+  rtp_timer_queue_remove_all (priv->timers);
+  priv->discont = TRUE;
+  priv->last_popped_seqnum = -1;
+
+  /* resume from the earliest collected gap packet when there is one */
+  if (priv->gap_packets.head) {
+    GstBuffer *gap_buffer = priv->gap_packets.head->data;
+    GstRTPBuffer gap_rtp = GST_RTP_BUFFER_INIT;
+
+    gst_rtp_buffer_map (gap_buffer, GST_MAP_READ, &gap_rtp);
+    priv->next_seqnum = gst_rtp_buffer_get_seq (&gap_rtp);
+    gst_rtp_buffer_unmap (&gap_rtp);
+  } else {
+    priv->next_seqnum = seqnum;
+  }
+
+  priv->last_in_pts = -1;
+  priv->next_in_seqnum = -1;
+
+  /* Insert all sticky events again in order, otherwise we would
+   * potentially loose STREAM_START, CAPS or SEGMENT events
+   */
+  events = g_list_reverse (events);
+  for (l = events; l; l = l->next) {
+    rtp_jitter_buffer_append_event (priv->jbuf, l->data);
+  }
+  g_list_free (events);
+
+  JBUF_SIGNAL_EVENT (priv);
+
+  /* reset spacing estimation when gap */
+  priv->ips_rtptime = -1;
+  priv->ips_pts = GST_CLOCK_TIME_NONE;
+
+  /* take the collected gap packets out of priv before dropping the lock */
+  buffers = g_list_copy (priv->gap_packets.head);
+  g_queue_clear (&priv->gap_packets);
+
+  JBUF_UNLOCK (jitterbuffer->priv);
+
+  /* re-chain the gap packets; on error, unref whatever is left */
+  for (l = buffers; l; l = l->next) {
+    ret = gst_rtp_jitter_buffer_chain (pad, parent, l->data);
+    l->data = NULL;
+    if (ret != GST_FLOW_OK) {
+      l = l->next;
+      break;
+    }
+  }
+  for (; l; l = l->next)
+    gst_buffer_unref (l->data);
+  g_list_free (buffers);
+
+  return ret;
+}
+
+/* Check whether enough consecutive packets are already queued to start
+ * pushing before the initial deadline timer expires; if so, make the
+ * deadline timer fire immediately.  Returns TRUE when fast start was
+ * triggered.  Call with jbuf lock held. */
+static gboolean
+gst_rtp_jitter_buffer_fast_start (GstRtpJitterBuffer * jitterbuffer)
+{
+  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+  RTPJitterBufferItem *head;
+  RtpTimer *deadline;
+
+  /* feature disabled */
+  if (priv->faststart_min_packets == 0)
+    return FALSE;
+
+  head = rtp_jitter_buffer_peek (priv->jbuf);
+  if (head == NULL)
+    return FALSE;
+
+  /* only applicable while the initial deadline timer is still pending */
+  deadline = rtp_timer_queue_find (priv->timers, head->seqnum);
+  if (deadline == NULL || deadline->type != RTP_TIMER_DEADLINE)
+    return FALSE;
+
+  if (!rtp_jitter_buffer_can_fast_start (priv->jbuf,
+          priv->faststart_min_packets))
+    return FALSE;
+
+  GST_INFO_OBJECT (jitterbuffer, "We found %i consecutive packet, start now",
+      priv->faststart_min_packets);
+  /* expire the deadline timer right away */
+  deadline->timeout = -1;
+  rtp_timer_queue_reschedule (priv->timers, deadline);
+
+  return TRUE;
+}
+
/* Sink pad chain function.  Takes ownership of @buffer, derives DTS/PTS,
 * validates the packet against the expected sequence number, updates
 * packet-rate/jitter/RTX statistics and inserts the packet into the
 * jitterbuffer in sorted order, scheduling timers as needed.
 * Takes JBUF_LOCK internally; all error paths drop the buffer ref. */
static GstFlowReturn
gst_rtp_jitter_buffer_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buffer)
{
  GstRtpJitterBuffer *jitterbuffer;
  GstRtpJitterBufferPrivate *priv;
  guint16 seqnum;
  guint32 expected, rtptime;
  GstFlowReturn ret = GST_FLOW_OK;
  GstClockTime now;
  GstClockTime dts, pts;
  guint64 latency_ts;
  gboolean head;
  gboolean duplicate;
  gint percent = -1;
  guint8 pt;
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  gboolean do_next_seqnum = FALSE;
  GstMessage *msg = NULL;
  GstMessage *drop_msg = NULL;
  gboolean estimated_dts = FALSE;
  gint32 packet_rate, max_dropout, max_misorder;
  RtpTimer *timer = NULL;
  gboolean is_rtx;

  jitterbuffer = GST_RTP_JITTER_BUFFER_CAST (parent);

  priv = jitterbuffer->priv;

  if (G_UNLIKELY (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)))
    goto invalid_buffer;

  /* extract the RTP header fields we need, then unmap before touching
   * anything else so the buffer is never left mapped on an error path */
  pt = gst_rtp_buffer_get_payload_type (&rtp);
  seqnum = gst_rtp_buffer_get_seq (&rtp);
  rtptime = gst_rtp_buffer_get_timestamp (&rtp);
  gst_rtp_buffer_unmap (&rtp);

  is_rtx = GST_BUFFER_IS_RETRANSMISSION (buffer);
  now = get_current_running_time (jitterbuffer);

  /* make sure we have PTS and DTS set */
  pts = GST_BUFFER_PTS (buffer);
  dts = GST_BUFFER_DTS (buffer);
  if (dts == -1)
    dts = pts;
  else if (pts == -1)
    pts = dts;

  if (dts == -1) {
    /* If we have no DTS here, i.e. no capture time, get one from the
     * clock now to have something to calculate with in the future. */
    dts = now;
    pts = dts;

    /* Remember that we estimated the DTS if we are running already
     * and this is not our first packet (or first packet after a reset).
     * If it's the first packet, we somehow must generate a timestamp for
     * everything, otherwise we can't calculate any times
     */
    estimated_dts = (priv->next_in_seqnum != -1);
  } else {
    /* take the DTS of the buffer. This is the time when the packet was
     * received and is used to calculate jitter and clock skew. We will adjust
     * this DTS with the smoothed value after processing it in the
     * jitterbuffer and assign it as the PTS. */
    /* bring to running time */
    dts = gst_segment_to_running_time (&priv->segment, GST_FORMAT_TIME, dts);
  }

  GST_DEBUG_OBJECT (jitterbuffer,
      "Received packet #%d at time %" GST_TIME_FORMAT ", discont %d, rtx %d",
      seqnum, GST_TIME_ARGS (dts), GST_BUFFER_IS_DISCONT (buffer), is_rtx);

  JBUF_LOCK_CHECK (priv, out_flushing);

  if (G_UNLIKELY (priv->last_pt != pt)) {
    GstCaps *caps;

    GST_DEBUG_OBJECT (jitterbuffer, "pt changed from %u to %u", priv->last_pt,
        pt);

    priv->last_pt = pt;
    /* reset clock-rate so that we get a new one */
    priv->clock_rate = -1;

    /* Try to get the clock-rate from the caps first if we can. If there are no
     * caps we must fire the signal to get the clock-rate. */
    if ((caps = gst_pad_get_current_caps (pad))) {
      gst_jitter_buffer_sink_parse_caps (jitterbuffer, caps, pt);
      gst_caps_unref (caps);
    }
  }

  if (G_UNLIKELY (priv->clock_rate == -1)) {
    /* no clock rate given on the caps, try to get one with the signal */
    if (gst_rtp_jitter_buffer_get_clock_rate (jitterbuffer,
            pt) == GST_FLOW_FLUSHING)
      goto out_flushing;

    if (G_UNLIKELY (priv->clock_rate == -1))
      goto no_clock_rate;

    gst_rtp_packet_rate_ctx_reset (&priv->packet_rate_ctx, priv->clock_rate);
  }

  /* don't accept more data on EOS */
  if (G_UNLIKELY (priv->eos))
    goto have_eos;

  if (!is_rtx)
    calculate_jitter (jitterbuffer, dts, rtptime);

  if (priv->seqnum_base != -1) {
    gint gap;

    gap = gst_rtp_buffer_compare_seqnum (priv->seqnum_base, seqnum);

    if (gap < 0) {
      /* packet precedes the configured seqnum-base, drop it silently */
      GST_DEBUG_OBJECT (jitterbuffer,
          "packet seqnum #%d before seqnum-base #%d", seqnum,
          priv->seqnum_base);
      gst_buffer_unref (buffer);
      goto finished;
    } else if (gap > 16384) {
      /* From now on don't compare against the seqnum base anymore as
       * at some point in the future we will wrap around and also that
       * much reordering is very unlikely */
      priv->seqnum_base = -1;
    }
  }

  expected = priv->next_in_seqnum;

  /* don't update packet-rate based on RTX, as those arrive highly unregularly */
  if (!is_rtx) {
    packet_rate = gst_rtp_packet_rate_ctx_update (&priv->packet_rate_ctx,
        seqnum, rtptime);
    GST_TRACE_OBJECT (jitterbuffer, "updated packet_rate: %d", packet_rate);
  }
  max_dropout =
      gst_rtp_packet_rate_ctx_get_max_dropout (&priv->packet_rate_ctx,
      priv->max_dropout_time);
  max_misorder =
      gst_rtp_packet_rate_ctx_get_max_misorder (&priv->packet_rate_ctx,
      priv->max_misorder_time);
  GST_TRACE_OBJECT (jitterbuffer, "max_dropout: %d, max_misorder: %d",
      max_dropout, max_misorder);

  timer = rtp_timer_queue_find (priv->timers, seqnum);
  if (is_rtx) {
    if (G_UNLIKELY (!priv->do_retransmission))
      goto unsolicited_rtx;

    /* an RTX for an already-lost packet may still have a stats timer */
    if (!timer)
      timer = rtp_timer_queue_find (priv->rtx_stats_timers, seqnum);

    /* If the first buffer is an (old) rtx, e.g. from before a reset, or
     * already lost, ignore it */
    if (!timer || expected == -1)
      goto unsolicited_rtx;
  }

  /* now check against our expected seqnum */
  if (G_UNLIKELY (expected == -1)) {
    GST_DEBUG_OBJECT (jitterbuffer, "First buffer #%d", seqnum);

    /* calculate a pts based on rtptime and arrival time (dts) */
    pts =
        rtp_jitter_buffer_calculate_pts (priv->jbuf, dts, estimated_dts,
        rtptime, gst_element_get_base_time (GST_ELEMENT_CAST (jitterbuffer)),
        0, FALSE);

    if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pts))) {
      /* A valid timestamp cannot be calculated, discard packet */
      goto discard_invalid;
    }

    /* we don't know what the next_in_seqnum should be, wait for the last
     * possible moment to push this buffer, maybe we get an earlier seqnum
     * while we wait */
    rtp_timer_queue_set_deadline (priv->timers, seqnum, pts,
        timeout_offset (jitterbuffer));

    do_next_seqnum = TRUE;
    /* take rtptime and pts to calculate packet spacing */
    priv->ips_rtptime = rtptime;
    priv->ips_pts = pts;

  } else {
    gint gap;
    /* now calculate gap */
    gap = gst_rtp_buffer_compare_seqnum (expected, seqnum);
    GST_DEBUG_OBJECT (jitterbuffer, "expected #%d, got #%d, gap of %d",
        expected, seqnum, gap);

    if (G_UNLIKELY (gap > 0 &&
            rtp_timer_queue_length (priv->timers) >= max_dropout)) {
      /* If we have timers for more than RTP_MAX_DROPOUT packets
       * pending this means that we have a huge gap overall. We can
       * reset the jitterbuffer at this point because there's
       * just too much data missing to be able to do anything
       * sensible with the past data. Just try again from the
       * next packet */
      GST_WARNING_OBJECT (jitterbuffer, "%d pending timers > %d - resetting",
          rtp_timer_queue_length (priv->timers), max_dropout);
      g_queue_insert_sorted (&priv->gap_packets, buffer,
          (GCompareDataFunc) compare_buffer_seqnum, NULL);
      /* NOTE: reset() consumes the gap_packets queue and releases JBUF_LOCK
       * itself before re-chaining the queued buffers */
      return gst_rtp_jitter_buffer_reset (jitterbuffer, pad, parent, seqnum);
    }

    /* Special handling of large gaps */
    if (!is_rtx && ((gap != -1 && gap < -max_misorder) || (gap >= max_dropout))) {
      gboolean reset = handle_big_gap_buffer (jitterbuffer, buffer, pt, seqnum,
          gap, max_dropout, max_misorder);
      if (reset) {
        return gst_rtp_jitter_buffer_reset (jitterbuffer, pad, parent, seqnum);
      } else {
        GST_DEBUG_OBJECT (jitterbuffer,
            "Had big gap, waiting for more consecutive packets");
        goto finished;
      }
    }

    /* We had no huge gap, let's drop all the gap packets */
    GST_DEBUG_OBJECT (jitterbuffer, "Clearing gap packets");
    g_queue_foreach (&priv->gap_packets, (GFunc) gst_buffer_unref, NULL);
    g_queue_clear (&priv->gap_packets);

    /* calculate a pts based on rtptime and arrival time (dts) */
    /* If we estimated the DTS, don't consider it in the clock skew calculations */
    pts =
        rtp_jitter_buffer_calculate_pts (priv->jbuf, dts, estimated_dts,
        rtptime, gst_element_get_base_time (GST_ELEMENT_CAST (jitterbuffer)),
        gap, is_rtx);

    if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (pts))) {
      /* A valid timestamp cannot be calculated, discard packet */
      goto discard_invalid;
    }

    if (G_LIKELY (gap == 0)) {
      /* packet is expected */
      calculate_packet_spacing (jitterbuffer, rtptime, pts);
      do_next_seqnum = TRUE;
    } else {

      /* we have a gap */
      if (gap > 0) {
        GST_DEBUG_OBJECT (jitterbuffer, "%d missing packets", gap);
        /* fill in the gap with EXPECTED timers */
        gst_rtp_jitter_buffer_handle_missing_packets (jitterbuffer, expected,
            seqnum, pts, gap, now);
        do_next_seqnum = TRUE;
      } else {
        GST_DEBUG_OBJECT (jitterbuffer, "old packet received");
        do_next_seqnum = FALSE;
      }

      /* reset spacing estimation when gap */
      priv->ips_rtptime = -1;
      priv->ips_pts = GST_CLOCK_TIME_NONE;
    }
  }

  if (do_next_seqnum) {
    priv->last_in_pts = pts;
    priv->next_in_seqnum = (seqnum + 1) & 0xffff;
  }

  /* timer is guaranteed non-NULL here for RTX: the is_rtx block above
   * jumps to unsolicited_rtx when no timer was found */
  if (is_rtx)
    timer->num_rtx_received++;

  /* At 2^15, we would detect a seqnum rollover too early, therefore
   * limit the queue size. But let's not limit it to a number that is
   * too small to avoid emptying it needlessly if there is a spurious huge
   * sequence number, let's allow at least 10k packets in any case. */
  while (rtp_jitter_buffer_is_full (priv->jbuf) &&
      priv->srcresult == GST_FLOW_OK) {
    /* intentionally shadows the outer RTX timer: expire all timers up to
     * (and including) the first DEADLINE one so the pushing thread drains */
    RtpTimer *timer = rtp_timer_queue_peek_earliest (priv->timers);
    while (timer) {
      timer->timeout = -1;
      if (timer->type == RTP_TIMER_DEADLINE)
        break;
      timer = rtp_timer_get_next (timer);
    }

    update_current_timer (jitterbuffer);
    /* releases JBUF_LOCK while waiting for space in the queue */
    JBUF_WAIT_QUEUE (priv);
    if (priv->srcresult != GST_FLOW_OK)
      goto out_flushing;
  }

  /* let's check if this buffer is too late, we can only accept packets with
   * bigger seqnum than the one we last pushed. */
  if (G_LIKELY (priv->last_popped_seqnum != -1)) {
    gint gap;

    gap = gst_rtp_buffer_compare_seqnum (priv->last_popped_seqnum, seqnum);

    /* priv->last_popped_seqnum >= seqnum, we're too late. */
    if (G_UNLIKELY (gap <= 0)) {
      if (priv->do_retransmission) {
        if (is_rtx && timer) {
          update_rtx_stats (jitterbuffer, timer, dts, FALSE);
          /* Only count the retranmitted packet too late if it has been
           * considered lost. If the original packet arrived before the
           * retransmitted we just count it as a duplicate. */
          if (timer->type != RTP_TIMER_LOST)
            goto rtx_duplicate;
        }
      }
      goto too_late;
    }
  }

  /* let's drop oldest packet if the queue is already full and drop-on-latency
   * is set. We can only do this when there actually is a latency. When no
   * latency is set, we just pump it in the queue and let the other end push it
   * out as fast as possible. */
  if (priv->latency_ms && priv->drop_on_latency) {
    /* latency expressed in RTP clock units */
    latency_ts =
        gst_util_uint64_scale_int (priv->latency_ms, priv->clock_rate, 1000);

    if (G_UNLIKELY (rtp_jitter_buffer_get_ts_diff (priv->jbuf) >= latency_ts)) {
      RTPJitterBufferItem *old_item;

      old_item = rtp_jitter_buffer_peek (priv->jbuf);

      if (IS_DROPABLE (old_item)) {
        old_item = rtp_jitter_buffer_pop (priv->jbuf, &percent);
        GST_DEBUG_OBJECT (jitterbuffer, "Queue full, dropping old packet %p",
            old_item);
        priv->next_seqnum = (old_item->seqnum + old_item->count) & 0xffff;
        if (priv->post_drop_messages) {
          drop_msg =
              new_drop_message (jitterbuffer, old_item->seqnum, old_item->pts,
              REASON_DROP_ON_LATENCY);
        }
        rtp_jitter_buffer_free_item (old_item);
      }
      /* we might have removed some head buffers, signal the pushing thread to
       * see if it can push now */
      JBUF_SIGNAL_EVENT (priv);
    }
  }

  /* If we estimated the DTS, don't consider it in the clock skew calculations
   * later. The code above always sets dts to pts or the other way around if
   * any of those is valid in the buffer, so we know that if we estimated the
   * dts that both are unknown */
  head = rtp_jitter_buffer_append_buffer (priv->jbuf, buffer,
      estimated_dts ? GST_CLOCK_TIME_NONE : dts, pts, seqnum, rtptime,
      &duplicate, &percent);

  /* now insert the packet into the queue in sorted order. This function returns
   * FALSE if a packet with the same seqnum was already in the queue, meaning we
   * have a duplicate. */
  if (G_UNLIKELY (duplicate)) {
    if (is_rtx && timer)
      update_rtx_stats (jitterbuffer, timer, dts, FALSE);
    goto duplicate;
  }

  /* Trigger fast start if needed */
  if (gst_rtp_jitter_buffer_fast_start (jitterbuffer))
    head = TRUE;

  /* update rtx timers */
  if (priv->do_retransmission)
    update_rtx_timers (jitterbuffer, seqnum, dts, pts, do_next_seqnum, is_rtx,
        timer);

  /* we had an unhandled SR, handle it now */
  if (priv->last_sr)
    do_handle_sync (jitterbuffer);

  if (G_UNLIKELY (head)) {
    /* signal addition of new buffer when the _loop is waiting. */
    if (G_LIKELY (priv->active))
      JBUF_SIGNAL_EVENT (priv);
  }

  GST_DEBUG_OBJECT (jitterbuffer,
      "Pushed packet #%d, now %d packets, head: %d, " "percent %d", seqnum,
      rtp_jitter_buffer_num_packets (priv->jbuf), head, percent);

  msg = check_buffering_percent (jitterbuffer, percent);

finished:
  /* common exit: reschedule the timer thread, drop the lock, then post any
   * buffering/drop messages outside the lock to avoid deadlocks */
  update_current_timer (jitterbuffer);
  JBUF_UNLOCK (priv);

  if (msg)
    gst_element_post_message (GST_ELEMENT_CAST (jitterbuffer), msg);
  if (drop_msg)
    gst_element_post_message (GST_ELEMENT_CAST (jitterbuffer), drop_msg);

  return ret;

  /* ERRORS */
invalid_buffer:
  {
    /* this is not fatal but should be filtered earlier */
    GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
        ("Received invalid RTP payload, dropping"));
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
no_clock_rate:
  {
    GST_WARNING_OBJECT (jitterbuffer,
        "No clock-rate in caps!, dropping buffer");
    gst_buffer_unref (buffer);
    goto finished;
  }
out_flushing:
  {
    ret = priv->srcresult;
    GST_DEBUG_OBJECT (jitterbuffer, "flushing %s", gst_flow_get_name (ret));
    gst_buffer_unref (buffer);
    goto finished;
  }
have_eos:
  {
    ret = GST_FLOW_EOS;
    GST_WARNING_OBJECT (jitterbuffer, "we are EOS, refusing buffer");
    gst_buffer_unref (buffer);
    goto finished;
  }
too_late:
  {
    GST_DEBUG_OBJECT (jitterbuffer, "Packet #%d too late as #%d was already"
        " popped, dropping", seqnum, priv->last_popped_seqnum);
    priv->num_late++;
    if (priv->post_drop_messages) {
      drop_msg = new_drop_message (jitterbuffer, seqnum, pts, REASON_TOO_LATE);
    }
    gst_buffer_unref (buffer);
    goto finished;
  }
duplicate:
  {
    /* note: no unref here; append_buffer consumed the buffer even when it
     * flagged a duplicate */
    GST_DEBUG_OBJECT (jitterbuffer, "Duplicate packet #%d detected, dropping",
        seqnum);
    priv->num_duplicates++;
    goto finished;
  }
rtx_duplicate:
  {
    GST_DEBUG_OBJECT (jitterbuffer,
        "Duplicate RTX packet #%d detected, dropping", seqnum);
    priv->num_duplicates++;
    gst_buffer_unref (buffer);
    goto finished;
  }
unsolicited_rtx:
  {
    GST_DEBUG_OBJECT (jitterbuffer,
        "Unsolicited RTX packet #%d detected, dropping", seqnum);
    gst_buffer_unref (buffer);
    goto finished;
  }
discard_invalid:
  {
    GST_DEBUG_OBJECT (jitterbuffer,
        "cannot calculate a valid pts for #%d (rtx: %d), discard",
        seqnum, is_rtx);
    gst_buffer_unref (buffer);
    goto finished;
  }
}
+
+/* FIXME: hopefully we can do something more efficient here, especially when
+ * all packets are in order and/or outside of the currently cached range.
+ * Still worthwhile to have it, avoids taking/releasing object lock and pad
+ * stream lock for every single buffer in the default chain_list fallback. */
+static GstFlowReturn
+gst_rtp_jitter_buffer_chain_list (GstPad * pad, GstObject * parent,
+ GstBufferList * buffer_list)
+{
+ GstFlowReturn flow_ret = GST_FLOW_OK;
+ guint i, n;
+
+ n = gst_buffer_list_length (buffer_list);
+ for (i = 0; i < n; ++i) {
+ GstBuffer *buf = gst_buffer_list_get (buffer_list, i);
+
+ flow_ret = gst_rtp_jitter_buffer_chain (pad, parent, gst_buffer_ref (buf));
+
+ if (flow_ret != GST_FLOW_OK)
+ break;
+ }
+ gst_buffer_list_unref (buffer_list);
+
+ return flow_ret;
+}
+
+static GstClockTime
+compute_elapsed (GstRtpJitterBuffer * jitterbuffer, RTPJitterBufferItem * item)
+{
+ guint64 ext_time, elapsed;
+ guint32 rtp_time;
+ GstRtpJitterBufferPrivate *priv;
+
+ priv = jitterbuffer->priv;
+ rtp_time = item->rtptime;
+
+ GST_LOG_OBJECT (jitterbuffer, "rtp %" G_GUINT32_FORMAT ", ext %"
+ G_GUINT64_FORMAT, rtp_time, priv->ext_timestamp);
+
+ ext_time = priv->ext_timestamp;
+ ext_time = gst_rtp_buffer_ext_timestamp (&ext_time, rtp_time);
+ if (ext_time < priv->ext_timestamp) {
+ ext_time = priv->ext_timestamp;
+ } else {
+ priv->ext_timestamp = ext_time;
+ }
+
+ if (ext_time > priv->clock_base)
+ elapsed = ext_time - priv->clock_base;
+ else
+ elapsed = 0;
+
+ elapsed = gst_util_uint64_scale_int (elapsed, GST_SECOND, priv->clock_rate);
+ return elapsed;
+}
+
/* Estimate the running time at which playback will reach npt_stop and
 * (re)schedule the EOS timer accordingly.  Also disables buffering when
 * the remaining stream time is shorter than the jitterbuffer delay.
 * Only meaningful when npt-stop, clock-base and clock-rate are known. */
static void
update_estimated_eos (GstRtpJitterBuffer * jitterbuffer,
    RTPJitterBufferItem * item)
{
  guint64 total, elapsed, left, estimated;
  GstClockTime out_time;
  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;

  /* without NPT range / RTP clock info we cannot estimate anything */
  if (priv->npt_stop == -1 || priv->ext_timestamp == -1
      || priv->clock_base == -1 || priv->clock_rate <= 0)
    return;

  /* compute the elapsed time */
  elapsed = compute_elapsed (jitterbuffer, item);

  /* do nothing if elapsed time doesn't increment */
  if (priv->last_elapsed && elapsed <= priv->last_elapsed)
    return;

  priv->last_elapsed = elapsed;

  /* this is the total time we need to play */
  total = priv->npt_stop - priv->npt_start;
  GST_LOG_OBJECT (jitterbuffer, "total %" GST_TIME_FORMAT,
      GST_TIME_ARGS (total));

  /* this is how much time there is left */
  if (total > elapsed)
    left = total - elapsed;
  else
    left = 0;

  /* if we have less time left that the size of the buffer, we will not
   * be able to keep it filled, disabled buffering then */
  if (left < rtp_jitter_buffer_get_delay (priv->jbuf)) {
    GST_DEBUG_OBJECT (jitterbuffer, "left %" GST_TIME_FORMAT
        ", disable buffering close to EOS", GST_TIME_ARGS (left));
    rtp_jitter_buffer_disable_buffering (priv->jbuf, TRUE);
  }

  /* this is the current time as running-time */
  out_time = item->pts;

  /* linear extrapolation: out_time relates to elapsed as estimated
   * relates to total */
  if (elapsed > 0)
    estimated = gst_util_uint64_scale (out_time, total, elapsed);
  else {
    /* if there is almost nothing left,
     * we may never advance enough to end up in the above case */
    if (total < GST_SECOND)
      estimated = GST_SECOND;
    else
      estimated = -1;
  }
  GST_LOG_OBJECT (jitterbuffer, "elapsed %" GST_TIME_FORMAT ", estimated %"
      GST_TIME_FORMAT, GST_TIME_ARGS (elapsed), GST_TIME_ARGS (estimated));

  /* only reschedule the EOS timer when the estimate actually changed */
  if (estimated != -1 && priv->estimated_eos != estimated) {
    rtp_timer_queue_set_eos (priv->timers, estimated,
        timeout_offset (jitterbuffer));
    priv->estimated_eos = estimated;
  }
}
+
/* take a buffer from the queue and push it */
/* Pops the head item from the jitterbuffer and pushes it downstream.
 * Handles all item types (buffer, lost event, regular event, query).
 * Called with JBUF_LOCK held; the lock is released around the actual
 * pad push/query and re-acquired before returning (via JBUF_LOCK_CHECK,
 * which jumps to out_flushing when the element is flushing). */
static GstFlowReturn
pop_and_push_next (GstRtpJitterBuffer * jitterbuffer, guint seqnum)
{
  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
  GstFlowReturn result = GST_FLOW_OK;
  RTPJitterBufferItem *item;
  GstBuffer *outbuf = NULL;
  GstEvent *outevent = NULL;
  GstQuery *outquery = NULL;
  GstClockTime dts, pts;
  gint percent = -1;
  gboolean do_push = TRUE;
  guint type;
  GstMessage *msg;

  /* when we get here we are ready to pop and push the buffer */
  item = rtp_jitter_buffer_pop (priv->jbuf, &percent);
  type = item->type;

  switch (type) {
    case ITEM_TYPE_BUFFER:

      /* we need to make writable to change the flags and timestamps */
      outbuf = gst_buffer_make_writable (item->data);

      if (G_UNLIKELY (priv->discont)) {
        /* set DISCONT flag when we missed a packet. We pushed the buffer writable
         * into the jitterbuffer so we can modify now. */
        GST_DEBUG_OBJECT (jitterbuffer, "mark output buffer discont");
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
        priv->discont = FALSE;
      }
      if (G_UNLIKELY (priv->ts_discont)) {
        GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_RESYNC);
        priv->ts_discont = FALSE;
      }

      /* item timestamps are running-time; map them back to stream time
       * inside the configured segment */
      dts =
          gst_segment_position_from_running_time (&priv->segment,
          GST_FORMAT_TIME, item->dts);
      pts =
          gst_segment_position_from_running_time (&priv->segment,
          GST_FORMAT_TIME, item->pts);

      /* if this is a new frame, check if ts_offset needs to be updated */
      if (pts != priv->last_pts) {
        update_offset (jitterbuffer);
      }

      /* apply timestamp with offset to buffer now */
      GST_BUFFER_DTS (outbuf) = apply_offset (jitterbuffer, dts);
      GST_BUFFER_PTS (outbuf) = apply_offset (jitterbuffer, pts);

      /* update the elapsed time when we need to check against the npt stop time. */
      update_estimated_eos (jitterbuffer, item);

      priv->last_pts = pts;
      priv->last_out_time = GST_BUFFER_PTS (outbuf);
      break;
    case ITEM_TYPE_LOST:
      /* a lost packet produces a discont on the next pushed buffer */
      priv->discont = TRUE;
      if (!priv->do_lost)
        do_push = FALSE;
      /* FALLTHROUGH */
    case ITEM_TYPE_EVENT:
      outevent = item->data;
      break;
    case ITEM_TYPE_QUERY:
      outquery = item->data;
      break;
  }

  /* now we are ready to push the buffer. Save the seqnum and release the lock
   * so the other end can push stuff in the queue again. */
  if (seqnum != -1) {
    priv->last_popped_seqnum = seqnum;
    priv->next_seqnum = (seqnum + item->count) & 0xffff;
  }
  msg = check_buffering_percent (jitterbuffer, percent);

  if (type == ITEM_TYPE_EVENT && outevent &&
      GST_EVENT_TYPE (outevent) == GST_EVENT_EOS) {
    g_assert (priv->eos);
    /* drain all remaining timers before forwarding EOS; JBUF_WAIT_TIMER
     * releases the lock while waiting */
    while (rtp_timer_queue_length (priv->timers) > 0) {
      /* Stopping timers */
      unschedule_current_timer (jitterbuffer);
      JBUF_WAIT_TIMER (priv);
    }
  }

  JBUF_UNLOCK (priv);

  /* ownership of item->data was transferred to outbuf/outevent/outquery
   * above, so clear it before freeing the item */
  item->data = NULL;
  rtp_jitter_buffer_free_item (item);

  if (msg)
    gst_element_post_message (GST_ELEMENT_CAST (jitterbuffer), msg);

  switch (type) {
    case ITEM_TYPE_BUFFER:
      /* push buffer */
      GST_DEBUG_OBJECT (jitterbuffer,
          "Pushing buffer %d, dts %" GST_TIME_FORMAT ", pts %" GST_TIME_FORMAT,
          seqnum, GST_TIME_ARGS (GST_BUFFER_DTS (outbuf)),
          GST_TIME_ARGS (GST_BUFFER_PTS (outbuf)));
      priv->num_pushed++;
      /* DTS carries no meaning downstream of the jitterbuffer */
      GST_BUFFER_DTS (outbuf) = GST_CLOCK_TIME_NONE;
      result = gst_pad_push (priv->srcpad, outbuf);

      JBUF_LOCK_CHECK (priv, out_flushing);
      break;
    case ITEM_TYPE_LOST:
    case ITEM_TYPE_EVENT:
      /* We got not enough consecutive packets with a huge gap, we can
       * as well just drop them here now on EOS */
      if (outevent && GST_EVENT_TYPE (outevent) == GST_EVENT_EOS) {
        GST_DEBUG_OBJECT (jitterbuffer, "Clearing gap packets on EOS");
        g_queue_foreach (&priv->gap_packets, (GFunc) gst_buffer_unref, NULL);
        g_queue_clear (&priv->gap_packets);
      }

      GST_DEBUG_OBJECT (jitterbuffer, "%sPushing event %" GST_PTR_FORMAT
          ", seqnum %d", do_push ? "" : "NOT ", outevent, seqnum);

      if (do_push)
        gst_pad_push_event (priv->srcpad, outevent);
      else if (outevent)
        gst_event_unref (outevent);

      result = GST_FLOW_OK;

      JBUF_LOCK_CHECK (priv, out_flushing);
      break;
    case ITEM_TYPE_QUERY:
    {
      gboolean res;

      res = gst_pad_peer_query (priv->srcpad, outquery);

      JBUF_LOCK_CHECK (priv, out_flushing);
      result = GST_FLOW_OK;
      GST_LOG_OBJECT (jitterbuffer, "did query %p, return %d", outquery, res);
      /* wake up the thread waiting for the query result */
      JBUF_SIGNAL_QUERY (priv, res);
      break;
    }
  }
  return result;

  /* ERRORS */
out_flushing:
  {
    /* JBUF_LOCK_CHECK re-acquired the lock before jumping here */
    return priv->srcresult;
  }
}
+
+#define GST_FLOW_WAIT GST_FLOW_CUSTOM_SUCCESS
+
+/* Peek a buffer and compare the seqnum to the expected seqnum.
+ * If all is fine, the buffer is pushed.
+ * If something is wrong, we wait for some event
+ */
/* Inspect the head of the jitterbuffer and decide whether to push it,
 * drop it, or wait.  Returns GST_FLOW_WAIT when the loop should block on
 * a timer/event, GST_FLOW_EOS at end of stream, or the push result.
 * Called with JBUF_LOCK held. */
static GstFlowReturn
handle_next_buffer (GstRtpJitterBuffer * jitterbuffer)
{
  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
  GstFlowReturn result;
  RTPJitterBufferItem *item;
  guint seqnum;
  guint32 next_seqnum;

  /* only push buffers when PLAYING and active and not buffering */
  if (priv->blocked || !priv->active ||
      rtp_jitter_buffer_is_buffering (priv->jbuf)) {
    return GST_FLOW_WAIT;
  }

  /* peek a buffer, we're just looking at the sequence number.
   * If all is fine, we'll pop and push it. If the sequence number is wrong we
   * wait for a timeout or something to change.
   * The peeked buffer is valid for as long as we hold the jitterbuffer lock. */
  item = rtp_jitter_buffer_peek (priv->jbuf);
  if (item == NULL) {
    goto wait;
  }

  /* get the seqnum and the next expected seqnum */
  seqnum = item->seqnum;
  if (seqnum == -1) {
    /* seqnum-less items (events, queries) are always pushed immediately */
    return pop_and_push_next (jitterbuffer, seqnum);
  }

  next_seqnum = priv->next_seqnum;

  /* get the gap between this and the previous packet. If we don't know the
   * previous packet seqnum assume no gap. */
  if (G_UNLIKELY (next_seqnum == -1)) {
    GST_DEBUG_OBJECT (jitterbuffer, "First buffer #%d", seqnum);
    /* we don't know what the next_seqnum should be, the chain function should
     * have scheduled a DEADLINE timer that will increment next_seqnum when it
     * fires, so wait for that */
    result = GST_FLOW_WAIT;
  } else {
    gint gap = gst_rtp_buffer_compare_seqnum (next_seqnum, seqnum);

    if (G_LIKELY (gap == 0)) {
      /* no missing packet, pop and push */
      result = pop_and_push_next (jitterbuffer, seqnum);
    } else if (G_UNLIKELY (gap < 0)) {
      /* if we have a packet that we already pushed or considered dropped, pop it
       * off and get the next packet */
      GST_DEBUG_OBJECT (jitterbuffer, "Old packet #%d, next #%d dropping",
          seqnum, next_seqnum);
      item = rtp_jitter_buffer_pop (priv->jbuf, NULL);
      rtp_jitter_buffer_free_item (item);
      /* GST_FLOW_OK makes the caller loop again and look at the new head */
      result = GST_FLOW_OK;
    } else {
      /* the chain function has scheduled timers to request retransmission or
       * when to consider the packet lost, wait for that */
      GST_DEBUG_OBJECT (jitterbuffer,
          "Sequence number GAP detected: expected %d instead of %d (%d missing)",
          next_seqnum, seqnum, gap);
      /* if we have reached EOS, just keep processing */
      /* Also do the same if we block input because the JB is full */
      if (priv->eos || rtp_jitter_buffer_is_full (priv->jbuf)) {
        result = pop_and_push_next (jitterbuffer, seqnum);
        /* NOTE(review): the push result is deliberately discarded here so
         * draining continues even on downstream errors — confirm this is
         * intentional, as flow errors from pop_and_push_next are lost */
        result = GST_FLOW_OK;
      } else {
        result = GST_FLOW_WAIT;
      }
    }
  }

  return result;

wait:
  {
    GST_DEBUG_OBJECT (jitterbuffer, "no buffer, going to wait");
    if (priv->eos) {
      return GST_FLOW_EOS;
    } else {
      return GST_FLOW_WAIT;
    }
  }
}
+
+static GstClockTime
+get_rtx_retry_timeout (GstRtpJitterBufferPrivate * priv)
+{
+ GstClockTime rtx_retry_timeout;
+ GstClockTime rtx_min_retry_timeout;
+
+ if (priv->rtx_retry_timeout == -1) {
+ if (priv->avg_rtx_rtt == 0)
+ rtx_retry_timeout = DEFAULT_AUTO_RTX_TIMEOUT;
+ else
+ /* we want to ask for a retransmission after we waited for a
+ * complete RTT and the additional jitter */
+ rtx_retry_timeout = priv->avg_rtx_rtt + priv->avg_jitter * 2;
+ } else {
+ rtx_retry_timeout = priv->rtx_retry_timeout * GST_MSECOND;
+ }
+ /* make sure we don't retry too often. On very low latency networks,
+ * the RTT and jitter can be very low. */
+ if (priv->rtx_min_retry_timeout == -1) {
+ rtx_min_retry_timeout = priv->packet_spacing;
+ } else {
+ rtx_min_retry_timeout = priv->rtx_min_retry_timeout * GST_MSECOND;
+ }
+ rtx_retry_timeout = MAX (rtx_retry_timeout, rtx_min_retry_timeout);
+
+ return rtx_retry_timeout;
+}
+
+static GstClockTime
+get_rtx_retry_period (GstRtpJitterBufferPrivate * priv,
+ GstClockTime rtx_retry_timeout)
+{
+ GstClockTime rtx_retry_period;
+
+ if (priv->rtx_retry_period == -1) {
+ /* we retry up to the configured jitterbuffer size but leaving some
+ * room for the retransmission to arrive in time */
+ if (rtx_retry_timeout > priv->latency_ns) {
+ rtx_retry_period = 0;
+ } else {
+ rtx_retry_period = priv->latency_ns - rtx_retry_timeout;
+ }
+ } else {
+ rtx_retry_period = priv->rtx_retry_period * GST_MSECOND;
+ }
+ return rtx_retry_period;
+}
+
+/*
+ 1. For *larger* rtx-rtt, weigh a new measurement as before (1/8th)
+ 2. For *smaller* rtx-rtt, be a bit more conservative and weigh a bit less (1/16th)
+ 3. For very large measurements (> avg * 2), consider them "outliers"
+ and count them a lot less (1/48th)
+*/
+static void
+update_avg_rtx_rtt (GstRtpJitterBufferPrivate * priv, GstClockTime rtt)
+{
+ gint weight;
+
+ if (priv->avg_rtx_rtt == 0) {
+ priv->avg_rtx_rtt = rtt;
+ return;
+ }
+
+ if (rtt > 2 * priv->avg_rtx_rtt)
+ weight = 48;
+ else if (rtt > priv->avg_rtx_rtt)
+ weight = 8;
+ else
+ weight = 16;
+
+ priv->avg_rtx_rtt = (rtt + (weight - 1) * priv->avg_rtx_rtt) / weight;
+}
+
/* Update retransmission statistics for @timer after the packet it tracked
 * either arrived (@success TRUE) or was finally given up on (@success
 * FALSE).  @dts is the arrival time used to update the RTT estimate. */
static void
update_rtx_stats (GstRtpJitterBuffer * jitterbuffer, const RtpTimer * timer,
    GstClockTime dts, gboolean success)
{
  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
  GstClockTime delay;

  if (success) {
    /* we scheduled a retry for this packet and now we have it */
    priv->num_rtx_success++;
    /* all the previous retry attempts failed */
    priv->num_rtx_failed += timer->num_rtx_retry - 1;
  } else {
    /* All retries failed or was too late */
    priv->num_rtx_failed += timer->num_rtx_retry;
  }

  /* number of retries before (hopefully) receiving the packet */
  if (priv->avg_rtx_num == 0.0)
    priv->avg_rtx_num = timer->num_rtx_retry;
  else
    /* moving average with weight 1/8 */
    priv->avg_rtx_num = (timer->num_rtx_retry + 7 * priv->avg_rtx_num) / 8;

  /* Calculate the delay between retransmission request and receiving this
   * packet. We have a valid delay if and only if this packet is a response to
   * our last request. If not we don't know if this is a response to an
   * earlier request and delay could be way off. For RTT is more important
   * with correct values than to update for every packet. */
  if (timer->num_rtx_retry == timer->num_rtx_received &&
      dts != GST_CLOCK_TIME_NONE && dts > timer->rtx_last) {
    delay = dts - timer->rtx_last;
    update_avg_rtx_rtt (priv, delay);
  } else {
    delay = 0;
  }

  GST_LOG_OBJECT (jitterbuffer,
      "RTX #%d, result %d, success %" G_GUINT64_FORMAT ", failed %"
      G_GUINT64_FORMAT ", requests %" G_GUINT64_FORMAT ", dups %"
      G_GUINT64_FORMAT ", avg-num %g, delay %" GST_TIME_FORMAT ", avg-rtt %"
      GST_TIME_FORMAT, timer->seqnum, success, priv->num_rtx_success,
      priv->num_rtx_failed, priv->num_rtx_requests, priv->num_duplicates,
      priv->avg_rtx_num, GST_TIME_ARGS (delay),
      GST_TIME_ARGS (priv->avg_rtx_rtt));
}
+
/* the timeout for when we expected a packet expired */
/* Fires when an EXPECTED timer times out: queue a GstRTPRetransmissionRequest
 * upstream event on @events, then either reschedule the timer for the next
 * retry or convert it to a LOST timer when retries/period are exhausted.
 * Returns FALSE so the timer loop keeps running.  Called with JBUF_LOCK
 * held; events are pushed by the caller outside the lock. */
static gboolean
do_expected_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer,
    GstClockTime now, GQueue * events)
{
  GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
  GstEvent *event;
  guint delay, delay_ms, avg_rtx_rtt_ms;
  guint rtx_retry_timeout_ms, rtx_retry_period_ms;
  guint rtx_deadline_ms;
  GstClockTime rtx_retry_period;
  GstClockTime rtx_retry_timeout;
  GstClock *clock;
  GstClockTimeDiff offset = 0;
  GstClockTime timeout;

  GST_DEBUG_OBJECT (jitterbuffer, "expected #%d didn't arrive, now %"
      GST_TIME_FORMAT, timer->seqnum, GST_TIME_ARGS (now));

  rtx_retry_timeout = get_rtx_retry_timeout (priv);
  rtx_retry_period = get_rtx_retry_period (priv, rtx_retry_timeout);

  /* delay expresses how late this packet is currently */
  delay = now - timer->rtx_base;

  delay_ms = GST_TIME_AS_MSECONDS (delay);
  rtx_retry_timeout_ms = GST_TIME_AS_MSECONDS (rtx_retry_timeout);
  rtx_retry_period_ms = GST_TIME_AS_MSECONDS (rtx_retry_period);
  avg_rtx_rtt_ms = GST_TIME_AS_MSECONDS (priv->avg_rtx_rtt);
  /* fall back to the configured latency when no explicit deadline is set */
  rtx_deadline_ms =
      priv->rtx_deadline_ms != -1 ? priv->rtx_deadline_ms : priv->latency_ms;

  event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
      gst_structure_new ("GstRTPRetransmissionRequest",
          "seqnum", G_TYPE_UINT, (guint) timer->seqnum,
          "running-time", G_TYPE_UINT64, timer->rtx_base,
          "delay", G_TYPE_UINT, delay_ms,
          "retry", G_TYPE_UINT, timer->num_rtx_retry,
          "frequency", G_TYPE_UINT, rtx_retry_timeout_ms,
          "period", G_TYPE_UINT, rtx_retry_period_ms,
          "deadline", G_TYPE_UINT, rtx_deadline_ms,
          "packet-spacing", G_TYPE_UINT64, priv->packet_spacing,
          "avg-rtt", G_TYPE_UINT, avg_rtx_rtt_ms, NULL));
  g_queue_push_tail (events, event);
  GST_DEBUG_OBJECT (jitterbuffer, "Request RTX: %" GST_PTR_FORMAT, event);

  priv->num_rtx_requests++;
  timer->num_rtx_retry++;

  /* record when this request was sent, in running time, so a later
   * response can be matched against it for RTT measurement */
  GST_OBJECT_LOCK (jitterbuffer);
  if ((clock = GST_ELEMENT_CLOCK (jitterbuffer))) {
    timer->rtx_last = gst_clock_get_time (clock);
    timer->rtx_last -= GST_ELEMENT_CAST (jitterbuffer)->base_time;
  } else {
    timer->rtx_last = now;
  }
  GST_OBJECT_UNLOCK (jitterbuffer);

  /*
     Calculate the timeout for the next retransmission attempt:
     We have just successfully sent one RTX request, and we need to
     find out when to schedule the next one.

     The rtx_retry_timeout tells us the logical timeout between RTX
     requests based on things like round-trip time, jitter and packet spacing,
     and is how long we are going to wait before attempting another RTX packet
   */
  timeout = timer->rtx_last + rtx_retry_timeout;
  GST_DEBUG_OBJECT (jitterbuffer,
      "timer #%i new timeout %" GST_TIME_FORMAT ", rtx retry timeout %"
      GST_TIME_FORMAT ", num_retry %u", timer->seqnum, GST_TIME_ARGS (timeout),
      GST_TIME_ARGS (rtx_retry_timeout), timer->num_rtx_retry);
  if ((priv->rtx_max_retries != -1
          && timer->num_rtx_retry >= priv->rtx_max_retries)
      || (timeout > timer->rtx_base + rtx_retry_period)) {
    /* too many retransmission request, we now convert the timer
     * to a lost timer, leave the num_rtx_retry as it is for stats */
    timer->type = RTP_TIMER_LOST;
    timeout = timer->rtx_base;
    offset = timeout_offset (jitterbuffer);
    GST_DEBUG_OBJECT (jitterbuffer, "reschedule #%i as LOST timer for %"
        GST_TIME_FORMAT, timer->seqnum,
        GST_TIME_ARGS (timer->rtx_base + offset));
  }
  rtp_timer_queue_update_timer (priv->timers, timer, timer->seqnum,
      timeout, 0, offset, FALSE);

  return FALSE;
}
+
+/* a packet is lost */
+/* Insert a lost event for @timer's seqnum.  When a retransmission was
+ * requested for this seqnum (rtx_last is valid), the timer is kept alive
+ * in the rtx-stats queue so statistics can still be updated if the packet
+ * arrives late; otherwise the timer is freed.  Returns TRUE: the timer is
+ * removed from the main timer queue either way. */
+static gboolean
+do_lost_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer,
+ GstClockTime now)
+{
+ GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+ GstClockTime timestamp;
+
+ timestamp = apply_offset (jitterbuffer, get_pts_timeout (timer));
+ insert_lost_event (jitterbuffer, timer->seqnum, 1, timestamp,
+ timer->duration, timer->num_rtx_retry);
+
+ if (GST_CLOCK_TIME_IS_VALID (timer->rtx_last)) {
+ /* Store info to update stats if the packet arrives too late */
+ timer->timeout = now + priv->rtx_stats_timeout * GST_MSECOND;
+ timer->type = RTP_TIMER_LOST;
+ rtp_timer_queue_insert (priv->rtx_stats_timers, timer);
+ } else {
+ rtp_timer_free (timer);
+ }
+
+ return TRUE;
+}
+
+/* the NPT stop time was reached: free the timer and, if no EOS was queued
+ * yet, queue one (reusing the segment seqnum when known) and wake up the
+ * pushing thread.  Returns TRUE: the timer is removed. */
+static gboolean
+do_eos_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer,
+ GstClockTime now)
+{
+ GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+ GST_INFO_OBJECT (jitterbuffer, "got the NPT timeout");
+ rtp_timer_free (timer);
+ if (!priv->eos) {
+ GstEvent *event;
+
+ /* there was no EOS in the buffer, put one in there now */
+ event = gst_event_new_eos ();
+ if (priv->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, priv->segment_seqnum);
+ queue_event (jitterbuffer, event);
+ }
+ JBUF_SIGNAL_EVENT (priv);
+
+ return TRUE;
+}
+
+/* the deadline timer expired: start pushing from the timer's seqnum unless
+ * the next seqnum was already established elsewhere, then free the timer
+ * and wake up the pushing thread.  Returns TRUE: the timer is removed. */
+static gboolean
+do_deadline_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer,
+ GstClockTime now)
+{
+ GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+ GST_INFO_OBJECT (jitterbuffer, "got deadline timeout");
+
+ /* timer seqnum might have been obsoleted by caps seqnum-base,
+ * only mess with current ongoing seqnum if still unknown */
+ if (priv->next_seqnum == -1)
+ priv->next_seqnum = timer->seqnum;
+ rtp_timer_free (timer);
+ JBUF_SIGNAL_EVENT (priv);
+
+ return TRUE;
+}
+
+/* Dispatch an expired @timer to the handler for its type.  Returns whether
+ * the handler removed the timer from the queue (EXPECTED timers are only
+ * rescheduled and report FALSE). */
+static gboolean
+do_timeout (GstRtpJitterBuffer * jitterbuffer, RtpTimer * timer,
+ GstClockTime now, GQueue * events)
+{
+ gboolean removed = FALSE;
+
+ switch (timer->type) {
+ case RTP_TIMER_EXPECTED:
+ removed = do_expected_timeout (jitterbuffer, timer, now, events);
+ break;
+ case RTP_TIMER_LOST:
+ removed = do_lost_timeout (jitterbuffer, timer, now);
+ break;
+ case RTP_TIMER_DEADLINE:
+ removed = do_deadline_timeout (jitterbuffer, timer, now);
+ break;
+ case RTP_TIMER_EOS:
+ removed = do_eos_timeout (jitterbuffer, timer, now);
+ break;
+ }
+ return removed;
+}
+
+/* Drain @events, pushing each RTX request event upstream through the sink
+ * pad.  Must be called WITHOUT the JBUF lock held since pushing may block. */
+static void
+push_rtx_events_unlocked (GstRtpJitterBuffer * jitterbuffer, GQueue * events)
+{
+ GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+ GstEvent *event;
+
+ while ((event = (GstEvent *) g_queue_pop_head (events)))
+ gst_pad_push_event (priv->sinkpad, event);
+}
+
+/* called with JBUF lock
+ *
+ * Pushes all events in @events queue upstream, temporarily releasing the
+ * JBUF lock while pushing.  Does nothing when the queue is empty.
+ */
+static void
+push_rtx_events (GstRtpJitterBuffer * jitterbuffer, GQueue * events)
+{
+ GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+
+ if (events->length == 0)
+ return;
+
+ JBUF_UNLOCK (priv);
+ push_rtx_events_unlocked (jitterbuffer, events);
+ JBUF_LOCK (priv);
+}
+
+/* called when we need to wait for the next timeout.
+ *
+ * We loop over the array of recorded timeouts and wait for the earliest one.
+ * When it timed out, do the logic associated with the timer.
+ *
+ * If there are no timers, we wait on a gcond until something new happens.
+ */
+static void
+wait_next_timeout (GstRtpJitterBuffer * jitterbuffer)
+{
+ GstRtpJitterBufferPrivate *priv = jitterbuffer->priv;
+ GstClockTime now = 0;
+
+ JBUF_LOCK (priv);
+ while (priv->timer_running) {
+ RtpTimer *timer = NULL;
+ GQueue events = G_QUEUE_INIT;
+
+ /* don't produce data in paused */
+ while (priv->blocked) {
+ JBUF_WAIT_TIMER (priv);
+ if (!priv->timer_running)
+ goto stopping;
+ }
+
+ /* If we have a clock, update "now" now with the very
+ * latest running time we have. If timers are unscheduled below we
+ * otherwise wouldn't update now (it's only updated when timers
+ * expire), and also for the very first loop iteration now would
+ * otherwise always be 0
+ */
+ GST_OBJECT_LOCK (jitterbuffer);
+ if (priv->eos) {
+ now = GST_CLOCK_TIME_NONE;
+ } else if (GST_ELEMENT_CLOCK (jitterbuffer)) {
+ now =
+ gst_clock_get_time (GST_ELEMENT_CLOCK (jitterbuffer)) -
+ GST_ELEMENT_CAST (jitterbuffer)->base_time;
+ }
+ GST_OBJECT_UNLOCK (jitterbuffer);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "now %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (now));
+
+ /* Clear expired rtx-stats timers */
+ if (priv->do_retransmission)
+ rtp_timer_queue_remove_until (priv->rtx_stats_timers, now);
+
+ /* Iterate expired "normal" timers */
+ while ((timer = rtp_timer_queue_pop_until (priv->timers, now)))
+ do_timeout (jitterbuffer, timer, now, &events);
+
+ timer = rtp_timer_queue_peek_earliest (priv->timers);
+ if (timer) {
+ GstClock *clock;
+ GstClockTime sync_time;
+ GstClockID id;
+ GstClockReturn ret;
+ GstClockTimeDiff clock_jitter;
+
+ /* we popped all immediate and due timers above, so this should
+ * just never happen */
+ g_assert (GST_CLOCK_TIME_IS_VALID (timer->timeout));
+
+ GST_OBJECT_LOCK (jitterbuffer);
+ clock = GST_ELEMENT_CLOCK (jitterbuffer);
+ if (!clock) {
+ GST_OBJECT_UNLOCK (jitterbuffer);
+ /* let's just push if there is no clock */
+ GST_DEBUG_OBJECT (jitterbuffer, "No clock, timeout right away");
+ now = timer->timeout;
+ push_rtx_events (jitterbuffer, &events);
+ continue;
+ }
+
+ /* prepare for sync against clock */
+ sync_time = timer->timeout + GST_ELEMENT_CAST (jitterbuffer)->base_time;
+ /* add latency of peer to get input time */
+ sync_time += priv->peer_latency;
+
+ GST_DEBUG_OBJECT (jitterbuffer, "timer #%i sync to timestamp %"
+ GST_TIME_FORMAT " with sync time %" GST_TIME_FORMAT, timer->seqnum,
+ GST_TIME_ARGS (get_pts_timeout (timer)), GST_TIME_ARGS (sync_time));
+
+ /* create an entry for the clock */
+ id = priv->clock_id = gst_clock_new_single_shot_id (clock, sync_time);
+ priv->timer_timeout = timer->timeout;
+ priv->timer_seqnum = timer->seqnum;
+ GST_OBJECT_UNLOCK (jitterbuffer);
+
+ /* release the lock so that the other end can push stuff or unlock */
+ JBUF_UNLOCK (priv);
+
+ push_rtx_events_unlocked (jitterbuffer, &events);
+
+ /* block until the timeout is reached or the wait is unscheduled
+ * (e.g. when a packet arrives or we shut down) */
+ ret = gst_clock_id_wait (id, &clock_jitter);
+
+ JBUF_LOCK (priv);
+
+ if (!priv->timer_running) {
+ g_queue_clear_full (&events, (GDestroyNotify) gst_event_unref);
+ gst_clock_id_unref (id);
+ priv->clock_id = NULL;
+ break;
+ }
+
+ if (ret != GST_CLOCK_UNSCHEDULED) {
+ /* a late wait means time advanced beyond the timeout; never let
+ * "now" move backwards */
+ now = priv->timer_timeout + MAX (clock_jitter, 0);
+ GST_DEBUG_OBJECT (jitterbuffer,
+ "sync done, %d, #%d, %" GST_STIME_FORMAT, ret, priv->timer_seqnum,
+ GST_STIME_ARGS (clock_jitter));
+ } else {
+ GST_DEBUG_OBJECT (jitterbuffer, "sync unscheduled");
+ }
+
+ /* and free the entry */
+ gst_clock_id_unref (id);
+ priv->clock_id = NULL;
+ } else {
+ push_rtx_events_unlocked (jitterbuffer, &events);
+
+ /* when draining the timers, the pusher thread will reuse our
+ * condition to wait for completion. Signal that thread before
+ * sleeping again here */
+ if (priv->eos)
+ JBUF_SIGNAL_TIMER (priv);
+
+ /* no timers, wait for activity */
+ JBUF_WAIT_TIMER (priv);
+ }
+ }
+stopping:
+ JBUF_UNLOCK (priv);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "we are stopping");
+ return;
+}
+
+/*
+ * This function implements the main pushing loop on the source pad.
+ *
+ * It first tries to push as many buffers as possible. If there is a seqnum
+ * mismatch, we wait for the next timeouts.
+ */
+static void
+gst_rtp_jitter_buffer_loop (GstRtpJitterBuffer * jitterbuffer)
+{
+ GstRtpJitterBufferPrivate *priv;
+ GstFlowReturn result = GST_FLOW_OK;
+
+ priv = jitterbuffer->priv;
+
+ JBUF_LOCK_CHECK (priv, flushing);
+ do {
+ result = handle_next_buffer (jitterbuffer);
+ if (G_LIKELY (result == GST_FLOW_WAIT)) {
+ /* now wait for the next event */
+ JBUF_SIGNAL_QUEUE (priv);
+ JBUF_WAIT_EVENT (priv, flushing);
+ result = GST_FLOW_OK;
+ }
+ } while (result == GST_FLOW_OK);
+ /* store result for upstream */
+ priv->srcresult = result;
+ /* if we get here we need to pause */
+ goto pause;
+
+ /* ERRORS */
+flushing:
+ {
+ result = priv->srcresult;
+ goto pause;
+ }
+pause:
+ {
+ GstEvent *event;
+
+ /* wake up any serialized query blocked on us before pausing */
+ JBUF_SIGNAL_QUERY (priv, FALSE);
+ JBUF_UNLOCK (priv);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "pausing task, reason %s",
+ gst_flow_get_name (result));
+ gst_pad_pause_task (priv->srcpad);
+ if (result == GST_FLOW_EOS) {
+ event = gst_event_new_eos ();
+ if (priv->segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, priv->segment_seqnum);
+ gst_pad_push_event (priv->srcpad, event);
+ }
+ return;
+ }
+}
+
+/* collect the info from the latest RTCP packet and the jitterbuffer sync, do
+ * some sanity checks and then emit the handle-sync signal with the parameters.
+ * This function must be called with the LOCK */
+static void
+do_handle_sync (GstRtpJitterBuffer * jitterbuffer)
+{
+ GstRtpJitterBufferPrivate *priv;
+ guint64 base_rtptime, base_time;
+ guint32 clock_rate;
+ guint64 last_rtptime;
+ guint64 clock_base;
+ guint64 ext_rtptime, diff;
+ gboolean valid = TRUE, keep = FALSE;
+
+ priv = jitterbuffer->priv;
+
+ /* get the last values from the jitterbuffer */
+ rtp_jitter_buffer_get_sync (priv->jbuf, &base_rtptime, &base_time,
+ &clock_rate, &last_rtptime);
+
+ clock_base = priv->clock_base;
+ ext_rtptime = priv->ext_rtptime;
+
+ GST_DEBUG_OBJECT (jitterbuffer, "ext SR %" G_GUINT64_FORMAT ", base %"
+ G_GUINT64_FORMAT ", clock-rate %" G_GUINT32_FORMAT
+ ", clock-base %" G_GUINT64_FORMAT ", last-rtptime %" G_GUINT64_FORMAT,
+ ext_rtptime, base_rtptime, clock_rate, clock_base, last_rtptime);
+
+ if (base_rtptime == -1 || clock_rate == -1 || base_time == -1) {
+ /* we keep this SR packet for later. When we get a valid RTP packet the
+ * above values will be set and we can try to use the SR packet */
+ GST_DEBUG_OBJECT (jitterbuffer, "keeping for later, no RTP values");
+ keep = TRUE;
+ } else {
+ /* we can't accept anything that happened before we did the last resync */
+ if (base_rtptime > ext_rtptime) {
+ GST_DEBUG_OBJECT (jitterbuffer, "dropping, older than base time");
+ valid = FALSE;
+ } else {
+ /* the SR RTP timestamp must be something close to what we last observed
+ * in the jitterbuffer */
+ if (ext_rtptime > last_rtptime) {
+ /* check how far ahead it is to our RTP timestamps */
+ diff = ext_rtptime - last_rtptime;
+ /* if bigger than 1 second, we drop it */
+ if (jitterbuffer->priv->max_rtcp_rtp_time_diff != -1 &&
+ diff >
+ gst_util_uint64_scale (jitterbuffer->priv->max_rtcp_rtp_time_diff,
+ clock_rate, 1000)) {
+ GST_DEBUG_OBJECT (jitterbuffer, "too far ahead");
+ /* should drop this, but some RTSP servers end up with bogus
+ * way too ahead RTCP packet when repeated PAUSE/PLAY,
+ * so still trigger rptbin sync but invalidate RTCP data
+ * (sync might use other methods) */
+ ext_rtptime = -1;
+ }
+ GST_DEBUG_OBJECT (jitterbuffer, "ext last %" G_GUINT64_FORMAT ", diff %"
+ G_GUINT64_FORMAT, last_rtptime, diff);
+ }
+ }
+ }
+
+ if (keep) {
+ GST_DEBUG_OBJECT (jitterbuffer, "keeping RTCP packet for later");
+ } else if (valid) {
+ GstStructure *s;
+
+ s = gst_structure_new ("application/x-rtp-sync",
+ "base-rtptime", G_TYPE_UINT64, base_rtptime,
+ "base-time", G_TYPE_UINT64, base_time,
+ "clock-rate", G_TYPE_UINT, clock_rate,
+ "clock-base", G_TYPE_UINT64, clock_base,
+ "sr-ext-rtptime", G_TYPE_UINT64, ext_rtptime,
+ "sr-buffer", GST_TYPE_BUFFER, priv->last_sr, NULL);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "signaling sync");
+ gst_buffer_replace (&priv->last_sr, NULL);
+ /* drop the lock while emitting: handlers may call back into us */
+ JBUF_UNLOCK (priv);
+ g_signal_emit (jitterbuffer,
+ gst_rtp_jitter_buffer_signals[SIGNAL_HANDLE_SYNC], 0, s);
+ JBUF_LOCK (priv);
+ gst_structure_free (s);
+ } else {
+ GST_DEBUG_OBJECT (jitterbuffer, "dropping RTCP packet");
+ gst_buffer_replace (&priv->last_sr, NULL);
+ }
+}
+
+/* Chain function of the RTCP sink pad: validate the RTCP buffer, extract
+ * the SR sender info, extend the RTP timestamp with the jitterbuffer's
+ * running extension, store the SR and trigger sync handling.  Non-SR or
+ * invalid buffers are dropped (never a fatal flow error). */
+static GstFlowReturn
+gst_rtp_jitter_buffer_chain_rtcp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstRtpJitterBuffer *jitterbuffer;
+ GstRtpJitterBufferPrivate *priv;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint32 ssrc;
+ GstRTCPPacket packet;
+ guint64 ext_rtptime;
+ guint32 rtptime;
+ GstRTCPBuffer rtcp = { NULL, };
+
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
+
+ if (G_UNLIKELY (!gst_rtcp_buffer_validate_reduced (buffer)))
+ goto invalid_buffer;
+
+ priv = jitterbuffer->priv;
+
+ gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
+
+ if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet))
+ goto empty_buffer;
+
+ /* first packet must be SR or RR or else the validate would have failed */
+ switch (gst_rtcp_packet_get_type (&packet)) {
+ case GST_RTCP_TYPE_SR:
+ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, &rtptime,
+ NULL, NULL);
+ break;
+ default:
+ goto ignore_buffer;
+ }
+ gst_rtcp_buffer_unmap (&rtcp);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "received RTCP of SSRC %08x", ssrc);
+
+ JBUF_LOCK (priv);
+ /* convert the RTP timestamp to our extended timestamp, using the same offset
+ * we used in the jitterbuffer */
+ ext_rtptime = priv->jbuf->ext_rtptime;
+ ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
+
+ priv->ext_rtptime = ext_rtptime;
+ gst_buffer_replace (&priv->last_sr, buffer);
+
+ do_handle_sync (jitterbuffer);
+ JBUF_UNLOCK (priv);
+
+done:
+ gst_buffer_unref (buffer);
+
+ return ret;
+
+invalid_buffer:
+ {
+ /* this is not fatal but should be filtered earlier */
+ GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
+ ("Received invalid RTCP payload, dropping"));
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+empty_buffer:
+ {
+ /* this is not fatal but should be filtered earlier */
+ GST_ELEMENT_WARNING (jitterbuffer, STREAM, DECODE, (NULL),
+ ("Received empty RTCP payload, dropping"));
+ gst_rtcp_buffer_unmap (&rtcp);
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+ignore_buffer:
+ {
+ GST_DEBUG_OBJECT (jitterbuffer, "ignoring RTCP packet");
+ gst_rtcp_buffer_unmap (&rtcp);
+ ret = GST_FLOW_OK;
+ goto done;
+ }
+}
+
+/* Query function of the RTP sink pad.  CAPS queries are answered directly;
+ * serialized queries are appended to the jitterbuffer so they stay ordered
+ * with the data flow, and the caller blocks until the pushing thread has
+ * handled them (refused while buffering). */
+static gboolean
+gst_rtp_jitter_buffer_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ gboolean res = FALSE;
+ GstRtpJitterBuffer *jitterbuffer;
+ GstRtpJitterBufferPrivate *priv;
+
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
+ priv = jitterbuffer->priv;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_rtp_jitter_buffer_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ if (GST_QUERY_IS_SERIALIZED (query)) {
+ JBUF_LOCK_CHECK (priv, out_flushing);
+ if (rtp_jitter_buffer_get_mode (priv->jbuf) !=
+ RTP_JITTER_BUFFER_MODE_BUFFER) {
+ GST_DEBUG_OBJECT (jitterbuffer, "adding serialized query");
+ if (rtp_jitter_buffer_append_query (priv->jbuf, query))
+ JBUF_SIGNAL_EVENT (priv);
+ /* wait for the pushing thread to process the query */
+ JBUF_WAIT_QUERY (priv, out_flushing);
+ res = priv->last_query;
+ } else {
+ GST_DEBUG_OBJECT (jitterbuffer, "refusing query, we are buffering");
+ res = FALSE;
+ }
+ JBUF_UNLOCK (priv);
+ } else {
+ res = gst_pad_query_default (pad, parent, query);
+ }
+ break;
+ }
+ return res;
+ /* ERRORS */
+out_flushing:
+ {
+ GST_DEBUG_OBJECT (jitterbuffer, "we are flushing");
+ JBUF_UNLOCK (priv);
+ return FALSE;
+ }
+
+}
+
+/* Query function of the source pad: LATENCY adds our configured latency to
+ * the upstream result (max becomes unlimited), POSITION reports npt-start
+ * plus the last output time when both are known, CAPS is answered from our
+ * negotiated/template caps; everything else goes to the default handler. */
+static gboolean
+gst_rtp_jitter_buffer_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstRtpJitterBuffer *jitterbuffer;
+ GstRtpJitterBufferPrivate *priv;
+ gboolean res = FALSE;
+
+ jitterbuffer = GST_RTP_JITTER_BUFFER (parent);
+ priv = jitterbuffer->priv;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_LATENCY:
+ {
+ /* We need to send the query upstream and add the returned latency to our
+ * own */
+ GstClockTime min_latency, max_latency;
+ gboolean us_live;
+ GstClockTime our_latency;
+
+ if ((res = gst_pad_peer_query (priv->sinkpad, query))) {
+ gst_query_parse_latency (query, &us_live, &min_latency, &max_latency);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "Peer latency: min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
+
+ /* store this so that we can safely sync on the peer buffers. */
+ JBUF_LOCK (priv);
+ priv->peer_latency = min_latency;
+ our_latency = priv->latency_ns;
+ JBUF_UNLOCK (priv);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "Our latency: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (our_latency));
+
+ /* we add some latency but can buffer an infinite amount of time */
+ min_latency += our_latency;
+ max_latency = -1;
+
+ GST_DEBUG_OBJECT (jitterbuffer, "Calculated total latency : min %"
+ GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
+
+ gst_query_set_latency (query, TRUE, min_latency, max_latency);
+ }
+ break;
+ }
+ case GST_QUERY_POSITION:
+ {
+ GstClockTime start, last_out;
+ GstFormat fmt;
+
+ gst_query_parse_position (query, &fmt, NULL);
+ if (fmt != GST_FORMAT_TIME) {
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ JBUF_LOCK (priv);
+ start = priv->npt_start;
+ last_out = priv->last_out_time;
+ JBUF_UNLOCK (priv);
+
+ GST_DEBUG_OBJECT (jitterbuffer, "npt start %" GST_TIME_FORMAT
+ ", last out %" GST_TIME_FORMAT, GST_TIME_ARGS (start),
+ GST_TIME_ARGS (last_out));
+
+ if (GST_CLOCK_TIME_IS_VALID (start) && GST_CLOCK_TIME_IS_VALID (last_out)) {
+ /* bring 0-based outgoing time to stream time */
+ gst_query_set_position (query, GST_FORMAT_TIME, start + last_out);
+ res = TRUE;
+ } else {
+ res = gst_pad_query_default (pad, parent, query);
+ }
+ break;
+ }
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_rtp_jitter_buffer_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+}
+
+/* GObject set_property implementation.  All properties are protected by the
+ * JBUF lock; LATENCY additionally posts a latency message so the pipeline
+ * can reconfigure, and TS_OFFSET is applied gradually when
+ * max-ts-offset-adjustment is non-zero. */
+static void
+gst_rtp_jitter_buffer_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstRtpJitterBuffer *jitterbuffer;
+ GstRtpJitterBufferPrivate *priv;
+
+ jitterbuffer = GST_RTP_JITTER_BUFFER (object);
+ priv = jitterbuffer->priv;
+
+ switch (prop_id) {
+ case PROP_LATENCY:
+ {
+ guint new_latency, old_latency;
+
+ new_latency = g_value_get_uint (value);
+
+ JBUF_LOCK (priv);
+ old_latency = priv->latency_ms;
+ priv->latency_ms = new_latency;
+ priv->latency_ns = priv->latency_ms * GST_MSECOND;
+ rtp_jitter_buffer_set_delay (priv->jbuf, priv->latency_ns);
+ JBUF_UNLOCK (priv);
+
+ /* post message if latency changed, this will inform the parent pipeline
+ * that a latency reconfiguration is possible/needed. */
+ if (new_latency != old_latency) {
+ GST_DEBUG_OBJECT (jitterbuffer, "latency changed to: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (new_latency * GST_MSECOND));
+
+ gst_element_post_message (GST_ELEMENT_CAST (jitterbuffer),
+ gst_message_new_latency (GST_OBJECT_CAST (jitterbuffer)));
+ }
+ break;
+ }
+ case PROP_DROP_ON_LATENCY:
+ JBUF_LOCK (priv);
+ priv->drop_on_latency = g_value_get_boolean (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_TS_OFFSET:
+ JBUF_LOCK (priv);
+ /* when a max adjustment rate is configured, only record the remaining
+ * delta; it is applied gradually elsewhere.  Otherwise apply at once. */
+ if (priv->max_ts_offset_adjustment != 0) {
+ gint64 new_offset = g_value_get_int64 (value);
+
+ if (new_offset > priv->ts_offset) {
+ priv->ts_offset_remainder = new_offset - priv->ts_offset;
+ } else {
+ priv->ts_offset_remainder = -(priv->ts_offset - new_offset);
+ }
+ } else {
+ priv->ts_offset = g_value_get_int64 (value);
+ priv->ts_offset_remainder = 0;
+ update_timer_offsets (jitterbuffer);
+ }
+ priv->ts_discont = TRUE;
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ JBUF_LOCK (priv);
+ priv->max_ts_offset_adjustment = g_value_get_uint64 (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_DO_LOST:
+ JBUF_LOCK (priv);
+ priv->do_lost = g_value_get_boolean (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_POST_DROP_MESSAGES:
+ JBUF_LOCK (priv);
+ priv->post_drop_messages = g_value_get_boolean (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_DROP_MESSAGES_INTERVAL:
+ JBUF_LOCK (priv);
+ priv->drop_messages_interval_ms = g_value_get_uint (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MODE:
+ JBUF_LOCK (priv);
+ rtp_jitter_buffer_set_mode (priv->jbuf, g_value_get_enum (value));
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ JBUF_LOCK (priv);
+ priv->do_retransmission = g_value_get_boolean (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_NEXT_SEQNUM:
+ JBUF_LOCK (priv);
+ priv->rtx_next_seqnum = g_value_get_boolean (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_DELAY:
+ JBUF_LOCK (priv);
+ priv->rtx_delay = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_MIN_DELAY:
+ JBUF_LOCK (priv);
+ priv->rtx_min_delay = g_value_get_uint (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_DELAY_REORDER:
+ JBUF_LOCK (priv);
+ priv->rtx_delay_reorder = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_RETRY_TIMEOUT:
+ JBUF_LOCK (priv);
+ priv->rtx_retry_timeout = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_MIN_RETRY_TIMEOUT:
+ JBUF_LOCK (priv);
+ priv->rtx_min_retry_timeout = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_RETRY_PERIOD:
+ JBUF_LOCK (priv);
+ priv->rtx_retry_period = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_MAX_RETRIES:
+ JBUF_LOCK (priv);
+ priv->rtx_max_retries = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_DEADLINE:
+ JBUF_LOCK (priv);
+ priv->rtx_deadline_ms = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_STATS_TIMEOUT:
+ JBUF_LOCK (priv);
+ priv->rtx_stats_timeout = g_value_get_uint (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ JBUF_LOCK (priv);
+ priv->max_rtcp_rtp_time_diff = g_value_get_int (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ JBUF_LOCK (priv);
+ priv->max_dropout_time = g_value_get_uint (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ JBUF_LOCK (priv);
+ priv->max_misorder_time = g_value_get_uint (value);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RFC7273_SYNC:
+ JBUF_LOCK (priv);
+ rtp_jitter_buffer_set_rfc7273_sync (priv->jbuf,
+ g_value_get_boolean (value));
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_FASTSTART_MIN_PACKETS:
+ JBUF_LOCK (priv);
+ priv->faststart_min_packets = g_value_get_uint (value);
+ JBUF_UNLOCK (priv);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject get_property implementation.  Reads are taken under the JBUF lock
+ * to pair with set_property; PERCENT reports 100 once the source pad has a
+ * non-OK flow result, and STATS builds a fresh structure snapshot. */
+static void
+gst_rtp_jitter_buffer_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstRtpJitterBuffer *jitterbuffer;
+ GstRtpJitterBufferPrivate *priv;
+
+ jitterbuffer = GST_RTP_JITTER_BUFFER (object);
+ priv = jitterbuffer->priv;
+
+ switch (prop_id) {
+ case PROP_LATENCY:
+ JBUF_LOCK (priv);
+ g_value_set_uint (value, priv->latency_ms);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ JBUF_LOCK (priv);
+ g_value_set_boolean (value, priv->drop_on_latency);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_TS_OFFSET:
+ JBUF_LOCK (priv);
+ g_value_set_int64 (value, priv->ts_offset);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ JBUF_LOCK (priv);
+ g_value_set_uint64 (value, priv->max_ts_offset_adjustment);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_DO_LOST:
+ JBUF_LOCK (priv);
+ g_value_set_boolean (value, priv->do_lost);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_POST_DROP_MESSAGES:
+ JBUF_LOCK (priv);
+ g_value_set_boolean (value, priv->post_drop_messages);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_DROP_MESSAGES_INTERVAL:
+ JBUF_LOCK (priv);
+ g_value_set_uint (value, priv->drop_messages_interval_ms);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MODE:
+ JBUF_LOCK (priv);
+ g_value_set_enum (value, rtp_jitter_buffer_get_mode (priv->jbuf));
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_PERCENT:
+ {
+ gint percent;
+
+ JBUF_LOCK (priv);
+ /* a non-OK src result means we are flushing/stopped: report full */
+ if (priv->srcresult != GST_FLOW_OK)
+ percent = 100;
+ else
+ percent = rtp_jitter_buffer_get_percent (priv->jbuf);
+
+ g_value_set_int (value, percent);
+ JBUF_UNLOCK (priv);
+ break;
+ }
+ case PROP_DO_RETRANSMISSION:
+ JBUF_LOCK (priv);
+ g_value_set_boolean (value, priv->do_retransmission);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_NEXT_SEQNUM:
+ JBUF_LOCK (priv);
+ g_value_set_boolean (value, priv->rtx_next_seqnum);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_DELAY:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->rtx_delay);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_MIN_DELAY:
+ JBUF_LOCK (priv);
+ g_value_set_uint (value, priv->rtx_min_delay);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_DELAY_REORDER:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->rtx_delay_reorder);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_RETRY_TIMEOUT:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->rtx_retry_timeout);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_MIN_RETRY_TIMEOUT:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->rtx_min_retry_timeout);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_RETRY_PERIOD:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->rtx_retry_period);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_MAX_RETRIES:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->rtx_max_retries);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_DEADLINE:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->rtx_deadline_ms);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RTX_STATS_TIMEOUT:
+ JBUF_LOCK (priv);
+ g_value_set_uint (value, priv->rtx_stats_timeout);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_STATS:
+ g_value_take_boxed (value,
+ gst_rtp_jitter_buffer_create_stats (jitterbuffer));
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ JBUF_LOCK (priv);
+ g_value_set_int (value, priv->max_rtcp_rtp_time_diff);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ JBUF_LOCK (priv);
+ g_value_set_uint (value, priv->max_dropout_time);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ JBUF_LOCK (priv);
+ g_value_set_uint (value, priv->max_misorder_time);
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_RFC7273_SYNC:
+ JBUF_LOCK (priv);
+ g_value_set_boolean (value,
+ rtp_jitter_buffer_get_rfc7273_sync (priv->jbuf));
+ JBUF_UNLOCK (priv);
+ break;
+ case PROP_FASTSTART_MIN_PACKETS:
+ JBUF_LOCK (priv);
+ g_value_set_uint (value, priv->faststart_min_packets);
+ JBUF_UNLOCK (priv);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Build a new "application/x-rtp-jitterbuffer-stats" structure snapshotting
+ * the counters under the JBUF lock.  Caller takes ownership of the returned
+ * structure. */
+static GstStructure *
+gst_rtp_jitter_buffer_create_stats (GstRtpJitterBuffer * jbuf)
+{
+ GstRtpJitterBufferPrivate *priv = jbuf->priv;
+ GstStructure *s;
+
+ JBUF_LOCK (priv);
+ s = gst_structure_new ("application/x-rtp-jitterbuffer-stats",
+ "num-pushed", G_TYPE_UINT64, priv->num_pushed,
+ "num-lost", G_TYPE_UINT64, priv->num_lost,
+ "num-late", G_TYPE_UINT64, priv->num_late,
+ "num-duplicates", G_TYPE_UINT64, priv->num_duplicates,
+ "avg-jitter", G_TYPE_UINT64, priv->avg_jitter,
+ "rtx-count", G_TYPE_UINT64, priv->num_rtx_requests,
+ "rtx-success-count", G_TYPE_UINT64, priv->num_rtx_success,
+ "rtx-per-packet", G_TYPE_DOUBLE, priv->avg_rtx_num,
+ "rtx-rtt", G_TYPE_UINT64, priv->avg_rtx_rtt, NULL);
+ JBUF_UNLOCK (priv);
+
+ return s;
+}
diff --git a/gst/rtpmanager/gstrtpjitterbuffer.h b/gst/rtpmanager/gstrtpjitterbuffer.h
new file mode 100644
index 0000000000..9605e7ba2f
--- /dev/null
+++ b/gst/rtpmanager/gstrtpjitterbuffer.h
@@ -0,0 +1,89 @@
+/*
+ * Farsight Voice+Video library
+ *
+ * Copyright 2007 Collabora Ltd,
+ * Copyright 2007 Nokia Corporation
+ * @author: Philippe Kalaf <philippe.kalaf@collabora.co.uk>.
+ * Copyright 2007 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef __GST_RTP_JITTER_BUFFER_H__
+#define __GST_RTP_JITTER_BUFFER_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+G_BEGIN_DECLS
+
+/* #define's don't like whitespacey bits */
+#define GST_TYPE_RTP_JITTER_BUFFER \
+ (gst_rtp_jitter_buffer_get_type())
+#define GST_RTP_JITTER_BUFFER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj), \
+ GST_TYPE_RTP_JITTER_BUFFER,GstRtpJitterBuffer))
+#define GST_RTP_JITTER_BUFFER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass), \
+ GST_TYPE_RTP_JITTER_BUFFER,GstRtpJitterBufferClass))
+#define GST_IS_RTP_JITTER_BUFFER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_JITTER_BUFFER))
+/* NOTE(review): parameter is named 'obj' but the expansion uses 'klass';
+ * any actual use of this macro would fail to compile — confirm upstream */
+#define GST_IS_RTP_JITTER_BUFFER_CLASS(obj) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_JITTER_BUFFER))
+#define GST_RTP_JITTER_BUFFER_CAST(obj) \
+ ((GstRtpJitterBuffer *)(obj))
+
+typedef struct _GstRtpJitterBuffer GstRtpJitterBuffer;
+typedef struct _GstRtpJitterBufferClass GstRtpJitterBufferClass;
+typedef struct _GstRtpJitterBufferPrivate GstRtpJitterBufferPrivate;
+
+/**
+ * GstRtpJitterBuffer:
+ *
+ * Opaque jitterbuffer structure.
+ */
+struct _GstRtpJitterBuffer
+{
+ GstElement parent;
+
+ /*< private >*/
+ GstRtpJitterBufferPrivate *priv; /* FIXME: remove? */
+};
+
+/**
+ * GstRtpJitterBufferClass:
+ *
+ * Class structure: signal and action vfuncs of the jitterbuffer element.
+ */
+struct _GstRtpJitterBufferClass
+{
+ GstElementClass parent_class;
+
+ /* signals */
+ GstCaps* (*request_pt_map) (GstRtpJitterBuffer *buffer, guint pt);
+
+ void (*handle_sync) (GstRtpJitterBuffer *buffer, GstStructure *s);
+ void (*on_npt_stop) (GstRtpJitterBuffer *buffer);
+
+ /* actions */
+ void (*clear_pt_map) (GstRtpJitterBuffer *buffer);
+
+ GstClockTime (*set_active) (GstRtpJitterBuffer *buffer, gboolean active, guint64 elapsed);
+};
+
+GType gst_rtp_jitter_buffer_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpjitterbuffer);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_JITTER_BUFFER_H__ */
diff --git a/gst/rtpmanager/gstrtpmanager.c b/gst/rtpmanager/gstrtpmanager.c
new file mode 100644
index 0000000000..c3fc29b30b
--- /dev/null
+++ b/gst/rtpmanager/gstrtpmanager.c
@@ -0,0 +1,66 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtpbin.h"
+#include "gstrtpjitterbuffer.h"
+#include "gstrtpptdemux.h"
+#include "gstrtpsession.h"
+#include "gstrtprtxqueue.h"
+#include "gstrtprtxreceive.h"
+#include "gstrtprtxsend.h"
+#include "gstrtpssrcdemux.h"
+#include "gstrtpdtmfmux.h"
+#include "gstrtpmux.h"
+#include "gstrtpfunnel.h"
+#include "gstrtpst2022-1-fecdec.h"
+#include "gstrtpst2022-1-fecenc.h"
+#include "gstrtphdrext-twcc.h"
+#include "gstrtphdrext-rfc6464.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean registered = FALSE;
+
+  /* Register every rtpmanager element; succeed if at least one of them
+   * registered (|= deliberately avoids short-circuiting so all elements
+   * get a chance to register). */
+  registered |= GST_ELEMENT_REGISTER (rtpbin, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpjitterbuffer, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpptdemux, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpsession, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtprtxqueue, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtprtxreceive, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtprtxsend, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpssrcdemux, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpmux, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpdtmfmux, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpfunnel, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpst2022_1_fecdec, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpst2022_1_fecenc, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtphdrexttwcc, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtphdrextrfc6464, plugin);
+
+  return registered;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_VERSION_MINOR, rtpmanager,
+    "RTP session management plugin library", plugin_init, VERSION, "LGPL",
+    GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/rtpmanager/gstrtpmux.c b/gst/rtpmanager/gstrtpmux.c
new file mode 100644
index 0000000000..6387c655c3
--- /dev/null
+++ b/gst/rtpmanager/gstrtpmux.c
@@ -0,0 +1,1016 @@
+/* RTP muxer element for GStreamer
+ *
+ * gstrtpmux.c:
+ *
+ * Copyright (C) <2007-2010> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) <2007-2010> Collabora Ltd
+ * Contact: Olivier Crete <olivier.crete@collabora.co.uk>
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000,2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpmux
+ * @title: rtpmux
+ * @see_also: rtpdtmfmux
+ *
+ * The rtp muxer takes multiple RTP streams having the same clock-rate and
+ * muxes into a single stream with a single SSRC.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 rtpmux name=mux ! udpsink host=127.0.0.1 port=8888 \
+ * alsasrc ! alawenc ! rtppcmapay ! \
+ * application/x-rtp, payload=8, rate=8000 ! mux.sink_0 \
+ * audiotestsrc is-live=1 ! \
+ * mulawenc ! rtppcmupay ! \
+ * application/x-rtp, payload=0, rate=8000 ! mux.sink_1
+ * ]|
+ * In this example, an audio stream is captured from ALSA and another is
+ * generated, both are encoded into different payload types and muxed together
+ * so they can be sent on the same port.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <string.h>
+
+#include "gstrtpmux.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_mux_debug);
+#define GST_CAT_DEFAULT gst_rtp_mux_debug
+
+/* GObject property IDs. */
+enum
+{
+  PROP_0,
+  PROP_TIMESTAMP_OFFSET,
+  PROP_SEQNUM_OFFSET,
+  PROP_SEQNUM,
+  PROP_SSRC
+};
+
+/* -1 means "pick a random value" (resolved on READY->PAUSED / setcaps). */
+#define DEFAULT_TIMESTAMP_OFFSET -1
+#define DEFAULT_SEQNUM_OFFSET -1
+#define DEFAULT_SSRC -1
+
+/* Single always-present output pad carrying the muxed RTP stream. */
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("application/x-rtp")
+    );
+
+/* Request pads, one per input RTP stream. */
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("application/x-rtp")
+    );
+
+static GstPad *gst_rtp_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_rtp_mux_release_pad (GstElement * element, GstPad * pad);
+static GstFlowReturn gst_rtp_mux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstFlowReturn gst_rtp_mux_chain_list (GstPad * pad, GstObject * parent,
+ GstBufferList * bufferlist);
+static gboolean gst_rtp_mux_setcaps (GstPad * pad, GstRTPMux * rtp_mux,
+ GstCaps * caps);
+static gboolean gst_rtp_mux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_rtp_mux_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+static GstStateChangeReturn gst_rtp_mux_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_rtp_mux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_mux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_mux_dispose (GObject * object);
+
+static gboolean gst_rtp_mux_src_event_real (GstRTPMux * rtp_mux,
+ GstEvent * event);
+
+G_DEFINE_TYPE_WITH_CODE (GstRTPMux, gst_rtp_mux, GST_TYPE_ELEMENT,
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_mux_debug, "rtpmux", 0, "rtp muxer"));
+GST_ELEMENT_REGISTER_DEFINE (rtpmux, "rtpmux", GST_RANK_NONE, GST_TYPE_RTP_MUX);
+
+/* Class initialization: installs vfuncs, pad templates, metadata and
+ * the timestamp-offset/seqnum-offset/seqnum/ssrc properties. */
+static void
+gst_rtp_mux_class_init (GstRTPMuxClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->get_property = gst_rtp_mux_get_property;
+  gobject_class->set_property = gst_rtp_mux_set_property;
+  gobject_class->dispose = gst_rtp_mux_dispose;
+
+  /* Subclasses (e.g. rtpdtmfmux) may override upstream event handling. */
+  klass->src_event = gst_rtp_mux_src_event_real;
+
+  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP muxer",
+      "Codec/Muxer",
+      "multiplex N rtp streams into one", "Zeeshan Ali <first.last@nokia.com>");
+
+  g_object_class_install_property (gobject_class,
+      PROP_TIMESTAMP_OFFSET, g_param_spec_int ("timestamp-offset",
+          "Timestamp Offset",
+          "Offset to add to all outgoing timestamps (-1 = random)", -1,
+          G_MAXINT, DEFAULT_TIMESTAMP_OFFSET,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_SEQNUM_OFFSET,
+      g_param_spec_int ("seqnum-offset", "Sequence number Offset",
+          "Offset to add to all outgoing seqnum (-1 = random)", -1, G_MAXINT,
+          DEFAULT_SEQNUM_OFFSET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_SEQNUM,
+      g_param_spec_uint ("seqnum", "Sequence number",
+          "The RTP sequence number of the last processed packet",
+          0, G_MAXUINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_SSRC,
+      g_param_spec_uint ("ssrc", "SSRC",
+          "The SSRC of the packets (default == random)",
+          0, G_MAXUINT, DEFAULT_SSRC,
+          GST_PARAM_MUTABLE_PLAYING | G_PARAM_READWRITE |
+          G_PARAM_STATIC_STRINGS));
+
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_rtp_mux_request_new_pad);
+  gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_mux_release_pad);
+  gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_rtp_mux_change_state);
+}
+
+/* Dispose: drops the last-pad reference and releases every remaining
+ * request (sink) pad. */
+static void
+gst_rtp_mux_dispose (GObject * object)
+{
+  GstRTPMux *rtp_mux = GST_RTP_MUX (object);
+  GList *walk;
+  gboolean released;
+
+  g_clear_object (&rtp_mux->last_pad);
+
+  /* Releasing a request pad mutates the element's pad list, so rescan
+   * from the head after every release until no sink pad remains. */
+  do {
+    released = FALSE;
+    for (walk = GST_ELEMENT_PADS (object); walk; walk = g_list_next (walk)) {
+      GstPad *pad = GST_PAD (walk->data);
+
+      if (GST_PAD_IS_SINK (pad)) {
+        gst_element_release_request_pad (GST_ELEMENT (object), pad);
+        released = TRUE;
+        break;
+      }
+    }
+  } while (released);
+
+  G_OBJECT_CLASS (gst_rtp_mux_parent_class)->dispose (object);
+}
+
+/* Src pad event function: delegates to the src_event class vmethod so
+ * subclasses can intercept upstream events. */
+static gboolean
+gst_rtp_mux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRTPMux *mux = GST_RTP_MUX (parent);
+
+  return GST_RTP_MUX_GET_CLASS (mux)->src_event (mux, event);
+}
+
+/* Default src_event vmethod: on a GstRTPCollision custom upstream event
+ * matching our current SSRC, picks a new SSRC (preferring the event's
+ * "suggested-ssrc") and updates the src caps; all events are then
+ * forwarded with the default handler. */
+static gboolean
+gst_rtp_mux_src_event_real (GstRTPMux * rtp_mux, GstEvent * event)
+{
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CUSTOM_UPSTREAM:
+    {
+      const GstStructure *s = gst_event_get_structure (event);
+
+      if (gst_structure_has_name (s, "GstRTPCollision")) {
+        guint ssrc = 0;
+
+        if (!gst_structure_get_uint (s, "ssrc", &ssrc))
+          ssrc = -1;
+
+        GST_DEBUG_OBJECT (rtp_mux, "collided ssrc: %x", ssrc);
+
+        /* choose another ssrc for our stream */
+        GST_OBJECT_LOCK (rtp_mux);
+        if (ssrc == rtp_mux->current_ssrc) {
+          GstCaps *caps;
+          guint suggested_ssrc = 0;
+          guint32 new_ssrc;
+
+          /* prefer the ssrc suggested by the event's sender */
+          if (gst_structure_get_uint (s, "suggested-ssrc", &suggested_ssrc))
+            rtp_mux->current_ssrc = suggested_ssrc;
+
+          /* keep drawing random values until we leave the collided ssrc */
+          while (ssrc == rtp_mux->current_ssrc)
+            rtp_mux->current_ssrc = g_random_int ();
+
+          new_ssrc = rtp_mux->current_ssrc;
+          GST_INFO_OBJECT (rtp_mux, "New ssrc after collision %x (was: %x)",
+              new_ssrc, ssrc);
+          GST_OBJECT_UNLOCK (rtp_mux);
+
+          /* NOTE(review): gst_pad_get_current_caps() may return NULL if no
+           * caps were negotiated yet, and gst_caps_make_writable(NULL) is
+           * invalid -- confirm a collision cannot arrive before caps. */
+          caps = gst_pad_get_current_caps (rtp_mux->srcpad);
+          caps = gst_caps_make_writable (caps);
+          gst_caps_set_simple (caps, "ssrc", G_TYPE_UINT, new_ssrc, NULL);
+          gst_pad_set_caps (rtp_mux->srcpad, caps);
+          gst_caps_unref (caps);
+        } else {
+          GST_OBJECT_UNLOCK (rtp_mux);
+        }
+      }
+      break;
+    }
+    default:
+      break;
+  }
+
+
+  return gst_pad_event_default (rtp_mux->srcpad, GST_OBJECT (rtp_mux), event);
+}
+
+/* Instance initialization: creates the always src pad and seeds the
+ * configuration fields with their "random" (-1) defaults. */
+static void
+gst_rtp_mux_init (GstRTPMux * rtp_mux)
+{
+  GstElementClass *element_class = GST_ELEMENT_GET_CLASS (rtp_mux);
+  GstPadTemplate *templ;
+
+  templ = gst_element_class_get_pad_template (element_class, "src");
+  rtp_mux->srcpad = gst_pad_new_from_template (templ, "src");
+  gst_pad_set_event_function (rtp_mux->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_mux_src_event));
+  gst_pad_use_fixed_caps (rtp_mux->srcpad);
+  gst_element_add_pad (GST_ELEMENT (rtp_mux), rtp_mux->srcpad);
+
+  /* -1 == "randomize at READY->PAUSED" for all of these */
+  rtp_mux->ssrc = DEFAULT_SSRC;
+  rtp_mux->current_ssrc = DEFAULT_SSRC;
+  rtp_mux->ts_offset = DEFAULT_TIMESTAMP_OFFSET;
+  rtp_mux->seqnum_offset = DEFAULT_SEQNUM_OFFSET;
+  rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
+}
+
+/* Wires a freshly requested sink pad: installs the streaming-thread
+ * entry points, attaches per-pad private state, activates the pad and
+ * adds it to the element. */
+static void
+gst_rtp_mux_setup_sinkpad (GstRTPMux * rtp_mux, GstPad * sinkpad)
+{
+  GstRTPMuxPadPrivate *padpriv = g_slice_new0 (GstRTPMuxPadPrivate);
+
+  gst_pad_set_chain_function (sinkpad, GST_DEBUG_FUNCPTR (gst_rtp_mux_chain));
+  gst_pad_set_chain_list_function (sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_mux_chain_list));
+  gst_pad_set_event_function (sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_mux_sink_event));
+  gst_pad_set_query_function (sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_mux_sink_query));
+
+  /* per-pad state travels as the pad's element-private data */
+  gst_segment_init (&padpriv->segment, GST_FORMAT_UNDEFINED);
+  gst_pad_set_element_private (sinkpad, padpriv);
+
+  gst_pad_set_active (sinkpad, TRUE);
+  gst_element_add_pad (GST_ELEMENT (rtp_mux), sinkpad);
+}
+
+/* request_new_pad vfunc: creates and sets up a new sink request pad,
+ * or returns NULL on an invalid template or pad-creation failure. */
+static GstPad *
+gst_rtp_mux_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+  GstRTPMux *rtp_mux;
+  GstPad *sinkpad;
+
+  g_return_val_if_fail (templ != NULL, NULL);
+  g_return_val_if_fail (GST_IS_RTP_MUX (element), NULL);
+
+  rtp_mux = GST_RTP_MUX (element);
+
+  /* only sink pads can be requested */
+  if (templ->direction != GST_PAD_SINK) {
+    GST_WARNING_OBJECT (rtp_mux, "request pad that is not a SINK pad");
+    return NULL;
+  }
+
+  sinkpad = gst_pad_new_from_template (templ, req_name);
+  if (sinkpad == NULL) {
+    GST_WARNING_OBJECT (rtp_mux, "failed to create request pad");
+    return NULL;
+  }
+
+  gst_rtp_mux_setup_sinkpad (rtp_mux, sinkpad);
+
+  return sinkpad;
+}
+
+/* release_pad vfunc: detaches and frees the pad's private state, then
+ * removes the pad from the element. */
+static void
+gst_rtp_mux_release_pad (GstElement * element, GstPad * pad)
+{
+  GstRTPMuxPadPrivate *priv;
+
+  /* detach under the object lock so the streaming thread never sees a
+   * half-released pad */
+  GST_OBJECT_LOCK (element);
+  priv = gst_pad_get_element_private (pad);
+  gst_pad_set_element_private (pad, NULL);
+  GST_OBJECT_UNLOCK (element);
+
+  gst_element_remove_pad (element, pad);
+
+  if (priv != NULL)
+    g_slice_free (GstRTPMuxPadPrivate, priv);
+}
+
+/* Rebase the packet's RTP timestamp from the sink pad's own offset (if
+ * known) onto the muxer's timestamp base. Called with the object lock. */
+static void
+gst_rtp_mux_readjust_rtp_timestamp_locked (GstRTPMux * rtp_mux,
+    GstRTPMuxPadPrivate * padpriv, GstRTPBuffer * rtpbuffer)
+{
+  guint32 in_ts, out_ts;
+  guint32 sink_base;
+
+  sink_base = (padpriv && padpriv->have_timestamp_offset) ?
+      padpriv->timestamp_offset : 0;
+
+  in_ts = gst_rtp_buffer_get_timestamp (rtpbuffer);
+  out_ts = in_ts - sink_base + rtp_mux->ts_base;
+  GST_LOG_OBJECT (rtp_mux, "Re-adjusting RTP ts %u to %u", in_ts, out_ts);
+  gst_rtp_buffer_set_timestamp (rtpbuffer, out_ts);
+}
+
+/* Stamps one mapped RTP packet with the muxer's seqnum/ssrc/timestamp
+ * series and converts its PTS/DTS to running time. Called with the
+ * object lock. Returns FALSE when the subclass rejects the packet. */
+static gboolean
+process_buffer_locked (GstRTPMux * rtp_mux, GstRTPMuxPadPrivate * padpriv,
+    GstRTPBuffer * rtpbuffer)
+{
+  GstRTPMuxClass *klass = GST_RTP_MUX_GET_CLASS (rtp_mux);
+
+  /* give subclasses a chance to drop the packet */
+  if (klass->accept_buffer_locked &&
+      !klass->accept_buffer_locked (rtp_mux, padpriv, rtpbuffer))
+    return FALSE;
+
+  rtp_mux->seqnum++;
+  gst_rtp_buffer_set_seq (rtpbuffer, rtp_mux->seqnum);
+  gst_rtp_buffer_set_ssrc (rtpbuffer, rtp_mux->current_ssrc);
+  gst_rtp_mux_readjust_rtp_timestamp_locked (rtp_mux, padpriv, rtpbuffer);
+
+  GST_LOG_OBJECT (rtp_mux,
+      "Pushing packet size %" G_GSIZE_FORMAT ", seq=%d, ts=%u, ssrc=%x",
+      rtpbuffer->map[0].size, rtp_mux->seqnum,
+      gst_rtp_buffer_get_timestamp (rtpbuffer), rtp_mux->current_ssrc);
+
+  /* translate buffer times into the src segment's running time */
+  if (padpriv && padpriv->segment.format == GST_FORMAT_TIME) {
+    GST_BUFFER_PTS (rtpbuffer->buffer) =
+        gst_segment_to_running_time (&padpriv->segment, GST_FORMAT_TIME,
+        GST_BUFFER_PTS (rtpbuffer->buffer));
+    GST_BUFFER_DTS (rtpbuffer->buffer) =
+        gst_segment_to_running_time (&padpriv->segment, GST_FORMAT_TIME,
+        GST_BUFFER_DTS (rtpbuffer->buffer));
+  }
+
+  return TRUE;
+}
+
+/* State shared with process_list_item() while walking a buffer list. */
+struct BufferListData
+{
+  /* the muxer doing the processing (object lock held by the caller) */
+  GstRTPMux *rtp_mux;
+  /* per-sink-pad private data of the pad the list arrived on */
+  GstRTPMuxPadPrivate *padpriv;
+  /* set TRUE when a buffer was rejected; the whole list is then dropped */
+  gboolean drop;
+};
+
+/* gst_buffer_list_foreach callback: processes one buffer of the list
+ * via process_buffer_locked() and tracks the running last_stop. */
+static gboolean
+process_list_item (GstBuffer ** buffer, guint idx, gpointer user_data)
+{
+  struct BufferListData *bd = user_data;
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  GstBuffer *buf;
+
+  *buffer = gst_buffer_make_writable (*buffer);
+  buf = *buffer;
+
+  gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp);
+  bd->drop = !process_buffer_locked (bd->rtp_mux, bd->padpriv, &rtp);
+  gst_rtp_buffer_unmap (&rtp);
+
+  if (bd->drop)
+    return FALSE;               /* stop iterating; the list is dropped */
+
+  /* remember where the stream currently ends */
+  if (GST_BUFFER_DURATION_IS_VALID (buf) && GST_BUFFER_PTS_IS_VALID (buf))
+    bd->rtp_mux->last_stop = GST_BUFFER_PTS (buf) + GST_BUFFER_DURATION (buf);
+  else
+    bd->rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
+
+  return TRUE;
+}
+
+static gboolean resend_events (GstPad * pad, GstEvent ** event,
+ gpointer user_data);
+
+/* Chain-list handler for the sink pads: renegotiates src caps when
+ * needed, rewrites seqnum/ssrc/timestamp of every buffer in the list
+ * under the object lock, replays sticky events when the active input
+ * pad changes, then pushes the list downstream. */
+static GstFlowReturn
+gst_rtp_mux_chain_list (GstPad * pad, GstObject * parent,
+    GstBufferList * bufferlist)
+{
+  GstRTPMux *rtp_mux;
+  GstFlowReturn ret;
+  GstRTPMuxPadPrivate *padpriv;
+  gboolean changed = FALSE;
+  struct BufferListData bd;
+
+  rtp_mux = GST_RTP_MUX (parent);
+
+  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
+    GstCaps *current_caps = gst_pad_get_current_caps (pad);
+
+    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
+      gst_pad_mark_reconfigure (rtp_mux->srcpad);
+      if (GST_PAD_IS_FLUSHING (rtp_mux->srcpad))
+        ret = GST_FLOW_FLUSHING;
+      else
+        ret = GST_FLOW_NOT_NEGOTIATED;
+      /* fix: current_caps was leaked on this error path; it may also be
+       * NULL when the pad has no caps yet, so guard the unref */
+      if (current_caps)
+        gst_caps_unref (current_caps);
+      gst_buffer_list_unref (bufferlist);
+      goto out;
+    }
+    gst_caps_unref (current_caps);
+  }
+
+  GST_OBJECT_LOCK (rtp_mux);
+
+  padpriv = gst_pad_get_element_private (pad);
+  if (!padpriv) {
+    /* pad is being released */
+    GST_OBJECT_UNLOCK (rtp_mux);
+    ret = GST_FLOW_NOT_LINKED;
+    gst_buffer_list_unref (bufferlist);
+    goto out;
+  }
+
+  bd.rtp_mux = rtp_mux;
+  bd.padpriv = padpriv;
+  bd.drop = FALSE;
+
+  bufferlist = gst_buffer_list_make_writable (bufferlist);
+  gst_buffer_list_foreach (bufferlist, process_list_item, &bd);
+
+  /* remember the pad that pushed last so sticky events are replayed
+   * whenever the active input changes */
+  if (!bd.drop && pad != rtp_mux->last_pad) {
+    changed = TRUE;
+    g_clear_object (&rtp_mux->last_pad);
+    rtp_mux->last_pad = g_object_ref (pad);
+  }
+
+  GST_OBJECT_UNLOCK (rtp_mux);
+
+  if (changed)
+    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);
+
+  if (bd.drop) {
+    gst_buffer_list_unref (bufferlist);
+    ret = GST_FLOW_OK;
+  } else {
+    ret = gst_pad_push_list (rtp_mux->srcpad, bufferlist);
+  }
+
+out:
+
+  return ret;
+}
+
+/* gst_pad_sticky_events_foreach callback: re-applies caps via setcaps,
+ * replaces segments with a fresh TIME segment, and forwards the rest. */
+static gboolean
+resend_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+  GstRTPMux *rtp_mux = user_data;
+
+  switch (GST_EVENT_TYPE (*event)) {
+    case GST_EVENT_CAPS:{
+      GstCaps *caps;
+
+      gst_event_parse_caps (*event, &caps);
+      gst_rtp_mux_setcaps (pad, rtp_mux, caps);
+      break;
+    }
+    case GST_EVENT_SEGMENT:{
+      GstSegment segment;
+
+      /* downstream always gets a plain TIME segment */
+      gst_segment_init (&segment, GST_FORMAT_TIME);
+      gst_pad_push_event (rtp_mux->srcpad, gst_event_new_segment (&segment));
+      break;
+    }
+    default:
+      gst_pad_push_event (rtp_mux->srcpad, gst_event_ref (*event));
+      break;
+  }
+
+  return TRUE;
+}
+
+/* Chain handler: same flow as _chain_list() but for a single buffer. */
+static GstFlowReturn
+gst_rtp_mux_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstRTPMux *rtp_mux;
+  GstFlowReturn ret;
+  GstRTPMuxPadPrivate *padpriv;
+  gboolean drop;
+  gboolean changed = FALSE;
+  GstRTPBuffer rtpbuffer = GST_RTP_BUFFER_INIT;
+
+  rtp_mux = GST_RTP_MUX (parent);
+
+  if (gst_pad_check_reconfigure (rtp_mux->srcpad)) {
+    GstCaps *current_caps = gst_pad_get_current_caps (pad);
+
+    if (!gst_rtp_mux_setcaps (pad, rtp_mux, current_caps)) {
+      gst_pad_mark_reconfigure (rtp_mux->srcpad);
+      if (GST_PAD_IS_FLUSHING (rtp_mux->srcpad))
+        ret = GST_FLOW_FLUSHING;
+      else
+        ret = GST_FLOW_NOT_NEGOTIATED;
+      /* fix: current_caps was leaked on this error path; it may also be
+       * NULL when the pad has no caps yet, so guard the unref */
+      if (current_caps)
+        gst_caps_unref (current_caps);
+      gst_buffer_unref (buffer);
+      goto out;
+    }
+    gst_caps_unref (current_caps);
+  }
+
+  GST_OBJECT_LOCK (rtp_mux);
+  padpriv = gst_pad_get_element_private (pad);
+
+  if (!padpriv) {
+    /* pad is being released */
+    GST_OBJECT_UNLOCK (rtp_mux);
+    gst_buffer_unref (buffer);
+    return GST_FLOW_NOT_LINKED;
+  }
+
+  buffer = gst_buffer_make_writable (buffer);
+
+  if (!gst_rtp_buffer_map (buffer, GST_MAP_READWRITE, &rtpbuffer)) {
+    GST_OBJECT_UNLOCK (rtp_mux);
+    gst_buffer_unref (buffer);
+    GST_ERROR_OBJECT (rtp_mux, "Invalid RTP buffer");
+    return GST_FLOW_ERROR;
+  }
+
+  drop = !process_buffer_locked (rtp_mux, padpriv, &rtpbuffer);
+
+  gst_rtp_buffer_unmap (&rtpbuffer);
+
+  if (!drop) {
+    /* remember the pad that pushed last so sticky events are replayed
+     * whenever the active input changes */
+    if (pad != rtp_mux->last_pad) {
+      changed = TRUE;
+      g_clear_object (&rtp_mux->last_pad);
+      rtp_mux->last_pad = g_object_ref (pad);
+    }
+
+    if (GST_BUFFER_DURATION_IS_VALID (buffer) &&
+        GST_BUFFER_PTS_IS_VALID (buffer))
+      rtp_mux->last_stop = GST_BUFFER_PTS (buffer) +
+          GST_BUFFER_DURATION (buffer);
+    else
+      rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
+  }
+
+  GST_OBJECT_UNLOCK (rtp_mux);
+
+  if (changed)
+    gst_pad_sticky_events_foreach (pad, resend_events, rtp_mux);
+
+  if (drop) {
+    gst_buffer_unref (buffer);
+    ret = GST_FLOW_OK;
+  } else {
+    ret = gst_pad_push (rtp_mux->srcpad, buffer);
+  }
+
+out:
+  return ret;
+}
+
+/* Configures the src pad caps from one sink pad's caps: adopts downstream
+ * ssrc/timestamp-offset hints, records the sink pad's own timestamp
+ * offset, fills in the muxer's ssrc/seqnum-offset/timestamp-offset and
+ * pushes a stream-start event the first time. Returns FALSE when caps
+ * are NULL, not fixed, or rejected downstream. */
+static gboolean
+gst_rtp_mux_setcaps (GstPad * pad, GstRTPMux * rtp_mux, GstCaps * caps)
+{
+  GstStructure *structure;
+  gboolean ret = FALSE;
+  GstRTPMuxPadPrivate *padpriv;
+  GstCaps *peercaps;
+
+  if (caps == NULL)
+    return FALSE;
+
+  if (!gst_caps_is_fixed (caps))
+    return FALSE;
+
+  /* let downstream dictate ssrc and timestamp-offset when it cares */
+  peercaps = gst_pad_peer_query_caps (rtp_mux->srcpad, NULL);
+  if (peercaps) {
+    GstCaps *tcaps, *othercaps;        /* fix: stray double semicolon */
+    tcaps = gst_pad_get_pad_template_caps (pad);
+    othercaps = gst_caps_intersect_full (peercaps, tcaps,
+        GST_CAPS_INTERSECT_FIRST);
+
+    if (gst_caps_get_size (othercaps) > 0) {
+      structure = gst_caps_get_structure (othercaps, 0);
+      GST_OBJECT_LOCK (rtp_mux);
+      if (gst_structure_get_uint (structure, "ssrc", &rtp_mux->current_ssrc)) {
+        GST_INFO_OBJECT (pad, "Use downstream ssrc: %x", rtp_mux->current_ssrc);
+        rtp_mux->have_ssrc = TRUE;
+      }
+      if (gst_structure_get_uint (structure,
+              "timestamp-offset", &rtp_mux->ts_base)) {
+        GST_INFO_OBJECT (pad, "Use downstream timestamp-offset: %u",
+            rtp_mux->ts_base);
+      }
+      GST_OBJECT_UNLOCK (rtp_mux);
+    }
+
+    gst_caps_unref (othercaps);
+    gst_caps_unref (peercaps);
+    gst_caps_unref (tcaps);
+  }
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (!structure)
+    return FALSE;
+
+  GST_OBJECT_LOCK (rtp_mux);
+  padpriv = gst_pad_get_element_private (pad);
+  /* remember this input pad's own timestamp-offset so its timestamps can
+   * be rebased in gst_rtp_mux_readjust_rtp_timestamp_locked() */
+  if (padpriv &&
+      gst_structure_get_uint (structure, "timestamp-offset",
+          &padpriv->timestamp_offset)) {
+    padpriv->have_timestamp_offset = TRUE;
+  }
+
+  caps = gst_caps_copy (caps);
+
+  /* if we don't have a specified ssrc, first try to take one from the caps,
+     and if that fails, generate one */
+  if (rtp_mux->ssrc == DEFAULT_SSRC) {
+    if (rtp_mux->current_ssrc == DEFAULT_SSRC) {
+      if (!gst_structure_get_uint (structure, "ssrc", &rtp_mux->current_ssrc)) {
+        rtp_mux->current_ssrc = g_random_int ();
+        GST_INFO_OBJECT (rtp_mux, "Set random ssrc %x", rtp_mux->current_ssrc);
+      }
+    }
+  } else {
+    rtp_mux->current_ssrc = rtp_mux->ssrc;
+    GST_INFO_OBJECT (rtp_mux, "Set ssrc %x", rtp_mux->current_ssrc);
+  }
+
+  gst_caps_set_simple (caps,
+      "timestamp-offset", G_TYPE_UINT, rtp_mux->ts_base,
+      "seqnum-offset", G_TYPE_UINT, rtp_mux->seqnum_base,
+      "ssrc", G_TYPE_UINT, rtp_mux->current_ssrc, NULL);
+
+  GST_OBJECT_UNLOCK (rtp_mux);
+
+  if (rtp_mux->send_stream_start) {
+    gchar s_id[32];
+
+    /* stream-start (FIXME: create id based on input ids) */
+    g_snprintf (s_id, sizeof (s_id), "interleave-%08x", g_random_int ());
+    gst_pad_push_event (rtp_mux->srcpad, gst_event_new_stream_start (s_id));
+
+    rtp_mux->send_stream_start = FALSE;
+  }
+
+  GST_DEBUG_OBJECT (rtp_mux,
+      "setting caps %" GST_PTR_FORMAT " on src pad..", caps);
+  ret = gst_pad_set_caps (rtp_mux->srcpad, caps);
+
+  gst_caps_unref (caps);
+
+  return ret;
+}
+
+/* Strips every structure field except "clock-rate" (and also keeps
+ * "ssrc" when only_clock_rate is FALSE), so caps can be matched on
+ * those fields alone. */
+static void
+clear_caps (GstCaps * caps, gboolean only_clock_rate)
+{
+  gint i, j;
+
+  for (i = 0; i < gst_caps_get_size (caps); i++) {
+    GstStructure *s = gst_caps_get_structure (caps, i);
+
+    for (j = 0; j < gst_structure_n_fields (s); j++) {
+      const gchar *field = gst_structure_nth_field_name (s, j);
+      gboolean keep;
+
+      keep = (strcmp (field, "clock-rate") == 0) ||
+          (!only_clock_rate && strcmp (field, "ssrc") == 0);
+      if (!keep) {
+        gst_structure_remove_field (s, field);
+        j--;                    /* fields shifted down; revisit this index */
+      }
+    }
+  }
+}
+
+/* gst_iterator_fold callback: intersects the accumulated caps with each
+ * other sink pad's peer caps, reduced to clock-rate only. Folding stops
+ * (returns FALSE) once the accumulated caps become empty. */
+static gboolean
+same_clock_rate_fold (const GValue * item, GValue * ret, gpointer user_data)
+{
+  GstPad *mypad = user_data;
+  GstPad *sinkpad = g_value_get_object (item);
+  GstCaps *accumulated;
+  GstCaps *peer;
+
+  /* skip the pad whose caps we are computing */
+  if (sinkpad == mypad)
+    return TRUE;
+
+  accumulated = g_value_get_boxed (ret);
+  peer = gst_pad_peer_query_caps (sinkpad, accumulated);
+  if (peer == NULL) {
+    g_warning ("no peercaps");
+    return TRUE;
+  }
+
+  peer = gst_caps_make_writable (peer);
+  clear_caps (peer, TRUE);
+  g_value_take_boxed (ret, peer);
+
+  return !gst_caps_is_empty (peer);
+}
+
+/* Computes the caps for one sink pad: intersects the src peer caps (or
+ * the filter/template) with the pad template, then folds over all other
+ * sink pads so only caps with a compatible clock-rate survive. Returns
+ * a new caps reference (owned by the caller). */
+static GstCaps *
+gst_rtp_mux_getcaps (GstPad * pad, GstRTPMux * mux, GstCaps * filter)
+{
+  GstCaps *caps = NULL;
+  GstIterator *iter = NULL;
+  GValue v = { 0 };
+  GstIteratorResult res;
+  GstCaps *peercaps;
+  GstCaps *othercaps;
+  GstCaps *tcaps;
+  const GstStructure *structure;
+
+  peercaps = gst_pad_peer_query_caps (mux->srcpad, NULL);
+
+  if (peercaps) {
+    tcaps = gst_pad_get_pad_template_caps (pad);
+    othercaps = gst_caps_intersect_full (peercaps, tcaps,
+        GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (peercaps);
+  } else {
+    /* no peer on the src pad: constrain by the filter/template only */
+    tcaps = gst_pad_get_pad_template_caps (mux->srcpad);
+    if (filter)
+      othercaps = gst_caps_intersect_full (filter, tcaps,
+          GST_CAPS_INTERSECT_FIRST);
+    else
+      othercaps = gst_caps_copy (tcaps);
+  }
+  gst_caps_unref (tcaps);
+
+  GST_LOG_OBJECT (pad, "Intersected srcpad-peercaps and template caps: %"
+      GST_PTR_FORMAT, othercaps);
+
+  /* NOTE(review): othercaps may be empty if the intersection failed;
+   * gst_caps_get_structure (caps, 0) on empty caps is invalid -- confirm
+   * at least one structure is always present here. */
+  structure = gst_caps_get_structure (othercaps, 0);
+  /* adopt a downstream-provided ssrc when none was configured */
+  if (mux->ssrc == DEFAULT_SSRC) {
+    if (gst_structure_get_uint (structure, "ssrc", &mux->current_ssrc))
+      GST_DEBUG_OBJECT (pad, "Use downstream ssrc: %x", mux->current_ssrc);
+  }
+
+  clear_caps (othercaps, TRUE);
+
+  g_value_init (&v, GST_TYPE_CAPS);
+
+  /* fold over the other sink pads, restarting from a fresh accumulator
+   * whenever the pad list changes concurrently */
+  iter = gst_element_iterate_sink_pads (GST_ELEMENT (mux));
+  do {
+    gst_value_set_caps (&v, othercaps);
+    res = gst_iterator_fold (iter, same_clock_rate_fold, &v, pad);
+    gst_iterator_resync (iter);
+  } while (res == GST_ITERATOR_RESYNC);
+  gst_iterator_free (iter);
+
+  caps = gst_caps_intersect ((GstCaps *) gst_value_get_caps (&v), othercaps);
+
+  g_value_unset (&v);
+  gst_caps_unref (othercaps);
+
+  if (res == GST_ITERATOR_ERROR) {
+    gst_caps_unref (caps);
+    caps = gst_caps_new_empty ();
+  }
+
+
+  return caps;
+}
+
+/* Sink pad query handler: answers CAPS queries via _getcaps(), and
+ * lets everything else take the default path. */
+static gboolean
+gst_rtp_mux_sink_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstRTPMux *mux = GST_RTP_MUX (parent);
+  GstCaps *filter, *caps;
+
+  if (GST_QUERY_TYPE (query) != GST_QUERY_CAPS)
+    return gst_pad_query_default (pad, parent, query);
+
+  gst_query_parse_caps (query, &filter);
+  GST_LOG_OBJECT (pad, "Received caps-query with filter-caps: %"
+      GST_PTR_FORMAT, filter);
+  caps = gst_rtp_mux_getcaps (pad, mux, filter);
+  gst_query_set_caps_result (query, caps);
+  GST_LOG_OBJECT (mux, "Answering caps-query with caps: %"
+      GST_PTR_FORMAT, caps);
+  gst_caps_unref (caps);
+
+  return TRUE;
+}
+
+/* Property getter; all fields are read under the object lock. */
+static void
+gst_rtp_mux_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstRTPMux *mux = GST_RTP_MUX (object);
+
+  GST_OBJECT_LOCK (mux);
+  switch (prop_id) {
+    case PROP_TIMESTAMP_OFFSET:
+      g_value_set_int (value, mux->ts_offset);
+      break;
+    case PROP_SEQNUM_OFFSET:
+      g_value_set_int (value, mux->seqnum_offset);
+      break;
+    case PROP_SEQNUM:
+      g_value_set_uint (value, mux->seqnum);
+      break;
+    case PROP_SSRC:
+      g_value_set_uint (value, mux->ssrc);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+  GST_OBJECT_UNLOCK (mux);
+}
+
+/* Property setter. All fields are written under the object lock for
+ * consistency with _get_property() and the streaming thread, which read
+ * them while holding it (previously only the ssrc case locked). */
+static void
+gst_rtp_mux_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstRTPMux *rtp_mux;
+
+  rtp_mux = GST_RTP_MUX (object);
+
+  switch (prop_id) {
+    case PROP_TIMESTAMP_OFFSET:
+      GST_OBJECT_LOCK (rtp_mux);
+      rtp_mux->ts_offset = g_value_get_int (value);
+      GST_OBJECT_UNLOCK (rtp_mux);
+      break;
+    case PROP_SEQNUM_OFFSET:
+      GST_OBJECT_LOCK (rtp_mux);
+      rtp_mux->seqnum_offset = g_value_get_int (value);
+      GST_OBJECT_UNLOCK (rtp_mux);
+      break;
+    case PROP_SSRC:
+      GST_OBJECT_LOCK (rtp_mux);
+      rtp_mux->ssrc = g_value_get_uint (value);
+      rtp_mux->current_ssrc = rtp_mux->ssrc;
+      rtp_mux->have_ssrc = TRUE;
+      GST_DEBUG_OBJECT (rtp_mux, "ssrc prop set to %x", rtp_mux->ssrc);
+      GST_OBJECT_UNLOCK (rtp_mux);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Sink pad event handler. Only events arriving on the currently-active
+ * pad (the one that pushed the last buffer) are forwarded downstream;
+ * events from the other sink pads are consumed here and replayed later
+ * by resend_events() when that pad becomes active. */
+static gboolean
+gst_rtp_mux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRTPMux *mux = GST_RTP_MUX (parent);
+  gboolean is_pad;
+  gboolean ret;
+
+  /* snapshot "are we the active pad" under the object lock */
+  GST_OBJECT_LOCK (mux);
+  is_pad = (pad == mux->last_pad);
+  GST_OBJECT_UNLOCK (mux);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_event_parse_caps (event, &caps);
+      GST_LOG_OBJECT (pad, "Received caps-event with caps: %"
+          GST_PTR_FORMAT, caps);
+      /* caps events are always consumed; the src caps are set directly */
+      ret = gst_rtp_mux_setcaps (pad, mux, caps);
+      gst_event_unref (event);
+      return ret;
+    }
+    case GST_EVENT_FLUSH_STOP:
+    {
+      GST_OBJECT_LOCK (mux);
+      mux->last_stop = GST_CLOCK_TIME_NONE;
+      GST_OBJECT_UNLOCK (mux);
+      break;
+    }
+    case GST_EVENT_SEGMENT:
+    {
+      GstRTPMuxPadPrivate *padpriv;
+
+      GST_OBJECT_LOCK (mux);
+      padpriv = gst_pad_get_element_private (pad);
+
+      if (padpriv) {
+        /* remember the segment for running-time conversion of buffers */
+        gst_event_copy_segment (event, &padpriv->segment);
+      }
+      GST_OBJECT_UNLOCK (mux);
+
+      if (is_pad) {
+        /* downstream always sees a plain TIME segment */
+        GstSegment new_segment;
+        gst_segment_init (&new_segment, GST_FORMAT_TIME);
+        gst_event_unref (event);
+        event = gst_event_new_segment (&new_segment);
+      }
+      break;
+    }
+    default:
+      break;
+  }
+
+  if (is_pad) {
+    return gst_pad_push_event (mux->srcpad, event);
+  } else {
+    gst_event_unref (event);
+    return TRUE;
+  }
+}
+
+/* READY->PAUSED: (re)initializes the stream state -- seqnum base,
+ * timestamp base, last_stop and the active-pad reference. */
+static void
+gst_rtp_mux_ready_to_paused (GstRTPMux * rtp_mux)
+{
+  GST_OBJECT_LOCK (rtp_mux);
+
+  g_clear_object (&rtp_mux->last_pad);
+  rtp_mux->send_stream_start = TRUE;
+
+  /* seqnum base: random unless an offset was configured */
+  if (rtp_mux->seqnum_offset == -1)
+    rtp_mux->seqnum_base = g_random_int_range (0, G_MAXUINT16);
+  else
+    rtp_mux->seqnum_base = rtp_mux->seqnum_offset;
+  rtp_mux->seqnum = rtp_mux->seqnum_base;
+
+  /* RTP timestamp base: likewise */
+  if (rtp_mux->ts_offset == -1)
+    rtp_mux->ts_base = g_random_int ();
+  else
+    rtp_mux->ts_base = rtp_mux->ts_offset;
+
+  rtp_mux->last_stop = GST_CLOCK_TIME_NONE;
+
+  /* re-apply a user-configured ssrc */
+  if (rtp_mux->have_ssrc)
+    rtp_mux->current_ssrc = rtp_mux->ssrc;
+
+  GST_DEBUG_OBJECT (rtp_mux, "set timestamp-offset to %u", rtp_mux->ts_base);
+
+  GST_OBJECT_UNLOCK (rtp_mux);
+}
+
+/* change_state vfunc: resets stream state going up, drops the active
+ * pad reference coming down. */
+static GstStateChangeReturn
+gst_rtp_mux_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRTPMux *rtp_mux = GST_RTP_MUX (element);
+  GstStateChangeReturn ret;
+
+  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
+    gst_rtp_mux_ready_to_paused (rtp_mux);
+
+  ret = GST_ELEMENT_CLASS (gst_rtp_mux_parent_class)->change_state (element,
+      transition);
+
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    g_clear_object (&rtp_mux->last_pad);
+
+  return ret;
+}
diff --git a/gst/rtpmanager/gstrtpmux.h b/gst/rtpmanager/gstrtpmux.h
new file mode 100644
index 0000000000..82d00c7d32
--- /dev/null
+++ b/gst/rtpmanager/gstrtpmux.h
@@ -0,0 +1,96 @@
+/* RTP muxer element for GStreamer
+ *
+ * gstrtpmux.h:
+ *
+ * Copyright (C) <2007> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000,2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_MUX_H__
+#define __GST_RTP_MUX_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_MUX (gst_rtp_mux_get_type())
+#define GST_RTP_MUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_MUX, GstRTPMux))
+#define GST_RTP_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_MUX, GstRTPMuxClass))
+#define GST_RTP_MUX_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_MUX, GstRTPMuxClass))
+#define GST_IS_RTP_MUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_MUX))
+/* Bug fix: the parameter was declared as 'obj' while the expansion used
+ * 'klass', so any use of GST_IS_RTP_MUX_CLASS() failed to compile. */
+#define GST_IS_RTP_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_MUX))
+typedef struct _GstRTPMux GstRTPMux;
+typedef struct _GstRTPMuxClass GstRTPMuxClass;
+
+
+/* Per-sink-pad state, attached as the pad's element-private data
+ * (retrieved with gst_pad_get_element_private() in the event handler). */
+typedef struct
+{
+ /* timestamp_offset is only meaningful when have_timestamp_offset is TRUE */
+ gboolean have_timestamp_offset;
+ guint timestamp_offset;
+
+ /* copy of the most recent SEGMENT event received on this pad */
+ GstSegment segment;
+
+ gboolean priority;
+} GstRTPMuxPadPrivate;
+
+
+/* GstRTPMux:
+ *
+ * The opaque #GstRTPMux structure.
+ */
+struct _GstRTPMux
+{
+ GstElement element;
+
+ /* pad */
+ GstPad *srcpad;
+
+ /* bases chosen on READY->PAUSED (random, or from the *_offset fields) */
+ guint32 ts_base;
+ guint16 seqnum_base;
+
+ /* -1 requests a random base; any other value is used verbatim */
+ gint32 ts_offset;
+ gint16 seqnum_offset;
+ guint16 seqnum; /* protected by object lock */
+ guint ssrc;
+ guint current_ssrc; /* ssrc actually in use; seeded from ssrc when have_ssrc */
+ gboolean have_ssrc;
+
+ GstPad *last_pad; /* protected by object lock */
+
+ GstClockTime last_stop; /* reset to GST_CLOCK_TIME_NONE on READY->PAUSED */
+ gboolean send_stream_start; /* TRUE until a stream-start has been pushed */
+};
+
+struct _GstRTPMuxClass
+{
+ GstElementClass parent_class;
+
+ /* vfunc letting a subclass accept/reject a mapped RTP buffer;
+ * the "_locked" suffix suggests it is invoked with the object lock
+ * held -- NOTE(review): confirm against the caller in gstrtpmux.c */
+ gboolean (*accept_buffer_locked) (GstRTPMux *rtp_mux,
+ GstRTPMuxPadPrivate * padpriv, GstRTPBuffer * buffer);
+
+ /* vfunc letting a subclass intercept events arriving on the src pad */
+ gboolean (*src_event) (GstRTPMux *rtp_mux, GstEvent *event);
+};
+
+GType gst_rtp_mux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpmux);
+
+G_END_DECLS
+#endif /* __GST_RTP_MUX_H__ */
diff --git a/gst/rtpmanager/gstrtpptdemux.c b/gst/rtpmanager/gstrtpptdemux.c
new file mode 100644
index 0000000000..e588a3fa21
--- /dev/null
+++ b/gst/rtpmanager/gstrtpptdemux.c
@@ -0,0 +1,757 @@
+/*
+ * RTP Demux element
+ *
+ * Copyright (C) 2005 Nokia Corporation.
+ * @author Kai Vehmanen <kai.vehmanen@nokia.com>
+ *
+ * Loosely based on GStreamer gstdecodebin
+ * Copyright (C) <2004> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpptdemux
+ * @title: rtpptdemux
+ *
+ * rtpptdemux acts as a demuxer for RTP packets based on the payload type of
+ * the packets. Its main purpose is to allow an application to easily receive
+ * and decode an RTP stream with multiple payload types.
+ *
+ * For each payload type that is detected, a new pad will be created and the
+ * #GstRtpPtDemux::new-payload-type signal will be emitted. When the payload for
+ * the RTP stream changes, the #GstRtpPtDemux::payload-type-change signal will be
+ * emitted.
+ *
+ * The element will try to set complete and unique application/x-rtp caps
+ * on the output pads based on the result of the #GstRtpPtDemux::request-pt-map
+ * signal.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 udpsrc caps="application/x-rtp" ! rtpptdemux ! fakesink
+ * ]| Takes an RTP stream and send the RTP packets with the first detected
+ * payload type to fakesink, discarding the other payload types.
+ *
+ */
+
+/*
+ * Contributors:
+ * Andre Moreira Magalhaes <andre.magalhaes@indt.org.br>
+ */
+/*
+ * Status:
+ * - works with the test_rtpdemux.c tool
+ *
+ * Check:
+ * - is emitting a signal enough, or should we
+ * use GstEvent to notify downstream elements
+ * of the new packet... no?
+ *
+ * Notes:
+ * - emits event both for new PTs, and whenever
+ * a PT is changed
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpptdemux.h"
+
+/* generic templates: one always sink pad, plus a sometimes src pad that
+ * is instantiated per detected payload type ("src_%u", %u = pt) */
+static GstStaticPadTemplate rtp_pt_demux_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate rtp_pt_demux_src_template =
+GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp, " "payload = (int) [ 0, 255 ]")
+ );
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_pt_demux_debug);
+#define GST_CAT_DEFAULT gst_rtp_pt_demux_debug
+
+/*
+ * Item for storing GstPad<->pt pairs.
+ */
+struct _GstRtpPtDemuxPad
+{
+ GstPad *pad; /*< pointer to the actual pad */
+ gint pt; /*< RTP payload-type attached to pad */
+ gboolean newcaps; /*< TRUE after clear-pt-map: new caps must be requested before the next buffer */
+};
+
+/* signals */
+enum
+{
+ SIGNAL_REQUEST_PT_MAP,
+ SIGNAL_NEW_PAYLOAD_TYPE,
+ SIGNAL_PAYLOAD_TYPE_CHANGE,
+ SIGNAL_CLEAR_PT_MAP,
+ LAST_SIGNAL
+};
+
+/* properties */
+enum
+{
+ PROP_0,
+ PROP_IGNORED_PTS,
+};
+
+#define gst_rtp_pt_demux_parent_class parent_class
+G_DEFINE_TYPE (GstRtpPtDemux, gst_rtp_pt_demux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (rtpptdemux, "rtpptdemux", GST_RANK_NONE,
+ GST_TYPE_RTP_PT_DEMUX);
+
+static void gst_rtp_pt_demux_finalize (GObject * object);
+
+static void gst_rtp_pt_demux_release (GstRtpPtDemux * ptdemux);
+static gboolean gst_rtp_pt_demux_setup (GstRtpPtDemux * ptdemux);
+
+static gboolean gst_rtp_pt_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_rtp_pt_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static GstStateChangeReturn gst_rtp_pt_demux_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_rtp_pt_demux_clear_pt_map (GstRtpPtDemux * rtpdemux);
+
+static GstPad *find_pad_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt);
+
+static gboolean gst_rtp_pt_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+
+static guint gst_rtp_pt_demux_signals[LAST_SIGNAL] = { 0 };
+
+/* GObject setter; only the ignored-payload-types GstValueArray for now.
+ * NOTE(review): ignored_pts is read by gst_rtp_pt_demux_pt_is_ignored()
+ * on the streaming thread without the object lock -- confirm the
+ * property is only set before data flow, or add locking. */
+static void
+gst_rtp_pt_demux_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRtpPtDemux *rtpptdemux = GST_RTP_PT_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_IGNORED_PTS:
+ g_value_copy (value, &rtpptdemux->ignored_pts);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject getter; copies the stored ignored-payload-types array into
+ * the caller's GValue (g_value_copy is src -> dest). */
+static void
+gst_rtp_pt_demux_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRtpPtDemux *rtpptdemux = GST_RTP_PT_DEMUX (object);
+
+ switch (prop_id) {
+ case PROP_IGNORED_PTS:
+ g_value_copy (&rtpptdemux->ignored_pts, value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Class init: registers the four signals (request-pt-map,
+ * new-payload-type, payload-type-change, clear-pt-map), installs the
+ * ignored-payload-types property, pad templates, metadata and the
+ * change_state vfunc. */
+static void
+gst_rtp_pt_demux_class_init (GstRtpPtDemuxClass * klass)
+{
+ GObjectClass *gobject_klass;
+ GstElementClass *gstelement_klass;
+
+ gobject_klass = (GObjectClass *) klass;
+ gstelement_klass = (GstElementClass *) klass;
+
+ /**
+ * GstRtpPtDemux::request-pt-map:
+ * @demux: the object which received the signal
+ * @pt: the payload type
+ *
+ * Request the payload type as #GstCaps for @pt.
+ */
+ gst_rtp_pt_demux_signals[SIGNAL_REQUEST_PT_MAP] =
+ g_signal_new ("request-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpPtDemuxClass, request_pt_map),
+ NULL, NULL, NULL, GST_TYPE_CAPS, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpPtDemux::new-payload-type:
+ * @demux: the object which received the signal
+ * @pt: the payload type
+ * @pad: the pad with the new payload
+ *
+ * Emitted when a new payload type pad has been created in @demux.
+ */
+ gst_rtp_pt_demux_signals[SIGNAL_NEW_PAYLOAD_TYPE] =
+ g_signal_new ("new-payload-type", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpPtDemuxClass, new_payload_type),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, GST_TYPE_PAD);
+
+ /**
+ * GstRtpPtDemux::payload-type-change:
+ * @demux: the object which received the signal
+ * @pt: the new payload type
+ *
+ * Emitted when the payload type changed.
+ */
+ gst_rtp_pt_demux_signals[SIGNAL_PAYLOAD_TYPE_CHANGE] =
+ g_signal_new ("payload-type-change", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpPtDemuxClass,
+ payload_type_change), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+
+ /**
+ * GstRtpPtDemux::clear-pt-map:
+ * @demux: the object which received the signal
+ *
+ * The application can call this signal to instruct the element to discard the
+ * currently cached payload type map.
+ */
+ gst_rtp_pt_demux_signals[SIGNAL_CLEAR_PT_MAP] =
+ g_signal_new ("clear-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_ACTION | G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpPtDemuxClass,
+ clear_pt_map), NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ gobject_klass->set_property = gst_rtp_pt_demux_set_property;
+ gobject_klass->get_property = gst_rtp_pt_demux_get_property;
+
+ /**
+ * GstRtpPtDemux:ignored-payload-types:
+ *
+ * If specified, packets with an ignored payload type will be dropped,
+ * instead of causing a new pad to be exposed for these to be pushed on.
+ *
+ * This is for example useful to drop FEC protection packets, as they
+ * need to go through the #GstRtpJitterBuffer, but cease to be useful
+ * past that point, #GstRtpBin will make use of this property for that
+ * purpose.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_klass, PROP_IGNORED_PTS,
+ gst_param_spec_array ("ignored-payload-types",
+ "Ignored payload types",
+ "Packets with these payload types will be dropped",
+ g_param_spec_int ("payload-types", "payload-types", "Payload types",
+ 0, G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS),
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gobject_klass->finalize = gst_rtp_pt_demux_finalize;
+
+ gstelement_klass->change_state =
+ GST_DEBUG_FUNCPTR (gst_rtp_pt_demux_change_state);
+
+ klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_pt_demux_clear_pt_map);
+
+ gst_element_class_add_static_pad_template (gstelement_klass,
+ &rtp_pt_demux_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_klass,
+ &rtp_pt_demux_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_klass, "RTP Demux",
+ "Demux/Network/RTP",
+ "Parses codec streams transmitted in the same RTP session",
+ "Kai Vehmanen <kai.vehmanen@nokia.com>");
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_pt_demux_debug,
+ "rtpptdemux", 0, "RTP codec demuxer");
+
+ GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_pt_demux_chain);
+}
+
+/* Instance init: create and add the always sink pad with its chain and
+ * event functions, and initialize the ignored-pts GstValueArray. */
+static void
+gst_rtp_pt_demux_init (GstRtpPtDemux * ptdemux)
+{
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (ptdemux);
+
+ ptdemux->sink =
+ gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+ "sink"), "sink");
+ g_assert (ptdemux->sink != NULL);
+
+ gst_pad_set_chain_function (ptdemux->sink, gst_rtp_pt_demux_chain);
+ gst_pad_set_event_function (ptdemux->sink, gst_rtp_pt_demux_sink_event);
+
+ gst_element_add_pad (GST_ELEMENT (ptdemux), ptdemux->sink);
+
+ g_value_init (&ptdemux->ignored_pts, GST_TYPE_ARRAY);
+}
+
+/* GObject finalize: release any remaining src pads and the ignored-pts
+ * array, then chain up. */
+static void
+gst_rtp_pt_demux_finalize (GObject * object)
+{
+ gst_rtp_pt_demux_release (GST_RTP_PT_DEMUX (object));
+
+ g_value_unset (&GST_RTP_PT_DEMUX (object)->ignored_pts);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Figure out the caps for payload type @pt.
+ * Emits the request-pt-map signal so the application can supply caps;
+ * falls back to the current sink pad caps when the signal returns NULL.
+ * The result (if any) gets "payload" set, plus "ssrc" when the sink
+ * caps carried one. Returns a caps ref owned by the caller, or NULL. */
+static GstCaps *
+gst_rtp_pt_demux_get_caps (GstRtpPtDemux * rtpdemux, guint pt)
+{
+ guint32 ssrc = 0;
+ gboolean have_ssrc = FALSE;
+ GstCaps *caps, *sink_caps;
+ GValue ret = { 0 };
+ GValue args[2] = { {0}, {0} };
+
+ /* figure out the caps */
+ g_value_init (&args[0], GST_TYPE_ELEMENT);
+ g_value_set_object (&args[0], rtpdemux);
+ g_value_init (&args[1], G_TYPE_UINT);
+ g_value_set_uint (&args[1], pt);
+
+ g_value_init (&ret, GST_TYPE_CAPS);
+ g_value_set_boxed (&ret, NULL);
+
+ g_signal_emitv (args, gst_rtp_pt_demux_signals[SIGNAL_REQUEST_PT_MAP], 0,
+ &ret);
+
+ g_value_unset (&args[0]);
+ g_value_unset (&args[1]);
+ caps = g_value_dup_boxed (&ret);
+ sink_caps = gst_pad_get_current_caps (rtpdemux->sink);
+ g_value_unset (&ret);
+
+ if (caps == NULL) {
+ /* no answer from the app: reuse the sink caps ref directly */
+ caps = sink_caps;
+ } else if (sink_caps) {
+ /* app provided caps; still mine the sink caps for the ssrc */
+ have_ssrc =
+ gst_structure_get_uint (gst_caps_get_structure (sink_caps, 0), "ssrc",
+ &ssrc);
+ gst_caps_unref (sink_caps);
+ }
+
+ if (caps != NULL) {
+ caps = gst_caps_make_writable (caps);
+ gst_caps_set_simple (caps, "payload", G_TYPE_INT, pt, NULL);
+ if (have_ssrc)
+ gst_caps_set_simple (caps, "ssrc", G_TYPE_UINT, ssrc, NULL);
+ }
+
+ GST_DEBUG_OBJECT (rtpdemux, "pt %d, got caps %" GST_PTR_FORMAT, pt, caps);
+
+ return caps;
+}
+
+/* clear-pt-map action signal handler: mark every known src pad as
+ * needing fresh caps; the next buffer for each pt re-requests them. */
+static void
+gst_rtp_pt_demux_clear_pt_map (GstRtpPtDemux * rtpdemux)
+{
+ GSList *walk;
+
+ GST_OBJECT_LOCK (rtpdemux);
+ GST_DEBUG_OBJECT (rtpdemux, "clearing pt map");
+ for (walk = rtpdemux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *pad = walk->data;
+
+ pad->newcaps = TRUE;
+ }
+ GST_OBJECT_UNLOCK (rtpdemux);
+}
+
+/* Returns whether the pad for @pt is flagged as needing new caps.
+ * FALSE when no pad exists for @pt. (No break on match: the loop scans
+ * the whole list, so the last matching entry wins -- harmless as long
+ * as there is at most one pad per pt.) */
+static gboolean
+need_caps_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt)
+{
+ GSList *walk;
+ gboolean ret = FALSE;
+
+ GST_OBJECT_LOCK (rtpdemux);
+ for (walk = rtpdemux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *pad = walk->data;
+
+ if (pad->pt == pt) {
+ ret = pad->newcaps;
+ }
+ }
+ GST_OBJECT_UNLOCK (rtpdemux);
+
+ return ret;
+}
+
+
+/* Drop the "needs new caps" flag on the (first) pad matching @pt,
+ * after new caps have been pushed for it. */
+static void
+clear_newcaps_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt)
+{
+ GSList *walk;
+
+ GST_OBJECT_LOCK (rtpdemux);
+ for (walk = rtpdemux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *pad = walk->data;
+
+ if (pad->pt == pt) {
+ pad->newcaps = FALSE;
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (rtpdemux);
+}
+
+/* gst_pad_sticky_events_foreach() callback: replay the sink pad's
+ * sticky events onto a freshly created src pad. Relies on the
+ * GstEventType ordering: anything >= SEGMENT is forwarded, while
+ * STREAM_START and CAPS (which sort earlier) were already pushed. */
+static gboolean
+forward_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+ GstPad *srcpad = GST_PAD_CAST (user_data);
+
+ /* Stream start and caps have already been pushed */
+ if (GST_EVENT_TYPE (*event) >= GST_EVENT_SEGMENT)
+ gst_pad_push_event (srcpad, gst_event_ref (*event));
+
+ return TRUE;
+}
+
+/* Returns TRUE when @pt appears in the ignored-payload-types array.
+ * Runs on the streaming thread; reads ignored_pts without the object
+ * lock -- NOTE(review): safe only if the property is not changed during
+ * data flow, confirm with set_property callers. */
+static gboolean
+gst_rtp_pt_demux_pt_is_ignored (GstRtpPtDemux * ptdemux, guint8 pt)
+{
+ gboolean ret = FALSE;
+ guint i;
+
+ for (i = 0; i < gst_value_array_get_size (&ptdemux->ignored_pts); i++) {
+ const GValue *tmp = gst_value_array_get_value (&ptdemux->ignored_pts, i);
+
+ if (g_value_get_int (tmp) == pt) {
+ ret = TRUE;
+ break;
+ }
+ }
+
+ return ret;
+}
+
+/* Sink pad chain function.
+ * Reads the payload type from the RTP header, creates (and announces
+ * via new-payload-type) a src pad the first time a pt is seen, emits
+ * payload-type-change when the pt differs from the previous packet,
+ * refreshes caps if the pt map was cleared, and pushes the buffer to
+ * the pt's src pad. Ignored pts and unmappable buffers are dropped. */
+static GstFlowReturn
+gst_rtp_pt_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstRtpPtDemux *rtpdemux;
+ guint8 pt;
+ GstPad *srcpad;
+ GstCaps *caps;
+ GstRTPBuffer rtp = { NULL };
+
+ rtpdemux = GST_RTP_PT_DEMUX (parent);
+
+ if (!gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp))
+ goto invalid_buffer;
+
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+
+ if (gst_rtp_pt_demux_pt_is_ignored (rtpdemux, pt))
+ goto ignored;
+
+ GST_DEBUG_OBJECT (rtpdemux, "received buffer for pt %d", pt);
+
+ srcpad = find_pad_for_pt (rtpdemux, pt);
+ if (srcpad == NULL) {
+ /* new PT, create a src pad */
+ GstRtpPtDemuxPad *rtpdemuxpad;
+ GstElementClass *klass;
+ GstPadTemplate *templ;
+ gchar *padname;
+
+ caps = gst_rtp_pt_demux_get_caps (rtpdemux, pt);
+ if (!caps)
+ goto no_caps;
+
+ /* re-check: a request-pt-map handler may have updated the
+ * ignored-payload-types property */
+ if (gst_rtp_pt_demux_pt_is_ignored (rtpdemux, pt))
+ goto ignored;
+
+ klass = GST_ELEMENT_GET_CLASS (rtpdemux);
+ templ = gst_element_class_get_pad_template (klass, "src_%u");
+ padname = g_strdup_printf ("src_%u", pt);
+ srcpad = gst_pad_new_from_template (templ, padname);
+ gst_pad_use_fixed_caps (srcpad);
+ g_free (padname);
+ gst_pad_set_event_function (srcpad, gst_rtp_pt_demux_src_event);
+
+ GST_DEBUG_OBJECT (rtpdemux, "Adding pt=%d to the list.", pt);
+ rtpdemuxpad = g_slice_new0 (GstRtpPtDemuxPad);
+ rtpdemuxpad->pt = pt;
+ rtpdemuxpad->newcaps = FALSE;
+ rtpdemuxpad->pad = srcpad;
+ /* one ref stays in the list, the local ref is dropped at the end */
+ gst_object_ref (srcpad);
+ GST_OBJECT_LOCK (rtpdemux);
+ rtpdemux->srcpads = g_slist_append (rtpdemux->srcpads, rtpdemuxpad);
+ GST_OBJECT_UNLOCK (rtpdemux);
+
+ gst_pad_set_active (srcpad, TRUE);
+
+ /* First push the stream-start event, it must always come first */
+ gst_pad_push_event (srcpad,
+ gst_pad_get_sticky_event (rtpdemux->sink, GST_EVENT_STREAM_START, 0));
+
+ /* Then caps event is sent */
+ gst_pad_set_caps (srcpad, caps);
+ gst_caps_unref (caps);
+
+ /* First sticky events on sink pad are forwarded to the new src pad */
+ gst_pad_sticky_events_foreach (rtpdemux->sink, forward_sticky_events,
+ srcpad);
+
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpdemux), srcpad);
+
+ GST_DEBUG_OBJECT (rtpdemux, "emitting new-payload-type for pt %d", pt);
+ g_signal_emit (G_OBJECT (rtpdemux),
+ gst_rtp_pt_demux_signals[SIGNAL_NEW_PAYLOAD_TYPE], 0, pt, srcpad);
+ }
+
+ if (pt != rtpdemux->last_pt) {
+ gint emit_pt = pt;
+
+ /* our own signal with an extra flag that this is the only pad */
+ rtpdemux->last_pt = pt;
+ GST_DEBUG_OBJECT (rtpdemux, "emitting payload-type-changed for pt %d",
+ emit_pt);
+ g_signal_emit (G_OBJECT (rtpdemux),
+ gst_rtp_pt_demux_signals[SIGNAL_PAYLOAD_TYPE_CHANGE], 0, emit_pt);
+ }
+
+ /* pt map was cleared: request and push fresh caps before the data */
+ while (need_caps_for_pt (rtpdemux, pt)) {
+ GST_DEBUG_OBJECT (rtpdemux, "need new caps for %d", pt);
+ caps = gst_rtp_pt_demux_get_caps (rtpdemux, pt);
+ if (!caps)
+ goto no_caps;
+
+ clear_newcaps_for_pt (rtpdemux, pt);
+
+ gst_pad_set_caps (srcpad, caps);
+ gst_caps_unref (caps);
+ }
+
+ /* push to srcpad */
+ ret = gst_pad_push (srcpad, buf);
+
+ gst_object_unref (srcpad);
+
+ return ret;
+
+ignored:
+ {
+ GST_DEBUG_OBJECT (rtpdemux, "Dropped buffer for pt %d", pt);
+ gst_buffer_unref (buf);
+ return GST_FLOW_OK;
+ }
+
+ /* ERRORS */
+invalid_buffer:
+ {
+ /* this should not be fatal */
+ GST_ELEMENT_WARNING (rtpdemux, STREAM, DEMUX, (NULL),
+ ("Dropping invalid RTP payload"));
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+no_caps:
+ {
+ GST_ELEMENT_ERROR (rtpdemux, STREAM, DECODE, (NULL),
+ ("Could not get caps for payload"));
+ gst_buffer_unref (buf);
+ /* srcpad is NULL when we came here from the new-pad branch */
+ if (srcpad)
+ gst_object_unref (srcpad);
+ return GST_FLOW_ERROR;
+ }
+}
+
+/* Look up the src pad handling payload type @pt.
+ * Returns a new reference to the pad, or NULL if no pad exists yet. */
+static GstPad *
+find_pad_for_pt (GstRtpPtDemux * rtpdemux, guint8 pt)
+{
+ GstPad *respad = NULL;
+ GSList *walk;
+
+ GST_OBJECT_LOCK (rtpdemux);
+ for (walk = rtpdemux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *pad = walk->data;
+
+ if (pad->pt == pt) {
+ respad = gst_object_ref (pad->pad);
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (rtpdemux);
+
+ return respad;
+}
+
+/* Sink pad event handler.
+ * CAPS events are swallowed (the pt map is cleared; per-pt caps are
+ * regenerated on the next buffer). GstRTPPacketLost custom events are
+ * routed to the pad of the last seen pt (dropped if none exists yet --
+ * note res then stays FALSE even though the event was consumed).
+ * Everything else takes the default path. */
+static gboolean
+gst_rtp_pt_demux_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstRtpPtDemux *rtpdemux;
+ gboolean res = FALSE;
+
+ rtpdemux = GST_RTP_PT_DEMUX (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ gst_rtp_pt_demux_clear_pt_map (rtpdemux);
+ /* don't forward the event, we cleared the ptmap and on the next buffer we
+ * will add the pt to the caps and push a new caps event */
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ case GST_EVENT_CUSTOM_DOWNSTREAM:
+ {
+ const GstStructure *s;
+
+ s = gst_event_get_structure (event);
+
+ if (gst_structure_has_name (s, "GstRTPPacketLost")) {
+ GstPad *srcpad = find_pad_for_pt (rtpdemux, rtpdemux->last_pt);
+
+ if (srcpad) {
+ res = gst_pad_push_event (srcpad, event);
+ gst_object_unref (srcpad);
+ } else {
+ gst_event_unref (event);
+ }
+
+ } else {
+ res = gst_pad_event_default (pad, parent, event);
+ }
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, parent, event);
+ break;
+ }
+
+ return res;
+}
+
+
+/* Src pad event handler.
+ * For upstream/both custom events lacking a "payload" field, stamp in
+ * the pt of the src pad the event arrived on (so upstream elements can
+ * tell which stream it concerns), then forward via the default path. */
+static gboolean
+gst_rtp_pt_demux_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstRtpPtDemux *demux;
+ const GstStructure *s;
+
+ demux = GST_RTP_PT_DEMUX (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ case GST_EVENT_CUSTOM_BOTH:
+ case GST_EVENT_CUSTOM_BOTH_OOB:
+ s = gst_event_get_structure (event);
+ if (s && !gst_structure_has_field (s, "payload")) {
+ GSList *walk;
+
+ GST_OBJECT_LOCK (demux);
+ for (walk = demux->srcpads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *dpad = (GstRtpPtDemuxPad *) walk->data;
+
+ if (dpad->pad == pad) {
+ GstStructure *ws;
+
+ /* make the event writable before touching its structure */
+ event =
+ GST_EVENT_CAST (gst_mini_object_make_writable
+ (GST_MINI_OBJECT_CAST (event)));
+ ws = gst_event_writable_structure (event);
+ gst_structure_set (ws, "payload", G_TYPE_UINT, dpad->pt, NULL);
+ break;
+ }
+ }
+ GST_OBJECT_UNLOCK (demux);
+ }
+ break;
+ default:
+ break;
+ }
+
+ return gst_pad_event_default (pad, parent, event);
+}
+
+/*
+ * Reserves resources for the object.
+ */
+/* Reserve resources for the object: empty pad list and an out-of-range
+ * last_pt (0xFFFF) so the first packet always triggers
+ * payload-type-change. Always returns TRUE. */
+static gboolean
+gst_rtp_pt_demux_setup (GstRtpPtDemux * ptdemux)
+{
+ ptdemux->srcpads = NULL;
+ ptdemux->last_pt = 0xFFFF;
+
+ return TRUE;
+}
+
+/*
+ * Free resources for the object.
+ */
+/* Free resources for the object: detach the pad list under the lock,
+ * then deactivate/remove each src pad outside the lock (removing a pad
+ * can dispatch callbacks that must not run with the lock held). */
+static void
+gst_rtp_pt_demux_release (GstRtpPtDemux * ptdemux)
+{
+ GSList *tmppads;
+ GSList *walk;
+
+ GST_OBJECT_LOCK (ptdemux);
+ tmppads = ptdemux->srcpads;
+ ptdemux->srcpads = NULL;
+ GST_OBJECT_UNLOCK (ptdemux);
+
+ for (walk = tmppads; walk; walk = g_slist_next (walk)) {
+ GstRtpPtDemuxPad *pad = walk->data;
+
+ gst_pad_set_active (pad->pad, FALSE);
+ /* removing the pad drops the list's reference to it */
+ gst_element_remove_pad (GST_ELEMENT_CAST (ptdemux), pad->pad);
+ g_slice_free (GstRtpPtDemuxPad, pad);
+ }
+ g_slist_free (tmppads);
+}
+
+/* GstElement::change_state vfunc.
+ * Sets up the pt list on NULL->READY and releases all src pads when
+ * going back down to READY/NULL.
+ * Bug fix: a setup failure used to be stored in 'ret' and then
+ * unconditionally overwritten by the parent change_state call below;
+ * fail the transition immediately instead. */
+static GstStateChangeReturn
+gst_rtp_pt_demux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstRtpPtDemux *ptdemux;
+
+ ptdemux = GST_RTP_PT_DEMUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (gst_rtp_pt_demux_setup (ptdemux) != TRUE)
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ gst_rtp_pt_demux_release (ptdemux);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
diff --git a/gst/rtpmanager/gstrtpptdemux.h b/gst/rtpmanager/gstrtpptdemux.h
new file mode 100644
index 0000000000..9ba4f77676
--- /dev/null
+++ b/gst/rtpmanager/gstrtpptdemux.h
@@ -0,0 +1,65 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_PT_DEMUX_H__
+#define __GST_RTP_PT_DEMUX_H__
+
+#include <gst/gst.h>
+
+#define GST_TYPE_RTP_PT_DEMUX (gst_rtp_pt_demux_get_type())
+#define GST_RTP_PT_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_PT_DEMUX,GstRtpPtDemux))
+#define GST_RTP_PT_DEMUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_PT_DEMUX,GstRtpPtDemuxClass))
+#define GST_IS_RTP_PT_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_PT_DEMUX))
+#define GST_IS_RTP_PT_DEMUX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_PT_DEMUX))
+
+typedef struct _GstRtpPtDemux GstRtpPtDemux;
+typedef struct _GstRtpPtDemuxClass GstRtpPtDemuxClass;
+typedef struct _GstRtpPtDemuxPad GstRtpPtDemuxPad;
+
+struct _GstRtpPtDemux
+{
+ GstElement parent; /*< parent class */
+
+ GstPad *sink; /*< the sink pad */
+ guint16 last_pt; /*< pt of the last packet 0xFFFF if none */
+ GSList *srcpads; /*< a linked list of GstRtpPtDemuxPad objects; protected by the object lock */
+ GValue ignored_pts; /*< a GstValueArray of payload types that will not have pads created for */
+};
+
+struct _GstRtpPtDemuxClass
+{
+ GstElementClass parent_class;
+
+ /* get the caps for pt (request-pt-map signal handler slot) */
+ GstCaps* (*request_pt_map) (GstRtpPtDemux *demux, guint pt);
+
+ /* signal emitted when a new PT is found from the incoming stream */
+ void (*new_payload_type) (GstRtpPtDemux *demux, guint pt, GstPad * pad);
+
+ /* signal emitted when the payload type changes */
+ void (*payload_type_change) (GstRtpPtDemux *demux, guint pt);
+
+ /* clear-pt-map action signal slot: invalidate all cached per-pt caps */
+ void (*clear_pt_map) (GstRtpPtDemux *demux);
+};
+
+GType gst_rtp_pt_demux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpptdemux);
+
+#endif /* __GST_RTP_PT_DEMUX_H__ */
diff --git a/gst/rtpmanager/gstrtprtxqueue.c b/gst/rtpmanager/gstrtprtxqueue.c
new file mode 100644
index 0000000000..e098eb9c29
--- /dev/null
+++ b/gst/rtpmanager/gstrtprtxqueue.c
@@ -0,0 +1,520 @@
+/* RTP Retransmission queue element for GStreamer
+ *
+ * gstrtprtxqueue.c:
+ *
+ * Copyright (C) 2013 Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtprtxqueue
+ * @title: rtprtxqueue
+ *
+ * rtprtxqueue maintains a queue of transmitted RTP packets, up to a
+ * configurable limit (see #GstRTPRtxQueue:max-size-time,
+ * #GstRTPRtxQueue:max-size-packets), and retransmits them upon request
+ * from the downstream rtpsession (GstRTPRetransmissionRequest event).
+ *
+ * This element is similar to rtprtxsend, but it has differences:
+ * - Retransmission from rtprtxqueue is not RFC 4588 compliant. The
+ * retransmitted packets have the same ssrc and payload type as the original
+ * stream.
+ * - As a side-effect of the above, rtprtxqueue does not require the use of
+ * rtprtxreceive on the receiving end. rtpjitterbuffer alone is able to
+ * reconstruct the stream.
+ * - Retransmission from rtprtxqueue happens as soon as the next regular flow
+ * packet is chained, while rtprtxsend retransmits as soon as the retransmission
+ * event is received, using a helper thread.
+ * - rtprtxqueue can be used with rtpbin without the need of hooking to its
+ * #GstRtpBin::request-aux-sender signal, which means it can be used with
+ * rtpbin using gst-launch.
+ *
+ * See also #GstRtpRtxSend, #GstRtpRtxReceive
+ *
+ * # Example pipelines
+ *
+ * |[
+ * gst-launch-1.0 rtpbin name=b rtp-profile=avpf \
+ * audiotestsrc is-live=true ! opusenc ! rtpopuspay pt=96 ! rtprtxqueue ! b.send_rtp_sink_0 \
+ * b.send_rtp_src_0 ! identity drop-probability=0.01 ! udpsink host="127.0.0.1" port=5000 \
+ * udpsrc port=5001 ! b.recv_rtcp_sink_0 \
+ * b.send_rtcp_src_0 ! udpsink host="127.0.0.1" port=5002 sync=false async=false
+ * ]|
+ * Sender pipeline
+ *
+ * |[
+ * gst-launch-1.0 rtpbin name=b rtp-profile=avpf do-retransmission=true \
+ * udpsrc port=5000 caps="application/x-rtp,media=(string)audio,clock-rate=(int)48000,encoding-name=(string)OPUS,payload=(int)96" ! \
+ * b.recv_rtp_sink_0 \
+ * b. ! rtpopusdepay ! opusdec ! audioconvert ! audioresample ! autoaudiosink \
+ * udpsrc port=5002 ! b.recv_rtcp_sink_0 \
+ * b.send_rtcp_src_0 ! udpsink host="127.0.0.1" port=5001 sync=false async=false
+ * ]|
+ * Receiver pipeline
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <string.h>
+
+#include "gstrtprtxqueue.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_rtx_queue_debug);
+#define GST_CAT_DEFAULT gst_rtp_rtx_queue_debug
+
+#define DEFAULT_MAX_SIZE_TIME 0
+#define DEFAULT_MAX_SIZE_PACKETS 100
+
+enum
+{
+ PROP_0,
+ PROP_MAX_SIZE_TIME,
+ PROP_MAX_SIZE_PACKETS,
+ PROP_REQUESTS,
+ PROP_FULFILLED_REQUESTS,
+};
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static gboolean gst_rtp_rtx_queue_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_rtp_rtx_queue_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_rtp_rtx_queue_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstFlowReturn gst_rtp_rtx_queue_chain_list (GstPad * pad,
+ GstObject * parent, GstBufferList * list);
+
+static GstStateChangeReturn gst_rtp_rtx_queue_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_rtp_rtx_queue_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_rtx_queue_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_rtx_queue_finalize (GObject * object);
+
+G_DEFINE_TYPE_WITH_CODE (GstRTPRtxQueue, gst_rtp_rtx_queue, GST_TYPE_ELEMENT,
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_queue_debug, "rtprtxqueue", 0,
+ "rtp retransmission queue"));
+GST_ELEMENT_REGISTER_DEFINE (rtprtxqueue, "rtprtxqueue", GST_RANK_NONE,
+ GST_TYPE_RTP_RTX_QUEUE);
+
+static void
+gst_rtp_rtx_queue_class_init (GstRTPRtxQueueClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->get_property = gst_rtp_rtx_queue_get_property;
+  gobject_class->set_property = gst_rtp_rtx_queue_set_property;
+  gobject_class->finalize = gst_rtp_rtx_queue_finalize;
+
+  /* time-based queue limit; fix: the nick previously read "Max Size Times" */
+  g_object_class_install_property (gobject_class, PROP_MAX_SIZE_TIME,
+      g_param_spec_uint ("max-size-time", "Max Size Time",
+          "Amount of ms to queue (0 = unlimited)", 0, G_MAXUINT,
+          DEFAULT_MAX_SIZE_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /* packet-count based queue limit */
+  g_object_class_install_property (gobject_class, PROP_MAX_SIZE_PACKETS,
+      g_param_spec_uint ("max-size-packets", "Max Size Packets",
+          "Amount of packets to queue (0 = unlimited)", 0, G_MAXUINT,
+          DEFAULT_MAX_SIZE_PACKETS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /* read-only statistics counters */
+  g_object_class_install_property (gobject_class, PROP_REQUESTS,
+      g_param_spec_uint ("requests", "Requests",
+          "Total number of retransmission requests", 0, G_MAXUINT,
+          0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_FULFILLED_REQUESTS,
+      g_param_spec_uint ("fulfilled-requests", "Fulfilled Requests",
+          "Number of fulfilled retransmission requests", 0, G_MAXUINT,
+          0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP Retransmission Queue", "Codec",
+      "Keep RTP packets in a queue for retransmission",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_queue_change_state);
+}
+
+/* Drop all stored packets, all pending retransmissions, and clear the
+ * statistics counters.  The @full flag is currently unused; the reset is
+ * always complete. */
+static void
+gst_rtp_rtx_queue_reset (GstRTPRtxQueue * rtx, gboolean full)
+{
+  g_mutex_lock (&rtx->lock);
+  /* NOTE(review): the queue can also hold GstEvents (segment events pushed
+   * in the sink event handler); unreffing them through gst_buffer_unref()
+   * presumably works because both are GstMiniObjects -- confirm */
+  g_queue_foreach (rtx->queue, (GFunc) gst_buffer_unref, NULL);
+  g_queue_clear (rtx->queue);
+  g_list_foreach (rtx->pending, (GFunc) gst_buffer_unref, NULL);
+  g_list_free (rtx->pending);
+  rtx->pending = NULL;
+  rtx->n_requests = 0;
+  rtx->n_fulfilled_requests = 0;
+  g_mutex_unlock (&rtx->lock);
+}
+
+static void
+gst_rtp_rtx_queue_finalize (GObject * object)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (object);
+
+  /* reset drops all queued buffers and the pending list, so only the
+   * (now empty) queue itself and the mutex remain to be released */
+  gst_rtp_rtx_queue_reset (rtx, TRUE);
+  g_queue_free (rtx->queue);
+  g_mutex_clear (&rtx->lock);
+
+  G_OBJECT_CLASS (gst_rtp_rtx_queue_parent_class)->finalize (object);
+}
+
+static void
+gst_rtp_rtx_queue_init (GstRTPRtxQueue * rtx)
+{
+  GstElementClass *klass = GST_ELEMENT_GET_CLASS (rtx);
+
+  /* src pad: proxies caps and allocation queries; its event function
+   * intercepts the upstream GstRTPRetransmissionRequest events */
+  rtx->srcpad =
+      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+          "src"), "src");
+  GST_PAD_SET_PROXY_CAPS (rtx->srcpad);
+  GST_PAD_SET_PROXY_ALLOCATION (rtx->srcpad);
+  gst_pad_set_event_function (rtx->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_queue_src_event));
+  gst_element_add_pad (GST_ELEMENT (rtx), rtx->srcpad);
+
+  /* sink pad: receives the regular RTP flow, both as single buffers and
+   * as buffer lists */
+  rtx->sinkpad =
+      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+          "sink"), "sink");
+  GST_PAD_SET_PROXY_CAPS (rtx->sinkpad);
+  GST_PAD_SET_PROXY_ALLOCATION (rtx->sinkpad);
+  gst_pad_set_event_function (rtx->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_queue_sink_event));
+  gst_pad_set_chain_function (rtx->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_queue_chain));
+  gst_pad_set_chain_list_function (rtx->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_queue_chain_list));
+  gst_element_add_pad (GST_ELEMENT (rtx), rtx->sinkpad);
+
+  /* queue of kept packets (newest at the head) and its protecting lock */
+  rtx->queue = g_queue_new ();
+  g_mutex_init (&rtx->lock);
+
+  rtx->max_size_time = DEFAULT_MAX_SIZE_TIME;
+  rtx->max_size_packets = DEFAULT_MAX_SIZE_PACKETS;
+}
+
+/* closure used while walking the queue looking for the packet with a
+ * requested sequence number */
+typedef struct
+{
+  GstRTPRtxQueue *rtx;
+  guint seqnum;                 /* seqnum being searched for */
+  gboolean found;               /* set once a match was put on the pending list */
+} RTXData;
+
+/* GFunc for walking the stored queue: when the entry is an RTP buffer whose
+ * sequence number matches the requested one, take a ref and put it on the
+ * pending list for retransmission.  Called with rtx->lock held. */
+static void
+push_seqnum (GstBuffer * buffer, RTXData * data)
+{
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  guint16 seq;
+
+  /* stop scanning once a match was already recorded */
+  if (data->found)
+    return;
+
+  /* the queue can also hold events; skip anything that is not a mappable
+   * RTP buffer */
+  if (!GST_IS_BUFFER (buffer))
+    return;
+  if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp))
+    return;
+
+  seq = gst_rtp_buffer_get_seq (&rtp);
+  gst_rtp_buffer_unmap (&rtp);
+
+  if (seq != data->seqnum)
+    return;
+
+  data->found = TRUE;
+  GST_DEBUG_OBJECT (data->rtx, "found %d", seq);
+  data->rtx->pending = g_list_prepend (data->rtx->pending,
+      gst_buffer_ref (buffer));
+}
+
+/* Intercept GstRTPRetransmissionRequest custom upstream events coming from
+ * downstream: search the stored queue for the requested seqnum and, when
+ * found, move a ref of that packet onto the pending list.  The actual push
+ * of pending packets happens on the next chain call. */
+static gboolean
+gst_rtp_rtx_queue_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (parent);
+  gboolean res;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CUSTOM_UPSTREAM:
+    {
+      const GstStructure *s;
+
+      s = gst_event_get_structure (event);
+      if (gst_structure_has_name (s, "GstRTPRetransmissionRequest")) {
+        guint seqnum;
+        RTXData data;
+
+        if (!gst_structure_get_uint (s, "seqnum", &seqnum))
+          seqnum = -1;
+
+        GST_DEBUG_OBJECT (rtx, "request %d", seqnum);
+
+        g_mutex_lock (&rtx->lock);
+        data.rtx = rtx;
+        data.seqnum = seqnum;
+        data.found = FALSE;
+        rtx->n_requests += 1;
+        g_queue_foreach (rtx->queue, (GFunc) push_seqnum, &data);
+        g_mutex_unlock (&rtx->lock);
+
+        /* the request is fully handled here; it is not forwarded upstream */
+        gst_event_unref (event);
+        res = TRUE;
+      } else {
+        res = gst_pad_event_default (pad, parent, event);
+      }
+      break;
+    }
+    default:
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+  return res;
+}
+
+static gboolean
+gst_rtp_rtx_queue_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (parent);
+  gboolean res;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEGMENT:
+    {
+      /* remember the newest segment and also keep a ref of the event in the
+       * queue, so that buffers that were queued behind it can later be
+       * converted to running time with the segment that applied to them
+       * (see get_ts_diff) */
+      g_mutex_lock (&rtx->lock);
+      gst_event_copy_segment (event, &rtx->head_segment);
+      g_queue_push_head (rtx->queue, gst_event_ref (event));
+      g_mutex_unlock (&rtx->lock);
+      /* fall through */
+    }
+    default:
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+  return res;
+}
+
+/* GFunc callback for the pending list: account for a fulfilled request and
+ * push the retransmitted packet downstream.  The flow return cannot be
+ * propagated through the GFunc signature and is ignored. */
+static void
+do_push (GstBuffer * buffer, GstRTPRtxQueue * rtx)
+{
+  rtx->n_fulfilled_requests += 1;
+  gst_pad_push (rtx->srcpad, buffer);
+}
+
+/* Return the running-time distance, in milliseconds, between the newest
+ * (head) and oldest (tail) entry of the queue.  Must be called with
+ * rtx->lock held.  As a side effect, segment events that have drifted to
+ * the tail are popped and recorded as the tail segment.  Assumes the head
+ * entry is a buffer (shrink_queue runs right after a buffer was pushed). */
+static guint32
+get_ts_diff (GstRTPRtxQueue * rtx)
+{
+  GstClockTime high_ts, low_ts;
+  GstClockTimeDiff result;
+  GstBuffer *high_buf, *low_buf;
+
+  high_buf = g_queue_peek_head (rtx->queue);
+
+  /* drop segment events from the tail, remembering the segment that applies
+   * to the remaining tail buffers */
+  while (GST_IS_EVENT ((low_buf = g_queue_peek_tail (rtx->queue)))) {
+    GstEvent *event = g_queue_pop_tail (rtx->queue);
+    gst_event_copy_segment (event, &rtx->tail_segment);
+    gst_event_unref (event);
+  }
+
+  /* empty queue or a single entry spans no time */
+  if (!high_buf || !low_buf || high_buf == low_buf)
+    return 0;
+
+  high_ts = GST_BUFFER_TIMESTAMP (high_buf);
+  low_ts = GST_BUFFER_TIMESTAMP (low_buf);
+
+  /* convert each timestamp with the segment that was in effect for it */
+  high_ts = gst_segment_to_running_time (&rtx->head_segment, GST_FORMAT_TIME,
+      high_ts);
+  low_ts = gst_segment_to_running_time (&rtx->tail_segment, GST_FORMAT_TIME,
+      low_ts);
+
+  result = high_ts - low_ts;
+
+  /* return value in ms instead of ns */
+  return (guint32) gst_util_uint64_scale_int (result, 1, GST_MSECOND);
+}
+
+/* Must be called with rtx->lock.
+ * Drop entries from the tail (oldest side) until the queue satisfies the
+ * configured packet-count and time limits (0 means unlimited). */
+static void
+shrink_queue (GstRTPRtxQueue * rtx)
+{
+  if (rtx->max_size_packets) {
+    /* NOTE(review): the popped tail entry could be a segment event, which is
+     * released via gst_buffer_unref(); presumably fine since both are
+     * GstMiniObjects -- confirm */
+    while (g_queue_get_length (rtx->queue) > rtx->max_size_packets)
+      gst_buffer_unref (g_queue_pop_tail (rtx->queue));
+  }
+  if (rtx->max_size_time) {
+    /* get_ts_diff() pops trailing segment events as a side effect, so the
+     * tail is a buffer whenever this loop body runs */
+    while (get_ts_diff (rtx) > rtx->max_size_time)
+      gst_buffer_unref (g_queue_pop_tail (rtx->queue));
+  }
+}
+
+/* Store the incoming packet for possible retransmission, flush any pending
+ * retransmissions, then forward the packet downstream. */
+static GstFlowReturn
+gst_rtp_rtx_queue_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (parent);
+  GList *rtx_packets;
+
+  /* keep a ref of the packet and trim the queue to its configured limits */
+  g_mutex_lock (&rtx->lock);
+  g_queue_push_head (rtx->queue, gst_buffer_ref (buffer));
+  shrink_queue (rtx);
+  rtx_packets = rtx->pending;
+  rtx->pending = NULL;
+  g_mutex_unlock (&rtx->lock);
+
+  /* send the requested retransmissions oldest-first, then the new packet */
+  rtx_packets = g_list_reverse (rtx_packets);
+  g_list_foreach (rtx_packets, (GFunc) do_push, rtx);
+  g_list_free (rtx_packets);
+
+  return gst_pad_push (rtx->srcpad, buffer);
+}
+
+/* GstBufferListFunc: keep a ref of every buffer of the list in the queue
+ * (user_data), newest at the head.  Always continues iteration. */
+static gboolean
+push_to_queue (GstBuffer ** buffer, guint idx, gpointer user_data)
+{
+  g_queue_push_head ((GQueue *) user_data, gst_buffer_ref (*buffer));
+  return TRUE;
+}
+
+/* Buffer-list variant of the chain function: store a ref of every buffer of
+ * the list, flush pending retransmissions, then forward the whole list. */
+static GstFlowReturn
+gst_rtp_rtx_queue_chain_list (GstPad * pad, GstObject * parent,
+    GstBufferList * list)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (parent);
+  GList *rtx_packets;
+
+  g_mutex_lock (&rtx->lock);
+  gst_buffer_list_foreach (list, push_to_queue, rtx->queue);
+  shrink_queue (rtx);
+  rtx_packets = rtx->pending;
+  rtx->pending = NULL;
+  g_mutex_unlock (&rtx->lock);
+
+  /* send the requested retransmissions oldest-first, then the new list */
+  rtx_packets = g_list_reverse (rtx_packets);
+  g_list_foreach (rtx_packets, (GFunc) do_push, rtx);
+  g_list_free (rtx_packets);
+
+  return gst_pad_push_list (rtx->srcpad, list);
+}
+
+static void
+gst_rtp_rtx_queue_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (object);
+
+  /* NOTE(review): fields are read without taking rtx->lock; for these plain
+   * guint counters this is presumably tolerated racy access -- confirm */
+  switch (prop_id) {
+    case PROP_MAX_SIZE_TIME:
+      g_value_set_uint (value, rtx->max_size_time);
+      break;
+    case PROP_MAX_SIZE_PACKETS:
+      g_value_set_uint (value, rtx->max_size_packets);
+      break;
+    case PROP_REQUESTS:
+      g_value_set_uint (value, rtx->n_requests);
+      break;
+    case PROP_FULFILLED_REQUESTS:
+      g_value_set_uint (value, rtx->n_fulfilled_requests);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_rtp_rtx_queue_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (object);
+
+  /* NOTE(review): fields are written without taking rtx->lock; shrink_queue
+   * may observe a half-updated pair of limits -- presumably harmless, confirm */
+  switch (prop_id) {
+    case PROP_MAX_SIZE_TIME:
+      rtx->max_size_time = g_value_get_uint (value);
+      break;
+    case PROP_MAX_SIZE_PACKETS:
+      rtx->max_size_packets = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Standard state-change handler: chain up to the parent class, then drop all
+ * queued data when leaving PAUSED. */
+static GstStateChangeReturn
+gst_rtp_rtx_queue_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRTPRtxQueue *rtx = GST_RTP_RTX_QUEUE (element);
+  GstStateChangeReturn ret;
+
+  ret =
+      GST_ELEMENT_CLASS (gst_rtp_rtx_queue_parent_class)->change_state (element,
+      transition);
+
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_rtp_rtx_queue_reset (rtx, TRUE);
+
+  return ret;
+}
diff --git a/gst/rtpmanager/gstrtprtxqueue.h b/gst/rtpmanager/gstrtprtxqueue.h
new file mode 100644
index 0000000000..92269732e4
--- /dev/null
+++ b/gst/rtpmanager/gstrtprtxqueue.h
@@ -0,0 +1,80 @@
+/* RTP Retransmission queue element for GStreamer
+ *
+ * gstrtprtxqueue.h:
+ *
+ * Copyright (C) <2007> Nokia Corporation.
+ * Contact: Zeeshan Ali <zeeshan.ali@nokia.com>
+ * Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
+ * 2000,2005 Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_RTX_QUEUE_H__
+#define __GST_RTP_RTX_QUEUE_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_RTX_QUEUE (gst_rtp_rtx_queue_get_type())
+#define GST_RTP_RTX_QUEUE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_RTX_QUEUE, GstRTPRtxQueue))
+#define GST_RTP_RTX_QUEUE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_RTX_QUEUE, GstRTPRtxQueueClass))
+#define GST_RTP_RTX_QUEUE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_RTX_QUEUE, GstRTPRtxQueueClass))
+#define GST_IS_RTP_RTX_QUEUE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_RTX_QUEUE))
+#define GST_IS_RTP_RTX_QUEUE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_RTX_QUEUE))
+typedef struct _GstRTPRtxQueue GstRTPRtxQueue;
+typedef struct _GstRTPRtxQueueClass GstRTPRtxQueueClass;
+
+/**
+ * GstRTPRtxQueue:
+ *
+ * The opaque #GstRTPRtxQueue structure.
+ */
+struct _GstRTPRtxQueue
+{
+  GstElement element;
+
+  /* pads */
+  GstPad *sinkpad;
+  GstPad *srcpad;
+
+  GMutex lock;                  /* protects queue, pending and the counters */
+  GQueue *queue;                /* stored packets (and segment events), newest at the head */
+  GList *pending;               /* packets awaiting retransmission on the next chain call */
+
+  guint max_size_time;          /* max queue duration in ms, 0 = unlimited */
+  guint max_size_packets;       /* max queue length in packets, 0 = unlimited */
+
+  GstSegment head_segment;      /* segment in effect at the queue head */
+  GstSegment tail_segment;      /* segment in effect at the queue tail */
+
+  /* Statistics */
+  guint n_requests;             /* retransmission requests received */
+  guint n_fulfilled_requests;   /* requests answered with a retransmission */
+};
+
+/* class structure; no virtual methods or signals of its own */
+struct _GstRTPRtxQueueClass
+{
+  GstElementClass parent_class;
+};
+
+GType gst_rtp_rtx_queue_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtprtxqueue);
+
+G_END_DECLS
+#endif /* __GST_RTP_RTX_QUEUE_H__ */
diff --git a/gst/rtpmanager/gstrtprtxreceive.c b/gst/rtpmanager/gstrtprtxreceive.c
new file mode 100644
index 0000000000..8a315f039d
--- /dev/null
+++ b/gst/rtpmanager/gstrtprtxreceive.c
@@ -0,0 +1,791 @@
+/* RTP Retransmission receiver element for GStreamer
+ *
+ * gstrtprtxreceive.c:
+ *
+ * Copyright (C) 2013 Collabora Ltd.
+ * @author Julien Isorce <julien.isorce@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtprtxreceive
+ * @title: rtprtxreceive
+ * @see_also: rtprtxsend, rtpsession, rtpjitterbuffer
+ *
+ * rtprtxreceive listens to the retransmission events from the
+ * downstream rtpjitterbuffer and remembers the SSRC (ssrc1) of the stream and
+ * the sequence number that was requested. When it receives a packet with
+ * a sequence number equal to one of the ones stored and with a different SSRC,
+ * it identifies the new SSRC (ssrc2) as the retransmission stream of ssrc1.
+ * From this point on, it replaces ssrc2 with ssrc1 in all packets of the
+ * ssrc2 stream and flags them as retransmissions, so that rtpjitterbuffer
+ * can reconstruct the original stream.
+ *
+ * This algorithm is implemented as specified in RFC 4588.
+ *
+ * This element is meant to be used with rtprtxsend on the sender side.
+ * See #GstRtpRtxSend
+ *
+ * Below you can see some examples that illustrate how rtprtxreceive and
+ * rtprtxsend fit among the other rtp elements and how they work internally.
+ * Normally, however, you should avoid using such pipelines and use
+ * rtpbin instead, with its #GstRtpBin::request-aux-sender and
+ * #GstRtpBin::request-aux-receiver signals. See #GstRtpBin.
+ *
+ * ## Example pipelines
+ *
+ * |[
+ * gst-launch-1.0 rtpsession name=rtpsession rtp-profile=avpf \
+ * audiotestsrc is-live=true ! opusenc ! rtpopuspay pt=96 ! \
+ * rtprtxsend payload-type-map="application/x-rtp-pt-map,96=(uint)97" ! \
+ * rtpsession.send_rtp_sink \
+ * rtpsession.send_rtp_src ! identity drop-probability=0.01 ! \
+ * udpsink host="127.0.0.1" port=5000 \
+ * udpsrc port=5001 ! rtpsession.recv_rtcp_sink \
+ * rtpsession.send_rtcp_src ! udpsink host="127.0.0.1" port=5002 \
+ * sync=false async=false
+ * ]| Send audio stream through port 5000 (5001 and 5002 are just the rtcp
+ * link with the receiver)
+ *
+ * |[
+ * gst-launch-1.0 rtpsession name=rtpsession rtp-profile=avpf \
+ * udpsrc port=5000 caps="application/x-rtp,media=(string)audio,clock-rate=(int)48000,encoding-name=(string)OPUS,payload=(int)96" ! \
+ * rtpsession.recv_rtp_sink \
+ * rtpsession.recv_rtp_src ! \
+ * rtprtxreceive payload-type-map="application/x-rtp-pt-map,96=(uint)97" ! \
+ * rtpssrcdemux ! rtpjitterbuffer do-retransmission=true ! \
+ * rtpopusdepay ! opusdec ! audioconvert ! audioresample ! autoaudiosink \
+ * rtpsession.send_rtcp_src ! \
+ * udpsink host="127.0.0.1" port=5001 sync=false async=false \
+ * udpsrc port=5002 ! rtpsession.recv_rtcp_sink
+ * ]|
+ * Receive audio stream from port 5000 (5001 and 5002 are just the rtcp
+ * link with the sender)
+ *
+ * In this example we can see a simple streaming of an OPUS stream with some
+ * of the packets being artificially dropped by the identity element.
+ * Thanks to retransmission, you should still hear a clear sound when setting
+ * drop-probability to something greater than 0.
+ *
+ * Internally, the rtpjitterbuffer will generate a custom upstream event,
+ * GstRTPRetransmissionRequest, when it detects that one packet is missing.
+ * Then this request is translated to a FB NACK in the rtcp link by rtpsession.
+ * Finally the rtpsession of the sender side will re-convert it in a
+ * GstRTPRetransmissionRequest that will be handled by rtprtxsend. rtprtxsend
+ * will then re-send the missing packet with a new srrc and a different payload
+ * type (here, 97), but with the same original sequence number. On the receiver
+ * side, rtprtxreceive will associate this new stream with the original and
+ * forward the retransmission packets to rtpjitterbuffer with the original
+ * ssrc and payload type.
+ *
+ * |[
+ * gst-launch-1.0 rtpsession name=rtpsession rtp-profile=avpf \
+ * audiotestsrc is-live=true ! opusenc ! rtpopuspay pt=97 seqnum-offset=1 ! \
+ * rtprtxsend payload-type-map="application/x-rtp-pt-map,97=(uint)99" ! \
+ * funnel name=f ! rtpsession.send_rtp_sink \
+ * audiotestsrc freq=660.0 is-live=true ! opusenc ! \
+ * rtpopuspay pt=97 seqnum-offset=100 ! \
+ * rtprtxsend payload-type-map="application/x-rtp-pt-map,97=(uint)99" ! \
+ * f. \
+ * rtpsession.send_rtp_src ! identity drop-probability=0.01 ! \
+ * udpsink host="127.0.0.1" port=5000 \
+ * udpsrc port=5001 ! rtpsession.recv_rtcp_sink \
+ * rtpsession.send_rtcp_src ! udpsink host="127.0.0.1" port=5002 \
+ * sync=false async=false
+ * ]|
+ * Send two audio streams to port 5000.
+ * |[
+ * gst-launch-1.0 rtpsession name=rtpsession rtp-profile=avpf \
+ * udpsrc port=5000 caps="application/x-rtp,media=(string)audio,clock-rate=(int)48000,encoding-name=(string)OPUS,payload=(int)97" ! \
+ * rtpsession.recv_rtp_sink \
+ * rtpsession.recv_rtp_src ! \
+ * rtprtxreceive payload-type-map="application/x-rtp-pt-map,97=(uint)99" ! \
+ * rtpssrcdemux name=demux \
+ * demux. ! queue ! rtpjitterbuffer do-retransmission=true ! rtpopusdepay ! \
+ * opusdec ! audioconvert ! autoaudiosink \
+ * demux. ! queue ! rtpjitterbuffer do-retransmission=true ! rtpopusdepay ! \
+ * opusdec ! audioconvert ! autoaudiosink \
+ * udpsrc port=5002 ! rtpsession.recv_rtcp_sink \
+ * rtpsession.send_rtcp_src ! udpsink host="127.0.0.1" port=5001 \
+ * sync=false async=false
+ * ]|
+ * Receive two audio streams from port 5000.
+ *
+ * In this example we are streaming two streams of the same type through the
+ * same port. They, however, are using a different SSRC (ssrc is randomly
+ * generated on each payloader - rtpopuspay in this example), so they can be
+ * identified and demultiplexed by rtpssrcdemux on the receiver side. This is
+ * an example of SSRC-multiplexing.
+ *
+ * It is important here to use a different starting sequence number
+ * (seqnum-offset), since this is the only means of identification that
+ * rtprtxreceive uses the very first time to identify retransmission streams.
+ * It is an error, according to RFC4588 to have two retransmission requests for
+ * packets belonging to two different streams but with the same sequence number.
+ * Note that the default seqnum-offset value (-1, which means random) would
+ * work just fine, but it is overridden here for illustration purposes.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "gstrtprtxreceive.h"
+
+#define ASSOC_TIMEOUT (GST_SECOND)
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_rtx_receive_debug);
+#define GST_CAT_DEFAULT gst_rtp_rtx_receive_debug
+
+enum
+{
+ PROP_0,
+ PROP_PAYLOAD_TYPE_MAP,
+ PROP_NUM_RTX_REQUESTS,
+ PROP_NUM_RTX_PACKETS,
+ PROP_NUM_RTX_ASSOC_PACKETS
+};
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static gboolean gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_rtp_rtx_receive_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+
+static GstStateChangeReturn gst_rtp_rtx_receive_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_rtp_rtx_receive_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_rtx_receive_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_rtx_receive_finalize (GObject * object);
+
+G_DEFINE_TYPE_WITH_CODE (GstRtpRtxReceive, gst_rtp_rtx_receive,
+ GST_TYPE_ELEMENT, GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_receive_debug,
+ "rtprtxreceive", 0, "rtp retransmission receiver"));
+GST_ELEMENT_REGISTER_DEFINE (rtprtxreceive, "rtprtxreceive", GST_RANK_NONE,
+ GST_TYPE_RTP_RTX_RECEIVE);
+
+static void
+gst_rtp_rtx_receive_class_init (GstRtpRtxReceiveClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->get_property = gst_rtp_rtx_receive_get_property;
+  gobject_class->set_property = gst_rtp_rtx_receive_set_property;
+  gobject_class->finalize = gst_rtp_rtx_receive_finalize;
+
+  g_object_class_install_property (gobject_class, PROP_PAYLOAD_TYPE_MAP,
+      g_param_spec_boxed ("payload-type-map", "Payload Type Map",
+          "Map of original payload types to their retransmission payload types",
+          GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /* read-only statistics counters */
+  g_object_class_install_property (gobject_class, PROP_NUM_RTX_REQUESTS,
+      g_param_spec_uint ("num-rtx-requests", "Num RTX Requests",
+          "Number of retransmission events received", 0, G_MAXUINT,
+          0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  /* fix: the blurb previously had a stray leading space */
+  g_object_class_install_property (gobject_class, PROP_NUM_RTX_PACKETS,
+      g_param_spec_uint ("num-rtx-packets", "Num RTX Packets",
+          "Number of retransmission packets received", 0, G_MAXUINT,
+          0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_NUM_RTX_ASSOC_PACKETS,
+      g_param_spec_uint ("num-rtx-assoc-packets",
+          "Num RTX Associated Packets", "Number of retransmission packets "
+          "correctly associated with retransmission requests", 0, G_MAXUINT,
+          0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+  gst_element_class_set_static_metadata (gstelement_class,
+      "RTP Retransmission receiver", "Codec",
+      "Receive retransmitted RTP packets according to RFC4588",
+      "Julien Isorce <julien.isorce@collabora.co.uk>");
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_receive_change_state);
+}
+
+/* Forget all learned ssrc associations and outstanding requests and clear
+ * the statistics counters. */
+static void
+gst_rtp_rtx_receive_reset (GstRtpRtxReceive * rtx)
+{
+  GST_OBJECT_LOCK (rtx);
+  g_hash_table_remove_all (rtx->ssrc2_ssrc1_map);
+  g_hash_table_remove_all (rtx->seqnum_ssrc1_map);
+  rtx->num_rtx_requests = 0;
+  rtx->num_rtx_packets = 0;
+  rtx->num_rtx_assoc_packets = 0;
+  GST_OBJECT_UNLOCK (rtx);
+}
+
+static void
+gst_rtp_rtx_receive_finalize (GObject * object)
+{
+  GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (object);
+
+  /* release the lookup tables; seqnum_ssrc1_map owns its SsrcAssoc values
+   * and frees them via its GDestroyNotify */
+  g_hash_table_unref (rtx->ssrc2_ssrc1_map);
+  g_hash_table_unref (rtx->seqnum_ssrc1_map);
+  g_hash_table_unref (rtx->rtx_pt_map);
+  if (rtx->rtx_pt_map_structure)
+    gst_structure_free (rtx->rtx_pt_map_structure);
+
+  G_OBJECT_CLASS (gst_rtp_rtx_receive_parent_class)->finalize (object);
+}
+
+/* records the master-stream ssrc a retransmission request was made for,
+ * together with the time of that request (used to expire stale attempts) */
+typedef struct
+{
+  guint32 ssrc;
+  GstClockTime time;
+} SsrcAssoc;
+
+/* allocate and fill a new association record; freed with ssrc_assoc_free() */
+static SsrcAssoc *
+ssrc_assoc_new (guint32 ssrc, GstClockTime time)
+{
+  SsrcAssoc *assoc;
+
+  assoc = g_slice_new (SsrcAssoc);
+  assoc->ssrc = ssrc;
+  assoc->time = time;
+  return assoc;
+}
+
+/* release a SsrcAssoc allocated with ssrc_assoc_new() */
+static void
+ssrc_assoc_free (SsrcAssoc * assoc)
+{
+  g_slice_free (SsrcAssoc, assoc);
+}
+
+static void
+gst_rtp_rtx_receive_init (GstRtpRtxReceive * rtx)
+{
+  GstElementClass *klass = GST_ELEMENT_GET_CLASS (rtx);
+
+  /* src pad: forwards data; its event function intercepts the upstream
+   * GstRTPRetransmissionRequest events */
+  rtx->srcpad =
+      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+          "src"), "src");
+  GST_PAD_SET_PROXY_CAPS (rtx->srcpad);
+  GST_PAD_SET_PROXY_ALLOCATION (rtx->srcpad);
+  gst_pad_set_event_function (rtx->srcpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_receive_src_event));
+  gst_element_add_pad (GST_ELEMENT (rtx), rtx->srcpad);
+
+  rtx->sinkpad =
+      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+          "sink"), "sink");
+  GST_PAD_SET_PROXY_CAPS (rtx->sinkpad);
+  GST_PAD_SET_PROXY_ALLOCATION (rtx->sinkpad);
+  gst_pad_set_chain_function (rtx->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_rtp_rtx_receive_chain));
+  gst_element_add_pad (GST_ELEMENT (rtx), rtx->sinkpad);
+
+  /* one shared table maps rtx ssrc <-> master ssrc in both directions,
+   * which works because every ssrc is unique */
+  rtx->ssrc2_ssrc1_map = g_hash_table_new (g_direct_hash, g_direct_equal);
+  /* maps a requested seqnum to the SsrcAssoc of the stream it was
+   * requested for; values are owned by the table */
+  rtx->seqnum_ssrc1_map = g_hash_table_new_full (g_direct_hash, g_direct_equal,
+      NULL, (GDestroyNotify) ssrc_assoc_free);
+
+  /* payload type mapping built from the payload-type-map property;
+   * presumably rtx pt -> original pt -- confirm against the chain function */
+  rtx->rtx_pt_map = g_hash_table_new (g_direct_hash, g_direct_equal);
+}
+
/* Src pad event handler.
 *
 * Intercepts the custom upstream "GstRTPRetransmissionRequest" event
 * (usually produced by a downstream rtpjitterbuffer), records the request
 * in seqnum_ssrc1_map so that a later rtx packet can be associated with its
 * master stream, updates statistics, and then forwards the event upstream
 * so rtpsession can turn it into a FB NACK. Returns TRUE also when the
 * event is swallowed because the request is rejected. */
static gboolean
gst_rtp_rtx_receive_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (parent);
  gboolean res;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:
    {
      const GstStructure *s = gst_event_get_structure (event);

      /* This event usually comes from the downstream gstrtpjitterbuffer */
      if (gst_structure_has_name (s, "GstRTPRetransmissionRequest")) {
        guint seqnum = 0;
        guint ssrc = 0;
        gpointer ssrc2 = 0;

        /* retrieve seqnum of the packet that needs to be retransmitted */
        if (!gst_structure_get_uint (s, "seqnum", &seqnum))
          seqnum = -1;

        /* retrieve ssrc of the packet that needs to be retransmitted;
         * it's useful when reconstructing the original packet from the rtx
         * packet */
        if (!gst_structure_get_uint (s, "ssrc", &ssrc))
          ssrc = -1;

        GST_DEBUG_OBJECT (rtx, "got rtx request for seqnum: %u, ssrc: %X",
            seqnum, ssrc);

        GST_OBJECT_LOCK (rtx);

        /* increase number of seen requests for our statistics */
        ++rtx->num_rtx_requests;

        /* First, we look up in our map to see if we have already associated
         * this master stream ssrc with its retransmission stream.
         * Every ssrc is unique so we can use the same hash table
         * for both retrieving the ssrc1 from ssrc2 and also ssrc2 from ssrc1
         */
        if (g_hash_table_lookup_extended (rtx->ssrc2_ssrc1_map,
                GUINT_TO_POINTER (ssrc), NULL, &ssrc2)
            && GPOINTER_TO_UINT (ssrc2) != GPOINTER_TO_UINT (ssrc)) {
          GST_TRACE_OBJECT (rtx, "Retransmitted stream %X already associated "
              "to its master, %X", GPOINTER_TO_UINT (ssrc2), ssrc);
        } else {
          SsrcAssoc *assoc;

          /* not already associated, but we also have to check that we have
           * not already considered this request.
           */
          if (g_hash_table_lookup_extended (rtx->seqnum_ssrc1_map,
                  GUINT_TO_POINTER (seqnum), NULL, (gpointer *) & assoc)) {
            if (assoc->ssrc == ssrc) {
              /* same seqnum, same ssrc */

              /* do nothing because we have already considered this request:
               * the jitterbuffer may be too impatient, or the rtx packet may
               * have been lost too.
               * It does not mean we reject the event, we still want to
               * forward the request to the gstrtpsession to be translated
               * into a FB NACK
               */
              GST_LOG_OBJECT (rtx, "Duplicate request: seqnum: %u, ssrc: %X",
                  seqnum, ssrc);
            } else {
              /* same seqnum, different ssrc */

              /* If the association attempt is older than ASSOC_TIMEOUT,
               * then we give up on it, and try this one.
               */
              if (!GST_CLOCK_TIME_IS_VALID (rtx->last_time) ||
                  !GST_CLOCK_TIME_IS_VALID (assoc->time) ||
                  assoc->time + ASSOC_TIMEOUT < rtx->last_time) {
                /* From RFC 4588:
                 * the receiver MUST NOT have two outstanding requests for the
                 * same packet sequence number in two different original
                 * streams before the association is resolved. Otherwise it's
                 * impossible to associate a rtx stream and its master stream
                 */

                /* remove seqnum in order to reuse the spot */
                g_hash_table_remove (rtx->seqnum_ssrc1_map,
                    GUINT_TO_POINTER (seqnum));
                goto retransmit;
              } else {
                GST_INFO_OBJECT (rtx, "rejecting request for seqnum %u"
                    " of master stream %X; there is already a pending request "
                    "for the same seqnum on ssrc %X that has not expired",
                    seqnum, ssrc, assoc->ssrc);

                /* do not forward the event as we are rejecting this request */
                GST_OBJECT_UNLOCK (rtx);
                gst_event_unref (event);
                return TRUE;
              }
            }
          } else {
          retransmit:
            /* the request has not been considered yet;
             * insert it for the first time */
            g_hash_table_insert (rtx->seqnum_ssrc1_map,
                GUINT_TO_POINTER (seqnum),
                ssrc_assoc_new (ssrc, rtx->last_time));
          }
        }

        GST_DEBUG_OBJECT (rtx, "packet number %u of master stream %X"
            " needs to be retransmitted", seqnum, ssrc);

        GST_OBJECT_UNLOCK (rtx);
      }

      /* Forward the event upstream so that the request can actually be
       * translated by gstrtpsession and sent over the network */
      res = gst_pad_event_default (pad, parent, event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, parent, event);
      break;
  }
  return res;
}
+
+/* Copy fixed header and extension. Replace current ssrc by ssrc1,
+ * remove OSN and replace current seq num by OSN.
+ * Copy memory to avoid to manually copy each rtp buffer field.
+ */
/* Copy fixed header and extension. Replace current ssrc by ssrc1,
 * remove OSN and replace current seq num by OSN.
 * Copy memory to avoid manually copying each rtp buffer field.
 *
 * The GstRTPBuffer segments are: [0] fixed header, [1] extension,
 * [2] payload, [3] padding. The rtx payload starts with a 2-byte OSN
 * (original sequence number) which is stripped here.
 *
 * NOTE(review): assumes rtp->size[2] >= 2 — the caller
 * (gst_rtp_rtx_receive_chain) checks the payload length before calling;
 * otherwise payload_len would underflow. */
static GstBuffer *
_gst_rtp_buffer_new_from_rtx (GstRTPBuffer * rtp, guint32 ssrc1,
    guint16 orign_seqnum, guint8 origin_payload_type)
{
  GstMemory *mem = NULL;
  GstRTPBuffer new_rtp = GST_RTP_BUFFER_INIT;
  GstBuffer *new_buffer = gst_buffer_new ();
  GstMapInfo map;
  guint payload_len = 0;

  /* copy fixed header */
  mem = gst_memory_copy (rtp->map[0].memory,
      (guint8 *) rtp->data[0] - rtp->map[0].data, rtp->size[0]);
  gst_buffer_append_memory (new_buffer, mem);

  /* copy extension if any */
  if (rtp->size[1]) {
    mem = gst_memory_copy (rtp->map[1].memory,
        (guint8 *) rtp->data[1] - rtp->map[1].data, rtp->size[1]);
    gst_buffer_append_memory (new_buffer, mem);
  }

  /* copy payload and remove OSN (the leading 2 bytes) */
  payload_len = rtp->size[2] - 2;
  mem = gst_allocator_alloc (NULL, payload_len, NULL);

  gst_memory_map (mem, &map, GST_MAP_WRITE);
  if (rtp->size[2])
    memcpy (map.data, (guint8 *) rtp->data[2] + 2, payload_len);
  gst_memory_unmap (mem, &map);
  gst_buffer_append_memory (new_buffer, mem);

  /* the sender always constructs rtx packets without padding,
   * but the receiver can still receive rtx packets with padding.
   * So just copy it. Only the last padding byte (the padding length
   * itself) is meaningful, so the rest of the block is left as-is. */
  if (rtp->size[3]) {
    guint pad_len = rtp->size[3];

    mem = gst_allocator_alloc (NULL, pad_len, NULL);

    gst_memory_map (mem, &map, GST_MAP_WRITE);
    map.data[pad_len - 1] = pad_len;
    gst_memory_unmap (mem, &map);

    gst_buffer_append_memory (new_buffer, mem);
  }

  /* restore the master stream's ssrc, seqnum and payload type */
  gst_rtp_buffer_map (new_buffer, GST_MAP_WRITE, &new_rtp);
  gst_rtp_buffer_set_ssrc (&new_rtp, ssrc1);
  gst_rtp_buffer_set_seq (&new_rtp, orign_seqnum);
  gst_rtp_buffer_set_payload_type (&new_rtp, origin_payload_type);
  gst_rtp_buffer_unmap (&new_rtp);

  /* keep flags/timestamps of the rtx buffer and mark the result as a
   * retransmission so downstream can tell it apart */
  gst_buffer_copy_into (new_buffer, rtp->buffer,
      GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1);
  GST_BUFFER_FLAG_SET (new_buffer, GST_RTP_BUFFER_FLAG_RETRANSMISSION);

  return new_buffer;
}
+
/* Sink pad chain function.
 *
 * Packets whose payload type is not in rtx_pt_map are forwarded untouched.
 * For rtx packets, the original sequence number (OSN) is read from the
 * first two payload bytes, the rtx stream is associated with its master
 * stream (using the pending requests recorded in seqnum_ssrc1_map by the
 * src event handler), and the packet is rewritten back into an
 * original-stream packet before being pushed. Rtx packets that cannot be
 * associated are dropped. */
static GstFlowReturn
gst_rtp_rtx_receive_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (parent);
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  GstFlowReturn ret = GST_FLOW_OK;
  GstBuffer *new_buffer = NULL;
  guint32 ssrc = 0;
  gpointer ssrc1 = 0;
  guint32 ssrc2 = 0;
  guint16 seqnum = 0;
  guint16 orign_seqnum = 0;
  guint8 payload_type = 0;
  gpointer payload = NULL;
  guint8 origin_payload_type = 0;
  gboolean is_rtx;
  gboolean drop = FALSE;

  /* map current rtp packet to parse its header */
  if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp))
    goto invalid_buffer;

  ssrc = gst_rtp_buffer_get_ssrc (&rtp);
  seqnum = gst_rtp_buffer_get_seq (&rtp);
  payload_type = gst_rtp_buffer_get_payload_type (&rtp);

  /* check if we have a retransmission packet (this information comes from SDP) */
  GST_OBJECT_LOCK (rtx);

  is_rtx =
      g_hash_table_lookup_extended (rtx->rtx_pt_map,
      GUINT_TO_POINTER (payload_type), NULL, NULL);

  if (is_rtx) {
    payload = gst_rtp_buffer_get_payload (&rtp);

    /* an rtx payload must carry at least the 2-byte OSN */
    if (!payload || gst_rtp_buffer_get_payload_len (&rtp) < 2) {
      GST_OBJECT_UNLOCK (rtx);
      gst_rtp_buffer_unmap (&rtp);
      goto invalid_buffer;
    }
  }

  /* remember the arrival time (PTS) of this packet; it is used to expire
   * old association requests */
  rtx->last_time = GST_BUFFER_PTS (buffer);

  /* garbage-collect pending requests that are older than ASSOC_TIMEOUT */
  if (g_hash_table_size (rtx->seqnum_ssrc1_map) > 0) {
    GHashTableIter iter;
    gpointer key, value;

    g_hash_table_iter_init (&iter, rtx->seqnum_ssrc1_map);
    while (g_hash_table_iter_next (&iter, &key, &value)) {
      SsrcAssoc *assoc = value;

      /* remove association request if it is too old */
      if (GST_CLOCK_TIME_IS_VALID (rtx->last_time) &&
          GST_CLOCK_TIME_IS_VALID (assoc->time) &&
          assoc->time + ASSOC_TIMEOUT < rtx->last_time) {
        g_hash_table_iter_remove (&iter);
      }
    }
  }

  /* if the current packet is from a retransmission stream */
  if (is_rtx) {
    /* increase our statistic */
    ++rtx->num_rtx_packets;

    /* read OSN in the rtx payload (safe: length checked above) */
    orign_seqnum = GST_READ_UINT16_BE (gst_rtp_buffer_get_payload (&rtp));
    origin_payload_type =
        GPOINTER_TO_UINT (g_hash_table_lookup (rtx->rtx_pt_map,
            GUINT_TO_POINTER (payload_type)));

    GST_DEBUG_OBJECT (rtx, "Got rtx packet: rtx seqnum %u, rtx ssrc %X, "
        "rtx pt %u, orig seqnum %u, orig pt %u", seqnum, ssrc, payload_type,
        orign_seqnum, origin_payload_type);

    /* first we check if we already have associated this retransmission stream
     * to a master stream */
    if (g_hash_table_lookup_extended (rtx->ssrc2_ssrc1_map,
            GUINT_TO_POINTER (ssrc), NULL, &ssrc1)) {
      GST_TRACE_OBJECT (rtx,
          "packet is from retransmission stream %X already associated to "
          "master stream %X", ssrc, GPOINTER_TO_UINT (ssrc1));
      ssrc2 = ssrc;
    } else {
      SsrcAssoc *assoc;

      /* the current retransmitted packet has its rtx stream not already
       * associated to a master stream, so retrieve it from our request
       * history */
      if (g_hash_table_lookup_extended (rtx->seqnum_ssrc1_map,
              GUINT_TO_POINTER (orign_seqnum), NULL, (gpointer *) & assoc)) {
        GST_LOG_OBJECT (rtx,
            "associating retransmitted stream %X to master stream %X thanks "
            "to rtx packet %u (orig seqnum %u)", ssrc, assoc->ssrc, seqnum,
            orign_seqnum);
        ssrc1 = GUINT_TO_POINTER (assoc->ssrc);
        ssrc2 = ssrc;

        /* just put a guard */
        if (GPOINTER_TO_UINT (ssrc1) == ssrc2)
          GST_WARNING_OBJECT (rtx, "RTX receiver ssrc2_ssrc1_map bad state, "
              "master and rtx SSRCs are the same (%X)\n", ssrc);

        /* free the spot so that this seqnum can be used to do another
         * association */
        g_hash_table_remove (rtx->seqnum_ssrc1_map,
            GUINT_TO_POINTER (orign_seqnum));

        /* actually do the association between rtx stream and master stream */
        g_hash_table_insert (rtx->ssrc2_ssrc1_map, GUINT_TO_POINTER (ssrc2),
            ssrc1);

        /* also do the association between master stream and rtx stream;
         * every ssrc is unique so we can use the same hash table
         * for both retrieving the ssrc1 from ssrc2 and also ssrc2 from ssrc1
         */
        g_hash_table_insert (rtx->ssrc2_ssrc1_map, ssrc1,
            GUINT_TO_POINTER (ssrc2));

      } else {
        /* we are not able to associate this rtx packet with a master stream */
        GST_INFO_OBJECT (rtx,
            "dropping rtx packet %u because its orig seqnum (%u) is not in our"
            " pending retransmission requests", seqnum, orign_seqnum);
        drop = TRUE;
      }
    }
  }

  /* if not dropped the packet was successfully associated */
  if (is_rtx && !drop)
    ++rtx->num_rtx_assoc_packets;

  GST_OBJECT_UNLOCK (rtx);

  /* just drop the packet if the association could not have been made */
  if (drop) {
    gst_rtp_buffer_unmap (&rtp);
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }

  /* create the retransmission packet (rewritten as a master-stream packet) */
  if (is_rtx)
    new_buffer =
        _gst_rtp_buffer_new_from_rtx (&rtp, GPOINTER_TO_UINT (ssrc1),
        orign_seqnum, origin_payload_type);

  gst_rtp_buffer_unmap (&rtp);

  /* push the packet */
  if (is_rtx) {
    gst_buffer_unref (buffer);
    GST_LOG_OBJECT (rtx, "pushing packet seqnum:%u from restransmission "
        "stream ssrc: %X (master ssrc %X)", orign_seqnum, ssrc2,
        GPOINTER_TO_UINT (ssrc1));
    ret = gst_pad_push (rtx->srcpad, new_buffer);
  } else {
    GST_TRACE_OBJECT (rtx, "pushing packet seqnum:%u from master stream "
        "ssrc: %X", seqnum, ssrc);
    ret = gst_pad_push (rtx->srcpad, buffer);
  }

  return ret;

invalid_buffer:
  {
    GST_ELEMENT_WARNING (rtx, STREAM, DECODE, (NULL),
        ("Received invalid RTP payload, dropping"));
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
+
+static void
+gst_rtp_rtx_receive_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (object);
+
+ switch (prop_id) {
+ case PROP_PAYLOAD_TYPE_MAP:
+ GST_OBJECT_LOCK (rtx);
+ g_value_set_boxed (value, rtx->rtx_pt_map_structure);
+ GST_OBJECT_UNLOCK (rtx);
+ break;
+ case PROP_NUM_RTX_REQUESTS:
+ GST_OBJECT_LOCK (rtx);
+ g_value_set_uint (value, rtx->num_rtx_requests);
+ GST_OBJECT_UNLOCK (rtx);
+ break;
+ case PROP_NUM_RTX_PACKETS:
+ GST_OBJECT_LOCK (rtx);
+ g_value_set_uint (value, rtx->num_rtx_packets);
+ GST_OBJECT_UNLOCK (rtx);
+ break;
+ case PROP_NUM_RTX_ASSOC_PACKETS:
+ GST_OBJECT_LOCK (rtx);
+ g_value_set_uint (value, rtx->num_rtx_assoc_packets);
+ GST_OBJECT_UNLOCK (rtx);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gboolean
+structure_to_hash_table_inv (GQuark field_id, const GValue * value,
+ gpointer hash)
+{
+ const gchar *field_str;
+ guint field_uint;
+ guint value_uint;
+
+ field_str = g_quark_to_string (field_id);
+ field_uint = atoi (field_str);
+ value_uint = g_value_get_uint (value);
+ g_hash_table_insert ((GHashTable *) hash, GUINT_TO_POINTER (value_uint),
+ GUINT_TO_POINTER (field_uint));
+
+ return TRUE;
+}
+
+static void
+gst_rtp_rtx_receive_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstRtpRtxReceive *rtx = GST_RTP_RTX_RECEIVE (object);
+
+ switch (prop_id) {
+ case PROP_PAYLOAD_TYPE_MAP:
+ GST_OBJECT_LOCK (rtx);
+ if (rtx->rtx_pt_map_structure)
+ gst_structure_free (rtx->rtx_pt_map_structure);
+ rtx->rtx_pt_map_structure = g_value_dup_boxed (value);
+ g_hash_table_remove_all (rtx->rtx_pt_map);
+ gst_structure_foreach (rtx->rtx_pt_map_structure,
+ structure_to_hash_table_inv, rtx->rtx_pt_map);
+ GST_OBJECT_UNLOCK (rtx);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstStateChangeReturn
+gst_rtp_rtx_receive_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstRtpRtxReceive *rtx;
+
+ rtx = GST_RTP_RTX_RECEIVE (element);
+
+ switch (transition) {
+ default:
+ break;
+ }
+
+ ret =
+ GST_ELEMENT_CLASS (gst_rtp_rtx_receive_parent_class)->change_state
+ (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtp_rtx_receive_reset (rtx);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
diff --git a/gst/rtpmanager/gstrtprtxreceive.h b/gst/rtpmanager/gstrtprtxreceive.h
new file mode 100644
index 0000000000..401278024d
--- /dev/null
+++ b/gst/rtpmanager/gstrtprtxreceive.h
@@ -0,0 +1,81 @@
+/* RTP Retransmission receiver element for GStreamer
+ *
+ * gstrtprtxreceive.h:
+ *
+ * Copyright (C) 2013 Collabora Ltd.
+ * @author Julien Isorce <julien.isorce@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_RTX_RECEIVE_H__
+#define __GST_RTP_RTX_RECEIVE_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+G_BEGIN_DECLS
#define GST_TYPE_RTP_RTX_RECEIVE (gst_rtp_rtx_receive_get_type())
#define GST_RTP_RTX_RECEIVE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_RTX_RECEIVE, GstRtpRtxReceive))
#define GST_RTP_RTX_RECEIVE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_RTX_RECEIVE, GstRtpRtxReceiveClass))
#define GST_RTP_RTX_RECEIVE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_RTX_RECEIVE, GstRtpRtxReceiveClass))
#define GST_IS_RTP_RTX_RECEIVE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_RTX_RECEIVE))
/* Fix: the parameter must be named 'klass' — the expansion refers to
 * 'klass', so the original '(obj)' parameter made any use of this macro
 * reference an undefined (or accidentally captured) identifier. */
#define GST_IS_RTP_RTX_RECEIVE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_RTX_RECEIVE))
+typedef struct _GstRtpRtxReceive GstRtpRtxReceive;
+typedef struct _GstRtpRtxReceiveClass GstRtpRtxReceiveClass;
+
struct _GstRtpRtxReceive
{
  GstElement element;

  /* pads (always present, proxying caps/allocation) */
  GstPad *sinkpad;
  GstPad *srcpad;

  /* retrieve associated master stream from rtx stream;
   * it also works to retrieve rtx stream from master stream,
   * as we make sure all ssrc are unique (both directions live in the
   * same table) */
  GHashTable *ssrc2_ssrc1_map;

  /* seqnum (of a pending retransmission request) -> SsrcAssoc, for
   * requests whose ssrc has not been associated to a rtx stream yet */
  GHashTable *seqnum_ssrc1_map;

  /* rtx pt (uint) -> origin pt (uint), derived from the structure below */
  GHashTable *rtx_pt_map;
  /* origin pt (string field name) -> rtx pt (uint), as set through the
   * payload-type-map property */
  GstStructure *rtx_pt_map_structure;

  /* statistics, exposed as read-only properties */
  guint num_rtx_requests;
  guint num_rtx_packets;
  guint num_rtx_assoc_packets;

  /* PTS of the most recently chained buffer; used to expire old
   * association requests */
  GstClockTime last_time;
};
+
/* Class structure; no class-specific members. */
struct _GstRtpRtxReceiveClass
{
  GstElementClass parent_class;
};
+
+
+GType gst_rtp_rtx_receive_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtprtxreceive);
+
+G_END_DECLS
+#endif /* __GST_RTP_RTX_RECEIVE_H__ */
diff --git a/gst/rtpmanager/gstrtprtxsend.c b/gst/rtpmanager/gstrtprtxsend.c
new file mode 100644
index 0000000000..e24bf8852b
--- /dev/null
+++ b/gst/rtpmanager/gstrtprtxsend.c
@@ -0,0 +1,1000 @@
+/* RTP Retransmission sender element for GStreamer
+ *
+ * gstrtprtxsend.c:
+ *
+ * Copyright (C) 2013 Collabora Ltd.
+ * @author Julien Isorce <julien.isorce@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtprtxsend
+ * @title: rtprtxsend
+ *
+ * See #GstRtpRtxReceive for examples
+ *
+ * The purpose of the sender RTX object is to keep a history of RTP packets up
+ * to a configurable limit (max-size-time or max-size-packets). It will listen
+ * for upstream custom retransmission events (GstRTPRetransmissionRequest) that
+ * comes from downstream (#GstRtpSession). When receiving a request it will
+ * look up the requested seqnum in its list of stored packets. If the packet
+ * is available, it will create a RTX packet according to RFC 4588 and send
+ * this as an auxiliary stream. RTX is SSRC-multiplexed
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <string.h>
+#include <stdlib.h>
+
+#include "gstrtprtxsend.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_rtx_send_debug);
+#define GST_CAT_DEFAULT gst_rtp_rtx_send_debug
+
+#define DEFAULT_RTX_PAYLOAD_TYPE 0
+#define DEFAULT_MAX_SIZE_TIME 0
+#define DEFAULT_MAX_SIZE_PACKETS 100
+
/* GObject property IDs (see the matching g_object_class_install_property
 * calls in gst_rtp_rtx_send_class_init) */
enum
{
  PROP_0,
  PROP_SSRC_MAP,                /* write-only: master ssrc -> rtx ssrc */
  PROP_PAYLOAD_TYPE_MAP,        /* original pt -> rtx pt */
  PROP_MAX_SIZE_TIME,           /* history limit in ms (0 = unlimited) */
  PROP_MAX_SIZE_PACKETS,        /* history limit in packets (0 = unlimited) */
  PROP_NUM_RTX_REQUESTS,        /* read-only statistic */
  PROP_NUM_RTX_PACKETS,         /* read-only statistic */
  PROP_CLOCK_RATE_MAP,          /* pt -> clock rate */
};
+
/* Both pads are always present and carry plain RTP; retransmission packets
 * are SSRC-multiplexed onto the same stream, so no special caps needed. */
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp")
    );

static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("application/x-rtp")
    );
+
+static gboolean gst_rtp_rtx_send_queue_check_full (GstDataQueue * queue,
+ guint visible, guint bytes, guint64 time, gpointer checkdata);
+
+static gboolean gst_rtp_rtx_send_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static gboolean gst_rtp_rtx_send_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstFlowReturn gst_rtp_rtx_send_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstFlowReturn gst_rtp_rtx_send_chain_list (GstPad * pad,
+ GstObject * parent, GstBufferList * list);
+
+static void gst_rtp_rtx_send_src_loop (GstRtpRtxSend * rtx);
+static gboolean gst_rtp_rtx_send_activate_mode (GstPad * pad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+
+static GstStateChangeReturn gst_rtp_rtx_send_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_rtp_rtx_send_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtp_rtx_send_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_rtp_rtx_send_finalize (GObject * object);
+
+G_DEFINE_TYPE_WITH_CODE (GstRtpRtxSend, gst_rtp_rtx_send, GST_TYPE_ELEMENT,
+ GST_DEBUG_CATEGORY_INIT (gst_rtp_rtx_send_debug, "rtprtxsend", 0,
+ "rtp retransmission sender"));
+GST_ELEMENT_REGISTER_DEFINE (rtprtxsend, "rtprtxsend", GST_RANK_NONE,
+ GST_TYPE_RTP_RTX_SEND);
+
/* One entry in a per-SSRC packet history (SSRCRtxData.queue): a stored RTP
 * buffer together with its seqnum and RTP timestamp, cached so lookups and
 * pruning do not need to re-map the buffer. */
typedef struct
{
  guint16 seqnum;
  guint32 timestamp;
  GstBuffer *buffer;            /* owns one reference */
} BufferQueueItem;
+
/* GDestroyNotify for BufferQueueItem: drop the history's buffer reference
 * and release the item itself. */
static void
buffer_queue_item_free (BufferQueueItem * item)
{
  gst_buffer_unref (item->buffer);
  g_slice_free (BufferQueueItem, item);
}
+
/* Per-master-SSRC retransmission state. */
typedef struct
{
  guint32 rtx_ssrc;             /* SSRC used for the rtx stream */
  guint16 seqnum_base, next_seqnum;     /* rtx stream seqnum counter */
  gint clock_rate;

  /* history of rtp packets, ordered by seqnum (BufferQueueItem) */
  GSequence *queue;
} SSRCRtxData;
+
+static SSRCRtxData *
+ssrc_rtx_data_new (guint32 rtx_ssrc)
+{
+ SSRCRtxData *data = g_slice_new0 (SSRCRtxData);
+
+ data->rtx_ssrc = rtx_ssrc;
+ data->next_seqnum = data->seqnum_base = g_random_int_range (0, G_MAXUINT16);
+ data->queue = g_sequence_new ((GDestroyNotify) buffer_queue_item_free);
+
+ return data;
+}
+
/* Free per-SSRC state; dropping the sequence releases every queued
 * BufferQueueItem via buffer_queue_item_free. */
static void
ssrc_rtx_data_free (SSRCRtxData * data)
{
  g_sequence_free (data->queue);
  g_slice_free (SSRCRtxData, data);
}
+
+static void
+gst_rtp_rtx_send_class_init (GstRtpRtxSendClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->get_property = gst_rtp_rtx_send_get_property;
+ gobject_class->set_property = gst_rtp_rtx_send_set_property;
+ gobject_class->finalize = gst_rtp_rtx_send_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_SSRC_MAP,
+ g_param_spec_boxed ("ssrc-map", "SSRC Map",
+ "Map of SSRCs to their retransmission SSRCs for SSRC-multiplexed mode"
+ " (default = random)", GST_TYPE_STRUCTURE,
+ G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PAYLOAD_TYPE_MAP,
+ g_param_spec_boxed ("payload-type-map", "Payload Type Map",
+ "Map of original payload types to their retransmission payload types",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_SIZE_TIME,
+ g_param_spec_uint ("max-size-time", "Max Size Time",
+ "Amount of ms to queue (0 = unlimited)", 0, G_MAXUINT,
+ DEFAULT_MAX_SIZE_TIME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_SIZE_PACKETS,
+ g_param_spec_uint ("max-size-packets", "Max Size Packets",
+ "Amount of packets to queue (0 = unlimited)", 0, G_MAXINT16,
+ DEFAULT_MAX_SIZE_PACKETS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NUM_RTX_REQUESTS,
+ g_param_spec_uint ("num-rtx-requests", "Num RTX Requests",
+ "Number of retransmission events received", 0, G_MAXUINT,
+ 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NUM_RTX_PACKETS,
+ g_param_spec_uint ("num-rtx-packets", "Num RTX Packets",
+ " Number of retransmission packets sent", 0, G_MAXUINT,
+ 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CLOCK_RATE_MAP,
+ g_param_spec_boxed ("clock-rate-map", "Clock Rate Map",
+ "Map of payload types to their clock rates",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTP Retransmission Sender", "Codec",
+ "Retransmit RTP packets when needed, according to RFC4588",
+ "Julien Isorce <julien.isorce@collabora.co.uk>");
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_rtp_rtx_send_change_state);
+}
+
+static void
+gst_rtp_rtx_send_reset (GstRtpRtxSend * rtx)
+{
+ GST_OBJECT_LOCK (rtx);
+ gst_data_queue_flush (rtx->queue);
+ g_hash_table_remove_all (rtx->ssrc_data);
+ g_hash_table_remove_all (rtx->rtx_ssrcs);
+ rtx->num_rtx_requests = 0;
+ rtx->num_rtx_packets = 0;
+ GST_OBJECT_UNLOCK (rtx);
+}
+
+static void
+gst_rtp_rtx_send_finalize (GObject * object)
+{
+ GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (object);
+
+ g_hash_table_unref (rtx->ssrc_data);
+ g_hash_table_unref (rtx->rtx_ssrcs);
+ if (rtx->external_ssrc_map)
+ gst_structure_free (rtx->external_ssrc_map);
+ g_hash_table_unref (rtx->rtx_pt_map);
+ if (rtx->rtx_pt_map_structure)
+ gst_structure_free (rtx->rtx_pt_map_structure);
+ g_hash_table_unref (rtx->clock_rate_map);
+ if (rtx->clock_rate_map_structure)
+ gst_structure_free (rtx->clock_rate_map_structure);
+ g_object_unref (rtx->queue);
+
+ G_OBJECT_CLASS (gst_rtp_rtx_send_parent_class)->finalize (object);
+}
+
/* Instance initializer: create the src/sink pads from their templates,
 * install the pad functions, and allocate the queue and lookup tables. */
static void
gst_rtp_rtx_send_init (GstRtpRtxSend * rtx)
{
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (rtx);

  /* src pad: pushes buffers from its own task (see activate_mode) and
   * handles upstream retransmission-request events */
  rtx->srcpad =
      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
          "src"), "src");
  GST_PAD_SET_PROXY_CAPS (rtx->srcpad);
  GST_PAD_SET_PROXY_ALLOCATION (rtx->srcpad);
  gst_pad_set_event_function (rtx->srcpad,
      GST_DEBUG_FUNCPTR (gst_rtp_rtx_send_src_event));
  gst_pad_set_activatemode_function (rtx->srcpad,
      GST_DEBUG_FUNCPTR (gst_rtp_rtx_send_activate_mode));
  gst_element_add_pad (GST_ELEMENT (rtx), rtx->srcpad);

  /* sink pad: receives the original RTP stream */
  rtx->sinkpad =
      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
          "sink"), "sink");
  GST_PAD_SET_PROXY_CAPS (rtx->sinkpad);
  GST_PAD_SET_PROXY_ALLOCATION (rtx->sinkpad);
  gst_pad_set_event_function (rtx->sinkpad,
      GST_DEBUG_FUNCPTR (gst_rtp_rtx_send_sink_event));
  gst_pad_set_chain_function (rtx->sinkpad,
      GST_DEBUG_FUNCPTR (gst_rtp_rtx_send_chain));
  gst_pad_set_chain_list_function (rtx->sinkpad,
      GST_DEBUG_FUNCPTR (gst_rtp_rtx_send_chain_list));
  gst_element_add_pad (GST_ELEMENT (rtx), rtx->sinkpad);

  /* queue of rtx packets waiting to be pushed by the src task; the
   * check-full callback always reports "not full" so pushing never blocks */
  rtx->queue = gst_data_queue_new (gst_rtp_rtx_send_queue_check_full, NULL,
      NULL, rtx);
  /* master ssrc -> SSRCRtxData (owned) */
  rtx->ssrc_data = g_hash_table_new_full (g_direct_hash, g_direct_equal,
      NULL, (GDestroyNotify) ssrc_rtx_data_free);
  /* rtx ssrc -> master ssrc (reverse map) */
  rtx->rtx_ssrcs = g_hash_table_new (g_direct_hash, g_direct_equal);
  rtx->rtx_pt_map = g_hash_table_new (g_direct_hash, g_direct_equal);
  rtx->clock_rate_map = g_hash_table_new (g_direct_hash, g_direct_equal);

  rtx->max_size_time = DEFAULT_MAX_SIZE_TIME;
  rtx->max_size_packets = DEFAULT_MAX_SIZE_PACKETS;
}
+
/* Set or clear the flushing state of the rtx queue, dropping anything it
 * currently holds in either case. */
static void
gst_rtp_rtx_send_set_flushing (GstRtpRtxSend * rtx, gboolean flush)
{
  GST_OBJECT_LOCK (rtx);
  gst_data_queue_set_flushing (rtx->queue, flush);
  gst_data_queue_flush (rtx->queue);
  GST_OBJECT_UNLOCK (rtx);
}
+
/* GstDataQueueCheckFullFunction for the rtx queue: never report the queue
 * as full, so queueing a retransmitted packet never blocks the caller. */
static gboolean
gst_rtp_rtx_send_queue_check_full (GstDataQueue * queue,
    guint visible, guint bytes, guint64 time, gpointer checkdata)
{
  return FALSE;
}
+
+static void
+gst_rtp_rtx_data_queue_item_free (gpointer item)
+{
+ GstDataQueueItem *data = item;
+ if (data->object)
+ gst_mini_object_unref (data->object);
+ g_slice_free (GstDataQueueItem, data);
+}
+
+static gboolean
+gst_rtp_rtx_send_push_out (GstRtpRtxSend * rtx, gpointer object)
+{
+ GstDataQueueItem *data;
+ gboolean success;
+
+ data = g_slice_new0 (GstDataQueueItem);
+ data->object = GST_MINI_OBJECT (object);
+ data->size = 1;
+ data->duration = 1;
+ data->visible = TRUE;
+ data->destroy = gst_rtp_rtx_data_queue_item_free;
+
+ success = gst_data_queue_push (rtx->queue, data);
+
+ if (!success)
+ data->destroy (data);
+
+ return success;
+}
+
+static guint32
+gst_rtp_rtx_send_choose_ssrc (GstRtpRtxSend * rtx, guint32 choice,
+ gboolean consider_choice)
+{
+ guint32 ssrc = consider_choice ? choice : g_random_int ();
+
+ /* make sure to be different than any other */
+ while (g_hash_table_contains (rtx->ssrc_data, GUINT_TO_POINTER (ssrc)) ||
+ g_hash_table_contains (rtx->rtx_ssrcs, GUINT_TO_POINTER (ssrc))) {
+ ssrc = g_random_int ();
+ }
+
+ return ssrc;
+}
+
+static SSRCRtxData *
+gst_rtp_rtx_send_get_ssrc_data (GstRtpRtxSend * rtx, guint32 ssrc)
+{
+ SSRCRtxData *data;
+ guint32 rtx_ssrc = 0;
+ gboolean consider = FALSE;
+
+ if (G_UNLIKELY (!g_hash_table_contains (rtx->ssrc_data,
+ GUINT_TO_POINTER (ssrc)))) {
+ if (rtx->external_ssrc_map) {
+ gchar *ssrc_str;
+ ssrc_str = g_strdup_printf ("%" G_GUINT32_FORMAT, ssrc);
+ consider = gst_structure_get_uint (rtx->external_ssrc_map, ssrc_str,
+ &rtx_ssrc);
+ g_free (ssrc_str);
+ }
+ rtx_ssrc = gst_rtp_rtx_send_choose_ssrc (rtx, rtx_ssrc, consider);
+ data = ssrc_rtx_data_new (rtx_ssrc);
+ g_hash_table_insert (rtx->ssrc_data, GUINT_TO_POINTER (ssrc), data);
+ g_hash_table_insert (rtx->rtx_ssrcs, GUINT_TO_POINTER (rtx_ssrc),
+ GUINT_TO_POINTER (ssrc));
+ } else {
+ data = g_hash_table_lookup (rtx->ssrc_data, GUINT_TO_POINTER (ssrc));
+ }
+ return data;
+}
+
/* Copy fixed header and extension. Add OSN before the copied payload.
 * Copy memory to avoid manually copying each rtp buffer field.
 *
 * Builds an RFC 4588 retransmission packet for @buffer: same payload
 * prefixed with the 2-byte original sequence number (OSN), carried on the
 * per-master-SSRC rtx stream with its own SSRC, seqnum and the rtx payload
 * type taken from rtx_pt_map. The GstRTPBuffer segments are: [0] fixed
 * header, [1] extension, [2] payload. */
static GstBuffer *
gst_rtp_rtx_buffer_new (GstRtpRtxSend * rtx, GstBuffer * buffer)
{
  GstMemory *mem = NULL;
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  GstRTPBuffer new_rtp = GST_RTP_BUFFER_INIT;
  GstBuffer *new_buffer = gst_buffer_new ();
  GstMapInfo map;
  guint payload_len = 0;
  SSRCRtxData *data;
  guint32 ssrc;
  guint16 seqnum;
  guint8 fmtp;                  /* rtx payload type */

  gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);

  /* get needed data from GstRtpRtxSend */
  ssrc = gst_rtp_buffer_get_ssrc (&rtp);
  data = gst_rtp_rtx_send_get_ssrc_data (rtx, ssrc);
  ssrc = data->rtx_ssrc;
  seqnum = data->next_seqnum++;
  fmtp = GPOINTER_TO_UINT (g_hash_table_lookup (rtx->rtx_pt_map,
          GUINT_TO_POINTER (gst_rtp_buffer_get_payload_type (&rtp))));

  GST_DEBUG_OBJECT (rtx, "creating rtx buffer, orig seqnum: %u, "
      "rtx seqnum: %u, rtx ssrc: %X", gst_rtp_buffer_get_seq (&rtp),
      seqnum, ssrc);

  /* gst_rtp_buffer_map does not map the payload so do it now */
  gst_rtp_buffer_get_payload (&rtp);

  /* copy fixed header */
  mem = gst_memory_copy (rtp.map[0].memory, 0, rtp.size[0]);
  gst_buffer_append_memory (new_buffer, mem);

  /* copy extension if any */
  if (rtp.size[1]) {
    mem = gst_allocator_alloc (NULL, rtp.size[1], NULL);
    gst_memory_map (mem, &map, GST_MAP_WRITE);
    memcpy (map.data, rtp.data[1], rtp.size[1]);
    gst_memory_unmap (mem, &map);
    gst_buffer_append_memory (new_buffer, mem);
  }

  /* copy payload and add OSN just before */
  payload_len = 2 + rtp.size[2];
  mem = gst_allocator_alloc (NULL, payload_len, NULL);

  gst_memory_map (mem, &map, GST_MAP_WRITE);
  GST_WRITE_UINT16_BE (map.data, gst_rtp_buffer_get_seq (&rtp));
  if (rtp.size[2])
    memcpy (map.data + 2, rtp.data[2], rtp.size[2]);
  gst_memory_unmap (mem, &map);
  gst_buffer_append_memory (new_buffer, mem);

  /* everything needed is copied */
  gst_rtp_buffer_unmap (&rtp);

  /* set ssrc, seqnum and fmtp on the new packet */
  gst_rtp_buffer_map (new_buffer, GST_MAP_WRITE, &new_rtp);
  gst_rtp_buffer_set_ssrc (&new_rtp, ssrc);
  gst_rtp_buffer_set_seq (&new_rtp, seqnum);
  gst_rtp_buffer_set_payload_type (&new_rtp, fmtp);
  /* RFC 4588: let other elements do the padding, as normal */
  gst_rtp_buffer_set_padding (&new_rtp, FALSE);
  gst_rtp_buffer_unmap (&new_rtp);

  /* Copy over timestamps */
  gst_buffer_copy_into (new_buffer, buffer, GST_BUFFER_COPY_TIMESTAMPS, 0, -1);

  return new_buffer;
}
+
/* GCompareDataFunc ordering BufferQueueItems by RTP seqnum (with wrap-around
 * handling) for the per-SSRC GSequence. */
static gint
buffer_queue_items_cmp (BufferQueueItem * a, BufferQueueItem * b,
    gpointer user_data)
{
  /* gst_rtp_buffer_compare_seqnum returns the opposite of what we want:
   * it returns negative when seqnum1 > seqnum2 and we want negative
   * when b > a, i.e. a is smaller, so it comes first in the sequence —
   * hence the swapped arguments */
  return gst_rtp_buffer_compare_seqnum (b->seqnum, a->seqnum);
}
+
+/* Handle upstream events on the src pad.
+ *
+ * "GstRTPRetransmissionRequest" (usually from rtpsession downstream) is
+ * answered by building an rtx buffer from the stored history and queueing
+ * it for the srcpad task.  "GstRTPCollision" makes us pick a fresh rtx
+ * SSRC when the collision hits one of our rtx streams, or drop our state
+ * and forward the event when it hits a master stream.  Everything else is
+ * forwarded with gst_pad_event_default(). */
+static gboolean
+gst_rtp_rtx_send_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent);
+  gboolean res;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CUSTOM_UPSTREAM:
+    {
+      const GstStructure *s = gst_event_get_structure (event);
+
+      /* This event usually comes from the downstream gstrtpsession */
+      if (gst_structure_has_name (s, "GstRTPRetransmissionRequest")) {
+        guint seqnum = 0;
+        guint ssrc = 0;
+        GstBuffer *rtx_buf = NULL;
+
+        /* retrieve seqnum of the packet that needs to be retransmitted */
+        if (!gst_structure_get_uint (s, "seqnum", &seqnum))
+          seqnum = -1;
+
+        /* retrieve ssrc of the packet that needs to be retransmitted */
+        if (!gst_structure_get_uint (s, "ssrc", &ssrc))
+          ssrc = -1;
+
+        GST_DEBUG_OBJECT (rtx, "got rtx request for seqnum: %u, ssrc: %X",
+            seqnum, ssrc);
+
+        GST_OBJECT_LOCK (rtx);
+        /* check if request is for us */
+        if (g_hash_table_contains (rtx->ssrc_data, GUINT_TO_POINTER (ssrc))) {
+          SSRCRtxData *data;
+          GSequenceIter *iter;
+          BufferQueueItem search_item;
+
+          /* update statistics */
+          ++rtx->num_rtx_requests;
+
+          data = gst_rtp_rtx_send_get_ssrc_data (rtx, ssrc);
+
+          /* binary search in the seqnum-ordered history */
+          search_item.seqnum = seqnum;
+          iter = g_sequence_lookup (data->queue, &search_item,
+              (GCompareDataFunc) buffer_queue_items_cmp, NULL);
+          if (iter) {
+            BufferQueueItem *item = g_sequence_get (iter);
+            GST_LOG_OBJECT (rtx, "found %u", item->seqnum);
+            rtx_buf = gst_rtp_rtx_buffer_new (rtx, item->buffer);
+          }
+#ifndef GST_DISABLE_DEBUG
+          else {
+            /* not found: diagnose whether it already aged out of the
+             * history or has not been sent at all yet */
+            BufferQueueItem *item = NULL;
+
+            iter = g_sequence_get_begin_iter (data->queue);
+            if (!g_sequence_iter_is_end (iter))
+              item = g_sequence_get (iter);
+
+            if (item && seqnum < item->seqnum) {
+              GST_DEBUG_OBJECT (rtx, "requested seqnum %u has already been "
+                  "removed from the rtx queue; the first available is %u",
+                  seqnum, item->seqnum);
+            } else {
+              GST_WARNING_OBJECT (rtx, "requested seqnum %u has not been "
+                  "transmitted yet in the original stream; either the remote end "
+                  "is not configured correctly, or the source is too slow",
+                  seqnum);
+            }
+          }
+#endif
+        }
+        GST_OBJECT_UNLOCK (rtx);
+
+        /* push outside the lock: the srcpad task queue does the sending */
+        if (rtx_buf)
+          gst_rtp_rtx_send_push_out (rtx, rtx_buf);
+
+        gst_event_unref (event);
+        res = TRUE;
+
+        /* This event usually comes from the downstream gstrtpsession */
+      } else if (gst_structure_has_name (s, "GstRTPCollision")) {
+        guint ssrc = 0;
+
+        if (!gst_structure_get_uint (s, "ssrc", &ssrc))
+          ssrc = -1;
+
+        GST_DEBUG_OBJECT (rtx, "got ssrc collision, ssrc: %X", ssrc);
+
+        GST_OBJECT_LOCK (rtx);
+
+        /* choose another ssrc for our retransmitted stream */
+        if (g_hash_table_contains (rtx->rtx_ssrcs, GUINT_TO_POINTER (ssrc))) {
+          guint master_ssrc;
+          SSRCRtxData *data;
+
+          master_ssrc = GPOINTER_TO_UINT (g_hash_table_lookup (rtx->rtx_ssrcs,
+                  GUINT_TO_POINTER (ssrc)));
+          data = gst_rtp_rtx_send_get_ssrc_data (rtx, master_ssrc);
+
+          /* change rtx_ssrc and update the reverse map */
+          data->rtx_ssrc = gst_rtp_rtx_send_choose_ssrc (rtx, 0, FALSE);
+          g_hash_table_remove (rtx->rtx_ssrcs, GUINT_TO_POINTER (ssrc));
+          g_hash_table_insert (rtx->rtx_ssrcs,
+              GUINT_TO_POINTER (data->rtx_ssrc),
+              GUINT_TO_POINTER (master_ssrc));
+
+          GST_OBJECT_UNLOCK (rtx);
+
+          /* no need to forward to payloader because we make sure to have
+           * a different ssrc
+           */
+          gst_event_unref (event);
+          res = TRUE;
+        } else {
+          /* if master ssrc has collided, remove it from our data, as it
+           * is not going to be used any longer */
+          if (g_hash_table_contains (rtx->ssrc_data, GUINT_TO_POINTER (ssrc))) {
+            SSRCRtxData *data;
+            data = gst_rtp_rtx_send_get_ssrc_data (rtx, ssrc);
+            g_hash_table_remove (rtx->rtx_ssrcs,
+                GUINT_TO_POINTER (data->rtx_ssrc));
+            g_hash_table_remove (rtx->ssrc_data, GUINT_TO_POINTER (ssrc));
+          }
+
+          GST_OBJECT_UNLOCK (rtx);
+
+          /* forward event to payloader in case collided ssrc is
+           * master stream */
+          res = gst_pad_event_default (pad, parent, event);
+        }
+      } else {
+        res = gst_pad_event_default (pad, parent, event);
+      }
+      break;
+    }
+    default:
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+  return res;
+}
+
+/* Handle downstream events on the sink pad.
+ *
+ * FLUSH_START/STOP are forwarded and also flush/restart the internal
+ * queue and srcpad task; EOS is serialized through the queue so pending
+ * retransmissions go out first; CAPS is augmented with rtx-ssrc,
+ * rtx-seqnum-offset and (if mapped) rtx-payload before forwarding. */
+static gboolean
+gst_rtp_rtx_send_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_START:
+      gst_pad_push_event (rtx->srcpad, event);
+      gst_rtp_rtx_send_set_flushing (rtx, TRUE);
+      gst_pad_pause_task (rtx->srcpad);
+      return TRUE;
+    case GST_EVENT_FLUSH_STOP:
+      gst_pad_push_event (rtx->srcpad, event);
+      gst_rtp_rtx_send_set_flushing (rtx, FALSE);
+      gst_pad_start_task (rtx->srcpad,
+          (GstTaskFunction) gst_rtp_rtx_send_src_loop, rtx, NULL);
+      return TRUE;
+    case GST_EVENT_EOS:
+      GST_INFO_OBJECT (rtx, "Got EOS - enqueueing it");
+      gst_rtp_rtx_send_push_out (rtx, event);
+      return TRUE;
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+      GstStructure *s;
+      guint ssrc;
+      gint payload;
+      gpointer rtx_payload;
+      SSRCRtxData *data;
+
+      gst_event_parse_caps (event, &caps);
+
+      s = gst_caps_get_structure (caps, 0);
+      if (!gst_structure_get_uint (s, "ssrc", &ssrc))
+        ssrc = -1;
+      if (!gst_structure_get_int (s, "payload", &payload))
+        payload = -1;
+
+      /* warn before bailing out; this warning used to sit after the
+       * break below and could never trigger */
+      if (payload == -1)
+        GST_WARNING_OBJECT (rtx, "No payload in caps");
+
+      if (payload == -1 || ssrc == G_MAXUINT)
+        break;
+
+      GST_OBJECT_LOCK (rtx);
+      data = gst_rtp_rtx_send_get_ssrc_data (rtx, ssrc);
+      if (!g_hash_table_lookup_extended (rtx->rtx_pt_map,
+              GUINT_TO_POINTER (payload), NULL, &rtx_payload))
+        rtx_payload = GINT_TO_POINTER (-1);
+
+      /* payload is known to be valid here (we broke out above otherwise) */
+      if (GPOINTER_TO_INT (rtx_payload) == -1)
+        GST_WARNING_OBJECT (rtx, "Payload %d not in rtx-pt-map", payload);
+
+      GST_DEBUG_OBJECT (rtx,
+          "got caps for payload: %d->%d, ssrc: %u->%u : %" GST_PTR_FORMAT,
+          payload, GPOINTER_TO_INT (rtx_payload), ssrc, data->rtx_ssrc, caps);
+
+      gst_structure_get_int (s, "clock-rate", &data->clock_rate);
+
+      /* The session might need to know the RTX ssrc */
+      caps = gst_caps_copy (caps);
+      gst_caps_set_simple (caps, "rtx-ssrc", G_TYPE_UINT, data->rtx_ssrc,
+          "rtx-seqnum-offset", G_TYPE_UINT, data->seqnum_base, NULL);
+
+      if (GPOINTER_TO_INT (rtx_payload) != -1)
+        gst_caps_set_simple (caps, "rtx-payload", G_TYPE_INT,
+            GPOINTER_TO_INT (rtx_payload), NULL);
+
+      GST_DEBUG_OBJECT (rtx, "got clock-rate from caps: %d for ssrc: %u",
+          data->clock_rate, ssrc);
+      GST_OBJECT_UNLOCK (rtx);
+
+      gst_event_unref (event);
+      event = gst_event_new_caps (caps);
+      gst_caps_unref (caps);
+      break;
+    }
+    default:
+      break;
+  }
+  return gst_pad_event_default (pad, parent, event);
+}
+
+/* like rtp_jitter_buffer_get_ts_diff() */
+/* Return the RTP timestamp span of the queued history in milliseconds,
+ * coping with 32-bit RTP timestamp wrap-around.  Returns 0 when fewer
+ * than two items are queued or when the clock-rate is not known yet. */
+static guint32
+gst_rtp_rtx_send_get_ts_diff (SSRCRtxData * data)
+{
+  guint64 high_ts, low_ts;
+  BufferQueueItem *high_buf, *low_buf;
+  guint32 result;
+
+  high_buf =
+      g_sequence_get (g_sequence_iter_prev (g_sequence_get_end_iter
+          (data->queue)));
+  low_buf = g_sequence_get (g_sequence_get_begin_iter (data->queue));
+
+  /* also bail out on an unknown clock-rate: scaling by a non-positive
+   * denominator below would be invalid */
+  if (!high_buf || !low_buf || high_buf == low_buf || data->clock_rate <= 0)
+    return 0;
+
+  high_ts = high_buf->timestamp;
+  low_ts = low_buf->timestamp;
+
+  /* it needs to work if ts wraps */
+  if (high_ts >= low_ts) {
+    result = (guint32) (high_ts - low_ts);
+  } else {
+    result = (guint32) (high_ts + G_MAXUINT32 + 1 - low_ts);
+  }
+
+  /* return value in ms instead of clock ticks */
+  return (guint32) gst_util_uint64_scale_int (result, 1000, data->clock_rate);
+}
+
+/* Must be called with lock */
+/* Record @buffer in the per-SSRC retransmission history.  Buffers whose
+ * payload type is not in rtx-pt-map are not stored.  The history is then
+ * trimmed according to the max-size-packets and max-size-time properties. */
+static void
+process_buffer (GstRtpRtxSend * rtx, GstBuffer * buffer)
+{
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  BufferQueueItem *item;
+  SSRCRtxData *data;
+  guint16 seqnum;
+  guint8 payload_type;
+  guint32 ssrc, rtptime;
+
+  /* read the information we want from the buffer */
+  gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+  seqnum = gst_rtp_buffer_get_seq (&rtp);
+  payload_type = gst_rtp_buffer_get_payload_type (&rtp);
+  ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+  rtptime = gst_rtp_buffer_get_timestamp (&rtp);
+  gst_rtp_buffer_unmap (&rtp);
+
+  GST_TRACE_OBJECT (rtx, "Processing buffer seqnum: %u, ssrc: %X", seqnum,
+      ssrc);
+
+  /* do not store the buffer if its payload type is unknown */
+  if (g_hash_table_contains (rtx->rtx_pt_map, GUINT_TO_POINTER (payload_type))) {
+    data = gst_rtp_rtx_send_get_ssrc_data (rtx, ssrc);
+
+    /* fall back to the clock-rate-map property when caps did not carry
+     * a clock-rate for this ssrc yet */
+    if (data->clock_rate == 0 && rtx->clock_rate_map_structure) {
+      data->clock_rate =
+          GPOINTER_TO_INT (g_hash_table_lookup (rtx->clock_rate_map,
+              GUINT_TO_POINTER (payload_type)));
+    }
+
+    /* add current rtp buffer to queue history */
+    item = g_slice_new0 (BufferQueueItem);
+    item->seqnum = seqnum;
+    item->timestamp = rtptime;
+    item->buffer = gst_buffer_ref (buffer);
+    g_sequence_append (data->queue, item);
+
+    /* remove oldest packets from history if they are too many */
+    if (rtx->max_size_packets) {
+      while (g_sequence_get_length (data->queue) > rtx->max_size_packets)
+        g_sequence_remove (g_sequence_get_begin_iter (data->queue));
+    }
+    if (rtx->max_size_time) {
+      while (gst_rtp_rtx_send_get_ts_diff (data) > rtx->max_size_time)
+        g_sequence_remove (g_sequence_get_begin_iter (data->queue));
+    }
+  }
+}
+
+/* Sink pad chain function: remember the buffer in the retransmission
+ * history (under the object lock), then pass it through unchanged. */
+static GstFlowReturn
+gst_rtp_rtx_send_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent);
+
+  GST_OBJECT_LOCK (rtx);
+  process_buffer (rtx, buffer);
+  GST_OBJECT_UNLOCK (rtx);
+
+  return gst_pad_push (rtx->srcpad, buffer);
+}
+
+/* GstBufferListFunc adapter: store one buffer of a buffer list in the
+ * history; @user_data is the GstRtpRtxSend element. */
+static gboolean
+process_buffer_from_list (GstBuffer ** buffer, guint idx, gpointer user_data)
+{
+  GstRtpRtxSend *rtx = user_data;
+
+  process_buffer (rtx, *buffer);
+
+  return TRUE;
+}
+
+/* Sink pad chain-list function: record every buffer of the list in the
+ * retransmission history, then forward the whole list downstream. */
+static GstFlowReturn
+gst_rtp_rtx_send_chain_list (GstPad * pad, GstObject * parent,
+    GstBufferList * list)
+{
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent);
+
+  GST_OBJECT_LOCK (rtx);
+  gst_buffer_list_foreach (list, process_buffer_from_list, rtx);
+  GST_OBJECT_UNLOCK (rtx);
+
+  return gst_pad_push_list (rtx->srcpad, list);
+}
+
+/* srcpad streaming task: block on the internal GstDataQueue and push out
+ * whatever was queued — retransmitted buffers or the serialized EOS
+ * event.  When the pop fails the queue is flushing and the task pauses. */
+static void
+gst_rtp_rtx_send_src_loop (GstRtpRtxSend * rtx)
+{
+  GstDataQueueItem *data;
+
+  if (gst_data_queue_pop (rtx->queue, &data)) {
+    GST_LOG_OBJECT (rtx, "pushing rtx buffer %p", data->object);
+
+    if (G_LIKELY (GST_IS_BUFFER (data->object))) {
+      GST_OBJECT_LOCK (rtx);
+      /* Update statistics just before pushing. */
+      rtx->num_rtx_packets++;
+      GST_OBJECT_UNLOCK (rtx);
+
+      /* push transfers ownership of the buffer downstream */
+      gst_pad_push (rtx->srcpad, GST_BUFFER (data->object));
+    } else if (GST_IS_EVENT (data->object)) {
+      gst_pad_push_event (rtx->srcpad, GST_EVENT (data->object));
+
+      /* after EOS, we should not send any more buffers,
+       * even if there are more requests coming in */
+      if (GST_EVENT_TYPE (data->object) == GST_EVENT_EOS) {
+        gst_rtp_rtx_send_set_flushing (rtx, TRUE);
+      }
+    } else {
+      g_assert_not_reached ();
+    }
+
+    data->object = NULL;        /* we no longer own that object */
+    data->destroy (data);
+  } else {
+    GST_LOG_OBJECT (rtx, "flushing");
+    gst_pad_pause_task (rtx->srcpad);
+  }
+}
+
+/* Pad activate-mode function for the srcpad: only push mode is
+ * supported.  Activation starts the streaming task that drains the rtx
+ * queue; deactivation flushes the queue and stops the task. */
+static gboolean
+gst_rtp_rtx_send_activate_mode (GstPad * pad, GstObject * parent,
+    GstPadMode mode, gboolean active)
+{
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (parent);
+  gboolean ret = FALSE;
+
+  if (mode == GST_PAD_MODE_PUSH) {
+    if (active) {
+      gst_rtp_rtx_send_set_flushing (rtx, FALSE);
+      ret = gst_pad_start_task (rtx->srcpad,
+          (GstTaskFunction) gst_rtp_rtx_send_src_loop, rtx, NULL);
+    } else {
+      gst_rtp_rtx_send_set_flushing (rtx, TRUE);
+      ret = gst_pad_stop_task (rtx->srcpad);
+    }
+    GST_INFO_OBJECT (rtx, "activate_mode: active %d, ret %d", active, ret);
+  }
+
+  return ret;
+}
+
+/* GObject::get_property — every read takes the object lock because the
+ * streaming thread updates the maps and statistics concurrently. */
+static void
+gst_rtp_rtx_send_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (object);
+
+  switch (prop_id) {
+    case PROP_PAYLOAD_TYPE_MAP:
+      GST_OBJECT_LOCK (rtx);
+      g_value_set_boxed (value, rtx->rtx_pt_map_structure);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_MAX_SIZE_TIME:
+      GST_OBJECT_LOCK (rtx);
+      g_value_set_uint (value, rtx->max_size_time);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_MAX_SIZE_PACKETS:
+      GST_OBJECT_LOCK (rtx);
+      g_value_set_uint (value, rtx->max_size_packets);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_NUM_RTX_REQUESTS:
+      GST_OBJECT_LOCK (rtx);
+      g_value_set_uint (value, rtx->num_rtx_requests);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_NUM_RTX_PACKETS:
+      GST_OBJECT_LOCK (rtx);
+      g_value_set_uint (value, rtx->num_rtx_packets);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_CLOCK_RATE_MAP:
+      GST_OBJECT_LOCK (rtx);
+      g_value_set_boxed (value, rtx->clock_rate_map_structure);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstStructureForeachFunc: convert one "pt (string) -> value (uint)"
+ * field of a GstStructure into an entry of the supplied GHashTable. */
+static gboolean
+structure_to_hash_table (GQuark field_id, const GValue * value, gpointer hash)
+{
+  guint key, val;
+
+  key = atoi (g_quark_to_string (field_id));
+  val = g_value_get_uint (value);
+
+  g_hash_table_insert ((GHashTable *) hash, GUINT_TO_POINTER (key),
+      GUINT_TO_POINTER (val));
+
+  return TRUE;
+}
+
+/* GObject::set_property — the map-type properties (GstStructure) are
+ * additionally mirrored into hash tables for fast lookup on the
+ * streaming path; all writes happen under the object lock. */
+static void
+gst_rtp_rtx_send_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (object);
+
+  switch (prop_id) {
+    case PROP_SSRC_MAP:
+      GST_OBJECT_LOCK (rtx);
+      if (rtx->external_ssrc_map)
+        gst_structure_free (rtx->external_ssrc_map);
+      rtx->external_ssrc_map = g_value_dup_boxed (value);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_PAYLOAD_TYPE_MAP:
+      GST_OBJECT_LOCK (rtx);
+      if (rtx->rtx_pt_map_structure)
+        gst_structure_free (rtx->rtx_pt_map_structure);
+      rtx->rtx_pt_map_structure = g_value_dup_boxed (value);
+      g_hash_table_remove_all (rtx->rtx_pt_map);
+      gst_structure_foreach (rtx->rtx_pt_map_structure, structure_to_hash_table,
+          rtx->rtx_pt_map);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_MAX_SIZE_TIME:
+      GST_OBJECT_LOCK (rtx);
+      rtx->max_size_time = g_value_get_uint (value);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_MAX_SIZE_PACKETS:
+      GST_OBJECT_LOCK (rtx);
+      rtx->max_size_packets = g_value_get_uint (value);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    case PROP_CLOCK_RATE_MAP:
+      GST_OBJECT_LOCK (rtx);
+      if (rtx->clock_rate_map_structure)
+        gst_structure_free (rtx->clock_rate_map_structure);
+      rtx->clock_rate_map_structure = g_value_dup_boxed (value);
+      g_hash_table_remove_all (rtx->clock_rate_map);
+      gst_structure_foreach (rtx->clock_rate_map_structure,
+          structure_to_hash_table, rtx->clock_rate_map);
+      GST_OBJECT_UNLOCK (rtx);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstElement::change_state — chain up, then drop the stored history and
+ * counters when leaving PAUSED.  (The previous pre-chain-up switch was
+ * empty and has been removed as dead code.) */
+static GstStateChangeReturn
+gst_rtp_rtx_send_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+  GstRtpRtxSend *rtx = GST_RTP_RTX_SEND (element);
+
+  ret =
+      GST_ELEMENT_CLASS (gst_rtp_rtx_send_parent_class)->change_state (element,
+      transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      /* clear rtx history, ssrc maps and statistics */
+      gst_rtp_rtx_send_reset (rtx);
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
diff --git a/gst/rtpmanager/gstrtprtxsend.h b/gst/rtpmanager/gstrtprtxsend.h
new file mode 100644
index 0000000000..b70422f847
--- /dev/null
+++ b/gst/rtpmanager/gstrtprtxsend.h
@@ -0,0 +1,89 @@
+/* RTP Retransmission sender element for GStreamer
+ *
+ * gstrtprtxsend.h:
+ *
+ * Copyright (C) 2013 Collabora Ltd.
+ * @author Julien Isorce <julien.isorce@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_RTX_SEND_H__
+#define __GST_RTP_RTX_SEND_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/base/gstdataqueue.h>
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_RTX_SEND (gst_rtp_rtx_send_get_type())
+#define GST_RTP_RTX_SEND(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_RTX_SEND, GstRtpRtxSend))
+#define GST_RTP_RTX_SEND_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_RTX_SEND, GstRtpRtxSendClass))
+#define GST_RTP_RTX_SEND_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_RTP_RTX_SEND, GstRtpRtxSendClass))
+#define GST_IS_RTP_RTX_SEND(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_RTX_SEND))
+#define GST_IS_RTP_RTX_SEND_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_RTX_SEND)) /* fix: parameter was named obj but expanded klass */
+typedef struct _GstRtpRtxSend GstRtpRtxSend;
+typedef struct _GstRtpRtxSendClass GstRtpRtxSendClass;
+
+struct _GstRtpRtxSend
+{
+  GstElement element;
+
+  /* pad */
+  GstPad *sinkpad;
+  GstPad *srcpad;
+
+  /* rtp packets that will be pushed out */
+  GstDataQueue *queue;
+
+  /* ssrc -> SSRCRtxData */
+  GHashTable *ssrc_data;
+  /* rtx ssrc -> master ssrc */
+  GHashTable *rtx_ssrcs;
+
+  /* master ssrc -> rtx ssrc (property) */
+  GstStructure *external_ssrc_map;
+
+  /* orig pt (uint) -> rtx pt (uint) */
+  GHashTable *rtx_pt_map;
+  /* orig pt (string) -> rtx pt (uint) */
+  GstStructure *rtx_pt_map_structure;
+
+  /* orig pt (uint) -> clock rate (uint) */
+  GHashTable *clock_rate_map;
+  /* orig pt (string) -> clock rate (uint) */
+  GstStructure *clock_rate_map_structure;
+
+  /* buffering control properties */
+  guint max_size_time;
+  guint max_size_packets;
+
+  /* statistics */
+  guint num_rtx_requests;
+  guint num_rtx_packets;
+};
+
+struct _GstRtpRtxSendClass
+{
+  GstElementClass parent_class;
+};
+
+GType gst_rtp_rtx_send_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtprtxsend);
+
+G_END_DECLS
+#endif /* __GST_RTP_RTX_SEND_H__ */
diff --git a/gst/rtpmanager/gstrtpsession.c b/gst/rtpmanager/gstrtpsession.c
new file mode 100644
index 0000000000..424e933f17
--- /dev/null
+++ b/gst/rtpmanager/gstrtpsession.c
@@ -0,0 +1,2902 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpsession
+ * @title: rtpsession
+ * @see_also: rtpjitterbuffer, rtpbin, rtpptdemux, rtpssrcdemux
+ *
+ * The RTP session manager models participants with unique SSRC in an RTP
+ * session. This session can be used to send and receive RTP and RTCP packets.
+ * Based on what REQUEST pads are requested from the session manager, specific
+ * functionality can be activated.
+ *
+ * The session manager currently implements RFC 3550 including:
+ *
+ * * RTP packet validation based on consecutive sequence numbers.
+ *
+ * * Maintenance of the SSRC participant database.
+ *
+ * * Keeping per participant statistics based on received RTCP packets.
+ *
+ * * Scheduling of RR/SR RTCP packets.
+ *
+ * * Support for multiple sender SSRC.
+ *
+ * The rtpsession will not demux packets based on SSRC or payload type, nor will
+ * it correct for packet reordering and jitter. Use #GstRtpSsrcDemux,
+ * #GstRtpPtDemux and GstRtpJitterBuffer in addition to #GstRtpSession to
+ * perform these tasks. It is usually a good idea to use #GstRtpBin, which
+ * combines all these features in one element.
+ *
+ * To use #GstRtpSession as an RTP receiver, request a recv_rtp_sink pad, which will
+ * automatically create a recv_rtp_src pad. Data received on the recv_rtp_sink pad
+ * will be processed in the session and after being validated forwarded on the
+ * recv_rtp_src pad.
+ *
+ * To also use #GstRtpSession as an RTCP receiver, request a recv_rtcp_sink pad,
+ * which will automatically create a sync_src pad. Packets received on the RTCP
+ * pad will be used by the session manager to update the stats and database of
+ * the other participants. SR packets will be forwarded on the sync_src pad
+ * so that they can be used to perform inter-stream synchronisation when needed.
+ *
+ * If you want the session manager to generate and send RTCP packets, request
+ * the send_rtcp_src pad. Packets pushed on this pad contain SR/RR RTCP reports
+ * that should be sent to all participants in the session.
+ *
+ * To use #GstRtpSession as a sender, request a send_rtp_sink pad, which will
+ * automatically create a send_rtp_src pad. The session manager will
+ * forward the packets on the send_rtp_src pad after updating its internal state.
+ *
+ * The session manager needs the clock-rate of the payload types it is handling
+ * and will signal the #GstRtpSession::request-pt-map signal when it needs such a
+ * mapping. One can clear the cached values with the #GstRtpSession::clear-pt-map
+ * signal.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 udpsrc port=5000 caps="application/x-rtp, ..." ! .recv_rtp_sink rtpsession .recv_rtp_src ! rtptheoradepay ! theoradec ! xvimagesink
+ * ]| Receive theora RTP packets from port 5000 and send them to the depayloader,
+ * decoder and display. Note that the application/x-rtp caps on udpsrc should be
+ * configured based on some negotiation process such as RTSP for this pipeline
+ * to work correctly.
+ * |[
+ * gst-launch-1.0 udpsrc port=5000 caps="application/x-rtp, ..." ! .recv_rtp_sink rtpsession name=session \
+ * .recv_rtp_src ! rtptheoradepay ! theoradec ! xvimagesink \
+ * udpsrc port=5001 caps="application/x-rtcp" ! session.recv_rtcp_sink
+ * ]| Receive theora RTP packets from port 5000 and send them to the depayloader,
+ * decoder and display. Receive RTCP packets from port 5001 and process them in
+ * the session manager.
+ * Note that the application/x-rtp caps on udpsrc should be
+ * configured based on some negotiation process such as RTSP for this pipeline
+ * to work correctly.
+ * |[
+ * gst-launch-1.0 videotestsrc ! theoraenc ! rtptheorapay ! .send_rtp_sink rtpsession .send_rtp_src ! udpsink port=5000
+ * ]| Send theora RTP packets through the session manager and out on UDP port
+ * 5000.
+ * |[
+ * gst-launch-1.0 videotestsrc ! theoraenc ! rtptheorapay ! .send_rtp_sink rtpsession name=session .send_rtp_src \
+ * ! udpsink port=5000 session.send_rtcp_src ! udpsink port=5001
+ * ]| Send theora RTP packets through the session manager and out on UDP port
+ * 5000. Send RTCP packets on port 5001. Note that this pipeline will not preroll
+ * correctly because the second udpsink will not preroll correctly (no RTCP
+ * packets are sent in the PAUSED state). Applications should manually set and
+ * keep (see gst_element_set_locked_state()) the RTCP udpsink to the PLAYING state.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include <gst/glib-compat-private.h>
+
+#include "gstrtpsession.h"
+#include "rtpsession.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_session_debug);
+#define GST_CAT_DEFAULT gst_rtp_session_debug
+
+#define GST_TYPE_RTP_NTP_TIME_SOURCE (gst_rtp_ntp_time_source_get_type ())
+/* Register the GstRtpNtpTimeSource enum GType exactly once.  Uses
+ * g_once_init_enter()/g_once_init_leave() so concurrent first calls are
+ * safe (the previous plain `if (!type)` check was racy). */
+GType
+gst_rtp_ntp_time_source_get_type (void)
+{
+  static gsize type = 0;
+  static const GEnumValue values[] = {
+    {GST_RTP_NTP_TIME_SOURCE_NTP, "NTP time based on realtime clock", "ntp"},
+    {GST_RTP_NTP_TIME_SOURCE_UNIX, "UNIX time based on realtime clock", "unix"},
+    {GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME,
+          "Running time based on pipeline clock",
+        "running-time"},
+    {GST_RTP_NTP_TIME_SOURCE_CLOCK_TIME, "Pipeline clock time", "clock-time"},
+    {0, NULL, NULL},
+  };
+
+  if (g_once_init_enter (&type)) {
+    GType tmp = g_enum_register_static ("GstRtpNtpTimeSource", values);
+    g_once_init_leave (&type, (gsize) tmp);
+  }
+  return (GType) type;
+}
+
+/* sink pads */
+/* all sink pads are request pads: requesting one enables the matching
+ * RTP/RTCP receive or send functionality of the session */
+static GstStaticPadTemplate rtpsession_recv_rtp_sink_template =
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("application/x-rtp")
+    );
+
+static GstStaticPadTemplate rtpsession_recv_rtcp_sink_template =
+GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("application/x-rtcp")
+    );
+
+static GstStaticPadTemplate rtpsession_send_rtp_sink_template =
+GST_STATIC_PAD_TEMPLATE ("send_rtp_sink",
+    GST_PAD_SINK,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("application/x-rtp")
+    );
+
+/* src pads */
+/* recv_rtp_src, sync_src and send_rtp_src are sometimes pads, created
+ * automatically when the corresponding sink pad is requested;
+ * send_rtcp_src must be requested explicitly */
+static GstStaticPadTemplate rtpsession_recv_rtp_src_template =
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_src",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS ("application/x-rtp")
+    );
+
+static GstStaticPadTemplate rtpsession_sync_src_template =
+GST_STATIC_PAD_TEMPLATE ("sync_src",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS ("application/x-rtcp")
+    );
+
+static GstStaticPadTemplate rtpsession_send_rtp_src_template =
+GST_STATIC_PAD_TEMPLATE ("send_rtp_src",
+    GST_PAD_SRC,
+    GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS ("application/x-rtp")
+    );
+
+static GstStaticPadTemplate rtpsession_send_rtcp_src_template =
+GST_STATIC_PAD_TEMPLATE ("send_rtcp_src",
+    GST_PAD_SRC,
+    GST_PAD_REQUEST,
+    GST_STATIC_CAPS ("application/x-rtcp")
+    );
+
+/* signals and args */
+enum
+{
+  SIGNAL_REQUEST_PT_MAP,
+  SIGNAL_CLEAR_PT_MAP,
+
+  SIGNAL_ON_NEW_SSRC,
+  SIGNAL_ON_SSRC_COLLISION,
+  SIGNAL_ON_SSRC_VALIDATED,
+  SIGNAL_ON_SSRC_ACTIVE,
+  SIGNAL_ON_SSRC_SDES,
+  SIGNAL_ON_BYE_SSRC,
+  SIGNAL_ON_BYE_TIMEOUT,
+  SIGNAL_ON_TIMEOUT,
+  SIGNAL_ON_SENDER_TIMEOUT,
+  SIGNAL_ON_NEW_SENDER_SSRC,
+  SIGNAL_ON_SENDER_SSRC_ACTIVE,
+  LAST_SIGNAL
+};
+
+/* property defaults */
+#define DEFAULT_BANDWIDTH 0
+#define DEFAULT_RTCP_FRACTION RTP_STATS_RTCP_FRACTION
+#define DEFAULT_RTCP_RR_BANDWIDTH -1
+#define DEFAULT_RTCP_RS_BANDWIDTH -1
+#define DEFAULT_SDES NULL
+#define DEFAULT_NUM_SOURCES 0
+#define DEFAULT_NUM_ACTIVE_SOURCES 0
+#define DEFAULT_USE_PIPELINE_CLOCK FALSE
+#define DEFAULT_RTCP_MIN_INTERVAL (RTP_STATS_MIN_INTERVAL * GST_SECOND)
+#define DEFAULT_PROBATION RTP_DEFAULT_PROBATION
+#define DEFAULT_MAX_DROPOUT_TIME 60000
+#define DEFAULT_MAX_MISORDER_TIME 2000
+#define DEFAULT_RTP_PROFILE GST_RTP_PROFILE_AVP
+#define DEFAULT_NTP_TIME_SOURCE GST_RTP_NTP_TIME_SOURCE_NTP
+#define DEFAULT_RTCP_SYNC_SEND_TIME TRUE
+
+enum
+{
+  PROP_0,
+  PROP_BANDWIDTH,
+  PROP_RTCP_FRACTION,
+  PROP_RTCP_RR_BANDWIDTH,
+  PROP_RTCP_RS_BANDWIDTH,
+  PROP_SDES,
+  PROP_NUM_SOURCES,
+  PROP_NUM_ACTIVE_SOURCES,
+  PROP_INTERNAL_SESSION,
+  PROP_USE_PIPELINE_CLOCK,
+  PROP_RTCP_MIN_INTERVAL,
+  PROP_PROBATION,
+  PROP_MAX_DROPOUT_TIME,
+  PROP_MAX_MISORDER_TIME,
+  PROP_STATS,
+  PROP_TWCC_STATS,
+  PROP_RTP_PROFILE,
+  PROP_NTP_TIME_SOURCE,
+  PROP_RTCP_SYNC_SEND_TIME
+};
+
+/* lock/cond protecting the private state below */
+#define GST_RTP_SESSION_LOCK(sess)   g_mutex_lock (&(sess)->priv->lock)
+#define GST_RTP_SESSION_UNLOCK(sess) g_mutex_unlock (&(sess)->priv->lock)
+
+#define GST_RTP_SESSION_WAIT(sess)   g_cond_wait (&(sess)->priv->cond, &(sess)->priv->lock)
+#define GST_RTP_SESSION_SIGNAL(sess) g_cond_signal (&(sess)->priv->cond)
+
+struct _GstRtpSessionPrivate
+{
+  GMutex lock;
+  GCond cond;
+  GstClock *sysclock;
+
+  /* the internal session object doing the RFC 3550 bookkeeping */
+  RTPSession *session;
+
+  /* thread for sending out RTCP */
+  GstClockID id;
+  gboolean stop_thread;
+  GThread *thread;
+  gboolean thread_stopped;
+  gboolean wait_send;
+
+  /* caps mapping */
+  GHashTable *ptmap;
+
+  GstClockTime send_latency;
+
+  gboolean use_pipeline_clock;
+  GstRtpNtpTimeSource ntp_time_source;
+  gboolean rtcp_sync_send_time;
+
+  /* retransmission-request counters exposed via the stats property */
+  guint recv_rtx_req_count;
+  guint sent_rtx_req_count;
+
+  GstStructure *last_twcc_stats;
+
+  /*
+   * This is the list of processed packets in the receive path when upstream
+   * pushed a buffer list.
+   */
+  GstBufferList *processed_list;
+};
+
+/* callbacks to handle actions from the session manager */
+static GstFlowReturn gst_rtp_session_process_rtp (RTPSession * sess,
+    RTPSource * src, GstBuffer * buffer, gpointer user_data);
+static GstFlowReturn gst_rtp_session_send_rtp (RTPSession * sess,
+    RTPSource * src, gpointer data, gpointer user_data);
+static GstFlowReturn gst_rtp_session_send_rtcp (RTPSession * sess,
+    RTPSource * src, GstBuffer * buffer, gboolean eos, gpointer user_data);
+static GstFlowReturn gst_rtp_session_sync_rtcp (RTPSession * sess,
+    GstBuffer * buffer, gpointer user_data);
+static gint gst_rtp_session_clock_rate (RTPSession * sess, guint8 payload,
+    gpointer user_data);
+static void gst_rtp_session_reconsider (RTPSession * sess, gpointer user_data);
+static void gst_rtp_session_request_key_unit (RTPSession * sess, guint32 ssrc,
+    gboolean all_headers, gpointer user_data);
+static GstClockTime gst_rtp_session_request_time (RTPSession * session,
+    gpointer user_data);
+static void gst_rtp_session_notify_nack (RTPSession * sess,
+    guint16 seqnum, guint16 blp, guint32 ssrc, gpointer user_data);
+static void gst_rtp_session_notify_twcc (RTPSession * sess,
+    GstStructure * twcc_packets, GstStructure * twcc_stats, gpointer user_data);
+static void gst_rtp_session_reconfigure (RTPSession * sess, gpointer user_data);
+static void gst_rtp_session_notify_early_rtcp (RTPSession * sess,
+    gpointer user_data);
+static GstFlowReturn gst_rtp_session_chain_recv_rtp (GstPad * pad,
+    GstObject * parent, GstBuffer * buffer);
+static GstFlowReturn gst_rtp_session_chain_recv_rtp_list (GstPad * pad,
+    GstObject * parent, GstBufferList * list);
+static GstFlowReturn gst_rtp_session_chain_recv_rtcp (GstPad * pad,
+    GstObject * parent, GstBuffer * buffer);
+static GstFlowReturn gst_rtp_session_chain_send_rtp (GstPad * pad,
+    GstObject * parent, GstBuffer * buffer);
+static GstFlowReturn gst_rtp_session_chain_send_rtp_list (GstPad * pad,
+    GstObject * parent, GstBufferList * list);
+
+/* positional initializer: the order must match the field order of the
+ * RTPSessionCallbacks declaration */
+static RTPSessionCallbacks callbacks = {
+  gst_rtp_session_process_rtp,
+  gst_rtp_session_send_rtp,
+  gst_rtp_session_sync_rtcp,
+  gst_rtp_session_send_rtcp,
+  gst_rtp_session_clock_rate,
+  gst_rtp_session_reconsider,
+  gst_rtp_session_request_key_unit,
+  gst_rtp_session_request_time,
+  gst_rtp_session_notify_nack,
+  gst_rtp_session_notify_twcc,
+  gst_rtp_session_reconfigure,
+  gst_rtp_session_notify_early_rtcp
+};
+
+/* GObject vmethods */
+static void gst_rtp_session_finalize (GObject * object);
+static void gst_rtp_session_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_rtp_session_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+/* GstElement vmethods */
+static GstStateChangeReturn gst_rtp_session_change_state (GstElement * element,
+    GstStateChange transition);
+static GstPad *gst_rtp_session_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_rtp_session_release_pad (GstElement * element, GstPad * pad);
+
+static gboolean gst_rtp_session_sink_setcaps (GstPad * pad,
+    GstRtpSession * rtpsession, GstCaps * caps);
+static gboolean gst_rtp_session_setcaps_send_rtp (GstPad * pad,
+    GstRtpSession * rtpsession, GstCaps * caps);
+
+static void gst_rtp_session_clear_pt_map (GstRtpSession * rtpsession);
+
+static GstStructure *gst_rtp_session_create_stats (GstRtpSession * rtpsession);
+
+static guint gst_rtp_session_signals[LAST_SIGNAL] = { 0 };
+
+/* relay the internal session's new-ssrc notification as an element signal */
+static void
+on_new_ssrc (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_NEW_SSRC], 0,
+      src->ssrc);
+}
+
+/* A collision on one of our SSRCs was detected: emit the
+ * on-ssrc-collision signal and, when a send_rtp_sink pad exists, send a
+ * "GstRTPCollision" custom upstream event so that the sender side (e.g.
+ * payloader or rtprtxsend) can switch to a new SSRC. */
+static void
+on_ssrc_collision (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  GstPad *send_rtp_sink;
+
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_SSRC_COLLISION], 0,
+      src->ssrc);
+
+  /* take a ref on the pad under the lock so it cannot disappear while we
+   * push the event below */
+  GST_RTP_SESSION_LOCK (sess);
+  if ((send_rtp_sink = sess->send_rtp_sink))
+    gst_object_ref (send_rtp_sink);
+  GST_RTP_SESSION_UNLOCK (sess);
+
+  if (send_rtp_sink) {
+    GstStructure *structure;
+    GstEvent *event;
+    RTPSource *internal_src;
+    guint32 suggested_ssrc;
+
+    structure = gst_structure_new ("GstRTPCollision", "ssrc", G_TYPE_UINT,
+        (guint) src->ssrc, NULL);
+
+    /* if there is no source using the suggested ssrc, most probably because
+     * this ssrc has just collided, suggest upstream to use it */
+    suggested_ssrc = rtp_session_suggest_ssrc (session, NULL);
+    internal_src = rtp_session_get_source_by_ssrc (session, suggested_ssrc);
+    if (!internal_src)
+      gst_structure_set (structure, "suggested-ssrc", G_TYPE_UINT,
+          (guint) suggested_ssrc, NULL);
+    else
+      g_object_unref (internal_src);
+
+    event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, structure);
+    gst_pad_push_event (send_rtp_sink, event);
+    gst_object_unref (send_rtp_sink);
+  }
+}
+
+/* RTPSession "on-ssrc-validated" callback: relay to the element signal. */
+static void
+on_ssrc_validated (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_SSRC_VALIDATED], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-ssrc-active" callback: relay to the element signal. */
+static void
+on_ssrc_active (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_SSRC_ACTIVE], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-ssrc-sdes" callback: post the source's SDES structure as an
+ * element message on the bus, then relay the element signal. */
+static void
+on_ssrc_sdes (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  GstStructure *s;
+  GstMessage *m;
+
+  /* convert the new SDES info into a message */
+  RTP_SESSION_LOCK (session);
+  g_object_get (src, "sdes", &s, NULL);
+  RTP_SESSION_UNLOCK (session);
+
+  /* message takes ownership of the structure */
+  m = gst_message_new_custom (GST_MESSAGE_ELEMENT, GST_OBJECT (sess), s);
+  gst_element_post_message (GST_ELEMENT_CAST (sess), m);
+
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_SSRC_SDES], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-bye-ssrc" callback: relay to the element signal. */
+static void
+on_bye_ssrc (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_BYE_SSRC], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-bye-timeout" callback: relay to the element signal. */
+static void
+on_bye_timeout (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_BYE_TIMEOUT], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-timeout" callback: relay to the element signal. */
+static void
+on_timeout (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_TIMEOUT], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-sender-timeout" callback: relay to the element signal. */
+static void
+on_sender_timeout (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_SENDER_TIMEOUT], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-new-sender-ssrc" callback: relay to the element signal. */
+static void
+on_new_sender_ssrc (RTPSession * session, RTPSource * src, GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_NEW_SENDER_SSRC], 0,
+      src->ssrc);
+}
+
+/* RTPSession "on-sender-ssrc-active" callback: relay to the element signal. */
+static void
+on_sender_ssrc_active (RTPSession * session, RTPSource * src,
+    GstRtpSession * sess)
+{
+  g_signal_emit (sess, gst_rtp_session_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE], 0,
+      src->ssrc);
+}
+
+/* Forward the internal session's "notify::stats" to our own "stats"
+ * property notification. */
+static void
+on_notify_stats (RTPSession * session, GParamSpec * spec,
+    GstRtpSession * rtpsession)
+{
+  g_object_notify (G_OBJECT (rtpsession), "stats");
+}
+
+/* GObject type registration; _WITH_PRIVATE allocates GstRtpSessionPrivate
+ * alongside each instance (freed automatically with the instance). */
+#define gst_rtp_session_parent_class parent_class
+G_DEFINE_TYPE_WITH_PRIVATE (GstRtpSession, gst_rtp_session, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (rtpsession, "rtpsession", GST_RANK_NONE,
+    GST_TYPE_RTP_SESSION);
+
+/* Class initialisation: install signals, properties, vmethods and pad
+ * templates for the rtpsession element. */
+static void
+gst_rtp_session_class_init (GstRtpSessionClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->finalize = gst_rtp_session_finalize;
+  gobject_class->set_property = gst_rtp_session_set_property;
+  gobject_class->get_property = gst_rtp_session_get_property;
+
+  /**
+   * GstRtpSession::request-pt-map:
+   * @sess: the object which received the signal
+   * @pt: the pt
+   *
+   * Request the payload type as #GstCaps for @pt.
+   */
+  gst_rtp_session_signals[SIGNAL_REQUEST_PT_MAP] =
+      g_signal_new ("request-pt-map", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass, request_pt_map),
+      NULL, NULL, NULL, GST_TYPE_CAPS, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::clear-pt-map:
+   * @sess: the object which received the signal
+   *
+   * Clear the cached pt-maps requested with #GstRtpSession::request-pt-map.
+   */
+  gst_rtp_session_signals[SIGNAL_CLEAR_PT_MAP] =
+      g_signal_new ("clear-pt-map", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+      G_STRUCT_OFFSET (GstRtpSessionClass, clear_pt_map),
+      NULL, NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+  /**
+   * GstRtpSession::on-new-ssrc:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify of a new SSRC that entered @session.
+   */
+  gst_rtp_session_signals[SIGNAL_ON_NEW_SSRC] =
+      g_signal_new ("on-new-ssrc", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass, on_new_ssrc),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::on-ssrc-collision:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify when we have an SSRC collision
+   */
+  gst_rtp_session_signals[SIGNAL_ON_SSRC_COLLISION] =
+      g_signal_new ("on-ssrc-collision", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass,
+          on_ssrc_collision), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::on-ssrc-validated:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify of a new SSRC that became validated.
+   */
+  gst_rtp_session_signals[SIGNAL_ON_SSRC_VALIDATED] =
+      g_signal_new ("on-ssrc-validated", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass,
+          on_ssrc_validated), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::on-ssrc-active:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify of a SSRC that is active, i.e., sending RTCP.
+   */
+  gst_rtp_session_signals[SIGNAL_ON_SSRC_ACTIVE] =
+      g_signal_new ("on-ssrc-active", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass,
+          on_ssrc_active), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::on-ssrc-sdes:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify that a new SDES was received for SSRC.
+   */
+  gst_rtp_session_signals[SIGNAL_ON_SSRC_SDES] =
+      g_signal_new ("on-ssrc-sdes", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass, on_ssrc_sdes),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+
+  /**
+   * GstRtpSession::on-bye-ssrc:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify of an SSRC that became inactive because of a BYE packet.
+   */
+  gst_rtp_session_signals[SIGNAL_ON_BYE_SSRC] =
+      g_signal_new ("on-bye-ssrc", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass, on_bye_ssrc),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::on-bye-timeout:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify of an SSRC that has timed out because of BYE
+   */
+  gst_rtp_session_signals[SIGNAL_ON_BYE_TIMEOUT] =
+      g_signal_new ("on-bye-timeout", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass, on_bye_timeout),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::on-timeout:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify of an SSRC that has timed out
+   */
+  gst_rtp_session_signals[SIGNAL_ON_TIMEOUT] =
+      g_signal_new ("on-timeout", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass, on_timeout),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+  /**
+   * GstRtpSession::on-sender-timeout:
+   * @sess: the object which received the signal
+   * @ssrc: the SSRC
+   *
+   * Notify of a sender SSRC that has timed out and became a receiver
+   */
+  gst_rtp_session_signals[SIGNAL_ON_SENDER_TIMEOUT] =
+      g_signal_new ("on-sender-timeout", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass,
+          on_sender_timeout), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+
+  /**
+   * GstRtpSession::on-new-sender-ssrc:
+   * @sess: the object which received the signal
+   * @ssrc: the sender SSRC
+   *
+   * Notify of a new sender SSRC that entered @session.
+   *
+   * Since: 1.8
+   */
+  /* NOTE(review): the class closure offset below points at the on_new_ssrc
+   * vfunc, not a dedicated on_new_sender_ssrc slot — verify against the
+   * class struct in the header whether this sharing is intentional. */
+  gst_rtp_session_signals[SIGNAL_ON_NEW_SENDER_SSRC] =
+      g_signal_new ("on-new-sender-ssrc", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass, on_new_ssrc),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+
+  /**
+   * GstRtpSession::on-sender-ssrc-active:
+   * @sess: the object which received the signal
+   * @ssrc: the sender SSRC
+   *
+   * Notify of a sender SSRC that is active, i.e., sending RTCP.
+   *
+   * Since: 1.8
+   */
+  /* NOTE(review): as above, this reuses the on_ssrc_active vfunc offset
+   * rather than a sender-specific slot — confirm this is intended. */
+  gst_rtp_session_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE] =
+      g_signal_new ("on-sender-ssrc-active", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRtpSessionClass,
+          on_ssrc_active), NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+
+  /* most bandwidth/RTCP tuning properties below are simple proxies for the
+   * same-named properties on the internal RTPSession object */
+  g_object_class_install_property (gobject_class, PROP_BANDWIDTH,
+      g_param_spec_double ("bandwidth", "Bandwidth",
+          "The bandwidth of the session in bytes per second (0 for auto-discover)",
+          0.0, G_MAXDOUBLE, DEFAULT_BANDWIDTH,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_RTCP_FRACTION,
+      g_param_spec_double ("rtcp-fraction", "RTCP Fraction",
+          "The RTCP bandwidth of the session in bytes per second "
+          "(or as a real fraction of the RTP bandwidth if < 1.0)",
+          0.0, G_MAXDOUBLE, DEFAULT_RTCP_FRACTION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_RTCP_RR_BANDWIDTH,
+      g_param_spec_int ("rtcp-rr-bandwidth", "RTCP RR bandwidth",
+          "The RTCP bandwidth used for receivers in bytes per second (-1 = default)",
+          -1, G_MAXINT, DEFAULT_RTCP_RR_BANDWIDTH,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_RTCP_RS_BANDWIDTH,
+      g_param_spec_int ("rtcp-rs-bandwidth", "RTCP RS bandwidth",
+          "The RTCP bandwidth used for senders in bytes per second (-1 = default)",
+          -1, G_MAXINT, DEFAULT_RTCP_RS_BANDWIDTH,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_SDES,
+      g_param_spec_boxed ("sdes", "SDES",
+          "The SDES items of this session",
+          GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
+          | GST_PARAM_DOC_SHOW_DEFAULT));
+
+  g_object_class_install_property (gobject_class, PROP_NUM_SOURCES,
+      g_param_spec_uint ("num-sources", "Num Sources",
+          "The number of sources in the session", 0, G_MAXUINT,
+          DEFAULT_NUM_SOURCES, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_NUM_ACTIVE_SOURCES,
+      g_param_spec_uint ("num-active-sources", "Num Active Sources",
+          "The number of active sources in the session", 0, G_MAXUINT,
+          DEFAULT_NUM_ACTIVE_SOURCES,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_INTERNAL_SESSION,
+      g_param_spec_object ("internal-session", "Internal Session",
+          "The internal RTPSession object", RTP_TYPE_SESSION,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_USE_PIPELINE_CLOCK,
+      g_param_spec_boolean ("use-pipeline-clock", "Use pipeline clock",
+          "Use the pipeline running-time to set the NTP time in the RTCP SR messages "
+          "(DEPRECATED: Use ntp-time-source property)",
+          DEFAULT_USE_PIPELINE_CLOCK,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+  g_object_class_install_property (gobject_class, PROP_RTCP_MIN_INTERVAL,
+      g_param_spec_uint64 ("rtcp-min-interval", "Minimum RTCP interval",
+          "Minimum interval between Regular RTCP packet (in ns)",
+          0, G_MAXUINT64, DEFAULT_RTCP_MIN_INTERVAL,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_PROBATION,
+      g_param_spec_uint ("probation", "Number of probations",
+          "Consecutive packet sequence numbers to accept the source",
+          0, G_MAXUINT, DEFAULT_PROBATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_DROPOUT_TIME,
+      g_param_spec_uint ("max-dropout-time", "Max dropout time",
+          "The maximum time (milliseconds) of missing packets tolerated.",
+          0, G_MAXUINT, DEFAULT_MAX_DROPOUT_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_MISORDER_TIME,
+      g_param_spec_uint ("max-misorder-time", "Max misorder time",
+          "The maximum time (milliseconds) of misordered packets tolerated.",
+          0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstRtpSession:stats:
+   *
+   * Various session statistics. This property returns a #GstStructure
+   * with name `application/x-rtp-session-stats` with the following fields:
+   *
+   * * "recv-rtx-req-count"  G_TYPE_UINT   The number of retransmission events
+   *      received from downstream (in receiver mode) (Since 1.16)
+   * * "sent-rtx-req-count" G_TYPE_UINT   The number of retransmission events
+   *      sent downstream (in sender mode) (Since 1.16)
+   * * "rtx-count"      G_TYPE_UINT   DEPRECATED Since 1.16, same as
+   *      "recv-rtx-req-count".
+   * * "rtx-drop-count" G_TYPE_UINT   The number of retransmission events
+   *      dropped (due to bandwidth constraints)
+   * * "sent-nack-count" G_TYPE_UINT   Number of NACKs sent
+   * * "recv-nack-count" G_TYPE_UINT   Number of NACKs received
+   * * "source-stats"   G_TYPE_BOXED  GValueArray of #RTPSource:stats for all
+   *      RTP sources (Since 1.8)
+   *
+   * Since: 1.4
+   */
+  g_object_class_install_property (gobject_class, PROP_STATS,
+      g_param_spec_boxed ("stats", "Statistics",
+          "Various statistics", GST_TYPE_STRUCTURE,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstRtpSession:twcc-stats:
+   *
+   * Various statistics derived from TWCC. This property returns a GstStructure
+   * with name RTPTWCCStats with the following fields:
+   *
+   *  "bitrate-sent"     G_TYPE_UINT    The actual sent bitrate of TWCC packets
+   *  "bitrate-recv"     G_TYPE_UINT    The estimated bitrate for the receiver.
+   *  "packets-sent"     G_TYPE_UINT    Number of packets sent
+   *  "packets-recv"     G_TYPE_UINT    Number of packets reported received
+   *  "packet-loss-pct"  G_TYPE_DOUBLE  Packet loss percentage, based on
+   *      packets reported as lost from the receiver.
+   *  "avg-delta-of-delta", G_TYPE_INT64 In nanoseconds, a moving window
+   *      average of the difference in inter-packet spacing between
+   *      sender and receiver. A sudden increase in this number can indicate
+   *      network congestion.
+   *
+   * Since: 1.18
+   */
+  g_object_class_install_property (gobject_class, PROP_TWCC_STATS,
+      g_param_spec_boxed ("twcc-stats", "TWCC Statistics",
+          "Various statistics from TWCC", GST_TYPE_STRUCTURE,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_RTP_PROFILE,
+      g_param_spec_enum ("rtp-profile", "RTP Profile",
+          "RTP profile to use", GST_TYPE_RTP_PROFILE, DEFAULT_RTP_PROFILE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_NTP_TIME_SOURCE,
+      g_param_spec_enum ("ntp-time-source", "NTP Time Source",
+          "NTP time source for RTCP packets",
+          GST_TYPE_RTP_NTP_TIME_SOURCE, DEFAULT_NTP_TIME_SOURCE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_RTCP_SYNC_SEND_TIME,
+      g_param_spec_boolean ("rtcp-sync-send-time", "RTCP Sync Send Time",
+          "Use send time or capture time for RTCP sync "
+          "(TRUE = send time, FALSE = capture time)",
+          DEFAULT_RTCP_SYNC_SEND_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_session_change_state);
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_rtp_session_request_new_pad);
+  gstelement_class->release_pad =
+      GST_DEBUG_FUNCPTR (gst_rtp_session_release_pad);
+
+  klass->clear_pt_map = GST_DEBUG_FUNCPTR (gst_rtp_session_clear_pt_map);
+
+  /* sink pads */
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &rtpsession_recv_rtp_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &rtpsession_recv_rtcp_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &rtpsession_send_rtp_sink_template);
+
+  /* src pads */
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &rtpsession_recv_rtp_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &rtpsession_sync_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &rtpsession_send_rtp_src_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &rtpsession_send_rtcp_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "RTP Session",
+      "Filter/Network/RTP",
+      "Implement an RTP session", "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_session_debug,
+      "rtpsession", 0, "RTP Session");
+
+  /* register the chain functions so they show up nicely in debug logs */
+  GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_session_chain_recv_rtp);
+  GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_session_chain_recv_rtp_list);
+  GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_session_chain_recv_rtcp);
+  GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_session_chain_send_rtp);
+  GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_session_chain_send_rtp_list);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_RTP_NTP_TIME_SOURCE, 0);
+  gst_type_mark_as_plugin_api (RTP_TYPE_SESSION, 0);
+  gst_type_mark_as_plugin_api (RTP_TYPE_SOURCE, 0);
+}
+
+/* Instance initialisation: create the internal RTPSession, wire up its
+ * callbacks and signals, and set up default state. */
+static void
+gst_rtp_session_init (GstRtpSession * rtpsession)
+{
+  rtpsession->priv = gst_rtp_session_get_instance_private (rtpsession);
+  g_mutex_init (&rtpsession->priv->lock);
+  g_cond_init (&rtpsession->priv->cond);
+  rtpsession->priv->sysclock = gst_system_clock_obtain ();
+  rtpsession->priv->session = rtp_session_new ();
+  rtpsession->priv->use_pipeline_clock = DEFAULT_USE_PIPELINE_CLOCK;
+  rtpsession->priv->rtcp_sync_send_time = DEFAULT_RTCP_SYNC_SEND_TIME;
+
+  /* configure callbacks */
+  rtp_session_set_callbacks (rtpsession->priv->session, &callbacks, rtpsession);
+  /* configure signals: relay the internal session's notifications to the
+   * element-level signals defined in class_init */
+  g_signal_connect (rtpsession->priv->session, "on-new-ssrc",
+      (GCallback) on_new_ssrc, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-ssrc-collision",
+      (GCallback) on_ssrc_collision, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-ssrc-validated",
+      (GCallback) on_ssrc_validated, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-ssrc-active",
+      (GCallback) on_ssrc_active, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-ssrc-sdes",
+      (GCallback) on_ssrc_sdes, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-bye-ssrc",
+      (GCallback) on_bye_ssrc, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-bye-timeout",
+      (GCallback) on_bye_timeout, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-timeout",
+      (GCallback) on_timeout, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-sender-timeout",
+      (GCallback) on_sender_timeout, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-new-sender-ssrc",
+      (GCallback) on_new_sender_ssrc, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "on-sender-ssrc-active",
+      (GCallback) on_sender_ssrc_active, rtpsession);
+  g_signal_connect (rtpsession->priv->session, "notify::stats",
+      (GCallback) on_notify_stats, rtpsession);
+  /* pt -> caps cache, filled via the request-pt-map signal; values are
+   * owned by the table and unreffed on removal */
+  rtpsession->priv->ptmap = g_hash_table_new_full (NULL, NULL, NULL,
+      (GDestroyNotify) gst_caps_unref);
+
+  rtpsession->recv_rtcp_segment_seqnum = GST_SEQNUM_INVALID;
+
+  gst_segment_init (&rtpsession->recv_rtp_seg, GST_FORMAT_UNDEFINED);
+  gst_segment_init (&rtpsession->send_rtp_seg, GST_FORMAT_UNDEFINED);
+
+  /* no RTCP thread running yet */
+  rtpsession->priv->thread_stopped = TRUE;
+
+  rtpsession->priv->recv_rtx_req_count = 0;
+  rtpsession->priv->sent_rtx_req_count = 0;
+
+  rtpsession->priv->ntp_time_source = DEFAULT_NTP_TIME_SOURCE;
+}
+
+/* Release all resources owned by the instance; the private struct itself is
+ * freed by GObject (G_DEFINE_TYPE_WITH_PRIVATE). */
+static void
+gst_rtp_session_finalize (GObject * object)
+{
+  GstRtpSession *rtpsession;
+
+  rtpsession = GST_RTP_SESSION (object);
+
+  g_hash_table_destroy (rtpsession->priv->ptmap);
+  g_mutex_clear (&rtpsession->priv->lock);
+  g_cond_clear (&rtpsession->priv->cond);
+  g_object_unref (rtpsession->priv->sysclock);
+  g_object_unref (rtpsession->priv->session);
+  /* gst_structure_free() requires a non-NULL argument, hence the check */
+  if (rtpsession->priv->last_twcc_stats)
+    gst_structure_free (rtpsession->priv->last_twcc_stats);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Property setter: most properties are forwarded verbatim to the internal
+ * RTPSession object; the rest are stored in the private struct. */
+static void
+gst_rtp_session_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpSession *rtpsession;
+  GstRtpSessionPrivate *priv;
+
+  rtpsession = GST_RTP_SESSION (object);
+  priv = rtpsession->priv;
+
+  switch (prop_id) {
+    case PROP_BANDWIDTH:
+      g_object_set_property (G_OBJECT (priv->session), "bandwidth", value);
+      break;
+    case PROP_RTCP_FRACTION:
+      g_object_set_property (G_OBJECT (priv->session), "rtcp-fraction", value);
+      break;
+    case PROP_RTCP_RR_BANDWIDTH:
+      g_object_set_property (G_OBJECT (priv->session), "rtcp-rr-bandwidth",
+          value);
+      break;
+    case PROP_RTCP_RS_BANDWIDTH:
+      g_object_set_property (G_OBJECT (priv->session), "rtcp-rs-bandwidth",
+          value);
+      break;
+    case PROP_SDES:
+      rtp_session_set_sdes_struct (priv->session, g_value_get_boxed (value));
+      break;
+    case PROP_USE_PIPELINE_CLOCK:
+      priv->use_pipeline_clock = g_value_get_boolean (value);
+      break;
+    case PROP_RTCP_MIN_INTERVAL:
+      g_object_set_property (G_OBJECT (priv->session), "rtcp-min-interval",
+          value);
+      break;
+    case PROP_PROBATION:
+      g_object_set_property (G_OBJECT (priv->session), "probation", value);
+      break;
+    case PROP_MAX_DROPOUT_TIME:
+      g_object_set_property (G_OBJECT (priv->session), "max-dropout-time",
+          value);
+      break;
+    case PROP_MAX_MISORDER_TIME:
+      g_object_set_property (G_OBJECT (priv->session), "max-misorder-time",
+          value);
+      break;
+    case PROP_RTP_PROFILE:
+      g_object_set_property (G_OBJECT (priv->session), "rtp-profile", value);
+      break;
+    case PROP_NTP_TIME_SOURCE:
+      priv->ntp_time_source = g_value_get_enum (value);
+      break;
+    case PROP_RTCP_SYNC_SEND_TIME:
+      priv->rtcp_sync_send_time = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Property getter: mirrors set_property, forwarding most lookups to the
+ * internal RTPSession and computing the rest on demand. */
+static void
+gst_rtp_session_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpSession *rtpsession;
+  GstRtpSessionPrivate *priv;
+
+  rtpsession = GST_RTP_SESSION (object);
+  priv = rtpsession->priv;
+
+  switch (prop_id) {
+    case PROP_BANDWIDTH:
+      g_object_get_property (G_OBJECT (priv->session), "bandwidth", value);
+      break;
+    case PROP_RTCP_FRACTION:
+      g_object_get_property (G_OBJECT (priv->session), "rtcp-fraction", value);
+      break;
+    case PROP_RTCP_RR_BANDWIDTH:
+      g_object_get_property (G_OBJECT (priv->session), "rtcp-rr-bandwidth",
+          value);
+      break;
+    case PROP_RTCP_RS_BANDWIDTH:
+      g_object_get_property (G_OBJECT (priv->session), "rtcp-rs-bandwidth",
+          value);
+      break;
+    case PROP_SDES:
+      /* _take_boxed: the GValue assumes ownership of the returned struct */
+      g_value_take_boxed (value, rtp_session_get_sdes_struct (priv->session));
+      break;
+    case PROP_NUM_SOURCES:
+      g_value_set_uint (value, rtp_session_get_num_sources (priv->session));
+      break;
+    case PROP_NUM_ACTIVE_SOURCES:
+      g_value_set_uint (value,
+          rtp_session_get_num_active_sources (priv->session));
+      break;
+    case PROP_INTERNAL_SESSION:
+      g_value_set_object (value, priv->session);
+      break;
+    case PROP_USE_PIPELINE_CLOCK:
+      g_value_set_boolean (value, priv->use_pipeline_clock);
+      break;
+    case PROP_RTCP_MIN_INTERVAL:
+      g_object_get_property (G_OBJECT (priv->session), "rtcp-min-interval",
+          value);
+      break;
+    case PROP_PROBATION:
+      g_object_get_property (G_OBJECT (priv->session), "probation", value);
+      break;
+    case PROP_MAX_DROPOUT_TIME:
+      g_object_get_property (G_OBJECT (priv->session), "max-dropout-time",
+          value);
+      break;
+    case PROP_MAX_MISORDER_TIME:
+      g_object_get_property (G_OBJECT (priv->session), "max-misorder-time",
+          value);
+      break;
+    case PROP_STATS:
+      g_value_take_boxed (value, gst_rtp_session_create_stats (rtpsession));
+      break;
+    case PROP_TWCC_STATS:
+      /* last_twcc_stats is updated from the streaming threads, so read it
+       * under the session lock; _set_boxed copies the structure */
+      GST_RTP_SESSION_LOCK (rtpsession);
+      g_value_set_boxed (value, priv->last_twcc_stats);
+      GST_RTP_SESSION_UNLOCK (rtpsession);
+      break;
+    case PROP_RTP_PROFILE:
+      g_object_get_property (G_OBJECT (priv->session), "rtp-profile", value);
+      break;
+    case PROP_NTP_TIME_SOURCE:
+      g_value_set_enum (value, priv->ntp_time_source);
+      break;
+    case PROP_RTCP_SYNC_SEND_TIME:
+      g_value_set_boolean (value, priv->rtcp_sync_send_time);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Build the "stats" structure: take the internal session's stats and add the
+ * element-level retransmission counters. "rtx-count" is kept as a deprecated
+ * alias of "recv-rtx-req-count". Caller owns the returned structure. */
+static GstStructure *
+gst_rtp_session_create_stats (GstRtpSession * rtpsession)
+{
+  GstStructure *s;
+
+  g_object_get (rtpsession->priv->session, "stats", &s, NULL);
+  gst_structure_set (s, "rtx-count", G_TYPE_UINT,
+      rtpsession->priv->recv_rtx_req_count, "recv-rtx-req-count", G_TYPE_UINT,
+      rtpsession->priv->recv_rtx_req_count, "sent-rtx-req-count", G_TYPE_UINT,
+      rtpsession->priv->sent_rtx_req_count, NULL);
+
+  return s;
+}
+
+/* Compute the current running-time and NTP time (in ns) according to the
+ * configured ntp-time-source. Both outputs are optional; if the element has
+ * no clock, -1 (GST_CLOCK_TIME_NONE) is reported for both. */
+static void
+get_current_times (GstRtpSession * rtpsession, GstClockTime * running_time,
+    guint64 * ntpnstime)
+{
+  guint64 ntpns = -1;
+  GstClock *clock;
+  GstClockTime base_time, rt, clock_time;
+
+  GST_OBJECT_LOCK (rtpsession);
+  if ((clock = GST_ELEMENT_CLOCK (rtpsession))) {
+    /* ref the clock under the object lock so it stays valid after unlock */
+    base_time = GST_ELEMENT_CAST (rtpsession)->base_time;
+    gst_object_ref (clock);
+    GST_OBJECT_UNLOCK (rtpsession);
+
+    /* get current clock time and convert to running time */
+    clock_time = gst_clock_get_time (clock);
+    rt = clock_time - base_time;
+
+    if (rtpsession->priv->use_pipeline_clock) {
+      ntpns = rt;
+      /* add constant to convert from 1970 based time to 1900 based time
+       * (2208988800 = seconds between the NTP and Unix epochs) */
+      ntpns += (2208988800LL * GST_SECOND);
+    } else {
+      switch (rtpsession->priv->ntp_time_source) {
+        case GST_RTP_NTP_TIME_SOURCE_NTP:
+        case GST_RTP_NTP_TIME_SOURCE_UNIX:{
+          /* get current NTP time */
+          ntpns = g_get_real_time () * GST_USECOND;
+
+          /* add constant to convert from 1970 based time to 1900 based time */
+          if (rtpsession->priv->ntp_time_source == GST_RTP_NTP_TIME_SOURCE_NTP)
+            ntpns += (2208988800LL * GST_SECOND);
+          break;
+        }
+        case GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME:
+          ntpns = rt;
+          break;
+        case GST_RTP_NTP_TIME_SOURCE_CLOCK_TIME:
+          ntpns = clock_time;
+          break;
+        default:
+          ntpns = -1;
+          g_assert_not_reached ();
+          break;
+      }
+    }
+
+    gst_object_unref (clock);
+  } else {
+    GST_OBJECT_UNLOCK (rtpsession);
+    rt = -1;
+    ntpns = -1;
+  }
+  if (running_time)
+    *running_time = rt;
+  if (ntpnstime)
+    *ntpnstime = ntpns;
+}
+
+/* must be called with GST_RTP_SESSION_LOCK */
+/* Wake up the RTCP thread if it is still parked waiting for the first send
+ * (see the wait_send loop in rtcp_thread). */
+static void
+signal_waiting_rtcp_thread_unlocked (GstRtpSession * rtpsession)
+{
+  if (rtpsession->priv->wait_send) {
+    GST_LOG_OBJECT (rtpsession, "signal RTCP thread");
+    rtpsession->priv->wait_send = FALSE;
+    GST_RTP_SESSION_SIGNAL (rtpsession);
+  }
+}
+
+/* Body of the RTCP timer thread: wait until dataflow starts, then repeatedly
+ * sleep until the session's next reporting deadline and run
+ * rtp_session_on_timeout(). Exits when stop_thread is set or the session
+ * reports no further timeouts. The session lock is dropped around any call
+ * that may block or push data. */
+static void
+rtcp_thread (GstRtpSession * rtpsession)
+{
+  GstClockID id;
+  GstClockTime current_time;
+  GstClockTime next_timeout;
+  guint64 ntpnstime;
+  GstClockTime running_time;
+  RTPSession *session;
+  GstClock *sysclock;
+
+  GST_DEBUG_OBJECT (rtpsession, "entering RTCP thread");
+
+  GST_RTP_SESSION_LOCK (rtpsession);
+
+  /* park until the first data flows (or we are told to stop); woken by
+   * signal_waiting_rtcp_thread_unlocked() */
+  while (rtpsession->priv->wait_send) {
+    GST_LOG_OBJECT (rtpsession, "waiting for getting started");
+    GST_RTP_SESSION_WAIT (rtpsession);
+    GST_LOG_OBJECT (rtpsession, "signaled...");
+  }
+
+  sysclock = rtpsession->priv->sysclock;
+  current_time = gst_clock_get_time (sysclock);
+
+  session = rtpsession->priv->session;
+
+  GST_DEBUG_OBJECT (rtpsession, "starting at %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (current_time));
+  session->start_time = current_time;
+
+  while (!rtpsession->priv->stop_thread) {
+    GstClockReturn res;
+
+    /* get initial estimate */
+    next_timeout = rtp_session_next_timeout (session, current_time);
+
+    GST_DEBUG_OBJECT (rtpsession, "next check time %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (next_timeout));
+
+    /* leave if no more timeouts, the session ended */
+    if (next_timeout == GST_CLOCK_TIME_NONE)
+      break;
+
+    /* store the clock id in priv so stop_rtcp_thread() can unschedule it */
+    id = rtpsession->priv->id =
+        gst_clock_new_single_shot_id (sysclock, next_timeout);
+    GST_RTP_SESSION_UNLOCK (rtpsession);
+
+    res = gst_clock_id_wait (id, NULL);
+
+    GST_RTP_SESSION_LOCK (rtpsession);
+    gst_clock_id_unref (id);
+    rtpsession->priv->id = NULL;
+
+    if (rtpsession->priv->stop_thread)
+      break;
+
+    /* update current time */
+    current_time = gst_clock_get_time (sysclock);
+
+    /* get current NTP time */
+    get_current_times (rtpsession, &running_time, &ntpnstime);
+
+    /* we get unlocked because we need to perform reconsideration, don't perform
+     * the timeout but get a new reporting estimate. */
+    GST_DEBUG_OBJECT (rtpsession, "unlocked %d, current %" GST_TIME_FORMAT,
+        res, GST_TIME_ARGS (current_time));
+
+    /* perform actions, we ignore result. Release lock because it might push. */
+    GST_RTP_SESSION_UNLOCK (rtpsession);
+    rtp_session_on_timeout (session, current_time, ntpnstime, running_time);
+    GST_RTP_SESSION_LOCK (rtpsession);
+  }
+  /* mark the thread as stopped now */
+  rtpsession->priv->thread_stopped = TRUE;
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  GST_DEBUG_OBJECT (rtpsession, "leaving RTCP thread");
+}
+
+/* (Re)start the RTCP thread if the previous one stopped; an already-running
+ * thread is reused. Returns FALSE if thread creation failed. */
+static gboolean
+start_rtcp_thread (GstRtpSession * rtpsession)
+{
+  GError *error = NULL;
+  gboolean res;
+
+  GST_DEBUG_OBJECT (rtpsession, "starting RTCP thread");
+
+  GST_RTP_SESSION_LOCK (rtpsession);
+  rtpsession->priv->stop_thread = FALSE;
+  if (rtpsession->priv->thread_stopped) {
+    /* if the thread stopped, and we still have a handle to the thread, join it
+     * now. We can safely join with the lock held, the thread will not take it
+     * anymore. */
+    if (rtpsession->priv->thread)
+      g_thread_join (rtpsession->priv->thread);
+    /* only create a new thread if the old one was stopped. Otherwise we can
+     * just reuse the currently running one. */
+    rtpsession->priv->thread = g_thread_try_new ("rtpsession-rtcp",
+        (GThreadFunc) rtcp_thread, rtpsession, &error);
+    /* NOTE(review): on creation failure thread is NULL but thread_stopped is
+     * still set to FALSE here; a later restart would skip thread creation.
+     * Likely benign because the caller fails the state change, but verify. */
+    rtpsession->priv->thread_stopped = FALSE;
+  }
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  if (error != NULL) {
+    res = FALSE;
+    GST_DEBUG_OBJECT (rtpsession, "failed to start thread, %s", error->message);
+    g_error_free (error);
+  } else {
+    res = TRUE;
+  }
+  return res;
+}
+
+/* Ask the RTCP thread to exit: set the stop flag, wake it if it is parked on
+ * wait_send, and unschedule any pending clock wait. Does not join. */
+static void
+stop_rtcp_thread (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "stopping RTCP thread");
+
+  GST_RTP_SESSION_LOCK (rtpsession);
+  rtpsession->priv->stop_thread = TRUE;
+  signal_waiting_rtcp_thread_unlocked (rtpsession);
+  if (rtpsession->priv->id)
+    gst_clock_id_unschedule (rtpsession->priv->id);
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+}
+
+/* Join the RTCP thread (if any) and clear the handle. The lock is released
+ * around the join because the thread takes it while exiting. */
+static void
+join_rtcp_thread (GstRtpSession * rtpsession)
+{
+  GST_RTP_SESSION_LOCK (rtpsession);
+  /* don't try to join when we have no thread */
+  if (rtpsession->priv->thread != NULL) {
+    GST_DEBUG_OBJECT (rtpsession, "joining RTCP thread");
+    GST_RTP_SESSION_UNLOCK (rtpsession);
+
+    g_thread_join (rtpsession->priv->thread);
+
+    GST_RTP_SESSION_LOCK (rtpsession);
+    /* after the join, take the lock and clear the thread structure. The caller
+     * is supposed to not concurrently call start and join. */
+    rtpsession->priv->thread = NULL;
+  }
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+}
+
+/* GstElement state-change handler: manages the RTCP thread lifecycle around
+ * the parent class's state transitions. */
+static GstStateChangeReturn
+gst_rtp_session_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn res;
+  GstRtpSession *rtpsession;
+
+  rtpsession = GST_RTP_SESSION (element);
+
+  /* upward transitions: prepare before chaining up */
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* make the RTCP thread park until data actually starts flowing */
+      GST_RTP_SESSION_LOCK (rtpsession);
+      rtpsession->priv->wait_send = TRUE;
+      GST_RTP_SESSION_UNLOCK (rtpsession);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      break;
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      /* no need to join yet, we might want to continue later. Also, the
+       * dataflow could block downstream so that a join could just block
+       * forever. */
+      stop_rtcp_thread (rtpsession);
+      break;
+    default:
+      break;
+  }
+
+  res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* downward/completed transitions: act after chaining up */
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      if (!start_rtcp_thread (rtpsession))
+        goto failed_thread;
+      break;
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      /* downstream is now releasing the dataflow and we can join. */
+      join_rtcp_thread (rtpsession);
+      rtp_session_reset (rtpsession->priv->session);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+  return res;
+
+  /* ERRORS */
+failed_thread:
+  {
+    return GST_STATE_CHANGE_FAILURE;
+  }
+}
+
+/* GHRFunc used by gst_rtp_session_clear_pt_map(); returning TRUE for every
+ * entry makes g_hash_table_foreach_remove() drop the whole table. */
+static gboolean
+return_true (gpointer key, gpointer value, gpointer user_data)
+{
+ return TRUE;
+}
+
+/* Drop all cached payload-type -> caps mappings from the ptmap table. */
+static void
+gst_rtp_session_clear_pt_map (GstRtpSession * rtpsession)
+{
+ GST_RTP_SESSION_LOCK (rtpsession);
+ g_hash_table_foreach_remove (rtpsession->priv->ptmap, return_true, NULL);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+}
+
+/* called when the session manager has an RTP packet ready to be pushed.
+ * Takes ownership of @buffer on every path: it is pushed downstream, queued
+ * in the processed list, or dropped. */
+static GstFlowReturn
+gst_rtp_session_process_rtp (RTPSession * sess, RTPSource * src,
+ GstBuffer * buffer, gpointer user_data)
+{
+ GstFlowReturn result;
+ GstRtpSession *rtpsession;
+ GstPad *rtp_src;
+
+ rtpsession = GST_RTP_SESSION (user_data);
+
+ /* ref the pad under the lock so it cannot go away while we push */
+ GST_RTP_SESSION_LOCK (rtpsession);
+ if ((rtp_src = rtpsession->recv_rtp_src))
+ gst_object_ref (rtp_src);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ if (rtp_src) {
+ if (rtpsession->priv->processed_list) {
+ /* a buffer list is being processed (see
+ * gst_rtp_session_chain_recv_rtp_list); collect the packet so the
+ * whole list can be pushed in one go */
+ GST_LOG_OBJECT (rtpsession, "queueing received RTP packet");
+ gst_buffer_list_add (rtpsession->priv->processed_list, buffer);
+ result = GST_FLOW_OK;
+ } else {
+ GST_LOG_OBJECT (rtpsession, "pushing received RTP packet");
+ result = gst_pad_push (rtp_src, buffer);
+ }
+ gst_object_unref (rtp_src);
+ } else {
+ /* no output pad configured; consume the buffer */
+ GST_DEBUG_OBJECT (rtpsession, "dropping received RTP packet");
+ gst_buffer_unref (buffer);
+ result = GST_FLOW_OK;
+ }
+ return result;
+}
+
+/* called when the session manager has an RTP packet ready for further
+ * sending. @data is either a GstBuffer or a GstBufferList; ownership is
+ * taken on every path. */
+static GstFlowReturn
+gst_rtp_session_send_rtp (RTPSession * sess, RTPSource * src,
+ gpointer data, gpointer user_data)
+{
+ GstFlowReturn result;
+ GstRtpSession *rtpsession;
+ GstPad *rtp_src;
+
+ rtpsession = GST_RTP_SESSION (user_data);
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+ if ((rtp_src = rtpsession->send_rtp_src))
+ gst_object_ref (rtp_src);
+ /* wake the RTCP thread; presumably it waits for send activity before
+ * scheduling (wait_send) -- confirm in signal_waiting_rtcp_thread_unlocked */
+ signal_waiting_rtcp_thread_unlocked (rtpsession);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ if (rtp_src) {
+ if (GST_IS_BUFFER (data)) {
+ GST_LOG_OBJECT (rtpsession, "sending RTP packet");
+ result = gst_pad_push (rtp_src, GST_BUFFER_CAST (data));
+ } else {
+ GST_LOG_OBJECT (rtpsession, "sending RTP list");
+ result = gst_pad_push_list (rtp_src, GST_BUFFER_LIST_CAST (data));
+ }
+ gst_object_unref (rtp_src);
+ } else {
+ /* no output pad: release buffer or list alike via the mini object */
+ gst_mini_object_unref (GST_MINI_OBJECT_CAST (data));
+ result = GST_FLOW_OK;
+ }
+ return result;
+}
+
+/* Push the initial sticky events (stream-start, caps, segment) on @srcpad
+ * before the first RTCP buffer goes out. The stream-start seqnum is stored
+ * in recv_rtcp_segment_seqnum so later EOS/segment events on the RTCP path
+ * can carry the same seqnum. */
+static void
+do_rtcp_events (GstRtpSession * rtpsession, GstPad * srcpad)
+{
+  GstCaps *caps;
+  GstSegment seg;
+  GstEvent *event;
+  gchar *stream_id;
+  gboolean have_group_id;
+  guint group_id;
+
+  /* random 128-bit stream id */
+  stream_id =
+      g_strdup_printf ("%08x%08x%08x%08x", g_random_int (), g_random_int (),
+      g_random_int (), g_random_int ());
+
+  /* reuse the group-id of the upstream stream-start when there is one,
+   * otherwise allocate a fresh one */
+  GST_RTP_SESSION_LOCK (rtpsession);
+  if (rtpsession->recv_rtp_sink) {
+    event =
+        gst_pad_get_sticky_event (rtpsession->recv_rtp_sink,
+        GST_EVENT_STREAM_START, 0);
+    if (event) {
+      if (gst_event_parse_group_id (event, &group_id))
+        have_group_id = TRUE;
+      else
+        have_group_id = FALSE;
+      gst_event_unref (event);
+    } else {
+      have_group_id = TRUE;
+      group_id = gst_util_group_id_next ();
+    }
+  } else {
+    have_group_id = TRUE;
+    group_id = gst_util_group_id_next ();
+  }
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  event = gst_event_new_stream_start (stream_id);
+  /* remember the seqnum the new event already carries; setting it back on
+   * the same event would be a no-op */
+  rtpsession->recv_rtcp_segment_seqnum = gst_event_get_seqnum (event);
+  if (have_group_id)
+    gst_event_set_group_id (event, group_id);
+  gst_pad_push_event (srcpad, event);
+  g_free (stream_id);
+
+  caps = gst_caps_new_empty_simple ("application/x-rtcp");
+  gst_pad_set_caps (srcpad, caps);
+  gst_caps_unref (caps);
+
+  /* open-ended TIME segment; RTCP has no natural duration */
+  gst_segment_init (&seg, GST_FORMAT_TIME);
+  event = gst_event_new_segment (&seg);
+  gst_event_set_seqnum (event, rtpsession->recv_rtcp_segment_seqnum);
+  gst_pad_push_event (srcpad, event);
+}
+
+/* called when the session manager has an RTCP packet ready for further
+ * sending. The eos flag is set when an EOS event should be sent downstream as
+ * well. Takes ownership of @buffer on every path. */
+static GstFlowReturn
+gst_rtp_session_send_rtcp (RTPSession * sess, RTPSource * src,
+ GstBuffer * buffer, gboolean all_sources_bye, gpointer user_data)
+{
+ GstFlowReturn result;
+ GstRtpSession *rtpsession;
+ GstPad *rtcp_src;
+
+ rtpsession = GST_RTP_SESSION (user_data);
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+ /* bail out quickly while shutting down; the buffer is just dropped */
+ if (rtpsession->priv->stop_thread)
+ goto stopping;
+
+ if ((rtcp_src = rtpsession->send_rtcp_src)) {
+ gst_object_ref (rtcp_src);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ /* set rtcp caps on output pad */
+ if (!gst_pad_has_current_caps (rtcp_src))
+ do_rtcp_events (rtpsession, rtcp_src);
+
+ GST_LOG_OBJECT (rtpsession, "sending RTCP");
+ result = gst_pad_push (rtcp_src, buffer);
+
+ /* Forward send an EOS on the RTCP sink if we received an EOS on the
+ * send_rtp_sink. We don't need to check the recv_rtp_sink since in this
+ * case the EOS event would already have been sent */
+ if (all_sources_bye && rtpsession->send_rtp_sink &&
+ GST_PAD_IS_EOS (rtpsession->send_rtp_sink)) {
+ GstEvent *event;
+
+ GST_LOG_OBJECT (rtpsession, "sending EOS");
+
+ event = gst_event_new_eos ();
+ gst_event_set_seqnum (event, rtpsession->recv_rtcp_segment_seqnum);
+ gst_pad_push_event (rtcp_src, event);
+ }
+ gst_object_unref (rtcp_src);
+ } else {
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ GST_DEBUG_OBJECT (rtpsession, "not sending RTCP, no output pad");
+ gst_buffer_unref (buffer);
+ result = GST_FLOW_OK;
+ }
+ return result;
+
+ /* ERRORS */
+stopping:
+ {
+ GST_DEBUG_OBJECT (rtpsession, "we are stopping");
+ gst_buffer_unref (buffer);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+ return GST_FLOW_OK;
+ }
+}
+
+/* called when the session manager has an SR RTCP packet ready for handling
+ * inter stream synchronisation. Mirrors gst_rtp_session_send_rtcp() but
+ * pushes on the sync_src pad; takes ownership of @buffer on every path. */
+static GstFlowReturn
+gst_rtp_session_sync_rtcp (RTPSession * sess,
+ GstBuffer * buffer, gpointer user_data)
+{
+ GstFlowReturn result;
+ GstRtpSession *rtpsession;
+ GstPad *sync_src;
+
+ rtpsession = GST_RTP_SESSION (user_data);
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+ if (rtpsession->priv->stop_thread)
+ goto stopping;
+
+ if ((sync_src = rtpsession->sync_src)) {
+ gst_object_ref (sync_src);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ /* set rtcp caps on output pad, this happens
+ * when we receive RTCP muxed with RTP according
+ * to RFC5761. Otherwise we would have forwarded
+ * the events from the recv_rtcp_sink pad already
+ */
+ if (!gst_pad_has_current_caps (sync_src))
+ do_rtcp_events (rtpsession, sync_src);
+
+ GST_LOG_OBJECT (rtpsession, "sending Sync RTCP");
+ result = gst_pad_push (sync_src, buffer);
+ gst_object_unref (sync_src);
+ } else {
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ GST_DEBUG_OBJECT (rtpsession, "not sending Sync RTCP, no output pad");
+ gst_buffer_unref (buffer);
+ result = GST_FLOW_OK;
+ }
+ return result;
+
+ /* ERRORS */
+stopping:
+ {
+ GST_DEBUG_OBJECT (rtpsession, "we are stopping");
+ gst_buffer_unref (buffer);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+ return GST_FLOW_OK;
+ }
+}
+
+/* Cache @caps in the pt -> caps map keyed by the "payload" field and feed
+ * the structure to the session manager. Caps without a "payload" field, or
+ * with an already-cached payload, are ignored. Takes a ref on @caps when it
+ * is stored. Must be called with GST_RTP_SESSION_LOCK held (both callers,
+ * gst_rtp_session_sink_setcaps and gst_rtp_session_get_caps_for_pt, do). */
+static void
+gst_rtp_session_cache_caps (GstRtpSession * rtpsession, GstCaps * caps)
+{
+ GstRtpSessionPrivate *priv;
+ const GstStructure *s;
+ gint payload;
+
+ priv = rtpsession->priv;
+
+ GST_DEBUG_OBJECT (rtpsession, "parsing caps");
+
+ s = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_get_int (s, "payload", &payload))
+ return;
+
+ /* first caps win; don't overwrite an existing entry */
+ if (g_hash_table_lookup (priv->ptmap, GINT_TO_POINTER (payload)))
+ return;
+
+ rtp_session_update_recv_caps_structure (rtpsession->priv->session, s);
+
+ g_hash_table_insert (priv->ptmap, GINT_TO_POINTER (payload),
+ gst_caps_ref (caps));
+}
+
+/* Look up the caps for @payload, first in the ptmap cache and, on a miss,
+ * by emitting the request-pt-map signal towards the application. Returns a
+ * new caps ref, or NULL when no caps could be obtained. The session lock is
+ * released while the signal runs so application callbacks do not execute
+ * with the lock held. */
+static GstCaps *
+gst_rtp_session_get_caps_for_pt (GstRtpSession * rtpsession, guint payload)
+{
+ GstCaps *caps = NULL;
+ GValue args[2] = { {0}, {0} };
+ GValue ret = { 0 };
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+ caps = g_hash_table_lookup (rtpsession->priv->ptmap,
+ GINT_TO_POINTER (payload));
+ if (caps) {
+ gst_caps_ref (caps);
+ goto done;
+ }
+
+ /* not found in the cache, try to get it with a signal */
+ g_value_init (&args[0], GST_TYPE_ELEMENT);
+ g_value_set_object (&args[0], rtpsession);
+ g_value_init (&args[1], G_TYPE_UINT);
+ g_value_set_uint (&args[1], payload);
+
+ g_value_init (&ret, GST_TYPE_CAPS);
+ g_value_set_boxed (&ret, NULL);
+
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ g_signal_emitv (args, gst_rtp_session_signals[SIGNAL_REQUEST_PT_MAP], 0,
+ &ret);
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+
+ g_value_unset (&args[0]);
+ g_value_unset (&args[1]);
+ caps = (GstCaps *) g_value_dup_boxed (&ret);
+ g_value_unset (&ret);
+ if (!caps)
+ goto no_caps;
+
+ /* remember the answer; cache_caps expects the lock to be held */
+ gst_rtp_session_cache_caps (rtpsession, caps);
+
+done:
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ return caps;
+
+no_caps:
+ {
+ GST_DEBUG_OBJECT (rtpsession, "could not get caps");
+ goto done;
+ }
+}
+
+/* called when the session manager needs the clock rate for @payload.
+ * Returns the clock-rate from the payload's caps, or -1 when the caps or
+ * the clock-rate field are unavailable. */
+static gint
+gst_rtp_session_clock_rate (RTPSession * sess, guint8 payload,
+    gpointer user_data)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION_CAST (user_data);
+  const GstStructure *s;
+  GstCaps *caps;
+  gint clock_rate = -1;
+
+  caps = gst_rtp_session_get_caps_for_pt (rtpsession, payload);
+  if (caps == NULL)
+    return -1;
+
+  s = gst_caps_get_structure (caps, 0);
+  if (gst_structure_get_int (s, "clock-rate", &clock_rate)) {
+    GST_DEBUG_OBJECT (rtpsession, "parsed clock-rate %d", clock_rate);
+  } else {
+    clock_rate = -1;
+    GST_DEBUG_OBJECT (rtpsession, "No clock-rate in caps!");
+  }
+
+  gst_caps_unref (caps);
+
+  return clock_rate;
+}
+
+/* called when the session manager asks us to reconsider the timeout:
+ * unschedule the pending clock wait so the RTCP thread loops around and
+ * recomputes its next wakeup time. */
+static void
+gst_rtp_session_reconsider (RTPSession * sess, gpointer user_data)
+{
+ GstRtpSession *rtpsession;
+
+ rtpsession = GST_RTP_SESSION_CAST (user_data);
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+ GST_DEBUG_OBJECT (rtpsession, "unlock timer for reconsideration");
+ if (rtpsession->priv->id)
+ gst_clock_id_unschedule (rtpsession->priv->id);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+}
+
+/* Event handler for the recv_rtp_sink pad. Caches caps, tracks the incoming
+ * segment (needed to map RTP timestamps to running time for SR reports) and
+ * forwards events to recv_rtp_src; on EOS it additionally forwards an EOS
+ * on the send_rtcp_src pad. */
+static gboolean
+gst_rtp_session_event_recv_rtp_sink (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstRtpSession *rtpsession;
+ gboolean ret = FALSE;
+
+ rtpsession = GST_RTP_SESSION (parent);
+
+ GST_DEBUG_OBJECT (rtpsession, "received event %s",
+ GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ /* process */
+ gst_event_parse_caps (event, &caps);
+ gst_rtp_session_sink_setcaps (pad, rtpsession, caps);
+ /* NOTE(review): recv_rtp_src is read without the session lock here and
+ * below; presumably safe because the pad exists as long as this sink
+ * pad does -- confirm */
+ ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ gst_segment_init (&rtpsession->recv_rtp_seg, GST_FORMAT_UNDEFINED);
+ rtpsession->recv_rtcp_segment_seqnum = GST_SEQNUM_INVALID;
+ ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
+ break;
+ case GST_EVENT_SEGMENT:
+ {
+ GstSegment *segment, in_segment;
+
+ segment = &rtpsession->recv_rtp_seg;
+
+ /* the newsegment event is needed to convert the RTP timestamp to
+ * running_time, which is needed to generate a mapping from RTP to NTP
+ * timestamps in SR reports */
+ gst_event_copy_segment (event, &in_segment);
+ GST_DEBUG_OBJECT (rtpsession, "received segment %" GST_SEGMENT_FORMAT,
+ &in_segment);
+
+ /* accept upstream */
+ gst_segment_copy_into (&in_segment, segment);
+
+ /* push event forward */
+ ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
+ break;
+ }
+ case GST_EVENT_EOS:
+ {
+ GstPad *rtcp_src;
+
+ ret =
+ gst_pad_push_event (rtpsession->recv_rtp_src, gst_event_ref (event));
+
+ /* also put the RTCP output pad into EOS, reusing the stream-start
+ * seqnum when we have one */
+ GST_RTP_SESSION_LOCK (rtpsession);
+ if ((rtcp_src = rtpsession->send_rtcp_src))
+ gst_object_ref (rtcp_src);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ gst_event_unref (event);
+
+ if (rtcp_src) {
+ event = gst_event_new_eos ();
+ if (rtpsession->recv_rtcp_segment_seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, rtpsession->recv_rtcp_segment_seqnum);
+ ret = gst_pad_push_event (rtcp_src, event);
+ gst_object_unref (rtcp_src);
+ } else {
+ ret = TRUE;
+ }
+ break;
+ }
+ default:
+ ret = gst_pad_push_event (rtpsession->recv_rtp_src, event);
+ break;
+ }
+
+ return ret;
+
+}
+
+/* Ask the remote sender identified by @ssrc for a key unit, using PLI or
+ * FIR depending on what the payload's caps advertise. Returns TRUE when a
+ * request was scheduled, FALSE when the caps are unknown or advertise
+ * neither feedback mechanism. */
+static gboolean
+gst_rtp_session_request_remote_key_unit (GstRtpSession * rtpsession,
+    guint32 ssrc, guint payload, gboolean all_headers, gint count)
+{
+  const GstStructure *s;
+  GstCaps *caps;
+  gboolean want_pli, want_fir;
+
+  caps = gst_rtp_session_get_caps_for_pt (rtpsession, payload);
+  if (caps == NULL)
+    return FALSE;
+
+  s = gst_caps_get_structure (caps, 0);
+  want_pli = gst_structure_has_field (s, "rtcp-fb-nack-pli");
+  want_fir = all_headers && gst_structure_has_field (s, "rtcp-fb-ccm-fir");
+
+  /* Google Talk uses FIR for repair, so send it even if we just want a
+   * regular PLI */
+  if (!want_pli &&
+      gst_structure_has_field (s, "rtcp-fb-x-gstreamer-fir-as-repair"))
+    want_fir = TRUE;
+
+  gst_caps_unref (caps);
+
+  if (!want_pli && !want_fir)
+    return FALSE;
+
+  return rtp_session_request_key_unit (rtpsession->priv->session, ssrc,
+      want_fir, count);
+}
+
+/* Upstream event handler for the recv_rtp_src pad. Intercepts custom
+ * GstForceKeyUnit and GstRTPRetransmissionRequest events and turns them
+ * into RTCP feedback (key-unit request resp. NACK); everything else, and
+ * any request the session manager refuses, is forwarded to recv_rtp_sink. */
+static gboolean
+gst_rtp_session_event_recv_rtp_src (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstRtpSession *rtpsession;
+ gboolean forward = TRUE;
+ gboolean ret = TRUE;
+ const GstStructure *s;
+ guint32 ssrc;
+ guint pt;
+
+ rtpsession = GST_RTP_SESSION (parent);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CUSTOM_UPSTREAM:
+ s = gst_event_get_structure (event);
+ if (gst_structure_has_name (s, "GstForceKeyUnit") &&
+ gst_structure_get_uint (s, "ssrc", &ssrc) &&
+ gst_structure_get_uint (s, "payload", &pt)) {
+ gboolean all_headers = FALSE;
+ gint count = -1;
+
+ gst_structure_get_boolean (s, "all-headers", &all_headers);
+ if (gst_structure_get_int (s, "count", &count) && count < 0)
+ count += G_MAXINT; /* Make sure count is positive if present */
+ if (gst_rtp_session_request_remote_key_unit (rtpsession, ssrc, pt,
+ all_headers, count))
+ forward = FALSE;
+ } else if (gst_structure_has_name (s, "GstRTPRetransmissionRequest")) {
+ guint seqnum, delay, deadline, max_delay, avg_rtt;
+
+ /* statistics: count retransmission requests */
+ GST_RTP_SESSION_LOCK (rtpsession);
+ rtpsession->priv->recv_rtx_req_count++;
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ /* missing fields fall back to defaults (deadline 100ms, rtt 40ms) */
+ if (!gst_structure_get_uint (s, "ssrc", &ssrc))
+ ssrc = -1;
+ if (!gst_structure_get_uint (s, "seqnum", &seqnum))
+ seqnum = -1;
+ if (!gst_structure_get_uint (s, "delay", &delay))
+ delay = 0;
+ if (!gst_structure_get_uint (s, "deadline", &deadline))
+ deadline = 100;
+ if (!gst_structure_get_uint (s, "avg-rtt", &avg_rtt))
+ avg_rtt = 40;
+
+ /* remaining time to receive the packet */
+ max_delay = deadline;
+ if (max_delay > delay)
+ max_delay -= delay;
+ /* estimated RTT */
+ if (max_delay > avg_rtt)
+ max_delay -= avg_rtt;
+ else
+ max_delay = 0;
+
+ if (rtp_session_request_nack (rtpsession->priv->session, ssrc, seqnum,
+ max_delay * GST_MSECOND))
+ forward = FALSE;
+ }
+ break;
+ default:
+ break;
+ }
+
+ if (forward) {
+ GstPad *recv_rtp_sink;
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+ if ((recv_rtp_sink = rtpsession->recv_rtp_sink))
+ gst_object_ref (recv_rtp_sink);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ if (recv_rtp_sink) {
+ ret = gst_pad_push_event (recv_rtp_sink, event);
+ gst_object_unref (recv_rtp_sink);
+ } else
+ gst_event_unref (event);
+ } else {
+ /* the event was consumed by a feedback request */
+ gst_event_unref (event);
+ }
+
+ return ret;
+}
+
+
+/* Report the internally linked pad for @pad: recv_rtp_sink <-> recv_rtp_src
+ * and send_rtp_sink <-> send_rtp_src. Pads with no internal link (the RTCP
+ * pads) yield an empty single-item iterator. */
+static GstIterator *
+gst_rtp_session_iterate_internal_links (GstPad * pad, GstObject * parent)
+{
+ GstRtpSession *rtpsession;
+ GstPad *otherpad = NULL;
+ GstIterator *it = NULL;
+
+ rtpsession = GST_RTP_SESSION (parent);
+
+ GST_RTP_SESSION_LOCK (rtpsession);
+ if (pad == rtpsession->recv_rtp_src) {
+ otherpad = gst_object_ref (rtpsession->recv_rtp_sink);
+ } else if (pad == rtpsession->recv_rtp_sink) {
+ otherpad = gst_object_ref (rtpsession->recv_rtp_src);
+ } else if (pad == rtpsession->send_rtp_src) {
+ otherpad = gst_object_ref (rtpsession->send_rtp_sink);
+ } else if (pad == rtpsession->send_rtp_sink) {
+ otherpad = gst_object_ref (rtpsession->send_rtp_src);
+ }
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ if (otherpad) {
+ GValue val = { 0, };
+
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
+ gst_object_unref (otherpad);
+ } else {
+ it = gst_iterator_new_single (GST_TYPE_PAD, NULL);
+ }
+
+ return it;
+}
+
+/* Caps handler for the recv_rtp_sink pad: record the payload-type mapping
+ * in the session cache. Always succeeds. */
+static gboolean
+gst_rtp_session_sink_setcaps (GstPad * pad, GstRtpSession * rtpsession,
+ GstCaps * caps)
+{
+ GST_RTP_SESSION_LOCK (rtpsession);
+ gst_rtp_session_cache_caps (rtpsession, caps);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ return TRUE;
+}
+
+/* receive a packet from a sender, send it to the RTP session manager and
+ * forward the packet on the rtp_src pad (the push happens from the
+ * gst_rtp_session_process_rtp callback).
+ */
+static GstFlowReturn
+gst_rtp_session_chain_recv_rtp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstRtpSession *rtpsession;
+ GstRtpSessionPrivate *priv;
+ GstFlowReturn ret;
+ GstClockTime current_time, running_time;
+ GstClockTime timestamp;
+ guint64 ntpnstime;
+
+ rtpsession = GST_RTP_SESSION (parent);
+ priv = rtpsession->priv;
+
+ GST_LOG_OBJECT (rtpsession, "received RTP packet");
+
+ /* wake the RTCP thread; data has arrived */
+ GST_RTP_SESSION_LOCK (rtpsession);
+ signal_waiting_rtcp_thread_unlocked (rtpsession);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ /* get NTP time when this packet was captured, this depends on the timestamp. */
+ timestamp = GST_BUFFER_PTS (buffer);
+ if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
+ /* convert to running time using the segment values */
+ running_time =
+ gst_segment_to_running_time (&rtpsession->recv_rtp_seg, GST_FORMAT_TIME,
+ timestamp);
+ /* NOTE(review): ntpnstime is only sampled when the buffer carries no
+ * PTS; with a valid PTS it is passed as NONE -- confirm the session
+ * manager derives it elsewhere in that case */
+ ntpnstime = GST_CLOCK_TIME_NONE;
+ } else {
+ get_current_times (rtpsession, &running_time, &ntpnstime);
+ }
+ current_time = gst_clock_get_time (priv->sysclock);
+
+ /* ownership of buffer passes to the session manager */
+ ret = rtp_session_process_rtp (priv->session, buffer, current_time,
+ running_time, ntpnstime);
+ if (ret != GST_FLOW_OK)
+ goto push_error;
+
+done:
+
+ return ret;
+
+ /* ERRORS */
+push_error:
+ {
+ GST_DEBUG_OBJECT (rtpsession, "process returned %s",
+ gst_flow_get_name (ret));
+ goto done;
+ }
+}
+
+/* GstBufferListFunc: run one buffer of an incoming list through the regular
+ * RTP receive chain (@data is the element as GstObject). Always returns
+ * TRUE so iteration continues over the remaining buffers even if one of
+ * them fails. */
+static gboolean
+process_received_buffer_in_list (GstBuffer ** buffer, guint idx, gpointer data)
+{
+  /* use the proper flow-return type instead of a plain gint so the
+   * comparison below is type-consistent with the chain function */
+  GstFlowReturn ret;
+
+  ret = gst_rtp_session_chain_recv_rtp (NULL, data, *buffer);
+  if (ret != GST_FLOW_OK)
+    GST_ERROR ("Processing individual buffer in a list failed");
+
+  /*
+   * The buffer has been processed, remove it from the original list, if it was
+   * a valid RTP buffer it has been added to the "processed" list in
+   * gst_rtp_session_process_rtp().
+   */
+  *buffer = NULL;
+  return TRUE;
+}
+
+/* Chain-list function for recv_rtp_sink: process each buffer of the list
+ * individually, collecting the surviving RTP packets into a fresh list that
+ * is then pushed downstream in one go. processed_list is only touched from
+ * this streaming thread (set here, read in gst_rtp_session_process_rtp). */
+static GstFlowReturn
+gst_rtp_session_chain_recv_rtp_list (GstPad * pad, GstObject * parent,
+ GstBufferList * list)
+{
+ GstRtpSession *rtpsession = GST_RTP_SESSION (parent);
+ GstBufferList *processed_list;
+
+ processed_list = gst_buffer_list_new ();
+
+ /* Set some private data to detect that a buffer list is being pushed. */
+ rtpsession->priv->processed_list = processed_list;
+
+ /*
+ * Individually process the buffers from the incoming buffer list as the
+ * incoming RTP packets in the list can be mixed in all sorts of ways:
+ * - different frames,
+ * - different sources,
+ * - different types (RTP or RTCP)
+ */
+ gst_buffer_list_foreach (list,
+ (GstBufferListFunc) process_received_buffer_in_list, parent);
+
+ gst_buffer_list_unref (list);
+
+ /* Clean up private data in case the next push does not use a buffer list. */
+ rtpsession->priv->processed_list = NULL;
+
+ /* nothing survived processing, or no output pad: done */
+ if (gst_buffer_list_length (processed_list) == 0 || !rtpsession->recv_rtp_src) {
+ gst_buffer_list_unref (processed_list);
+ return GST_FLOW_OK;
+ }
+
+ GST_LOG_OBJECT (rtpsession, "pushing received RTP list");
+ return gst_pad_push_list (rtpsession->recv_rtp_src, processed_list);
+}
+
+/* Event handler for the recv_rtcp_sink pad: forward all events to the
+ * sync_src pad, making sure caps are set there before a segment goes out. */
+static gboolean
+gst_rtp_session_event_recv_rtcp_sink (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstRtpSession *rtpsession;
+ gboolean ret = FALSE;
+
+ rtpsession = GST_RTP_SESSION (parent);
+
+ GST_DEBUG_OBJECT (rtpsession, "received event %s",
+ GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEGMENT:
+ /* Make sure that the sync_src pad has caps before the segment event.
+ * Otherwise we might get a segment event before caps from the receive
+ * RTCP pad, and then later when receiving RTCP packets will set caps.
+ * This will results in a sticky event misordering warning
+ */
+ if (!gst_pad_has_current_caps (rtpsession->sync_src)) {
+ GstCaps *caps = gst_caps_new_empty_simple ("application/x-rtcp");
+ gst_pad_set_caps (rtpsession->sync_src, caps);
+ gst_caps_unref (caps);
+ }
+ /* fall through */
+ default:
+ ret = gst_pad_push_event (rtpsession->sync_src, event);
+ break;
+ }
+
+ return ret;
+}
+
+/* Receive an RTCP packet from a sender, send it to the RTP session manager and
+ * forward the SR packets to the sync_src pad (that push happens from the
+ * gst_rtp_session_sync_rtcp callback). Processing errors are deliberately
+ * swallowed: RTCP must not stall the pipeline.
+ */
+static GstFlowReturn
+gst_rtp_session_chain_recv_rtcp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstRtpSession *rtpsession;
+ GstRtpSessionPrivate *priv;
+ GstClockTime current_time;
+ GstClockTime running_time;
+ guint64 ntpnstime;
+
+ rtpsession = GST_RTP_SESSION (parent);
+ priv = rtpsession->priv;
+
+ GST_LOG_OBJECT (rtpsession, "received RTCP packet");
+
+ /* wake the RTCP thread; data has arrived */
+ GST_RTP_SESSION_LOCK (rtpsession);
+ signal_waiting_rtcp_thread_unlocked (rtpsession);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ current_time = gst_clock_get_time (priv->sysclock);
+ get_current_times (rtpsession, &running_time, &ntpnstime);
+
+ /* ownership of buffer passes to the session manager */
+ rtp_session_process_rtcp (priv->session, buffer, current_time, running_time,
+ ntpnstime);
+
+ return GST_FLOW_OK; /* always return OK */
+}
+
+/* Query handler for the send_rtcp_src pad. Only the latency query is
+ * answered (with defaults: not live, 0 min, unlimited max); all other
+ * queries fail for now. */
+static gboolean
+gst_rtp_session_query_send_rtcp_src (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION (parent);
+
+  GST_DEBUG_OBJECT (rtpsession, "received QUERY %s",
+      GST_QUERY_TYPE_NAME (query));
+
+  if (GST_QUERY_TYPE (query) == GST_QUERY_LATENCY) {
+    /* use the defaults for the latency query. */
+    gst_query_set_latency (query, FALSE, 0, -1);
+    return TRUE;
+  }
+
+  /* other queries simply fail for now */
+  return FALSE;
+}
+
+/* Event handler for the send_rtcp_src pad. Every event is consumed; only
+ * SEEK and LATENCY report success, everything else fails for now. */
+static gboolean
+gst_rtp_session_event_send_rtcp_src (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION (parent);
+  GstEventType type = GST_EVENT_TYPE (event);
+  gboolean handled;
+
+  GST_DEBUG_OBJECT (rtpsession, "received EVENT %s",
+      GST_EVENT_TYPE_NAME (event));
+
+  handled = (type == GST_EVENT_SEEK || type == GST_EVENT_LATENCY);
+  gst_event_unref (event);
+
+  return handled;
+}
+
+
+/* Event handler for the send_rtp_sink pad. Tracks the send segment (needed
+ * to map RTP timestamps to running time for SR reports), forwards events on
+ * send_rtp_src and, on EOS, schedules a BYE for all internal sources. */
+static gboolean
+gst_rtp_session_event_send_rtp_sink (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstRtpSession *rtpsession;
+ gboolean ret = FALSE;
+
+ rtpsession = GST_RTP_SESSION (parent);
+
+ GST_DEBUG_OBJECT (rtpsession, "received EVENT %s",
+ GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ /* process */
+ gst_event_parse_caps (event, &caps);
+ gst_rtp_session_setcaps_send_rtp (pad, rtpsession, caps);
+ ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ gst_segment_init (&rtpsession->send_rtp_seg, GST_FORMAT_UNDEFINED);
+ ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
+ break;
+ case GST_EVENT_SEGMENT:{
+ GstSegment *segment, in_segment;
+
+ segment = &rtpsession->send_rtp_seg;
+
+ /* the newsegment event is needed to convert the RTP timestamp to
+ * running_time, which is needed to generate a mapping from RTP to NTP
+ * timestamps in SR reports */
+ gst_event_copy_segment (event, &in_segment);
+ GST_DEBUG_OBJECT (rtpsession, "received segment %" GST_SEGMENT_FORMAT,
+ &in_segment);
+
+ /* accept upstream */
+ gst_segment_copy_into (&in_segment, segment);
+
+ /* push event forward */
+ ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
+ break;
+ }
+ case GST_EVENT_EOS:{
+ GstClockTime current_time;
+
+ /* push downstream FIXME, we are not supposed to leave the session just
+ * because we stop sending. */
+ ret = gst_pad_push_event (rtpsession->send_rtp_src, event);
+ current_time = gst_clock_get_time (rtpsession->priv->sysclock);
+
+ GST_DEBUG_OBJECT (rtpsession, "scheduling BYE message");
+ rtp_session_mark_all_bye (rtpsession->priv->session, "End Of Stream");
+ rtp_session_schedule_bye (rtpsession->priv->session, current_time);
+ break;
+ }
+ default:{
+ GstPad *send_rtp_src;
+
+ /* take a ref under the lock before pushing, as elsewhere */
+ GST_RTP_SESSION_LOCK (rtpsession);
+ if ((send_rtp_src = rtpsession->send_rtp_src))
+ gst_object_ref (send_rtp_src);
+ GST_RTP_SESSION_UNLOCK (rtpsession);
+
+ if (send_rtp_src) {
+ ret = gst_pad_push_event (send_rtp_src, event);
+ gst_object_unref (send_rtp_src);
+ } else
+ gst_event_unref (event);
+
+ break;
+ }
+ }
+
+ return ret;
+}
+
+/* Upstream event handler for the send_rtp_src pad. Records the configured
+ * latency from LATENCY events (needed to know when an RTP packet will be
+ * rendered by the sink), then lets every event take the default path. */
+static gboolean
+gst_rtp_session_event_send_rtp_src (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION (parent);
+
+  GST_DEBUG_OBJECT (rtpsession, "received EVENT %s",
+      GST_EVENT_TYPE_NAME (event));
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_LATENCY) {
+    /* save the latency, we need this to know when an RTP packet will be
+     * rendered by the sink */
+    gst_event_parse_latency (event, &rtpsession->priv->send_latency);
+  }
+
+  return gst_pad_event_default (pad, parent, event);
+}
+
+/* Caps query helper for the send_rtp_sink pad: accept any RTP caps, but
+ * once the session has settled on an SSRC, prefer (order first) a structure
+ * carrying that SSRC so incoming packets need no rewriting. Returns a new
+ * caps ref, intersected with @filter when given. */
+static GstCaps *
+gst_rtp_session_getcaps_send_rtp (GstPad * pad, GstRtpSession * rtpsession,
+ GstCaps * filter)
+{
+ GstRtpSessionPrivate *priv;
+ GstCaps *result;
+ GstStructure *s1, *s2;
+ guint ssrc;
+ gboolean is_random;
+
+ priv = rtpsession->priv;
+
+ ssrc = rtp_session_suggest_ssrc (priv->session, &is_random);
+
+ /* we can basically accept anything but we prefer to receive packets with our
+ * internal SSRC so that we don't have to patch it. Create a structure with
+ * the SSRC and another one without.
+ * Only do this if the session actually decided on an ssrc already,
+ * otherwise we give upstream the opportunity to select an ssrc itself */
+ if (!is_random) {
+ s1 = gst_structure_new ("application/x-rtp", "ssrc", G_TYPE_UINT, ssrc,
+ NULL);
+ s2 = gst_structure_new_empty ("application/x-rtp");
+
+ result = gst_caps_new_full (s1, s2, NULL);
+ } else {
+ result = gst_caps_new_empty_simple ("application/x-rtp");
+ }
+
+ if (filter) {
+ GstCaps *caps = result;
+
+ result = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ }
+
+ GST_DEBUG_OBJECT (rtpsession, "getting caps %" GST_PTR_FORMAT, result);
+
+ return result;
+}
+
+/* Query handler for the send_rtp_sink pad: answer CAPS queries via
+ * gst_rtp_session_getcaps_send_rtp, delegate everything else. */
+static gboolean
+gst_rtp_session_query_send_rtp (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ gboolean res = FALSE;
+ GstRtpSession *rtpsession;
+
+ rtpsession = GST_RTP_SESSION (parent);
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_rtp_session_getcaps_send_rtp (pad, rtpsession, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return res;
+}
+
+/* Caps handler for the send_rtp_sink pad: hand the new send caps to the
+ * session manager. Always succeeds. */
+static gboolean
+gst_rtp_session_setcaps_send_rtp (GstPad * pad, GstRtpSession * rtpsession,
+    GstCaps * caps)
+{
+  rtp_session_update_send_caps (rtpsession->priv->session, caps);
+
+  return TRUE;
+}
+
+/* Receive an RTP packet or a list of packets to be sent to the receivers,
+ * send to RTP session manager and forward to send_rtp_src (the push happens
+ * from the gst_rtp_session_send_rtp callback). Ownership of @data passes to
+ * the session manager.
+ */
+static GstFlowReturn
+gst_rtp_session_chain_send_rtp_common (GstRtpSession * rtpsession,
+ gpointer data, gboolean is_list)
+{
+ GstRtpSessionPrivate *priv;
+ GstFlowReturn ret;
+ GstClockTime timestamp, running_time;
+ GstClockTime current_time;
+
+ priv = rtpsession->priv;
+
+ GST_LOG_OBJECT (rtpsession, "received RTP %s", is_list ? "list" : "packet");
+
+ /* get NTP time when this packet was captured, this depends on the timestamp. */
+ if (is_list) {
+ GstBuffer *buffer = NULL;
+
+ /* All buffers in a list have the same timestamp.
+ * So, just take it from the first buffer. */
+ buffer = gst_buffer_list_get (GST_BUFFER_LIST_CAST (data), 0);
+ if (buffer)
+ timestamp = GST_BUFFER_PTS (buffer);
+ else
+ timestamp = -1;
+ } else {
+ timestamp = GST_BUFFER_PTS (GST_BUFFER_CAST (data));
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
+ /* convert to running time using the segment start value. */
+ running_time =
+ gst_segment_to_running_time (&rtpsession->send_rtp_seg, GST_FORMAT_TIME,
+ timestamp);
+ /* optionally shift by the configured latency so RTCP SR mapping matches
+ * the actual render time */
+ if (priv->rtcp_sync_send_time)
+ running_time += priv->send_latency;
+ } else {
+ /* no timestamp. */
+ running_time = -1;
+ }
+
+ current_time = gst_clock_get_time (priv->sysclock);
+ ret = rtp_session_send_rtp (priv->session, data, is_list, current_time,
+ running_time);
+ if (ret != GST_FLOW_OK)
+ goto push_error;
+
+done:
+
+ return ret;
+
+ /* ERRORS */
+push_error:
+ {
+ GST_DEBUG_OBJECT (rtpsession, "process returned %s",
+ gst_flow_get_name (ret));
+ goto done;
+ }
+}
+
+/* Chain function for send_rtp_sink: single-buffer wrapper around the common
+ * send path. */
+static GstFlowReturn
+gst_rtp_session_chain_send_rtp (GstPad * pad, GstObject * parent,
+    GstBuffer * buffer)
+{
+  return gst_rtp_session_chain_send_rtp_common (GST_RTP_SESSION (parent),
+      buffer, FALSE);
+}
+
+/* Chain-list function for send_rtp_sink: buffer-list wrapper around the
+ * common send path. */
+static GstFlowReturn
+gst_rtp_session_chain_send_rtp_list (GstPad * pad, GstObject * parent,
+    GstBufferList * list)
+{
+  return gst_rtp_session_chain_send_rtp_common (GST_RTP_SESSION (parent),
+      list, TRUE);
+}
+
+/* Create sinkpad to receive RTP packets from senders. This will also create a
+ * srcpad for the RTP packets. Both pads are activated and added to the
+ * element; returns the (unreffed) sink pad.
+ */
+static GstPad *
+create_recv_rtp_sink (GstRtpSession * rtpsession)
+{
+ GST_DEBUG_OBJECT (rtpsession, "creating RTP sink pad");
+
+ rtpsession->recv_rtp_sink =
+ gst_pad_new_from_static_template (&rtpsession_recv_rtp_sink_template,
+ "recv_rtp_sink");
+ gst_pad_set_chain_function (rtpsession->recv_rtp_sink,
+ gst_rtp_session_chain_recv_rtp);
+ gst_pad_set_chain_list_function (rtpsession->recv_rtp_sink,
+ gst_rtp_session_chain_recv_rtp_list);
+ gst_pad_set_event_function (rtpsession->recv_rtp_sink,
+ gst_rtp_session_event_recv_rtp_sink);
+ gst_pad_set_iterate_internal_links_function (rtpsession->recv_rtp_sink,
+ gst_rtp_session_iterate_internal_links);
+ /* let allocation queries pass straight through to the peer */
+ GST_PAD_SET_PROXY_ALLOCATION (rtpsession->recv_rtp_sink);
+ gst_pad_set_active (rtpsession->recv_rtp_sink, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpsession),
+ rtpsession->recv_rtp_sink);
+
+ GST_DEBUG_OBJECT (rtpsession, "creating RTP src pad");
+ rtpsession->recv_rtp_src =
+ gst_pad_new_from_static_template (&rtpsession_recv_rtp_src_template,
+ "recv_rtp_src");
+ gst_pad_set_event_function (rtpsession->recv_rtp_src,
+ gst_rtp_session_event_recv_rtp_src);
+ gst_pad_set_iterate_internal_links_function (rtpsession->recv_rtp_src,
+ gst_rtp_session_iterate_internal_links);
+ gst_pad_use_fixed_caps (rtpsession->recv_rtp_src);
+ gst_pad_set_active (rtpsession->recv_rtp_src, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpsession), rtpsession->recv_rtp_src);
+
+ return rtpsession->recv_rtp_sink;
+}
+
+/* Remove sinkpad to receive RTP packets from senders. This will also remove
+ * the srcpad for the RTP packets.
+ * Called with the session lock held (from gst_rtp_session_release_pad).
+ */
+static void
+remove_recv_rtp_sink (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "removing RTP sink pad");
+
+  /* deactivate from source to sink */
+  gst_pad_set_active (rtpsession->recv_rtp_src, FALSE);
+  gst_pad_set_active (rtpsession->recv_rtp_sink, FALSE);
+
+  /* remove pads */
+  gst_element_remove_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->recv_rtp_sink);
+  rtpsession->recv_rtp_sink = NULL;
+
+  GST_DEBUG_OBJECT (rtpsession, "removing RTP src pad");
+  gst_element_remove_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->recv_rtp_src);
+  rtpsession->recv_rtp_src = NULL;
+}
+
+/* Create a sinkpad to receive RTCP messages from senders, this will also create a
+ * sync_src pad for the SR packets.
+ * Called with the session lock held (from gst_rtp_session_request_new_pad).
+ * Returns the new recv_rtcp_sink pad (owned by the element).
+ */
+static GstPad *
+create_recv_rtcp_sink (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "creating RTCP sink pad");
+
+  rtpsession->recv_rtcp_sink =
+      gst_pad_new_from_static_template (&rtpsession_recv_rtcp_sink_template,
+      "recv_rtcp_sink");
+  gst_pad_set_chain_function (rtpsession->recv_rtcp_sink,
+      gst_rtp_session_chain_recv_rtcp);
+  gst_pad_set_event_function (rtpsession->recv_rtcp_sink,
+      gst_rtp_session_event_recv_rtcp_sink);
+  gst_pad_set_iterate_internal_links_function (rtpsession->recv_rtcp_sink,
+      gst_rtp_session_iterate_internal_links);
+  gst_pad_set_active (rtpsession->recv_rtcp_sink, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->recv_rtcp_sink);
+
+  GST_DEBUG_OBJECT (rtpsession, "creating sync src pad");
+  rtpsession->sync_src =
+      gst_pad_new_from_static_template (&rtpsession_sync_src_template,
+      "sync_src");
+  gst_pad_set_iterate_internal_links_function (rtpsession->sync_src,
+      gst_rtp_session_iterate_internal_links);
+  gst_pad_use_fixed_caps (rtpsession->sync_src);
+  gst_pad_set_active (rtpsession->sync_src, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpsession), rtpsession->sync_src);
+
+  return rtpsession->recv_rtcp_sink;
+}
+
+/* Remove the recv_rtcp_sink pad and the companion sync_src pad.
+ * Called with the session lock held (from gst_rtp_session_release_pad).
+ */
+static void
+remove_recv_rtcp_sink (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "removing RTCP sink pad");
+
+  /* deactivate from source to sink */
+  gst_pad_set_active (rtpsession->sync_src, FALSE);
+  gst_pad_set_active (rtpsession->recv_rtcp_sink, FALSE);
+
+  gst_element_remove_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->recv_rtcp_sink);
+  rtpsession->recv_rtcp_sink = NULL;
+
+  GST_DEBUG_OBJECT (rtpsession, "removing sync src pad");
+  gst_element_remove_pad (GST_ELEMENT_CAST (rtpsession), rtpsession->sync_src);
+  rtpsession->sync_src = NULL;
+}
+
+/* Create a sinkpad to receive RTP packets for receivers. This will also create a
+ * send_rtp_src pad.
+ * Called with the session lock held (from gst_rtp_session_request_new_pad).
+ * Returns the new send_rtp_sink pad (owned by the element).
+ */
+static GstPad *
+create_send_rtp_sink (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "creating pad");
+
+  rtpsession->send_rtp_sink =
+      gst_pad_new_from_static_template (&rtpsession_send_rtp_sink_template,
+      "send_rtp_sink");
+  gst_pad_set_chain_function (rtpsession->send_rtp_sink,
+      gst_rtp_session_chain_send_rtp);
+  gst_pad_set_chain_list_function (rtpsession->send_rtp_sink,
+      gst_rtp_session_chain_send_rtp_list);
+  gst_pad_set_query_function (rtpsession->send_rtp_sink,
+      gst_rtp_session_query_send_rtp);
+  gst_pad_set_event_function (rtpsession->send_rtp_sink,
+      gst_rtp_session_event_send_rtp_sink);
+  gst_pad_set_iterate_internal_links_function (rtpsession->send_rtp_sink,
+      gst_rtp_session_iterate_internal_links);
+  /* caps and allocation are negotiated straight through the session */
+  GST_PAD_SET_PROXY_CAPS (rtpsession->send_rtp_sink);
+  GST_PAD_SET_PROXY_ALLOCATION (rtpsession->send_rtp_sink);
+  gst_pad_set_active (rtpsession->send_rtp_sink, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->send_rtp_sink);
+
+  rtpsession->send_rtp_src =
+      gst_pad_new_from_static_template (&rtpsession_send_rtp_src_template,
+      "send_rtp_src");
+  gst_pad_set_iterate_internal_links_function (rtpsession->send_rtp_src,
+      gst_rtp_session_iterate_internal_links);
+  gst_pad_set_event_function (rtpsession->send_rtp_src,
+      gst_rtp_session_event_send_rtp_src);
+  GST_PAD_SET_PROXY_CAPS (rtpsession->send_rtp_src);
+  gst_pad_set_active (rtpsession->send_rtp_src, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpsession), rtpsession->send_rtp_src);
+
+  return rtpsession->send_rtp_sink;
+}
+
+/* Remove the send_rtp_sink pad and the companion send_rtp_src pad.
+ * Called with the session lock held (from gst_rtp_session_release_pad).
+ */
+static void
+remove_send_rtp_sink (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "removing pad");
+
+  /* deactivate from source to sink */
+  gst_pad_set_active (rtpsession->send_rtp_src, FALSE);
+  gst_pad_set_active (rtpsession->send_rtp_sink, FALSE);
+
+  gst_element_remove_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->send_rtp_sink);
+  rtpsession->send_rtp_sink = NULL;
+
+  gst_element_remove_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->send_rtp_src);
+  rtpsession->send_rtp_src = NULL;
+}
+
+/* Create a srcpad with the RTCP packets to send out.
+ * This pad will be driven by the RTP session manager when it wants to send out
+ * RTCP packets.
+ * Called with the session lock held (from gst_rtp_session_request_new_pad).
+ * Returns the new send_rtcp_src pad (owned by the element).
+ */
+static GstPad *
+create_send_rtcp_src (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "creating pad");
+
+  rtpsession->send_rtcp_src =
+      gst_pad_new_from_static_template (&rtpsession_send_rtcp_src_template,
+      "send_rtcp_src");
+  gst_pad_use_fixed_caps (rtpsession->send_rtcp_src);
+  gst_pad_set_active (rtpsession->send_rtcp_src, TRUE);
+  gst_pad_set_iterate_internal_links_function (rtpsession->send_rtcp_src,
+      gst_rtp_session_iterate_internal_links);
+  gst_pad_set_query_function (rtpsession->send_rtcp_src,
+      gst_rtp_session_query_send_rtcp_src);
+  gst_pad_set_event_function (rtpsession->send_rtcp_src,
+      gst_rtp_session_event_send_rtcp_src);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->send_rtcp_src);
+
+  return rtpsession->send_rtcp_src;
+}
+
+/* Remove the send_rtcp_src pad.
+ * Called with the session lock held (from gst_rtp_session_release_pad).
+ */
+static void
+remove_send_rtcp_src (GstRtpSession * rtpsession)
+{
+  GST_DEBUG_OBJECT (rtpsession, "removing pad");
+
+  gst_pad_set_active (rtpsession->send_rtcp_src, FALSE);
+
+  gst_element_remove_pad (GST_ELEMENT_CAST (rtpsession),
+      rtpsession->send_rtcp_src);
+  rtpsession->send_rtcp_src = NULL;
+}
+
+/* GstElement::request_new_pad implementation.
+ * Each of the four request templates (recv_rtp_sink, recv_rtcp_sink,
+ * send_rtp_sink, send_rtcp_src) may be requested at most once; a second
+ * request for the same template warns and returns NULL. The create_*()
+ * helper for each template also adds its companion src pad.
+ */
+static GstPad *
+gst_rtp_session_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
+{
+  GstRtpSession *rtpsession;
+  GstElementClass *klass;
+  GstPad *result;
+
+  g_return_val_if_fail (templ != NULL, NULL);
+  g_return_val_if_fail (GST_IS_RTP_SESSION (element), NULL);
+
+  rtpsession = GST_RTP_SESSION (element);
+  klass = GST_ELEMENT_GET_CLASS (element);
+
+  GST_DEBUG_OBJECT (element, "requesting pad %s", GST_STR_NULL (name));
+
+  /* hold the session lock while creating pads so concurrent requests
+   * cannot create the same pad twice */
+  GST_RTP_SESSION_LOCK (rtpsession);
+
+  /* figure out the template */
+  if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink")) {
+    if (rtpsession->recv_rtp_sink != NULL)
+      goto exists;
+
+    result = create_recv_rtp_sink (rtpsession);
+  } else if (templ == gst_element_class_get_pad_template (klass,
+          "recv_rtcp_sink")) {
+    if (rtpsession->recv_rtcp_sink != NULL)
+      goto exists;
+
+    result = create_recv_rtcp_sink (rtpsession);
+  } else if (templ == gst_element_class_get_pad_template (klass,
+          "send_rtp_sink")) {
+    if (rtpsession->send_rtp_sink != NULL)
+      goto exists;
+
+    result = create_send_rtp_sink (rtpsession);
+  } else if (templ == gst_element_class_get_pad_template (klass,
+          "send_rtcp_src")) {
+    if (rtpsession->send_rtcp_src != NULL)
+      goto exists;
+
+    result = create_send_rtcp_src (rtpsession);
+  } else
+    goto wrong_template;
+
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  return result;
+
+  /* ERRORS */
+wrong_template:
+  {
+    GST_RTP_SESSION_UNLOCK (rtpsession);
+    g_warning ("rtpsession: this is not our template");
+    return NULL;
+  }
+exists:
+  {
+    GST_RTP_SESSION_UNLOCK (rtpsession);
+    g_warning ("rtpsession: pad already requested");
+    return NULL;
+  }
+}
+
+/* GstElement::release_pad implementation.
+ * Maps the released pad back to the request pad that owns it and tears
+ * down that pad together with its companion src pad. Releasing a pad we
+ * did not hand out warns and does nothing.
+ */
+static void
+gst_rtp_session_release_pad (GstElement * element, GstPad * pad)
+{
+  GstRtpSession *rtpsession;
+
+  g_return_if_fail (GST_IS_RTP_SESSION (element));
+  g_return_if_fail (GST_IS_PAD (pad));
+
+  rtpsession = GST_RTP_SESSION (element);
+
+  GST_DEBUG_OBJECT (element, "releasing pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+  GST_RTP_SESSION_LOCK (rtpsession);
+
+  if (rtpsession->recv_rtp_sink == pad) {
+    remove_recv_rtp_sink (rtpsession);
+  } else if (rtpsession->recv_rtcp_sink == pad) {
+    remove_recv_rtcp_sink (rtpsession);
+  } else if (rtpsession->send_rtp_sink == pad) {
+    remove_send_rtp_sink (rtpsession);
+  } else if (rtpsession->send_rtcp_src == pad) {
+    remove_send_rtcp_src (rtpsession);
+  } else
+    goto wrong_pad;
+
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  return;
+
+  /* ERRORS */
+wrong_pad:
+  {
+    GST_RTP_SESSION_UNLOCK (rtpsession);
+    g_warning ("rtpsession: asked to release an unknown pad");
+    return;
+  }
+}
+
+/* RTPSession callback: ask upstream for a new key unit by pushing a
+ * GstForceKeyUnit custom upstream event on send_rtp_sink.
+ * The pad pointer is sampled and referenced under the session lock, but
+ * the event is pushed with the lock released to avoid deadlocks with
+ * upstream elements.
+ */
+static void
+gst_rtp_session_request_key_unit (RTPSession * sess,
+    guint32 ssrc, gboolean all_headers, gpointer user_data)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION (user_data);
+  GstEvent *event;
+  GstPad *send_rtp_sink;
+
+  GST_RTP_SESSION_LOCK (rtpsession);
+  if ((send_rtp_sink = rtpsession->send_rtp_sink))
+    gst_object_ref (send_rtp_sink);
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  if (send_rtp_sink) {
+    event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+        gst_structure_new ("GstForceKeyUnit", "ssrc", G_TYPE_UINT, ssrc,
+            "all-headers", G_TYPE_BOOLEAN, all_headers, NULL));
+    gst_pad_push_event (send_rtp_sink, event);
+    gst_object_unref (send_rtp_sink);
+  }
+}
+
+/* RTPSession callback: report the current time of the session's system
+ * clock. */
+static GstClockTime
+gst_rtp_session_request_time (RTPSession * session, gpointer user_data)
+{
+  GstRtpSession *self = GST_RTP_SESSION (user_data);
+
+  return gst_clock_get_time (self->priv->sysclock);
+}
+
+/* RTPSession callback: translate a received NACK (seqnum + BLP bitmask)
+ * into one GstRTPRetransmissionRequest upstream event per lost packet.
+ * @blp is a bitmask of additional lost packets following @seqnum: bit N
+ * set means packet seqnum+N+1 is also lost. The loop walks every set bit
+ * and emits a request for the corresponding sequence number.
+ */
+static void
+gst_rtp_session_notify_nack (RTPSession * sess, guint16 seqnum,
+    guint16 blp, guint32 ssrc, gpointer user_data)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION (user_data);
+  GstEvent *event;
+  GstPad *send_rtp_sink;
+
+  /* take a ref on the pad under the lock; push events with it released */
+  GST_RTP_SESSION_LOCK (rtpsession);
+  if ((send_rtp_sink = rtpsession->send_rtp_sink))
+    gst_object_ref (send_rtp_sink);
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  if (send_rtp_sink) {
+    while (TRUE) {
+      event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
+          gst_structure_new ("GstRTPRetransmissionRequest",
+              "seqnum", G_TYPE_UINT, (guint) seqnum,
+              "ssrc", G_TYPE_UINT, (guint) ssrc, NULL));
+      gst_pad_push_event (send_rtp_sink, event);
+
+      /* count the request under the lock for the stats property */
+      GST_RTP_SESSION_LOCK (rtpsession);
+      rtpsession->priv->sent_rtx_req_count++;
+      GST_RTP_SESSION_UNLOCK (rtpsession);
+
+      /* no more bits set: all additional losses handled */
+      if (blp == 0)
+        break;
+
+      /* advance to the seqnum of the next set bit in the mask */
+      seqnum++;
+      while ((blp & 1) == 0) {
+        seqnum++;
+        blp >>= 1;
+      }
+      blp >>= 1;
+    }
+    gst_object_unref (send_rtp_sink);
+  }
+}
+
+/* RTPSession callback for Transport-Wide Congestion Control feedback.
+ * Takes ownership of @twcc_stats (stored as priv->last_twcc_stats,
+ * replacing and freeing any previous value) and of @twcc_packets (its
+ * ownership passes to the custom upstream event). Notifies the
+ * "twcc-stats" property after the lock is released.
+ */
+static void
+gst_rtp_session_notify_twcc (RTPSession * sess,
+    GstStructure * twcc_packets, GstStructure * twcc_stats, gpointer user_data)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION (user_data);
+  GstEvent *event;
+  GstPad *send_rtp_sink;
+
+  GST_RTP_SESSION_LOCK (rtpsession);
+  if ((send_rtp_sink = rtpsession->send_rtp_sink))
+    gst_object_ref (send_rtp_sink);
+  if (rtpsession->priv->last_twcc_stats)
+    gst_structure_free (rtpsession->priv->last_twcc_stats);
+  rtpsession->priv->last_twcc_stats = twcc_stats;
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+
+  if (send_rtp_sink) {
+    event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, twcc_packets);
+    gst_pad_push_event (send_rtp_sink, event);
+    gst_object_unref (send_rtp_sink);
+  }
+
+  g_object_notify (G_OBJECT (rtpsession), "twcc-stats");
+}
+
+/* RTPSession callback: push a reconfigure event upstream over the
+ * send_rtp_sink pad, if one exists. The pad is referenced while holding
+ * the session lock; the event itself is pushed with the lock released. */
+static void
+gst_rtp_session_reconfigure (RTPSession * sess, gpointer user_data)
+{
+  GstRtpSession *self = GST_RTP_SESSION (user_data);
+  GstPad *sinkpad;
+
+  GST_RTP_SESSION_LOCK (self);
+  sinkpad = self->send_rtp_sink;
+  if (sinkpad != NULL)
+    gst_object_ref (sinkpad);
+  GST_RTP_SESSION_UNLOCK (self);
+
+  if (sinkpad != NULL) {
+    gst_pad_push_event (sinkpad, gst_event_new_reconfigure ());
+    gst_object_unref (sinkpad);
+  }
+}
+
+/* RTPSession callback: an early RTCP packet was requested; wake up the
+ * RTCP thread so it can send it. */
+static void
+gst_rtp_session_notify_early_rtcp (RTPSession * sess, gpointer user_data)
+{
+  GstRtpSession *rtpsession = GST_RTP_SESSION (user_data);
+
+  GST_DEBUG_OBJECT (rtpsession, "Notified of early RTCP");
+  /* with an early RTCP request, we might have to start the RTCP thread */
+  GST_RTP_SESSION_LOCK (rtpsession);
+  signal_waiting_rtcp_thread_unlocked (rtpsession);
+  GST_RTP_SESSION_UNLOCK (rtpsession);
+}
diff --git a/gst/rtpmanager/gstrtpsession.h b/gst/rtpmanager/gstrtpsession.h
new file mode 100644
index 0000000000..5a3ecfc1c1
--- /dev/null
+++ b/gst/rtpmanager/gstrtpsession.h
@@ -0,0 +1,94 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_SESSION_H__
+#define __GST_RTP_SESSION_H__
+
+#include <gst/gst.h>
+
+#define GST_TYPE_RTP_SESSION \
+ (gst_rtp_session_get_type())
+#define GST_RTP_SESSION(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SESSION,GstRtpSession))
+#define GST_RTP_SESSION_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SESSION,GstRtpSessionClass))
+#define GST_IS_RTP_SESSION(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SESSION))
+#define GST_IS_RTP_SESSION_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SESSION))
+#define GST_RTP_SESSION_CAST(obj) ((GstRtpSession *)(obj))
+
+typedef struct _GstRtpSession GstRtpSession;
+typedef struct _GstRtpSessionClass GstRtpSessionClass;
+typedef struct _GstRtpSessionPrivate GstRtpSessionPrivate;
+
+struct _GstRtpSession {
+  GstElement element;
+
+  /*< private >*/
+  /* request pads; NULL until requested. Each sink pad has a companion
+   * src pad created/destroyed together with it. */
+  GstPad *recv_rtp_sink;
+  GstSegment recv_rtp_seg;   /* segment active on recv_rtp_sink */
+  GstPad *recv_rtcp_sink;
+  GstPad *send_rtp_sink;
+  GstSegment send_rtp_seg;   /* segment active on send_rtp_sink */
+
+  GstPad *recv_rtp_src;
+  GstPad *sync_src;          /* carries RTCP SR info for synchronisation */
+  GstPad *send_rtp_src;
+  GstPad *send_rtcp_src;
+
+  guint32 recv_rtcp_segment_seqnum;
+
+  GstRtpSessionPrivate *priv;
+};
+
+struct _GstRtpSessionClass {
+  GstElementClass parent_class;
+
+  /* signals */
+  /* payload-type mapping: ask the app for caps for @pt / flush the map */
+  GstCaps* (*request_pt_map) (GstRtpSession *sess, guint pt);
+  void (*clear_pt_map) (GstRtpSession *sess);
+
+  /* per-SSRC lifecycle notifications */
+  void (*on_new_ssrc) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_ssrc_collision) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_ssrc_validated) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_ssrc_active) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_ssrc_sdes) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_bye_ssrc) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_bye_timeout) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_timeout) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_sender_timeout) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_new_sender_ssrc) (GstRtpSession *sess, guint32 ssrc);
+  void (*on_sender_ssrc_active) (GstRtpSession *sess, guint32 ssrc);
+};
+
+GType gst_rtp_session_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpsession);
+
+/* Selects how the NTP timestamps used by the session are derived.
+ * NOTE(review): exact semantics of each value live in the rtpsession
+ * property documentation — confirm there before relying on them. */
+typedef enum {
+  GST_RTP_NTP_TIME_SOURCE_NTP,
+  GST_RTP_NTP_TIME_SOURCE_UNIX,
+  GST_RTP_NTP_TIME_SOURCE_RUNNING_TIME,
+  GST_RTP_NTP_TIME_SOURCE_CLOCK_TIME
+} GstRtpNtpTimeSource;
+
+GType gst_rtp_ntp_time_source_get_type (void);
+
+#endif /* __GST_RTP_SESSION_H__ */
diff --git a/gst/rtpmanager/gstrtpssrcdemux.c b/gst/rtpmanager/gstrtpssrcdemux.c
new file mode 100644
index 0000000000..9713131e1b
--- /dev/null
+++ b/gst/rtpmanager/gstrtpssrcdemux.c
@@ -0,0 +1,1008 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * RTP SSRC demuxer
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpssrcdemux
+ * @title: rtpssrcdemux
+ *
+ * rtpssrcdemux acts as a demuxer for RTP packets based on the SSRC of the
+ * packets. Its main purpose is to allow an application to easily receive and
+ * decode an RTP stream with multiple SSRCs.
+ *
+ * For each SSRC that is detected, a new pad will be created and the
+ * #GstRtpSsrcDemux::new-ssrc-pad signal will be emitted.
+ *
+ * ## Example pipelines
+ * |[
+ * gst-launch-1.0 udpsrc caps="application/x-rtp" ! rtpssrcdemux ! fakesink
+ * ]| Takes an RTP stream and send the RTP packets with the first detected SSRC
+ * to fakesink, discarding the other SSRCs.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/rtp/gstrtcpbuffer.h>
+
+#include "gstrtpssrcdemux.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtp_ssrc_demux_debug);
+#define GST_CAT_DEFAULT gst_rtp_ssrc_demux_debug
+
+/* generic templates */
+static GstStaticPadTemplate rtp_ssrc_demux_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate rtp_ssrc_demux_rtcp_sink_template =
+GST_STATIC_PAD_TEMPLATE ("rtcp_sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtcp")
+ );
+
+static GstStaticPadTemplate rtp_ssrc_demux_src_template =
+GST_STATIC_PAD_TEMPLATE ("src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate rtp_ssrc_demux_rtcp_src_template =
+GST_STATIC_PAD_TEMPLATE ("rtcp_src_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtcp")
+ );
+
+#define INTERNAL_STREAM_LOCK(obj) (g_rec_mutex_lock (&(obj)->padlock))
+#define INTERNAL_STREAM_UNLOCK(obj) (g_rec_mutex_unlock (&(obj)->padlock))
+
+#define GST_PAD_FLAG_STICKIES_SENT (GST_PAD_FLAG_LAST << 0)
+#define GST_PAD_STICKIES_SENT(pad) (GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_STICKIES_SENT))
+#define GST_PAD_SET_STICKIES_SENT(pad) (GST_OBJECT_FLAG_SET (pad, GST_PAD_FLAG_STICKIES_SENT))
+
+/* Which of the two per-SSRC src pads (RTP or RTCP) an operation targets. */
+typedef enum
+{
+  RTP_PAD,
+  RTCP_PAD
+} PadType;
+
+#define DEFAULT_MAX_STREAMS G_MAXUINT
+enum
+{
+ PROP_0,
+ PROP_MAX_STREAMS
+};
+
+/* signals */
+enum
+{
+ SIGNAL_NEW_SSRC_PAD,
+ SIGNAL_REMOVED_SSRC_PAD,
+ SIGNAL_CLEAR_SSRC,
+ LAST_SIGNAL
+};
+
+#define gst_rtp_ssrc_demux_parent_class parent_class
+G_DEFINE_TYPE (GstRtpSsrcDemux, gst_rtp_ssrc_demux, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (rtpssrcdemux, "rtpssrcdemux", GST_RANK_NONE,
+ GST_TYPE_RTP_SSRC_DEMUX);
+
+/* GObject vmethods */
+static void gst_rtp_ssrc_demux_dispose (GObject * object);
+static void gst_rtp_ssrc_demux_finalize (GObject * object);
+
+/* GstElement vmethods */
+static GstStateChangeReturn gst_rtp_ssrc_demux_change_state (GstElement *
+ element, GstStateChange transition);
+
+static void gst_rtp_ssrc_demux_clear_ssrc (GstRtpSsrcDemux * demux,
+ guint32 ssrc);
+
+/* sinkpad stuff */
+static GstFlowReturn gst_rtp_ssrc_demux_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_rtp_ssrc_demux_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+static GstFlowReturn gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buf);
+static GstIterator *gst_rtp_ssrc_demux_iterate_internal_links_sink (GstPad *
+ pad, GstObject * parent);
+
+/* srcpad stuff */
+static gboolean gst_rtp_ssrc_demux_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstIterator *gst_rtp_ssrc_demux_iterate_internal_links_src (GstPad * pad,
+ GstObject * parent);
+static gboolean gst_rtp_ssrc_demux_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+static guint gst_rtp_ssrc_demux_signals[LAST_SIGNAL] = { 0 };
+
+/*
+ * Item for storing GstPad <-> SSRC pairs.
+ */
+/* Per-SSRC bookkeeping: the pair of src pads created for one SSRC.
+ * Stored in GstRtpSsrcDemux.srcpads (protected by the object lock). */
+typedef struct
+{
+  guint32 ssrc;
+  GstPad *rtp_pad;
+  GstCaps *caps;    /* NOTE(review): not touched in the code visible here —
+                     * confirm who sets/frees it */
+  GstPad *rtcp_pad;
+} GstRtpSsrcDemuxPads;
+
+/* find a src pad for a given SSRC, returns NULL if the SSRC was not found
+ * MUST be called with object lock
+ */
+/* Linear search of demux->srcpads for the entry matching @ssrc.
+ * Returns the GstRtpSsrcDemuxPads entry, or NULL when the SSRC is unknown.
+ * MUST be called with the object lock held.
+ */
+static GstRtpSsrcDemuxPads *
+find_demux_pads_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc)
+{
+  GSList *item;
+
+  for (item = demux->srcpads; item != NULL; item = item->next) {
+    GstRtpSsrcDemuxPads *dpads = (GstRtpSsrcDemuxPads *) item->data;
+
+    if (dpads->ssrc == ssrc)
+      return dpads;
+  }
+
+  return NULL;
+}
+
+/* returns a reference to the pad if found, %NULL otherwise.
+ * Takes the object lock internally; the caller must unref the pad. */
+static GstPad *
+get_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc, PadType padtype)
+{
+  GstRtpSsrcDemuxPads *dpads;
+  GstPad *retpad;
+
+  GST_OBJECT_LOCK (demux);
+
+  dpads = find_demux_pads_for_ssrc (demux, ssrc);
+  if (!dpads) {
+    GST_OBJECT_UNLOCK (demux);
+    return NULL;
+  }
+
+  /* pick the RTP or RTCP pad of the pair */
+  switch (padtype) {
+    case RTP_PAD:
+      retpad = gst_object_ref (dpads->rtp_pad);
+      break;
+    case RTCP_PAD:
+      retpad = gst_object_ref (dpads->rtcp_pad);
+      break;
+    default:
+      retpad = NULL;
+      g_assert_not_reached ();
+  }
+
+  GST_OBJECT_UNLOCK (demux);
+
+  return retpad;
+}
+
+/* Return a reffed event suitable for pushing on a per-SSRC src pad.
+ * CAPS events get a modified copy with the "ssrc" field set; all other
+ * events are simply reffed. The caller owns the returned reference;
+ * the input event is not consumed. */
+static GstEvent *
+add_ssrc_and_ref (GstEvent * event, guint32 ssrc)
+{
+  /* Set the ssrc on the output caps */
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+      GstCaps *newcaps;
+      GstStructure *s;
+
+      gst_event_parse_caps (event, &caps);
+      newcaps = gst_caps_copy (caps);
+
+      s = gst_caps_get_structure (newcaps, 0);
+      gst_structure_set (s, "ssrc", G_TYPE_UINT, ssrc, NULL);
+      event = gst_event_new_caps (newcaps);
+      gst_caps_unref (newcaps);
+      break;
+    }
+    default:
+      gst_event_ref (event);
+      break;
+  }
+
+  return event;
+}
+
+/* Context for forward_sticky_events: the destination pad and the SSRC to
+ * stamp on forwarded CAPS events. */
+struct ForwardStickyEventData
+{
+  GstPad *pad;
+  guint32 ssrc;
+};
+
+/* With internal stream lock held */
+/* gst_pad_sticky_events_foreach callback: push a (possibly SSRC-stamped)
+ * copy of each sticky event to the target pad. Always returns TRUE to
+ * keep iterating. */
+static gboolean
+forward_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+  struct ForwardStickyEventData *data = user_data;
+  GstEvent *newevent;
+
+  newevent = add_ssrc_and_ref (*event, data->ssrc);
+  gst_pad_push_event (data->pad, newevent);
+
+  return TRUE;
+}
+
+/* Replay the sticky events of the matching sink pad (RTP or RTCP) onto a
+ * newly created per-SSRC src pad, stamping the SSRC on CAPS events. */
+static void
+forward_initial_events (GstRtpSsrcDemux * demux, guint32 ssrc, GstPad * pad,
+    PadType padtype)
+{
+  struct ForwardStickyEventData fdata;
+  GstPad *sinkpad = NULL;
+
+  if (padtype == RTP_PAD)
+    sinkpad = demux->rtp_sink;
+  else if (padtype == RTCP_PAD)
+    sinkpad = demux->rtcp_sink;
+  else
+    g_assert_not_reached ();
+
+  fdata.ssrc = ssrc;
+  fdata.pad = pad;
+
+  gst_pad_sticky_events_foreach (sinkpad, forward_sticky_events, &fdata);
+}
+
+/* MUST only be called from streaming thread.
+ * Look up the RTP/RTCP src pad pair for @ssrc, creating both pads if the
+ * SSRC is new. Returns a reference to the requested pad, or NULL when the
+ * max-streams limit is reached. Serialised by the internal stream lock;
+ * the srcpads list itself is additionally protected by the object lock.
+ */
+static GstPad *
+find_or_create_demux_pad_for_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc,
+    PadType padtype)
+{
+  GstPad *rtp_pad, *rtcp_pad;
+  GstElementClass *klass;
+  GstPadTemplate *templ;
+  gchar *padname;
+  GstRtpSsrcDemuxPads *dpads;
+  GstPad *retpad;
+  guint num_streams;
+
+  INTERNAL_STREAM_LOCK (demux);
+
+  retpad = get_demux_pad_for_ssrc (demux, ssrc, padtype);
+  if (retpad != NULL) {
+    INTERNAL_STREAM_UNLOCK (demux);
+    return retpad;
+  }
+  /* We create 2 src pads per ssrc (RTP & RTCP). Checking if we are allowed
+     to create 2 more pads */
+  num_streams = (GST_ELEMENT_CAST (demux)->numsrcpads) >> 1;
+  if (num_streams >= demux->max_streams) {
+    INTERNAL_STREAM_UNLOCK (demux);
+    return NULL;
+  }
+
+  GST_DEBUG_OBJECT (demux, "creating new pad for SSRC %08x", ssrc);
+
+  /* pads are named after the SSRC, e.g. src_1234 / rtcp_src_1234 */
+  klass = GST_ELEMENT_GET_CLASS (demux);
+  templ = gst_element_class_get_pad_template (klass, "src_%u");
+  padname = g_strdup_printf ("src_%u", ssrc);
+  rtp_pad = gst_pad_new_from_template (templ, padname);
+  g_free (padname);
+
+  templ = gst_element_class_get_pad_template (klass, "rtcp_src_%u");
+  padname = g_strdup_printf ("rtcp_src_%u", ssrc);
+  rtcp_pad = gst_pad_new_from_template (templ, padname);
+  g_free (padname);
+
+  /* wrap in structure and add to list */
+  dpads = g_new0 (GstRtpSsrcDemuxPads, 1);
+  dpads->ssrc = ssrc;
+  dpads->rtp_pad = rtp_pad;
+  dpads->rtcp_pad = rtcp_pad;
+
+  GST_OBJECT_LOCK (demux);
+  demux->srcpads = g_slist_prepend (demux->srcpads, dpads);
+  GST_OBJECT_UNLOCK (demux);
+
+  gst_pad_set_query_function (rtp_pad, gst_rtp_ssrc_demux_src_query);
+  gst_pad_set_iterate_internal_links_function (rtp_pad,
+      gst_rtp_ssrc_demux_iterate_internal_links_src);
+  gst_pad_set_event_function (rtp_pad, gst_rtp_ssrc_demux_src_event);
+  gst_pad_use_fixed_caps (rtp_pad);
+  gst_pad_set_active (rtp_pad, TRUE);
+
+  gst_pad_set_event_function (rtcp_pad, gst_rtp_ssrc_demux_src_event);
+  gst_pad_set_iterate_internal_links_function (rtcp_pad,
+      gst_rtp_ssrc_demux_iterate_internal_links_src);
+  gst_pad_use_fixed_caps (rtcp_pad);
+  gst_pad_set_active (rtcp_pad, TRUE);
+
+  gst_element_add_pad (GST_ELEMENT_CAST (demux), rtp_pad);
+  gst_element_add_pad (GST_ELEMENT_CAST (demux), rtcp_pad);
+
+  switch (padtype) {
+    case RTP_PAD:
+      retpad = gst_object_ref (dpads->rtp_pad);
+      break;
+    case RTCP_PAD:
+      retpad = gst_object_ref (dpads->rtcp_pad);
+      break;
+    default:
+      retpad = NULL;
+      g_assert_not_reached ();
+  }
+
+  /* NOTE(review): emitted with the internal stream lock still held —
+   * handlers must not call back into the demuxer in a way that retakes
+   * this lock */
+  g_signal_emit (G_OBJECT (demux),
+      gst_rtp_ssrc_demux_signals[SIGNAL_NEW_SSRC_PAD], 0, ssrc, rtp_pad);
+
+  INTERNAL_STREAM_UNLOCK (demux);
+
+  return retpad;
+}
+
+/* GObject property setter; only max-streams is writable. */
+static void
+gst_rtp_ssrc_demux_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRtpSsrcDemux *self = GST_RTP_SSRC_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_MAX_STREAMS:
+      self->max_streams = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter; only max-streams is readable. */
+static void
+gst_rtp_ssrc_demux_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstRtpSsrcDemux *self = GST_RTP_SSRC_DEMUX (object);
+
+  switch (prop_id) {
+    case PROP_MAX_STREAMS:
+      g_value_set_uint (value, self->max_streams);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Class initialisation: install properties, signals, vfuncs, pad
+ * templates and element metadata. */
+static void
+gst_rtp_ssrc_demux_class_init (GstRtpSsrcDemuxClass * klass)
+{
+  GObjectClass *gobject_klass;
+  GstElementClass *gstelement_klass;
+  GstRtpSsrcDemuxClass *gstrtpssrcdemux_klass;
+
+  gobject_klass = (GObjectClass *) klass;
+  gstelement_klass = (GstElementClass *) klass;
+  gstrtpssrcdemux_klass = (GstRtpSsrcDemuxClass *) klass;
+
+  gobject_klass->dispose = gst_rtp_ssrc_demux_dispose;
+  gobject_klass->finalize = gst_rtp_ssrc_demux_finalize;
+  gobject_klass->set_property = gst_rtp_ssrc_demux_set_property;
+  gobject_klass->get_property = gst_rtp_ssrc_demux_get_property;
+
+  g_object_class_install_property (gobject_klass, PROP_MAX_STREAMS,
+      g_param_spec_uint ("max-streams", "Max Streams",
+          "The maximum number of streams allowed",
+          0, G_MAXUINT, DEFAULT_MAX_STREAMS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * GstRtpSsrcDemux::new-ssrc-pad:
+   * @demux: the object which received the signal
+   * @ssrc: the SSRC of the pad
+   * @pad: the new pad.
+   *
+   * Emitted when a new SSRC pad has been created.
+   */
+  gst_rtp_ssrc_demux_signals[SIGNAL_NEW_SSRC_PAD] =
+      g_signal_new ("new-ssrc-pad",
+      G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
+      G_STRUCT_OFFSET (GstRtpSsrcDemuxClass, new_ssrc_pad),
+      NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, GST_TYPE_PAD);
+
+  /**
+   * GstRtpSsrcDemux::removed-ssrc-pad:
+   * @demux: the object which received the signal
+   * @ssrc: the SSRC of the pad
+   * @pad: the removed pad.
+   *
+   * Emitted when a SSRC pad has been removed.
+   */
+  gst_rtp_ssrc_demux_signals[SIGNAL_REMOVED_SSRC_PAD] =
+      g_signal_new ("removed-ssrc-pad",
+      G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
+      G_STRUCT_OFFSET (GstRtpSsrcDemuxClass, removed_ssrc_pad),
+      NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, GST_TYPE_PAD);
+
+  /**
+   * GstRtpSsrcDemux::clear-ssrc:
+   * @demux: the object which received the signal
+   * @ssrc: the SSRC of the pad
+   *
+   * Action signal to remove the pad for SSRC.
+   */
+  gst_rtp_ssrc_demux_signals[SIGNAL_CLEAR_SSRC] =
+      g_signal_new ("clear-ssrc",
+      G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+      G_STRUCT_OFFSET (GstRtpSsrcDemuxClass, clear_ssrc),
+      NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_UINT);
+
+  gstelement_klass->change_state =
+      GST_DEBUG_FUNCPTR (gst_rtp_ssrc_demux_change_state);
+  gstrtpssrcdemux_klass->clear_ssrc =
+      GST_DEBUG_FUNCPTR (gst_rtp_ssrc_demux_clear_ssrc);
+
+  gst_element_class_add_static_pad_template (gstelement_klass,
+      &rtp_ssrc_demux_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_klass,
+      &rtp_ssrc_demux_rtcp_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_klass,
+      &rtp_ssrc_demux_src_template);
+  gst_element_class_add_static_pad_template (gstelement_klass,
+      &rtp_ssrc_demux_rtcp_src_template);
+
+  gst_element_class_set_static_metadata (gstelement_klass, "RTP SSRC Demux",
+      "Demux/Network/RTP",
+      "Splits RTP streams based on the SSRC",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  GST_DEBUG_CATEGORY_INIT (gst_rtp_ssrc_demux_debug,
+      "rtpssrcdemux", 0, "RTP SSRC demuxer");
+
+  /* register the chain functions so they show up nicely in debug logs */
+  GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_ssrc_demux_chain);
+  GST_DEBUG_REGISTER_FUNCPTR (gst_rtp_ssrc_demux_rtcp_chain);
+}
+
+/* Instance initialisation: create the two always-sink pads (RTP and RTCP)
+ * and the recursive pad lock used to serialise pad creation. */
+static void
+gst_rtp_ssrc_demux_init (GstRtpSsrcDemux * demux)
+{
+  GstElementClass *klass = GST_ELEMENT_GET_CLASS (demux);
+
+  demux->rtp_sink =
+      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+          "sink"), "sink");
+  gst_pad_set_chain_function (demux->rtp_sink, gst_rtp_ssrc_demux_chain);
+  gst_pad_set_event_function (demux->rtp_sink, gst_rtp_ssrc_demux_sink_event);
+  gst_pad_set_iterate_internal_links_function (demux->rtp_sink,
+      gst_rtp_ssrc_demux_iterate_internal_links_sink);
+  gst_element_add_pad (GST_ELEMENT_CAST (demux), demux->rtp_sink);
+
+  demux->rtcp_sink =
+      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+          "rtcp_sink"), "rtcp_sink");
+  gst_pad_set_chain_function (demux->rtcp_sink, gst_rtp_ssrc_demux_rtcp_chain);
+  gst_pad_set_event_function (demux->rtcp_sink, gst_rtp_ssrc_demux_sink_event);
+  gst_pad_set_iterate_internal_links_function (demux->rtcp_sink,
+      gst_rtp_ssrc_demux_iterate_internal_links_sink);
+  gst_element_add_pad (GST_ELEMENT_CAST (demux), demux->rtcp_sink);
+
+  demux->max_streams = DEFAULT_MAX_STREAMS;
+
+  g_rec_mutex_init (&demux->padlock);
+}
+
+/* Deactivate and remove the pad pair of one SSRC and free the entry.
+ * Uses GST_PAD_PARENT, so the pads must still be owned by the element. */
+static void
+gst_rtp_ssrc_demux_pads_free (GstRtpSsrcDemuxPads * dpads)
+{
+  gst_pad_set_active (dpads->rtp_pad, FALSE);
+  gst_pad_set_active (dpads->rtcp_pad, FALSE);
+
+  gst_element_remove_pad (GST_PAD_PARENT (dpads->rtp_pad), dpads->rtp_pad);
+  gst_element_remove_pad (GST_PAD_PARENT (dpads->rtcp_pad), dpads->rtcp_pad);
+
+  g_free (dpads);
+}
+
+/* Drop all per-SSRC pads and clear the srcpads list. */
+static void
+gst_rtp_ssrc_demux_reset (GstRtpSsrcDemux * demux)
+{
+  g_slist_free_full (demux->srcpads,
+      (GDestroyNotify) gst_rtp_ssrc_demux_pads_free);
+  demux->srcpads = NULL;
+}
+
+/* GObject::dispose — release all per-SSRC pads, then chain up. */
+static void
+gst_rtp_ssrc_demux_dispose (GObject * object)
+{
+  GstRtpSsrcDemux *demux;
+
+  demux = GST_RTP_SSRC_DEMUX (object);
+
+  gst_rtp_ssrc_demux_reset (demux);
+
+  G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* GObject::finalize — destroy the pad lock, then chain up. */
+static void
+gst_rtp_ssrc_demux_finalize (GObject * object)
+{
+  GstRtpSsrcDemux *demux;
+
+  demux = GST_RTP_SSRC_DEMUX (object);
+  g_rec_mutex_clear (&demux->padlock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* "clear-ssrc" action signal handler: remove the pad pair for @ssrc.
+ * The entry is unlinked from the list under the object lock, the
+ * removed-ssrc-pad signal is emitted with the lock released, and only
+ * then are the pads deactivated/removed and the entry freed. */
+static void
+gst_rtp_ssrc_demux_clear_ssrc (GstRtpSsrcDemux * demux, guint32 ssrc)
+{
+  GstRtpSsrcDemuxPads *dpads;
+
+  GST_OBJECT_LOCK (demux);
+  dpads = find_demux_pads_for_ssrc (demux, ssrc);
+  if (dpads == NULL) {
+    GST_OBJECT_UNLOCK (demux);
+    goto unknown_pad;
+  }
+
+  GST_DEBUG_OBJECT (demux, "clearing pad for SSRC %08x", ssrc);
+
+  demux->srcpads = g_slist_remove (demux->srcpads, dpads);
+  GST_OBJECT_UNLOCK (demux);
+
+  g_signal_emit (G_OBJECT (demux),
+      gst_rtp_ssrc_demux_signals[SIGNAL_REMOVED_SSRC_PAD], 0, ssrc,
+      dpads->rtp_pad);
+
+  gst_rtp_ssrc_demux_pads_free (dpads);
+
+  return;
+
+  /* ERRORS */
+unknown_pad:
+  {
+    GST_WARNING_OBJECT (demux, "unknown SSRC %08x", ssrc);
+    return;
+  }
+}
+
+/* Closure handed to forward_event () through gst_pad_forward () */
+struct ForwardEventData
+{
+  GstRtpSsrcDemux *demux;
+  GstEvent *event;              /* event being forwarded (owned by caller) */
+  gboolean res;                 /* ANDed result of the event pushes */
+  GstPad *pad;                  /* sink pad the event arrived on */
+};
+
+/* GstPadForwardFunction: pushes fdata->event to the demuxed source pad
+ * @pad, with the pad's SSRC added to the event.  Always returns FALSE so
+ * gst_pad_forward () keeps iterating over all source pads. */
+static gboolean
+forward_event (GstPad * pad, gpointer user_data)
+{
+  struct ForwardEventData *fdata = user_data;
+  GSList *walk = NULL;
+  GstEvent *newevent = NULL;
+
+  /* special case for EOS */
+  if (GST_EVENT_TYPE (fdata->event) == GST_EVENT_EOS)
+    GST_PAD_SET_STICKIES_SENT (pad);
+
+  /* NOTE(review): pads that have not yet pushed their sticky events get
+   * them later via forward_initial_events () in the chain functions, so
+   * sticky events are skipped here for such pads — confirm against the
+   * parts of this file outside this view. */
+  if (GST_EVENT_IS_STICKY (fdata->event) && !GST_PAD_STICKIES_SENT (pad))
+    return FALSE;
+
+  GST_OBJECT_LOCK (fdata->demux);
+  /* find the SSRC owning this pad so it can be stamped onto the event */
+  for (walk = fdata->demux->srcpads; walk; walk = walk->next) {
+    GstRtpSsrcDemuxPads *dpads = (GstRtpSsrcDemuxPads *) walk->data;
+
+    if (pad == dpads->rtp_pad || pad == dpads->rtcp_pad) {
+      newevent = add_ssrc_and_ref (fdata->event, dpads->ssrc);
+      break;
+    }
+  }
+  GST_OBJECT_UNLOCK (fdata->demux);
+
+  if (newevent)
+    fdata->res &= gst_pad_push_event (pad, newevent);
+
+  return FALSE;
+}
+
+
+/* Sink pad event handler for both the RTP and RTCP sink pads: fans the
+ * event out to every demuxed source pad via forward_event () and reports
+ * the combined result. */
+static gboolean
+gst_rtp_ssrc_demux_sink_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  struct ForwardEventData fdata = {
+    GST_RTP_SSRC_DEMUX (parent), event, TRUE, pad
+  };
+
+  gst_pad_forward (pad, forward_event, &fdata);
+
+  /* forward_event () takes its own refs; drop the incoming one */
+  gst_event_unref (event);
+
+  return fdata.res;
+}
+
+/* Chain function for the RTP sink pad: extracts the SSRC from the packet
+ * and pushes the buffer to the matching source pad, creating the pad on
+ * demand.  Invalid packets and streams beyond max-streams are dropped
+ * without error (GST_FLOW_OK). */
+static GstFlowReturn
+gst_rtp_ssrc_demux_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstFlowReturn ret;
+  GstRtpSsrcDemux *demux;
+  guint32 ssrc;
+  GstRTPBuffer rtp = { NULL };
+  GstPad *srcpad;
+
+  demux = GST_RTP_SSRC_DEMUX (parent);
+
+  if (!gst_rtp_buffer_map (buf, GST_MAP_READ, &rtp))
+    goto invalid_payload;
+
+  ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+  gst_rtp_buffer_unmap (&rtp);
+
+  GST_DEBUG_OBJECT (demux, "received buffer of SSRC %08x", ssrc);
+
+  srcpad = find_or_create_demux_pad_for_ssrc (demux, ssrc, RTP_PAD);
+  if (srcpad == NULL)
+    goto create_failed;
+
+  /* replay the sink pad's sticky events on a freshly used source pad */
+  if (!GST_PAD_STICKIES_SENT (srcpad)) {
+    forward_initial_events (demux, ssrc, srcpad, RTP_PAD);
+    GST_PAD_SET_STICKIES_SENT (srcpad);
+  }
+
+  /* push to srcpad */
+  ret = gst_pad_push (srcpad, buf);
+
+  if (ret != GST_FLOW_OK) {
+    GstPad *active_pad;
+
+    /* check if the ssrc still there, may have been removed */
+    active_pad = get_demux_pad_for_ssrc (demux, ssrc, RTP_PAD);
+
+    if (active_pad == NULL || active_pad != srcpad) {
+      /* SSRC was removed during the push ... ignore the error */
+      ret = GST_FLOW_OK;
+    }
+
+    g_clear_object (&active_pad);
+  }
+
+  gst_object_unref (srcpad);
+
+  return ret;
+
+  /* ERRORS */
+invalid_payload:
+  {
+    GST_DEBUG_OBJECT (demux, "Dropping invalid RTP packet");
+    gst_buffer_unref (buf);
+    return GST_FLOW_OK;
+  }
+create_failed:
+  {
+    gst_buffer_unref (buf);
+    GST_WARNING_OBJECT (demux,
+        "Dropping buffer SSRC %08x. "
+        "Max streams number reached (%u)", ssrc, demux->max_streams);
+    return GST_FLOW_OK;
+  }
+}
+
+/* Chain function for the RTCP sink pad: derives the SSRC from the first
+ * RTCP packet in the compound buffer and pushes the buffer to the matching
+ * RTCP source pad, creating the pad on demand.  Invalid / unexpected
+ * packets and streams beyond max-streams are dropped with GST_FLOW_OK. */
+static GstFlowReturn
+gst_rtp_ssrc_demux_rtcp_chain (GstPad * pad, GstObject * parent,
+    GstBuffer * buf)
+{
+  GstFlowReturn ret;
+  GstRtpSsrcDemux *demux;
+  guint32 ssrc;
+  GstRTCPPacket packet;
+  GstRTCPBuffer rtcp = { NULL, };
+  GstPad *srcpad;
+
+  demux = GST_RTP_SSRC_DEMUX (parent);
+
+  if (!gst_rtcp_buffer_validate_reduced (buf))
+    goto invalid_rtcp;
+
+  gst_rtcp_buffer_map (buf, GST_MAP_READ, &rtcp);
+  if (!gst_rtcp_buffer_get_first_packet (&rtcp, &packet)) {
+    gst_rtcp_buffer_unmap (&rtcp);
+    goto invalid_rtcp;
+  }
+
+  /* first packet must be SR or RR, or in case of a reduced size RTCP packet
+   * it must be APP, RTPFB or PSFB feedback, or else the validate would
+   * have failed */
+  switch (gst_rtcp_packet_get_type (&packet)) {
+    case GST_RTCP_TYPE_SR:
+      /* get the ssrc so that we can route it to the right source pad */
+      gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, NULL, NULL, NULL,
+          NULL);
+      break;
+    case GST_RTCP_TYPE_RR:
+      ssrc = gst_rtcp_packet_rr_get_ssrc (&packet);
+      break;
+    case GST_RTCP_TYPE_APP:
+      ssrc = gst_rtcp_packet_app_get_ssrc (&packet);
+      break;
+    case GST_RTCP_TYPE_RTPFB:
+    case GST_RTCP_TYPE_PSFB:
+      ssrc = gst_rtcp_packet_fb_get_sender_ssrc (&packet);
+      break;
+    default:
+      goto unexpected_rtcp;
+  }
+  gst_rtcp_buffer_unmap (&rtcp);
+
+  GST_DEBUG_OBJECT (demux, "received RTCP of SSRC %08x", ssrc);
+
+  srcpad = find_or_create_demux_pad_for_ssrc (demux, ssrc, RTCP_PAD);
+  if (srcpad == NULL)
+    goto create_failed;
+
+  /* replay the sink pad's sticky events on a freshly used source pad */
+  if (!GST_PAD_STICKIES_SENT (srcpad)) {
+    forward_initial_events (demux, ssrc, srcpad, RTCP_PAD);
+    GST_PAD_SET_STICKIES_SENT (srcpad);
+  }
+
+  /* push to srcpad */
+  ret = gst_pad_push (srcpad, buf);
+
+  if (ret != GST_FLOW_OK) {
+    GstPad *active_pad;
+
+    /* check if the ssrc still there, may have been removed */
+    active_pad = get_demux_pad_for_ssrc (demux, ssrc, RTCP_PAD);
+    if (active_pad == NULL || active_pad != srcpad) {
+      /* SSRC was removed during the push ... ignore the error */
+      ret = GST_FLOW_OK;
+    }
+
+    g_clear_object (&active_pad);
+  }
+
+  gst_object_unref (srcpad);
+
+  return ret;
+
+  /* ERRORS */
+invalid_rtcp:
+  {
+    GST_DEBUG_OBJECT (demux, "Dropping invalid RTCP packet");
+    gst_buffer_unref (buf);
+    return GST_FLOW_OK;
+  }
+unexpected_rtcp:
+  {
+    GST_DEBUG_OBJECT (demux, "dropping unexpected RTCP packet");
+    gst_buffer_unref (buf);
+    return GST_FLOW_OK;
+  }
+create_failed:
+  {
+    gst_buffer_unref (buf);
+    GST_WARNING_OBJECT (demux,
+        "Dropping buffer SSRC %08x. "
+        "Max streams number reached (%u)", ssrc, demux->max_streams);
+    return GST_FLOW_OK;
+  }
+}
+
+/* Returns the per-SSRC pad pair that @pad (RTP or RTCP source pad) belongs
+ * to, or NULL if it is not one of ours.  NOTE(review): walks demux->srcpads;
+ * callers appear to rely on their own locking — confirm at call sites. */
+static GstRtpSsrcDemuxPads *
+find_demux_pad_for_pad (GstRtpSsrcDemux * demux, GstPad * pad)
+{
+  GSList *item;
+
+  for (item = demux->srcpads; item != NULL; item = item->next) {
+    GstRtpSsrcDemuxPads *dpads = item->data;
+
+    if (pad == dpads->rtp_pad || pad == dpads->rtcp_pad)
+      return dpads;
+  }
+
+  return NULL;
+}
+
+
+/* Source pad event handler: custom upstream events that lack an "ssrc"
+ * field get the pad's SSRC stamped into their structure before the event
+ * is forwarded upstream, so upstream elements can tell which demuxed
+ * stream the event refers to. */
+static gboolean
+gst_rtp_ssrc_demux_src_event (GstPad * pad, GstObject * parent,
+    GstEvent * event)
+{
+  GstRtpSsrcDemux *demux;
+  const GstStructure *s;
+
+  demux = GST_RTP_SSRC_DEMUX (parent);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CUSTOM_UPSTREAM:
+    case GST_EVENT_CUSTOM_BOTH:
+    case GST_EVENT_CUSTOM_BOTH_OOB:
+      s = gst_event_get_structure (event);
+      /* only add the field if the sender didn't set one already */
+      if (s && !gst_structure_has_field (s, "ssrc")) {
+        GstRtpSsrcDemuxPads *dpads = find_demux_pad_for_pad (demux, pad);
+
+        if (dpads) {
+          GstStructure *ws;
+
+          /* the event may be shared; make it writable before editing */
+          event = gst_event_make_writable (event);
+          ws = gst_event_writable_structure (event);
+          gst_structure_set (ws, "ssrc", G_TYPE_UINT, dpads->ssrc, NULL);
+        }
+      }
+      break;
+    default:
+      break;
+  }
+
+  return gst_pad_event_default (pad, parent, event);
+}
+
+/* Internal-links iterator for a demuxed source pad: yields the sink pad
+ * (rtp_sink or rtcp_sink) that feeds it, or NULL for unknown pads. */
+static GstIterator *
+gst_rtp_ssrc_demux_iterate_internal_links_src (GstPad * pad, GstObject * parent)
+{
+  GstRtpSsrcDemux *demux;
+  GstPad *otherpad = NULL;
+  GstIterator *it = NULL;
+  GSList *current;
+
+  demux = GST_RTP_SSRC_DEMUX (parent);
+
+  GST_OBJECT_LOCK (demux);
+  /* figure out whether @pad is an RTP or an RTCP source pad; its internal
+   * link is the corresponding sink pad */
+  for (current = demux->srcpads; current; current = g_slist_next (current)) {
+    GstRtpSsrcDemuxPads *dpads = (GstRtpSsrcDemuxPads *) current->data;
+
+    if (pad == dpads->rtp_pad) {
+      otherpad = demux->rtp_sink;
+      break;
+    } else if (pad == dpads->rtcp_pad) {
+      otherpad = demux->rtcp_sink;
+      break;
+    }
+  }
+  if (otherpad) {
+    GValue val = { 0, };
+
+    g_value_init (&val, GST_TYPE_PAD);
+    g_value_set_object (&val, otherpad);
+    it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+    g_value_unset (&val);
+
+  }
+  GST_OBJECT_UNLOCK (demux);
+
+  return it;
+}
+
+/* GCompareFunc for gst_iterator_filter (): returns 0 (include) when the
+ * pad's name starts with the prefix carried in @b, non-zero otherwise. */
+static gint
+src_pad_compare_func (gconstpointer a, gconstpointer b)
+{
+  GstPad *pad = GST_PAD (g_value_get_object (a));
+  const gchar *prefix = g_value_get_string (b);
+  const gchar *name;
+  gboolean matches;
+
+  /* the pad name is protected by the pad's object lock */
+  GST_OBJECT_LOCK (pad);
+  name = GST_PAD_NAME (pad);
+  matches = (name != NULL) && g_str_has_prefix (name, prefix);
+  GST_OBJECT_UNLOCK (pad);
+
+  return matches ? 0 : 1;
+}
+
+/* Internal-links iterator for the sink pads: yields all source pads whose
+ * name carries the matching prefix ("src_" for RTP, "rtcp_src_" for RTCP). */
+static GstIterator *
+gst_rtp_ssrc_demux_iterate_internal_links_sink (GstPad * pad,
+    GstObject * parent)
+{
+  GstRtpSsrcDemux *demux = GST_RTP_SSRC_DEMUX (parent);
+  const gchar *prefix = NULL;
+  GValue gval = { 0, };
+  GstIterator *it;
+
+  if (pad == demux->rtp_sink)
+    prefix = "src_";
+  else if (pad == demux->rtcp_sink)
+    prefix = "rtcp_src_";
+  else
+    g_assert_not_reached ();
+
+  g_value_init (&gval, G_TYPE_STRING);
+  g_value_set_static_string (&gval, prefix);
+
+  it = gst_element_iterate_src_pads (GST_ELEMENT_CAST (demux));
+  return gst_iterator_filter (it, src_pad_compare_func, &gval);
+}
+
+
+/* Source pad query handler: LATENCY queries are answered by the peer of
+ * the RTP sink pad (the element itself adds no latency); everything else
+ * goes to the default handler. */
+static gboolean
+gst_rtp_ssrc_demux_src_query (GstPad * pad, GstObject * parent,
+    GstQuery * query)
+{
+  GstRtpSsrcDemux *demux;
+  gboolean res = FALSE;
+
+  demux = GST_RTP_SSRC_DEMUX (parent);
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_LATENCY:
+    {
+
+      if ((res = gst_pad_peer_query (demux->rtp_sink, query))) {
+        gboolean live;
+        GstClockTime min_latency, max_latency;
+
+        gst_query_parse_latency (query, &live, &min_latency, &max_latency);
+
+        GST_DEBUG_OBJECT (pad, "peer min latency %" GST_TIME_FORMAT,
+            GST_TIME_ARGS (min_latency));
+
+        /* pass the upstream values through unchanged */
+        gst_query_set_latency (query, live, min_latency, max_latency);
+      }
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, parent, query);
+      break;
+  }
+
+  return res;
+}
+
+/* Element state change: chains up first, then drops all dynamically
+ * created source pads when going to READY or NULL. */
+static GstStateChangeReturn
+gst_rtp_ssrc_demux_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstRtpSsrcDemux *demux = GST_RTP_SSRC_DEMUX (element);
+  GstStateChangeReturn ret;
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      gst_rtp_ssrc_demux_reset (demux);
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
diff --git a/gst/rtpmanager/gstrtpssrcdemux.h b/gst/rtpmanager/gstrtpssrcdemux.h
new file mode 100644
index 0000000000..e7e347d251
--- /dev/null
+++ b/gst/rtpmanager/gstrtpssrcdemux.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTP_SSRC_DEMUX_H__
+#define __GST_RTP_SSRC_DEMUX_H__
+
+#include <gst/gst.h>
+
+#define GST_TYPE_RTP_SSRC_DEMUX (gst_rtp_ssrc_demux_get_type())
+#define GST_RTP_SSRC_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_SSRC_DEMUX,GstRtpSsrcDemux))
+#define GST_RTP_SSRC_DEMUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_SSRC_DEMUX,GstRtpSsrcDemuxClass))
+#define GST_IS_RTP_SSRC_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_SSRC_DEMUX))
+#define GST_IS_RTP_SSRC_DEMUX_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_SSRC_DEMUX))
+
+typedef struct _GstRtpSsrcDemux GstRtpSsrcDemux;
+typedef struct _GstRtpSsrcDemuxClass GstRtpSsrcDemuxClass;
+
+struct _GstRtpSsrcDemux
+{
+  GstElement parent;
+
+  /* the two always-present sink pads */
+  GstPad *rtp_sink;
+  GstPad *rtcp_sink;
+
+  /* NOTE(review): taken around dynamic pad handling — confirm usage in
+   * the .c file */
+  GRecMutex padlock;
+  /* GstRtpSsrcDemuxPads list, one entry per demuxed SSRC; protected by
+   * the element's object lock */
+  GSList *srcpads;
+  /* maximum number of SSRC streams that will be demuxed */
+  guint max_streams;
+};
+
+struct _GstRtpSsrcDemuxClass
+{
+  GstElementClass parent_class;
+
+  /* signals: notify about per-SSRC pad creation / removal */
+  void (*new_ssrc_pad) (GstRtpSsrcDemux *demux, guint32 ssrc, GstPad *pad);
+  void (*removed_ssrc_pad) (GstRtpSsrcDemux *demux, guint32 ssrc, GstPad *pad);
+
+  /* actions: remove the pads created for @ssrc */
+  void (*clear_ssrc) (GstRtpSsrcDemux *demux, guint32 ssrc);
+};
+
+GType gst_rtp_ssrc_demux_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpssrcdemux);
+
+#endif /* __GST_RTP_SSRC_DEMUX_H__ */
diff --git a/gst/rtpmanager/gstrtpst2022-1-fecdec.c b/gst/rtpmanager/gstrtpst2022-1-fecdec.c
new file mode 100644
index 0000000000..ca9884f4ae
--- /dev/null
+++ b/gst/rtpmanager/gstrtpst2022-1-fecdec.c
@@ -0,0 +1,1012 @@
+/* GStreamer
+ * Copyright (C) <2020> Mathieu Duponchelle <mathieu@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpst2022-1-fecdec
+ * @see_also: #element-rtpst2022-1-fecenc
+ *
+ * This element takes as input a media stream and up to two FEC
+ * streams as described in SMPTE 2022-1: Forward Error Correction
+ * for Real-Time Video/Audio Transport Over IP Networks, and makes
+ * use of the FEC packets to recover media packets that may have
+ * gotten lost.
+ *
+ * ## Design
+ *
+ * The approach picked for this element is to proactively reconstruct missing
+ * packets as soon as possible. When a FEC packet arrives, the element
+ * immediately checks whether a media packet in the row / column it protects
+ * can be reconstructed.
+ *
+ * Similarly, when a media packet comes in, the element checks whether it has
+ * already received a corresponding packet in both the column and row the packet
+ * belongs to, and if so goes through the first step listed above.
+ *
+ * This process is repeated recursively, allowing for recoveries over one
+ * dimension to unblock recoveries over the other.
+ *
+ * In perfect networking conditions, this incurs next to no overhead as FEC
+ * packets will arrive after the media packets, causing no reconstruction to
+ * take place, just a few checks upon chaining.
+ *
+ * ## sender / receiver example
+ *
+ * ``` shell
+ * gst-launch-1.0 \
+ * rtpbin name=rtp fec-encoders='fec,0="rtpst2022-1-fecenc\ rows\=5\ columns\=5";' \
+ * uridecodebin uri=file:///path/to/video/file ! x264enc key-int-max=60 tune=zerolatency ! \
+ * queue ! mpegtsmux ! rtpmp2tpay ssrc=0 ! rtp.send_rtp_sink_0 \
+ * rtp.send_rtp_src_0 ! udpsink host=127.0.0.1 port=5000 \
+ * rtp.send_fec_src_0_0 ! udpsink host=127.0.0.1 port=5002 async=false \
+ * rtp.send_fec_src_0_1 ! udpsink host=127.0.0.1 port=5004 async=false
+ * ```
+ *
+ * ``` shell
+ * gst-launch-1.0 \
+ * rtpbin latency=500 fec-decoders='fec,0="rtpst2022-1-fecdec\ size-time\=1000000000";' name=rtp \
+ * udpsrc address=127.0.0.1 port=5002 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_0 \
+ * udpsrc address=127.0.0.1 port=5004 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_1 \
+ * udpsrc address=127.0.0.1 port=5000 caps="application/x-rtp, media=video, clock-rate=90000, encoding-name=mp2t, payload=33" ! \
+ * queue ! netsim drop-probability=0.05 ! rtp.recv_rtp_sink_0 \
+ * rtp. ! decodebin ! videoconvert ! queue ! autovideosink
+ * ```
+ *
+ * With the above command line, as the media packet size is constant,
+ * the fec overhead can be approximated to the number of fec packets
+ * per 2-d matrix of media packet, here 10 fec packets for each 25
+ * media packets.
+ *
+ * Increasing the number of rows and columns will decrease the overhead,
+ * but obviously increase the likelihood of recovery failure for lost
+ * packets on the receiver side.
+ *
+ * Since: 1.20
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/base/base.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpst2022-1-fecdec.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtpst_2022_1_fecdec_debug);
+#define GST_CAT_DEFAULT gst_rtpst_2022_1_fecdec_debug
+
+#define DEFAULT_SIZE_TIME (GST_SECOND)
+
+/* A stored media or FEC packet, keyed by RTP sequence number */
+typedef struct
+{
+  guint16 seq;                  /* RTP seqnum of @buffer */
+  GstBuffer *buffer;            /* owned reference */
+} Item;
+
+static GstFlowReturn store_media_item (GstRTPST_2022_1_FecDec * dec,
+ GstRTPBuffer * rtp, Item * item);
+
+/* GDestroyNotify for Item: drops the buffer ref and the Item itself. */
+static void
+free_item (Item * item)
+{
+  g_clear_pointer (&item->buffer, gst_buffer_unref);
+  g_free (item);
+}
+
+/* GCompareDataFunc for Item: orders by RTP sequence number, with seqnum
+ * wrap-around handled by gst_rtp_buffer_compare_seqnum (). */
+static gint
+cmp_items (Item * a, Item * b, gpointer unused)
+{
+  return gst_rtp_buffer_compare_seqnum (b->seq, a->seq);
+}
+
+enum
+{
+ PROP_0,
+ PROP_SIZE_TIME,
+};
+
+struct _GstRTPST_2022_1_FecDecClass
+{
+ GstElementClass class;
+};
+
+struct _GstRTPST_2022_1_FecDec
+{
+  GstElement element;
+
+  GstPad *srcpad;
+  GstPad *sinkpad;
+  GList *fec_sinkpads;
+
+  /* All the following fields are protected by the OBJECT_LOCK */
+  GSequence *packets;           /* media Items ordered by seqnum */
+  GHashTable *column_fec_packets;       /* media seqnum -> column FEC Item */
+  GSequence *fec_packets[2];    /* [0]: column FEC, [1]: row FEC */
+  /* N columns (G_MAXUINT until learned from a FEC packet) */
+  guint l;
+  /* N rows (G_MAXUINT until learned from a FEC packet) */
+  guint d;
+
+  GstClockTime size_time;       /* retention window for stored packets */
+  GstClockTime max_arrival_time;
+  GstClockTime max_fec_arrival_time[2];
+};
+
+#define RTP_CAPS "application/x-rtp"
+
+/* Parsed SMPTE 2022-1 FEC header; see parse_header () for the bit layout */
+typedef struct
+{
+  guint16 seq;                  /* SNBase: first protected seqnum */
+  guint16 len;                  /* length recovery field */
+  guint8 E;                     /* 1 bit */
+  guint8 pt;                    /* payload type recovery (7 bits) */
+  guint32 mask;                 /* 24 bits */
+  guint32 timestamp;            /* timestamp recovery */
+  guint8 N;                     /* 1 bit */
+  guint8 D;                     /* 1: row FEC, 0: column FEC */
+  guint8 type;                  /* 3 bits */
+  guint8 index;                 /* 3 bits */
+  guint8 offset;                /* seqnum stride between protected packets */
+  guint8 NA;                    /* number of packets protected */
+  guint8 seq_ext;               /* SNBase extension (8 bits) */
+  guint8 *payload;              /* points into the mapped RTP payload */
+  guint payload_len;
+  gboolean marker;              /* flags taken from the RTP header */
+  gboolean padding;
+  gboolean extension;
+} Rtp2DFecHeader;
+
+static GstStaticPadTemplate fec_sink_template =
+GST_STATIC_PAD_TEMPLATE ("fec_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+#define gst_rtpst_2022_1_fecdec_parent_class parent_class
+G_DEFINE_TYPE (GstRTPST_2022_1_FecDec, gst_rtpst_2022_1_fecdec,
+ GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (rtpst2022_1_fecdec, "rtpst2022-1-fecdec",
+ GST_RANK_NONE, GST_TYPE_RTPST_2022_1_FECDEC);
+
+/* Drops stored media packets that are older than dec->size_time relative
+ * to the newest arrival time.  Called with the OBJECT_LOCK held. */
+static void
+trim_items (GstRTPST_2022_1_FecDec * dec)
+{
+  GSequenceIter *tmp_iter, *iter = NULL;
+
+  /* find the last item (in seqnum order) that has aged out */
+  for (tmp_iter = g_sequence_get_begin_iter (dec->packets);
+      tmp_iter; tmp_iter = g_sequence_iter_next (tmp_iter)) {
+    Item *item;
+
+    if (g_sequence_iter_is_end (tmp_iter))
+      break;
+
+    item = g_sequence_get (tmp_iter);
+
+    if (dec->max_arrival_time - GST_BUFFER_DTS_OR_PTS (item->buffer) <
+        dec->size_time)
+      break;
+
+    iter = tmp_iter;
+  }
+
+  if (iter) {
+    Item *item = g_sequence_get (iter);
+    GST_TRACE_OBJECT (dec,
+        "Trimming packets up to %" GST_TIME_FORMAT " (seq: %u)",
+        GST_TIME_ARGS (GST_BUFFER_DTS_OR_PTS (item->buffer)), item->seq);
+    /* the range excludes @iter itself */
+    g_sequence_remove_range (g_sequence_get_begin_iter (dec->packets), iter);
+  }
+}
+
+/* Drops stored FEC packets of dimension @D (0: column, 1: row) older than
+ * dec->size_time.  For column FEC the seqnum -> Item mappings are removed
+ * from column_fec_packets as well.  Called with the OBJECT_LOCK held. */
+static void
+trim_fec_items (GstRTPST_2022_1_FecDec * dec, guint D)
+{
+  GSequenceIter *tmp_iter, *iter = NULL;
+
+  for (tmp_iter = g_sequence_get_begin_iter (dec->fec_packets[D]);
+      tmp_iter; tmp_iter = g_sequence_iter_next (tmp_iter)) {
+    Item *item;
+
+    if (g_sequence_iter_is_end (tmp_iter))
+      break;
+
+    item = g_sequence_get (tmp_iter);
+
+    if (dec->max_fec_arrival_time[D] - GST_BUFFER_DTS_OR_PTS (item->buffer) <
+        dec->size_time)
+      break;
+
+    if (!D) {
+      guint i;
+      guint16 seq;
+
+      /* a column FEC packet was indexed once per protected media seqnum */
+      for (i = 0; i < dec->d; i++) {
+        seq = item->seq + i * dec->l;
+        g_hash_table_remove (dec->column_fec_packets, GUINT_TO_POINTER (seq));
+      }
+    }
+
+    iter = tmp_iter;
+  }
+
+  if (iter) {
+    Item *item = g_sequence_get (iter);
+    GST_TRACE_OBJECT (dec,
+        "Trimming %s FEC packets up to %" GST_TIME_FORMAT " (seq: %u)",
+        D ? "row" : "column",
+        GST_TIME_ARGS (GST_BUFFER_DTS_OR_PTS (item->buffer)), item->seq);
+    g_sequence_remove_range (g_sequence_get_begin_iter (dec->fec_packets[D]),
+        iter);
+  }
+}
+
+/* Returns the stored media Item with @seqnum, or NULL when absent. */
+static Item *
+lookup_media_packet (GstRTPST_2022_1_FecDec * dec, guint16 seqnum)
+{
+  Item dummy = { seqnum, NULL };
+  GSequenceIter *iter;
+
+  iter = g_sequence_lookup (dec->packets, &dummy,
+      (GCompareDataFunc) cmp_items, NULL);
+
+  return iter ? g_sequence_get (iter) : NULL;
+}
+
+/* Parses the 16-byte SMPTE 2022-1 FEC header out of @rtp's payload into
+ * @fec.  Returns FALSE when the payload is too short; the unchecked bit
+ * reads below are safe because exactly 128 bits are consumed. */
+static gboolean
+parse_header (GstRTPBuffer * rtp, Rtp2DFecHeader * fec)
+{
+  gboolean ret = FALSE;
+  GstBitReader bits;
+  guint8 *data = gst_rtp_buffer_get_payload (rtp);
+  guint len = gst_rtp_buffer_get_payload_len (rtp);
+
+  if (len < 16)
+    goto done;
+
+  gst_bit_reader_init (&bits, data, len);
+
+  fec->marker = gst_rtp_buffer_get_marker (rtp);
+  fec->padding = gst_rtp_buffer_get_padding (rtp);
+  fec->extension = gst_rtp_buffer_get_extension (rtp);
+  fec->seq = gst_bit_reader_get_bits_uint16_unchecked (&bits, 16);
+  fec->len = gst_bit_reader_get_bits_uint16_unchecked (&bits, 16);
+  fec->E = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1);
+  fec->pt = gst_bit_reader_get_bits_uint8_unchecked (&bits, 7);
+  fec->mask = gst_bit_reader_get_bits_uint32_unchecked (&bits, 24);
+  fec->timestamp = gst_bit_reader_get_bits_uint32_unchecked (&bits, 32);
+  fec->N = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1);
+  fec->D = gst_bit_reader_get_bits_uint8_unchecked (&bits, 1);
+  fec->type = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3);
+  fec->index = gst_bit_reader_get_bits_uint8_unchecked (&bits, 3);
+  fec->offset = gst_bit_reader_get_bits_uint8_unchecked (&bits, 8);
+  fec->NA = gst_bit_reader_get_bits_uint8_unchecked (&bits, 8);
+  fec->seq_ext = gst_bit_reader_get_bits_uint8_unchecked (&bits, 8);
+  /* the protected payload follows the fixed-size header */
+  fec->payload = data + 16;
+  fec->payload_len = len - 16;
+
+  ret = TRUE;
+
+done:
+  return ret;
+}
+
+/* Looks up the stored row FEC packet whose L-packet span covers @seqnum,
+ * or NULL if none applies.  dec->l == G_MAXUINT means the row dimension
+ * isn't known yet. */
+static Item *
+get_row_fec (GstRTPST_2022_1_FecDec * dec, guint16 seqnum)
+{
+  GSequenceIter *iter;
+  Item *ret = NULL;
+  Item dummy = { 0, };
+
+  if (dec->l == G_MAXUINT)
+    goto done;
+
+  /* Potential underflow is intended */
+  dummy.seq = seqnum - dec->l;
+
+  /* g_sequence_search returns the first item AFTER the dummy, i.e. the
+   * first row FEC whose SNBase is within L packets before @seqnum */
+  iter =
+      g_sequence_search (dec->fec_packets[1], &dummy,
+      (GCompareDataFunc) cmp_items, NULL);
+
+  if (!g_sequence_iter_is_end (iter)) {
+    gint seqdiff;
+    ret = g_sequence_get (iter);
+
+    seqdiff = gst_rtp_buffer_compare_seqnum (ret->seq, seqnum);
+
+    /* Now check whether the fec packet does apply */
+    if (seqdiff < 0 || seqdiff >= dec->l)
+      ret = NULL;
+  }
+
+done:
+  return ret;
+}
+
+/* Returns the stored column FEC packet protecting media seqnum @seqnum,
+ * or NULL when none is stored or the matrix dimensions aren't known yet. */
+static Item *
+get_column_fec (GstRTPST_2022_1_FecDec * dec, guint16 seqnum)
+{
+  if (dec->l == G_MAXUINT || dec->d == G_MAXUINT)
+    return NULL;
+
+  return g_hash_table_lookup (dec->column_fec_packets,
+      GUINT_TO_POINTER (seqnum));
+}
+
+/* XORs @length bytes of @src into @dst, 64 bits at a time with a bytewise
+ * tail.  NOTE(review): relies on GST_READ/WRITE_UINT64_* tolerating
+ * unaligned pointers — confirm for exotic targets. */
+static void
+_xor_mem (guint8 * restrict dst, const guint8 * restrict src, gsize length)
+{
+  guint i;
+
+  for (i = 0; i < (length / sizeof (guint64)); ++i) {
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+    GST_WRITE_UINT64_LE (dst,
+        GST_READ_UINT64_LE (dst) ^ GST_READ_UINT64_LE (src));
+#else
+    GST_WRITE_UINT64_BE (dst,
+        GST_READ_UINT64_BE (dst) ^ GST_READ_UINT64_BE (src));
+#endif
+    /* pointers advance inside the loop, so the tail loop below indexes
+     * from the remainder start */
+    dst += sizeof (guint64);
+    src += sizeof (guint64);
+  }
+  for (i = 0; i < (length % sizeof (guint64)); ++i)
+    dst[i] ^= src[i];
+}
+
+/* Reconstructs the single missing media packet @seqnum by XORing the FEC
+ * payload with all the present packets of the protected row / column, then
+ * stores and pushes the recovered buffer.  Called with the OBJECT_LOCK
+ * held; the lock is dropped around gst_pad_push (). */
+static GstFlowReturn
+xor_items (GstRTPST_2022_1_FecDec * dec, Rtp2DFecHeader * fec, GList * packets,
+    guint16 seqnum)
+{
+  guint8 *xored;
+  guint32 xored_timestamp;
+  guint8 xored_pt;
+  guint16 xored_payload_len;
+  Item *item;
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  GList *tmp;
+  GstFlowReturn ret = GST_FLOW_OK;
+  GstBuffer *buffer;
+  gboolean xored_marker;
+  gboolean xored_padding;
+  gboolean xored_extension;
+
+  /* Figure out the recovered packet length first */
+  xored_payload_len = fec->len;
+  for (tmp = packets; tmp; tmp = tmp->next) {
+    GstRTPBuffer media_rtp = GST_RTP_BUFFER_INIT;
+    Item *item = (Item *) tmp->data;
+
+    gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &media_rtp);
+    xored_payload_len ^= gst_rtp_buffer_get_payload_len (&media_rtp);
+    gst_rtp_buffer_unmap (&media_rtp);
+  }
+
+  /* sanity check: we never read past the FEC payload */
+  if (xored_payload_len > fec->payload_len) {
+    GST_WARNING_OBJECT (dec, "FEC payload len %u < length recovery %u",
+        fec->payload_len, xored_payload_len);
+    goto done;
+  }
+
+  item = g_malloc0 (sizeof (Item));
+  item->seq = seqnum;
+  item->buffer = gst_rtp_buffer_new_allocate (xored_payload_len, 0, 0);
+  gst_rtp_buffer_map (item->buffer, GST_MAP_WRITE, &rtp);
+
+  /* seed the recovery with the FEC fields, then XOR in every present
+   * media packet of the row / column */
+  xored = gst_rtp_buffer_get_payload (&rtp);
+  memcpy (xored, fec->payload, xored_payload_len);
+  xored_timestamp = fec->timestamp;
+  xored_pt = fec->pt;
+  xored_marker = fec->marker;
+  xored_padding = fec->padding;
+  xored_extension = fec->extension;
+
+  for (tmp = packets; tmp; tmp = tmp->next) {
+    GstRTPBuffer media_rtp = GST_RTP_BUFFER_INIT;
+    Item *item = (Item *) tmp->data;
+
+    gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &media_rtp);
+    _xor_mem (xored, gst_rtp_buffer_get_payload (&media_rtp),
+        MIN (gst_rtp_buffer_get_payload_len (&media_rtp), xored_payload_len));
+    xored_timestamp ^= gst_rtp_buffer_get_timestamp (&media_rtp);
+    xored_pt ^= gst_rtp_buffer_get_payload_type (&media_rtp);
+    xored_marker ^= gst_rtp_buffer_get_marker (&media_rtp);
+    xored_padding ^= gst_rtp_buffer_get_padding (&media_rtp);
+    xored_extension ^= gst_rtp_buffer_get_extension (&media_rtp);
+
+    gst_rtp_buffer_unmap (&media_rtp);
+  }
+
+  GST_DEBUG_OBJECT (dec,
+      "Recovered buffer through %s FEC with seqnum %u, payload len %u and timestamp %u",
+      fec->D ? "row" : "column", seqnum, xored_payload_len, xored_timestamp);
+
+  GST_BUFFER_DTS (item->buffer) = dec->max_arrival_time;
+
+  gst_rtp_buffer_set_timestamp (&rtp, xored_timestamp);
+  gst_rtp_buffer_set_seq (&rtp, seqnum);
+  gst_rtp_buffer_set_payload_type (&rtp, xored_pt);
+  gst_rtp_buffer_set_marker (&rtp, xored_marker);
+  gst_rtp_buffer_set_padding (&rtp, xored_padding);
+  gst_rtp_buffer_set_extension (&rtp, xored_extension);
+
+  gst_rtp_buffer_unmap (&rtp);
+
+  /* Store a ref on item->buffer as store_media_item may
+   * recurse and call this method again, potentially releasing
+   * the object lock and leaving our item unprotected in
+   * dec->packets
+   */
+  buffer = gst_buffer_ref (item->buffer);
+
+  /* It is right that we should celebrate,
+   * for your brother was dead, and is alive again */
+  gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &rtp);
+  ret = store_media_item (dec, &rtp, item);
+  gst_rtp_buffer_unmap (&rtp);
+
+  if (ret == GST_FLOW_OK) {
+    /* Unlocking here is safe */
+    GST_OBJECT_UNLOCK (dec);
+    ret = gst_pad_push (dec->srcpad, buffer);
+    GST_OBJECT_LOCK (dec);
+  } else {
+    gst_buffer_unref (buffer);
+  }
+
+done:
+  return ret;
+}
+
+/* Checks whether the row / column protected by @fec can already be fully
+ * reconstructed: if exactly one media packet is missing it is recovered
+ * via xor_items (); if none is missing the FEC packet is useless.
+ * Returns GST_FLOW_CUSTOM_SUCCESS when the FEC packet should be stored
+ * for later, any other flow value when it can be discarded. */
+static GstFlowReturn
+check_fec (GstRTPST_2022_1_FecDec * dec, Rtp2DFecHeader * fec)
+{
+  GList *packets = NULL;
+  gint missing_seq = -1;
+  guint n_packets = 0;
+  guint required_n_packets;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  if (fec->D) {
+    /* row FEC: protects L consecutive seqnums */
+    guint i = 0;
+
+    required_n_packets = dec->l;
+
+    for (i = 0; i < dec->l; i++) {
+      Item *item = lookup_media_packet (dec, fec->seq + i);
+
+      if (item) {
+        packets = g_list_prepend (packets, item);
+        n_packets += 1;
+      } else {
+        missing_seq = fec->seq + i;
+      }
+    }
+  } else {
+    /* column FEC: protects D seqnums spaced L apart */
+    guint i = 0;
+
+    required_n_packets = dec->d;
+
+    for (i = 0; i < dec->d; i++) {
+      Item *item = lookup_media_packet (dec, fec->seq + i * dec->l);
+
+      if (item) {
+        packets = g_list_prepend (packets, item);
+        n_packets += 1;
+      } else {
+        missing_seq = fec->seq + i * dec->l;
+      }
+    }
+  }
+
+  if (n_packets == required_n_packets) {
+    g_assert (missing_seq == -1);
+    GST_LOG_OBJECT (dec,
+        "All media packets present, we can discard that FEC packet");
+  } else if (n_packets + 1 == required_n_packets) {
+    g_assert (missing_seq != -1);
+    ret = xor_items (dec, fec, packets, missing_seq);
+    GST_LOG_OBJECT (dec, "We have enough info to reconstruct %u", missing_seq);
+  } else {
+    ret = GST_FLOW_CUSTOM_SUCCESS;
+    GST_LOG_OBJECT (dec, "Too many media packets missing, storing FEC packet");
+  }
+  g_list_free (packets);
+
+  return ret;
+}
+
+/* Re-runs check_fec () for a stored FEC packet (e.g. after a new media
+ * packet arrived).  Returns the flow value from check_fec (), or
+ * GST_FLOW_OK when the stored packet's header unexpectedly fails to
+ * parse.
+ *
+ * Fix: the original ignored parse_header ()'s return value; on failure
+ * check_fec () would have read an uninitialized Rtp2DFecHeader (undefined
+ * behavior).  Stored FEC packets were parsed once already, so the guard
+ * should never trigger in practice, but it costs nothing. */
+static GstFlowReturn
+check_fec_item (GstRTPST_2022_1_FecDec * dec, Item * item)
+{
+  Rtp2DFecHeader fec = { 0, };
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  gst_rtp_buffer_map (item->buffer, GST_MAP_READ, &rtp);
+
+  if (parse_header (&rtp, &fec))
+    ret = check_fec (dec, &fec);
+
+  gst_rtp_buffer_unmap (&rtp);
+
+  return ret;
+}
+
+/* Inserts the media packet @item into dec->packets, then checks whether
+ * its arrival completes the row and / or column FEC covering it, possibly
+ * triggering recursive recoveries.  Called with the OBJECT_LOCK held;
+ * the lock may be released and retaken through xor_items ().  Takes
+ * ownership of @item. */
+static GstFlowReturn
+store_media_item (GstRTPST_2022_1_FecDec * dec, GstRTPBuffer * rtp, Item * item)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  Item *fec_item;
+  guint16 seq;
+
+  seq = gst_rtp_buffer_get_seq (rtp);
+
+  g_sequence_insert_sorted (dec->packets, item, (GCompareDataFunc) cmp_items,
+      NULL);
+
+  /* a stored FEC packet returning CUSTOM_SUCCESS just means it is still
+   * waiting for more media packets; that is not an error here */
+  if ((fec_item = get_row_fec (dec, seq))) {
+    ret = check_fec_item (dec, fec_item);
+    if (ret == GST_FLOW_CUSTOM_SUCCESS)
+      ret = GST_FLOW_OK;
+  }
+
+  if (ret == GST_FLOW_OK && (fec_item = get_column_fec (dec, seq))) {
+    ret = check_fec_item (dec, fec_item);
+    if (ret == GST_FLOW_CUSTOM_SUCCESS)
+      ret = GST_FLOW_OK;
+  }
+
+  return ret;
+}
+
+/* Wraps @buffer in an Item (taking a new ref) and hands it over to
+ * store_media_item ().  Called with the OBJECT_LOCK held. */
+static GstFlowReturn
+store_media (GstRTPST_2022_1_FecDec * dec, GstRTPBuffer * rtp,
+    GstBuffer * buffer)
+{
+  Item *item = g_malloc0 (sizeof (Item));
+
+  item->seq = gst_rtp_buffer_get_seq (rtp);
+  item->buffer = gst_buffer_ref (buffer);
+
+  return store_media_item (dec, rtp, item);
+}
+
+/* Chain function for the FEC request sink pads: validates and parses the
+ * 2022-1 header, learns / checks the matrix dimensions (L columns,
+ * D rows), attempts immediate recovery, and stores the FEC packet when it
+ * cannot be used yet.  Invalid or inconsistent packets are dropped. */
+static GstFlowReturn
+gst_rtpst_2022_1_fecdec_sink_chain_fec (GstPad * pad, GstObject * parent,
+    GstBuffer * buffer)
+{
+  GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent);
+  Rtp2DFecHeader fec = { 0, };
+  guint payload_len;
+  guint8 *payload;
+  GstFlowReturn ret = GST_FLOW_OK;
+  Item *item;
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+
+  GST_OBJECT_LOCK (dec);
+
+  if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) {
+    GST_WARNING_OBJECT (pad, "Chained FEC buffer isn't valid RTP");
+    goto discard;
+  }
+
+  payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+  payload = gst_rtp_buffer_get_payload (&rtp);
+
+  if (!parse_header (&rtp, &fec)) {
+    GST_WARNING_OBJECT (pad, "Failed to parse FEC header (payload len: %d)",
+        payload_len);
+    GST_MEMDUMP_OBJECT (pad, "Invalid payload", payload, payload_len);
+    goto discard;
+  }
+
+  GST_TRACE_OBJECT
+      (pad,
+      "Handling FEC buffer with SNBase / N / D / NA / offset %u / %u / %u / %u / %u",
+      fec.seq, fec.N, fec.D, fec.NA, fec.offset);
+
+  /* learn the matrix dimensions from the first FEC packets and reject any
+   * packet that disagrees with them afterwards */
+  if (fec.D) {
+    if (dec->l == G_MAXUINT) {
+      dec->l = fec.NA;
+    } else if (fec.NA != dec->l) {
+      GST_WARNING_OBJECT (dec, "2D FEC dimensionality cannot change");
+      goto discard;
+    }
+
+    if (fec.offset != 1) {
+      GST_WARNING_OBJECT (pad, "offset must be 1 for row FEC packets");
+      goto discard;
+    }
+  } else {
+    if (dec->d == G_MAXUINT) {
+      dec->d = fec.NA;
+    } else if (fec.NA != dec->d) {
+      GST_WARNING_OBJECT (dec, "2D FEC dimensionality cannot change");
+      goto discard;
+    }
+
+    if (dec->l == G_MAXUINT) {
+      dec->l = fec.offset;
+    } else if (fec.offset != dec->l) {
+      GST_WARNING_OBJECT (dec, "2D FEC dimensionality cannot change");
+      goto discard;
+    }
+  }
+
+  dec->max_fec_arrival_time[fec.D] = GST_BUFFER_DTS_OR_PTS (buffer);
+  trim_fec_items (dec, fec.D);
+
+  ret = check_fec (dec, &fec);
+
+  /* CUSTOM_SUCCESS: not usable yet, keep the packet (takes ownership of
+   * @buffer); any other value: the packet is spent or an error occurred */
+  if (ret == GST_FLOW_CUSTOM_SUCCESS) {
+    item = g_malloc0 (sizeof (Item));
+    item->buffer = buffer;
+    item->seq = fec.seq;
+
+    if (!fec.D) {
+      guint i;
+      guint16 seq;
+
+      /* index the column FEC packet under every media seqnum it protects */
+      for (i = 0; i < dec->d; i++) {
+        seq = fec.seq + i * dec->l;
+        g_hash_table_insert (dec->column_fec_packets, GUINT_TO_POINTER (seq),
+            item);
+      }
+    }
+    g_sequence_insert_sorted (dec->fec_packets[fec.D], item,
+        (GCompareDataFunc) cmp_items, NULL);
+    ret = GST_FLOW_OK;
+  } else {
+    goto discard;
+  }
+
+  gst_rtp_buffer_unmap (&rtp);
+
+done:
+  GST_OBJECT_UNLOCK (dec);
+  return ret;
+
+discard:
+  if (rtp.buffer != NULL)
+    gst_rtp_buffer_unmap (&rtp);
+
+  gst_buffer_unref (buffer);
+
+  goto done;
+}
+
+static GstFlowReturn
+gst_rtpst_2022_1_fecdec_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent);
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+
+ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) {
+ GST_WARNING_OBJECT (pad, "Chained buffer isn't valid RTP");
+ goto error;
+ }
+
+ GST_OBJECT_LOCK (dec);
+ dec->max_arrival_time =
+ MAX (dec->max_arrival_time, GST_BUFFER_DTS_OR_PTS (buffer));
+ trim_items (dec);
+ ret = store_media (dec, &rtp, buffer);
+ GST_OBJECT_UNLOCK (dec);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ if (ret == GST_FLOW_OK)
+ ret = gst_pad_push (dec->srcpad, buffer);
+
+done:
+ return ret;
+
+error:
+ gst_buffer_unref (buffer);
+ goto done;
+}
+
+static gboolean
+gst_rtpst_2022_1_fecdec_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ gboolean handled = FALSE;
+ gboolean ret = TRUE;
+
+ if (!handled) {
+ gst_pad_event_default (pad, parent, event);
+ }
+
+ return ret;
+}
+
+/* Takes the object lock */
+static void
+gst_rtpst_2022_1_fecdec_reset (GstRTPST_2022_1_FecDec * dec, gboolean allocate)
+{
+ guint i;
+
+ GST_OBJECT_LOCK (dec);
+
+ if (dec->packets) {
+ g_sequence_free (dec->packets);
+ dec->packets = NULL;
+ }
+
+ if (dec->column_fec_packets) {
+ g_hash_table_unref (dec->column_fec_packets);
+ dec->column_fec_packets = NULL;
+ }
+
+ if (allocate) {
+ dec->packets = g_sequence_new ((GDestroyNotify) free_item);
+ dec->column_fec_packets = g_hash_table_new (g_direct_hash, g_direct_equal);
+ }
+
+ for (i = 0; i < 2; i++) {
+ if (dec->fec_packets[i]) {
+ g_sequence_free (dec->fec_packets[i]);
+ dec->fec_packets[i] = NULL;
+ }
+
+ if (allocate)
+ dec->fec_packets[i] = g_sequence_new ((GDestroyNotify) free_item);
+ }
+
+ dec->d = G_MAXUINT;
+ dec->l = G_MAXUINT;
+
+ GST_OBJECT_UNLOCK (dec);
+}
+
+static GstStateChangeReturn
+gst_rtpst_2022_1_fecdec_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtpst_2022_1_fecdec_reset (dec, TRUE);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtpst_2022_1_fecdec_reset (dec, FALSE);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ return ret;
+}
+
+static void
+gst_rtpst_2022_1_fecdec_finalize (GObject * object)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (object);
+
+ gst_rtpst_2022_1_fecdec_reset (dec, FALSE);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_rtpst_2022_1_fecdec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (object);
+
+ switch (prop_id) {
+ case PROP_SIZE_TIME:
+ dec->size_time = g_value_get_uint64 (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtpst_2022_1_fecdec_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (object);
+
+ switch (prop_id) {
+ case PROP_SIZE_TIME:
+ g_value_set_uint64 (value, dec->size_time);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gboolean
+gst_2d_fec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent);
+ gboolean ret;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
+ gst_rtpst_2022_1_fecdec_reset (dec, TRUE);
+
+ ret = gst_pad_event_default (pad, parent, event);
+
+ return ret;
+}
+
+static GstIterator *
+gst_rtpst_2022_1_fecdec_iterate_linked_pads (GstPad * pad, GstObject * parent)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (parent);
+ GstPad *otherpad = NULL;
+ GstIterator *it = NULL;
+ GValue val = { 0, };
+
+ if (pad == dec->srcpad)
+ otherpad = dec->sinkpad;
+ else if (pad == dec->sinkpad)
+ otherpad = dec->srcpad;
+
+ if (otherpad) {
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
+ }
+
+ return it;
+}
+
+static GstPad *
+gst_rtpst_2022_1_fecdec_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (element);
+ GstPad *sinkpad = NULL;
+
+ GST_DEBUG_OBJECT (element, "requesting pad");
+
+ if (g_list_length (dec->fec_sinkpads) > 1) {
+ GST_ERROR_OBJECT (dec, "not accepting more than two fec streams");
+ goto done;
+ }
+
+ sinkpad = gst_pad_new_from_template (templ, name);
+ gst_pad_set_chain_function (sinkpad, gst_rtpst_2022_1_fecdec_sink_chain_fec);
+ gst_element_add_pad (GST_ELEMENT (dec), sinkpad);
+ gst_pad_set_iterate_internal_links_function (sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_iterate_linked_pads));
+
+ gst_pad_set_active (sinkpad, TRUE);
+
+ GST_DEBUG_OBJECT (element, "requested pad %s:%s",
+ GST_DEBUG_PAD_NAME (sinkpad));
+
+done:
+ return sinkpad;
+}
+
+static void
+gst_rtpst_2022_1_fecdec_release_pad (GstElement * element, GstPad * pad)
+{
+ GstRTPST_2022_1_FecDec *dec = GST_RTPST_2022_1_FECDEC_CAST (element);
+
+ GST_DEBUG_OBJECT (element, "releasing pad %s:%s", GST_DEBUG_PAD_NAME (pad));
+
+ dec->fec_sinkpads = g_list_remove (dec->fec_sinkpads, pad);
+
+ gst_pad_set_active (pad, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (dec), pad);
+}
+
+static void
+gst_rtpst_2022_1_fecdec_class_init (GstRTPST_2022_1_FecDecClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_get_property);
+ gobject_class->finalize =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_finalize);
+
+ g_object_class_install_property (gobject_class, PROP_SIZE_TIME,
+ g_param_spec_uint64 ("size-time", "Storage size (in ns)",
+ "The amount of data to store (in ns, 0-disable)", 0,
+ G_MAXUINT64, DEFAULT_SIZE_TIME,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_change_state);
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_request_new_pad);
+ gstelement_class->release_pad =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_release_pad);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "SMPTE 2022-1 FEC decoder", "SMPTE 2022-1 FEC decoding",
+ "performs FEC as described by SMPTE 2022-1",
+ "Mathieu Duponchelle <mathieu@centricular.com>");
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &fec_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtpst_2022_1_fecdec_debug,
+ "rtpst2022-1-fecdec", 0, "SMPTE 2022-1 FEC decoder element");
+}
+
+static void
+gst_rtpst_2022_1_fecdec_init (GstRTPST_2022_1_FecDec * dec)
+{
+ dec->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+ GST_PAD_SET_PROXY_CAPS (dec->srcpad);
+ gst_pad_use_fixed_caps (dec->srcpad);
+ gst_pad_set_event_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_src_event));
+ gst_pad_set_iterate_internal_links_function (dec->srcpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_iterate_linked_pads));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);
+
+ dec->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
+ GST_PAD_SET_PROXY_CAPS (dec->sinkpad);
+ gst_pad_set_chain_function (dec->sinkpad, gst_rtpst_2022_1_fecdec_sink_chain);
+ gst_pad_set_event_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_2d_fec_sink_event));
+ gst_pad_set_iterate_internal_links_function (dec->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecdec_iterate_linked_pads));
+ gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);
+
+ dec->d = G_MAXUINT;
+ dec->l = G_MAXUINT;
+}
diff --git a/gst/rtpmanager/gstrtpst2022-1-fecdec.h b/gst/rtpmanager/gstrtpst2022-1-fecdec.h
new file mode 100644
index 0000000000..7fc4631432
--- /dev/null
+++ b/gst/rtpmanager/gstrtpst2022-1-fecdec.h
@@ -0,0 +1,39 @@
+/* GStreamer
+ * Copyright (C) <2020> Mathieu Duponchelle <mathieu@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTPST_2022_1_FECDEC_H__
+#define __GST_RTPST_2022_1_FECDEC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPST_2022_1_FecDecClass GstRTPST_2022_1_FecDecClass;
+typedef struct _GstRTPST_2022_1_FecDec GstRTPST_2022_1_FecDec;
+
+#define GST_TYPE_RTPST_2022_1_FECDEC (gst_rtpst_2022_1_fecdec_get_type())
+#define GST_RTPST_2022_1_FECDEC_CAST(obj) ((GstRTPST_2022_1_FecDec *)(obj))
+
+GType gst_rtpst_2022_1_fecdec_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpst2022_1_fecdec);
+
+G_END_DECLS
+
+#endif /* __GST_RTPST_2022_1_FECDEC_H__ */
diff --git a/gst/rtpmanager/gstrtpst2022-1-fecenc.c b/gst/rtpmanager/gstrtpst2022-1-fecenc.c
new file mode 100644
index 0000000000..95595079e5
--- /dev/null
+++ b/gst/rtpmanager/gstrtpst2022-1-fecenc.c
@@ -0,0 +1,796 @@
+/* GStreamer
+ * Copyright (C) <2020> Mathieu Duponchelle <mathieu@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-rtpst2022-1-fecenc
+ * @see_also: #element-rtpst2022-1-fecdec
+ *
+ * This element takes as input a media stream and generates up to
+ * two FEC streams as described in SMPTE 2022-1: Forward Error
+ * Correction for Real-Time Video/Audio Transport Over IP Networks,
+ * allowing a downstream decoder to recover media packets that may
+ * have gotten lost.
+ *
+ * ## sender / receiver example
+ *
+ * ``` shell
+ * gst-launch-1.0 \
+ * rtpbin name=rtp fec-encoders='fec,0="rtpst2022-1-fecenc\ rows\=5\ columns\=5";' \
+ * uridecodebin uri=file:///path/to/video/file ! x264enc key-int-max=60 tune=zerolatency ! \
+ * queue ! mpegtsmux ! rtpmp2tpay ssrc=0 ! rtp.send_rtp_sink_0 \
+ * rtp.send_rtp_src_0 ! udpsink host=127.0.0.1 port=5000 \
+ * rtp.send_fec_src_0_0 ! udpsink host=127.0.0.1 port=5002 async=false \
+ * rtp.send_fec_src_0_1 ! udpsink host=127.0.0.1 port=5004 async=false
+ * ```
+ *
+ * ``` shell
+ * gst-launch-1.0 \
+ * rtpbin latency=500 fec-decoders='fec,0="rtpst2022-1-fecdec\ size-time\=1000000000";' name=rtp \
+ * udpsrc address=127.0.0.1 port=5002 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_0 \
+ * udpsrc address=127.0.0.1 port=5004 caps="application/x-rtp, payload=96" ! queue ! rtp.recv_fec_sink_0_1 \
+ * udpsrc address=127.0.0.1 port=5000 caps="application/x-rtp, media=video, clock-rate=90000, encoding-name=mp2t, payload=33" ! \
+ * queue ! netsim drop-probability=0.05 ! rtp.recv_rtp_sink_0 \
+ * rtp. ! decodebin ! videoconvert ! queue ! autovideosink
+ * ```
+ *
+ * With the above command line, as the media packet size is constant,
+ * the fec overhead can be approximated to the number of fec packets
+ * per 2-d matrix of media packet, here 10 fec packets for each 25
+ * media packets.
+ *
+ * Increasing the number of rows and columns will decrease the overhead,
+ * but obviously increase the likelihood of recovery failure for lost
+ * packets on the receiver side.
+ *
+ * Since: 1.20
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/base/base.h>
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "gstrtpst2022-1-fecenc.h"
+
+#if !GLIB_CHECK_VERSION(2, 60, 0)
+#define g_queue_clear_full queue_clear_full
+static void
+queue_clear_full (GQueue * queue, GDestroyNotify free_func)
+{
+ gpointer data;
+
+ while ((data = g_queue_pop_head (queue)) != NULL)
+ free_func (data);
+}
+#endif
+
+GST_DEBUG_CATEGORY_STATIC (gst_rtpst_2022_1_fecenc_debug);
+#define GST_CAT_DEFAULT gst_rtpst_2022_1_fecenc_debug
+
+enum
+{
+ PROP_0,
+ PROP_COLUMNS,
+ PROP_ROWS,
+ PROP_PT,
+ PROP_ENABLE_COLUMN,
+ PROP_ENABLE_ROW,
+};
+
+#define DEFAULT_ROWS 0
+#define DEFAULT_COLUMNS 0
+#define DEFAULT_PT 96
+#define DEFAULT_ENABLE_COLUMN TRUE
+#define DEFAULT_ENABLE_ROW TRUE
+
+typedef struct
+{
+ guint16 target_media_seq; /* The media seqnum we want to send that packet alongside */
+ guint16 seq_base; /* Only used for logging purposes */
+ GstBuffer *buffer;
+} Item;
+
+typedef struct
+{
+ guint8 *xored_payload;
+ guint32 xored_timestamp;
+ guint8 xored_pt;
+ guint16 xored_payload_len;
+ gboolean xored_marker;
+ gboolean xored_padding;
+ gboolean xored_extension;
+
+ guint16 seq_base;
+
+ guint16 payload_len;
+ guint n_packets;
+} FecPacket;
+
+struct _GstRTPST_2022_1_FecEncClass
+{
+ GstElementClass class;
+};
+
+struct _GstRTPST_2022_1_FecEnc
+{
+ GstElement element;
+
+ GstPad *srcpad;
+ GstPad *sinkpad;
+
+ /* These pads do not participate in the flow return of the element,
+ * which should continue working even if the sending of FEC packets
+ * fails
+ */
+ GstPad *row_fec_srcpad;
+ GstPad *column_fec_srcpad;
+
+ /* The following fields are only accessed on state change or from the
+ * streaming thread, and only settable in state < PAUSED */
+
+ /* N columns */
+ guint l;
+ /* N rows */
+ guint d;
+
+ /* Whether we have pushed initial events on the column FEC source pad */
+ gboolean column_events_pushed;
+
+ /* The current row FEC packet */
+ FecPacket *row;
+ /* Tracks the row seqnum */
+ guint16 row_seq;
+ /* Whether we have pushed initial events on the row FEC source pad */
+ gboolean row_events_pushed;
+
+ /* These two fields are used to enforce input seqnum consecutiveness,
+ * and to determine when column FEC packets should be pushed */
+ gboolean last_media_seqnum_set;
+ guint16 last_media_seqnum;
+
+ /* This field is used to timestamp our FEC packets, we just piggy back */
+ guint32 last_media_timestamp;
+
+ /* The payload type of the FEC packets */
+ gint pt;
+
+ /* The following fields can be changed while PLAYING, and are
+ * protected with the OBJECT_LOCK
+ */
+ /* Tracks the property, can be changed while PLAYING */
+ gboolean enable_row;
+ /* Tracks the property, can be changed while PLAYING */
+ gboolean enable_column;
+
+ /* Array of FecPackets, with size enc->l */
+ GPtrArray *columns;
+ /* Index of the current column in the array above */
+ guint current_column;
+ /* Tracks the column seqnum */
+ guint16 column_seq;
+ /* Column FEC packets must be delayed to make them more resilient
+ * to loss bursts, we store them here */
+ GQueue queued_column_packets;
+};
+
+#define RTP_CAPS "application/x-rtp"
+
+static GstStaticPadTemplate fec_src_template =
+GST_STATIC_PAD_TEMPLATE ("fec_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (RTP_CAPS));
+
+#define gst_rtpst_2022_1_fecenc_parent_class parent_class
+G_DEFINE_TYPE (GstRTPST_2022_1_FecEnc, gst_rtpst_2022_1_fecenc,
+ GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE (rtpst2022_1_fecenc, "rtpst2022-1-fecenc",
+ GST_RANK_NONE, GST_TYPE_RTPST_2022_1_FECENC);
+
+static void
+free_item (Item * item)
+{
+ if (item->buffer)
+ gst_buffer_unref (item->buffer);
+
+ g_free (item);
+}
+
+static void
+free_fec_packet (FecPacket * packet)
+{
+ if (packet->xored_payload)
+ g_free (packet->xored_payload);
+ g_free (packet);
+}
+
+static void
+_xor_mem (guint8 * restrict dst, const guint8 * restrict src, gsize length)
+{
+ guint i;
+
+ for (i = 0; i < (length / sizeof (guint64)); ++i) {
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+ GST_WRITE_UINT64_LE (dst,
+ GST_READ_UINT64_LE (dst) ^ GST_READ_UINT64_LE (src));
+#else
+ GST_WRITE_UINT64_BE (dst,
+ GST_READ_UINT64_BE (dst) ^ GST_READ_UINT64_BE (src));
+#endif
+ dst += sizeof (guint64);
+ src += sizeof (guint64);
+ }
+ for (i = 0; i < (length % sizeof (guint64)); ++i)
+ dst[i] ^= src[i];
+}
+
+static void
+fec_packet_update (FecPacket * fec, GstRTPBuffer * rtp)
+{
+ if (fec->n_packets == 0) {
+ fec->seq_base = gst_rtp_buffer_get_seq (rtp);
+ fec->payload_len = gst_rtp_buffer_get_payload_len (rtp);
+ fec->xored_payload_len = gst_rtp_buffer_get_payload_len (rtp);
+ fec->xored_pt = gst_rtp_buffer_get_payload_type (rtp);
+ fec->xored_timestamp = gst_rtp_buffer_get_timestamp (rtp);
+ fec->xored_marker = gst_rtp_buffer_get_marker (rtp);
+ fec->xored_padding = gst_rtp_buffer_get_padding (rtp);
+ fec->xored_extension = gst_rtp_buffer_get_extension (rtp);
+ fec->xored_payload = g_malloc (sizeof (guint8) * fec->payload_len);
+ memcpy (fec->xored_payload, gst_rtp_buffer_get_payload (rtp),
+ fec->payload_len);
+ } else {
+ guint plen = gst_rtp_buffer_get_payload_len (rtp);
+
+ if (fec->payload_len < plen) {
+ fec->xored_payload =
+ g_realloc (fec->xored_payload, sizeof (guint8) * plen);
+ memset (fec->xored_payload + fec->payload_len, 0,
+ plen - fec->payload_len);
+ fec->payload_len = plen;
+ }
+
+ fec->xored_payload_len ^= plen;
+ fec->xored_pt ^= gst_rtp_buffer_get_payload_type (rtp);
+ fec->xored_timestamp ^= gst_rtp_buffer_get_timestamp (rtp);
+ fec->xored_marker ^= gst_rtp_buffer_get_marker (rtp);
+ fec->xored_padding ^= gst_rtp_buffer_get_padding (rtp);
+ fec->xored_extension ^= gst_rtp_buffer_get_extension (rtp);
+ _xor_mem (fec->xored_payload, gst_rtp_buffer_get_payload (rtp), plen);
+ }
+
+ fec->n_packets += 1;
+}
+
+static void
+push_initial_events (GstRTPST_2022_1_FecEnc * enc, GstPad * pad,
+ const gchar * id)
+{
+ gchar *stream_id;
+ GstCaps *caps;
+ GstSegment segment;
+
+ stream_id = gst_pad_create_stream_id (pad, GST_ELEMENT (enc), id);
+ gst_pad_push_event (pad, gst_event_new_stream_start (stream_id));
+ g_free (stream_id);
+
+ caps = gst_caps_new_simple ("application/x-rtp",
+ "payload", G_TYPE_UINT, enc->pt, "ssrc", G_TYPE_UINT, 0, NULL);
+ gst_pad_push_event (pad, gst_event_new_caps (caps));
+ gst_caps_unref (caps);
+
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ gst_pad_push_event (pad, gst_event_new_segment (&segment));
+}
+
+static void
+queue_fec_packet (GstRTPST_2022_1_FecEnc * enc, FecPacket * fec, gboolean row)
+{
+ GstBuffer *buffer = gst_rtp_buffer_new_allocate (fec->payload_len + 16, 0, 0);
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+ GstBitWriter bits;
+ guint8 *data;
+
+ gst_rtp_buffer_map (buffer, GST_MAP_WRITE, &rtp);
+ data = gst_rtp_buffer_get_payload (&rtp);
+ memset (data, 0x00, 16);
+
+ gst_bit_writer_init_with_data (&bits, data, 17, FALSE);
+
+ gst_bit_writer_put_bits_uint16 (&bits, fec->seq_base, 16); /* SNBase low bits */
+ gst_bit_writer_put_bits_uint16 (&bits, fec->xored_payload_len, 16); /* Length Recovery */
+ gst_bit_writer_put_bits_uint8 (&bits, 1, 1); /* E */
+ gst_bit_writer_put_bits_uint8 (&bits, fec->xored_pt, 7); /* PT recovery */
+ gst_bit_writer_put_bits_uint32 (&bits, 0, 24); /* Mask */
+ gst_bit_writer_put_bits_uint32 (&bits, fec->xored_timestamp, 32); /* TS recovery */
+ gst_bit_writer_put_bits_uint8 (&bits, 0, 1); /* N */
+ gst_bit_writer_put_bits_uint8 (&bits, row ? 1 : 0, 1); /* D */
+ gst_bit_writer_put_bits_uint8 (&bits, 0, 3); /* type */
+ gst_bit_writer_put_bits_uint8 (&bits, 0, 3); /* index */
+ gst_bit_writer_put_bits_uint8 (&bits, row ? 1 : enc->l, 8); /* Offset */
+ gst_bit_writer_put_bits_uint8 (&bits, fec->n_packets, 8); /* NA */
+ gst_bit_writer_put_bits_uint8 (&bits, 0, 8); /* SNBase ext bits */
+
+ memcpy (data + 16, fec->xored_payload, fec->payload_len);
+
+ gst_bit_writer_reset (&bits);
+
+ gst_rtp_buffer_set_payload_type (&rtp, enc->pt);
+ gst_rtp_buffer_set_seq (&rtp, row ? enc->row_seq++ : enc->column_seq++);
+ gst_rtp_buffer_set_marker (&rtp, fec->xored_marker);
+ gst_rtp_buffer_set_padding (&rtp, fec->xored_padding);
+ gst_rtp_buffer_set_extension (&rtp, fec->xored_extension);
+
+ /* We're sending it out immediately */
+ if (row)
+ gst_rtp_buffer_set_timestamp (&rtp, enc->last_media_timestamp);
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ /* We can send row FEC packets immediately, column packets need
+ * delaying by L <= delay < L * D
+ */
+ if (row) {
+ GstFlowReturn ret;
+
+ GST_LOG_OBJECT (enc,
+ "Pushing row FEC packet, seq base: %u, media seqnum: %u",
+ fec->seq_base, enc->last_media_seqnum);
+
+ /* Safe to unlock here */
+ GST_OBJECT_UNLOCK (enc);
+ ret = gst_pad_push (enc->row_fec_srcpad, buffer);
+ GST_OBJECT_LOCK (enc);
+
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_FLUSHING)
+ GST_WARNING_OBJECT (enc->row_fec_srcpad,
+ "Failed to push row FEC packet: %s", gst_flow_get_name (ret));
+ } else {
+ Item *item = g_malloc0 (sizeof (Item));
+
+ item->buffer = buffer;
+ item->seq_base = fec->seq_base;
+ /* Let's get cute and linearize */
+ item->target_media_seq =
+ enc->last_media_seqnum + enc->l - enc->current_column +
+ enc->d * enc->current_column;
+
+ g_queue_push_tail (&enc->queued_column_packets, item);
+ }
+}
+
+static GstFlowReturn
+gst_rtpst_2022_1_fecenc_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (parent);
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+
+ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) {
+ GST_ERROR_OBJECT (enc, "Chained buffer isn't valid RTP");
+ goto error;
+ }
+
+ if (gst_rtp_buffer_get_ssrc (&rtp) != 0) {
+ GST_ERROR_OBJECT (enc, "Chained buffer must have SSRC == 0");
+ goto error;
+ }
+
+ if (enc->last_media_seqnum_set
+ && (guint16) (enc->last_media_seqnum + 1) !=
+ gst_rtp_buffer_get_seq (&rtp)) {
+ GST_ERROR_OBJECT (enc, "consecutive sequence numbers are required");
+ goto error;
+ }
+
+ if (!enc->row_events_pushed) {
+ push_initial_events (enc, enc->row_fec_srcpad, "row-fec");
+ enc->row_events_pushed = TRUE;
+ }
+
+ if (!enc->column_events_pushed) {
+ push_initial_events (enc, enc->column_fec_srcpad, "column-fec");
+ enc->column_events_pushed = TRUE;
+ }
+
+ enc->last_media_timestamp = gst_rtp_buffer_get_timestamp (&rtp);
+ enc->last_media_seqnum = gst_rtp_buffer_get_seq (&rtp);
+ enc->last_media_seqnum_set = TRUE;
+
+ GST_OBJECT_LOCK (enc);
+ if (enc->enable_row && enc->l) {
+ g_assert (enc->row->n_packets < enc->l);
+ fec_packet_update (enc->row, &rtp);
+ if (enc->row->n_packets == enc->l) {
+ queue_fec_packet (enc, enc->row, TRUE);
+ g_free (enc->row->xored_payload);
+ memset (enc->row, 0x00, sizeof (FecPacket));
+ }
+ }
+
+ if (enc->enable_column && enc->l && enc->d) {
+ FecPacket *column = g_ptr_array_index (enc->columns, enc->current_column);
+
+ fec_packet_update (column, &rtp);
+ if (column->n_packets == enc->d) {
+ queue_fec_packet (enc, column, FALSE);
+ g_free (column->xored_payload);
+ memset (column, 0x00, sizeof (FecPacket));
+ }
+
+ enc->current_column++;
+ enc->current_column %= enc->l;
+ }
+
+ gst_rtp_buffer_unmap (&rtp);
+
+ if (g_queue_get_length (&enc->queued_column_packets) > 0) {
+ Item *item = g_queue_peek_head (&enc->queued_column_packets);
+
+ if (item->target_media_seq == enc->last_media_seqnum) {
+ GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+
+ g_queue_pop_head (&enc->queued_column_packets);
+ GST_LOG_OBJECT (enc,
+ "Pushing column FEC packet, seq base: %u, media seqnum: %u",
+ item->seq_base, enc->last_media_seqnum);
+ gst_rtp_buffer_map (item->buffer, GST_MAP_WRITE, &rtp);
+ gst_rtp_buffer_set_timestamp (&rtp, enc->last_media_timestamp);
+ gst_rtp_buffer_unmap (&rtp);
+ GST_OBJECT_UNLOCK (enc);
+ ret =
+ gst_pad_push (enc->column_fec_srcpad, gst_buffer_ref (item->buffer));
+ GST_OBJECT_LOCK (enc);
+
+ if (ret != GST_FLOW_OK && ret != GST_FLOW_FLUSHING)
+ GST_WARNING_OBJECT (enc->column_fec_srcpad,
+ "Failed to push column FEC packet: %s", gst_flow_get_name (ret));
+
+ free_item (item);
+ }
+ }
+ GST_OBJECT_UNLOCK (enc);
+
+ ret = gst_pad_push (enc->srcpad, buffer);
+
+done:
+ return ret;
+
+error:
+ if (rtp.buffer)
+ gst_rtp_buffer_unmap (&rtp);
+ gst_buffer_unref (buffer);
+ ret = GST_FLOW_ERROR;
+ goto done;
+}
+
+static GstIterator *
+gst_rtpst_2022_1_fecenc_iterate_linked_pads (GstPad * pad, GstObject * parent)
+{
+ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (parent);
+ GstPad *otherpad = NULL;
+ GstIterator *it = NULL;
+ GValue val = { 0, };
+
+ if (pad == enc->srcpad)
+ otherpad = enc->sinkpad;
+ else if (pad == enc->sinkpad)
+ otherpad = enc->srcpad;
+
+ if (otherpad) {
+ g_value_init (&val, GST_TYPE_PAD);
+ g_value_set_object (&val, otherpad);
+ it = gst_iterator_new_single (GST_TYPE_PAD, &val);
+ g_value_unset (&val);
+ }
+
+ return it;
+}
+
+static void
+gst_rtpst_2022_1_fecenc_reset (GstRTPST_2022_1_FecEnc * enc, gboolean allocate)
+{
+ if (enc->row) {
+ free_fec_packet (enc->row);
+ enc->row = NULL;
+ }
+
+ if (enc->columns) {
+ g_ptr_array_unref (enc->columns);
+ enc->columns = NULL;
+ }
+
+ if (enc->row_fec_srcpad) {
+ gst_element_remove_pad (GST_ELEMENT (enc), enc->row_fec_srcpad);
+ enc->row_fec_srcpad = NULL;
+ }
+
+ if (enc->column_fec_srcpad) {
+ gst_element_remove_pad (GST_ELEMENT (enc), enc->column_fec_srcpad);
+ enc->column_fec_srcpad = NULL;
+ }
+
+ g_queue_clear_full (&enc->queued_column_packets, (GDestroyNotify) free_item);
+
+ if (allocate) {
+ guint i;
+
+ enc->row = g_malloc0 (sizeof (FecPacket));
+ enc->columns =
+ g_ptr_array_new_full (enc->l, (GDestroyNotify) free_fec_packet);
+
+ for (i = 0; i < enc->l; i++) {
+ g_ptr_array_add (enc->columns, g_malloc0 (sizeof (FecPacket)));
+ }
+
+ g_queue_init (&enc->queued_column_packets);
+
+ enc->column_fec_srcpad =
+ gst_pad_new_from_static_template (&fec_src_template, "fec_0");
+ gst_pad_set_active (enc->column_fec_srcpad, TRUE);
+ gst_pad_set_iterate_internal_links_function (enc->column_fec_srcpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads));
+ gst_element_add_pad (GST_ELEMENT (enc), enc->column_fec_srcpad);
+
+ enc->row_fec_srcpad =
+ gst_pad_new_from_static_template (&fec_src_template, "fec_1");
+ gst_pad_set_active (enc->row_fec_srcpad, TRUE);
+ gst_pad_set_iterate_internal_links_function (enc->row_fec_srcpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads));
+ gst_element_add_pad (GST_ELEMENT (enc), enc->row_fec_srcpad);
+
+ gst_element_no_more_pads (GST_ELEMENT (enc));
+ }
+
+ enc->current_column = 0;
+ enc->last_media_seqnum_set = FALSE;
+}
+
+static GstStateChangeReturn
+gst_rtpst_2022_1_fecenc_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstStateChangeReturn ret;
+ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_rtpst_2022_1_fecenc_reset (enc, TRUE);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_rtpst_2022_1_fecenc_reset (enc, FALSE);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ return ret;
+}
+
+static void
+gst_rtpst_2022_1_fecenc_finalize (GObject * object)
+{
+ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (object);
+
+ gst_rtpst_2022_1_fecenc_reset (enc, FALSE);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_rtpst_2022_1_fecenc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (object);
+
+ if (GST_STATE (enc) > GST_STATE_READY) {
+ GST_ERROR_OBJECT (enc,
+ "rtpst2022-1-fecenc properties can't be changed in PLAYING or PAUSED state");
+ return;
+ }
+
+ switch (prop_id) {
+ case PROP_COLUMNS:
+ enc->l = g_value_get_uint (value);
+ break;
+ case PROP_ROWS:
+ enc->d = g_value_get_uint (value);
+ break;
+ case PROP_PT:
+ enc->pt = g_value_get_int (value);
+ break;
+ case PROP_ENABLE_COLUMN:
+ GST_OBJECT_LOCK (enc);
+ enc->enable_column = g_value_get_boolean (value);
+ if (!enc->enable_column) {
+ guint i;
+
+ if (enc->columns) {
+ for (i = 0; i < enc->l; i++) {
+ FecPacket *column = g_ptr_array_index (enc->columns, i);
+ g_free (column->xored_payload);
+ memset (column, 0x00, sizeof (FecPacket));
+ }
+ }
+ enc->current_column = 0;
+ enc->column_seq = 0;
+ g_queue_clear_full (&enc->queued_column_packets,
+ (GDestroyNotify) free_item);
+ }
+ GST_OBJECT_UNLOCK (enc);
+ break;
+ case PROP_ENABLE_ROW:
+ GST_OBJECT_LOCK (enc);
+ enc->enable_row = g_value_get_boolean (value);
+ GST_OBJECT_UNLOCK (enc);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_rtpst_2022_1_fecenc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (object);
+
+ switch (prop_id) {
+ case PROP_COLUMNS:
+ g_value_set_uint (value, enc->l);
+ break;
+ case PROP_ROWS:
+ g_value_set_uint (value, enc->d);
+ break;
+ case PROP_PT:
+ g_value_set_int (value, enc->pt);
+ break;
+ case PROP_ENABLE_COLUMN:
+ GST_OBJECT_LOCK (enc);
+ g_value_set_boolean (value, enc->enable_column);
+ GST_OBJECT_UNLOCK (enc);
+ break;
+ case PROP_ENABLE_ROW:
+ GST_OBJECT_LOCK (enc);
+ g_value_set_boolean (value, enc->enable_row);
+ GST_OBJECT_UNLOCK (enc);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gboolean
+gst_2d_fec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstRTPST_2022_1_FecEnc *enc = GST_RTPST_2022_1_FECENC_CAST (parent);
+ gboolean ret;
+
+ if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
+ gst_rtpst_2022_1_fecenc_reset (enc, TRUE);
+
+ ret = gst_pad_event_default (pad, parent, event);
+
+ return ret;
+}
+
+static void
+gst_rtpst_2022_1_fecenc_class_init (GstRTPST_2022_1_FecEncClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->set_property =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_set_property);
+ gobject_class->get_property =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_get_property);
+ gobject_class->finalize =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_finalize);
+
+ g_object_class_install_property (gobject_class, PROP_COLUMNS,
+ g_param_spec_uint ("columns", "Columns",
+ "Number of columns to apply row FEC on, 0=disabled", 0,
+ 255, DEFAULT_COLUMNS,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+
+ g_object_class_install_property (gobject_class, PROP_ROWS,
+ g_param_spec_uint ("rows", "Rows",
+ "Number of rows to apply column FEC on, 0=disabled", 0,
+ 255, DEFAULT_ROWS,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+
+ g_object_class_install_property (gobject_class, PROP_PT,
+ g_param_spec_int ("pt", "Payload Type",
+ "The payload type of FEC packets", 96,
+ 255, DEFAULT_PT,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_READY));
+
+ g_object_class_install_property (gobject_class, PROP_ENABLE_COLUMN,
+ g_param_spec_boolean ("enable-column-fec", "Enable Column FEC",
+ "Whether the encoder should compute and send column FEC",
+ DEFAULT_ENABLE_COLUMN,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+
+ g_object_class_install_property (gobject_class, PROP_ENABLE_ROW,
+ g_param_spec_boolean ("enable-row-fec", "Enable Row FEC",
+ "Whether the encoder should compute and send row FEC",
+ DEFAULT_ENABLE_ROW,
+ G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING));
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_change_state);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "SMPTE 2022-1 FEC encoder", "SMPTE 2022-1 FEC encoding",
+ "performs FEC as described by SMPTE 2022-1",
+ "Mathieu Duponchelle <mathieu@centricular.com>");
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &fec_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+ GST_DEBUG_CATEGORY_INIT (gst_rtpst_2022_1_fecenc_debug,
+ "rtpst2022-1-fecenc", 0, "SMPTE 2022-1 FEC encoder element");
+}
+
+static void
+gst_rtpst_2022_1_fecenc_init (GstRTPST_2022_1_FecEnc * enc)
+{
+ enc->srcpad = gst_pad_new_from_static_template (&src_template, "src");
+ gst_pad_use_fixed_caps (enc->srcpad);
+ GST_PAD_SET_PROXY_CAPS (enc->srcpad);
+ gst_pad_set_iterate_internal_links_function (enc->srcpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads));
+ gst_element_add_pad (GST_ELEMENT (enc), enc->srcpad);
+
+ enc->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
+ GST_PAD_SET_PROXY_CAPS (enc->sinkpad);
+ gst_pad_set_chain_function (enc->sinkpad, gst_rtpst_2022_1_fecenc_sink_chain);
+ gst_pad_set_event_function (enc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_2d_fec_sink_event));
+ gst_pad_set_iterate_internal_links_function (enc->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_rtpst_2022_1_fecenc_iterate_linked_pads));
+ gst_element_add_pad (GST_ELEMENT (enc), enc->sinkpad);
+
+ enc->d = 0;
+ enc->l = 0;
+}
diff --git a/gst/rtpmanager/gstrtpst2022-1-fecenc.h b/gst/rtpmanager/gstrtpst2022-1-fecenc.h
new file mode 100644
index 0000000000..33e5de43a2
--- /dev/null
+++ b/gst/rtpmanager/gstrtpst2022-1-fecenc.h
@@ -0,0 +1,39 @@
+/* GStreamer
+ * Copyright (C) <2020> Mathieu Duponchelle <mathieu@centricular.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_RTPST_2022_1_FECENC_H__
+#define __GST_RTPST_2022_1_FECENC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTPST_2022_1_FecEncClass GstRTPST_2022_1_FecEncClass;
+typedef struct _GstRTPST_2022_1_FecEnc GstRTPST_2022_1_FecEnc;
+
+#define GST_TYPE_RTPST_2022_1_FECENC (gst_rtpst_2022_1_fecenc_get_type())
+#define GST_RTPST_2022_1_FECENC_CAST(obj) ((GstRTPST_2022_1_FecEnc *)(obj))
+
+GType gst_rtpst_2022_1_fecenc_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (rtpst2022_1_fecenc);
+
+G_END_DECLS
+
+#endif /* __GST_RTPST_2022_1_FECENC_H__ */
diff --git a/gst/rtpmanager/meson.build b/gst/rtpmanager/meson.build
new file mode 100644
index 0000000000..6f8aee2871
--- /dev/null
+++ b/gst/rtpmanager/meson.build
@@ -0,0 +1,35 @@
+rtpmanager_sources = [
+ 'gstrtpmanager.c',
+ 'gstrtpbin.c',
+ 'gstrtpdtmfmux.c',
+ 'gstrtpjitterbuffer.c',
+ 'gstrtphdrext-twcc.c',
+ 'gstrtphdrext-rfc6464.c',
+ 'gstrtpmux.c',
+ 'gstrtpptdemux.c',
+ 'gstrtprtxqueue.c',
+ 'gstrtprtxreceive.c',
+ 'gstrtprtxsend.c',
+ 'gstrtpssrcdemux.c',
+ 'rtpjitterbuffer.c',
+ 'rtpsession.c',
+ 'rtpsource.c',
+ 'rtpstats.c',
+ 'rtptimerqueue.c',
+ 'rtptwcc.c',
+ 'gstrtpsession.c',
+ 'gstrtpfunnel.c',
+ 'gstrtpst2022-1-fecdec.c',
+ 'gstrtpst2022-1-fecenc.c'
+]
+
+gstrtpmanager = library('gstrtpmanager',
+ rtpmanager_sources,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc, libsinc],
+ dependencies : [gstbase_dep, gstnet_dep, gstrtp_dep, gstaudio_dep, gio_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstrtpmanager, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstrtpmanager]
diff --git a/gst/rtpmanager/rtpjitterbuffer.c b/gst/rtpmanager/rtpjitterbuffer.c
new file mode 100644
index 0000000000..aef5cbc352
--- /dev/null
+++ b/gst/rtpmanager/rtpjitterbuffer.c
@@ -0,0 +1,1559 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/rtp/gstrtcpbuffer.h>
+
+#include "rtpjitterbuffer.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtp_jitter_buffer_debug);
+#define GST_CAT_DEFAULT rtp_jitter_buffer_debug
+
+#define MAX_WINDOW RTP_JITTER_BUFFER_MAX_WINDOW
+#define MAX_TIME (2 * GST_SECOND)
+
+/* signals and args */
+enum
+{
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+/* GObject vmethods */
+static void rtp_jitter_buffer_finalize (GObject * object);
+
+/* Lazily register and return the GEnum type for #RTPJitterBufferMode.
+ * NOTE(review): registration is not thread-safe here (no g_once); assumed
+ * to be first called from a single-threaded context -- confirm with callers. */
+GType
+rtp_jitter_buffer_mode_get_type (void)
+{
+ static GType jitter_buffer_mode_type = 0;
+ static const GEnumValue jitter_buffer_modes[] = {
+ {RTP_JITTER_BUFFER_MODE_NONE, "Only use RTP timestamps", "none"},
+ {RTP_JITTER_BUFFER_MODE_SLAVE, "Slave receiver to sender clock", "slave"},
+ {RTP_JITTER_BUFFER_MODE_BUFFER, "Do low/high watermark buffering",
+ "buffer"},
+ {RTP_JITTER_BUFFER_MODE_SYNCED, "Synchronized sender and receiver clocks",
+ "synced"},
+ {0, NULL, NULL},
+ };
+
+ if (!jitter_buffer_mode_type) {
+ jitter_buffer_mode_type =
+ g_enum_register_static ("RTPJitterBufferMode", jitter_buffer_modes);
+ }
+ return jitter_buffer_mode_type;
+}
+
+/* static guint rtp_jitter_buffer_signals[LAST_SIGNAL] = { 0 }; */
+
+G_DEFINE_TYPE (RTPJitterBuffer, rtp_jitter_buffer, G_TYPE_OBJECT);
+
+/* Class init: install the finalize vmethod and set up the debug category. */
+static void
+rtp_jitter_buffer_class_init (RTPJitterBufferClass * klass)
+{
+ GObjectClass *gobject_class;
+
+ gobject_class = (GObjectClass *) klass;
+
+ gobject_class->finalize = rtp_jitter_buffer_finalize;
+
+ GST_DEBUG_CATEGORY_INIT (rtp_jitter_buffer_debug, "rtpjitterbuffer", 0,
+ "RTP Jitter Buffer");
+}
+
+/* Instance init: set up the clock lock and packet queue, default to
+ * SLAVE mode (skew-corrected receiver clock) and reset skew tracking. */
+static void
+rtp_jitter_buffer_init (RTPJitterBuffer * jbuf)
+{
+ g_mutex_init (&jbuf->clock_lock);
+
+ g_queue_init (&jbuf->packets);
+ jbuf->mode = RTP_JITTER_BUFFER_MODE_SLAVE;
+
+ rtp_jitter_buffer_reset_skew (jbuf);
+}
+
+/* GObject finalize: disconnect the media-clock "synced" handler, drop the
+ * clock references, flush remaining packets and tear down the lock. */
+static void
+rtp_jitter_buffer_finalize (GObject * object)
+{
+ RTPJitterBuffer *jbuf;
+
+ jbuf = RTP_JITTER_BUFFER_CAST (object);
+
+ if (jbuf->media_clock_synced_id)
+ g_signal_handler_disconnect (jbuf->media_clock,
+ jbuf->media_clock_synced_id);
+ if (jbuf->media_clock) {
+ /* Make sure to clear any clock master before releasing the clock */
+ gst_clock_set_master (jbuf->media_clock, NULL);
+ gst_object_unref (jbuf->media_clock);
+ }
+
+ if (jbuf->pipeline_clock)
+ gst_object_unref (jbuf->pipeline_clock);
+
+ /* We cannot use g_queue_clear() as it would pass the wrong size to
+ * g_slice_free() which may lead to data corruption in the slice allocator.
+ */
+ rtp_jitter_buffer_flush (jbuf, NULL, NULL);
+
+ g_mutex_clear (&jbuf->clock_lock);
+
+ G_OBJECT_CLASS (rtp_jitter_buffer_parent_class)->finalize (object);
+}
+
+/**
+ * rtp_jitter_buffer_new:
+ *
+ * Create an #RTPJitterBuffer.
+ *
+ * Returns: a new #RTPJitterBuffer. Use g_object_unref() after usage.
+ */
+RTPJitterBuffer *
+rtp_jitter_buffer_new (void)
+{
+ RTPJitterBuffer *jbuf;
+
+ /* plain g_object_new; all defaults come from rtp_jitter_buffer_init() */
+ jbuf = g_object_new (RTP_TYPE_JITTER_BUFFER, NULL);
+
+ return jbuf;
+}
+
+/**
+ * rtp_jitter_buffer_get_mode:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the current jitterbuffer mode.
+ *
+ * Returns: the current jitterbuffer mode.
+ */
+RTPJitterBufferMode
+rtp_jitter_buffer_get_mode (RTPJitterBuffer * jbuf)
+{
+ /* plain read; no locking, callers serialize access externally */
+ return jbuf->mode;
+}
+
+/**
+ * rtp_jitter_buffer_set_mode:
+ * @jbuf: an #RTPJitterBuffer
+ * @mode: a #RTPJitterBufferMode
+ *
+ * Set the buffering and clock slaving algorithm used in the @jbuf.
+ */
+void
+rtp_jitter_buffer_set_mode (RTPJitterBuffer * jbuf, RTPJitterBufferMode mode)
+{
+ /* takes effect on the next pts calculation; no resync is forced here */
+ jbuf->mode = mode;
+}
+
+/**
+ * rtp_jitter_buffer_get_delay:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Returns: the currently configured latency of @jbuf.
+ */
+GstClockTime
+rtp_jitter_buffer_get_delay (RTPJitterBuffer * jbuf)
+{
+ return jbuf->delay;
+}
+
+/**
+ * rtp_jitter_buffer_set_delay:
+ * @jbuf: an #RTPJitterBuffer
+ * @delay: the new latency
+ *
+ * Set the latency of @jbuf and derive the buffering watermarks from it:
+ * low level at 15% of the delay, high level at 90%.
+ */
+void
+rtp_jitter_buffer_set_delay (RTPJitterBuffer * jbuf, GstClockTime delay)
+{
+ jbuf->delay = delay;
+ jbuf->low_level = (delay * 15) / 100;
+ /* the high level is at 90% in order to release packets before we fill up the
+ * buffer up to the latency */
+ jbuf->high_level = (delay * 90) / 100;
+
+ GST_DEBUG ("delay %" GST_TIME_FORMAT ", min %" GST_TIME_FORMAT ", max %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (jbuf->delay),
+ GST_TIME_ARGS (jbuf->low_level), GST_TIME_ARGS (jbuf->high_level));
+}
+
+/**
+ * rtp_jitter_buffer_set_clock_rate:
+ * @jbuf: an #RTPJitterBuffer
+ * @clock_rate: the new clock rate
+ *
+ * Set the clock rate in the jitterbuffer.
+ */
+void
+rtp_jitter_buffer_set_clock_rate (RTPJitterBuffer * jbuf, guint32 clock_rate)
+{
+ if (jbuf->clock_rate != clock_rate) {
+ GST_DEBUG ("Clock rate changed from %" G_GUINT32_FORMAT " to %"
+ G_GUINT32_FORMAT, jbuf->clock_rate, clock_rate);
+ jbuf->clock_rate = clock_rate;
+ /* the RTP->ns conversion base changed, previous skew samples are invalid */
+ rtp_jitter_buffer_reset_skew (jbuf);
+ }
+}
+
+/**
+ * rtp_jitter_buffer_get_clock_rate:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the currently configure clock rate in @jbuf.
+ *
+ * Returns: the current clock-rate
+ */
+guint32
+rtp_jitter_buffer_get_clock_rate (RTPJitterBuffer * jbuf)
+{
+ return jbuf->clock_rate;
+}
+
+/* "synced" signal handler for the RFC7273 media clock: once the media clock
+ * reports sync, seed its calibration so its internal time maps onto the
+ * current pipeline clock time (rate 1/1). */
+static void
+media_clock_synced_cb (GstClock * clock, gboolean synced,
+ RTPJitterBuffer * jbuf)
+{
+ GstClockTime internal, external;
+
+ g_mutex_lock (&jbuf->clock_lock);
+ if (jbuf->pipeline_clock) {
+ internal = gst_clock_get_internal_time (jbuf->media_clock);
+ external = gst_clock_get_time (jbuf->pipeline_clock);
+
+ gst_clock_set_calibration (jbuf->media_clock, internal, external, 1, 1);
+ }
+ g_mutex_unlock (&jbuf->clock_lock);
+}
+
+/**
+ * rtp_jitter_buffer_set_media_clock:
+ * @jbuf: an #RTPJitterBuffer
+ * @clock: (transfer full): media #GstClock
+ * @clock_offset: RTP time at clock epoch or -1
+ *
+ * Sets the media clock for the media and the clock offset
+ *
+ */
+void
+rtp_jitter_buffer_set_media_clock (RTPJitterBuffer * jbuf, GstClock * clock,
+ guint64 clock_offset)
+{
+ g_mutex_lock (&jbuf->clock_lock);
+ /* drop any previous media clock and its "synced" handler */
+ if (jbuf->media_clock) {
+ if (jbuf->media_clock_synced_id)
+ g_signal_handler_disconnect (jbuf->media_clock,
+ jbuf->media_clock_synced_id);
+ jbuf->media_clock_synced_id = 0;
+ gst_object_unref (jbuf->media_clock);
+ }
+ /* takes ownership of @clock (transfer full) */
+ jbuf->media_clock = clock;
+ jbuf->media_clock_offset = clock_offset;
+
+ if (jbuf->pipeline_clock && jbuf->media_clock &&
+ jbuf->pipeline_clock != jbuf->media_clock) {
+ /* calibrate now if already synced, otherwise wait for the "synced"
+ * signal to do it in media_clock_synced_cb() */
+ jbuf->media_clock_synced_id =
+ g_signal_connect (jbuf->media_clock, "synced",
+ G_CALLBACK (media_clock_synced_cb), jbuf);
+ if (gst_clock_is_synced (jbuf->media_clock)) {
+ GstClockTime internal, external;
+
+ internal = gst_clock_get_internal_time (jbuf->media_clock);
+ external = gst_clock_get_time (jbuf->pipeline_clock);
+
+ gst_clock_set_calibration (jbuf->media_clock, internal, external, 1, 1);
+ }
+
+ /* keep the media clock slaved to the pipeline clock from now on */
+ gst_clock_set_master (jbuf->media_clock, jbuf->pipeline_clock);
+ }
+ g_mutex_unlock (&jbuf->clock_lock);
+}
+
+/**
+ * rtp_jitter_buffer_set_pipeline_clock:
+ * @jbuf: an #RTPJitterBuffer
+ * @clock: pipeline #GstClock
+ *
+ * Sets the pipeline clock
+ *
+ */
+void
+rtp_jitter_buffer_set_pipeline_clock (RTPJitterBuffer * jbuf, GstClock * clock)
+{
+ g_mutex_lock (&jbuf->clock_lock);
+ if (jbuf->pipeline_clock)
+ gst_object_unref (jbuf->pipeline_clock);
+ /* unlike set_media_clock() this takes a new ref; @clock may be NULL */
+ jbuf->pipeline_clock = clock ? gst_object_ref (clock) : NULL;
+
+ if (jbuf->pipeline_clock && jbuf->media_clock &&
+ jbuf->pipeline_clock != jbuf->media_clock) {
+ /* re-seed the media clock calibration against the new pipeline clock */
+ if (gst_clock_is_synced (jbuf->media_clock)) {
+ GstClockTime internal, external;
+
+ internal = gst_clock_get_internal_time (jbuf->media_clock);
+ external = gst_clock_get_time (jbuf->pipeline_clock);
+
+ gst_clock_set_calibration (jbuf->media_clock, internal, external, 1, 1);
+ }
+
+ gst_clock_set_master (jbuf->media_clock, jbuf->pipeline_clock);
+ }
+ g_mutex_unlock (&jbuf->clock_lock);
+}
+
+/* Returns whether RFC7273 (clock-signalling based) sync is enabled. */
+gboolean
+rtp_jitter_buffer_get_rfc7273_sync (RTPJitterBuffer * jbuf)
+{
+ return jbuf->rfc7273_sync;
+}
+
+/* Enable/disable RFC7273 sync; checked in rtp_jitter_buffer_calculate_pts(). */
+void
+rtp_jitter_buffer_set_rfc7273_sync (RTPJitterBuffer * jbuf,
+ gboolean rfc7273_sync)
+{
+ jbuf->rfc7273_sync = rfc7273_sync;
+}
+
+/**
+ * rtp_jitter_buffer_reset_skew:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Reset the skew calculations in @jbuf.
+ */
+void
+rtp_jitter_buffer_reset_skew (RTPJitterBuffer * jbuf)
+{
+ /* invalidate all timestamp bases (-1 == GST_CLOCK_TIME_NONE sentinel) */
+ jbuf->base_time = -1;
+ jbuf->base_rtptime = -1;
+ jbuf->base_extrtp = -1;
+ jbuf->media_clock_base_time = -1;
+ jbuf->ext_rtptime = -1;
+ jbuf->last_rtptime = -1;
+ /* restart the drift measurement window from scratch */
+ jbuf->window_pos = 0;
+ jbuf->window_filling = TRUE;
+ jbuf->window_min = 0;
+ jbuf->skew = 0;
+ jbuf->prev_send_diff = -1;
+ jbuf->prev_out_time = -1;
+ /* force a resync on the next timestamped packet */
+ jbuf->need_resync = TRUE;
+
+ GST_DEBUG ("reset skew correction");
+}
+
+/**
+ * rtp_jitter_buffer_disable_buffering:
+ * @jbuf: an #RTPJitterBuffer
+ * @disabled: the new state
+ *
+ * Enable or disable buffering on @jbuf.
+ */
+void
+rtp_jitter_buffer_disable_buffering (RTPJitterBuffer * jbuf, gboolean disabled)
+{
+ /* when disabled, update_buffer_level() pretends the buffer is full */
+ jbuf->buffering_disabled = disabled;
+}
+
+/* Take new base receive/RTP times and clear the interpolation state.
+ * When @reset_skew is TRUE the drift window and skew estimate are also
+ * discarded (used when the sender timeline jumped, not for a mere resync). */
+static void
+rtp_jitter_buffer_resync (RTPJitterBuffer * jbuf, GstClockTime time,
+ GstClockTime gstrtptime, guint64 ext_rtptime, gboolean reset_skew)
+{
+ jbuf->base_time = time;
+ jbuf->media_clock_base_time = -1;
+ jbuf->base_rtptime = gstrtptime;
+ jbuf->base_extrtp = ext_rtptime;
+ jbuf->prev_out_time = -1;
+ jbuf->prev_send_diff = -1;
+ if (reset_skew) {
+ jbuf->window_filling = TRUE;
+ jbuf->window_pos = 0;
+ jbuf->window_min = 0;
+ jbuf->window_size = 0;
+ jbuf->skew = 0;
+ }
+ jbuf->need_resync = FALSE;
+}
+
+/* Compute the amount of buffered data as the timestamp distance between the
+ * newest and oldest queued packets that carry a valid dts or pts.
+ * NOTE: the GList* <-> RTPJitterBufferItem* casts rely on the item embedding
+ * its list node as first member (see queue_do_insert() which splices items
+ * directly as GList nodes). Returns 0 when fewer than two timestamped
+ * packets are queued. */
+static guint64
+get_buffer_level (RTPJitterBuffer * jbuf)
+{
+ RTPJitterBufferItem *high_buf = NULL, *low_buf = NULL;
+ guint64 level;
+
+ /* first buffer with timestamp */
+ high_buf = (RTPJitterBufferItem *) g_queue_peek_tail_link (&jbuf->packets);
+ while (high_buf) {
+ if (high_buf->dts != -1 || high_buf->pts != -1)
+ break;
+
+ high_buf = (RTPJitterBufferItem *) g_list_previous (high_buf);
+ }
+
+ /* oldest buffer with timestamp, scanning from the head */
+ low_buf = (RTPJitterBufferItem *) g_queue_peek_head_link (&jbuf->packets);
+ while (low_buf) {
+ if (low_buf->dts != -1 || low_buf->pts != -1)
+ break;
+
+ low_buf = (RTPJitterBufferItem *) g_list_next (low_buf);
+ }
+
+ if (!high_buf || !low_buf || high_buf == low_buf) {
+ level = 0;
+ } else {
+ guint64 high_ts, low_ts;
+
+ /* prefer dts, fall back to pts */
+ high_ts = high_buf->dts != -1 ? high_buf->dts : high_buf->pts;
+ low_ts = low_buf->dts != -1 ? low_buf->dts : low_buf->pts;
+
+ /* clamp to 0 if timestamps are out of order */
+ if (high_ts > low_ts)
+ level = high_ts - low_ts;
+ else
+ level = 0;
+
+ GST_LOG_OBJECT (jbuf,
+ "low %" GST_TIME_FORMAT " high %" GST_TIME_FORMAT " level %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (low_ts), GST_TIME_ARGS (high_ts),
+ level);
+ }
+ return level;
+}
+
+/* Re-evaluate the buffering state machine against the low/high watermarks
+ * (set in rtp_jitter_buffer_set_delay()) and, when the state changes or we
+ * are still buffering, report the fill percentage through @percent
+ * (100 when buffering finished, scaled level/high_level otherwise). */
+static void
+update_buffer_level (RTPJitterBuffer * jbuf, gint * percent)
+{
+ gboolean post = FALSE;
+ guint64 level;
+
+ level = get_buffer_level (jbuf);
+ GST_DEBUG ("buffer level %" GST_TIME_FORMAT, GST_TIME_ARGS (level));
+
+ if (jbuf->buffering_disabled) {
+ /* pretend the buffer is full so we never report "buffering" */
+ GST_DEBUG ("buffering is disabled");
+ level = jbuf->high_level;
+ }
+
+ if (jbuf->buffering) {
+ post = TRUE;
+ if (level >= jbuf->high_level) {
+ GST_DEBUG ("buffering finished");
+ jbuf->buffering = FALSE;
+ }
+ } else {
+ if (level < jbuf->low_level) {
+ GST_DEBUG ("buffering started");
+ jbuf->buffering = TRUE;
+ post = TRUE;
+ }
+ }
+ if (post) {
+ gint perc;
+
+ if (jbuf->buffering && (jbuf->high_level != 0)) {
+ perc = (level * 100 / jbuf->high_level);
+ perc = MIN (perc, 100);
+ } else {
+ perc = 100;
+ }
+
+ if (percent)
+ *percent = perc;
+
+ GST_DEBUG ("buffering %d", perc);
+ }
+}
+
+/* For the clock skew we use a windowed low point averaging algorithm as can be
+ * found in Fober, Orlarey and Letz, 2005, "Real Time Clock Skew Estimation
+ * over Network Delays":
+ * http://www.grame.fr/Ressources/pub/TR-050601.pdf
+ * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1546
+ *
+ * The idea is that the jitter is composed of:
+ *
+ * J = N + n
+ *
+ * N : a constant network delay.
+ * n : random added noise. The noise is concentrated around 0
+ *
+ * In the receiver we can track the elapsed time at the sender with:
+ *
+ * send_diff(i) = (Tsi - Ts0);
+ *
+ * Tsi : The time at the sender at packet i
+ * Ts0 : The time at the sender at the first packet
+ *
+ * This is the difference between the RTP timestamp in the first received packet
+ * and the current packet.
+ *
+ * At the receiver we have to deal with the jitter introduced by the network.
+ *
+ * recv_diff(i) = (Tri - Tr0)
+ *
+ * Tri : The time at the receiver at packet i
+ * Tr0 : The time at the receiver at the first packet
+ *
+ * Both of these values contain a jitter Ji, a jitter for packet i, so we can
+ * write:
+ *
+ * recv_diff(i) = (Cri + D + ni) - (Cr0 + D + n0))
+ *
+ * Cri : The time of the clock at the receiver for packet i
+ * D + ni : The jitter when receiving packet i
+ *
+ * We see that the network delay is irrelevant here as we can eliminate D:
+ *
+ * recv_diff(i) = (Cri + ni) - (Cr0 + n0))
+ *
+ * The drift is now expressed as:
+ *
+ * Drift(i) = recv_diff(i) - send_diff(i);
+ *
+ * We now keep the W latest values of Drift and find the minimum (this is the
+ * one with the lowest network jitter and thus the one which is least affected
+ * by it). We average this lowest value to smooth out the resulting network skew.
+ *
+ * Both the window and the weighting used for averaging influence the accuracy
+ * of the drift estimation. Finding the correct parameters turns out to be a
+ * compromise between accuracy and inertia.
+ *
+ * We use a 2 second window or up to 512 data points, which is statistically big
+ * enough to catch spikes (FIXME, detect spikes).
+ * We also use a rather large weighting factor (125) to smoothly adapt. During
+ * startup, when filling the window, we use a parabolic weighting factor, the
+ * more the window is filled, the faster we move to the detected possible skew.
+ *
+ * Returns: @time adjusted with the clock skew.
+ */
+/* Windowed low-point-averaging skew estimation (see the block comment above
+ * for the theory). @time is the receive dts, @gstrtptime the RTP time in ns;
+ * returns the skew-corrected output time, or base_time interpolation when no
+ * receive time is available. */
+static GstClockTime
+calculate_skew (RTPJitterBuffer * jbuf, guint64 ext_rtptime,
+ GstClockTime gstrtptime, GstClockTime time, gint gap, gboolean is_rtx)
+{
+ guint64 send_diff, recv_diff;
+ gint64 delta;
+ gint64 old;
+ gint pos, i;
+ GstClockTime out_time;
+ guint64 slope;
+
+ /* elapsed time at sender */
+ send_diff = gstrtptime - jbuf->base_rtptime;
+
+ /* we don't have an arrival timestamp so we can't do skew detection. we
+ * should still apply a timestamp based on RTP timestamp and base_time */
+ if (time == -1 || jbuf->base_time == -1 || is_rtx)
+ goto no_skew;
+
+ /* elapsed time at receiver, includes the jitter */
+ recv_diff = time - jbuf->base_time;
+
+ /* measure the diff */
+ delta = ((gint64) recv_diff) - ((gint64) send_diff);
+
+ /* measure the slope, this gives a rought estimate between the sender speed
+ * and the receiver speed. This should be approximately 8, higher values
+ * indicate a burst (especially when the connection starts) */
+ if (recv_diff > 0)
+ slope = (send_diff * 8) / recv_diff;
+ else
+ slope = 8;
+
+ GST_DEBUG ("time %" GST_TIME_FORMAT ", base %" GST_TIME_FORMAT ", recv_diff %"
+ GST_TIME_FORMAT ", slope %" G_GUINT64_FORMAT, GST_TIME_ARGS (time),
+ GST_TIME_ARGS (jbuf->base_time), GST_TIME_ARGS (recv_diff), slope);
+
+ /* if the difference between the sender timeline and the receiver timeline
+ * changed too quickly we have to resync because the server likely restarted
+ * its timestamps. */
+ if (ABS (delta - jbuf->skew) > GST_SECOND) {
+ GST_WARNING ("delta - skew: %" GST_TIME_FORMAT " too big, reset skew",
+ GST_TIME_ARGS (ABS (delta - jbuf->skew)));
+ rtp_jitter_buffer_resync (jbuf, time, gstrtptime, ext_rtptime, TRUE);
+ send_diff = 0;
+ delta = 0;
+ gap = 0;
+ }
+
+ /* only do skew calculations if we didn't have a gap. if too much time
+ * has elapsed despite there being a gap, we resynced already. */
+ if (G_UNLIKELY (gap != 0))
+ goto no_skew;
+
+ pos = jbuf->window_pos;
+
+ if (G_UNLIKELY (jbuf->window_filling)) {
+ /* we are filling the window */
+ GST_DEBUG ("filling %d, delta %" G_GINT64_FORMAT, pos, delta);
+ jbuf->window[pos++] = delta;
+ /* calc the min delta we observed */
+ if (G_UNLIKELY (pos == 1 || delta < jbuf->window_min))
+ jbuf->window_min = delta;
+
+ if (G_UNLIKELY (send_diff >= MAX_TIME || pos >= MAX_WINDOW)) {
+ jbuf->window_size = pos;
+
+ /* window filled */
+ GST_DEBUG ("min %" G_GINT64_FORMAT, jbuf->window_min);
+
+ /* the skew is now the min */
+ jbuf->skew = jbuf->window_min;
+ jbuf->window_filling = FALSE;
+ } else {
+ gint perc_time, perc_window, perc;
+
+ /* figure out how much we filled the window, this depends on the amount of
+ * time we have or the max number of points we keep. */
+ perc_time = send_diff * 100 / MAX_TIME;
+ perc_window = pos * 100 / MAX_WINDOW;
+ perc = MAX (perc_time, perc_window);
+
+ /* make a parabolic function, the closer we get to the MAX, the more value
+ * we give to the scaling factor of the new value */
+ perc = perc * perc;
+
+ /* quickly go to the min value when we are filling up, slowly when we are
+ * just starting because we're not sure it's a good value yet. */
+ jbuf->skew =
+ (perc * jbuf->window_min + ((10000 - perc) * jbuf->skew)) / 10000;
+ jbuf->window_size = pos + 1;
+ }
+ } else {
+ /* pick old value and store new value. We keep the previous value in order
+ * to quickly check if the min of the window changed */
+ old = jbuf->window[pos];
+ jbuf->window[pos++] = delta;
+
+ if (G_UNLIKELY (delta <= jbuf->window_min)) {
+ /* if the new value we inserted is smaller or equal to the current min,
+ * it becomes the new min */
+ jbuf->window_min = delta;
+ } else if (G_UNLIKELY (old == jbuf->window_min)) {
+ gint64 min = G_MAXINT64;
+
+ /* if we removed the old min, we have to find a new min */
+ for (i = 0; i < jbuf->window_size; i++) {
+ /* we found another value equal to the old min, we can stop searching now */
+ if (jbuf->window[i] == old) {
+ min = old;
+ break;
+ }
+ if (jbuf->window[i] < min)
+ min = jbuf->window[i];
+ }
+ jbuf->window_min = min;
+ }
+ /* average the min values */
+ jbuf->skew = (jbuf->window_min + (124 * jbuf->skew)) / 125;
+ GST_DEBUG ("delta %" G_GINT64_FORMAT ", new min: %" G_GINT64_FORMAT,
+ delta, jbuf->window_min);
+ }
+ /* wrap around in the window */
+ if (G_UNLIKELY (pos >= jbuf->window_size))
+ pos = 0;
+ jbuf->window_pos = pos;
+
+no_skew:
+ /* the output time is defined as the base timestamp plus the RTP time
+ * adjusted for the clock skew .*/
+ if (jbuf->base_time != -1) {
+ out_time = jbuf->base_time + send_diff;
+ /* skew can be negative and we don't want to make invalid timestamps */
+ if (jbuf->skew < 0 && out_time < -jbuf->skew) {
+ out_time = 0;
+ } else {
+ out_time += jbuf->skew;
+ }
+ } else
+ out_time = -1;
+
+ GST_DEBUG ("skew %" G_GINT64_FORMAT ", out %" GST_TIME_FORMAT,
+ jbuf->skew, GST_TIME_ARGS (out_time));
+
+ return out_time;
+}
+
+/* Splice @item (itself a GList node) into @jbuf's packet queue directly
+ * after @list, or at the head when @list is NULL, fixing up head/tail and
+ * the length counter by hand to avoid a second allocation per packet. */
+static void
+queue_do_insert (RTPJitterBuffer * jbuf, GList * list, GList * item)
+{
+ GQueue *queue = &jbuf->packets;
+
+ /* It's more likely that the packet was inserted at the tail of the queue */
+ if (G_LIKELY (list)) {
+ item->prev = list;
+ item->next = list->next;
+ list->next = item;
+ } else {
+ item->prev = NULL;
+ item->next = queue->head;
+ queue->head = item;
+ }
+ /* back-link the successor, or we became the new tail */
+ if (item->next)
+ item->next->prev = item;
+ else
+ queue->tail = item;
+ queue->length++;
+}
+
+/* Map an incoming packet's RTP timestamp (and receive @dts) to an output pts
+ * according to the configured mode: RFC7273 media-clock slaving when a synced
+ * media clock is available, otherwise skew estimation via calculate_skew().
+ * Handles RTP timestamp jumps/backwards steps by scheduling resyncs, and
+ * discards (returns GST_CLOCK_TIME_NONE) rtx packets that would force one. */
+GstClockTime
+rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts,
+ gboolean estimated_dts, guint32 rtptime, GstClockTime base_time,
+ gint gap, gboolean is_rtx)
+{
+ guint64 ext_rtptime;
+ GstClockTime gstrtptime, pts;
+ GstClock *media_clock, *pipeline_clock;
+ guint64 media_clock_offset;
+ gboolean rfc7273_mode;
+
+ /* rtp time jumps are checked for during skew calculation, but bypassed
+ * in other mode, so mind those here and reset jb if needed.
+ * Only reset if valid input time, which is likely for UDP input
+ * where we expect this might happen due to async thread effects
+ * (in seek and state change cycles), but not so much for TCP input */
+ if (GST_CLOCK_TIME_IS_VALID (dts) && !estimated_dts &&
+ jbuf->mode != RTP_JITTER_BUFFER_MODE_SLAVE &&
+ jbuf->base_time != -1 && jbuf->last_rtptime != -1) {
+ GstClockTime ext_rtptime = jbuf->ext_rtptime;
+
+ /* local copy: probe the extended timestamp without updating state */
+ ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
+ if (ext_rtptime > jbuf->last_rtptime + 3 * jbuf->clock_rate ||
+ ext_rtptime + 3 * jbuf->clock_rate < jbuf->last_rtptime) {
+ if (!is_rtx) {
+ /* reset even if we don't have valid incoming time;
+ * still better than producing possibly very bogus output timestamp */
+ GST_WARNING ("rtp delta too big, reset skew");
+ rtp_jitter_buffer_reset_skew (jbuf);
+ } else {
+ GST_WARNING ("rtp delta too big: ignore rtx packet");
+ /* clocks not yet refed at this point; NULL keeps done: safe */
+ media_clock = NULL;
+ pipeline_clock = NULL;
+ pts = GST_CLOCK_TIME_NONE;
+ goto done;
+ }
+ }
+ }
+
+ /* Return the last time if we got the same RTP timestamp again */
+ ext_rtptime = gst_rtp_buffer_ext_timestamp (&jbuf->ext_rtptime, rtptime);
+ if (jbuf->last_rtptime != -1 && ext_rtptime == jbuf->last_rtptime) {
+ return jbuf->prev_out_time;
+ }
+
+ /* keep track of the last extended rtptime */
+ jbuf->last_rtptime = ext_rtptime;
+
+ /* snapshot the clocks under the lock; refs released at done: */
+ g_mutex_lock (&jbuf->clock_lock);
+ media_clock = jbuf->media_clock ? gst_object_ref (jbuf->media_clock) : NULL;
+ pipeline_clock =
+ jbuf->pipeline_clock ? gst_object_ref (jbuf->pipeline_clock) : NULL;
+ media_clock_offset = jbuf->media_clock_offset;
+ g_mutex_unlock (&jbuf->clock_lock);
+
+ /* convert the extended RTP time to nanoseconds */
+ gstrtptime =
+ gst_util_uint64_scale_int (ext_rtptime, GST_SECOND, jbuf->clock_rate);
+
+ if (G_LIKELY (jbuf->base_rtptime != -1)) {
+ /* check elapsed time in RTP units */
+ if (gstrtptime < jbuf->base_rtptime) {
+ if (!is_rtx) {
+ /* elapsed time at sender, timestamps can go backwards and thus be
+ * smaller than our base time, schedule to take a new base time in
+ * that case. */
+ GST_WARNING ("backward timestamps at server, schedule resync");
+ jbuf->need_resync = TRUE;
+ } else {
+ GST_WARNING ("backward timestamps: ignore rtx packet");
+ pts = GST_CLOCK_TIME_NONE;
+ goto done;
+ }
+ }
+ }
+
+ switch (jbuf->mode) {
+ case RTP_JITTER_BUFFER_MODE_NONE:
+ case RTP_JITTER_BUFFER_MODE_BUFFER:
+ /* send 0 as the first timestamp and -1 for the other ones. This will
+ * interpolate them from the RTP timestamps with a 0 origin. In buffering
+ * mode we will adjust the outgoing timestamps according to the amount of
+ * time we spent buffering. */
+ if (jbuf->base_time == -1)
+ dts = 0;
+ else
+ dts = -1;
+ break;
+ case RTP_JITTER_BUFFER_MODE_SYNCED:
+ /* synchronized clocks, take first timestamp as base, use RTP timestamps
+ * to interpolate */
+ if (jbuf->base_time != -1 && !jbuf->need_resync)
+ dts = -1;
+ break;
+ case RTP_JITTER_BUFFER_MODE_SLAVE:
+ default:
+ break;
+ }
+
+ /* need resync, lock on to time and gstrtptime if we can, otherwise we
+ * do with the previous values */
+ if (G_UNLIKELY (jbuf->need_resync && dts != -1)) {
+ if (is_rtx) {
+ GST_DEBUG ("not resyncing on rtx packet, discard");
+ pts = GST_CLOCK_TIME_NONE;
+ goto done;
+ }
+ GST_INFO ("resync to time %" GST_TIME_FORMAT ", rtptime %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (dts), GST_TIME_ARGS (gstrtptime));
+ rtp_jitter_buffer_resync (jbuf, dts, gstrtptime, ext_rtptime, FALSE);
+ }
+
+ GST_DEBUG ("extrtp %" G_GUINT64_FORMAT ", gstrtp %" GST_TIME_FORMAT ", base %"
+ GST_TIME_FORMAT ", send_diff %" GST_TIME_FORMAT, ext_rtptime,
+ GST_TIME_ARGS (gstrtptime), GST_TIME_ARGS (jbuf->base_rtptime),
+ GST_TIME_ARGS (gstrtptime - jbuf->base_rtptime));
+
+ rfc7273_mode = media_clock && pipeline_clock
+ && gst_clock_is_synced (media_clock);
+
+ /* RFC7273 branch 1: media clock available but no usable clock offset
+ * (or rfc7273-sync disabled) -- slave to the media clock directly */
+ if (rfc7273_mode && jbuf->mode == RTP_JITTER_BUFFER_MODE_SLAVE
+ && (media_clock_offset == -1 || !jbuf->rfc7273_sync)) {
+ GstClockTime internal, external;
+ GstClockTime rate_num, rate_denom;
+ GstClockTime nsrtptimediff, rtpntptime, rtpsystime;
+
+ gst_clock_get_calibration (media_clock, &internal, &external, &rate_num,
+ &rate_denom);
+
+ /* Slave to the RFC7273 media clock instead of trying to estimate it
+ * based on receive times and RTP timestamps */
+
+ if (jbuf->media_clock_base_time == -1) {
+ if (jbuf->base_time != -1) {
+ jbuf->media_clock_base_time =
+ gst_clock_unadjust_with_calibration (media_clock,
+ jbuf->base_time + base_time, internal, external, rate_num,
+ rate_denom);
+ } else {
+ if (dts != -1)
+ jbuf->media_clock_base_time =
+ gst_clock_unadjust_with_calibration (media_clock, dts + base_time,
+ internal, external, rate_num, rate_denom);
+ else
+ jbuf->media_clock_base_time =
+ gst_clock_get_internal_time (media_clock);
+ jbuf->base_rtptime = gstrtptime;
+ }
+ }
+
+ if (gstrtptime > jbuf->base_rtptime)
+ nsrtptimediff = gstrtptime - jbuf->base_rtptime;
+ else
+ nsrtptimediff = 0;
+
+ rtpntptime = nsrtptimediff + jbuf->media_clock_base_time;
+
+ rtpsystime =
+ gst_clock_adjust_with_calibration (media_clock, rtpntptime, internal,
+ external, rate_num, rate_denom);
+
+ if (rtpsystime > base_time)
+ pts = rtpsystime - base_time;
+ else
+ pts = 0;
+
+ GST_DEBUG ("RFC7273 clock time %" GST_TIME_FORMAT ", out %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (rtpsystime), GST_TIME_ARGS (pts));
+ } else if (rfc7273_mode && (jbuf->mode == RTP_JITTER_BUFFER_MODE_SLAVE
+ || jbuf->mode == RTP_JITTER_BUFFER_MODE_SYNCED)
+ && media_clock_offset != -1 && jbuf->rfc7273_sync) {
+ /* RFC7273 branch 2: full clock-offset signalling available */
+ GstClockTime ntptime, rtptime_tmp;
+ GstClockTime ntprtptime, rtpsystime;
+ GstClockTime internal, external;
+ GstClockTime rate_num, rate_denom;
+
+ /* Don't do any of the dts related adjustments further down */
+ dts = -1;
+
+ /* Calculate the actual clock time on the sender side based on the
+ * RFC7273 clock and convert it to our pipeline clock
+ */
+
+ gst_clock_get_calibration (media_clock, &internal, &external, &rate_num,
+ &rate_denom);
+
+ ntptime = gst_clock_get_internal_time (media_clock);
+
+ ntprtptime = gst_util_uint64_scale (ntptime, jbuf->clock_rate, GST_SECOND);
+ ntprtptime += media_clock_offset;
+ ntprtptime &= 0xffffffff;
+
+ rtptime_tmp = rtptime;
+ /* Check for wraparounds, we assume that the diff between current RTP
+ * timestamp and current media clock time can't be bigger than
+ * 2**31 clock units */
+ if (ntprtptime > rtptime_tmp && ntprtptime - rtptime_tmp >= 0x80000000)
+ rtptime_tmp += G_GUINT64_CONSTANT (0x100000000);
+ else if (rtptime_tmp > ntprtptime && rtptime_tmp - ntprtptime >= 0x80000000)
+ ntprtptime += G_GUINT64_CONSTANT (0x100000000);
+
+ if (ntprtptime > rtptime_tmp)
+ ntptime -=
+ gst_util_uint64_scale (ntprtptime - rtptime_tmp, GST_SECOND,
+ jbuf->clock_rate);
+ else
+ ntptime +=
+ gst_util_uint64_scale (rtptime_tmp - ntprtptime, GST_SECOND,
+ jbuf->clock_rate);
+
+ rtpsystime =
+ gst_clock_adjust_with_calibration (media_clock, ntptime, internal,
+ external, rate_num, rate_denom);
+ /* All this assumes that the pipeline has enough additional
+ * latency to cover for the network delay */
+ if (rtpsystime > base_time)
+ pts = rtpsystime - base_time;
+ else
+ pts = 0;
+
+ GST_DEBUG ("RFC7273 clock time %" GST_TIME_FORMAT ", ntptime %"
+ GST_TIME_FORMAT ", ntprtptime %" G_GUINT64_FORMAT ", rtptime %"
+ G_GUINT32_FORMAT ", base_time %" GST_TIME_FORMAT ", internal %"
+ GST_TIME_FORMAT ", external %" GST_TIME_FORMAT ", out %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (rtpsystime), GST_TIME_ARGS (ntptime),
+ ntprtptime, rtptime, GST_TIME_ARGS (base_time),
+ GST_TIME_ARGS (internal), GST_TIME_ARGS (external),
+ GST_TIME_ARGS (pts));
+ } else {
+ /* If we used the RFC7273 clock before and not anymore,
+ * we need to resync it later again */
+ jbuf->media_clock_base_time = -1;
+
+ /* do skew calculation by measuring the difference between rtptime and the
+ * receive dts, this function will return the skew corrected rtptime. */
+ pts = calculate_skew (jbuf, ext_rtptime, gstrtptime, dts, gap, is_rtx);
+ }
+
+ /* check if timestamps are not going backwards, we can only check this if we
+ * have a previous out time and a previous send_diff */
+ if (G_LIKELY (pts != -1 && jbuf->prev_out_time != -1
+ && jbuf->prev_send_diff != -1)) {
+ /* now check for backwards timestamps */
+ if (G_UNLIKELY (
+ /* if the server timestamps went up and the out_time backwards */
+ (gstrtptime - jbuf->base_rtptime > jbuf->prev_send_diff
+ && pts < jbuf->prev_out_time) ||
+ /* if the server timestamps went backwards and the out_time forwards */
+ (gstrtptime - jbuf->base_rtptime < jbuf->prev_send_diff
+ && pts > jbuf->prev_out_time) ||
+ /* if the server timestamps did not change */
+ gstrtptime - jbuf->base_rtptime == jbuf->prev_send_diff)) {
+ GST_DEBUG ("backwards timestamps, using previous time");
+ pts = jbuf->prev_out_time;
+ }
+ }
+
+ if (gap == 0 && dts != -1 && pts + jbuf->delay < dts) {
+ /* if we are going to produce a timestamp that is later than the input
+ * timestamp, we need to reset the jitterbuffer. Likely the server paused
+ * temporarily */
+ GST_DEBUG ("out %" GST_TIME_FORMAT " + %" G_GUINT64_FORMAT " < time %"
+ GST_TIME_FORMAT ", reset jitterbuffer and discard", GST_TIME_ARGS (pts),
+ jbuf->delay, GST_TIME_ARGS (dts));
+ rtp_jitter_buffer_reset_skew (jbuf);
+ rtp_jitter_buffer_resync (jbuf, dts, gstrtptime, ext_rtptime, TRUE);
+ pts = dts;
+ }
+
+ /* remember for the backwards/duplicate checks on the next packet */
+ jbuf->prev_out_time = pts;
+ jbuf->prev_send_diff = gstrtptime - jbuf->base_rtptime;
+
+done:
+ if (media_clock)
+ gst_object_unref (media_clock);
+ if (pipeline_clock)
+ gst_object_unref (pipeline_clock);
+
+ return pts;
+}
+
+
+/**
+ * rtp_jitter_buffer_insert:
+ * @jbuf: an #RTPJitterBuffer
+ * @item: an #RTPJitterBufferItem to insert
+ * @head: TRUE when the head element changed.
+ * @percent: the buffering percent after insertion
+ *
+ * Inserts @item into the packet queue of @jbuf. The sequence number of the
+ * packet will be used to sort the packets. This function takes ownership of
+ * @item when the function returns %TRUE.
+ *
+ * When @head is %TRUE, the new packet was added at the head of the queue and
+ * will be available with the next call to rtp_jitter_buffer_pop() and
+ * rtp_jitter_buffer_peek().
+ *
+ * Returns: %FALSE if a packet with the same number already existed.
+ */
+static gboolean
+rtp_jitter_buffer_insert (RTPJitterBuffer * jbuf, RTPJitterBufferItem * item,
+    gboolean * head, gint * percent)
+{
+  GList *list, *event = NULL;
+  guint16 seqnum;
+
+  g_return_val_if_fail (jbuf != NULL, FALSE);
+  g_return_val_if_fail (item != NULL, FALSE);
+
+  /* scan from the newest packet (tail) backwards; most packets arrive in
+   * order, so the insertion point is usually found immediately */
+  list = jbuf->packets.tail;
+
+  /* no seqnum, simply append then */
+  if (item->seqnum == -1)
+    goto append;
+
+  seqnum = item->seqnum;
+
+  /* loop the list to skip strictly larger seqnum buffers */
+  for (; list; list = g_list_previous (list)) {
+    guint16 qseq;
+    gint gap;
+    /* an item's first three members mirror GList, so queue links can be
+     * cast to items directly (see struct _RTPJitterBufferItem) */
+    RTPJitterBufferItem *qitem = (RTPJitterBufferItem *) list;
+
+    if (qitem->seqnum == -1) {
+      /* keep a pointer to the first consecutive event if not already
+       * set. we will insert the packet after the event if we can't find
+       * a packet with lower sequence number before the event. */
+      if (event == NULL)
+        event = list;
+      continue;
+    }
+
+    qseq = qitem->seqnum;
+
+    /* compare the new seqnum to the one in the buffer, wrap-aware */
+    gap = gst_rtp_buffer_compare_seqnum (seqnum, qseq);
+
+    /* we hit a packet with the same seqnum, notify a duplicate */
+    if (G_UNLIKELY (gap == 0))
+      goto duplicate;
+
+    /* seqnum > qseq, we can stop looking */
+    if (G_LIKELY (gap < 0))
+      break;
+
+    /* if we've found a packet with greater sequence number, cleanup the
+     * event pointer as the packet will be inserted before the event */
+    event = NULL;
+  }
+
+  /* if event is set it means that packets before the event had smaller
+   * sequence number, so we will insert our packet after the event */
+  if (event)
+    list = event;
+
+append:
+  /* NOTE(review): queue_do_insert presumably links @item after @list, or at
+   * the head when @list is NULL — confirmed indirectly by the head check
+   * below */
+  queue_do_insert (jbuf, list, (GList *) item);
+
+  /* buffering mode, update buffer stats */
+  if (jbuf->mode == RTP_JITTER_BUFFER_MODE_BUFFER)
+    update_buffer_level (jbuf, percent);
+  else if (percent)
+    *percent = -1;
+
+  /* head was changed when we did not find a previous packet, we set the return
+   * flag when requested. */
+  if (G_LIKELY (head))
+    *head = (list == NULL);
+
+  return TRUE;
+
+  /* ERRORS */
+duplicate:
+  {
+    GST_DEBUG ("duplicate packet %d found", (gint) seqnum);
+    if (G_LIKELY (head))
+      *head = FALSE;
+    if (percent)
+      *percent = -1;
+    return FALSE;
+  }
+}
+
+/**
+ * rtp_jitter_buffer_alloc_item:
+ * @data: The data stored in this item
+ * @type: User specific item type
+ * @dts: Decoding Timestamp
+ * @pts: Presentation Timestamp
+ * @seqnum: Sequence number
+ * @count: Number of packet this item represent
+ * @rtptime: The RTP specific timestamp
+ * @free_data: A function to free @data (optional)
+ *
+ * Create an item that can then be stored in the jitter buffer.
+ *
+ * Returns: a newly allocated RTPJitterbufferItem
+ */
+static RTPJitterBufferItem *
+rtp_jitter_buffer_alloc_item (gpointer data, guint type, GstClockTime dts,
+    GstClockTime pts, guint seqnum, guint count, guint rtptime,
+    GDestroyNotify free_data)
+{
+  RTPJitterBufferItem *item = g_slice_new (RTPJitterBufferItem);
+
+  /* the first three members mirror a GList link; a fresh item starts out
+   * unlinked */
+  item->data = data;
+  item->prev = item->next = NULL;
+
+  /* item metadata */
+  item->type = type;
+  item->dts = dts;
+  item->pts = pts;
+  item->seqnum = seqnum;
+  item->count = count;
+  item->rtptime = rtptime;
+  item->free_data = free_data;
+
+  return item;
+}
+
+/* Wrap @event in a jitterbuffer item: no seqnum (-1, so it is appended), no
+ * timestamps, and the event is unreffed together with the item. */
+static inline RTPJitterBufferItem *
+alloc_event_item (GstEvent * event)
+{
+  return rtp_jitter_buffer_alloc_item (event, ITEM_TYPE_EVENT, -1, -1, -1, 0,
+      -1, (GDestroyNotify) gst_mini_object_unref);
+}
+
+/**
+ * rtp_jitter_buffer_append_event:
+ * @jbuf: an #RTPJitterBuffer
+ * @event: an #GstEvent to insert
+ *
+ * Inserts @event into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the event is at the head of the queue
+ */
+gboolean
+rtp_jitter_buffer_append_event (RTPJitterBuffer * jbuf, GstEvent * event)
+{
+  gboolean at_head = FALSE;
+  RTPJitterBufferItem *item = alloc_event_item (event);
+
+  /* events carry no seqnum, so the insert cannot report a duplicate */
+  rtp_jitter_buffer_insert (jbuf, item, &at_head, NULL);
+
+  return at_head;
+}
+
+/**
+ * rtp_jitter_buffer_append_query:
+ * @jbuf: an #RTPJitterBuffer
+ * @query: an #GstQuery to insert
+ *
+ * Inserts @query into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the query is at the head of the queue
+ */
+gboolean
+rtp_jitter_buffer_append_query (RTPJitterBuffer * jbuf, GstQuery * query)
+{
+  gboolean at_head = FALSE;
+  RTPJitterBufferItem *item;
+
+  /* queries are not owned by the item (no free function) and carry no
+   * seqnum, so the insert cannot report a duplicate */
+  item = rtp_jitter_buffer_alloc_item (query, ITEM_TYPE_QUERY, -1, -1, -1, 0,
+      -1, NULL);
+  rtp_jitter_buffer_insert (jbuf, item, &at_head, NULL);
+
+  return at_head;
+}
+
+/**
+ * rtp_jitter_buffer_append_lost_event:
+ * @jbuf: an #RTPJitterBuffer
+ * @event: an #GstEvent to insert
+ * @seqnum: Sequence number
+ * @lost_packets: Number of lost packet this item represent
+ *
+ * Inserts @event into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the event is at the head of the queue
+ */
+gboolean
+rtp_jitter_buffer_append_lost_event (RTPJitterBuffer * jbuf, GstEvent * event,
+    guint16 seqnum, guint lost_packets)
+{
+  gboolean at_head = FALSE;
+  RTPJitterBufferItem *item;
+
+  item = rtp_jitter_buffer_alloc_item (event, ITEM_TYPE_LOST, -1, -1, seqnum,
+      lost_packets, -1, (GDestroyNotify) gst_mini_object_unref);
+
+  /* a packet with this seqnum may already be queued; free the unused lost
+   * item (and its event) in that case */
+  if (!rtp_jitter_buffer_insert (jbuf, item, &at_head, NULL)) {
+    rtp_jitter_buffer_free_item (item);
+    at_head = FALSE;
+  }
+
+  return at_head;
+}
+
+/**
+ * rtp_jitter_buffer_append_buffer:
+ * @jbuf: an #RTPJitterBuffer
+ * @buf: an #GstBuffer to insert
+ * @seqnum: Sequence number
+ * @duplicate: TRUE when the packet inserted is a duplicate
+ * @percent: the buffering percent after insertion
+ *
+ * Inserts @buf into the packet queue of @jbuf.
+ *
+ * Returns: %TRUE if the buffer is at the head of the queue
+ */
+gboolean
+rtp_jitter_buffer_append_buffer (RTPJitterBuffer * jbuf, GstBuffer * buf,
+    GstClockTime dts, GstClockTime pts, guint16 seqnum, guint rtptime,
+    gboolean * duplicate, gint * percent)
+{
+  gboolean at_head = FALSE;
+  gboolean ok;
+  RTPJitterBufferItem *item;
+
+  item = rtp_jitter_buffer_alloc_item (buf, ITEM_TYPE_BUFFER, dts, pts,
+      seqnum, 1, rtptime, (GDestroyNotify) gst_mini_object_unref);
+
+  ok = rtp_jitter_buffer_insert (jbuf, item, &at_head, percent);
+  if (!ok) {
+    /* duplicate seqnum: the item was not linked, drop it and the buffer */
+    rtp_jitter_buffer_free_item (item);
+  }
+
+  if (duplicate != NULL)
+    *duplicate = !ok;
+
+  return at_head;
+}
+
+/**
+ * rtp_jitter_buffer_pop:
+ * @jbuf: an #RTPJitterBuffer
+ * @percent: the buffering percent
+ *
+ * Pops the oldest buffer from the packet queue of @jbuf. The popped buffer will
+ * have its timestamp adjusted with the incoming running_time and the detected
+ * clock skew.
+ *
+ * Returns: an #RTPJitterBufferItem or %NULL when there was no packet in the queue.
+ */
+RTPJitterBufferItem *
+rtp_jitter_buffer_pop (RTPJitterBuffer * jbuf, gint * percent)
+{
+  GList *item = NULL;
+  GQueue *queue;
+
+  g_return_val_if_fail (jbuf != NULL, NULL);
+
+  queue = &jbuf->packets;
+
+  /* unlink the head of the queue, if any */
+  item = queue->head;
+  if (item) {
+    queue->head = item->next;
+    if (queue->head)
+      queue->head->prev = NULL;
+    else
+      queue->tail = NULL;
+    queue->length--;
+  }
+
+  /* buffering mode, update buffer stats */
+  if (jbuf->mode == RTP_JITTER_BUFFER_MODE_BUFFER)
+    update_buffer_level (jbuf, percent);
+  else if (percent)
+    *percent = -1;
+
+  /* let's clear the pointers so we can ensure we don't free items that are
+   * still in the jitterbuffer. Guard against an empty queue: the previous
+   * code dereferenced a NULL item here. */
+  if (item)
+    item->next = item->prev = NULL;
+
+  return (RTPJitterBufferItem *) item;
+}
+
+/**
+ * rtp_jitter_buffer_peek:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Peek the oldest buffer from the packet queue of @jbuf.
+ *
+ * See rtp_jitter_buffer_insert() to check when an older packet was
+ * added.
+ *
+ * Returns: an #RTPJitterBufferItem or %NULL when there was no packet in the queue.
+ */
+RTPJitterBufferItem *
+rtp_jitter_buffer_peek (RTPJitterBuffer * jbuf)
+{
+  g_return_val_if_fail (jbuf != NULL, NULL);
+
+  /* the head link doubles as the item thanks to the shared GList layout */
+  return (RTPJitterBufferItem *) g_queue_peek_head_link (&jbuf->packets);
+}
+
+/**
+ * rtp_jitter_buffer_flush:
+ * @jbuf: an #RTPJitterBuffer
+ * @free_func: function to free each item (optional)
+ * @user_data: user data passed to @free_func
+ *
+ * Flush all packets from the jitterbuffer.
+ */
+void
+rtp_jitter_buffer_flush (RTPJitterBuffer * jbuf, GFunc free_func,
+    gpointer user_data)
+{
+  GList *link;
+  GFunc destroy;
+
+  g_return_if_fail (jbuf != NULL);
+
+  /* default to the item destructor when the caller passes no free function */
+  destroy = (free_func != NULL) ? free_func : (GFunc) rtp_jitter_buffer_free_item;
+
+  while ((link = g_queue_pop_head_link (&jbuf->packets)) != NULL)
+    destroy ((RTPJitterBufferItem *) link, user_data);
+}
+
+/**
+ * rtp_jitter_buffer_is_buffering:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Check if @jbuf is buffering currently. Users of the jitterbuffer should not
+ * pop packets while in buffering mode.
+ *
+ * Returns: the buffering state of @jbuf
+ */
+gboolean
+rtp_jitter_buffer_is_buffering (RTPJitterBuffer * jbuf)
+{
+  /* a disabled jitterbuffer never reports buffering */
+  if (jbuf->buffering_disabled)
+    return FALSE;
+
+  return jbuf->buffering;
+}
+
+/**
+ * rtp_jitter_buffer_set_buffering:
+ * @jbuf: an #RTPJitterBuffer
+ * @buffering: the new buffering state
+ *
+ * Forces @jbuf to go into the buffering state.
+ */
+void
+rtp_jitter_buffer_set_buffering (RTPJitterBuffer * jbuf, gboolean buffering)
+{
+  /* state is set directly; no notification or level recalculation here */
+  jbuf->buffering = buffering;
+}
+
+/**
+ * rtp_jitter_buffer_get_percent:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the buffering percent of the jitterbuffer.
+ *
+ * Returns: the buffering percent
+ */
+gint
+rtp_jitter_buffer_get_percent (RTPJitterBuffer * jbuf)
+{
+  guint64 level;
+  gint percent;
+
+  /* report fully buffered when there is no high watermark configured or
+   * when buffering is disabled */
+  if (G_UNLIKELY (jbuf->high_level == 0 || jbuf->buffering_disabled))
+    return 100;
+
+  level = get_buffer_level (jbuf);
+  percent = (gint) (level * 100 / jbuf->high_level);
+
+  /* clamp: the level may exceed the high watermark */
+  return MIN (percent, 100);
+}
+
+/**
+ * rtp_jitter_buffer_num_packets:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the number of packets currently in @jbuf.
+ *
+ * Returns: The number of packets in @jbuf.
+ */
+guint
+rtp_jitter_buffer_num_packets (RTPJitterBuffer * jbuf)
+{
+  g_return_val_if_fail (jbuf != NULL, 0);
+
+  /* note: this counts all queued items, including events and queries */
+  return g_queue_get_length (&jbuf->packets);
+}
+
+/**
+ * rtp_jitter_buffer_get_ts_diff:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the difference between the timestamps of first and last packet in the
+ * jitterbuffer.
+ *
+ * Returns: The difference expressed in the timestamp units of the packets.
+ */
+guint32
+rtp_jitter_buffer_get_ts_diff (RTPJitterBuffer * jbuf)
+{
+  RTPJitterBufferItem *newest, *oldest;
+  guint64 hi, lo;
+
+  g_return_val_if_fail (jbuf != NULL, 0);
+
+  newest = (RTPJitterBufferItem *) g_queue_peek_tail_link (&jbuf->packets);
+  oldest = (RTPJitterBufferItem *) g_queue_peek_head_link (&jbuf->packets);
+
+  /* need at least two distinct items to have a span */
+  if (newest == NULL || oldest == NULL || newest == oldest)
+    return 0;
+
+  hi = newest->rtptime;
+  lo = oldest->rtptime;
+
+  if (hi >= lo)
+    return (guint32) (hi - lo);
+
+  /* the 32-bit rtptime wrapped between the oldest and newest packet;
+   * 64-bit arithmetic keeps the +1 from overflowing */
+  return (guint32) (hi + G_MAXUINT32 + 1 - lo);
+}
+
+
+/*
+ * rtp_jitter_buffer_get_seqnum_diff:
+ * @jbuf: an #RTPJitterBuffer
+ *
+ * Get the difference between the seqnum of first and last packet in the
+ * jitterbuffer.
+ *
+ * Returns: The difference expressed in seqnum.
+ */
+static guint16
+rtp_jitter_buffer_get_seqnum_diff (RTPJitterBuffer * jbuf)
+{
+  guint32 high_seqnum, low_seqnum;
+  RTPJitterBufferItem *high_buf, *low_buf;
+  guint16 result;
+
+  g_return_val_if_fail (jbuf != NULL, 0);
+
+  high_buf = (RTPJitterBufferItem *) g_queue_peek_tail_link (&jbuf->packets);
+  low_buf = (RTPJitterBufferItem *) g_queue_peek_head_link (&jbuf->packets);
+
+  /* skip non-packet items (events and queries have seqnum -1) by walking
+   * inwards from both ends; items alias GList links, so prev/next work */
+  while (high_buf && high_buf->seqnum == -1)
+    high_buf = (RTPJitterBufferItem *) high_buf->prev;
+
+  while (low_buf && low_buf->seqnum == -1)
+    low_buf = (RTPJitterBufferItem *) low_buf->next;
+
+  /* need at least two distinct packets to have a span */
+  if (!high_buf || !low_buf || high_buf == low_buf)
+    return 0;
+
+  high_seqnum = high_buf->seqnum;
+  low_seqnum = low_buf->seqnum;
+
+  /* it needs to work if ts wraps */
+  if (high_seqnum >= low_seqnum) {
+    result = (guint32) (high_seqnum - low_seqnum);
+  } else {
+    /* the 16-bit seqnum wrapped between head and tail; the sum still fits
+     * in 16 bits, so the narrowing store below is lossless */
+    result = (guint32) (high_seqnum + G_MAXUINT16 + 1 - low_seqnum);
+  }
+  return result;
+}
+
+/**
+ * rtp_jitter_buffer_get_sync:
+ * @jbuf: an #RTPJitterBuffer
+ * @rtptime: result RTP time
+ * @timestamp: result GStreamer timestamp
+ * @clock_rate: clock-rate of @rtptime
+ * @last_rtptime: last seen rtptime.
+ *
+ * Calculates the relation between the RTP timestamp and the GStreamer timestamp
+ * used for constructing timestamps.
+ *
+ * For extended RTP timestamp @rtptime with a clock-rate of @clock_rate,
+ * the GStreamer timestamp is currently @timestamp.
+ *
+ * The last seen extended RTP timestamp with clock-rate @clock-rate is returned in
+ * @last_rtptime.
+ */
+void
+rtp_jitter_buffer_get_sync (RTPJitterBuffer * jbuf, guint64 * rtptime,
+    guint64 * timestamp, guint32 * clock_rate, guint64 * last_rtptime)
+{
+  /* every out parameter is optional; fill only those that were supplied */
+  if (last_rtptime != NULL)
+    *last_rtptime = jbuf->last_rtptime;
+  if (clock_rate != NULL)
+    *clock_rate = jbuf->clock_rate;
+  /* the GStreamer timestamp includes the measured clock skew */
+  if (timestamp != NULL)
+    *timestamp = jbuf->base_time + jbuf->skew;
+  if (rtptime != NULL)
+    *rtptime = jbuf->base_extrtp;
+}
+
+/**
+ * rtp_jitter_buffer_can_fast_start:
+ * @jbuf: an #RTPJitterBuffer
+ * @num_packets: Number of consecutive packets needed
+ *
+ * Check if in the queue if there is enough packets with consecutive seqnum in
+ * order to start delivering them.
+ *
+ * Returns: %TRUE if the required number of consecutive packets was found.
+ */
+gboolean
+rtp_jitter_buffer_can_fast_start (RTPJitterBuffer * jbuf, gint num_packet)
+{
+  RTPJitterBufferItem *prev, *cur;
+  gint i;
+
+  /* not enough queued items to possibly satisfy the request */
+  if (rtp_jitter_buffer_num_packets (jbuf) < num_packet)
+    return FALSE;
+
+  /* walk the first num_packet items and verify consecutive seqnums */
+  prev = NULL;
+  cur = rtp_jitter_buffer_peek (jbuf);
+  for (i = 0; i < num_packet; i++) {
+    if (G_LIKELY (prev != NULL)) {
+      guint16 expected = prev->seqnum + 1;
+
+      if (expected != cur->seqnum)
+        return FALSE;
+    }
+
+    prev = cur;
+    cur = (RTPJitterBufferItem *) prev->next;
+  }
+
+  return TRUE;
+}
+
+gboolean
+rtp_jitter_buffer_is_full (RTPJitterBuffer * jbuf)
+{
+  /* full only when both the seqnum span and the packet count are excessive */
+  if (rtp_jitter_buffer_get_seqnum_diff (jbuf) < 32765)
+    return FALSE;
+
+  return rtp_jitter_buffer_num_packets (jbuf) > 10000;
+}
+
+
+/**
+ * rtp_jitter_buffer_free_item:
+ * @item: the item to be freed
+ *
+ * Free the jitter buffer item.
+ */
+void
+rtp_jitter_buffer_free_item (RTPJitterBufferItem * item)
+{
+  g_return_if_fail (item != NULL);
+  /* the item must have been unlinked from the queue already */
+  g_return_if_fail (item->next == NULL);
+  g_return_if_fail (item->prev == NULL);
+
+  /* release the payload through the optional destructor */
+  if (item->free_data != NULL && item->data != NULL)
+    item->free_data (item->data);
+
+  g_slice_free (RTPJitterBufferItem, item);
+}
diff --git a/gst/rtpmanager/rtpjitterbuffer.h b/gst/rtpmanager/rtpjitterbuffer.h
new file mode 100644
index 0000000000..8accee4b40
--- /dev/null
+++ b/gst/rtpmanager/rtpjitterbuffer.h
@@ -0,0 +1,221 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __RTP_JITTER_BUFFER_H__
+#define __RTP_JITTER_BUFFER_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstrtcpbuffer.h>
+
+typedef struct _RTPJitterBuffer RTPJitterBuffer;
+typedef struct _RTPJitterBufferClass RTPJitterBufferClass;
+typedef struct _RTPJitterBufferItem RTPJitterBufferItem;
+
+#define RTP_TYPE_JITTER_BUFFER (rtp_jitter_buffer_get_type())
+#define RTP_JITTER_BUFFER(src) (G_TYPE_CHECK_INSTANCE_CAST((src),RTP_TYPE_JITTER_BUFFER,RTPJitterBuffer))
+#define RTP_JITTER_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),RTP_TYPE_JITTER_BUFFER,RTPJitterBufferClass))
+#define RTP_IS_JITTER_BUFFER(src) (G_TYPE_CHECK_INSTANCE_TYPE((src),RTP_TYPE_JITTER_BUFFER))
+#define RTP_IS_JITTER_BUFFER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),RTP_TYPE_JITTER_BUFFER))
+#define RTP_JITTER_BUFFER_CAST(src) ((RTPJitterBuffer *)(src))
+
+/**
+ * RTPJitterBufferMode:
+ * @RTP_JITTER_BUFFER_MODE_NONE: don't do any skew correction, outgoing
+ * timestamps are calculated directly from the RTP timestamps. This mode is
+ * good for recording but not for real-time applications.
+ * @RTP_JITTER_BUFFER_MODE_SLAVE: calculate the skew between sender and receiver
+ * and produce smoothed adjusted outgoing timestamps. This mode is good for
+ * low latency communications.
+ * @RTP_JITTER_BUFFER_MODE_BUFFER: buffer packets between low/high watermarks.
+ * This mode is good for streaming communication.
+ * @RTP_JITTER_BUFFER_MODE_SYNCED: sender and receiver clocks are synchronized,
+ * like #RTP_JITTER_BUFFER_MODE_SLAVE but skew is assumed to be 0. Good for
+ * low latency communication when sender and receiver clocks are
+ * synchronized and there is thus no clock skew.
+ * @RTP_JITTER_BUFFER_MODE_LAST: last buffer mode.
+ *
+ * The different buffer modes for a jitterbuffer.
+ */
+typedef enum {
+ RTP_JITTER_BUFFER_MODE_NONE = 0,
+ RTP_JITTER_BUFFER_MODE_SLAVE = 1,
+ RTP_JITTER_BUFFER_MODE_BUFFER = 2,
+ /* FIXME 3 is missing because it was used for 'auto' in jitterbuffer */
+ RTP_JITTER_BUFFER_MODE_SYNCED = 4,
+ RTP_JITTER_BUFFER_MODE_LAST
+} RTPJitterBufferMode;
+
+#define RTP_TYPE_JITTER_BUFFER_MODE (rtp_jitter_buffer_mode_get_type())
+GType rtp_jitter_buffer_mode_get_type (void);
+
+#define RTP_JITTER_BUFFER_MAX_WINDOW 512
+/**
+ * RTPJitterBuffer:
+ *
+ * A JitterBuffer in the #RTPSession
+ */
+struct _RTPJitterBuffer {
+  GObject object;
+
+  /* queue of RTPJitterBufferItem, kept sorted by seqnum */
+  GQueue packets;
+
+  RTPJitterBufferMode mode;
+
+  GstClockTime delay;
+
+  /* for buffering */
+  gboolean buffering;
+  guint64 low_level;
+  guint64 high_level;
+
+  /* for calculating skew */
+  gboolean need_resync;
+  GstClockTime base_time;
+  GstClockTime base_rtptime;
+  /* set to -1 when the RFC7273 media clock must be resynced */
+  GstClockTime media_clock_base_time;
+  guint32 clock_rate;
+  GstClockTime base_extrtp;
+  /* previous outgoing timestamp, used to catch backwards timestamps */
+  GstClockTime prev_out_time;
+  guint64 ext_rtptime;
+  guint64 last_rtptime;
+  gint64 window[RTP_JITTER_BUFFER_MAX_WINDOW];
+  guint window_pos;
+  guint window_size;
+  gboolean window_filling;
+  gint64 window_min;
+  gint64 skew;
+  /* previous send diff, used together with prev_out_time */
+  gint64 prev_send_diff;
+  /* when TRUE, is_buffering() is FALSE and get_percent() reports 100 */
+  gboolean buffering_disabled;
+
+  /* NOTE(review): clock_lock presumably guards the clock fields below —
+   * confirm against the .c implementation */
+  GMutex clock_lock;
+  GstClock *pipeline_clock;
+  GstClock *media_clock;
+  gulong media_clock_synced_id;
+  guint64 media_clock_offset;
+
+  gboolean rfc7273_sync;
+};
+
+struct _RTPJitterBufferClass {
+ GObjectClass parent_class;
+};
+
+#define IS_DROPABLE(it) (((it)->type == ITEM_TYPE_BUFFER) || ((it)->type == ITEM_TYPE_LOST))
+#define ITEM_TYPE_BUFFER 0
+#define ITEM_TYPE_LOST 1
+#define ITEM_TYPE_EVENT 2
+#define ITEM_TYPE_QUERY 3
+
+/**
+ * RTPJitterBufferItem:
+ * @data: the data of the item
+ * @next: pointer to next item
+ * @prev: pointer to previous item
+ * @type: the type of @data, used freely by caller
+ * @dts: input DTS
+ * @pts: output PTS
+ * @seqnum: seqnum, the seqnum is used to insert the item in the
+ * right position in the jitterbuffer and detect duplicates. Use -1 to
+ * append.
+ * @count: amount of seqnum in this item
+ * @rtptime: rtp timestamp
+ * @data_free: Function to free @data (optional)
+ *
+ * An object containing an RTP packet or event. First members of this structure
+ * copied from GList so they can be inserted into lists without doing more
+ * allocations.
+ */
+struct _RTPJitterBufferItem {
+ /* a GList */
+ gpointer data;
+ GList *next;
+ GList *prev;
+
+ /* item metadata */
+ guint type;
+ GstClockTime dts;
+ GstClockTime pts;
+ guint seqnum;
+ guint count;
+ guint rtptime;
+
+ GDestroyNotify free_data;
+};
+
+GType rtp_jitter_buffer_get_type (void);
+
+/* managing lifetime */
+RTPJitterBuffer* rtp_jitter_buffer_new (void);
+
+RTPJitterBufferMode rtp_jitter_buffer_get_mode (RTPJitterBuffer *jbuf);
+void rtp_jitter_buffer_set_mode (RTPJitterBuffer *jbuf, RTPJitterBufferMode mode);
+
+GstClockTime rtp_jitter_buffer_get_delay (RTPJitterBuffer *jbuf);
+void rtp_jitter_buffer_set_delay (RTPJitterBuffer *jbuf, GstClockTime delay);
+
+void rtp_jitter_buffer_set_clock_rate (RTPJitterBuffer *jbuf, guint32 clock_rate);
+guint32 rtp_jitter_buffer_get_clock_rate (RTPJitterBuffer *jbuf);
+
+void rtp_jitter_buffer_set_media_clock (RTPJitterBuffer *jbuf, GstClock * clock, guint64 clock_offset);
+void rtp_jitter_buffer_set_pipeline_clock (RTPJitterBuffer *jbuf, GstClock * clock);
+
+gboolean rtp_jitter_buffer_get_rfc7273_sync (RTPJitterBuffer *jbuf);
+void rtp_jitter_buffer_set_rfc7273_sync (RTPJitterBuffer *jbuf, gboolean rfc7273_sync);
+
+void rtp_jitter_buffer_reset_skew (RTPJitterBuffer *jbuf);
+
+gboolean rtp_jitter_buffer_append_event (RTPJitterBuffer * jbuf, GstEvent * event);
+gboolean rtp_jitter_buffer_append_query (RTPJitterBuffer * jbuf, GstQuery * query);
+gboolean rtp_jitter_buffer_append_lost_event (RTPJitterBuffer * jbuf, GstEvent * event,
+ guint16 seqnum, guint lost_packets);
+gboolean rtp_jitter_buffer_append_buffer (RTPJitterBuffer * jbuf, GstBuffer * buf,
+ GstClockTime dts, GstClockTime pts,
+ guint16 seqnum, guint rtptime,
+ gboolean * duplicate, gint * percent);
+
+void rtp_jitter_buffer_disable_buffering (RTPJitterBuffer *jbuf, gboolean disabled);
+
+RTPJitterBufferItem * rtp_jitter_buffer_peek (RTPJitterBuffer *jbuf);
+RTPJitterBufferItem * rtp_jitter_buffer_pop (RTPJitterBuffer *jbuf, gint *percent);
+
+void rtp_jitter_buffer_flush (RTPJitterBuffer *jbuf,
+ GFunc free_func, gpointer user_data);
+
+gboolean rtp_jitter_buffer_is_buffering (RTPJitterBuffer * jbuf);
+void rtp_jitter_buffer_set_buffering (RTPJitterBuffer * jbuf, gboolean buffering);
+gint rtp_jitter_buffer_get_percent (RTPJitterBuffer * jbuf);
+
+guint rtp_jitter_buffer_num_packets (RTPJitterBuffer *jbuf);
+guint32 rtp_jitter_buffer_get_ts_diff (RTPJitterBuffer *jbuf);
+
+void rtp_jitter_buffer_get_sync (RTPJitterBuffer *jbuf, guint64 *rtptime,
+ guint64 *timestamp, guint32 *clock_rate,
+ guint64 *last_rtptime);
+
+GstClockTime rtp_jitter_buffer_calculate_pts (RTPJitterBuffer * jbuf, GstClockTime dts, gboolean estimated_dts,
+ guint32 rtptime, GstClockTime base_time, gint gap,
+ gboolean is_rtx);
+
+gboolean rtp_jitter_buffer_can_fast_start (RTPJitterBuffer * jbuf, gint num_packet);
+
+gboolean rtp_jitter_buffer_is_full (RTPJitterBuffer * jbuf);
+
+void rtp_jitter_buffer_free_item (RTPJitterBufferItem * item);
+
+#endif /* __RTP_JITTER_BUFFER_H__ */
diff --git a/gst/rtpmanager/rtpsession.c b/gst/rtpmanager/rtpsession.c
new file mode 100644
index 0000000000..04a73b9d42
--- /dev/null
+++ b/gst/rtpmanager/rtpsession.c
@@ -0,0 +1,4882 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/* FIXME 0.11: suppress warnings for deprecated API such as GValueArray
+ * with newer GLib versions (>= 2.31.0) */
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include <string.h>
+#include <stdlib.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/rtp/gstrtcpbuffer.h>
+
+#include <gst/glib-compat-private.h>
+
+#include "rtpsession.h"
+
+GST_DEBUG_CATEGORY (rtp_session_debug);
+#define GST_CAT_DEFAULT rtp_session_debug
+
+/* signals and args */
+enum
+{
+ SIGNAL_GET_SOURCE_BY_SSRC,
+ SIGNAL_ON_NEW_SSRC,
+ SIGNAL_ON_SSRC_COLLISION,
+ SIGNAL_ON_SSRC_VALIDATED,
+ SIGNAL_ON_SSRC_ACTIVE,
+ SIGNAL_ON_SSRC_SDES,
+ SIGNAL_ON_BYE_SSRC,
+ SIGNAL_ON_BYE_TIMEOUT,
+ SIGNAL_ON_TIMEOUT,
+ SIGNAL_ON_SENDER_TIMEOUT,
+ SIGNAL_ON_SENDING_RTCP,
+ SIGNAL_ON_APP_RTCP,
+ SIGNAL_ON_FEEDBACK_RTCP,
+ SIGNAL_SEND_RTCP,
+ SIGNAL_SEND_RTCP_FULL,
+ SIGNAL_ON_RECEIVING_RTCP,
+ SIGNAL_ON_NEW_SENDER_SSRC,
+ SIGNAL_ON_SENDER_SSRC_ACTIVE,
+ SIGNAL_ON_SENDING_NACKS,
+ LAST_SIGNAL
+};
+
+#define DEFAULT_INTERNAL_SOURCE NULL
+#define DEFAULT_BANDWIDTH 0.0
+#define DEFAULT_RTCP_FRACTION RTP_STATS_RTCP_FRACTION
+#define DEFAULT_RTCP_RR_BANDWIDTH -1
+#define DEFAULT_RTCP_RS_BANDWIDTH -1
+#define DEFAULT_RTCP_MTU 1400
+#define DEFAULT_SDES NULL
+#define DEFAULT_NUM_SOURCES 0
+#define DEFAULT_NUM_ACTIVE_SOURCES 0
+#define DEFAULT_SOURCES NULL
+#define DEFAULT_RTCP_MIN_INTERVAL (RTP_STATS_MIN_INTERVAL * GST_SECOND)
+#define DEFAULT_RTCP_FEEDBACK_RETENTION_WINDOW (2 * GST_SECOND)
+#define DEFAULT_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD (3)
+#define DEFAULT_PROBATION RTP_DEFAULT_PROBATION
+#define DEFAULT_MAX_DROPOUT_TIME 60000
+#define DEFAULT_MAX_MISORDER_TIME 2000
+#define DEFAULT_RTP_PROFILE GST_RTP_PROFILE_AVP
+#define DEFAULT_RTCP_REDUCED_SIZE FALSE
+#define DEFAULT_RTCP_DISABLE_SR_TIMESTAMP FALSE
+#define DEFAULT_TWCC_FEEDBACK_INTERVAL GST_CLOCK_TIME_NONE
+
+enum
+{
+ PROP_0,
+ PROP_INTERNAL_SSRC,
+ PROP_INTERNAL_SOURCE,
+ PROP_BANDWIDTH,
+ PROP_RTCP_FRACTION,
+ PROP_RTCP_RR_BANDWIDTH,
+ PROP_RTCP_RS_BANDWIDTH,
+ PROP_RTCP_MTU,
+ PROP_SDES,
+ PROP_NUM_SOURCES,
+ PROP_NUM_ACTIVE_SOURCES,
+ PROP_SOURCES,
+ PROP_FAVOR_NEW,
+ PROP_RTCP_MIN_INTERVAL,
+ PROP_RTCP_FEEDBACK_RETENTION_WINDOW,
+ PROP_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD,
+ PROP_PROBATION,
+ PROP_MAX_DROPOUT_TIME,
+ PROP_MAX_MISORDER_TIME,
+ PROP_STATS,
+ PROP_RTP_PROFILE,
+ PROP_RTCP_REDUCED_SIZE,
+ PROP_RTCP_DISABLE_SR_TIMESTAMP,
+ PROP_TWCC_FEEDBACK_INTERVAL,
+};
+
+/* update average packet size */
+#define INIT_AVG(avg, val) \
+ (avg) = (val);
+#define UPDATE_AVG(avg, val) \
+ if ((avg) == 0) \
+ (avg) = (val); \
+ else \
+ (avg) = ((val) + (15 * (avg))) >> 4;
+
+/* GObject vmethods */
+static void rtp_session_finalize (GObject * object);
+static void rtp_session_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void rtp_session_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean rtp_session_send_rtcp (RTPSession * sess,
+ GstClockTime max_delay);
+static gboolean rtp_session_send_rtcp_with_deadline (RTPSession * sess,
+ GstClockTime deadline);
+
+static guint rtp_session_signals[LAST_SIGNAL] = { 0 };
+
+G_DEFINE_TYPE (RTPSession, rtp_session, G_TYPE_OBJECT);
+
+static guint32 rtp_session_create_new_ssrc (RTPSession * sess);
+static RTPSource *obtain_source (RTPSession * sess, guint32 ssrc,
+ gboolean * created, RTPPacketInfo * pinfo, gboolean rtp);
+static RTPSource *obtain_internal_source (RTPSession * sess,
+ guint32 ssrc, gboolean * created, GstClockTime current_time);
+static GstFlowReturn rtp_session_schedule_bye_locked (RTPSession * sess,
+ GstClockTime current_time);
+static GstClockTime calculate_rtcp_interval (RTPSession * sess,
+ gboolean deterministic, gboolean first);
+
+/* Signal accumulator: latches TRUE into the return value once any handler
+ * returns TRUE; emission always continues. */
+static gboolean
+accumulate_trues (GSignalInvocationHint * ihint, GValue * return_accu,
+    const GValue * handler_return, gpointer data)
+{
+  gboolean handler_said_true = g_value_get_boolean (handler_return);
+
+  if (handler_said_true)
+    g_value_set_boolean (return_accu, TRUE);
+
+  /* keep invoking the remaining handlers regardless */
+  return TRUE;
+}
+
+static void
+rtp_session_class_init (RTPSessionClass * klass)
+{
+ GObjectClass *gobject_class;
+
+ gobject_class = (GObjectClass *) klass;
+
+ gobject_class->finalize = rtp_session_finalize;
+ gobject_class->set_property = rtp_session_set_property;
+ gobject_class->get_property = rtp_session_get_property;
+
+ /**
+ * RTPSession::get-source-by-ssrc:
+ * @session: the object which received the signal
+ * @ssrc: the SSRC of the RTPSource
+ *
+ * Request the #RTPSource object with SSRC @ssrc in @session.
+ */
+ rtp_session_signals[SIGNAL_GET_SOURCE_BY_SSRC] =
+ g_signal_new ("get-source-by-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (RTPSessionClass,
+ get_source_by_ssrc), NULL, NULL, NULL,
+ RTP_TYPE_SOURCE, 1, G_TYPE_UINT);
+
+ /**
+ * RTPSession::on-new-ssrc:
+ * @session: the object which received the signal
+ * @src: the new RTPSource
+ *
+ * Notify of a new SSRC that entered @session.
+ */
+ rtp_session_signals[SIGNAL_ON_NEW_SSRC] =
+ g_signal_new ("on-new-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_new_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-ssrc-collision:
+ * @session: the object which received the signal
+ * @src: the #RTPSource that caused a collision
+ *
+ * Notify when we have an SSRC collision
+ */
+ rtp_session_signals[SIGNAL_ON_SSRC_COLLISION] =
+ g_signal_new ("on-ssrc-collision", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_ssrc_collision),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-ssrc-validated:
+ * @session: the object which received the signal
+ * @src: the new validated RTPSource
+ *
+ * Notify of a new SSRC that became validated.
+ */
+ rtp_session_signals[SIGNAL_ON_SSRC_VALIDATED] =
+ g_signal_new ("on-ssrc-validated", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_ssrc_validated),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-ssrc-active:
+ * @session: the object which received the signal
+ * @src: the active RTPSource
+ *
+ * Notify of a SSRC that is active, i.e., sending RTCP.
+ */
+ rtp_session_signals[SIGNAL_ON_SSRC_ACTIVE] =
+ g_signal_new ("on-ssrc-active", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_ssrc_active),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-ssrc-sdes:
+ * @session: the object which received the signal
+ * @src: the RTPSource
+ *
+ * Notify that a new SDES was received for SSRC.
+ */
+ rtp_session_signals[SIGNAL_ON_SSRC_SDES] =
+ g_signal_new ("on-ssrc-sdes", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_ssrc_sdes),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-bye-ssrc:
+ * @session: the object which received the signal
+ * @src: the RTPSource that went away
+ *
+ * Notify of an SSRC that became inactive because of a BYE packet.
+ */
+ rtp_session_signals[SIGNAL_ON_BYE_SSRC] =
+ g_signal_new ("on-bye-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_bye_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-bye-timeout:
+ * @session: the object which received the signal
+ * @src: the RTPSource that timed out
+ *
+ * Notify of an SSRC that has timed out because of BYE
+ */
+ rtp_session_signals[SIGNAL_ON_BYE_TIMEOUT] =
+ g_signal_new ("on-bye-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_bye_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-timeout:
+ * @session: the object which received the signal
+ * @src: the RTPSource that timed out
+ *
+ * Notify of an SSRC that has timed out
+ */
+ rtp_session_signals[SIGNAL_ON_TIMEOUT] =
+ g_signal_new ("on-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+ /**
+ * RTPSession::on-sender-timeout:
+ * @session: the object which received the signal
+ * @src: the RTPSource that timed out
+ *
+ * Notify of an SSRC that was a sender but timed out and became a receiver.
+ */
+ rtp_session_signals[SIGNAL_ON_SENDER_TIMEOUT] =
+ g_signal_new ("on-sender-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_sender_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+
+ /**
+ * RTPSession::on-sending-rtcp:
+ * @session: the object which received the signal
+ * @buffer: the #GstBuffer containing the RTCP packet about to be sent
+ * @early: %TRUE if the packet is early, %FALSE if it is regular
+ *
+ * This signal is emitted before sending an RTCP packet, it can be used
+ * to add extra RTCP Packets.
+ *
+ * Returns: %TRUE if the RTCP buffer should NOT be suppressed, %FALSE
+ * if suppressing it is acceptable
+ */
+ rtp_session_signals[SIGNAL_ON_SENDING_RTCP] =
+ g_signal_new ("on-sending-rtcp", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_sending_rtcp),
+ accumulate_trues, NULL, NULL, G_TYPE_BOOLEAN, 2,
+ GST_TYPE_BUFFER | G_SIGNAL_TYPE_STATIC_SCOPE, G_TYPE_BOOLEAN);
+
+ /**
+ * RTPSession::on-app-rtcp:
+ * @session: the object which received the signal
+ * @subtype: The subtype of the packet
+ * @ssrc: The SSRC/CSRC of the packet
+ * @name: The name of the packet
+ * @data: a #GstBuffer with the application-dependent data or %NULL if
+ * there was no data
+ *
+ * Notify that a RTCP APP packet has been received
+ */
+ rtp_session_signals[SIGNAL_ON_APP_RTCP] =
+ g_signal_new ("on-app-rtcp", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_app_rtcp),
+ NULL, NULL, NULL, G_TYPE_NONE, 4, G_TYPE_UINT, G_TYPE_UINT,
+ G_TYPE_STRING, GST_TYPE_BUFFER);
+
+ /**
+ * RTPSession::on-feedback-rtcp:
+ * @session: the object which received the signal
+ * @type: Type of RTCP packet, will be %GST_RTCP_TYPE_RTPFB or
+ * %GST_RTCP_TYPE_PSFB
+ * @fbtype: The type of RTCP FB packet, probably part of #GstRTCPFBType
+ * @sender_ssrc: The SSRC of the sender
+ * @media_ssrc: The SSRC of the media this refers to
+ * @fci: a #GstBuffer with the FCI data from the FB packet or %NULL if
+ * there was no FCI
+ *
+ * Notify that a RTCP feedback packet has been received
+ */
+ rtp_session_signals[SIGNAL_ON_FEEDBACK_RTCP] =
+ g_signal_new ("on-feedback-rtcp", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_feedback_rtcp),
+ NULL, NULL, NULL, G_TYPE_NONE, 5, G_TYPE_UINT, G_TYPE_UINT, G_TYPE_UINT,
+ G_TYPE_UINT, GST_TYPE_BUFFER);
+
+ /**
+ * RTPSession::send-rtcp:
+ * @session: the object which received the signal
+ * @max_delay: The maximum delay after which the feedback will not be useful
+ * anymore
+ *
+ * Requests that the #RTPSession initiate a new RTCP packet as soon as
+ * possible within the requested delay.
+ *
+ * This sets feedback to %TRUE if not already done before.
+ */
+ rtp_session_signals[SIGNAL_SEND_RTCP] =
+ g_signal_new ("send-rtcp", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (RTPSessionClass, send_rtcp), NULL, NULL,
+ NULL, G_TYPE_NONE, 1, G_TYPE_UINT64);
+
+ /**
+ * RTPSession::send-rtcp-full:
+ * @session: the object which received the signal
+ * @max_delay: The maximum delay after which the feedback will not be useful
+ * anymore
+ *
+ * Requests that the #RTPSession initiate a new RTCP packet as soon as
+ * possible within the requested delay.
+ *
+ * This sets feedback to %TRUE if not already done before.
+ *
+ * Returns: TRUE if the new RTCP packet could be scheduled within the
+ * requested delay, FALSE otherwise.
+ *
+ * Since: 1.6
+ */
+ rtp_session_signals[SIGNAL_SEND_RTCP_FULL] =
+ g_signal_new ("send-rtcp-full", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (RTPSessionClass, send_rtcp), NULL, NULL,
+ NULL, G_TYPE_BOOLEAN, 1, G_TYPE_UINT64);
+
+ /**
+ * RTPSession::on-receiving-rtcp:
+ * @session: the object which received the signal
+ * @buffer: the #GstBuffer containing the RTCP packet that was received
+ *
+ * This signal is emitted when receiving an RTCP packet before it is handled
+ * by the session. It can be used to extract custom information from RTCP packets.
+ *
+ * Since: 1.6
+ */
+ rtp_session_signals[SIGNAL_ON_RECEIVING_RTCP] =
+ g_signal_new ("on-receiving-rtcp", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_receiving_rtcp),
+ NULL, NULL, NULL, G_TYPE_NONE, 1,
+ GST_TYPE_BUFFER | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * RTPSession::on-new-sender-ssrc:
+ * @session: the object which received the signal
+ * @src: the new sender RTPSource
+ *
+ * Notify of a new sender SSRC that entered @session.
+ *
+ * Since: 1.8
+ */
+ rtp_session_signals[SIGNAL_ON_NEW_SENDER_SSRC] =
+ g_signal_new ("on-new-sender-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_new_sender_ssrc),
+ NULL, NULL, NULL, G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+
+ /**
+ * RTPSession::on-sender-ssrc-active:
+ * @session: the object which received the signal
+ * @src: the active sender RTPSource
+ *
+ * Notify of a sender SSRC that is active, i.e., sending RTCP.
+ *
+ * Since: 1.8
+ */
+ rtp_session_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE] =
+ g_signal_new ("on-sender-ssrc-active", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass,
+ on_sender_ssrc_active), NULL, NULL, NULL,
+ G_TYPE_NONE, 1, RTP_TYPE_SOURCE);
+
+ /**
+ * RTPSession::on-sending-nacks:
+ * @session: the object which received the signal
+ * @sender_ssrc: the sender ssrc
+ * @media_ssrc: the media ssrc
+ * @nacks: (element-type guint16): the list of seqnum to be nacked
+ * @buffer: the #GstBuffer containing the RTCP packet about to be sent
+ *
+ * This signal is emitted before NACK packets are added into the RTCP
+ * packet. This signal can be used to override the conversion of the NACK
+ * seqnum array into packets. This can be used if your protocol uses
+ * different type of NACK (e.g. based on RTCP APP).
+ *
+ * The handler should transform the seqnum from @nacks array into packets.
+ * @nacks seqnum must be consumed from the start. The remaining will be
+ * rescheduled for later based on bandwidth. Only one handler will be
+ * signalled.
+ *
+ * A handler may return 0 to signal that generic NACKs should be created
+ * for this set. This can be useful if the signal is used for other purpose
+ * or if the other type of NACK would use more space.
+ *
+ * Returns: the number of NACK seqnum that was consumed from @nacks.
+ *
+ * Since: 1.16
+ */
+ rtp_session_signals[SIGNAL_ON_SENDING_NACKS] =
+ g_signal_new ("on-sending-nacks", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (RTPSessionClass, on_sending_nacks),
+ g_signal_accumulator_first_wins, NULL, NULL,
+ G_TYPE_UINT, 4, G_TYPE_UINT, G_TYPE_UINT, G_TYPE_ARRAY,
+ GST_TYPE_BUFFER | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ g_object_class_install_property (gobject_class, PROP_INTERNAL_SSRC,
+ g_param_spec_uint ("internal-ssrc", "Internal SSRC",
+ "The internal SSRC used for the session (deprecated)",
+ 0, G_MAXUINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_DOC_SHOW_DEFAULT));
+
+ g_object_class_install_property (gobject_class, PROP_INTERNAL_SOURCE,
+ g_param_spec_object ("internal-source", "Internal Source",
+ "The internal source element of the session (deprecated)",
+ RTP_TYPE_SOURCE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_BANDWIDTH,
+ g_param_spec_double ("bandwidth", "Bandwidth",
+ "The bandwidth of the session in bits per second (0 for auto-discover)",
+ 0.0, G_MAXDOUBLE, DEFAULT_BANDWIDTH,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_FRACTION,
+ g_param_spec_double ("rtcp-fraction", "RTCP Fraction",
+ "The fraction of the bandwidth used for RTCP in bits per second (or as a real fraction of the RTP bandwidth if < 1)",
+ 0.0, G_MAXDOUBLE, DEFAULT_RTCP_FRACTION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_RR_BANDWIDTH,
+ g_param_spec_int ("rtcp-rr-bandwidth", "RTCP RR bandwidth",
+ "The RTCP bandwidth used for receivers in bits per second (-1 = default)",
+ -1, G_MAXINT, DEFAULT_RTCP_RR_BANDWIDTH,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_RS_BANDWIDTH,
+ g_param_spec_int ("rtcp-rs-bandwidth", "RTCP RS bandwidth",
+ "The RTCP bandwidth used for senders in bits per second (-1 = default)",
+ -1, G_MAXINT, DEFAULT_RTCP_RS_BANDWIDTH,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_MTU,
+ g_param_spec_uint ("rtcp-mtu", "RTCP MTU",
+ "The maximum size of the RTCP packets",
+ 16, G_MAXINT16, DEFAULT_RTCP_MTU,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS
+ | GST_PARAM_DOC_SHOW_DEFAULT));
+
+ g_object_class_install_property (gobject_class, PROP_NUM_SOURCES,
+ g_param_spec_uint ("num-sources", "Num Sources",
+ "The number of sources in the session", 0, G_MAXUINT,
+ DEFAULT_NUM_SOURCES, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NUM_ACTIVE_SOURCES,
+ g_param_spec_uint ("num-active-sources", "Num Active Sources",
+ "The number of active sources in the session", 0, G_MAXUINT,
+ DEFAULT_NUM_ACTIVE_SOURCES,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ /**
+ * RTPSession:sources:
+ *
+ * Get a GValue Array of all sources in the session.
+ *
+ * ## Getting the #RTPSources of a session
+ *
+ * ``` C
+ * {
+ * GValueArray *arr;
+ * GValue *val;
+ * guint i;
+ *
+ * g_object_get (sess, "sources", &arr, NULL);
+ *
+ * for (i = 0; i < arr->n_values; i++) {
+ * RTPSource *source;
+ *
+ * val = g_value_array_get_nth (arr, i);
+ * source = g_value_get_object (val);
+ * }
+ * g_value_array_free (arr);
+ * }
+ * ```
+ */
+ g_object_class_install_property (gobject_class, PROP_SOURCES,
+ g_param_spec_boxed ("sources", "Sources",
+ "An array of all known sources in the session",
+ G_TYPE_VALUE_ARRAY, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_FAVOR_NEW,
+ g_param_spec_boolean ("favor-new", "Favor new sources",
+ "Resolve SSRC conflict in favor of new sources", FALSE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_MIN_INTERVAL,
+ g_param_spec_uint64 ("rtcp-min-interval", "Minimum RTCP interval",
+ "Minimum interval between Regular RTCP packet (in ns)",
+ 0, G_MAXUINT64, DEFAULT_RTCP_MIN_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_RTCP_FEEDBACK_RETENTION_WINDOW,
+ g_param_spec_uint64 ("rtcp-feedback-retention-window",
+ "RTCP Feedback retention window",
+ "Duration during which RTCP Feedback packets are retained (in ns)",
+ 0, G_MAXUINT64, DEFAULT_RTCP_FEEDBACK_RETENTION_WINDOW,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD,
+ g_param_spec_uint ("rtcp-immediate-feedback-threshold",
+ "RTCP Immediate Feedback threshold",
+ "The maximum number of members of a RTP session for which immediate"
+ " feedback is used (DEPRECATED: has no effect and is not needed)",
+ 0, G_MAXUINT, DEFAULT_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+ g_object_class_install_property (gobject_class, PROP_PROBATION,
+ g_param_spec_uint ("probation", "Number of probations",
+ "Consecutive packet sequence numbers to accept the source",
+ 0, G_MAXUINT, DEFAULT_PROBATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_DROPOUT_TIME,
+ g_param_spec_uint ("max-dropout-time", "Max dropout time",
+ "The maximum time (milliseconds) of missing packets tolerated.",
+ 0, G_MAXUINT, DEFAULT_MAX_DROPOUT_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_MISORDER_TIME,
+ g_param_spec_uint ("max-misorder-time", "Max misorder time",
+ "The maximum time (milliseconds) of misordered packets tolerated.",
+ 0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * RTPSession:stats:
+ *
+ * Various session statistics. This property returns a GstStructure
+ * with name application/x-rtp-session-stats with the following fields:
+ *
+ * * "rtx-drop-count" G_TYPE_UINT The number of retransmission events
+ * dropped (due to bandwidth constraints)
+ * * "sent-nack-count" G_TYPE_UINT Number of NACKs sent
+ * * "recv-nack-count" G_TYPE_UINT Number of NACKs received
+ * * "source-stats" G_TYPE_BOXED GValueArray of #RTPSource:stats for all
+ * RTP sources (Since 1.8)
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_STATS,
+ g_param_spec_boxed ("stats", "Statistics",
+ "Various statistics", GST_TYPE_STRUCTURE,
+ G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTP_PROFILE,
+ g_param_spec_enum ("rtp-profile", "RTP Profile",
+ "RTP profile to use for this session", GST_TYPE_RTP_PROFILE,
+ DEFAULT_RTP_PROFILE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RTCP_REDUCED_SIZE,
+ g_param_spec_boolean ("rtcp-reduced-size", "RTCP Reduced Size",
+ "Use Reduced Size RTCP for feedback packets",
+ DEFAULT_RTCP_REDUCED_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * RTPSession:disable-sr-timestamp:
+ *
+ * Whether sender reports should be timestamped.
+ *
+ * Since: 1.16
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_RTCP_DISABLE_SR_TIMESTAMP,
+ g_param_spec_boolean ("disable-sr-timestamp",
+ "Disable Sender Report Timestamp",
+ "Whether sender reports should be timestamped",
+ DEFAULT_RTCP_DISABLE_SR_TIMESTAMP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * RTPSession:twcc-feedback-interval:
+ *
+ * The interval to send TWCC reports on.
+ * This overrides the default behavior of sending reports
+ * based on marker-bits.
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class,
+ PROP_TWCC_FEEDBACK_INTERVAL,
+ g_param_spec_uint64 ("twcc-feedback-interval",
+ "TWCC Feedback Interval",
+ "The interval to send TWCC reports on",
+ 0, G_MAXUINT64, DEFAULT_TWCC_FEEDBACK_INTERVAL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ klass->get_source_by_ssrc =
+ GST_DEBUG_FUNCPTR (rtp_session_get_source_by_ssrc);
+ klass->send_rtcp = GST_DEBUG_FUNCPTR (rtp_session_send_rtcp);
+
+ GST_DEBUG_CATEGORY_INIT (rtp_session_debug, "rtpsession", 0, "RTP Session");
+}
+
+static void
+rtp_session_init (RTPSession * sess)
+{
+ gint i;
+ gchar *str;
+
+ g_mutex_init (&sess->lock);
+ sess->key = g_random_int ();
+ sess->mask_idx = 0;
+ sess->mask = 0;
+
+ /* TODO: We currently only use the first hash table but this is the
+ * beginning of an implementation for RFC2762
+ for (i = 0; i < 32; i++) {
+ */
+ for (i = 0; i < 1; i++) {
+ sess->ssrcs[i] =
+ g_hash_table_new_full (NULL, NULL, NULL,
+ (GDestroyNotify) g_object_unref);
+ }
+
+ rtp_stats_init_defaults (&sess->stats);
+ INIT_AVG (sess->stats.avg_rtcp_packet_size, 100);
+ rtp_stats_set_min_interval (&sess->stats,
+ (gdouble) DEFAULT_RTCP_MIN_INTERVAL / GST_SECOND);
+
+ sess->recalc_bandwidth = TRUE;
+ sess->bandwidth = DEFAULT_BANDWIDTH;
+ sess->rtcp_bandwidth = DEFAULT_RTCP_FRACTION;
+ sess->rtcp_rr_bandwidth = DEFAULT_RTCP_RR_BANDWIDTH;
+ sess->rtcp_rs_bandwidth = DEFAULT_RTCP_RS_BANDWIDTH;
+
+ /* default UDP header length */
+ sess->header_len = UDP_IP_HEADER_OVERHEAD;
+ sess->mtu = DEFAULT_RTCP_MTU;
+
+ sess->probation = DEFAULT_PROBATION;
+ sess->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME;
+ sess->max_misorder_time = DEFAULT_MAX_MISORDER_TIME;
+
+ /* some default SDES entries */
+ sess->sdes = gst_structure_new_empty ("application/x-rtp-source-sdes");
+
+ /* we do not want to leak details like the username or hostname here */
+ str = g_strdup_printf ("user%u@host-%x", g_random_int (), g_random_int ());
+ gst_structure_set (sess->sdes, "cname", G_TYPE_STRING, str, NULL);
+ g_free (str);
+
+#if 0
+ /* we do not want to leak the user's real name here */
+ str = g_strdup_printf ("Anon%u", g_random_int ());
+ gst_structure_set (sdes, "name", G_TYPE_STRING, str, NULL);
+ g_free (str);
+#endif
+
+ gst_structure_set (sess->sdes, "tool", G_TYPE_STRING, "GStreamer", NULL);
+
+ /* this is the SSRC we suggest */
+ sess->suggested_ssrc = rtp_session_create_new_ssrc (sess);
+ sess->internal_ssrc_set = FALSE;
+
+ sess->first_rtcp = TRUE;
+ sess->next_rtcp_check_time = GST_CLOCK_TIME_NONE;
+ sess->last_rtcp_check_time = GST_CLOCK_TIME_NONE;
+ sess->last_rtcp_send_time = GST_CLOCK_TIME_NONE;
+ sess->last_rtcp_interval = GST_CLOCK_TIME_NONE;
+
+ sess->next_early_rtcp_time = GST_CLOCK_TIME_NONE;
+ sess->rtcp_feedback_retention_window = DEFAULT_RTCP_FEEDBACK_RETENTION_WINDOW;
+ sess->rtcp_immediate_feedback_threshold =
+ DEFAULT_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD;
+ sess->rtp_profile = DEFAULT_RTP_PROFILE;
+ sess->reduced_size_rtcp = DEFAULT_RTCP_REDUCED_SIZE;
+ sess->timestamp_sender_reports = !DEFAULT_RTCP_DISABLE_SR_TIMESTAMP;
+
+ sess->is_doing_ptp = TRUE;
+
+ sess->twcc = rtp_twcc_manager_new (sess->mtu);
+ sess->twcc_stats = rtp_twcc_stats_new ();
+}
+
+/* GObject finalize: release everything created in rtp_session_init() and
+ * accumulated during the session's lifetime (SDES structure, conflicting
+ * address list, SSRC hash tables, TWCC manager/stats, the lock). */
+static void
+rtp_session_finalize (GObject * object)
+{
+ RTPSession *sess;
+ gint i;
+
+ sess = RTP_SESSION_CAST (object);
+
+ gst_structure_free (sess->sdes);
+
+ g_list_free_full (sess->conflicting_addresses,
+ (GDestroyNotify) rtp_conflicting_address_free);
+
+ /* TODO: Change this again when implementing RFC 2762
+ * for (i = 0; i < 32; i++)
+ */
+ for (i = 0; i < 1; i++)
+ g_hash_table_destroy (sess->ssrcs[i]);
+
+ g_object_unref (sess->twcc);
+ rtp_twcc_stats_free (sess->twcc_stats);
+
+ g_mutex_clear (&sess->lock);
+
+ G_OBJECT_CLASS (rtp_session_parent_class)->finalize (object);
+}
+
+/* GHFunc callback: append @source to the @arr GValueArray.
+ * g_value_take_object() stores the hash table's borrowed reference in the
+ * stack GValue WITHOUT taking a new ref; g_value_array_append() then copies
+ * the value, which does ref @source for the array's copy. The stack GValue
+ * is intentionally never unset, so the borrowed reference is not dropped
+ * here. Net effect: @source gains exactly one ref, owned by @arr. */
+static void
+copy_source (gpointer key, RTPSource * source, GValueArray * arr)
+{
+ GValue value = { 0 };
+
+ g_value_init (&value, RTP_TYPE_SOURCE);
+ g_value_take_object (&value, source);
+ /* copies the value */
+ g_value_array_append (arr, &value);
+}
+
+/* Build a GValueArray holding a ref to every known RTPSource, taken under
+ * the session lock so the table cannot change while copying.
+ * Caller owns the returned array (used by the "sources" property getter). */
+static GValueArray *
+rtp_session_create_sources (RTPSession * sess)
+{
+ GValueArray *res;
+ guint size;
+
+ RTP_SESSION_LOCK (sess);
+ /* get number of elements in the table */
+ size = g_hash_table_size (sess->ssrcs[sess->mask_idx]);
+ /* create the result value array */
+ res = g_value_array_new (size);
+
+ /* and copy all values into the array */
+ g_hash_table_foreach (sess->ssrcs[sess->mask_idx], (GHFunc) copy_source, res);
+ RTP_SESSION_UNLOCK (sess);
+
+ return res;
+}
+
+/* GHFunc callback: append the "stats" GstStructure of @source to @arr.
+ * A NULL value is appended first and then initialised in place, so the
+ * structure ownership can be transferred with g_value_take_boxed()
+ * instead of being copied. */
+static void
+create_source_stats (gpointer key, RTPSource * source, GValueArray * arr)
+{
+ GValue *value;
+ GstStructure *s;
+
+ g_object_get (source, "stats", &s, NULL);
+
+ g_value_array_append (arr, NULL);
+ value = g_value_array_get_nth (arr, arr->n_values - 1);
+ g_value_init (value, GST_TYPE_STRUCTURE);
+ g_value_take_boxed (value, s);
+}
+
+/* Build the "application/x-rtp-session-stats" structure returned by the
+ * RTPSession:stats property: NACK counters plus a "source-stats"
+ * GValueArray with the per-source statistics. Collection of the counters
+ * and source stats happens under the session lock. Caller owns the
+ * returned structure. */
+static GstStructure *
+rtp_session_create_stats (RTPSession * sess)
+{
+ GstStructure *s;
+ GValueArray *source_stats;
+ GValue source_stats_v = G_VALUE_INIT;
+ guint size;
+
+ RTP_SESSION_LOCK (sess);
+ s = gst_structure_new ("application/x-rtp-session-stats",
+ "rtx-drop-count", G_TYPE_UINT, sess->stats.nacks_dropped,
+ "sent-nack-count", G_TYPE_UINT, sess->stats.nacks_sent,
+ "recv-nack-count", G_TYPE_UINT, sess->stats.nacks_received, NULL);
+
+ size = g_hash_table_size (sess->ssrcs[sess->mask_idx]);
+ source_stats = g_value_array_new (size);
+ g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+ (GHFunc) create_source_stats, source_stats);
+ RTP_SESSION_UNLOCK (sess);
+
+ /* transfer the array into the structure without copying it */
+ g_value_init (&source_stats_v, G_TYPE_VALUE_ARRAY);
+ g_value_take_boxed (&source_stats_v, source_stats);
+ gst_structure_take_value (s, "source-stats", &source_stats_v);
+
+ return s;
+}
+
+/* GObject set_property handler. Properties that feed the bandwidth or
+ * RTCP scheduling logic are written under the session lock and may
+ * trigger the reconfigure/reconsider callbacks (invoked with the lock
+ * released); plain scalar properties are written directly. */
+static void
+rtp_session_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ RTPSession *sess;
+
+ sess = RTP_SESSION (object);
+
+ switch (prop_id) {
+ case PROP_INTERNAL_SSRC:
+ RTP_SESSION_LOCK (sess);
+ sess->suggested_ssrc = g_value_get_uint (value);
+ sess->internal_ssrc_set = TRUE;
+ sess->internal_ssrc_from_caps_or_property = TRUE;
+ RTP_SESSION_UNLOCK (sess);
+ if (sess->callbacks.reconfigure)
+ sess->callbacks.reconfigure (sess, sess->reconfigure_user_data);
+ break;
+ case PROP_BANDWIDTH:
+ RTP_SESSION_LOCK (sess);
+ sess->bandwidth = g_value_get_double (value);
+ sess->recalc_bandwidth = TRUE;
+ RTP_SESSION_UNLOCK (sess);
+ break;
+ case PROP_RTCP_FRACTION:
+ RTP_SESSION_LOCK (sess);
+ sess->rtcp_bandwidth = g_value_get_double (value);
+ sess->recalc_bandwidth = TRUE;
+ RTP_SESSION_UNLOCK (sess);
+ break;
+ case PROP_RTCP_RR_BANDWIDTH:
+ RTP_SESSION_LOCK (sess);
+ sess->rtcp_rr_bandwidth = g_value_get_int (value);
+ sess->recalc_bandwidth = TRUE;
+ RTP_SESSION_UNLOCK (sess);
+ break;
+ case PROP_RTCP_RS_BANDWIDTH:
+ RTP_SESSION_LOCK (sess);
+ sess->rtcp_rs_bandwidth = g_value_get_int (value);
+ sess->recalc_bandwidth = TRUE;
+ RTP_SESSION_UNLOCK (sess);
+ break;
+ case PROP_RTCP_MTU:
+ sess->mtu = g_value_get_uint (value);
+ /* keep the TWCC manager in sync with the new MTU */
+ rtp_twcc_manager_set_mtu (sess->twcc, sess->mtu);
+ break;
+ case PROP_SDES:
+ rtp_session_set_sdes_struct (sess, g_value_get_boxed (value));
+ break;
+ case PROP_FAVOR_NEW:
+ sess->favor_new = g_value_get_boolean (value);
+ break;
+ case PROP_RTCP_MIN_INTERVAL:
+ rtp_stats_set_min_interval (&sess->stats,
+ (gdouble) g_value_get_uint64 (value) / GST_SECOND);
+ /* trigger reconsideration */
+ RTP_SESSION_LOCK (sess);
+ sess->next_rtcp_check_time = 0;
+ RTP_SESSION_UNLOCK (sess);
+ if (sess->callbacks.reconsider)
+ sess->callbacks.reconsider (sess, sess->reconsider_user_data);
+ break;
+ case PROP_RTCP_FEEDBACK_RETENTION_WINDOW:
+ sess->rtcp_feedback_retention_window = g_value_get_uint64 (value);
+ break;
+ case PROP_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD:
+ sess->rtcp_immediate_feedback_threshold = g_value_get_uint (value);
+ break;
+ case PROP_PROBATION:
+ sess->probation = g_value_get_uint (value);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ sess->max_dropout_time = g_value_get_uint (value);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ sess->max_misorder_time = g_value_get_uint (value);
+ break;
+ case PROP_RTP_PROFILE:
+ sess->rtp_profile = g_value_get_enum (value);
+ /* trigger reconsideration */
+ RTP_SESSION_LOCK (sess);
+ sess->next_rtcp_check_time = 0;
+ RTP_SESSION_UNLOCK (sess);
+ if (sess->callbacks.reconsider)
+ sess->callbacks.reconsider (sess, sess->reconsider_user_data);
+ break;
+ case PROP_RTCP_REDUCED_SIZE:
+ sess->reduced_size_rtcp = g_value_get_boolean (value);
+ break;
+ case PROP_RTCP_DISABLE_SR_TIMESTAMP:
+ /* property is a "disable" flag; internal field is the positive form */
+ sess->timestamp_sender_reports = !g_value_get_boolean (value);
+ break;
+ case PROP_TWCC_FEEDBACK_INTERVAL:
+ rtp_twcc_manager_set_feedback_interval (sess->twcc,
+ g_value_get_uint64 (value));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject get_property handler. The boxed "sdes", "sources" and "stats"
+ * values are freshly built and ownership is transferred into @value with
+ * g_value_take_boxed(). */
+static void
+rtp_session_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ RTPSession *sess;
+
+ sess = RTP_SESSION (object);
+
+ switch (prop_id) {
+ case PROP_INTERNAL_SSRC:
+ g_value_set_uint (value, rtp_session_suggest_ssrc (sess, NULL));
+ break;
+ case PROP_INTERNAL_SOURCE:
+ /* FIXME, return a random source */
+ g_value_set_object (value, NULL);
+ break;
+ case PROP_BANDWIDTH:
+ g_value_set_double (value, sess->bandwidth);
+ break;
+ case PROP_RTCP_FRACTION:
+ g_value_set_double (value, sess->rtcp_bandwidth);
+ break;
+ case PROP_RTCP_RR_BANDWIDTH:
+ g_value_set_int (value, sess->rtcp_rr_bandwidth);
+ break;
+ case PROP_RTCP_RS_BANDWIDTH:
+ g_value_set_int (value, sess->rtcp_rs_bandwidth);
+ break;
+ case PROP_RTCP_MTU:
+ g_value_set_uint (value, sess->mtu);
+ break;
+ case PROP_SDES:
+ g_value_take_boxed (value, rtp_session_get_sdes_struct (sess));
+ break;
+ case PROP_NUM_SOURCES:
+ g_value_set_uint (value, rtp_session_get_num_sources (sess));
+ break;
+ case PROP_NUM_ACTIVE_SOURCES:
+ g_value_set_uint (value, rtp_session_get_num_active_sources (sess));
+ break;
+ case PROP_SOURCES:
+ g_value_take_boxed (value, rtp_session_create_sources (sess));
+ break;
+ case PROP_FAVOR_NEW:
+ g_value_set_boolean (value, sess->favor_new);
+ break;
+ case PROP_RTCP_MIN_INTERVAL:
+ /* stored internally in seconds; exposed in nanoseconds */
+ g_value_set_uint64 (value, sess->stats.min_interval * GST_SECOND);
+ break;
+ case PROP_RTCP_FEEDBACK_RETENTION_WINDOW:
+ g_value_set_uint64 (value, sess->rtcp_feedback_retention_window);
+ break;
+ case PROP_RTCP_IMMEDIATE_FEEDBACK_THRESHOLD:
+ g_value_set_uint (value, sess->rtcp_immediate_feedback_threshold);
+ break;
+ case PROP_PROBATION:
+ g_value_set_uint (value, sess->probation);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ g_value_set_uint (value, sess->max_dropout_time);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ g_value_set_uint (value, sess->max_misorder_time);
+ break;
+ case PROP_STATS:
+ g_value_take_boxed (value, rtp_session_create_stats (sess));
+ break;
+ case PROP_RTP_PROFILE:
+ g_value_set_enum (value, sess->rtp_profile);
+ break;
+ case PROP_RTCP_REDUCED_SIZE:
+ g_value_set_boolean (value, sess->reduced_size_rtcp);
+ break;
+ case PROP_RTCP_DISABLE_SR_TIMESTAMP:
+ /* internal field is the positive form of the "disable" property */
+ g_value_set_boolean (value, !sess->timestamp_sender_reports);
+ break;
+ case PROP_TWCC_FEEDBACK_INTERVAL:
+ g_value_set_uint64 (value,
+ rtp_twcc_manager_get_feedback_interval (sess->twcc));
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Emit ::on-new-ssrc. The session lock is dropped during emission so
+ * handlers can call back into the session; the temporary ref keeps
+ * @source alive while the lock is released. */
+static void
+on_new_ssrc (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_NEW_SSRC], 0, source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-ssrc-collision outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_ssrc_collision (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SSRC_COLLISION], 0,
+ source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-ssrc-validated outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_ssrc_validated (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SSRC_VALIDATED], 0,
+ source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-ssrc-active outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_ssrc_active (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SSRC_ACTIVE], 0, source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-ssrc-sdes outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_ssrc_sdes (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ GST_DEBUG ("SDES changed for SSRC %08x", source->ssrc);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SSRC_SDES], 0, source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-bye-ssrc outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_bye_ssrc (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_BYE_SSRC], 0, source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-bye-timeout outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_bye_timeout (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_BYE_TIMEOUT], 0, source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-timeout outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_timeout (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_TIMEOUT], 0, source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-sender-timeout outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_sender_timeout (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SENDER_TIMEOUT], 0,
+ source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-new-sender-ssrc outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_new_sender_ssrc (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_NEW_SENDER_SSRC], 0,
+ source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/* Emit ::on-sender-ssrc-active outside the session lock; the temporary ref
+ * keeps @source alive while the lock is released. */
+static void
+on_sender_ssrc_active (RTPSession * sess, RTPSource * source)
+{
+ g_object_ref (source);
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SENDER_SSRC_ACTIVE], 0,
+ source);
+ RTP_SESSION_LOCK (sess);
+ g_object_unref (source);
+}
+
+/**
+ * rtp_session_new:
+ *
+ * Create a new session object.
+ *
+ * Returns: a new #RTPSession. g_object_unref() after usage.
+ */
+RTPSession *
+rtp_session_new (void)
+{
+ RTPSession *sess;
+
+ /* all per-instance setup happens in rtp_session_init() */
+ sess = g_object_new (RTP_TYPE_SESSION, NULL);
+
+ return sess;
+}
+
+/**
+ * rtp_session_reset:
+ * @sess: an #RTPSession
+ *
+ * Reset the sources of @sess.
+ */
+void
+rtp_session_reset (RTPSession * sess)
+{
+ g_return_if_fail (RTP_IS_SESSION (sess));
+
+ /* remove all sources */
+ g_hash_table_remove_all (sess->ssrcs[sess->mask_idx]);
+ sess->total_sources = 0;
+ sess->stats.sender_sources = 0;
+ sess->stats.internal_sender_sources = 0;
+ sess->stats.internal_sources = 0;
+ sess->stats.active_sources = 0;
+
+ /* restore the RTCP scheduling state to its initial values; the SDES
+ * items and suggested SSRC are intentionally not touched here */
+ sess->generation = 0;
+ sess->first_rtcp = TRUE;
+ sess->next_rtcp_check_time = GST_CLOCK_TIME_NONE;
+ sess->last_rtcp_check_time = GST_CLOCK_TIME_NONE;
+ sess->last_rtcp_send_time = GST_CLOCK_TIME_NONE;
+ sess->last_rtcp_interval = GST_CLOCK_TIME_NONE;
+ sess->next_early_rtcp_time = GST_CLOCK_TIME_NONE;
+ sess->scheduled_bye = FALSE;
+
+ /* reset session stats */
+ sess->stats.bye_members = 0;
+ sess->stats.nacks_dropped = 0;
+ sess->stats.nacks_sent = 0;
+ sess->stats.nacks_received = 0;
+
+ sess->is_doing_ptp = TRUE;
+
+ g_list_free_full (sess->conflicting_addresses,
+ (GDestroyNotify) rtp_conflicting_address_free);
+ sess->conflicting_addresses = NULL;
+}
+
+/**
+ * rtp_session_set_callbacks:
+ * @sess: an #RTPSession
+ * @callbacks: callbacks to configure
+ * @user_data: user data passed in the callbacks
+ *
+ * Configure a set of callbacks to be notified of actions.
+ */
+void
+rtp_session_set_callbacks (RTPSession * sess, RTPSessionCallbacks * callbacks,
+ gpointer user_data)
+{
+ g_return_if_fail (RTP_IS_SESSION (sess));
+
+ /* only non-NULL entries are installed, so callers can set a subset of
+ * callbacks without clearing the ones configured earlier; the same
+ * @user_data is recorded per installed callback */
+ if (callbacks->process_rtp) {
+ sess->callbacks.process_rtp = callbacks->process_rtp;
+ sess->process_rtp_user_data = user_data;
+ }
+ if (callbacks->send_rtp) {
+ sess->callbacks.send_rtp = callbacks->send_rtp;
+ sess->send_rtp_user_data = user_data;
+ }
+ if (callbacks->send_rtcp) {
+ sess->callbacks.send_rtcp = callbacks->send_rtcp;
+ sess->send_rtcp_user_data = user_data;
+ }
+ if (callbacks->sync_rtcp) {
+ sess->callbacks.sync_rtcp = callbacks->sync_rtcp;
+ sess->sync_rtcp_user_data = user_data;
+ }
+ if (callbacks->clock_rate) {
+ sess->callbacks.clock_rate = callbacks->clock_rate;
+ sess->clock_rate_user_data = user_data;
+ }
+ if (callbacks->reconsider) {
+ sess->callbacks.reconsider = callbacks->reconsider;
+ sess->reconsider_user_data = user_data;
+ }
+ if (callbacks->request_key_unit) {
+ sess->callbacks.request_key_unit = callbacks->request_key_unit;
+ sess->request_key_unit_user_data = user_data;
+ }
+ if (callbacks->request_time) {
+ sess->callbacks.request_time = callbacks->request_time;
+ sess->request_time_user_data = user_data;
+ }
+ if (callbacks->notify_nack) {
+ sess->callbacks.notify_nack = callbacks->notify_nack;
+ sess->notify_nack_user_data = user_data;
+ }
+ if (callbacks->notify_twcc) {
+ sess->callbacks.notify_twcc = callbacks->notify_twcc;
+ sess->notify_twcc_user_data = user_data;
+ }
+ if (callbacks->reconfigure) {
+ sess->callbacks.reconfigure = callbacks->reconfigure;
+ sess->reconfigure_user_data = user_data;
+ }
+ if (callbacks->notify_early_rtcp) {
+ sess->callbacks.notify_early_rtcp = callbacks->notify_early_rtcp;
+ sess->notify_early_rtcp_user_data = user_data;
+ }
+}
+
/**
 * rtp_session_set_process_rtp_callback:
 * @sess: an #RTPSession
 * @callback: callback to set
 * @user_data: user data passed in the callback
 *
 * Configure only the process_rtp callback to be notified of the process_rtp action.
 */
void
rtp_session_set_process_rtp_callback (RTPSession * sess,
    RTPSessionProcessRTP callback, gpointer user_data)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* unconditional overwrite, unlike rtp_session_set_callbacks() which only
   * installs non-NULL members */
  sess->callbacks.process_rtp = callback;
  sess->process_rtp_user_data = user_data;
}
+
/**
 * rtp_session_set_send_rtp_callback:
 * @sess: an #RTPSession
 * @callback: callback to set
 * @user_data: user data passed in the callback
 *
 * Configure only the send_rtp callback to be notified of the send_rtp action.
 */
void
rtp_session_set_send_rtp_callback (RTPSession * sess,
    RTPSessionSendRTP callback, gpointer user_data)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* unconditional overwrite of both the callback and its user_data */
  sess->callbacks.send_rtp = callback;
  sess->send_rtp_user_data = user_data;
}
+
/**
 * rtp_session_set_send_rtcp_callback:
 * @sess: an #RTPSession
 * @callback: callback to set
 * @user_data: user data passed in the callback
 *
 * Configure only the send_rtcp callback to be notified of the send_rtcp action.
 */
void
rtp_session_set_send_rtcp_callback (RTPSession * sess,
    RTPSessionSendRTCP callback, gpointer user_data)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* unconditional overwrite of both the callback and its user_data */
  sess->callbacks.send_rtcp = callback;
  sess->send_rtcp_user_data = user_data;
}
+
/**
 * rtp_session_set_sync_rtcp_callback:
 * @sess: an #RTPSession
 * @callback: callback to set
 * @user_data: user data passed in the callback
 *
 * Configure only the sync_rtcp callback to be notified of the sync_rtcp action.
 */
void
rtp_session_set_sync_rtcp_callback (RTPSession * sess,
    RTPSessionSyncRTCP callback, gpointer user_data)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* unconditional overwrite of both the callback and its user_data */
  sess->callbacks.sync_rtcp = callback;
  sess->sync_rtcp_user_data = user_data;
}
+
/**
 * rtp_session_set_clock_rate_callback:
 * @sess: an #RTPSession
 * @callback: callback to set
 * @user_data: user data passed in the callback
 *
 * Configure only the clock_rate callback to be notified of the clock_rate action.
 */
void
rtp_session_set_clock_rate_callback (RTPSession * sess,
    RTPSessionClockRate callback, gpointer user_data)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* unconditional overwrite of both the callback and its user_data */
  sess->callbacks.clock_rate = callback;
  sess->clock_rate_user_data = user_data;
}
+
/**
 * rtp_session_set_reconsider_callback:
 * @sess: an #RTPSession
 * @callback: callback to set
 * @user_data: user data passed in the callback
 *
 * Configure only the reconsider callback to be notified of the reconsider action.
 */
void
rtp_session_set_reconsider_callback (RTPSession * sess,
    RTPSessionReconsider callback, gpointer user_data)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* unconditional overwrite of both the callback and its user_data */
  sess->callbacks.reconsider = callback;
  sess->reconsider_user_data = user_data;
}
+
/**
 * rtp_session_set_request_time_callback:
 * @sess: an #RTPSession
 * @callback: callback to set
 * @user_data: user data passed in the callback
 *
 * Configure only the request_time callback.
 */
void
rtp_session_set_request_time_callback (RTPSession * sess,
    RTPSessionRequestTime callback, gpointer user_data)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* unconditional overwrite of both the callback and its user_data */
  sess->callbacks.request_time = callback;
  sess->request_time_user_data = user_data;
}
+
/**
 * rtp_session_set_bandwidth:
 * @sess: an #RTPSession
 * @bandwidth: the bandwidth allocated
 *
 * Set the session bandwidth in bits per second.
 */
void
rtp_session_set_bandwidth (RTPSession * sess, gdouble bandwidth)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* stats are shared with the processing path, so take the session lock */
  RTP_SESSION_LOCK (sess);
  sess->stats.bandwidth = bandwidth;
  RTP_SESSION_UNLOCK (sess);
}
+
+/**
+ * rtp_session_get_bandwidth:
+ * @sess: an #RTPSession
+ *
+ * Get the session bandwidth.
+ *
+ * Returns: the session bandwidth.
+ */
+gdouble
+rtp_session_get_bandwidth (RTPSession * sess)
+{
+ gdouble result;
+
+ g_return_val_if_fail (RTP_IS_SESSION (sess), 0);
+
+ RTP_SESSION_LOCK (sess);
+ result = sess->stats.bandwidth;
+ RTP_SESSION_UNLOCK (sess);
+
+ return result;
+}
+
/**
 * rtp_session_set_rtcp_fraction:
 * @sess: an #RTPSession
 * @bandwidth: the RTCP bandwidth
 *
 * Set the bandwidth in bits per second that should be used for RTCP
 * messages.
 */
void
rtp_session_set_rtcp_fraction (RTPSession * sess, gdouble bandwidth)
{
  g_return_if_fail (RTP_IS_SESSION (sess));

  /* stats are shared with the processing path, so take the session lock */
  RTP_SESSION_LOCK (sess);
  sess->stats.rtcp_bandwidth = bandwidth;
  RTP_SESSION_UNLOCK (sess);
}
+
/**
 * rtp_session_get_rtcp_fraction:
 * @sess: an #RTPSession
 *
 * Get the session bandwidth used for RTCP.
 *
 * Returns: The bandwidth used for RTCP messages, or 0.0 when @sess is not a
 * valid #RTPSession.
 */
gdouble
rtp_session_get_rtcp_fraction (RTPSession * sess)
{
  gdouble result;

  g_return_val_if_fail (RTP_IS_SESSION (sess), 0.0);

  /* stats are shared with the processing path, so take the session lock */
  RTP_SESSION_LOCK (sess);
  result = sess->stats.rtcp_bandwidth;
  RTP_SESSION_UNLOCK (sess);

  return result;
}
+
/**
 * rtp_session_get_sdes_struct:
 * @sess: an #RTPSession
 *
 * Get the SDES data as a #GstStructure
 *
 * Returns: a GstStructure with SDES items for @sess, or %NULL when no SDES
 * structure is set. This function returns a copy of the SDES structure,
 * use gst_structure_free() after usage.
 */
GstStructure *
rtp_session_get_sdes_struct (RTPSession * sess)
{
  GstStructure *result = NULL;

  g_return_val_if_fail (RTP_IS_SESSION (sess), NULL);

  /* copy under the lock so a concurrent set_sdes_struct cannot free it
   * while we read it */
  RTP_SESSION_LOCK (sess);
  if (sess->sdes)
    result = gst_structure_copy (sess->sdes);
  RTP_SESSION_UNLOCK (sess);

  return result;
}
+
/* GHFunc: install a copy of @sdes on @source; @key (the SSRC) is unused.
 * rtp_source_set_sdes_struct() takes ownership of the copy. */
static void
source_set_sdes (const gchar * key, RTPSource * source, GstStructure * sdes)
{
  rtp_source_set_sdes_struct (source, gst_structure_copy (sdes));
}
+
/**
 * rtp_session_set_sdes_struct:
 * @sess: an #RTPSession
 * @sdes: a #GstStructure
 *
 * Set the SDES data as a #GstStructure. This function makes a copy of @sdes.
 */
void
rtp_session_set_sdes_struct (RTPSession * sess, const GstStructure * sdes)
{
  g_return_if_fail (sdes);
  g_return_if_fail (RTP_IS_SESSION (sess));

  RTP_SESSION_LOCK (sess);
  if (sess->sdes)
    gst_structure_free (sess->sdes);
  sess->sdes = gst_structure_copy (sdes);

  /* propagate the new SDES to every known source (each gets its own copy) */
  g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
      (GHFunc) source_set_sdes, sess->sdes);
  RTP_SESSION_UNLOCK (sess);
}
+
/* RTPSourcePushRTP callback installed on every source.
 * Forwards RTP data from @source to the configured session callback:
 * internal (our own) sources go out via send_rtp, remote sources are
 * delivered via process_rtp. Takes ownership of @data and unrefs it when no
 * callback is configured.
 *
 * Called with the session lock held; the lock is dropped around the callback
 * to avoid deadlocks with the application, then re-taken before returning.
 * NOTE(review): the callback pointers are read after unlocking — assumes
 * callbacks are not reconfigured while packets flow; confirm with callers. */
static GstFlowReturn
source_push_rtp (RTPSource * source, gpointer data, RTPSession * session)
{
  GstFlowReturn result = GST_FLOW_OK;

  if (source->internal) {
    GST_LOG ("source %08x pushed sender RTP packet", source->ssrc);

    RTP_SESSION_UNLOCK (session);

    if (session->callbacks.send_rtp)
      result =
          session->callbacks.send_rtp (session, source, data,
          session->send_rtp_user_data);
    else {
      /* nobody to send to: drop the buffer or buffer list */
      gst_mini_object_unref (GST_MINI_OBJECT_CAST (data));
    }
  } else {
    GST_LOG ("source %08x pushed receiver RTP packet", source->ssrc);
    RTP_SESSION_UNLOCK (session);

    if (session->callbacks.process_rtp)
      result =
          session->callbacks.process_rtp (session, source,
          GST_BUFFER_CAST (data), session->process_rtp_user_data);
    else
      gst_buffer_unref (GST_BUFFER_CAST (data));
  }
  RTP_SESSION_LOCK (session);

  return result;
}
+
/* RTPSourceClockRate callback installed on every source.
 * Asks the application for the clock-rate of payload type @pt via the
 * clock_rate session callback; returns -1 when no callback is configured
 * or the rate is unknown.
 *
 * Called with the session lock held; drops the lock around the callback
 * and re-takes it before returning. */
static gint
source_clock_rate (RTPSource * source, guint8 pt, RTPSession * session)
{
  gint result;

  RTP_SESSION_UNLOCK (session);

  if (session->callbacks.clock_rate)
    result =
        session->callbacks.clock_rate (session, pt,
        session->clock_rate_user_data);
  else
    result = -1;

  RTP_SESSION_LOCK (session);

  GST_DEBUG ("got clock-rate %d for pt %d", result, pt);

  return result;
}
+
/* default callbacks installed on every RTPSource created by this session:
 * pushing RTP in/out of a source and resolving payload-type clock-rates */
static RTPSourceCallbacks callbacks = {
  (RTPSourcePushRTP) source_push_rtp,
  (RTPSourceClockRate) source_clock_rate,
};
+
+
/**
 * rtp_session_find_conflicting_address:
 * @session: The session the packet came in
 * @address: address to check for
 * @time: The time when the packet that is possibly in conflict arrived
 *
 * Checks if an address which has a conflict is already known. If it is
 * a known conflict, remember the time.
 *
 * Returns: TRUE if it was a known conflict, FALSE otherwise
 */
static gboolean
rtp_session_find_conflicting_address (RTPSession * session,
    GSocketAddress * address, GstClockTime time)
{
  /* delegates to the shared conflicting-address list helper */
  return find_conflicting_address (session->conflicting_addresses, address,
      time);
}
+
/**
 * rtp_session_add_conflicting_address:
 * @sess: The session the packet came in
 * @address: address to remember
 * @time: The time when the packet that is in conflict arrived
 *
 * Adds a new conflict address
 */
static void
rtp_session_add_conflicting_address (RTPSession * sess,
    GSocketAddress * address, GstClockTime time)
{
  /* the helper returns the (possibly new) list head */
  sess->conflicting_addresses =
      add_conflicting_address (sess->conflicting_addresses, address, time);
}
+
/* Handle a freshly detected SSRC collision with one of our internal sources:
 * remember the conflicting @address, mark @source BYE with an "SSRC
 * Collision" reason, pick a new suggested SSRC if the colliding one was
 * being suggested, notify the application and schedule the BYE.
 * Must be called with the session lock held. */
static void
rtp_session_have_conflict (RTPSession * sess, RTPSource * source,
    GSocketAddress * address, GstClockTime current_time)
{
  guint32 ssrc = rtp_source_get_ssrc (source);

  /* Its a new collision, lets change our SSRC */
  rtp_session_add_conflicting_address (sess, address, current_time);

  /* mark the source BYE */
  rtp_source_mark_bye (source, "SSRC Collision");
  /* if we were suggesting this SSRC, change to something else */
  if (sess->suggested_ssrc == ssrc) {
    sess->suggested_ssrc = rtp_session_create_new_ssrc (sess);
    /* NOTE(review): this marks the replacement as non-random even though it
     * came from create_new_ssrc() — confirm this is intended */
    sess->internal_ssrc_set = TRUE;
  }

  on_ssrc_collision (sess, source);

  rtp_session_schedule_bye_locked (sess, current_time);
}
+
/* Collision/loop detection per RFC 3550 section 8.2.
 * @rtp selects whether the packet described by @pinfo arrived on the RTP or
 * the RTCP path.
 *
 * Returns TRUE when the packet must be dropped (known conflict, looped-back
 * packet, or a new source we choose to ignore), FALSE when processing may
 * continue. As a side effect, records the sender address on @source when it
 * was not known yet. Must be called with the session lock held. */
static gboolean
check_collision (RTPSession * sess, RTPSource * source,
    RTPPacketInfo * pinfo, gboolean rtp)
{
  guint32 ssrc;

  /* If we have no pinfo address, we can't do collision checking */
  if (!pinfo->address)
    return FALSE;

  ssrc = rtp_source_get_ssrc (source);

  if (!source->internal) {
    GSocketAddress *from;

    /* This is not our local source, but lets check if two remote
     * source collide */
    if (rtp) {
      from = source->rtp_from;
    } else {
      from = source->rtcp_from;
    }

    if (from) {
      if (__g_socket_address_equal (from, pinfo->address)) {
        /* Address is the same */
        return FALSE;
      } else {
        GST_LOG ("we have a third-party collision or loop ssrc:%x", ssrc);
        if (sess->favor_new) {
          /* favor-new mode: believe the newest address unless we already
           * know it is a conflict */
          if (rtp_source_find_conflicting_address (source,
                  pinfo->address, pinfo->current_time)) {
            gchar *buf1;

            buf1 = __g_socket_address_to_string (pinfo->address);
            GST_LOG ("Known conflict on %x for %s, dropping packet", ssrc,
                buf1);
            g_free (buf1);

            return TRUE;
          } else {
            gchar *buf1, *buf2;

            /* Current address is not a known conflict, lets assume this is
             * a new source. Save old address in possible conflict list
             */
            rtp_source_add_conflicting_address (source, from,
                pinfo->current_time);

            buf1 = __g_socket_address_to_string (from);
            buf2 = __g_socket_address_to_string (pinfo->address);

            GST_DEBUG ("New conflict for ssrc %x, replacing %s with %s,"
                " saving old as known conflict", ssrc, buf1, buf2);

            if (rtp)
              rtp_source_set_rtp_from (source, pinfo->address);
            else
              rtp_source_set_rtcp_from (source, pinfo->address);

            g_free (buf1);
            g_free (buf2);

            return FALSE;
          }
        } else {
          /* Don't need to save old addresses, we ignore new sources */
          return TRUE;
        }
      }
    } else {
      /* We don't already have a from address for RTP, just set it */
      if (rtp)
        rtp_source_set_rtp_from (source, pinfo->address);
      else
        rtp_source_set_rtcp_from (source, pinfo->address);
      return FALSE;
    }

    /* FIXME: Log 3rd party collision somehow
     * Maybe should be done in upper layer, only the SDES can tell us
     * if its a collision or a loop
     */
  } else {
    /* This is sending with our ssrc, is it an address we already know */
    if (rtp_session_find_conflicting_address (sess, pinfo->address,
            pinfo->current_time)) {
      /* Its a known conflict, its probably a loop, not a collision
       * lets just drop the incoming packet
       */
      GST_DEBUG ("Our packets are being looped back to us, dropping");
    } else {
      GST_DEBUG ("Collision for SSRC %x from new incoming packet,"
          " change our sender ssrc", ssrc);

      rtp_session_have_conflict (sess, source, pinfo->address,
          pinfo->current_time);
    }
  }

  /* in the internal-source branch the packet is always dropped */
  return TRUE;
}
+
/* Accumulator for the point-to-point scan over all remote sources:
 * new_addr is the first remote address seen, is_doing_ptp stays TRUE only
 * while every later address matches it. */
typedef struct
{
  gboolean is_doing_ptp;
  GSocketAddress *new_addr;
} CompareAddrData;
+
/* check if the two given ip addr are the same (do not care about the port) */
/* NOTE(review): casts both addresses to GInetSocketAddress — assumes sources
 * only ever carry inet addresses; confirm non-inet addresses cannot occur */
static gboolean
ip_addr_equal (GSocketAddress * a, GSocketAddress * b)
{
  return
      g_inet_address_equal (g_inet_socket_address_get_address
      (G_INET_SOCKET_ADDRESS (a)),
      g_inet_socket_address_get_address (G_INET_SOCKET_ADDRESS (b)));
}
+
+static void
+compare_rtp_source_addr (const gchar * key, RTPSource * source,
+ CompareAddrData * data)
+{
+ /* only compare ip addr of remote sources which are also not closing */
+ if (!source->internal && !source->closing && source->rtp_from) {
+ /* look for the first rtp source */
+ if (!data->new_addr)
+ data->new_addr = source->rtp_from;
+ /* compare current ip addr with the first one */
+ else
+ data->is_doing_ptp &= ip_addr_equal (data->new_addr, source->rtp_from);
+ }
+}
+
+static void
+compare_rtcp_source_addr (const gchar * key, RTPSource * source,
+ CompareAddrData * data)
+{
+ /* only compare ip addr of remote sources which are also not closing */
+ if (!source->internal && !source->closing && source->rtcp_from) {
+ /* look for the first rtcp source */
+ if (!data->new_addr)
+ data->new_addr = source->rtcp_from;
+ else
+ /* compare current ip addr with the first one */
+ data->is_doing_ptp &= ip_addr_equal (data->new_addr, source->rtcp_from);
+ }
+}
+
+/* loop over our non-internal source to know if the session
+ * is doing point-to-point */
+static void
+session_update_ptp (RTPSession * sess)
+{
+ /* to know if the session is doing point to point, the ip addr
+ * of each non-internal (=remotes) source have to be compared
+ * to each other.
+ */
+ gboolean is_doing_rtp_ptp;
+ gboolean is_doing_rtcp_ptp;
+ CompareAddrData data;
+
+ /* compare the first remote source's ip addr that receive rtp packets
+ * with other remote rtp source.
+ * it's enough because the session just needs to know if they are all
+ * equals or not
+ */
+ data.is_doing_ptp = TRUE;
+ data.new_addr = NULL;
+ g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+ (GHFunc) compare_rtp_source_addr, (gpointer) & data);
+ is_doing_rtp_ptp = data.is_doing_ptp;
+
+ /* same but about rtcp */
+ data.is_doing_ptp = TRUE;
+ data.new_addr = NULL;
+ g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+ (GHFunc) compare_rtcp_source_addr, (gpointer) & data);
+ is_doing_rtcp_ptp = data.is_doing_ptp;
+
+ /* the session is doing point-to-point if all rtp remote have the same
+ * ip addr and if all rtcp remote sources have the same ip addr */
+ sess->is_doing_ptp = is_doing_rtp_ptp && is_doing_rtcp_ptp;
+
+ GST_DEBUG ("doing point-to-point: %d", sess->is_doing_ptp);
+}
+
/* Insert @src into the session's SSRC hash table and update all derived
 * bookkeeping: total/active/internal source counters, the suggested SSRC
 * (first internal source wins unless set from caps or property) and the
 * point-to-point status. Takes ownership of the hash-table reference.
 * Must be called with the session lock held. */
static void
add_source (RTPSession * sess, RTPSource * src)
{
  g_hash_table_insert (sess->ssrcs[sess->mask_idx],
      GINT_TO_POINTER (src->ssrc), src);
  /* report the new source ASAP */
  src->generation = sess->generation;
  /* we have one more source now */
  sess->total_sources++;
  if (RTP_SOURCE_IS_ACTIVE (src))
    sess->stats.active_sources++;
  if (src->internal) {
    sess->stats.internal_sources++;
    if (!sess->internal_ssrc_from_caps_or_property
        && sess->suggested_ssrc != src->ssrc) {
      sess->suggested_ssrc = src->ssrc;
      sess->internal_ssrc_set = TRUE;
    }
  }

  /* update point-to-point status */
  if (!src->internal)
    session_update_ptp (sess);
}
+
/* Look up the source with @ssrc in the current generation's hash table.
 * Returns a borrowed reference, or NULL when unknown.
 * Must be called with the session lock held. */
static RTPSource *
find_source (RTPSession * sess, guint32 ssrc)
{
  return g_hash_table_lookup (sess->ssrcs[sess->mask_idx],
      GINT_TO_POINTER (ssrc));
}
+
/* must be called with the session lock, the returned source needs to be
 * unreffed after usage. */
/* Find or create the source for @ssrc. Sets *created accordingly.
 * New sources created from RTP packets start in probation; RTCP packets are
 * trusted, so those sources (and existing sources seen via RTCP) get
 * probation 0. Returns NULL when the packet collides and must be dropped.
 * Updates the source's last (RTP) activity time on success. */
static RTPSource *
obtain_source (RTPSession * sess, guint32 ssrc, gboolean * created,
    RTPPacketInfo * pinfo, gboolean rtp)
{
  RTPSource *source;

  source = find_source (sess, ssrc);
  if (source == NULL) {
    /* make new Source in probation and insert */
    source = rtp_source_new (ssrc);

    GST_DEBUG ("creating new source %08x %p", ssrc, source);

    /* for RTP packets we need to set the source in probation. Receiving RTCP
     * packets of an SSRC, on the other hand, is a strong indication that we
     * are dealing with a valid source. */
    g_object_set (source, "probation", rtp ? sess->probation : 0,
        "max-dropout-time", sess->max_dropout_time, "max-misorder-time",
        sess->max_misorder_time, NULL);

    /* store from address, if any */
    if (pinfo->address) {
      if (rtp)
        rtp_source_set_rtp_from (source, pinfo->address);
      else
        rtp_source_set_rtcp_from (source, pinfo->address);
    }

    /* configure a callback on the source */
    rtp_source_set_callbacks (source, &callbacks, sess);

    add_source (sess, source);
    *created = TRUE;
  } else {
    *created = FALSE;
    /* check for collision, this updates the address when not previously set */
    if (check_collision (sess, source, pinfo, rtp)) {
      return NULL;
    }
    /* Receiving RTCP packets of an SSRC is a strong indication that we
     * are dealing with a valid source. */
    if (!rtp)
      g_object_set (source, "probation", 0, NULL);
  }
  /* update last activity */
  source->last_activity = pinfo->current_time;
  if (rtp)
    source->last_rtp_activity = pinfo->current_time;
  /* extra ref for the caller */
  g_object_ref (source);

  return source;
}
+
/* must be called with the session lock, the returned source needs to be
 * unreffed after usage. */
/* Find or create an internal (locally-sending) source for @ssrc.
 * Internal sources are validated immediately, skip probation, inherit the
 * session SDES and get the default callbacks. Activity times are only
 * touched when @current_time is valid. Sets *created accordingly. */
static RTPSource *
obtain_internal_source (RTPSession * sess, guint32 ssrc, gboolean * created,
    GstClockTime current_time)
{
  RTPSource *source;

  source = find_source (sess, ssrc);
  if (source == NULL) {
    /* make new internal Source and insert */
    source = rtp_source_new (ssrc);

    GST_DEBUG ("creating new internal source %08x %p", ssrc, source);

    source->validated = TRUE;
    source->internal = TRUE;
    source->probation = FALSE;
    /* give our own source a copy of the session SDES */
    rtp_source_set_sdes_struct (source, gst_structure_copy (sess->sdes));
    rtp_source_set_callbacks (source, &callbacks, sess);

    add_source (sess, source);
    *created = TRUE;
  } else {
    *created = FALSE;
  }
  /* update last activity */
  if (current_time != GST_CLOCK_TIME_NONE) {
    source->last_activity = current_time;
    source->last_rtp_activity = current_time;
  }
  /* extra ref for the caller */
  g_object_ref (source);

  return source;
}
+
/**
 * rtp_session_suggest_ssrc:
 * @sess: a #RTPSession
 * @is_random: (out) (allow-none): if the suggested ssrc is random
 *
 * Suggest an unused SSRC in @sess.
 *
 * Returns: a free unused SSRC
 */
guint32
rtp_session_suggest_ssrc (RTPSession * sess, gboolean * is_random)
{
  guint32 result;

  g_return_val_if_fail (RTP_IS_SESSION (sess), 0);

  RTP_SESSION_LOCK (sess);
  result = sess->suggested_ssrc;
  /* the suggestion is random unless it was explicitly set */
  if (is_random)
    *is_random = !sess->internal_ssrc_set;
  RTP_SESSION_UNLOCK (sess);

  return result;
}
+
+/**
+ * rtp_session_add_source:
+ * @sess: a #RTPSession
+ * @src: #RTPSource to add
+ *
+ * Add @src to @session.
+ *
+ * Returns: %TRUE on success, %FALSE if a source with the same SSRC already
+ * existed in the session.
+ */
+gboolean
+rtp_session_add_source (RTPSession * sess, RTPSource * src)
+{
+ gboolean result = FALSE;
+ RTPSource *find;
+
+ g_return_val_if_fail (RTP_IS_SESSION (sess), FALSE);
+ g_return_val_if_fail (src != NULL, FALSE);
+
+ RTP_SESSION_LOCK (sess);
+ find = find_source (sess, src->ssrc);
+ if (find == NULL) {
+ add_source (sess, src);
+ result = TRUE;
+ }
+ RTP_SESSION_UNLOCK (sess);
+
+ return result;
+}
+
+/**
+ * rtp_session_get_num_sources:
+ * @sess: an #RTPSession
+ *
+ * Get the number of sources in @sess.
+ *
+ * Returns: The number of sources in @sess.
+ */
+guint
+rtp_session_get_num_sources (RTPSession * sess)
+{
+ guint result;
+
+ g_return_val_if_fail (RTP_IS_SESSION (sess), FALSE);
+
+ RTP_SESSION_LOCK (sess);
+ result = sess->total_sources;
+ RTP_SESSION_UNLOCK (sess);
+
+ return result;
+}
+
/**
 * rtp_session_get_num_active_sources:
 * @sess: an #RTPSession
 *
 * Get the number of active sources in @sess. A source is considered active when
 * it has been validated and has not yet received a BYE RTCP message.
 *
 * Returns: The number of active sources in @sess.
 */
guint
rtp_session_get_num_active_sources (RTPSession * sess)
{
  guint result;

  g_return_val_if_fail (RTP_IS_SESSION (sess), 0);

  RTP_SESSION_LOCK (sess);
  result = sess->stats.active_sources;
  RTP_SESSION_UNLOCK (sess);

  return result;
}
+
/**
 * rtp_session_get_source_by_ssrc:
 * @sess: an #RTPSession
 * @ssrc: an SSRC
 *
 * Find the source with @ssrc in @sess.
 *
 * Returns: a #RTPSource with SSRC @ssrc or NULL if the source was not found.
 * g_object_unref() after usage.
 */
RTPSource *
rtp_session_get_source_by_ssrc (RTPSession * sess, guint32 ssrc)
{
  RTPSource *result;

  g_return_val_if_fail (RTP_IS_SESSION (sess), NULL);

  RTP_SESSION_LOCK (sess);
  result = find_source (sess, ssrc);
  /* ref while still holding the lock so the source cannot go away */
  if (result != NULL)
    g_object_ref (result);
  RTP_SESSION_UNLOCK (sess);

  return result;
}
+
+/* should be called with the SESSION lock */
+static guint32
+rtp_session_create_new_ssrc (RTPSession * sess)
+{
+ guint32 ssrc;
+
+ while (TRUE) {
+ ssrc = g_random_int ();
+
+ /* see if it exists in the session, we're done if it doesn't */
+ if (find_source (sess, ssrc) == NULL)
+ break;
+ }
+ return ssrc;
+}
+
/* GstBufferListFunc: accumulate stats for one buffer of a packet (list) into
 * @pinfo. Counts bytes (plus the configured lower-layer header overhead) and
 * packets for every buffer; for RTP also the payload length. Header fields
 * (SSRC, seqnum, pt, timestamp, marker, CSRCs, header extensions) and the
 * sender address are captured from the first buffer (@idx == 0) only.
 * Returns FALSE when the buffer is not a valid RTP packet. */
static gboolean
update_packet (GstBuffer ** buffer, guint idx, RTPPacketInfo * pinfo)
{
  GstNetAddressMeta *meta;

  /* get packet size including header overhead */
  pinfo->bytes += gst_buffer_get_size (*buffer) + pinfo->header_len;
  pinfo->packets++;

  if (pinfo->rtp) {
    GstRTPBuffer rtp = { NULL };

    if (!gst_rtp_buffer_map (*buffer, GST_MAP_READ, &rtp))
      goto invalid_packet;

    pinfo->payload_len += gst_rtp_buffer_get_payload_len (&rtp);
    if (idx == 0) {
      gint i;

      /* only keep info for first buffer */
      pinfo->ssrc = gst_rtp_buffer_get_ssrc (&rtp);
      pinfo->seqnum = gst_rtp_buffer_get_seq (&rtp);
      pinfo->pt = gst_rtp_buffer_get_payload_type (&rtp);
      pinfo->rtptime = gst_rtp_buffer_get_timestamp (&rtp);
      pinfo->marker = gst_rtp_buffer_get_marker (&rtp);
      /* copy available csrc */
      pinfo->csrc_count = gst_rtp_buffer_get_csrc_count (&rtp);
      for (i = 0; i < pinfo->csrc_count; i++)
        pinfo->csrcs[i] = gst_rtp_buffer_get_csrc (&rtp, i);

      /* RTP header extensions */
      pinfo->header_ext = gst_rtp_buffer_get_extension_bytes (&rtp,
          &pinfo->header_ext_bit_pattern);
    }
    gst_rtp_buffer_unmap (&rtp);
  }

  if (idx == 0) {
    /* for netbuffer we can store the IP address to check for collisions */
    meta = gst_buffer_get_net_address_meta (*buffer);
    /* release any address left over from a previous packet */
    if (pinfo->address)
      g_object_unref (pinfo->address);
    if (meta) {
      pinfo->address = G_SOCKET_ADDRESS (g_object_ref (meta->addr));
    } else {
      pinfo->address = NULL;
    }
  }
  return TRUE;

  /* ERRORS */
invalid_packet:
  {
    GST_DEBUG ("invalid RTP packet received");
    return FALSE;
  }
}
+
/* update the RTPPacketInfo structure with the current time and other bits
 * about the current buffer we are handling.
 * This function is typically called when a validated packet is received.
 * This function should be called with the RTP_SESSION_LOCK
 */
/* Returns FALSE when @data does not parse as valid RTP. For buffer lists no
 * single arrival time exists, so arrival_time is left unset. */
static gboolean
update_packet_info (RTPSession * sess, RTPPacketInfo * pinfo,
    gboolean send, gboolean rtp, gboolean is_list, gpointer data,
    GstClockTime current_time, GstClockTime running_time, guint64 ntpnstime)
{
  gboolean res;

  pinfo->send = send;
  pinfo->rtp = rtp;
  pinfo->is_list = is_list;
  pinfo->data = data;
  pinfo->current_time = current_time;
  pinfo->running_time = running_time;
  pinfo->ntpnstime = ntpnstime;
  pinfo->header_len = sess->header_len;
  /* counters are accumulated by update_packet(), start from zero */
  pinfo->bytes = 0;
  pinfo->payload_len = 0;
  pinfo->packets = 0;
  pinfo->marker = FALSE;

  if (is_list) {
    GstBufferList *list = GST_BUFFER_LIST_CAST (data);
    res =
        gst_buffer_list_foreach (list, (GstBufferListFunc) update_packet,
        pinfo);
    pinfo->arrival_time = GST_CLOCK_TIME_NONE;
  } else {
    GstBuffer *buffer = GST_BUFFER_CAST (data);
    res = update_packet (&buffer, 0, pinfo);
    /* single buffers carry their arrival time in the DTS */
    pinfo->arrival_time = GST_BUFFER_DTS (buffer);
  }

  return res;
}
+
+static void
+clean_packet_info (RTPPacketInfo * pinfo)
+{
+ if (pinfo->address)
+ g_object_unref (pinfo->address);
+ if (pinfo->data) {
+ gst_mini_object_unref (pinfo->data);
+ pinfo->data = NULL;
+ }
+ if (pinfo->header_ext)
+ g_bytes_unref (pinfo->header_ext);
+}
+
/* Update the active-source counter after @source's state may have changed.
 * @prevactive is the activity flag sampled before the change.
 * Returns TRUE when the source transitioned (in)active, FALSE when nothing
 * changed. Must be called with the session lock held. */
static gboolean
source_update_active (RTPSession * sess, RTPSource * source,
    gboolean prevactive)
{
  gboolean active = RTP_SOURCE_IS_ACTIVE (source);
  guint32 ssrc = source->ssrc;

  if (prevactive == active)
    return FALSE;

  if (active) {
    sess->stats.active_sources++;
    GST_DEBUG ("source: %08x became active, %d active sources", ssrc,
        sess->stats.active_sources);
  } else {
    sess->stats.active_sources--;
    GST_DEBUG ("source: %08x became inactive, %d active sources", ssrc,
        sess->stats.active_sources);
  }
  return TRUE;
}
+
/* Feed the packet described by @pinfo to the TWCC manager; when it signals
 * that feedback is due, try to schedule an RTCP transmission soon.
 * Called with the session lock held; the lock is dropped around
 * rtp_session_send_rtcp() and re-taken afterwards. */
static void
process_twcc_packet (RTPSession * sess, RTPPacketInfo * pinfo)
{
  if (rtp_twcc_manager_recv_packet (sess->twcc, pinfo)) {
    RTP_SESSION_UNLOCK (sess);

    /* TODO: find a better rational for this number, and possibly tune it based
       on factors like framerate / bandwidth etc */
    if (!rtp_session_send_rtcp (sess, 100 * GST_MSECOND)) {
      GST_INFO ("Could not schedule TWCC straight away");
    }
    RTP_SESSION_LOCK (sess);
  }
}
+
/* Update the sender-source counters after @source's state may have changed.
 * @prevsender is the sender flag sampled before the change. Internal sources
 * are additionally tracked in internal_sender_sources.
 * Returns TRUE when the source transitioned to/from being a sender, FALSE
 * when nothing changed. Must be called with the session lock held. */
static gboolean
source_update_sender (RTPSession * sess, RTPSource * source,
    gboolean prevsender)
{
  gboolean sender = RTP_SOURCE_IS_SENDER (source);
  guint32 ssrc = source->ssrc;

  if (prevsender == sender)
    return FALSE;

  if (sender) {
    sess->stats.sender_sources++;
    if (source->internal)
      sess->stats.internal_sender_sources++;
    GST_DEBUG ("source: %08x became sender, %d sender sources", ssrc,
        sess->stats.sender_sources);
  } else {
    sess->stats.sender_sources--;
    if (source->internal)
      sess->stats.internal_sender_sources--;
    GST_DEBUG ("source: %08x became non sender, %d sender sources", ssrc,
        sess->stats.sender_sources);
  }
  return TRUE;
}
+
+/**
+ * rtp_session_process_rtp:
+ * @sess: and #RTPSession
+ * @buffer: an RTP buffer
+ * @current_time: the current system time
+ * @running_time: the running_time of @buffer
+ *
+ * Process an RTP buffer in the session manager. This function takes ownership
+ * of @buffer.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+GstFlowReturn
+rtp_session_process_rtp (RTPSession * sess, GstBuffer * buffer,
+ GstClockTime current_time, GstClockTime running_time, guint64 ntpnstime)
+{
+ GstFlowReturn result;
+ guint32 ssrc;
+ RTPSource *source;
+ gboolean created;
+ gboolean prevsender, prevactive;
+ RTPPacketInfo pinfo = { 0, };
+ guint64 oldrate;
+
+ g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
+ g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);
+
+ RTP_SESSION_LOCK (sess);
+
+ /* update pinfo stats */
+ if (!update_packet_info (sess, &pinfo, FALSE, TRUE, FALSE, buffer,
+ current_time, running_time, ntpnstime)) {
+ GST_DEBUG ("invalid RTP packet received");
+ RTP_SESSION_UNLOCK (sess);
+ return rtp_session_process_rtcp (sess, buffer, current_time, running_time,
+ ntpnstime);
+ }
+
+ ssrc = pinfo.ssrc;
+
+ source = obtain_source (sess, ssrc, &created, &pinfo, TRUE);
+ if (!source)
+ goto collision;
+
+ prevsender = RTP_SOURCE_IS_SENDER (source);
+ prevactive = RTP_SOURCE_IS_ACTIVE (source);
+ oldrate = source->bitrate;
+
+ if (created)
+ on_new_ssrc (sess, source);
+
+ /* let source process the packet */
+ result = rtp_source_process_rtp (source, &pinfo);
+ process_twcc_packet (sess, &pinfo);
+
+ /* source became active */
+ if (source_update_active (sess, source, prevactive))
+ on_ssrc_validated (sess, source);
+
+ source_update_sender (sess, source, prevsender);
+
+ if (oldrate != source->bitrate)
+ sess->recalc_bandwidth = TRUE;
+
+
+ if (source->validated) {
+ gboolean created;
+ gint i;
+
+ /* for validated sources, we add the CSRCs as well */
+ for (i = 0; i < pinfo.csrc_count; i++) {
+ guint32 csrc;
+ RTPSource *csrc_src;
+
+ csrc = pinfo.csrcs[i];
+
+ /* get source */
+ csrc_src = obtain_source (sess, csrc, &created, &pinfo, TRUE);
+ if (!csrc_src)
+ continue;
+
+ if (created) {
+ GST_DEBUG ("created new CSRC: %08x", csrc);
+ rtp_source_set_as_csrc (csrc_src);
+ source_update_active (sess, csrc_src, FALSE);
+ on_new_ssrc (sess, csrc_src);
+ }
+ g_object_unref (csrc_src);
+ }
+ }
+ g_object_unref (source);
+
+ RTP_SESSION_UNLOCK (sess);
+
+ clean_packet_info (&pinfo);
+
+ return result;
+
+ /* ERRORS */
+collision:
+ {
+ RTP_SESSION_UNLOCK (sess);
+ clean_packet_info (&pinfo);
+ GST_DEBUG ("ignoring packet because its collisioning");
+ return GST_FLOW_OK;
+ }
+}
+
/* Process the report blocks of an SR/RR @packet sent by @source. Only blocks
 * that describe one of our own active internal sources are folded into that
 * sender's statistics; blocks about unknown or remote SSRCs are skipped.
 * Finally signals on_ssrc_active for the reporting source.
 * Must be called with the session lock held. */
static void
rtp_session_process_rb (RTPSession * sess, RTPSource * source,
    GstRTCPPacket * packet, RTPPacketInfo * pinfo)
{
  guint count, i;

  count = gst_rtcp_packet_get_rb_count (packet);
  for (i = 0; i < count; i++) {
    guint32 ssrc, exthighestseq, jitter, lsr, dlsr;
    guint8 fractionlost;
    gint32 packetslost;
    RTPSource *src;

    gst_rtcp_packet_get_rb (packet, i, &ssrc, &fractionlost,
        &packetslost, &exthighestseq, &jitter, &lsr, &dlsr);

    GST_DEBUG ("RB %d: SSRC %08x, jitter %" G_GUINT32_FORMAT, i, ssrc, jitter);

    /* find our own source */
    src = find_source (sess, ssrc);
    if (src == NULL)
      continue;

    if (src->internal && RTP_SOURCE_IS_ACTIVE (src)) {
      /* only deal with report blocks for our session, we update the stats of
       * the sender of the RTCP message. We could also compare our stats against
       * the other sender to see if we are better or worse. */
      /* FIXME, need to keep track who the RB block is from */
      rtp_source_process_rb (source, ssrc, pinfo->ntpnstime, fractionlost,
          packetslost, exthighestseq, jitter, lsr, dlsr);
    }
  }
  on_ssrc_active (sess, source);
}
+
/* A Sender report contains statistics about how the sender is doing. This
 * includes timing informataion such as the relation between RTP and NTP
 * timestamps and the number of packets/bytes it sent to us.
 *
 * In this report is also included a set of report blocks related to how this
 * sender is receiving data (in case we (or somebody else) is also sending stuff
 * to it). This info includes the packet loss, jitter and seqnum. It also
 * contains information to calculate the round trip time (LSR/DLSR).
 */
/* *do_sync is set to whether the SR may be used for lip-sync (FALSE for
 * sources marked BYE). Must be called with the session lock held. */
static void
rtp_session_process_sr (RTPSession * sess, GstRTCPPacket * packet,
    RTPPacketInfo * pinfo, gboolean * do_sync)
{
  guint32 senderssrc, rtptime, packet_count, octet_count;
  guint64 ntptime;
  RTPSource *source;
  gboolean created, prevsender;

  gst_rtcp_packet_sr_get_sender_info (packet, &senderssrc, &ntptime, &rtptime,
      &packet_count, &octet_count);

  GST_DEBUG ("got SR packet: SSRC %08x, time %" GST_TIME_FORMAT,
      senderssrc, GST_TIME_ARGS (pinfo->current_time));

  source = obtain_source (sess, senderssrc, &created, pinfo, FALSE);
  if (!source)
    return;

  /* skip non-bye packets for sources that are marked BYE */
  if (sess->scheduled_bye && RTP_SOURCE_IS_MARKED_BYE (source))
    goto out;

  /* don't try to do lip-sync for sources that sent a BYE */
  if (RTP_SOURCE_IS_MARKED_BYE (source))
    *do_sync = FALSE;
  else
    *do_sync = TRUE;

  /* sample sender state before updating so the transition can be detected */
  prevsender = RTP_SOURCE_IS_SENDER (source);

  /* first update the source */
  rtp_source_process_sr (source, pinfo->current_time, ntptime, rtptime,
      packet_count, octet_count);

  source_update_sender (sess, source, prevsender);

  if (created)
    on_new_ssrc (sess, source);

  rtp_session_process_rb (sess, source, packet, pinfo);

out:
  /* drop the extra ref taken by obtain_source() */
  g_object_unref (source);
}
+
+/* A receiver report contains statistics about how a receiver is doing. It
+ * includes stuff like packet loss, jitter and the seqnum it received last. It
+ * also contains info to calculate the round trip time.
+ *
+ * We are only interested in how the sender of this report is doing wrt to us.
+ */
+static void
+rtp_session_process_rr (RTPSession * sess, GstRTCPPacket * packet,
+ RTPPacketInfo * pinfo)
+{
+ guint32 senderssrc;
+ RTPSource *source;
+ gboolean created;
+
+ senderssrc = gst_rtcp_packet_rr_get_ssrc (packet);
+
+ GST_DEBUG ("got RR packet: SSRC %08x", senderssrc);
+
+ source = obtain_source (sess, senderssrc, &created, pinfo, FALSE);
+ if (!source)
+ return;
+
+ /* skip non-bye packets for sources that are marked BYE */
+ if (sess->scheduled_bye && RTP_SOURCE_IS_MARKED_BYE (source))
+ goto out;
+
+ if (created)
+ on_new_ssrc (sess, source);
+
+ rtp_session_process_rb (sess, source, packet, pinfo);
+
+out:
+ g_object_unref (source);
+}
+
+/* Get SDES items and store them in the SSRC */
+static void
+rtp_session_process_sdes (RTPSession * sess, GstRTCPPacket * packet,
+ RTPPacketInfo * pinfo)
+{
+ guint items, i, j;
+ gboolean more_items, more_entries;
+
+ items = gst_rtcp_packet_sdes_get_item_count (packet);
+ GST_DEBUG ("got SDES packet with %d items", items);
+
+ more_items = gst_rtcp_packet_sdes_first_item (packet);
+ i = 0;
+ while (more_items) {
+ guint32 ssrc;
+ gboolean changed, created, prevactive;
+ RTPSource *source;
+ GstStructure *sdes;
+
+ ssrc = gst_rtcp_packet_sdes_get_ssrc (packet);
+
+ GST_DEBUG ("item %d, SSRC %08x", i, ssrc);
+
+ changed = FALSE;
+
+ /* find src, no probation when dealing with RTCP */
+ source = obtain_source (sess, ssrc, &created, pinfo, FALSE);
+ if (!source)
+ return;
+
+ /* skip non-bye packets for sources that are marked BYE */
+ if (sess->scheduled_bye && RTP_SOURCE_IS_MARKED_BYE (source))
+ goto next;
+
+ sdes = gst_structure_new_empty ("application/x-rtp-source-sdes");
+
+ more_entries = gst_rtcp_packet_sdes_first_entry (packet);
+ j = 0;
+ while (more_entries) {
+ GstRTCPSDESType type;
+ guint8 len;
+ guint8 *data;
+ gchar *name;
+ gchar *value;
+
+ gst_rtcp_packet_sdes_get_entry (packet, &type, &len, &data);
+
+ GST_DEBUG ("entry %d, type %d, len %d, data %.*s", j, type, len, len,
+ data);
+
+ if (type == GST_RTCP_SDES_PRIV) {
+ name = g_strndup ((const gchar *) &data[1], data[0]);
+ len -= data[0] + 1;
+ data += data[0] + 1;
+ } else {
+ name = g_strdup (gst_rtcp_sdes_type_to_name (type));
+ }
+
+ value = g_strndup ((const gchar *) data, len);
+
+ if (g_utf8_validate (value, -1, NULL)) {
+ gst_structure_set (sdes, name, G_TYPE_STRING, value, NULL);
+ } else {
+ GST_WARNING ("ignore SDES field %s with non-utf8 data %s", name, value);
+ }
+
+ g_free (name);
+ g_free (value);
+
+ more_entries = gst_rtcp_packet_sdes_next_entry (packet);
+ j++;
+ }
+
+ /* takes ownership of sdes */
+ changed = rtp_source_set_sdes_struct (source, sdes);
+
+ prevactive = RTP_SOURCE_IS_ACTIVE (source);
+ source->validated = TRUE;
+
+ if (created)
+ on_new_ssrc (sess, source);
+
+ /* source became active */
+ if (source_update_active (sess, source, prevactive))
+ on_ssrc_validated (sess, source);
+
+ if (changed)
+ on_ssrc_sdes (sess, source);
+
+ next:
+ g_object_unref (source);
+
+ more_items = gst_rtcp_packet_sdes_next_item (packet);
+ i++;
+ }
+}
+
/* BYE is sent when a client leaves the session.
 *
 * Marks every listed SSRC as BYE and, when the member count dropped,
 * performs "reverse reconsideration" (RFC 3550 6.3.4): the next RTCP check
 * time is scaled down proportionally to the new member count so remaining
 * members speed up their reports.
 *
 * Must be called with the session lock held (temporarily released around
 * the reconsider callback).
 */
static void
rtp_session_process_bye (RTPSession * sess, GstRTCPPacket * packet,
    RTPPacketInfo * pinfo)
{
  guint count, i;
  gchar *reason;
  gboolean reconsider = FALSE;

  reason = gst_rtcp_packet_bye_get_reason (packet);
  GST_DEBUG ("got BYE packet (reason: %s)", GST_STR_NULL (reason));

  count = gst_rtcp_packet_bye_get_ssrc_count (packet);
  for (i = 0; i < count; i++) {
    guint32 ssrc;
    RTPSource *source;
    gboolean prevactive, prevsender;
    guint pmembers, members;

    ssrc = gst_rtcp_packet_bye_get_nth_ssrc (packet, i);
    GST_DEBUG ("SSRC: %08x", ssrc);

    /* find src and mark bye, no probation when dealing with RTCP */
    source = find_source (sess, ssrc);
    if (!source || source->internal) {
      /* a BYE for an unknown or for one of our own SSRCs is suspicious;
       * stop processing the whole packet */
      GST_DEBUG ("Ignoring suspicious BYE packet (reason: %s)",
          !source ? "can't find source" : "has internal source SSRC");
      break;
    }

    /* store time for when we need to time out this source */
    source->bye_time = pinfo->current_time;

    prevactive = RTP_SOURCE_IS_ACTIVE (source);
    prevsender = RTP_SOURCE_IS_SENDER (source);

    /* mark the source BYE */
    rtp_source_mark_bye (source, reason);

    pmembers = sess->stats.active_sources;

    source_update_active (sess, source, prevactive);
    source_update_sender (sess, source, prevsender);

    members = sess->stats.active_sources;

    if (!sess->scheduled_bye && members < pmembers) {
      /* some members went away since the previous timeout estimate.
       * Perform reverse reconsideration but only when we are not scheduling a
       * BYE ourselves. */
      if (sess->next_rtcp_check_time != GST_CLOCK_TIME_NONE &&
          pinfo->current_time < sess->next_rtcp_check_time) {
        GstClockTime time_remaining;

        /* Scale our next RTCP check time according to the change of numbers
         * of members. But only if a) this is the first RTCP, or b) this is not
         * a feedback session, or c) this is a feedback session but we schedule
         * for every RTCP interval (aka no t-rr-interval set).
         *
         * FIXME: a) and b) are not great as we will possibly go below Tmin
         * for non-feedback profiles and in case of a) below
         * Tmin/t-rr-interval in any case.
         */
        if (sess->last_rtcp_send_time == GST_CLOCK_TIME_NONE ||
            !(sess->rtp_profile == GST_RTP_PROFILE_AVPF
                || sess->rtp_profile == GST_RTP_PROFILE_SAVPF) ||
            sess->next_rtcp_check_time - sess->last_rtcp_send_time ==
            sess->last_rtcp_interval) {
          time_remaining = sess->next_rtcp_check_time - pinfo->current_time;
          sess->next_rtcp_check_time =
              gst_util_uint64_scale (time_remaining, members, pmembers);
          sess->next_rtcp_check_time += pinfo->current_time;
        }
        sess->last_rtcp_interval =
            gst_util_uint64_scale (sess->last_rtcp_interval, members, pmembers);

        GST_DEBUG ("reverse reconsideration %" GST_TIME_FORMAT,
            GST_TIME_ARGS (sess->next_rtcp_check_time));

        /* mark pending reconsider. We only want to signal the reconsideration
         * once after we handled all the source in the bye packet */
        reconsider = TRUE;
      }
    }

    on_bye_ssrc (sess, source);
  }
  if (reconsider) {
    RTP_SESSION_UNLOCK (sess);
    /* notify app of reconsideration */
    if (sess->callbacks.reconsider)
      sess->callbacks.reconsider (sess, sess->reconsider_user_data);
    RTP_SESSION_LOCK (sess);
  }

  g_free (reason);
}
+
/* Handle an application-defined (APP) RTCP packet.
 *
 * If anyone is connected to the on-app-rtcp signal, wrap the APP payload in
 * a buffer (zero-copy region of the RTCP buffer) and emit the signal with
 * the subtype, SSRC, 4-character name and data.  The session lock is
 * released while the signal runs.
 */
static void
rtp_session_process_app (RTPSession * sess, GstRTCPPacket * packet,
    RTPPacketInfo * pinfo)
{
  GST_DEBUG ("received APP");

  if (g_signal_has_handler_pending (sess,
          rtp_session_signals[SIGNAL_ON_APP_RTCP], 0, TRUE)) {
    GstBuffer *data_buffer = NULL;
    guint16 data_length;
    gchar name[5];

    /* data length is in 32-bit words */
    data_length = gst_rtcp_packet_app_get_data_length (packet) * 4;
    if (data_length > 0) {
      guint8 *data = gst_rtcp_packet_app_get_data (packet);
      data_buffer = gst_buffer_copy_region (packet->rtcp->buffer,
          GST_BUFFER_COPY_MEMORY, data - packet->rtcp->map.data, data_length);
      GST_BUFFER_PTS (data_buffer) = pinfo->running_time;
    }

    /* APP names are exactly 4 bytes, not NUL-terminated in the packet */
    memcpy (name, gst_rtcp_packet_app_get_name (packet), 4);
    name[4] = '\0';

    RTP_SESSION_UNLOCK (sess);
    g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_APP_RTCP], 0,
        gst_rtcp_packet_app_get_subtype (packet),
        gst_rtcp_packet_app_get_ssrc (packet), name, data_buffer);
    RTP_SESSION_LOCK (sess);

    if (data_buffer)
      gst_buffer_unref (data_buffer);
  }
}
+
/* Ask the application (via the request_key_unit callback) for a new key
 * unit on behalf of @src, in response to a PLI (@fir == FALSE) or FIR
 * (@fir == TRUE).
 *
 * Requests arriving within roughly two round-trip times of the previous
 * one are rate-limited and ignored.  Returns TRUE when the callback was
 * invoked.  Called with the session lock held; the lock is released
 * around the callback.
 */
static gboolean
rtp_session_request_local_key_unit (RTPSession * sess, RTPSource * src,
    guint32 media_ssrc, gboolean fir, GstClockTime current_time)
{
  guint32 round_trip = 0;

  rtp_source_get_last_rb (src, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
      &round_trip);

  if (src->last_keyframe_request != GST_CLOCK_TIME_NONE && round_trip) {
    /* round_trip is in Q16 seconds (NTP short format) */
    GstClockTime round_trip_in_ns = gst_util_uint64_scale (round_trip,
        GST_SECOND, 65536);

    /* Sanity check to avoid always ignoring PLI/FIR if we receive RTCP
     * packets with erroneous values resulting in crazy high RTT. */
    if (round_trip_in_ns > 5 * GST_SECOND)
      round_trip_in_ns = GST_SECOND / 2;

    if (current_time - src->last_keyframe_request < 2 * round_trip_in_ns) {
      GST_DEBUG ("Ignoring %s request from %X because one was send without one "
          "RTT (%" GST_TIME_FORMAT " < %" GST_TIME_FORMAT ")",
          fir ? "FIR" : "PLI", rtp_source_get_ssrc (src),
          GST_TIME_ARGS (current_time - src->last_keyframe_request),
          GST_TIME_ARGS (round_trip_in_ns));
      return FALSE;
    }
  }

  src->last_keyframe_request = current_time;

  GST_LOG ("received %s request from %X about %X %p(%p)", fir ? "FIR" : "PLI",
      rtp_source_get_ssrc (src), media_ssrc, sess->callbacks.process_rtp,
      sess->callbacks.request_key_unit);

  RTP_SESSION_UNLOCK (sess);
  sess->callbacks.request_key_unit (sess, media_ssrc, fir,
      sess->request_key_unit_user_data);
  RTP_SESSION_LOCK (sess);

  return TRUE;
}
+
+static void
+rtp_session_process_pli (RTPSession * sess, guint32 sender_ssrc,
+ guint32 media_ssrc, GstClockTime current_time)
+{
+ RTPSource *src;
+
+ if (!sess->callbacks.request_key_unit)
+ return;
+
+ src = find_source (sess, sender_ssrc);
+ if (src == NULL) {
+ /* try to find a src with media_ssrc instead */
+ src = find_source (sess, media_ssrc);
+ if (src == NULL)
+ return;
+ }
+
+ rtp_session_request_local_key_unit (sess, src, media_ssrc, FALSE,
+ current_time);
+}
+
/* Handle a Full Intra Request (FIR, RFC 5104): if any FCI entry targets
 * one of our internal sources, request a key unit from the application.
 *
 * Each FCI entry is 8 bytes: the target SSRC followed by a command
 * sequence number and padding.  Called with the session lock held.
 */
static void
rtp_session_process_fir (RTPSession * sess, guint32 sender_ssrc,
    guint32 media_ssrc, guint8 * fci_data, guint fci_length,
    GstClockTime current_time)
{
  RTPSource *src;
  guint32 ssrc;
  guint position = 0;
  gboolean our_request = FALSE;

  if (!sess->callbacks.request_key_unit)
    return;

  /* need at least one complete FCI entry */
  if (fci_length < 8)
    return;

  src = find_source (sess, sender_ssrc);

  /* Hack because Google fails to set the sender_ssrc correctly */
  if (!src && sender_ssrc == 1) {
    GHashTableIter iter;

    /* we can't find the source if there are multiple */
    if (sess->stats.sender_sources > sess->stats.internal_sender_sources + 1)
      return;

    /* pick the single external sender as the requester */
    g_hash_table_iter_init (&iter, sess->ssrcs[sess->mask_idx]);
    while (g_hash_table_iter_next (&iter, NULL, (gpointer *) & src)) {
      if (!src->internal && rtp_source_is_sender (src))
        break;
      src = NULL;
    }
  }
  if (!src)
    return;

  /* scan the FCI entries for an SSRC that belongs to us */
  for (position = 0; position < fci_length; position += 8) {
    guint8 *data = fci_data + position;
    RTPSource *own;

    ssrc = GST_READ_UINT32_BE (data);

    own = find_source (sess, ssrc);
    if (own == NULL)
      continue;

    if (own->internal) {
      our_request = TRUE;
      break;
    }
  }
  if (!our_request)
    return;

  rtp_session_request_local_key_unit (sess, src, media_ssrc, TRUE,
      current_time);
}
+
+static void
+rtp_session_process_nack (RTPSession * sess, guint32 sender_ssrc,
+ guint32 media_ssrc, guint8 * fci_data, guint fci_length,
+ GstClockTime current_time)
+{
+ sess->stats.nacks_received++;
+
+ if (!sess->callbacks.notify_nack)
+ return;
+
+ while (fci_length > 0) {
+ guint16 seqnum, blp;
+
+ seqnum = GST_READ_UINT16_BE (fci_data);
+ blp = GST_READ_UINT16_BE (fci_data + 2);
+
+ GST_DEBUG ("NACK #%u, blp %04x, SSRC 0x%08x", seqnum, blp, media_ssrc);
+
+ RTP_SESSION_UNLOCK (sess);
+ sess->callbacks.notify_nack (sess, seqnum, blp, media_ssrc,
+ sess->notify_nack_user_data);
+ RTP_SESSION_LOCK (sess);
+
+ fci_data += 4;
+ fci_length -= 4;
+ }
+}
+
/* Handle a Transport-Wide Congestion Control (TWCC) feedback packet:
 * parse the FCI into per-packet arrival info, update the TWCC stats and
 * hand both structures to the application via the notify_twcc callback.
 *
 * NOTE(review): the callback appears to take ownership of twcc_packets_s
 * and twcc_stats_s; if notify_twcc is NULL they are seemingly not freed
 * here — confirm ownership against the callback contract.
 */
static void
rtp_session_process_twcc (RTPSession * sess, guint32 sender_ssrc,
    guint32 media_ssrc, guint8 * fci_data, guint fci_length)
{
  GArray *twcc_packets;
  GstStructure *twcc_packets_s;
  GstStructure *twcc_stats_s;

  /* fci_length is in 32-bit words at this point */
  twcc_packets = rtp_twcc_manager_parse_fci (sess->twcc,
      fci_data, fci_length * sizeof (guint32));
  if (twcc_packets == NULL)
    return;

  twcc_packets_s = rtp_twcc_stats_get_packets_structure (twcc_packets);
  twcc_stats_s =
      rtp_twcc_stats_process_packets (sess->twcc_stats, twcc_packets);

  GST_DEBUG_OBJECT (sess, "Parsed TWCC: %" GST_PTR_FORMAT, twcc_packets_s);
  GST_INFO_OBJECT (sess, "Current TWCC stats %" GST_PTR_FORMAT, twcc_stats_s);

  g_array_unref (twcc_packets);

  /* drop the lock while calling out to the application */
  RTP_SESSION_UNLOCK (sess);
  if (sess->callbacks.notify_twcc)
    sess->callbacks.notify_twcc (sess, twcc_packets_s, twcc_stats_s,
        sess->notify_twcc_user_data);
  RTP_SESSION_LOCK (sess);
}
+
+static void
+rtp_session_process_feedback (RTPSession * sess, GstRTCPPacket * packet,
+ RTPPacketInfo * pinfo, GstClockTime current_time)
+{
+ GstRTCPType type;
+ GstRTCPFBType fbtype;
+ guint32 sender_ssrc, media_ssrc;
+ guint8 *fci_data;
+ guint fci_length;
+ RTPSource *src;
+
+ /* The feedback packet must include both sender SSRC and media SSRC */
+ if (packet->length < 2)
+ return;
+
+ type = gst_rtcp_packet_get_type (packet);
+ fbtype = gst_rtcp_packet_fb_get_type (packet);
+ sender_ssrc = gst_rtcp_packet_fb_get_sender_ssrc (packet);
+ media_ssrc = gst_rtcp_packet_fb_get_media_ssrc (packet);
+
+ src = find_source (sess, media_ssrc);
+
+ /* skip non-bye packets for sources that are marked BYE */
+ if (sess->scheduled_bye && src && RTP_SOURCE_IS_MARKED_BYE (src))
+ return;
+
+ if (src)
+ g_object_ref (src);
+
+ fci_data = gst_rtcp_packet_fb_get_fci (packet);
+ fci_length = gst_rtcp_packet_fb_get_fci_length (packet) * sizeof (guint32);
+
+ GST_DEBUG ("received feedback %d:%d from %08X about %08X with FCI of "
+ "length %d", type, fbtype, sender_ssrc, media_ssrc, fci_length);
+
+ if (g_signal_has_handler_pending (sess,
+ rtp_session_signals[SIGNAL_ON_FEEDBACK_RTCP], 0, TRUE)) {
+ GstBuffer *fci_buffer = NULL;
+
+ if (fci_length > 0) {
+ fci_buffer = gst_buffer_copy_region (packet->rtcp->buffer,
+ GST_BUFFER_COPY_MEMORY, fci_data - packet->rtcp->map.data,
+ fci_length);
+ GST_BUFFER_PTS (fci_buffer) = pinfo->running_time;
+ }
+
+ RTP_SESSION_UNLOCK (sess);
+ g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_FEEDBACK_RTCP], 0,
+ type, fbtype, sender_ssrc, media_ssrc, fci_buffer);
+ RTP_SESSION_LOCK (sess);
+
+ if (fci_buffer)
+ gst_buffer_unref (fci_buffer);
+ }
+
+ if (src && sess->rtcp_feedback_retention_window != GST_CLOCK_TIME_NONE) {
+ rtp_source_retain_rtcp_packet (src, packet, pinfo->running_time);
+ }
+
+ if ((src && src->internal) ||
+ /* PSFB FIR puts the media ssrc inside the FCI */
+ (type == GST_RTCP_TYPE_PSFB && fbtype == GST_RTCP_PSFB_TYPE_FIR) ||
+ /* TWCC is for all sources, so a single media-ssrc is not enough */
+ (type == GST_RTCP_TYPE_RTPFB && fbtype == GST_RTCP_RTPFB_TYPE_TWCC)) {
+ switch (type) {
+ case GST_RTCP_TYPE_PSFB:
+ switch (fbtype) {
+ case GST_RTCP_PSFB_TYPE_PLI:
+ if (src)
+ src->stats.recv_pli_count++;
+ rtp_session_process_pli (sess, sender_ssrc, media_ssrc,
+ current_time);
+ break;
+ case GST_RTCP_PSFB_TYPE_FIR:
+ if (src)
+ src->stats.recv_fir_count++;
+ rtp_session_process_fir (sess, sender_ssrc, media_ssrc, fci_data,
+ fci_length, current_time);
+ break;
+ default:
+ break;
+ }
+ break;
+ case GST_RTCP_TYPE_RTPFB:
+ switch (fbtype) {
+ case GST_RTCP_RTPFB_TYPE_NACK:
+ if (src)
+ src->stats.recv_nack_count++;
+ rtp_session_process_nack (sess, sender_ssrc, media_ssrc,
+ fci_data, fci_length, current_time);
+ break;
+ case GST_RTCP_RTPFB_TYPE_TWCC:
+ rtp_session_process_twcc (sess, sender_ssrc, media_ssrc,
+ fci_data, fci_length);
+ break;
+ default:
+ break;
+ }
+ default:
+ break;
+ }
+ }
+
+ if (src)
+ g_object_unref (src);
+}
+
/**
 * rtp_session_process_rtcp:
 * @sess: and #RTPSession
 * @buffer: an RTCP buffer
 * @current_time: the current system time
 * @ntpnstime: the current NTP time in nanoseconds
 *
 * Process an RTCP buffer in the session manager. This function takes ownership
 * of @buffer.
 *
 * Validates the (possibly reduced-size) compound packet, then dispatches
 * each contained packet (SR/RR/SDES/BYE/APP/feedback) to its handler and
 * updates the average RTCP packet size statistics.  SR packets may trigger
 * the sync_rtcp callback for inter-stream synchronization.
 *
 * Returns: a #GstFlowReturn.
 */
GstFlowReturn
rtp_session_process_rtcp (RTPSession * sess, GstBuffer * buffer,
    GstClockTime current_time, GstClockTime running_time, guint64 ntpnstime)
{
  GstRTCPPacket packet;
  gboolean more, is_bye = FALSE, do_sync = FALSE;
  RTPPacketInfo pinfo = { 0, };
  GstFlowReturn result = GST_FLOW_OK;
  GstRTCPBuffer rtcp = { NULL, };

  g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
  g_return_val_if_fail (GST_IS_BUFFER (buffer), GST_FLOW_ERROR);

  if (!gst_rtcp_buffer_validate_reduced (buffer))
    goto invalid_packet;

  GST_DEBUG ("received RTCP packet");

  g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_RECEIVING_RTCP], 0,
      buffer);

  RTP_SESSION_LOCK (sess);
  /* update pinfo stats */
  update_packet_info (sess, &pinfo, FALSE, FALSE, FALSE, buffer, current_time,
      running_time, ntpnstime);

  /* start processing the compound packet */
  gst_rtcp_buffer_map (buffer, GST_MAP_READ, &rtcp);
  more = gst_rtcp_buffer_get_first_packet (&rtcp, &packet);
  while (more) {
    GstRTCPType type;

    type = gst_rtcp_packet_get_type (&packet);

    switch (type) {
      case GST_RTCP_TYPE_SR:
        rtp_session_process_sr (sess, &packet, &pinfo, &do_sync);
        break;
      case GST_RTCP_TYPE_RR:
        rtp_session_process_rr (sess, &packet, &pinfo);
        break;
      case GST_RTCP_TYPE_SDES:
        rtp_session_process_sdes (sess, &packet, &pinfo);
        break;
      case GST_RTCP_TYPE_BYE:
        is_bye = TRUE;
        /* don't try to attempt lip-sync anymore for streams with a BYE */
        do_sync = FALSE;
        rtp_session_process_bye (sess, &packet, &pinfo);
        break;
      case GST_RTCP_TYPE_APP:
        rtp_session_process_app (sess, &packet, &pinfo);
        break;
      case GST_RTCP_TYPE_RTPFB:
      case GST_RTCP_TYPE_PSFB:
        rtp_session_process_feedback (sess, &packet, &pinfo, current_time);
        break;
      case GST_RTCP_TYPE_XR:
        /* FIXME: This block is added to downgrade warning level.
         * Once the parser is implemented, it should be replaced with
         * a proper process function. */
        GST_DEBUG ("got RTCP XR packet, but ignored");
        break;
      default:
        GST_WARNING ("got unknown RTCP packet type: %d", type);
        break;
    }
    more = gst_rtcp_packet_move_to_next (&packet);
  }

  gst_rtcp_buffer_unmap (&rtcp);

  /* if we are scheduling a BYE, we only want to count bye packets, else we
   * count everything */
  if (sess->scheduled_bye && is_bye) {
    sess->bye_stats.bye_members++;
    UPDATE_AVG (sess->bye_stats.avg_rtcp_packet_size, pinfo.bytes);
  }

  /* keep track of average packet size */
  UPDATE_AVG (sess->stats.avg_rtcp_packet_size, pinfo.bytes);

  GST_DEBUG ("%p, received RTCP packet, avg size %u, %u", &sess->stats,
      sess->stats.avg_rtcp_packet_size, pinfo.bytes);
  RTP_SESSION_UNLOCK (sess);

  /* @buffer is still owned by the caller path below; don't let
   * clean_packet_info() drop it */
  pinfo.data = NULL;
  clean_packet_info (&pinfo);

  /* notify caller of sr packets in the callback; the callback takes
   * ownership of the buffer, otherwise we unref it here */
  if (do_sync && sess->callbacks.sync_rtcp) {
    result = sess->callbacks.sync_rtcp (sess, buffer,
        sess->sync_rtcp_user_data);
  } else
    gst_buffer_unref (buffer);

  return result;

  /* ERRORS */
invalid_packet:
  {
    GST_DEBUG ("invalid RTCP packet received");
    gst_buffer_unref (buffer);
    return GST_FLOW_OK;
  }
}
+
/**
 * rtp_session_update_send_caps:
 * @sess: an #RTPSession
 * @caps: a #GstCaps
 *
 * Update the caps of the sender in the rtp session.
 *
 * When the caps carry an "ssrc" field it becomes the suggested internal
 * SSRC (and likewise "rtx-ssrc" for the retransmission stream); both
 * sources are created if needed and updated with @caps.  The TWCC
 * extension id is also (re)parsed from the caps.
 */
void
rtp_session_update_send_caps (RTPSession * sess, GstCaps * caps)
{
  GstStructure *s;
  guint ssrc;

  g_return_if_fail (RTP_IS_SESSION (sess));
  g_return_if_fail (GST_IS_CAPS (caps));

  GST_LOG ("received caps %" GST_PTR_FORMAT, caps);

  s = gst_caps_get_structure (caps, 0);

  if (gst_structure_get_uint (s, "ssrc", &ssrc)) {
    RTPSource *source;
    gboolean created;

    RTP_SESSION_LOCK (sess);
    source = obtain_internal_source (sess, ssrc, &created, GST_CLOCK_TIME_NONE);
    /* the caps-provided ssrc overrides any random suggestion */
    sess->suggested_ssrc = ssrc;
    sess->internal_ssrc_set = TRUE;
    sess->internal_ssrc_from_caps_or_property = TRUE;
    if (source) {
      rtp_source_update_caps (source, caps);

      if (created)
        on_new_sender_ssrc (sess, source);

      g_object_unref (source);
    }

    /* a retransmission stream gets its own internal source */
    if (gst_structure_get_uint (s, "rtx-ssrc", &ssrc)) {
      source =
          obtain_internal_source (sess, ssrc, &created, GST_CLOCK_TIME_NONE);
      if (source) {
        rtp_source_update_caps (source, caps);

        if (created)
          on_new_sender_ssrc (sess, source);

        g_object_unref (source);
      }
    }
    RTP_SESSION_UNLOCK (sess);
  } else {
    sess->internal_ssrc_from_caps_or_property = FALSE;
  }

  rtp_twcc_manager_parse_send_ext_id (sess->twcc, s);
}
+
/**
 * rtp_session_send_rtp:
 * @sess: an #RTPSession
 * @data: pointer to either an RTP buffer or a list of RTP buffers
 * @is_list: TRUE when @data is a buffer list
 * @current_time: the current system time
 * @running_time: the running time of @data
 *
 * Send the RTP data (a buffer or buffer list) in the session manager. This
 * function takes ownership of @data.
 *
 * Detects SSRC collisions between our internal sender source and a remote
 * source with the same SSRC, dropping the packet (and possibly changing
 * our SSRC) when one is found.
 *
 * Returns: a #GstFlowReturn.
 */
GstFlowReturn
rtp_session_send_rtp (RTPSession * sess, gpointer data, gboolean is_list,
    GstClockTime current_time, GstClockTime running_time)
{
  GstFlowReturn result;
  RTPSource *source;
  gboolean prevsender;
  guint64 oldrate;
  RTPPacketInfo pinfo = { 0, };
  gboolean created;

  g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
  g_return_val_if_fail (is_list || GST_IS_BUFFER (data), GST_FLOW_ERROR);

  GST_LOG ("received RTP %s for sending", is_list ? "list" : "packet");

  RTP_SESSION_LOCK (sess);
  if (!update_packet_info (sess, &pinfo, TRUE, TRUE, is_list, data,
          current_time, running_time, -1))
    goto invalid_packet;

  /* let TWCC add its sequence numbers / bookkeeping */
  rtp_twcc_manager_send_packet (sess->twcc, &pinfo);

  source = obtain_internal_source (sess, pinfo.ssrc, &created, current_time);
  if (created)
    on_new_sender_ssrc (sess, source);

  if (!source->internal) {
    /* the SSRC we want to send with already exists as a remote source */
    GSocketAddress *from;

    if (source->rtp_from)
      from = source->rtp_from;
    else
      from = source->rtcp_from;
    if (from) {
      if (rtp_session_find_conflicting_address (sess, from, current_time)) {
        /* Its a known conflict, its probably a loop, not a collision
         * lets just drop the incoming packet
         */
        GST_LOG ("Our packets are being looped back to us, ignoring collision");
      } else {
        GST_DEBUG ("Collision for SSRC %x, change our sender ssrc", pinfo.ssrc);

        rtp_session_have_conflict (sess, source, from, current_time);
      }
    } else {
      GST_LOG ("Ignoring collision on sent SSRC %x because remote source"
          " doesn't have an address", pinfo.ssrc);
    }

    /* if the sending source is not internal, we have to drop the packet,
       or else we will end up receiving it ourselves! */
    goto collision;
  }

  prevsender = RTP_SOURCE_IS_SENDER (source);
  oldrate = source->bitrate;

  /* we use our own source to send */
  result = rtp_source_send_rtp (source, &pinfo);

  source_update_sender (sess, source, prevsender);

  /* bitrate changes feed back into the RTCP interval calculation */
  if (oldrate != source->bitrate)
    sess->recalc_bandwidth = TRUE;
  RTP_SESSION_UNLOCK (sess);

  g_object_unref (source);
  clean_packet_info (&pinfo);

  return result;

invalid_packet:
  {
    gst_mini_object_unref (GST_MINI_OBJECT_CAST (data));
    RTP_SESSION_UNLOCK (sess);
    GST_DEBUG ("invalid RTP packet received");
    return GST_FLOW_OK;
  }
collision:
  {
    g_object_unref (source);
    clean_packet_info (&pinfo);
    RTP_SESSION_UNLOCK (sess);
    GST_WARNING ("non-internal source with same ssrc %08x, drop packet",
        pinfo.ssrc);
    return GST_FLOW_OK;
  }
}
+
+static void
+add_bitrates (gpointer key, RTPSource * source, gdouble * bandwidth)
+{
+ *bandwidth += source->bitrate;
+}
+
/* Calculate the interval until the next RTCP transmission.
 *
 * Recomputes the session bandwidth from the per-source bitrates when it
 * changed, then derives the interval from either the BYE stats (while a
 * BYE is scheduled) or the regular session stats.  When @deterministic is
 * FALSE, RTCP timer jitter is applied to the result; @first selects the
 * shorter first-packet interval.
 *
 * must be called with session lock */
static GstClockTime
calculate_rtcp_interval (RTPSession * sess, gboolean deterministic,
    gboolean first)
{
  GstClockTime result;
  RTPSessionStats *stats;

  /* recalculate bandwidth when it changed */
  if (sess->recalc_bandwidth) {
    gdouble bandwidth;

    if (sess->bandwidth > 0)
      bandwidth = sess->bandwidth;
    else {
      /* If it is <= 0, then try to estimate the actual bandwidth */
      bandwidth = 0;

      g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
          (GHFunc) add_bitrates, &bandwidth);
    }
    /* never go below the minimum bandwidth assumed by the RTCP stats */
    if (bandwidth < RTP_STATS_BANDWIDTH)
      bandwidth = RTP_STATS_BANDWIDTH;

    rtp_stats_set_bandwidths (&sess->stats, bandwidth,
        sess->rtcp_bandwidth, sess->rtcp_rs_bandwidth, sess->rtcp_rr_bandwidth);

    sess->recalc_bandwidth = FALSE;
  }

  if (sess->scheduled_bye) {
    stats = &sess->bye_stats;
    result = rtp_stats_calculate_bye_interval (stats);
  } else {
    session_update_ptp (sess);

    stats = &sess->stats;
    result = rtp_stats_calculate_rtcp_interval (stats,
        stats->internal_sender_sources > 0, sess->rtp_profile,
        sess->is_doing_ptp, first);
  }

  GST_DEBUG ("next deterministic interval: %" GST_TIME_FORMAT ", first %d",
      GST_TIME_ARGS (result), first);

  if (!deterministic && result != GST_CLOCK_TIME_NONE)
    result = rtp_stats_add_rtcp_jitter (stats, result);

  GST_DEBUG ("next interval: %" GST_TIME_FORMAT, GST_TIME_ARGS (result));

  return result;
}
+
+static void
+source_mark_bye (const gchar * key, RTPSource * source, const gchar * reason)
+{
+ if (source->internal)
+ rtp_source_mark_bye (source, reason);
+}
+
+/**
+ * rtp_session_mark_all_bye:
+ * @sess: an #RTPSession
+ * @reason: a reason
+ *
+ * Mark all internal sources of the session as BYE with @reason.
+ */
+void
+rtp_session_mark_all_bye (RTPSession * sess, const gchar * reason)
+{
+ g_return_if_fail (RTP_IS_SESSION (sess));
+
+ RTP_SESSION_LOCK (sess);
+ g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+ (GHFunc) source_mark_bye, (gpointer) reason);
+ RTP_SESSION_UNLOCK (sess);
+}
+
/* Stop the current @sess and schedule a BYE message for the other members.
 * One must have the session lock to call this function
 *
 * Switches the session into BYE mode: copies the current stats into the
 * BYE stats, restarts the RTCP timing from @current_time and pokes the
 * application's reconsider callback (with the lock temporarily released)
 * so the new, shorter timeout is picked up.
 */
static GstFlowReturn
rtp_session_schedule_bye_locked (RTPSession * sess, GstClockTime current_time)
{
  GstFlowReturn result = GST_FLOW_OK;
  GstClockTime interval;

  /* nothing to do it we already scheduled bye */
  if (sess->scheduled_bye)
    goto done;

  /* we schedule BYE now */
  sess->scheduled_bye = TRUE;
  /* at least one member wants to send a BYE */
  memcpy (&sess->bye_stats, &sess->stats, sizeof (RTPSessionStats));
  INIT_AVG (sess->bye_stats.avg_rtcp_packet_size, 100);
  sess->bye_stats.bye_members = 1;
  sess->first_rtcp = TRUE;

  /* reschedule transmission */
  sess->last_rtcp_send_time = current_time;
  sess->last_rtcp_check_time = current_time;
  interval = calculate_rtcp_interval (sess, FALSE, TRUE);

  if (interval != GST_CLOCK_TIME_NONE)
    sess->next_rtcp_check_time = current_time + interval;
  else
    sess->next_rtcp_check_time = GST_CLOCK_TIME_NONE;
  sess->last_rtcp_interval = interval;

  GST_DEBUG ("Schedule BYE for %" GST_TIME_FORMAT ", %" GST_TIME_FORMAT,
      GST_TIME_ARGS (interval), GST_TIME_ARGS (sess->next_rtcp_check_time));

  RTP_SESSION_UNLOCK (sess);
  /* notify app of reconsideration */
  if (sess->callbacks.reconsider)
    sess->callbacks.reconsider (sess, sess->reconsider_user_data);
  RTP_SESSION_LOCK (sess);
done:

  return result;
}
+
+/**
+ * rtp_session_schedule_bye:
+ * @sess: an #RTPSession
+ * @current_time: the current system time
+ *
+ * Schedule a BYE message for all sources marked as BYE in @sess.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+GstFlowReturn
+rtp_session_schedule_bye (RTPSession * sess, GstClockTime current_time)
+{
+ GstFlowReturn result;
+
+ g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
+
+ RTP_SESSION_LOCK (sess);
+ result = rtp_session_schedule_bye_locked (sess, current_time);
+ RTP_SESSION_UNLOCK (sess);
+
+ return result;
+}
+
/**
 * rtp_session_next_timeout:
 * @sess: an #RTPSession
 * @current_time: the current system time
 *
 * Get the next time we should perform session maintenance tasks.
 *
 * An early RTCP deadline, if set, wins.  Otherwise a new interval is
 * computed when this is the first RTCP, when the previous check time
 * expired, or (in BYE mode) when there are many members; for AVPF/SAVPF
 * profiles the RFC 4585 t-rr-interval rules are applied on top.
 *
 * Returns: a time when rtp_session_on_timeout() should be called with the
 * current system time.
 */
GstClockTime
rtp_session_next_timeout (RTPSession * sess, GstClockTime current_time)
{
  GstClockTime result, interval = 0;

  g_return_val_if_fail (RTP_IS_SESSION (sess), GST_CLOCK_TIME_NONE);

  RTP_SESSION_LOCK (sess);

  if (GST_CLOCK_TIME_IS_VALID (sess->next_early_rtcp_time)) {
    GST_DEBUG ("have early rtcp time");
    result = sess->next_early_rtcp_time;
    goto early_exit;
  }

  result = sess->next_rtcp_check_time;

  GST_DEBUG ("current time: %" GST_TIME_FORMAT
      ", next time: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (current_time), GST_TIME_ARGS (result));

  if (result == GST_CLOCK_TIME_NONE || result < current_time) {
    GST_DEBUG ("take current time as base");
    /* our previous check time expired, start counting from the current time
     * again. */
    result = current_time;
  }

  if (sess->scheduled_bye) {
    if (sess->bye_stats.active_sources >= 50) {
      GST_DEBUG ("reconsider BYE, more than 50 sources");
      /* reconsider BYE if members >= 50 */
      interval = calculate_rtcp_interval (sess, FALSE, TRUE);
      sess->last_rtcp_interval = interval;
    }
  } else {
    if (sess->first_rtcp) {
      GST_DEBUG ("first RTCP packet");
      /* we are called for the first time */
      interval = calculate_rtcp_interval (sess, FALSE, TRUE);
      sess->last_rtcp_interval = interval;
    } else if (sess->next_rtcp_check_time < current_time) {
      GST_DEBUG ("old check time expired, getting new timeout");
      /* get a new timeout when we need to */
      interval = calculate_rtcp_interval (sess, FALSE, FALSE);
      sess->last_rtcp_interval = interval;

      if ((sess->rtp_profile == GST_RTP_PROFILE_AVPF
              || sess->rtp_profile == GST_RTP_PROFILE_SAVPF)
          && interval != GST_CLOCK_TIME_NONE) {
        /* Apply the rules from RFC 4585 section 3.5.3 */
        if (sess->stats.min_interval != 0) {
          /* randomize T_rr between 0.5 and 1.5 times the minimum interval */
          GstClockTime T_rr_current_interval = g_random_double_range (0.5,
              1.5) * sess->stats.min_interval * GST_SECOND;

          if (T_rr_current_interval > interval) {
            GST_DEBUG ("Adjusting interval for t-rr-interval: %" GST_TIME_FORMAT
                " > %" GST_TIME_FORMAT, GST_TIME_ARGS (T_rr_current_interval),
                GST_TIME_ARGS (interval));
            interval = T_rr_current_interval;
          }
        }
      }
    }
  }

  /* interval == 0 means "keep the previously scheduled check time" */
  if (interval != GST_CLOCK_TIME_NONE)
    result += interval;
  else
    result = GST_CLOCK_TIME_NONE;

  sess->next_rtcp_check_time = result;

early_exit:

  GST_DEBUG ("current time: %" GST_TIME_FORMAT
      ", next time: %" GST_TIME_FORMAT,
      GST_TIME_ARGS (current_time), GST_TIME_ARGS (result));
  RTP_SESSION_UNLOCK (sess);

  return result;
}
+
/* One finished RTCP report buffer queued for sending. */
typedef struct
{
  RTPSource *source;            /* the internal source the report is for */
  gboolean is_bye;              /* TRUE when the buffer contains a BYE */
  GstBuffer *buffer;            /* the serialized RTCP compound packet */
} ReportOutput;
+
/* Scratch state shared by the RTCP report construction helpers while
 * building the reports for one timeout. */
typedef struct
{
  GstRTCPBuffer rtcpbuf;        /* mapped view of the RTCP buffer under construction */
  RTPSession *sess;
  RTPSource *source;            /* internal source currently being reported on */
  guint num_to_report;
  gboolean have_fir;
  gboolean have_pli;
  gboolean have_nack;
  GstBuffer *rtcp;              /* the buffer being filled */
  GstClockTime current_time;
  guint64 ntpnstime;            /* current NTP time in nanoseconds */
  GstClockTime running_time;
  GstClockTime interval;
  GstRTCPPacket packet;         /* packet cursor inside rtcpbuf */
  gboolean has_sdes;
  gboolean is_early;            /* building an early (feedback) packet */
  gboolean may_suppress;
  GQueue output;                /* queue of ReportOutput to send */
  guint nacked_seqnums;
} ReportData;
+
/* Begin a new RTCP compound packet in @data: allocate and map the buffer
 * and, unless this is an early reduced-size packet, open it with an SR
 * (when our source is a sender) or an RR.
 *
 * The buffer stays mapped in data->rtcpbuf for the subsequent helpers.
 */
static void
session_start_rtcp (RTPSession * sess, ReportData * data)
{
  GstRTCPPacket *packet = &data->packet;
  RTPSource *own = data->source;
  GstRTCPBuffer *rtcp = &data->rtcpbuf;

  data->rtcp = gst_rtcp_buffer_new (sess->mtu);
  data->has_sdes = FALSE;

  gst_rtcp_buffer_map (data->rtcp, GST_MAP_READWRITE, rtcp);

  /* early reduced-size RTCP skips the SR/RR entirely */
  if (data->is_early && sess->reduced_size_rtcp)
    return;

  if (RTP_SOURCE_IS_SENDER (own)) {
    guint64 ntptime;
    guint32 rtptime;
    guint32 packet_count, octet_count;

    /* we are a sender, create SR */
    GST_DEBUG ("create SR for SSRC %08x", own->ssrc);
    gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_SR, packet);

    /* get latest stats */
    rtp_source_get_new_sr (own, data->ntpnstime, data->running_time,
        &ntptime, &rtptime, &packet_count, &octet_count);
    /* store stats */
    rtp_source_process_sr (own, data->current_time, ntptime, rtptime,
        packet_count, octet_count);

    /* fill in sender report info; timestamps can be suppressed for
     * privacy via the timestamp-sender-reports property */
    gst_rtcp_packet_sr_set_sender_info (packet, own->ssrc,
        sess->timestamp_sender_reports ? ntptime : 0,
        sess->timestamp_sender_reports ? rtptime : 0,
        packet_count, octet_count);
  } else {
    /* we are only receiver, create RR */
    GST_DEBUG ("create RR for SSRC %08x", own->ssrc);
    gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_RR, packet);
    gst_rtcp_packet_rr_set_ssrc (packet, own->ssrc);
  }
}
+
+/* construct a Sender or Receiver Report */
+/* GHFunc over all known sources: append a report block about remote
+ * sender @source to the SR/RR being built for data->source, and mark it
+ * as reported by data->source for this generation.  Internal, non-sender
+ * and RTCP-disabled sources are marked reported without a block. */
+static void
+session_report_blocks (const gchar * key, RTPSource * source, ReportData * data)
+{
+  RTPSession *sess = data->sess;
+  GstRTCPPacket *packet = &data->packet;
+  guint8 fractionlost;
+  gint32 packetslost;
+  guint32 exthighestseq, jitter;
+  guint32 lsr, dlsr;
+
+  /* don't report for sources in future generations */
+  if (((gint16) (source->generation - sess->generation)) > 0) {
+    GST_DEBUG ("source %08x generation %u > %u", source->ssrc,
+        source->generation, sess->generation);
+    return;
+  }
+
+  /* each source is reported at most once per generation per reporter */
+  if (g_hash_table_contains (source->reported_in_sr_of,
+          GUINT_TO_POINTER (data->source->ssrc))) {
+    GST_DEBUG ("source %08x already reported in this generation", source->ssrc);
+    return;
+  }
+
+  /* an SR/RR holds at most GST_RTCP_MAX_RB_COUNT report blocks; the rest
+   * will be reported in a later round (not marked reported here) */
+  if (gst_rtcp_packet_get_rb_count (packet) == GST_RTCP_MAX_RB_COUNT) {
+    GST_DEBUG ("max RB count reached");
+    return;
+  }
+
+  /* only report about remote sources */
+  if (source->internal)
+    goto reported;
+
+  if (!RTP_SOURCE_IS_SENDER (source)) {
+    GST_DEBUG ("source %08x not sender", source->ssrc);
+    goto reported;
+  }
+
+  if (source->disable_rtcp) {
+    GST_DEBUG ("source %08x has RTCP disabled", source->ssrc);
+    goto reported;
+  }
+
+  GST_DEBUG ("create RB for SSRC %08x", source->ssrc);
+
+  /* get new stats */
+  rtp_source_get_new_rb (source, data->current_time, &fractionlost,
+      &packetslost, &exthighestseq, &jitter, &lsr, &dlsr);
+
+  /* store last generated RR packet */
+  source->last_rr.is_valid = TRUE;
+  source->last_rr.ssrc = data->source->ssrc;
+  source->last_rr.fractionlost = fractionlost;
+  source->last_rr.packetslost = packetslost;
+  source->last_rr.exthighestseq = exthighestseq;
+  source->last_rr.jitter = jitter;
+  source->last_rr.lsr = lsr;
+  source->last_rr.dlsr = dlsr;
+
+  /* packet is not yet filled, add report block for this source. */
+  gst_rtcp_packet_add_rb (packet, source->ssrc, fractionlost, packetslost,
+      exthighestseq, jitter, lsr, dlsr);
+
+reported:
+  g_hash_table_add (source->reported_in_sr_of,
+      GUINT_TO_POINTER (data->source->ssrc));
+}
+
+/* construct FIR */
+/* GHFunc: append one FIR FCI entry (2 words: target SSRC + seqnum and
+ * three zero bytes) to the PSFB packet in data->packet for every source
+ * that has send_fir set.  Clears send_fir on success. */
+static void
+session_add_fir (const gchar * key, RTPSource * source, ReportData * data)
+{
+  GstRTCPPacket *packet = &data->packet;
+  guint16 len;
+  guint8 *fci_data;
+
+  if (!source->send_fir)
+    return;
+
+  /* fci length is counted in 32-bit words; a FIR entry needs 2 */
+  len = gst_rtcp_packet_fb_get_fci_length (packet);
+  if (!gst_rtcp_packet_fb_set_fci_length (packet, len + 2))
+    /* exit because the packet is full, will put next request in a
+     * further packet */
+    return;
+
+  fci_data = gst_rtcp_packet_fb_get_fci (packet) + (len * 4);
+
+  GST_WRITE_UINT32_BE (fci_data, source->ssrc);
+  fci_data += 4;
+  fci_data[0] = source->current_send_fir_seqnum;
+  fci_data[1] = fci_data[2] = fci_data[3] = 0;
+
+  source->send_fir = FALSE;
+  source->stats.sent_fir_count++;
+}
+
+/* build a PSFB FIR packet covering all sources with a pending FIR
+ * request.  If no entry could be added the empty packet is removed
+ * again, otherwise the compound packet must not be suppressed. */
+static void
+session_fir (RTPSession * sess, ReportData * data)
+{
+  GstRTCPBuffer *rtcp = &data->rtcpbuf;
+  GstRTCPPacket *packet = &data->packet;
+
+  if (!gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_PSFB, packet))
+    return;
+
+  gst_rtcp_packet_fb_set_type (packet, GST_RTCP_PSFB_TYPE_FIR);
+  gst_rtcp_packet_fb_set_sender_ssrc (packet, data->source->ssrc);
+  /* media ssrc stays 0; the target SSRCs go into the FCI entries */
+  gst_rtcp_packet_fb_set_media_ssrc (packet, 0);
+
+  g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+      (GHFunc) session_add_fir, data);
+
+  if (gst_rtcp_packet_fb_get_fci_length (packet) == 0)
+    gst_rtcp_packet_remove (packet);
+  else
+    data->may_suppress = FALSE;
+}
+
+/* GCompareFunc for rtp_source_has_retained(): TRUE when the retained
+ * buffer @a starts with a PSFB PLI packet. @ignored is unused. */
+static gboolean
+has_pli_compare_func (gconstpointer a, gconstpointer ignored)
+{
+  gboolean found = FALSE;
+  GstRTCPBuffer rtcp = { NULL, };
+  GstRTCPPacket packet;
+
+  gst_rtcp_buffer_map ((GstBuffer *) a, GST_MAP_READ, &rtcp);
+
+  /* only the first packet of the retained buffer is inspected */
+  if (gst_rtcp_buffer_get_first_packet (&rtcp, &packet) &&
+      gst_rtcp_packet_get_type (&packet) == GST_RTCP_TYPE_PSFB &&
+      gst_rtcp_packet_fb_get_type (&packet) == GST_RTCP_PSFB_TYPE_PLI)
+    found = TRUE;
+
+  gst_rtcp_buffer_unmap (&rtcp);
+
+  return found;
+}
+
+/* construct PLI */
+/* GHFunc: append a PSFB PLI packet for @source when it has send_pli set
+ * and no PLI is already retained for it.  Clears send_pli and forbids
+ * suppressing the compound packet. */
+static void
+session_pli (const gchar * key, RTPSource * source, ReportData * data)
+{
+  GstRTCPBuffer *rtcp = &data->rtcpbuf;
+  GstRTCPPacket *packet = &data->packet;
+
+  if (!source->send_pli)
+    return;
+
+  /* don't duplicate a PLI that is still retained for this source */
+  if (rtp_source_has_retained (source, has_pli_compare_func, NULL))
+    return;
+
+  if (!gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_PSFB, packet))
+    /* exit because the packet is full, will put next request in a
+     * further packet */
+    return;
+
+  gst_rtcp_packet_fb_set_type (packet, GST_RTCP_PSFB_TYPE_PLI);
+  gst_rtcp_packet_fb_set_sender_ssrc (packet, data->source->ssrc);
+  gst_rtcp_packet_fb_set_media_ssrc (packet, source->ssrc);
+
+  source->send_pli = FALSE;
+  data->may_suppress = FALSE;
+
+  source->stats.sent_pli_count++;
+}
+
+/* construct NACK */
+/* GHFunc: build RTPFB NACK feedback for @source.  Expired NACKs are
+ * dropped first, then either the on-sending-nacks signal handler builds
+ * the packets, or generic NACKs are packed here using the seqnum+BLP
+ * format (one FCI word covers a seqnum and the 16 following ones). */
+static void
+session_nack (const gchar * key, RTPSource * source, ReportData * data)
+{
+  RTPSession *sess = data->sess;
+  GstRTCPBuffer *rtcp = &data->rtcpbuf;
+  GstRTCPPacket *packet = &data->packet;
+  guint16 *nacks;
+  GstClockTime *nack_deadlines;
+  guint n_nacks, i = 0;
+  guint nacked_seqnums = 0;
+  guint16 n_fb_nacks = 0;
+  guint8 *fci_data;
+
+  if (!source->send_nack)
+    return;
+
+  nacks = rtp_source_get_nacks (source, &n_nacks);
+  nack_deadlines = rtp_source_get_nack_deadlines (source, NULL);
+  GST_DEBUG ("%u NACKs current time %" GST_TIME_FORMAT, n_nacks,
+      GST_TIME_ARGS (data->current_time));
+
+  /* cleanup expired nacks: after the loop, i is the index of the first
+   * NACK whose deadline has not passed yet */
+  for (i = 0; i < n_nacks; i++) {
+    GST_DEBUG ("#%u deadline %" GST_TIME_FORMAT, nacks[i],
+        GST_TIME_ARGS (nack_deadlines[i]));
+    if (nack_deadlines[i] >= data->current_time)
+      break;
+  }
+
+  if (data->is_early) {
+    /* don't remove them all if this is an early RTCP packet. It may happen
+     * that the NACKs are late due to high RTT, not sending NACKs at all would
+     * keep the RTX RTT stats high and maintain a dropping state. */
+    i = MIN (n_nacks - 1, i);
+  }
+
+  if (i) {
+    GST_WARNING ("Removing %u expired NACKS", i);
+    rtp_source_clear_nacks (source, i);
+    n_nacks -= i;
+    if (n_nacks == 0)
+      return;
+  }
+
+  /* allow overriding NACK to packet conversion */
+  if (g_signal_has_handler_pending (sess,
+          rtp_session_signals[SIGNAL_ON_SENDING_NACKS], 0, TRUE)) {
+    /* this is needed as it will actually resize the buffer */
+    gst_rtcp_buffer_unmap (rtcp);
+
+    g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SENDING_NACKS], 0,
+        data->source->ssrc, source->ssrc, source->nacks, data->rtcp,
+        &nacked_seqnums);
+
+    /* and now remap for the remaining work */
+    gst_rtcp_buffer_map (data->rtcp, GST_MAP_READWRITE, rtcp);
+
+    /* if the handler consumed none, fall through to generic NACKs */
+    if (nacked_seqnums > 0)
+      goto done;
+  }
+
+  if (!gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_RTPFB, packet))
+    /* exit because the packet is full, will put next request in a
+     * further packet */
+    return;
+
+  gst_rtcp_packet_fb_set_type (packet, GST_RTCP_RTPFB_TYPE_NACK);
+  gst_rtcp_packet_fb_set_sender_ssrc (packet, data->source->ssrc);
+  gst_rtcp_packet_fb_set_media_ssrc (packet, source->ssrc);
+
+  if (!gst_rtcp_packet_fb_set_fci_length (packet, 1)) {
+    gst_rtcp_packet_remove (packet);
+    GST_WARNING ("no nacks fit in the packet");
+    return;
+  }
+
+  fci_data = gst_rtcp_packet_fb_get_fci (packet);
+  /* nacked_seqnums doubles as the index of the next unpacked seqnum */
+  for (i = 0; i < n_nacks; i = nacked_seqnums) {
+    guint16 seqnum = nacks[i];
+    guint16 blp = 0;
+    guint j;
+
+    if (!gst_rtcp_packet_fb_set_fci_length (packet, n_fb_nacks + 1))
+      break;
+
+    n_fb_nacks++;
+    nacked_seqnums++;
+
+    /* fold following seqnums within 16 of @seqnum into the bitmask */
+    for (j = i + 1; j < n_nacks; j++) {
+      gint diff;
+
+      diff = gst_rtp_buffer_compare_seqnum (seqnum, nacks[j]);
+      GST_TRACE ("[%u][%u] %u %u diff %i", i, j, seqnum, nacks[j], diff);
+      if (diff > 16)
+        break;
+
+      blp |= 1 << (diff - 1);
+      nacked_seqnums++;
+    }
+
+    GST_WRITE_UINT32_BE (fci_data, seqnum << 16 | blp);
+    fci_data += 4;
+  }
+
+  GST_DEBUG ("Sent %u seqnums into %u FB NACKs", nacked_seqnums, n_fb_nacks);
+  source->stats.sent_nack_count += n_fb_nacks;
+
+done:
+  data->nacked_seqnums += nacked_seqnums;
+  rtp_source_clear_nacks (source, nacked_seqnums);
+  data->may_suppress = FALSE;
+}
+
+/* perform cleanup of sources that timed out */
+/* GHFunc over a copy of the source table: time out collisions, remove
+ * BYE'd sources, demote silent senders to receivers and mark dead
+ * sources with ->closing so remove_closing_sources() drops them.  Also
+ * counts the sources still to report in this generation. */
+static void
+session_cleanup (const gchar * key, RTPSource * source, ReportData * data)
+{
+  gboolean remove = FALSE;
+  gboolean byetimeout = FALSE;
+  gboolean sendertimeout = FALSE;
+  gboolean is_sender, is_active;
+  RTPSession *sess = data->sess;
+  GstClockTime interval, binterval;
+  GstClockTime btime;
+
+  GST_DEBUG ("look at %08x, generation %u", source->ssrc, source->generation);
+
+  /* check for outdated collisions */
+  if (source->internal) {
+    GST_DEBUG ("Timing out collisions for %x", source->ssrc);
+    rtp_source_timeout (source, data->current_time, data->running_time,
+        sess->rtcp_feedback_retention_window);
+  }
+
+  /* nothing else to do when without RTCP */
+  if (data->interval == GST_CLOCK_TIME_NONE)
+    return;
+
+  is_sender = RTP_SOURCE_IS_SENDER (source);
+  is_active = RTP_SOURCE_IS_ACTIVE (source);
+
+  /* our own rtcp interval may have been forced low by secondary configuration,
+   * while sender side may still operate with higher interval,
+   * so do not just take our interval to decide on timing out sender,
+   * but take (if data->interval <= 5 * GST_SECOND):
+   *   interval = CLAMP (sender_interval, data->interval, 5 * GST_SECOND)
+   * where sender_interval is difference between last 2 received RTCP reports
+   */
+  if (data->interval >= 5 * GST_SECOND || source->internal) {
+    binterval = data->interval;
+  } else {
+    GST_LOG ("prev_rtcp %" GST_TIME_FORMAT ", last_rtcp %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (source->stats.prev_rtcptime),
+        GST_TIME_ARGS (source->stats.last_rtcptime));
+    /* if not received enough yet, fallback to larger default */
+    if (source->stats.last_rtcptime > source->stats.prev_rtcptime)
+      binterval = source->stats.last_rtcptime - source->stats.prev_rtcptime;
+    else
+      binterval = 5 * GST_SECOND;
+    binterval = CLAMP (binterval, data->interval, 5 * GST_SECOND);
+  }
+  GST_LOG ("timeout base interval %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (binterval));
+
+  if (!source->internal && source->marked_bye) {
+    /* if we received a BYE from the source, remove the source after some
+     * time. */
+    if (data->current_time > source->bye_time &&
+        data->current_time - source->bye_time > sess->stats.bye_timeout) {
+      GST_DEBUG ("removing BYE source %08x", source->ssrc);
+      remove = TRUE;
+      byetimeout = TRUE;
+    }
+  }
+
+  if (source->internal && source->sent_bye) {
+    GST_DEBUG ("removing internal source that has sent BYE %08x", source->ssrc);
+    remove = TRUE;
+  }
+
+  /* sources that were inactive for more than 5 times the deterministic reporting
+   * interval get timed out. the min timeout is 5 seconds. */
+  /* mind old time that might pre-date last time going to PLAYING */
+  btime = MAX (source->last_activity, sess->start_time);
+  if (data->current_time > btime) {
+    interval = MAX (binterval * 5, 5 * GST_SECOND);
+    if (data->current_time - btime > interval) {
+      GST_DEBUG ("removing timeout source %08x, last %" GST_TIME_FORMAT,
+          source->ssrc, GST_TIME_ARGS (btime));
+      if (source->internal) {
+        /* this is an internal source that is not using our suggested ssrc.
+         * since there must be another source using this ssrc, we can remove
+         * this one instead of making it a receiver forever */
+        if (source->ssrc != sess->suggested_ssrc) {
+          rtp_source_mark_bye (source, "timed out");
+          /* do not schedule bye here, since we are inside the RTCP timeout
+           * processing and scheduling bye will interfere with SR/RR sending */
+        }
+      } else {
+        remove = TRUE;
+      }
+    }
+  }
+
+  /* senders that did not send for a long time become a receiver, this also
+   * holds for our own sources. */
+  if (is_sender) {
+    /* mind old time that might pre-date last time going to PLAYING */
+    btime = MAX (source->last_rtp_activity, sess->start_time);
+    if (data->current_time > btime) {
+      interval = MAX (binterval * 2, 5 * GST_SECOND);
+      if (data->current_time - btime > interval) {
+        GST_DEBUG ("sender source %08x timed out and became receiver, last %"
+            GST_TIME_FORMAT, source->ssrc, GST_TIME_ARGS (btime));
+        sendertimeout = TRUE;
+      }
+    }
+  }
+
+  if (remove) {
+    /* keep the aggregate session statistics in sync with the removal */
+    sess->total_sources--;
+    if (is_sender) {
+      sess->stats.sender_sources--;
+      if (source->internal)
+        sess->stats.internal_sender_sources--;
+    }
+    if (is_active)
+      sess->stats.active_sources--;
+
+    if (source->internal)
+      sess->stats.internal_sources--;
+
+    if (byetimeout)
+      on_bye_timeout (sess, source);
+    else
+      on_timeout (sess, source);
+  } else {
+    if (sendertimeout) {
+      source->is_sender = FALSE;
+      sess->stats.sender_sources--;
+      if (source->internal)
+        sess->stats.internal_sender_sources--;
+
+      on_sender_timeout (sess, source);
+    }
+    /* count how many source to report in this generation */
+    if (((gint16) (source->generation - sess->generation)) <= 0)
+      data->num_to_report++;
+  }
+  source->closing = remove;
+}
+
+/* append an SDES packet for data->source with all entries from its SDES
+ * structure.  Early packets only carry the CNAME.  PRIV entries are
+ * encoded as prefix-length, prefix, value, each limited to 255 bytes. */
+static void
+session_sdes (RTPSession * sess, ReportData * data)
+{
+  GstRTCPPacket *packet = &data->packet;
+  const GstStructure *sdes;
+  gint i, n_fields;
+  GstRTCPBuffer *rtcp = &data->rtcpbuf;
+
+  /* add SDES packet */
+  gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_SDES, packet);
+
+  gst_rtcp_packet_sdes_add_item (packet, data->source->ssrc);
+
+  sdes = rtp_source_get_sdes_struct (data->source);
+
+  /* add all fields in the structure, the order is not important. */
+  n_fields = gst_structure_n_fields (sdes);
+  for (i = 0; i < n_fields; ++i) {
+    const gchar *field;
+    const gchar *value;
+    GstRTCPSDESType type;
+
+    field = gst_structure_nth_field_name (sdes, i);
+    if (field == NULL)
+      continue;
+    /* only string-valued fields are serializable as SDES entries */
+    value = gst_structure_get_string (sdes, field);
+    if (value == NULL)
+      continue;
+    type = gst_rtcp_sdes_name_to_type (field);
+
+    /* Early packets are minimal and only include the CNAME */
+    if (data->is_early && type != GST_RTCP_SDES_CNAME)
+      continue;
+
+    if (type > GST_RTCP_SDES_END && type < GST_RTCP_SDES_PRIV) {
+      gst_rtcp_packet_sdes_add_entry (packet, type, strlen (value),
+          (const guint8 *) value);
+    } else if (type == GST_RTCP_SDES_PRIV) {
+      gsize prefix_len;
+      gsize value_len;
+      gsize data_len;
+      guint8 data[256];
+
+      /* don't accept entries that are too big */
+      prefix_len = strlen (field);
+      if (prefix_len > 255)
+        continue;
+      value_len = strlen (value);
+      if (value_len > 255)
+        continue;
+      data_len = 1 + prefix_len + value_len;
+      if (data_len > 255)
+        continue;
+
+      /* PRIV payload: 1-byte prefix length, prefix, then the value */
+      data[0] = prefix_len;
+      memcpy (&data[1], field, prefix_len);
+      memcpy (&data[1 + prefix_len], value, value_len);
+
+      gst_rtcp_packet_sdes_add_entry (packet, type, data_len, data);
+    }
+  }
+
+  data->has_sdes = TRUE;
+}
+
+/* schedule a BYE packet */
+/* append SDES plus a BYE (with optional reason) for @source to the
+ * compound packet being built, and mark the source as having sent BYE */
+static void
+make_source_bye (RTPSession * sess, RTPSource * source, ReportData * data)
+{
+  GstRTCPPacket *packet = &data->packet;
+  GstRTCPBuffer *rtcp = &data->rtcpbuf;
+
+  /* add SDES */
+  session_sdes (sess, data);
+  /* add a BYE packet */
+  gst_rtcp_buffer_add_packet (rtcp, GST_RTCP_TYPE_BYE, packet);
+  gst_rtcp_packet_bye_add_ssrc (packet, source->ssrc);
+  if (source->bye_reason)
+    gst_rtcp_packet_bye_set_reason (packet, source->bye_reason);
+
+  /* we have a BYE packet now */
+  source->sent_bye = TRUE;
+}
+
+/* decide whether RTCP may be sent at @current_time.  Applies forward
+ * reconsideration against the last check time and, for (S)AVPF, the
+ * t-rr-interval adjustment from RFC 4585 section 3.5.3.  Updates
+ * sess->next_rtcp_check_time / last_rtcp_interval and data->is_early as
+ * side effects.  Returns TRUE when a packet should be generated now. */
+static gboolean
+is_rtcp_time (RTPSession * sess, GstClockTime current_time, ReportData * data)
+{
+  GstClockTime new_send_time;
+  GstClockTime interval;
+  RTPSessionStats *stats;
+
+  /* after a scheduled BYE we work from the separate bye statistics */
+  if (sess->scheduled_bye)
+    stats = &sess->bye_stats;
+  else
+    stats = &sess->stats;
+
+  if (GST_CLOCK_TIME_IS_VALID (sess->next_early_rtcp_time))
+    data->is_early = TRUE;
+  else
+    data->is_early = FALSE;
+
+  if (data->is_early && sess->next_early_rtcp_time <= current_time) {
+    GST_DEBUG ("early feedback %" GST_TIME_FORMAT " <= now %"
+        GST_TIME_FORMAT, GST_TIME_ARGS (sess->next_early_rtcp_time),
+        GST_TIME_ARGS (current_time));
+  } else if (sess->next_rtcp_check_time == GST_CLOCK_TIME_NONE ||
+      sess->next_rtcp_check_time > current_time) {
+    GST_DEBUG ("no check time yet, next %" GST_TIME_FORMAT " > now %"
+        GST_TIME_FORMAT, GST_TIME_ARGS (sess->next_rtcp_check_time),
+        GST_TIME_ARGS (current_time));
+    return FALSE;
+  }
+
+  /* take interval and add jitter */
+  interval = data->interval;
+  if (interval != GST_CLOCK_TIME_NONE)
+    interval = rtp_stats_add_rtcp_jitter (stats, interval);
+
+  if (sess->last_rtcp_check_time != GST_CLOCK_TIME_NONE) {
+    /* perform forward reconsideration */
+    if (interval != GST_CLOCK_TIME_NONE) {
+      GstClockTime elapsed;
+
+      /* get elapsed time since we last reported */
+      elapsed = current_time - sess->last_rtcp_check_time;
+
+      GST_DEBUG ("forward reconsideration %" GST_TIME_FORMAT ", elapsed %"
+          GST_TIME_FORMAT, GST_TIME_ARGS (interval), GST_TIME_ARGS (elapsed));
+      new_send_time = interval + sess->last_rtcp_check_time;
+    } else {
+      new_send_time = sess->last_rtcp_check_time;
+    }
+  } else {
+    /* If this is the first RTCP packet, we can reconsider anything based
+     * on the last RTCP send time because there was none.
+     */
+    g_warn_if_fail (!data->is_early);
+    data->is_early = FALSE;
+    new_send_time = current_time;
+  }
+
+  if (!data->is_early) {
+    /* check if reconsideration */
+    if (new_send_time == GST_CLOCK_TIME_NONE || current_time < new_send_time) {
+      GST_DEBUG ("reconsider RTCP for %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (new_send_time));
+      /* store new check time */
+      sess->next_rtcp_check_time = new_send_time;
+      sess->last_rtcp_interval = interval;
+      return FALSE;
+    }
+
+    sess->last_rtcp_interval = interval;
+    if ((sess->rtp_profile == GST_RTP_PROFILE_AVPF
+            || sess->rtp_profile == GST_RTP_PROFILE_SAVPF)
+        && interval != GST_CLOCK_TIME_NONE) {
+      /* Apply the rules from RFC 4585 section 3.5.3 */
+      if (stats->min_interval != 0 && !sess->first_rtcp) {
+        GstClockTime T_rr_current_interval =
+            g_random_double_range (0.5, 1.5) * stats->min_interval * GST_SECOND;
+
+        if (T_rr_current_interval > interval) {
+          GST_DEBUG ("Adjusting interval for t-rr-interval: %" GST_TIME_FORMAT
+              " > %" GST_TIME_FORMAT, GST_TIME_ARGS (T_rr_current_interval),
+              GST_TIME_ARGS (interval));
+          interval = T_rr_current_interval;
+        }
+      }
+    }
+    sess->next_rtcp_check_time = current_time + interval;
+  }
+
+
+  GST_DEBUG ("can send RTCP now, next %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (sess->next_rtcp_check_time));
+
+  return TRUE;
+}
+
+/* GHFunc: copy one entry into @hash_table, taking a ref on the source.
+ * The key pointer is reused as-is; the destination table (created with a
+ * NULL key-destroy function) does not take ownership of it. */
+static void
+clone_ssrcs_hashtable (gchar * key, RTPSource * source, GHashTable * hash_table)
+{
+  g_hash_table_insert (hash_table, key, g_object_ref (source));
+}
+
+/* GHRFunc: drop sources flagged as closing; for the sources that stay,
+ * accumulate whether any still has pending FIR/PLI/NACK feedback. */
+static gboolean
+remove_closing_sources (const gchar * key, RTPSource * source,
+    ReportData * data)
+{
+  if (source->closing)
+    return TRUE;
+
+  data->have_fir = data->have_fir || source->send_fir;
+  data->have_pli = data->have_pli || source->send_pli;
+  data->have_nack = data->have_nack || source->send_nack;
+
+  return FALSE;
+}
+
+/* GHFunc: for each active internal source, drain all pending TWCC
+ * feedback buffers from the TWCC manager and queue them as ReportOutput
+ * entries for later transmission. */
+static void
+generate_twcc (const gchar * key, RTPSource * source, ReportData * data)
+{
+  RTPSession *sess = data->sess;
+  GstBuffer *buf;
+
+  /* only generate RTCP for active internal sources */
+  if (!source->internal || source->sent_bye)
+    return;
+
+  /* ignore other sources when we do the timeout after a scheduled BYE */
+  if (sess->scheduled_bye && !source->marked_bye)
+    return;
+
+  /* skip if RTCP is disabled */
+  if (source->disable_rtcp) {
+    GST_DEBUG ("source %08x has RTCP disabled", source->ssrc);
+    return;
+  }
+
+  /* drain every pending feedback buffer, one ReportOutput each */
+  while ((buf = rtp_twcc_manager_get_feedback (sess->twcc, source->ssrc))) {
+    ReportOutput *output = g_slice_new (ReportOutput);
+    output->source = g_object_ref (source);
+    output->is_bye = FALSE;
+    output->buffer = buf;
+    /* queue the RTCP packet to push later */
+    g_queue_push_tail (&data->output, output);
+  }
+}
+
+
+/* GHFunc: build one full RTCP compound packet (SR/RR, report blocks or
+ * BYE, SDES, FIR/PLI/NACK feedback) for internal source @source and
+ * queue the result in data->output. */
+static void
+generate_rtcp (const gchar * key, RTPSource * source, ReportData * data)
+{
+  RTPSession *sess = data->sess;
+  gboolean is_bye = FALSE;
+  ReportOutput *output;
+
+  /* only generate RTCP for active internal sources */
+  if (!source->internal || source->sent_bye)
+    return;
+
+  /* ignore other sources when we do the timeout after a scheduled BYE */
+  if (sess->scheduled_bye && !source->marked_bye)
+    return;
+
+  /* skip if RTCP is disabled */
+  if (source->disable_rtcp) {
+    GST_DEBUG ("source %08x has RTCP disabled", source->ssrc);
+    return;
+  }
+
+  data->source = source;
+
+  /* open packet */
+  session_start_rtcp (sess, data);
+
+  if (source->marked_bye) {
+    /* send BYE */
+    make_source_bye (sess, source, data);
+    is_bye = TRUE;
+  } else if (!data->is_early) {
+    /* loop over all known sources and add report blocks. If we are early, we
+     * just make a minimal RTCP packet and skip this step */
+    g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+        (GHFunc) session_report_blocks, data);
+  }
+  /* BYE already added SDES; early reduced-size packets omit it */
+  if (!data->has_sdes && (!data->is_early || !sess->reduced_size_rtcp))
+    session_sdes (sess, data);
+
+  if (data->have_fir)
+    session_fir (sess, data);
+
+  if (data->have_pli)
+    g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+        (GHFunc) session_pli, data);
+
+  if (data->have_nack)
+    g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+        (GHFunc) session_nack, data);
+
+  gst_rtcp_buffer_unmap (&data->rtcpbuf);
+
+  output = g_slice_new (ReportOutput);
+  output->source = g_object_ref (source);
+  output->is_bye = is_bye;
+  output->buffer = data->rtcp;
+  /* queue the RTCP packet to push later */
+  g_queue_push_tail (&data->output, output);
+}
+
+/* GHFunc: once every internal source has reported @source, advance it to
+ * the next generation; bump the session generation when the last source
+ * of this generation has been handled. */
+static void
+update_generation (const gchar * key, RTPSource * source, ReportData * data)
+{
+  RTPSession *sess = data->sess;
+
+  if (g_hash_table_size (source->reported_in_sr_of) <
+      sess->stats.internal_sources)
+    return;
+
+  /* source is reported, move to next generation */
+  source->generation = sess->generation + 1;
+  g_hash_table_remove_all (source->reported_in_sr_of);
+
+  GST_LOG ("reported source %x, new generation: %d", source->ssrc,
+      source->generation);
+
+  /* if we reported all sources in this generation, move to next */
+  if (--data->num_to_report == 0) {
+    sess->generation++;
+    GST_DEBUG ("all reported, generation now %u", sess->generation);
+  }
+}
+
+/* GHFunc: for sources that still have unsent NACKs, request early RTCP
+ * using the deadline of the newest pending NACK.  NOTE(review): drops and
+ * re-takes the session lock around the request, so the hash table must
+ * not be mutated concurrently — assumes the caller tolerates this. */
+static void
+schedule_remaining_nacks (const gchar * key, RTPSource * source,
+    ReportData * data)
+{
+  RTPSession *sess = data->sess;
+  GstClockTime *nack_deadlines;
+  GstClockTime deadline;
+  guint n_nacks;
+
+  if (!source->send_nack)
+    return;
+
+  /* the scheduling is entirely based on available bandwidth, just take the
+   * biggest seqnum, which will have the largest deadline to request early
+   * RTCP. */
+  nack_deadlines = rtp_source_get_nack_deadlines (source, &n_nacks);
+  deadline = nack_deadlines[n_nacks - 1];
+  RTP_SESSION_UNLOCK (sess);
+  rtp_session_send_rtcp_with_deadline (sess, deadline);
+  RTP_SESSION_LOCK (sess);
+}
+
+/* check, under the session lock, whether every internal source has
+ * already sent its BYE packet */
+static gboolean
+rtp_session_are_all_sources_bye (RTPSession * sess)
+{
+  GHashTableIter iter;
+  RTPSource *src;
+  gboolean all_bye = TRUE;
+
+  RTP_SESSION_LOCK (sess);
+  g_hash_table_iter_init (&iter, sess->ssrcs[sess->mask_idx]);
+  while (g_hash_table_iter_next (&iter, NULL, (gpointer *) & src)) {
+    if (src->internal && !src->sent_bye) {
+      all_bye = FALSE;
+      break;
+    }
+  }
+  RTP_SESSION_UNLOCK (sess);
+
+  return all_bye;
+}
+
+/**
+ * rtp_session_on_timeout:
+ * @sess: an #RTPSession
+ * @current_time: the current system time
+ * @ntpnstime: the current NTP time in nanoseconds
+ * @running_time: the current running_time of the pipeline
+ *
+ * Perform maintenance actions after the timeout obtained with
+ * rtp_session_next_timeout() expired.
+ *
+ * This function will perform timeouts of receivers and senders, send a BYE
+ * packet or generate RTCP packets with current session stats.
+ *
+ * This function can call the #RTPSessionSendRTCP callback, possibly multiple
+ * times, for each packet that should be processed.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+GstFlowReturn
+rtp_session_on_timeout (RTPSession * sess, GstClockTime current_time,
+    guint64 ntpnstime, GstClockTime running_time)
+{
+  GstFlowReturn result = GST_FLOW_OK;
+  ReportData data = { GST_RTCP_BUFFER_INIT };
+  GHashTable *table_copy;
+  ReportOutput *output;
+  gboolean all_empty = FALSE;
+
+  g_return_val_if_fail (RTP_IS_SESSION (sess), GST_FLOW_ERROR);
+
+  GST_DEBUG ("reporting at %" GST_TIME_FORMAT ", NTP time %" GST_TIME_FORMAT
+      ", running-time %" GST_TIME_FORMAT, GST_TIME_ARGS (current_time),
+      GST_TIME_ARGS (ntpnstime), GST_TIME_ARGS (running_time));
+
+  data.sess = sess;
+  data.current_time = current_time;
+  data.ntpnstime = ntpnstime;
+  data.running_time = running_time;
+  data.num_to_report = 0;
+  data.may_suppress = FALSE;
+  data.nacked_seqnums = 0;
+  g_queue_init (&data.output);
+
+  RTP_SESSION_LOCK (sess);
+  /* get a new interval, we need this for various cleanups etc */
+  data.interval = calculate_rtcp_interval (sess, TRUE, sess->first_rtcp);
+
+  GST_DEBUG ("interval %" GST_TIME_FORMAT, GST_TIME_ARGS (data.interval));
+
+  /* we need an internal source now */
+  if (sess->stats.internal_sources == 0) {
+    RTPSource *source;
+    gboolean created;
+
+    source = obtain_internal_source (sess, sess->suggested_ssrc, &created,
+        current_time);
+    sess->internal_ssrc_set = TRUE;
+
+    if (created)
+      on_new_sender_ssrc (sess, source);
+
+    g_object_unref (source);
+  }
+
+  sess->conflicting_addresses =
+      timeout_conflicting_addresses (sess->conflicting_addresses, current_time);
+
+  /* Make a local copy of the hashtable. We need to do this because the
+   * cleanup stage below releases the session lock. */
+  table_copy = g_hash_table_new_full (NULL, NULL, NULL,
+      (GDestroyNotify) g_object_unref);
+  g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+      (GHFunc) clone_ssrcs_hashtable, table_copy);
+
+  /* Clean up the session, mark the source for removing, this might release the
+   * session lock. */
+  g_hash_table_foreach (table_copy, (GHFunc) session_cleanup, &data);
+  g_hash_table_destroy (table_copy);
+
+  /* Now remove the marked sources */
+  g_hash_table_foreach_remove (sess->ssrcs[sess->mask_idx],
+      (GHRFunc) remove_closing_sources, &data);
+
+  /* update point-to-point status */
+  session_update_ptp (sess);
+
+  /* see if we need to generate SR or RR packets */
+  if (!is_rtcp_time (sess, current_time, &data))
+    goto done;
+
+  /* check if all the buffers are empty after generation */
+  all_empty = TRUE;
+
+  GST_DEBUG
+      ("doing RTCP generation %u for %u sources, early %d, may suppress %d",
+      sess->generation, data.num_to_report, data.is_early, data.may_suppress);
+
+  /* generate RTCP for all internal sources */
+  g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+      (GHFunc) generate_rtcp, &data);
+
+  g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+      (GHFunc) generate_twcc, &data);
+
+  /* update the generation for all the sources that have been reported */
+  g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+      (GHFunc) update_generation, &data);
+
+  /* we keep track of the last report time in order to timeout inactive
+   * receivers or senders */
+  if (!data.is_early) {
+    GST_DEBUG ("Time since last regular RTCP: %" GST_TIME_FORMAT " - %"
+        GST_TIME_FORMAT " = %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (data.current_time),
+        GST_TIME_ARGS (sess->last_rtcp_send_time),
+        GST_TIME_ARGS (data.current_time - sess->last_rtcp_send_time));
+    sess->last_rtcp_send_time = data.current_time;
+  }
+
+  GST_DEBUG ("Time since last RTCP: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT
+      " = %" GST_TIME_FORMAT, GST_TIME_ARGS (data.current_time),
+      GST_TIME_ARGS (sess->last_rtcp_check_time),
+      GST_TIME_ARGS (data.current_time - sess->last_rtcp_check_time));
+  sess->last_rtcp_check_time = data.current_time;
+  sess->first_rtcp = FALSE;
+  sess->next_early_rtcp_time = GST_CLOCK_TIME_NONE;
+  sess->scheduled_bye = FALSE;
+
+done:
+  RTP_SESSION_UNLOCK (sess);
+
+  /* notify about updated statistics */
+  g_object_notify (G_OBJECT (sess), "stats");
+
+  /* push out the RTCP packets, outside the session lock */
+  while ((output = g_queue_pop_head (&data.output))) {
+    gboolean do_not_suppress, empty_buffer;
+    GstBuffer *buffer = output->buffer;
+    RTPSource *source = output->source;
+
+    /* Give the user a change to add its own packet */
+    g_signal_emit (sess, rtp_session_signals[SIGNAL_ON_SENDING_RTCP], 0,
+        buffer, data.is_early, &do_not_suppress);
+
+    empty_buffer = gst_buffer_get_size (buffer) == 0;
+
+    if (!empty_buffer)
+      all_empty = FALSE;
+
+    /* send unless empty or suppressed (user handler can veto suppression) */
+    if (sess->callbacks.send_rtcp &&
+        !empty_buffer && (do_not_suppress || !data.may_suppress)) {
+      guint packet_size;
+
+      packet_size = gst_buffer_get_size (buffer) + sess->header_len;
+
+      UPDATE_AVG (sess->stats.avg_rtcp_packet_size, packet_size);
+      GST_DEBUG ("%p, sending RTCP packet, avg size %u, %u", &sess->stats,
+          sess->stats.avg_rtcp_packet_size, packet_size);
+      result =
+          sess->callbacks.send_rtcp (sess, source, buffer,
+          rtp_session_are_all_sources_bye (sess), sess->send_rtcp_user_data);
+
+      RTP_SESSION_LOCK (sess);
+      sess->stats.nacks_sent += data.nacked_seqnums;
+      on_sender_ssrc_active (sess, source);
+      RTP_SESSION_UNLOCK (sess);
+    } else {
+      GST_DEBUG ("freeing packet callback: %p"
+          " empty_buffer: %d, "
+          " do_not_suppress: %d may_suppress: %d", sess->callbacks.send_rtcp,
+          empty_buffer, do_not_suppress, data.may_suppress);
+      if (!empty_buffer) {
+        RTP_SESSION_LOCK (sess);
+        sess->stats.nacks_dropped += data.nacked_seqnums;
+        RTP_SESSION_UNLOCK (sess);
+      }
+      gst_buffer_unref (buffer);
+    }
+    g_object_unref (source);
+    g_slice_free (ReportOutput, output);
+  }
+
+  if (all_empty)
+    GST_ERROR ("generated empty RTCP messages for all the sources");
+
+  /* schedule remaining nacks */
+  RTP_SESSION_LOCK (sess);
+  g_hash_table_foreach (sess->ssrcs[sess->mask_idx],
+      (GHFunc) schedule_remaining_nacks, &data);
+  RTP_SESSION_UNLOCK (sess);
+
+  return result;
+}
+
+/**
+ * rtp_session_request_early_rtcp:
+ * @sess: an #RTPSession
+ * @current_time: the current system time
+ * @max_delay: maximum delay
+ *
+ * Request transmission of early RTCP
+ *
+ * Implements the early-feedback scheduling algorithm from RFC 4585
+ * section 3.5.2. As a side effect the session is switched to the AVPF
+ * profile, @sess->next_early_rtcp_time may be set, and the reconsider
+ * callback may be invoked to wake up the RTCP thread.
+ *
+ * Returns: %TRUE if the related RTCP can be scheduled.
+ */
+gboolean
+rtp_session_request_early_rtcp (RTPSession * sess, GstClockTime current_time,
+    GstClockTime max_delay)
+{
+  GstClockTime T_dither_max, T_rr, offset = 0;
+  gboolean ret;
+  gboolean allow_early;
+
+  /* Implements the algorithm described in RFC 4585 section 3.5.2 */
+
+  RTP_SESSION_LOCK (sess);
+
+  /* We assume a feedback profile if something is requesting RTCP
+   * to be sent */
+  sess->rtp_profile = GST_RTP_PROFILE_AVPF;
+
+  /* Check if already requested */
+  /* RFC 4585 section 3.5.2 step 2 */
+  if (GST_CLOCK_TIME_IS_VALID (sess->next_early_rtcp_time)) {
+    GST_LOG_OBJECT (sess, "already have next early rtcp time");
+    /* only succeed if the already-scheduled early RTCP will go out
+     * before the caller's deadline */
+    ret = (current_time + max_delay > sess->next_early_rtcp_time);
+    goto end;
+  }
+
+  if (!GST_CLOCK_TIME_IS_VALID (sess->next_rtcp_check_time)) {
+    GST_LOG_OBJECT (sess, "no next RTCP check time");
+    ret = FALSE;
+    goto end;
+  }
+
+  /* RFC 4585 section 3.5.3 step 1
+   * If no regular RTCP packet has been sent before, then a regular
+   * RTCP packet has to be scheduled first and FB messages might be
+   * included there
+   */
+  if (!GST_CLOCK_TIME_IS_VALID (sess->last_rtcp_send_time)) {
+    GST_LOG_OBJECT (sess, "no RTCP sent yet");
+
+    /* the regular packet is the feedback opportunity; it only helps the
+     * caller if it is scheduled before the deadline */
+    if (current_time + max_delay > sess->next_rtcp_check_time) {
+      GST_LOG_OBJECT (sess,
+          "next scheduled time is soon %" GST_TIME_FORMAT " + %" GST_TIME_FORMAT
+          " > %" GST_TIME_FORMAT, GST_TIME_ARGS (current_time),
+          GST_TIME_ARGS (max_delay),
+          GST_TIME_ARGS (sess->next_rtcp_check_time));
+      ret = TRUE;
+    } else {
+      GST_LOG_OBJECT (sess,
+          "can't allow early feedback, next scheduled time is too late %"
+          GST_TIME_FORMAT " + %" GST_TIME_FORMAT " < %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (current_time), GST_TIME_ARGS (max_delay),
+          GST_TIME_ARGS (sess->next_rtcp_check_time));
+      ret = FALSE;
+    }
+    goto end;
+  }
+
+  /* T_rr is the last regular RTCP reporting interval */
+  T_rr = sess->last_rtcp_interval;
+
+  /* RFC 4585 section 3.5.2 step 2b */
+  /* If the total sources is <=2, then there is only us and one peer */
+  /* When there is one auxiliary stream the session can still do point
+   * to point.
+   */
+  if (sess->is_doing_ptp) {
+    T_dither_max = 0;
+  } else {
+    /* Divide by 2 because l = 0.5 */
+    T_dither_max = T_rr;
+    T_dither_max /= 2;
+  }
+
+  /* RFC 4585 section 3.5.2 step 3 */
+  if (current_time + T_dither_max > sess->next_rtcp_check_time) {
+    GST_LOG_OBJECT (sess,
+        "don't send because of dither, next scheduled time is too soon %"
+        GST_TIME_FORMAT " + %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (current_time), GST_TIME_ARGS (T_dither_max),
+        GST_TIME_ARGS (sess->next_rtcp_check_time));
+    /* the regular packet will carry the feedback; report success only if
+     * it fits within the caller's deadline */
+    ret = T_dither_max <= max_delay;
+    goto end;
+  }
+
+  /* RFC 4585 section 3.5.2 step 4a and
+   * RFC 4585 section 3.5.2 step 6 */
+  allow_early = FALSE;
+  if (sess->last_rtcp_check_time == sess->last_rtcp_send_time) {
+    /* Last time we sent a full RTCP packet, we can now immediately
+     * send an early one as allow_early was reset to TRUE */
+    allow_early = TRUE;
+  } else if (sess->last_rtcp_check_time + T_rr <= current_time + max_delay) {
+    /* Last packet we sent was an early RTCP packet and more than
+     * T_rr has passed since then, meaning we would have suppressed
+     * a regular RTCP packet already and reset allow_early to TRUE */
+    allow_early = TRUE;
+
+    /* We have to offset a bit as T_rr has not passed yet, but will before
+     * max_delay */
+    if (sess->last_rtcp_check_time + T_rr > current_time)
+      offset = (sess->last_rtcp_check_time + T_rr) - current_time;
+  } else {
+    GST_DEBUG_OBJECT (sess,
+        "can't allow early RTCP yet: last regular %" GST_TIME_FORMAT ", %"
+        GST_TIME_FORMAT " + %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT " + %"
+        GST_TIME_FORMAT, GST_TIME_ARGS (sess->last_rtcp_send_time),
+        GST_TIME_ARGS (sess->last_rtcp_check_time), GST_TIME_ARGS (T_rr),
+        GST_TIME_ARGS (current_time), GST_TIME_ARGS (max_delay));
+  }
+
+  if (!allow_early) {
+    /* Ignore the request a scheduled packet will be in time anyway */
+    if (current_time + max_delay > sess->next_rtcp_check_time) {
+      GST_LOG_OBJECT (sess,
+          "next scheduled time is soon %" GST_TIME_FORMAT " + %" GST_TIME_FORMAT
+          " > %" GST_TIME_FORMAT, GST_TIME_ARGS (current_time),
+          GST_TIME_ARGS (max_delay),
+          GST_TIME_ARGS (sess->next_rtcp_check_time));
+      ret = TRUE;
+    } else {
+      GST_LOG_OBJECT (sess,
+          "can't allow early feedback and next scheduled time is too late %"
+          GST_TIME_FORMAT " + %" GST_TIME_FORMAT " < %" GST_TIME_FORMAT,
+          GST_TIME_ARGS (current_time), GST_TIME_ARGS (max_delay),
+          GST_TIME_ARGS (sess->next_rtcp_check_time));
+      ret = FALSE;
+    }
+    goto end;
+  }
+
+  /* RFC 4585 section 3.5.2 step 4b */
+  if (T_dither_max) {
+    /* Schedule an early transmission later */
+    sess->next_early_rtcp_time = g_random_double () * T_dither_max +
+        current_time + offset;
+  } else {
+    /* If no dithering, schedule it for NOW */
+    sess->next_early_rtcp_time = current_time + offset;
+  }
+
+  GST_LOG_OBJECT (sess, "next early RTCP time %" GST_TIME_FORMAT
+      ", next regular RTCP time %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (sess->next_early_rtcp_time),
+      GST_TIME_ARGS (sess->next_rtcp_check_time));
+  /* drop the lock before calling out so the callback can re-enter the
+   * session without deadlocking */
+  RTP_SESSION_UNLOCK (sess);
+
+  /* notify app of need to send packet early
+   * and therefore of timeout change */
+  if (sess->callbacks.reconsider)
+    sess->callbacks.reconsider (sess, sess->reconsider_user_data);
+
+  return TRUE;
+
+end:
+
+  RTP_SESSION_UNLOCK (sess);
+
+  return ret;
+}
+
+/* Ask for early RTCP transmission, after first letting the application
+ * know (via the notify_early_rtcp callback) that early RTCP is coming. */
+static gboolean
+rtp_session_send_rtcp_internal (RTPSession * sess, GstClockTime now,
+    GstClockTime max_delay)
+{
+  RTPSessionNotifyEarlyRTCP notify = sess->callbacks.notify_early_rtcp;
+
+  /* give the application a chance to prepare for the early packet */
+  if (notify != NULL)
+    notify (sess, sess->notify_early_rtcp_user_data);
+
+  return rtp_session_request_early_rtcp (sess, now, max_delay);
+}
+
+/* Request early RTCP so that it goes out no later than @deadline.
+ * Returns FALSE when sending is impossible or the deadline already passed. */
+static gboolean
+rtp_session_send_rtcp_with_deadline (RTPSession * sess, GstClockTime deadline)
+{
+  GstClockTime now;
+
+  /* without a send callback there is no way to emit RTCP at all */
+  if (sess->callbacks.send_rtcp == NULL)
+    return FALSE;
+
+  now = sess->callbacks.request_time (sess, sess->request_time_user_data);
+
+  /* a deadline in the past cannot be met */
+  if (now > deadline)
+    return FALSE;
+
+  /* translate the absolute deadline into a relative maximum delay */
+  return rtp_session_send_rtcp_internal (sess, now, deadline - now);
+}
+
+/* Request early RTCP within @max_delay from the current time. */
+static gboolean
+rtp_session_send_rtcp (RTPSession * sess, GstClockTime max_delay)
+{
+  GstClockTime current;
+
+  /* bail out when no way of sending RTCP has been installed */
+  if (sess->callbacks.send_rtcp == NULL)
+    return FALSE;
+
+  current = sess->callbacks.request_time (sess, sess->request_time_user_data);
+
+  return rtp_session_send_rtcp_internal (sess, current, max_delay);
+}
+
+/**
+ * rtp_session_request_key_unit:
+ * @sess: an #RTPSession
+ * @ssrc: the SSRC to request a key unit from
+ * @fir: if %TRUE request a FIR, otherwise a PLI
+ * @count: FIR request count, or -1
+ *
+ * Flag a FIR or PLI feedback request on the source with @ssrc and try to
+ * send the feedback early.
+ *
+ * Returns: %TRUE when the source was found, %FALSE otherwise.
+ */
+gboolean
+rtp_session_request_key_unit (RTPSession * sess, guint32 ssrc,
+    gboolean fir, gint count)
+{
+  RTPSource *src;
+
+  RTP_SESSION_LOCK (sess);
+  src = find_source (sess, ssrc);
+  if (src == NULL) {
+    /* unknown SSRC, nothing to request a key unit from */
+    RTP_SESSION_UNLOCK (sess);
+    return FALSE;
+  }
+
+  if (fir) {
+    /* a FIR replaces any pending PLI */
+    src->send_pli = FALSE;
+    src->send_fir = TRUE;
+
+    /* bump the FIR sequence number for a fresh request */
+    if (count == -1 || count != src->last_fir_count)
+      src->current_send_fir_seqnum++;
+    src->last_fir_count = count;
+  } else if (!src->send_fir) {
+    /* only flag a PLI when no FIR is already pending */
+    src->send_pli = TRUE;
+  }
+  RTP_SESSION_UNLOCK (sess);
+
+  if (!rtp_session_send_rtcp (sess, 5 * GST_SECOND)) {
+    GST_DEBUG ("FIR/PLI not sent early, sending with next regular RTCP");
+  }
+
+  return TRUE;
+}
+
+/**
+ * rtp_session_request_nack:
+ * @sess: a #RTPSession
+ * @ssrc: the SSRC
+ * @seqnum: the missing seqnum
+ * @max_delay: max delay to request NACK
+ *
+ * Request scheduling of a NACK feedback packet for @seqnum in @ssrc.
+ *
+ * Returns: %TRUE if the NACK feedback could be scheduled
+ */
+gboolean
+rtp_session_request_nack (RTPSession * sess, guint32 ssrc, guint16 seqnum,
+    GstClockTime max_delay)
+{
+  RTPSource *source;
+  GstClockTime now, deadline;
+
+  /* without a send callback the NACK could never go out */
+  if (sess->callbacks.send_rtcp == NULL)
+    return FALSE;
+
+  now = sess->callbacks.request_time (sess, sess->request_time_user_data);
+  deadline = now + max_delay;
+
+  RTP_SESSION_LOCK (sess);
+  source = find_source (sess, ssrc);
+  if (source == NULL) {
+    /* unknown SSRC, nothing to NACK */
+    RTP_SESSION_UNLOCK (sess);
+    return FALSE;
+  }
+
+  GST_DEBUG ("request NACK for SSRC %08x, #%u, deadline %" GST_TIME_FORMAT,
+      ssrc, seqnum, GST_TIME_ARGS (deadline));
+  rtp_source_register_nack (source, seqnum, deadline);
+  RTP_SESSION_UNLOCK (sess);
+
+  if (!rtp_session_send_rtcp_internal (sess, now, max_delay)) {
+    GST_DEBUG ("NACK not sent early, sending with next regular RTCP");
+  }
+
+  return TRUE;
+}
+
+/**
+ * rtp_session_update_recv_caps_structure:
+ * @sess: an #RTPSession
+ * @s: a #GstStructure from a #GstCaps
+ *
+ * Update the caps of the receiver in the rtp session.
+ */
+void
+rtp_session_update_recv_caps_structure (RTPSession * sess,
+    const GstStructure * s)
+{
+  /* only the TWCC receive extension id is extracted from the caps here */
+  rtp_twcc_manager_parse_recv_ext_id (sess->twcc, s);
+}
diff --git a/gst/rtpmanager/rtpsession.h b/gst/rtpmanager/rtpsession.h
new file mode 100644
index 0000000000..949fcc49b8
--- /dev/null
+++ b/gst/rtpmanager/rtpsession.h
@@ -0,0 +1,443 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __RTP_SESSION_H__
+#define __RTP_SESSION_H__
+
+#include <gst/gst.h>
+
+#include "rtpsource.h"
+#include "rtptwcc.h"
+
+typedef struct _RTPSession RTPSession;
+typedef struct _RTPSessionClass RTPSessionClass;
+
+#define RTP_TYPE_SESSION (rtp_session_get_type())
+#define RTP_SESSION(sess) (G_TYPE_CHECK_INSTANCE_CAST((sess),RTP_TYPE_SESSION,RTPSession))
+#define RTP_SESSION_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),RTP_TYPE_SESSION,RTPSessionClass))
+#define RTP_IS_SESSION(sess) (G_TYPE_CHECK_INSTANCE_TYPE((sess),RTP_TYPE_SESSION))
+#define RTP_IS_SESSION_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),RTP_TYPE_SESSION))
+#define RTP_SESSION_CAST(sess) ((RTPSession *)(sess))
+
+#define RTP_SESSION_LOCK(sess) (g_mutex_lock (&(sess)->lock))
+#define RTP_SESSION_UNLOCK(sess) (g_mutex_unlock (&(sess)->lock))
+
+/**
+ * RTPSessionProcessRTP:
+ * @sess: an #RTPSession
+ * @src: the #RTPSource
+ * @buffer: the RTP buffer ready for processing
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess has @buffer ready for further
+ * processing. Processing the buffer typically includes decoding and displaying
+ * the buffer.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+typedef GstFlowReturn (*RTPSessionProcessRTP) (RTPSession *sess, RTPSource *src, GstBuffer *buffer, gpointer user_data);
+
+/**
+ * RTPSessionSendRTP:
+ * @sess: an #RTPSession
+ * @src: the #RTPSource
+ * @data: the RTP buffer or buffer list ready for sending
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess has @data (a buffer or buffer list)
+ * ready for sending to all listening participants in this session.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+typedef GstFlowReturn (*RTPSessionSendRTP) (RTPSession *sess, RTPSource *src, gpointer data, gpointer user_data);
+
+/**
+ * RTPSessionSendRTCP:
+ * @sess: an #RTPSession
+ * @src: the #RTPSource
+ * @buffer: the RTCP buffer ready for sending
+ * @eos: if an EOS event should be pushed
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess has @buffer ready for sending to
+ * all listening participants in this session.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+typedef GstFlowReturn (*RTPSessionSendRTCP) (RTPSession *sess, RTPSource *src, GstBuffer *buffer,
+ gboolean eos, gpointer user_data);
+
+/**
+ * RTPSessionSyncRTCP:
+ * @sess: an #RTPSession
+ * @buffer: the RTCP buffer ready for synchronisation
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess has an SR @buffer ready for doing
+ * synchronisation between streams.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+typedef GstFlowReturn (*RTPSessionSyncRTCP) (RTPSession *sess, GstBuffer *buffer, gpointer user_data);
+
+/**
+ * RTPSessionClockRate:
+ * @sess: an #RTPSession
+ * @payload: the payload
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess needs the clock-rate of @payload.
+ *
+ * Returns: the clock-rate of @payload.
+ */
+typedef gint (*RTPSessionClockRate) (RTPSession *sess, guint8 payload, gpointer user_data);
+
+/**
+ * RTPSessionReconsider:
+ * @sess: an #RTPSession
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess needs to cancel the current timeout.
+ * The currently running timeout should be canceled and a new reporting interval
+ * should be requested from @sess.
+ */
+typedef void (*RTPSessionReconsider) (RTPSession *sess, gpointer user_data);
+
+/**
+ * RTPSessionRequestKeyUnit:
+ * @sess: an #RTPSession
+ * @ssrc: SSRC of the source related to the key unit request
+ * @all_headers: %TRUE if "all-headers" property should be set on the key unit
+ * request
+ * @user_data: user data specified when registering
+ *
+ * Asks the encoder to produce a key unit as soon as possibly within the
+ * bandwidth constraints
+ */
+typedef void (*RTPSessionRequestKeyUnit) (RTPSession *sess, guint32 ssrc,
+ gboolean all_headers, gpointer user_data);
+
+/**
+ * RTPSessionRequestTime:
+ * @sess: an #RTPSession
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess needs the current time. The time
+ * should be returned as a #GstClockTime
+ */
+typedef GstClockTime (*RTPSessionRequestTime) (RTPSession *sess,
+ gpointer user_data);
+
+/**
+ * RTPSessionNotifyNACK:
+ * @sess: an #RTPSession
+ * @seqnum: the missing seqnum
+ * @blp: other missing seqnums
+ * @ssrc: SSRC of requested stream
+ * @user_data: user data specified when registering
+ *
+ * Notifies of NACKed frames.
+ */
+typedef void (*RTPSessionNotifyNACK) (RTPSession *sess,
+ guint16 seqnum, guint16 blp, guint32 ssrc, gpointer user_data);
+
+/**
+ * RTPSessionNotifyTWCC:
+ * @sess: an #RTPSession
+ * @twcc_packets: a #GstStructure with the parsed TWCC packet information
+ * @twcc_stats: a #GstStructure with the TWCC statistics
+ * @user_data: user data specified when registering
+ *
+ * Notifies of Transport-wide congestion control packets and stats.
+ */
+
+/**
+ * RTPSessionReconfigure:
+ * @sess: an #RTPSession
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @sess wants to reconfigure the
+ * negotiated parameters.
+ */
+typedef void (*RTPSessionReconfigure) (RTPSession *sess, gpointer user_data);
+
+/**
+ * RTPSessionNotifyEarlyRTCP:
+ * @sess: an #RTPSession
+ * @user_data: user data specified when registering
+ *
+ * Notifies of early RTCP being requested
+ */
+typedef void (*RTPSessionNotifyEarlyRTCP) (RTPSession *sess,
+ gpointer user_data);
+
+/**
+ * RTPSessionCallbacks:
+ * @process_rtp: callback to process RTP packets
+ * @send_rtp: callback for sending RTP packets
+ * @sync_rtcp: callback for handling SR packets
+ * @send_rtcp: callback for sending RTCP packets
+ * @clock_rate: callback for retrieving the clock-rate of a payload type
+ * @reconsider: callback for reconsidering the timeout
+ * @request_key_unit: callback for requesting a new key unit
+ * @request_time: callback for requesting the current time
+ * @notify_nack: callback for notifying NACK
+ * @notify_twcc: callback for notifying TWCC
+ * @reconfigure: callback for requesting reconfiguration
+ * @notify_early_rtcp: callback for notifying early RTCP
+ *
+ * These callbacks can be installed on the session manager to get notification
+ * when RTP and RTCP packets are ready for further processing. These callbacks
+ * are not implemented with signals for performance reasons.
+ */
+typedef struct {
+  RTPSessionProcessRTP  process_rtp;
+  RTPSessionSendRTP     send_rtp;
+  RTPSessionSyncRTCP    sync_rtcp;
+  RTPSessionSendRTCP    send_rtcp;
+  RTPSessionClockRate   clock_rate;
+  RTPSessionReconsider  reconsider;
+  RTPSessionRequestKeyUnit request_key_unit;
+  RTPSessionRequestTime request_time;
+  RTPSessionNotifyNACK  notify_nack;
+  RTPSessionNotifyTWCC  notify_twcc;
+  RTPSessionReconfigure reconfigure;
+  RTPSessionNotifyEarlyRTCP notify_early_rtcp;
+} RTPSessionCallbacks;
+
+/**
+ * RTPSession:
+ * @lock: lock to protect the session
+ * @ssrcs: array of hashtables of sources indexed by SSRC
+ * @total_sources: the number of sources in the session
+ * @callbacks: callbacks installed on the session
+ * @stats: session statistics
+ * @conflicting_addresses: GList of conflicting addresses
+ *
+ * The RTP session manager object
+ */
+struct _RTPSession {
+  GObject object;
+
+  GMutex lock;
+
+  guint header_len;
+  guint mtu;
+
+  GstStructure *sdes;
+
+  /* probation/reordering tolerances handed to sources */
+  guint probation;
+  guint32 max_dropout_time;
+  guint32 max_misorder_time;
+
+  GstRTPProfile rtp_profile;
+
+  gboolean reduced_size_rtcp;
+
+  /* bandwidths */
+  gboolean recalc_bandwidth;
+  guint bandwidth;
+  gdouble rtcp_bandwidth;
+  guint rtcp_rr_bandwidth;
+  guint rtcp_rs_bandwidth;
+
+  guint32 suggested_ssrc;
+  gboolean internal_ssrc_set;
+  gboolean internal_ssrc_from_caps_or_property;
+
+  /* for sender/receiver counting */
+  guint32 key;
+  guint32 mask_idx;
+  guint32 mask;
+  GHashTable *ssrcs[32];
+  guint total_sources;
+
+  /* RTCP scheduling state; short names follow RFC 3550/4585 notation */
+  guint16 generation;
+  GstClockTime next_rtcp_check_time; /* tn */
+  GstClockTime last_rtcp_check_time; /* tp */
+  GstClockTime last_rtcp_send_time; /* t_rr_last */
+  GstClockTime last_rtcp_interval; /* T_rr */
+  GstClockTime start_time;
+  gboolean first_rtcp;
+  gboolean allow_early;
+
+  GstClockTime next_early_rtcp_time;
+
+  gboolean scheduled_bye;
+
+  /* installed callbacks with their matching user-data pointers */
+  RTPSessionCallbacks callbacks;
+  gpointer process_rtp_user_data;
+  gpointer send_rtp_user_data;
+  gpointer send_rtcp_user_data;
+  gpointer sync_rtcp_user_data;
+  gpointer clock_rate_user_data;
+  gpointer reconsider_user_data;
+  gpointer request_key_unit_user_data;
+  gpointer request_time_user_data;
+  gpointer notify_nack_user_data;
+  gpointer notify_twcc_user_data;
+  gpointer reconfigure_user_data;
+  gpointer notify_early_rtcp_user_data;
+
+  RTPSessionStats stats;
+  RTPSessionStats bye_stats;
+
+  gboolean favor_new;
+  GstClockTime rtcp_feedback_retention_window;
+  guint rtcp_immediate_feedback_threshold;
+
+  /* whether the session is point-to-point (affects early RTCP dithering) */
+  gboolean is_doing_ptp;
+
+  GList *conflicting_addresses;
+
+  gboolean timestamp_sender_reports;
+
+  /* Transport-wide cc-extension */
+  RTPTWCCManager *twcc;
+  RTPTWCCStats *twcc_stats;
+  guint8 twcc_recv_ext_id;
+  guint8 twcc_send_ext_id;
+};
+
+/**
+ * RTPSessionClass:
+ * @on_new_ssrc: emitted when a new source is found
+ * @on_ssrc_collision: emitted when an SSRC collision is detected
+ * @on_ssrc_validated: emitted when a source is validated
+ * @on_bye_ssrc: emitted when a source is gone
+ *
+ * The session class.
+ */
+struct _RTPSessionClass {
+  GObjectClass parent_class;
+
+  /* action signals */
+  RTPSource* (*get_source_by_ssrc) (RTPSession *sess, guint32 ssrc);
+
+  /* signals */
+  void (*on_new_ssrc)       (RTPSession *sess, RTPSource *source);
+  void (*on_ssrc_collision) (RTPSession *sess, RTPSource *source);
+  void (*on_ssrc_validated) (RTPSession *sess, RTPSource *source);
+  void (*on_ssrc_active)    (RTPSession *sess, RTPSource *source);
+  void (*on_ssrc_sdes)      (RTPSession *sess, RTPSource *source);
+  void (*on_bye_ssrc)       (RTPSession *sess, RTPSource *source);
+  void (*on_bye_timeout)    (RTPSession *sess, RTPSource *source);
+  void (*on_timeout)        (RTPSession *sess, RTPSource *source);
+  void (*on_sender_timeout) (RTPSession *sess, RTPSource *source);
+  gboolean (*on_sending_rtcp) (RTPSession *sess, GstBuffer *buffer,
+      gboolean early);
+  void (*on_app_rtcp)       (RTPSession *sess, guint subtype, guint ssrc,
+      const gchar *name, GstBuffer *data);
+  void (*on_feedback_rtcp)  (RTPSession *sess, guint type, guint fbtype,
+      guint sender_ssrc, guint media_ssrc, GstBuffer *fci);
+  gboolean (*send_rtcp)     (RTPSession *sess, GstClockTime max_delay);
+  void (*on_receiving_rtcp) (RTPSession * sess, GstBuffer * buffer);
+  void (*on_new_sender_ssrc)      (RTPSession *sess, RTPSource *source);
+  void (*on_sender_ssrc_active)   (RTPSession *sess, RTPSource *source);
+  guint (*on_sending_nacks)  (RTPSession *sess, guint sender_ssrc,
+      guint media_ssrc, GArray *nacks, GstBuffer *buffer);
+};
+
+GType rtp_session_get_type (void);
+
+/* create and configure */
+RTPSession* rtp_session_new (void);
+void rtp_session_reset (RTPSession *sess);
+void rtp_session_set_callbacks (RTPSession *sess,
+ RTPSessionCallbacks *callbacks,
+ gpointer user_data);
+void rtp_session_set_process_rtp_callback (RTPSession * sess,
+ RTPSessionProcessRTP callback,
+ gpointer user_data);
+void rtp_session_set_send_rtp_callback (RTPSession * sess,
+ RTPSessionSendRTP callback,
+ gpointer user_data);
+void rtp_session_set_send_rtcp_callback (RTPSession * sess,
+ RTPSessionSendRTCP callback,
+ gpointer user_data);
+void rtp_session_set_sync_rtcp_callback (RTPSession * sess,
+ RTPSessionSyncRTCP callback,
+ gpointer user_data);
+void rtp_session_set_clock_rate_callback (RTPSession * sess,
+ RTPSessionClockRate callback,
+ gpointer user_data);
+void rtp_session_set_reconsider_callback (RTPSession * sess,
+ RTPSessionReconsider callback,
+ gpointer user_data);
+void rtp_session_set_request_time_callback (RTPSession * sess,
+ RTPSessionRequestTime callback,
+ gpointer user_data);
+
+void rtp_session_set_bandwidth (RTPSession *sess, gdouble bandwidth);
+gdouble rtp_session_get_bandwidth (RTPSession *sess);
+void rtp_session_set_rtcp_fraction (RTPSession *sess, gdouble fraction);
+gdouble rtp_session_get_rtcp_fraction (RTPSession *sess);
+
+GstStructure * rtp_session_get_sdes_struct (RTPSession *sess);
+void rtp_session_set_sdes_struct (RTPSession *sess, const GstStructure *sdes);
+
+/* handling sources */
+guint32 rtp_session_suggest_ssrc (RTPSession *sess, gboolean *is_random);
+
+gboolean rtp_session_add_source (RTPSession *sess, RTPSource *src);
+guint rtp_session_get_num_sources (RTPSession *sess);
+guint rtp_session_get_num_active_sources (RTPSession *sess);
+RTPSource* rtp_session_get_source_by_ssrc (RTPSession *sess, guint32 ssrc);
+
+/* processing packets from receivers */
+GstFlowReturn rtp_session_process_rtp (RTPSession *sess, GstBuffer *buffer,
+ GstClockTime current_time,
+ GstClockTime running_time,
+ guint64 ntpnstime);
+GstFlowReturn rtp_session_process_rtcp (RTPSession *sess, GstBuffer *buffer,
+ GstClockTime current_time,
+ GstClockTime running_time,
+ guint64 ntpnstime);
+
+/* processing packets for sending */
+void rtp_session_update_send_caps (RTPSession *sess, GstCaps *caps);
+GstFlowReturn rtp_session_send_rtp (RTPSession *sess, gpointer data, gboolean is_list,
+ GstClockTime current_time, GstClockTime running_time);
+
+/* scheduling bye */
+void rtp_session_mark_all_bye (RTPSession *sess, const gchar *reason);
+GstFlowReturn rtp_session_schedule_bye (RTPSession *sess, GstClockTime current_time);
+
+/* get interval for next RTCP interval */
+GstClockTime rtp_session_next_timeout (RTPSession *sess, GstClockTime current_time);
+GstFlowReturn rtp_session_on_timeout (RTPSession *sess, GstClockTime current_time,
+ guint64 ntpnstime, GstClockTime running_time);
+
+/* request the transmission of an early RTCP packet */
+gboolean rtp_session_request_early_rtcp (RTPSession * sess, GstClockTime current_time,
+ GstClockTime max_delay);
+
+/* Notify session of a request for a new key unit */
+gboolean rtp_session_request_key_unit (RTPSession * sess,
+ guint32 ssrc,
+ gboolean fir,
+ gint count);
+gboolean rtp_session_request_nack (RTPSession * sess,
+ guint32 ssrc,
+ guint16 seqnum,
+ GstClockTime max_delay);
+
+void rtp_session_update_recv_caps_structure (RTPSession * sess, const GstStructure * s);
+
+
+#endif /* __RTP_SESSION_H__ */
diff --git a/gst/rtpmanager/rtpsource.c b/gst/rtpmanager/rtpsource.c
new file mode 100644
index 0000000000..865848c495
--- /dev/null
+++ b/gst/rtpmanager/rtpsource.c
@@ -0,0 +1,2073 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) 2015 Kurento (http://kurento.org/)
+ * @author: Miguel París <mparisdiaz@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/rtp/gstrtcpbuffer.h>
+
+#include "rtpsource.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtp_source_debug);
+#define GST_CAT_DEFAULT rtp_source_debug
+
+#define RTP_MAX_PROBATION_LEN  32
+
+/* signals and args */
+enum
+{
+  /* no signals currently defined; LAST_SIGNAL kept as GObject boilerplate */
+  LAST_SIGNAL
+};
+
+/* default property values */
+#define DEFAULT_SSRC                 0
+#define DEFAULT_IS_CSRC              FALSE
+#define DEFAULT_IS_VALIDATED         FALSE
+#define DEFAULT_IS_SENDER            FALSE
+#define DEFAULT_SDES                 NULL
+#define DEFAULT_PROBATION            RTP_DEFAULT_PROBATION
+#define DEFAULT_MAX_DROPOUT_TIME     60000
+#define DEFAULT_MAX_MISORDER_TIME    2000
+#define DEFAULT_DISABLE_RTCP         FALSE
+
+/* property IDs used by set_property/get_property */
+enum
+{
+  PROP_0,
+  PROP_SSRC,
+  PROP_IS_CSRC,
+  PROP_IS_VALIDATED,
+  PROP_IS_SENDER,
+  PROP_SDES,
+  PROP_STATS,
+  PROP_PROBATION,
+  PROP_MAX_DROPOUT_TIME,
+  PROP_MAX_MISORDER_TIME,
+  PROP_DISABLE_RTCP
+};
+
+/* GObject vmethods */
+static void rtp_source_finalize (GObject * object);
+static void rtp_source_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void rtp_source_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+/* static guint rtp_source_signals[LAST_SIGNAL] = { 0 }; */
+
+G_DEFINE_TYPE (RTPSource, rtp_source, G_TYPE_OBJECT);
+
+/* GObject class initialisation: installs the vmethods and all properties
+ * of RTPSource, and sets up the debug category. */
+static void
+rtp_source_class_init (RTPSourceClass * klass)
+{
+  GObjectClass *gobject_class;
+
+  gobject_class = (GObjectClass *) klass;
+
+  gobject_class->finalize = rtp_source_finalize;
+
+  gobject_class->set_property = rtp_source_set_property;
+  gobject_class->get_property = rtp_source_get_property;
+
+  /* the SSRC is construct-only: it cannot change after object creation */
+  g_object_class_install_property (gobject_class, PROP_SSRC,
+      g_param_spec_uint ("ssrc", "SSRC",
+          "The SSRC of this source", 0, G_MAXUINT, DEFAULT_SSRC,
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT_ONLY | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_IS_CSRC,
+      g_param_spec_boolean ("is-csrc", "Is CSRC",
+          "If this SSRC is acting as a contributing source",
+          DEFAULT_IS_CSRC, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_IS_VALIDATED,
+      g_param_spec_boolean ("is-validated", "Is Validated",
+          "If this SSRC is validated", DEFAULT_IS_VALIDATED,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_IS_SENDER,
+      g_param_spec_boolean ("is-sender", "Is Sender",
+          "If this SSRC is a sender", DEFAULT_IS_SENDER,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * RTPSource:sdes
+   *
+   * The current SDES items of the source. Returns a structure with name
+   * application/x-rtp-source-sdes and may contain the following fields:
+   *
+   *  'cname'    G_TYPE_STRING  : The canonical name in the form user@host
+   *  'name'     G_TYPE_STRING  : The user name
+   *  'email'    G_TYPE_STRING  : The user's electronic mail address
+   *  'phone'    G_TYPE_STRING  : The user's phone number
+   *  'location' G_TYPE_STRING  : The geographic user location
+   *  'tool'     G_TYPE_STRING  : The name of application or tool
+   *  'note'     G_TYPE_STRING  : A notice about the source
+   *
+   * Other fields may be present and these represent private items in
+   * the SDES where the field name is the prefix.
+   */
+  g_object_class_install_property (gobject_class, PROP_SDES,
+      g_param_spec_boxed ("sdes", "SDES",
+          "The SDES information for this source",
+          GST_TYPE_STRUCTURE, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * RTPSource:stats
+   *
+   * This property returns a GstStructure named application/x-rtp-source-stats with
+   * fields useful for statistics and diagnostics.
+   *
+   * Take note of each respective field's units:
+   *
+   * - NTP times are in the appropriate 32-bit or 64-bit fixed-point format
+   *   starting from January 1, 1970 (except for timespans).
+   * - RTP times are in clock rate units (i.e. clock rate = 1 second)
+   *   starting at a random offset.
+   * - For fields indicating packet loss, note that late packets are not considered lost,
+   *   and duplicates are not taken into account. Hence, the loss may be negative
+   *   if there are duplicates.
+   *
+   * The following fields are always present.
+   *
+   * * "ssrc"         G_TYPE_UINT     the SSRC of this source
+   * * "internal"     G_TYPE_BOOLEAN  this source is a source of the session
+   * * "validated"    G_TYPE_BOOLEAN  the source is validated
+   * * "received-bye" G_TYPE_BOOLEAN  we received a BYE from this source
+   * * "is-csrc"      G_TYPE_BOOLEAN  this source was found as CSRC
+   * * "is-sender"    G_TYPE_BOOLEAN  this source is a sender
+   * * "seqnum-base"  G_TYPE_INT      first seqnum if known
+   * * "clock-rate"   G_TYPE_INT      the clock rate of the media
+   *
+   * The following fields are only present when known.
+   *
+   * * "rtp-from"     G_TYPE_STRING   where we received the last RTP packet from
+   * * "rtcp-from"    G_TYPE_STRING   where we received the last RTCP packet from
+   *
+   * The following fields make sense for internal sources and will only increase
+   * when "is-sender" is TRUE.
+   *
+   * * "octets-sent"  G_TYPE_UINT64   number of payload bytes we sent
+   * * "packets-sent" G_TYPE_UINT64   number of packets we sent
+   *
+   * The following fields make sense for non-internal sources and will only
+   * increase when "is-sender" is TRUE.
+   *
+   * * "octets-received"  G_TYPE_UINT64  total number of payload bytes received
+   * * "packets-received" G_TYPE_UINT64  total number of packets received
+   * * "bytes-received"   G_TYPE_UINT64  total number of bytes received including lower level headers overhead
+   *
+   * Following fields are updated when "is-sender" is TRUE.
+   *
+   * * "bitrate"      G_TYPE_UINT64   bitrate in bits per second
+   * * "jitter"       G_TYPE_UINT     estimated jitter (in clock rate units)
+   * * "packets-lost" G_TYPE_INT      estimated amount of packets lost
+   *
+   * The last SR report this source sent. This only updates when "is-sender" is
+   * TRUE.
+   *
+   * * "have-sr"         G_TYPE_BOOLEAN  the source has sent SR
+   * * "sr-ntptime"      G_TYPE_UINT64   NTP time of SR (in NTP Timestamp Format, 32.32 fixed point)
+   * * "sr-rtptime"      G_TYPE_UINT     RTP time of SR (in clock rate units)
+   * * "sr-octet-count"  G_TYPE_UINT     the number of bytes in the SR
+   * * "sr-packet-count" G_TYPE_UINT     the number of packets in the SR
+   *
+   * The following fields are only present for non-internal sources and
+   * represent the content of the last RB packet that was sent to this source.
+   * These values are only updated when the source is sending.
+   *
+   * * "sent-rb"               G_TYPE_BOOLEAN  we have sent an RB
+   * * "sent-rb-fractionlost"  G_TYPE_UINT     calculated lost 8-bit fraction
+   * * "sent-rb-packetslost"   G_TYPE_INT      lost packets
+   * * "sent-rb-exthighestseq" G_TYPE_UINT     last seen seqnum
+   * * "sent-rb-jitter"        G_TYPE_UINT     jitter (in clock rate units)
+   * * "sent-rb-lsr"           G_TYPE_UINT     last SR time (seconds in NTP Short Format, 16.16 fixed point)
+   * * "sent-rb-dlsr"          G_TYPE_UINT     delay since last SR (seconds in NTP Short Format, 16.16 fixed point)
+   *
+   * The following fields are only present for non-internal sources and
+   * represents the last RB that this source sent. This is only updated
+   * when the source is receiving data and sending RB blocks.
+   *
+   * * "have-rb"          G_TYPE_BOOLEAN  the source has sent RB
+   * * "rb-fractionlost"  G_TYPE_UINT     lost 8-bit fraction
+   * * "rb-packetslost"   G_TYPE_INT      lost packets
+   * * "rb-exthighestseq" G_TYPE_UINT     highest received seqnum
+   * * "rb-jitter"        G_TYPE_UINT     reception jitter (in clock rate units)
+   * * "rb-lsr"           G_TYPE_UINT     last SR time (seconds in NTP Short Format, 16.16 fixed point)
+   * * "rb-dlsr"          G_TYPE_UINT     delay since last SR (seconds in NTP Short Format, 16.16 fixed point)
+   *
+   * The round trip of this source is calculated from the last RB
+   * values and the reception time of the last RB packet. It is only present for
+   * non-internal sources.
+   *
+   * * "rb-round-trip"    G_TYPE_UINT     the round-trip time (seconds in NTP Short Format, 16.16 fixed point)
+   *
+   */
+  g_object_class_install_property (gobject_class, PROP_STATS,
+      g_param_spec_boxed ("stats", "Stats",
+          "The stats of this source", GST_TYPE_STRUCTURE,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_PROBATION,
+      g_param_spec_uint ("probation", "Number of probations",
+          "Consecutive packet sequence numbers to accept the source",
+          0, G_MAXUINT, DEFAULT_PROBATION,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_DROPOUT_TIME,
+      g_param_spec_uint ("max-dropout-time", "Max dropout time",
+          "The maximum time (milliseconds) of missing packets tolerated.",
+          0, G_MAXUINT, DEFAULT_MAX_DROPOUT_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_MAX_MISORDER_TIME,
+      g_param_spec_uint ("max-misorder-time", "Max misorder time",
+          "The maximum time (milliseconds) of misordered packets tolerated.",
+          0, G_MAXUINT, DEFAULT_MAX_MISORDER_TIME,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  /**
+   * RTPSource:disable-rtcp:
+   *
+   * Allow disabling the sending of RTCP packets for this source.
+   */
+  g_object_class_install_property (gobject_class, PROP_DISABLE_RTCP,
+      g_param_spec_boolean ("disable-rtcp", "Disable RTCP",
+          "Disable sending RTCP packets for this source",
+          DEFAULT_DISABLE_RTCP, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  GST_DEBUG_CATEGORY_INIT (rtp_source_debug, "rtpsource", 0, "RTP Source");
+}
+
+/**
+ * rtp_source_reset:
+ * @src: an #RTPSource
+ *
+ * Reset the stats of @src.
+ */
+void
+rtp_source_reset (RTPSource * src)
+{
+ src->marked_bye = FALSE;
+ /* g_free() is NULL-safe, the previous guard was redundant */
+ g_free (src->bye_reason);
+ src->bye_reason = NULL;
+ src->sent_bye = FALSE;
+ g_hash_table_remove_all (src->reported_in_sr_of);
+ g_queue_foreach (src->retained_feedback, (GFunc) gst_buffer_unref, NULL);
+ g_queue_clear (src->retained_feedback);
+ src->last_rtptime = -1;
+
+ /* -1 cycles marks "no packet seen yet", see update_receiver_stats() */
+ src->stats.cycles = -1;
+ src->stats.jitter = 0;
+ src->stats.transit = -1;
+ src->stats.curr_sr = 0;
+ src->stats.sr[0].is_valid = FALSE;
+ src->stats.curr_rr = 0;
+ src->stats.rr[0].is_valid = FALSE;
+ src->stats.prev_rtptime = GST_CLOCK_TIME_NONE;
+ src->stats.prev_rtcptime = GST_CLOCK_TIME_NONE;
+ src->stats.last_rtptime = GST_CLOCK_TIME_NONE;
+ src->stats.last_rtcptime = GST_CLOCK_TIME_NONE;
+ /* NOTE(review): only the nacks array is cleared here while nack_deadlines
+  * is left untouched — confirm the two arrays cannot get out of sync */
+ g_array_set_size (src->nacks, 0);
+
+ src->stats.sent_pli_count = 0;
+ src->stats.sent_fir_count = 0;
+ src->stats.sent_nack_count = 0;
+ src->stats.recv_nack_count = 0;
+}
+
+/* GObject instance init: install the defaults and allocate the helper
+ * containers, then reset the stats. */
+static void
+rtp_source_init (RTPSource * src)
+{
+ /* sources are initially on probation until we receive enough valid RTP
+ * packets or a valid RTCP packet */
+ src->validated = FALSE;
+ src->internal = FALSE;
+ src->probation = DEFAULT_PROBATION;
+ src->curr_probation = src->probation;
+ src->closing = FALSE;
+ src->max_dropout_time = DEFAULT_MAX_DROPOUT_TIME;
+ src->max_misorder_time = DEFAULT_MAX_MISORDER_TIME;
+
+ src->sdes = gst_structure_new_empty ("application/x-rtp-source-sdes");
+
+ /* -1 means "unknown / not configured yet" for these fields */
+ src->payload = -1;
+ src->clock_rate = -1;
+ /* queue of packets held back while the source is in probation */
+ src->packets = g_queue_new ();
+ src->seqnum_offset = -1;
+
+ src->retained_feedback = g_queue_new ();
+ src->nacks = g_array_new (FALSE, FALSE, sizeof (guint16));
+ src->nack_deadlines = g_array_new (FALSE, FALSE, sizeof (GstClockTime));
+
+ src->reported_in_sr_of = g_hash_table_new (g_direct_hash, g_direct_equal);
+
+ src->last_keyframe_request = GST_CLOCK_TIME_NONE;
+
+ /* must run after the queues/arrays above exist: reset clears them */
+ rtp_source_reset (src);
+
+ src->pt_set = FALSE;
+}
+
+/* Free a #RTPConflictingAddress, dropping its #GSocketAddress reference. */
+void
+rtp_conflicting_address_free (RTPConflictingAddress * addr)
+{
+ g_object_unref (addr->address);
+ g_slice_free (RTPConflictingAddress, addr);
+}
+
+/* GObject finalize: release every container and reference owned by the
+ * source before chaining up. */
+static void
+rtp_source_finalize (GObject * object)
+{
+ RTPSource *src = RTP_SOURCE_CAST (object);
+
+ /* g_queue_free_full() unrefs every queued buffer and frees the queue in
+  * one call, replacing the previous foreach + free pairs */
+ g_queue_free_full (src->packets, (GDestroyNotify) gst_buffer_unref);
+
+ gst_structure_free (src->sdes);
+
+ g_free (src->bye_reason);
+
+ gst_caps_replace (&src->caps, NULL);
+
+ g_list_free_full (src->conflicting_addresses,
+     (GDestroyNotify) rtp_conflicting_address_free);
+ g_queue_free_full (src->retained_feedback, (GDestroyNotify) gst_buffer_unref);
+
+ g_array_free (src->nacks, TRUE);
+ g_array_free (src->nack_deadlines, TRUE);
+
+ /* g_clear_object() is NULL-safe and clears the pointer */
+ g_clear_object (&src->rtp_from);
+ g_clear_object (&src->rtcp_from);
+
+ g_hash_table_unref (src->reported_in_sr_of);
+
+ G_OBJECT_CLASS (rtp_source_parent_class)->finalize (object);
+}
+
+/* Build the GstStructure handed out through the "stats" property; the
+ * caller takes ownership of the returned structure (see
+ * rtp_source_get_property(), which uses g_value_take_boxed()). */
+static GstStructure *
+rtp_source_create_stats (RTPSource * src)
+{
+ GstStructure *s;
+ gboolean is_sender = src->is_sender;
+ gboolean internal = src->internal;
+ gchar *address_str;
+ gboolean have_rb;
+ guint32 ssrc = 0;
+ guint8 fractionlost = 0;
+ gint32 packetslost = 0;
+ guint32 exthighestseq = 0;
+ guint32 jitter = 0;
+ guint32 lsr = 0;
+ guint32 dlsr = 0;
+ guint32 round_trip = 0;
+ gboolean have_sr;
+ GstClockTime time = 0;
+ guint64 ntptime = 0;
+ guint32 rtptime = 0;
+ guint32 packet_count = 0;
+ guint32 octet_count = 0;
+
+
+ /* common data for all types of sources */
+ s = gst_structure_new ("application/x-rtp-source-stats",
+ "ssrc", G_TYPE_UINT, (guint) src->ssrc,
+ "internal", G_TYPE_BOOLEAN, internal,
+ "validated", G_TYPE_BOOLEAN, src->validated,
+ "received-bye", G_TYPE_BOOLEAN, src->marked_bye,
+ "is-csrc", G_TYPE_BOOLEAN, src->is_csrc,
+ "is-sender", G_TYPE_BOOLEAN, is_sender,
+ "seqnum-base", G_TYPE_INT, src->seqnum_offset,
+ "clock-rate", G_TYPE_INT, src->clock_rate, NULL);
+
+ /* add address and port */
+ if (src->rtp_from) {
+ address_str = __g_socket_address_to_string (src->rtp_from);
+ gst_structure_set (s, "rtp-from", G_TYPE_STRING, address_str, NULL);
+ g_free (address_str);
+ }
+ if (src->rtcp_from) {
+ address_str = __g_socket_address_to_string (src->rtcp_from);
+ gst_structure_set (s, "rtcp-from", G_TYPE_STRING, address_str, NULL);
+ g_free (address_str);
+ }
+
+ /* jitter is stored scaled up by 16 (see calculate_jitter()), scale it
+ * back for the public stats */
+ gst_structure_set (s,
+ "octets-sent", G_TYPE_UINT64, src->stats.octets_sent,
+ "packets-sent", G_TYPE_UINT64, src->stats.packets_sent,
+ "octets-received", G_TYPE_UINT64, src->stats.octets_received,
+ "packets-received", G_TYPE_UINT64, src->stats.packets_received,
+ "bytes-received", G_TYPE_UINT64, src->stats.bytes_received,
+ "bitrate", G_TYPE_UINT64, src->bitrate,
+ "packets-lost", G_TYPE_INT,
+ (gint) rtp_stats_get_packets_lost (&src->stats), "jitter", G_TYPE_UINT,
+ (guint) (src->stats.jitter >> 4),
+ "sent-pli-count", G_TYPE_UINT, src->stats.sent_pli_count,
+ "recv-pli-count", G_TYPE_UINT, src->stats.recv_pli_count,
+ "sent-fir-count", G_TYPE_UINT, src->stats.sent_fir_count,
+ "recv-fir-count", G_TYPE_UINT, src->stats.recv_fir_count,
+ "sent-nack-count", G_TYPE_UINT, src->stats.sent_nack_count,
+ "recv-nack-count", G_TYPE_UINT, src->stats.recv_nack_count,
+ "recv-packet-rate", G_TYPE_UINT,
+ gst_rtp_packet_rate_ctx_get (&src->packet_rate_ctx), NULL);
+
+ /* get the last SR. */
+ have_sr = rtp_source_get_last_sr (src, &time, &ntptime, &rtptime,
+ &packet_count, &octet_count);
+ gst_structure_set (s,
+ "have-sr", G_TYPE_BOOLEAN, have_sr,
+ "sr-ntptime", G_TYPE_UINT64, ntptime,
+ "sr-rtptime", G_TYPE_UINT, (guint) rtptime,
+ "sr-octet-count", G_TYPE_UINT, (guint) octet_count,
+ "sr-packet-count", G_TYPE_UINT, (guint) packet_count, NULL);
+
+ /* RB fields only make sense for non-internal sources, see the property
+ * documentation in rtp_source_class_init() */
+ if (!internal) {
+ /* get the last RB we sent */
+ gst_structure_set (s,
+ "sent-rb", G_TYPE_BOOLEAN, src->last_rr.is_valid,
+ "sent-rb-fractionlost", G_TYPE_UINT, (guint) src->last_rr.fractionlost,
+ "sent-rb-packetslost", G_TYPE_INT, (gint) src->last_rr.packetslost,
+ "sent-rb-exthighestseq", G_TYPE_UINT,
+ (guint) src->last_rr.exthighestseq, "sent-rb-jitter", G_TYPE_UINT,
+ (guint) src->last_rr.jitter, "sent-rb-lsr", G_TYPE_UINT,
+ (guint) src->last_rr.lsr, "sent-rb-dlsr", G_TYPE_UINT,
+ (guint) src->last_rr.dlsr, NULL);
+
+ /* get the last RB */
+ have_rb = rtp_source_get_last_rb (src, &ssrc, &fractionlost,
+ &packetslost, &exthighestseq, &jitter, &lsr, &dlsr, &round_trip);
+
+ gst_structure_set (s,
+ "have-rb", G_TYPE_BOOLEAN, have_rb,
+ "rb-ssrc", G_TYPE_UINT, ssrc,
+ "rb-fractionlost", G_TYPE_UINT, (guint) fractionlost,
+ "rb-packetslost", G_TYPE_INT, (gint) packetslost,
+ "rb-exthighestseq", G_TYPE_UINT, (guint) exthighestseq,
+ "rb-jitter", G_TYPE_UINT, (guint) jitter,
+ "rb-lsr", G_TYPE_UINT, (guint) lsr,
+ "rb-dlsr", G_TYPE_UINT, (guint) dlsr,
+ "rb-round-trip", G_TYPE_UINT, (guint) round_trip, NULL);
+ }
+
+ return s;
+}
+
+/**
+ * rtp_source_get_sdes_struct:
+ * @src: an #RTPSource
+ *
+ * Get the SDES from @src. See the SDES property for more details.
+ *
+ * Returns: (transfer none): %GstStructure of type
+ * "application/x-rtp-source-sdes". The result is valid until the SDES
+ * items of @src are modified.
+ */
+const GstStructure *
+rtp_source_get_sdes_struct (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), NULL);
+
+ return src->sdes;
+}
+
+/* gst_structure_foreach() callback: returns TRUE when @user_data (the old
+ * SDES structure) carries the same string for this field. */
+static gboolean
+sdes_struct_compare_func (GQuark field_id, const GValue * value,
+    gpointer user_data)
+{
+ GstStructure *old = GST_STRUCTURE (user_data);
+ const gchar *name = g_quark_to_string (field_id);
+
+ /* a field missing from the old SDES means the structures differ */
+ if (!gst_structure_has_field (old, name))
+   return FALSE;
+
+ /* SDES items are always strings */
+ g_assert (G_VALUE_HOLDS_STRING (value));
+
+ return g_str_equal (g_value_get_string (value),
+     gst_structure_get_string (old, name));
+}
+
+/**
+ * rtp_source_set_sdes_struct:
+ * @src: an #RTPSource
+ * @sdes: the SDES structure
+ *
+ * Store the @sdes in @src. @sdes must be a structure of type
+ * "application/x-rtp-source-sdes", see the SDES property for more details.
+ *
+ * This function takes ownership of @sdes.
+ *
+ * Returns: %FALSE if the SDES was unchanged.
+ */
+gboolean
+rtp_source_set_sdes_struct (RTPSource * src, GstStructure * sdes)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+ g_return_val_if_fail (strcmp (gst_structure_get_name (sdes),
+         "application/x-rtp-source-sdes") == 0, FALSE);
+
+ /* identical content: keep the old structure and discard the new one */
+ if (gst_structure_foreach (sdes, sdes_struct_compare_func, src->sdes)) {
+   gst_structure_free (sdes);
+   return FALSE;
+ }
+
+ /* something differed: take ownership of the new structure */
+ gst_structure_free (src->sdes);
+ src->sdes = sdes;
+ return TRUE;
+}
+
+/* GObject property setter; properties are installed in
+ * rtp_source_class_init(). */
+static void
+rtp_source_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ RTPSource *src;
+
+ src = RTP_SOURCE (object);
+
+ switch (prop_id) {
+ case PROP_SSRC:
+ src->ssrc = g_value_get_uint (value);
+ break;
+ case PROP_PROBATION:
+ src->probation = g_value_get_uint (value);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ src->max_dropout_time = g_value_get_uint (value);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ src->max_misorder_time = g_value_get_uint (value);
+ break;
+ case PROP_DISABLE_RTCP:
+ src->disable_rtcp = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter. The "stats" property builds a fresh structure
+ * on every read and hands ownership to the caller via g_value_take_boxed(). */
+static void
+rtp_source_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ RTPSource *src;
+
+ src = RTP_SOURCE (object);
+
+ switch (prop_id) {
+ case PROP_SSRC:
+ g_value_set_uint (value, rtp_source_get_ssrc (src));
+ break;
+ case PROP_IS_CSRC:
+ g_value_set_boolean (value, rtp_source_is_as_csrc (src));
+ break;
+ case PROP_IS_VALIDATED:
+ g_value_set_boolean (value, rtp_source_is_validated (src));
+ break;
+ case PROP_IS_SENDER:
+ g_value_set_boolean (value, rtp_source_is_sender (src));
+ break;
+ case PROP_SDES:
+ g_value_set_boxed (value, rtp_source_get_sdes_struct (src));
+ break;
+ case PROP_STATS:
+ g_value_take_boxed (value, rtp_source_create_stats (src));
+ break;
+ case PROP_PROBATION:
+ g_value_set_uint (value, src->probation);
+ break;
+ case PROP_MAX_DROPOUT_TIME:
+ g_value_set_uint (value, src->max_dropout_time);
+ break;
+ case PROP_MAX_MISORDER_TIME:
+ g_value_set_uint (value, src->max_misorder_time);
+ break;
+ case PROP_DISABLE_RTCP:
+ g_value_set_boolean (value, src->disable_rtcp);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/**
+ * rtp_source_new:
+ * @ssrc: an SSRC
+ *
+ * Create a #RTPSource with @ssrc.
+ *
+ * Returns: a new #RTPSource. Use g_object_unref() after usage.
+ */
+RTPSource *
+rtp_source_new (guint32 ssrc)
+{
+ RTPSource *source = g_object_new (RTP_TYPE_SOURCE, NULL);
+
+ source->ssrc = ssrc;
+
+ return source;
+}
+
+/**
+ * rtp_source_set_callbacks:
+ * @src: an #RTPSource
+ * @cb: callback functions
+ * @user_data: user data
+ *
+ * Set the callbacks for the source.
+ */
+void
+rtp_source_set_callbacks (RTPSource * src, RTPSourceCallbacks * cb,
+ gpointer user_data)
+{
+ g_return_if_fail (RTP_IS_SOURCE (src));
+
+ /* only the push_rtp and clock_rate callbacks are stored */
+ src->callbacks.push_rtp = cb->push_rtp;
+ src->callbacks.clock_rate = cb->clock_rate;
+ src->user_data = user_data;
+}
+
+/**
+ * rtp_source_get_ssrc:
+ * @src: an #RTPSource
+ *
+ * Get the SSRC of @src.
+ *
+ * Returns: the SSRC of @src.
+ */
+guint32
+rtp_source_get_ssrc (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), 0);
+
+ return src->ssrc;
+}
+
+/**
+ * rtp_source_set_as_csrc:
+ * @src: an #RTPSource
+ *
+ * Configure @src as a CSRC, this will also validate @src.
+ */
+void
+rtp_source_set_as_csrc (RTPSource * src)
+{
+ g_return_if_fail (RTP_IS_SOURCE (src));
+
+ src->is_csrc = TRUE;
+ /* a CSRC is considered validated */
+ src->validated = TRUE;
+}
+
+/**
+ * rtp_source_is_as_csrc:
+ * @src: an #RTPSource
+ *
+ * Check if @src is a contributing source.
+ *
+ * Returns: %TRUE if @src is acting as a contributing source.
+ */
+gboolean
+rtp_source_is_as_csrc (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ return src->is_csrc;
+}
+
+/**
+ * rtp_source_is_active:
+ * @src: an #RTPSource
+ *
+ * Check if @src is an active source. A source is active if it has been
+ * validated and has not yet received a BYE packet.
+ *
+ * Returns: %TRUE if @src is an active source.
+ */
+gboolean
+rtp_source_is_active (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ return RTP_SOURCE_IS_ACTIVE (src);
+}
+
+/**
+ * rtp_source_is_validated:
+ * @src: an #RTPSource
+ *
+ * Check if @src is a validated source.
+ *
+ * Returns: %TRUE if @src is a validated source.
+ */
+gboolean
+rtp_source_is_validated (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ return src->validated;
+}
+
+/**
+ * rtp_source_is_sender:
+ * @src: an #RTPSource
+ *
+ * Check if @src is a sending source.
+ *
+ * Returns: %TRUE if @src is a sending source.
+ */
+gboolean
+rtp_source_is_sender (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ return RTP_SOURCE_IS_SENDER (src);
+}
+
+/**
+ * rtp_source_is_marked_bye:
+ * @src: an #RTPSource
+ *
+ * Check if @src is marked as leaving the session with a BYE packet.
+ *
+ * Returns: %TRUE if @src has been marked BYE.
+ */
+gboolean
+rtp_source_is_marked_bye (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ return RTP_SOURCE_IS_MARKED_BYE (src);
+}
+
+
+/**
+ * rtp_source_get_bye_reason:
+ * @src: an #RTPSource
+ *
+ * Get the BYE reason for @src. Check if the source is marked as leaving the
+ * session with a BYE message first with rtp_source_is_marked_bye().
+ *
+ * Returns: The BYE reason or NULL when no reason was given or the source was
+ * not marked BYE yet. g_free() after usage.
+ */
+gchar *
+rtp_source_get_bye_reason (RTPSource * src)
+{
+ g_return_val_if_fail (RTP_IS_SOURCE (src), NULL);
+
+ /* g_strdup() propagates a NULL reason as NULL */
+ return g_strdup (src->bye_reason);
+}
+
+/**
+ * rtp_source_update_caps:
+ * @src: an #RTPSource
+ * @caps: a #GstCaps
+ *
+ * Parse @caps and store all relevant information in @src.
+ */
+void
+rtp_source_update_caps (RTPSource * src, GstCaps * caps)
+{
+ GstStructure *s;
+ guint val;
+ gint ival;
+ gboolean rtx;
+
+ /* nothing changed, return */
+ if (caps == NULL || src->caps == caps)
+ return;
+
+ s = gst_caps_get_structure (caps, 0);
+
+ /* when the caps carry an rtx-ssrc equal to our SSRC, this source is a
+ * retransmission stream and the rtx-* variants of the fields apply */
+ rtx = (gst_structure_get_uint (s, "rtx-ssrc", &val) && val == src->ssrc);
+
+ /* -1 marks "unknown" for all three fields below */
+ if (gst_structure_get_int (s, rtx ? "rtx-payload" : "payload", &ival))
+ src->payload = ival;
+ else
+ src->payload = -1;
+
+ GST_DEBUG ("got %spayload %d", rtx ? "rtx " : "", src->payload);
+
+ if (gst_structure_get_int (s, "clock-rate", &ival))
+ src->clock_rate = ival;
+ else
+ src->clock_rate = -1;
+
+ GST_DEBUG ("got clock-rate %d", src->clock_rate);
+
+ if (gst_structure_get_uint (s, rtx ? "rtx-seqnum-offset" : "seqnum-offset",
+ &val))
+ src->seqnum_offset = val;
+ else
+ src->seqnum_offset = -1;
+
+ GST_DEBUG ("got %sseqnum-offset %" G_GINT32_FORMAT, rtx ? "rtx " : "",
+ src->seqnum_offset);
+
+ /* keep a ref to the caps so the identity check above works next time */
+ gst_caps_replace (&src->caps, caps);
+}
+
+/**
+ * rtp_source_set_rtp_from:
+ * @src: an #RTPSource
+ * @address: the RTP address to set
+ *
+ * Set that @src is receiving RTP packets from @address. This is used for
+ * collision checking.
+ */
+void
+rtp_source_set_rtp_from (RTPSource * src, GSocketAddress * address)
+{
+ g_return_if_fail (RTP_IS_SOURCE (src));
+
+ /* take the new reference before dropping the old one so that passing the
+  * currently stored address cannot destroy it prematurely */
+ g_object_ref (address);
+ if (src->rtp_from)
+   g_object_unref (src->rtp_from);
+ src->rtp_from = G_SOCKET_ADDRESS (address);
+}
+
+/**
+ * rtp_source_set_rtcp_from:
+ * @src: an #RTPSource
+ * @address: the RTCP address to set
+ *
+ * Set that @src is receiving RTCP packets from @address. This is used for
+ * collision checking.
+ */
+void
+rtp_source_set_rtcp_from (RTPSource * src, GSocketAddress * address)
+{
+ g_return_if_fail (RTP_IS_SOURCE (src));
+
+ /* take the new reference before dropping the old one so that passing the
+  * currently stored address cannot destroy it prematurely */
+ g_object_ref (address);
+ if (src->rtcp_from)
+   g_object_unref (src->rtcp_from);
+ src->rtcp_from = G_SOCKET_ADDRESS (address);
+}
+
+/* Push @buffer to the push_rtp callback, first flushing any packets that
+ * were queued while the source was in probation. Takes ownership of
+ * @buffer; without a callback the buffers are simply unreffed. */
+static GstFlowReturn
+push_packet (RTPSource * src, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* push queued packets first if any; use a distinct name so the @buffer
+  * parameter is not shadowed */
+ while (!g_queue_is_empty (src->packets)) {
+   GstBuffer *queued = GST_BUFFER_CAST (g_queue_pop_head (src->packets));
+
+   GST_LOG ("pushing queued packet");
+   if (src->callbacks.push_rtp)
+     src->callbacks.push_rtp (src, queued, src->user_data);
+   else
+     gst_buffer_unref (queued);
+ }
+ GST_LOG ("pushing new packet");
+ /* push packet */
+ if (src->callbacks.push_rtp)
+   ret = src->callbacks.push_rtp (src, buffer, src->user_data);
+ else
+   gst_buffer_unref (buffer);
+
+ return ret;
+}
+
+/* Lock onto @payload and make sure src->clock_rate is known for it, asking
+ * the clock_rate callback when needed. A payload change invalidates the
+ * clock-rate and the jitter transit time. */
+static void
+fetch_clock_rate_from_payload (RTPSource * src, guint8 payload)
+{
+ if (src->payload == -1) {
+ /* first payload received, nothing was in the caps, lock on to this payload */
+ src->payload = payload;
+ GST_DEBUG ("first payload %d", payload);
+ } else if (payload != src->payload) {
+ /* we have a different payload than before, reset the clock-rate */
+ GST_DEBUG ("new payload %d", payload);
+ src->payload = payload;
+ src->clock_rate = -1;
+ src->stats.transit = -1;
+ }
+
+ if (src->clock_rate == -1) {
+ gint clock_rate = -1;
+
+ if (src->callbacks.clock_rate)
+ clock_rate = src->callbacks.clock_rate (src, payload, src->user_data);
+
+ GST_DEBUG ("got clock-rate %d", clock_rate);
+
+ src->clock_rate = clock_rate;
+ /* the packet-rate estimator depends on the clock-rate, restart it */
+ gst_rtp_packet_rate_ctx_reset (&src->packet_rate_ctx, clock_rate);
+ }
+}
+
+/* Jitter is the variation in the delay of received packets in a flow. It is
+ * measured by comparing the interval when RTP packets were sent to the interval
+ * at which they were received. For instance, if packet #1 and packet #2 leave
+ * 50 milliseconds apart and arrive 60 milliseconds apart, then the jitter is 10
+ * milliseconds. */
+static void
+calculate_jitter (RTPSource * src, RTPPacketInfo * pinfo)
+{
+ GstClockTime running_time;
+ guint32 rtparrival, transit, rtptime;
+ gint32 diff;
+
+ /* get arrival time */
+ if ((running_time = pinfo->running_time) == GST_CLOCK_TIME_NONE)
+ goto no_time;
+
+ GST_LOG ("SSRC %08x got payload %d", src->ssrc, pinfo->pt);
+
+ /* check if clock-rate is valid */
+ if (src->clock_rate == -1)
+ goto no_clock_rate;
+
+ rtptime = pinfo->rtptime;
+
+ /* convert arrival time to RTP timestamp units, truncate to 32 bits, we don't
+ * care about the absolute value, just the difference. */
+ rtparrival =
+ gst_util_uint64_scale_int (running_time, src->clock_rate, GST_SECOND);
+
+ /* transit time is difference with RTP timestamp; unsigned wraparound is
+ * intentional here, only differences of transit values are used below */
+ transit = rtparrival - rtptime;
+
+ /* get ABS diff with previous transit time */
+ if (src->stats.transit != -1) {
+ if (transit > src->stats.transit)
+ diff = transit - src->stats.transit;
+ else
+ diff = src->stats.transit - transit;
+ } else
+ diff = 0;
+
+ src->stats.transit = transit;
+
+ /* update jitter, the value we store is scaled up so we can keep precision.
+ * This is the RFC 3550 estimator J += (|D| - J) / 16 kept in 28.4 fixed
+ * point; the +8 rounds the subtraction */
+ src->stats.jitter += diff - ((src->stats.jitter + 8) >> 4);
+
+ src->stats.prev_rtptime = src->stats.last_rtptime;
+ src->stats.last_rtptime = rtparrival;
+
+ GST_LOG ("rtparrival %u, rtptime %u, clock-rate %d, diff %d, jitter: %f",
+ rtparrival, rtptime, src->clock_rate, diff, (src->stats.jitter) / 16.0);
+
+ return;
+
+ /* ERRORS */
+no_time:
+ {
+ GST_WARNING ("cannot get current running_time");
+ return;
+ }
+no_clock_rate:
+ {
+ GST_WARNING ("cannot get clock-rate for pt %d", pinfo->pt);
+ return;
+ }
+}
+
+/* g_queue_foreach() helper used by init_seq(): fold one packet that was
+ * queued during probation into the receiver stats. */
+static void
+update_queued_stats (GstBuffer * buffer, RTPSource * src)
+{
+ GstRTPBuffer rtp = { NULL };
+ guint payload_len;
+ guint64 bytes;
+
+ /* no need to check the return value, a queued packet is a valid RTP one */
+ gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp);
+ payload_len = gst_rtp_buffer_get_payload_len (&rtp);
+
+ /* account for the lower-layer headers like the live receive path does */
+ bytes = gst_buffer_get_size (buffer) + UDP_IP_HEADER_OVERHEAD;
+
+ src->stats.octets_received += payload_len;
+ src->stats.bytes_received += bytes;
+ src->stats.packets_received++;
+ /* for the bitrate estimation consider all lower level headers */
+ src->bytes_received += bytes;
+
+ gst_rtp_buffer_unmap (&rtp);
+}
+
+/* (Re)start sequence number tracking at @seq, clearing the receive
+ * counters; modelled on the init_seq() routine of RFC 3550 Appendix A.1. */
+static void
+init_seq (RTPSource * src, guint16 seq)
+{
+ src->stats.base_seq = seq;
+ src->stats.max_seq = seq;
+ src->stats.bad_seq = RTP_SEQ_MOD + 1; /* so seq == bad_seq is false */
+ src->stats.cycles = 0;
+ src->stats.packets_received = 0;
+ src->stats.octets_received = 0;
+ src->stats.bytes_received = 0;
+ src->stats.prev_received = 0;
+ src->stats.prev_expected = 0;
+ src->stats.recv_pli_count = 0;
+ src->stats.recv_fir_count = 0;
+
+ /* if there are queued packets, consider them too in the stats */
+ g_queue_foreach (src->packets, (GFunc) update_queued_stats, src);
+
+ GST_DEBUG ("base_seq %d", seq);
+}
+
+#define BITRATE_INTERVAL (2 * GST_SECOND)
+
+/* Update src->bitrate from the bytes accumulated in @bytes_handled. Every
+ * BITRATE_INTERVAL of running time the byte counter is folded into the
+ * estimate with a 3/4 moving average and then cleared. */
+static void
+do_bitrate_estimation (RTPSource * src, GstClockTime running_time,
+ guint64 * bytes_handled)
+{
+ guint64 elapsed;
+
+ if (src->prev_rtime) {
+ elapsed = running_time - src->prev_rtime;
+
+ if (elapsed > BITRATE_INTERVAL) {
+ guint64 rate;
+
+ /* bytes to bits per second over the elapsed interval */
+ rate = gst_util_uint64_scale (*bytes_handled, 8 * GST_SECOND, elapsed);
+
+ GST_LOG ("Elapsed %" G_GUINT64_FORMAT ", bytes %" G_GUINT64_FORMAT
+ ", rate %" G_GUINT64_FORMAT, elapsed, *bytes_handled, rate);
+
+ if (src->bitrate == 0)
+ src->bitrate = rate;
+ else
+ src->bitrate = ((src->bitrate * 3) + rate) / 4;
+
+ src->prev_rtime = running_time;
+ *bytes_handled = 0;
+ }
+ } else {
+ GST_LOG ("Reset bitrate measurement");
+ src->prev_rtime = running_time;
+ src->bitrate = 0;
+ }
+}
+
+/* Update the receiver statistics of @src for the packet in @pinfo,
+ * running the probation / sequence-number state machine when @is_receive
+ * is TRUE (packets we send ourselves skip those checks).
+ *
+ * Returns FALSE when the packet must not be processed further: it was
+ * queued during probation, had an unacceptable seqnum jump, or failed
+ * probation. In those cases pinfo->data may have been taken (set to NULL). */
+static gboolean
+update_receiver_stats (RTPSource * src, RTPPacketInfo * pinfo,
+ gboolean is_receive)
+{
+ guint16 seqnr, expected;
+ RTPSourceStats *stats;
+ gint16 delta;
+ gint32 packet_rate, max_dropout, max_misorder;
+
+ stats = &src->stats;
+
+ seqnr = pinfo->seqnum;
+
+ /* the dropout/misorder limits scale with the estimated packet rate */
+ packet_rate =
+ gst_rtp_packet_rate_ctx_update (&src->packet_rate_ctx, pinfo->seqnum,
+ pinfo->rtptime);
+ max_dropout =
+ gst_rtp_packet_rate_ctx_get_max_dropout (&src->packet_rate_ctx,
+ src->max_dropout_time);
+ max_misorder =
+ gst_rtp_packet_rate_ctx_get_max_misorder (&src->packet_rate_ctx,
+ src->max_misorder_time);
+ GST_TRACE ("SSRC %08x, packet_rate: %d, max_dropout: %d, max_misorder: %d",
+ src->ssrc, packet_rate, max_dropout, max_misorder);
+
+ /* cycles == -1 is the "never heard from" marker set by rtp_source_reset() */
+ if (stats->cycles == -1) {
+ GST_DEBUG ("received first packet");
+ /* first time we heard of this source */
+ init_seq (src, seqnr);
+ src->stats.max_seq = seqnr - 1;
+ src->curr_probation = src->probation;
+ }
+
+ if (is_receive) {
+ expected = src->stats.max_seq + 1;
+ delta = gst_rtp_buffer_compare_seqnum (expected, seqnr);
+
+ /* if we are still on probation, check seqnum */
+ if (src->curr_probation) {
+ /* when in probation, we require consecutive seqnums */
+ if (delta == 0) {
+ /* expected packet */
+ GST_DEBUG ("probation: seqnr %d == expected %d", seqnr, expected);
+ src->curr_probation--;
+ if (seqnr < stats->max_seq) {
+ /* sequence number wrapped - count another 64K cycle. */
+ stats->cycles += RTP_SEQ_MOD;
+ }
+ src->stats.max_seq = seqnr;
+
+ if (src->curr_probation == 0) {
+ GST_DEBUG ("probation done!");
+ init_seq (src, seqnr);
+ } else {
+ GstBuffer *q;
+
+ GST_DEBUG ("probation %d: queue packet", src->curr_probation);
+ /* when still in probation, keep packets in a list. */
+ g_queue_push_tail (src->packets, pinfo->data);
+ pinfo->data = NULL;
+ /* remove packets from queue if there are too many */
+ while (g_queue_get_length (src->packets) > RTP_MAX_PROBATION_LEN) {
+ q = g_queue_pop_head (src->packets);
+ gst_buffer_unref (q);
+ }
+ goto done;
+ }
+ } else {
+ /* unexpected seqnum in probation
+ *
+ * There is no need to clean the queue at this point because the
+ * invalid packets in the queue are not going to be pushed as we are
+ * still in probation, and some cleanup will be performed at future
+ * probation attempts anyway if there are too many old packets in the
+ * queue.
+ */
+ goto probation_seqnum;
+ }
+ } else if (delta >= 0 && delta < max_dropout) {
+ /* Clear bad packets */
+ stats->bad_seq = RTP_SEQ_MOD + 1; /* so seq == bad_seq is false */
+ g_queue_foreach (src->packets, (GFunc) gst_buffer_unref, NULL);
+ g_queue_clear (src->packets);
+
+ /* in order, with permissible gap */
+ if (seqnr < stats->max_seq) {
+ /* sequence number wrapped - count another 64K cycle. */
+ stats->cycles += RTP_SEQ_MOD;
+ }
+ stats->max_seq = seqnr;
+ } else if (delta < -max_misorder || delta >= max_dropout) {
+ /* the sequence number made a very large jump */
+ if (seqnr == stats->bad_seq && src->packets->head) {
+ /* two sequential packets -- assume that the other side
+ * restarted without telling us so just re-sync
+ * (i.e., pretend this was the first packet). */
+ init_seq (src, seqnr);
+ } else {
+ /* unacceptable jump */
+ stats->bad_seq = (seqnr + 1) & (RTP_SEQ_MOD - 1);
+ g_queue_foreach (src->packets, (GFunc) gst_buffer_unref, NULL);
+ g_queue_clear (src->packets);
+ g_queue_push_tail (src->packets, pinfo->data);
+ pinfo->data = NULL;
+ goto bad_sequence;
+ }
+ } else { /* delta < 0 && delta >= -max_misorder */
+ /* Clear bad packets */
+ stats->bad_seq = RTP_SEQ_MOD + 1; /* so seq == bad_seq is false */
+ g_queue_foreach (src->packets, (GFunc) gst_buffer_unref, NULL);
+ g_queue_clear (src->packets);
+
+ /* duplicate or reordered packet, will be filtered by jitterbuffer. */
+ GST_INFO ("duplicate or reordered packet (seqnr %u, expected %u)",
+ seqnr, expected);
+ }
+ }
+
+ src->stats.octets_received += pinfo->payload_len;
+ src->stats.bytes_received += pinfo->bytes;
+ src->stats.packets_received += pinfo->packets;
+ /* for the bitrate estimation consider all lower level headers */
+ src->bytes_received += pinfo->bytes;
+
+ GST_LOG ("seq %u, PC: %" G_GUINT64_FORMAT ", OC: %" G_GUINT64_FORMAT,
+ seqnr, src->stats.packets_received, src->stats.octets_received);
+
+ return TRUE;
+
+ /* ERRORS */
+done:
+ {
+ return FALSE;
+ }
+bad_sequence:
+ {
+ GST_WARNING
+ ("unacceptable seqnum received (seqnr %u, delta %d, packet_rate: %d, max_dropout: %d, max_misorder: %d)",
+ seqnr, delta, packet_rate, max_dropout, max_misorder);
+ return FALSE;
+ }
+probation_seqnum:
+ {
+ GST_WARNING ("probation: seqnr %d != expected %d "
+ "(SSRC %u curr_probation %i probation %i)", seqnr, expected, src->ssrc,
+ src->curr_probation, src->probation);
+ src->curr_probation = src->probation;
+ src->stats.max_seq = seqnr;
+ return FALSE;
+ }
+}
+
+/**
+ * rtp_source_process_rtp:
+ * @src: an #RTPSource
+ * @pinfo: an #RTPPacketInfo
+ *
+ * Let @src handle the incoming RTP packet described in @pinfo.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+GstFlowReturn
+rtp_source_process_rtp (RTPSource * src, RTPPacketInfo * pinfo)
+{
+ GstFlowReturn result;
+
+ g_return_val_if_fail (RTP_IS_SOURCE (src), GST_FLOW_ERROR);
+ g_return_val_if_fail (pinfo != NULL, GST_FLOW_ERROR);
+
+ fetch_clock_rate_from_payload (src, pinfo->pt);
+
+ /* FALSE means the packet was queued or dropped by the probation /
+ * seqnum checks; nothing to push in that case */
+ if (!update_receiver_stats (src, pinfo, TRUE))
+ return GST_FLOW_OK;
+
+ /* the source that sent the packet must be a sender */
+ src->is_sender = TRUE;
+ src->validated = TRUE;
+
+ do_bitrate_estimation (src, pinfo->running_time, &src->bytes_received);
+
+ /* calculate jitter for the stats */
+ calculate_jitter (src, pinfo);
+
+ /* we're ready to push the RTP packet now; push_packet takes ownership */
+ result = push_packet (src, pinfo->data);
+ pinfo->data = NULL;
+
+ return result;
+}
+
+/**
+ * rtp_source_mark_bye:
+ * @src: an #RTPSource
+ * @reason: the reason for leaving
+ *
+ * Mark @src in the BYE state. This can happen when the source wants to
+ * leave the session or when a BYE packets has been received.
+ *
+ * This will make the source inactive.
+ */
+void
+rtp_source_mark_bye (RTPSource * src, const gchar * reason)
+{
+ g_return_if_fail (RTP_IS_SOURCE (src));
+
+ GST_DEBUG ("marking SSRC %08x as BYE, reason: %s", src->ssrc,
+     GST_STR_NULL (reason));
+
+ src->marked_bye = TRUE;
+
+ /* replace any previously stored reason with a copy of the new one */
+ g_free (src->bye_reason);
+ src->bye_reason = g_strdup (reason);
+}
+
+/**
+ * rtp_source_send_rtp:
+ * @src: an #RTPSource
+ * @pinfo: an #RTPPacketInfo
+ *
+ * Send data (an RTP buffer or buffer list from @pinfo) originating from @src.
+ * This will make @src a sender. This function takes ownership of the data and
+ * modifies the SSRC in the RTP packet to that of @src when needed.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+GstFlowReturn
+rtp_source_send_rtp (RTPSource * src, RTPPacketInfo * pinfo)
+{
+ GstFlowReturn result;
+ GstClockTime running_time;
+ guint32 rtptime;
+ guint64 ext_rtptime;
+ guint64 rt_diff, rtp_diff;
+
+ g_return_val_if_fail (RTP_IS_SOURCE (src), GST_FLOW_ERROR);
+
+ /* we are a sender now */
+ src->is_sender = TRUE;
+
+ /* we are also a receiver of our packets; is_receive FALSE skips the
+ * probation/seqnum checks */
+ if (!update_receiver_stats (src, pinfo, FALSE))
+ return GST_FLOW_OK;
+
+ if (src->pt_set && src->pt != pinfo->pt) {
+ GST_WARNING ("Changing pt from %u to %u for SSRC %u", src->pt, pinfo->pt,
+ src->ssrc);
+ }
+
+ src->pt = pinfo->pt;
+ src->pt_set = TRUE;
+
+ /* update stats for the SR */
+ src->stats.packets_sent += pinfo->packets;
+ src->stats.octets_sent += pinfo->payload_len;
+ src->bytes_sent += pinfo->bytes;
+
+ running_time = pinfo->running_time;
+
+ do_bitrate_estimation (src, running_time, &src->bytes_sent);
+
+ rtptime = pinfo->rtptime;
+
+ /* extend the 32-bit RTP time against the previously stored extended
+ * timestamp so wraparounds are accounted for */
+ ext_rtptime = src->last_rtptime;
+ ext_rtptime = gst_rtp_buffer_ext_timestamp (&ext_rtptime, rtptime);
+
+ GST_LOG ("SSRC %08x, RTP %" G_GUINT64_FORMAT ", running_time %"
+ GST_TIME_FORMAT, src->ssrc, ext_rtptime, GST_TIME_ARGS (running_time));
+
+ if (ext_rtptime > src->last_rtptime) {
+ rtp_diff = ext_rtptime - src->last_rtptime;
+ rt_diff = running_time - src->last_rtime;
+
+ /* calc the diff so we can detect drift at the sender. This can also be used
+ * to guestimate the clock rate if the NTP time is locked to the RTP
+ * timestamps (as is the case when the capture device is providing the clock). */
+ GST_LOG ("SSRC %08x, diff RTP %" G_GUINT64_FORMAT ", diff running_time %"
+ GST_TIME_FORMAT, src->ssrc, rtp_diff, GST_TIME_ARGS (rt_diff));
+ }
+
+ /* we keep track of the last received RTP timestamp and the corresponding
+ * buffer running_time so that we can use this info when constructing SR reports */
+ src->last_rtime = running_time;
+ src->last_rtptime = ext_rtptime;
+
+ /* push packet */
+ if (!src->callbacks.push_rtp)
+ goto no_callback;
+
+ GST_LOG ("pushing RTP %s %" G_GUINT64_FORMAT,
+ pinfo->is_list ? "list" : "packet", src->stats.packets_sent);
+
+ result = src->callbacks.push_rtp (src, pinfo->data, src->user_data);
+ pinfo->data = NULL;
+
+ return result;
+
+ /* ERRORS */
+no_callback:
+ {
+ GST_WARNING ("no callback installed, dropping packet");
+ return GST_FLOW_OK;
+ }
+}
+
+/**
+ * rtp_source_process_sr:
+ * @src: an #RTPSource
+ * @time: time of packet arrival
+ * @ntptime: the NTP time (in NTP Timestamp Format, 32.32 fixed point)
+ * @rtptime: the RTP time (in clock rate units)
+ * @packet_count: the packet count
+ * @octet_count: the octet count
+ *
+ * Update the sender report in @src.
+ */
+void
+rtp_source_process_sr (RTPSource * src, GstClockTime time, guint64 ntptime,
+ guint32 rtptime, guint32 packet_count, guint32 octet_count)
+{
+ RTPSenderReport *curr;
+ gint curridx;
+
+ g_return_if_fail (RTP_IS_SOURCE (src));
+
+ GST_DEBUG ("got SR packet: SSRC %08x, NTP %08x:%08x, RTP %" G_GUINT32_FORMAT
+ ", PC %" G_GUINT32_FORMAT ", OC %" G_GUINT32_FORMAT, src->ssrc,
+ (guint32) (ntptime >> 32), (guint32) (ntptime & 0xffffffff), rtptime,
+ packet_count, octet_count);
+
+ /* the SR slots form a 2-entry double buffer; fill the inactive one and
+ * flip curr_sr only when it is complete */
+ curridx = src->stats.curr_sr ^ 1;
+ curr = &src->stats.sr[curridx];
+
+ /* this is a sender now */
+ src->is_sender = TRUE;
+
+ /* update current */
+ curr->is_valid = TRUE;
+ curr->ntptime = ntptime;
+ curr->rtptime = rtptime;
+ curr->packet_count = packet_count;
+ curr->octet_count = octet_count;
+ curr->time = time;
+
+ /* make current */
+ src->stats.curr_sr = curridx;
+
+ src->stats.prev_rtcptime = src->stats.last_rtcptime;
+ src->stats.last_rtcptime = time;
+}
+
+/**
+ * rtp_source_process_rb:
+ * @src: an #RTPSource
+ * @ssrc: SSRC of the local source for which this RB was sent
+ * @ntpnstime: the current time in nanoseconds since 1970
+ * @fractionlost: fraction lost since last SR/RR
+ * @packetslost: the cumulative number of packets lost
+ * @exthighestseq: the extended last sequence number received
+ * @jitter: the interarrival jitter (in clock rate units)
+ * @lsr: the time of the last SR packet on this source
+ * (in NTP Short Format, 16.16 fixed point)
+ * @dlsr: the delay since the last SR packet
+ * (in NTP Short Format, 16.16 fixed point)
+ *
+ * Update the report block in @src.
+ */
+void
+rtp_source_process_rb (RTPSource * src, guint32 ssrc, guint64 ntpnstime,
+ guint8 fractionlost, gint32 packetslost, guint32 exthighestseq,
+ guint32 jitter, guint32 lsr, guint32 dlsr)
+{
+ RTPReceiverReport *curr;
+ gint curridx;
+ guint32 ntp, A;
+ guint64 f_ntp;
+
+ g_return_if_fail (RTP_IS_SOURCE (src));
+
+ GST_DEBUG ("got RB packet: SSRC %08x, FL %2x, PL %d, HS %" G_GUINT32_FORMAT
+ ", jitter %" G_GUINT32_FORMAT ", LSR %04x:%04x, DLSR %04x:%04x",
+ src->ssrc, fractionlost, packetslost, exthighestseq, jitter, lsr >> 16,
+ lsr & 0xffff, dlsr >> 16, dlsr & 0xffff);
+
+ /* fill in the currently unused half of the two-entry RR ring */
+ curridx = src->stats.curr_rr ^ 1;
+ curr = &src->stats.rr[curridx];
+
+ /* update current */
+ curr->is_valid = TRUE;
+ curr->ssrc = ssrc;
+ curr->fractionlost = fractionlost;
+ curr->packetslost = packetslost;
+ curr->exthighestseq = exthighestseq;
+ curr->jitter = jitter;
+ curr->lsr = lsr;
+ curr->dlsr = dlsr;
+
+ /* convert the NTP time in nanoseconds to 32.32 fixed point */
+ f_ntp = gst_util_uint64_scale (ntpnstime, (1LL << 32), GST_SECOND);
+ /* calculate round trip, round the time up */
+ ntp = ((f_ntp + 0xffff) >> 16) & 0xffffffff;
+
+ /* round trip = now - LSR - DLSR, all in 16.16 NTP Short Format; fall back
+ * to 0 when there is no SR info yet or the result would go negative */
+ A = dlsr + lsr;
+ if (A > 0 && ntp > A)
+ A = ntp - A;
+ else
+ A = 0;
+ curr->round_trip = A;
+
+ GST_DEBUG ("NTP %04x:%04x, round trip %04x:%04x", ntp >> 16, ntp & 0xffff,
+ A >> 16, A & 0xffff);
+
+ /* make current */
+ src->stats.curr_rr = curridx;
+}
+
+/**
+ * rtp_source_get_new_sr:
+ * @src: an #RTPSource
+ * @ntpnstime: the current time in nanoseconds since 1970
+ * @running_time: the current running_time of the pipeline
+ * @ntptime: the NTP time (in NTP Timestamp Format, 32.32 fixed point)
+ * @rtptime: the RTP time corresponding to @ntptime (in clock rate units)
+ * @packet_count: the packet count
+ * @octet_count: the octet count
+ *
+ * Get new values to put into a new SR report from this source.
+ *
+ * @running_time and @ntpnstime are captured at the same time and represent the
+ * running time of the pipeline clock and the absolute current system time in
+ * nanoseconds respectively. Together with the last running_time and RTP timestamp
+ * we have observed in the source, we can generate @ntptime and @rtptime for an SR
+ * packet. @ntptime is basically the fixed point representation of @ntpnstime
+ * and @rtptime the associated RTP timestamp.
+ *
+ * Returns: %TRUE on success.
+ */
+gboolean
+rtp_source_get_new_sr (RTPSource * src, guint64 ntpnstime,
+ GstClockTime running_time, guint64 * ntptime, guint32 * rtptime,
+ guint32 * packet_count, guint32 * octet_count)
+{
+ guint64 t_rtp;
+ guint64 t_current_ntp;
+ GstClockTimeDiff diff;
+
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ /* We last saw a buffer with last_rtptime at last_rtime. Given a running_time
+ * and an NTP time, we can scale the RTP timestamps so that they match the
+ * given NTP time. for scaling, we assume that the slope of the rtptime vs
+ * running_time vs ntptime curve is close to 1, which is certainly
+ * sufficient for the frequency at which we report SR and the rate we send
+ * out RTP packets. */
+ t_rtp = src->last_rtptime;
+
+ GST_DEBUG ("last_rtime %" GST_TIME_FORMAT ", last_rtptime %"
+ G_GUINT64_FORMAT, GST_TIME_ARGS (src->last_rtime), t_rtp);
+
+ /* lazily resolve the clock-rate from the payload type the first time it is
+ * needed */
+ if (src->clock_rate == -1 && src->pt_set) {
+ GST_INFO ("no clock-rate, getting for pt %u and SSRC %u", src->pt,
+ src->ssrc);
+ fetch_clock_rate_from_payload (src, src->pt);
+ }
+
+ if (src->clock_rate != -1) {
+ /* get the diff between the clock running_time and the buffer running_time.
+ * This is the elapsed time, as measured against the pipeline clock, between
+ * when the rtp timestamp was observed and the current running_time.
+ *
+ * We need to apply this diff to the RTP timestamp to get the RTP timestamp
+ * for the given ntpnstime. */
+ diff = GST_CLOCK_DIFF (src->last_rtime, running_time);
+ GST_DEBUG ("running_time %" GST_TIME_FORMAT ", diff %" GST_STIME_FORMAT,
+ GST_TIME_ARGS (running_time), GST_STIME_ARGS (diff));
+
+ /* now translate the diff to RTP time, handle positive and negative cases.
+ * If there is no diff, we already set rtptime correctly above. */
+ if (diff > 0) {
+ t_rtp += gst_util_uint64_scale_int (diff, src->clock_rate, GST_SECOND);
+ } else {
+ diff = -diff;
+ t_rtp -= gst_util_uint64_scale_int (diff, src->clock_rate, GST_SECOND);
+ }
+ } else {
+ GST_WARNING ("no clock-rate, cannot interpolate rtp time for SSRC %u",
+ src->ssrc);
+ }
+
+ /* convert the NTP time in nanoseconds to 32.32 fixed point */
+ t_current_ntp = gst_util_uint64_scale (ntpnstime, (1LL << 32), GST_SECOND);
+
+ GST_DEBUG ("NTP %08x:%08x, RTP %" G_GUINT32_FORMAT,
+ (guint32) (t_current_ntp >> 32), (guint32) (t_current_ntp & 0xffffffff),
+ (guint32) t_rtp);
+
+ /* all out parameters are optional */
+ if (ntptime)
+ *ntptime = t_current_ntp;
+ if (rtptime)
+ *rtptime = t_rtp;
+ if (packet_count)
+ *packet_count = src->stats.packets_sent;
+ if (octet_count)
+ *octet_count = src->stats.octets_sent;
+
+ return TRUE;
+}
+
+/**
+ * rtp_source_get_new_rb:
+ * @src: an #RTPSource
+ * @time: the current time of the system clock
+ * @fractionlost: fraction lost since last SR/RR
+ * @packetslost: the cumulative number of packets lost
+ * @exthighestseq: the extended last sequence number received
+ * @jitter: the interarrival jitter (in clock rate units)
+ * @lsr: the time of the last SR packet on this source
+ * (in NTP Short Format, 16.16 fixed point)
+ * @dlsr: the delay since the last SR packet
+ * (in NTP Short Format, 16.16 fixed point)
+ *
+ * Get new values to put into a new report block from this source.
+ *
+ * Returns: %TRUE on success.
+ */
+gboolean
+rtp_source_get_new_rb (RTPSource * src, GstClockTime time,
+ guint8 * fractionlost, gint32 * packetslost, guint32 * exthighestseq,
+ guint32 * jitter, guint32 * lsr, guint32 * dlsr)
+{
+ RTPSourceStats *stats;
+ guint64 extended_max, expected;
+ guint64 expected_interval, received_interval, ntptime;
+ gint64 lost, lost_interval;
+ guint32 fraction, LSR, DLSR;
+ GstClockTime sr_time;
+
+ stats = &src->stats;
+
+ extended_max = stats->cycles + stats->max_seq;
+ expected = extended_max - stats->base_seq + 1;
+
+ GST_DEBUG ("ext_max %" G_GUINT64_FORMAT ", expected %" G_GUINT64_FORMAT
+ ", received %" G_GUINT64_FORMAT ", base_seq %" G_GUINT32_FORMAT,
+ extended_max, expected, stats->packets_received, stats->base_seq);
+
+ /* the cumulative lost field in a report block is a signed 24-bit value */
+ lost = expected - stats->packets_received;
+ lost = CLAMP (lost, -0x800000, 0x7fffff);
+
+ expected_interval = expected - stats->prev_expected;
+ stats->prev_expected = expected;
+ received_interval = stats->packets_received - stats->prev_received;
+ stats->prev_received = stats->packets_received;
+
+ lost_interval = expected_interval - received_interval;
+
+ /* fraction lost is expressed in 1/256 units (8-bit fixed point) */
+ if (expected_interval == 0 || lost_interval <= 0)
+ fraction = 0;
+ else
+ fraction = (lost_interval << 8) / expected_interval;
+
+ GST_DEBUG ("add RR for SSRC %08x", src->ssrc);
+ /* we scaled the jitter up for additional precision */
+ GST_DEBUG ("fraction %" G_GUINT32_FORMAT ", lost %" G_GINT64_FORMAT
+ ", extseq %" G_GUINT64_FORMAT ", jitter %d", fraction, lost,
+ extended_max, stats->jitter >> 4);
+
+ if (rtp_source_get_last_sr (src, &sr_time, &ntptime, NULL, NULL, NULL)) {
+ GstClockTime diff;
+
+ /* LSR is middle 32 bits of the last ntptime */
+ LSR = (ntptime >> 16) & 0xffffffff;
+ diff = time - sr_time;
+ GST_DEBUG ("last SR time diff %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
+ /* DLSR, delay since last SR is expressed in 1/65536 second units */
+ DLSR = gst_util_uint64_scale_int (diff, 65536, GST_SECOND);
+ } else {
+ /* No valid SR received, LSR/DLSR are set to 0 then */
+ GST_DEBUG ("no valid SR received");
+ LSR = 0;
+ DLSR = 0;
+ }
+ GST_DEBUG ("LSR %04x:%04x, DLSR %04x:%04x", LSR >> 16, LSR & 0xffff,
+ DLSR >> 16, DLSR & 0xffff);
+
+ /* all out parameters are optional */
+ if (fractionlost)
+ *fractionlost = fraction;
+ if (packetslost)
+ *packetslost = lost;
+ if (exthighestseq)
+ *exthighestseq = extended_max;
+ if (jitter)
+ *jitter = stats->jitter >> 4;
+ if (lsr)
+ *lsr = LSR;
+ if (dlsr)
+ *dlsr = DLSR;
+
+ return TRUE;
+}
+
+/**
+ * rtp_source_get_last_sr:
+ * @src: an #RTPSource
+ * @time: time of packet arrival
+ * @ntptime: the NTP time (in NTP Timestamp Format, 32.32 fixed point)
+ * @rtptime: the RTP time (in clock rate units)
+ * @packet_count: the packet count
+ * @octet_count: the octet count
+ *
+ * Get the values of the last sender report as set with rtp_source_process_sr().
+ * All out parameters may be %NULL when the caller is not interested in them.
+ *
+ * Returns: %TRUE if there was a valid SR report.
+ */
+gboolean
+rtp_source_get_last_sr (RTPSource * src, GstClockTime * time, guint64 * ntptime,
+ guint32 * rtptime, guint32 * packet_count, guint32 * octet_count)
+{
+ RTPSenderReport *curr;
+
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ /* read from the active half of the SR ring */
+ curr = &src->stats.sr[src->stats.curr_sr];
+ if (!curr->is_valid)
+ return FALSE;
+
+ if (ntptime)
+ *ntptime = curr->ntptime;
+ if (rtptime)
+ *rtptime = curr->rtptime;
+ if (packet_count)
+ *packet_count = curr->packet_count;
+ if (octet_count)
+ *octet_count = curr->octet_count;
+ if (time)
+ *time = curr->time;
+
+ return TRUE;
+}
+
+/**
+ * rtp_source_get_last_rb:
+ * @src: an #RTPSource
+ * @ssrc: SSRC of the local source for which this RB was sent
+ * @fractionlost: fraction lost since last SR/RR
+ * @packetslost: the cumulative number of packets lost
+ * @exthighestseq: the extended last sequence number received
+ * @jitter: the interarrival jitter (in clock rate units)
+ * @lsr: the time of the last SR packet on this source
+ * (in NTP Short Format, 16.16 fixed point)
+ * @dlsr: the delay since the last SR packet
+ * (in NTP Short Format, 16.16 fixed point)
+ * @round_trip: the round-trip time
+ * (in NTP Short Format, 16.16 fixed point)
+ *
+ * Get the values of the last RB report set with rtp_source_process_rb().
+ * All out parameters may be %NULL when the caller is not interested in them.
+ *
+ * Returns: %TRUE if there was a valid RB report.
+ */
+gboolean
+rtp_source_get_last_rb (RTPSource * src, guint32 * ssrc,
+ guint8 * fractionlost, gint32 * packetslost, guint32 * exthighestseq,
+ guint32 * jitter, guint32 * lsr, guint32 * dlsr, guint32 * round_trip)
+{
+ RTPReceiverReport *curr;
+
+ g_return_val_if_fail (RTP_IS_SOURCE (src), FALSE);
+
+ /* read from the active half of the RR ring */
+ curr = &src->stats.rr[src->stats.curr_rr];
+ if (!curr->is_valid)
+ return FALSE;
+
+ if (ssrc)
+ *ssrc = curr->ssrc;
+ if (fractionlost)
+ *fractionlost = curr->fractionlost;
+ if (packetslost)
+ *packetslost = curr->packetslost;
+ if (exthighestseq)
+ *exthighestseq = curr->exthighestseq;
+ if (jitter)
+ *jitter = curr->jitter;
+ if (lsr)
+ *lsr = curr->lsr;
+ if (dlsr)
+ *dlsr = curr->dlsr;
+ if (round_trip)
+ *round_trip = curr->round_trip;
+
+ return TRUE;
+}
+
+/* Scan @conflicting_addresses for @address; when a match is found, refresh
+ * its last-seen time to @time and return TRUE, otherwise return FALSE. */
+gboolean
+find_conflicting_address (GList * conflicting_addresses,
+ GSocketAddress * address, GstClockTime time)
+{
+ GList *item;
+
+ for (item = conflicting_addresses; item; item = g_list_next (item)) {
+ RTPConflictingAddress *known_conflict = item->data;
+
+ if (__g_socket_address_equal (address, known_conflict->address)) {
+ known_conflict->time = time;
+ return TRUE;
+ }
+ }
+
+ return FALSE;
+}
+
+/* Prepend a new #RTPConflictingAddress entry, taking a ref on @address and
+ * stamping it with @time. Returns the new head of the list. */
+GList *
+add_conflicting_address (GList * conflicting_addresses,
+ GSocketAddress * address, GstClockTime time)
+{
+ RTPConflictingAddress *new_conflict;
+
+ new_conflict = g_slice_new (RTPConflictingAddress);
+
+ new_conflict->address = G_SOCKET_ADDRESS (g_object_ref (address));
+ new_conflict->time = time;
+
+ return g_list_prepend (conflicting_addresses, new_conflict);
+}
+
+/* Remove entries from @conflicting_addresses that have not conflicted again
+ * within the collision timeout. Returns the (possibly new) head of the list. */
+GList *
+timeout_conflicting_addresses (GList * conflicting_addresses,
+ GstClockTime current_time)
+{
+ GList *item;
+ /* "a relatively long time" -- RFC 3550 section 8.2 */
+ const GstClockTime collision_timeout =
+ RTP_STATS_MIN_INTERVAL * GST_SECOND * 10;
+
+ item = g_list_first (conflicting_addresses);
+ while (item) {
+ RTPConflictingAddress *known_conflict = item->data;
+ GList *next_item = g_list_next (item);
+
+ /* grab the next link before possibly deleting the current one */
+ if (known_conflict->time + collision_timeout < current_time) {
+ gchar *buf;
+
+ conflicting_addresses = g_list_delete_link (conflicting_addresses, item);
+ buf = __g_socket_address_to_string (known_conflict->address);
+ GST_DEBUG ("collision %p timed out: %s", known_conflict, buf);
+ g_free (buf);
+ rtp_conflicting_address_free (known_conflict);
+ }
+ item = next_item;
+ }
+
+ return conflicting_addresses;
+}
+
+/**
+ * rtp_source_find_conflicting_address:
+ * @src: The source the packet came in
+ * @address: address to check for
+ * @time: The time when the packet that is possibly in conflict arrived
+ *
+ * Checks if an address which has a conflict is already known. If it is
+ * a known conflict, remember the time
+ *
+ * Returns: TRUE if it was a known conflict, FALSE otherwise
+ */
+gboolean
+rtp_source_find_conflicting_address (RTPSource * src, GSocketAddress * address,
+ GstClockTime time)
+{
+ /* delegate to the list helper operating on this source's conflict list */
+ return find_conflicting_address (src->conflicting_addresses, address, time);
+}
+
+/**
+ * rtp_source_add_conflicting_address:
+ * @src: The source the packet came in
+ * @address: address to remember
+ * @time: The time when the packet that is in conflict arrived
+ *
+ * Adds a new conflict address
+ */
+void
+rtp_source_add_conflicting_address (RTPSource * src,
+ GSocketAddress * address, GstClockTime time)
+{
+ /* the helper prepends, so keep the returned list head */
+ src->conflicting_addresses =
+ add_conflicting_address (src->conflicting_addresses, address, time);
+}
+
+/**
+ * rtp_source_timeout:
+ * @src: The #RTPSource
+ * @current_time: The current time
+ * @running_time: The current running time of the pipeline
+ * @feedback_retention_window: The running time before which retained feedback
+ * packets have to be discarded
+ *
+ * This is processed on each RTCP interval. It times out old collisions.
+ * It also times out old retained feedback packets
+ */
+void
+rtp_source_timeout (RTPSource * src, GstClockTime current_time,
+ GstClockTime running_time, GstClockTime feedback_retention_window)
+{
+ GstRTCPPacket *pkt;
+ GstClockTime max_pts_window;
+ guint pruned = 0;
+
+ src->conflicting_addresses =
+ timeout_conflicting_addresses (src->conflicting_addresses, current_time);
+
+ /* nothing to prune when no retention window is configured or the pipeline
+ * has not yet run long enough for the window to apply */
+ if (feedback_retention_window == GST_CLOCK_TIME_NONE ||
+ running_time < feedback_retention_window) {
+ return;
+ }
+
+ max_pts_window = running_time - feedback_retention_window;
+
+ /* Time out AVPF packets that are older than the desired length */
+ while ((pkt = g_queue_peek_head (src->retained_feedback)) &&
+ GST_BUFFER_PTS (pkt) < max_pts_window) {
+ gst_buffer_unref (g_queue_pop_head (src->retained_feedback));
+ pruned++;
+ }
+
+ GST_LOG_OBJECT (src,
+ "%u RTCP packets pruned with PTS less than %" GST_TIME_FORMAT
+ ", queue len: %u", pruned, GST_TIME_ARGS (max_pts_window),
+ g_queue_get_length (src->retained_feedback));
+}
+
+/* GCompareDataFunc ordering retained RTCP buffers by ascending PTS. Both
+ * buffers are expected to carry a valid PTS; the guards bail out with a
+ * fixed ordering result otherwise. */
+static gint
+compare_buffers (gconstpointer a, gconstpointer b, gpointer user_data)
+{
+ const GstBuffer *bufa = a;
+ const GstBuffer *bufb = b;
+
+ g_return_val_if_fail (GST_BUFFER_PTS (bufa) != GST_CLOCK_TIME_NONE, -1);
+ g_return_val_if_fail (GST_BUFFER_PTS (bufb) != GST_CLOCK_TIME_NONE, 1);
+
+ if (GST_BUFFER_PTS (bufa) < GST_BUFFER_PTS (bufb)) {
+ return -1;
+ } else if (GST_BUFFER_PTS (bufa) > GST_BUFFER_PTS (bufb)) {
+ return 1;
+ }
+
+ return 0;
+}
+
+/* Copy the raw bytes of @packet, stamp the copy with @running_time as PTS and
+ * insert it into the retained_feedback queue sorted by PTS, so it can later
+ * be pruned by rtp_source_timeout(). */
+void
+rtp_source_retain_rtcp_packet (RTPSource * src, GstRTCPPacket * packet,
+ GstClockTime running_time)
+{
+ GstBuffer *buffer;
+
+ g_return_if_fail (running_time != GST_CLOCK_TIME_NONE);
+
+ /* RTCP packet length is in 32-bit words minus one, hence (len + 1) * 4 */
+ buffer = gst_buffer_copy_region (packet->rtcp->buffer, GST_BUFFER_COPY_MEMORY,
+ packet->offset, (gst_rtcp_packet_get_length (packet) + 1) * 4);
+
+ GST_BUFFER_PTS (buffer) = running_time;
+
+ g_queue_insert_sorted (src->retained_feedback, buffer, compare_buffers, NULL);
+
+ GST_LOG_OBJECT (src, "RTCP packet retained with PTS: %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (running_time));
+}
+
+/* Returns TRUE when a retained feedback packet matches @data per @func. */
+gboolean
+rtp_source_has_retained (RTPSource * src, GCompareFunc func, gconstpointer data)
+{
+ return g_queue_find_custom (src->retained_feedback, data, func) != NULL;
+}
+
+/**
+ * rtp_source_register_nack:
+ * @src: The #RTPSource
+ * @seqnum: a seqnum
+ * @deadline: the deadline before which RTX is still possible
+ *
+ * Register that @seqnum has not been received from @src.
+ */
+void
+rtp_source_register_nack (RTPSource * src, guint16 seqnum,
+ GstClockTime deadline)
+{
+ gint i;
+ guint len;
+ gint diff = -1;
+ guint16 tseq;
+
+ /* walk backwards to find the newest registered seqnum that is not newer
+ * than @seqnum; the nacks array is kept sorted (newest at the end) */
+ len = src->nacks->len;
+ for (i = len - 1; i >= 0; i--) {
+ tseq = g_array_index (src->nacks, guint16, i);
+ diff = gst_rtp_buffer_compare_seqnum (tseq, seqnum);
+
+ GST_TRACE ("[%u] %u %u diff %i len %u", i, tseq, seqnum, diff, len);
+
+ if (diff >= 0)
+ break;
+ }
+
+ if (diff == 0) {
+ /* already registered: just refresh its deadline */
+ GST_DEBUG ("update NACK #%u deadline to %" GST_TIME_FORMAT, seqnum,
+ GST_TIME_ARGS (deadline));
+ g_array_index (src->nack_deadlines, GstClockTime, i) = deadline;
+ } else if (i == len - 1) {
+ /* newest seqnum so far; NOTE: when the array is empty, i is -1 and the
+ * signed/unsigned comparison above still selects this append branch
+ * because both sides convert to 0xffffffff */
+ GST_DEBUG ("append NACK #%u with deadline %" GST_TIME_FORMAT, seqnum,
+ GST_TIME_ARGS (deadline));
+ g_array_append_val (src->nacks, seqnum);
+ g_array_append_val (src->nack_deadlines, deadline);
+ } else {
+ /* insert in the middle, keeping both arrays aligned and sorted */
+ GST_DEBUG ("insert NACK #%u with deadline %" GST_TIME_FORMAT, seqnum,
+ GST_TIME_ARGS (deadline));
+ g_array_insert_val (src->nacks, i + 1, seqnum);
+ g_array_insert_val (src->nack_deadlines, i + 1, deadline);
+ }
+
+ src->send_nack = TRUE;
+}
+
+/**
+ * rtp_source_get_nacks:
+ * @src: The #RTPSource
+ * @n_nacks: result number of nacks
+ *
+ * Get the registered NACKS since the last rtp_source_clear_nacks().
+ * The returned array is owned by @src and must not be freed; it is only
+ * valid until the nacks are modified or cleared.
+ *
+ * Returns: an array of @n_nacks seqnum values.
+ */
+guint16 *
+rtp_source_get_nacks (RTPSource * src, guint * n_nacks)
+{
+ if (n_nacks)
+ *n_nacks = src->nacks->len;
+
+ return (guint16 *) src->nacks->data;
+}
+
+/**
+ * rtp_source_get_nack_deadlines:
+ * @src: The #RTPSource
+ * @n_nacks: result number of nacks
+ *
+ * Get the registered NACKS deadlines.
+ * The returned array is owned by @src and must not be freed; it is only
+ * valid until the nacks are modified or cleared.
+ *
+ * Returns: an array of @n_nacks deadline values.
+ */
+GstClockTime *
+rtp_source_get_nack_deadlines (RTPSource * src, guint * n_nacks)
+{
+ if (n_nacks)
+ *n_nacks = src->nack_deadlines->len;
+
+ return (GstClockTime *) src->nack_deadlines->data;
+}
+
+/**
+ * rtp_source_clear_nacks:
+ * @src: The #RTPSource
+ * @n_nacks: number of nacks
+ *
+ * Remove @n_nacks oldest NACKS form array.
+ */
+void
+rtp_source_clear_nacks (RTPSource * src, guint n_nacks)
+{
+ g_return_if_fail (n_nacks <= src->nacks->len);
+
+ /* only drop the send_nack flag when everything was cleared; a partial
+ * clear leaves pending NACKs that still need to be sent */
+ if (src->nacks->len == n_nacks) {
+ g_array_set_size (src->nacks, 0);
+ g_array_set_size (src->nack_deadlines, 0);
+ src->send_nack = FALSE;
+ } else {
+ g_array_remove_range (src->nacks, 0, n_nacks);
+ g_array_remove_range (src->nack_deadlines, 0, n_nacks);
+ }
+}
diff --git a/gst/rtpmanager/rtpsource.h b/gst/rtpmanager/rtpsource.h
new file mode 100644
index 0000000000..20e65f1324
--- /dev/null
+++ b/gst/rtpmanager/rtpsource.h
@@ -0,0 +1,311 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) 2015 Kurento (http://kurento.org/)
+ * @author: Miguel París <mparisdiaz@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __RTP_SOURCE_H__
+#define __RTP_SOURCE_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/rtp.h>
+#include <gst/net/gstnetaddressmeta.h>
+#include <gio/gio.h>
+
+#include "rtpstats.h"
+
+/* the default number of consecutive RTP packets we need to receive before the
+ * source is considered valid */
+#define RTP_NO_PROBATION 0
+#define RTP_DEFAULT_PROBATION 2
+
+#define RTP_SEQ_MOD (1 << 16)
+
+typedef struct _RTPSource RTPSource;
+typedef struct _RTPSourceClass RTPSourceClass;
+
+#define RTP_TYPE_SOURCE (rtp_source_get_type())
+#define RTP_SOURCE(src) (G_TYPE_CHECK_INSTANCE_CAST((src),RTP_TYPE_SOURCE,RTPSource))
+#define RTP_SOURCE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),RTP_TYPE_SOURCE,RTPSourceClass))
+#define RTP_IS_SOURCE(src) (G_TYPE_CHECK_INSTANCE_TYPE((src),RTP_TYPE_SOURCE))
+#define RTP_IS_SOURCE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),RTP_TYPE_SOURCE))
+#define RTP_SOURCE_CAST(src) ((RTPSource *)(src))
+
+/**
+ * RTP_SOURCE_IS_ACTIVE:
+ * @src: an #RTPSource
+ *
+ * Check if @src is active. A source is active when it has been validated
+ * and has not yet received a BYE packet.
+ */
+#define RTP_SOURCE_IS_ACTIVE(src) (src->validated && !src->marked_bye)
+
+/**
+ * RTP_SOURCE_IS_SENDER:
+ * @src: an #RTPSource
+ *
+ * Check if @src is a sender.
+ */
+#define RTP_SOURCE_IS_SENDER(src) (src->is_sender)
+/**
+ * RTP_SOURCE_IS_MARKED_BYE:
+ * @src: an #RTPSource
+ *
+ * Check if @src is marked as BYE.
+ */
+#define RTP_SOURCE_IS_MARKED_BYE(src) (src->marked_bye)
+
+
+/**
+ * RTPSourcePushRTP:
+ * @src: an #RTPSource
+ * @data: the RTP buffer or buffer list ready for processing
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @src has @data ready for further
+ * processing.
+ *
+ * Returns: a #GstFlowReturn.
+ */
+typedef GstFlowReturn (*RTPSourcePushRTP) (RTPSource *src, gpointer data,
+ gpointer user_data);
+
+/**
+ * RTPSourceClockRate:
+ * @src: an #RTPSource
+ * @payload: a payload type
+ * @user_data: user data specified when registering
+ *
+ * This callback will be called when @src needs the clock-rate of the
+ * @payload.
+ *
+ * Returns: a clock-rate for @payload.
+ */
+typedef gint (*RTPSourceClockRate) (RTPSource *src, guint8 payload, gpointer user_data);
+
+/**
+ * RTPSourceCallbacks:
+ * @push_rtp: a packet becomes available for handling
+ * @clock_rate: a clock-rate is requested
+ *
+ * Callbacks performed by #RTPSource when actions need to be performed.
+ */
+typedef struct {
+ RTPSourcePushRTP push_rtp; /* hand a processed RTP packet/list downstream */
+ RTPSourceClockRate clock_rate; /* look up the clock-rate of a payload type */
+} RTPSourceCallbacks;
+
+/**
+ * RTPConflictingAddress:
+ * @address: #GSocketAddress which conflicted
+ * @time: time when the last conflict was seen
+ *
+ * This structure is used to account for addresses that have conflicted to find
+ * loops.
+ */
+typedef struct {
+ GSocketAddress *address; /* the address which conflicted (reffed) */
+ GstClockTime time; /* when the last conflict was seen */
+} RTPConflictingAddress;
+
+/**
+ * RTPSource:
+ *
+ * A source in the #RTPSession
+ *
+ * @conflicting_addresses: GList of conflicting addresses
+ */
+struct _RTPSource {
+ GObject object;
+
+ /*< private >*/
+ guint32 ssrc;
+
+ guint16 generation;
+ GHashTable *reported_in_sr_of; /* set of SSRCs */
+
+ /* probation: remaining consecutive packets needed before the source is
+ * considered validated */
+ guint probation;
+ guint curr_probation;
+ gboolean validated;
+ gboolean internal;
+ gboolean is_csrc;
+ gboolean is_sender;
+ gboolean closing;
+
+ GstStructure *sdes;
+
+ /* BYE state */
+ gboolean marked_bye;
+ gchar *bye_reason;
+ gboolean sent_bye;
+
+ /* transport addresses packets from this source arrived from */
+ GSocketAddress *rtp_from;
+ GSocketAddress *rtcp_from;
+
+ gint payload;
+ GstCaps *caps;
+ gint clock_rate; /* -1 while unknown */
+ gint32 seqnum_offset;
+
+ GstClockTime bye_time;
+ GstClockTime last_activity;
+ GstClockTime last_rtp_activity;
+
+ /* last observed RTP timestamp and the matching buffer running_time, used
+ * when constructing SR reports */
+ GstClockTime last_rtime;
+ GstClockTime last_rtptime;
+
+ /* for bitrate estimation */
+ guint64 bitrate;
+ GstClockTime prev_rtime;
+ guint64 bytes_sent;
+ guint64 bytes_received;
+
+ GQueue *packets;
+ RTPPacketRateCtx packet_rate_ctx;
+ guint32 max_dropout_time;
+ guint32 max_misorder_time;
+
+ RTPSourceCallbacks callbacks;
+ gpointer user_data;
+
+ RTPSourceStats stats;
+ RTPReceiverReport last_rr;
+
+ GList *conflicting_addresses;
+
+ /* retained RTCP feedback packets, sorted by running_time (buffer PTS) */
+ GQueue *retained_feedback;
+
+ gboolean send_pli;
+ gboolean send_fir;
+ guint8 current_send_fir_seqnum;
+ gint last_fir_count;
+ GstClockTime last_keyframe_request;
+
+ /* pending NACK seqnums and their RTX deadlines; both arrays are kept the
+ * same length and sorted by seqnum */
+ gboolean send_nack;
+ GArray *nacks;
+ GArray *nack_deadlines;
+
+ gboolean pt_set; /* TRUE once pt below holds a seen payload type */
+ guint8 pt;
+
+ gboolean disable_rtcp;
+};
+
+struct _RTPSourceClass {
+ GObjectClass parent_class; /* plain GObject subclass, no virtual methods */
+};
+
+GType rtp_source_get_type (void);
+
+/* managing lifetime of sources */
+RTPSource* rtp_source_new (guint32 ssrc);
+void rtp_source_set_callbacks (RTPSource *src, RTPSourceCallbacks *cb, gpointer data);
+
+/* properties */
+guint32 rtp_source_get_ssrc (RTPSource *src);
+
+void rtp_source_set_as_csrc (RTPSource *src);
+gboolean rtp_source_is_as_csrc (RTPSource *src);
+
+gboolean rtp_source_is_active (RTPSource *src);
+gboolean rtp_source_is_validated (RTPSource *src);
+gboolean rtp_source_is_sender (RTPSource *src);
+
+void rtp_source_mark_bye (RTPSource *src, const gchar *reason);
+gboolean rtp_source_is_marked_bye (RTPSource *src);
+gchar * rtp_source_get_bye_reason (RTPSource *src);
+
+void rtp_source_update_caps (RTPSource *src, GstCaps *caps);
+
+/* SDES info */
+const GstStructure *
+ rtp_source_get_sdes_struct (RTPSource * src);
+gboolean rtp_source_set_sdes_struct (RTPSource * src, GstStructure *sdes);
+
+/* handling network address */
+void rtp_source_set_rtp_from (RTPSource *src, GSocketAddress *address);
+void rtp_source_set_rtcp_from (RTPSource *src, GSocketAddress *address);
+
+/* handling RTP */
+GstFlowReturn rtp_source_process_rtp (RTPSource *src, RTPPacketInfo *pinfo);
+
+GstFlowReturn rtp_source_send_rtp (RTPSource *src, RTPPacketInfo *pinfo);
+
+/* RTCP messages */
+void rtp_source_process_sr (RTPSource *src, GstClockTime time, guint64 ntptime,
+ guint32 rtptime, guint32 packet_count, guint32 octet_count);
+void rtp_source_process_rb (RTPSource *src, guint32 ssrc, guint64 ntpnstime, guint8 fractionlost,
+ gint32 packetslost, guint32 exthighestseq, guint32 jitter,
+ guint32 lsr, guint32 dlsr);
+
+gboolean rtp_source_get_new_sr (RTPSource *src, guint64 ntpnstime, GstClockTime running_time,
+ guint64 *ntptime, guint32 *rtptime, guint32 *packet_count,
+ guint32 *octet_count);
+gboolean rtp_source_get_new_rb (RTPSource *src, GstClockTime time, guint8 *fractionlost,
+ gint32 *packetslost, guint32 *exthighestseq, guint32 *jitter,
+ guint32 *lsr, guint32 *dlsr);
+
+gboolean rtp_source_get_last_sr (RTPSource *src, GstClockTime *time, guint64 *ntptime,
+ guint32 *rtptime, guint32 *packet_count,
+ guint32 *octet_count);
+gboolean rtp_source_get_last_rb (RTPSource *src, guint32 * ssrc, guint8 *fractionlost, gint32 *packetslost,
+ guint32 *exthighestseq, guint32 *jitter,
+ guint32 *lsr, guint32 *dlsr, guint32 *round_trip);
+
+void rtp_source_reset (RTPSource * src);
+
+gboolean rtp_source_find_conflicting_address (RTPSource * src,
+ GSocketAddress *address,
+ GstClockTime time);
+
+void rtp_source_add_conflicting_address (RTPSource * src,
+ GSocketAddress *address,
+ GstClockTime time);
+
+gboolean find_conflicting_address (GList * conflicting_address,
+ GSocketAddress * address,
+ GstClockTime time);
+
+GList * add_conflicting_address (GList * conflicting_addresses,
+ GSocketAddress * address,
+ GstClockTime time);
+GList * timeout_conflicting_addresses (GList * conflicting_addresses,
+ GstClockTime current_time);
+
+void rtp_conflicting_address_free (RTPConflictingAddress * addr);
+
+void rtp_source_timeout (RTPSource * src,
+ GstClockTime current_time,
+ GstClockTime running_time,
+ GstClockTime feedback_retention_window);
+
+void rtp_source_retain_rtcp_packet (RTPSource * src,
+ GstRTCPPacket *pkt,
+ GstClockTime running_time);
+gboolean rtp_source_has_retained (RTPSource * src,
+ GCompareFunc func,
+ gconstpointer data);
+
+void rtp_source_register_nack (RTPSource * src,
+ guint16 seqnum,
+ GstClockTime deadline);
+guint16 * rtp_source_get_nacks (RTPSource * src, guint *n_nacks);
+GstClockTime * rtp_source_get_nack_deadlines (RTPSource * src, guint *n_nacks);
+void rtp_source_clear_nacks (RTPSource * src, guint n_nacks);
+
+#endif /* __RTP_SOURCE_H__ */
diff --git a/gst/rtpmanager/rtpstats.c b/gst/rtpmanager/rtpstats.c
new file mode 100644
index 0000000000..0f35046f1e
--- /dev/null
+++ b/gst/rtpmanager/rtpstats.c
@@ -0,0 +1,680 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) 2015 Kurento (http://kurento.org/)
+ * @author: Miguel París <mparisdiaz@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#define GLIB_DISABLE_DEPRECATION_WARNINGS
+
+#include "rtpstats.h"
+#include "rtptwcc.h"
+
+/* Re-arm @ctx for a new stream: keep only the clock rate and mark every
+ * running statistic as unknown (nothing probed yet). */
+void
+gst_rtp_packet_rate_ctx_reset (RTPPacketRateCtx * ctx, gint32 clock_rate)
+{
+  ctx->probed = FALSE;
+  ctx->clock_rate = clock_rate;
+  /* -1 on the unsigned fields acts as an "invalid" sentinel (all bits set) */
+  ctx->last_ts = -1;
+  ctx->avg_packet_rate = -1;
+}
+
+/* Feed one packet (RTP @seqnum and 32-bit RTP timestamp @ts) into the
+ * rate estimator and return the updated pseudo-average packet rate in
+ * packets per second, or (guint32) -1 while it is still unknown.
+ */
+guint32
+gst_rtp_packet_rate_ctx_update (RTPPacketRateCtx * ctx, guint16 seqnum,
+    guint32 ts)
+{
+  guint64 new_ts, diff_ts;
+  gint diff_seqnum;
+  gint32 new_packet_rate;
+  gint32 base;
+
+  /* without a clock-rate, RTP timestamps cannot be converted to time */
+  if (ctx->clock_rate <= 0) {
+    return ctx->avg_packet_rate;
+  }
+
+  /* extend the 32-bit RTP timestamp into a 64-bit monotonic timestamp */
+  new_ts = ctx->last_ts;
+  gst_rtp_buffer_ext_timestamp (&new_ts, ts);
+
+  /* very first packet: nothing to compare against yet, just record it */
+  if (!ctx->probed) {
+    ctx->probed = TRUE;
+    goto done_but_save;
+  }
+
+  diff_seqnum = gst_rtp_buffer_compare_seqnum (ctx->last_seqnum, seqnum);
+  /* Ignore seqnums that are over 15,000 away from the latest one, it's close
+   * to 2^14 but far enough to avoid any risk of computing error.
+   */
+  if (diff_seqnum > 15000)
+    goto done_but_save;
+
+  /* Ignore any packet that is in the past, we're only interested in newer
+   * packets to compute the packet rate.
+   */
+  if (diff_seqnum <= 0 || new_ts <= ctx->last_ts)
+    goto done;
+
+  /* nanoseconds elapsed between the previous and this packet */
+  diff_ts = new_ts - ctx->last_ts;
+  diff_ts = gst_util_uint64_scale_int (diff_ts, GST_SECOND, ctx->clock_rate);
+  /* instantaneous rate over that span, in packets per second */
+  new_packet_rate = gst_util_uint64_scale (diff_seqnum, GST_SECOND, diff_ts);
+
+  /* The goal is that higher packet rates "win".
+   * If there's a sudden burst, the average will go up fast,
+   * but it will go down again slowly.
+   * This is useful for bursty cases, where a lot of packets are close
+   * to each other and should allow a higher reorder/dropout there.
+   * Round up the new average.
+   * We do it on different rates depending on the packet rate, so it's not too
+   * jumpy.
+   */
+  /* NOTE(review): avg_packet_rate is guint32 and compared/mixed with
+   * signed values here; while it is still -1 (unknown) the arithmetic
+   * below runs on the wrapped value -- confirm intended before changing. */
+  if (ctx->avg_packet_rate > new_packet_rate)
+    base = MAX (ctx->avg_packet_rate / 3, 8); /* about 333 ms */
+  else
+    base = MAX (ctx->avg_packet_rate / 15, 2); /* about 66 ms */
+
+  /* weight of the new sample in the weighted average below */
+  diff_seqnum = MIN (diff_seqnum, base - 1);
+
+  ctx->avg_packet_rate = (((base - diff_seqnum) * ctx->avg_packet_rate) +
+      (new_packet_rate * diff_seqnum)) / base;
+
+
+done_but_save:
+
+  ctx->last_seqnum = seqnum;
+  ctx->last_ts = new_ts;
+done:
+
+  return ctx->avg_packet_rate;
+}
+
+/* Current pseudo-average packet rate in packets per second, or
+ * (guint32) -1 while no estimate has been computed yet. */
+guint32
+gst_rtp_packet_rate_ctx_get (RTPPacketRateCtx * ctx)
+{
+  guint32 rate;
+
+  rate = ctx->avg_packet_rate;
+  return rate;
+}
+
+/* Maximum number of dropped packets to tolerate for @time_ms worth of
+ * traffic at the current average packet rate; falls back to
+ * RTP_DEF_DROPOUT while no usable rate estimate exists. */
+guint32
+gst_rtp_packet_rate_ctx_get_max_dropout (RTPPacketRateCtx * ctx, gint32 time_ms)
+{
+  gboolean have_estimate;
+
+  have_estimate = time_ms > 0 && ctx->probed && ctx->avg_packet_rate != -1;
+  if (!have_estimate)
+    return RTP_DEF_DROPOUT;
+
+  /* never tolerate less than the hard minimum */
+  return MAX (RTP_MIN_DROPOUT, ctx->avg_packet_rate * time_ms / 1000);
+}
+
+/* Maximum number of misordered packets to tolerate for @time_ms worth of
+ * traffic at the current average packet rate; falls back to
+ * RTP_DEF_MISORDER while no usable rate estimate exists. */
+guint32
+gst_rtp_packet_rate_ctx_get_max_misorder (RTPPacketRateCtx * ctx,
+    gint32 time_ms)
+{
+  gboolean have_estimate;
+
+  have_estimate = time_ms > 0 && ctx->probed && ctx->avg_packet_rate != -1;
+  if (!have_estimate)
+    return RTP_DEF_MISORDER;
+
+  /* never tolerate less than the hard minimum */
+  return MAX (RTP_MIN_MISORDER, ctx->avg_packet_rate * time_ms / 1000);
+}
+
+/**
+ * rtp_stats_init_defaults:
+ * @stats: an #RTPSessionStats struct
+ *
+ * Initialize @stats with its default values.
+ */
+void
+rtp_stats_init_defaults (RTPSessionStats * stats)
+{
+  /* reset the NACK bookkeeping */
+  stats->nacks_dropped = 0;
+  stats->nacks_sent = 0;
+  stats->nacks_received = 0;
+
+  /* timing defaults */
+  stats->min_interval = RTP_STATS_MIN_INTERVAL;
+  stats->bye_timeout = RTP_STATS_BYE_TIMEOUT;
+
+  /* derive all bandwidth figures from the built-in defaults */
+  rtp_stats_set_bandwidths (stats, -1, -1, -1, -1);
+}
+
+/**
+ * rtp_stats_set_bandwidths:
+ * @stats: an #RTPSessionStats struct
+ * @rtp_bw: RTP bandwidth
+ * @rtcp_bw: RTCP bandwidth
+ * @rs: sender RTCP bandwidth
+ * @rr: receiver RTCP bandwidth
+ *
+ * Configure the bandwidth parameters in the stats. When an input variable is
+ * set to -1, it will be calculated from the other input variables and from the
+ * defaults.
+ */
+void
+rtp_stats_set_bandwidths (RTPSessionStats * stats, guint rtp_bw,
+    gdouble rtcp_bw, guint rs, guint rr)
+{
+  GST_DEBUG ("recalc bandwidths: RTP %u, RTCP %f, RS %u, RR %u", rtp_bw,
+      rtcp_bw, rs, rr);
+
+  /* NOTE(review): rtp_bw, rs and rr are guint, so the "== -1" / "!= -1"
+   * comparisons below test against G_MAXUINT after the usual arithmetic
+   * conversion; -1 is the "not specified" sentinel throughout. */
+
+  /* when given, sender and receiver bandwidth add up to the total
+   * rtcp bandwidth */
+  if (rs != -1 && rr != -1)
+    rtcp_bw = rs + rr;
+
+  /* If rtcp_bw is between 0 and 1, it is a fraction of rtp_bw */
+  if (rtcp_bw > 0.0 && rtcp_bw < 1.0) {
+    if (rtp_bw > 0.0)
+      rtcp_bw = rtp_bw * rtcp_bw;
+    else
+      rtcp_bw = -1.0;
+  }
+
+  /* RTCP is 5% of the RTP bandwidth */
+  if (rtp_bw == -1 && rtcp_bw > 1.0)
+    rtp_bw = rtcp_bw * 20;
+  else if (rtp_bw != -1 && rtcp_bw < 0.0)
+    rtcp_bw = rtp_bw / 20;
+  else if (rtp_bw == -1 && rtcp_bw < 0.0) {
+    /* nothing given, take defaults */
+    rtp_bw = RTP_STATS_BANDWIDTH;
+    rtcp_bw = rtp_bw * RTP_STATS_RTCP_FRACTION;
+  }
+
+  stats->bandwidth = rtp_bw;
+  stats->rtcp_bandwidth = rtcp_bw;
+
+  /* now figure out the fractions */
+  if (rs == -1) {
+    /* rs unknown */
+    if (rr == -1) {
+      /* both not given, use defaults */
+      rs = stats->rtcp_bandwidth * RTP_STATS_SENDER_FRACTION;
+      rr = stats->rtcp_bandwidth * RTP_STATS_RECEIVER_FRACTION;
+    } else {
+      /* rr known, calculate rs */
+      if (stats->rtcp_bandwidth > rr)
+        rs = stats->rtcp_bandwidth - rr;
+      else
+        rs = 0;
+    }
+  } else if (rr == -1) {
+    /* rs known, calculate rr */
+    if (stats->rtcp_bandwidth > rs)
+      rr = stats->rtcp_bandwidth - rs;
+    else
+      rr = 0;
+  }
+
+  if (stats->rtcp_bandwidth > 0) {
+    stats->sender_fraction = ((gdouble) rs) / ((gdouble) stats->rtcp_bandwidth);
+    stats->receiver_fraction = 1.0 - stats->sender_fraction;
+  } else {
+    /* no RTCP bandwidth, set dummy values */
+    stats->sender_fraction = 0.0;
+    stats->receiver_fraction = 0.0;
+  }
+  GST_DEBUG ("bandwidths: RTP %u, RTCP %u, RS %f, RR %f", stats->bandwidth,
+      stats->rtcp_bandwidth, stats->sender_fraction, stats->receiver_fraction);
+}
+
+/**
+ * rtp_stats_calculate_rtcp_interval:
+ * @stats: an #RTPSessionStats struct
+ * @we_send: if we are a sender
+ * @profile: RTP profile of this session
+ * @ptp: if this session is a point-to-point session
+ * @first: if this is the first time
+ *
+ * Calculate the RTCP interval. The result of this function is the amount of
+ * time to wait (in nanoseconds) before sending a new RTCP message.
+ *
+ * Returns: the RTCP interval, or #GST_CLOCK_TIME_NONE when no RTCP
+ * bandwidth is available and no RTCP should be sent.
+ */
+GstClockTime
+rtp_stats_calculate_rtcp_interval (RTPSessionStats * stats, gboolean we_send,
+    GstRTPProfile profile, gboolean ptp, gboolean first)
+{
+  gdouble members, senders, n;
+  gdouble avg_rtcp_size, rtcp_bw;
+  gdouble interval;
+  gdouble rtcp_min_time;
+
+  if (profile == GST_RTP_PROFILE_AVPF || profile == GST_RTP_PROFILE_SAVPF) {
+    /* RFC 4585 3.4d), 3.5.1: feedback profiles may report immediately,
+     * except for the very first multi-party report */
+    if (first && !ptp)
+      rtcp_min_time = 1.0;
+    else
+      rtcp_min_time = 0.0;
+  } else {
+    /* Very first call at application start-up uses half the min
+     * delay for quicker notification while still allowing some time
+     * before reporting for randomization and to learn about other
+     * sources so the report interval will converge to the correct
+     * interval more quickly.
+     */
+    rtcp_min_time = stats->min_interval;
+    if (first)
+      rtcp_min_time /= 2.0;
+  }
+
+  /* Dedicate a fraction of the RTCP bandwidth to senders unless
+   * the number of senders is large enough that their share is
+   * more than that fraction.
+   */
+  n = members = stats->active_sources;
+  senders = (gdouble) stats->sender_sources;
+  rtcp_bw = stats->rtcp_bandwidth;
+
+  if (senders <= members * stats->sender_fraction) {
+    if (we_send) {
+      rtcp_bw *= stats->sender_fraction;
+      n = senders;
+    } else {
+      rtcp_bw *= stats->receiver_fraction;
+      n -= senders;
+    }
+  }
+
+  /* no bandwidth for RTCP, return NONE to signal that we don't want to send
+   * RTCP packets */
+  if (rtcp_bw <= 0.0001)
+    return GST_CLOCK_TIME_NONE;
+
+  /* convert the average RTCP packet size from bytes to bits */
+  avg_rtcp_size = 8.0 * stats->avg_rtcp_packet_size;
+  /*
+   * The effective number of sites times the average packet size is
+   * the total number of octets sent when each site sends a report.
+   * Dividing this by the effective bandwidth gives the time
+   * interval over which those packets must be sent in order to
+   * meet the bandwidth target, with a minimum enforced. In that
+   * time interval we send one report so this time is also our
+   * average time between reports.
+   */
+  GST_DEBUG ("avg size %f, n %f, rtcp_bw %f", avg_rtcp_size, n, rtcp_bw);
+  interval = avg_rtcp_size * n / rtcp_bw;
+  if (interval < rtcp_min_time)
+    interval = rtcp_min_time;
+
+  return interval * GST_SECOND;
+}
+
+/**
+ * rtp_stats_add_rtcp_jitter:
+ * @stats: an #RTPSessionStats struct
+ * @interval: an RTCP interval
+ *
+ * Apply a random jitter to the @interval. @interval is typically obtained with
+ * rtp_stats_calculate_rtcp_interval().
+ *
+ * Returns: the new RTCP interval.
+ */
+GstClockTime
+rtp_stats_add_rtcp_jitter (RTPSessionStats * stats, GstClockTime interval)
+{
+  /* See RFC 3550 p 30: divide by (e - 3/2) to compensate for
+   * "unconditional reconsideration" converging to a value below the
+   * intended average. Kept as a const rather than a #define: the
+   * previous macro carried a stray trailing ';' that only compiled by
+   * accident in this expression. */
+  static const gdouble compensation = 2.71828 - 1.5;
+  gdouble temp;
+
+  /* randomize between 0.5 and 1.5 times the nominal interval */
+  temp = (interval * g_random_double_range (0.5, 1.5)) / compensation;
+
+  return (GstClockTime) temp;
+}
+
+
+/**
+ * rtp_stats_calculate_bye_interval:
+ * @stats: an #RTPSessionStats struct
+ *
+ * Calculate the BYE interval. The result of this function is the amount of
+ * time to wait (in nanoseconds) before sending a BYE message.
+ *
+ * Returns: the BYE interval, 0 when the BYE may be sent immediately, or
+ * #GST_CLOCK_TIME_NONE when no RTCP bandwidth is available.
+ */
+GstClockTime
+rtp_stats_calculate_bye_interval (RTPSessionStats * stats)
+{
+  gdouble members;
+  gdouble avg_rtcp_size, rtcp_bw;
+  gdouble interval;
+  gdouble rtcp_min_time;
+
+  /* no interval when we have less than 50 members; the BYE can then be
+   * sent immediately (cfr. RFC 3550 6.3.7) */
+  if (stats->active_sources < 50)
+    return 0;
+
+  rtcp_min_time = (stats->min_interval) / 2.0;
+
+  /* BYE packets only get the receiver share of the RTCP bandwidth;
+   * bye_members counts the sources that still have to send a BYE. */
+  members = stats->bye_members;
+  rtcp_bw = stats->rtcp_bandwidth * stats->receiver_fraction;
+
+  /* no bandwidth for RTCP, return NONE to signal that we don't want to send
+   * RTCP packets */
+  if (rtcp_bw <= 0.0001)
+    return GST_CLOCK_TIME_NONE;
+
+  /* convert the average RTCP packet size from bytes to bits */
+  avg_rtcp_size = 8.0 * stats->avg_rtcp_packet_size;
+  /*
+   * The effective number of sites times the average packet size is
+   * the total number of octets sent when each site sends a report.
+   * Dividing this by the effective bandwidth gives the time
+   * interval over which those packets must be sent in order to
+   * meet the bandwidth target, with a minimum enforced. In that
+   * time interval we send one report so this time is also our
+   * average time between reports.
+   */
+  interval = avg_rtcp_size * members / rtcp_bw;
+  if (interval < rtcp_min_time)
+    interval = rtcp_min_time;
+
+  return interval * GST_SECOND;
+}
+
+/**
+ * rtp_stats_get_packets_lost:
+ * @stats: an #RTPSourceStats struct
+ *
+ * Calculate the total number of RTP packets lost since beginning of
+ * reception. Packets that arrive late are not considered lost, and
+ * duplicates are not taken into account. Hence, the loss may be negative
+ * if there are duplicates.
+ *
+ * Returns: total RTP packets lost.
+ */
+gint64
+rtp_stats_get_packets_lost (const RTPSourceStats * stats)
+{
+  gint64 lost;
+  guint64 extended_max, expected;
+
+  /* highest sequence number seen, extended with the wrap-around count */
+  extended_max = stats->cycles + stats->max_seq;
+  /* number of packets we should have received if none were lost */
+  expected = extended_max - stats->base_seq + 1;
+  /* Subtract in signed 64-bit arithmetic: with duplicates, received can
+   * exceed expected, and the result must come out negative instead of
+   * relying on implementation-defined unsigned-to-signed conversion of
+   * a wrapped guint64 difference. */
+  lost = (gint64) expected - (gint64) stats->packets_received;
+
+  return lost;
+}
+
+/* Set the minimum average RTCP interval in seconds (see
+ * RTP_STATS_MIN_INTERVAL), used by rtp_stats_calculate_rtcp_interval(). */
+void
+rtp_stats_set_min_interval (RTPSessionStats * stats, gdouble min_interval)
+{
+  stats->min_interval = min_interval;
+}
+
+/* Compare two socket addresses as (inet address, port) pairs. */
+gboolean
+__g_socket_address_equal (GSocketAddress * a, GSocketAddress * b)
+{
+  GInetSocketAddress *inet_a = G_INET_SOCKET_ADDRESS (a);
+  GInetSocketAddress *inet_b = G_INET_SOCKET_ADDRESS (b);
+
+  /* cheap check first: the ports have to match */
+  if (g_inet_socket_address_get_port (inet_a) ==
+      g_inet_socket_address_get_port (inet_b)) {
+    return g_inet_address_equal (g_inet_socket_address_get_address (inet_a),
+        g_inet_socket_address_get_address (inet_b));
+  }
+
+  return FALSE;
+}
+
+/* Format a socket address as "host:port"; the caller frees the result. */
+gchar *
+__g_socket_address_to_string (GSocketAddress * addr)
+{
+  GInetSocketAddress *inet_addr = G_INET_SOCKET_ADDRESS (addr);
+  gchar *host, *result;
+
+  host =
+      g_inet_address_to_string (g_inet_socket_address_get_address (inet_addr));
+  result = g_strdup_printf ("%s:%u", host,
+      g_inet_socket_address_get_port (inet_addr));
+  g_free (host);
+
+  return result;
+}
+
+/* Append @s to @array, transferring ownership of @s to the array.
+ * A NULL element is appended first and then initialized in place, which
+ * avoids an extra copy of the GstStructure. */
+static void
+_append_structure_to_value_array (GValueArray * array, GstStructure * s)
+{
+  GValue *val;
+  g_value_array_append (array, NULL);
+  val = g_value_array_get_nth (array, array->n_values - 1);
+  g_value_init (val, GST_TYPE_STRUCTURE);
+  g_value_take_boxed (val, s);
+}
+
+/* Store @array in field @field_name of @s, transferring ownership of
+ * @array to the structure. */
+static void
+_structure_take_value_array (GstStructure * s,
+    const gchar * field_name, GValueArray * array)
+{
+  GValue value = G_VALUE_INIT;
+  g_value_init (&value, G_TYPE_VALUE_ARRAY);
+  g_value_take_boxed (&value, array);
+  gst_structure_take_value (s, field_name, &value);
+  /* NOTE(review): gst_structure_take_value() takes ownership of the
+   * value contents, so this unset looks like a defensive no-op --
+   * confirm against the GstStructure documentation before removing. */
+  g_value_unset (&value);
+}
+
+/* Serialize @twcc_packets into an "RTPTWCCPackets" GstStructure holding
+ * one "RTPTWCCPacket" sub-structure per packet in a "packets" array.
+ * The caller takes ownership of the returned structure. */
+GstStructure *
+rtp_twcc_stats_get_packets_structure (GArray * twcc_packets)
+{
+  GstStructure *ret;
+  GValueArray *pkt_array;
+  guint idx;
+
+  ret = gst_structure_new_empty ("RTPTWCCPackets");
+  pkt_array = g_value_array_new (0);
+
+  for (idx = 0; idx < twcc_packets->len; idx++) {
+    RTPTWCCPacket *pkt = &g_array_index (twcc_packets, RTPTWCCPacket, idx);
+    gboolean lost = (pkt->status == RTP_TWCC_PACKET_STATUS_NOT_RECV);
+    GstStructure *pkt_s;
+
+    pkt_s = gst_structure_new ("RTPTWCCPacket",
+        "seqnum", G_TYPE_UINT, pkt->seqnum,
+        "local-ts", G_TYPE_UINT64, pkt->local_ts,
+        "remote-ts", G_TYPE_UINT64, pkt->remote_ts,
+        "payload-type", G_TYPE_UCHAR, pkt->pt,
+        "size", G_TYPE_UINT, pkt->size,
+        "lost", G_TYPE_BOOLEAN, lost,
+        NULL);
+    _append_structure_to_value_array (pkt_array, pkt_s);
+  }
+
+  _structure_take_value_array (ret, "packets", pkt_array);
+
+  return ret;
+}
+
+/* Fold a freshly parsed TWCC feedback report (@twcc_packets) into
+ * @stats: count received packets and compute, per packet, the local and
+ * remote inter-packet deltas plus their difference (delta-delta),
+ * carrying last_local_ts/last_remote_ts over between calls. */
+static void
+rtp_twcc_stats_calculate_stats (RTPTWCCStats * stats, GArray * twcc_packets)
+{
+  guint packets_recv = 0;
+  guint i;
+
+  for (i = 0; i < twcc_packets->len; i++) {
+    RTPTWCCPacket *pkt = &g_array_index (twcc_packets, RTPTWCCPacket, i);
+
+    if (pkt->status != RTP_TWCC_PACKET_STATUS_NOT_RECV)
+      packets_recv++;
+
+    /* time between this and the previously processed packet, send side */
+    if (GST_CLOCK_TIME_IS_VALID (pkt->local_ts) &&
+        GST_CLOCK_TIME_IS_VALID (stats->last_local_ts)) {
+      pkt->local_delta = GST_CLOCK_DIFF (stats->last_local_ts, pkt->local_ts);
+    }
+
+    /* same interval as observed on the receive side */
+    if (GST_CLOCK_TIME_IS_VALID (pkt->remote_ts) &&
+        GST_CLOCK_TIME_IS_VALID (stats->last_remote_ts)) {
+      pkt->remote_delta =
+          GST_CLOCK_DIFF (stats->last_remote_ts, pkt->remote_ts);
+    }
+
+    /* delta-delta: positive when the network spread packets apart,
+     * negative when they bunched up.
+     * NOTE(review): assumes local_delta/remote_delta were initialized to
+     * an invalid STIME by the parser -- confirm in rtptwcc. */
+    if (GST_CLOCK_STIME_IS_VALID (pkt->local_delta) &&
+        GST_CLOCK_STIME_IS_VALID (pkt->remote_delta)) {
+      pkt->delta_delta = pkt->remote_delta - pkt->local_delta;
+    }
+
+    stats->last_local_ts = pkt->local_ts;
+    stats->last_remote_ts = pkt->remote_ts;
+  }
+
+  stats->packets_sent = twcc_packets->len;
+  stats->packets_recv = packets_recv;
+}
+
+/* Scan stats->packets backwards from the newest packet that has both a
+ * valid local and remote timestamp, and find the index where at least
+ * @duration of local time is covered. On success, returns that start
+ * index and fills @local_duration/@remote_duration with the spans that
+ * are actually covered in the local resp. remote clock; returns -1 when
+ * there are not yet two usable packets spanning the duration. */
+static gint
+_get_window_start_index (RTPTWCCStats * stats, GstClockTime duration,
+    GstClockTime * local_duration, GstClockTime * remote_duration)
+{
+  RTPTWCCPacket *last = NULL;
+  guint i;
+
+  if (stats->packets->len < 2)
+    return -1;
+
+  for (i = 0; i < stats->packets->len; i++) {
+    guint start_index = stats->packets->len - 1 - i;
+    RTPTWCCPacket *pkt =
+        &g_array_index (stats->packets, RTPTWCCPacket, start_index);
+    if (GST_CLOCK_TIME_IS_VALID (pkt->local_ts)
+        && GST_CLOCK_TIME_IS_VALID (pkt->remote_ts)) {
+      /* first find the last valid packet */
+      if (last == NULL) {
+        last = pkt;
+      } else {
+        /* and then get the duration in local ts
+         * NOTE(review): signed/unsigned comparison below; ld should be
+         * non-negative if packets are time-ordered -- confirm. */
+        GstClockTimeDiff ld = GST_CLOCK_DIFF (pkt->local_ts, last->local_ts);
+        if (ld >= duration) {
+          *local_duration = ld;
+          *remote_duration = GST_CLOCK_DIFF (pkt->remote_ts, last->remote_ts);
+          return start_index;
+        }
+      }
+    }
+  }
+
+  return -1;
+}
+
+/* Recompute the windowed statistics (loss percentage, send/receive
+ * bitrate, average delta-of-delta) over the most recent window_size of
+ * local time, dropping packets that fell out of the window. Keeps the
+ * previous stats when the window is not full yet. */
+static void
+rtp_twcc_stats_calculate_windowed_stats (RTPTWCCStats * stats)
+{
+  guint i;
+  gint start_idx;
+  guint bits_sent = 0;
+  guint bits_recv = 0;
+  guint packets_sent = 0;
+  guint packets_recv = 0;
+  guint packets_lost;
+  GstClockTimeDiff delta_delta_sum = 0;
+  guint delta_delta_count = 0;
+  GstClockTime local_duration;
+  GstClockTime remote_duration;
+
+  start_idx = _get_window_start_index (stats, stats->window_size,
+      &local_duration, &remote_duration);
+  if (start_idx == -1) {
+    /* not enough data for a full window yet */
+    return;
+  }
+
+  /* remove the old packets */
+  if (start_idx > 0)
+    g_array_remove_range (stats->packets, 0, start_idx);
+
+  /* the newest packet delimits the window and is not counted itself;
+   * start_idx != -1 guarantees len >= 2, so this is at least 1 and the
+   * division below cannot be by zero */
+  packets_sent = stats->packets->len - 1;
+
+  for (i = 0; i < packets_sent; i++) {
+    RTPTWCCPacket *pkt = &g_array_index (stats->packets, RTPTWCCPacket, i);
+
+    if (GST_CLOCK_TIME_IS_VALID (pkt->local_ts)) {
+      bits_sent += pkt->size * 8;
+    }
+
+    /* a valid remote timestamp means the packet actually arrived */
+    if (GST_CLOCK_TIME_IS_VALID (pkt->remote_ts)) {
+      bits_recv += pkt->size * 8;
+      packets_recv++;
+    }
+
+    if (GST_CLOCK_STIME_IS_VALID (pkt->delta_delta)) {
+      delta_delta_sum += pkt->delta_delta;
+      delta_delta_count++;
+    }
+  }
+
+  packets_lost = packets_sent - packets_recv;
+  stats->packet_loss_pct = (packets_lost * 100) / (gfloat) packets_sent;
+
+  if (delta_delta_count) {
+    GstClockTimeDiff avg_delta_of_delta = delta_delta_sum / delta_delta_count;
+    if (GST_CLOCK_STIME_IS_VALID (stats->avg_delta_of_delta)) {
+      /* change of the average, in 250us units.
+       * NOTE(review): this is an integer division that truncates before
+       * the gfloat assignment -- confirm that is intended. */
+      stats->avg_delta_of_delta_change =
+          (avg_delta_of_delta -
+          stats->avg_delta_of_delta) / (250 * GST_USECOND);
+    }
+    stats->avg_delta_of_delta = avg_delta_of_delta;
+  }
+
+  /* scale bit counts by the actually covered durations to get bits/s */
+  if (local_duration > 0)
+    stats->bitrate_sent =
+        gst_util_uint64_scale (bits_sent, GST_SECOND, local_duration);
+  if (remote_duration > 0)
+    stats->bitrate_recv =
+        gst_util_uint64_scale (bits_recv, GST_SECOND, remote_duration);
+
+  GST_DEBUG ("Got stats: bits_sent: %u, bits_recv: %u, packets_sent = %u, "
+      "packets_recv: %u, packetlost_pct = %f, sent_bitrate = %u, "
+      "recv_bitrate = %u, delta-delta-avg = %" GST_STIME_FORMAT ", "
+      "delta-delta-change: %f", bits_sent, bits_recv, stats->packets_sent,
+      packets_recv, stats->packet_loss_pct, stats->bitrate_sent,
+      stats->bitrate_recv, GST_STIME_ARGS (stats->avg_delta_of_delta),
+      stats->avg_delta_of_delta_change);
+}
+
+/* Allocate a new TWCC stats context; release with rtp_twcc_stats_free(). */
+RTPTWCCStats *
+rtp_twcc_stats_new (void)
+{
+  RTPTWCCStats *stats;
+
+  stats = g_new0 (RTPTWCCStats, 1);
+  stats->packets = g_array_new (FALSE, FALSE, sizeof (RTPTWCCPacket));
+
+  /* no timestamps or averages seen yet */
+  stats->last_local_ts = GST_CLOCK_TIME_NONE;
+  stats->last_remote_ts = GST_CLOCK_TIME_NONE;
+  stats->avg_delta_of_delta = GST_CLOCK_STIME_NONE;
+
+  /* FIXME: could be configurable? */
+  stats->window_size = 300 * GST_MSECOND;
+
+  return stats;
+}
+
+/* Release @stats and the packet history array it owns. */
+void
+rtp_twcc_stats_free (RTPTWCCStats * stats)
+{
+  g_array_unref (stats->packets);
+  g_free (stats);
+}
+
+/* Serialize the current windowed TWCC stats into an "RTPTWCCStats"
+ * GstStructure; the caller takes ownership of the returned structure. */
+static GstStructure *
+rtp_twcc_stats_get_stats_structure (RTPTWCCStats * stats)
+{
+  /* packet_loss_pct is gfloat, promoted to double through the varargs,
+   * matching G_TYPE_DOUBLE */
+  return gst_structure_new ("RTPTWCCStats",
+      "bitrate-sent", G_TYPE_UINT, stats->bitrate_sent,
+      "bitrate-recv", G_TYPE_UINT, stats->bitrate_recv,
+      "packets-sent", G_TYPE_UINT, stats->packets_sent,
+      "packets-recv", G_TYPE_UINT, stats->packets_recv,
+      "packet-loss-pct", G_TYPE_DOUBLE, stats->packet_loss_pct,
+      "avg-delta-of-delta", G_TYPE_INT64, stats->avg_delta_of_delta, NULL);
+}
+
+/* Entry point: ingest one parsed TWCC feedback report, append its
+ * packets to the history, recompute running and windowed statistics and
+ * return a GstStructure with the result (caller owns the structure). */
+GstStructure *
+rtp_twcc_stats_process_packets (RTPTWCCStats * stats, GArray * twcc_packets)
+{
+  rtp_twcc_stats_calculate_stats (stats, twcc_packets);
+  g_array_append_vals (stats->packets, twcc_packets->data, twcc_packets->len);
+  rtp_twcc_stats_calculate_windowed_stats (stats);
+  return rtp_twcc_stats_get_stats_structure (stats);
+}
diff --git a/gst/rtpmanager/rtpstats.h b/gst/rtpmanager/rtpstats.h
new file mode 100644
index 0000000000..4b08e1629b
--- /dev/null
+++ b/gst/rtpmanager/rtpstats.h
@@ -0,0 +1,303 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) 2015 Kurento (http://kurento.org/)
+ * @author: Miguel París <mparisdiaz@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __RTP_STATS_H__
+#define __RTP_STATS_H__
+
+#include <gst/gst.h>
+#include <gst/net/gstnetaddressmeta.h>
+#include <gst/rtp/rtp.h>
+#include <gio/gio.h>
+
+/* UDP/IP is assumed for bandwidth calculation */
+#define UDP_IP_HEADER_OVERHEAD 28
+
+/**
+ * RTPSenderReport:
+ *
+ * A sender report structure (cfr. RFC 3550 SR fields).
+ */
+typedef struct {
+  gboolean is_valid;            /* TRUE once this block holds a report */
+  guint64 ntptime;              /* NTP timestamp from the sender report */
+  guint32 rtptime;              /* RTP timestamp from the sender report */
+  guint32 packet_count;         /* sender's cumulative packet count */
+  guint32 octet_count;          /* sender's cumulative octet count */
+  GstClockTime time;            /* local time associated with the report --
+                                 * presumably receive time, confirm */
+} RTPSenderReport;
+
+/**
+ * RTPReceiverReport:
+ *
+ * A receiver report structure (cfr. RFC 3550 RR report block fields).
+ */
+typedef struct {
+  gboolean is_valid;            /* TRUE once this block holds a report */
+  guint32 ssrc; /* which source is the report about */
+  guint8 fractionlost;          /* fraction lost since the previous report */
+  guint32 packetslost;          /* cumulative number of packets lost */
+  guint32 exthighestseq;        /* extended highest seqnum received */
+  guint32 jitter;               /* interarrival jitter */
+  guint32 lsr;                  /* last SR timestamp */
+  guint32 dlsr;                 /* delay since last SR */
+  guint32 round_trip;           /* computed round-trip time */
+} RTPReceiverReport;
+
+/**
+ * RTPPacketInfo:
+ * @send: if this is a packet for sending
+ * @rtp: if this info is about an RTP packet
+ * @is_list: if this is a bufferlist
+ * @data: a #GstBuffer or #GstBufferList
+ * @address: address of the sender of the packet
+ * @current_time: current time according to the system clock
+ * @running_time: time of a packet as buffer running_time
+ * @arrival_time: time of arrival of a packet
+ * @ntpnstime: time of a packet NTP time in nanoseconds
+ * @header_len: number of overhead bytes per packet
+ * @bytes: bytes of the packet including lowlevel overhead
+ * @packets: number of packets described by this info
+ * @payload_len: bytes of the RTP payload
+ * @ssrc: the SSRC of the packet
+ * @seqnum: the seqnum of the packet
+ * @pt: the payload type of the packet
+ * @rtptime: the RTP time of the packet
+ * @marker: the marker bit
+ * @csrc_count: number of valid entries in @csrcs
+ * @csrcs: the CSRC list of the packet (at most 16 entries)
+ * @header_ext: the RTP header extension data, or %NULL
+ * @header_ext_bit_pattern: bit pattern of the header extension
+ * (NOTE(review): presumably the "defined by profile" bits -- confirm)
+ *
+ * Structure holding information about the packet.
+ */
+typedef struct {
+  gboolean send;
+  gboolean rtp;
+  gboolean is_list;
+  gpointer data;
+  GSocketAddress *address;
+  GstClockTime current_time;
+  GstClockTime running_time;
+  GstClockTime arrival_time;
+  guint64 ntpnstime;
+  guint header_len;
+  guint bytes;
+  guint packets;
+  guint payload_len;
+  guint32 ssrc;
+  guint16 seqnum;
+  guint8 pt;
+  guint32 rtptime;
+  gboolean marker;
+  guint32 csrc_count;
+  guint32 csrcs[16];
+  GBytes *header_ext;
+  guint16 header_ext_bit_pattern;
+} RTPPacketInfo;
+
+/**
+ * RTPSourceStats:
+ * @packets_received: number of received packets in total
+ * @octets_received: number of payload bytes received
+ * @bytes_received: number of total bytes received including headers and lower
+ * protocol level overhead
+ * @prev_expected: number of packets expected in the previous reporting
+ * interval
+ * @prev_received: number of packets received in previous reporting
+ * interval
+ * @max_seq: highest sequence number received
+ * @cycles: sequence-number wrap count, pre-scaled so it can be added
+ * directly to @max_seq (see rtp_stats_get_packets_lost())
+ * @base_seq: base sequence number (cfr. RFC 3550 A.1)
+ * @bad_seq: sequence number used to detect restarts (cfr. RFC 3550 A.1)
+ * @transit: previous transit time used for calculating @jitter
+ * @jitter: current jitter (in clock rate units scaled by 16 for precision)
+ * @packets_sent: number of packets sent
+ * @octets_sent: number of payload bytes sent
+ * @prev_rtptime: previous time when an RTP packet was received
+ * @prev_rtcptime: previous time when an RTCP packet was received
+ * @last_rtptime: time when last RTP packet received
+ * @last_rtcptime: time when last RTCP packet received
+ * @curr_rr: index of current @rr block
+ * @rr: previous and current receiver report block
+ * @curr_sr: index of current @sr block
+ * @sr: previous and current sender report block
+ *
+ * Stats about a source.
+ */
+typedef struct {
+  guint64 packets_received;
+  guint64 octets_received;
+  guint64 bytes_received;
+
+  guint32 prev_expected;
+  guint32 prev_received;
+
+  guint16 max_seq;
+  guint64 cycles;
+  guint32 base_seq;
+  guint32 bad_seq;
+  guint32 transit;
+  guint32 jitter;
+
+  guint64 packets_sent;
+  guint64 octets_sent;
+
+  /* RTCP feedback message counters */
+  guint sent_pli_count;
+  guint recv_pli_count;
+  guint sent_fir_count;
+  guint recv_fir_count;
+  guint sent_nack_count;
+  guint recv_nack_count;
+
+  /* when we received stuff */
+  GstClockTime prev_rtptime;
+  GstClockTime prev_rtcptime;
+  GstClockTime last_rtptime;
+  GstClockTime last_rtcptime;
+
+  /* sender and receiver reports */
+  gint curr_rr;
+  RTPReceiverReport rr[2];
+  gint curr_sr;
+  RTPSenderReport sr[2];
+} RTPSourceStats;
+
+#define RTP_STATS_BANDWIDTH 64000
+#define RTP_STATS_RTCP_FRACTION 0.05
+/*
+ * Minimum average time between RTCP packets from this site (in
+ * seconds). This time prevents the reports from `clumping' when
+ * sessions are small and the law of large numbers isn't helping
+ * to smooth out the traffic. It also keeps the report interval
+ * from becoming ridiculously small during transient outages like
+ * a network partition.
+ */
+#define RTP_STATS_MIN_INTERVAL 5.0
+/*
+ * Fraction of the RTCP bandwidth to be shared among active
+ * senders. (This fraction was chosen so that in a typical
+ * session with one or two active senders, the computed report
+ * time would be roughly equal to the minimum report time so that
+ * we don't unnecessarily slow down receiver reports.) The
+ * receiver fraction must be 1 - the sender fraction.
+ */
+#define RTP_STATS_SENDER_FRACTION (0.25)
+#define RTP_STATS_RECEIVER_FRACTION (1.0 - RTP_STATS_SENDER_FRACTION)
+
+/*
+ * When receiving a BYE from a source, remove the source from the database
+ * after this timeout.
+ */
+#define RTP_STATS_BYE_TIMEOUT (2 * GST_SECOND)
+
+/*
+ * The default and minimum values of the maximum number of missing packets we tolerate.
+ * These are packets with a sequence number bigger than the last seen packet.
+ */
+#define RTP_DEF_DROPOUT 3000
+#define RTP_MIN_DROPOUT 30
+
+/*
+ * The default and minimum values of the maximum number of misordered packets we tolerate.
+ * These are packets with a sequence number smaller than the last seen packet.
+ */
+#define RTP_DEF_MISORDER 100
+#define RTP_MIN_MISORDER 10
+
+/**
+ * RTPPacketRateCtx:
+ *
+ * Context to calculate the pseudo-average packet rate.
+ */
+typedef struct {
+  gboolean probed;              /* TRUE once the first packet was seen */
+  gint32 clock_rate;            /* RTP clock rate, <= 0 disables estimation */
+  guint16 last_seqnum;          /* seqnum of the newest packet */
+  guint64 last_ts;              /* extended RTP timestamp of newest packet */
+  guint32 avg_packet_rate;      /* packets/second, (guint32) -1 when unknown */
+} RTPPacketRateCtx;
+
+void gst_rtp_packet_rate_ctx_reset (RTPPacketRateCtx * ctx, gint32 clock_rate);
+guint32 gst_rtp_packet_rate_ctx_update (RTPPacketRateCtx *ctx, guint16 seqnum, guint32 ts);
+guint32 gst_rtp_packet_rate_ctx_get (RTPPacketRateCtx *ctx);
+guint32 gst_rtp_packet_rate_ctx_get_max_dropout (RTPPacketRateCtx *ctx, gint32 time_ms);
+guint32 gst_rtp_packet_rate_ctx_get_max_misorder (RTPPacketRateCtx *ctx, gint32 time_ms);
+
+/**
+ * RTPSessionStats:
+ *
+ * Stats kept for a session and used to produce RTCP packet timeouts.
+ */
+typedef struct {
+  guint bandwidth;              /* total session (RTP) bandwidth */
+  guint rtcp_bandwidth;         /* bandwidth reserved for RTCP */
+  gdouble sender_fraction;      /* share of RTCP bandwidth for senders */
+  gdouble receiver_fraction;    /* 1.0 - sender_fraction */
+  gdouble min_interval;         /* minimum RTCP interval, in seconds */
+  GstClockTime bye_timeout;     /* when to remove a source after its BYE */
+  guint internal_sources;
+  guint sender_sources;
+  guint internal_sender_sources;
+  guint active_sources;
+  guint avg_rtcp_packet_size;   /* average RTCP packet size, in bytes */
+  guint bye_members;            /* members that still have to send a BYE */
+  guint nacks_dropped;
+  guint nacks_sent;
+  guint nacks_received;
+} RTPSessionStats;
+
+/**
+ * RTPTWCCStats:
+ *
+ * Stats kept for a session and used to produce TWCC stats.
+ */
+typedef struct {
+  GArray *packets;              /* RTPTWCCPacket history, oldest first */
+  GstClockTime window_size;     /* length of the stats window, local time */
+  GstClockTime last_local_ts;   /* local ts of last processed packet */
+  GstClockTime last_remote_ts;  /* remote ts of last processed packet */
+
+  guint bitrate_sent;           /* windowed send bitrate, bits/s */
+  guint bitrate_recv;           /* windowed receive bitrate, bits/s */
+  guint packets_sent;
+  guint packets_recv;
+  gfloat packet_loss_pct;       /* windowed loss percentage (0-100) */
+  GstClockTimeDiff avg_delta_of_delta;  /* average delta-of-delta */
+  gfloat avg_delta_of_delta_change;     /* change of the average */
+} RTPTWCCStats;
+
+
+void rtp_stats_init_defaults (RTPSessionStats *stats);
+
+void rtp_stats_set_bandwidths (RTPSessionStats *stats,
+ guint rtp_bw,
+ gdouble rtcp_bw,
+ guint rs, guint rr);
+
+GstClockTime rtp_stats_calculate_rtcp_interval (RTPSessionStats *stats, gboolean sender, GstRTPProfile profile, gboolean ptp, gboolean first);
+GstClockTime rtp_stats_add_rtcp_jitter (RTPSessionStats *stats, GstClockTime interval);
+GstClockTime rtp_stats_calculate_bye_interval (RTPSessionStats *stats);
+gint64 rtp_stats_get_packets_lost (const RTPSourceStats *stats);
+
+void rtp_stats_set_min_interval (RTPSessionStats *stats,
+ gdouble min_interval);
+
+
+gboolean __g_socket_address_equal (GSocketAddress *a, GSocketAddress *b);
+gchar * __g_socket_address_to_string (GSocketAddress * addr);
+
+RTPTWCCStats * rtp_twcc_stats_new (void);
+void rtp_twcc_stats_free (RTPTWCCStats * stats);
+GstStructure * rtp_twcc_stats_process_packets (RTPTWCCStats * stats,
+ GArray * twcc_packets);
+GstStructure * rtp_twcc_stats_get_packets_structure (GArray * twcc_packets);
+
+#endif /* __RTP_STATS_H__ */
diff --git a/gst/rtpmanager/rtptimerqueue.c b/gst/rtpmanager/rtptimerqueue.c
new file mode 100644
index 0000000000..7b1365753d
--- /dev/null
+++ b/gst/rtpmanager/rtptimerqueue.c
@@ -0,0 +1,742 @@
+/* GStreamer RTP Manager
+ *
+ * Copyright (C) 2019 Net Insight AB
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include <string.h>
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#include "rtptimerqueue.h"
+
+struct _RtpTimerQueue
+{
+  GObject parent;
+
+  GQueue timers;        /* doubly-linked list of RtpTimer, sorted by timeout */
+  GHashTable *hashtable; /* seqnum -> RtpTimer, for o(1) lookup */
+};
+
+G_DEFINE_TYPE (RtpTimerQueue, rtp_timer_queue, G_TYPE_OBJECT);
+
+/* some timer private helpers */
+
+/* Allocate a zero-initialized timer; freed with rtp_timer_free() */
+static RtpTimer *
+rtp_timer_new (void)
+{
+  return g_slice_new0 (RtpTimer);
+}
+
+/* RtpTimer embeds a GList as its first member, so a timer can be linked
+ * into the queue without extra allocation; these helpers hide the cast. */
+static inline void
+rtp_timer_set_next (RtpTimer * timer, RtpTimer * next)
+{
+  GList *list = (GList *) timer;
+  list->next = (GList *) next;
+}
+
+static inline void
+rtp_timer_set_prev (RtpTimer * timer, RtpTimer * prev)
+{
+  GList *list = (GList *) timer;
+  list->prev = (GList *) prev;
+}
+
+/* Returns TRUE if @timer sorts after @next: strictly later timeout, or
+ * equal timeout with seqnum ordering deciding. Invalid (NONE) timeouts
+ * are treated as "immediate" and sort first. */
+static inline gboolean
+rtp_timer_is_later (RtpTimer * timer, RtpTimer * next)
+{
+  if (next == NULL)
+    return FALSE;
+
+  if (GST_CLOCK_TIME_IS_VALID (next->timeout)) {
+    if (!GST_CLOCK_TIME_IS_VALID (timer->timeout))
+      return FALSE;
+
+    if (timer->timeout > next->timeout)
+      return TRUE;
+  }
+
+  /* equal timeouts (including both NONE): break the tie on seqnum */
+  if (timer->timeout == next->timeout &&
+      gst_rtp_buffer_compare_seqnum (timer->seqnum, next->seqnum) < 0)
+    return TRUE;
+
+  return FALSE;
+}
+
+/* Mirror of rtp_timer_is_later(): TRUE if @timer sorts before @prev.
+ * An invalid (NONE) timeout is sooner than any valid one. */
+static inline gboolean
+rtp_timer_is_sooner (RtpTimer * timer, RtpTimer * prev)
+{
+  if (prev == NULL)
+    return FALSE;
+
+  if (GST_CLOCK_TIME_IS_VALID (prev->timeout)) {
+    if (!GST_CLOCK_TIME_IS_VALID (timer->timeout))
+      return TRUE;
+
+    if (timer->timeout < prev->timeout)
+      return TRUE;
+  }
+
+  /* equal timeouts (including both NONE): break the tie on seqnum */
+  if (timer->timeout == prev->timeout &&
+      gst_rtp_buffer_compare_seqnum (timer->seqnum, prev->seqnum) > 0)
+    return TRUE;
+
+  return FALSE;
+}
+
+/* Heuristic used by rtp_timer_queue_reschedule(): decide whether a
+ * re-insertion should start scanning from the queue head rather than
+ * walking backward from the timer's current position. Compares both
+ * seqnum distance and timeout distance to head vs. previous sibling. */
+static inline gboolean
+rtp_timer_is_closer_to_head (RtpTimer * timer, RtpTimer * head)
+{
+  RtpTimer *prev = rtp_timer_get_prev (timer);
+  GstClockTimeDiff prev_delta = 0;
+  GstClockTimeDiff head_delta = 0;
+
+  /* timer already is the head */
+  if (prev == NULL)
+    return FALSE;
+
+  if (rtp_timer_is_sooner (timer, head))
+    return TRUE;
+
+  if (rtp_timer_is_later (timer, prev))
+    return FALSE;
+
+  if (prev->timeout == head->timeout) {
+    gint prev_gap, head_gap;
+
+    prev_gap = gst_rtp_buffer_compare_seqnum (timer->seqnum, prev->seqnum);
+    head_gap = gst_rtp_buffer_compare_seqnum (head->seqnum, timer->seqnum);
+
+    if (head_gap < prev_gap)
+      return TRUE;
+  }
+
+  if (GST_CLOCK_TIME_IS_VALID (timer->timeout) &&
+      GST_CLOCK_TIME_IS_VALID (head->timeout)) {
+    prev_delta = GST_CLOCK_DIFF (timer->timeout, prev->timeout);
+    head_delta = GST_CLOCK_DIFF (head->timeout, timer->timeout);
+
+    if (head_delta < prev_delta)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Mirror of rtp_timer_is_closer_to_head(): decide whether a re-insertion
+ * should start scanning from the tail rather than walking forward from
+ * the timer's current position. */
+static inline gboolean
+rtp_timer_is_closer_to_tail (RtpTimer * timer, RtpTimer * tail)
+{
+  RtpTimer *next = rtp_timer_get_next (timer);
+  GstClockTimeDiff tail_delta = 0;
+  GstClockTimeDiff next_delta = 0;
+
+  /* timer already is the tail */
+  if (next == NULL)
+    return FALSE;
+
+  if (rtp_timer_is_later (timer, tail))
+    return TRUE;
+
+  if (rtp_timer_is_sooner (timer, next))
+    return FALSE;
+
+  if (tail->timeout == next->timeout) {
+    gint tail_gap, next_gap;
+
+    tail_gap = gst_rtp_buffer_compare_seqnum (timer->seqnum, tail->seqnum);
+    next_gap = gst_rtp_buffer_compare_seqnum (next->seqnum, timer->seqnum);
+
+    if (tail_gap < next_gap)
+      return TRUE;
+  }
+
+  if (GST_CLOCK_TIME_IS_VALID (timer->timeout) &&
+      GST_CLOCK_TIME_IS_VALID (next->timeout)) {
+    tail_delta = GST_CLOCK_DIFF (timer->timeout, tail->timeout);
+    next_delta = GST_CLOCK_DIFF (next->timeout, timer->timeout);
+
+    if (tail_delta < next_delta)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Typed accessors over the embedded GQueue; the setters assert the
+ * list invariants (tail has no next, head has no prev). */
+static inline RtpTimer *
+rtp_timer_queue_get_tail (RtpTimerQueue * queue)
+{
+  return (RtpTimer *) queue->timers.tail;
+}
+
+static inline void
+rtp_timer_queue_set_tail (RtpTimerQueue * queue, RtpTimer * timer)
+{
+  queue->timers.tail = (GList *) timer;
+  g_assert (queue->timers.tail->next == NULL);
+}
+
+static inline RtpTimer *
+rtp_timer_queue_get_head (RtpTimerQueue * queue)
+{
+  return (RtpTimer *) queue->timers.head;
+}
+
+static inline void
+rtp_timer_queue_set_head (RtpTimerQueue * queue, RtpTimer * timer)
+{
+  queue->timers.head = (GList *) timer;
+  g_assert (queue->timers.head->prev == NULL);
+}
+
+/* Link @timer immediately before @sibling, updating head if needed.
+ * Bypasses GQueue API so the embedded GList node is used directly. */
+static void
+rtp_timer_queue_insert_before (RtpTimerQueue * queue, RtpTimer * sibling,
+    RtpTimer * timer)
+{
+  if (sibling == rtp_timer_queue_get_head (queue)) {
+    rtp_timer_queue_set_head (queue, timer);
+  } else {
+    rtp_timer_set_prev (timer, rtp_timer_get_prev (sibling));
+    rtp_timer_set_next (rtp_timer_get_prev (sibling), timer);
+  }
+
+  rtp_timer_set_next (timer, sibling);
+  rtp_timer_set_prev (sibling, timer);
+
+  /* we manage the links ourselves, so fix up the GQueue length manually */
+  queue->timers.length++;
+}
+
+/* Link @timer immediately after @sibling, updating tail if needed. */
+static void
+rtp_timer_queue_insert_after (RtpTimerQueue * queue, RtpTimer * sibling,
+    RtpTimer * timer)
+{
+  if (sibling == rtp_timer_queue_get_tail (queue)) {
+    rtp_timer_queue_set_tail (queue, timer);
+  } else {
+    rtp_timer_set_next (timer, rtp_timer_get_next (sibling));
+    rtp_timer_set_prev (rtp_timer_get_next (sibling), timer);
+  }
+
+  rtp_timer_set_prev (timer, sibling);
+  rtp_timer_set_next (sibling, timer);
+
+  /* we manage the links ourselves, so fix up the GQueue length manually */
+  queue->timers.length++;
+}
+
+/* Insert @timer scanning backward from the tail; fast for the common
+ * case of timers scheduled later than everything already queued. */
+static void
+rtp_timer_queue_insert_tail (RtpTimerQueue * queue, RtpTimer * timer)
+{
+  RtpTimer *it = rtp_timer_queue_get_tail (queue);
+
+  while (it) {
+    /* invalid timeouts sort at the head; stop before them */
+    if (!GST_CLOCK_TIME_IS_VALID (it->timeout))
+      break;
+
+    if (timer->timeout > it->timeout)
+      break;
+
+    if (timer->timeout == it->timeout &&
+        gst_rtp_buffer_compare_seqnum (timer->seqnum, it->seqnum) < 0)
+      break;
+
+    it = rtp_timer_get_prev (it);
+  }
+
+  /* the queue is empty, or this is the earliest timeout */
+  if (it == NULL)
+    g_queue_push_head_link (&queue->timers, (GList *) timer);
+  else
+    rtp_timer_queue_insert_after (queue, it, timer);
+}
+
+/* Insert @timer scanning forward from the head; fast for timers that
+ * are immediate (invalid timeout) or earlier than most of the queue. */
+static void
+rtp_timer_queue_insert_head (RtpTimerQueue * queue, RtpTimer * timer)
+{
+  RtpTimer *it = rtp_timer_queue_get_head (queue);
+
+  while (it) {
+    if (GST_CLOCK_TIME_IS_VALID (it->timeout)) {
+      if (!GST_CLOCK_TIME_IS_VALID (timer->timeout))
+        break;
+
+      if (timer->timeout < it->timeout)
+        break;
+    }
+
+    /* equal timeouts (including both NONE): break the tie on seqnum */
+    if (timer->timeout == it->timeout &&
+        gst_rtp_buffer_compare_seqnum (timer->seqnum, it->seqnum) > 0)
+      break;
+
+    it = rtp_timer_get_next (it);
+  }
+
+  /* the queue is empty, or this is the oldest */
+  if (it == NULL)
+    g_queue_push_tail_link (&queue->timers, (GList *) timer);
+  else
+    rtp_timer_queue_insert_before (queue, it, timer);
+}
+
+static void
+rtp_timer_queue_init (RtpTimerQueue * queue)
+{
+  /* direct-hash on the seqnum value; keys/values are not owned */
+  queue->hashtable = g_hash_table_new (NULL, NULL);
+}
+
+static void
+rtp_timer_queue_finalize (GObject * object)
+{
+  RtpTimerQueue *queue = RTP_TIMER_QUEUE (object);
+  RtpTimer *timer;
+
+  /* drain and free every queued timer */
+  while ((timer = rtp_timer_queue_pop_until (queue, GST_CLOCK_TIME_NONE)))
+    rtp_timer_free (timer);
+  g_hash_table_unref (queue->hashtable);
+  g_assert (queue->timers.length == 0);
+
+  /* chain up so GObject can run its own cleanup (was missing) */
+  G_OBJECT_CLASS (rtp_timer_queue_parent_class)->finalize (object);
+}
+
+static void
+rtp_timer_queue_class_init (RtpTimerQueueClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+  gobject_class->finalize = rtp_timer_queue_finalize;
+}
+
+/**
+ * rtp_timer_free:
+ *
+ * Free a #RtpTimer structure. This should be used after a timer has been
+ * popped or unscheduled. The timer must not be queued.
+ */
+void
+rtp_timer_free (RtpTimer * timer)
+{
+  g_return_if_fail (timer);
+  g_return_if_fail (timer->queued == FALSE);
+  g_return_if_fail (timer->list.next == NULL);
+  g_return_if_fail (timer->list.prev == NULL);
+
+  g_slice_free (RtpTimer, timer);
+}
+
+/**
+ * rtp_timer_dup:
+ * @timer: a #RtpTimer
+ *
+ * This allows cloning a #RtpTimer structure.
+ *
+ * Returns: a copy of @timer
+ */
+RtpTimer *
+rtp_timer_dup (const RtpTimer * timer)
+{
+  RtpTimer *copy = g_slice_new (RtpTimer);
+  memcpy (copy, timer, sizeof (RtpTimer));
+  /* the copy is not linked into any queue */
+  memset (&copy->list, 0, sizeof (GList));
+  copy->queued = FALSE;
+  return copy;
+}
+
+/**
+ * rtp_timer_queue_find:
+ * @queue: the #RtpTimerQueue object
+ * @seqnum: the sequence number of the #RtpTimer
+ *
+ * Lookup for a timer with @seqnum. Only one timer per seqnum exists within
+ * the #RtpTimerQueue. This operation is o(1).
+ *
+ * Returns: the matching #RtpTimer or %NULL
+ */
+RtpTimer *
+rtp_timer_queue_find (RtpTimerQueue * queue, guint seqnum)
+{
+  return g_hash_table_lookup (queue->hashtable, GINT_TO_POINTER (seqnum));
+}
+
+/**
+ * rtp_timer_queue_peek_earliest:
+ * @queue: the #RtpTimerQueue object
+ *
+ * Returns: the #RtpTimer with earliest timeout value
+ */
+RtpTimer *
+rtp_timer_queue_peek_earliest (RtpTimerQueue * queue)
+{
+  return rtp_timer_queue_get_head (queue);
+}
+
+
+/**
+ * rtp_timer_queue_new:
+ *
+ * Returns: a freshly allocated #RtpTimerQueue
+ */
+RtpTimerQueue *
+rtp_timer_queue_new (void)
+{
+  return g_object_new (RTP_TYPE_TIMER_QUEUE, NULL);
+}
+
+/**
+ * rtp_timer_queue_insert:
+ * @queue: the #RtpTimerQueue object
+ * @timer: (transfer full): the #RtpTimer to insert
+ *
+ * Insert a timer into the queue. Earliest timer are at the head and then
+ * timer are sorted by seqnum (smaller seqnum first). This function is o(n)
+ * but it is expected that most timers added are scheduled later, in which
+ * case the insertion will be faster.
+ *
+ * Returns: %FALSE if a timer with the same seqnum already existed
+ */
+gboolean
+rtp_timer_queue_insert (RtpTimerQueue * queue, RtpTimer * timer)
+{
+  g_return_val_if_fail (timer->queued == FALSE, FALSE);
+
+  if (rtp_timer_queue_find (queue, timer->seqnum)) {
+    /* log before freeing so we never touch freed memory */
+    GST_WARNING ("Timer queue collision, freeing duplicate.");
+    rtp_timer_free (timer);
+    return FALSE;
+  }
+
+  /* an invalid timeout means "immediate" and sorts at the head; use the
+   * validity macro rather than comparing unsigned GstClockTime to -1 */
+  if (!GST_CLOCK_TIME_IS_VALID (timer->timeout))
+    rtp_timer_queue_insert_head (queue, timer);
+  else
+    rtp_timer_queue_insert_tail (queue, timer);
+
+  g_hash_table_insert (queue->hashtable,
+      GINT_TO_POINTER (timer->seqnum), timer);
+  timer->queued = TRUE;
+
+  return TRUE;
+}
+
+/**
+ * rtp_timer_queue_reschedule:
+ * @queue: the #RtpTimerQueue object
+ * @timer: the #RtpTimer to reschedule
+ *
+ * This function moves @timer inside the queue to put it back to it's new
+ * location. This function is o(n) but it is assumed that nearby modification
+ * of the timeout will occur.
+ *
+ * Returns: %TRUE if the timer was moved
+ */
+gboolean
+rtp_timer_queue_reschedule (RtpTimerQueue * queue, RtpTimer * timer)
+{
+  RtpTimer *it = timer;
+
+  g_return_val_if_fail (timer->queued == TRUE, FALSE);
+
+  /* shortcut: re-insert from the head when that is the shorter walk */
+  if (rtp_timer_is_closer_to_head (timer, rtp_timer_queue_get_head (queue))) {
+    g_queue_unlink (&queue->timers, (GList *) timer);
+    rtp_timer_queue_insert_head (queue, timer);
+    return TRUE;
+  }
+
+  /* walk backward while the timer sorts before its predecessor */
+  while (rtp_timer_is_sooner (timer, rtp_timer_get_prev (it)))
+    it = rtp_timer_get_prev (it);
+
+  if (it != timer) {
+    g_queue_unlink (&queue->timers, (GList *) timer);
+    rtp_timer_queue_insert_before (queue, it, timer);
+    return TRUE;
+  }
+
+  /* shortcut: re-insert from the tail when that is the shorter walk */
+  if (rtp_timer_is_closer_to_tail (timer, rtp_timer_queue_get_tail (queue))) {
+    g_queue_unlink (&queue->timers, (GList *) timer);
+    rtp_timer_queue_insert_tail (queue, timer);
+    return TRUE;
+  }
+
+  /* walk forward while the timer sorts after its successor */
+  while (rtp_timer_is_later (timer, rtp_timer_get_next (it)))
+    it = rtp_timer_get_next (it);
+
+  if (it != timer) {
+    g_queue_unlink (&queue->timers, (GList *) timer);
+    rtp_timer_queue_insert_after (queue, it, timer);
+    return TRUE;
+  }
+
+  /* already at the right position */
+  return FALSE;
+}
+
+/**
+ * rtp_timer_queue_unschedule:
+ * @queue: the #RtpTimerQueue
+ * @timer: the #RtpTimer to unschedule
+ *
+ * This removes a timer from the queue. The timer structure can be reused,
+ * or freed using rtp_timer_free(). This function is o(1).
+ */
+void
+rtp_timer_queue_unschedule (RtpTimerQueue * queue, RtpTimer * timer)
+{
+  g_return_if_fail (timer->queued == TRUE);
+
+  g_queue_unlink (&queue->timers, (GList *) timer);
+  g_hash_table_remove (queue->hashtable, GINT_TO_POINTER (timer->seqnum));
+  timer->queued = FALSE;
+}
+
+/**
+ * rtp_timer_queue_pop_until:
+ * @queue: the #RtpTimerQueue
+ * @timeout: Time at which timers expired
+ *
+ * Unschedule and return the earliest packet that has a timeout smaller or
+ * equal to @timeout. The returned #RtpTimer must be freed with
+ * rtp_timer_free(). This function is o(1).
+ *
+ * Returns: an expired timer according to @timeout, or %NULL.
+ */
+RtpTimer *
+rtp_timer_queue_pop_until (RtpTimerQueue * queue, GstClockTime timeout)
+{
+  RtpTimer *timer;
+
+  timer = (RtpTimer *) g_queue_peek_head_link (&queue->timers);
+  if (!timer)
+    return NULL;
+
+  /* invalid timeouts are "immediate" and always expired */
+  if (!GST_CLOCK_TIME_IS_VALID (timer->timeout) || timer->timeout <= timeout) {
+    rtp_timer_queue_unschedule (queue, timer);
+    return timer;
+  }
+
+  return NULL;
+}
+
+/**
+ * rtp_timer_queue_remove_until:
+ * @queue: the #RtpTimerQueue
+ * @timeout: Time at which timers expired
+ *
+ * Unschedule and free all timers that has a timeout smaller or equal to
+ * @timeout.
+ */
+void
+rtp_timer_queue_remove_until (RtpTimerQueue * queue, GstClockTime timeout)
+{
+  RtpTimer *timer;
+
+  while ((timer = rtp_timer_queue_pop_until (queue, timeout))) {
+    GST_LOG ("Removing expired timer #%d, %" GST_TIME_FORMAT " < %"
+        GST_TIME_FORMAT, timer->seqnum, GST_TIME_ARGS (timer->timeout),
+        GST_TIME_ARGS (timeout));
+    rtp_timer_free (timer);
+  }
+}
+
+/**
+ * rtp_timer_queue_remove_all:
+ * @queue: the #RtpTimerQueue
+ *
+ * Unschedule and free all timers from the queue.
+ */
+void
+rtp_timer_queue_remove_all (RtpTimerQueue * queue)
+{
+  /* GST_CLOCK_TIME_NONE is the maximum value, so everything expires */
+  rtp_timer_queue_remove_until (queue, GST_CLOCK_TIME_NONE);
+}
+
+/**
+ * rtp_timer_queue_set_timer:
+ * @queue: the #RtpTimerQueue
+ * @type: the #RtpTimerType
+ * @seqnum: the timer seqnum
+ * @timeout: the timer timeout
+ * @delay: the additional delay (will be added to @timeout)
+ * @duration: the duration of the event related to the timer
+ * @offset: offset that can be used to convert the timeout to timestamp
+ *
+ * If there exists a timer with this seqnum it will be updated, otherwise a
+ * new timer is created and inserted into the queue. This function is o(n)
+ * except that it's optimized for later timer insertion.
+ */
+void
+rtp_timer_queue_set_timer (RtpTimerQueue * queue, RtpTimerType type,
+    guint16 seqnum, GstClockTime timeout, GstClockTime delay,
+    GstClockTime duration, GstClockTimeDiff offset)
+{
+  RtpTimer *timer;
+
+  timer = rtp_timer_queue_find (queue, seqnum);
+  if (!timer)
+    timer = rtp_timer_new ();
+
+  /* for new timers or on seqnum change reset the RTX data */
+  if (!timer->queued || timer->seqnum != seqnum) {
+    if (type == RTP_TIMER_EXPECTED) {
+      timer->rtx_base = timeout;
+    }
+
+    timer->rtx_last = GST_CLOCK_TIME_NONE;
+    timer->num_rtx_retry = 0;
+    timer->num_rtx_received = 0;
+  }
+
+  timer->type = type;
+  timer->seqnum = seqnum;
+
+  /* -1 (GST_CLOCK_TIME_NONE) means immediate; keep it unmodified */
+  if (timeout == -1)
+    timer->timeout = -1;
+  else
+    timer->timeout = timeout + delay + offset;
+
+  timer->offset = offset;
+  timer->duration = duration;
+
+  if (timer->queued)
+    rtp_timer_queue_reschedule (queue, timer);
+  else
+    rtp_timer_queue_insert (queue, timer);
+}
+
+/**
+ * rtp_timer_queue_set_expected:
+ * @queue: the #RtpTimerQueue
+ * @seqnum: the timer seqnum
+ * @timeout: the timer timeout
+ * @delay: the additional delay (will be added to @timeout)
+ * @duration: the duration of the event related to the timer
+ *
+ * Specialized version of rtp_timer_queue_set_timer() that creates or updates a
+ * timer with type %RTP_TIMER_EXPECTED. Expected timers do not carry
+ * a timestamp, hence have no offset.
+ */
+void
+rtp_timer_queue_set_expected (RtpTimerQueue * queue, guint16 seqnum,
+    GstClockTime timeout, GstClockTime delay, GstClockTime duration)
+{
+  rtp_timer_queue_set_timer (queue, RTP_TIMER_EXPECTED, seqnum, timeout,
+      delay, duration, 0);
+}
+
+/**
+ * rtp_timer_queue_set_lost:
+ * @queue: the #RtpTimerQueue
+ * @seqnum: the timer seqnum
+ * @timeout: the timer timeout
+ * @duration: the duration of the event related to the timer
+ * @offset: offset that can be used to convert the timeout to timestamp
+ *
+ * Specialized version of rtp_timer_queue_set_timer() that creates or updates a
+ * timer with type %RTP_TIMER_LOST.
+ */
+void
+rtp_timer_queue_set_lost (RtpTimerQueue * queue, guint16 seqnum,
+    GstClockTime timeout, GstClockTime duration, GstClockTimeDiff offset)
+{
+  rtp_timer_queue_set_timer (queue, RTP_TIMER_LOST, seqnum, timeout, 0,
+      duration, offset);
+}
+
+/**
+ * rtp_timer_queue_set_eos:
+ * @queue: the #RtpTimerQueue
+ * @timeout: the timer timeout
+ * @offset: offset that can be used to convert the timeout to timestamp
+ *
+ * Specialized version of rtp_timer_queue_set_timer() that creates or updates a
+ * timer with type %RTP_TIMER_EOS. There is only one such a timer and it has
+ * the special seqnum value -1 (FIXME this is not an invalid seqnum,).
+ */
+void
+rtp_timer_queue_set_eos (RtpTimerQueue * queue, GstClockTime timeout,
+    GstClockTimeDiff offset)
+{
+  rtp_timer_queue_set_timer (queue, RTP_TIMER_EOS, -1, timeout, 0, 0, offset);
+}
+
+/**
+ * rtp_timer_queue_set_deadline:
+ * @queue: the #RtpTimerQueue
+ * @seqnum: the timer seqnum
+ * @timeout: the timer timeout
+ * @offset: offset that can be used to convert the timeout to timestamp
+ *
+ * Specialized version of rtp_timer_queue_set_timer() that creates or updates a
+ * timer with type %RTP_TIMER_DEADLINE. There should be only one such a timer,
+ * its seqnum matches the first packet to be output.
+ */
+void
+rtp_timer_queue_set_deadline (RtpTimerQueue * queue, guint16 seqnum,
+    GstClockTime timeout, GstClockTimeDiff offset)
+{
+  rtp_timer_queue_set_timer (queue, RTP_TIMER_DEADLINE, seqnum, timeout, 0,
+      0, offset);
+}
+
+/**
+ * rtp_timer_queue_update_timer:
+ * @queue: the #RtpTimerQueue
+ * @timer: the #RtpTimer to update
+ * @seqnum: the timer seqnum
+ * @timeout: the timer timeout
+ * @delay: the additional delay (will be added to @timeout)
+ * @offset: offset that can be used to convert the timeout to timestamp
+ * @reset: if the RTX statistics should be reset
+ *
+ * A utility to update an already queued timer.
+ */
+void
+rtp_timer_queue_update_timer (RtpTimerQueue * queue, RtpTimer * timer,
+    guint16 seqnum, GstClockTime timeout, GstClockTime delay,
+    GstClockTimeDiff offset, gboolean reset)
+{
+  g_return_if_fail (timer != NULL);
+
+  if (reset) {
+    GST_DEBUG ("reset rtx base %" GST_TIME_FORMAT "->%" GST_TIME_FORMAT,
+        GST_TIME_ARGS (timer->rtx_base), GST_TIME_ARGS (timeout));
+    timer->rtx_base = timeout;
+  }
+
+  if (timer->seqnum != seqnum) {
+    /* on seqnum change the RTX counters no longer apply */
+    timer->num_rtx_retry = 0;
+    timer->num_rtx_received = 0;
+
+    /* keep the seqnum -> timer mapping in sync */
+    if (timer->queued) {
+      g_hash_table_remove (queue->hashtable, GINT_TO_POINTER (timer->seqnum));
+      g_hash_table_insert (queue->hashtable, GINT_TO_POINTER (seqnum), timer);
+    }
+  }
+
+  /* -1 (GST_CLOCK_TIME_NONE) means immediate; keep it unmodified */
+  if (timeout == -1)
+    timer->timeout = -1;
+  else
+    timer->timeout = timeout + delay + offset;
+
+  timer->seqnum = seqnum;
+  timer->offset = offset;
+
+  if (timer->queued)
+    rtp_timer_queue_reschedule (queue, timer);
+  else
+    rtp_timer_queue_insert (queue, timer);
+}
+
+/**
+ * rtp_timer_queue_length:
+ * @queue: the #RtpTimerQueue
+ *
+ * Returns: the number of timers in the #RtpTimerQueue
+ */
+guint
+rtp_timer_queue_length (RtpTimerQueue * queue)
+{
+  return queue->timers.length;
+}
diff --git a/gst/rtpmanager/rtptimerqueue.h b/gst/rtpmanager/rtptimerqueue.h
new file mode 100644
index 0000000000..283f228791
--- /dev/null
+++ b/gst/rtpmanager/rtptimerqueue.h
@@ -0,0 +1,125 @@
+/* GStreamer RTP Manager
+ *
+ * Copyright 2007 Collabora Ltd,
+ * Copyright 2007 Nokia Corporation
+ * @author: Philippe Kalaf <philippe.kalaf@collabora.co.uk>.
+ * Copyright 2007 Wim Taymans <wim.taymans@gmail.com>
+ * Copyright 2015 Kurento (http://kurento.org/)
+ * @author: Miguel París <mparisdiaz@gmail.com>
+ * Copyright 2016 Pexip AS
+ * @author: Havard Graff <havard@pexip.com>
+ * @author: Stian Selnes <stian@pexip.com>
+ * Copyright (C) 2019 Net Insight AB
+ * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <gst/gst.h>
+
+#ifndef __RTP_TIMER_QUEUE_H__
+#define __RTP_TIMER_QUEUE_H__
+
+#define RTP_TYPE_TIMER_QUEUE rtp_timer_queue_get_type()
+G_DECLARE_FINAL_TYPE (RtpTimerQueue, rtp_timer_queue, RTP_TIMER, QUEUE, GObject);
+
+/**
+ * RtpTimerType:
+ * @RTP_TIMER_EXPECTED: This is used to track when to emit retransmission
+ *                      requests. They may be converted into %RTP_TIMER_LOST
+ *                      timer if the number of retry has been exhausted.
+ * @RTP_TIMER_LOST: This is used to track when a packet is considered lost.
+ * @RTP_TIMER_DEADLINE: This is used to track when the jitterbuffer should
+ *                      start pushing buffers.
+ * @RTP_TIMER_EOS: This is used to track when end of stream is reached.
+ */
+typedef enum
+{
+  RTP_TIMER_EXPECTED,
+  RTP_TIMER_LOST,
+  RTP_TIMER_DEADLINE,
+  RTP_TIMER_EOS
+} RtpTimerType;
+
+typedef struct
+{
+  GList list;              /* must be first: the timer doubles as its own
+                            * queue link (cast to GList in rtptimerqueue.c) */
+  gboolean queued;         /* TRUE while the timer is in a RtpTimerQueue */
+
+  guint16 seqnum;          /* RTP seqnum this timer relates to */
+  RtpTimerType type;
+  GstClockTime timeout;    /* expiry time; NONE means immediate */
+  GstClockTimeDiff offset; /* converts timeout back to a timestamp */
+  GstClockTime duration;   /* duration of the related event */
+  GstClockTime rtx_base;   /* timeout of the first RTX request */
+  GstClockTime rtx_last;   /* timeout of the most recent RTX request */
+  guint num_rtx_retry;     /* RTX requests sent */
+  guint num_rtx_received;  /* RTX packets received */
+} RtpTimer;
+
+void rtp_timer_free (RtpTimer * timer);
+RtpTimer * rtp_timer_dup (const RtpTimer * timer);
+
+/* Navigate the embedded GList link; valid only while the timer is queued. */
+static inline RtpTimer * rtp_timer_get_next (RtpTimer * timer)
+{
+  GList *list = (GList *) timer;
+  return (RtpTimer *) list->next;
+}
+
+static inline RtpTimer * rtp_timer_get_prev (RtpTimer * timer)
+{
+  GList *list = (GList *) timer;
+  return (RtpTimer *) list->prev;
+}
+
+RtpTimerQueue * rtp_timer_queue_new (void);
+
+RtpTimer * rtp_timer_queue_find (RtpTimerQueue * queue, guint seqnum);
+
+RtpTimer * rtp_timer_queue_peek_earliest (RtpTimerQueue * queue);
+
+gboolean rtp_timer_queue_insert (RtpTimerQueue * queue, RtpTimer * timer);
+
+gboolean rtp_timer_queue_reschedule (RtpTimerQueue * queue, RtpTimer * timer);
+
+void rtp_timer_queue_unschedule (RtpTimerQueue * queue, RtpTimer * timer);
+
+RtpTimer * rtp_timer_queue_pop_until (RtpTimerQueue * queue, GstClockTime timeout);
+
+void rtp_timer_queue_remove_until (RtpTimerQueue * queue, GstClockTime timeout);
+
+void rtp_timer_queue_remove_all (RtpTimerQueue * queue);
+
+void rtp_timer_queue_set_timer (RtpTimerQueue * queue, RtpTimerType type,
+ guint16 seqnum, GstClockTime timeout,
+ GstClockTime delay, GstClockTime duration,
+ GstClockTimeDiff offset);
+void rtp_timer_queue_set_expected (RtpTimerQueue * queue, guint16 seqnum,
+ GstClockTime timeout, GstClockTime delay,
+ GstClockTime duration);
+void rtp_timer_queue_set_lost (RtpTimerQueue * queue, guint16 seqnum,
+ GstClockTime timeout,
+ GstClockTime duration, GstClockTimeDiff offset);
+void rtp_timer_queue_set_eos (RtpTimerQueue * queue, GstClockTime timeout,
+ GstClockTimeDiff offset);
+void rtp_timer_queue_set_deadline (RtpTimerQueue * queue, guint16 seqnum,
+ GstClockTime timeout, GstClockTimeDiff offset);
+void rtp_timer_queue_update_timer (RtpTimerQueue * queue, RtpTimer * timer, guint16 seqnum,
+ GstClockTime timeout, GstClockTime delay,
+ GstClockTimeDiff offset, gboolean reset);
+guint rtp_timer_queue_length (RtpTimerQueue * queue);
+
+#endif
diff --git a/gst/rtpmanager/rtptwcc.c b/gst/rtpmanager/rtptwcc.c
new file mode 100644
index 0000000000..691428c214
--- /dev/null
+++ b/gst/rtpmanager/rtptwcc.c
@@ -0,0 +1,1116 @@
+/* GStreamer
+ * Copyright (C) 2019 Pexip (http://pexip.com/)
+ * @author: Havard Graff <havard@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+#include "rtptwcc.h"
+#include <gst/rtp/gstrtcpbuffer.h>
+#include <gst/base/gstbitreader.h>
+#include <gst/base/gstbitwriter.h>
+
+GST_DEBUG_CATEGORY_EXTERN (rtp_session_debug);
+#define GST_CAT_DEFAULT rtp_session_debug
+
+#define TWCC_EXTMAP_STR "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"
+
+#define REF_TIME_UNIT (64 * GST_MSECOND)
+#define DELTA_UNIT (250 * GST_USECOND)
+#define MAX_TS_DELTA (0xff * DELTA_UNIT)
+
+#define STATUS_VECTOR_MAX_CAPACITY 14
+#define STATUS_VECTOR_TWO_BIT_MAX_CAPACITY 7
+
+/* TWCC packet-chunk types, per draft-holmer-rmcat-transport-wide-cc-01 */
+typedef enum
+{
+  RTP_TWCC_CHUNK_TYPE_RUN_LENGTH = 0,
+  RTP_TWCC_CHUNK_TYPE_STATUS_VECTOR = 1,
+} RTPTWCCChunkType;
+
+/* Wire layout of the TWCC feedback FCI header (byte arrays keep the
+ * struct packed and endian-neutral; read/written with GST_READ/WRITE) */
+typedef struct
+{
+  guint8 base_seqnum[2];
+  guint8 packet_count[2];
+  guint8 base_time[3];
+  guint8 fb_pkt_count[1];
+} RTPTWCCHeader;
+
+/* Book-keeping for one received packet that will be reported in feedback */
+typedef struct
+{
+  GstClockTime ts;          /* local arrival time */
+  guint16 seqnum;           /* transport-wide seqnum */
+
+  gint64 delta;             /* receive delta, in DELTA_UNIT ticks */
+  RTPTWCCPacketStatus status;
+  guint16 missing_run;      /* consecutive missing packets before this one */
+  guint equal_run;          /* consecutive packets with the same status */
+} RecvPacket;
+
+/* Book-keeping for one sent packet, matched against incoming feedback */
+typedef struct
+{
+  GstClockTime ts;          /* local send time */
+  GstClockTime socket_ts;   /* kernel/socket timestamp, if available */
+  GstClockTime remote_ts;   /* receive time reported by the peer */
+  guint16 seqnum;           /* transport-wide seqnum */
+  guint8 pt;                /* RTP payload type */
+  guint size;               /* packet size in bytes */
+  gboolean lost;            /* peer reported it as not received */
+} SentPacket;
+
+struct _RTPTWCCManager
+{
+  GObject object;
+
+  guint8 send_ext_id;           /* RTP hdrext id for outgoing TWCC seqnums */
+  guint8 recv_ext_id;           /* RTP hdrext id for incoming TWCC seqnums */
+  guint16 send_seqnum;          /* next transport-wide seqnum to stamp */
+
+  guint mtu;
+  guint max_packets_per_rtcp;   /* derived from mtu in set_mtu() */
+  GArray *recv_packets;         /* RecvPacket, pending feedback */
+
+  guint64 fb_pkt_count;         /* feedback packets produced so far */
+  gint32 last_seqnum;           /* last received seqnum, -1 until first */
+
+  GArray *sent_packets;         /* SentPacket, awaiting feedback */
+  GArray *parsed_packets;       /* RecvPacket, parsed from feedback FCI */
+  GQueue *rtcp_buffers;         /* queued feedback buffers to send */
+
+  /* -1 sentinels until learned from incoming packets */
+  guint64 recv_sender_ssrc;
+  guint64 recv_media_ssrc;
+
+  guint16 expected_recv_seqnum;
+  guint16 packet_count_no_marker;
+
+  gboolean first_fci_parse;     /* TRUE until the first FCI is parsed */
+  guint16 expected_parsed_seqnum;
+  guint8 expected_parsed_fb_pkt_count;
+
+  GstClockTime next_feedback_send_time;
+  GstClockTime feedback_interval;
+};
+
+G_DEFINE_TYPE (RTPTWCCManager, rtp_twcc_manager, G_TYPE_OBJECT);
+
+static void
+rtp_twcc_manager_init (RTPTWCCManager * twcc)
+{
+  twcc->recv_packets = g_array_new (FALSE, FALSE, sizeof (RecvPacket));
+  twcc->sent_packets = g_array_new (FALSE, FALSE, sizeof (SentPacket));
+  twcc->parsed_packets = g_array_new (FALSE, FALSE, sizeof (RecvPacket));
+
+  twcc->rtcp_buffers = g_queue_new ();
+
+  /* -1 sentinels: nothing received/learned yet */
+  twcc->last_seqnum = -1;
+  twcc->recv_media_ssrc = -1;
+  twcc->recv_sender_ssrc = -1;
+
+  twcc->first_fci_parse = TRUE;
+
+  twcc->feedback_interval = GST_CLOCK_TIME_NONE;
+  twcc->next_feedback_send_time = GST_CLOCK_TIME_NONE;
+}
+
+static void
+rtp_twcc_manager_finalize (GObject * object)
+{
+  RTPTWCCManager *twcc = RTP_TWCC_MANAGER_CAST (object);
+
+  g_array_unref (twcc->recv_packets);
+  g_array_unref (twcc->sent_packets);
+  g_array_unref (twcc->parsed_packets);
+  g_queue_free_full (twcc->rtcp_buffers, (GDestroyNotify) gst_buffer_unref);
+
+  G_OBJECT_CLASS (rtp_twcc_manager_parent_class)->finalize (object);
+}
+
+static void
+rtp_twcc_manager_class_init (RTPTWCCManagerClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  gobject_class->finalize = rtp_twcc_manager_finalize;
+}
+
+/* Create a manager, deriving max_packets_per_rtcp from @mtu */
+RTPTWCCManager *
+rtp_twcc_manager_new (guint mtu)
+{
+  RTPTWCCManager *twcc = g_object_new (RTP_TYPE_TWCC_MANAGER, NULL);
+
+  rtp_twcc_manager_set_mtu (twcc, mtu);
+
+  return twcc;
+}
+
+/* Initialize a RecvPacket, preferring the packet's arrival time and
+ * falling back to the current time when no arrival time was recorded. */
+static void
+recv_packet_init (RecvPacket * packet, guint16 seqnum, RTPPacketInfo * pinfo)
+{
+  memset (packet, 0, sizeof (RecvPacket));
+  packet->seqnum = seqnum;
+
+  if (GST_CLOCK_TIME_IS_VALID (pinfo->arrival_time))
+    packet->ts = pinfo->arrival_time;
+  else
+    packet->ts = pinfo->current_time;
+}
+
+/* Scan caps structure fields named "extmap-<id>" for one whose value is
+ * @ext_name; returns the id (1-14) or 0 when not found. */
+static guint8
+_get_extmap_id_for_attribute (const GstStructure * s, const gchar * ext_name)
+{
+  guint i;
+  guint8 extmap_id = 0;
+  guint n_fields = gst_structure_n_fields (s);
+
+  for (i = 0; i < n_fields; i++) {
+    const gchar *field_name = gst_structure_nth_field_name (s, i);
+    if (g_str_has_prefix (field_name, "extmap-")) {
+      const gchar *str = gst_structure_get_string (s, field_name);
+      if (str && g_strcmp0 (str, ext_name) == 0) {
+        /* skip the "extmap-" prefix (7 chars) to parse the id */
+        gint64 id = g_ascii_strtoll (field_name + 7, NULL, 10);
+        /* one-byte header extension ids are restricted to 1..14 */
+        if (id > 0 && id < 15) {
+          extmap_id = id;
+          break;
+        }
+      }
+    }
+  }
+  return extmap_id;
+}
+
+/* Enable TWCC on the receive side if the caps advertise the TWCC extmap */
+void
+rtp_twcc_manager_parse_recv_ext_id (RTPTWCCManager * twcc,
+    const GstStructure * s)
+{
+  guint8 recv_ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR);
+  if (recv_ext_id > 0) {
+    twcc->recv_ext_id = recv_ext_id;
+    GST_INFO ("TWCC enabled for recv using extension id: %u",
+        twcc->recv_ext_id);
+  }
+}
+
+/* Enable TWCC on the send side if the caps advertise the TWCC extmap */
+void
+rtp_twcc_manager_parse_send_ext_id (RTPTWCCManager * twcc,
+    const GstStructure * s)
+{
+  guint8 send_ext_id = _get_extmap_id_for_attribute (s, TWCC_EXTMAP_STR);
+  if (send_ext_id > 0) {
+    twcc->send_ext_id = send_ext_id;
+    GST_INFO ("TWCC enabled for send using extension id: %u",
+        twcc->send_ext_id);
+  }
+}
+
+void
+rtp_twcc_manager_set_mtu (RTPTWCCManager * twcc, guint mtu)
+{
+  twcc->mtu = mtu;
+
+  /* worst case: 32 bytes of RTCP + FCI header overhead, then every group
+     of 7 packets needs one packet chunk (2 bytes) plus up to 7 two-byte
+     recv deltas (14 bytes), i.e. 16 bytes per 7 packets */
+  twcc->max_packets_per_rtcp = ((twcc->mtu - 32) * 7) / (2 + 14);
+}
+
+/* Interval between feedback reports; GST_CLOCK_TIME_NONE disables
+ * interval-based sending. */
+void
+rtp_twcc_manager_set_feedback_interval (RTPTWCCManager * twcc,
+    GstClockTime feedback_interval)
+{
+  twcc->feedback_interval = feedback_interval;
+}
+
+GstClockTime
+rtp_twcc_manager_get_feedback_interval (RTPTWCCManager * twcc)
+{
+  return twcc->feedback_interval;
+}
+
+/* Locate the 2-byte TWCC seqnum payload in the packet's one-byte header
+ * extension; returns TRUE and sets @data on success. */
+static gboolean
+_get_twcc_seqnum_data (RTPPacketInfo * pinfo, guint8 ext_id, gpointer * data)
+{
+  gboolean ret = FALSE;
+  guint size;
+
+  if (pinfo->header_ext &&
+      gst_rtp_buffer_get_extension_onebyte_header_from_bytes (pinfo->header_ext,
+          pinfo->header_ext_bit_pattern, ext_id, 0, data, &size)) {
+    /* the TWCC extension payload is exactly a 16-bit seqnum */
+    if (size == 2)
+      ret = TRUE;
+  }
+  return ret;
+}
+
+/* Write @seqnum (big-endian) into the packet's existing TWCC header
+ * extension; a packet without the extension is left untouched. */
+static void
+_set_twcc_seqnum_data (GstBuffer * buf, guint8 ext_id, guint16 seqnum)
+{
+  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
+  gpointer data;
+
+  if (gst_rtp_buffer_map (buf, GST_MAP_READWRITE, &rtp)) {
+    if (gst_rtp_buffer_get_extension_onebyte_header (&rtp,
+            ext_id, 0, &data, NULL)) {
+      GST_WRITE_UINT16_BE (data, seqnum);
+    }
+    gst_rtp_buffer_unmap (&rtp);
+  }
+}
+
+/* Stamp the next transport-wide seqnum on an outgoing packet; the
+ * buffer is made writable first since we modify its extension data. */
+static guint16
+rtp_twcc_manager_set_send_twcc_seqnum (RTPTWCCManager * twcc,
+    RTPPacketInfo * pinfo)
+{
+  guint16 seqnum = twcc->send_seqnum++;
+  pinfo->data = gst_buffer_make_writable (pinfo->data);
+  _set_twcc_seqnum_data (pinfo->data, twcc->send_ext_id, seqnum);
+  return seqnum;
+}
+
+/* Extract the transport-wide seqnum from an incoming packet.
+ * Returns -1 when TWCC is not negotiated or the extension is absent. */
+static gint32
+rtp_twcc_manager_get_recv_twcc_seqnum (RTPTWCCManager * twcc,
+    RTPPacketInfo * pinfo)
+{
+  gint32 val = -1;
+  gpointer data;
+
+  if (twcc->recv_ext_id == 0)
+    return val;
+
+  if (_get_twcc_seqnum_data (pinfo, twcc->recv_ext_id, &data)) {
+    val = GST_READ_UINT16_BE (data);
+  }
+
+  return val;
+}
+
+/* GCompareFunc ordering RecvPacket by seqnum, treating differences
+ * larger than 65000 as 16-bit wrap-around. */
+static gint
+_twcc_seqnum_sort (gconstpointer a, gconstpointer b)
+{
+  gint32 seqa = ((RecvPacket *) a)->seqnum;
+  gint32 seqb = ((RecvPacket *) b)->seqnum;
+  gint res = seqa - seqb;
+  if (res < -65000)
+    res = 1;
+  if (res > 65000)
+    res = -1;
+  return res;
+}
+
+/* Serialize the receive deltas into the FCI: 1 byte for small deltas,
+ * 2 bytes (big-endian) for large/negative deltas. Packets with any other
+ * status (e.g. not received) contribute no delta bytes. The caller must
+ * have sized fci_data accordingly (recv_deltas_size in the FCI builder). */
+static void
+rtp_twcc_write_recv_deltas (guint8 * fci_data, GArray * twcc_packets)
+{
+  guint i;
+  for (i = 0; i < twcc_packets->len; i++) {
+    RecvPacket *pkt = &g_array_index (twcc_packets, RecvPacket, i);
+
+    if (pkt->status == RTP_TWCC_PACKET_STATUS_SMALL_DELTA) {
+      GST_WRITE_UINT8 (fci_data, pkt->delta);
+      fci_data += 1;
+    } else if (pkt->status == RTP_TWCC_PACKET_STATUS_LARGE_NEGATIVE_DELTA) {
+      GST_WRITE_UINT16_BE (fci_data, pkt->delta);
+      fci_data += 2;
+    }
+  }
+}
+
+/* Append one or more run-length chunks covering @run_length packets of the
+ * same @status. The run-length field is 13 bits, so runs longer than 8191
+ * are split across multiple chunks. Each chunk is a 16-bit value appended
+ * to @packet_chunks. */
+static void
+rtp_twcc_write_run_length_chunk (GArray * packet_chunks,
+    RTPTWCCPacketStatus status, guint run_length)
+{
+  guint written = 0;
+  while (written < run_length) {
+    GstBitWriter writer;
+    guint16 data = 0;
+    /* 8191 = max value of the 13-bit run-length field */
+    guint len = MIN (run_length - written, 8191);
+
+    GST_LOG ("Writing a run-length of %u with status %u", len, status);
+
+    gst_bit_writer_init_with_data (&writer, (guint8 *) & data, 2, FALSE);
+    gst_bit_writer_put_bits_uint8 (&writer, RTP_TWCC_CHUNK_TYPE_RUN_LENGTH, 1);
+    gst_bit_writer_put_bits_uint8 (&writer, status, 2);
+    gst_bit_writer_put_bits_uint16 (&writer, len, 13);
+    g_array_append_val (packet_chunks, data);
+    written += len;
+  }
+}
+
+/* Incremental writer for 16-bit status-vector chunks: accumulates 1- or
+ * 2-bit status symbols and appends the finished chunk to packet_chunks. */
+typedef struct
+{
+  GArray *packet_chunks;        /* destination array of finished guint16 chunks */
+  GstBitWriter writer;          /* bit writer positioned inside 'data' */
+  guint16 data;                 /* the chunk currently being filled */
+  guint symbol_size;            /* bits per status symbol: 1 or 2 */
+} ChunkBitWriter;
+
+/* Start a fresh status-vector chunk: write the 1-bit chunk type and the
+ * 1-bit symbol-size flag, leaving 14 bits for status symbols. */
+static void
+chunk_bit_writer_reset (ChunkBitWriter * writer)
+{
+  writer->data = 0;
+  gst_bit_writer_init_with_data (&writer->writer,
+      (guint8 *) & writer->data, 2, FALSE);
+
+  gst_bit_writer_put_bits_uint8 (&writer->writer,
+      RTP_TWCC_CHUNK_TYPE_STATUS_VECTOR, 1);
+  /* 1 for 2-bit symbol-size, 0 for 1-bit */
+  gst_bit_writer_put_bits_uint8 (&writer->writer, writer->symbol_size - 1, 1);
+}
+
+/* Set the symbol size (1 or 2 bits) and reset to an empty chunk. */
+static void
+chunk_bit_writer_configure (ChunkBitWriter * writer, guint symbol_size)
+{
+  writer->symbol_size = symbol_size;
+  chunk_bit_writer_reset (writer);
+}
+
+/* TRUE when only the 2 header bits have been written (no symbols yet). */
+static gboolean
+chunk_bit_writer_is_empty (ChunkBitWriter * writer)
+{
+  return writer->writer.bit_size == 2;
+}
+
+/* TRUE when all 16 bits of the chunk have been written. */
+static gboolean
+chunk_bit_writer_is_full (ChunkBitWriter * writer)
+{
+  return writer->writer.bit_size == 16;
+}
+
+/* Number of status symbols that still fit in the current chunk. */
+static guint
+chunk_bit_writer_get_available_slots (ChunkBitWriter * writer)
+{
+  return (16 - writer->writer.bit_size) / writer->symbol_size;
+}
+
+/* Number of status symbols an entirely empty chunk can hold
+ * (14 payload bits / symbol size). */
+static guint
+chunk_bit_writer_get_total_slots (ChunkBitWriter * writer)
+{
+  return STATUS_VECTOR_MAX_CAPACITY / writer->symbol_size;
+}
+
+/* Append the in-progress chunk to packet_chunks and start a new one. */
+static void
+chunk_bit_writer_flush (ChunkBitWriter * writer)
+{
+  /* don't append a chunk if no bits have been written */
+  if (!chunk_bit_writer_is_empty (writer)) {
+    g_array_append_val (writer->packet_chunks, writer->data);
+    chunk_bit_writer_reset (writer);
+  }
+}
+
+/* Initialize the writer to append finished chunks to @packet_chunks,
+ * using @symbol_size bits per status symbol. */
+static void
+chunk_bit_writer_init (ChunkBitWriter * writer,
+    GArray * packet_chunks, guint symbol_size)
+{
+  writer->packet_chunks = packet_chunks;
+  chunk_bit_writer_configure (writer, symbol_size);
+}
+
+/* Append one status symbol, flushing the chunk automatically when the
+ * 16 bits are complete. */
+static void
+chunk_bit_writer_write (ChunkBitWriter * writer, RTPTWCCPacketStatus status)
+{
+  gst_bit_writer_put_bits_uint8 (&writer->writer, status, writer->symbol_size);
+  if (chunk_bit_writer_is_full (writer)) {
+    chunk_bit_writer_flush (writer);
+  }
+}
+
+/* Write @pkt's status into the current status-vector chunk, first emitting
+ * NOT_RECV symbols for any gap (missing_run) preceding it. When the gap is
+ * larger than what the current plus one full chunk can hold, the current
+ * chunk is padded out with NOT_RECV and the remainder of the gap is written
+ * as a (more compact) run-length chunk instead. */
+static void
+rtp_twcc_write_status_vector_chunk (ChunkBitWriter * writer, RecvPacket * pkt)
+{
+  if (pkt->missing_run > 0) {
+    guint available = chunk_bit_writer_get_available_slots (writer);
+    guint total = chunk_bit_writer_get_total_slots (writer);
+    guint i;
+
+    if (pkt->missing_run > (available + total)) {
+      /* here it is better to finish up the current status-chunk and then
+         go for run-length */
+      for (i = 0; i < available; i++) {
+        chunk_bit_writer_write (writer, RTP_TWCC_PACKET_STATUS_NOT_RECV);
+      }
+      rtp_twcc_write_run_length_chunk (writer->packet_chunks,
+          RTP_TWCC_PACKET_STATUS_NOT_RECV, pkt->missing_run - available);
+    } else {
+      for (i = 0; i < pkt->missing_run; i++) {
+        chunk_bit_writer_write (writer, RTP_TWCC_PACKET_STATUS_NOT_RECV);
+      }
+    }
+  }
+
+  chunk_bit_writer_write (writer, pkt->status);
+}
+
+/* Tracks the start of the current run of packets with identical status;
+ * the run length is accumulated in equal->equal_run. */
+typedef struct
+{
+  RecvPacket *equal;            /* first packet of the current equal-status run */
+} RunLengthHelper;
+
+/* Fold @pkt into the running equal-status count: a gap (missing_run) or a
+ * status change starts a new run, otherwise the current run is extended.
+ * The count is stored on the run's first packet (equal_run).
+ * NOTE(review): "lenght" in the name is a typo for "length"; renaming it
+ * also requires updating the caller in rtp_twcc_manager_add_fci. */
+static void
+run_lenght_helper_update (RunLengthHelper * rlh, RecvPacket * pkt)
+{
+  /* for missing packets we reset */
+  if (pkt->missing_run > 0) {
+    rlh->equal = NULL;
+  }
+
+  /* all status equal run */
+  if (rlh->equal == NULL) {
+    rlh->equal = pkt;
+    rlh->equal->equal_run = 0;
+  }
+
+  if (rlh->equal->status == pkt->status) {
+    rlh->equal->equal_run++;
+  } else {
+    rlh->equal = pkt;
+    rlh->equal->equal_run = 1;
+  }
+}
+
+/* Maximum number of status symbols a single status-vector chunk can carry
+ * for the given symbol size (14 bits of payload: 7 two-bit or 14 one-bit). */
+static guint
+_get_max_packets_capacity (guint symbol_size)
+{
+  if (symbol_size == 2)
+    return STATUS_VECTOR_TWO_BIT_MAX_CAPACITY;
+
+  return STATUS_VECTOR_MAX_CAPACITY;
+}
+
+/* Decide whether the run starting at @pkt is better encoded as a
+ * run-length chunk than as status-vector symbols. Only applicable when
+ * there is no preceding gap (missing_run == 0). */
+static gboolean
+_pkt_fits_run_length_chunk (RecvPacket * pkt, guint packets_per_chunks,
+    guint remaining_packets)
+{
+  if (pkt->missing_run == 0) {
+    /* the run holds at least as many equal packets as a status chunk could */
+    if (pkt->equal_run >= packets_per_chunks)
+      return TRUE;
+
+    /* we have more than one equal and not enough space for the remainder */
+    if (pkt->equal_run > 1 && remaining_packets > STATUS_VECTOR_MAX_CAPACITY)
+      return TRUE;
+
+    /* we have all equal packets for the remaining to write */
+    if (pkt->equal_run == remaining_packets)
+      return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Encode all packet statuses in @twcc_packets into @packet_chunks, choosing
+ * per run between run-length chunks and status-vector chunks of
+ * @symbol_size-bit symbols. */
+static void
+rtp_twcc_write_chunks (GArray * packet_chunks,
+    GArray * twcc_packets, guint symbol_size)
+{
+  ChunkBitWriter writer;
+  guint i;
+  guint packets_per_chunks = _get_max_packets_capacity (symbol_size);
+
+  chunk_bit_writer_init (&writer, packet_chunks, symbol_size);
+
+  for (i = 0; i < twcc_packets->len; i++) {
+    RecvPacket *pkt = &g_array_index (twcc_packets, RecvPacket, i);
+    guint remaining_packets = twcc_packets->len - i;
+
+    GST_LOG
+        ("About to write pkt: #%u missing_run: %u equal_run: %u status: %u, remaining_packets: %u",
+        pkt->seqnum, pkt->missing_run, pkt->equal_run, pkt->status,
+        remaining_packets);
+
+    /* we can only start a run-length chunk if the status-chunk is
+       completed */
+    if (chunk_bit_writer_is_empty (&writer)) {
+      /* first write in any preceding gaps, we use run-length
+         if it would take up more than one chunk (14/7) */
+      if (pkt->missing_run > packets_per_chunks) {
+        rtp_twcc_write_run_length_chunk (packet_chunks,
+            RTP_TWCC_PACKET_STATUS_NOT_RECV, pkt->missing_run);
+      }
+
+      /* we have a run of the same status, write a run-length chunk and skip
+         to the next point */
+      if (_pkt_fits_run_length_chunk (pkt, packets_per_chunks,
+              remaining_packets)) {
+
+        rtp_twcc_write_run_length_chunk (packet_chunks,
+            pkt->status, pkt->equal_run);
+        /* skip the packets covered by the run-length chunk */
+        i += pkt->equal_run - 1;
+        continue;
+      }
+    }
+
+    GST_LOG ("i=%u: Writing a %u-bit vector of status: %u",
+        i, symbol_size, pkt->status);
+    rtp_twcc_write_status_vector_chunk (&writer, pkt);
+  }
+  chunk_bit_writer_flush (&writer);
+}
+
+/* Build the TWCC FCI into @packet from the accumulated recv_packets:
+ * header (base seqnum, packet count, 24-bit base time, fb-pkt count),
+ * then packet-status chunks, then receive deltas. Consumes and clears
+ * twcc->recv_packets. Assumes recv_packets is non-empty — TODO confirm
+ * all callers guarantee this. */
+static void
+rtp_twcc_manager_add_fci (RTPTWCCManager * twcc, GstRTCPPacket * packet)
+{
+  RecvPacket *first, *last, *prev;
+  guint16 packet_count;
+  GstClockTime base_time;
+  GstClockTime ts_rounded;
+  guint i;
+  /* element size 2: each packet chunk is a 16-bit value */
+  GArray *packet_chunks = g_array_new (FALSE, FALSE, 2);
+  RTPTWCCHeader header;
+  guint header_size = sizeof (RTPTWCCHeader);
+  guint packet_chunks_size;
+  guint recv_deltas_size = 0;
+  guint16 fci_length;
+  guint16 fci_chunks;
+  guint8 *fci_data;
+  guint8 *fci_data_ptr;
+  RunLengthHelper rlh = { NULL };
+  /* 1-bit symbols unless a large/negative delta forces 2-bit symbols */
+  guint symbol_size = 1;
+  GstClockTimeDiff delta_ts;
+  gint64 delta_ts_rounded;
+  guint8 fb_pkt_count;
+
+  g_array_sort (twcc->recv_packets, _twcc_seqnum_sort);
+
+  /* get first and last packet */
+  first = &g_array_index (twcc->recv_packets, RecvPacket, 0);
+  last =
+      &g_array_index (twcc->recv_packets, RecvPacket,
+      twcc->recv_packets->len - 1);
+
+  packet_count = last->seqnum - first->seqnum + 1;
+  /* base time is expressed in REF_TIME_UNIT multiples (24-bit field) */
+  base_time = first->ts / REF_TIME_UNIT;
+  fb_pkt_count = (guint8) (twcc->fb_pkt_count % G_MAXUINT8);
+
+  GST_WRITE_UINT16_BE (header.base_seqnum, first->seqnum);
+  GST_WRITE_UINT16_BE (header.packet_count, packet_count);
+  GST_WRITE_UINT24_BE (header.base_time, base_time);
+  GST_WRITE_UINT8 (header.fb_pkt_count, fb_pkt_count);
+
+  /* quantize base_time to what the 24-bit field actually conveys, so
+     the deltas below are computed relative to the transmitted value */
+  base_time *= REF_TIME_UNIT;
+  ts_rounded = base_time;
+
+  GST_DEBUG ("Created TWCC feedback: base_seqnum: #%u, packet_count: %u, "
+      "base_time %" GST_TIME_FORMAT " fb_pkt_count: %u",
+      first->seqnum, packet_count, GST_TIME_ARGS (base_time), fb_pkt_count);
+
+  twcc->fb_pkt_count++;
+  twcc->expected_recv_seqnum = first->seqnum + packet_count;
+
+  /* calculate all deltas and check for gaps etc */
+  prev = first;
+  for (i = 0; i < twcc->recv_packets->len; i++) {
+    RecvPacket *pkt = &g_array_index (twcc->recv_packets, RecvPacket, i);
+    if (i != 0) {
+      pkt->missing_run = pkt->seqnum - prev->seqnum - 1;
+    }
+
+    /* deltas are carried quantized to DELTA_UNIT; accumulate the rounded
+       value so rounding errors do not build up */
+    delta_ts = GST_CLOCK_DIFF (ts_rounded, pkt->ts);
+    pkt->delta = delta_ts / DELTA_UNIT;
+    delta_ts_rounded = pkt->delta * DELTA_UNIT;
+    ts_rounded += delta_ts_rounded;
+
+    if (delta_ts_rounded < 0 || delta_ts_rounded > MAX_TS_DELTA) {
+      /* negative or out-of-range delta needs the 16-bit encoding and
+         forces 2-bit status symbols for the whole feedback */
+      pkt->status = RTP_TWCC_PACKET_STATUS_LARGE_NEGATIVE_DELTA;
+      recv_deltas_size += 2;
+      symbol_size = 2;
+    } else {
+      pkt->status = RTP_TWCC_PACKET_STATUS_SMALL_DELTA;
+      recv_deltas_size += 1;
+    }
+    run_lenght_helper_update (&rlh, pkt);
+
+    GST_LOG ("pkt: #%u, ts: %" GST_TIME_FORMAT
+        " ts_rounded: %" GST_TIME_FORMAT
+        " delta_ts: %" GST_STIME_FORMAT
+        " delta_ts_rounded: %" GST_STIME_FORMAT
+        " missing_run: %u, status: %u", pkt->seqnum,
+        GST_TIME_ARGS (pkt->ts), GST_TIME_ARGS (ts_rounded),
+        GST_STIME_ARGS (delta_ts), GST_STIME_ARGS (delta_ts_rounded),
+        pkt->missing_run, pkt->status);
+    prev = pkt;
+  }
+
+  rtp_twcc_write_chunks (packet_chunks, twcc->recv_packets, symbol_size);
+
+  packet_chunks_size = packet_chunks->len * 2;
+  fci_length = header_size + packet_chunks_size + recv_deltas_size;
+  /* FCI length is carried in 32-bit words, rounded up */
+  fci_chunks = (fci_length - 1) / sizeof (guint32) + 1;
+
+  if (!gst_rtcp_packet_fb_set_fci_length (packet, fci_chunks)) {
+    GST_ERROR ("Could not fit: %u packets", packet_count);
+    g_assert_not_reached ();
+  }
+
+  fci_data = gst_rtcp_packet_fb_get_fci (packet);
+  fci_data_ptr = fci_data;
+
+  memcpy (fci_data_ptr, &header, header_size);
+  fci_data_ptr += header_size;
+
+  memcpy (fci_data_ptr, packet_chunks->data, packet_chunks_size);
+  fci_data_ptr += packet_chunks_size;
+
+  rtp_twcc_write_recv_deltas (fci_data_ptr, twcc->recv_packets);
+
+  GST_MEMDUMP ("twcc-header:", (guint8 *) & header, header_size);
+  GST_MEMDUMP ("packet-chunks:", (guint8 *) packet_chunks->data,
+      packet_chunks_size);
+  GST_MEMDUMP ("full fci:", fci_data, fci_length);
+
+  g_array_unref (packet_chunks);
+  g_array_set_size (twcc->recv_packets, 0);
+}
+
+/* Create one TWCC RTCP feedback buffer from the currently accumulated
+ * recv_packets and queue it on twcc->rtcp_buffers for later sending. */
+static void
+rtp_twcc_manager_create_feedback (RTPTWCCManager * twcc)
+{
+  GstBuffer *buf;
+  GstRTCPBuffer rtcp = GST_RTCP_BUFFER_INIT;
+  GstRTCPPacket packet;
+
+  buf = gst_rtcp_buffer_new (twcc->mtu);
+
+  gst_rtcp_buffer_map (buf, GST_MAP_READWRITE, &rtcp);
+
+  gst_rtcp_buffer_add_packet (&rtcp, GST_RTCP_TYPE_RTPFB, &packet);
+
+  gst_rtcp_packet_fb_set_type (&packet, GST_RTCP_RTPFB_TYPE_TWCC);
+  /* NOTE(review): 1 looks like the "unset" sentinel for recv_sender_ssrc;
+     confirm against its initialization elsewhere in this file */
+  if (twcc->recv_sender_ssrc != 1)
+    gst_rtcp_packet_fb_set_sender_ssrc (&packet, twcc->recv_sender_ssrc);
+  gst_rtcp_packet_fb_set_media_ssrc (&packet, twcc->recv_media_ssrc);
+
+  rtp_twcc_manager_add_fci (twcc, &packet);
+
+  gst_rtcp_buffer_unmap (&rtcp);
+
+  g_queue_push_tail (twcc->rtcp_buffers, buf);
+}
+
+/* we have calculated a (very pessimistic) max-packets per RTCP feedback,
+   so this is to make sure we don't exceed that */
+static gboolean
+_exceeds_max_packets (RTPTWCCManager * twcc, guint16 seqnum)
+{
+  /* would adding one more packet overflow the per-feedback budget? */
+  if (twcc->recv_packets->len + 1 > twcc->max_packets_per_rtcp)
+    return TRUE;
+
+  return FALSE;
+}
+
+/* in this case we could have lost the packet with the marker bit,
+   so with a large (30) amount of packets, lost packets and still no marker,
+   we send a feedback anyway */
+static gboolean
+_many_packets_some_lost (RTPTWCCManager * twcc, guint16 seqnum)
+{
+  RecvPacket *first;
+  guint16 packet_count;
+  guint received_packets = twcc->recv_packets->len;
+  guint lost_packets;
+  if (received_packets == 0)
+    return FALSE;
+
+  first = &g_array_index (twcc->recv_packets, RecvPacket, 0);
+  /* span between the first accumulated seqnum and the incoming one */
+  packet_count = seqnum - first->seqnum + 1;
+
+  /* check if we lost half of the threshold */
+  lost_packets = packet_count - received_packets;
+  if (received_packets >= 30 && lost_packets >= 60)
+    return TRUE;
+
+  /* we have lost the marker bit for some and lost some */
+  if (twcc->packet_count_no_marker >= 10 && lost_packets >= 60)
+    return TRUE;
+
+  return FALSE;
+}
+
+/* Process one received RTP packet for TWCC bookkeeping: extract its TWCC
+ * seqnum, store it, and decide whether a feedback message should be sent
+ * now (MTU budget exceeded, interval elapsed, marker bit, or heavy loss).
+ * Returns TRUE when feedback was created and is ready to be sent. */
+gboolean
+rtp_twcc_manager_recv_packet (RTPTWCCManager * twcc, RTPPacketInfo * pinfo)
+{
+  gboolean send_feedback = FALSE;
+  RecvPacket packet;
+  gint32 seqnum;
+  gint diff;
+
+  seqnum = rtp_twcc_manager_get_recv_twcc_seqnum (twcc, pinfo);
+  if (seqnum == -1)
+    return FALSE;
+
+  /* if this packet would exceed the capacity of our MTU, we create a feedback
+     with the current packets, and start over with this one */
+  if (_exceeds_max_packets (twcc, seqnum)) {
+    GST_INFO ("twcc-seqnum: %u would overflow max packets: %u, create feedback"
+        " with current packets", seqnum, twcc->max_packets_per_rtcp);
+    rtp_twcc_manager_create_feedback (twcc);
+    send_feedback = TRUE;
+  }
+
+  /* we can have multiple ssrcs here, so just pick the first one */
+  if (twcc->recv_media_ssrc == -1)
+    twcc->recv_media_ssrc = pinfo->ssrc;
+
+  /* check if we are reordered, and treat it as lost if we already sent
+     a feedback msg with a higher seqnum. If the diff is huge, treat
+     it as a restart of a stream */
+  diff = gst_rtp_buffer_compare_seqnum (twcc->expected_recv_seqnum, seqnum);
+  if (twcc->fb_pkt_count > 0 && diff < 0) {
+    GST_INFO ("Received out of order packet (%u after %u), treating as lost",
+        seqnum, twcc->expected_recv_seqnum);
+    return FALSE;
+  }
+
+  /* drop exact duplicates of the most recently stored packet */
+  if (twcc->recv_packets->len > 0) {
+    RecvPacket *last = &g_array_index (twcc->recv_packets, RecvPacket,
+        twcc->recv_packets->len - 1);
+
+    diff = gst_rtp_buffer_compare_seqnum (last->seqnum, seqnum);
+    if (diff == 0) {
+      GST_INFO ("Received duplicate packet (%u), dropping", seqnum);
+      return FALSE;
+    }
+  }
+
+  /* store the packet for Transport-wide RTCP feedback message */
+  recv_packet_init (&packet, seqnum, pinfo);
+  g_array_append_val (twcc->recv_packets, packet);
+  twcc->last_seqnum = seqnum;
+
+  GST_LOG ("Receive: twcc-seqnum: %u, pt: %u, marker: %d, ts: %"
+      GST_TIME_FORMAT, seqnum, pinfo->pt, pinfo->marker,
+      GST_TIME_ARGS (pinfo->arrival_time));
+
+  if (!pinfo->marker)
+    twcc->packet_count_no_marker++;
+
+  /* are we sending on an interval, or based on marker bit */
+  if (GST_CLOCK_TIME_IS_VALID (twcc->feedback_interval)) {
+    if (!GST_CLOCK_TIME_IS_VALID (twcc->next_feedback_send_time))
+      twcc->next_feedback_send_time =
+          pinfo->running_time + twcc->feedback_interval;
+
+    if (pinfo->running_time >= twcc->next_feedback_send_time) {
+      rtp_twcc_manager_create_feedback (twcc);
+      send_feedback = TRUE;
+
+      /* advance past missed deadlines so we never schedule in the past */
+      while (pinfo->running_time >= twcc->next_feedback_send_time)
+        twcc->next_feedback_send_time += twcc->feedback_interval;
+    }
+  } else if (pinfo->marker || _many_packets_some_lost (twcc, seqnum)) {
+    rtp_twcc_manager_create_feedback (twcc);
+    send_feedback = TRUE;
+
+    twcc->packet_count_no_marker = 0;
+  }
+
+  return send_feedback;
+}
+
+/* Rewrite the sender-ssrc of the first (feedback) packet in an already
+ * serialized RTCP buffer. */
+static void
+_change_rtcp_fb_sender_ssrc (GstBuffer * buf, guint32 sender_ssrc)
+{
+  GstRTCPBuffer rtcp = GST_RTCP_BUFFER_INIT;
+  GstRTCPPacket packet;
+  gst_rtcp_buffer_map (buf, GST_MAP_READWRITE, &rtcp);
+  gst_rtcp_buffer_get_first_packet (&rtcp, &packet);
+  gst_rtcp_packet_fb_set_sender_ssrc (&packet, sender_ssrc);
+  gst_rtcp_buffer_unmap (&rtcp);
+}
+
+/* Pop the oldest queued TWCC feedback buffer, rewriting its sender-ssrc
+ * first when it differs from @sender_ssrc. Returns NULL when no feedback
+ * is queued; the caller assumes ownership of the returned buffer.
+ *
+ * Fix: parameter type changed from guint to guint32 so the definition
+ * matches the prototype declared in rtptwcc.h. */
+GstBuffer *
+rtp_twcc_manager_get_feedback (RTPTWCCManager * twcc, guint32 sender_ssrc)
+{
+  GstBuffer *buf;
+  buf = g_queue_pop_head (twcc->rtcp_buffers);
+
+  if (buf && twcc->recv_sender_ssrc != sender_ssrc) {
+    _change_rtcp_fb_sender_ssrc (buf, sender_ssrc);
+    twcc->recv_sender_ssrc = sender_ssrc;
+  }
+
+  return buf;
+}
+
+/* Initialize a SentPacket record for a just-sent RTP packet; remote and
+ * socket timestamps start as "none" and are filled in when feedback (or a
+ * socket timestamp) becomes available. */
+static void
+sent_packet_init (SentPacket * packet, guint16 seqnum, RTPPacketInfo * pinfo)
+{
+  packet->seqnum = seqnum;
+  packet->ts = pinfo->current_time;
+  packet->size = pinfo->payload_len;
+  packet->pt = pinfo->pt;
+  packet->remote_ts = GST_CLOCK_TIME_NONE;
+  packet->socket_ts = GST_CLOCK_TIME_NONE;
+  packet->lost = FALSE;
+}
+
+/* Stamp an outgoing RTP packet with the next TWCC seqnum and record it in
+ * sent_packets for later matching against incoming feedback. A no-op when
+ * no send extension id has been negotiated. */
+void
+rtp_twcc_manager_send_packet (RTPTWCCManager * twcc, RTPPacketInfo * pinfo)
+{
+  SentPacket packet;
+  guint16 seqnum;
+
+  if (twcc->send_ext_id == 0)
+    return;
+
+  seqnum = rtp_twcc_manager_set_send_twcc_seqnum (twcc, pinfo);
+
+  sent_packet_init (&packet, seqnum, pinfo);
+  g_array_append_val (twcc->sent_packets, packet);
+
+
+  GST_LOG ("Send: twcc-seqnum: %u, pt: %u, marker: %d, ts: %"
+      GST_TIME_FORMAT, seqnum, pinfo->pt, pinfo->marker,
+      GST_TIME_ARGS (pinfo->current_time));
+}
+
+/* Append a parsed-feedback entry with all timestamps/deltas marked as
+ * "none"; only seqnum and status are known at this point. */
+static void
+_add_twcc_packet (GArray * twcc_packets, guint16 seqnum, guint status)
+{
+  RTPTWCCPacket packet;
+  memset (&packet, 0, sizeof (RTPTWCCPacket));
+  packet.local_ts = GST_CLOCK_TIME_NONE;
+  packet.remote_ts = GST_CLOCK_TIME_NONE;
+  packet.local_delta = GST_CLOCK_STIME_NONE;
+  packet.remote_delta = GST_CLOCK_STIME_NONE;
+  packet.delta_delta = GST_CLOCK_STIME_NONE;
+  packet.seqnum = seqnum;
+  packet.status = status;
+  g_array_append_val (twcc_packets, packet);
+}
+
+/* Parse a run-length chunk (2-bit status + 13-bit run length) from @reader
+ * and append one entry per covered packet, capped to @remaining_packets.
+ * Returns the number of packets consumed. */
+static guint
+_parse_run_length_chunk (GstBitReader * reader, GArray * twcc_packets,
+    guint16 seqnum_offset, guint remaining_packets)
+{
+  guint16 run_length;
+  guint8 status_code;
+  guint i;
+
+  gst_bit_reader_get_bits_uint8 (reader, &status_code, 2);
+  gst_bit_reader_get_bits_uint16 (reader, &run_length, 13);
+
+  run_length = MIN (remaining_packets, run_length);
+
+  for (i = 0; i < run_length; i++) {
+    _add_twcc_packet (twcc_packets, seqnum_offset + i, status_code);
+  }
+
+  return run_length;
+}
+
+/* Parse a status-vector chunk: a 1-bit symbol-size flag followed by 14 bits
+ * of 1- or 2-bit status symbols. Appends one entry per symbol, capped to
+ * @remaining_packets. Returns the number of packets consumed. */
+static guint
+_parse_status_vector_chunk (GstBitReader * reader, GArray * twcc_packets,
+    guint16 seqnum_offset, guint remaining_packets)
+{
+  guint8 symbol_size;
+  guint num_bits;
+  guint i;
+
+  gst_bit_reader_get_bits_uint8 (reader, &symbol_size, 1);
+  /* flag 0 => 1-bit symbols, flag 1 => 2-bit symbols */
+  symbol_size += 1;
+  num_bits = MIN (remaining_packets, 14 / symbol_size);
+
+  for (i = 0; i < num_bits; i++) {
+    guint8 status_code;
+    if (gst_bit_reader_get_bits_uint8 (reader, &status_code, symbol_size))
+      _add_twcc_packet (twcc_packets, seqnum_offset + i, status_code);
+  }
+
+  return num_bits;
+}
+
+/* Remove all locally stored packets that have been reported
+   back to us */
+static void
+_prune_sent_packets (RTPTWCCManager * twcc, GArray * twcc_packets)
+{
+  SentPacket *first;
+  RTPTWCCPacket *last;
+  guint16 last_idx;
+
+  if (twcc_packets->len == 0 || twcc->sent_packets->len == 0)
+    return;
+
+  first = &g_array_index (twcc->sent_packets, SentPacket, 0);
+  last = &g_array_index (twcc_packets, RTPTWCCPacket, twcc_packets->len - 1);
+
+  /* index of the last reported packet within sent_packets; relies on
+     sent_packets being contiguous in seqnum (16-bit arithmetic wraps) */
+  last_idx = last->seqnum - first->seqnum;
+
+  /* note: removes packets *before* the last reported one, which itself
+     stays in the array */
+  if (last_idx < twcc->sent_packets->len)
+    g_array_remove_range (twcc->sent_packets, 0, last_idx);
+}
+
+/* Compare an incoming feedback packet against what we expected next and
+ * synthesize NOT_RECV entries for any seqnum gap. Feedback that arrives
+ * out of order (fb_pkt_count going backwards, or an older base seqnum) is
+ * processed without updating the expectations. */
+static void
+_check_for_lost_packets (RTPTWCCManager * twcc, GArray * twcc_packets,
+    guint16 base_seqnum, guint16 packet_count, guint8 fb_pkt_count)
+{
+  guint packets_lost;
+  gint8 fb_pkt_count_diff;
+  guint i;
+
+  /* first packet */
+  if (twcc->first_fci_parse) {
+    twcc->first_fci_parse = FALSE;
+    goto done;
+  }
+
+  /* signed 8-bit diff handles the fb_pkt_count wrap-around */
+  fb_pkt_count_diff =
+      (gint8) (fb_pkt_count - twcc->expected_parsed_fb_pkt_count);
+
+  /* we have gone backwards, don't reset the expectations,
+     but process the packet nonetheless */
+  if (fb_pkt_count_diff < 0) {
+    GST_DEBUG ("feedback packet count going backwards (%u < %u)",
+        fb_pkt_count, twcc->expected_parsed_fb_pkt_count);
+    return;
+  }
+
+  /* we have jumped forwards, reset expectations, but don't trigger
+     lost packets in case the missing fb-packet(s) arrive later */
+  if (fb_pkt_count_diff > 0) {
+    GST_DEBUG ("feedback packet count jumped ahead (%u > %u)",
+        fb_pkt_count, twcc->expected_parsed_fb_pkt_count);
+    goto done;
+  }
+
+  if (base_seqnum < twcc->expected_parsed_seqnum) {
+    GST_DEBUG ("twcc seqnum is older than expected (%u < %u)", base_seqnum,
+        twcc->expected_parsed_seqnum);
+    return;
+  }
+
+  /* the gap between expectation and reality is reported as lost */
+  packets_lost = base_seqnum - twcc->expected_parsed_seqnum;
+  for (i = 0; i < packets_lost; i++) {
+    _add_twcc_packet (twcc_packets, twcc->expected_parsed_seqnum + i,
+        RTP_TWCC_PACKET_STATUS_NOT_RECV);
+  }
+
+done:
+  twcc->expected_parsed_seqnum = base_seqnum + packet_count;
+  twcc->expected_parsed_fb_pkt_count = fb_pkt_count + 1;
+  return;
+}
+
+/* Parse a received TWCC FCI into an array of RTPTWCCPacket: read the
+ * header, expand the status chunks, then apply the receive deltas and
+ * match each reported seqnum against our sent_packets bookkeeping.
+ * Returns a newly allocated GArray (caller owns it), or NULL when the
+ * FCI is too short to contain a valid header.
+ *
+ * Fix: the delta-reading loop previously read 1-2 bytes from fci_data
+ * BEFORE validating fci_parsed against fci_length, so a malformed packet
+ * could cause an out-of-bounds read. The bounds check now happens before
+ * the read. */
+GArray *
+rtp_twcc_manager_parse_fci (RTPTWCCManager * twcc,
+    guint8 * fci_data, guint fci_length)
+{
+  GArray *twcc_packets;
+  guint16 base_seqnum;
+  guint16 packet_count;
+  GstClockTime base_time;
+  GstClockTime ts_rounded;
+  guint8 fb_pkt_count;
+  guint packets_parsed = 0;
+  guint fci_parsed;
+  guint i;
+  SentPacket *first_sent_pkt = NULL;
+
+  /* 8 bytes of header plus at least one chunk */
+  if (fci_length < 10) {
+    GST_WARNING ("Malformed TWCC RTCP feedback packet");
+    return NULL;
+  }
+
+  base_seqnum = GST_READ_UINT16_BE (&fci_data[0]);
+  packet_count = GST_READ_UINT16_BE (&fci_data[2]);
+  base_time = GST_READ_UINT24_BE (&fci_data[4]) * REF_TIME_UNIT;
+  fb_pkt_count = fci_data[7];
+
+  GST_DEBUG ("Parsed TWCC feedback: base_seqnum: #%u, packet_count: %u, "
+      "base_time %" GST_TIME_FORMAT " fb_pkt_count: %u",
+      base_seqnum, packet_count, GST_TIME_ARGS (base_time), fb_pkt_count);
+
+  twcc_packets = g_array_sized_new (FALSE, FALSE,
+      sizeof (RTPTWCCPacket), packet_count);
+
+  _check_for_lost_packets (twcc, twcc_packets,
+      base_seqnum, packet_count, fb_pkt_count);
+
+  /* expand the 16-bit packet-status chunks that follow the header */
+  fci_parsed = 8;
+  while (packets_parsed < packet_count && (fci_parsed + 1) < fci_length) {
+    GstBitReader reader = GST_BIT_READER_INIT (&fci_data[fci_parsed], 2);
+    guint8 chunk_type;
+    guint seqnum_offset = base_seqnum + packets_parsed;
+    guint remaining_packets = packet_count - packets_parsed;
+
+    gst_bit_reader_get_bits_uint8 (&reader, &chunk_type, 1);
+
+    if (chunk_type == RTP_TWCC_CHUNK_TYPE_RUN_LENGTH) {
+      packets_parsed += _parse_run_length_chunk (&reader,
+          twcc_packets, seqnum_offset, remaining_packets);
+    } else {
+      packets_parsed += _parse_status_vector_chunk (&reader,
+          twcc_packets, seqnum_offset, remaining_packets);
+    }
+    fci_parsed += 2;
+  }
+
+  if (twcc->sent_packets->len > 0)
+    first_sent_pkt = &g_array_index (twcc->sent_packets, SentPacket, 0);
+
+  /* apply the receive deltas and match against sent packets */
+  ts_rounded = base_time;
+  for (i = 0; i < twcc_packets->len; i++) {
+    RTPTWCCPacket *pkt = &g_array_index (twcc_packets, RTPTWCCPacket, i);
+    gint16 delta = 0;
+    GstClockTimeDiff delta_ts;
+    guint delta_size = 0;
+
+    if (pkt->status == RTP_TWCC_PACKET_STATUS_SMALL_DELTA)
+      delta_size = 1;
+    else if (pkt->status == RTP_TWCC_PACKET_STATUS_LARGE_NEGATIVE_DELTA)
+      delta_size = 2;
+
+    /* bounds-check BEFORE reading the delta to avoid reading past the
+       end of fci_data on malformed feedback */
+    if (fci_parsed + delta_size > fci_length) {
+      GST_WARNING ("Malformed TWCC RTCP feedback packet");
+      g_array_set_size (twcc_packets, 0);
+      break;
+    }
+
+    if (delta_size == 1) {
+      delta = fci_data[fci_parsed];
+    } else if (delta_size == 2) {
+      delta = GST_READ_UINT16_BE (&fci_data[fci_parsed]);
+    }
+    fci_parsed += delta_size;
+
+    if (pkt->status != RTP_TWCC_PACKET_STATUS_NOT_RECV) {
+      delta_ts = delta * DELTA_UNIT;
+      ts_rounded += delta_ts;
+      pkt->remote_ts = ts_rounded;
+
+      GST_LOG ("pkt: #%u, remote_ts: %" GST_TIME_FORMAT
+          " delta_ts: %" GST_STIME_FORMAT
+          " status: %u", pkt->seqnum,
+          GST_TIME_ARGS (pkt->remote_ts), GST_STIME_ARGS (delta_ts),
+          pkt->status);
+    }
+
+    if (first_sent_pkt) {
+      SentPacket *found = NULL;
+      /* 16-bit index arithmetic handles seqnum wrap-around */
+      guint16 sent_idx = pkt->seqnum - first_sent_pkt->seqnum;
+      if (sent_idx < twcc->sent_packets->len)
+        found = &g_array_index (twcc->sent_packets, SentPacket, sent_idx);
+      if (found && found->seqnum == pkt->seqnum) {
+        /* prefer the kernel/socket timestamp when available */
+        if (GST_CLOCK_TIME_IS_VALID (found->socket_ts)) {
+          pkt->local_ts = found->socket_ts;
+        } else {
+          pkt->local_ts = found->ts;
+        }
+        pkt->size = found->size;
+        pkt->pt = found->pt;
+
+        GST_LOG ("matching pkt: #%u with local_ts: %" GST_TIME_FORMAT
+            " size: %u", pkt->seqnum, GST_TIME_ARGS (pkt->local_ts), pkt->size);
+      }
+    }
+  }
+
+  _prune_sent_packets (twcc, twcc_packets);
+
+  return twcc_packets;
+}
diff --git a/gst/rtpmanager/rtptwcc.h b/gst/rtpmanager/rtptwcc.h
new file mode 100644
index 0000000000..a826e9a4c9
--- /dev/null
+++ b/gst/rtpmanager/rtptwcc.h
@@ -0,0 +1,78 @@
+/* GStreamer
+ * Copyright (C) 2019 Pexip (http://pexip.com/)
+ * @author: Havard Graff <havard@pexip.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __RTP_TWCC_H__
+#define __RTP_TWCC_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/rtp.h>
+#include "rtpstats.h"
+
+typedef struct _RTPTWCCPacket RTPTWCCPacket;
+typedef enum _RTPTWCCPacketStatus RTPTWCCPacketStatus;
+
+G_DECLARE_FINAL_TYPE (RTPTWCCManager, rtp_twcc_manager, RTP, TWCC_MANAGER, GObject)
+#define RTP_TYPE_TWCC_MANAGER (rtp_twcc_manager_get_type())
+#define RTP_TWCC_MANAGER_CAST(obj) ((RTPTWCCManager *)(obj))
+
+/* Per-packet status symbols carried in TWCC feedback chunks. */
+enum _RTPTWCCPacketStatus
+{
+  RTP_TWCC_PACKET_STATUS_NOT_RECV = 0,  /* packet was not received */
+  RTP_TWCC_PACKET_STATUS_SMALL_DELTA = 1,       /* received, 1-byte delta */
+  RTP_TWCC_PACKET_STATUS_LARGE_NEGATIVE_DELTA = 2,      /* received, 2-byte delta */
+};
+
+/* One packet's worth of parsed TWCC feedback, as produced by
+ * rtp_twcc_manager_parse_fci(). Timestamps/deltas not (yet) known are set
+ * to GST_CLOCK_TIME_NONE / GST_CLOCK_STIME_NONE. */
+struct _RTPTWCCPacket
+{
+  GstClockTime local_ts;        /* local send time (socket ts when available) */
+  GstClockTime remote_ts;       /* remote arrival time from the feedback */
+  GstClockTimeDiff local_delta;
+  GstClockTimeDiff remote_delta;
+  GstClockTimeDiff delta_delta;
+  RTPTWCCPacketStatus status;   /* received / not-received / delta size */
+  guint16 seqnum;               /* transport-wide sequence number */
+  guint size;                   /* payload size of the matched sent packet */
+  guint8 pt;                    /* payload type of the matched sent packet */
+};
+
+RTPTWCCManager * rtp_twcc_manager_new (guint mtu);
+
+void rtp_twcc_manager_parse_recv_ext_id (RTPTWCCManager * twcc,
+ const GstStructure * s);
+void rtp_twcc_manager_parse_send_ext_id (RTPTWCCManager * twcc,
+ const GstStructure * s);
+
+void rtp_twcc_manager_set_mtu (RTPTWCCManager * twcc, guint mtu);
+void rtp_twcc_manager_set_feedback_interval (RTPTWCCManager * twcc,
+ GstClockTime feedback_interval);
+GstClockTime rtp_twcc_manager_get_feedback_interval (RTPTWCCManager * twcc);
+
+gboolean rtp_twcc_manager_recv_packet (RTPTWCCManager * twcc,
+ RTPPacketInfo * pinfo);
+void rtp_twcc_manager_send_packet (RTPTWCCManager * twcc,
+ RTPPacketInfo * pinfo);
+
+GstBuffer * rtp_twcc_manager_get_feedback (RTPTWCCManager * twcc,
+ guint32 sender_ssrc);
+
+GArray * rtp_twcc_manager_parse_fci (RTPTWCCManager * twcc,
+ guint8 * fci_data, guint fci_length);
+
+#endif /* __RTP_TWCC_H__ */
diff --git a/gst/rtsp/COPYING.MIT b/gst/rtsp/COPYING.MIT
new file mode 100644
index 0000000000..6369b6da9e
--- /dev/null
+++ b/gst/rtsp/COPYING.MIT
@@ -0,0 +1,21 @@
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
diff --git a/gst/rtsp/README b/gst/rtsp/README
new file mode 100644
index 0000000000..845086341a
--- /dev/null
+++ b/gst/rtsp/README
@@ -0,0 +1,377 @@
+RTSP source
+-----------
+
+The RTSP source establishes a connection to an RTSP server and sets up
+the UDP sources and RTP session handlers.
+
+An RTSP session is created as follows:
+
+- Parse RTSP URL:
+
+ ex:
+ rtsp://thread:5454/south-rtsp.mp3
+
+- Open a connection to the server with the url. All further conversation with
+ the server should be done with this connection. Each request/reply has
+ a CSeq number added to the header.
+
+- Send a DESCRIBE request for the url. We currently support a response in
+ SDP.
+
+ ex:
+
+ >> DESCRIBE rtsp://thread:5454/south-rtsp.mp3 RTSP/1.0
+ >> Accept: application/sdp
+ >> CSeq: 0
+ >>
+ << RTSP/1.0 200 OK
+ << Content-Length: 84
+ << Content-Type: application/sdp
+ << CSeq: 0
+ << Date: Wed May 11 13:09:37 2005 GMT
+ <<
+ << v=0
+ << o=- 0 0 IN IP4 192.168.1.1
+ << s=No Title
+ << m=audio 0 RTP/AVP 14
+ << a=control:streamid=0
+
+- Parse the SDP message, for each stream (m=) we create a GstRTPStream. We need
+ to allocate two local UDP ports for receiving the RTP and RTCP data because we
+ need to send the port numbers to the server in the next request.
+
+ In RTSPSrc we first create an element that can handle the udp://0.0.0.0:0 uri. This
+ will create an udp source element with a random port number. We get the port
+ number by getting the "port" property of the element after setting the element to
+ PAUSED. This element is used for the RTP packets and has to be an even number. If
+ the random port number is not an even number we retry to allocate a new udp source.
+
+ We then create another UDP source element with the next (uneven) port number to
+ receive the RTCP packet on. After this step we have two udp ports we can use to
+ accept RTP packets.
+
+ +-----------------+
+ | +------------+ |
+ | | udpsrc0 | |
+ | | port=5000 | |
+ | +------------+ |
+ | +------------+ |
+ | | udpsrc1 | |
+ | | port=5001 | |
+ | +------------+ |
+ +-----------------+
+
+- Send a SETUP message to the server with the RTP ports. We get the setup URI from
+ the a= attribute in the SDP message. This can be an absolute URL or a relative
+ url.
+
+ ex:
+
+ >> SETUP rtsp://thread:5454/south-rtsp.mp3/streamid=0 RTSP/1.0
+ >> CSeq: 1
+ >> Transport: RTP/AVP/UDP;unicast;client_port=5000-5001,RTP/AVP/UDP;multicast,RTP/AVP/TCP
+ >>
+ << RTSP/1.0 200 OK
+ << Transport: RTP/AVP/UDP;unicast;client_port=5000-5001;server_port=6000-6001
+ << CSeq: 1
+ << Date: Wed May 11 13:21:43 2005 GMT
+ << Session: 5d5cb94413288ccd
+ <<
+
+ The client needs to send the local ports to the server along with the supported
+ transport types. The server selects the final transport which it returns in the
+ Transport header field. The server also includes its ports where RTP and RTCP
+ messages can be sent to.
+
+ In the above example UDP was chosen as a transport. At this point the RTSPSrc element
+ will further configure its elements to process this stream.
+
+ The RTSPSrc will create and connect an RTP session manager element and will
+ connect it to the src pads of the udp element. The data pad from the RTP session
+ manager is ghostpadded to RTPSrc.
+ The RTCP pad of the rtpdec is routed to a new udpsink that sends data to the RTCP
+ port of the server as returned in the Transport: header field.
+
+ +---------------------------------------------+
+ | +------------+ |
+ | | udpsrc0 | +--------+ |
+ | | port=5000 ----- rtpdec --------------------
+ | +------------+ | | |
+ | +------------+ | | +------------+ |
+ | | udpsrc1 ----- RTCP ---- udpsink | |
+ | | port=5001 | +--------+ | port=6001 | |
+ | +------------+ +------------+ |
+ +---------------------------------------------+
+
+ The output type of rtpdec is configured as the media type specified in the SDP
+ message.
+
+- All the elements are set to PAUSED/PLAYING and the PLAY RTSP message is
+ sent.
+
+ >> PLAY rtsp://thread:5454/south-rtsp.mp3 RTSP/1.0
+ >> CSeq: 2
+ >> Session: 5d5cb94413288ccd
+ >>
+ << RTSP/1.0 200 OK
+ << CSeq: 2
+ << Date: Wed May 11 13:21:43 2005 GMT
+ << Session: 5d5cb94413288ccd
+ <<
+
+- The udp source elements receive data from that point and the RTP/RTCP messages
+ are processed by the elements.
+
+- In the case of interleaved mode, the SETUP method yields:
+
+ >> SETUP rtsp://thread:5454/south-rtsp.mp3/streamid=0 RTSP/1.0
+ >> CSeq: 1
+ >> Transport: RTP/AVP/UDP;unicast;client_port=5000-5001,RTP/AVP/UDP;multicast,RTP/AVP/TCP
+ >>
+ << RTSP/1.0 200 OK
+ << Transport: RTP/AVP/TCP;interleaved=0-1
+ << CSeq: 1
+ << Date: Wed May 11 13:21:43 2005 GMT
+ << Session: 5d5cb94413288ccd
+ <<
+
+ This means that RTP/RTCP messages will be sent on channel 0/1 respectively and that
+ the data will be received on the same connection as the RTSP connection.
+
+ At this point, we remove the UDP source elements as we don't need them anymore. We
+ set up the rtpsess session manager element though as follows:
+
+ +---------------------------------------------+
+ | +------------+ |
+ | | _loop() | +--------+ |
+ | | ----- rtpses --------------------
+ | | | | | |
+ | | | | | +------------+ |
+ | | ----- RTCP ---- udpsink | |
+ | | | +--------+ | port=6001 | |
+ | +------------+ +------------+ |
+ +---------------------------------------------+
+
+ We start an internal task that reads from the RTSP connection, waiting
+ for data. The received data is then pushed to the rtpdec element.
+
+ When reading from the RTSP connection we receive data packets in the
+ following layout (see also RFC2326)
+
+ $<1 byte channel><2 bytes length, big endian><length bytes of data>
+
+
+RTSP server
+-----------
+
+An RTSP server listens on a port (default 554) for client connections. The client
+typically keeps this channel open during the RTSP session to instruct the server
+to pause/play/stop the stream.
+
+The server exposes a stream consisting of one or more media streams using an
+URL. The media streams are typically audio and video.
+
+ ex:
+ rtsp://thread:5454/alien-rtsp.mpeg
+
+ exposes an audio/video stream. The video is mpeg packetized in RTP and
+ the audio is mp3 in RTP.
+
+The streaming server typically uses a different channel to send the media
+data to clients, typically using RTP over UDP. It is also possible to stream
+the data to the client using the initial RTSP TCP session (the interleaved
+mode). This last mode is useful when the client is behind a firewall but
+does not take advantage of the RTP/UDP features.
+
+In both cases, media data is sent to the clients in an unmultiplexed format
+packetized as RTP packets.
+
+The streaming server has to negotiate a connection protocol for each of the
+media streams with the client.
+
+Minimal server requirements:
+
+- The server should copy the CSeq header field in a client request to the
+ response so that the client can match the response to the request.
+
+- The server should keep a session for each client after the client issued
+ a SETUP command. The client should use the same session id for all future
+ request to this server.
+
+- The server must support an OPTIONS request sent over the RTSP connection.
+
+ >> OPTIONS * RTSP/1.0
+ >> CSeq: 1
+ >>
+ << RTSP/1.0 200 OK
+ << CSeq: 1
+ << Date: Wed May 11 13:21:43 2005 GMT
+ << Session: 5d5cb94413288ccd
+ << Public: DESCRIBE, SETUP, TEARDOWN, PLAY
+ <<
+
+ The OPTIONS request should list all supported methods on the server.
+
+ - The server should support the DESCRIBE method.
+
+ >> DESCRIBE rtsp://thread:5454/south-rtsp.mp3 RTSP/1.0
+ >> Accept: application/sdp
+ >> CSeq: 2
+ >>
+ << RTSP/1.0 200 OK
+ << Content-Length: 84
+ << Content-Type: application/sdp
+ << CSeq: 2
+ << Date: Wed May 11 13:09:37 2005 GMT
+ <<
+ << v=0
+ << o=- 0 0 IN IP4 192.168.1.1
+ << s=No Title
+ << m=audio 0 RTP/AVP 14
+ << a=control:streamid=0
+
+ The client issues a DESCRIBE command for a specific URL that corresponds
+ to an available stream. The client will also send an Accept header to
+ list its supported formats.
+
+ The server answers this request with a reply in one of the client supported
+ formats (application/sdp is the most common). The server typically sends a
+ fixed reply to all clients for each configured stream.
+
+ - The server must support the SETUP command to configure the media streams
+ that were listed in the DESCRIBE command.
+
+ >> SETUP rtsp://thread:5454/south-rtsp.mp3/streamid=0 RTSP/1.0
+ >> CSeq: 3
+ >> Transport: RTP/AVP/UDP;unicast;client_port=5000-5001,RTP/AVP/UDP;multicast,RTP/AVP/TCP
+ >>
+ << RTSP/1.0 200 OK
+ << Transport: RTP/AVP/UDP;unicast;client_port=5000-5001;server_port=6000-6001
+ << CSeq: 3
+ << Date: Wed May 11 13:21:43 2005 GMT
+ << Session: 5d5cb94413288ccd
+
+ The client will send a SETUP command for each of the streams listed in the
+ DESCRIBE reply. For SDP it will use a URL as listed in the a=control: property.
+
+ The client will list the supported transports in the Transport: header field.
+ Each transport is separated with a comma (,) and listed in order of preference.
+ The server has to select the first supported transport.
+
+ In the above example 3 transports are listed:
+
+ RTP/AVP/UDP;unicast;client_port=5000-5001
+
+ The client will accept RTP over UDP on the port pair 5000-5001. Port
+ 5000 will accept the RTP packets, 5001 the RTCP packets sent by the
+ server.
+
+ RTP/AVP/UDP;multicast
+
+ The client can join a multicast group for the specific media stream.
+ The port numbers of the multicast group it will connect to have to
+ be specified by the server in the reply.
+
+ RTP/AVP/TCP
+
+ the client can accept RTP packets interleaved on the RTSP connection.
+
+ The server selects a supported transport and allocates an RTP port pair to
+ receive RTP and RTCP data from the client. This last step is optional when
+ the server does not accept RTP data.
+
+ The server should allocate a session for the client and should send the
+ sessionId to the client. The client should use this session id for all
+ future requests.
+
+ The server may refuse a client that does not use the same transport method
+ for all media streams.
+
+ The server stores all client port pairs in the server client session along
+ with the transport method.
+
+ ex:
+
+ For an on-demand stream the server could construct a (minimal) RTP GStreamer
+ pipeline for the client as follows (for an mp3 stream):
+
+ +---------+ +-----------+ +------------+ +-------------+
+ | filesrc | | rtpmp3enc | | rtpsession | | udpsink |
+ | | | | | | | host=XXX |
+ | | | | | | | port=5000 |
+ | src--sink src--rtpsink rtpsrc--sink |
+ +---------+ +-----------+ | | +-------------+
+ | | +-------------+
+ | | | udpsink |
+ | | | host=XXX |
+ | | | port=5001 |
+ | rtspsrc--sink |
+ +------------+ +-------------+
+
+ The server would set the above pipeline to PAUSE to make sure no data
+ is sent to the client yet.
+
+ optionally udpsrc elements can be configured to receive client RTP and
+ RTCP messages.
+
+ ex:
+
+ For a live stream the server could construct a (minimal) RTP GStreamer
+ pipeline for the clients as follows (for an mp3 stream):
+
+ +---------+ +--------+ +-----------+ +------------+ +--------------+
+ | source | | mp3enc | | rtpmp3enc | | rtpsession | | multiudpsink |
+ | | | | | | | | | host=... |
+ | | | | | | | | | port=... |
+ | src--sink src--sink src--rtpsink rtpsrc--sink |
+ +---------+ +--------+ +-----------+ | | +--------------+
+ | | +--------------+
+ | | | multiudpsink |
+ | | | host=... |
+ | | | port=... |
+ | rtspsrc--sink |
+ +------------+ +--------------+
+
+ Media data is streamed to clients by adding the client host and port numbers
+ to the multiudpsinks.
+
+ optionally udpsrc elements can be configured to receive client RTP and
+ RTCP messages.
+
+ - The server must support the PLAY command to start playback of the configured
+ media streams.
+
+ >> PLAY rtsp://thread:5454/south-rtsp.mp3 RTSP/1.0
+ >> CSeq: 2
+ >> Session: 5d5cb94413288ccd
+ >>
+ << RTSP/1.0 200 OK
+ << CSeq: 2
+ << Date: Wed May 11 13:21:43 2005 GMT
+ << Session: 5d5cb94413288ccd
+ <<
+
+ Using the Session: header field, the server finds the pipeline of the session
+ to PLAY and sets the pipeline to PLAYING at which point the client receives
+ the media stream data.
+
+ In case of a live stream, the server adds the port numbers to a multiudpsink
+ element.
+
+ - The server must support the TEARDOWN command to stop playback and free the
+ session of a client.
+
+ >> TEARDOWN rtsp://thread:5454/south-rtsp.mp3 RTSP/1.0
+ >> CSeq: 4
+ >> Session: 5d5cb94413288ccd
+ >>
+ << RTSP/1.0 200 OK
+ << CSeq: 4
+ << Date: Wed May 11 13:21:43 2005 GMT
+ <<
+
+ The server destroys the client pipeline in case of an on-demand stream or
+ removes the client ports from the multiudpsinks. This effectively stops
+ streaming to the client.
+
+
diff --git a/gst/rtsp/URLS b/gst/rtsp/URLS
new file mode 100644
index 0000000000..26b40085e1
--- /dev/null
+++ b/gst/rtsp/URLS
@@ -0,0 +1,38 @@
+Some test URLS:
+
+SVQ3 video:
+ rtsp://cumulus.creative.auckland.ac.nz/~shado/nelson_iv_512k.mov
+ rtsp://streamr.hitpops.jp/ngc/mov/m0609.mov
+
+ASF (audio/video):
+ rtsp://aod.mylisten.com/aod/8/03/069803_0903135.wma
+ rtsp://195.246.8.69/dur_pull_hi
+ rtsp://195.219.160.200/mcp?chid=25&pid=196&vid=2402201&br=800&tid=1&void=2844&muk=H1J4yFr0mJB
+ rtsp://a1174.v26630f.c26630.g.vm.akamaistream.net/7/1174/26630/361/od-video.msn.com/8/mbr/rs_perf_dilana_091206_finale9135.wmv
+
+MP4V-ES/mpeg4-generic(AAC):
+ rtsp://vod.nwec.jp/quicktime/505.mov
+ rtsp://203.140.68.241:554/hirakataeizou9.mp4
+ rtsp://kmdi.utoronto.ca:555/osconf/2004_may9.1.mp4
+
+X-QT(h264)/mpeg4-generic(AAC):
+ rtsp://a2047.v1413b.c1413.g.vq.akamaistream.net/5/2047/1413/1_h264_110/1a1a1ae656c632970267e04ebd3196c428970e7ce857b81c4aab1677e445aedc3fae1b4a7bafe013/8848125_1_110.mov
+
+MP4V-ES/MP4A-LATM
+ rtsp://68.251.168.13/thisislove.3gp
+
+H264/MPA
+ rtsp://130.192.86.166/ed.mov
+
+REAL:
+ rtsp://213.254.239.61/farm/*/encoder/tagesschau/live1high.rm
+ rtsp://64.192.137.105:554/real.amazon-de.eu2/phononet/B/0/0/0/H/W/Y/4/K/S/01.01.rm?cloakport=80,554,7070
+
+ rtsp://211.89.225.1/encoder/cnr7_p
+ rtsp://stream2.visual.cz/broadcast/ct/CT24-High.rm
+
+AAC, interleaved:
+ rtsp://ia300135.us.archive.org:554/0/items/uncovered_interviews/uncovered_interviews_3_256kb.mp4
+
+Various samples here:
+ http://www.jet-stream.nl/samples/mp4.html
diff --git a/gst/rtsp/gstrtpdec.c b/gst/rtsp/gstrtpdec.c
new file mode 100644
index 0000000000..c5550ce362
--- /dev/null
+++ b/gst/rtsp/gstrtpdec.c
@@ -0,0 +1,895 @@
+/* GStreamer
+ * Copyright (C) <2005,2006> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+/* Element-Checklist-Version: 5 */
+
+/**
+ * SECTION:element-rtpdec
+ * @title: rtpdec
+ *
+ * A simple RTP session manager used internally by rtspsrc.
+ */
+
+/* #define HAVE_RTCP */
+
+#include <gst/rtp/gstrtpbuffer.h>
+
+#ifdef HAVE_RTCP
+#include <gst/rtp/gstrtcpbuffer.h>
+#endif
+
+#include "gstrtspelements.h"
+#include "gstrtpdec.h"
+#include <stdio.h>
+
+GST_DEBUG_CATEGORY_STATIC (rtpdec_debug);
+#define GST_CAT_DEFAULT (rtpdec_debug)
+
+/* GstRTPDec signals and args */
+enum
+{
+ SIGNAL_REQUEST_PT_MAP,
+ SIGNAL_CLEAR_PT_MAP,
+
+ SIGNAL_ON_NEW_SSRC,
+ SIGNAL_ON_SSRC_COLLISION,
+ SIGNAL_ON_SSRC_VALIDATED,
+ SIGNAL_ON_BYE_SSRC,
+ SIGNAL_ON_BYE_TIMEOUT,
+ SIGNAL_ON_TIMEOUT,
+ LAST_SIGNAL
+};
+
+#define DEFAULT_LATENCY_MS 200
+
+enum
+{
+ PROP_0,
+ PROP_LATENCY
+};
+
+static GstStaticPadTemplate gst_rtp_dec_recv_rtp_sink_template =
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate gst_rtp_dec_recv_rtcp_sink_template =
+GST_STATIC_PAD_TEMPLATE ("recv_rtcp_sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtcp")
+ );
+
+static GstStaticPadTemplate gst_rtp_dec_recv_rtp_src_template =
+GST_STATIC_PAD_TEMPLATE ("recv_rtp_src_%u_%u_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp")
+ );
+
+static GstStaticPadTemplate gst_rtp_dec_rtcp_src_template =
+GST_STATIC_PAD_TEMPLATE ("rtcp_src_%u",
+ GST_PAD_SRC,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("application/x-rtcp")
+ );
+
+static void gst_rtp_dec_finalize (GObject * object);
+static void gst_rtp_dec_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_rtp_dec_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+static GstClock *gst_rtp_dec_provide_clock (GstElement * element);
+static GstStateChangeReturn gst_rtp_dec_change_state (GstElement * element,
+ GstStateChange transition);
+static GstPad *gst_rtp_dec_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name, const GstCaps * caps);
+static void gst_rtp_dec_release_pad (GstElement * element, GstPad * pad);
+
+static GstFlowReturn gst_rtp_dec_chain_rtp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+static GstFlowReturn gst_rtp_dec_chain_rtcp (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer);
+
+
+/* Manages the receiving end of the packets.
+ *
+ * There is one such structure for each RTP session (audio/video/...).
+ * We get the RTP/RTCP packets and stuff them into the session manager.
+ */
+struct _GstRTPDecSession
+{
+ /* session id */
+ gint id;
+ /* the parent bin */
+ GstRTPDec *dec;
+
+ gboolean active;
+ /* we only support one ssrc and one pt */
+ guint32 ssrc;
+ guint8 pt;
+ GstCaps *caps;
+
+ /* the pads of the session */
+ GstPad *recv_rtp_sink;
+ GstPad *recv_rtp_src;
+ GstPad *recv_rtcp_sink;
+ GstPad *rtcp_src;
+};
+
+/* find a session with the given id */
+static GstRTPDecSession *
+find_session_by_id (GstRTPDec * rtpdec, gint id)
+{
+ GSList *walk;
+
+ for (walk = rtpdec->sessions; walk; walk = g_slist_next (walk)) {
+ GstRTPDecSession *sess = (GstRTPDecSession *) walk->data;
+
+ if (sess->id == id)
+ return sess;
+ }
+ return NULL;
+}
+
+/* create a session with the given id */
+static GstRTPDecSession *
+create_session (GstRTPDec * rtpdec, gint id)
+{
+ GstRTPDecSession *sess;
+
+ sess = g_new0 (GstRTPDecSession, 1);
+ sess->id = id;
+ sess->dec = rtpdec;
+ rtpdec->sessions = g_slist_prepend (rtpdec->sessions, sess);
+
+ return sess;
+}
+
+static void
+free_session (GstRTPDecSession * session)
+{
+ g_free (session);
+}
+
+static guint gst_rtp_dec_signals[LAST_SIGNAL] = { 0 };
+
+#define gst_rtp_dec_parent_class parent_class
+G_DEFINE_TYPE (GstRTPDec, gst_rtp_dec, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtpdec, "rtpdec", GST_RANK_NONE,
+ GST_TYPE_RTP_DEC, rtsp_element_init (plugin));
+
+static void
+gst_rtp_dec_class_init (GstRTPDecClass * g_class)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstRTPDecClass *klass;
+
+ klass = (GstRTPDecClass *) g_class;
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (rtpdec_debug, "rtpdec", 0, "RTP decoder");
+
+ gobject_class->finalize = gst_rtp_dec_finalize;
+ gobject_class->set_property = gst_rtp_dec_set_property;
+ gobject_class->get_property = gst_rtp_dec_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_uint ("latency", "Buffer latency in ms",
+ "Amount of ms to buffer", 0, G_MAXUINT, DEFAULT_LATENCY_MS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTPDec::request-pt-map:
+ * @rtpdec: the object which received the signal
+ * @session: the session
+ * @pt: the pt
+ *
+ * Request the payload type as #GstCaps for @pt in @session.
+ */
+ gst_rtp_dec_signals[SIGNAL_REQUEST_PT_MAP] =
+ g_signal_new ("request-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, request_pt_map), NULL,
+ NULL, NULL, GST_TYPE_CAPS, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ gst_rtp_dec_signals[SIGNAL_CLEAR_PT_MAP] =
+ g_signal_new ("clear-pt-map", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, clear_pt_map), NULL,
+ NULL, NULL, G_TYPE_NONE, 0, G_TYPE_NONE);
+
+ /**
+ * GstRTPDec::on-new-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a new SSRC that entered @session.
+ */
+ gst_rtp_dec_signals[SIGNAL_ON_NEW_SSRC] =
+ g_signal_new ("on-new-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, on_new_ssrc), NULL,
+ NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRTPDec::on-ssrc_collision:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify when we have an SSRC collision
+ */
+ gst_rtp_dec_signals[SIGNAL_ON_SSRC_COLLISION] =
+ g_signal_new ("on-ssrc-collision", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, on_ssrc_collision),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRTPDec::on-ssrc_validated:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of a new SSRC that became validated.
+ */
+ gst_rtp_dec_signals[SIGNAL_ON_SSRC_VALIDATED] =
+ g_signal_new ("on-ssrc-validated", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, on_ssrc_validated),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ /**
+ * GstRTPDec::on-bye-ssrc:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that became inactive because of a BYE packet.
+ */
+ gst_rtp_dec_signals[SIGNAL_ON_BYE_SSRC] =
+ g_signal_new ("on-bye-ssrc", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, on_bye_ssrc), NULL,
+ NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRTPDec::on-bye-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that has timed out because of BYE
+ */
+ gst_rtp_dec_signals[SIGNAL_ON_BYE_TIMEOUT] =
+ g_signal_new ("on-bye-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, on_bye_timeout),
+ NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+ /**
+ * GstRTPDec::on-timeout:
+ * @rtpbin: the object which received the signal
+ * @session: the session
+ * @ssrc: the SSRC
+ *
+ * Notify of an SSRC that has timed out
+ */
+ gst_rtp_dec_signals[SIGNAL_ON_TIMEOUT] =
+ g_signal_new ("on-timeout", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstRTPDecClass, on_timeout), NULL,
+ NULL, NULL, G_TYPE_NONE, 2, G_TYPE_UINT, G_TYPE_UINT);
+
+ gstelement_class->provide_clock =
+ GST_DEBUG_FUNCPTR (gst_rtp_dec_provide_clock);
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_rtp_dec_change_state);
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_rtp_dec_request_new_pad);
+ gstelement_class->release_pad = GST_DEBUG_FUNCPTR (gst_rtp_dec_release_pad);
+
+ /* sink pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_dec_recv_rtp_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_dec_recv_rtcp_sink_template);
+ /* src pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_dec_recv_rtp_src_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_rtp_dec_rtcp_src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "RTP Decoder",
+ "Codec/Parser/Network",
+ "Accepts raw RTP and RTCP packets and sends them forward",
+ "Wim Taymans <wim.taymans@gmail.com>");
+}
+
+static void
+gst_rtp_dec_init (GstRTPDec * rtpdec)
+{
+ rtpdec->provided_clock = gst_system_clock_obtain ();
+ rtpdec->latency = DEFAULT_LATENCY_MS;
+
+ GST_OBJECT_FLAG_SET (rtpdec, GST_ELEMENT_FLAG_PROVIDE_CLOCK);
+}
+
+static void
+gst_rtp_dec_finalize (GObject * object)
+{
+ GstRTPDec *rtpdec;
+
+ rtpdec = GST_RTP_DEC (object);
+
+ gst_object_unref (rtpdec->provided_clock);
+ g_slist_foreach (rtpdec->sessions, (GFunc) free_session, NULL);
+ g_slist_free (rtpdec->sessions);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_rtp_dec_query_src (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_LATENCY:
+ {
+ /* we pretend to be live with a 3 second latency */
+ /* FIXME: Do we really have infinite maximum latency? */
+ gst_query_set_latency (query, TRUE, 3 * GST_SECOND, -1);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
+static GstFlowReturn
+gst_rtp_dec_chain_rtp (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+ GstFlowReturn res;
+ GstRTPDec *rtpdec;
+ GstRTPDecSession *session;
+ guint32 ssrc;
+ guint8 pt;
+ GstRTPBuffer rtp = { NULL, };
+
+ rtpdec = GST_RTP_DEC (parent);
+
+ GST_DEBUG_OBJECT (rtpdec, "got rtp packet");
+
+ if (!gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp))
+ goto bad_packet;
+
+ ssrc = gst_rtp_buffer_get_ssrc (&rtp);
+ pt = gst_rtp_buffer_get_payload_type (&rtp);
+ gst_rtp_buffer_unmap (&rtp);
+
+ GST_DEBUG_OBJECT (rtpdec, "SSRC %08x, PT %d", ssrc, pt);
+
+ /* find session */
+ session = gst_pad_get_element_private (pad);
+
+ /* see if we have the pad */
+ if (!session->active) {
+ GstPadTemplate *templ;
+ GstElementClass *klass;
+ gchar *name;
+ GstCaps *caps;
+ GValue ret = { 0 };
+ GValue args[3] = { {0}
+ , {0}
+ , {0}
+ };
+
+ GST_DEBUG_OBJECT (rtpdec, "creating stream");
+
+ session->ssrc = ssrc;
+ session->pt = pt;
+
+ /* get pt map */
+ g_value_init (&args[0], GST_TYPE_ELEMENT);
+ g_value_set_object (&args[0], rtpdec);
+ g_value_init (&args[1], G_TYPE_UINT);
+ g_value_set_uint (&args[1], session->id);
+ g_value_init (&args[2], G_TYPE_UINT);
+ g_value_set_uint (&args[2], pt);
+
+ g_value_init (&ret, GST_TYPE_CAPS);
+ g_value_set_boxed (&ret, NULL);
+
+ g_signal_emitv (args, gst_rtp_dec_signals[SIGNAL_REQUEST_PT_MAP], 0, &ret);
+
+ caps = (GstCaps *) g_value_get_boxed (&ret);
+
+ name = g_strdup_printf ("recv_rtp_src_%u_%u_%u", session->id, ssrc, pt);
+ klass = GST_ELEMENT_GET_CLASS (rtpdec);
+ templ = gst_element_class_get_pad_template (klass, "recv_rtp_src_%u_%u_%u");
+ session->recv_rtp_src = gst_pad_new_from_template (templ, name);
+ g_free (name);
+
+ gst_pad_set_caps (session->recv_rtp_src, caps);
+
+ gst_pad_set_element_private (session->recv_rtp_src, session);
+ gst_pad_set_query_function (session->recv_rtp_src, gst_rtp_dec_query_src);
+ gst_pad_set_active (session->recv_rtp_src, TRUE);
+ gst_element_add_pad (GST_ELEMENT_CAST (rtpdec), session->recv_rtp_src);
+
+ session->active = TRUE;
+ }
+
+ res = gst_pad_push (session->recv_rtp_src, buffer);
+
+ return res;
+
+bad_packet:
+ {
+ GST_ELEMENT_WARNING (rtpdec, STREAM, DECODE, (NULL),
+ ("RTP packet did not validate, dropping"));
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+}
+
+static GstFlowReturn
+gst_rtp_dec_chain_rtcp (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+ GstRTPDec *src;
+
+#ifdef HAVE_RTCP
+ gboolean valid;
+ GstRTCPPacket packet;
+ gboolean more;
+#endif
+
+ src = GST_RTP_DEC (parent);
+
+ GST_DEBUG_OBJECT (src, "got rtcp packet");
+
+#ifdef HAVE_RTCP
+ valid = gst_rtcp_buffer_validate (buffer);
+ if (!valid)
+ goto bad_packet;
+
+ /* position on first packet */
+ more = gst_rtcp_buffer_get_first_packet (buffer, &packet);
+ while (more) {
+ switch (gst_rtcp_packet_get_type (&packet)) {
+ case GST_RTCP_TYPE_SR:
+ {
+ guint32 ssrc, rtptime, packet_count, octet_count;
+ guint64 ntptime;
+ guint count, i;
+
+ gst_rtcp_packet_sr_get_sender_info (&packet, &ssrc, &ntptime, &rtptime,
+ &packet_count, &octet_count);
+
+ GST_DEBUG_OBJECT (src,
+ "got SR packet: SSRC %08x, NTP %" G_GUINT64_FORMAT
+ ", RTP %u, PC %u, OC %u", ssrc, ntptime, rtptime, packet_count,
+ octet_count);
+
+ count = gst_rtcp_packet_get_rb_count (&packet);
+ for (i = 0; i < count; i++) {
+ guint32 ssrc, exthighestseq, jitter, lsr, dlsr;
+ guint8 fractionlost;
+ gint32 packetslost;
+
+ gst_rtcp_packet_get_rb (&packet, i, &ssrc, &fractionlost,
+ &packetslost, &exthighestseq, &jitter, &lsr, &dlsr);
+
+ GST_DEBUG_OBJECT (src, "got RB packet %d: SSRC %08x, FL %u"
+ ", PL %u, HS %u, JITTER %u, LSR %u, DLSR %u", ssrc, fractionlost,
+ packetslost, exthighestseq, jitter, lsr, dlsr);
+ }
+ break;
+ }
+ case GST_RTCP_TYPE_RR:
+ {
+ guint32 ssrc;
+ guint count, i;
+
+ ssrc = gst_rtcp_packet_rr_get_ssrc (&packet);
+
+ GST_DEBUG_OBJECT (src, "got RR packet: SSRC %08x", ssrc);
+
+ count = gst_rtcp_packet_get_rb_count (&packet);
+ for (i = 0; i < count; i++) {
+ guint32 ssrc, exthighestseq, jitter, lsr, dlsr;
+ guint8 fractionlost;
+ gint32 packetslost;
+
+ gst_rtcp_packet_get_rb (&packet, i, &ssrc, &fractionlost,
+ &packetslost, &exthighestseq, &jitter, &lsr, &dlsr);
+
+ GST_DEBUG_OBJECT (src, "got RB packet %d: SSRC %08x, FL %u"
+ ", PL %u, HS %u, JITTER %u, LSR %u, DLSR %u", ssrc, fractionlost,
+ packetslost, exthighestseq, jitter, lsr, dlsr);
+ }
+ break;
+ }
+ case GST_RTCP_TYPE_SDES:
+ {
+ guint chunks, i, j;
+ gboolean more_chunks, more_items;
+
+ chunks = gst_rtcp_packet_sdes_get_chunk_count (&packet);
+ GST_DEBUG_OBJECT (src, "got SDES packet with %d chunks", chunks);
+
+ more_chunks = gst_rtcp_packet_sdes_first_chunk (&packet);
+ i = 0;
+ while (more_chunks) {
+ guint32 ssrc;
+
+ ssrc = gst_rtcp_packet_sdes_get_ssrc (&packet);
+
+ GST_DEBUG_OBJECT (src, "chunk %d, SSRC %08x", i, ssrc);
+
+ more_items = gst_rtcp_packet_sdes_first_item (&packet);
+ j = 0;
+ while (more_items) {
+ GstRTCPSDESType type;
+ guint8 len;
+ gchar *data;
+
+ gst_rtcp_packet_sdes_get_item (&packet, &type, &len, &data);
+
+ GST_DEBUG_OBJECT (src, "item %d, type %d, len %d, data %s", j,
+ type, len, data);
+
+ more_items = gst_rtcp_packet_sdes_next_item (&packet);
+ j++;
+ }
+ more_chunks = gst_rtcp_packet_sdes_next_chunk (&packet);
+ i++;
+ }
+ break;
+ }
+ case GST_RTCP_TYPE_BYE:
+ {
+ guint count, i;
+ gchar *reason;
+
+ reason = gst_rtcp_packet_bye_get_reason (&packet);
+ GST_DEBUG_OBJECT (src, "got BYE packet (reason: %s)",
+ GST_STR_NULL (reason));
+ g_free (reason);
+
+ count = gst_rtcp_packet_bye_get_ssrc_count (&packet);
+ for (i = 0; i < count; i++) {
+ guint32 ssrc;
+
+
+ ssrc = gst_rtcp_packet_bye_get_nth_ssrc (&packet, i);
+
+ GST_DEBUG_OBJECT (src, "SSRC: %08x", ssrc);
+ }
+ break;
+ }
+ case GST_RTCP_TYPE_APP:
+ GST_DEBUG_OBJECT (src, "got APP packet");
+ break;
+ default:
+ GST_WARNING_OBJECT (src, "got unknown RTCP packet");
+ break;
+ }
+ more = gst_rtcp_packet_move_to_next (&packet);
+ }
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+
+bad_packet:
+ {
+ GST_WARNING_OBJECT (src, "got invalid RTCP packet");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+ }
+#else
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+#endif
+}
+
+/* GObject::set_property implementation */
+static void
+gst_rtp_dec_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstRTPDec *rtpdec = GST_RTP_DEC (object);
+
+  switch (prop_id) {
+    case PROP_LATENCY:
+      rtpdec->latency = g_value_get_uint (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property implementation */
+static void
+gst_rtp_dec_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstRTPDec *rtpdec = GST_RTP_DEC (object);
+
+  switch (prop_id) {
+    case PROP_LATENCY:
+      g_value_set_uint (value, rtpdec->latency);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstElement::provide_clock implementation: hand out a new reference to
+ * our internal clock. */
+static GstClock *
+gst_rtp_dec_provide_clock (GstElement * element)
+{
+  GstRTPDec *rtpdec = GST_RTP_DEC (element);
+
+  return GST_CLOCK_CAST (gst_object_ref (rtpdec->provided_clock));
+}
+
+/* GstElement::change_state implementation.
+ *
+ * Nothing special is done before chaining up; afterwards the result is
+ * forced to NO_PREROLL when entering PAUSED because this is a live
+ * element that cannot preroll. */
+static GstStateChangeReturn
+gst_rtp_dec_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret;
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* we're NO_PREROLL when going to PAUSED */
+  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED ||
+      transition == GST_STATE_CHANGE_PLAYING_TO_PAUSED)
+    ret = GST_STATE_CHANGE_NO_PREROLL;
+
+  return ret;
+}
+
+/* Create a pad for receiving RTP for the session in @name.
+ *
+ * The session id is parsed from the pad name; the session is created on
+ * demand. Returns NULL (after a warning) on a malformed name, a failed
+ * session creation, or a duplicate request. */
+static GstPad *
+create_recv_rtp (GstRTPDec * rtpdec, GstPadTemplate * templ, const gchar * name)
+{
+  guint sessid;
+  GstRTPDecSession *session;
+  GstPad *pad;
+
+  /* the session number is encoded in the pad name */
+  if (name == NULL || sscanf (name, "recv_rtp_sink_%u", &sessid) != 1) {
+    g_warning ("rtpdec: invalid name given");
+    return NULL;
+  }
+
+  GST_DEBUG_OBJECT (rtpdec, "finding session %d", sessid);
+
+  /* get the session, creating it on first use */
+  session = find_session_by_id (rtpdec, sessid);
+  if (session == NULL) {
+    GST_DEBUG_OBJECT (rtpdec, "creating session %d", sessid);
+    session = create_session (rtpdec, sessid);
+    /* create_session already warned on failure */
+    if (session == NULL)
+      return NULL;
+  }
+
+  /* a session can have at most one RTP sink pad */
+  if (session->recv_rtp_sink != NULL) {
+    g_warning ("rtpdec: recv_rtp pad already requested for session %d", sessid);
+    return NULL;
+  }
+
+  GST_DEBUG_OBJECT (rtpdec, "getting RTP sink pad");
+
+  pad = gst_pad_new_from_template (templ, name);
+  session->recv_rtp_sink = pad;
+  gst_pad_set_element_private (pad, session);
+  gst_pad_set_chain_function (pad, gst_rtp_dec_chain_rtp);
+  gst_pad_set_active (pad, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpdec), pad);
+
+  return pad;
+}
+
+/* Create a pad for receiving RTCP for the session in @name.
+ *
+ * The session must already exist (i.e. the RTP sink pad for the same id
+ * must have been requested first). Returns NULL (after a warning) on a
+ * malformed name, an unknown session, or a duplicate request.
+ */
+static GstPad *
+create_recv_rtcp (GstRTPDec * rtpdec, GstPadTemplate * templ,
+    const gchar * name)
+{
+  guint sessid;
+  GstRTPDecSession *session;
+
+  /* first get the session number */
+  if (name == NULL || sscanf (name, "recv_rtcp_sink_%u", &sessid) != 1)
+    goto no_name;
+
+  GST_DEBUG_OBJECT (rtpdec, "finding session %d", sessid);
+
+  /* get the session, it must exist or we error */
+  session = find_session_by_id (rtpdec, sessid);
+  if (!session)
+    goto no_session;
+
+  /* check if pad was requested */
+  if (session->recv_rtcp_sink != NULL)
+    goto existed;
+
+  GST_DEBUG_OBJECT (rtpdec, "getting RTCP sink pad");
+
+  session->recv_rtcp_sink = gst_pad_new_from_template (templ, name);
+  /* BUGFIX: the session must be attached to the new RTCP pad; previously
+   * it was set on session->recv_rtp_sink (a copy-paste from
+   * create_recv_rtp), which may still be NULL here and in any case left
+   * the RTCP pad without its element-private session pointer. */
+  gst_pad_set_element_private (session->recv_rtcp_sink, session);
+  gst_pad_set_chain_function (session->recv_rtcp_sink, gst_rtp_dec_chain_rtcp);
+  gst_pad_set_active (session->recv_rtcp_sink, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpdec), session->recv_rtcp_sink);
+
+  return session->recv_rtcp_sink;
+
+  /* ERRORS */
+no_name:
+  {
+    g_warning ("rtpdec: invalid name given");
+    return NULL;
+  }
+no_session:
+  {
+    g_warning ("rtpdec: no session with id %d", sessid);
+    return NULL;
+  }
+existed:
+  {
+    g_warning ("rtpdec: recv_rtcp pad already requested for session %d",
+        sessid);
+    return NULL;
+  }
+}
+
+/* Create a pad for sending RTCP for the session in @name.
+ *
+ * The session must already exist. Returns NULL (after a warning) on a
+ * malformed name, an unknown session, or a duplicate request. */
+static GstPad *
+create_rtcp (GstRTPDec * rtpdec, GstPadTemplate * templ, const gchar * name)
+{
+  guint sessid;
+  GstRTPDecSession *session;
+  GstPad *pad;
+
+  /* the session number is encoded in the pad name */
+  if (name == NULL || sscanf (name, "rtcp_src_%u", &sessid) != 1) {
+    g_warning ("rtpdec: invalid name given");
+    return NULL;
+  }
+
+  /* the session must have been created already */
+  session = find_session_by_id (rtpdec, sessid);
+  if (session == NULL) {
+    g_warning ("rtpdec: session with id %d does not exist", sessid);
+    return NULL;
+  }
+
+  /* a session can have at most one RTCP src pad */
+  if (session->rtcp_src != NULL) {
+    g_warning ("rtpdec: rtcp_src pad already requested for session %d", sessid);
+    return NULL;
+  }
+
+  pad = gst_pad_new_from_template (templ, name);
+  session->rtcp_src = pad;
+  gst_pad_set_active (pad, TRUE);
+  gst_element_add_pad (GST_ELEMENT_CAST (rtpdec), pad);
+
+  return pad;
+}
+
+/* GstElement::request_new_pad implementation: dispatch to the pad
+ * creation helper matching the requested template. */
+static GstPad *
+gst_rtp_dec_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * name, const GstCaps * caps)
+{
+  GstRTPDec *rtpdec;
+  GstElementClass *klass;
+
+  g_return_val_if_fail (templ != NULL, NULL);
+  g_return_val_if_fail (GST_IS_RTP_DEC (element), NULL);
+
+  rtpdec = GST_RTP_DEC (element);
+  klass = GST_ELEMENT_GET_CLASS (element);
+
+  /* figure out which of our templates was requested */
+  if (templ == gst_element_class_get_pad_template (klass, "recv_rtp_sink_%u"))
+    return create_recv_rtp (rtpdec, templ, name);
+
+  if (templ == gst_element_class_get_pad_template (klass, "recv_rtcp_sink_%u"))
+    return create_recv_rtcp (rtpdec, templ, name);
+
+  if (templ == gst_element_class_get_pad_template (klass, "rtcp_src_%u"))
+    return create_rtcp (rtpdec, templ, name);
+
+  g_warning ("rtpdec: this is not our template");
+  return NULL;
+}
+
+/* GstElement::release_pad implementation.
+ * Intentionally empty: request pads are kept for the lifetime of the
+ * element and are not torn down on release. */
+static void
+gst_rtp_dec_release_pad (GstElement * element, GstPad * pad)
+{
+}
diff --git a/gst/rtsp/gstrtpdec.h b/gst/rtsp/gstrtpdec.h
new file mode 100644
index 0000000000..5e83e2365a
--- /dev/null
+++ b/gst/rtsp/gstrtpdec.h
@@ -0,0 +1,88 @@
+/* GStreamer
+ * Copyright (C) <2005,2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __GST_RTP_DEC_H__
+#define __GST_RTP_DEC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject cast / type-check boilerplate for GstRTPDec */
+#define GST_TYPE_RTP_DEC (gst_rtp_dec_get_type())
+#define GST_IS_RTP_DEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_DEC))
+#define GST_IS_RTP_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_DEC))
+#define GST_RTP_DEC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_DEC, GstRTPDec))
+#define GST_RTP_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_DEC, GstRTPDecClass))
+
+typedef struct _GstRTPDec GstRTPDec;
+typedef struct _GstRTPDecClass GstRTPDecClass;
+typedef struct _GstRTPDecSession GstRTPDecSession;
+
+struct _GstRTPDec {
+  GstElement element;
+
+  /* value of the "latency" property (presumably milliseconds, as is
+   * conventional for RTP jitterbuffers — TODO confirm against the
+   * property installation) */
+  guint latency;
+  /* list of GstRTPDecSession, looked up by session id */
+  GSList *sessions;
+  /* clock handed out via GstElement::provide_clock */
+  GstClock *provided_clock;
+};
+
+struct _GstRTPDecClass {
+  GstElementClass parent_class;
+
+  /* get the caps for pt */
+  GstCaps* (*request_pt_map) (GstRTPDec *rtpdec, guint session, guint pt);
+
+  /* flush the cached payload-type -> caps mapping */
+  void (*clear_pt_map) (GstRTPDec *rtpdec);
+
+  /* per-session SSRC lifecycle notifications */
+  void (*on_new_ssrc) (GstRTPDec *rtpdec, guint session, guint32 ssrc);
+  void (*on_ssrc_collision) (GstRTPDec *rtpdec, guint session, guint32 ssrc);
+  void (*on_ssrc_validated) (GstRTPDec *rtpdec, guint session, guint32 ssrc);
+  void (*on_bye_ssrc) (GstRTPDec *rtpdec, guint session, guint32 ssrc);
+  void (*on_bye_timeout) (GstRTPDec *rtpdec, guint session, guint32 ssrc);
+  void (*on_timeout) (GstRTPDec *rtpdec, guint session, guint32 ssrc);
+};
+
+GType gst_rtp_dec_get_type(void);
+
+G_END_DECLS
+
+#endif /* __GST_RTP_DEC_H__ */
diff --git a/gst/rtsp/gstrtsp.c b/gst/rtsp/gstrtsp.c
new file mode 100644
index 0000000000..89bb88ae9b
--- /dev/null
+++ b/gst/rtsp/gstrtsp.c
@@ -0,0 +1,65 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstrtspelements.h"
+
+/* Plugin entry point: register the rtspsrc and rtpdec elements.
+ * Succeeds when at least one registration worked. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean registered = FALSE;
+
+  registered |= GST_ELEMENT_REGISTER (rtspsrc, plugin);
+  registered |= GST_ELEMENT_REGISTER (rtpdec, plugin);
+
+  return registered;
+}
+
+/* Declare the "rtsp" plugin to GStreamer's registry */
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    rtsp,
+    "transfer data via RTSP",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/rtsp/gstrtspelement.c b/gst/rtsp/gstrtspelement.c
new file mode 100644
index 0000000000..f296a4b0d4
--- /dev/null
+++ b/gst/rtsp/gstrtspelement.c
@@ -0,0 +1,65 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * <2006> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Stéphane Cerveau <stephane.cerveau@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gst/gst-i18n-plugin.h"
+
+#include "gstrtspelements.h"
+
+/* One-time shared initialization for all elements of this plugin:
+ * sets up gettext translation when NLS is enabled. Safe to call from
+ * every element's register function; the g_once guard makes the body
+ * run at most once. */
+void
+rtsp_element_init (GstPlugin * plugin)
+{
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+#ifdef ENABLE_NLS
+    bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
+    bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
+#endif /* ENABLE_NLS */
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/gst/rtsp/gstrtspelements.h b/gst/rtsp/gstrtspelements.h
new file mode 100644
index 0000000000..cfab82aab6
--- /dev/null
+++ b/gst/rtsp/gstrtspelements.h
@@ -0,0 +1,62 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __GST_RTSP_ELEMENTS_H__
+#define __GST_RTSP_ELEMENTS_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+void rtsp_element_init (GstPlugin * plugin);
+
+GST_ELEMENT_REGISTER_DECLARE (rtspsrc);
+GST_ELEMENT_REGISTER_DECLARE (rtpdec);
+
+G_END_DECLS
+
+#endif /* __GST_RTSP_ELEMENTS_H__ */
diff --git a/gst/rtsp/gstrtspext.c b/gst/rtsp/gstrtspext.c
new file mode 100644
index 0000000000..07b5a97d71
--- /dev/null
+++ b/gst/rtsp/gstrtspext.c
@@ -0,0 +1,268 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "gstrtspext.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtspext_debug);
+#define GST_CAT_DEFAULT (rtspext_debug)
+
+static GList *extensions;
+
+/* Registry filter: keep element factories that implement the
+ * GstRTSPExtension interface and have at least marginal rank. */
+static gboolean
+gst_rtsp_ext_list_filter (GstPluginFeature * feature, gpointer user_data)
+{
+  GstElementFactory *factory;
+
+  /* we only care about element factories */
+  if (!GST_IS_ELEMENT_FACTORY (feature))
+    return FALSE;
+
+  factory = GST_ELEMENT_FACTORY (feature);
+
+  /* ... that implement the extension interface */
+  if (!gst_element_factory_has_interface (factory, "GstRTSPExtension"))
+    return FALSE;
+
+  /* ... and have autoplugging rank */
+  return gst_plugin_feature_get_rank (feature) >= GST_RANK_MARGINAL;
+}
+
+/* Initialize the extension machinery: set up the debug category and
+ * snapshot the list of matching extension factories from the registry
+ * into the file-scope 'extensions' list. Call once at plugin load. */
+void
+gst_rtsp_ext_list_init (void)
+{
+  GST_DEBUG_CATEGORY_INIT (rtspext_debug, "rtspext", 0, "RTSP extension");
+
+  /* get a list of all extensions */
+  extensions = gst_registry_feature_filter (gst_registry_get (),
+      (GstPluginFeatureFilter) gst_rtsp_ext_list_filter, FALSE, NULL);
+}
+
+/* Build a fresh GstRTSPExtensionList with one element instance created
+ * from every registered extension factory. Factories that fail to
+ * instantiate are logged and skipped. Free with gst_rtsp_ext_list_free(). */
+GstRTSPExtensionList *
+gst_rtsp_ext_list_get (void)
+{
+  GstRTSPExtensionList *list;
+  GList *item;
+
+  list = g_new0 (GstRTSPExtensionList, 1);
+
+  for (item = extensions; item != NULL; item = item->next) {
+    GstElementFactory *factory = GST_ELEMENT_FACTORY (item->data);
+    GstElement *element = gst_element_factory_create (factory, NULL);
+
+    if (element == NULL) {
+      GST_ERROR ("could not create extension instance");
+      continue;
+    }
+
+    GST_DEBUG ("added extension interface for '%s'",
+        GST_ELEMENT_NAME (element));
+    list->extensions = g_list_prepend (list->extensions, element);
+  }
+  return list;
+}
+
+/* Release every extension instance and the list container itself. */
+void
+gst_rtsp_ext_list_free (GstRTSPExtensionList * ext)
+{
+  g_list_free_full (ext->extensions, (GDestroyNotify) gst_object_unref);
+  g_free (ext);
+}
+
+/* Let every extension inspect the server response. Note: only the
+ * verdict of the last extension in the list is returned, matching the
+ * original behaviour. */
+gboolean
+gst_rtsp_ext_list_detect_server (GstRTSPExtensionList * ext,
+    GstRTSPMessage * resp)
+{
+  gboolean result = TRUE;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    result = gst_rtsp_extension_detect_server ((GstRTSPExtension *) item->data,
+        resp);
+
+  return result;
+}
+
+/* Give every extension a chance to see/adjust the request before it is
+ * sent; the result of the last extension wins. */
+GstRTSPResult
+gst_rtsp_ext_list_before_send (GstRTSPExtensionList * ext, GstRTSPMessage * req)
+{
+  GstRTSPResult result = GST_RTSP_OK;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    result = gst_rtsp_extension_before_send ((GstRTSPExtension *) item->data,
+        req);
+
+  return result;
+}
+
+/* Notify every extension that @req was sent and @resp received; the
+ * result of the last extension wins. */
+GstRTSPResult
+gst_rtsp_ext_list_after_send (GstRTSPExtensionList * ext, GstRTSPMessage * req,
+    GstRTSPMessage * resp)
+{
+  GstRTSPResult result = GST_RTSP_OK;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    result = gst_rtsp_extension_after_send ((GstRTSPExtension *) item->data,
+        req, resp);
+
+  return result;
+}
+
+/* Let every extension parse the SDP message into @s; the result of the
+ * last extension wins. */
+GstRTSPResult
+gst_rtsp_ext_list_parse_sdp (GstRTSPExtensionList * ext, GstSDPMessage * sdp,
+    GstStructure * s)
+{
+  GstRTSPResult result = GST_RTSP_OK;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    result = gst_rtsp_extension_parse_sdp ((GstRTSPExtension *) item->data,
+        sdp, s);
+
+  return result;
+}
+
+/* Let every extension configure the SDP media; the result of the last
+ * extension wins. */
+GstRTSPResult
+gst_rtsp_ext_list_setup_media (GstRTSPExtensionList * ext, GstSDPMedia * media)
+{
+  GstRTSPResult result = GST_RTSP_OK;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    result = gst_rtsp_extension_setup_media ((GstRTSPExtension *) item->data,
+        media);
+
+  return result;
+}
+
+/* Ask every extension to accept the stream caps; stops and returns
+ * FALSE at the first extension that refuses. */
+gboolean
+gst_rtsp_ext_list_configure_stream (GstRTSPExtensionList * ext, GstCaps * caps)
+{
+  gboolean result = TRUE;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next) {
+    result = gst_rtsp_extension_configure_stream ((GstRTSPExtension *)
+        item->data, caps);
+    if (!result)
+      break;
+  }
+  return result;
+}
+
+/* Let every extension contribute to the transport string; the result of
+ * the last extension wins. */
+GstRTSPResult
+gst_rtsp_ext_list_get_transports (GstRTSPExtensionList * ext,
+    GstRTSPLowerTrans protocols, gchar ** transport)
+{
+  GstRTSPResult result = GST_RTSP_OK;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    result = gst_rtsp_extension_get_transports ((GstRTSPExtension *) item->data,
+        protocols, transport);
+
+  return result;
+}
+
+/* Notify every extension of the stream selection for @url; the result
+ * of the last extension wins. */
+GstRTSPResult
+gst_rtsp_ext_list_stream_select (GstRTSPExtensionList * ext, GstRTSPUrl * url)
+{
+  GstRTSPResult result = GST_RTSP_OK;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    result = gst_rtsp_extension_stream_select ((GstRTSPExtension *) item->data,
+        url);
+
+  return result;
+}
+
+/* Connect @c_handler to @detailed_signal on every extension instance. */
+void
+gst_rtsp_ext_list_connect (GstRTSPExtensionList * ext,
+    const gchar * detailed_signal, GCallback c_handler, gpointer data)
+{
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next)
+    g_signal_connect (item->data, detailed_signal, c_handler, data);
+}
+
+/* Offer a server request to each extension in turn until one handles it
+ * (returns something other than GST_RTSP_ENOTIMPL). */
+GstRTSPResult
+gst_rtsp_ext_list_receive_request (GstRTSPExtensionList * ext,
+    GstRTSPMessage * req)
+{
+  GstRTSPResult result = GST_RTSP_ENOTIMPL;
+  GList *item;
+
+  for (item = ext->extensions; item != NULL; item = item->next) {
+    result = gst_rtsp_extension_receive_request ((GstRTSPExtension *)
+        item->data, req);
+    if (result != GST_RTSP_ENOTIMPL)
+      break;
+  }
+  return result;
+}
diff --git a/gst/rtsp/gstrtspext.h b/gst/rtsp/gstrtspext.h
new file mode 100644
index 0000000000..2e877968eb
--- /dev/null
+++ b/gst/rtsp/gstrtspext.h
@@ -0,0 +1,83 @@
+/* GStreamer
+ * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __GST_RTSP_EXT_H__
+#define __GST_RTSP_EXT_H__
+
+#include <gst/gst.h>
+#include <gst/rtsp/gstrtspextension.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstRTSPExtensionList GstRTSPExtensionList;
+
+/* A list of instantiated RTSP extension elements; the gst_rtsp_ext_list_*
+ * calls below fan the corresponding operation out to every extension. */
+struct _GstRTSPExtensionList
+{
+  GList *extensions;
+};
+
+/* one-time registry scan for extension factories */
+void gst_rtsp_ext_list_init (void);
+
+/* create a list with one fresh instance per extension factory / free it */
+GstRTSPExtensionList * gst_rtsp_ext_list_get (void);
+void gst_rtsp_ext_list_free (GstRTSPExtensionList *ext);
+
+/* let extensions inspect a server response */
+gboolean gst_rtsp_ext_list_detect_server (GstRTSPExtensionList *ext, GstRTSPMessage *resp);
+
+/* request/response and SDP hooks, applied to every extension */
+GstRTSPResult gst_rtsp_ext_list_before_send (GstRTSPExtensionList *ext, GstRTSPMessage *req);
+GstRTSPResult gst_rtsp_ext_list_after_send (GstRTSPExtensionList *ext, GstRTSPMessage *req,
+    GstRTSPMessage *resp);
+GstRTSPResult gst_rtsp_ext_list_parse_sdp (GstRTSPExtensionList *ext, GstSDPMessage *sdp,
+    GstStructure *s);
+GstRTSPResult gst_rtsp_ext_list_setup_media (GstRTSPExtensionList *ext, GstSDPMedia *media);
+gboolean gst_rtsp_ext_list_configure_stream (GstRTSPExtensionList *ext, GstCaps *caps);
+GstRTSPResult gst_rtsp_ext_list_get_transports (GstRTSPExtensionList *ext, GstRTSPLowerTrans protocols,
+    gchar **transport);
+GstRTSPResult gst_rtsp_ext_list_stream_select (GstRTSPExtensionList *ext, GstRTSPUrl *url);
+
+/* connect a handler to a signal on every extension */
+void gst_rtsp_ext_list_connect (GstRTSPExtensionList *ext,
+    const gchar *detailed_signal, GCallback c_handler,
+    gpointer data);
+/* offer a server request to extensions until one handles it */
+GstRTSPResult gst_rtsp_ext_list_receive_request (GstRTSPExtensionList *ext, GstRTSPMessage *req);
+
+G_END_DECLS
+
+#endif /* __GST_RTSP_EXT_H__ */
diff --git a/gst/rtsp/gstrtspsrc.c b/gst/rtsp/gstrtspsrc.c
new file mode 100644
index 0000000000..063b761743
--- /dev/null
+++ b/gst/rtsp/gstrtspsrc.c
@@ -0,0 +1,10030 @@
+/* GStreamer
+ * Copyright (C) <2005,2006> Wim Taymans <wim at fluendo dot com>
+ * <2006> Lutz Mueller <lutz at topfrose dot de>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+/**
+ * SECTION:element-rtspsrc
+ * @title: rtspsrc
+ *
+ * Makes a connection to an RTSP server and reads the data.
+ * rtspsrc strictly follows RFC 2326 and therefore does not (yet) support
+ * RealMedia/Quicktime/Microsoft extensions.
+ *
+ * RTSP supports transport over TCP or UDP in unicast or multicast mode. By
+ * default rtspsrc will negotiate a connection in the following order:
+ * UDP unicast/UDP multicast/TCP. The order cannot be changed but the allowed
+ * protocols can be controlled with the #GstRTSPSrc:protocols property.
+ *
+ * rtspsrc currently understands SDP as the format of the session description.
+ * For each stream listed in the SDP a new rtp_stream\%d pad will be created
+ * with caps derived from the SDP media description. This is a caps of mime type
+ * "application/x-rtp" that can be connected to any available RTP depayloader
+ * element.
+ *
+ * rtspsrc will internally instantiate an RTP session manager element
+ * that will handle the RTCP messages to and from the server, jitter removal,
+ * packet reordering along with providing a clock for the pipeline.
+ * This feature is implemented using the gstrtpbin element.
+ *
+ * rtspsrc acts like a live source and will therefore only generate data in the
+ * PLAYING state.
+ *
+ * If a RTP session times out then the rtspsrc will generate an element message
+ * named "GstRTSPSrcTimeout". Currently this is only supported for timeouts
+ * triggered by RTCP.
+ *
+ * The message's structure contains three fields:
+ *
+ * GstRTSPSrcTimeoutCause `cause`: the cause of the timeout.
+ *
+ * #gint `stream-number`: an internal identifier of the stream that timed out.
+ *
+ * #guint `ssrc`: the SSRC of the stream that timed out.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 rtspsrc location=rtsp://some.server/url ! fakesink
+ * ]| Establish a connection to an RTSP server and send the raw RTP packets to a
+ * fakesink.
+ *
+ * NOTE: rtspsrc will send a PAUSE command to the server if you set the
+ * element to the PAUSED state, and will send a PLAY command if you set it to
+ * the PLAYING state.
+ *
+ * Unfortunately, going to the NULL state involves going through PAUSED, so
+ * rtspsrc does not know the difference and will send a PAUSE when you wanted
+ * a TEARDOWN. The workaround is to hook into the `before-send` signal and
+ * return FALSE in this case.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#ifdef HAVE_UNISTD_H
+#include <unistd.h>
+#endif /* HAVE_UNISTD_H */
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <stdarg.h>
+
+#include <gst/net/gstnet.h>
+#include <gst/sdp/gstsdpmessage.h>
+#include <gst/sdp/gstmikey.h>
+#include <gst/rtp/rtp.h>
+
+#include "gst/gst-i18n-plugin.h"
+
+#include "gstrtspelements.h"
+#include "gstrtspsrc.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtspsrc_debug);
+#define GST_CAT_DEFAULT (rtspsrc_debug)
+
+static GstStaticPadTemplate rtptemplate = GST_STATIC_PAD_TEMPLATE ("stream_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("application/x-rtp; application/x-rdt"));
+
+/* templates used internally */
+static GstStaticPadTemplate anysrctemplate =
+GST_STATIC_PAD_TEMPLATE ("internalsrc_%u",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate anysinktemplate =
+GST_STATIC_PAD_TEMPLATE ("internalsink_%u",
+ GST_PAD_SINK,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS_ANY);
+
+enum
+{
+ SIGNAL_HANDLE_REQUEST,
+ SIGNAL_ON_SDP,
+ SIGNAL_SELECT_STREAM,
+ SIGNAL_NEW_MANAGER,
+ SIGNAL_REQUEST_RTCP_KEY,
+ SIGNAL_ACCEPT_CERTIFICATE,
+ SIGNAL_BEFORE_SEND,
+ SIGNAL_PUSH_BACKCHANNEL_BUFFER,
+ SIGNAL_GET_PARAMETER,
+ SIGNAL_GET_PARAMETERS,
+ SIGNAL_SET_PARAMETER,
+ LAST_SIGNAL
+};
+
+enum _GstRtspSrcRtcpSyncMode
+{
+ RTCP_SYNC_ALWAYS,
+ RTCP_SYNC_INITIAL,
+ RTCP_SYNC_RTP
+};
+
+enum _GstRtspSrcBufferMode
+{
+ BUFFER_MODE_NONE,
+ BUFFER_MODE_SLAVE,
+ BUFFER_MODE_BUFFER,
+ BUFFER_MODE_AUTO,
+ BUFFER_MODE_SYNCED
+};
+
+#define GST_TYPE_RTSP_SRC_BUFFER_MODE (gst_rtsp_src_buffer_mode_get_type())
+static GType
+gst_rtsp_src_buffer_mode_get_type (void)
+{
+ static GType buffer_mode_type = 0;
+ static const GEnumValue buffer_modes[] = {
+ {BUFFER_MODE_NONE, "Only use RTP timestamps", "none"},
+ {BUFFER_MODE_SLAVE, "Slave receiver to sender clock", "slave"},
+ {BUFFER_MODE_BUFFER, "Do low/high watermark buffering", "buffer"},
+ {BUFFER_MODE_AUTO, "Choose mode depending on stream live", "auto"},
+ {BUFFER_MODE_SYNCED, "Synchronized sender and receiver clocks", "synced"},
+ {0, NULL, NULL},
+ };
+
+ if (!buffer_mode_type) {
+ buffer_mode_type =
+ g_enum_register_static ("GstRTSPSrcBufferMode", buffer_modes);
+ }
+ return buffer_mode_type;
+}
+
+enum _GstRtspSrcNtpTimeSource
+{
+ NTP_TIME_SOURCE_NTP,
+ NTP_TIME_SOURCE_UNIX,
+ NTP_TIME_SOURCE_RUNNING_TIME,
+ NTP_TIME_SOURCE_CLOCK_TIME
+};
+
+#define DEBUG_RTSP(__self,msg) gst_rtspsrc_print_rtsp_message (__self, msg)
+#define DEBUG_SDP(__self,msg) gst_rtspsrc_print_sdp_message (__self, msg)
+
+#define GST_TYPE_RTSP_SRC_NTP_TIME_SOURCE (gst_rtsp_src_ntp_time_source_get_type())
+static GType
+gst_rtsp_src_ntp_time_source_get_type (void)
+{
+ static GType ntp_time_source_type = 0;
+ static const GEnumValue ntp_time_source_values[] = {
+ {NTP_TIME_SOURCE_NTP, "NTP time based on realtime clock", "ntp"},
+ {NTP_TIME_SOURCE_UNIX, "UNIX time based on realtime clock", "unix"},
+ {NTP_TIME_SOURCE_RUNNING_TIME,
+ "Running time based on pipeline clock",
+ "running-time"},
+ {NTP_TIME_SOURCE_CLOCK_TIME, "Pipeline clock time", "clock-time"},
+ {0, NULL, NULL},
+ };
+
+ if (!ntp_time_source_type) {
+ ntp_time_source_type =
+ g_enum_register_static ("GstRTSPSrcNtpTimeSource",
+ ntp_time_source_values);
+ }
+ return ntp_time_source_type;
+}
+
+enum _GstRtspBackchannel
+{
+ BACKCHANNEL_NONE,
+ BACKCHANNEL_ONVIF
+};
+
+#define GST_TYPE_RTSP_BACKCHANNEL (gst_rtsp_backchannel_get_type())
+static GType
+gst_rtsp_backchannel_get_type (void)
+{
+ static GType backchannel_type = 0;
+ static const GEnumValue backchannel_values[] = {
+ {BACKCHANNEL_NONE, "No backchannel", "none"},
+ {BACKCHANNEL_ONVIF, "ONVIF audio backchannel", "onvif"},
+ {0, NULL, NULL},
+ };
+
+ if (G_UNLIKELY (backchannel_type == 0)) {
+ backchannel_type =
+ g_enum_register_static ("GstRTSPBackchannel", backchannel_values);
+ }
+ return backchannel_type;
+}
+
+#define BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL "www.onvif.org/ver20/backchannel"
+
+#define DEFAULT_LOCATION NULL
+#define DEFAULT_PROTOCOLS GST_RTSP_LOWER_TRANS_UDP | GST_RTSP_LOWER_TRANS_UDP_MCAST | GST_RTSP_LOWER_TRANS_TCP
+#define DEFAULT_DEBUG FALSE
+#define DEFAULT_RETRY 20
+#define DEFAULT_TIMEOUT 5000000
+#define DEFAULT_UDP_BUFFER_SIZE 0x80000
+#define DEFAULT_TCP_TIMEOUT 20000000
+#define DEFAULT_LATENCY_MS 2000
+#define DEFAULT_DROP_ON_LATENCY FALSE
+#define DEFAULT_CONNECTION_SPEED 0
+#define DEFAULT_NAT_METHOD GST_RTSP_NAT_DUMMY
+#define DEFAULT_DO_RTCP TRUE
+#define DEFAULT_DO_RTSP_KEEP_ALIVE TRUE
+#define DEFAULT_PROXY NULL
+#define DEFAULT_RTP_BLOCKSIZE 0
+#define DEFAULT_USER_ID NULL
+#define DEFAULT_USER_PW NULL
+#define DEFAULT_BUFFER_MODE BUFFER_MODE_AUTO
+#define DEFAULT_PORT_RANGE NULL
+#define DEFAULT_SHORT_HEADER FALSE
+#define DEFAULT_PROBATION 2
+#define DEFAULT_UDP_RECONNECT TRUE
+#define DEFAULT_MULTICAST_IFACE NULL
+#define DEFAULT_NTP_SYNC FALSE
+#define DEFAULT_USE_PIPELINE_CLOCK FALSE
+#define DEFAULT_TLS_VALIDATION_FLAGS G_TLS_CERTIFICATE_VALIDATE_ALL
+#define DEFAULT_TLS_DATABASE NULL
+#define DEFAULT_TLS_INTERACTION NULL
+#define DEFAULT_DO_RETRANSMISSION TRUE
+#define DEFAULT_NTP_TIME_SOURCE NTP_TIME_SOURCE_NTP
+#define DEFAULT_USER_AGENT "GStreamer/" PACKAGE_VERSION
+#define DEFAULT_MAX_RTCP_RTP_TIME_DIFF 1000
+#define DEFAULT_RFC7273_SYNC FALSE
+#define DEFAULT_MAX_TS_OFFSET_ADJUSTMENT G_GUINT64_CONSTANT(0)
+#define DEFAULT_MAX_TS_OFFSET G_GINT64_CONSTANT(3000000000)
+#define DEFAULT_VERSION GST_RTSP_VERSION_1_0
+#define DEFAULT_BACKCHANNEL GST_RTSP_BACKCHANNEL_NONE
+#define DEFAULT_TEARDOWN_TIMEOUT (100 * GST_MSECOND)
+#define DEFAULT_ONVIF_MODE FALSE
+#define DEFAULT_ONVIF_RATE_CONTROL TRUE
+#define DEFAULT_IS_LIVE TRUE
+#define DEFAULT_IGNORE_X_SERVER_REPLY FALSE
+
+enum
+{
+ PROP_0,
+ PROP_LOCATION,
+ PROP_PROTOCOLS,
+ PROP_DEBUG,
+ PROP_RETRY,
+ PROP_TIMEOUT,
+ PROP_TCP_TIMEOUT,
+ PROP_LATENCY,
+ PROP_DROP_ON_LATENCY,
+ PROP_CONNECTION_SPEED,
+ PROP_NAT_METHOD,
+ PROP_DO_RTCP,
+ PROP_DO_RTSP_KEEP_ALIVE,
+ PROP_PROXY,
+ PROP_PROXY_ID,
+ PROP_PROXY_PW,
+ PROP_RTP_BLOCKSIZE,
+ PROP_USER_ID,
+ PROP_USER_PW,
+ PROP_BUFFER_MODE,
+ PROP_PORT_RANGE,
+ PROP_UDP_BUFFER_SIZE,
+ PROP_SHORT_HEADER,
+ PROP_PROBATION,
+ PROP_UDP_RECONNECT,
+ PROP_MULTICAST_IFACE,
+ PROP_NTP_SYNC,
+ PROP_USE_PIPELINE_CLOCK,
+ PROP_SDES,
+ PROP_TLS_VALIDATION_FLAGS,
+ PROP_TLS_DATABASE,
+ PROP_TLS_INTERACTION,
+ PROP_DO_RETRANSMISSION,
+ PROP_NTP_TIME_SOURCE,
+ PROP_USER_AGENT,
+ PROP_MAX_RTCP_RTP_TIME_DIFF,
+ PROP_RFC7273_SYNC,
+ PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ PROP_MAX_TS_OFFSET,
+ PROP_DEFAULT_VERSION,
+ PROP_BACKCHANNEL,
+ PROP_TEARDOWN_TIMEOUT,
+ PROP_ONVIF_MODE,
+ PROP_ONVIF_RATE_CONTROL,
+ PROP_IS_LIVE,
+ PROP_IGNORE_X_SERVER_REPLY
+};
+
+#define GST_TYPE_RTSP_NAT_METHOD (gst_rtsp_nat_method_get_type())
+static GType
+gst_rtsp_nat_method_get_type (void)
+{
+ static GType rtsp_nat_method_type = 0;
+ static const GEnumValue rtsp_nat_method[] = {
+ {GST_RTSP_NAT_NONE, "None", "none"},
+ {GST_RTSP_NAT_DUMMY, "Send Dummy packets", "dummy"},
+ {0, NULL, NULL},
+ };
+
+ if (!rtsp_nat_method_type) {
+ rtsp_nat_method_type =
+ g_enum_register_static ("GstRTSPNatMethod", rtsp_nat_method);
+ }
+ return rtsp_nat_method_type;
+}
+
+#define RTSP_SRC_RESPONSE_ERROR(src, response_msg, err_cat, err_code, error_message) \
+ do { \
+ GST_ELEMENT_ERROR_WITH_DETAILS((src), err_cat, err_code, ("%s", error_message), \
+ ("%s (%d)", (response_msg)->type_data.response.reason, (response_msg)->type_data.response.code), \
+ ("rtsp-status-code", G_TYPE_UINT, (response_msg)->type_data.response.code, \
+ "rtsp-status-reason", G_TYPE_STRING, GST_STR_NULL((response_msg)->type_data.response.reason), NULL)); \
+ } while (0)
+
+typedef struct _ParameterRequest
+{
+ gint cmd;
+ gchar *content_type;
+ GString *body;
+ GstPromise *promise;
+} ParameterRequest;
+
+static void gst_rtspsrc_finalize (GObject * object);
+
+static void gst_rtspsrc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_rtspsrc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstClock *gst_rtspsrc_provide_clock (GstElement * element);
+
+static void gst_rtspsrc_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+
+static gboolean gst_rtspsrc_set_proxy (GstRTSPSrc * rtsp, const gchar * proxy);
+static void gst_rtspsrc_set_tcp_timeout (GstRTSPSrc * rtspsrc, guint64 timeout);
+
+static GstStateChangeReturn gst_rtspsrc_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_rtspsrc_send_event (GstElement * element, GstEvent * event);
+static void gst_rtspsrc_handle_message (GstBin * bin, GstMessage * message);
+
+static gboolean gst_rtspsrc_setup_auth (GstRTSPSrc * src,
+ GstRTSPMessage * response);
+
+static gboolean gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd,
+ gint mask);
+static GstRTSPResult gst_rtspsrc_send_cb (GstRTSPExtension * ext,
+ GstRTSPMessage * request, GstRTSPMessage * response, GstRTSPSrc * src);
+
+static GstRTSPResult gst_rtspsrc_open (GstRTSPSrc * src, gboolean async);
+static GstRTSPResult gst_rtspsrc_play (GstRTSPSrc * src, GstSegment * segment,
+ gboolean async, const gchar * seek_style);
+static GstRTSPResult gst_rtspsrc_pause (GstRTSPSrc * src, gboolean async);
+static GstRTSPResult gst_rtspsrc_close (GstRTSPSrc * src, gboolean async,
+ gboolean only_close);
+
+static gboolean gst_rtspsrc_uri_set_uri (GstURIHandler * handler,
+ const gchar * uri, GError ** error);
+static gchar *gst_rtspsrc_uri_get_uri (GstURIHandler * handler);
+
+static gboolean gst_rtspsrc_activate_streams (GstRTSPSrc * src);
+static gboolean gst_rtspsrc_loop (GstRTSPSrc * src);
+static gboolean gst_rtspsrc_stream_push_event (GstRTSPSrc * src,
+ GstRTSPStream * stream, GstEvent * event);
+static gboolean gst_rtspsrc_push_event (GstRTSPSrc * src, GstEvent * event);
+static void gst_rtspsrc_connection_flush (GstRTSPSrc * src, gboolean flush);
+static GstRTSPResult gst_rtsp_conninfo_close (GstRTSPSrc * src,
+ GstRTSPConnInfo * info, gboolean free);
+static void
+gst_rtspsrc_print_rtsp_message (GstRTSPSrc * src, const GstRTSPMessage * msg);
+static void
+gst_rtspsrc_print_sdp_message (GstRTSPSrc * src, const GstSDPMessage * msg);
+
+static GstRTSPResult
+gst_rtspsrc_get_parameter (GstRTSPSrc * src, ParameterRequest * req);
+
+static GstRTSPResult
+gst_rtspsrc_set_parameter (GstRTSPSrc * src, ParameterRequest * req);
+
+static gboolean get_parameter (GstRTSPSrc * src, const gchar * parameter,
+ const gchar * content_type, GstPromise * promise);
+
+static gboolean get_parameters (GstRTSPSrc * src, gchar ** parameters,
+ const gchar * content_type, GstPromise * promise);
+
+static gboolean set_parameter (GstRTSPSrc * src, const gchar * name,
+ const gchar * value, const gchar * content_type, GstPromise * promise);
+
+static GstFlowReturn gst_rtspsrc_push_backchannel_buffer (GstRTSPSrc * src,
+ guint id, GstSample * sample);
+
+typedef struct
+{
+ guint8 pt;
+ GstCaps *caps;
+} PtMapItem;
+
+/* commands we send to out loop to notify it of events */
+#define CMD_OPEN (1 << 0)
+#define CMD_PLAY (1 << 1)
+#define CMD_PAUSE (1 << 2)
+#define CMD_CLOSE (1 << 3)
+#define CMD_WAIT (1 << 4)
+#define CMD_RECONNECT (1 << 5)
+#define CMD_LOOP (1 << 6)
+#define CMD_GET_PARAMETER (1 << 7)
+#define CMD_SET_PARAMETER (1 << 8)
+
+/* mask for all commands */
+#define CMD_ALL ((CMD_SET_PARAMETER << 1) - 1)
+
+#define GST_ELEMENT_PROGRESS(el, type, code, text) \
+G_STMT_START { \
+ gchar *__txt = _gst_element_error_printf text; \
+ gst_element_post_message (GST_ELEMENT_CAST (el), \
+ gst_message_new_progress (GST_OBJECT_CAST (el), \
+ GST_PROGRESS_TYPE_ ##type, code, __txt)); \
+ g_free (__txt); \
+} G_STMT_END
+
+static guint gst_rtspsrc_signals[LAST_SIGNAL] = { 0 };
+
+#define gst_rtspsrc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstRTSPSrc, gst_rtspsrc, GST_TYPE_BIN,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_rtspsrc_uri_handler_init));
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (rtspsrc, "rtspsrc", GST_RANK_NONE,
+ GST_TYPE_RTSPSRC, rtsp_element_init (plugin));
+
+#ifndef GST_DISABLE_GST_DEBUG
+static inline const char *
+cmd_to_string (guint cmd)
+{
+ switch (cmd) {
+ case CMD_OPEN:
+ return "OPEN";
+ case CMD_PLAY:
+ return "PLAY";
+ case CMD_PAUSE:
+ return "PAUSE";
+ case CMD_CLOSE:
+ return "CLOSE";
+ case CMD_WAIT:
+ return "WAIT";
+ case CMD_RECONNECT:
+ return "RECONNECT";
+ case CMD_LOOP:
+ return "LOOP";
+ case CMD_GET_PARAMETER:
+ return "GET_PARAMETER";
+ case CMD_SET_PARAMETER:
+ return "SET_PARAMETER";
+ }
+
+ return "unknown";
+}
+#endif
+
/* Default handler for the "select-stream" signal: accept every stream.
 * User handlers combined via select_stream_accum may override this by
 * returning FALSE. */
static gboolean
default_select_stream (GstRTSPSrc * src, guint id, GstCaps * caps)
{
  GST_DEBUG_OBJECT (src, "default handler");
  return TRUE;
}
+
+static gboolean
+select_stream_accum (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer data)
+{
+ gboolean myboolean;
+
+ myboolean = g_value_get_boolean (handler_return);
+ GST_DEBUG ("accum %d", myboolean);
+ g_value_set_boolean (return_accu, myboolean);
+
+ /* stop emission if FALSE */
+ return myboolean;
+}
+
/* Default handler for the "before-send" signal: always allow the RTSP
 * message to be sent. User handlers combined via before_send_accum may
 * veto sending by returning FALSE. */
static gboolean
default_before_send (GstRTSPSrc * src, GstRTSPMessage * msg)
{
  GST_DEBUG_OBJECT (src, "default handler");
  return TRUE;
}
+
+static gboolean
+before_send_accum (GSignalInvocationHint * ihint,
+ GValue * return_accu, const GValue * handler_return, gpointer data)
+{
+ gboolean myboolean;
+
+ myboolean = g_value_get_boolean (handler_return);
+ g_value_set_boolean (return_accu, myboolean);
+
+ /* prevent send if FALSE */
+ return myboolean;
+}
+
+static void
+gst_rtspsrc_class_init (GstRTSPSrcClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBinClass *gstbin_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbin_class = (GstBinClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (rtspsrc_debug, "rtspsrc", 0, "RTSP src");
+
+ gobject_class->set_property = gst_rtspsrc_set_property;
+ gobject_class->get_property = gst_rtspsrc_get_property;
+
+ gobject_class->finalize = gst_rtspsrc_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_LOCATION,
+ g_param_spec_string ("location", "RTSP Location",
+ "Location of the RTSP url to read",
+ DEFAULT_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PROTOCOLS,
+ g_param_spec_flags ("protocols", "Protocols",
+ "Allowed lower transport protocols", GST_TYPE_RTSP_LOWER_TRANS,
+ DEFAULT_PROTOCOLS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DEBUG,
+ g_param_spec_boolean ("debug", "Debug",
+ "Dump request and response messages to stdout"
+ "(DEPRECATED: Printed all RTSP message to gstreamer log as 'log' level)",
+ DEFAULT_DEBUG,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+ g_object_class_install_property (gobject_class, PROP_RETRY,
+ g_param_spec_uint ("retry", "Retry",
+ "Max number of retries when allocating RTP ports.",
+ 0, G_MAXUINT16, DEFAULT_RETRY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_TIMEOUT,
+ g_param_spec_uint64 ("timeout", "Timeout",
+ "Retry TCP transport after UDP timeout microseconds (0 = disabled)",
+ 0, G_MAXUINT64, DEFAULT_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_TCP_TIMEOUT,
+ g_param_spec_uint64 ("tcp-timeout", "TCP Timeout",
+ "Fail after timeout microseconds on TCP connections (0 = disabled)",
+ 0, G_MAXUINT64, DEFAULT_TCP_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_LATENCY,
+ g_param_spec_uint ("latency", "Buffer latency in ms",
+ "Amount of ms to buffer", 0, G_MAXUINT, DEFAULT_LATENCY_MS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_DROP_ON_LATENCY,
+ g_param_spec_boolean ("drop-on-latency",
+ "Drop buffers when maximum latency is reached",
+ "Tells the jitterbuffer to never exceed the given latency in size",
+ DEFAULT_DROP_ON_LATENCY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_CONNECTION_SPEED,
+ g_param_spec_uint64 ("connection-speed", "Connection Speed",
+ "Network connection speed in kbps (0 = unknown)",
+ 0, G_MAXUINT64 / 1000, DEFAULT_CONNECTION_SPEED,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NAT_METHOD,
+ g_param_spec_enum ("nat-method", "NAT Method",
+ "Method to use for traversing firewalls and NAT",
+ GST_TYPE_RTSP_NAT_METHOD, DEFAULT_NAT_METHOD,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:do-rtcp:
+ *
+ * Enable RTCP support. Some old servers don't like RTCP; in that case
+ * this property needs to be set to FALSE.
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RTCP,
+ g_param_spec_boolean ("do-rtcp", "Do RTCP",
+ "Send RTCP packets, disable for old incompatible server.",
+ DEFAULT_DO_RTCP, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:do-rtsp-keep-alive:
+ *
+ * Enable RTSP keep alive support. Some old servers don't like RTSP
+ * keep alive; in that case this property needs to be set to FALSE.
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RTSP_KEEP_ALIVE,
+ g_param_spec_boolean ("do-rtsp-keep-alive", "Do RTSP Keep Alive",
+ "Send RTSP keep alive packets, disable for old incompatible server.",
+ DEFAULT_DO_RTSP_KEEP_ALIVE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:proxy:
+ *
+ * Set the proxy parameters. This has to be a string of the format
+ * [http://][user:passwd@]host[:port].
+ */
+ g_object_class_install_property (gobject_class, PROP_PROXY,
+ g_param_spec_string ("proxy", "Proxy",
+ "Proxy settings for HTTP tunneling. Format: [http://][user:passwd@]host[:port]",
+ DEFAULT_PROXY, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRTSPSrc:proxy-id:
+ *
+ * Sets the proxy URI user id for authentication. If the URI set via the
+ * "proxy" property contains a user-id already, that will take precedence.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_PROXY_ID,
+ g_param_spec_string ("proxy-id", "proxy-id",
+ "HTTP proxy URI user id for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstRTSPSrc:proxy-pw:
+ *
+ * Sets the proxy URI password for authentication. If the URI set via the
+ * "proxy" property contains a password already, that will take precedence.
+ *
+ * Since: 1.2
+ */
+ g_object_class_install_property (gobject_class, PROP_PROXY_PW,
+ g_param_spec_string ("proxy-pw", "proxy-pw",
+ "HTTP proxy URI user password for authentication", "",
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:rtp-blocksize:
+ *
+ * RTP package size to suggest to server.
+ */
+ g_object_class_install_property (gobject_class, PROP_RTP_BLOCKSIZE,
+ g_param_spec_uint ("rtp-blocksize", "RTP Blocksize",
+ "RTP package size to suggest to server (0 = disabled)",
+ 0, 65536, DEFAULT_RTP_BLOCKSIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class,
+ PROP_USER_ID,
+ g_param_spec_string ("user-id", "user-id",
+ "RTSP location URI user id for authentication", DEFAULT_USER_ID,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_USER_PW,
+ g_param_spec_string ("user-pw", "user-pw",
+ "RTSP location URI user password for authentication", DEFAULT_USER_PW,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:buffer-mode:
+ *
+ * Control the buffering and timestamping mode used by the jitterbuffer.
+ */
+ g_object_class_install_property (gobject_class, PROP_BUFFER_MODE,
+ g_param_spec_enum ("buffer-mode", "Buffer Mode",
+ "Control the buffering algorithm in use",
+ GST_TYPE_RTSP_SRC_BUFFER_MODE, DEFAULT_BUFFER_MODE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:port-range:
+ *
+ * Configure the client port numbers that can be used to receive RTP and
+ * RTCP.
+ */
+ g_object_class_install_property (gobject_class, PROP_PORT_RANGE,
+ g_param_spec_string ("port-range", "Port range",
+ "Client port range that can be used to receive RTP and RTCP data, "
+ "eg. 3000-3005 (NULL = no restrictions)", DEFAULT_PORT_RANGE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:udp-buffer-size:
+ *
+ * Size of the kernel UDP receive buffer in bytes.
+ */
+ g_object_class_install_property (gobject_class, PROP_UDP_BUFFER_SIZE,
+ g_param_spec_int ("udp-buffer-size", "UDP Buffer Size",
+ "Size of the kernel UDP receive buffer in bytes, 0=default",
+ 0, G_MAXINT, DEFAULT_UDP_BUFFER_SIZE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:short-header:
+ *
+ * Only send the basic RTSP headers for broken encoders.
+ */
+ g_object_class_install_property (gobject_class, PROP_SHORT_HEADER,
+ g_param_spec_boolean ("short-header", "Short Header",
+ "Only send the basic RTSP headers for broken encoders",
+ DEFAULT_SHORT_HEADER, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_PROBATION,
+ g_param_spec_uint ("probation", "Number of probations",
+ "Consecutive packet sequence numbers to accept the source",
+ 0, G_MAXUINT, DEFAULT_PROBATION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_UDP_RECONNECT,
+ g_param_spec_boolean ("udp-reconnect", "Reconnect to the server",
+ "Reconnect to the server if RTSP connection is closed when doing UDP",
+ DEFAULT_UDP_RECONNECT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MULTICAST_IFACE,
+ g_param_spec_string ("multicast-iface", "Multicast Interface",
+ "The network interface on which to join the multicast group",
+ DEFAULT_MULTICAST_IFACE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_NTP_SYNC,
+ g_param_spec_boolean ("ntp-sync", "Sync on NTP clock",
+ "Synchronize received streams to the NTP clock", DEFAULT_NTP_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_USE_PIPELINE_CLOCK,
+ g_param_spec_boolean ("use-pipeline-clock", "Use pipeline clock",
+ "Use the pipeline running-time to set the NTP time in the RTCP SR messages"
+ "(DEPRECATED: Use ntp-time-source property)",
+ DEFAULT_USE_PIPELINE_CLOCK,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::tls-validation-flags:
+ *
+ * TLS certificate validation flags used to validate server
+ * certificate.
+ *
+ * Since: 1.2.1
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_VALIDATION_FLAGS,
+ g_param_spec_flags ("tls-validation-flags", "TLS validation flags",
+ "TLS certificate validation flags used to validate the server certificate",
+ G_TYPE_TLS_CERTIFICATE_FLAGS, DEFAULT_TLS_VALIDATION_FLAGS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::tls-database:
+ *
+ * TLS database with anchor certificate authorities used to validate
+ * the server certificate.
+ *
+ * Since: 1.4
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_DATABASE,
+ g_param_spec_object ("tls-database", "TLS database",
+ "TLS database with anchor certificate authorities used to validate the server certificate",
+ G_TYPE_TLS_DATABASE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::tls-interaction:
+ *
+ * A #GTlsInteraction object to be used when the connection or certificate
+ * database need to interact with the user. This will be used to prompt the
+ * user for passwords where necessary.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_TLS_INTERACTION,
+ g_param_spec_object ("tls-interaction", "TLS interaction",
+ "A GTlsInteraction object to prompt the user for password or certificate",
+ G_TYPE_TLS_INTERACTION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::do-retransmission:
+ *
+ * Attempt to ask the server to retransmit lost packets according to RFC4588.
+ *
+ * Note: currently only works with SSRC-multiplexed retransmission streams
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_DO_RETRANSMISSION,
+ g_param_spec_boolean ("do-retransmission", "Retransmission",
+ "Ask the server to retransmit lost packets",
+ DEFAULT_DO_RETRANSMISSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::ntp-time-source:
+ *
+ * Allows selecting the time source that should be used
+ * for the NTP time in RTCP packets.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_NTP_TIME_SOURCE,
+ g_param_spec_enum ("ntp-time-source", "NTP Time Source",
+ "NTP time source for RTCP packets",
+ GST_TYPE_RTSP_SRC_NTP_TIME_SOURCE, DEFAULT_NTP_TIME_SOURCE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::user-agent:
+ *
+ * The string to set in the User-Agent header.
+ *
+ * Since: 1.6
+ */
+ g_object_class_install_property (gobject_class, PROP_USER_AGENT,
+ g_param_spec_string ("user-agent", "User Agent",
+ "The User-Agent string to send to the server",
+ DEFAULT_USER_AGENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_MAX_RTCP_RTP_TIME_DIFF,
+ g_param_spec_int ("max-rtcp-rtp-time-diff", "Max RTCP RTP Time Diff",
+ "Maximum amount of time in ms that the RTP time in RTCP SRs "
+ "is allowed to be ahead (-1 disabled)", -1, G_MAXINT,
+ DEFAULT_MAX_RTCP_RTP_TIME_DIFF,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_property (gobject_class, PROP_RFC7273_SYNC,
+ g_param_spec_boolean ("rfc7273-sync", "Sync on RFC7273 clock",
+ "Synchronize received streams to the RFC7273 clock "
+ "(requires clock and offset to be provided)", DEFAULT_RFC7273_SYNC,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:default-rtsp-version:
+ *
+ * The preferred RTSP version to use while negotiating the version with the server.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_DEFAULT_VERSION,
+ g_param_spec_enum ("default-rtsp-version",
+ "The RTSP version to try first",
+ "The RTSP version that should be tried first when negotiating version.",
+ GST_TYPE_RTSP_VERSION, DEFAULT_VERSION,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:max-ts-offset-adjustment:
+ *
+ * Syncing time stamps to NTP time adds a time offset. This parameter
+ * specifies the maximum number of nanoseconds per frame that this time offset
+ * may be adjusted with. This is used to avoid sudden large changes to time
+ * stamps.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET_ADJUSTMENT,
+ g_param_spec_uint64 ("max-ts-offset-adjustment",
+ "Max Timestamp Offset Adjustment",
+ "The maximum number of nanoseconds per frame that time stamp offsets "
+ "may be adjusted (0 = no limit).", 0, G_MAXUINT64,
+ DEFAULT_MAX_TS_OFFSET_ADJUSTMENT, G_PARAM_READWRITE |
+ G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:max-ts-offset:
+ *
+ * Used to set an upper limit of how large a time offset may be. This
+ * is used to protect against unrealistic values as a result of either
+ * client,server or clock issues.
+ */
+ g_object_class_install_property (gobject_class, PROP_MAX_TS_OFFSET,
+ g_param_spec_int64 ("max-ts-offset", "Max TS Offset",
+ "The maximum absolute value of the time offset in (nanoseconds). "
+ "Note, if the ntp-sync parameter is set the default value is "
+ "changed to 0 (no limit)", 0, G_MAXINT64, DEFAULT_MAX_TS_OFFSET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:backchannel:
+ *
+ * Select a type of backchannel to setup with the RTSP server.
+ * Default value is "none". Allowed values are "none" and "onvif".
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_BACKCHANNEL,
+ g_param_spec_enum ("backchannel", "Backchannel type",
+ "The type of backchannel to setup. Default is 'none'.",
+ GST_TYPE_RTSP_BACKCHANNEL, BACKCHANNEL_NONE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:teardown-timeout:
+ *
+ * When transitioning PAUSED-READY, allow up to timeout (in nanoseconds)
+ * delay in order to send teardown (0 = disabled)
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_TEARDOWN_TIMEOUT,
+ g_param_spec_uint64 ("teardown-timeout", "Teardown Timeout",
+ "When transitioning PAUSED-READY, allow up to timeout (in nanoseconds) "
+ "delay in order to send teardown (0 = disabled)",
+ 0, G_MAXUINT64, DEFAULT_TEARDOWN_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:onvif-mode:
+ *
+ * Act as an ONVIF client. When set to %TRUE:
+ *
+ * - seeks will be interpreted as nanoseconds since prime epoch (1900-01-01)
+ *
+ * - #GstRTSPSrc:onvif-rate-control can be used to request that the server sends
+ * data as fast as it can
+ *
+ * - TCP is picked as the transport protocol
+ *
+ * - Trickmode flags in seek events are transformed into the appropriate ONVIF
+ * request headers
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_ONVIF_MODE,
+ g_param_spec_boolean ("onvif-mode", "Onvif Mode",
+ "Act as an ONVIF client",
+ DEFAULT_ONVIF_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:onvif-rate-control:
+ *
+ * When in onvif-mode, whether to set Rate-Control to yes or no. When set
+ * to %FALSE, the server will deliver data as fast as the client can consume
+ * it.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_ONVIF_RATE_CONTROL,
+ g_param_spec_boolean ("onvif-rate-control", "Onvif Rate Control",
+ "When in onvif-mode, whether to set Rate-Control to yes or no",
+ DEFAULT_ONVIF_RATE_CONTROL,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:is-live:
+ *
+ * Whether to act as a live source. This is useful in combination with
+ * #GstRTSPSrc:onvif-rate-control set to %FALSE and usage of the TCP
+ * protocol. In that situation, data delivery rate can be entirely
+ * controlled from the client side, enabling features such as frame
+ * stepping and instantaneous rate changes.
+ *
+ * Since: 1.18
+ */
+ g_object_class_install_property (gobject_class, PROP_IS_LIVE,
+ g_param_spec_boolean ("is-live", "Is live",
+ "Whether to act as a live source",
+ DEFAULT_IS_LIVE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc:ignore-x-server-reply:
+ *
+ * When connecting to an RTSP server in tunneled mode (HTTP) the server
+ * usually replies with an x-server-ip-address header. This contains the
+ * address of the intended streaming server. However some servers return an
+ * "invalid" address. Here follows two examples when it might happen.
+ *
+ * 1. A server uses Apache combined with a separate RTSP process to handle
+ * HTTPS requests on port 443. In this case Apache handles TLS and
+ * connects to the local RTSP server, which results in a local
+ * address 127.0.0.1 or ::1 in the header reply. This address is
+ * returned to the actual RTSP client in the header. The client will
+ * receive this address and try to connect to it and fail.
+ *
+ * 2. The client uses an IPv6 link local address with a specified scope id
+ * fe80::aaaa:bbbb:cccc:dddd%eth0 and connects via HTTP on port 80.
+ * The RTSP server receives the connection and returns the address
+ * in the x-server-ip-address header. The client will receive this
+ * address and try to connect to it "as is" without the scope id and
+ * fail.
+ *
+ * In the case of streaming data from RTSP servers like 1 and 2, it's
+ * useful to have the option to simply ignore the x-server-ip-address
+ * header reply and continue using the original address.
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_IGNORE_X_SERVER_REPLY,
+ g_param_spec_boolean ("ignore-x-server-reply",
+ "Ignore x-server-ip-address",
+ "Whether to ignore the x-server-ip-address server header reply",
+ DEFAULT_IGNORE_X_SERVER_REPLY,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstRTSPSrc::handle-request:
+ * @rtspsrc: a #GstRTSPSrc
+ * @request: a #GstRTSPMessage
+ * @response: a #GstRTSPMessage
+ *
+ * Handle a server request in @request and prepare @response.
+ *
+ * This signal is called from the streaming thread, you should therefore not
+ * do any state changes on @rtspsrc because this might deadlock. If you want
+ * to modify the state as a result of this signal, post a
+ * #GST_MESSAGE_REQUEST_STATE message on the bus or signal the main thread
+ * in some other way.
+ *
+ * Since: 1.2
+ */
+ gst_rtspsrc_signals[SIGNAL_HANDLE_REQUEST] =
+ g_signal_new ("handle-request", G_TYPE_FROM_CLASS (klass), 0,
+ 0, NULL, NULL, NULL, G_TYPE_NONE, 2,
+ GST_TYPE_RTSP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE,
+ GST_TYPE_RTSP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRTSPSrc::on-sdp:
+ * @rtspsrc: a #GstRTSPSrc
+ * @sdp: a #GstSDPMessage
+ *
+ * Emitted when the client has retrieved the SDP and before it configures the
+ * streams in the SDP. @sdp can be inspected and modified.
+ *
+ * This signal is called from the streaming thread, you should therefore not
+ * do any state changes on @rtspsrc because this might deadlock. If you want
+ * to modify the state as a result of this signal, post a
+ * #GST_MESSAGE_REQUEST_STATE message on the bus or signal the main thread
+ * in some other way.
+ *
+ * Since: 1.2
+ */
+ gst_rtspsrc_signals[SIGNAL_ON_SDP] =
+ g_signal_new ("on-sdp", G_TYPE_FROM_CLASS (klass), 0,
+ 0, NULL, NULL, NULL, G_TYPE_NONE, 1,
+ GST_TYPE_SDP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRTSPSrc::select-stream:
+ * @rtspsrc: a #GstRTSPSrc
+ * @num: the stream number
+ * @caps: the stream caps
+ *
+ * Emitted before the client decides to configure the stream @num with
+ * @caps.
+ *
+ * Returns: %TRUE when the stream should be selected, %FALSE when the stream
+ * is to be ignored.
+ *
+ * Since: 1.2
+ */
+ gst_rtspsrc_signals[SIGNAL_SELECT_STREAM] =
+ g_signal_new_class_handler ("select-stream", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST,
+ (GCallback) default_select_stream, select_stream_accum, NULL, NULL,
+ G_TYPE_BOOLEAN, 2, G_TYPE_UINT, GST_TYPE_CAPS);
+ /**
+ * GstRTSPSrc::new-manager:
+ * @rtspsrc: a #GstRTSPSrc
+ * @manager: a #GstElement
+ *
+ * Emitted after a new manager (like rtpbin) was created and the default
+ * properties were configured.
+ *
+ * Since: 1.4
+ */
+ gst_rtspsrc_signals[SIGNAL_NEW_MANAGER] =
+ g_signal_new_class_handler ("new-manager", G_TYPE_FROM_CLASS (klass),
+ 0, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, GST_TYPE_ELEMENT);
+
+ /**
+ * GstRTSPSrc::request-rtcp-key:
+ * @rtspsrc: a #GstRTSPSrc
+ * @num: the stream number
+ *
+ * Signal emitted to get the crypto parameters relevant to the RTCP
+ * stream. User should provide the key and the RTCP encryption ciphers
+ * and authentication, and return them wrapped in a GstCaps.
+ *
+ * Since: 1.4
+ */
+ gst_rtspsrc_signals[SIGNAL_REQUEST_RTCP_KEY] =
+ g_signal_new ("request-rtcp-key", G_TYPE_FROM_CLASS (klass),
+ 0, 0, NULL, NULL, NULL, GST_TYPE_CAPS, 1, G_TYPE_UINT);
+
+ /**
+ * GstRTSPSrc::accept-certificate:
+ * @rtspsrc: a #GstRTSPSrc
+ * @peer_cert: the peer's #GTlsCertificate
+ * @errors: the problems with @peer_cert
+ * @user_data: user data set when the signal handler was connected.
+ *
+ * This will directly map to #GTlsConnection 's "accept-certificate"
+ * signal and be performed after the default checks of #GstRTSPConnection
+ * (checking against the #GTlsDatabase with the given #GTlsCertificateFlags)
+ * have failed. If no #GTlsDatabase is set on this connection, only this
+ * signal will be emitted.
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_ACCEPT_CERTIFICATE] =
+ g_signal_new ("accept-certificate", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST, 0, g_signal_accumulator_true_handled, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_TLS_CONNECTION, G_TYPE_TLS_CERTIFICATE,
+ G_TYPE_TLS_CERTIFICATE_FLAGS);
+
+ /**
+ * GstRTSPSrc::before-send:
+ * @rtspsrc: a #GstRTSPSrc
+ * @num: the stream number
+ *
+ * Emitted before each RTSP request is sent, in order to allow
+ * the application to modify send parameters or to skip the message entirely.
+ * This can be used, for example, to work with ONVIF Profile G servers,
+ * which need a different/additional range, rate-control, and intra/x
+ * parameters.
+ *
+ * Returns: %TRUE when the command should be sent, %FALSE when the
+ * command should be dropped.
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_BEFORE_SEND] =
+ g_signal_new_class_handler ("before-send", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST,
+ (GCallback) default_before_send, before_send_accum, NULL, NULL,
+ G_TYPE_BOOLEAN, 1, GST_TYPE_RTSP_MESSAGE | G_SIGNAL_TYPE_STATIC_SCOPE);
+
+ /**
+ * GstRTSPSrc::push-backchannel-buffer:
+ * @rtspsrc: a #GstRTSPSrc
+ * @id: the stream number
+ * @sample: RTP sample to send back
+ *
+ * Send @sample over the backchannel stream @id to the RTSP server
+ * (presumably the ONVIF audio backchannel established during SETUP —
+ * confirm against gst_rtspsrc_push_backchannel_buffer).
+ *
+ * Since: 1.14
+ */
+ gst_rtspsrc_signals[SIGNAL_PUSH_BACKCHANNEL_BUFFER] =
+ g_signal_new ("push-backchannel-buffer", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ push_backchannel_buffer), NULL, NULL, NULL,
+ GST_TYPE_FLOW_RETURN, 2, G_TYPE_UINT, GST_TYPE_SAMPLE);
+
+ /**
+ * GstRTSPSrc::get-parameter:
+ * @rtspsrc: a #GstRTSPSrc
+ * @parameter: the parameter name
+ * @content_type: the content type, or %NULL
+ * @promise: a pointer to #GstPromise that receives the result
+ *
+ * Handle the GET_PARAMETER signal.
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_GET_PARAMETER] =
+ g_signal_new ("get-parameter", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ get_parameter), NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRING, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ /**
+ * GstRTSPSrc::get-parameters:
+ * @rtspsrc: a #GstRTSPSrc
+ * @parameters: a %NULL-terminated array of parameter names
+ * @content_type: the content type, or %NULL
+ * @promise: a pointer to #GstPromise that receives the result
+ *
+ * Handle the GET_PARAMETERS signal.
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_GET_PARAMETERS] =
+ g_signal_new ("get-parameters", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ get_parameters), NULL, NULL, NULL,
+ G_TYPE_BOOLEAN, 3, G_TYPE_STRV, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ /**
+ * GstRTSPSrc::set-parameter:
+ * @rtspsrc: a #GstRTSPSrc
+ * @name: the parameter name
+ * @value: the parameter value
+ * @content_type: the content type, or %NULL
+ * @promise: a pointer to #GstPromise that receives the result
+ *
+ * Handle the SET_PARAMETER signal.
+ *
+ * Returns: %TRUE when the command could be issued, %FALSE otherwise
+ *
+ */
+ gst_rtspsrc_signals[SIGNAL_SET_PARAMETER] =
+ g_signal_new ("set-parameter", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_STRUCT_OFFSET (GstRTSPSrcClass,
+ set_parameter), NULL, NULL, NULL, G_TYPE_BOOLEAN, 4, G_TYPE_STRING,
+ G_TYPE_STRING, G_TYPE_STRING, GST_TYPE_PROMISE);
+
+ gstelement_class->send_event = gst_rtspsrc_send_event;
+ gstelement_class->provide_clock = gst_rtspsrc_provide_clock;
+ gstelement_class->change_state = gst_rtspsrc_change_state;
+
+ gst_element_class_add_static_pad_template (gstelement_class, &rtptemplate);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "RTSP packet receiver", "Source/Network",
+ "Receive data over the network via RTSP (RFC 2326)",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>, "
+ "Lutz Mueller <lutz@topfrose.de>");
+
+ gstbin_class->handle_message = gst_rtspsrc_handle_message;
+
+ klass->push_backchannel_buffer = gst_rtspsrc_push_backchannel_buffer;
+ klass->get_parameter = GST_DEBUG_FUNCPTR (get_parameter);
+ klass->get_parameters = GST_DEBUG_FUNCPTR (get_parameters);
+ klass->set_parameter = GST_DEBUG_FUNCPTR (set_parameter);
+
+ gst_rtsp_ext_list_init ();
+
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_SRC_BUFFER_MODE, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_SRC_NTP_TIME_SOURCE, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_BACKCHANNEL, 0);
+ gst_type_mark_as_plugin_api (GST_TYPE_RTSP_NAT_METHOD, 0);
+}
+
+/* Check that a SET_PARAMETER/GET_PARAMETER name contains no whitespace or
+ * control characters, which would otherwise allow header injection into the
+ * outgoing RTSP request body. Returns TRUE when the name is safe to use. */
+static gboolean
+validate_set_get_parameter_name (const gchar * parameter_name)
+{
+ const gchar *ptr = parameter_name;
+
+ while (*ptr) {
+ /* Don't allow '\r', '\n', '\t', ' ' etc in the parameter name */
+ if (g_ascii_isspace (*ptr) || g_ascii_iscntrl (*ptr)) {
+ GST_DEBUG ("invalid parameter name '%s'", parameter_name);
+ return FALSE;
+ }
+ ptr++;
+ }
+ return TRUE;
+}
+
+/* Validate every entry of the NULL-terminated @parameter_names array.
+ * Returns TRUE only when all names pass validate_set_get_parameter_name(). */
+static gboolean
+validate_set_get_parameters (gchar ** parameter_names)
+{
+ gchar **name;
+
+ for (name = parameter_names; *name != NULL; name++) {
+ if (!validate_set_get_parameter_name (*name))
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+/* "get-parameter" action signal handler: wraps the single @parameter name in
+ * a one-element array and forwards to get_parameters(). Returns TRUE when
+ * the request could be queued. */
+static gboolean
+get_parameter (GstRTSPSrc * src, const gchar * parameter,
+ const gchar * content_type, GstPromise * promise)
+{
+ gchar *names[2] = { (gchar *) parameter, NULL };
+
+ GST_LOG_OBJECT (src, "get_parameter: %s", GST_STR_NULL (parameter));
+
+ if (promise == NULL || parameter == NULL || *parameter == '\0') {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ return get_parameters (src, names, content_type, promise);
+}
+
+/* "get-parameters" action signal handler: queue a GET_PARAMETER request for
+ * the NULL-terminated @parameters array; the server reply is delivered
+ * through @promise. Returns TRUE when the request could be queued.
+ *
+ * Fix: validate @parameters before the log statement — the original called
+ * g_strv_length (parameters) in the log line before the NULL check, which
+ * triggers a GLib critical (NULL strv) when debug logging is active. */
+static gboolean
+get_parameters (GstRTSPSrc * src, gchar ** parameters,
+ const gchar * content_type, GstPromise * promise)
+{
+ ParameterRequest *req;
+
+ if (parameters == NULL || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ GST_LOG_OBJECT (src, "get_parameters: %d", g_strv_length (parameters));
+
+ if (src->state == GST_RTSP_STATE_INVALID) {
+ GST_DEBUG ("invalid state");
+ return FALSE;
+ }
+
+ if (!validate_set_get_parameters (parameters)) {
+ return FALSE;
+ }
+
+ req = g_new0 (ParameterRequest, 1);
+ req->promise = gst_promise_ref (promise);
+ req->cmd = CMD_GET_PARAMETER;
+ /* Set the request body according to RFC 2326 or RFC 7826 */
+ req->body = g_string_new (NULL);
+ while (*parameters) {
+ g_string_append_printf (req->body, "%s:\r\n", *parameters);
+ parameters++;
+ }
+ if (content_type)
+ req->content_type = g_strdup (content_type);
+
+ /* the queue is shared with the command loop thread */
+ GST_OBJECT_LOCK (src);
+ g_queue_push_tail (&src->set_get_param_q, req);
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_loop_send_cmd (src, CMD_GET_PARAMETER, CMD_LOOP);
+
+ return TRUE;
+}
+
+/* "set-parameter" action signal handler: queue a SET_PARAMETER request that
+ * assigns @value to @name; the server reply is delivered through @promise.
+ * Returns TRUE when the request could be queued. */
+static gboolean
+set_parameter (GstRTSPSrc * src, const gchar * name, const gchar * value,
+ const gchar * content_type, GstPromise * promise)
+{
+ ParameterRequest *req;
+
+ GST_LOG_OBJECT (src, "set_parameter: %s: %s", GST_STR_NULL (name),
+ GST_STR_NULL (value));
+
+ if (name == NULL || name[0] == '\0' || value == NULL || promise == NULL) {
+ GST_DEBUG ("invalid input");
+ return FALSE;
+ }
+
+ if (src->state == GST_RTSP_STATE_INVALID) {
+ GST_DEBUG ("invalid state");
+ return FALSE;
+ }
+
+ if (!validate_set_get_parameter_name (name))
+ return FALSE;
+
+ req = g_new0 (ParameterRequest, 1);
+ req->cmd = CMD_SET_PARAMETER;
+ req->promise = gst_promise_ref (promise);
+ /* Request body according to RFC 2326 or RFC 7826: "name: value\r\n" */
+ req->body = g_string_new (NULL);
+ g_string_append_printf (req->body, "%s: %s\r\n", name, value);
+ if (content_type != NULL)
+ req->content_type = g_strdup (content_type);
+
+ /* the queue is shared with the command loop thread */
+ GST_OBJECT_LOCK (src);
+ g_queue_push_tail (&src->set_get_param_q, req);
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_loop_send_cmd (src, CMD_SET_PARAMETER, CMD_LOOP);
+
+ return TRUE;
+}
+
+/* Instance init: set every property-backed field to its default, pick up the
+ * registered RTSP extensions, and initialize the locks, queues and flags used
+ * by the streaming and command threads. */
+static void
+gst_rtspsrc_init (GstRTSPSrc * src)
+{
+ /* property defaults; the dedicated setters are reused so their side
+ * effects (e.g. proxy string parsing) match a later property write */
+ src->conninfo.location = g_strdup (DEFAULT_LOCATION);
+ src->protocols = DEFAULT_PROTOCOLS;
+ src->debug = DEFAULT_DEBUG;
+ src->retry = DEFAULT_RETRY;
+ src->udp_timeout = DEFAULT_TIMEOUT;
+ gst_rtspsrc_set_tcp_timeout (src, DEFAULT_TCP_TIMEOUT);
+ src->latency = DEFAULT_LATENCY_MS;
+ src->drop_on_latency = DEFAULT_DROP_ON_LATENCY;
+ src->connection_speed = DEFAULT_CONNECTION_SPEED;
+ src->nat_method = DEFAULT_NAT_METHOD;
+ src->do_rtcp = DEFAULT_DO_RTCP;
+ src->do_rtsp_keep_alive = DEFAULT_DO_RTSP_KEEP_ALIVE;
+ gst_rtspsrc_set_proxy (src, DEFAULT_PROXY);
+ src->rtp_blocksize = DEFAULT_RTP_BLOCKSIZE;
+ src->user_id = g_strdup (DEFAULT_USER_ID);
+ src->user_pw = g_strdup (DEFAULT_USER_PW);
+ src->buffer_mode = DEFAULT_BUFFER_MODE;
+ src->client_port_range.min = 0;
+ src->client_port_range.max = 0;
+ src->udp_buffer_size = DEFAULT_UDP_BUFFER_SIZE;
+ src->short_header = DEFAULT_SHORT_HEADER;
+ src->probation = DEFAULT_PROBATION;
+ src->udp_reconnect = DEFAULT_UDP_RECONNECT;
+ src->multi_iface = g_strdup (DEFAULT_MULTICAST_IFACE);
+ src->ntp_sync = DEFAULT_NTP_SYNC;
+ src->use_pipeline_clock = DEFAULT_USE_PIPELINE_CLOCK;
+ src->sdes = NULL;
+ src->tls_validation_flags = DEFAULT_TLS_VALIDATION_FLAGS;
+ src->tls_database = DEFAULT_TLS_DATABASE;
+ src->tls_interaction = DEFAULT_TLS_INTERACTION;
+ src->do_retransmission = DEFAULT_DO_RETRANSMISSION;
+ src->ntp_time_source = DEFAULT_NTP_TIME_SOURCE;
+ src->user_agent = g_strdup (DEFAULT_USER_AGENT);
+ src->max_rtcp_rtp_time_diff = DEFAULT_MAX_RTCP_RTP_TIME_DIFF;
+ src->rfc7273_sync = DEFAULT_RFC7273_SYNC;
+ src->max_ts_offset_adjustment = DEFAULT_MAX_TS_OFFSET_ADJUSTMENT;
+ src->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ /* tracks whether the user explicitly set max-ts-offset; if not, its
+ * default is swapped when ntp-sync changes (see set_property) */
+ src->max_ts_offset_is_set = FALSE;
+ src->default_version = DEFAULT_VERSION;
+ src->version = GST_RTSP_VERSION_INVALID;
+ src->teardown_timeout = DEFAULT_TEARDOWN_TIMEOUT;
+ src->onvif_mode = DEFAULT_ONVIF_MODE;
+ src->onvif_rate_control = DEFAULT_ONVIF_RATE_CONTROL;
+ src->is_live = DEFAULT_IS_LIVE;
+ src->seek_seqnum = GST_SEQNUM_INVALID;
+ src->group_id = GST_GROUP_ID_INVALID;
+
+ /* get a list of all extensions */
+ src->extensions = gst_rtsp_ext_list_get ();
+
+ /* connect to send signal */
+ gst_rtsp_ext_list_connect (src->extensions, "send",
+ (GCallback) gst_rtspsrc_send_cb, src);
+
+ /* protects the streaming thread in interleaved mode or the polling
+ * thread in UDP mode. */
+ g_rec_mutex_init (&src->stream_rec_lock);
+
+ /* protects our state changes from multiple invocations */
+ g_rec_mutex_init (&src->state_rec_lock);
+
+ /* queue of pending SET_PARAMETER/GET_PARAMETER requests */
+ g_queue_init (&src->set_get_param_q);
+
+ src->state = GST_RTSP_STATE_INVALID;
+
+ g_mutex_init (&src->conninfo.send_lock);
+ g_mutex_init (&src->conninfo.recv_lock);
+ g_cond_init (&src->cmd_cond);
+
+ g_mutex_init (&src->group_lock);
+
+ /* we behave as a source even though we are a bin */
+ GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
+ gst_bin_set_suppressed_flags (GST_BIN (src),
+ GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK);
+}
+
+/* Release a queued ParameterRequest: drop the promise reference, free the
+ * request body (when one was built), the content type and the struct. */
+static void
+free_param_data (ParameterRequest * req)
+{
+ gst_promise_unref (req->promise);
+ if (req->body != NULL)
+ g_string_free (req->body, TRUE);
+ g_free (req->content_type);
+ g_free (req);
+}
+
+/* GObject::finalize: release everything allocated in init/set_property —
+ * extension list, connection info strings, credentials, SDP, clock, SDES,
+ * TLS objects — and clear the locks, then chain up. */
+static void
+gst_rtspsrc_finalize (GObject * object)
+{
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (object);
+
+ gst_rtsp_ext_list_free (rtspsrc->extensions);
+ g_free (rtspsrc->conninfo.location);
+ gst_rtsp_url_free (rtspsrc->conninfo.url);
+ g_free (rtspsrc->conninfo.url_str);
+ g_free (rtspsrc->user_id);
+ g_free (rtspsrc->user_pw);
+ g_free (rtspsrc->multi_iface);
+ g_free (rtspsrc->user_agent);
+
+ if (rtspsrc->sdp) {
+ gst_sdp_message_free (rtspsrc->sdp);
+ rtspsrc->sdp = NULL;
+ }
+ if (rtspsrc->provided_clock)
+ gst_object_unref (rtspsrc->provided_clock);
+
+ if (rtspsrc->sdes)
+ gst_structure_free (rtspsrc->sdes);
+
+ if (rtspsrc->tls_database)
+ g_object_unref (rtspsrc->tls_database);
+
+ if (rtspsrc->tls_interaction)
+ g_object_unref (rtspsrc->tls_interaction);
+
+ /* free locks */
+ g_rec_mutex_clear (&rtspsrc->stream_rec_lock);
+ g_rec_mutex_clear (&rtspsrc->state_rec_lock);
+
+ g_mutex_clear (&rtspsrc->conninfo.send_lock);
+ g_mutex_clear (&rtspsrc->conninfo.recv_lock);
+ g_cond_clear (&rtspsrc->cmd_cond);
+
+ g_mutex_clear (&rtspsrc->group_lock);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GstElement::provide_clock implementation: hand out a reference to the
+ * clock stored in src->provided_clock when there is one, otherwise defer
+ * to the parent class. */
+static GstClock *
+gst_rtspsrc_provide_clock (GstElement * element)
+{
+ GstRTSPSrc *src = GST_RTSPSRC (element);
+
+ if (src->provided_clock != NULL)
+ return gst_object_ref (src->provided_clock);
+
+ return GST_ELEMENT_CLASS (parent_class)->provide_clock (element);
+}
+
+/* Parse a proxy string of the format [user:passwd@]host[:port] (an optional
+ * "http://" prefix is accepted and stripped) into the proxy_* fields of
+ * @rtsp. A NULL @proxy just clears the fields. Returns FALSE when the
+ * string is malformed.
+ *
+ * Fix: the user:passwd colon is now searched from @p (after the stripped
+ * prefix), not from @proxy. Searching from @proxy matched the colon inside
+ * "http://", making col < p and passing a negative length (cast to gsize)
+ * to g_strndup() below. */
+static gboolean
+gst_rtspsrc_set_proxy (GstRTSPSrc * rtsp, const gchar * proxy)
+{
+ gchar *p, *at, *col;
+
+ /* reset any previously parsed proxy settings */
+ g_free (rtsp->proxy_user);
+ rtsp->proxy_user = NULL;
+ g_free (rtsp->proxy_passwd);
+ rtsp->proxy_passwd = NULL;
+ g_free (rtsp->proxy_host);
+ rtsp->proxy_host = NULL;
+ rtsp->proxy_port = 0;
+
+ p = (gchar *) proxy;
+
+ if (p == NULL)
+ return TRUE;
+
+ /* we allow http:// in front but ignore it */
+ if (g_str_has_prefix (p, "http://"))
+ p += 7;
+
+ at = strchr (p, '@');
+ if (at) {
+ /* look for user:passwd before the '@' */
+ col = strchr (p, ':');
+ if (col == NULL || col > at)
+ return FALSE;
+
+ rtsp->proxy_user = g_strndup (p, col - p);
+ col++;
+ rtsp->proxy_passwd = g_strndup (col, at - col);
+
+ /* move to host */
+ p = at + 1;
+ } else {
+ /* no credentials in the string: fall back to the proxy-id/proxy-pw
+ * properties when they are set */
+ if (rtsp->prop_proxy_id != NULL && *rtsp->prop_proxy_id != '\0')
+ rtsp->proxy_user = g_strdup (rtsp->prop_proxy_id);
+ if (rtsp->prop_proxy_pw != NULL && *rtsp->prop_proxy_pw != '\0')
+ rtsp->proxy_passwd = g_strdup (rtsp->prop_proxy_pw);
+ if (rtsp->proxy_user != NULL || rtsp->proxy_passwd != NULL) {
+ GST_LOG_OBJECT (rtsp, "set proxy user/pw from properties: %s:%s",
+ GST_STR_NULL (rtsp->proxy_user), GST_STR_NULL (rtsp->proxy_passwd));
+ }
+ }
+ col = strchr (p, ':');
+
+ if (col) {
+ /* everything before the colon is the hostname */
+ rtsp->proxy_host = g_strndup (p, col - p);
+ p = col + 1;
+ rtsp->proxy_port = strtoul (p, (char **) &p, 10);
+ } else {
+ rtsp->proxy_host = g_strdup (p);
+ /* no port given: default to the common HTTP proxy port */
+ rtsp->proxy_port = 8080;
+ }
+ return TRUE;
+}
+
+/* Setter shared by instance init and the "tcp-timeout" property: records the
+ * timeout value for later use when connecting (units defined by the
+ * "tcp-timeout" property spec, not visible here — presumably microseconds;
+ * confirm against the property installation). */
+static void
+gst_rtspsrc_set_tcp_timeout (GstRTSPSrc * rtspsrc, guint64 timeout)
+{
+ rtspsrc->tcp_timeout = timeout;
+}
+
+/* GObject::set_property implementation for all rtspsrc properties. Most
+ * scalar values are stored directly in the instance struct; location, proxy
+ * and tcp-timeout go through their dedicated setters. String properties
+ * free their previous value before duplicating the new one.
+ *
+ * Fix: PROP_SDES now frees a previously set structure before duplicating
+ * the new value; the original leaked it on every re-set. */
+static void
+gst_rtspsrc_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+{
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ gst_rtspsrc_uri_set_uri (GST_URI_HANDLER (rtspsrc),
+ g_value_get_string (value), NULL);
+ break;
+ case PROP_PROTOCOLS:
+ rtspsrc->protocols = g_value_get_flags (value);
+ break;
+ case PROP_DEBUG:
+ rtspsrc->debug = g_value_get_boolean (value);
+ break;
+ case PROP_RETRY:
+ rtspsrc->retry = g_value_get_uint (value);
+ break;
+ case PROP_TIMEOUT:
+ rtspsrc->udp_timeout = g_value_get_uint64 (value);
+ break;
+ case PROP_TCP_TIMEOUT:
+ gst_rtspsrc_set_tcp_timeout (rtspsrc, g_value_get_uint64 (value));
+ break;
+ case PROP_LATENCY:
+ rtspsrc->latency = g_value_get_uint (value);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ rtspsrc->drop_on_latency = g_value_get_boolean (value);
+ break;
+ case PROP_CONNECTION_SPEED:
+ rtspsrc->connection_speed = g_value_get_uint64 (value);
+ break;
+ case PROP_NAT_METHOD:
+ rtspsrc->nat_method = g_value_get_enum (value);
+ break;
+ case PROP_DO_RTCP:
+ rtspsrc->do_rtcp = g_value_get_boolean (value);
+ break;
+ case PROP_DO_RTSP_KEEP_ALIVE:
+ rtspsrc->do_rtsp_keep_alive = g_value_get_boolean (value);
+ break;
+ case PROP_PROXY:
+ gst_rtspsrc_set_proxy (rtspsrc, g_value_get_string (value));
+ break;
+ case PROP_PROXY_ID:
+ g_free (rtspsrc->prop_proxy_id);
+ rtspsrc->prop_proxy_id = g_value_dup_string (value);
+ break;
+ case PROP_PROXY_PW:
+ g_free (rtspsrc->prop_proxy_pw);
+ rtspsrc->prop_proxy_pw = g_value_dup_string (value);
+ break;
+ case PROP_RTP_BLOCKSIZE:
+ rtspsrc->rtp_blocksize = g_value_get_uint (value);
+ break;
+ case PROP_USER_ID:
+ g_free (rtspsrc->user_id);
+ rtspsrc->user_id = g_value_dup_string (value);
+ break;
+ case PROP_USER_PW:
+ g_free (rtspsrc->user_pw);
+ rtspsrc->user_pw = g_value_dup_string (value);
+ break;
+ case PROP_BUFFER_MODE:
+ rtspsrc->buffer_mode = g_value_get_enum (value);
+ break;
+ case PROP_PORT_RANGE:
+ {
+ const gchar *str;
+
+ /* expect "min-max"; anything else resets the range to unset (0-0) */
+ str = g_value_get_string (value);
+ if (str == NULL || sscanf (str, "%u-%u", &rtspsrc->client_port_range.min,
+ &rtspsrc->client_port_range.max) != 2) {
+ rtspsrc->client_port_range.min = 0;
+ rtspsrc->client_port_range.max = 0;
+ }
+ break;
+ }
+ case PROP_UDP_BUFFER_SIZE:
+ rtspsrc->udp_buffer_size = g_value_get_int (value);
+ break;
+ case PROP_SHORT_HEADER:
+ rtspsrc->short_header = g_value_get_boolean (value);
+ break;
+ case PROP_PROBATION:
+ rtspsrc->probation = g_value_get_uint (value);
+ break;
+ case PROP_UDP_RECONNECT:
+ rtspsrc->udp_reconnect = g_value_get_boolean (value);
+ break;
+ case PROP_MULTICAST_IFACE:
+ g_free (rtspsrc->multi_iface);
+
+ if (g_value_get_string (value) == NULL)
+ rtspsrc->multi_iface = g_strdup (DEFAULT_MULTICAST_IFACE);
+ else
+ rtspsrc->multi_iface = g_value_dup_string (value);
+ break;
+ case PROP_NTP_SYNC:
+ rtspsrc->ntp_sync = g_value_get_boolean (value);
+ /* The default value of max_ts_offset depends on ntp_sync. If user
+ * hasn't set it then change default value */
+ if (!rtspsrc->max_ts_offset_is_set) {
+ if (rtspsrc->ntp_sync) {
+ rtspsrc->max_ts_offset = 0;
+ } else {
+ rtspsrc->max_ts_offset = DEFAULT_MAX_TS_OFFSET;
+ }
+ }
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ rtspsrc->use_pipeline_clock = g_value_get_boolean (value);
+ break;
+ case PROP_SDES:
+ /* free the previously set SDES structure, if any, so that setting the
+ * property more than once does not leak it */
+ if (rtspsrc->sdes)
+ gst_structure_free (rtspsrc->sdes);
+ rtspsrc->sdes = g_value_dup_boxed (value);
+ break;
+ case PROP_TLS_VALIDATION_FLAGS:
+ rtspsrc->tls_validation_flags = g_value_get_flags (value);
+ break;
+ case PROP_TLS_DATABASE:
+ g_clear_object (&rtspsrc->tls_database);
+ rtspsrc->tls_database = g_value_dup_object (value);
+ break;
+ case PROP_TLS_INTERACTION:
+ g_clear_object (&rtspsrc->tls_interaction);
+ rtspsrc->tls_interaction = g_value_dup_object (value);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ rtspsrc->do_retransmission = g_value_get_boolean (value);
+ break;
+ case PROP_NTP_TIME_SOURCE:
+ rtspsrc->ntp_time_source = g_value_get_enum (value);
+ break;
+ case PROP_USER_AGENT:
+ g_free (rtspsrc->user_agent);
+ rtspsrc->user_agent = g_value_dup_string (value);
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ rtspsrc->max_rtcp_rtp_time_diff = g_value_get_int (value);
+ break;
+ case PROP_RFC7273_SYNC:
+ rtspsrc->rfc7273_sync = g_value_get_boolean (value);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ rtspsrc->max_ts_offset_adjustment = g_value_get_uint64 (value);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ rtspsrc->max_ts_offset = g_value_get_int64 (value);
+ /* remember the explicit set so PROP_NTP_SYNC no longer overrides it */
+ rtspsrc->max_ts_offset_is_set = TRUE;
+ break;
+ case PROP_DEFAULT_VERSION:
+ rtspsrc->default_version = g_value_get_enum (value);
+ break;
+ case PROP_BACKCHANNEL:
+ rtspsrc->backchannel = g_value_get_enum (value);
+ break;
+ case PROP_TEARDOWN_TIMEOUT:
+ rtspsrc->teardown_timeout = g_value_get_uint64 (value);
+ break;
+ case PROP_ONVIF_MODE:
+ rtspsrc->onvif_mode = g_value_get_boolean (value);
+ break;
+ case PROP_ONVIF_RATE_CONTROL:
+ rtspsrc->onvif_rate_control = g_value_get_boolean (value);
+ break;
+ case PROP_IS_LIVE:
+ rtspsrc->is_live = g_value_get_boolean (value);
+ break;
+ case PROP_IGNORE_X_SERVER_REPLY:
+ rtspsrc->ignore_x_server_reply = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject::get_property implementation: read the current value of each
+ * property out of the instance struct. The proxy and port-range properties
+ * are re-serialized into their string forms. */
+static void
+gst_rtspsrc_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstRTSPSrc *rtspsrc;
+
+ rtspsrc = GST_RTSPSRC (object);
+
+ switch (prop_id) {
+ case PROP_LOCATION:
+ g_value_set_string (value, rtspsrc->conninfo.location);
+ break;
+ case PROP_PROTOCOLS:
+ g_value_set_flags (value, rtspsrc->protocols);
+ break;
+ case PROP_DEBUG:
+ g_value_set_boolean (value, rtspsrc->debug);
+ break;
+ case PROP_RETRY:
+ g_value_set_uint (value, rtspsrc->retry);
+ break;
+ case PROP_TIMEOUT:
+ g_value_set_uint64 (value, rtspsrc->udp_timeout);
+ break;
+ case PROP_TCP_TIMEOUT:
+ g_value_set_uint64 (value, rtspsrc->tcp_timeout);
+ break;
+ case PROP_LATENCY:
+ g_value_set_uint (value, rtspsrc->latency);
+ break;
+ case PROP_DROP_ON_LATENCY:
+ g_value_set_boolean (value, rtspsrc->drop_on_latency);
+ break;
+ case PROP_CONNECTION_SPEED:
+ g_value_set_uint64 (value, rtspsrc->connection_speed);
+ break;
+ case PROP_NAT_METHOD:
+ g_value_set_enum (value, rtspsrc->nat_method);
+ break;
+ case PROP_DO_RTCP:
+ g_value_set_boolean (value, rtspsrc->do_rtcp);
+ break;
+ case PROP_DO_RTSP_KEEP_ALIVE:
+ g_value_set_boolean (value, rtspsrc->do_rtsp_keep_alive);
+ break;
+ case PROP_PROXY:
+ {
+ gchar *str;
+
+ /* serialize back as "host:port"; NULL when no proxy is configured.
+ * NOTE(review): proxy_port appears to be unsigned, so "%u" would be
+ * the exactly matching specifier — confirm before changing */
+ if (rtspsrc->proxy_host) {
+ str =
+ g_strdup_printf ("%s:%d", rtspsrc->proxy_host, rtspsrc->proxy_port);
+ } else {
+ str = NULL;
+ }
+ g_value_take_string (value, str);
+ break;
+ }
+ case PROP_PROXY_ID:
+ g_value_set_string (value, rtspsrc->prop_proxy_id);
+ break;
+ case PROP_PROXY_PW:
+ g_value_set_string (value, rtspsrc->prop_proxy_pw);
+ break;
+ case PROP_RTP_BLOCKSIZE:
+ g_value_set_uint (value, rtspsrc->rtp_blocksize);
+ break;
+ case PROP_USER_ID:
+ g_value_set_string (value, rtspsrc->user_id);
+ break;
+ case PROP_USER_PW:
+ g_value_set_string (value, rtspsrc->user_pw);
+ break;
+ case PROP_BUFFER_MODE:
+ g_value_set_enum (value, rtspsrc->buffer_mode);
+ break;
+ case PROP_PORT_RANGE:
+ {
+ gchar *str;
+
+ /* serialize back as "min-max"; NULL when the range is unset */
+ if (rtspsrc->client_port_range.min != 0) {
+ str = g_strdup_printf ("%u-%u", rtspsrc->client_port_range.min,
+ rtspsrc->client_port_range.max);
+ } else {
+ str = NULL;
+ }
+ g_value_take_string (value, str);
+ break;
+ }
+ case PROP_UDP_BUFFER_SIZE:
+ g_value_set_int (value, rtspsrc->udp_buffer_size);
+ break;
+ case PROP_SHORT_HEADER:
+ g_value_set_boolean (value, rtspsrc->short_header);
+ break;
+ case PROP_PROBATION:
+ g_value_set_uint (value, rtspsrc->probation);
+ break;
+ case PROP_UDP_RECONNECT:
+ g_value_set_boolean (value, rtspsrc->udp_reconnect);
+ break;
+ case PROP_MULTICAST_IFACE:
+ g_value_set_string (value, rtspsrc->multi_iface);
+ break;
+ case PROP_NTP_SYNC:
+ g_value_set_boolean (value, rtspsrc->ntp_sync);
+ break;
+ case PROP_USE_PIPELINE_CLOCK:
+ g_value_set_boolean (value, rtspsrc->use_pipeline_clock);
+ break;
+ case PROP_SDES:
+ g_value_set_boxed (value, rtspsrc->sdes);
+ break;
+ case PROP_TLS_VALIDATION_FLAGS:
+ g_value_set_flags (value, rtspsrc->tls_validation_flags);
+ break;
+ case PROP_TLS_DATABASE:
+ g_value_set_object (value, rtspsrc->tls_database);
+ break;
+ case PROP_TLS_INTERACTION:
+ g_value_set_object (value, rtspsrc->tls_interaction);
+ break;
+ case PROP_DO_RETRANSMISSION:
+ g_value_set_boolean (value, rtspsrc->do_retransmission);
+ break;
+ case PROP_NTP_TIME_SOURCE:
+ g_value_set_enum (value, rtspsrc->ntp_time_source);
+ break;
+ case PROP_USER_AGENT:
+ g_value_set_string (value, rtspsrc->user_agent);
+ break;
+ case PROP_MAX_RTCP_RTP_TIME_DIFF:
+ g_value_set_int (value, rtspsrc->max_rtcp_rtp_time_diff);
+ break;
+ case PROP_RFC7273_SYNC:
+ g_value_set_boolean (value, rtspsrc->rfc7273_sync);
+ break;
+ case PROP_MAX_TS_OFFSET_ADJUSTMENT:
+ g_value_set_uint64 (value, rtspsrc->max_ts_offset_adjustment);
+ break;
+ case PROP_MAX_TS_OFFSET:
+ g_value_set_int64 (value, rtspsrc->max_ts_offset);
+ break;
+ case PROP_DEFAULT_VERSION:
+ g_value_set_enum (value, rtspsrc->default_version);
+ break;
+ case PROP_BACKCHANNEL:
+ g_value_set_enum (value, rtspsrc->backchannel);
+ break;
+ case PROP_TEARDOWN_TIMEOUT:
+ g_value_set_uint64 (value, rtspsrc->teardown_timeout);
+ break;
+ case PROP_ONVIF_MODE:
+ g_value_set_boolean (value, rtspsrc->onvif_mode);
+ break;
+ case PROP_ONVIF_RATE_CONTROL:
+ g_value_set_boolean (value, rtspsrc->onvif_rate_control);
+ break;
+ case PROP_IS_LIVE:
+ g_value_set_boolean (value, rtspsrc->is_live);
+ break;
+ case PROP_IGNORE_X_SERVER_REPLY:
+ g_value_set_boolean (value, rtspsrc->ignore_x_server_reply);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static gint
+find_stream_by_id (GstRTSPStream * stream, gint * id)
+{
+ if (stream->id == *id)
+ return 0;
+
+ return -1;
+}
+
+static gint
+find_stream_by_channel (GstRTSPStream * stream, gint * channel)
+{
+ /* ignore unconfigured channels here (e.g., those that
+ * were explicitly skipped during SETUP) */
+ if ((stream->channelpad[0] != NULL) &&
+ (stream->channel[0] == *channel || stream->channel[1] == *channel))
+ return 0;
+
+ return -1;
+}
+
+static gint
+find_stream_by_udpsrc (GstRTSPStream * stream, gconstpointer a)
+{
+ GstElement *src = (GstElement *) a;
+
+ if (stream->udpsrc[0] == src)
+ return 0;
+ if (stream->udpsrc[1] == src)
+ return 0;
+
+ return -1;
+}
+
+static gint
+find_stream_by_setup (GstRTSPStream * stream, gconstpointer a)
+{
+ if (stream->conninfo.location) {
+ /* check qualified setup_url */
+ if (!strcmp (stream->conninfo.location, (gchar *) a))
+ return 0;
+ }
+ if (stream->control_url) {
+ /* check original control_url */
+ if (!strcmp (stream->control_url, (gchar *) a))
+ return 0;
+
+ /* check if qualified setup_url ends with string */
+ if (g_str_has_suffix (stream->control_url, (gchar *) a))
+ return 0;
+ }
+
+ return -1;
+}
+
+static GstRTSPStream *
+find_stream (GstRTSPSrc * src, gconstpointer data, gconstpointer func)
+{
+ GList *lstream;
+
+ /* find and get stream */
+ if ((lstream = g_list_find_custom (src->streams, data, (GCompareFunc) func)))
+ return (GstRTSPStream *) lstream->data;
+
+ return NULL;
+}
+
+static const GstSDPBandwidth *
+gst_rtspsrc_get_bandwidth (GstRTSPSrc * src, const GstSDPMessage * sdp,
+ const GstSDPMedia * media, const gchar * type)
+{
+ guint i, len;
+
+ /* first look in the media specific section */
+ len = gst_sdp_media_bandwidths_len (media);
+ for (i = 0; i < len; i++) {
+ const GstSDPBandwidth *bw = gst_sdp_media_get_bandwidth (media, i);
+
+ if (strcmp (bw->bwtype, type) == 0)
+ return bw;
+ }
+ /* then look in the message specific section */
+ len = gst_sdp_message_bandwidths_len (sdp);
+ for (i = 0; i < len; i++) {
+ const GstSDPBandwidth *bw = gst_sdp_message_get_bandwidth (sdp, i);
+
+ if (strcmp (bw->bwtype, type) == 0)
+ return bw;
+ }
+ return NULL;
+}
+
+static void
+gst_rtspsrc_collect_bandwidth (GstRTSPSrc * src, const GstSDPMessage * sdp,
+ const GstSDPMedia * media, GstRTSPStream * stream)
+{
+ const GstSDPBandwidth *bw;
+
+ if ((bw = gst_rtspsrc_get_bandwidth (src, sdp, media, GST_SDP_BWTYPE_AS)))
+ stream->as_bandwidth = bw->bandwidth;
+ else
+ stream->as_bandwidth = -1;
+
+ if ((bw = gst_rtspsrc_get_bandwidth (src, sdp, media, GST_SDP_BWTYPE_RR)))
+ stream->rr_bandwidth = bw->bandwidth;
+ else
+ stream->rr_bandwidth = -1;
+
+ if ((bw = gst_rtspsrc_get_bandwidth (src, sdp, media, GST_SDP_BWTYPE_RS)))
+ stream->rs_bandwidth = bw->bandwidth;
+ else
+ stream->rs_bandwidth = -1;
+}
+
+static void
+gst_rtspsrc_do_stream_connection (GstRTSPSrc * src, GstRTSPStream * stream,
+ const GstSDPConnection * conn)
+{
+ if (conn->nettype == NULL || strcmp (conn->nettype, "IN") != 0)
+ return;
+
+ if (conn->addrtype == NULL)
+ return;
+
+ /* check for IPV6 */
+ if (strcmp (conn->addrtype, "IP4") == 0)
+ stream->is_ipv6 = FALSE;
+ else if (strcmp (conn->addrtype, "IP6") == 0)
+ stream->is_ipv6 = TRUE;
+ else
+ return;
+
+ /* save address */
+ g_free (stream->destination);
+ stream->destination = g_strdup (conn->address);
+
+ /* check for multicast */
+ stream->is_multicast =
+ gst_sdp_address_is_multicast (conn->nettype, conn->addrtype,
+ conn->address);
+ stream->ttl = conn->ttl;
+}
+
+/* Go over the connections for a stream.
+ * - If we are dealing with IPV6, we will setup IPV6 sockets for sending and
+ * receiving.
+ * - If we are dealing with a localhost address, we disable multicast
+ */
+static void
+gst_rtspsrc_collect_connections (GstRTSPSrc * src, const GstSDPMessage * sdp,
+ const GstSDPMedia * media, GstRTSPStream * stream)
+{
+ const GstSDPConnection *conn;
+ guint i, len;
+
+ /* first look in the media specific section */
+ len = gst_sdp_media_connections_len (media);
+ for (i = 0; i < len; i++) {
+ conn = gst_sdp_media_get_connection (media, i);
+
+ gst_rtspsrc_do_stream_connection (src, stream, conn);
+ }
+ /* then look in the message specific section */
+ if ((conn = gst_sdp_message_get_connection (sdp))) {
+ gst_rtspsrc_do_stream_connection (src, stream, conn);
+ }
+}
+
+static gchar *
+make_stream_id (GstRTSPStream * stream, const GstSDPMedia * media)
+{
+ gchar *stream_id =
+ g_strdup_printf ("%s:%d:%d:%s:%d", media->media, media->port,
+ media->num_ports, media->proto, stream->default_pt);
+
+ g_strcanon (stream_id, G_CSET_a_2_z G_CSET_A_2_Z G_CSET_DIGITS, ':');
+
+ return stream_id;
+}
+
/* m=<media> <UDP port> RTP/AVP <payload>
 *
 * Parse one SDP media line into the stream: determine the RTSP profile from
 * the proto field, merge session- and media-level SDP attributes into per
 * payload-type caps, and fill stream->ptmap (the first payload type listed
 * becomes stream->default_pt). Also extracts MIKEY key management and sets
 * stream->stream_id. Errors are logged and leave the stream without a ptmap.
 */
static void
gst_rtspsrc_collect_payloads (GstRTSPSrc * src, const GstSDPMessage * sdp,
    const GstSDPMedia * media, GstRTSPStream * stream)
{
  guint i, len;
  const gchar *proto;
  GstCaps *global_caps;

  /* get proto */
  proto = gst_sdp_media_get_proto (media);
  if (proto == NULL)
    goto no_proto;

  if (g_str_equal (proto, "RTP/AVP"))
    stream->profile = GST_RTSP_PROFILE_AVP;
  else if (g_str_equal (proto, "RTP/SAVP"))
    stream->profile = GST_RTSP_PROFILE_SAVP;
  else if (g_str_equal (proto, "RTP/AVPF"))
    stream->profile = GST_RTSP_PROFILE_AVPF;
  else if (g_str_equal (proto, "RTP/SAVPF"))
    stream->profile = GST_RTSP_PROFILE_SAVPF;
  else
    goto unknown_proto;

  /* sendonly media is ignored, except when it is the requested ONVIF
   * backchannel stream, for which we do want to set up caps */
  if (gst_sdp_media_get_attribute_val (media, "sendonly") != NULL &&
      /* We want to setup caps for streams configured as backchannel */
      !stream->is_backchannel && src->backchannel != BACKCHANNEL_NONE)
    goto sendonly_media;

  /* Parse global SDP attributes once; they get intersected into the caps of
   * every payload type below */
  global_caps = gst_caps_new_empty_simple ("application/x-unknown");
  GST_DEBUG ("mapping sdp session level attributes to caps");
  gst_sdp_message_attributes_to_caps (sdp, global_caps);
  GST_DEBUG ("mapping sdp media level attributes to caps");
  gst_sdp_media_attributes_to_caps (media, global_caps);

  /* Keep a copy of the SDP key management; media level wins over session
   * level */
  gst_sdp_media_parse_keymgmt (media, &stream->mikey);
  if (stream->mikey == NULL)
    gst_sdp_message_parse_keymgmt (sdp, &stream->mikey);

  len = gst_sdp_media_formats_len (media);
  for (i = 0; i < len; i++) {
    gint pt;
    GstCaps *caps, *outcaps;
    GstStructure *s;
    const gchar *enc;
    PtMapItem item;

    pt = atoi (gst_sdp_media_get_format (media, i));

    GST_DEBUG_OBJECT (src, " looking at %d pt: %d", i, pt);

    /* convert caps */
    caps = gst_sdp_media_get_caps_from_media (media, pt);
    if (caps == NULL) {
      GST_WARNING_OBJECT (src, " skipping pt %d without caps", pt);
      continue;
    }

    /* do some tweaks: detect RealMedia payloaders and ASF container
     * streams from the encoding name */
    s = gst_caps_get_structure (caps, 0);
    if ((enc = gst_structure_get_string (s, "encoding-name"))) {
      stream->is_real = (strstr (enc, "-REAL") != NULL);
      if (strcmp (enc, "X-ASF-PF") == 0)
        stream->container = TRUE;
    }

    /* Merge in global caps */
    /* Intersect will merge in missing fields to the current caps */
    outcaps = gst_caps_intersect (caps, global_caps);
    gst_caps_unref (caps);

    /* the first pt will be the default */
    if (stream->ptmap->len == 0)
      stream->default_pt = pt;

    /* ptmap takes ownership of the outcaps ref (released by
     * clear_ptmap_item) */
    item.pt = pt;
    item.caps = outcaps;

    g_array_append_val (stream->ptmap, item);
  }

  stream->stream_id = make_stream_id (stream, media);

  gst_caps_unref (global_caps);
  return;

no_proto:
  {
    GST_ERROR_OBJECT (src, "can't find proto in media");
    return;
  }
unknown_proto:
  {
    GST_ERROR_OBJECT (src, "unknown proto in media: '%s'", proto);
    return;
  }
sendonly_media:
  {
    GST_DEBUG_OBJECT (src, "sendonly media ignored, no backchannel");
    return;
  }
}
+
+static const gchar *
+get_aggregate_control (GstRTSPSrc * src)
+{
+ const gchar *base;
+
+ if (src->control)
+ base = src->control;
+ else if (src->content_base)
+ base = src->content_base;
+ else if (src->conninfo.url_str)
+ base = src->conninfo.url_str;
+ else
+ base = "/";
+
+ return base;
+}
+
+static void
+clear_ptmap_item (PtMapItem * item)
+{
+ if (item->caps)
+ gst_caps_unref (item->caps);
+}
+
/* Create a GstRTSPStream for media line @idx of the SDP: initialize the
 * stream defaults, collect bandwidth/connection/payload information and
 * build the fully qualified setup url from the control attribute. The new
 * stream is appended to src->streams and also returned; NULL is returned
 * when the media line does not exist. */
static GstRTSPStream *
gst_rtspsrc_create_stream (GstRTSPSrc * src, GstSDPMessage * sdp, gint idx,
    gint n_streams)
{
  GstRTSPStream *stream;
  const gchar *control_path;
  const GstSDPMedia *media;

  /* get media, should not return NULL */
  media = gst_sdp_message_get_media (sdp, idx);
  if (media == NULL)
    return NULL;

  stream = g_new0 (GstRTSPStream, 1);
  stream->parent = src;
  /* we mark the pad as not linked, we will mark it as OK when we add the pad to
   * the element. */
  stream->last_ret = GST_FLOW_NOT_LINKED;
  stream->added = FALSE;
  stream->setup = FALSE;
  stream->skipped = FALSE;
  stream->id = idx;
  stream->eos = FALSE;
  stream->discont = TRUE;
  /* -1 means "not yet known"; filled in from RTP-Info after PLAY */
  stream->seqbase = -1;
  stream->timebase = -1;
  stream->send_ssrc = g_random_int ();
  stream->profile = GST_RTSP_PROFILE_AVP;
  stream->ptmap = g_array_new (FALSE, FALSE, sizeof (PtMapItem));
  stream->mikey = NULL;
  stream->stream_id = NULL;
  stream->is_backchannel = FALSE;
  g_mutex_init (&stream->conninfo.send_lock);
  g_mutex_init (&stream->conninfo.recv_lock);
  g_array_set_clear_func (stream->ptmap, (GDestroyNotify) clear_ptmap_item);

  /* stream is sendonly and onvif backchannel is requested */
  if (gst_sdp_media_get_attribute_val (media, "sendonly") != NULL &&
      src->backchannel != BACKCHANNEL_NONE)
    stream->is_backchannel = TRUE;

  /* collect bandwidth information for this steam. FIXME, configure in the RTP
   * session manager to scale RTCP. */
  gst_rtspsrc_collect_bandwidth (src, sdp, media, stream);

  /* collect connection info */
  gst_rtspsrc_collect_connections (src, sdp, media, stream);

  /* make the payload type map */
  gst_rtspsrc_collect_payloads (src, sdp, media, stream);

  /* collect port number */
  stream->port = gst_sdp_media_get_port (media);

  /* get control url to construct the setup url. The setup url is used to
   * configure the transport of the stream and is used to identity the stream in
   * the RTP-Info header field returned from PLAY. */
  control_path = gst_sdp_media_get_attribute_val (media, "control");
  if (control_path == NULL)
    control_path = gst_sdp_message_get_attribute_val_n (sdp, "control", 0);

  GST_DEBUG_OBJECT (src, "stream %d, (%p)", stream->id, stream);
  GST_DEBUG_OBJECT (src, " port: %d", stream->port);
  GST_DEBUG_OBJECT (src, " container: %d", stream->container);
  GST_DEBUG_OBJECT (src, " control: %s", GST_STR_NULL (control_path));

  /* RFC 2326, C.3: missing control_path permitted in case of a single stream */
  if (control_path == NULL && n_streams == 1) {
    control_path = "";
  }

  if (control_path != NULL) {
    stream->control_url = g_strdup (control_path);
    /* Build a fully qualified url using the content_base if any or by prefixing
     * the original request.
     * If the control_path starts with a non rtsp: protocol we will most
     * likely build a URL that the server will fail to understand, this is ok,
     * we will fail then. */
    if (g_str_has_prefix (control_path, "rtsp://"))
      stream->conninfo.location = g_strdup (control_path);
    else {
      /* "*" means aggregate control, treat like an empty path */
      if (g_strcmp0 (control_path, "*") == 0)
        control_path = "";
      /* handle url with query */
      if (src->conninfo.url && src->conninfo.url->query) {
        stream->conninfo.location =
            gst_rtsp_url_get_request_uri_with_control (src->conninfo.url,
            control_path);
      } else {
        const gchar *base;
        gboolean has_slash;
        const gchar *slash;
        const gchar *actual_control_path = NULL;

        base = get_aggregate_control (src);
        has_slash = g_str_has_suffix (base, "/");
        /* manage existence or non-existence of / in control path */
        if (control_path && strlen (control_path) > 0) {
          gboolean control_has_slash = g_str_has_prefix (control_path, "/");

          actual_control_path = control_path;
          if (has_slash && control_has_slash) {
            if (strlen (control_path) == 1) {
              actual_control_path = NULL;
            } else {
              /* drop the leading '/' so we don't emit a double slash */
              actual_control_path = control_path + 1;
            }
          } else {
            has_slash = has_slash || control_has_slash;
          }
        }
        slash = (!has_slash && (actual_control_path != NULL)) ? "/" : "";
        /* concatenate the two strings, insert / when not present */
        /* NOTE(review): actual_control_path is computed above but the
         * concatenation below uses control_path; when both base and
         * control_path carry a '/' this can still produce "//" — confirm
         * this is intended. */
        stream->conninfo.location =
            g_strdup_printf ("%s%s%s", base, slash, control_path);
      }
    }
  }
  GST_DEBUG_OBJECT (src, " setup: %s",
      GST_STR_NULL (stream->conninfo.location));

  /* we keep track of all streams */
  src->streams = g_list_append (src->streams, stream);

  return stream;

  /* ERRORS */
}
+
/* Free a GstRTSPStream and everything it owns: the ptmap (caps released via
 * the array clear func), the udp elements (shut down to NULL and removed
 * from the bin when still parented), pads, SRTP elements, MIKEY message,
 * RTP session and connection locks. The stream must already be unlinked
 * from src->streams by the caller. */
static void
gst_rtspsrc_stream_free (GstRTSPSrc * src, GstRTSPStream * stream)
{
  gint i;

  GST_DEBUG_OBJECT (src, "free stream %p", stream);

  g_array_free (stream->ptmap, TRUE);

  g_free (stream->destination);
  g_free (stream->control_url);
  g_free (stream->conninfo.location);
  g_free (stream->stream_id);

  /* i == 0 is the RTP leg, i == 1 the RTCP leg */
  for (i = 0; i < 2; i++) {
    if (stream->udpsrc[i]) {
      /* stop the element before removing/unreffing it */
      gst_element_set_state (stream->udpsrc[i], GST_STATE_NULL);
      if (gst_object_has_as_parent (GST_OBJECT (stream->udpsrc[i]),
              GST_OBJECT (src)))
        gst_bin_remove (GST_BIN_CAST (src), stream->udpsrc[i]);
      gst_object_unref (stream->udpsrc[i]);
    }
    if (stream->channelpad[i])
      gst_object_unref (stream->channelpad[i]);

    if (stream->udpsink[i]) {
      gst_element_set_state (stream->udpsink[i], GST_STATE_NULL);
      if (gst_object_has_as_parent (GST_OBJECT (stream->udpsink[i]),
              GST_OBJECT (src)))
        gst_bin_remove (GST_BIN_CAST (src), stream->udpsink[i]);
      gst_object_unref (stream->udpsink[i]);
    }
  }
  if (stream->rtpsrc) {
    gst_element_set_state (stream->rtpsrc, GST_STATE_NULL);
    gst_bin_remove (GST_BIN_CAST (src), stream->rtpsrc);
    gst_object_unref (stream->rtpsrc);
  }
  if (stream->srcpad) {
    gst_pad_set_active (stream->srcpad, FALSE);
    /* only remove the ghost pad when it was actually added to the element */
    if (stream->added)
      gst_element_remove_pad (GST_ELEMENT_CAST (src), stream->srcpad);
  }
  if (stream->srtpenc)
    gst_object_unref (stream->srtpenc);
  if (stream->srtpdec)
    gst_object_unref (stream->srtpdec);
  if (stream->srtcpparams)
    gst_caps_unref (stream->srtcpparams);
  if (stream->mikey)
    gst_mikey_message_unref (stream->mikey);
  if (stream->rtcppad)
    gst_object_unref (stream->rtcppad);
  if (stream->session)
    g_object_unref (stream->session);
  if (stream->rtx_pt_map)
    gst_structure_free (stream->rtx_pt_map);

  g_mutex_clear (&stream->conninfo.send_lock);
  g_mutex_clear (&stream->conninfo.recv_lock);

  g_free (stream);
}
+
/* Release all per-session state of the source: streams, the RTP session
 * manager, cached SDP/RTSP metadata, the provided clock and any pending
 * SET/GET_PARAMETER requests (whose promises are expired). Called when
 * tearing down or restarting the connection. */
static void
gst_rtspsrc_cleanup (GstRTSPSrc * src)
{
  GList *walk;
  ParameterRequest *req;

  GST_DEBUG_OBJECT (src, "cleanup");

  for (walk = src->streams; walk; walk = g_list_next (walk)) {
    GstRTSPStream *stream = (GstRTSPStream *) walk->data;

    gst_rtspsrc_stream_free (src, stream);
  }
  g_list_free (src->streams);
  src->streams = NULL;
  if (src->manager) {
    /* detach our signal handler before shutting the manager down */
    if (src->manager_sig_id) {
      g_signal_handler_disconnect (src->manager, src->manager_sig_id);
      src->manager_sig_id = 0;
    }
    gst_element_set_state (src->manager, GST_STATE_NULL);
    gst_bin_remove (GST_BIN_CAST (src), src->manager);
    src->manager = NULL;
  }
  if (src->props)
    gst_structure_free (src->props);
  src->props = NULL;

  g_free (src->content_base);
  src->content_base = NULL;

  g_free (src->control);
  src->control = NULL;

  if (src->range)
    gst_rtsp_range_free (src->range);
  src->range = NULL;

  /* don't clear the SDP when it was used in the url */
  if (src->sdp && !src->from_sdp) {
    gst_sdp_message_free (src->sdp);
    src->sdp = NULL;
  }

  src->need_segment = FALSE;
  src->clip_out_segment = FALSE;

  if (src->provided_clock) {
    gst_object_unref (src->provided_clock);
    src->provided_clock = NULL;
  }

  GST_OBJECT_LOCK (src);
  /* free parameter requests queue; expire the promises so waiting callers
   * are woken up instead of leaking */
  while ((req = g_queue_pop_head (&src->set_get_param_q))) {
    gst_promise_expire (req->promise);
    free_param_data (req);
  }
  GST_OBJECT_UNLOCK (src);

}
+
/* Allocate a pair of UDP ports for a stream: an even RTP port and the next
 * (odd) port for RTCP. Candidate ports are tried starting at
 * src->next_port_num and the attempt is retried (up to src->retry times)
 * when a port is busy, odd, or out of the configured client port range.
 * On success the two udpsrc elements are stored (locked in READY state) in
 * stream->udpsrc[0]/[1], the allocated ports are returned in @rtpport and
 * @rtcpport, and TRUE is returned. On failure everything is cleaned up and
 * FALSE is returned. */
static gboolean
gst_rtspsrc_alloc_udp_ports (GstRTSPStream * stream,
    gint * rtpport, gint * rtcpport)
{
  GstRTSPSrc *src;
  GstStateChangeReturn ret;
  GstElement *udpsrc0, *udpsrc1;
  gint tmp_rtp, tmp_rtcp;
  guint count;
  const gchar *host;

  src = stream->parent;

  udpsrc0 = NULL;
  udpsrc1 = NULL;
  count = 0;

  /* Start at next port */
  tmp_rtp = src->next_port_num;

  /* bind on the wildcard address of the right family */
  if (stream->is_ipv6)
    host = "udp://[::0]";
  else
    host = "udp://0.0.0.0";

  /* try to allocate 2 UDP ports, the RTP port should be an even
   * number and the RTCP port should be the next (uneven) port */
again:

  /* tmp_rtp == 0 means "let the kernel pick", which is always in range */
  if (tmp_rtp != 0 && src->client_port_range.max > 0 &&
      tmp_rtp >= src->client_port_range.max)
    goto no_ports;

  udpsrc0 = gst_element_make_from_uri (GST_URI_SRC, host, NULL, NULL);
  if (udpsrc0 == NULL)
    goto no_udp_protocol;
  g_object_set (G_OBJECT (udpsrc0), "port", tmp_rtp, "reuse", FALSE, NULL);

  if (src->udp_buffer_size != 0)
    g_object_set (G_OBJECT (udpsrc0), "buffer-size", src->udp_buffer_size,
        NULL);

  /* going to READY makes udpsrc actually bind the port */
  ret = gst_element_set_state (udpsrc0, GST_STATE_READY);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    if (tmp_rtp != 0) {
      GST_DEBUG_OBJECT (src, "Unable to make udpsrc from RTP port %d", tmp_rtp);

      tmp_rtp += 2;
      if (++count > src->retry)
        goto no_ports;

      GST_DEBUG_OBJECT (src, "free RTP udpsrc");
      gst_element_set_state (udpsrc0, GST_STATE_NULL);
      gst_object_unref (udpsrc0);
      udpsrc0 = NULL;

      GST_DEBUG_OBJECT (src, "retry %d", count);
      goto again;
    }
    goto no_udp_protocol;
  }

  /* when tmp_rtp was 0 the kernel picked a port; read back what we got */
  g_object_get (G_OBJECT (udpsrc0), "port", &tmp_rtp, NULL);
  GST_DEBUG_OBJECT (src, "got RTP port %d", tmp_rtp);

  /* check if port is even */
  if ((tmp_rtp & 0x01) != 0) {
    /* port not even, close and allocate another */
    if (++count > src->retry)
      goto no_ports;

    GST_DEBUG_OBJECT (src, "RTP port not even");

    GST_DEBUG_OBJECT (src, "free RTP udpsrc");
    gst_element_set_state (udpsrc0, GST_STATE_NULL);
    gst_object_unref (udpsrc0);
    udpsrc0 = NULL;

    GST_DEBUG_OBJECT (src, "retry %d", count);
    tmp_rtp++;
    goto again;
  }

  /* allocate port+1 for RTCP now */
  udpsrc1 = gst_element_make_from_uri (GST_URI_SRC, host, NULL, NULL);
  if (udpsrc1 == NULL)
    goto no_udp_rtcp_protocol;

  /* set port */
  tmp_rtcp = tmp_rtp + 1;
  if (src->client_port_range.max > 0 && tmp_rtcp > src->client_port_range.max)
    goto no_ports;

  g_object_set (G_OBJECT (udpsrc1), "port", tmp_rtcp, "reuse", FALSE, NULL);

  GST_DEBUG_OBJECT (src, "starting RTCP on port %d", tmp_rtcp);
  ret = gst_element_set_state (udpsrc1, GST_STATE_READY);
  /* tmp_rtcp port is busy already : retry to make rtp/rtcp pair */
  if (ret == GST_STATE_CHANGE_FAILURE) {
    GST_DEBUG_OBJECT (src, "Unable to make udpsrc from RTCP port %d", tmp_rtcp);

    if (++count > src->retry)
      goto no_ports;

    GST_DEBUG_OBJECT (src, "free RTP udpsrc");
    gst_element_set_state (udpsrc0, GST_STATE_NULL);
    gst_object_unref (udpsrc0);
    udpsrc0 = NULL;

    GST_DEBUG_OBJECT (src, "free RTCP udpsrc");
    gst_element_set_state (udpsrc1, GST_STATE_NULL);
    gst_object_unref (udpsrc1);
    udpsrc1 = NULL;

    tmp_rtp += 2;
    GST_DEBUG_OBJECT (src, "retry %d", count);
    goto again;
  }

  /* all fine, do port check */
  g_object_get (G_OBJECT (udpsrc0), "port", rtpport, NULL);
  g_object_get (G_OBJECT (udpsrc1), "port", rtcpport, NULL);

  /* this should not happen... */
  if (*rtpport != tmp_rtp || *rtcpport != tmp_rtcp)
    goto port_error;

  /* we keep these elements, we configure all in configure_transport when the
   * server told us to really use the UDP ports. */
  stream->udpsrc[0] = gst_object_ref_sink (udpsrc0);
  stream->udpsrc[1] = gst_object_ref_sink (udpsrc1);
  gst_element_set_locked_state (stream->udpsrc[0], TRUE);
  gst_element_set_locked_state (stream->udpsrc[1], TRUE);

  /* keep track of next available port number when we have a range
   * configured */
  if (src->next_port_num != 0)
    src->next_port_num = tmp_rtcp + 1;

  return TRUE;

  /* ERRORS */
no_udp_protocol:
  {
    GST_DEBUG_OBJECT (src, "could not get UDP source");
    goto cleanup;
  }
no_ports:
  {
    GST_DEBUG_OBJECT (src, "could not allocate UDP port pair after %d retries",
        count);
    goto cleanup;
  }
no_udp_rtcp_protocol:
  {
    GST_DEBUG_OBJECT (src, "could not get UDP source for RTCP");
    goto cleanup;
  }
port_error:
  {
    GST_DEBUG_OBJECT (src, "ports don't match rtp: %d<->%d, rtcp: %d<->%d",
        tmp_rtp, *rtpport, tmp_rtcp, *rtcpport);
    goto cleanup;
  }
cleanup:
  {
    /* shared failure path: shut down and release whatever was created */
    if (udpsrc0) {
      gst_element_set_state (udpsrc0, GST_STATE_NULL);
      gst_object_unref (udpsrc0);
    }
    if (udpsrc1) {
      gst_element_set_state (udpsrc1, GST_STATE_NULL);
      gst_object_unref (udpsrc1);
    }
    return FALSE;
  }
}
+
+static void
+gst_rtspsrc_set_state (GstRTSPSrc * src, GstState state)
+{
+ GList *walk;
+
+ if (src->manager)
+ gst_element_set_state (GST_ELEMENT_CAST (src->manager), state);
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ gint i;
+
+ for (i = 0; i < 2; i++) {
+ if (stream->udpsrc[i])
+ gst_element_set_state (stream->udpsrc[i], state);
+ }
+ }
+}
+
+static void
+gst_rtspsrc_flush (GstRTSPSrc * src, gboolean flush, gboolean playing,
+ guint32 seqnum)
+{
+ GstEvent *event;
+ gint cmd;
+ GstState state;
+
+ if (flush) {
+ event = gst_event_new_flush_start ();
+ gst_event_set_seqnum (event, seqnum);
+ GST_DEBUG_OBJECT (src, "start flush");
+ cmd = CMD_WAIT;
+ state = GST_STATE_PAUSED;
+ } else {
+ event = gst_event_new_flush_stop (TRUE);
+ gst_event_set_seqnum (event, seqnum);
+ GST_DEBUG_OBJECT (src, "stop flush; playing %d", playing);
+ cmd = CMD_LOOP;
+ if (playing)
+ state = GST_STATE_PLAYING;
+ else
+ state = GST_STATE_PAUSED;
+ }
+ gst_rtspsrc_push_event (src, event);
+ gst_rtspsrc_loop_send_cmd (src, cmd, CMD_LOOP);
+ gst_rtspsrc_set_state (src, state);
+}
+
+static GstRTSPResult
+gst_rtspsrc_connection_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * message, gint64 timeout)
+{
+ GstRTSPResult ret;
+
+ if (conninfo->connection) {
+ g_mutex_lock (&conninfo->send_lock);
+ ret =
+ gst_rtsp_connection_send_usec (conninfo->connection, message, timeout);
+ g_mutex_unlock (&conninfo->send_lock);
+ } else {
+ ret = GST_RTSP_ERROR;
+ }
+
+ return ret;
+}
+
+static GstRTSPResult
+gst_rtspsrc_connection_receive (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+ GstRTSPMessage * message, gint64 timeout)
+{
+ GstRTSPResult ret;
+
+ if (conninfo->connection) {
+ g_mutex_lock (&conninfo->recv_lock);
+ ret = gst_rtsp_connection_receive_usec (conninfo->connection, message,
+ timeout);
+ g_mutex_unlock (&conninfo->recv_lock);
+ } else {
+ ret = GST_RTSP_ERROR;
+ }
+
+ return ret;
+}
+
+static void
+gst_rtspsrc_get_position (GstRTSPSrc * src)
+{
+ GstQuery *query;
+ GList *walk;
+
+ query = gst_query_new_position (GST_FORMAT_TIME);
+ /* should be known somewhere down the stream (e.g. jitterbuffer) */
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ GstFormat fmt;
+ gint64 pos;
+
+ if (stream->srcpad) {
+ if (gst_pad_query (stream->srcpad, query)) {
+ gst_query_parse_position (query, &fmt, &pos);
+ GST_DEBUG_OBJECT (src, "retaining position %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (pos));
+ src->last_pos = pos;
+ goto out;
+ }
+ }
+ }
+
+ src->last_pos = 0;
+
+out:
+
+ gst_query_unref (query);
+}
+
+static gboolean
+gst_rtspsrc_perform_seek (GstRTSPSrc * src, GstEvent * event)
+{
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type = GST_SEEK_TYPE_NONE;
+ gint64 cur, stop;
+ gboolean flush, server_side_trickmode;
+ gboolean update;
+ gboolean playing;
+ GstSegment seeksegment = { 0, };
+ GList *walk;
+ const gchar *seek_style = NULL;
+ gboolean rate_change_only = FALSE;
+ gboolean rate_change_same_direction = FALSE;
+
+ GST_DEBUG_OBJECT (src, "doing seek with event %" GST_PTR_FORMAT, event);
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+ rate_change_only = cur_type == GST_SEEK_TYPE_NONE
+ && stop_type == GST_SEEK_TYPE_NONE;
+
+ /* we need TIME format */
+ if (format != src->segment.format)
+ goto no_format;
+
+ /* Check if we are not at all seekable */
+ if (src->seekable == -1.0)
+ goto not_seekable;
+
+ /* Additional seeking-to-beginning-only check */
+ if (src->seekable == 0.0 && cur != 0)
+ goto not_seekable;
+
+ if (flags & GST_SEEK_FLAG_SEGMENT)
+ goto invalid_segment_flag;
+
+ /* get flush flag */
+ flush = flags & GST_SEEK_FLAG_FLUSH;
+ server_side_trickmode = flags & GST_SEEK_FLAG_TRICKMODE;
+
+ gst_event_parse_seek_trickmode_interval (event, &src->trickmode_interval);
+
+ /* now we need to make sure the streaming thread is stopped. We do this by
+ * either sending a FLUSH_START event downstream which will cause the
+ * streaming thread to stop with a WRONG_STATE.
+ * For a non-flushing seek we simply pause the task, which will happen as soon
+ * as it completes one iteration (and thus might block when the sink is
+ * blocking in preroll). */
+ if (flush) {
+ GST_DEBUG_OBJECT (src, "starting flush");
+ gst_rtspsrc_flush (src, TRUE, FALSE, gst_event_get_seqnum (event));
+ } else {
+ if (src->task) {
+ gst_task_pause (src->task);
+ }
+ }
+
+ /* we should now be able to grab the streaming thread because we stopped it
+ * with the above flush/pause code */
+ GST_RTSP_STREAM_LOCK (src);
+
+ GST_DEBUG_OBJECT (src, "stopped streaming");
+
+ /* stop flushing the rtsp connection so we can send PAUSE/PLAY below */
+ gst_rtspsrc_connection_flush (src, FALSE);
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ seeksegment = src->segment;
+
+ /* configure the seek parameters in the seeksegment. We will then have the
+ * right values in the segment to perform the seek */
+ GST_DEBUG_OBJECT (src, "configuring seek");
+ rate_change_same_direction = (rate * seeksegment.rate) > 0;
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+
+ /* if we were playing, pause first */
+ playing = (src->state == GST_RTSP_STATE_PLAYING);
+ if (playing) {
+ /* obtain current position in case seek fails */
+ gst_rtspsrc_get_position (src);
+ gst_rtspsrc_pause (src, FALSE);
+ }
+ src->server_side_trickmode = server_side_trickmode;
+
+ src->state = GST_RTSP_STATE_SEEKING;
+
+ /* PLAY will add the range header now. */
+ src->need_range = TRUE;
+
+ /* If an accurate seek was requested, we want to clip the segment we
+ * output in ONVIF mode to the requested bounds */
+ src->clip_out_segment = ! !(flags & GST_SEEK_FLAG_ACCURATE);
+ src->seek_seqnum = gst_event_get_seqnum (event);
+
+ /* prepare for streaming again */
+ if (flush) {
+ /* if we started flush, we stop now */
+ GST_DEBUG_OBJECT (src, "stopping flush");
+ gst_rtspsrc_flush (src, FALSE, playing, gst_event_get_seqnum (event));
+ }
+
+ /* now we did the seek and can activate the new segment values */
+ src->segment = seeksegment;
+
+ /* if we're doing a segment seek, post a SEGMENT_START message */
+ if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_segment_start (GST_OBJECT_CAST (src),
+ src->segment.format, src->segment.position));
+ }
+
+ /* mark discont when needed */
+ if (!(rate_change_only && rate_change_same_direction)) {
+ GST_DEBUG_OBJECT (src, "mark DISCONT, we did a seek to another position");
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ stream->discont = TRUE;
+ }
+ }
+
+ /* and continue playing if needed. If we are not acting as a live source,
+ * then only the RTSP PLAYING state, set earlier, matters. */
+ GST_OBJECT_LOCK (src);
+ if (src->is_live) {
+ playing = (GST_STATE_PENDING (src) == GST_STATE_VOID_PENDING
+ && GST_STATE (src) == GST_STATE_PLAYING)
+ || (GST_STATE_PENDING (src) == GST_STATE_PLAYING);
+ }
+ GST_OBJECT_UNLOCK (src);
+
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ if (flags & GST_SEEK_FLAG_ACCURATE)
+ seek_style = "RAP";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT)
+ seek_style = "CoRAP";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT
+ && flags & GST_SEEK_FLAG_SNAP_BEFORE)
+ seek_style = "First-Prior";
+ else if (flags & GST_SEEK_FLAG_KEY_UNIT && flags & GST_SEEK_FLAG_SNAP_AFTER)
+ seek_style = "Next";
+ }
+
+ if (playing)
+ gst_rtspsrc_play (src, &seeksegment, FALSE, seek_style);
+
+ GST_RTSP_STREAM_UNLOCK (src);
+
+ return TRUE;
+
+ /* ERRORS */
+no_format:
+ {
+ GST_DEBUG_OBJECT (src, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+not_seekable:
+ {
+ GST_DEBUG_OBJECT (src, "stream is not seekable");
+ return FALSE;
+ }
+invalid_segment_flag:
+ {
+ GST_WARNING_OBJECT (src, "Segment seeks not supported");
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_rtspsrc_handle_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstRTSPSrc *src;
+ gboolean res = TRUE;
+ gboolean forward;
+
+ src = GST_RTSPSRC_CAST (parent);
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s received event %s",
+ GST_DEBUG_PAD_NAME (pad), GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ guint32 seqnum = gst_event_get_seqnum (event);
+ if (seqnum == src->seek_seqnum) {
+ GST_LOG_OBJECT (pad, "Drop duplicated SEEK event seqnum %"
+ G_GUINT32_FORMAT, seqnum);
+ } else {
+ res = gst_rtspsrc_perform_seek (src, event);
+ }
+ }
+ forward = FALSE;
+ break;
+ case GST_EVENT_QOS:
+ case GST_EVENT_NAVIGATION:
+ case GST_EVENT_LATENCY:
+ default:
+ forward = TRUE;
+ break;
+ }
+ if (forward) {
+ GstPad *target;
+
+ if ((target = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (pad)))) {
+ res = gst_pad_send_event (target, event);
+ gst_object_unref (target);
+ } else {
+ gst_event_unref (event);
+ }
+ } else {
+ gst_event_unref (event);
+ }
+
+ return res;
+}
+
+static void
+gst_rtspsrc_stream_start_event_add_group_id (GstRTSPSrc * src, GstEvent * event)
+{
+ g_mutex_lock (&src->group_lock);
+
+ if (src->group_id == GST_GROUP_ID_INVALID)
+ src->group_id = gst_util_group_id_next ();
+
+ g_mutex_unlock (&src->group_lock);
+
+ gst_event_set_group_id (event, src->group_id);
+}
+
/* Sink-event handler on the internal pads feeding a stream's ghost source
 * pad. STREAM_START events are replaced with one carrying a globally unique
 * stream-id (SHA256 of the connection location plus the SDP derived
 * stream_id) and the shared group id; SEGMENT events get the pending seek
 * seqnum. Everything is then pushed on the stream's source pad. */
static gboolean
gst_rtspsrc_handle_src_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstRTSPStream *stream;
  GstRTSPSrc *self = GST_RTSPSRC (GST_OBJECT_PARENT (parent));

  /* the stream was stored as pad private data when the pad was created */
  stream = gst_pad_get_element_private (pad);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_START:{
      GChecksum *cs;
      gchar *uri;
      gchar *stream_id;

      /* hash the location so different servers/urls yield different ids */
      cs = g_checksum_new (G_CHECKSUM_SHA256);
      uri = self->conninfo.location;
      g_checksum_update (cs, (const guchar *) uri, strlen (uri));

      stream_id =
          g_strdup_printf ("%s/%s", g_checksum_get_string (cs),
          stream->stream_id);

      g_checksum_free (cs);
      /* drop the original event and send our own with the new id */
      gst_event_unref (event);
      event = gst_event_new_stream_start (stream_id);
      gst_rtspsrc_stream_start_event_add_group_id (self, event);
      g_free (stream_id);
      break;
    }
    case GST_EVENT_SEGMENT:
      /* stamp the segment with the seqnum of the seek that caused it */
      if (self->seek_seqnum != GST_SEQNUM_INVALID)
        GST_EVENT_SEQNUM (event) = self->seek_seqnum;
      break;
    default:
      break;
  }

  return gst_pad_push_event (stream->srcpad, event);
}
+
+/* this is the final event function we receive on the internal source pad when
+ * we deal with TCP connections */
+static gboolean
+gst_rtspsrc_handle_internal_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ gboolean res;
+
+ GST_DEBUG_OBJECT (pad, "received event %s", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ case GST_EVENT_QOS:
+ case GST_EVENT_NAVIGATION:
+ case GST_EVENT_LATENCY:
+ default:
+ gst_event_unref (event);
+ res = TRUE;
+ break;
+ }
+ return res;
+}
+
+/* this is the final query function we receive on the internal source pad when
+ * we deal with TCP connections */
+static gboolean
+gst_rtspsrc_handle_internal_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstRTSPSrc *src;
+ gboolean res = FALSE;
+
+ src = GST_RTSPSRC_CAST (gst_pad_get_element_private (pad));
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s received query %s",
+ GST_DEBUG_PAD_NAME (pad), GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:
+ {
+ /* no idea */
+ break;
+ }
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ switch (format) {
+ case GST_FORMAT_TIME:
+ gst_query_set_duration (query, format, src->segment.duration);
+ res = TRUE;
+ break;
+ default:
+ break;
+ }
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ {
+ /* we are live with a min latency of 0 and unlimited max latency, this
+ * result will be updated by the session manager if there is any. */
+ gst_query_set_latency (query, src->is_live, 0, -1);
+ res = TRUE;
+ break;
+ }
+ default:
+ break;
+ }
+
+ return res;
+}
+
+/* this query is executed on the ghost source pad exposed on rtspsrc. */
+static gboolean
+gst_rtspsrc_handle_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstRTSPSrc *src;
+ gboolean res = FALSE;
+
+ src = GST_RTSPSRC_CAST (parent);
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s received query %s",
+ GST_DEBUG_PAD_NAME (pad), GST_QUERY_TYPE_NAME (query));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ gst_query_parse_duration (query, &format, NULL);
+
+ switch (format) {
+ case GST_FORMAT_TIME:
+ gst_query_set_duration (query, format, src->segment.duration);
+ res = TRUE;
+ break;
+ default:
+ break;
+ }
+ break;
+ }
+ case GST_QUERY_SEEKING:
+ {
+ GstFormat format;
+
+ gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
+ if (format == GST_FORMAT_TIME) {
+ gboolean seekable = TRUE;
+ GstClockTime start = 0, duration = src->segment.duration;
+
+ /* seeking without duration is unlikely */
+ seekable = seekable && src->seekable >= 0.0 && src->segment.duration &&
+ GST_CLOCK_TIME_IS_VALID (src->segment.duration);
+
+ if (seekable) {
+ if (src->seekable > 0.0) {
+ start = src->last_pos - src->seekable * GST_SECOND;
+ } else {
+ /* src->seekable == 0 means that we can only seek to 0 */
+ start = 0;
+ duration = 0;
+ }
+ }
+
+ GST_LOG_OBJECT (src, "seekable: %d, duration: %" GST_TIME_FORMAT
+ ", src->seekable: %f", seekable,
+ GST_TIME_ARGS (src->segment.duration), src->seekable);
+
+ gst_query_set_seeking (query, GST_FORMAT_TIME, seekable, start,
+ duration);
+ res = TRUE;
+ }
+ break;
+ }
+ case GST_QUERY_URI:
+ {
+ gchar *uri;
+
+ uri = gst_rtspsrc_uri_get_uri (GST_URI_HANDLER (src));
+ if (uri != NULL) {
+ gst_query_set_uri (query, uri);
+ g_free (uri);
+ res = TRUE;
+ }
+ break;
+ }
+ default:
+ {
+ GstPad *target = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (pad));
+
+ /* forward the query to the proxy target pad */
+ if (target) {
+ res = gst_pad_query (target, query);
+ gst_object_unref (target);
+ }
+ break;
+ }
+ }
+
+ return res;
+}
+
+/* callback for RTCP messages to be sent to the server when operating in TCP
+ * mode. */
+static GstFlowReturn
+gst_rtspsrc_sink_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
+{
+ GstRTSPSrc *src;
+ GstRTSPStream *stream;
+ GstFlowReturn res = GST_FLOW_OK;
+ GstRTSPResult ret;
+ GstRTSPMessage message = { 0 };
+ GstRTSPConnInfo *conninfo;
+
+ stream = (GstRTSPStream *) gst_pad_get_element_private (pad);
+ src = stream->parent;
+
+ gst_rtsp_message_init_data (&message, stream->channel[1]);
+
+ /* lend the body data to the message */
+ gst_rtsp_message_set_body_buffer (&message, buffer);
+
+ if (stream->conninfo.connection)
+ conninfo = &stream->conninfo;
+ else
+ conninfo = &src->conninfo;
+
+ GST_DEBUG_OBJECT (src, "sending %u bytes RTCP",
+ (guint) gst_buffer_get_size (buffer));
+ ret = gst_rtspsrc_connection_send (src, conninfo, &message, 0);
+ GST_DEBUG_OBJECT (src, "sent RTCP, %d", ret);
+
+ gst_rtsp_message_unset (&message);
+
+ gst_buffer_unref (buffer);
+
+ return res;
+}
+
+static GstFlowReturn
+gst_rtspsrc_push_backchannel_buffer (GstRTSPSrc * src, guint id,
+ GstSample * sample)
+{
+ GstFlowReturn res = GST_FLOW_OK;
+ GstRTSPStream *stream;
+
+ if (!src->conninfo.connected || src->state != GST_RTSP_STATE_PLAYING)
+ goto out;
+
+ stream = find_stream (src, &id, (gpointer) find_stream_by_id);
+ if (stream == NULL) {
+ GST_ERROR_OBJECT (src, "no stream with id %u", id);
+ goto out;
+ }
+
+ if (src->interleaved) {
+ GstBuffer *buffer;
+ GstRTSPResult ret;
+ GstRTSPMessage message = { 0 };
+ GstRTSPConnInfo *conninfo;
+
+ buffer = gst_sample_get_buffer (sample);
+
+ gst_rtsp_message_init_data (&message, stream->channel[0]);
+
+ /* lend the body data to the message */
+ gst_rtsp_message_set_body_buffer (&message, buffer);
+
+ if (stream->conninfo.connection)
+ conninfo = &stream->conninfo;
+ else
+ conninfo = &src->conninfo;
+
+ GST_DEBUG_OBJECT (src, "sending %u bytes backchannel RTP",
+ (guint) gst_buffer_get_size (buffer));
+ ret = gst_rtspsrc_connection_send (src, conninfo, &message, 0);
+ GST_DEBUG_OBJECT (src, "sent backchannel RTP, %d", ret);
+
+ gst_rtsp_message_unset (&message);
+
+ res = GST_FLOW_OK;
+ } else {
+ g_signal_emit_by_name (stream->rtpsrc, "push-sample", sample, &res);
+ GST_DEBUG_OBJECT (src, "sent backchannel RTP sample %p: %s", sample,
+ gst_flow_get_name (res));
+ }
+
+out:
+ gst_sample_unref (sample);
+
+ return res;
+}
+
+static GstPadProbeReturn
+pad_blocked (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+{
+ GstRTSPSrc *src = user_data;
+
+ GST_DEBUG_OBJECT (src, "pad %s:%s blocked, activating streams",
+ GST_DEBUG_PAD_NAME (pad));
+
+ /* activate the streams */
+ GST_OBJECT_LOCK (src);
+ if (!src->need_activate)
+ goto was_ok;
+
+ src->need_activate = FALSE;
+ GST_OBJECT_UNLOCK (src);
+
+ gst_rtspsrc_activate_streams (src);
+
+ return GST_PAD_PROBE_OK;
+
+was_ok:
+ {
+ GST_OBJECT_UNLOCK (src);
+ return GST_PAD_PROBE_OK;
+ }
+}
+
+static GstPadProbeReturn
+udpsrc_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
+{
+ guint32 *segment_seqnum = user_data;
+
+ switch (GST_EVENT_TYPE (info->data)) {
+ case GST_EVENT_SEGMENT:
+ if (!gst_event_is_writable (info->data))
+ info->data = gst_event_make_writable (info->data);
+
+ *segment_seqnum = gst_event_get_seqnum (info->data);
+ default:
+ break;
+ }
+
+ return GST_PAD_PROBE_OK;
+}
+
+static gboolean
+copy_sticky_events (GstPad * pad, GstEvent ** event, gpointer user_data)
+{
+ GstPad *gpad = GST_PAD_CAST (user_data);
+
+ GST_DEBUG_OBJECT (gpad, "store sticky event %" GST_PTR_FORMAT, *event);
+ gst_pad_store_sticky_event (gpad, *event);
+
+ return TRUE;
+}
+
+static gboolean
+add_backchannel_fakesink (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstPad * srcpad)
+{
+ GstPad *sinkpad;
+ GstElement *fakesink;
+
+ fakesink = gst_element_factory_make ("fakesink", NULL);
+ if (fakesink == NULL) {
+ GST_ERROR_OBJECT (src, "no fakesink");
+ return FALSE;
+ }
+
+ sinkpad = gst_element_get_static_pad (fakesink, "sink");
+
+ GST_DEBUG_OBJECT (src, "backchannel stream %p, hooking fakesink", stream);
+
+ gst_bin_add (GST_BIN_CAST (src), fakesink);
+ if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
+ GST_WARNING_OBJECT (src, "could not link to fakesink");
+ return FALSE;
+ }
+
+ gst_object_unref (sinkpad);
+
+ gst_element_sync_state_with_parent (fakesink);
+ return TRUE;
+}
+
+/* this callback is called when the session manager generated a new src pad with
+ * payloaded RTP packets. We simply ghost the pad here. */
+static void
+new_manager_pad (GstElement * manager, GstPad * pad, GstRTSPSrc * src)
+{
+ gchar *name;
+ GstPadTemplate *template;
+ gint id, ssrc, pt;
+ GList *ostreams;
+ GstRTSPStream *stream;
+ gboolean all_added;
+ GstPad *internal_src;
+
+ GST_DEBUG_OBJECT (src, "got new manager pad %" GST_PTR_FORMAT, pad);
+
+ GST_RTSP_STATE_LOCK (src);
+ /* find stream */
+ name = gst_object_get_name (GST_OBJECT_CAST (pad));
+ if (sscanf (name, "recv_rtp_src_%u_%u_%u", &id, &ssrc, &pt) != 3)
+ goto unknown_stream;
+
+ GST_DEBUG_OBJECT (src, "stream: %u, SSRC %08x, PT %d", id, ssrc, pt);
+
+ stream = find_stream (src, &id, (gpointer) find_stream_by_id);
+ if (stream == NULL)
+ goto unknown_stream;
+
+ /* save SSRC */
+ stream->ssrc = ssrc;
+
+ /* we'll add it later see below */
+ stream->added = TRUE;
+
+ /* check if we added all streams */
+ all_added = TRUE;
+ for (ostreams = src->streams; ostreams; ostreams = g_list_next (ostreams)) {
+ GstRTSPStream *ostream = (GstRTSPStream *) ostreams->data;
+
+ GST_DEBUG_OBJECT (src, "stream %p, container %d, added %d, setup %d",
+ ostream, ostream->container, ostream->added, ostream->setup);
+
+ /* if we find a stream for which we did a setup that is not added, we
+ * need to wait some more */
+ if (ostream->setup && !ostream->added) {
+ all_added = FALSE;
+ break;
+ }
+ }
+ GST_RTSP_STATE_UNLOCK (src);
+
+ /* create a new pad we will use to stream to */
+ template = gst_static_pad_template_get (&rtptemplate);
+ stream->srcpad = gst_ghost_pad_new_from_template (name, pad, template);
+ gst_object_unref (template);
+ g_free (name);
+
+ /* We intercept and modify the stream start event */
+ internal_src =
+ GST_PAD (gst_proxy_pad_get_internal (GST_PROXY_PAD (stream->srcpad)));
+ gst_pad_set_element_private (internal_src, stream);
+ gst_pad_set_event_function (internal_src, gst_rtspsrc_handle_src_sink_event);
+ gst_object_unref (internal_src);
+
+ gst_pad_set_event_function (stream->srcpad, gst_rtspsrc_handle_src_event);
+ gst_pad_set_query_function (stream->srcpad, gst_rtspsrc_handle_src_query);
+ gst_pad_set_active (stream->srcpad, TRUE);
+ gst_pad_sticky_events_foreach (pad, copy_sticky_events, stream->srcpad);
+
+ /* don't add the srcpad if this is a sendonly stream */
+ if (stream->is_backchannel)
+ add_backchannel_fakesink (src, stream, stream->srcpad);
+ else
+ gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
+
+ if (all_added) {
+ GST_DEBUG_OBJECT (src, "We added all streams");
+ /* when we get here, all stream are added and we can fire the no-more-pads
+ * signal. */
+ gst_element_no_more_pads (GST_ELEMENT_CAST (src));
+ }
+
+ return;
+
+ /* ERRORS */
+unknown_stream:
+ {
+ GST_DEBUG_OBJECT (src, "ignoring unknown stream");
+ GST_RTSP_STATE_UNLOCK (src);
+ g_free (name);
+ return;
+ }
+}
+
+static GstCaps *
+stream_get_caps_for_pt (GstRTSPStream * stream, guint pt)
+{
+ guint i, len;
+
+ len = stream->ptmap->len;
+ for (i = 0; i < len; i++) {
+ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);
+ if (item->pt == pt)
+ return item->caps;
+ }
+ return NULL;
+}
+
+static GstCaps *
+request_pt_map (GstElement * manager, guint session, guint pt, GstRTSPSrc * src)
+{
+ GstRTSPStream *stream;
+ GstCaps *caps;
+
+ GST_DEBUG_OBJECT (src, "getting pt map for pt %d in session %d", pt, session);
+
+ GST_RTSP_STATE_LOCK (src);
+ stream = find_stream (src, &session, (gpointer) find_stream_by_id);
+ if (!stream)
+ goto unknown_stream;
+
+ if ((caps = stream_get_caps_for_pt (stream, pt)))
+ gst_caps_ref (caps);
+ GST_RTSP_STATE_UNLOCK (src);
+
+ return caps;
+
+unknown_stream:
+ {
+ GST_DEBUG_OBJECT (src, "unknown stream %d", session);
+ GST_RTSP_STATE_UNLOCK (src);
+ return NULL;
+ }
+}
+
+static void
+gst_rtspsrc_do_stream_eos (GstRTSPSrc * src, GstRTSPStream * stream)
+{
+ GST_DEBUG_OBJECT (src, "setting stream for session %u to EOS", stream->id);
+
+ if (stream->eos)
+ goto was_eos;
+
+ stream->eos = TRUE;
+ gst_rtspsrc_stream_push_event (src, stream, gst_event_new_eos ());
+ return;
+
+ /* ERRORS */
+was_eos:
+ {
+ GST_DEBUG_OBJECT (src, "stream for session %u was already EOS", stream->id);
+ return;
+ }
+}
+
+static void
+on_bye_ssrc (GObject * session, GObject * source, GstRTSPStream * stream)
+{
+ GstRTSPSrc *src = stream->parent;
+ guint ssrc;
+
+ g_object_get (source, "ssrc", &ssrc, NULL);
+
+ GST_DEBUG_OBJECT (src, "source %08x, stream %08x, session %u received BYE",
+ ssrc, stream->ssrc, stream->id);
+
+ if (ssrc == stream->ssrc)
+ gst_rtspsrc_do_stream_eos (src, stream);
+}
+
+static void
+on_timeout_common (GObject * session, GObject * source, GstRTSPStream * stream)
+{
+ GstRTSPSrc *src = stream->parent;
+ guint ssrc;
+
+ g_object_get (source, "ssrc", &ssrc, NULL);
+
+ GST_WARNING_OBJECT (src, "source %08x, stream %08x in session %u timed out",
+ ssrc, stream->ssrc, stream->id);
+
+ if (ssrc == stream->ssrc)
+ gst_rtspsrc_do_stream_eos (src, stream);
+}
+
+static void
+on_timeout (GObject * session, GObject * source, GstRTSPStream * stream)
+{
+ GstRTSPSrc *src = stream->parent;
+
+ /* timeout, post element message */
+ gst_element_post_message (GST_ELEMENT_CAST (src),
+ gst_message_new_element (GST_OBJECT_CAST (src),
+ gst_structure_new ("GstRTSPSrcTimeout",
+ "cause", G_TYPE_ENUM, GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP,
+ "stream-number", G_TYPE_INT, stream->id, "ssrc", G_TYPE_UINT,
+ stream->ssrc, NULL)));
+
+ /* In non-live mode, timeouts can occur if we are PAUSED, this doesn't mean
+ * the stream is EOS, it may simply be blocked */
+ if (src->is_live || !src->interleaved)
+ on_timeout_common (session, source, stream);
+}
+
+static void
+on_npt_stop (GstElement * rtpbin, guint session, guint ssrc, GstRTSPSrc * src)
+{
+ GstRTSPStream *stream;
+
+ GST_DEBUG_OBJECT (src, "source in session %u reached NPT stop", session);
+
+ /* get stream for session */
+ stream = find_stream (src, &session, (gpointer) find_stream_by_id);
+ if (stream) {
+ gst_rtspsrc_do_stream_eos (src, stream);
+ }
+}
+
+static void
+on_ssrc_active (GObject * session, GObject * source, GstRTSPStream * stream)
+{
+ GST_DEBUG_OBJECT (stream->parent, "source in session %u is active",
+ stream->id);
+}
+
+static void
+set_manager_buffer_mode (GstRTSPSrc * src)
+{
+ GObjectClass *klass;
+
+ if (src->manager == NULL)
+ return;
+
+ klass = G_OBJECT_GET_CLASS (G_OBJECT (src->manager));
+
+ if (!g_object_class_find_property (klass, "buffer-mode"))
+ return;
+
+ if (src->buffer_mode != BUFFER_MODE_AUTO) {
+ g_object_set (src->manager, "buffer-mode", src->buffer_mode, NULL);
+
+ return;
+ }
+
+ GST_DEBUG_OBJECT (src,
+ "auto buffering mode, have clock %" GST_PTR_FORMAT, src->provided_clock);
+
+ if (src->provided_clock) {
+ GstClock *clock = gst_element_get_clock (GST_ELEMENT_CAST (src));
+
+ if (clock == src->provided_clock) {
+ GST_DEBUG_OBJECT (src, "selected synced");
+ g_object_set (src->manager, "buffer-mode", BUFFER_MODE_SYNCED, NULL);
+
+ if (clock)
+ gst_object_unref (clock);
+
+ return;
+ }
+
+ /* Otherwise fall-through and use another buffer mode */
+ if (clock)
+ gst_object_unref (clock);
+ }
+
+ GST_DEBUG_OBJECT (src, "auto buffering mode");
+ if (src->use_buffering) {
+ GST_DEBUG_OBJECT (src, "selected buffer");
+ g_object_set (src->manager, "buffer-mode", BUFFER_MODE_BUFFER, NULL);
+ } else {
+ GST_DEBUG_OBJECT (src, "selected slave");
+ g_object_set (src->manager, "buffer-mode", BUFFER_MODE_SLAVE, NULL);
+ }
+}
+
+static GstCaps *
+request_key (GstElement * srtpdec, guint ssrc, GstRTSPStream * stream)
+{
+ guint i;
+ GstCaps *caps;
+ GstMIKEYMessage *msg = stream->mikey;
+
+ GST_DEBUG ("request key SSRC %u", ssrc);
+
+ caps = gst_caps_ref (stream_get_caps_for_pt (stream, stream->default_pt));
+ caps = gst_caps_make_writable (caps);
+
+ /* parse crypto sessions and look for the SSRC rollover counter */
+ msg = stream->mikey;
+ for (i = 0; msg && i < gst_mikey_message_get_n_cs (msg); i++) {
+ const GstMIKEYMapSRTP *map = gst_mikey_message_get_cs_srtp (msg, i);
+
+ if (ssrc == map->ssrc) {
+ gst_caps_set_simple (caps, "roc", G_TYPE_UINT, map->roc, NULL);
+ break;
+ }
+ }
+
+ return caps;
+}
+
+static GstElement *
+request_rtp_decoder (GstElement * rtpbin, guint session, GstRTSPStream * stream)
+{
+ GST_DEBUG ("decoder session %u, stream %p, %d", session, stream, stream->id);
+ if (stream->id != session)
+ return NULL;
+
+ if (stream->profile != GST_RTSP_PROFILE_SAVP &&
+ stream->profile != GST_RTSP_PROFILE_SAVPF)
+ return NULL;
+
+ if (stream->srtpdec == NULL) {
+ gchar *name;
+
+ name = g_strdup_printf ("srtpdec_%u", session);
+ stream->srtpdec = gst_element_factory_make ("srtpdec", name);
+ g_free (name);
+
+ if (stream->srtpdec == NULL) {
+ GST_ELEMENT_ERROR (stream->parent, CORE, MISSING_PLUGIN, (NULL),
+ ("no srtpdec element present!"));
+ return NULL;
+ }
+ g_signal_connect (stream->srtpdec, "request-key",
+ (GCallback) request_key, stream);
+ }
+ return gst_object_ref (stream->srtpdec);
+}
+
+static GstElement *
+request_rtcp_encoder (GstElement * rtpbin, guint session,
+ GstRTSPStream * stream)
+{
+ gchar *name;
+ GstPad *pad;
+
+ GST_DEBUG ("decoder session %u, stream %p, %d", session, stream, stream->id);
+ if (stream->id != session)
+ return NULL;
+
+ if (stream->profile != GST_RTSP_PROFILE_SAVP &&
+ stream->profile != GST_RTSP_PROFILE_SAVPF)
+ return NULL;
+
+ if (stream->srtpenc == NULL) {
+ GstStructure *s;
+
+ name = g_strdup_printf ("srtpenc_%u", session);
+ stream->srtpenc = gst_element_factory_make ("srtpenc", name);
+ g_free (name);
+
+ if (stream->srtpenc == NULL) {
+ GST_ELEMENT_ERROR (stream->parent, CORE, MISSING_PLUGIN, (NULL),
+ ("no srtpenc element present!"));
+ return NULL;
+ }
+
+ /* get RTCP crypto parameters from caps */
+ s = gst_caps_get_structure (stream->srtcpparams, 0);
+ if (s) {
+ GstBuffer *buf;
+ const gchar *str;
+ GType ciphertype, authtype;
+ GValue rtcp_cipher = G_VALUE_INIT, rtcp_auth = G_VALUE_INIT;
+
+ ciphertype = g_type_from_name ("GstSrtpCipherType");
+ authtype = g_type_from_name ("GstSrtpAuthType");
+ g_value_init (&rtcp_cipher, ciphertype);
+ g_value_init (&rtcp_auth, authtype);
+
+ str = gst_structure_get_string (s, "srtcp-cipher");
+ gst_value_deserialize (&rtcp_cipher, str);
+ str = gst_structure_get_string (s, "srtcp-auth");
+ gst_value_deserialize (&rtcp_auth, str);
+ gst_structure_get (s, "srtp-key", GST_TYPE_BUFFER, &buf, NULL);
+
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtp-cipher",
+ &rtcp_cipher);
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtp-auth",
+ &rtcp_auth);
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtcp-cipher",
+ &rtcp_cipher);
+ g_object_set_property (G_OBJECT (stream->srtpenc), "rtcp-auth",
+ &rtcp_auth);
+ g_object_set (stream->srtpenc, "key", buf, NULL);
+
+ g_value_unset (&rtcp_cipher);
+ g_value_unset (&rtcp_auth);
+ gst_buffer_unref (buf);
+ }
+ }
+ name = g_strdup_printf ("rtcp_sink_%d", session);
+ pad = gst_element_request_pad_simple (stream->srtpenc, name);
+ g_free (name);
+ gst_object_unref (pad);
+
+ return gst_object_ref (stream->srtpenc);
+}
+
+static GstElement *
+request_aux_receiver (GstElement * rtpbin, guint sessid, GstRTSPSrc * src)
+{
+ GstElement *rtx, *bin;
+ GstPad *pad;
+ gchar *name;
+ GstRTSPStream *stream;
+
+ stream = find_stream (src, &sessid, (gpointer) find_stream_by_id);
+ if (!stream) {
+ GST_WARNING_OBJECT (src, "Stream %u not found", sessid);
+ return NULL;
+ }
+
+ GST_INFO_OBJECT (src, "creating retransmision receiver for session %u "
+ "with map %" GST_PTR_FORMAT, sessid, stream->rtx_pt_map);
+ bin = gst_bin_new (NULL);
+ rtx = gst_element_factory_make ("rtprtxreceive", NULL);
+ g_object_set (rtx, "payload-type-map", stream->rtx_pt_map, NULL);
+ gst_bin_add (GST_BIN (bin), rtx);
+
+ pad = gst_element_get_static_pad (rtx, "src");
+ name = g_strdup_printf ("src_%u", sessid);
+ gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
+ g_free (name);
+ gst_object_unref (pad);
+
+ pad = gst_element_get_static_pad (rtx, "sink");
+ name = g_strdup_printf ("sink_%u", sessid);
+ gst_element_add_pad (bin, gst_ghost_pad_new (name, pad));
+ g_free (name);
+ gst_object_unref (pad);
+
+ return bin;
+}
+
+static void
+add_retransmission (GstRTSPSrc * src, GstRTSPTransport * transport)
+{
+ GList *walk;
+ guint signal_id;
+ gboolean do_retransmission = FALSE;
+
+ if (transport->trans != GST_RTSP_TRANS_RTP)
+ return;
+ if (transport->profile != GST_RTSP_PROFILE_AVPF &&
+ transport->profile != GST_RTSP_PROFILE_SAVPF)
+ return;
+
+ signal_id = g_signal_lookup ("request-aux-receiver",
+ G_OBJECT_TYPE (src->manager));
+ /* there's already something connected */
+ if (g_signal_handler_find (src->manager, G_SIGNAL_MATCH_ID, signal_id, 0,
+ NULL, NULL, NULL) != 0) {
+ GST_DEBUG_OBJECT (src, "Not adding RTX AUX element as "
+ "\"request-aux-receiver\" signal is "
+ "already used by the application");
+ return;
+ }
+
+ /* build the retransmission payload type map */
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ gboolean do_retransmission_stream = FALSE;
+ int i;
+
+ if (stream->rtx_pt_map)
+ gst_structure_free (stream->rtx_pt_map);
+ stream->rtx_pt_map = gst_structure_new_empty ("application/x-rtp-pt-map");
+
+ for (i = 0; i < stream->ptmap->len; i++) {
+ PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);
+ GstStructure *s = gst_caps_get_structure (item->caps, 0);
+ const gchar *encoding;
+
+ /* we only care about RTX streams */
+ if ((encoding = gst_structure_get_string (s, "encoding-name"))
+ && g_strcmp0 (encoding, "RTX") == 0) {
+ const gchar *stream_pt_s;
+ gint rtx_pt;
+
+ if (gst_structure_get_int (s, "payload", &rtx_pt)
+ && (stream_pt_s = gst_structure_get_string (s, "apt"))) {
+
+ if (rtx_pt != 0) {
+ gst_structure_set (stream->rtx_pt_map, stream_pt_s, G_TYPE_UINT,
+ rtx_pt, NULL);
+ do_retransmission_stream = TRUE;
+ }
+ }
+ }
+ }
+
+ if (do_retransmission_stream) {
+ GST_DEBUG_OBJECT (src, "built retransmission payload map for stream "
+ "id %i: %" GST_PTR_FORMAT, stream->id, stream->rtx_pt_map);
+ do_retransmission = TRUE;
+ } else {
+ GST_DEBUG_OBJECT (src, "no retransmission payload map for stream "
+ "id %i", stream->id);
+ gst_structure_free (stream->rtx_pt_map);
+ stream->rtx_pt_map = NULL;
+ }
+ }
+
+ if (do_retransmission) {
+ GST_DEBUG_OBJECT (src, "Enabling retransmissions");
+
+ g_object_set (src->manager, "do-retransmission", TRUE, NULL);
+
+ /* enable RFC4588 retransmission handling by setting rtprtxreceive
+ * as the "aux" element of rtpbin */
+ g_signal_connect (src->manager, "request-aux-receiver",
+ (GCallback) request_aux_receiver, src);
+ } else {
+ GST_DEBUG_OBJECT (src,
+ "Not enabling retransmissions as no stream had a retransmission payload map");
+ }
+}
+
+/* try to get and configure a manager */
+static gboolean
+gst_rtspsrc_stream_configure_manager (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstRTSPTransport * transport)
+{
+ const gchar *manager;
+ gchar *name;
+ GstStateChangeReturn ret;
+
+ if (!src->is_live)
+ goto use_no_manager;
+
+ /* find a manager */
+ if (gst_rtsp_transport_get_manager (transport->trans, &manager, 0) < 0)
+ goto no_manager;
+
+ if (manager) {
+ GST_DEBUG_OBJECT (src, "using manager %s", manager);
+
+ /* configure the manager */
+ if (src->manager == NULL) {
+ GObjectClass *klass;
+
+ if (!(src->manager = gst_element_factory_make (manager, "manager"))) {
+ /* fallback */
+ if (gst_rtsp_transport_get_manager (transport->trans, &manager, 1) < 0)
+ goto no_manager;
+
+ if (!manager)
+ goto use_no_manager;
+
+ if (!(src->manager = gst_element_factory_make (manager, "manager")))
+ goto manager_failed;
+ }
+
+ /* we manage this element */
+ gst_element_set_locked_state (src->manager, TRUE);
+ gst_bin_add (GST_BIN_CAST (src), src->manager);
+
+ ret = gst_element_set_state (src->manager, GST_STATE_PAUSED);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto start_manager_failure;
+
+ g_object_set (src->manager, "latency", src->latency, NULL);
+
+ klass = G_OBJECT_GET_CLASS (G_OBJECT (src->manager));
+
+ if (g_object_class_find_property (klass, "ntp-sync")) {
+ g_object_set (src->manager, "ntp-sync", src->ntp_sync, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "rfc7273-sync")) {
+ g_object_set (src->manager, "rfc7273-sync", src->rfc7273_sync, NULL);
+ }
+
+ if (src->use_pipeline_clock) {
+ if (g_object_class_find_property (klass, "use-pipeline-clock")) {
+ g_object_set (src->manager, "use-pipeline-clock", TRUE, NULL);
+ }
+ } else {
+ if (g_object_class_find_property (klass, "ntp-time-source")) {
+ g_object_set (src->manager, "ntp-time-source", src->ntp_time_source,
+ NULL);
+ }
+ }
+
+ if (src->sdes && g_object_class_find_property (klass, "sdes")) {
+ g_object_set (src->manager, "sdes", src->sdes, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "drop-on-latency")) {
+ g_object_set (src->manager, "drop-on-latency", src->drop_on_latency,
+ NULL);
+ }
+
+ if (g_object_class_find_property (klass, "max-rtcp-rtp-time-diff")) {
+ g_object_set (src->manager, "max-rtcp-rtp-time-diff",
+ src->max_rtcp_rtp_time_diff, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "max-ts-offset-adjustment")) {
+ g_object_set (src->manager, "max-ts-offset-adjustment",
+ src->max_ts_offset_adjustment, NULL);
+ }
+
+ if (g_object_class_find_property (klass, "max-ts-offset")) {
+ gint64 max_ts_offset;
+
+ /* setting max-ts-offset in the manager has side effects so only do it
+ * if the value differs */
+ g_object_get (src->manager, "max-ts-offset", &max_ts_offset, NULL);
+ if (max_ts_offset != src->max_ts_offset) {
+ g_object_set (src->manager, "max-ts-offset", src->max_ts_offset,
+ NULL);
+ }
+ }
+
+ /* buffer mode pauses are handled by adding offsets to buffer times,
+ * but some depayloaders may have a hard time syncing output times
+ * with such input times, e.g. container ones, most notably ASF */
+ /* TODO alternatives are having an event that indicates these shifts,
+ * or having rtsp extensions provide suggestion on buffer mode */
+ /* valid duration implies not likely live pipeline,
+ * so slaving in jitterbuffer does not make much sense
+ * (and might mess things up due to bursts) */
+ if (GST_CLOCK_TIME_IS_VALID (src->segment.duration) &&
+ src->segment.duration && stream->container) {
+ src->use_buffering = TRUE;
+ } else {
+ src->use_buffering = FALSE;
+ }
+
+ set_manager_buffer_mode (src);
+
+ /* connect to signals */
+ GST_DEBUG_OBJECT (src, "connect to signals on session manager, stream %p",
+ stream);
+ src->manager_sig_id =
+ g_signal_connect (src->manager, "pad-added",
+ (GCallback) new_manager_pad, src);
+ src->manager_ptmap_id =
+ g_signal_connect (src->manager, "request-pt-map",
+ (GCallback) request_pt_map, src);
+
+ g_signal_connect (src->manager, "on-npt-stop", (GCallback) on_npt_stop,
+ src);
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_NEW_MANAGER], 0,
+ src->manager);
+
+ if (src->do_retransmission)
+ add_retransmission (src, transport);
+ }
+ g_signal_connect (src->manager, "request-rtp-decoder",
+ (GCallback) request_rtp_decoder, stream);
+ g_signal_connect (src->manager, "request-rtcp-decoder",
+ (GCallback) request_rtp_decoder, stream);
+ g_signal_connect (src->manager, "request-rtcp-encoder",
+ (GCallback) request_rtcp_encoder, stream);
+
+ /* we stream directly to the manager, get some pads. Each RTSP stream goes
+ * into a separate RTP session. */
+ name = g_strdup_printf ("recv_rtp_sink_%u", stream->id);
+ stream->channelpad[0] = gst_element_request_pad_simple (src->manager, name);
+ g_free (name);
+ name = g_strdup_printf ("recv_rtcp_sink_%u", stream->id);
+ stream->channelpad[1] = gst_element_request_pad_simple (src->manager, name);
+ g_free (name);
+
+ /* now configure the bandwidth in the manager */
+ if (g_signal_lookup ("get-internal-session",
+ G_OBJECT_TYPE (src->manager)) != 0) {
+ GObject *rtpsession;
+
+ g_signal_emit_by_name (src->manager, "get-internal-session", stream->id,
+ &rtpsession);
+ if (rtpsession) {
+ GstRTPProfile rtp_profile;
+
+ GST_INFO_OBJECT (src, "configure bandwidth in session %p", rtpsession);
+
+ stream->session = rtpsession;
+
+ if (stream->as_bandwidth != -1) {
+ GST_INFO_OBJECT (src, "setting AS: %f",
+ (gdouble) (stream->as_bandwidth * 1000));
+ g_object_set (rtpsession, "bandwidth",
+ (gdouble) (stream->as_bandwidth * 1000), NULL);
+ }
+ if (stream->rr_bandwidth != -1) {
+ GST_INFO_OBJECT (src, "setting RR: %u", stream->rr_bandwidth);
+ g_object_set (rtpsession, "rtcp-rr-bandwidth", stream->rr_bandwidth,
+ NULL);
+ }
+ if (stream->rs_bandwidth != -1) {
+ GST_INFO_OBJECT (src, "setting RS: %u", stream->rs_bandwidth);
+ g_object_set (rtpsession, "rtcp-rs-bandwidth", stream->rs_bandwidth,
+ NULL);
+ }
+
+ switch (stream->profile) {
+ case GST_RTSP_PROFILE_AVPF:
+ rtp_profile = GST_RTP_PROFILE_AVPF;
+ break;
+ case GST_RTSP_PROFILE_SAVP:
+ rtp_profile = GST_RTP_PROFILE_SAVP;
+ break;
+ case GST_RTSP_PROFILE_SAVPF:
+ rtp_profile = GST_RTP_PROFILE_SAVPF;
+ break;
+ case GST_RTSP_PROFILE_AVP:
+ default:
+ rtp_profile = GST_RTP_PROFILE_AVP;
+ break;
+ }
+
+ g_object_set (rtpsession, "rtp-profile", rtp_profile, NULL);
+
+ g_object_set (rtpsession, "probation", src->probation, NULL);
+
+ g_object_set (rtpsession, "internal-ssrc", stream->send_ssrc, NULL);
+
+ g_signal_connect (rtpsession, "on-bye-ssrc", (GCallback) on_bye_ssrc,
+ stream);
+ g_signal_connect (rtpsession, "on-bye-timeout",
+ (GCallback) on_timeout_common, stream);
+ g_signal_connect (rtpsession, "on-timeout", (GCallback) on_timeout,
+ stream);
+ g_signal_connect (rtpsession, "on-ssrc-active",
+ (GCallback) on_ssrc_active, stream);
+ }
+ }
+ }
+
+use_no_manager:
+ return TRUE;
+
+ /* ERRORS */
+no_manager:
+ {
+ GST_DEBUG_OBJECT (src, "cannot get a session manager");
+ return FALSE;
+ }
+manager_failed:
+ {
+ GST_DEBUG_OBJECT (src, "no session manager element %s found", manager);
+ return FALSE;
+ }
+start_manager_failure:
+ {
+ GST_DEBUG_OBJECT (src, "could not start session manager");
+ return FALSE;
+ }
+}
+
+/* free the UDP sources allocated when negotiating a transport.
+ * This function is called when the server negotiated to a transport where the
+ * UDP sources are not needed anymore, such as TCP or multicast. */
+static void
+gst_rtspsrc_stream_free_udp (GstRTSPStream * stream)
+{
+ gint i;
+
+ for (i = 0; i < 2; i++) {
+ if (stream->udpsrc[i]) {
+ GST_DEBUG ("free UDP source %d for stream %p", i, stream);
+ gst_element_set_state (stream->udpsrc[i], GST_STATE_NULL);
+ gst_object_unref (stream->udpsrc[i]);
+ stream->udpsrc[i] = NULL;
+ }
+ }
+}
+
/* for TCP, create pads to send and receive data to and from the manager and to
 * intercept various events and queries
 */
static gboolean
gst_rtspsrc_stream_configure_tcp (GstRTSPSrc * src, GstRTSPStream * stream,
    GstRTSPTransport * transport, GstPad ** outpad)
{
  gchar *name;
  GstPadTemplate *template;
  GstPad *pad0, *pad1;

  /* configure for interleaved delivery, nothing needs to be done
   * here, the loop function will call the chain functions of the
   * session manager. */
  stream->channel[0] = transport->interleaved.min;
  stream->channel[1] = transport->interleaved.max;
  GST_DEBUG_OBJECT (src, "stream %p on channels %d-%d", stream,
      stream->channel[0], stream->channel[1]);

  /* we can remove the allocated UDP ports now */
  gst_rtspsrc_stream_free_udp (stream);

  /* no session manager, send data to srcpad directly */
  if (!stream->channelpad[0]) {
    GST_DEBUG_OBJECT (src, "no manager, creating pad");

    /* create a new pad we will use to stream to */
    name = g_strdup_printf ("stream_%u", stream->id);
    template = gst_static_pad_template_get (&rtptemplate);
    stream->channelpad[0] = gst_pad_new_from_template (template, name);
    gst_object_unref (template);
    g_free (name);

    /* set caps and activate */
    gst_pad_use_fixed_caps (stream->channelpad[0]);
    gst_pad_set_active (stream->channelpad[0], TRUE);

    /* the caller gets its own reference; channelpad[0] keeps ours */
    *outpad = gst_object_ref (stream->channelpad[0]);
  } else {
    GST_DEBUG_OBJECT (src, "using manager source pad");

    template = gst_static_pad_template_get (&anysrctemplate);

    /* allocate pads for sending the channel data into the manager */
    pad0 = gst_pad_new_from_template (template, "internalsrc_0");
    /* link our internal src pad to the manager sink pad we stored earlier,
     * then replace the stored pad with the internal one and drop the ref we
     * held on the manager pad */
    gst_pad_link_full (pad0, stream->channelpad[0], GST_PAD_LINK_CHECK_NOTHING);
    gst_object_unref (stream->channelpad[0]);
    stream->channelpad[0] = pad0;
    gst_pad_set_event_function (pad0, gst_rtspsrc_handle_internal_src_event);
    gst_pad_set_query_function (pad0, gst_rtspsrc_handle_internal_src_query);
    gst_pad_set_element_private (pad0, src);
    gst_pad_set_active (pad0, TRUE);

    if (stream->channelpad[1]) {
      /* if we have a sinkpad for the other channel, create a pad and link to the
       * manager. */
      pad1 = gst_pad_new_from_template (template, "internalsrc_1");
      gst_pad_set_event_function (pad1, gst_rtspsrc_handle_internal_src_event);
      gst_pad_link_full (pad1, stream->channelpad[1],
          GST_PAD_LINK_CHECK_NOTHING);
      gst_object_unref (stream->channelpad[1]);
      stream->channelpad[1] = pad1;
      gst_pad_set_active (pad1, TRUE);
    }
    gst_object_unref (template);
  }
  /* setup RTCP transport back to the server if we have to. */
  if (src->manager && src->do_rtcp) {
    GstPad *pad;

    template = gst_static_pad_template_get (&anysinktemplate);

    /* the chain function sends RTCP back over the interleaved connection */
    stream->rtcppad = gst_pad_new_from_template (template, "internalsink_0");
    gst_pad_set_chain_function (stream->rtcppad, gst_rtspsrc_sink_chain);
    gst_pad_set_element_private (stream->rtcppad, stream);
    gst_pad_set_active (stream->rtcppad, TRUE);

    /* get session RTCP pad */
    name = g_strdup_printf ("send_rtcp_src_%u", stream->id);
    pad = gst_element_request_pad_simple (src->manager, name);
    g_free (name);

    /* and link */
    if (pad) {
      gst_pad_link_full (pad, stream->rtcppad, GST_PAD_LINK_CHECK_NOTHING);
      gst_object_unref (pad);
    }

    gst_object_unref (template);
  }
  return TRUE;
}
+
+static void
+gst_rtspsrc_get_transport_info (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstRTSPTransport * transport, const gchar ** destination, gint * min,
+ gint * max, guint * ttl)
+{
+ if (transport->lower_transport == GST_RTSP_LOWER_TRANS_UDP_MCAST) {
+ if (destination) {
+ if (!(*destination = transport->destination))
+ *destination = stream->destination;
+ }
+ if (min && max) {
+ /* transport first */
+ *min = transport->port.min;
+ *max = transport->port.max;
+ if (*min == -1 && *max == -1) {
+ /* then try from SDP */
+ if (stream->port != 0) {
+ *min = stream->port;
+ *max = stream->port + 1;
+ }
+ }
+ }
+
+ if (ttl) {
+ if (!(*ttl = transport->ttl))
+ *ttl = stream->ttl;
+ }
+ } else {
+ if (destination) {
+ /* first take the source, then the endpoint to figure out where to send
+ * the RTCP. */
+ if (!(*destination = transport->source)) {
+ if (src->conninfo.connection)
+ *destination = gst_rtsp_connection_get_ip (src->conninfo.connection);
+ else if (stream->conninfo.connection)
+ *destination =
+ gst_rtsp_connection_get_ip (stream->conninfo.connection);
+ }
+ }
+ if (min && max) {
+ /* for unicast we only expect the ports here */
+ *min = transport->server_port.min;
+ *max = transport->server_port.max;
+ }
+ }
+}
+
/* For multicast create UDP sources and join the multicast group. */
static gboolean
gst_rtspsrc_stream_configure_mcast (GstRTSPSrc * src, GstRTSPStream * stream,
    GstRTSPTransport * transport, GstPad ** outpad)
{
  gchar *uri;
  const gchar *destination;
  gint min, max;

  GST_DEBUG_OBJECT (src, "creating UDP sources for multicast");

  /* we can remove the allocated UDP ports now */
  gst_rtspsrc_stream_free_udp (stream);

  /* destination and ports come from the transport, with SDP fallbacks */
  gst_rtspsrc_get_transport_info (src, stream, transport, &destination, &min,
      &max, NULL);

  /* we need a destination now */
  if (destination == NULL)
    goto no_destination;

  /* we really need ports now or we won't be able to receive anything at all */
  if (min == -1 && max == -1)
    goto no_ports;

  GST_DEBUG_OBJECT (src, "have destination '%s' and ports (%d)-(%d)",
      destination, min, max);

  /* creating UDP source for RTP */
  if (min != -1) {
    /* the udp:// URI makes the element join the multicast group */
    uri = g_strdup_printf ("udp://%s:%d", destination, min);
    stream->udpsrc[0] =
        gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
    g_free (uri);
    if (stream->udpsrc[0] == NULL)
      goto no_element;

    /* take ownership */
    gst_object_ref_sink (stream->udpsrc[0]);

    if (src->udp_buffer_size != 0)
      g_object_set (G_OBJECT (stream->udpsrc[0]), "buffer-size",
          src->udp_buffer_size, NULL);

    if (src->multi_iface != NULL)
      g_object_set (G_OBJECT (stream->udpsrc[0]), "multicast-iface",
          src->multi_iface, NULL);

    /* change state; keep it locked so bin state changes don't touch it */
    gst_element_set_locked_state (stream->udpsrc[0], TRUE);
    gst_element_set_state (stream->udpsrc[0], GST_STATE_READY);
  }

  /* creating another UDP source for RTCP */
  if (max != -1) {
    GstCaps *caps;

    uri = g_strdup_printf ("udp://%s:%d", destination, max);
    stream->udpsrc[1] =
        gst_element_make_from_uri (GST_URI_SRC, uri, NULL, NULL);
    g_free (uri);
    if (stream->udpsrc[1] == NULL)
      goto no_element;

    /* secure profiles carry SRTCP instead of plain RTCP */
    if (stream->profile == GST_RTSP_PROFILE_SAVP ||
        stream->profile == GST_RTSP_PROFILE_SAVPF)
      caps = gst_caps_new_empty_simple ("application/x-srtcp");
    else
      caps = gst_caps_new_empty_simple ("application/x-rtcp");
    g_object_set (stream->udpsrc[1], "caps", caps, NULL);
    gst_caps_unref (caps);

    /* take ownership */
    gst_object_ref_sink (stream->udpsrc[1]);

    if (src->multi_iface != NULL)
      g_object_set (G_OBJECT (stream->udpsrc[1]), "multicast-iface",
          src->multi_iface, NULL);

    gst_element_set_state (stream->udpsrc[1], GST_STATE_READY);
  }
  return TRUE;

  /* ERRORS */
no_element:
  {
    GST_DEBUG_OBJECT (src, "no UDP source element found");
    return FALSE;
  }
no_destination:
  {
    GST_DEBUG_OBJECT (src, "no destination found");
    return FALSE;
  }
no_ports:
  {
    GST_DEBUG_OBJECT (src, "no ports found");
    return FALSE;
  }
}
+
/* configure the remainder of the UDP ports */
static gboolean
gst_rtspsrc_stream_configure_udp (GstRTSPSrc * src, GstRTSPStream * stream,
    GstRTSPTransport * transport, GstPad ** outpad)
{
  /* we manage the UDP elements now. For unicast, the UDP sources were
   * allocated in the stream when we suggested a transport. */
  if (stream->udpsrc[0]) {
    GstCaps *caps;

    /* keep the element out of the bin's state changes */
    gst_element_set_locked_state (stream->udpsrc[0], TRUE);
    gst_bin_add (GST_BIN_CAST (src), stream->udpsrc[0]);

    GST_DEBUG_OBJECT (src, "setting up UDP source");

    /* configure a timeout on the UDP port. When the timeout message is
     * posted, we assume UDP transport is not possible. We reconnect using TCP
     * if we can. */
    g_object_set (G_OBJECT (stream->udpsrc[0]), "timeout",
        src->udp_timeout * 1000, NULL);

    if ((caps = stream_get_caps_for_pt (stream, stream->default_pt)))
      g_object_set (stream->udpsrc[0], "caps", caps, NULL);

    /* get output pad of the UDP source. */
    *outpad = gst_element_get_static_pad (stream->udpsrc[0], "src");

    /* save it so we can unblock */
    stream->blockedpad = *outpad;

    /* configure pad block on the pad. As soon as there is dataflow on the
     * UDP source, we know that UDP is not blocked by a firewall and we can
     * configure all the streams to let the application autoplug decoders. */
    stream->blockid =
        gst_pad_add_probe (stream->blockedpad,
        GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_BUFFER |
        GST_PAD_PROBE_TYPE_BUFFER_LIST, pad_blocked, src, NULL);

    /* second probe records the segment seqnum for later EOS forwarding */
    gst_pad_add_probe (stream->blockedpad,
        GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, udpsrc_probe_cb,
        &(stream->segment_seqnum[0]), NULL);

    if (stream->channelpad[0]) {
      GST_DEBUG_OBJECT (src, "connecting UDP source 0 to manager");
      /* configure for UDP delivery, we need to connect the UDP pads to
       * the session plugin. */
      gst_pad_link_full (*outpad, stream->channelpad[0],
          GST_PAD_LINK_CHECK_NOTHING);
      gst_object_unref (*outpad);
      *outpad = NULL;
      /* we connected to pad-added signal to get pads from the manager */
    } else {
      GST_DEBUG_OBJECT (src, "using UDP src pad as output");
    }
  }

  /* RTCP port */
  if (stream->udpsrc[1]) {
    GstCaps *caps;

    gst_element_set_locked_state (stream->udpsrc[1], TRUE);
    gst_bin_add (GST_BIN_CAST (src), stream->udpsrc[1]);

    /* secure profiles carry SRTCP instead of plain RTCP */
    if (stream->profile == GST_RTSP_PROFILE_SAVP ||
        stream->profile == GST_RTSP_PROFILE_SAVPF)
      caps = gst_caps_new_empty_simple ("application/x-srtcp");
    else
      caps = gst_caps_new_empty_simple ("application/x-rtcp");
    g_object_set (stream->udpsrc[1], "caps", caps, NULL);
    gst_caps_unref (caps);

    if (stream->channelpad[1]) {
      GstPad *pad;

      GST_DEBUG_OBJECT (src, "connecting UDP source 1 to manager");

      pad = gst_element_get_static_pad (stream->udpsrc[1], "src");
      gst_pad_add_probe (pad,
          GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, udpsrc_probe_cb,
          &(stream->segment_seqnum[1]), NULL);
      gst_pad_link_full (pad, stream->channelpad[1],
          GST_PAD_LINK_CHECK_NOTHING);
      gst_object_unref (pad);
    } else {
      /* leave unlinked */
    }
  }
  return TRUE;
}
+
+/* configure the UDP sink back to the server for status reports */
+static gboolean
+gst_rtspsrc_stream_configure_udp_sinks (GstRTSPSrc * src,
+ GstRTSPStream * stream, GstRTSPTransport * transport)
+{
+ GstPad *pad;
+ gint rtp_port, rtcp_port;
+ gboolean do_rtp, do_rtcp;
+ const gchar *destination;
+ gchar *uri, *name;
+ guint ttl = 0;
+ GSocket *socket;
+
+ /* get transport info */
+ gst_rtspsrc_get_transport_info (src, stream, transport, &destination,
+ &rtp_port, &rtcp_port, &ttl);
+
+ /* see what we need to do */
+ do_rtp = (rtp_port != -1);
+ /* it's possible that the server does not want us to send RTCP in which case
+ * the port is -1 */
+ do_rtcp = (rtcp_port != -1 && src->manager != NULL && src->do_rtcp);
+
+ /* we need a destination when we have RTP or RTCP ports */
+ if (destination == NULL && (do_rtp || do_rtcp))
+ goto no_destination;
+
+ /* try to construct the fakesrc to the RTP port of the server to open up any
+ * NAT firewalls or, if backchannel, construct an appsrc */
+ if (do_rtp) {
+ GST_DEBUG_OBJECT (src, "configure RTP UDP sink for %s:%d", destination,
+ rtp_port);
+
+ uri = g_strdup_printf ("udp://%s:%d", destination, rtp_port);
+ stream->udpsink[0] =
+ gst_element_make_from_uri (GST_URI_SINK, uri, NULL, NULL);
+ g_free (uri);
+ if (stream->udpsink[0] == NULL)
+ goto no_sink_element;
+
+ /* don't join multicast group, we will have the source socket do that */
+ /* no sync or async state changes needed */
+ g_object_set (G_OBJECT (stream->udpsink[0]), "auto-multicast", FALSE,
+ "loop", FALSE, "sync", FALSE, "async", FALSE, NULL);
+ if (ttl > 0)
+ g_object_set (G_OBJECT (stream->udpsink[0]), "ttl", ttl, NULL);
+
+ if (stream->udpsrc[0]) {
+ /* configure socket, we give it the same UDP socket as the udpsrc for RTP
+ * so that NAT firewalls will open a hole for us */
+ g_object_get (G_OBJECT (stream->udpsrc[0]), "used-socket", &socket, NULL);
+ if (!socket)
+ goto no_socket;
+
+ GST_DEBUG_OBJECT (src, "RTP UDP src has sock %p", socket);
+ /* configure socket and make sure udpsink does not close it when shutting
+ * down, it belongs to udpsrc after all. */
+ g_object_set (G_OBJECT (stream->udpsink[0]), "socket", socket,
+ "close-socket", FALSE, NULL);
+ g_object_unref (socket);
+ }
+
+ if (stream->is_backchannel) {
+ /* appsrc is for the app to shovel data using push-backchannel-buffer */
+ stream->rtpsrc = gst_element_factory_make ("appsrc", NULL);
+ if (stream->rtpsrc == NULL)
+ goto no_appsrc_element;
+
+ /* interal use only, don't emit signals */
+ g_object_set (G_OBJECT (stream->rtpsrc), "emit-signals", TRUE,
+ "is-live", TRUE, NULL);
+ } else {
+ /* the source for the dummy packets to open up NAT */
+ stream->rtpsrc = gst_element_factory_make ("fakesrc", NULL);
+ if (stream->rtpsrc == NULL)
+ goto no_fakesrc_element;
+
+ /* random data in 5 buffers, a size of 200 bytes should be fine */
+ g_object_set (G_OBJECT (stream->rtpsrc), "filltype", 3, "num-buffers", 5,
+ "sizetype", 2, "sizemax", 200, "silent", TRUE, NULL);
+ }
+
+ /* keep everything locked */
+ gst_element_set_locked_state (stream->udpsink[0], TRUE);
+ gst_element_set_locked_state (stream->rtpsrc, TRUE);
+
+ gst_object_ref (stream->udpsink[0]);
+ gst_bin_add (GST_BIN_CAST (src), stream->udpsink[0]);
+ gst_object_ref (stream->rtpsrc);
+ gst_bin_add (GST_BIN_CAST (src), stream->rtpsrc);
+
+ gst_element_link_pads_full (stream->rtpsrc, "src", stream->udpsink[0],
+ "sink", GST_PAD_LINK_CHECK_NOTHING);
+ }
+ if (do_rtcp) {
+ GST_DEBUG_OBJECT (src, "configure RTCP UDP sink for %s:%d", destination,
+ rtcp_port);
+
+ uri = g_strdup_printf ("udp://%s:%d", destination, rtcp_port);
+ stream->udpsink[1] =
+ gst_element_make_from_uri (GST_URI_SINK, uri, NULL, NULL);
+ g_free (uri);
+ if (stream->udpsink[1] == NULL)
+ goto no_sink_element;
+
+ /* don't join multicast group, we will have the source socket do that */
+ /* no sync or async state changes needed */
+ g_object_set (G_OBJECT (stream->udpsink[1]), "auto-multicast", FALSE,
+ "loop", FALSE, "sync", FALSE, "async", FALSE, NULL);
+ if (ttl > 0)
+ g_object_set (G_OBJECT (stream->udpsink[0]), "ttl", ttl, NULL);
+
+ if (stream->udpsrc[1]) {
+ /* configure socket, we give it the same UDP socket as the udpsrc for RTCP
+ * because some servers check the port number of where it sends RTCP to identify
+ * the RTCP packets it receives */
+ g_object_get (G_OBJECT (stream->udpsrc[1]), "used-socket", &socket, NULL);
+ if (!socket)
+ goto no_socket;
+
+ GST_DEBUG_OBJECT (src, "RTCP UDP src has sock %p", socket);
+ /* configure socket and make sure udpsink does not close it when shutting
+ * down, it belongs to udpsrc after all. */
+ g_object_set (G_OBJECT (stream->udpsink[1]), "socket", socket,
+ "close-socket", FALSE, NULL);
+ g_object_unref (socket);
+ }
+
+ /* we keep this playing always */
+ gst_element_set_locked_state (stream->udpsink[1], TRUE);
+ gst_element_set_state (stream->udpsink[1], GST_STATE_PLAYING);
+
+ gst_object_ref (stream->udpsink[1]);
+ gst_bin_add (GST_BIN_CAST (src), stream->udpsink[1]);
+
+ stream->rtcppad = gst_element_get_static_pad (stream->udpsink[1], "sink");
+
+ /* get session RTCP pad */
+ name = g_strdup_printf ("send_rtcp_src_%u", stream->id);
+ pad = gst_element_request_pad_simple (src->manager, name);
+ g_free (name);
+
+ /* and link */
+ if (pad) {
+ gst_pad_link_full (pad, stream->rtcppad, GST_PAD_LINK_CHECK_NOTHING);
+ gst_object_unref (pad);
+ }
+ }
+
+ return TRUE;
+
+ /* ERRORS */
+no_destination:
+ {
+ GST_ERROR_OBJECT (src, "no destination address specified");
+ return FALSE;
+ }
+no_sink_element:
+ {
+ GST_ERROR_OBJECT (src, "no UDP sink element found");
+ return FALSE;
+ }
+no_appsrc_element:
+ {
+ GST_ERROR_OBJECT (src, "no appsrc element found");
+ return FALSE;
+ }
+no_fakesrc_element:
+ {
+ GST_ERROR_OBJECT (src, "no fakesrc element found");
+ return FALSE;
+ }
+no_socket:
+ {
+ GST_ERROR_OBJECT (src, "failed to create socket");
+ return FALSE;
+ }
+}
+
/* sets up all elements needed for streaming over the specified transport.
 * Does not yet expose the element pads, this will be done when there is actual
 * dataflow detected, which might never happen when UDP is blocked in a
 * firewall, for example.
 */
static gboolean
gst_rtspsrc_stream_configure_transport (GstRTSPStream * stream,
    GstRTSPTransport * transport)
{
  GstRTSPSrc *src;
  GstPad *outpad = NULL;
  GstPadTemplate *template;
  gchar *name;
  const gchar *media_type;
  guint i, len;

  src = stream->parent;

  GST_DEBUG_OBJECT (src, "configuring transport for stream %p", stream);

  /* get the proper media type for this stream now */
  if (gst_rtsp_transport_get_media_type (transport, &media_type) < 0)
    goto unknown_transport;
  if (!media_type)
    goto unknown_transport;

  /* configure the final media type */
  GST_DEBUG_OBJECT (src, "setting media type to %s", media_type);

  /* stamp the media type (and ssrc when known) on every payload-type entry */
  len = stream->ptmap->len;
  for (i = 0; i < len; i++) {
    GstStructure *s;
    PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);

    if (item->caps == NULL)
      continue;

    s = gst_caps_get_structure (item->caps, 0);
    gst_structure_set_name (s, media_type);
    /* set ssrc if known */
    if (transport->ssrc)
      gst_structure_set (s, "ssrc", G_TYPE_UINT, transport->ssrc, NULL);
  }

  /* try to get and configure a manager, channelpad[0-1] will be configured with
   * the pads for the manager, or NULL when no manager is needed. */
  if (!gst_rtspsrc_stream_configure_manager (src, stream, transport))
    goto no_manager;

  switch (transport->lower_transport) {
    case GST_RTSP_LOWER_TRANS_TCP:
      if (!gst_rtspsrc_stream_configure_tcp (src, stream, transport, &outpad))
        goto transport_failed;
      break;
    case GST_RTSP_LOWER_TRANS_UDP_MCAST:
      if (!gst_rtspsrc_stream_configure_mcast (src, stream, transport, &outpad))
        goto transport_failed;
      /* fallthrough, the rest is the same for UDP and MCAST */
    case GST_RTSP_LOWER_TRANS_UDP:
      if (!gst_rtspsrc_stream_configure_udp (src, stream, transport, &outpad))
        goto transport_failed;
      /* configure udpsinks back to the server for RTCP messages, for the
       * dummy RTP messages to open NAT, and for the backchannel */
      if (!gst_rtspsrc_stream_configure_udp_sinks (src, stream, transport))
        goto transport_failed;
      break;
    default:
      goto unknown_transport;
  }

  /* using backchannel and no manager, hence no srcpad for this stream */
  if (outpad && stream->is_backchannel) {
    add_backchannel_fakesink (src, stream, outpad);
    gst_object_unref (outpad);
  } else if (outpad) {
    GST_DEBUG_OBJECT (src, "creating ghostpad for stream %p", stream);

    gst_pad_use_fixed_caps (outpad);

    /* create ghostpad, don't add just yet, this will be done when we activate
     * the stream. */
    name = g_strdup_printf ("stream_%u", stream->id);
    template = gst_static_pad_template_get (&rtptemplate);
    stream->srcpad = gst_ghost_pad_new_from_template (name, outpad, template);
    gst_pad_set_event_function (stream->srcpad, gst_rtspsrc_handle_src_event);
    gst_pad_set_query_function (stream->srcpad, gst_rtspsrc_handle_src_query);
    gst_object_unref (template);
    g_free (name);

    gst_object_unref (outpad);
  }
  /* mark pad as ok */
  stream->last_ret = GST_FLOW_OK;

  return TRUE;

  /* ERRORS */
transport_failed:
  {
    GST_WARNING_OBJECT (src, "failed to configure transport");
    return FALSE;
  }
unknown_transport:
  {
    GST_WARNING_OBJECT (src, "unknown transport");
    return FALSE;
  }
no_manager:
  {
    GST_WARNING_OBJECT (src, "cannot get a session manager");
    return FALSE;
  }
}
+
+/* send a couple of dummy random packets on the receiver RTP port to the server,
+ * this should make a firewall think we initiated the data transfer and
+ * hopefully allow packets to go from the sender port to our RTP receiver port */
+static gboolean
+gst_rtspsrc_send_dummy_packets (GstRTSPSrc * src)
+{
+ GList *walk;
+
+ if (src->nat_method != GST_RTSP_NAT_DUMMY)
+ return TRUE;
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+
+ if (!stream->rtpsrc || !stream->udpsink[0])
+ continue;
+
+ if (stream->is_backchannel)
+ GST_DEBUG_OBJECT (src, "starting backchannel stream %p", stream);
+ else
+ GST_DEBUG_OBJECT (src, "sending dummy packet to stream %p", stream);
+
+ gst_element_set_state (stream->udpsink[0], GST_STATE_NULL);
+ gst_element_set_state (stream->rtpsrc, GST_STATE_NULL);
+ gst_element_set_state (stream->udpsink[0], GST_STATE_PLAYING);
+ gst_element_set_state (stream->rtpsrc, GST_STATE_PLAYING);
+ }
+ return TRUE;
+}
+
/* Adds the source pads of all configured streams to the element.
 * This code is performed when we detected dataflow.
 *
 * We detect dataflow from either the _loop function or with pad probes on the
 * udp sources.
 */
static gboolean
gst_rtspsrc_activate_streams (GstRTSPSrc * src)
{
  GList *walk;

  GST_DEBUG_OBJECT (src, "activating streams");

  for (walk = src->streams; walk; walk = g_list_next (walk)) {
    GstRTSPStream *stream = (GstRTSPStream *) walk->data;

    if (stream->udpsrc[0]) {
      /* remove timeout, we are streaming now and timeouts will be handled by
       * the session manager and jitter buffer */
      g_object_set (G_OBJECT (stream->udpsrc[0]), "timeout", (guint64) 0, NULL);
    }
    if (stream->srcpad) {
      GST_DEBUG_OBJECT (src, "activating stream pad %p", stream);
      gst_pad_set_active (stream->srcpad, TRUE);

      /* if we don't have a session manager, set the caps now. If we have a
       * session, we will get a notification of the pad and the caps. */
      if (!src->manager) {
        GstCaps *caps;

        caps = stream_get_caps_for_pt (stream, stream->default_pt);
        GST_DEBUG_OBJECT (src, "setting pad caps for stream %p", stream);
        gst_pad_set_caps (stream->srcpad, caps);
      }
      /* add the pad only once; backchannel streams get a fakesink instead of
       * being exposed as a source pad */
      if (!stream->added) {
        GST_DEBUG_OBJECT (src, "adding stream pad %p", stream);
        if (stream->is_backchannel)
          add_backchannel_fakesink (src, stream, stream->srcpad);
        else
          gst_element_add_pad (GST_ELEMENT_CAST (src), stream->srcpad);
        stream->added = TRUE;
      }
    }
  }

  /* unblock all pads */
  for (walk = src->streams; walk; walk = g_list_next (walk)) {
    GstRTSPStream *stream = (GstRTSPStream *) walk->data;

    if (stream->blockid) {
      GST_DEBUG_OBJECT (src, "unblocking stream pad %p", stream);
      gst_pad_remove_probe (stream->blockedpad, stream->blockid);
      stream->blockid = 0;
    }
  }

  return TRUE;
}
+
/* Update the stored per-payload-type caps of every set-up stream with the
 * clock/seqnum bases and the NPT/rate fields of @segment, push the updated
 * caps into the RTP udpsrc, and optionally ask the manager to rebuild its
 * pt map. */
static void
gst_rtspsrc_configure_caps (GstRTSPSrc * src, GstSegment * segment,
    gboolean reset_manager)
{
  GList *walk;
  guint64 start, stop;
  gdouble play_speed, play_scale;

  GST_DEBUG_OBJECT (src, "configuring stream caps");

  /* for reverse playback the segment boundaries swap roles */
  start = segment->rate > 0.0 ? segment->start : segment->stop;
  stop = segment->rate > 0.0 ? segment->stop : segment->start;
  play_speed = segment->rate;
  play_scale = segment->applied_rate;

  for (walk = src->streams; walk; walk = g_list_next (walk)) {
    GstRTSPStream *stream = (GstRTSPStream *) walk->data;
    guint j, len;

    if (!stream->setup)
      continue;

    len = stream->ptmap->len;
    for (j = 0; j < len; j++) {
      GstCaps *caps;
      PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, j);

      if (item->caps == NULL)
        continue;

      /* make_writable may return a new caps object; it is stored back into
       * item->caps below */
      caps = gst_caps_make_writable (item->caps);
      /* update caps */
      if (stream->timebase != -1)
        gst_caps_set_simple (caps, "clock-base", G_TYPE_UINT,
            (guint) stream->timebase, NULL);
      if (stream->seqbase != -1)
        gst_caps_set_simple (caps, "seqnum-base", G_TYPE_UINT,
            (guint) stream->seqbase, NULL);
      gst_caps_set_simple (caps, "npt-start", G_TYPE_UINT64, start, NULL);
      if (stop != -1)
        gst_caps_set_simple (caps, "npt-stop", G_TYPE_UINT64, stop, NULL);
      gst_caps_set_simple (caps, "play-speed", G_TYPE_DOUBLE, play_speed, NULL);
      gst_caps_set_simple (caps, "play-scale", G_TYPE_DOUBLE, play_scale, NULL);
      gst_caps_set_simple (caps, "onvif-mode", G_TYPE_BOOLEAN, src->onvif_mode,
          NULL);

      item->caps = caps;
      GST_DEBUG_OBJECT (src, "stream %p, pt %d, caps %" GST_PTR_FORMAT, stream,
          item->pt, caps);

      /* only the default payload type drives the udpsrc caps */
      if (item->pt == stream->default_pt) {
        if (stream->udpsrc[0])
          g_object_set (stream->udpsrc[0], "caps", caps, NULL);
        stream->need_caps = TRUE;
      }
    }
  }
  if (reset_manager && src->manager) {
    GST_DEBUG_OBJECT (src, "clear session");
    g_signal_emit_by_name (src->manager, "clear-pt-map", NULL);
  }
}
+
+static GstFlowReturn
+gst_rtspsrc_combine_flows (GstRTSPSrc * src, GstRTSPStream * stream,
+ GstFlowReturn ret)
+{
+ GList *streams;
+
+ /* store the value */
+ stream->last_ret = ret;
+
+ /* if it's success we can return the value right away */
+ if (ret == GST_FLOW_OK)
+ goto done;
+
+ /* any other error that is not-linked can be returned right
+ * away */
+ if (ret != GST_FLOW_NOT_LINKED)
+ goto done;
+
+ /* only return NOT_LINKED if all other pads returned NOT_LINKED */
+ for (streams = src->streams; streams; streams = g_list_next (streams)) {
+ GstRTSPStream *ostream = (GstRTSPStream *) streams->data;
+
+ ret = ostream->last_ret;
+ /* some other return value (must be SUCCESS but we can return
+ * other values as well) */
+ if (ret != GST_FLOW_NOT_LINKED)
+ goto done;
+ }
+ /* if we get here, all other pads were unlinked and we return
+ * NOT_LINKED then */
+done:
+ return ret;
+}
+
/* Push @event into the RTP and RTCP branches of @stream.  Consumes the
 * caller's reference on @event.  EOS events are re-created so the recorded
 * segment seqnums can be attached. */
static gboolean
gst_rtspsrc_stream_push_event (GstRTSPSrc * src, GstRTSPStream * stream,
    GstEvent * event)
{
  gboolean res = TRUE;

  /* only streams that have a connection to the outside world */
  if (!stream->setup)
    goto done;

  if (stream->udpsrc[0]) {
    GstEvent *sent_event;

    if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
      /* fresh EOS so we can stamp the seqnum of the stream's segment */
      sent_event = gst_event_new_eos ();
      gst_event_set_seqnum (sent_event, stream->segment_seqnum[0]);
    } else {
      /* extra ref; gst_element_send_event takes ownership */
      sent_event = gst_event_ref (event);
    }

    res = gst_element_send_event (stream->udpsrc[0], sent_event);
  } else if (stream->channelpad[0]) {
    gst_event_ref (event);
    if (GST_PAD_IS_SRC (stream->channelpad[0]))
      res = gst_pad_push_event (stream->channelpad[0], event);
    else
      res = gst_pad_send_event (stream->channelpad[0], event);
  }

  if (stream->udpsrc[1]) {
    GstEvent *sent_event;

    if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
      sent_event = gst_event_new_eos ();
      if (stream->segment_seqnum[1] != GST_SEQNUM_INVALID) {
        gst_event_set_seqnum (sent_event, stream->segment_seqnum[1]);
      }
    } else {
      sent_event = gst_event_ref (event);
    }

    res &= gst_element_send_event (stream->udpsrc[1], sent_event);
  } else if (stream->channelpad[1]) {
    gst_event_ref (event);
    if (GST_PAD_IS_SRC (stream->channelpad[1]))
      res &= gst_pad_push_event (stream->channelpad[1], event);
    else
      res &= gst_pad_send_event (stream->channelpad[1], event);
  }

done:
  /* drop the reference the caller handed us */
  gst_event_unref (event);

  return res;
}
+
+static gboolean
+gst_rtspsrc_push_event (GstRTSPSrc * src, GstEvent * event)
+{
+ GList *streams;
+ gboolean res = TRUE;
+
+ for (streams = src->streams; streams; streams = g_list_next (streams)) {
+ GstRTSPStream *ostream = (GstRTSPStream *) streams->data;
+
+ gst_event_ref (event);
+ res &= gst_rtspsrc_stream_push_event (src, ostream, event);
+ }
+ gst_event_unref (event);
+
+ return res;
+}
+
+static gboolean
+accept_certificate_cb (GTlsConnection * conn, GTlsCertificate * peer_cert,
+ GTlsCertificateFlags errors, gpointer user_data)
+{
+ GstRTSPSrc *src = user_data;
+ gboolean accept = FALSE;
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_ACCEPT_CERTIFICATE], 0, conn,
+ peer_cert, errors, &accept);
+
+ return accept;
+}
+
/* Create (if needed) and connect the RTSP connection described by @info.
 * On a 401 Unauthorized HTTP tunnel response the connection is rebuilt once
 * with authentication taken from the response; at most one retry is made. */
static GstRTSPResult
gst_rtsp_conninfo_connect (GstRTSPSrc * src, GstRTSPConnInfo * info,
    gboolean async)
{
  GstRTSPResult res;
  GstRTSPMessage response;
  gboolean retry = FALSE;
  memset (&response, 0, sizeof (response));
  gst_rtsp_message_init (&response);
  do {
    if (info->connection == NULL) {
      if (info->url == NULL) {
        GST_DEBUG_OBJECT (src, "parsing uri (%s)...", info->location);
        if ((res = gst_rtsp_url_parse (info->location, &info->url)) < 0)
          goto parse_error;
      }
      /* create connection */
      GST_DEBUG_OBJECT (src, "creating connection (%s)...", info->location);
      if ((res = gst_rtsp_connection_create (info->url, &info->connection)) < 0)
        goto could_not_create;

      if (retry) {
        /* second pass after a 401: pick up credentials from the response */
        gst_rtspsrc_setup_auth (src, &response);
      }

      g_free (info->url_str);
      info->url_str = gst_rtsp_url_get_request_uri (info->url);

      GST_DEBUG_OBJECT (src, "sanitized uri %s", info->url_str);

      if (info->url->transports & GST_RTSP_LOWER_TRANS_TLS) {
        /* propagate all configured TLS settings onto the new connection */
        if (!gst_rtsp_connection_set_tls_validation_flags (info->connection,
                src->tls_validation_flags))
          GST_WARNING_OBJECT (src, "Unable to set TLS validation flags");

        if (src->tls_database)
          gst_rtsp_connection_set_tls_database (info->connection,
              src->tls_database);

        if (src->tls_interaction)
          gst_rtsp_connection_set_tls_interaction (info->connection,
              src->tls_interaction);
        gst_rtsp_connection_set_accept_certificate_func (info->connection,
            accept_certificate_cb, src, NULL);
      }

      if (info->url->transports & GST_RTSP_LOWER_TRANS_HTTP) {
        /* RTSP-over-HTTP tunneling */
        gst_rtsp_connection_set_tunneled (info->connection, TRUE);
        gst_rtsp_connection_set_ignore_x_server_reply (info->connection,
            src->ignore_x_server_reply);
      }

      if (src->proxy_host) {
        GST_DEBUG_OBJECT (src, "setting proxy %s:%d", src->proxy_host,
            src->proxy_port);
        gst_rtsp_connection_set_proxy (info->connection, src->proxy_host,
            src->proxy_port);
      }
    }

    if (!info->connected) {
      /* connect */
      if (async)
        GST_ELEMENT_PROGRESS (src, CONTINUE, "connect",
            ("Connecting to %s", info->location));
      GST_DEBUG_OBJECT (src, "connecting (%s)...", info->location);
      res = gst_rtsp_connection_connect_with_response_usec (info->connection,
          src->tcp_timeout, &response);

      if (response.type == GST_RTSP_MESSAGE_HTTP_RESPONSE &&
          response.type_data.response.code == GST_RTSP_STS_UNAUTHORIZED) {
        /* throw the connection away and retry once with auth set up from
         * the 401 response */
        gst_rtsp_conninfo_close (src, info, TRUE);
        if (!retry)
          retry = TRUE;
        else
          retry = FALSE;        // we should not retry more than once
      } else {
        retry = FALSE;
      }

      if (res == GST_RTSP_OK)
        info->connected = TRUE;
      else if (!retry)
        goto could_not_connect;
    }
  } while (!info->connected && retry);

  gst_rtsp_message_unset (&response);
  return GST_RTSP_OK;

  /* ERRORS */
parse_error:
  {
    GST_ERROR_OBJECT (src, "No valid RTSP URL was provided");
    gst_rtsp_message_unset (&response);
    return res;
  }
could_not_create:
  {
    gchar *str = gst_rtsp_strresult (res);
    GST_ERROR_OBJECT (src, "Could not create connection. (%s)", str);
    g_free (str);
    gst_rtsp_message_unset (&response);
    return res;
  }
could_not_connect:
  {
    gchar *str = gst_rtsp_strresult (res);
    GST_ERROR_OBJECT (src, "Could not connect to server. (%s)", str);
    g_free (str);
    gst_rtsp_message_unset (&response);
    return res;
  }
}
+
/* Close the RTSP connection in @info; when @free is TRUE also free the
 * connection object so a later connect creates a fresh one.  Serialized with
 * the RTSP state lock. */
static GstRTSPResult
gst_rtsp_conninfo_close (GstRTSPSrc * src, GstRTSPConnInfo * info,
    gboolean free)
{
  GST_RTSP_STATE_LOCK (src);
  if (info->connected) {
    GST_DEBUG_OBJECT (src, "closing connection...");
    gst_rtsp_connection_close (info->connection);
    info->connected = FALSE;
  }
  if (free && info->connection) {
    /* free connection */
    GST_DEBUG_OBJECT (src, "freeing connection...");
    gst_rtsp_connection_free (info->connection);
    info->connection = NULL;
    info->flushing = FALSE;
  }
  GST_RTSP_STATE_UNLOCK (src);
  return GST_RTSP_OK;
}
+
+static GstRTSPResult
+gst_rtsp_conninfo_reconnect (GstRTSPSrc * src, GstRTSPConnInfo * info,
+ gboolean async)
+{
+ GstRTSPResult res;
+
+ GST_DEBUG_OBJECT (src, "reconnecting connection...");
+ gst_rtsp_conninfo_close (src, info, FALSE);
+ res = gst_rtsp_conninfo_connect (src, info, async);
+
+ return res;
+}
+
+static void
+gst_rtspsrc_connection_flush (GstRTSPSrc * src, gboolean flush)
+{
+ GList *walk;
+
+ GST_DEBUG_OBJECT (src, "set flushing %d", flush);
+ GST_RTSP_STATE_LOCK (src);
+ if (src->conninfo.connection && src->conninfo.flushing != flush) {
+ GST_DEBUG_OBJECT (src, "connection flush");
+ gst_rtsp_connection_flush (src->conninfo.connection, flush);
+ src->conninfo.flushing = flush;
+ }
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ if (stream->conninfo.connection && stream->conninfo.flushing != flush) {
+ GST_DEBUG_OBJECT (src, "stream %p flush", stream);
+ gst_rtsp_connection_flush (stream->conninfo.connection, flush);
+ stream->conninfo.flushing = flush;
+ }
+ }
+ GST_RTSP_STATE_UNLOCK (src);
+}
+
+static GstRTSPResult
+gst_rtspsrc_init_request (GstRTSPSrc * src, GstRTSPMessage * msg,
+ GstRTSPMethod method, const gchar * uri)
+{
+ GstRTSPResult res;
+
+ res = gst_rtsp_message_init_request (msg, method, uri);
+ if (res < 0)
+ return res;
+
+ /* set user-agent */
+ if (src->user_agent)
+ gst_rtsp_message_add_header (msg, GST_RTSP_HDR_USER_AGENT, src->user_agent);
+
+ return res;
+}
+
/* FIXME, handle server request, reply with OK, for now */
static GstRTSPResult
gst_rtspsrc_handle_request (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
    GstRTSPMessage * request)
{
  GstRTSPMessage response = { 0 };
  GstRTSPResult res;

  GST_DEBUG_OBJECT (src, "got server request message");

  DEBUG_RTSP (src, request);

  /* give the extensions a first shot at handling the request */
  res = gst_rtsp_ext_list_receive_request (src->extensions, request);

  if (res == GST_RTSP_ENOTIMPL) {
    /* default implementation, send OK */
    GST_DEBUG_OBJECT (src, "prepare OK reply");
    res =
        gst_rtsp_message_init_response (&response, GST_RTSP_STS_OK, "OK",
        request);
    if (res < 0)
      goto send_error;

    /* let app parse and reply */
    g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_HANDLE_REQUEST],
        0, request, &response);

    DEBUG_RTSP (src, &response);

    res = gst_rtspsrc_connection_send (src, conninfo, &response, 0);
    if (res < 0)
      goto send_error;

    gst_rtsp_message_unset (&response);
  } else if (res == GST_RTSP_EEOF)
    /* connection closed by the peer; propagate */
    return res;

  return GST_RTSP_OK;

  /* ERRORS */
send_error:
  {
    gst_rtsp_message_unset (&response);
    return res;
  }
}
+
+/* send server keep-alive */
+/* Send a session keep-alive request (GET_PARAMETER when the server
+ * advertises it, OPTIONS otherwise) on the aggregate control URL and re-arm
+ * the session timeout.  Send failures are posted as an element warning and
+ * the result code is returned to the caller. */
+static GstRTSPResult
+gst_rtspsrc_send_keep_alive (GstRTSPSrc * src)
+{
+  GstRTSPMessage request = { 0 };
+  GstRTSPResult res;
+  GstRTSPMethod method;
+  const gchar *control;
+
+  if (src->do_rtsp_keep_alive == FALSE) {
+    GST_DEBUG_OBJECT (src, "do-rtsp-keep-alive is FALSE, not sending.");
+    /* still reset the timeout so we don't immediately time out again */
+    gst_rtsp_connection_reset_timeout (src->conninfo.connection);
+    return GST_RTSP_OK;
+  }
+
+  GST_DEBUG_OBJECT (src, "creating server keep-alive");
+
+  /* find a method to use for keep-alive */
+  if (src->methods & GST_RTSP_GET_PARAMETER)
+    method = GST_RTSP_GET_PARAMETER;
+  else
+    method = GST_RTSP_OPTIONS;
+
+  control = get_aggregate_control (src);
+  if (control == NULL)
+    goto no_control;
+
+  res = gst_rtspsrc_init_request (src, &request, method, control);
+  if (res < 0)
+    goto send_error;
+
+  request.type_data.request.version = src->version;
+
+  res = gst_rtspsrc_connection_send (src, &src->conninfo, &request, 0);
+  if (res < 0)
+    goto send_error;
+
+  gst_rtsp_connection_reset_timeout (src->conninfo.connection);
+  gst_rtsp_message_unset (&request);
+
+  return GST_RTSP_OK;
+
+  /* ERRORS */
+no_control:
+  {
+    /* without an aggregate control url there is nothing to ping; treat as
+     * non-fatal */
+    GST_WARNING_OBJECT (src, "no control url to send keepalive");
+    return GST_RTSP_OK;
+  }
+send_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    gst_rtsp_message_unset (&request);
+    GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+        ("Could not send keep-alive. (%s)", str));
+    g_free (str);
+    return res;
+  }
+}
+
+/* Handle an interleaved RTP/RTCP data message received on the RTSP/TCP
+ * connection: find the stream for the channel, perform one-time activation,
+ * stream-start/caps/segment setup, timestamp the first buffer and push the
+ * payload out on the matching pad.  Unknown channels and too-short bodies
+ * are ignored with GST_FLOW_OK. */
+static GstFlowReturn
+gst_rtspsrc_handle_data (GstRTSPSrc * src, GstRTSPMessage * message)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+  gint channel;
+  GstRTSPStream *stream;
+  GstPad *outpad = NULL;
+  guint8 *data;
+  guint size;
+  GstBuffer *buf;
+  gboolean is_rtcp;
+
+  channel = message->type_data.data.channel;
+
+  stream = find_stream (src, &channel, (gpointer) find_stream_by_channel);
+  if (!stream)
+    goto unknown_stream;
+
+  /* channel[0] carries RTP, channel[1] carries RTCP */
+  if (channel == stream->channel[0]) {
+    outpad = stream->channelpad[0];
+    is_rtcp = FALSE;
+  } else if (channel == stream->channel[1]) {
+    outpad = stream->channelpad[1];
+    is_rtcp = TRUE;
+  } else {
+    is_rtcp = FALSE;
+  }
+
+  /* take a look at the body to figure out what we have */
+  gst_rtsp_message_get_body (message, &data, &size);
+  if (size < 2)
+    goto invalid_length;
+
+  /* channels are not correct on some servers, do extra check: the second
+   * byte of an RTCP packet carries the packet type 200-204 (SR/RR/SDES/
+   * BYE/APP) */
+  if (data[1] >= 200 && data[1] <= 204) {
+    /* hmm RTCP message switch to the RTCP pad of the same stream. */
+    outpad = stream->channelpad[1];
+    is_rtcp = TRUE;
+  }
+
+  /* we have no clue what this is, just ignore then. */
+  if (outpad == NULL)
+    goto unknown_stream;
+
+  /* take the message body for further processing; ownership of @data moves
+   * to us and is handed to the buffer below (freed with g_free) */
+  gst_rtsp_message_steal_body (message, &data, &size);
+
+  /* strip the trailing \0 */
+  size -= 1;
+
+  buf = gst_buffer_new ();
+  gst_buffer_append_memory (buf,
+      gst_memory_new_wrapped (0, data, size, 0, size, data, g_free));
+
+  /* don't need message anymore */
+  gst_rtsp_message_unset (message);
+
+  GST_DEBUG_OBJECT (src, "pushing data of size %d on channel %d", size,
+      channel);
+
+  if (src->need_activate) {
+    gchar *stream_id;
+    GstEvent *event;
+    GChecksum *cs;
+    gchar *uri;
+    GList *streams;
+
+    /* generate an SHA256 sum of the URI, used as the stream-id prefix */
+    cs = g_checksum_new (G_CHECKSUM_SHA256);
+    uri = src->conninfo.location;
+    g_checksum_update (cs, (const guchar *) uri, strlen (uri));
+
+    for (streams = src->streams; streams; streams = g_list_next (streams)) {
+      GstRTSPStream *ostream = (GstRTSPStream *) streams->data;
+      GstCaps *caps;
+
+      /* Activate in advance so that the stream-start event is registered */
+      /* NOTE(review): this tests/activates stream->srcpad (the stream this
+       * data arrived on) on every iteration while the events below target
+       * ostream — ostream->srcpad may have been intended; confirm against
+       * upstream before changing. */
+      if (stream->srcpad) {
+        gst_pad_set_active (stream->srcpad, TRUE);
+      }
+
+      stream_id =
+          g_strdup_printf ("%s/%d", g_checksum_get_string (cs), ostream->id);
+
+      event = gst_event_new_stream_start (stream_id);
+
+      gst_rtspsrc_stream_start_event_add_group_id (src, event);
+
+      g_free (stream_id);
+      gst_rtspsrc_stream_push_event (src, ostream, event);
+
+      if ((caps = stream_get_caps_for_pt (ostream, ostream->default_pt))) {
+        /* only streams that have a connection to the outside world */
+        if (ostream->setup) {
+          /* send the RTP caps either upstream of the udpsrc or onto the
+           * interleaved channel pad, depending on the transport in use */
+          if (ostream->udpsrc[0]) {
+            gst_element_send_event (ostream->udpsrc[0],
+                gst_event_new_caps (caps));
+          } else if (ostream->channelpad[0]) {
+            if (GST_PAD_IS_SRC (ostream->channelpad[0]))
+              gst_pad_push_event (ostream->channelpad[0],
+                  gst_event_new_caps (caps));
+            else
+              gst_pad_send_event (ostream->channelpad[0],
+                  gst_event_new_caps (caps));
+          }
+          ostream->need_caps = FALSE;
+
+          /* RTCP caps depend on whether the stream uses a secure profile */
+          if (ostream->profile == GST_RTSP_PROFILE_SAVP ||
+              ostream->profile == GST_RTSP_PROFILE_SAVPF)
+            caps = gst_caps_new_empty_simple ("application/x-srtcp");
+          else
+            caps = gst_caps_new_empty_simple ("application/x-rtcp");
+
+          if (ostream->udpsrc[1]) {
+            gst_element_send_event (ostream->udpsrc[1],
+                gst_event_new_caps (caps));
+          } else if (ostream->channelpad[1]) {
+            if (GST_PAD_IS_SRC (ostream->channelpad[1]))
+              gst_pad_push_event (ostream->channelpad[1],
+                  gst_event_new_caps (caps));
+            else
+              gst_pad_send_event (ostream->channelpad[1],
+                  gst_event_new_caps (caps));
+          }
+
+          gst_caps_unref (caps);
+        }
+      }
+    }
+    g_checksum_free (cs);
+
+    gst_rtspsrc_activate_streams (src);
+    src->need_activate = FALSE;
+    src->need_segment = TRUE;
+  }
+
+  if (src->base_time == -1) {
+    /* Take current running_time. This timestamp will be put on
+     * the first buffer of each stream because we are a live source and so we
+     * timestamp with the running_time. When we are dealing with TCP, we also
+     * only timestamp the first buffer (using the DISCONT flag) because a server
+     * typically bursts data, for which we don't want to compensate by speeding
+     * up the media. The other timestamps will be interpolated from this one
+     * using the RTP timestamps. */
+    GST_OBJECT_LOCK (src);
+    if (GST_ELEMENT_CLOCK (src)) {
+      GstClockTime now;
+      GstClockTime base_time;
+
+      now = gst_clock_get_time (GST_ELEMENT_CLOCK (src));
+      base_time = GST_ELEMENT_CAST (src)->base_time;
+
+      src->base_time = now - base_time;
+
+      GST_DEBUG_OBJECT (src, "first buffer at time %" GST_TIME_FORMAT ", base %"
+          GST_TIME_FORMAT, GST_TIME_ARGS (now), GST_TIME_ARGS (base_time));
+    }
+    GST_OBJECT_UNLOCK (src);
+  }
+
+  /* If needed send a new segment, don't forget we are live and buffer are
+   * timestamped with running time */
+  if (src->need_segment) {
+    src->need_segment = FALSE;
+    if (src->onvif_mode) {
+      gst_rtspsrc_push_event (src, gst_event_new_segment (&src->out_segment));
+    } else {
+      GstSegment segment;
+
+      gst_segment_init (&segment, GST_FORMAT_TIME);
+      gst_rtspsrc_push_event (src, gst_event_new_segment (&segment));
+    }
+  }
+
+  if (stream->need_caps) {
+    GstCaps *caps;
+
+    if ((caps = stream_get_caps_for_pt (stream, stream->default_pt))) {
+      /* only streams that have a connection to the outside world */
+      if (stream->setup) {
+        /* Only need to update the TCP caps here, UDP is already handled */
+        if (stream->channelpad[0]) {
+          if (GST_PAD_IS_SRC (stream->channelpad[0]))
+            gst_pad_push_event (stream->channelpad[0],
+                gst_event_new_caps (caps));
+          else
+            gst_pad_send_event (stream->channelpad[0],
+                gst_event_new_caps (caps));
+        }
+        stream->need_caps = FALSE;
+      }
+    }
+
+    /* clear the flag even when no caps could be set so we don't retry on
+     * every data message */
+    stream->need_caps = FALSE;
+  }
+
+  if (stream->discont && !is_rtcp) {
+    /* mark first RTP buffer as discont */
+    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+    stream->discont = FALSE;
+    /* first buffer gets the timestamp, other buffers are not timestamped and
+     * their presentation time will be interpolated from the rtp timestamps. */
+    GST_DEBUG_OBJECT (src, "setting timestamp %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (src->base_time));
+
+    GST_BUFFER_TIMESTAMP (buf) = src->base_time;
+  }
+
+  /* chain to the peer pad */
+  if (GST_PAD_IS_SINK (outpad))
+    ret = gst_pad_chain (outpad, buf);
+  else
+    ret = gst_pad_push (outpad, buf);
+
+  if (!is_rtcp) {
+    /* combine all stream flows for the data transport */
+    ret = gst_rtspsrc_combine_flows (src, stream, ret);
+  }
+  return ret;
+
+  /* ERRORS */
+unknown_stream:
+  {
+    GST_DEBUG_OBJECT (src, "unknown stream on channel %d, ignored", channel);
+    gst_rtsp_message_unset (message);
+    return GST_FLOW_OK;
+  }
+invalid_length:
+  {
+    GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+        ("Short message received, ignoring."));
+    gst_rtsp_message_unset (message);
+    return GST_FLOW_OK;
+  }
+}
+
+/* Task body for interleaved (RTP-over-RTSP/TCP) mode: keep receiving
+ * messages on the RTSP connection, answering server requests, sending
+ * keep-alives on timeout and pushing data messages into the pipeline.
+ * Only returns on interrupt (FLUSHING), server EOF (EOS) or error. */
+static GstFlowReturn
+gst_rtspsrc_loop_interleaved (GstRTSPSrc * src)
+{
+  GstRTSPMessage message = { 0 };
+  GstRTSPResult res;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  while (TRUE) {
+    gst_rtsp_message_unset (&message);
+
+    if (src->conninfo.flushing) {
+      /* do not attempt to receive if flushing */
+      res = GST_RTSP_EINTR;
+    } else {
+      /* protect the connection with the connection lock so that we can see when
+       * we are finished doing server communication */
+      res = gst_rtspsrc_connection_receive (src, &src->conninfo, &message,
+          src->tcp_timeout);
+    }
+
+    switch (res) {
+      case GST_RTSP_OK:
+        GST_DEBUG_OBJECT (src, "we received a server message");
+        break;
+      case GST_RTSP_EINTR:
+        /* we got interrupted this means we need to stop */
+        goto interrupt;
+      case GST_RTSP_ETIMEOUT:
+        /* no reply, send keep alive */
+        GST_DEBUG_OBJECT (src, "timeout, sending keep-alive");
+        if ((res = gst_rtspsrc_send_keep_alive (src)) == GST_RTSP_EINTR)
+          goto interrupt;
+        continue;
+      case GST_RTSP_EEOF:
+        /* go EOS when the server closed the connection */
+        goto server_eof;
+      default:
+        goto receive_error;
+    }
+
+    switch (message.type) {
+      case GST_RTSP_MESSAGE_REQUEST:
+        /* server sends us a request message, handle it */
+        res = gst_rtspsrc_handle_request (src, &src->conninfo, &message);
+        if (res == GST_RTSP_EEOF)
+          goto server_eof;
+        else if (res < 0)
+          goto handle_request_failed;
+        break;
+      case GST_RTSP_MESSAGE_RESPONSE:
+        /* we ignore response messages */
+        GST_DEBUG_OBJECT (src, "ignoring response message");
+        DEBUG_RTSP (src, &message);
+        break;
+      case GST_RTSP_MESSAGE_DATA:
+        GST_DEBUG_OBJECT (src, "got data message");
+        ret = gst_rtspsrc_handle_data (src, &message);
+        if (ret != GST_FLOW_OK)
+          goto handle_data_failed;
+        break;
+      default:
+        GST_WARNING_OBJECT (src, "ignoring unknown message type %d",
+            message.type);
+        break;
+    }
+  }
+  g_assert_not_reached ();
+
+  /* ERRORS */
+server_eof:
+  {
+    GST_DEBUG_OBJECT (src, "we got an eof from the server");
+    GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+        ("The server closed the connection."));
+    src->conninfo.connected = FALSE;
+    gst_rtsp_message_unset (&message);
+    return GST_FLOW_EOS;
+  }
+interrupt:
+  {
+    gst_rtsp_message_unset (&message);
+    GST_DEBUG_OBJECT (src, "got interrupted");
+    return GST_FLOW_FLUSHING;
+  }
+receive_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+        ("Could not receive message. (%s)", str));
+    g_free (str);
+
+    gst_rtsp_message_unset (&message);
+    return GST_FLOW_ERROR;
+  }
+handle_request_failed:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+        ("Could not handle server message. (%s)", str));
+    g_free (str);
+    gst_rtsp_message_unset (&message);
+    return GST_FLOW_ERROR;
+  }
+handle_data_failed:
+  {
+    GST_DEBUG_OBJECT (src, "could no handle data message");
+    return ret;
+  }
+}
+
+/* Task body for UDP transport: keep servicing the RTSP control connection
+ * (server requests, keep-alive on session timeout, optional reconnect on
+ * EOF) while the actual media flows over the UDP sources.  A 401 response
+ * to a keep-alive triggers one authenticated retry. */
+static GstFlowReturn
+gst_rtspsrc_loop_udp (GstRTSPSrc * src)
+{
+  GstRTSPResult res;
+  GstRTSPMessage message = { 0 };
+  gint retry = 0;
+
+  while (TRUE) {
+    gint64 timeout;
+
+    /* get the next timeout interval */
+    timeout = gst_rtsp_connection_next_timeout_usec (src->conninfo.connection);
+
+    GST_DEBUG_OBJECT (src, "doing receive with timeout %d seconds",
+        (gint) timeout / G_USEC_PER_SEC);
+
+    gst_rtsp_message_unset (&message);
+
+    /* we should continue reading the TCP socket because the server might
+     * send us requests. When the session timeout expires, we need to send a
+     * keep-alive request to keep the session open. */
+    if (src->conninfo.flushing) {
+      /* do not attempt to receive if flushing */
+      res = GST_RTSP_EINTR;
+    } else {
+      res = gst_rtspsrc_connection_receive (src, &src->conninfo, &message,
+          timeout);
+    }
+
+    switch (res) {
+      case GST_RTSP_OK:
+        GST_DEBUG_OBJECT (src, "we received a server message");
+        break;
+      case GST_RTSP_EINTR:
+        /* we got interrupted, see what we have to do */
+        goto interrupt;
+      case GST_RTSP_ETIMEOUT:
+        /* send keep-alive, ignore the result, a warning will be posted. */
+        GST_DEBUG_OBJECT (src, "timeout, sending keep-alive");
+        if ((res = gst_rtspsrc_send_keep_alive (src)) == GST_RTSP_EINTR)
+          goto interrupt;
+        continue;
+      case GST_RTSP_EEOF:
+        /* server closed the connection. not very fatal for UDP, reconnect and
+         * see what happens. */
+        GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+            ("The server closed the connection."));
+        if (src->udp_reconnect) {
+          if ((res =
+                  gst_rtsp_conninfo_reconnect (src, &src->conninfo, FALSE)) < 0)
+            goto connect_error;
+        } else {
+          goto server_eof;
+        }
+        continue;
+      case GST_RTSP_ENET:
+        GST_DEBUG_OBJECT (src, "An ethernet problem occurred.");
+        /* FALLTHROUGH: network errors are reported like any other
+         * unhandled result below */
+      default:
+        GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+            ("Unhandled return value %d.", res));
+        goto receive_error;
+    }
+
+    switch (message.type) {
+      case GST_RTSP_MESSAGE_REQUEST:
+        /* server sends us a request message, handle it */
+        res = gst_rtspsrc_handle_request (src, &src->conninfo, &message);
+        if (res == GST_RTSP_EEOF)
+          goto server_eof;
+        else if (res < 0)
+          goto handle_request_failed;
+        break;
+      case GST_RTSP_MESSAGE_RESPONSE:
+        /* we ignore response and data messages */
+        GST_DEBUG_OBJECT (src, "ignoring response message");
+        DEBUG_RTSP (src, &message);
+        if (message.type_data.response.code == GST_RTSP_STS_UNAUTHORIZED) {
+          GST_DEBUG_OBJECT (src, "but is Unauthorized response ...");
+          /* retry at most once with fresh credentials */
+          if (gst_rtspsrc_setup_auth (src, &message) && !(retry++)) {
+            GST_DEBUG_OBJECT (src, "so retrying keep-alive");
+            if ((res = gst_rtspsrc_send_keep_alive (src)) == GST_RTSP_EINTR)
+              goto interrupt;
+          }
+        } else {
+          retry = 0;
+        }
+        break;
+      case GST_RTSP_MESSAGE_DATA:
+        /* we ignore response and data messages */
+        GST_DEBUG_OBJECT (src, "ignoring data message");
+        break;
+      default:
+        GST_WARNING_OBJECT (src, "ignoring unknown message type %d",
+            message.type);
+        break;
+    }
+  }
+  g_assert_not_reached ();
+
+  /* we get here when the connection got interrupted */
+interrupt:
+  {
+    gst_rtsp_message_unset (&message);
+    GST_DEBUG_OBJECT (src, "got interrupted");
+    return GST_FLOW_FLUSHING;
+  }
+connect_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+    GstFlowReturn ret;
+
+    src->conninfo.connected = FALSE;
+    if (res != GST_RTSP_EINTR) {
+      GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
+          ("Could not connect to server. (%s)", str));
+      g_free (str);
+      ret = GST_FLOW_ERROR;
+    } else {
+      ret = GST_FLOW_FLUSHING;
+    }
+    return ret;
+  }
+receive_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+        ("Could not receive message. (%s)", str));
+    g_free (str);
+    return GST_FLOW_ERROR;
+  }
+handle_request_failed:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+    GstFlowReturn ret;
+
+    gst_rtsp_message_unset (&message);
+    if (res != GST_RTSP_EINTR) {
+      GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+          ("Could not handle server message. (%s)", str));
+      g_free (str);
+      ret = GST_FLOW_ERROR;
+    } else {
+      ret = GST_FLOW_FLUSHING;
+    }
+    return ret;
+  }
+server_eof:
+  {
+    GST_DEBUG_OBJECT (src, "we got an eof from the server");
+    GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+        ("The server closed the connection."));
+    src->conninfo.connected = FALSE;
+    gst_rtsp_message_unset (&message);
+    return GST_FLOW_EOS;
+  }
+}
+
+/* Called when no UDP data arrived within the timeout: if the pads were
+ * never activated (no data flowed yet), tear the session down and retry
+ * the whole open/play sequence over TCP as a firewall workaround.  When
+ * data did flow over UDP already, do nothing. */
+static GstRTSPResult
+gst_rtspsrc_reconnect (GstRTSPSrc * src, gboolean async)
+{
+  GstRTSPResult res = GST_RTSP_OK;
+  gboolean restart;
+
+  GST_DEBUG_OBJECT (src, "doing reconnect");
+
+  GST_OBJECT_LOCK (src);
+  /* only restart when the pads were not yet activated, else we were
+   * streaming over UDP */
+  restart = src->need_activate;
+  GST_OBJECT_UNLOCK (src);
+
+  /* no need to restart, we're done */
+  if (!restart)
+    goto done;
+
+  /* we can try only TCP now */
+  src->cur_protocols = GST_RTSP_LOWER_TRANS_TCP;
+
+  /* close and cleanup our state */
+  if ((res = gst_rtspsrc_close (src, async, FALSE)) < 0)
+    goto done;
+
+  /* see if we have TCP left to try. Also don't try TCP when we were configured
+   * with an SDP. */
+  if (!(src->protocols & GST_RTSP_LOWER_TRANS_TCP) || src->from_sdp)
+    goto no_protocols;
+
+  /* We post a warning message now to inform the user
+   * that nothing happened. It's most likely a firewall thing. */
+  GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+      ("Could not receive any UDP packets for %.4f seconds, maybe your "
+          "firewall is blocking it. Retrying using a tcp connection.",
+          gst_guint64_to_gdouble (src->udp_timeout) / 1000000.0));
+
+  /* open new connection using tcp */
+  if (gst_rtspsrc_open (src, async) < 0)
+    goto open_failed;
+
+  /* start playback */
+  if (gst_rtspsrc_play (src, &src->segment, async, NULL) < 0)
+    goto play_failed;
+
+done:
+  return res;
+
+  /* ERRORS */
+no_protocols:
+  {
+    src->cur_protocols = 0;
+    /* no transport possible, post an error and stop */
+    GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+        ("Could not receive any UDP packets for %.4f seconds, maybe your "
+            "firewall is blocking it. No other protocols to try.",
+            gst_guint64_to_gdouble (src->udp_timeout) / 1000000.0));
+    return GST_RTSP_ERROR;
+  }
+open_failed:
+  {
+    /* open/play already posted their own errors; return OK so the task
+     * just stops without a second error */
+    GST_DEBUG_OBJECT (src, "open failed");
+    return GST_RTSP_OK;
+  }
+play_failed:
+  {
+    GST_DEBUG_OBJECT (src, "play failed");
+    return GST_RTSP_OK;
+  }
+}
+
+static void
+gst_rtspsrc_loop_start_cmd (GstRTSPSrc * src, gint cmd)
+{
+  /* Post a progress message announcing that handling of @cmd has begun. */
+  if (cmd == CMD_OPEN) {
+    GST_ELEMENT_PROGRESS (src, START, "open", ("Opening Stream"));
+  } else if (cmd == CMD_PLAY) {
+    GST_ELEMENT_PROGRESS (src, START, "request", ("Sending PLAY request"));
+  } else if (cmd == CMD_PAUSE) {
+    GST_ELEMENT_PROGRESS (src, START, "request", ("Sending PAUSE request"));
+  } else if (cmd == CMD_GET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, START, "request",
+        ("Sending GET_PARAMETER request"));
+  } else if (cmd == CMD_SET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, START, "request",
+        ("Sending SET_PARAMETER request"));
+  } else if (cmd == CMD_CLOSE) {
+    GST_ELEMENT_PROGRESS (src, START, "close", ("Closing Stream"));
+  }
+}
+
+static void
+gst_rtspsrc_loop_complete_cmd (GstRTSPSrc * src, gint cmd)
+{
+  /* Post a progress message announcing that @cmd completed successfully. */
+  if (cmd == CMD_OPEN) {
+    GST_ELEMENT_PROGRESS (src, COMPLETE, "open", ("Opened Stream"));
+  } else if (cmd == CMD_PLAY) {
+    GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PLAY request"));
+  } else if (cmd == CMD_PAUSE) {
+    GST_ELEMENT_PROGRESS (src, COMPLETE, "request", ("Sent PAUSE request"));
+  } else if (cmd == CMD_GET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, COMPLETE, "request",
+        ("Sent GET_PARAMETER request"));
+  } else if (cmd == CMD_SET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, COMPLETE, "request",
+        ("Sent SET_PARAMETER request"));
+  } else if (cmd == CMD_CLOSE) {
+    GST_ELEMENT_PROGRESS (src, COMPLETE, "close", ("Closed Stream"));
+  }
+}
+
+static void
+gst_rtspsrc_loop_cancel_cmd (GstRTSPSrc * src, gint cmd)
+{
+  /* Post a progress message announcing that @cmd was canceled. */
+  if (cmd == CMD_OPEN) {
+    GST_ELEMENT_PROGRESS (src, CANCELED, "open", ("Open canceled"));
+  } else if (cmd == CMD_PLAY) {
+    GST_ELEMENT_PROGRESS (src, CANCELED, "request", ("PLAY canceled"));
+  } else if (cmd == CMD_PAUSE) {
+    GST_ELEMENT_PROGRESS (src, CANCELED, "request", ("PAUSE canceled"));
+  } else if (cmd == CMD_GET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, CANCELED, "request",
+        ("GET_PARAMETER canceled"));
+  } else if (cmd == CMD_SET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, CANCELED, "request",
+        ("SET_PARAMETER canceled"));
+  } else if (cmd == CMD_CLOSE) {
+    GST_ELEMENT_PROGRESS (src, CANCELED, "close", ("Close canceled"));
+  }
+}
+
+static void
+gst_rtspsrc_loop_error_cmd (GstRTSPSrc * src, gint cmd)
+{
+  /* Post a progress message announcing that @cmd failed. */
+  if (cmd == CMD_OPEN) {
+    GST_ELEMENT_PROGRESS (src, ERROR, "open", ("Open failed"));
+  } else if (cmd == CMD_PLAY) {
+    GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PLAY failed"));
+  } else if (cmd == CMD_PAUSE) {
+    GST_ELEMENT_PROGRESS (src, ERROR, "request", ("PAUSE failed"));
+  } else if (cmd == CMD_GET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, ERROR, "request", ("GET_PARAMETER failed"));
+  } else if (cmd == CMD_SET_PARAMETER) {
+    GST_ELEMENT_PROGRESS (src, ERROR, "request", ("SET_PARAMETER failed"));
+  } else if (cmd == CMD_CLOSE) {
+    GST_ELEMENT_PROGRESS (src, ERROR, "close", ("Close failed"));
+  }
+}
+
+static void
+gst_rtspsrc_loop_end_cmd (GstRTSPSrc * src, gint cmd, GstRTSPResult ret)
+{
+  /* Report the final outcome of @cmd based on its RTSP result code. */
+  switch (ret) {
+    case GST_RTSP_OK:
+      gst_rtspsrc_loop_complete_cmd (src, cmd);
+      break;
+    case GST_RTSP_EINTR:
+      gst_rtspsrc_loop_cancel_cmd (src, cmd);
+      break;
+    default:
+      gst_rtspsrc_loop_error_cmd (src, cmd);
+      break;
+  }
+}
+
+/* Queue @cmd for the RTSP task.  A pending RECONNECT, CLOSE, SET_PARAMETER
+ * or GET_PARAMETER command takes precedence over the new one; any other
+ * previously pending command is canceled.  When the currently busy command
+ * matches @mask, the connections are flushed to interrupt it.  Returns TRUE
+ * when such a flush was issued. */
+static gboolean
+gst_rtspsrc_loop_send_cmd (GstRTSPSrc * src, gint cmd, gint mask)
+{
+  gint old;
+  gboolean flushed = FALSE;
+
+  /* start new request */
+  gst_rtspsrc_loop_start_cmd (src, cmd);
+
+  GST_DEBUG_OBJECT (src, "sending cmd %s", cmd_to_string (cmd));
+
+  GST_OBJECT_LOCK (src);
+  old = src->pending_cmd;
+
+  if (old == CMD_RECONNECT) {
+    GST_DEBUG_OBJECT (src, "ignore, we were reconnecting");
+    cmd = CMD_RECONNECT;
+  } else if (old == CMD_CLOSE) {
+    /* our CMD_CLOSE might have interrupted CMD_LOOP. gst_rtspsrc_loop
+     * will send a CMD_WAIT which would cancel our pending CMD_CLOSE (if
+     * still pending). We just avoid it here by making sure CMD_CLOSE is
+     * still the pending command. */
+    GST_DEBUG_OBJECT (src, "ignore, we were closing");
+    cmd = CMD_CLOSE;
+  } else if (old == CMD_SET_PARAMETER) {
+    GST_DEBUG_OBJECT (src, "ignore, we have a pending %s", cmd_to_string (old));
+    cmd = CMD_SET_PARAMETER;
+  } else if (old == CMD_GET_PARAMETER) {
+    GST_DEBUG_OBJECT (src, "ignore, we have a pending %s", cmd_to_string (old));
+    cmd = CMD_GET_PARAMETER;
+  } else if (old != CMD_WAIT) {
+    src->pending_cmd = CMD_WAIT;
+    /* the object lock must be dropped while posting the cancel progress
+     * message; CMD_WAIT above keeps other threads from racing us */
+    GST_OBJECT_UNLOCK (src);
+    /* cancel previous request */
+    GST_DEBUG_OBJECT (src, "cancel previous request %s", cmd_to_string (old));
+    gst_rtspsrc_loop_cancel_cmd (src, old);
+    GST_OBJECT_LOCK (src);
+  }
+  src->pending_cmd = cmd;
+  /* interrupt if allowed */
+  if (src->busy_cmd & mask) {
+    GST_DEBUG_OBJECT (src, "connection flush busy %s",
+        cmd_to_string (src->busy_cmd));
+    gst_rtspsrc_connection_flush (src, TRUE);
+    flushed = TRUE;
+  } else {
+    GST_DEBUG_OBJECT (src, "not interrupting busy cmd %s",
+        cmd_to_string (src->busy_cmd));
+  }
+  if (src->task)
+    gst_task_start (src->task);
+  GST_OBJECT_UNLOCK (src);
+
+  return flushed;
+}
+
+/* Send @cmd like gst_rtspsrc_loop_send_cmd() and, when @timeout (in
+ * nanoseconds) is non-zero, block until the command is no longer pending or
+ * busy, or the timeout expires.  Returns whether the connection was
+ * flushed. */
+static gboolean
+gst_rtspsrc_loop_send_cmd_and_wait (GstRTSPSrc * src, gint cmd, gint mask,
+    GstClockTime timeout)
+{
+  gboolean flushed = gst_rtspsrc_loop_send_cmd (src, cmd, mask);
+
+  if (timeout > 0) {
+    /* @timeout is in ns, g_get_monotonic_time() works in microseconds */
+    gint64 end_time = g_get_monotonic_time () + (timeout / 1000);
+    GST_OBJECT_LOCK (src);
+    while (src->pending_cmd == cmd || src->busy_cmd == cmd) {
+      if (!g_cond_wait_until (&src->cmd_cond, GST_OBJECT_GET_LOCK (src),
+              end_time)) {
+        GST_WARNING_OBJECT (src,
+            "Timed out waiting for TEARDOWN to be processed.");
+        break;                  /* timeout passed */
+      }
+    }
+    GST_OBJECT_UNLOCK (src);
+  }
+  return flushed;
+}
+
+/* Streaming task entry point: run one receive iteration (interleaved or
+ * UDP mode) and, when that returns a non-OK flow, pause the task while
+ * performing the appropriate EOS / error handling.  Returns FALSE when the
+ * task should stop looping. */
+static gboolean
+gst_rtspsrc_loop (GstRTSPSrc * src)
+{
+  GstFlowReturn ret;
+
+  if (!src->conninfo.connection || !src->conninfo.connected)
+    goto no_connection;
+
+  if (src->interleaved)
+    ret = gst_rtspsrc_loop_interleaved (src);
+  else
+    ret = gst_rtspsrc_loop_udp (src);
+
+  if (ret != GST_FLOW_OK)
+    goto pause;
+
+  return TRUE;
+
+  /* ERRORS */
+no_connection:
+  {
+    GST_WARNING_OBJECT (src, "we are not connected");
+    ret = GST_FLOW_FLUSHING;
+    goto pause;
+  }
+pause:
+  {
+    const gchar *reason = gst_flow_get_name (ret);
+
+    GST_DEBUG_OBJECT (src, "pausing task, reason %s", reason);
+    src->running = FALSE;
+    if (ret == GST_FLOW_EOS) {
+      /* perform EOS logic: segment seeks get a segment-done message/event
+       * instead of EOS */
+      if (src->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+        gst_element_post_message (GST_ELEMENT_CAST (src),
+            gst_message_new_segment_done (GST_OBJECT_CAST (src),
+                src->segment.format, src->segment.position));
+        gst_rtspsrc_push_event (src,
+            gst_event_new_segment_done (src->segment.format,
+                src->segment.position));
+      } else {
+        gst_rtspsrc_push_event (src, gst_event_new_eos ());
+      }
+    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+      /* for fatal errors we post an error message, post the error before the
+       * EOS so the app knows about the error first. */
+      GST_ELEMENT_FLOW_ERROR (src, ret);
+      gst_rtspsrc_push_event (src, gst_event_new_eos ());
+    }
+    gst_rtspsrc_loop_send_cmd (src, CMD_WAIT, CMD_LOOP);
+    return FALSE;
+  }
+}
+
+#ifndef GST_DISABLE_GST_DEBUG
+/* Map the highest set bit of the auth-method flags to a readable name
+ * (debug output only). */
+static const gchar *
+gst_rtsp_auth_method_to_string (GstRTSPAuthMethod method)
+{
+  gint bit = 0;                 /* renamed from 'index' (shadows libc index()) */
+
+  for (; method != 0; method >>= 1)
+    bit++;
+
+  if (bit == 0)
+    return "None";
+  if (bit == 1)
+    return "Basic";
+  if (bit == 2)
+    return "Digest";
+
+  return "Unknown";
+}
+#endif
+
+/* Parse a WWW-Authenticate Response header and determine the
+ * available authentication methods
+ *
+ * This code should also cope with the fact that each WWW-Authenticate
+ * header can contain multiple challenge methods + tokens
+ *
+ * At the moment, for Basic auth, we just do a minimal check and don't
+ * even parse out the realm
+ *
+ * @methods is OR-ed with the schemes found; for Digest challenges the
+ * parameters are copied onto @conn and @stale is set when the server
+ * flagged the previous nonce as stale. */
+static void
+gst_rtspsrc_parse_auth_hdr (GstRTSPMessage * response,
+    GstRTSPAuthMethod * methods, GstRTSPConnection * conn, gboolean * stale)
+{
+  GstRTSPAuthCredential **credentials, **credential;
+
+  g_return_if_fail (response != NULL);
+  g_return_if_fail (methods != NULL);
+  g_return_if_fail (stale != NULL);
+
+  credentials =
+      gst_rtsp_message_parse_auth_credentials (response,
+      GST_RTSP_HDR_WWW_AUTHENTICATE);
+  if (!credentials)
+    return;
+
+  credential = credentials;
+  while (*credential) {
+    if ((*credential)->scheme == GST_RTSP_AUTH_BASIC) {
+      *methods |= GST_RTSP_AUTH_BASIC;
+    } else if ((*credential)->scheme == GST_RTSP_AUTH_DIGEST) {
+      GstRTSPAuthParam **param = (*credential)->params;
+
+      *methods |= GST_RTSP_AUTH_DIGEST;
+
+      /* start from a clean slate for each Digest challenge */
+      gst_rtsp_connection_clear_auth_params (conn);
+      *stale = FALSE;
+
+      while (*param) {
+        if (strcmp ((*param)->name, "stale") == 0
+            && g_ascii_strcasecmp ((*param)->value, "TRUE") == 0)
+          *stale = TRUE;
+        gst_rtsp_connection_set_auth_param (conn, (*param)->name,
+            (*param)->value);
+        param++;
+      }
+    }
+
+    credential++;
+  }
+
+  gst_rtsp_auth_credentials_free (credentials);
+}
+
+/**
+ * gst_rtspsrc_setup_auth:
+ * @src: the rtsp source
+ * @response: the 401 response that triggered authentication setup
+ *
+ * Configure a username and password and auth method on the
+ * connection object based on a response we received from the
+ * peer.
+ *
+ * Currently, this requires that a username and password were supplied
+ * in the uri. In the future, they may be requested on demand by sending
+ * a message up the bus.
+ *
+ * Returns: TRUE if authentication information could be set up correctly.
+ */
+static gboolean
+gst_rtspsrc_setup_auth (GstRTSPSrc * src, GstRTSPMessage * response)
+{
+  gchar *user = NULL;
+  gchar *pass = NULL;
+  GstRTSPAuthMethod avail_methods = GST_RTSP_AUTH_NONE;
+  GstRTSPAuthMethod method;
+  GstRTSPResult auth_result;
+  GstRTSPUrl *url;
+  GstRTSPConnection *conn;
+  gboolean stale = FALSE;
+
+  conn = src->conninfo.connection;
+
+  /* Identify the available auth methods and see if any are supported */
+  gst_rtspsrc_parse_auth_hdr (response, &avail_methods, conn, &stale);
+
+  if (avail_methods == GST_RTSP_AUTH_NONE)
+    goto no_auth_available;
+
+  /* For digest auth, if the response indicates that the session
+   * data are stale, we just update them in the connection object and
+   * return TRUE to retry the request */
+  if (stale)
+    src->tried_url_auth = FALSE;
+
+  url = gst_rtsp_connection_get_url (conn);
+
+  /* Do we have username and password available? URL credentials are tried
+   * once before falling back to the user-id/user-pw properties. */
+  if (url != NULL && !src->tried_url_auth && url->user != NULL
+      && url->passwd != NULL) {
+    user = url->user;
+    pass = url->passwd;
+    src->tried_url_auth = TRUE;
+    GST_DEBUG_OBJECT (src,
+        "Attempting authentication using credentials from the URL");
+  } else {
+    user = src->user_id;
+    pass = src->user_pw;
+    GST_DEBUG_OBJECT (src,
+        "Attempting authentication using credentials from the properties");
+  }
+
+  /* FIXME: If the url didn't contain username and password or we tried them
+   * already, request a username and passwd from the application via some kind
+   * of credentials request message */
+
+  /* If we don't have a username and passwd at this point, bail out. */
+  if (user == NULL || pass == NULL)
+    goto no_user_pass;
+
+  /* Try to configure for each available authentication method, strongest to
+   * weakest */
+  for (method = GST_RTSP_AUTH_MAX; method != GST_RTSP_AUTH_NONE; method >>= 1) {
+    /* Check if this method is available on the server */
+    if ((method & avail_methods) == 0)
+      continue;
+
+    /* Pass the credentials to the connection to try on the next request */
+    auth_result = gst_rtsp_connection_set_auth (conn, method, user, pass);
+    /* INVAL indicates an invalid username/passwd were supplied, so we'll just
+     * ignore it and end up retrying later */
+    if (auth_result == GST_RTSP_OK || auth_result == GST_RTSP_EINVAL) {
+      GST_DEBUG_OBJECT (src, "Attempting %s authentication",
+          gst_rtsp_auth_method_to_string (method));
+      break;
+    }
+  }
+
+  if (method == GST_RTSP_AUTH_NONE)
+    goto no_auth_available;
+
+  return TRUE;
+
+no_auth_available:
+  {
+    /* Output an error indicating that we couldn't connect because there were
+     * no supported authentication protocols */
+    GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
+        ("No supported authentication protocol was found"));
+    return FALSE;
+  }
+no_user_pass:
+  {
+    /* We don't fire an error message, we just return FALSE and let the
+     * normal NOT_AUTHORIZED error be propagated */
+    return FALSE;
+  }
+}
+
+/* Receive messages on @conninfo until a response message arrives, handling
+ * interposed server requests and interleaved data messages along the way.
+ * Stores the status code in @code (when non-NULL) and, for successful
+ * responses, records a new Content-Base header on @src. */
+static GstRTSPResult
+gst_rtsp_src_receive_response (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
+    GstRTSPMessage * response, GstRTSPStatusCode * code)
+{
+  GstRTSPStatusCode thecode;
+  gchar *content_base = NULL;
+  GstRTSPResult res;
+
+next:
+  if (conninfo->flushing) {
+    /* do not attempt to receive if flushing */
+    res = GST_RTSP_EINTR;
+  } else {
+    res = gst_rtspsrc_connection_receive (src, conninfo, response,
+        src->tcp_timeout);
+  }
+
+  if (res < 0)
+    goto receive_error;
+
+  DEBUG_RTSP (src, response);
+
+  switch (response->type) {
+    case GST_RTSP_MESSAGE_REQUEST:
+      res = gst_rtspsrc_handle_request (src, conninfo, response);
+      if (res == GST_RTSP_EEOF)
+        goto server_eof;
+      else if (res < 0)
+        goto handle_request_failed;
+
+      /* Not a response, receive next message */
+      goto next;
+    case GST_RTSP_MESSAGE_RESPONSE:
+      /* ok, a response is good */
+      GST_DEBUG_OBJECT (src, "received response message");
+      break;
+    case GST_RTSP_MESSAGE_DATA:
+      /* get next response */
+      GST_DEBUG_OBJECT (src, "handle data response message");
+      gst_rtspsrc_handle_data (src, response);
+
+      /* Not a response, receive next message */
+      goto next;
+    default:
+      GST_WARNING_OBJECT (src, "ignoring unknown message type %d",
+          response->type);
+
+      /* Not a response, receive next message */
+      goto next;
+  }
+
+  thecode = response->type_data.response.code;
+
+  GST_DEBUG_OBJECT (src, "got response message %d", thecode);
+
+  /* if the caller wanted the result code, we store it. */
+  if (code)
+    *code = thecode;
+
+  /* If the request didn't succeed, bail out before doing any more */
+  if (thecode != GST_RTSP_STS_OK)
+    return GST_RTSP_OK;
+
+  /* store new content base if any */
+  gst_rtsp_message_get_header (response, GST_RTSP_HDR_CONTENT_BASE,
+      &content_base, 0);
+  if (content_base) {
+    g_free (src->content_base);
+    src->content_base = g_strdup (content_base);
+  }
+
+  return GST_RTSP_OK;
+
+  /* ERRORS */
+receive_error:
+  {
+    switch (res) {
+      case GST_RTSP_EEOF:
+        /* EOF is passed through for the caller to decide what to do */
+        return GST_RTSP_EEOF;
+      default:
+      {
+        gchar *str = gst_rtsp_strresult (res);
+
+        if (res != GST_RTSP_EINTR) {
+          GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+              ("Could not receive message. (%s)", str));
+        } else {
+          GST_WARNING_OBJECT (src, "receive interrupted");
+        }
+        g_free (str);
+        break;
+      }
+    }
+    return res;
+  }
+handle_request_failed:
+  {
+    /* ERROR was posted */
+    gst_rtsp_message_unset (response);
+    return res;
+  }
+server_eof:
+  {
+    GST_DEBUG_OBJECT (src, "we got an eof from the server");
+    GST_ELEMENT_WARNING (src, RESOURCE, READ, (NULL),
+        ("The server closed the connection."));
+    gst_rtsp_message_unset (response);
+    return res;
+  }
+}
+
+
/* Send @request on @conninfo and, if @response is non-NULL, wait for the
 * matching response.  Emits the "before-send" signal first so applications
 * can veto or modify the message.  If the server closed the connection
 * (EEOF) we reconnect once (UDP, non-interleaved only) and retry.
 *
 * @code: optional out-parameter for the response status code.
 * Returns a GstRTSPResult; errors are posted on the bus (except EINTR). */
static GstRTSPResult
gst_rtspsrc_try_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
    GstRTSPMessage * request, GstRTSPMessage * response,
    GstRTSPStatusCode * code)
{
  GstRTSPResult res;
  gint try = 0;
  gboolean allow_send = TRUE;

again:
  /* extensions may add headers, unless short headers were requested */
  if (!src->short_header)
    gst_rtsp_ext_list_before_send (src->extensions, request);

  g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_BEFORE_SEND], 0,
      request, &allow_send);
  if (!allow_send) {
    GST_DEBUG_OBJECT (src, "skipping message, disabled by signal");
    return GST_RTSP_OK;
  }

  GST_DEBUG_OBJECT (src, "sending message");

  DEBUG_RTSP (src, request);

  res = gst_rtspsrc_connection_send (src, conninfo, request, src->tcp_timeout);
  if (res < 0)
    goto send_error;

  gst_rtsp_connection_reset_timeout (conninfo->connection);
  if (!response)
    return res;

  res = gst_rtsp_src_receive_response (src, conninfo, response, code);
  if (res == GST_RTSP_EEOF) {
    GST_WARNING_OBJECT (src, "server closed connection");
    /* only try once after reconnect, then fallthrough and error out */
    if ((try == 0) && !src->interleaved && src->udp_reconnect) {
      try++;
      /* if reconnect succeeds, try again */
      if ((res = gst_rtsp_conninfo_reconnect (src, &src->conninfo, FALSE)) == 0)
        goto again;
    }
  }

  if (res < 0)
    goto receive_error;

  gst_rtsp_ext_list_after_send (src->extensions, request, response);

  return res;

  /* ERRORS */
send_error:
  {
    gchar *str = gst_rtsp_strresult (res);

    if (res != GST_RTSP_EINTR) {
      GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
          ("Could not send message. (%s)", str));
    } else {
      GST_WARNING_OBJECT (src, "send interrupted");
    }
    g_free (str);
    return res;
  }

receive_error:
  {
    gchar *str = gst_rtsp_strresult (res);

    if (res != GST_RTSP_EINTR) {
      GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
          ("Could not receive message. (%s)", str));
    } else {
      GST_WARNING_OBJECT (src, "receive interrupted");
    }
    g_free (str);
    return res;
  }
}
+
+/**
+ * gst_rtspsrc_send:
+ * @src: the rtsp source
+ * @conninfo: the connection information to send on
+ * @request: must point to a valid request
+ * @response: must point to an empty #GstRTSPMessage
+ * @code: an optional code result
+ * @versions: List of versions to try, setting it back onto the @request message
+ * if not set, `src->version` will be used as RTSP version.
+ *
+ * send @request and retrieve the response in @response. optionally @code can be
+ * non-NULL in which case it will contain the status code of the response.
+ *
+ * If this function returns #GST_RTSP_OK, @response will contain a valid response
+ * message that should be cleaned with gst_rtsp_message_unset() after usage.
+ *
+ * If @code is NULL, this function will return #GST_RTSP_ERROR (with an invalid
+ * @response message) if the response code was not 200 (OK).
+ *
+ * If the attempt results in an authentication failure, then this will attempt
+ * to retrieve authentication credentials via gst_rtspsrc_setup_auth and retry
+ * the request.
+ *
+ * Returns: #GST_RTSP_OK if the processing was successful.
+ */
static GstRTSPResult
gst_rtspsrc_send (GstRTSPSrc * src, GstRTSPConnInfo * conninfo,
    GstRTSPMessage * request, GstRTSPMessage * response,
    GstRTSPStatusCode * code, GstRTSPVersion * versions)
{
  GstRTSPStatusCode int_code = GST_RTSP_STS_OK;
  GstRTSPResult res = GST_RTSP_ERROR;
  gint count;
  gboolean retry;
  GstRTSPMethod method = GST_RTSP_INVALID;
  gint version_retry = 0;

  count = 0;
  do {
    retry = FALSE;

    /* make sure we don't loop forever */
    if (count++ > 8)
      break;

    /* save method so we can disable it when the server complains */
    method = request->type_data.request.method;

    /* without an explicit version list, use the element's configured version */
    if (!versions)
      request->type_data.request.version = src->version;

    if ((res =
            gst_rtspsrc_try_send (src, conninfo, request, response,
                &int_code)) < 0)
      goto error;

    switch (int_code) {
      case GST_RTSP_STS_UNAUTHORIZED:
      case GST_RTSP_STS_NOT_FOUND:
        if (gst_rtspsrc_setup_auth (src, response)) {
          /* Try the request/response again after configuring the auth info
           * and loop again */
          retry = TRUE;
        }
        break;
      case GST_RTSP_STS_RTSP_VERSION_NOT_SUPPORTED:
        GST_INFO_OBJECT (src, "Version %s not supported by the server",
            versions ? gst_rtsp_version_as_text (versions[version_retry]) :
            "unknown");
        /* walk the caller-provided version list until we hit the
         * GST_RTSP_VERSION_INVALID terminator */
        if (versions && versions[version_retry] != GST_RTSP_VERSION_INVALID) {
          GST_INFO_OBJECT (src, "Unsupported version %s => trying %s",
              gst_rtsp_version_as_text (request->type_data.request.version),
              gst_rtsp_version_as_text (versions[version_retry]));
          request->type_data.request.version = versions[version_retry];
          retry = TRUE;
          version_retry++;
          break;
        }
        /* fallthrough */
      default:
        break;
    }
  } while (retry == TRUE);

  /* If the user requested the code, let them handle errors, otherwise
   * post an error below */
  if (code != NULL)
    *code = int_code;
  else if (int_code != GST_RTSP_STS_OK)
    goto error_response;

  return res;

  /* ERRORS */
error:
  {
    GST_DEBUG_OBJECT (src, "got error %d", res);
    return res;
  }
error_response:
  {
    res = GST_RTSP_ERROR;

    switch (response->type_data.response.code) {
      case GST_RTSP_STS_NOT_FOUND:
        RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_FOUND,
            "Not found");
        break;
      case GST_RTSP_STS_UNAUTHORIZED:
        RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, NOT_AUTHORIZED,
            "Unauthorized");
        break;
      case GST_RTSP_STS_MOVED_PERMANENTLY:
      case GST_RTSP_STS_MOVE_TEMPORARILY:
      {
        gchar *new_location;
        GstRTSPLowerTrans transports;

        GST_DEBUG_OBJECT (src, "got redirection");
        /* if we don't have a Location Header, we must error */
        if (gst_rtsp_message_get_header (response, GST_RTSP_HDR_LOCATION,
                &new_location, 0) < 0)
          break;

        /* When we receive a redirect result, we go back to the INIT state after
         * parsing the new URI. The caller should do the needed steps to issue
         * a new setup when it detects this state change. */
        GST_DEBUG_OBJECT (src, "redirection to %s", new_location);

        /* save current transports */
        if (src->conninfo.url)
          transports = src->conninfo.url->transports;
        else
          transports = GST_RTSP_LOWER_TRANS_UNKNOWN;

        gst_rtspsrc_uri_set_uri (GST_URI_HANDLER (src), new_location, NULL);

        /* set old transports */
        if (src->conninfo.url && transports != GST_RTSP_LOWER_TRANS_UNKNOWN)
          src->conninfo.url->transports = transports;

        src->need_redirect = TRUE;
        res = GST_RTSP_OK;
        break;
      }
      case GST_RTSP_STS_NOT_ACCEPTABLE:
      case GST_RTSP_STS_NOT_IMPLEMENTED:
      case GST_RTSP_STS_METHOD_NOT_ALLOWED:
        /* Some cameras (e.g. HikVision DS-2CD2732F-IS) return "551
         * Option not supported" when a command is sent that is not implemented
         * (e.g. PAUSE). Instead; it should return "501 Not Implemented".
         *
         * This is wrong, as previously, the camera did announce support
         * for PAUSE in the OPTIONS.
         *
         * In this case, handle the 551 as if it was 501 to avoid throwing
         * errors to application level. */
      case GST_RTSP_STS_OPTION_NOT_SUPPORTED:
        GST_WARNING_OBJECT (src, "got NOT IMPLEMENTED, disable method %s",
            gst_rtsp_method_as_text (method));
        /* disable the unsupported method so we never send it again */
        src->methods &= ~method;
        res = GST_RTSP_OK;
        break;
      default:
        RTSP_SRC_RESPONSE_ERROR (src, response, RESOURCE, READ,
            "Unhandled error");
        break;
    }
    /* if we return ERROR we should unset the response ourselves */
    if (res == GST_RTSP_ERROR)
      gst_rtsp_message_unset (response);

    return res;
  }
}
+
/* GstRTSPExtension callback: send @request on the element's main connection
 * and wait for @response, using the default version and no code filtering. */
static GstRTSPResult
gst_rtspsrc_send_cb (GstRTSPExtension * ext, GstRTSPMessage * request,
    GstRTSPMessage * response, GstRTSPSrc * src)
{
  return gst_rtspsrc_send (src, &src->conninfo, request, response, NULL, NULL);
}
+
+
+/* parse the response and collect all the supported methods. We need this
+ * information so that we don't try to send an unsupported request to the
+ * server.
+ */
+static gboolean
+gst_rtspsrc_parse_methods (GstRTSPSrc * src, GstRTSPMessage * response)
+{
+ GstRTSPHeaderField field;
+ gchar *respoptions;
+ gint indx = 0;
+
+ /* reset supported methods */
+ src->methods = 0;
+
+ /* Try Allow Header first */
+ field = GST_RTSP_HDR_ALLOW;
+ while (TRUE) {
+ respoptions = NULL;
+ gst_rtsp_message_get_header (response, field, &respoptions, indx);
+ if (!respoptions)
+ break;
+
+ src->methods |= gst_rtsp_options_from_text (respoptions);
+
+ indx++;
+ }
+
+ indx = 0;
+ field = GST_RTSP_HDR_PUBLIC;
+ while (TRUE) {
+ respoptions = NULL;
+ gst_rtsp_message_get_header (response, field, &respoptions, indx);
+ if (!respoptions)
+ break;
+
+ src->methods |= gst_rtsp_options_from_text (respoptions);
+
+ indx++;
+ }
+
+ if (src->methods == 0) {
+ /* neither Allow nor Public are required, assume the server supports
+ * at least DESCRIBE, SETUP, we always assume it supports PLAY as
+ * well. */
+ GST_DEBUG_OBJECT (src, "could not get OPTIONS");
+ src->methods = GST_RTSP_DESCRIBE | GST_RTSP_SETUP;
+ }
+ /* always assume PLAY, FIXME, extensions should be able to override
+ * this */
+ src->methods |= GST_RTSP_PLAY;
+ /* also assume it will support Range */
+ src->seekable = G_MAXFLOAT;
+
+ /* we need describe and setup */
+ if (!(src->methods & GST_RTSP_DESCRIBE))
+ goto no_describe;
+ if (!(src->methods & GST_RTSP_SETUP))
+ goto no_setup;
+
+ return TRUE;
+
+ /* ERRORS */
+no_describe:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
+ ("Server does not support DESCRIBE."));
+ return FALSE;
+ }
+no_setup:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
+ ("Server does not support SETUP."));
+ return FALSE;
+ }
+}
+
/* masks to be kept in sync with the hardcoded protocol order of preference
 * in code below: UDP unicast first, then UDP multicast, then TCP */
static const guint protocol_masks[] = {
  GST_RTSP_LOWER_TRANS_UDP,
  GST_RTSP_LOWER_TRANS_UDP_MCAST,
  GST_RTSP_LOWER_TRANS_TCP,
  0                             /* sentinel, terminates iteration */
};
+
+static GstRTSPResult
+gst_rtspsrc_create_transports_string (GstRTSPSrc * src,
+ GstRTSPLowerTrans protocols, GstRTSPProfile profile, gchar ** transports)
+{
+ GstRTSPResult res;
+ GString *result;
+ gboolean add_udp_str;
+
+ *transports = NULL;
+
+ res =
+ gst_rtsp_ext_list_get_transports (src->extensions, protocols, transports);
+
+ if (res < 0)
+ goto failed;
+
+ GST_DEBUG_OBJECT (src, "got transports %s", GST_STR_NULL (*transports));
+
+ /* extension listed transports, use those */
+ if (*transports != NULL)
+ return GST_RTSP_OK;
+
+ /* it's the default */
+ add_udp_str = FALSE;
+
+ /* the default RTSP transports */
+ result = g_string_new ("RTP");
+
+ switch (profile) {
+ case GST_RTSP_PROFILE_AVP:
+ g_string_append (result, "/AVP");
+ break;
+ case GST_RTSP_PROFILE_SAVP:
+ g_string_append (result, "/SAVP");
+ break;
+ case GST_RTSP_PROFILE_AVPF:
+ g_string_append (result, "/AVPF");
+ break;
+ case GST_RTSP_PROFILE_SAVPF:
+ g_string_append (result, "/SAVPF");
+ break;
+ default:
+ break;
+ }
+
+ if (protocols & GST_RTSP_LOWER_TRANS_UDP) {
+ GST_DEBUG_OBJECT (src, "adding UDP unicast");
+ if (add_udp_str)
+ g_string_append (result, "/UDP");
+ g_string_append (result, ";unicast;client_port=%%u1-%%u2");
+ } else if (protocols & GST_RTSP_LOWER_TRANS_UDP_MCAST) {
+ GST_DEBUG_OBJECT (src, "adding UDP multicast");
+ /* we don't have to allocate any UDP ports yet, if the selected transport
+ * turns out to be multicast we can create them and join the multicast
+ * group indicated in the transport reply */
+ if (add_udp_str)
+ g_string_append (result, "/UDP");
+ g_string_append (result, ";multicast");
+ if (src->next_port_num != 0) {
+ if (src->client_port_range.max > 0 &&
+ src->next_port_num >= src->client_port_range.max)
+ goto no_ports;
+
+ g_string_append_printf (result, ";client_port=%d-%d",
+ src->next_port_num, src->next_port_num + 1);
+ }
+ } else if (protocols & GST_RTSP_LOWER_TRANS_TCP) {
+ GST_DEBUG_OBJECT (src, "adding TCP");
+
+ g_string_append (result, "/TCP;unicast;interleaved=%%i1-%%i2");
+ }
+ *transports = g_string_free (result, FALSE);
+
+ GST_DEBUG_OBJECT (src, "prepared transports %s", GST_STR_NULL (*transports));
+
+ return GST_RTSP_OK;
+
+ /* ERRORS */
+failed:
+ {
+ GST_ERROR ("extension gave error %d", res);
+ return res;
+ }
+no_ports:
+ {
+ GST_ERROR ("no more ports available");
+ return GST_RTSP_ERROR;
+ }
+}
+
/* Replace the "%%u1"/"%%u2" (client RTP/RTCP UDP port) and "%%i1"/"%%i2"
 * (interleaved channel) placeholders in *@transports with real values,
 * allocating a UDP port pair if needed.  @orig_rtpport/@orig_rtcpport, when
 * non-zero, force reuse of a previously allocated port pair (WMServer
 * workaround in the caller).  *@transports is replaced by a new allocation. */
static GstRTSPResult
gst_rtspsrc_prepare_transports (GstRTSPStream * stream, gchar ** transports,
    gint orig_rtpport, gint orig_rtcpport)
{
  GstRTSPSrc *src;
  gint nr_udp, nr_int;
  gchar *next, *p;
  gint rtpport = 0, rtcpport = 0;
  GString *str;

  src = stream->parent;

  /* find number of placeholders first */
  if (strstr (*transports, "%%i2"))
    nr_int = 2;
  else if (strstr (*transports, "%%i1"))
    nr_int = 1;
  else
    nr_int = 0;

  if (strstr (*transports, "%%u2"))
    nr_udp = 2;
  else if (strstr (*transports, "%%u1"))
    nr_udp = 1;
  else
    nr_udp = 0;

  /* nothing to substitute, keep the string as-is */
  if (nr_udp == 0 && nr_int == 0)
    goto done;

  if (nr_udp > 0) {
    if (!orig_rtpport || !orig_rtcpport) {
      if (!gst_rtspsrc_alloc_udp_ports (stream, &rtpport, &rtcpport))
        goto failed;
    } else {
      rtpport = orig_rtpport;
      rtcpport = orig_rtcpport;
    }
  }

  /* rebuild the string, expanding each 4-character "%%xN" placeholder */
  str = g_string_new ("");
  p = *transports;
  while ((next = strstr (p, "%%"))) {
    g_string_append_len (str, p, next - p);
    if (next[2] == 'u') {
      if (next[3] == '1')
        g_string_append_printf (str, "%d", rtpport);
      else if (next[3] == '2')
        g_string_append_printf (str, "%d", rtcpport);
    }
    if (next[2] == 'i') {
      if (next[3] == '1')
        g_string_append_printf (str, "%d", src->free_channel);
      else if (next[3] == '2')
        g_string_append_printf (str, "%d", src->free_channel + 1);

    }

    p = next + 4;
  }
  /* in RTSP 2.0 we propose the channel pair ourselves, so advance it here;
   * for 1.x the channels are updated from the server's SETUP reply instead */
  if (src->version >= GST_RTSP_VERSION_2_0)
    src->free_channel += 2;

  /* append final part */
  g_string_append (str, p);

  g_free (*transports);
  *transports = g_string_free (str, FALSE);

done:
  return GST_RTSP_OK;

  /* ERRORS */
failed:
  {
    GST_ERROR ("failed to allocate udp ports");
    return GST_RTSP_ERROR;
  }
}
+
+static GstCaps *
+signal_get_srtcp_params (GstRTSPSrc * src, GstRTSPStream * stream)
+{
+ GstCaps *caps = NULL;
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_REQUEST_RTCP_KEY], 0,
+ stream->id, &caps);
+
+ if (caps != NULL)
+ GST_DEBUG_OBJECT (src, "SRTP parameters received");
+
+ return caps;
+}
+
+static GstCaps *
+default_srtcp_params (void)
+{
+ guint i;
+ GstCaps *caps;
+ GstBuffer *buf;
+ guint8 *key_data;
+#define KEY_SIZE 30
+ guint data_size = GST_ROUND_UP_4 (KEY_SIZE);
+
+ /* create a random key */
+ key_data = g_malloc (data_size);
+ for (i = 0; i < data_size; i += 4)
+ GST_WRITE_UINT32_BE (key_data + i, g_random_int ());
+
+ buf = gst_buffer_new_wrapped (key_data, KEY_SIZE);
+
+ caps = gst_caps_new_simple ("application/x-srtcp",
+ "srtp-key", GST_TYPE_BUFFER, buf,
+ "srtp-cipher", G_TYPE_STRING, "aes-128-icm",
+ "srtp-auth", G_TYPE_STRING, "hmac-sha1-80",
+ "srtcp-cipher", G_TYPE_STRING, "aes-128-icm",
+ "srtcp-auth", G_TYPE_STRING, "hmac-sha1-80", NULL);
+
+ gst_buffer_unref (buf);
+
+ return caps;
+}
+
/* Build the KeyMgmt header value (MIKEY message, base64-encoded and wrapped
 * in a key-mgmt SDP attribute) for a SAVP/SAVPF stream.  Stores the SRTCP
 * caps in stream->srtcpparams (application-provided or defaults).
 * Returns a newly-allocated string, or NULL if no MIKEY message could be
 * created from the caps. */
static gchar *
gst_rtspsrc_stream_make_keymgmt (GstRTSPSrc * src, GstRTSPStream * stream)
{
  gchar *base64, *result = NULL;
  GstMIKEYMessage *mikey_msg;

  stream->srtcpparams = signal_get_srtcp_params (src, stream);
  if (stream->srtcpparams == NULL)
    stream->srtcpparams = default_srtcp_params ();

  mikey_msg = gst_mikey_message_new_from_caps (stream->srtcpparams);
  if (mikey_msg) {
    /* add policy '0' for our SSRC */
    gst_mikey_message_add_cs_srtp (mikey_msg, 0, stream->send_ssrc, 0);

    base64 = gst_mikey_message_base64_encode (mikey_msg);
    gst_mikey_message_unref (mikey_msg);

    if (base64) {
      result = gst_sdp_make_keymgmt (stream->conninfo.location, base64);
      g_free (base64);
    }
  }

  return result;
}
+
+static GstRTSPResult
+gst_rtsp_src_setup_stream_from_response (GstRTSPSrc * src,
+ GstRTSPStream * stream, GstRTSPMessage * response,
+ GstRTSPLowerTrans * protocols, gint retry, gint * rtpport, gint * rtcpport)
+{
+ gchar *resptrans = NULL;
+ GstRTSPTransport transport = { 0 };
+
+ gst_rtsp_message_get_header (response, GST_RTSP_HDR_TRANSPORT, &resptrans, 0);
+ if (!resptrans) {
+ gst_rtspsrc_stream_free_udp (stream);
+ goto no_transport;
+ }
+
+ /* parse transport, go to next stream on parse error */
+ if (gst_rtsp_transport_parse (resptrans, &transport) != GST_RTSP_OK) {
+ GST_WARNING_OBJECT (src, "failed to parse transport %s", resptrans);
+ return GST_RTSP_ELAST;
+ }
+
+ /* update allowed transports for other streams. once the transport of
+ * one stream has been determined, we make sure that all other streams
+ * are configured in the same way */
+ switch (transport.lower_transport) {
+ case GST_RTSP_LOWER_TRANS_TCP:
+ GST_DEBUG_OBJECT (src, "stream %p as TCP interleaved", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_TCP;
+ src->interleaved = TRUE;
+ if (src->version < GST_RTSP_VERSION_2_0) {
+ /* update free channels */
+ src->free_channel = MAX (transport.interleaved.min, src->free_channel);
+ src->free_channel = MAX (transport.interleaved.max, src->free_channel);
+ src->free_channel++;
+ }
+ break;
+ case GST_RTSP_LOWER_TRANS_UDP_MCAST:
+ /* only allow multicast for other streams */
+ GST_DEBUG_OBJECT (src, "stream %p as UDP multicast", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_UDP_MCAST;
+ /* if the server selected our ports, increment our counters so that
+ * we select a new port later */
+ if (src->next_port_num == transport.port.min &&
+ src->next_port_num + 1 == transport.port.max) {
+ src->next_port_num += 2;
+ }
+ break;
+ case GST_RTSP_LOWER_TRANS_UDP:
+ /* only allow unicast for other streams */
+ GST_DEBUG_OBJECT (src, "stream %p as UDP unicast", stream);
+ if (protocols)
+ *protocols = GST_RTSP_LOWER_TRANS_UDP;
+ break;
+ default:
+ GST_DEBUG_OBJECT (src, "stream %p unknown transport %d", stream,
+ transport.lower_transport);
+ break;
+ }
+
+ if (!src->interleaved || !retry) {
+ /* now configure the stream with the selected transport */
+ if (!gst_rtspsrc_stream_configure_transport (stream, &transport)) {
+ GST_DEBUG_OBJECT (src,
+ "could not configure stream %p transport, skipping stream", stream);
+ goto done;
+ } else if (stream->udpsrc[0] && stream->udpsrc[1] && rtpport && rtcpport) {
+ /* retain the first allocated UDP port pair */
+ g_object_get (G_OBJECT (stream->udpsrc[0]), "port", rtpport, NULL);
+ g_object_get (G_OBJECT (stream->udpsrc[1]), "port", rtcpport, NULL);
+ }
+ }
+ /* we need to activate at least one stream when we detect activity */
+ src->need_activate = TRUE;
+
+ /* stream is setup now */
+ stream->setup = TRUE;
+ stream->waiting_setup_response = FALSE;
+
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ gchar *prop, *media_properties;
+ gchar **props;
+ gint i;
+
+ if (gst_rtsp_message_get_header (response, GST_RTSP_HDR_MEDIA_PROPERTIES,
+ &media_properties, 0) != GST_RTSP_OK) {
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Error: No MEDIA_PROPERTY header in a SETUP request in RTSP 2.0"
+ " - this header is mandatory."));
+
+ gst_rtsp_message_unset (response);
+ return GST_RTSP_ERROR;
+ }
+
+ props = g_strsplit (media_properties, ",", -2);
+ for (i = 0; props[i]; i++) {
+ prop = props[i];
+
+ while (*prop == ' ')
+ prop++;
+
+ if (strstr (prop, "Random-Access")) {
+ gchar **random_seekable_val = g_strsplit (prop, "=", 2);
+
+ if (!random_seekable_val[1])
+ src->seekable = G_MAXFLOAT;
+ else
+ src->seekable = g_ascii_strtod (random_seekable_val[1], NULL);
+
+ g_strfreev (random_seekable_val);
+ } else if (!g_strcmp0 (prop, "No-Seeking")) {
+ src->seekable = -1.0;
+ } else if (!g_strcmp0 (prop, "Beginning-Only")) {
+ src->seekable = 0.0;
+ }
+ }
+
+ g_strfreev (props);
+ }
+
+done:
+ /* clean up our transport struct */
+ gst_rtsp_transport_init (&transport);
+ /* clean up used RTSP messages */
+ gst_rtsp_message_unset (response);
+
+ return GST_RTSP_OK;
+
+no_transport:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("Server did not select transport."));
+
+ gst_rtsp_message_unset (response);
+ return GST_RTSP_ERROR;
+ }
+}
+
/* Collect the pending responses of pipelined SETUP requests (RTSP 2.0 only)
 * and finish configuring each stream that is still waiting for its reply.
 * NOTE(review): the @async parameter is currently unused in this function. */
static GstRTSPResult
gst_rtspsrc_setup_streams_end (GstRTSPSrc * src, gboolean async)
{
  GList *tmp;
  GstRTSPConnInfo *conninfo;

  g_assert (src->version >= GST_RTSP_VERSION_2_0);

  conninfo = &src->conninfo;
  for (tmp = src->streams; tmp; tmp = tmp->next) {
    GstRTSPStream *stream = (GstRTSPStream *) tmp->data;
    GstRTSPMessage response = { 0, };

    if (!stream->waiting_setup_response)
      continue;

    /* without a main connection, responses arrive on the per-stream one */
    if (!src->conninfo.connection)
      conninfo = &((GstRTSPStream *) tmp->data)->conninfo;

    gst_rtsp_src_receive_response (src, conninfo, &response, NULL);

    gst_rtsp_src_setup_stream_from_response (src, stream,
        &response, NULL, 0, NULL, NULL);
  }

  return GST_RTSP_OK;
}
+
+/* Perform the SETUP request for all the streams.
+ *
+ * We ask the server for a specific transport, which initially includes all the
+ * ones we can support (UDP/TCP/MULTICAST). For the UDP transport we allocate
+ * two local UDP ports that we send to the server.
+ *
+ * Once the server replied with a transport, we configure the other streams
+ * with the same transport.
+ *
+ * In case setup requests are not pipelined, this function will also configure the
+ * stream for the selected transport, which basically means creating the pipeline.
+ * Otherwise, the first stream is setup right away from the reply and a
+ * CMD_FINALIZE_SETUP command is set for the stream pipelines to happen on the
+ * remaining streams from the RTSP thread.
+ */
+static GstRTSPResult
+gst_rtspsrc_setup_streams_start (GstRTSPSrc * src, gboolean async)
+{
+ GList *walk;
+ GstRTSPResult res = GST_RTSP_ERROR;
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPStream *stream = NULL;
+ GstRTSPLowerTrans protocols;
+ GstRTSPStatusCode code;
+ gboolean unsupported_real = FALSE;
+ gint rtpport, rtcpport;
+ GstRTSPUrl *url;
+ gchar *hval;
+ gchar *pipelined_request_id = NULL;
+
+ if (src->conninfo.connection) {
+ url = gst_rtsp_connection_get_url (src->conninfo.connection);
+ /* we initially allow all configured lower transports. based on the URL
+ * transports and the replies from the server we narrow them down. */
+ protocols = url->transports & src->cur_protocols;
+ } else {
+ url = NULL;
+ protocols = src->cur_protocols;
+ }
+
+ /* In ONVIF mode, we only want to try TCP transport */
+ if (src->onvif_mode && (protocols & GST_RTSP_LOWER_TRANS_TCP))
+ protocols = GST_RTSP_LOWER_TRANS_TCP;
+
+ if (protocols == 0)
+ goto no_protocols;
+
+ /* reset some state */
+ src->free_channel = 0;
+ src->interleaved = FALSE;
+ src->need_activate = FALSE;
+ /* keep track of next port number, 0 is random */
+ src->next_port_num = src->client_port_range.min;
+ rtpport = rtcpport = 0;
+
+ if (G_UNLIKELY (src->streams == NULL))
+ goto no_streams;
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPConnInfo *conninfo;
+ gchar *transports;
+ gint retry = 0;
+ guint mask = 0;
+ gboolean selected;
+ GstCaps *caps;
+
+ stream = (GstRTSPStream *) walk->data;
+
+ caps = stream_get_caps_for_pt (stream, stream->default_pt);
+ if (caps == NULL) {
+ GST_WARNING_OBJECT (src, "skipping stream %p, no caps", stream);
+ continue;
+ }
+
+ if (stream->skipped) {
+ GST_DEBUG_OBJECT (src, "skipping stream %p", stream);
+ continue;
+ }
+
+ /* see if we need to configure this stream */
+ if (!gst_rtsp_ext_list_configure_stream (src->extensions, caps)) {
+ GST_DEBUG_OBJECT (src, "skipping stream %p, disabled by extension",
+ stream);
+ continue;
+ }
+
+ g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_SELECT_STREAM], 0,
+ stream->id, caps, &selected);
+ if (!selected) {
+ GST_DEBUG_OBJECT (src, "skipping stream %p, disabled by signal", stream);
+ continue;
+ }
+
+ /* merge/overwrite global caps */
+ if (caps) {
+ guint j, num;
+ GstStructure *s;
+
+ s = gst_caps_get_structure (caps, 0);
+
+ num = gst_structure_n_fields (src->props);
+ for (j = 0; j < num; j++) {
+ const gchar *name;
+ const GValue *val;
+
+ name = gst_structure_nth_field_name (src->props, j);
+ val = gst_structure_get_value (src->props, name);
+ gst_structure_set_value (s, name, val);
+
+ GST_DEBUG_OBJECT (src, "copied %s", name);
+ }
+ }
+
+ /* skip setup if we have no URL for it */
+ if (stream->conninfo.location == NULL) {
+ GST_WARNING_OBJECT (src, "skipping stream %p, no setup", stream);
+ continue;
+ }
+
+ if (src->conninfo.connection == NULL) {
+ if (!gst_rtsp_conninfo_connect (src, &stream->conninfo, async)) {
+ GST_WARNING_OBJECT (src, "skipping stream %p, failed to connect",
+ stream);
+ continue;
+ }
+ conninfo = &stream->conninfo;
+ } else {
+ conninfo = &src->conninfo;
+ }
+ GST_DEBUG_OBJECT (src, "doing setup of stream %p with %s", stream,
+ stream->conninfo.location);
+
+ /* if we have a multicast connection, only suggest multicast from now on */
+ if (stream->is_multicast)
+ protocols &= GST_RTSP_LOWER_TRANS_UDP_MCAST;
+
+ next_protocol:
+ /* first selectable protocol */
+ while (protocol_masks[mask] && !(protocols & protocol_masks[mask]))
+ mask++;
+ if (!protocol_masks[mask])
+ goto no_protocols;
+
+ retry:
+ GST_DEBUG_OBJECT (src, "protocols = 0x%x, protocol mask = 0x%x", protocols,
+ protocol_masks[mask]);
+ /* create a string with first transport in line */
+ transports = NULL;
+ res = gst_rtspsrc_create_transports_string (src,
+ protocols & protocol_masks[mask], stream->profile, &transports);
+ if (res < 0 || transports == NULL)
+ goto setup_transport_failed;
+
+ if (strlen (transports) == 0) {
+ g_free (transports);
+ GST_DEBUG_OBJECT (src, "no transports found");
+ mask++;
+ goto next_protocol;
+ }
+
+ GST_DEBUG_OBJECT (src, "replace ports in %s", GST_STR_NULL (transports));
+
+ /* replace placeholders with real values, this function will optionally
+ * allocate UDP ports and other info needed to execute the setup request */
+ res = gst_rtspsrc_prepare_transports (stream, &transports,
+ retry > 0 ? rtpport : 0, retry > 0 ? rtcpport : 0);
+ if (res < 0) {
+ g_free (transports);
+ goto setup_transport_failed;
+ }
+
+ GST_DEBUG_OBJECT (src, "transport is now %s", GST_STR_NULL (transports));
+ /* create SETUP request */
+ res =
+ gst_rtspsrc_init_request (src, &request, GST_RTSP_SETUP,
+ stream->conninfo.location);
+ if (res < 0) {
+ g_free (transports);
+ goto create_request_failed;
+ }
+
+ if (src->version >= GST_RTSP_VERSION_2_0) {
+ if (!pipelined_request_id)
+ pipelined_request_id = g_strdup_printf ("%d",
+ g_random_int_range (0, G_MAXINT32));
+
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_PIPELINED_REQUESTS,
+ pipelined_request_id);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_ACCEPT_RANGES,
+ "npt, clock, smpte, clock");
+ }
+
+ /* select transport */
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_TRANSPORT, transports);
+
+ if (stream->is_backchannel && src->backchannel == BACKCHANNEL_ONVIF)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+ /* set up keys */
+ if (stream->profile == GST_RTSP_PROFILE_SAVP ||
+ stream->profile == GST_RTSP_PROFILE_SAVPF) {
+ hval = gst_rtspsrc_stream_make_keymgmt (src, stream);
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_KEYMGMT, hval);
+ }
+
+ /* if the user wants a non default RTP packet size we add the blocksize
+ * parameter */
+ if (src->rtp_blocksize > 0) {
+ hval = g_strdup_printf ("%d", src->rtp_blocksize);
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_BLOCKSIZE, hval);
+ }
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "request", ("SETUP stream %d",
+ stream->id));
+
+ /* handle the code ourselves */
+ res =
+ gst_rtspsrc_send (src, conninfo, &request,
+ pipelined_request_id ? NULL : &response, &code, NULL);
+ if (res < 0)
+ goto send_error;
+
+ switch (code) {
+ case GST_RTSP_STS_OK:
+ break;
+ case GST_RTSP_STS_UNSUPPORTED_TRANSPORT:
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+ /* cleanup of leftover transport */
+ gst_rtspsrc_stream_free_udp (stream);
+ /* MS WMServer RTSP MUST use same UDP pair in all SETUP requests;
+ * we might be in this case */
+ if (stream->container && rtpport && rtcpport && !retry) {
+ GST_DEBUG_OBJECT (src, "retrying with original port pair %u-%u",
+ rtpport, rtcpport);
+ retry++;
+ goto retry;
+ }
+ /* this transport did not go down well, but we may have others to try
+ * that we did not send yet, try those and only give up then
+ * but not without checking for lost cause/extension so we can
+ * post a nicer/more useful error message later */
+ if (!unsupported_real)
+ unsupported_real = stream->is_real;
+ /* select next available protocol, give up on this stream if none */
+ mask++;
+ while (protocol_masks[mask] && !(protocols & protocol_masks[mask]))
+ mask++;
+ if (!protocol_masks[mask] || unsupported_real)
+ continue;
+ else
+ goto retry;
+ default:
+ /* cleanup of leftover transport and move to the next stream */
+ gst_rtspsrc_stream_free_udp (stream);
+ goto response_error;
+ }
+
+
+ if (!pipelined_request_id) {
+ /* parse response transport */
+ res = gst_rtsp_src_setup_stream_from_response (src, stream,
+ &response, &protocols, retry, &rtpport, &rtcpport);
+ switch (res) {
+ case GST_RTSP_ERROR:
+ goto cleanup_error;
+ case GST_RTSP_ELAST:
+ goto retry;
+ default:
+ break;
+ }
+ } else {
+ stream->waiting_setup_response = TRUE;
+ /* we need to activate at least one stream when we detect activity */
+ src->need_activate = TRUE;
+ }
+
+ {
+ GList *skip = walk;
+
+ while (TRUE) {
+ GstRTSPStream *sskip;
+
+ skip = g_list_next (skip);
+ if (skip == NULL)
+ break;
+
+ sskip = (GstRTSPStream *) skip->data;
+
+ /* skip all streams with the same control url */
+ if (g_str_equal (stream->conninfo.location, sskip->conninfo.location)) {
+ GST_DEBUG_OBJECT (src, "found stream %p with same control %s",
+ sskip, sskip->conninfo.location);
+ sskip->skipped = TRUE;
+ }
+ }
+ }
+ gst_rtsp_message_unset (&request);
+ }
+
+ if (pipelined_request_id) {
+ gst_rtspsrc_setup_streams_end (src, TRUE);
+ }
+
+ /* store the transport protocol that was configured */
+ src->cur_protocols = protocols;
+
+ gst_rtsp_ext_list_stream_select (src->extensions, url);
+
+ if (pipelined_request_id)
+ g_free (pipelined_request_id);
+
+ /* if there is nothing to activate, error out */
+ if (!src->need_activate)
+ goto nothing_to_activate;
+
+ return res;
+
+ /* ERRORS */
+no_protocols:
+ {
+ /* no transport possible, post an error and stop */
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("Could not connect to server, no protocols left"));
+ return GST_RTSP_ERROR;
+ }
+no_streams:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("SDP contains no streams"));
+ return GST_RTSP_ERROR;
+ }
+create_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+ ("Could not create request. (%s)", str));
+ g_free (str);
+ goto cleanup_error;
+ }
+setup_transport_failed:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("Could not setup transport."));
+ res = GST_RTSP_ERROR;
+ goto cleanup_error;
+ }
+response_error:
+ {
+ const gchar *str = gst_rtsp_status_as_text (code);
+
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Error (%d): %s", code, GST_STR_NULL (str)));
+ res = GST_RTSP_ERROR;
+ goto cleanup_error;
+ }
+send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ if (res != GST_RTSP_EINTR) {
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
+ } else {
+ GST_WARNING_OBJECT (src, "send interrupted");
+ }
+ g_free (str);
+ goto cleanup_error;
+ }
+nothing_to_activate:
+ {
+ /* none of the available error codes is really right .. */
+ if (unsupported_real) {
+ GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
+ (_("No supported stream was found. You might need to install a "
+ "GStreamer RTSP extension plugin for Real media streams.")),
+ (NULL));
+ } else {
+ GST_ELEMENT_ERROR (src, STREAM, CODEC_NOT_FOUND,
+ (_("No supported stream was found. You might need to allow "
+ "more transport protocols or may otherwise be missing "
+ "the right GStreamer RTSP extension plugin.")), (NULL));
+ }
+ return GST_RTSP_ERROR;
+ }
+cleanup_error:
+ {
+ if (pipelined_request_id)
+ g_free (pipelined_request_id);
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+ return res;
+ }
+}
+
+/* Parse an RTSP Range value (e.g. from a DESCRIBE attribute or a PLAY
+ * response header) and map it onto @segment.  The parsed range replaces any
+ * previously cached one in src->range.
+ *
+ * @update_duration should only be TRUE for a range coming from a DESCRIBE;
+ * a PLAY response may describe a custom requested sub-range and must not
+ * overwrite the total stream duration.
+ *
+ * Returns FALSE (and resets @segment to an empty TIME segment) when the
+ * range string cannot be parsed.
+ */
+static gboolean
+gst_rtspsrc_parse_range (GstRTSPSrc * src, const gchar * range,
+    GstSegment * segment, gboolean update_duration)
+{
+  GstClockTime begin_seconds, end_seconds;
+  gint64 seconds;
+  GstRTSPTimeRange *therange;
+
+  if (src->range)
+    gst_rtsp_range_free (src->range);
+
+  if (gst_rtsp_range_parse (range, &therange) == GST_RTSP_OK) {
+    GST_DEBUG_OBJECT (src, "parsed range %s", range);
+    src->range = therange;
+  } else {
+    GST_DEBUG_OBJECT (src, "failed to parse range %s", range);
+    src->range = NULL;
+    gst_segment_init (segment, GST_FORMAT_TIME);
+    return FALSE;
+  }
+
+  gst_rtsp_range_get_times (therange, &begin_seconds, &end_seconds);
+
+  GST_DEBUG_OBJECT (src, "range: type %d, min %f - type %d, max %f ",
+      therange->min.type, therange->min.seconds, therange->max.type,
+      therange->max.seconds);
+
+  /* a 'now' or open-ended minimum maps to position 0 */
+  if (therange->min.type == GST_RTSP_TIME_NOW)
+    seconds = 0;
+  else if (therange->min.type == GST_RTSP_TIME_END)
+    seconds = 0;
+  else
+    seconds = begin_seconds;
+
+  GST_DEBUG_OBJECT (src, "range: min %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (seconds));
+
+  /* we need to start playback without clipping from the position reported by
+   * the server */
+  if (segment->rate > 0.0)
+    segment->start = seconds;
+  else
+    segment->stop = seconds;
+
+  segment->position = seconds;
+
+  /* a 'now' or open-ended maximum maps to -1, i.e. no known stop position */
+  if (therange->max.type == GST_RTSP_TIME_NOW)
+    seconds = -1;
+  else if (therange->max.type == GST_RTSP_TIME_END)
+    seconds = -1;
+  else
+    seconds = end_seconds;
+
+  GST_DEBUG_OBJECT (src, "range: max %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (seconds));
+
+  /* live (WMS) server might send overflowed large max as its idea of infinity,
+   * compensate to prevent problems later on */
+  if (seconds != -1 && seconds < 0) {
+    seconds = -1;
+    GST_DEBUG_OBJECT (src, "insane range, set to NONE");
+  }
+
+  /* live (WMS) might send min == max, which is not worth recording */
+  if (segment->duration == -1 && seconds == begin_seconds)
+    seconds = -1;
+
+  /* don't change duration with unknown value, we might have a valid value
+   * there that we want to keep. Also, the total duration of the stream
+   * can only be determined from the response to a DESCRIBE request, not
+   * from a PLAY request where we might have requested a custom range, so
+   * don't update duration in that case */
+  if (update_duration && seconds != -1) {
+    segment->duration = seconds;
+    GST_DEBUG_OBJECT (src, "set duration from range as %" GST_TIME_FORMAT,
+        GST_TIME_ARGS (seconds));
+  } else {
+    GST_DEBUG_OBJECT (src, "not updating existing duration %" GST_TIME_FORMAT
+        " from range %" GST_TIME_FORMAT, GST_TIME_ARGS (segment->duration),
+        GST_TIME_ARGS (seconds));
+  }
+
+  if (segment->rate > 0.0)
+    segment->stop = seconds;
+  else
+    segment->start = seconds;
+
+  return TRUE;
+}
+
+/* Parse clock provided by the server with following syntax:
+ *
+ * "GstNetTimeProvider <wrapped-clock> <server-IP:port> <clock-time>"
+ *
+ * On success a net client clock slaved to the server's time provider is
+ * stored in src->provided_clock and a CLOCK_PROVIDE message is posted on
+ * the bus.  Returns TRUE when a clock could be set up.
+ */
+static gboolean
+gst_rtspsrc_parse_gst_clock (GstRTSPSrc * src, const gchar * gstclock)
+{
+  gboolean res = FALSE;
+
+  if (g_str_has_prefix (gstclock, "GstNetTimeProvider ")) {
+    gchar **fields = NULL, **parts = NULL;
+    gchar *remote_ip, *str;
+    gint port;
+    GstClockTime base_time;
+    GstClock *netclock;
+
+    fields = g_strsplit (gstclock, " ", 0);
+
+    /* wrapped clock, not very interesting for now */
+    if (fields[1] == NULL)
+      goto cleanup;
+
+    /* remote IP address and port */
+    if ((str = fields[2]) == NULL)
+      goto cleanup;
+
+    parts = g_strsplit (str, ":", 0);
+
+    if ((remote_ip = parts[0]) == NULL)
+      goto cleanup;
+
+    if ((str = parts[1]) == NULL)
+      goto cleanup;
+
+    /* atoi() returns 0 on parse failure, and port 0 is unusable anyway */
+    port = atoi (str);
+    if (port == 0)
+      goto cleanup;
+
+    /* base-time */
+    if ((str = fields[3]) == NULL)
+      goto cleanup;
+
+    base_time = g_ascii_strtoull (str, NULL, 10);
+
+    netclock =
+        gst_net_client_clock_new ((gchar *) "GstRTSPClock", remote_ip, port,
+        base_time);
+
+    /* replace any previously provided clock */
+    if (src->provided_clock)
+      gst_object_unref (src->provided_clock);
+    src->provided_clock = netclock;
+
+    gst_element_post_message (GST_ELEMENT_CAST (src),
+        gst_message_new_clock_provide (GST_OBJECT_CAST (src),
+            src->provided_clock, TRUE));
+
+    res = TRUE;
+  cleanup:
+    /* g_strfreev() is NULL-safe, so this is fine even before parts is set */
+    g_strfreev (fields);
+    g_strfreev (parts);
+  }
+  return res;
+}
+
+/* Configure the source from a parsed SDP description: let extensions and the
+ * application inspect the SDP, parse the range/clock/control attributes,
+ * create one stream per SDP media and start the SETUP requests.  Leaves
+ * src->state at GST_RTSP_STATE_READY on success; on setup failure all
+ * partially created state is cleaned up again.
+ *
+ * must be called with the RTSP state lock */
+static GstRTSPResult
+gst_rtspsrc_open_from_sdp (GstRTSPSrc * src, GstSDPMessage * sdp,
+    gboolean async)
+{
+  GstRTSPResult res;
+  gint i, n_streams;
+
+  /* prepare global stream caps properties */
+  if (src->props)
+    gst_structure_remove_all_fields (src->props);
+  else
+    src->props = gst_structure_new_empty ("RTSPProperties");
+
+  DEBUG_SDP (src, sdp);
+
+  gst_rtsp_ext_list_parse_sdp (src->extensions, sdp, src->props);
+
+  /* let the app inspect and change the SDP */
+  g_signal_emit (src, gst_rtspsrc_signals[SIGNAL_ON_SDP], 0, sdp);
+
+  gst_segment_init (&src->segment, GST_FORMAT_TIME);
+
+  /* parse range for duration reporting. */
+  {
+    const gchar *range;
+
+    /* use the first "range" attribute that parses successfully */
+    for (i = 0;; i++) {
+      range = gst_sdp_message_get_attribute_val_n (sdp, "range", i);
+      if (range == NULL)
+        break;
+
+      /* keep track of the range and configure it in the segment */
+      if (gst_rtspsrc_parse_range (src, range, &src->segment, TRUE))
+        break;
+    }
+  }
+  /* parse clock information. This is GStreamer specific, a server can tell the
+   * client what clock it is using and wrap that in a network clock. The
+   * advantage of that is that we can slave to it. */
+  {
+    const gchar *gstclock;
+
+    for (i = 0;; i++) {
+      gstclock = gst_sdp_message_get_attribute_val_n (sdp, "x-gst-clock", i);
+      if (gstclock == NULL)
+        break;
+
+      /* parse the clock and expose it in the provide_clock method */
+      if (gst_rtspsrc_parse_gst_clock (src, gstclock))
+        break;
+    }
+  }
+  /* try to find a global control attribute. Note that a '*' means that we should
+   * do aggregate control with the current url (so we don't do anything and
+   * leave the current connection as is) */
+  {
+    const gchar *control;
+
+    for (i = 0;; i++) {
+      control = gst_sdp_message_get_attribute_val_n (sdp, "control", i);
+      if (control == NULL)
+        break;
+
+      /* only take fully qualified urls */
+      if (g_str_has_prefix (control, "rtsp://"))
+        break;
+    }
+    if (control) {
+      g_free (src->conninfo.location);
+      src->conninfo.location = g_strdup (control);
+      /* make a connection for this, if there was a connection already, nothing
+       * happens. */
+      if (gst_rtsp_conninfo_connect (src, &src->conninfo, async) < 0) {
+        GST_ERROR_OBJECT (src, "could not connect");
+      }
+    }
+    /* we need to keep the control url separate from the connection url because
+     * the rules for constructing the media control url need it */
+    g_free (src->control);
+    src->control = g_strdup (control);
+  }
+
+  /* create streams */
+  n_streams = gst_sdp_message_medias_len (sdp);
+  for (i = 0; i < n_streams; i++) {
+    gst_rtspsrc_create_stream (src, sdp, i, n_streams);
+  }
+
+  src->state = GST_RTSP_STATE_INIT;
+
+  /* setup streams */
+  if ((res = gst_rtspsrc_setup_streams_start (src, async)) < 0)
+    goto setup_failed;
+
+  /* reset our state */
+  src->need_range = TRUE;
+  src->server_side_trickmode = FALSE;
+  src->trickmode_interval = 0;
+
+  src->state = GST_RTSP_STATE_READY;
+
+  return res;
+
+  /* ERRORS */
+setup_failed:
+  {
+    GST_ERROR_OBJECT (src, "setup failed");
+    gst_rtspsrc_cleanup (src);
+    return res;
+  }
+}
+
+/* Retrieve the SDP description from the server: connect, send OPTIONS to
+ * discover the supported methods and negotiate the RTSP version, then send
+ * DESCRIBE and parse the application/sdp body into @sdp (newly allocated,
+ * owned by the caller).  Handles redirects by closing the connection and
+ * restarting against the new url.  On any error the connection and the
+ * request/response messages are cleaned up and an error result returned.
+ */
+static GstRTSPResult
+gst_rtspsrc_retrieve_sdp (GstRTSPSrc * src, GstSDPMessage ** sdp,
+    gboolean async)
+{
+  GstRTSPResult res;
+  GstRTSPMessage request = { 0 };
+  GstRTSPMessage response = { 0 };
+  guint8 *data;
+  guint size;
+  gchar *respcont = NULL;
+  /* fallback version list for the OPTIONS request: try the preferred
+   * version first and fall back to the other one */
+  GstRTSPVersion versions[] =
+      { GST_RTSP_VERSION_2_0, GST_RTSP_VERSION_INVALID };
+
+  src->version = src->default_version;
+  if (src->default_version == GST_RTSP_VERSION_2_0) {
+    versions[0] = GST_RTSP_VERSION_1_0;
+  }
+
+restart:
+  src->need_redirect = FALSE;
+
+  /* can't continue without a valid url */
+  if (G_UNLIKELY (src->conninfo.url == NULL)) {
+    res = GST_RTSP_EINVAL;
+    goto no_url;
+  }
+  src->tried_url_auth = FALSE;
+
+  if ((res = gst_rtsp_conninfo_connect (src, &src->conninfo, async)) < 0)
+    goto connect_failed;
+
+  /* create OPTIONS */
+  GST_DEBUG_OBJECT (src, "create options... (%s)", async ? "async" : "sync");
+  res =
+      gst_rtspsrc_init_request (src, &request, GST_RTSP_OPTIONS,
+      src->conninfo.url_str);
+  if (res < 0)
+    goto create_request_failed;
+
+  /* send OPTIONS */
+  request.type_data.request.version = src->version;
+  GST_DEBUG_OBJECT (src, "send options...");
+
+  if (async)
+    GST_ELEMENT_PROGRESS (src, CONTINUE, "open", ("Retrieving server options"));
+
+  if ((res =
+          gst_rtspsrc_send (src, &src->conninfo, &request, &response,
+              NULL, versions)) < 0) {
+    goto send_error;
+  }
+
+  /* the send may have downgraded the version; remember what was negotiated */
+  src->version = request.type_data.request.version;
+  GST_INFO_OBJECT (src, "Now using version: %s",
+      gst_rtsp_version_as_text (src->version));
+
+  /* parse OPTIONS */
+  if (!gst_rtspsrc_parse_methods (src, &response))
+    goto methods_error;
+
+  /* create DESCRIBE */
+  GST_DEBUG_OBJECT (src, "create describe...");
+  res =
+      gst_rtspsrc_init_request (src, &request, GST_RTSP_DESCRIBE,
+      src->conninfo.url_str);
+  if (res < 0)
+    goto create_request_failed;
+
+  /* we only accept SDP for now */
+  gst_rtsp_message_add_header (&request, GST_RTSP_HDR_ACCEPT,
+      "application/sdp");
+
+  if (src->backchannel == BACKCHANNEL_ONVIF)
+    gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+        BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+  /* TODO: Handle the case when backchannel is unsupported and goto restart */
+
+  /* send DESCRIBE */
+  GST_DEBUG_OBJECT (src, "send describe...");
+
+  if (async)
+    GST_ELEMENT_PROGRESS (src, CONTINUE, "open", ("Retrieving media info"));
+
+  if ((res =
+          gst_rtspsrc_send (src, &src->conninfo, &request, &response,
+              NULL, NULL)) < 0)
+    goto send_error;
+
+  /* we only perform redirect for describe and play, currently */
+  if (src->need_redirect) {
+    /* close connection, we don't have to send a TEARDOWN yet, ignore the
+     * result. */
+    gst_rtsp_conninfo_close (src, &src->conninfo, TRUE);
+
+    gst_rtsp_message_unset (&request);
+    gst_rtsp_message_unset (&response);
+
+    /* and now retry */
+    goto restart;
+  }
+
+  /* it could be that the DESCRIBE method was not implemented */
+  if (!(src->methods & GST_RTSP_DESCRIBE))
+    goto no_describe;
+
+  /* check if reply is SDP */
+  gst_rtsp_message_get_header (&response, GST_RTSP_HDR_CONTENT_TYPE, &respcont,
+      0);
+  /* could not be set but since the request returned OK, we assume it
+   * was SDP, else check it. */
+  if (respcont) {
+    /* the content-type may carry parameters after ';' (e.g. a charset);
+     * only compare the bare mimetype part */
+    const gchar *props = strchr (respcont, ';');
+
+    if (props) {
+      gchar *mimetype = g_strndup (respcont, props - respcont);
+
+      mimetype = g_strstrip (mimetype);
+      if (g_ascii_strcasecmp (mimetype, "application/sdp") != 0) {
+        g_free (mimetype);
+        goto wrong_content_type;
+      }
+
+      /* TODO: Check for charset property and do conversions of all messages if
+       * needed. Some servers actually send that property */
+
+      g_free (mimetype);
+    } else if (g_ascii_strcasecmp (respcont, "application/sdp") != 0) {
+      goto wrong_content_type;
+    }
+  }
+
+  /* get message body and parse as SDP */
+  gst_rtsp_message_get_body (&response, &data, &size);
+  if (data == NULL || size == 0)
+    goto no_describe;
+
+  GST_DEBUG_OBJECT (src, "parse SDP...");
+  gst_sdp_message_new (sdp);
+  gst_sdp_message_parse_buffer (data, size, *sdp);
+
+  /* clean up any messages */
+  gst_rtsp_message_unset (&request);
+  gst_rtsp_message_unset (&response);
+
+  return res;
+
+  /* ERRORS */
+no_url:
+  {
+    GST_ELEMENT_ERROR (src, RESOURCE, NOT_FOUND, (NULL),
+        ("No valid RTSP URL was provided"));
+    goto cleanup_error;
+  }
+connect_failed:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    if (res != GST_RTSP_EINTR) {
+      GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ_WRITE, (NULL),
+          ("Failed to connect. (%s)", str));
+    } else {
+      GST_WARNING_OBJECT (src, "connect interrupted");
+    }
+    g_free (str);
+    goto cleanup_error;
+  }
+create_request_failed:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+        ("Could not create request. (%s)", str));
+    g_free (str);
+    goto cleanup_error;
+  }
+send_error:
+  {
+    /* Don't post a message - the rtsp_send method will have
+     * taken care of it because we passed NULL for the response code */
+    goto cleanup_error;
+  }
+methods_error:
+  {
+    /* error was posted */
+    res = GST_RTSP_ERROR;
+    goto cleanup_error;
+  }
+wrong_content_type:
+  {
+    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+        ("Server does not support SDP, got %s.", respcont));
+    res = GST_RTSP_ERROR;
+    goto cleanup_error;
+  }
+no_describe:
+  {
+    GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+        ("Server can not provide an SDP."));
+    res = GST_RTSP_ERROR;
+    goto cleanup_error;
+  }
+cleanup_error:
+  {
+    if (src->conninfo.connection) {
+      GST_DEBUG_OBJECT (src, "free connection");
+      gst_rtsp_conninfo_close (src, &src->conninfo, TRUE);
+    }
+    gst_rtsp_message_unset (&request);
+    gst_rtsp_message_unset (&response);
+    return res;
+  }
+}
+
+/* Open the RTSP session: retrieve the SDP (unless one was already provided
+ * in src->sdp), configure the source from it and perform any pending
+ * initial seek.  Any failure sets src->open_error so later calls can tell
+ * the stream was in error.  When @async, the loop command is completed with
+ * the result in all cases.
+ */
+static GstRTSPResult
+gst_rtspsrc_open (GstRTSPSrc * src, gboolean async)
+{
+  GstRTSPResult ret;
+
+  /* minimal set of methods we assume until OPTIONS tells us otherwise */
+  src->methods =
+      GST_RTSP_SETUP | GST_RTSP_PLAY | GST_RTSP_PAUSE | GST_RTSP_TEARDOWN;
+
+  if (src->sdp == NULL) {
+    if ((ret = gst_rtspsrc_retrieve_sdp (src, &src->sdp, async)) < 0)
+      goto no_sdp;
+  }
+
+  if ((ret = gst_rtspsrc_open_from_sdp (src, src->sdp, async)) < 0)
+    goto open_failed;
+
+  if (src->initial_seek) {
+    if (!gst_rtspsrc_perform_seek (src, src->initial_seek))
+      goto initial_seek_failed;
+    /* seek consumed, drop the stored event */
+    gst_event_replace (&src->initial_seek, NULL);
+  }
+
+done:
+  if (async)
+    gst_rtspsrc_loop_end_cmd (src, CMD_OPEN, ret);
+
+  return ret;
+
+  /* ERRORS */
+no_sdp:
+  {
+    GST_WARNING_OBJECT (src, "can't get sdp");
+    src->open_error = TRUE;
+    goto done;
+  }
+open_failed:
+  {
+    GST_WARNING_OBJECT (src, "can't setup streaming from sdp");
+    src->open_error = TRUE;
+    goto done;
+  }
+initial_seek_failed:
+  {
+    GST_WARNING_OBJECT (src, "Failed to perform initial seek");
+    ret = GST_RTSP_ERROR;
+    src->open_error = TRUE;
+    goto done;
+  }
+}
+
+/* Tear down the RTSP session.  Unless @only_close is set (or the session
+ * never reached READY), a TEARDOWN request is sent per stream — or just once
+ * when an aggregate control url is available — before all connections are
+ * closed and the source state is cleaned up.  Request/send errors are
+ * reported but still fall through to the close/cleanup path.
+ */
+static GstRTSPResult
+gst_rtspsrc_close (GstRTSPSrc * src, gboolean async, gboolean only_close)
+{
+  GstRTSPMessage request = { 0 };
+  GstRTSPMessage response = { 0 };
+  GstRTSPResult res = GST_RTSP_OK;
+  GList *walk;
+  const gchar *control;
+
+  GST_DEBUG_OBJECT (src, "TEARDOWN...");
+
+  gst_rtspsrc_set_state (src, GST_STATE_READY);
+
+  if (src->state < GST_RTSP_STATE_READY) {
+    GST_DEBUG_OBJECT (src, "not ready, doing cleanup");
+    goto close;
+  }
+
+  if (only_close)
+    goto close;
+
+  /* construct a control url */
+  control = get_aggregate_control (src);
+
+  if (!(src->methods & (GST_RTSP_PLAY | GST_RTSP_TEARDOWN)))
+    goto not_supported;
+
+  for (walk = src->streams; walk; walk = g_list_next (walk)) {
+    GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+    const gchar *setup_url;
+    GstRTSPConnInfo *info;
+
+    /* try aggregate control first but do non-aggregate control otherwise */
+    if (control)
+      setup_url = control;
+    else if ((setup_url = stream->conninfo.location) == NULL)
+      continue;
+
+    /* prefer the session connection, fall back to the per-stream one */
+    if (src->conninfo.connection) {
+      info = &src->conninfo;
+    } else if (stream->conninfo.connection) {
+      info = &stream->conninfo;
+    } else {
+      continue;
+    }
+    if (!info->connected)
+      goto next;
+
+    /* do TEARDOWN */
+    res =
+        gst_rtspsrc_init_request (src, &request, GST_RTSP_TEARDOWN, setup_url);
+    GST_LOG_OBJECT (src, "Teardown on %s", setup_url);
+    if (res < 0)
+      goto create_request_failed;
+
+    if (stream->is_backchannel && src->backchannel == BACKCHANNEL_ONVIF)
+      gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+          BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+    if (async)
+      GST_ELEMENT_PROGRESS (src, CONTINUE, "close", ("Closing stream"));
+
+    if ((res =
+            gst_rtspsrc_send (src, info, &request, &response, NULL, NULL)) < 0)
+      goto send_error;
+
+    /* FIXME, parse result? */
+    gst_rtsp_message_unset (&request);
+    gst_rtsp_message_unset (&response);
+
+  next:
+    /* early exit when we did aggregate control */
+    if (control)
+      break;
+  }
+
+close:
+  /* close connections */
+  GST_DEBUG_OBJECT (src, "closing connection...");
+  gst_rtsp_conninfo_close (src, &src->conninfo, TRUE);
+  for (walk = src->streams; walk; walk = g_list_next (walk)) {
+    GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+    gst_rtsp_conninfo_close (src, &stream->conninfo, TRUE);
+  }
+
+  /* cleanup */
+  gst_rtspsrc_cleanup (src);
+
+  src->state = GST_RTSP_STATE_INVALID;
+
+  if (async)
+    gst_rtspsrc_loop_end_cmd (src, CMD_CLOSE, res);
+
+  return res;
+
+  /* ERRORS */
+create_request_failed:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+        ("Could not create request. (%s)", str));
+    g_free (str);
+    goto close;
+  }
+send_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    gst_rtsp_message_unset (&request);
+    if (res != GST_RTSP_EINTR) {
+      GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+          ("Could not send message. (%s)", str));
+    } else {
+      GST_WARNING_OBJECT (src, "TEARDOWN interrupted");
+    }
+    g_free (str);
+    goto close;
+  }
+not_supported:
+  {
+    GST_DEBUG_OBJECT (src,
+        "TEARDOWN and PLAY not supported, can't do TEARDOWN");
+    goto close;
+  }
+}
+
+/* RTP-Info is of the format:
+ *
+ * url=<URL>;[seq=<seqbase>;rtptime=<timebase>] [, url=...]
+ *
+ * rtptime corresponds to the timestamp for the NPT time given in the header
+ * seqbase corresponds to the next sequence number we received. This number
+ * indicates the first seqnum after the seek and should be used to discard
+ * packets that are from before the seek.
+ *
+ * For each comma-separated entry the matching stream is looked up by its
+ * setup url and its seqbase/timebase configured; values that are absent in
+ * the header stay -1 (uninitialized).
+ */
+static gboolean
+gst_rtspsrc_parse_rtpinfo (GstRTSPSrc * src, gchar * rtpinfo)
+{
+  gchar **infos;
+  gint i, j;
+
+  GST_DEBUG_OBJECT (src, "parsing RTP-Info %s", rtpinfo);
+
+  infos = g_strsplit (rtpinfo, ",", 0);
+  for (i = 0; infos[i]; i++) {
+    gchar **fields;
+    GstRTSPStream *stream;
+    gint32 seqbase;
+    gint64 timebase;
+
+    GST_DEBUG_OBJECT (src, "parsing info %s", infos[i]);
+
+    /* init values, types of seqbase and timebase are bigger than needed so we
+     * can store -1 as uninitialized values */
+    stream = NULL;
+    seqbase = -1;
+    timebase = -1;
+
+    /* parse url, find stream for url.
+     * parse seq and rtptime. The seq number should be configured in the rtp
+     * depayloader or session manager to detect gaps. Same for the rtptime, it
+     * should be used to create an initial time newsegment. */
+    fields = g_strsplit (infos[i], ";", 0);
+    for (j = 0; fields[j]; j++) {
+      GST_DEBUG_OBJECT (src, "parsing field %s", fields[j]);
+      /* remove leading whitespace */
+      fields[j] = g_strchug (fields[j]);
+      if (g_str_has_prefix (fields[j], "url=")) {
+        /* get the url and the stream */
+        stream =
+            find_stream (src, (fields[j] + 4), (gpointer) find_stream_by_setup);
+      } else if (g_str_has_prefix (fields[j], "seq=")) {
+        seqbase = atoi (fields[j] + 4);
+      } else if (g_str_has_prefix (fields[j], "rtptime=")) {
+        timebase = g_ascii_strtoll (fields[j] + 8, NULL, 10);
+      }
+    }
+    g_strfreev (fields);
+    /* now we need to store the values for the caps of the stream */
+    if (stream != NULL) {
+      GST_DEBUG_OBJECT (src,
+          "found stream %p, setting: seqbase %d, timebase %" G_GINT64_FORMAT,
+          stream, seqbase, timebase);
+
+      /* we have a stream, configure detected params */
+      stream->seqbase = seqbase;
+      stream->timebase = timebase;
+    }
+  }
+  g_strfreev (infos);
+
+  return TRUE;
+}
+
+/* Handle an RTCP interval value (in milliseconds) announced by the server:
+ * push it as "rtcp-min-interval" into every stream's rtpsession and, when
+ * the manager supports it, switch RTCP sync to RTP-time based sync because
+ * servers that send this header may provide unreliable RTCP SR data.
+ * A value of 0 (or an unparseable string) is ignored.
+ */
+static void
+gst_rtspsrc_handle_rtcp_interval (GstRTSPSrc * src, gchar * rtcp)
+{
+  guint64 interval;
+  GList *walk;
+
+  interval = strtoul (rtcp, NULL, 10);
+  GST_DEBUG_OBJECT (src, "rtcp interval: %" G_GUINT64_FORMAT " ms", interval);
+
+  if (!interval)
+    return;
+
+  /* convert milliseconds to GstClockTime nanoseconds */
+  interval *= GST_MSECOND;
+
+  for (walk = src->streams; walk; walk = g_list_next (walk)) {
+    GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+
+    /* already (optionally) retrieved this when configuring manager */
+    if (stream->session) {
+      GObject *rtpsession = stream->session;
+
+      GST_DEBUG_OBJECT (src, "configure rtcp interval in session %p",
+          rtpsession);
+      g_object_set (rtpsession, "rtcp-min-interval", interval, NULL);
+    }
+  }
+
+  /* now it happens that (Xenon) server sending this may also provide bogus
+   * RTCP SR sync data (i.e. with quite some jitter), so never mind those
+   * and just use RTP-Info to sync */
+  if (src->manager) {
+    GObjectClass *klass;
+
+    /* the property is optional on the manager, probe before setting */
+    klass = G_OBJECT_GET_CLASS (G_OBJECT (src->manager));
+    if (g_object_class_find_property (klass, "rtcp-sync")) {
+      GST_DEBUG_OBJECT (src, "configuring rtp sync method");
+      g_object_set (src->manager, "rtcp-sync", RTCP_SYNC_RTP, NULL);
+    }
+  }
+}
+
+/* Parse a floating point value from @dstr in a locale-independent way,
+ * accepting both '.' and ',' as the decimal separator. */
+static gdouble
+gst_rtspsrc_get_float (const gchar * dstr)
+{
+  gchar canonical[G_ASCII_DTOSTR_BUF_SIZE] = { 0, };
+  gchar *p;
+
+  /* copy into a bounded buffer and canonicalise the decimal separator so we
+   * can handle float strings in the form "24.930" or "24,930" irrespective
+   * of the current locale */
+  g_strlcpy (canonical, dstr, sizeof (canonical));
+  for (p = canonical; *p != '\0'; p++) {
+    if (*p == ',')
+      *p = '.';
+  }
+
+  return g_ascii_strtod (canonical, NULL);
+}
+
+/* Build the value of a Range header for a PLAY request from @segment.
+ * In ONVIF mode a UTC clock range is produced (dates computed relative to
+ * the 1900-01-01 UTC prime epoch), otherwise an npt range.  For negative
+ * rates start/stop are swapped so the range is always ascending.  Returns a
+ * newly allocated string to be freed by the caller.
+ */
+static gchar *
+gen_range_header (GstRTSPSrc * src, GstSegment * segment)
+{
+  GstRTSPTimeRange range = { 0, };
+  gdouble begin_seconds, end_seconds;
+
+  if (segment->rate > 0) {
+    begin_seconds = (gdouble) segment->start / GST_SECOND;
+    end_seconds = (gdouble) segment->stop / GST_SECOND;
+  } else {
+    begin_seconds = (gdouble) segment->stop / GST_SECOND;
+    end_seconds = (gdouble) segment->start / GST_SECOND;
+  }
+
+  if (src->onvif_mode) {
+    GDateTime *prime_epoch, *datetime;
+
+    range.unit = GST_RTSP_RANGE_CLOCK;
+
+    prime_epoch = g_date_time_new_utc (1900, 1, 1, 0, 0, 0);
+
+    datetime = g_date_time_add_seconds (prime_epoch, begin_seconds);
+
+    range.min.type = GST_RTSP_TIME_UTC;
+    range.min2.year = g_date_time_get_year (datetime);
+    range.min2.month = g_date_time_get_month (datetime);
+    range.min2.day = g_date_time_get_day_of_month (datetime);
+    /* seconds since midnight of that day */
+    range.min.seconds =
+        g_date_time_get_seconds (datetime) +
+        g_date_time_get_minute (datetime) * 60 +
+        g_date_time_get_hour (datetime) * 60 * 60;
+
+    g_date_time_unref (datetime);
+
+    datetime = g_date_time_add_seconds (prime_epoch, end_seconds);
+
+    range.max.type = GST_RTSP_TIME_UTC;
+    range.max2.year = g_date_time_get_year (datetime);
+    range.max2.month = g_date_time_get_month (datetime);
+    range.max2.day = g_date_time_get_day_of_month (datetime);
+    range.max.seconds =
+        g_date_time_get_seconds (datetime) +
+        g_date_time_get_minute (datetime) * 60 +
+        g_date_time_get_hour (datetime) * 60 * 60;
+
+    g_date_time_unref (datetime);
+    g_date_time_unref (prime_epoch);
+  } else {
+    range.unit = GST_RTSP_RANGE_NPT;
+
+    /* preserve a 'now' start / open end when the server described the
+     * original range that way */
+    if (src->range && src->range->min.type == GST_RTSP_TIME_NOW) {
+      range.min.type = GST_RTSP_TIME_NOW;
+    } else {
+      range.min.type = GST_RTSP_TIME_SECONDS;
+      range.min.seconds = begin_seconds;
+    }
+
+    if (src->range && src->range->max.type == GST_RTSP_TIME_END) {
+      range.max.type = GST_RTSP_TIME_END;
+    } else {
+      range.max.type = GST_RTSP_TIME_SECONDS;
+      range.max.seconds = end_seconds;
+    }
+  }
+
+  /* Don't set end bounds when not required to */
+  if (!GST_CLOCK_TIME_IS_VALID (segment->stop)) {
+    if (segment->rate > 0)
+      range.max.type = GST_RTSP_TIME_END;
+    else
+      range.min.type = GST_RTSP_TIME_END;
+  }
+
+  return gst_rtsp_range_to_string (&range);
+}
+
+/* Forget any previously learned RTP base values for @stream: reset
+ * timebase/seqbase and strip "clock-base"/"seqnum-base" from all cached
+ * caps so new values can be negotiated.  The updated caps are re-applied to
+ * the stream's RTP udpsrc for the default payload type, and need_caps is
+ * set so caps are pushed again on the next buffer.
+ */
+static void
+clear_rtp_base (GstRTSPSrc * src, GstRTSPStream * stream)
+{
+  guint i, len;
+
+  stream->timebase = -1;
+  stream->seqbase = -1;
+
+  len = stream->ptmap->len;
+  for (i = 0; i < len; i++) {
+    PtMapItem *item = &g_array_index (stream->ptmap, PtMapItem, i);
+    GstStructure *s;
+
+    if (item->caps == NULL)
+      continue;
+
+    /* caps may be shared; make them writable before mutating */
+    item->caps = gst_caps_make_writable (item->caps);
+    s = gst_caps_get_structure (item->caps, 0);
+    gst_structure_remove_fields (s, "clock-base", "seqnum-base", NULL);
+    if (item->pt == stream->default_pt && stream->udpsrc[0])
+      g_object_set (stream->udpsrc[0], "caps", item->caps, NULL);
+  }
+  stream->need_caps = TRUE;
+}
+
+/* Make sure an RTSP session is open, opening it on demand when the state is
+ * still below READY.  Returns GST_RTSP_OK when already open, GST_RTSP_ERROR
+ * when a previous open attempt failed, or the result of the open. */
+static GstRTSPResult
+gst_rtspsrc_ensure_open (GstRTSPSrc * src, gboolean async)
+{
+  GstRTSPResult res;
+
+  /* already open (READY or beyond): nothing to do */
+  if (src->state >= GST_RTSP_STATE_READY)
+    return GST_RTSP_OK;
+
+  /* an earlier open attempt failed; don't retry */
+  if (src->open_error) {
+    GST_DEBUG_OBJECT (src, "the stream was in error");
+    return GST_RTSP_ERROR;
+  }
+
+  if (async)
+    gst_rtspsrc_loop_start_cmd (src, CMD_OPEN);
+
+  res = gst_rtspsrc_open (src, async);
+  if (res < 0)
+    GST_DEBUG_OBJECT (src, "failed to open stream");
+
+  return res;
+}
+
+static GstRTSPResult
+gst_rtspsrc_play (GstRTSPSrc * src, GstSegment * segment, gboolean async,
+ const gchar * seek_style)
+{
+ GstRTSPMessage request = { 0 };
+ GstRTSPMessage response = { 0 };
+ GstRTSPResult res = GST_RTSP_OK;
+ GList *walk;
+ gchar *hval;
+ gint hval_idx;
+ const gchar *control;
+ GstSegment requested;
+
+ GST_DEBUG_OBJECT (src, "PLAY...");
+
+restart:
+ if ((res = gst_rtspsrc_ensure_open (src, async)) < 0)
+ goto open_failed;
+
+ if (!(src->methods & GST_RTSP_PLAY))
+ goto not_supported;
+
+ if (src->state == GST_RTSP_STATE_PLAYING)
+ goto was_playing;
+
+ if (!src->conninfo.connection || !src->conninfo.connected)
+ goto done;
+
+ requested = *segment;
+
+ /* send some dummy packets before we activate the receive in the
+ * udp sources */
+ gst_rtspsrc_send_dummy_packets (src);
+
+ /* require new SR packets */
+ if (src->manager)
+ g_signal_emit_by_name (src->manager, "reset-sync", NULL);
+
+ /* construct a control url */
+ control = get_aggregate_control (src);
+
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ const gchar *setup_url;
+ GstRTSPConnInfo *conninfo;
+
+ /* try aggregate control first but do non-aggregate control otherwise */
+ if (control)
+ setup_url = control;
+ else if ((setup_url = stream->conninfo.location) == NULL)
+ continue;
+
+ if (src->conninfo.connection) {
+ conninfo = &src->conninfo;
+ } else if (stream->conninfo.connection) {
+ conninfo = &stream->conninfo;
+ } else {
+ continue;
+ }
+
+ /* do play */
+ res = gst_rtspsrc_init_request (src, &request, GST_RTSP_PLAY, setup_url);
+ if (res < 0)
+ goto create_request_failed;
+
+ if (src->need_range && src->seekable >= 0.0) {
+ hval = gen_range_header (src, segment);
+
+ gst_rtsp_message_take_header (&request, GST_RTSP_HDR_RANGE, hval);
+
+ /* store the newsegment event so it can be sent from the streaming thread. */
+ src->need_segment = TRUE;
+ }
+
+ if (segment->rate != 1.0) {
+ gchar scale_val[G_ASCII_DTOSTR_BUF_SIZE];
+ gchar speed_val[G_ASCII_DTOSTR_BUF_SIZE];
+
+ if (src->server_side_trickmode) {
+ g_ascii_dtostr (scale_val, sizeof (scale_val), segment->rate);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SCALE, scale_val);
+ } else if (segment->rate < 0.0) {
+ g_ascii_dtostr (scale_val, sizeof (scale_val), -1.0);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SCALE, scale_val);
+
+ if (ABS (segment->rate) != 1.0) {
+ g_ascii_dtostr (speed_val, sizeof (speed_val), ABS (segment->rate));
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SPEED, speed_val);
+ }
+ } else {
+ g_ascii_dtostr (speed_val, sizeof (speed_val), segment->rate);
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SPEED, speed_val);
+ }
+ }
+
+ if (src->onvif_mode) {
+ if (segment->flags & GST_SEEK_FLAG_TRICKMODE_KEY_UNITS) {
+ gchar *hval;
+
+ if (src->trickmode_interval)
+ hval =
+ g_strdup_printf ("intra/%" G_GUINT64_FORMAT,
+ src->trickmode_interval / GST_MSECOND);
+ else
+ hval = g_strdup ("intra");
+
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_FRAMES, hval);
+
+ g_free (hval);
+ } else if (segment->flags & GST_SEEK_FLAG_TRICKMODE_FORWARD_PREDICTED) {
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_FRAMES,
+ "predicted");
+ }
+ }
+
+ if (seek_style)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_SEEK_STYLE,
+ seek_style);
+
+ /* when we have an ONVIF audio backchannel, the PLAY request must have the
+ * Require: header when doing either aggregate or non-aggregate control */
+ if (src->backchannel == BACKCHANNEL_ONVIF &&
+ (control || stream->is_backchannel))
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+ BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+ if (src->onvif_mode) {
+ if (src->onvif_rate_control)
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_RATE_CONTROL,
+ "yes");
+ else
+ gst_rtsp_message_add_header (&request, GST_RTSP_HDR_RATE_CONTROL, "no");
+ }
+
+ if (async)
+ GST_ELEMENT_PROGRESS (src, CONTINUE, "request", ("Sending PLAY request"));
+
+ if ((res =
+ gst_rtspsrc_send (src, conninfo, &request, &response, NULL, NULL))
+ < 0)
+ goto send_error;
+
+ if (src->need_redirect) {
+ GST_DEBUG_OBJECT (src,
+ "redirect: tearing down and restarting with new url");
+ /* teardown and restart with new url */
+ gst_rtspsrc_close (src, TRUE, FALSE);
+ /* reset protocols to force re-negotiation with redirected url */
+ src->cur_protocols = src->protocols;
+ gst_rtsp_message_unset (&request);
+ gst_rtsp_message_unset (&response);
+ goto restart;
+ }
+
+ /* seek may have silently failed as it is not supported */
+ if (!(src->methods & GST_RTSP_PLAY)) {
+ GST_DEBUG_OBJECT (src, "PLAY Range not supported; re-enable PLAY");
+
+ if (src->version >= GST_RTSP_VERSION_2_0 && src->seekable >= 0.0) {
+ GST_WARNING_OBJECT (src, "Server declared stream as seekable but"
+ " playing with range failed... Ignoring information.");
+ }
+ /* obviously it is supported as we made it here */
+ src->methods |= GST_RTSP_PLAY;
+ src->seekable = -1.0;
+ /* but there is nothing to parse in the response,
+ * so convey we have no idea and not to expect anything particular */
+ clear_rtp_base (src, stream);
+ if (control) {
+ GList *run;
+
+ /* need to do for all streams */
+ for (run = src->streams; run; run = g_list_next (run))
+ clear_rtp_base (src, (GstRTSPStream *) run->data);
+ }
+ /* NOTE the above also disables npt based eos detection */
+ /* and below forces position to 0,
+ * which is visible feedback we lost the plot */
+ segment->start = segment->position = src->last_pos;
+ }
+
+ gst_rtsp_message_unset (&request);
+
+ /* parse RTP npt field. This is the current position in the stream (Normal
+ * Play Time) and should be put in the NEWSEGMENT position field. */
+ if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_RANGE, &hval,
+ 0) == GST_RTSP_OK)
+ gst_rtspsrc_parse_range (src, hval, segment, FALSE);
+
+ /* assume 1.0 rate now, overwrite when the SCALE or SPEED headers are present. */
+ segment->rate = 1.0;
+
+ /* parse Speed header. This is the intended playback rate of the stream
+ * and should be put in the NEWSEGMENT rate field. */
+ if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_SPEED, &hval,
+ 0) == GST_RTSP_OK) {
+ segment->rate = gst_rtspsrc_get_float (hval);
+ } else if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_SCALE,
+ &hval, 0) == GST_RTSP_OK) {
+ segment->rate = gst_rtspsrc_get_float (hval);
+ }
+
+ /* parse the RTP-Info header field (if ANY) to get the base seqnum and timestamp
+ * for the RTP packets. If this is not present, we assume all starts from 0...
+ * This is info for the RTP session manager that we pass to it in caps. */
+ hval_idx = 0;
+ while (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_RTP_INFO,
+ &hval, hval_idx++) == GST_RTSP_OK)
+ gst_rtspsrc_parse_rtpinfo (src, hval);
+
+ /* some servers indicate RTCP parameters in PLAY response,
+ * rather than properly in SDP */
+ if (gst_rtsp_message_get_header (&response, GST_RTSP_HDR_RTCP_INTERVAL,
+ &hval, 0) == GST_RTSP_OK)
+ gst_rtspsrc_handle_rtcp_interval (src, hval);
+
+ gst_rtsp_message_unset (&response);
+
+ /* early exit when we did aggregate control */
+ if (control)
+ break;
+ }
+
+ src->out_segment = *segment;
+
+ if (src->clip_out_segment) {
+ /* Only clip the output segment when the server has answered with valid
+ * values, we cannot know otherwise whether the requested bounds were
+ * available */
+ if (GST_CLOCK_TIME_IS_VALID (src->segment.start) &&
+ GST_CLOCK_TIME_IS_VALID (requested.start))
+ src->out_segment.start = MAX (src->out_segment.start, requested.start);
+ if (GST_CLOCK_TIME_IS_VALID (src->segment.stop) &&
+ GST_CLOCK_TIME_IS_VALID (requested.stop))
+ src->out_segment.stop = MIN (src->out_segment.stop, requested.stop);
+ }
+
+ /* configure the caps of the streams after we parsed all headers. Only reset
+ * the manager object when we set a new Range header (we did a seek) */
+ gst_rtspsrc_configure_caps (src, segment, src->need_range);
+
+ /* set to PLAYING after we have configured the caps, otherwise we
+ * might end up calling request_key (with SRTP) while caps are still
+ * being configured. */
+ gst_rtspsrc_set_state (src, GST_STATE_PLAYING);
+
+ /* set again when needed */
+ src->need_range = FALSE;
+
+ src->running = TRUE;
+ src->base_time = -1;
+ src->state = GST_RTSP_STATE_PLAYING;
+
+ /* mark discont */
+ GST_DEBUG_OBJECT (src, "mark DISCONT, we did a seek to another position");
+ for (walk = src->streams; walk; walk = g_list_next (walk)) {
+ GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+ stream->discont = TRUE;
+ }
+
+done:
+ if (async)
+ gst_rtspsrc_loop_end_cmd (src, CMD_PLAY, res);
+
+ return res;
+
+ /* ERRORS */
+open_failed:
+ {
+ GST_WARNING_OBJECT (src, "failed to open stream");
+ goto done;
+ }
+not_supported:
+ {
+ GST_WARNING_OBJECT (src, "PLAY is not supported");
+ goto done;
+ }
+was_playing:
+ {
+ GST_WARNING_OBJECT (src, "we were already PLAYING");
+ goto done;
+ }
+create_request_failed:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+ ("Could not create request. (%s)", str));
+ g_free (str);
+ goto done;
+ }
+send_error:
+ {
+ gchar *str = gst_rtsp_strresult (res);
+
+ gst_rtsp_message_unset (&request);
+ if (res != GST_RTSP_EINTR) {
+ GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+ ("Could not send message. (%s)", str));
+ } else {
+ GST_WARNING_OBJECT (src, "PLAY interrupted");
+ }
+ g_free (str);
+ goto done;
+ }
+}
+
+/* Send PAUSE to the server and move the element state to PAUSED.
+ *
+ * Tries one aggregate PAUSE for the whole session when an aggregate
+ * control URL exists, otherwise sends a PAUSE per stream.  When @async
+ * is TRUE, progress and completion are reported through the RTSP
+ * command loop (CMD_PAUSE).
+ *
+ * Returns a GstRTSPResult; the "soft" failure labels only log and fall
+ * through to done so the async command is always finished.
+ */
+static GstRTSPResult
+gst_rtspsrc_pause (GstRTSPSrc * src, gboolean async)
+{
+  GstRTSPResult res = GST_RTSP_OK;
+  GstRTSPMessage request = { 0 };
+  GstRTSPMessage response = { 0 };
+  GList *walk;
+  const gchar *control;
+
+  GST_DEBUG_OBJECT (src, "PAUSE...");
+
+  if ((res = gst_rtspsrc_ensure_open (src, async)) < 0)
+    goto open_failed;
+
+  if (!(src->methods & GST_RTSP_PAUSE))
+    goto not_supported;
+
+  if (src->state == GST_RTSP_STATE_READY)
+    goto was_paused;
+
+  if (!src->conninfo.connection || !src->conninfo.connected)
+    goto no_connection;
+
+  /* construct a control url */
+  control = get_aggregate_control (src);
+
+  /* loop over the streams. We might exit the loop early when we could do an
+   * aggregate control */
+  for (walk = src->streams; walk; walk = g_list_next (walk)) {
+    GstRTSPStream *stream = (GstRTSPStream *) walk->data;
+    GstRTSPConnInfo *conninfo;
+    const gchar *setup_url;
+
+    /* try aggregate control first but do non-aggregate control otherwise */
+    if (control)
+      setup_url = control;
+    else if ((setup_url = stream->conninfo.location) == NULL)
+      continue;
+
+    /* prefer the session connection, fall back to the per-stream one,
+     * and skip streams that have neither */
+    if (src->conninfo.connection) {
+      conninfo = &src->conninfo;
+    } else if (stream->conninfo.connection) {
+      conninfo = &stream->conninfo;
+    } else {
+      continue;
+    }
+
+    if (async)
+      GST_ELEMENT_PROGRESS (src, CONTINUE, "request",
+          ("Sending PAUSE request"));
+
+    if ((res =
+            gst_rtspsrc_init_request (src, &request, GST_RTSP_PAUSE,
+                setup_url)) < 0)
+      goto create_request_failed;
+
+    /* when we have an ONVIF audio backchannel, the PAUSE request must have the
+     * Require: header when doing either aggregate or non-aggregate control */
+    if (src->backchannel == BACKCHANNEL_ONVIF &&
+        (control || stream->is_backchannel))
+      gst_rtsp_message_add_header (&request, GST_RTSP_HDR_REQUIRE,
+          BACKCHANNEL_ONVIF_HDR_REQUIRE_VAL);
+
+    if ((res =
+            gst_rtspsrc_send (src, conninfo, &request, &response, NULL,
+                NULL)) < 0)
+      goto send_error;
+
+    gst_rtsp_message_unset (&request);
+    gst_rtsp_message_unset (&response);
+
+    /* exit early when we did aggregate control */
+    if (control)
+      break;
+  }
+
+  /* change element states now */
+  gst_rtspsrc_set_state (src, GST_STATE_PAUSED);
+
+  /* NOTE: intentional fall-through into no_connection below; the RTSP
+   * state becomes READY whether or not a request was actually sent */
+no_connection:
+  src->state = GST_RTSP_STATE_READY;
+
+done:
+  if (async)
+    gst_rtspsrc_loop_end_cmd (src, CMD_PAUSE, res);
+
+  return res;
+
+  /* ERRORS */
+open_failed:
+  {
+    GST_DEBUG_OBJECT (src, "failed to open stream");
+    goto done;
+  }
+not_supported:
+  {
+    GST_DEBUG_OBJECT (src, "PAUSE is not supported");
+    goto done;
+  }
+was_paused:
+  {
+    GST_DEBUG_OBJECT (src, "we were already PAUSED");
+    goto done;
+  }
+create_request_failed:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_ERROR (src, LIBRARY, INIT, (NULL),
+        ("Could not create request. (%s)", str));
+    g_free (str);
+    goto done;
+  }
+send_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    gst_rtsp_message_unset (&request);
+    if (res != GST_RTSP_EINTR) {
+      GST_ELEMENT_ERROR (src, RESOURCE, WRITE, (NULL),
+          ("Could not send message. (%s)", str));
+    } else {
+      GST_WARNING_OBJECT (src, "PAUSE interrupted");
+    }
+    g_free (str);
+    goto done;
+  }
+}
+
+/* GstBin::handle_message vfunc.
+ *
+ * Filters messages posted by our internal elements: STREAM_START and
+ * EOS messages are dropped, the first udpsrc timeout triggers a
+ * RECONNECT command, and error messages from RTP udpsrcs are swallowed
+ * as long as the combined flow of all streams is still OK.  Everything
+ * else is forwarded to the parent class.  Dropped messages are unreffed
+ * here (we take ownership of @message).
+ */
+static void
+gst_rtspsrc_handle_message (GstBin * bin, GstMessage * message)
+{
+  GstRTSPSrc *rtspsrc;
+
+  rtspsrc = GST_RTSPSRC (bin);
+
+  switch (GST_MESSAGE_TYPE (message)) {
+    case GST_MESSAGE_STREAM_START:
+    case GST_MESSAGE_EOS:
+      /* dropped, not forwarded to the parent bin */
+      gst_message_unref (message);
+      break;
+    case GST_MESSAGE_ELEMENT:
+    {
+      const GstStructure *s = gst_message_get_structure (message);
+
+      if (gst_structure_has_name (s, "GstUDPSrcTimeout")) {
+        gboolean ignore_timeout;
+
+        GST_DEBUG_OBJECT (bin, "timeout on UDP port");
+
+        GST_OBJECT_LOCK (rtspsrc);
+        ignore_timeout = rtspsrc->ignore_timeout;
+        rtspsrc->ignore_timeout = TRUE;
+        GST_OBJECT_UNLOCK (rtspsrc);
+
+        /* we only act on the first udp timeout message, others are irrelevant
+         * and can be ignored. */
+        if (!ignore_timeout)
+          gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_RECONNECT, CMD_LOOP);
+        /* eat and free */
+        gst_message_unref (message);
+        return;
+      }
+      GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+      break;
+    }
+    case GST_MESSAGE_ERROR:
+    {
+      GstObject *udpsrc;
+      GstRTSPStream *stream;
+      GstFlowReturn ret;
+
+      udpsrc = GST_MESSAGE_SRC (message);
+
+      GST_DEBUG_OBJECT (rtspsrc, "got error from %s",
+          GST_ELEMENT_NAME (udpsrc));
+
+      /* errors from elements that are not one of our udpsrcs are fatal
+       * and simply forwarded */
+      stream = find_stream (rtspsrc, udpsrc, (gpointer) find_stream_by_udpsrc);
+      if (!stream)
+        goto forward;
+
+      /* we ignore the RTCP udpsrc */
+      if (stream->udpsrc[1] == GST_ELEMENT_CAST (udpsrc))
+        goto done;
+
+      /* if we get error messages from the udp sources, that's not a problem as
+       * long as not all of them error out. We also don't really know what the
+       * problem is, the message does not give enough detail... */
+      ret = gst_rtspsrc_combine_flows (rtspsrc, stream, GST_FLOW_NOT_LINKED);
+      GST_DEBUG_OBJECT (rtspsrc, "combined flows: %s", gst_flow_get_name (ret));
+      if (ret != GST_FLOW_OK)
+        goto forward;
+
+      /* NOTE: done/forward are local to this case statement */
+    done:
+      gst_message_unref (message);
+      break;
+
+    forward:
+      /* fatal but not our message, forward */
+      GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+      break;
+    }
+    default:
+    {
+      GST_BIN_CLASS (parent_class)->handle_message (bin, message);
+      break;
+    }
+  }
+}
+
+/* The RTSP command thread: executes one queued command per iteration.
+ * Runs as the GstTask function created in gst_rtspsrc_start() and
+ * pauses itself again when no command is pending. */
+static void
+gst_rtspsrc_thread (GstRTSPSrc * src)
+{
+  gint cmd;
+  ParameterRequest *req = NULL;
+
+  GST_OBJECT_LOCK (src);
+  cmd = src->pending_cmd;
+
+  /* Decide what the *next* pending command will be: for the commands
+   * below we keep the loop running (CMD_LOOP) unless the
+   * get/set-parameter queue still holds work, in which case the queued
+   * request's command comes next.  Any other command puts us back to
+   * CMD_WAIT. */
+  if (cmd == CMD_RECONNECT || cmd == CMD_PLAY || cmd == CMD_PAUSE
+      || cmd == CMD_LOOP || cmd == CMD_OPEN || cmd == CMD_GET_PARAMETER
+      || cmd == CMD_SET_PARAMETER) {
+    if (g_queue_is_empty (&src->set_get_param_q)) {
+      src->pending_cmd = CMD_LOOP;
+    } else {
+      ParameterRequest *next_req;
+      /* GET/SET_PARAMETER commands carry a queued request payload */
+      if (cmd == CMD_GET_PARAMETER || cmd == CMD_SET_PARAMETER) {
+        req = g_queue_pop_head (&src->set_get_param_q);
+      }
+      next_req = g_queue_peek_head (&src->set_get_param_q);
+      src->pending_cmd = next_req ? next_req->cmd : CMD_LOOP;
+    }
+  } else
+    src->pending_cmd = CMD_WAIT;
+  GST_DEBUG_OBJECT (src, "got command %s", cmd_to_string (cmd));
+
+  /* we got the message command, so ensure communication is possible again */
+  gst_rtspsrc_connection_flush (src, FALSE);
+
+  /* busy_cmd marks which command executes while the object lock is
+   * dropped below */
+  src->busy_cmd = cmd;
+  GST_OBJECT_UNLOCK (src);
+
+  switch (cmd) {
+    case CMD_OPEN:
+      gst_rtspsrc_open (src, TRUE);
+      break;
+    case CMD_PLAY:
+      gst_rtspsrc_play (src, &src->segment, TRUE, NULL);
+      break;
+    case CMD_PAUSE:
+      gst_rtspsrc_pause (src, TRUE);
+      break;
+    case CMD_CLOSE:
+      gst_rtspsrc_close (src, TRUE, FALSE);
+      break;
+    case CMD_GET_PARAMETER:
+      gst_rtspsrc_get_parameter (src, req);
+      break;
+    case CMD_SET_PARAMETER:
+      gst_rtspsrc_set_parameter (src, req);
+      break;
+    case CMD_LOOP:
+      gst_rtspsrc_loop (src);
+      break;
+    case CMD_RECONNECT:
+      gst_rtspsrc_reconnect (src, FALSE);
+      break;
+    default:
+      break;
+  }
+
+  GST_OBJECT_LOCK (src);
+  /* No more cmds, wake any waiters */
+  g_cond_broadcast (&src->cmd_cond);
+  /* and go back to sleep */
+  if (src->pending_cmd == CMD_WAIT) {
+    if (src->task)
+      gst_task_pause (src->task);
+  }
+  /* reset waiting */
+  src->busy_cmd = CMD_WAIT;
+  GST_OBJECT_UNLOCK (src);
+}
+
+/* Create the RTSP command task (if not created yet) and reset the
+ * pending command to CMD_WAIT.  Returns FALSE when the task cannot be
+ * created, TRUE otherwise. */
+static gboolean
+gst_rtspsrc_start (GstRTSPSrc * src)
+{
+  gboolean ok = TRUE;
+
+  GST_DEBUG_OBJECT (src, "starting");
+
+  GST_OBJECT_LOCK (src);
+
+  src->pending_cmd = CMD_WAIT;
+
+  if (src->task == NULL) {
+    GstTask *task =
+        gst_task_new ((GstTaskFunction) gst_rtspsrc_thread, src, NULL);
+
+    if (task != NULL) {
+      gst_task_set_lock (task, GST_RTSP_STREAM_GET_LOCK (src));
+      src->task = task;
+    } else {
+      ok = FALSE;
+    }
+  }
+  GST_OBJECT_UNLOCK (src);
+
+  if (!ok)
+    GST_ERROR_OBJECT (src, "failed to create task");
+
+  return ok;
+}
+
+/* Stop and join the command task and synchronously close the
+ * connection.  Always returns TRUE. */
+static gboolean
+gst_rtspsrc_stop (GstRTSPSrc * src)
+{
+  GstTask *task;
+
+  GST_DEBUG_OBJECT (src, "stopping");
+
+  /* also cancels pending task */
+  gst_rtspsrc_loop_send_cmd (src, CMD_WAIT, CMD_ALL);
+
+  GST_OBJECT_LOCK (src);
+  if ((task = src->task)) {
+    src->task = NULL;
+    /* drop the object lock before stopping/joining: the task function
+     * (gst_rtspsrc_thread) takes the object lock itself */
+    GST_OBJECT_UNLOCK (src);
+
+    gst_task_stop (task);
+
+    /* make sure it is not running */
+    GST_RTSP_STREAM_LOCK (src);
+    GST_RTSP_STREAM_UNLOCK (src);
+
+    /* now wait for the task to finish */
+    gst_task_join (task);
+
+    /* and free the task */
+    gst_object_unref (GST_OBJECT (task));
+
+    GST_OBJECT_LOCK (src);
+  }
+  GST_OBJECT_UNLOCK (src);
+
+  /* ensure synchronously all is closed and clean */
+  gst_rtspsrc_close (src, FALSE, TRUE);
+
+  return TRUE;
+}
+
+/* GstElement::change_state vfunc.
+ *
+ * Drives the RTSP state machine by sending commands to the command
+ * thread around the parent class' state change.  The return value is
+ * computed here rather than taken from internal elements: live sources
+ * report NO_PREROLL in PAUSED. */
+static GstStateChangeReturn
+gst_rtspsrc_change_state (GstElement * element, GstStateChange transition)
+{
+  GstRTSPSrc *rtspsrc;
+  GstStateChangeReturn ret;
+
+  rtspsrc = GST_RTSPSRC (element);
+
+  /* upward/downward work before chaining up */
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      if (!gst_rtspsrc_start (rtspsrc))
+        goto start_failed;
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      /* init some state */
+      rtspsrc->cur_protocols = rtspsrc->protocols;
+      /* first attempt, don't ignore timeouts */
+      rtspsrc->ignore_timeout = FALSE;
+      rtspsrc->open_error = FALSE;
+      if (rtspsrc->is_live)
+        gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_OPEN, 0);
+      else
+        gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PLAY, 0);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      set_manager_buffer_mode (rtspsrc);
+      /* fall-through */
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      if (rtspsrc->is_live) {
+        /* unblock the tcp tasks and make the loop waiting */
+        if (gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_WAIT, CMD_LOOP)) {
+          /* make sure it is waiting before we send PAUSE or PLAY below */
+          GST_RTSP_STREAM_LOCK (rtspsrc);
+          GST_RTSP_STREAM_UNLOCK (rtspsrc);
+        }
+      }
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      rtspsrc->group_id = GST_GROUP_ID_INVALID;
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (ret == GST_STATE_CHANGE_FAILURE)
+    goto done;
+
+  /* post-chain-up: issue commands and pick the return value ourselves */
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      ret = GST_STATE_CHANGE_SUCCESS;
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      if (rtspsrc->is_live)
+        ret = GST_STATE_CHANGE_NO_PREROLL;
+      else
+        ret = GST_STATE_CHANGE_SUCCESS;
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      if (rtspsrc->is_live)
+        gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PLAY, 0);
+      ret = GST_STATE_CHANGE_SUCCESS;
+      break;
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      if (rtspsrc->is_live) {
+        /* send pause request and keep the idle task around */
+        gst_rtspsrc_loop_send_cmd (rtspsrc, CMD_PAUSE, CMD_LOOP);
+      }
+      ret = GST_STATE_CHANGE_SUCCESS;
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      rtspsrc->seek_seqnum = GST_SEQNUM_INVALID;
+      /* wait for the close to complete (bounded by teardown_timeout) */
+      gst_rtspsrc_loop_send_cmd_and_wait (rtspsrc, CMD_CLOSE, CMD_ALL,
+          rtspsrc->teardown_timeout);
+      ret = GST_STATE_CHANGE_SUCCESS;
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      gst_rtspsrc_stop (rtspsrc);
+      ret = GST_STATE_CHANGE_SUCCESS;
+      break;
+    default:
+      /* Otherwise it's success, we don't want to return spurious
+       * NO_PREROLL or ASYNC from internal elements as we care for
+       * state changes ourselves here
+       *
+       * This is to catch PAUSED->PAUSED and PLAYING->PLAYING transitions.
+       */
+      if (GST_STATE_TRANSITION_NEXT (transition) == GST_STATE_PAUSED)
+        ret = GST_STATE_CHANGE_NO_PREROLL;
+      else
+        ret = GST_STATE_CHANGE_SUCCESS;
+      break;
+  }
+
+done:
+  return ret;
+
+start_failed:
+  {
+    GST_DEBUG_OBJECT (rtspsrc, "start failed");
+    return GST_STATE_CHANGE_FAILURE;
+  }
+}
+
+/* GstElement::send_event vfunc.
+ *
+ * Seek events are executed immediately when the RTSP state machine is
+ * at least READY, otherwise they are stored (ownership taken) and
+ * replayed later.  Other downstream events are pushed on our source
+ * pads; everything else goes to the parent class. */
+static gboolean
+gst_rtspsrc_send_event (GstElement * element, GstEvent * event)
+{
+  GstRTSPSrc *src = GST_RTSPSRC (element);
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+    if (src->state >= GST_RTSP_STATE_READY) {
+      gboolean handled = gst_rtspsrc_perform_seek (src, event);
+
+      gst_event_unref (event);
+      return handled;
+    }
+    /* Store for later use */
+    src->initial_seek = event;
+    return TRUE;
+  }
+
+  if (GST_EVENT_IS_DOWNSTREAM (event))
+    return gst_rtspsrc_push_event (src, event);
+
+  return GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
+}
+
+
+/*** GSTURIHANDLER INTERFACE *************************************************/
+
+/* GstURIHandler: rtspsrc always acts as a source. */
+static GstURIType
+gst_rtspsrc_uri_get_type (GType type)
+{
+  return GST_URI_SRC;
+}
+
+/* GstURIHandler: NULL-terminated list of URI schemes we accept. */
+static const gchar *const *
+gst_rtspsrc_uri_get_protocols (GType type)
+{
+  static const gchar *protocols[] = {
+    "rtsp", "rtspu", "rtspt", "rtsph", "rtsp-sdp",
+    "rtsps", "rtspsu", "rtspst", "rtspsh", NULL
+  };
+
+  return protocols;
+}
+
+/* GstURIHandler: return a copy of the currently configured location. */
+static gchar *
+gst_rtspsrc_uri_get_uri (GstURIHandler * handler)
+{
+  GstRTSPSrc *self = GST_RTSPSRC (handler);
+
+  /* FIXME: make thread-safe */
+  return g_strdup (self->conninfo.location);
+}
+
+/* GstURIHandler: configure a new location.
+ *
+ * Accepts regular rtsp(s)* URLs as well as the "rtsp-sdp://" scheme,
+ * where the SDP is embedded in the URI itself.  On success, ownership
+ * of the parsed url/sdp is transferred into src->conninfo / src->sdp
+ * and any previous values are freed.  On failure @error is set and the
+ * previous configuration is left untouched. */
+static gboolean
+gst_rtspsrc_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+    GError ** error)
+{
+  GstRTSPSrc *src;
+  GstRTSPResult res;
+  GstSDPResult sres;
+  GstRTSPUrl *newurl = NULL;
+  GstSDPMessage *sdp = NULL;
+
+  src = GST_RTSPSRC (handler);
+
+  /* same URI, we're fine */
+  if (src->conninfo.location && uri && !strcmp (uri, src->conninfo.location))
+    goto was_ok;
+
+  if (g_str_has_prefix (uri, "rtsp-sdp://")) {
+    /* SDP-in-URI: parse the SDP; newurl stays NULL in this branch */
+    sres = gst_sdp_message_new (&sdp);
+    if (sres < 0)
+      goto sdp_failed;
+
+    GST_DEBUG_OBJECT (src, "parsing SDP message");
+    sres = gst_sdp_message_parse_uri (uri, sdp);
+    if (sres < 0)
+      goto invalid_sdp;
+  } else {
+    /* try to parse */
+    GST_DEBUG_OBJECT (src, "parsing URI");
+    if ((res = gst_rtsp_url_parse (uri, &newurl)) < 0)
+      goto parse_error;
+  }
+
+  /* if worked, free previous and store new url object along with the original
+   * location. */
+  GST_DEBUG_OBJECT (src, "configuring URI");
+  g_free (src->conninfo.location);
+  src->conninfo.location = g_strdup (uri);
+  gst_rtsp_url_free (src->conninfo.url);
+  src->conninfo.url = newurl;
+  g_free (src->conninfo.url_str);
+  if (newurl)
+    src->conninfo.url_str = gst_rtsp_url_get_request_uri (src->conninfo.url);
+  else
+    src->conninfo.url_str = NULL;
+
+  if (src->sdp)
+    gst_sdp_message_free (src->sdp);
+  src->sdp = sdp;
+  src->from_sdp = sdp != NULL;
+
+  GST_DEBUG_OBJECT (src, "set uri: %s", GST_STR_NULL (uri));
+  GST_DEBUG_OBJECT (src, "request uri is: %s",
+      GST_STR_NULL (src->conninfo.url_str));
+
+  return TRUE;
+
+  /* Special cases */
+was_ok:
+  {
+    GST_DEBUG_OBJECT (src, "URI was ok: '%s'", GST_STR_NULL (uri));
+    return TRUE;
+  }
+sdp_failed:
+  {
+    GST_ERROR_OBJECT (src, "Could not create new SDP (%d)", sres);
+    g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+        "Could not create SDP");
+    return FALSE;
+  }
+invalid_sdp:
+  {
+    GST_ERROR_OBJECT (src, "Not a valid SDP (%d) '%s'", sres,
+        GST_STR_NULL (uri));
+    gst_sdp_message_free (sdp);
+    g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+        "Invalid SDP");
+    return FALSE;
+  }
+parse_error:
+  {
+    GST_ERROR_OBJECT (src, "Not a valid RTSP url '%s' (%d)",
+        GST_STR_NULL (uri), res);
+    g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+        "Invalid RTSP URI");
+    return FALSE;
+  }
+}
+
+/* Wire up the GstURIHandler interface vfuncs. */
+static void
+gst_rtspsrc_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+  GstURIHandlerInterface *iface = g_iface;
+
+  iface->get_type = gst_rtspsrc_uri_get_type;
+  iface->get_uri = gst_rtspsrc_uri_get_uri;
+  iface->set_uri = gst_rtspsrc_uri_set_uri;
+  iface->get_protocols = gst_rtspsrc_uri_get_protocols;
+}
+
+
+/* send GET_PARAMETER */
+/* Execute a queued GET_PARAMETER request against the aggregate control
+ * URL.
+ *
+ * The outcome (result, status code, reason and body) is always
+ * delivered on @req's promise, also on the error paths, and @req is
+ * freed afterwards. */
+static GstRTSPResult
+gst_rtspsrc_get_parameter (GstRTSPSrc * src, ParameterRequest * req)
+{
+  GstRTSPMessage request = { 0 };
+  GstRTSPMessage response = { 0 };
+  GstRTSPResult res;
+  GstRTSPStatusCode code = GST_RTSP_STS_OK;
+  const gchar *control;
+  gchar *recv_body = NULL;
+  guint recv_body_len;
+
+  GST_DEBUG_OBJECT (src, "creating server get_parameter");
+
+  g_assert (req);
+
+  if ((res = gst_rtspsrc_ensure_open (src, FALSE)) < 0)
+    goto open_failed;
+
+  control = get_aggregate_control (src);
+  if (control == NULL)
+    goto no_control;
+
+  if (!(src->methods & GST_RTSP_GET_PARAMETER))
+    goto not_supported;
+
+  gst_rtspsrc_connection_flush (src, FALSE);
+
+  res = gst_rtsp_message_init_request (&request, GST_RTSP_GET_PARAMETER,
+      control);
+  if (res < 0)
+    goto create_request_failed;
+
+  /* default content type when the request did not specify one */
+  res = gst_rtsp_message_add_header (&request, GST_RTSP_HDR_CONTENT_TYPE,
+      req->content_type == NULL ? "text/parameters" : req->content_type);
+  if (res < 0)
+    goto add_content_hdr_failed;
+
+  if (req->body && req->body->len) {
+    res =
+        gst_rtsp_message_set_body (&request, (guint8 *) req->body->str,
+        req->body->len);
+    if (res < 0)
+      goto set_body_failed;
+  }
+
+  if ((res = gst_rtspsrc_send (src, &src->conninfo,
+              &request, &response, &code, NULL)) < 0)
+    goto send_error;
+
+  res = gst_rtsp_message_get_body (&response, (guint8 **) & recv_body,
+      &recv_body_len);
+  if (res < 0)
+    goto get_body_failed;
+
+done:
+  {
+    /* recv_body points into the response message; the structure copies
+     * it, so this must happen before the message is unset below */
+    gst_promise_reply (req->promise,
+        gst_structure_new ("get-parameter-reply",
+            "rtsp-result", G_TYPE_INT, res,
+            "rtsp-code", G_TYPE_INT, code,
+            "rtsp-reason", G_TYPE_STRING, gst_rtsp_status_as_text (code),
+            "body", G_TYPE_STRING, GST_STR_NULL (recv_body), NULL));
+    free_param_data (req);
+
+
+    gst_rtsp_message_unset (&request);
+    gst_rtsp_message_unset (&response);
+
+    return res;
+  }
+
+  /* ERRORS */
+open_failed:
+  {
+    GST_DEBUG_OBJECT (src, "failed to open stream");
+    goto done;
+  }
+no_control:
+  {
+    GST_DEBUG_OBJECT (src, "no control url to send GET_PARAMETER");
+    res = GST_RTSP_ERROR;
+    goto done;
+  }
+not_supported:
+  {
+    GST_DEBUG_OBJECT (src, "GET_PARAMETER is not supported");
+    res = GST_RTSP_ERROR;
+    goto done;
+  }
+create_request_failed:
+  {
+    GST_DEBUG_OBJECT (src, "could not create GET_PARAMETER request");
+    goto done;
+  }
+add_content_hdr_failed:
+  {
+    GST_DEBUG_OBJECT (src, "could not add content header");
+    goto done;
+  }
+set_body_failed:
+  {
+    GST_DEBUG_OBJECT (src, "could not set body");
+    goto done;
+  }
+send_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+        ("Could not send get-parameter. (%s)", str));
+    g_free (str);
+    goto done;
+  }
+get_body_failed:
+  {
+    GST_DEBUG_OBJECT (src, "could not get body");
+    goto done;
+  }
+}
+
+/* send SET_PARAMETER */
+/* Execute a queued SET_PARAMETER request against the aggregate control
+ * URL.
+ *
+ * The outcome (result, status code and reason) is always delivered on
+ * @req's promise, also on the error paths, and @req is freed
+ * afterwards. */
+static GstRTSPResult
+gst_rtspsrc_set_parameter (GstRTSPSrc * src, ParameterRequest * req)
+{
+  GstRTSPMessage request = { 0 };
+  GstRTSPMessage response = { 0 };
+  GstRTSPResult res = GST_RTSP_OK;
+  GstRTSPStatusCode code = GST_RTSP_STS_OK;
+  const gchar *control;
+
+  GST_DEBUG_OBJECT (src, "creating server set_parameter");
+
+  g_assert (req);
+
+  if ((res = gst_rtspsrc_ensure_open (src, FALSE)) < 0)
+    goto open_failed;
+
+  control = get_aggregate_control (src);
+  if (control == NULL)
+    goto no_control;
+
+  if (!(src->methods & GST_RTSP_SET_PARAMETER))
+    goto not_supported;
+
+  gst_rtspsrc_connection_flush (src, FALSE);
+
+  res =
+      gst_rtsp_message_init_request (&request, GST_RTSP_SET_PARAMETER, control);
+  if (res < 0)
+    goto create_request_failed;
+
+  /* default content type when the request did not specify one */
+  res = gst_rtsp_message_add_header (&request, GST_RTSP_HDR_CONTENT_TYPE,
+      req->content_type == NULL ? "text/parameters" : req->content_type);
+  if (res < 0)
+    goto add_content_hdr_failed;
+
+  if (req->body && req->body->len) {
+    res =
+        gst_rtsp_message_set_body (&request, (guint8 *) req->body->str,
+        req->body->len);
+
+    if (res < 0)
+      goto set_body_failed;
+  }
+
+  if ((res = gst_rtspsrc_send (src, &src->conninfo,
+              &request, &response, &code, NULL)) < 0)
+    goto send_error;
+
+done:
+  {
+    gst_promise_reply (req->promise, gst_structure_new ("set-parameter-reply",
+            "rtsp-result", G_TYPE_INT, res,
+            "rtsp-code", G_TYPE_INT, code,
+            "rtsp-reason", G_TYPE_STRING, gst_rtsp_status_as_text (code),
+            NULL));
+    free_param_data (req);
+
+    gst_rtsp_message_unset (&request);
+    gst_rtsp_message_unset (&response);
+
+    return res;
+  }
+
+  /* ERRORS */
+open_failed:
+  {
+    GST_DEBUG_OBJECT (src, "failed to open stream");
+    goto done;
+  }
+no_control:
+  {
+    GST_DEBUG_OBJECT (src, "no control url to send SET_PARAMETER");
+    res = GST_RTSP_ERROR;
+    goto done;
+  }
+not_supported:
+  {
+    GST_DEBUG_OBJECT (src, "SET_PARAMETER is not supported");
+    res = GST_RTSP_ERROR;
+    goto done;
+  }
+create_request_failed:
+  {
+    /* this used to jump to send_error and emitted a misleading
+     * "Could not send set-parameter" warning; report the real failure,
+     * consistent with gst_rtspsrc_get_parameter() */
+    GST_DEBUG_OBJECT (src, "could not create SET_PARAMETER request");
+    goto done;
+  }
+add_content_hdr_failed:
+  {
+    GST_DEBUG_OBJECT (src, "could not add content header");
+    goto done;
+  }
+set_body_failed:
+  {
+    GST_DEBUG_OBJECT (src, "could not set body");
+    goto done;
+  }
+send_error:
+  {
+    gchar *str = gst_rtsp_strresult (res);
+
+    GST_ELEMENT_WARNING (src, RESOURCE, WRITE, (NULL),
+        ("Could not send set-parameter. (%s)", str));
+    g_free (str);
+    goto done;
+  }
+}
+
+/* One header line of an RTSP message, used for debug dumping. */
+typedef struct _RTSPKeyValue
+{
+  GstRTSPHeaderField field;     /* well-known header id (INVALID for custom) */
+  gchar *value;                 /* header value string */
+  gchar *custom_key;            /* custom header string (field is INVALID then) */
+} RTSPKeyValue;
+
+/* Call @func on every RTSPKeyValue element of @array. */
+static void
+key_value_foreach (GArray * array, GFunc func, gpointer user_data)
+{
+  guint idx;
+
+  g_return_if_fail (array != NULL);
+
+  for (idx = 0; idx < array->len; idx++)
+    func (&g_array_index (array, RTSPKeyValue, idx), user_data);
+}
+
+/* GFunc callback: log one RTSPKeyValue header at LOG level. */
+static void
+dump_key_value (gpointer data, gpointer user_data G_GNUC_UNUSED)
+{
+  const RTSPKeyValue *key_value = data;
+  GstRTSPSrc *src = GST_RTSPSRC (user_data);
+  const gchar *key_string = (key_value->custom_key != NULL) ?
+      key_value->custom_key : gst_rtsp_header_as_text (key_value->field);
+
+  GST_LOG_OBJECT (src, " key: '%s', value: '%s'", key_string,
+      key_value->value);
+}
+
+/* Dump the body of @msg at LOG level, if it has one (helper for
+ * gst_rtspsrc_print_rtsp_message below). */
+static void
+gst_rtspsrc_print_msg_body (GstRTSPSrc * src, const GstRTSPMessage * msg)
+{
+  guint8 *data;
+  guint size;
+  GString *body_string;
+
+  gst_rtsp_message_get_body (msg, &data, &size);
+  if (size > 0) {
+    body_string = g_string_new_len ((const gchar *) data, size);
+    GST_LOG_OBJECT (src, " %s(%d)", body_string->str, size);
+    g_string_free (body_string, TRUE);
+  }
+}
+
+/* Log a complete RTSP/HTTP message (request, response or data) at LOG
+ * level.  Does nothing unless the debug threshold is at least LOG. */
+static void
+gst_rtspsrc_print_rtsp_message (GstRTSPSrc * src, const GstRTSPMessage * msg)
+{
+  guint8 *data;
+  guint size;
+
+  g_return_if_fail (src != NULL);
+  g_return_if_fail (msg != NULL);
+
+  if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_LOG)
+    return;
+
+  GST_LOG_OBJECT (src, "--------------------------------------------");
+  switch (msg->type) {
+    case GST_RTSP_MESSAGE_REQUEST:
+      GST_LOG_OBJECT (src, "RTSP request message %p", msg);
+      GST_LOG_OBJECT (src, " request line:");
+      GST_LOG_OBJECT (src, " method: '%s'",
+          gst_rtsp_method_as_text (msg->type_data.request.method));
+      GST_LOG_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
+      GST_LOG_OBJECT (src, " version: '%s'",
+          gst_rtsp_version_as_text (msg->type_data.request.version));
+      GST_LOG_OBJECT (src, " headers:");
+      key_value_foreach (msg->hdr_fields, dump_key_value, src);
+      GST_LOG_OBJECT (src, " body:");
+      gst_rtspsrc_print_msg_body (src, msg);
+      break;
+    case GST_RTSP_MESSAGE_RESPONSE:
+      GST_LOG_OBJECT (src, "RTSP response message %p", msg);
+      GST_LOG_OBJECT (src, " status line:");
+      GST_LOG_OBJECT (src, " code: '%d'", msg->type_data.response.code);
+      GST_LOG_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
+      /* the version format string was missing its closing quote */
+      GST_LOG_OBJECT (src, " version: '%s'",
+          gst_rtsp_version_as_text (msg->type_data.response.version));
+      GST_LOG_OBJECT (src, " headers:");
+      key_value_foreach (msg->hdr_fields, dump_key_value, src);
+      gst_rtsp_message_get_body (msg, &data, &size);
+      GST_LOG_OBJECT (src, " body: length %d", size);
+      gst_rtspsrc_print_msg_body (src, msg);
+      break;
+    case GST_RTSP_MESSAGE_HTTP_REQUEST:
+      GST_LOG_OBJECT (src, "HTTP request message %p", msg);
+      GST_LOG_OBJECT (src, " request line:");
+      GST_LOG_OBJECT (src, " method: '%s'",
+          gst_rtsp_method_as_text (msg->type_data.request.method));
+      GST_LOG_OBJECT (src, " uri: '%s'", msg->type_data.request.uri);
+      GST_LOG_OBJECT (src, " version: '%s'",
+          gst_rtsp_version_as_text (msg->type_data.request.version));
+      GST_LOG_OBJECT (src, " headers:");
+      key_value_foreach (msg->hdr_fields, dump_key_value, src);
+      GST_LOG_OBJECT (src, " body:");
+      gst_rtspsrc_print_msg_body (src, msg);
+      break;
+    case GST_RTSP_MESSAGE_HTTP_RESPONSE:
+      GST_LOG_OBJECT (src, "HTTP response message %p", msg);
+      GST_LOG_OBJECT (src, " status line:");
+      GST_LOG_OBJECT (src, " code: '%d'", msg->type_data.response.code);
+      GST_LOG_OBJECT (src, " reason: '%s'", msg->type_data.response.reason);
+      GST_LOG_OBJECT (src, " version: '%s'",
+          gst_rtsp_version_as_text (msg->type_data.response.version));
+      GST_LOG_OBJECT (src, " headers:");
+      key_value_foreach (msg->hdr_fields, dump_key_value, src);
+      gst_rtsp_message_get_body (msg, &data, &size);
+      GST_LOG_OBJECT (src, " body: length %d", size);
+      gst_rtspsrc_print_msg_body (src, msg);
+      break;
+    case GST_RTSP_MESSAGE_DATA:
+      GST_LOG_OBJECT (src, "RTSP data message %p", msg);
+      GST_LOG_OBJECT (src, " channel: '%d'", msg->type_data.data.channel);
+      GST_LOG_OBJECT (src, " size: '%d'", msg->body_size);
+      gst_rtspsrc_print_msg_body (src, msg);
+      break;
+    default:
+      GST_LOG_OBJECT (src, "unsupported message type %d", msg->type);
+      break;
+  }
+  GST_LOG_OBJECT (src, "--------------------------------------------");
+}
+
+/* Log one SDP media description of an SDP message at LOG level. */
+static void
+gst_rtspsrc_print_sdp_media (GstRTSPSrc * src, GstSDPMedia * media)
+{
+  guint idx;
+
+  GST_LOG_OBJECT (src, " media: '%s'", GST_STR_NULL (media->media));
+  GST_LOG_OBJECT (src, " port: '%u'", media->port);
+  GST_LOG_OBJECT (src, " num_ports: '%u'", media->num_ports);
+  GST_LOG_OBJECT (src, " proto: '%s'", GST_STR_NULL (media->proto));
+
+  if (media->fmts && media->fmts->len > 0) {
+    GST_LOG_OBJECT (src, " formats:");
+    for (idx = 0; idx < media->fmts->len; idx++) {
+      GST_LOG_OBJECT (src, " format '%s'", g_array_index (media->fmts,
+              gchar *, idx));
+    }
+  }
+
+  GST_LOG_OBJECT (src, " information: '%s'",
+      GST_STR_NULL (media->information));
+
+  if (media->connections && media->connections->len > 0) {
+    GST_LOG_OBJECT (src, " connections:");
+    for (idx = 0; idx < media->connections->len; idx++) {
+      const GstSDPConnection *conn =
+          &g_array_index (media->connections, GstSDPConnection, idx);
+
+      GST_LOG_OBJECT (src, " nettype: '%s'",
+          GST_STR_NULL (conn->nettype));
+      GST_LOG_OBJECT (src, " addrtype: '%s'",
+          GST_STR_NULL (conn->addrtype));
+      GST_LOG_OBJECT (src, " address: '%s'",
+          GST_STR_NULL (conn->address));
+      GST_LOG_OBJECT (src, " ttl: '%u'", conn->ttl);
+      GST_LOG_OBJECT (src, " addr_number: '%u'", conn->addr_number);
+    }
+  }
+
+  if (media->bandwidths && media->bandwidths->len > 0) {
+    GST_LOG_OBJECT (src, " bandwidths:");
+    for (idx = 0; idx < media->bandwidths->len; idx++) {
+      const GstSDPBandwidth *bw =
+          &g_array_index (media->bandwidths, GstSDPBandwidth, idx);
+
+      GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (bw->bwtype));
+      GST_LOG_OBJECT (src, " bandwidth: '%u'", bw->bandwidth);
+    }
+  }
+
+  GST_LOG_OBJECT (src, " key:");
+  GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (media->key.type));
+  GST_LOG_OBJECT (src, " data: '%s'", GST_STR_NULL (media->key.data));
+
+  if (media->attributes && media->attributes->len > 0) {
+    GST_LOG_OBJECT (src, " attributes:");
+    for (idx = 0; idx < media->attributes->len; idx++) {
+      const GstSDPAttribute *attr =
+          &g_array_index (media->attributes, GstSDPAttribute, idx);
+
+      GST_LOG_OBJECT (src, " attribute '%s' : '%s'", attr->key, attr->value);
+    }
+  }
+}
+
+/* Log a complete SDP message (session level plus every media section)
+ * at LOG level.  Does nothing unless the debug threshold is at least
+ * LOG. */
+void
+gst_rtspsrc_print_sdp_message (GstRTSPSrc * src, const GstSDPMessage * msg)
+{
+  g_return_if_fail (src != NULL);
+  g_return_if_fail (msg != NULL);
+
+  if (gst_debug_category_get_threshold (GST_CAT_DEFAULT) < GST_LEVEL_LOG)
+    return;
+
+  GST_LOG_OBJECT (src, "--------------------------------------------");
+  GST_LOG_OBJECT (src, "sdp packet %p:", msg);
+  GST_LOG_OBJECT (src, " version: '%s'", GST_STR_NULL (msg->version));
+  GST_LOG_OBJECT (src, " origin:");
+  GST_LOG_OBJECT (src, " username: '%s'",
+      GST_STR_NULL (msg->origin.username));
+  GST_LOG_OBJECT (src, " sess_id: '%s'",
+      GST_STR_NULL (msg->origin.sess_id));
+  GST_LOG_OBJECT (src, " sess_version: '%s'",
+      GST_STR_NULL (msg->origin.sess_version));
+  GST_LOG_OBJECT (src, " nettype: '%s'",
+      GST_STR_NULL (msg->origin.nettype));
+  GST_LOG_OBJECT (src, " addrtype: '%s'",
+      GST_STR_NULL (msg->origin.addrtype));
+  GST_LOG_OBJECT (src, " addr: '%s'", GST_STR_NULL (msg->origin.addr));
+  GST_LOG_OBJECT (src, " session_name: '%s'",
+      GST_STR_NULL (msg->session_name));
+  GST_LOG_OBJECT (src, " information: '%s'", GST_STR_NULL (msg->information));
+  GST_LOG_OBJECT (src, " uri: '%s'", GST_STR_NULL (msg->uri));
+
+  if (msg->emails && msg->emails->len > 0) {
+    guint i;
+
+    GST_LOG_OBJECT (src, " emails:");
+    for (i = 0; i < msg->emails->len; i++) {
+      GST_LOG_OBJECT (src, " email '%s'", g_array_index (msg->emails, gchar *,
+              i));
+    }
+  }
+  if (msg->phones && msg->phones->len > 0) {
+    guint i;
+
+    GST_LOG_OBJECT (src, " phones:");
+    for (i = 0; i < msg->phones->len; i++) {
+      GST_LOG_OBJECT (src, " phone '%s'", g_array_index (msg->phones, gchar *,
+              i));
+    }
+  }
+  GST_LOG_OBJECT (src, " connection:");
+  GST_LOG_OBJECT (src, " nettype: '%s'",
+      GST_STR_NULL (msg->connection.nettype));
+  GST_LOG_OBJECT (src, " addrtype: '%s'",
+      GST_STR_NULL (msg->connection.addrtype));
+  GST_LOG_OBJECT (src, " address: '%s'",
+      GST_STR_NULL (msg->connection.address));
+  GST_LOG_OBJECT (src, " ttl: '%u'", msg->connection.ttl);
+  GST_LOG_OBJECT (src, " addr_number: '%u'", msg->connection.addr_number);
+  if (msg->bandwidths && msg->bandwidths->len > 0) {
+    guint i;
+
+    GST_LOG_OBJECT (src, " bandwidths:");
+    for (i = 0; i < msg->bandwidths->len; i++) {
+      GstSDPBandwidth *bw =
+          &g_array_index (msg->bandwidths, GstSDPBandwidth, i);
+
+      GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (bw->bwtype));
+      GST_LOG_OBJECT (src, " bandwidth: '%u'", bw->bandwidth);
+    }
+  }
+  GST_LOG_OBJECT (src, " key:");
+  GST_LOG_OBJECT (src, " type: '%s'", GST_STR_NULL (msg->key.type));
+  GST_LOG_OBJECT (src, " data: '%s'", GST_STR_NULL (msg->key.data));
+  if (msg->attributes && msg->attributes->len > 0) {
+    guint i;
+
+    GST_LOG_OBJECT (src, " attributes:");
+    for (i = 0; i < msg->attributes->len; i++) {
+      GstSDPAttribute *attr =
+          &g_array_index (msg->attributes, GstSDPAttribute, i);
+
+      GST_LOG_OBJECT (src, " attribute '%s' : '%s'", attr->key, attr->value);
+    }
+  }
+  /* each media section is dumped by the helper above */
+  if (msg->medias && msg->medias->len > 0) {
+    guint i;
+
+    GST_LOG_OBJECT (src, " medias:");
+    for (i = 0; i < msg->medias->len; i++) {
+      GST_LOG_OBJECT (src, " media %u:", i);
+      gst_rtspsrc_print_sdp_media (src, &g_array_index (msg->medias,
+              GstSDPMedia, i));
+    }
+  }
+  GST_LOG_OBJECT (src, "--------------------------------------------");
+}
diff --git a/gst/rtsp/gstrtspsrc.h b/gst/rtsp/gstrtspsrc.h
new file mode 100644
index 0000000000..5af00f9e55
--- /dev/null
+++ b/gst/rtsp/gstrtspsrc.h
@@ -0,0 +1,347 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * <2006> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/*
+ * Unless otherwise indicated, Source Code is licensed under MIT license.
+ * See further explanation attached in License Statement (distributed in the file
+ * LICENSE).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy of
+ * this software and associated documentation files (the "Software"), to deal in
+ * the Software without restriction, including without limitation the rights to
+ * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+ * of the Software, and to permit persons to whom the Software is furnished to do
+ * so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef __GST_RTSPSRC_H__
+#define __GST_RTSPSRC_H__
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+#include <gst/rtsp/rtsp.h>
+#include <gio/gio.h>
+
+#include "gstrtspext.h"
+
/* Standard GObject type boilerplate and cast/check macros for GstRTSPSrc. */
#define GST_TYPE_RTSPSRC \
  (gst_rtspsrc_get_type())
#define GST_RTSPSRC(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTSPSRC,GstRTSPSrc))
#define GST_RTSPSRC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTSPSRC,GstRTSPSrcClass))
#define GST_IS_RTSPSRC(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTSPSRC))
#define GST_IS_RTSPSRC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTSPSRC))
/* Unchecked cast, for hot paths where the type is already known. */
#define GST_RTSPSRC_CAST(obj) \
  ((GstRTSPSrc *)(obj))

typedef struct _GstRTSPSrc GstRTSPSrc;
typedef struct _GstRTSPSrcClass GstRTSPSrcClass;

/* Recursive lock protecting RTSP state changes (see state_rec_lock below). */
#define GST_RTSP_STATE_GET_LOCK(rtsp)   (&GST_RTSPSRC_CAST(rtsp)->state_rec_lock)
#define GST_RTSP_STATE_LOCK(rtsp)       (g_rec_mutex_lock (GST_RTSP_STATE_GET_LOCK(rtsp)))
#define GST_RTSP_STATE_UNLOCK(rtsp)     (g_rec_mutex_unlock (GST_RTSP_STATE_GET_LOCK(rtsp)))

/* Recursive lock protecting the stream list/stream objects (stream_rec_lock). */
#define GST_RTSP_STREAM_GET_LOCK(rtsp)  (&GST_RTSPSRC_CAST(rtsp)->stream_rec_lock)
#define GST_RTSP_STREAM_LOCK(rtsp)      (g_rec_mutex_lock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
#define GST_RTSP_STREAM_UNLOCK(rtsp)    (g_rec_mutex_unlock (GST_RTSP_STREAM_GET_LOCK(rtsp)))
+
typedef struct _GstRTSPConnInfo GstRTSPConnInfo;

/* Book-keeping for one RTSP connection. Embedded both in GstRTSPSrc (the
 * session connection) and in GstRTSPStream (optional per-stream connection). */
struct _GstRTSPConnInfo {
  gchar *location;              /* location string this connection was made for */
  GstRTSPUrl *url;              /* parsed RTSP URL */
  gchar *url_str;               /* string form of the URL */
  GstRTSPConnection *connection;        /* the actual connection object */
  gboolean connected;           /* TRUE once the connection is established */
  gboolean flushing;            /* TRUE while the connection is being flushed */

  GMutex send_lock;             /* serializes sending on the connection */
  GMutex recv_lock;             /* serializes receiving on the connection */
};
+
typedef struct _GstRTSPStream GstRTSPStream;

/* Per-stream state: one GstRTSPStream is created for each media stream
 * (SDP m= line) of the RTSP session and tracks its pads, transport
 * elements, SRTP state, bandwidth and destination information. */
struct _GstRTSPStream {
  gint id;                      /* stream index within the session */

  GstRTSPSrc *parent; /* parent, no extra ref to parent is taken */

  /* pad we expose or NULL when it does not have an actual pad */
  GstPad *srcpad;
  GstFlowReturn last_ret;       /* last flow return pushed on srcpad */
  gboolean added;
  gboolean setup;
  gboolean skipped;
  gboolean eos;
  gboolean discont;
  gboolean need_caps;
  gboolean waiting_setup_response;

  /* for interleaved mode: [0] = RTP channel, [1] = RTCP channel */
  guint8 channel[2];
  GstPad *channelpad[2];

  /* our udp sources */
  GstElement *udpsrc[2];
  GstPad *blockedpad;
  gulong blockid;               /* probe id used to block/unblock blockedpad */
  gboolean is_ipv6;

  /* our udp sinks back to the server */
  GstElement *udpsink[2];
  GstPad *rtcppad;

  /* fakesrc for sending dummy data or appsrc for sending backchannel data */
  GstElement *rtpsrc;

  /* state */
  guint port;
  gboolean container;           /* TRUE when stream carries a container format */
  gboolean is_real;
  guint8 default_pt;
  GstRTSPProfile profile;
  GArray *ptmap;                /* payload-type to caps mapping */
  /* original control url */
  gchar *control_url;
  guint32 ssrc;
  guint32 seqbase;
  guint64 timebase;
  GstElement *srtpdec;
  GstCaps *srtcpparams;
  GstElement *srtpenc;
  guint32 send_ssrc;

  /* per stream connection */
  GstRTSPConnInfo conninfo;

  /* session */
  GObject *session;

  /* srtp key management */
  GstMIKEYMessage *mikey;

  /* bandwidth (from SDP b= lines: AS, RS, RR) */
  guint as_bandwidth;
  guint rs_bandwidth;
  guint rr_bandwidth;

  /* destination */
  gchar *destination;
  gboolean is_multicast;
  guint ttl;
  gboolean is_backchannel;

  /* A unique and stable id we will use for the stream start event */
  gchar *stream_id;

  GstStructure *rtx_pt_map;     /* retransmission payload-type map */

  guint32 segment_seqnum[2];
};
+
/**
 * GstRTSPSrcTimeoutCause:
 * @GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP: timeout triggered by RTCP
 *
 * Different causes to why the rtspsrc generated the GstRTSPSrcTimeout
 * element message posted on the bus.
 */
typedef enum
{
  GST_RTSP_SRC_TIMEOUT_CAUSE_RTCP
} GstRTSPSrcTimeoutCause;

/**
 * GstRTSPNatMethod:
 * @GST_RTSP_NAT_NONE: none
 * @GST_RTSP_NAT_DUMMY: send dummy packets
 *
 * Different methods for trying to traverse firewalls and NATs.
 */
typedef enum
{
  GST_RTSP_NAT_NONE,
  GST_RTSP_NAT_DUMMY
} GstRTSPNatMethod;
+
+
/* Instance state of the rtspsrc element. Mostly grouped into: interleaved
 * (TCP) task state, UDP command-loop state, SDP/stream bookkeeping,
 * user-visible properties, protocol state, and session-manager plumbing. */
struct _GstRTSPSrc {
  GstBin parent;

  /* task and mutex for interleaved mode */
  gboolean interleaved;
  GstTask *task;
  GRecMutex stream_rec_lock;    /* see GST_RTSP_STREAM_LOCK */
  GstSegment segment;
  gboolean running;
  gboolean need_range;
  gboolean server_side_trickmode;
  GstClockTime trickmode_interval;
  gint free_channel;            /* next free interleaved channel number */
  gboolean need_segment;
  gboolean clip_out_segment;
  GstSegment out_segment;
  GstClockTime base_time;

  /* UDP mode loop */
  gint pending_cmd;             /* command queued for the loop */
  gint busy_cmd;                /* command currently being executed */
  GCond cmd_cond;
  gboolean ignore_timeout;
  gboolean open_error;

  /* mutex for protecting state changes */
  GRecMutex state_rec_lock;     /* see GST_RTSP_STATE_LOCK */

  GstSDPMessage *sdp;
  gboolean from_sdp;            /* TRUE when constructed from an sdp:// URI */
  GList *streams;               /* list of GstRTSPStream */
  GstStructure *props;
  gboolean need_activate;

  /* properties */
  GstRTSPLowerTrans protocols;
  gboolean debug;
  guint retry;
  guint64 udp_timeout;
  gint64 tcp_timeout;
  guint latency;
  gboolean drop_on_latency;
  guint64 connection_speed;
  GstRTSPNatMethod nat_method;
  gboolean do_rtcp;
  gboolean do_rtsp_keep_alive;
  gchar *proxy_host;
  guint proxy_port;
  gchar *proxy_user;        /* from url or property */
  gchar *proxy_passwd;      /* from url or property */
  gchar *prop_proxy_id;     /* set via property */
  gchar *prop_proxy_pw;     /* set via property */
  guint rtp_blocksize;
  gchar *user_id;
  gchar *user_pw;
  gint buffer_mode;
  GstRTSPRange client_port_range;
  gint udp_buffer_size;
  gboolean short_header;
  guint probation;
  gboolean udp_reconnect;
  gchar *multi_iface;
  gboolean ntp_sync;
  gboolean use_pipeline_clock;
  GstStructure *sdes;
  GTlsCertificateFlags tls_validation_flags;
  GTlsDatabase *tls_database;
  GTlsInteraction *tls_interaction;
  gboolean do_retransmission;
  gint ntp_time_source;
  gchar *user_agent;
  gint max_rtcp_rtp_time_diff;
  gboolean rfc7273_sync;
  guint64 max_ts_offset_adjustment;
  gint64 max_ts_offset;
  gboolean max_ts_offset_is_set;
  gint backchannel;
  GstClockTime teardown_timeout;
  gboolean onvif_mode;
  gboolean onvif_rate_control;
  gboolean is_live;
  gboolean ignore_x_server_reply;

  /* state */
  GstRTSPState state;
  gchar *content_base;
  GstRTSPLowerTrans cur_protocols;
  gboolean tried_url_auth;
  gchar *addr;
  gboolean need_redirect;
  GstRTSPTimeRange *range;
  gchar *control;
  guint next_port_num;
  GstClock *provided_clock;

  /* supported methods (bitmask of GstRTSPMethod) */
  gint methods;

  /* seekability
   * -1.0  : Stream is not seekable
   *  0.0  : seekable only to the beginning
   * G_MAXFLOAT : Any value is possible
   *
   * Any other positive value indicates the longest duration
   * between any two random access points
   * */
  gfloat seekable;
  guint32 seek_seqnum;
  GstClockTime last_pos;

  /* session management */
  GstElement *manager;
  gulong manager_sig_id;
  gulong manager_ptmap_id;
  gboolean use_buffering;

  GstRTSPConnInfo conninfo;     /* the session-wide connection */

  /* SET/GET PARAMETER requests queue */
  GQueue set_get_param_q;

  /* a list of RTSP extensions as GstElement */
  GstRTSPExtensionList *extensions;

  GstRTSPVersion default_version;
  GstRTSPVersion version;

  GstEvent *initial_seek;

  guint group_id;
  GMutex group_lock;
};
+
/* Class structure: the vfuncs below back the element's action signals.
 * Results of the parameter requests are delivered through the GstPromise. */
struct _GstRTSPSrcClass {
  GstBinClass parent_class;

  /* action signals */
  /* "get-parameter": request a single parameter via GET_PARAMETER */
  gboolean (*get_parameter) (GstRTSPSrc *rtsp, const gchar *parameter, const gchar *content_type, GstPromise *promise);
  /* "get-parameters": request several parameters in one GET_PARAMETER */
  gboolean (*get_parameters) (GstRTSPSrc *rtsp, gchar **parameters, const gchar *content_type, GstPromise *promise);
  /* "set-parameter": set a parameter via SET_PARAMETER */
  gboolean (*set_parameter) (GstRTSPSrc *rtsp, const gchar *name, const gchar *value, const gchar *content_type, GstPromise *promise);
  /* "push-backchannel-buffer": send a sample on the backchannel of stream @id */
  GstFlowReturn (*push_backchannel_buffer) (GstRTSPSrc *src, guint id, GstSample *sample);
};
+
+GType gst_rtspsrc_get_type(void);
+
+G_END_DECLS
+
+#endif /* __GST_RTSPSRC_H__ */
diff --git a/gst/rtsp/meson.build b/gst/rtsp/meson.build
new file mode 100644
index 0000000000..d60d9a975c
--- /dev/null
+++ b/gst/rtsp/meson.build
@@ -0,0 +1,18 @@
# Sources making up the rtsp plugin (rtspsrc, rtpdec and helpers).
rtsp_sources = [
  'gstrtspelement.c',
  'gstrtsp.c',
  'gstrtspsrc.c',
  'gstrtpdec.c',
  'gstrtspext.c',
]

# Build and install the plugin shared library.
gstrtsp = library('gstrtsp',
  rtsp_sources,
  c_args : gst_plugins_good_args,
  include_directories : [configinc, libsinc],
  dependencies : [gstbase_dep, gio_dep, gstrtp_dep, gstrtsp_dep, gstsdp_dep, gstnet_dep],
  install : true,
  install_dir : plugins_install_dir,
)
pkgconfig.generate(gstrtsp, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstrtsp]
diff --git a/gst/shapewipe/gstshapewipe.c b/gst/shapewipe/gstshapewipe.c
new file mode 100644
index 0000000000..06aa5b7ea5
--- /dev/null
+++ b/gst/shapewipe/gstshapewipe.c
@@ -0,0 +1,1146 @@
+/* GStreamer
+ * Copyright (C) 2009,2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-shapewipe
+ * @title: shapewipe
+ *
+ * The shapewipe element provides custom transitions on video streams
+ * based on a grayscale bitmap. The state of the transition can be
+ * controlled by the position property and an optional blended border
+ * can be added by the border property.
+ *
+ * Transition bitmaps can be downloaded from the Cinelerra pages
+ * [here](http://cinelerra-cv.wikidot.com/main:transitions-themes) or
+ * [here](https://cinelerra-gg.org/download/CinelerraGG_Manual/Shape_Wipe.html).
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! video/x-raw,format=AYUV,width=640,height=480 ! shapewipe position=0.5 name=shape ! videomixer name=mixer ! videoconvert ! autovideosink filesrc location=mask.png ! typefind ! decodebin ! videoconvert ! videoscale ! queue ! shape.mask_sink videotestsrc pattern=snow ! video/x-raw,format=AYUV,width=640,height=480 ! queue ! mixer.
+ * ]| This pipeline adds the transition from mask.png with position 0.5 to an SMPTE test screen and snow.
+ *
+ */
+
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <string.h>
+
+#include <gst/gst.h>
+
+#include "gstshapewipe.h"
+
+static void gst_shape_wipe_finalize (GObject * object);
+static void gst_shape_wipe_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_shape_wipe_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+
+static void gst_shape_wipe_reset (GstShapeWipe * self);
+static void gst_shape_wipe_update_qos (GstShapeWipe * self, gdouble proportion,
+ GstClockTimeDiff diff, GstClockTime time);
+static void gst_shape_wipe_reset_qos (GstShapeWipe * self);
+static void gst_shape_wipe_read_qos (GstShapeWipe * self, gdouble * proportion,
+ GstClockTime * time);
+
+static GstStateChangeReturn gst_shape_wipe_change_state (GstElement * element,
+ GstStateChange transition);
+
+static GstFlowReturn gst_shape_wipe_video_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+static gboolean gst_shape_wipe_video_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_shape_wipe_video_sink_setcaps (GstShapeWipe * self,
+ GstCaps * caps);
+static GstCaps *gst_shape_wipe_video_sink_getcaps (GstShapeWipe * self,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_shape_wipe_video_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+static GstFlowReturn gst_shape_wipe_mask_sink_chain (GstPad * pad,
+ GstObject * parent, GstBuffer * buffer);
+static gboolean gst_shape_wipe_mask_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * event);
+static gboolean gst_shape_wipe_mask_sink_setcaps (GstShapeWipe * self,
+ GstCaps * caps);
+static GstCaps *gst_shape_wipe_mask_sink_getcaps (GstShapeWipe * self,
+ GstPad * pad, GstCaps * filter);
+static gboolean gst_shape_wipe_mask_sink_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+static gboolean gst_shape_wipe_src_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstCaps *gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter);
+static gboolean gst_shape_wipe_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+
+enum
+{
+ PROP_0,
+ PROP_POSITION,
+ PROP_BORDER
+};
+
+#define DEFAULT_POSITION 0.0
+#define DEFAULT_BORDER 0.0
+
+static GstStaticPadTemplate video_sink_pad_template =
+GST_STATIC_PAD_TEMPLATE ("video_sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }")));
+
+static GstStaticPadTemplate mask_sink_pad_template =
+ GST_STATIC_PAD_TEMPLATE ("mask_sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw, "
+ "format = (string) GRAY8, "
+ "width = " GST_VIDEO_SIZE_RANGE ", "
+ "height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1 ; "
+ "video/x-raw, " "format = (string) " GST_VIDEO_NE (GRAY16) ", "
+ "width = " GST_VIDEO_SIZE_RANGE ", "
+ "height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1"));
+
+static GstStaticPadTemplate src_pad_template =
+GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, ARGB, BGRA, ABGR, RGBA }")));
+
+GST_DEBUG_CATEGORY_STATIC (gst_shape_wipe_debug);
+#define GST_CAT_DEFAULT gst_shape_wipe_debug
+
+#define gst_shape_wipe_parent_class parent_class
+G_DEFINE_TYPE (GstShapeWipe, gst_shape_wipe, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (shapewipe, "shapewipe", GST_RANK_NONE,
+ GST_TYPE_SHAPE_WIPE, GST_DEBUG_CATEGORY_INIT (gst_shape_wipe_debug,
+ "shapewipe", 0, "shapewipe element"););
+
/* Class initialisation: installs the GObject property vfuncs, the two
 * controllable float properties ("position", "border"), the state-change
 * handler, the element metadata and the three static pad templates. */
static void
gst_shape_wipe_class_init (GstShapeWipeClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);

  gobject_class->finalize = gst_shape_wipe_finalize;
  gobject_class->set_property = gst_shape_wipe_set_property;
  gobject_class->get_property = gst_shape_wipe_get_property;

  /* Both properties are GST_PARAM_CONTROLLABLE so they can be driven by a
   * GstControlSource over the course of the transition. */
  g_object_class_install_property (gobject_class, PROP_POSITION,
      g_param_spec_float ("position", "Position", "Position of the mask",
          0.0, 1.0, DEFAULT_POSITION,
          G_PARAM_STATIC_STRINGS | G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
  g_object_class_install_property (gobject_class, PROP_BORDER,
      g_param_spec_float ("border", "Border", "Border of the mask",
          0.0, 1.0, DEFAULT_BORDER,
          G_PARAM_STATIC_STRINGS | G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_shape_wipe_change_state);

  gst_element_class_set_static_metadata (gstelement_class,
      "Shape Wipe transition filter",
      "Filter/Editor/Video",
      "Adds a shape wipe transition to a video stream",
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");

  gst_element_class_add_static_pad_template (gstelement_class,
      &video_sink_pad_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &mask_sink_pad_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &src_pad_template);
}
+
/* Instance initialisation: creates the two sink pads and the source pad
 * with their chain/event/query functions, sets up the mutex/cond pair that
 * synchronises the mask between the two streaming threads, and resets all
 * internal state. */
static void
gst_shape_wipe_init (GstShapeWipe * self)
{
  self->video_sinkpad =
      gst_pad_new_from_static_template (&video_sink_pad_template, "video_sink");
  gst_pad_set_chain_function (self->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_chain));
  gst_pad_set_event_function (self->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_event));
  gst_pad_set_query_function (self->video_sinkpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_video_sink_query));
  gst_element_add_pad (GST_ELEMENT (self), self->video_sinkpad);

  self->mask_sinkpad =
      gst_pad_new_from_static_template (&mask_sink_pad_template, "mask_sink");
  gst_pad_set_chain_function (self->mask_sinkpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_chain));
  gst_pad_set_event_function (self->mask_sinkpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_event));
  gst_pad_set_query_function (self->mask_sinkpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_mask_sink_query));
  gst_element_add_pad (GST_ELEMENT (self), self->mask_sinkpad);

  self->srcpad = gst_pad_new_from_static_template (&src_pad_template, "src");
  gst_pad_set_event_function (self->srcpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_src_event));
  gst_pad_set_query_function (self->srcpad,
      GST_DEBUG_FUNCPTR (gst_shape_wipe_src_query));
  gst_element_add_pad (GST_ELEMENT (self), self->srcpad);

  /* protects self->mask and lets the video thread wait for a mask buffer */
  g_mutex_init (&self->mask_mutex);
  g_cond_init (&self->mask_cond);

  gst_shape_wipe_reset (self);
}
+
+static void
+gst_shape_wipe_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (object);
+
+ switch (prop_id) {
+ case PROP_POSITION:
+ g_value_set_float (value, self->mask_position);
+ break;
+ case PROP_BORDER:
+ g_value_set_float (value, self->mask_border);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_shape_wipe_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (object);
+
+ switch (prop_id) {
+ case PROP_POSITION:{
+ gfloat f = g_value_get_float (value);
+
+ GST_LOG_OBJECT (self, "Setting mask position: %f", f);
+ self->mask_position = f;
+ break;
+ }
+ case PROP_BORDER:{
+ gfloat f = g_value_get_float (value);
+
+ GST_LOG_OBJECT (self, "Setting mask border: %f", f);
+ self->mask_border = f;
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
/* GObject finalize: drops the mask buffer and other internal state via
 * gst_shape_wipe_reset(), frees the mask mutex/cond pair and chains up. */
static void
gst_shape_wipe_finalize (GObject * object)
{
  GstShapeWipe *self = GST_SHAPE_WIPE (object);

  gst_shape_wipe_reset (self);

  g_cond_clear (&self->mask_cond);
  g_mutex_clear (&self->mask_mutex);

  /* chain up last, after all instance state is released */
  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
+static void
+gst_shape_wipe_reset (GstShapeWipe * self)
+{
+ GST_DEBUG_OBJECT (self, "Resetting internal state");
+
+ if (self->mask)
+ gst_buffer_unref (self->mask);
+ self->mask = NULL;
+
+ g_mutex_lock (&self->mask_mutex);
+ g_cond_signal (&self->mask_cond);
+ g_mutex_unlock (&self->mask_mutex);
+
+ gst_video_info_init (&self->vinfo);
+ gst_video_info_init (&self->minfo);
+ self->mask_bpp = 0;
+
+ gst_segment_init (&self->segment, GST_FORMAT_TIME);
+
+ gst_shape_wipe_reset_qos (self);
+ self->frame_duration = 0;
+}
+
/* Configure the video sink pad from @caps: caches the parsed video info,
 * derives the per-frame duration from the framerate and forwards the caps
 * to the source pad. If the frame size changed, the stored mask is dropped
 * since it no longer matches the video. Returns FALSE on unparsable caps. */
static gboolean
gst_shape_wipe_video_sink_setcaps (GstShapeWipe * self, GstCaps * caps)
{
  gboolean ret = TRUE;
  GstVideoInfo info;

  GST_DEBUG_OBJECT (self, "Setting caps: %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&info, caps))
    goto invalid_caps;

  /* size changed: the current mask can't be applied anymore, drop it
   * under the mask lock since it is shared with the mask thread */
  if ((self->vinfo.width != info.width || self->vinfo.height != info.height) &&
      self->vinfo.width > 0 && self->vinfo.height > 0) {
    g_mutex_lock (&self->mask_mutex);
    if (self->mask)
      gst_buffer_unref (self->mask);
    self->mask = NULL;
    g_mutex_unlock (&self->mask_mutex);
  }


  /* frame duration is used for QoS; 0 when the framerate is unknown */
  if (info.fps_n != 0)
    self->frame_duration =
        gst_util_uint64_scale (GST_SECOND, info.fps_d, info.fps_n);
  else
    self->frame_duration = 0;

  self->vinfo = info;

  ret = gst_pad_set_caps (self->srcpad, caps);

  return ret;

  /* ERRORS */
invalid_caps:
  {
    GST_ERROR_OBJECT (self, "Invalid caps");
    return FALSE;
  }
}
+
+static GstCaps *
+gst_shape_wipe_video_sink_getcaps (GstShapeWipe * self, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *templ, *ret, *tmp;
+
+ ret = gst_pad_get_current_caps (pad);
+ if (ret != NULL)
+ return ret;
+
+ templ = gst_pad_get_pad_template_caps (pad);
+ tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
+ if (tmp) {
+ ret = gst_caps_intersect (tmp, templ);
+ gst_caps_unref (templ);
+ gst_caps_unref (tmp);
+ } else {
+ ret = templ;
+ }
+
+ GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);
+
+ if (gst_caps_is_empty (ret))
+ goto done;
+
+ tmp = gst_pad_peer_query_caps (pad, NULL);
+
+ GST_LOG_OBJECT (pad, "peerpad accepted caps: %" GST_PTR_FORMAT, tmp);
+ if (tmp) {
+ GstCaps *intersection;
+
+ intersection = gst_caps_intersect (tmp, ret);
+ gst_caps_unref (tmp);
+ gst_caps_unref (ret);
+ ret = intersection;
+ }
+
+ GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, tmp);
+
+ if (gst_caps_is_empty (ret))
+ goto done;
+
+ if (self->vinfo.height && self->vinfo.width) {
+ guint i, n;
+
+ ret = gst_caps_make_writable (ret);
+ n = gst_caps_get_size (ret);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (ret, i);
+
+ gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
+ G_TYPE_INT, self->vinfo.height, NULL);
+ }
+ }
+
+ tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
+
+ GST_LOG_OBJECT (pad, "mask accepted caps: %" GST_PTR_FORMAT, tmp);
+ if (tmp) {
+ GstCaps *intersection, *tmp2;
+ guint i, n;
+
+ tmp2 = gst_pad_get_pad_template_caps (self->mask_sinkpad);
+ intersection = gst_caps_intersect (tmp, tmp2);
+ gst_caps_unref (tmp);
+ gst_caps_unref (tmp2);
+ tmp = intersection;
+
+ tmp = gst_caps_make_writable (tmp);
+ n = gst_caps_get_size (tmp);
+
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (tmp, i);
+
+ gst_structure_remove_fields (s, "format", "framerate", NULL);
+ gst_structure_set_name (s, "video/x-raw");
+ }
+
+ intersection = gst_caps_intersect (tmp, ret);
+ gst_caps_unref (tmp);
+ gst_caps_unref (ret);
+ ret = intersection;
+ }
+done:
+ GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);
+
+ return ret;
+}
+
/* Configure the mask sink pad from @caps: parses the video info, rejects
 * masks whose width/height differ from already-configured video caps, and
 * caches the mask info plus the mask's bits per pixel (component depth,
 * 8 for GRAY8 or 16 for GRAY16 per the pad template). */
static gboolean
gst_shape_wipe_mask_sink_setcaps (GstShapeWipe * self, GstCaps * caps)
{
  gboolean ret = TRUE;
  gint width, height, bpp;
  GstVideoInfo info;

  GST_DEBUG_OBJECT (self, "Setting caps: %" GST_PTR_FORMAT, caps);

  if (!gst_video_info_from_caps (&info, caps)) {
    ret = FALSE;
    goto done;
  }

  width = GST_VIDEO_INFO_WIDTH (&info);
  height = GST_VIDEO_INFO_HEIGHT (&info);
  bpp = GST_VIDEO_INFO_COMP_DEPTH (&info, 0);

  /* the mask must match the video frame size exactly */
  if ((self->vinfo.width != width || self->vinfo.height != height) &&
      self->vinfo.width > 0 && self->vinfo.height > 0) {
    GST_ERROR_OBJECT (self, "Mask caps must have the same width/height "
        "as the video caps");
    ret = FALSE;
    goto done;
  }

  self->mask_bpp = bpp;
  self->minfo = info;

done:
  return ret;
}
+
+static GstCaps *
+gst_shape_wipe_mask_sink_getcaps (GstShapeWipe * self, GstPad * pad,
+ GstCaps * filter)
+{
+ GstCaps *ret, *tmp, *tcaps;
+ guint i, n;
+
+ ret = gst_pad_get_current_caps (pad);
+ if (ret != NULL)
+ return ret;
+
+ tcaps = gst_pad_get_pad_template_caps (self->video_sinkpad);
+ tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
+ if (tmp) {
+ ret = gst_caps_intersect (tmp, tcaps);
+ gst_caps_unref (tcaps);
+ gst_caps_unref (tmp);
+ } else {
+ ret = tcaps;
+ }
+
+ GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);
+
+ if (gst_caps_is_empty (ret))
+ goto done;
+
+ tmp = gst_pad_peer_query_caps (self->srcpad, NULL);
+ GST_LOG_OBJECT (pad, "srcpad accepted caps: %" GST_PTR_FORMAT, ret);
+
+ if (tmp) {
+ GstCaps *intersection;
+
+ intersection = gst_caps_intersect (ret, tmp);
+ gst_caps_unref (ret);
+ gst_caps_unref (tmp);
+ ret = intersection;
+ }
+
+ GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);
+
+ if (gst_caps_is_empty (ret))
+ goto done;
+
+ ret = gst_caps_make_writable (ret);
+ n = gst_caps_get_size (ret);
+ tmp = gst_caps_new_empty ();
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (ret, i);
+ GstStructure *t;
+
+ gst_structure_set_name (s, "video/x-raw");
+ gst_structure_remove_fields (s, "format", "framerate", NULL);
+
+ if (self->vinfo.width && self->vinfo.height)
+ gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
+ G_TYPE_INT, self->vinfo.height, NULL);
+
+ gst_structure_set (s, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
+
+ t = gst_structure_copy (s);
+
+ gst_structure_set (s, "format", G_TYPE_STRING, GST_VIDEO_NE (GRAY16), NULL);
+ gst_structure_set (t, "format", G_TYPE_STRING, "GRAY8", NULL);
+
+ gst_caps_append_structure (tmp, t);
+ }
+ gst_caps_append (ret, tmp);
+
+ tmp = gst_pad_peer_query_caps (pad, NULL);
+ GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, tmp);
+
+ if (tmp) {
+ GstCaps *intersection;
+
+ intersection = gst_caps_intersect (tmp, ret);
+ gst_caps_unref (tmp);
+ gst_caps_unref (ret);
+ ret = intersection;
+ }
+
+done:
+ GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);
+
+ return ret;
+}
+
+static GstCaps *
+gst_shape_wipe_src_getcaps (GstPad * pad, GstCaps * filter)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
+ GstCaps *templ, *ret, *tmp;
+
+ ret = gst_pad_get_current_caps (pad);
+ if (ret != NULL)
+ return ret;
+
+ ret = gst_pad_get_current_caps (self->video_sinkpad);
+ if (ret != NULL)
+ return ret;
+
+ templ = gst_pad_get_pad_template_caps (self->video_sinkpad);
+ tmp = gst_pad_peer_query_caps (self->video_sinkpad, NULL);
+ if (tmp) {
+ ret = gst_caps_intersect (tmp, templ);
+ gst_caps_unref (templ);
+ gst_caps_unref (tmp);
+ } else {
+ ret = templ;
+ }
+
+ GST_LOG_OBJECT (pad, "video sink accepted caps: %" GST_PTR_FORMAT, ret);
+
+ if (gst_caps_is_empty (ret))
+ goto done;
+
+ tmp = gst_pad_peer_query_caps (pad, NULL);
+ GST_LOG_OBJECT (pad, "peer accepted caps: %" GST_PTR_FORMAT, ret);
+ if (tmp) {
+ GstCaps *intersection;
+
+ intersection = gst_caps_intersect (tmp, ret);
+ gst_caps_unref (tmp);
+ gst_caps_unref (ret);
+ ret = intersection;
+ }
+
+ GST_LOG_OBJECT (pad, "intersection: %" GST_PTR_FORMAT, ret);
+
+ if (gst_caps_is_empty (ret))
+ goto done;
+
+ if (self->vinfo.height && self->vinfo.width) {
+ guint i, n;
+
+ ret = gst_caps_make_writable (ret);
+ n = gst_caps_get_size (ret);
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (ret, i);
+
+ gst_structure_set (s, "width", G_TYPE_INT, self->vinfo.width, "height",
+ G_TYPE_INT, self->vinfo.height, NULL);
+ }
+ }
+
+ tmp = gst_pad_peer_query_caps (self->mask_sinkpad, NULL);
+ GST_LOG_OBJECT (pad, "mask sink accepted caps: %" GST_PTR_FORMAT, ret);
+ if (tmp) {
+ GstCaps *intersection, *tmp2;
+ guint i, n;
+
+ tmp2 = gst_pad_get_pad_template_caps (self->mask_sinkpad);
+ intersection = gst_caps_intersect (tmp, tmp2);
+ gst_caps_unref (tmp);
+ gst_caps_unref (tmp2);
+
+ tmp = gst_caps_make_writable (intersection);
+ n = gst_caps_get_size (tmp);
+
+ for (i = 0; i < n; i++) {
+ GstStructure *s = gst_caps_get_structure (tmp, i);
+
+ gst_structure_remove_fields (s, "format", "framerate", NULL);
+ gst_structure_set_name (s, "video/x-raw");
+ }
+
+ intersection = gst_caps_intersect (tmp, ret);
+ gst_caps_unref (tmp);
+ gst_caps_unref (ret);
+ ret = intersection;
+ }
+
+done:
+
+ gst_object_unref (self);
+
+ GST_LOG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, ret);
+
+ return ret;
+}
+
+static gboolean
+gst_shape_wipe_video_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstShapeWipe *self = (GstShapeWipe *) parent;
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Handling query of type '%s'",
+ gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_shape_wipe_video_sink_getcaps (self, pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_shape_wipe_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Handling query of type '%s'",
+ gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_shape_wipe_src_getcaps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_peer_query (self->video_sinkpad, query);
+ break;
+ }
+
+ return ret;
+}
+
+static void
+gst_shape_wipe_update_qos (GstShapeWipe * self, gdouble proportion,
+ GstClockTimeDiff diff, GstClockTime timestamp)
+{
+ GST_OBJECT_LOCK (self);
+ self->proportion = proportion;
+ if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
+ if (G_UNLIKELY (diff > 0))
+ self->earliest_time = timestamp + 2 * diff + self->frame_duration;
+ else
+ self->earliest_time = timestamp + diff;
+ } else {
+ self->earliest_time = GST_CLOCK_TIME_NONE;
+ }
+ GST_OBJECT_UNLOCK (self);
+}
+
/* Reset QoS state to its initial values: proportion 0.5 and no earliest
 * time, so subsequent frames are processed until new observations arrive. */
static void
gst_shape_wipe_reset_qos (GstShapeWipe * self)
{
  gst_shape_wipe_update_qos (self, 0.5, 0, GST_CLOCK_TIME_NONE);
}
+
/* Take a consistent snapshot of the current QoS values under the object
 * lock, for use by the streaming thread in gst_shape_wipe_do_qos. */
static void
gst_shape_wipe_read_qos (GstShapeWipe * self, gdouble * proportion,
    GstClockTime * time)
{
  GST_OBJECT_LOCK (self);
  *proportion = self->proportion;
  *time = self->earliest_time;
  GST_OBJECT_UNLOCK (self);
}
+
/* Perform qos calculations before processing the next frame. Returns TRUE if
 * the frame should be processed, FALSE if the frame can be dropped entirely.
 * A frame is droppable when its running time is at or before the earliest
 * acceptable time reported by downstream QoS events. */
static gboolean
gst_shape_wipe_do_qos (GstShapeWipe * self, GstClockTime timestamp)
{
  GstClockTime qostime, earliest_time;
  gdouble proportion;

  /* no timestamp, can't do QoS => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
    GST_LOG_OBJECT (self, "invalid timestamp, can't do QoS, process frame");
    return TRUE;
  }

  /* get latest QoS observation values */
  gst_shape_wipe_read_qos (self, &proportion, &earliest_time);

  /* skip qos if we have no observation (yet) => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
    GST_LOG_OBJECT (self, "no observation yet, process frame");
    return TRUE;
  }

  /* qos is done on running time */
  qostime = gst_segment_to_running_time (&self->segment, GST_FORMAT_TIME,
      timestamp);

  /* see how our next timestamp relates to the latest qos timestamp */
  GST_LOG_OBJECT (self, "qostime %" GST_TIME_FORMAT ", earliest %"
      GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));

  if (qostime != GST_CLOCK_TIME_NONE && qostime <= earliest_time) {
    GST_DEBUG_OBJECT (self, "we are late, drop frame");
    return FALSE;
  }

  GST_LOG_OBJECT (self, "process frame");
  return TRUE;
}
+
+#define CREATE_ARGB_FUNCTIONS(depth, name, shift, a, r, g, b) \
+static void \
+gst_shape_wipe_blend_##name##_##depth (GstShapeWipe * self, GstVideoFrame * inframe, \
+ GstVideoFrame * maskframe, GstVideoFrame * outframe) \
+{ \
+ const guint##depth *mask = (const guint##depth *) GST_VIDEO_FRAME_PLANE_DATA (maskframe, 0); \
+ const guint8 *input = (const guint8 *) GST_VIDEO_FRAME_PLANE_DATA (inframe, 0); \
+ guint8 *output = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (outframe, 0); \
+ guint i, j; \
+ gint width = GST_VIDEO_FRAME_WIDTH (inframe); \
+ gint height = GST_VIDEO_FRAME_HEIGHT (inframe); \
+ guint mask_increment = ((depth == 16) ? GST_ROUND_UP_2 (width) : \
+ GST_ROUND_UP_4 (width)) - width; \
+ gfloat position = self->mask_position; \
+ gfloat low = position - (self->mask_border / 2.0f); \
+ gfloat high = position + (self->mask_border / 2.0f); \
+ guint32 low_i, high_i, round_i; \
+ \
+ if (low < 0.0f) { \
+ high = 0.0f; \
+ low = 0.0f; \
+ } \
+ \
+ if (high > 1.0f) { \
+ low = 1.0f; \
+ high = 1.0f; \
+ } \
+ \
+ low_i = low * 65536; \
+ high_i = high * 65536; \
+ round_i = (high_i - low_i) >> 1; \
+ \
+ for (i = 0; i < height; i++) { \
+ for (j = 0; j < width; j++) { \
+ guint32 in = *mask << shift; \
+ \
+ if (in < low_i) { \
+ output[a] = 0x00; /* A */ \
+ output[r] = input[r]; /* R */ \
+ output[g] = input[g]; /* G */ \
+ output[b] = input[b]; /* B */ \
+ } else if (in >= high_i) { \
+ output[a] = input[a]; /* A */ \
+ output[r] = input[r]; /* R */ \
+ output[g] = input[g]; /* G */ \
+ output[b] = input[b]; /* B */ \
+ } else { \
+ guint32 val; \
+ /* Note: This will never overflow or be larger than 255! */ \
+ val = (((in - low_i) << 16) + round_i) / (high_i - low_i); \
+ val = (val * input[a] + 32768) >> 16; \
+ \
+ output[a] = val; /* A */ \
+ output[r] = input[r]; /* R */ \
+ output[g] = input[g]; /* G */ \
+ output[b] = input[b]; /* B */ \
+ } \
+ \
+ mask++; \
+ input += 4; \
+ output += 4; \
+ } \
+ mask += mask_increment; \
+ } \
+}
+
+CREATE_ARGB_FUNCTIONS (16, argb, 0, 0, 1, 2, 3);
+CREATE_ARGB_FUNCTIONS (8, argb, 8, 0, 1, 2, 3);
+
+CREATE_ARGB_FUNCTIONS (16, bgra, 0, 3, 2, 1, 0);
+CREATE_ARGB_FUNCTIONS (8, bgra, 8, 3, 2, 1, 0);
+
+static GstFlowReturn
+gst_shape_wipe_video_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *mask = NULL, *outbuf = NULL;
+ GstClockTime timestamp;
+ GstVideoFrame inframe, outframe, maskframe;
+
+ if (G_UNLIKELY (GST_VIDEO_INFO_FORMAT (&self->vinfo) ==
+ GST_VIDEO_FORMAT_UNKNOWN))
+ goto not_negotiated;
+
+ timestamp = GST_BUFFER_TIMESTAMP (buffer);
+ timestamp =
+ gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);
+
+ if (GST_CLOCK_TIME_IS_VALID (timestamp))
+ gst_object_sync_values (GST_OBJECT (self), timestamp);
+
+ GST_LOG_OBJECT (self,
+ "Blending buffer with timestamp %" GST_TIME_FORMAT " at position %f",
+ GST_TIME_ARGS (timestamp), self->mask_position);
+
+ g_mutex_lock (&self->mask_mutex);
+ if (self->shutdown)
+ goto shutdown;
+
+ if (!self->mask)
+ g_cond_wait (&self->mask_cond, &self->mask_mutex);
+
+ if (self->mask == NULL || self->shutdown) {
+ goto shutdown;
+ } else {
+ mask = gst_buffer_ref (self->mask);
+ }
+ g_mutex_unlock (&self->mask_mutex);
+
+ if (!gst_shape_wipe_do_qos (self, GST_BUFFER_TIMESTAMP (buffer)))
+ goto qos;
+
+ /* Will blend inplace if buffer is writable */
+ outbuf = gst_buffer_make_writable (buffer);
+ gst_video_frame_map (&outframe, &self->vinfo, outbuf, GST_MAP_READWRITE);
+ gst_video_frame_map (&inframe, &self->vinfo, outbuf, GST_MAP_READ);
+
+ gst_video_frame_map (&maskframe, &self->minfo, mask, GST_MAP_READ);
+
+ switch (GST_VIDEO_INFO_FORMAT (&self->vinfo)) {
+ case GST_VIDEO_FORMAT_AYUV:
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ if (self->mask_bpp == 16)
+ gst_shape_wipe_blend_argb_16 (self, &inframe, &maskframe, &outframe);
+ else
+ gst_shape_wipe_blend_argb_8 (self, &inframe, &maskframe, &outframe);
+ break;
+ case GST_VIDEO_FORMAT_BGRA:
+ case GST_VIDEO_FORMAT_RGBA:
+ if (self->mask_bpp == 16)
+ gst_shape_wipe_blend_bgra_16 (self, &inframe, &maskframe, &outframe);
+ else
+ gst_shape_wipe_blend_bgra_8 (self, &inframe, &maskframe, &outframe);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+
+ gst_video_frame_unmap (&outframe);
+ gst_video_frame_unmap (&inframe);
+
+ gst_video_frame_unmap (&maskframe);
+
+ gst_buffer_unref (mask);
+
+ ret = gst_pad_push (self->srcpad, outbuf);
+ if (G_UNLIKELY (ret != GST_FLOW_OK))
+ goto push_failed;
+
+ return ret;
+
+ /* Errors */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (self, "No valid caps yet");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+shutdown:
+ {
+ GST_DEBUG_OBJECT (self, "Shutting down");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_FLUSHING;
+ }
+qos:
+ {
+ GST_DEBUG_OBJECT (self, "Dropping buffer because of QoS");
+ gst_buffer_unref (buffer);
+ gst_buffer_unref (mask);
+ return GST_FLOW_OK;
+ }
+push_failed:
+ {
+ if (ret != GST_FLOW_FLUSHING)
+ GST_ERROR_OBJECT (self, "Pushing buffer downstream failed: %s",
+ gst_flow_get_name (ret));
+ return ret;
+ }
+}
+
+static GstFlowReturn
+gst_shape_wipe_mask_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ g_mutex_lock (&self->mask_mutex);
+ GST_DEBUG_OBJECT (self, "Setting new mask buffer: %" GST_PTR_FORMAT, buffer);
+
+ gst_buffer_replace (&self->mask, buffer);
+ g_cond_signal (&self->mask_cond);
+ g_mutex_unlock (&self->mask_mutex);
+
+ gst_buffer_unref (buffer);
+
+ return ret;
+}
+
+static GstStateChangeReturn
+gst_shape_wipe_change_state (GstElement * element, GstStateChange transition)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ self->shutdown = FALSE;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ /* Unblock video sink chain function */
+ g_mutex_lock (&self->mask_mutex);
+ self->shutdown = TRUE;
+ g_cond_signal (&self->mask_cond);
+ g_mutex_unlock (&self->mask_mutex);
+ break;
+ default:
+ break;
+ }
+
+ if (GST_ELEMENT_CLASS (parent_class)->change_state)
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_shape_wipe_reset (self);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_shape_wipe_video_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ ret = gst_shape_wipe_video_sink_setcaps (self, caps);
+ gst_event_unref (event);
+ break;
+ }
+ case GST_EVENT_SEGMENT:
+ {
+ GstSegment seg;
+
+ gst_event_copy_segment (event, &seg);
+ if (seg.format == GST_FORMAT_TIME) {
+ GST_DEBUG_OBJECT (pad,
+ "Got SEGMENT event in GST_FORMAT_TIME %" GST_PTR_FORMAT, &seg);
+ self->segment = seg;
+ } else {
+ gst_segment_init (&self->segment, GST_FORMAT_TIME);
+ }
+ }
+ /* fall through */
+ case GST_EVENT_FLUSH_STOP:
+ gst_shape_wipe_reset_qos (self);
+ /* fall through */
+ default:
+ ret = gst_pad_push_event (self->srcpad, event);
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_shape_wipe_mask_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
+
+ GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (event, &caps);
+ gst_shape_wipe_mask_sink_setcaps (self, caps);
+ break;
+ }
+ case GST_EVENT_FLUSH_STOP:
+ g_mutex_lock (&self->mask_mutex);
+ gst_buffer_replace (&self->mask, NULL);
+ g_mutex_unlock (&self->mask_mutex);
+ break;
+ default:
+ break;
+ }
+
+ /* Dropping all events here */
+ gst_event_unref (event);
+
+ return TRUE;
+}
+
+static gboolean
+gst_shape_wipe_mask_sink_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Handling query of type '%s'",
+ gst_query_type_get_name (GST_QUERY_TYPE (query)));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_shape_wipe_mask_sink_getcaps (self, pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+
+ return ret;
+}
+
+
+static gboolean
+gst_shape_wipe_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+ GstShapeWipe *self = GST_SHAPE_WIPE (parent);
+ gboolean ret;
+
+ GST_LOG_OBJECT (pad, "Got %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_QOS:{
+ GstQOSType type;
+ GstClockTimeDiff diff;
+ GstClockTime timestamp;
+ gdouble proportion;
+
+ gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
+
+ gst_shape_wipe_update_qos (self, proportion, diff, timestamp);
+ }
+ /* fall through */
+ default:
+ ret = gst_pad_push_event (self->video_sinkpad, event);
+ break;
+ }
+
+ return ret;
+}
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ return GST_ELEMENT_REGISTER (shapewipe, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ shapewipe,
+ "Shape Wipe transition filter",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/shapewipe/gstshapewipe.h b/gst/shapewipe/gstshapewipe.h
new file mode 100644
index 0000000000..374e6c98a8
--- /dev/null
+++ b/gst/shapewipe/gstshapewipe.h
@@ -0,0 +1,83 @@
+/* GStreamer
+ * Copyright (C) 2009,2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_SHAPE_WIPE_H__
+#define __GST_SHAPE_WIPE_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_SHAPE_WIPE \
+ (gst_shape_wipe_get_type())
+#define GST_SHAPE_WIPE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SHAPE_WIPE,GstShapeWipe))
+#define GST_SHAPE_WIPE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SHAPE_WIPE,GstShapeWipeClass))
+#define GST_SHAPE_WIPE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_SHAPE_WIPE,GstShapeWipeClass))
+#define GST_IS_SHAPE_WIPE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SHAPE_WIPE))
+#define GST_IS_SHAPE_WIPE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SHAPE_WIPE))
+
+typedef struct _GstShapeWipe GstShapeWipe;
+typedef struct _GstShapeWipeClass GstShapeWipeClass;
+
+struct _GstShapeWipe
+{
+ GstElement parent;
+
+ /* private */
+ GstPad *video_sinkpad;
+ GstPad *mask_sinkpad;
+
+ GstPad *srcpad;
+
+ GstSegment segment;
+
+ GstBuffer *mask;
+ gfloat mask_position;
+ gfloat mask_border;
+ GMutex mask_mutex;
+ GCond mask_cond;
+ gint mask_bpp;
+
+ GstVideoInfo vinfo;
+ GstVideoInfo minfo;
+
+ gboolean shutdown;
+
+ gdouble proportion;
+ GstClockTime earliest_time;
+ GstClockTime frame_duration;
+};
+
+struct _GstShapeWipeClass
+{
+ GstElementClass parent_class;
+};
+
+GType gst_shape_wipe_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (shapewipe);
+
+G_END_DECLS
+
+#endif /* __GST_SHAPE_WIPE_H__ */
diff --git a/gst/shapewipe/meson.build b/gst/shapewipe/meson.build
new file mode 100644
index 0000000000..2dff8df4f2
--- /dev/null
+++ b/gst/shapewipe/meson.build
@@ -0,0 +1,10 @@
+gstshapewipe = library('gstshapewipe',
+ 'gstshapewipe.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gio_dep, gst_dep, gstvideo_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstshapewipe, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstshapewipe]
diff --git a/gst/smpte/barboxwipes.c b/gst/smpte/barboxwipes.c
new file mode 100644
index 0000000000..4f4e62e42a
--- /dev/null
+++ b/gst/smpte/barboxwipes.c
@@ -0,0 +1,963 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "paint.h"
+#include "gstmask.h"
+
+enum
+{
+ BOX_VERTICAL = 1,
+ BOX_HORIZONTAL = 2,
+ BOX_CLOCK = 3,
+ TRIGANLE_LINEAR = 4
+};
+
+static const gint boxes_1b[][7] = {
+#define WIPE_B1_1 0
+ {BOX_VERTICAL, 0, 0, 0, 1, 1, 1},
+#define WIPE_B1_2 1
+ {BOX_HORIZONTAL, 0, 0, 0, 1, 1, 1}
+};
+
+static const gint boxes_2b[][7 * 2] = {
+#define WIPE_B2_21 0
+ {BOX_VERTICAL, 0, 0, 1, 1, 2, 0,
+ BOX_VERTICAL, 1, 0, 0, 2, 2, 1},
+#define WIPE_B2_22 1
+ {BOX_HORIZONTAL, 0, 0, 1, 2, 1, 0,
+ BOX_HORIZONTAL, 0, 1, 0, 2, 2, 1},
+};
+
+static const gint box_clock_1b[][1 * 10] = {
+#define WIPE_B1_241 0
+ {BOX_CLOCK, 0, 0, 0, 1, 0, 0, 0, 1, 1},
+#define WIPE_B1_242 1
+ {BOX_CLOCK, 0, 1, 0, 1, 1, 0, 0, 0, 1},
+#define WIPE_B1_243 2
+ {BOX_CLOCK, 1, 1, 0, 0, 1, 0, 1, 0, 1},
+#define WIPE_B1_244 3
+ {BOX_CLOCK, 1, 0, 0, 0, 0, 0, 1, 1, 1},
+};
+
+#define WIPE_B2_221 0
+static const gint box_clock_2b[][2 * 10] = {
+#define WIPE_B2_221 0
+ {BOX_CLOCK, 1, 0, 0, 2, 0, 0, 1, 2, 1,
+ BOX_CLOCK, 1, 0, 0, 1, 2, 1, 0, 0, 2},
+#define WIPE_B2_222 1
+ {BOX_CLOCK, 2, 1, 0, 2, 2, 0, 0, 1, 1,
+ BOX_CLOCK, 2, 1, 0, 0, 1, 1, 2, 0, 2},
+#define WIPE_B2_223 2
+ {BOX_CLOCK, 1, 2, 0, 0, 2, 0, 1, 0, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 0, 1, 2, 2, 2},
+#define WIPE_B2_224 3
+ {BOX_CLOCK, 0, 1, 0, 0, 0, 0, 2, 1, 1,
+ BOX_CLOCK, 0, 1, 0, 2, 1, 1, 0, 2, 2},
+#define WIPE_B2_225 4
+ {BOX_CLOCK, 1, 0, 0, 2, 0, 0, 1, 2, 1,
+ BOX_CLOCK, 1, 2, 0, 0, 2, 0, 1, 0, 1},
+#define WIPE_B2_226 5
+ {BOX_CLOCK, 0, 1, 0, 0, 0, 0, 2, 1, 1,
+ BOX_CLOCK, 2, 1, 0, 2, 2, 0, 0, 1, 1},
+#define WIPE_B2_231 6
+ {BOX_CLOCK, 1, 0, 0, 1, 2, 0, 2, 0, 1,
+ BOX_CLOCK, 1, 0, 0, 1, 2, 0, 0, 0, 1},
+#define WIPE_B2_232 7
+ {BOX_CLOCK, 2, 1, 0, 0, 1, 0, 2, 0, 1,
+ BOX_CLOCK, 2, 1, 0, 0, 1, 0, 2, 2, 1},
+#define WIPE_B2_233 8
+ {BOX_CLOCK, 1, 2, 0, 1, 0, 0, 2, 2, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 0, 0, 0, 2, 1},
+#define WIPE_B2_234 9
+ {BOX_CLOCK, 0, 1, 0, 2, 1, 0, 0, 0, 1,
+ BOX_CLOCK, 0, 1, 0, 2, 1, 0, 0, 2, 1},
+#define WIPE_B2_251 10
+ {BOX_CLOCK, 0, 0, 0, 1, 0, 0, 0, 2, 1,
+ BOX_CLOCK, 2, 0, 0, 1, 0, 0, 2, 2, 1},
+#define WIPE_B2_252 11
+ {BOX_CLOCK, 0, 0, 0, 0, 1, 0, 2, 0, 1,
+ BOX_CLOCK, 0, 2, 0, 0, 1, 0, 2, 2, 1},
+#define WIPE_B2_253 12
+ {BOX_CLOCK, 0, 2, 0, 1, 2, 0, 0, 0, 1,
+ BOX_CLOCK, 2, 2, 0, 1, 2, 0, 2, 0, 1},
+#define WIPE_B2_254 13
+ {BOX_CLOCK, 2, 0, 0, 2, 1, 0, 0, 0, 1,
+ BOX_CLOCK, 2, 2, 0, 2, 1, 0, 0, 2, 1},
+};
+
+static const gint box_clock_4b[][4 * 10] = {
+#define WIPE_B4_201 0
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 0, 2, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 1, 1, 2, 2,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 2, 0, 1, 3,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 3, 1, 0, 4},
+#define WIPE_B4_202 1
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 3, 2, 1, 4,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 0, 1, 2, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 1, 0, 1, 2,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 2, 1, 0, 3},
+#define WIPE_B4_203 2
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 2, 2, 1, 3,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 3, 1, 2, 4,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 0, 0, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 1, 1, 0, 2},
+#define WIPE_B4_204 3
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 1, 2, 1, 2,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 2, 1, 2, 3,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 3, 0, 1, 4,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 0, 1, 0, 1},
+#define WIPE_B4_205 4
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 0, 2, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 1, 1, 2, 2,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 0, 0, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 1, 1, 0, 2},
+#define WIPE_B4_206 5
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 1, 2, 1, 2,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 0, 1, 2, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 1, 0, 1, 2,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 0, 1, 0, 1},
+#define WIPE_B4_207 6
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 0, 2, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 0, 1, 2, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 0, 0, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 0, 1, 0, 1},
+#define WIPE_B4_211 7
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 0, 2, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 1, 1, 2, 2,
+ BOX_CLOCK, 1, 1, 0, 1, 0, 0, 0, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 1, 1, 2, 2},
+#define WIPE_B4_212 8
+ {BOX_CLOCK, 1, 1, 0, 2, 1, 0, 1, 0, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 0, 1, 0, 1, 2,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 0, 1, 2, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 1, 0, 1, 2},
+#define WIPE_B4_213 9
+ {BOX_CLOCK, 1, 1, 0, 1, 0, 0, 2, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 0, 0, 0, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 0, 2, 1, 1,
+ BOX_CLOCK, 1, 1, 0, 1, 2, 0, 0, 1, 1},
+#define WIPE_B4_214 10
+ {BOX_CLOCK, 1, 1, 0, 2, 1, 0, 1, 0, 1,
+ BOX_CLOCK, 1, 1, 0, 2, 1, 0, 1, 2, 1,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 0, 1, 0, 1,
+ BOX_CLOCK, 1, 1, 0, 0, 1, 0, 1, 2, 1},
+#define WIPE_B4_227 11
+ {BOX_CLOCK, 1, 0, 0, 2, 0, 0, 1, 1, 1,
+ BOX_CLOCK, 1, 0, 0, 1, 1, 1, 0, 0, 2,
+ BOX_CLOCK, 1, 2, 0, 2, 2, 0, 1, 1, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 1, 1, 0, 2, 2},
+#define WIPE_B4_228 12
+ {BOX_CLOCK, 0, 1, 0, 0, 0, 0, 1, 1, 1,
+ BOX_CLOCK, 0, 1, 0, 1, 1, 1, 0, 2, 2,
+ BOX_CLOCK, 2, 1, 0, 2, 0, 0, 1, 1, 1,
+ BOX_CLOCK, 2, 1, 0, 1, 1, 1, 2, 2, 2},
+#define WIPE_B4_235 13
+ {BOX_CLOCK, 1, 0, 0, 1, 1, 0, 0, 0, 1,
+ BOX_CLOCK, 1, 0, 0, 1, 1, 0, 2, 0, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 1, 0, 2, 2, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 1, 0, 0, 2, 1},
+#define WIPE_B4_236 14
+ {BOX_CLOCK, 0, 1, 0, 1, 1, 0, 0, 0, 1,
+ BOX_CLOCK, 0, 1, 0, 1, 1, 0, 0, 2, 1,
+ BOX_CLOCK, 2, 1, 0, 1, 1, 0, 2, 0, 1,
+ BOX_CLOCK, 2, 1, 0, 1, 1, 0, 2, 2, 1},
+};
+
+static const gint box_clock_8b[][8 * 10] = {
+#define WIPE_B8_261 0
+ {BOX_CLOCK, 2, 1, 0, 2, 2, 0, 4, 1, 1,
+ BOX_CLOCK, 2, 1, 0, 4, 1, 1, 2, 0, 2,
+ BOX_CLOCK, 2, 1, 0, 2, 0, 2, 0, 1, 3,
+ BOX_CLOCK, 2, 1, 0, 0, 1, 3, 2, 2, 4,
+ BOX_CLOCK, 2, 3, 0, 2, 2, 0, 4, 3, 1,
+ BOX_CLOCK, 2, 3, 0, 4, 3, 1, 2, 4, 2,
+ BOX_CLOCK, 2, 3, 0, 2, 4, 2, 0, 3, 3,
+ BOX_CLOCK, 2, 3, 0, 0, 3, 3, 2, 2, 4},
+#define WIPE_B8_262 1
+ {BOX_CLOCK, 1, 2, 0, 2, 2, 0, 1, 0, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 0, 1, 0, 2, 2,
+ BOX_CLOCK, 1, 2, 0, 0, 2, 2, 1, 4, 3,
+ BOX_CLOCK, 1, 2, 0, 1, 4, 3, 2, 2, 4,
+ BOX_CLOCK, 3, 2, 0, 2, 2, 0, 3, 0, 1,
+ BOX_CLOCK, 3, 2, 0, 3, 0, 1, 4, 2, 2,
+ BOX_CLOCK, 3, 2, 0, 4, 2, 2, 3, 4, 3,
+ BOX_CLOCK, 3, 2, 0, 3, 4, 3, 2, 2, 4},
+#define WIPE_B8_263 2
+ {BOX_CLOCK, 2, 1, 0, 2, 0, 0, 4, 1, 1,
+ BOX_CLOCK, 2, 1, 0, 4, 1, 1, 2, 2, 2,
+ BOX_CLOCK, 2, 1, 0, 2, 0, 0, 0, 1, 1,
+ BOX_CLOCK, 2, 1, 0, 0, 1, 1, 2, 2, 2,
+ BOX_CLOCK, 2, 3, 0, 2, 4, 0, 4, 3, 1,
+ BOX_CLOCK, 2, 3, 0, 4, 3, 1, 2, 2, 2,
+ BOX_CLOCK, 2, 3, 0, 2, 4, 0, 0, 3, 1,
+ BOX_CLOCK, 2, 3, 0, 0, 3, 1, 2, 2, 2},
+#define WIPE_B8_264 3
+ {BOX_CLOCK, 1, 2, 0, 0, 2, 0, 1, 0, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 0, 1, 2, 2, 2,
+ BOX_CLOCK, 1, 2, 0, 0, 2, 0, 1, 4, 1,
+ BOX_CLOCK, 1, 2, 0, 1, 4, 1, 2, 2, 2,
+ BOX_CLOCK, 3, 2, 0, 4, 2, 0, 3, 0, 1,
+ BOX_CLOCK, 3, 2, 0, 3, 0, 1, 2, 2, 2,
+ BOX_CLOCK, 3, 2, 0, 4, 2, 0, 3, 4, 1,
+ BOX_CLOCK, 3, 2, 0, 3, 4, 1, 2, 2, 2},
+};
+
+static const gint triangles_2t[][2 * 9] = {
+ /* 3 -> 6 */
+#define WIPE_T2_3 0
+ {0, 0, 0, 0, 1, 1, 1, 1, 1,
+ 1, 0, 1, 0, 0, 0, 1, 1, 1},
+#define WIPE_T2_4 WIPE_T2_3+1
+ {0, 0, 1, 1, 0, 0, 0, 1, 1,
+ 1, 0, 0, 0, 1, 1, 1, 1, 1},
+#define WIPE_T2_5 WIPE_T2_4+1
+ {0, 0, 1, 0, 1, 1, 1, 1, 0,
+ 1, 0, 1, 0, 0, 1, 1, 1, 0},
+#define WIPE_T2_6 WIPE_T2_5+1
+ {0, 0, 1, 1, 0, 1, 0, 1, 0,
+ 1, 0, 1, 0, 1, 0, 1, 1, 1},
+#define WIPE_T2_41 WIPE_T2_6+1
+ {0, 0, 0, 1, 0, 1, 0, 1, 1,
+ 1, 0, 1, 0, 1, 1, 1, 1, 2},
+#define WIPE_T2_42 WIPE_T2_41+1
+ {0, 0, 1, 1, 0, 0, 1, 1, 1,
+ 0, 0, 1, 0, 1, 2, 1, 1, 1},
+#define WIPE_T2_45 WIPE_T2_42+1
+ {0, 0, 1, 1, 0, 0, 0, 1, 0,
+ 1, 0, 0, 0, 1, 0, 1, 1, 1},
+#define WIPE_T2_46 WIPE_T2_45+1
+ {0, 0, 0, 1, 0, 1, 1, 1, 0,
+ 0, 0, 0, 0, 1, 1, 1, 1, 0},
+#define WIPE_T2_245 WIPE_T2_46+1
+ {0, 0, 0, 2, 0, 0, 2, 2, 1,
+ 2, 2, 0, 0, 2, 0, 0, 0, 1},
+#define WIPE_T2_246 WIPE_T2_245+1
+ {0, 2, 0, 0, 0, 0, 2, 0, 1,
+ 2, 0, 0, 2, 2, 0, 0, 2, 1},
+};
+
+static const gint triangles_3t[][3 * 9] = {
+ /* 23 -> 26 */
+#define WIPE_T3_23 0
+ {0, 0, 1, 1, 0, 0, 0, 2, 1,
+ 1, 0, 0, 0, 2, 1, 2, 2, 1,
+ 1, 0, 0, 2, 0, 1, 2, 2, 1},
+#define WIPE_T3_24 1
+ {0, 0, 1, 2, 0, 1, 2, 1, 0,
+ 0, 0, 1, 2, 1, 0, 0, 2, 1,
+ 2, 1, 0, 0, 2, 1, 2, 2, 1},
+#define WIPE_T3_25 2
+ {0, 0, 1, 0, 2, 1, 1, 2, 0,
+ 0, 0, 1, 2, 0, 1, 1, 2, 0,
+ 2, 0, 1, 1, 2, 0, 2, 2, 1},
+#define WIPE_T3_26 3
+ {0, 0, 1, 2, 0, 1, 0, 1, 0,
+ 2, 0, 1, 0, 1, 0, 2, 2, 1,
+ 0, 1, 0, 0, 2, 1, 2, 2, 1},
+};
+
+static const gint triangles_4t[][4 * 9] = {
+#define WIPE_T4_61 0
+ {0, 0, 1, 1, 0, 0, 1, 2, 1,
+ 0, 0, 1, 0, 2, 2, 1, 2, 1,
+ 1, 0, 0, 2, 0, 1, 1, 2, 1,
+ 2, 0, 1, 1, 2, 1, 2, 2, 2},
+#define WIPE_T4_62 1
+ {0, 0, 2, 2, 0, 1, 0, 1, 1,
+ 2, 0, 1, 0, 1, 1, 2, 1, 0,
+ 0, 1, 1, 2, 1, 0, 2, 2, 1,
+ 0, 1, 1, 0, 2, 2, 2, 2, 1},
+#define WIPE_T4_63 2
+ {0, 0, 2, 1, 0, 1, 0, 2, 1,
+ 1, 0, 1, 0, 2, 1, 1, 2, 0,
+ 1, 0, 1, 1, 2, 0, 2, 2, 1,
+ 1, 0, 1, 2, 0, 2, 2, 2, 1},
+#define WIPE_T4_64 3
+ {0, 0, 1, 2, 0, 2, 2, 1, 1,
+ 0, 0, 1, 0, 1, 0, 2, 1, 1,
+ 0, 1, 0, 2, 1, 1, 0, 2, 1,
+ 2, 1, 1, 0, 2, 1, 2, 2, 2},
+#define WIPE_T4_65 4
+ {0, 0, 0, 1, 0, 1, 1, 2, 0,
+ 0, 0, 0, 0, 2, 1, 1, 2, 0,
+ 1, 0, 1, 2, 0, 0, 1, 2, 0,
+ 2, 0, 0, 1, 2, 0, 2, 2, 1},
+#define WIPE_T4_66 5
+ {0, 0, 1, 2, 0, 0, 0, 1, 0,
+ 2, 0, 0, 0, 1, 0, 2, 1, 1,
+ 0, 1, 0, 2, 1, 1, 2, 2, 0,
+ 0, 1, 0, 0, 2, 1, 2, 2, 0},
+#define WIPE_T4_67 6
+ {0, 0, 1, 1, 0, 0, 0, 2, 0,
+ 1, 0, 0, 0, 2, 0, 1, 2, 1,
+ 1, 0, 0, 1, 2, 1, 2, 2, 0,
+ 1, 0, 0, 2, 0, 1, 2, 2, 0},
+#define WIPE_T4_68 7
+ {0, 0, 0, 2, 0, 1, 2, 1, 0,
+ 0, 0, 0, 0, 1, 1, 2, 1, 0,
+ 0, 1, 1, 2, 1, 0, 0, 2, 0,
+ 2, 1, 0, 0, 2, 0, 2, 2, 1},
+#define WIPE_T4_101 8
+ {0, 0, 1, 2, 0, 1, 1, 1, 0,
+ 0, 0, 1, 1, 1, 0, 0, 2, 1,
+ 1, 1, 0, 0, 2, 1, 2, 2, 1,
+ 2, 0, 1, 1, 1, 0, 2, 2, 1},
+};
+
+static const gint triangles_8t[][8 * 9] = {
+ /* 7 */
+#define WIPE_T8_7 0
+ {0, 0, 0, 1, 0, 1, 1, 1, 1,
+ 1, 0, 1, 2, 0, 0, 1, 1, 1,
+ 2, 0, 0, 1, 1, 1, 2, 1, 1,
+ 1, 1, 1, 2, 1, 1, 2, 2, 0,
+ 1, 1, 1, 1, 2, 1, 2, 2, 0,
+ 1, 1, 1, 0, 2, 0, 1, 2, 1,
+ 0, 1, 1, 1, 1, 1, 0, 2, 0,
+ 0, 0, 0, 0, 1, 1, 1, 1, 1},
+#define WIPE_T8_43 1
+ {0, 0, 1, 1, 0, 0, 1, 1, 1,
+ 1, 0, 0, 2, 0, 1, 1, 1, 1,
+ 2, 0, 1, 1, 1, 1, 2, 1, 2,
+ 1, 1, 1, 2, 1, 2, 2, 2, 1,
+ 1, 1, 1, 1, 2, 0, 2, 2, 1,
+ 1, 1, 1, 0, 2, 1, 1, 2, 0,
+ 0, 1, 2, 1, 1, 1, 0, 2, 1,
+ 0, 0, 1, 0, 1, 2, 1, 1, 1},
+#define WIPE_T8_44 2
+ {0, 0, 1, 1, 0, 2, 1, 1, 1,
+ 1, 0, 2, 2, 0, 1, 1, 1, 1,
+ 2, 0, 1, 1, 1, 1, 2, 1, 0,
+ 1, 1, 1, 2, 1, 0, 2, 2, 1,
+ 1, 1, 1, 1, 2, 2, 2, 2, 1,
+ 1, 1, 1, 0, 2, 1, 1, 2, 2,
+ 0, 1, 0, 1, 1, 1, 0, 2, 1,
+ 0, 0, 1, 0, 1, 0, 1, 1, 1},
+#define WIPE_T8_47 3
+ {0, 0, 0, 1, 0, 1, 1, 1, 0,
+ 1, 0, 1, 2, 0, 0, 1, 1, 0,
+ 2, 0, 0, 1, 1, 0, 2, 1, 1,
+ 1, 1, 0, 2, 1, 1, 2, 2, 0,
+ 1, 1, 0, 1, 2, 1, 2, 2, 0,
+ 1, 1, 0, 0, 2, 0, 1, 2, 1,
+ 0, 1, 1, 1, 1, 0, 0, 2, 0,
+ 0, 0, 0, 0, 1, 1, 1, 1, 0},
+#define WIPE_T8_48 4
+ {0, 0, 1, 1, 0, 0, 0, 1, 0,
+ 1, 0, 0, 0, 1, 0, 1, 1, 1,
+ 1, 0, 0, 2, 0, 1, 2, 1, 0,
+ 1, 0, 0, 1, 1, 1, 2, 1, 0,
+ 0, 1, 0, 1, 1, 1, 1, 2, 0,
+ 0, 1, 0, 0, 2, 1, 1, 2, 0,
+ 1, 1, 1, 2, 1, 0, 1, 2, 0,
+ 2, 1, 0, 1, 2, 0, 2, 2, 1},
+};
+
+static const gint triangles_16t[][16 * 9] = {
+ /* 8 */
+#define WIPE_T16_8 0
+ {0, 0, 1, 2, 0, 1, 1, 1, 0,
+ 2, 0, 1, 1, 1, 0, 2, 2, 1,
+ 1, 1, 0, 0, 2, 1, 2, 2, 1,
+ 0, 0, 1, 1, 1, 0, 0, 2, 1,
+ 2, 0, 1, 4, 0, 1, 3, 1, 0,
+ 4, 0, 1, 3, 1, 0, 4, 2, 1,
+ 3, 1, 0, 2, 2, 1, 4, 2, 1,
+ 2, 0, 1, 3, 1, 0, 2, 2, 1,
+ 0, 2, 1, 2, 2, 1, 1, 3, 0,
+ 2, 2, 1, 1, 3, 0, 2, 4, 1,
+ 1, 3, 0, 0, 4, 1, 2, 4, 1,
+ 0, 2, 1, 1, 3, 0, 0, 4, 1,
+ 2, 2, 1, 4, 2, 1, 3, 3, 0,
+ 4, 2, 1, 3, 3, 0, 4, 4, 1,
+ 3, 3, 0, 2, 4, 1, 4, 4, 1,
+ 2, 2, 1, 3, 3, 0, 2, 4, 1}
+};
+
+typedef struct _GstWipeConfig GstWipeConfig;
+
+struct _GstWipeConfig
+{
+ const gint *objects;
+ gint nobjects;
+ gint xscale;
+ gint yscale;
+ gint cscale;
+};
+
+static const GstWipeConfig wipe_config[] = {
+#define WIPE_CONFIG_1 0
+ {boxes_1b[WIPE_B1_1], 1, 0, 0, 0}, /* 1 */
+#define WIPE_CONFIG_2 WIPE_CONFIG_1+1
+ {boxes_1b[WIPE_B1_2], 1, 0, 0, 0}, /* 2 */
+#define WIPE_CONFIG_3 WIPE_CONFIG_2+1
+ {triangles_2t[WIPE_T2_3], 2, 0, 0, 0}, /* 3 */
+#define WIPE_CONFIG_4 WIPE_CONFIG_3+1
+ {triangles_2t[WIPE_T2_4], 2, 0, 0, 0}, /* 4 */
+#define WIPE_CONFIG_5 WIPE_CONFIG_4+1
+ {triangles_2t[WIPE_T2_5], 2, 0, 0, 0}, /* 5 */
+#define WIPE_CONFIG_6 WIPE_CONFIG_5+1
+ {triangles_2t[WIPE_T2_6], 2, 0, 0, 0}, /* 6 */
+#define WIPE_CONFIG_7 WIPE_CONFIG_6+1
+ {triangles_8t[WIPE_T8_7], 8, 1, 1, 0}, /* 7 */
+#define WIPE_CONFIG_8 WIPE_CONFIG_7+1
+ {triangles_16t[WIPE_T16_8], 16, 2, 2, 0}, /* 8 */
+
+#define WIPE_CONFIG_21 WIPE_CONFIG_8+1
+ {boxes_2b[WIPE_B2_21], 2, 1, 1, 0}, /* 21 */
+#define WIPE_CONFIG_22 WIPE_CONFIG_21+1
+ {boxes_2b[WIPE_B2_22], 2, 1, 1, 0}, /* 22 */
+
+#define WIPE_CONFIG_23 WIPE_CONFIG_22+1
+ {triangles_3t[WIPE_T3_23], 3, 1, 1, 0}, /* 23 */
+#define WIPE_CONFIG_24 WIPE_CONFIG_23+1
+ {triangles_3t[WIPE_T3_24], 3, 1, 1, 0}, /* 24 */
+#define WIPE_CONFIG_25 WIPE_CONFIG_24+1
+ {triangles_3t[WIPE_T3_25], 3, 1, 1, 0}, /* 25 */
+#define WIPE_CONFIG_26 WIPE_CONFIG_25+1
+ {triangles_3t[WIPE_T3_26], 3, 1, 1, 0}, /* 26 */
+#define WIPE_CONFIG_41 WIPE_CONFIG_26+1
+ {triangles_2t[WIPE_T2_41], 2, 0, 0, 1}, /* 41 */
+#define WIPE_CONFIG_42 WIPE_CONFIG_41+1
+ {triangles_2t[WIPE_T2_42], 2, 0, 0, 1}, /* 42 */
+#define WIPE_CONFIG_43 WIPE_CONFIG_42+1
+ {triangles_8t[WIPE_T8_43], 8, 1, 1, 1}, /* 43 */
+#define WIPE_CONFIG_44 WIPE_CONFIG_43+1
+ {triangles_8t[WIPE_T8_44], 8, 1, 1, 1}, /* 44 */
+#define WIPE_CONFIG_45 WIPE_CONFIG_44+1
+ {triangles_2t[WIPE_T2_45], 2, 0, 0, 0}, /* 45 */
+#define WIPE_CONFIG_46 WIPE_CONFIG_45+1
+ {triangles_2t[WIPE_T2_46], 2, 0, 0, 0}, /* 46 */
+#define WIPE_CONFIG_47 WIPE_CONFIG_46+1
+ {triangles_8t[WIPE_T8_47], 8, 1, 1, 0}, /* 47 */
+#define WIPE_CONFIG_48 WIPE_CONFIG_47+1
+ {triangles_8t[WIPE_T8_48], 8, 1, 1, 0}, /* 48 */
+#define WIPE_CONFIG_61 WIPE_CONFIG_48+1
+ {triangles_4t[WIPE_T4_61], 4, 1, 1, 1}, /* 61 */
+#define WIPE_CONFIG_62 WIPE_CONFIG_61+1
+ {triangles_4t[WIPE_T4_62], 4, 1, 1, 1}, /* 62 */
+#define WIPE_CONFIG_63 WIPE_CONFIG_62+1
+ {triangles_4t[WIPE_T4_63], 4, 1, 1, 1}, /* 63 */
+#define WIPE_CONFIG_64 WIPE_CONFIG_63+1
+ {triangles_4t[WIPE_T4_64], 4, 1, 1, 1}, /* 64 */
+#define WIPE_CONFIG_65 WIPE_CONFIG_64+1
+ {triangles_4t[WIPE_T4_65], 4, 1, 1, 0}, /* 65 */
+#define WIPE_CONFIG_66 WIPE_CONFIG_65+1
+ {triangles_4t[WIPE_T4_66], 4, 1, 1, 0}, /* 66 */
+#define WIPE_CONFIG_67 WIPE_CONFIG_66+1
+ {triangles_4t[WIPE_T4_67], 4, 1, 1, 0}, /* 67 */
+#define WIPE_CONFIG_68 WIPE_CONFIG_67+1
+ {triangles_4t[WIPE_T4_68], 4, 1, 1, 0}, /* 68 */
+#define WIPE_CONFIG_101 WIPE_CONFIG_68+1
+ {triangles_4t[WIPE_T4_101], 4, 1, 1, 0}, /* 101 */
+#define WIPE_CONFIG_201 WIPE_CONFIG_101+1
+ {box_clock_4b[WIPE_B4_201], 4, 1, 1, 2}, /* 201 */
+#define WIPE_CONFIG_202 WIPE_CONFIG_201+1
+ {box_clock_4b[WIPE_B4_202], 4, 1, 1, 2}, /* 202 */
+#define WIPE_CONFIG_203 WIPE_CONFIG_202+1
+ {box_clock_4b[WIPE_B4_203], 4, 1, 1, 2}, /* 203 */
+#define WIPE_CONFIG_204 WIPE_CONFIG_203+1
+ {box_clock_4b[WIPE_B4_204], 4, 1, 1, 2}, /* 204 */
+#define WIPE_CONFIG_205 WIPE_CONFIG_204+1
+ {box_clock_4b[WIPE_B4_205], 4, 1, 1, 1}, /* 205 */
+#define WIPE_CONFIG_206 WIPE_CONFIG_205+1
+ {box_clock_4b[WIPE_B4_206], 4, 1, 1, 1}, /* 206 */
+#define WIPE_CONFIG_207 WIPE_CONFIG_206+1
+ {box_clock_4b[WIPE_B4_207], 4, 1, 1, 0}, /* 207 */
+#define WIPE_CONFIG_211 WIPE_CONFIG_207+1
+ {box_clock_4b[WIPE_B4_211], 4, 1, 1, 1}, /* 211 */
+#define WIPE_CONFIG_212 WIPE_CONFIG_211+1
+ {box_clock_4b[WIPE_B4_212], 4, 1, 1, 1}, /* 212 */
+#define WIPE_CONFIG_213 WIPE_CONFIG_212+1
+ {box_clock_4b[WIPE_B4_213], 4, 1, 1, 0}, /* 213 */
+#define WIPE_CONFIG_214 WIPE_CONFIG_213+1
+ {box_clock_4b[WIPE_B4_214], 4, 1, 1, 0}, /* 214 */
+#define WIPE_CONFIG_221 WIPE_CONFIG_214+1
+ {box_clock_2b[WIPE_B2_221], 2, 1, 1, 1}, /* 221 */
+#define WIPE_CONFIG_222 WIPE_CONFIG_221+1
+ {box_clock_2b[WIPE_B2_222], 2, 1, 1, 1}, /* 222 */
+#define WIPE_CONFIG_223 WIPE_CONFIG_222+1
+ {box_clock_2b[WIPE_B2_223], 2, 1, 1, 1}, /* 223 */
+#define WIPE_CONFIG_224 WIPE_CONFIG_223+1
+ {box_clock_2b[WIPE_B2_224], 2, 1, 1, 1}, /* 224 */
+#define WIPE_CONFIG_225 WIPE_CONFIG_224+1
+ {box_clock_2b[WIPE_B2_225], 2, 1, 1, 0}, /* 225 */
+#define WIPE_CONFIG_226 WIPE_CONFIG_225+1
+ {box_clock_2b[WIPE_B2_226], 2, 1, 1, 0}, /* 226 */
+#define WIPE_CONFIG_227 WIPE_CONFIG_226+1
+ {box_clock_4b[WIPE_B4_227], 4, 1, 1, 1}, /* 227 */
+#define WIPE_CONFIG_228 WIPE_CONFIG_227+1
+ {box_clock_4b[WIPE_B4_228], 4, 1, 1, 1}, /* 228 */
+#define WIPE_CONFIG_231 WIPE_CONFIG_228+1
+ {box_clock_2b[WIPE_B2_231], 2, 1, 1, 0}, /* 231 */
+#define WIPE_CONFIG_232 WIPE_CONFIG_231+1
+ {box_clock_2b[WIPE_B2_232], 2, 1, 1, 0}, /* 232 */
+#define WIPE_CONFIG_233 WIPE_CONFIG_232+1
+ {box_clock_2b[WIPE_B2_233], 2, 1, 1, 0}, /* 233 */
+#define WIPE_CONFIG_234 WIPE_CONFIG_233+1
+ {box_clock_2b[WIPE_B2_234], 2, 1, 1, 0}, /* 234 */
+#define WIPE_CONFIG_235 WIPE_CONFIG_234+1
+ {box_clock_4b[WIPE_B4_235], 4, 1, 1, 0}, /* 235 */
+#define WIPE_CONFIG_236 WIPE_CONFIG_235+1
+ {box_clock_4b[WIPE_B4_236], 4, 1, 1, 0}, /* 236 */
+#define WIPE_CONFIG_241 WIPE_CONFIG_236+1
+ {box_clock_1b[WIPE_B1_241], 1, 0, 0, 0}, /* 241 */
+#define WIPE_CONFIG_242 WIPE_CONFIG_241+1
+ {box_clock_1b[WIPE_B1_242], 1, 0, 0, 0}, /* 242 */
+#define WIPE_CONFIG_243 WIPE_CONFIG_242+1
+ {box_clock_1b[WIPE_B1_243], 1, 0, 0, 0}, /* 243 */
+#define WIPE_CONFIG_244 WIPE_CONFIG_243+1
+ {box_clock_1b[WIPE_B1_244], 1, 0, 0, 0}, /* 244 */
+#define WIPE_CONFIG_245 WIPE_CONFIG_244+1
+ {triangles_2t[WIPE_T2_245], 2, 1, 1, 0}, /* 245 */
+#define WIPE_CONFIG_246 WIPE_CONFIG_245+1
+ {triangles_2t[WIPE_T2_246], 2, 1, 1, 0}, /* 246 */
+#define WIPE_CONFIG_251 WIPE_CONFIG_246+1
+ {box_clock_2b[WIPE_B2_251], 2, 1, 1, 0}, /* 251 */
+#define WIPE_CONFIG_252 WIPE_CONFIG_251+1
+ {box_clock_2b[WIPE_B2_252], 2, 1, 1, 0}, /* 252 */
+#define WIPE_CONFIG_253 WIPE_CONFIG_252+1
+ {box_clock_2b[WIPE_B2_253], 2, 1, 1, 0}, /* 253 */
+#define WIPE_CONFIG_254 WIPE_CONFIG_253+1
+ {box_clock_2b[WIPE_B2_254], 2, 1, 1, 0}, /* 254 */
+
+#define WIPE_CONFIG_261 WIPE_CONFIG_254+1
+ {box_clock_8b[WIPE_B8_261], 8, 2, 2, 2}, /* 261 */
+#define WIPE_CONFIG_262 WIPE_CONFIG_261+1
+ {box_clock_8b[WIPE_B8_262], 8, 2, 2, 2}, /* 262 */
+#define WIPE_CONFIG_263 WIPE_CONFIG_262+1
+ {box_clock_8b[WIPE_B8_263], 8, 2, 2, 1}, /* 263 */
+#define WIPE_CONFIG_264 WIPE_CONFIG_263+1
+ {box_clock_8b[WIPE_B8_264], 8, 2, 2, 1}, /* 264 */
+};
+
+/* Draw a mask composed of box and/or box-clock objects.
+ *
+ * config->objects is a flat gint array: each object begins with a type tag
+ * (BOX_VERTICAL, BOX_HORIZONTAL or BOX_CLOCK) followed by its coordinates,
+ * expressed in units of width/height/depth so one table works for any mask
+ * size and bit depth.
+ */
+static void
+gst_wipe_boxes_draw (GstMask * mask)
+{
+  const GstWipeConfig *config = mask->user_data;
+  const gint *impacts = config->objects;
+  gint width = (mask->width >> config->xscale);
+  gint height = (mask->height >> config->yscale);
+  gint depth = (1 << mask->bpp) >> config->cscale;
+
+  gint i;
+
+  for (i = 0; i < config->nobjects; i++) {
+    switch (impacts[0]) {
+      case BOX_VERTICAL:
+        /* vbox does not draw last pixels */
+        gst_smpte_paint_vbox (mask->data, mask->width,
+            impacts[1] * width, impacts[2] * height, impacts[3] * depth,
+            impacts[4] * width, impacts[5] * height, impacts[6] * depth);
+        impacts += 7;
+        break;
+      case BOX_HORIZONTAL:
+        /* hbox does not draw last pixels */
+        gst_smpte_paint_hbox (mask->data, mask->width,
+            impacts[1] * width, impacts[2] * height, impacts[3] * depth,
+            impacts[4] * width, impacts[5] * height, impacts[6] * depth);
+        impacts += 7;
+        break;
+      case BOX_CLOCK:
+      {
+        gint x0, y0, x1, y1, x2, y2;
+
+        /* make sure not to draw outside the area */
+        x0 = MIN (impacts[1] * width, mask->width - 1);
+        y0 = MIN (impacts[2] * height, mask->height - 1);
+        x1 = MIN (impacts[4] * width, mask->width - 1);
+        y1 = MIN (impacts[5] * height, mask->height - 1);
+        x2 = MIN (impacts[7] * width, mask->width - 1);
+        y2 = MIN (impacts[8] * height, mask->height - 1);
+
+        gst_smpte_paint_box_clock (mask->data, mask->width,
+            x0, y0, impacts[3] * depth, x1, y1, impacts[6] * depth,
+            x2, y2, impacts[9] * depth);
+        impacts += 10;
+        /* previously fell through to default; make the case exit explicit
+         * so a future non-empty default cannot change behavior silently */
+        break;
+      }
+      default:
+        break;
+    }
+  }
+}
+
+/* Draw a mask made of clock-style triangle sweeps.  Each object in
+ * config->objects is 9 gints: three (x, y, value) corner triples, scaled
+ * by width/height/depth so one table serves any mask size and depth. */
+static void
+gst_wipe_triangles_clock_draw (GstMask * mask)
+{
+ const GstWipeConfig *config = mask->user_data;
+ const gint *impacts = config->objects;
+ gint width = (mask->width >> config->xscale);
+ gint height = (mask->height >> config->yscale);
+ gint depth = (1 << mask->bpp) >> config->cscale;
+ gint i;
+
+ for (i = 0; i < config->nobjects; i++) {
+ gint x0, y0, x1, y1, x2, y2;
+
+ /* make sure not to draw outside the area */
+ x0 = MIN (impacts[0] * width, mask->width - 1);
+ y0 = MIN (impacts[1] * height, mask->height - 1);
+ x1 = MIN (impacts[3] * width, mask->width - 1);
+ y1 = MIN (impacts[4] * height, mask->height - 1);
+ x2 = MIN (impacts[6] * width, mask->width - 1);
+ y2 = MIN (impacts[7] * height, mask->height - 1);
+
+ gst_smpte_paint_triangle_clock (mask->data, mask->width,
+ x0, y0, impacts[2] * depth, x1, y1, impacts[5] * depth,
+ x2, y2, impacts[8] * depth);
+ /* advance to the next 9-int triangle record */
+ impacts += 9;
+ }
+}
+
+/* Draw a mask made of linearly interpolated triangles.  Same 9-int record
+ * layout as gst_wipe_triangles_clock_draw, but painted with a linear
+ * gradient between the corner values. */
+static void
+gst_wipe_triangles_draw (GstMask * mask)
+{
+ const GstWipeConfig *config = mask->user_data;
+ const gint *impacts = config->objects;
+ gint width = (mask->width >> config->xscale);
+ gint height = (mask->height >> config->yscale);
+ gint depth = (1 << mask->bpp) >> config->cscale;
+
+ gint i;
+
+ for (i = 0; i < config->nobjects; i++) {
+ gint x0, y0, x1, y1, x2, y2;
+
+ /* make sure not to draw outside the area */
+ x0 = MIN (impacts[0] * width, mask->width - 1);
+ y0 = MIN (impacts[1] * height, mask->height - 1);
+ x1 = MIN (impacts[3] * width, mask->width - 1);
+ y1 = MIN (impacts[4] * height, mask->height - 1);
+ x2 = MIN (impacts[6] * width, mask->width - 1);
+ y2 = MIN (impacts[7] * height, mask->height - 1);
+
+ gst_smpte_paint_triangle_linear (mask->data, mask->width,
+ x0, y0, impacts[2] * depth, x1, y1, impacts[5] * depth,
+ x2, y2, impacts[8] * depth);
+ /* advance to the next 9-int triangle record */
+ impacts += 9;
+ }
+}
+
+/* see also:
+ * http://www.w3c.rl.ac.uk/pasttalks/slidemaker/XML_Multimedia/htmls/transitions.html
+ */
+static const GstMaskDefinition definitions[] = {
+ {1, "bar-wipe-lr",
+ "A bar moves from left to right",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_1]},
+ {2, "bar-wipe-tb",
+ "A bar moves from top to bottom",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_2]},
+ {3, "box-wipe-tl",
+ "A box expands from the upper-left corner to the lower-right corner",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_3]},
+ {4, "box-wipe-tr",
+ "A box expands from the upper-right corner to the lower-left corner",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_4]},
+ {5, "box-wipe-br",
+ "A box expands from the lower-right corner to the upper-left corner",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_5]},
+ {6, "box-wipe-bl",
+ "A box expands from the lower-left corner to the upper-right corner",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_6]},
+ {7, "four-box-wipe-ci",
+ "A box shape expands from each of the four corners toward the center",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_7]},
+ {8, "four-box-wipe-co",
+ "A box shape expands from the center of each quadrant toward the corners of each quadrant",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_8]},
+ {21, "barndoor-v",
+ "A central, vertical line splits and expands toward the left and right edges",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_21]},
+ {22, "barndoor-h",
+ "A central, horizontal line splits and expands toward the top and bottom edges",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_22]},
+ {23, "box-wipe-tc",
+ "A box expands from the top edge's midpoint to the bottom corners",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_23]},
+ {24, "box-wipe-rc",
+ "A box expands from the right edge's midpoint to the left corners",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_24]},
+ {25, "box-wipe-bc",
+ "A box expands from the bottom edge's midpoint to the top corners",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_25]},
+ {26, "box-wipe-lc",
+ "A box expands from the left edge's midpoint to the right corners",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_26]},
+ {41, "diagonal-tl",
+ "A diagonal line moves from the upper-left corner to the lower-right corner",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_41]},
+ {42, "diagonal-tr",
+ "A diagonal line moves from the upper right corner to the lower-left corner",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_42]},
+ {43, "bowtie-v",
+ "Two wedge shapes slide in from the top and bottom edges toward the center",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_43]},
+ {44, "bowtie-h",
+ "Two wedge shapes slide in from the left and right edges toward the center",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_44]},
+ {45, "barndoor-dbl",
+ "A diagonal line from the lower-left to upper-right corners splits and expands toward the opposite corners",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_45]},
+ {46, "barndoor-dtl",
+ "A diagonal line from upper-left to lower-right corners splits and expands toward the opposite corners",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_46]},
+ {47, "misc-diagonal-dbd",
+ "Four wedge shapes split from the center and retract toward the four edges",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_47]},
+ {48, "misc-diagonal-dd",
+ "A diamond connecting the four edge midpoints simultaneously contracts toward the center and expands toward the edges",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_48]},
+ {61, "vee-d",
+ "A wedge shape moves from top to bottom",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_61]},
+ {62, "vee-l",
+ "A wedge shape moves from right to left",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_62]},
+ {63, "vee-u",
+ "A wedge shape moves from bottom to top",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_63]},
+ {64, "vee-r",
+ "A wedge shape moves from left to right",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_64]},
+ {65, "barnvee-d",
+ "A 'V' shape extending from the bottom edge's midpoint to the opposite corners contracts toward the center and expands toward the edges",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_65]},
+ {66, "barnvee-l",
+ "A 'V' shape extending from the left edge's midpoint to the opposite corners contracts toward the center and expands toward the edges",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_66]},
+ {67, "barnvee-u",
+ "A 'V' shape extending from the top edge's midpoint to the opposite corners contracts toward the center and expands toward the edges",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_67]},
+ {68, "barnvee-r",
+ "A 'V' shape extending from the right edge's midpoint to the opposite corners contracts toward the center and expands toward the edges",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_68]},
+ {101, "iris-rect",
+ "A rectangle expands from the center.",
+ gst_wipe_triangles_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_101]},
+ {201, "clock-cw12",
+ "A radial hand sweeps clockwise from the twelve o'clock position",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_201]},
+ {202, "clock-cw3",
+ "A radial hand sweeps clockwise from the three o'clock position",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_202]},
+ {203, "clock-cw6",
+ "A radial hand sweeps clockwise from the six o'clock position",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_203]},
+ {204, "clock-cw9",
+ "A radial hand sweeps clockwise from the nine o'clock position",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_204]},
+ {205, "pinwheel-tbv",
+ "Two radial hands sweep clockwise from the twelve and six o'clock positions",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_205]},
+ {206, "pinwheel-tbh",
+ "Two radial hands sweep clockwise from the nine and three o'clock positions",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_206]},
+ {207, "pinwheel-fb",
+ "Four radial hands sweep clockwise",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_207]},
+ {211, "fan-ct",
+ "A fan unfolds from the top edge, the fan axis at the center",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_211]},
+ {212, "fan-cr",
+ "A fan unfolds from the right edge, the fan axis at the center",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_212]},
+ {213, "doublefan-fov",
+ "Two fans, their axes at the center, unfold from the top and bottom",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_213]},
+ {214, "doublefan-foh",
+ "Two fans, their axes at the center, unfold from the left and right",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_214]},
+ {221, "singlesweep-cwt",
+ "A radial hand sweeps clockwise from the top edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_221]},
+ {222, "singlesweep-cwr",
+ "A radial hand sweeps clockwise from the right edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_222]},
+ {223, "singlesweep-cwb",
+ "A radial hand sweeps clockwise from the bottom edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_223]},
+ {224, "singlesweep-cwl",
+ "A radial hand sweeps clockwise from the left edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_224]},
+ {225, "doublesweep-pv",
+ "Two radial hands sweep clockwise and counter-clockwise from the top and bottom edges' midpoints",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_225]},
+ {226, "doublesweep-pd",
+ "Two radial hands sweep clockwise and counter-clockwise from the left and right edges' midpoints",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_226]},
+ {227, "doublesweep-ov",
+ "Two radial hands attached at the top and bottom edges' midpoints sweep from right to left",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_227]},
+ {228, "doublesweep-oh",
+ "Two radial hands attached at the left and right edges' midpoints sweep from top to bottom",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_228]},
+ {231, "fan-t",
+ "A fan unfolds from the bottom, the fan axis at the top edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_231]},
+ {232, "fan-r",
+ "A fan unfolds from the left, the fan axis at the right edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_232]},
+ {233, "fan-b",
+ "A fan unfolds from the top, the fan axis at the bottom edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_233]},
+ {234, "fan-l",
+ "A fan unfolds from the right, the fan axis at the left edge's midpoint",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_234]},
+ {235, "doublefan-fiv",
+ "Two fans, their axes at the top and bottom, unfold from the center",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_235]},
+ {236, "doublefan-fih",
+ "Two fans, their axes at the left and right, unfold from the center",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_236]},
+ {241, "singlesweep-cwtl",
+ "A radial hand sweeps clockwise from the upper-left corner",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_241]},
+ {242, "singlesweep-cwbl",
+ "A radial hand sweeps counter-clockwise from the lower-left corner.",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_242]},
+ {243, "singlesweep-cwbr",
+ "A radial hand sweeps clockwise from the lower-right corner",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_243]},
+ {244, "singlesweep-cwtr",
+ "A radial hand sweeps counter-clockwise from the upper-right corner",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_244]},
+ {245, "doublesweep-pdtl",
+ "Two radial hands attached at the upper-left and lower-right corners sweep down and up",
+ gst_wipe_triangles_clock_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_245]},
+ {246, "doublesweep-pdbl",
+ "Two radial hands attached at the lower-left and upper-right corners sweep down and up",
+ gst_wipe_triangles_clock_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_246]},
+ {251, "saloondoor-t",
+ "Two radial hands attached at the upper-left and upper-right corners sweep down",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_251]},
+ {252, "saloondoor-l",
+ "Two radial hands attached at the upper-left and lower-left corners sweep to the right",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_252]},
+ {253, "saloondoor-b",
+ "Two radial hands attached at the lower-left and lower-right corners sweep up",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_253]},
+ {254, "saloondoor-r",
+ "Two radial hands attached at the upper-right and lower-right corners sweep to the left",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_254]},
+ {261, "windshield-r",
+ "Two radial hands attached at the midpoints of the top and bottom halves sweep from right to left",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_261]},
+ {262, "windshield-u",
+ "Two radial hands attached at the midpoints of the left and right halves sweep from top to bottom",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_262]},
+ {263, "windshield-v",
+ "Two sets of radial hands attached at the midpoints of the top and bottom halves sweep from top to bottom and bottom to top",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_263]},
+ {264, "windshield-h",
+ "Two sets of radial hands attached at the midpoints of the left and right halves sweep from left to right and right to left",
+ gst_wipe_boxes_draw, _gst_mask_default_destroy,
+ &wipe_config[WIPE_CONFIG_264]},
+ {0, NULL, NULL, NULL}
+};
+
+/* Register every built-in wipe definition exactly once; subsequent calls
+ * are no-ops thanks to the g_once guard. */
+void
+_gst_barboxwipes_register (void)
+{
+  static gsize id = 0;
+
+  if (g_once_init_enter (&id)) {
+    const GstMaskDefinition *def;
+
+    /* the table is terminated by an entry with a NULL short_name */
+    for (def = definitions; def->short_name != NULL; def++)
+      _gst_mask_register (def);
+
+    g_once_init_leave (&id, 1);
+  }
+}
diff --git a/gst/smpte/gstmask.c b/gst/smpte/gstmask.c
new file mode 100644
index 0000000000..92b591936c
--- /dev/null
+++ b/gst/smpte/gstmask.c
@@ -0,0 +1,121 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstmask.h"
+#include "paint.h"
+
+static GList *masks = NULL;
+
+/* One-time init hook: registers the built-in bar/box/wipe definitions. */
+void
+_gst_mask_init (void)
+{
+ _gst_barboxwipes_register ();
+}
+
+/* GCompareFunc: orders mask definitions by ascending type id. */
+static gint
+gst_mask_compare (GstMaskDefinition * a, GstMaskDefinition * b)
+{
+  return a->type - b->type;
+}
+
+/* Insert a mask definition into the global registry, kept sorted by type
+ * so enumerations (e.g. the transition-type GEnum) come out in order.
+ * The definition is expected to have static storage duration; the list
+ * stores the pointer without copying. */
+void
+_gst_mask_register (const GstMaskDefinition * definition)
+{
+ masks =
+ g_list_insert_sorted (masks, (gpointer) definition,
+ (GCompareFunc) gst_mask_compare);
+}
+
+/* Returns the sorted list of registered GstMaskDefinition pointers.
+ * The list is owned by this module; callers must not modify or free it. */
+const GList *
+gst_mask_get_definitions (void)
+{
+ return masks;
+}
+
+/* Look up a registered definition by transition type id.
+ * Returns NULL when no definition with that id has been registered. */
+static GstMaskDefinition *
+gst_mask_find_definition (gint type)
+{
+  GList *item;
+
+  for (item = masks; item != NULL; item = g_list_next (item)) {
+    GstMaskDefinition *candidate = item->data;
+
+    if (candidate->type == type)
+      return candidate;
+  }
+
+  return NULL;
+}
+
+/* Create and render a mask of the given transition type.
+ *
+ * bpp is the mask precision in bits; the drawn values span 0..(1 << bpp).
+ * When invert is TRUE every value v becomes (1 << bpp) - v, reversing the
+ * transition direction.
+ *
+ * Returns a newly allocated GstMask (release with gst_mask_destroy()),
+ * or NULL when no definition is registered for type.
+ */
+GstMask *
+gst_mask_factory_new (gint type, gboolean invert, gint bpp, gint width,
+    gint height)
+{
+  GstMaskDefinition *definition;
+  GstMask *mask = NULL;
+
+  definition = gst_mask_find_definition (type);
+  if (definition) {
+    mask = g_new0 (GstMask, 1);
+
+    mask->type = definition->type;
+    mask->bpp = bpp;
+    mask->width = width;
+    mask->height = height;
+    mask->destroy_func = definition->destroy_func;
+    mask->user_data = definition->user_data;
+    /* g_new checks the width * height element-count multiplication for
+     * overflow, unlike a raw g_malloc (width * height * sizeof (guint32)) */
+    mask->data = g_new (guint32, width * height);
+
+    definition->draw_func (mask);
+
+    if (invert) {
+      gint i, j;
+      guint32 *datap = mask->data;
+      guint32 max = (1 << bpp);
+
+      /* mirror every mask value around the full-scale point */
+      for (i = 0; i < height; i++) {
+        for (j = 0; j < width; j++) {
+          *datap = max - *datap;
+          datap++;
+        }
+      }
+    }
+  }
+
+  return mask;
+}
+
+/* Default destroy_func: frees the pixel data and the mask struct itself.
+ * user_data points at static configuration and is deliberately not freed. */
+void
+_gst_mask_default_destroy (GstMask * mask)
+{
+ g_free (mask->data);
+ g_free (mask);
+}
+
+/* Release a mask through its definition's destroy function.
+ * NOTE(review): a mask whose destroy_func is NULL would leak here — all
+ * built-in definitions use _gst_mask_default_destroy, so this does not
+ * occur in practice; confirm if external definitions are ever allowed. */
+void
+gst_mask_destroy (GstMask * mask)
+{
+ if (mask->destroy_func)
+ mask->destroy_func (mask);
+}
diff --git a/gst/smpte/gstmask.h b/gst/smpte/gstmask.h
new file mode 100644
index 0000000000..53a5082a12
--- /dev/null
+++ b/gst/smpte/gstmask.h
@@ -0,0 +1,64 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_MASK_H__
+#define __GST_MASK_H__
+
+#include <gst/gst.h>
+
+typedef struct _GstMask GstMask;
+typedef struct _GstMaskDefinition GstMaskDefinition;
+
+typedef void (*GstMaskDrawFunc) (GstMask *mask);
+typedef void (*GstMaskDestroyFunc) (GstMask *mask);
+
+/* Static description of one transition type, registered at init time. */
+struct _GstMaskDefinition {
+ gint type; /* numeric SMPTE transition id (also the GEnum value) */
+ const gchar *short_name; /* enum nick, e.g. "bar-wipe-lr" */
+ const gchar *long_name; /* human-readable description */
+ GstMaskDrawFunc draw_func; /* renders the mask values into GstMask.data */
+ GstMaskDestroyFunc destroy_func; /* releases a GstMask made from this definition */
+ gconstpointer user_data; /* passed through to draw_func via GstMask.user_data */
+};
+
+/* A rendered transition mask: one guint32 per pixel, values 0..(1 << bpp). */
+struct _GstMask {
+ gint type; /* transition id this mask was built from */
+ guint32 *data; /* width * height mask values, owned by the mask */
+ gconstpointer user_data; /* definition's user_data (static config) */
+
+ gint width;
+ gint height;
+ gint bpp; /* mask precision in bits */
+
+ GstMaskDestroyFunc destroy_func; /* called by gst_mask_destroy() */
+};
+
+void _gst_mask_init (void);
+void _gst_mask_register (const GstMaskDefinition *definition);
+
+void _gst_mask_default_destroy (GstMask *mask);
+
+const GList* gst_mask_get_definitions (void);
+GstMask* gst_mask_factory_new (gint type, gboolean invert, gint bpp, gint width, gint height);
+void gst_mask_destroy (GstMask *mask);
+
+void _gst_barboxwipes_register (void);
+
+#endif /* __GST_MASK_H__ */
diff --git a/gst/smpte/gstsmpte.c b/gst/smpte/gstsmpte.c
new file mode 100644
index 0000000000..0516c9cab4
--- /dev/null
+++ b/gst/smpte/gstsmpte.c
@@ -0,0 +1,667 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-smpte
+ * @title: smpte
+ *
+ * smpte can accept I420 video streams with the same width, height and
+ * framerate. The two incoming buffers are blended together using an effect
+ * specific alpha mask.
+ *
+ * The #GstSMPTE:depth property defines the precision in bits of the mask. A
+ * higher precision will create a mask with smoother gradients in order to avoid
+ * banding.
+ *
+ * ## Sample pipelines
+ * |[
+ * gst-launch-1.0 -v videotestsrc pattern=1 ! smpte name=s border=20000 type=234 duration=2000000000 ! videoconvert ! ximagesink videotestsrc ! s.
+ * ]| A pipeline to demonstrate the smpte transition.
+ * It shows a pinwheel transition from a snow videotestsrc to an smpte
+ * pattern videotestsrc. The transition will take 2 seconds to complete. The
+ * edges of the transition are smoothed with a 20000 big border.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <string.h>
+#include "gstsmpte.h"
+#include "paint.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_smpte_debug);
+#define GST_CAT_DEFAULT gst_smpte_debug
+
+static GstStaticPadTemplate gst_smpte_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420")
+ )
+ );
+
+static GstStaticPadTemplate gst_smpte_sink1_template =
+GST_STATIC_PAD_TEMPLATE ("sink1",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420")
+ )
+ );
+
+static GstStaticPadTemplate gst_smpte_sink2_template =
+GST_STATIC_PAD_TEMPLATE ("sink2",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420")
+ )
+ );
+
+
+/* SMPTE signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+#define DEFAULT_PROP_TYPE 1
+#define DEFAULT_PROP_BORDER 0
+#define DEFAULT_PROP_DEPTH 16
+#define DEFAULT_PROP_DURATION GST_SECOND
+#define DEFAULT_PROP_INVERT FALSE
+
+enum
+{
+ PROP_0,
+ PROP_TYPE,
+ PROP_BORDER,
+ PROP_DEPTH,
+ PROP_DURATION,
+ PROP_INVERT
+};
+
+#define GST_TYPE_SMPTE_TRANSITION_TYPE (gst_smpte_transition_type_get_type())
+/* Build the GstSMPTETransitionType GEnum from the registered mask
+ * definitions.  Registration is guarded with g_once_init_enter/leave so two
+ * threads racing on the first call cannot both run g_enum_register_static
+ * (the plain "if (!type)" lazy check was not thread-safe); this matches the
+ * pattern used by _gst_barboxwipes_register. */
+static GType
+gst_smpte_transition_type_get_type (void)
+{
+  static gsize smpte_transition_type = 0;
+
+  if (g_once_init_enter (&smpte_transition_type)) {
+    GType tmp;
+    GEnumValue *smpte_transitions;
+    const GList *definitions;
+    gint i = 0;
+
+    definitions = gst_mask_get_definitions ();
+    /* one extra zero-filled entry acts as the array terminator */
+    smpte_transitions =
+        g_new0 (GEnumValue, g_list_length ((GList *) definitions) + 1);
+
+    while (definitions) {
+      GstMaskDefinition *definition = (GstMaskDefinition *) definitions->data;
+
+      definitions = g_list_next (definitions);
+
+      smpte_transitions[i].value = definition->type;
+      /* older GLib versions have the two fields as non-const, hence the cast */
+      smpte_transitions[i].value_nick = (gchar *) definition->short_name;
+      smpte_transitions[i].value_name = (gchar *) definition->long_name;
+
+      i++;
+    }
+
+    tmp = g_enum_register_static ("GstSMPTETransitionType", smpte_transitions);
+    g_once_init_leave (&smpte_transition_type, tmp);
+  }
+  return (GType) smpte_transition_type;
+}
+
+
+static void gst_smpte_finalize (GstSMPTE * smpte);
+
+static GstFlowReturn gst_smpte_collected (GstCollectPads * pads,
+ GstSMPTE * smpte);
+
+static void gst_smpte_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_smpte_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstStateChangeReturn gst_smpte_change_state (GstElement * element,
+ GstStateChange transition);
+
+/*static guint gst_smpte_signals[LAST_SIGNAL] = { 0 }; */
+
+#define gst_smpte_parent_class parent_class
+G_DEFINE_TYPE (GstSMPTE, gst_smpte, GST_TYPE_ELEMENT);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (smpte, "smpte", GST_RANK_NONE,
+ GST_TYPE_SMPTE, GST_DEBUG_CATEGORY_INIT (gst_smpte_debug, "smpte", 0,
+ "SMPTE transition effect"));
+
+/* GObject class init: wires vmethods, installs the five properties,
+ * registers pad templates and element metadata. */
+static void
+gst_smpte_class_init (GstSMPTEClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ /* NOTE(review): G_DEFINE_TYPE already initializes parent_class
+  * (via the gst_smpte_parent_class alias), so this assignment looks
+  * redundant — confirm before removing. */
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->set_property = gst_smpte_set_property;
+ gobject_class->get_property = gst_smpte_get_property;
+ gobject_class->finalize = (GObjectFinalizeFunc) gst_smpte_finalize;
+
+ /* masks must be registered before the transition-type enum property
+  * below is created from them */
+ _gst_mask_init ();
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TYPE,
+ g_param_spec_enum ("type", "Type", "The type of transition to use",
+ GST_TYPE_SMPTE_TRANSITION_TYPE, DEFAULT_PROP_TYPE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BORDER,
+ g_param_spec_int ("border", "Border",
+ "The border width of the transition", 0, G_MAXINT,
+ DEFAULT_PROP_BORDER, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DEPTH,
+ g_param_spec_int ("depth", "Depth", "Depth of the mask in bits", 1, 24,
+ DEFAULT_PROP_DEPTH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DURATION,
+ g_param_spec_uint64 ("duration", "Duration",
+ "Duration of the transition effect in nanoseconds", 0, G_MAXUINT64,
+ DEFAULT_PROP_DURATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_INVERT,
+ g_param_spec_boolean ("invert", "Invert",
+ "Invert transition mask", DEFAULT_PROP_INVERT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_smpte_change_state);
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_smpte_sink1_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_smpte_sink2_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_smpte_src_template);
+ gst_element_class_set_static_metadata (gstelement_class, "SMPTE transitions",
+ "Filter/Editor/Video",
+ "Apply the standard SMPTE transitions on video images",
+ "Wim Taymans <wim.taymans@chello.be>");
+
+ gst_type_mark_as_plugin_api (GST_TYPE_SMPTE_TRANSITION_TYPE, 0);
+}
+
+/* wht yel cya grn mag red blu blk -I Q */
+static const int y_colors[] = { 255, 226, 179, 150, 105, 76, 29, 16, 16, 0 };
+static const int u_colors[] = { 128, 0, 170, 46, 212, 85, 255, 128, 0, 128 };
+static const int v_colors[] = { 128, 155, 0, 21, 235, 255, 107, 128, 128, 255 };
+
+/* Fill an I420 frame with a solid color picked from the y/u/v tables above.
+ * The luma plane size rounds height up to 2 (chroma is subsampled 2x2);
+ * size4 is one chroma plane: a quarter of the luma plane. */
+static void
+fill_i420 (GstVideoInfo * vinfo, guint8 * data, gint height, gint color)
+{
+ gint size = GST_VIDEO_INFO_COMP_STRIDE (vinfo, 0) * GST_ROUND_UP_2 (height);
+ gint size4 = size >> 2;
+ guint8 *yp = data;
+ guint8 *up = data + GST_VIDEO_INFO_COMP_OFFSET (vinfo, 1);
+ guint8 *vp = data + GST_VIDEO_INFO_COMP_OFFSET (vinfo, 2);
+
+ memset (yp, y_colors[color], size);
+ memset (up, u_colors[color], size4);
+ memset (vp, v_colors[color], size4);
+}
+
+/* (Re)build the transition mask when any relevant parameter changed.
+ * Returns TRUE when a usable mask is in place afterwards, FALSE when the
+ * requested mask type could not be created (the old mask is kept). */
+static gboolean
+gst_smpte_update_mask (GstSMPTE * smpte, gint type, gboolean invert,
+    gint depth, gint width, gint height)
+{
+  GstMask *newmask;
+
+  /* fast path: the current mask already matches the request */
+  if (smpte->mask != NULL && smpte->type == type && smpte->invert == invert
+      && smpte->depth == depth && smpte->width == width
+      && smpte->height == height)
+    return TRUE;
+
+  newmask = gst_mask_factory_new (type, invert, depth, width, height);
+  if (newmask == NULL)
+    return FALSE;
+
+  if (smpte->mask != NULL)
+    gst_mask_destroy (smpte->mask);
+
+  smpte->mask = newmask;
+  smpte->type = type;
+  smpte->invert = invert;
+  smpte->depth = depth;
+  smpte->width = width;
+  smpte->height = height;
+
+  return TRUE;
+}
+
+/* Handle new caps on either sink pad: record the video geometry/framerate,
+ * derive the transition length in frames from the duration property, and
+ * regenerate the mask for the (possibly new) frame size. */
+static gboolean
+gst_smpte_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstSMPTE *smpte;
+ gboolean ret;
+ GstVideoInfo vinfo;
+
+ smpte = GST_SMPTE (GST_PAD_PARENT (pad));
+
+ gst_video_info_init (&vinfo);
+ if (!gst_video_info_from_caps (&vinfo, caps))
+ return FALSE;
+
+ smpte->width = GST_VIDEO_INFO_WIDTH (&vinfo);
+ smpte->height = GST_VIDEO_INFO_HEIGHT (&vinfo);
+ smpte->fps_num = GST_VIDEO_INFO_FPS_N (&vinfo);
+ smpte->fps_denom = GST_VIDEO_INFO_FPS_D (&vinfo);
+
+ /* figure out the duration in frames */
+ smpte->end_position = gst_util_uint64_scale (smpte->duration,
+ smpte->fps_num, GST_SECOND * smpte->fps_denom);
+
+ GST_DEBUG_OBJECT (smpte, "duration: %d frames", smpte->end_position);
+
+ ret =
+ gst_smpte_update_mask (smpte, smpte->type, smpte->invert, smpte->depth,
+ smpte->width, smpte->height);
+
+ /* remember per-pad video info; both pads are expected to carry the same
+  * geometry, the last caps event wins for the shared width/height above */
+ if (pad == smpte->sinkpad1) {
+ GST_DEBUG_OBJECT (smpte, "setting pad1 info");
+ smpte->vinfo1 = vinfo;
+ } else {
+ GST_DEBUG_OBJECT (smpte, "setting pad2 info");
+ smpte->vinfo2 = vinfo;
+ }
+
+ return ret;
+}
+
+/* Collectpads event callback.  CAPS events are consumed here to
+ * (re)configure the element; everything else gets the default
+ * collectpads handling. */
+static gboolean
+gst_smpte_sink_event (GstCollectPads * pads,
+    GstCollectData * data, GstEvent * event, gpointer user_data)
+{
+  if (GST_EVENT_TYPE (event) == GST_EVENT_CAPS) {
+    GstCaps *caps;
+    gboolean ret;
+
+    gst_event_parse_caps (event, &caps);
+    ret = gst_smpte_setcaps (data->pad, caps);
+    gst_event_unref (event);
+    return ret;
+  }
+
+  return gst_collect_pads_event_default (pads, data, event, FALSE);
+}
+
+static void
+gst_smpte_init (GstSMPTE * smpte)
+{
+  /* two always-present sink pads feeding the transition and one source pad */
+  smpte->sinkpad1 =
+      gst_pad_new_from_static_template (&gst_smpte_sink1_template, "sink1");
+  GST_PAD_SET_PROXY_CAPS (smpte->sinkpad1);
+  gst_element_add_pad (GST_ELEMENT (smpte), smpte->sinkpad1);
+
+  smpte->sinkpad2 =
+      gst_pad_new_from_static_template (&gst_smpte_sink2_template, "sink2");
+  GST_PAD_SET_PROXY_CAPS (smpte->sinkpad2);
+  gst_element_add_pad (GST_ELEMENT (smpte), smpte->sinkpad2);
+
+  smpte->srcpad =
+      gst_pad_new_from_static_template (&gst_smpte_src_template, "src");
+  gst_element_add_pad (GST_ELEMENT (smpte), smpte->srcpad);
+
+  /* collectpads gathers buffers from both inputs and calls
+   * gst_smpte_collected; caps events are intercepted by
+   * gst_smpte_sink_event */
+  smpte->collect = gst_collect_pads_new ();
+  gst_collect_pads_set_function (smpte->collect,
+      (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_smpte_collected), smpte);
+  gst_collect_pads_set_event_function (smpte->collect,
+      GST_DEBUG_FUNCPTR (gst_smpte_sink_event), smpte);
+
+  gst_collect_pads_add_pad (smpte->collect, smpte->sinkpad1,
+      sizeof (GstCollectData), NULL, TRUE);
+  gst_collect_pads_add_pad (smpte->collect, smpte->sinkpad2,
+      sizeof (GstCollectData), NULL, TRUE);
+
+  /* property defaults; the framerate stays 0/1 until caps are negotiated,
+   * which gst_smpte_collected uses to detect "not negotiated" */
+  smpte->type = DEFAULT_PROP_TYPE;
+  smpte->border = DEFAULT_PROP_BORDER;
+  smpte->depth = DEFAULT_PROP_DEPTH;
+  smpte->duration = DEFAULT_PROP_DURATION;
+  smpte->invert = DEFAULT_PROP_INVERT;
+  smpte->fps_num = 0;
+  smpte->fps_denom = 1;
+}
+
+static void
+gst_smpte_finalize (GstSMPTE * smpte)
+{
+  /* release the collectpads helper and any generated transition mask */
+  if (smpte->collect != NULL)
+    gst_object_unref (smpte->collect);
+  if (smpte->mask != NULL)
+    gst_mask_destroy (smpte->mask);
+
+  G_OBJECT_CLASS (parent_class)->finalize ((GObject *) smpte);
+}
+
+static void
+gst_smpte_reset (GstSMPTE * smpte)
+{
+  /* rewind the transition and forget the negotiated geometry so the next
+   * caps event renegotiates from scratch */
+  smpte->position = 0;
+  smpte->end_position = 0;
+  smpte->width = -1;
+  smpte->height = -1;
+  smpte->send_stream_start = TRUE;
+}
+
+/* Blend two I420 frames into oframe using the SMPTE wipe mask.
+ * For each pixel the mask value is clamped to [pos-border, pos] and
+ * rescaled to a 0..256 blend factor, so pixels whose mask value is below
+ * the window show frame2 and those above show frame1, with a soft edge
+ * of 'border' mask steps in between.  Chroma is blended once per 2x2
+ * luma block (even rows/columns only). */
+static void
+gst_smpte_blend_i420 (GstVideoFrame * frame1, GstVideoFrame * frame2,
+    GstVideoFrame * oframe, GstMask * mask, gint border, gint pos)
+{
+  guint32 *maskp;
+  gint value;
+  gint i, j;
+  gint min, max;
+  guint8 *in1, *in2, *out, *in1u, *in1v, *in2u, *in2v, *outu, *outv;
+  gint width, height;
+
+  /* avoid a division by zero in the blend factor below */
+  if (border == 0)
+    border++;
+
+  min = pos - border;
+  max = pos;
+
+  width = GST_VIDEO_FRAME_WIDTH (frame1);
+  height = GST_VIDEO_FRAME_HEIGHT (frame1);
+
+  in1 = GST_VIDEO_FRAME_COMP_DATA (frame1, 0);
+  in2 = GST_VIDEO_FRAME_COMP_DATA (frame2, 0);
+  out = GST_VIDEO_FRAME_COMP_DATA (oframe, 0);
+
+  in1u = GST_VIDEO_FRAME_COMP_DATA (frame1, 1);
+  in1v = GST_VIDEO_FRAME_COMP_DATA (frame1, 2);
+  in2u = GST_VIDEO_FRAME_COMP_DATA (frame2, 1);
+  in2v = GST_VIDEO_FRAME_COMP_DATA (frame2, 2);
+  outu = GST_VIDEO_FRAME_COMP_DATA (oframe, 1);
+  outv = GST_VIDEO_FRAME_COMP_DATA (oframe, 2);
+
+  maskp = mask->data;
+
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width; j++) {
+      value = *maskp++;
+      /* map the mask value into a 0..256 blend weight */
+      value = ((CLAMP (value, min, max) - min) << 8) / border;
+
+      out[j] = ((in1[j] * value) + (in2[j] * (256 - value))) >> 8;
+      if (!(i & 1) && !(j & 1)) {
+        outu[j / 2] =
+            ((in1u[j / 2] * value) + (in2u[j / 2] * (256 - value))) >> 8;
+        outv[j / 2] =
+            ((in1v[j / 2] * value) + (in2v[j / 2] * (256 - value))) >> 8;
+      }
+    }
+
+    in1 += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 0);
+    in2 += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 0);
+    out += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 0);
+
+    /* chroma planes are subsampled vertically: advance every other row */
+    if (!(i & 1)) {
+      in1u += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 1);
+      in2u += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 1);
+      in1v += GST_VIDEO_FRAME_COMP_STRIDE (frame1, 2);
+      /* fix: advance in2v by frame2's V stride, not frame1's; the old
+       * code used frame1 here, corrupting the blend when the two input
+       * frames have different strides */
+      in2v += GST_VIDEO_FRAME_COMP_STRIDE (frame2, 2);
+      outu += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 1);
+      outv += GST_VIDEO_FRAME_COMP_STRIDE (oframe, 2);
+    }
+  }
+}
+
+/* Collectpads callback: pops one buffer from each sink pad, blends them
+ * according to the current transition position and pushes the result.
+ * A missing input is replaced by a solid black (sink1) or white (sink2)
+ * frame.  Once position reaches end_position the second input is passed
+ * through unmodified. */
+static GstFlowReturn
+gst_smpte_collected (GstCollectPads * pads, GstSMPTE * smpte)
+{
+  GstBuffer *outbuf;
+  GstClockTime ts;
+  GstBuffer *in1 = NULL, *in2 = NULL;
+  GSList *collected;
+  GstMapInfo map;
+  GstVideoFrame frame1, frame2, oframe;
+
+  /* fps_num stays 0 until gst_smpte_setcaps has run for at least one pad */
+  if (G_UNLIKELY (smpte->fps_num == 0))
+    goto not_negotiated;
+
+  if (!gst_pad_has_current_caps (smpte->sinkpad1) ||
+      !gst_pad_has_current_caps (smpte->sinkpad2))
+    goto not_negotiated;
+
+  /* the blend code assumes both inputs share size, format and strides */
+  if (!gst_video_info_is_equal (&smpte->vinfo1, &smpte->vinfo2))
+    goto input_formats_do_not_match;
+
+  if (smpte->send_stream_start) {
+    gchar s_id[32];
+
+    /* stream-start (FIXME: create id based on input ids) */
+    g_snprintf (s_id, sizeof (s_id), "smpte-%08x", g_random_int ());
+    gst_pad_push_event (smpte->srcpad, gst_event_new_stream_start (s_id));
+    smpte->send_stream_start = FALSE;
+  }
+
+  /* output timestamp derived from the frame counter and the framerate */
+  ts = gst_util_uint64_scale_int (smpte->position * GST_SECOND,
+      smpte->fps_denom, smpte->fps_num);
+
+  /* pop one queued buffer from each input pad, where available */
+  for (collected = pads->data; collected; collected = g_slist_next (collected)) {
+    GstCollectData *data;
+
+    data = (GstCollectData *) collected->data;
+
+    if (data->pad == smpte->sinkpad1)
+      in1 = gst_collect_pads_pop (pads, data);
+    else if (data->pad == smpte->sinkpad2)
+      in2 = gst_collect_pads_pop (pads, data);
+  }
+
+  if (in1 == NULL) {
+    /* if no input, make picture black */
+    in1 = gst_buffer_new_and_alloc (GST_VIDEO_INFO_SIZE (&smpte->vinfo1));
+    gst_buffer_map (in1, &map, GST_MAP_WRITE);
+    fill_i420 (&smpte->vinfo1, map.data, smpte->height, 7);
+    gst_buffer_unmap (in1, &map);
+  }
+  if (in2 == NULL) {
+    /* if no input, make picture white */
+    in2 = gst_buffer_new_and_alloc (GST_VIDEO_INFO_SIZE (&smpte->vinfo2));
+    gst_buffer_map (in2, &map, GST_MAP_WRITE);
+    fill_i420 (&smpte->vinfo2, map.data, smpte->height, 0);
+    gst_buffer_unmap (in2, &map);
+  }
+
+  if (smpte->position < smpte->end_position) {
+    /* transition still in progress: blend the two inputs with the mask */
+    outbuf = gst_buffer_new_and_alloc (GST_VIDEO_INFO_SIZE (&smpte->vinfo1));
+
+    /* set caps if not done yet */
+    if (!gst_pad_has_current_caps (smpte->srcpad)) {
+      GstCaps *caps;
+      GstSegment segment;
+
+      caps = gst_video_info_to_caps (&smpte->vinfo1);
+
+      gst_pad_set_caps (smpte->srcpad, caps);
+      gst_caps_unref (caps);
+
+      gst_segment_init (&segment, GST_FORMAT_TIME);
+      gst_pad_push_event (smpte->srcpad, gst_event_new_segment (&segment));
+    }
+
+    gst_video_frame_map (&frame1, &smpte->vinfo1, in1, GST_MAP_READ);
+    gst_video_frame_map (&frame2, &smpte->vinfo2, in2, GST_MAP_READ);
+    /* re-use either info, now know they are essentially identical */
+    gst_video_frame_map (&oframe, &smpte->vinfo1, outbuf, GST_MAP_WRITE);
+    /* the mask position scales linearly over the whole transition */
+    gst_smpte_blend_i420 (&frame1, &frame2, &oframe, smpte->mask, smpte->border,
+        ((1 << smpte->depth) + smpte->border) *
+        smpte->position / smpte->end_position);
+    gst_video_frame_unmap (&frame1);
+    gst_video_frame_unmap (&frame2);
+    gst_video_frame_unmap (&oframe);
+  } else {
+    /* transition finished: pass the second input through unchanged */
+    outbuf = in2;
+    gst_buffer_ref (in2);
+  }
+
+  smpte->position++;
+
+  if (in1)
+    gst_buffer_unref (in1);
+  if (in2)
+    gst_buffer_unref (in2);
+
+  GST_BUFFER_TIMESTAMP (outbuf) = ts;
+
+  return gst_pad_push (smpte->srcpad, outbuf);
+
+  /* ERRORS */
+not_negotiated:
+  {
+    GST_ELEMENT_ERROR (smpte, CORE, NEGOTIATION, (NULL),
+        ("No input format negotiated"));
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+input_formats_do_not_match:
+  {
+    GstCaps *caps1, *caps2;
+
+    caps1 = gst_pad_get_current_caps (smpte->sinkpad1);
+    caps2 = gst_pad_get_current_caps (smpte->sinkpad2);
+    GST_ELEMENT_ERROR (smpte, CORE, NEGOTIATION, (NULL),
+        ("input formats don't match: %" GST_PTR_FORMAT " vs. %" GST_PTR_FORMAT,
+            caps1, caps2));
+    if (caps1)
+      gst_caps_unref (caps1);
+    if (caps2)
+      gst_caps_unref (caps2);
+    return GST_FLOW_ERROR;
+  }
+}
+
+static void
+gst_smpte_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstSMPTE *smpte = GST_SMPTE (object);
+
+  /* plain field stores; the new values take effect on the next
+   * negotiation / collected frame */
+  switch (prop_id) {
+    case PROP_TYPE:
+      smpte->type = g_value_get_enum (value);
+      break;
+    case PROP_BORDER:
+      smpte->border = g_value_get_int (value);
+      break;
+    case PROP_DEPTH:
+      smpte->depth = g_value_get_int (value);
+      break;
+    case PROP_DURATION:
+      smpte->duration = g_value_get_uint64 (value);
+      break;
+    case PROP_INVERT:
+      smpte->invert = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_smpte_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstSMPTE *smpte = GST_SMPTE (object);
+
+  /* straight field reads mirroring gst_smpte_set_property */
+  switch (prop_id) {
+    case PROP_TYPE:
+      g_value_set_enum (value, smpte->type);
+      break;
+    case PROP_BORDER:
+      g_value_set_int (value, smpte->border);
+      break;
+    case PROP_DEPTH:
+      g_value_set_int (value, smpte->depth);
+      break;
+    case PROP_DURATION:
+      g_value_set_uint64 (value, smpte->duration);
+      break;
+    case PROP_INVERT:
+      g_value_set_boolean (value, smpte->invert);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static GstStateChangeReturn
+gst_smpte_change_state (GstElement * element, GstStateChange transition)
+{
+  GstSMPTE *smpte = GST_SMPTE (element);
+  GstStateChangeReturn ret;
+
+  /* collectpads must be started before chaining up on READY->PAUSED and
+   * stopped before chaining up on PAUSED->READY */
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_smpte_reset (smpte);
+      GST_LOG_OBJECT (smpte, "starting collectpads");
+      gst_collect_pads_start (smpte->collect);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      GST_LOG_OBJECT (smpte, "stopping collectpads");
+      gst_collect_pads_stop (smpte->collect);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  /* reset the effect state once the element left PAUSED */
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_smpte_reset (smpte);
+
+  return ret;
+}
diff --git a/gst/smpte/gstsmpte.h b/gst/smpte/gstsmpte.h
new file mode 100644
index 0000000000..4458b31343
--- /dev/null
+++ b/gst/smpte/gstsmpte.h
@@ -0,0 +1,86 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_SMPTE_H__
+#define __GST_SMPTE_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstcollectpads.h>
+#include <gst/video/video.h>
+
+G_BEGIN_DECLS
+
+#include "gstmask.h"
+
+#define GST_TYPE_SMPTE \
+ (gst_smpte_get_type())
+#define GST_SMPTE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SMPTE,GstSMPTE))
+#define GST_SMPTE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SMPTE,GstSMPTEClass))
+#define GST_IS_SMPTE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SMPTE))
+#define GST_IS_SMPTE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SMPTE))
+
+typedef struct _GstSMPTE GstSMPTE;
+typedef struct _GstSMPTEClass GstSMPTEClass;
+
+/* Instance structure of the smpte transition element: blends the streams
+ * on sink1 and sink2 into a single output using a SMPTE wipe mask. */
+struct _GstSMPTE {
+  GstElement element;
+
+  /* pads */
+  GstPad *srcpad,
+          *sinkpad1,
+          *sinkpad2;
+  GstCollectPads *collect;     /* synchronizes the two sink pads */
+  gboolean send_stream_start;  /* TRUE until stream-start has been pushed */
+
+  /* properties */
+  gint type;           /* transition type (mask id) */
+  gint border;         /* width of the soft edge of the wipe */
+  gint depth;          /* mask depth in bits */
+  guint64 duration;    /* transition length in nanoseconds */
+  gboolean invert;     /* invert the mask */
+
+  /* negotiated format */
+  gint width;
+  gint height;
+  gint fps_num;
+  gint fps_denom;
+  GstVideoInfo vinfo1;
+  GstVideoInfo vinfo2;
+
+  /* state of the effect */
+  gint position;       /* frames produced so far */
+  gint end_position;   /* duration converted to frames */
+  GstMask *mask;       /* current wipe mask, NULL until created */
+};
+
+struct _GstSMPTEClass {
+  GstElementClass parent_class;
+};
+
+GType gst_smpte_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (smpte);
+
+G_END_DECLS
+#endif /* __GST_SMPTE_H__ */
diff --git a/gst/smpte/gstsmptealpha.c b/gst/smpte/gstsmptealpha.c
new file mode 100644
index 0000000000..4046ced721
--- /dev/null
+++ b/gst/smpte/gstsmptealpha.c
@@ -0,0 +1,805 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-smptealpha
+ * @title: smptealpha
+ *
+ * smptealpha can accept an I420 or AYUV video stream. An alpha channel is added
+ * using an effect specific SMPTE mask in the I420 input case. In the AYUV case,
+ * the alpha channel is modified using the effect specific SMPTE mask.
+ *
+ * The #GstSMPTEAlpha:position property is a controllable double between 0.0
+ * and 1.0 that specifies the position in the transition. 0.0 is the start of
+ * the transition, with the alpha channel completely opaque, while at 1.0 the
+ * alpha channel is completely transparent.
+ *
+ * The #GstSMPTEAlpha:depth property defines the precision in bits of the mask.
+ * A higher precision will create a mask with smoother gradients in order to
+ * avoid banding.
+ *
+ * ## Sample pipelines
+ *
+ * Here is a pipeline to demonstrate the smpte transition :
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! smptealpha border=20000 type=44
+ * position=0.5 ! videomixer ! videoconvert ! ximagesink
+ * ]|
+ * This shows a midway bowtie-h transition from a videotestsrc to a
+ * transparent image. The edges of the transition are smoothed with a
+ * border of 20000.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <string.h>
+
+#include "gstsmptealpha.h"
+#include "paint.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_smpte_alpha_debug);
+#define GST_CAT_DEFAULT gst_smpte_alpha_debug
+
+static GstStaticPadTemplate gst_smpte_alpha_src_template =
+    GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    /* fix: the last entry duplicated "ARGB"; list "ABGR" instead, for
+     * which set_info installs a dedicated processing function */
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("AYUV") ";"
+        GST_VIDEO_CAPS_MAKE ("ARGB") ";" GST_VIDEO_CAPS_MAKE ("BGRA") ";"
+        GST_VIDEO_CAPS_MAKE ("RGBA") ";" GST_VIDEO_CAPS_MAKE ("ABGR"))
+    );
+
+static GstStaticPadTemplate gst_smpte_alpha_sink_template =
+    GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    /* fix: the last entry duplicated "ARGB"; without "ABGR" here the
+     * ABGR->ABGR path installed in set_info could never be negotiated */
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420") ";"
+        GST_VIDEO_CAPS_MAKE ("YV12")
+        ";" GST_VIDEO_CAPS_MAKE ("AYUV")
+        ";" GST_VIDEO_CAPS_MAKE ("ARGB") ";" GST_VIDEO_CAPS_MAKE ("BGRA")
+        ";" GST_VIDEO_CAPS_MAKE ("RGBA") ";" GST_VIDEO_CAPS_MAKE ("ABGR"))
+    );
+
+/* SMPTE signals and properties */
+
+/* property defaults */
+#define DEFAULT_PROP_TYPE 1
+#define DEFAULT_PROP_BORDER 0
+#define DEFAULT_PROP_DEPTH 16
+#define DEFAULT_PROP_POSITION 0.0
+#define DEFAULT_PROP_INVERT FALSE
+
+enum
+{
+  PROP_0,
+  PROP_TYPE,
+  PROP_BORDER,
+  PROP_DEPTH,
+  PROP_POSITION,
+  PROP_INVERT
+};
+
+/* size in bytes of a packed 4-bytes-per-pixel AYUV image */
+#define AYUV_SIZE(w,h) ((w) * (h) * 4)
+
+#define GST_TYPE_SMPTE_TRANSITION_TYPE (gst_smpte_alpha_transition_type_get_type())
+/* Lazily registers a GEnum type built from the mask definitions returned
+ * by gst_mask_get_definitions ().  The g_new0 "+ 1" slot provides the
+ * all-zero terminating GEnumValue that g_enum_register_static expects.
+ * NOTE(review): there is no locking around the static GType; presumably
+ * this is only reached from class_init during type registration — confirm. */
+static GType
+gst_smpte_alpha_transition_type_get_type (void)
+{
+  static GType smpte_transition_type = 0;
+  GEnumValue *smpte_transitions;
+
+  if (!smpte_transition_type) {
+    const GList *definitions;
+    gint i = 0;
+
+    definitions = gst_mask_get_definitions ();
+    smpte_transitions =
+        g_new0 (GEnumValue, g_list_length ((GList *) definitions) + 1);
+
+    while (definitions) {
+      GstMaskDefinition *definition = (GstMaskDefinition *) definitions->data;
+
+      definitions = g_list_next (definitions);
+
+      smpte_transitions[i].value = definition->type;
+      /* older GLib versions have the two fields as non-const, hence the cast */
+      smpte_transitions[i].value_nick = (gchar *) definition->short_name;
+      smpte_transitions[i].value_name = (gchar *) definition->long_name;
+
+      i++;
+    }
+
+    smpte_transition_type =
+        g_enum_register_static ("GstSMPTEAlphaTransitionType",
+        smpte_transitions);
+  }
+  return smpte_transition_type;
+}
+
+
+static void gst_smpte_alpha_finalize (GstSMPTEAlpha * smpte);
+
+static void gst_smpte_alpha_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_smpte_alpha_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_smpte_alpha_set_info (GstVideoFilter * vfilter,
+ GstCaps * incaps, GstVideoInfo * in_info,
+ GstCaps * outcaps, GstVideoInfo * out_info);
+static GstFlowReturn gst_smpte_alpha_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame);
+static void gst_smpte_alpha_before_transform (GstBaseTransform * trans,
+ GstBuffer * buf);
+static GstCaps *gst_smpte_alpha_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * from, GstCaps * filter);
+
+#define gst_smpte_alpha_parent_class parent_class
+G_DEFINE_TYPE (GstSMPTEAlpha, gst_smpte_alpha, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (smptealpha, "smptealpha", GST_RANK_NONE,
+ GST_TYPE_SMPTE_ALPHA, GST_DEBUG_CATEGORY_INIT (gst_smpte_alpha_debug,
+ "smptealpha", 0, "SMPTE alpha effect"));
+
+static void
+gst_smpte_alpha_class_init (GstSMPTEAlphaClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *element_class = (GstElementClass *) (klass);
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_smpte_alpha_set_property;
+  gobject_class->get_property = gst_smpte_alpha_get_property;
+
+  gobject_class->finalize = (GObjectFinalizeFunc) gst_smpte_alpha_finalize;
+
+  /* the mask definitions must be registered before the transition-type
+   * enum below is built from them */
+  _gst_mask_init ();
+
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TYPE,
+      g_param_spec_enum ("type", "Type", "The type of transition to use",
+          GST_TYPE_SMPTE_TRANSITION_TYPE, DEFAULT_PROP_TYPE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BORDER,
+      g_param_spec_int ("border", "Border",
+          "The border width of the transition", 0, G_MAXINT,
+          DEFAULT_PROP_BORDER,
+          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DEPTH,
+      g_param_spec_int ("depth", "Depth", "Depth of the mask in bits", 1, 24,
+          DEFAULT_PROP_DEPTH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_POSITION,
+      g_param_spec_double ("position", "Position",
+          "Position of the transition effect", 0.0, 1.0, DEFAULT_PROP_POSITION,
+          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstSMPTEAlpha:invert:
+   *
+   * Set to TRUE to invert the transition mask (ie. flip it horizontally).
+   */
+  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_INVERT,
+      g_param_spec_boolean ("invert", "Invert",
+          /* fix: the boolean default was DEFAULT_PROP_POSITION (a double);
+           * use the matching boolean constant */
+          "Invert transition mask", DEFAULT_PROP_INVERT,
+          GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  trans_class->before_transform =
+      GST_DEBUG_FUNCPTR (gst_smpte_alpha_before_transform);
+  trans_class->transform_caps =
+      GST_DEBUG_FUNCPTR (gst_smpte_alpha_transform_caps);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_smpte_alpha_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_smpte_alpha_transform_frame);
+
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_smpte_alpha_sink_template);
+  gst_element_class_add_static_pad_template (element_class,
+      &gst_smpte_alpha_src_template);
+  gst_element_class_set_static_metadata (element_class, "SMPTE transitions",
+      "Filter/Editor/Video",
+      "Apply the standard SMPTE transitions as alpha on video images",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gst_type_mark_as_plugin_api (GST_TYPE_SMPTE_TRANSITION_TYPE, 0);
+}
+
+static gboolean
+gst_smpte_alpha_update_mask (GstSMPTEAlpha * smpte, gint type,
+    gboolean invert, gint depth, gint width, gint height)
+{
+  GstMask *newmask;
+
+  /* keep the current mask when it was generated with identical parameters */
+  if (smpte->mask != NULL && smpte->type == type && smpte->invert == invert
+      && smpte->depth == depth && smpte->width == width
+      && smpte->height == height)
+    return TRUE;
+
+  smpte->type = type;
+  smpte->invert = invert;
+  smpte->depth = depth;
+  smpte->width = width;
+  smpte->height = height;
+
+  /* Not negotiated yet */
+  if (width == 0 || height == 0)
+    return TRUE;
+
+  newmask = gst_mask_factory_new (type, invert, depth, width, height);
+  if (newmask == NULL) {
+    GST_ERROR_OBJECT (smpte, "failed to create a mask");
+    return FALSE;
+  }
+
+  if (smpte->mask != NULL)
+    gst_mask_destroy (smpte->mask);
+  smpte->mask = newmask;
+
+  return TRUE;
+}
+
+static void
+gst_smpte_alpha_init (GstSMPTEAlpha * smpte)
+{
+  /* start out with the property defaults; the mask itself is created
+   * lazily once the size is negotiated */
+  smpte->position = DEFAULT_PROP_POSITION;
+  smpte->invert = DEFAULT_PROP_INVERT;
+  smpte->type = DEFAULT_PROP_TYPE;
+  smpte->border = DEFAULT_PROP_BORDER;
+  smpte->depth = DEFAULT_PROP_DEPTH;
+}
+
+/* Generates a processing function for one packed 4-bytes-per-pixel RGB
+ * layout.  A, R, G, B are the byte offsets of the alpha, red, green and
+ * blue components within a pixel.  The pixel data is copied while the
+ * alpha byte is scaled by the SMPTE mask value, clamped to the
+ * [pos - border, pos] window, giving a soft edge of 'border' mask steps.
+ * (Comments cannot go inside the macro body without breaking the line
+ * continuations.) */
+#define CREATE_ARGB_FUNC(name, A, R, G, B) \
+static void \
+gst_smpte_alpha_process_##name##_##name (GstSMPTEAlpha * smpte, \
+    const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask, \
+    gint border, gint pos) \
+{ \
+  gint i, j; \
+  const guint32 *maskp; \
+  gint value; \
+  gint min, max; \
+  gint width, height; \
+  guint8 *in, *out; \
+  gint src_wrap, dest_wrap; \
+  \
+  if (border == 0) \
+    border++; \
+  \
+  min = pos - border; \
+  max = pos; \
+  GST_DEBUG_OBJECT (smpte, "pos %d, min %d, max %d, border %d", pos, min, max, \
+      border); \
+  \
+  maskp = mask->data; \
+  \
+  width = GST_VIDEO_FRAME_WIDTH (out_frame); \
+  height = GST_VIDEO_FRAME_HEIGHT (out_frame); \
+  \
+  in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0); \
+  out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0); \
+  src_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) - (width << 2); \
+  dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2); \
+  \
+  /* we basically copy the source to dest but we scale the alpha channel with \
+   * the mask */ \
+  for (i = 0; i < height; i++) { \
+    for (j = 0; j < width; j++) { \
+      value = *maskp++; \
+      out[A] = (in[A] * ((CLAMP (value, min, max) - min) << 8) / border) >> 8; \
+      out[R] = in[R]; \
+      out[G] = in[G]; \
+      out[B] = in[B]; \
+      out += 4; \
+      in += 4; \
+    } \
+    in += src_wrap; \
+    out += dest_wrap; \
+  } \
+}
+
+/* instantiate one processing function per supported pixel layout */
+CREATE_ARGB_FUNC (argb, 0, 1, 2, 3);
+CREATE_ARGB_FUNC (bgra, 3, 2, 1, 0);
+CREATE_ARGB_FUNC (abgr, 0, 3, 2, 1);
+CREATE_ARGB_FUNC (rgba, 3, 0, 1, 2);
+
+/* AYUV -> AYUV: copy the pixels while scaling the alpha byte (the first
+ * byte of each AYUV pixel) by the clamped-and-rescaled mask value. */
+static void
+gst_smpte_alpha_process_ayuv_ayuv (GstSMPTEAlpha * smpte,
+    const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask,
+    gint border, gint pos)
+{
+  gint i, j;
+  const guint32 *maskp;
+  gint value;
+  gint min, max;
+  gint width, height;
+  guint8 *in, *out;
+  gint src_wrap, dest_wrap;
+
+  /* avoid a division by zero in the alpha scaling below */
+  if (border == 0)
+    border++;
+
+  min = pos - border;
+  max = pos;
+  GST_DEBUG_OBJECT (smpte, "pos %d, min %d, max %d, border %d", pos, min, max,
+      border);
+
+  maskp = mask->data;
+
+  width = GST_VIDEO_FRAME_WIDTH (out_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (out_frame);
+
+  in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+  out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+  /* wrap = stride minus the bytes actually written per row (4 per pixel) */
+  src_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) - (width << 2);
+  dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2);
+
+  /* we basically copy the source to dest but we scale the alpha channel with
+   * the mask */
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width; j++) {
+      value = *maskp++;
+      /* first byte is alpha: scale it by the mask window */
+      *out++ = (*in++ * ((CLAMP (value, min, max) - min) << 8) / border) >> 8;
+      *out++ = *in++;
+      *out++ = *in++;
+      *out++ = *in++;
+    }
+    in += src_wrap;
+    out += dest_wrap;
+  }
+}
+
+/* I420 -> AYUV: pack planar 4:2:0 input into packed AYUV output, with the
+ * alpha byte generated from the SMPTE mask.  Each loop iteration emits two
+ * horizontal pixels sharing one U/V sample; on even rows the chroma
+ * pointers are rewound so the same chroma line is reused for the next
+ * (odd) luma row. */
+static void
+gst_smpte_alpha_process_i420_ayuv (GstSMPTEAlpha * smpte,
+    const GstVideoFrame * in_frame, GstVideoFrame * out_frame, GstMask * mask,
+    gint border, gint pos)
+{
+  const guint8 *srcY;
+  const guint8 *srcU;
+  const guint8 *srcV;
+  guint8 *out;
+  gint i, j;
+  gint src_wrap, src_u_wrap, src_v_wrap, dest_wrap;
+  gint y_stride, u_stride, v_stride;
+  gboolean odd_width;
+  const guint32 *maskp;
+  gint value;
+  gint min, max;
+  gint width, height;
+
+  /* avoid a division by zero in the alpha computation below */
+  if (border == 0)
+    border++;
+
+  min = pos - border;
+  max = pos;
+  GST_DEBUG_OBJECT (smpte, "pos %d, min %d, max %d, border %d", pos, min, max,
+      border);
+
+  maskp = mask->data;
+
+  width = GST_VIDEO_FRAME_WIDTH (out_frame);
+  height = GST_VIDEO_FRAME_HEIGHT (out_frame);
+
+  y_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 0);
+  u_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 1);
+  v_stride = GST_VIDEO_FRAME_COMP_STRIDE (in_frame, 2);
+
+  /* bytes of padding after the used part of each source row */
+  src_wrap = y_stride - width;
+  src_u_wrap = u_stride - (width / 2);
+  src_v_wrap = v_stride - (width / 2);
+
+  srcY = GST_VIDEO_FRAME_COMP_DATA (in_frame, 0);
+  srcU = GST_VIDEO_FRAME_COMP_DATA (in_frame, 1);
+  srcV = GST_VIDEO_FRAME_COMP_DATA (in_frame, 2);
+
+  out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+  dest_wrap = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0) - (width << 2);
+
+  odd_width = (width % 2 != 0);
+
+  for (i = 0; i < height; i++) {
+    for (j = 0; j < width / 2; j++) {
+      /* first pixel of the pair: chroma pointers not advanced yet */
+      value = *maskp++;
+      *out++ = (0xff * ((CLAMP (value, min, max) - min) << 8) / border) >> 8;
+      *out++ = *srcY++;
+      *out++ = *srcU;
+      *out++ = *srcV;
+      /* second pixel: consume the shared chroma sample */
+      value = *maskp++;
+      *out++ = (0xff * ((CLAMP (value, min, max) - min) << 8) / border) >> 8;
+      *out++ = *srcY++;
+      *out++ = *srcU++;
+      *out++ = *srcV++;
+    }
+    /* Might have one odd column left to do */
+    if (odd_width) {
+      value = *maskp++;
+      *out++ = (0xff * ((CLAMP (value, min, max) - min) << 8) / border) >> 8;
+      *out++ = *srcY++;
+      *out++ = *srcU;
+      *out++ = *srcV;
+    }
+    if (i % 2 == 0) {
+      /* even row: rewind chroma so the same line serves the next luma row */
+      srcU -= width / 2;
+      srcV -= width / 2;
+    } else {
+      /* odd row: skip the chroma row padding to reach the next chroma line */
+      srcU += src_u_wrap;
+      srcV += src_v_wrap;
+    }
+    srcY += src_wrap;
+    out += dest_wrap;
+  }
+}
+
+/* Called before each transform: syncs GObject controller bindings (the
+ * controllable position/border/type/invert properties) to the buffer's
+ * stream time. */
+static void
+gst_smpte_alpha_before_transform (GstBaseTransform * trans, GstBuffer * buf)
+{
+  GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (trans);
+  GstClockTime timestamp, stream_time;
+
+  /* first sync the controller to the current stream_time of the buffer */
+  timestamp = GST_BUFFER_TIMESTAMP (buf);
+  stream_time =
+      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (smpte, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (smpte), stream_time);
+}
+
+/* Per-frame transform: dispatches to the format-specific process function
+ * chosen in set_info.  The object lock protects the mask and the
+ * controllable properties against concurrent property changes. */
+static GstFlowReturn
+gst_smpte_alpha_transform_frame (GstVideoFilter * vfilter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (vfilter);
+  gdouble position;
+  gint border;
+
+  /* process is only set after a successful set_info */
+  if (G_UNLIKELY (!smpte->process))
+    goto not_negotiated;
+
+  GST_OBJECT_LOCK (smpte);
+  position = smpte->position;
+  border = smpte->border;
+
+  /* run the type specific filter code; the mask position is the 0.0-1.0
+   * position scaled over the full mask depth plus the border */
+  smpte->process (smpte, in_frame, out_frame,
+      smpte->mask, border, ((1 << smpte->depth) + border) * position);
+  GST_OBJECT_UNLOCK (smpte);
+
+  return GST_FLOW_OK;
+
+  /* ERRORS */
+not_negotiated:
+  {
+    GST_ELEMENT_ERROR (smpte, CORE, NEGOTIATION, (NULL),
+        ("No input format negotiated"));
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+}
+
+/* Caps transform: since the element can convert I420 to AYUV, any caps
+ * structure that mentions one of the two formats is widened to allow the
+ * other; size/framerate fields pass through.  The result is intersected
+ * with the opposite pad template and the optional filter caps. */
+static GstCaps *
+gst_smpte_alpha_transform_caps (GstBaseTransform * trans,
+    GstPadDirection direction, GstCaps * from, GstCaps * filter)
+{
+  GstCaps *result, *tmp_caps, *tmpl_caps = NULL;
+  gint i, j;
+
+  tmp_caps = gst_caps_new_empty ();
+
+  for (i = 0; i < gst_caps_get_size (from); i++) {
+    GstStructure *structure;
+    const GValue *val, *lval;
+    GValue list = { 0, };
+    GValue aval = { 0, };
+    const gchar *str;
+
+    structure = gst_structure_copy (gst_caps_get_structure (from, i));
+    /* we can transform I420 to AYUV,
+     * so need to locate and substitute AYUV for the both of them */
+    val = gst_structure_get_value (structure, "format");
+    if (val && GST_VALUE_HOLDS_LIST (val)) {
+      gboolean seen_ayuv = FALSE, seen_i420 = FALSE;
+
+      /* scan the list for the two convertible formats */
+      g_value_init (&list, GST_TYPE_LIST);
+      for (j = 0; j < gst_value_list_get_size (val); j++) {
+        lval = gst_value_list_get_value (val, j);
+        if ((str = g_value_get_string (lval))) {
+          if (strcmp (str, "AYUV") == 0) {
+            seen_ayuv = TRUE;
+          } else if (strcmp (str, "I420") == 0) {
+            seen_i420 = TRUE;
+          }
+        }
+      }
+      /* append whichever of the pair is missing (if exactly one is) */
+      if (seen_ayuv && !seen_i420) {
+        str = "I420";
+      } else if (seen_i420 && !seen_ayuv) {
+        str = "AYUV";
+      } else
+        str = NULL;
+      if (str) {
+        g_value_copy (val, &list);
+        g_value_init (&aval, G_TYPE_STRING);
+        g_value_set_string (&aval, str);
+        gst_value_list_append_value (&list, &aval);
+        g_value_reset (&aval);
+        gst_structure_set_value (structure, "format", &list);
+        g_value_unset (&list);
+      }
+    } else if (val && G_VALUE_HOLDS_STRING (val)) {
+      /* a single fixed format: replace it with the { AYUV, I420 } pair */
+      if ((str = g_value_get_string (val)) &&
+          ((strcmp (str, "AYUV") == 0) || (strcmp (str, "I420") == 0))) {
+        g_value_init (&list, GST_TYPE_LIST);
+        g_value_init (&aval, G_TYPE_STRING);
+        g_value_set_string (&aval, "AYUV");
+        gst_value_list_append_value (&list, &aval);
+        g_value_reset (&aval);
+        g_value_set_string (&aval, "I420");
+        gst_value_list_append_value (&list, &aval);
+        g_value_reset (&aval);
+        gst_structure_set_value (structure, "format", &list);
+        g_value_unset (&list);
+      }
+    } else {
+      /* no usable format field: let the template constrain it */
+      gst_structure_remove_field (structure, "format");
+    }
+
+    /* these may change with a format conversion, so do not constrain them */
+    gst_structure_remove_field (structure, "colorimetry");
+    gst_structure_remove_field (structure, "chroma-site");
+
+    gst_caps_append_structure (tmp_caps, structure);
+  }
+
+  /* Get the appropriate template */
+  if (direction == GST_PAD_SINK) {
+    tmpl_caps =
+        gst_static_pad_template_get_caps (&gst_smpte_alpha_src_template);
+  } else if (direction == GST_PAD_SRC) {
+    tmpl_caps =
+        gst_static_pad_template_get_caps (&gst_smpte_alpha_sink_template);
+  } else {
+    g_assert_not_reached ();
+  }
+
+  /* Intersect with our template caps */
+  result = gst_caps_intersect (tmp_caps, tmpl_caps);
+  gst_caps_unref (tmpl_caps);
+  gst_caps_unref (tmp_caps);
+
+  result = gst_caps_simplify (result);
+
+  GST_LOG_OBJECT (trans, "transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT,
+      from, result);
+
+  if (filter) {
+    GstCaps *intersection;
+
+    GST_DEBUG_OBJECT (trans, "Using filter caps %" GST_PTR_FORMAT, filter);
+    intersection =
+        gst_caps_intersect_full (filter, result, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (result);
+    result = intersection;
+    GST_DEBUG_OBJECT (trans, "Intersection %" GST_PTR_FORMAT, result);
+  }
+
+  return result;
+}
+
+/* Format negotiation: remember the in/out formats, (re)generate the mask
+ * for the negotiated size and pick the matching process function.  When
+ * no in/out combination matches, process stays NULL and transform_frame
+ * will error out. */
+static gboolean
+gst_smpte_alpha_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (vfilter);
+  gboolean ret;
+
+  smpte->process = NULL;
+  smpte->in_format = GST_VIDEO_INFO_FORMAT (in_info);
+  smpte->out_format = GST_VIDEO_INFO_FORMAT (out_info);
+
+  /* try to update the mask now, this will also adjust the width/height on
+   * success */
+  GST_OBJECT_LOCK (smpte);
+  ret =
+      gst_smpte_alpha_update_mask (smpte, smpte->type, smpte->invert,
+      smpte->depth, GST_VIDEO_INFO_WIDTH (out_info),
+      GST_VIDEO_INFO_HEIGHT (out_info));
+  GST_OBJECT_UNLOCK (smpte);
+
+  if (!ret)
+    goto mask_failed;
+
+  /* choose the process function for the negotiated format pair; only
+   * same-format passthrough plus the I420->AYUV conversion are handled */
+  switch (smpte->out_format) {
+    case GST_VIDEO_FORMAT_AYUV:
+      switch (smpte->in_format) {
+        case GST_VIDEO_FORMAT_AYUV:
+          smpte->process = gst_smpte_alpha_process_ayuv_ayuv;
+          break;
+        case GST_VIDEO_FORMAT_I420:
+          smpte->process = gst_smpte_alpha_process_i420_ayuv;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_ARGB:
+      switch (smpte->in_format) {
+        case GST_VIDEO_FORMAT_ARGB:
+          smpte->process = gst_smpte_alpha_process_argb_argb;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_RGBA:
+      switch (smpte->in_format) {
+        case GST_VIDEO_FORMAT_RGBA:
+          smpte->process = gst_smpte_alpha_process_rgba_rgba;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_ABGR:
+      switch (smpte->in_format) {
+        case GST_VIDEO_FORMAT_ABGR:
+          smpte->process = gst_smpte_alpha_process_abgr_abgr;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_BGRA:
+      switch (smpte->in_format) {
+        case GST_VIDEO_FORMAT_BGRA:
+          smpte->process = gst_smpte_alpha_process_bgra_bgra;
+          break;
+        default:
+          break;
+      }
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+
+  /* ERRORS */
+mask_failed:
+  {
+    GST_ERROR_OBJECT (smpte, "failed creating the mask");
+    return FALSE;
+  }
+}
+
+static void
+gst_smpte_alpha_finalize (GstSMPTEAlpha * smpte)
+{
+  /* drop the transition mask, if one was ever generated */
+  if (smpte->mask != NULL) {
+    gst_mask_destroy (smpte->mask);
+    smpte->mask = NULL;
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize ((GObject *) smpte);
+}
+
+/* Property setter.  type/depth/invert changes go through
+ * gst_smpte_alpha_update_mask so the mask is regenerated when needed;
+ * all mutations happen under the object lock because transform_frame
+ * reads these fields concurrently. */
+static void
+gst_smpte_alpha_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (object);
+
+  switch (prop_id) {
+    case PROP_TYPE:{
+      gint type;
+
+      type = g_value_get_enum (value);
+
+      GST_OBJECT_LOCK (smpte);
+      gst_smpte_alpha_update_mask (smpte, type, smpte->invert,
+          smpte->depth, smpte->width, smpte->height);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    }
+    case PROP_BORDER:
+      GST_OBJECT_LOCK (smpte);
+      smpte->border = g_value_get_int (value);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    case PROP_DEPTH:{
+      gint depth;
+
+      depth = g_value_get_int (value);
+
+      GST_OBJECT_LOCK (smpte);
+      gst_smpte_alpha_update_mask (smpte, smpte->type, smpte->invert,
+          depth, smpte->width, smpte->height);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    }
+    case PROP_POSITION:
+      GST_OBJECT_LOCK (smpte);
+      smpte->position = g_value_get_double (value);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    case PROP_INVERT:{
+      gboolean invert;
+
+      invert = g_value_get_boolean (value);
+      GST_OBJECT_LOCK (smpte);
+      gst_smpte_alpha_update_mask (smpte, smpte->type, invert,
+          smpte->depth, smpte->width, smpte->height);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    }
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+static void
+gst_smpte_alpha_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstSMPTEAlpha *smpte = GST_SMPTE_ALPHA (object);
+
+  /* each field read is taken under the object lock since the streaming
+   * thread can update them concurrently */
+  switch (prop_id) {
+    case PROP_TYPE:
+      GST_OBJECT_LOCK (smpte);
+      g_value_set_enum (value, smpte->type);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    case PROP_BORDER:
+      GST_OBJECT_LOCK (smpte);
+      g_value_set_int (value, smpte->border);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    case PROP_DEPTH:
+      GST_OBJECT_LOCK (smpte);
+      g_value_set_int (value, smpte->depth);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    case PROP_POSITION:
+      GST_OBJECT_LOCK (smpte);
+      g_value_set_double (value, smpte->position);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    case PROP_INVERT:
+      GST_OBJECT_LOCK (smpte);
+      g_value_set_boolean (value, smpte->invert);
+      GST_OBJECT_UNLOCK (smpte);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
diff --git a/gst/smpte/gstsmptealpha.h b/gst/smpte/gstsmptealpha.h
new file mode 100644
index 0000000000..630137fc94
--- /dev/null
+++ b/gst/smpte/gstsmptealpha.h
@@ -0,0 +1,80 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_SMPTE_ALPHA_H__
+#define __GST_SMPTE_ALPHA_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+
+G_BEGIN_DECLS
+
+#include "gstmask.h"
+
/* Standard GObject type-cast / type-check boilerplate for GstSMPTEAlpha. */
#define GST_TYPE_SMPTE_ALPHA \
  (gst_smpte_alpha_get_type())
#define GST_SMPTE_ALPHA(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SMPTE_ALPHA,GstSMPTEAlpha))
#define GST_SMPTE_ALPHA_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SMPTE_ALPHA,GstSMPTEAlphaClass))
#define GST_IS_SMPTE_ALPHA(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SMPTE_ALPHA))
#define GST_IS_SMPTE_ALPHA_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SMPTE_ALPHA))

typedef struct _GstSMPTEAlpha GstSMPTEAlpha;
typedef struct _GstSMPTEAlphaClass GstSMPTEAlphaClass;

/* Video filter that applies a SMPTE transition mask to the alpha channel
 * of a video stream. */
struct _GstSMPTEAlpha {
  GstVideoFilter element;

  /* properties (accessed under the object lock by the property handlers) */
  gint type;            /* transition/mask type; fed to gst_smpte_alpha_update_mask() */
  gint border;          /* border value of the transition */
  gint depth;           /* mask depth; fed to gst_smpte_alpha_update_mask() */
  gdouble position;     /* transition position — presumably 0.0..1.0; confirm against the pspec */
  gboolean invert;      /* whether the mask is inverted */

  /* negotiated format */
  GstVideoFormat in_format, out_format;
  gint width;
  gint height;

  /* state of the effect */
  GstMask *mask;

  /* processing function — NOTE(review): presumably selected to match the
   * negotiated video format; confirm in the caps-negotiation code. */
  void (*process) (GstSMPTEAlpha * smpte, const GstVideoFrame * in, GstVideoFrame * out,
      GstMask * mask, gint border, gint pos);
};

struct _GstSMPTEAlphaClass {
  GstVideoFilterClass parent_class;
};

GType gst_smpte_alpha_get_type (void);

GST_ELEMENT_REGISTER_DECLARE (smptealpha);
+
+G_END_DECLS
+
+#endif /* __GST_SMPTE_ALPHA_H__ */
diff --git a/gst/smpte/meson.build b/gst/smpte/meson.build
new file mode 100644
index 0000000000..01e9caaec5
--- /dev/null
+++ b/gst/smpte/meson.build
@@ -0,0 +1,19 @@
# Source files of the SMPTE transition plugin.
smpte_sources = [
  'gstsmpte.c',
  'gstmask.c',
  'barboxwipes.c',
  'paint.c',
  'gstsmptealpha.c',
  'plugin.c',
]

# Build and install the plugin shared library.  libm is required for the
# trigonometry used by paint.c (acos/sqrt).
gstsmpte = library('gstsmpte',
  smpte_sources,
  c_args : gst_plugins_good_args,
  include_directories : [configinc],
  dependencies : [gstvideo_dep, gst_dep, libm],
  install : true,
  install_dir : plugins_install_dir,
)
pkgconfig.generate(gstsmpte, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstsmpte]
diff --git a/gst/smpte/paint.c b/gst/smpte/paint.c
new file mode 100644
index 0000000000..cd97323cba
--- /dev/null
+++ b/gst/smpte/paint.c
@@ -0,0 +1,338 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <math.h>
+#include <stdlib.h>
+#include "paint.h"
+
+#ifndef M_PI
+#define M_PI 3.14159265358979323846
+#endif
+
+void
+gst_smpte_paint_vbox (guint32 * dest, gint stride,
+ gint x0, gint y0, gint c0, gint x1, gint y1, gint c1)
+{
+ gint i, j;
+ gint width, height;
+
+ width = x1 - x0;
+ height = y1 - y0;
+
+ g_assert (width > 0);
+ g_assert (height > 0);
+
+ dest = dest + y0 * stride + x0;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ dest[j] = (c1 * j + c0 * (width - j)) / width;
+ }
+ dest += stride;
+ }
+}
+
+void
+gst_smpte_paint_hbox (guint32 * dest, gint stride,
+ gint x0, gint y0, gint c0, gint x1, gint y1, gint c1)
+{
+ gint i, j;
+ gint width, height;
+
+ width = x1 - x0;
+ height = y1 - y0;
+
+ g_assert (width > 0);
+ g_assert (height > 0);
+
+ dest = dest + y0 * stride + x0;
+
+ for (i = 0; i < height; i++) {
+ guint32 value = (c1 * i + c0 * (height - i)) / height;
+
+ for (j = 0; j < width; j++) {
+ dest[j] = value;
+ }
+ dest += stride;
+ }
+}
+
/* Advance one step along a 3-D Bresenham-style line through (px,py,pz).
 * The dominant axis (largest absolute delta) advances on every step; the
 * other two axes carry error accumulators (xr/yr/zr) and advance when
 * their error overflows.
 * NOTE(review): the error tests are not symmetric across the three
 * branches (e.g. "zr >= dzabs" in the first branch where the pattern of
 * the first test suggests "zr >= dxabs", and the third branch compares
 * against dyabs but subtracts dzabs).  This may be deliberate tuning for
 * the wipe patterns it feeds — confirm against the canonical algorithm
 * before changing. */
#define STEP_3D_LINE(dxabs,dyabs,dzabs,sdx,sdy,sdz,xr,yr,zr,px,py,pz) \
G_STMT_START { \
  if (dxabs >= dyabs && dxabs >= dzabs) { \
    yr += dyabs; \
    zr += dzabs; \
    if (yr >= dxabs) { \
      py += sdy; \
      yr -= dxabs; \
    } \
    if (zr >= dzabs) { \
      pz += sdz; \
      zr -= dxabs; \
    } \
    px += sdx; \
  } else if (dyabs >= dxabs && dyabs >= dzabs) { \
    xr += dxabs; \
    zr += dzabs; \
    if (xr >= dyabs) { \
      px += sdx; \
      xr -= dyabs; \
    } \
    if (zr >= dzabs) { \
      pz += sdz; \
      zr -= dyabs; \
    } \
    py += sdy; \
  } else { \
    yr += dyabs; \
    xr += dxabs; \
    if (yr >= dyabs) { \
      py += sdy; \
      yr -= dzabs; \
    } \
    if (xr >= dyabs) { \
      px += sdx; \
      xr -= dzabs; \
    } \
    pz += sdz; \
  } \
} G_STMT_END

/* Exchange two gint values in place. */
#define SWAP_INT(a,b) \
G_STMT_START { \
  gint tmp; \
  tmp = (a); \
  (a) = (b); \
  (b) = (tmp); \
} G_STMT_END

/* Sign of @a; note that 0 maps to +1. */
#define SIGN(a) ((a) < 0 ? -1 : 1)

/* Initialise the state used by STEP_3D_LINE for a line from (x0,y0,z0)
 * to (x1,y1,z1): absolute deltas, step signs, error accumulators seeded
 * at half the delta, and the current point (px,py,pz) at the start. */
#define PREPARE_3D_LINE(x0,y0,z0,x1,y1,z1,dxabs,dyabs,dzabs,sdx,sdy,sdz,xr,yr,zr,px,py,pz)\
G_STMT_START { \
  gint dx, dy, dz; \
  dx = x1 - x0; \
  dy = y1 - y0; \
  dz = z1 - z0; \
  dxabs = abs (dx); \
  dyabs = abs (dy); \
  dzabs = abs (dz); \
  sdx = SIGN (dx); \
  sdy = SIGN (dy); \
  sdz = SIGN (dz); \
  xr = dxabs >> 1; \
  yr = dyabs >> 1; \
  zr = dzabs >> 1; \
  px = x0; \
  py = y0; \
  pz = z0; \
} G_STMT_END
+
/* Paint the triangle (x0,y0)-(x1,y1)-(x2,y2) filled with a linear
 * gradient between the per-vertex values c0, c1 and c2.  The vertices
 * are first sorted by increasing y; the triangle is then rasterised as
 * two vertical segments (y0..y1 and y1..y2), walking a left and a right
 * 3-D Bresenham line (x, y, value) down the edges and interpolating the
 * value across each scanline.  @stride is in 32-bit pixels. */
void
gst_smpte_paint_triangle_linear (guint32 * dest, gint stride,
    gint x0, gint y0, gint c0,
    gint x1, gint y1, gint c1, gint x2, gint y2, gint c2)
{
  /* left-edge walker state */
  gint sdxl, sdyl, sdcl, dxlabs, dylabs, dclabs, xrl, yrl, crl, pxl, pyl, pcl;
  /* right-edge walker state */
  gint sdxr, sdyr, sdcr, dxrabs, dyrabs, dcrabs, xrr, yrr, crr, pxr, pyr, pcr;
  gint i, j, k, seg_start, seg_end;

  /* sort the vertices so that y0 <= y1 <= y2, keeping values attached */
  if (y0 > y1) {
    SWAP_INT (x0, x1);
    SWAP_INT (y0, y1);
    SWAP_INT (c0, c1);
  }
  if (y0 > y2) {
    SWAP_INT (x0, x2);
    SWAP_INT (y0, y2);
    SWAP_INT (c0, c2);
  }
  if (y1 > y2) {
    SWAP_INT (x1, x2);
    SWAP_INT (y1, y2);
    SWAP_INT (c1, c2);
  }

  /* the left edge runs from the top vertex all the way to the bottom one */
  PREPARE_3D_LINE (x0, y0, c0, x2, y2, c2,
      dxlabs, dylabs, dclabs, sdxl, sdyl, sdcl, xrl, yrl, crl, pxl, pyl, pcl);

  /* the right edge initially covers the top half (top to middle vertex) */
  PREPARE_3D_LINE (x0, y0, c0, x1, y1, c1,
      dxrabs, dyrabs, dcrabs, sdxr, sdyr, sdcr, xrr, yrr, crr, pxr, pyr, pcr);

  dest = dest + stride * y0;
  seg_start = y0;
  seg_end = y1;

  /* do two passes */
  for (k = 0; k < 2; k++) {
    for (i = seg_start; i < seg_end; i++) {
      gint s = pxl, e = pxr, sc = pcl, ec = pcr;
      gint sign = SIGN (e - s);

      /* interpolate the value horizontally between the two edge points */
      e += sign;
      for (j = s; j != e; j += sign) {
        dest[j] = (ec * (j - s) + sc * (e - j)) / (e - s);
      }

      /* step both edge walkers until they leave the current scanline */
      while (pyr == i) {
        STEP_3D_LINE (dxrabs, dyrabs, dcrabs, sdxr, sdyr, sdcr,
            xrr, yrr, crr, pxr, pyr, pcr);
      }
      while (pyl == i) {
        STEP_3D_LINE (dxlabs, dylabs, dclabs, sdxl, sdyl, sdcl,
            xrl, yrl, crl, pxl, pyl, pcl);
      }
      dest += stride;
    }

    /* second pass: the right edge now runs from the middle vertex down */
    PREPARE_3D_LINE (x1, y1, c1, x2, y2, c2,
        dxrabs, dyrabs, dcrabs, sdxr, sdyr, sdcr, xrr, yrr, crr, pxr, pyr, pcr);

    seg_start = y1;
    seg_end = y2;
  }
}
+
/* Draw a straight line from (x0,y0) to (x1,y1) in value @col using an
 * integer Bresenham-style midpoint algorithm.  @stride is in 32-bit
 * pixels.  Both endpoints are painted (the loop runs i+1 times for a
 * dominant-axis span of i). */
static void
draw_bresenham_line (guint32 * dest, gint stride,
    gint x0, gint y0, gint x1, gint y1, guint32 col)
{
  gint dx, dy;
  gint x_incr, y_incr;
  gint i, dpr, dpru, P, indep;

  dx = abs (x1 - x0);
  dy = abs (y1 - y0);

  dest = dest + y0 * stride + x0;

  /* per-step pointer increments for each axis */
  x_incr = SIGN (x1 - x0);
  y_incr = SIGN (y1 - y0) * stride;

  /* pick the dominant axis; @indep is the increment applied on every
   * non-diagonal step */
  if (dx >= dy) {
    dpr = dy << 1;
    i = dx;
    indep = x_incr;
  } else {
    dpr = dx << 1;
    i = dy;
    indep = y_incr;
  }

  /* error terms: added after a diagonal (dpru) or straight (dpr) step */
  dpru = dpr - (i << 1);
  P = dpr - i;

  for (; i >= 0; i--) {
    *dest = col;

    if (P > 0) {
      /* diagonal step: advance on both axes */
      dest += x_incr;
      dest += y_incr;
      P += dpru;
    } else {
      dest += indep;
      P += dpr;
    }
  }
}
+
/* Paint a clock-wipe triangle: lines are drawn from the centre (x0,y0)
 * to every point on the edge between (x1,y1) and (x2,y2), with the line
 * value interpolated from c1 to c2 in proportion to the swept angle.
 * Only axis-aligned edges (x1 == x2 or y1 == y2) are supported; other
 * geometry emits a warning and paints nothing.
 * NOTE(review): @c0 is accepted but never used.  Also, @angle_e is 0
 * when the two edge endpoints are collinear with the centre, which would
 * divide by zero below — presumably callers never pass such geometry;
 * confirm. */
void
gst_smpte_paint_triangle_clock (guint32 * dest, gint stride,
    gint x0, gint y0, gint c0,
    gint x1, gint y1, gint c1, gint x2, gint y2, gint c2)
{
  gint i;
  gint sign;
  gfloat angle, angle_e;
  gfloat len1;

  /* total angle between the two edge endpoints, as seen from the centre */
  angle_e = acos (((x1 - x0) * (x2 - x0) + (y1 - y0) * (y2 - y0)) /
      (sqrt ((x1 - x0) * (x1 - x0) + (y1 - y0) * (y1 - y0)) *
          sqrt ((x2 - x0) * (x2 - x0) + (y2 - y0) * (y2 - y0))));

  len1 = sqrt ((x1 - x0) * (x1 - x0) + (y1 - y0) * (y1 - y0));

  if (x1 == x2) {
    /* vertical edge: sweep y from y1 to y2 inclusive */
    sign = SIGN (y2 - y1);

    for (i = y1; i != (y2 + sign); i += sign) {
      if (y1 == i)
        angle = 0;
      else
        /* fraction of the total sweep covered so far */
        angle = acos (((x1 - x0) * (x2 - x0) + (y1 - y0) * (i - y0)) /
            (len1 * sqrt ((x1 - x0) * (x1 - x0) + (i - y0) * (i -
                        y0)))) / angle_e;

      draw_bresenham_line (dest, stride,
          x0, y0, x1, i, (c2 * angle + c1 * (1.0 - angle)));
    }
  } else if (y1 == y2) {
    /* horizontal edge: sweep x from x1 to x2 inclusive */
    sign = SIGN (x2 - x1);

    for (i = x1; i != (x2 + sign); i += sign) {
      if (x1 == i)
        angle = 0;
      else
        angle = acos (((x1 - x0) * (i - x0) + (y1 - y0) * (y2 - y0)) /
            (len1 * sqrt ((i - x0) * (i - x0) + (y2 - y0) * (y2 -
                        y0)))) / angle_e;

      draw_bresenham_line (dest, stride,
          x0, y0, i, y1, (c2 * angle + c1 * (1.0 - angle)));
    }
  } else {
    g_warning ("paint triangle clock: not supported");
    return;
  }
}
+
+void
+gst_smpte_paint_box_clock (guint32 * dest, gint stride,
+ gint x0, gint y0, gint c0,
+ gint x1, gint y1, gint c1, gint x2, gint y2, gint c2)
+{
+ gfloat angle_m, col_m;
+ gint xv, yv;
+
+ if (x1 == x0) {
+ xv = x2;
+ yv = y1;
+ } else if (y1 == y0) {
+ xv = x1;
+ yv = y2;
+ } else {
+ g_warning ("paint box clock: not supported");
+ return;
+ }
+
+ angle_m = 2 * acos (((x1 - x0) * (xv - x0) + (y1 - y0) * (yv - y0)) /
+ (sqrt ((x1 - x0) * (x1 - x0) + (y1 - y0) * (y1 - y0)) *
+ sqrt ((xv - x0) * (xv - x0) + (yv - y0) * (yv - y0)))) / M_PI;
+
+ col_m = c2 * angle_m + c1 * (1.0 - angle_m);
+
+ gst_smpte_paint_triangle_clock (dest, stride,
+ x0, y0, c0, x1, y1, c1, xv, yv, col_m);
+ gst_smpte_paint_triangle_clock (dest, stride,
+ x0, y0, c0, xv, yv, col_m, x2, y2, c2);
+}
diff --git a/gst/smpte/paint.h b/gst/smpte/paint.h
new file mode 100644
index 0000000000..c815e0bee2
--- /dev/null
+++ b/gst/smpte/paint.h
@@ -0,0 +1,47 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_SMPTE_PAINT_H__
+#define __GST_SMPTE_PAINT_H__
+
+#include <glib.h>
+
+void gst_smpte_paint_vbox (guint32 *dest, gint stride,
+ gint x0, gint y0, gint c0,
+ gint x1, gint y1, gint c1);
+void gst_smpte_paint_hbox (guint32 *dest, gint stride,
+ gint x0, gint y0, gint c0,
+ gint x1, gint y1, gint c1);
+
+void gst_smpte_paint_triangle_linear (guint32 *dest, gint stride,
+ gint x0, gint y0, gint c0,
+ gint x1, gint y1, gint c1,
+ gint x2, gint y2, gint c2);
+
+void gst_smpte_paint_triangle_clock (guint32 *dest, gint stride,
+ gint x0, gint y0, gint c0,
+ gint x1, gint y1, gint c1,
+ gint x2, gint y2, gint c2);
+
+void gst_smpte_paint_box_clock (guint32 *dest, gint stride,
+ gint x0, gint y0, gint c0,
+ gint x1, gint y1, gint c1,
+ gint x2, gint y2, gint c2);
+
+#endif /* __GST_SMPTE_PAINT_H__ */
diff --git a/gst/smpte/plugin.c b/gst/smpte/plugin.c
new file mode 100644
index 0000000000..5595162d44
--- /dev/null
+++ b/gst/smpte/plugin.c
@@ -0,0 +1,42 @@
+/* GStreamer
+ * Copyright (C) <2008> Wim Taymans <wim.taymans@google.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstsmpte.h"
+#include "gstsmptealpha.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (smpte, plugin);
+ ret |= GST_ELEMENT_REGISTER (smptealpha, plugin);
+
+ return ret;
+}
+
/* Plugin descriptor picked up by GStreamer's plugin scanner; plugin_init
 * performs the per-element registration. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    smpte,
    "Apply the standard SMPTE transitions on video images",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/spectrum/gstspectrum.c b/gst/spectrum/gstspectrum.c
new file mode 100644
index 0000000000..d95915a8ed
--- /dev/null
+++ b/gst/spectrum/gstspectrum.c
@@ -0,0 +1,966 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * <2006,2011> Stefan Kost <ensonic@users.sf.net>
+ * <2007-2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-spectrum
+ * @title: spectrum
+ *
+ * The Spectrum element analyzes the frequency spectrum of an audio signal.
+ * If the #GstSpectrum:post-messages property is %TRUE, it sends analysis results
+ * as element messages named
+ * `spectrum` after each interval of time given
+ * by the #GstSpectrum:interval property.
+ *
+ * The message's structure contains some combination of these fields:
+ *
+ * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
+ * * #GstClockTime `stream-time`: the stream time of the buffer.
+ * * #GstClockTime `running-time`: the running_time of the buffer.
+ * * #GstClockTime `duration`: the duration of the buffer.
+ * * #GstClockTime `endtime`: the end time of the buffer that triggered the message as stream time (this
+ * is deprecated, as it can be calculated from stream-time + duration)
+ * * A #GST_TYPE_LIST value of #gfloat `magnitude`: the level for each frequency band in dB.
+ * All values below the value of the
+ * #GstSpectrum:threshold property will be set to the threshold. Only present
+ * if the #GstSpectrum:message-magnitude property is %TRUE.
+ * * A #GST_TYPE_LIST of #gfloat `phase`: The phase for each frequency band. The value is between -pi and pi. Only
+ * present if the #GstSpectrum:message-phase property is %TRUE.
+ *
+ * If #GstSpectrum:multi-channel property is set to true. magnitude and phase
+ * fields will be each a nested #GST_TYPE_ARRAY value. The first dimension are the
+ * channels and the second dimension are the values.
+ *
+ * ## Example application
+ *
+ * {{ tests/examples/spectrum/spectrum-example.c }}
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+#include "gstspectrum.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_spectrum_debug);
+#define GST_CAT_DEFAULT gst_spectrum_debug
+
+/* elementfactory information */
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+# define FORMATS "{ S16LE, S24LE, S32LE, F32LE, F64LE }"
+#else
+# define FORMATS "{ S16BE, S24BE, S32BE, F32BE, F64BE }"
+#endif
+
+#define ALLOWED_CAPS \
+ GST_AUDIO_CAPS_MAKE (FORMATS) ", " \
+ "layout = (string) interleaved"
+
+/* Spectrum properties */
+#define DEFAULT_POST_MESSAGES TRUE
+#define DEFAULT_MESSAGE_MAGNITUDE TRUE
+#define DEFAULT_MESSAGE_PHASE FALSE
+#define DEFAULT_INTERVAL (GST_SECOND / 10)
+#define DEFAULT_BANDS 128
+#define DEFAULT_THRESHOLD -60
+#define DEFAULT_MULTI_CHANNEL FALSE
+
+enum
+{
+ PROP_0,
+ PROP_POST_MESSAGES,
+ PROP_MESSAGE_MAGNITUDE,
+ PROP_MESSAGE_PHASE,
+ PROP_INTERVAL,
+ PROP_BANDS,
+ PROP_THRESHOLD,
+ PROP_MULTI_CHANNEL
+};
+
+#define gst_spectrum_parent_class parent_class
+G_DEFINE_TYPE (GstSpectrum, gst_spectrum, GST_TYPE_AUDIO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (spectrum, "spectrum", GST_RANK_NONE,
+ GST_TYPE_SPECTRUM);
+
+static void gst_spectrum_finalize (GObject * object);
+static void gst_spectrum_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_spectrum_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean gst_spectrum_start (GstBaseTransform * trans);
+static gboolean gst_spectrum_stop (GstBaseTransform * trans);
+static GstFlowReturn gst_spectrum_transform_ip (GstBaseTransform * trans,
+ GstBuffer * in);
+static gboolean gst_spectrum_setup (GstAudioFilter * base,
+ const GstAudioInfo * info);
+
/* Class initialisation: wires up GObject property handling and finalize,
 * the GstBaseTransform start/stop/transform_ip vtable entries, installs
 * the spectrum properties, and registers the audio caps pad templates. */
static void
gst_spectrum_class_init (GstSpectrumClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
  GstBaseTransformClass *trans_class = GST_BASE_TRANSFORM_CLASS (klass);
  GstAudioFilterClass *filter_class = GST_AUDIO_FILTER_CLASS (klass);
  GstCaps *caps;

  gobject_class->set_property = gst_spectrum_set_property;
  gobject_class->get_property = gst_spectrum_get_property;
  gobject_class->finalize = gst_spectrum_finalize;

  trans_class->start = GST_DEBUG_FUNCPTR (gst_spectrum_start);
  trans_class->stop = GST_DEBUG_FUNCPTR (gst_spectrum_stop);
  trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_spectrum_transform_ip);
  /* the element only analyses; data always passes through unmodified */
  trans_class->passthrough_on_same_caps = TRUE;

  filter_class->setup = GST_DEBUG_FUNCPTR (gst_spectrum_setup);

  g_object_class_install_property (gobject_class, PROP_POST_MESSAGES,
      g_param_spec_boolean ("post-messages", "Post Messages",
          "Whether to post a 'spectrum' element message on the bus for each "
          "passed interval", DEFAULT_POST_MESSAGES,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_MESSAGE_MAGNITUDE,
      g_param_spec_boolean ("message-magnitude", "Magnitude",
          "Whether to add a 'magnitude' field to the structure of any "
          "'spectrum' element messages posted on the bus",
          DEFAULT_MESSAGE_MAGNITUDE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_MESSAGE_PHASE,
      g_param_spec_boolean ("message-phase", "Phase",
          "Whether to add a 'phase' field to the structure of any "
          "'spectrum' element messages posted on the bus",
          DEFAULT_MESSAGE_PHASE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_INTERVAL,
      g_param_spec_uint64 ("interval", "Interval",
          "Interval of time between message posts (in nanoseconds)",
          1, G_MAXUINT64, DEFAULT_INTERVAL,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  /* upper bound keeps nfft = 2 * bands - 2 within gint range */
  g_object_class_install_property (gobject_class, PROP_BANDS,
      g_param_spec_uint ("bands", "Bands", "Number of frequency bands",
          2, ((guint) G_MAXINT + 2) / 2, DEFAULT_BANDS,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_THRESHOLD,
      g_param_spec_int ("threshold", "Threshold",
          "dB threshold for result. All lower values will be set to this",
          G_MININT, 0, DEFAULT_THRESHOLD,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  g_object_class_install_property (gobject_class, PROP_MULTI_CHANNEL,
      g_param_spec_boolean ("multi-channel", "Multichannel results",
          "Send separate results for each channel",
          DEFAULT_MULTI_CHANNEL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  GST_DEBUG_CATEGORY_INIT (gst_spectrum_debug, "spectrum", 0,
      "audio spectrum analyser element");

  gst_element_class_set_static_metadata (element_class, "Spectrum analyzer",
      "Filter/Analyzer/Audio",
      "Run an FFT on the audio signal, output spectrum data",
      "Erik Walthinsen <omega@cse.ogi.edu>, "
      "Stefan Kost <ensonic@users.sf.net>, "
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");

  caps = gst_caps_from_string (ALLOWED_CAPS);
  gst_audio_filter_class_add_pad_templates (filter_class, caps);
  gst_caps_unref (caps);
}
+
/* Instance initialisation: set the properties to their defaults and
 * create the mutex guarding the analysis state.  multi_channel is not
 * set here — it relies on the zero-initialised instance matching
 * DEFAULT_MULTI_CHANNEL (FALSE). */
static void
gst_spectrum_init (GstSpectrum * spectrum)
{
  spectrum->post_messages = DEFAULT_POST_MESSAGES;
  spectrum->message_magnitude = DEFAULT_MESSAGE_MAGNITUDE;
  spectrum->message_phase = DEFAULT_MESSAGE_PHASE;
  spectrum->interval = DEFAULT_INTERVAL;
  spectrum->bands = DEFAULT_BANDS;
  spectrum->threshold = DEFAULT_THRESHOLD;

  g_mutex_init (&spectrum->lock);
}
+
/* Allocate the per-channel FFT state.  nfft = 2 * bands - 2 so that the
 * real FFT of nfft samples yields exactly @bands frequency bins.  One
 * channel structure per input channel in multi-channel mode, otherwise a
 * single structure for the mixed-down signal.  Must only be called when
 * no data is currently allocated (asserted). */
static void
gst_spectrum_alloc_channel_data (GstSpectrum * spectrum)
{
  gint i;
  GstSpectrumChannel *cd;
  guint bands = spectrum->bands;
  guint nfft = 2 * bands - 2;

  g_assert (spectrum->channel_data == NULL);

  spectrum->num_channels = (spectrum->multi_channel) ?
      GST_AUDIO_FILTER_CHANNELS (spectrum) : 1;

  GST_DEBUG_OBJECT (spectrum, "allocating data for %d channels",
      spectrum->num_channels);

  spectrum->channel_data = g_new (GstSpectrumChannel, spectrum->num_channels);
  for (i = 0; i < spectrum->num_channels; i++) {
    cd = &spectrum->channel_data[i];
    cd->fft_ctx = gst_fft_f32_new (nfft, FALSE);
    cd->input = g_new0 (gfloat, nfft);
    cd->input_tmp = g_new0 (gfloat, nfft);
    cd->freqdata = g_new0 (GstFFTF32Complex, bands);
    cd->spect_magnitude = g_new0 (gfloat, bands);
    cd->spect_phase = g_new0 (gfloat, bands);
  }
}
+
+static void
+gst_spectrum_free_channel_data (GstSpectrum * spectrum)
+{
+ if (spectrum->channel_data) {
+ gint i;
+ GstSpectrumChannel *cd;
+
+ GST_DEBUG_OBJECT (spectrum, "freeing data for %d channels",
+ spectrum->num_channels);
+
+ for (i = 0; i < spectrum->num_channels; i++) {
+ cd = &spectrum->channel_data[i];
+ if (cd->fft_ctx)
+ gst_fft_f32_free (cd->fft_ctx);
+ g_free (cd->input);
+ g_free (cd->input_tmp);
+ g_free (cd->freqdata);
+ g_free (cd->spect_magnitude);
+ g_free (cd->spect_phase);
+ }
+ g_free (spectrum->channel_data);
+ spectrum->channel_data = NULL;
+ }
+}
+
/* Reset the running analysis counters: frames accumulated since the last
 * message, FFTs performed, and the interval rounding error accumulator. */
static void
gst_spectrum_flush (GstSpectrum * spectrum)
{
  spectrum->num_frames = 0;
  spectrum->num_fft = 0;

  spectrum->accumulated_error = 0;
}
+
/* Drop all analysis state: frees the per-channel FFT data and resets the
 * counters.  The channel data is lazily re-allocated on the next use. */
static void
gst_spectrum_reset_state (GstSpectrum * spectrum)
{
  GST_DEBUG_OBJECT (spectrum, "resetting state");

  gst_spectrum_free_channel_data (spectrum);
  gst_spectrum_flush (spectrum);
}
+
/* GObject finalize: release all analysis state and the state mutex, then
 * chain up to the parent class. */
static void
gst_spectrum_finalize (GObject * object)
{
  GstSpectrum *spectrum = GST_SPECTRUM (object);

  gst_spectrum_reset_state (spectrum);
  g_mutex_clear (&spectrum->lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
+
/* GObject set_property implementation.  Properties that change the shape
 * of the analysis (interval, bands, multi-channel) are written under the
 * state lock and reset the analysis state when the value changes.
 * NOTE(review): post-messages, message-magnitude, message-phase and
 * threshold are written without taking the lock — presumably acceptable
 * as single-word writes that don't affect allocated state; confirm. */
static void
gst_spectrum_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstSpectrum *filter = GST_SPECTRUM (object);

  switch (prop_id) {
    case PROP_POST_MESSAGES:
      filter->post_messages = g_value_get_boolean (value);
      break;
    case PROP_MESSAGE_MAGNITUDE:
      filter->message_magnitude = g_value_get_boolean (value);
      break;
    case PROP_MESSAGE_PHASE:
      filter->message_phase = g_value_get_boolean (value);
      break;
    case PROP_INTERVAL:{
      guint64 interval = g_value_get_uint64 (value);
      g_mutex_lock (&filter->lock);
      if (filter->interval != interval) {
        filter->interval = interval;
        gst_spectrum_reset_state (filter);
      }
      g_mutex_unlock (&filter->lock);
      break;
    }
    case PROP_BANDS:{
      guint bands = g_value_get_uint (value);
      g_mutex_lock (&filter->lock);
      if (filter->bands != bands) {
        filter->bands = bands;
        gst_spectrum_reset_state (filter);
      }
      g_mutex_unlock (&filter->lock);
      break;
    }
    case PROP_THRESHOLD:
      filter->threshold = g_value_get_int (value);
      break;
    case PROP_MULTI_CHANNEL:{
      gboolean multi_channel = g_value_get_boolean (value);
      g_mutex_lock (&filter->lock);
      if (filter->multi_channel != multi_channel) {
        filter->multi_channel = multi_channel;
        gst_spectrum_reset_state (filter);
      }
      g_mutex_unlock (&filter->lock);
      break;
    }
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* GObject get_property implementation: plain reads of the property
 * fields, no locking (mirrors the lock-free writes in set_property for
 * the simple properties). */
static void
gst_spectrum_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstSpectrum *filter = GST_SPECTRUM (object);

  switch (prop_id) {
    case PROP_POST_MESSAGES:
      g_value_set_boolean (value, filter->post_messages);
      break;
    case PROP_MESSAGE_MAGNITUDE:
      g_value_set_boolean (value, filter->message_magnitude);
      break;
    case PROP_MESSAGE_PHASE:
      g_value_set_boolean (value, filter->message_phase);
      break;
    case PROP_INTERVAL:
      g_value_set_uint64 (value, filter->interval);
      break;
    case PROP_BANDS:
      g_value_set_uint (value, filter->bands);
      break;
    case PROP_THRESHOLD:
      g_value_set_int (value, filter->threshold);
      break;
    case PROP_MULTI_CHANNEL:
      g_value_set_boolean (value, filter->multi_channel);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
/* GstBaseTransform start vfunc: begin with clean analysis state. */
static gboolean
gst_spectrum_start (GstBaseTransform * trans)
{
  GstSpectrum *spectrum = GST_SPECTRUM (trans);

  gst_spectrum_reset_state (spectrum);

  return TRUE;
}
+
/* GstBaseTransform stop vfunc: release the analysis state. */
static gboolean
gst_spectrum_stop (GstBaseTransform * trans)
{
  GstSpectrum *spectrum = GST_SPECTRUM (trans);

  gst_spectrum_reset_state (spectrum);

  return TRUE;
}
+
+/* mixing data readers */
+
+static void
+input_data_mixed_float (const guint8 * _in, gfloat * out, guint len,
+ guint channels, gfloat max_value, guint op, guint nfft)
+{
+ guint i, j, ip = 0;
+ gfloat v;
+ gfloat *in = (gfloat *) _in;
+
+ for (j = 0; j < len; j++) {
+ v = in[ip++];
+ for (i = 1; i < channels; i++)
+ v += in[ip++];
+ out[op] = v / channels;
+ op = (op + 1) % nfft;
+ }
+}
+
+static void
+input_data_mixed_double (const guint8 * _in, gfloat * out, guint len,
+ guint channels, gfloat max_value, guint op, guint nfft)
+{
+ guint i, j, ip = 0;
+ gfloat v;
+ gdouble *in = (gdouble *) _in;
+
+ for (j = 0; j < len; j++) {
+ v = in[ip++];
+ for (i = 1; i < channels; i++)
+ v += in[ip++];
+ out[op] = v / channels;
+ op = (op + 1) % nfft;
+ }
+}
+
+static void
+input_data_mixed_int32_max (const guint8 * _in, gfloat * out, guint len,
+ guint channels, gfloat max_value, guint op, guint nfft)
+{
+ guint i, j, ip = 0;
+ gint32 *in = (gint32 *) _in;
+ gfloat v;
+
+ for (j = 0; j < len; j++) {
+ v = in[ip++] / max_value;
+ for (i = 1; i < channels; i++)
+ v += in[ip++] / max_value;
+ out[op] = v / channels;
+ op = (op + 1) % nfft;
+ }
+}
+
+static void
+input_data_mixed_int24_max (const guint8 * _in, gfloat * out, guint len,
+ guint channels, gfloat max_value, guint op, guint nfft)
+{
+ guint i, j;
+ gfloat v = 0.0;
+
+ for (j = 0; j < len; j++) {
+ for (i = 0; i < channels; i++) {
+#if G_BYTE_ORDER == G_BIG_ENDIAN
+ gint32 value = GST_READ_UINT24_BE (_in);
+#else
+ gint32 value = GST_READ_UINT24_LE (_in);
+#endif
+ if (value & 0x00800000)
+ value |= 0xff000000;
+ v += value / max_value;
+ _in += 3;
+ }
+ out[op] = v / channels;
+ op = (op + 1) % nfft;
+ }
+}
+
+static void
+input_data_mixed_int16_max (const guint8 * _in, gfloat * out, guint len,
+ guint channels, gfloat max_value, guint op, guint nfft)
+{
+ guint i, j, ip = 0;
+ gint16 *in = (gint16 *) _in;
+ gfloat v;
+
+ for (j = 0; j < len; j++) {
+ v = in[ip++] / max_value;
+ for (i = 1; i < channels; i++)
+ v += in[ip++] / max_value;
+ out[op] = v / channels;
+ op = (op + 1) % nfft;
+ }
+}
+
+/* non mixing data readers */
+
+static void
+input_data_float (const guint8 * _in, gfloat * out, guint len, guint channels,
+ gfloat max_value, guint op, guint nfft)
+{
+ guint j, ip;
+ gfloat *in = (gfloat *) _in;
+
+ for (j = 0, ip = 0; j < len; j++, ip += channels) {
+ out[op] = in[ip];
+ op = (op + 1) % nfft;
+ }
+}
+
+static void
+input_data_double (const guint8 * _in, gfloat * out, guint len, guint channels,
+ gfloat max_value, guint op, guint nfft)
+{
+ guint j, ip;
+ gdouble *in = (gdouble *) _in;
+
+ for (j = 0, ip = 0; j < len; j++, ip += channels) {
+ out[op] = in[ip];
+ op = (op + 1) % nfft;
+ }
+}
+
+static void
+input_data_int32_max (const guint8 * _in, gfloat * out, guint len,
+ guint channels, gfloat max_value, guint op, guint nfft)
+{
+ guint j, ip;
+ gint32 *in = (gint32 *) _in;
+
+ for (j = 0, ip = 0; j < len; j++, ip += channels) {
+ out[op] = in[ip] / max_value;
+ op = (op + 1) % nfft;
+ }
+}
+
/* Copy one channel of interleaved signed 24-bit samples into the ring
 * buffer @out of size @nfft, scaling by 1/@max_value.  @_in points at
 * the first sample of the channel of interest; consecutive frames are
 * 3 * @channels bytes apart. */
static void
input_data_int24_max (const guint8 * _in, gfloat * out, guint len,
    guint channels, gfloat max_value, guint op, guint nfft)
{
  guint j;

  for (j = 0; j < len; j++) {
#if G_BYTE_ORDER == G_BIG_ENDIAN
    gint32 v = GST_READ_UINT24_BE (_in);
#else
    gint32 v = GST_READ_UINT24_LE (_in);
#endif
    /* sign-extend the 24-bit sample to 32 bits */
    if (v & 0x00800000)
      v |= 0xff000000;
    _in += 3 * channels;
    out[op] = v / max_value;
    op = (op + 1) % nfft;
  }
}
+
+static void
+input_data_int16_max (const guint8 * _in, gfloat * out, guint len,
+ guint channels, gfloat max_value, guint op, guint nfft)
+{
+ guint j, ip;
+ gint16 *in = (gint16 *) _in;
+
+ for (j = 0, ip = 0; j < len; j++, ip += channels) {
+ out[op] = in[ip] / max_value;
+ op = (op + 1) % nfft;
+ }
+}
+
/* GstAudioFilter setup vfunc: called when the input format is (re)
 * negotiated.  Selects the sample reader matching the format — a
 * per-channel reader in multi-channel mode, otherwise one that mixes all
 * channels down to mono — and resets the analysis state under the lock.
 * The default branch is unreachable because the pad template restricts
 * the formats to the five handled cases. */
static gboolean
gst_spectrum_setup (GstAudioFilter * base, const GstAudioInfo * info)
{
  GstSpectrum *spectrum = GST_SPECTRUM (base);
  gboolean multi_channel = spectrum->multi_channel;
  GstSpectrumInputData input_data = NULL;

  g_mutex_lock (&spectrum->lock);
  switch (GST_AUDIO_INFO_FORMAT (info)) {
    case GST_AUDIO_FORMAT_S16:
      input_data =
          multi_channel ? input_data_int16_max : input_data_mixed_int16_max;
      break;
    case GST_AUDIO_FORMAT_S24:
      input_data =
          multi_channel ? input_data_int24_max : input_data_mixed_int24_max;
      break;
    case GST_AUDIO_FORMAT_S32:
      input_data =
          multi_channel ? input_data_int32_max : input_data_mixed_int32_max;
      break;
    case GST_AUDIO_FORMAT_F32:
      input_data = multi_channel ? input_data_float : input_data_mixed_float;
      break;
    case GST_AUDIO_FORMAT_F64:
      input_data = multi_channel ? input_data_double : input_data_mixed_double;
      break;
    default:
      g_assert_not_reached ();
      break;
  }
  spectrum->input_data = input_data;

  gst_spectrum_reset_state (spectrum);
  g_mutex_unlock (&spectrum->lock);

  return TRUE;
}
+
/* Add an empty container value (e.g. GST_TYPE_LIST or GST_TYPE_ARRAY)
 * named @name to structure @s and return a pointer to the copy now owned
 * by the structure, so the caller can append into it.  The const cast is
 * deliberate: the caller mutates the value in place inside @s. */
static GValue *
gst_spectrum_message_add_container (GstStructure * s, GType type,
    const gchar * name)
{
  GValue v = { 0, };

  g_value_init (&v, type);
  /* will copy-by-value */
  gst_structure_set_value (s, name, &v);
  g_value_unset (&v);
  return (GValue *) gst_structure_get_value (s, name);
}
+
+/* Append num_values floats from 'data' to the GST_TYPE_LIST container cv. */
+static void
+gst_spectrum_message_add_list (GValue * cv, gfloat * data, guint num_values)
+{
+ GValue v = { 0, };
+ guint i;
+
+ g_value_init (&v, G_TYPE_FLOAT);
+ for (i = 0; i < num_values; i++) {
+ g_value_set_float (&v, data[i]);
+ gst_value_list_append_value (cv, &v); /* copies by value */
+ }
+ g_value_unset (&v);
+}
+
+/* Append one inner GstValueArray of num_values floats to the outer array cv
+ * (one inner array per channel in multi-channel mode). */
+static void
+gst_spectrum_message_add_array (GValue * cv, gfloat * data, guint num_values)
+{
+ GValue v = { 0, };
+ GValue a = { 0, };
+ guint i;
+
+ g_value_init (&a, GST_TYPE_ARRAY);
+
+ g_value_init (&v, G_TYPE_FLOAT);
+ for (i = 0; i < num_values; i++) {
+ g_value_set_float (&v, data[i]);
+ gst_value_array_append_value (&a, &v); /* copies by value */
+ }
+ g_value_unset (&v);
+
+ gst_value_array_append_value (cv, &a); /* copies by value */
+ g_value_unset (&a);
+}
+
+/* Build the element message posted once per interval. The structure carries
+ * timestamps plus, depending on the message-magnitude / message-phase
+ * properties, per-band magnitude and phase data: flat lists for the single
+ * (mixed-down) channel case, arrays-of-arrays (one per channel) when
+ * multi_channel is set. */
+static GstMessage *
+gst_spectrum_message_new (GstSpectrum * spectrum, GstClockTime timestamp,
+ GstClockTime duration)
+{
+ GstBaseTransform *trans = GST_BASE_TRANSFORM_CAST (spectrum);
+ GstSpectrumChannel *cd;
+ GstStructure *s;
+ GValue *mcv = NULL, *pcv = NULL;
+ GstClockTime endtime, running_time, stream_time;
+
+ GST_DEBUG_OBJECT (spectrum, "preparing message, bands =%d ", spectrum->bands);
+
+ running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
+ timestamp);
+ stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
+ timestamp);
+ /* endtime is for backwards compatibility */
+ endtime = stream_time + duration;
+
+ s = gst_structure_new ("spectrum",
+ "endtime", GST_TYPE_CLOCK_TIME, endtime,
+ "timestamp", G_TYPE_UINT64, timestamp,
+ "stream-time", G_TYPE_UINT64, stream_time,
+ "running-time", G_TYPE_UINT64, running_time,
+ "duration", G_TYPE_UINT64, duration, NULL);
+
+ if (!spectrum->multi_channel) {
+ /* single result channel: everything was mixed into channel_data[0] */
+ cd = &spectrum->channel_data[0];
+
+ if (spectrum->message_magnitude) {
+ /* FIXME 0.11: this should be an array, not a list */
+ mcv = gst_spectrum_message_add_container (s, GST_TYPE_LIST, "magnitude");
+ gst_spectrum_message_add_list (mcv, cd->spect_magnitude, spectrum->bands);
+ }
+ if (spectrum->message_phase) {
+ /* FIXME 0.11: this should be an array, not a list */
+ pcv = gst_spectrum_message_add_container (s, GST_TYPE_LIST, "phase");
+ gst_spectrum_message_add_list (pcv, cd->spect_phase, spectrum->bands);
+ }
+ } else {
+ guint c;
+ guint channels = GST_AUDIO_FILTER_CHANNELS (spectrum);
+
+ if (spectrum->message_magnitude) {
+ mcv = gst_spectrum_message_add_container (s, GST_TYPE_ARRAY, "magnitude");
+ }
+ if (spectrum->message_phase) {
+ pcv = gst_spectrum_message_add_container (s, GST_TYPE_ARRAY, "phase");
+ }
+
+ for (c = 0; c < channels; c++) {
+ cd = &spectrum->channel_data[c];
+
+ if (spectrum->message_magnitude) {
+ gst_spectrum_message_add_array (mcv, cd->spect_magnitude,
+ spectrum->bands);
+ }
+ if (spectrum->message_phase) {
+ gst_spectrum_message_add_array (pcv, cd->spect_phase, spectrum->bands);
+ }
+ }
+ }
+ return gst_message_new_element (GST_OBJECT (spectrum), s);
+}
+
+/* Run one FFT for a channel: linearize the ring buffer starting at
+ * input_pos into input_tmp, apply a Hamming window, transform, then
+ * accumulate per-band magnitude (in dB, clamped at 'threshold') and phase
+ * into the channel accumulators. The accumulators are averaged over the
+ * number of FFTs later, in gst_spectrum_prepare_message_data(). */
+static void
+gst_spectrum_run_fft (GstSpectrum * spectrum, GstSpectrumChannel * cd,
+ guint input_pos)
+{
+ guint i;
+ guint bands = spectrum->bands;
+ guint nfft = 2 * bands - 2;
+ gint threshold = spectrum->threshold;
+ gfloat *input = cd->input;
+ gfloat *input_tmp = cd->input_tmp;
+ gfloat *spect_magnitude = cd->spect_magnitude;
+ gfloat *spect_phase = cd->spect_phase;
+ GstFFTF32Complex *freqdata = cd->freqdata;
+ GstFFTF32 *fft_ctx = cd->fft_ctx;
+
+ for (i = 0; i < nfft; i++)
+ input_tmp[i] = input[(input_pos + i) % nfft];
+
+ gst_fft_f32_window (fft_ctx, input_tmp, GST_FFT_WINDOW_HAMMING);
+
+ gst_fft_f32_fft (fft_ctx, input_tmp, freqdata);
+
+ if (spectrum->message_magnitude) {
+ gdouble val;
+ /* Calculate magnitude in db */
+ for (i = 0; i < bands; i++) {
+ val = freqdata[i].r * freqdata[i].r;
+ val += freqdata[i].i * freqdata[i].i;
+ val /= nfft * nfft;
+ val = 10.0 * log10 (val);
+ if (val < threshold)
+ val = threshold;
+ spect_magnitude[i] += val;
+ }
+ }
+
+ if (spectrum->message_phase) {
+ /* Calculate phase */
+ for (i = 0; i < bands; i++)
+ spect_phase[i] += atan2 (freqdata[i].i, freqdata[i].r);
+ }
+}
+
+/* Turn the per-band accumulators into averages over the num_fft FFTs that
+ * ran during this interval, right before the message is built. */
+static void
+gst_spectrum_prepare_message_data (GstSpectrum * spectrum,
+ GstSpectrumChannel * cd)
+{
+ guint i;
+ guint bands = spectrum->bands;
+ guint num_fft = spectrum->num_fft;
+
+ /* Calculate average */
+ if (spectrum->message_magnitude) {
+ gfloat *spect_magnitude = cd->spect_magnitude;
+ for (i = 0; i < bands; i++)
+ spect_magnitude[i] /= num_fft;
+ }
+ if (spectrum->message_phase) {
+ gfloat *spect_phase = cd->spect_phase;
+ for (i = 0; i < bands; i++)
+ spect_phase[i] /= num_fft;
+ }
+}
+
+/* Zero a channel's magnitude/phase accumulators after a message was sent. */
+static void
+gst_spectrum_reset_message_data (GstSpectrum * spectrum,
+ GstSpectrumChannel * cd)
+{
+ guint bands = spectrum->bands;
+ gfloat *spect_magnitude = cd->spect_magnitude;
+ gfloat *spect_phase = cd->spect_phase;
+
+ /* reset spectrum accumulators */
+ memset (spect_magnitude, 0, bands * sizeof (gfloat));
+ memset (spect_phase, 0, bands * sizeof (gfloat));
+}
+
+/* Main processing: consume the buffer in chunks bounded by (a) the frames
+ * left until the next FFT (ring fills up), (b) the frames left in the
+ * current message interval and (c) the frames available in the buffer.
+ * Runs FFTs and posts a spectrum message whenever an interval completes.
+ * Lazily (re)allocates the per-channel state on first buffer after a reset.
+ * The buffer data itself is never modified (in-place transform, read map). */
+static GstFlowReturn
+gst_spectrum_transform_ip (GstBaseTransform * trans, GstBuffer * buffer)
+{
+ GstSpectrum *spectrum = GST_SPECTRUM (trans);
+ guint rate = GST_AUDIO_FILTER_RATE (spectrum);
+ guint channels = GST_AUDIO_FILTER_CHANNELS (spectrum);
+ guint bps = GST_AUDIO_FILTER_BPS (spectrum);
+ guint bpf = GST_AUDIO_FILTER_BPF (spectrum);
+ guint output_channels = spectrum->multi_channel ? channels : 1;
+ guint c;
+ /* full-scale value for the integer formats; unused by the float paths */
+ gfloat max_value = (1UL << ((bps << 3) - 1)) - 1;
+ guint bands = spectrum->bands;
+ guint nfft = 2 * bands - 2;
+ guint input_pos;
+ gfloat *input;
+ GstMapInfo map;
+ const guint8 *data;
+ gsize size;
+ guint fft_todo, msg_todo, block_size;
+ gboolean have_full_interval;
+ GstSpectrumChannel *cd;
+ GstSpectrumInputData input_data;
+
+ g_mutex_lock (&spectrum->lock);
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+ data = map.data;
+ size = map.size;
+
+ GST_LOG_OBJECT (spectrum, "input size: %" G_GSIZE_FORMAT " bytes", size);
+
+ if (GST_BUFFER_IS_DISCONT (buffer)) {
+ GST_DEBUG_OBJECT (spectrum, "Discontinuity detected -- flushing");
+ gst_spectrum_flush (spectrum);
+ }
+
+ /* If we don't have a FFT context yet (or it was reset due to parameter
+ * changes) get one and allocate memory for everything
+ */
+ if (spectrum->channel_data == NULL) {
+ GST_DEBUG_OBJECT (spectrum, "allocating for bands %u", bands);
+
+ gst_spectrum_alloc_channel_data (spectrum);
+
+ /* number of sample frames we process before posting a message
+ * interval is in ns */
+ spectrum->frames_per_interval =
+ gst_util_uint64_scale (spectrum->interval, rate, GST_SECOND);
+ spectrum->frames_todo = spectrum->frames_per_interval;
+ /* rounding error for frames_per_interval in ns,
+ * aggregated it in accumulated_error */
+ spectrum->error_per_interval = (spectrum->interval * rate) % GST_SECOND;
+ if (spectrum->frames_per_interval == 0)
+ spectrum->frames_per_interval = 1;
+
+ GST_INFO_OBJECT (spectrum, "interval %" GST_TIME_FORMAT ", fpi %"
+ G_GUINT64_FORMAT ", error %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (spectrum->interval), spectrum->frames_per_interval,
+ GST_TIME_ARGS (spectrum->error_per_interval));
+
+ spectrum->input_pos = 0;
+
+ gst_spectrum_flush (spectrum);
+ }
+
+ if (spectrum->num_frames == 0)
+ spectrum->message_ts = GST_BUFFER_TIMESTAMP (buffer);
+
+ input_pos = spectrum->input_pos;
+ input_data = spectrum->input_data;
+
+ while (size >= bpf) {
+ /* run input_data for a chunk of data */
+ fft_todo = nfft - (spectrum->num_frames % nfft);
+ msg_todo = spectrum->frames_todo - spectrum->num_frames;
+ GST_LOG_OBJECT (spectrum,
+ "message frames todo: %u, fft frames todo: %u, input frames %"
+ G_GSIZE_FORMAT, msg_todo, fft_todo, (size / bpf));
+ /* block_size = min (msg_todo, available frames, fft_todo) */
+ block_size = msg_todo;
+ if (block_size > (size / bpf))
+ block_size = (size / bpf);
+ if (block_size > fft_todo)
+ block_size = fft_todo;
+
+ for (c = 0; c < output_channels; c++) {
+ cd = &spectrum->channel_data[c];
+ input = cd->input;
+ /* Move the current frames into our ringbuffers */
+ input_data (data + c * bps, input, block_size, channels, max_value,
+ input_pos, nfft);
+ }
+ data += block_size * bpf;
+ size -= block_size * bpf;
+ input_pos = (input_pos + block_size) % nfft;
+ spectrum->num_frames += block_size;
+
+ have_full_interval = (spectrum->num_frames == spectrum->frames_todo);
+
+ GST_LOG_OBJECT (spectrum,
+ "size: %" G_GSIZE_FORMAT ", do-fft = %d, do-message = %d", size,
+ (spectrum->num_frames % nfft == 0), have_full_interval);
+
+ /* If we have enough frames for an FFT or we have all frames required for
+ * the interval and we haven't run a FFT, then run an FFT */
+ if ((spectrum->num_frames % nfft == 0) ||
+ (have_full_interval && !spectrum->num_fft)) {
+ for (c = 0; c < output_channels; c++) {
+ cd = &spectrum->channel_data[c];
+ gst_spectrum_run_fft (spectrum, cd, input_pos);
+ }
+ spectrum->num_fft++;
+ }
+
+ /* Do we have the FFTs for one interval? */
+ if (have_full_interval) {
+ GST_DEBUG_OBJECT (spectrum, "nfft: %u frames: %" G_GUINT64_FORMAT
+ " fpi: %" G_GUINT64_FORMAT " error: %" GST_TIME_FORMAT, nfft,
+ spectrum->num_frames, spectrum->frames_per_interval,
+ GST_TIME_ARGS (spectrum->accumulated_error));
+
+ /* spread the per-interval ns rounding error: once it adds up to a
+ * full second, process one extra frame in the next interval */
+ spectrum->frames_todo = spectrum->frames_per_interval;
+ if (spectrum->accumulated_error >= GST_SECOND) {
+ spectrum->accumulated_error -= GST_SECOND;
+ spectrum->frames_todo++;
+ }
+ spectrum->accumulated_error += spectrum->error_per_interval;
+
+ if (spectrum->post_messages) {
+ GstMessage *m;
+
+ for (c = 0; c < output_channels; c++) {
+ cd = &spectrum->channel_data[c];
+ gst_spectrum_prepare_message_data (spectrum, cd);
+ }
+
+ m = gst_spectrum_message_new (spectrum, spectrum->message_ts,
+ spectrum->interval);
+
+ gst_element_post_message (GST_ELEMENT (spectrum), m);
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (spectrum->message_ts))
+ spectrum->message_ts +=
+ gst_util_uint64_scale (spectrum->num_frames, GST_SECOND, rate);
+
+ for (c = 0; c < output_channels; c++) {
+ cd = &spectrum->channel_data[c];
+ gst_spectrum_reset_message_data (spectrum, cd);
+ }
+ spectrum->num_frames = 0;
+ spectrum->num_fft = 0;
+ }
+ }
+
+ spectrum->input_pos = input_pos;
+
+ gst_buffer_unmap (buffer, &map);
+ g_mutex_unlock (&spectrum->lock);
+
+ /* the loop consumes whole frames only; any remainder would mean a
+ * non-frame-aligned buffer */
+ g_assert (size == 0);
+
+ return GST_FLOW_OK;
+}
+
+/* Plugin entry point: register the single "spectrum" element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+
+ return GST_ELEMENT_REGISTER (spectrum, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ spectrum,
+ "Run an FFT on the audio signal, output spectrum data",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/spectrum/gstspectrum.h b/gst/spectrum/gstspectrum.h
new file mode 100644
index 0000000000..d73c19f688
--- /dev/null
+++ b/gst/spectrum/gstspectrum.h
@@ -0,0 +1,97 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_SPECTRUM_H__
+#define __GST_SPECTRUM_H__
+
+#include <gst/gst.h>
+#include <gst/audio/gstaudiofilter.h>
+#include <gst/fft/gstfftf32.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_SPECTRUM (gst_spectrum_get_type())
+#define GST_SPECTRUM(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SPECTRUM,GstSpectrum))
+#define GST_IS_SPECTRUM(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SPECTRUM))
+#define GST_SPECTRUM_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_SPECTRUM,GstSpectrumClass))
+#define GST_IS_SPECTRUM_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_SPECTRUM))
+typedef struct _GstSpectrum GstSpectrum;
+typedef struct _GstSpectrumClass GstSpectrumClass;
+typedef struct _GstSpectrumChannel GstSpectrumChannel;
+
+typedef void (*GstSpectrumInputData)(const guint8 * in, gfloat * out,
+ guint len, guint channels, gfloat max_value, guint op, guint nfft);
+
+/* Per-channel analysis state. */
+struct _GstSpectrumChannel
+{
+ gfloat *input; /* ring buffer of converted input samples */
+ gfloat *input_tmp; /* linearized copy handed to the FFT */
+ GstFFTF32Complex *freqdata; /* FFT output bins */
+ gfloat *spect_magnitude; /* accumulated magnitude and phase */
+ gfloat *spect_phase; /* will be scaled by num_fft before sending */
+ GstFFTF32 *fft_ctx;
+};
+
+struct _GstSpectrum
+{
+ GstAudioFilter parent;
+
+ /* properties */
+ gboolean post_messages; /* whether or not to post messages */
+ gboolean message_magnitude; /* include magnitude data in messages */
+ gboolean message_phase; /* include phase data in messages */
+ guint64 interval; /* how many nanoseconds between emits */
+ guint64 frames_per_interval; /* how many frames per interval */
+ guint64 frames_todo; /* frames left in the current interval */
+ guint bands; /* number of spectrum bands */
+ gint threshold; /* energy level threshold */
+ gboolean multi_channel; /* send separate channel results */
+
+ guint64 num_frames; /* frame count (1 sample per channel)
+ * since last emit */
+ guint64 num_fft; /* number of FFTs since last emit */
+ GstClockTime message_ts; /* starttime for next message */
+
+ /* <private> */
+ GstSpectrumChannel *channel_data; /* NULL until first buffer / after reset */
+ guint num_channels;
+
+ guint input_pos; /* write position in the input ring buffers */
+ guint64 error_per_interval; /* ns rounding error added per interval */
+ guint64 accumulated_error; /* running rounding error, in ns */
+
+ GMutex lock; /* guards the analysis state vs. setup/property changes */
+
+ GstSpectrumInputData input_data; /* format-specific convert/deinterleave */
+};
+
+/* No class members beyond the parent class. */
+struct _GstSpectrumClass
+{
+ GstAudioFilterClass parent_class;
+};
+
+GType gst_spectrum_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (spectrum);
+
+G_END_DECLS
+
+#endif /* __GST_SPECTRUM_H__ */
diff --git a/gst/spectrum/meson.build b/gst/spectrum/meson.build
new file mode 100644
index 0000000000..4d954c4d74
--- /dev/null
+++ b/gst/spectrum/meson.build
@@ -0,0 +1,10 @@
+# Build and install the gstspectrum plugin (FFT-based audio analyser).
+gstspectrum = library('gstspectrum',
+ 'gstspectrum.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstfft_dep, gstaudio_dep, libm],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstspectrum, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstspectrum]
diff --git a/gst/udp/README b/gst/udp/README
new file mode 100644
index 0000000000..9b7cebb622
--- /dev/null
+++ b/gst/udp/README
@@ -0,0 +1,7 @@
+* What is UDP src/sink?
+
+It is a set of elements to transfer data using UDP, nothing more, nothing less.
+Its main purpose is to be used in conjunction with RTP but they are kept as
+separate elements because we can.
+
+
diff --git a/gst/udp/gstdynudpsink.c b/gst/udp/gstdynudpsink.c
new file mode 100644
index 0000000000..42f2e02430
--- /dev/null
+++ b/gst/udp/gstdynudpsink.c
@@ -0,0 +1,601 @@
+/* GStreamer
+ * Copyright (C) <2005> Philippe Khalaf <burger@speedy.org>
+ * Copyright (C) <2005> Nokia Corporation <kai.vehmanen@nokia.com>
+ * Copyright (C) <2006> Joni Valtanen <joni.valtanen@movial.fi>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include "gstudpelements.h"
+#include "gstdynudpsink.h"
+
+#include <gst/net/gstnetaddressmeta.h>
+
+GST_DEBUG_CATEGORY_STATIC (dynudpsink_debug);
+#define GST_CAT_DEFAULT (dynudpsink_debug)
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+/* DynUDPSink signals and args */
+enum
+{
+ /* methods */
+ SIGNAL_GET_STATS,
+
+ /* signals */
+
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+#define UDP_DEFAULT_SOCKET NULL
+#define UDP_DEFAULT_CLOSE_SOCKET TRUE
+#define UDP_DEFAULT_BIND_ADDRESS NULL
+#define UDP_DEFAULT_BIND_PORT 0
+
+enum
+{
+ PROP_0,
+ PROP_SOCKET,
+ PROP_SOCKET_V6,
+ PROP_CLOSE_SOCKET,
+ PROP_BIND_ADDRESS,
+ PROP_BIND_PORT
+};
+
+static void gst_dynudpsink_finalize (GObject * object);
+
+static GstFlowReturn gst_dynudpsink_render (GstBaseSink * sink,
+ GstBuffer * buffer);
+static gboolean gst_dynudpsink_stop (GstBaseSink * bsink);
+static gboolean gst_dynudpsink_start (GstBaseSink * bsink);
+static gboolean gst_dynudpsink_unlock (GstBaseSink * bsink);
+static gboolean gst_dynudpsink_unlock_stop (GstBaseSink * bsink);
+
+static void gst_dynudpsink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_dynudpsink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static GstStructure *gst_dynudpsink_get_stats (GstDynUDPSink * sink,
+ const gchar * host, gint port);
+
+static guint gst_dynudpsink_signals[LAST_SIGNAL] = { 0 };
+
+#define gst_dynudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstDynUDPSink, gst_dynudpsink, GST_TYPE_BASE_SINK);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (dynudpsink, "dynudpsink", GST_RANK_NONE,
+ GST_TYPE_DYNUDPSINK, udp_element_init (plugin));
+
+/* Class setup: install properties, the "get-stats" action signal, the sink
+ * pad template and the GstBaseSink vfuncs. */
+static void
+gst_dynudpsink_class_init (GstDynUDPSinkClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBaseSinkClass *gstbasesink_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbasesink_class = (GstBaseSinkClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->set_property = gst_dynudpsink_set_property;
+ gobject_class->get_property = gst_dynudpsink_get_property;
+ gobject_class->finalize = gst_dynudpsink_finalize;
+
+ /* action signal so apps can call g_signal_emit_by_name (sink,
+ * "get-stats", host, port) */
+ gst_dynudpsink_signals[SIGNAL_GET_STATS] =
+ g_signal_new ("get-stats", G_TYPE_FROM_CLASS (klass),
+ G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+ G_STRUCT_OFFSET (GstDynUDPSinkClass, get_stats),
+ NULL, NULL, NULL, GST_TYPE_STRUCTURE, 2, G_TYPE_STRING, G_TYPE_INT);
+
+ g_object_class_install_property (gobject_class, PROP_SOCKET,
+ g_param_spec_object ("socket", "Socket",
+ "Socket to use for UDP sending. (NULL == allocate)",
+ G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SOCKET_V6,
+ g_param_spec_object ("socket-v6", "Socket IPv6",
+ "Socket to use for UDPv6 sending. (NULL == allocate)",
+ G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CLOSE_SOCKET,
+ g_param_spec_boolean ("close-socket", "Close socket",
+ "Close socket if passed as property on state change",
+ UDP_DEFAULT_CLOSE_SOCKET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_BIND_ADDRESS,
+ g_param_spec_string ("bind-address", "Bind Address",
+ "Address to bind the socket to", UDP_DEFAULT_BIND_ADDRESS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_BIND_PORT,
+ g_param_spec_int ("bind-port", "Bind Port",
+ "Port to bind the socket to", 0, G_MAXUINT16,
+ UDP_DEFAULT_BIND_PORT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP with packet destinations picked up "
+ "dynamically from meta on the buffers passed",
+ "Philippe Khalaf <burger@speedy.org>");
+
+ gstbasesink_class->render = gst_dynudpsink_render;
+ gstbasesink_class->start = gst_dynudpsink_start;
+ gstbasesink_class->stop = gst_dynudpsink_stop;
+ gstbasesink_class->unlock = gst_dynudpsink_unlock;
+ gstbasesink_class->unlock_stop = gst_dynudpsink_unlock_stop;
+
+ klass->get_stats = gst_dynudpsink_get_stats;
+
+ GST_DEBUG_CATEGORY_INIT (dynudpsink_debug, "dynudpsink", 0, "UDP sink");
+}
+
+/* Instance init: all defaults; sockets are created/adopted in start(). */
+static void
+gst_dynudpsink_init (GstDynUDPSink * sink)
+{
+ sink->socket = UDP_DEFAULT_SOCKET;
+ sink->socket_v6 = UDP_DEFAULT_SOCKET;
+ sink->close_socket = UDP_DEFAULT_CLOSE_SOCKET;
+ sink->external_socket = FALSE;
+ sink->bind_address = UDP_DEFAULT_BIND_ADDRESS;
+ sink->bind_port = UDP_DEFAULT_BIND_PORT;
+
+ sink->used_socket = NULL;
+ sink->used_socket_v6 = NULL;
+}
+
+/* Release the configured and in-use socket references and the bind address
+ * string. Sockets are only unreffed here, not closed; closing is handled in
+ * stop() according to the close-socket property. */
+static void
+gst_dynudpsink_finalize (GObject * object)
+{
+ GstDynUDPSink *sink;
+
+ sink = GST_DYNUDPSINK (object);
+
+ if (sink->socket)
+ g_object_unref (sink->socket);
+ sink->socket = NULL;
+
+ if (sink->socket_v6)
+ g_object_unref (sink->socket_v6);
+ sink->socket_v6 = NULL;
+
+ if (sink->used_socket)
+ g_object_unref (sink->used_socket);
+ sink->used_socket = NULL;
+
+ if (sink->used_socket_v6)
+ g_object_unref (sink->used_socket_v6);
+ sink->used_socket_v6 = NULL;
+
+ g_free (sink->bind_address);
+ sink->bind_address = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Send one buffer to the destination carried in its GstNetAddressMeta.
+ * Buffers without the meta are silently skipped (GST_FLOW_OK). The socket
+ * is chosen per-packet based on the address family of the destination. */
+static GstFlowReturn
+gst_dynudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
+{
+ GstDynUDPSink *sink;
+ gssize ret;
+ GstMapInfo map;
+ GstNetAddressMeta *meta;
+ GSocketAddress *addr;
+ GError *err = NULL;
+ GSocketFamily family;
+ GSocket *socket;
+
+ meta = gst_buffer_get_net_address_meta (buffer);
+
+ if (meta == NULL) {
+ GST_DEBUG ("Received buffer without GstNetAddressMeta, skipping");
+ return GST_FLOW_OK;
+ }
+
+ sink = GST_DYNUDPSINK (bsink);
+
+ /* let's get the address from the metadata */
+ addr = meta->addr;
+
+ /* IPv6 destinations require the v6 socket (which may have failed to be
+ * created in start()) */
+ family = g_socket_address_get_family (addr);
+ if (family == G_SOCKET_FAMILY_IPV6 && !sink->used_socket_v6)
+ goto invalid_family;
+
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ GST_DEBUG ("about to send %" G_GSIZE_FORMAT " bytes", map.size);
+
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ gchar *host;
+
+ host =
+ g_inet_address_to_string (g_inet_socket_address_get_address
+ (G_INET_SOCKET_ADDRESS (addr)));
+ GST_DEBUG ("sending %" G_GSIZE_FORMAT " bytes to client %s port %d",
+ map.size, host,
+ g_inet_socket_address_get_port (G_INET_SOCKET_ADDRESS (addr)));
+ g_free (host);
+ }
+#endif
+
+ /* Select socket to send from for this address */
+ if (family == G_SOCKET_FAMILY_IPV6 || !sink->used_socket)
+ socket = sink->used_socket_v6;
+ else
+ socket = sink->used_socket;
+
+ ret =
+ g_socket_send_to (socket, addr, (gchar *) map.data, map.size,
+ sink->cancellable, &err);
+ gst_buffer_unmap (buffer, &map);
+
+ if (ret < 0)
+ goto send_error;
+
+ GST_DEBUG ("sent %" G_GSSIZE_FORMAT " bytes", ret);
+
+ return GST_FLOW_OK;
+
+send_error:
+ {
+ GstFlowReturn flow_ret;
+
+ /* a cancelled send means we are flushing/shutting down, not an error */
+ if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
+ GST_DEBUG_OBJECT (sink, "send cancelled");
+ flow_ret = GST_FLOW_FLUSHING;
+ } else {
+ GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, (NULL),
+ ("send error: %s", err->message));
+ flow_ret = GST_FLOW_ERROR;
+ }
+ g_clear_error (&err);
+ return flow_ret;
+ }
+invalid_family:
+ {
+ GST_DEBUG ("invalid address family (got %d)", family);
+ return GST_FLOW_ERROR;
+ }
+}
+
+/* Property setter. When replacing a previously set socket that is not the
+ * one currently in use, it is closed first if close-socket is enabled. */
+static void
+gst_dynudpsink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstDynUDPSink *udpsink;
+
+ udpsink = GST_DYNUDPSINK (object);
+
+ switch (prop_id) {
+ case PROP_SOCKET:
+ if (udpsink->socket != NULL && udpsink->socket != udpsink->used_socket &&
+ udpsink->close_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->socket, &err)) {
+ GST_ERROR ("failed to close socket %p: %s", udpsink->socket,
+ err->message);
+ g_clear_error (&err);
+ }
+ }
+ if (udpsink->socket)
+ g_object_unref (udpsink->socket);
+ udpsink->socket = g_value_dup_object (value);
+ GST_DEBUG ("setting socket to %p", udpsink->socket);
+ break;
+ case PROP_SOCKET_V6:
+ if (udpsink->socket_v6 != NULL
+ && udpsink->socket_v6 != udpsink->used_socket_v6
+ && udpsink->close_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->socket_v6, &err)) {
+ GST_ERROR ("failed to close socket %p: %s", udpsink->socket_v6,
+ err->message);
+ g_clear_error (&err);
+ }
+ }
+ if (udpsink->socket_v6)
+ g_object_unref (udpsink->socket_v6);
+ udpsink->socket_v6 = g_value_dup_object (value);
+ GST_DEBUG ("setting socket v6 to %p", udpsink->socket_v6);
+ break;
+ case PROP_CLOSE_SOCKET:
+ udpsink->close_socket = g_value_get_boolean (value);
+ break;
+ case PROP_BIND_ADDRESS:
+ g_free (udpsink->bind_address);
+ udpsink->bind_address = g_value_dup_string (value);
+ break;
+ case PROP_BIND_PORT:
+ udpsink->bind_port = g_value_get_int (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Property getter; straight reads of the instance fields. */
+static void
+gst_dynudpsink_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstDynUDPSink *udpsink;
+
+ udpsink = GST_DYNUDPSINK (object);
+
+ switch (prop_id) {
+ case PROP_SOCKET:
+ g_value_set_object (value, udpsink->socket);
+ break;
+ case PROP_SOCKET_V6:
+ g_value_set_object (value, udpsink->socket_v6);
+ break;
+ case PROP_CLOSE_SOCKET:
+ g_value_set_boolean (value, udpsink->close_socket);
+ break;
+ case PROP_BIND_ADDRESS:
+ g_value_set_string (value, udpsink->bind_address);
+ break;
+ case PROP_BIND_PORT:
+ g_value_set_int (value, udpsink->bind_port);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Create the cancellable used to abort blocking sends in unlock(); the
+ * pollfd is created eagerly and remembered in made_cancel_fd so it can be
+ * released again in gst_dynudpsink_free_cancellable(). */
+static void
+gst_dynudpsink_create_cancellable (GstDynUDPSink * sink)
+{
+ GPollFD pollfd;
+
+ sink->cancellable = g_cancellable_new ();
+ sink->made_cancel_fd = g_cancellable_make_pollfd (sink->cancellable, &pollfd);
+}
+
+/* Counterpart of create_cancellable: release the pollfd (if one was made)
+ * before dropping the cancellable reference. */
+static void
+gst_dynudpsink_free_cancellable (GstDynUDPSink * sink)
+{
+ if (sink->made_cancel_fd) {
+ g_cancellable_release_fd (sink->cancellable);
+ sink->made_cancel_fd = FALSE;
+ }
+ g_object_unref (sink->cancellable);
+ sink->cancellable = NULL;
+}
+
+/* GstBaseSink::start: adopt the sockets passed via the "socket"/"socket-v6"
+ * properties, or create and bind our own IPv4 (and, best-effort, IPv6)
+ * sending sockets. Also sets up the cancellable used by unlock().
+ * Returns FALSE on socket creation/bind/resolve failure. */
+static gboolean
+gst_dynudpsink_start (GstBaseSink * bsink)
+{
+ GstDynUDPSink *udpsink;
+ GError *err = NULL;
+
+ udpsink = GST_DYNUDPSINK (bsink);
+
+ gst_dynudpsink_create_cancellable (udpsink);
+
+ udpsink->external_socket = FALSE;
+
+ /* the generic "socket" property may carry either family */
+ if (udpsink->socket) {
+ if (g_socket_get_family (udpsink->socket) == G_SOCKET_FAMILY_IPV6) {
+ udpsink->used_socket_v6 = G_SOCKET (g_object_ref (udpsink->socket));
+ udpsink->external_socket = TRUE;
+ } else {
+ udpsink->used_socket = G_SOCKET (g_object_ref (udpsink->socket));
+ udpsink->external_socket = TRUE;
+ }
+ }
+
+ if (udpsink->socket_v6) {
+ /* "socket" must not also be IPv6; guard the NULL case so that setting
+ * only socket-v6 does not pass NULL to g_socket_get_family() */
+ g_return_val_if_fail (udpsink->socket == NULL ||
+ g_socket_get_family (udpsink->socket) != G_SOCKET_FAMILY_IPV6, FALSE);
+
+ if (udpsink->used_socket_v6
+ && udpsink->used_socket_v6 != udpsink->socket_v6) {
+ GST_ERROR_OBJECT (udpsink,
+ "Provided different IPv6 sockets in socket and socket-v6 properties");
+ return FALSE;
+ }
+
+ udpsink->used_socket_v6 = G_SOCKET (g_object_ref (udpsink->socket_v6));
+ udpsink->external_socket = TRUE;
+ }
+
+ if (!udpsink->used_socket && !udpsink->used_socket_v6) {
+ GSocketAddress *bind_addr;
+ GInetAddress *bind_iaddr;
+
+ if (udpsink->bind_address) {
+ GSocketFamily family;
+
+ /* try a literal address first, fall back to a DNS lookup */
+ bind_iaddr = g_inet_address_new_from_string (udpsink->bind_address);
+ if (!bind_iaddr) {
+ GList *results;
+ GResolver *resolver;
+
+ resolver = g_resolver_get_default ();
+ results =
+ g_resolver_lookup_by_name (resolver, udpsink->bind_address,
+ udpsink->cancellable, &err);
+ if (!results) {
+ g_object_unref (resolver);
+ goto name_resolve;
+ }
+ bind_iaddr = G_INET_ADDRESS (g_object_ref (results->data));
+ g_resolver_free_addresses (results);
+ g_object_unref (resolver);
+ }
+
+ bind_addr = g_inet_socket_address_new (bind_iaddr, udpsink->bind_port);
+ g_object_unref (bind_iaddr);
+ family = g_socket_address_get_family (G_SOCKET_ADDRESS (bind_addr));
+
+ if ((udpsink->used_socket =
+ g_socket_new (family, G_SOCKET_TYPE_DATAGRAM,
+ G_SOCKET_PROTOCOL_UDP, &err)) == NULL) {
+ g_object_unref (bind_addr);
+ goto no_socket;
+ }
+
+ g_socket_bind (udpsink->used_socket, bind_addr, TRUE, &err);
+ /* bind_addr is no longer needed; was leaked on this path before */
+ g_object_unref (bind_addr);
+ if (err != NULL)
+ goto bind_error;
+ } else {
+ /* create sender sockets if none available */
+ if ((udpsink->used_socket = g_socket_new (G_SOCKET_FAMILY_IPV4,
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL)
+ goto no_socket;
+
+ bind_iaddr = g_inet_address_new_any (G_SOCKET_FAMILY_IPV4);
+ bind_addr = g_inet_socket_address_new (bind_iaddr, 0);
+ g_socket_bind (udpsink->used_socket, bind_addr, TRUE, &err);
+ g_object_unref (bind_addr);
+ g_object_unref (bind_iaddr);
+ if (err != NULL)
+ goto bind_error;
+
+ /* IPv6 support is best-effort: log and continue on failure */
+ if ((udpsink->used_socket_v6 = g_socket_new (G_SOCKET_FAMILY_IPV6,
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP,
+ &err)) == NULL) {
+ GST_INFO_OBJECT (udpsink, "Failed to create IPv6 socket: %s",
+ err->message);
+ g_clear_error (&err);
+ } else {
+ bind_iaddr = g_inet_address_new_any (G_SOCKET_FAMILY_IPV6);
+ bind_addr = g_inet_socket_address_new (bind_iaddr, 0);
+ g_socket_bind (udpsink->used_socket_v6, bind_addr, TRUE, &err);
+ g_object_unref (bind_addr);
+ g_object_unref (bind_iaddr);
+ if (err != NULL)
+ goto bind_error;
+ }
+ }
+ }
+
+ if (udpsink->used_socket)
+ g_socket_set_broadcast (udpsink->used_socket, TRUE);
+ if (udpsink->used_socket_v6)
+ g_socket_set_broadcast (udpsink->used_socket_v6, TRUE);
+
+ return TRUE;
+
+ /* ERRORS */
+no_socket:
+ {
+ GST_ERROR_OBJECT (udpsink, "Failed to create IPv4 socket: %s",
+ err->message);
+ g_clear_error (&err);
+ return FALSE;
+ }
+bind_error:
+ {
+ GST_ELEMENT_ERROR (udpsink, RESOURCE, FAILED, (NULL),
+ ("Failed to bind socket: %s", err->message));
+ g_clear_error (&err);
+ return FALSE;
+ }
+name_resolve:
+ {
+ GST_ELEMENT_ERROR (udpsink, RESOURCE, FAILED, (NULL),
+ ("Failed to resolve bind address %s: %s", udpsink->bind_address,
+ err->message));
+ g_clear_error (&err);
+ return FALSE;
+ }
+}
+
+/* Default handler for the "get-stats" action signal. Statistics are not
+ * implemented for this sink; always returns NULL. */
+static GstStructure *
+gst_dynudpsink_get_stats (GstDynUDPSink * sink, const gchar * host, gint port)
+{
+ return NULL;
+}
+
+/* GstBaseSink::stop: close (unless it is an external socket kept open by
+ * close-socket=FALSE) and unref both in-use sockets, then tear down the
+ * cancellable. */
+static gboolean
+gst_dynudpsink_stop (GstBaseSink * bsink)
+{
+ GstDynUDPSink *udpsink;
+
+ udpsink = GST_DYNUDPSINK (bsink);
+
+ if (udpsink->used_socket) {
+ if (udpsink->close_socket || !udpsink->external_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->used_socket, &err)) {
+ GST_ERROR_OBJECT (udpsink, "Failed to close socket: %s", err->message);
+ g_clear_error (&err);
+ }
+ }
+
+ g_object_unref (udpsink->used_socket);
+ udpsink->used_socket = NULL;
+ }
+
+ if (udpsink->used_socket_v6) {
+ if (udpsink->close_socket || !udpsink->external_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsink->used_socket_v6, &err)) {
+ GST_ERROR_OBJECT (udpsink, "Failed to close socket: %s", err->message);
+ g_clear_error (&err);
+ }
+ }
+
+ g_object_unref (udpsink->used_socket_v6);
+ udpsink->used_socket_v6 = NULL;
+ }
+
+ gst_dynudpsink_free_cancellable (udpsink);
+
+ return TRUE;
+}
+
+/* GstBaseSink::unlock: cancel any blocking g_socket_send_to() in render(). */
+static gboolean
+gst_dynudpsink_unlock (GstBaseSink * bsink)
+{
+ GstDynUDPSink *udpsink;
+
+ udpsink = GST_DYNUDPSINK (bsink);
+
+ g_cancellable_cancel (udpsink->cancellable);
+
+ return TRUE;
+}
+
+/* GstBaseSink::unlock_stop: a cancelled GCancellable cannot be reused, so
+ * replace it with a fresh one. */
+static gboolean
+gst_dynudpsink_unlock_stop (GstBaseSink * bsink)
+{
+ GstDynUDPSink *udpsink;
+
+ udpsink = GST_DYNUDPSINK (bsink);
+
+ gst_dynudpsink_free_cancellable (udpsink);
+ gst_dynudpsink_create_cancellable (udpsink);
+
+ return TRUE;
+}
diff --git a/gst/udp/gstdynudpsink.h b/gst/udp/gstdynudpsink.h
new file mode 100644
index 0000000000..6d85bb6697
--- /dev/null
+++ b/gst/udp/gstdynudpsink.h
@@ -0,0 +1,72 @@
+/* GStreamer
+ * Copyright (C) <2005> Philippe Khalaf <burger@speedy.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_DYNUDPSINK_H__
+#define __GST_DYNUDPSINK_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasesink.h>
+#include <gio/gio.h>
+
+G_BEGIN_DECLS
+
+#include "gstudpnetutils.h"
+
+#define GST_TYPE_DYNUDPSINK (gst_dynudpsink_get_type())
+#define GST_DYNUDPSINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DYNUDPSINK,GstDynUDPSink))
+#define GST_DYNUDPSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DYNUDPSINK,GstDynUDPSinkClass))
+#define GST_IS_DYNUDPSINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DYNUDPSINK))
+#define GST_IS_DYNUDPSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DYNUDPSINK))
+
+typedef struct _GstDynUDPSink GstDynUDPSink;
+typedef struct _GstDynUDPSinkClass GstDynUDPSinkClass;
+
+
+/* sends udp packets to host/port pairs contained in the GstNetBuffer received.
+ */
+struct _GstDynUDPSink {
+  GstBaseSink parent;
+
+  /* properties */
+  GSocket *socket, *socket_v6;  /* optional app-provided IPv4/IPv6 sockets */
+  gboolean close_socket;        /* close socket(s) on stop / replacement */
+  gchar *bind_address;          /* local address to bind to, or NULL */
+  gint bind_port;               /* local port to bind to (0 presumably means
+                                 * "any" -- confirm against bind code) */
+
+  /* the socket in use */
+  GSocket *used_socket, *used_socket_v6;
+  gboolean external_socket;     /* TRUE if the socket came from the app */
+  gboolean made_cancel_fd;      /* a pollfd was attached to cancellable */
+  GCancellable *cancellable;    /* cancels blocking sends on unlock */
+};
+
+struct _GstDynUDPSinkClass {
+  GstBaseSinkClass parent_class;
+
+  /* element methods */
+  /* handler for the "get-stats" action signal; returns per-client stats */
+  GstStructure* (*get_stats) (GstDynUDPSink *sink, const gchar *host, gint port);
+
+  /* signals */
+};
+
+GType gst_dynudpsink_get_type(void);
+
+G_END_DECLS
+
+#endif /* __GST_DYNUDPSINK_H__ */
diff --git a/gst/udp/gstmultiudpsink.c b/gst/udp/gstmultiudpsink.c
new file mode 100644
index 0000000000..f5ddfa4ebf
--- /dev/null
+++ b/gst/udp/gstmultiudpsink.c
@@ -0,0 +1,1808 @@
+/* GStreamer
+ * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
+ * Copyright (C) <2009> Jarkko Palviainen <jarkko.palviainen@sesca.com>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-multiudpsink
+ * @title: multiudpsink
+ * @see_also: udpsink, multifdsink
+ *
+ * multiudpsink is a network sink that sends UDP packets to multiple
+ * clients.
+ * It can be combined with rtp payload encoders to implement RTP streaming.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include "gstudpelements.h"
+#include "gstmultiudpsink.h"
+
+#include <string.h>
+
+#ifdef HAVE_SYS_SOCKET_H
+#include <sys/socket.h>
+#endif
+
+#include <gio/gnetworking.h>
+
+#include "gst/net/net.h"
+#include "gst/glib-compat-private.h"
+
+GST_DEBUG_CATEGORY_STATIC (multiudpsink_debug);
+#define GST_CAT_DEFAULT (multiudpsink_debug)
+
+#define UDP_MAX_SIZE 65507
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+/* MultiUDPSink signals and args */
+enum
+{
+  /* action signals: emitted *by the application* to control the element */
+  SIGNAL_ADD,
+  SIGNAL_REMOVE,
+  SIGNAL_CLEAR,
+  SIGNAL_GET_STATS,
+
+  /* notification signals: emitted by the element itself */
+  SIGNAL_CLIENT_ADDED,
+  SIGNAL_CLIENT_REMOVED,
+
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+#define DEFAULT_SOCKET NULL
+#define DEFAULT_CLOSE_SOCKET TRUE
+#define DEFAULT_USED_SOCKET NULL
+#define DEFAULT_CLIENTS NULL
+/* FIXME, this should be disabled by default, we don't need to join a multicast
+ * group for sending, if this socket is also used for receiving, it should
+ * be configured in the element that does the receive. */
+#define DEFAULT_AUTO_MULTICAST TRUE
+#define DEFAULT_MULTICAST_IFACE NULL
+#define DEFAULT_TTL 64
+#define DEFAULT_TTL_MC 1
+#define DEFAULT_LOOP TRUE
+#define DEFAULT_FORCE_IPV4 FALSE
+#define DEFAULT_QOS_DSCP -1
+#define DEFAULT_SEND_DUPLICATES TRUE
+#define DEFAULT_BUFFER_SIZE 0
+#define DEFAULT_BIND_ADDRESS NULL
+#define DEFAULT_BIND_PORT 0
+
+/* GObject property IDs, see the matching g_object_class_install_property()
+ * calls in class_init for semantics and defaults */
+enum
+{
+  PROP_0,
+  PROP_BYTES_TO_SERVE,
+  PROP_BYTES_SERVED,
+  PROP_SOCKET,
+  PROP_SOCKET_V6,
+  PROP_CLOSE_SOCKET,
+  PROP_USED_SOCKET,
+  PROP_USED_SOCKET_V6,
+  PROP_CLIENTS,
+  PROP_AUTO_MULTICAST,
+  PROP_MULTICAST_IFACE,
+  PROP_TTL,
+  PROP_TTL_MC,
+  PROP_LOOP,
+  PROP_FORCE_IPV4,
+  PROP_QOS_DSCP,
+  PROP_SEND_DUPLICATES,
+  PROP_BUFFER_SIZE,
+  PROP_BIND_ADDRESS,
+  PROP_BIND_PORT
+};
+
+static void gst_multiudpsink_finalize (GObject * object);
+
+static GstFlowReturn gst_multiudpsink_render (GstBaseSink * sink,
+ GstBuffer * buffer);
+static GstFlowReturn gst_multiudpsink_render_list (GstBaseSink * bsink,
+ GstBufferList * buffer_list);
+
+static gboolean gst_multiudpsink_start (GstBaseSink * bsink);
+static gboolean gst_multiudpsink_stop (GstBaseSink * bsink);
+static gboolean gst_multiudpsink_unlock (GstBaseSink * bsink);
+static gboolean gst_multiudpsink_unlock_stop (GstBaseSink * bsink);
+
+static void gst_multiudpsink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_multiudpsink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static void gst_multiudpsink_add_internal (GstMultiUDPSink * sink,
+ const gchar * host, gint port, gboolean lock);
+static void gst_multiudpsink_clear_internal (GstMultiUDPSink * sink,
+ gboolean lock);
+
+static guint gst_multiudpsink_signals[LAST_SIGNAL] = { 0 };
+
+#define gst_multiudpsink_parent_class parent_class
+G_DEFINE_TYPE (GstMultiUDPSink, gst_multiudpsink, GST_TYPE_BASE_SINK);
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (multiudpsink, "multiudpsink",
+ GST_RANK_NONE, GST_TYPE_MULTIUDPSINK, udp_element_init (plugin));
+
+/* Class initialisation: wires up vfuncs, installs all signals and
+ * properties, and registers the pad template and element metadata.
+ * Fixes: "distination" typo in the send-duplicates blurb; all property
+ * installs now consistently use the gobject_class local instead of a
+ * mix of gobject_class and G_OBJECT_CLASS (klass). */
+static void
+gst_multiudpsink_class_init (GstMultiUDPSinkClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstBaseSinkClass *gstbasesink_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+  gstbasesink_class = (GstBaseSinkClass *) klass;
+
+  gobject_class->set_property = gst_multiudpsink_set_property;
+  gobject_class->get_property = gst_multiudpsink_get_property;
+  gobject_class->finalize = gst_multiudpsink_finalize;
+
+  /**
+   * GstMultiUDPSink::add:
+   * @gstmultiudpsink: the sink on which the signal is emitted
+   * @host: the hostname/IP address of the client to add
+   * @port: the port of the client to add
+   *
+   * Add a client with destination @host and @port to the list of
+   * clients. When the same host/port pair is added multiple times, the
+   * send-duplicates property defines if the packets are sent multiple times to
+   * the same host/port pair or not.
+   *
+   * When a host/port pair is added multiple times, an equal amount of remove
+   * calls must be performed to actually remove the host/port pair from the list
+   * of destinations.
+   */
+  gst_multiudpsink_signals[SIGNAL_ADD] =
+      g_signal_new ("add", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+      G_STRUCT_OFFSET (GstMultiUDPSinkClass, add),
+      NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_STRING, G_TYPE_INT);
+  /**
+   * GstMultiUDPSink::remove:
+   * @gstmultiudpsink: the sink on which the signal is emitted
+   * @host: the hostname/IP address of the client to remove
+   * @port: the port of the client to remove
+   *
+   * Remove the client with destination @host and @port from the list of
+   * clients.
+   */
+  gst_multiudpsink_signals[SIGNAL_REMOVE] =
+      g_signal_new ("remove", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+      G_STRUCT_OFFSET (GstMultiUDPSinkClass, remove),
+      NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_STRING, G_TYPE_INT);
+  /**
+   * GstMultiUDPSink::clear:
+   * @gstmultiudpsink: the sink on which the signal is emitted
+   *
+   * Clear the list of clients.
+   */
+  gst_multiudpsink_signals[SIGNAL_CLEAR] =
+      g_signal_new ("clear", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+      G_STRUCT_OFFSET (GstMultiUDPSinkClass, clear), NULL, NULL, NULL,
+      G_TYPE_NONE, 0);
+  /**
+   * GstMultiUDPSink::get-stats:
+   * @gstmultiudpsink: the sink on which the signal is emitted
+   * @host: the hostname/IP address of the client to get stats on
+   * @port: the port of the client to get stats on
+   *
+   * Get the statistics of the client with destination @host and @port.
+   *
+   * Returns: a GstStructure: bytes_sent, packets_sent, connect_time
+   * (in epoch nanoseconds), disconnect_time (in epoch
+   * nanoseconds)
+   */
+  gst_multiudpsink_signals[SIGNAL_GET_STATS] =
+      g_signal_new ("get-stats", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
+      G_STRUCT_OFFSET (GstMultiUDPSinkClass, get_stats),
+      NULL, NULL, NULL, GST_TYPE_STRUCTURE, 2, G_TYPE_STRING, G_TYPE_INT);
+  /**
+   * GstMultiUDPSink::client-added:
+   * @gstmultiudpsink: the sink emitting the signal
+   * @host: the hostname/IP address of the added client
+   * @port: the port of the added client
+   *
+   * Signal emitted when a new client is added to the list of
+   * clients.
+   */
+  gst_multiudpsink_signals[SIGNAL_CLIENT_ADDED] =
+      g_signal_new ("client-added", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstMultiUDPSinkClass, client_added),
+      NULL, NULL, NULL, G_TYPE_NONE, 2, G_TYPE_STRING, G_TYPE_INT);
+  /**
+   * GstMultiUDPSink::client-removed:
+   * @gstmultiudpsink: the sink emitting the signal
+   * @host: the hostname/IP address of the removed client
+   * @port: the port of the removed client
+   *
+   * Signal emitted when a client is removed from the list of
+   * clients.
+   */
+  gst_multiudpsink_signals[SIGNAL_CLIENT_REMOVED] =
+      g_signal_new ("client-removed", G_TYPE_FROM_CLASS (klass),
+      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstMultiUDPSinkClass,
+          client_removed), NULL, NULL, NULL,
+      G_TYPE_NONE, 2, G_TYPE_STRING, G_TYPE_INT);
+
+  g_object_class_install_property (gobject_class, PROP_BYTES_TO_SERVE,
+      g_param_spec_uint64 ("bytes-to-serve", "Bytes to serve",
+          "Number of bytes received to serve to clients", 0, G_MAXUINT64, 0,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_BYTES_SERVED,
+      g_param_spec_uint64 ("bytes-served", "Bytes served",
+          "Total number of bytes sent to all clients", 0, G_MAXUINT64, 0,
+          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_SOCKET,
+      g_param_spec_object ("socket", "Socket Handle",
+          "Socket to use for UDP sending. (NULL == allocate)",
+          G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_SOCKET_V6,
+      g_param_spec_object ("socket-v6", "Socket Handle IPv6",
+          "Socket to use for UDPv6 sending. (NULL == allocate)",
+          G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_CLOSE_SOCKET,
+      g_param_spec_boolean ("close-socket", "Close socket",
+          "Close socket if passed as property on state change",
+          DEFAULT_CLOSE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_USED_SOCKET,
+      g_param_spec_object ("used-socket", "Used Socket Handle",
+          "Socket currently in use for UDP sending. (NULL == no socket)",
+          G_TYPE_SOCKET, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_USED_SOCKET_V6,
+      g_param_spec_object ("used-socket-v6", "Used Socket Handle IPv6",
+          "Socket currently in use for UDPv6 sending. (NULL == no socket)",
+          G_TYPE_SOCKET, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_CLIENTS,
+      g_param_spec_string ("clients", "Clients",
+          "A comma separated list of host:port pairs with destinations",
+          DEFAULT_CLIENTS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_AUTO_MULTICAST,
+      g_param_spec_boolean ("auto-multicast",
+          "Automatically join/leave multicast groups",
+          "Automatically join/leave the multicast groups, FALSE means user"
+          " has to do it himself", DEFAULT_AUTO_MULTICAST,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_MULTICAST_IFACE,
+      g_param_spec_string ("multicast-iface", "Multicast Interface",
+          "The network interface on which to join the multicast group",
+          DEFAULT_MULTICAST_IFACE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_TTL,
+      g_param_spec_int ("ttl", "Unicast TTL",
+          "Used for setting the unicast TTL parameter",
+          0, 255, DEFAULT_TTL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_TTL_MC,
+      g_param_spec_int ("ttl-mc", "Multicast TTL",
+          "Used for setting the multicast TTL parameter",
+          0, 255, DEFAULT_TTL_MC, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_LOOP,
+      g_param_spec_boolean ("loop", "Multicast Loopback",
+          "Used for setting the multicast loop parameter. TRUE = enable,"
+          " FALSE = disable", DEFAULT_LOOP,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstMultiUDPSink::force-ipv4:
+   *
+   * Force the use of an IPv4 socket.
+   *
+   * Since: 1.0.2
+   */
+#ifndef GST_REMOVE_DEPRECATED
+  g_object_class_install_property (gobject_class, PROP_FORCE_IPV4,
+      g_param_spec_boolean ("force-ipv4", "Force IPv4",
+          "Forcing the use of an IPv4 socket (DEPRECATED, has no effect anymore)",
+          DEFAULT_FORCE_IPV4,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+#endif
+  g_object_class_install_property (gobject_class, PROP_QOS_DSCP,
+      g_param_spec_int ("qos-dscp", "QoS diff srv code point",
+          "Quality of Service, differentiated services code point (-1 default)",
+          -1, 63, DEFAULT_QOS_DSCP,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  /**
+   * GstMultiUDPSink::send-duplicates:
+   *
+   * When a host/port pair is added multiple times, send the packet to the host
+   * multiple times as well.
+   */
+  g_object_class_install_property (gobject_class, PROP_SEND_DUPLICATES,
+      g_param_spec_boolean ("send-duplicates", "Send Duplicates",
+          "When a destination/port pair is added multiple times, send packets "
+          "multiple times as well", DEFAULT_SEND_DUPLICATES,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_BUFFER_SIZE,
+      g_param_spec_int ("buffer-size", "Buffer Size",
+          "Size of the kernel send buffer in bytes, 0=default", 0, G_MAXINT,
+          DEFAULT_BUFFER_SIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  g_object_class_install_property (gobject_class, PROP_BIND_ADDRESS,
+      g_param_spec_string ("bind-address", "Bind Address",
+          "Address to bind the socket to", DEFAULT_BIND_ADDRESS,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_BIND_PORT,
+      g_param_spec_int ("bind-port", "Bind Port",
+          "Port to bind the socket to", 0, G_MAXUINT16,
+          DEFAULT_BIND_PORT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_template);
+
+  gst_element_class_set_static_metadata (gstelement_class, "UDP packet sender",
+      "Sink/Network",
+      "Send data over the network via UDP to one or multiple recipients "
+      "which can be added or removed at runtime using action signals",
+      "Wim Taymans <wim.taymans@gmail.com>");
+
+  gstbasesink_class->render = gst_multiudpsink_render;
+  gstbasesink_class->render_list = gst_multiudpsink_render_list;
+  gstbasesink_class->start = gst_multiudpsink_start;
+  gstbasesink_class->stop = gst_multiudpsink_stop;
+  gstbasesink_class->unlock = gst_multiudpsink_unlock;
+  gstbasesink_class->unlock_stop = gst_multiudpsink_unlock_stop;
+  klass->add = gst_multiudpsink_add;
+  klass->remove = gst_multiudpsink_remove;
+  klass->clear = gst_multiudpsink_clear;
+  klass->get_stats = gst_multiudpsink_get_stats;
+
+  GST_DEBUG_CATEGORY_INIT (multiudpsink_debug, "multiudpsink", 0, "UDP sink");
+}
+
+/* Allocates the cancellable used to abort blocking socket calls and
+ * tries to attach a pollfd to it; made_cancel_fd records whether that
+ * succeeded so free_cancellable() knows to release it. The pollfd itself
+ * is not kept (only the side effect on the cancellable matters here). */
+static void
+gst_multiudpsink_create_cancellable (GstMultiUDPSink * sink)
+{
+  GPollFD pollfd;
+
+  sink->cancellable = g_cancellable_new ();
+  sink->made_cancel_fd = g_cancellable_make_pollfd (sink->cancellable, &pollfd);
+}
+
+/* Releases the pollfd attached in create_cancellable() (if any) and
+ * drops the cancellable. Counterpart of gst_multiudpsink_create_cancellable. */
+static void
+gst_multiudpsink_free_cancellable (GstMultiUDPSink * sink)
+{
+  if (sink->made_cancel_fd) {
+    g_cancellable_release_fd (sink->cancellable);
+    sink->made_cancel_fd = FALSE;
+  }
+  g_object_unref (sink->cancellable);
+  sink->cancellable = NULL;
+}
+
+/* Instance initialisation: set every field to its documented default and
+ * pre-allocate the scratch arrays used by the render functions. */
+static void
+gst_multiudpsink_init (GstMultiUDPSink * sink)
+{
+  guint max_mem;
+
+  /* protects the client list and the per-client statistics counters */
+  g_mutex_init (&sink->client_lock);
+  sink->clients = NULL;
+  sink->num_v4_unique = 0;
+  sink->num_v4_all = 0;
+  sink->num_v6_unique = 0;
+  sink->num_v6_all = 0;
+
+  sink->socket = DEFAULT_SOCKET;
+  sink->socket_v6 = DEFAULT_SOCKET;
+  sink->used_socket = DEFAULT_USED_SOCKET;
+  sink->used_socket_v6 = DEFAULT_USED_SOCKET;
+  sink->close_socket = DEFAULT_CLOSE_SOCKET;
+  /* NOTE(review): socket was just set to NULL above, so this always
+   * evaluates to FALSE here; presumably external_socket is updated when
+   * a socket property is actually set -- verify in set_property/start */
+  sink->external_socket = (sink->socket != NULL);
+  sink->auto_multicast = DEFAULT_AUTO_MULTICAST;
+  sink->ttl = DEFAULT_TTL;
+  sink->ttl_mc = DEFAULT_TTL_MC;
+  sink->loop = DEFAULT_LOOP;
+  sink->force_ipv4 = DEFAULT_FORCE_IPV4;
+  sink->qos_dscp = DEFAULT_QOS_DSCP;
+  sink->send_duplicates = DEFAULT_SEND_DUPLICATES;
+  sink->multi_iface = g_strdup (DEFAULT_MULTICAST_IFACE);
+
+  gst_multiudpsink_create_cancellable (sink);
+
+  /* pre-allocate OutputVector, MapInfo and OutputMessage arrays
+   * for use in the render and render_list functions */
+  max_mem = gst_buffer_get_max_memory ();
+
+  sink->n_vecs = max_mem;
+  sink->vecs = g_new (GOutputVector, sink->n_vecs);
+
+  sink->n_maps = max_mem;
+  sink->maps = g_new (GstMapInfo, sink->n_maps);
+
+  /* grown on demand in render_buffers() */
+  sink->n_messages = 1;
+  sink->messages = g_new (GstOutputMessage, sink->n_messages);
+
+  /* we assume that the number of memories per buffer can fit into a guint8 */
+  g_warn_if_fail (max_mem <= G_MAXUINT8);
+}
+
+/* Creates a new GstUDPClient for @host:@port. If @host is not a numeric
+ * IP address it is resolved (blocking, but cancellable) via the default
+ * GResolver and the first result is used. Returns NULL if resolution
+ * fails. The returned client has ref_count 1 and add_count 0 (the caller
+ * is expected to bump add_count). */
+static GstUDPClient *
+gst_udp_client_new (GstMultiUDPSink * sink, const gchar * host, gint port)
+{
+  GstUDPClient *client;
+  GInetAddress *addr;
+  GSocketAddress *sockaddr;
+  GResolver *resolver;
+  GError *err = NULL;
+
+  /* fast path: @host is already a literal IP address */
+  sockaddr = g_inet_socket_address_new_from_string (host, port);
+  if (!sockaddr) {
+    GList *results;
+
+    resolver = g_resolver_get_default ();
+    results =
+        g_resolver_lookup_by_name (resolver, host, sink->cancellable, &err);
+    if (!results)
+      goto name_resolve;
+    /* take the first resolved address; our own ref outlives the list */
+    addr = G_INET_ADDRESS (g_object_ref (results->data));
+    sockaddr = g_inet_socket_address_new (addr, port);
+
+    g_resolver_free_addresses (results);
+    g_object_unref (resolver);
+    g_object_unref (addr);
+  }
+  /* transfer-none getter; only needed for the debug output below */
+  addr = g_inet_socket_address_get_address (G_INET_SOCKET_ADDRESS (sockaddr));
+#ifndef GST_DISABLE_GST_DEBUG
+  {
+    gchar *ip = g_inet_address_to_string (addr);
+
+    GST_DEBUG_OBJECT (sink, "IP address for host %s is %s", host, ip);
+    g_free (ip);
+  }
+#endif
+
+  client = g_slice_new0 (GstUDPClient);
+  client->ref_count = 1;
+  client->add_count = 0;
+  client->host = g_strdup (host);
+  client->port = port;
+  client->addr = sockaddr;   /* takes ownership of sockaddr */
+
+  return client;
+
+  /* ERRORS */
+name_resolve:
+  {
+    g_clear_error (&err);
+    g_object_unref (resolver);
+
+    return NULL;
+  }
+}
+
+/* Drops one reference; frees the client and its address when the last
+ * reference goes away. Call with client lock held. */
+static void
+gst_udp_client_unref (GstUDPClient * client)
+{
+  client->ref_count--;
+  if (client->ref_count == 0) {
+    g_object_unref (client->addr);
+    g_free (client->host);
+    g_slice_free (GstUDPClient, client);
+  }
+}
+
+/* Takes an extra reference and returns the client for chaining.
+ * Call with client lock held. */
+static inline GstUDPClient *
+gst_udp_client_ref (GstUDPClient * client)
+{
+  client->ref_count++;
+  return client;
+}
+
+/* GCompareFunc-style helper: 0 when both host and port match, 1 otherwise */
+static gint
+client_compare (GstUDPClient * a, GstUDPClient * b)
+{
+  if (a->port != b->port)
+    return 1;
+
+  return strcmp (a->host, b->host) != 0;
+}
+
+/* GObject finalize: release the client list, all sockets, the
+ * cancellable, the scratch arrays and the string properties, then chain
+ * up to the parent class. */
+static void
+gst_multiudpsink_finalize (GObject * object)
+{
+  GstMultiUDPSink *sink = GST_MULTIUDPSINK (object);
+
+  g_list_foreach (sink->clients, (GFunc) gst_udp_client_unref, NULL);
+  g_list_free (sink->clients);
+
+  g_clear_object (&sink->socket);
+  g_clear_object (&sink->socket_v6);
+  g_clear_object (&sink->used_socket);
+  g_clear_object (&sink->used_socket_v6);
+
+  gst_multiudpsink_free_cancellable (sink);
+
+  g_clear_pointer (&sink->multi_iface, g_free);
+
+  g_clear_pointer (&sink->vecs, g_free);
+  g_clear_pointer (&sink->maps, g_free);
+  g_clear_pointer (&sink->messages, g_free);
+
+  g_clear_pointer (&sink->bind_address, g_free);
+
+  g_mutex_clear (&sink->client_lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Maps the @n memories of @buf for reading and fills @vecs/@maps with
+ * one entry per memory. A memory that fails to map becomes an empty
+ * vector so index positions still line up. Returns the total payload
+ * size in bytes. The caller must unmap all entries of @maps afterwards. */
+static gsize
+fill_vectors (GOutputVector * vecs, GstMapInfo * maps, guint n, GstBuffer * buf)
+{
+  GstMemory *mem;
+  gsize size = 0;
+  guint i;
+
+  g_assert (gst_buffer_n_memory (buf) == n);
+
+  for (i = 0; i < n; ++i) {
+    mem = gst_buffer_peek_memory (buf, i);
+    if (gst_memory_map (mem, &maps[i], GST_MAP_READ)) {
+      vecs[i].buffer = maps[i].data;
+      vecs[i].size = maps[i].size;
+    } else {
+      GST_WARNING ("Failed to map memory %p for reading", mem);
+      /* empty placeholder: the packet is still sent, minus this chunk */
+      vecs[i].buffer = "";
+      vecs[i].size = 0;
+    }
+    size += vecs[i].size;
+  }
+
+  return size;
+}
+
+/* Total payload size of @msg: the sum of all its output vector sizes */
+static gsize
+gst_udp_calc_message_size (GstOutputMessage * msg)
+{
+  gsize total = 0;
+  guint v;
+
+  for (v = 0; v < msg->num_vectors; ++v)
+    total += msg->vectors[v].size;
+
+  return total;
+}
+
+/* Returns the index of the first message with a non-empty payload that
+ * has not been transmitted yet, or -1 if there is none. (The historical
+ * "messsages" spelling is kept because callers use this name.) */
+static gint
+gst_udp_messsages_find_first_not_sent (GstOutputMessage * messages,
+    guint num_messages)
+{
+  guint idx;
+
+  for (idx = 0; idx < num_messages; ++idx) {
+    if (messages[idx].bytes_sent != 0)
+      continue;
+    if (gst_udp_calc_message_size (&messages[idx]) > 0)
+      return idx;
+  }
+
+  return -1;
+}
+
+/* Formats @addr as "ip:port" into the caller-provided buffer @s of size
+ * @size and returns @s, for convenient inline use in log statements. */
+static inline gchar *
+gst_udp_address_get_string (GSocketAddress * addr, gchar * s, gsize size)
+{
+  GInetSocketAddress *isa = G_INET_SOCKET_ADDRESS (addr);
+  gchar *host;
+
+  host = g_inet_address_to_string (g_inet_socket_address_get_address (isa));
+  g_snprintf (s, size, "%s:%u", host, g_inet_socket_address_get_port (isa));
+  g_free (host);
+
+  return s;
+}
+
+/* Wrapper around g_socket_send_messages() plus error handling (ignoring).
+ * Returns FALSE if we got cancelled, otherwise TRUE. */
+static GstFlowReturn
+gst_multiudpsink_send_messages (GstMultiUDPSink * sink, GSocket * socket,
+ GstOutputMessage * messages, guint num_messages)
+{
+ gboolean sent_max_size_warning = FALSE;
+
+ while (num_messages > 0) {
+ gchar astr[64] G_GNUC_UNUSED;
+ GError *err = NULL;
+ guint msg_size, skip, i;
+ gint ret, err_idx;
+
+ ret = g_socket_send_messages (socket, messages, num_messages, 0,
+ sink->cancellable, &err);
+
+ if (G_UNLIKELY (ret < 0)) {
+ GstOutputMessage *msg;
+
+ if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
+ GstFlowReturn flow_ret;
+
+ g_clear_error (&err);
+
+ flow_ret = gst_base_sink_wait_preroll (GST_BASE_SINK (sink));
+
+ if (flow_ret == GST_FLOW_OK)
+ continue;
+
+ return flow_ret;
+ }
+
+ err_idx = gst_udp_messsages_find_first_not_sent (messages, num_messages);
+ if (err_idx < 0)
+ break;
+
+ msg = &messages[err_idx];
+ msg_size = gst_udp_calc_message_size (msg);
+
+ GST_LOG_OBJECT (sink, "error sending %u bytes to client %s: %s", msg_size,
+ gst_udp_address_get_string (msg->address, astr, sizeof (astr)),
+ err->message);
+
+ skip = 1;
+ if (msg_size > UDP_MAX_SIZE) {
+ if (!sent_max_size_warning) {
+ GST_ELEMENT_WARNING (sink, RESOURCE, WRITE,
+ ("Attempting to send a UDP packets larger than maximum size "
+ "(%u > %d)", msg_size, UDP_MAX_SIZE),
+ ("Reason: %s", err ? err->message : "unknown reason"));
+ sent_max_size_warning = FALSE;
+ }
+ } else {
+ GST_ELEMENT_WARNING (sink, RESOURCE, WRITE,
+ ("Error sending UDP packets"), ("client %s, reason: %s",
+ gst_udp_address_get_string (msg->address, astr, sizeof (astr)),
+ (err != NULL) ? err->message : "unknown reason"));
+
+ for (i = err_idx + 1; i < num_messages; ++i, ++skip) {
+ if (messages[i].address != msg->address)
+ break;
+ }
+ GST_DEBUG_OBJECT (sink, "skipping %d message(s) to same client", skip);
+ }
+
+ /* ignore any errors and try sending the rest */
+ g_clear_error (&err);
+ ret = skip;
+ }
+
+ g_assert (ret <= num_messages);
+
+ messages += ret;
+ num_messages -= ret;
+ }
+
+ return GST_FLOW_OK;
+}
+
+/* Core send routine shared by render() and render_list(): sends
+ * @num_buffers buffers (with @mem_nums[i] memories each, @total_mem_num
+ * in total) to every client. Builds one GstOutputMessage per
+ * (client, buffer) pair, sorted v4-first to match the client list, sends
+ * them via the appropriate socket(s) and updates the statistics.
+ * Returns GST_FLOW_OK, or the flow error from a cancelled send. */
+static GstFlowReturn
+gst_multiudpsink_render_buffers (GstMultiUDPSink * sink, GstBuffer ** buffers,
+    guint num_buffers, guint8 * mem_nums, guint total_mem_num)
+{
+  GstOutputMessage *msgs;
+  gboolean send_duplicates;
+  GstUDPClient **clients;
+  GOutputVector *vecs;
+  GstMapInfo *map_infos;
+  GstFlowReturn flow_ret;
+  guint num_addr_v4, num_addr_v6;
+  guint num_addr, num_msgs;
+  guint i, j, mem;
+  gsize size = 0;
+  GList *l;
+
+  send_duplicates = sink->send_duplicates;
+
+  g_mutex_lock (&sink->client_lock);
+
+  /* with send-duplicates a client is addressed add_count times */
+  if (send_duplicates) {
+    num_addr_v4 = sink->num_v4_all;
+    num_addr_v6 = sink->num_v6_all;
+  } else {
+    num_addr_v4 = sink->num_v4_unique;
+    num_addr_v6 = sink->num_v6_unique;
+  }
+  num_addr = num_addr_v4 + num_addr_v6;
+
+  if (num_addr == 0)
+    goto no_clients;
+
+  /* snapshot the client list (with refs) so we can send without
+   * holding the lock */
+  clients = g_newa (GstUDPClient *, num_addr);
+  for (l = sink->clients, i = 0; l != NULL; l = l->next) {
+    GstUDPClient *client = l->data;
+
+    clients[i++] = gst_udp_client_ref (client);
+    for (j = 1; send_duplicates && j < client->add_count; ++j)
+      clients[i++] = gst_udp_client_ref (client);
+  }
+  g_assert_cmpuint (i, ==, num_addr);
+
+  g_mutex_unlock (&sink->client_lock);
+
+  GST_LOG_OBJECT (sink, "%u buffers, %u memories -> to be sent to %u clients",
+      num_buffers, total_mem_num, num_addr);
+
+  /* ensure our pre-allocated scratch space arrays are large enough */
+  if (sink->n_vecs < total_mem_num) {
+    sink->n_vecs = GST_ROUND_UP_16 (total_mem_num);
+    g_free (sink->vecs);
+    sink->vecs = g_new (GOutputVector, sink->n_vecs);
+  }
+  vecs = sink->vecs;
+
+  if (sink->n_maps < total_mem_num) {
+    sink->n_maps = GST_ROUND_UP_16 (total_mem_num);
+    g_free (sink->maps);
+    sink->maps = g_new (GstMapInfo, sink->n_maps);
+  }
+  map_infos = sink->maps;
+
+  num_msgs = num_addr * num_buffers;
+  if (sink->n_messages < num_msgs) {
+    sink->n_messages = GST_ROUND_UP_16 (num_msgs);
+    g_free (sink->messages);
+    sink->messages = g_new (GstOutputMessage, sink->n_messages);
+  }
+  msgs = sink->messages;
+
+  /* populate first num_buffers messages with output vectors for the buffers */
+  for (i = 0, mem = 0; i < num_buffers; ++i) {
+    size += fill_vectors (&vecs[mem], &map_infos[mem], mem_nums[i], buffers[i]);
+    msgs[i].vectors = &vecs[mem];
+    msgs[i].num_vectors = mem_nums[i];
+    msgs[i].num_control_messages = 0;
+    msgs[i].bytes_sent = 0;
+    msgs[i].control_messages = NULL;
+    msgs[i].address = clients[0]->addr;
+    mem += mem_nums[i];
+  }
+
+  /* FIXME: how about some locking? (there wasn't any before either, but..) */
+  sink->bytes_to_serve += size;
+
+  /* now copy the pre-filled num_buffer messages over to the next num_buffer
+   * messages for the next client, where we also change the target address */
+  for (i = 1; i < num_addr; ++i) {
+    for (j = 0; j < num_buffers; ++j) {
+      msgs[i * num_buffers + j] = msgs[j];
+      msgs[i * num_buffers + j].address = clients[i]->addr;
+    }
+  }
+
+  /* now send it! */
+
+  /* no IPv4 socket? Send it all from the IPv6 socket then.. */
+  if (sink->used_socket == NULL) {
+    flow_ret = gst_multiudpsink_send_messages (sink, sink->used_socket_v6,
+        msgs, num_msgs);
+  } else {
+    guint num_msgs_v4 = num_buffers * num_addr_v4;
+    guint num_msgs_v6 = num_buffers * num_addr_v6;
+
+    /* our client list is sorted with IPv4 clients first and IPv6 ones last */
+    flow_ret = gst_multiudpsink_send_messages (sink, sink->used_socket,
+        msgs, num_msgs_v4);
+
+    if (flow_ret != GST_FLOW_OK)
+      goto cancelled;
+
+    flow_ret = gst_multiudpsink_send_messages (sink, sink->used_socket_v6,
+        msgs + num_msgs_v4, num_msgs_v6);
+  }
+
+  if (flow_ret != GST_FLOW_OK)
+    goto cancelled;
+
+  /* now update stats */
+  g_mutex_lock (&sink->client_lock);
+
+  for (i = 0; i < num_addr; ++i) {
+    GstUDPClient *client = clients[i];
+
+    for (j = 0; j < num_buffers; ++j) {
+      gsize bytes_sent;
+
+      bytes_sent = msgs[i * num_buffers + j].bytes_sent;
+
+      client->bytes_sent += bytes_sent;
+      client->packets_sent++;
+      sink->bytes_served += bytes_sent;
+    }
+    /* drop the ref taken when snapshotting the client list */
+    gst_udp_client_unref (client);
+  }
+
+  g_mutex_unlock (&sink->client_lock);
+
+out:
+
+  /* unmap everything fill_vectors() mapped above */
+  for (i = 0; i < mem; ++i)
+    gst_memory_unmap (map_infos[i].memory, &map_infos[i]);
+
+  return flow_ret;
+
+  /* ERRORS */
+no_clients:
+  {
+    g_mutex_unlock (&sink->client_lock);
+    GST_LOG_OBJECT (sink, "no clients");
+    return GST_FLOW_OK;
+  }
+cancelled:
+  {
+    GST_INFO_OBJECT (sink, "cancelled");
+
+    g_mutex_lock (&sink->client_lock);
+    for (i = 0; i < num_addr; ++i)
+      gst_udp_client_unref (clients[i]);
+    g_mutex_unlock (&sink->client_lock);
+    goto out;
+  }
+}
+
+/* GstBaseSink::render_list vfunc: gathers the buffers of @buffer_list
+ * and their memory counts, then hands everything to render_buffers(). */
+static GstFlowReturn
+gst_multiudpsink_render_list (GstBaseSink * bsink, GstBufferList * buffer_list)
+{
+  GstMultiUDPSink *sink = GST_MULTIUDPSINK_CAST (bsink);
+  GstBuffer **buffers;
+  guint8 *mem_nums;
+  guint total_mems = 0;
+  guint i, num_buffers;
+
+  num_buffers = gst_buffer_list_length (buffer_list);
+  if (num_buffers == 0) {
+    GST_LOG_OBJECT (sink, "empty buffer");
+    return GST_FLOW_OK;
+  }
+
+  /* collect the buffers and count memories so render_buffers() can size
+   * its scratch arrays */
+  buffers = g_newa (GstBuffer *, num_buffers);
+  mem_nums = g_newa (guint8, num_buffers);
+  for (i = 0; i < num_buffers; ++i) {
+    buffers[i] = gst_buffer_list_get (buffer_list, i);
+    mem_nums[i] = gst_buffer_n_memory (buffers[i]);
+    total_mems += mem_nums[i];
+  }
+
+  return gst_multiudpsink_render_buffers (sink, buffers, num_buffers,
+      mem_nums, total_mems);
+}
+
+/* GstBaseSink::render vfunc: sends a single buffer as one packet via
+ * render_buffers(); an empty buffer is silently accepted. */
+static GstFlowReturn
+gst_multiudpsink_render (GstBaseSink * bsink, GstBuffer * buffer)
+{
+  GstMultiUDPSink *sink = GST_MULTIUDPSINK_CAST (bsink);
+  guint8 n_mem = gst_buffer_n_memory (buffer);
+
+  if (n_mem == 0)
+    return GST_FLOW_OK;
+
+  return gst_multiudpsink_render_buffers (sink, &buffer, 1, &n_mem, n_mem);
+}
+
+/* Parses the "clients" property string: a comma-separated list of
+ * host:port pairs. The existing client list is cleared first; entries
+ * without a valid (non-zero) port are silently skipped. */
+static void
+gst_multiudpsink_set_clients_string (GstMultiUDPSink * sink,
+    const gchar * string)
+{
+  gchar **hostports;
+  gint i;
+
+  hostports = g_strsplit (string, ",", 0);
+
+  g_mutex_lock (&sink->client_lock);
+  /* replace the whole client list with the parsed entries */
+  gst_multiudpsink_clear_internal (sink, FALSE);
+
+  for (i = 0; hostports[i] != NULL; i++) {
+    gchar *sep;
+    gint64 port = 0;
+
+    sep = strchr (hostports[i], ':');
+    if (sep != NULL) {
+      *sep = '\0';
+      port = g_ascii_strtoll (sep + 1, NULL, 10);
+    }
+
+    if (port != 0)
+      gst_multiudpsink_add_internal (sink, hostports[i], port, FALSE);
+  }
+  g_mutex_unlock (&sink->client_lock);
+
+  g_strfreev (hostports);
+}
+
+/* Builds the value of the "clients" property: a comma-separated list of
+ * host:port pairs, with each client repeated add_count times.
+ * Fix: the separator condition used "count > 1"; since count has already
+ * been decremented by the loop it holds the number of *remaining*
+ * duplicates, so duplicates of the final client were concatenated
+ * without a comma ("h:ph:p"). "count > 0" emits the comma correctly. */
+static gchar *
+gst_multiudpsink_get_clients_string (GstMultiUDPSink * sink)
+{
+  GString *str;
+  GList *clients;
+
+  str = g_string_new ("");
+
+  g_mutex_lock (&sink->client_lock);
+  clients = sink->clients;
+  while (clients) {
+    GstUDPClient *client;
+    gint count;
+
+    client = (GstUDPClient *) clients->data;
+
+    clients = g_list_next (clients);
+
+    count = client->add_count;
+    while (count--) {
+      /* separator needed if more clients follow, or more duplicates of
+       * this client remain */
+      g_string_append_printf (str, "%s:%d%s", client->host, client->port,
+          (clients || count > 0 ? "," : ""));
+    }
+  }
+  g_mutex_unlock (&sink->client_lock);
+
+  return g_string_free (str, FALSE);
+}
+
+/* Apply the configured DSCP value to @socket, if one has been set. */
+static void
+gst_multiudpsink_setup_qos_dscp (GstMultiUDPSink * sink, GSocket * socket)
+{
+  gint dscp = sink->qos_dscp;
+
+  /* -1 means "leave the system default alone"; also skip sockets that
+   * do not exist (yet) */
+  if (dscp < 0 || socket == NULL)
+    return;
+
+  if (!gst_net_utils_set_socket_tos (socket, dscp))
+    GST_ERROR_OBJECT (sink, "could not set qos dscp: %d", dscp);
+}
+
+/* GObject property setter. */
+static void
+gst_multiudpsink_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstMultiUDPSink *udpsink;
+
+  udpsink = GST_MULTIUDPSINK (object);
+
+  switch (prop_id) {
+    case PROP_SOCKET:
+      /* close the previously configured socket first, but only if it is
+       * not the one currently in use and close-socket is enabled */
+      if (udpsink->socket != NULL && udpsink->socket != udpsink->used_socket &&
+          udpsink->close_socket) {
+        GError *err = NULL;
+
+        if (!g_socket_close (udpsink->socket, &err)) {
+          GST_ERROR ("failed to close socket %p: %s", udpsink->socket,
+              err->message);
+          g_clear_error (&err);
+        }
+      }
+      if (udpsink->socket)
+        g_object_unref (udpsink->socket);
+      udpsink->socket = g_value_dup_object (value);
+      GST_DEBUG_OBJECT (udpsink, "setting socket to %p", udpsink->socket);
+      break;
+    case PROP_SOCKET_V6:
+      /* same replace-and-maybe-close dance for the IPv6 socket */
+      if (udpsink->socket_v6 != NULL
+          && udpsink->socket_v6 != udpsink->used_socket_v6
+          && udpsink->close_socket) {
+        GError *err = NULL;
+
+        if (!g_socket_close (udpsink->socket_v6, &err)) {
+          GST_ERROR ("failed to close socket %p: %s", udpsink->socket_v6,
+              err->message);
+          g_clear_error (&err);
+        }
+      }
+      if (udpsink->socket_v6)
+        g_object_unref (udpsink->socket_v6);
+      udpsink->socket_v6 = g_value_dup_object (value);
+      GST_DEBUG_OBJECT (udpsink, "setting socket to %p", udpsink->socket_v6);
+      break;
+    case PROP_CLOSE_SOCKET:
+      udpsink->close_socket = g_value_get_boolean (value);
+      break;
+    case PROP_CLIENTS:
+      gst_multiudpsink_set_clients_string (udpsink, g_value_get_string (value));
+      break;
+    case PROP_AUTO_MULTICAST:
+      udpsink->auto_multicast = g_value_get_boolean (value);
+      break;
+    case PROP_MULTICAST_IFACE:
+      g_free (udpsink->multi_iface);
+
+      /* NULL resets the interface to the default */
+      if (g_value_get_string (value) == NULL)
+        udpsink->multi_iface = g_strdup (DEFAULT_MULTICAST_IFACE);
+      else
+        udpsink->multi_iface = g_value_dup_string (value);
+      break;
+    case PROP_TTL:
+      udpsink->ttl = g_value_get_int (value);
+      break;
+    case PROP_TTL_MC:
+      udpsink->ttl_mc = g_value_get_int (value);
+      break;
+    case PROP_LOOP:
+      udpsink->loop = g_value_get_boolean (value);
+      break;
+    case PROP_FORCE_IPV4:
+      udpsink->force_ipv4 = g_value_get_boolean (value);
+      break;
+    case PROP_QOS_DSCP:
+      /* takes effect immediately on the sockets currently in use */
+      udpsink->qos_dscp = g_value_get_int (value);
+      gst_multiudpsink_setup_qos_dscp (udpsink, udpsink->used_socket);
+      gst_multiudpsink_setup_qos_dscp (udpsink, udpsink->used_socket_v6);
+      break;
+    case PROP_SEND_DUPLICATES:
+      udpsink->send_duplicates = g_value_get_boolean (value);
+      break;
+    case PROP_BUFFER_SIZE:
+      udpsink->buffer_size = g_value_get_int (value);
+      break;
+    case PROP_BIND_ADDRESS:
+      g_free (udpsink->bind_address);
+      udpsink->bind_address = g_value_dup_string (value);
+      break;
+    case PROP_BIND_PORT:
+      udpsink->bind_port = g_value_get_int (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter. */
+static void
+gst_multiudpsink_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstMultiUDPSink *udpsink;
+
+  udpsink = GST_MULTIUDPSINK (object);
+
+  switch (prop_id) {
+    case PROP_BYTES_TO_SERVE:
+      g_value_set_uint64 (value, udpsink->bytes_to_serve);
+      break;
+    case PROP_BYTES_SERVED:
+      g_value_set_uint64 (value, udpsink->bytes_served);
+      break;
+    case PROP_SOCKET:
+      g_value_set_object (value, udpsink->socket);
+      break;
+    case PROP_SOCKET_V6:
+      g_value_set_object (value, udpsink->socket_v6);
+      break;
+    case PROP_CLOSE_SOCKET:
+      g_value_set_boolean (value, udpsink->close_socket);
+      break;
+    case PROP_USED_SOCKET:
+      g_value_set_object (value, udpsink->used_socket);
+      break;
+    case PROP_USED_SOCKET_V6:
+      g_value_set_object (value, udpsink->used_socket_v6);
+      break;
+    case PROP_CLIENTS:
+      /* serialised on demand from the live client list */
+      g_value_take_string (value,
+          gst_multiudpsink_get_clients_string (udpsink));
+      break;
+    case PROP_AUTO_MULTICAST:
+      g_value_set_boolean (value, udpsink->auto_multicast);
+      break;
+    case PROP_MULTICAST_IFACE:
+      g_value_set_string (value, udpsink->multi_iface);
+      break;
+    case PROP_TTL:
+      g_value_set_int (value, udpsink->ttl);
+      break;
+    case PROP_TTL_MC:
+      g_value_set_int (value, udpsink->ttl_mc);
+      break;
+    case PROP_LOOP:
+      g_value_set_boolean (value, udpsink->loop);
+      break;
+    case PROP_FORCE_IPV4:
+      g_value_set_boolean (value, udpsink->force_ipv4);
+      break;
+    case PROP_QOS_DSCP:
+      g_value_set_int (value, udpsink->qos_dscp);
+      break;
+    case PROP_SEND_DUPLICATES:
+      g_value_set_boolean (value, udpsink->send_duplicates);
+      break;
+    case PROP_BUFFER_SIZE:
+      g_value_set_int (value, udpsink->buffer_size);
+      break;
+    case PROP_BIND_ADDRESS:
+      g_value_set_string (value, udpsink->bind_address);
+      break;
+    case PROP_BIND_PORT:
+      g_value_set_int (value, udpsink->bind_port);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Configure the sending socket for one client: selects the IPv4 or IPv6
+ * socket based on the client's address family, joins the multicast group
+ * (when auto-multicast is enabled) and applies ttl/loopback settings.
+ * On failure the sink is stopped and an element error is posted.
+ *
+ * Returns: TRUE on success. */
+static gboolean
+gst_multiudpsink_configure_client (GstMultiUDPSink * sink,
+    GstUDPClient * client)
+{
+  GInetSocketAddress *saddr = G_INET_SOCKET_ADDRESS (client->addr);
+  GInetAddress *addr = g_inet_socket_address_get_address (saddr);
+  GSocketFamily family = g_socket_address_get_family (G_SOCKET_ADDRESS (saddr));
+  GSocket *socket;
+  GError *err = NULL;
+
+  GST_DEBUG_OBJECT (sink, "configuring client %p", client);
+
+  /* an IPv6 client is unusable without an IPv6 socket */
+  if (family == G_SOCKET_FAMILY_IPV6 && !sink->used_socket_v6)
+    goto invalid_family;
+
+  /* Select socket to send from for this address */
+  if (family == G_SOCKET_FAMILY_IPV6 || !sink->used_socket)
+    socket = sink->used_socket_v6;
+  else
+    socket = sink->used_socket;
+
+  if (g_inet_address_get_is_multicast (addr)) {
+    GST_DEBUG_OBJECT (sink, "we have a multicast client %p", client);
+    if (sink->auto_multicast) {
+      GST_DEBUG_OBJECT (sink, "autojoining group");
+      if (!g_socket_join_multicast_group (socket, addr, FALSE,
+              sink->multi_iface, &err))
+        goto join_group_failed;
+    }
+    GST_DEBUG_OBJECT (sink, "setting loop to %d", sink->loop);
+    g_socket_set_multicast_loopback (socket, sink->loop);
+    GST_DEBUG_OBJECT (sink, "setting ttl to %d", sink->ttl_mc);
+    g_socket_set_multicast_ttl (socket, sink->ttl_mc);
+  } else {
+    GST_DEBUG_OBJECT (sink, "setting unicast ttl to %d", sink->ttl);
+    g_socket_set_ttl (socket, sink->ttl);
+  }
+  return TRUE;
+
+  /* ERRORS */
+join_group_failed:
+  {
+    gst_multiudpsink_stop (GST_BASE_SINK (sink));
+    GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
+        ("Could not join multicast group: %s",
+            err ? err->message : "unknown reason"));
+    g_clear_error (&err);
+    return FALSE;
+  }
+invalid_family:
+  {
+    gst_multiudpsink_stop (GST_BASE_SINK (sink));
+    GST_ELEMENT_ERROR (sink, RESOURCE, SETTINGS, (NULL),
+        ("Invalid address family (got %d)", family));
+    return FALSE;
+  }
+}
+
+/* create a socket for sending to remote machine */
+static gboolean
+gst_multiudpsink_start (GstBaseSink * bsink)
+{
+  GstMultiUDPSink *sink;
+  GList *clients;
+  GstUDPClient *client;
+  GError *err = NULL;
+
+  sink = GST_MULTIUDPSINK (bsink);
+
+  sink->external_socket = FALSE;
+
+  /* a user-provided socket is routed to used_socket or used_socket_v6
+   * depending on its address family */
+  if (sink->socket) {
+    GST_DEBUG_OBJECT (sink, "using configured socket");
+    if (g_socket_get_family (sink->socket) == G_SOCKET_FAMILY_IPV6) {
+      sink->used_socket_v6 = G_SOCKET (g_object_ref (sink->socket));
+      sink->external_socket = TRUE;
+    } else {
+      sink->used_socket = G_SOCKET (g_object_ref (sink->socket));
+      sink->external_socket = TRUE;
+    }
+  }
+
+  if (sink->socket_v6) {
+    GST_DEBUG_OBJECT (sink, "using configured IPv6 socket");
+    /* "socket" and "socket-v6" must not both be IPv6 sockets */
+    g_return_val_if_fail (!sink->socket || g_socket_get_family (sink->socket) !=
+        G_SOCKET_FAMILY_IPV6, FALSE);
+
+    if (sink->used_socket_v6 && sink->used_socket_v6 != sink->socket_v6) {
+      GST_ERROR_OBJECT (sink,
+          "Provided different IPv6 sockets in socket and socket-v6 properties");
+      return FALSE;
+    }
+
+    sink->used_socket_v6 = G_SOCKET (g_object_ref (sink->socket_v6));
+    sink->external_socket = TRUE;
+  }
+
+  /* no external sockets: create our own */
+  if (!sink->used_socket && !sink->used_socket_v6) {
+    GSocketAddress *bind_addr;
+    GInetAddress *bind_iaddr;
+
+    if (sink->bind_address) {
+      GSocketFamily family;
+
+      /* bind-address may be a literal IP or a host name needing DNS */
+      bind_iaddr = g_inet_address_new_from_string (sink->bind_address);
+      if (!bind_iaddr) {
+        GList *results;
+        GResolver *resolver;
+
+        resolver = g_resolver_get_default ();
+        results =
+            g_resolver_lookup_by_name (resolver, sink->bind_address,
+            sink->cancellable, &err);
+        if (!results) {
+          g_object_unref (resolver);
+          goto name_resolve;
+        }
+        /* use the first resolved address */
+        bind_iaddr = G_INET_ADDRESS (g_object_ref (results->data));
+        g_resolver_free_addresses (results);
+        g_object_unref (resolver);
+      }
+
+      bind_addr = g_inet_socket_address_new (bind_iaddr, sink->bind_port);
+      g_object_unref (bind_iaddr);
+      family = g_socket_address_get_family (G_SOCKET_ADDRESS (bind_addr));
+
+      /* only one socket, of the bind address' family, is created here */
+      if ((sink->used_socket =
+              g_socket_new (family, G_SOCKET_TYPE_DATAGRAM,
+                  G_SOCKET_PROTOCOL_UDP, &err)) == NULL) {
+        g_object_unref (bind_addr);
+        goto no_socket;
+      }
+
+      g_socket_bind (sink->used_socket, bind_addr, TRUE, &err);
+      g_object_unref (bind_addr);
+      if (err != NULL)
+        goto bind_error;
+    } else {
+      /* create sender sockets if none available */
+      if ((sink->used_socket = g_socket_new (G_SOCKET_FAMILY_IPV4,
+                  G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL)
+        goto no_socket;
+
+      bind_iaddr = g_inet_address_new_any (G_SOCKET_FAMILY_IPV4);
+      bind_addr = g_inet_socket_address_new (bind_iaddr, sink->bind_port);
+      g_socket_bind (sink->used_socket, bind_addr, TRUE, &err);
+      g_object_unref (bind_addr);
+      g_object_unref (bind_iaddr);
+      if (err != NULL)
+        goto bind_error;
+
+      /* IPv6 is best-effort: failing to create the socket is only
+       * logged, but a bind failure on a created socket is fatal */
+      if ((sink->used_socket_v6 = g_socket_new (G_SOCKET_FAMILY_IPV6,
+                  G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP,
+                  &err)) == NULL) {
+        GST_INFO_OBJECT (sink, "Failed to create IPv6 socket: %s",
+            err->message);
+        g_clear_error (&err);
+      } else {
+        bind_iaddr = g_inet_address_new_any (G_SOCKET_FAMILY_IPV6);
+        bind_addr = g_inet_socket_address_new (bind_iaddr, sink->bind_port);
+        g_socket_bind (sink->used_socket_v6, bind_addr, TRUE, &err);
+        g_object_unref (bind_addr);
+        g_object_unref (bind_iaddr);
+        if (err != NULL)
+          goto bind_error;
+      }
+    }
+  }
+#ifdef SO_SNDBUF
+  {
+    gint sndsize;
+    GError *opt_err = NULL;
+
+    if (sink->buffer_size != 0) {
+      sndsize = sink->buffer_size;
+
+      GST_DEBUG_OBJECT (sink, "setting udp buffer of %d bytes", sndsize);
+      /* set buffer size, Note that on Linux this is typically limited to a
+       * maximum of around 100K. Also a minimum of 128 bytes is required on
+       * Linux. */
+
+      /* failure to grow the send buffer is a warning, not fatal */
+      if (sink->used_socket) {
+        if (!g_socket_set_option (sink->used_socket, SOL_SOCKET, SO_SNDBUF,
+                sndsize, &opt_err)) {
+          GST_ELEMENT_WARNING (sink, RESOURCE, SETTINGS, (NULL),
+              ("Could not create a buffer of requested %d bytes (%s)",
+                  sndsize, opt_err->message));
+          g_clear_error (&opt_err);
+        }
+      }
+
+      if (sink->used_socket_v6) {
+        if (!g_socket_set_option (sink->used_socket_v6, SOL_SOCKET, SO_SNDBUF,
+                sndsize, &opt_err)) {
+          GST_ELEMENT_WARNING (sink, RESOURCE, SETTINGS, (NULL),
+              ("Could not create a buffer of requested %d bytes (%s)",
+                  sndsize, opt_err->message));
+          g_clear_error (&opt_err);
+        }
+      }
+    }
+
+    /* read the value of the receive buffer. Note that on linux this returns 2x the
+     * value we set because the kernel allocates extra memory for metadata.
+     * The default on Linux is about 100K (which is about 50K without metadata) */
+    if (sink->used_socket) {
+      if (g_socket_get_option (sink->used_socket, SOL_SOCKET, SO_SNDBUF,
+              &sndsize, NULL)) {
+        GST_DEBUG_OBJECT (sink, "have UDP buffer of %d bytes", sndsize);
+      } else {
+        GST_DEBUG_OBJECT (sink, "could not get UDP buffer size");
+      }
+    }
+
+    if (sink->used_socket_v6) {
+      if (g_socket_get_option (sink->used_socket_v6, SOL_SOCKET, SO_SNDBUF,
+              &sndsize, NULL)) {
+        GST_DEBUG_OBJECT (sink, "have UDPv6 buffer of %d bytes", sndsize);
+      } else {
+        GST_DEBUG_OBJECT (sink, "could not get UDPv6 buffer size");
+      }
+    }
+  }
+#endif
+
+#ifdef SO_BINDTODEVICE
+  /* pin outgoing traffic to the configured interface where supported */
+  if (sink->multi_iface) {
+    if (sink->used_socket) {
+      if (setsockopt (g_socket_get_fd (sink->used_socket), SOL_SOCKET,
+              SO_BINDTODEVICE, sink->multi_iface,
+              strlen (sink->multi_iface)) < 0)
+        GST_WARNING_OBJECT (sink, "setsockopt SO_BINDTODEVICE failed: %s",
+            strerror (errno));
+    }
+    if (sink->used_socket_v6) {
+      if (setsockopt (g_socket_get_fd (sink->used_socket_v6), SOL_SOCKET,
+              SO_BINDTODEVICE, sink->multi_iface,
+              strlen (sink->multi_iface)) < 0)
+        GST_WARNING_OBJECT (sink, "setsockopt SO_BINDTODEVICE failed (v6): %s",
+            strerror (errno));
+    }
+  }
+#endif
+
+  if (sink->used_socket)
+    g_socket_set_broadcast (sink->used_socket, TRUE);
+  if (sink->used_socket_v6)
+    g_socket_set_broadcast (sink->used_socket_v6, TRUE);
+
+  sink->bytes_to_serve = 0;
+  sink->bytes_served = 0;
+
+  gst_multiudpsink_setup_qos_dscp (sink, sink->used_socket);
+  gst_multiudpsink_setup_qos_dscp (sink, sink->used_socket_v6);
+
+  /* look for multicast clients and join multicast groups appropriately
+     set also ttl and multicast loopback delivery appropriately */
+  for (clients = sink->clients; clients; clients = g_list_next (clients)) {
+    client = (GstUDPClient *) clients->data;
+
+    if (!gst_multiudpsink_configure_client (sink, client))
+      return FALSE;
+  }
+  return TRUE;
+
+  /* ERRORS */
+no_socket:
+  {
+    GST_ELEMENT_ERROR (sink, RESOURCE, FAILED, (NULL),
+        ("Could not create socket: %s", err->message));
+    g_clear_error (&err);
+    return FALSE;
+  }
+bind_error:
+  {
+    GST_ELEMENT_ERROR (sink, RESOURCE, FAILED, (NULL),
+        ("Failed to bind socket: %s", err->message));
+    g_clear_error (&err);
+    return FALSE;
+  }
+name_resolve:
+  {
+    GST_ELEMENT_ERROR (sink, RESOURCE, FAILED, (NULL),
+        ("Failed to resolve bind address %s: %s", sink->bind_address,
+            err->message));
+    g_clear_error (&err);
+    return FALSE;
+  }
+}
+
+/* Shut down: close (when we own them, or when close-socket is set) and
+ * drop the sockets that were used for sending. */
+static gboolean
+gst_multiudpsink_stop (GstBaseSink * bsink)
+{
+  GstMultiUDPSink *udpsink = GST_MULTIUDPSINK (bsink);
+  GSocket **sockets[] = { &udpsink->used_socket, &udpsink->used_socket_v6 };
+  guint i;
+
+  for (i = 0; i < G_N_ELEMENTS (sockets); i++) {
+    GSocket *sock = *sockets[i];
+
+    if (sock == NULL)
+      continue;
+
+    /* only close sockets we created ourselves, unless the user asked us
+     * to close externally provided ones too */
+    if (udpsink->close_socket || !udpsink->external_socket) {
+      GError *err = NULL;
+
+      if (!g_socket_close (sock, &err)) {
+        GST_ERROR_OBJECT (udpsink, "Failed to close socket: %s", err->message);
+        g_clear_error (&err);
+      }
+    }
+
+    g_object_unref (sock);
+    *sockets[i] = NULL;
+  }
+
+  return TRUE;
+}
+
+/* Sort callback: orders clients so that all IPv4 addresses come before
+ * all IPv6 addresses; clients of the same family compare equal. */
+static gint
+gst_udp_client_compare_socket_family (GstUDPClient * a, GstUDPClient * b)
+{
+  GSocketFamily fam_a = g_socket_address_get_family (a->addr);
+  GSocketFamily fam_b = g_socket_address_get_family (b->addr);
+
+  if (fam_a == fam_b)
+    return 0;
+
+  /* IPv4 sorts first, everything else after */
+  return (fam_a == G_SOCKET_FAMILY_IPV4 && fam_b == G_SOCKET_FAMILY_IPV6)
+      ? -1 : 1;
+}
+
+/* Add a destination host/port.  Adding the same destination again only
+ * bumps its add_count.  With @lock FALSE the caller must already hold
+ * the client lock.  Emits the "client-added" signal. */
+static void
+gst_multiudpsink_add_internal (GstMultiUDPSink * sink, const gchar * host,
+    gint port, gboolean lock)
+{
+  GSocketFamily family;
+  GstUDPClient *client;
+  GstUDPClient udpclient;
+  GList *find;
+
+  /* stack-local key used only for list lookup by host/port */
+  udpclient.host = (gchar *) host;
+  udpclient.port = port;
+
+  GST_DEBUG_OBJECT (sink, "adding client on host %s, port %d", host, port);
+
+  if (lock)
+    g_mutex_lock (&sink->client_lock);
+
+  find = g_list_find_custom (sink->clients, &udpclient,
+      (GCompareFunc) client_compare);
+
+  /* a client being removed (signal in flight) can be resurrected by
+   * taking an extra ref on it */
+  if (!find) {
+    find = g_list_find_custom (sink->clients_to_be_removed, &udpclient,
+        (GCompareFunc) client_compare);
+    if (find)
+      gst_udp_client_ref (find->data);
+  }
+
+  if (find) {
+    client = (GstUDPClient *) find->data;
+
+    family = g_socket_address_get_family (client->addr);
+
+    GST_DEBUG_OBJECT (sink, "found %d existing clients with host %s, port %d",
+        client->add_count, host, port);
+  } else {
+    client = gst_udp_client_new (sink, host, port);
+    if (!client)
+      goto error;
+
+    family = g_socket_address_get_family (client->addr);
+
+    client->connect_time = g_get_real_time () * GST_USECOND;
+
+    /* only configure now if the sink is already started */
+    if (sink->used_socket)
+      gst_multiudpsink_configure_client (sink, client);
+
+    GST_DEBUG_OBJECT (sink, "add client with host %s, port %d", host, port);
+
+    /* keep IPv4 clients at the beginning, and IPv6 at the end, we can make
+     * use of this in gst_multiudpsink_render_buffers() */
+    sink->clients = g_list_insert_sorted (sink->clients, client,
+        (GCompareFunc) gst_udp_client_compare_socket_family);
+
+    if (family == G_SOCKET_FAMILY_IPV4)
+      ++sink->num_v4_unique;
+    else
+      ++sink->num_v6_unique;
+  }
+
+  ++client->add_count;
+
+  if (family == G_SOCKET_FAMILY_IPV4)
+    ++sink->num_v4_all;
+  else
+    ++sink->num_v6_all;
+
+  if (lock)
+    g_mutex_unlock (&sink->client_lock);
+
+  /* signal is emitted outside the lock */
+  g_signal_emit (G_OBJECT (sink),
+      gst_multiudpsink_signals[SIGNAL_CLIENT_ADDED], 0, host, port);
+
+  GST_DEBUG_OBJECT (sink, "added client on host %s, port %d", host, port);
+  return;
+
+  /* ERRORS */
+error:
+  {
+    GST_DEBUG_OBJECT (sink, "did not add client on host %s, port %d", host,
+        port);
+    if (lock)
+      g_mutex_unlock (&sink->client_lock);
+    return;
+  }
+}
+
+/* Public entry point: add a destination, taking the client lock. */
+void
+gst_multiudpsink_add (GstMultiUDPSink * sink, const gchar * host, gint port)
+{
+  gst_multiudpsink_add_internal (sink, host, port, TRUE);
+}
+
+/* Public entry point: remove one "add" of a destination.  The client is
+ * only fully dropped when its add_count reaches zero; at that point the
+ * multicast group is left (if applicable) and the "client-removed"
+ * signal is emitted. */
+void
+gst_multiudpsink_remove (GstMultiUDPSink * sink, const gchar * host, gint port)
+{
+  GSocketFamily family;
+  GList *find;
+  GstUDPClient udpclient;
+  GstUDPClient *client;
+
+  /* stack-local key used only for list lookup by host/port */
+  udpclient.host = (gchar *) host;
+  udpclient.port = port;
+
+  g_mutex_lock (&sink->client_lock);
+  find = g_list_find_custom (sink->clients, &udpclient,
+      (GCompareFunc) client_compare);
+  if (!find)
+    goto not_found;
+
+  client = (GstUDPClient *) find->data;
+
+  GST_DEBUG_OBJECT (sink, "found %d clients with host %s, port %d",
+      client->add_count, host, port);
+
+  --client->add_count;
+
+  family = g_socket_address_get_family (client->addr);
+  if (family == G_SOCKET_FAMILY_IPV4)
+    --sink->num_v4_all;
+  else
+    --sink->num_v6_all;
+
+  if (client->add_count == 0) {
+    GInetSocketAddress *saddr = G_INET_SOCKET_ADDRESS (client->addr);
+    GInetAddress *addr = g_inet_socket_address_get_address (saddr);
+    GSocket *socket;
+
+    /* Select socket to send from for this address */
+    if (family == G_SOCKET_FAMILY_IPV6 || !sink->used_socket)
+      socket = sink->used_socket_v6;
+    else
+      socket = sink->used_socket;
+
+    GST_DEBUG_OBJECT (sink, "remove client with host %s, port %d", host, port);
+
+    client->disconnect_time = g_get_real_time () * GST_USECOND;
+
+    /* leaving the group is best-effort; failure is only logged */
+    if (socket && sink->auto_multicast
+        && g_inet_address_get_is_multicast (addr)) {
+      GError *err = NULL;
+
+      if (!g_socket_leave_multicast_group (socket, addr, FALSE,
+              sink->multi_iface, &err)) {
+        GST_DEBUG_OBJECT (sink, "Failed to leave multicast group: %s",
+            err->message);
+        g_clear_error (&err);
+      }
+    }
+
+    if (family == G_SOCKET_FAMILY_IPV4)
+      --sink->num_v4_unique;
+    else
+      --sink->num_v6_unique;
+
+    /* Keep state consistent for streaming thread, so remove from client list,
+     * but keep it around until after the signal has been emitted, in case a
+     * callback wants to get stats for that client or so */
+    sink->clients = g_list_delete_link (sink->clients, find);
+
+    sink->clients_to_be_removed =
+        g_list_prepend (sink->clients_to_be_removed, client);
+
+    /* Unlock to emit signal before we delete the actual client */
+    g_mutex_unlock (&sink->client_lock);
+    g_signal_emit (G_OBJECT (sink),
+        gst_multiudpsink_signals[SIGNAL_CLIENT_REMOVED], 0, host, port);
+    g_mutex_lock (&sink->client_lock);
+
+    sink->clients_to_be_removed =
+        g_list_remove (sink->clients_to_be_removed, client);
+
+    gst_udp_client_unref (client);
+  }
+  g_mutex_unlock (&sink->client_lock);
+
+  return;
+
+  /* ERRORS */
+not_found:
+  {
+    g_mutex_unlock (&sink->client_lock);
+    GST_WARNING_OBJECT (sink, "client at host %s, port %d not found",
+        host, port);
+    return;
+  }
+}
+
+/* Drop all clients at once and reset the per-family counters.  With
+ * @lock FALSE the caller must already hold the client lock. */
+static void
+gst_multiudpsink_clear_internal (GstMultiUDPSink * sink, gboolean lock)
+{
+  GST_DEBUG_OBJECT (sink, "clearing");
+  /* only the client bookkeeping needs freeing; there is no per-client
+   * socket or other resource for UDP */
+  if (lock)
+    g_mutex_lock (&sink->client_lock);
+  g_list_foreach (sink->clients, (GFunc) gst_udp_client_unref, sink);
+  g_list_free (sink->clients);
+  sink->clients = NULL;
+  sink->num_v4_unique = sink->num_v4_all = 0;
+  sink->num_v6_unique = sink->num_v6_all = 0;
+  if (lock)
+    g_mutex_unlock (&sink->client_lock);
+}
+
+/* Public entry point: remove all destinations, taking the client lock. */
+void
+gst_multiudpsink_clear (GstMultiUDPSink * sink)
+{
+  gst_multiudpsink_clear_internal (sink, TRUE);
+}
+
+/* Return a "multiudpsink-stats" structure (bytes/packets sent and
+ * connect/disconnect timestamps) for the given destination.  An empty
+ * structure is returned for unknown clients. */
+GstStructure *
+gst_multiudpsink_get_stats (GstMultiUDPSink * sink, const gchar * host,
+    gint port)
+{
+  GstUDPClient *client;
+  GstStructure *result = NULL;
+  GstUDPClient udpclient;
+  GList *find;
+
+  /* stack-local key used only for list lookup by host/port */
+  udpclient.host = (gchar *) host;
+  udpclient.port = port;
+
+  g_mutex_lock (&sink->client_lock);
+
+  find = g_list_find_custom (sink->clients, &udpclient,
+      (GCompareFunc) client_compare);
+
+  /* also check clients that are mid-removal (signal still in flight) */
+  if (!find)
+    find = g_list_find_custom (sink->clients_to_be_removed, &udpclient,
+        (GCompareFunc) client_compare);
+
+  if (!find)
+    goto not_found;
+
+  GST_DEBUG_OBJECT (sink, "stats for client with host %s, port %d", host, port);
+
+  client = (GstUDPClient *) find->data;
+
+  result = gst_structure_new_empty ("multiudpsink-stats");
+
+  gst_structure_set (result,
+      "bytes-sent", G_TYPE_UINT64, client->bytes_sent,
+      "packets-sent", G_TYPE_UINT64, client->packets_sent,
+      "connect-time", G_TYPE_UINT64, client->connect_time,
+      "disconnect-time", G_TYPE_UINT64, client->disconnect_time, NULL);
+
+  g_mutex_unlock (&sink->client_lock);
+
+  return result;
+
+  /* ERRORS */
+not_found:
+  {
+    g_mutex_unlock (&sink->client_lock);
+    GST_WARNING_OBJECT (sink, "client with host %s, port %d not found",
+        host, port);
+    /* Apparently (see comment in gstmultifdsink.c) returning NULL from here may
+     * confuse/break python bindings */
+    return gst_structure_new_empty ("multiudpsink-stats");
+  }
+}
+
+/* Cancel any blocking socket operation so the streaming thread can
+ * return promptly. */
+static gboolean
+gst_multiudpsink_unlock (GstBaseSink * bsink)
+{
+  GstMultiUDPSink *udpsink = GST_MULTIUDPSINK (bsink);
+
+  g_cancellable_cancel (udpsink->cancellable);
+
+  return TRUE;
+}
+
+/* Undo unlock: swap in a fresh cancellable so subsequent socket
+ * operations can block again. */
+static gboolean
+gst_multiudpsink_unlock_stop (GstBaseSink * bsink)
+{
+  GstMultiUDPSink *udpsink = GST_MULTIUDPSINK (bsink);
+
+  gst_multiudpsink_free_cancellable (udpsink);
+  gst_multiudpsink_create_cancellable (udpsink);
+
+  return TRUE;
+}
diff --git a/gst/udp/gstmultiudpsink.h b/gst/udp/gstmultiudpsink.h
new file mode 100644
index 0000000000..6310724ff9
--- /dev/null
+++ b/gst/udp/gstmultiudpsink.h
@@ -0,0 +1,130 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_MULTIUDPSINK_H__
+#define __GST_MULTIUDPSINK_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstbasesink.h>
+#include <gio/gio.h>
+
+G_BEGIN_DECLS
+
+#include "gstudpnetutils.h"
+
+#define GST_TYPE_MULTIUDPSINK (gst_multiudpsink_get_type())
+#define GST_MULTIUDPSINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULTIUDPSINK,GstMultiUDPSink))
+#define GST_MULTIUDPSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULTIUDPSINK,GstMultiUDPSinkClass))
+#define GST_IS_MULTIUDPSINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULTIUDPSINK))
+#define GST_IS_MULTIUDPSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULTIUDPSINK))
+#define GST_MULTIUDPSINK_CAST(obj) ((GstMultiUDPSink*)(obj))
+
+typedef struct _GstMultiUDPSink GstMultiUDPSink;
+typedef struct _GstMultiUDPSinkClass GstMultiUDPSinkClass;
+
+/* local alias for GIO's GOutputMessage */
+typedef GOutputMessage GstOutputMessage;
+
+/* Bookkeeping for one destination (host/port pair). */
+typedef struct {
+  gint ref_count;              /* for memory management */
+  gint add_count;              /* how often this address has been added */
+
+  GSocketAddress *addr;
+  gchar *host;
+  gint port;
+
+  /* Per-client stats */
+  guint64 bytes_sent;
+  guint64 packets_sent;
+  guint64 connect_time;
+  guint64 disconnect_time;
+}GstUDPClient;
+
+/* sends udp packets to multiple host/port pairs.
+ */
+struct _GstMultiUDPSink {
+  GstBaseSink parent;
+
+  GSocket *used_socket, *used_socket_v6;
+
+  GCancellable *cancellable;
+  gboolean made_cancel_fd;
+
+  /* client management */
+  GMutex client_lock;
+  GList *clients;
+  guint num_v4_unique;  /* number IPv4 clients (excluding duplicates) */
+  guint num_v4_all;     /* number IPv4 clients (including duplicates) */
+  guint num_v6_unique;  /* number IPv6 clients (excluding duplicates) */
+  guint num_v6_all;     /* number IPv6 clients (including duplicates) */
+  GList *clients_to_be_removed;
+
+  /* pre-allocated scrap space for render function */
+  GOutputVector *vecs;
+  guint n_vecs;
+  GstMapInfo *maps;
+  guint n_maps;
+  GstOutputMessage *messages;
+  guint n_messages;
+
+  /* properties */
+  guint64 bytes_to_serve;
+  guint64 bytes_served;
+  GSocket *socket, *socket_v6;
+  gboolean close_socket;
+
+  gboolean external_socket;
+
+  gboolean auto_multicast;
+  gchar *multi_iface;
+  gint ttl;
+  gint ttl_mc;
+  gboolean loop;
+  gboolean force_ipv4;
+  gint qos_dscp;
+
+  gboolean send_duplicates;
+  gint buffer_size;
+  gchar *bind_address;
+  gint bind_port;
+};
+
+struct _GstMultiUDPSinkClass {
+  GstBaseSinkClass parent_class;
+
+  /* element methods */
+  void (*add) (GstMultiUDPSink *sink, const gchar *host, gint port);
+  void (*remove) (GstMultiUDPSink *sink, const gchar *host, gint port);
+  void (*clear) (GstMultiUDPSink *sink);
+  GstStructure* (*get_stats) (GstMultiUDPSink *sink, const gchar *host, gint port);
+
+  /* signals */
+  void (*client_added) (GstElement *element, const gchar *host, gint port);
+  void (*client_removed) (GstElement *element, const gchar *host, gint port);
+};
+
+GType gst_multiudpsink_get_type(void);
+
+void gst_multiudpsink_add (GstMultiUDPSink *sink, const gchar *host, gint port);
+void gst_multiudpsink_remove (GstMultiUDPSink *sink, const gchar *host, gint port);
+void gst_multiudpsink_clear (GstMultiUDPSink *sink);
+GstStructure* gst_multiudpsink_get_stats (GstMultiUDPSink *sink, const gchar *host, gint port);
+
+G_END_DECLS
+
+#endif /* __GST_MULTIUDPSINK_H__ */
diff --git a/gst/udp/gstudp.c b/gst/udp/gstudp.c
new file mode 100644
index 0000000000..d5a21f22d5
--- /dev/null
+++ b/gst/udp/gstudp.c
@@ -0,0 +1,44 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstudpelements.h"
+
+
+/* Register every element factory provided by the udp plugin.  Succeeds
+ * if at least one element registered. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean registered = FALSE;
+
+  registered |= GST_ELEMENT_REGISTER (udpsink, plugin);
+  registered |= GST_ELEMENT_REGISTER (multiudpsink, plugin);
+  registered |= GST_ELEMENT_REGISTER (dynudpsink, plugin);
+  registered |= GST_ELEMENT_REGISTER (udpsrc, plugin);
+
+  return registered;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ udp,
+ "transfer data via UDP",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/udp/gstudpelement.c b/gst/udp/gstudpelement.c
new file mode 100644
index 0000000000..96d0ae4525
--- /dev/null
+++ b/gst/udp/gstudpelement.c
@@ -0,0 +1,47 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Julian Bouzas <julian.bouzas@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/net/gstnetaddressmeta.h>
+
+#include "gstudpelements.h"
+
+/* One-time per-process initialisation shared by all udp elements. */
+void
+udp_element_init (GstPlugin * plugin)
+{
+  /* gsize (not gboolean) because g_once_init_enter requires a
+   * pointer-sized location */
+  static gsize res = FALSE;
+  if (g_once_init_enter (&res)) {
+    /* not using GLIB_CHECK_VERSION on purpose, run-time version matters */
+    if (glib_check_version (2, 36, 0) != NULL) {
+      GST_WARNING ("Your GLib version is < 2.36, UDP multicasting support may "
+          "be broken, see https://bugzilla.gnome.org/show_bug.cgi?id=688378");
+    }
+
+    /* register info of the netaddress metadata so that we can use it from
+     * multiple threads right away. Note that the plugin loading is always
+     * serialized */
+    gst_net_address_meta_get_info ();
+    g_once_init_leave (&res, TRUE);
+  }
+}
diff --git a/gst/udp/gstudpelements.h b/gst/udp/gstudpelements.h
new file mode 100644
index 0000000000..e9b116c1be
--- /dev/null
+++ b/gst/udp/gstudpelements.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2020 Huawei Technologies Co., Ltd.
+ * @Author: Julian Bouzas <julian.bouzas@collabora.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_UDP_ELEMENTS_H__
+#define __GST_UDP_ELEMENTS_H__
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+/* shared one-time initialisation, called from each element's register func */
+void udp_element_init (GstPlugin * plugin);
+
+/* element registration entry points used by plugin_init() in gstudp.c */
+GST_ELEMENT_REGISTER_DECLARE (dynudpsink);
+GST_ELEMENT_REGISTER_DECLARE (multiudpsink);
+GST_ELEMENT_REGISTER_DECLARE (udpsink);
+GST_ELEMENT_REGISTER_DECLARE (udpsrc);
+
+G_END_DECLS
+
+#endif /* __GST_UDP_ELEMENTS_H__ */
diff --git a/gst/udp/gstudpnetutils.c b/gst/udp/gstudpnetutils.c
new file mode 100644
index 0000000000..b4dc5ef018
--- /dev/null
+++ b/gst/udp/gstudpnetutils.c
@@ -0,0 +1,108 @@
+/* GStreamer UDP network utility functions
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2006 Joni Valtanen <joni.valtanen@movial.fi>
+ * Copyright (C) 2009 Jarkko Palviainen <jarkko.palviainen@sesca.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <string.h>
+
+#include "gstudpnetutils.h"
+
+gboolean
+gst_udp_parse_uri (const gchar * uristr, gchar ** host, guint16 * port)
+{
+  gchar *protocol, *location_start;
+  gchar *location, *location_end;
+  gchar *colptr;
+
+  /* consider no protocol to be udp:// */
+  protocol = gst_uri_get_protocol (uristr);
+  if (!protocol)
+    goto no_protocol;
+  if (strcmp (protocol, "udp") != 0)
+    goto wrong_protocol;
+  g_free (protocol);
+
+  location_start = gst_uri_get_location (uristr);
+  if (!location_start)
+    return FALSE;
+
+  GST_DEBUG ("got location '%s'", location_start);
+
+  /* VLC compatibility, strip everything before the @ sign. VLC uses that as the
+   * remote address. */
+  location = g_strstr_len (location_start, -1, "@");
+  if (location == NULL)
+    location = location_start;
+  else
+    location += 1;             /* 'location' now points INSIDE location_start */
+
+  if (location[0] == '[') {
+    GST_DEBUG ("parse IPV6 address '%s'", location);
+    location_end = strchr (location, ']');
+    if (location_end == NULL)
+      goto wrong_address;
+
+    *host = g_strndup (location + 1, location_end - location - 1);
+    colptr = strrchr (location_end, ':');
+  } else {
+    GST_DEBUG ("parse IPV4 address '%s'", location);
+    colptr = strrchr (location, ':');
+
+    if (colptr != NULL) {
+      *host = g_strndup (location, colptr - location);
+    } else {
+      *host = g_strdup (location);
+    }
+  }
+  GST_DEBUG ("host set to '%s'", *host);
+
+  if (colptr != NULL) {
+    *port = g_ascii_strtoll (colptr + 1, NULL, 10);
+  } else {
+    *port = 0;
+  }
+  g_free (location_start);
+
+  return TRUE;
+
+  /* ERRORS */
+no_protocol:
+  {
+    GST_ERROR ("error parsing uri %s: no protocol", uristr);
+    return FALSE;
+  }
+wrong_protocol:
+  {
+    GST_ERROR ("error parsing uri %s: wrong protocol (%s != udp)", uristr,
+        protocol);
+    g_free (protocol);
+    return FALSE;
+  }
+wrong_address:
+  {
+    GST_ERROR ("error parsing uri %s", uristr);
+    g_free (location_start);   /* not 'location': it may point mid-buffer */
+    return FALSE;
+  }
+}
diff --git a/gst/udp/gstudpnetutils.h b/gst/udp/gstudpnetutils.h
new file mode 100644
index 0000000000..a62be562cb
--- /dev/null
+++ b/gst/udp/gstudpnetutils.h
@@ -0,0 +1,29 @@
+/* GStreamer UDP network utility functions
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2006 Joni Valtanen <joni.valtanen@movial.fi>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <gst/gst.h>
+
+#ifndef __GST_UDP_NET_UTILS_H__
+#define __GST_UDP_NET_UTILS_H__
+
+gboolean gst_udp_parse_uri (const gchar *uristr, gchar **host, guint16 *port);
+
+#endif /* __GST_UDP_NET_UTILS_H__*/
+
diff --git a/gst/udp/gstudpsink.c b/gst/udp/gstudpsink.c
new file mode 100644
index 0000000000..e4320ad834
--- /dev/null
+++ b/gst/udp/gstudpsink.c
@@ -0,0 +1,262 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-udpsink
+ * @title: udpsink
+ * @see_also: udpsrc, multifdsink
+ *
+ * udpsink is a network sink that sends UDP packets to the network.
+ * It can be combined with RTP payloaders to implement RTP streaming.
+ *
+ * ## Examples
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! udpsink
+ * ]|
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include "gstudpelements.h"
+#include "gstudpsink.h"
+
+#define UDP_DEFAULT_HOST "localhost"
+#define UDP_DEFAULT_PORT 5004
+
+/* UDPSink signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_HOST,
+ PROP_PORT,
+ PROP_URI,
+ /* FILL ME */
+};
+
+static void gst_udpsink_finalize (GstUDPSink * udpsink);
+
+static void gst_udpsink_uri_handler_init (gpointer g_iface,
+ gpointer iface_data);
+
+static void gst_udpsink_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_udpsink_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+/*static guint gst_udpsink_signals[LAST_SIGNAL] = { 0 }; */
+#define gst_udpsink_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstUDPSink, gst_udpsink, GST_TYPE_MULTIUDPSINK,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsink_uri_handler_init));
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (udpsink, "udpsink", GST_RANK_NONE,
+ GST_TYPE_UDPSINK, udp_element_init (plugin));
+
+static void
+gst_udpsink_class_init (GstUDPSinkClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_udpsink_set_property;
+ gobject_class->get_property = gst_udpsink_get_property;
+
+ gobject_class->finalize = (GObjectFinalizeFunc) gst_udpsink_finalize;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_HOST,
+ g_param_spec_string ("host", "host",
+ "The host/IP/Multicast group to send the packets to",
+ UDP_DEFAULT_HOST, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_PORT,
+ g_param_spec_int ("port", "port", "The port to send the packets to",
+ 0, 65535, UDP_DEFAULT_PORT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class, "UDP packet sender",
+ "Sink/Network",
+ "Send data over the network via UDP", "Wim Taymans <wim@fluendo.com>");
+}
+
+static void
+gst_udpsink_init (GstUDPSink * udpsink)
+{
+ udpsink->host = g_strdup (UDP_DEFAULT_HOST);
+ udpsink->port = UDP_DEFAULT_PORT;
+ udpsink->uri = g_strdup_printf ("udp://%s:%d", udpsink->host, udpsink->port);
+
+ gst_multiudpsink_add (GST_MULTIUDPSINK (udpsink), udpsink->host,
+ udpsink->port);
+}
+
+static void
+gst_udpsink_finalize (GstUDPSink * udpsink)
+{
+ g_free (udpsink->host);
+ udpsink->host = NULL;
+
+ g_free (udpsink->uri);
+ udpsink->uri = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize ((GObject *) udpsink);
+}
+
+static gboolean
+gst_udpsink_set_uri (GstUDPSink * sink, const gchar * uri, GError ** error)
+{
+ gchar *host;
+ guint16 port;
+
+ gst_multiudpsink_remove (GST_MULTIUDPSINK (sink), sink->host, sink->port);
+
+ if (!gst_udp_parse_uri (uri, &host, &port))
+ goto wrong_uri;
+
+ g_free (sink->host);
+ sink->host = host;
+ sink->port = port;
+
+ g_free (sink->uri);
+ sink->uri = g_strdup (uri);
+
+ gst_multiudpsink_add (GST_MULTIUDPSINK (sink), sink->host, sink->port);
+
+ return TRUE;
+
+ /* ERRORS */
+wrong_uri:
+ {
+ GST_ELEMENT_ERROR (sink, RESOURCE, READ, (NULL),
+ ("error parsing uri %s", uri));
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Could not parse UDP URI");
+ return FALSE;
+ }
+}
+
+static void
+gst_udpsink_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+{
+ GstUDPSink *udpsink;
+
+ udpsink = GST_UDPSINK (object);
+
+ /* remove old host */
+ gst_multiudpsink_remove (GST_MULTIUDPSINK (udpsink),
+ udpsink->host, udpsink->port);
+
+ switch (prop_id) {
+ case PROP_HOST:
+ {
+ const gchar *host;
+
+ host = g_value_get_string (value);
+ g_free (udpsink->host);
+ udpsink->host = g_strdup (host);
+ g_free (udpsink->uri);
+ udpsink->uri =
+ g_strdup_printf ("udp://%s:%d", udpsink->host, udpsink->port);
+ break;
+ }
+ case PROP_PORT:
+ udpsink->port = g_value_get_int (value);
+ g_free (udpsink->uri);
+ udpsink->uri =
+ g_strdup_printf ("udp://%s:%d", udpsink->host, udpsink->port);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ /* add new host */
+ gst_multiudpsink_add (GST_MULTIUDPSINK (udpsink),
+ udpsink->host, udpsink->port);
+}
+
+static void
+gst_udpsink_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstUDPSink *udpsink;
+
+ udpsink = GST_UDPSINK (object);
+
+ switch (prop_id) {
+ case PROP_HOST:
+ g_value_set_string (value, udpsink->host);
+ break;
+ case PROP_PORT:
+ g_value_set_int (value, udpsink->port);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/*** GSTURIHANDLER INTERFACE *************************************************/
+
+static GstURIType
+gst_udpsink_uri_get_type (GType type)
+{
+ return GST_URI_SINK;
+}
+
+static const gchar *const *
+gst_udpsink_uri_get_protocols (GType type)
+{
+ static const gchar *protocols[] = { "udp", NULL };
+
+ return protocols;
+}
+
+static gchar *
+gst_udpsink_uri_get_uri (GstURIHandler * handler)
+{
+ GstUDPSink *sink = GST_UDPSINK (handler);
+
+ return g_strdup (sink->uri);
+}
+
+static gboolean
+gst_udpsink_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+ GError ** error)
+{
+ return gst_udpsink_set_uri (GST_UDPSINK (handler), uri, error);
+}
+
+static void
+gst_udpsink_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+ GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;
+
+ iface->get_type = gst_udpsink_uri_get_type;
+ iface->get_protocols = gst_udpsink_uri_get_protocols;
+ iface->get_uri = gst_udpsink_uri_get_uri;
+ iface->set_uri = gst_udpsink_uri_set_uri;
+}
diff --git a/gst/udp/gstudpsink.h b/gst/udp/gstudpsink.h
new file mode 100644
index 0000000000..a7b5b343b1
--- /dev/null
+++ b/gst/udp/gstudpsink.h
@@ -0,0 +1,57 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_UDPSINK_H__
+#define __GST_UDPSINK_H__
+
+#include <gst/gst.h>
+#include "gstmultiudpsink.h"
+
+G_BEGIN_DECLS
+
+#include "gstudpnetutils.h"
+
+#define GST_TYPE_UDPSINK (gst_udpsink_get_type())
+#define GST_UDPSINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_UDPSINK,GstUDPSink))
+#define GST_UDPSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_UDPSINK,GstUDPSinkClass))
+#define GST_IS_UDPSINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_UDPSINK))
+#define GST_IS_UDPSINK_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_UDPSINK))
+
+typedef struct _GstUDPSink GstUDPSink;
+typedef struct _GstUDPSinkClass GstUDPSinkClass;
+
+struct _GstUDPSink {
+ GstMultiUDPSink parent;
+
+ gchar *host;
+ guint16 port;
+
+ gchar *uri;
+};
+
+struct _GstUDPSinkClass {
+ GstMultiUDPSinkClass parent_class;
+};
+
+GType gst_udpsink_get_type(void);
+
+G_END_DECLS
+
+#endif /* __GST_UDPSINK_H__ */
diff --git a/gst/udp/gstudpsrc.c b/gst/udp/gstudpsrc.c
new file mode 100644
index 0000000000..f957007468
--- /dev/null
+++ b/gst/udp/gstudpsrc.c
@@ -0,0 +1,1995 @@
+/* GStreamer
+ * Copyright (C) <2005> Wim Taymans <wim@fluendo.com>
+ * Copyright (C) <2005> Nokia Corporation <kai.vehmanen@nokia.com>
+ * Copyright (C) <2012> Collabora Ltd.
+ * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) 2014 Tim-Philipp Müller <tim@centricular.com>
+ * Copyright (C) 2014 Centricular Ltd
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-udpsrc
+ * @title: udpsrc
+ * @see_also: udpsink, multifdsink
+ *
+ * udpsrc is a network source that reads UDP packets from the network.
+ * It can be combined with RTP depayloaders to implement RTP streaming.
+ *
+ * The udpsrc element supports automatic port allocation by setting the
+ * #GstUDPSrc:port property to 0. After setting the udpsrc to PAUSED, the
+ * allocated port can be obtained by reading the port property.
+ *
+ * udpsrc can read from multicast groups by setting the #GstUDPSrc:multicast-group
+ * property to the IP address of the multicast group.
+ *
+ * Alternatively one can provide a custom socket to udpsrc with the #GstUDPSrc:socket
+ * property, udpsrc will then not allocate a socket itself but use the provided
+ * one.
+ *
+ * The #GstUDPSrc:caps property is mainly used to give a type to the UDP packet
+ * so that they can be autoplugged in GStreamer pipelines. This is very useful
+ * for RTP implementations where the contents of the UDP packets are transferred
+ * out-of-band using SDP or other means.
+ *
+ * The #GstUDPSrc:buffer-size property is used to change the default kernel
+ * buffer sizes used for receiving packets. The buffer size may be increased for
+ * high-volume connections, or may be decreased to limit the possible backlog of
+ * incoming data. The system places an absolute limit on these values, on Linux,
+ * for example, the default buffer size is typically 50K and can be increased to
+ * maximally 100K.
+ *
+ * The #GstUDPSrc:skip-first-bytes property is used to strip off an arbitrary
+ * number of bytes from the start of the raw udp packet and can be used to strip
+ * off proprietary header, for example.
+ *
+ * The udpsrc is always a live source. It does however not provide a #GstClock,
+ * this is left for downstream elements such as an RTP session manager or demuxer
+ * (such as an MPEG demuxer). As with all live sources, the captured buffers
+ * will have their timestamp set to the current running time of the pipeline.
+ *
+ * udpsrc implements a #GstURIHandler interface that handles udp://host:port
+ * type URIs.
+ *
+ * If the #GstUDPSrc:timeout property is set to a value bigger than 0, udpsrc
+ * will generate an element message named `GstUDPSrcTimeout`
+ * if no data was received in the given timeout.
+ *
+ * The message's structure contains one field:
+ *
+ * * #guint64 `timeout`: the timeout in microseconds that expired when waiting for data.
+ *
+ * The message is typically used to detect that no UDP packets arrive at the
+ * receiver, for example because they are blocked by a firewall.
+ *
+ * A custom file descriptor can be configured with the
+ * #GstUDPSrc:socket property. The socket will be closed when setting
+ * the element to READY by default. This behaviour can be overridden
+ * with the #GstUDPSrc:close-socket property, in which case the
+ * application is responsible for closing the file descriptor.
+ *
+ * ## Examples
+ * |[
+ * gst-launch-1.0 -v udpsrc ! fakesink dump=1
+ * ]| A pipeline to read from the default port and dump the udp packets.
+ * To actually generate udp packets on the default port one can use the
+ * udpsink element. When running the following pipeline in another terminal, the
+ * above mentioned pipeline should dump data packets to the console.
+ * |[
+ * gst-launch-1.0 -v audiotestsrc ! udpsink
+ * ]|
+ * |[
+ * gst-launch-1.0 -v udpsrc port=0 ! fakesink
+ * ]| read udp packets from a free port.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+/* Needed to get struct in6_pktinfo.
+ * Also all these have to be before glib.h is included as
+ * otherwise struct in6_pktinfo is not defined completely
+ * due to broken glibc headers */
+#define _GNU_SOURCE
+/* Needed for OSX/iOS to define the IPv6 variants */
+#define __APPLE_USE_RFC_3542
+#include <sys/types.h>
+#ifdef HAVE_SYS_SOCKET_H
+#include <sys/socket.h>
+#endif
+
+#include <string.h>
+#include "gstudpelements.h"
+#include "gstudpsrc.h"
+
+#include <gst/net/gstnetaddressmeta.h>
+
+#include <gio/gnetworking.h>
+
+/* Required for other parts of in_pktinfo / in6_pktinfo but only
+ * on non-Windows and can be included after glib.h */
+#ifndef G_PLATFORM_WIN32
+#include <netinet/ip.h>
+#endif
+
+/* Control messages for getting the destination address */
+#ifdef IP_PKTINFO
+GType gst_ip_pktinfo_message_get_type (void);
+
+#define GST_TYPE_IP_PKTINFO_MESSAGE (gst_ip_pktinfo_message_get_type ())
+#define GST_IP_PKTINFO_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), GST_TYPE_IP_PKTINFO_MESSAGE, GstIPPktinfoMessage))
+#define GST_IP_PKTINFO_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_CAST ((c), GST_TYPE_IP_PKTINFO_MESSAGE, GstIPPktinfoMessageClass))
+#define GST_IS_IP_PKTINFO_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), GST_TYPE_IP_PKTINFO_MESSAGE))
+#define GST_IS_IP_PKTINFO_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_TYPE ((c), GST_TYPE_IP_PKTINFO_MESSAGE))
+#define GST_IP_PKTINFO_MESSAGE_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GST_TYPE_IP_PKTINFO_MESSAGE, GstIPPktinfoMessageClass))
+
+typedef struct _GstIPPktinfoMessage GstIPPktinfoMessage;
+typedef struct _GstIPPktinfoMessageClass GstIPPktinfoMessageClass;
+
+struct _GstIPPktinfoMessageClass
+{
+ GSocketControlMessageClass parent_class;
+
+};
+
+struct _GstIPPktinfoMessage
+{
+ GSocketControlMessage parent;
+
+ guint ifindex;
+#ifndef G_PLATFORM_WIN32
+#ifndef __NetBSD__
+ struct in_addr spec_dst;
+#endif
+#endif
+ struct in_addr addr;
+};
+
+G_DEFINE_TYPE (GstIPPktinfoMessage, gst_ip_pktinfo_message,
+ G_TYPE_SOCKET_CONTROL_MESSAGE);
+
+static gsize
+gst_ip_pktinfo_message_get_size (GSocketControlMessage * message)
+{
+ return sizeof (struct in_pktinfo);
+}
+
+static int
+gst_ip_pktinfo_message_get_level (GSocketControlMessage * message)
+{
+ return IPPROTO_IP;
+}
+
+static int
+gst_ip_pktinfo_message_get_msg_type (GSocketControlMessage * message)
+{
+ return IP_PKTINFO;
+}
+
+static GSocketControlMessage *
+gst_ip_pktinfo_message_deserialize (gint level,
+ gint type, gsize size, gpointer data)
+{
+ struct in_pktinfo *pktinfo;
+ GstIPPktinfoMessage *message;
+
+ if (level != IPPROTO_IP || type != IP_PKTINFO)
+ return NULL;
+
+ if (size < sizeof (struct in_pktinfo))
+ return NULL;
+
+ pktinfo = data;
+
+ message = g_object_new (GST_TYPE_IP_PKTINFO_MESSAGE, NULL);
+ message->ifindex = pktinfo->ipi_ifindex;
+#ifndef G_PLATFORM_WIN32
+#ifndef __NetBSD__
+ message->spec_dst = pktinfo->ipi_spec_dst;
+#endif
+#endif
+ message->addr = pktinfo->ipi_addr;
+
+ return G_SOCKET_CONTROL_MESSAGE (message);
+}
+
+static void
+gst_ip_pktinfo_message_init (GstIPPktinfoMessage * message)
+{
+}
+
+static void
+gst_ip_pktinfo_message_class_init (GstIPPktinfoMessageClass * class)
+{
+ GSocketControlMessageClass *scm_class;
+
+ scm_class = G_SOCKET_CONTROL_MESSAGE_CLASS (class);
+ scm_class->get_size = gst_ip_pktinfo_message_get_size;
+ scm_class->get_level = gst_ip_pktinfo_message_get_level;
+ scm_class->get_type = gst_ip_pktinfo_message_get_msg_type;
+ scm_class->deserialize = gst_ip_pktinfo_message_deserialize;
+}
+#endif
+
+#ifdef IPV6_PKTINFO
+GType gst_ipv6_pktinfo_message_get_type (void);
+
+#define GST_TYPE_IPV6_PKTINFO_MESSAGE (gst_ipv6_pktinfo_message_get_type ())
+#define GST_IPV6_PKTINFO_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), GST_TYPE_IPV6_PKTINFO_MESSAGE, GstIPV6PktinfoMessage))
+#define GST_IPV6_PKTINFO_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_CAST ((c), GST_TYPE_IPV6_PKTINFO_MESSAGE, GstIPV6PktinfoMessageClass))
+#define GST_IS_IPV6_PKTINFO_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), GST_TYPE_IPV6_PKTINFO_MESSAGE))
+#define GST_IS_IPV6_PKTINFO_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_TYPE ((c), GST_TYPE_IPV6_PKTINFO_MESSAGE))
+#define GST_IPV6_PKTINFO_MESSAGE_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GST_TYPE_IPV6_PKTINFO_MESSAGE, GstIPV6PktinfoMessageClass))
+
+typedef struct _GstIPV6PktinfoMessage GstIPV6PktinfoMessage;
+typedef struct _GstIPV6PktinfoMessageClass GstIPV6PktinfoMessageClass;
+
+struct _GstIPV6PktinfoMessageClass
+{
+ GSocketControlMessageClass parent_class;
+
+};
+
+struct _GstIPV6PktinfoMessage
+{
+ GSocketControlMessage parent;
+
+ guint ifindex;
+ struct in6_addr addr;
+};
+
+G_DEFINE_TYPE (GstIPV6PktinfoMessage, gst_ipv6_pktinfo_message,
+ G_TYPE_SOCKET_CONTROL_MESSAGE);
+
+static gsize
+gst_ipv6_pktinfo_message_get_size (GSocketControlMessage * message)
+{
+ return sizeof (struct in6_pktinfo);
+}
+
+static int
+gst_ipv6_pktinfo_message_get_level (GSocketControlMessage * message)
+{
+ return IPPROTO_IPV6;
+}
+
+static int
+gst_ipv6_pktinfo_message_get_msg_type (GSocketControlMessage * message)
+{
+ return IPV6_PKTINFO;
+}
+
+static GSocketControlMessage *
+gst_ipv6_pktinfo_message_deserialize (gint level,
+ gint type, gsize size, gpointer data)
+{
+ struct in6_pktinfo *pktinfo;
+ GstIPV6PktinfoMessage *message;
+
+ if (level != IPPROTO_IPV6 || type != IPV6_PKTINFO)
+ return NULL;
+
+ if (size < sizeof (struct in6_pktinfo))
+ return NULL;
+
+ pktinfo = data;
+
+ message = g_object_new (GST_TYPE_IPV6_PKTINFO_MESSAGE, NULL);
+ message->ifindex = pktinfo->ipi6_ifindex;
+ message->addr = pktinfo->ipi6_addr;
+
+ return G_SOCKET_CONTROL_MESSAGE (message);
+}
+
+static void
+gst_ipv6_pktinfo_message_init (GstIPV6PktinfoMessage * message)
+{
+}
+
+static void
+gst_ipv6_pktinfo_message_class_init (GstIPV6PktinfoMessageClass * class)
+{
+ GSocketControlMessageClass *scm_class;
+
+ scm_class = G_SOCKET_CONTROL_MESSAGE_CLASS (class);
+ scm_class->get_size = gst_ipv6_pktinfo_message_get_size;
+ scm_class->get_level = gst_ipv6_pktinfo_message_get_level;
+ scm_class->get_type = gst_ipv6_pktinfo_message_get_msg_type;
+ scm_class->deserialize = gst_ipv6_pktinfo_message_deserialize;
+}
+
+#endif
+
+#ifdef IP_RECVDSTADDR
+GType gst_ip_recvdstaddr_message_get_type (void);
+
+#define GST_TYPE_IP_RECVDSTADDR_MESSAGE (gst_ip_recvdstaddr_message_get_type ())
+#define GST_IP_RECVDSTADDR_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), GST_TYPE_IP_RECVDSTADDR_MESSAGE, GstIPRecvdstaddrMessage))
+#define GST_IP_RECVDSTADDR_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_CAST ((c), GST_TYPE_IP_RECVDSTADDR_MESSAGE, GstIPRecvdstaddrMessageClass))
+#define GST_IS_IP_RECVDSTADDR_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), GST_TYPE_IP_RECVDSTADDR_MESSAGE))
+#define GST_IS_IP_RECVDSTADDR_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_TYPE ((c), GST_TYPE_IP_RECVDSTADDR_MESSAGE))
+#define GST_IP_RECVDSTADDR_MESSAGE_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GST_TYPE_IP_RECVDSTADDR_MESSAGE, GstIPRecvdstaddrMessageClass))
+
+typedef struct _GstIPRecvdstaddrMessage GstIPRecvdstaddrMessage;
+typedef struct _GstIPRecvdstaddrMessageClass GstIPRecvdstaddrMessageClass;
+
+struct _GstIPRecvdstaddrMessageClass
+{
+ GSocketControlMessageClass parent_class;
+
+};
+
+struct _GstIPRecvdstaddrMessage
+{
+ GSocketControlMessage parent;
+
+ guint ifindex;
+ struct in_addr addr;
+};
+
+G_DEFINE_TYPE (GstIPRecvdstaddrMessage, gst_ip_recvdstaddr_message,
+ G_TYPE_SOCKET_CONTROL_MESSAGE);
+
+static gsize
+gst_ip_recvdstaddr_message_get_size (GSocketControlMessage * message)
+{
+ return sizeof (struct in_addr);
+}
+
+static int
+gst_ip_recvdstaddr_message_get_level (GSocketControlMessage * message)
+{
+ return IPPROTO_IP;
+}
+
+static int
+gst_ip_recvdstaddr_message_get_msg_type (GSocketControlMessage * message)
+{
+ return IP_RECVDSTADDR;
+}
+
+static GSocketControlMessage *
+gst_ip_recvdstaddr_message_deserialize (gint level,
+ gint type, gsize size, gpointer data)
+{
+ struct in_addr *addr;
+ GstIPRecvdstaddrMessage *message;
+
+ if (level != IPPROTO_IP || type != IP_RECVDSTADDR)
+ return NULL;
+
+ if (size < sizeof (struct in_addr))
+ return NULL;
+
+ addr = data;
+
+ message = g_object_new (GST_TYPE_IP_RECVDSTADDR_MESSAGE, NULL);
+ message->addr = *addr;
+
+ return G_SOCKET_CONTROL_MESSAGE (message);
+}
+
+static void
+gst_ip_recvdstaddr_message_init (GstIPRecvdstaddrMessage * message)
+{
+}
+
+static void
+gst_ip_recvdstaddr_message_class_init (GstIPRecvdstaddrMessageClass * class)
+{
+ GSocketControlMessageClass *scm_class;
+
+ scm_class = G_SOCKET_CONTROL_MESSAGE_CLASS (class);
+ scm_class->get_size = gst_ip_recvdstaddr_message_get_size;
+ scm_class->get_level = gst_ip_recvdstaddr_message_get_level;
+ scm_class->get_type = gst_ip_recvdstaddr_message_get_msg_type;
+ scm_class->deserialize = gst_ip_recvdstaddr_message_deserialize;
+}
+#endif
+
+#define GST_TYPE_SOCKET_TIMESTAMP_MODE gst_socket_timestamp_mode_get_type()
+#define GST_SOCKET_TIMESTAMP_MODE (gst_socket_timestamp_mode_get_type ())
+static GType
+gst_socket_timestamp_mode_get_type (void)
+{
+ static GType socket_timestamp_mode_type = 0;
+ static const GEnumValue socket_timestamp_mode_types[] = {
+ {GST_SOCKET_TIMESTAMP_MODE_DISABLED, "Disable additional timestamps",
+ "disabled"},
+ {GST_SOCKET_TIMESTAMP_MODE_REALTIME,
+ "Timestamp with realtime clock (nsec resolution, may not be monotonic)",
+ "realtime"},
+ {0, NULL, NULL}
+ };
+
+ if (!socket_timestamp_mode_type)
+ socket_timestamp_mode_type =
+ g_enum_register_static ("GstSocketTimestampMode",
+ socket_timestamp_mode_types);
+
+ return socket_timestamp_mode_type;
+}
+
+#ifdef SO_TIMESTAMPNS
+GType gst_socket_timestamp_message_get_type (void);
+
+#define GST_TYPE_SOCKET_TIMESTAMP_MESSAGE (gst_socket_timestamp_message_get_type ())
+#define GST_SOCKET_TIMESTAMP_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_CAST ((o), GST_TYPE_SOCKET_TIMESTAMP_MESSAGE, GstSocketTimestampMessage))
+#define GST_SOCKET_TIMESTAMP_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_CAST ((c), GST_TYPE_SOCKET_TIMESTAMP_MESSAGE, GstSocketTimestampMessageClass))
+#define GST_IS_SOCKET_TIMESTAMP_MESSAGE(o) (G_TYPE_CHECK_INSTANCE_TYPE ((o), GST_TYPE_SOCKET_TIMESTAMP_MESSAGE))
+#define GST_IS_SOCKET_TIMESTAMP_MESSAGE_CLASS(c) (G_TYPE_CHECK_CLASS_TYPE ((c), GST_TYPE_SOCKET_TIMESTAMP_MESSAGE))
+#define GST_SOCKET_TIMESTAMP_MESSAGE_GET_CLASS(o) (G_TYPE_INSTANCE_GET_CLASS ((o), GST_TYPE_SOCKET_TIMESTAMP_MESSAGE, GstSocketTimestampMessageClass))
+
+typedef struct _GstSocketTimestampMessage GstSocketTimestampMessage;
+typedef struct _GstSocketTimestampMessageClass GstSocketTimestampMessageClass;
+
+struct _GstSocketTimestampMessageClass
+{
+ GSocketControlMessageClass parent_class;
+};
+
+struct _GstSocketTimestampMessage
+{
+ GSocketControlMessage parent;
+ struct timespec socket_ts;
+};
+
+G_DEFINE_TYPE (GstSocketTimestampMessage, gst_socket_timestamp_message,
+ G_TYPE_SOCKET_CONTROL_MESSAGE);
+
+static gsize
+gst_socket_timestamp_message_get_size (GSocketControlMessage * message)
+{
+ return sizeof (struct timespec);
+}
+
+static int
+gst_socket_timestamp_message_get_level (GSocketControlMessage * message)
+{
+ return SOL_SOCKET;
+}
+
+static int
+gst_socket_timestamp_message_get_msg_type (GSocketControlMessage * message)
+{
+ return SCM_TIMESTAMPNS;
+}
+
+static GSocketControlMessage *
+gst_socket_timestamp_message_deserialize (gint level,
+ gint type, gsize size, gpointer data)
+{
+ GstSocketTimestampMessage *message;
+
+ if (level != SOL_SOCKET)
+ return NULL;
+
+ if (size < sizeof (struct timespec))
+ return NULL;
+
+ message = g_object_new (GST_TYPE_SOCKET_TIMESTAMP_MESSAGE, NULL);
+ memcpy (&message->socket_ts, data, sizeof (struct timespec));
+
+ return G_SOCKET_CONTROL_MESSAGE (message);
+}
+
+static void
+gst_socket_timestamp_message_init (GstSocketTimestampMessage * message)
+{
+}
+
+static void
+gst_socket_timestamp_message_class_init (GstSocketTimestampMessageClass * class)
+{
+ GSocketControlMessageClass *scm_class;
+
+ scm_class = G_SOCKET_CONTROL_MESSAGE_CLASS (class);
+ scm_class->get_size = gst_socket_timestamp_message_get_size;
+ scm_class->get_level = gst_socket_timestamp_message_get_level;
+ scm_class->get_type = gst_socket_timestamp_message_get_msg_type;
+ scm_class->deserialize = gst_socket_timestamp_message_deserialize;
+}
+#endif
+
+static gboolean
+gst_udpsrc_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
+{
+ GstUDPSrc *udpsrc;
+ GstBufferPool *pool;
+ gboolean update;
+ GstStructure *config;
+ GstCaps *caps = NULL;
+
+ udpsrc = GST_UDPSRC (bsrc);
+
+ if (gst_query_get_n_allocation_pools (query) > 0) {
+ update = TRUE;
+ } else {
+ update = FALSE;
+ }
+
+ pool = gst_buffer_pool_new ();
+
+ config = gst_buffer_pool_get_config (pool);
+
+ gst_query_parse_allocation (query, &caps, NULL);
+
+ gst_buffer_pool_config_set_params (config, caps, udpsrc->mtu, 0, 0);
+
+ gst_buffer_pool_set_config (pool, config);
+
+ if (update)
+ gst_query_set_nth_allocation_pool (query, 0, pool, udpsrc->mtu, 0, 0);
+ else
+ gst_query_add_allocation_pool (query, pool, udpsrc->mtu, 0, 0);
+
+ gst_object_unref (pool);
+
+ return TRUE;
+}
+
/* not 100% correct, but a good upper bound for memory allocation purposes */
#define MAX_IPV4_UDP_PACKET_SIZE (65536 - 8)

GST_DEBUG_CATEGORY_STATIC (udpsrc_debug);
#define GST_CAT_DEFAULT (udpsrc_debug)

/* Source pad: udpsrc pushes raw packet payloads, so any caps are possible;
 * actual caps are set via the "caps" property. */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS_ANY);

/* Default values for all properties (see property installation in
 * gst_udpsrc_class_init for their meaning). */
#define UDP_DEFAULT_PORT 5004
#define UDP_DEFAULT_MULTICAST_GROUP "0.0.0.0"
#define UDP_DEFAULT_MULTICAST_IFACE NULL
#define UDP_DEFAULT_URI "udp://"UDP_DEFAULT_MULTICAST_GROUP":"G_STRINGIFY(UDP_DEFAULT_PORT)
#define UDP_DEFAULT_CAPS NULL
#define UDP_DEFAULT_SOCKET NULL
#define UDP_DEFAULT_BUFFER_SIZE 0
#define UDP_DEFAULT_TIMEOUT 0
#define UDP_DEFAULT_SKIP_FIRST_BYTES 0
#define UDP_DEFAULT_CLOSE_SOCKET TRUE
#define UDP_DEFAULT_USED_SOCKET NULL
#define UDP_DEFAULT_AUTO_MULTICAST TRUE
#define UDP_DEFAULT_REUSE TRUE
#define UDP_DEFAULT_LOOP TRUE
#define UDP_DEFAULT_RETRIEVE_SENDER_ADDRESS TRUE
#define UDP_DEFAULT_MTU (1492)

/* Property ids */
enum
{
  PROP_0,

  PROP_PORT,
  PROP_MULTICAST_GROUP,
  PROP_MULTICAST_IFACE,
  PROP_URI,
  PROP_CAPS,
  PROP_SOCKET,
  PROP_BUFFER_SIZE,
  PROP_TIMEOUT,
  PROP_SKIP_FIRST_BYTES,
  PROP_CLOSE_SOCKET,
  PROP_USED_SOCKET,
  PROP_AUTO_MULTICAST,
  PROP_REUSE,
  PROP_ADDRESS,
  PROP_LOOP,
  PROP_RETRIEVE_SENDER_ADDRESS,
  PROP_MTU,
  PROP_SOCKET_TIMESTAMP,
};

static void gst_udpsrc_uri_handler_init (gpointer g_iface, gpointer iface_data);

static GstCaps *gst_udpsrc_getcaps (GstBaseSrc * src, GstCaps * filter);
static gboolean gst_udpsrc_close (GstUDPSrc * src);
static gboolean gst_udpsrc_unlock (GstBaseSrc * bsrc);
static gboolean gst_udpsrc_unlock_stop (GstBaseSrc * bsrc);
static GstFlowReturn gst_udpsrc_fill (GstPushSrc * psrc, GstBuffer * outbuf);

static void gst_udpsrc_finalize (GObject * object);

static void gst_udpsrc_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_udpsrc_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static GstStateChangeReturn gst_udpsrc_change_state (GstElement * element,
    GstStateChange transition);

#define gst_udpsrc_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstUDPSrc, gst_udpsrc, GST_TYPE_PUSH_SRC,
    G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_udpsrc_uri_handler_init));
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (udpsrc, "udpsrc", GST_RANK_NONE,
    GST_TYPE_UDPSRC, udp_element_init (plugin));
+
+static void
+gst_udpsrc_class_init (GstUDPSrcClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+ GstBaseSrcClass *gstbasesrc_class;
+ GstPushSrcClass *gstpushsrc_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+ gstbasesrc_class = (GstBaseSrcClass *) klass;
+ gstpushsrc_class = (GstPushSrcClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (udpsrc_debug, "udpsrc", 0, "UDP src");
+
+#ifdef IP_PKTINFO
+ GST_TYPE_IP_PKTINFO_MESSAGE;
+#endif
+#ifdef IPV6_PKTINFO
+ GST_TYPE_IPV6_PKTINFO_MESSAGE;
+#endif
+#ifdef IP_RECVDSTADDR
+ GST_TYPE_IP_RECVDSTADDR_MESSAGE;
+#endif
+#ifdef SO_TIMESTAMPNS
+ GST_TYPE_SOCKET_TIMESTAMP_MESSAGE;
+#endif
+
+ gobject_class->set_property = gst_udpsrc_set_property;
+ gobject_class->get_property = gst_udpsrc_get_property;
+ gobject_class->finalize = gst_udpsrc_finalize;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_PORT,
+ g_param_spec_int ("port", "Port",
+ "The port to receive the packets from, 0=allocate", 0, G_MAXUINT16,
+ UDP_DEFAULT_PORT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /* FIXME 2.0: Remove multicast-group property */
+#ifndef GST_REMOVE_DEPRECATED
+ g_object_class_install_property (gobject_class, PROP_MULTICAST_GROUP,
+ g_param_spec_string ("multicast-group", "Multicast Group",
+ "The Address of multicast group to join. (DEPRECATED: "
+ "Use address property instead)", UDP_DEFAULT_MULTICAST_GROUP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | G_PARAM_DEPRECATED));
+#endif
+ g_object_class_install_property (gobject_class, PROP_MULTICAST_IFACE,
+ g_param_spec_string ("multicast-iface", "Multicast Interface",
+ "The network interface on which to join the multicast group."
+ "This allows multiple interfaces separated by comma. (\"eth0,eth1\")",
+ UDP_DEFAULT_MULTICAST_IFACE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_URI,
+ g_param_spec_string ("uri", "URI",
+ "URI in the form of udp://multicast_group:port", UDP_DEFAULT_URI,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CAPS,
+ g_param_spec_boxed ("caps", "Caps",
+ "The caps of the source pad", GST_TYPE_CAPS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SOCKET,
+ g_param_spec_object ("socket", "Socket",
+ "Socket to use for UDP reception. (NULL == allocate)",
+ G_TYPE_SOCKET, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BUFFER_SIZE,
+ g_param_spec_int ("buffer-size", "Buffer Size",
+ "Size of the kernel receive buffer in bytes, 0=default", 0, G_MAXINT,
+ UDP_DEFAULT_BUFFER_SIZE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TIMEOUT,
+ g_param_spec_uint64 ("timeout", "Timeout",
+ "Post a message after timeout nanoseconds (0 = disabled)", 0,
+ G_MAXUINT64, UDP_DEFAULT_TIMEOUT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (G_OBJECT_CLASS (klass),
+ PROP_SKIP_FIRST_BYTES, g_param_spec_int ("skip-first-bytes",
+ "Skip first bytes", "number of bytes to skip for each udp packet", 0,
+ G_MAXINT, UDP_DEFAULT_SKIP_FIRST_BYTES,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_CLOSE_SOCKET,
+ g_param_spec_boolean ("close-socket", "Close socket",
+ "Close socket if passed as property on state change",
+ UDP_DEFAULT_CLOSE_SOCKET,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_USED_SOCKET,
+ g_param_spec_object ("used-socket", "Socket Handle",
+ "Socket currently in use for UDP reception. (NULL = no socket)",
+ G_TYPE_SOCKET, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_AUTO_MULTICAST,
+ g_param_spec_boolean ("auto-multicast", "Auto Multicast",
+ "Automatically join/leave multicast groups",
+ UDP_DEFAULT_AUTO_MULTICAST,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_REUSE,
+ g_param_spec_boolean ("reuse", "Reuse", "Enable reuse of the port",
+ UDP_DEFAULT_REUSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_ADDRESS,
+ g_param_spec_string ("address", "Address",
+ "Address to receive packets for. This is equivalent to the "
+ "multicast-group property for now", UDP_DEFAULT_MULTICAST_GROUP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstUDPSrc:loop:
+ *
+ * Can be used to disable multicast loopback.
+ *
+ * Since: 1.8
+ */
+ g_object_class_install_property (gobject_class, PROP_LOOP,
+ g_param_spec_boolean ("loop", "Multicast Loopback",
+ "Used for setting the multicast loop parameter. TRUE = enable,"
+ " FALSE = disable", UDP_DEFAULT_LOOP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstUDPSrc:retrieve-sender-address:
+ *
+ * Whether to retrieve the sender address and add it to the buffers as
+ * meta. Disabling this might result in minor performance improvements
+ * in certain scenarios.
+ *
+ * Since: 1.10
+ */
+ g_object_class_install_property (gobject_class, PROP_RETRIEVE_SENDER_ADDRESS,
+ g_param_spec_boolean ("retrieve-sender-address",
+ "Retrieve Sender Address",
+ "Whether to retrieve the sender address and add it to buffers as "
+ "meta. Disabling this might result in minor performance improvements "
+ "in certain scenarios", UDP_DEFAULT_RETRIEVE_SENDER_ADDRESS,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ /**
+ * GstUDPSrc:mtu:
+ *
+ * Maximum expected packet size. This directly defines the allocation
+ * size of the receive buffer pool.
+ *
+ * In case more data is received, a new #GstMemory is appended to the
+ * output buffer, ensuring no data is lost, this however leads to that
+ * buffer being freed and reallocated.
+ *
+ * Since: 1.14
+ */
+ g_object_class_install_property (gobject_class, PROP_MTU,
+ g_param_spec_uint ("mtu", "Expected Maximum Transmission Unit",
+ "Maximum expected packet size. This directly defines the allocation"
+ "size of the receive buffer pool.",
+ 0, G_MAXINT, UDP_DEFAULT_MTU,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ /**
+ * GstUDPSrc:socket-timestamp:
+ *
+ * Can be used to read the timestamp on incoming buffers using socket
+ * control messages and set as the DTS.
+ *
+ * Since: 1.20
+ */
+ g_object_class_install_property (gobject_class, PROP_SOCKET_TIMESTAMP,
+ g_param_spec_enum ("socket-timestamp",
+ "Use Socket Control Message Timestamp for DTS",
+ "Used for adding alternative timestamp using SO_TIMESTAMP.",
+ GST_SOCKET_TIMESTAMP_MODE, GST_SOCKET_TIMESTAMP_MODE_REALTIME,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_add_static_pad_template (gstelement_class, &src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class,
+ "UDP packet receiver", "Source/Network",
+ "Receive data over the network via UDP",
+ "Wim Taymans <wim@fluendo.com>, "
+ "Thijs Vermeir <thijs.vermeir@barco.com>");
+
+ gstelement_class->change_state = gst_udpsrc_change_state;
+
+ gstbasesrc_class->unlock = gst_udpsrc_unlock;
+ gstbasesrc_class->unlock_stop = gst_udpsrc_unlock_stop;
+ gstbasesrc_class->get_caps = gst_udpsrc_getcaps;
+ gstbasesrc_class->decide_allocation = gst_udpsrc_decide_allocation;
+
+ gstpushsrc_class->fill = gst_udpsrc_fill;
+
+ gst_type_mark_as_plugin_api (GST_TYPE_SOCKET_TIMESTAMP_MODE, 0);
+}
+
+static void
+gst_udpsrc_init (GstUDPSrc * udpsrc)
+{
+ udpsrc->uri =
+ g_strdup_printf ("udp://%s:%u", UDP_DEFAULT_MULTICAST_GROUP,
+ UDP_DEFAULT_PORT);
+
+ udpsrc->address = g_strdup (UDP_DEFAULT_MULTICAST_GROUP);
+ udpsrc->port = UDP_DEFAULT_PORT;
+ udpsrc->socket = UDP_DEFAULT_SOCKET;
+ udpsrc->multi_iface = g_strdup (UDP_DEFAULT_MULTICAST_IFACE);
+ udpsrc->buffer_size = UDP_DEFAULT_BUFFER_SIZE;
+ udpsrc->timeout = UDP_DEFAULT_TIMEOUT;
+ udpsrc->skip_first_bytes = UDP_DEFAULT_SKIP_FIRST_BYTES;
+ udpsrc->close_socket = UDP_DEFAULT_CLOSE_SOCKET;
+ udpsrc->external_socket = (udpsrc->socket != NULL);
+ udpsrc->auto_multicast = UDP_DEFAULT_AUTO_MULTICAST;
+ udpsrc->used_socket = UDP_DEFAULT_USED_SOCKET;
+ udpsrc->reuse = UDP_DEFAULT_REUSE;
+ udpsrc->loop = UDP_DEFAULT_LOOP;
+ udpsrc->retrieve_sender_address = UDP_DEFAULT_RETRIEVE_SENDER_ADDRESS;
+ udpsrc->mtu = UDP_DEFAULT_MTU;
+
+ /* configure basesrc to be a live source */
+ gst_base_src_set_live (GST_BASE_SRC (udpsrc), TRUE);
+ /* make basesrc output a segment in time */
+ gst_base_src_set_format (GST_BASE_SRC (udpsrc), GST_FORMAT_TIME);
+ /* make basesrc set timestamps on outgoing buffers based on the running_time
+ * when they were captured */
+ gst_base_src_set_do_timestamp (GST_BASE_SRC (udpsrc), TRUE);
+}
+
+static void
+gst_udpsrc_finalize (GObject * object)
+{
+ GstUDPSrc *udpsrc;
+
+ udpsrc = GST_UDPSRC (object);
+
+ if (udpsrc->caps)
+ gst_caps_unref (udpsrc->caps);
+ udpsrc->caps = NULL;
+
+ g_free (udpsrc->multi_iface);
+ udpsrc->multi_iface = NULL;
+
+ g_free (udpsrc->uri);
+ udpsrc->uri = NULL;
+
+ g_free (udpsrc->address);
+ udpsrc->address = NULL;
+
+ if (udpsrc->socket)
+ g_object_unref (udpsrc->socket);
+ udpsrc->socket = NULL;
+
+ if (udpsrc->used_socket)
+ g_object_unref (udpsrc->used_socket);
+ udpsrc->used_socket = NULL;
+
+ if (udpsrc->extra_mem)
+ gst_memory_unref (udpsrc->extra_mem);
+ udpsrc->extra_mem = NULL;
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static GstCaps *
+gst_udpsrc_getcaps (GstBaseSrc * src, GstCaps * filter)
+{
+ GstUDPSrc *udpsrc;
+ GstCaps *caps, *result;
+
+ udpsrc = GST_UDPSRC (src);
+
+ GST_OBJECT_LOCK (src);
+ if ((caps = udpsrc->caps))
+ gst_caps_ref (caps);
+ GST_OBJECT_UNLOCK (src);
+
+ if (caps) {
+ if (filter) {
+ result = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ } else {
+ result = caps;
+ }
+ } else {
+ result = (filter) ? gst_caps_ref (filter) : gst_caps_new_any ();
+ }
+ return result;
+}
+
/* Create the GCancellable used to unblock the receive loop.
 * The pollfd itself is unused here; calling g_cancellable_make_pollfd
 * forces the cancellable into fd-based mode, and made_cancel_fd records
 * whether the fd must later be released in gst_udpsrc_free_cancellable. */
static void
gst_udpsrc_create_cancellable (GstUDPSrc * src)
{
  GPollFD pollfd;

  src->cancellable = g_cancellable_new ();
  src->made_cancel_fd = g_cancellable_make_pollfd (src->cancellable, &pollfd);
}
+
+static void
+gst_udpsrc_free_cancellable (GstUDPSrc * src)
+{
+ if (src->made_cancel_fd) {
+ g_cancellable_release_fd (src->cancellable);
+ src->made_cancel_fd = FALSE;
+ }
+ g_object_unref (src->cancellable);
+ src->cancellable = NULL;
+}
+
/* GstPushSrc::fill implementation.
 *
 * Waits (with optional timeout message posting) for a datagram on
 * used_socket and reads it into @outbuf. A second, MAX_IPV4_UDP_PACKET_SIZE
 * spill-over GstMemory is kept around so packets larger than the mtu are
 * not truncated; when that happens the extra memory is appended to @outbuf.
 * In multicast mode (when the kernel can't filter for us) the packet's
 * destination address from control messages is checked and foreign packets
 * are dropped. Optionally the SCM_TIMESTAMPNS socket timestamp is converted
 * to a pipeline DTS and the sender address is attached as net-address meta.
 *
 * Returns GST_FLOW_OK, GST_FLOW_FLUSHING when unlocked/cancelled, or
 * GST_FLOW_ERROR on real failures.
 */
static GstFlowReturn
gst_udpsrc_fill (GstPushSrc * psrc, GstBuffer * outbuf)
{
  GstUDPSrc *udpsrc;
  GSocketAddress *saddr = NULL;
  GSocketAddress **p_saddr;
  gint flags = G_SOCKET_MSG_NONE;
  gboolean try_again;
  GError *err = NULL;
  gssize res;
  gsize offset;
  GSocketControlMessage **msgs = NULL;
  GSocketControlMessage ***p_msgs;
  gint n_msgs = 0, i;
  GstMapInfo info;
  GstMapInfo extra_info;
  GInputVector ivec[2];

  udpsrc = GST_UDPSRC_CAST (psrc);

  /* optimization: use messages only in multicast mode and
   * if we can't let the kernel do the filtering for us */
  p_msgs =
      (g_inet_address_get_is_multicast (g_inet_socket_address_get_address
          (udpsrc->addr))) ? &msgs : NULL;
#ifdef IP_MULTICAST_ALL
  /* with IP_MULTICAST_ALL the kernel filters IPv4 multicast for us */
  if (g_inet_address_get_family (g_inet_socket_address_get_address
          (udpsrc->addr)) == G_SOCKET_FAMILY_IPV4)
    p_msgs = NULL;
#endif
#ifdef SO_TIMESTAMPNS
  /* control messages are also needed to read the socket timestamp */
  if (udpsrc->socket_timestamp_mode == GST_SOCKET_TIMESTAMP_MODE_REALTIME)
    p_msgs = &msgs;
#endif

  /* Retrieve sender address unless we've been configured not to do so */
  p_saddr = (udpsrc->retrieve_sender_address) ? &saddr : NULL;

  if (!gst_buffer_map (outbuf, &info, GST_MAP_READWRITE))
    goto buffer_map_error;

  /* first input vector: the pool buffer of mtu bytes */
  ivec[0].buffer = info.data;
  ivec[0].size = info.size;

  /* Prepare memory in case the data size exceeds mtu */
  if (udpsrc->extra_mem == NULL) {
    GstBufferPool *pool;
    GstStructure *config;
    GstAllocator *allocator = NULL;
    GstAllocationParams params;

    pool = gst_base_src_get_buffer_pool (GST_BASE_SRC_CAST (psrc));
    config = gst_buffer_pool_get_config (pool);
    gst_buffer_pool_config_get_allocator (config, &allocator, &params);

    udpsrc->extra_mem =
        gst_allocator_alloc (allocator, MAX_IPV4_UDP_PACKET_SIZE, &params);

    gst_object_unref (pool);
    gst_structure_free (config);
    if (allocator)
      gst_object_unref (allocator);
  }

  if (!gst_memory_map (udpsrc->extra_mem, &extra_info, GST_MAP_READWRITE))
    goto memory_map_error;

  /* second input vector: spill-over for oversized packets */
  ivec[1].buffer = extra_info.data;
  ivec[1].size = extra_info.size;

retry:
  /* drop any sender address from a previous (retried) receive */
  if (saddr != NULL) {
    g_object_unref (saddr);
    saddr = NULL;
  }

  /* wait until the socket is readable, posting a timeout element message
   * every udpsrc->timeout nanoseconds while nothing arrives */
  do {
    gint64 timeout;

    try_again = FALSE;

    if (udpsrc->timeout)
      timeout = udpsrc->timeout / 1000;
    else
      timeout = -1;

    GST_LOG_OBJECT (udpsrc, "doing select, timeout %" G_GINT64_FORMAT, timeout);

    if (!g_socket_condition_timed_wait (udpsrc->used_socket, G_IO_IN | G_IO_PRI,
            timeout, udpsrc->cancellable, &err)) {
      if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY)
          || g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
        goto stopped;
      } else if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_TIMED_OUT)) {
        g_clear_error (&err);
        /* timeout, post element message */
        gst_element_post_message (GST_ELEMENT_CAST (udpsrc),
            gst_message_new_element (GST_OBJECT_CAST (udpsrc),
                gst_structure_new ("GstUDPSrcTimeout",
                    "timeout", G_TYPE_UINT64, udpsrc->timeout, NULL)));
      } else {
        goto select_error;
      }

      try_again = TRUE;
    }
  } while (G_UNLIKELY (try_again));

  res =
      g_socket_receive_message (udpsrc->used_socket, p_saddr, ivec, 2,
      p_msgs, &n_msgs, &flags, udpsrc->cancellable, &err);

  if (G_UNLIKELY (res < 0)) {
    /* G_IO_ERROR_HOST_UNREACHABLE for a UDP socket means that a packet sent
     * with udpsink generated a "port unreachable" ICMP response. We ignore
     * that and try again.
     * On Windows we get G_IO_ERROR_CONNECTION_CLOSED instead */
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_HOST_UNREACHABLE) ||
        g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CONNECTION_CLOSED)) {
      g_clear_error (&err);
      goto retry;
    }
    goto receive_error;
  }

  /* Retry if multicast and the destination address is not ours. We don't want
   * to receive arbitrary packets */
  if (p_msgs) {
    GInetAddress *iaddr = g_inet_socket_address_get_address (udpsrc->addr);
    gboolean skip_packet = FALSE;
    gsize iaddr_size = g_inet_address_get_native_size (iaddr);
    const guint8 *iaddr_bytes = g_inet_address_to_bytes (iaddr);

    for (i = 0; i < n_msgs && !skip_packet; i++) {
#ifdef IP_PKTINFO
      if (GST_IS_IP_PKTINFO_MESSAGE (msgs[i])) {
        GstIPPktinfoMessage *msg = GST_IP_PKTINFO_MESSAGE (msgs[i]);

        /* skip packets whose destination differs from our address */
        if (sizeof (msg->addr) == iaddr_size
            && memcmp (iaddr_bytes, &msg->addr, sizeof (msg->addr)))
          skip_packet = TRUE;
      }
#endif
#ifdef IPV6_PKTINFO
      if (GST_IS_IPV6_PKTINFO_MESSAGE (msgs[i])) {
        GstIPV6PktinfoMessage *msg = GST_IPV6_PKTINFO_MESSAGE (msgs[i]);

        if (sizeof (msg->addr) == iaddr_size
            && memcmp (iaddr_bytes, &msg->addr, sizeof (msg->addr)))
          skip_packet = TRUE;
      }
#endif
#ifdef IP_RECVDSTADDR
      if (GST_IS_IP_RECVDSTADDR_MESSAGE (msgs[i])) {
        GstIPRecvdstaddrMessage *msg = GST_IP_RECVDSTADDR_MESSAGE (msgs[i]);

        if (sizeof (msg->addr) == iaddr_size
            && memcmp (iaddr_bytes, &msg->addr, sizeof (msg->addr)))
          skip_packet = TRUE;
      }
#endif
#ifdef SO_TIMESTAMPNS
      if (GST_IS_SOCKET_TIMESTAMP_MESSAGE (msgs[i])) {
        GstSocketTimestampMessage *msg = GST_SOCKET_TIMESTAMP_MESSAGE (msgs[i]);
        GstClock *clock;
        GstClockTime socket_ts;

        socket_ts = GST_TIMESPEC_TO_TIME (msg->socket_ts);
        GST_TRACE_OBJECT (udpsrc,
            "Got SCM_TIMESTAMPNS %" GST_TIME_FORMAT " in msg",
            GST_TIME_ARGS (socket_ts));

        clock = gst_element_get_clock (GST_ELEMENT_CAST (udpsrc));
        if (clock != NULL) {
          gint64 adjust_dts, cur_sys_time, delta;
          GstClockTime base_time, cur_gst_clk_time, running_time;

          /*
           * We use g_get_real_time as the time reference for SCM timestamps
           * is always CLOCK_REALTIME.
           */
          cur_sys_time = g_get_real_time () * GST_USECOND;
          cur_gst_clk_time = gst_clock_get_time (clock);

          delta = (gint64) cur_sys_time - (gint64) socket_ts;
          if (delta < 0) {
            /*
             * The current system time will always be greater than the SCM
             * timestamp as the packet would have been timestamped at least
             * some clock cycles before. If it is not, then the system time
             * was adjusted. Since we cannot rely on the delta calculation in
             * such a case, set the DTS to current pipeline clock when this
             * happens.
             */
            GST_LOG_OBJECT (udpsrc,
                "Current system time is behind SCM timestamp, setting DTS to pipeline clock");
            GST_BUFFER_DTS (outbuf) = cur_gst_clk_time;
          } else {
            base_time = gst_element_get_base_time (GST_ELEMENT_CAST (udpsrc));
            running_time = cur_gst_clk_time - base_time;
            adjust_dts = (gint64) running_time - delta;
            /*
             * If the system time was adjusted much further ahead, we might
             * end up with delta > cur_gst_clk_time. Set the DTS to current
             * pipeline clock for this scenario as well.
             */
            if (adjust_dts < 0) {
              GST_LOG_OBJECT (udpsrc,
                  "Current system time much ahead in time, setting DTS to pipeline clock");
              GST_BUFFER_DTS (outbuf) = cur_gst_clk_time;
            } else {
              GST_BUFFER_DTS (outbuf) = adjust_dts;
              GST_LOG_OBJECT (udpsrc, "Setting DTS to %" GST_TIME_FORMAT,
                  GST_TIME_ARGS (GST_BUFFER_DTS (outbuf)));
            }
          }
          g_object_unref (clock);
        } else {
          GST_ERROR_OBJECT (udpsrc,
              "Failed to get element clock, not setting DTS");
        }
      }
#endif
    }

    /* control messages are owned by us once received; release them */
    for (i = 0; i < n_msgs; i++) {
      g_object_unref (msgs[i]);
    }
    g_free (msgs);

    if (skip_packet) {
      GST_DEBUG_OBJECT (udpsrc,
          "Dropping packet for a different multicast address");
      goto retry;
    }
  }

  gst_buffer_unmap (outbuf, &info);
  gst_memory_unmap (udpsrc->extra_mem, &extra_info);

  /* If this is the case, the buffer will be freed once unreffed,
   * and the buffer pool will have to reallocate a new one.
   */
  if (res > udpsrc->mtu) {
    gst_buffer_append_memory (outbuf, udpsrc->extra_mem);
    udpsrc->extra_mem = NULL;
  }

  offset = udpsrc->skip_first_bytes;

  if (G_UNLIKELY (offset > 0 && res < offset))
    goto skip_error;

  gst_buffer_resize (outbuf, offset, res - offset);

  /* use buffer metadata so receivers can also track the address */
  if (saddr) {
    gst_buffer_add_net_address_meta (outbuf, saddr);
    g_object_unref (saddr);
    saddr = NULL;
  }

  GST_LOG_OBJECT (udpsrc, "read packet of %d bytes", (int) res);

  return GST_FLOW_OK;

  /* ERRORS */
buffer_map_error:
  {
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("Failed to map memory"));
    return GST_FLOW_ERROR;
  }
memory_map_error:
  {
    gst_buffer_unmap (outbuf, &info);
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("Failed to map memory"));
    return GST_FLOW_ERROR;
  }
select_error:
  {
    gst_buffer_unmap (outbuf, &info);
    gst_memory_unmap (udpsrc->extra_mem, &extra_info);
    GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
        ("select error: %s", err->message));
    g_clear_error (&err);
    return GST_FLOW_ERROR;
  }
stopped:
  {
    gst_buffer_unmap (outbuf, &info);
    gst_memory_unmap (udpsrc->extra_mem, &extra_info);
    GST_DEBUG ("stop called");
    g_clear_error (&err);
    return GST_FLOW_FLUSHING;
  }
receive_error:
  {
    gst_buffer_unmap (outbuf, &info);
    gst_memory_unmap (udpsrc->extra_mem, &extra_info);
    g_clear_object (&saddr);
    if (g_error_matches (err, G_IO_ERROR, G_IO_ERROR_BUSY) ||
        g_error_matches (err, G_IO_ERROR, G_IO_ERROR_CANCELLED)) {
      g_clear_error (&err);
      return GST_FLOW_FLUSHING;
    } else {
      GST_ELEMENT_ERROR (udpsrc, RESOURCE, READ, (NULL),
          ("receive error %" G_GSSIZE_FORMAT ": %s", res, err->message));
      g_clear_error (&err);
      return GST_FLOW_ERROR;
    }
  }
skip_error:
  {
    g_clear_object (&saddr);
    GST_ELEMENT_ERROR (udpsrc, STREAM, DECODE, (NULL),
        ("UDP buffer to small to skip header"));
    return GST_FLOW_ERROR;
  }
}
+
+static gboolean
+gst_udpsrc_set_uri (GstUDPSrc * src, const gchar * uri, GError ** error)
+{
+ gchar *address;
+ guint16 port;
+
+ if (!gst_udp_parse_uri (uri, &address, &port))
+ goto wrong_uri;
+
+ if (port == (guint16) - 1)
+ port = UDP_DEFAULT_PORT;
+
+ g_free (src->address);
+ src->address = address;
+ src->port = port;
+
+ g_free (src->uri);
+ src->uri = g_strdup (uri);
+
+ return TRUE;
+
+ /* ERRORS */
+wrong_uri:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, READ, (NULL),
+ ("error parsing uri %s", uri));
+ g_set_error_literal (error, GST_URI_ERROR, GST_URI_ERROR_BAD_URI,
+ "Could not parse UDP URI");
+ return FALSE;
+ }
+}
+
+static void
+gst_udpsrc_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+{
+ GstUDPSrc *udpsrc = GST_UDPSRC (object);
+
+ switch (prop_id) {
+ case PROP_BUFFER_SIZE:
+ udpsrc->buffer_size = g_value_get_int (value);
+ break;
+ case PROP_PORT:
+ udpsrc->port = g_value_get_int (value);
+ g_free (udpsrc->uri);
+ udpsrc->uri =
+ g_strdup_printf ("udp://%s:%u", udpsrc->address, udpsrc->port);
+ break;
+ case PROP_MULTICAST_GROUP:
+ case PROP_ADDRESS:
+ {
+ const gchar *group;
+
+ g_free (udpsrc->address);
+ if ((group = g_value_get_string (value)))
+ udpsrc->address = g_strdup (group);
+ else
+ udpsrc->address = g_strdup (UDP_DEFAULT_MULTICAST_GROUP);
+
+ g_free (udpsrc->uri);
+ udpsrc->uri =
+ g_strdup_printf ("udp://%s:%u", udpsrc->address, udpsrc->port);
+ break;
+ }
+ case PROP_MULTICAST_IFACE:
+ g_free (udpsrc->multi_iface);
+
+ if (g_value_get_string (value) == NULL)
+ udpsrc->multi_iface = g_strdup (UDP_DEFAULT_MULTICAST_IFACE);
+ else
+ udpsrc->multi_iface = g_value_dup_string (value);
+ break;
+ case PROP_URI:
+ gst_udpsrc_set_uri (udpsrc, g_value_get_string (value), NULL);
+ break;
+ case PROP_CAPS:
+ {
+ const GstCaps *new_caps_val = gst_value_get_caps (value);
+ GstCaps *new_caps;
+ GstCaps *old_caps;
+
+ if (new_caps_val == NULL) {
+ new_caps = gst_caps_new_any ();
+ } else {
+ new_caps = gst_caps_copy (new_caps_val);
+ }
+
+ GST_OBJECT_LOCK (udpsrc);
+ old_caps = udpsrc->caps;
+ udpsrc->caps = new_caps;
+ GST_OBJECT_UNLOCK (udpsrc);
+ if (old_caps)
+ gst_caps_unref (old_caps);
+
+ gst_pad_mark_reconfigure (GST_BASE_SRC_PAD (udpsrc));
+ break;
+ }
+ case PROP_SOCKET:
+ if (udpsrc->socket != NULL && udpsrc->socket != udpsrc->used_socket &&
+ udpsrc->close_socket) {
+ GError *err = NULL;
+
+ if (!g_socket_close (udpsrc->socket, &err)) {
+ GST_ERROR ("failed to close socket %p: %s", udpsrc->socket,
+ err->message);
+ g_clear_error (&err);
+ }
+ }
+ if (udpsrc->socket)
+ g_object_unref (udpsrc->socket);
+ udpsrc->socket = g_value_dup_object (value);
+ GST_DEBUG ("setting socket to %p", udpsrc->socket);
+ break;
+ case PROP_TIMEOUT:
+ udpsrc->timeout = g_value_get_uint64 (value);
+ break;
+ case PROP_SKIP_FIRST_BYTES:
+ udpsrc->skip_first_bytes = g_value_get_int (value);
+ break;
+ case PROP_CLOSE_SOCKET:
+ udpsrc->close_socket = g_value_get_boolean (value);
+ break;
+ case PROP_AUTO_MULTICAST:
+ udpsrc->auto_multicast = g_value_get_boolean (value);
+ break;
+ case PROP_REUSE:
+ udpsrc->reuse = g_value_get_boolean (value);
+ break;
+ case PROP_LOOP:
+ udpsrc->loop = g_value_get_boolean (value);
+ break;
+ case PROP_RETRIEVE_SENDER_ADDRESS:
+ udpsrc->retrieve_sender_address = g_value_get_boolean (value);
+ break;
+ case PROP_MTU:
+ udpsrc->mtu = g_value_get_uint (value);
+ break;
+ case PROP_SOCKET_TIMESTAMP:
+ udpsrc->socket_timestamp_mode = g_value_get_enum (value);
+ break;
+ default:
+ break;
+ }
+}
+
/* GObject::get_property — straight 1:1 mapping of property ids onto the
 * corresponding instance fields. */
static void
gst_udpsrc_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstUDPSrc *udpsrc = GST_UDPSRC (object);

  switch (prop_id) {
    case PROP_BUFFER_SIZE:
      g_value_set_int (value, udpsrc->buffer_size);
      break;
    case PROP_PORT:
      g_value_set_int (value, udpsrc->port);
      break;
    case PROP_MULTICAST_GROUP:
    case PROP_ADDRESS:
      /* both properties are backed by the same field */
      g_value_set_string (value, udpsrc->address);
      break;
    case PROP_MULTICAST_IFACE:
      g_value_set_string (value, udpsrc->multi_iface);
      break;
    case PROP_URI:
      g_value_set_string (value, udpsrc->uri);
      break;
    case PROP_CAPS:
      /* caps may be swapped concurrently from set_property; read under lock */
      GST_OBJECT_LOCK (udpsrc);
      gst_value_set_caps (value, udpsrc->caps);
      GST_OBJECT_UNLOCK (udpsrc);
      break;
    case PROP_SOCKET:
      g_value_set_object (value, udpsrc->socket);
      break;
    case PROP_TIMEOUT:
      g_value_set_uint64 (value, udpsrc->timeout);
      break;
    case PROP_SKIP_FIRST_BYTES:
      g_value_set_int (value, udpsrc->skip_first_bytes);
      break;
    case PROP_CLOSE_SOCKET:
      g_value_set_boolean (value, udpsrc->close_socket);
      break;
    case PROP_USED_SOCKET:
      g_value_set_object (value, udpsrc->used_socket);
      break;
    case PROP_AUTO_MULTICAST:
      g_value_set_boolean (value, udpsrc->auto_multicast);
      break;
    case PROP_REUSE:
      g_value_set_boolean (value, udpsrc->reuse);
      break;
    case PROP_LOOP:
      g_value_set_boolean (value, udpsrc->loop);
      break;
    case PROP_RETRIEVE_SENDER_ADDRESS:
      g_value_set_boolean (value, udpsrc->retrieve_sender_address);
      break;
    case PROP_MTU:
      g_value_set_uint (value, udpsrc->mtu);
      break;
    case PROP_SOCKET_TIMESTAMP:
      g_value_set_enum (value, udpsrc->socket_timestamp_mode);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
+
+static GInetAddress *
+gst_udpsrc_resolve (GstUDPSrc * src, const gchar * address)
+{
+ GInetAddress *addr;
+ GError *err = NULL;
+ GResolver *resolver;
+
+ addr = g_inet_address_new_from_string (address);
+ if (!addr) {
+ GList *results;
+
+ GST_DEBUG_OBJECT (src, "resolving IP address for host %s", address);
+ resolver = g_resolver_get_default ();
+ results =
+ g_resolver_lookup_by_name (resolver, address, src->cancellable, &err);
+ if (!results)
+ goto name_resolve;
+ addr = G_INET_ADDRESS (g_object_ref (results->data));
+
+ g_resolver_free_addresses (results);
+ g_object_unref (resolver);
+ }
+#ifndef GST_DISABLE_GST_DEBUG
+ {
+ gchar *ip = g_inet_address_to_string (addr);
+
+ GST_DEBUG_OBJECT (src, "IP address for host %s is %s", address, ip);
+ g_free (ip);
+ }
+#endif
+
+ return addr;
+
+name_resolve:
+ {
+ GST_WARNING_OBJECT (src, "Failed to resolve %s: %s", address, err->message);
+ g_clear_error (&err);
+ g_object_unref (resolver);
+ return NULL;
+ }
+}
+
/* Query the effective kernel receive-buffer size (SO_RCVBUF) of the socket
 * currently in use. Returns 0 if the option could not be read. */
static gint
gst_udpsrc_get_rcvbuf (GstUDPSrc * src)
{
  gint val = 0;

  /* read the value of the receive buffer. Note that on linux this returns
   * 2x the value we set because the kernel allocates extra memory for
   * metadata. The default on Linux is about 100K (which is about 50K
   * without metadata) */
  if (!g_socket_get_option (src->used_socket, SOL_SOCKET, SO_RCVBUF, &val,
          NULL)) {
    GST_DEBUG_OBJECT (src, "could not get udp buffer size");
    return 0;
  }
#ifdef __linux__
  /* Divide by 2 so that the number matches when we do get/set */
  val /= 2;
#endif

  return val;
}
+
+/* create a socket for sending to remote machine */
+static gboolean
+gst_udpsrc_open (GstUDPSrc * src)
+{
+ GInetAddress *addr, *bind_addr;
+ GSocketAddress *bind_saddr;
+ GError *err = NULL;
+
+ gst_udpsrc_create_cancellable (src);
+
+ if (src->socket == NULL) {
+ /* need to allocate a socket */
+ GST_DEBUG_OBJECT (src, "allocating socket for %s:%d", src->address,
+ src->port);
+
+ addr = gst_udpsrc_resolve (src, src->address);
+ if (!addr)
+ goto name_resolve;
+
+ if ((src->used_socket =
+ g_socket_new (g_inet_address_get_family (addr),
+ G_SOCKET_TYPE_DATAGRAM, G_SOCKET_PROTOCOL_UDP, &err)) == NULL)
+ goto no_socket;
+
+ src->external_socket = FALSE;
+
+ GST_DEBUG_OBJECT (src, "got socket %p", src->used_socket);
+
+ if (src->addr)
+ g_object_unref (src->addr);
+ src->addr =
+ G_INET_SOCKET_ADDRESS (g_inet_socket_address_new (addr, src->port));
+
+ GST_DEBUG_OBJECT (src, "binding on port %d", src->port);
+
+ /* For multicast, bind to ANY and join the multicast group later */
+ if (g_inet_address_get_is_multicast (addr))
+ bind_addr = g_inet_address_new_any (g_inet_address_get_family (addr));
+ else
+ bind_addr = G_INET_ADDRESS (g_object_ref (addr));
+
+ g_object_unref (addr);
+
+ bind_saddr = g_inet_socket_address_new (bind_addr, src->port);
+ g_object_unref (bind_addr);
+ if (!g_socket_bind (src->used_socket, bind_saddr, src->reuse, &err)) {
+ GST_ERROR_OBJECT (src, "%s: error binding to %s:%d", err->message,
+ src->address, src->port);
+ goto bind_error;
+ }
+
+ g_object_unref (bind_saddr);
+ g_socket_set_multicast_loopback (src->used_socket, src->loop);
+ } else {
+ GInetSocketAddress *local_addr;
+
+ GST_DEBUG_OBJECT (src, "using provided socket %p", src->socket);
+ /* we use the configured socket, try to get some info about it */
+ src->used_socket = G_SOCKET (g_object_ref (src->socket));
+ src->external_socket = TRUE;
+
+ local_addr =
+ G_INET_SOCKET_ADDRESS (g_socket_get_local_address (src->used_socket,
+ &err));
+ if (!local_addr)
+ goto getsockname_error;
+
+ addr = gst_udpsrc_resolve (src, src->address);
+ if (!addr)
+ goto name_resolve;
+
+ /* If bound to ANY and address points to a multicast address, make
+ * sure that address is not overridden with ANY but we have the
+ * opportunity later to join the multicast address. This ensures that we
+ * have the same behaviour as for sockets created by udpsrc */
+ if (!src->auto_multicast ||
+ !g_inet_address_get_is_any (g_inet_socket_address_get_address
+ (local_addr))
+ || !g_inet_address_get_is_multicast (addr)) {
+ g_object_unref (addr);
+ if (src->addr)
+ g_object_unref (src->addr);
+ src->addr = local_addr;
+ } else {
+ g_object_unref (local_addr);
+ if (src->addr)
+ g_object_unref (src->addr);
+ src->addr =
+ G_INET_SOCKET_ADDRESS (g_inet_socket_address_new (addr, src->port));
+ g_object_unref (addr);
+ }
+ }
+
+ {
+ gint val;
+ GError *opt_err = NULL;
+ gboolean force_rcvbuf G_GNUC_UNUSED = FALSE;
+
+ if (src->buffer_size != 0) {
+ GST_INFO_OBJECT (src, "setting udp buffer of %d bytes", src->buffer_size);
+ /* set buffer size, Note that on Linux this is typically limited to a
+ * maximum of around 100K. Also a minimum of 128 bytes is required on
+ * Linux. */
+ if (!g_socket_set_option (src->used_socket, SOL_SOCKET, SO_RCVBUF,
+ src->buffer_size, &opt_err)) {
+ GST_INFO_OBJECT (src,
+ "Could not create a buffer of requested %d bytes (%s) try forcing",
+ src->buffer_size, opt_err->message);
+ g_clear_error (&opt_err);
+ force_rcvbuf = TRUE;
+ }
+ }
+#if defined(SO_RCVBUFFORCE)
+ val = gst_udpsrc_get_rcvbuf (src);
+ if (val < src->buffer_size)
+ force_rcvbuf = TRUE;
+
+ if (force_rcvbuf) {
+ GST_INFO_OBJECT (src,
+ "forcibly setting udp buffer of %d bytes", src->buffer_size);
+
+ /* Will only work with CAP_NET_ADMIN privilege */
+ if (!g_socket_set_option (src->used_socket, SOL_SOCKET, SO_RCVBUFFORCE,
+ src->buffer_size, &opt_err)) {
+ GST_ELEMENT_WARNING (src, RESOURCE, SETTINGS, (NULL),
+ ("Could not create a buffer of requested %d bytes (%s). Need net.admin privilege?",
+ src->buffer_size, opt_err->message));
+ g_clear_error (&opt_err);
+ }
+ }
+#endif
+
+ val = gst_udpsrc_get_rcvbuf (src);
+ if (val < src->buffer_size)
+ GST_WARNING_OBJECT (src,
+ "have udp buffer of %d bytes while %d were requested",
+ val, src->buffer_size);
+ else
+ GST_INFO_OBJECT (src, "have udp buffer of %d bytes", val);
+ }
+
+ g_socket_set_broadcast (src->used_socket, TRUE);
+
+ if (src->auto_multicast
+ &&
+ g_inet_address_get_is_multicast (g_inet_socket_address_get_address
+ (src->addr))) {
+
+ if (src->multi_iface) {
+ GStrv multi_ifaces = g_strsplit (src->multi_iface, ",", -1);
+ gchar **ifaces = multi_ifaces;
+ while (*ifaces) {
+ g_strstrip (*ifaces);
+ GST_DEBUG_OBJECT (src, "joining multicast group %s interface %s",
+ src->address, *ifaces);
+ if (!g_socket_join_multicast_group (src->used_socket,
+ g_inet_socket_address_get_address (src->addr),
+ FALSE, *ifaces, &err)) {
+ g_strfreev (multi_ifaces);
+ goto membership;
+ }
+
+ ifaces++;
+ }
+ g_strfreev (multi_ifaces);
+ } else {
+ GST_DEBUG_OBJECT (src, "joining multicast group %s", src->address);
+ if (!g_socket_join_multicast_group (src->used_socket,
+ g_inet_socket_address_get_address (src->addr), FALSE, NULL, &err))
+ goto membership;
+ }
+
+ if (g_inet_address_get_family (g_inet_socket_address_get_address
+ (src->addr)) == G_SOCKET_FAMILY_IPV4) {
+#if defined(IP_MULTICAST_ALL)
+ if (!g_socket_set_option (src->used_socket, IPPROTO_IP, IP_MULTICAST_ALL,
+ 0, &err)) {
+ GST_WARNING_OBJECT (src, "Failed to disable IP_MULTICAST_ALL: %s",
+ err->message);
+ g_clear_error (&err);
+ }
+#elif defined(IP_PKTINFO)
+ if (!g_socket_set_option (src->used_socket, IPPROTO_IP, IP_PKTINFO, TRUE,
+ &err)) {
+ GST_WARNING_OBJECT (src, "Failed to enable IP_PKTINFO: %s",
+ err->message);
+ g_clear_error (&err);
+ }
+#elif defined(IP_RECVDSTADDR)
+ if (!g_socket_set_option (src->used_socket, IPPROTO_IP, IP_RECVDSTADDR,
+ TRUE, &err)) {
+ GST_WARNING_OBJECT (src, "Failed to enable IP_RECVDSTADDR: %s",
+ err->message);
+ g_clear_error (&err);
+ }
+#else
+#pragma message("No API available for getting IPv4 destination address")
+ GST_WARNING_OBJECT (src, "No API available for getting IPv4 destination "
+ "address, will receive packets for every destination to our port");
+#endif
+ } else
+ if (g_inet_address_get_family (g_inet_socket_address_get_address
+ (src->addr)) == G_SOCKET_FAMILY_IPV6) {
+#ifdef IPV6_PKTINFO
+#ifdef IPV6_RECVPKTINFO
+ if (!g_socket_set_option (src->used_socket, IPPROTO_IPV6,
+ IPV6_RECVPKTINFO, TRUE, &err)) {
+#else
+ if (!g_socket_set_option (src->used_socket, IPPROTO_IPV6, IPV6_PKTINFO,
+ TRUE, &err)) {
+#endif
+ GST_WARNING_OBJECT (src, "Failed to enable IPV6_PKTINFO: %s",
+ err->message);
+ g_clear_error (&err);
+ }
+#else
+#pragma message("No API available for getting IPv6 destination address")
+ GST_WARNING_OBJECT (src, "No API available for getting IPv6 destination "
+ "address, will receive packets for every destination to our port");
+#endif
+ }
+ }
+
+ if (src->socket_timestamp_mode == GST_SOCKET_TIMESTAMP_MODE_REALTIME) {
+#ifdef SO_TIMESTAMPNS
+ if (!g_socket_set_option (src->used_socket, SOL_SOCKET, SO_TIMESTAMPNS,
+ TRUE, &err)) {
+ GST_WARNING_OBJECT (src,
+ "Failed to enable socket control message timestamps: %s",
+ err->message);
+ g_clear_error (&err);
+ src->socket_timestamp_mode = GST_SOCKET_TIMESTAMP_MODE_DISABLED;
+ g_object_notify (G_OBJECT (src), "socket-timestamp");
+ } else {
+ GST_LOG_OBJECT (src, "Socket control message timestamps enabled");
+ }
+ }
+#else
+ GST_WARNING_OBJECT (src,
+ "socket-timestamp was requested but SO_TIMESTAMPNS is not defined");
+ }
+#endif
+
+ /* NOTE: sockaddr_in.sin_port works for ipv4 and ipv6 because sin_port
+ * follows ss_family on both */
+ {
+ GInetSocketAddress *addr;
+ guint16 port;
+
+ addr =
+ G_INET_SOCKET_ADDRESS (g_socket_get_local_address (src->used_socket,
+ &err));
+ if (!addr)
+ goto getsockname_error;
+
+ port = g_inet_socket_address_get_port (addr);
+ GST_DEBUG_OBJECT (src, "bound, on port %d", port);
+ if (port != src->port) {
+ src->port = port;
+ GST_DEBUG_OBJECT (src, "notifying port %d", port);
+ g_object_notify (G_OBJECT (src), "port");
+ }
+ g_object_unref (addr);
+ }
+
+ return TRUE;
+
+ /* ERRORS */
+name_resolve:
+ {
+ return FALSE;
+ }
+no_socket:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, OPEN_READ, (NULL),
+ ("no socket error: %s", err->message));
+ g_clear_error (&err);
+ g_object_unref (addr);
+ return FALSE;
+ }
+bind_error:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("bind failed: %s", err->message));
+ g_clear_error (&err);
+ g_object_unref (bind_saddr);
+ gst_udpsrc_close (src);
+ return FALSE;
+ }
+membership:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("could not add membership: %s", err->message));
+ g_clear_error (&err);
+ gst_udpsrc_close (src);
+ return FALSE;
+ }
+getsockname_error:
+ {
+ GST_ELEMENT_ERROR (src, RESOURCE, SETTINGS, (NULL),
+ ("getsockname failed: %s", err->message));
+ g_clear_error (&err);
+ gst_udpsrc_close (src);
+ return FALSE;
+ }
+}
+
+/* GstBaseSrc::unlock vmethod: interrupt any blocking socket operation so
+ * the streaming thread can be flushed.  Cancelling the GCancellable wakes
+ * up pending receives on used_socket. */
+static gboolean
+gst_udpsrc_unlock (GstBaseSrc * bsrc)
+{
+  GstUDPSrc *udpsrc = GST_UDPSRC (bsrc);
+
+  GST_LOG_OBJECT (udpsrc, "Flushing");
+  g_cancellable_cancel (udpsrc->cancellable);
+
+  return TRUE;
+}
+
+/* GstBaseSrc::unlock_stop vmethod: leave flushing mode again.  The old
+ * (cancelled) cancellable is dropped and a fresh one installed so that
+ * subsequent receives are not interrupted immediately. */
+static gboolean
+gst_udpsrc_unlock_stop (GstBaseSrc * bsrc)
+{
+  GstUDPSrc *udpsrc = GST_UDPSRC (bsrc);
+
+  GST_LOG_OBJECT (udpsrc, "No longer flushing");
+
+  gst_udpsrc_free_cancellable (udpsrc);
+  gst_udpsrc_create_cancellable (udpsrc);
+
+  return TRUE;
+}
+
+/* Tear down the receive socket set up by gst_udpsrc_open():
+ *  - leave any multicast group(s) we joined (per interface when
+ *    multi_iface is a comma-separated list),
+ *  - close the socket unless it was supplied externally with
+ *    close_socket disabled,
+ *  - drop our references to the socket and bound address,
+ *  - free the cancellable.
+ * Always returns TRUE; leave/close failures are only logged. */
+static gboolean
+gst_udpsrc_close (GstUDPSrc * src)
+{
+  GST_DEBUG ("closing sockets");
+
+  if (src->used_socket) {
+    /* Only leave groups that gst_udpsrc_open() auto-joined */
+    if (src->auto_multicast
+        &&
+        g_inet_address_get_is_multicast (g_inet_socket_address_get_address
+            (src->addr))) {
+      GError *err = NULL;
+
+      if (src->multi_iface) {
+        /* mirror the join logic: one leave per configured interface */
+        GStrv multi_ifaces = g_strsplit (src->multi_iface, ",", -1);
+        gchar **ifaces = multi_ifaces;
+        while (*ifaces) {
+          g_strstrip (*ifaces);
+          GST_DEBUG_OBJECT (src, "leaving multicast group %s interface %s",
+              src->address, *ifaces);
+          if (!g_socket_leave_multicast_group (src->used_socket,
+                  g_inet_socket_address_get_address (src->addr),
+                  FALSE, *ifaces, &err)) {
+            /* non-fatal: we are shutting down anyway */
+            GST_ERROR_OBJECT (src, "Failed to leave multicast group: %s",
+                err->message);
+            g_clear_error (&err);
+          }
+          ifaces++;
+        }
+        g_strfreev (multi_ifaces);
+
+      } else {
+        GST_DEBUG_OBJECT (src, "leaving multicast group %s", src->address);
+        if (!g_socket_leave_multicast_group (src->used_socket,
+                g_inet_socket_address_get_address (src->addr), FALSE,
+                NULL, &err)) {
+          GST_ERROR_OBJECT (src, "Failed to leave multicast group: %s",
+              err->message);
+          g_clear_error (&err);
+        }
+      }
+    }
+
+    /* Sockets we created ourselves are always closed; external sockets
+     * only when the close-socket property says so */
+    if (src->close_socket || !src->external_socket) {
+      GError *err = NULL;
+      if (!g_socket_close (src->used_socket, &err)) {
+        GST_ERROR_OBJECT (src, "Failed to close socket: %s", err->message);
+        g_clear_error (&err);
+      }
+    }
+
+    g_object_unref (src->used_socket);
+    src->used_socket = NULL;
+    g_object_unref (src->addr);
+    src->addr = NULL;
+  }
+
+  gst_udpsrc_free_cancellable (src);
+
+  return TRUE;
+}
+
+
+/* GstElement::change_state vmethod: open the socket when going
+ * NULL->READY (before chaining up) and close it on READY->NULL (after
+ * chaining up); all other transitions are handled by the parent class. */
+static GstStateChangeReturn
+gst_udpsrc_change_state (GstElement * element, GstStateChange transition)
+{
+  GstUDPSrc *src;
+  GstStateChangeReturn result;
+
+  src = GST_UDPSRC (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      /* acquire the socket up-front so failures surface early */
+      if (!gst_udpsrc_open (src))
+        goto open_failed;
+      break;
+    default:
+      break;
+  }
+  if ((result =
+          GST_ELEMENT_CLASS (parent_class)->change_state (element,
+              transition)) == GST_STATE_CHANGE_FAILURE)
+    goto failure;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      gst_udpsrc_close (src);
+      break;
+    default:
+      break;
+  }
+  return result;
+  /* ERRORS */
+open_failed:
+  {
+    GST_DEBUG_OBJECT (src, "failed to open socket");
+    return GST_STATE_CHANGE_FAILURE;
+  }
+failure:
+  {
+    GST_DEBUG_OBJECT (src, "parent failed state change");
+    return result;
+  }
+}
+
+
+
+
+/*** GSTURIHANDLER INTERFACE *************************************************/
+
+/* GstURIHandler::get_type: udpsrc is a source-side URI handler */
+static GstURIType
+gst_udpsrc_uri_get_type (GType type)
+{
+  return GST_URI_SRC;
+}
+
+/* GstURIHandler::get_protocols: only the "udp" scheme is supported */
+static const gchar *const *
+gst_udpsrc_uri_get_protocols (GType type)
+{
+  static const gchar *protocols[] = { "udp", NULL };
+
+  return protocols;
+}
+
+/* GstURIHandler::get_uri: hand out a newly-allocated copy of the element's
+ * current URI string (caller owns the returned memory). */
+static gchar *
+gst_udpsrc_uri_get_uri (GstURIHandler * handler)
+{
+  GstUDPSrc *udpsrc = GST_UDPSRC (handler);
+
+  return g_strdup (udpsrc->uri);
+}
+
+/* GstURIHandler::set_uri: delegate parsing/validation to
+ * gst_udpsrc_set_uri() (defined elsewhere in this file) */
+static gboolean
+gst_udpsrc_uri_set_uri (GstURIHandler * handler, const gchar * uri,
+    GError ** error)
+{
+  return gst_udpsrc_set_uri (GST_UDPSRC (handler), uri, error);
+}
+
+/* Interface-init function: populate the GstURIHandler vtable with the
+ * udpsrc implementations above. */
+static void
+gst_udpsrc_uri_handler_init (gpointer g_iface, gpointer iface_data)
+{
+  GstURIHandlerInterface *uri_iface = (GstURIHandlerInterface *) g_iface;
+
+  uri_iface->get_type = gst_udpsrc_uri_get_type;
+  uri_iface->get_protocols = gst_udpsrc_uri_get_protocols;
+  uri_iface->get_uri = gst_udpsrc_uri_get_uri;
+  uri_iface->set_uri = gst_udpsrc_uri_set_uri;
+}
diff --git a/gst/udp/gstudpsrc.h b/gst/udp/gstudpsrc.h
new file mode 100644
index 0000000000..1f7552481b
--- /dev/null
+++ b/gst/udp/gstudpsrc.h
@@ -0,0 +1,111 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_UDPSRC_H__
+#define __GST_UDPSRC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstpushsrc.h>
+#include <gio/gio.h>
+
+G_BEGIN_DECLS
+
+#include "gstudpnetutils.h"
+
+#define GST_TYPE_UDPSRC \
+ (gst_udpsrc_get_type())
+#define GST_UDPSRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_UDPSRC,GstUDPSrc))
+#define GST_UDPSRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_UDPSRC,GstUDPSrcClass))
+#define GST_IS_UDPSRC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_UDPSRC))
+#define GST_IS_UDPSRC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_UDPSRC))
+#define GST_UDPSRC_CAST(obj) ((GstUDPSrc *)(obj))
+
+typedef struct _GstUDPSrc GstUDPSrc;
+typedef struct _GstUDPSrcClass GstUDPSrcClass;
+
+
+/**
+ * GstSocketTimestampMode:
+ * @GST_SOCKET_TIMESTAMP_MODE_DISABLED: Disable additional timestamps
+ * @GST_SOCKET_TIMESTAMP_MODE_REALTIME: Timestamp with realtime clock (nsec
+ * resolution, may not be monotonic)
+ *
+ * Since: 1.20
+ */
+typedef enum
+{
+ GST_SOCKET_TIMESTAMP_MODE_DISABLED = 0,
+ GST_SOCKET_TIMESTAMP_MODE_REALTIME
+} GstSocketTimestampMode;
+
+/* Instance structure.  Fields marked "hot" appear to be the ones read on
+ * the per-packet receive path — TODO confirm against the create() vmethod. */
+struct _GstUDPSrc {
+  GstPushSrc parent;
+
+  /* our sockets */
+  GSocket *used_socket;      /* hot; socket actually used for receiving */
+  GInetSocketAddress *addr;  /* hot; local address the socket is bound to */
+
+  GCancellable *cancellable; /* hot; cancelled by unlock() to abort receives */
+
+  /* properties */
+  gint skip_first_bytes;     /* hot */
+  guint64 timeout;           /* hot */
+  gboolean retrieve_sender_address; /* hot */
+  gchar *address;            /* address/group to bind/join */
+  gint port;                 /* updated with the actually-bound port */
+  gchar *multi_iface;        /* comma-separated multicast interface list */
+  GstCaps *caps;
+  gint buffer_size;          /* requested SO_RCVBUF size, 0 = default */
+  GSocket *socket;           /* externally provided socket, if any */
+  gboolean close_socket;     /* close external socket on shutdown? */
+  gboolean auto_multicast;   /* join/leave multicast groups automatically */
+  gboolean reuse;
+  gboolean loop;             /* multicast loopback */
+  GstSocketTimestampMode socket_timestamp_mode; /* SO_TIMESTAMPNS mode */
+
+  /* stats */
+  guint max_size;
+
+  gboolean external_socket;  /* TRUE when using the "socket" property */
+  gboolean made_cancel_fd;
+
+  /* Initial size of buffers in the buffer pool */
+  guint mtu;
+
+  /* Extra memory for buffers with a size superior to max_packet_size */
+  GstMemory *extra_mem;
+
+  gchar *uri;                /* string form handed out by the URI handler */
+};
+
+struct _GstUDPSrcClass {
+ GstPushSrcClass parent_class;
+};
+
+GType gst_udpsrc_get_type(void);
+
+G_END_DECLS
+
+
+#endif /* __GST_UDPSRC_H__ */
diff --git a/gst/udp/meson.build b/gst/udp/meson.build
new file mode 100644
index 0000000000..26ee7a5111
--- /dev/null
+++ b/gst/udp/meson.build
@@ -0,0 +1,20 @@
+# Source files making up the "udp" plugin: the udpsrc/udpsink elements,
+# the multiudpsink/dynudpsink variants and shared socket helpers.
+udp_sources = [
+  'gstudp.c',
+  'gstudpelement.c',
+  'gstudpsrc.c',
+  'gstudpsink.c',
+  'gstmultiudpsink.c',
+  'gstdynudpsink.c',
+  'gstudpnetutils.c'
+]
+
+# Build the plugin shared library; gio_dep provides the GSocket API used
+# throughout the sources.
+gstudp = library('gstudp',
+  udp_sources,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc, libsinc],
+  dependencies : [gst_dep, gstbase_dep, gstnet_dep, gio_dep],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstudp, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstudp]
diff --git a/gst/videobox/README b/gst/videobox/README
new file mode 100644
index 0000000000..e71da5903a
--- /dev/null
+++ b/gst/videobox/README
@@ -0,0 +1,21 @@
+Videobox
+--------
+
+This plugin crops or enlarges the image. It takes 4 values as input, a
+top, bottom, left and right offset. Positive values will crop that many
+pixels from the respective border of the image, negative values will add
+that many pixels. When pixels are added, you can specify their color.
+Some predefined colors are usable with an enum property.
+
+The plugin is alpha channel aware and will try to negotiate with a format
+that supports alpha channels first. When alpha channel is active two
+other properties, alpha and border_alpha can be used to set the alpha
+values of the inner picture and the border respectively. An alpha value of
+0.0 means total transparency, 1.0 is opaque.
+
+The videobox plugin has many uses such as doing a mosaic of pictures,
+letterboxing video, cutting out pieces of video, picture in picture, etc..
+
+TODO
+
+- add enum to specify common aspect ratios/sizes and add borders/crop
diff --git a/gst/videobox/gstvideobox.c b/gst/videobox/gstvideobox.c
new file mode 100644
index 0000000000..0fa7655ca0
--- /dev/null
+++ b/gst/videobox/gstvideobox.c
@@ -0,0 +1,3349 @@
+/* GStreamer
+ * Copyright (C) 1999 Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-videobox
+ * @title: videobox
+ * @see_also: #GstVideoCrop
+ *
+ * This plugin crops or enlarges the image. It takes 4 values as input, a
+ * top, bottom, left and right offset. Positive values will crop that many
+ * pixels from the respective border of the image, negative values will add
+ * that many pixels. When pixels are added, you can specify their color.
+ * Some predefined colors are usable with an enum property.
+ *
+ * The plugin is alpha channel aware and will try to negotiate with a format
+ * that supports alpha channels first. When alpha channel is active two
+ * other properties, alpha and border_alpha can be used to set the alpha
+ * values of the inner picture and the border respectively. An alpha value of
+ * 0.0 means total transparency, 1.0 is opaque.
+ *
+ * The videobox plugin has many uses such as doing a mosaic of pictures,
+ * letterboxing video, cutting out pieces of video, picture in picture, etc..
+ *
+ * Setting autocrop to true changes the behavior of the plugin so that
+ * caps determine crop properties rather than the other way around: given
+ * input and output dimensions, the crop values are selected so that the
+ * smaller frame is effectively centered in the larger frame. This
+ * involves either cropping or padding.
+ *
+ * If you use autocrop there is little point in setting the other
+ * properties manually because they will be overridden if the caps change,
+ * but nothing stops you from doing so.
+ *
+ * Sample pipeline:
+ * |[
+ * gst-launch-1.0 videotestsrc ! videobox autocrop=true ! \
+ * "video/x-raw, width=600, height=400" ! videoconvert ! ximagesink
+ * ]|
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstvideobox.h"
+#include "gstvideoboxorc.h"
+
+#include <math.h>
+#include <string.h>
+
+GST_DEBUG_CATEGORY_STATIC (videobox_debug);
+#define GST_CAT_DEFAULT videobox_debug
+
+/* From videotestsrc.c */
+static const guint8 yuv_sdtv_colors_Y[VIDEO_BOX_FILL_LAST] =
+ { 16, 145, 41, 81, 210, 235 };
+static const guint8 yuv_sdtv_colors_U[VIDEO_BOX_FILL_LAST] =
+ { 128, 54, 240, 90, 16, 128 };
+static const guint8 yuv_sdtv_colors_V[VIDEO_BOX_FILL_LAST] =
+ { 128, 34, 110, 240, 146, 128 };
+
+static const guint8 yuv_hdtv_colors_Y[VIDEO_BOX_FILL_LAST] =
+ { 16, 173, 32, 63, 219, 235 };
+static const guint8 yuv_hdtv_colors_U[VIDEO_BOX_FILL_LAST] =
+ { 128, 42, 240, 102, 16, 128 };
+static const guint8 yuv_hdtv_colors_V[VIDEO_BOX_FILL_LAST] =
+ { 128, 26, 118, 240, 138, 128 };
+
+static const guint8 rgb_colors_R[VIDEO_BOX_FILL_LAST] =
+ { 0, 0, 0, 255, 255, 255 };
+static const guint8 rgb_colors_G[VIDEO_BOX_FILL_LAST] =
+ { 0, 255, 0, 0, 255, 255 };
+static const guint8 rgb_colors_B[VIDEO_BOX_FILL_LAST] =
+ { 0, 0, 255, 0, 0, 255 };
+
+/* Generated by -bad/ext/cog/generate_tables */
+static const int cog_ycbcr_to_rgb_matrix_8bit_hdtv[] = {
+ 298, 0, 459, -63514,
+ 298, -55, -136, 19681,
+ 298, 541, 0, -73988,
+};
+
+static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
+ 298, 0, 409, -57068,
+ 298, -100, -208, 34707,
+ 298, 516, 0, -70870,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_hdtv[] = {
+ 47, 157, 16, 4096,
+ -26, -87, 112, 32768,
+ 112, -102, -10, 32768,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
+ 66, 129, 25, 4096,
+ -38, -74, 112, 32768,
+ 112, -94, -18, 32768,
+};
+
+static const gint cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit[] = {
+ 256, -30, -53, 10600,
+ 0, 261, 29, -4367,
+ 0, 19, 262, -3289,
+};
+
+static const gint cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit[] = {
+ 256, 25, 49, -9536,
+ 0, 253, -28, 3958,
+ 0, -19, 252, 2918,
+};
+
+static const gint cog_identity_matrix_8bit[] = {
+ 256, 0, 0, 0,
+ 0, 256, 0, 0,
+ 0, 0, 256, 0,
+};
+
+#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
+
+/* Fill an entire AYUV frame with the given fill colour and border alpha.
+ * The colour is looked up in the SDTV or HDTV YCbCr tables depending on
+ * @sdtv and packed big-endian as A|Y|U|V into one 32-bit word, which is
+ * then splatted across the frame. */
+static void
+fill_ayuv (GstVideoBoxFill fill_type, guint b_alpha,
+    GstVideoFrame * frame, gboolean sdtv)
+{
+  const guint8 *colors_Y, *colors_U, *colors_V;
+  guint32 pixel;
+  guint8 *data;
+  gint frame_width, frame_height, row_stride;
+
+  frame_width = GST_VIDEO_FRAME_WIDTH (frame);
+  frame_height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+  /* clamp alpha into the valid 8-bit range */
+  b_alpha = MIN (b_alpha, 255);
+
+  if (sdtv) {
+    colors_Y = yuv_sdtv_colors_Y;
+    colors_U = yuv_sdtv_colors_U;
+    colors_V = yuv_sdtv_colors_V;
+  } else {
+    colors_Y = yuv_hdtv_colors_Y;
+    colors_U = yuv_hdtv_colors_U;
+    colors_V = yuv_hdtv_colors_V;
+  }
+
+  pixel = GUINT32_FROM_BE ((b_alpha << 24) |
+      (colors_Y[fill_type] << 16) |
+      (colors_U[fill_type] << 8) | colors_V[fill_type]);
+
+  data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+  row_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+  if (G_LIKELY (row_stride == 4 * frame_width)) {
+    /* no padding between rows: one contiguous splat */
+    video_box_orc_splat_u32 ((guint32 *) data, pixel,
+        frame_width * frame_height);
+  } else {
+    gint row;
+
+    /* padded rows: splat one scanline at a time, stepping by the stride */
+    for (row = 0; row < frame_height; row++) {
+      video_box_orc_splat_u32 ((guint32 *) data, pixel, frame_width);
+      data += row_stride;
+    }
+  }
+}
+
+/* Copy a w x h pixel region of packed AYUV from @src_frame (offset
+ * src_x/src_y) into @dest_frame (offset dest_x/dest_y), scaling the alpha
+ * byte of every pixel by @i_alpha (8.8 fixed point, i.e. 256 == opaque
+ * pass-through).  When source and destination colorimetry differ
+ * (SDTV vs. HDTV), Y/U/V are additionally run through the appropriate
+ * 8-bit conversion matrix. */
+static void
+copy_ayuv_ayuv (guint i_alpha, GstVideoFrame * dest_frame,
+    gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
+    gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+  gint i, j;
+  gint src_stride;
+  gint dest_stride;
+  guint8 *dest, *src;
+
+  src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src_frame, 0);
+  dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest_frame, 0);
+
+  src = GST_VIDEO_FRAME_PLANE_DATA (src_frame, 0);
+  dest = GST_VIDEO_FRAME_PLANE_DATA (dest_frame, 0);
+
+  /* advance both pointers to the top-left corner of the region;
+   * 4 bytes per AYUV pixel */
+  dest = dest + dest_y * dest_stride + dest_x * 4;
+  src = src + src_y * src_stride + src_x * 4;
+
+  /* from here on, w is a byte count per scanline */
+  w *= 4;
+
+  if (dest_sdtv != src_sdtv) {
+    gint matrix[12];
+    gint y, u, v;
+
+    /* colorimetry differs: pick the SDTV<->HDTV conversion matrix */
+    memcpy (matrix,
+        dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+
+    for (i = 0; i < h; i++) {
+      for (j = 0; j < w; j += 4) {
+        /* ORC FIXME */
+        /* byte 0 is alpha, bytes 1..3 are Y, U, V */
+        dest[j] = (src[j] * i_alpha) >> 8;
+        y = src[j + 1];
+        u = src[j + 2];
+        v = src[j + 3];
+        dest[j + 1] = APPLY_MATRIX (matrix, 0, y, u, v);
+        dest[j + 2] = APPLY_MATRIX (matrix, 1, y, u, v);
+        dest[j + 3] = APPLY_MATRIX (matrix, 2, y, u, v);
+      }
+      dest += dest_stride;
+      src += src_stride;
+    }
+  } else {
+    /* same colorimetry: plain copy with alpha scaling */
+    for (i = 0; i < h; i++) {
+      for (j = 0; j < w; j += 4) {
+        /* ORC FIXME */
+        dest[j] = (src[j] * i_alpha) >> 8;
+        dest[j + 1] = src[j + 1];
+        dest[j + 2] = src[j + 2];
+        dest[j + 3] = src[j + 3];
+      }
+      dest += dest_stride;
+      src += src_stride;
+    }
+  }
+}
+
+static void
+copy_ayuv_i420 (guint i_alpha, GstVideoFrame * dest_frame,
+ gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
+ gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+ gint i, j;
+ guint8 *destY, *destY2, *destU, *destV;
+ gint dest_strideY, dest_strideU, dest_strideV;
+ const guint8 *src2;
+ gint src_stride;
+ gint y_idx, uv_idx;
+ gint y1, y2, y3, y4;
+ gint u1, u2, u3, u4;
+ gint v1, v2, v3, v4;
+ gint matrix[12];
+ guint8 *src;
+ gint dest_height, src_height, dest_width;
+
+ dest_height = GST_VIDEO_FRAME_HEIGHT (dest_frame);
+ dest_width = GST_VIDEO_FRAME_WIDTH (dest_frame);
+ src_height = GST_VIDEO_FRAME_HEIGHT (src_frame);
+
+ dest_strideY = GST_VIDEO_FRAME_COMP_STRIDE (dest_frame, 0);
+ dest_strideU = GST_VIDEO_FRAME_COMP_STRIDE (dest_frame, 1);
+ dest_strideV = GST_VIDEO_FRAME_COMP_STRIDE (dest_frame, 2);
+
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src_frame, 0);
+
+ destY = GST_VIDEO_FRAME_COMP_DATA (dest_frame, 0);
+ destU = GST_VIDEO_FRAME_COMP_DATA (dest_frame, 1);
+ destV = GST_VIDEO_FRAME_COMP_DATA (dest_frame, 2);
+
+ destY = destY + dest_y * dest_strideY + dest_x;
+ destY2 = (dest_y < dest_height) ? destY + dest_strideY : destY;
+ destU = destU + (dest_y / 2) * dest_strideU + dest_x / 2;
+ destV = destV + (dest_y / 2) * dest_strideV + dest_x / 2;
+
+ src = GST_VIDEO_FRAME_PLANE_DATA (src_frame, 0);
+ src = src + src_y * src_stride + src_x * 4;
+ src2 = (src_y < src_height) ? src + src_stride : src;
+
+ h = dest_y + h;
+ w = dest_x + w;
+
+ if (src_sdtv != dest_sdtv)
+ memcpy (matrix,
+ dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+ cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+ else
+ memcpy (matrix, cog_identity_matrix_8bit, 12 * sizeof (gint));
+
+ /* 1. Handle the first destination scanline specially if it
+ * doesn't start at the macro pixel boundary, i.e. blend
+ * with the background! */
+ if (dest_y % 2 == 1) {
+ /* 1.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+ if (dest_x % 2 == 1) {
+ y1 = src[4 * 0 + 1];
+ u1 = src[4 * 0 + 2];
+ v1 = src[4 * 0 + 3];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[0] =
+ CLAMP ((3 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0,
+ 255);
+ destV[0] =
+ CLAMP ((3 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 4, 0,
+ 255);
+
+ j = dest_x + 1;
+ y_idx = uv_idx = 1;
+ } else {
+ j = dest_x;
+ y_idx = uv_idx = 0;
+ }
+
+ /* 1.2. Copy all macro pixels from the source to the destination
+ * but blend with the background because we're only filling
+ * the lower part of the macro pixels. */
+ for (; j < w - 1; j += 2) {
+ y1 = src[4 * y_idx + 1];
+ y2 = src[4 * y_idx + 4 + 1];
+
+ u1 = src[4 * y_idx + 2];
+ u2 = src[4 * y_idx + 4 + 2];
+
+ v1 = src[4 * y_idx + 3];
+ v2 = src[4 * y_idx + 4 + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[y_idx + 1] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (2 * destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (2 * destV[uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+
+ y_idx += 2;
+ uv_idx++;
+ }
+
+ /* 1.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = src[4 * y_idx + 1];
+ u1 = src[4 * y_idx + 2];
+ v1 = src[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (destV[uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = src[4 * y_idx + 1];
+ u1 = src[4 * y_idx + 2];
+ v1 = src[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (3 * destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0,
+ 255);
+ destV[uv_idx] =
+ CLAMP ((3 * destV[uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 4,
+ 0, 255);
+ }
+
+ destY += dest_strideY;
+ destY2 += dest_strideY;
+ destU += dest_strideU;
+ destV += dest_strideV;
+ src += src_stride;
+ src2 += src_stride;
+ i = dest_y + 1;
+ } else {
+ i = dest_y;
+ }
+
+ /* 2. Copy all macro pixel scanlines, the destination scanline
+ * now starts at macro pixel boundary. */
+ for (; i < h - 1; i += 2) {
+ /* 2.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+ if (dest_x % 2 == 1) {
+ y1 = src[4 * 0 + 1];
+ y2 = src2[4 * 0 + 1];
+ u1 = src[4 * 0 + 2];
+ u2 = src2[4 * 0 + 2];
+ v1 = src[4 * 0 + 3];
+ v2 = src2[4 * 0 + 3];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY2[0] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[0] = CLAMP (
+ (2 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[0] = CLAMP (
+ (2 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+ j = dest_x + 1;
+ y_idx = uv_idx = 1;
+ } else {
+ j = dest_x;
+ y_idx = uv_idx = 0;
+ }
+
+ /* 2.2. Copy all macro pixels from the source to the destination.
+ * All pixels now start at macro pixel boundary, i.e. no
+ * blending with the background is necessary. */
+ for (; j < w - 1; j += 2) {
+ y1 = src[4 * y_idx + 1];
+ y2 = src[4 * y_idx + 4 + 1];
+ y3 = src2[4 * y_idx + 1];
+ y4 = src2[4 * y_idx + 4 + 1];
+
+ u1 = src[4 * y_idx + 2];
+ u2 = src[4 * y_idx + 4 + 2];
+ u3 = src2[4 * y_idx + 2];
+ u4 = src2[4 * y_idx + 4 + 2];
+
+ v1 = src[4 * y_idx + 3];
+ v2 = src[4 * y_idx + 4 + 3];
+ v3 = src2[4 * y_idx + 3];
+ v4 = src2[4 * y_idx + 4 + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[y_idx + 1] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destY2[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y3, u3, v3), 0, 255);
+ destY2[y_idx + 1] = CLAMP (APPLY_MATRIX (matrix, 0, y4, u4, v4), 0, 255);
+
+ destU[uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 1, y2,
+ u2, v2) + APPLY_MATRIX (matrix, 1, y3, u3,
+ v3) + APPLY_MATRIX (matrix, 1, y4, u4, v4)) / 4, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 2, y1, u1, v1) + APPLY_MATRIX (matrix, 2, y2,
+ u2, v2) + APPLY_MATRIX (matrix, 2, y3, u3,
+ v3) + APPLY_MATRIX (matrix, 2, y4, u4, v4)) / 4, 0, 255);
+
+ y_idx += 2;
+ uv_idx++;
+ }
+
+ /* 2.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = src[4 * y_idx + 1];
+ y2 = src2[4 * y_idx + 1];
+
+ u1 = src[4 * y_idx + 2];
+ u2 = src2[4 * y_idx + 2];
+
+ v1 = src[4 * y_idx + 3];
+ v2 = src2[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY2[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 2, y2,
+ u2, v2)) / 2, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 2, y2,
+ u2, v2)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = src[4 * y_idx + 1];
+ y2 = src2[4 * y_idx + 1];
+
+ u1 = src[4 * y_idx + 2];
+ u2 = src2[4 * y_idx + 2];
+
+ v1 = src[4 * y_idx + 3];
+ v2 = src2[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY2[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (2 * destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (2 * destV[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+ }
+
+ destY += 2 * dest_strideY;
+ destY2 += 2 * dest_strideY;
+ destU += dest_strideU;
+ destV += dest_strideV;
+ src += 2 * src_stride;
+ src2 += 2 * src_stride;
+ }
+
+ /* 3. Handle the last scanline if one exists. This again
+ * doesn't start at macro pixel boundary but should
+ * only fill the upper part of the macro pixels. */
+ if (i == h - 1 && i == dest_height - 1) {
+ /* 3.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+ if (dest_x % 2 == 1) {
+ y1 = src[4 * 0 + 1];
+ u1 = src[4 * 0 + 2];
+ v1 = src[4 * 0 + 3];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[0] =
+ CLAMP ((destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+ destV[0] =
+ CLAMP ((destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 2, 0, 255);
+
+ j = dest_x + 1;
+ y_idx = uv_idx = 1;
+ } else {
+ j = dest_x;
+ y_idx = uv_idx = 0;
+ }
+
+ /* 3.2. Copy all macro pixels from the source to the destination
+ * but blend with the background because we're only filling
+ * the upper part of the macro pixels. */
+ for (; j < w - 1; j += 2) {
+ y1 = src[4 * y_idx + 1];
+ y2 = src[4 * y_idx + 4 + 1];
+
+ u1 = src[4 * y_idx + 2];
+ u2 = src[4 * y_idx + 4 + 2];
+
+ v1 = src[4 * y_idx + 3];
+ v2 = src[4 * y_idx + 4 + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[y_idx + 1] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+
+ destU[uv_idx] = CLAMP (
+ (2 * destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (2 * destV[uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+
+ y_idx += 2;
+ uv_idx++;
+ }
+
+ /* 3.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = src[4 * y_idx + 1];
+ u1 = src[4 * y_idx + 2];
+ v1 = src[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (destV[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = src[4 * y_idx + 1];
+ u1 = src[4 * y_idx + 2];
+ v1 = src[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (3 * destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0,
+ 255);
+ destV[uv_idx] =
+ CLAMP ((3 * destV[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4,
+ 0, 255);
+ }
+ } else if (i == h - 1) {
+ /* 3.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+ if (dest_x % 2 == 1) {
+ y1 = src[4 * 0 + 1];
+ u1 = src[4 * 0 + 2];
+ v1 = src[4 * 0 + 3];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[0] =
+ CLAMP ((3 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0,
+ 255);
+ destV[0] =
+ CLAMP ((3 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 4, 0,
+ 255);
+
+ j = dest_x + 1;
+ y_idx = uv_idx = 1;
+ } else {
+ j = dest_x;
+ y_idx = uv_idx = 0;
+ }
+
+ /* 3.2. Copy all macro pixels from the source to the destination
+ * but blend with the background because we're only filling
+ * the upper part of the macro pixels. */
+ for (; j < w - 1; j += 2) {
+ y1 = src[4 * y_idx + 1];
+ y2 = src[4 * y_idx + 4 + 1];
+
+ u1 = src[4 * y_idx + 2];
+ u2 = src[4 * y_idx + 4 + 2];
+
+ v1 = src[4 * y_idx + 3];
+ v2 = src[4 * y_idx + 4 + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[y_idx + 1] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+
+ destU[uv_idx] = CLAMP (
+ (2 * destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (2 * destV[uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+
+ y_idx += 2;
+ uv_idx++;
+ }
+
+ /* 3.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = src[4 * y_idx + 1];
+ u1 = src[4 * y_idx + 2];
+ v1 = src[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+ destV[uv_idx] = CLAMP (
+ (destV[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = src[4 * y_idx + 1];
+ u1 = src[4 * y_idx + 2];
+ v1 = src[4 * y_idx + 3];
+
+ destY[y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[uv_idx] = CLAMP (
+ (3 * destU[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0,
+ 255);
+ destV[uv_idx] =
+ CLAMP ((3 * destV[uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4,
+ 0, 255);
+ }
+ }
+}
+
+static void
+fill_planar_yuv (GstVideoBoxFill fill_type, guint b_alpha,
+    GstVideoFrame * frame, gboolean sdtv)
+{
+  /* Fill an entire planar YUV frame with the border color selected by
+   * @fill_type, using SDTV or HDTV colorimetry as requested.
+   * @b_alpha is unused here: planar YUV has no alpha plane. */
+  guint8 fill[3];
+  gint p;
+
+  if (sdtv) {
+    fill[0] = yuv_sdtv_colors_Y[fill_type];
+    fill[1] = yuv_sdtv_colors_U[fill_type];
+    fill[2] = yuv_sdtv_colors_V[fill_type];
+  } else {
+    fill[0] = yuv_hdtv_colors_Y[fill_type];
+    fill[1] = yuv_hdtv_colors_U[fill_type];
+    fill[2] = yuv_hdtv_colors_V[fill_type];
+  }
+
+  /* The Y, U and V planes are handled identically, just with their own
+   * data pointer, stride and dimensions. */
+  for (p = 0; p < 3; p++) {
+    guint8 *data = GST_VIDEO_FRAME_COMP_DATA (frame, p);
+    gint stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, p);
+    gint width = GST_VIDEO_FRAME_COMP_WIDTH (frame, p);
+    gint height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, p);
+    gint row;
+
+    if (stride == width) {
+      /* No padding between rows: one memset covers the whole plane. */
+      memset (data, fill[p], stride * height);
+    } else {
+      /* Rows are padded: fill only the visible part of each scanline. */
+      for (row = 0; row < height; row++) {
+        memset (data, fill[p], width);
+        data += stride;
+      }
+    }
+  }
+}
+
+static void
+copy_y444_y444 (guint i_alpha, GstVideoFrame * dest,
+    gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src,
+    gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+  /* Copy a w x h region of a Y444 frame into a Y444 frame, converting
+   * between SDTV and HDTV colorimetry when the two frames differ.
+   * Y444 has no chroma subsampling, so pixels are independent and no
+   * blending with the destination background is ever needed.
+   * @i_alpha is unused: Y444 carries no alpha. */
+  gint i, j;
+  guint8 *destY, *destU, *destV;
+  const guint8 *srcY, *srcU, *srcV;
+  gint dest_strideY, dest_strideU, dest_strideV;
+  gint src_strideY, src_strideU, src_strideV;
+
+  dest_strideY = GST_VIDEO_FRAME_COMP_STRIDE (dest, 0);
+  dest_strideU = GST_VIDEO_FRAME_COMP_STRIDE (dest, 1);
+  dest_strideV = GST_VIDEO_FRAME_COMP_STRIDE (dest, 2);
+
+  src_strideY = GST_VIDEO_FRAME_COMP_STRIDE (src, 0);
+  src_strideU = GST_VIDEO_FRAME_COMP_STRIDE (src, 1);
+  src_strideV = GST_VIDEO_FRAME_COMP_STRIDE (src, 2);
+
+  destY = GST_VIDEO_FRAME_COMP_DATA (dest, 0);
+  destU = GST_VIDEO_FRAME_COMP_DATA (dest, 1);
+  destV = GST_VIDEO_FRAME_COMP_DATA (dest, 2);
+
+  srcY = GST_VIDEO_FRAME_COMP_DATA (src, 0);
+  srcU = GST_VIDEO_FRAME_COMP_DATA (src, 1);
+  srcV = GST_VIDEO_FRAME_COMP_DATA (src, 2);
+
+  destY = destY + dest_y * dest_strideY + dest_x;
+  destU = destU + dest_y * dest_strideU + dest_x;
+  destV = destV + dest_y * dest_strideV + dest_x;
+
+  srcY = srcY + src_y * src_strideY + src_x;
+  srcU = srcU + src_y * src_strideU + src_x;
+  srcV = srcV + src_y * src_strideV + src_x;
+
+  if (src_sdtv != dest_sdtv) {
+    gint matrix[12];
+    gint y, u, v;
+
+    memcpy (matrix,
+        dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+
+    for (i = 0; i < h; i++) {
+      for (j = 0; j < w; j++) {
+        y = APPLY_MATRIX (matrix, 0, srcY[j], srcU[j], srcV[j]);
+        u = APPLY_MATRIX (matrix, 1, srcY[j], srcU[j], srcV[j]);
+        v = APPLY_MATRIX (matrix, 2, srcY[j], srcU[j], srcV[j]);
+
+        /* Clamp before storing into 8 bits, consistent with the other
+         * copy functions: the matrix can produce values slightly
+         * outside [0, 255], which would otherwise wrap on truncation. */
+        destY[j] = CLAMP (y, 0, 255);
+        destU[j] = CLAMP (u, 0, 255);
+        destV[j] = CLAMP (v, 0, 255);
+      }
+      destY += dest_strideY;
+      destU += dest_strideU;
+      destV += dest_strideV;
+
+      srcY += src_strideY;
+      srcU += src_strideU;
+      srcV += src_strideV;
+    }
+  } else {
+    /* Same colorimetry: plain per-scanline copy of all three planes. */
+    for (i = 0; i < h; i++) {
+      memcpy (destY, srcY, w);
+      memcpy (destU, srcU, w);
+      memcpy (destV, srcV, w);
+
+      destY += dest_strideY;
+      destU += dest_strideU;
+      destV += dest_strideV;
+
+      srcY += src_strideY;
+      srcU += src_strideU;
+      srcV += src_strideV;
+    }
+  }
+}
+
+static void
+copy_y42b_y42b (guint i_alpha, GstVideoFrame * dest,
+    gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src,
+    gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+  /* Copy a w x h region of a Y42B (4:2:2 planar) frame into another
+   * Y42B frame, converting between SDTV and HDTV colorimetry when the
+   * frames differ.  Chroma is horizontally subsampled by two, so when
+   * the destination region does not start or end on a macro pixel
+   * boundary the border chroma samples are blended with the background
+   * already present in the destination. */
+  gint i, j;
+  guint8 *destY, *destU, *destV;
+  const guint8 *srcY, *srcU, *srcV;
+  gint dest_strideY, dest_strideU, dest_strideV;
+  gint src_strideY, src_strideU, src_strideV;
+  gint src_y_idx, src_uv_idx;
+  gint dest_y_idx, dest_uv_idx;
+  gint matrix[12];
+  gint y1, y2;
+  gint u1, u2;
+  gint v1, v2;
+  gint dest_width;
+
+  dest_width = GST_VIDEO_FRAME_WIDTH (dest);
+
+  dest_strideY = GST_VIDEO_FRAME_COMP_STRIDE (dest, 0);
+  dest_strideU = GST_VIDEO_FRAME_COMP_STRIDE (dest, 1);
+  dest_strideV = GST_VIDEO_FRAME_COMP_STRIDE (dest, 2);
+
+  src_strideY = GST_VIDEO_FRAME_COMP_STRIDE (src, 0);
+  src_strideU = GST_VIDEO_FRAME_COMP_STRIDE (src, 1);
+  src_strideV = GST_VIDEO_FRAME_COMP_STRIDE (src, 2);
+
+  destY = GST_VIDEO_FRAME_COMP_DATA (dest, 0);
+  destU = GST_VIDEO_FRAME_COMP_DATA (dest, 1);
+  destV = GST_VIDEO_FRAME_COMP_DATA (dest, 2);
+
+  srcY = GST_VIDEO_FRAME_COMP_DATA (src, 0);
+  srcU = GST_VIDEO_FRAME_COMP_DATA (src, 1);
+  srcV = GST_VIDEO_FRAME_COMP_DATA (src, 2);
+
+  destY = destY + dest_y * dest_strideY + dest_x;
+  destU = destU + dest_y * dest_strideU + dest_x / 2;
+  destV = destV + dest_y * dest_strideV + dest_x / 2;
+
+  srcY = srcY + src_y * src_strideY + src_x;
+  srcU = srcU + src_y * src_strideU + src_x / 2;
+  srcV = srcV + src_y * src_strideV + src_x / 2;
+
+  /* From here on h and w are the exclusive end coordinates in the
+   * destination, not the region size. */
+  h = dest_y + h;
+  w = dest_x + w;
+
+  if (src_sdtv != dest_sdtv)
+    memcpy (matrix,
+        dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+  else
+    memcpy (matrix, cog_identity_matrix_8bit, 12 * sizeof (gint));
+
+  /* 1. Copy all macro pixel scanlines, the destination scanline
+   *    now starts at macro pixel boundary. */
+  for (i = dest_y; i < h; i++) {
+    /* 1.1. Handle the first destination pixel if it doesn't
+     *      start at the macro pixel boundary, i.e. blend with
+     *      the background! */
+    if (dest_x % 2 == 1) {
+      y1 = srcY[0];
+      u1 = srcU[0];
+      v1 = srcV[0];
+
+      destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destU[0] = CLAMP (
+          (destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+      destV[0] = CLAMP (
+          (destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 2, 0, 255);
+      j = dest_x + 1;
+      src_y_idx = dest_y_idx = dest_uv_idx = 1;
+      src_uv_idx = (src_x % 2) + 1;
+    } else {
+      j = dest_x;
+      src_y_idx = dest_y_idx = dest_uv_idx = 0;
+      src_uv_idx = (src_x % 2);
+    }
+
+    /* 1.2. Copy all macro pixels from the source to the destination.
+     *      All pixels now start at macro pixel boundary, i.e. no
+     *      blending with the background is necessary. */
+    for (; j < w - 1; j += 2) {
+      y1 = srcY[src_y_idx];
+      y2 = srcY[src_y_idx + 1];
+
+      u1 = srcU[src_uv_idx / 2];
+      v1 = srcV[src_uv_idx / 2];
+      src_uv_idx++;
+      u2 = srcU[src_uv_idx / 2];
+      v2 = srcV[src_uv_idx / 2];
+      src_uv_idx++;
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[dest_y_idx + 1] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+
+      destU[dest_uv_idx] = CLAMP (
+          (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 1, y2,
+                  u2, v2)) / 2, 0, 255);
+      destV[dest_uv_idx] = CLAMP (
+          (APPLY_MATRIX (matrix, 2, y1, u1, v1) + APPLY_MATRIX (matrix, 2, y2,
+                  u2, v2)) / 2, 0, 255);
+
+      dest_y_idx += 2;
+      src_y_idx += 2;
+      dest_uv_idx++;
+    }
+
+    /* 1.3. Now copy the last pixel if one exists and blend it
+     *      with the background because we only fill part of
+     *      the macro pixel. In case this is the last pixel of
+     *      the destination we will fill a larger part. */
+    if (j == w - 1 && j == dest_width - 1) {
+      y1 = srcY[src_y_idx];
+      u1 = srcU[src_uv_idx / 2];
+      v1 = srcV[src_uv_idx / 2];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destU[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 1, y1, u1, v1), 0, 255);
+      /* V must use matrix row 2 (row 1 is the U row) */
+      destV[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 2, y1, u1, v1), 0, 255);
+    } else if (j == w - 1) {
+      y1 = srcY[src_y_idx];
+      u1 = srcU[src_uv_idx / 2];
+      v1 = srcV[src_uv_idx / 2];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destU[dest_uv_idx] = CLAMP (
+          (destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+                  v1)) / 2, 0, 255);
+      /* V must use matrix row 2 (row 1 is the U row) */
+      destV[dest_uv_idx] = CLAMP (
+          (destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+                  v1)) / 2, 0, 255);
+    }
+
+    destY += dest_strideY;
+    destU += dest_strideU;
+    destV += dest_strideV;
+    srcY += src_strideY;
+
+    srcU += src_strideU;
+    srcV += src_strideV;
+  }
+}
+
+static void
+copy_y41b_y41b (guint i_alpha, GstVideoFrame * dest,
+    gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src,
+    gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+  /* Copy a w x h region of a Y41B (4:1:1 planar) frame into another
+   * Y41B frame, converting between SDTV and HDTV colorimetry when the
+   * frames differ.  Chroma is horizontally subsampled by four, so the
+   * left and right borders of the destination region may cover only
+   * part of a 4-pixel macro pixel; those chroma samples are blended
+   * with the background, weighted by how many of the four pixels are
+   * actually covered. */
+  gint i, j;
+  guint8 *destY, *destU, *destV;
+  const guint8 *srcY, *srcU, *srcV;
+  gint dest_strideY, dest_strideU, dest_strideV;
+  gint src_strideY, src_strideU, src_strideV;
+  gint src_y_idx, src_uv_idx;
+  gint dest_y_idx, dest_uv_idx;
+  gint matrix[12];
+  gint y1, y2, y3, y4;
+  gint u1, u2, u3, u4;
+  gint v1, v2, v3, v4;
+  gint dest_width;
+
+  dest_width = GST_VIDEO_FRAME_WIDTH (dest);
+
+  dest_strideY = GST_VIDEO_FRAME_COMP_STRIDE (dest, 0);
+  dest_strideU = GST_VIDEO_FRAME_COMP_STRIDE (dest, 1);
+  dest_strideV = GST_VIDEO_FRAME_COMP_STRIDE (dest, 2);
+
+  src_strideY = GST_VIDEO_FRAME_COMP_STRIDE (src, 0);
+  src_strideU = GST_VIDEO_FRAME_COMP_STRIDE (src, 1);
+  src_strideV = GST_VIDEO_FRAME_COMP_STRIDE (src, 2);
+
+  destY = GST_VIDEO_FRAME_COMP_DATA (dest, 0);
+  destU = GST_VIDEO_FRAME_COMP_DATA (dest, 1);
+  destV = GST_VIDEO_FRAME_COMP_DATA (dest, 2);
+
+  srcY = GST_VIDEO_FRAME_COMP_DATA (src, 0);
+  srcU = GST_VIDEO_FRAME_COMP_DATA (src, 1);
+  srcV = GST_VIDEO_FRAME_COMP_DATA (src, 2);
+
+  destY = destY + dest_y * dest_strideY + dest_x;
+  destU = destU + dest_y * dest_strideU + dest_x / 4;
+  destV = destV + dest_y * dest_strideV + dest_x / 4;
+
+  srcY = srcY + src_y * src_strideY + src_x;
+  srcU = srcU + src_y * src_strideU + src_x / 4;
+  srcV = srcV + src_y * src_strideV + src_x / 4;
+
+  /* From here on h and w are the exclusive end coordinates in the
+   * destination, not the region size. */
+  h = dest_y + h;
+  w = dest_x + w;
+
+  if (src_sdtv != dest_sdtv)
+    memcpy (matrix,
+        dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+        cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+  else
+    memcpy (matrix, cog_identity_matrix_8bit, 12 * sizeof (gint));
+
+  /* 1. Copy all macro pixel scanlines, the destination scanline
+   *    now starts at macro pixel boundary. */
+  for (i = dest_y; i < h; i++) {
+    /* 1.1. Handle the first destination pixel if it doesn't
+     *      start at the macro pixel boundary, i.e. blend with
+     *      the background!  The background weight is the number
+     *      of the 4 macro pixel positions NOT covered by source. */
+    if (dest_x % 4 == 1) {
+      y1 = srcY[0];
+      y2 = srcY[1];
+      y3 = srcY[2];
+      u1 = srcU[0];
+      v1 = srcV[0];
+
+      destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[1] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u1, v1), 0, 255);
+      destY[2] = CLAMP (APPLY_MATRIX (matrix, 0, y3, u1, v1), 0, 255);
+
+      destU[0] = CLAMP (
+          (destU[0] + APPLY_MATRIX (matrix, 1, y1, u1,
+                  v1) + APPLY_MATRIX (matrix, 1, y2, u1,
+                  v1) + APPLY_MATRIX (matrix, 1, y3, u1, v1)) / 4, 0, 255);
+      destV[0] =
+          CLAMP ((destV[0] + APPLY_MATRIX (matrix, 2, y1, u1,
+                  v1) + APPLY_MATRIX (matrix, 2, y2, u1,
+                  v1) + APPLY_MATRIX (matrix, 2, y3, u1, v1)) / 4, 0, 255);
+
+      j = dest_x + 3;
+      src_y_idx = dest_y_idx = 3;
+      dest_uv_idx = 1;
+      src_uv_idx = (src_x % 4) + 3;
+    } else if (dest_x % 4 == 2) {
+      y1 = srcY[0];
+      y2 = srcY[1];
+      u1 = srcU[0];
+      v1 = srcV[0];
+
+      destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[1] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u1, v1), 0, 255);
+
+      destU[0] = CLAMP (
+          (2 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1,
+                  v1) + APPLY_MATRIX (matrix, 1, y2, u1, v1)) / 4, 0, 255);
+      destV[0] =
+          CLAMP ((2 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1,
+                  v1) + APPLY_MATRIX (matrix, 2, y2, u1, v1)) / 4, 0, 255);
+
+      j = dest_x + 2;
+      src_y_idx = dest_y_idx = 2;
+      dest_uv_idx = 1;
+      src_uv_idx = (src_x % 4) + 2;
+    } else if (dest_x % 4 == 3) {
+      y1 = srcY[0];
+      u1 = srcU[0];
+      v1 = srcV[0];
+
+      destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+
+      destU[0] = CLAMP (
+          (3 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0, 255);
+      destV[0] = CLAMP (
+          (3 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 4, 0, 255);
+
+      j = dest_x + 1;
+      src_y_idx = dest_y_idx = 1;
+      dest_uv_idx = 1;
+      src_uv_idx = (src_x % 4) + 1;
+    } else {
+      j = dest_x;
+      src_y_idx = dest_y_idx = dest_uv_idx = 0;
+      src_uv_idx = (src_x % 4);
+    }
+
+    /* 1.2. Copy all macro pixels from the source to the destination.
+     *      All pixels now start at macro pixel boundary, i.e. no
+     *      blending with the background is necessary. */
+    for (; j < w - 3; j += 4) {
+      y1 = srcY[src_y_idx];
+      y2 = srcY[src_y_idx + 1];
+      y3 = srcY[src_y_idx + 2];
+      y4 = srcY[src_y_idx + 3];
+
+      u1 = srcU[src_uv_idx / 4];
+      v1 = srcV[src_uv_idx / 4];
+      src_uv_idx++;
+      u2 = srcU[src_uv_idx / 4];
+      v2 = srcV[src_uv_idx / 4];
+      src_uv_idx++;
+      u3 = srcU[src_uv_idx / 4];
+      v3 = srcV[src_uv_idx / 4];
+      src_uv_idx++;
+      u4 = srcU[src_uv_idx / 4];
+      v4 = srcV[src_uv_idx / 4];
+      src_uv_idx++;
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[dest_y_idx + 1] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+      destY[dest_y_idx + 2] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y3, u3, v3), 0, 255);
+      destY[dest_y_idx + 3] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y4, u4, v4), 0, 255);
+
+      destU[dest_uv_idx] = CLAMP (
+          (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 1, y2,
+                  u2, v2) + APPLY_MATRIX (matrix, 1, y3, u3,
+                  v3) + APPLY_MATRIX (matrix, 1, y4, u4, v4)) / 4, 0, 255);
+      destV[dest_uv_idx] =
+          CLAMP ((APPLY_MATRIX (matrix, 2, y1, u1, v1) + APPLY_MATRIX (matrix,
+                  2, y2, u2, v2) + APPLY_MATRIX (matrix, 2, y3, u3,
+                  v3) + APPLY_MATRIX (matrix, 2, y4, u4, v4)) / 4, 0, 255);
+
+      dest_y_idx += 4;
+      src_y_idx += 4;
+      dest_uv_idx++;
+    }
+
+    /* 1.3. Now copy the last 1-3 pixels if they exist and blend them
+     *      with the background because we only fill part of
+     *      the macro pixel. In case these are the last pixels of
+     *      the destination we will fill a larger part. */
+    if (j == w - 1 && j == dest_width - 1) {
+      y1 = srcY[src_y_idx];
+      u1 = srcU[src_uv_idx / 4];
+      v1 = srcV[src_uv_idx / 4];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destU[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 1, y1, u1, v1), 0, 255);
+      /* V must use matrix row 2 (row 1 is the U row) */
+      destV[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 2, y1, u1, v1), 0, 255);
+    } else if (j == w - 1) {
+      y1 = srcY[src_y_idx];
+      u1 = srcU[src_uv_idx / 4];
+      v1 = srcV[src_uv_idx / 4];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      /* Only 1 of the 4 macro pixel positions is covered, so the
+       * background keeps a weight of 3 (mirrors the dest_x % 4 == 3
+       * case above). */
+      destU[dest_uv_idx] = CLAMP (
+          (3 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+                  v1)) / 4, 0, 255);
+      destV[dest_uv_idx] = CLAMP (
+          (3 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+                  v1)) / 4, 0, 255);
+    } else if (j == w - 2 && j == dest_width - 2) {
+      y1 = srcY[src_y_idx];
+      y2 = srcY[src_y_idx + 1];
+      u1 = srcU[src_uv_idx / 4];
+      v1 = srcV[src_uv_idx / 4];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[dest_y_idx + 1] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y2, u1, v1), 0, 255);
+      destU[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 1, y1, u1, v1), 0, 255);
+      /* V must use matrix row 2 (row 1 is the U row) */
+      destV[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 2, y1, u1, v1), 0, 255);
+    } else if (j == w - 2) {
+      y1 = srcY[src_y_idx];
+      y2 = srcY[src_y_idx + 1];
+      u1 = srcU[src_uv_idx / 4];
+      v1 = srcV[src_uv_idx / 4];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[dest_y_idx + 1] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y2, u1, v1), 0, 255);
+      /* 2 of 4 positions covered: equal weights. */
+      destU[dest_uv_idx] =
+          CLAMP ((destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+                  v1)) / 2, 0, 255);
+      destV[dest_uv_idx] =
+          CLAMP ((destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+                  v1)) / 2, 0, 255);
+    } else if (j == w - 3 && j == dest_width - 3) {
+      y1 = srcY[src_y_idx];
+      y2 = srcY[src_y_idx + 1];
+      y3 = srcY[src_y_idx + 2];
+      u1 = srcU[src_uv_idx / 4];
+      v1 = srcV[src_uv_idx / 4];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[dest_y_idx + 1] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y2, u1, v1), 0, 255);
+      destY[dest_y_idx + 2] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y3, u1, v1), 0, 255);
+      destU[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 1, y1, u1, v1), 0, 255);
+      /* V must use matrix row 2 (row 1 is the U row) */
+      destV[dest_uv_idx] = CLAMP (APPLY_MATRIX (matrix, 2, y1, u1, v1), 0, 255);
+    } else if (j == w - 3) {
+      y1 = srcY[src_y_idx];
+      y2 = srcY[src_y_idx + 1];
+      y3 = srcY[src_y_idx + 2];
+      u1 = srcU[src_uv_idx / 4];
+      v1 = srcV[src_uv_idx / 4];
+
+      destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+      destY[dest_y_idx + 1] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y2, u1, v1), 0, 255);
+      destY[dest_y_idx + 2] =
+          CLAMP (APPLY_MATRIX (matrix, 0, y3, u1, v1), 0, 255);
+      /* 3 of 4 positions covered, so the source keeps a weight of 3
+       * (mirrors the dest_x % 4 == 1 case above). */
+      destU[dest_uv_idx] =
+          CLAMP ((destU[dest_uv_idx] + 3 * APPLY_MATRIX (matrix, 1, y1, u1,
+                  v1)) / 4, 0, 255);
+      destV[dest_uv_idx] =
+          CLAMP ((destV[dest_uv_idx] + 3 * APPLY_MATRIX (matrix, 2, y1, u1,
+                  v1)) / 4, 0, 255);
+    }
+
+    destY += dest_strideY;
+    destU += dest_strideU;
+    destV += dest_strideV;
+    srcY += src_strideY;
+    srcU += src_strideU;
+    srcV += src_strideV;
+  }
+}
+
+static void
+copy_i420_i420 (guint i_alpha, GstVideoFrame * dest,
+ gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src,
+ gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+ gint i, j;
+ guint8 *destY, *destU, *destV;
+ const guint8 *srcY, *srcU, *srcV;
+ guint8 *destY2;
+ const guint8 *srcY2, *srcU2, *srcV2;
+ gint dest_strideY, dest_strideU, dest_strideV;
+ gint src_strideY, src_strideU, src_strideV;
+ gint src_y_idx, src_uv_idx;
+ gint dest_y_idx, dest_uv_idx;
+ gint matrix[12];
+ gint y1, y2, y3, y4;
+ gint u1, u2, u3, u4;
+ gint v1, v2, v3, v4;
+ gint dest_width, dest_height;
+
+ dest_width = GST_VIDEO_FRAME_WIDTH (dest);
+ dest_height = GST_VIDEO_FRAME_HEIGHT (dest);
+
+ dest_strideY = GST_VIDEO_FRAME_COMP_STRIDE (dest, 0);
+ dest_strideU = GST_VIDEO_FRAME_COMP_STRIDE (dest, 1);
+ dest_strideV = GST_VIDEO_FRAME_COMP_STRIDE (dest, 2);
+
+ src_strideY = GST_VIDEO_FRAME_COMP_STRIDE (src, 0);
+ src_strideU = GST_VIDEO_FRAME_COMP_STRIDE (src, 1);
+ src_strideV = GST_VIDEO_FRAME_COMP_STRIDE (src, 2);
+
+ destY = GST_VIDEO_FRAME_COMP_DATA (dest, 0);
+ destU = GST_VIDEO_FRAME_COMP_DATA (dest, 1);
+ destV = GST_VIDEO_FRAME_COMP_DATA (dest, 2);
+
+ srcY = GST_VIDEO_FRAME_COMP_DATA (src, 0);
+ srcU = GST_VIDEO_FRAME_COMP_DATA (src, 1);
+ srcV = GST_VIDEO_FRAME_COMP_DATA (src, 2);
+
+ destY = destY + dest_y * dest_strideY + dest_x;
+ destU = destU + (dest_y / 2) * dest_strideU + dest_x / 2;
+ destV = destV + (dest_y / 2) * dest_strideV + dest_x / 2;
+
+ srcY = srcY + src_y * src_strideY + src_x;
+ srcU = srcU + (src_y / 2) * src_strideU + src_x / 2;
+ srcV = srcV + (src_y / 2) * src_strideV + src_x / 2;
+
+ destY2 = destY + dest_strideY;
+ srcY2 = srcY + src_strideY;
+
+ h = dest_y + h;
+ w = dest_x + w;
+
+ if (src_sdtv != dest_sdtv)
+ memcpy (matrix,
+ dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+ cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+ else
+ memcpy (matrix, cog_identity_matrix_8bit, 12 * sizeof (gint));
+
+ /* 1. Handle the first destination scanline specially if it
+ * doesn't start at the macro pixel boundary, i.e. blend
+ * with the background! */
+ if (dest_y % 2 == 1) {
+ /* 1.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+ if (dest_x % 2 == 1) {
+ y1 = srcY[0];
+ u1 = srcU[0];
+ v1 = srcV[0];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[0] =
+ CLAMP ((3 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0,
+ 255);
+ destV[0] =
+ CLAMP ((3 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 4, 0,
+ 255);
+
+ j = dest_x + 1;
+ src_y_idx = dest_y_idx = dest_uv_idx = 1;
+ src_uv_idx = (src_x % 2) + 1;
+ } else {
+ j = dest_x;
+ src_y_idx = dest_y_idx = dest_uv_idx = 0;
+ src_uv_idx = (src_x % 2);
+ }
+
+ /* 1.2. Copy all macro pixels from the source to the destination
+ * but blend with the background because we're only filling
+ * the lower part of the macro pixels. */
+ for (; j < w - 1; j += 2) {
+ y1 = srcY[src_y_idx];
+ y2 = srcY[src_y_idx + 1];
+
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+ src_uv_idx++;
+ u2 = srcU[src_uv_idx / 2];
+ v2 = srcV[src_uv_idx / 2];
+ src_uv_idx++;
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[dest_y_idx + 1] =
+ CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[dest_uv_idx] =
+ CLAMP ((2 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[dest_uv_idx] =
+ CLAMP ((2 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+
+ dest_y_idx += 2;
+ src_y_idx += 2;
+ dest_uv_idx++;
+ }
+
+ /* 1.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = srcY[src_y_idx];
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0,
+ 255);
+ destV[dest_uv_idx] =
+ CLAMP ((destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = srcY[src_y_idx];
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (3 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4,
+ 0, 255);
+ destV[dest_uv_idx] =
+ CLAMP ((3 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1)) / 4, 0, 255);
+ }
+
+ destY += dest_strideY;
+ destY2 += dest_strideY;
+ destU += dest_strideU;
+ destV += dest_strideV;
+ srcY += src_strideY;
+ srcY2 += src_strideY;
+ src_y++;
+ if (src_y % 2 == 0) {
+ srcU += src_strideU;
+ srcV += src_strideV;
+ }
+ i = dest_y + 1;
+ } else {
+ i = dest_y;
+ }
+
+ /* 2. Copy all macro pixel scanlines, the destination scanline
+ * now starts at macro pixel boundary. */
+ for (; i < h - 1; i += 2) {
+ /* 2.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+
+ srcU2 = srcU;
+ srcV2 = srcV;
+ if (src_y % 2 == 1) {
+ srcU2 += src_strideU;
+ srcV2 += src_strideV;
+ }
+
+ if (dest_x % 2 == 1) {
+ y1 = srcY[0];
+ y2 = srcY2[0];
+ u1 = srcU[0];
+ v1 = srcV[0];
+ u2 = srcU2[0];
+ v2 = srcV2[0];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY2[0] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[0] = CLAMP (
+ (2 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[0] = CLAMP (
+ (2 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+ j = dest_x + 1;
+ src_y_idx = dest_y_idx = dest_uv_idx = 1;
+ src_uv_idx = (src_x % 2) + 1;
+ } else {
+ j = dest_x;
+ src_y_idx = dest_y_idx = dest_uv_idx = 0;
+ src_uv_idx = (src_x % 2);
+ }
+
+ /* 2.2. Copy all macro pixels from the source to the destination.
+ * All pixels now start at macro pixel boundary, i.e. no
+ * blending with the background is necessary. */
+ for (; j < w - 1; j += 2) {
+ y1 = srcY[src_y_idx];
+ y2 = srcY[src_y_idx + 1];
+ y3 = srcY2[src_y_idx];
+ y4 = srcY2[src_y_idx + 1];
+
+ u1 = srcU[src_uv_idx / 2];
+ u3 = srcU2[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+ v3 = srcV2[src_uv_idx / 2];
+ src_uv_idx++;
+ u2 = srcU[src_uv_idx / 2];
+ u4 = srcU2[src_uv_idx / 2];
+ v2 = srcV[src_uv_idx / 2];
+ v4 = srcV2[src_uv_idx / 2];
+ src_uv_idx++;
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[dest_y_idx + 1] =
+ CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destY2[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y3, u3, v3), 0, 255);
+ destY2[dest_y_idx + 1] =
+ CLAMP (APPLY_MATRIX (matrix, 0, y4, u4, v4), 0, 255);
+
+ destU[dest_uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 1, y2,
+ u2, v2) + APPLY_MATRIX (matrix, 1, y3, u3,
+ v3) + APPLY_MATRIX (matrix, 1, y4, u4, v4)) / 4, 0, 255);
+ destV[dest_uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 2, y1, u1, v1) + APPLY_MATRIX (matrix, 2, y2,
+ u2, v2) + APPLY_MATRIX (matrix, 2, y3, u3,
+ v3) + APPLY_MATRIX (matrix, 2, y4, u4, v4)) / 4, 0, 255);
+
+ dest_y_idx += 2;
+ src_y_idx += 2;
+ dest_uv_idx++;
+ }
+
+ /* 2.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = srcY[src_y_idx];
+ y2 = srcY2[src_y_idx];
+
+ u1 = srcU[src_uv_idx / 2];
+ u2 = srcU2[src_uv_idx / 2];
+
+ v1 = srcV[src_uv_idx / 2];
+ v2 = srcV2[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY2[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 2, y2,
+ u2, v2)) / 2, 0, 255);
+ destV[dest_uv_idx] = CLAMP (
+ (APPLY_MATRIX (matrix, 1, y1, u1, v1) + APPLY_MATRIX (matrix, 2, y2,
+ u2, v2)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = srcY[src_y_idx];
+ y2 = srcY2[src_y_idx];
+
+ u1 = srcU[src_uv_idx / 2];
+ u2 = srcU2[src_uv_idx / 2];
+
+ v1 = srcV[src_uv_idx / 2];
+ v2 = srcV2[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY2[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (2 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+ destV[dest_uv_idx] = CLAMP (
+ (2 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+ }
+
+ destY += 2 * dest_strideY;
+ destY2 += 2 * dest_strideY;
+ destU += dest_strideU;
+ destV += dest_strideV;
+ srcY += 2 * src_strideY;
+ srcY2 += 2 * src_strideY;
+
+ src_y += 2;
+ srcU += src_strideU;
+ srcV += src_strideV;
+ }
+
+ /* 3. Handle the last scanline if one exists. This again
+ * doesn't start at macro pixel boundary but should
+ * only fill the upper part of the macro pixels. */
+ if (i == h - 1 && i == dest_height - 1) {
+ /* 3.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+ if (dest_x % 2 == 1) {
+ y1 = srcY[0];
+ u1 = srcU[0];
+ v1 = srcV[0];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[0] =
+ CLAMP ((destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0, 255);
+ destV[0] =
+ CLAMP ((destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 2, 0, 255);
+
+ j = dest_x + 1;
+ src_y_idx = dest_y_idx = dest_uv_idx = 1;
+ src_uv_idx = (src_x % 2) + 1;
+ } else {
+ j = dest_x;
+ src_y_idx = dest_y_idx = dest_uv_idx = 0;
+ src_uv_idx = (src_x % 2);
+ }
+
+ /* 3.2. Copy all macro pixels from the source to the destination
+ * but blend with the background because we're only filling
+ * the upper part of the macro pixels. */
+ for (; j < w - 1; j += 2) {
+ y1 = srcY[src_y_idx];
+ y2 = srcY[src_y_idx + 1];
+
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+ src_uv_idx++;
+ u2 = srcU[src_uv_idx / 2];
+ v2 = srcV[src_uv_idx / 2];
+ src_uv_idx++;
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[dest_y_idx + 1] =
+ CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+
+ destU[dest_uv_idx] = CLAMP (
+ (2 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[dest_uv_idx] = CLAMP (
+ (2 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+
+ dest_y_idx += 2;
+ src_y_idx += 2;
+ dest_uv_idx++;
+ }
+
+ /* 3.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = srcY[src_y_idx];
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0,
+ 255);
+ destV[dest_uv_idx] =
+ CLAMP ((destV[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = srcY[src_y_idx];
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (3 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4,
+ 0, 255);
+ destV[dest_uv_idx] =
+ CLAMP ((3 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1)) / 4, 0, 255);
+ }
+ } else if (i == h - 1) {
+ /* 3.1. Handle the first destination pixel if it doesn't
+ * start at the macro pixel boundary, i.e. blend with
+ * the background! */
+ if (dest_x % 2 == 1) {
+ y1 = srcY[0];
+ u1 = srcU[0];
+ v1 = srcV[0];
+
+ destY[0] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[0] =
+ CLAMP ((3 * destU[0] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4, 0,
+ 255);
+ destV[0] =
+ CLAMP ((3 * destV[0] + APPLY_MATRIX (matrix, 2, y1, u1, v1)) / 4, 0,
+ 255);
+
+ j = dest_x + 1;
+ src_y_idx = dest_y_idx = dest_uv_idx = 1;
+ src_uv_idx = (src_x % 2) + 1;
+ } else {
+ j = dest_x;
+ src_y_idx = dest_y_idx = dest_uv_idx = 0;
+ src_uv_idx = (src_x % 2);
+ }
+
+ /* 3.2. Copy all macro pixels from the source to the destination
+ * but blend with the background because we're only filling
+ * the upper part of the macro pixels. */
+ for (; j < w - 1; j += 2) {
+ y1 = srcY[src_y_idx];
+ y2 = srcY[src_y_idx + 1];
+
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+ src_uv_idx++;
+ u2 = srcU[src_uv_idx / 2];
+ v2 = srcV[src_uv_idx / 2];
+ src_uv_idx++;
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destY[dest_y_idx + 1] =
+ CLAMP (APPLY_MATRIX (matrix, 0, y2, u2, v2), 0, 255);
+
+ destU[dest_uv_idx] = CLAMP (
+ (2 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 1, y2, u2, v2)) / 4, 0, 255);
+ destV[dest_uv_idx] = CLAMP (
+ (2 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 2, y1, u1,
+ v1) + APPLY_MATRIX (matrix, 2, y2, u2, v2)) / 4, 0, 255);
+
+ dest_y_idx += 2;
+ src_y_idx += 2;
+ dest_uv_idx++;
+ }
+
+ /* 3.3. Now copy the last pixel if one exists and blend it
+ * with the background because we only fill part of
+ * the macro pixel. In case this is the last pixel of
+ * the destination we will a larger part. */
+ if (j == w - 1 && j == dest_width - 1) {
+ y1 = srcY[src_y_idx];
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 2, 0,
+ 255);
+ destV[dest_uv_idx] =
+ CLAMP ((destV[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1)) / 2, 0, 255);
+ } else if (j == w - 1) {
+ y1 = srcY[src_y_idx];
+ u1 = srcU[src_uv_idx / 2];
+ v1 = srcV[src_uv_idx / 2];
+
+ destY[dest_y_idx] = CLAMP (APPLY_MATRIX (matrix, 0, y1, u1, v1), 0, 255);
+ destU[dest_uv_idx] = CLAMP (
+ (3 * destU[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1, v1)) / 4,
+ 0, 255);
+ destV[dest_uv_idx] =
+ CLAMP ((3 * destV[dest_uv_idx] + APPLY_MATRIX (matrix, 1, y1, u1,
+ v1)) / 4, 0, 255);
+ }
+ }
+}
+
+static void
+copy_i420_ayuv (guint i_alpha, GstVideoFrame * dest_frame,
+ gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
+ gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+ const guint8 *srcY, *srcU, *srcV;
+ gint src_strideY, src_strideU, src_strideV;
+ gint dest_stride;
+ guint8 *dest;
+
+ src_strideY = GST_VIDEO_FRAME_COMP_STRIDE (src_frame, 0);
+ src_strideU = GST_VIDEO_FRAME_COMP_STRIDE (src_frame, 1);
+ src_strideV = GST_VIDEO_FRAME_COMP_STRIDE (src_frame, 2);
+
+ srcY = GST_VIDEO_FRAME_COMP_DATA (src_frame, 0);
+ srcU = GST_VIDEO_FRAME_COMP_DATA (src_frame, 1);
+ srcV = GST_VIDEO_FRAME_COMP_DATA (src_frame, 2);
+
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest_frame, 0);
+
+ dest = GST_VIDEO_FRAME_PLANE_DATA (dest_frame, 0);
+ dest = dest + dest_y * dest_stride + dest_x * 4;
+
+ srcY = srcY + src_y * src_strideY + src_x;
+ srcU = srcU + (src_y / 2) * src_strideU + src_x / 2;
+ srcV = srcV + (src_y / 2) * src_strideV + src_x / 2;
+
+ i_alpha = MIN (i_alpha, 255);
+
+ if (src_sdtv != dest_sdtv) {
+ gint i, j, uv_idx;
+ gint y, u, v;
+ gint y1, u1, v1;
+ gint matrix[12];
+
+ memcpy (matrix,
+ dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+ cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+
+ for (i = 0; i < h; i++) {
+ for (j = 0, uv_idx = src_x % 2; j < w; j++, uv_idx++) {
+ y = srcY[j];
+ u = srcU[uv_idx / 2];
+ v = srcV[uv_idx / 2];
+
+ y1 = APPLY_MATRIX (matrix, 0, y, u, v);
+ u1 = APPLY_MATRIX (matrix, 1, y, u, v);
+ v1 = APPLY_MATRIX (matrix, 2, y, u, v);
+
+ dest[4 * j + 0] = i_alpha;
+ dest[4 * j + 1] = y1;
+ dest[4 * j + 2] = u1;
+ dest[4 * j + 3] = v1;
+ }
+ dest += dest_stride;
+
+ src_y++;
+ srcY += src_strideY;
+ if (src_y % 2 == 0) {
+ srcU += src_strideU;
+ srcV += src_strideV;
+ }
+ }
+ } else {
+ gint i, j, uv_idx;
+ gint y, u, v;
+
+ for (i = 0; i < h; i++) {
+ for (j = 0, uv_idx = src_x % 2; j < w; j++, uv_idx++) {
+ y = srcY[j];
+ u = srcU[uv_idx / 2];
+ v = srcV[uv_idx / 2];
+
+ dest[4 * j + 0] = i_alpha;
+ dest[4 * j + 1] = y;
+ dest[4 * j + 2] = u;
+ dest[4 * j + 3] = v;
+ }
+ dest += dest_stride;
+
+ src_y++;
+ srcY += src_strideY;
+ if (src_y % 2 == 0) {
+ srcU += src_strideU;
+ srcV += src_strideV;
+ }
+ }
+ }
+}
+
+static void
+fill_rgb32 (GstVideoBoxFill fill_type, guint b_alpha,
+ GstVideoFrame * frame, gboolean sdtv)
+{
+ guint32 empty_pixel;
+ gint p[4];
+ guint8 *dest;
+ guint stride;
+ gint width, height;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ p[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 3);
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ b_alpha = MIN (b_alpha, 255);
+
+ if (GST_VIDEO_FRAME_N_COMPONENTS (frame) == 4) {
+ empty_pixel = GUINT32_FROM_LE ((b_alpha << (p[0] * 8)) |
+ (rgb_colors_R[fill_type] << (p[1] * 8)) |
+ (rgb_colors_G[fill_type] << (p[2] * 8)) |
+ (rgb_colors_B[fill_type] << (p[3] * 8)));
+ } else {
+ empty_pixel = GUINT32_FROM_LE (
+ (rgb_colors_R[fill_type] << (p[1] * 8)) |
+ (rgb_colors_G[fill_type] << (p[2] * 8)) |
+ (rgb_colors_B[fill_type] << (p[3] * 8)));
+ }
+
+ if (stride == width * 4) {
+ video_box_orc_splat_u32 ((guint32 *) dest, empty_pixel, width * height);
+ } else if (height) {
+ for (; height; --height) {
+ video_box_orc_splat_u32 ((guint32 *) dest, empty_pixel, width);
+ dest += stride;
+ }
+ }
+}
+
+static void
+fill_rgb24 (GstVideoBoxFill fill_type, guint b_alpha,
+ GstVideoFrame * frame, gboolean sdtv)
+{
+ gint dest_stride;
+ gint p[4];
+ gint i, j;
+ guint8 *dest;
+ gint width, height;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ p[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ p[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ p[3] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ dest[3 * j + p[1]] = rgb_colors_R[fill_type];
+ dest[3 * j + p[2]] = rgb_colors_G[fill_type];
+ dest[3 * j + p[3]] = rgb_colors_B[fill_type];
+ }
+ dest += dest_stride;
+ }
+}
+
/* Copy a rectangle of pixels between two RGB frames.
 *
 * Handles every combination of 32 bpp (with or without alpha byte) and
 * 24 bpp ("packed") layouts by looking up the per-component byte offsets
 * of both frames.  The sdtv flags are unused: RGB-to-RGB copies need no
 * colorimetry conversion.
 *
 * i_alpha scales the source alpha when both frames carry alpha
 * (out = (src_alpha * i_alpha) >> 8); otherwise it is clamped to 255 and
 * written as-is to an alpha-capable destination.
 */
static void
copy_rgb32 (guint i_alpha, GstVideoFrame * dest_frame,
    gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
    gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
{
  gint i, j;
  gint src_stride, dest_stride;
  gboolean in_alpha, out_alpha;
  gint in_bpp, out_bpp;
  gint p_out[4];                /* dest byte offsets of A, R, G, B */
  gint p_in[4];                 /* src byte offsets of A, R, G, B */
  gboolean packed_out;          /* TRUE when dest is 24 bpp */
  gboolean packed_in;           /* TRUE when src is 24 bpp */
  guint8 *src, *dest;

  src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src_frame, 0);
  dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest_frame, 0);
  in_bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (src_frame, 0);
  out_bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (dest_frame, 0);
  packed_in = (in_bpp < 4);
  packed_out = (out_bpp < 4);

  out_alpha = GST_VIDEO_INFO_HAS_ALPHA (&dest_frame->info);
  p_out[0] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 3);
  p_out[1] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 0);
  p_out[2] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 1);
  p_out[3] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 2);

  in_alpha = GST_VIDEO_INFO_HAS_ALPHA (&src_frame->info);
  p_in[0] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 3);
  p_in[1] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 0);
  p_in[2] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 1);
  p_in[3] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 2);

  dest = GST_VIDEO_FRAME_PLANE_DATA (dest_frame, 0);
  dest = dest + dest_y * dest_stride + dest_x * out_bpp;
  src = GST_VIDEO_FRAME_PLANE_DATA (src_frame, 0);
  src = src + src_y * src_stride + src_x * in_bpp;

  if (in_alpha && out_alpha) {
    /* Both 32 bpp with alpha: scale the source alpha by i_alpha.
     * w is converted from pixels to bytes (4 bpp on both sides). */
    w *= 4;
    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j += 4) {
        dest[j + p_out[0]] = (src[j + p_in[0]] * i_alpha) >> 8;
        dest[j + p_out[1]] = src[j + p_in[1]];
        dest[j + p_out[2]] = src[j + p_in[2]];
        dest[j + p_out[3]] = src[j + p_in[3]];
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else if (out_alpha && !packed_in) {
    /* 32 bpp in and out but the source has no alpha channel:
     * write the (clamped) requested alpha into every pixel. */
    w *= 4;
    i_alpha = MIN (i_alpha, 255);

    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j += 4) {
        dest[j + p_out[0]] = i_alpha;
        dest[j + p_out[1]] = src[j + p_in[1]];
        dest[j + p_out[2]] = src[j + p_in[2]];
        dest[j + p_out[3]] = src[j + p_in[3]];
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else if (out_alpha && packed_in) {
    /* 24 bpp source into an alpha-capable destination: per-pixel
     * indexing with the two different pixel strides. */
    i_alpha = MIN (i_alpha, 255);

    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j++) {
        dest[4 * j + p_out[0]] = i_alpha;
        dest[4 * j + p_out[1]] = src[in_bpp * j + p_in[1]];
        dest[4 * j + p_out[2]] = src[in_bpp * j + p_in[2]];
        dest[4 * j + p_out[3]] = src[in_bpp * j + p_in[3]];
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else if (!packed_out && !packed_in) {
    /* Both 32 bpp, destination without alpha: copy R, G, B only and
     * leave the destination's filler byte untouched. */
    w *= 4;
    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j += 4) {
        dest[j + p_out[1]] = src[j + p_in[1]];
        dest[j + p_out[2]] = src[j + p_in[2]];
        dest[j + p_out[3]] = src[j + p_in[3]];
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else {
    /* Generic fallback: at least one side is 24 bpp and the output
     * carries no alpha */
    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j++) {
        dest[out_bpp * j + p_out[1]] = src[in_bpp * j + p_in[1]];
        dest[out_bpp * j + p_out[2]] = src[in_bpp * j + p_in[2]];
        dest[out_bpp * j + p_out[3]] = src[in_bpp * j + p_in[3]];
      }
      dest += dest_stride;
      src += src_stride;
    }
  }
}
+
/* Copy a rectangle from an RGB frame (24 or 32 bpp, any component order)
 * into an AYUV frame, converting colors with the RGB -> YCbCr matrix that
 * matches the destination colorimetry (SDTV or HDTV).
 *
 * If the source has an alpha channel it is scaled by i_alpha
 * ((src_a * i_alpha) >> 8); otherwise i_alpha itself (clamped to 255) is
 * written.  Converted Y, Cb, Cr results are clamped to [0, 255].
 */
static void
copy_rgb32_ayuv (guint i_alpha, GstVideoFrame * dest_frame,
    gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
    gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
{
  gint i, j;
  gint src_stride, dest_stride;
  gboolean in_alpha;
  gint in_bpp;
  gint p_in[4];                 /* src byte offsets of A, R, G, B */
  gboolean packed_in;           /* TRUE when src is 24 bpp */
  gint matrix[12];
  gint a;
  gint y, u, v;
  gint r, g, b;
  guint8 *dest, *src;

  dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest_frame, 0);
  src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src_frame, 0);
  in_bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (src_frame, 0);
  packed_in = (in_bpp < 4);

  in_alpha = GST_VIDEO_INFO_HAS_ALPHA (&src_frame->info);
  p_in[0] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 3);
  p_in[1] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 0);
  p_in[2] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 1);
  p_in[3] = GST_VIDEO_FRAME_COMP_OFFSET (src_frame, 2);

  /* Conversion matrix is chosen from the destination's colorimetry */
  memcpy (matrix,
      (dest_sdtv) ? cog_rgb_to_ycbcr_matrix_8bit_sdtv :
      cog_rgb_to_ycbcr_matrix_8bit_hdtv, 12 * sizeof (gint));

  dest = GST_VIDEO_FRAME_PLANE_DATA (dest_frame, 0);
  dest = dest + dest_y * dest_stride + dest_x * 4;
  src = GST_VIDEO_FRAME_PLANE_DATA (src_frame, 0);
  src = src + src_y * src_stride + src_x * in_bpp;

  if (in_alpha) {
    /* 32 bpp source with alpha; w becomes a byte count (4 bpp) */
    w *= 4;
    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j += 4) {
        a = (src[j + p_in[0]] * i_alpha) >> 8;
        r = src[j + p_in[1]];
        g = src[j + p_in[2]];
        b = src[j + p_in[3]];

        y = APPLY_MATRIX (matrix, 0, r, g, b);
        u = APPLY_MATRIX (matrix, 1, r, g, b);
        v = APPLY_MATRIX (matrix, 2, r, g, b);

        dest[j + 0] = a;
        dest[j + 1] = CLAMP (y, 0, 255);
        dest[j + 2] = CLAMP (u, 0, 255);
        dest[j + 3] = CLAMP (v, 0, 255);
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else if (!packed_in) {
    /* 32 bpp source without alpha: use the requested alpha directly */
    w *= 4;
    i_alpha = MIN (i_alpha, 255);

    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j += 4) {
        a = i_alpha;
        r = src[j + p_in[1]];
        g = src[j + p_in[2]];
        b = src[j + p_in[3]];

        y = APPLY_MATRIX (matrix, 0, r, g, b);
        u = APPLY_MATRIX (matrix, 1, r, g, b);
        v = APPLY_MATRIX (matrix, 2, r, g, b);

        dest[j + 0] = a;
        dest[j + 1] = CLAMP (y, 0, 255);
        dest[j + 2] = CLAMP (u, 0, 255);
        dest[j + 3] = CLAMP (v, 0, 255);
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else {
    /* 24 bpp source: per-pixel indexing with the input pixel stride */
    i_alpha = MIN (i_alpha, 255);

    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j++) {
        a = i_alpha;
        r = src[in_bpp * j + p_in[1]];
        g = src[in_bpp * j + p_in[2]];
        b = src[in_bpp * j + p_in[3]];

        y = APPLY_MATRIX (matrix, 0, r, g, b);
        u = APPLY_MATRIX (matrix, 1, r, g, b);
        v = APPLY_MATRIX (matrix, 2, r, g, b);

        dest[4 * j + 0] = a;
        dest[4 * j + 1] = CLAMP (y, 0, 255);
        dest[4 * j + 2] = CLAMP (u, 0, 255);
        dest[4 * j + 3] = CLAMP (v, 0, 255);
      }
      dest += dest_stride;
      src += src_stride;
    }
  }
}
+
/* Copy a rectangle from an AYUV frame into an RGB frame (24 or 32 bpp,
 * any component order), converting with the YCbCr -> RGB matrix that
 * matches the source colorimetry (SDTV or HDTV).
 *
 * If the destination has an alpha channel, the source alpha is scaled by
 * i_alpha ((src_a * i_alpha) >> 8); otherwise alpha is dropped.
 * Converted R, G, B results are clamped to [0, 255].
 */
static void
copy_ayuv_rgb32 (guint i_alpha, GstVideoFrame * dest_frame,
    gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
    gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
{
  gint i, j;
  gint src_stride, dest_stride;
  gboolean out_alpha;
  gint out_bpp;
  gint p_out[4];                /* dest byte offsets of A, R, G, B */
  gboolean packed_out;          /* TRUE when dest is 24 bpp */
  gint matrix[12];
  gint a;
  gint y, u, v;
  gint r, g, b;
  guint8 *src, *dest;

  src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src_frame, 0);
  dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest_frame, 0);
  out_bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (dest_frame, 0);
  packed_out = (out_bpp < 4);

  out_alpha = GST_VIDEO_INFO_HAS_ALPHA (&dest_frame->info);
  p_out[0] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 3);
  p_out[1] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 0);
  p_out[2] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 1);
  p_out[3] = GST_VIDEO_FRAME_COMP_OFFSET (dest_frame, 2);

  /* Conversion matrix is chosen from the source's colorimetry */
  memcpy (matrix,
      (src_sdtv) ? cog_ycbcr_to_rgb_matrix_8bit_sdtv :
      cog_ycbcr_to_rgb_matrix_8bit_hdtv, 12 * sizeof (gint));

  dest = GST_VIDEO_FRAME_PLANE_DATA (dest_frame, 0);
  dest = dest + dest_y * dest_stride + dest_x * out_bpp;
  src = GST_VIDEO_FRAME_PLANE_DATA (src_frame, 0);
  src = src + src_y * src_stride + src_x * 4;

  if (out_alpha) {
    /* Alpha-capable destination; w becomes a byte count (4 bpp) */
    w *= 4;
    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j += 4) {
        a = (src[j + 0] * i_alpha) >> 8;
        y = src[j + 1];
        u = src[j + 2];
        v = src[j + 3];

        r = APPLY_MATRIX (matrix, 0, y, u, v);
        g = APPLY_MATRIX (matrix, 1, y, u, v);
        b = APPLY_MATRIX (matrix, 2, y, u, v);

        dest[j + p_out[0]] = a;
        dest[j + p_out[1]] = CLAMP (r, 0, 255);
        dest[j + p_out[2]] = CLAMP (g, 0, 255);
        dest[j + p_out[3]] = CLAMP (b, 0, 255);
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else if (!packed_out) {
    /* 32 bpp destination without alpha: filler byte left untouched */
    w *= 4;
    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j += 4) {
        y = src[j + 1];
        u = src[j + 2];
        v = src[j + 3];

        r = APPLY_MATRIX (matrix, 0, y, u, v);
        g = APPLY_MATRIX (matrix, 1, y, u, v);
        b = APPLY_MATRIX (matrix, 2, y, u, v);

        dest[j + p_out[1]] = CLAMP (r, 0, 255);
        dest[j + p_out[2]] = CLAMP (g, 0, 255);
        dest[j + p_out[3]] = CLAMP (b, 0, 255);
      }
      dest += dest_stride;
      src += src_stride;
    }
  } else {
    /* 24 bpp destination: per-pixel indexing with the output stride */
    for (i = 0; i < h; i++) {
      for (j = 0; j < w; j++) {
        y = src[4 * j + 1];
        u = src[4 * j + 2];
        v = src[4 * j + 3];

        r = APPLY_MATRIX (matrix, 0, y, u, v);
        g = APPLY_MATRIX (matrix, 1, y, u, v);
        b = APPLY_MATRIX (matrix, 2, y, u, v);

        dest[out_bpp * j + p_out[1]] = CLAMP (r, 0, 255);
        dest[out_bpp * j + p_out[2]] = CLAMP (g, 0, 255);
        dest[out_bpp * j + p_out[3]] = CLAMP (b, 0, 255);
      }
      dest += dest_stride;
      src += src_stride;
    }
  }
}
+
+static void
+fill_gray (GstVideoBoxFill fill_type, guint b_alpha,
+ GstVideoFrame * frame, gboolean sdtv)
+{
+ gint i, j;
+ gint dest_stride;
+ guint8 *dest;
+ gint width, height;
+ GstVideoFormat format;
+
+ format = GST_VIDEO_FRAME_FORMAT (frame);
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ if (format == GST_VIDEO_FORMAT_GRAY8) {
+ guint8 val = yuv_sdtv_colors_Y[fill_type];
+
+ for (i = 0; i < height; i++) {
+ memset (dest, val, width);
+ dest += dest_stride;
+ }
+ } else {
+ guint16 val = yuv_sdtv_colors_Y[fill_type] << 8;
+
+ if (format == GST_VIDEO_FORMAT_GRAY16_BE) {
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ GST_WRITE_UINT16_BE (dest + 2 * j, val);
+ }
+ dest += dest_stride;
+ }
+ } else {
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ GST_WRITE_UINT16_LE (dest + 2 * j, val);
+ }
+ dest += dest_stride;
+ }
+ }
+ }
+}
+
+static void
+copy_packed_simple (guint i_alpha, GstVideoFrame * dest_frame,
+ gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
+ gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+ gint i;
+ gint src_stride, dest_stride;
+ gint pixel_stride, row_size;
+ guint8 *src, *dest;
+
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src_frame, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest_frame, 0);
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (dest_frame, 0);
+ row_size = w * pixel_stride;
+
+ dest = GST_VIDEO_FRAME_PLANE_DATA (dest_frame, 0);
+ dest = dest + dest_y * dest_stride + dest_x * pixel_stride;
+ src = GST_VIDEO_FRAME_PLANE_DATA (src_frame, 0);
+ src = src + src_y * src_stride + src_x * pixel_stride;
+
+ for (i = 0; i < h; i++) {
+ memcpy (dest, src, row_size);
+ dest += dest_stride;
+ src += src_stride;
+ }
+}
+
+static void
+fill_yuy2 (GstVideoBoxFill fill_type, guint b_alpha,
+ GstVideoFrame * frame, gboolean sdtv)
+{
+ guint8 y, u, v;
+ gint i, j;
+ gint stride;
+ gint width, height;
+ guint8 *dest;
+ GstVideoFormat format;
+
+ format = GST_VIDEO_FRAME_FORMAT (frame);
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ y = (sdtv) ? yuv_sdtv_colors_Y[fill_type] : yuv_hdtv_colors_Y[fill_type];
+ u = (sdtv) ? yuv_sdtv_colors_U[fill_type] : yuv_hdtv_colors_U[fill_type];
+ v = (sdtv) ? yuv_sdtv_colors_V[fill_type] : yuv_hdtv_colors_V[fill_type];
+
+ width = width + (width % 2);
+
+ if (format == GST_VIDEO_FORMAT_YUY2) {
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j += 2) {
+ dest[j * 2 + 0] = y;
+ dest[j * 2 + 1] = u;
+ dest[j * 2 + 2] = y;
+ dest[j * 2 + 3] = v;
+ }
+
+ dest += stride;
+ }
+ } else if (format == GST_VIDEO_FORMAT_YVYU) {
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j += 2) {
+ dest[j * 2 + 0] = y;
+ dest[j * 2 + 1] = v;
+ dest[j * 2 + 2] = y;
+ dest[j * 2 + 3] = u;
+ }
+
+ dest += stride;
+ }
+ } else {
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j += 2) {
+ dest[j * 2 + 0] = u;
+ dest[j * 2 + 1] = y;
+ dest[j * 2 + 2] = v;
+ dest[j * 2 + 3] = y;
+ }
+
+ dest += stride;
+ }
+ }
+}
+
+static void
+copy_yuy2_yuy2 (guint i_alpha, GstVideoFrame * dest_frame,
+ gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src_frame,
+ gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h)
+{
+ gint i, j;
+ gint src_stride, dest_stride;
+ guint8 *src, *dest;
+ GstVideoFormat src_format;
+
+ src_format = GST_VIDEO_FRAME_FORMAT (src_frame);
+
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src_frame, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest_frame, 0);
+
+ dest_x = (dest_x & ~1);
+ src_x = (src_x & ~1);
+
+ w = w + (w % 2);
+
+ dest = GST_VIDEO_FRAME_PLANE_DATA (dest_frame, 0);
+ dest = dest + dest_y * dest_stride + dest_x * 2;
+ src = GST_VIDEO_FRAME_PLANE_DATA (src_frame, 0);
+ src = src + src_y * src_stride + src_x * 2;
+
+ if (src_sdtv != dest_sdtv) {
+ gint y1, u1, v1;
+ gint y2, u2, v2;
+ gint matrix[12];
+
+ memcpy (matrix,
+ dest_sdtv ? cog_ycbcr_hdtv_to_ycbcr_sdtv_matrix_8bit :
+ cog_ycbcr_sdtv_to_ycbcr_hdtv_matrix_8bit, 12 * sizeof (gint));
+
+ if (src_format == GST_VIDEO_FORMAT_YUY2) {
+ for (i = 0; i < h; i++) {
+ for (j = 0; j < w; j += 2) {
+ y1 = src[j * 2 + 0];
+ y2 = src[j * 2 + 2];
+ u1 = u2 = src[j * 2 + 1];
+ v1 = v2 = src[j * 2 + 3];
+
+ dest[j * 2 + 0] = APPLY_MATRIX (matrix, 0, y1, u1, v1);
+ dest[j * 2 + 1] = APPLY_MATRIX (matrix, 1, y1, u1, v1);
+ dest[j * 2 + 2] = APPLY_MATRIX (matrix, 0, y1, u2, v2);
+ dest[j * 2 + 3] = APPLY_MATRIX (matrix, 2, y2, u2, v2);
+ }
+ dest += dest_stride;
+ src += src_stride;
+ }
+ } else if (src_format == GST_VIDEO_FORMAT_YVYU) {
+ for (i = 0; i < h; i++) {
+ for (j = 0; j < w; j += 2) {
+ y1 = src[j * 2 + 0];
+ y2 = src[j * 2 + 2];
+ v1 = v2 = src[j * 2 + 1];
+ u1 = u2 = src[j * 2 + 3];
+
+ dest[j * 2 + 0] = APPLY_MATRIX (matrix, 0, y1, u1, v1);
+ dest[j * 2 + 1] = APPLY_MATRIX (matrix, 2, y1, u1, v1);
+ dest[j * 2 + 2] = APPLY_MATRIX (matrix, 0, y1, u2, v2);
+ dest[j * 2 + 3] = APPLY_MATRIX (matrix, 1, y2, u2, v2);
+ }
+ dest += dest_stride;
+ src += src_stride;
+ }
+ } else {
+ for (i = 0; i < h; i++) {
+ for (j = 0; j < w; j += 2) {
+ u1 = u2 = src[j * 2 + 0];
+ v1 = v2 = src[j * 2 + 2];
+ y1 = src[j * 2 + 1];
+ y2 = src[j * 2 + 3];
+
+ dest[j * 2 + 1] = APPLY_MATRIX (matrix, 0, y1, u1, v1);
+ dest[j * 2 + 0] = APPLY_MATRIX (matrix, 1, y1, u1, v1);
+ dest[j * 2 + 3] = APPLY_MATRIX (matrix, 0, y1, u2, v2);
+ dest[j * 2 + 2] = APPLY_MATRIX (matrix, 2, y2, u2, v2);
+ }
+ dest += dest_stride;
+ src += src_stride;
+ }
+ }
+ } else {
+ for (i = 0; i < h; i++) {
+ memcpy (dest, src, w * 2);
+ dest += dest_stride;
+ src += src_stride;
+ }
+ }
+}
+
/* Default property values */
#define DEFAULT_LEFT 0
#define DEFAULT_RIGHT 0
#define DEFAULT_TOP 0
#define DEFAULT_BOTTOM 0
#define DEFAULT_FILL_TYPE VIDEO_BOX_FILL_BLACK
#define DEFAULT_ALPHA 1.0
#define DEFAULT_BORDER_ALPHA 1.0

/* GObject property IDs */
enum
{
  PROP_0,
  PROP_LEFT,
  PROP_RIGHT,
  PROP_TOP,
  PROP_BOTTOM,
  PROP_FILL_TYPE,
  PROP_ALPHA,
  PROP_BORDER_ALPHA,
  PROP_AUTOCROP
      /* FILL ME */
};
+
/* Source and sink accept the same set of raw video formats: the element
 * only crops/pads pixels, it never converts between formats. */
static GstStaticPadTemplate gst_video_box_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
            "ARGB, BGRA, ABGR, RGBA, xRGB, BGRx, xBGR, RGBx, RGB, BGR, "
            "Y444, Y42B, YUY2, YVYU, UYVY, I420, YV12, Y41B, "
            "GRAY8, GRAY16_BE, GRAY16_LE } "))
    );

static GstStaticPadTemplate gst_video_box_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
            "ARGB, BGRA, ABGR, RGBA, xRGB, BGRx, xBGR, RGBx, RGB, BGR, "
            "Y444, Y42B, YUY2, YVYU, UYVY, I420, YV12, Y41B, "
            "GRAY8, GRAY16_BE, GRAY16_LE } "))
    );

/* Register the GstVideoBox type and the "videobox" element factory */
#define gst_video_box_parent_class parent_class
G_DEFINE_TYPE (GstVideoBox, gst_video_box, GST_TYPE_VIDEO_FILTER);
GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (videobox, "videobox", GST_RANK_NONE,
    GST_TYPE_VIDEO_BOX, GST_DEBUG_CATEGORY_INIT (videobox_debug, "videobox", 0,
        "Resizes a video by adding borders or cropping"));
+
/* Forward declarations */
static void gst_video_box_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_video_box_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

static gboolean gst_video_box_recalc_transform (GstVideoBox * video_box);
static GstCaps *gst_video_box_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * from, GstCaps * filter);
static void gst_video_box_before_transform (GstBaseTransform * trans,
    GstBuffer * in);
static gboolean gst_video_box_src_event (GstBaseTransform * trans,
    GstEvent * event);

static gboolean gst_video_box_set_info (GstVideoFilter * vfilter, GstCaps * in,
    GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info);
static GstFlowReturn gst_video_box_transform_frame (GstVideoFilter * vfilter,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame);

/* Registered enum type for the "fill" property */
#define GST_TYPE_VIDEO_BOX_FILL (gst_video_box_fill_get_type())
+static GType
+gst_video_box_fill_get_type (void)
+{
+ static GType video_box_fill_type = 0;
+ static const GEnumValue video_box_fill[] = {
+ {VIDEO_BOX_FILL_BLACK, "Black", "black"},
+ {VIDEO_BOX_FILL_GREEN, "Green", "green"},
+ {VIDEO_BOX_FILL_BLUE, "Blue", "blue"},
+ {VIDEO_BOX_FILL_RED, "Red", "red"},
+ {VIDEO_BOX_FILL_YELLOW, "Yellow", "yellow"},
+ {VIDEO_BOX_FILL_WHITE, "White", "white"},
+ {0, NULL, NULL},
+ };
+
+ if (!video_box_fill_type) {
+ video_box_fill_type =
+ g_enum_register_static ("GstVideoBoxFill", video_box_fill);
+ }
+ return video_box_fill_type;
+}
+
+static void
+gst_video_box_finalize (GObject * object)
+{
+ GstVideoBox *video_box = GST_VIDEO_BOX (object);
+
+ g_mutex_clear (&video_box->mutex);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_video_box_class_init (GstVideoBoxClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *element_class = (GstElementClass *) (klass);
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->set_property = gst_video_box_set_property;
+ gobject_class->get_property = gst_video_box_get_property;
+ gobject_class->finalize = gst_video_box_finalize;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_FILL_TYPE,
+ g_param_spec_enum ("fill", "Fill", "How to fill the borders",
+ GST_TYPE_VIDEO_BOX_FILL, DEFAULT_FILL_TYPE,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_LEFT,
+ g_param_spec_int ("left", "Left",
+ "Pixels to box at left (<0 = add a border)", G_MININT, G_MAXINT,
+ DEFAULT_LEFT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_RIGHT,
+ g_param_spec_int ("right", "Right",
+ "Pixels to box at right (<0 = add a border)", G_MININT, G_MAXINT,
+ DEFAULT_RIGHT,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_TOP,
+ g_param_spec_int ("top", "Top",
+ "Pixels to box at top (<0 = add a border)", G_MININT, G_MAXINT,
+ DEFAULT_TOP,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BOTTOM,
+ g_param_spec_int ("bottom", "Bottom",
+ "Pixels to box at bottom (<0 = add a border)", G_MININT, G_MAXINT,
+ DEFAULT_BOTTOM,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_ALPHA,
+ g_param_spec_double ("alpha", "Alpha", "Alpha value picture", 0.0, 1.0,
+ DEFAULT_ALPHA,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BORDER_ALPHA,
+ g_param_spec_double ("border-alpha", "Border Alpha",
+ "Alpha value of the border", 0.0, 1.0, DEFAULT_BORDER_ALPHA,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
+ /**
+ * GstVideoBox:autocrop:
+ *
+ * If set to %TRUE videobox will automatically crop/pad the input
+ * video to be centered in the output.
+ */
+ g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_AUTOCROP,
+ g_param_spec_boolean ("autocrop", "Auto crop",
+ "Auto crop", FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ trans_class->before_transform =
+ GST_DEBUG_FUNCPTR (gst_video_box_before_transform);
+ trans_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_video_box_transform_caps);
+ trans_class->src_event = GST_DEBUG_FUNCPTR (gst_video_box_src_event);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_box_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_video_box_transform_frame);
+
+ gst_element_class_set_static_metadata (element_class, "Video box filter",
+ "Filter/Effect/Video",
+ "Resizes a video by adding borders or cropping",
+ "Wim Taymans <wim@fluendo.com>");
+
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_video_box_sink_template);
+ gst_element_class_add_static_pad_template (element_class,
+ &gst_video_box_src_template);
+
+ gst_type_mark_as_plugin_api (GST_TYPE_VIDEO_BOX_FILL, 0);
+}
+
+static void
+gst_video_box_init (GstVideoBox * video_box)
+{
+ video_box->box_right = DEFAULT_RIGHT;
+ video_box->box_left = DEFAULT_LEFT;
+ video_box->box_top = DEFAULT_TOP;
+ video_box->box_bottom = DEFAULT_BOTTOM;
+ video_box->crop_right = 0;
+ video_box->crop_left = 0;
+ video_box->crop_top = 0;
+ video_box->crop_bottom = 0;
+ video_box->fill_type = DEFAULT_FILL_TYPE;
+ video_box->alpha = DEFAULT_ALPHA;
+ video_box->border_alpha = DEFAULT_BORDER_ALPHA;
+ video_box->autocrop = FALSE;
+
+ g_mutex_init (&video_box->mutex);
+}
+
+static void
+gst_video_box_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVideoBox *video_box = GST_VIDEO_BOX (object);
+
+ g_mutex_lock (&video_box->mutex);
+ switch (prop_id) {
+ case PROP_LEFT:
+ video_box->box_left = g_value_get_int (value);
+ if (video_box->box_left < 0) {
+ video_box->border_left = -video_box->box_left;
+ video_box->crop_left = 0;
+ } else {
+ video_box->border_left = 0;
+ video_box->crop_left = video_box->box_left;
+ }
+ break;
+ case PROP_RIGHT:
+ video_box->box_right = g_value_get_int (value);
+ if (video_box->box_right < 0) {
+ video_box->border_right = -video_box->box_right;
+ video_box->crop_right = 0;
+ } else {
+ video_box->border_right = 0;
+ video_box->crop_right = video_box->box_right;
+ }
+ break;
+ case PROP_TOP:
+ video_box->box_top = g_value_get_int (value);
+ if (video_box->box_top < 0) {
+ video_box->border_top = -video_box->box_top;
+ video_box->crop_top = 0;
+ } else {
+ video_box->border_top = 0;
+ video_box->crop_top = video_box->box_top;
+ }
+ break;
+ case PROP_BOTTOM:
+ video_box->box_bottom = g_value_get_int (value);
+ if (video_box->box_bottom < 0) {
+ video_box->border_bottom = -video_box->box_bottom;
+ video_box->crop_bottom = 0;
+ } else {
+ video_box->border_bottom = 0;
+ video_box->crop_bottom = video_box->box_bottom;
+ }
+ break;
+ case PROP_FILL_TYPE:
+ video_box->fill_type = g_value_get_enum (value);
+ break;
+ case PROP_ALPHA:
+ video_box->alpha = g_value_get_double (value);
+ break;
+ case PROP_BORDER_ALPHA:
+ video_box->border_alpha = g_value_get_double (value);
+ break;
+ case PROP_AUTOCROP:
+ video_box->autocrop = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ gst_video_box_recalc_transform (video_box);
+
+ GST_DEBUG_OBJECT (video_box, "Calling reconfigure");
+ gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM_CAST (video_box));
+
+ g_mutex_unlock (&video_box->mutex);
+}
+
+/* Derive box_*, crop_* and border_* so that the input is centred inside the
+ * output.  crop_w/crop_h are the total number of columns/rows to remove
+ * (negative when borders must be added instead).  The difference is split
+ * between left/right resp. top/bottom; for odd differences the right/bottom
+ * side gets the extra pixel.  The only caller in this file,
+ * gst_video_box_set_info(), holds video_box->mutex. */
+static void
+gst_video_box_autocrop (GstVideoBox * video_box)
+{
+  gint crop_w = video_box->in_width - video_box->out_width;
+  gint crop_h = video_box->in_height - video_box->out_height;
+
+  /* left side gets the rounded-down half */
+  video_box->box_left = crop_w / 2;
+  if (video_box->box_left < 0) {
+    video_box->border_left = -video_box->box_left;
+    video_box->crop_left = 0;
+  } else {
+    video_box->border_left = 0;
+    video_box->crop_left = video_box->box_left;
+  }
+
+  /* Round down/up for odd width differences */
+  if (crop_w < 0)
+    crop_w -= 1;
+  else
+    crop_w += 1;
+
+  /* right side gets the remainder (box_left + box_right == original crop_w) */
+  video_box->box_right = crop_w / 2;
+  if (video_box->box_right < 0) {
+    video_box->border_right = -video_box->box_right;
+    video_box->crop_right = 0;
+  } else {
+    video_box->border_right = 0;
+    video_box->crop_right = video_box->box_right;
+  }
+
+  video_box->box_top = crop_h / 2;
+  if (video_box->box_top < 0) {
+    video_box->border_top = -video_box->box_top;
+    video_box->crop_top = 0;
+  } else {
+    video_box->border_top = 0;
+    video_box->crop_top = video_box->box_top;
+  }
+
+  /* Round down/up for odd height differences */
+  if (crop_h < 0)
+    crop_h -= 1;
+  else
+    crop_h += 1;
+  video_box->box_bottom = crop_h / 2;
+
+  if (video_box->box_bottom < 0) {
+    video_box->border_bottom = -video_box->box_bottom;
+    video_box->crop_bottom = 0;
+  } else {
+    video_box->border_bottom = 0;
+    video_box->crop_bottom = video_box->box_bottom;
+  }
+}
+
+/* Standard GObject property getter: returns the raw (signed) box values and
+ * the fill/alpha/autocrop settings.
+ * NOTE(review): unlike the setter, this reads the fields without taking
+ * video_box->mutex — presumably tolerated for these scalar reads; confirm. */
+static void
+gst_video_box_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstVideoBox *video_box = GST_VIDEO_BOX (object);
+
+  switch (prop_id) {
+    case PROP_LEFT:
+      g_value_set_int (value, video_box->box_left);
+      break;
+    case PROP_RIGHT:
+      g_value_set_int (value, video_box->box_right);
+      break;
+    case PROP_TOP:
+      g_value_set_int (value, video_box->box_top);
+      break;
+    case PROP_BOTTOM:
+      g_value_set_int (value, video_box->box_bottom);
+      break;
+    case PROP_FILL_TYPE:
+      g_value_set_enum (value, video_box->fill_type);
+      break;
+    case PROP_ALPHA:
+      g_value_set_double (value, video_box->alpha);
+      break;
+    case PROP_BORDER_ALPHA:
+      g_value_set_double (value, video_box->border_alpha);
+      break;
+    case PROP_AUTOCROP:
+      g_value_set_boolean (value, video_box->autocrop);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Add @delta to the dimension @val, doing the arithmetic in 64 bit so the
+ * sum cannot overflow, and clamp the result to the valid range [1, G_MAXINT]. */
+static inline gint
+gst_video_box_transform_dimension (gint val, gint delta)
+{
+  gint64 new_val = (gint64) val + (gint64) delta;
+
+  new_val = CLAMP (new_val, 1, G_MAXINT);
+
+  return (gint) new_val;
+}
+
+/* Shift a caps width/height GValue by @delta pixels.  Handles plain ints,
+ * int ranges and lists thereof (recursively); any other value type fails.
+ * On success @dest_val is initialised and filled and TRUE is returned; on
+ * failure @dest_val is left unset and FALSE is returned. */
+static gboolean
+gst_video_box_transform_dimension_value (const GValue * src_val,
+    gint delta, GValue * dest_val)
+{
+  gboolean ret = TRUE;
+
+  g_value_init (dest_val, G_VALUE_TYPE (src_val));
+
+  if (G_VALUE_HOLDS_INT (src_val)) {
+    gint ival = g_value_get_int (src_val);
+
+    ival = gst_video_box_transform_dimension (ival, delta);
+    g_value_set_int (dest_val, ival);
+  } else if (GST_VALUE_HOLDS_INT_RANGE (src_val)) {
+    gint min = gst_value_get_int_range_min (src_val);
+    gint max = gst_value_get_int_range_max (src_val);
+
+    min = gst_video_box_transform_dimension (min, delta);
+    max = gst_video_box_transform_dimension (max, delta);
+    /* range collapsed or inverted after clamping: reject */
+    if (min >= max) {
+      ret = FALSE;
+      g_value_unset (dest_val);
+    } else {
+      gst_value_set_int_range (dest_val, min, max);
+    }
+  } else if (GST_VALUE_HOLDS_LIST (src_val)) {
+    gint i;
+
+    /* keep every list entry that can itself be transformed */
+    for (i = 0; i < gst_value_list_get_size (src_val); ++i) {
+      const GValue *list_val;
+      GValue newval = { 0, };
+
+      list_val = gst_value_list_get_value (src_val, i);
+      if (gst_video_box_transform_dimension_value (list_val, delta, &newval))
+        gst_value_list_append_value (dest_val, &newval);
+      g_value_unset (&newval);
+    }
+
+    if (gst_value_list_get_size (dest_val) == 0) {
+      g_value_unset (dest_val);
+      ret = FALSE;
+    }
+  } else {
+    g_value_unset (dest_val);
+    ret = FALSE;
+  }
+
+  return ret;
+}
+
+/* GstBaseTransform vfunc: compute the caps the other pad can support given
+ * @from on this pad.  Width/height are shifted by the configured box sizes
+ * (shrunk going downstream for positive boxes, grown the other way), or
+ * left unconstrained when autocropping.  The format field is expanded with
+ * every format we can convert to/from (see the table below), then the
+ * result is intersected with the other pad's template caps and @filter. */
+static GstCaps *
+gst_video_box_transform_caps (GstBaseTransform * trans,
+    GstPadDirection direction, GstCaps * from, GstCaps * filter)
+{
+  GstVideoBox *video_box = GST_VIDEO_BOX (trans);
+  GstCaps *to, *ret;
+  GstCaps *templ;
+  GstStructure *structure;
+  GstPad *other;
+  gint i, j;
+
+  to = gst_caps_new_empty ();
+  for (i = 0; i < gst_caps_get_size (from); i++) {
+    const GValue *fval, *lval;
+    GValue list = { 0, };
+    GValue val = { 0, };
+    gboolean seen_yuv = FALSE, seen_rgb = FALSE;
+    const gchar *str;
+
+    structure = gst_structure_copy (gst_caps_get_structure (from, i));
+
+    /* Transform width/height */
+    if (video_box->autocrop) {
+      gst_structure_remove_field (structure, "width");
+      gst_structure_remove_field (structure, "height");
+    } else {
+      gint dw = 0, dh = 0;
+      const GValue *v;
+      GValue w_val = { 0, };
+      GValue h_val = { 0, };
+
+      /* calculate width and height */
+      if (direction == GST_PAD_SINK) {
+        dw -= video_box->box_left;
+        dw -= video_box->box_right;
+      } else {
+        dw += video_box->box_left;
+        dw += video_box->box_right;
+      }
+
+      if (direction == GST_PAD_SINK) {
+        dh -= video_box->box_top;
+        dh -= video_box->box_bottom;
+      } else {
+        dh += video_box->box_top;
+        dh += video_box->box_bottom;
+      }
+
+      v = gst_structure_get_value (structure, "width");
+      if (!gst_video_box_transform_dimension_value (v, dw, &w_val)) {
+        GST_WARNING_OBJECT (video_box,
+            "could not transform width value with dw=%d" ", caps structure=%"
+            GST_PTR_FORMAT, dw, structure);
+        goto bail;
+      }
+      gst_structure_set_value (structure, "width", &w_val);
+
+      v = gst_structure_get_value (structure, "height");
+      if (!gst_video_box_transform_dimension_value (v, dh, &h_val)) {
+        g_value_unset (&w_val);
+        GST_WARNING_OBJECT (video_box,
+            "could not transform height value with dh=%d" ", caps structure=%"
+            GST_PTR_FORMAT, dh, structure);
+        goto bail;
+      }
+      gst_structure_set_value (structure, "height", &h_val);
+      g_value_unset (&w_val);
+      g_value_unset (&h_val);
+    }
+
+    /* Supported conversions:
+     * I420->AYUV
+     * I420->YV12
+     * YV12->AYUV
+     * YV12->I420
+     * AYUV->I420
+     * AYUV->YV12
+     * AYUV->xRGB (24bpp, 32bpp, incl. alpha)
+     * xRGB->xRGB (24bpp, 32bpp, from/to all variants, incl. alpha)
+     * xRGB->AYUV (24bpp, 32bpp, incl. alpha)
+     *
+     * Passthrough only for everything else.
+     */
+    fval = gst_structure_get_value (structure, "format");
+    if (fval && GST_VALUE_HOLDS_LIST (fval)) {
+      for (j = 0; j < gst_value_list_get_size (fval); j++) {
+        lval = gst_value_list_get_value (fval, j);
+        if ((str = g_value_get_string (lval))) {
+          if (strcmp (str, "AYUV") == 0) {
+            seen_yuv = TRUE;
+            seen_rgb = TRUE;
+            break;
+          } else if (strstr (str, "RGB") || strstr (str, "BGR")) {
+            seen_rgb = TRUE;
+          } else if (strcmp (str, "I420") == 0 || strcmp (str, "YV12") == 0) {
+            seen_yuv = TRUE;
+          }
+        }
+      }
+    } else if (fval && G_VALUE_HOLDS_STRING (fval)) {
+      if ((str = g_value_get_string (fval))) {
+        if (strcmp (str, "AYUV") == 0) {
+          seen_yuv = TRUE;
+          seen_rgb = TRUE;
+        } else if (strstr (str, "RGB") || strstr (str, "BGR")) {
+          seen_rgb = TRUE;
+        } else if (strcmp (str, "I420") == 0 || strcmp (str, "YV12") == 0) {
+          seen_yuv = TRUE;
+        }
+      }
+    }
+
+    /* Offer AYUV plus every format reachable from what we saw, merged with
+     * the original format value */
+    if (seen_yuv || seen_rgb) {
+      g_value_init (&list, GST_TYPE_LIST);
+
+      g_value_init (&val, G_TYPE_STRING);
+      g_value_set_string (&val, "AYUV");
+      gst_value_list_append_value (&list, &val);
+      g_value_unset (&val);
+
+      if (seen_yuv) {
+        g_value_init (&val, G_TYPE_STRING);
+        g_value_set_string (&val, "I420");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "YV12");
+        gst_value_list_append_value (&list, &val);
+        g_value_unset (&val);
+      }
+      if (seen_rgb) {
+        g_value_init (&val, G_TYPE_STRING);
+        g_value_set_string (&val, "RGBx");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "BGRx");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "xRGB");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "xBGR");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "RGBA");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "BGRA");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "ARGB");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "ABGR");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "RGB");
+        gst_value_list_append_value (&list, &val);
+        g_value_reset (&val);
+        g_value_set_string (&val, "BGR");
+        gst_value_list_append_value (&list, &val);
+        g_value_unset (&val);
+      }
+      gst_value_list_merge (&val, fval, &list);
+      gst_structure_set_value (structure, "format", &val);
+      g_value_unset (&val);
+      g_value_unset (&list);
+    }
+
+    gst_structure_remove_field (structure, "colorimetry");
+    gst_structure_remove_field (structure, "chroma-site");
+
+    gst_caps_append_structure (to, structure);
+  }
+
+  /* filter against set allowed caps on the pad */
+  other = (direction == GST_PAD_SINK) ? trans->srcpad : trans->sinkpad;
+  templ = gst_pad_get_pad_template_caps (other);
+  ret = gst_caps_intersect (to, templ);
+  gst_caps_unref (to);
+  gst_caps_unref (templ);
+
+  GST_DEBUG_OBJECT (video_box, "direction %d, transformed %" GST_PTR_FORMAT
+      " to %" GST_PTR_FORMAT, direction, from, ret);
+
+  if (ret && filter) {
+    GstCaps *intersection;
+
+    GST_DEBUG_OBJECT (video_box, "Using filter caps %" GST_PTR_FORMAT, filter);
+    intersection =
+        gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+    gst_caps_unref (ret);
+    ret = intersection;
+    GST_DEBUG_OBJECT (video_box, "Intersection %" GST_PTR_FORMAT, ret);
+  }
+
+  return ret;
+
+  /* ERRORS */
+bail:
+  {
+    /* drop everything and return empty caps: negotiation will fail */
+    gst_structure_free (structure);
+    gst_caps_unref (to);
+    to = gst_caps_new_empty ();
+    return to;
+  }
+}
+
+/* Enable basetransform passthrough when input and output are identical:
+ * same format, same SDTV (BT.601) flag and no boxing on any side.
+ * Currently always returns TRUE. */
+static gboolean
+gst_video_box_recalc_transform (GstVideoBox * video_box)
+{
+  gboolean res = TRUE;
+
+  /* if we have the same format in and out and we don't need to perform any
+   * cropping at all, we can just operate in passthrough mode */
+  if (video_box->in_format == video_box->out_format &&
+      video_box->box_left == 0 && video_box->box_right == 0 &&
+      video_box->box_top == 0 && video_box->box_bottom == 0 &&
+      video_box->in_sdtv == video_box->out_sdtv) {
+
+    GST_LOG_OBJECT (video_box, "we are using passthrough");
+    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (video_box),
+        TRUE);
+  } else {
+    GST_LOG_OBJECT (video_box, "we are not using passthrough");
+    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM_CAST (video_box),
+        FALSE);
+  }
+  return res;
+}
+
+/* Select the fill and copy implementations matching the negotiated input
+ * and output formats.  Returns TRUE when both handlers could be resolved,
+ * FALSE for unsupported combinations.  The handlers are cleared first so
+ * that a pair left over from a previous successful negotiation cannot make
+ * an unsupported combination look valid. */
+static gboolean
+gst_video_box_select_processing_functions (GstVideoBox * video_box)
+{
+  /* reset before matching: the return value below relies on both being
+   * freshly assigned for the current format pair */
+  video_box->fill = NULL;
+  video_box->copy = NULL;
+
+  switch (video_box->out_format) {
+    case GST_VIDEO_FORMAT_AYUV:
+      video_box->fill = fill_ayuv;
+      switch (video_box->in_format) {
+        case GST_VIDEO_FORMAT_AYUV:
+          video_box->copy = copy_ayuv_ayuv;
+          break;
+        case GST_VIDEO_FORMAT_I420:
+        case GST_VIDEO_FORMAT_YV12:
+          video_box->copy = copy_i420_ayuv;
+          break;
+        case GST_VIDEO_FORMAT_ARGB:
+        case GST_VIDEO_FORMAT_ABGR:
+        case GST_VIDEO_FORMAT_RGBA:
+        case GST_VIDEO_FORMAT_BGRA:
+        case GST_VIDEO_FORMAT_xRGB:
+        case GST_VIDEO_FORMAT_xBGR:
+        case GST_VIDEO_FORMAT_RGBx:
+        case GST_VIDEO_FORMAT_BGRx:
+        case GST_VIDEO_FORMAT_RGB:
+        case GST_VIDEO_FORMAT_BGR:
+          video_box->copy = copy_rgb32_ayuv;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_I420:
+    case GST_VIDEO_FORMAT_YV12:
+      video_box->fill = fill_planar_yuv;
+      switch (video_box->in_format) {
+        case GST_VIDEO_FORMAT_AYUV:
+          video_box->copy = copy_ayuv_i420;
+          break;
+        case GST_VIDEO_FORMAT_I420:
+        case GST_VIDEO_FORMAT_YV12:
+          video_box->copy = copy_i420_i420;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_ARGB:
+    case GST_VIDEO_FORMAT_ABGR:
+    case GST_VIDEO_FORMAT_RGBA:
+    case GST_VIDEO_FORMAT_BGRA:
+    case GST_VIDEO_FORMAT_xRGB:
+    case GST_VIDEO_FORMAT_xBGR:
+    case GST_VIDEO_FORMAT_RGBx:
+    case GST_VIDEO_FORMAT_BGRx:
+    case GST_VIDEO_FORMAT_RGB:
+    case GST_VIDEO_FORMAT_BGR:
+      /* 24bpp formats need the packed-RGB fill, all others the 32bpp one */
+      video_box->fill = (video_box->out_format == GST_VIDEO_FORMAT_BGR
+          || video_box->out_format ==
+          GST_VIDEO_FORMAT_RGB) ? fill_rgb24 : fill_rgb32;
+      switch (video_box->in_format) {
+        case GST_VIDEO_FORMAT_ARGB:
+        case GST_VIDEO_FORMAT_ABGR:
+        case GST_VIDEO_FORMAT_RGBA:
+        case GST_VIDEO_FORMAT_BGRA:
+        case GST_VIDEO_FORMAT_xRGB:
+        case GST_VIDEO_FORMAT_xBGR:
+        case GST_VIDEO_FORMAT_RGBx:
+        case GST_VIDEO_FORMAT_BGRx:
+        case GST_VIDEO_FORMAT_RGB:
+        case GST_VIDEO_FORMAT_BGR:
+          video_box->copy = copy_rgb32;
+          break;
+        case GST_VIDEO_FORMAT_AYUV:
+          video_box->copy = copy_ayuv_rgb32;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_GRAY8:
+    case GST_VIDEO_FORMAT_GRAY16_BE:
+    case GST_VIDEO_FORMAT_GRAY16_LE:
+      video_box->fill = fill_gray;
+      switch (video_box->in_format) {
+        case GST_VIDEO_FORMAT_GRAY8:
+        case GST_VIDEO_FORMAT_GRAY16_BE:
+        case GST_VIDEO_FORMAT_GRAY16_LE:
+          video_box->copy = copy_packed_simple;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_YUY2:
+    case GST_VIDEO_FORMAT_YVYU:
+    case GST_VIDEO_FORMAT_UYVY:
+      video_box->fill = fill_yuy2;
+      switch (video_box->in_format) {
+        case GST_VIDEO_FORMAT_YUY2:
+        case GST_VIDEO_FORMAT_YVYU:
+        case GST_VIDEO_FORMAT_UYVY:
+          video_box->copy = copy_yuy2_yuy2;
+          break;
+        default:
+          break;
+      }
+      break;
+    case GST_VIDEO_FORMAT_Y444:
+    case GST_VIDEO_FORMAT_Y42B:
+    case GST_VIDEO_FORMAT_Y41B:
+      video_box->fill = fill_planar_yuv;
+      switch (video_box->in_format) {
+        case GST_VIDEO_FORMAT_Y444:
+          video_box->copy = copy_y444_y444;
+          break;
+        case GST_VIDEO_FORMAT_Y42B:
+          video_box->copy = copy_y42b_y42b;
+          break;
+        case GST_VIDEO_FORMAT_Y41B:
+          video_box->copy = copy_y41b_y41b;
+          break;
+        default:
+          break;
+      }
+      break;
+    default:
+      break;
+  }
+
+  return video_box->fill != NULL && video_box->copy != NULL;
+}
+
+/* GstVideoFilter vfunc called when caps are (re)negotiated: caches the
+ * in/out format, size and colour matrix (BT.601 vs not), re-derives the
+ * box values when autocrop is enabled, updates the passthrough decision
+ * and selects the fill/copy implementations.  All under video_box->mutex
+ * so it cannot race with property changes or frame processing. */
+static gboolean
+gst_video_box_set_info (GstVideoFilter * vfilter, GstCaps * in,
+    GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info)
+{
+  GstVideoBox *video_box = GST_VIDEO_BOX (vfilter);
+  gboolean ret;
+
+  g_mutex_lock (&video_box->mutex);
+
+  video_box->in_format = GST_VIDEO_INFO_FORMAT (in_info);
+  video_box->in_width = GST_VIDEO_INFO_WIDTH (in_info);
+  video_box->in_height = GST_VIDEO_INFO_HEIGHT (in_info);
+
+  video_box->out_format = GST_VIDEO_INFO_FORMAT (out_info);
+  video_box->out_width = GST_VIDEO_INFO_WIDTH (out_info);
+  video_box->out_height = GST_VIDEO_INFO_HEIGHT (out_info);
+
+  video_box->in_sdtv =
+      in_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
+  video_box->out_sdtv =
+      out_info->colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_BT601;
+
+  GST_DEBUG_OBJECT (video_box, "Input w: %d h: %d", video_box->in_width,
+      video_box->in_height);
+  GST_DEBUG_OBJECT (video_box, "Output w: %d h: %d", video_box->out_width,
+      video_box->out_height);
+
+  if (video_box->autocrop)
+    gst_video_box_autocrop (video_box);
+
+  /* recalc the transformation strategy */
+  ret = gst_video_box_recalc_transform (video_box);
+
+  if (ret)
+    ret = gst_video_box_select_processing_functions (video_box);
+  g_mutex_unlock (&video_box->mutex);
+
+  return ret;
+}
+
+/* GstBaseTransform vfunc for events travelling upstream: rewrite the
+ * pointer coordinates of mouse navigation events by adding the left/top
+ * box offsets, so downstream output coordinates map back onto the input
+ * picture.  The (possibly replaced) event is then forwarded via the
+ * parent class.
+ * NOTE(review): this takes the GstObject lock, not the video_box->mutex
+ * that guards the box fields elsewhere in this file — confirm intent. */
+static gboolean
+gst_video_box_src_event (GstBaseTransform * trans, GstEvent * event)
+{
+  GstVideoBox *video_box = GST_VIDEO_BOX (trans);
+  GstStructure *new_structure;
+  const GstStructure *structure;
+  const gchar *event_name;
+  gdouble pointer_x;
+  gdouble pointer_y;
+
+  GST_OBJECT_LOCK (video_box);
+  if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION &&
+      (video_box->box_left != 0 || video_box->box_top != 0)) {
+    structure = gst_event_get_structure (event);
+    event_name = gst_structure_get_string (structure, "event");
+
+    if (event_name &&
+        (strcmp (event_name, "mouse-move") == 0 ||
+            strcmp (event_name, "mouse-button-press") == 0 ||
+            strcmp (event_name, "mouse-button-release") == 0)) {
+      if (gst_structure_get_double (structure, "pointer_x", &pointer_x) &&
+          gst_structure_get_double (structure, "pointer_y", &pointer_y)) {
+        gdouble new_pointer_x, new_pointer_y;
+        GstEvent *new_event;
+
+        new_pointer_x = pointer_x + video_box->box_left;
+        new_pointer_y = pointer_y + video_box->box_top;
+
+        new_structure = gst_structure_copy (structure);
+        gst_structure_set (new_structure,
+            "pointer_x", G_TYPE_DOUBLE, (gdouble) (new_pointer_x),
+            "pointer_y", G_TYPE_DOUBLE, (gdouble) (new_pointer_y), NULL);
+
+        new_event = gst_event_new_navigation (new_structure);
+        gst_event_unref (event);
+        event = new_event;
+      } else {
+        GST_WARNING_OBJECT (video_box, "Failed to read navigation event");
+      }
+    }
+  }
+  GST_OBJECT_UNLOCK (video_box);
+
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
+}
+
+/* Perform the actual boxing of one frame.  crop_w x crop_h is the size of
+ * the region copied from the input after removing the positive (cropping)
+ * box values.  Three cases: nothing left to copy -> fill the whole output;
+ * all box values zero -> plain 1:1 copy; otherwise fill the borders first
+ * (if any side is negative) and copy the cropped region at the computed
+ * source/destination offsets.  The alpha properties are scaled from the
+ * [0,1] doubles to [0,255]. */
+static void
+gst_video_box_process (GstVideoBox * video_box, GstVideoFrame * in,
+    GstVideoFrame * out)
+{
+  guint b_alpha = CLAMP (video_box->border_alpha * 256, 0, 255);
+  guint i_alpha = CLAMP (video_box->alpha * 256, 0, 255);
+  GstVideoBoxFill fill_type = video_box->fill_type;
+  gint br, bl, bt, bb, crop_w, crop_h;
+
+  crop_h = 0;
+  crop_w = 0;
+
+  br = video_box->box_right;
+  bl = video_box->box_left;
+  bt = video_box->box_top;
+  bb = video_box->box_bottom;
+
+  /* only positive (cropping) sides reduce the copied width */
+  if (br >= 0 && bl >= 0) {
+    crop_w = video_box->in_width - (br + bl);
+  } else if (br >= 0 && bl < 0) {
+    crop_w = video_box->in_width - (br);
+  } else if (br < 0 && bl >= 0) {
+    crop_w = video_box->in_width - (bl);
+  } else if (br < 0 && bl < 0) {
+    crop_w = video_box->in_width;
+  }
+
+  if (bb >= 0 && bt >= 0) {
+    crop_h = video_box->in_height - (bb + bt);
+  } else if (bb >= 0 && bt < 0) {
+    crop_h = video_box->in_height - (bb);
+  } else if (bb < 0 && bt >= 0) {
+    crop_h = video_box->in_height - (bt);
+  } else if (bb < 0 && bt < 0) {
+    crop_h = video_box->in_height;
+  }
+
+  GST_DEBUG_OBJECT (video_box, "Borders are: L:%d, R:%d, T:%d, B:%d", bl, br,
+      bt, bb);
+  GST_DEBUG_OBJECT (video_box, "Alpha value is: %u (frame) %u (border)",
+      i_alpha, b_alpha);
+
+  if (crop_h < 0 || crop_w < 0) {
+    /* everything cropped away: output is border fill only */
+    video_box->fill (fill_type, b_alpha, out, video_box->out_sdtv);
+  } else if (bb == 0 && bt == 0 && br == 0 && bl == 0) {
+    video_box->copy (i_alpha, out, video_box->out_sdtv, 0, 0, in,
+        video_box->in_sdtv, 0, 0, crop_w, crop_h);
+  } else {
+    gint src_x = 0, src_y = 0;
+    gint dest_x = 0, dest_y = 0;
+
+    /* Fill everything if a border should be added somewhere */
+    if (bt < 0 || bb < 0 || br < 0 || bl < 0)
+      video_box->fill (fill_type, b_alpha, out, video_box->out_sdtv);
+
+    /* Top border */
+    if (bt < 0) {
+      dest_y += -bt;
+    } else {
+      src_y += bt;
+    }
+
+    /* Left border */
+    if (bl < 0) {
+      dest_x += -bl;
+    } else {
+      src_x += bl;
+    }
+
+    /* Frame */
+    video_box->copy (i_alpha, out, video_box->out_sdtv, dest_x, dest_y,
+        in, video_box->in_sdtv, src_x, src_y, crop_w, crop_h);
+  }
+
+  GST_LOG_OBJECT (video_box, "image created");
+}
+
+/* GstBaseTransform vfunc run before each buffer: sync any controlled
+ * (GstController-attached) properties to the buffer's stream time. */
+static void
+gst_video_box_before_transform (GstBaseTransform * trans, GstBuffer * in)
+{
+  GstVideoBox *video_box = GST_VIDEO_BOX (trans);
+  GstClockTime timestamp, stream_time;
+
+  timestamp = GST_BUFFER_TIMESTAMP (in);
+  stream_time =
+      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+
+  GST_DEBUG_OBJECT (video_box, "sync to %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (timestamp));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (video_box), stream_time);
+}
+
+/* GstVideoFilter vfunc: run the boxing under video_box->mutex so property
+ * changes cannot race with frame processing. */
+static GstFlowReturn
+gst_video_box_transform_frame (GstVideoFilter * vfilter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GstVideoBox *video_box = GST_VIDEO_BOX (vfilter);
+
+  g_mutex_lock (&video_box->mutex);
+  gst_video_box_process (video_box, in_frame, out_frame);
+  g_mutex_unlock (&video_box->mutex);
+  return GST_FLOW_OK;
+}
+
+/* FIXME: 0.11 merge with videocrop plugin */
+/* Plugin entry point: register the videobox element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (videobox, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ videobox,
+ "resizes a video by adding borders or cropping",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/videobox/gstvideobox.h b/gst/videobox/gstvideobox.h
new file mode 100644
index 0000000000..f2e5585193
--- /dev/null
+++ b/gst/videobox/gstvideobox.h
@@ -0,0 +1,94 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include <gst/gst.h>
+#include <gst/video/gstvideofilter.h>
+#include <gst/video/video.h>
+
+#ifndef __GST_VIDEO_BOX_H__
+#define __GST_VIDEO_BOX_H__
+
+#define GST_TYPE_VIDEO_BOX \
+ (gst_video_box_get_type())
+#define GST_VIDEO_BOX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_BOX,GstVideoBox))
+#define GST_VIDEO_BOX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_BOX,GstVideoBoxClass))
+#define GST_IS_VIDEO_BOX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_BOX))
+#define GST_IS_VIDEO_BOX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_BOX))
+
+typedef struct _GstVideoBox GstVideoBox;
+typedef struct _GstVideoBoxClass GstVideoBoxClass;
+
+/* Colours available for filling the borders (fill-type property);
+ * VIDEO_BOX_FILL_LAST is a sentinel, not a valid fill colour. */
+typedef enum
+{
+  VIDEO_BOX_FILL_BLACK,
+  VIDEO_BOX_FILL_GREEN,
+  VIDEO_BOX_FILL_BLUE,
+  VIDEO_BOX_FILL_RED,
+  VIDEO_BOX_FILL_YELLOW,
+  VIDEO_BOX_FILL_WHITE,
+  VIDEO_BOX_FILL_LAST
+}
+GstVideoBoxFill;
+
+struct _GstVideoBox
+{
+  GstVideoFilter element;
+
+  /* <private> */
+
+  /* Guarding everything below */
+  GMutex mutex;
+  /* caps */
+  GstVideoFormat in_format;
+  gint in_width, in_height;
+  /* TRUE when the negotiated colorimetry matrix is BT.601 */
+  gboolean in_sdtv;
+  GstVideoFormat out_format;
+  gint out_width, out_height;
+  gboolean out_sdtv;
+
+  /* signed property values: positive crops, negative adds a border */
+  gint box_left, box_right, box_top, box_bottom;
+
+  /* non-negative per-side values derived from box_*: border size to draw
+   * and amount of input to crop */
+  gint border_left, border_right, border_top, border_bottom;
+  gint crop_left, crop_right, crop_top, crop_bottom;
+
+  /* frame and border opacity, [0,1] doubles */
+  gdouble alpha;
+  gdouble border_alpha;
+
+  /* border fill colour */
+  GstVideoBoxFill fill_type;
+
+  /* derive box_* from the negotiated sizes instead of the properties */
+  gboolean autocrop;
+
+  /* format-specific implementations selected at set_info time */
+  void (*fill) (GstVideoBoxFill fill_type, guint b_alpha, GstVideoFrame *dest, gboolean sdtv);
+  void (*copy) (guint i_alpha, GstVideoFrame * dest, gboolean dest_sdtv, gint dest_x, gint dest_y, GstVideoFrame * src, gboolean src_sdtv, gint src_x, gint src_y, gint w, gint h);
+};
+
+struct _GstVideoBoxClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_video_box_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (videobox);
+
+#endif /* __GST_VIDEO_BOX_H__ */
diff --git a/gst/videobox/gstvideoboxorc-dist.c b/gst/videobox/gstvideoboxorc-dist.c
new file mode 100644
index 0000000000..098b4908e1
--- /dev/null
+++ b/gst/videobox/gstvideoboxorc-dist.c
@@ -0,0 +1,242 @@
+
+/* autogenerated from gstvideoboxorc.orc */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <glib.h>
+
+#ifndef _ORC_INTEGER_TYPEDEFS_
+#define _ORC_INTEGER_TYPEDEFS_
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+typedef int8_t orc_int8;
+typedef int16_t orc_int16;
+typedef int32_t orc_int32;
+typedef int64_t orc_int64;
+typedef uint8_t orc_uint8;
+typedef uint16_t orc_uint16;
+typedef uint32_t orc_uint32;
+typedef uint64_t orc_uint64;
+#define ORC_UINT64_C(x) UINT64_C(x)
+#elif defined(_MSC_VER)
+typedef signed __int8 orc_int8;
+typedef signed __int16 orc_int16;
+typedef signed __int32 orc_int32;
+typedef signed __int64 orc_int64;
+typedef unsigned __int8 orc_uint8;
+typedef unsigned __int16 orc_uint16;
+typedef unsigned __int32 orc_uint32;
+typedef unsigned __int64 orc_uint64;
+#define ORC_UINT64_C(x) (x##Ui64)
+#define inline __inline
+#else
+#include <limits.h>
+typedef signed char orc_int8;
+typedef short orc_int16;
+typedef int orc_int32;
+typedef unsigned char orc_uint8;
+typedef unsigned short orc_uint16;
+typedef unsigned int orc_uint32;
+#if INT_MAX == LONG_MAX
+typedef long long orc_int64;
+typedef unsigned long long orc_uint64;
+#define ORC_UINT64_C(x) (x##ULL)
+#else
+typedef long orc_int64;
+typedef unsigned long orc_uint64;
+#define ORC_UINT64_C(x) (x##UL)
+#endif
+#endif
+typedef union
+{
+ orc_int16 i;
+ orc_int8 x2[2];
+} orc_union16;
+typedef union
+{
+ orc_int32 i;
+ float f;
+ orc_int16 x2[2];
+ orc_int8 x4[4];
+} orc_union32;
+typedef union
+{
+ orc_int64 i;
+ double f;
+ orc_int32 x2[2];
+ float x2f[2];
+ orc_int16 x4[4];
+} orc_union64;
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#ifndef ORC_INTERNAL
+#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
+#define ORC_INTERNAL __hidden
+#elif defined (__GNUC__)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#else
+#define ORC_INTERNAL
+#endif
+#endif
+
+
+#ifndef DISABLE_ORC
+#include <orc/orc.h>
+#endif
+void video_box_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n);
+
+
+/* begin Orc C target preamble */
+#define ORC_CLAMP(x,a,b) ((x)<(a) ? (a) : ((x)>(b) ? (b) : (x)))
+#define ORC_ABS(a) ((a)<0 ? -(a) : (a))
+#define ORC_MIN(a,b) ((a)<(b) ? (a) : (b))
+#define ORC_MAX(a,b) ((a)>(b) ? (a) : (b))
+#define ORC_SB_MAX 127
+#define ORC_SB_MIN (-1-ORC_SB_MAX)
+#define ORC_UB_MAX (orc_uint8) 255
+#define ORC_UB_MIN 0
+#define ORC_SW_MAX 32767
+#define ORC_SW_MIN (-1-ORC_SW_MAX)
+#define ORC_UW_MAX (orc_uint16)65535
+#define ORC_UW_MIN 0
+#define ORC_SL_MAX 2147483647
+#define ORC_SL_MIN (-1-ORC_SL_MAX)
+#define ORC_UL_MAX 4294967295U
+#define ORC_UL_MIN 0
+#define ORC_CLAMP_SB(x) ORC_CLAMP(x,ORC_SB_MIN,ORC_SB_MAX)
+#define ORC_CLAMP_UB(x) ORC_CLAMP(x,ORC_UB_MIN,ORC_UB_MAX)
+#define ORC_CLAMP_SW(x) ORC_CLAMP(x,ORC_SW_MIN,ORC_SW_MAX)
+#define ORC_CLAMP_UW(x) ORC_CLAMP(x,ORC_UW_MIN,ORC_UW_MAX)
+#define ORC_CLAMP_SL(x) ORC_CLAMP(x,ORC_SL_MIN,ORC_SL_MAX)
+#define ORC_CLAMP_UL(x) ORC_CLAMP(x,ORC_UL_MIN,ORC_UL_MAX)
+#define ORC_SWAP_W(x) ((((x)&0xffU)<<8) | (((x)&0xff00U)>>8))
+#define ORC_SWAP_L(x) ((((x)&0xffU)<<24) | (((x)&0xff00U)<<8) | (((x)&0xff0000U)>>8) | (((x)&0xff000000U)>>24))
+#define ORC_SWAP_Q(x) ((((x)&ORC_UINT64_C(0xff))<<56) | (((x)&ORC_UINT64_C(0xff00))<<40) | (((x)&ORC_UINT64_C(0xff0000))<<24) | (((x)&ORC_UINT64_C(0xff000000))<<8) | (((x)&ORC_UINT64_C(0xff00000000))>>8) | (((x)&ORC_UINT64_C(0xff0000000000))>>24) | (((x)&ORC_UINT64_C(0xff000000000000))>>40) | (((x)&ORC_UINT64_C(0xff00000000000000))>>56))
+#define ORC_PTR_OFFSET(ptr,offset) ((void *)(((unsigned char *)(ptr)) + (offset)))
+#define ORC_DENORMAL(x) ((x) & ((((x)&0x7f800000) == 0) ? 0xff800000 : 0xffffffff))
+#define ORC_ISNAN(x) ((((x)&0x7f800000) == 0x7f800000) && (((x)&0x007fffff) != 0))
+#define ORC_DENORMAL_DOUBLE(x) ((x) & ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == 0) ? ORC_UINT64_C(0xfff0000000000000) : ORC_UINT64_C(0xffffffffffffffff)))
+#define ORC_ISNAN_DOUBLE(x) ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == ORC_UINT64_C(0x7ff0000000000000)) && (((x)&ORC_UINT64_C(0x000fffffffffffff)) != 0))
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+/* end Orc C target preamble */
+
+
+
+/* video_box_orc_splat_u32 */
+#ifdef DISABLE_ORC
+void
+video_box_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n)
+{
+ int i;
+ orc_union32 *ORC_RESTRICT ptr0;
+ orc_union32 var32;
+ orc_union32 var33;
+
+ ptr0 = (orc_union32 *) d1;
+
+ /* 0: loadpl */
+ var32.i = p1;
+
+ for (i = 0; i < n; i++) {
+ /* 1: copyl */
+ var33.i = var32.i;
+ /* 2: storel */
+ ptr0[i] = var33;
+ }
+
+}
+
+#else
+static void
+_backup_video_box_orc_splat_u32 (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int n = ex->n;
+ orc_union32 *ORC_RESTRICT ptr0;
+ orc_union32 var32;
+ orc_union32 var33;
+
+ ptr0 = (orc_union32 *) ex->arrays[0];
+
+ /* 0: loadpl */
+ var32.i = ex->params[24];
+
+ for (i = 0; i < n; i++) {
+ /* 1: copyl */
+ var33.i = var32.i;
+ /* 2: storel */
+ ptr0[i] = var33;
+ }
+
+}
+
+void
+video_box_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 9, 23, 118, 105, 100, 101, 111, 95, 98, 111, 120, 95, 111, 114, 99,
+ 95, 115, 112, 108, 97, 116, 95, 117, 51, 50, 11, 4, 4, 16, 4, 112,
+ 0, 24, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p, _backup_video_box_orc_splat_u32);
+#else
+ p = orc_program_new ();
+ orc_program_set_name (p, "video_box_orc_splat_u32");
+ orc_program_set_backup_function (p, _backup_video_box_orc_splat_u32);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_parameter (p, 4, "p1");
+
+ orc_program_append_2 (p, "copyl", 0, ORC_VAR_D1, ORC_VAR_P1, ORC_VAR_D1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->params[ORC_VAR_P1] = p1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
diff --git a/gst/videobox/gstvideoboxorc-dist.h b/gst/videobox/gstvideoboxorc-dist.h
new file mode 100644
index 0000000000..23c2d0bac2
--- /dev/null
+++ b/gst/videobox/gstvideoboxorc-dist.h
@@ -0,0 +1,90 @@
+
+/* autogenerated from gstvideoboxorc.orc */
+
+#ifndef _GSTVIDEOBOXORC_H_
+#define _GSTVIDEOBOXORC_H_
+
+#include <glib.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#ifndef _ORC_INTEGER_TYPEDEFS_
+#define _ORC_INTEGER_TYPEDEFS_
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+typedef int8_t orc_int8;
+typedef int16_t orc_int16;
+typedef int32_t orc_int32;
+typedef int64_t orc_int64;
+typedef uint8_t orc_uint8;
+typedef uint16_t orc_uint16;
+typedef uint32_t orc_uint32;
+typedef uint64_t orc_uint64;
+#define ORC_UINT64_C(x) UINT64_C(x)
+#elif defined(_MSC_VER)
+typedef signed __int8 orc_int8;
+typedef signed __int16 orc_int16;
+typedef signed __int32 orc_int32;
+typedef signed __int64 orc_int64;
+typedef unsigned __int8 orc_uint8;
+typedef unsigned __int16 orc_uint16;
+typedef unsigned __int32 orc_uint32;
+typedef unsigned __int64 orc_uint64;
+#define ORC_UINT64_C(x) (x##Ui64)
+#define inline __inline
+#else
+#include <limits.h>
+typedef signed char orc_int8;
+typedef short orc_int16;
+typedef int orc_int32;
+typedef unsigned char orc_uint8;
+typedef unsigned short orc_uint16;
+typedef unsigned int orc_uint32;
+#if INT_MAX == LONG_MAX
+typedef long long orc_int64;
+typedef unsigned long long orc_uint64;
+#define ORC_UINT64_C(x) (x##ULL)
+#else
+typedef long orc_int64;
+typedef unsigned long orc_uint64;
+#define ORC_UINT64_C(x) (x##UL)
+#endif
+#endif
+typedef union { orc_int16 i; orc_int8 x2[2]; } orc_union16;
+typedef union { orc_int32 i; float f; orc_int16 x2[2]; orc_int8 x4[4]; } orc_union32;
+typedef union { orc_int64 i; double f; orc_int32 x2[2]; float x2f[2]; orc_int16 x4[4]; } orc_union64;
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#ifndef ORC_INTERNAL
+#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
+#define ORC_INTERNAL __hidden
+#elif defined (__GNUC__)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#else
+#define ORC_INTERNAL
+#endif
+#endif
+
+void video_box_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+
diff --git a/gst/videobox/gstvideoboxorc.orc b/gst/videobox/gstvideoboxorc.orc
new file mode 100644
index 0000000000..1dbf9e785b
--- /dev/null
+++ b/gst/videobox/gstvideoboxorc.orc
@@ -0,0 +1,7 @@
+
+.function video_box_orc_splat_u32
+.dest 4 d1 guint32
+.param 4 p1
+
+copyl d1, p1
+
diff --git a/gst/videobox/meson.build b/gst/videobox/meson.build
new file mode 100644
index 0000000000..1511c30661
--- /dev/null
+++ b/gst/videobox/meson.build
@@ -0,0 +1,30 @@
+orcsrc = 'gstvideoboxorc'
+if have_orcc
+ orc_h = custom_target(orcsrc + '.h',
+ input : orcsrc + '.orc',
+ output : orcsrc + '.h',
+ command : orcc_args + ['--header', '-o', '@OUTPUT@', '@INPUT@'])
+ orc_c = custom_target(orcsrc + '.c',
+ input : orcsrc + '.orc',
+ output : orcsrc + '.c',
+ command : orcc_args + ['--implementation', '-o', '@OUTPUT@', '@INPUT@'])
+ orc_targets += {'name': orcsrc, 'orc-source': files(orcsrc + '.orc'), 'header': orc_h, 'source': orc_c}
+else
+ orc_h = configure_file(input : orcsrc + '-dist.h',
+ output : orcsrc + '.h',
+ copy : true)
+ orc_c = configure_file(input : orcsrc + '-dist.c',
+ output : orcsrc + '.c',
+ copy : true)
+endif
+
+gstvideobox = library('gstvideobox',
+ 'gstvideobox.c', orc_c, orc_h,
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [orc_dep, gstbase_dep, gstvideo_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstvideobox, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstvideobox]
diff --git a/gst/videocrop/gstaspectratiocrop.c b/gst/videocrop/gstaspectratiocrop.c
new file mode 100644
index 0000000000..6aa659782b
--- /dev/null
+++ b/gst/videocrop/gstaspectratiocrop.c
@@ -0,0 +1,516 @@
+/* GStreamer video frame cropping to aspect-ratio
+ * Copyright (C) 2009 Thijs Vermeir <thijsvermeir@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-aspectratiocrop
+ * @title: aspectratiocrop
+ * @see_also: #GstVideoCrop
+ *
+ * This element crops video frames to a specified #GstAspectRatioCrop:aspect-ratio.
+ *
+ * If the aspect-ratio is already correct, the element will operate
+ * in pass-through mode.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! video/x-raw,height=640,width=480 ! aspectratiocrop aspect-ratio=16/9 ! ximagesink
+ * ]| This pipeline generates a video stream in 4/3 and crops it to 16/9.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvideocrop.h"
+#include "gstaspectratiocrop.h"
+/* include private header which contains the supported formats */
+#include "gstvideocrop-private.h"
+
+#include "gst/glib-compat-private.h"
+
+GST_DEBUG_CATEGORY_STATIC (aspect_ratio_crop_debug);
+#define GST_CAT_DEFAULT aspect_ratio_crop_debug
+
+enum
+{
+ PROP_0,
+ PROP_ASPECT_RATIO_CROP,
+};
+
+/* we support the same caps as videocrop */
+#define ASPECT_RATIO_CROP_CAPS VIDEO_CROP_CAPS
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (ASPECT_RATIO_CROP_CAPS)
+ );
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (ASPECT_RATIO_CROP_CAPS)
+ );
+
+#define gst_aspect_ratio_crop_parent_class parent_class
+G_DEFINE_TYPE (GstAspectRatioCrop, gst_aspect_ratio_crop, GST_TYPE_BIN);
+GST_ELEMENT_REGISTER_DEFINE (aspectratiocrop, "aspectratiocrop", GST_RANK_NONE,
+ GST_TYPE_ASPECT_RATIO_CROP);
+
+static void gst_aspect_ratio_crop_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_aspect_ratio_crop_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static void gst_aspect_ratio_crop_set_cropping (GstAspectRatioCrop *
+ aspect_ratio_crop, gint top, gint right, gint bottom, gint left);
+static GstCaps *gst_aspect_ratio_crop_get_caps (GstPad * pad, GstCaps * filter);
+static gboolean gst_aspect_ratio_crop_src_query (GstPad * pad,
+ GstObject * parent, GstQuery * query);
+static gboolean gst_aspect_ratio_crop_set_caps (GstAspectRatioCrop *
+ aspect_ratio_crop, GstCaps * caps);
+static gboolean gst_aspect_ratio_crop_sink_event (GstPad * pad,
+ GstObject * parent, GstEvent * evt);
+static void gst_aspect_ratio_crop_finalize (GObject * object);
+static void gst_aspect_ratio_transform_structure (GstAspectRatioCrop *
+ aspect_ratio_crop, GstStructure * structure, GstStructure ** new_structure,
+ gboolean set_videocrop);
+
+static void
+gst_aspect_ratio_crop_set_cropping (GstAspectRatioCrop * aspect_ratio_crop,
+ gint top, gint right, gint bottom, gint left)
+{
+ GValue value = { 0 };
+ if (G_UNLIKELY (!aspect_ratio_crop->videocrop)) {
+ GST_WARNING_OBJECT (aspect_ratio_crop,
+ "Can't set the settings if there is no cropping element");
+ return;
+ }
+
+ g_value_init (&value, G_TYPE_INT);
+ g_value_set_int (&value, top);
+ GST_DEBUG_OBJECT (aspect_ratio_crop, "set top cropping to: %d", top);
+ g_object_set_property (G_OBJECT (aspect_ratio_crop->videocrop), "top",
+ &value);
+ g_value_set_int (&value, right);
+ GST_DEBUG_OBJECT (aspect_ratio_crop, "set right cropping to: %d", right);
+ g_object_set_property (G_OBJECT (aspect_ratio_crop->videocrop), "right",
+ &value);
+ g_value_set_int (&value, bottom);
+ GST_DEBUG_OBJECT (aspect_ratio_crop, "set bottom cropping to: %d", bottom);
+ g_object_set_property (G_OBJECT (aspect_ratio_crop->videocrop), "bottom",
+ &value);
+ g_value_set_int (&value, left);
+ GST_DEBUG_OBJECT (aspect_ratio_crop, "set left cropping to: %d", left);
+ g_object_set_property (G_OBJECT (aspect_ratio_crop->videocrop), "left",
+ &value);
+
+ g_value_unset (&value);
+}
+
+static gboolean
+gst_aspect_ratio_crop_set_caps (GstAspectRatioCrop * aspect_ratio_crop,
+ GstCaps * caps)
+{
+ GstPad *peer_pad;
+ GstStructure *structure;
+ gboolean ret;
+
+ g_mutex_lock (&aspect_ratio_crop->crop_lock);
+
+ structure = gst_caps_get_structure (caps, 0);
+ gst_aspect_ratio_transform_structure (aspect_ratio_crop, structure, NULL,
+ TRUE);
+ peer_pad =
+ gst_element_get_static_pad (GST_ELEMENT (aspect_ratio_crop->videocrop),
+ "sink");
+ ret = gst_pad_set_caps (peer_pad, caps);
+ gst_object_unref (peer_pad);
+ g_mutex_unlock (&aspect_ratio_crop->crop_lock);
+ return ret;
+}
+
+static gboolean
+gst_aspect_ratio_crop_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * evt)
+{
+ GstAspectRatioCrop *aspect_ratio_crop = GST_ASPECT_RATIO_CROP (parent);
+
+ switch (GST_EVENT_TYPE (evt)) {
+ case GST_EVENT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_event_parse_caps (evt, &caps);
+ gst_aspect_ratio_crop_set_caps (aspect_ratio_crop, caps);
+ break;
+ }
+ default:
+ break;
+ }
+
+ return gst_pad_event_default (pad, parent, evt);
+}
+
+static void
+gst_aspect_ratio_crop_class_init (GstAspectRatioCropClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+
+ gobject_class->set_property = gst_aspect_ratio_crop_set_property;
+ gobject_class->get_property = gst_aspect_ratio_crop_get_property;
+ gobject_class->finalize = gst_aspect_ratio_crop_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_ASPECT_RATIO_CROP,
+ gst_param_spec_fraction ("aspect-ratio", "aspect-ratio",
+ "Target aspect-ratio of video", 0, 1, G_MAXINT, 1, 0, 1,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (element_class, "aspectratiocrop",
+ "Filter/Effect/Video",
+ "Crops video into a user-defined aspect-ratio",
+ "Thijs Vermeir <thijsvermeir@gmail.com>");
+
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+}
+
+static void
+gst_aspect_ratio_crop_finalize (GObject * object)
+{
+ GstAspectRatioCrop *aspect_ratio_crop;
+
+ aspect_ratio_crop = GST_ASPECT_RATIO_CROP (object);
+
+ g_mutex_clear (&aspect_ratio_crop->crop_lock);
+ gst_clear_caps (&aspect_ratio_crop->renegotiation_caps);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static GstFlowReturn
+gst_aspect_ratio_crop_sink_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buffer)
+{
+ GstCaps *caps = NULL;
+ GstAspectRatioCrop *aspect_ratio_crop = GST_ASPECT_RATIO_CROP (parent);
+
+ GST_OBJECT_LOCK (parent);
+ caps = aspect_ratio_crop->renegotiation_caps;
+ aspect_ratio_crop->renegotiation_caps = NULL;
+ GST_OBJECT_UNLOCK (parent);
+
+ if (caps) {
+ gst_aspect_ratio_crop_set_caps (GST_ASPECT_RATIO_CROP (parent), caps);
+ gst_caps_unref (caps);
+ }
+
+ return gst_proxy_pad_chain_default (pad, parent, buffer);
+
+}
+
+static void
+gst_aspect_ratio_crop_init (GstAspectRatioCrop * aspect_ratio_crop)
+{
+ GstPad *link_pad;
+ GstPad *src_pad;
+
+ GST_DEBUG_CATEGORY_INIT (aspect_ratio_crop_debug, "aspectratiocrop", 0,
+ "aspectratiocrop");
+
+ aspect_ratio_crop->ar_num = 0;
+ aspect_ratio_crop->ar_denom = 1;
+
+ g_mutex_init (&aspect_ratio_crop->crop_lock);
+
+ /* add the transform element */
+ aspect_ratio_crop->videocrop = gst_element_factory_make ("videocrop", NULL);
+ gst_bin_add (GST_BIN (aspect_ratio_crop), aspect_ratio_crop->videocrop);
+
+ /* create ghost pad src */
+ link_pad =
+ gst_element_get_static_pad (GST_ELEMENT (aspect_ratio_crop->videocrop),
+ "src");
+ src_pad = gst_ghost_pad_new ("src", link_pad);
+ gst_pad_set_query_function (src_pad,
+ GST_DEBUG_FUNCPTR (gst_aspect_ratio_crop_src_query));
+ gst_element_add_pad (GST_ELEMENT (aspect_ratio_crop), src_pad);
+ gst_object_unref (link_pad);
+ /* create ghost pad sink */
+ link_pad =
+ gst_element_get_static_pad (GST_ELEMENT (aspect_ratio_crop->videocrop),
+ "sink");
+ aspect_ratio_crop->sink = gst_ghost_pad_new ("sink", link_pad);
+ gst_element_add_pad (GST_ELEMENT (aspect_ratio_crop),
+ aspect_ratio_crop->sink);
+ gst_object_unref (link_pad);
+
+ gst_pad_set_event_function (aspect_ratio_crop->sink,
+ GST_DEBUG_FUNCPTR (gst_aspect_ratio_crop_sink_event));
+ gst_pad_set_chain_function (aspect_ratio_crop->sink,
+ GST_DEBUG_FUNCPTR (gst_aspect_ratio_crop_sink_chain));
+}
+
+static void
+gst_aspect_ratio_transform_structure (GstAspectRatioCrop * aspect_ratio_crop,
+ GstStructure * structure, GstStructure ** new_structure,
+ gboolean set_videocrop)
+{
+ gdouble incoming_ar;
+ gdouble requested_ar;
+ gint width, height;
+ gint cropvalue;
+ gint par_d, par_n;
+
+ /* Check if we need to change the aspect ratio */
+ if (aspect_ratio_crop->ar_num < 1) {
+ GST_DEBUG_OBJECT (aspect_ratio_crop, "No cropping requested");
+ goto beach;
+ }
+
+ /* get the information from the caps */
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height))
+ goto beach;
+
+ if (!gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+ &par_n, &par_d)) {
+ par_d = par_n = 1;
+ }
+
+ incoming_ar = ((gdouble) (width * par_n)) / (height * par_d);
+ GST_LOG_OBJECT (aspect_ratio_crop,
+ "incoming caps width(%d), height(%d), par (%d/%d) : ar = %f", width,
+ height, par_n, par_d, incoming_ar);
+
+ requested_ar =
+ (gdouble) aspect_ratio_crop->ar_num / aspect_ratio_crop->ar_denom;
+
+ /* check if the original aspect-ratio is the aspect-ratio that we want */
+ if (requested_ar == incoming_ar) {
+ GST_DEBUG_OBJECT (aspect_ratio_crop,
+ "Input video already has the correct aspect ratio (%.3f == %.3f)",
+ incoming_ar, requested_ar);
+ goto beach;
+ } else if (requested_ar > incoming_ar) {
+ /* fix aspect ratio with cropping on top and bottom */
+ cropvalue =
+ ((((double) aspect_ratio_crop->ar_denom /
+ (double) (aspect_ratio_crop->ar_num)) * ((double) par_n /
+ (double) par_d) * width) - height) / 2;
+ if (cropvalue < 0) {
+ cropvalue *= -1;
+ }
+ if (cropvalue >= (height / 2))
+ goto crop_failed;
+ if (set_videocrop) {
+ gst_aspect_ratio_crop_set_cropping (aspect_ratio_crop, cropvalue, 0,
+ cropvalue, 0);
+ }
+ if (new_structure) {
+ *new_structure = gst_structure_copy (structure);
+ gst_structure_set (*new_structure,
+ "height", G_TYPE_INT, (int) (height - (cropvalue * 2)), NULL);
+ }
+ } else {
+ /* fix aspect ratio with cropping on left and right */
+ cropvalue =
+ ((((double) aspect_ratio_crop->ar_num /
+ (double) (aspect_ratio_crop->ar_denom)) * ((double) par_d /
+ (double) par_n) * height) - width) / 2;
+ if (cropvalue < 0) {
+ cropvalue *= -1;
+ }
+ if (cropvalue >= (width / 2))
+ goto crop_failed;
+ if (set_videocrop) {
+ gst_aspect_ratio_crop_set_cropping (aspect_ratio_crop, 0, cropvalue,
+ 0, cropvalue);
+ }
+ if (new_structure) {
+ *new_structure = gst_structure_copy (structure);
+ gst_structure_set (*new_structure,
+ "width", G_TYPE_INT, (int) (width - (cropvalue * 2)), NULL);
+ }
+ }
+
+ return;
+
+crop_failed:
+ GST_WARNING_OBJECT (aspect_ratio_crop,
+ "can't crop to aspect ratio requested");
+ goto beach;
+beach:
+ if (set_videocrop) {
+ gst_aspect_ratio_crop_set_cropping (aspect_ratio_crop, 0, 0, 0, 0);
+ }
+
+ if (new_structure) {
+ *new_structure = gst_structure_copy (structure);
+ }
+}
+
+static GstCaps *
+gst_aspect_ratio_crop_transform_caps (GstAspectRatioCrop * aspect_ratio_crop,
+ GstCaps * caps)
+{
+ GstCaps *transform;
+ gint size, i;
+
+ transform = gst_caps_new_empty ();
+
+ size = gst_caps_get_size (caps);
+
+ for (i = 0; i < size; i++) {
+ GstStructure *s;
+ GstStructure *trans_s;
+
+ s = gst_caps_get_structure (caps, i);
+
+ gst_aspect_ratio_transform_structure (aspect_ratio_crop, s, &trans_s,
+ FALSE);
+ gst_caps_append_structure (transform, trans_s);
+ }
+
+ return transform;
+}
+
+static GstCaps *
+gst_aspect_ratio_crop_get_caps (GstPad * pad, GstCaps * filter)
+{
+ GstPad *peer;
+ GstAspectRatioCrop *aspect_ratio_crop;
+ GstCaps *return_caps;
+
+ aspect_ratio_crop = GST_ASPECT_RATIO_CROP (gst_pad_get_parent (pad));
+
+ g_mutex_lock (&aspect_ratio_crop->crop_lock);
+
+ peer = gst_pad_get_peer (aspect_ratio_crop->sink);
+ if (peer == NULL) {
+ return_caps = gst_static_pad_template_get_caps (&src_template);
+ } else {
+ GstCaps *peer_caps;
+
+ peer_caps = gst_pad_query_caps (peer, filter);
+ return_caps =
+ gst_aspect_ratio_crop_transform_caps (aspect_ratio_crop, peer_caps);
+ gst_caps_unref (peer_caps);
+ gst_object_unref (peer);
+ }
+
+ g_mutex_unlock (&aspect_ratio_crop->crop_lock);
+ gst_object_unref (aspect_ratio_crop);
+
+ if (return_caps && filter) {
+ GstCaps *tmp =
+ gst_caps_intersect_full (filter, return_caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_replace (&return_caps, tmp);
+ gst_caps_unref (tmp);
+ }
+
+ return return_caps;
+}
+
+static gboolean
+gst_aspect_ratio_crop_src_query (GstPad * pad, GstObject * parent,
+ GstQuery * query)
+{
+ gboolean res = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_aspect_ratio_crop_get_caps (pad, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ res = TRUE;
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, parent, query);
+ break;
+ }
+ return res;
+}
+
+static void
+gst_aspect_ratio_crop_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstAspectRatioCrop *aspect_ratio_crop;
+ gboolean recheck = FALSE;
+
+ aspect_ratio_crop = GST_ASPECT_RATIO_CROP (object);
+
+ GST_OBJECT_LOCK (aspect_ratio_crop);
+ switch (prop_id) {
+ case PROP_ASPECT_RATIO_CROP:
+ if (GST_VALUE_HOLDS_FRACTION (value)) {
+ aspect_ratio_crop->ar_num = gst_value_get_fraction_numerator (value);
+ aspect_ratio_crop->ar_denom =
+ gst_value_get_fraction_denominator (value);
+ recheck = gst_pad_has_current_caps (aspect_ratio_crop->sink);
+ }
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (aspect_ratio_crop);
+
+ if (recheck) {
+ GST_OBJECT_LOCK (aspect_ratio_crop);
+ gst_clear_caps (&aspect_ratio_crop->renegotiation_caps);
+ aspect_ratio_crop->renegotiation_caps =
+ gst_pad_get_current_caps (aspect_ratio_crop->sink);
+ GST_OBJECT_UNLOCK (aspect_ratio_crop);
+ }
+}
+
+static void
+gst_aspect_ratio_crop_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstAspectRatioCrop *aspect_ratio_crop;
+
+ aspect_ratio_crop = GST_ASPECT_RATIO_CROP (object);
+
+ GST_OBJECT_LOCK (aspect_ratio_crop);
+ switch (prop_id) {
+ case PROP_ASPECT_RATIO_CROP:
+ gst_value_set_fraction (value, aspect_ratio_crop->ar_num,
+ aspect_ratio_crop->ar_denom);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_OBJECT_UNLOCK (aspect_ratio_crop);
+}
diff --git a/gst/videocrop/gstaspectratiocrop.h b/gst/videocrop/gstaspectratiocrop.h
new file mode 100644
index 0000000000..2fcec99543
--- /dev/null
+++ b/gst/videocrop/gstaspectratiocrop.h
@@ -0,0 +1,71 @@
+/* GStreamer video frame cropping to aspect-ratio
+ * Copyright (C) 2009 Thijs Vermeir <thijsvermeir@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_ASPECT_RATIO_CROP_H__
+#define __GST_ASPECT_RATIO_CROP_H__
+
+#include <gst/gstbin.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_ASPECT_RATIO_CROP \
+ (gst_aspect_ratio_crop_get_type())
+#define GST_ASPECT_RATIO_CROP(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ASPECT_RATIO_CROP,GstAspectRatioCrop))
+#define GST_ASPECT_RATIO_CROP_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ASPECT_RATIO_CROP,GstAspectRatioCropClass))
+#define GST_IS_ASPECT_RATIO_CROP(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ASPECT_RATIO_CROP))
+#define GST_IS_ASPECT_RATIO_CROP_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ASPECT_RATIO_CROP))
+
+GST_ELEMENT_REGISTER_DECLARE (aspectratiocrop);
+
+typedef struct _GstAspectRatioCrop GstAspectRatioCrop;
+typedef struct _GstAspectRatioCropClass GstAspectRatioCropClass;
+
+struct _GstAspectRatioCrop
+{
+ GstBin parent;
+
+ /* our videocrop element */
+ GstElement *videocrop;
+
+ GstPad *sink;
+
+ /* target aspect ratio */
+ gint ar_num; /* if < 1 then don't change ar */
+ gint ar_denom;
+
+ GstCaps *renegotiation_caps;
+
+ GMutex crop_lock;
+};
+
+struct _GstAspectRatioCropClass
+{
+ GstBinClass parent_class;
+};
+
+GType gst_aspect_ratio_crop_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_ASPECT_RATIO_CROP_H__ */
+
diff --git a/gst/videocrop/gstvideocrop-private.h b/gst/videocrop/gstvideocrop-private.h
new file mode 100644
index 0000000000..92b9b8903e
--- /dev/null
+++ b/gst/videocrop/gstvideocrop-private.h
@@ -0,0 +1,35 @@
+#ifndef __GST_VIDEO_CROP_PRIVATE_H__
+#define __GST_VIDEO_CROP_PRIVATE_H__
+
+/* aspectvideocrop and videocrop support the same pixel formats, since
+ * aspectvideocrop uses videocrop internally.
+ * The definitions of supported pixel formats can thus be shared
+ * between both, avoiding the need for manual synchronization
+ */
+
+#define VIDEO_CROP_FORMATS_PACKED_SIMPLE "RGB, BGR, RGB16, RGB15, " \
+ "RGBx, xRGB, BGRx, xBGR, RGBA, ARGB, BGRA, ABGR, " \
+ "GRAY8, GRAY16_LE, GRAY16_BE, AYUV"
+#define VIDEO_CROP_FORMATS_PACKED_COMPLEX "YVYU, YUY2, UYVY"
+#define VIDEO_CROP_FORMATS_PLANAR "I420, A420, YV12, Y444, Y42B, Y41B, " \
+ "I420_10BE, A420_10BE, Y444_10BE, A444_10BE, I422_10BE, A422_10BE, " \
+ "I420_10LE, A420_10LE, Y444_10LE, A444_10LE, I422_10LE, A422_10LE, " \
+ "I420_12BE, Y444_12BE, I422_12BE, " \
+ "I420_12LE, Y444_12LE, I422_12LE, " \
+ "GBR, GBR_10BE, GBR_10LE, GBR_12BE, GBR_12LE, " \
+ "GBRA, GBRA_10BE, GBRA_10LE, GBRA_12BE, GBRA_12LE"
+#define VIDEO_CROP_FORMATS_SEMI_PLANAR "NV12, NV21"
+
+/* aspectratiocrop uses videocrop. sync caps changes between both */
+#define VIDEO_CROP_CAPS \
+ GST_VIDEO_CAPS_MAKE ("{" \
+ VIDEO_CROP_FORMATS_PACKED_SIMPLE "," \
+ VIDEO_CROP_FORMATS_PACKED_COMPLEX "," \
+ VIDEO_CROP_FORMATS_PLANAR "," \
+ VIDEO_CROP_FORMATS_SEMI_PLANAR "}") "; " \
+ "video/x-raw(ANY), " \
+ "width = " GST_VIDEO_SIZE_RANGE ", " \
+ "height = " GST_VIDEO_SIZE_RANGE ", " \
+ "framerate = " GST_VIDEO_FPS_RANGE
+
+#endif /* __GST_VIDEO_CROP_PRIVATE_H__ */
diff --git a/gst/videocrop/gstvideocrop.c b/gst/videocrop/gstvideocrop.c
new file mode 100644
index 0000000000..8d4ba909c1
--- /dev/null
+++ b/gst/videocrop/gstvideocrop.c
@@ -0,0 +1,990 @@
+/* GStreamer video frame cropping
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-videocrop
+ * @title: videocrop
+ * @see_also: #GstVideoBox
+ *
+ * This element crops video frames, meaning it can remove parts of the
+ * picture on the left, right, top or bottom of the picture and output
+ * a smaller picture than the input picture, with the unwanted parts at the
+ * border removed.
+ *
+ * The videocrop element is similar to the videobox element, but its main
+ * goal is to support a multitude of formats as efficiently as possible.
+ * Unlike videobox, it cannot add borders to the picture and unlike videobox
+ * it will always output images in exactly the same format as the input image.
+ *
+ * If there is nothing to crop, the element will operate in pass-through mode.
+ *
+ * Note that no special efforts are made to handle chroma-subsampled formats
+ * in the case of odd-valued cropping and compensate for sub-unit chroma plane
+ * shifts for such formats in the case where the #GstVideoCrop:left or
+ * #GstVideoCrop:top property is set to an odd number. This doesn't matter for
+ * most use cases, but it might matter for yours.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 -v videotestsrc ! videocrop top=42 left=1 right=4 bottom=0 ! ximagesink
+ * ]|
+ *
+ */
+
+/* TODO:
+ * - for packed formats, we could avoid memcpy() in case crop_left
+ * and crop_right are 0 and just create a sub-buffer of the input
+ * buffer
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvideocrop.h"
+#include "gstaspectratiocrop.h"
+/* include private header which contains the supported formats */
+#include "gstvideocrop-private.h"
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY_STATIC (videocrop_debug);
+#define GST_CAT_DEFAULT videocrop_debug
+
+enum
+{
+ PROP_0,
+ PROP_LEFT,
+ PROP_RIGHT,
+ PROP_TOP,
+ PROP_BOTTOM
+};
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (VIDEO_CROP_CAPS)
+ );
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (VIDEO_CROP_CAPS)
+ );
+
+#define gst_video_crop_parent_class parent_class
+G_DEFINE_TYPE (GstVideoCrop, gst_video_crop, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (videocrop, "videocrop", GST_RANK_NONE,
+ GST_TYPE_VIDEO_CROP);
+
+static void gst_video_crop_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_video_crop_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static void gst_video_crop_before_transform (GstBaseTransform * trans,
+ GstBuffer * in);
+static GstCaps *gst_video_crop_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps);
+static gboolean gst_video_crop_src_event (GstBaseTransform * trans,
+ GstEvent * event);
+
+static gboolean gst_video_crop_set_info (GstVideoFilter * vfilter, GstCaps * in,
+ GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info);
+static GstFlowReturn gst_video_crop_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame);
+
+static gboolean gst_video_crop_decide_allocation (GstBaseTransform * trans,
+ GstQuery * query);
+static gboolean gst_video_crop_propose_allocation (GstBaseTransform * trans,
+ GstQuery * decide_query, GstQuery * query);
+static GstFlowReturn gst_video_crop_transform_ip (GstBaseTransform * trans,
+ GstBuffer * buf);
+
+static gboolean
+gst_video_crop_src_event (GstBaseTransform * trans, GstEvent * event)
+{
+ GstEvent *new_event;
+ GstStructure *new_structure;
+ const GstStructure *structure;
+ const gchar *event_name;
+ double pointer_x;
+ double pointer_y;
+
+ GstVideoCrop *vcrop = GST_VIDEO_CROP (trans);
+ new_event = NULL;
+
+ GST_OBJECT_LOCK (vcrop);
+ if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION &&
+ (vcrop->crop_left != 0 || vcrop->crop_top != 0)) {
+ structure = gst_event_get_structure (event);
+ event_name = gst_structure_get_string (structure, "event");
+
+ if (event_name &&
+ (strcmp (event_name, "mouse-move") == 0 ||
+ strcmp (event_name, "mouse-button-press") == 0 ||
+ strcmp (event_name, "mouse-button-release") == 0)) {
+
+ if (gst_structure_get_double (structure, "pointer_x", &pointer_x) &&
+ gst_structure_get_double (structure, "pointer_y", &pointer_y)) {
+
+ new_structure = gst_structure_copy (structure);
+ gst_structure_set (new_structure,
+ "pointer_x", G_TYPE_DOUBLE, (double) (pointer_x + vcrop->crop_left),
+ "pointer_y", G_TYPE_DOUBLE, (double) (pointer_y + vcrop->crop_top),
+ NULL);
+
+ new_event = gst_event_new_navigation (new_structure);
+ gst_event_unref (event);
+ } else {
+ GST_WARNING_OBJECT (vcrop, "Failed to read navigation event");
+ }
+ }
+ }
+
+ GST_OBJECT_UNLOCK (vcrop);
+
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans,
+ (new_event ? new_event : event));
+}
+
+static void
+gst_video_crop_class_init (GstVideoCropClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *basetransform_class;
+ GstVideoFilterClass *vfilter_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ basetransform_class = (GstBaseTransformClass *) klass;
+ vfilter_class = (GstVideoFilterClass *) klass;
+
+ gobject_class->set_property = gst_video_crop_set_property;
+ gobject_class->get_property = gst_video_crop_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_LEFT,
+ g_param_spec_int ("left", "Left",
+ "Pixels to crop at left (-1 to auto-crop)", -1, G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (gobject_class, PROP_RIGHT,
+ g_param_spec_int ("right", "Right",
+ "Pixels to crop at right (-1 to auto-crop)", -1, G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (gobject_class, PROP_TOP,
+ g_param_spec_int ("top", "Top", "Pixels to crop at top (-1 to auto-crop)",
+ -1, G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (gobject_class, PROP_BOTTOM,
+ g_param_spec_int ("bottom", "Bottom",
+ "Pixels to crop at bottom (-1 to auto-crop)", -1, G_MAXINT, 0,
+ G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
+ GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
+
+ gst_element_class_add_static_pad_template (element_class, &sink_template);
+ gst_element_class_add_static_pad_template (element_class, &src_template);
+ gst_element_class_set_static_metadata (element_class, "Crop",
+ "Filter/Effect/Video",
+ "Crops video into a user-defined region",
+ "Tim-Philipp Müller <tim centricular net>");
+
+ basetransform_class->before_transform =
+ GST_DEBUG_FUNCPTR (gst_video_crop_before_transform);
+ basetransform_class->transform_ip_on_passthrough = FALSE;
+ basetransform_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_video_crop_transform_caps);
+ basetransform_class->src_event = GST_DEBUG_FUNCPTR (gst_video_crop_src_event);
+ basetransform_class->decide_allocation =
+ GST_DEBUG_FUNCPTR (gst_video_crop_decide_allocation);
+ basetransform_class->propose_allocation =
+ GST_DEBUG_FUNCPTR (gst_video_crop_propose_allocation);
+ basetransform_class->transform_ip =
+ GST_DEBUG_FUNCPTR (gst_video_crop_transform_ip);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_crop_set_info);
+ vfilter_class->transform_frame =
+ GST_DEBUG_FUNCPTR (gst_video_crop_transform_frame);
+}
+
+static void
+gst_video_crop_init (GstVideoCrop * vcrop)
+{
+ GST_DEBUG_CATEGORY_INIT (videocrop_debug, "videocrop", 0, "videocrop");
+
+ vcrop->crop_right = 0;
+ vcrop->crop_left = 0;
+ vcrop->crop_top = 0;
+ vcrop->crop_bottom = 0;
+}
+
+#define ROUND_DOWN_2(n) ((n)&(~1))
+
+static void
+gst_video_crop_transform_packed_complex (GstVideoCrop * vcrop,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
+{
+ guint8 *in_data, *out_data;
+ guint i, dx;
+ gint width, height;
+ gint in_stride;
+ gint out_stride;
+
+ width = GST_VIDEO_FRAME_WIDTH (out_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (out_frame);
+
+ in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+ out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
+
+ in_data += vcrop->crop_top * in_stride;
+
+ /* rounding down here so we end up at the start of a macro-pixel and not
+ * in the middle of one */
+ in_data += ROUND_DOWN_2 (vcrop->crop_left) *
+ GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);
+
+ /* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5]
+ * YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
+ if ((vcrop->crop_left % 2) != 0) {
+ for (i = 0; i < height; ++i) {
+ gint j;
+
+ memcpy (out_data, in_data, dx);
+
+ /* move just the Y samples one pixel to the left, don't worry about
+ * chroma shift */
+ for (j = vcrop->macro_y_off; j < out_stride - 2; j += 2)
+ out_data[j] = in_data[j + 2];
+
+ in_data += in_stride;
+ out_data += out_stride;
+ }
+ } else {
+ for (i = 0; i < height; ++i) {
+ memcpy (out_data, in_data, dx);
+ in_data += in_stride;
+ out_data += out_stride;
+ }
+ }
+}
+
+static void
+gst_video_crop_transform_packed_simple (GstVideoCrop * vcrop,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
+{
+ guint8 *in_data, *out_data;
+ gint width, height;
+ guint i, dx;
+ gint in_stride, out_stride;
+
+ width = GST_VIDEO_FRAME_WIDTH (out_frame);
+ height = GST_VIDEO_FRAME_HEIGHT (out_frame);
+
+ in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
+ out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
+
+ in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
+ out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
+
+ in_data += (vcrop->crop_top + y) * in_stride;
+ in_data +=
+ (vcrop->crop_left + x) * GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
+
+ dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);
+
+ for (i = 0; i < height; ++i) {
+ memcpy (out_data, in_data, dx);
+ in_data += in_stride;
+ out_data += out_stride;
+ }
+}
+
/* Crop fully planar formats (I420/Y444/GBR/... families): copy the cropped
 * window plane by plane, scaling the crop offsets by each plane's
 * subsampling factors.
 *
 * @x/@y: extra offsets taken from an upstream GstVideoCropMeta. */
static void
gst_video_crop_transform_planar (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
{
  const GstVideoFormatInfo *format_info;
  gint crop_top, crop_left;
  guint p;

  format_info = in_frame->info.finfo;
  crop_left = vcrop->crop_left + x;
  crop_top = vcrop->crop_top + y;

  for (p = 0; p < GST_VIDEO_FRAME_N_PLANES (in_frame); ++p) {
    guint8 *plane_in, *plane_out;
    guint sub_w_factor, sub_h_factor;
    guint subsampled_crop_left, subsampled_crop_top;
    guint copy_width;
    gint i;
    gsize bytes_per_pixel;

    /* plane */
    plane_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, p);
    plane_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, p);

    /* To support > 8bit, we need to add a byte-multiplier that specifies
     * how many bytes are used per pixel value */
    bytes_per_pixel = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, p);

    /* apply crop top/left
     * crop_top and crop_left have to be rounded down to the corresponding
     * subsampling factor, since, e.g.: the first line in a subsampled plane
     * describes 2 lines in the actual image. A crop_top of 1 thus should
     * not shift the pointer of the input plane. */
    sub_w_factor = 1 << GST_VIDEO_FORMAT_INFO_W_SUB (format_info, p);
    sub_h_factor = 1 << GST_VIDEO_FORMAT_INFO_H_SUB (format_info, p);
    subsampled_crop_left = GST_ROUND_DOWN_N ((guint) crop_left, sub_w_factor);
    subsampled_crop_top = GST_ROUND_DOWN_N ((guint) crop_top, sub_h_factor);

    /* move the input pointer to the first (subsampled) pixel of the
     * cropped region for this plane */
    plane_in +=
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (format_info, p,
        subsampled_crop_top) * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, p);
    plane_in +=
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (format_info, p,
        subsampled_crop_left) * bytes_per_pixel;
    copy_width = GST_VIDEO_FRAME_COMP_WIDTH (out_frame, p) * bytes_per_pixel;

    /* line-by-line copy of the plane's cropped window */
    for (i = 0; i < GST_VIDEO_FRAME_COMP_HEIGHT (out_frame, p); ++i) {
      memcpy (plane_out, plane_in, copy_width);
      plane_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, p);
      plane_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, p);
    }
  }
}
+
/* Crop semi-planar 4:2:0 formats (NV12/NV21): a full-resolution Y plane
 * followed by one interleaved, half-resolution chroma plane.
 *
 * @x/@y: extra offsets taken from an upstream GstVideoCropMeta. */
static void
gst_video_crop_transform_semi_planar (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
{
  gint width, height;
  gint crop_top, crop_left;
  guint8 *y_out, *uv_out;
  guint8 *y_in, *uv_in;
  guint i, dx;

  width = GST_VIDEO_FRAME_WIDTH (out_frame);
  height = GST_VIDEO_FRAME_HEIGHT (out_frame);
  crop_left = vcrop->crop_left + x;
  crop_top = vcrop->crop_top + y;

  /* Y plane */
  y_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  y_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  /* UV plane */
  uv_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1);
  uv_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 1);

  /* luma: 1 byte per pixel, copy 'width' bytes per line */
  y_in += crop_top * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) + crop_left;
  dx = width;

  for (i = 0; i < height; ++i) {
    memcpy (y_out, y_in, dx);
    y_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
    y_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
  }

  /* chroma: one line covers two image lines, one U/V pair covers two
   * pixels, so halve the vertical offset and round the horizontal offset
   * down to an even byte so we stay on a U/V pair boundary */
  uv_in += (crop_top / 2) * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
  uv_in += GST_ROUND_DOWN_2 (crop_left);
  dx = GST_ROUND_UP_2 (width);

  for (i = 0; i < GST_ROUND_UP_2 (height) / 2; i++) {
    memcpy (uv_out, uv_in, dx);
    uv_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
    uv_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 1);
  }
}
+
+static GstFlowReturn
+gst_video_crop_transform_frame (GstVideoFilter * vfilter,
+ GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+ GstVideoCrop *vcrop = GST_VIDEO_CROP (vfilter);
+ GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta (in_frame->buffer);
+ gint x = 0, y = 0;
+
+ if (G_UNLIKELY (vcrop->need_update)) {
+ if (!gst_video_crop_set_info (vfilter, NULL, &vcrop->in_info, NULL,
+ &vcrop->out_info)) {
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ if (meta) {
+ x = meta->x;
+ y = meta->y;
+ }
+
+ switch (vcrop->packing) {
+ case VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE:
+ gst_video_crop_transform_packed_simple (vcrop, in_frame, out_frame, x, y);
+ break;
+ case VIDEO_CROP_PIXEL_FORMAT_PACKED_COMPLEX:
+ gst_video_crop_transform_packed_complex (vcrop, in_frame, out_frame, x,
+ y);
+ break;
+ case VIDEO_CROP_PIXEL_FORMAT_PLANAR:
+ gst_video_crop_transform_planar (vcrop, in_frame, out_frame, x, y);
+ break;
+ case VIDEO_CROP_PIXEL_FORMAT_SEMI_PLANAR:
+ gst_video_crop_transform_semi_planar (vcrop, in_frame, out_frame, x, y);
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+
+ return GST_FLOW_OK;
+}
+
+static gboolean
+gst_video_crop_decide_allocation (GstBaseTransform * trans, GstQuery * query)
+{
+ GstVideoCrop *crop = GST_VIDEO_CROP (trans);
+ gboolean use_crop_meta;
+
+ use_crop_meta = (gst_query_find_allocation_meta (query,
+ GST_VIDEO_CROP_META_API_TYPE, NULL) &&
+ gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL));
+
+ if ((crop->crop_left | crop->crop_right | crop->crop_top | crop->
+ crop_bottom) == 0) {
+ GST_INFO_OBJECT (crop, "we are using passthrough");
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), TRUE);
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), FALSE);
+ } else if (use_crop_meta) {
+ GST_INFO_OBJECT (crop, "we are doing in-place transform using crop meta");
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), FALSE);
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), TRUE);
+ } else if (crop->raw_caps) {
+ GST_INFO_OBJECT (crop, "we are not using passthrough");
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), FALSE);
+ gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), FALSE);
+ } else {
+ GST_ELEMENT_ERROR (crop, STREAM, WRONG_TYPE,
+ ("Dowstream doesn't support crop for non-raw caps"), (NULL));
+ return FALSE;
+ }
+
+ return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
+ query);
+}
+
/* GstBaseTransform::propose_allocation: advertise to upstream that we can
 * consume buffers carrying video meta and crop meta. */
static gboolean
gst_video_crop_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
{
  /* if we are not passthrough, we can handle video meta and crop meta
   * (decide_query is NULL exactly when we operate in passthrough mode) */
  if (decide_query) {
    GST_DEBUG_OBJECT (trans, "Advertising video meta and crop meta support");
    gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
    gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE, NULL);
  }

  return GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
      decide_query, query);
}
+
+static void
+gst_video_crop_before_transform (GstBaseTransform * trans, GstBuffer * in)
+{
+ GstVideoCrop *video_crop = GST_VIDEO_CROP (trans);
+ GstClockTime timestamp, stream_time;
+
+ timestamp = GST_BUFFER_TIMESTAMP (in);
+ stream_time =
+ gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (video_crop, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (video_crop), stream_time);
+}
+
/* GstBaseTransform::transform_ip: in-place "cropping" — no pixels are moved,
 * we only attach/adjust a GstVideoCropMeta describing the crop rectangle and
 * leave the actual cropping to downstream. */
static GstFlowReturn
gst_video_crop_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstVideoCrop *vcrop = GST_VIDEO_CROP (trans);
  GstVideoFilter *vfilter = GST_VIDEO_FILTER (trans);
  GstVideoMeta *video_meta;
  GstVideoCropMeta *crop_meta;

  GST_LOG_OBJECT (trans, "Transforming in-place");

  /* a property change may have been deferred; recompute crop values now */
  if (G_UNLIKELY (vcrop->need_update)) {
    if (!gst_video_crop_set_info (vfilter, NULL, &vcrop->in_info, NULL,
            &vcrop->out_info)) {
      return GST_FLOW_ERROR;
    }
  }

  /* The video meta is required since we are going to make the caps
   * width/height smaller, which would not result in a usable GstVideoInfo for
   * mapping the buffer. */
  video_meta = gst_buffer_get_video_meta (buf);
  if (!video_meta) {
    video_meta = gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (&vcrop->in_info), vcrop->in_info.width,
        vcrop->in_info.height);
  }

  crop_meta = gst_buffer_get_video_crop_meta (buf);
  if (!crop_meta)
    crop_meta = gst_buffer_add_video_crop_meta (buf);

  /* accumulate on top of any pre-existing crop meta offsets, but the
   * resulting size is always our negotiated output size */
  crop_meta->x += vcrop->crop_left;
  crop_meta->y += vcrop->crop_top;
  crop_meta->width = GST_VIDEO_INFO_WIDTH (&vcrop->out_info);
  crop_meta->height = GST_VIDEO_INFO_HEIGHT (&vcrop->out_info);

  return GST_FLOW_OK;
}
+
+static gint
+gst_video_crop_transform_dimension (gint val, gint delta)
+{
+ gint64 new_val = (gint64) val + (gint64) delta;
+
+ new_val = CLAMP (new_val, 1, G_MAXINT);
+
+ return (gint) new_val;
+}
+
/* Transform one caps "width"/"height" GValue by @delta pixels.
 *
 * Handles plain ints, int ranges and lists (recursively). When @dynamic is
 * TRUE (a crop property is -1, i.e. "auto"), the result is widened into a
 * range: towards G_MAXINT on the src side, down to 1 on the sink side.
 *
 * Returns TRUE and initialises @dest_val on success; FALSE if @src_val has
 * an unsupported type or a list transformed to nothing. */
static gboolean
gst_video_crop_transform_dimension_value (const GValue * src_val,
    gint delta, GValue * dest_val, GstPadDirection direction, gboolean dynamic)
{
  gboolean ret = TRUE;

  if (G_VALUE_HOLDS_INT (src_val)) {
    gint ival = g_value_get_int (src_val);
    ival = gst_video_crop_transform_dimension (ival, delta);

    if (dynamic) {
      if (direction == GST_PAD_SRC) {
        /* degenerate range [G_MAXINT, G_MAXINT] must stay a plain int */
        if (ival == G_MAXINT) {
          g_value_init (dest_val, G_TYPE_INT);
          g_value_set_int (dest_val, ival);
        } else {
          g_value_init (dest_val, GST_TYPE_INT_RANGE);
          gst_value_set_int_range (dest_val, ival, G_MAXINT);
        }
      } else {
        /* degenerate range [1, 1] must stay a plain int */
        if (ival == 1) {
          g_value_init (dest_val, G_TYPE_INT);
          g_value_set_int (dest_val, ival);
        } else {
          g_value_init (dest_val, GST_TYPE_INT_RANGE);
          gst_value_set_int_range (dest_val, 1, ival);
        }
      }
    } else {
      g_value_init (dest_val, G_TYPE_INT);
      g_value_set_int (dest_val, ival);
    }
  } else if (GST_VALUE_HOLDS_INT_RANGE (src_val)) {
    gint min = gst_value_get_int_range_min (src_val);
    gint max = gst_value_get_int_range_max (src_val);

    min = gst_video_crop_transform_dimension (min, delta);
    max = gst_video_crop_transform_dimension (max, delta);

    if (dynamic) {
      if (direction == GST_PAD_SRC)
        max = G_MAXINT;
      else
        min = 1;
    }

    /* collapse a degenerate range into a plain int */
    if (min == max) {
      g_value_init (dest_val, G_TYPE_INT);
      g_value_set_int (dest_val, min);
    } else {
      g_value_init (dest_val, GST_TYPE_INT_RANGE);
      gst_value_set_int_range (dest_val, min, max);
    }
  } else if (GST_VALUE_HOLDS_LIST (src_val)) {
    gint i;

    g_value_init (dest_val, GST_TYPE_LIST);

    /* transform each entry recursively, dropping entries that fail */
    for (i = 0; i < gst_value_list_get_size (src_val); ++i) {
      const GValue *list_val;
      GValue newval = G_VALUE_INIT;

      list_val = gst_value_list_get_value (src_val, i);
      if (gst_video_crop_transform_dimension_value (list_val, delta, &newval,
              direction, dynamic))
        gst_value_list_append_value (dest_val, &newval);
      g_value_unset (&newval);
    }

    /* an empty result list is a failure, not an empty constraint */
    if (gst_value_list_get_size (dest_val) == 0) {
      g_value_unset (dest_val);
      ret = FALSE;
    }
  } else {
    ret = FALSE;
  }

  return ret;
}
+
+static GstCaps *
+gst_video_crop_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps)
+{
+ GstVideoCrop *vcrop;
+ GstCaps *other_caps;
+ gint dy, dx, i, left, right, bottom, top;
+ gboolean w_dynamic, h_dynamic;
+
+ vcrop = GST_VIDEO_CROP (trans);
+
+ GST_OBJECT_LOCK (vcrop);
+
+ GST_LOG_OBJECT (vcrop, "l=%d,r=%d,b=%d,t=%d",
+ vcrop->prop_left, vcrop->prop_right, vcrop->prop_bottom, vcrop->prop_top);
+
+ w_dynamic = (vcrop->prop_left == -1 || vcrop->prop_right == -1);
+ h_dynamic = (vcrop->prop_top == -1 || vcrop->prop_bottom == -1);
+
+ left = (vcrop->prop_left == -1) ? 0 : vcrop->prop_left;
+ right = (vcrop->prop_right == -1) ? 0 : vcrop->prop_right;
+ bottom = (vcrop->prop_bottom == -1) ? 0 : vcrop->prop_bottom;
+ top = (vcrop->prop_top == -1) ? 0 : vcrop->prop_top;
+
+ GST_OBJECT_UNLOCK (vcrop);
+
+ if (direction == GST_PAD_SRC) {
+ dx = left + right;
+ dy = top + bottom;
+ } else {
+ dx = 0 - (left + right);
+ dy = 0 - (top + bottom);
+ }
+
+ GST_LOG_OBJECT (vcrop, "transforming caps %" GST_PTR_FORMAT, caps);
+
+ other_caps = gst_caps_new_empty ();
+
+ for (i = 0; i < gst_caps_get_size (caps); ++i) {
+ const GValue *v;
+ GstStructure *structure, *new_structure;
+ GValue w_val = G_VALUE_INIT, h_val = G_VALUE_INIT;
+ GstCapsFeatures *features;
+
+ structure = gst_caps_get_structure (caps, i);
+ features = gst_caps_get_features (caps, i);
+
+ v = gst_structure_get_value (structure, "width");
+ if (!gst_video_crop_transform_dimension_value (v, dx, &w_val, direction,
+ w_dynamic)) {
+ GST_WARNING_OBJECT (vcrop, "could not transform width value with dx=%d"
+ ", caps structure=%" GST_PTR_FORMAT, dx, structure);
+ continue;
+ }
+
+ v = gst_structure_get_value (structure, "height");
+ if (!gst_video_crop_transform_dimension_value (v, dy, &h_val, direction,
+ h_dynamic)) {
+ g_value_unset (&w_val);
+ GST_WARNING_OBJECT (vcrop, "could not transform height value with dy=%d"
+ ", caps structure=%" GST_PTR_FORMAT, dy, structure);
+ continue;
+ }
+
+ new_structure = gst_structure_copy (structure);
+ gst_structure_set_value (new_structure, "width", &w_val);
+ gst_structure_set_value (new_structure, "height", &h_val);
+ g_value_unset (&w_val);
+ g_value_unset (&h_val);
+
+ GST_LOG_OBJECT (vcrop, "transformed structure %2d: %" GST_PTR_FORMAT
+ " => %" GST_PTR_FORMAT "features %" GST_PTR_FORMAT, i, structure,
+ new_structure, features);
+ gst_caps_append_structure (other_caps, new_structure);
+
+ gst_caps_set_features (other_caps, i, gst_caps_features_copy (features));
+ }
+
+ if (!gst_caps_is_empty (other_caps) && filter_caps) {
+ GstCaps *tmp = gst_caps_intersect_full (filter_caps, other_caps,
+ GST_CAPS_INTERSECT_FIRST);
+ gst_caps_replace (&other_caps, tmp);
+ gst_caps_unref (tmp);
+ }
+
+ return other_caps;
+}
+
/* GstVideoFilter::set_info: resolve the effective crop amounts from the
 * properties and the negotiated in/out sizes, validate them, and classify
 * the input format into one of the VIDEO_CROP_PIXEL_FORMAT_* layouts used
 * by transform_frame().
 *
 * Also called with NULL caps from the transform functions when a property
 * changed (need_update) to recompute the cached values.
 *
 * Returns FALSE if the crop amounts don't fit the negotiated sizes or the
 * format is unsupported for raw cropping. */
static gboolean
gst_video_crop_set_info (GstVideoFilter * vfilter, GstCaps * in,
    GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info)
{
  GstVideoCrop *crop = GST_VIDEO_CROP (vfilter);
  GstCapsFeatures *features;
  int dx, dy;

  /* snapshot the property values under the object lock; everything below
   * works on the cached crop_* copies */
  GST_OBJECT_LOCK (crop);
  crop->need_update = FALSE;
  crop->crop_left = crop->prop_left;
  crop->crop_right = crop->prop_right;
  crop->crop_top = crop->prop_top;
  crop->crop_bottom = crop->prop_bottom;
  GST_OBJECT_UNLOCK (crop);

  /* total amount to crop in each dimension */
  dx = GST_VIDEO_INFO_WIDTH (in_info) - GST_VIDEO_INFO_WIDTH (out_info);
  dy = GST_VIDEO_INFO_HEIGHT (in_info) - GST_VIDEO_INFO_HEIGHT (out_info);

  /* resolve -1 ("auto") values: both auto -> split evenly (extra pixel goes
   * to the right/bottom), one auto -> whatever remains after the other */
  if (crop->crop_left == -1 && crop->crop_right == -1) {
    crop->crop_left = dx / 2;
    crop->crop_right = dx / 2 + (dx & 1);
  } else if (crop->crop_left == -1) {
    if (G_UNLIKELY (crop->crop_right > dx))
      goto cropping_too_much;
    crop->crop_left = dx - crop->crop_right;
  } else if (crop->crop_right == -1) {
    if (G_UNLIKELY (crop->crop_left > dx))
      goto cropping_too_much;
    crop->crop_right = dx - crop->crop_left;
  }

  if (crop->crop_top == -1 && crop->crop_bottom == -1) {
    crop->crop_top = dy / 2;
    crop->crop_bottom = dy / 2 + (dy & 1);
  } else if (crop->crop_top == -1) {
    if (G_UNLIKELY (crop->crop_bottom > dy))
      goto cropping_too_much;
    crop->crop_top = dy - crop->crop_bottom;
  } else if (crop->crop_bottom == -1) {
    if (G_UNLIKELY (crop->crop_top > dy))
      goto cropping_too_much;
    crop->crop_bottom = dy - crop->crop_top;
  }

  /* at least one pixel must survive in each dimension */
  if (G_UNLIKELY ((crop->crop_left + crop->crop_right) >=
          GST_VIDEO_INFO_WIDTH (in_info)
          || (crop->crop_top + crop->crop_bottom) >=
          GST_VIDEO_INFO_HEIGHT (in_info)))
    goto cropping_too_much;

  if (in && out)
    GST_LOG_OBJECT (crop, "incaps = %" GST_PTR_FORMAT ", outcaps = %"
        GST_PTR_FORMAT, in, out);

  /* only plain system memory can be cropped by copying; anything else must
   * go through the crop-meta path (see decide_allocation) */
  if (in) {
    features = gst_caps_get_features (in, 0);
    crop->raw_caps = gst_caps_features_is_equal (features,
        GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY);
  }

  if (!crop->raw_caps)
    goto beach;

  /* classify the format's memory layout for transform_frame() dispatch */
  switch (GST_VIDEO_INFO_FORMAT (in_info)) {
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_RGB15:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_AYUV:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE;
      break;
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_COMPLEX;
      if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_FORMAT_UYVY) {
        /* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5] */
        crop->macro_y_off = 1;
      } else {
        /* YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
        crop->macro_y_off = 0;
      }
      break;
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_I420_10BE:
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_12BE:
    case GST_VIDEO_FORMAT_I420_12LE:
    case GST_VIDEO_FORMAT_A420:
    case GST_VIDEO_FORMAT_A420_10BE:
    case GST_VIDEO_FORMAT_A420_10LE:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_12BE:
    case GST_VIDEO_FORMAT_Y444_12LE:
    case GST_VIDEO_FORMAT_A444_10BE:
    case GST_VIDEO_FORMAT_A444_10LE:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_A422_10BE:
    case GST_VIDEO_FORMAT_A422_10LE:
    case GST_VIDEO_FORMAT_I422_12BE:
    case GST_VIDEO_FORMAT_I422_12LE:
    case GST_VIDEO_FORMAT_GBR:
    case GST_VIDEO_FORMAT_GBR_10BE:
    case GST_VIDEO_FORMAT_GBR_10LE:
    case GST_VIDEO_FORMAT_GBR_12BE:
    case GST_VIDEO_FORMAT_GBR_12LE:
    case GST_VIDEO_FORMAT_GBRA:
    case GST_VIDEO_FORMAT_GBRA_10BE:
    case GST_VIDEO_FORMAT_GBRA_10LE:
    case GST_VIDEO_FORMAT_GBRA_12BE:
    case GST_VIDEO_FORMAT_GBRA_12LE:
    case GST_VIDEO_FORMAT_Y41B:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_PLANAR;
      break;
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_SEMI_PLANAR;
      break;
    default:
      goto unknown_format;
  }

beach:
  crop->in_info = *in_info;
  crop->out_info = *out_info;

  /* Ensure our decide_allocation will be called again when needed */
  if (gst_base_transform_is_passthrough (GST_BASE_TRANSFORM (crop))) {
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), FALSE);
    gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), FALSE);
  }

  return TRUE;

  /* ERROR */
cropping_too_much:
  {
    GST_WARNING_OBJECT (crop, "we are cropping too much");
    return FALSE;
  }
unknown_format:
  {
    GST_WARNING_OBJECT (crop, "Unsupported format");
    return FALSE;
  }
}
+
+/* called with object lock */
+static inline void
+gst_video_crop_set_crop (GstVideoCrop * vcrop, gint new_value, gint * prop)
+{
+ if (*prop != new_value) {
+ *prop = new_value;
+ vcrop->need_update = TRUE;
+ }
+}
+
+static void
+gst_video_crop_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVideoCrop *video_crop;
+
+ video_crop = GST_VIDEO_CROP (object);
+
+ GST_OBJECT_LOCK (video_crop);
+ switch (prop_id) {
+ case PROP_LEFT:
+ gst_video_crop_set_crop (video_crop, g_value_get_int (value),
+ &video_crop->prop_left);
+ break;
+ case PROP_RIGHT:
+ gst_video_crop_set_crop (video_crop, g_value_get_int (value),
+ &video_crop->prop_right);
+ break;
+ case PROP_TOP:
+ gst_video_crop_set_crop (video_crop, g_value_get_int (value),
+ &video_crop->prop_top);
+ break;
+ case PROP_BOTTOM:
+ gst_video_crop_set_crop (video_crop, g_value_get_int (value),
+ &video_crop->prop_bottom);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+ GST_LOG_OBJECT (video_crop, "l=%d,r=%d,b=%d,t=%d, need_update:%d",
+ video_crop->prop_left, video_crop->prop_right, video_crop->prop_bottom,
+ video_crop->prop_top, video_crop->need_update);
+
+ GST_OBJECT_UNLOCK (video_crop);
+
+ gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (video_crop));
+}
+
/* GObject::get_property: read a crop property under the object lock. */
static void
gst_video_crop_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstVideoCrop *video_crop;

  video_crop = GST_VIDEO_CROP (object);

  GST_OBJECT_LOCK (video_crop);
  switch (prop_id) {
    case PROP_LEFT:
      g_value_set_int (value, video_crop->prop_left);
      break;
    case PROP_RIGHT:
      g_value_set_int (value, video_crop->prop_right);
      break;
    case PROP_TOP:
      g_value_set_int (value, video_crop->prop_top);
      break;
    case PROP_BOTTOM:
      g_value_set_int (value, video_crop->prop_bottom);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (video_crop);
}
diff --git a/gst/videocrop/gstvideocrop.h b/gst/videocrop/gstvideocrop.h
new file mode 100644
index 0000000000..6a05fb2546
--- /dev/null
+++ b/gst/videocrop/gstvideocrop.h
@@ -0,0 +1,95 @@
+/* GStreamer video frame cropping
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
#ifndef __GST_VIDEO_CROP_H__
#define __GST_VIDEO_CROP_H__

#include <gst/video/gstvideofilter.h>

G_BEGIN_DECLS
#define GST_TYPE_VIDEO_CROP \
  (gst_video_crop_get_type())
#define GST_VIDEO_CROP(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_CROP,GstVideoCrop))
#define GST_VIDEO_CROP_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_CROP,GstVideoCropClass))
#define GST_IS_VIDEO_CROP(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_CROP))
#define GST_IS_VIDEO_CROP_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_CROP))

GST_ELEMENT_REGISTER_DECLARE (videocrop);

/* Classification of the negotiated pixel format's memory layout; selects
 * the copy strategy in gst_video_crop_transform_frame(). */
typedef enum
{
  /* RGB (+ variants), ARGB (+ variants), AYUV, GRAY */
  VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE = 0,
  /* YVYU, YUY2, UYVY */
  VIDEO_CROP_PIXEL_FORMAT_PACKED_COMPLEX,
  /* I420, A420, YV12, Y444, Y42B, Y41B,
   * I420_10BE, A420_10BE, Y444_10BE, A444_10BE, I422_10BE, A422_10BE,
   * I420_10LE, A420_10LE, Y444_10LE, A444_10LE, I422_10LE, A422_10LE,
   * I420_12BE, Y444_12BE, I422_12BE,
   * I420_12LE, Y444_12LE, I422_12LE,
   * GBR, GBR_10BE, GBR_10LE, GBR_12BE, GBR_12LE,
   * GBRA, GBRA_10BE, GBRA_10LE, GBRA_12BE, GBRA_12LE */
  VIDEO_CROP_PIXEL_FORMAT_PLANAR,
  /* NV12, NV21 */
  VIDEO_CROP_PIXEL_FORMAT_SEMI_PLANAR
} VideoCropPixelFormat;

/* NOTE(review): this typedef appears unused in the visible sources —
 * confirm against the rest of the plugin before removing */
typedef struct _GstVideoCropImageDetails GstVideoCropImageDetails;

typedef struct _GstVideoCrop GstVideoCrop;
typedef struct _GstVideoCropClass GstVideoCropClass;

struct _GstVideoCrop
{
  GstVideoFilter parent;

  /*< private > */
  /* crop amounts as set via the GObject properties; -1 means "auto"
   * (derive from the other side and the negotiated output size) */
  gint prop_left;
  gint prop_right;
  gint prop_top;
  gint prop_bottom;
  /* TRUE when a property changed and the crop_* values below must be
   * recomputed (in set_info) before processing the next frame */
  gboolean need_update;

  /* negotiated input/output video info */
  GstVideoInfo in_info;
  GstVideoInfo out_info;

  /* effective crop amounts in pixels, resolved from the properties */
  gint crop_left;
  gint crop_right;
  gint crop_top;
  gint crop_bottom;

  /* memory-layout class of the negotiated format */
  VideoCropPixelFormat packing;
  /* byte offset of the first luma sample inside a packed 4:2:2
   * macro-pixel (1 for UYVY, 0 for YUY2/YVYU) */
  gint macro_y_off;

  /* TRUE when the negotiated caps use plain system memory */
  gboolean raw_caps;
};

struct _GstVideoCropClass
{
  GstVideoFilterClass parent_class;
};

GType gst_video_crop_get_type (void);

G_END_DECLS
#endif /* __GST_VIDEO_CROP_H__ */
diff --git a/gst/videocrop/gstvideocropplugin.c b/gst/videocrop/gstvideocropplugin.c
new file mode 100644
index 0000000000..8502fca486
--- /dev/null
+++ b/gst/videocrop/gstvideocropplugin.c
@@ -0,0 +1,43 @@
+/* GStreamer video frame cropping
+ * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstaspectratiocrop.h"
+#include "gstvideocrop.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ gboolean ret = FALSE;
+
+ ret |= GST_ELEMENT_REGISTER (videocrop, plugin);
+ ret |= GST_ELEMENT_REGISTER (aspectratiocrop, plugin);
+
+ return ret;
+}
+
/* Plugin descriptor; VERSION, GST_LICENSE, GST_PACKAGE_NAME and
 * GST_PACKAGE_ORIGIN are provided by config.h at build time. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    videocrop,
    "Crops video into a user-defined region",
    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/videocrop/meson.build b/gst/videocrop/meson.build
new file mode 100644
index 0000000000..439c901752
--- /dev/null
+++ b/gst/videocrop/meson.build
@@ -0,0 +1,10 @@
# Build the videocrop plugin (videocrop and aspectratiocrop elements).
gstvideocrop = library('gstvideocrop',
  'gstvideocrop.c', 'gstaspectratiocrop.c', 'gstvideocropplugin.c',
  c_args : gst_plugins_good_args,
  include_directories : [configinc, libsinc],
  dependencies : [gst_dep, gstbase_dep, gstvideo_dep],
  install : true,
  install_dir : plugins_install_dir,
)
# Generate the .pc file and register the plugin with the top-level build.
pkgconfig.generate(gstvideocrop, install_dir : plugins_pkgconfig_install_dir)
plugins += [gstvideocrop]
diff --git a/gst/videofilter/gstgamma.c b/gst/videofilter/gstgamma.c
new file mode 100644
index 0000000000..1d80a67f4a
--- /dev/null
+++ b/gst/videofilter/gstgamma.c
@@ -0,0 +1,415 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) 2003 Arwed v. Merkatz <v.merkatz@gmx.net>
+ * Copyright (C) 2006 Mark Nauwelaerts <manauw@skynet.be>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This file was (probably) generated from
+ * gstvideotemplate.c,v 1.12 2004/01/07 21:07:12 ds Exp
+ * and
+ * make_filter,v 1.6 2004/01/07 21:33:01 ds Exp
+ */
+
+/**
+ * SECTION:element-gamma
+ * @title: gamma
+ *
+ * Performs gamma correction on a video stream.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 videotestsrc ! gamma gamma=2.0 ! videoconvert ! ximagesink
+ * ]| This pipeline will make the image "brighter".
+ * |[
+ * gst-launch-1.0 videotestsrc ! gamma gamma=0.5 ! videoconvert ! ximagesink
+ * ]| This pipeline will make the image "darker".
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstgamma.h"
+#include <string.h>
+#include <math.h>
+
+#include <gst/video/video.h>
+
+GST_DEBUG_CATEGORY_STATIC (gamma_debug);
+#define GST_CAT_DEFAULT gamma_debug
+
+/* GstGamma properties */
+enum
+{
+ PROP_0,
+ PROP_GAMMA
+ /* FILL ME */
+};
+
+#define DEFAULT_PROP_GAMMA 1
+
+static GstStaticPadTemplate gst_gamma_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, "
+ "xRGB, RGBx, xBGR, BGRx, RGB, BGR, Y42B, NV12, "
+ "NV21, YUY2, UYVY, YVYU, I420, YV12, IYUV, Y41B }"))
+ );
+
+static GstStaticPadTemplate gst_gamma_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, "
+ "xRGB, RGBx, xBGR, BGRx, RGB, BGR, Y42B, NV12, "
+ "NV21, YUY2, UYVY, YVYU, I420, YV12, IYUV, Y41B }"))
+ );
+
+static void gst_gamma_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_gamma_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_gamma_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info);
+static GstFlowReturn gst_gamma_transform_frame_ip (GstVideoFilter * vfilter,
+ GstVideoFrame * frame);
+static void gst_gamma_before_transform (GstBaseTransform * transform,
+ GstBuffer * buf);
+
+static void gst_gamma_calculate_tables (GstGamma * gamma);
+
+G_DEFINE_TYPE (GstGamma, gst_gamma, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (gamma, "gamma", GST_RANK_NONE, GST_TYPE_GAMMA);
+
+/* Class setup: installs the controllable "gamma" property, the element
+ * metadata and pad templates, and wires up the base-transform and
+ * video-filter virtual methods. */
+static void
+gst_gamma_class_init (GstGammaClass * g_class)
+{
+ GObjectClass *gobject_class = (GObjectClass *) g_class;
+ GstElementClass *gstelement_class = (GstElementClass *) g_class;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) g_class;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) g_class;
+
+ GST_DEBUG_CATEGORY_INIT (gamma_debug, "gamma", 0, "gamma");
+
+ gobject_class->set_property = gst_gamma_set_property;
+ gobject_class->get_property = gst_gamma_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_GAMMA,
+ g_param_spec_double ("gamma", "Gamma", "gamma",
+ 0.01, 10, DEFAULT_PROP_GAMMA,
+ GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS | G_PARAM_READWRITE));
+
+ /* fix: the author email string was missing its closing '>' */
+ gst_element_class_set_static_metadata (gstelement_class,
+ "Video gamma correction", "Filter/Effect/Video",
+ "Adjusts gamma on a video stream", "Arwed v. Merkatz <v.merkatz@gmx.net>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_gamma_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_gamma_src_template);
+
+ trans_class->before_transform =
+ GST_DEBUG_FUNCPTR (gst_gamma_before_transform);
+ /* in-place transform must still run while passthrough is toggled off/on
+ * by the property; never process a passthrough buffer */
+ trans_class->transform_ip_on_passthrough = FALSE;
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_gamma_set_info);
+ vfilter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_gamma_transform_frame_ip);
+}
+
+/* Instance init: set the default gamma and build the initial LUT
+ * (which also puts the element into passthrough, since the default is 1). */
+static void
+gst_gamma_init (GstGamma * gamma)
+{
+ /* properties */
+ gamma->gamma = DEFAULT_PROP_GAMMA;
+ gst_gamma_calculate_tables (gamma);
+}
+
+/* Property setter: stores the new gamma under the object lock and then
+ * rebuilds the LUT (gst_gamma_calculate_tables takes the lock itself,
+ * so it must be called after unlocking here). */
+static void
+gst_gamma_set_property (GObject * object, guint prop_id, const GValue * value,
+ GParamSpec * pspec)
+{
+ GstGamma *gamma = GST_GAMMA (object);
+
+ switch (prop_id) {
+ case PROP_GAMMA:{
+ gdouble val = g_value_get_double (value);
+
+ GST_DEBUG_OBJECT (gamma, "Changing gamma from %lf to %lf", gamma->gamma,
+ val);
+ GST_OBJECT_LOCK (gamma);
+ gamma->gamma = val;
+ GST_OBJECT_UNLOCK (gamma);
+ gst_gamma_calculate_tables (gamma);
+ break;
+ }
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Property getter.
+ * NOTE(review): reads gamma->gamma without taking the object lock;
+ * presumably a gdouble read is considered atomic enough here — confirm. */
+static void
+gst_gamma_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstGamma *gamma = GST_GAMMA (object);
+
+ switch (prop_id) {
+ case PROP_GAMMA:
+ g_value_set_double (value, gamma->gamma);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* Rebuild the 256-entry gamma LUT: table[n] = round(255 * (n/255)^(1/gamma)).
+ * A gamma of exactly 1.0 is the identity, so the element is switched to
+ * passthrough instead of filling the table. */
+static void
+gst_gamma_calculate_tables (GstGamma * gamma)
+{
+ gint n;
+ gdouble val;
+ gdouble exp;
+ gboolean passthrough = FALSE;
+
+ GST_OBJECT_LOCK (gamma);
+ if (gamma->gamma == 1.0) {
+ passthrough = TRUE;
+ } else {
+ exp = 1.0 / gamma->gamma;
+ for (n = 0; n < 256; n++) {
+ val = n / 255.0;
+ val = pow (val, exp);
+ val = 255.0 * val;
+ /* +0.5 then floor = round-to-nearest */
+ gamma->gamma_table[n] = (guint8) floor (val + 0.5);
+ }
+ }
+ GST_OBJECT_UNLOCK (gamma);
+
+ gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (gamma), passthrough);
+}
+
+/* In-place gamma for planar YUV: runs the LUT over component 0 (luma)
+ * only; chroma planes are left untouched. */
+static void
+gst_gamma_planar_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
+{
+ gint i, j, height;
+ gint width, stride, row_wrap;
+ const guint8 *table = gamma->gamma_table;
+ guint8 *data;
+
+ data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+ /* padding bytes at the end of each row */
+ row_wrap = stride - width;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ *data = table[*data];
+ data++;
+ }
+ data += row_wrap;
+ }
+}
+
+/* In-place gamma for packed YUV (YUY2, UYVY, AYUV, YVYU): same luma-only
+ * LUT as the planar path, but hopping by the pixel stride to skip the
+ * interleaved chroma/alpha bytes. */
+static void
+gst_gamma_packed_yuv_ip (GstGamma * gamma, GstVideoFrame * frame)
+{
+ gint i, j, height;
+ gint width, stride, row_wrap;
+ gint pixel_stride;
+ const guint8 *table = gamma->gamma_table;
+ guint8 *data;
+
+ data = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ *data = table[*data];
+ data += pixel_stride;
+ }
+ data += row_wrap;
+ }
+}
+
+static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
+ 298, 0, 409, -57068,
+ 298, -100, -208, 34707,
+ 298, 516, 0, -70870,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
+ 66, 129, 25, 4096,
+ -38, -74, 112, 32768,
+ 112, -94, -18, 32768,
+};
+
+#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
+
+/* In-place gamma for packed RGB: per pixel, convert RGB -> YCbCr with the
+ * 8-bit SDTV matrix, run the LUT on Y only, then convert back and clamp.
+ * Any alpha byte is skipped via the component offsets. */
+static void
+gst_gamma_packed_rgb_ip (GstGamma * gamma, GstVideoFrame * frame)
+{
+ gint i, j, height;
+ gint width, stride, row_wrap;
+ gint pixel_stride;
+ const guint8 *table = gamma->gamma_table;
+ gint offsets[3];
+ gint r, g, b;
+ gint y, u, v;
+ guint8 *data;
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+ width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+ height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+
+ /* byte offsets of R, G, B inside one pixel */
+ offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ r = data[offsets[0]];
+ g = data[offsets[1]];
+ b = data[offsets[2]];
+
+ y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
+ u = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
+ v = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);
+
+ /* gamma-correct the luma only; chroma passes through */
+ y = table[CLAMP (y, 0, 255)];
+ r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
+ g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
+ b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);
+
+ data[offsets[0]] = CLAMP (r, 0, 255);
+ data[offsets[1]] = CLAMP (g, 0, 255);
+ data[offsets[2]] = CLAMP (b, 0, 255);
+ data += pixel_stride;
+ }
+ data += row_wrap;
+ }
+}
+
+/* Caps negotiation: pick the processing function matching the negotiated
+ * video format, or fail for formats not in the pad template. */
+static gboolean
+gst_gamma_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+ GstGamma *gamma = GST_GAMMA (vfilter);
+
+ GST_DEBUG_OBJECT (gamma,
+ "setting caps: in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps,
+ outcaps);
+
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_Y444:
+ case GST_VIDEO_FORMAT_NV12:
+ case GST_VIDEO_FORMAT_NV21:
+ /* semi-planar NV12/NV21 also work here: only the (planar) luma
+ * component is touched */
+ gamma->process = gst_gamma_planar_yuv_ip;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_UYVY:
+ case GST_VIDEO_FORMAT_AYUV:
+ case GST_VIDEO_FORMAT_YVYU:
+ gamma->process = gst_gamma_packed_yuv_ip;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ gamma->process = gst_gamma_packed_rgb_ip;
+ break;
+ default:
+ goto invalid_caps;
+ break;
+ }
+ return TRUE;
+
+ /* ERRORS */
+invalid_caps:
+ {
+ GST_ERROR_OBJECT (gamma, "Invalid caps: %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
+}
+
+/* Called before each buffer: map its timestamp into stream time and sync
+ * controlled properties (the "gamma" property is GST_PARAM_CONTROLLABLE). */
+static void
+gst_gamma_before_transform (GstBaseTransform * base, GstBuffer * outbuf)
+{
+ GstGamma *gamma = GST_GAMMA (base);
+ GstClockTime timestamp, stream_time;
+
+ timestamp = GST_BUFFER_TIMESTAMP (outbuf);
+ stream_time =
+ gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (gamma, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (gamma), stream_time);
+}
+
+/* In-place frame transform: dispatch to the format-specific function chosen
+ * in set_info(). Held under the object lock so the LUT cannot be rebuilt
+ * mid-frame by a property change. */
+static GstFlowReturn
+gst_gamma_transform_frame_ip (GstVideoFilter * vfilter, GstVideoFrame * frame)
+{
+ GstGamma *gamma = GST_GAMMA (vfilter);
+
+ if (!gamma->process)
+ goto not_negotiated;
+
+ GST_OBJECT_LOCK (gamma);
+ gamma->process (gamma, frame);
+ GST_OBJECT_UNLOCK (gamma);
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (gamma, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
diff --git a/gst/videofilter/gstgamma.h b/gst/videofilter/gstgamma.h
new file mode 100644
index 0000000000..4d92010947
--- /dev/null
+++ b/gst/videofilter/gstgamma.h
@@ -0,0 +1,77 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) 2003 Arwed v. Merkatz <v.merkatz@gmx.net>
+ * Copyright (C) 2006 Mark Nauwelaerts <manauw@skynet.be>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_VIDEO_GAMMA_H__
+#define __GST_VIDEO_GAMMA_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_GAMMA \
+ (gst_gamma_get_type())
+#define GST_GAMMA(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_GAMMA,GstGamma))
+#define GST_GAMMA_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_GAMMA,GstGammaClass))
+#define GST_IS_GAMMA(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_GAMMA))
+#define GST_IS_GAMMA_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_GAMMA))
+
+typedef struct _GstGamma GstGamma;
+typedef struct _GstGammaClass GstGammaClass;
+
+/**
+ * GstGamma:
+ *
+ * Opaque data structure.
+ */
+struct _GstGamma
+{
+ GstVideoFilter videofilter;
+
+ /* < private > */
+ /* properties */
+ gdouble gamma; /* gamma exponent; 1.0 means passthrough */
+
+ /* tables */
+ guint8 gamma_table[256]; /* 8-bit LUT, rebuilt whenever gamma changes */
+
+ /* format-specific in-place processing function chosen at caps time */
+ void (*process) (GstGamma *gamma, GstVideoFrame *frame);
+};
+
+struct _GstGammaClass
+{
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_gamma_get_type(void);
+
+GST_ELEMENT_REGISTER_DECLARE (gamma);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_GAMMA_H__ */
diff --git a/gst/videofilter/gstvideobalance.c b/gst/videofilter/gstvideobalance.c
new file mode 100644
index 0000000000..55656abffa
--- /dev/null
+++ b/gst/videofilter/gstvideobalance.c
@@ -0,0 +1,844 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This file was (probably) generated from gstvideobalance.c,
+ * gstvideobalance.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
+ */
+
+/**
+ * SECTION:element-videobalance
+ * @title: videobalance
+ *
+ * Adjusts brightness, contrast, hue, saturation on a video stream.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 videotestsrc ! videobalance saturation=0.0 ! videoconvert ! ximagesink
+ * ]| This pipeline converts the image to black and white by setting the
+ * saturation to 0.0.
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/math-compat.h>
+
+#include "gstvideobalance.h"
+#include <string.h>
+
+#include <gst/video/colorbalance.h>
+
+GST_DEBUG_CATEGORY_STATIC (videobalance_debug);
+#define GST_CAT_DEFAULT videobalance_debug
+
+/* GstVideoBalance properties */
+#define DEFAULT_PROP_CONTRAST 1.0
+#define DEFAULT_PROP_BRIGHTNESS 0.0
+#define DEFAULT_PROP_HUE 0.0
+#define DEFAULT_PROP_SATURATION 1.0
+
+enum
+{
+ PROP_0,
+ PROP_CONTRAST,
+ PROP_BRIGHTNESS,
+ PROP_HUE,
+ PROP_SATURATION
+};
+
+#define PROCESSING_CAPS \
+ "{ AYUV, ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, " \
+ "xBGR, BGRx, RGB, BGR, Y42B, YUY2, UYVY, YVYU, " \
+ "I420, YV12, IYUV, Y41B, NV12, NV21 }"
+
+static GstStaticPadTemplate gst_video_balance_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
+ "video/x-raw(ANY)")
+ );
+
+static GstStaticPadTemplate gst_video_balance_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS) ";"
+ "video/x-raw(ANY)")
+ );
+
+static void gst_video_balance_colorbalance_init (GstColorBalanceInterface *
+ iface);
+
+static void gst_video_balance_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_video_balance_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+#define gst_video_balance_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVideoBalance, gst_video_balance,
+ GST_TYPE_VIDEO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_COLOR_BALANCE,
+ gst_video_balance_colorbalance_init));
+GST_ELEMENT_REGISTER_DEFINE (videobalance, "videobalance",
+ GST_RANK_NONE, GST_TYPE_VIDEO_BALANCE);
+
+/*
+ * look-up tables (LUT).
+ */
+/* Rebuild the Y LUT (contrast/brightness) and the 2D U/V LUTs
+ * (hue rotation + saturation scaling). Caller must hold the object lock. */
+static void
+gst_video_balance_update_tables (GstVideoBalance * vb)
+{
+ gint i, j;
+ gdouble y, u, v, hue_cos, hue_sin;
+
+ /* Y */
+ for (i = 0; i < 256; i++) {
+ /* contrast pivots around 16 (SDTV black level), brightness shifts */
+ y = 16 + ((i - 16) * vb->contrast + vb->brightness * 255);
+ if (y < 0)
+ y = 0;
+ else if (y > 255)
+ y = 255;
+ vb->tabley[i] = rint (y);
+ }
+
+ /* hue is a rotation in the U/V plane; property range [-1,1] maps to
+ * [-pi,pi] */
+ hue_cos = cos (G_PI * vb->hue);
+ hue_sin = sin (G_PI * vb->hue);
+
+ /* U/V lookup tables are 2D, since we need both U/V for each table
+ * separately. */
+ for (i = -128; i < 128; i++) {
+ for (j = -128; j < 128; j++) {
+ u = 128 + ((i * hue_cos + j * hue_sin) * vb->saturation);
+ v = 128 + ((-i * hue_sin + j * hue_cos) * vb->saturation);
+ if (u < 0)
+ u = 0;
+ else if (u > 255)
+ u = 255;
+ if (v < 0)
+ v = 0;
+ else if (v > 255)
+ v = 255;
+ vb->tableu[i + 128][j + 128] = rint (u);
+ vb->tablev[i + 128][j + 128] = rint (v);
+ }
+ }
+}
+
+/* TRUE when all four properties are at their neutral values, i.e. the
+ * filter would not change any pixel. */
+static gboolean
+gst_video_balance_is_passthrough (GstVideoBalance * videobalance)
+{
+ return videobalance->contrast == 1.0 &&
+ videobalance->brightness == 0.0 &&
+ videobalance->hue == 0.0 && videobalance->saturation == 1.0;
+}
+
+/* Recompute passthrough state after a property change; rebuild the LUTs
+ * only when actually processing. The LUT update happens under the object
+ * lock, the passthrough toggle outside it (basetransform takes its own). */
+static void
+gst_video_balance_update_properties (GstVideoBalance * videobalance)
+{
+ gboolean passthrough;
+ GstBaseTransform *base = GST_BASE_TRANSFORM (videobalance);
+
+ GST_OBJECT_LOCK (videobalance);
+ passthrough = gst_video_balance_is_passthrough (videobalance);
+ if (!passthrough)
+ gst_video_balance_update_tables (videobalance);
+ GST_OBJECT_UNLOCK (videobalance);
+
+ gst_base_transform_set_passthrough (base, passthrough);
+}
+
+/* In-place balance for planar YUV (I420, YV12, Y41B, Y42B, Y444):
+ * Y plane through the 1D LUT, then U/V planes jointly through the
+ * 2D hue/saturation LUTs. */
+static void
+gst_video_balance_planar_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+{
+ gint x, y;
+ guint8 *ydata;
+ guint8 *udata, *vdata;
+ gint ystride, ustride, vstride;
+ gint width, height;
+ gint width2, height2;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ for (y = 0; y < height; y++) {
+ guint8 *yptr;
+
+ yptr = ydata + y * ystride;
+ for (x = 0; x < width; x++) {
+ *yptr = tabley[*yptr];
+ yptr++;
+ }
+ }
+
+ /* chroma planes may be subsampled; use component dimensions */
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
+
+ udata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 2);
+ ustride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+ vstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 2);
+
+ for (y = 0; y < height2; y++) {
+ guint8 *uptr, *vptr;
+ guint8 u1, v1;
+
+ uptr = udata + y * ustride;
+ vptr = vdata + y * vstride;
+
+ for (x = 0; x < width2; x++) {
+ /* read both before writing: each output depends on the (u,v) pair */
+ u1 = *uptr;
+ v1 = *vptr;
+
+ *uptr++ = tableu[u1][v1];
+ *vptr++ = tablev[u1][v1];
+ }
+ }
+}
+
+/* In-place balance for semi-planar YUV (NV12/NV21): Y plane through the
+ * 1D LUT, then the interleaved UV plane through the 2D LUTs, with the
+ * U/V byte order depending on the format. */
+static void
+gst_video_balance_semiplanar_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+{
+ gint x, y;
+ guint8 *ydata;
+ guint8 *uvdata;
+ gint ystride, uvstride;
+ gint width, height;
+ gint width2, height2;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+ gint upos, vpos;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ ydata = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ ystride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ for (y = 0; y < height; y++) {
+ guint8 *yptr;
+
+ yptr = ydata + y * ystride;
+ for (x = 0; x < width; x++) {
+ *yptr = tabley[*yptr];
+ yptr++;
+ }
+ }
+
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
+
+ uvdata = GST_VIDEO_FRAME_PLANE_DATA (frame, 1);
+ uvstride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 1);
+
+ /* NV12 stores U first in each pair, NV21 stores V first */
+ upos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 0 : 1;
+ vpos = GST_VIDEO_INFO_FORMAT (&frame->info) == GST_VIDEO_FORMAT_NV12 ? 1 : 0;
+
+ for (y = 0; y < height2; y++) {
+ guint8 *uvptr;
+ guint8 u1, v1;
+
+ uvptr = uvdata + y * uvstride;
+
+ for (x = 0; x < width2; x++) {
+ u1 = uvptr[upos];
+ v1 = uvptr[vpos];
+
+ uvptr[upos] = tableu[u1][v1];
+ uvptr[vpos] = tablev[u1][v1];
+ uvptr += 2;
+ }
+ }
+}
+
+/* In-place balance for packed YUV (YUY2, UYVY, AYUV, YVYU): all
+ * components live in plane 0, addressed via per-component data offsets
+ * and pixel strides. */
+static void
+gst_video_balance_packed_yuv (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+{
+ gint x, y, stride;
+ guint8 *ydata, *udata, *vdata;
+ gint yoff, uoff, voff;
+ gint width, height;
+ gint width2, height2;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ /* single interleaved plane: one stride serves Y, U and V walks */
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+ ydata = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+ yoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+
+ for (y = 0; y < height; y++) {
+ guint8 *yptr;
+
+ yptr = ydata + y * stride;
+ for (x = 0; x < width; x++) {
+ *yptr = tabley[*yptr];
+ yptr += yoff;
+ }
+ }
+
+ width2 = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1);
+ height2 = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
+
+ udata = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
+ vdata = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
+ uoff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 1);
+ voff = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 2);
+
+ for (y = 0; y < height2; y++) {
+ guint8 *uptr, *vptr;
+ guint8 u1, v1;
+
+ uptr = udata + y * stride;
+ vptr = vdata + y * stride;
+
+ for (x = 0; x < width2; x++) {
+ u1 = *uptr;
+ v1 = *vptr;
+
+ *uptr = tableu[u1][v1];
+ *vptr = tablev[u1][v1];
+
+ uptr += uoff;
+ vptr += voff;
+ }
+ }
+}
+
+static const int cog_ycbcr_to_rgb_matrix_8bit_sdtv[] = {
+ 298, 0, 409, -57068,
+ 298, -100, -208, 34707,
+ 298, 516, 0, -70870,
+};
+
+static const gint cog_rgb_to_ycbcr_matrix_8bit_sdtv[] = {
+ 66, 129, 25, 4096,
+ -38, -74, 112, 32768,
+ 112, -94, -18, 32768,
+};
+
+#define APPLY_MATRIX(m,o,v1,v2,v3) ((m[o*4] * v1 + m[o*4+1] * v2 + m[o*4+2] * v3 + m[o*4+3]) >> 8)
+
+/* In-place balance for packed RGB: per pixel, convert to YCbCr (8-bit
+ * SDTV matrix), run Y through the 1D LUT and (U,V) through the 2D LUTs,
+ * convert back and clamp. Alpha bytes are skipped via component offsets. */
+static void
+gst_video_balance_packed_rgb (GstVideoBalance * videobalance,
+ GstVideoFrame * frame)
+{
+ gint i, j, height;
+ gint width, stride, row_wrap;
+ gint pixel_stride;
+ guint8 *data;
+ gint offsets[3];
+ gint r, g, b;
+ gint y, u, v;
+ gint u_tmp, v_tmp;
+ guint8 *tabley = videobalance->tabley;
+ guint8 **tableu = videobalance->tableu;
+ guint8 **tablev = videobalance->tablev;
+
+ width = GST_VIDEO_FRAME_WIDTH (frame);
+ height = GST_VIDEO_FRAME_HEIGHT (frame);
+
+ /* byte offsets of R, G, B inside one pixel */
+ offsets[0] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 0);
+ offsets[1] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 1);
+ offsets[2] = GST_VIDEO_FRAME_COMP_OFFSET (frame, 2);
+
+ data = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+ stride = GST_VIDEO_FRAME_PLANE_STRIDE (frame, 0);
+
+ pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);
+ row_wrap = stride - pixel_stride * width;
+
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j++) {
+ r = data[offsets[0]];
+ g = data[offsets[1]];
+ b = data[offsets[2]];
+
+ y = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 0, r, g, b);
+ u_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 1, r, g, b);
+ v_tmp = APPLY_MATRIX (cog_rgb_to_ycbcr_matrix_8bit_sdtv, 2, r, g, b);
+
+ /* clamp before indexing the 256-entry LUTs */
+ y = CLAMP (y, 0, 255);
+ u_tmp = CLAMP (u_tmp, 0, 255);
+ v_tmp = CLAMP (v_tmp, 0, 255);
+
+ y = tabley[y];
+ u = tableu[u_tmp][v_tmp];
+ v = tablev[u_tmp][v_tmp];
+
+ r = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 0, y, u, v);
+ g = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 1, y, u, v);
+ b = APPLY_MATRIX (cog_ycbcr_to_rgb_matrix_8bit_sdtv, 2, y, u, v);
+
+ data[offsets[0]] = CLAMP (r, 0, 255);
+ data[offsets[1]] = CLAMP (g, 0, 255);
+ data[offsets[2]] = CLAMP (b, 0, 255);
+ data += pixel_stride;
+ }
+ data += row_wrap;
+ }
+}
+
+/* get notified of caps and plug in the correct process function.
+ * Unknown formats are accepted (process stays NULL) only while all
+ * properties are neutral, since then the element runs in passthrough. */
+static gboolean
+gst_video_balance_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+ GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
+
+ GST_DEBUG_OBJECT (videobalance,
+ "in %" GST_PTR_FORMAT " out %" GST_PTR_FORMAT, incaps, outcaps);
+
+ videobalance->process = NULL;
+
+ switch (GST_VIDEO_INFO_FORMAT (in_info)) {
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_YV12:
+ case GST_VIDEO_FORMAT_Y41B:
+ case GST_VIDEO_FORMAT_Y42B:
+ case GST_VIDEO_FORMAT_Y444:
+ videobalance->process = gst_video_balance_planar_yuv;
+ break;
+ case GST_VIDEO_FORMAT_YUY2:
+ case GST_VIDEO_FORMAT_UYVY:
+ case GST_VIDEO_FORMAT_AYUV:
+ case GST_VIDEO_FORMAT_YVYU:
+ videobalance->process = gst_video_balance_packed_yuv;
+ break;
+ case GST_VIDEO_FORMAT_NV12:
+ case GST_VIDEO_FORMAT_NV21:
+ videobalance->process = gst_video_balance_semiplanar_yuv;
+ break;
+ case GST_VIDEO_FORMAT_ARGB:
+ case GST_VIDEO_FORMAT_ABGR:
+ case GST_VIDEO_FORMAT_RGBA:
+ case GST_VIDEO_FORMAT_BGRA:
+ case GST_VIDEO_FORMAT_xRGB:
+ case GST_VIDEO_FORMAT_xBGR:
+ case GST_VIDEO_FORMAT_RGBx:
+ case GST_VIDEO_FORMAT_BGRx:
+ case GST_VIDEO_FORMAT_RGB:
+ case GST_VIDEO_FORMAT_BGR:
+ videobalance->process = gst_video_balance_packed_rgb;
+ break;
+ default:
+ if (!gst_video_balance_is_passthrough (videobalance))
+ goto unknown_format;
+ break;
+ }
+
+ return TRUE;
+
+ /* ERRORS */
+unknown_format:
+ {
+ GST_ERROR_OBJECT (videobalance, "unknown format %" GST_PTR_FORMAT, incaps);
+ return FALSE;
+ }
+}
+
+/* Called before each buffer: map its timestamp into stream time and sync
+ * controlled property values (all four properties are controllable). */
+static void
+gst_video_balance_before_transform (GstBaseTransform * base, GstBuffer * buf)
+{
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (base);
+ GstClockTime timestamp, stream_time;
+
+ timestamp = GST_BUFFER_TIMESTAMP (buf);
+ stream_time =
+ gst_segment_to_stream_time (&base->segment, GST_FORMAT_TIME, timestamp);
+
+ GST_DEBUG_OBJECT (balance, "sync to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (timestamp));
+
+ if (GST_CLOCK_TIME_IS_VALID (stream_time))
+ gst_object_sync_values (GST_OBJECT (balance), stream_time);
+}
+
+/* Caps transform: while actively processing, restrict caps to the formats
+ * we can process; in passthrough any raw video (including ANY caps
+ * features) is acceptable. */
+static GstCaps *
+gst_video_balance_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+{
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (trans);
+ GstCaps *ret;
+
+ if (!gst_video_balance_is_passthrough (balance)) {
+ static GstStaticCaps raw_caps =
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (PROCESSING_CAPS));
+
+ /* intersection returns a new ref; the caller's caps are not consumed */
+ caps = gst_caps_intersect (caps, gst_static_caps_get (&raw_caps));
+
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (caps);
+ } else {
+ ret = caps;
+ }
+ } else {
+ if (filter) {
+ ret = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
+ } else {
+ ret = gst_caps_ref (caps);
+ }
+ }
+
+ return ret;
+}
+
+/* In-place frame transform: dispatch to the format-specific function chosen
+ * in set_info(). Held under the object lock so the LUTs cannot be rebuilt
+ * mid-frame by a property change. */
+static GstFlowReturn
+gst_video_balance_transform_frame_ip (GstVideoFilter * vfilter,
+ GstVideoFrame * frame)
+{
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (vfilter);
+
+ if (!videobalance->process)
+ goto not_negotiated;
+
+ GST_OBJECT_LOCK (videobalance);
+ videobalance->process (videobalance, frame);
+ GST_OBJECT_UNLOCK (videobalance);
+
+ return GST_FLOW_OK;
+
+ /* ERRORS */
+not_negotiated:
+ {
+ GST_ERROR_OBJECT (videobalance, "Not negotiated yet");
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+}
+
+/* Finalize: release the single LUT allocation (tableu[0] owns the whole
+ * U+V block — see _init) and the GstColorBalanceChannel list. */
+static void
+gst_video_balance_finalize (GObject * object)
+{
+ GList *channels = NULL;
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
+
+ g_free (balance->tableu[0]);
+
+ channels = balance->channels;
+ while (channels) {
+ GstColorBalanceChannel *channel = channels->data;
+
+ g_object_unref (channel);
+ channels->data = NULL;
+ channels = g_list_next (channels);
+ }
+
+ if (balance->channels)
+ g_list_free (balance->channels);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Class setup: installs the four controllable properties, element metadata,
+ * pad templates and the base-transform / video-filter virtual methods. */
+static void
+gst_video_balance_class_init (GstVideoBalanceClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+ GstElementClass *gstelement_class = (GstElementClass *) klass;
+ GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+ GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+ GST_DEBUG_CATEGORY_INIT (videobalance_debug, "videobalance", 0,
+ "videobalance");
+
+ gobject_class->finalize = gst_video_balance_finalize;
+ gobject_class->set_property = gst_video_balance_set_property;
+ gobject_class->get_property = gst_video_balance_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_CONTRAST,
+ g_param_spec_double ("contrast", "Contrast", "contrast",
+ 0.0, 2.0, DEFAULT_PROP_CONTRAST,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
+ g_param_spec_double ("brightness", "Brightness", "brightness", -1.0, 1.0,
+ DEFAULT_PROP_BRIGHTNESS,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_HUE,
+ g_param_spec_double ("hue", "Hue", "hue", -1.0, 1.0, DEFAULT_PROP_HUE,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SATURATION,
+ g_param_spec_double ("saturation", "Saturation", "saturation", 0.0, 2.0,
+ DEFAULT_PROP_SATURATION,
+ GST_PARAM_CONTROLLABLE | G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+ gst_element_class_set_static_metadata (gstelement_class, "Video balance",
+ "Filter/Effect/Video",
+ "Adjusts brightness, contrast, hue, saturation on a video stream",
+ "David Schleef <ds@schleef.org>");
+
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_balance_sink_template);
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &gst_video_balance_src_template);
+
+ trans_class->before_transform =
+ GST_DEBUG_FUNCPTR (gst_video_balance_before_transform);
+ /* never process a buffer while in passthrough */
+ trans_class->transform_ip_on_passthrough = FALSE;
+ trans_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_video_balance_transform_caps);
+
+ vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_balance_set_info);
+ vfilter_class->transform_frame_ip =
+ GST_DEBUG_FUNCPTR (gst_video_balance_transform_frame_ip);
+}
+
+/* Instance init: set neutral defaults, allocate the two 256x256 U/V LUTs
+ * as one contiguous block (rows point into it; only tableu[0] is freed in
+ * finalize), and create the GstColorBalance channel objects. */
+static void
+gst_video_balance_init (GstVideoBalance * videobalance)
+{
+ const gchar *channels[4] = { "HUE", "SATURATION",
+ "BRIGHTNESS", "CONTRAST"
+ };
+ gint i;
+
+ /* Initialize properties */
+ videobalance->contrast = DEFAULT_PROP_CONTRAST;
+ videobalance->brightness = DEFAULT_PROP_BRIGHTNESS;
+ videobalance->hue = DEFAULT_PROP_HUE;
+ videobalance->saturation = DEFAULT_PROP_SATURATION;
+
+ /* one block holds both tables: U rows first, then V rows */
+ videobalance->tableu[0] = g_new (guint8, 256 * 256 * 2);
+ for (i = 0; i < 256; i++) {
+ videobalance->tableu[i] =
+ videobalance->tableu[0] + i * 256 * sizeof (guint8);
+ videobalance->tablev[i] =
+ videobalance->tableu[0] + 256 * 256 * sizeof (guint8) +
+ i * 256 * sizeof (guint8);
+ }
+
+ gst_video_balance_update_properties (videobalance);
+
+ /* Generate the channels list */
+ for (i = 0; i < G_N_ELEMENTS (channels); i++) {
+ GstColorBalanceChannel *channel;
+
+ channel = g_object_new (GST_TYPE_COLOR_BALANCE_CHANNEL, NULL);
+ channel->label = g_strdup (channels[i]);
+ channel->min_value = -1000;
+ channel->max_value = 1000;
+
+ videobalance->channels = g_list_append (videobalance->channels, channel);
+ }
+}
+
+/* GstColorBalance::list_channels: return the channel list built in init.
+ * The returned list is owned by the element. */
+static const GList *
+gst_video_balance_colorbalance_list_channels (GstColorBalance * balance)
+{
+ GstVideoBalance *videobalance = GST_VIDEO_BALANCE (balance);
+
+ g_return_val_if_fail (videobalance != NULL, NULL);
+ g_return_val_if_fail (GST_IS_VIDEO_BALANCE (videobalance), NULL);
+
+ return videobalance->channels;
+}
+
+/* GstColorBalance::set_value: map the interface range [-1000, 1000] onto
+ * the matching property range (hue/brightness: [-1.0, 1.0];
+ * saturation/contrast: [0.0, 2.0]).  On change, the lookup tables are
+ * rebuilt and listeners are notified — both outside the object lock. */
+static void
+gst_video_balance_colorbalance_set_value (GstColorBalance * balance,
+ GstColorBalanceChannel * channel, gint value)
+{
+ GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
+ gdouble new_val;
+ gboolean changed = FALSE;
+
+ g_return_if_fail (vb != NULL);
+ g_return_if_fail (GST_IS_VIDEO_BALANCE (vb));
+ g_return_if_fail (GST_IS_VIDEO_FILTER (vb));
+ g_return_if_fail (channel->label != NULL);
+
+ GST_OBJECT_LOCK (vb);
+ if (!g_ascii_strcasecmp (channel->label, "HUE")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
+ changed = new_val != vb->hue;
+ vb->hue = new_val;
+ } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0;
+ changed = new_val != vb->saturation;
+ vb->saturation = new_val;
+ } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0 - 1.0;
+ changed = new_val != vb->brightness;
+ vb->brightness = new_val;
+ } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
+ new_val = (value + 1000.0) * 2.0 / 2000.0;
+ changed = new_val != vb->contrast;
+ vb->contrast = new_val;
+ }
+ GST_OBJECT_UNLOCK (vb);
+
+ /* Rebuild lookup tables before signalling the new value. */
+ if (changed)
+ gst_video_balance_update_properties (vb);
+
+ if (changed) {
+ gst_color_balance_value_changed (balance, channel,
+ gst_color_balance_get_value (balance, channel));
+ }
+}
+
+/* GstColorBalance::get_value: inverse of set_value — map the property
+ * value back to the interface range [-1000, 1000].  Unknown labels
+ * return 0. */
+static gint
+gst_video_balance_colorbalance_get_value (GstColorBalance * balance,
+ GstColorBalanceChannel * channel)
+{
+ GstVideoBalance *vb = GST_VIDEO_BALANCE (balance);
+ gint value = 0;
+
+ g_return_val_if_fail (vb != NULL, 0);
+ g_return_val_if_fail (GST_IS_VIDEO_BALANCE (vb), 0);
+ g_return_val_if_fail (channel->label != NULL, 0);
+
+ /* NOTE(review): fields are read without GST_OBJECT_LOCK here, unlike
+ * set_value — presumably acceptable; confirm. */
+ if (!g_ascii_strcasecmp (channel->label, "HUE")) {
+ value = (vb->hue + 1) * 2000.0 / 2.0 - 1000.0;
+ } else if (!g_ascii_strcasecmp (channel->label, "SATURATION")) {
+ value = vb->saturation * 2000.0 / 2.0 - 1000.0;
+ } else if (!g_ascii_strcasecmp (channel->label, "BRIGHTNESS")) {
+ value = (vb->brightness + 1) * 2000.0 / 2.0 - 1000.0;
+ } else if (!g_ascii_strcasecmp (channel->label, "CONTRAST")) {
+ value = vb->contrast * 2000.0 / 2.0 - 1000.0;
+ }
+
+ return value;
+}
+
+/* GstColorBalance::get_balance_type: this element processes pixels in
+ * software, so report GST_COLOR_BALANCE_SOFTWARE. */
+static GstColorBalanceType
+gst_video_balance_colorbalance_get_balance_type (GstColorBalance * balance)
+{
+ return GST_COLOR_BALANCE_SOFTWARE;
+}
+
+/* Wire up the GstColorBalance interface vtable. */
+static void
+gst_video_balance_colorbalance_init (GstColorBalanceInterface * iface)
+{
+ iface->list_channels = gst_video_balance_colorbalance_list_channels;
+ iface->set_value = gst_video_balance_colorbalance_set_value;
+ iface->get_value = gst_video_balance_colorbalance_get_value;
+ iface->get_balance_type = gst_video_balance_colorbalance_get_balance_type;
+}
+
+/* Look up a channel by label (case-insensitive); NULL if not found. */
+static GstColorBalanceChannel *
+gst_video_balance_find_channel (GstVideoBalance * balance, const gchar * label)
+{
+ GList *l;
+
+ for (l = balance->channels; l; l = l->next) {
+ GstColorBalanceChannel *channel = l->data;
+
+ if (g_ascii_strcasecmp (channel->label, label) == 0)
+ return channel;
+ }
+ return NULL;
+}
+
+/* GObject property setter.  Stores the new value under the object lock;
+ * on an actual change, rebuilds the lookup tables and emits the
+ * color-balance value-changed notification — both after unlocking. */
+static void
+gst_video_balance_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
+ gdouble d;
+ const gchar *label = NULL;
+
+ GST_OBJECT_LOCK (balance);
+ switch (prop_id) {
+ case PROP_CONTRAST:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing contrast from %lf to %lf",
+ balance->contrast, d);
+ if (d != balance->contrast)
+ label = "CONTRAST";
+ balance->contrast = d;
+ break;
+ case PROP_BRIGHTNESS:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing brightness from %lf to %lf",
+ balance->brightness, d);
+ if (d != balance->brightness)
+ label = "BRIGHTNESS";
+ balance->brightness = d;
+ break;
+ case PROP_HUE:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing hue from %lf to %lf", balance->hue,
+ d);
+ if (d != balance->hue)
+ label = "HUE";
+ balance->hue = d;
+ break;
+ case PROP_SATURATION:
+ d = g_value_get_double (value);
+ GST_DEBUG_OBJECT (balance, "Changing saturation from %lf to %lf",
+ balance->saturation, d);
+ if (d != balance->saturation)
+ label = "SATURATION";
+ balance->saturation = d;
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ GST_OBJECT_UNLOCK (balance);
+ gst_video_balance_update_properties (balance);
+
+ /* label is non-NULL only when the value actually changed. */
+ if (label) {
+ GstColorBalanceChannel *channel =
+ gst_video_balance_find_channel (balance, label);
+ gst_color_balance_value_changed (GST_COLOR_BALANCE (balance), channel,
+ gst_color_balance_get_value (GST_COLOR_BALANCE (balance), channel));
+ }
+}
+
+/* GObject property getter.
+ * NOTE(review): fields are read without GST_OBJECT_LOCK, unlike the
+ * setter — presumably acceptable for these doubles; confirm. */
+static void
+gst_video_balance_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstVideoBalance *balance = GST_VIDEO_BALANCE (object);
+
+ switch (prop_id) {
+ case PROP_CONTRAST:
+ g_value_set_double (value, balance->contrast);
+ break;
+ case PROP_BRIGHTNESS:
+ g_value_set_double (value, balance->brightness);
+ break;
+ case PROP_HUE:
+ g_value_set_double (value, balance->hue);
+ break;
+ case PROP_SATURATION:
+ g_value_set_double (value, balance->saturation);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/videofilter/gstvideobalance.h b/gst/videofilter/gstvideobalance.h
new file mode 100644
index 0000000000..020f5337d4
--- /dev/null
+++ b/gst/videofilter/gstvideobalance.h
@@ -0,0 +1,81 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_VIDEO_BALANCE_H__
+#define __GST_VIDEO_BALANCE_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject type boilerplate for GstVideoBalance. */
+#define GST_TYPE_VIDEO_BALANCE \
+ (gst_video_balance_get_type())
+#define GST_VIDEO_BALANCE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_BALANCE,GstVideoBalance))
+#define GST_VIDEO_BALANCE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_BALANCE,GstVideoBalanceClass))
+#define GST_IS_VIDEO_BALANCE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_BALANCE))
+#define GST_IS_VIDEO_BALANCE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_BALANCE))
+
+typedef struct _GstVideoBalance GstVideoBalance;
+typedef struct _GstVideoBalanceClass GstVideoBalanceClass;
+
+/**
+ * GstVideoBalance:
+ *
+ * Opaque data structure.
+ */
+struct _GstVideoBalance {
+ GstVideoFilter videofilter;
+
+ /* < private > */
+
+ /* channels for interface */
+ GList *channels;
+
+ /* properties */
+ gdouble contrast;
+ gdouble brightness;
+ gdouble hue;
+ gdouble saturation;
+
+ /* tables: tabley is the luma lookup; tableu/tablev are row pointers
+ * into one shared chroma slab allocated at tableu[0] */
+ guint8 tabley[256];
+ guint8 *tableu[256];
+ guint8 *tablev[256];
+
+ /* per-format processing function selected at negotiation time */
+ void (*process) (GstVideoBalance *balance, GstVideoFrame *frame);
+};
+
+struct _GstVideoBalanceClass {
+ GstVideoFilterClass parent_class;
+};
+
+GType gst_video_balance_get_type(void);
+
+GST_ELEMENT_REGISTER_DECLARE (videobalance);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_BALANCE_H__ */
diff --git a/gst/videofilter/gstvideoflip.c b/gst/videofilter/gstvideoflip.c
new file mode 100644
index 0000000000..371f291716
--- /dev/null
+++ b/gst/videofilter/gstvideoflip.c
@@ -0,0 +1,1427 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ * Copyright (C) <2010> Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ * Copyright (C) <2011> Youness Alaoui <youness.alaoui@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This file was (probably) generated from gstvideoflip.c,
+ * gstvideoflip.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
+ */
+/**
+ * SECTION:element-videoflip
+ * @title: videoflip
+ *
+ * Flips and rotates video.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 videotestsrc ! videoflip method=clockwise ! videoconvert ! ximagesink
+ * ]| This pipeline flips the test image 90 degrees clockwise.
+ *
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstvideoflip.h"
+
+#include <string.h>
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+/* GstVideoFlip properties */
+enum
+{
+  PROP_0,
+ PROP_METHOD,
+ PROP_VIDEO_DIRECTION
+ /* FILL ME */
+};
+
+#define PROP_METHOD_DEFAULT GST_VIDEO_FLIP_METHOD_IDENTITY
+
+GST_DEBUG_CATEGORY_STATIC (video_flip_debug);
+#define GST_CAT_DEFAULT video_flip_debug
+
+/* Src/sink templates advertise the same raw formats: flipping never
+ * changes the pixel format, only the geometry. */
+static GstStaticPadTemplate gst_video_flip_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, xBGR, BGRx, "
+ "RGB, BGR, I420, YV12, IYUV, YUY2, UYVY, YVYU, NV12, NV21, "
+ "GRAY8, GRAY16_BE, GRAY16_LE }"))
+ );
+
+static GstStaticPadTemplate gst_video_flip_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ AYUV, "
+ "ARGB, BGRA, ABGR, RGBA, Y444, xRGB, RGBx, xBGR, BGRx, "
+ "RGB, BGR, I420, YV12, IYUV, YUY2, UYVY, YVYU, NV12, NV21, "
+ "GRAY8, GRAY16_BE, GRAY16_LE }"))
+ );
+
+#define GST_TYPE_VIDEO_FLIP_METHOD (gst_video_flip_method_get_type())
+
+/* GEnumValue table for the "method" property; terminated by a zero entry. */
+static const GEnumValue video_flip_methods[] = {
+ {GST_VIDEO_FLIP_METHOD_IDENTITY, "Identity (no rotation)", "none"},
+ {GST_VIDEO_FLIP_METHOD_90R, "Rotate clockwise 90 degrees", "clockwise"},
+ {GST_VIDEO_FLIP_METHOD_180, "Rotate 180 degrees", "rotate-180"},
+ {GST_VIDEO_FLIP_METHOD_90L, "Rotate counter-clockwise 90 degrees",
+ "counterclockwise"},
+ {GST_VIDEO_FLIP_METHOD_HORIZ, "Flip horizontally", "horizontal-flip"},
+ {GST_VIDEO_FLIP_METHOD_VERT, "Flip vertically", "vertical-flip"},
+ {GST_VIDEO_FLIP_METHOD_TRANS,
+ "Flip across upper left/lower right diagonal", "upper-left-diagonal"},
+ {GST_VIDEO_FLIP_METHOD_OTHER,
+ "Flip across upper right/lower left diagonal", "upper-right-diagonal"},
+ {GST_VIDEO_FLIP_METHOD_AUTO,
+ "Select flip method based on image-orientation tag", "automatic"},
+ {0, NULL, NULL},
+};
+
+/* Lazily register the GstVideoFlipMethod enum type.
+ * NOTE(review): the static guard is not a g_once — presumably only
+ * reached from contexts where this is safe; confirm. */
+static GType
+gst_video_flip_method_get_type (void)
+{
+ static GType video_flip_method_type = 0;
+
+ if (!video_flip_method_type) {
+ video_flip_method_type = g_enum_register_static ("GstVideoFlipMethod",
+ video_flip_methods);
+ }
+ return video_flip_method_type;
+}
+
+/* GstVideoDirection interface init: intentionally empty — the interface
+ * only requires the "video-direction" property, installed elsewhere. */
+static void
+gst_video_flip_video_direction_interface_init (GstVideoDirectionInterface *
+ iface)
+{
+ /* We implement the video-direction property */
+}
+
+/* Type definition: GstVideoFilter subclass implementing GstVideoDirection,
+ * plus the element registration entry point. */
+#define gst_video_flip_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVideoFlip, gst_video_flip, GST_TYPE_VIDEO_FILTER,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_VIDEO_DIRECTION,
+ gst_video_flip_video_direction_interface_init));
+GST_ELEMENT_REGISTER_DEFINE (videoflip, "videoflip", GST_RANK_NONE,
+ GST_TYPE_VIDEO_FLIP);
+
+/* GstBaseTransform::transform_caps implementation.
+ * For 90-degree/diagonal orientations, swaps width/height (and inverts a
+ * non-1/1 pixel-aspect-ratio); other orientations keep the geometry.
+ * Also latches a newly proposed orientation into configuring_method
+ * under the object lock. */
+static GstCaps *
+gst_video_flip_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, GstCaps * filter)
+{
+ GstVideoFlip *videoflip = GST_VIDEO_FLIP (trans);
+ GstCaps *ret;
+ gint width, height, i;
+
+ ret = gst_caps_copy (caps);
+
+ GST_OBJECT_LOCK (videoflip);
+
+ /* Adopt the proposed method as the one we negotiate with. */
+ if (videoflip->change_configuring_method) {
+ GEnumValue *configuring_method_enum, *method_enum;
+ GEnumClass *enum_class =
+ g_type_class_ref (GST_TYPE_VIDEO_ORIENTATION_METHOD);
+
+ configuring_method_enum =
+ g_enum_get_value (enum_class, videoflip->configuring_method);
+ method_enum = g_enum_get_value (enum_class, videoflip->proposed_method);
+ GST_LOG_OBJECT (videoflip,
+ "Changing configuring method from %s to proposed %s",
+ configuring_method_enum ? configuring_method_enum->value_nick : "(nil)",
+ method_enum ? method_enum->value_nick : "(nil)");
+ g_type_class_unref (enum_class);
+
+ videoflip->configuring_method = videoflip->proposed_method;
+ }
+ videoflip->change_configuring_method = FALSE;
+
+ for (i = 0; i < gst_caps_get_size (ret); i++) {
+ GstStructure *structure = gst_caps_get_structure (ret, i);
+ gint par_n, par_d;
+
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+
+ switch (videoflip->configuring_method) {
+ case GST_VIDEO_ORIENTATION_90R:
+ case GST_VIDEO_ORIENTATION_90L:
+ case GST_VIDEO_ORIENTATION_UL_LR:
+ case GST_VIDEO_ORIENTATION_UR_LL:
+ /* Dimensions transpose; PAR must be inverted along with them. */
+ gst_structure_set (structure, "width", G_TYPE_INT, height,
+ "height", G_TYPE_INT, width, NULL);
+ if (gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+ &par_n, &par_d)) {
+ if (par_n != 1 || par_d != 1) {
+ GValue val = { 0, };
+
+ g_value_init (&val, GST_TYPE_FRACTION);
+ gst_value_set_fraction (&val, par_d, par_n);
+ gst_structure_set_value (structure, "pixel-aspect-ratio", &val);
+ g_value_unset (&val);
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_IDENTITY:
+ case GST_VIDEO_ORIENTATION_180:
+ case GST_VIDEO_ORIENTATION_HORIZ:
+ case GST_VIDEO_ORIENTATION_VERT:
+ gst_structure_set (structure, "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height, NULL);
+ break;
+ case GST_VIDEO_ORIENTATION_CUSTOM:
+ GST_WARNING_OBJECT (videoflip, "unsupported custom orientation");
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ }
+ }
+ GST_OBJECT_UNLOCK (videoflip);
+
+ GST_DEBUG_OBJECT (videoflip, "transformed %" GST_PTR_FORMAT " to %"
+ GST_PTR_FORMAT, caps, ret);
+
+ if (filter) {
+ GstCaps *intersection;
+
+ GST_DEBUG_OBJECT (videoflip, "Using filter caps %" GST_PTR_FORMAT, filter);
+ intersection =
+ gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
+ gst_caps_unref (ret);
+ ret = intersection;
+ GST_DEBUG_OBJECT (videoflip, "Intersection %" GST_PTR_FORMAT, ret);
+ }
+
+ return ret;
+}
+
+/* Flip/rotate a 3-plane (planar YUV) frame according to active_method,
+ * copying pixel-by-pixel from src into dest.  Each orientation case
+ * handles the Y, U and V planes with its own index mapping; identity
+ * falls back to gst_video_frame_copy(). */
+static void
+gst_video_flip_planar_yuv (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
+{
+ gint x, y;
+ guint8 const *s;
+ guint8 *d;
+ gint src_y_stride, src_u_stride, src_v_stride;
+ gint src_y_height, src_u_height, src_v_height;
+ gint src_y_width, src_u_width, src_v_width;
+ gint dest_y_stride, dest_u_stride, dest_v_stride;
+ gint dest_y_height, dest_u_height, dest_v_height;
+ gint dest_y_width, dest_u_width, dest_v_width;
+
+ src_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ src_u_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 1);
+ src_v_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 2);
+
+ dest_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+ dest_u_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 1);
+ dest_v_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 2);
+
+ src_y_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 0);
+ src_u_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 1);
+ src_v_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 2);
+
+ dest_y_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 0);
+ dest_u_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 1);
+ dest_v_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 2);
+
+ src_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 0);
+ src_u_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 1);
+ src_v_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 2);
+
+ dest_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 0);
+ dest_u_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 1);
+ dest_v_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 2);
+
+ switch (videoflip->active_method) {
+ case GST_VIDEO_ORIENTATION_90R:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - x) * src_y_stride + y];
+ }
+ }
+ /* Flip U */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_u_height; y++) {
+ for (x = 0; x < dest_u_width; x++) {
+ d[y * dest_u_stride + x] =
+ s[(src_u_height - 1 - x) * src_u_stride + y];
+ }
+ }
+ /* Flip V */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
+ for (y = 0; y < dest_v_height; y++) {
+ for (x = 0; x < dest_v_width; x++) {
+ d[y * dest_v_stride + x] =
+ s[(src_v_height - 1 - x) * src_v_stride + y];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_90L:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[x * src_y_stride + (src_y_width - 1 - y)];
+ }
+ }
+ /* Flip U */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_u_height; y++) {
+ for (x = 0; x < dest_u_width; x++) {
+ d[y * dest_u_stride + x] =
+ s[x * src_u_stride + (src_u_width - 1 - y)];
+ }
+ }
+ /* Flip V */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
+ for (y = 0; y < dest_v_height; y++) {
+ for (x = 0; x < dest_v_width; x++) {
+ d[y * dest_v_stride + x] =
+ s[x * src_v_stride + (src_v_width - 1 - y)];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_180:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - y) * src_y_stride + (src_y_width - 1 - x)];
+ }
+ }
+ /* Flip U */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_u_height; y++) {
+ for (x = 0; x < dest_u_width; x++) {
+ d[y * dest_u_stride + x] =
+ s[(src_u_height - 1 - y) * src_u_stride + (src_u_width - 1 - x)];
+ }
+ }
+ /* Flip V */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
+ for (y = 0; y < dest_v_height; y++) {
+ for (x = 0; x < dest_v_width; x++) {
+ d[y * dest_v_stride + x] =
+ s[(src_v_height - 1 - y) * src_v_stride + (src_v_width - 1 - x)];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_HORIZ:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[y * src_y_stride + (src_y_width - 1 - x)];
+ }
+ }
+ /* Flip U */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_u_height; y++) {
+ for (x = 0; x < dest_u_width; x++) {
+ d[y * dest_u_stride + x] =
+ s[y * src_u_stride + (src_u_width - 1 - x)];
+ }
+ }
+ /* Flip V */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
+ for (y = 0; y < dest_v_height; y++) {
+ for (x = 0; x < dest_v_width; x++) {
+ d[y * dest_v_stride + x] =
+ s[y * src_v_stride + (src_v_width - 1 - x)];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_VERT:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - y) * src_y_stride + x];
+ }
+ }
+ /* Flip U */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_u_height; y++) {
+ for (x = 0; x < dest_u_width; x++) {
+ d[y * dest_u_stride + x] =
+ s[(src_u_height - 1 - y) * src_u_stride + x];
+ }
+ }
+ /* Flip V */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
+ for (y = 0; y < dest_v_height; y++) {
+ for (x = 0; x < dest_v_width; x++) {
+ d[y * dest_v_stride + x] =
+ s[(src_v_height - 1 - y) * src_v_stride + x];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_UL_LR:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] = s[x * src_y_stride + y];
+ }
+ }
+ /* Flip U */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_u_height; y++) {
+ for (x = 0; x < dest_u_width; x++) {
+ d[y * dest_u_stride + x] = s[x * src_u_stride + y];
+ }
+ }
+ /* Flip V
+ * Fix: iterate with the V-plane bounds, not the U-plane bounds copied
+ * from the loop above (equivalent only when both planes match). */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
+ for (y = 0; y < dest_v_height; y++) {
+ for (x = 0; x < dest_v_width; x++) {
+ d[y * dest_v_stride + x] = s[x * src_v_stride + y];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_UR_LL:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - x) * src_y_stride + (src_y_width - 1 - y)];
+ }
+ }
+ /* Flip U */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_u_height; y++) {
+ for (x = 0; x < dest_u_width; x++) {
+ d[y * dest_u_stride + x] =
+ s[(src_u_height - 1 - x) * src_u_stride + (src_u_width - 1 - y)];
+ }
+ }
+ /* Flip V */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 2);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 2);
+ for (y = 0; y < dest_v_height; y++) {
+ for (x = 0; x < dest_v_width; x++) {
+ d[y * dest_v_stride + x] =
+ s[(src_v_height - 1 - x) * src_v_stride + (src_v_width - 1 - y)];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_IDENTITY:
+ gst_video_frame_copy (dest, src);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+}
+
+/* Flip/rotate a semi-planar frame (Y plane + interleaved UV plane, e.g.
+ * NV12/NV21 from the pad templates).  UV samples are copied as 2-byte
+ * pairs via s_off/d_off; identity falls back to gst_video_frame_copy(). */
+static void
+gst_video_flip_semi_planar_yuv (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
+{
+ gint x, y;
+ guint8 const *s;
+ guint8 *d;
+ gint s_off, d_off;
+ gint src_y_stride, src_uv_stride;
+ gint src_y_height, src_uv_height;
+ gint src_y_width, src_uv_width;
+ gint dest_y_stride, dest_uv_stride;
+ gint dest_y_height, dest_uv_height;
+ gint dest_y_width, dest_uv_width;
+
+
+ src_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ src_uv_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 1);
+
+ dest_y_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+ dest_uv_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 1);
+
+ src_y_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 0);
+ src_uv_width = GST_VIDEO_FRAME_COMP_WIDTH (src, 1);
+
+ dest_y_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 0);
+ dest_uv_width = GST_VIDEO_FRAME_COMP_WIDTH (dest, 1);
+
+ src_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 0);
+ src_uv_height = GST_VIDEO_FRAME_COMP_HEIGHT (src, 1);
+
+ dest_y_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 0);
+ dest_uv_height = GST_VIDEO_FRAME_COMP_HEIGHT (dest, 1);
+
+ switch (videoflip->active_method) {
+ case GST_VIDEO_ORIENTATION_90R:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - x) * src_y_stride + y];
+ }
+ }
+ /* Flip UV */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_uv_height; y++) {
+ for (x = 0; x < dest_uv_width; x++) {
+ d_off = y * dest_uv_stride + x * 2;
+ s_off = (src_uv_height - 1 - x) * src_uv_stride + y * 2;
+ d[d_off] = s[s_off];
+ d[d_off + 1] = s[s_off + 1];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_90L:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[x * src_y_stride + (src_y_width - 1 - y)];
+ }
+ }
+ /* Flip UV */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_uv_height; y++) {
+ for (x = 0; x < dest_uv_width; x++) {
+ d_off = y * dest_uv_stride + x * 2;
+ s_off = x * src_uv_stride + (src_uv_width - 1 - y) * 2;
+ d[d_off] = s[s_off];
+ d[d_off + 1] = s[s_off + 1];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_180:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - y) * src_y_stride + (src_y_width - 1 - x)];
+ }
+ }
+ /* Flip UV */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_uv_height; y++) {
+ for (x = 0; x < dest_uv_width; x++) {
+ d_off = y * dest_uv_stride + x * 2;
+ s_off = (src_uv_height - 1 - y) * src_uv_stride + (src_uv_width - 1 -
+ x) * 2;
+ d[d_off] = s[s_off];
+ d[d_off + 1] = s[s_off + 1];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_HORIZ:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[y * src_y_stride + (src_y_width - 1 - x)];
+ }
+ }
+ /* Flip UV */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_uv_height; y++) {
+ for (x = 0; x < dest_uv_width; x++) {
+ d_off = y * dest_uv_stride + x * 2;
+ s_off = y * src_uv_stride + (src_uv_width - 1 - x) * 2;
+ d[d_off] = s[s_off];
+ d[d_off + 1] = s[s_off + 1];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_VERT:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - y) * src_y_stride + x];
+ }
+ }
+ /* Flip UV */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_uv_height; y++) {
+ for (x = 0; x < dest_uv_width; x++) {
+ d_off = y * dest_uv_stride + x * 2;
+ s_off = (src_uv_height - 1 - y) * src_uv_stride + x * 2;
+ d[d_off] = s[s_off];
+ d[d_off + 1] = s[s_off + 1];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_UL_LR:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] = s[x * src_y_stride + y];
+ }
+ }
+ /* Flip UV */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_uv_height; y++) {
+ for (x = 0; x < dest_uv_width; x++) {
+ d_off = y * dest_uv_stride + x * 2;
+ s_off = x * src_uv_stride + y * 2;
+ d[d_off] = s[s_off];
+ d[d_off + 1] = s[s_off + 1];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_UR_LL:
+ /* Flip Y */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+ for (y = 0; y < dest_y_height; y++) {
+ for (x = 0; x < dest_y_width; x++) {
+ d[y * dest_y_stride + x] =
+ s[(src_y_height - 1 - x) * src_y_stride + (src_y_width - 1 - y)];
+ }
+ }
+ /* Flip UV */
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 1);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 1);
+ for (y = 0; y < dest_uv_height; y++) {
+ for (x = 0; x < dest_uv_width; x++) {
+ d_off = y * dest_uv_stride + x * 2;
+ s_off = (src_uv_height - 1 - x) * src_uv_stride + (src_uv_width - 1 -
+ y) * 2;
+ d[d_off] = s[s_off];
+ d[d_off + 1] = s[s_off + 1];
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_IDENTITY:
+ gst_video_frame_copy (dest, src);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+}
+
+/* Flip/rotate a single-plane packed frame.  bpp is taken from the pixel
+ * stride of component 0, so this only works for non-subsampled packed
+ * formats; identity falls back to gst_video_frame_copy(). */
+static void
+gst_video_flip_packed_simple (GstVideoFlip * videoflip, GstVideoFrame * dest,
+ const GstVideoFrame * src)
+{
+ gint x, y, z;
+ guint8 const *s;
+ guint8 *d;
+ gint sw = GST_VIDEO_FRAME_WIDTH (src);
+ gint sh = GST_VIDEO_FRAME_HEIGHT (src);
+ gint dw = GST_VIDEO_FRAME_WIDTH (dest);
+ gint dh = GST_VIDEO_FRAME_HEIGHT (dest);
+ gint src_stride, dest_stride;
+ gint bpp;
+
+ s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+ d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+
+ src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+ dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+ /* This is only true for non-subsampled formats! */
+ bpp = GST_VIDEO_FRAME_COMP_PSTRIDE (src, 0);
+
+ /* Each case maps a destination pixel (x, y) to its source pixel and
+ * copies all bpp bytes. */
+ switch (videoflip->active_method) {
+ case GST_VIDEO_ORIENTATION_90R:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ for (z = 0; z < bpp; z++) {
+ d[y * dest_stride + x * bpp + z] =
+ s[(sh - 1 - x) * src_stride + y * bpp + z];
+ }
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_90L:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ for (z = 0; z < bpp; z++) {
+ d[y * dest_stride + x * bpp + z] =
+ s[x * src_stride + (sw - 1 - y) * bpp + z];
+ }
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_180:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ for (z = 0; z < bpp; z++) {
+ d[y * dest_stride + x * bpp + z] =
+ s[(sh - 1 - y) * src_stride + (sw - 1 - x) * bpp + z];
+ }
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_HORIZ:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ for (z = 0; z < bpp; z++) {
+ d[y * dest_stride + x * bpp + z] =
+ s[y * src_stride + (sw - 1 - x) * bpp + z];
+ }
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_VERT:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ for (z = 0; z < bpp; z++) {
+ d[y * dest_stride + x * bpp + z] =
+ s[(sh - 1 - y) * src_stride + x * bpp + z];
+ }
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_UL_LR:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ for (z = 0; z < bpp; z++) {
+ d[y * dest_stride + x * bpp + z] = s[x * src_stride + y * bpp + z];
+ }
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_UR_LL:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ for (z = 0; z < bpp; z++) {
+ d[y * dest_stride + x * bpp + z] =
+ s[(sh - 1 - x) * src_stride + (sw - 1 - y) * bpp + z];
+ }
+ }
+ }
+ break;
+ case GST_VIDEO_ORIENTATION_IDENTITY:
+ gst_video_frame_copy (dest, src);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+}
+
+
+/* Flip/rotate a packed 4:2:2 frame (YUY2, UYVY, YVYU).
+ *
+ * U and V are shared between two horizontally adjacent luma samples, so
+ * destination pixels are produced in pairs.  When the destination pair
+ * pulls chroma from two different source columns (the 90° rotations and
+ * diagonal flips) the two source chroma values are averaged; when both
+ * destination samples come from a single source chroma column (180,
+ * horizontal and vertical flips) the value is read directly — the
+ * previous code redundantly averaged that value with itself. */
+static void
+gst_video_flip_y422 (GstVideoFlip * videoflip, GstVideoFrame * dest,
+    const GstVideoFrame * src)
+{
+  gint x, y;
+  guint8 const *s;
+  guint8 *d;
+  gint sw = GST_VIDEO_FRAME_WIDTH (src);
+  gint sh = GST_VIDEO_FRAME_HEIGHT (src);
+  gint dw = GST_VIDEO_FRAME_WIDTH (dest);
+  gint dh = GST_VIDEO_FRAME_HEIGHT (dest);
+  gint src_stride, dest_stride;
+  gint bpp;
+  gint y_offset;
+  gint u_offset;
+  gint v_offset;
+  gint y_stride;
+
+  s = GST_VIDEO_FRAME_PLANE_DATA (src, 0);
+  d = GST_VIDEO_FRAME_PLANE_DATA (dest, 0);
+
+  src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (src, 0);
+  dest_stride = GST_VIDEO_FRAME_PLANE_STRIDE (dest, 0);
+
+  /* the component offsets encode the byte-order difference between
+   * YUY2 / UYVY / YVYU */
+  y_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 0);
+  u_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 1);
+  v_offset = GST_VIDEO_FRAME_COMP_OFFSET (src, 2);
+  y_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (src, 0);
+  bpp = y_stride;
+
+  switch (videoflip->active_method) {
+    case GST_VIDEO_ORIENTATION_90R:
+      for (y = 0; y < dh; y++) {
+        for (x = 0; x < dw; x += 2) {
+          guint8 u;
+          guint8 v;
+          /* u/v must be calculated using the offset of the even column */
+          gint even_y = (y & ~1);
+
+          /* the pair's chroma comes from two source rows: average them */
+          u = s[(sh - 1 - x) * src_stride + even_y * bpp + u_offset];
+          if (x + 1 < dw)
+            u = (s[(sh - 1 - (x + 1)) * src_stride + even_y * bpp + u_offset]
+                + u) >> 1;
+          v = s[(sh - 1 - x) * src_stride + even_y * bpp + v_offset];
+          if (x + 1 < dw)
+            v = (s[(sh - 1 - (x + 1)) * src_stride + even_y * bpp + v_offset]
+                + v) >> 1;
+
+          d[y * dest_stride + x * bpp + u_offset] = u;
+          d[y * dest_stride + x * bpp + v_offset] = v;
+          d[y * dest_stride + x * bpp + y_offset] =
+              s[(sh - 1 - x) * src_stride + y * bpp + y_offset];
+          if (x + 1 < dw)
+            d[y * dest_stride + (x + 1) * bpp + y_offset] =
+                s[(sh - 1 - (x + 1)) * src_stride + y * bpp + y_offset];
+        }
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_90L:
+      for (y = 0; y < dh; y++) {
+        for (x = 0; x < dw; x += 2) {
+          guint8 u;
+          guint8 v;
+          /* u/v must be calculated using the offset of the even column */
+          gint even_y = ((sw - 1 - y) & ~1);
+
+          /* the pair's chroma comes from two source rows: average them */
+          u = s[x * src_stride + even_y * bpp + u_offset];
+          if (x + 1 < dw)
+            u = (s[(x + 1) * src_stride + even_y * bpp + u_offset] + u) >> 1;
+          v = s[x * src_stride + even_y * bpp + v_offset];
+          if (x + 1 < dw)
+            v = (s[(x + 1) * src_stride + even_y * bpp + v_offset] + v) >> 1;
+
+          d[y * dest_stride + x * bpp + u_offset] = u;
+          d[y * dest_stride + x * bpp + v_offset] = v;
+          d[y * dest_stride + x * bpp + y_offset] =
+              s[x * src_stride + (sw - 1 - y) * bpp + y_offset];
+          if (x + 1 < dw)
+            d[y * dest_stride + (x + 1) * bpp + y_offset] =
+                s[(x + 1) * src_stride + (sw - 1 - y) * bpp + y_offset];
+        }
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_180:
+      for (y = 0; y < dh; y++) {
+        for (x = 0; x < dw; x += 2) {
+          guint8 u;
+          guint8 v;
+          /* u/v must be calculated using the offset of the even column */
+          gint even_x = ((sw - 1 - x) & ~1);
+
+          /* both destination samples share this source chroma column, so
+           * no averaging is needed (previously the same sample was averaged
+           * with itself) */
+          u = s[(sh - 1 - y) * src_stride + even_x * bpp + u_offset];
+          v = s[(sh - 1 - y) * src_stride + even_x * bpp + v_offset];
+
+          d[y * dest_stride + x * bpp + u_offset] = u;
+          d[y * dest_stride + x * bpp + v_offset] = v;
+          d[y * dest_stride + x * bpp + y_offset] =
+              s[(sh - 1 - y) * src_stride + (sw - 1 - x) * bpp + y_offset];
+          if (x + 1 < dw)
+            d[y * dest_stride + (x + 1) * bpp + y_offset] =
+                s[(sh - 1 - y) * src_stride + (sw - 1 - (x + 1)) * bpp +
+                y_offset];
+        }
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_HORIZ:
+      for (y = 0; y < dh; y++) {
+        for (x = 0; x < dw; x += 2) {
+          guint8 u;
+          guint8 v;
+          /* u/v must be calculated using the offset of the even column */
+          gint even_x = ((sw - 1 - x) & ~1);
+
+          /* single source chroma column: read it directly (previously the
+           * same sample was averaged with itself) */
+          u = s[y * src_stride + even_x * bpp + u_offset];
+          v = s[y * src_stride + even_x * bpp + v_offset];
+
+          d[y * dest_stride + x * bpp + u_offset] = u;
+          d[y * dest_stride + x * bpp + v_offset] = v;
+          d[y * dest_stride + x * bpp + y_offset] =
+              s[y * src_stride + (sw - 1 - x) * bpp + y_offset];
+          if (x + 1 < dw)
+            d[y * dest_stride + (x + 1) * bpp + y_offset] =
+                s[y * src_stride + (sw - 1 - (x + 1)) * bpp + y_offset];
+        }
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_VERT:
+      for (y = 0; y < dh; y++) {
+        for (x = 0; x < dw; x += 2) {
+          guint8 u;
+          guint8 v;
+          /* u/v must be calculated using the offset of the even column */
+          gint even_x = (x & ~1);
+
+          /* single source chroma column: read it directly (previously the
+           * same sample was averaged with itself) */
+          u = s[(sh - 1 - y) * src_stride + even_x * bpp + u_offset];
+          v = s[(sh - 1 - y) * src_stride + even_x * bpp + v_offset];
+
+          d[y * dest_stride + x * bpp + u_offset] = u;
+          d[y * dest_stride + x * bpp + v_offset] = v;
+          d[y * dest_stride + x * bpp + y_offset] =
+              s[(sh - 1 - y) * src_stride + x * bpp + y_offset];
+          if (x + 1 < dw)
+            d[y * dest_stride + (x + 1) * bpp + y_offset] =
+                s[(sh - 1 - y) * src_stride + (x + 1) * bpp + y_offset];
+        }
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_UL_LR:
+      for (y = 0; y < dh; y++) {
+        for (x = 0; x < dw; x += 2) {
+          guint8 u;
+          guint8 v;
+          /* u/v must be calculated using the offset of the even column */
+          gint even_y = (y & ~1);
+
+          /* the pair's chroma comes from two source rows: average them */
+          u = s[x * src_stride + even_y * bpp + u_offset];
+          if (x + 1 < dw)
+            u = (s[(x + 1) * src_stride + even_y * bpp + u_offset] + u) >> 1;
+          v = s[x * src_stride + even_y * bpp + v_offset];
+          if (x + 1 < dw)
+            v = (s[(x + 1) * src_stride + even_y * bpp + v_offset] + v) >> 1;
+
+          d[y * dest_stride + x * bpp + u_offset] = u;
+          d[y * dest_stride + x * bpp + v_offset] = v;
+          d[y * dest_stride + x * bpp + y_offset] =
+              s[x * src_stride + y * bpp + y_offset];
+          if (x + 1 < dw)
+            d[y * dest_stride + (x + 1) * bpp + y_offset] =
+                s[(x + 1) * src_stride + y * bpp + y_offset];
+        }
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_UR_LL:
+      for (y = 0; y < dh; y++) {
+        for (x = 0; x < dw; x += 2) {
+          guint8 u;
+          guint8 v;
+          /* u/v must be calculated using the offset of the even column */
+          gint even_y = ((sw - 1 - y) & ~1);
+
+          /* the pair's chroma comes from two source rows: average them */
+          u = s[(sh - 1 - x) * src_stride + even_y * bpp + u_offset];
+          if (x + 1 < dw)
+            u = (s[(sh - 1 - (x + 1)) * src_stride + even_y * bpp + u_offset]
+                + u) >> 1;
+          v = s[(sh - 1 - x) * src_stride + even_y * bpp + v_offset];
+          if (x + 1 < dw)
+            v = (s[(sh - 1 - (x + 1)) * src_stride + even_y * bpp + v_offset]
+                + v) >> 1;
+
+          d[y * dest_stride + x * bpp + u_offset] = u;
+          d[y * dest_stride + x * bpp + v_offset] = v;
+          d[y * dest_stride + x * bpp + y_offset] =
+              s[(sh - 1 - x) * src_stride + (sw - 1 - y) * bpp + y_offset];
+          if (x + 1 < dw)
+            d[y * dest_stride + (x + 1) * bpp + y_offset] =
+                s[(sh - 1 - (x + 1)) * src_stride + (sw - 1 - y) * bpp +
+                y_offset];
+        }
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_IDENTITY:
+      gst_video_frame_copy (dest, src);
+      break;
+    default:
+      g_assert_not_reached ();
+      break;
+  }
+}
+
+/* Select the per-format process callback for the negotiated video format.
+ * For formats this element cannot handle, vf->process is left untouched
+ * (set_info resets it to NULL beforehand). */
+static void
+gst_video_flip_configure_process (GstVideoFlip * vf)
+{
+  const GstVideoFormat format = vf->v_format;
+
+  if (format == GST_VIDEO_FORMAT_I420 || format == GST_VIDEO_FORMAT_YV12
+      || format == GST_VIDEO_FORMAT_Y444) {
+    vf->process = gst_video_flip_planar_yuv;
+  } else if (format == GST_VIDEO_FORMAT_YUY2 || format == GST_VIDEO_FORMAT_UYVY
+      || format == GST_VIDEO_FORMAT_YVYU) {
+    vf->process = gst_video_flip_y422;
+  } else if (format == GST_VIDEO_FORMAT_NV12
+      || format == GST_VIDEO_FORMAT_NV21) {
+    vf->process = gst_video_flip_semi_planar_yuv;
+  } else if (format == GST_VIDEO_FORMAT_AYUV
+      || format == GST_VIDEO_FORMAT_ARGB || format == GST_VIDEO_FORMAT_ABGR
+      || format == GST_VIDEO_FORMAT_RGBA || format == GST_VIDEO_FORMAT_BGRA
+      || format == GST_VIDEO_FORMAT_xRGB || format == GST_VIDEO_FORMAT_xBGR
+      || format == GST_VIDEO_FORMAT_RGBx || format == GST_VIDEO_FORMAT_BGRx
+      || format == GST_VIDEO_FORMAT_RGB || format == GST_VIDEO_FORMAT_BGR
+      || format == GST_VIDEO_FORMAT_GRAY8
+      || format == GST_VIDEO_FORMAT_GRAY16_BE
+      || format == GST_VIDEO_FORMAT_GRAY16_LE) {
+    vf->process = gst_video_flip_packed_simple;
+  }
+}
+
+/* GstVideoFilter::set_info vfunc.
+ * Validates that the negotiated output dimensions match what the
+ * configuring method requires (swapped width/height for 90° rotations and
+ * diagonal flips, identical dimensions otherwise), then latches
+ * configuring_method as the active method and picks the per-format process
+ * function.  Returns FALSE on caps mismatch or unsupported format. */
+static gboolean
+gst_video_flip_set_info (GstVideoFilter * vfilter, GstCaps * incaps,
+    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
+{
+  GstVideoFlip *vf = GST_VIDEO_FLIP (vfilter);
+  gboolean ret = FALSE, need_reconfigure = FALSE;
+
+  vf->process = NULL;
+
+  /* the element only flips, it never converts: formats must match */
+  if (GST_VIDEO_INFO_FORMAT (in_info) != GST_VIDEO_INFO_FORMAT (out_info))
+    goto invalid_caps;
+
+  /* Check that they are correct */
+  GST_OBJECT_LOCK (vf);
+  switch (vf->configuring_method) {
+    case GST_VIDEO_ORIENTATION_90R:
+    case GST_VIDEO_ORIENTATION_90L:
+    case GST_VIDEO_ORIENTATION_UL_LR:
+    case GST_VIDEO_ORIENTATION_UR_LL:
+      /* these methods transpose the frame: caps must swap the dimensions */
+      if ((in_info->width != out_info->height) ||
+          (in_info->height != out_info->width)) {
+        GST_ERROR_OBJECT (vf, "we are inverting width and height but caps "
+            "are not correct : %dx%d to %dx%d", in_info->width,
+            in_info->height, out_info->width, out_info->height);
+        goto beach;
+      }
+      break;
+    case GST_VIDEO_ORIENTATION_IDENTITY:
+    case GST_VIDEO_ORIENTATION_180:
+    case GST_VIDEO_ORIENTATION_HORIZ:
+    case GST_VIDEO_ORIENTATION_VERT:
+      /* non-transposing methods: dimensions must be unchanged */
+      if ((in_info->width != out_info->width) ||
+          (in_info->height != out_info->height)) {
+        GST_ERROR_OBJECT (vf, "we are keeping width and height but caps "
+            "are not correct : %dx%d to %dx%d", in_info->width,
+            in_info->height, out_info->width, out_info->height);
+        goto beach;
+      }
+      break;
+    default:
+      /* AUTO/CUSTOM never end up in configuring_method */
+      g_assert_not_reached ();
+      break;
+  }
+
+  ret = TRUE;
+
+  {
+    /* log the method transition by enum nickname */
+    GEnumValue *active_method_enum, *method_enum;
+    GEnumClass *enum_class =
+        g_type_class_ref (GST_TYPE_VIDEO_ORIENTATION_METHOD);
+
+    active_method_enum = g_enum_get_value (enum_class, vf->active_method);
+    method_enum = g_enum_get_value (enum_class, vf->configuring_method);
+    GST_LOG_OBJECT (vf, "Changing active method from %s to configuring %s",
+        active_method_enum ? active_method_enum->value_nick : "(nil)",
+        method_enum ? method_enum->value_nick : "(nil)");
+    g_type_class_unref (enum_class);
+  }
+  vf->active_method = vf->configuring_method;
+  vf->change_configuring_method = TRUE;
+  /* a proposed method differing from the one just activated means these
+   * caps are already stale: request renegotiation (outside the lock) */
+  if (vf->active_method != vf->proposed_method)
+    need_reconfigure = TRUE;
+
+  vf->v_format = GST_VIDEO_INFO_FORMAT (in_info);
+  gst_video_flip_configure_process (vf);
+
+beach:
+  GST_OBJECT_UNLOCK (vf);
+  if (need_reconfigure) {
+    gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (vf));
+  }
+
+  /* vf->process stays NULL for unsupported formats, failing negotiation */
+  return ret && (vf->process != NULL);
+
+invalid_caps:
+  GST_ERROR_OBJECT (vf, "Invalid caps: %" GST_PTR_FORMAT " -> %" GST_PTR_FORMAT,
+      incaps, outcaps);
+  return FALSE;
+}
+
+/* Record a newly requested orientation @method.  @from_tag is TRUE when
+ * the method comes from a stream "image-orientation" tag; a tag method is
+ * only effective while the method property is GST_VIDEO_ORIENTATION_AUTO.
+ * When the effective method changes, passthrough is updated and a
+ * src-side renegotiation is requested.  Takes and releases the object
+ * lock; the base-transform calls are made after unlocking. */
+static void
+gst_video_flip_set_method (GstVideoFlip * videoflip,
+    GstVideoOrientationMethod method, gboolean from_tag)
+{
+  GST_OBJECT_LOCK (videoflip);
+
+  /* CUSTOM orientation (arbitrary matrix) is not supported by videoflip */
+  if (method == GST_VIDEO_ORIENTATION_CUSTOM) {
+    GST_WARNING_OBJECT (videoflip, "unsupported custom orientation");
+    GST_OBJECT_UNLOCK (videoflip);
+    return;
+  }
+
+  /* Store updated method */
+  if (from_tag)
+    videoflip->tag_method = method;
+  else
+    videoflip->method = method;
+
+  /* Get the new method: AUTO defers to the last tag value */
+  if (videoflip->method == GST_VIDEO_ORIENTATION_AUTO)
+    method = videoflip->tag_method;
+  else
+    method = videoflip->method;
+
+  if (method != videoflip->proposed_method) {
+    GEnumValue *active_method_enum, *method_enum;
+    GstBaseTransform *btrans = GST_BASE_TRANSFORM (videoflip);
+    GEnumClass *enum_class =
+        g_type_class_ref (GST_TYPE_VIDEO_ORIENTATION_METHOD);
+
+    active_method_enum =
+        g_enum_get_value (enum_class, videoflip->active_method);
+    method_enum = g_enum_get_value (enum_class, method);
+    GST_LOG_OBJECT (videoflip, "Changing method from %s to %s",
+        active_method_enum ? active_method_enum->value_nick : "(nil)",
+        method_enum ? method_enum->value_nick : "(nil)");
+    g_type_class_unref (enum_class);
+
+    videoflip->proposed_method = method;
+
+    /* drop the object lock before calling into base-transform */
+    GST_OBJECT_UNLOCK (videoflip);
+
+    /* identity flipping can run in passthrough mode */
+    gst_base_transform_set_passthrough (btrans,
+        method == GST_VIDEO_ORIENTATION_IDENTITY);
+    gst_base_transform_reconfigure_src (btrans);
+  } else {
+    GST_OBJECT_UNLOCK (videoflip);
+  }
+}
+
+/* GstBaseTransform::before_transform vfunc: sync controllable properties
+ * (the "method" property is GST_PARAM_CONTROLLABLE) to the buffer's
+ * stream time before the frame is processed. */
+static void
+gst_video_flip_before_transform (GstBaseTransform * trans, GstBuffer * in)
+{
+  GstVideoFlip *vf = GST_VIDEO_FLIP (trans);
+  GstClockTime ts = GST_BUFFER_TIMESTAMP (in);
+  GstClockTime stream_time =
+      gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, ts);
+
+  GST_DEBUG_OBJECT (vf, "sync to %" GST_TIME_FORMAT, GST_TIME_ARGS (ts));
+
+  if (GST_CLOCK_TIME_IS_VALID (stream_time))
+    gst_object_sync_values (GST_OBJECT (vf), stream_time);
+}
+
+/* GstVideoFilter::transform_frame vfunc.  Runs the format-specific
+ * process callback under the object lock, first picking up a method
+ * change negotiated since the last frame.  Returns
+ * GST_FLOW_NOT_NEGOTIATED when no process function was configured. */
+static GstFlowReturn
+gst_video_flip_transform_frame (GstVideoFilter * vfilter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GEnumClass *enum_class;
+  GstVideoOrientationMethod active, proposed;
+  GEnumValue *active_method_enum;
+  GstVideoFlip *videoflip = GST_VIDEO_FLIP (vfilter);
+
+  GST_OBJECT_LOCK (videoflip);
+  if (G_UNLIKELY (videoflip->process == NULL))
+    goto not_negotiated;
+
+  /* apply a pending method change before processing this frame */
+  if (videoflip->configuring_method != videoflip->active_method) {
+    videoflip->active_method = videoflip->configuring_method;
+    gst_video_flip_configure_process (videoflip);
+  }
+
+  enum_class = g_type_class_ref (GST_TYPE_VIDEO_ORIENTATION_METHOD);
+  active_method_enum = g_enum_get_value (enum_class, videoflip->active_method);
+  GST_LOG_OBJECT (videoflip,
+      "videoflip: flipping (%s), input %ux%u output %ux%u",
+      active_method_enum ? active_method_enum->value_nick : "(nil)",
+      GST_VIDEO_FRAME_WIDTH (in_frame), GST_VIDEO_FRAME_HEIGHT (in_frame),
+      GST_VIDEO_FRAME_WIDTH (out_frame), GST_VIDEO_FRAME_HEIGHT (out_frame));
+  g_type_class_unref (enum_class);
+
+  videoflip->process (videoflip, out_frame, in_frame);
+
+  /* snapshot state under the lock, act on it after unlocking */
+  proposed = videoflip->proposed_method;
+  active = videoflip->active_method;
+  videoflip->change_configuring_method = TRUE;
+  GST_OBJECT_UNLOCK (videoflip);
+
+  /* a method change was requested mid-stream: update passthrough and ask
+   * for src renegotiation */
+  if (proposed != active) {
+    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (videoflip),
+        proposed == GST_VIDEO_ORIENTATION_IDENTITY);
+    gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (videoflip));
+  }
+
+  return GST_FLOW_OK;
+
+not_negotiated:
+  {
+    GST_OBJECT_UNLOCK (videoflip);
+    GST_ERROR_OBJECT (videoflip, "Not negotiated yet");
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+}
+
+/* GstBaseTransform::src_event vfunc.  For navigation events, remaps the
+ * pointer coordinates to account for the active flip/rotation before the
+ * event is forwarded upstream; all other events pass through unchanged. */
+static gboolean
+gst_video_flip_src_event (GstBaseTransform * trans, GstEvent * event)
+{
+  GstVideoFlip *vf = GST_VIDEO_FLIP (trans);
+  gdouble new_x, new_y, x, y;
+  GstStructure *structure;
+  gboolean ret;
+  GstVideoInfo *out_info = &GST_VIDEO_FILTER (trans)->out_info;
+
+  GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_NAVIGATION:
+      /* we modify the event's structure, so make sure it is writable */
+      event =
+          GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));
+
+      structure = (GstStructure *) gst_event_get_structure (event);
+      if (gst_structure_get_double (structure, "pointer_x", &x) &&
+          gst_structure_get_double (structure, "pointer_y", &y)) {
+        GST_DEBUG_OBJECT (vf, "converting %fx%f", x, y);
+        GST_OBJECT_LOCK (vf);
+        /* apply the inverse of the active transform to the pointer
+         * position, so the coordinates match the unflipped input frame */
+        switch (vf->active_method) {
+          case GST_VIDEO_ORIENTATION_90R:
+            new_x = y;
+            new_y = out_info->width - x;
+            break;
+          case GST_VIDEO_ORIENTATION_90L:
+            new_x = out_info->height - y;
+            new_y = x;
+            break;
+          case GST_VIDEO_ORIENTATION_UR_LL:
+            new_x = out_info->height - y;
+            new_y = out_info->width - x;
+            break;
+          case GST_VIDEO_ORIENTATION_UL_LR:
+            new_x = y;
+            new_y = x;
+            break;
+          case GST_VIDEO_ORIENTATION_180:
+            new_x = out_info->width - x;
+            new_y = out_info->height - y;
+            break;
+          case GST_VIDEO_ORIENTATION_HORIZ:
+            new_x = out_info->width - x;
+            new_y = y;
+            break;
+          case GST_VIDEO_ORIENTATION_VERT:
+            new_x = x;
+            new_y = out_info->height - y;
+            break;
+          default:
+            /* identity (and anything unexpected): pass through unchanged */
+            new_x = x;
+            new_y = y;
+            break;
+        }
+        GST_OBJECT_UNLOCK (vf);
+        GST_DEBUG_OBJECT (vf, "to %fx%f", new_x, new_y);
+        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, new_x,
+            "pointer_y", G_TYPE_DOUBLE, new_y, NULL);
+      }
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans, event);
+
+  return ret;
+}
+
+/* GstBaseTransform::sink_event vfunc.  Watches tag events for an
+ * "image-orientation" tag and records the matching flip method (only
+ * effective while the method property is AUTO); the event is then
+ * forwarded unchanged. */
+static gboolean
+gst_video_flip_sink_event (GstBaseTransform * trans, GstEvent * event)
+{
+  GstVideoFlip *vf = GST_VIDEO_FLIP (trans);
+  GstTagList *taglist;
+  gchar *orientation;
+
+  GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+  if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
+    gst_event_parse_tag (event, &taglist);
+
+    if (gst_tag_list_get_string (taglist, "image-orientation", &orientation)) {
+      /* EXIF-style orientation strings and their flip methods */
+      static const struct
+      {
+        const gchar *tag;
+        GstVideoOrientationMethod method;
+      } map[] = {
+        {"rotate-0", GST_VIDEO_ORIENTATION_IDENTITY},
+        {"rotate-90", GST_VIDEO_ORIENTATION_90R},
+        {"rotate-180", GST_VIDEO_ORIENTATION_180},
+        {"rotate-270", GST_VIDEO_ORIENTATION_90L},
+        {"flip-rotate-0", GST_VIDEO_ORIENTATION_HORIZ},
+        {"flip-rotate-90", GST_VIDEO_ORIENTATION_UL_LR},
+        {"flip-rotate-180", GST_VIDEO_ORIENTATION_VERT},
+        {"flip-rotate-270", GST_VIDEO_ORIENTATION_UR_LL},
+      };
+      guint i;
+
+      for (i = 0; i < G_N_ELEMENTS (map); i++) {
+        if (g_strcmp0 (map[i].tag, orientation) == 0) {
+          gst_video_flip_set_method (vf, map[i].method, TRUE);
+          break;
+        }
+      }
+
+      g_free (orientation);
+    }
+  }
+
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->sink_event (trans, event);
+}
+
+/* GObject::set_property: "method" and "video-direction" are aliases, both
+ * routed through gst_video_flip_set_method() as a non-tag update. */
+static void
+gst_video_flip_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstVideoFlip *vf = GST_VIDEO_FLIP (object);
+
+  if (prop_id == PROP_METHOD || prop_id == PROP_VIDEO_DIRECTION)
+    gst_video_flip_set_method (vf, g_value_get_enum (value), FALSE);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* GObject::get_property: both "method" and "video-direction" report the
+ * stored method property (not the currently active method). */
+static void
+gst_video_flip_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstVideoFlip *vf = GST_VIDEO_FLIP (object);
+
+  if (prop_id == PROP_METHOD || prop_id == PROP_VIDEO_DIRECTION)
+    g_value_set_enum (value, vf->method);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* Class initialisation: installs properties (including the deprecated
+ * "method" alias of "video-direction"), pad templates, element metadata
+ * and vfunc implementations. */
+static void
+gst_video_flip_class_init (GstVideoFlipClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+  GParamSpec *pspec;
+
+  GST_DEBUG_CATEGORY_INIT (video_flip_debug, "videoflip", 0, "videoflip");
+
+  gobject_class->set_property = gst_video_flip_set_property;
+  gobject_class->get_property = gst_video_flip_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_METHOD,
+      g_param_spec_enum ("method", "method",
+          "method (deprecated, use video-direction instead)",
+          GST_TYPE_VIDEO_FLIP_METHOD, PROP_METHOD_DEFAULT,
+          GST_PARAM_CONTROLLABLE | GST_PARAM_MUTABLE_PLAYING |
+          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
+  /* implements the GstVideoDirection interface property */
+  g_object_class_override_property (gobject_class, PROP_VIDEO_DIRECTION,
+      "video-direction");
+  /* override the overridden property's flags to include the mutable in
+   * playing flag (must be done after the override is installed) */
+  pspec = g_object_class_find_property (gobject_class, "video-direction");
+  pspec->flags |= GST_PARAM_MUTABLE_PLAYING;
+
+  gst_element_class_set_static_metadata (gstelement_class, "Video flipper",
+      "Filter/Effect/Video",
+      "Flips and rotates video", "David Schleef <ds@schleef.org>");
+
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_video_flip_sink_template);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &gst_video_flip_src_template);
+
+  trans_class->transform_caps =
+      GST_DEBUG_FUNCPTR (gst_video_flip_transform_caps);
+  trans_class->before_transform =
+      GST_DEBUG_FUNCPTR (gst_video_flip_before_transform);
+  trans_class->src_event = GST_DEBUG_FUNCPTR (gst_video_flip_src_event);
+  trans_class->sink_event = GST_DEBUG_FUNCPTR (gst_video_flip_sink_event);
+
+  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_flip_set_info);
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_video_flip_transform_frame);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_VIDEO_FLIP_METHOD, 0);
+}
+
+/* Instance initialisation. */
+static void
+gst_video_flip_init (GstVideoFlip * vf)
+{
+  vf->proposed_method = GST_VIDEO_ORIENTATION_IDENTITY;
+  vf->configuring_method = GST_VIDEO_ORIENTATION_IDENTITY;
+  /* AUTO is never a valid active method; starting from it guarantees the
+   * first gst_video_flip_set_method() call sees a change and sets up the
+   * method properly */
+  vf->active_method = GST_VIDEO_ORIENTATION_AUTO;
+}
diff --git a/gst/videofilter/gstvideoflip.h b/gst/videofilter/gstvideoflip.h
new file mode 100644
index 0000000000..3320153181
--- /dev/null
+++ b/gst/videofilter/gstvideoflip.h
@@ -0,0 +1,99 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_FLIP_H__
+#define __GST_VIDEO_FLIP_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+/**
+ * GstVideoFlipMethod:
+ * @GST_VIDEO_FLIP_METHOD_IDENTITY: Identity (no rotation)
+ * @GST_VIDEO_FLIP_METHOD_90R: Rotate clockwise 90 degrees
+ * @GST_VIDEO_FLIP_METHOD_180: Rotate 180 degrees
+ * @GST_VIDEO_FLIP_METHOD_90L: Rotate counter-clockwise 90 degrees
+ * @GST_VIDEO_FLIP_METHOD_HORIZ: Flip horizontally
+ * @GST_VIDEO_FLIP_METHOD_VERT: Flip vertically
+ * @GST_VIDEO_FLIP_METHOD_TRANS: Flip across upper left/lower right diagonal
+ * @GST_VIDEO_FLIP_METHOD_OTHER: Flip across upper right/lower left diagonal
+ * @GST_VIDEO_FLIP_METHOD_AUTO: Select flip method based on image-orientation tag
+ *
+ * The different flip methods.
+ */
+typedef enum {
+ GST_VIDEO_FLIP_METHOD_IDENTITY,
+ GST_VIDEO_FLIP_METHOD_90R,
+ GST_VIDEO_FLIP_METHOD_180,
+ GST_VIDEO_FLIP_METHOD_90L,
+ GST_VIDEO_FLIP_METHOD_HORIZ,
+ GST_VIDEO_FLIP_METHOD_VERT,
+ GST_VIDEO_FLIP_METHOD_TRANS,
+ GST_VIDEO_FLIP_METHOD_OTHER,
+ GST_VIDEO_FLIP_METHOD_AUTO
+} GstVideoFlipMethod;
+
+#define GST_TYPE_VIDEO_FLIP \
+ (gst_video_flip_get_type())
+#define GST_VIDEO_FLIP(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_FLIP,GstVideoFlip))
+#define GST_VIDEO_FLIP_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_FLIP,GstVideoFlipClass))
+#define GST_IS_VIDEO_FLIP(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_FLIP))
+#define GST_IS_VIDEO_FLIP_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_FLIP))
+
+typedef struct _GstVideoFlip GstVideoFlip;
+typedef struct _GstVideoFlipClass GstVideoFlipClass;
+
+/**
+ * GstVideoFlip:
+ *
+ * Opaque datastructure.
+ */
+struct _GstVideoFlip {
+  GstVideoFilter videofilter;
+
+  /* < private > */
+  GstVideoFormat v_format;      /* negotiated video format */
+
+  GstVideoOrientationMethod method;      /* value of the method/video-direction property */
+  GstVideoOrientationMethod tag_method;  /* orientation from the image-orientation tag,
+                                          * used while method is AUTO */
+  GstVideoOrientationMethod proposed_method;  /* latest requested method, pending renegotiation */
+  gboolean change_configuring_method;    /* NOTE(review): appears to gate updating
+                                          * configuring_method during caps negotiation --
+                                          * confirm against transform_caps */
+  GstVideoOrientationMethod configuring_method;  /* method being negotiated in set_info */
+  GstVideoOrientationMethod active_method;  /* method currently applied per frame */
+  /* format-specific flip implementation, NULL until negotiated */
+  void (*process) (GstVideoFlip *videoflip, GstVideoFrame *dest, const GstVideoFrame *src);
+};
+
+struct _GstVideoFlipClass {
+  GstVideoFilterClass parent_class;
+};
+
+GType gst_video_flip_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (videoflip);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_FLIP_H__ */
diff --git a/gst/videofilter/gstvideomedian.c b/gst/videofilter/gstvideomedian.c
new file mode 100644
index 0000000000..9b78b03504
--- /dev/null
+++ b/gst/videofilter/gstvideomedian.c
@@ -0,0 +1,327 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <string.h>
+#include "gstvideomedian.h"
+
+/* Source pad: produces the same planar 4:2:0 YUV formats it consumes. */
+static GstStaticPadTemplate video_median_src_factory =
+GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12 }"))
+    );
+
+/* Sink pad: accepts planar 4:2:0 YUV only. */
+static GstStaticPadTemplate video_median_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ I420, YV12 }"))
+    );
+
+
+/* Median signals and args */
+enum
+{
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+/* property defaults */
+#define DEFAULT_FILTERSIZE 5
+#define DEFAULT_LUM_ONLY TRUE
+/* property IDs */
+enum
+{
+  PROP_0,
+  PROP_FILTERSIZE,
+  PROP_LUM_ONLY
+};
+
+#define GST_TYPE_VIDEO_MEDIAN_SIZE (gst_video_median_size_get_type())
+
+/* GEnum values for the "filtersize" property (5- or 9-tap median) */
+static const GEnumValue video_median_sizes[] = {
+  {GST_VIDEO_MEDIAN_SIZE_5, "Median of 5 neighbour pixels", "5"},
+  {GST_VIDEO_MEDIAN_SIZE_9, "Median of 9 neighbour pixels", "9"},
+  {0, NULL, NULL},
+};
+
+/* Lazily register and return the GType of the filtersize enum. */
+static GType
+gst_video_median_size_get_type (void)
+{
+  static GType size_type = 0;
+
+  if (size_type == 0)
+    size_type = g_enum_register_static ("GstVideoMedianSize",
+        video_median_sizes);
+
+  return size_type;
+}
+
+#define gst_video_median_parent_class parent_class
+G_DEFINE_TYPE (GstVideoMedian, gst_video_median, GST_TYPE_VIDEO_FILTER);
+GST_ELEMENT_REGISTER_DEFINE (videomedian, "videomedian",
+    GST_RANK_NONE, GST_TYPE_VIDEO_MEDIAN);
+
+/* forward declarations for the vfuncs installed in class_init */
+static GstFlowReturn gst_video_median_transform_frame (GstVideoFilter * filter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame);
+
+static void gst_video_median_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_video_median_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+/* Class initialisation: properties, pad templates, metadata and vfuncs. */
+static void
+gst_video_median_class_init (GstVideoMedianClass * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;
+
+  gobject_class->set_property = gst_video_median_set_property;
+  gobject_class->get_property = gst_video_median_get_property;
+
+  g_object_class_install_property (gobject_class, PROP_FILTERSIZE,
+      g_param_spec_enum ("filtersize", "Filtersize", "The size of the filter",
+          GST_TYPE_VIDEO_MEDIAN_SIZE, DEFAULT_FILTERSIZE,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+  g_object_class_install_property (gobject_class, PROP_LUM_ONLY,
+      g_param_spec_boolean ("lum-only", "Lum Only", "Only apply filter on "
+          "luminance", DEFAULT_LUM_ONLY,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gst_element_class_set_static_metadata (gstelement_class, "Median effect",
+      "Filter/Effect/Video", "Apply a median filter to an image",
+      "Wim Taymans <wim.taymans@gmail.com>");
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &video_median_sink_factory);
+  gst_element_class_add_static_pad_template (gstelement_class,
+      &video_median_src_factory);
+
+  vfilter_class->transform_frame =
+      GST_DEBUG_FUNCPTR (gst_video_median_transform_frame);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_VIDEO_MEDIAN_SIZE, 0);
+}
+
+/* Instance initialisation: set property defaults.
+ * Declared static: G_DEFINE_TYPE already declares this function static,
+ * so the definition should match instead of relying on the earlier
+ * declaration for internal linkage. */
+static void
+gst_video_median_init (GstVideoMedian * median)
+{
+  median->filtersize = DEFAULT_FILTERSIZE;
+  median->lum_only = DEFAULT_LUM_ONLY;
+}
+
+/* Order two byte lvalues so that a <= b.  NOTE: arguments are evaluated
+ * more than once -- only safe with plain lvalues, as used below. */
+#define PIX_SORT(a,b) { if ((a)>(b)) PIX_SWAP((a),(b)); }
+/* Swap two byte lvalues via a temporary. */
+#define PIX_SWAP(a,b) { unsigned char temp=(a);(a)=(b);(b)=temp; }
+
+/* 5-tap (cross-shaped) median filter: each interior pixel becomes the
+ * median of itself and its 4-connected neighbours.  Border pixels (first
+ * and last rows/columns) are copied from the source unfiltered.
+ * Strides are in bytes; planes are @width x @height. */
+static void
+median_5 (guint8 * dest, gint dstride, const guint8 * src, gint sstride,
+    gint width, gint height)
+{
+  unsigned char p[5];
+  int i, j, k;
+
+  /* copy the top and bottom rows into the result array */
+  for (i = 0; i < width; i++) {
+    dest[i] = src[i];
+    dest[(height - 1) * dstride + i] = src[(height - 1) * sstride + i];
+  }
+
+  /* process the interior pixels (rows 1 .. height-2) */
+  for (k = 2; k < height; k++) {
+    dest += dstride;
+    src += sstride;
+
+    /* left edge copied unfiltered */
+    dest[0] = src[0];
+    for (j = 2, i = 1; j < width; j++, i++) {
+      /* gather the cross neighbourhood; the fixed sorting network below
+       * leaves the median in p[2] without fully sorting the array */
+      p[0] = src[i - sstride];
+      p[1] = src[i - 1];
+      p[2] = src[i];
+      p[3] = src[i + 1];
+      p[4] = src[i + sstride];
+      PIX_SORT (p[0], p[1]);
+      PIX_SORT (p[3], p[4]);
+      PIX_SORT (p[0], p[3]);
+      PIX_SORT (p[1], p[4]);
+      PIX_SORT (p[1], p[2]);
+      PIX_SORT (p[2], p[3]);
+      PIX_SORT (p[1], p[2]);
+      dest[i] = p[2];
+    }
+    /* right edge copied unfiltered */
+    dest[i] = src[i];
+  }
+}
+
+/* 9-tap (3x3 box) median filter: each interior pixel becomes the median
+ * of its full 3x3 neighbourhood.  Border pixels (first and last
+ * rows/columns) are copied from the source unfiltered.
+ * Strides are in bytes; planes are @width x @height. */
+static void
+median_9 (guint8 * dest, gint dstride, const guint8 * src, gint sstride,
+    gint width, gint height)
+{
+  unsigned char p[9];
+  int i, j, k;
+
+  /*copy the top and bottom rows into the result array */
+  for (i = 0; i < width; i++) {
+    dest[i] = src[i];
+    dest[(height - 1) * dstride + i] = src[(height - 1) * sstride + i];
+  }
+  /* process the interior pixels (rows 1 .. height-2) */
+  for (k = 2; k < height; k++) {
+    dest += dstride;
+    src += sstride;
+
+    /* left edge copied unfiltered */
+    dest[0] = src[0];
+    for (j = 2, i = 1; j < width; j++, i++) {
+      /* gather the 3x3 neighbourhood; the fixed sorting network below
+       * leaves the median in p[4] without fully sorting the array */
+      p[0] = src[i - sstride - 1];
+      p[1] = src[i - sstride];
+      p[2] = src[i - sstride + 1];
+      p[3] = src[i - 1];
+      p[4] = src[i];
+      p[5] = src[i + 1];
+      p[6] = src[i + sstride - 1];
+      p[7] = src[i + sstride];
+      p[8] = src[i + sstride + 1];
+      PIX_SORT (p[1], p[2]);
+      PIX_SORT (p[4], p[5]);
+      PIX_SORT (p[7], p[8]);
+      PIX_SORT (p[0], p[1]);
+      PIX_SORT (p[3], p[4]);
+      PIX_SORT (p[6], p[7]);
+      PIX_SORT (p[1], p[2]);
+      PIX_SORT (p[4], p[5]);
+      PIX_SORT (p[7], p[8]);
+      PIX_SORT (p[0], p[3]);
+      PIX_SORT (p[5], p[8]);
+      PIX_SORT (p[4], p[7]);
+      PIX_SORT (p[3], p[6]);
+      PIX_SORT (p[1], p[4]);
+      PIX_SORT (p[2], p[5]);
+      PIX_SORT (p[4], p[7]);
+      PIX_SORT (p[4], p[2]);
+      PIX_SORT (p[6], p[4]);
+      PIX_SORT (p[4], p[2]);
+      dest[i] = p[4];
+    }
+    /* right edge copied unfiltered */
+    dest[i] = src[i];
+  }
+}
+
+/* GstVideoFilter::transform_frame vfunc.
+ * Applies the selected median kernel to the luma plane, then either
+ * copies the chroma planes untouched (lum-only) or filters them at half
+ * resolution (the caps restrict us to 4:2:0 I420/YV12).
+ * The kernel is chosen once via a function pointer instead of duplicating
+ * the per-plane logic in two identical branches. */
+static GstFlowReturn
+gst_video_median_transform_frame (GstVideoFilter * filter,
+    GstVideoFrame * in_frame, GstVideoFrame * out_frame)
+{
+  GstVideoMedian *median = GST_VIDEO_MEDIAN (filter);
+  void (*apply) (guint8 *, gint, const guint8 *, gint, gint, gint);
+
+  apply = (median->filtersize == 5) ? median_5 : median_9;
+
+  /* luma plane, full resolution */
+  apply (GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0),
+      GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0),
+      GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0),
+      GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0),
+      GST_VIDEO_FRAME_WIDTH (in_frame), GST_VIDEO_FRAME_HEIGHT (in_frame));
+
+  if (median->lum_only) {
+    /* chroma passes through unfiltered */
+    gst_video_frame_copy_plane (out_frame, in_frame, 1);
+    gst_video_frame_copy_plane (out_frame, in_frame, 2);
+  } else {
+    gint plane;
+
+    /* chroma planes are subsampled 2x2 for I420/YV12 */
+    for (plane = 1; plane <= 2; plane++) {
+      apply (GST_VIDEO_FRAME_PLANE_DATA (out_frame, plane),
+          GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, plane),
+          GST_VIDEO_FRAME_PLANE_DATA (in_frame, plane),
+          GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, plane),
+          GST_VIDEO_FRAME_WIDTH (in_frame) / 2,
+          GST_VIDEO_FRAME_HEIGHT (in_frame) / 2);
+    }
+  }
+
+  return GST_FLOW_OK;
+}
+
+/* GObject::set_property: stores "filtersize" / "lum-only".
+ * The default case now warns on unknown property ids, matching
+ * gst_video_median_get_property() (it previously ignored them silently). */
+static void
+gst_video_median_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstVideoMedian *median;
+
+  median = GST_VIDEO_MEDIAN (object);
+
+  switch (prop_id) {
+    case PROP_FILTERSIZE:
+      median->filtersize = g_value_get_enum (value);
+      break;
+    case PROP_LUM_ONLY:
+      median->lum_only = g_value_get_boolean (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject::get_property: reports "filtersize" / "lum-only". */
+static void
+gst_video_median_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstVideoMedian *self = GST_VIDEO_MEDIAN (object);
+
+  if (prop_id == PROP_FILTERSIZE)
+    g_value_set_enum (value, self->filtersize);
+  else if (prop_id == PROP_LUM_ONLY)
+    g_value_set_boolean (value, self->lum_only);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/gst/videofilter/gstvideomedian.h b/gst/videofilter/gstvideomedian.h
new file mode 100644
index 0000000000..b8a1f051f3
--- /dev/null
+++ b/gst/videofilter/gstvideomedian.h
@@ -0,0 +1,68 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_VIDEO_MEDIAN_H__
+#define __GST_VIDEO_MEDIAN_H__
+
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideofilter.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject boilerplate for the videomedian element. */
+#define GST_TYPE_VIDEO_MEDIAN \
+  (gst_video_median_get_type())
+#define GST_VIDEO_MEDIAN(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_MEDIAN,GstVideoMedian))
+#define GST_VIDEO_MEDIAN_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_MEDIAN,GstVideoMedianClass))
+#define GST_IS_VIDEO_MEDIAN(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_MEDIAN))
+#define GST_IS_VIDEO_MEDIAN_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_MEDIAN))
+
+typedef struct _GstVideoMedian GstVideoMedian;
+typedef struct _GstVideoMedianClass GstVideoMedianClass;
+
+/* Median mask size; the enum value is the number of sampled points
+ * (5-point or 9-point median). */
+typedef enum
+{
+  GST_VIDEO_MEDIAN_SIZE_5 = 5,
+  GST_VIDEO_MEDIAN_SIZE_9 = 9
+} GstVideoMedianSize;
+
+struct _GstVideoMedian {
+  GstVideoFilter parent;
+
+  GstVideoMedianSize filtersize;  /* "filtersize" property */
+  gboolean lum_only;              /* "lum-only": filter only the luma plane,
+                                   * chroma planes are copied unchanged */
+};
+
+struct _GstVideoMedianClass {
+  GstVideoFilterClass parent_class;
+};
+
+GType gst_video_median_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (videomedian);
+
+G_END_DECLS
+
+#endif /* __GST_VIDEO_MEDIAN_H__ */
diff --git a/gst/videofilter/gstvideotemplate.c b/gst/videofilter/gstvideotemplate.c
new file mode 100644
index 0000000000..660526a985
--- /dev/null
+++ b/gst/videofilter/gstvideotemplate.c
@@ -0,0 +1,253 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2003> David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/*
+ * This file was (probably) generated from
+ * $Id$
+ * and
+ * MAKEFILTERVERSION
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gstvideofilter.h>
+#include <string.h>
+
+#define GST_TYPE_VIDEOTEMPLATE \
+ (gst_videotemplate_get_type())
+#define GST_VIDEOTEMPLATE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEOTEMPLATE,GstVideotemplate))
+#define GST_VIDEOTEMPLATE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEOTEMPLATE,GstVideotemplateClass))
+#define GST_IS_VIDEOTEMPLATE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEOTEMPLATE))
+#define GST_IS_VIDEOTEMPLATE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEOTEMPLATE))
+
+typedef struct _GstVideotemplate GstVideotemplate;
+typedef struct _GstVideotemplateClass GstVideotemplateClass;
+
+struct _GstVideotemplate
+{
+ GstVideofilter videofilter;
+
+};
+
+struct _GstVideotemplateClass
+{
+ GstVideofilterClass parent_class;
+};
+
+
+/* GstVideotemplate signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+ /* FILL ME */
+};
+
+static void gst_videotemplate_base_init (gpointer g_class);
+static void gst_videotemplate_class_init (gpointer g_class,
+ gpointer class_data);
+static void gst_videotemplate_init (GTypeInstance * instance, gpointer g_class);
+
+static void gst_videotemplate_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_videotemplate_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static void gst_videotemplate_planar411 (GstVideofilter * videofilter,
+ void *dest, void *src);
+static void gst_videotemplate_setup (GstVideofilter * videofilter);
+
+/* Returns the GType for GstVideotemplate, registering it on first call.
+ * NOTE(review): this open-coded registration is not thread-safe; modern
+ * code would use G_DEFINE_TYPE / g_once_init_enter — confirm whether
+ * multi-threaded first use is possible here. */
+GType
+gst_videotemplate_get_type (void)
+{
+  static GType videotemplate_type = 0;
+
+  if (!videotemplate_type) {
+    static const GTypeInfo videotemplate_info = {
+      sizeof (GstVideotemplateClass),
+      gst_videotemplate_base_init,
+      NULL,
+      gst_videotemplate_class_init,
+      NULL,
+      NULL,
+      sizeof (GstVideotemplate),
+      0,
+      gst_videotemplate_init,
+    };
+
+    videotemplate_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
+        "GstVideotemplate", &videotemplate_info, 0);
+  }
+  return videotemplate_type;
+}
+
+/* The only format this template handles: planar I420 at 12 bits/pixel. */
+static const GstVideofilterFormat gst_videotemplate_formats[] = {
+  {"I420", 12, gst_videotemplate_planar411,},
+};
+
+
+/* base_init: installs element metadata, the supported format table and
+ * the pad templates on the element class. */
+static void
+gst_videotemplate_base_init (gpointer g_class)
+{
+
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+  GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
+  int i;
+
+  gst_element_class_set_static_metadata (element_class, "Video filter template",
+      "Filter/Effect/Video",
+      "Template for a video filter", "David Schleef <ds@schleef.org>");
+
+  for (i = 0; i < G_N_ELEMENTS (gst_videotemplate_formats); i++) {
+    gst_videofilter_class_add_format (videofilter_class,
+        gst_videotemplate_formats + i);
+  }
+
+  gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
+}
+
+/* class_init: hooks up the property accessors and the setup vfunc.
+ * The #if 0 block shows where a real filter would install properties. */
+static void
+gst_videotemplate_class_init (gpointer g_class, gpointer class_data)
+{
+  GObjectClass *gobject_class;
+  GstVideofilterClass *videofilter_class;
+
+  gobject_class = G_OBJECT_CLASS (g_class);
+  videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
+
+#if 0
+  g_object_class_install_property (gobject_class, PROP_METHOD,
+      g_param_spec_enum ("method", "method", "method",
+          GST_TYPE_VIDEOTEMPLATE_METHOD, GST_VIDEOTEMPLATE_METHOD_1,
+          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+#endif
+
+  gobject_class->set_property = gst_videotemplate_set_property;
+  gobject_class->get_property = gst_videotemplate_get_property;
+
+  videofilter_class->setup = gst_videotemplate_setup;
+}
+
+/* Instance init: a placeholder for per-instance state initialization.
+ * The locals are intentionally unused in this template. */
+static void
+gst_videotemplate_init (GTypeInstance * instance, gpointer g_class)
+{
+  GstVideotemplate *videotemplate = GST_VIDEOTEMPLATE (instance);
+  GstVideofilter *videofilter;
+
+  GST_DEBUG ("gst_videotemplate_init");
+
+  videofilter = GST_VIDEOFILTER (videotemplate);
+
+  /* do stuff */
+}
+
+/* Property setter stub; the #if 0 case shows how a real property
+ * ("method") would be handled. Unknown ids are silently ignored here. */
+static void
+gst_videotemplate_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstVideotemplate *src;
+
+  g_return_if_fail (GST_IS_VIDEOTEMPLATE (object));
+  src = GST_VIDEOTEMPLATE (object);
+
+  GST_DEBUG ("gst_videotemplate_set_property");
+  switch (prop_id) {
+#if 0
+    case PROP_METHOD:
+      src->method = g_value_get_enum (value);
+      break;
+#endif
+    default:
+      break;
+  }
+}
+
+/* Property getter stub; warns on unknown property ids. */
+static void
+gst_videotemplate_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstVideotemplate *src;
+
+  g_return_if_fail (GST_IS_VIDEOTEMPLATE (object));
+  src = GST_VIDEOTEMPLATE (object);
+
+  switch (prop_id) {
+#if 0
+    case PROP_METHOD:
+      g_value_set_enum (value, src->method);
+      break;
+#endif
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* Plugin entry point: registers the single "videotemplate" element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return gst_element_register (plugin, "videotemplate", GST_RANK_NONE,
+      GST_TYPE_VIDEOTEMPLATE);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    videotemplate,
+    "Template for a video filter",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
+
+ /* setup vfunc: called when the filter is (re)configured; placeholder. */
+ static void gst_videotemplate_setup (GstVideofilter * videofilter)
+{
+  GstVideotemplate *videotemplate;
+
+  g_return_if_fail (GST_IS_VIDEOTEMPLATE (videofilter));
+  videotemplate = GST_VIDEOTEMPLATE (videofilter);
+
+  /* if any setup needs to be done, do it here */
+
+}
+
+/* Process one I420 frame. The copy size is the Y plane (width*height)
+ * plus the two quarter-size chroma planes ((w/2)*(h/2)*2), i.e. the
+ * full I420 frame of width*height*3/2 bytes. */
+static void
+gst_videotemplate_planar411 (GstVideofilter * videofilter,
+    void *dest, void *src)
+{
+  GstVideotemplate *videotemplate;
+  int width = gst_videofilter_get_input_width (videofilter);
+  int height = gst_videofilter_get_input_height (videofilter);
+
+  g_return_if_fail (GST_IS_VIDEOTEMPLATE (videofilter));
+  videotemplate = GST_VIDEOTEMPLATE (videofilter);
+
+  /* do something interesting here. This simply copies the source
+   * to the destination. */
+  memcpy (dest, src, width * height + (width / 2) * (height / 2) * 2);
+}
diff --git a/gst/videofilter/make_filter b/gst/videofilter/make_filter
new file mode 100755
index 0000000000..692d0d5263
--- /dev/null
+++ b/gst/videofilter/make_filter
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+# Generate gst<objectname>.c from the gstvideotemplate.c template by
+# substituting the new element name in all of its case variants.
+
+LANG=C
+export LANG
+LC_COLLATE=C
+export LC_COLLATE
+
+Template=$1
+srcfile=$2
+
+if test x"$1" = x ; then
+  # POSIX echo does not interpret "\n" escapes; use printf for portable output.
+  printf '%s\n' "$0 Objectname [srcfile]"
+  printf '%s\n' "  creates gstobjectname.{c,h} implementing GstObjectname,"
+  printf '%s\n' "  subclassing GstVideofilter."
+  exit 1
+fi
+
+if test x"$2" = x ; then
+  srcfile="gstvideotemplate.c"
+fi
+
+id=`echo '$Id$' | sed \
+  -e 's/\$I[d]: \([^$]*\)\$/\1/g'`
+echo "$id"
+
+# Quote the tr character classes and expansions so the shell cannot
+# glob-expand or word-split them.
+TEMPLATE=`echo "$Template" | tr '[:lower:]' '[:upper:]'`
+template=`echo "$Template" | tr '[:upper:]' '[:lower:]'`
+
+# remember to break up the Id: in the line below
+sed \
+  -e 's/gstvideotemplate\.c/SOURCEFILE/g' \
+  -e "s/Videotemplate/$Template/g" \
+  -e "s/videotemplate/$template/g" \
+  -e "s/VIDEOTEMPLATE/$TEMPLATE/g" \
+  -e 's/\$I[d]: \([^$]*\)\$/\1/g' \
+  -e 's/SOURCEFILE/gstvideotemplate\.c/g' \
+  -e "s%MAKEFILTERVERSION%$id%g" \
+  "$srcfile" >"gst$template.c.tmp" && mv "gst$template.c.tmp" "gst$template.c"
+
diff --git a/gst/videofilter/meson.build b/gst/videofilter/meson.build
new file mode 100644
index 0000000000..acb5191f40
--- /dev/null
+++ b/gst/videofilter/meson.build
@@ -0,0 +1,18 @@
+# Sources of the combined "videofilter" plugin (see plugin.c for the
+# element registration).
+vfilter_sources = [
+  'plugin.c',
+  'gstvideoflip.c',
+  'gstvideobalance.c',
+  'gstgamma.c',
+  'gstvideomedian.c',
+]
+
+gstvideofilter = library('gstvideofilter',
+  vfilter_sources,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc],
+  dependencies : [gstbase_dep, gstvideo_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstvideofilter, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstvideofilter]
diff --git a/gst/videofilter/plugin.c b/gst/videofilter/plugin.c
new file mode 100644
index 0000000000..6c66bf8c87
--- /dev/null
+++ b/gst/videofilter/plugin.c
@@ -0,0 +1,48 @@
+/* GStreamer
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "gstgamma.h"
+#include "gstvideoflip.h"
+#include "gstvideobalance.h"
+#include "gstvideomedian.h"
+
+/* Plugin entry point: registers all videofilter elements. Succeeds if
+ * at least one element registered (results are OR-ed together). */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean ret = FALSE;
+
+  ret |= GST_ELEMENT_REGISTER (gamma, plugin);
+  ret |= GST_ELEMENT_REGISTER (videobalance, plugin);
+  ret |= GST_ELEMENT_REGISTER (videoflip, plugin);
+  ret |= GST_ELEMENT_REGISTER (videomedian, plugin);
+
+  return ret;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    videofilter,
+    "Video filters plugin",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
diff --git a/gst/videomixer/README b/gst/videomixer/README
new file mode 100644
index 0000000000..3a66145e8d
--- /dev/null
+++ b/gst/videomixer/README
@@ -0,0 +1,27 @@
+Video Mixer
+-----------
+
+A generic video mixer. It blends the AYUV buffers from all pads onto
+a new buffer. The new buffer has by default a checkerboard pattern but
+its color can be changed with a property.
+The mixer can mix streams with different framerates and video sizes. It
+uses the duration value of the buffer to schedule the rendering of the
+buffers. For streams with a different resolution than the final output
+resolution one can specify the position of the top left corner where this
+image should be placed with the pad properties xpos and ypos.
+The overall alpha value of a stream can also be specified with a pad
+property.
+By default, the streams are blended in the order that the pads were
+requested from the element. This can be overridden by changing the
+zorder pad property of the stream, a stream with lower zorder gets
+drawn first.
+
+
+TODO
+----
+
+- really implement zorder
+- take I420 yuv as well
+- output AYUV if possible.
+- implement different blend modes, some code is already done
+- use filter caps on srcpad to decide on the final output size
diff --git a/gst/videomixer/blend.c b/gst/videomixer/blend.c
new file mode 100644
index 0000000000..e13f9255d5
--- /dev/null
+++ b/gst/videomixer/blend.c
@@ -0,0 +1,1061 @@
+/*
+ * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2006 Mindfruit Bv.
+ * Author: Sjoerd Simons <sjoerd@luon.net>
+ * Author: Alex Ugarte <alexugarte@gmail.com>
+ * Copyright (C) 2009 Alex Ugarte <augarte@vicomtech.org>
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "blend.h"
+#include "videomixerorc.h"
+
+#include <string.h>
+
+#include <gst/video/video.h>
+
+/* Classic integer alpha blend: D*(256-alpha)/256 + S*alpha/256,
+ * with alpha scaled into [0,256]. */
+#define BLEND(D,S,alpha) (((D) * (256 - (alpha)) + (S) * (alpha)) >> 8)
+
+GST_DEBUG_CATEGORY_STATIC (gst_videomixer_blend_debug);
+#define GST_CAT_DEFAULT gst_videomixer_blend_debug
+
+/* Below are the implementations of everything */
+
+/* A32 is for AYUV, ARGB and BGRA */
+/* Generates method##_##name(): blends a packed 4-byte-per-pixel source
+ * frame onto the destination at (xpos, ypos), clipping the source
+ * rectangle against the destination bounds.  src_alpha (0.0..1.0) is
+ * scaled to [0,256]; a fully transparent source is a no-op. */
+#define BLEND_A32(name, method, LOOP) \
+static void \
+method##_ ##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+    gdouble src_alpha, GstVideoFrame * destframe) \
+{ \
+  guint s_alpha; \
+  gint src_stride, dest_stride; \
+  gint dest_width, dest_height; \
+  guint8 *src, *dest; \
+  gint src_width, src_height; \
+  \
+  src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+  src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+  src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
+  src_stride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
+  dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
+  dest_width = GST_VIDEO_FRAME_COMP_WIDTH (destframe, 0); \
+  dest_height = GST_VIDEO_FRAME_COMP_HEIGHT (destframe, 0); \
+  \
+  s_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
+  \
+  /* If it's completely transparent... we just return */ \
+  if (G_UNLIKELY (s_alpha == 0)) \
+    return; \
+  \
+  /* adjust src pointers for negative sizes */ \
+  if (xpos < 0) { \
+    src += -xpos * 4; \
+    src_width -= -xpos; \
+    xpos = 0; \
+  } \
+  if (ypos < 0) { \
+    src += -ypos * src_stride; \
+    src_height -= -ypos; \
+    ypos = 0; \
+  } \
+  /* adjust width/height if the src is bigger than dest */ \
+  if (xpos + src_width > dest_width) { \
+    src_width = dest_width - xpos; \
+  } \
+  if (ypos + src_height > dest_height) { \
+    src_height = dest_height - ypos; \
+  } \
+  \
+  if (src_height > 0 && src_width > 0) { \
+    dest = dest + 4 * xpos + (ypos * dest_stride); \
+    \
+    LOOP (dest, src, src_height, src_width, src_stride, dest_stride, s_alpha); \
+  } \
+}
+
+/* Generates the row-loop helper that caps alpha at 255 and delegates to
+ * the Orc-generated implementation for the given format/method. */
+#define BLEND_A32_LOOP(name, method) \
+static inline void \
+_##method##_loop_##name (guint8 * dest, const guint8 * src, gint src_height, \
+    gint src_width, gint src_stride, gint dest_stride, guint s_alpha) \
+{ \
+  s_alpha = MIN (255, s_alpha); \
+  video_mixer_orc_##method##_##name (dest, dest_stride, src, src_stride, \
+      s_alpha, src_width, src_height); \
+}
+
+BLEND_A32_LOOP (argb, blend);
+BLEND_A32_LOOP (bgra, blend);
+BLEND_A32_LOOP (argb, overlay);
+BLEND_A32_LOOP (bgra, overlay);
+
+/* The loop variant is chosen per host byte order: the Orc kernels address
+ * the 32-bit pixel bytewise, so ARGB/BGRA swap roles on big endian. */
+#if G_BYTE_ORDER == G_LITTLE_ENDIAN
+BLEND_A32 (argb, blend, _blend_loop_argb);
+BLEND_A32 (bgra, blend, _blend_loop_bgra);
+BLEND_A32 (argb, overlay, _overlay_loop_argb);
+BLEND_A32 (bgra, overlay, _overlay_loop_bgra);
+#else
+BLEND_A32 (argb, blend, _blend_loop_bgra);
+BLEND_A32 (bgra, blend, _blend_loop_argb);
+BLEND_A32 (argb, overlay, _overlay_loop_bgra);
+BLEND_A32 (bgra, overlay, _overlay_loop_argb);
+#endif
+
+/* Generates fill_checker_<name>_c(): fills a packed 4-byte frame with an
+ * 8x8-pixel checkerboard of values 80/160 (grey levels for RGB, luma
+ * levels with neutral chroma 128 for YUV), alpha fully opaque.
+ * A/C1/C2/C3 are the byte offsets of the components in a pixel. */
+#define A32_CHECKER_C(name, RGB, A, C1, C2, C3) \
+static void \
+fill_checker_##name##_c (GstVideoFrame * frame) \
+{ \
+  gint i, j; \
+  gint val; \
+  static const gint tab[] = { 80, 160, 80, 160 }; \
+  gint width, height; \
+  guint8 *dest; \
+  \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+  width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+  \
+  if (!RGB) { \
+    for (i = 0; i < height; i++) { \
+      for (j = 0; j < width; j++) { \
+        dest[A] = 0xff; \
+        dest[C1] = tab[((i & 0x8) >> 3) + ((j & 0x8) >> 3)]; \
+        dest[C2] = 128; \
+        dest[C3] = 128; \
+        dest += 4; \
+      } \
+    } \
+  } else { \
+    for (i = 0; i < height; i++) { \
+      for (j = 0; j < width; j++) { \
+        val = tab[((i & 0x8) >> 3) + ((j & 0x8) >> 3)]; \
+        dest[A] = 0xFF; \
+        dest[C1] = val; \
+        dest[C2] = val; \
+        dest[C3] = val; \
+        dest += 4; \
+      } \
+    } \
+  } \
+}
+
+A32_CHECKER_C (argb, TRUE, 0, 1, 2, 3);
+A32_CHECKER_C (bgra, TRUE, 3, 2, 1, 0);
+A32_CHECKER_C (ayuv, FALSE, 0, 1, 2, 3);
+
+/* BT.601-style YUV -> RGB conversion, clamped to [0,255]. */
+#define YUV_TO_R(Y,U,V) (CLAMP (1.164 * (Y - 16) + 1.596 * (V - 128), 0, 255))
+#define YUV_TO_G(Y,U,V) (CLAMP (1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128), 0, 255))
+#define YUV_TO_B(Y,U,V) (CLAMP (1.164 * (Y - 16) + 2.018 * (U - 128), 0, 255))
+
+/* Generates fill_color_<name>(): fills a packed 4-byte frame with one
+ * solid color given as Y/U/V; RGB formats convert first.  A/C1/C2/C3
+ * are the component bit shifts in the big-endian pixel word, which is
+ * splat across the frame via the Orc helper. */
+#define A32_COLOR(name, RGB, A, C1, C2, C3) \
+static void \
+fill_color_##name (GstVideoFrame * frame, gint Y, gint U, gint V) \
+{ \
+  gint c1, c2, c3; \
+  guint32 val; \
+  gint width, height; \
+  guint8 *dest; \
+  \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+  width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+  \
+  if (RGB) { \
+    c1 = YUV_TO_R (Y, U, V); \
+    c2 = YUV_TO_G (Y, U, V); \
+    c3 = YUV_TO_B (Y, U, V); \
+  } else { \
+    c1 = Y; \
+    c2 = U; \
+    c3 = V; \
+  } \
+  val = GUINT32_FROM_BE ((0xff << A) | (c1 << C1) | (c2 << C2) | (c3 << C3)); \
+  \
+  video_mixer_orc_splat_u32 ((guint32 *) dest, val, height * width); \
+}
+
+A32_COLOR (argb, TRUE, 24, 16, 8, 0);
+A32_COLOR (bgra, TRUE, 0, 8, 16, 24);
+A32_COLOR (abgr, TRUE, 24, 0, 8, 16);
+A32_COLOR (rgba, TRUE, 0, 24, 16, 8);
+A32_COLOR (ayuv, FALSE, 24, 16, 8, 0);
+
+/* Y444, Y42B, I420, YV12, Y41B */
+/* Generates two functions for a planar YUV format:
+ *  - _blend_<fmt>(): blends one component plane row by row, with fast
+ *    paths for fully transparent (no-op) and fully opaque (memcpy);
+ *  - blend_<fmt>(): rounds the position to the format's subsampling
+ *    alignment (x_round/y_round), clips the source rectangle against
+ *    the destination, converts pixel coordinates to per-component
+ *    coordinates via the GstVideoFormatInfo scaling macros, then blends
+ *    the Y, U and V planes in turn. */
+#define PLANAR_YUV_BLEND(format_name,format_enum,x_round,y_round,MEMCPY,BLENDLOOP) \
+inline static void \
+_blend_##format_name (const guint8 * src, guint8 * dest, \
+    gint src_stride, gint dest_stride, gint src_width, gint src_height, \
+    gdouble src_alpha) \
+{ \
+  gint i; \
+  gint b_alpha; \
+  \
+  /* If it's completely transparent... we just return */ \
+  if (G_UNLIKELY (src_alpha == 0.0)) { \
+    GST_INFO ("Fast copy (alpha == 0.0)"); \
+    return; \
+  } \
+  \
+  /* If it's completely opaque, we do a fast copy */ \
+  if (G_UNLIKELY (src_alpha == 1.0)) { \
+    GST_INFO ("Fast copy (alpha == 1.0)"); \
+    for (i = 0; i < src_height; i++) { \
+      MEMCPY (dest, src, src_width); \
+      src += src_stride; \
+      dest += dest_stride; \
+    } \
+    return; \
+  } \
+  \
+  b_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
+  \
+  BLENDLOOP(dest, dest_stride, src, src_stride, b_alpha, src_width, src_height); \
+} \
+\
+static void \
+blend_##format_name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+    gdouble src_alpha, GstVideoFrame * destframe) \
+{ \
+  const guint8 *b_src; \
+  guint8 *b_dest; \
+  gint b_src_width; \
+  gint b_src_height; \
+  gint xoffset = 0; \
+  gint yoffset = 0; \
+  gint src_comp_rowstride, dest_comp_rowstride; \
+  gint src_comp_height; \
+  gint src_comp_width; \
+  gint comp_ypos, comp_xpos; \
+  gint comp_yoffset, comp_xoffset; \
+  gint dest_width, dest_height; \
+  const GstVideoFormatInfo *info; \
+  gint src_width, src_height; \
+  \
+  src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+  src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+  \
+  info = srcframe->info.finfo; \
+  dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
+  dest_height = GST_VIDEO_FRAME_HEIGHT (destframe); \
+  \
+  xpos = x_round (xpos); \
+  ypos = y_round (ypos); \
+  \
+  b_src_width = src_width; \
+  b_src_height = src_height; \
+  \
+  /* adjust src pointers for negative sizes */ \
+  if (xpos < 0) { \
+    xoffset = -xpos; \
+    b_src_width -= -xpos; \
+    xpos = 0; \
+  } \
+  if (ypos < 0) { \
+    yoffset = -ypos; \
+    b_src_height -= -ypos; \
+    ypos = 0; \
+  } \
+  /* If x or y offset are larger then the source it's outside of the picture */ \
+  if (xoffset >= src_width || yoffset >= src_height) { \
+    return; \
+  } \
+  \
+  /* adjust width/height if the src is bigger than dest */ \
+  if (xpos + b_src_width > dest_width) { \
+    b_src_width = dest_width - xpos; \
+  } \
+  if (ypos + b_src_height > dest_height) { \
+    b_src_height = dest_height - ypos; \
+  } \
+  if (b_src_width <= 0 || b_src_height <= 0) { \
+    return; \
+  } \
+  \
+  /* First mix Y, then U, then V */ \
+  b_src = GST_VIDEO_FRAME_COMP_DATA (srcframe, 0); \
+  b_dest = GST_VIDEO_FRAME_COMP_DATA (destframe, 0); \
+  src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+  dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
+  src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 0, b_src_width); \
+  src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 0, b_src_height); \
+  comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 0, xpos); \
+  comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 0, ypos); \
+  comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 0, xoffset); \
+  comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 0, yoffset); \
+  _blend_##format_name (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride, \
+      b_dest + comp_xpos + comp_ypos * dest_comp_rowstride, \
+      src_comp_rowstride, \
+      dest_comp_rowstride, src_comp_width, src_comp_height, \
+      src_alpha); \
+  \
+  b_src = GST_VIDEO_FRAME_COMP_DATA (srcframe, 1); \
+  b_dest = GST_VIDEO_FRAME_COMP_DATA (destframe, 1); \
+  src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 1); \
+  dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 1); \
+  src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 1, b_src_width); \
+  src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 1, b_src_height); \
+  comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 1, xpos); \
+  comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 1, ypos); \
+  comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 1, xoffset); \
+  comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 1, yoffset); \
+  _blend_##format_name (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride, \
+      b_dest + comp_xpos + comp_ypos * dest_comp_rowstride, \
+      src_comp_rowstride, \
+      dest_comp_rowstride, src_comp_width, src_comp_height, \
+      src_alpha); \
+  \
+  b_src = GST_VIDEO_FRAME_COMP_DATA (srcframe, 2); \
+  b_dest = GST_VIDEO_FRAME_COMP_DATA (destframe, 2); \
+  src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 2); \
+  dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 2); \
+  src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 2, b_src_width); \
+  src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 2, b_src_height); \
+  comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 2, xpos); \
+  comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 2, ypos); \
+  comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 2, xoffset); \
+  comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 2, yoffset); \
+  _blend_##format_name (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride, \
+      b_dest + comp_xpos + comp_ypos * dest_comp_rowstride, \
+      src_comp_rowstride, \
+      dest_comp_rowstride, src_comp_width, src_comp_height, \
+      src_alpha); \
+}
+
+/* Generates fill_checker_<fmt>(): 8x8 luma checkerboard (80/160) on the
+ * Y plane, both chroma planes set to neutral 0x80. */
+#define PLANAR_YUV_FILL_CHECKER(format_name, format_enum, MEMSET) \
+static void \
+fill_checker_##format_name (GstVideoFrame * frame) \
+{ \
+  gint i, j; \
+  static const int tab[] = { 80, 160, 80, 160 }; \
+  guint8 *p; \
+  gint comp_width, comp_height; \
+  gint rowstride; \
+  \
+  p = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    for (j = 0; j < comp_width; j++) { \
+      *p++ = tab[((i & 0x8) >> 3) + ((j & 0x8) >> 3)]; \
+    } \
+    p += rowstride - comp_width; \
+  } \
+  \
+  p = GST_VIDEO_FRAME_COMP_DATA (frame, 1); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    MEMSET (p, 0x80, comp_width); \
+    p += rowstride; \
+  } \
+  \
+  p = GST_VIDEO_FRAME_COMP_DATA (frame, 2); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 2); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 2); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    MEMSET (p, 0x80, comp_width); \
+    p += rowstride; \
+  } \
+}
+
+/* Generates fill_color_<fmt>(): fills the three planes with the given
+ * Y, U and V values respectively, honoring each plane's stride. */
+#define PLANAR_YUV_FILL_COLOR(format_name,format_enum,MEMSET) \
+static void \
+fill_color_##format_name (GstVideoFrame * frame, \
+    gint colY, gint colU, gint colV) \
+{ \
+  guint8 *p; \
+  gint comp_width, comp_height; \
+  gint rowstride; \
+  gint i; \
+  \
+  p = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    MEMSET (p, colY, comp_width); \
+    p += rowstride; \
+  } \
+  \
+  p = GST_VIDEO_FRAME_COMP_DATA (frame, 1); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    MEMSET (p, colU, comp_width); \
+    p += rowstride; \
+  } \
+  \
+  p = GST_VIDEO_FRAME_COMP_DATA (frame, 2); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 2); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 2); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 2); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    MEMSET (p, colV, comp_width); \
+    p += rowstride; \
+  } \
+}
+
+/* No-op rounding for formats without chroma subsampling. */
+#define GST_ROUND_UP_1(x) (x)
+
+/* Instantiations: the rounding macros match each format's horizontal/
+ * vertical chroma subsampling (e.g. I420 needs even x and y). */
+PLANAR_YUV_BLEND (i420, GST_VIDEO_FORMAT_I420, GST_ROUND_UP_2,
+    GST_ROUND_UP_2, memcpy, video_mixer_orc_blend_u8);
+PLANAR_YUV_FILL_CHECKER (i420, GST_VIDEO_FORMAT_I420, memset);
+PLANAR_YUV_FILL_COLOR (i420, GST_VIDEO_FORMAT_I420, memset);
+PLANAR_YUV_FILL_COLOR (yv12, GST_VIDEO_FORMAT_YV12, memset);
+PLANAR_YUV_BLEND (y444, GST_VIDEO_FORMAT_Y444, GST_ROUND_UP_1,
+    GST_ROUND_UP_1, memcpy, video_mixer_orc_blend_u8);
+PLANAR_YUV_FILL_CHECKER (y444, GST_VIDEO_FORMAT_Y444, memset);
+PLANAR_YUV_FILL_COLOR (y444, GST_VIDEO_FORMAT_Y444, memset);
+PLANAR_YUV_BLEND (y42b, GST_VIDEO_FORMAT_Y42B, GST_ROUND_UP_2,
+    GST_ROUND_UP_1, memcpy, video_mixer_orc_blend_u8);
+PLANAR_YUV_FILL_CHECKER (y42b, GST_VIDEO_FORMAT_Y42B, memset);
+PLANAR_YUV_FILL_COLOR (y42b, GST_VIDEO_FORMAT_Y42B, memset);
+PLANAR_YUV_BLEND (y41b, GST_VIDEO_FORMAT_Y41B, GST_ROUND_UP_4,
+    GST_ROUND_UP_1, memcpy, video_mixer_orc_blend_u8);
+PLANAR_YUV_FILL_CHECKER (y41b, GST_VIDEO_FORMAT_Y41B, memset);
+PLANAR_YUV_FILL_COLOR (y41b, GST_VIDEO_FORMAT_Y41B, memset);
+
+/* NV12, NV21 */
+#define NV_YUV_BLEND(format_name,MEMCPY,BLENDLOOP) \
+inline static void \
+_blend_##format_name (const guint8 * src, guint8 * dest, \
+ gint src_stride, gint dest_stride, gint src_width, gint src_height, \
+ gdouble src_alpha) \
+{ \
+ gint i; \
+ gint b_alpha; \
+ \
+ /* If it's completely transparent... we just return */ \
+ if (G_UNLIKELY (src_alpha == 0.0)) { \
+ GST_INFO ("Fast copy (alpha == 0.0)"); \
+ return; \
+ } \
+ \
+ /* If it's completely opaque, we do a fast copy */ \
+ if (G_UNLIKELY (src_alpha == 1.0)) { \
+ GST_INFO ("Fast copy (alpha == 1.0)"); \
+ for (i = 0; i < src_height; i++) { \
+ MEMCPY (dest, src, src_width); \
+ src += src_stride; \
+ dest += dest_stride; \
+ } \
+ return; \
+ } \
+ \
+ b_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
+ \
+ BLENDLOOP(dest, dest_stride, src, src_stride, b_alpha, src_width, src_height); \
+} \
+\
+static void \
+blend_##format_name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+ gdouble src_alpha, GstVideoFrame * destframe) \
+{ \
+ const guint8 *b_src; \
+ guint8 *b_dest; \
+ gint b_src_width; \
+ gint b_src_height; \
+ gint xoffset = 0; \
+ gint yoffset = 0; \
+ gint src_comp_rowstride, dest_comp_rowstride; \
+ gint src_comp_height; \
+ gint src_comp_width; \
+ gint comp_ypos, comp_xpos; \
+ gint comp_yoffset, comp_xoffset; \
+ gint dest_width, dest_height; \
+ const GstVideoFormatInfo *info; \
+ gint src_width, src_height; \
+ \
+ src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+ src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+ \
+ info = srcframe->info.finfo; \
+ dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
+ dest_height = GST_VIDEO_FRAME_HEIGHT (destframe); \
+ \
+ xpos = GST_ROUND_UP_2 (xpos); \
+ ypos = GST_ROUND_UP_2 (ypos); \
+ \
+ b_src_width = src_width; \
+ b_src_height = src_height; \
+ \
+ /* adjust src pointers for negative sizes */ \
+ if (xpos < 0) { \
+ xoffset = -xpos; \
+ b_src_width -= -xpos; \
+ xpos = 0; \
+ } \
+ if (ypos < 0) { \
+ yoffset += -ypos; \
+ b_src_height -= -ypos; \
+ ypos = 0; \
+ } \
+ /* If x or y offset are larger then the source it's outside of the picture */ \
+ if (xoffset > src_width || yoffset > src_height) { \
+ return; \
+ } \
+ \
+ /* adjust width/height if the src is bigger than dest */ \
+ if (xpos + src_width > dest_width) { \
+ b_src_width = dest_width - xpos; \
+ } \
+ if (ypos + src_height > dest_height) { \
+ b_src_height = dest_height - ypos; \
+ } \
+ if (b_src_width < 0 || b_src_height < 0) { \
+ return; \
+ } \
+ \
+ /* First mix Y, then UV */ \
+ b_src = GST_VIDEO_FRAME_COMP_DATA (srcframe, 0); \
+ b_dest = GST_VIDEO_FRAME_COMP_DATA (destframe, 0); \
+ src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+ dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
+ src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 0, b_src_width); \
+ src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 0, b_src_height); \
+ comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 0, xpos); \
+ comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 0, ypos); \
+ comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 0, xoffset); \
+ comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 0, yoffset); \
+ _blend_##format_name (b_src + comp_xoffset + comp_yoffset * src_comp_rowstride, \
+ b_dest + comp_xpos + comp_ypos * dest_comp_rowstride, \
+ src_comp_rowstride, \
+ dest_comp_rowstride, src_comp_width, src_comp_height, \
+ src_alpha); \
+ \
+ b_src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 1); \
+ b_dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 1); \
+ src_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 1); \
+ dest_comp_rowstride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 1); \
+ src_comp_width = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH(info, 1, b_src_width); \
+ src_comp_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT(info, 1, b_src_height); \
+ comp_xpos = (xpos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 1, xpos); \
+ comp_ypos = (ypos == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 1, ypos); \
+ comp_xoffset = (xoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (info, 1, xoffset); \
+ comp_yoffset = (yoffset == 0) ? 0 : GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (info, 1, yoffset); \
+ _blend_##format_name (b_src + comp_xoffset * 2 + comp_yoffset * src_comp_rowstride, \
+ b_dest + comp_xpos * 2 + comp_ypos * dest_comp_rowstride, \
+ src_comp_rowstride, \
+ dest_comp_rowstride, 2 * src_comp_width, src_comp_height, \
+ src_alpha); \
+}
+
+/* NV_YUV_FILL_CHECKER: generates fill_checker_<format_name>() which paints
+ * the frame with the grey checkerboard used for transparent backgrounds:
+ * the luma plane alternates between 80 and 160 in 8x8 pixel squares and the
+ * interleaved chroma plane is set to the neutral value 0x80 (no colour). */
+#define NV_YUV_FILL_CHECKER(format_name, MEMSET) \
+static void \
+fill_checker_##format_name (GstVideoFrame * frame) \
+{ \
+  gint i, j; \
+  static const int tab[] = { 80, 160, 80, 160 }; \
+  guint8 *p; \
+  gint comp_width, comp_height; \
+  gint rowstride; \
+  \
+  /* Luma: per-pixel checker pattern */ \
+  p = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    for (j = 0; j < comp_width; j++) { \
+      *p++ = tab[((i & 0x8) >> 3) + ((j & 0x8) >> 3)]; \
+    } \
+    /* skip stride padding; p already advanced comp_width bytes */ \
+    p += rowstride - comp_width; \
+  } \
+  \
+  /* Chroma: both interleaved components of plane 1 get 0x80, hence the \
+   * comp_width * 2 byte count per row */ \
+  p = GST_VIDEO_FRAME_PLANE_DATA (frame, 1); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    MEMSET (p, 0x80, comp_width * 2); \
+    p += rowstride; \
+  } \
+}
+
+/* NV_YUV_FILL_COLOR: generates fill_color_<format_name>() which fills the
+ * whole frame with the constant YUV colour (colY, colU, colV).  The luma
+ * plane is memset row by row; the chroma samples are written through the
+ * per-component data pointers, stepping 2 bytes per sample (j*2) because
+ * the two chroma components are interleaved in one plane. */
+#define NV_YUV_FILL_COLOR(format_name,MEMSET) \
+static void \
+fill_color_##format_name (GstVideoFrame * frame, \
+    gint colY, gint colU, gint colV) \
+{ \
+  guint8 *y, *u, *v; \
+  gint comp_width, comp_height; \
+  gint rowstride; \
+  gint i, j; \
+  \
+  y = GST_VIDEO_FRAME_COMP_DATA (frame, 0); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    MEMSET (y, colY, comp_width); \
+    y += rowstride; \
+  } \
+  \
+  u = GST_VIDEO_FRAME_COMP_DATA (frame, 1); \
+  v = GST_VIDEO_FRAME_COMP_DATA (frame, 2); \
+  comp_width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 1); \
+  comp_height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1); \
+  rowstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1); \
+  \
+  for (i = 0; i < comp_height; i++) { \
+    for (j = 0; j < comp_width; j++) { \
+      u[j*2] = colU; \
+      v[j*2] = colV; \
+    } \
+    u += rowstride; \
+    v += rowstride; \
+  } \
+}
+
+/* Instantiate the NV12/NV21 helpers.  NV21 only swaps the chroma order, so
+ * it reuses the same blend/checker code; a separate fill_color_nv21 is not
+ * generated (blend.h aliases it to the NV12 variant). */
+NV_YUV_BLEND (nv12, memcpy, video_mixer_orc_blend_u8);
+NV_YUV_FILL_CHECKER (nv12, memset);
+NV_YUV_FILL_COLOR (nv12, memset);
+NV_YUV_BLEND (nv21, memcpy, video_mixer_orc_blend_u8);
+NV_YUV_FILL_CHECKER (nv21, memset);
+
+/* RGB, BGR, xRGB, xBGR, RGBx, BGRx */
+
+/* RGB_BLEND: generates blend_<name>() for packed RGB formats with bpp bytes
+ * per pixel.  Blends srcframe at (xpos, ypos) into destframe with the given
+ * global alpha (0.0-1.0), clipping the source rectangle against the
+ * destination; fully transparent and fully opaque frames take fast paths. */
+#define RGB_BLEND(name, bpp, MEMCPY, BLENDLOOP) \
+static void \
+blend_##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+    gdouble src_alpha, GstVideoFrame * destframe) \
+{ \
+  gint b_alpha; \
+  gint i; \
+  gint src_stride, dest_stride; \
+  gint dest_width, dest_height; \
+  guint8 *dest, *src; \
+  gint src_width, src_height; \
+  \
+  src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+  src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+  \
+  src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
+  \
+  dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
+  dest_height = GST_VIDEO_FRAME_HEIGHT (destframe); \
+  \
+  src_stride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+  dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
+  \
+  /* scale [0.0,1.0] alpha to the 0-256 fixed-point range used by orc */ \
+  b_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
+  \
+  /* adjust src pointers for negative sizes */ \
+  if (xpos < 0) { \
+    src += -xpos * bpp; \
+    src_width -= -xpos; \
+    xpos = 0; \
+  } \
+  if (ypos < 0) { \
+    src += -ypos * src_stride; \
+    src_height -= -ypos; \
+    ypos = 0; \
+  } \
+  /* adjust width/height if the src is bigger than dest */ \
+  if (xpos + src_width > dest_width) { \
+    src_width = dest_width - xpos; \
+  } \
+  if (ypos + src_height > dest_height) { \
+    src_height = dest_height - ypos; \
+  } \
+  /* Nothing visible: the source rectangle lies completely outside the \
+   * destination.  Without this guard (present in NV_YUV_BLEND as the \
+   * b_src_width/b_src_height < 0 check) MEMCPY/BLENDLOOP would be called \
+   * with a negative width or height. */ \
+  if (src_width <= 0 || src_height <= 0) { \
+    return; \
+  } \
+  \
+  dest = dest + bpp * xpos + (ypos * dest_stride); \
+  /* If it's completely transparent... we just return */ \
+  if (G_UNLIKELY (src_alpha == 0.0)) { \
+    GST_INFO ("Fast copy (alpha == 0.0)"); \
+    return; \
+  } \
+  \
+  /* If it's completely opaque, we do a fast copy */ \
+  if (G_UNLIKELY (src_alpha == 1.0)) { \
+    GST_INFO ("Fast copy (alpha == 1.0)"); \
+    for (i = 0; i < src_height; i++) { \
+      MEMCPY (dest, src, bpp * src_width); \
+      src += src_stride; \
+      dest += dest_stride; \
+    } \
+    return; \
+  } \
+  \
+  BLENDLOOP(dest, dest_stride, src, src_stride, b_alpha, src_width * bpp, src_height); \
+}
+
+/* RGB_FILL_CHECKER_C: generates fill_checker_<name>_c() drawing the grey
+ * checkerboard pattern into a packed RGB frame; r/g/b are the byte offsets
+ * of each colour channel inside one bpp-byte pixel. */
+#define RGB_FILL_CHECKER_C(name, bpp, r, g, b) \
+static void \
+fill_checker_##name##_c (GstVideoFrame * frame) \
+{ \
+  gint row, col; \
+  static const int tab[] = { 80, 160, 80, 160 }; \
+  gint stride, pad, width, height; \
+  guint8 *p; \
+  guint8 shade; \
+  \
+  width = GST_VIDEO_FRAME_WIDTH (frame); \
+  height = GST_VIDEO_FRAME_HEIGHT (frame); \
+  p = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+  stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+  pad = stride - width * bpp; \
+  \
+  /* 8x8 pixel cells; the same value in R, G and B yields grey */ \
+  for (row = 0; row < height; row++) { \
+    for (col = 0; col < width; col++) { \
+      shade = tab[((row & 0x8) >> 3) + ((col & 0x8) >> 3)]; \
+      p[r] = shade; \
+      p[g] = shade; \
+      p[b] = shade; \
+      p += bpp; \
+    } \
+    p += pad; \
+  } \
+}
+
+/* RGB_FILL_COLOR: generates fill_color_<name>() which converts the given
+ * Y/U/V colour to R'G'B' with the YUV_TO_* macros and fills every row of
+ * the frame through MEMSET_RGB.  NOTE(review): the bpp parameter is unused
+ * here — row layout is entirely delegated to MEMSET_RGB. */
+#define RGB_FILL_COLOR(name, bpp, MEMSET_RGB) \
+static void \
+fill_color_##name (GstVideoFrame * frame, \
+    gint colY, gint colU, gint colV) \
+{ \
+  gint red, green, blue; \
+  gint i; \
+  gint dest_stride; \
+  gint width, height; \
+  guint8 *dest; \
+  \
+  width = GST_VIDEO_FRAME_WIDTH (frame); \
+  height = GST_VIDEO_FRAME_HEIGHT (frame); \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+  dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+  \
+  red = YUV_TO_R (colY, colU, colV); \
+  green = YUV_TO_G (colY, colU, colV); \
+  blue = YUV_TO_B (colY, colU, colV); \
+  \
+  for (i = 0; i < height; i++) { \
+    MEMSET_RGB (dest, red, green, blue, width); \
+    dest += dest_stride; \
+  } \
+}
+
+/* MEMSET_RGB_C: generates _memset_<name>_c() filling one row of width
+ * 3-byte pixels; r/g/b are the byte offsets of each channel in a pixel. */
+#define MEMSET_RGB_C(name, r, g, b) \
+static inline void \
+_memset_##name##_c (guint8* dest, gint red, gint green, gint blue, gint width) { \
+  guint8 *end = dest + 3 * width; \
+  \
+  while (dest < end) { \
+    dest[r] = red; \
+    dest[g] = green; \
+    dest[b] = blue; \
+    dest += 3; \
+  } \
+}
+
+/* MEMSET_XRGB: generates _memset_<name>() filling one row of width 32-bit
+ * pixels via the orc splat kernel; r/g/b are the bit positions of each
+ * colour channel within the big-endian pixel word. */
+#define MEMSET_XRGB(name, r, g, b) \
+static inline void \
+_memset_##name (guint8* dest, gint red, gint green, gint blue, gint width) { \
+  guint32 val; \
+  \
+  /* Cast to guint32 before shifting: a channel value >= 128 shifted left \
+   * by 24 would overflow a signed int, which is undefined behaviour. */ \
+  val = GUINT32_FROM_BE (((guint32) red << r) | ((guint32) green << g) | \
+      ((guint32) blue << b)); \
+  video_mixer_orc_splat_u32 ((guint32 *) dest, val, width); \
+}
+
+/* Copy whole 32-bit pixels with the orc kernel; len is given in bytes, the
+ * kernel works in 4-byte units. */
+#define _orc_memcpy_u32(dest,src,len) video_mixer_orc_memcpy_u32((guint32 *) dest, (const guint32 *) src, len/4)
+
+/* Instantiate the RGB helpers.  The numeric arguments are byte offsets of
+ * R, G, B inside a pixel (RGB_FILL_CHECKER_C / MEMSET_RGB_C) or big-endian
+ * bit shifts (MEMSET_XRGB).  Variants not generated here are aliased in
+ * blend.h. */
+RGB_BLEND (rgb, 3, memcpy, video_mixer_orc_blend_u8);
+RGB_FILL_CHECKER_C (rgb, 3, 0, 1, 2);
+MEMSET_RGB_C (rgb, 0, 1, 2);
+RGB_FILL_COLOR (rgb_c, 3, _memset_rgb_c);
+
+MEMSET_RGB_C (bgr, 2, 1, 0);
+RGB_FILL_COLOR (bgr_c, 3, _memset_bgr_c);
+
+RGB_BLEND (xrgb, 4, _orc_memcpy_u32, video_mixer_orc_blend_u8);
+RGB_FILL_CHECKER_C (xrgb, 4, 1, 2, 3);
+MEMSET_XRGB (xrgb, 24, 16, 0);
+RGB_FILL_COLOR (xrgb, 4, _memset_xrgb);
+
+MEMSET_XRGB (xbgr, 0, 16, 24);
+RGB_FILL_COLOR (xbgr, 4, _memset_xbgr);
+
+MEMSET_XRGB (rgbx, 24, 16, 8);
+RGB_FILL_COLOR (rgbx, 4, _memset_rgbx);
+
+MEMSET_XRGB (bgrx, 8, 16, 24);
+RGB_FILL_COLOR (bgrx, 4, _memset_bgrx);
+
+/* YUY2, YVYU, UYVY */
+
+/* PACKED_422_BLEND: generates blend_<name>() for packed 4:2:2 YUV formats
+ * (2 bytes per pixel).  Blends srcframe at (xpos, ypos) into destframe with
+ * the given global alpha; xpos is rounded up to even so blending never
+ * starts in the middle of a luma/chroma macropixel. */
+#define PACKED_422_BLEND(name, MEMCPY, BLENDLOOP) \
+static void \
+blend_##name (GstVideoFrame * srcframe, gint xpos, gint ypos, \
+    gdouble src_alpha, GstVideoFrame * destframe) \
+{ \
+  gint b_alpha; \
+  gint i; \
+  gint src_stride, dest_stride; \
+  gint dest_width, dest_height; \
+  guint8 *src, *dest; \
+  gint src_width, src_height; \
+  \
+  src_width = GST_VIDEO_FRAME_WIDTH (srcframe); \
+  src_height = GST_VIDEO_FRAME_HEIGHT (srcframe); \
+  \
+  dest_width = GST_VIDEO_FRAME_WIDTH (destframe); \
+  dest_height = GST_VIDEO_FRAME_HEIGHT (destframe); \
+  \
+  src = GST_VIDEO_FRAME_PLANE_DATA (srcframe, 0); \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (destframe, 0); \
+  \
+  src_stride = GST_VIDEO_FRAME_COMP_STRIDE (srcframe, 0); \
+  dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (destframe, 0); \
+  \
+  /* scale [0.0,1.0] alpha to the 0-256 fixed-point range used by orc */ \
+  b_alpha = CLAMP ((gint) (src_alpha * 256), 0, 256); \
+  \
+  xpos = GST_ROUND_UP_2 (xpos); \
+  \
+  /* adjust src pointers for negative sizes */ \
+  if (xpos < 0) { \
+    src += -xpos * 2; \
+    src_width -= -xpos; \
+    xpos = 0; \
+  } \
+  if (ypos < 0) { \
+    src += -ypos * src_stride; \
+    src_height -= -ypos; \
+    ypos = 0; \
+  } \
+  \
+  /* adjust width/height if the src is bigger than dest */ \
+  if (xpos + src_width > dest_width) { \
+    src_width = dest_width - xpos; \
+  } \
+  if (ypos + src_height > dest_height) { \
+    src_height = dest_height - ypos; \
+  } \
+  /* Nothing visible: the source rectangle lies completely outside the \
+   * destination.  Without this guard (present in NV_YUV_BLEND) the copy \
+   * and blend loops below would run with a negative width or height. */ \
+  if (src_width <= 0 || src_height <= 0) { \
+    return; \
+  } \
+  \
+  dest = dest + 2 * xpos + (ypos * dest_stride); \
+  /* If it's completely transparent... we just return */ \
+  if (G_UNLIKELY (src_alpha == 0.0)) { \
+    GST_INFO ("Fast copy (alpha == 0.0)"); \
+    return; \
+  } \
+  \
+  /* If it's completely opaque, we do a fast copy */ \
+  if (G_UNLIKELY (src_alpha == 1.0)) { \
+    GST_INFO ("Fast copy (alpha == 1.0)"); \
+    for (i = 0; i < src_height; i++) { \
+      MEMCPY (dest, src, 2 * src_width); \
+      src += src_stride; \
+      dest += dest_stride; \
+    } \
+    return; \
+  } \
+  \
+  BLENDLOOP(dest, dest_stride, src, src_stride, b_alpha, 2 * src_width, src_height); \
+}
+
+/* PACKED_422_FILL_CHECKER_C: generates fill_checker_<name>_c() drawing the
+ * grey checkerboard into a packed 4:2:2 frame.  Y1/U/Y2/V are the byte
+ * offsets of each sample inside one 4-byte macropixel; chroma is set to
+ * the neutral value 128. */
+#define PACKED_422_FILL_CHECKER_C(name, Y1, U, Y2, V) \
+static void \
+fill_checker_##name##_c (GstVideoFrame * frame) \
+{ \
+  gint i, j; \
+  static const int tab[] = { 80, 160, 80, 160 }; \
+  gint dest_add; \
+  gint width, height; \
+  guint8 *dest; \
+  \
+  width = GST_VIDEO_FRAME_WIDTH (frame); \
+  width = GST_ROUND_UP_2 (width); \
+  height = GST_VIDEO_FRAME_HEIGHT (frame); \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+  dest_add = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0) - width * 2; \
+  /* width is counted in 2-pixel (4-byte) macropixels from here on */ \
+  width /= 2; \
+  \
+  for (i = 0; i < height; i++) { \
+    for (j = 0; j < width; j++) { \
+      dest[Y1] = tab[((i & 0x8) >> 3) + ((j & 0x8) >> 3)]; \
+      dest[Y2] = tab[((i & 0x8) >> 3) + ((j & 0x8) >> 3)]; \
+      dest[U] = 128; \
+      dest[V] = 128; \
+      dest += 4; \
+    } \
+    dest += dest_add; \
+  } \
+}
+
+/* PACKED_422_FILL_COLOR: generates fill_color_<name>() which fills the
+ * frame with a constant colour by splatting one 4-byte macropixel across
+ * every row.  Y1/U/Y2/V are the big-endian bit positions of each sample. */
+#define PACKED_422_FILL_COLOR(name, Y1, U, Y2, V) \
+static void \
+fill_color_##name (GstVideoFrame * frame, \
+    gint colY, gint colU, gint colV) \
+{ \
+  gint i; \
+  gint dest_stride; \
+  guint32 val; \
+  gint width, height; \
+  guint8 *dest; \
+  \
+  width = GST_VIDEO_FRAME_WIDTH (frame); \
+  width = GST_ROUND_UP_2 (width); \
+  height = GST_VIDEO_FRAME_HEIGHT (frame); \
+  dest = GST_VIDEO_FRAME_PLANE_DATA (frame, 0); \
+  dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0); \
+  /* width is counted in 2-pixel (4-byte) macropixels from here on */ \
+  width /= 2; \
+  \
+  /* Cast to guint32 before shifting: a sample value >= 128 shifted left \
+   * by 24 would overflow a signed int, which is undefined behaviour. */ \
+  val = GUINT32_FROM_BE (((guint32) colY << Y1) | ((guint32) colY << Y2) | \
+      ((guint32) colU << U) | ((guint32) colV << V)); \
+  \
+  for (i = 0; i < height; i++) { \
+    video_mixer_orc_splat_u32 ((guint32 *) dest, val, width); \
+    dest += dest_stride; \
+  } \
+}
+
+/* Instantiate the packed 4:2:2 helpers.  Only one blend and two checker
+ * variants are generated; the remaining formats are aliased in blend.h.
+ * Checker arguments are byte offsets, fill-colour arguments are big-endian
+ * bit shifts of Y1/U/Y2/V. */
+PACKED_422_BLEND (yuy2, memcpy, video_mixer_orc_blend_u8);
+PACKED_422_FILL_CHECKER_C (yuy2, 0, 1, 2, 3);
+PACKED_422_FILL_CHECKER_C (uyvy, 1, 0, 3, 2);
+PACKED_422_FILL_COLOR (yuy2, 24, 16, 8, 0);
+PACKED_422_FILL_COLOR (yvyu, 24, 0, 8, 16);
+PACKED_422_FILL_COLOR (uyvy, 16, 24, 0, 8);
+
+/* Function pointer definitions: the active implementation for each format
+ * is selected at runtime by gst_video_mixer_init_blend() below.  Formats
+ * noted as "equal" share an implementation via aliases in blend.h. */
+BlendFunction gst_video_mixer_blend_argb;
+BlendFunction gst_video_mixer_blend_bgra;
+BlendFunction gst_video_mixer_overlay_argb;
+BlendFunction gst_video_mixer_overlay_bgra;
+/* AYUV/ABGR is equal to ARGB, RGBA is equal to BGRA */
+BlendFunction gst_video_mixer_blend_y444;
+BlendFunction gst_video_mixer_blend_y42b;
+BlendFunction gst_video_mixer_blend_i420;
+/* I420 is equal to YV12 */
+BlendFunction gst_video_mixer_blend_nv12;
+BlendFunction gst_video_mixer_blend_nv21;
+BlendFunction gst_video_mixer_blend_y41b;
+BlendFunction gst_video_mixer_blend_rgb;
+/* BGR is equal to RGB */
+BlendFunction gst_video_mixer_blend_rgbx;
+/* BGRx, xRGB, xBGR are equal to RGBx */
+BlendFunction gst_video_mixer_blend_yuy2;
+/* YVYU and UYVY are equal to YUY2 */
+
+FillCheckerFunction gst_video_mixer_fill_checker_argb;
+FillCheckerFunction gst_video_mixer_fill_checker_bgra;
+/* ABGR is equal to ARGB, RGBA is equal to BGRA */
+FillCheckerFunction gst_video_mixer_fill_checker_ayuv;
+FillCheckerFunction gst_video_mixer_fill_checker_y444;
+FillCheckerFunction gst_video_mixer_fill_checker_y42b;
+FillCheckerFunction gst_video_mixer_fill_checker_i420;
+/* I420 is equal to YV12 */
+FillCheckerFunction gst_video_mixer_fill_checker_nv12;
+FillCheckerFunction gst_video_mixer_fill_checker_nv21;
+FillCheckerFunction gst_video_mixer_fill_checker_y41b;
+FillCheckerFunction gst_video_mixer_fill_checker_rgb;
+/* BGR is equal to RGB */
+FillCheckerFunction gst_video_mixer_fill_checker_xrgb;
+/* BGRx, xRGB, xBGR are equal to RGBx */
+FillCheckerFunction gst_video_mixer_fill_checker_yuy2;
+/* YVYU is equal to YUY2 */
+FillCheckerFunction gst_video_mixer_fill_checker_uyvy;
+
+FillColorFunction gst_video_mixer_fill_color_argb;
+FillColorFunction gst_video_mixer_fill_color_bgra;
+FillColorFunction gst_video_mixer_fill_color_abgr;
+FillColorFunction gst_video_mixer_fill_color_rgba;
+FillColorFunction gst_video_mixer_fill_color_ayuv;
+FillColorFunction gst_video_mixer_fill_color_y444;
+FillColorFunction gst_video_mixer_fill_color_y42b;
+FillColorFunction gst_video_mixer_fill_color_i420;
+FillColorFunction gst_video_mixer_fill_color_yv12;
+FillColorFunction gst_video_mixer_fill_color_nv12;
+/* NV21 is equal to NV12 */
+FillColorFunction gst_video_mixer_fill_color_y41b;
+FillColorFunction gst_video_mixer_fill_color_rgb;
+FillColorFunction gst_video_mixer_fill_color_bgr;
+FillColorFunction gst_video_mixer_fill_color_xrgb;
+FillColorFunction gst_video_mixer_fill_color_xbgr;
+FillColorFunction gst_video_mixer_fill_color_rgbx;
+FillColorFunction gst_video_mixer_fill_color_bgrx;
+FillColorFunction gst_video_mixer_fill_color_yuy2;
+FillColorFunction gst_video_mixer_fill_color_yvyu;
+FillColorFunction gst_video_mixer_fill_color_uyvy;
+
+/* Initialize the blending subsystem: set up the debug category and point
+ * every per-format function pointer at its implementation.  Intended to be
+ * called before any of the function pointers above are used. */
+void
+gst_video_mixer_init_blend (void)
+{
+  GST_DEBUG_CATEGORY_INIT (gst_videomixer_blend_debug, "videomixer_blend", 0,
+      "video mixer blending functions");
+
+  gst_video_mixer_blend_argb = blend_argb;
+  gst_video_mixer_blend_bgra = blend_bgra;
+  gst_video_mixer_overlay_argb = overlay_argb;
+  gst_video_mixer_overlay_bgra = overlay_bgra;
+  gst_video_mixer_blend_i420 = blend_i420;
+  gst_video_mixer_blend_nv12 = blend_nv12;
+  gst_video_mixer_blend_nv21 = blend_nv21;
+  gst_video_mixer_blend_y444 = blend_y444;
+  gst_video_mixer_blend_y42b = blend_y42b;
+  gst_video_mixer_blend_y41b = blend_y41b;
+  gst_video_mixer_blend_rgb = blend_rgb;
+  gst_video_mixer_blend_xrgb = blend_xrgb;
+  gst_video_mixer_blend_yuy2 = blend_yuy2;
+
+  gst_video_mixer_fill_checker_argb = fill_checker_argb_c;
+  gst_video_mixer_fill_checker_bgra = fill_checker_bgra_c;
+  gst_video_mixer_fill_checker_ayuv = fill_checker_ayuv_c;
+  gst_video_mixer_fill_checker_i420 = fill_checker_i420;
+  gst_video_mixer_fill_checker_nv12 = fill_checker_nv12;
+  gst_video_mixer_fill_checker_nv21 = fill_checker_nv21;
+  gst_video_mixer_fill_checker_y444 = fill_checker_y444;
+  gst_video_mixer_fill_checker_y42b = fill_checker_y42b;
+  gst_video_mixer_fill_checker_y41b = fill_checker_y41b;
+  gst_video_mixer_fill_checker_rgb = fill_checker_rgb_c;
+  gst_video_mixer_fill_checker_xrgb = fill_checker_xrgb_c;
+  gst_video_mixer_fill_checker_yuy2 = fill_checker_yuy2_c;
+  gst_video_mixer_fill_checker_uyvy = fill_checker_uyvy_c;
+
+  gst_video_mixer_fill_color_argb = fill_color_argb;
+  gst_video_mixer_fill_color_bgra = fill_color_bgra;
+  gst_video_mixer_fill_color_abgr = fill_color_abgr;
+  gst_video_mixer_fill_color_rgba = fill_color_rgba;
+  gst_video_mixer_fill_color_ayuv = fill_color_ayuv;
+  gst_video_mixer_fill_color_i420 = fill_color_i420;
+  gst_video_mixer_fill_color_yv12 = fill_color_yv12;
+  gst_video_mixer_fill_color_nv12 = fill_color_nv12;
+  gst_video_mixer_fill_color_y444 = fill_color_y444;
+  gst_video_mixer_fill_color_y42b = fill_color_y42b;
+  gst_video_mixer_fill_color_y41b = fill_color_y41b;
+  gst_video_mixer_fill_color_rgb = fill_color_rgb_c;
+  gst_video_mixer_fill_color_bgr = fill_color_bgr_c;
+  gst_video_mixer_fill_color_xrgb = fill_color_xrgb;
+  gst_video_mixer_fill_color_xbgr = fill_color_xbgr;
+  gst_video_mixer_fill_color_rgbx = fill_color_rgbx;
+  gst_video_mixer_fill_color_bgrx = fill_color_bgrx;
+  gst_video_mixer_fill_color_yuy2 = fill_color_yuy2;
+  gst_video_mixer_fill_color_yvyu = fill_color_yvyu;
+  gst_video_mixer_fill_color_uyvy = fill_color_uyvy;
+}
diff --git a/gst/videomixer/blend.h b/gst/videomixer/blend.h
new file mode 100644
index 0000000000..7d16c56209
--- /dev/null
+++ b/gst/videomixer/blend.h
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2009 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __BLEND_H__
+#define __BLEND_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+/* Function pointer types for blending a source frame into a destination
+ * frame, filling a frame with the transparency checkerboard and filling a
+ * frame with a constant colour (c1/c2/c3 carry Y/U/V values). */
+typedef void (*BlendFunction) (GstVideoFrame *srcframe, gint xpos, gint ypos, gdouble src_alpha, GstVideoFrame * destframe);
+typedef void (*FillCheckerFunction) (GstVideoFrame * frame);
+typedef void (*FillColorFunction) (GstVideoFrame * frame, gint c1, gint c2, gint c3);
+
+/* Formats that share an implementation are provided as plain macro aliases.
+ * Note the aliases must not carry a trailing ';' — it would become part of
+ * the expansion and break any expression-context use of the alias. */
+extern BlendFunction gst_video_mixer_blend_argb;
+extern BlendFunction gst_video_mixer_blend_bgra;
+#define gst_video_mixer_blend_ayuv gst_video_mixer_blend_argb
+#define gst_video_mixer_blend_abgr gst_video_mixer_blend_argb
+#define gst_video_mixer_blend_rgba gst_video_mixer_blend_bgra
+extern BlendFunction gst_video_mixer_overlay_argb;
+extern BlendFunction gst_video_mixer_overlay_bgra;
+#define gst_video_mixer_overlay_ayuv gst_video_mixer_overlay_argb
+#define gst_video_mixer_overlay_abgr gst_video_mixer_overlay_argb
+#define gst_video_mixer_overlay_rgba gst_video_mixer_overlay_bgra
+extern BlendFunction gst_video_mixer_blend_i420;
+#define gst_video_mixer_blend_yv12 gst_video_mixer_blend_i420
+extern BlendFunction gst_video_mixer_blend_nv12;
+extern BlendFunction gst_video_mixer_blend_nv21;
+extern BlendFunction gst_video_mixer_blend_y41b;
+extern BlendFunction gst_video_mixer_blend_y42b;
+extern BlendFunction gst_video_mixer_blend_y444;
+extern BlendFunction gst_video_mixer_blend_rgb;
+#define gst_video_mixer_blend_bgr gst_video_mixer_blend_rgb
+extern BlendFunction gst_video_mixer_blend_rgbx;
+#define gst_video_mixer_blend_bgrx gst_video_mixer_blend_rgbx
+#define gst_video_mixer_blend_xrgb gst_video_mixer_blend_rgbx
+#define gst_video_mixer_blend_xbgr gst_video_mixer_blend_rgbx
+extern BlendFunction gst_video_mixer_blend_yuy2;
+#define gst_video_mixer_blend_uyvy gst_video_mixer_blend_yuy2
+#define gst_video_mixer_blend_yvyu gst_video_mixer_blend_yuy2
+
+extern FillCheckerFunction gst_video_mixer_fill_checker_argb;
+#define gst_video_mixer_fill_checker_abgr gst_video_mixer_fill_checker_argb
+extern FillCheckerFunction gst_video_mixer_fill_checker_bgra;
+#define gst_video_mixer_fill_checker_rgba gst_video_mixer_fill_checker_bgra
+extern FillCheckerFunction gst_video_mixer_fill_checker_ayuv;
+extern FillCheckerFunction gst_video_mixer_fill_checker_i420;
+#define gst_video_mixer_fill_checker_yv12 gst_video_mixer_fill_checker_i420
+extern FillCheckerFunction gst_video_mixer_fill_checker_nv12;
+extern FillCheckerFunction gst_video_mixer_fill_checker_nv21;
+extern FillCheckerFunction gst_video_mixer_fill_checker_y41b;
+extern FillCheckerFunction gst_video_mixer_fill_checker_y42b;
+extern FillCheckerFunction gst_video_mixer_fill_checker_y444;
+extern FillCheckerFunction gst_video_mixer_fill_checker_rgb;
+#define gst_video_mixer_fill_checker_bgr gst_video_mixer_fill_checker_rgb
+extern FillCheckerFunction gst_video_mixer_fill_checker_rgbx;
+#define gst_video_mixer_fill_checker_bgrx gst_video_mixer_fill_checker_rgbx
+#define gst_video_mixer_fill_checker_xrgb gst_video_mixer_fill_checker_rgbx
+#define gst_video_mixer_fill_checker_xbgr gst_video_mixer_fill_checker_rgbx
+extern FillCheckerFunction gst_video_mixer_fill_checker_yuy2;
+#define gst_video_mixer_fill_checker_yvyu gst_video_mixer_fill_checker_yuy2
+extern FillCheckerFunction gst_video_mixer_fill_checker_uyvy;
+
+extern FillColorFunction gst_video_mixer_fill_color_argb;
+extern FillColorFunction gst_video_mixer_fill_color_abgr;
+extern FillColorFunction gst_video_mixer_fill_color_bgra;
+extern FillColorFunction gst_video_mixer_fill_color_rgba;
+extern FillColorFunction gst_video_mixer_fill_color_ayuv;
+extern FillColorFunction gst_video_mixer_fill_color_i420;
+extern FillColorFunction gst_video_mixer_fill_color_yv12;
+extern FillColorFunction gst_video_mixer_fill_color_nv12;
+#define gst_video_mixer_fill_color_nv21 gst_video_mixer_fill_color_nv12
+extern FillColorFunction gst_video_mixer_fill_color_y41b;
+extern FillColorFunction gst_video_mixer_fill_color_y42b;
+extern FillColorFunction gst_video_mixer_fill_color_y444;
+extern FillColorFunction gst_video_mixer_fill_color_rgb;
+extern FillColorFunction gst_video_mixer_fill_color_bgr;
+extern FillColorFunction gst_video_mixer_fill_color_xrgb;
+extern FillColorFunction gst_video_mixer_fill_color_xbgr;
+extern FillColorFunction gst_video_mixer_fill_color_rgbx;
+extern FillColorFunction gst_video_mixer_fill_color_bgrx;
+extern FillColorFunction gst_video_mixer_fill_color_yuy2;
+extern FillColorFunction gst_video_mixer_fill_color_yvyu;
+extern FillColorFunction gst_video_mixer_fill_color_uyvy;
+
+void gst_video_mixer_init_blend (void);
+
+#endif /* __BLEND_H__ */
diff --git a/gst/videomixer/meson.build b/gst/videomixer/meson.build
new file mode 100644
index 0000000000..2e1cb77743
--- /dev/null
+++ b/gst/videomixer/meson.build
@@ -0,0 +1,35 @@
+# videomixer plugin: blending primitives (blend.c) plus the element itself.
+vmixer_sources = [
+  'blend.c',
+  'videomixer2.c',
+]
+
+# Generate the orc-accelerated functions from videomixerorc.orc when the
+# orc compiler is available; otherwise fall back to the pre-generated
+# '-dist' copies shipped in the tarball.
+orcsrc = 'videomixerorc'
+if have_orcc
+  orc_h = custom_target(orcsrc + '.h',
+    input : orcsrc + '.orc',
+    output : orcsrc + '.h',
+    command : orcc_args + ['--header', '-o', '@OUTPUT@', '@INPUT@'])
+  orc_c = custom_target(orcsrc + '.c',
+    input : orcsrc + '.orc',
+    output : orcsrc + '.c',
+    command : orcc_args + ['--implementation', '-o', '@OUTPUT@', '@INPUT@'])
+  orc_targets += {'name': orcsrc, 'orc-source': files(orcsrc + '.orc'), 'header': orc_h, 'source': orc_c}
+else
+  orc_h = configure_file(input : orcsrc + '-dist.h',
+    output : orcsrc + '.h',
+    copy : true)
+  orc_c = configure_file(input : orcsrc + '-dist.c',
+    output : orcsrc + '.c',
+    copy : true)
+endif
+
+gstvideomixer = library('gstvideomixer',
+  vmixer_sources, orc_c, orc_h,
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc],
+  dependencies : [orc_dep, gstvideo_dep, gstbase_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstvideomixer, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstvideomixer]
diff --git a/gst/videomixer/videomixer2.c b/gst/videomixer/videomixer2.c
new file mode 100644
index 0000000000..0ecc0c9224
--- /dev/null
+++ b/gst/videomixer/videomixer2.c
@@ -0,0 +1,2300 @@
+/* Generic video mixer plugin
+ * Copyright (C) 2004, 2008 Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-videomixer
+ * @title: videomixer
+ *
+ * IMPORTANT: #videomixer is deprecated in favor of #compositor, please do not
+ * use this element in newly-written code!
+ *
+ * Videomixer can accept AYUV, ARGB and BGRA video streams. For each of the requested
+ * sink pads it will compare the incoming geometry and framerate to define the
+ * output parameters. Indeed output video frames will have the geometry of the
+ * biggest incoming video stream and the framerate of the fastest incoming one.
+ *
+ * Videomixer will do colorspace conversion.
+ *
+ * Individual parameters for each input stream can be configured on the
+ * videomixer pads.
+ *
+ * ## Sample pipelines
+ * |[
+ * gst-launch-1.0 \
+ * videotestsrc pattern=1 ! \
+ * video/x-raw,format=AYUV,framerate=\(fraction\)10/1,width=100,height=100 ! \
+ * videobox border-alpha=0 top=-70 bottom=-70 right=-220 ! \
+ * videomixer name=mix sink_0::alpha=0.7 sink_1::alpha=0.5 ! \
+ * videoconvert ! xvimagesink \
+ * videotestsrc ! \
+ * video/x-raw,format=AYUV,framerate=\(fraction\)5/1,width=320,height=240 ! mix.
+ * ]| A pipeline to demonstrate videomixer used together with videobox.
+ * This should show a 320x240 pixels video test source with some transparency
+ * showing the background checker pattern. Another video test source with just
+ * the snow pattern of 100x100 pixels is overlaid on top of the first one on
+ * the left vertically centered with a small transparency showing the first
+ * video test source behind and the checker pattern under it. Note that the
+ * framerate of the output video is 10 frames per second.
+ * |[
+ * gst-launch-1.0 videotestsrc pattern=1 ! \
+ * video/x-raw, framerate=\(fraction\)10/1, width=100, height=100 ! \
+ * videomixer name=mix ! videoconvert ! ximagesink \
+ * videotestsrc ! \
+ * video/x-raw, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
+ * ]| A pipeline to demonstrate bgra mixing. (This does not demonstrate alpha blending).
+ * |[
+ * gst-launch-1.0 videotestsrc pattern=1 ! \
+ * video/x-raw,format=I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
+ * videomixer name=mix ! videoconvert ! ximagesink \
+ * videotestsrc ! \
+ * video/x-raw,format=I420, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
+ * ]| A pipeline to test I420
+ * |[
+ * gst-launch-1.0 videomixer name=mixer sink_1::alpha=0.5 sink_1::xpos=50 sink_1::ypos=50 ! \
+ * videoconvert ! ximagesink \
+ * videotestsrc pattern=snow timestamp-offset=3000000000 ! \
+ * "video/x-raw,format=AYUV,width=640,height=480,framerate=(fraction)30/1" ! \
+ * timeoverlay ! queue2 ! mixer. \
+ * videotestsrc pattern=smpte ! \
+ * "video/x-raw,format=AYUV,width=800,height=600,framerate=(fraction)10/1" ! \
+ * timeoverlay ! queue2 ! mixer.
+ * ]| A pipeline to demonstrate synchronized mixing (the second stream starts after 3 seconds)
+ *
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+
+#include "videomixer2.h"
+#include "videomixer2pad.h"
+
+#ifdef DISABLE_ORC
+#define orc_memset memset
+#else
+#include <orc/orcfunctions.h>
+#endif
+
+GST_DEBUG_CATEGORY_STATIC (gst_videomixer2_debug);
+#define GST_CAT_DEFAULT gst_videomixer2_debug
+
+#define GST_VIDEO_MIXER2_GET_LOCK(mix) \
+ (&GST_VIDEO_MIXER2(mix)->lock)
+#define GST_VIDEO_MIXER2_LOCK(mix) \
+ (g_mutex_lock(GST_VIDEO_MIXER2_GET_LOCK (mix)))
+#define GST_VIDEO_MIXER2_UNLOCK(mix) \
+ (g_mutex_unlock(GST_VIDEO_MIXER2_GET_LOCK (mix)))
+#define GST_VIDEO_MIXER2_GET_SETCAPS_LOCK(mix) \
+ (&GST_VIDEO_MIXER2(mix)->setcaps_lock)
+#define GST_VIDEO_MIXER2_SETCAPS_LOCK(mix) \
+ (g_mutex_lock(GST_VIDEO_MIXER2_GET_SETCAPS_LOCK (mix)))
+#define GST_VIDEO_MIXER2_SETCAPS_UNLOCK(mix) \
+ (g_mutex_unlock(GST_VIDEO_MIXER2_GET_SETCAPS_LOCK (mix)))
+
+#define FORMATS " { AYUV, BGRA, ARGB, RGBA, ABGR, Y444, Y42B, YUY2, UYVY, "\
+ " YVYU, I420, YV12, NV12, NV21, Y41B, RGB, BGR, xRGB, xBGR, "\
+ " RGBx, BGRx } "
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
+ );
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (FORMATS))
+ );
+
+static void gst_videomixer2_child_proxy_init (gpointer g_iface,
+ gpointer iface_data);
+static gboolean gst_videomixer2_push_sink_event (GstVideoMixer2 * mix,
+ GstEvent * event);
+static void gst_videomixer2_release_pad (GstElement * element, GstPad * pad);
+static void gst_videomixer2_reset_qos (GstVideoMixer2 * mix);
+
+/* Per-sinkpad collect data: extends GstCollectData with the buffers and
+ * timing state needed to decide what to blend for the next output frame */
+struct _GstVideoMixer2Collect
+{
+ GstCollectData collect; /* we extend the CollectData */
+
+ GstVideoMixer2Pad *mixpad;
+
+ GstBuffer *queued; /* buffer for which we don't know the end time yet */
+ GstVideoInfo queued_vinfo;
+
+ GstBuffer *buffer; /* buffer that should be blended now */
+ GstVideoInfo buffer_vinfo;
+
+ /* running-time interval covered by @buffer; -1 when unset */
+ GstClockTime start_time;
+ GstClockTime end_time;
+};
+
+/* Defaults and property ids for the per-pad properties */
+#define DEFAULT_PAD_ZORDER 0
+#define DEFAULT_PAD_XPOS 0
+#define DEFAULT_PAD_YPOS 0
+#define DEFAULT_PAD_ALPHA 1.0
+enum
+{
+ PROP_PAD_0,
+ PROP_PAD_ZORDER,
+ PROP_PAD_XPOS,
+ PROP_PAD_YPOS,
+ PROP_PAD_ALPHA
+};
+
+G_DEFINE_TYPE (GstVideoMixer2Pad, gst_videomixer2_pad, GST_TYPE_PAD);
+
+/* GstCollectPads destroy notify for our per-pad collect data: drops the
+ * buffer pending for blending. */
+static void
+gst_videomixer2_collect_free (GstCollectData * data)
+{
+ GstVideoMixer2Collect *cdata = (GstVideoMixer2Collect *) data;
+
+ /* NOTE(review): cdata->queued is not released here -- presumably it is
+ * always dropped elsewhere (e.g. on reset/EOS); verify, otherwise a
+ * queued buffer could leak on pad release */
+ gst_buffer_replace (&cdata->buffer, NULL);
+}
+
+static gboolean gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix,
+ GstCaps * caps);
+
+/* (Re)computes and sets the source pad caps from the current sink pads.
+ *
+ * The output geometry is the largest width/height over all negotiated
+ * sink pads (including positive x/y offsets) and the output framerate is
+ * the fastest sink framerate, falling back to 25/1 when nothing usable is
+ * known. Holds the setcaps lock across the whole operation and the mixer
+ * lock while scanning pads. Returns FALSE if negotiation fails.
+ */
+static gboolean
+gst_videomixer2_update_src_caps (GstVideoMixer2 * mix)
+{
+ GSList *l;
+ gint best_width = -1, best_height = -1;
+ gdouble best_fps = -1, cur_fps;
+ gint best_fps_n = -1, best_fps_d = -1;
+ gboolean ret = TRUE;
+
+ GST_VIDEO_MIXER2_SETCAPS_LOCK (mix);
+ GST_VIDEO_MIXER2_LOCK (mix);
+
+ for (l = mix->sinkpads; l; l = l->next) {
+ GstVideoMixer2Pad *mpad = l->data;
+ gint this_width, this_height;
+ gint fps_n, fps_d;
+ gint width, height;
+
+ fps_n = GST_VIDEO_INFO_FPS_N (&mpad->info);
+ fps_d = GST_VIDEO_INFO_FPS_D (&mpad->info);
+ width = GST_VIDEO_INFO_WIDTH (&mpad->info);
+ height = GST_VIDEO_INFO_HEIGHT (&mpad->info);
+
+ /* pad not negotiated yet */
+ if (width == 0 || height == 0)
+ continue;
+
+ /* a pad shifted right/down grows the output canvas */
+ this_width = width + MAX (mpad->xpos, 0);
+ this_height = height + MAX (mpad->ypos, 0);
+
+ if (best_width < this_width)
+ best_width = this_width;
+ if (best_height < this_height)
+ best_height = this_height;
+
+ if (fps_d == 0)
+ cur_fps = 0.0;
+ else
+ gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);
+
+ if (best_fps < cur_fps) {
+ best_fps = cur_fps;
+ best_fps_n = fps_n;
+ best_fps_d = fps_d;
+ }
+ }
+
+ /* no input provided a usable framerate: default to 25/1 */
+ if (best_fps_n <= 0 || best_fps_d <= 0 || best_fps == 0.0) {
+ best_fps_n = 25;
+ best_fps_d = 1;
+ best_fps = 25.0;
+ }
+
+ if (best_width > 0 && best_height > 0 && best_fps > 0) {
+ GstCaps *caps, *peercaps;
+ GstStructure *s;
+ GstVideoInfo info;
+
+ /* a framerate change resets the timestamp bookkeeping */
+ if (GST_VIDEO_INFO_FPS_N (&mix->info) != best_fps_n ||
+ GST_VIDEO_INFO_FPS_D (&mix->info) != best_fps_d) {
+ if (mix->segment.position != -1) {
+ mix->ts_offset = mix->segment.position - mix->segment.start;
+ mix->nframes = 0;
+ }
+ }
+ gst_video_info_init (&info);
+ gst_video_info_set_format (&info, GST_VIDEO_INFO_FORMAT (&mix->info),
+ best_width, best_height);
+ info.fps_n = best_fps_n;
+ info.fps_d = best_fps_d;
+ info.par_n = GST_VIDEO_INFO_PAR_N (&mix->info);
+ info.par_d = GST_VIDEO_INFO_PAR_D (&mix->info);
+
+ caps = gst_video_info_to_caps (&info);
+
+ peercaps = gst_pad_peer_query_caps (mix->srcpad, NULL);
+ if (peercaps && !gst_caps_can_intersect (peercaps, caps)) {
+ GstCaps *tmp;
+
+ /* our ideal caps are not acceptable downstream: loosen geometry and
+ * framerate to full ranges and fixate to the nearest match */
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT, "height",
+ GST_TYPE_INT_RANGE, 1, G_MAXINT, "framerate", GST_TYPE_FRACTION_RANGE,
+ 0, 1, G_MAXINT, 1, NULL);
+
+ tmp = gst_caps_intersect (caps, peercaps);
+ gst_caps_unref (caps);
+ gst_caps_unref (peercaps);
+ peercaps = NULL;
+ caps = tmp;
+ if (gst_caps_is_empty (caps)) {
+ GST_DEBUG_OBJECT (mix, "empty caps");
+ ret = FALSE;
+ GST_VIDEO_MIXER2_UNLOCK (mix);
+ goto done;
+ }
+
+ caps = gst_caps_truncate (caps);
+ s = gst_caps_get_structure (caps, 0);
+ gst_structure_fixate_field_nearest_int (s, "width", best_width);
+ gst_structure_fixate_field_nearest_int (s, "height", best_height);
+ gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n,
+ best_fps_d);
+
+ gst_structure_get_int (s, "width", &info.width);
+ gst_structure_get_int (s, "height", &info.height);
+ /* FIX: the framerate is stored in the "framerate" field; reading the
+ * non-existent "fraction" field left info.fps_n/fps_d at the values
+ * computed before the downstream intersection */
+ gst_structure_get_fraction (s, "framerate", &info.fps_n, &info.fps_d);
+ }
+ if (peercaps)
+ gst_caps_unref (peercaps);
+
+ gst_caps_unref (caps);
+ caps = gst_video_info_to_caps (&info);
+
+ GST_VIDEO_MIXER2_UNLOCK (mix);
+ ret = gst_videomixer2_src_setcaps (mix->srcpad, mix, caps);
+ gst_caps_unref (caps);
+ } else {
+ GST_VIDEO_MIXER2_UNLOCK (mix);
+ }
+
+done:
+ GST_VIDEO_MIXER2_SETCAPS_UNLOCK (mix);
+
+ return ret;
+}
+
+/* Picks the output format and (re)creates per-pad converters.
+ *
+ * The "best" format is the most common format among the negotiated sink
+ * pads that downstream can accept -- except that as soon as one pad has
+ * alpha, only alpha-capable formats are considered. Pads whose format,
+ * colorimetry or chroma siting differ from the chosen output get a
+ * GstVideoConverter; others get none. Returns FALSE on negotiation or
+ * converter-creation failure. Caller must hold the mixer lock.
+ */
+static gboolean
+gst_videomixer2_update_converters (GstVideoMixer2 * mix)
+{
+ GSList *tmp;
+ GstVideoFormat best_format;
+ GstVideoInfo best_info;
+ GstVideoMixer2Pad *pad;
+ gboolean need_alpha = FALSE;
+ gboolean at_least_one_alpha = FALSE;
+ GstCaps *downstream_caps;
+ GstCaps *possible_caps;
+ gchar *best_colorimetry;
+ const gchar *best_chroma;
+ GHashTable *formats_table;
+ gint best_format_number = 0;
+
+ best_format = GST_VIDEO_FORMAT_UNKNOWN;
+ gst_video_info_init (&best_info);
+
+ downstream_caps = gst_pad_get_allowed_caps (mix->srcpad);
+
+ if (!downstream_caps || gst_caps_is_empty (downstream_caps)) {
+ if (downstream_caps)
+ gst_caps_unref (downstream_caps);
+ return FALSE;
+ }
+
+ /* maps GstVideoFormat -> number of sink pads using it */
+ formats_table = g_hash_table_new (g_direct_hash, g_direct_equal);
+
+ /* first find new preferred format */
+ for (tmp = mix->sinkpads; tmp; tmp = tmp->next) {
+ GstStructure *s;
+ gint format_number;
+
+ pad = tmp->data;
+
+ if (!pad->info.finfo)
+ continue;
+
+ if (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
+ at_least_one_alpha = TRUE;
+
+ /* If we want alpha, disregard all the other formats */
+ if (need_alpha && !(pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA))
+ continue;
+
+ /* This can happen if we release a pad and another pad hasn't been negotiated yet */
+ if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
+ continue;
+
+ possible_caps = gst_video_info_to_caps (&pad->info);
+
+ /* ignore fields the mixer can adapt itself */
+ s = gst_caps_get_structure (possible_caps, 0);
+ gst_structure_remove_fields (s, "width", "height", "framerate",
+ "pixel-aspect-ratio", "interlace-mode", NULL);
+
+ /* Can downstream accept this format ? */
+ if (!gst_caps_can_intersect (downstream_caps, possible_caps)) {
+ gst_caps_unref (possible_caps);
+ continue;
+ }
+
+ gst_caps_unref (possible_caps);
+
+ format_number =
+ GPOINTER_TO_INT (g_hash_table_lookup (formats_table,
+ GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info))));
+ format_number += 1;
+
+ g_hash_table_replace (formats_table,
+ GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info)),
+ GINT_TO_POINTER (format_number));
+
+ /* If that pad is the first with alpha, set it as the new best format */
+ if (!need_alpha && (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
+ need_alpha = TRUE;
+ best_format = GST_VIDEO_INFO_FORMAT (&pad->info);
+ best_info = pad->info;
+ best_format_number = format_number;
+ } else if (format_number > best_format_number) {
+ best_format = GST_VIDEO_INFO_FORMAT (&pad->info);
+ best_info = pad->info;
+ best_format_number = format_number;
+ }
+ }
+
+ g_hash_table_unref (formats_table);
+
+ /* no usable sink format: let downstream decide */
+ if (best_format == GST_VIDEO_FORMAT_UNKNOWN) {
+ downstream_caps = gst_caps_fixate (downstream_caps);
+ gst_video_info_from_caps (&best_info, downstream_caps);
+ best_format = GST_VIDEO_INFO_FORMAT (&best_info);
+ }
+
+ gst_caps_unref (downstream_caps);
+
+ if (at_least_one_alpha
+ && !(best_info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
+ GST_ELEMENT_ERROR (mix, CORE, NEGOTIATION,
+ ("At least one of the input pads contains alpha, but downstream can't support alpha."),
+ ("Either convert your inputs to not contain alpha or add a videoconvert after the mixer"));
+ return FALSE;
+ }
+
+ /* best_colorimetry is allocated and must be freed on every path below;
+ * best_chroma is a static string and must not be freed */
+ best_colorimetry = gst_video_colorimetry_to_string (&(best_info.colorimetry));
+ best_chroma = gst_video_chroma_to_string (best_info.chroma_site);
+
+ /* a framerate change resets the timestamp bookkeeping */
+ if (GST_VIDEO_INFO_FPS_N (&mix->info) != GST_VIDEO_INFO_FPS_N (&best_info) ||
+ GST_VIDEO_INFO_FPS_D (&mix->info) != GST_VIDEO_INFO_FPS_D (&best_info)) {
+ if (mix->segment.position != -1) {
+ mix->ts_offset = mix->segment.position - mix->segment.start;
+ mix->nframes = 0;
+ } else {
+ mix->ts_offset += gst_util_uint64_scale_round (mix->nframes,
+ GST_SECOND * GST_VIDEO_INFO_FPS_D (&mix->info),
+ GST_VIDEO_INFO_FPS_N (&mix->info));
+ mix->nframes = 0;
+ }
+ }
+
+ mix->info = best_info;
+
+ GST_DEBUG_OBJECT (mix,
+ "The output format will now be : %d with colorimetry : %s and chroma : %s",
+ best_format, best_colorimetry, best_chroma);
+
+ /* Then browse the sinks once more, setting or unsetting conversion if needed */
+ for (tmp = mix->sinkpads; tmp; tmp = tmp->next) {
+ gchar *colorimetry;
+ const gchar *chroma;
+
+ pad = tmp->data;
+
+ if (!pad->info.finfo)
+ continue;
+
+ if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
+ continue;
+
+ if (pad->convert)
+ gst_video_converter_free (pad->convert);
+
+ pad->convert = NULL;
+
+ colorimetry = gst_video_colorimetry_to_string (&(pad->info.colorimetry));
+ chroma = gst_video_chroma_to_string (pad->info.chroma_site);
+
+ if (best_format != GST_VIDEO_INFO_FORMAT (&pad->info) ||
+ g_strcmp0 (colorimetry, best_colorimetry) ||
+ g_strcmp0 (chroma, best_chroma)) {
+ /* convert only format/chroma/colorimetry; geometry stays the pad's */
+ GstVideoInfo tmp_info = pad->info;
+ tmp_info.finfo = best_info.finfo;
+ tmp_info.chroma_site = best_info.chroma_site;
+ tmp_info.colorimetry = best_info.colorimetry;
+
+ GST_DEBUG_OBJECT (pad, "This pad will be converted from %d to %d",
+ GST_VIDEO_INFO_FORMAT (&pad->info),
+ GST_VIDEO_INFO_FORMAT (&best_info));
+ pad->convert = gst_video_converter_new (&pad->info, &tmp_info, NULL);
+ pad->need_conversion_update = TRUE;
+ if (!pad->convert) {
+ g_free (colorimetry);
+ g_free (best_colorimetry);
+ GST_WARNING ("No path found for conversion");
+ return FALSE;
+ }
+ } else {
+ GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
+ }
+ g_free (colorimetry);
+ }
+
+ g_free (best_colorimetry);
+ return TRUE;
+}
+
+/* Sink pad setcaps: validates the new caps against the current output
+ * (PAR and interlace mode must match once output caps exist), stores the
+ * parsed video info on the pad and renegotiates converters and src caps.
+ * Returns FALSE if the caps are unparsable or incompatible.
+ */
+static gboolean
+gst_videomixer2_pad_sink_setcaps (GstPad * pad, GstObject * parent,
+ GstCaps * caps)
+{
+ GstVideoMixer2 *mix;
+ GstVideoMixer2Pad *mixpad;
+ GstVideoInfo info;
+ gboolean ret = FALSE;
+
+ GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);
+
+ mix = GST_VIDEO_MIXER2 (parent);
+ mixpad = GST_VIDEO_MIXER2_PAD (pad);
+
+ if (!gst_video_info_from_caps (&info, caps)) {
+ GST_ERROR_OBJECT (pad, "Failed to parse caps");
+ goto beach;
+ }
+
+ GST_VIDEO_MIXER2_LOCK (mix);
+ /* once output caps exist, all inputs must agree on PAR and interlacing */
+ if (GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN) {
+ if (GST_VIDEO_INFO_PAR_N (&mix->info) != GST_VIDEO_INFO_PAR_N (&info)
+ || GST_VIDEO_INFO_PAR_D (&mix->info) != GST_VIDEO_INFO_PAR_D (&info) ||
+ GST_VIDEO_INFO_INTERLACE_MODE (&mix->info) !=
+ GST_VIDEO_INFO_INTERLACE_MODE (&info)) {
+ GST_DEBUG_OBJECT (pad,
+ "got input caps %" GST_PTR_FORMAT ", but " "current caps are %"
+ GST_PTR_FORMAT, caps, mix->current_caps);
+ GST_VIDEO_MIXER2_UNLOCK (mix);
+ return FALSE;
+ }
+ }
+
+ mixpad->info = info;
+
+ /* keep the collect thread out while renegotiating */
+ GST_COLLECT_PADS_STREAM_LOCK (mix->collect);
+
+ ret = gst_videomixer2_update_converters (mix);
+
+ GST_VIDEO_MIXER2_UNLOCK (mix);
+ if (ret)
+ ret = gst_videomixer2_update_src_caps (mix);
+ GST_COLLECT_PADS_STREAM_UNLOCK (mix->collect);
+
+beach:
+ return ret;
+}
+
+/* Sink pad CAPS query: derives what a sink pad may accept from the src
+ * pad's current (or template) caps, with geometry/framerate opened up to
+ * full ranges and format/colorimetry/chroma-site removed since the mixer
+ * converts those. Result is intersected with @filter and the template.
+ * Returns a new caps reference.
+ */
+static GstCaps *
+gst_videomixer2_pad_sink_getcaps (GstPad * pad, GstVideoMixer2 * mix,
+ GstCaps * filter)
+{
+ GstCaps *srccaps;
+ GstCaps *template_caps;
+ GstCaps *filtered_caps;
+ GstCaps *returned_caps;
+ GstStructure *s;
+ gboolean had_current_caps = TRUE;
+ gint i, n;
+
+ template_caps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));
+
+ srccaps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));
+ if (srccaps == NULL) {
+ had_current_caps = FALSE;
+ srccaps = template_caps;
+ }
+
+ /* NOTE: when !had_current_caps our template_caps reference is consumed
+ * through srccaps here; template_caps stays usable below because the pad
+ * template itself keeps it alive */
+ srccaps = gst_caps_make_writable (srccaps);
+
+ n = gst_caps_get_size (srccaps);
+ for (i = 0; i < n; i++) {
+ s = gst_caps_get_structure (srccaps, i);
+ gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
+ "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+ if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
+ gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
+ NULL);
+
+ /* the mixer converts these itself */
+ gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
+ NULL);
+ }
+
+ filtered_caps = srccaps;
+ if (filter)
+ filtered_caps = gst_caps_intersect (srccaps, filter);
+ returned_caps = gst_caps_intersect (filtered_caps, template_caps);
+
+ gst_caps_unref (srccaps);
+ if (filter)
+ gst_caps_unref (filtered_caps);
+ if (had_current_caps)
+ gst_caps_unref (template_caps);
+
+ return returned_caps;
+}
+
+/* Sink pad ACCEPT_CAPS: checks whether @caps is compatible with what the
+ * src pad currently produces (or could per its template), ignoring
+ * geometry, framerate, format, colorimetry and chroma-site, all of which
+ * the mixer can adapt or convert.
+ */
+static gboolean
+gst_videomixer2_pad_sink_acceptcaps (GstPad * pad, GstVideoMixer2 * mix,
+ GstCaps * caps)
+{
+ gboolean ret;
+ GstCaps *modified_caps;
+ GstCaps *accepted_caps;
+ GstCaps *template_caps;
+ gboolean had_current_caps = TRUE;
+ gint i, n;
+ GstStructure *s;
+
+ GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);
+
+ accepted_caps = gst_pad_get_current_caps (GST_PAD (mix->srcpad));
+
+ template_caps = gst_pad_get_pad_template_caps (GST_PAD (mix->srcpad));
+
+ if (accepted_caps == NULL) {
+ accepted_caps = template_caps;
+ had_current_caps = FALSE;
+ }
+
+ accepted_caps = gst_caps_make_writable (accepted_caps);
+
+ GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);
+
+ /* open geometry/framerate to full ranges and drop convertible fields */
+ n = gst_caps_get_size (accepted_caps);
+ for (i = 0; i < n; i++) {
+ s = gst_caps_get_structure (accepted_caps, i);
+ gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
+ "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+ if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
+ gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
+ NULL);
+
+ gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
+ NULL);
+ }
+
+ modified_caps = gst_caps_intersect (accepted_caps, template_caps);
+
+ /* FIX: test against the caps clamped to the template; modified_caps was
+ * previously computed but never used, so caps outside the template could
+ * be accepted */
+ ret = gst_caps_can_intersect (caps, modified_caps);
+ GST_DEBUG_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT,
+ (ret ? "" : "not "), caps);
+ GST_DEBUG_OBJECT (pad, "acceptable caps are %" GST_PTR_FORMAT, accepted_caps);
+ gst_caps_unref (accepted_caps);
+ gst_caps_unref (modified_caps);
+ if (had_current_caps)
+ gst_caps_unref (template_caps);
+ return ret;
+}
+
+/* GstCollectPads query callback for sink pads: answers CAPS and
+ * ACCEPT_CAPS with mixer-specific logic, everything else goes to the
+ * collectpads default handler. Returns TRUE if the query was handled.
+ */
+static gboolean
+gst_videomixer2_sink_query (GstCollectPads * pads, GstCollectData * cdata,
+ GstQuery * query, GstVideoMixer2 * mix)
+{
+ GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (cdata->pad);
+ gboolean ret = FALSE;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CAPS:
+ {
+ GstCaps *filter, *caps;
+
+ gst_query_parse_caps (query, &filter);
+ caps = gst_videomixer2_pad_sink_getcaps (GST_PAD (pad), mix, filter);
+ gst_query_set_caps_result (query, caps);
+ gst_caps_unref (caps);
+ ret = TRUE;
+ break;
+ }
+ case GST_QUERY_ACCEPT_CAPS:
+ {
+ GstCaps *caps;
+
+ gst_query_parse_accept_caps (query, &caps);
+ ret = gst_videomixer2_pad_sink_acceptcaps (GST_PAD (pad), mix, caps);
+ gst_query_set_accept_caps_result (query, ret);
+ /* the query itself was handled regardless of the accept result */
+ ret = TRUE;
+ break;
+ }
+ default:
+ ret = gst_collect_pads_query_default (pads, cdata, query, FALSE);
+ break;
+ }
+ return ret;
+}
+
+/* GObject getter for the per-pad mixing properties */
+static void
+gst_videomixer2_pad_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstVideoMixer2Pad *mixpad = GST_VIDEO_MIXER2_PAD (object);
+
+ switch (prop_id) {
+ case PROP_PAD_ZORDER:
+ g_value_set_uint (value, mixpad->zorder);
+ break;
+ case PROP_PAD_XPOS:
+ g_value_set_int (value, mixpad->xpos);
+ break;
+ case PROP_PAD_YPOS:
+ g_value_set_int (value, mixpad->ypos);
+ break;
+ case PROP_PAD_ALPHA:
+ g_value_set_double (value, mixpad->alpha);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GCompareFunc: sorts sink pads by ascending z-order */
+static int
+pad_zorder_compare (const GstVideoMixer2Pad * pad1,
+ const GstVideoMixer2Pad * pad2)
+{
+ gint diff = pad1->zorder - pad2->zorder;
+
+ return diff;
+}
+
+/* GObject setter for the per-pad mixing properties. Changing the z-order
+ * re-sorts the mixer's sink pad list under the mixer lock. */
+static void
+gst_videomixer2_pad_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (object);
+ GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (gst_pad_get_parent (GST_PAD (pad)));
+
+ switch (prop_id) {
+ case PROP_PAD_ZORDER:
+ GST_VIDEO_MIXER2_LOCK (mix);
+ pad->zorder = g_value_get_uint (value);
+
+ mix->sinkpads = g_slist_sort (mix->sinkpads,
+ (GCompareFunc) pad_zorder_compare);
+ GST_VIDEO_MIXER2_UNLOCK (mix);
+ break;
+ /* NOTE(review): xpos/ypos/alpha are written without the mixer lock,
+ * presumably relying on word-sized stores being atomic enough for the
+ * blending thread -- verify this matches the element's locking rules */
+ case PROP_PAD_XPOS:
+ pad->xpos = g_value_get_int (value);
+ break;
+ case PROP_PAD_YPOS:
+ pad->ypos = g_value_get_int (value);
+ break;
+ case PROP_PAD_ALPHA:
+ pad->alpha = g_value_get_double (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+
+ gst_object_unref (mix);
+}
+
+/* Class init for the pad subclass: wires up the property accessors and
+ * installs the controllable zorder/xpos/ypos/alpha pad properties. */
+static void
+gst_videomixer2_pad_class_init (GstVideoMixer2PadClass * klass)
+{
+ GObjectClass *gobject_class = (GObjectClass *) klass;
+
+ gobject_class->set_property = gst_videomixer2_pad_set_property;
+ gobject_class->get_property = gst_videomixer2_pad_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
+ g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
+ 0, 10000, DEFAULT_PAD_ZORDER,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PAD_XPOS,
+ g_param_spec_int ("xpos", "X Position", "X Position of the picture",
+ G_MININT, G_MAXINT, DEFAULT_PAD_XPOS,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PAD_YPOS,
+ g_param_spec_int ("ypos", "Y Position", "Y Position of the picture",
+ G_MININT, G_MAXINT, DEFAULT_PAD_YPOS,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_PAD_ALPHA,
+ g_param_spec_double ("alpha", "Alpha", "Alpha of the picture", 0.0, 1.0,
+ DEFAULT_PAD_ALPHA,
+ G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
+}
+
+/* Instance init: no converter yet, all properties at their defaults */
+static void
+gst_videomixer2_pad_init (GstVideoMixer2Pad * mixerpad)
+{
+ mixerpad->convert = NULL;
+ mixerpad->need_conversion_update = FALSE;
+
+ mixerpad->zorder = DEFAULT_PAD_ZORDER;
+ mixerpad->xpos = DEFAULT_PAD_XPOS;
+ mixerpad->ypos = DEFAULT_PAD_YPOS;
+ mixerpad->alpha = DEFAULT_PAD_ALPHA;
+}
+
+/* GstVideoMixer2 */
+/* Default background and element property ids */
+#define DEFAULT_BACKGROUND VIDEO_MIXER2_BACKGROUND_CHECKER
+enum
+{
+ PROP_0,
+ PROP_BACKGROUND
+};
+
+#define GST_TYPE_VIDEO_MIXER2_BACKGROUND (gst_videomixer2_background_get_type())
+/* Registers and returns the GstVideoMixer2Background enum GType.
+ * Uses g_once_init_enter/leave so concurrent first calls cannot race and
+ * register the type twice (the plain `if (!type)` check was unsafe). */
+static GType
+gst_videomixer2_background_get_type (void)
+{
+ static gsize video_mixer_background_type = 0;
+
+ static const GEnumValue video_mixer_background[] = {
+ {VIDEO_MIXER2_BACKGROUND_CHECKER, "Checker pattern", "checker"},
+ {VIDEO_MIXER2_BACKGROUND_BLACK, "Black", "black"},
+ {VIDEO_MIXER2_BACKGROUND_WHITE, "White", "white"},
+ {VIDEO_MIXER2_BACKGROUND_TRANSPARENT,
+ "Transparent Background to enable further mixing", "transparent"},
+ {0, NULL, NULL},
+ };
+
+ if (g_once_init_enter (&video_mixer_background_type)) {
+ GType tmp = g_enum_register_static ("GstVideoMixer2Background",
+ video_mixer_background);
+ g_once_init_leave (&video_mixer_background_type, tmp);
+ }
+ return (GType) video_mixer_background_type;
+}
+
+#define gst_videomixer2_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVideoMixer2, gst_videomixer2, GST_TYPE_ELEMENT,
+ G_IMPLEMENT_INTERFACE (GST_TYPE_CHILD_PROXY,
+ gst_videomixer2_child_proxy_init));
+
+/* Stores new QoS data from a downstream QOS event under the object lock.
+ * For non-live late frames (diff > 0), the earliest acceptable time is
+ * pushed out by twice the lateness plus one frame duration. */
+static void
+gst_videomixer2_update_qos (GstVideoMixer2 * mix, gdouble proportion,
+ GstClockTimeDiff diff, GstClockTime timestamp)
+{
+ GST_DEBUG_OBJECT (mix,
+ "Updating QoS: proportion %lf, diff %" GST_STIME_FORMAT ", timestamp %"
+ GST_TIME_FORMAT, proportion, GST_STIME_ARGS (diff),
+ GST_TIME_ARGS (timestamp));
+
+ GST_OBJECT_LOCK (mix);
+ mix->proportion = proportion;
+ if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
+ if (!mix->live && G_UNLIKELY (diff > 0))
+ mix->earliest_time =
+ timestamp + 2 * diff + gst_util_uint64_scale_int_round (GST_SECOND,
+ GST_VIDEO_INFO_FPS_D (&mix->info), GST_VIDEO_INFO_FPS_N (&mix->info));
+ else
+ mix->earliest_time = timestamp + diff;
+ } else {
+ mix->earliest_time = GST_CLOCK_TIME_NONE;
+ }
+ GST_OBJECT_UNLOCK (mix);
+}
+
+/* Forgets all QoS state: neutral proportion, no earliest time, counters
+ * back to zero */
+static void
+gst_videomixer2_reset_qos (GstVideoMixer2 * mix)
+{
+ gst_videomixer2_update_qos (mix, 0.5, 0, GST_CLOCK_TIME_NONE);
+ mix->qos_processed = 0;
+ mix->qos_dropped = 0;
+}
+
+/* Takes a consistent snapshot of the current QoS proportion and earliest
+ * time under the object lock. */
+static void
+gst_videomixer2_read_qos (GstVideoMixer2 * mix, gdouble * proportion,
+ GstClockTime * time)
+{
+ GST_OBJECT_LOCK (mix);
+ *proportion = mix->proportion;
+ *time = mix->earliest_time;
+ GST_OBJECT_UNLOCK (mix);
+}
+
+/* Resets the mixer to its initial state: clears output info, timestamp
+ * bookkeeping, segment, QoS, and drops every pad's pending buffer and
+ * negotiated info. A new segment will be pushed before the next output. */
+static void
+gst_videomixer2_reset (GstVideoMixer2 * mix)
+{
+ GSList *l;
+
+ gst_video_info_init (&mix->info);
+ mix->ts_offset = 0;
+ mix->nframes = 0;
+
+ gst_segment_init (&mix->segment, GST_FORMAT_TIME);
+ mix->segment.position = -1;
+
+ gst_videomixer2_reset_qos (mix);
+
+ for (l = mix->sinkpads; l; l = l->next) {
+ GstVideoMixer2Pad *p = l->data;
+ GstVideoMixer2Collect *mixcol = p->mixcol;
+
+ gst_buffer_replace (&mixcol->buffer, NULL);
+ mixcol->start_time = -1;
+ mixcol->end_time = -1;
+
+ gst_video_info_init (&p->info);
+ }
+
+ mix->newseg_pending = TRUE;
+}
+
+/* Selects, for every sink pad, the buffer that overlaps the output
+ * interval [output_start_time, output_end_time), queueing buffers without
+ * a known duration until the next timestamp arrives, and dropping buffers
+ * that are too old or outside the pad's segment.
+ *
+ * 1 == OK
+ * 0 == need more data
+ * -1 == EOS
+ * -2 == error
+ */
+static gint
+gst_videomixer2_fill_queues (GstVideoMixer2 * mix,
+ GstClockTime output_start_time, GstClockTime output_end_time)
+{
+ GSList *l;
+ gboolean eos = TRUE;
+ gboolean need_more_data = FALSE;
+
+ for (l = mix->sinkpads; l; l = l->next) {
+ GstVideoMixer2Pad *pad = l->data;
+ GstVideoMixer2Collect *mixcol = pad->mixcol;
+ GstSegment *segment = &pad->mixcol->collect.segment;
+ GstBuffer *buf;
+ GstVideoInfo *vinfo;
+
+ /* peek only; the buffer is popped once we decide to consume it */
+ buf = gst_collect_pads_peek (mix->collect, &mixcol->collect);
+ if (buf) {
+ GstClockTime start_time, end_time;
+
+ start_time = GST_BUFFER_TIMESTAMP (buf);
+ if (start_time == -1) {
+ gst_buffer_unref (buf);
+ GST_ERROR_OBJECT (pad, "Need timestamped buffers!");
+ return -2;
+ }
+
+ vinfo = &pad->info;
+
+ /* FIXME: Make all this work with negative rates */
+
+ /* earlier than what we already have: pop and discard */
+ if ((mixcol->buffer && start_time < GST_BUFFER_TIMESTAMP (mixcol->buffer))
+ || (mixcol->queued
+ && start_time < GST_BUFFER_TIMESTAMP (mixcol->queued))) {
+ GST_WARNING_OBJECT (pad, "Buffer from the past, dropping");
+ gst_buffer_unref (buf);
+ buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
+ gst_buffer_unref (buf);
+ need_more_data = TRUE;
+ continue;
+ }
+
+ if (mixcol->queued) {
+ /* the queued buffer's duration is the gap to this new timestamp */
+ end_time = start_time - GST_BUFFER_TIMESTAMP (mixcol->queued);
+ start_time = GST_BUFFER_TIMESTAMP (mixcol->queued);
+ gst_buffer_unref (buf);
+ buf = gst_buffer_ref (mixcol->queued);
+ vinfo = &mixcol->queued_vinfo;
+ } else {
+ end_time = GST_BUFFER_DURATION (buf);
+
+ /* no duration: park the buffer until the next one tells us */
+ if (end_time == -1) {
+ mixcol->queued = buf;
+ buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
+ gst_buffer_unref (buf);
+ mixcol->queued_vinfo = pad->info;
+ need_more_data = TRUE;
+ continue;
+ }
+ }
+
+ g_assert (start_time != -1 && end_time != -1);
+ end_time += start_time; /* convert from duration to position */
+
+ /* Check if it's inside the segment */
+ if (start_time >= segment->stop || end_time < segment->start) {
+ GST_DEBUG_OBJECT (pad, "Buffer outside the segment");
+
+ if (buf == mixcol->queued) {
+ gst_buffer_unref (buf);
+ gst_buffer_replace (&mixcol->queued, NULL);
+ } else {
+ gst_buffer_unref (buf);
+ buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
+ gst_buffer_unref (buf);
+ }
+
+ need_more_data = TRUE;
+ continue;
+ }
+
+ /* Clip to segment and convert to running time */
+ start_time = MAX (start_time, segment->start);
+ if (segment->stop != -1)
+ end_time = MIN (end_time, segment->stop);
+ start_time =
+ gst_segment_to_running_time (segment, GST_FORMAT_TIME, start_time);
+ end_time =
+ gst_segment_to_running_time (segment, GST_FORMAT_TIME, end_time);
+ g_assert (start_time != -1 && end_time != -1);
+
+ /* Convert to the output segment rate */
+ if (ABS (mix->segment.rate) != 1.0) {
+ start_time *= ABS (mix->segment.rate);
+ end_time *= ABS (mix->segment.rate);
+ }
+
+ /* ends before what we already blended: drop */
+ if (mixcol->end_time != -1 && mixcol->end_time > end_time) {
+ GST_DEBUG_OBJECT (pad, "Buffer from the past, dropping");
+ if (buf == mixcol->queued) {
+ gst_buffer_unref (buf);
+ gst_buffer_replace (&mixcol->queued, NULL);
+ } else {
+ gst_buffer_unref (buf);
+ buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
+ gst_buffer_unref (buf);
+ }
+
+ need_more_data = TRUE;
+ continue;
+ }
+
+ if (end_time >= output_start_time && start_time < output_end_time) {
+ /* overlaps the output interval: this is the buffer to blend */
+ GST_DEBUG_OBJECT (pad,
+ "Taking new buffer with start time %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (start_time));
+ gst_buffer_replace (&mixcol->buffer, buf);
+ mixcol->buffer_vinfo = *vinfo;
+ mixcol->start_time = start_time;
+ mixcol->end_time = end_time;
+
+ if (buf == mixcol->queued) {
+ gst_buffer_unref (buf);
+ gst_buffer_replace (&mixcol->queued, NULL);
+ } else {
+ gst_buffer_unref (buf);
+ buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
+ gst_buffer_unref (buf);
+ }
+ eos = FALSE;
+ } else if (start_time >= output_end_time) {
+ /* still in the future: keep it queued in collectpads */
+ GST_DEBUG_OBJECT (pad, "Keeping buffer until %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (start_time));
+ gst_buffer_unref (buf);
+ eos = FALSE;
+ } else {
+ GST_DEBUG_OBJECT (pad, "Too old buffer -- dropping");
+ if (buf == mixcol->queued) {
+ gst_buffer_unref (buf);
+ gst_buffer_replace (&mixcol->queued, NULL);
+ } else {
+ gst_buffer_unref (buf);
+ buf = gst_collect_pads_pop (mix->collect, &mixcol->collect);
+ gst_buffer_unref (buf);
+ }
+
+ need_more_data = TRUE;
+ continue;
+ }
+ } else {
+ /* no buffer available on this pad */
+ if (mixcol->end_time != -1) {
+ if (mixcol->end_time <= output_start_time) {
+ gst_buffer_replace (&mixcol->buffer, NULL);
+ mixcol->start_time = mixcol->end_time = -1;
+ if (!GST_COLLECT_PADS_STATE_IS_SET (mixcol,
+ GST_COLLECT_PADS_STATE_EOS))
+ need_more_data = TRUE;
+ } else if (!GST_COLLECT_PADS_STATE_IS_SET (mixcol,
+ GST_COLLECT_PADS_STATE_EOS)) {
+ eos = FALSE;
+ }
+ }
+ }
+ }
+
+ if (need_more_data)
+ return 0;
+ if (eos)
+ return -1;
+
+ return 1;
+}
+
+/* Produce one output frame for [output_start_time, output_end_time):
+ * allocate the output buffer, paint the configured background into it,
+ * then composite every sink pad's queued buffer on top (mix->sinkpads is
+ * kept zorder-sorted by the request-pad code).  Always returns GST_FLOW_OK;
+ * the finished frame is returned in *outbuf, stamped with the output start
+ * time and duration. */
+static GstFlowReturn
+gst_videomixer2_blend_buffers (GstVideoMixer2 * mix,
+    GstClockTime output_start_time, GstClockTime output_end_time,
+    GstBuffer ** outbuf)
+{
+  GSList *l;
+  guint outsize;
+  BlendFunction composite;
+  GstVideoFrame outframe;
+  /* align = 15 requests 16-byte-aligned buffer memory for the frame data */
+  static GstAllocationParams params = { 0, 15, 0, 0, };
+
+  outsize = GST_VIDEO_INFO_SIZE (&mix->info);
+
+  /* NOTE(review): allocation and gst_video_frame_map() results are not
+   * checked here -- confirm that's acceptable on OOM/map failure */
+  *outbuf = gst_buffer_new_allocate (NULL, outsize, &params);
+  GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
+  GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;
+
+  gst_video_frame_map (&outframe, &mix->info, *outbuf, GST_MAP_READWRITE);
+
+  /* default to blending */
+  composite = mix->blend;
+  switch (mix->background) {
+    case VIDEO_MIXER2_BACKGROUND_CHECKER:
+      mix->fill_checker (&outframe);
+      break;
+    case VIDEO_MIXER2_BACKGROUND_BLACK:
+      /* Y/U/V values for black (16, 128, 128); the per-format fill_color
+       * implementation converts as needed */
+      mix->fill_color (&outframe, 16, 128, 128);
+      break;
+    case VIDEO_MIXER2_BACKGROUND_WHITE:
+      mix->fill_color (&outframe, 240, 128, 128);
+      break;
+    case VIDEO_MIXER2_BACKGROUND_TRANSPARENT:
+    {
+      guint i, plane, num_planes, height;
+
+      /* zero every plane row-by-row, honouring the plane stride, so that
+       * padding bytes between rows are left alone */
+      num_planes = GST_VIDEO_FRAME_N_PLANES (&outframe);
+      for (plane = 0; plane < num_planes; ++plane) {
+        guint8 *pdata;
+        gsize rowsize, plane_stride;
+
+        pdata = GST_VIDEO_FRAME_PLANE_DATA (&outframe, plane);
+        plane_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&outframe, plane);
+        rowsize = GST_VIDEO_FRAME_COMP_WIDTH (&outframe, plane)
+            * GST_VIDEO_FRAME_COMP_PSTRIDE (&outframe, plane);
+        height = GST_VIDEO_FRAME_COMP_HEIGHT (&outframe, plane);
+        for (i = 0; i < height; ++i) {
+          memset (pdata, 0, rowsize);
+          pdata += plane_stride;
+        }
+      }
+
+      /* use overlay to keep background transparent */
+      composite = mix->overlay;
+      break;
+    }
+  }
+
+  for (l = mix->sinkpads; l; l = l->next) {
+    GstVideoMixer2Pad *pad = l->data;
+    GstVideoMixer2Collect *mixcol = pad->mixcol;
+
+    /* pads with no queued buffer for this interval contribute nothing */
+    if (mixcol->buffer != NULL) {
+      GstClockTime timestamp;
+      gint64 stream_time;
+      GstSegment *seg;
+      GstVideoFrame converted_frame;
+      GstBuffer *converted_buf = NULL;
+      GstVideoFrame frame;
+
+      seg = &mixcol->collect.segment;
+
+      timestamp = GST_BUFFER_TIMESTAMP (mixcol->buffer);
+
+      stream_time =
+          gst_segment_to_stream_time (seg, GST_FORMAT_TIME, timestamp);
+
+      /* sync object properties on stream time */
+      if (GST_CLOCK_TIME_IS_VALID (stream_time))
+        gst_object_sync_values (GST_OBJECT (pad), stream_time);
+
+      /* map with the video info the buffer was queued with, which may
+       * differ from mix->info until the converter catches up */
+      gst_video_frame_map (&frame, &mixcol->buffer_vinfo, mixcol->buffer,
+          GST_MAP_READ);
+
+      if (pad->convert) {
+        gint converted_size;
+
+        /* We wait until here to set the conversion infos, in case mix->info changed */
+        if (pad->need_conversion_update) {
+          pad->conversion_info = mix->info;
+          gst_video_info_set_format (&(pad->conversion_info),
+              GST_VIDEO_INFO_FORMAT (&mix->info), pad->info.width,
+              pad->info.height);
+          pad->need_conversion_update = FALSE;
+        }
+
+        converted_size = pad->conversion_info.size;
+        converted_size = converted_size > outsize ? converted_size : outsize;
+        converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);
+
+        gst_video_frame_map (&converted_frame, &(pad->conversion_info),
+            converted_buf, GST_MAP_READWRITE);
+        gst_video_converter_frame (pad->convert, &frame, &converted_frame);
+        gst_video_frame_unmap (&frame);
+      } else {
+        /* no conversion needed, composite straight from the input frame */
+        converted_frame = frame;
+      }
+
+      composite (&converted_frame, pad->xpos, pad->ypos, pad->alpha, &outframe);
+
+      if (pad->convert)
+        gst_buffer_unref (converted_buf);
+
+      gst_video_frame_unmap (&converted_frame);
+    }
+  }
+  gst_video_frame_unmap (&outframe);
+
+  return GST_FLOW_OK;
+}
+
+/* Perform qos calculations before processing the next frame.  Returns the
+ * jitter against the latest QoS observation: a positive value means we are
+ * already late and the frame should be dropped, while -1 (no valid
+ * timestamp / no observation yet) or a non-positive jitter means the frame
+ * should be processed.  (The old TRUE/FALSE wording predates the gint64
+ * return type.) */
+static gint64
+gst_videomixer2_do_qos (GstVideoMixer2 * mix, GstClockTime timestamp)
+{
+  GstClockTime qostime, earliest_time;
+  gdouble proportion;
+  gint64 jitter;
+
+  /* no timestamp, can't do QoS => process frame */
+  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
+    GST_LOG_OBJECT (mix, "invalid timestamp, can't do QoS, process frame");
+    return -1;
+  }
+
+  /* get latest QoS observation values */
+  gst_videomixer2_read_qos (mix, &proportion, &earliest_time);
+
+  /* skip qos if we have no observation (yet) => process frame */
+  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
+    GST_LOG_OBJECT (mix, "no observation yet, process frame");
+    return -1;
+  }
+
+  /* qos is done on running time */
+  qostime =
+      gst_segment_to_running_time (&mix->segment, GST_FORMAT_TIME, timestamp);
+
+  /* see how our next timestamp relates to the latest qos timestamp */
+  GST_LOG_OBJECT (mix, "qostime %" GST_TIME_FORMAT ", earliest %"
+      GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
+
+  /* positive jitter: the deadline (earliest_time) is already past */
+  jitter = GST_CLOCK_DIFF (qostime, earliest_time);
+  if (qostime != GST_CLOCK_TIME_NONE && jitter > 0) {
+    GST_DEBUG_OBJECT (mix, "we are late, drop frame");
+    return jitter;
+  }
+
+  GST_LOG_OBJECT (mix, "process frame");
+  return jitter;
+}
+
+/* GstCollectPads collect callback: main mixing loop, invoked once all sink
+ * pads have data (or are EOS).  Sends pending stream-start/caps/segment
+ * events, computes the next output interval from the frame counter and
+ * frame rate, fills the per-pad queues, optionally drops the frame via QoS
+ * and otherwise blends and pushes one output buffer.  Runs with the
+ * collectpads stream lock held; takes/releases the mixer lock internally. */
+static GstFlowReturn
+gst_videomixer2_collected (GstCollectPads * pads, GstVideoMixer2 * mix)
+{
+  GstFlowReturn ret;
+  GstClockTime output_start_time, output_end_time;
+  GstBuffer *outbuf = NULL;
+  gint res;
+  gint64 jitter;
+
+  /* If we're not negotiated yet... */
+  if (GST_VIDEO_INFO_FORMAT (&mix->info) == GST_VIDEO_FORMAT_UNKNOWN)
+    return GST_FLOW_NOT_NEGOTIATED;
+
+  if (mix->send_stream_start) {
+    gchar s_id[32];
+
+    /* stream-start (FIXME: create id based on input ids) */
+    g_snprintf (s_id, sizeof (s_id), "mix-%08x", g_random_int ());
+    if (!gst_pad_push_event (mix->srcpad, gst_event_new_stream_start (s_id))) {
+      GST_WARNING_OBJECT (mix->srcpad, "Sending stream start event failed");
+    }
+    mix->send_stream_start = FALSE;
+  }
+
+  if (gst_pad_check_reconfigure (mix->srcpad))
+    gst_videomixer2_update_src_caps (mix);
+
+  if (mix->send_caps) {
+    if (!gst_pad_push_event (mix->srcpad,
+            gst_event_new_caps (mix->current_caps))) {
+      GST_WARNING_OBJECT (mix->srcpad, "Sending caps event failed");
+    }
+    mix->send_caps = FALSE;
+  }
+
+  GST_VIDEO_MIXER2_LOCK (mix);
+
+  if (mix->newseg_pending) {
+    GST_DEBUG_OBJECT (mix, "Sending NEWSEGMENT event");
+    /* drop the lock while pushing downstream to avoid deadlocks */
+    GST_VIDEO_MIXER2_UNLOCK (mix);
+    if (!gst_pad_push_event (mix->srcpad,
+            gst_event_new_segment (&mix->segment))) {
+      ret = GST_FLOW_ERROR;
+      goto done_unlocked;
+    }
+    GST_VIDEO_MIXER2_LOCK (mix);
+    mix->newseg_pending = FALSE;
+  }
+
+  /* first frame starts at segment start; afterwards continue from the
+   * last output position */
+  if (mix->segment.position == -1)
+    output_start_time = mix->segment.start;
+  else
+    output_start_time = mix->segment.position;
+
+  /* end time of frame nframes+1 derived from the output frame rate,
+   * offset by ts_offset accumulated over frame-rate changes */
+  output_end_time =
+      mix->ts_offset + gst_util_uint64_scale_round (mix->nframes + 1,
+      GST_SECOND * GST_VIDEO_INFO_FPS_D (&mix->info),
+      GST_VIDEO_INFO_FPS_N (&mix->info)) + mix->segment.start;
+
+  if (output_end_time >= mix->segment.stop) {
+    GST_DEBUG_OBJECT (mix, "Segment done");
+    if (!(mix->segment.flags & GST_SEGMENT_FLAG_SEGMENT)) {
+      GST_VIDEO_MIXER2_UNLOCK (mix);
+      gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
+
+      ret = GST_FLOW_EOS;
+      goto done_unlocked;
+    }
+  }
+
+  if (G_UNLIKELY (mix->pending_tags)) {
+    /* event takes ownership of the pending tag list */
+    gst_pad_push_event (mix->srcpad, gst_event_new_tag (mix->pending_tags));
+    mix->pending_tags = NULL;
+  }
+
+  if (mix->segment.stop != -1)
+    output_end_time = MIN (output_end_time, mix->segment.stop);
+
+  /* 0: need more data, -1: all EOS, -2: error, 1: ready to blend */
+  res = gst_videomixer2_fill_queues (mix, output_start_time, output_end_time);
+
+  if (res == 0) {
+    GST_DEBUG_OBJECT (mix, "Need more data for decisions");
+    ret = GST_FLOW_OK;
+    goto done;
+  } else if (res == -1) {
+    GST_VIDEO_MIXER2_UNLOCK (mix);
+    GST_DEBUG_OBJECT (mix, "All sinkpads are EOS -- forwarding");
+    gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
+    ret = GST_FLOW_EOS;
+    goto done_unlocked;
+  } else if (res == -2) {
+    GST_ERROR_OBJECT (mix, "Error collecting buffers");
+    ret = GST_FLOW_ERROR;
+    goto done;
+  }
+
+  /* jitter <= 0: on time, blend; jitter > 0: late, drop and post QoS msg */
+  jitter = gst_videomixer2_do_qos (mix, output_start_time);
+  if (jitter <= 0) {
+    ret =
+        gst_videomixer2_blend_buffers (mix, output_start_time,
+        output_end_time, &outbuf);
+    mix->qos_processed++;
+  } else {
+    GstMessage *msg;
+
+    mix->qos_dropped++;
+
+    /* TODO: live */
+    msg =
+        gst_message_new_qos (GST_OBJECT_CAST (mix), FALSE,
+        gst_segment_to_running_time (&mix->segment, GST_FORMAT_TIME,
+            output_start_time), gst_segment_to_stream_time (&mix->segment,
+            GST_FORMAT_TIME, output_start_time), output_start_time,
+        output_end_time - output_start_time);
+    gst_message_set_qos_values (msg, jitter, mix->proportion, 1000000);
+    gst_message_set_qos_stats (msg, GST_FORMAT_BUFFERS, mix->qos_processed,
+        mix->qos_dropped);
+    gst_element_post_message (GST_ELEMENT_CAST (mix), msg);
+
+    ret = GST_FLOW_OK;
+  }
+
+  /* advance the output position whether the frame was blended or dropped */
+  mix->segment.position = output_end_time;
+  mix->nframes++;
+
+  GST_VIDEO_MIXER2_UNLOCK (mix);
+  if (outbuf) {
+    GST_LOG_OBJECT (mix,
+        "Pushing buffer with ts %" GST_TIME_FORMAT " and duration %"
+        GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
+        GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
+    ret = gst_pad_push (mix->srcpad, outbuf);
+  }
+  goto done_unlocked;
+
+done:
+  GST_VIDEO_MIXER2_UNLOCK (mix);
+  /* fall through */
+
+done_unlocked:
+  return ret;
+}
+
+/* FIXME, the duration query should reflect how long you will produce
+ * data, that is the amount of stream time until you will emit EOS.
+ *
+ * For synchronized mixing this is always the max of all the durations
+ * of upstream since we emit EOS when all of them finished.
+ *
+ * We don't do synchronized mixing so this really depends on where the
+ * streams where punched in and what their relative offsets are against
+ * each other which we can get from the first timestamps we see.
+ *
+ * When we add a new stream (or remove a stream) the duration might
+ * also become invalid again and we need to post a new DURATION
+ * message to notify this fact to the parent.
+ * For now we take the max of all the upstream elements so the simple
+ * cases work at least somewhat.
+ */
+/* Answer a DURATION query on the src pad by iterating all sink pads,
+ * querying each upstream peer, and reporting the maximum.  Returns FALSE
+ * if any peer query fails. */
+static gboolean
+gst_videomixer2_query_duration (GstVideoMixer2 * mix, GstQuery * query)
+{
+  GValue item = { 0 };
+  gint64 max;
+  gboolean res;
+  GstFormat format;
+  GstIterator *it;
+  gboolean done;
+
+  /* parse format */
+  gst_query_parse_duration (query, &format, NULL);
+
+  max = -1;
+  res = TRUE;
+  done = FALSE;
+
+  /* Take maximum of all durations */
+  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
+  while (!done) {
+    switch (gst_iterator_next (it, &item)) {
+      case GST_ITERATOR_DONE:
+        done = TRUE;
+        break;
+      case GST_ITERATOR_OK:
+      {
+        GstPad *pad;
+        gint64 duration;
+
+        pad = g_value_get_object (&item);
+
+        /* ask sink peer for duration */
+        res &= gst_pad_peer_query_duration (pad, format, &duration);
+        /* take max from all valid return values */
+        if (res) {
+          /* valid unknown length, stop searching */
+          if (duration == -1) {
+            max = duration;
+            done = TRUE;
+          }
+          /* else see if bigger than current max */
+          else if (duration > max)
+            max = duration;
+        }
+        g_value_reset (&item);
+        break;
+      }
+      case GST_ITERATOR_RESYNC:
+        /* pad list changed under us: start over */
+        max = -1;
+        res = TRUE;
+        gst_iterator_resync (it);
+        break;
+      default:
+        res = FALSE;
+        done = TRUE;
+        break;
+    }
+  }
+  g_value_unset (&item);
+  gst_iterator_free (it);
+
+  if (res) {
+    /* and store the max */
+    GST_DEBUG_OBJECT (mix, "Total duration in format %s: %"
+        GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
+    gst_query_set_duration (query, format, max);
+  }
+
+  return res;
+}
+
+/* Src pad query handler: answers POSITION (time format only, as stream
+ * time of the current segment position) and DURATION (max over upstream
+ * peers); CAPS falls back to the default handler; everything else is
+ * refused. */
+static gboolean
+gst_videomixer2_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
+  gboolean res = FALSE;
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      GstFormat format;
+
+      gst_query_parse_position (query, &format, NULL);
+
+      switch (format) {
+        case GST_FORMAT_TIME:
+          gst_query_set_position (query, format,
+              gst_segment_to_stream_time (&mix->segment, GST_FORMAT_TIME,
+                  mix->segment.position));
+          res = TRUE;
+          break;
+        default:
+          break;
+      }
+      break;
+    }
+    case GST_QUERY_DURATION:
+      res = gst_videomixer2_query_duration (mix, query);
+      break;
+    case GST_QUERY_CAPS:
+      res = gst_pad_query_default (pad, parent, query);
+      break;
+    default:
+      /* FIXME, needs a custom query handler because we have multiple
+       * sinkpads */
+      res = FALSE;
+      break;
+  }
+  return res;
+}
+
+/* Src pad event handler.  QOS events update the mixer's QoS state and are
+ * forwarded to all sink pads; SEEK events (forward rates only) rescale or
+ * flush the queued per-pad buffers, reconfigure the output segment and are
+ * forwarded via collectpads; NAVIGATION is dropped; everything else is
+ * forwarded to the sink pads. */
+static gboolean
+gst_videomixer2_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (parent);
+  gboolean result;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_QOS:
+    {
+      GstQOSType type;
+      GstClockTimeDiff diff;
+      GstClockTime timestamp;
+      gdouble proportion;
+
+      gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
+
+      gst_videomixer2_update_qos (mix, proportion, diff, timestamp);
+
+      /* push_sink_event consumes the event */
+      result = gst_videomixer2_push_sink_event (mix, event);
+      break;
+    }
+    case GST_EVENT_SEEK:
+    {
+      gdouble rate;
+      GstFormat fmt;
+      GstSeekFlags flags;
+      GstSeekType start_type, stop_type;
+      gint64 start, stop;
+      GSList *l;
+      gdouble abs_rate;
+
+      /* parse the seek parameters */
+      gst_event_parse_seek (event, &rate, &fmt, &flags, &start_type,
+          &start, &stop_type, &stop);
+
+      if (rate <= 0.0) {
+        GST_ERROR_OBJECT (mix, "Negative rates not supported yet");
+        result = FALSE;
+        gst_event_unref (event);
+        break;
+      }
+
+      GST_DEBUG_OBJECT (mix, "Handling SEEK event");
+
+      abs_rate = ABS (rate);
+
+      GST_VIDEO_MIXER2_LOCK (mix);
+      for (l = mix->sinkpads; l; l = l->next) {
+        GstVideoMixer2Pad *p = l->data;
+
+        /* flushing seek: drop everything queued on the pad */
+        if (flags & GST_SEEK_FLAG_FLUSH) {
+          gst_buffer_replace (&p->mixcol->buffer, NULL);
+          p->mixcol->start_time = p->mixcol->end_time = -1;
+          continue;
+        }
+
+        /* Convert to the output segment rate */
+        if (ABS (mix->segment.rate) != abs_rate) {
+          /* undo old rate scaling, then apply the new rate */
+          if (ABS (mix->segment.rate) != 1.0 && p->mixcol->buffer) {
+            p->mixcol->start_time /= ABS (mix->segment.rate);
+            p->mixcol->end_time /= ABS (mix->segment.rate);
+          }
+          if (abs_rate != 1.0 && p->mixcol->buffer) {
+            p->mixcol->start_time *= abs_rate;
+            p->mixcol->end_time *= abs_rate;
+          }
+        }
+      }
+      GST_VIDEO_MIXER2_UNLOCK (mix);
+
+      gst_segment_do_seek (&mix->segment, rate, fmt, flags, start_type, start,
+          stop_type, stop, NULL);
+      /* restart output timing from the new segment */
+      mix->segment.position = -1;
+      mix->ts_offset = 0;
+      mix->nframes = 0;
+      mix->newseg_pending = TRUE;
+
+      gst_videomixer2_reset_qos (mix);
+
+      result = gst_collect_pads_src_event_default (mix->collect, pad, event);
+      break;
+    }
+    case GST_EVENT_NAVIGATION:
+      /* navigation is rather pointless. */
+      result = FALSE;
+      gst_event_unref (event);
+      break;
+    default:
+      /* just forward the rest for now */
+      result = gst_videomixer2_push_sink_event (mix, event);
+      break;
+  }
+
+  return result;
+}
+
+/* Configure the output format from the given src caps: store the new
+ * GstVideoInfo, select the per-format blend/overlay/fill function pointers
+ * and, if the frame rate changed, rebase ts_offset/nframes so output
+ * timestamps stay continuous.  Returns FALSE for unparsable caps or
+ * unsupported formats.  Schedules a downstream caps event when the caps
+ * actually changed. */
+static gboolean
+gst_videomixer2_src_setcaps (GstPad * pad, GstVideoMixer2 * mix, GstCaps * caps)
+{
+  gboolean ret = FALSE;
+  GstVideoInfo info;
+
+  GST_INFO_OBJECT (pad, "set src caps: %" GST_PTR_FORMAT, caps);
+
+  if (!gst_video_info_from_caps (&info, caps))
+    goto done;
+
+  GST_VIDEO_MIXER2_LOCK (mix);
+
+  /* cleared first; only a recognised format below re-populates them */
+  mix->blend = NULL;
+  mix->overlay = NULL;
+  mix->fill_checker = NULL;
+  mix->fill_color = NULL;
+
+  if (GST_VIDEO_INFO_FPS_N (&mix->info) != GST_VIDEO_INFO_FPS_N (&info) ||
+      GST_VIDEO_INFO_FPS_D (&mix->info) != GST_VIDEO_INFO_FPS_D (&info)) {
+    if (mix->segment.position != -1) {
+      /* keep output timestamps continuous across the frame-rate change */
+      mix->ts_offset = mix->segment.position - mix->segment.start;
+      mix->nframes = 0;
+    }
+    gst_videomixer2_reset_qos (mix);
+  }
+
+  mix->info = info;
+
+  /* formats with an alpha channel get a distinct overlay function; for
+   * the rest overlay is the same as blend */
+  switch (GST_VIDEO_INFO_FORMAT (&mix->info)) {
+    case GST_VIDEO_FORMAT_AYUV:
+      mix->blend = gst_video_mixer_blend_ayuv;
+      mix->overlay = gst_video_mixer_overlay_ayuv;
+      mix->fill_checker = gst_video_mixer_fill_checker_ayuv;
+      mix->fill_color = gst_video_mixer_fill_color_ayuv;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_ARGB:
+      mix->blend = gst_video_mixer_blend_argb;
+      mix->overlay = gst_video_mixer_overlay_argb;
+      mix->fill_checker = gst_video_mixer_fill_checker_argb;
+      mix->fill_color = gst_video_mixer_fill_color_argb;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_BGRA:
+      mix->blend = gst_video_mixer_blend_bgra;
+      mix->overlay = gst_video_mixer_overlay_bgra;
+      mix->fill_checker = gst_video_mixer_fill_checker_bgra;
+      mix->fill_color = gst_video_mixer_fill_color_bgra;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_ABGR:
+      mix->blend = gst_video_mixer_blend_abgr;
+      mix->overlay = gst_video_mixer_overlay_abgr;
+      mix->fill_checker = gst_video_mixer_fill_checker_abgr;
+      mix->fill_color = gst_video_mixer_fill_color_abgr;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_RGBA:
+      mix->blend = gst_video_mixer_blend_rgba;
+      mix->overlay = gst_video_mixer_overlay_rgba;
+      mix->fill_checker = gst_video_mixer_fill_checker_rgba;
+      mix->fill_color = gst_video_mixer_fill_color_rgba;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_Y444:
+      mix->blend = gst_video_mixer_blend_y444;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_y444;
+      mix->fill_color = gst_video_mixer_fill_color_y444;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_Y42B:
+      mix->blend = gst_video_mixer_blend_y42b;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_y42b;
+      mix->fill_color = gst_video_mixer_fill_color_y42b;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_YUY2:
+      mix->blend = gst_video_mixer_blend_yuy2;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_yuy2;
+      mix->fill_color = gst_video_mixer_fill_color_yuy2;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_UYVY:
+      mix->blend = gst_video_mixer_blend_uyvy;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_uyvy;
+      mix->fill_color = gst_video_mixer_fill_color_uyvy;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_YVYU:
+      mix->blend = gst_video_mixer_blend_yvyu;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_yvyu;
+      mix->fill_color = gst_video_mixer_fill_color_yvyu;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_I420:
+      mix->blend = gst_video_mixer_blend_i420;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_i420;
+      mix->fill_color = gst_video_mixer_fill_color_i420;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_YV12:
+      mix->blend = gst_video_mixer_blend_yv12;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_yv12;
+      mix->fill_color = gst_video_mixer_fill_color_yv12;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_NV12:
+      mix->blend = gst_video_mixer_blend_nv12;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_nv12;
+      mix->fill_color = gst_video_mixer_fill_color_nv12;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_NV21:
+      mix->blend = gst_video_mixer_blend_nv21;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_nv21;
+      mix->fill_color = gst_video_mixer_fill_color_nv21;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_Y41B:
+      mix->blend = gst_video_mixer_blend_y41b;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_y41b;
+      mix->fill_color = gst_video_mixer_fill_color_y41b;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_RGB:
+      mix->blend = gst_video_mixer_blend_rgb;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_rgb;
+      mix->fill_color = gst_video_mixer_fill_color_rgb;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_BGR:
+      mix->blend = gst_video_mixer_blend_bgr;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_bgr;
+      mix->fill_color = gst_video_mixer_fill_color_bgr;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_xRGB:
+      mix->blend = gst_video_mixer_blend_xrgb;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_xrgb;
+      mix->fill_color = gst_video_mixer_fill_color_xrgb;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_xBGR:
+      mix->blend = gst_video_mixer_blend_xbgr;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_xbgr;
+      mix->fill_color = gst_video_mixer_fill_color_xbgr;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_RGBx:
+      mix->blend = gst_video_mixer_blend_rgbx;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_rgbx;
+      mix->fill_color = gst_video_mixer_fill_color_rgbx;
+      ret = TRUE;
+      break;
+    case GST_VIDEO_FORMAT_BGRx:
+      mix->blend = gst_video_mixer_blend_bgrx;
+      mix->overlay = mix->blend;
+      mix->fill_checker = gst_video_mixer_fill_checker_bgrx;
+      mix->fill_color = gst_video_mixer_fill_color_bgrx;
+      ret = TRUE;
+      break;
+    default:
+      break;
+  }
+  GST_VIDEO_MIXER2_UNLOCK (mix);
+
+  if (mix->current_caps == NULL ||
+      gst_caps_is_equal (caps, mix->current_caps) == FALSE) {
+    gst_caps_replace (&mix->current_caps, caps);
+    /* actual caps event is sent from the collect function */
+    mix->send_caps = TRUE;
+  }
+
+done:
+  return ret;
+}
+
+/* GstCollectPads clip callback: converts an incoming buffer's timestamps
+ * to running time (scaled by the output segment rate) and drops buffers
+ * that end before the pad's currently queued buffer.  Rejects buffers
+ * without a timestamp; takes ownership of @buf and returns the (possibly
+ * NULL) kept buffer in *outbuf. */
+static GstFlowReturn
+gst_videomixer2_sink_clip (GstCollectPads * pads,
+    GstCollectData * data, GstBuffer * buf, GstBuffer ** outbuf,
+    GstVideoMixer2 * mix)
+{
+  GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (data->pad);
+  GstVideoMixer2Collect *mixcol = pad->mixcol;
+  GstClockTime start_time, end_time;
+
+  start_time = GST_BUFFER_TIMESTAMP (buf);
+  if (start_time == -1) {
+    GST_ERROR_OBJECT (pad, "Timestamped buffers required!");
+    gst_buffer_unref (buf);
+    return GST_FLOW_ERROR;
+  }
+
+  /* no duration: derive one frame duration from the pad's frame rate */
+  end_time = GST_BUFFER_DURATION (buf);
+  if (end_time == -1 && GST_VIDEO_INFO_FPS_N (&pad->info) != 0)
+    end_time =
+        gst_util_uint64_scale_int_round (GST_SECOND,
+        GST_VIDEO_INFO_FPS_D (&pad->info), GST_VIDEO_INFO_FPS_N (&pad->info));
+  if (end_time == -1) {
+    /* can't clip without an end time; keep the buffer as-is */
+    *outbuf = buf;
+    return GST_FLOW_OK;
+  }
+
+  /* clamp into the pad's segment and convert to running time */
+  start_time = MAX (start_time, mixcol->collect.segment.start);
+  start_time =
+      gst_segment_to_running_time (&mixcol->collect.segment,
+      GST_FORMAT_TIME, start_time);
+
+  end_time += GST_BUFFER_TIMESTAMP (buf);
+  if (mixcol->collect.segment.stop != -1)
+    end_time = MIN (end_time, mixcol->collect.segment.stop);
+  end_time =
+      gst_segment_to_running_time (&mixcol->collect.segment,
+      GST_FORMAT_TIME, end_time);
+
+  /* Convert to the output segment rate */
+  if (ABS (mix->segment.rate) != 1.0) {
+    start_time *= ABS (mix->segment.rate);
+    end_time *= ABS (mix->segment.rate);
+  }
+
+  /* drop buffers that end before what we already have queued */
+  if (mixcol->buffer != NULL && end_time < mixcol->end_time) {
+    gst_buffer_unref (buf);
+    *outbuf = NULL;
+    return GST_FLOW_OK;
+  }
+
+  *outbuf = buf;
+  return GST_FLOW_OK;
+}
+
+/* GstCollectPads flush callback: drop tags collected but not yet pushed. */
+static void
+gst_videomixer2_flush (GstCollectPads * pads, GstVideoMixer2 * mix)
+{
+  if (mix->pending_tags) {
+    gst_tag_list_unref (mix->pending_tags);
+    mix->pending_tags = NULL;
+  }
+}
+
+/* GstCollectPads sink event callback.  Handles CAPS (per-pad setcaps,
+ * event consumed), SEGMENT (must be TIME; resets QoS), FLUSH_STOP (drops
+ * the pad's queued buffer and restarts output timing) and TAG (merged into
+ * pending_tags, consumed).  Unhandled events fall through to the
+ * collectpads default handler. */
+static gboolean
+gst_videomixer2_sink_event (GstCollectPads * pads, GstCollectData * cdata,
+    GstEvent * event, GstVideoMixer2 * mix)
+{
+  GstVideoMixer2Pad *pad = GST_VIDEO_MIXER2_PAD (cdata->pad);
+  gboolean ret = TRUE, discard = FALSE;
+
+  GST_DEBUG_OBJECT (pad, "Got %s event: %" GST_PTR_FORMAT,
+      GST_EVENT_TYPE_NAME (event), event);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_event_parse_caps (event, &caps);
+      ret =
+          gst_videomixer2_pad_sink_setcaps (GST_PAD (pad), GST_OBJECT (mix),
+          caps);
+      /* event fully handled here, don't forward it */
+      gst_event_unref (event);
+      event = NULL;
+      break;
+    }
+    case GST_EVENT_SEGMENT:{
+      GstSegment seg;
+      gst_event_copy_segment (event, &seg);
+
+      /* collectpads guarantees time segments for us */
+      g_assert (seg.format == GST_FORMAT_TIME);
+      gst_videomixer2_reset_qos (mix);
+      break;
+    }
+    case GST_EVENT_FLUSH_STOP:
+      mix->newseg_pending = TRUE;
+
+      gst_videomixer2_reset_qos (mix);
+      gst_buffer_replace (&pad->mixcol->buffer, NULL);
+      pad->mixcol->start_time = -1;
+      pad->mixcol->end_time = -1;
+
+      /* restart output timing from scratch */
+      mix->segment.position = -1;
+      mix->ts_offset = 0;
+      mix->nframes = 0;
+      break;
+    case GST_EVENT_TAG:
+    {
+      /* collect tags here so we can push them out when we collect data */
+      GstTagList *tags;
+
+      gst_event_parse_tag (event, &tags);
+      tags = gst_tag_list_merge (mix->pending_tags, tags, GST_TAG_MERGE_APPEND);
+      if (mix->pending_tags)
+        gst_tag_list_unref (mix->pending_tags);
+      mix->pending_tags = tags;
+      event = NULL;
+      break;
+    }
+    default:
+      break;
+  }
+
+  if (event != NULL)
+    return gst_collect_pads_event_default (pads, cdata, event, discard);
+
+  return ret;
+}
+
+/* GstIteratorFoldFunction: push a ref of @event to one sink pad; sets the
+ * fold accumulator to FALSE on push failure but always returns TRUE so
+ * every pad is attempted. */
+static gboolean
+forward_event_func (GValue * item, GValue * ret, GstEvent * event)
+{
+  GstPad *pad = g_value_get_object (item);
+  gst_event_ref (event);
+  GST_LOG_OBJECT (pad, "About to send event %s", GST_EVENT_TYPE_NAME (event));
+  if (!gst_pad_push_event (pad, event)) {
+    g_value_set_boolean (ret, FALSE);
+    GST_WARNING_OBJECT (pad, "Sending event %p (%s) failed.",
+        event, GST_EVENT_TYPE_NAME (event));
+  } else {
+    GST_LOG_OBJECT (pad, "Sent event %p (%s).",
+        event, GST_EVENT_TYPE_NAME (event));
+  }
+  return TRUE;
+}
+
+/* Forward @event upstream to every sink pad.  Consumes the caller's event
+ * reference.  Returns TRUE only if all pads accepted the event. */
+static gboolean
+gst_videomixer2_push_sink_event (GstVideoMixer2 * mix, GstEvent * event)
+{
+  GstIterator *it;
+  GValue vret = { 0 };
+
+  GST_LOG_OBJECT (mix, "Forwarding event %p (%s)", event,
+      GST_EVENT_TYPE_NAME (event));
+
+  g_value_init (&vret, G_TYPE_BOOLEAN);
+  g_value_set_boolean (&vret, TRUE);
+  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
+  /* each pad takes its own ref in forward_event_func */
+  gst_iterator_fold (it, (GstIteratorFoldFunction) forward_event_func, &vret,
+      event);
+  gst_iterator_free (it);
+  gst_event_unref (event);
+
+  return g_value_get_boolean (&vret);
+}
+
+/* GstElement vmethods */
+/* State change handler: starts collectpads and re-arms the initial
+ * stream-start/caps/segment state on READY->PAUSED, stops collectpads on
+ * PAUSED->READY (before chaining up) and resets the mixer afterwards. */
+static GstStateChangeReturn
+gst_videomixer2_change_state (GstElement * element, GstStateChange transition)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (element);
+  GstStateChangeReturn ret;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      mix->send_stream_start = TRUE;
+      mix->send_caps = TRUE;
+      gst_segment_init (&mix->segment, GST_FORMAT_TIME);
+      gst_caps_replace (&mix->current_caps, NULL);
+      GST_LOG_OBJECT (mix, "starting collectpads");
+      gst_collect_pads_start (mix->collect);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      /* must stop collectpads before chaining up so pads can deactivate */
+      GST_LOG_OBJECT (mix, "stopping collectpads");
+      gst_collect_pads_stop (mix->collect);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_videomixer2_reset (mix);
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* Request-pad vmethod: creates a new "sink_%u" pad (honouring a requested
+ * serial when the name parses), registers it with collectpads and inserts
+ * it into the zorder-sorted sinkpad list.  Returns NULL for any other
+ * template. */
+static GstPad *
+gst_videomixer2_request_new_pad (GstElement * element,
+    GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
+{
+  GstVideoMixer2 *mix;
+  GstVideoMixer2Pad *mixpad;
+  GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
+
+  mix = GST_VIDEO_MIXER2 (element);
+
+  if (templ == gst_element_class_get_pad_template (klass, "sink_%u")) {
+    guint serial = 0;
+    gchar *name = NULL;
+    GstVideoMixer2Collect *mixcol = NULL;
+
+    GST_VIDEO_MIXER2_LOCK (mix);
+    if (req_name == NULL || strlen (req_name) < 6
+        || !g_str_has_prefix (req_name, "sink_")) {
+      /* no name given when requesting the pad, use next available int */
+      serial = mix->next_sinkpad++;
+    } else {
+      /* parse serial number from requested padname */
+      serial = g_ascii_strtoull (&req_name[5], NULL, 10);
+      if (serial >= mix->next_sinkpad)
+        mix->next_sinkpad = serial + 1;
+    }
+    /* create new pad with the name */
+    name = g_strdup_printf ("sink_%u", serial);
+    mixpad = g_object_new (GST_TYPE_VIDEO_MIXER2_PAD, "name", name, "direction",
+        templ->direction, "template", templ, NULL);
+    g_free (name);
+
+    /* new pads default to the top of the stack */
+    mixpad->zorder = mix->numpads;
+    mixpad->xpos = DEFAULT_PAD_XPOS;
+    mixpad->ypos = DEFAULT_PAD_YPOS;
+    mixpad->alpha = DEFAULT_PAD_ALPHA;
+
+    mixcol = (GstVideoMixer2Collect *)
+        gst_collect_pads_add_pad (mix->collect, GST_PAD (mixpad),
+        sizeof (GstVideoMixer2Collect),
+        (GstCollectDataDestroyNotify) gst_videomixer2_collect_free, TRUE);
+
+    /* Keep track of each other */
+    mixcol->mixpad = mixpad;
+    mixpad->mixcol = mixcol;
+
+    mixcol->start_time = -1;
+    mixcol->end_time = -1;
+
+    /* Keep an internal list of mixpads for zordering */
+    mix->sinkpads = g_slist_insert_sorted (mix->sinkpads, mixpad,
+        (GCompareFunc) pad_zorder_compare);
+    mix->numpads++;
+    GST_VIDEO_MIXER2_UNLOCK (mix);
+  } else {
+    return NULL;
+  }
+
+  GST_DEBUG_OBJECT (element, "Adding pad %s", GST_PAD_NAME (mixpad));
+
+  /* add the pad to the element */
+  gst_element_add_pad (element, GST_PAD (mixpad));
+  gst_child_proxy_child_added (GST_CHILD_PROXY (mix), G_OBJECT (mixpad),
+      GST_OBJECT_NAME (mixpad));
+
+  return GST_PAD (mixpad);
+}
+
+/* Release-pad vmethod: tears down the pad's converter, removes it from
+ * the sinkpad list, collectpads and the element, and renegotiates the src
+ * caps if a format had already been negotiated. */
+static void
+gst_videomixer2_release_pad (GstElement * element, GstPad * pad)
+{
+  GstVideoMixer2 *mix = NULL;
+  GstVideoMixer2Pad *mixpad;
+  gboolean update_caps;
+
+  mix = GST_VIDEO_MIXER2 (element);
+
+  GST_VIDEO_MIXER2_LOCK (mix);
+  if (G_UNLIKELY (g_slist_find (mix->sinkpads, pad) == NULL)) {
+    g_warning ("Unknown pad %s", GST_PAD_NAME (pad));
+    goto error;
+  }
+
+  mixpad = GST_VIDEO_MIXER2_PAD (pad);
+
+  if (mixpad->convert)
+    gst_video_converter_free (mixpad->convert);
+  mixpad->convert = NULL;
+
+  mix->sinkpads = g_slist_remove (mix->sinkpads, pad);
+  gst_child_proxy_child_removed (GST_CHILD_PROXY (mix), G_OBJECT (mixpad),
+      GST_OBJECT_NAME (mixpad));
+  mix->numpads--;
+
+  /* converters of the remaining pads may need updating */
+  GST_COLLECT_PADS_STREAM_LOCK (mix->collect);
+  gst_videomixer2_update_converters (mix);
+  GST_COLLECT_PADS_STREAM_UNLOCK (mix->collect);
+
+  update_caps = GST_VIDEO_INFO_FORMAT (&mix->info) != GST_VIDEO_FORMAT_UNKNOWN;
+  GST_VIDEO_MIXER2_UNLOCK (mix);
+
+  gst_collect_pads_remove_pad (mix->collect, pad);
+
+  if (update_caps)
+    gst_videomixer2_update_src_caps (mix);
+
+  gst_element_remove_pad (element, pad);
+  return;
+error:
+  GST_VIDEO_MIXER2_UNLOCK (mix);
+}
+
+/* GObject vmethods */
+/* Finalize: release the collectpads object and the mutexes, then chain up. */
+static void
+gst_videomixer2_finalize (GObject * o)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (o);
+
+  gst_object_unref (mix->collect);
+  g_mutex_clear (&mix->lock);
+  g_mutex_clear (&mix->setcaps_lock);
+
+  G_OBJECT_CLASS (parent_class)->finalize (o);
+}
+
+/* Dispose: free per-pad converters, pending tags and cached caps.  May be
+ * invoked more than once, hence the NULL-ing after each release. */
+static void
+gst_videomixer2_dispose (GObject * o)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (o);
+  GSList *tmp;
+
+  for (tmp = mix->sinkpads; tmp; tmp = tmp->next) {
+    GstVideoMixer2Pad *mixpad = tmp->data;
+
+    if (mixpad->convert)
+      gst_video_converter_free (mixpad->convert);
+    mixpad->convert = NULL;
+  }
+
+  if (mix->pending_tags) {
+    gst_tag_list_unref (mix->pending_tags);
+    mix->pending_tags = NULL;
+  }
+
+  gst_caps_replace (&mix->current_caps, NULL);
+
+  G_OBJECT_CLASS (parent_class)->dispose (o);
+}
+
+/* GObject property getter; only the "background" enum is exposed. */
+static void
+gst_videomixer2_get_property (GObject * object,
+    guint prop_id, GValue * value, GParamSpec * pspec)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (object);
+
+  switch (prop_id) {
+    case PROP_BACKGROUND:
+      g_value_set_enum (value, mix->background);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property setter; only the "background" enum is exposed.  Takes
+ * effect on the next blended frame. */
+static void
+gst_videomixer2_set_property (GObject * object,
+    guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (object);
+
+  switch (prop_id) {
+    case PROP_BACKGROUND:
+      mix->background = g_value_get_enum (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GstChildProxy implementation */
+/* Return the index-th sink pad (a new ref), or NULL if out of range. */
+static GObject *
+gst_videomixer2_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
+    guint index)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (child_proxy);
+  GObject *obj;
+
+  GST_VIDEO_MIXER2_LOCK (mix);
+  if ((obj = g_slist_nth_data (mix->sinkpads, index)))
+    g_object_ref (obj);
+  GST_VIDEO_MIXER2_UNLOCK (mix);
+  return obj;
+}
+
+/* Return the number of sink pads currently attached to the mixer. */
+static guint
+gst_videomixer2_child_proxy_get_children_count (GstChildProxy * child_proxy)
+{
+  guint count = 0;
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (child_proxy);
+
+  GST_VIDEO_MIXER2_LOCK (mix);
+  count = mix->numpads;
+  GST_VIDEO_MIXER2_UNLOCK (mix);
+  GST_INFO_OBJECT (mix, "Children Count: %d", count);
+  return count;
+}
+
+/* Hook up the GstChildProxy interface vfuncs so sink pads are accessible
+ * as named children (e.g. for per-pad property access from gst-launch). */
+static void
+gst_videomixer2_child_proxy_init (gpointer g_iface, gpointer iface_data)
+{
+  GstChildProxyInterface *iface = g_iface;
+
+  GST_INFO ("initializing child proxy interface");
+  iface->get_child_by_index = gst_videomixer2_child_proxy_get_child_by_index;
+  iface->get_children_count = gst_videomixer2_child_proxy_get_children_count;
+}
+
+/* constructed: name the collectpads object after the element (the element
+ * name is only known once construction finished), then chain up. */
+static void
+gst_videomixer2_constructed (GObject * obj)
+{
+  GstVideoMixer2 *mix = GST_VIDEO_MIXER2 (obj);
+  gchar *cp_name;
+
+  cp_name = g_strconcat (GST_OBJECT_NAME (obj), "-collectpads", NULL);
+  gst_object_set_name (GST_OBJECT (mix->collect), cp_name);
+  g_free (cp_name);
+
+  G_OBJECT_CLASS (gst_videomixer2_parent_class)->constructed (obj);
+}
+
+/* GObject boilerplate */
+/* Class init: wires up GObject/GstElement vfuncs, installs the
+ * "background" property, registers pad templates and element metadata. */
+static void
+gst_videomixer2_class_init (GstVideoMixer2Class * klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstElementClass *gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->constructed = gst_videomixer2_constructed;
+  gobject_class->finalize = gst_videomixer2_finalize;
+  gobject_class->dispose = gst_videomixer2_dispose;
+
+  gobject_class->get_property = gst_videomixer2_get_property;
+  gobject_class->set_property = gst_videomixer2_set_property;
+
+  g_object_class_install_property (gobject_class, PROP_BACKGROUND,
+      g_param_spec_enum ("background", "Background", "Background type",
+          GST_TYPE_VIDEO_MIXER2_BACKGROUND,
+          DEFAULT_BACKGROUND, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+
+  gstelement_class->request_new_pad =
+      GST_DEBUG_FUNCPTR (gst_videomixer2_request_new_pad);
+  gstelement_class->release_pad =
+      GST_DEBUG_FUNCPTR (gst_videomixer2_release_pad);
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_videomixer2_change_state);
+
+  gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
+  gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
+
+  gst_element_class_set_static_metadata (gstelement_class, "Video mixer 2",
+      "Filter/Editor/Video/Compositor",
+      "Deprecated by compositor. Mix multiple video streams",
+      "Wim Taymans <wim@fluendo.com>, "
+      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
+
+  /* Register the pad class */
+  g_type_class_ref (GST_TYPE_VIDEO_MIXER2_PAD);
+
+  gst_type_mark_as_plugin_api (GST_TYPE_VIDEO_MIXER2_BACKGROUND, 0);
+}
+
+/* Instance init: create the src pad, set up collectpads with all of its
+ * callbacks (collect/event/query/clip/flush), initialise mutexes and
+ * default state. */
+static void
+gst_videomixer2_init (GstVideoMixer2 * mix)
+{
+  GstElementClass *klass = GST_ELEMENT_GET_CLASS (mix);
+
+  mix->srcpad =
+      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
+          "src"), "src");
+  gst_pad_set_query_function (GST_PAD (mix->srcpad),
+      GST_DEBUG_FUNCPTR (gst_videomixer2_src_query));
+  gst_pad_set_event_function (GST_PAD (mix->srcpad),
+      GST_DEBUG_FUNCPTR (gst_videomixer2_src_event));
+  gst_element_add_pad (GST_ELEMENT (mix), mix->srcpad);
+
+  mix->collect = gst_collect_pads_new ();
+  gst_collect_pads_set_flush_function (mix->collect,
+      (GstCollectPadsFlushFunction) gst_videomixer2_flush, mix);
+  mix->background = DEFAULT_BACKGROUND;
+  mix->current_caps = NULL;
+  mix->pending_tags = NULL;
+
+  gst_collect_pads_set_function (mix->collect,
+      (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_videomixer2_collected),
+      mix);
+  gst_collect_pads_set_event_function (mix->collect,
+      (GstCollectPadsEventFunction) gst_videomixer2_sink_event, mix);
+  gst_collect_pads_set_query_function (mix->collect,
+      (GstCollectPadsQueryFunction) gst_videomixer2_sink_query, mix);
+  gst_collect_pads_set_clip_function (mix->collect,
+      (GstCollectPadsClipFunction) gst_videomixer2_sink_clip, mix);
+
+  g_mutex_init (&mix->lock);
+  g_mutex_init (&mix->setcaps_lock);
+  /* initialize variables */
+  gst_videomixer2_reset (mix);
+}
+
+/* Element registration */
+/* Plugin entry point: initialise the debug category and the blend
+ * function tables, then register the "videomixer" element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  GST_DEBUG_CATEGORY_INIT (gst_videomixer2_debug, "videomixer", 0,
+      "video mixer");
+
+  gst_video_mixer_init_blend ();
+
+  return gst_element_register (plugin, "videomixer", GST_RANK_PRIMARY,
+      GST_TYPE_VIDEO_MIXER2);
+}
+
+/* Standard GStreamer plugin descriptor for the "videomixer" plugin. */
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    videomixer,
+    "Video mixer", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,
+    GST_PACKAGE_ORIGIN)
diff --git a/gst/videomixer/videomixer2.h b/gst/videomixer/videomixer2.h
new file mode 100644
index 0000000000..8c6a65f66c
--- /dev/null
+++ b/gst/videomixer/videomixer2.h
@@ -0,0 +1,133 @@
+/* Generic video mixer plugin
+ * Copyright (C) 2008 Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_MIXER2_H__
+#define __GST_VIDEO_MIXER2_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "blend.h"
+#include <gst/base/gstcollectpads.h>
+
+G_BEGIN_DECLS
+
/* Standard GObject cast/check boilerplate for the GstVideoMixer2 type. */
#define GST_TYPE_VIDEO_MIXER2 (gst_videomixer2_get_type())
#define GST_VIDEO_MIXER2(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_MIXER2, GstVideoMixer2))
#define GST_VIDEO_MIXER2_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_MIXER2, GstVideoMixer2Class))
#define GST_IS_VIDEO_MIXER2(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_MIXER2))
#define GST_IS_VIDEO_MIXER2_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_MIXER2))

typedef struct _GstVideoMixer2 GstVideoMixer2;
typedef struct _GstVideoMixer2Class GstVideoMixer2Class;
+
/**
 * GstVideoMixer2Background:
 * @VIDEO_MIXER2_BACKGROUND_CHECKER: checker pattern background
 * @VIDEO_MIXER2_BACKGROUND_BLACK: solid color black background
 * @VIDEO_MIXER2_BACKGROUND_WHITE: solid color white background
 * @VIDEO_MIXER2_BACKGROUND_TRANSPARENT: background is left transparent and layers are composited using "A OVER B" composition rules. This is only applicable to AYUV and ARGB (and variants) as it preserves the alpha channel and allows for further mixing.
 *
 * The different backgrounds videomixer can blend over.
 */
/* NOTE(review): presumably registered as a GEnum for the "background"
 * property in videomixer2.c — keep the value order in sync there. */
typedef enum
{
  VIDEO_MIXER2_BACKGROUND_CHECKER,
  VIDEO_MIXER2_BACKGROUND_BLACK,
  VIDEO_MIXER2_BACKGROUND_WHITE,
  VIDEO_MIXER2_BACKGROUND_TRANSPARENT,
}
GstVideoMixer2Background;
+
/**
 * GstVideoMixer2:
 *
 * The opaque #GstVideoMixer2 structure.
 */
struct _GstVideoMixer2
{
  GstElement element;

  /* < private > */

  /* source pad, created from the "src" template at init time */
  GstPad *srcpad;

  /* Lock to prevent the state to change while blending */
  GMutex lock;

  /* Lock to prevent two src setcaps from happening at the same time */
  GMutex setcaps_lock;

  /* Sink pads are aggregated through GstCollectPads */
  GstCollectPads *collect;

  /* sinkpads, a GSList of GstVideoMixer2Pads */
  GSList *sinkpads;
  gint numpads;
  /* Next available sinkpad index */
  guint next_sinkpad;

  /* Output caps */
  GstVideoInfo info;

  /* current caps */
  GstCaps *current_caps;
  gboolean send_caps;           /* caps still need to be pushed downstream */

  gboolean newseg_pending;      /* a new segment event is still pending */

  GstVideoMixer2Background background;  /* "background" property value */

  /* Current downstream segment */
  GstSegment segment;
  GstClockTime ts_offset;
  guint64 nframes;

  /* QoS stuff */
  gdouble proportion;
  GstClockTime earliest_time;
  guint64 qos_processed, qos_dropped;

  /* blending implementations chosen for the negotiated format
   * (set to DEFAULT_BACKGROUND etc. during init/reset) */
  BlendFunction blend, overlay;
  FillCheckerFunction fill_checker;
  FillColorFunction fill_color;

  gboolean send_stream_start;   /* stream-start event still pending */

  /* latency */
  gboolean live;

  GstTagList *pending_tags;     /* tags set at init, pushed later — see .c */
};
+
/* Class structure: no virtual methods beyond those of GstElement. */
struct _GstVideoMixer2Class
{
  GstElementClass parent_class;
};

GType gst_videomixer2_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_VIDEO_MIXER2_H__ */
diff --git a/gst/videomixer/videomixer2pad.h b/gst/videomixer/videomixer2pad.h
new file mode 100644
index 0000000000..37711e206f
--- /dev/null
+++ b/gst/videomixer/videomixer2pad.h
@@ -0,0 +1,83 @@
+/* Generic video mixer plugin
+ * Copyright (C) 2008 Wim Taymans <wim@fluendo.com>
+ * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef __GST_VIDEO_MIXER2_PAD_H__
+#define __GST_VIDEO_MIXER2_PAD_H__
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include <gst/base/gstcollectpads.h>
+
+G_BEGIN_DECLS
+
/* Standard GObject cast/check boilerplate for the GstVideoMixer2Pad type. */
#define GST_TYPE_VIDEO_MIXER2_PAD (gst_videomixer2_pad_get_type())
#define GST_VIDEO_MIXER2_PAD(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_MIXER2_PAD, GstVideoMixer2Pad))
/* Fixed: the class cast previously referenced GST_TYPE_VIDEO_MIXER_PAD
 * (missing the "2"), a macro defined nowhere, so any expansion of
 * GST_VIDEO_MIXER2_PAD_CLASS() would fail to compile. */
#define GST_VIDEO_MIXER2_PAD_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_MIXER2_PAD, GstVideoMixer2PadClass))
#define GST_IS_VIDEO_MIXER2_PAD(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_MIXER2_PAD))
#define GST_IS_VIDEO_MIXER2_PAD_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_MIXER2_PAD))
+
typedef struct _GstVideoMixer2Pad GstVideoMixer2Pad;
typedef struct _GstVideoMixer2PadClass GstVideoMixer2PadClass;
typedef struct _GstVideoMixer2Collect GstVideoMixer2Collect;

/**
 * GstVideoMixer2Pad:
 *
 * The opaque #GstVideoMixer2Pad structure.
 */
struct _GstVideoMixer2Pad
{
  GstPad parent;

  /* < private > */

  /* caps (format of the stream arriving on this pad) */
  GstVideoInfo info;

  /* properties — presumably x/y position in the output frame, stacking
   * order while blending, and per-pad opacity; confirm against the
   * property installation in videomixer2.c */
  gint xpos, ypos;
  guint zorder;
  gdouble alpha;

  /* collectpads bookkeeping data associated with this pad */
  GstVideoMixer2Collect *mixcol;

  /* caps used for conversion if needed */
  GstVideoInfo conversion_info;

  /* Converter, if NULL no conversion is done */
  GstVideoConverter *convert;

  gboolean need_conversion_update;      /* converter must be re-created */
};
+
/* Pad class structure: no virtual methods beyond those of GstPad. */
struct _GstVideoMixer2PadClass
{
  GstPadClass parent_class;
};

GType gst_videomixer2_pad_get_type (void);
+
+G_END_DECLS
+#endif /* __GST_VIDEO_MIXER2_PAD_H__ */
diff --git a/gst/videomixer/videomixerorc-dist.c b/gst/videomixer/videomixerorc-dist.c
new file mode 100644
index 0000000000..dcc282b515
--- /dev/null
+++ b/gst/videomixer/videomixerorc-dist.c
@@ -0,0 +1,2414 @@
+
/* autogenerated from videomixerorc.orc */
/* NOTE(review): generated by orcc — do not edit by hand; change
 * videomixerorc.orc and regenerate instead. */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <glib.h>

/* Fixed-width integer typedefs covering C99, MSVC and legacy compilers. */
#ifndef _ORC_INTEGER_TYPEDEFS_
#define _ORC_INTEGER_TYPEDEFS_
#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#include <stdint.h>
typedef int8_t orc_int8;
typedef int16_t orc_int16;
typedef int32_t orc_int32;
typedef int64_t orc_int64;
typedef uint8_t orc_uint8;
typedef uint16_t orc_uint16;
typedef uint32_t orc_uint32;
typedef uint64_t orc_uint64;
#define ORC_UINT64_C(x) UINT64_C(x)
#elif defined(_MSC_VER)
typedef signed __int8 orc_int8;
typedef signed __int16 orc_int16;
typedef signed __int32 orc_int32;
typedef signed __int64 orc_int64;
typedef unsigned __int8 orc_uint8;
typedef unsigned __int16 orc_uint16;
typedef unsigned __int32 orc_uint32;
typedef unsigned __int64 orc_uint64;
#define ORC_UINT64_C(x) (x##Ui64)
#define inline __inline
#else
#include <limits.h>
typedef signed char orc_int8;
typedef short orc_int16;
typedef int orc_int32;
typedef unsigned char orc_uint8;
typedef unsigned short orc_uint16;
typedef unsigned int orc_uint32;
#if INT_MAX == LONG_MAX
typedef long long orc_int64;
typedef unsigned long long orc_uint64;
#define ORC_UINT64_C(x) (x##ULL)
#else
typedef long orc_int64;
typedef unsigned long orc_uint64;
#define ORC_UINT64_C(x) (x##UL)
#endif
#endif
/* Union views of 16/32/64-bit values as smaller lanes, used by the
 * reference (non-JIT) implementations below. */
typedef union
{
  orc_int16 i;
  orc_int8 x2[2];
} orc_union16;
typedef union
{
  orc_int32 i;
  float f;
  orc_int16 x2[2];
  orc_int8 x4[4];
} orc_union32;
typedef union
{
  orc_int64 i;
  double f;
  orc_int32 x2[2];
  float x2f[2];
  orc_int16 x4[4];
} orc_union64;
#endif
#ifndef ORC_RESTRICT
#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define ORC_RESTRICT restrict
#elif defined(__GNUC__) && __GNUC__ >= 4
#define ORC_RESTRICT __restrict__
#else
#define ORC_RESTRICT
#endif
#endif

#ifndef ORC_INTERNAL
#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
#define ORC_INTERNAL __attribute__((visibility("hidden")))
#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
#define ORC_INTERNAL __hidden
#elif defined (__GNUC__)
#define ORC_INTERNAL __attribute__((visibility("hidden")))
#else
#define ORC_INTERNAL
#endif
#endif


#ifndef DISABLE_ORC
#include <orc/orc.h>
#endif
/* Public prototypes of the generated mixing/blending kernels. */
void video_mixer_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n);
void video_mixer_orc_memcpy_u32 (guint32 * ORC_RESTRICT d1,
    const guint32 * ORC_RESTRICT s1, int n);
void video_mixer_orc_blend_u8 (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
void video_mixer_orc_blend_argb (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
void video_mixer_orc_blend_bgra (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
void video_mixer_orc_overlay_argb (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
void video_mixer_orc_overlay_bgra (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);


/* begin Orc C target preamble */
#define ORC_CLAMP(x,a,b) ((x)<(a) ? (a) : ((x)>(b) ? (b) : (x)))
#define ORC_ABS(a) ((a)<0 ? -(a) : (a))
#define ORC_MIN(a,b) ((a)<(b) ? (a) : (b))
#define ORC_MAX(a,b) ((a)>(b) ? (a) : (b))
#define ORC_SB_MAX 127
#define ORC_SB_MIN (-1-ORC_SB_MAX)
#define ORC_UB_MAX (orc_uint8) 255
#define ORC_UB_MIN 0
#define ORC_SW_MAX 32767
#define ORC_SW_MIN (-1-ORC_SW_MAX)
#define ORC_UW_MAX (orc_uint16)65535
#define ORC_UW_MIN 0
#define ORC_SL_MAX 2147483647
#define ORC_SL_MIN (-1-ORC_SL_MAX)
#define ORC_UL_MAX 4294967295U
#define ORC_UL_MIN 0
#define ORC_CLAMP_SB(x) ORC_CLAMP(x,ORC_SB_MIN,ORC_SB_MAX)
#define ORC_CLAMP_UB(x) ORC_CLAMP(x,ORC_UB_MIN,ORC_UB_MAX)
#define ORC_CLAMP_SW(x) ORC_CLAMP(x,ORC_SW_MIN,ORC_SW_MAX)
#define ORC_CLAMP_UW(x) ORC_CLAMP(x,ORC_UW_MIN,ORC_UW_MAX)
#define ORC_CLAMP_SL(x) ORC_CLAMP(x,ORC_SL_MIN,ORC_SL_MAX)
#define ORC_CLAMP_UL(x) ORC_CLAMP(x,ORC_UL_MIN,ORC_UL_MAX)
#define ORC_SWAP_W(x) ((((x)&0xffU)<<8) | (((x)&0xff00U)>>8))
#define ORC_SWAP_L(x) ((((x)&0xffU)<<24) | (((x)&0xff00U)<<8) | (((x)&0xff0000U)>>8) | (((x)&0xff000000U)>>24))
#define ORC_SWAP_Q(x) ((((x)&ORC_UINT64_C(0xff))<<56) | (((x)&ORC_UINT64_C(0xff00))<<40) | (((x)&ORC_UINT64_C(0xff0000))<<24) | (((x)&ORC_UINT64_C(0xff000000))<<8) | (((x)&ORC_UINT64_C(0xff00000000))>>8) | (((x)&ORC_UINT64_C(0xff0000000000))>>24) | (((x)&ORC_UINT64_C(0xff000000000000))>>40) | (((x)&ORC_UINT64_C(0xff00000000000000))>>56))
#define ORC_PTR_OFFSET(ptr,offset) ((void *)(((unsigned char *)(ptr)) + (offset)))
#define ORC_DENORMAL(x) ((x) & ((((x)&0x7f800000) == 0) ? 0xff800000 : 0xffffffff))
#define ORC_ISNAN(x) ((((x)&0x7f800000) == 0x7f800000) && (((x)&0x007fffff) != 0))
#define ORC_DENORMAL_DOUBLE(x) ((x) & ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == 0) ? ORC_UINT64_C(0xfff0000000000000) : ORC_UINT64_C(0xffffffffffffffff)))
#define ORC_ISNAN_DOUBLE(x) ((((x)&ORC_UINT64_C(0x7ff0000000000000)) == ORC_UINT64_C(0x7ff0000000000000)) && (((x)&ORC_UINT64_C(0x000fffffffffffff)) != 0))
#ifndef ORC_RESTRICT
#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
#define ORC_RESTRICT restrict
#elif defined(__GNUC__) && __GNUC__ >= 4
#define ORC_RESTRICT __restrict__
#else
#define ORC_RESTRICT
#endif
#endif
/* end Orc C target preamble */
+
+
+
/* video_mixer_orc_splat_u32 */
/* Generated by orcc — fills the n 32-bit words at d1 with the constant p1. */
#ifdef DISABLE_ORC
/* Plain-C reference implementation used when Orc is disabled at build time. */
void
video_mixer_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n)
{
  int i;
  orc_union32 *ORC_RESTRICT ptr0;
  orc_union32 var32;
  orc_union32 var33;

  ptr0 = (orc_union32 *) d1;

  /* 0: loadpl */
  var32.i = p1;

  for (i = 0; i < n; i++) {
    /* 1: copyl */
    var33.i = var32.i;
    /* 2: storel */
    ptr0[i] = var33;
  }

}

#else
/* Fallback executed by Orc when the bytecode cannot be JIT-compiled. */
static void
_backup_video_mixer_orc_splat_u32 (OrcExecutor * ORC_RESTRICT ex)
{
  int i;
  int n = ex->n;
  orc_union32 *ORC_RESTRICT ptr0;
  orc_union32 var32;
  orc_union32 var33;

  ptr0 = (orc_union32 *) ex->arrays[0];

  /* 0: loadpl */
  var32.i = ex->params[24];

  for (i = 0; i < n; i++) {
    /* 1: copyl */
    var33.i = var32.i;
    /* 2: storel */
    ptr0[i] = var33;
  }

}

/* Compiles the Orc program once (double-checked under the Orc mutex),
 * then dispatches every call through the compiled code. */
void
video_mixer_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n)
{
  OrcExecutor _ex, *ex = &_ex;
  static volatile int p_inited = 0;
  static OrcCode *c = 0;
  void (*func) (OrcExecutor *);

  if (!p_inited) {
    orc_once_mutex_lock ();
    if (!p_inited) {
      OrcProgram *p;

#if 1
      static const orc_uint8 bc[] = {
        1, 9, 25, 118, 105, 100, 101, 111, 95, 109, 105, 120, 101, 114, 95, 111,
        114, 99, 95, 115, 112, 108, 97, 116, 95, 117, 51, 50, 11, 4, 4, 16,
        4, 112, 0, 24, 2, 0,
      };
      p = orc_program_new_from_static_bytecode (bc);
      orc_program_set_backup_function (p, _backup_video_mixer_orc_splat_u32);
#else
      p = orc_program_new ();
      orc_program_set_name (p, "video_mixer_orc_splat_u32");
      orc_program_set_backup_function (p, _backup_video_mixer_orc_splat_u32);
      orc_program_add_destination (p, 4, "d1");
      orc_program_add_parameter (p, 4, "p1");

      orc_program_append_2 (p, "copyl", 0, ORC_VAR_D1, ORC_VAR_P1, ORC_VAR_D1,
          ORC_VAR_D1);
#endif

      orc_program_compile (p);
      c = orc_program_take_code (p);
      orc_program_free (p);
    }
    p_inited = TRUE;
    orc_once_mutex_unlock ();
  }
  ex->arrays[ORC_VAR_A2] = c;
  ex->program = 0;

  ex->n = n;
  ex->arrays[ORC_VAR_D1] = d1;
  ex->params[ORC_VAR_P1] = p1;

  func = c->exec;
  func (ex);
}
#endif
+
+
/* video_mixer_orc_memcpy_u32 */
/* Generated by orcc — copies n 32-bit words from s1 to d1. */
#ifdef DISABLE_ORC
/* Plain-C reference implementation used when Orc is disabled at build time. */
void
video_mixer_orc_memcpy_u32 (guint32 * ORC_RESTRICT d1,
    const guint32 * ORC_RESTRICT s1, int n)
{
  int i;
  orc_union32 *ORC_RESTRICT ptr0;
  const orc_union32 *ORC_RESTRICT ptr4;
  orc_union32 var32;
  orc_union32 var33;

  ptr0 = (orc_union32 *) d1;
  ptr4 = (orc_union32 *) s1;


  for (i = 0; i < n; i++) {
    /* 0: loadl */
    var32 = ptr4[i];
    /* 1: copyl */
    var33.i = var32.i;
    /* 2: storel */
    ptr0[i] = var33;
  }

}

#else
/* Fallback executed by Orc when the bytecode cannot be JIT-compiled. */
static void
_backup_video_mixer_orc_memcpy_u32 (OrcExecutor * ORC_RESTRICT ex)
{
  int i;
  int n = ex->n;
  orc_union32 *ORC_RESTRICT ptr0;
  const orc_union32 *ORC_RESTRICT ptr4;
  orc_union32 var32;
  orc_union32 var33;

  ptr0 = (orc_union32 *) ex->arrays[0];
  ptr4 = (orc_union32 *) ex->arrays[4];


  for (i = 0; i < n; i++) {
    /* 0: loadl */
    var32 = ptr4[i];
    /* 1: copyl */
    var33.i = var32.i;
    /* 2: storel */
    ptr0[i] = var33;
  }

}

/* Compiles the Orc program once, then dispatches through the compiled code. */
void
video_mixer_orc_memcpy_u32 (guint32 * ORC_RESTRICT d1,
    const guint32 * ORC_RESTRICT s1, int n)
{
  OrcExecutor _ex, *ex = &_ex;
  static volatile int p_inited = 0;
  static OrcCode *c = 0;
  void (*func) (OrcExecutor *);

  if (!p_inited) {
    orc_once_mutex_lock ();
    if (!p_inited) {
      OrcProgram *p;

#if 1
      static const orc_uint8 bc[] = {
        1, 9, 26, 118, 105, 100, 101, 111, 95, 109, 105, 120, 101, 114, 95, 111,
        114, 99, 95, 109, 101, 109, 99, 112, 121, 95, 117, 51, 50, 11, 4, 4,
        12, 4, 4, 112, 0, 4, 2, 0,
      };
      p = orc_program_new_from_static_bytecode (bc);
      orc_program_set_backup_function (p, _backup_video_mixer_orc_memcpy_u32);
#else
      p = orc_program_new ();
      orc_program_set_name (p, "video_mixer_orc_memcpy_u32");
      orc_program_set_backup_function (p, _backup_video_mixer_orc_memcpy_u32);
      orc_program_add_destination (p, 4, "d1");
      orc_program_add_source (p, 4, "s1");

      orc_program_append_2 (p, "copyl", 0, ORC_VAR_D1, ORC_VAR_S1, ORC_VAR_D1,
          ORC_VAR_D1);
#endif

      orc_program_compile (p);
      c = orc_program_take_code (p);
      orc_program_free (p);
    }
    p_inited = TRUE;
    orc_once_mutex_unlock ();
  }
  ex->arrays[ORC_VAR_A2] = c;
  ex->program = 0;

  ex->n = n;
  ex->arrays[ORC_VAR_D1] = d1;
  ex->arrays[ORC_VAR_S1] = (void *) s1;

  func = c->exec;
  func (ex);
}
#endif
+
+
/* video_mixer_orc_blend_u8 */
/* Generated by orcc — blends an n x m byte region of s1 into d1 (per-row
 * strides d1_stride/s1_stride) as d = clamp(d + (s - d) * p1 / 256),
 * i.e. p1 is an 8.8 fixed-point blend factor. */
#ifdef DISABLE_ORC
/* Plain-C reference implementation used when Orc is disabled at build time. */
void
video_mixer_orc_blend_u8 (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
{
  int i;
  int j;
  orc_int8 *ORC_RESTRICT ptr0;
  const orc_int8 *ORC_RESTRICT ptr4;
  orc_int8 var34;
  orc_int8 var35;
  orc_union16 var36;
  orc_int8 var37;
  orc_union16 var38;
  orc_union16 var39;
  orc_union16 var40;
  orc_union16 var41;
  orc_union16 var42;
  orc_union16 var43;
  orc_union16 var44;

  for (j = 0; j < m; j++) {
    ptr0 = ORC_PTR_OFFSET (d1, d1_stride * j);
    ptr4 = ORC_PTR_OFFSET (s1, s1_stride * j);

    /* 5: loadpw */
    var36.i = p1;

    for (i = 0; i < n; i++) {
      /* 0: loadb */
      var34 = ptr0[i];
      /* 1: convubw */
      var38.i = (orc_uint8) var34;
      /* 2: loadb */
      var35 = ptr4[i];
      /* 3: convubw */
      var39.i = (orc_uint8) var35;
      /* 4: subw */
      var40.i = var39.i - var38.i;
      /* 6: mullw */
      var41.i = (var40.i * var36.i) & 0xffff;
      /* 7: shlw */
      var42.i = ((orc_uint16) var38.i) << 8;
      /* 8: addw */
      var43.i = var42.i + var41.i;
      /* 9: shruw */
      var44.i = ((orc_uint16) var43.i) >> 8;
      /* 10: convsuswb */
      var37 = ORC_CLAMP_UB (var44.i);
      /* 11: storeb */
      ptr0[i] = var37;
    }
  }

}

#else
/* Fallback executed by Orc when the bytecode cannot be JIT-compiled. */
static void
_backup_video_mixer_orc_blend_u8 (OrcExecutor * ORC_RESTRICT ex)
{
  int i;
  int j;
  int n = ex->n;
  int m = ex->params[ORC_VAR_A1];
  orc_int8 *ORC_RESTRICT ptr0;
  const orc_int8 *ORC_RESTRICT ptr4;
  orc_int8 var34;
  orc_int8 var35;
  orc_union16 var36;
  orc_int8 var37;
  orc_union16 var38;
  orc_union16 var39;
  orc_union16 var40;
  orc_union16 var41;
  orc_union16 var42;
  orc_union16 var43;
  orc_union16 var44;

  for (j = 0; j < m; j++) {
    ptr0 = ORC_PTR_OFFSET (ex->arrays[0], ex->params[0] * j);
    ptr4 = ORC_PTR_OFFSET (ex->arrays[4], ex->params[4] * j);

    /* 5: loadpw */
    var36.i = ex->params[24];

    for (i = 0; i < n; i++) {
      /* 0: loadb */
      var34 = ptr0[i];
      /* 1: convubw */
      var38.i = (orc_uint8) var34;
      /* 2: loadb */
      var35 = ptr4[i];
      /* 3: convubw */
      var39.i = (orc_uint8) var35;
      /* 4: subw */
      var40.i = var39.i - var38.i;
      /* 6: mullw */
      var41.i = (var40.i * var36.i) & 0xffff;
      /* 7: shlw */
      var42.i = ((orc_uint16) var38.i) << 8;
      /* 8: addw */
      var43.i = var42.i + var41.i;
      /* 9: shruw */
      var44.i = ((orc_uint16) var43.i) >> 8;
      /* 10: convsuswb */
      var37 = ORC_CLAMP_UB (var44.i);
      /* 11: storeb */
      ptr0[i] = var37;
    }
  }

}

/* Compiles the 2-D Orc program once, then dispatches through it. */
void
video_mixer_orc_blend_u8 (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
{
  OrcExecutor _ex, *ex = &_ex;
  static volatile int p_inited = 0;
  static OrcCode *c = 0;
  void (*func) (OrcExecutor *);

  if (!p_inited) {
    orc_once_mutex_lock ();
    if (!p_inited) {
      OrcProgram *p;

#if 1
      static const orc_uint8 bc[] = {
        1, 7, 9, 24, 118, 105, 100, 101, 111, 95, 109, 105, 120, 101, 114, 95,
        111, 114, 99, 95, 98, 108, 101, 110, 100, 95, 117, 56, 11, 1, 1, 12,
        1, 1, 14, 1, 8, 0, 0, 0, 16, 2, 20, 2, 20, 2, 150, 32,
        0, 150, 33, 4, 98, 33, 33, 32, 89, 33, 33, 24, 93, 32, 32, 16,
        70, 33, 32, 33, 95, 33, 33, 16, 160, 0, 33, 2, 0,
      };
      p = orc_program_new_from_static_bytecode (bc);
      orc_program_set_backup_function (p, _backup_video_mixer_orc_blend_u8);
#else
      p = orc_program_new ();
      orc_program_set_2d (p);
      orc_program_set_name (p, "video_mixer_orc_blend_u8");
      orc_program_set_backup_function (p, _backup_video_mixer_orc_blend_u8);
      orc_program_add_destination (p, 1, "d1");
      orc_program_add_source (p, 1, "s1");
      orc_program_add_constant (p, 1, 0x00000008, "c1");
      orc_program_add_parameter (p, 2, "p1");
      orc_program_add_temporary (p, 2, "t1");
      orc_program_add_temporary (p, 2, "t2");

      orc_program_append_2 (p, "convubw", 0, ORC_VAR_T1, ORC_VAR_D1, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convubw", 0, ORC_VAR_T2, ORC_VAR_S1, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "subw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_T1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "mullw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_P1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "shlw", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_C1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "addw", 0, ORC_VAR_T2, ORC_VAR_T1, ORC_VAR_T2,
          ORC_VAR_D1);
      orc_program_append_2 (p, "shruw", 0, ORC_VAR_T2, ORC_VAR_T2, ORC_VAR_C1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convsuswb", 0, ORC_VAR_D1, ORC_VAR_T2,
          ORC_VAR_D1, ORC_VAR_D1);
#endif

      orc_program_compile (p);
      c = orc_program_take_code (p);
      orc_program_free (p);
    }
    p_inited = TRUE;
    orc_once_mutex_unlock ();
  }
  ex->arrays[ORC_VAR_A2] = c;
  ex->program = 0;

  ex->n = n;
  ORC_EXECUTOR_M (ex) = m;
  ex->arrays[ORC_VAR_D1] = d1;
  ex->params[ORC_VAR_D1] = d1_stride;
  ex->arrays[ORC_VAR_S1] = (void *) s1;
  ex->params[ORC_VAR_S1] = s1_stride;
  ex->params[ORC_VAR_P1] = p1;

  func = c->exec;
  func (ex);
}
#endif
+
+
/* video_mixer_orc_blend_argb */
/* Generated by orcc — alpha-blends n x m 32-bit pixels of s1 over d1
 * (per-row byte strides): the low byte of each source pixel (the alpha
 * component in this layout) is scaled by p1/256 and used as the per-pixel
 * blend factor; the destination's low byte is forced to 0xff afterwards. */
#ifdef DISABLE_ORC
/* Plain-C reference implementation used when Orc is disabled at build time. */
void
video_mixer_orc_blend_argb (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
{
  int i;
  int j;
  orc_union32 *ORC_RESTRICT ptr0;
  const orc_union32 *ORC_RESTRICT ptr4;
  orc_union64 var39;
#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
  volatile orc_union32 var40;
#else
  orc_union32 var40;
#endif
  orc_union32 var41;
  orc_union16 var42;
  orc_int8 var43;
  orc_union32 var44;
  orc_union64 var45;
  orc_union64 var46;
  orc_union64 var47;
  orc_union64 var48;
  orc_union32 var49;
  orc_union64 var50;
  orc_union64 var51;
  orc_union64 var52;
  orc_union64 var53;
  orc_union64 var54;
  orc_union32 var55;
  orc_union32 var56;

  for (j = 0; j < m; j++) {
    ptr0 = ORC_PTR_OFFSET (d1, d1_stride * j);
    ptr4 = ORC_PTR_OFFSET (s1, s1_stride * j);

    /* 5: loadpw */
    var39.x4[0] = p1;
    var39.x4[1] = p1;
    var39.x4[2] = p1;
    var39.x4[3] = p1;
    /* 16: loadpl */
    var40.i = 0x000000ff;       /* 255 or 1.25987e-321f */

    for (i = 0; i < n; i++) {
      /* 0: loadl */
      var41 = ptr4[i];
      /* 1: convlw */
      var42.i = var41.i;
      /* 2: convwb */
      var43 = var42.i;
      /* 3: splatbl */
      var44.i =
          ((((orc_uint32) var43) & 0xff) << 24) | ((((orc_uint32) var43) & 0xff)
          << 16) | ((((orc_uint32) var43) & 0xff) << 8) | (((orc_uint32) var43)
          & 0xff);
      /* 4: convubw */
      var45.x4[0] = (orc_uint8) var44.x4[0];
      var45.x4[1] = (orc_uint8) var44.x4[1];
      var45.x4[2] = (orc_uint8) var44.x4[2];
      var45.x4[3] = (orc_uint8) var44.x4[3];
      /* 6: mullw */
      var46.x4[0] = (var45.x4[0] * var39.x4[0]) & 0xffff;
      var46.x4[1] = (var45.x4[1] * var39.x4[1]) & 0xffff;
      var46.x4[2] = (var45.x4[2] * var39.x4[2]) & 0xffff;
      var46.x4[3] = (var45.x4[3] * var39.x4[3]) & 0xffff;
      /* 7: shruw */
      var47.x4[0] = ((orc_uint16) var46.x4[0]) >> 8;
      var47.x4[1] = ((orc_uint16) var46.x4[1]) >> 8;
      var47.x4[2] = ((orc_uint16) var46.x4[2]) >> 8;
      var47.x4[3] = ((orc_uint16) var46.x4[3]) >> 8;
      /* 8: convubw */
      var48.x4[0] = (orc_uint8) var41.x4[0];
      var48.x4[1] = (orc_uint8) var41.x4[1];
      var48.x4[2] = (orc_uint8) var41.x4[2];
      var48.x4[3] = (orc_uint8) var41.x4[3];
      /* 9: loadl */
      var49 = ptr0[i];
      /* 10: convubw */
      var50.x4[0] = (orc_uint8) var49.x4[0];
      var50.x4[1] = (orc_uint8) var49.x4[1];
      var50.x4[2] = (orc_uint8) var49.x4[2];
      var50.x4[3] = (orc_uint8) var49.x4[3];
      /* 11: subw */
      var51.x4[0] = var48.x4[0] - var50.x4[0];
      var51.x4[1] = var48.x4[1] - var50.x4[1];
      var51.x4[2] = var48.x4[2] - var50.x4[2];
      var51.x4[3] = var48.x4[3] - var50.x4[3];
      /* 12: mullw */
      var52.x4[0] = (var51.x4[0] * var47.x4[0]) & 0xffff;
      var52.x4[1] = (var51.x4[1] * var47.x4[1]) & 0xffff;
      var52.x4[2] = (var51.x4[2] * var47.x4[2]) & 0xffff;
      var52.x4[3] = (var51.x4[3] * var47.x4[3]) & 0xffff;
      /* 13: div255w */
      var53.x4[0] =
          ((orc_uint16) (((orc_uint16) (var52.x4[0] + 128)) +
              (((orc_uint16) (var52.x4[0] + 128)) >> 8))) >> 8;
      var53.x4[1] =
          ((orc_uint16) (((orc_uint16) (var52.x4[1] + 128)) +
              (((orc_uint16) (var52.x4[1] + 128)) >> 8))) >> 8;
      var53.x4[2] =
          ((orc_uint16) (((orc_uint16) (var52.x4[2] + 128)) +
              (((orc_uint16) (var52.x4[2] + 128)) >> 8))) >> 8;
      var53.x4[3] =
          ((orc_uint16) (((orc_uint16) (var52.x4[3] + 128)) +
              (((orc_uint16) (var52.x4[3] + 128)) >> 8))) >> 8;
      /* 14: addw */
      var54.x4[0] = var50.x4[0] + var53.x4[0];
      var54.x4[1] = var50.x4[1] + var53.x4[1];
      var54.x4[2] = var50.x4[2] + var53.x4[2];
      var54.x4[3] = var50.x4[3] + var53.x4[3];
      /* 15: convwb */
      var55.x4[0] = var54.x4[0];
      var55.x4[1] = var54.x4[1];
      var55.x4[2] = var54.x4[2];
      var55.x4[3] = var54.x4[3];
      /* 17: orl */
      var56.i = var55.i | var40.i;
      /* 18: storel */
      ptr0[i] = var56;
    }
  }

}

#else
/* Fallback executed by Orc when the bytecode cannot be JIT-compiled. */
static void
_backup_video_mixer_orc_blend_argb (OrcExecutor * ORC_RESTRICT ex)
{
  int i;
  int j;
  int n = ex->n;
  int m = ex->params[ORC_VAR_A1];
  orc_union32 *ORC_RESTRICT ptr0;
  const orc_union32 *ORC_RESTRICT ptr4;
  orc_union64 var39;
#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
  volatile orc_union32 var40;
#else
  orc_union32 var40;
#endif
  orc_union32 var41;
  orc_union16 var42;
  orc_int8 var43;
  orc_union32 var44;
  orc_union64 var45;
  orc_union64 var46;
  orc_union64 var47;
  orc_union64 var48;
  orc_union32 var49;
  orc_union64 var50;
  orc_union64 var51;
  orc_union64 var52;
  orc_union64 var53;
  orc_union64 var54;
  orc_union32 var55;
  orc_union32 var56;

  for (j = 0; j < m; j++) {
    ptr0 = ORC_PTR_OFFSET (ex->arrays[0], ex->params[0] * j);
    ptr4 = ORC_PTR_OFFSET (ex->arrays[4], ex->params[4] * j);

    /* 5: loadpw */
    var39.x4[0] = ex->params[24];
    var39.x4[1] = ex->params[24];
    var39.x4[2] = ex->params[24];
    var39.x4[3] = ex->params[24];
    /* 16: loadpl */
    var40.i = 0x000000ff;       /* 255 or 1.25987e-321f */

    for (i = 0; i < n; i++) {
      /* 0: loadl */
      var41 = ptr4[i];
      /* 1: convlw */
      var42.i = var41.i;
      /* 2: convwb */
      var43 = var42.i;
      /* 3: splatbl */
      var44.i =
          ((((orc_uint32) var43) & 0xff) << 24) | ((((orc_uint32) var43) & 0xff)
          << 16) | ((((orc_uint32) var43) & 0xff) << 8) | (((orc_uint32) var43)
          & 0xff);
      /* 4: convubw */
      var45.x4[0] = (orc_uint8) var44.x4[0];
      var45.x4[1] = (orc_uint8) var44.x4[1];
      var45.x4[2] = (orc_uint8) var44.x4[2];
      var45.x4[3] = (orc_uint8) var44.x4[3];
      /* 6: mullw */
      var46.x4[0] = (var45.x4[0] * var39.x4[0]) & 0xffff;
      var46.x4[1] = (var45.x4[1] * var39.x4[1]) & 0xffff;
      var46.x4[2] = (var45.x4[2] * var39.x4[2]) & 0xffff;
      var46.x4[3] = (var45.x4[3] * var39.x4[3]) & 0xffff;
      /* 7: shruw */
      var47.x4[0] = ((orc_uint16) var46.x4[0]) >> 8;
      var47.x4[1] = ((orc_uint16) var46.x4[1]) >> 8;
      var47.x4[2] = ((orc_uint16) var46.x4[2]) >> 8;
      var47.x4[3] = ((orc_uint16) var46.x4[3]) >> 8;
      /* 8: convubw */
      var48.x4[0] = (orc_uint8) var41.x4[0];
      var48.x4[1] = (orc_uint8) var41.x4[1];
      var48.x4[2] = (orc_uint8) var41.x4[2];
      var48.x4[3] = (orc_uint8) var41.x4[3];
      /* 9: loadl */
      var49 = ptr0[i];
      /* 10: convubw */
      var50.x4[0] = (orc_uint8) var49.x4[0];
      var50.x4[1] = (orc_uint8) var49.x4[1];
      var50.x4[2] = (orc_uint8) var49.x4[2];
      var50.x4[3] = (orc_uint8) var49.x4[3];
      /* 11: subw */
      var51.x4[0] = var48.x4[0] - var50.x4[0];
      var51.x4[1] = var48.x4[1] - var50.x4[1];
      var51.x4[2] = var48.x4[2] - var50.x4[2];
      var51.x4[3] = var48.x4[3] - var50.x4[3];
      /* 12: mullw */
      var52.x4[0] = (var51.x4[0] * var47.x4[0]) & 0xffff;
      var52.x4[1] = (var51.x4[1] * var47.x4[1]) & 0xffff;
      var52.x4[2] = (var51.x4[2] * var47.x4[2]) & 0xffff;
      var52.x4[3] = (var51.x4[3] * var47.x4[3]) & 0xffff;
      /* 13: div255w */
      var53.x4[0] =
          ((orc_uint16) (((orc_uint16) (var52.x4[0] + 128)) +
              (((orc_uint16) (var52.x4[0] + 128)) >> 8))) >> 8;
      var53.x4[1] =
          ((orc_uint16) (((orc_uint16) (var52.x4[1] + 128)) +
              (((orc_uint16) (var52.x4[1] + 128)) >> 8))) >> 8;
      var53.x4[2] =
          ((orc_uint16) (((orc_uint16) (var52.x4[2] + 128)) +
              (((orc_uint16) (var52.x4[2] + 128)) >> 8))) >> 8;
      var53.x4[3] =
          ((orc_uint16) (((orc_uint16) (var52.x4[3] + 128)) +
              (((orc_uint16) (var52.x4[3] + 128)) >> 8))) >> 8;
      /* 14: addw */
      var54.x4[0] = var50.x4[0] + var53.x4[0];
      var54.x4[1] = var50.x4[1] + var53.x4[1];
      var54.x4[2] = var50.x4[2] + var53.x4[2];
      var54.x4[3] = var50.x4[3] + var53.x4[3];
      /* 15: convwb */
      var55.x4[0] = var54.x4[0];
      var55.x4[1] = var54.x4[1];
      var55.x4[2] = var54.x4[2];
      var55.x4[3] = var54.x4[3];
      /* 17: orl */
      var56.i = var55.i | var40.i;
      /* 18: storel */
      ptr0[i] = var56;
    }
  }

}

/* Compiles the 2-D Orc program once, then dispatches through it. */
void
video_mixer_orc_blend_argb (guint8 * ORC_RESTRICT d1, int d1_stride,
    const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
{
  OrcExecutor _ex, *ex = &_ex;
  static volatile int p_inited = 0;
  static OrcCode *c = 0;
  void (*func) (OrcExecutor *);

  if (!p_inited) {
    orc_once_mutex_lock ();
    if (!p_inited) {
      OrcProgram *p;

#if 1
      static const orc_uint8 bc[] = {
        1, 7, 9, 26, 118, 105, 100, 101, 111, 95, 109, 105, 120, 101, 114, 95,
        111, 114, 99, 95, 98, 108, 101, 110, 100, 95, 97, 114, 103, 98, 11, 4,
        4, 12, 4, 4, 14, 4, 255, 0, 0, 0, 14, 2, 8, 0, 0, 0,
        16, 2, 20, 4, 20, 2, 20, 1, 20, 4, 20, 8, 20, 8, 20, 8,
        113, 32, 4, 163, 33, 32, 157, 34, 33, 152, 35, 34, 21, 2, 150, 38,
        35, 21, 2, 89, 38, 38, 24, 21, 2, 95, 38, 38, 17, 21, 2, 150,
        37, 32, 113, 32, 0, 21, 2, 150, 36, 32, 21, 2, 98, 37, 37, 36,
        21, 2, 89, 37, 37, 38, 21, 2, 80, 37, 37, 21, 2, 70, 36, 36,
        37, 21, 2, 157, 32, 36, 123, 32, 32, 16, 128, 0, 32, 2, 0,
      };
      p = orc_program_new_from_static_bytecode (bc);
      orc_program_set_backup_function (p, _backup_video_mixer_orc_blend_argb);
#else
      p = orc_program_new ();
      orc_program_set_2d (p);
      orc_program_set_name (p, "video_mixer_orc_blend_argb");
      orc_program_set_backup_function (p, _backup_video_mixer_orc_blend_argb);
      orc_program_add_destination (p, 4, "d1");
      orc_program_add_source (p, 4, "s1");
      orc_program_add_constant (p, 4, 0x000000ff, "c1");
      orc_program_add_constant (p, 2, 0x00000008, "c2");
      orc_program_add_parameter (p, 2, "p1");
      orc_program_add_temporary (p, 4, "t1");
      orc_program_add_temporary (p, 2, "t2");
      orc_program_add_temporary (p, 1, "t3");
      orc_program_add_temporary (p, 4, "t4");
      orc_program_add_temporary (p, 8, "t5");
      orc_program_add_temporary (p, 8, "t6");
      orc_program_add_temporary (p, 8, "t7");

      orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convlw", 0, ORC_VAR_T2, ORC_VAR_T1, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convwb", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "splatbl", 0, ORC_VAR_T4, ORC_VAR_T3, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convubw", 2, ORC_VAR_T7, ORC_VAR_T4, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "mullw", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_P1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "shruw", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_C2,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convubw", 2, ORC_VAR_T6, ORC_VAR_T1, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_D1, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convubw", 2, ORC_VAR_T5, ORC_VAR_T1, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "subw", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_T5,
          ORC_VAR_D1);
      orc_program_append_2 (p, "mullw", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_T7,
          ORC_VAR_D1);
      orc_program_append_2 (p, "div255w", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "addw", 2, ORC_VAR_T5, ORC_VAR_T5, ORC_VAR_T6,
          ORC_VAR_D1);
      orc_program_append_2 (p, "convwb", 2, ORC_VAR_T1, ORC_VAR_T5, ORC_VAR_D1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "orl", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_C1,
          ORC_VAR_D1);
      orc_program_append_2 (p, "storel", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_D1,
          ORC_VAR_D1);
#endif

      orc_program_compile (p);
      c = orc_program_take_code (p);
      orc_program_free (p);
    }
    p_inited = TRUE;
    orc_once_mutex_unlock ();
  }
  ex->arrays[ORC_VAR_A2] = c;
  ex->program = 0;

  ex->n = n;
  ORC_EXECUTOR_M (ex) = m;
  ex->arrays[ORC_VAR_D1] = d1;
  ex->params[ORC_VAR_D1] = d1_stride;
  ex->arrays[ORC_VAR_S1] = (void *) s1;
  ex->params[ORC_VAR_S1] = s1_stride;
  ex->params[ORC_VAR_P1] = p1;

  func = c->exec;
  func (ex);
}
#endif
+
+
+/* video_mixer_orc_blend_bgra */
+#ifdef DISABLE_ORC
+void
+video_mixer_orc_blend_bgra (guint8 * ORC_RESTRICT d1, int d1_stride,
+ const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
+{
+ int i;
+ int j;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union64 var40;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var41;
+#else
+ orc_union32 var41;
+#endif
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union16 var44;
+ orc_int8 var45;
+ orc_union32 var46;
+ orc_union64 var47;
+ orc_union64 var48;
+ orc_union64 var49;
+ orc_union64 var50;
+ orc_union32 var51;
+ orc_union64 var52;
+ orc_union64 var53;
+ orc_union64 var54;
+ orc_union64 var55;
+ orc_union64 var56;
+ orc_union32 var57;
+ orc_union32 var58;
+
+ for (j = 0; j < m; j++) {
+ ptr0 = ORC_PTR_OFFSET (d1, d1_stride * j);
+ ptr4 = ORC_PTR_OFFSET (s1, s1_stride * j);
+
+ /* 6: loadpw */
+ var40.x4[0] = p1;
+ var40.x4[1] = p1;
+ var40.x4[2] = p1;
+ var40.x4[3] = p1;
+ /* 17: loadpl */
+ var41.i = 0xff000000; /* -16777216 or 2.11371e-314f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var42 = ptr4[i];
+ /* 1: shrul */
+ var43.i = ((orc_uint32) var42.i) >> 24;
+ /* 2: convlw */
+ var44.i = var43.i;
+ /* 3: convwb */
+ var45 = var44.i;
+ /* 4: splatbl */
+ var46.i =
+ ((((orc_uint32) var45) & 0xff) << 24) | ((((orc_uint32) var45) & 0xff)
+ << 16) | ((((orc_uint32) var45) & 0xff) << 8) | (((orc_uint32) var45)
+ & 0xff);
+ /* 5: convubw */
+ var47.x4[0] = (orc_uint8) var46.x4[0];
+ var47.x4[1] = (orc_uint8) var46.x4[1];
+ var47.x4[2] = (orc_uint8) var46.x4[2];
+ var47.x4[3] = (orc_uint8) var46.x4[3];
+ /* 7: mullw */
+ var48.x4[0] = (var47.x4[0] * var40.x4[0]) & 0xffff;
+ var48.x4[1] = (var47.x4[1] * var40.x4[1]) & 0xffff;
+ var48.x4[2] = (var47.x4[2] * var40.x4[2]) & 0xffff;
+ var48.x4[3] = (var47.x4[3] * var40.x4[3]) & 0xffff;
+ /* 8: shruw */
+ var49.x4[0] = ((orc_uint16) var48.x4[0]) >> 8;
+ var49.x4[1] = ((orc_uint16) var48.x4[1]) >> 8;
+ var49.x4[2] = ((orc_uint16) var48.x4[2]) >> 8;
+ var49.x4[3] = ((orc_uint16) var48.x4[3]) >> 8;
+ /* 9: convubw */
+ var50.x4[0] = (orc_uint8) var42.x4[0];
+ var50.x4[1] = (orc_uint8) var42.x4[1];
+ var50.x4[2] = (orc_uint8) var42.x4[2];
+ var50.x4[3] = (orc_uint8) var42.x4[3];
+ /* 10: loadl */
+ var51 = ptr0[i];
+ /* 11: convubw */
+ var52.x4[0] = (orc_uint8) var51.x4[0];
+ var52.x4[1] = (orc_uint8) var51.x4[1];
+ var52.x4[2] = (orc_uint8) var51.x4[2];
+ var52.x4[3] = (orc_uint8) var51.x4[3];
+ /* 12: subw */
+ var53.x4[0] = var50.x4[0] - var52.x4[0];
+ var53.x4[1] = var50.x4[1] - var52.x4[1];
+ var53.x4[2] = var50.x4[2] - var52.x4[2];
+ var53.x4[3] = var50.x4[3] - var52.x4[3];
+ /* 13: mullw */
+ var54.x4[0] = (var53.x4[0] * var49.x4[0]) & 0xffff;
+ var54.x4[1] = (var53.x4[1] * var49.x4[1]) & 0xffff;
+ var54.x4[2] = (var53.x4[2] * var49.x4[2]) & 0xffff;
+ var54.x4[3] = (var53.x4[3] * var49.x4[3]) & 0xffff;
+ /* 14: div255w */
+ var55.x4[0] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[0] + 128)) +
+ (((orc_uint16) (var54.x4[0] + 128)) >> 8))) >> 8;
+ var55.x4[1] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[1] + 128)) +
+ (((orc_uint16) (var54.x4[1] + 128)) >> 8))) >> 8;
+ var55.x4[2] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[2] + 128)) +
+ (((orc_uint16) (var54.x4[2] + 128)) >> 8))) >> 8;
+ var55.x4[3] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[3] + 128)) +
+ (((orc_uint16) (var54.x4[3] + 128)) >> 8))) >> 8;
+ /* 15: addw */
+ var56.x4[0] = var52.x4[0] + var55.x4[0];
+ var56.x4[1] = var52.x4[1] + var55.x4[1];
+ var56.x4[2] = var52.x4[2] + var55.x4[2];
+ var56.x4[3] = var52.x4[3] + var55.x4[3];
+ /* 16: convwb */
+ var57.x4[0] = var56.x4[0];
+ var57.x4[1] = var56.x4[1];
+ var57.x4[2] = var56.x4[2];
+ var57.x4[3] = var56.x4[3];
+ /* 18: orl */
+ var58.i = var57.i | var41.i;
+ /* 19: storel */
+ ptr0[i] = var58;
+ }
+ }
+
+}
+
+#else
+static void
+_backup_video_mixer_orc_blend_bgra (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int j;
+ int n = ex->n;
+ int m = ex->params[ORC_VAR_A1];
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union64 var40;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var41;
+#else
+ orc_union32 var41;
+#endif
+ orc_union32 var42;
+ orc_union32 var43;
+ orc_union16 var44;
+ orc_int8 var45;
+ orc_union32 var46;
+ orc_union64 var47;
+ orc_union64 var48;
+ orc_union64 var49;
+ orc_union64 var50;
+ orc_union32 var51;
+ orc_union64 var52;
+ orc_union64 var53;
+ orc_union64 var54;
+ orc_union64 var55;
+ orc_union64 var56;
+ orc_union32 var57;
+ orc_union32 var58;
+
+ for (j = 0; j < m; j++) {
+ ptr0 = ORC_PTR_OFFSET (ex->arrays[0], ex->params[0] * j);
+ ptr4 = ORC_PTR_OFFSET (ex->arrays[4], ex->params[4] * j);
+
+ /* 6: loadpw */
+ var40.x4[0] = ex->params[24];
+ var40.x4[1] = ex->params[24];
+ var40.x4[2] = ex->params[24];
+ var40.x4[3] = ex->params[24];
+ /* 17: loadpl */
+ var41.i = 0xff000000; /* -16777216 or 2.11371e-314f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var42 = ptr4[i];
+ /* 1: shrul */
+ var43.i = ((orc_uint32) var42.i) >> 24;
+ /* 2: convlw */
+ var44.i = var43.i;
+ /* 3: convwb */
+ var45 = var44.i;
+ /* 4: splatbl */
+ var46.i =
+ ((((orc_uint32) var45) & 0xff) << 24) | ((((orc_uint32) var45) & 0xff)
+ << 16) | ((((orc_uint32) var45) & 0xff) << 8) | (((orc_uint32) var45)
+ & 0xff);
+ /* 5: convubw */
+ var47.x4[0] = (orc_uint8) var46.x4[0];
+ var47.x4[1] = (orc_uint8) var46.x4[1];
+ var47.x4[2] = (orc_uint8) var46.x4[2];
+ var47.x4[3] = (orc_uint8) var46.x4[3];
+ /* 7: mullw */
+ var48.x4[0] = (var47.x4[0] * var40.x4[0]) & 0xffff;
+ var48.x4[1] = (var47.x4[1] * var40.x4[1]) & 0xffff;
+ var48.x4[2] = (var47.x4[2] * var40.x4[2]) & 0xffff;
+ var48.x4[3] = (var47.x4[3] * var40.x4[3]) & 0xffff;
+ /* 8: shruw */
+ var49.x4[0] = ((orc_uint16) var48.x4[0]) >> 8;
+ var49.x4[1] = ((orc_uint16) var48.x4[1]) >> 8;
+ var49.x4[2] = ((orc_uint16) var48.x4[2]) >> 8;
+ var49.x4[3] = ((orc_uint16) var48.x4[3]) >> 8;
+ /* 9: convubw */
+ var50.x4[0] = (orc_uint8) var42.x4[0];
+ var50.x4[1] = (orc_uint8) var42.x4[1];
+ var50.x4[2] = (orc_uint8) var42.x4[2];
+ var50.x4[3] = (orc_uint8) var42.x4[3];
+ /* 10: loadl */
+ var51 = ptr0[i];
+ /* 11: convubw */
+ var52.x4[0] = (orc_uint8) var51.x4[0];
+ var52.x4[1] = (orc_uint8) var51.x4[1];
+ var52.x4[2] = (orc_uint8) var51.x4[2];
+ var52.x4[3] = (orc_uint8) var51.x4[3];
+ /* 12: subw */
+ var53.x4[0] = var50.x4[0] - var52.x4[0];
+ var53.x4[1] = var50.x4[1] - var52.x4[1];
+ var53.x4[2] = var50.x4[2] - var52.x4[2];
+ var53.x4[3] = var50.x4[3] - var52.x4[3];
+ /* 13: mullw */
+ var54.x4[0] = (var53.x4[0] * var49.x4[0]) & 0xffff;
+ var54.x4[1] = (var53.x4[1] * var49.x4[1]) & 0xffff;
+ var54.x4[2] = (var53.x4[2] * var49.x4[2]) & 0xffff;
+ var54.x4[3] = (var53.x4[3] * var49.x4[3]) & 0xffff;
+ /* 14: div255w */
+ var55.x4[0] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[0] + 128)) +
+ (((orc_uint16) (var54.x4[0] + 128)) >> 8))) >> 8;
+ var55.x4[1] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[1] + 128)) +
+ (((orc_uint16) (var54.x4[1] + 128)) >> 8))) >> 8;
+ var55.x4[2] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[2] + 128)) +
+ (((orc_uint16) (var54.x4[2] + 128)) >> 8))) >> 8;
+ var55.x4[3] =
+ ((orc_uint16) (((orc_uint16) (var54.x4[3] + 128)) +
+ (((orc_uint16) (var54.x4[3] + 128)) >> 8))) >> 8;
+ /* 15: addw */
+ var56.x4[0] = var52.x4[0] + var55.x4[0];
+ var56.x4[1] = var52.x4[1] + var55.x4[1];
+ var56.x4[2] = var52.x4[2] + var55.x4[2];
+ var56.x4[3] = var52.x4[3] + var55.x4[3];
+ /* 16: convwb */
+ var57.x4[0] = var56.x4[0];
+ var57.x4[1] = var56.x4[1];
+ var57.x4[2] = var56.x4[2];
+ var57.x4[3] = var56.x4[3];
+ /* 18: orl */
+ var58.i = var57.i | var41.i;
+ /* 19: storel */
+ ptr0[i] = var58;
+ }
+ }
+
+}
+
+void
+video_mixer_orc_blend_bgra (guint8 * ORC_RESTRICT d1, int d1_stride,
+ const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 7, 9, 26, 118, 105, 100, 101, 111, 95, 109, 105, 120, 101, 114, 95,
+ 111, 114, 99, 95, 98, 108, 101, 110, 100, 95, 98, 103, 114, 97, 11, 4,
+ 4, 12, 4, 4, 14, 4, 0, 0, 0, 255, 14, 4, 24, 0, 0, 0,
+ 14, 2, 8, 0, 0, 0, 16, 2, 20, 4, 20, 4, 20, 2, 20, 1,
+ 20, 4, 20, 8, 20, 8, 20, 8, 113, 32, 4, 126, 33, 32, 17, 163,
+ 34, 33, 157, 35, 34, 152, 36, 35, 21, 2, 150, 39, 36, 21, 2, 89,
+ 39, 39, 24, 21, 2, 95, 39, 39, 18, 21, 2, 150, 38, 32, 113, 32,
+ 0, 21, 2, 150, 37, 32, 21, 2, 98, 38, 38, 37, 21, 2, 89, 38,
+ 38, 39, 21, 2, 80, 38, 38, 21, 2, 70, 37, 37, 38, 21, 2, 157,
+ 32, 37, 123, 32, 32, 16, 128, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p, _backup_video_mixer_orc_blend_bgra);
+#else
+ p = orc_program_new ();
+ orc_program_set_2d (p);
+ orc_program_set_name (p, "video_mixer_orc_blend_bgra");
+ orc_program_set_backup_function (p, _backup_video_mixer_orc_blend_bgra);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_constant (p, 4, 0xff000000, "c1");
+ orc_program_add_constant (p, 4, 0x00000018, "c2");
+ orc_program_add_constant (p, 2, 0x00000008, "c3");
+ orc_program_add_parameter (p, 2, "p1");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 2, "t3");
+ orc_program_add_temporary (p, 1, "t4");
+ orc_program_add_temporary (p, 4, "t5");
+ orc_program_add_temporary (p, 8, "t6");
+ orc_program_add_temporary (p, 8, "t7");
+ orc_program_add_temporary (p, 8, "t8");
+
+ orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shrul", 0, ORC_VAR_T2, ORC_VAR_T1, ORC_VAR_C2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlw", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 0, ORC_VAR_T4, ORC_VAR_T3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "splatbl", 0, ORC_VAR_T5, ORC_VAR_T4, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T8, ORC_VAR_T5, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T8, ORC_VAR_T8, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shruw", 2, ORC_VAR_T8, ORC_VAR_T8, ORC_VAR_C3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T7, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_D1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T6, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "subw", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_T6,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_T8,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "div255w", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_T7,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 2, ORC_VAR_T1, ORC_VAR_T6, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "orl", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_C1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "storel", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ORC_EXECUTOR_M (ex) = m;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->params[ORC_VAR_D1] = d1_stride;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ ex->params[ORC_VAR_S1] = s1_stride;
+ ex->params[ORC_VAR_P1] = p1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* video_mixer_orc_overlay_argb */
+#ifdef DISABLE_ORC
+void
+video_mixer_orc_overlay_argb (guint8 * ORC_RESTRICT d1, int d1_stride,
+ const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
+{
+ int i;
+ int j;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union64 var41;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var42;
+#else
+ orc_union32 var42;
+#endif
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var43;
+#else
+ orc_union32 var43;
+#endif
+ orc_union32 var44;
+ orc_union16 var45;
+ orc_int8 var46;
+ orc_union32 var47;
+ orc_union64 var48;
+ orc_union64 var49;
+ orc_union64 var50;
+ orc_union64 var51;
+ orc_union64 var52;
+ orc_union32 var53;
+ orc_union64 var54;
+ orc_union64 var55;
+ orc_union32 var56;
+ orc_union16 var57;
+ orc_int8 var58;
+ orc_union32 var59;
+ orc_union64 var60;
+ orc_union64 var61;
+ orc_union64 var62;
+ orc_union64 var63;
+ orc_union64 var64;
+ orc_union64 var65;
+ orc_union64 var66;
+ orc_union64 var67;
+ orc_union32 var68;
+ orc_union32 var69;
+ orc_union32 var70;
+ orc_union32 var71;
+ orc_union32 var72;
+
+ for (j = 0; j < m; j++) {
+ ptr0 = ORC_PTR_OFFSET (d1, d1_stride * j);
+ ptr4 = ORC_PTR_OFFSET (s1, s1_stride * j);
+
+ /* 5: loadpw */
+ var41.x4[0] = p1;
+ var41.x4[1] = p1;
+ var41.x4[2] = p1;
+ var41.x4[3] = p1;
+ /* 10: loadpl */
+ var53.i = 0xffffffff; /* -1 or 2.122e-314f */
+ /* 26: loadpl */
+ var42.i = 0xffffff00; /* -256 or 2.122e-314f */
+ /* 29: loadpl */
+ var43.i = 0x000000ff; /* 255 or 1.25987e-321f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var44 = ptr4[i];
+ /* 1: convlw */
+ var45.i = var44.i;
+ /* 2: convwb */
+ var46 = var45.i;
+ /* 3: splatbl */
+ var47.i =
+ ((((orc_uint32) var46) & 0xff) << 24) | ((((orc_uint32) var46) & 0xff)
+ << 16) | ((((orc_uint32) var46) & 0xff) << 8) | (((orc_uint32) var46)
+ & 0xff);
+ /* 4: convubw */
+ var48.x4[0] = (orc_uint8) var47.x4[0];
+ var48.x4[1] = (orc_uint8) var47.x4[1];
+ var48.x4[2] = (orc_uint8) var47.x4[2];
+ var48.x4[3] = (orc_uint8) var47.x4[3];
+ /* 6: mullw */
+ var49.x4[0] = (var48.x4[0] * var41.x4[0]) & 0xffff;
+ var49.x4[1] = (var48.x4[1] * var41.x4[1]) & 0xffff;
+ var49.x4[2] = (var48.x4[2] * var41.x4[2]) & 0xffff;
+ var49.x4[3] = (var48.x4[3] * var41.x4[3]) & 0xffff;
+ /* 7: shruw */
+ var50.x4[0] = ((orc_uint16) var49.x4[0]) >> 8;
+ var50.x4[1] = ((orc_uint16) var49.x4[1]) >> 8;
+ var50.x4[2] = ((orc_uint16) var49.x4[2]) >> 8;
+ var50.x4[3] = ((orc_uint16) var49.x4[3]) >> 8;
+ /* 8: convubw */
+ var51.x4[0] = (orc_uint8) var44.x4[0];
+ var51.x4[1] = (orc_uint8) var44.x4[1];
+ var51.x4[2] = (orc_uint8) var44.x4[2];
+ var51.x4[3] = (orc_uint8) var44.x4[3];
+ /* 9: mullw */
+ var52.x4[0] = (var51.x4[0] * var50.x4[0]) & 0xffff;
+ var52.x4[1] = (var51.x4[1] * var50.x4[1]) & 0xffff;
+ var52.x4[2] = (var51.x4[2] * var50.x4[2]) & 0xffff;
+ var52.x4[3] = (var51.x4[3] * var50.x4[3]) & 0xffff;
+ /* 11: convubw */
+ var54.x4[0] = (orc_uint8) var53.x4[0];
+ var54.x4[1] = (orc_uint8) var53.x4[1];
+ var54.x4[2] = (orc_uint8) var53.x4[2];
+ var54.x4[3] = (orc_uint8) var53.x4[3];
+ /* 12: subw */
+ var55.x4[0] = var54.x4[0] - var50.x4[0];
+ var55.x4[1] = var54.x4[1] - var50.x4[1];
+ var55.x4[2] = var54.x4[2] - var50.x4[2];
+ var55.x4[3] = var54.x4[3] - var50.x4[3];
+ /* 13: loadl */
+ var56 = ptr0[i];
+ /* 14: convlw */
+ var57.i = var56.i;
+ /* 15: convwb */
+ var58 = var57.i;
+ /* 16: splatbl */
+ var59.i =
+ ((((orc_uint32) var58) & 0xff) << 24) | ((((orc_uint32) var58) & 0xff)
+ << 16) | ((((orc_uint32) var58) & 0xff) << 8) | (((orc_uint32) var58)
+ & 0xff);
+ /* 17: convubw */
+ var60.x4[0] = (orc_uint8) var59.x4[0];
+ var60.x4[1] = (orc_uint8) var59.x4[1];
+ var60.x4[2] = (orc_uint8) var59.x4[2];
+ var60.x4[3] = (orc_uint8) var59.x4[3];
+ /* 18: mullw */
+ var61.x4[0] = (var60.x4[0] * var55.x4[0]) & 0xffff;
+ var61.x4[1] = (var60.x4[1] * var55.x4[1]) & 0xffff;
+ var61.x4[2] = (var60.x4[2] * var55.x4[2]) & 0xffff;
+ var61.x4[3] = (var60.x4[3] * var55.x4[3]) & 0xffff;
+ /* 19: div255w */
+ var62.x4[0] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[0] + 128)) +
+ (((orc_uint16) (var61.x4[0] + 128)) >> 8))) >> 8;
+ var62.x4[1] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[1] + 128)) +
+ (((orc_uint16) (var61.x4[1] + 128)) >> 8))) >> 8;
+ var62.x4[2] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[2] + 128)) +
+ (((orc_uint16) (var61.x4[2] + 128)) >> 8))) >> 8;
+ var62.x4[3] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[3] + 128)) +
+ (((orc_uint16) (var61.x4[3] + 128)) >> 8))) >> 8;
+ /* 20: convubw */
+ var63.x4[0] = (orc_uint8) var56.x4[0];
+ var63.x4[1] = (orc_uint8) var56.x4[1];
+ var63.x4[2] = (orc_uint8) var56.x4[2];
+ var63.x4[3] = (orc_uint8) var56.x4[3];
+ /* 21: mullw */
+ var64.x4[0] = (var63.x4[0] * var62.x4[0]) & 0xffff;
+ var64.x4[1] = (var63.x4[1] * var62.x4[1]) & 0xffff;
+ var64.x4[2] = (var63.x4[2] * var62.x4[2]) & 0xffff;
+ var64.x4[3] = (var63.x4[3] * var62.x4[3]) & 0xffff;
+ /* 22: addw */
+ var65.x4[0] = var64.x4[0] + var52.x4[0];
+ var65.x4[1] = var64.x4[1] + var52.x4[1];
+ var65.x4[2] = var64.x4[2] + var52.x4[2];
+ var65.x4[3] = var64.x4[3] + var52.x4[3];
+ /* 23: addw */
+ var66.x4[0] = var62.x4[0] + var50.x4[0];
+ var66.x4[1] = var62.x4[1] + var50.x4[1];
+ var66.x4[2] = var62.x4[2] + var50.x4[2];
+ var66.x4[3] = var62.x4[3] + var50.x4[3];
+ /* 24: divluw */
+ var67.x4[0] =
+ ((var66.x4[0] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[0]) /
+ ((orc_uint16) var66.x4[0] & 0xff));
+ var67.x4[1] =
+ ((var66.x4[1] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[1]) /
+ ((orc_uint16) var66.x4[1] & 0xff));
+ var67.x4[2] =
+ ((var66.x4[2] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[2]) /
+ ((orc_uint16) var66.x4[2] & 0xff));
+ var67.x4[3] =
+ ((var66.x4[3] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[3]) /
+ ((orc_uint16) var66.x4[3] & 0xff));
+ /* 25: convwb */
+ var68.x4[0] = var67.x4[0];
+ var68.x4[1] = var67.x4[1];
+ var68.x4[2] = var67.x4[2];
+ var68.x4[3] = var67.x4[3];
+ /* 27: andl */
+ var69.i = var68.i & var42.i;
+ /* 28: convwb */
+ var70.x4[0] = var66.x4[0];
+ var70.x4[1] = var66.x4[1];
+ var70.x4[2] = var66.x4[2];
+ var70.x4[3] = var66.x4[3];
+ /* 30: andl */
+ var71.i = var70.i & var43.i;
+ /* 31: orl */
+ var72.i = var69.i | var71.i;
+ /* 32: storel */
+ ptr0[i] = var72;
+ }
+ }
+
+}
+
+#else
+static void
+_backup_video_mixer_orc_overlay_argb (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int j;
+ int n = ex->n;
+ int m = ex->params[ORC_VAR_A1];
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union64 var41;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var42;
+#else
+ orc_union32 var42;
+#endif
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var43;
+#else
+ orc_union32 var43;
+#endif
+ orc_union32 var44;
+ orc_union16 var45;
+ orc_int8 var46;
+ orc_union32 var47;
+ orc_union64 var48;
+ orc_union64 var49;
+ orc_union64 var50;
+ orc_union64 var51;
+ orc_union64 var52;
+ orc_union32 var53;
+ orc_union64 var54;
+ orc_union64 var55;
+ orc_union32 var56;
+ orc_union16 var57;
+ orc_int8 var58;
+ orc_union32 var59;
+ orc_union64 var60;
+ orc_union64 var61;
+ orc_union64 var62;
+ orc_union64 var63;
+ orc_union64 var64;
+ orc_union64 var65;
+ orc_union64 var66;
+ orc_union64 var67;
+ orc_union32 var68;
+ orc_union32 var69;
+ orc_union32 var70;
+ orc_union32 var71;
+ orc_union32 var72;
+
+ for (j = 0; j < m; j++) {
+ ptr0 = ORC_PTR_OFFSET (ex->arrays[0], ex->params[0] * j);
+ ptr4 = ORC_PTR_OFFSET (ex->arrays[4], ex->params[4] * j);
+
+ /* 5: loadpw */
+ var41.x4[0] = ex->params[24];
+ var41.x4[1] = ex->params[24];
+ var41.x4[2] = ex->params[24];
+ var41.x4[3] = ex->params[24];
+ /* 10: loadpl */
+ var53.i = 0xffffffff; /* -1 or 2.122e-314f */
+ /* 26: loadpl */
+ var42.i = 0xffffff00; /* -256 or 2.122e-314f */
+ /* 29: loadpl */
+ var43.i = 0x000000ff; /* 255 or 1.25987e-321f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var44 = ptr4[i];
+ /* 1: convlw */
+ var45.i = var44.i;
+ /* 2: convwb */
+ var46 = var45.i;
+ /* 3: splatbl */
+ var47.i =
+ ((((orc_uint32) var46) & 0xff) << 24) | ((((orc_uint32) var46) & 0xff)
+ << 16) | ((((orc_uint32) var46) & 0xff) << 8) | (((orc_uint32) var46)
+ & 0xff);
+ /* 4: convubw */
+ var48.x4[0] = (orc_uint8) var47.x4[0];
+ var48.x4[1] = (orc_uint8) var47.x4[1];
+ var48.x4[2] = (orc_uint8) var47.x4[2];
+ var48.x4[3] = (orc_uint8) var47.x4[3];
+ /* 6: mullw */
+ var49.x4[0] = (var48.x4[0] * var41.x4[0]) & 0xffff;
+ var49.x4[1] = (var48.x4[1] * var41.x4[1]) & 0xffff;
+ var49.x4[2] = (var48.x4[2] * var41.x4[2]) & 0xffff;
+ var49.x4[3] = (var48.x4[3] * var41.x4[3]) & 0xffff;
+ /* 7: shruw */
+ var50.x4[0] = ((orc_uint16) var49.x4[0]) >> 8;
+ var50.x4[1] = ((orc_uint16) var49.x4[1]) >> 8;
+ var50.x4[2] = ((orc_uint16) var49.x4[2]) >> 8;
+ var50.x4[3] = ((orc_uint16) var49.x4[3]) >> 8;
+ /* 8: convubw */
+ var51.x4[0] = (orc_uint8) var44.x4[0];
+ var51.x4[1] = (orc_uint8) var44.x4[1];
+ var51.x4[2] = (orc_uint8) var44.x4[2];
+ var51.x4[3] = (orc_uint8) var44.x4[3];
+ /* 9: mullw */
+ var52.x4[0] = (var51.x4[0] * var50.x4[0]) & 0xffff;
+ var52.x4[1] = (var51.x4[1] * var50.x4[1]) & 0xffff;
+ var52.x4[2] = (var51.x4[2] * var50.x4[2]) & 0xffff;
+ var52.x4[3] = (var51.x4[3] * var50.x4[3]) & 0xffff;
+ /* 11: convubw */
+ var54.x4[0] = (orc_uint8) var53.x4[0];
+ var54.x4[1] = (orc_uint8) var53.x4[1];
+ var54.x4[2] = (orc_uint8) var53.x4[2];
+ var54.x4[3] = (orc_uint8) var53.x4[3];
+ /* 12: subw */
+ var55.x4[0] = var54.x4[0] - var50.x4[0];
+ var55.x4[1] = var54.x4[1] - var50.x4[1];
+ var55.x4[2] = var54.x4[2] - var50.x4[2];
+ var55.x4[3] = var54.x4[3] - var50.x4[3];
+ /* 13: loadl */
+ var56 = ptr0[i];
+ /* 14: convlw */
+ var57.i = var56.i;
+ /* 15: convwb */
+ var58 = var57.i;
+ /* 16: splatbl */
+ var59.i =
+ ((((orc_uint32) var58) & 0xff) << 24) | ((((orc_uint32) var58) & 0xff)
+ << 16) | ((((orc_uint32) var58) & 0xff) << 8) | (((orc_uint32) var58)
+ & 0xff);
+ /* 17: convubw */
+ var60.x4[0] = (orc_uint8) var59.x4[0];
+ var60.x4[1] = (orc_uint8) var59.x4[1];
+ var60.x4[2] = (orc_uint8) var59.x4[2];
+ var60.x4[3] = (orc_uint8) var59.x4[3];
+ /* 18: mullw */
+ var61.x4[0] = (var60.x4[0] * var55.x4[0]) & 0xffff;
+ var61.x4[1] = (var60.x4[1] * var55.x4[1]) & 0xffff;
+ var61.x4[2] = (var60.x4[2] * var55.x4[2]) & 0xffff;
+ var61.x4[3] = (var60.x4[3] * var55.x4[3]) & 0xffff;
+ /* 19: div255w */
+ var62.x4[0] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[0] + 128)) +
+ (((orc_uint16) (var61.x4[0] + 128)) >> 8))) >> 8;
+ var62.x4[1] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[1] + 128)) +
+ (((orc_uint16) (var61.x4[1] + 128)) >> 8))) >> 8;
+ var62.x4[2] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[2] + 128)) +
+ (((orc_uint16) (var61.x4[2] + 128)) >> 8))) >> 8;
+ var62.x4[3] =
+ ((orc_uint16) (((orc_uint16) (var61.x4[3] + 128)) +
+ (((orc_uint16) (var61.x4[3] + 128)) >> 8))) >> 8;
+ /* 20: convubw */
+ var63.x4[0] = (orc_uint8) var56.x4[0];
+ var63.x4[1] = (orc_uint8) var56.x4[1];
+ var63.x4[2] = (orc_uint8) var56.x4[2];
+ var63.x4[3] = (orc_uint8) var56.x4[3];
+ /* 21: mullw */
+ var64.x4[0] = (var63.x4[0] * var62.x4[0]) & 0xffff;
+ var64.x4[1] = (var63.x4[1] * var62.x4[1]) & 0xffff;
+ var64.x4[2] = (var63.x4[2] * var62.x4[2]) & 0xffff;
+ var64.x4[3] = (var63.x4[3] * var62.x4[3]) & 0xffff;
+ /* 22: addw */
+ var65.x4[0] = var64.x4[0] + var52.x4[0];
+ var65.x4[1] = var64.x4[1] + var52.x4[1];
+ var65.x4[2] = var64.x4[2] + var52.x4[2];
+ var65.x4[3] = var64.x4[3] + var52.x4[3];
+ /* 23: addw */
+ var66.x4[0] = var62.x4[0] + var50.x4[0];
+ var66.x4[1] = var62.x4[1] + var50.x4[1];
+ var66.x4[2] = var62.x4[2] + var50.x4[2];
+ var66.x4[3] = var62.x4[3] + var50.x4[3];
+ /* 24: divluw */
+ var67.x4[0] =
+ ((var66.x4[0] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[0]) /
+ ((orc_uint16) var66.x4[0] & 0xff));
+ var67.x4[1] =
+ ((var66.x4[1] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[1]) /
+ ((orc_uint16) var66.x4[1] & 0xff));
+ var67.x4[2] =
+ ((var66.x4[2] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[2]) /
+ ((orc_uint16) var66.x4[2] & 0xff));
+ var67.x4[3] =
+ ((var66.x4[3] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var65.x4[3]) /
+ ((orc_uint16) var66.x4[3] & 0xff));
+ /* 25: convwb */
+ var68.x4[0] = var67.x4[0];
+ var68.x4[1] = var67.x4[1];
+ var68.x4[2] = var67.x4[2];
+ var68.x4[3] = var67.x4[3];
+ /* 27: andl */
+ var69.i = var68.i & var42.i;
+ /* 28: convwb */
+ var70.x4[0] = var66.x4[0];
+ var70.x4[1] = var66.x4[1];
+ var70.x4[2] = var66.x4[2];
+ var70.x4[3] = var66.x4[3];
+ /* 30: andl */
+ var71.i = var70.i & var43.i;
+ /* 31: orl */
+ var72.i = var69.i | var71.i;
+ /* 32: storel */
+ ptr0[i] = var72;
+ }
+ }
+
+}
+
+void
+video_mixer_orc_overlay_argb (guint8 * ORC_RESTRICT d1, int d1_stride,
+ const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 7, 9, 28, 118, 105, 100, 101, 111, 95, 109, 105, 120, 101, 114, 95,
+ 111, 114, 99, 95, 111, 118, 101, 114, 108, 97, 121, 95, 97, 114, 103,
+ 98,
+ 11, 4, 4, 12, 4, 4, 14, 4, 255, 255, 255, 255, 14, 4, 255, 0,
+ 0, 0, 14, 4, 0, 255, 255, 255, 14, 2, 8, 0, 0, 0, 16, 2,
+ 20, 4, 20, 2, 20, 1, 20, 8, 20, 8, 20, 8, 20, 4, 20, 8,
+ 20, 8, 113, 32, 4, 163, 33, 32, 157, 34, 33, 152, 38, 34, 21, 2,
+ 150, 35, 38, 21, 2, 89, 35, 35, 24, 21, 2, 95, 35, 35, 19, 21,
+ 2, 150, 40, 32, 21, 2, 89, 40, 40, 35, 115, 38, 16, 21, 2, 150,
+ 36, 38, 21, 2, 98, 36, 36, 35, 113, 32, 0, 163, 33, 32, 157, 34,
+ 33, 152, 38, 34, 21, 2, 150, 37, 38, 21, 2, 89, 37, 37, 36, 21,
+ 2, 80, 37, 37, 21, 2, 150, 39, 32, 21, 2, 89, 39, 39, 37, 21,
+ 2, 70, 39, 39, 40, 21, 2, 70, 37, 37, 35, 21, 2, 81, 39, 39,
+ 37, 21, 2, 157, 32, 39, 106, 32, 32, 18, 21, 2, 157, 38, 37, 106,
+ 38, 38, 17, 123, 32, 32, 38, 128, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p, _backup_video_mixer_orc_overlay_argb);
+#else
+ p = orc_program_new ();
+ orc_program_set_2d (p);
+ orc_program_set_name (p, "video_mixer_orc_overlay_argb");
+ orc_program_set_backup_function (p, _backup_video_mixer_orc_overlay_argb);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_constant (p, 4, 0xffffffff, "c1");
+ orc_program_add_constant (p, 4, 0x000000ff, "c2");
+ orc_program_add_constant (p, 4, 0xffffff00, "c3");
+ orc_program_add_constant (p, 2, 0x00000008, "c4");
+ orc_program_add_parameter (p, 2, "p1");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 2, "t2");
+ orc_program_add_temporary (p, 1, "t3");
+ orc_program_add_temporary (p, 8, "t4");
+ orc_program_add_temporary (p, 8, "t5");
+ orc_program_add_temporary (p, 8, "t6");
+ orc_program_add_temporary (p, 4, "t7");
+ orc_program_add_temporary (p, 8, "t8");
+ orc_program_add_temporary (p, 8, "t9");
+
+ orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlw", 0, ORC_VAR_T2, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "splatbl", 0, ORC_VAR_T7, ORC_VAR_T3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T4, ORC_VAR_T7, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T4, ORC_VAR_T4, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shruw", 2, ORC_VAR_T4, ORC_VAR_T4, ORC_VAR_C4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T9, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T9, ORC_VAR_T9, ORC_VAR_T4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadpl", 0, ORC_VAR_T7, ORC_VAR_C1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T5, ORC_VAR_T7, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "subw", 2, ORC_VAR_T5, ORC_VAR_T5, ORC_VAR_T4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_D1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlw", 0, ORC_VAR_T2, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "splatbl", 0, ORC_VAR_T7, ORC_VAR_T3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T6, ORC_VAR_T7, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "div255w", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T8, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T8, ORC_VAR_T8, ORC_VAR_T6,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 2, ORC_VAR_T8, ORC_VAR_T8, ORC_VAR_T9,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_T4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "divluw", 2, ORC_VAR_T8, ORC_VAR_T8, ORC_VAR_T6,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 2, ORC_VAR_T1, ORC_VAR_T8, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "andl", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_C3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 2, ORC_VAR_T7, ORC_VAR_T6, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "andl", 0, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_C2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "orl", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_T7,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "storel", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ORC_EXECUTOR_M (ex) = m;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->params[ORC_VAR_D1] = d1_stride;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ ex->params[ORC_VAR_S1] = s1_stride;
+ ex->params[ORC_VAR_P1] = p1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
+
+
+/* video_mixer_orc_overlay_bgra */
+#ifdef DISABLE_ORC
+void
+video_mixer_orc_overlay_bgra (guint8 * ORC_RESTRICT d1, int d1_stride,
+ const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
+{
+ int i;
+ int j;
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union64 var42;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var43;
+#else
+ orc_union32 var43;
+#endif
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var44;
+#else
+ orc_union32 var44;
+#endif
+ orc_union32 var45;
+ orc_union32 var46;
+ orc_union16 var47;
+ orc_int8 var48;
+ orc_union32 var49;
+ orc_union64 var50;
+ orc_union64 var51;
+ orc_union64 var52;
+ orc_union64 var53;
+ orc_union64 var54;
+ orc_union32 var55;
+ orc_union64 var56;
+ orc_union64 var57;
+ orc_union32 var58;
+ orc_union32 var59;
+ orc_union16 var60;
+ orc_int8 var61;
+ orc_union32 var62;
+ orc_union64 var63;
+ orc_union64 var64;
+ orc_union64 var65;
+ orc_union64 var66;
+ orc_union64 var67;
+ orc_union64 var68;
+ orc_union64 var69;
+ orc_union64 var70;
+ orc_union32 var71;
+ orc_union32 var72;
+ orc_union32 var73;
+ orc_union32 var74;
+ orc_union32 var75;
+
+ for (j = 0; j < m; j++) {
+ ptr0 = ORC_PTR_OFFSET (d1, d1_stride * j);
+ ptr4 = ORC_PTR_OFFSET (s1, s1_stride * j);
+
+ /* 6: loadpw */
+ var42.x4[0] = p1;
+ var42.x4[1] = p1;
+ var42.x4[2] = p1;
+ var42.x4[3] = p1;
+ /* 11: loadpl */
+ var55.i = 0xffffffff; /* -1 or 2.122e-314f */
+ /* 28: loadpl */
+ var43.i = 0x00ffffff; /* 16777215 or 8.28905e-317f */
+ /* 31: loadpl */
+ var44.i = 0xff000000; /* -16777216 or 2.11371e-314f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var45 = ptr4[i];
+ /* 1: shrul */
+ var46.i = ((orc_uint32) var45.i) >> 24;
+ /* 2: convlw */
+ var47.i = var46.i;
+ /* 3: convwb */
+ var48 = var47.i;
+ /* 4: splatbl */
+ var49.i =
+ ((((orc_uint32) var48) & 0xff) << 24) | ((((orc_uint32) var48) & 0xff)
+ << 16) | ((((orc_uint32) var48) & 0xff) << 8) | (((orc_uint32) var48)
+ & 0xff);
+ /* 5: convubw */
+ var50.x4[0] = (orc_uint8) var49.x4[0];
+ var50.x4[1] = (orc_uint8) var49.x4[1];
+ var50.x4[2] = (orc_uint8) var49.x4[2];
+ var50.x4[3] = (orc_uint8) var49.x4[3];
+ /* 7: mullw */
+ var51.x4[0] = (var50.x4[0] * var42.x4[0]) & 0xffff;
+ var51.x4[1] = (var50.x4[1] * var42.x4[1]) & 0xffff;
+ var51.x4[2] = (var50.x4[2] * var42.x4[2]) & 0xffff;
+ var51.x4[3] = (var50.x4[3] * var42.x4[3]) & 0xffff;
+ /* 8: shruw */
+ var52.x4[0] = ((orc_uint16) var51.x4[0]) >> 8;
+ var52.x4[1] = ((orc_uint16) var51.x4[1]) >> 8;
+ var52.x4[2] = ((orc_uint16) var51.x4[2]) >> 8;
+ var52.x4[3] = ((orc_uint16) var51.x4[3]) >> 8;
+ /* 9: convubw */
+ var53.x4[0] = (orc_uint8) var45.x4[0];
+ var53.x4[1] = (orc_uint8) var45.x4[1];
+ var53.x4[2] = (orc_uint8) var45.x4[2];
+ var53.x4[3] = (orc_uint8) var45.x4[3];
+ /* 10: mullw */
+ var54.x4[0] = (var53.x4[0] * var52.x4[0]) & 0xffff;
+ var54.x4[1] = (var53.x4[1] * var52.x4[1]) & 0xffff;
+ var54.x4[2] = (var53.x4[2] * var52.x4[2]) & 0xffff;
+ var54.x4[3] = (var53.x4[3] * var52.x4[3]) & 0xffff;
+ /* 12: convubw */
+ var56.x4[0] = (orc_uint8) var55.x4[0];
+ var56.x4[1] = (orc_uint8) var55.x4[1];
+ var56.x4[2] = (orc_uint8) var55.x4[2];
+ var56.x4[3] = (orc_uint8) var55.x4[3];
+ /* 13: subw */
+ var57.x4[0] = var56.x4[0] - var52.x4[0];
+ var57.x4[1] = var56.x4[1] - var52.x4[1];
+ var57.x4[2] = var56.x4[2] - var52.x4[2];
+ var57.x4[3] = var56.x4[3] - var52.x4[3];
+ /* 14: loadl */
+ var58 = ptr0[i];
+ /* 15: shrul */
+ var59.i = ((orc_uint32) var58.i) >> 24;
+ /* 16: convlw */
+ var60.i = var59.i;
+ /* 17: convwb */
+ var61 = var60.i;
+ /* 18: splatbl */
+ var62.i =
+ ((((orc_uint32) var61) & 0xff) << 24) | ((((orc_uint32) var61) & 0xff)
+ << 16) | ((((orc_uint32) var61) & 0xff) << 8) | (((orc_uint32) var61)
+ & 0xff);
+ /* 19: convubw */
+ var63.x4[0] = (orc_uint8) var62.x4[0];
+ var63.x4[1] = (orc_uint8) var62.x4[1];
+ var63.x4[2] = (orc_uint8) var62.x4[2];
+ var63.x4[3] = (orc_uint8) var62.x4[3];
+ /* 20: mullw */
+ var64.x4[0] = (var63.x4[0] * var57.x4[0]) & 0xffff;
+ var64.x4[1] = (var63.x4[1] * var57.x4[1]) & 0xffff;
+ var64.x4[2] = (var63.x4[2] * var57.x4[2]) & 0xffff;
+ var64.x4[3] = (var63.x4[3] * var57.x4[3]) & 0xffff;
+ /* 21: div255w */
+ var65.x4[0] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[0] + 128)) +
+ (((orc_uint16) (var64.x4[0] + 128)) >> 8))) >> 8;
+ var65.x4[1] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[1] + 128)) +
+ (((orc_uint16) (var64.x4[1] + 128)) >> 8))) >> 8;
+ var65.x4[2] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[2] + 128)) +
+ (((orc_uint16) (var64.x4[2] + 128)) >> 8))) >> 8;
+ var65.x4[3] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[3] + 128)) +
+ (((orc_uint16) (var64.x4[3] + 128)) >> 8))) >> 8;
+ /* 22: convubw */
+ var66.x4[0] = (orc_uint8) var58.x4[0];
+ var66.x4[1] = (orc_uint8) var58.x4[1];
+ var66.x4[2] = (orc_uint8) var58.x4[2];
+ var66.x4[3] = (orc_uint8) var58.x4[3];
+ /* 23: mullw */
+ var67.x4[0] = (var66.x4[0] * var65.x4[0]) & 0xffff;
+ var67.x4[1] = (var66.x4[1] * var65.x4[1]) & 0xffff;
+ var67.x4[2] = (var66.x4[2] * var65.x4[2]) & 0xffff;
+ var67.x4[3] = (var66.x4[3] * var65.x4[3]) & 0xffff;
+ /* 24: addw */
+ var68.x4[0] = var67.x4[0] + var54.x4[0];
+ var68.x4[1] = var67.x4[1] + var54.x4[1];
+ var68.x4[2] = var67.x4[2] + var54.x4[2];
+ var68.x4[3] = var67.x4[3] + var54.x4[3];
+ /* 25: addw */
+ var69.x4[0] = var65.x4[0] + var52.x4[0];
+ var69.x4[1] = var65.x4[1] + var52.x4[1];
+ var69.x4[2] = var65.x4[2] + var52.x4[2];
+ var69.x4[3] = var65.x4[3] + var52.x4[3];
+ /* 26: divluw */
+ var70.x4[0] =
+ ((var69.x4[0] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[0]) /
+ ((orc_uint16) var69.x4[0] & 0xff));
+ var70.x4[1] =
+ ((var69.x4[1] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[1]) /
+ ((orc_uint16) var69.x4[1] & 0xff));
+ var70.x4[2] =
+ ((var69.x4[2] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[2]) /
+ ((orc_uint16) var69.x4[2] & 0xff));
+ var70.x4[3] =
+ ((var69.x4[3] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[3]) /
+ ((orc_uint16) var69.x4[3] & 0xff));
+ /* 27: convwb */
+ var71.x4[0] = var70.x4[0];
+ var71.x4[1] = var70.x4[1];
+ var71.x4[2] = var70.x4[2];
+ var71.x4[3] = var70.x4[3];
+ /* 29: andl */
+ var72.i = var71.i & var43.i;
+ /* 30: convwb */
+ var73.x4[0] = var69.x4[0];
+ var73.x4[1] = var69.x4[1];
+ var73.x4[2] = var69.x4[2];
+ var73.x4[3] = var69.x4[3];
+ /* 32: andl */
+ var74.i = var73.i & var44.i;
+ /* 33: orl */
+ var75.i = var72.i | var74.i;
+ /* 34: storel */
+ ptr0[i] = var75;
+ }
+ }
+
+}
+
+#else
+static void
+_backup_video_mixer_orc_overlay_bgra (OrcExecutor * ORC_RESTRICT ex)
+{
+ int i;
+ int j;
+ int n = ex->n;
+ int m = ex->params[ORC_VAR_A1];
+ orc_union32 *ORC_RESTRICT ptr0;
+ const orc_union32 *ORC_RESTRICT ptr4;
+ orc_union64 var42;
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var43;
+#else
+ orc_union32 var43;
+#endif
+#if defined(__APPLE__) && __GNUC__ == 4 && __GNUC_MINOR__ == 2 && defined (__i386__)
+ volatile orc_union32 var44;
+#else
+ orc_union32 var44;
+#endif
+ orc_union32 var45;
+ orc_union32 var46;
+ orc_union16 var47;
+ orc_int8 var48;
+ orc_union32 var49;
+ orc_union64 var50;
+ orc_union64 var51;
+ orc_union64 var52;
+ orc_union64 var53;
+ orc_union64 var54;
+ orc_union32 var55;
+ orc_union64 var56;
+ orc_union64 var57;
+ orc_union32 var58;
+ orc_union32 var59;
+ orc_union16 var60;
+ orc_int8 var61;
+ orc_union32 var62;
+ orc_union64 var63;
+ orc_union64 var64;
+ orc_union64 var65;
+ orc_union64 var66;
+ orc_union64 var67;
+ orc_union64 var68;
+ orc_union64 var69;
+ orc_union64 var70;
+ orc_union32 var71;
+ orc_union32 var72;
+ orc_union32 var73;
+ orc_union32 var74;
+ orc_union32 var75;
+
+ for (j = 0; j < m; j++) {
+ ptr0 = ORC_PTR_OFFSET (ex->arrays[0], ex->params[0] * j);
+ ptr4 = ORC_PTR_OFFSET (ex->arrays[4], ex->params[4] * j);
+
+ /* 6: loadpw */
+ var42.x4[0] = ex->params[24];
+ var42.x4[1] = ex->params[24];
+ var42.x4[2] = ex->params[24];
+ var42.x4[3] = ex->params[24];
+ /* 11: loadpl */
+ var55.i = 0xffffffff; /* -1 or 2.122e-314f */
+ /* 28: loadpl */
+ var43.i = 0x00ffffff; /* 16777215 or 8.28905e-317f */
+ /* 31: loadpl */
+ var44.i = 0xff000000; /* -16777216 or 2.11371e-314f */
+
+ for (i = 0; i < n; i++) {
+ /* 0: loadl */
+ var45 = ptr4[i];
+ /* 1: shrul */
+ var46.i = ((orc_uint32) var45.i) >> 24;
+ /* 2: convlw */
+ var47.i = var46.i;
+ /* 3: convwb */
+ var48 = var47.i;
+ /* 4: splatbl */
+ var49.i =
+ ((((orc_uint32) var48) & 0xff) << 24) | ((((orc_uint32) var48) & 0xff)
+ << 16) | ((((orc_uint32) var48) & 0xff) << 8) | (((orc_uint32) var48)
+ & 0xff);
+ /* 5: convubw */
+ var50.x4[0] = (orc_uint8) var49.x4[0];
+ var50.x4[1] = (orc_uint8) var49.x4[1];
+ var50.x4[2] = (orc_uint8) var49.x4[2];
+ var50.x4[3] = (orc_uint8) var49.x4[3];
+ /* 7: mullw */
+ var51.x4[0] = (var50.x4[0] * var42.x4[0]) & 0xffff;
+ var51.x4[1] = (var50.x4[1] * var42.x4[1]) & 0xffff;
+ var51.x4[2] = (var50.x4[2] * var42.x4[2]) & 0xffff;
+ var51.x4[3] = (var50.x4[3] * var42.x4[3]) & 0xffff;
+ /* 8: shruw */
+ var52.x4[0] = ((orc_uint16) var51.x4[0]) >> 8;
+ var52.x4[1] = ((orc_uint16) var51.x4[1]) >> 8;
+ var52.x4[2] = ((orc_uint16) var51.x4[2]) >> 8;
+ var52.x4[3] = ((orc_uint16) var51.x4[3]) >> 8;
+ /* 9: convubw */
+ var53.x4[0] = (orc_uint8) var45.x4[0];
+ var53.x4[1] = (orc_uint8) var45.x4[1];
+ var53.x4[2] = (orc_uint8) var45.x4[2];
+ var53.x4[3] = (orc_uint8) var45.x4[3];
+ /* 10: mullw */
+ var54.x4[0] = (var53.x4[0] * var52.x4[0]) & 0xffff;
+ var54.x4[1] = (var53.x4[1] * var52.x4[1]) & 0xffff;
+ var54.x4[2] = (var53.x4[2] * var52.x4[2]) & 0xffff;
+ var54.x4[3] = (var53.x4[3] * var52.x4[3]) & 0xffff;
+ /* 12: convubw */
+ var56.x4[0] = (orc_uint8) var55.x4[0];
+ var56.x4[1] = (orc_uint8) var55.x4[1];
+ var56.x4[2] = (orc_uint8) var55.x4[2];
+ var56.x4[3] = (orc_uint8) var55.x4[3];
+ /* 13: subw */
+ var57.x4[0] = var56.x4[0] - var52.x4[0];
+ var57.x4[1] = var56.x4[1] - var52.x4[1];
+ var57.x4[2] = var56.x4[2] - var52.x4[2];
+ var57.x4[3] = var56.x4[3] - var52.x4[3];
+ /* 14: loadl */
+ var58 = ptr0[i];
+ /* 15: shrul */
+ var59.i = ((orc_uint32) var58.i) >> 24;
+ /* 16: convlw */
+ var60.i = var59.i;
+ /* 17: convwb */
+ var61 = var60.i;
+ /* 18: splatbl */
+ var62.i =
+ ((((orc_uint32) var61) & 0xff) << 24) | ((((orc_uint32) var61) & 0xff)
+ << 16) | ((((orc_uint32) var61) & 0xff) << 8) | (((orc_uint32) var61)
+ & 0xff);
+ /* 19: convubw */
+ var63.x4[0] = (orc_uint8) var62.x4[0];
+ var63.x4[1] = (orc_uint8) var62.x4[1];
+ var63.x4[2] = (orc_uint8) var62.x4[2];
+ var63.x4[3] = (orc_uint8) var62.x4[3];
+ /* 20: mullw */
+ var64.x4[0] = (var63.x4[0] * var57.x4[0]) & 0xffff;
+ var64.x4[1] = (var63.x4[1] * var57.x4[1]) & 0xffff;
+ var64.x4[2] = (var63.x4[2] * var57.x4[2]) & 0xffff;
+ var64.x4[3] = (var63.x4[3] * var57.x4[3]) & 0xffff;
+ /* 21: div255w */
+ var65.x4[0] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[0] + 128)) +
+ (((orc_uint16) (var64.x4[0] + 128)) >> 8))) >> 8;
+ var65.x4[1] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[1] + 128)) +
+ (((orc_uint16) (var64.x4[1] + 128)) >> 8))) >> 8;
+ var65.x4[2] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[2] + 128)) +
+ (((orc_uint16) (var64.x4[2] + 128)) >> 8))) >> 8;
+ var65.x4[3] =
+ ((orc_uint16) (((orc_uint16) (var64.x4[3] + 128)) +
+ (((orc_uint16) (var64.x4[3] + 128)) >> 8))) >> 8;
+ /* 22: convubw */
+ var66.x4[0] = (orc_uint8) var58.x4[0];
+ var66.x4[1] = (orc_uint8) var58.x4[1];
+ var66.x4[2] = (orc_uint8) var58.x4[2];
+ var66.x4[3] = (orc_uint8) var58.x4[3];
+ /* 23: mullw */
+ var67.x4[0] = (var66.x4[0] * var65.x4[0]) & 0xffff;
+ var67.x4[1] = (var66.x4[1] * var65.x4[1]) & 0xffff;
+ var67.x4[2] = (var66.x4[2] * var65.x4[2]) & 0xffff;
+ var67.x4[3] = (var66.x4[3] * var65.x4[3]) & 0xffff;
+ /* 24: addw */
+ var68.x4[0] = var67.x4[0] + var54.x4[0];
+ var68.x4[1] = var67.x4[1] + var54.x4[1];
+ var68.x4[2] = var67.x4[2] + var54.x4[2];
+ var68.x4[3] = var67.x4[3] + var54.x4[3];
+ /* 25: addw */
+ var69.x4[0] = var65.x4[0] + var52.x4[0];
+ var69.x4[1] = var65.x4[1] + var52.x4[1];
+ var69.x4[2] = var65.x4[2] + var52.x4[2];
+ var69.x4[3] = var65.x4[3] + var52.x4[3];
+ /* 26: divluw */
+ var70.x4[0] =
+ ((var69.x4[0] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[0]) /
+ ((orc_uint16) var69.x4[0] & 0xff));
+ var70.x4[1] =
+ ((var69.x4[1] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[1]) /
+ ((orc_uint16) var69.x4[1] & 0xff));
+ var70.x4[2] =
+ ((var69.x4[2] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[2]) /
+ ((orc_uint16) var69.x4[2] & 0xff));
+ var70.x4[3] =
+ ((var69.x4[3] & 0xff) ==
+ 0) ? 255 : ORC_CLAMP_UB (((orc_uint16) var68.x4[3]) /
+ ((orc_uint16) var69.x4[3] & 0xff));
+ /* 27: convwb */
+ var71.x4[0] = var70.x4[0];
+ var71.x4[1] = var70.x4[1];
+ var71.x4[2] = var70.x4[2];
+ var71.x4[3] = var70.x4[3];
+ /* 29: andl */
+ var72.i = var71.i & var43.i;
+ /* 30: convwb */
+ var73.x4[0] = var69.x4[0];
+ var73.x4[1] = var69.x4[1];
+ var73.x4[2] = var69.x4[2];
+ var73.x4[3] = var69.x4[3];
+ /* 32: andl */
+ var74.i = var73.i & var44.i;
+ /* 33: orl */
+ var75.i = var72.i | var74.i;
+ /* 34: storel */
+ ptr0[i] = var75;
+ }
+ }
+
+}
+
+void
+video_mixer_orc_overlay_bgra (guint8 * ORC_RESTRICT d1, int d1_stride,
+ const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m)
+{
+ OrcExecutor _ex, *ex = &_ex;
+ static volatile int p_inited = 0;
+ static OrcCode *c = 0;
+ void (*func) (OrcExecutor *);
+
+ if (!p_inited) {
+ orc_once_mutex_lock ();
+ if (!p_inited) {
+ OrcProgram *p;
+
+#if 1
+ static const orc_uint8 bc[] = {
+ 1, 7, 9, 28, 118, 105, 100, 101, 111, 95, 109, 105, 120, 101, 114, 95,
+ 111, 114, 99, 95, 111, 118, 101, 114, 108, 97, 121, 95, 98, 103, 114,
+ 97,
+ 11, 4, 4, 12, 4, 4, 14, 4, 255, 255, 255, 255, 14, 4, 0, 0,
+ 0, 255, 14, 4, 255, 255, 255, 0, 14, 4, 24, 0, 0, 0, 14, 2,
+ 8, 0, 0, 0, 16, 2, 20, 4, 20, 4, 20, 2, 20, 1, 20, 8,
+ 20, 8, 20, 8, 20, 4, 20, 8, 20, 8, 113, 32, 4, 126, 33, 32,
+ 19, 163, 34, 33, 157, 35, 34, 152, 39, 35, 21, 2, 150, 36, 39, 21,
+ 2, 89, 36, 36, 24, 21, 2, 95, 36, 36, 20, 21, 2, 150, 41, 32,
+ 21, 2, 89, 41, 41, 36, 115, 39, 16, 21, 2, 150, 37, 39, 21, 2,
+ 98, 37, 37, 36, 113, 32, 0, 126, 33, 32, 19, 163, 34, 33, 157, 35,
+ 34, 152, 39, 35, 21, 2, 150, 38, 39, 21, 2, 89, 38, 38, 37, 21,
+ 2, 80, 38, 38, 21, 2, 150, 40, 32, 21, 2, 89, 40, 40, 38, 21,
+ 2, 70, 40, 40, 41, 21, 2, 70, 38, 38, 36, 21, 2, 81, 40, 40,
+ 38, 21, 2, 157, 32, 40, 106, 32, 32, 18, 21, 2, 157, 39, 38, 106,
+ 39, 39, 17, 123, 32, 32, 39, 128, 0, 32, 2, 0,
+ };
+ p = orc_program_new_from_static_bytecode (bc);
+ orc_program_set_backup_function (p, _backup_video_mixer_orc_overlay_bgra);
+#else
+ p = orc_program_new ();
+ orc_program_set_2d (p);
+ orc_program_set_name (p, "video_mixer_orc_overlay_bgra");
+ orc_program_set_backup_function (p, _backup_video_mixer_orc_overlay_bgra);
+ orc_program_add_destination (p, 4, "d1");
+ orc_program_add_source (p, 4, "s1");
+ orc_program_add_constant (p, 4, 0xffffffff, "c1");
+ orc_program_add_constant (p, 4, 0xff000000, "c2");
+ orc_program_add_constant (p, 4, 0x00ffffff, "c3");
+ orc_program_add_constant (p, 4, 0x00000018, "c4");
+ orc_program_add_constant (p, 2, 0x00000008, "c5");
+ orc_program_add_parameter (p, 2, "p1");
+ orc_program_add_temporary (p, 4, "t1");
+ orc_program_add_temporary (p, 4, "t2");
+ orc_program_add_temporary (p, 2, "t3");
+ orc_program_add_temporary (p, 1, "t4");
+ orc_program_add_temporary (p, 8, "t5");
+ orc_program_add_temporary (p, 8, "t6");
+ orc_program_add_temporary (p, 8, "t7");
+ orc_program_add_temporary (p, 4, "t8");
+ orc_program_add_temporary (p, 8, "t9");
+ orc_program_add_temporary (p, 8, "t10");
+
+ orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_S1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shrul", 0, ORC_VAR_T2, ORC_VAR_T1, ORC_VAR_C4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlw", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 0, ORC_VAR_T4, ORC_VAR_T3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "splatbl", 0, ORC_VAR_T8, ORC_VAR_T4, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T5, ORC_VAR_T8, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T5, ORC_VAR_T5, ORC_VAR_P1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shruw", 2, ORC_VAR_T5, ORC_VAR_T5, ORC_VAR_C5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T10, ORC_VAR_T1,
+ ORC_VAR_D1, ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T10, ORC_VAR_T10, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadpl", 0, ORC_VAR_T8, ORC_VAR_C1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T6, ORC_VAR_T8, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "subw", 2, ORC_VAR_T6, ORC_VAR_T6, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "loadl", 0, ORC_VAR_T1, ORC_VAR_D1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "shrul", 0, ORC_VAR_T2, ORC_VAR_T1, ORC_VAR_C4,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convlw", 0, ORC_VAR_T3, ORC_VAR_T2, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 0, ORC_VAR_T4, ORC_VAR_T3, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "splatbl", 0, ORC_VAR_T8, ORC_VAR_T4, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T7, ORC_VAR_T8, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_T6,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "div255w", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convubw", 2, ORC_VAR_T9, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "mullw", 2, ORC_VAR_T9, ORC_VAR_T9, ORC_VAR_T7,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 2, ORC_VAR_T9, ORC_VAR_T9, ORC_VAR_T10,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "addw", 2, ORC_VAR_T7, ORC_VAR_T7, ORC_VAR_T5,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "divluw", 2, ORC_VAR_T9, ORC_VAR_T9, ORC_VAR_T7,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 2, ORC_VAR_T1, ORC_VAR_T9, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "andl", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_C3,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "convwb", 2, ORC_VAR_T8, ORC_VAR_T7, ORC_VAR_D1,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "andl", 0, ORC_VAR_T8, ORC_VAR_T8, ORC_VAR_C2,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "orl", 0, ORC_VAR_T1, ORC_VAR_T1, ORC_VAR_T8,
+ ORC_VAR_D1);
+ orc_program_append_2 (p, "storel", 0, ORC_VAR_D1, ORC_VAR_T1, ORC_VAR_D1,
+ ORC_VAR_D1);
+#endif
+
+ orc_program_compile (p);
+ c = orc_program_take_code (p);
+ orc_program_free (p);
+ }
+ p_inited = TRUE;
+ orc_once_mutex_unlock ();
+ }
+ ex->arrays[ORC_VAR_A2] = c;
+ ex->program = 0;
+
+ ex->n = n;
+ ORC_EXECUTOR_M (ex) = m;
+ ex->arrays[ORC_VAR_D1] = d1;
+ ex->params[ORC_VAR_D1] = d1_stride;
+ ex->arrays[ORC_VAR_S1] = (void *) s1;
+ ex->params[ORC_VAR_S1] = s1_stride;
+ ex->params[ORC_VAR_P1] = p1;
+
+ func = c->exec;
+ func (ex);
+}
+#endif
diff --git a/gst/videomixer/videomixerorc-dist.h b/gst/videomixer/videomixerorc-dist.h
new file mode 100644
index 0000000000..e0c070f878
--- /dev/null
+++ b/gst/videomixer/videomixerorc-dist.h
@@ -0,0 +1,96 @@
+
+/* autogenerated from videomixerorc.orc */
+
+#ifndef _VIDEOMIXERORC_H_
+#define _VIDEOMIXERORC_H_
+
+#include <glib.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#ifndef _ORC_INTEGER_TYPEDEFS_
+#define _ORC_INTEGER_TYPEDEFS_
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#include <stdint.h>
+typedef int8_t orc_int8;
+typedef int16_t orc_int16;
+typedef int32_t orc_int32;
+typedef int64_t orc_int64;
+typedef uint8_t orc_uint8;
+typedef uint16_t orc_uint16;
+typedef uint32_t orc_uint32;
+typedef uint64_t orc_uint64;
+#define ORC_UINT64_C(x) UINT64_C(x)
+#elif defined(_MSC_VER)
+typedef signed __int8 orc_int8;
+typedef signed __int16 orc_int16;
+typedef signed __int32 orc_int32;
+typedef signed __int64 orc_int64;
+typedef unsigned __int8 orc_uint8;
+typedef unsigned __int16 orc_uint16;
+typedef unsigned __int32 orc_uint32;
+typedef unsigned __int64 orc_uint64;
+#define ORC_UINT64_C(x) (x##Ui64)
+#define inline __inline
+#else
+#include <limits.h>
+typedef signed char orc_int8;
+typedef short orc_int16;
+typedef int orc_int32;
+typedef unsigned char orc_uint8;
+typedef unsigned short orc_uint16;
+typedef unsigned int orc_uint32;
+#if INT_MAX == LONG_MAX
+typedef long long orc_int64;
+typedef unsigned long long orc_uint64;
+#define ORC_UINT64_C(x) (x##ULL)
+#else
+typedef long orc_int64;
+typedef unsigned long orc_uint64;
+#define ORC_UINT64_C(x) (x##UL)
+#endif
+#endif
+typedef union { orc_int16 i; orc_int8 x2[2]; } orc_union16;
+typedef union { orc_int32 i; float f; orc_int16 x2[2]; orc_int8 x4[4]; } orc_union32;
+typedef union { orc_int64 i; double f; orc_int32 x2[2]; float x2f[2]; orc_int16 x4[4]; } orc_union64;
+#endif
+#ifndef ORC_RESTRICT
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+#define ORC_RESTRICT restrict
+#elif defined(__GNUC__) && __GNUC__ >= 4
+#define ORC_RESTRICT __restrict__
+#else
+#define ORC_RESTRICT
+#endif
+#endif
+
+#ifndef ORC_INTERNAL
+#if defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x550)
+#define ORC_INTERNAL __hidden
+#elif defined (__GNUC__)
+#define ORC_INTERNAL __attribute__((visibility("hidden")))
+#else
+#define ORC_INTERNAL
+#endif
+#endif
+
+void video_mixer_orc_splat_u32 (guint32 * ORC_RESTRICT d1, int p1, int n);
+void video_mixer_orc_memcpy_u32 (guint32 * ORC_RESTRICT d1, const guint32 * ORC_RESTRICT s1, int n);
+void video_mixer_orc_blend_u8 (guint8 * ORC_RESTRICT d1, int d1_stride, const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
+void video_mixer_orc_blend_argb (guint8 * ORC_RESTRICT d1, int d1_stride, const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
+void video_mixer_orc_blend_bgra (guint8 * ORC_RESTRICT d1, int d1_stride, const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
+void video_mixer_orc_overlay_argb (guint8 * ORC_RESTRICT d1, int d1_stride, const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
+void video_mixer_orc_overlay_bgra (guint8 * ORC_RESTRICT d1, int d1_stride, const guint8 * ORC_RESTRICT s1, int s1_stride, int p1, int n, int m);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+
diff --git a/gst/videomixer/videomixerorc.orc b/gst/videomixer/videomixerorc.orc
new file mode 100644
index 0000000000..8ba8237a24
--- /dev/null
+++ b/gst/videomixer/videomixerorc.orc
@@ -0,0 +1,221 @@
+.function video_mixer_orc_splat_u32
+.dest 4 d1 guint32
+.param 4 p1 guint32
+
+copyl d1, p1
+
+.function video_mixer_orc_memcpy_u32
+.dest 4 d1 guint32
+.source 4 s1 guint32
+
+copyl d1, s1
+
+.function video_mixer_orc_blend_u8
+.flags 2d
+.dest 1 d1 guint8
+.source 1 s1 guint8
+.param 2 p1
+.temp 2 t1
+.temp 2 t2
+.const 1 c1 8
+
+convubw t1, d1
+convubw t2, s1
+subw t2, t2, t1
+mullw t2, t2, p1
+shlw t1, t1, c1
+addw t2, t1, t2
+shruw t2, t2, c1
+convsuswb d1, t2
+
+
+.function video_mixer_orc_blend_argb
+.flags 2d
+.dest 4 d guint8
+.source 4 s guint8
+.param 2 alpha
+.temp 4 t
+.temp 2 tw
+.temp 1 tb
+.temp 4 a
+.temp 8 d_wide
+.temp 8 s_wide
+.temp 8 a_wide
+.const 4 a_alpha 0x000000ff
+
+loadl t, s
+convlw tw, t
+convwb tb, tw
+splatbl a, tb
+x4 convubw a_wide, a
+x4 mullw a_wide, a_wide, alpha
+x4 shruw a_wide, a_wide, 8
+x4 convubw s_wide, t
+loadl t, d
+x4 convubw d_wide, t
+x4 subw s_wide, s_wide, d_wide
+x4 mullw s_wide, s_wide, a_wide
+x4 div255w s_wide, s_wide
+x4 addw d_wide, d_wide, s_wide
+x4 convwb t, d_wide
+orl t, t, a_alpha
+storel d, t
+
+.function video_mixer_orc_blend_bgra
+.flags 2d
+.dest 4 d guint8
+.source 4 s guint8
+.param 2 alpha
+.temp 4 t
+.temp 4 t2
+.temp 2 tw
+.temp 1 tb
+.temp 4 a
+.temp 8 d_wide
+.temp 8 s_wide
+.temp 8 a_wide
+.const 4 a_alpha 0xff000000
+
+loadl t, s
+shrul t2, t, 24
+convlw tw, t2
+convwb tb, tw
+splatbl a, tb
+x4 convubw a_wide, a
+x4 mullw a_wide, a_wide, alpha
+x4 shruw a_wide, a_wide, 8
+x4 convubw s_wide, t
+loadl t, d
+x4 convubw d_wide, t
+x4 subw s_wide, s_wide, d_wide
+x4 mullw s_wide, s_wide, a_wide
+x4 div255w s_wide, s_wide
+x4 addw d_wide, d_wide, s_wide
+x4 convwb t, d_wide
+orl t, t, a_alpha
+storel d, t
+
+
+.function video_mixer_orc_overlay_argb
+.flags 2d
+.dest 4 d guint8
+.source 4 s guint8
+.param 2 alpha
+.temp 4 t
+.temp 2 tw
+.temp 1 tb
+.temp 8 alpha_s
+.temp 8 alpha_s_inv
+.temp 8 alpha_d
+.temp 4 a
+.temp 8 d_wide
+.temp 8 s_wide
+.const 4 xfs 0xffffffff
+.const 4 a_alpha 0x000000ff
+.const 4 a_alpha_inv 0xffffff00
+
+# calc source alpha as alpha_s = alpha_s * alpha / 256
+loadl t, s
+convlw tw, t
+convwb tb, tw
+splatbl a, tb
+x4 convubw alpha_s, a
+x4 mullw alpha_s, alpha_s, alpha
+x4 shruw alpha_s, alpha_s, 8
+x4 convubw s_wide, t
+x4 mullw s_wide, s_wide, alpha_s
+
+# calc destination alpha as alpha_d = (255-alpha_s) * alpha_d / 255
+loadpl a, xfs
+x4 convubw alpha_s_inv, a
+x4 subw alpha_s_inv, alpha_s_inv, alpha_s
+loadl t, d
+convlw tw, t
+convwb tb, tw
+splatbl a, tb
+x4 convubw alpha_d, a
+x4 mullw alpha_d, alpha_d, alpha_s_inv
+x4 div255w alpha_d, alpha_d
+x4 convubw d_wide, t
+x4 mullw d_wide, d_wide, alpha_d
+
+# calc final pixel as pix_d = pix_s*alpha_s + pix_d*alpha_d*(255-alpha_s)/255
+x4 addw d_wide, d_wide, s_wide
+
+# calc the final destination alpha_d = alpha_s + alpha_d * (255-alpha_s)/255
+x4 addw alpha_d, alpha_d, alpha_s
+
+# now normalize the pix_d by the final alpha to make it associative
+x4 divluw, d_wide, d_wide, alpha_d
+
+# pack the new alpha into the correct spot
+x4 convwb t, d_wide
+andl t, t, a_alpha_inv
+x4 convwb a, alpha_d
+andl a, a, a_alpha
+orl t, t, a
+storel d, t
+
+.function video_mixer_orc_overlay_bgra
+.flags 2d
+.dest 4 d guint8
+.source 4 s guint8
+.param 2 alpha
+.temp 4 t
+.temp 4 t2
+.temp 2 tw
+.temp 1 tb
+.temp 8 alpha_s
+.temp 8 alpha_s_inv
+.temp 8 alpha_d
+.temp 4 a
+.temp 8 d_wide
+.temp 8 s_wide
+.const 4 xfs 0xffffffff
+.const 4 a_alpha 0xff000000
+.const 4 a_alpha_inv 0x00ffffff
+
+# calc source alpha as alpha_s = alpha_s * alpha / 256
+loadl t, s
+shrul t2, t, 24
+convlw tw, t2
+convwb tb, tw
+splatbl a, tb
+x4 convubw alpha_s, a
+x4 mullw alpha_s, alpha_s, alpha
+x4 shruw alpha_s, alpha_s, 8
+x4 convubw s_wide, t
+x4 mullw s_wide, s_wide, alpha_s
+
+# calc destination alpha as alpha_d = (255-alpha_s) * alpha_d / 255
+loadpl a, xfs
+x4 convubw alpha_s_inv, a
+x4 subw alpha_s_inv, alpha_s_inv, alpha_s
+loadl t, d
+shrul t2, t, 24
+convlw tw, t2
+convwb tb, tw
+splatbl a, tb
+x4 convubw alpha_d, a
+x4 mullw alpha_d, alpha_d, alpha_s_inv
+x4 div255w alpha_d, alpha_d
+x4 convubw d_wide, t
+x4 mullw d_wide, d_wide, alpha_d
+
+# calc final pixel as pix_d = pix_s*alpha_s + pix_d*alpha_d*(255-alpha_s)/255
+x4 addw d_wide, d_wide, s_wide
+
+# calc the final destination alpha_d = alpha_s + alpha_d * (255-alpha_s)/255
+x4 addw alpha_d, alpha_d, alpha_s
+
+# now normalize the pix_d by the final alpha to make it associative
+x4 divluw, d_wide, d_wide, alpha_d
+
+# pack the new alpha into the correct spot
+x4 convwb t, d_wide
+andl t, t, a_alpha_inv
+x4 convwb a, alpha_d
+andl a, a, a_alpha
+orl t, t, a
+storel d, t
+
diff --git a/gst/wavenc/gstwavenc.c b/gst/wavenc/gstwavenc.c
new file mode 100644
index 0000000000..2392afef69
--- /dev/null
+++ b/gst/wavenc/gstwavenc.c
@@ -0,0 +1,1152 @@
+/* -*- mode: C; tab-width: 2; indent-tabs-mode: t; c-basic-offset: 2 -*- */
+/* GStreamer .wav encoder
+ * Copyright (C) <2002> Iain Holmes <iain@prettypeople.org>
+ * Copyright (C) <2006> Tim-Philipp Müller <tim centricular net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+/**
+ * SECTION:element-wavenc
+ * @title: wavenc
+ *
+ * Format an audio stream into the wav format.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 cdparanoiasrc mode=continuous ! queue ! audioconvert ! wavenc ! filesink location=cd.wav
+ * ]| Rip a whole audio CD into a single wav file, with the track table written into a CUE sheet inside the file
+ * |[
+ * gst-launch-1.0 cdparanoiasrc track=5 ! queue ! audioconvert ! wavenc ! filesink location=track5.wav
+ * ]| Rip track 5 of an audio CD into a single wav file containing unencoded raw audio samples.
+ *
+ */
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include "gstwavenc.h"
+
+#include <gst/audio/audio.h>
+#include <gst/riff/riff-media.h>
+#include <gst/base/gstbytewriter.h>
+
+GST_DEBUG_CATEGORY_STATIC (wavenc_debug);
+#define GST_CAT_DEFAULT wavenc_debug
+
+/* One entry of the WAV "cue " chunk, laid out to match the on-disk cue
+ * point structure (serialized little-endian by gst_wavenc_write_cues). */
+typedef struct
+{
+  /* Offset Size    Description   Value
+   * 0x00   4       ID            unique identification value
+   * 0x04   4       Position      play order position
+   * 0x08   4       Data Chunk ID RIFF ID of corresponding data chunk
+   * 0x0c   4       Chunk Start   Byte Offset of Data Chunk *
+   * 0x10   4       Block Start   Byte Offset to sample of First Channel
+   * 0x14   4       Sample Offset Byte Offset to sample byte of First Channel
+   */
+  guint32 id;
+  guint32 position;
+  guint8 data_chunk_id[4];
+  guint32 chunk_start;
+  guint32 block_start;
+  guint32 sample_offset;
+} GstWavEncCue;
+
+/* One "labl" or "note" sub-chunk of the associated-data ("adtl") LIST
+ * chunk; @text is heap-allocated and owned by this struct. */
+typedef struct
+{
+  /* Offset Size    Description     Value
+   * 0x00   4       Chunk ID        "labl" (0x6C61626C) or "note" (0x6E6F7465)
+   * 0x04   4       Chunk Data Size depends on contained text
+   * 0x08   4       Cue Point ID    0 - 0xFFFFFFFF
+   * 0x0c           Text
+   */
+  guint8 chunk_id[4];
+  guint32 chunk_data_size;
+  guint32 cue_point_id;
+  gchar *text;
+} GstWavEncLabl, GstWavEncNote;
+
+/* FIXME: mono doesn't produce correct files it seems, at least mplayer xruns */
+/* Accepted input: interleaved raw integer/float PCM with any channel count,
+ * plus A-law/mu-law limited to mono/stereo. */
+#define SINK_CAPS \
+    "audio/x-raw, "                              \
+    "rate = (int) [ 1, MAX ], "                  \
+    "channels = (int) [ 1, 65535 ], "            \
+    "format = (string) { S32LE, S24LE, S16LE, U8, F32LE, F64LE }, " \
+    "layout = (string) interleaved"              \
+    "; "                                         \
+    "audio/x-alaw, "                             \
+    "rate = (int) [ 8000, 192000 ], "            \
+    "channels = (int) [ 1, 2 ]; "                \
+    "audio/x-mulaw, "                            \
+    "rate = (int) [ 8000, 192000 ], "            \
+    "channels = (int) [ 1, 2 ]"
+
+/* Output is either classic WAV or RF64 (64-bit sizes for >4GB files). */
+#define SRC_CAPS \
+    "audio/x-wav; "                              \
+    "audio/x-rf64"
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (SINK_CAPS)
+    );
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (SRC_CAPS)
+    );
+
+#define gst_wavenc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstWavEnc, gst_wavenc, GST_TYPE_ELEMENT,
+    G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL)
+    G_IMPLEMENT_INTERFACE (GST_TYPE_TOC_SETTER, NULL)
+    );
+GST_ELEMENT_REGISTER_DEFINE (wavenc, "wavenc", GST_RANK_PRIMARY,
+    GST_TYPE_WAVENC);
+
+static GstFlowReturn gst_wavenc_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_wavenc_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static GstStateChangeReturn gst_wavenc_change_state (GstElement * element,
+ GstStateChange transition);
+static gboolean gst_wavenc_sink_setcaps (GstPad * pad, GstCaps * caps);
+
+/* Class init: register pad templates and element metadata; the state-change
+ * vfunc is the only one we override (pad functions are set per-instance). */
+static void
+gst_wavenc_class_init (GstWavEncClass * klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  element_class->change_state = GST_DEBUG_FUNCPTR (gst_wavenc_change_state);
+
+  gst_element_class_add_static_pad_template (element_class, &src_factory);
+  gst_element_class_add_static_pad_template (element_class, &sink_factory);
+
+  gst_element_class_set_static_metadata (element_class, "WAV audio muxer",
+      "Codec/Muxer/Audio",
+      "Encode raw audio into WAV", "Iain Holmes <iain@prettypeople.org>");
+
+  GST_DEBUG_CATEGORY_INIT (wavenc_debug, "wavenc", 0, "WAV encoder element");
+}
+
+/* Instance init: create the two always pads, wire the sink pad's chain and
+ * event handlers, and mark both pads as fixed-caps. */
+static void
+gst_wavenc_init (GstWavEnc * wavenc)
+{
+  wavenc->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
+  gst_pad_set_chain_function (wavenc->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_wavenc_chain));
+  gst_pad_set_event_function (wavenc->sinkpad,
+      GST_DEBUG_FUNCPTR (gst_wavenc_event));
+  gst_pad_use_fixed_caps (wavenc->sinkpad);
+  gst_element_add_pad (GST_ELEMENT (wavenc), wavenc->sinkpad);
+
+  wavenc->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+  gst_pad_use_fixed_caps (wavenc->srcpad);
+  gst_element_add_pad (GST_ELEMENT (wavenc), wavenc->srcpad);
+}
+
+/* On-disk sizes in bytes of the chunks this muxer writes, headers included
+ * (DATA_HEADER_LEN covers only the 8-byte "data" header, not the payload). */
+#define RIFF_CHUNK_LEN 12
+#define FMT_WAV_CHUNK_LEN 24
+#define FMT_EXT_CHUNK_LEN 48
+#define FACT_CHUNK_LEN 12
+#define DATA_HEADER_LEN 8
+#define DS64_CHUNK_LEN 36
+
+/* TRUE when the stream needs a WAVE_FORMAT_EXTENSIBLE fmt chunk:
+ * plain WAVEFORMATEX only describes mono and stereo layouts. */
+static gboolean
+use_format_ext (GstWavEnc * wavenc)
+{
+  if (wavenc->channels > 2)
+    return TRUE;
+
+  return FALSE;
+}
+
+/* A "fact" chunk accompanies the extensible format, except for RF64 files,
+ * where the sample count lives in the ds64 chunk instead. */
+static gboolean
+use_fact_chunk (GstWavEnc * wavenc)
+{
+  if (wavenc->use_rf64)
+    return FALSE;
+
+  return use_format_ext (wavenc);
+}
+
+/* Total size in bytes of the file header preceding the audio payload:
+ * RIFF/RF64 chunk + fmt chunk (+ fact and/or ds64 when used) + data header. */
+static int
+get_header_len (GstWavEnc * wavenc)
+{
+  int len = RIFF_CHUNK_LEN + DATA_HEADER_LEN;
+
+  len += use_format_ext (wavenc) ? FMT_EXT_CHUNK_LEN : FMT_WAV_CHUNK_LEN;
+
+  if (use_fact_chunk (wavenc))
+    len += FACT_CHUNK_LEN;
+
+  if (wavenc->use_rf64)
+    len += DS64_CHUNK_LEN;
+
+  return len;
+}
+
+/* Convert a GStreamer channel bitmask (as produced by
+ * gst_audio_channel_positions_to_mask(), one bit per position) into the
+ * WAVEFORMATEXTENSIBLE dwChannelMask bit layout, filling @pos with the
+ * corresponding positions in WAV channel order.
+ *
+ * Returns the WAV mask, or 0 when the input mask is empty or contains a
+ * position WAV cannot express. */
+static guint64
+gstmask_to_wavmask (guint64 gstmask, GstAudioChannelPosition * pos)
+{
+  /* gstmask is a *bitmask* (1 << position), so the set of representable
+   * positions must also be expressed as shifted bits.  The previous code
+   * OR-ed the raw enum values together, which yielded a tiny number and
+   * made the validity check below reject every layout using positions
+   * beyond bit 4 (rear/side/top speakers) -- i.e. exactly the surround
+   * layouts this function exists to handle. */
+  const guint64 valid_pos =
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_LFE1) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_REAR_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT) |
+      (G_GUINT64_CONSTANT (1) <<
+      GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER) |
+      (G_GUINT64_CONSTANT (1) <<
+      GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_REAR_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER) |
+      (G_GUINT64_CONSTANT (1) << GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT);
+
+  /* WAV channel order; index k in this table corresponds to bit k of the
+   * dwChannelMask field. */
+  const GstAudioChannelPosition wav_pos[] = {
+    GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_LFE1,
+    GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_REAR_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_TOP_FRONT_RIGHT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_REAR_LEFT,
+    GST_AUDIO_CHANNEL_POSITION_TOP_REAR_CENTER,
+    GST_AUDIO_CHANNEL_POSITION_TOP_REAR_RIGHT,
+  };
+  int k;
+  int chan = 0;
+  guint64 ret = 0;
+  guint64 mask = 1;
+
+  /* reject empty masks and masks containing unrepresentable positions */
+  if (gstmask == 0 || ((gstmask & ~valid_pos) != 0))
+    return 0;
+
+  for (k = 0; k < G_N_ELEMENTS (wav_pos); ++k) {
+    if (gstmask & (G_GUINT64_CONSTANT (1) << wav_pos[k])) {
+      ret |= mask;
+      pos[chan++] = wav_pos[k];
+    }
+    mask <<= 1;
+  }
+
+  return ret;
+}
+
+/* Write the "fmt " chunk at @header and return the position just past it.
+ * Emits WAVE_FORMAT_EXTENSIBLE for >2 channels, a plain WAVEFORMATEX
+ * otherwise.  Offsets below are relative to the chunk start; bytes 4-7
+ * (chunk size) and 8-9 (wFormatTag) are filled per-branch further down. */
+static guint8 *
+write_fmt_chunk (GstWavEnc * wavenc, guint8 * header)
+{
+  guint16 wBlockAlign;
+
+  wBlockAlign = (wavenc->width / 8) * wavenc->channels;
+
+  memcpy (header, "fmt ", 4);
+  /* wChannels */
+  GST_WRITE_UINT16_LE (header + 10, wavenc->channels);
+  /* dwSamplesPerSec */
+  GST_WRITE_UINT32_LE (header + 12, wavenc->rate);
+  /* dwAvgBytesPerSec */
+  GST_WRITE_UINT32_LE (header + 16, wBlockAlign * wavenc->rate);
+  /* wBlockAlign */
+  GST_WRITE_UINT16_LE (header + 20, wBlockAlign);
+  /* wBitsPerSample */
+  GST_WRITE_UINT16_LE (header + 22, wavenc->width);
+
+  if (use_format_ext (wavenc)) {
+    GST_DEBUG_OBJECT (wavenc, "Using WAVE_FORMAT_EXTENSIBLE");
+
+    GST_WRITE_UINT32_LE (header + 4, FMT_EXT_CHUNK_LEN - 8);
+
+    /* wFormatTag */
+    GST_WRITE_UINT16_LE (header + 8, 0xFFFE);
+    /* cbSize */
+    GST_WRITE_UINT16_LE (header + 24, 22);
+    /* wValidBitsPerSample */
+    GST_WRITE_UINT16_LE (header + 26, wavenc->width);
+    /* dwChannelMask */
+    GST_WRITE_UINT32_LE (header + 28, (guint32) wavenc->channel_mask);
+
+    /* first two bytes of the SubFormat GUID carry the real format tag */
+    GST_WRITE_UINT16_LE (header + 32, wavenc->format);
+
+    /* remainder of the fixed KSDATAFORMAT_SUBTYPE GUID */
+    memcpy (header + 34,
+        "\x00\x00\x00\x00\x10\x00\x80\x00\x00\xAA\x00\x38\x9B\x71", 14);
+
+    header += FMT_EXT_CHUNK_LEN;
+
+  } else {
+    GST_WRITE_UINT32_LE (header + 4, FMT_WAV_CHUNK_LEN - 8);
+
+    /* wFormatTag */
+    GST_WRITE_UINT16_LE (header + 8, wavenc->format);
+    header += FMT_WAV_CHUNK_LEN;
+  }
+
+  return header;
+}
+
+/* Number of audio frames (one sample per channel) written so far; returns 0
+ * while the stream parameters are unknown, guarding against division by
+ * zero.  Note (a / b) / c == a / (b * c) for non-negative integers, so the
+ * single division below matches the original chained divisions exactly. */
+static guint64
+get_num_frames (GstWavEnc * wavenc)
+{
+  guint64 bytes_per_frame;
+
+  if (wavenc->channels == 0 || wavenc->width == 0)
+    return 0;
+
+  bytes_per_frame = (guint64) (wavenc->width / 8) * wavenc->channels;
+  return wavenc->audio_length / bytes_per_frame;
+}
+
+/* Write a "fact" chunk (total frame count) at @header and return the
+ * position just past it.  For RF64 the real count lives in the ds64 chunk,
+ * so the 32-bit field here is pegged at 0xFFFFFFFF. */
+static guint8 *
+write_fact_chunk (GstWavEnc * wavenc, guint8 * header)
+{
+  guint32 frames;
+
+  /* compressed files are only supported up to 2 channels,
+   * that means we never write a fact chunk for them */
+  frames = wavenc->use_rf64 ? 0xFFFFFFFF : (guint32) get_num_frames (wavenc);
+
+  memcpy (header, "fact", 4);
+  GST_WRITE_UINT32_LE (header + 4, FACT_CHUNK_LEN - 8);
+  GST_WRITE_UINT32_LE (header + 8, frames);
+
+  return header + FACT_CHUNK_LEN;
+}
+
+/* Write the RF64 "ds64" chunk at @header and return the position just past
+ * it.  Each 64-bit size is serialized as two little-endian 32-bit halves
+ * (low word first). */
+static guint8 *
+write_ds64_chunk (GstWavEnc * wavenc, guint64 riffLen, guint8 * header)
+{
+  guint64 numFrames = get_num_frames (wavenc);
+
+  GST_DEBUG_OBJECT (wavenc, "riffLen=%" G_GUINT64_FORMAT
+      ", audio length=%" G_GUINT64_FORMAT ", numFrames=%" G_GUINT64_FORMAT,
+      riffLen, wavenc->audio_length, numFrames);
+
+  memcpy (header, "ds64", 4);
+  GST_WRITE_UINT32_LE (header + 4, DS64_CHUNK_LEN - 8);
+  /* riffSize */
+  GST_WRITE_UINT32_LE (header + 8, (guint32) (riffLen & 0xFFFFFFFF));
+  GST_WRITE_UINT32_LE (header + 12, (guint32) (riffLen >> 32));
+  /* dataSize */
+  GST_WRITE_UINT32_LE (header + 16,
+      (guint32) (wavenc->audio_length & 0xFFFFFFFF));
+  GST_WRITE_UINT32_LE (header + 20, (guint32) (wavenc->audio_length >> 32));
+  /* sampleCount */
+  GST_WRITE_UINT32_LE (header + 24, (guint32) (numFrames & 0xFFFFFFFF));
+  GST_WRITE_UINT32_LE (header + 28, (guint32) (numFrames >> 32));
+  /* tableLength always zero for now */
+  GST_WRITE_UINT32_LE (header + 32, 0);
+
+  return header + DS64_CHUNK_LEN;
+}
+
+/* Build the complete file header (RIFF/RF64, ds64, fmt, fact, data headers)
+ * in a freshly allocated buffer, using the element's current stream
+ * parameters and accumulated lengths.  Caller owns the returned buffer. */
+static GstBuffer *
+gst_wavenc_create_header_buf (GstWavEnc * wavenc)
+{
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *header;
+  guint64 riffLen;
+
+  GST_DEBUG_OBJECT (wavenc, "Header size: %d", get_header_len (wavenc));
+  buf = gst_buffer_new_and_alloc (get_header_len (wavenc));
+  gst_buffer_map (buf, &map, GST_MAP_WRITE);
+  header = map.data;
+  memset (header, 0, get_header_len (wavenc));
+
+  /* RIFF size field excludes the 8-byte chunk header itself */
+  riffLen = wavenc->meta_length + wavenc->audio_length
+      + get_header_len (wavenc) - 8;
+
+  /* RIFF chunk */
+  if (wavenc->use_rf64) {
+    GST_DEBUG_OBJECT (wavenc, "Using RF64");
+    memcpy (header, "RF64", 4);
+    /* RF64 pegs the 32-bit size at -1; real size goes into the ds64 chunk */
+    GST_WRITE_UINT32_LE (header + 4, 0xFFFFFFFF);
+  } else {
+    memcpy (header, "RIFF", 4);
+    GST_WRITE_UINT32_LE (header + 4, (guint32) riffLen);
+  }
+  memcpy (header + 8, "WAVE", 4);
+  header += RIFF_CHUNK_LEN;
+
+  if (wavenc->use_rf64)
+    header = write_ds64_chunk (wavenc, riffLen, header);
+
+  header = write_fmt_chunk (wavenc, header);
+  if (use_fact_chunk (wavenc))
+    header = write_fact_chunk (wavenc, header);
+
+  /* data chunk (only the first 4 bytes of the literal are copied) */
+  memcpy (header, "data ", 4);
+  if (wavenc->use_rf64)
+    GST_WRITE_UINT32_LE (header + 4, 0xFFFFFFFF);
+  else
+    GST_WRITE_UINT32_LE (header + 4, (guint32) wavenc->audio_length);
+
+  gst_buffer_unmap (buf, &map);
+
+  return buf;
+}
+
+/* Seek downstream back to byte offset 0 (via a BYTES segment event) and
+ * push the current header there.  Used once for the dummy header at stream
+ * start and again on EOS with the real sizes; requires a seekable sink. */
+static GstFlowReturn
+gst_wavenc_push_header (GstWavEnc * wavenc)
+{
+  GstFlowReturn ret;
+  GstBuffer *outbuf;
+  GstSegment segment;
+
+  /* seek to beginning of file */
+  gst_segment_init (&segment, GST_FORMAT_BYTES);
+  if (!gst_pad_push_event (wavenc->srcpad, gst_event_new_segment (&segment))) {
+    GST_WARNING_OBJECT (wavenc, "Seek to the beginning failed");
+    return GST_FLOW_ERROR;
+  }
+
+  GST_DEBUG_OBJECT (wavenc, "writing header, meta_size=%u, audio_size=%"
+      G_GUINT64_FORMAT, wavenc->meta_length, wavenc->audio_length);
+
+  outbuf = gst_wavenc_create_header_buf (wavenc);
+  GST_BUFFER_OFFSET (outbuf) = 0;
+
+  ret = gst_pad_push (wavenc->srcpad, outbuf);
+
+  if (ret != GST_FLOW_OK) {
+    GST_WARNING_OBJECT (wavenc, "push header failed: flow = %s",
+        gst_flow_get_name (ret));
+  }
+
+  return ret;
+}
+
+/* Configure the muxer from the sink caps: extract rate, channel count,
+ * sample width, the RIFF format tag and (for raw audio) the channel mask.
+ * Rejects caps changes once the header has been written.  Returns TRUE on
+ * success, FALSE on unusable caps. */
+static gboolean
+gst_wavenc_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+  GstWavEnc *wavenc;
+  GstStructure *structure;
+  const gchar *name;
+  gint chans, rate;
+  GstCaps *ccaps;
+
+  wavenc = GST_WAVENC (gst_pad_get_parent (pad));
+
+  /* WAV cannot restart mid-file; only compatible caps may follow the header */
+  ccaps = gst_pad_get_current_caps (pad);
+  if (wavenc->sent_header && ccaps && !gst_caps_can_intersect (caps, ccaps)) {
+    gst_caps_unref (ccaps);
+    GST_WARNING_OBJECT (wavenc, "cannot change format in middle of stream");
+    goto fail;
+  }
+  if (ccaps)
+    gst_caps_unref (ccaps);
+
+  GST_DEBUG_OBJECT (wavenc, "got caps: %" GST_PTR_FORMAT, caps);
+
+  structure = gst_caps_get_structure (caps, 0);
+  name = gst_structure_get_name (structure);
+
+  if (!gst_structure_get_int (structure, "channels", &chans) ||
+      !gst_structure_get_int (structure, "rate", &rate)) {
+    GST_WARNING_OBJECT (wavenc, "caps incomplete");
+    goto fail;
+  }
+
+  wavenc->channels = chans;
+  wavenc->rate = rate;
+  wavenc->channel_mask = 0;
+
+  if (strcmp (name, "audio/x-raw") == 0) {
+    GstAudioInfo info;
+    guint64 gstmask;
+
+    if (!gst_audio_info_from_caps (&info, caps)) {
+      GST_WARNING_OBJECT (wavenc, "Could not retrieve audio info from caps");
+      goto fail;
+    }
+    /* remember source/destination channel orders so gst_wavenc_chain can
+     * reorder samples into the canonical WAV channel layout */
+    if (gst_audio_channel_positions_to_mask (info.position, wavenc->channels,
+            FALSE, &gstmask)) {
+      wavenc->channel_mask = gstmask_to_wavmask (gstmask, wavenc->destPos);
+      memcpy (wavenc->srcPos, info.position, sizeof (info.position));
+      GST_DEBUG_OBJECT (wavenc, "Channel mask input: 0x%" G_GINT64_MODIFIER "x"
+          " output: 0x%" G_GINT64_MODIFIER "x", gstmask, wavenc->channel_mask);
+    }
+    wavenc->audio_format = GST_AUDIO_INFO_FORMAT (&info);
+
+    if (GST_AUDIO_INFO_IS_INTEGER (&info))
+      wavenc->format = GST_RIFF_WAVE_FORMAT_PCM;
+    else if (GST_AUDIO_INFO_IS_FLOAT (&info))
+      wavenc->format = GST_RIFF_WAVE_FORMAT_IEEE_FLOAT;
+    else
+      goto fail;
+
+    wavenc->width = GST_AUDIO_INFO_WIDTH (&info);
+  } else if (strcmp (name, "audio/x-alaw") == 0) {
+    wavenc->format = GST_RIFF_WAVE_FORMAT_ALAW;
+    wavenc->width = 8;
+  } else if (strcmp (name, "audio/x-mulaw") == 0) {
+    wavenc->format = GST_RIFF_WAVE_FORMAT_MULAW;
+    wavenc->width = 8;
+  } else {
+    GST_WARNING_OBJECT (wavenc, "Unsupported format %s", name);
+    goto fail;
+  }
+
+  GST_LOG_OBJECT (wavenc,
+      "accepted caps: format=0x%04x chans=%u width=%u rate=%u",
+      wavenc->format, wavenc->channels, wavenc->width, wavenc->rate);
+
+  gst_object_unref (wavenc);
+  return TRUE;
+
+fail:
+  gst_object_unref (wavenc);
+  return FALSE;
+}
+
+/* GstTagForeachFunc: serialize one tag into the LIST/INFO chunk being
+ * assembled in the GstByteWriter passed as @data.  Tags without a mapping
+ * in the table below are silently skipped; values are written as
+ * NUL-terminated strings padded to an even length, as RIFF requires. */
+static void
+gst_wavparse_tags_foreach (const GstTagList * tags, const gchar * tag,
+    gpointer data)
+{
+  /* mapping from RIFF INFO fourccs to GStreamer tag names */
+  const struct
+  {
+    guint32 fcc;
+    const gchar *tag;
+  } rifftags[] = {
+    {
+    GST_RIFF_INFO_IARL, GST_TAG_LOCATION}, {
+    GST_RIFF_INFO_IART, GST_TAG_ARTIST}, {
+    GST_RIFF_INFO_ICMT, GST_TAG_COMMENT}, {
+    GST_RIFF_INFO_ICOP, GST_TAG_COPYRIGHT}, {
+    GST_RIFF_INFO_ICRD, GST_TAG_DATE}, {
+    GST_RIFF_INFO_IGNR, GST_TAG_GENRE}, {
+    GST_RIFF_INFO_IKEY, GST_TAG_KEYWORDS}, {
+    GST_RIFF_INFO_INAM, GST_TAG_TITLE}, {
+    GST_RIFF_INFO_IPRD, GST_TAG_ALBUM}, {
+    GST_RIFF_INFO_ISBJ, GST_TAG_ALBUM_ARTIST}, {
+    GST_RIFF_INFO_ISFT, GST_TAG_ENCODER}, {
+    GST_RIFF_INFO_ISRC, GST_TAG_ISRC}, {
+    0, NULL}
+  };
+  gint n;
+  size_t size;
+  gchar *str = NULL;
+  GstByteWriter *bw = data;
+  for (n = 0; rifftags[n].fcc != 0; n++) {
+    if (!strcmp (rifftags[n].tag, tag)) {
+      if (rifftags[n].fcc == GST_RIFF_INFO_ICRD) {
+        GDate *date;
+        /* special case for the date tag */
+        if (gst_tag_list_get_date (tags, tag, &date)) {
+          str =
+              g_strdup_printf ("%04d:%02d:%02d", g_date_get_year (date),
+              g_date_get_month (date), g_date_get_day (date));
+          g_date_free (date);
+        }
+      } else {
+        gst_tag_list_get_string (tags, tag, &str);
+      }
+      if (str) {
+        /* get string length including null termination */
+        size = strlen (str) + 1;
+        gst_byte_writer_put_uint32_le (bw, rifftags[n].fcc);
+        gst_byte_writer_put_uint32_le (bw, GST_ROUND_UP_2 (size));
+        gst_byte_writer_put_data (bw, (const guint8 *) str, size);
+        /* add padding if needed */
+        if (GST_ROUND_UP_2 (size) > size) {
+          gst_byte_writer_put_uint8 (bw, 0);
+        }
+        g_free (str);
+        str = NULL;
+        break;
+      }
+    }
+  }
+
+}
+
+/* Merge stream tags with application-set tags, serialize them into a
+ * LIST/INFO chunk and push it downstream.  Called on EOS before the final
+ * header rewrite; the chunk size is accounted in meta_length so the RIFF
+ * size in the header comes out right.  No tags at all is not an error. */
+static GstFlowReturn
+gst_wavenc_write_tags (GstWavEnc * wavenc)
+{
+  const GstTagList *user_tags;
+  GstTagList *tags;
+  guint size;
+  GstBuffer *buf;
+  GstByteWriter bw;
+
+  g_return_val_if_fail (wavenc != NULL, GST_FLOW_OK);
+
+  user_tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (wavenc));
+  if ((!wavenc->tags) && (!user_tags)) {
+    GST_DEBUG_OBJECT (wavenc, "have no tags");
+    return GST_FLOW_OK;
+  }
+  tags =
+      gst_tag_list_merge (user_tags, wavenc->tags,
+      gst_tag_setter_get_tag_merge_mode (GST_TAG_SETTER (wavenc)));
+
+  GST_DEBUG_OBJECT (wavenc, "writing tags");
+
+  gst_byte_writer_init_with_size (&bw, 1024, FALSE);
+
+  /* add LIST INFO chunk; the size field is patched below once known */
+  gst_byte_writer_put_data (&bw, (const guint8 *) "LIST", 4);
+  gst_byte_writer_put_uint32_le (&bw, 0);
+  gst_byte_writer_put_data (&bw, (const guint8 *) "INFO", 4);
+
+  /* add tags */
+  gst_tag_list_foreach (tags, gst_wavparse_tags_foreach, &bw);
+
+  /* sets real size of LIST INFO chunk */
+  size = gst_byte_writer_get_pos (&bw);
+  gst_byte_writer_set_pos (&bw, 4);
+  gst_byte_writer_put_uint32_le (&bw, size - 8);
+
+  gst_tag_list_unref (tags);
+
+  buf = gst_byte_writer_reset_and_get_buffer (&bw);
+  wavenc->meta_length += gst_buffer_get_size (buf);
+  return gst_pad_push (wavenc->srcpad, buf);
+}
+
+/* TRUE if no cue in @list already uses @id. */
+static gboolean
+gst_wavenc_is_cue_id_unique (guint32 id, GList * list)
+{
+  GList *walk;
+
+  for (walk = list; walk != NULL; walk = g_list_next (walk)) {
+    GstWavEncCue *cue = walk->data;
+
+    if (cue->id == id)
+      return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* Append a cue point for @entry to wavenc->cues, converting the entry's
+ * start time into a sample position at the negotiated rate.  @id must be
+ * unique among the collected cues.  Always succeeds for a non-NULL entry. */
+static gboolean
+gst_wavenc_parse_cue (GstWavEnc * wavenc, guint32 id, GstTocEntry * entry)
+{
+  GstWavEncCue *cue;
+  gint64 start_time;
+
+  g_return_val_if_fail (entry != NULL, FALSE);
+
+  gst_toc_entry_get_start_stop_times (entry, &start_time, NULL);
+
+  cue = g_new (GstWavEncCue, 1);
+  cue->id = id;
+  cue->position =
+      gst_util_uint64_scale_round (start_time, wavenc->rate, GST_SECOND);
+  cue->chunk_start = 0;
+  cue->block_start = 0;
+  cue->sample_offset = cue->position;
+  memcpy (cue->data_chunk_id, "data", 4);
+
+  wavenc->cues = g_list_append (wavenc->cues, cue);
+
+  return TRUE;
+}
+
+/* If @entry carries a title tag, append a "labl" record for cue point @id
+ * to wavenc->labls (taking ownership of the title string).  Returns FALSE
+ * when the entry has no tags or no title. */
+static gboolean
+gst_wavenc_parse_labl (GstWavEnc * wavenc, guint32 id, GstTocEntry * entry)
+{
+  gchar *tag;
+  GstTagList *tags;
+  GstWavEncLabl *labl;
+
+  g_return_val_if_fail (entry != NULL, FALSE);
+
+  tags = gst_toc_entry_get_tags (entry);
+  if (!tags) {
+    GST_INFO_OBJECT (wavenc, "no tags for entry: %d", id);
+    return FALSE;
+  }
+  if (!gst_tag_list_get_string (tags, GST_TAG_TITLE, &tag)) {
+    GST_INFO_OBJECT (wavenc, "no title tag for entry: %d", id);
+    return FALSE;
+  }
+
+  labl = g_new (GstWavEncLabl, 1);
+  memcpy (labl->chunk_id, "labl", 4);
+  /* chunk data = 4-byte cue id + text incl. NUL terminator */
+  labl->chunk_data_size = 4 + strlen (tag) + 1;
+  labl->cue_point_id = id;
+  labl->text = tag;
+
+  GST_DEBUG_OBJECT (wavenc, "got labl: '%s'", tag);
+
+  wavenc->labls = g_list_append (wavenc->labls, labl);
+
+  return TRUE;
+}
+
+/* If @entry carries a comment tag, append a "note" record for cue point
+ * @id to wavenc->notes (taking ownership of the comment string).  Returns
+ * FALSE when the entry has no tags or no comment. */
+static gboolean
+gst_wavenc_parse_note (GstWavEnc * wavenc, guint32 id, GstTocEntry * entry)
+{
+  gchar *tag;
+  GstTagList *tags;
+  GstWavEncNote *note;
+
+  g_return_val_if_fail (entry != NULL, FALSE);
+  tags = gst_toc_entry_get_tags (entry);
+  if (!tags) {
+    GST_INFO_OBJECT (wavenc, "no tags for entry: %d", id);
+    return FALSE;
+  }
+  if (!gst_tag_list_get_string (tags, GST_TAG_COMMENT, &tag)) {
+    GST_INFO_OBJECT (wavenc, "no comment tag for entry: %d", id);
+    return FALSE;
+  }
+
+  note = g_new (GstWavEncNote, 1);
+  memcpy (note->chunk_id, "note", 4);
+  /* chunk data = 4-byte cue id + text incl. NUL terminator */
+  note->chunk_data_size = 4 + strlen (tag) + 1;
+  note->cue_point_id = id;
+  note->text = tag;
+
+  GST_DEBUG_OBJECT (wavenc, "got note: '%s'", tag);
+
+  wavenc->notes = g_list_append (wavenc->notes, note);
+
+  return TRUE;
+}
+
+/* Serialize every cue point in @list at *data (24 little-endian bytes
+ * each), advancing *data past the written records. */
+static gboolean
+gst_wavenc_write_cues (guint8 ** data, GList * list)
+{
+  GList *walk;
+
+  for (walk = list; walk != NULL; walk = g_list_next (walk)) {
+    GstWavEncCue *cue = walk->data;
+    guint8 *p = *data;
+
+    GST_WRITE_UINT32_LE (p, cue->id);
+    GST_WRITE_UINT32_LE (p + 4, cue->position);
+    memcpy (p + 8, (gchar *) cue->data_chunk_id, 4);
+    GST_WRITE_UINT32_LE (p + 12, cue->chunk_start);
+    GST_WRITE_UINT32_LE (p + 16, cue->block_start);
+    GST_WRITE_UINT32_LE (p + 20, cue->sample_offset);
+    *data = p + 24;
+  }
+
+  return TRUE;
+}
+
+/* Serialize every "labl" record in @list at *data, padding each chunk to
+ * an even size as RIFF requires, and advance *data past the output. */
+static gboolean
+gst_wavenc_write_labls (guint8 ** data, GList * list)
+{
+  GList *walk;
+
+  for (walk = list; walk != NULL; walk = g_list_next (walk)) {
+    GstWavEncLabl *labl = walk->data;
+    guint8 *p = *data;
+
+    memcpy (p, (gchar *) labl->chunk_id, 4);
+    GST_WRITE_UINT32_LE (p + 4, labl->chunk_data_size);
+    GST_WRITE_UINT32_LE (p + 8, labl->cue_point_id);
+    memcpy (p + 12, (gchar *) labl->text, strlen (labl->text));
+    *data = p + 8 + GST_ROUND_UP_2 (labl->chunk_data_size);
+  }
+
+  return TRUE;
+}
+
+/* Serialize every "note" record in @list at *data, padding each chunk to
+ * an even size as RIFF requires, and advance *data past the output. */
+static gboolean
+gst_wavenc_write_notes (guint8 ** data, GList * list)
+{
+  GList *walk;
+
+  for (walk = list; walk != NULL; walk = g_list_next (walk)) {
+    GstWavEncNote *note = walk->data;
+    guint8 *p = *data;
+
+    memcpy (p, (gchar *) note->chunk_id, 4);
+    GST_WRITE_UINT32_LE (p + 4, note->chunk_data_size);
+    GST_WRITE_UINT32_LE (p + 8, note->cue_point_id);
+    memcpy (p + 12, (gchar *) note->text, strlen (note->text));
+    *data = p + 8 + GST_ROUND_UP_2 (note->chunk_data_size);
+  }
+
+  return TRUE;
+}
+
+/* free one labl/note entry including its owned text string
+ * (GstWavEncLabl and GstWavEncNote share the same layout) */
+static void
+gst_wavenc_free_adtl (gpointer data)
+{
+  GstWavEncLabl *adtl = data;
+
+  g_free (adtl->text);
+  g_free (adtl);
+}
+
+/* Serialize the TOC (if any) into a "cue " chunk plus an optional
+ * "LIST adtl" chunk and push the result downstream.  Called on EOS before
+ * the final header rewrite; the written size is accounted in meta_length.
+ * Having no TOC is not an error (returns GST_FLOW_OK); an unusable TOC
+ * returns GST_FLOW_ERROR.  The previous code returned FALSE from the
+ * error paths, which equals GST_FLOW_OK -- silently reporting success --
+ * and leaked the toc reference taken below as well as the text strings
+ * owned by the labl/note records. */
+static GstFlowReturn
+gst_wavenc_write_toc (GstWavEnc * wavenc)
+{
+  GList *list;
+  GstToc *toc;
+  GstTocEntry *entry, *subentry;
+  GstBuffer *buf;
+  GstMapInfo map;
+  guint8 *data;
+  guint32 ncues, size, cues_size, labls_size, notes_size;
+
+  if (!wavenc->toc) {
+    GST_DEBUG_OBJECT (wavenc, "have no toc, checking toc_setter");
+    wavenc->toc = gst_toc_setter_get_toc (GST_TOC_SETTER (wavenc));
+  }
+  if (!wavenc->toc) {
+    GST_WARNING_OBJECT (wavenc, "have no toc");
+    return GST_FLOW_OK;
+  }
+
+  toc = gst_toc_ref (wavenc->toc);
+  size = 0;
+  cues_size = 0;
+  labls_size = 0;
+  notes_size = 0;
+
+  /* check that the TOC is usable: either one alternative entry whose
+   * children are all sequence entries, or a flat list of sequence entries */
+  list = gst_toc_get_entries (toc);
+  entry = list->data;
+  if (gst_toc_entry_is_alternative (entry)) {
+    list = gst_toc_entry_get_sub_entries (entry);
+    while (list) {
+      subentry = list->data;
+      if (!gst_toc_entry_is_sequence (subentry))
+        goto invalid_toc;
+      list = g_list_next (list);
+    }
+    list = gst_toc_entry_get_sub_entries (entry);
+  }
+  if (gst_toc_entry_is_sequence (entry)) {
+    while (list) {
+      entry = list->data;
+      if (!gst_toc_entry_is_sequence (entry))
+        goto invalid_toc;
+      list = g_list_next (list);
+    }
+    list = gst_toc_get_entries (toc);
+  }
+
+  ncues = g_list_length (list);
+  GST_DEBUG_OBJECT (wavenc, "number of cue entries: %d", ncues);
+
+  /* build one cue point (plus optional labl/note) per TOC entry */
+  while (list) {
+    guint32 id = 0;
+    gint64 id64;
+    const gchar *uid;
+
+    entry = list->data;
+    uid = gst_toc_entry_get_uid (entry);
+    id64 = g_ascii_strtoll (uid, NULL, 0);
+    /* check if id unique compatible with guint32 else generate random */
+    if (id64 >= 0 && gst_wavenc_is_cue_id_unique (id64, wavenc->cues)) {
+      id = (guint32) id64;
+    } else {
+      do {
+        id = g_random_int ();
+      } while (!gst_wavenc_is_cue_id_unique (id, wavenc->cues));
+    }
+    gst_wavenc_parse_cue (wavenc, id, entry);
+    gst_wavenc_parse_labl (wavenc, id, entry);
+    gst_wavenc_parse_note (wavenc, id, entry);
+    list = g_list_next (list);
+  }
+
+  /* count cues size */
+  if (wavenc->cues) {
+    cues_size = 24 * g_list_length (wavenc->cues);
+    size += 12 + cues_size;
+  } else {
+    GST_WARNING_OBJECT (wavenc, "cue's not found");
+    goto invalid_toc;
+  }
+  /* count labls size */
+  if (wavenc->labls) {
+    list = wavenc->labls;
+    while (list) {
+      GstWavEncLabl *labl;
+      labl = list->data;
+      labls_size += 8 + GST_ROUND_UP_2 (labl->chunk_data_size);
+      list = g_list_next (list);
+    }
+    size += labls_size;
+  }
+  /* count notes size */
+  if (wavenc->notes) {
+    list = wavenc->notes;
+    while (list) {
+      GstWavEncNote *note;
+      note = list->data;
+      notes_size += 8 + GST_ROUND_UP_2 (note->chunk_data_size);
+      list = g_list_next (list);
+    }
+    size += notes_size;
+  }
+  /* the adtl LIST header is only present when there are labls or notes */
+  if (wavenc->labls || wavenc->notes) {
+    size += 12;
+  }
+
+  buf = gst_buffer_new_and_alloc (size);
+  gst_buffer_map (buf, &map, GST_MAP_WRITE);
+  data = map.data;
+  memset (data, 0, size);
+
+  /* write Cue Chunk */
+  if (wavenc->cues) {
+    memcpy (data, (gchar *) "cue ", 4);
+    GST_WRITE_UINT32_LE (data + 4, 4 + cues_size);
+    GST_WRITE_UINT32_LE (data + 8, ncues);
+    data += 12;
+    gst_wavenc_write_cues (&data, wavenc->cues);
+
+    /* write Associated Data List Chunk */
+    if (wavenc->labls || wavenc->notes) {
+      memcpy (data, (gchar *) "LIST", 4);
+      GST_WRITE_UINT32_LE (data + 4, 4 + labls_size + notes_size);
+      memcpy (data + 8, (gchar *) "adtl", 4);
+      data += 12;
+      if (wavenc->labls)
+        gst_wavenc_write_labls (&data, wavenc->labls);
+      if (wavenc->notes)
+        gst_wavenc_write_notes (&data, wavenc->notes);
+    }
+  }
+
+  /* free resources; reset the list heads so they cannot dangle */
+  gst_toc_unref (toc);
+  g_list_free_full (wavenc->cues, g_free);
+  wavenc->cues = NULL;
+  g_list_free_full (wavenc->labls, gst_wavenc_free_adtl);
+  wavenc->labls = NULL;
+  g_list_free_full (wavenc->notes, gst_wavenc_free_adtl);
+  wavenc->notes = NULL;
+
+  gst_buffer_unmap (buf, &map);
+  wavenc->meta_length += gst_buffer_get_size (buf);
+
+  return gst_pad_push (wavenc->srcpad, buf);
+
+invalid_toc:
+  gst_toc_unref (toc);
+  return GST_FLOW_ERROR;
+}
+
+/* Sink pad event handler.  CAPS configures the muxer; SEGMENT events are
+ * swallowed (we emit our own BYTES segment when writing headers); TOC and
+ * TAG events are cached for serialization at EOS; EOS triggers writing the
+ * toc/tag chunks and the final header before being forwarded. */
+static gboolean
+gst_wavenc_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  gboolean res = TRUE;
+  GstWavEnc *wavenc;
+  GstTagList *tags;
+  GstToc *toc;
+
+  wavenc = GST_WAVENC (parent);
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      /* NOTE(review): a setcaps failure is not propagated here; res stays
+       * TRUE -- confirm this is intentional */
+      gst_event_parse_caps (event, &caps);
+      gst_wavenc_sink_setcaps (pad, caps);
+
+      /* have our own src caps */
+      gst_event_unref (event);
+      break;
+    }
+    case GST_EVENT_EOS:
+    {
+      GstFlowReturn flow;
+      GST_DEBUG_OBJECT (wavenc, "got EOS");
+
+      flow = gst_wavenc_write_toc (wavenc);
+      if (flow != GST_FLOW_OK) {
+        GST_WARNING_OBJECT (wavenc, "error pushing toc: %s",
+            gst_flow_get_name (flow));
+      }
+      flow = gst_wavenc_write_tags (wavenc);
+      if (flow != GST_FLOW_OK) {
+        GST_WARNING_OBJECT (wavenc, "error pushing tags: %s",
+            gst_flow_get_name (flow));
+      }
+
+      /* write header with correct length values */
+      gst_wavenc_push_header (wavenc);
+
+      /* we're done with this file */
+      wavenc->finished_properly = TRUE;
+
+      /* and forward the EOS event */
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+    }
+    case GST_EVENT_SEGMENT:
+      /* Just drop it, it's probably in TIME format
+       * anyway. We'll send our own newsegment event */
+      gst_event_unref (event);
+      break;
+    case GST_EVENT_TOC:
+      /* keep a ref to the most recent toc for gst_wavenc_write_toc */
+      gst_event_parse_toc (event, &toc, NULL);
+      if (toc) {
+        if (wavenc->toc != toc) {
+          if (wavenc->toc)
+            gst_toc_unref (wavenc->toc);
+          wavenc->toc = toc;
+        } else {
+          gst_toc_unref (toc);
+        }
+      }
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+    case GST_EVENT_TAG:
+      /* keep a ref to the most recent tag list for gst_wavenc_write_tags */
+      gst_event_parse_tag (event, &tags);
+      if (tags) {
+        if (wavenc->tags != tags) {
+          if (wavenc->tags)
+            gst_tag_list_unref (wavenc->tags);
+          wavenc->tags = gst_tag_list_ref (tags);
+        }
+      }
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+    default:
+      res = gst_pad_event_default (pad, parent, event);
+      break;
+  }
+
+  return res;
+}
+
+/* Sink pad chain function: on the first buffer negotiate WAV vs RF64 on
+ * the source pad and push a placeholder header (rewritten with real sizes
+ * at EOS), then forward the audio data, reordering channels into WAV
+ * layout when required. */
+static GstFlowReturn
+gst_wavenc_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstWavEnc *wavenc = GST_WAVENC (parent);
+  GstFlowReturn flow = GST_FLOW_OK;
+
+  if (wavenc->channels <= 0) {
+    GST_ERROR_OBJECT (wavenc, "Got data without caps");
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+
+  if (G_UNLIKELY (!wavenc->sent_header)) {
+    GstStructure *s;
+    /* NOTE(review): assumes allowed caps are non-NULL and non-empty here;
+     * a downstream with no compatible caps would crash below -- confirm */
+    GstCaps *caps = gst_pad_get_allowed_caps (wavenc->srcpad);
+
+    GST_DEBUG_OBJECT (wavenc, "allowed src caps: %" GST_PTR_FORMAT, caps);
+    if (!gst_caps_is_fixed (caps)) {
+      caps = gst_caps_truncate (caps);
+    }
+    s = gst_caps_get_structure (caps, 0);
+    wavenc->use_rf64 = gst_structure_has_name (s, "audio/x-rf64");
+
+    gst_pad_set_caps (wavenc->srcpad, caps);
+    gst_caps_unref (caps);
+
+    /* starting a file, means we have to finish it properly */
+    wavenc->finished_properly = FALSE;
+
+    /* push initial bogus header, it will be updated on EOS */
+    flow = gst_wavenc_push_header (wavenc);
+    if (flow != GST_FLOW_OK) {
+      GST_WARNING_OBJECT (wavenc, "error pushing header: %s",
+          gst_flow_get_name (flow));
+      return flow;
+    }
+    GST_DEBUG_OBJECT (wavenc, "wrote dummy header");
+    wavenc->audio_length = 0;
+    wavenc->sent_header = TRUE;
+  }
+
+  GST_LOG_OBJECT (wavenc,
+      "pushing %" G_GSIZE_FORMAT " bytes raw audio, ts=%" GST_TIME_FORMAT,
+      gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
+
+  buf = gst_buffer_make_writable (buf);
+
+  /* byte offsets are relative to the start of the file, header included */
+  GST_BUFFER_OFFSET (buf) = get_header_len (wavenc) + wavenc->audio_length;
+  GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
+
+  wavenc->audio_length += gst_buffer_get_size (buf);
+
+  /* rearrange samples into the canonical WAV channel order if needed */
+  if (wavenc->channel_mask != 0 &&
+      !gst_audio_buffer_reorder_channels (buf, wavenc->audio_format,
+          wavenc->channels, wavenc->srcPos, wavenc->destPos)) {
+    GST_WARNING_OBJECT (wavenc, "Could not reorder channels");
+  }
+
+  flow = gst_pad_push (wavenc->srcpad, buf);
+
+  return flow;
+}
+
+/* Element state handling: reset stream state when leaving NULL, warn if a
+ * started file never saw EOS (its header still holds bogus sizes), and
+ * release cached tags/toc when returning to NULL. */
+static GstStateChangeReturn
+gst_wavenc_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+  GstWavEnc *wavenc = GST_WAVENC (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      wavenc->format = 0;
+      wavenc->channels = 0;
+      wavenc->width = 0;
+      wavenc->rate = 0;
+      /* use bogus size initially, we'll write the real
+       * header when we get EOS and know the exact length */
+      wavenc->audio_length = 0x7FFF0000;
+      wavenc->meta_length = 0;
+      wavenc->sent_header = FALSE;
+      /* its true because we haven't written anything */
+      wavenc->finished_properly = TRUE;
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+  if (ret != GST_STATE_CHANGE_SUCCESS)
+    return ret;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      if (!wavenc->finished_properly) {
+        GST_ELEMENT_WARNING (wavenc, STREAM, MUX,
+            ("Wav stream not finished properly"),
+            ("Wav stream not finished properly, no EOS received "
+                "before shutdown"));
+      }
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      GST_DEBUG_OBJECT (wavenc, "tags: %p", wavenc->tags);
+      if (wavenc->tags) {
+        gst_tag_list_unref (wavenc->tags);
+        wavenc->tags = NULL;
+      }
+      GST_DEBUG_OBJECT (wavenc, "toc: %p", wavenc->toc);
+      if (wavenc->toc) {
+        gst_toc_unref (wavenc->toc);
+        wavenc->toc = NULL;
+      }
+      gst_tag_setter_reset_tags (GST_TAG_SETTER (wavenc));
+      gst_toc_setter_reset (GST_TOC_SETTER (wavenc));
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* Plugin entry point: register the single wavenc element. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  gboolean registered = GST_ELEMENT_REGISTER (wavenc, plugin);
+
+  return registered;
+}
+
+/* Plugin descriptor consumed by the GStreamer registry. */
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    wavenc,
+    "Encode raw audio into WAV",
+    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/wavenc/gstwavenc.h b/gst/wavenc/gstwavenc.h
new file mode 100644
index 0000000000..a682d3277d
--- /dev/null
+++ b/gst/wavenc/gstwavenc.h
@@ -0,0 +1,85 @@
+/* GStreamer
+ * Copyright (C) 2002, Iain Holmes <iain@prettypeople.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_WAV_ENC_H__
+#define __GST_WAV_ENC_H__
+
+
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_WAVENC \
+ (gst_wavenc_get_type())
+#define GST_WAVENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WAVENC,GstWavEnc))
+#define GST_WAVENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_WAVENC,GstWavEncClass))
+#define GST_IS_WAVENC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WAVENC))
+#define GST_IS_WAVENC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_WAVENC))
+
+typedef struct _GstWavEnc GstWavEnc;
+typedef struct _GstWavEncClass GstWavEncClass;
+
+struct _GstWavEnc {
+ GstElement element;
+
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ /* collected metadata, unreffed on READY->NULL in change_state */
+ GstTagList *tags;
+ GstToc *toc;
+ GList *cues;
+ GList *labls;
+ GList *notes;
+
+ /* useful audio data */
+ GstAudioFormat audio_format;
+ guint16 format;
+ guint width;
+ guint rate;
+ guint channels;
+ guint64 channel_mask;
+ GstAudioChannelPosition srcPos[64];
+ GstAudioChannelPosition destPos[64];
+
+ /* data sizes */
+ guint64 audio_length;
+ guint32 meta_length;
+
+ /* muxing state flags (reset on NULL->READY) */
+ gboolean use_rf64;
+ gboolean sent_header;
+ gboolean finished_properly;
+};
+
+struct _GstWavEncClass {
+ GstElementClass parent_class;
+};
+
+GType gst_wavenc_get_type (void);
+
+GST_ELEMENT_REGISTER_DECLARE (wavenc);
+
+G_END_DECLS
+
+#endif /* __GST_WAV_ENC_H__ */
diff --git a/gst/wavenc/meson.build b/gst/wavenc/meson.build
new file mode 100644
index 0000000000..9a8e723bc4
--- /dev/null
+++ b/gst/wavenc/meson.build
@@ -0,0 +1,10 @@
+# Build the wavenc plugin shared library, generate its pkg-config file
+# and add it to the top-level plugins list.
+gstwavenc = library('gstwavenc',
+ 'gstwavenc.c',
+ c_args : gst_plugins_good_args,
+ include_directories : [configinc],
+ dependencies : [gstbase_dep, gstaudio_dep, gstriff_dep],
+ install : true,
+ install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstwavenc, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstwavenc]
diff --git a/gst/wavparse/gstwavparse.c b/gst/wavparse/gstwavparse.c
new file mode 100644
index 0000000000..45b9db94d3
--- /dev/null
+++ b/gst/wavparse/gstwavparse.c
@@ -0,0 +1,3002 @@
+/* -*- Mode: C; tab-width: 2; indent-tabs-mode: t; c-basic-offset: 2 -*- */
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Nokia Corporation, Stefan Kost <stefan.kost@nokia.com>.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+/**
+ * SECTION:element-wavparse
+ * @title: wavparse
+ *
+ * Parse a .wav file into raw or compressed audio.
+ *
+ * Wavparse supports both push and pull mode operations, making it possible to
+ * stream from a network source.
+ *
+ * ## Example launch line
+ * |[
+ * gst-launch-1.0 filesrc location=sine.wav ! wavparse ! audioconvert ! alsasink
+ * ]| Read a wav file and output to the soundcard using the ALSA element. The
+ * wav file is assumed to contain raw uncompressed samples.
+ * |[
+ * gst-launch-1.0 gnomevfssrc location=http://www.example.org/sine.wav ! queue ! wavparse ! audioconvert ! alsasink
+ * ]| Stream data from a network url.
+ *
+ */
+
+/*
+ * TODO:
+ * http://replaygain.hydrogenaudio.org/file_format_wav.html
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include <math.h>
+
+#include "gstwavparse.h"
+#include "gst/riff/riff-media.h"
+#include <gst/base/gsttypefindhelper.h>
+#include <gst/pbutils/descriptions.h>
+#include <gst/gst-i18n-plugin.h>
+
+GST_DEBUG_CATEGORY_STATIC (wavparse_debug);
+#define GST_CAT_DEFAULT (wavparse_debug)
+
+/* Data size chunk of RF64,
+ * see http://tech.ebu.ch/docs/tech/tech3306-2009.pdf */
+#define GST_RS64_TAG_DS64 GST_MAKE_FOURCC ('d','s','6','4')
+
+static void gst_wavparse_dispose (GObject * object);
+
+static gboolean gst_wavparse_sink_activate (GstPad * sinkpad,
+ GstObject * parent);
+static gboolean gst_wavparse_sink_activate_mode (GstPad * sinkpad,
+ GstObject * parent, GstPadMode mode, gboolean active);
+static gboolean gst_wavparse_send_event (GstElement * element,
+ GstEvent * event);
+static GstStateChangeReturn gst_wavparse_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_wavparse_pad_query (GstPad * pad, GstObject * parent,
+ GstQuery * query);
+static gboolean gst_wavparse_pad_convert (GstPad * pad, GstFormat src_format,
+ gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
+
+static GstFlowReturn gst_wavparse_chain (GstPad * pad, GstObject * parent,
+ GstBuffer * buf);
+static gboolean gst_wavparse_sink_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+static void gst_wavparse_loop (GstPad * pad);
+static gboolean gst_wavparse_srcpad_event (GstPad * pad, GstObject * parent,
+ GstEvent * event);
+
+static void gst_wavparse_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_wavparse_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+#define DEFAULT_IGNORE_LENGTH FALSE
+
+enum
+{
+ PROP_0,
+ PROP_IGNORE_LENGTH,
+};
+
+static GstStaticPadTemplate sink_template_factory =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("audio/x-wav;audio/x-rf64")
+ );
+
+#define DEBUG_INIT \
+ GST_DEBUG_CATEGORY_INIT (wavparse_debug, "wavparse", 0, "WAV parser");
+
+#define gst_wavparse_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstWavParse, gst_wavparse, GST_TYPE_ELEMENT,
+ DEBUG_INIT);
+
+GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (wavparse, "wavparse", GST_RANK_PRIMARY,
+ GST_TYPE_WAVPARSE, gst_riff_init ();
+ );
+
+/* In-memory copy of one 'cue ' chunk entry (fields little-endian in file). */
+typedef struct
+{
+ /* Offset Size Description Value
+ * 0x00 4 ID unique identification value
+ * 0x04 4 Position play order position
+ * 0x08 4 Data Chunk ID RIFF ID of corresponding data chunk
+ * 0x0c 4 Chunk Start Byte Offset of Data Chunk *
+ * 0x10 4 Block Start Byte Offset to sample of First Channel
+ * 0x14 4 Sample Offset Byte Offset to sample byte of First Channel
+ */
+ guint32 id;
+ guint32 position;
+ guint32 data_chunk_id;
+ guint32 chunk_start;
+ guint32 block_start;
+ guint32 sample_offset;
+} GstWavParseCue;
+
+/* One 'labl'/'note' adtl sub-chunk: the cue point it annotates plus its
+ * text (heap-allocated, freed by gst_wavparse_labls_free/notes_free). */
+typedef struct
+{
+ /* Offset Size Description Value
+ * 0x08 4 Cue Point ID 0 - 0xFFFFFFFF
+ * 0x0c Text
+ */
+ guint32 cue_point_id;
+ gchar *text;
+} GstWavParseLabl, GstWavParseNote;
+
+/* Class init: install the ignore-length property, the change_state and
+ * send_event vfuncs, and the static sink / riff-derived src templates. */
+static void
+gst_wavparse_class_init (GstWavParseClass * klass)
+{
+ GstElementClass *gstelement_class;
+ GObjectClass *object_class;
+ GstPadTemplate *src_template;
+
+ gstelement_class = (GstElementClass *) klass;
+ object_class = (GObjectClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ object_class->dispose = gst_wavparse_dispose;
+
+ object_class->set_property = gst_wavparse_set_property;
+ object_class->get_property = gst_wavparse_get_property;
+
+ /**
+ * GstWavParse:ignore-length:
+ *
+ * This selects whether the length found in a data chunk
+ * should be ignored. This may be useful for streamed audio
+ * where the length is unknown until the end of streaming,
+ * and various software/hardware just puts some random value
+ * in there and hopes it doesn't break too much.
+ */
+ g_object_class_install_property (object_class, PROP_IGNORE_LENGTH,
+ g_param_spec_boolean ("ignore-length",
+ "Ignore length",
+ "Ignore length from the Wave header",
+ DEFAULT_IGNORE_LENGTH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)
+ );
+
+ gstelement_class->change_state = gst_wavparse_change_state;
+ gstelement_class->send_event = gst_wavparse_send_event;
+
+ /* register pads */
+ gst_element_class_add_static_pad_template (gstelement_class,
+ &sink_template_factory);
+
+ /* src caps cover every audio format riff can describe */
+ src_template = gst_pad_template_new ("src", GST_PAD_SRC,
+ GST_PAD_ALWAYS, gst_riff_create_audio_template_caps ());
+ gst_element_class_add_pad_template (gstelement_class, src_template);
+
+ gst_element_class_set_static_metadata (gstelement_class, "WAV audio demuxer",
+ "Codec/Demuxer/Audio",
+ "Parse a .wav file into raw audio",
+ "Erik Walthinsen <omega@cse.ogi.edu>");
+}
+
+/* GDestroyNotify for GstWavParseNote entries: release the text and the
+ * struct itself. A NULL note is a no-op. */
+static void
+gst_wavparse_notes_free (GstWavParseNote * note)
+{
+  if (note == NULL)
+    return;
+
+  g_free (note->text);
+  g_free (note);
+}
+
+/* GDestroyNotify for GstWavParseLabl entries: release the text and the
+ * struct itself. A NULL labl is a no-op. */
+static void
+gst_wavparse_labls_free (GstWavParseLabl * labl)
+{
+  if (labl == NULL)
+    return;
+
+  g_free (labl->text);
+  g_free (labl);
+}
+
+/* Return the parser to its pristine START state: zero all fmt-derived
+ * fields and drop every held event, adapter, tag, toc, cue/labl/note
+ * list and caps. Called from init, dispose and state changes. */
+static void
+gst_wavparse_reset (GstWavParse * wav)
+{
+ wav->state = GST_WAVPARSE_START;
+
+ /* These will all be set correctly in the fmt chunk */
+ wav->depth = 0;
+ wav->rate = 0;
+ wav->width = 0;
+ wav->channels = 0;
+ wav->blockalign = 0;
+ wav->bps = 0;
+ wav->fact = 0;
+ wav->offset = 0;
+ wav->end_offset = 0;
+ wav->dataleft = 0;
+ wav->datasize = 0;
+ wav->datastart = 0;
+ wav->chunk_size = 0;
+ wav->duration = 0;
+ wav->got_fmt = FALSE;
+ wav->first = TRUE;
+
+ if (wav->seek_event)
+ gst_event_unref (wav->seek_event);
+ wav->seek_event = NULL;
+ if (wav->adapter) {
+ gst_adapter_clear (wav->adapter);
+ g_object_unref (wav->adapter);
+ wav->adapter = NULL;
+ }
+ if (wav->tags)
+ gst_tag_list_unref (wav->tags);
+ wav->tags = NULL;
+ if (wav->toc)
+ gst_toc_unref (wav->toc);
+ wav->toc = NULL;
+ if (wav->cues)
+ g_list_free_full (wav->cues, g_free);
+ wav->cues = NULL;
+ if (wav->labls)
+ g_list_free_full (wav->labls, (GDestroyNotify) gst_wavparse_labls_free);
+ wav->labls = NULL;
+ if (wav->notes)
+ g_list_free_full (wav->notes, (GDestroyNotify) gst_wavparse_notes_free);
+ wav->notes = NULL;
+ if (wav->caps)
+ gst_caps_unref (wav->caps);
+ wav->caps = NULL;
+ if (wav->start_segment)
+ gst_event_unref (wav->start_segment);
+ wav->start_segment = NULL;
+}
+
+/* GObject dispose: release all stream state, then chain up. Safe to run
+ * more than once because reset NULLs everything it frees. */
+static void
+gst_wavparse_dispose (GObject * object)
+{
+ GstWavParse *wav = GST_WAVPARSE (object);
+
+ GST_DEBUG_OBJECT (wav, "WAV: Dispose");
+ gst_wavparse_reset (wav);
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+/* Instance init: set default state via reset, then create and wire up
+ * the always sink pad (activate/chain/event) and fixed-caps src pad. */
+static void
+gst_wavparse_init (GstWavParse * wavparse)
+{
+ gst_wavparse_reset (wavparse);
+
+ /* sink */
+ wavparse->sinkpad =
+ gst_pad_new_from_static_template (&sink_template_factory, "sink");
+ gst_pad_set_activate_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_sink_activate));
+ gst_pad_set_activatemode_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_sink_activate_mode));
+ gst_pad_set_chain_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_chain));
+ gst_pad_set_event_function (wavparse->sinkpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_sink_event));
+ gst_element_add_pad (GST_ELEMENT_CAST (wavparse), wavparse->sinkpad);
+
+ /* src */
+ wavparse->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (GST_ELEMENT_GET_CLASS (wavparse), "src"), "src");
+ gst_pad_use_fixed_caps (wavparse->srcpad);
+ gst_pad_set_query_function (wavparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_pad_query));
+ gst_pad_set_event_function (wavparse->srcpad,
+ GST_DEBUG_FUNCPTR (gst_wavparse_srcpad_event));
+ gst_element_add_pad (GST_ELEMENT_CAST (wavparse), wavparse->srcpad);
+}
+
+/* Validate the 12-byte RIFF file header in @buf and check that the
+ * document type is WAVE. Posts an element error and returns FALSE on a
+ * non-WAVE file; riff parse failures report their own error. */
+static gboolean
+gst_wavparse_parse_file_header (GstElement * element, GstBuffer * buf)
+{
+  guint32 doctype;
+
+  if (!gst_riff_parse_file_header (element, buf, &doctype))
+    return FALSE;
+
+  if (doctype != GST_RIFF_RIFF_WAVE) {
+    GST_ELEMENT_ERROR (element, STREAM, WRONG_TYPE, (NULL),
+        ("File is not a WAVE file: 0x%" G_GINT32_MODIFIER "x", doctype));
+    return FALSE;
+  }
+
+  return TRUE;
+}
+
+/* Pull-mode stream setup: read and validate the 12-byte RIFF/WAVE file
+ * header at the current offset, then advance past it. */
+static GstFlowReturn
+gst_wavparse_stream_init (GstWavParse * wav)
+{
+  GstBuffer *buf = NULL;
+  GstFlowReturn res;
+
+  res = gst_pad_pull_range (wav->sinkpad, wav->offset, 12, &buf);
+  if (res != GST_FLOW_OK)
+    return res;
+
+  if (!gst_wavparse_parse_file_header (GST_ELEMENT_CAST (wav), buf))
+    return GST_FLOW_ERROR;
+
+  wav->offset += 12;
+
+  return GST_FLOW_OK;
+}
+
+/* Convert a TIME position @ts to a byte position inside the data chunk.
+ * Prefers the header byte rate (bps); falls back to an average byte rate
+ * derived from the 'fact' sample count. Returns FALSE when neither rate
+ * source is available. -1 and 0 map to themselves unconditionally. */
+static gboolean
+gst_wavparse_time_to_bytepos (GstWavParse * wav, gint64 ts, gint64 * bytepos)
+{
+ /* -1 always maps to -1 */
+ if (ts == -1) {
+ *bytepos = -1;
+ return TRUE;
+ }
+
+ /* 0 always maps to 0 */
+ if (ts == 0) {
+ *bytepos = 0;
+ return TRUE;
+ }
+
+ if (wav->bps > 0) {
+ *bytepos = gst_util_uint64_scale_ceil (ts, (guint64) wav->bps, GST_SECOND);
+ return TRUE;
+ } else if (wav->fact) {
+ /* average byte rate: datasize bytes cover fact samples at rate Hz */
+ guint64 bps = gst_util_uint64_scale (wav->datasize, wav->rate, wav->fact);
+ *bytepos = gst_util_uint64_scale_ceil (ts, bps, GST_SECOND);
+ return TRUE;
+ }
+
+ return FALSE;
+}
+
+/* This function is used to perform seeks on the element.
+ *
+ * It also works when event is NULL, in which case it will just
+ * start from the last configured segment. This technique is
+ * used when activating the element and to perform the seek in
+ * READY.
+ */
+static gboolean
+gst_wavparse_perform_seek (GstWavParse * wav, GstEvent * event)
+{
+ gboolean res;
+ gdouble rate;
+ GstFormat format, bformat;
+ GstSeekFlags flags;
+ GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
+ gint64 cur, stop, upstream_size;
+ gboolean flush;
+ gboolean update;
+ GstSegment seeksegment = { 0, };
+ gint64 last_stop;
+ guint32 seqnum = GST_SEQNUM_INVALID;
+
+ if (event) {
+ GST_DEBUG_OBJECT (wav, "doing seek with event");
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &cur, &stop_type, &stop);
+ seqnum = gst_event_get_seqnum (event);
+
+ /* no negative rates yet */
+ if (rate < 0.0)
+ goto negative_rate;
+
+ if (format != wav->segment.format) {
+ GST_INFO_OBJECT (wav, "converting seek-event from %s to %s",
+ gst_format_get_name (format),
+ gst_format_get_name (wav->segment.format));
+ res = TRUE;
+ if (cur_type != GST_SEEK_TYPE_NONE)
+ res =
+ gst_pad_query_convert (wav->srcpad, format, cur,
+ wav->segment.format, &cur);
+ if (res && stop_type != GST_SEEK_TYPE_NONE)
+ res =
+ gst_pad_query_convert (wav->srcpad, format, stop,
+ wav->segment.format, &stop);
+ if (!res)
+ goto no_format;
+
+ format = wav->segment.format;
+ }
+ } else {
+ GST_DEBUG_OBJECT (wav, "doing seek without event");
+ /* cur/stop stay unset here: every later use is guarded by (event != NULL) */
+ flags = 0;
+ rate = 1.0;
+ cur_type = GST_SEEK_TYPE_SET;
+ stop_type = GST_SEEK_TYPE_SET;
+ }
+
+ /* in push mode, we must delegate to upstream */
+ if (wav->streaming) {
+ gboolean res = FALSE;
+
+ /* if streaming not yet started; only prepare initial newsegment */
+ if (!event || wav->state != GST_WAVPARSE_DATA) {
+ if (wav->start_segment)
+ gst_event_unref (wav->start_segment);
+ wav->start_segment = gst_event_new_segment (&wav->segment);
+ res = TRUE;
+ } else {
+ /* convert seek positions to byte positions in data sections */
+ if (format == GST_FORMAT_TIME) {
+ /* should not fail */
+ if (!gst_wavparse_time_to_bytepos (wav, cur, &cur))
+ goto no_position;
+ if (!gst_wavparse_time_to_bytepos (wav, stop, &stop))
+ goto no_position;
+ }
+ /* mind sample boundary and header */
+ if (cur >= 0) {
+ cur -= (cur % wav->bytes_per_sample);
+ cur += wav->datastart;
+ }
+ if (stop >= 0) {
+ stop -= (stop % wav->bytes_per_sample);
+ stop += wav->datastart;
+ }
+ GST_DEBUG_OBJECT (wav, "Pushing BYTE seek rate %g, "
+ "start %" G_GINT64_FORMAT ", stop %" G_GINT64_FORMAT, rate, cur,
+ stop);
+ /* BYTE seek event */
+ event = gst_event_new_seek (rate, GST_FORMAT_BYTES, flags, cur_type, cur,
+ stop_type, stop);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (event, seqnum);
+ res = gst_pad_push_event (wav->sinkpad, event);
+ }
+ return res;
+ }
+
+ /* get flush flag */
+ flush = flags & GST_SEEK_FLAG_FLUSH;
+
+ /* now we need to make sure the streaming thread is stopped. We do this by
+ * either sending a FLUSH_START event downstream which will cause the
+ * streaming thread to stop with a WRONG_STATE.
+ * For a non-flushing seek we simply pause the task, which will happen as soon
+ * as it completes one iteration (and thus might block when the sink is
+ * blocking in preroll). */
+ if (flush) {
+ GstEvent *fevent;
+ GST_DEBUG_OBJECT (wav, "sending flush start");
+
+ fevent = gst_event_new_flush_start ();
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (fevent, seqnum);
+ gst_pad_push_event (wav->sinkpad, gst_event_ref (fevent));
+ gst_pad_push_event (wav->srcpad, fevent);
+ } else {
+ gst_pad_pause_task (wav->sinkpad);
+ }
+
+ /* we should now be able to grab the streaming thread because we stopped it
+ * with the above flush/pause code */
+ GST_PAD_STREAM_LOCK (wav->sinkpad);
+
+ /* save current position */
+ last_stop = wav->segment.position;
+
+ GST_DEBUG_OBJECT (wav, "stopped streaming at %" G_GINT64_FORMAT, last_stop);
+
+ /* copy segment, we need this because we still need the old
+ * segment when we close the current segment. */
+ memcpy (&seeksegment, &wav->segment, sizeof (GstSegment));
+
+ /* configure the seek parameters in the seeksegment. We will then have the
+ * right values in the segment to perform the seek */
+ if (event) {
+ GST_DEBUG_OBJECT (wav, "configuring seek");
+ gst_segment_do_seek (&seeksegment, rate, format, flags,
+ cur_type, cur, stop_type, stop, &update);
+ }
+
+ /* figure out the last position we need to play. If it's configured (stop !=
+ * -1), use that, else we play until the total duration of the file */
+ if ((stop = seeksegment.stop) == -1)
+ stop = seeksegment.duration;
+
+ GST_DEBUG_OBJECT (wav, "cur_type =%d", cur_type);
+ if ((cur_type != GST_SEEK_TYPE_NONE)) {
+ /* bring offset to bytes, if the bps is 0, we have the segment in BYTES and
+ * we can just copy the last_stop. If not, we use the bps to convert TIME to
+ * bytes. */
+ if (!gst_wavparse_time_to_bytepos (wav, seeksegment.position,
+ (gint64 *) & wav->offset))
+ wav->offset = seeksegment.position;
+ GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
+ wav->offset -= (wav->offset % wav->bytes_per_sample);
+ GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
+ wav->offset += wav->datastart;
+ GST_LOG_OBJECT (wav, "offset=%" G_GUINT64_FORMAT, wav->offset);
+ } else {
+ GST_LOG_OBJECT (wav, "continue from offset=%" G_GUINT64_FORMAT,
+ wav->offset);
+ }
+
+ if (stop_type != GST_SEEK_TYPE_NONE) {
+ if (!gst_wavparse_time_to_bytepos (wav, stop, (gint64 *) & wav->end_offset))
+ wav->end_offset = stop;
+ GST_LOG_OBJECT (wav, "end_offset=%" G_GUINT64_FORMAT, wav->end_offset);
+ wav->end_offset -= (wav->end_offset % wav->bytes_per_sample);
+ GST_LOG_OBJECT (wav, "end_offset=%" G_GUINT64_FORMAT, wav->end_offset);
+ wav->end_offset += wav->datastart;
+ GST_LOG_OBJECT (wav, "end_offset=%" G_GUINT64_FORMAT, wav->end_offset);
+ } else {
+ GST_LOG_OBJECT (wav, "continue to end_offset=%" G_GUINT64_FORMAT,
+ wav->end_offset);
+ }
+
+ /* make sure filesize is not exceeded due to rounding errors or so,
+ * same precaution as in _stream_headers */
+ bformat = GST_FORMAT_BYTES;
+ if (gst_pad_peer_query_duration (wav->sinkpad, bformat, &upstream_size))
+ wav->end_offset = MIN (wav->end_offset, upstream_size);
+
+ if (wav->datasize > 0 && wav->end_offset > wav->datastart + wav->datasize)
+ wav->end_offset = wav->datastart + wav->datasize;
+
+ /* this is the range of bytes we will use for playback */
+ wav->offset = MIN (wav->offset, wav->end_offset);
+ wav->dataleft = wav->end_offset - wav->offset;
+
+ GST_DEBUG_OBJECT (wav,
+ "seek: rate %lf, offset %" G_GUINT64_FORMAT ", end %" G_GUINT64_FORMAT
+ ", segment %" GST_TIME_FORMAT " -- %" GST_TIME_FORMAT, rate, wav->offset,
+ wav->end_offset, GST_TIME_ARGS (seeksegment.start), GST_TIME_ARGS (stop));
+
+ /* prepare for streaming again */
+ if (flush) {
+ GstEvent *fevent;
+
+ /* if we sent a FLUSH_START, we now send a FLUSH_STOP */
+ GST_DEBUG_OBJECT (wav, "sending flush stop");
+
+ fevent = gst_event_new_flush_stop (TRUE);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (fevent, seqnum);
+ gst_pad_push_event (wav->sinkpad, gst_event_ref (fevent));
+ gst_pad_push_event (wav->srcpad, fevent);
+ }
+
+ /* now we did the seek and can activate the new segment values */
+ memcpy (&wav->segment, &seeksegment, sizeof (GstSegment));
+
+ /* if we're doing a segment seek, post a SEGMENT_START message */
+ if (wav->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+ gst_element_post_message (GST_ELEMENT_CAST (wav),
+ gst_message_new_segment_start (GST_OBJECT_CAST (wav),
+ wav->segment.format, wav->segment.position));
+ }
+
+ /* now create the newsegment */
+ GST_DEBUG_OBJECT (wav, "Creating newsegment from %" G_GINT64_FORMAT
+ " to %" G_GINT64_FORMAT, wav->segment.position, stop);
+
+ /* store the newsegment event so it can be sent from the streaming thread. */
+ if (wav->start_segment)
+ gst_event_unref (wav->start_segment);
+ wav->start_segment = gst_event_new_segment (&wav->segment);
+ if (seqnum != GST_SEQNUM_INVALID)
+ gst_event_set_seqnum (wav->start_segment, seqnum);
+
+ /* mark discont if we are going to stream from another position. */
+ if (last_stop != wav->segment.position) {
+ GST_DEBUG_OBJECT (wav, "mark DISCONT, we did a seek to another position");
+ wav->discont = TRUE;
+ }
+
+ /* and start the streaming task again */
+ if (!wav->streaming) {
+ gst_pad_start_task (wav->sinkpad, (GstTaskFunction) gst_wavparse_loop,
+ wav->sinkpad, NULL);
+ }
+
+ GST_PAD_STREAM_UNLOCK (wav->sinkpad);
+
+ return TRUE;
+
+ /* ERRORS */
+negative_rate:
+ {
+ GST_DEBUG_OBJECT (wav, "negative playback rates are not supported yet.");
+ return FALSE;
+ }
+no_format:
+ {
+ GST_DEBUG_OBJECT (wav, "unsupported format given, seek aborted.");
+ return FALSE;
+ }
+no_position:
+ {
+ GST_DEBUG_OBJECT (wav,
+ "Could not determine byte position for desired time");
+ return FALSE;
+ }
+}
+
+/*
+ * gst_wavparse_peek_chunk_info:
+ * @wav Wavparse object
+ * @tag holder for tag
+ * @size holder for tag size
+ *
+ * Peek the next chunk header (fourcc tag and payload size) from the
+ * adapter without flushing it.
+ *
+ * Returns: %TRUE when the chunk info (header) is available
+ */
+static gboolean
+gst_wavparse_peek_chunk_info (GstWavParse * wav, guint32 * tag, guint32 * size)
+{
+  const guint8 *header;
+
+  /* a chunk header is always 8 bytes: fourcc + little-endian size */
+  if (gst_adapter_available (wav->adapter) < 8)
+    return FALSE;
+
+  header = gst_adapter_map (wav->adapter, 8);
+  *tag = GST_READ_UINT32_LE (header);
+  *size = GST_READ_UINT32_LE (header + 4);
+  gst_adapter_unmap (wav->adapter);
+
+  GST_DEBUG ("Next chunk size is %u bytes, type %" GST_FOURCC_FORMAT, *size,
+      GST_FOURCC_ARGS (*tag));
+
+  return TRUE;
+}
+
+/*
+ * gst_wavparse_peek_chunk:
+ * @wav Wavparse object
+ * @tag holder for tag
+ * @size holder for tag size
+ *
+ * Peek enough data for one full chunk
+ *
+ * Returns: %TRUE when the full chunk is available
+ */
+static gboolean
+gst_wavparse_peek_chunk (GstWavParse * wav, guint32 * tag, guint32 * size)
+{
+ guint32 peek_size = 0;
+ guint available;
+
+ if (!gst_wavparse_peek_chunk_info (wav, tag, size))
+ return FALSE;
+
+ /* size 0 -> empty data buffer would surprise most callers,
+ * large size -> do not bother trying to squeeze that into adapter,
+ * so we throw poor man's exception, which can be caught if caller really
+ * wants to handle 0 size chunk */
+ if (!(*size) || (*size) >= (1 << 30)) {
+ GST_INFO ("Invalid/unexpected chunk size %u for tag %" GST_FOURCC_FORMAT,
+ *size, GST_FOURCC_ARGS (*tag));
+ /* chain should give up */
+ wav->abort_buffering = TRUE;
+ return FALSE;
+ }
+ /* RIFF chunks are padded to even sizes */
+ peek_size = (*size + 1) & ~1;
+ available = gst_adapter_available (wav->adapter);
+
+ if (available >= (8 + peek_size)) {
+ return TRUE;
+ } else {
+ GST_LOG ("but only %u bytes available now", available);
+ return FALSE;
+ }
+}
+
+/*
+ * gst_wavparse_calculate_duration:
+ * @wav: wavparse object
+ *
+ * Calculate duration on demand and store in @wav. Prefer bps, but use fact as a
+ * fallback.
+ *
+ * Returns: %TRUE if duration is available.
+ */
+static gboolean
+gst_wavparse_calculate_duration (GstWavParse * wav)
+{
+ /* already computed by an earlier call */
+ if (wav->duration > 0)
+ return TRUE;
+
+ if (wav->bps > 0) {
+ GST_INFO_OBJECT (wav, "Got datasize %" G_GUINT64_FORMAT, wav->datasize);
+ wav->duration =
+ gst_util_uint64_scale_ceil (wav->datasize, GST_SECOND,
+ (guint64) wav->bps);
+ GST_INFO_OBJECT (wav, "Got duration (bps) %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (wav->duration));
+ return TRUE;
+ } else if (wav->fact) {
+ /* fact holds the total sample count, rate the samples per second */
+ wav->duration =
+ gst_util_uint64_scale_ceil (GST_SECOND, wav->fact, wav->rate);
+ GST_INFO_OBJECT (wav, "Got duration (fact) %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (wav->duration));
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/* Skip over an uninteresting chunk: advance the offset past the 8-byte
+ * header plus the even-padded payload. In push mode this flushes the
+ * adapter (returning FALSE if the full chunk is not buffered yet); in
+ * pull mode the already-pulled buffer is simply released. */
+static gboolean
+gst_waveparse_ignore_chunk (GstWavParse * wav, GstBuffer * buf, guint32 tag,
+ guint32 size)
+{
+ guint flush;
+
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size))
+ return FALSE;
+ }
+ GST_DEBUG_OBJECT (wav, "Ignoring tag %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ flush = 8 + ((size + 1) & ~1);
+ wav->offset += flush;
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, flush);
+ } else {
+ gst_buffer_unref (buf);
+ }
+
+ return TRUE;
+}
+
+/*
+ * gst_wavparse_cue_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse cue chunk from @data to wav->cues. Each cue point is 24 bytes,
+ * preceded by a 4-byte count.
+ *
+ * Returns: %TRUE when cue chunk is available
+ */
+static gboolean
+gst_wavparse_cue_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+{
+  guint32 i, ncues;
+  GList *cues = NULL;
+  GstWavParseCue *cue;
+
+  if (wav->cues) {
+    GST_WARNING_OBJECT (wav, "found another cue's");
+    return TRUE;
+  }
+
+  /* need at least the cue-point count itself */
+  if (size < 4) {
+    GST_WARNING_OBJECT (wav, "broken file %d", size);
+    return FALSE;
+  }
+
+  ncues = GST_READ_UINT32_LE (data);
+
+  /* overflow-safe form of "size < 4 + ncues * 24": a huge ncues must not
+   * wrap the 32-bit multiplication and defeat the bounds check */
+  if (ncues > (size - 4) / 24) {
+    GST_WARNING_OBJECT (wav, "broken file %d %d", size, ncues);
+    return FALSE;
+  }
+
+  /* parse data */
+  data += 4;
+  for (i = 0; i < ncues; i++) {
+    cue = g_new0 (GstWavParseCue, 1);
+    cue->id = GST_READ_UINT32_LE (data);
+    cue->position = GST_READ_UINT32_LE (data + 4);
+    cue->data_chunk_id = GST_READ_UINT32_LE (data + 8);
+    cue->chunk_start = GST_READ_UINT32_LE (data + 12);
+    cue->block_start = GST_READ_UINT32_LE (data + 16);
+    cue->sample_offset = GST_READ_UINT32_LE (data + 20);
+    cues = g_list_append (cues, cue);
+    data += 24;
+  }
+
+  wav->cues = cues;
+
+  return TRUE;
+}
+
+/*
+ * gst_wavparse_labl_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse a labl sub-chunk from @data and append it to wav->labls.
+ *
+ * Returns: %TRUE when labl chunk is available
+ */
+static gboolean
+gst_wavparse_labl_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+{
+  GstWavParseLabl *labl;
+
+  /* need the 4-byte cue point id plus at least one byte of text */
+  if (size < 5)
+    return FALSE;
+
+  labl = g_new0 (GstWavParseLabl, 1);
+  labl->cue_point_id = GST_READ_UINT32_LE (data);
+  labl->text = g_strndup ((const gchar *) data + 4, size - 4);
+  wav->labls = g_list_append (wav->labls, labl);
+
+  return TRUE;
+}
+
+/*
+ * gst_wavparse_note_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse a note sub-chunk from @data and append it to wav->notes.
+ *
+ * Returns: %TRUE when note chunk is available
+ */
+static gboolean
+gst_wavparse_note_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+{
+  GstWavParseNote *note;
+
+  /* need the 4-byte cue point id plus at least one byte of text */
+  if (size < 5)
+    return FALSE;
+
+  note = g_new0 (GstWavParseNote, 1);
+  note->cue_point_id = GST_READ_UINT32_LE (data);
+  note->text = g_strndup ((const gchar *) data + 4, size - 4);
+  wav->notes = g_list_append (wav->notes, note);
+
+  return TRUE;
+}
+
+/*
+ * gst_wavparse_smpl_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse smpl chunk from @data; only the MIDI unity note is extracted
+ * and stored as the GST_TAG_MIDI_BASE_NOTE tag.
+ *
+ * Returns: %TRUE when cue chunk is available
+ */
+static gboolean
+gst_wavparse_smpl_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+{
+  guint32 note_number;
+
+  /* the MIDI unity note lives at offset 12; reject short chunks instead
+   * of reading past the provided buffer */
+  if (size < 16)
+    return FALSE;
+
+  /*
+     manufacturer_id = GST_READ_UINT32_LE (data);
+     product_id = GST_READ_UINT32_LE (data + 4);
+     sample_period = GST_READ_UINT32_LE (data + 8);
+   */
+  note_number = GST_READ_UINT32_LE (data + 12);
+  /*
+     pitch_fraction = GST_READ_UINT32_LE (data + 16);
+     SMPTE_format = GST_READ_UINT32_LE (data + 20);
+     SMPTE_offset = GST_READ_UINT32_LE (data + 24);
+     num_sample_loops = GST_READ_UINT32_LE (data + 28);
+     List of Sample Loops, 24 bytes each
+   */
+
+  if (!wav->tags)
+    wav->tags = gst_tag_list_new_empty ();
+  gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+      GST_TAG_MIDI_BASE_NOTE, (guint) note_number, NULL);
+  return TRUE;
+}
+
+/*
+ * gst_wavparse_adtl_chunk:
+ * @wav GstWavParse object
+ * @data holder for data
+ * @size holder for data size
+ *
+ * Parse an adtl LIST from @data, dispatching labl/note sub-chunks.
+ *
+ * Returns: %TRUE when adtl chunk is available
+ */
+static gboolean
+gst_wavparse_adtl_chunk (GstWavParse * wav, const guint8 * data, guint32 size)
+{
+  guint32 ltag, lsize, advance, offset = 0;
+
+  while (size >= 8) {
+    ltag = GST_READ_UINT32_LE (data + offset);
+    lsize = GST_READ_UINT32_LE (data + offset + 4);
+
+    if (lsize > (G_MAXUINT - 8) || lsize + 8 > size) {
+      GST_WARNING_OBJECT (wav, "Invalid adtl size: %u + 8 > %u", lsize, size);
+      return FALSE;
+    }
+
+    switch (ltag) {
+      case GST_RIFF_TAG_labl:
+        gst_wavparse_labl_chunk (wav, data + offset + 8, lsize);
+        break;
+      case GST_RIFF_TAG_note:
+        gst_wavparse_note_chunk (wav, data + offset + 8, lsize);
+        break;
+      default:
+        GST_WARNING_OBJECT (wav, "Unknowm adtl %" GST_FOURCC_FORMAT,
+            GST_FOURCC_ARGS (ltag));
+        GST_MEMDUMP_OBJECT (wav, "Unknowm adtl", &data[offset], lsize);
+        break;
+    }
+
+    /* sub-chunks are padded to even sizes; clamp the advance so an odd
+     * lsize that exactly fills the remaining bytes cannot underflow the
+     * unsigned @size and send the loop reading out of bounds */
+    advance = 8 + GST_ROUND_UP_2 (lsize);
+    if (advance >= size)
+      break;
+    offset += advance;
+    size -= advance;
+  }
+
+  return TRUE;
+}
+
+/* Look up the TOC entry with uid @id and return its tag list, creating
+ * and attaching an empty one when the entry exists but has no tags yet.
+ * Returns NULL when no entry matches. */
+static GstTagList *
+gst_wavparse_get_tags_toc_entry (GstToc * toc, gchar * id)
+{
+  GstTocEntry *entry;
+  GstTagList *tags;
+
+  entry = gst_toc_find_entry (toc, id);
+  if (entry == NULL)
+    return NULL;
+
+  tags = gst_toc_entry_get_tags (entry);
+  if (tags == NULL) {
+    tags = gst_tag_list_new_empty ();
+    gst_toc_entry_set_tags (entry, tags);
+  }
+
+  return tags;
+}
+
+/*
+ * gst_wavparse_create_toc:
+ * @wav GstWavParse object
+ *
+ * Create TOC from wav->cues and wav->labls (notes become comments) and
+ * push a TOC event on the src pad. Takes and releases the object lock.
+ */
+static gboolean
+gst_wavparse_create_toc (GstWavParse * wav)
+{
+ gint64 start, stop;
+ gchar *id;
+ GList *list;
+ GstWavParseCue *cue;
+ GstWavParseLabl *labl;
+ GstWavParseNote *note;
+ GstTagList *tags;
+ GstToc *toc;
+ GstTocEntry *entry = NULL, *cur_subentry = NULL, *prev_subentry = NULL;
+
+ GST_OBJECT_LOCK (wav);
+ if (wav->toc) {
+ GST_OBJECT_UNLOCK (wav);
+ GST_WARNING_OBJECT (wav, "found another TOC");
+ return FALSE;
+ }
+
+ if (!wav->cues) {
+ GST_OBJECT_UNLOCK (wav);
+ return TRUE;
+ }
+
+ /* FIXME: send CURRENT scope toc too */
+ toc = gst_toc_new (GST_TOC_SCOPE_GLOBAL);
+
+ /* add cue edition */
+ entry = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_EDITION, "cue");
+ gst_toc_entry_set_start_stop_times (entry, 0, wav->duration);
+ gst_toc_append_entry (toc, entry);
+
+ /* add tracks in cue edition */
+ list = wav->cues;
+ while (list) {
+ cue = list->data;
+ prev_subentry = cur_subentry;
+ /* previous track stop time = current track start time */
+ if (prev_subentry != NULL) {
+ gst_toc_entry_get_start_stop_times (prev_subentry, &start, NULL);
+ stop = gst_util_uint64_scale_round (cue->position, GST_SECOND, wav->rate);
+ gst_toc_entry_set_start_stop_times (prev_subentry, start, stop);
+ }
+ /* track uid is the cue id printed as 8 hex digits */
+ id = g_strdup_printf ("%08x", cue->id);
+ cur_subentry = gst_toc_entry_new (GST_TOC_ENTRY_TYPE_TRACK, id);
+ g_free (id);
+ start = gst_util_uint64_scale_round (cue->position, GST_SECOND, wav->rate);
+ stop = wav->duration;
+ gst_toc_entry_set_start_stop_times (cur_subentry, start, stop);
+ gst_toc_entry_append_sub_entry (entry, cur_subentry);
+ list = g_list_next (list);
+ }
+
+ /* add tags in tracks */
+ list = wav->labls;
+ while (list) {
+ labl = list->data;
+ id = g_strdup_printf ("%08x", labl->cue_point_id);
+ tags = gst_wavparse_get_tags_toc_entry (toc, id);
+ g_free (id);
+ if (tags != NULL) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_APPEND, GST_TAG_TITLE, labl->text,
+ NULL);
+ }
+ list = g_list_next (list);
+ }
+ list = wav->notes;
+ while (list) {
+ note = list->data;
+ id = g_strdup_printf ("%08x", note->cue_point_id);
+ tags = gst_wavparse_get_tags_toc_entry (toc, id);
+ g_free (id);
+ if (tags != NULL) {
+ gst_tag_list_add (tags, GST_TAG_MERGE_PREPEND, GST_TAG_COMMENT,
+ note->text, NULL);
+ }
+ list = g_list_next (list);
+ }
+
+ /* send data as TOC */
+ wav->toc = toc;
+
+ /* send TOC event */
+ if (wav->toc) {
+ /* wav->toc was just set above, so this always unlocks and pushes */
+ GST_OBJECT_UNLOCK (wav);
+ gst_pad_push_event (wav->srcpad, gst_event_new_toc (wav->toc, FALSE));
+ }
+
+ return TRUE;
+}
+
+/* lower bound (in bytes) for the output buffer size computed in
+ * gst_wavparse_stream_headers() */
+#define MAX_BUFFER_SIZE 4096
+
+/* Parse an RF64 'ds64' chunk: it carries 64-bit replacements for the 32-bit
+ * RIFF data size and 'fact' sample count.  Stores the values in
+ * wav->datasize and wav->fact.  Returns FALSE on a truncated chunk. */
+static gboolean
+parse_ds64 (GstWavParse * wav, GstBuffer * buf)
+{
+ GstMapInfo map;
+ guint32 dataSizeLow, dataSizeHigh;
+ guint32 sampleCountLow, sampleCountHigh;
+
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ /* we read up to and including bytes [20..23] below; reject shorter
+ * chunks instead of reading past the end of the mapped memory */
+ if (map.size < 6 * 4) {
+ GST_WARNING_OBJECT (wav, "Too small ds64 chunk (%" G_GSIZE_FORMAT ")",
+ map.size);
+ gst_buffer_unmap (buf, &map);
+ return FALSE;
+ }
+ dataSizeLow = GST_READ_UINT32_LE (map.data + 2 * 4);
+ dataSizeHigh = GST_READ_UINT32_LE (map.data + 3 * 4);
+ sampleCountLow = GST_READ_UINT32_LE (map.data + 4 * 4);
+ sampleCountHigh = GST_READ_UINT32_LE (map.data + 5 * 4);
+ gst_buffer_unmap (buf, &map);
+ /* 0xFFFFFFFF marks "value not present" */
+ if (dataSizeHigh != 0xFFFFFFFF && dataSizeLow != 0xFFFFFFFF) {
+ wav->datasize = ((guint64) dataSizeHigh << 32) | dataSizeLow;
+ }
+ if (sampleCountHigh != 0xFFFFFFFF && sampleCountLow != 0xFFFFFFFF) {
+ wav->fact = ((guint64) sampleCountHigh << 32) | sampleCountLow;
+ }
+
+ /* datasize and fact are unsigned 64-bit values; use the unsigned
+ * format macro, consistent with the rest of the file */
+ GST_DEBUG_OBJECT (wav, "Got 'ds64' TAG, datasize : %" G_GUINT64_FORMAT
+ " fact: %" G_GUINT64_FORMAT, wav->datasize, wav->fact);
+ return TRUE;
+}
+
+/* Parse the wav headers in either push (streaming/adapter) or pull mode:
+ * first find the mandatory 'fmt ' chunk (handling an optional RF64 'ds64'
+ * chunk on the way) and derive the output caps from it, then walk the
+ * remaining chunks (data/fact/acid/LIST/cue/smpl) until 'data' is found.
+ * Finally configures the segment, performs any pending seek and computes a
+ * reasonable output buffer size. */
+static GstFlowReturn
+gst_wavparse_stream_headers (GstWavParse * wav)
+{
+ GstFlowReturn res = GST_FLOW_OK;
+ GstBuffer *buf = NULL;
+ gst_riff_strf_auds *header = NULL;
+ guint32 tag, size;
+ gboolean gotdata = FALSE;
+ GstCaps *caps = NULL;
+ gchar *codec_name = NULL;
+ gint64 upstream_size = 0;
+ GstStructure *s;
+
+ /* search for "_fmt" chunk, which must be before "data" */
+ while (!wav->got_fmt) {
+ GstBuffer *extra;
+
+ if (wav->streaming) {
+ /* push mode: need the full chunk in the adapter before continuing */
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size))
+ return res;
+
+ gst_adapter_flush (wav->adapter, 8);
+ wav->offset += 8;
+
+ if (size) {
+ buf = gst_adapter_take_buffer (wav->adapter, size);
+ /* chunks are 2-byte aligned; skip the pad byte of odd-sized chunks */
+ if (size & 1)
+ gst_adapter_flush (wav->adapter, 1);
+ wav->offset += GST_ROUND_UP_2 (size);
+ } else {
+ buf = gst_buffer_new ();
+ }
+ } else {
+ if ((res = gst_riff_read_chunk (GST_ELEMENT_CAST (wav), wav->sinkpad,
+ &wav->offset, &tag, &buf)) != GST_FLOW_OK)
+ return res;
+ }
+
+ if (tag == GST_RS64_TAG_DS64) {
+ /* RF64 64-bit size info.  NOTE(review): buf does not appear to be
+ * unreffed on this path -- check for a buffer leak */
+ if (!parse_ds64 (wav, buf))
+ goto fail;
+ else
+ continue;
+ }
+
+ if (tag != GST_RIFF_TAG_fmt) {
+ GST_DEBUG_OBJECT (wav, "skipping %" GST_FOURCC_FORMAT " chunk",
+ GST_FOURCC_ARGS (tag));
+ gst_buffer_unref (buf);
+ buf = NULL;
+ continue;
+ }
+
+ if (!(gst_riff_parse_strf_auds (GST_ELEMENT_CAST (wav), buf, &header,
+ &extra)))
+ goto parse_header_error;
+
+ buf = NULL; /* parse_strf_auds() took ownership of buffer */
+
+ /* do sanity checks of header fields */
+ if (header->channels == 0)
+ goto no_channels;
+ if (header->rate == 0)
+ goto no_rate;
+
+ GST_DEBUG_OBJECT (wav, "creating the caps");
+
+ /* Note: gst_riff_create_audio_caps might need to fix values in
+ * the header header depending on the format, so call it first */
+ /* FIXME: Need to handle the channel reorder map */
+ caps = gst_riff_create_audio_caps (header->format, NULL, header, extra,
+ NULL, &codec_name, NULL);
+
+ if (extra)
+ gst_buffer_unref (extra);
+
+ if (!caps)
+ goto unknown_format;
+
+ /* If we got raw audio from upstream, we remove the codec_data field,
+ * which may have been added if the wav header included an extended
+ * chunk. We want to keep it for non raw audio.
+ */
+ s = gst_caps_get_structure (caps, 0);
+ if (s && gst_structure_has_name (s, "audio/x-raw")) {
+ gst_structure_remove_field (s, "codec_data");
+ }
+
+ /* do more sanity checks of header fields
+ * (these can be sanitized by gst_riff_create_audio_caps()
+ */
+ wav->format = header->format;
+ wav->rate = header->rate;
+ wav->channels = header->channels;
+ wav->blockalign = header->blockalign;
+ wav->depth = header->bits_per_sample;
+ wav->av_bps = header->av_bps;
+ wav->vbr = FALSE;
+
+ g_free (header);
+ header = NULL;
+
+ /* do format specific handling */
+ switch (wav->format) {
+ case GST_RIFF_WAVE_FORMAT_MPEGL12:
+ case GST_RIFF_WAVE_FORMAT_MPEGL3:
+ {
+ /* Note: workaround for mp2/mp3 embedded in wav, that relies on the
+ * bitrate inside the mpeg stream */
+ GST_INFO ("resetting bps from %u to 0 for mp2/3", wav->av_bps);
+ wav->bps = 0;
+ break;
+ }
+ case GST_RIFF_WAVE_FORMAT_PCM:
+ if (wav->blockalign > wav->channels * ((wav->depth + 7) / 8))
+ goto invalid_blockalign;
+ /* fall through */
+ default:
+ if (wav->av_bps > wav->blockalign * wav->rate)
+ goto invalid_bps;
+ /* use the configured bps */
+ wav->bps = wav->av_bps;
+ break;
+ }
+
+ wav->width = (wav->blockalign * 8) / wav->channels;
+ wav->bytes_per_sample = wav->channels * wav->width / 8;
+
+ if (wav->bytes_per_sample <= 0)
+ goto no_bytes_per_sample;
+
+ GST_DEBUG_OBJECT (wav, "blockalign = %u", (guint) wav->blockalign);
+ GST_DEBUG_OBJECT (wav, "width = %u", (guint) wav->width);
+ GST_DEBUG_OBJECT (wav, "depth = %u", (guint) wav->depth);
+ GST_DEBUG_OBJECT (wav, "av_bps = %u", (guint) wav->av_bps);
+ GST_DEBUG_OBJECT (wav, "frequency = %u", (guint) wav->rate);
+ GST_DEBUG_OBJECT (wav, "channels = %u", (guint) wav->channels);
+ GST_DEBUG_OBJECT (wav, "bytes_per_sample = %u", wav->bytes_per_sample);
+
+ /* bps can be 0 when we don't have a valid bitrate (mostly for compressed
+ * formats). This will make the element output a BYTE format segment and
+ * will not timestamp the outgoing buffers.
+ */
+ GST_DEBUG_OBJECT (wav, "bps = %u", (guint) wav->bps);
+
+ GST_DEBUG_OBJECT (wav, "caps = %" GST_PTR_FORMAT, caps);
+
+ /* create pad later so we can sniff the first few bytes
+ * of the real data and correct our caps if necessary */
+ gst_caps_replace (&wav->caps, caps);
+ gst_caps_replace (&caps, NULL);
+
+ wav->got_fmt = TRUE;
+
+ if (wav->tags == NULL)
+ wav->tags = gst_tag_list_new_empty ();
+
+ {
+ GstCaps *templ_caps = gst_pad_get_pad_template_caps (wav->sinkpad);
+ gst_pb_utils_add_codec_description_to_tag_list (wav->tags,
+ GST_TAG_CONTAINER_FORMAT, templ_caps);
+ gst_caps_unref (templ_caps);
+ }
+
+ /* If bps is nonzero, then we do have a valid bitrate that can be
+ * announced in a tag list. */
+ if (wav->bps) {
+ guint bitrate = wav->bps * 8;
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BITRATE, bitrate, NULL);
+ }
+
+ if (codec_name) {
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, codec_name, NULL);
+
+ g_free (codec_name);
+ codec_name = NULL;
+ }
+
+ }
+
+ gst_pad_peer_query_duration (wav->sinkpad, GST_FORMAT_BYTES, &upstream_size);
+ GST_DEBUG_OBJECT (wav, "upstream size %" G_GUINT64_FORMAT, upstream_size);
+
+ /* loop headers until we get data */
+ while (!gotdata) {
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk_info (wav, &tag, &size))
+ goto exit;
+ } else {
+ GstMapInfo map;
+
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset, 8,
+ &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ tag = GST_READ_UINT32_LE (map.data);
+ size = GST_READ_UINT32_LE (map.data + 4);
+ gst_buffer_unmap (buf, &map);
+ }
+
+ GST_INFO_OBJECT (wav,
+ "Got TAG: %" GST_FOURCC_FORMAT ", offset %" G_GUINT64_FORMAT ", size %"
+ G_GUINT32_FORMAT, GST_FOURCC_ARGS (tag), wav->offset, size);
+
+ /* Maximum valid size is INT_MAX */
+ if (size & 0x80000000) {
+ GST_WARNING_OBJECT (wav, "Invalid size, clipping to 0x7fffffff");
+ size = 0x7fffffff;
+ }
+
+ /* Clip to upstream size if known */
+ if (upstream_size > 0 && size + wav->offset > upstream_size) {
+ GST_WARNING_OBJECT (wav, "Clipping chunk size to file size");
+ g_assert (upstream_size >= wav->offset);
+ size = upstream_size - wav->offset;
+ }
+
+ /* wav is a st00pid format, we don't know for sure where data starts.
+ * So we have to go bit by bit until we find the 'data' header
+ */
+ switch (tag) {
+ case GST_RIFF_TAG_data:{
+ guint64 size64;
+
+ GST_DEBUG_OBJECT (wav, "Got 'data' TAG, size : %u", size);
+ size64 = size;
+ if (wav->ignore_length) {
+ GST_DEBUG_OBJECT (wav, "Ignoring length");
+ size64 = 0;
+ }
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, 8);
+ gotdata = TRUE;
+ } else {
+ gst_buffer_unref (buf);
+ }
+ wav->offset += 8;
+ wav->datastart = wav->offset;
+ /* use size from ds64 chunk if available */
+ /* NOTE(review): size was clipped to <= 0x7fffffff above, so this
+ * comparison against -1 looks unreachable here -- verify */
+ if (size64 == -1 && wav->datasize > 0) {
+ GST_DEBUG_OBJECT (wav, "Using ds64 datasize");
+ size64 = wav->datasize;
+ }
+ wav->chunk_size = size64;
+
+ /* If size is zero, then the data chunk probably actually extends to
+ the end of the file */
+ if (size64 == 0 && upstream_size) {
+ size64 = upstream_size - wav->datastart;
+ }
+ /* Or the file might be truncated */
+ else if (upstream_size) {
+ size64 = MIN (size64, (upstream_size - wav->datastart));
+ }
+ wav->datasize = size64;
+ wav->dataleft = size64;
+ wav->end_offset = size64 + wav->datastart;
+ if (!wav->streaming) {
+ /* We will continue parsing tags 'till end */
+ wav->offset += size64;
+ }
+ GST_DEBUG_OBJECT (wav, "datasize = %" G_GUINT64_FORMAT, size64);
+ break;
+ }
+ case GST_RIFF_TAG_fact:{
+ if (wav->fact == 0 &&
+ wav->format != GST_RIFF_WAVE_FORMAT_MPEGL12 &&
+ wav->format != GST_RIFF_WAVE_FORMAT_MPEGL3) {
+ const guint data_size = 4;
+
+ GST_INFO_OBJECT (wav, "Have fact chunk");
+ if (size < data_size) {
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size)) {
+ /* need more data */
+ goto exit;
+ }
+ GST_DEBUG_OBJECT (wav, "need %u, available %u; ignoring chunk",
+ data_size, size);
+ break;
+ }
+ /* number of samples (for compressed formats) */
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
+ data = gst_adapter_map (wav->adapter, data_size);
+ wav->fact = GST_READ_UINT32_LE (data);
+ gst_adapter_unmap (wav->adapter);
+ gst_adapter_flush (wav->adapter, GST_ROUND_UP_2 (size));
+ } else {
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset + 8,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_extract (buf, 0, &wav->fact, 4);
+ wav->fact = GUINT32_FROM_LE (wav->fact);
+ gst_buffer_unref (buf);
+ }
+ GST_DEBUG_OBJECT (wav, "have fact %" G_GUINT64_FORMAT, wav->fact);
+ wav->offset += 8 + GST_ROUND_UP_2 (size);
+ break;
+ } else {
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size)) {
+ /* need more data */
+ goto exit;
+ }
+ }
+ break;
+ }
+ case GST_RIFF_TAG_acid:{
+ const gst_riff_acid *acid = NULL;
+ const guint data_size = sizeof (gst_riff_acid);
+ gfloat tempo;
+
+ GST_INFO_OBJECT (wav, "Have acid chunk");
+ if (size < data_size) {
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size)) {
+ /* need more data */
+ goto exit;
+ }
+ GST_DEBUG_OBJECT (wav, "need %u, available %u; ignoring chunk",
+ data_size, size);
+ break;
+ }
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
+ acid = (const gst_riff_acid *) gst_adapter_map (wav->adapter,
+ data_size);
+ tempo = acid->tempo;
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset + 8,
+ size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ acid = (const gst_riff_acid *) map.data;
+ tempo = acid->tempo;
+ gst_buffer_unmap (buf, &map);
+ }
+ /* send data as tags */
+ if (!wav->tags)
+ wav->tags = gst_tag_list_new_empty ();
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_BEATS_PER_MINUTE, tempo, NULL);
+
+ size = GST_ROUND_UP_2 (size);
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, size);
+ } else {
+ gst_buffer_unref (buf);
+ }
+ wav->offset += 8 + size;
+ break;
+ }
+ /* FIXME: all list tags after data are ignored in streaming mode */
+ case GST_RIFF_TAG_LIST:{
+ guint32 ltag;
+
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (gst_adapter_available (wav->adapter) < 12) {
+ goto exit;
+ }
+ data = gst_adapter_map (wav->adapter, 12);
+ ltag = GST_READ_UINT32_LE (data + 8);
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset, 12,
+ &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_extract (buf, 8, &ltag, 4);
+ ltag = GUINT32_FROM_LE (ltag);
+ }
+ switch (ltag) {
+ case GST_RIFF_LIST_INFO:{
+ const gint data_size = size - 4;
+ GstTagList *new;
+
+ GST_INFO_OBJECT (wav, "Have LIST chunk INFO size %u", data_size);
+ if (wav->streaming) {
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 12);
+ wav->offset += 12;
+ if (data_size > 0) {
+ buf = gst_adapter_take_buffer (wav->adapter, data_size);
+ if (data_size & 1)
+ gst_adapter_flush (wav->adapter, 1);
+ }
+ } else {
+ wav->offset += 12;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if (data_size > 0) {
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ }
+ }
+ if (data_size > 0) {
+ /* parse tags */
+ gst_riff_parse_info (GST_ELEMENT (wav), buf, &new);
+ if (new) {
+ GstTagList *old = wav->tags;
+ wav->tags =
+ gst_tag_list_merge (old, new, GST_TAG_MERGE_REPLACE);
+ if (old)
+ gst_tag_list_unref (old);
+ gst_tag_list_unref (new);
+ }
+ gst_buffer_unref (buf);
+ wav->offset += GST_ROUND_UP_2 (data_size);
+ }
+ break;
+ }
+ case GST_RIFF_LIST_adtl:{
+ const gint data_size = size - 4;
+
+ GST_INFO_OBJECT (wav, "Have 'adtl' LIST, size %u", data_size);
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ gst_adapter_flush (wav->adapter, 12);
+ wav->offset += 12;
+ data = gst_adapter_map (wav->adapter, data_size);
+ gst_wavparse_adtl_chunk (wav, data, data_size);
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+
+ gst_buffer_unref (buf);
+ buf = NULL;
+ wav->offset += 12;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ gst_wavparse_adtl_chunk (wav, (const guint8 *) map.data,
+ data_size);
+ gst_buffer_unmap (buf, &map);
+ }
+ wav->offset += GST_ROUND_UP_2 (data_size);
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (wav, "Ignoring LIST chunk %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (ltag));
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size))
+ /* need more data */
+ goto exit;
+ break;
+ }
+ break;
+ }
+ case GST_RIFF_TAG_cue:{
+ const guint data_size = size;
+
+ GST_DEBUG_OBJECT (wav, "Have 'cue' TAG, size : %u", data_size);
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
+ wav->offset += 8;
+ data = gst_adapter_map (wav->adapter, data_size);
+ if (!gst_wavparse_cue_chunk (wav, data, data_size)) {
+ goto header_read_error;
+ }
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+
+ wav->offset += 8;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (!gst_wavparse_cue_chunk (wav, (const guint8 *) map.data,
+ data_size)) {
+ goto header_read_error;
+ }
+ gst_buffer_unmap (buf, &map);
+ }
+ size = GST_ROUND_UP_2 (size);
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, size);
+ } else {
+ gst_buffer_unref (buf);
+ }
+ /* NOTE(review): size was already rounded up above; the second
+ * GST_ROUND_UP_2 is a harmless no-op */
+ size = GST_ROUND_UP_2 (size);
+ wav->offset += size;
+ break;
+ }
+ case GST_RIFF_TAG_smpl:{
+ const gint data_size = size;
+
+ GST_DEBUG_OBJECT (wav, "Have 'smpl' TAG, size : %u", data_size);
+ if (wav->streaming) {
+ const guint8 *data = NULL;
+
+ if (!gst_wavparse_peek_chunk (wav, &tag, &size)) {
+ goto exit;
+ }
+ gst_adapter_flush (wav->adapter, 8);
+ wav->offset += 8;
+ data = gst_adapter_map (wav->adapter, data_size);
+ if (!gst_wavparse_smpl_chunk (wav, data, data_size)) {
+ goto header_read_error;
+ }
+ gst_adapter_unmap (wav->adapter);
+ } else {
+ GstMapInfo map;
+
+ wav->offset += 8;
+ gst_buffer_unref (buf);
+ buf = NULL;
+ if ((res =
+ gst_pad_pull_range (wav->sinkpad, wav->offset,
+ data_size, &buf)) != GST_FLOW_OK)
+ goto header_read_error;
+ gst_buffer_map (buf, &map, GST_MAP_READ);
+ if (!gst_wavparse_smpl_chunk (wav, (const guint8 *) map.data,
+ data_size)) {
+ goto header_read_error;
+ }
+ gst_buffer_unmap (buf, &map);
+ }
+ size = GST_ROUND_UP_2 (size);
+ if (wav->streaming) {
+ gst_adapter_flush (wav->adapter, size);
+ } else {
+ gst_buffer_unref (buf);
+ }
+ size = GST_ROUND_UP_2 (size);
+ wav->offset += size;
+ break;
+ }
+ default:
+ GST_WARNING_OBJECT (wav, "Ignoring chunk %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (tag));
+ if (!gst_waveparse_ignore_chunk (wav, buf, tag, size))
+ /* need more data */
+ goto exit;
+ break;
+ }
+
+ if (upstream_size && (wav->offset >= upstream_size)) {
+ /* Now we are gone through the whole file */
+ gotdata = TRUE;
+ }
+ }
+
+ GST_DEBUG_OBJECT (wav, "Finished parsing headers");
+
+ if (wav->bps <= 0 && wav->fact) {
+#if 0
+ /* not a good idea, as for embedded mp2/mp3 we set bps to 0 earlier */
+ wav->bps =
+ (guint32) gst_util_uint64_scale ((guint64) wav->rate, wav->datasize,
+ (guint64) wav->fact);
+ GST_INFO_OBJECT (wav, "calculated bps : %u, enabling VBR", wav->bps);
+#endif
+ wav->vbr = TRUE;
+ }
+
+ if (gst_wavparse_calculate_duration (wav)) {
+ gst_segment_init (&wav->segment, GST_FORMAT_TIME);
+ if (!wav->ignore_length)
+ wav->segment.duration = wav->duration;
+ if (!wav->toc)
+ gst_wavparse_create_toc (wav);
+ } else {
+ /* no bitrate, let downstream peer do the math, we'll feed it bytes. */
+ gst_segment_init (&wav->segment, GST_FORMAT_BYTES);
+ if (!wav->ignore_length)
+ wav->segment.duration = wav->datasize;
+ }
+
+ /* now we have all the info to perform a pending seek if any, if no
+ * event, this will still do the right thing and it will also send
+ * the right newsegment event downstream. */
+ gst_wavparse_perform_seek (wav, wav->seek_event);
+ /* remove pending event */
+ gst_event_replace (&wav->seek_event, NULL);
+
+ /* we just started, we are discont */
+ wav->discont = TRUE;
+
+ wav->state = GST_WAVPARSE_DATA;
+
+ /* determine reasonable max buffer size,
+ * that is, buffers not too small either size or time wise
+ * so we do not end up with too many of them */
+ /* var abuse */
+ if (gst_wavparse_time_to_bytepos (wav, 40 * GST_MSECOND, &upstream_size))
+ wav->max_buf_size = upstream_size;
+ else
+ wav->max_buf_size = 0;
+ wav->max_buf_size = MAX (wav->max_buf_size, MAX_BUFFER_SIZE);
+ if (wav->blockalign > 0)
+ wav->max_buf_size -= (wav->max_buf_size % wav->blockalign);
+
+ GST_DEBUG_OBJECT (wav, "max buffer size %u", wav->max_buf_size);
+
+ return GST_FLOW_OK;
+
+ /* ERROR */
+exit:
+ {
+ g_free (codec_name);
+ g_free (header);
+ if (caps)
+ gst_caps_unref (caps);
+ return res;
+ }
+fail:
+ {
+ res = GST_FLOW_ERROR;
+ goto exit;
+ }
+parse_header_error:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, DEMUX, (NULL),
+ ("Couldn't parse audio header"));
+ goto fail;
+ }
+no_channels:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream claims to contain no channels - invalid data"));
+ goto fail;
+ }
+no_rate:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream with sample_rate == 0 - invalid data"));
+ goto fail;
+ }
+invalid_blockalign:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream claims blockalign = %u, which is more than %u - invalid data",
+ wav->blockalign, wav->channels * ((wav->depth + 7) / 8)));
+ goto fail;
+ }
+invalid_bps:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Stream claims av_bsp = %u, which is more than %u - invalid data",
+ wav->av_bps, wav->blockalign * wav->rate));
+ goto fail;
+ }
+no_bytes_per_sample:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, FAILED, (NULL),
+ ("Could not calculate bytes per sample - invalid data"));
+ goto fail;
+ }
+unknown_format:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, TYPE_NOT_FOUND, (NULL),
+ ("No caps found for format 0x%x, %u channels, %u Hz",
+ wav->format, wav->channels, wav->rate));
+ goto fail;
+ }
+header_read_error:
+ {
+ GST_ELEMENT_ERROR (wav, STREAM, DEMUX, (NULL),
+ ("Couldn't read in header %d (%s)", res, gst_flow_get_name (res)));
+ goto fail;
+ }
+}
+
+/*
+ * Read WAV file tag when streaming
+ */
+static GstFlowReturn
+gst_wavparse_parse_stream_init (GstWavParse * wav)
+{
+ GstBuffer *header_buf;
+
+ /* wait until the full 12-byte RIFF/WAVE file header is buffered */
+ if (gst_adapter_available (wav->adapter) < 12)
+ return GST_FLOW_OK;
+
+ /* _take flushes the data */
+ header_buf = gst_adapter_take_buffer (wav->adapter, 12);
+
+ GST_DEBUG ("Parsing wav header");
+ if (!gst_wavparse_parse_file_header (GST_ELEMENT_CAST (wav), header_buf))
+ return GST_FLOW_ERROR;
+
+ wav->offset += 12;
+ /* Go to next state */
+ wav->state = GST_WAVPARSE_HEADER;
+
+ return GST_FLOW_OK;
+}
+
+/* handle an event sent directly to the element.
+ *
+ * This event can be sent either in the READY state or the
+ * >READY state. The only event of interest really is the seek
+ * event.
+ *
+ * In the READY state we can only store the event and try to
+ * respect it when going to PAUSED. We assume we are in the
+ * READY state when our parsing state != GST_WAVPARSE_DATA.
+ *
+ * When we are streaming, we can simply perform the seek right
+ * away.
+ */
+static gboolean
+gst_wavparse_send_event (GstElement * element, GstEvent * event)
+{
+ GstWavParse *wav = GST_WAVPARSE (element);
+ gboolean handled = FALSE;
+
+ GST_DEBUG_OBJECT (wav, "received event %s", GST_EVENT_TYPE_NAME (event));
+
+ /* the only element-level event of interest is SEEK */
+ if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
+ if (wav->state == GST_WAVPARSE_DATA) {
+ /* we can handle the seek directly when streaming data */
+ handled = gst_wavparse_perform_seek (wav, event);
+ } else {
+ /* headers not parsed yet: remember the seek for later */
+ GST_DEBUG_OBJECT (wav, "queuing seek for later");
+
+ gst_event_replace (&wav->seek_event, event);
+
+ /* we always return true */
+ handled = TRUE;
+ }
+ }
+
+ gst_event_unref (event);
+ return handled;
+}
+
+/* Decide whether typefind caps found in a "raw PCM" stream are trustworthy
+ * enough to treat the stream as DTS instead. */
+static gboolean
+gst_wavparse_have_dts_caps (const GstCaps * caps, GstTypeFindProbability prob)
+{
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+
+ if (!gst_structure_has_name (s, "audio/x-dts"))
+ return FALSE;
+
+ /* typefind behavior for DTS:
+ * MAXIMUM: multiple frame syncs detected, certainly DTS
+ * LIKELY: single frame sync at offset 0. Maybe DTS?
+ * POSSIBLE: single frame sync, not at offset 0. Highly unlikely
+ * to be DTS. */
+ if (prob > GST_TYPE_FIND_LIKELY)
+ return TRUE;
+ if (prob <= GST_TYPE_FIND_POSSIBLE)
+ return FALSE;
+
+ /* for the "maybe" range, require at least valid-looking rate and
+ * channels fields before believing it */
+ /* and for extra assurance we could also check the rate from the DTS frame
+ * against the one in the wav header, but for now let's not do that */
+ return gst_structure_has_field (s, "channels")
+ && gst_structure_has_field (s, "rate");
+}
+
+/* Return a private copy of the first upstream sticky TAG list with the given
+ * scope (minus the container-format tag), or NULL if none matches.  Caller
+ * owns the returned list. */
+static GstTagList *
+gst_wavparse_get_upstream_tags (GstWavParse * wav, GstTagScope scope)
+{
+ GstEvent *ev;
+ gint idx = 0;
+
+ /* walk all sticky TAG events on the sink pad */
+ while ((ev = gst_pad_get_sticky_event (wav->sinkpad, GST_EVENT_TAG, idx++))) {
+ GstTagList *tags = NULL;
+
+ gst_event_parse_tag (ev, &tags);
+ if (tags != NULL && gst_tag_list_get_scope (tags) == scope) {
+ GstTagList *copy = gst_tag_list_copy (tags);
+
+ /* the container format tag describes the wav container itself */
+ gst_tag_list_remove_tag (copy, GST_TAG_CONTAINER_FORMAT);
+ gst_event_unref (ev);
+ return copy;
+ }
+ gst_event_unref (ev);
+ }
+ return NULL;
+}
+
+/* Finish configuring the source pad once the first data buffer is seen:
+ * typefind the buffer to catch DTS streams mislabelled as raw PCM, set the
+ * caps, push any pending start segment, and push the merged upstream/header
+ * tag list downstream.  Consumes wav->tags. */
+static void
+gst_wavparse_add_src_pad (GstWavParse * wav, GstBuffer * buf)
+{
+ GstStructure *s;
+ GstTagList *tags, *utags;
+
+ GST_DEBUG_OBJECT (wav, "adding src pad");
+
+ g_assert (wav->caps != NULL);
+
+ /* only sniff raw PCM streams, and only at the very first buffer */
+ s = gst_caps_get_structure (wav->caps, 0);
+ if (s && gst_structure_has_name (s, "audio/x-raw") && buf != NULL
+ && (GST_BUFFER_OFFSET (buf) == 0 || !GST_BUFFER_OFFSET_IS_VALID (buf))) {
+ GstTypeFindProbability prob;
+ GstCaps *tf_caps;
+
+ tf_caps = gst_type_find_helper_for_buffer (GST_OBJECT (wav), buf, &prob);
+ if (tf_caps != NULL) {
+ GST_LOG ("typefind caps = %" GST_PTR_FORMAT ", P=%d", tf_caps, prob);
+ if (gst_wavparse_have_dts_caps (tf_caps, prob)) {
+ GST_INFO_OBJECT (wav, "Found DTS marker in file marked as raw PCM");
+ gst_caps_unref (wav->caps);
+ wav->caps = tf_caps;
+
+ gst_tag_list_add (wav->tags, GST_TAG_MERGE_REPLACE,
+ GST_TAG_AUDIO_CODEC, "dts", NULL);
+ } else {
+ GST_DEBUG_OBJECT (wav, "found caps %" GST_PTR_FORMAT " for stream "
+ "marked as raw PCM audio, but ignoring for now", tf_caps);
+ gst_caps_unref (tf_caps);
+ }
+ }
+ }
+
+ gst_pad_set_caps (wav->srcpad, wav->caps);
+
+ /* caps must be set before the segment event can be sent */
+ if (wav->start_segment) {
+ GST_DEBUG_OBJECT (wav, "Send start segment event on newpad");
+ gst_pad_push_event (wav->srcpad, wav->start_segment);
+ wav->start_segment = NULL;
+ }
+
+ /* upstream tags, e.g. from id3/ape tag before the wav file; assume for now
+ * that there'll be only one scope/type of tag list from upstream, if any */
+ utags = gst_wavparse_get_upstream_tags (wav, GST_TAG_SCOPE_GLOBAL);
+ if (utags == NULL)
+ utags = gst_wavparse_get_upstream_tags (wav, GST_TAG_SCOPE_STREAM);
+
+ /* if there's a tag upstream it's probably been added to override the
+ * tags from inside the wav header, so keep upstream tags if in doubt */
+ tags = gst_tag_list_merge (utags, wav->tags, GST_TAG_MERGE_KEEP);
+
+ /* the merged list owns the data now; drop our references */
+ if (wav->tags != NULL) {
+ gst_tag_list_unref (wav->tags);
+ wav->tags = NULL;
+ }
+
+ if (utags != NULL)
+ gst_tag_list_unref (utags);
+
+ /* send tags downstream, if any */
+ if (tags != NULL)
+ gst_pad_push_event (wav->srcpad, gst_event_new_tag (tags));
+}
+
+/* Produce the next chunk of sample data: take it from the adapter (push
+ * mode, looping until the adapter runs dry) or pull it from upstream (pull
+ * mode), align it to blockalign/sample boundaries, timestamp it and push it
+ * on the source pad.  @flushing forces out a final short buffer at EOS.
+ * Returns GST_FLOW_EOS once all data has been consumed. */
+static GstFlowReturn
+gst_wavparse_stream_data (GstWavParse * wav, gboolean flushing)
+{
+ GstBuffer *buf = NULL;
+ GstFlowReturn res = GST_FLOW_OK;
+ guint64 desired, obtained;
+ GstClockTime timestamp, next_timestamp, duration;
+ guint64 pos, nextpos;
+
+iterate_adapter:
+ GST_LOG_OBJECT (wav,
+ "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT " , dataleft: %"
+ G_GINT64_FORMAT, wav->offset, wav->end_offset, wav->dataleft);
+
+ if ((wav->dataleft == 0 || wav->dataleft < wav->blockalign)) {
+ /* In case chunk size is not declared in the beginning get size from the
+ * file size directly */
+ if (wav->chunk_size == 0) {
+ gint64 upstream_size = 0;
+
+ /* Get the size of the file */
+ if (!gst_pad_peer_query_duration (wav->sinkpad, GST_FORMAT_BYTES,
+ &upstream_size))
+ goto found_eos;
+
+ if (upstream_size < wav->offset + wav->datastart)
+ goto found_eos;
+
+ /* If file has updated since the beginning continue reading the file */
+ wav->dataleft = upstream_size - wav->offset - wav->datastart;
+ wav->end_offset = upstream_size;
+
+ /* Get the next n bytes and output them, if we can */
+ if (wav->dataleft == 0 || wav->dataleft < wav->blockalign)
+ goto found_eos;
+ } else {
+ goto found_eos;
+ }
+ }
+
+ /* scale the amount of data by the segment rate so we get equal
+ * amounts of data regardless of the playback rate */
+ desired =
+ MIN (gst_guint64_to_gdouble (wav->dataleft),
+ wav->max_buf_size * ABS (wav->segment.rate));
+
+ /* round down to a whole number of blocks */
+ if (desired >= wav->blockalign && wav->blockalign > 0)
+ desired -= (desired % wav->blockalign);
+
+ GST_LOG_OBJECT (wav, "Fetching %" G_GINT64_FORMAT " bytes of data "
+ "from the sinkpad", desired);
+
+ if (wav->streaming) {
+ guint avail = gst_adapter_available (wav->adapter);
+ guint extra;
+
+ /* flush some bytes if evil upstream sends segment that starts
+ * before data or does is not send sample aligned segment */
+ if (G_LIKELY (wav->offset >= wav->datastart)) {
+ extra = (wav->offset - wav->datastart) % wav->bytes_per_sample;
+ } else {
+ extra = wav->datastart - wav->offset;
+ }
+
+ if (G_UNLIKELY (extra)) {
+ extra = wav->bytes_per_sample - extra;
+ if (extra <= avail) {
+ GST_DEBUG_OBJECT (wav, "flushing %u bytes to sample boundary", extra);
+ gst_adapter_flush (wav->adapter, extra);
+ wav->offset += extra;
+ wav->dataleft -= extra;
+ goto iterate_adapter;
+ } else {
+ GST_DEBUG_OBJECT (wav, "flushing %u bytes", avail);
+ gst_adapter_clear (wav->adapter);
+ wav->offset += avail;
+ wav->dataleft -= avail;
+ return GST_FLOW_OK;
+ }
+ }
+
+ if (avail < desired) {
+ GST_LOG_OBJECT (wav, "Got only %u bytes of data from the sinkpad", avail);
+
+ /* If we are at the end of the stream, we need to flush whatever we have left */
+ if (avail > 0 && flushing) {
+ if (avail >= wav->blockalign && wav->blockalign > 0) {
+ avail -= (avail % wav->blockalign);
+ buf = gst_adapter_take_buffer (wav->adapter, avail);
+ } else {
+ return GST_FLOW_OK;
+ }
+ } else {
+ return GST_FLOW_OK;
+ }
+ } else {
+ buf = gst_adapter_take_buffer (wav->adapter, desired);
+ }
+ } else {
+ if ((res = gst_pad_pull_range (wav->sinkpad, wav->offset,
+ desired, &buf)) != GST_FLOW_OK)
+ goto pull_error;
+
+ /* we may get a short buffer at the end of the file */
+ if (gst_buffer_get_size (buf) < desired) {
+ gsize size = gst_buffer_get_size (buf);
+
+ GST_LOG_OBJECT (wav, "Got only %" G_GSIZE_FORMAT " bytes of data", size);
+ if (size >= wav->blockalign) {
+ if (wav->blockalign > 0) {
+ buf = gst_buffer_make_writable (buf);
+ gst_buffer_resize (buf, 0, size - (size % wav->blockalign));
+ }
+ } else {
+ gst_buffer_unref (buf);
+ goto found_eos;
+ }
+ }
+ }
+
+ obtained = gst_buffer_get_size (buf);
+
+ /* our positions in bytes */
+ pos = wav->offset - wav->datastart;
+ nextpos = pos + obtained;
+
+ /* update offsets, does not overflow. */
+ buf = gst_buffer_make_writable (buf);
+ GST_BUFFER_OFFSET (buf) = pos / wav->bytes_per_sample;
+ GST_BUFFER_OFFSET_END (buf) = nextpos / wav->bytes_per_sample;
+
+ /* first chunk of data? create the source pad. We do this only here so
+ * we can detect broken .wav files with dts disguised as raw PCM (sigh) */
+ if (G_UNLIKELY (wav->first)) {
+ wav->first = FALSE;
+ /* this will also push the segment events */
+ gst_wavparse_add_src_pad (wav, buf);
+ } else {
+ /* If we have a pending start segment, send it now. */
+ if (G_UNLIKELY (wav->start_segment != NULL)) {
+ gst_pad_push_event (wav->srcpad, wav->start_segment);
+ wav->start_segment = NULL;
+ }
+ }
+
+ if (wav->bps > 0) {
+ /* and timestamps if we have a bitrate, be careful for overflows */
+ timestamp =
+ gst_util_uint64_scale_ceil (pos, GST_SECOND, (guint64) wav->bps);
+ next_timestamp =
+ gst_util_uint64_scale_ceil (nextpos, GST_SECOND, (guint64) wav->bps);
+ duration = next_timestamp - timestamp;
+
+ /* update current running segment position */
+ if (G_LIKELY (next_timestamp >= wav->segment.start))
+ wav->segment.position = next_timestamp;
+ } else if (wav->fact) {
+ /* derive an average byte rate from the fact sample count */
+ guint64 bps =
+ gst_util_uint64_scale_int (wav->datasize, wav->rate, wav->fact);
+ /* and timestamps if we have a bitrate, be careful for overflows */
+ timestamp = gst_util_uint64_scale_ceil (pos, GST_SECOND, bps);
+ next_timestamp = gst_util_uint64_scale_ceil (nextpos, GST_SECOND, bps);
+ duration = next_timestamp - timestamp;
+ } else {
+ /* no bitrate, all we know is that the first sample has timestamp 0, all
+ * other positions and durations have unknown timestamp. */
+ if (pos == 0)
+ timestamp = 0;
+ else
+ timestamp = GST_CLOCK_TIME_NONE;
+ duration = GST_CLOCK_TIME_NONE;
+ /* update current running segment position with byte offset */
+ if (G_LIKELY (nextpos >= wav->segment.start))
+ wav->segment.position = nextpos;
+ }
+ if ((pos > 0) && wav->vbr) {
+ /* don't set timestamps for VBR files if it's not the first buffer */
+ timestamp = GST_CLOCK_TIME_NONE;
+ duration = GST_CLOCK_TIME_NONE;
+ }
+ if (wav->discont) {
+ GST_DEBUG_OBJECT (wav, "marking DISCONT");
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
+ wav->discont = FALSE;
+ }
+
+ GST_BUFFER_TIMESTAMP (buf) = timestamp;
+ GST_BUFFER_DURATION (buf) = duration;
+
+ GST_LOG_OBJECT (wav,
+ "Got buffer. timestamp:%" GST_TIME_FORMAT " , duration:%" GST_TIME_FORMAT
+ ", size:%" G_GSIZE_FORMAT, GST_TIME_ARGS (timestamp),
+ GST_TIME_ARGS (duration), gst_buffer_get_size (buf));
+
+ if ((res = gst_pad_push (wav->srcpad, buf)) != GST_FLOW_OK)
+ goto push_error;
+
+ /* advance the read position, clamping at the end of the data chunk */
+ if (obtained < wav->dataleft) {
+ wav->offset += obtained;
+ wav->dataleft -= obtained;
+ } else {
+ wav->offset += wav->dataleft;
+ wav->dataleft = 0;
+ }
+
+ /* Iterate until need more data, so adapter size won't grow */
+ if (wav->streaming) {
+ GST_LOG_OBJECT (wav,
+ "offset: %" G_GINT64_FORMAT " , end: %" G_GINT64_FORMAT, wav->offset,
+ wav->end_offset);
+ goto iterate_adapter;
+ }
+ return res;
+
+ /* ERROR */
+found_eos:
+ {
+ GST_DEBUG_OBJECT (wav, "found EOS");
+ return GST_FLOW_EOS;
+ }
+pull_error:
+ {
+ /* check if we got EOS */
+ if (res == GST_FLOW_EOS)
+ goto found_eos;
+
+ GST_WARNING_OBJECT (wav,
+ "Error getting %" G_GINT64_FORMAT " bytes from the "
+ "sinkpad (dataleft = %" G_GINT64_FORMAT ")", desired, wav->dataleft);
+ return res;
+ }
+push_error:
+ {
+ GST_INFO_OBJECT (wav,
+ "Error pushing on srcpad %s:%s, reason %s, is linked? = %d",
+ GST_DEBUG_PAD_NAME (wav->srcpad), gst_flow_get_name (res),
+ gst_pad_is_linked (wav->srcpad));
+ return res;
+ }
+}
+
+/* Pull-mode task function: drives the START -> HEADER -> DATA state
+ * machine, reading from upstream and pushing parsed audio downstream.
+ * On any non-OK flow return the task pauses itself and performs the
+ * appropriate EOS / segment-done / error handling. */
+static void
+gst_wavparse_loop (GstPad * pad)
+{
+  GstFlowReturn ret;
+  GstWavParse *wav = GST_WAVPARSE (GST_PAD_PARENT (pad));
+  GstEvent *event;
+  gchar *stream_id;
+
+  GST_LOG_OBJECT (wav, "process data");
+
+  switch (wav->state) {
+    case GST_WAVPARSE_START:
+      GST_INFO_OBJECT (wav, "GST_WAVPARSE_START");
+      if ((ret = gst_wavparse_stream_init (wav)) != GST_FLOW_OK)
+        goto pause;
+
+      /* stream-start must be the first event on the source pad */
+      stream_id =
+          gst_pad_create_stream_id (wav->srcpad, GST_ELEMENT_CAST (wav), NULL);
+      event = gst_event_new_stream_start (stream_id);
+      gst_event_set_group_id (event, gst_util_group_id_next ());
+      gst_pad_push_event (wav->srcpad, event);
+      g_free (stream_id);
+
+      wav->state = GST_WAVPARSE_HEADER;
+      /* fall-through */
+
+    case GST_WAVPARSE_HEADER:
+      GST_INFO_OBJECT (wav, "GST_WAVPARSE_HEADER");
+      if ((ret = gst_wavparse_stream_headers (wav)) != GST_FLOW_OK)
+        goto pause;
+
+      wav->state = GST_WAVPARSE_DATA;
+      GST_INFO_OBJECT (wav, "GST_WAVPARSE_DATA");
+      /* fall-through */
+
+    case GST_WAVPARSE_DATA:
+      if ((ret = gst_wavparse_stream_data (wav, FALSE)) != GST_FLOW_OK)
+        goto pause;
+      break;
+    default:
+      g_assert_not_reached ();
+  }
+  return;
+
+  /* ERRORS */
+pause:
+  {
+    const gchar *reason = gst_flow_get_name (ret);
+
+    GST_DEBUG_OBJECT (wav, "pausing task, reason %s", reason);
+    gst_pad_pause_task (pad);
+
+    if (ret == GST_FLOW_EOS) {
+      /* handle end-of-stream/segment */
+      /* so align our position with the end of it, if there is one
+       * this ensures a subsequent will arrive at correct base/acc time */
+      if (wav->segment.format == GST_FORMAT_TIME) {
+        if (wav->segment.rate > 0.0 &&
+            GST_CLOCK_TIME_IS_VALID (wav->segment.stop))
+          wav->segment.position = wav->segment.stop;
+        else if (wav->segment.rate < 0.0)
+          wav->segment.position = wav->segment.start;
+      }
+      /* EOS before any caps were produced means the input was not a
+       * usable WAVE stream at all */
+      if (wav->state == GST_WAVPARSE_START || !wav->caps) {
+        GST_ELEMENT_ERROR (wav, STREAM, WRONG_TYPE, (NULL),
+            ("No valid input found before end of stream"));
+        gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+      } else {
+        /* add pad before we perform EOS */
+        if (G_UNLIKELY (wav->first)) {
+          wav->first = FALSE;
+          gst_wavparse_add_src_pad (wav, NULL);
+        }
+
+        /* perform EOS logic: segment seeks get segment-done instead of EOS */
+        if (wav->segment.flags & GST_SEEK_FLAG_SEGMENT) {
+          GstClockTime stop;
+
+          if ((stop = wav->segment.stop) == -1)
+            stop = wav->segment.duration;
+
+          gst_element_post_message (GST_ELEMENT_CAST (wav),
+              gst_message_new_segment_done (GST_OBJECT_CAST (wav),
+                  wav->segment.format, stop));
+          gst_pad_push_event (wav->srcpad,
+              gst_event_new_segment_done (wav->segment.format, stop));
+        } else {
+          gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+        }
+      }
+    } else if (ret == GST_FLOW_NOT_LINKED || ret < GST_FLOW_EOS) {
+      /* for fatal errors we post an error message, post the error
+       * first so the app knows about the error first. */
+      GST_ELEMENT_FLOW_ERROR (wav, ret);
+      gst_pad_push_event (wav->srcpad, gst_event_new_eos ());
+    }
+    return;
+  }
+}
+
+/* Push-mode chain function: accumulate incoming buffers in the adapter
+ * and run the same START -> HEADER -> DATA state machine as the loop.
+ * Returns the flow result of the stage that consumed the data. */
+static GstFlowReturn
+gst_wavparse_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
+{
+  GstFlowReturn ret;
+  GstWavParse *wav = GST_WAVPARSE (parent);
+
+  GST_LOG_OBJECT (wav, "adapter_push %" G_GSIZE_FORMAT " bytes",
+      gst_buffer_get_size (buf));
+
+  /* NOTE(review): ownership of buf passes to the adapter here; the flag
+   * check below relies on the adapter keeping the buffer alive — confirm */
+  gst_adapter_push (wav->adapter, buf);
+
+  switch (wav->state) {
+    case GST_WAVPARSE_START:
+      GST_INFO_OBJECT (wav, "GST_WAVPARSE_START");
+      if ((ret = gst_wavparse_parse_stream_init (wav)) != GST_FLOW_OK)
+        goto done;
+
+      if (wav->state != GST_WAVPARSE_HEADER)
+        break;
+
+      /* otherwise fall-through */
+    case GST_WAVPARSE_HEADER:
+      GST_INFO_OBJECT (wav, "GST_WAVPARSE_HEADER");
+      if ((ret = gst_wavparse_stream_headers (wav)) != GST_FLOW_OK)
+        goto done;
+
+      /* need the fmt chunk and the start of the data chunk before we
+       * can start streaming */
+      if (!wav->got_fmt || wav->datastart == 0)
+        break;
+
+      wav->state = GST_WAVPARSE_DATA;
+      GST_INFO_OBJECT (wav, "GST_WAVPARSE_DATA");
+
+      /* fall-through */
+    case GST_WAVPARSE_DATA:
+      if (buf && GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))
+        wav->discont = TRUE;
+      if ((ret = gst_wavparse_stream_data (wav, FALSE)) != GST_FLOW_OK)
+        goto done;
+      break;
+    default:
+      g_return_val_if_reached (GST_FLOW_ERROR);
+  }
+done:
+  if (G_UNLIKELY (wav->abort_buffering)) {
+    wav->abort_buffering = FALSE;
+    ret = GST_FLOW_ERROR;
+    /* sort of demux/parse error */
+    GST_ELEMENT_ERROR (wav, STREAM, DEMUX, (NULL), ("unhandled buffer size"));
+  }
+
+  return ret;
+}
+
+/* Push any data still sitting in the adapter downstream, flushing the
+ * current segment (used on new SEGMENT events and at EOS). */
+static GstFlowReturn
+gst_wavparse_flush_data (GstWavParse * wav)
+{
+  if (gst_adapter_available (wav->adapter) == 0)
+    return GST_FLOW_OK;
+
+  return gst_wavparse_stream_data (wav, TRUE);
+}
+
+/* Handle events arriving on the sink pad.
+ *
+ * CAPS is swallowed (proper source caps come from the WAVE headers).
+ * A BYTES SEGMENT (e.g. following an upstream seek) is translated into
+ * our operating format, stored for the streaming thread and used to
+ * reconfigure offset/end_offset/dataleft.  EOS flushes leftover adapter
+ * data, or errors out if no valid input was ever seen.  FLUSH_STOP
+ * resets the segment while preserving the known duration. */
+static gboolean
+gst_wavparse_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstWavParse *wav = GST_WAVPARSE (parent);
+  gboolean ret = TRUE;
+
+  GST_LOG_OBJECT (wav, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      /* discard, we'll come up with proper src caps */
+      gst_event_unref (event);
+      break;
+    }
+    case GST_EVENT_SEGMENT:
+    {
+      gint64 start, stop, offset = 0, end_offset = -1;
+      GstSegment segment;
+
+      /* some debug output */
+      gst_event_copy_segment (event, &segment);
+      GST_DEBUG_OBJECT (wav, "received newsegment %" GST_SEGMENT_FORMAT,
+          &segment);
+
+      if (wav->state != GST_WAVPARSE_DATA) {
+        GST_DEBUG_OBJECT (wav, "still starting, eating event");
+        goto exit;
+      }
+
+      /* now we are either committed to TIME or BYTE format,
+       * and we only expect a BYTE segment, e.g. following a seek */
+      if (segment.format == GST_FORMAT_BYTES) {
+        /* handle (un)signed issues */
+        start = segment.start;
+        stop = segment.stop;
+        if (start > 0) {
+          offset = start;
+          start -= wav->datastart;
+          start = MAX (start, 0);
+        }
+        if (stop > 0) {
+          end_offset = stop;
+          stop -= wav->datastart;
+          stop = MAX (stop, 0);
+        }
+        if (wav->segment.format == GST_FORMAT_TIME) {
+          guint64 bps = wav->bps;
+
+          /* operating in format TIME, so we can convert */
+          if (!bps && wav->fact)
+            bps =
+                gst_util_uint64_scale_int (wav->datasize, wav->rate, wav->fact);
+          if (bps) {
+            /* use the local bps: wav->bps may be 0 when the rate was
+             * only derived from the 'fact' chunk (VBR streams), which
+             * would otherwise divide by zero here */
+            if (start >= 0)
+              start = gst_util_uint64_scale_ceil (start, GST_SECOND, bps);
+            if (stop >= 0)
+              stop = gst_util_uint64_scale_ceil (stop, GST_SECOND, bps);
+          }
+        }
+      } else {
+        GST_DEBUG_OBJECT (wav, "unsupported segment format, ignoring");
+        goto exit;
+      }
+
+      segment.start = start;
+      segment.stop = stop;
+
+      /* accept upstream's notion of segment and distribute along */
+      segment.format = wav->segment.format;
+      segment.time = segment.position = segment.start;
+      segment.duration = wav->segment.duration;
+      segment.base = gst_segment_to_running_time (&wav->segment,
+          GST_FORMAT_TIME, wav->segment.position);
+
+      gst_segment_copy_into (&segment, &wav->segment);
+
+      /* also store the newsegment event for the streaming thread */
+      if (wav->start_segment)
+        gst_event_unref (wav->start_segment);
+      GST_DEBUG_OBJECT (wav, "Storing newseg %" GST_SEGMENT_FORMAT, &segment);
+      wav->start_segment = gst_event_new_segment (&segment);
+
+      /* stream leftover data in current segment */
+      gst_wavparse_flush_data (wav);
+      /* and set up streaming thread for next one */
+      wav->offset = offset;
+      wav->end_offset = end_offset;
+
+      /* clamp the end offset to the end of the data chunk */
+      if (wav->datasize > 0 && (wav->end_offset == -1
+              || wav->end_offset > wav->datastart + wav->datasize))
+        wav->end_offset = wav->datastart + wav->datasize;
+
+      if (wav->end_offset != -1) {
+        wav->dataleft = wav->end_offset - wav->offset;
+      } else {
+        /* infinity; upstream will EOS when done */
+        wav->dataleft = G_MAXUINT64;
+      }
+    exit:
+      gst_event_unref (event);
+      break;
+    }
+    case GST_EVENT_EOS:
+      if (wav->state == GST_WAVPARSE_START || !wav->caps) {
+        GST_ELEMENT_ERROR (wav, STREAM, WRONG_TYPE, (NULL),
+            ("No valid input found before end of stream"));
+      } else {
+        /* add pad if needed so EOS is seen downstream */
+        if (G_UNLIKELY (wav->first)) {
+          wav->first = FALSE;
+          gst_wavparse_add_src_pad (wav, NULL);
+        }
+
+        /* stream leftover data in current segment */
+        gst_wavparse_flush_data (wav);
+      }
+
+      /* fall-through */
+    case GST_EVENT_FLUSH_STOP:
+    {
+      GstClockTime dur;
+
+      if (wav->adapter)
+        gst_adapter_clear (wav->adapter);
+      wav->discont = TRUE;
+      /* keep the duration across the segment reset */
+      dur = wav->segment.duration;
+      gst_segment_init (&wav->segment, wav->segment.format);
+      wav->segment.duration = dur;
+      /* fall-through */
+    }
+    default:
+      ret = gst_pad_event_default (wav->sinkpad, parent, event);
+      break;
+  }
+
+  return ret;
+}
+
+#if 0
+/* convert and query stuff */
+static const GstFormat *
+gst_wavparse_get_formats (GstPad * pad)
+{
+ static const GstFormat formats[] = {
+ GST_FORMAT_TIME,
+ GST_FORMAT_BYTES,
+ GST_FORMAT_DEFAULT, /* a "frame", ie a set of samples per Hz */
+ 0
+ };
+
+ return formats;
+}
+#endif
+
+/* Convert a value between BYTES, DEFAULT (samples) and TIME.
+ *
+ * Conversions need either a known bitrate (bps) or a 'fact' chunk to
+ * derive one; with neither, the function fails.  Same-format requests
+ * are passed through unchanged. */
+static gboolean
+gst_wavparse_pad_convert (GstPad * pad,
+    GstFormat src_format, gint64 src_value,
+    GstFormat * dest_format, gint64 * dest_value)
+{
+  GstWavParse *wavparse;
+  gboolean res = TRUE;
+
+  wavparse = GST_WAVPARSE (GST_PAD_PARENT (pad));
+
+  if (*dest_format == src_format) {
+    *dest_value = src_value;
+    return TRUE;
+  }
+
+  if ((wavparse->bps == 0) && !wavparse->fact)
+    goto no_bps_fact;
+
+  GST_INFO_OBJECT (wavparse, "converting value from %s to %s",
+      gst_format_get_name (src_format), gst_format_get_name (*dest_format));
+
+  switch (src_format) {
+    case GST_FORMAT_BYTES:
+      switch (*dest_format) {
+        case GST_FORMAT_DEFAULT:
+          *dest_value = src_value / wavparse->bytes_per_sample;
+          /* make sure we end up on a sample boundary */
+          /* NOTE(review): the value is already in samples after the
+           * division above, so this extra modulo looks suspect — confirm */
+          *dest_value -= *dest_value % wavparse->bytes_per_sample;
+          break;
+        case GST_FORMAT_TIME:
+          /* src_value + datastart = offset */
+          GST_INFO_OBJECT (wavparse,
+              "src=%" G_GINT64_FORMAT ", offset=%" G_GINT64_FORMAT, src_value,
+              wavparse->offset);
+          if (wavparse->bps > 0)
+            *dest_value = gst_util_uint64_scale_ceil (src_value, GST_SECOND,
+                (guint64) wavparse->bps);
+          else if (wavparse->fact) {
+            /* derive an average bitrate from the 'fact' sample count */
+            guint64 bps = gst_util_uint64_scale_int_ceil (wavparse->datasize,
+                wavparse->rate, wavparse->fact);
+
+            *dest_value =
+                gst_util_uint64_scale_int_ceil (src_value, GST_SECOND, bps);
+          } else {
+            res = FALSE;
+          }
+          break;
+        default:
+          res = FALSE;
+          goto done;
+      }
+      break;
+
+    case GST_FORMAT_DEFAULT:
+      switch (*dest_format) {
+        case GST_FORMAT_BYTES:
+          *dest_value = src_value * wavparse->bytes_per_sample;
+          break;
+        case GST_FORMAT_TIME:
+          *dest_value = gst_util_uint64_scale (src_value, GST_SECOND,
+              (guint64) wavparse->rate);
+          break;
+        default:
+          res = FALSE;
+          goto done;
+      }
+      break;
+
+    case GST_FORMAT_TIME:
+      switch (*dest_format) {
+        case GST_FORMAT_BYTES:
+          if (wavparse->bps > 0)
+            *dest_value = gst_util_uint64_scale (src_value,
+                (guint64) wavparse->bps, GST_SECOND);
+          else {
+            /* fact is non-zero here thanks to the no_bps_fact guard above */
+            guint64 bps = gst_util_uint64_scale_int (wavparse->datasize,
+                wavparse->rate, wavparse->fact);
+
+            *dest_value = gst_util_uint64_scale (src_value, bps, GST_SECOND);
+          }
+          /* make sure we end up on a sample boundary */
+          /* NOTE(review): assumes blockalign != 0 once we are in the DATA
+           * state — TODO confirm against the header-parsing code */
+          *dest_value -= *dest_value % wavparse->blockalign;
+          break;
+        case GST_FORMAT_DEFAULT:
+          *dest_value = gst_util_uint64_scale (src_value,
+              (guint64) wavparse->rate, GST_SECOND);
+          break;
+        default:
+          res = FALSE;
+          goto done;
+      }
+      break;
+
+    default:
+      res = FALSE;
+      goto done;
+  }
+
+done:
+  return res;
+
+  /* ERRORS */
+no_bps_fact:
+  {
+    GST_DEBUG_OBJECT (wavparse, "bps 0 or no fact chunk, cannot convert");
+    res = FALSE;
+    goto done;
+  }
+}
+
+/* handle queries for location and length in requested format */
+static gboolean
+gst_wavparse_pad_query (GstPad * pad, GstObject * parent, GstQuery * query)
+{
+  gboolean res = TRUE;
+  GstWavParse *wav = GST_WAVPARSE (parent);
+
+  /* only if we know */
+  if (wav->state != GST_WAVPARSE_DATA) {
+    return FALSE;
+  }
+
+  GST_LOG_OBJECT (pad, "%s query", GST_QUERY_TYPE_NAME (query));
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      gint64 curb;
+      gint64 cur;
+      GstFormat format;
+
+      /* this is not very precise, as we have pushed several buffers
+       * downstream already for prerolling */
+      curb = wav->offset - wav->datastart;
+      gst_query_parse_position (query, &format, NULL);
+      GST_INFO_OBJECT (wav, "pos query at %" G_GINT64_FORMAT, curb);
+
+      switch (format) {
+        case GST_FORMAT_BYTES:
+          format = GST_FORMAT_BYTES;
+          cur = curb;
+          break;
+        default:
+          /* convert the byte position to the requested format */
+          res = gst_wavparse_pad_convert (pad, GST_FORMAT_BYTES, curb,
+              &format, &cur);
+          break;
+      }
+      if (res)
+        gst_query_set_position (query, format, cur);
+      break;
+    }
+    case GST_QUERY_DURATION:
+    {
+      gint64 duration = 0;
+      GstFormat format;
+
+      /* with ignore-length we cannot report a meaningful duration */
+      if (wav->ignore_length) {
+        res = FALSE;
+        break;
+      }
+
+      gst_query_parse_duration (query, &format, NULL);
+
+      switch (format) {
+        case GST_FORMAT_BYTES:{
+          format = GST_FORMAT_BYTES;
+          duration = wav->datasize;
+          break;
+        }
+        case GST_FORMAT_TIME:
+          if ((res = gst_wavparse_calculate_duration (wav))) {
+            duration = wav->duration;
+          }
+          break;
+        default:
+          res = FALSE;
+          break;
+      }
+      if (res)
+        gst_query_set_duration (query, format, duration);
+      break;
+    }
+    case GST_QUERY_CONVERT:
+    {
+      gint64 srcvalue, dstvalue;
+      GstFormat srcformat, dstformat;
+
+      gst_query_parse_convert (query, &srcformat, &srcvalue,
+          &dstformat, &dstvalue);
+      res = gst_wavparse_pad_convert (pad, srcformat, srcvalue,
+          &dstformat, &dstvalue);
+      if (res)
+        gst_query_set_convert (query, srcformat, srcvalue, dstformat, dstvalue);
+      break;
+    }
+    case GST_QUERY_SEEKING:{
+      GstFormat fmt;
+      gboolean seekable = FALSE;
+
+      gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
+      if (fmt == wav->segment.format) {
+        if (wav->streaming) {
+          GstQuery *q;
+
+          /* in push mode we are only seekable if upstream is BYTE seekable */
+          q = gst_query_new_seeking (GST_FORMAT_BYTES);
+          if ((res = gst_pad_peer_query (wav->sinkpad, q))) {
+            gst_query_parse_seeking (q, &fmt, &seekable, NULL, NULL);
+            GST_LOG_OBJECT (wav, "upstream BYTE seekable %d", seekable);
+          }
+          gst_query_unref (q);
+        } else {
+          GST_LOG_OBJECT (wav, "looping => seekable");
+          seekable = TRUE;
+          res = TRUE;
+        }
+      } else if (fmt == GST_FORMAT_TIME) {
+        res = TRUE;
+      }
+      if (res) {
+        gst_query_set_seeking (query, fmt, seekable, 0, wav->segment.duration);
+      }
+      break;
+    }
+    case GST_QUERY_SEGMENT:
+    {
+      GstFormat format;
+      gint64 start, stop;
+
+      format = wav->segment.format;
+
+      start =
+          gst_segment_to_stream_time (&wav->segment, format,
+          wav->segment.start);
+      if ((stop = wav->segment.stop) == -1)
+        stop = wav->segment.duration;
+      else
+        stop = gst_segment_to_stream_time (&wav->segment, format, stop);
+
+      gst_query_set_segment (query, wav->segment.rate, format, start, stop);
+      res = TRUE;
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, parent, query);
+      break;
+  }
+  return res;
+}
+
+/* Handle events from downstream on the source pad.
+ *
+ * SEEK is serviced directly (only once we are in the DATA state),
+ * TOC_SELECT is translated into a flushing time seek to the selected
+ * entry, everything else is forwarded to the sink pad. */
+static gboolean
+gst_wavparse_srcpad_event (GstPad * pad, GstObject * parent, GstEvent * event)
+{
+  GstWavParse *wavparse = GST_WAVPARSE (parent);
+  gboolean res = FALSE;
+
+  GST_DEBUG_OBJECT (wavparse, "%s event", GST_EVENT_TYPE_NAME (event));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+      /* can only handle events when we are in the data state */
+      if (wavparse->state == GST_WAVPARSE_DATA) {
+        res = gst_wavparse_perform_seek (wavparse, event);
+      }
+      gst_event_unref (event);
+      break;
+
+    case GST_EVENT_TOC_SELECT:
+    {
+      char *uid = NULL;
+      GstTocEntry *entry = NULL;
+      GstEvent *seek_event;
+      gint64 start_pos;
+
+      if (!wavparse->toc) {
+        GST_DEBUG_OBJECT (wavparse, "no TOC to select");
+        /* we own the event (transfer-full); drop it before bailing out
+         * to avoid leaking it */
+        gst_event_unref (event);
+        return FALSE;
+      } else {
+        gst_event_parse_toc_select (event, &uid);
+        if (uid != NULL) {
+          GST_OBJECT_LOCK (wavparse);
+          entry = gst_toc_find_entry (wavparse->toc, uid);
+          if (entry == NULL) {
+            GST_OBJECT_UNLOCK (wavparse);
+            GST_WARNING_OBJECT (wavparse, "no TOC entry with given UID: %s",
+                uid);
+            res = FALSE;
+          } else {
+            gst_toc_entry_get_start_stop_times (entry, &start_pos, NULL);
+            GST_OBJECT_UNLOCK (wavparse);
+            /* jump to the start of the selected entry */
+            seek_event = gst_event_new_seek (1.0,
+                GST_FORMAT_TIME,
+                GST_SEEK_FLAG_FLUSH,
+                GST_SEEK_TYPE_SET, start_pos, GST_SEEK_TYPE_SET, -1);
+            res = gst_wavparse_perform_seek (wavparse, seek_event);
+            gst_event_unref (seek_event);
+          }
+          g_free (uid);
+        } else {
+          GST_WARNING_OBJECT (wavparse, "received empty TOC select event");
+          res = FALSE;
+        }
+      }
+      gst_event_unref (event);
+      break;
+    }
+
+    default:
+      res = gst_pad_push_event (wavparse->sinkpad, event);
+      break;
+  }
+  return res;
+}
+
+/* Decide how to activate the sink pad: pull mode (we drive the pipeline
+ * with our own task) when upstream is seekable, push mode (chain +
+ * adapter) otherwise. */
+static gboolean
+gst_wavparse_sink_activate (GstPad * sinkpad, GstObject * parent)
+{
+  GstWavParse *wav = GST_WAVPARSE (parent);
+  GstQuery *query;
+  gboolean pull_mode;
+
+  /* drop any adapter from a previous activation */
+  if (wav->adapter) {
+    gst_adapter_clear (wav->adapter);
+    g_object_unref (wav->adapter);
+    wav->adapter = NULL;
+  }
+
+  query = gst_query_new_scheduling ();
+
+  if (!gst_pad_peer_query (sinkpad, query)) {
+    gst_query_unref (query);
+    goto activate_push;
+  }
+
+  /* pull mode only makes sense when upstream supports seekable pull */
+  pull_mode = gst_query_has_scheduling_mode_with_flags (query,
+      GST_PAD_MODE_PULL, GST_SCHEDULING_FLAG_SEEKABLE);
+  gst_query_unref (query);
+
+  if (!pull_mode)
+    goto activate_push;
+
+  GST_DEBUG_OBJECT (sinkpad, "activating pull");
+  wav->streaming = FALSE;
+  return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PULL, TRUE);
+
+activate_push:
+  {
+    GST_DEBUG_OBJECT (sinkpad, "activating push");
+    wav->streaming = TRUE;
+    wav->adapter = gst_adapter_new ();
+    return gst_pad_activate_mode (sinkpad, GST_PAD_MODE_PUSH, TRUE);
+  }
+}
+
+
+/* Activate/deactivate the sink pad in the given scheduling mode.
+ * In pull mode this starts or stops the streaming task. */
+static gboolean
+gst_wavparse_sink_activate_mode (GstPad * sinkpad, GstObject * parent,
+    GstPadMode mode, gboolean active)
+{
+  if (mode == GST_PAD_MODE_PUSH)
+    return TRUE;
+
+  if (mode != GST_PAD_MODE_PULL)
+    return FALSE;
+
+  if (active) {
+    /* if we have a scheduler we can start the task */
+    return gst_pad_start_task (sinkpad, (GstTaskFunction) gst_wavparse_loop,
+        sinkpad, NULL);
+  }
+
+  return gst_pad_stop_task (sinkpad);
+}
+
+/* Standard state-change handler: chain up to the parent class and reset
+ * all parsing state once we drop back from PAUSED to READY. */
+static GstStateChangeReturn
+gst_wavparse_change_state (GstElement * element, GstStateChange transition)
+{
+  GstWavParse *wav = GST_WAVPARSE (element);
+  GstStateChangeReturn result;
+
+  /* no upward-transition work is needed; let the parent class go first */
+  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_wavparse_reset (wav);
+
+  return result;
+}
+
+/* GObject property setter; only PROP_IGNORE_LENGTH is supported. */
+static void
+gst_wavparse_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstWavParse *wav;
+
+  g_return_if_fail (GST_IS_WAVPARSE (object));
+  wav = GST_WAVPARSE (object);
+
+  if (prop_id == PROP_IGNORE_LENGTH)
+    wav->ignore_length = g_value_get_boolean (value);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (wav, prop_id, pspec);
+}
+
+/* GObject property getter; only PROP_IGNORE_LENGTH is supported. */
+static void
+gst_wavparse_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstWavParse *wav;
+
+  g_return_if_fail (GST_IS_WAVPARSE (object));
+  wav = GST_WAVPARSE (object);
+
+  if (prop_id == PROP_IGNORE_LENGTH)
+    g_value_set_boolean (value, wav->ignore_length);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (wav, prop_id, pspec);
+}
+
+/* Register the wavparse element with this plugin. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (wavparse, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ wavparse,
+ "Parse a .wav file into raw audio",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/wavparse/gstwavparse.h b/gst/wavparse/gstwavparse.h
new file mode 100644
index 0000000000..17f9f64939
--- /dev/null
+++ b/gst/wavparse/gstwavparse.h
@@ -0,0 +1,140 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Nokia Corporation, Stefan Kost <stefan.kost@nokia.com>.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_WAVPARSE_H__
+#define __GST_WAVPARSE_H__
+
+
+#include <gst/gst.h>
+#include "gst/riff/riff-ids.h"
+#include "gst/riff/riff-read.h"
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_WAVPARSE \
+ (gst_wavparse_get_type())
+#define GST_WAVPARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_WAVPARSE,GstWavParse))
+#define GST_WAVPARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_WAVPARSE,GstWavParseClass))
+#define GST_IS_WAVPARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_WAVPARSE))
+#define GST_IS_WAVPARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_WAVPARSE))
+
+typedef enum {
+ GST_WAVPARSE_START,
+ GST_WAVPARSE_HEADER,
+ GST_WAVPARSE_DATA
+} GstWavParseState;
+
+typedef struct _GstWavParse GstWavParse;
+typedef struct _GstWavParseClass GstWavParseClass;
+
+/**
+ * GstWavParse:
+ *
+ * Opaque data structure.
+ */
+struct _GstWavParse {
+ GstElement parent;
+
+ /* pads */
+ GstPad *sinkpad,*srcpad;
+
+ /* for delayed source pad creation for when
+ * we have the first chunk of data and know
+ * the format for sure */
+ GstCaps *caps;
+ GstTagList *tags;
+ GstToc *toc;
+ GList *cues;
+ GList *labls;
+ GList *notes;
+ GstEvent *start_segment;
+
+ /* WAVE decoding state */
+ GstWavParseState state;
+ gboolean abort_buffering;
+
+ /* format of audio, see defines below */
+ gint format;
+
+ /* useful audio data */
+ guint16 depth;
+ guint32 rate;
+ guint16 channels;
+ guint16 blockalign;
+ guint16 width;
+ guint32 av_bps;
+ guint64 fact;
+
+ /* real bps used or 0 when no bitrate is known */
+ guint32 bps;
+ gboolean vbr;
+
+ guint bytes_per_sample;
+ guint max_buf_size;
+
+ /* position in data part */
+ guint64 offset;
+ guint64 end_offset;
+ guint64 dataleft;
+ /* offset/length of data part */
+ guint64 datastart;
+ guint64 datasize;
+ /* duration in time */
+ guint64 duration;
+
+ /* pending seek */
+ GstEvent *seek_event;
+
+ /* For streaming */
+ GstAdapter *adapter;
+ gboolean got_fmt;
+ gboolean streaming;
+
+ /* configured segment, start/stop expressed in time or bytes */
+ GstSegment segment;
+
+ /* for late pad configuration */
+ gboolean first;
+ /* discont after seek */
+ gboolean discont;
+
+ gboolean ignore_length;
+
+ /* Size of the data as written in the chunk size */
+ guint32 chunk_size;
+};
+
+struct _GstWavParseClass {
+ GstElementClass parent_class;
+};
+
+GType gst_wavparse_get_type(void);
+
+GST_ELEMENT_REGISTER_DECLARE (wavparse);
+
+G_END_DECLS
+
+#endif /* __GST_WAVPARSE_H__ */
diff --git a/gst/wavparse/meson.build b/gst/wavparse/meson.build
new file mode 100644
index 0000000000..68cc800f74
--- /dev/null
+++ b/gst/wavparse/meson.build
@@ -0,0 +1,11 @@
+# Build the wavparse plugin (variable name fixed: was misspelled 'gstwawparse')
+gstwavparse = library('gstwavparse',
+  'gstwavparse.c',
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc, libsinc],
+  dependencies : [gstbase_dep, gstpbutils_dep, gstriff_dep, gstaudio_dep,
+    gsttag_dep, libm],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gstwavparse, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gstwavparse]
diff --git a/gst/y4m/gsty4mencode.c b/gst/y4m/gsty4mencode.c
new file mode 100644
index 0000000000..16890e7e99
--- /dev/null
+++ b/gst/y4m/gsty4mencode.c
@@ -0,0 +1,312 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ * Copyright (C) <2006> Mark Nauwelaerts <mnauw@users.sourceforge.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+/**
+ * SECTION:element-y4menc
+ * @title: y4menc
+ *
+ * Creates a YUV4MPEG2 raw video stream as defined by the mjpegtools project.
+ *
+ * ## Example launch line
+ *
+ * (write everything in one line, without the backslash characters)
+ * |[
+ * gst-launch-1.0 videotestsrc num-buffers=250 \
+ * ! 'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1' \
+ * ! y4menc ! filesink location=test.yuv
+ * ]|
+ *
+ */
+
+/* see mjpegtools/yuv4mpeg.h for yuv4mpeg format */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+#include <string.h>
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include "gsty4mencode.h"
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static GstStaticPadTemplate y4mencode_src_factory =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-yuv4mpeg, " "y4mversion = (int) 2")
+ );
+
+static GstStaticPadTemplate y4mencode_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{ IYUV, I420, Y42B, Y41B, Y444 }"))
+ );
+
+
+static void gst_y4m_encode_reset (GstY4mEncode * filter);
+
+static GstStateChangeReturn gst_y4m_encode_change_state (GstElement * element,
+ GstStateChange transition);
+
+static GstFlowReturn
+gst_y4m_encode_handle_frame (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame);
+static gboolean gst_y4m_encode_set_format (GstVideoEncoder * encoder,
+ GstVideoCodecState * state);
+
+#define gst_y4m_encode_parent_class parent_class
+G_DEFINE_TYPE (GstY4mEncode, gst_y4m_encode, GST_TYPE_VIDEO_ENCODER);
+GST_ELEMENT_REGISTER_DEFINE (y4menc, "y4menc", GST_RANK_PRIMARY,
+ GST_TYPE_Y4M_ENCODE);
+
+/* Class initialisation: install pad templates, element metadata and the
+ * GstVideoEncoder virtual methods we implement. */
+static void
+gst_y4m_encode_class_init (GstY4mEncodeClass * klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+  GstVideoEncoderClass *venc_class = GST_VIDEO_ENCODER_CLASS (klass);
+
+  element_class->change_state = GST_DEBUG_FUNCPTR (gst_y4m_encode_change_state);
+
+  gst_element_class_add_static_pad_template (element_class,
+      &y4mencode_src_factory);
+  gst_element_class_add_static_pad_template (element_class,
+      &y4mencode_sink_factory);
+
+  gst_element_class_set_static_metadata (element_class,
+      "YUV4MPEG video encoder", "Codec/Encoder/Video",
+      "Encodes a YUV frame into the yuv4mpeg format (mjpegtools)",
+      "Wim Taymans <wim.taymans@gmail.com>");
+  venc_class->set_format = gst_y4m_encode_set_format;
+  venc_class->handle_frame = gst_y4m_encode_handle_frame;
+
+}
+
+/* Instance init: restrict accepted caps to the sink template and reset
+ * the per-stream state. */
+static void
+gst_y4m_encode_init (GstY4mEncode * filter)
+{
+  GST_PAD_SET_ACCEPT_TEMPLATE (GST_VIDEO_ENCODER_SINK_PAD (filter));
+
+  /* init properties */
+  gst_y4m_encode_reset (filter);
+}
+
+/* Reset per-stream state: the YUV4MPEG2 stream header will be emitted
+ * again with the next frame. */
+static void
+gst_y4m_encode_reset (GstY4mEncode * filter)
+{
+  filter->header = FALSE;
+}
+
+/* Negotiate input format: map the raw video format to the yuv4mpeg
+ * colorspace tag, remember the video info and set fixed output caps. */
+static gboolean
+gst_y4m_encode_set_format (GstVideoEncoder * encoder,
+    GstVideoCodecState * state)
+{
+  GstY4mEncode *y4menc;
+  GstVideoInfo *info;
+  GstVideoCodecState *output_state;
+
+  y4menc = GST_Y4M_ENCODE (encoder);
+  info = &state->info;
+
+  /* NOTE(review): the sink template also advertises IYUV, but there is
+   * no IYUV case here; confirm negotiation can never yield a format
+   * this switch rejects */
+  switch (GST_VIDEO_INFO_FORMAT (info)) {
+    case GST_VIDEO_FORMAT_I420:
+      y4menc->colorspace = "420";
+      break;
+    case GST_VIDEO_FORMAT_Y42B:
+      y4menc->colorspace = "422";
+      break;
+    case GST_VIDEO_FORMAT_Y41B:
+      y4menc->colorspace = "411";
+      break;
+    case GST_VIDEO_FORMAT_Y444:
+      y4menc->colorspace = "444";
+      break;
+    default:
+      goto invalid_format;
+  }
+
+  y4menc->info = *info;
+
+  output_state =
+      gst_video_encoder_set_output_state (encoder,
+      gst_static_pad_template_get_caps (&y4mencode_src_factory), state);
+  gst_video_codec_state_unref (output_state);
+
+  return TRUE;
+
+invalid_format:
+  {
+    GST_ERROR_OBJECT (y4menc, "Invalid format");
+    return FALSE;
+  }
+
+}
+
+/* Build the one-time "YUV4MPEG2 ..." stream header as a buffer.  tff
+ * selects the top-field-first flag when the input is interlaced. */
+static inline GstBuffer *
+gst_y4m_encode_get_stream_header (GstY4mEncode * filter, gboolean tff)
+{
+  gchar interlace_flag;
+  gchar *hdr;
+
+  if (!GST_VIDEO_INFO_IS_INTERLACED (&filter->info))
+    interlace_flag = 'p';
+  else
+    interlace_flag = tff ? 't' : 'b';
+
+  hdr = g_strdup_printf ("YUV4MPEG2 C%s W%d H%d I%c F%d:%d A%d:%d\n",
+      filter->colorspace, GST_VIDEO_INFO_WIDTH (&filter->info),
+      GST_VIDEO_INFO_HEIGHT (&filter->info), interlace_flag,
+      GST_VIDEO_INFO_FPS_N (&filter->info),
+      GST_VIDEO_INFO_FPS_D (&filter->info),
+      GST_VIDEO_INFO_PAR_N (&filter->info),
+      GST_VIDEO_INFO_PAR_D (&filter->info));
+
+  /* the buffer takes ownership of the string (freed with g_free) */
+  return gst_buffer_new_wrapped (hdr, strlen (hdr));
+}
+
+/* Build the per-frame "FRAME\n" marker as a buffer. */
+static inline GstBuffer *
+gst_y4m_encode_get_frame_header (GstY4mEncode * filter)
+{
+  gchar *marker = g_strdup ("FRAME\n");
+
+  /* the buffer takes ownership of the string (freed with g_free) */
+  return gst_buffer_new_wrapped (marker, strlen (marker));
+}
+
+
+/* Encode one frame: prepend the stream header on the very first frame,
+ * then a FRAME marker plus the raw input data for every frame. */
+static GstFlowReturn
+gst_y4m_encode_handle_frame (GstVideoEncoder * encoder,
+    GstVideoCodecFrame * frame)
+{
+  GstY4mEncode *filter = GST_Y4M_ENCODE (encoder);
+  GstClockTime timestamp;
+
+  /* check we got some decent info from caps */
+  if (GST_VIDEO_INFO_FORMAT (&filter->info) == GST_VIDEO_FORMAT_UNKNOWN)
+    goto not_negotiated;
+
+  timestamp = GST_BUFFER_TIMESTAMP (frame->input_buffer);
+
+  if (G_UNLIKELY (!filter->header)) {
+    gboolean tff = FALSE;
+
+    /* field order comes from the buffer flags for interlaced input */
+    if (GST_VIDEO_INFO_IS_INTERLACED (&filter->info)) {
+      tff =
+          GST_BUFFER_FLAG_IS_SET (frame->input_buffer,
+          GST_VIDEO_BUFFER_FLAG_TFF);
+    }
+    frame->output_buffer = gst_y4m_encode_get_stream_header (filter, tff);
+    filter->header = TRUE;
+    frame->output_buffer =
+        gst_buffer_append (frame->output_buffer,
+        gst_y4m_encode_get_frame_header (filter));
+  } else {
+    frame->output_buffer = gst_y4m_encode_get_frame_header (filter);
+  }
+
+  frame->output_buffer =
+      gst_buffer_append (frame->output_buffer,
+      gst_buffer_copy (frame->input_buffer));
+
+  /* decorate */
+  frame->output_buffer = gst_buffer_make_writable (frame->output_buffer);
+  GST_BUFFER_TIMESTAMP (frame->output_buffer) = timestamp;
+
+  return gst_video_encoder_finish_frame (encoder, frame);
+
+not_negotiated:
+  {
+    GST_ELEMENT_ERROR (filter, CORE, NEGOTIATION, (NULL),
+        ("format wasn't negotiated"));
+
+    return GST_FLOW_NOT_NEGOTIATED;
+  }
+}
+
+/* State-change handler: chain up first, then clear the stream-header
+ * flag when leaving PAUSED for READY. */
+static GstStateChangeReturn
+gst_y4m_encode_change_state (GstElement * element, GstStateChange transition)
+{
+  GstY4mEncode *filter = GST_Y4M_ENCODE (element);
+  GstStateChangeReturn result;
+
+  result = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
+      (element, transition), GST_STATE_CHANGE_SUCCESS);
+  if (result != GST_STATE_CHANGE_SUCCESS)
+    return result;
+
+  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
+    gst_y4m_encode_reset (filter);
+
+  return GST_STATE_CHANGE_SUCCESS;
+}
+
+/* Register the y4menc element with this plugin. */
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+  return GST_ELEMENT_REGISTER (y4menc, plugin);
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ y4menc,
+ "Encodes a YUV frame into the yuv4mpeg format (mjpegtools)",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/gst/y4m/gsty4mencode.h b/gst/y4m/gsty4mencode.h
new file mode 100644
index 0000000000..77c5edd39a
--- /dev/null
+++ b/gst/y4m/gsty4mencode.h
@@ -0,0 +1,66 @@
+/* GStreamer
+ * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+
+#ifndef __GST_Y4MENCODE_H__
+#define __GST_Y4MENCODE_H__
+
+
+/* NOTE(review): this header uses GstVideoEncoder and GstVideoInfo but only
+ * includes <gst/gst.h>; it appears to rely on the including .c file pulling
+ * in <gst/video/video.h> / gstvideoencoder.h first -- confirm. */
+#include <gst/gst.h>
+
+G_BEGIN_DECLS
+
+/* Standard GObject boilerplate for the GstY4mEncode type. */
+#define GST_TYPE_Y4M_ENCODE \
+  (gst_y4m_encode_get_type())
+#define GST_Y4M_ENCODE(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_Y4M_ENCODE, GstY4mEncode))
+#define GST_Y4M_ENCODE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_Y4M_ENCODE, GstY4mEncodeClass))
+#define GST_Y4M_ENCODE_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_Y4M_ENCODE, GstY4mEncodeClass))
+#define GST_IS_Y4M_ENCODE(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_Y4M_ENCODE))
+#define GST_IS_Y4M_ENCODE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_Y4M_ENCODE))
+
+typedef struct _GstY4mEncode GstY4mEncode;
+typedef struct _GstY4mEncodeClass GstY4mEncodeClass;
+
+/* YUV4MPEG (y4m) encoder element instance, derived from GstVideoEncoder. */
+struct _GstY4mEncode {
+  GstVideoEncoder parent;
+
+  /* caps information */
+  GstVideoInfo info;
+
+  /* y4m colorspace token written into the stream header (e.g. a "C..."
+   * tag value) -- presumably set from the negotiated format; confirm in
+   * the set_format implementation. */
+  const gchar *colorspace;
+  /* state information */
+  /* TRUE once the one-time stream header has been emitted; cleared on
+   * reset so a new header precedes the next stream. */
+  gboolean header;
+};
+
+struct _GstY4mEncodeClass {
+  GstVideoEncoderClass parent_class;
+};
+
+GType gst_y4m_encode_get_type(void);
+
+GST_ELEMENT_REGISTER_DECLARE (y4menc);
+
+G_END_DECLS
+
+#endif /* __GST_Y4MENCODE_H__ */
diff --git a/gst/y4m/meson.build b/gst/y4m/meson.build
new file mode 100644
index 0000000000..16a7096db6
--- /dev/null
+++ b/gst/y4m/meson.build
@@ -0,0 +1,10 @@
+# Build the y4menc plugin as a shared library, generate its pkg-config
+# file, and add it to the global plugin list used for registry/test setup.
+gsty4menc = library('gsty4menc',
+  'gsty4mencode.c',
+  c_args : gst_plugins_good_args,
+  include_directories : [configinc],
+  dependencies : [gstbase_dep, gstvideo_dep],
+  install : true,
+  install_dir : plugins_install_dir,
+)
+pkgconfig.generate(gsty4menc, install_dir : plugins_pkgconfig_install_dir)
+plugins += [gsty4menc]